天天看点

Octave实现线性回归(梯度下降)

Octave实现线性回归(梯度下降)

这几天看了吴恩达老师的视频,看到神经网络的时候想着把以前的东西巩固一下,所以用Octave实现了一下线性回归。

我这就先直接贴代码,过几天再来加注释。

function jVal = costFunction(theta)
    % Mean squared error cost for linear regression, J(theta).
    % theta : (n x 1) parameter vector (theta(1) is the bias term).
    % Returns the scalar cost over the examples in 'train3.txt'.
    %
    % NOTE(review): the scraped original lost its numeric literals; the
    % constants below (column indices, the exponent 2, the 2*m divisor)
    % are the standard reconstruction — confirm against the data file.
    len = size(theta)(1);           % number of parameters
    load('train3.txt')              % defines matrix 'data' (rows = examples)
    % Design matrix: a column of ones (bias) plus the feature columns,
    % transposed so each example is a column (matches fitFunction below).
    x = [ones(size(data,1),1) data(:,1:len-1)]';
    y = data(:,len);                % last column is the target
    m = size(x,2);                  % number of training examples
    h_y = fitFunction(x,theta)';    % predictions, as a column vector
    delta = h_y - y;                % residuals
    jVal = sum(delta.^2) / (2*m);   % standard 1/(2m) * sum of squared errors
           
function data = randData(len)
    % Generate a random (10 x len) matrix of synthetic training data.
    % len : number of columns to produce.
    % Each entry is a value in (-c*b, c*b) scaled by its column index c
    % and row index b.
    %
    % NOTE(review): the scraped original lost its numeric literals; the
    % row count (10) and the (1 - 2*rand()) centering are a plausible
    % reconstruction — confirm intended ranges.
    data = [];
    for c = 1:len
        a = 1:10;                       % one column of 10 samples
        for b = a
            a(b) = (1 - 2*rand())*c*b;  % uniform in (-c*b, c*b)
        end
        data = [data a'];               % append as a column
    end
end
           
function hx = fitFunction(x,theta)
    % Linear hypothesis h_theta(x) = theta' * x, applied to every
    % column of x at once.
    % x     : (n x m) design matrix, one example per column.
    % theta : (n x 1) parameter vector.
    % Returns a (1 x m) row vector of predictions.
    hx = (x.' * theta).';
           
function [optTheta,functionVal,exitFlag] = GradientFunction()
    % Fit the linear model by minimizing costFunction with fminunc,
    % then plot the training data and the fitted plane/line.
    % Returns: optTheta    - optimized parameter vector
    %          functionVal - cost at the optimum
    %          exitFlag    - fminunc convergence flag
    %
    % NOTE(review): the scraped original lost its numeric literals;
    % MaxIter, the theta size (3 = bias + 2 features for train3.txt)
    % and the plotting grid are reconstructed — confirm against data.
    options = optimset('GradObj','off','MaxIter',100);
    initialTheta = zeros(3,1);
    [optTheta,functionVal,exitFlag] = fminunc(@costFunction,initialTheta,options);
    load('train3.txt');                       % defines matrix 'data'
    % Training points in 3-D: two features vs. target.
    scatter3(data(:,1),data(:,2),data(:,3),'*');
    hold on
    % Evaluate the fitted model on a small grid for visualization.
    n = 10;
    x = [ones(n,1) (1:n)' (1:n)'];
    y = optTheta' * x';
    plot3(x(:,2),x(:,3),y);
           

继续阅读