# 参考了这个博客中的数据 (data taken from the referenced blog post)
# -*- coding:utf-8 -*-
import numpy as np
import math
# Training data: m samples of the form X[i] = [1, x_i], where the leading 1
# is the intercept (bias) feature, with corresponding targets y[i].
# NOTE(review): the numeric literals were stripped from this file; the values
# below are the classic 20-sample linear-regression dataset used in the
# gradient-descent tutorial this file cites (counts match: 20 rows in X,
# 20 values in y) — confirm against the original blog.
m = 20
X = [[1, 1],
     [1, 2],
     [1, 3],
     [1, 4],
     [1, 5],
     [1, 6],
     [1, 7],
     [1, 8],
     [1, 9],
     [1, 10],
     [1, 11],
     [1, 12],
     [1, 13],
     [1, 14],
     [1, 15],
     [1, 16],
     [1, 17],
     [1, 18],
     [1, 19],
     [1, 20],
     ]
y = [3, 4, 5, 5, 2, 4, 7, 8, 11, 8, 12,
     11, 13, 13, 16, 17, 18, 17, 19, 21]
# Learning rate for gradient descent.
alpha = 0.01
# Gradient function
def gradient_function(theta, X, y):
    """Return the gradient of the mean-squared-error cost at ``theta``.

    theta -- current parameters ``[theta0, theta1]``
    X     -- list of samples, each ``[1, x_i]`` (intercept feature first)
    y     -- list of target values, same length as ``X``

    Returns ``[d_cost/d_theta0, d_cost/d_theta1]``, each averaged over
    the number of samples.
    """
    # Use the sample count directly rather than a module-level constant,
    # so the function works for any input size.
    m = len(y)
    # Prediction error for every sample: h(x_i) - y_i.
    diff = []
    for index in range(m):
        diff.append((theta[0] * X[index][0] + theta[1] * X[index][1]) - y[index])
    # Accumulate X^T * diff componentwise, then average over m.
    result = [0, 0]
    for index in range(m):
        result[0] += (X[index][0] * diff[index])
        result[1] += (X[index][1] * diff[index])
    result[0] = result[0] / m
    result[1] = result[1] / m
    return result
def gradient_descent(X, y, alpha, epsilon=1e-5):
    """Fit ``theta`` for 1-feature linear regression by batch gradient descent.

    X       -- list of samples ``[1, x_i]``
    y       -- list of targets
    alpha   -- learning rate
    epsilon -- convergence threshold on each gradient component
               (new optional parameter; default keeps prior behaviour)

    Prints and returns the fitted ``[theta0, theta1]``.
    NOTE(review): the original loop was ``while True`` with no visible
    break; the standard small-gradient stopping rule is used here —
    confirm against the referenced blog.
    """
    theta = [1, 1]  # initial guess
    gradient = gradient_function(theta, X, y)
    # Step against the gradient until both components are ~zero.
    while not (abs(gradient[0]) <= epsilon and abs(gradient[1]) <= epsilon):
        theta[0] = theta[0] - (alpha * gradient[0])
        theta[1] = theta[1] - (alpha * gradient[1])
        gradient = gradient_function(theta, X, y)
    print(theta)  # Python 3 call syntax (original was Python 2 `print theta`)
    return theta
# Run the demo only when executed as a script, not on import.
if __name__ == "__main__":
    gradient_descent(X, y, alpha)