The printed shape is (100, 2), but inside the function it is (100, 1)? Where is the mistake?

from playML.LinearRegression import LinearRegression
import numpy as np
import matplotlib.pyplot as plt

if __name__ == "__main__":

    x = np.random.random(size=100)
    y = x * 3. + 4. + np.random.normal(size=100)
    # plt.scatter(x, y)
    # plt.show()
    X = x.reshape(-1, 1)
    # this X_b is built only to inspect its shape; fit_gd builds its own X_b internally
    X_b = np.hstack([np.ones((len(y), 1)), X])
    print(X_b.shape)
    lin_reg = LinearRegression()
    lin_reg.fit_gd(X, y)
The fit_gd method in playML/LinearRegression.py:

    def fit_gd(self, X, y, eta=0.01, n_iters=1e4):
        assert X.shape[0] == y.shape[0], \
            "The size of X_train and y_train must be the same!"
        X_b = np.hstack([np.ones((len(y), 1)), X])
        initial_theta = np.zeros(X_b.shape[1])

        def J(theta, X_b, y):
            try:
                # note: without np.sum this returns an array, not a scalar loss
                return (y - X_b.dot(theta)) ** 2 / len(y)
            except:
                return float('inf')

        def dJ(theta, X_b, y):
            # res = np.empty(len(theta))
            # res[0] = np.sum(X_b.dot(theta) - y)
            # for i in range(1, len(theta)):
            #     res[i] = (X_b.dot(theta - y)).dot(X_b[:,i])
            # note: len(theta) divides by the number of parameters, not the number of samples
            return X_b.T.dot(X_b.dot(theta) - y) * 2 / len(theta)

        def gradient_descend(X_b, y, initial_theta, eta=0.01, n_iters=1e4, epsilon=1e-8):
            theta = initial_theta
            i_iters = 0
            while i_iters < n_iters:
                gradient = dJ(theta, X_b, y)
                last_theta = theta
                theta = theta - eta * gradient
                if abs(J(last_theta, X_b, y) - J(theta, X_b, y)) < epsilon:
                    break
                i_iters += 1
            return theta

        # X is passed here, not X_b -- this is the line the traceback below leads back to
        self._theta = gradient_descend(X, y, initial_theta, eta, n_iters=1e4, epsilon=1e-8)
        self.interception_ = self._theta[0]
        self.coef_ = self._theta[1:]
        return self

Output:

/Users/jason/anaconda3/bin/python3.7 /Users/jason/Desktop/MarchineLearning/main-gradient-descendent.py
Traceback (most recent call last):
  File "/Users/jason/Desktop/MarchineLearning/main-gradient-descendent.py", line 15, in <module>
    lin_reg.fit_gd(X,y)
  File "/Users/jason/Desktop/MarchineLearning/playML/LinearRegression.py", line 52, in fit_gd
    self._theta = gradient_descend(X,y,initial_theta,eta,n_iters=1e4,epsilon=1e-8)
  File "/Users/jason/Desktop/MarchineLearning/playML/LinearRegression.py", line 44, in gradient_descend
    gradient = dJ(theta,X_b,y)
  File "/Users/jason/Desktop/MarchineLearning/playML/LinearRegression.py", line 38, in dJ
    return X_b.T.dot(X_b.dot(theta) - y) * 2 / len(theta)
ValueError: shapes (100,1) and (2,) not aligned: 1 (dim 1) != 2 (dim 0)
**(100, 2)** is the shape that gets printed.
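One way to pin down where the (100, 1) comes from is a debug print just before the failing line. A diagnostic sketch against the fit_gd above; the print call is an addition, not part of the original code:

    def dJ(theta, X_b, y):
        print(X_b.shape, theta.shape)  # shows the shapes dJ was actually given
        return X_b.T.dot(X_b.dot(theta) - y) * 2 / len(theta)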


1 Answer

liuyubobobo 2020-02-09 02:31:08

You passed X, not X_b?
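A minimal sketch of why that produces the exact ValueError in the traceback: theta has one entry per column of X_b (two here, intercept plus slope), so dotting it with the raw X of shape (100, 1) cannot align.

    import numpy as np

    X = np.random.random(size=100).reshape(-1, 1)  # shape (100, 1)
    X_b = np.hstack([np.ones((len(X), 1)), X])     # shape (100, 2)
    theta = np.zeros(X_b.shape[1])                 # shape (2,)

    X_b.dot(theta)  # fine: (100, 2) . (2,) -> (100,)
    X.dot(theta)    # ValueError: shapes (100,1) and (2,) not aligned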

  • OP Jason_chen0755 #1
    X_b is built inside fit_gd; should X rather than X_b be passed to the dJ function?
    2020-02-09 08:22:55
  • OP Jason_chen0755 #2
    Teacher, with my code, why do I get the correct printed shape when I pass X rather than X_b to the dJ function?
    2020-02-09 09:47:18
  • liuyubobobo replying to OP Jason_chen0755 #3
    You need to download the course's official code and run it in your environment to see whether it has the same problem. If it doesn't, please debug carefully and compare it against your own program to find where the difference is. I can't debug from pasted code like this, sorry. Keep at it! :)
    2020-02-09 13:44:04
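On the printed (100, 2) versus the (100, 1) inside the function: print(X_b.shape) in __main__ inspects the X_b built there, while the X_b inside dJ is only a parameter name, bound to whatever gradient_descend was called with; in the code above that argument is the raw X. For reference, a sketch of fit_gd with that call fixed (and with J summed to a scalar and the gradient averaged over the samples); this follows the code in the thread and is not the course's official implementation:

    def fit_gd(self, X, y, eta=0.01, n_iters=1e4):
        assert X.shape[0] == y.shape[0], \
            "The size of X_train and y_train must be the same!"
        X_b = np.hstack([np.ones((len(y), 1)), X])
        initial_theta = np.zeros(X_b.shape[1])

        def J(theta, X_b, y):
            try:
                # np.sum makes the loss a scalar, so the convergence test works
                return np.sum((y - X_b.dot(theta)) ** 2) / len(y)
            except Exception:
                return float('inf')

        def dJ(theta, X_b, y):
            # divide by the number of samples, not the number of parameters
            return X_b.T.dot(X_b.dot(theta) - y) * 2 / len(X_b)

        def gradient_descend(X_b, y, initial_theta, eta=0.01, n_iters=1e4, epsilon=1e-8):
            theta = initial_theta
            i_iters = 0
            while i_iters < n_iters:
                gradient = dJ(theta, X_b, y)
                last_theta = theta
                theta = theta - eta * gradient
                if abs(J(last_theta, X_b, y) - J(theta, X_b, y)) < epsilon:
                    break
                i_iters += 1
            return theta

        # pass X_b, not X, and forward n_iters instead of hard-coding it
        self._theta = gradient_descend(X_b, y, initial_theta, eta, n_iters=n_iters, epsilon=1e-8)
        self.interception_ = self._theta[0]
        self.coef_ = self._theta[1:]
        return self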