import numpy as np
class OLSLinearRegression:
    """Linear regression fitted with the closed-form ordinary least squares solution."""

    def _ols(self, X, y):
        """Estimate weights via the normal equation: w = (X^T X)^{-1} X^T y.

        NOTE(review): np.linalg.inv raises LinAlgError if X^T X is singular;
        np.linalg.lstsq or np.linalg.pinv would be more robust alternatives.
        """
        tmp = np.linalg.inv(np.matmul(X.T, X))
        tmp = np.matmul(tmp, X.T)
        return np.matmul(tmp, y)

    def _preprocess_data_X(self, X):
        """Prepend a column of ones to X so the first weight acts as the intercept."""
        m, n = X.shape
        X_ = np.empty((m, n + 1))  # fixed: was np.mpty — a typo, AttributeError at runtime
        X_[:, 0] = 1
        X_[:, 1:] = X  # fixed: was `X_[:, 1:] = X_`, which copied the uninitialized buffer instead of the data
        return X_

    def train(self, X_train, y_train):
        """Fit the model; the learned weight vector is stored in self.w."""
        X_train = self._preprocess_data_X(X_train)
        self.w = self._ols(X_train, y_train)

    def predict(self, X):
        """Predict targets as X_ @ w, where X_ includes the intercept column."""
        X = self._preprocess_data_X(X)
        return np.matmul(X, self.w)
class GDLinearRegression:
    """Linear regression fitted with batch gradient descent on the MSE loss."""

    def __init__(self, n_iter=200, eta=1e-3, tol=None):
        # fixed: was `_init_` (single underscores) — Python never called it as a
        # constructor, so instances had no n_iter/eta/tol/w attributes. This is
        # also why static analysis flagged the class's attribute accesses.
        self.n_iter = n_iter  # maximum number of gradient steps
        self.eta = eta        # learning rate
        self.tol = tol        # early-stop threshold on loss improvement; None = run all iterations
        self.w = None         # learned weights, set by train()

    def _loss(self, y, y_pred):
        """Mean squared error between targets and predictions."""
        return np.sum((y_pred - y) ** 2) / y.size

    def _gradient(self, X, y, y_pred):
        """Gradient of the MSE loss w.r.t. w (up to a constant factor of 2)."""
        return np.matmul(y_pred - y, X) / y.size

    def _gradient_descent(self, w, X, y):
        """Run gradient descent, updating w in place (w is a mutable ndarray).

        Because the update uses `w -= ...`, the caller's array (self.w) is
        modified in place rather than rebound.
        """
        if self.tol is not None:
            loss_old = np.inf
        for step_i in range(self.n_iter):
            y_pred = self._predict(X, w)
            loss = self._loss(y, y_pred)
            print('%4i Loss: %s' % (step_i, loss))
            if self.tol is not None:
                # Stop early once the per-step loss improvement drops below tol.
                if loss_old - loss < self.tol:
                    break
                loss_old = loss
            grad = self._gradient(X, y, y_pred)
            w -= self.eta * grad

    def _preprocess_data_X(self, X):
        """Prepend a column of ones to X so the first weight acts as the intercept."""
        m, n = X.shape
        X_ = np.empty((m, n + 1))
        X_[:, 0] = 1
        X_[:, 1:] = X
        return X_

    def train(self, X_train, y_train):
        """Fit the model: add the intercept column, init small random weights, descend."""
        X_train = self._preprocess_data_X(X_train)
        _, n = X_train.shape
        self.w = np.random.random(n) * 0.05  # small random init near zero
        self._gradient_descent(self.w, X_train, y_train)

    def _predict(self, X, w):
        """Internal predictor implementing the hypothesis h(x) = X @ w."""
        return np.matmul(X, w)

    def predict(self, X):
        """Predict targets for raw (un-augmented) input X using the trained weights."""
        X = self._preprocess_data_X(X)
        return self._predict(X, self.w)
这是我们老师布置的作业代码。但是在代码的
_, n = X_train.shape
self.w = np.random.random(n) * 0.05
这两行VScode提示出问题:
Attempting to unpack a non-sequence defined at line 17 of
Module 'numpy.random' has no 'random' member
求大佬解答
百度无法找到第一个的解决方案
第二个提示的 pip install --user --upgrade numpy 也做了，但没有效果
全部评论
(0) 回帖