问题描述
def sigmoid(X, theta):
    """Return logistic-regression probabilities for each row of X.

    Parameters
    ----------
    X : ndarray, shape (m, n) -- feature matrix (no bias column).
    theta : ndarray, shape (n+1,) -- theta[0] is the intercept,
        theta[1:] are the feature weights.

    Returns
    -------
    ndarray, shape (m,) -- expit(X @ theta[1:] + theta[0]).
    """
    z = np.dot(X, theta[1:]) + theta[0]
    # Bug fix: the original computed expit(1/1 + np.exp(-z)) -- a
    # mis-parenthesised hand-rolled sigmoid fed INTO expit.  expit IS
    # the sigmoid 1/(1+exp(-z)), so pass the linear term z directly.
    return expit(z)
def costFunction(y, yhat):
    """Binary cross-entropy (logistic) loss, summed over all samples.

    Parameters
    ----------
    y : ndarray, shape (m,) -- true labels in {0, 1}.
    yhat : ndarray, shape (m,) -- predicted probabilities in [0, 1].

    Returns
    -------
    float -- sum_i of -y_i*log(yhat_i) - (1-y_i)*log(1-yhat_i).
    """
    # Bug fix: when a prediction saturates to exactly 0 or 1, log()
    # emitted "RuntimeWarning: divide by zero encountered in log" and
    # the cost became inf/nan.  Clip predictions into (0, 1) first.
    eps = 1e-15
    yhat = np.clip(yhat, eps, 1 - eps)
    cost = -y.dot(np.log(yhat)) - (1 - y).dot(np.log(1 - yhat))
    return cost
def gradientFun(X, y, theta, learning_rate, iterations):
    """Fit logistic-regression parameters by batch gradient descent.

    Mutates ``theta`` in place (theta[0] = intercept, theta[1:] =
    feature weights) and returns the per-iteration training cost.
    """
    history = []
    for _ in range(iterations):
        predictions = sigmoid(X, theta)
        residual = predictions - y
        # Intercept gradient is the plain residual sum; the weight
        # gradient is X^T @ residual (standard logistic-regression form).
        theta[0] -= learning_rate * residual.sum()
        theta[1:] -= learning_rate * X.T.dot(residual)
        history.append(costFunction(y, predictions))
    return history
# --- Training driver ---------------------------------------------------
# NOTE(review): X (feature matrix) and y (labels) are assumed to be
# defined earlier in the notebook -- not visible here.
m, n = X.shape
theta = np.zeros(1 + n)      # theta[0] = intercept, theta[1:] = weights
learning_rate = 0.001
iteration = 1000
# Bug fix: the original call was gradientFun(X, iteration), which does
# not match the signature (X, y, theta, learning_rate, iterations) and
# raises a TypeError before training even starts.
cost = gradientFun(X, y, theta, learning_rate, iteration)
<ipython-input-87-26e2d5e7bf1c>:2: RuntimeWarning: divide by zero encountered in log
cost = -y.dot(np.log(yhat)) - (1-y).dot(np.log(1-yhat))
解决方法
暂未找到可以解决该程序问题的有效方法,小编努力寻找整理中!
如果你已经找到好的解决方法,欢迎将解决方案带上本链接一起发送给小编。
小编邮箱:dio#foxmail.com (将#修改为@)