Problem description
I want to do a linear regression with the y-intercept forced to 0.115. This is the code I tried. I set fit_intercept=True to get a non-zero y-intercept, but can I set it to a specific value?
Also, how do I get a best-fit line instead of a line that connects every point?
Thanks.
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
from sklearn.metrics import r2_score
from sklearn.linear_model import LinearRegression
x=np.array([0,10,20,30,40,50,60,70,80,90,100]).reshape(-1,1)
y=np.array([0.113,0.116,0.130,0.150,0.160,0.180,0.210,0.220,0.260,0.280])
regression=LinearRegression(fit_intercept=True).fit(x,y)
r_sq=round(regression.score(x,y),4)
m=round(regression.coef_[0],4)
b=round(regression.intercept_,4)
print("r_sq:",r_sq,"m:",m,"b:",b)
plt.figure()
plt.scatter(x,y)
plt.title('A')
plt.ylabel('X')
plt.xlabel('Y')
plt.plot(x,y,'r--',label='measured')
plt.legend(loc='best')
Solution
Subtract the y-intercept you want to force from the data, then set fit_intercept=False.
For example:
import matplotlib.pyplot as plt
import numpy as np
from sklearn.linear_model import LinearRegression
x = np.array([0,10,20,30,40,50,60,70,80,90,100]).reshape(-1,1)
y = np.array([0.113,0.116,0.130,0.150,0.160,0.180,0.210,0.220,0.260,0.280])
fig,ax = plt.subplots()
for fit, y_intercept in zip((True, False), (0.0, 0.115)):
    # fit_intercept=True is the ordinary fit; fit_intercept=False on the
    # shifted data forces the line through y = 0.115 on the original scale
    regression = LinearRegression(fit_intercept=fit)
    regression.fit(x, y - y_intercept)
    r_sq = regression.score(x, y - y_intercept)
    m = regression.coef_[0]
    b = regression.intercept_ + y_intercept  # shift the intercept back
    print(f"Fit intercept: {regression.fit_intercept}")
    print(f"r_sq: {r_sq:0.4f}\nm: {m:0.4f}\nb: {b:0.4f}")
    ax.plot(x, y, "bo")
    ax.plot(
        x,
        regression.predict(x) + y_intercept,  # undo the shift for plotting
        "r" + "--" * fit,
        label=f"Fit Intercept: {regression.fit_intercept}",
    )
ax.set_title("A")
ax.set_ylabel("X")
ax.set_xlabel("Y")
ax.legend(loc="best")
plt.show()
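A quick way to sanity-check the subtract-and-refit trick (an extra sketch, not part of the answer above): with the intercept pinned at b, the least-squares slope has the closed form m = sum(x*(y - b)) / sum(x**2), which should match what LinearRegression(fit_intercept=False) returns on the shifted data. The data below are made up so the array lengths match:
import numpy as np
from sklearn.linear_model import LinearRegression
b = 0.115                                  # intercept to force
x = np.arange(0, 110, 10, dtype=float)     # hypothetical x values
y = 0.0017 * x + 0.113                     # hypothetical, roughly linear y values
# Minimizing sum((y - b - m*x)**2) over m gives m = sum(x*(y - b)) / sum(x**2)
m_closed = np.sum(x * (y - b)) / np.sum(x ** 2)
# The same slope from sklearn, fitted on the shifted data with no free intercept
m_sklearn = LinearRegression(fit_intercept=False).fit(x.reshape(-1, 1), y - b).coef_[0]
print(f"closed-form m: {m_closed:.6f}")
print(f"sklearn m:     {m_sklearn:.6f}")   # the two should agree to numerical precision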
I found a more general solution that gives the same answer, but also lets me fit nonlinear equations by simply changing the fitting function.
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
import numpy as np
#set y-intercept
b=0.115
#Fitting function
def func(x, m):
    return (x * m) + b
#Experimental x and y data points
x_A1 = np.array([0,100])
y_A1 = np.array([0.113,0.280])
#Plot experimental data points
plt.plot(x_A1,y_A1,'bo',label='experimental')
#Perform the curve-fit
popt, pcov = curve_fit(func, x_A1, y_A1)  # optionally pass p0=initial_guess
#print(popt)
#x values for the fitted function
x_A1_Fit = np.arange(x_A1[0],x_A1[-1],0.1)
residuals = y_A1- func(x_A1,*popt)
ss_res = np.sum(residuals**2)
ss_tot = np.sum((y_A1-np.mean(y_A1))**2)
r_sq = 1 - (ss_res / ss_tot)
#Plot the fitted function
plt.plot(x_A1_Fit,func(x_A1_Fit,*popt),'r--',label='fitted: m=%5.4f' % tuple(popt))
plt.xlabel('x')
plt.ylabel('y')
plt.legend()
plt.show()
print('r_sq=', "%.4f" % r_sq, 'm=', "%.4f" % popt[0], "b=", "%.4f" % b)
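Since the whole point of the curve_fit version is that you can change the model function, here is a hedged sketch of the same idea with a nonlinear (quadratic) model; func_quad, the coefficients a and m, and the data are all made up for illustration:
import numpy as np
from scipy.optimize import curve_fit
b = 0.115  # fixed y-intercept
#Quadratic model: a and m are fitted, b stays fixed
def func_quad(x, a, m):
    return a * x**2 + m * x + b
#Made-up data for illustration
x_data = np.linspace(0, 100, 11)
y_data = 2e-6 * x_data**2 + 0.0015 * x_data + 0.113
popt, pcov = curve_fit(func_quad, x_data, y_data)
a_fit, m_fit = popt
print(f"a = {a_fit:.3e}, m = {m_fit:.4f}, b fixed at {b}")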
There is also a useful post on fit_intercept: https://stackoverflow.com/questions/46779605
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
from sklearn.metrics import r2_score
from sklearn.linear_model import LinearRegression
x = np.array([0,100]).reshape(-1,1)
y=np.array([0.113,0.280])
lr_fi_true = LinearRegression(fit_intercept=True)
lr_fi_false = LinearRegression(fit_intercept=False)
lr_fi_true.fit(x,y)
lr_fi_false.fit(x,y)
print('Intercept when fit_intercept=True : {:.5f}'.format(lr_fi_true.intercept_))
print('Intercept when fit_intercept=False : {:.5f}'.format(lr_fi_false.intercept_))
lr_fi_true_yhat = np.dot(x,lr_fi_true.coef_) + lr_fi_true.intercept_
lr_fi_false_yhat = np.dot(x,lr_fi_false.coef_) + lr_fi_false.intercept_
plt.scatter(x, y, label='Actual points')
plt.plot(x,lr_fi_true_yhat,label='fit_intercept=True')
plt.plot(x,lr_fi_false_yhat,'r-',label='fit_intercept=False')
plt.legend()
plt.vlines(0, 0, y.max())
plt.hlines(0,x.min(),x.max())
plt.show()
which prints:
Intercept when fit_intercept=True : 0.09577
Intercept when fit_intercept=False : 0.00000
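In other words, fit_intercept=False always pins the fitted line to the origin, which is why the earlier answer shifts the data down by the desired intercept before fitting and shifts the predictions back up afterwards. A minimal round-trip sketch (again with made-up data so the shapes are consistent):
import numpy as np
from sklearn.linear_model import LinearRegression
b = 0.115
x = np.arange(0, 110, 10, dtype=float).reshape(-1, 1)  # hypothetical data
y = 0.0017 * x.ravel() + 0.113
model = LinearRegression(fit_intercept=False).fit(x, y - b)
print(model.predict([[0.0]]) + b)  # [0.115]: the line is forced through the chosen intercept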