This article walks through support vector regression (SVR) for nonlinear regression in Python with scikit-learn. Without further ado, the code is below:
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets, svm
from sklearn.model_selection import train_test_split

def load_data_regression():
    '''
    Load a dataset for the regression problem.
    '''
    diabetes = datasets.load_diabetes()  # the diabetes dataset that ships with scikit-learn
    # split into training and test sets; the test set is 1/4 of the original data
    return train_test_split(diabetes.data, diabetes.target, test_size=0.25, random_state=0)

# SVR model with a linear kernel
def test_SVR_linear(*data):
    X_train, X_test, y_train, y_test = data
    regr = svm.SVR(kernel='linear')
    regr.fit(X_train, y_train)
    print('Coefficients:%s, intercept %s' % (regr.coef_, regr.intercept_))
    print('Score: %.2f' % regr.score(X_test, y_test))

# generate the dataset for the regression problem
X_train, X_test, y_train, y_test = load_data_regression()
# call test_SVR_linear
test_SVR_linear(X_train, X_test, y_train, y_test)
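# Hedged aside (not part of the original script): besides the R^2 value returned by
# regr.score, it can help to inspect the predictions themselves. The sketch below refits
# the linear-kernel SVR on the same splits produced by load_data_regression() above and
# reports the mean squared error on the test set.
from sklearn.metrics import mean_squared_error
regr = svm.SVR(kernel='linear')
regr.fit(X_train, y_train)
y_pred = regr.predict(X_test)  # predicted disease-progression values for the test patients
print('MSE: %.2f' % mean_squared_error(y_test, y_pred))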
def test_SVR_poly(*data):
    '''
    Test how the predictive performance of a polynomial-kernel SVR varies with degree, gamma and coef0.
    '''
    X_train, X_test, y_train, y_test = data
    fig = plt.figure()
    ### test degree ###
    degrees = range(1, 20)
    train_scores = []
    test_scores = []
    for degree in degrees:
        regr = svm.SVR(kernel='poly', degree=degree, coef0=1)
        regr.fit(X_train, y_train)
        train_scores.append(regr.score(X_train, y_train))
        test_scores.append(regr.score(X_test, y_test))
    ax = fig.add_subplot(1, 3, 1)
    ax.plot(degrees, train_scores, label="Training score ", marker='+')
    ax.plot(degrees, test_scores, label=" Testing score ", marker='o')
    ax.set_title("SVR_poly_degree r=1")
    ax.set_xlabel("p")
    ax.set_ylabel("score")
    ax.set_ylim(-1, 1.)
    ax.legend(loc="best", framealpha=0.5)
    ### test gamma, with degree fixed at 3 and coef0 fixed at 1 ###
    gammas = range(1, 40)
    train_scores = []
    test_scores = []
    for gamma in gammas:
        regr = svm.SVR(kernel='poly', gamma=gamma, degree=3, coef0=1)
        regr.fit(X_train, y_train)
        train_scores.append(regr.score(X_train, y_train))
        test_scores.append(regr.score(X_test, y_test))
    ax = fig.add_subplot(1, 3, 2)
    ax.plot(gammas, train_scores, label="Training score ", marker='+')
    ax.plot(gammas, test_scores, label=" Testing score ", marker='o')
    ax.set_title("SVR_poly_gamma r=1")
    ax.set_xlabel(r"$\gamma$")
    ax.set_ylabel("score")
    ax.set_ylim(-1, 1)
    ax.legend(loc="best", framealpha=0.5)
    ### test r (coef0), with gamma fixed at 20 and degree fixed at 3 ###
    rs = range(0, 20)
    train_scores = []
    test_scores = []
    for r in rs:
        regr = svm.SVR(kernel='poly', gamma=20, degree=3, coef0=r)
        regr.fit(X_train, y_train)
        train_scores.append(regr.score(X_train, y_train))
        test_scores.append(regr.score(X_test, y_test))
    ax = fig.add_subplot(1, 3, 3)
    ax.plot(rs, train_scores, label="Training score ", marker='+')
    ax.plot(rs, test_scores, label=" Testing score ", marker='o')
    ax.set_title("SVR_poly_r gamma=20 degree=3")
    ax.set_xlabel(r"r")
    ax.set_ylabel("score")
    ax.set_ylim(-1, 1.)
    ax.legend(loc="best", framealpha=0.5)
    plt.show()

# call test_SVR_poly
test_SVR_poly(X_train, X_test, y_train, y_test)
def test_SVR_rbf(*data):
    '''
    Test how the predictive performance of an RBF-kernel SVR varies with gamma.
    '''
    X_train, X_test, y_train, y_test = data
    gammas = range(1, 20)
    train_scores = []
    test_scores = []
    for gamma in gammas:
        regr = svm.SVR(kernel='rbf', gamma=gamma)
        regr.fit(X_train, y_train)
        train_scores.append(regr.score(X_train, y_train))
        test_scores.append(regr.score(X_test, y_test))
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.plot(gammas, train_scores, label="Training score ", marker='+')
    ax.plot(gammas, test_scores, label=" Testing score ", marker='o')
    ax.set_title("SVR_rbf")
    ax.set_xlabel(r"$\gamma$")
    ax.set_ylabel("score")
    ax.set_ylim(-1, 1)
    ax.legend(loc="best", framealpha=0.5)
    plt.show()

# call test_SVR_rbf
test_SVR_rbf(X_train, X_test, y_train, y_test)
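# Hedged aside (not part of the original article): an RBF-kernel SVR is sensitive to feature
# scale. The scikit-learn diabetes features are already mean-centered and scaled, but on raw
# data it is common to standardize first; a minimal sketch with a Pipeline is shown below,
# with gamma=1 chosen only for illustration.
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
scaled_svr = make_pipeline(StandardScaler(), svm.SVR(kernel='rbf', gamma=1))
scaled_svr.fit(X_train, y_train)
print('Score with scaling: %.2f' % scaled_svr.score(X_test, y_test))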
def test_SVR_sigmoid(*data):
    '''
    Test how the predictive performance of a sigmoid-kernel SVR varies with gamma and coef0.
    '''
    X_train, X_test, y_train, y_test = data
    fig = plt.figure()
    ### test gamma, with coef0 fixed at 0.01 ###
    gammas = np.logspace(-1, 3)
    train_scores = []
    test_scores = []
    for gamma in gammas:
        regr = svm.SVR(kernel='sigmoid', gamma=gamma, coef0=0.01)
        regr.fit(X_train, y_train)
        train_scores.append(regr.score(X_train, y_train))
        test_scores.append(regr.score(X_test, y_test))
    ax = fig.add_subplot(1, 2, 1)
    ax.plot(gammas, train_scores, label="Training score ", marker='+')
    ax.plot(gammas, test_scores, label=" Testing score ", marker='o')
    ax.set_title("SVR_sigmoid_gamma r=0.01")
    ax.set_xscale("log")
    ax.set_xlabel(r"$\gamma$")
    ax.set_ylabel("score")
    ax.set_ylim(-1, 1)
    ax.legend(loc="best", framealpha=0.5)
    ### test r (coef0), with gamma fixed at 10 ###
    rs = np.linspace(0, 5)
    train_scores = []
    test_scores = []
    for r in rs:
        regr = svm.SVR(kernel='sigmoid', coef0=r, gamma=10)
        regr.fit(X_train, y_train)
        train_scores.append(regr.score(X_train, y_train))
        test_scores.append(regr.score(X_test, y_test))
    ax = fig.add_subplot(1, 2, 2)
    ax.plot(rs, train_scores, label="Training score ", marker='+')
    ax.plot(rs, test_scores, label=" Testing score ", marker='o')
    ax.set_title("SVR_sigmoid_r gamma=10")
    ax.set_xlabel(r"r")
    ax.set_ylabel("score")
    ax.set_ylim(-1, 1)
    ax.legend(loc="best", framealpha=0.5)
    plt.show()

# call test_SVR_sigmoid
test_SVR_sigmoid(X_train, X_test, y_train, y_test)
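# Hedged aside (not part of the original article): the loops above vary one kernel parameter
# at a time with the others fixed. To search the combinations jointly with cross-validation,
# scikit-learn's GridSearchCV can be used; the grid values below are only illustrative.
from sklearn.model_selection import GridSearchCV
param_grid = {'kernel': ['rbf'], 'gamma': [0.01, 0.1, 1, 10], 'C': [0.1, 1, 10]}
search = GridSearchCV(svm.SVR(), param_grid, cv=5)  # 5-fold CV on the training split
search.fit(X_train, y_train)
print('Best parameters:', search.best_params_)
print('Test score: %.2f' % search.score(X_test, y_test))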
That concludes this article. I hope it helps with your learning, and thank you for your continued support of 腳本之家.