於是看到了python有: sklearn (scikit-learn) 的 svm (Support Vector Machine) SVR (Support Vector Regression) 演算法
# Fit an RBF-kernel SVR to noisy samples of exp(-x^2), keep the best of
# 20 random train/test splits (judged by test-set R^2), then plot the
# winning model's fit over its own test split.
best_score = 0.0
best_model = None
best_x_test = None
best_y_test = None

# 500 sorted samples in [0, 5); targets are exp(-x^2) plus Gaussian noise.
X = np.sort(5 * np.random.rand(500, 1), axis=0)
y = np.exp(-X ** 2) + np.random.normal(0, 0.05, X.shape)
y_flat = y.ravel()  # SVR expects a 1-D target vector

for i in range(20):
    x_train, x_test, y_train, y_test = train_test_split(X, y_flat, test_size=0.3)
    model = SVR(kernel='rbf')
    model.fit(x_train, y_train)
    # Score on the untouched, still-paired test set.  The original code
    # sorted x_test and y_test independently before scoring, which breaks
    # the (x, y) correspondence and silently inflates R^2.
    score = model.score(x_test, y_test)
    print(score)
    if score > best_score:
        best_score = score
        best_model = model
        best_x_test = x_test
        best_y_test = y_test

print('Best R^2 is: {}'.format(best_score))

# Sort the winning test split by x while keeping each (x, y) pair together,
# so the fitted curve plots cleanly left-to-right.  Do NOT re-fit here:
# best_model already holds the winning fit; the original called .fit()
# again on the LAST iteration's training data, discarding the saved model.
order = np.argsort(best_x_test[:, 0])
xx = best_x_test[order]
yy = best_y_test[order]
yyy = best_model.predict(xx)

# Labels added so plt.legend() has entries to show (it was empty before).
plt.scatter(X, y, lw=1, marker='.', c='cornflowerblue', label='samples')
plt.scatter(xx, yy, lw=0.1, marker='*', c='r', label='best test split')
plt.plot(xx, yyy, color='g', lw=2, label='SVR (rbf) fit')
plt.legend()
plt.show()
0.981262758171
0.985928238754
0.983921017322
0.978366081228
0.981761295843
0.983258298598
0.984401149668
0.975115055289
0.982420619817
0.984895618793
0.984486517169
0.98190445156
0.980037096507
0.975000835056
0.974193198206
0.986632990775
0.983566759576
0.984506261356
0.981129872633
0.979422542099
Best R^2 is: 0.986632990775