import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# Sort the labels so target_names line up with the label order used by classification_report
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
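The script imports a local utilities.py from the book's repository, which supplies load_data and plot_classifier. If you don't have that file handy, below is a minimal sketch of what those two helpers are assumed to do here: load_data reads comma-separated rows with the class label in the last column, and plot_classifier draws the decision regions on a mesh grid. The actual implementations in the Cookbook repository may differ in details.

# utilities.py -- minimal sketch, assuming comma-separated rows with the label in the last column
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Features in all columns except the last, class label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title):
    # Predict on a mesh grid covering the data and shade the resulting decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())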