import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')
###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Choose the SVM kernel; the commented lines are the linear and polynomial alternatives
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures
plt.show()
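The script imports a local utilities module that ships with the book's code (Python-Machine-Learning-Cookbook, Chapter03). If that file is not on hand, the following is a minimal sketch of the two helpers the script calls. It assumes data_multivar.txt holds comma-separated rows with the class label in the last column, and the grid-based plotting is only an approximation of the book's own plot_classifier.

# utilities.py -- hypothetical sketch, not the book's exact implementation
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumption: comma-separated rows, last column is the class label
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title):
    # Evaluate the trained classifier on a dense grid covering the data
    # and shade the resulting decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.pcolormesh(xx, yy, Z, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.title(title)

Placing something like this next to the script (or using the repository's own utilities.py) is enough to run it end to end: it plots the raw points, draws the decision regions for the training and test sets, and prints the two classification reports.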