- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads labeled points from a text file, visualizes the two classes,
fits an SVC (RBF kernel by default), plots the decision boundary on
the train and test splits, and prints classification reports.

Reconstructed from a corrupted forum paste; also modernized:
`sklearn.cross_validation` was removed in scikit-learn 0.20, so this
uses `sklearn.model_selection.train_test_split`, and Python 2 print
statements were converted to the print() function.
"""
import numpy as np
import matplotlib.pyplot as plt

from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data (two feature columns plus a class label per row).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# random_state fixed so the split (and the reports below) are reproducible.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation, as in the original recipe:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance.
# classification_report lists classes in sorted label order, so the display
# names must be built from sorted labels to line up with the right rows.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|