- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""SVM classification demo (Python Machine Learning Cookbook, Ch. 3).

Loads a two-class 2-D dataset, plots the raw points, trains an SVM with an
RBF kernel on a 75/25 train/test split, visualizes the decision boundary on
both splits, and prints classification reports for each.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data.
# NOTE(review): hard-coded absolute Windows path — consider taking this as a
# command-line argument so the script runs on other machines.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# FIX: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels from the book; uncomment one to experiment.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]

# FIX: converted Python 2 print statements to Python 3 print() calls.
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# FIX: render all figures; without this call nothing is displayed when the
# script runs under a non-interactive matplotlib backend.
plt.show()