- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads labelled points from a text file, visualises the two classes,
fits an SVC (RBF kernel by default), plots the decision boundary on the
train/test splits, and prints classification reports for both.

Reconstructed from a forum paste: interleaved board artifacts removed,
Python 2 `print` statements converted to Python 3 calls, and the
long-removed `sklearn.cross_validation` module replaced by
`sklearn.model_selection` (renamed in scikit-learn 0.18, removed in 0.20).
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities  # project-local helpers: load_data(), plot_classifier()

# Load input data (two feature columns + a 0/1 label per row).
# NOTE(review): hard-coded absolute Windows path — consider making this
# a command-line argument or a relative path.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Uncomment one of the alternatives to try a different kernel.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# One display name per distinct label, e.g. 'Class-0', 'Class-1'.
target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")