import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])
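# (Equivalent NumPy idiom, assuming y comes back as a NumPy array of 0/1 labels:
#  class_0 = X[y == 0]; class_1 = X[y == 1])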

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (train_test_split lives in sklearn.model_selection in current scikit-learn;
#  the old sklearn.cross_validation module no longer exists)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
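# (Other standard SVC hyperparameters can be exposed here if you want to tune
#  the model: C for regularization strength and gamma for the RBF kernel width,
#  e.g. params = {'kernel': 'rbf', 'C': 1.0, 'gamma': 'scale'}; the values shown
#  are just illustrative defaults, not part of the original recipe.)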
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
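# Note: utilities.plot_classifier is also part of the book's helper module.
# From its usage it presumably evaluates the trained classifier on a dense
# 2-D mesh grid and shades the decision regions, roughly along these lines
# (a sketch under assumptions, not the book's exact code):
#
#     def plot_classifier(classifier, X, y, title):
#         x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
#         y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
#         xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
#                              np.arange(y_min, y_max, 0.01))
#         Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
#         plt.figure()
#         plt.title(title)
#         plt.pcolormesh(xx, yy, Z, cmap=plt.cm.gray)
#         plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)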

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")
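# Optional follow-up: a confusion matrix gives a quick per-class error breakdown
# on top of the classification report (confusion_matrix is a standard
# sklearn.metrics function; this block is an add-on, not part of the original recipe).
from sklearn.metrics import confusion_matrix
print(confusion_matrix(y_test, y_test_pred))

# The figures created above are only displayed once plt.show() is called,
# which the post stops short of; it is needed when running this as a plain script.
plt.show()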