import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn import model_selection
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = model_selection.train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels (see the comparison sketch after this listing):
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
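
The script relies on a helper module, utilities, that ships with the book's repository but is not included in the post. If you don't have that file, a minimal stand-in that matches the two calls used above (load_data and plot_classifier) could look like the sketch below; the comma-separated file layout, the grid step and the colour maps are my assumptions, not necessarily what the book's version does.

# utilities.py -- minimal stand-in reconstructed from the calls above;
# the original helper in the book's repository may differ.
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes each line of data_multivar.txt is "x1,x2,label".
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title=''):
    # Evaluate the classifier on a dense grid and draw its decision regions.
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, Z, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())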
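
The commented-out params lines above are the other kernels the chapter tries (linear and a degree-3 polynomial). One way to choose between them on this data, rather than hard-coding 'rbf', is a quick cross-validation loop over the training split; this is only a sketch, and the 5-fold split and accuracy metric are arbitrary choices, not part of the original recipe.

# Compare the three kernels on the training split (sketch; cv=5 and the
# accuracy metric are my choices, not the book's).
from sklearn.model_selection import cross_val_score
from sklearn.svm import SVC

for params in [{'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}]:
    scores = cross_val_score(SVC(**params), X_train, y_train, cv=5, scoring='accuracy')
    print(params, '-> mean accuracy: %.3f' % scores.mean())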