- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities  # project-local helper: load_data / plot_classifier

# Load input data (two feature columns X and a binary label column y).
# NOTE(review): hard-coded absolute Windows path kept from the original —
# consider making it a command-line argument.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

###############################################
# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with (from the book's walkthrough):
# params = {'kernel': 'linear'}
# params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance on both splits.
target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")