- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads labeled points from a text file, visualizes the two classes,
fits an SVC (RBF kernel by default), plots the decision boundary on
the train/test splits, and prints classification reports.

Requires a project-local `utilities` module providing `load_data`
and `plot_classifier`.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split moved to sklearn.model_selection (same signature).
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Uncomment one of the alternatives below to try a different kernel
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# One display name per distinct label value (e.g. 'Class-0', 'Class-1')
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|