- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
# Third-party numeric and plotting libraries.
import numpy as np
import matplotlib.pyplot as plt

# Project-local helpers: load_data(path) -> (X, y), plot_classifier(...).
import utilities

# Load the feature matrix X and label vector y from the sample data file.
# NOTE(review): hard-coded absolute Windows path — adjust for your machine,
# or replace with a relative path / command-line argument.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
###############################################
# Separate the data into two classes based on the label vector 'y'.
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the raw input data: class 0 as filled black squares,
# class 1 as hollow squares, so the two groups are distinguishable.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')
###############################################
# Train/test split and SVM training.

# sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
# model_selection provides the same train_test_split function.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

# Hold out 25% of the data for testing; fixed random_state for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Uncomment one of the alternatives below to try a different kernel:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

# Predict on the held-out set and visualize the decision boundary on it.
y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
###############################################
# Evaluate classifier performance.

from sklearn.metrics import classification_report

# Build display names for the report. sorted() is required: classification_report
# orders its rows by sorted label value, and plain set iteration order is not
# guaranteed to match.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]

# Report on the training data (sanity check for under/over-fitting).
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

# Report on the held-out test data (the number that actually matters).
print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")