- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads (X, y) pairs from a text file via the project-local ``utilities``
module, plots the two classes, fits an SVC (RBF kernel by default) on a
75/25 train/test split, and prints classification reports for both splits.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (hard-coded path from the original example; adjust as needed)
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# NOTE: sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
# sklearn.model_selection.train_test_split is the drop-in replacement.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels from the original example, kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# One display label per distinct class value in y (e.g. 'Class-0', 'Class-1')
target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures created above (the original script never showed them)
plt.show()