import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

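The script depends on the book's `utilities` helper module, which is not included in this post. As a rough sketch of what `load_data` presumably does (assuming `data_multivar.txt` is comma-separated with the class label in the last column), an equivalent helper would be:

import numpy as np

def load_data(input_file):
    # Read comma-separated rows: every column except the last is a feature,
    # the last column is the class label
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y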
###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# Note: train_test_split now lives in sklearn.model_selection; the book's
# sklearn.cross_validation module has been removed from scikit-learn
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

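`utilities.plot_classifier` also comes from the book's helper module. A minimal sketch of the usual decision-boundary plot it produces (a mesh grid over the two feature ranges, a prediction for every grid point, then the colored regions plus the data points) could look like this, assuming two-dimensional input:

import numpy as np
import matplotlib.pyplot as plt

def plot_classifier(classifier, X, y, title=''):
    # Mesh grid covering both feature dimensions with a small margin
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))

    # Predict the class of every grid point and draw the decision regions
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())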
###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

# Display the three figures created above
plt.show()
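The commented-out `params` lines show the same script being rerun with linear and polynomial kernels. If you would rather pick the kernel and its hyperparameters automatically instead of editing the dict by hand, a grid search over the same options is one way to do it; the parameter grid below is only an illustrative example, not part of the original recipe:

from sklearn.model_selection import GridSearchCV
from sklearn.svm import SVC

# Example grid covering the three kernels tried in the recipe
param_grid = [
    {'kernel': ['linear'], 'C': [0.1, 1, 10]},
    {'kernel': ['poly'], 'degree': [2, 3], 'C': [0.1, 1, 10]},
    {'kernel': ['rbf'], 'gamma': ['scale', 0.1, 1], 'C': [0.1, 1, 10]},
]

grid = GridSearchCV(SVC(), param_grid, cv=5)
grid.fit(X_train, y_train)
print("Best parameters:", grid.best_params_)
print("Test accuracy:", grid.score(X_test, y_test))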