- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data: utilities.load_data is expected to return the feature
# matrix X and the label vector y parsed from the text file.
# NOTE(review): hard-coded absolute Windows path — assumes the cookbook's
# data_multivar.txt lives at this location; adjust per machine.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled black squares, class 1 as
# hollow squares, so the two classes are distinguishable in grayscale.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE(review): the original used sklearn.cross_validation, which was
# deprecated in scikit-learn 0.18 and removed in 0.20; model_selection
# provides the same train_test_split API.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation (uncomment one to try it):
# params = {'kernel': 'linear'}
# params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# One display name per distinct label value, e.g. 'Class-0', 'Class-1'.
target_names = ['Class-' + str(int(i)) for i in set(y)]

# Python 3 print() calls (the original used Python 2 print statements,
# which are a syntax error on any modern interpreter).
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")