import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (sklearn.cross_validation was removed in scikit-learn 0.20; use model_selection instead)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Uncomment one of the alternatives below to try a different kernel
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures
plt.show()
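The script imports a repo-local `utilities` module that is not included in the post; the authoritative version is `utilities.py` in the book repository's Chapter03 folder. For anyone who only copied this one file, the following is a hypothetical stand-in sketch, assuming `data_multivar.txt` is comma-separated with the class label in the last column and that `plot_classifier` shades decision regions over a mesh grid. It is not the book's exact code.

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    """Load comma-separated rows: all columns but the last are features,
    the last column is the class label (assumption about the data format)."""
    X, y = [], []
    with open(input_file, 'r') as f:
        for line in f:
            values = [float(v) for v in line.strip().split(',')]
            X.append(values[:-1])
            y.append(values[-1])
    return np.array(X), np.array(y)

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    """Plot the decision regions of a fitted 2-D classifier
    by evaluating it on a dense mesh grid."""
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))

    # Predict the class for every point on the grid and reshape to the grid
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With these two helpers in a `utilities.py` next to the script, the example runs end to end: it plots the raw data, the training-set and test-set decision boundaries, and prints the two classification reports.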