- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""SVM classification demo (Python Machine Learning Cookbook, Ch. 3).

Loads 2-D labelled data, plots the two classes, trains an RBF-kernel SVM,
visualises the decision boundary on the train and test splits, and prints
classification reports for both.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data.
# NOTE(review): machine-specific absolute Windows path — consider making it
# relative or configurable. File format is whatever utilities.load_data expects
# (presumably feature columns plus a label column per row — confirm).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled black squares = class 0, hollow squares = class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black',
            edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None',
            edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# FIX: sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
# train_test_split now lives in sklearn.model_selection (same signature).
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Switch the active params dict to try other kernels.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
# FIX: converted Python 2 print statements to Python 3 print() calls.
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# FIX: the original never displayed the figures it built; show them at the end.
plt.show()