import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')
###############################################
# Train test split and SVM training
# train_test_split lives in sklearn.model_selection
# (the old sklearn.cross_validation module was removed in scikit-learn 0.20)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

# Display all figures
plt.show()
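
A note on the two helper calls: utilities is not a library, it is the Chapter03/utilities.py file that ships with the cookbook's code repo, so keep it next to this script. If you only want to run the snippet above without the repo, here is a minimal sketch of the two helpers it needs. This is my own reconstruction, assuming data_multivar.txt is comma-separated with the class label in the last column, not the book's exact code:

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumption: comma-separated rows, features first, class label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title=''):
    # Evaluate the classifier on a dense grid and shade its decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.02),
                         np.arange(y_min, y_max, 0.02))
    Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.contourf(xx, yy, Z, cmap=plt.cm.gray, alpha=0.5)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.title(title)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())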
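
If you want to compare the three kernels that are commented out in the params block, a quick loop like this (my own addition, not from the book; run it after the script above so X_train/X_test are defined) prints the test accuracy for each:

from sklearn.metrics import accuracy_score
from sklearn.svm import SVC

# Try each kernel on the same split and report test accuracy
for params in [{'kernel': 'linear'}, {'kernel': 'poly', 'degree': 3}, {'kernel': 'rbf'}]:
    clf = SVC(**params)
    clf.fit(X_train, y_train)
    acc = accuracy_score(y_test, clf.predict(X_test))
    print(params, '-> test accuracy:', round(acc, 3))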