import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')
###############################################
# Train/test split and SVM training
# train_test_split now lives in sklearn.model_selection (the old sklearn.cross_validation module was removed)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

# Display all figures
plt.show()
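The script relies on the utilities.py helper module from the book's repository, which isn't included in the post. If you don't have that file, a minimal stand-in along these lines should be enough to run the example. This is my own sketch, not the book's code: it assumes data_multivar.txt is comma-separated with the two features first and the class label in the last column, and plot_classifier is only approximated as a mesh-grid decision surface with the data points overlaid.

# utilities.py -- rough stand-in for the book's helper module (assumptions noted above)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes each line is 'feature1,feature2,label'
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title):
    # Predict over a grid spanning the data range to approximate the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    # Shade the predicted regions and overlay the points colored by class
    plt.figure()
    plt.title(title)
    plt.contourf(xx, yy, mesh_output, cmap=plt.cm.gray, alpha=0.5)
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Paired, edgecolors='black')
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())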