import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures
plt.show()
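The script imports a utilities module from the book's repository, but the post does not include it. Below is a minimal sketch of the two helpers it relies on, load_data and plot_classifier; it assumes data_multivar.txt is comma-separated with the class label in the last column, and is only an approximation of the book's own implementation, not the original code.

# Minimal sketch of the helpers assumed from the book's utilities.py.
# Assumption: the data file is comma-separated, features first, label last.
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Load comma-separated rows and split features from the label column
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Build a mesh over the feature range and colour it by predicted class
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With helpers along these lines in a local utilities.py, the script runs end to end: it plots the raw data, draws the decision regions for the training and test sets, and prints the two classification reports.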