import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# Note: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures
plt.show()
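The script depends on the utilities module that ships with the book's Chapter03 code; the post does not include it. Below is a minimal, hypothetical sketch of what the two helpers used here (load_data and plot_classifier) might look like, assuming data_multivar.txt stores comma-separated feature columns with the class label in the last column. The actual implementations in the repository may differ, so treat this only as a stand-in for running the script.

# utilities.py - hypothetical stand-in for the book's helper module
# (assumption: the real helpers behave similarly; check the repository
# for the exact versions)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    """Load comma-separated rows: all columns but the last are features,
    the last column is the class label."""
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    """Plot the decision regions of a fitted classifier on 2-D data."""
    # Define a mesh that covers the data with a small margin
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))

    # Predict a class for every mesh point and reshape to the grid
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    # Draw the decision regions and overlay the data points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With a file like this saved as utilities.py next to the script, the listing above runs end to end and produces the input-data scatter plot, the training/test decision-boundary plots, and the two classification reports.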