Machine Learning Programming Workshop

3.1 Implementation with Sci-Kit Learn

Prepared By: Cheong Shiu Hong (FTFNCE)


Demonstrate with Digit Dataset, then let students play around with faces dataset, feel free to import other algorithms from sklearn to try and compare accuracy.


In [1]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import sklearn as sk
from sklearn import datasets
In [2]:
%matplotlib inline
In [3]:
from sklearn.linear_model import LogisticRegression
from sklearn.neural_network import MLPClassifier


1) Digits Dataset

return to top

Load Digits Dataset from Sklearn

In [4]:
digits = datasets.load_digits()

Explore Dataset

In [5]:
digits.keys()
Out[5]:
dict_keys(['data', 'target', 'target_names', 'images', 'DESCR'])
In [6]:
digits['images'].shape, digits['data'].shape, digits['target'].shape
Out[6]:
((1797, 8, 8), (1797, 64), (1797,))

Visualize Images

In [7]:
fig=plt.figure(figsize=(10, 4))
columns = 5
rows = 2

for i in range(columns*rows):
    img = digits['images'][i]
    fig.add_subplot(rows, columns, i+1)
    plt.imshow(img)
    
plt.show()

Splitting the Dataset

In [8]:
X = digits['data'][:1600]
Y = digits['target'][:1600]
X_val = digits['data'][1600:]
Y_val = digits['target'][1600:]


Logistic Regression

In [9]:
logreg = LogisticRegression(solver='liblinear', max_iter=1000, multi_class='ovr')
In [10]:
logreg.fit(digits['data'], digits['target'])
Out[10]:
LogisticRegression(C=1.0, class_weight=None, dual=False, fit_intercept=True,
          intercept_scaling=1, max_iter=1000, multi_class='ovr',
          n_jobs=None, penalty='l2', random_state=None, solver='liblinear',
          tol=0.0001, verbose=0, warm_start=False)
In [11]:
logreg.score(X, Y)
Out[11]:
0.99375
In [12]:
logreg.score(X_val, Y_val)
Out[12]:
0.9898477157360406


Multi-Layer Perceptron (Neural Network)

In [13]:
NN = MLPClassifier(max_iter=1000, hidden_layer_sizes=(16,32,16))
In [14]:
NN.fit(X, Y)
Out[14]:
MLPClassifier(activation='relu', alpha=0.0001, batch_size='auto', beta_1=0.9,
       beta_2=0.999, early_stopping=False, epsilon=1e-08,
       hidden_layer_sizes=(16, 32, 16), learning_rate='constant',
       learning_rate_init=0.001, max_iter=1000, momentum=0.9,
       n_iter_no_change=10, nesterovs_momentum=True, power_t=0.5,
       random_state=None, shuffle=True, solver='adam', tol=0.0001,
       validation_fraction=0.1, verbose=False, warm_start=False)
In [15]:
NN.score(X, Y)
Out[15]:
1.0
In [16]:
NN.score(X_val, Y_val)
Out[16]:
0.9187817258883249


2) Faces Dataset

return to top

Load Faces Dataset from Sklearn

In [17]:
faces = datasets.fetch_olivetti_faces()
downloading Olivetti faces from https://ndownloader.figshare.com/files/5976027 to C:\Users\cheon\scikit_learn_data

Explore Dataset

In [18]:
faces.keys()
Out[18]:
dict_keys(['data', 'images', 'target', 'DESCR'])
In [19]:
faces['images'].shape, faces['data'].shape, faces['target'].shape
Out[19]:
((400, 64, 64), (400, 4096), (400,))

Shuffling Dataset

In [20]:
s = np.arange(len(faces['data']))
np.random.shuffle(s)
In [21]:
data = faces['data'][s]
images = faces['images'][s]
target = faces['target'][s]

Visualizing Images

In [22]:
fig=plt.figure(figsize=(10, 8))
columns = 5
rows = 4

for i in range(columns*rows):
    img = images[i]
    fig.add_subplot(rows, columns, i+1)
    plt.imshow(img)
    
plt.show()

Splitting the Dataset

In [23]:
from sklearn.model_selection import train_test_split

X, X_val, Y, Y_val = train_test_split(data, target, test_size=0.15, shuffle=True)


Logistic Regression

In [24]:
logreg = LogisticRegression(solver='liblinear', multi_class='ovr', verbose=1)
In [25]:
logreg.fit(X, Y)
[LibLinear]
Out[25]:
LogisticRegression(C=1.0, class_weight=None, dual=False, fit_intercept=True,
          intercept_scaling=1, max_iter=100, multi_class='ovr',
          n_jobs=None, penalty='l2', random_state=None, solver='liblinear',
          tol=0.0001, verbose=1, warm_start=False)
In [26]:
logreg.score(X, Y)
Out[26]:
1.0
In [27]:
logreg.score(X_val, Y_val)
Out[27]:
0.8833333333333333


Neural Network

In [28]:
NN = MLPClassifier(max_iter=1000, hidden_layer_sizes=(64,128,32), verbose=1)
In [29]:
NN.fit(X, Y)
Iteration 1, loss = 3.75954071
Iteration 2, loss = 3.72140950
Iteration 3, loss = 3.70619785
Iteration 4, loss = 3.68376290
Iteration 5, loss = 3.68153736
Iteration 6, loss = 3.66720544
Iteration 7, loss = 3.65718644
Iteration 8, loss = 3.64696736
Iteration 9, loss = 3.63108768
Iteration 10, loss = 3.61720944
Iteration 11, loss = 3.61465325
Iteration 12, loss = 3.59060380
Iteration 13, loss = 3.57816035
Iteration 14, loss = 3.55691958
Iteration 15, loss = 3.54625345
Iteration 16, loss = 3.53181864
Iteration 17, loss = 3.51005005
Iteration 18, loss = 3.49752272
Iteration 19, loss = 3.48133042
Iteration 20, loss = 3.46167566
Iteration 21, loss = 3.43496562
Iteration 22, loss = 3.41331253
Iteration 23, loss = 3.38550214
Iteration 24, loss = 3.36396797
Iteration 25, loss = 3.34827182
Iteration 26, loss = 3.32193922
Iteration 27, loss = 3.29446621
Iteration 28, loss = 3.27982908
Iteration 29, loss = 3.25549940
Iteration 30, loss = 3.24871105
Iteration 31, loss = 3.23136084
Iteration 32, loss = 3.19844415
Iteration 33, loss = 3.17389748
Iteration 34, loss = 3.13645985
Iteration 35, loss = 3.11111321
Iteration 36, loss = 3.07590951
Iteration 37, loss = 3.05748153
Iteration 38, loss = 3.00648661
Iteration 39, loss = 2.97882342
Iteration 40, loss = 2.95222317
Iteration 41, loss = 2.92433482
Iteration 42, loss = 2.87766698
Iteration 43, loss = 2.84157559
Iteration 44, loss = 2.80229867
Iteration 45, loss = 2.77733917
Iteration 46, loss = 2.73528812
Iteration 47, loss = 2.68913375
Iteration 48, loss = 2.65344173
Iteration 49, loss = 2.61353237
Iteration 50, loss = 2.57332767
Iteration 51, loss = 2.53151929
Iteration 52, loss = 2.49047507
Iteration 53, loss = 2.44962179
Iteration 54, loss = 2.39516765
Iteration 55, loss = 2.35388779
Iteration 56, loss = 2.31397803
Iteration 57, loss = 2.26433314
Iteration 58, loss = 2.21760090
Iteration 59, loss = 2.16825672
Iteration 60, loss = 2.12607762
Iteration 61, loss = 2.07491906
Iteration 62, loss = 2.04225005
Iteration 63, loss = 1.99339672
Iteration 64, loss = 1.93721357
Iteration 65, loss = 1.89332128
Iteration 66, loss = 1.86803934
Iteration 67, loss = 1.79924067
Iteration 68, loss = 1.75339201
Iteration 69, loss = 1.70378525
Iteration 70, loss = 1.67736540
Iteration 71, loss = 1.64136275
Iteration 72, loss = 1.58711864
Iteration 73, loss = 1.54917226
Iteration 74, loss = 1.55913130
Iteration 75, loss = 1.49538858
Iteration 76, loss = 1.47076767
Iteration 77, loss = 1.42799106
Iteration 78, loss = 1.39227456
Iteration 79, loss = 1.32316739
Iteration 80, loss = 1.28982082
Iteration 81, loss = 1.25309132
Iteration 82, loss = 1.22602660
Iteration 83, loss = 1.20845928
Iteration 84, loss = 1.15448441
Iteration 85, loss = 1.14477938
Iteration 86, loss = 1.13661710
Iteration 87, loss = 1.09302458
Iteration 88, loss = 1.05183419
Iteration 89, loss = 1.02013175
Iteration 90, loss = 0.99427615
Iteration 91, loss = 0.97625919
Iteration 92, loss = 0.97179443
Iteration 93, loss = 0.96213442
Iteration 94, loss = 0.94417529
Iteration 95, loss = 0.89343697
Iteration 96, loss = 0.86868141
Iteration 97, loss = 0.83869770
Iteration 98, loss = 0.84988500
Iteration 99, loss = 0.81855619
Iteration 100, loss = 0.80149408
Iteration 101, loss = 0.78258410
Iteration 102, loss = 0.76940857
Iteration 103, loss = 0.74374014
Iteration 104, loss = 0.72535538
Iteration 105, loss = 0.69463459
Iteration 106, loss = 0.69320310
Iteration 107, loss = 0.65589089
Iteration 108, loss = 0.64180526
Iteration 109, loss = 0.62316153
Iteration 110, loss = 0.59844031
Iteration 111, loss = 0.59853173
Iteration 112, loss = 0.58465789
Iteration 113, loss = 0.57435614
Iteration 114, loss = 0.56047892
Iteration 115, loss = 0.55440207
Iteration 116, loss = 0.53871299
Iteration 117, loss = 0.53495423
Iteration 118, loss = 0.51459883
Iteration 119, loss = 0.50563233
Iteration 120, loss = 0.48999512
Iteration 121, loss = 0.47660723
Iteration 122, loss = 0.47631254
Iteration 123, loss = 0.46928231
Iteration 124, loss = 0.44455376
Iteration 125, loss = 0.44597476
Iteration 126, loss = 0.43617797
Iteration 127, loss = 0.43218209
Iteration 128, loss = 0.43067635
Iteration 129, loss = 0.44426868
Iteration 130, loss = 0.41005727
Iteration 131, loss = 0.41884195
Iteration 132, loss = 0.43447502
Iteration 133, loss = 0.41371142
Iteration 134, loss = 0.37829587
Iteration 135, loss = 0.38682798
Iteration 136, loss = 0.38506825
Iteration 137, loss = 0.36213362
Iteration 138, loss = 0.34255904
Iteration 139, loss = 0.34609381
Iteration 140, loss = 0.33686154
Iteration 141, loss = 0.31842860
Iteration 142, loss = 0.32875350
Iteration 143, loss = 0.31977963
Iteration 144, loss = 0.30814497
Iteration 145, loss = 0.30265359
Iteration 146, loss = 0.29568893
Iteration 147, loss = 0.29081939
Iteration 148, loss = 0.29069644
Iteration 149, loss = 0.27925275
Iteration 150, loss = 0.27504099
Iteration 151, loss = 0.27051837
Iteration 152, loss = 0.26244727
Iteration 153, loss = 0.25657315
Iteration 154, loss = 0.25383056
Iteration 155, loss = 0.24638926
Iteration 156, loss = 0.24002387
Iteration 157, loss = 0.24658428
Iteration 158, loss = 0.23892174
Iteration 159, loss = 0.23154157
Iteration 160, loss = 0.22787429
Iteration 161, loss = 0.24497651
Iteration 162, loss = 0.23156868
Iteration 163, loss = 0.22120913
Iteration 164, loss = 0.22866860
Iteration 165, loss = 0.22689475
Iteration 166, loss = 0.21326342
Iteration 167, loss = 0.20711492
Iteration 168, loss = 0.21083784
Iteration 169, loss = 0.19768966
Iteration 170, loss = 0.19196966
Iteration 171, loss = 0.19025933
Iteration 172, loss = 0.18947794
Iteration 173, loss = 0.18634857
Iteration 174, loss = 0.18300351
Iteration 175, loss = 0.18574889
Iteration 176, loss = 0.17894688
Iteration 177, loss = 0.17840593
Iteration 178, loss = 0.16738274
Iteration 179, loss = 0.16278082
Iteration 180, loss = 0.16525237
Iteration 181, loss = 0.15945735
Iteration 182, loss = 0.15338608
Iteration 183, loss = 0.15013074
Iteration 184, loss = 0.14846023
Iteration 185, loss = 0.14362081
Iteration 186, loss = 0.14464999
Iteration 187, loss = 0.14179348
Iteration 188, loss = 0.14007166
Iteration 189, loss = 0.13683656
Iteration 190, loss = 0.13529510
Iteration 191, loss = 0.13184997
Iteration 192, loss = 0.13319329
Iteration 193, loss = 0.12867006
Iteration 194, loss = 0.12691674
Iteration 195, loss = 0.12388174
Iteration 196, loss = 0.12129520
Iteration 197, loss = 0.12144853
Iteration 198, loss = 0.11742077
Iteration 199, loss = 0.11685327
Iteration 200, loss = 0.11659604
Iteration 201, loss = 0.11212597
Iteration 202, loss = 0.11238415
Iteration 203, loss = 0.10977778
Iteration 204, loss = 0.10625456
Iteration 205, loss = 0.10500364
Iteration 206, loss = 0.10397974
Iteration 207, loss = 0.10138307
Iteration 208, loss = 0.10068983
Iteration 209, loss = 0.10044981
Iteration 210, loss = 0.09820354
Iteration 211, loss = 0.09681749
Iteration 212, loss = 0.09594799
Iteration 213, loss = 0.09514420
Iteration 214, loss = 0.09519489
Iteration 215, loss = 0.09213506
Iteration 216, loss = 0.09046436
Iteration 217, loss = 0.09004049
Iteration 218, loss = 0.08985042
Iteration 219, loss = 0.08777924
Iteration 220, loss = 0.08503973
Iteration 221, loss = 0.08696804
Iteration 222, loss = 0.08305980
Iteration 223, loss = 0.08518691
Iteration 224, loss = 0.08043955
Iteration 225, loss = 0.07977641
Iteration 226, loss = 0.07986072
Iteration 227, loss = 0.08040168
Iteration 228, loss = 0.07708025
Iteration 229, loss = 0.07505198
Iteration 230, loss = 0.07602573
Iteration 231, loss = 0.07276219
Iteration 232, loss = 0.07562113
Iteration 233, loss = 0.07114271
Iteration 234, loss = 0.06978438
Iteration 235, loss = 0.06880738
Iteration 236, loss = 0.06996384
Iteration 237, loss = 0.06570266
Iteration 238, loss = 0.06669985
Iteration 239, loss = 0.06482239
Iteration 240, loss = 0.06402789
Iteration 241, loss = 0.06531135
Iteration 242, loss = 0.06280282
Iteration 243, loss = 0.06236728
Iteration 244, loss = 0.06286855
Iteration 245, loss = 0.06056425
Iteration 246, loss = 0.06014335
Iteration 247, loss = 0.05853573
Iteration 248, loss = 0.05723045
Iteration 249, loss = 0.05739741
Iteration 250, loss = 0.05481132
Iteration 251, loss = 0.05721661
Iteration 252, loss = 0.05524616
Iteration 253, loss = 0.05538381
Iteration 254, loss = 0.05325890
Iteration 255, loss = 0.05295593
Iteration 256, loss = 0.05275334
Iteration 257, loss = 0.04955006
Iteration 258, loss = 0.05139339
Iteration 259, loss = 0.04921451
Iteration 260, loss = 0.04931748
Iteration 261, loss = 0.04837648
Iteration 262, loss = 0.04710878
Iteration 263, loss = 0.04723575
Iteration 264, loss = 0.04614887
Iteration 265, loss = 0.04621279
Iteration 266, loss = 0.04605704
Iteration 267, loss = 0.04540013
Iteration 268, loss = 0.04544381
Iteration 269, loss = 0.04328662
Iteration 270, loss = 0.04382627
Iteration 271, loss = 0.04234309
Iteration 272, loss = 0.04218345
Iteration 273, loss = 0.04129309
Iteration 274, loss = 0.04030642
Iteration 275, loss = 0.04055009
Iteration 276, loss = 0.03951972
Iteration 277, loss = 0.03992980
Iteration 278, loss = 0.03867121
Iteration 279, loss = 0.03785469
Iteration 280, loss = 0.03802675
Iteration 281, loss = 0.03737693
Iteration 282, loss = 0.03770338
Iteration 283, loss = 0.03622505
Iteration 284, loss = 0.03686695
Iteration 285, loss = 0.03556263
Iteration 286, loss = 0.03647389
Iteration 287, loss = 0.03474954
Iteration 288, loss = 0.03502431
Iteration 289, loss = 0.03475862
Iteration 290, loss = 0.03426351
Iteration 291, loss = 0.03363183
Iteration 292, loss = 0.03349951
Iteration 293, loss = 0.03277998
Iteration 294, loss = 0.03235946
Iteration 295, loss = 0.03203785
Iteration 296, loss = 0.03145067
Iteration 297, loss = 0.03155733
Iteration 298, loss = 0.03137658
Iteration 299, loss = 0.03134420
Iteration 300, loss = 0.03019447
Iteration 301, loss = 0.03042876
Iteration 302, loss = 0.02975197
Iteration 303, loss = 0.02924291
Iteration 304, loss = 0.02888459
Iteration 305, loss = 0.02880626
Iteration 306, loss = 0.02834854
Iteration 307, loss = 0.02822452
Iteration 308, loss = 0.02768527
Iteration 309, loss = 0.02764576
Iteration 310, loss = 0.02725293
Iteration 311, loss = 0.02739186
Iteration 312, loss = 0.02692737
Iteration 313, loss = 0.02696154
Iteration 314, loss = 0.02662775
Iteration 315, loss = 0.02618628
Iteration 316, loss = 0.02565593
Iteration 317, loss = 0.02552524
Iteration 318, loss = 0.02495794
Iteration 319, loss = 0.02504031
Iteration 320, loss = 0.02465111
Iteration 321, loss = 0.02433126
Iteration 322, loss = 0.02406670
Iteration 323, loss = 0.02404056
Iteration 324, loss = 0.02363011
Iteration 325, loss = 0.02374876
Iteration 326, loss = 0.02354046
Iteration 327, loss = 0.02302479
Iteration 328, loss = 0.02286175
Iteration 329, loss = 0.02257783
Iteration 330, loss = 0.02284992
Iteration 331, loss = 0.02230184
Iteration 332, loss = 0.02262902
Iteration 333, loss = 0.02168639
Iteration 334, loss = 0.02195591
Iteration 335, loss = 0.02171162
Iteration 336, loss = 0.02165383
Iteration 337, loss = 0.02109661
Iteration 338, loss = 0.02131012
Iteration 339, loss = 0.02080546
Iteration 340, loss = 0.02075935
Iteration 341, loss = 0.02038401
Iteration 342, loss = 0.02058243
Iteration 343, loss = 0.02002892
Iteration 344, loss = 0.02011800
Iteration 345, loss = 0.01960437
Iteration 346, loss = 0.01973390
Iteration 347, loss = 0.01905136
Iteration 348, loss = 0.01914744
Iteration 349, loss = 0.01889186
Iteration 350, loss = 0.01867411
Iteration 351, loss = 0.01844701
Iteration 352, loss = 0.01825469
Iteration 353, loss = 0.01851393
Iteration 354, loss = 0.01800076
Iteration 355, loss = 0.01821595
Iteration 356, loss = 0.01807276
Iteration 357, loss = 0.01756892
Iteration 358, loss = 0.01735994
Iteration 359, loss = 0.01731166
Iteration 360, loss = 0.01713034
Iteration 361, loss = 0.01706996
Iteration 362, loss = 0.01678749
Iteration 363, loss = 0.01676352
Iteration 364, loss = 0.01661132
Iteration 365, loss = 0.01648569
Iteration 366, loss = 0.01638358
Iteration 367, loss = 0.01618822
Iteration 368, loss = 0.01610310
Iteration 369, loss = 0.01581669
Iteration 370, loss = 0.01577435
Iteration 371, loss = 0.01570061
Iteration 372, loss = 0.01550728
Iteration 373, loss = 0.01542710
Iteration 374, loss = 0.01518061
Iteration 375, loss = 0.01526464
Iteration 376, loss = 0.01500148
Iteration 377, loss = 0.01494031
Iteration 378, loss = 0.01472426
Iteration 379, loss = 0.01488313
Iteration 380, loss = 0.01446523
Iteration 381, loss = 0.01444317
Iteration 382, loss = 0.01446955
Iteration 383, loss = 0.01422004
Iteration 384, loss = 0.01456266
Iteration 385, loss = 0.01400591
Iteration 386, loss = 0.01420237
Iteration 387, loss = 0.01378501
Iteration 388, loss = 0.01393369
Iteration 389, loss = 0.01378539
Iteration 390, loss = 0.01358504
Iteration 391, loss = 0.01339481
Iteration 392, loss = 0.01316152
Iteration 393, loss = 0.01331656
Iteration 394, loss = 0.01305214
Iteration 395, loss = 0.01313528
Iteration 396, loss = 0.01292357
Iteration 397, loss = 0.01281346
Iteration 398, loss = 0.01278512
Iteration 399, loss = 0.01266124
Iteration 400, loss = 0.01275489
Iteration 401, loss = 0.01246800
Iteration 402, loss = 0.01238718
Iteration 403, loss = 0.01222806
Iteration 404, loss = 0.01232714
Iteration 405, loss = 0.01211633
Iteration 406, loss = 0.01213327
Iteration 407, loss = 0.01192353
Iteration 408, loss = 0.01179859
Iteration 409, loss = 0.01181734
Iteration 410, loss = 0.01167429
Iteration 411, loss = 0.01161459
Iteration 412, loss = 0.01150635
Iteration 413, loss = 0.01137979
Iteration 414, loss = 0.01136293
Iteration 415, loss = 0.01124148
Iteration 416, loss = 0.01116909
Iteration 417, loss = 0.01106958
Iteration 418, loss = 0.01112885
Iteration 419, loss = 0.01102717
Iteration 420, loss = 0.01100092
Iteration 421, loss = 0.01088462
Iteration 422, loss = 0.01071617
Iteration 423, loss = 0.01067515
Iteration 424, loss = 0.01061826
Iteration 425, loss = 0.01058898
Iteration 426, loss = 0.01043970
Iteration 427, loss = 0.01046051
Iteration 428, loss = 0.01036475
Iteration 429, loss = 0.01025783
Iteration 430, loss = 0.01016333
Iteration 431, loss = 0.01011236
Iteration 432, loss = 0.01002891
Iteration 433, loss = 0.00997298
Iteration 434, loss = 0.00990440
Iteration 435, loss = 0.00987405
Iteration 436, loss = 0.00977368
Iteration 437, loss = 0.00972299
Iteration 438, loss = 0.00965307
Iteration 439, loss = 0.00963013
Iteration 440, loss = 0.00963374
Iteration 441, loss = 0.00947792
Iteration 442, loss = 0.00944945
Iteration 443, loss = 0.00936516
Iteration 444, loss = 0.00936413
Iteration 445, loss = 0.00933189
Iteration 446, loss = 0.00925145
Iteration 447, loss = 0.00915292
Iteration 448, loss = 0.00909979
Iteration 449, loss = 0.00908135
Iteration 450, loss = 0.00893894
Iteration 451, loss = 0.00897952
Iteration 452, loss = 0.00894545
Iteration 453, loss = 0.00875862
Iteration 454, loss = 0.00879139
Iteration 455, loss = 0.00872836
Iteration 456, loss = 0.00870548
Iteration 457, loss = 0.00859446
Iteration 458, loss = 0.00853730
Iteration 459, loss = 0.00852085
Iteration 460, loss = 0.00844686
Iteration 461, loss = 0.00843206
Iteration 462, loss = 0.00833343
Iteration 463, loss = 0.00836799
Iteration 464, loss = 0.00823136
Iteration 465, loss = 0.00823016
Iteration 466, loss = 0.00817902
Iteration 467, loss = 0.00808829
Iteration 468, loss = 0.00804319
Iteration 469, loss = 0.00799800
Iteration 470, loss = 0.00800092
Iteration 471, loss = 0.00787082
Iteration 472, loss = 0.00781751
Iteration 473, loss = 0.00785724
Iteration 474, loss = 0.00775055
Iteration 475, loss = 0.00772799
Iteration 476, loss = 0.00764483
Iteration 477, loss = 0.00769345
Iteration 478, loss = 0.00761416
Iteration 479, loss = 0.00753918
Iteration 480, loss = 0.00749739
Iteration 481, loss = 0.00748003
Iteration 482, loss = 0.00742645
Training loss did not improve more than tol=0.000100 for 10 consecutive epochs. Stopping.
Out[29]:
MLPClassifier(activation='relu', alpha=0.0001, batch_size='auto', beta_1=0.9,
       beta_2=0.999, early_stopping=False, epsilon=1e-08,
       hidden_layer_sizes=(64, 128, 32), learning_rate='constant',
       learning_rate_init=0.001, max_iter=1000, momentum=0.9,
       n_iter_no_change=10, nesterovs_momentum=True, power_t=0.5,
       random_state=None, shuffle=True, solver='adam', tol=0.0001,
       validation_fraction=0.1, verbose=1, warm_start=False)
In [30]:
NN.score(X, Y)
Out[30]:
1.0
In [31]:
NN.score(X_val, Y_val)
Out[31]:
0.75