XOR operation by NN


In [1]:
import numpy as np
import tensorflow as tf
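# NOTE: This notebook uses the TensorFlow 1.x graph API (placeholders, sessions).
# Under TensorFlow 2.x it should run via the compatibility layer instead
# (an assumption; only tested here against 1.x):
#   import tensorflow.compat.v1 as tf
#   tf.disable_v2_behavior()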

xy = np.loadtxt('../data/xor_data.txt', unpack=True)
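# Assumed contents of xor_data.txt (the XOR truth table, one sample per row;
# the actual file is not shown in this notebook):
#   0 0 0
#   0 1 1
#   1 0 1
#   1 1 0
# With unpack=True, loadtxt returns the transpose, so xy[0:-1] holds the two
# input columns and xy[-1] holds the label column.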

# Need to change the data structure. THESE LINES ARE DIFFERENT FROM THE VIDEO, BUT THEY MAKE THIS CODE WORK!
x_data = np.transpose( xy[0:-1] )
y_data = np.reshape( xy[-1], (4,1) )

X = tf.placeholder(tf.float32)
Y = tf.placeholder(tf.float32)

W1 = tf.Variable(tf.random_uniform([2, 2], -1.0, 1.0))
W2 = tf.Variable(tf.random_uniform([2, 1], -1.0, 1.0))

b1 = tf.Variable(tf.zeros([2]), name="Bias1")
b2 = tf.Variable(tf.zeros([1]), name="Bias2")

# Hypothesis
L2 = tf.sigmoid(tf.matmul(X, W1) + b1)
hypothesis = tf.sigmoid(tf.matmul(L2, W2) + b2)

# Cost function
cost = -tf.reduce_mean(Y * tf.log(hypothesis) + (1 - Y) * tf.log(1. - hypothesis))
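# This is the binary cross-entropy: -mean(y*log(h) + (1-y)*log(1-h)).
# In practice the sigmoid output is often clipped (e.g. with tf.clip_by_value)
# to guard against log(0); that is omitted here to match the original code.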

# Minimize cost.
a = tf.Variable(0.1)
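# a is the learning rate; stored in a Variable as in the video, though a plain
# Python float would work here too.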
optimizer = tf.train.GradientDescentOptimizer(a)
train = optimizer.minimize(cost)

# Initialize all variables.
init = tf.global_variables_initializer()


# Launch the graph
with tf.Session() as sess:
    sess.run(init)
    
    for step in range(8001):
        sess.run(train, feed_dict={X:x_data, Y:y_data})
        
        if step % 1000 == 0:
            print(
                step, 
                sess.run(cost, feed_dict={X:x_data, Y:y_data}), 
                sess.run(W1),
                sess.run(W2)
            )
    
    # Test model
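    # floor(hypothesis + 0.5) rounds the sigmoid output to the nearest class, 0 or 1.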
    correct_prediction = tf.equal(tf.floor(hypothesis + 0.5), Y)
    accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
    
    # Check accuracy
    print(sess.run([hypothesis, tf.floor(hypothesis + 0.5), correct_prediction, accuracy],
                   feed_dict={X:x_data, Y:y_data}))
    print("Accuracy:", accuracy.eval({X:x_data, Y:y_data}))


0 0.775069 [[ 0.44247141  0.39098403]
 [ 0.46621644 -0.90690005]] [[-0.81665426]
 [-0.75026894]]
1000 0.693558 [[ 0.23873281  0.23232996]
 [ 0.36647406 -0.79091221]] [[-0.50986218]
 [-0.3859019 ]]
2000 0.69328 [[ 0.15748738  0.19119805]
 [ 0.34501934 -0.72418791]] [[-0.51492286]
 [-0.3273876 ]]
3000 0.693182 [[ 0.1142639   0.18492918]
 [ 0.33559394 -0.68340254]] [[-0.51999915]
 [-0.30035368]]
4000 0.693122 [[ 0.09158097  0.20464942]
 [ 0.3303014  -0.6536147 ]] [[-0.52253979]
 [-0.29085284]]
5000 0.693046 [[ 0.07976373  0.25198007]
 [ 0.32712704 -0.6303038 ]] [[-0.52165639]
 [-0.29719684]]
6000 0.692888 [[ 0.07216749  0.336905  ]
 [ 0.32650626 -0.61573386]] [[-0.51575494]
 [-0.3259992 ]]
7000 0.692444 [[ 0.06209626  0.48499179]
 [ 0.33071601 -0.62609982]] [[-0.50058013]
 [-0.3962374 ]]
8000 0.690672 [[ 0.03798737  0.76697594]
 [ 0.34549046 -0.72662771]] [[-0.46474057]
 [-0.55941784]]
[array([[ 0.50302058],
       [ 0.51744181],
       [ 0.48202512],
       [ 0.49077356]], dtype=float32), array([[ 1.],
       [ 1.],
       [ 0.],
       [ 0.]], dtype=float32), array([[False],
       [ True],
       [False],
       [ True]], dtype=bool), 0.5]
Accuracy: 0.5

Wide NN

The minimal 2-2-1 network above got stuck: the cost plateaued near 0.693 (ln 2), every output hovered around 0.5, and accuracy was only 50%. Widening the hidden layer from 2 to 10 units lets the same training procedure converge.


In [2]:
xy = np.loadtxt('../data/xor_data.txt', unpack=True)

# Need to change the data structure. THESE LINES ARE DIFFERENT FROM THE VIDEO, BUT THEY MAKE THIS CODE WORK!
x_data = np.transpose( xy[0:-1] )
y_data = np.reshape( xy[-1], (4,1) )

X = tf.placeholder(tf.float32)
Y = tf.placeholder(tf.float32)

W1 = tf.Variable(tf.random_uniform([2, 10], -1.0, 1.0))
W2 = tf.Variable(tf.random_uniform([10, 1], -1.0, 1.0))
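# The only change from the first network: the hidden layer is widened from 2 to 10 units.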

b1 = tf.Variable(tf.zeros([10]), name="Bias1")
b2 = tf.Variable(tf.zeros([1]), name="Bias2")

# Hypothesis
L2 = tf.sigmoid(tf.matmul(X, W1) + b1)
hypothesis = tf.sigmoid(tf.matmul(L2, W2) + b2)

# Cost function
cost = -tf.reduce_mean(Y * tf.log(hypothesis) + (1 - Y) * tf.log(1. - hypothesis))

# Minimize cost.
a = tf.Variable(0.1)
optimizer = tf.train.GradientDescentOptimizer(a)
train = optimizer.minimize(cost)

# Initialize all variables.
init = tf.global_variables_initializer()


# Launch the graph
with tf.Session() as sess:
    sess.run(init)
    
    for step in range(8001):
        sess.run(train, feed_dict={X:x_data, Y:y_data})
        
        if step % 1000 == 0:
            print(
                step, 
                sess.run(cost, feed_dict={X:x_data, Y:y_data}), 
                sess.run(W1),
                sess.run(W2)
            )
    
    # Test model
    correct_prediction = tf.equal(tf.floor(hypothesis + 0.5), Y)
    accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
    
    # Check accuracy
    print(sess.run([hypothesis, tf.floor(hypothesis + 0.5), correct_prediction, accuracy],
                   feed_dict={X:x_data, Y:y_data}))
    print("Accuracy:", accuracy.eval({X:x_data, Y:y_data}))


0 0.798678 [[ 0.02103399  0.19675161  0.5541057  -0.56165111  0.56709307 -0.34424877
  -0.16402172  0.09791764  0.1427699  -0.66125613]
 [-0.66796654  0.71240723 -0.79094571 -0.86746484  0.23207888 -0.71637052
   0.52411413  0.50042135 -0.94200617  0.62007707]] [[ 0.95815873]
 [-0.22290814]
 [ 0.28464881]
 [ 0.54175043]
 [ 0.80042553]
 [ 0.34139615]
 [-0.71788913]
 [-0.15658754]
 [-0.41779959]
 [ 0.81233573]]
1000 0.674114 [[ 0.23058712  0.07503888  0.53520584 -0.39591685  0.95791775 -0.28949049
  -0.26402894  0.04645005 -0.14575563 -1.22078001]
 [-0.72072309  0.65677917 -0.85449433 -0.73510706  0.8175838  -0.67511785
   0.6127491   0.47028908 -0.96545297  0.79778731]] [[ 0.80496705]
 [-0.29705819]
 [ 0.18894559]
 [ 0.07139804]
 [ 0.93787915]
 [ 0.00439789]
 [-0.89556873]
 [-0.27704036]
 [-0.6317597 ]
 [ 1.05922735]]
2000 0.444744 [[ 0.63023311 -0.05193163  0.57011539 -0.39619333  2.34365916 -0.30975381
  -0.79345232 -0.01040776 -0.77836603 -3.08707786]
 [-1.35575676  0.83707166 -1.12573266 -0.72972399  2.16088963 -0.66915774
   1.49985325  0.64205426 -1.09338105  2.37807918]] [[ 1.20824671]
 [-0.78070277]
 [ 0.54673725]
 [-0.02933369]
 [ 2.24504542]
 [-0.058596  ]
 [-1.76924944]
 [-0.69885093]
 [-0.81919789]
 [ 3.02683663]]
3000 0.123128 [[ 1.3183403  -0.16039018  0.71800286 -0.39610964  3.44169927 -0.31379375
  -2.02206373 -0.03584113 -1.24078321 -4.8269639 ]
 [-2.39472508  1.27319074 -1.63876629 -0.73464441  3.26790428 -0.66862875
   3.11318493  1.00874352 -1.33984303  3.9434588 ]] [[ 2.23195291]
 [-1.36395729]
 [ 1.1314671 ]
 [ 0.11876618]
 [ 3.66509509]
 [ 0.07996891]
 [-3.56468892]
 [-1.18854141]
 [-1.01113951]
 [ 5.85423279]]
4000 0.0526786 [[  1.68054318e+00  -1.71366557e-01   8.04726243e-01  -4.04419214e-01
    3.81446195e+00  -3.22349697e-01  -2.59864974e+00   2.49014073e-03
   -1.37761593e+00  -5.51529932e+00]
 [ -2.91105413e+00   1.46791112e+00  -1.90026057e+00  -7.41572499e-01
    3.73271775e+00  -6.76115513e-01   3.82756948e+00   1.15304899e+00
   -1.48331809e+00   4.53346872e+00]] [[ 2.77428627]
 [-1.58503699]
 [ 1.3770802 ]
 [ 0.21091343]
 [ 4.25415468]
 [ 0.16072898]
 [-4.49925423]
 [-1.3760848 ]
 [-1.08589625]
 [ 7.36511087]]
5000 0.0314661 [[ 1.88953292 -0.17138986  0.86016405 -0.41612732  3.99735355 -0.33335376
  -2.8988626   0.03657828 -1.44194257 -5.87418509]
 [-3.19682384  1.57087457 -2.04890609 -0.74536031  3.97594452 -0.68103719
   4.19246292  1.22163498 -1.56544173  4.83361912]] [[ 3.08997798]
 [-1.70144176]
 [ 1.50979912]
 [ 0.26629934]
 [ 4.56199837]
 [ 0.20839572]
 [-5.02603102]
 [-1.47653663]
 [-1.1246599 ]
 [ 8.25666237]]
6000 0.0219704 [[ 2.0317955  -0.17034146  0.90115798 -0.42764357  4.11244297 -0.34395161
  -3.09131813  0.06423376 -1.48173332 -6.10644388]
 [-3.3870039   1.63825917 -2.15019774 -0.7476241   4.13295031 -0.68442529
   4.42459297  1.2626127  -1.6207937   5.0258007 ]] [[ 3.30780697]
 [-1.77871704]
 [ 1.59888577]
 [ 0.30537474]
 [ 4.7611804 ]
 [ 0.24174933]
 [-5.38122654]
 [-1.54418278]
 [-1.14998353]
 [ 8.87486267]]
7000 0.0167176 [[ 2.13827395 -0.16939855  0.93387616 -0.43835813  4.19457865 -0.35371229
  -3.22977877  0.08730399 -1.50988472 -6.27492046]
 [-3.5273056   1.68762672 -2.22641635 -0.74906033  4.24651432 -0.6869095
   4.59087896  1.29023862 -1.66193569  5.16442537]] [[ 3.47291923]
 [-1.83618295]
 [ 1.66559207]
 [ 0.33555505]
 [ 4.90566778]
 [ 0.26738712]
 [-5.64606857]
 [-1.5950942 ]
 [-1.16861248]
 [ 9.34475899]]
8000 0.0134217 [[ 2.2227819  -0.16870861  0.96121216 -0.44823739  4.25767565 -0.36265504
  -3.33657885  0.10712343 -1.53140485 -6.40573263]
 [-3.6375041   1.72630429 -2.28730106 -0.74999362  4.33447456 -0.68881208
   4.71876383  1.31024504 -1.69442785  5.27170849]] [[ 3.60542035]
 [-1.88180625]
 [ 1.71882296]
 [ 0.36017933]
 [ 5.01789904]
 [ 0.28823808]
 [-5.85599089]
 [-1.63593173]
 [-1.18329799]
 [ 9.72258091]]
[array([[ 0.00762702],
       [ 0.98428243],
       [ 0.98944163],
       [ 0.01938342]], dtype=float32), array([[ 0.],
       [ 1.],
       [ 1.],
       [ 0.]], dtype=float32), array([[ True],
       [ True],
       [ True],
       [ True]], dtype=bool), 1.0]
Accuracy: 1.0

Deep NN

Instead of widening a single hidden layer, this version stacks two hidden layers (5 and 4 units) and trains for 20,000 steps; the deeper network also solves XOR, driving the cost down to about 0.002.


In [3]:
xy = np.loadtxt('../data/xor_data.txt', unpack=True)

x_data = np.transpose( xy[0:-1] )
y_data = np.reshape( xy[-1], (4,1) )

X = tf.placeholder(tf.float32)
Y = tf.placeholder(tf.float32)

# Deep network configuration: use more layers.
W1 = tf.Variable(tf.random_uniform([2, 5], -1.0, 1.0))
W2 = tf.Variable(tf.random_uniform([5, 4], -1.0, 1.0))
W3 = tf.Variable(tf.random_uniform([4, 1], -1.0, 1.0))
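# Layer widths chain 2 -> 5 -> 4 -> 1; each weight matrix's output width must
# match the next one's input width.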


b1 = tf.Variable(tf.zeros([5]), name="Bias1")
b2 = tf.Variable(tf.zeros([4]), name="Bias2")
b3 = tf.Variable(tf.zeros([1]), name="Bias3")


# Hypothesis
L2 = tf.sigmoid(tf.matmul(X, W1) + b1)
L3 = tf.sigmoid(tf.matmul(L2, W2) + b2)
hypothesis = tf.sigmoid(tf.matmul(L3, W3) + b3)

# Cost function
cost = -tf.reduce_mean(Y * tf.log(hypothesis) + (1 - Y) * tf.log(1. - hypothesis))

# Minimize cost.
a = tf.Variable(0.1)
optimizer = tf.train.GradientDescentOptimizer(a)
train = optimizer.minimize(cost)

# Initialize all variables.
init = tf.global_variables_initializer()


# Launch the graph
with tf.Session() as sess:
    sess.run(init)
    
    for step in range(20001):
        sess.run(train, feed_dict={X:x_data, Y:y_data})
        
        if step % 1000 == 0:
            print(
                step, 
                sess.run(cost, feed_dict={X:x_data, Y:y_data}), 
                sess.run(W1),
                sess.run(W2)
            )
    
    # Test model
    correct_prediction = tf.equal(tf.floor(hypothesis + 0.5), Y)
    accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
    
    # Check accuracy
    print(sess.run([hypothesis, tf.floor(hypothesis + 0.5), correct_prediction, accuracy],
                   feed_dict={X:x_data, Y:y_data}))
    print("Accuracy:", accuracy.eval({X:x_data, Y:y_data}))


0 0.697041 [[ 0.95605469  0.75711745  0.59702736 -0.9390952  -0.5310573 ]
 [-0.75973904 -0.50769818  0.06609664  0.99703813  0.00146515]] [[ 0.1345882  -0.51743108  0.88957232  0.11179852]
 [-0.5763818   0.16810472 -0.0363602  -0.8652603 ]
 [-0.33947784  0.01499494  0.48619843 -0.29336387]
 [-0.40024582  0.44368723  0.53096282  0.6771282 ]
 [ 0.23805985  0.24192876 -0.59005243  0.69964278]]
1000 0.692887 [[ 0.89666981  0.66485041  0.59060746 -0.93575126 -0.50193608]
 [-0.76122004 -0.57608128  0.1471519   1.03651834 -0.07077873]] [[ 0.15188825 -0.51847261  0.83967674  0.17491689]
 [-0.56681234  0.16726889 -0.06744049 -0.80958426]
 [-0.34369978  0.01416831  0.4955653  -0.23326661]
 [-0.42197332  0.44316617  0.60117179  0.72913063]
 [ 0.22717604  0.24157192 -0.56813687  0.72968936]]
2000 0.692072 [[ 0.86124533  0.63032818  0.60698205 -0.94208407 -0.50699174]
 [-0.76523095 -0.70051682  0.22499198  1.08733642 -0.12198293]] [[ 0.15883733 -0.52003384  0.81515908  0.17788871]
 [-0.56913435  0.16687359 -0.07545284 -0.81525958]
 [-0.34825626  0.01273432  0.51235676 -0.18635157]
 [-0.43029344  0.44173449  0.64277214  0.83490646]
 [ 0.22377518  0.24090479 -0.56230062  0.77040654]]
3000 0.690505 [[ 0.83578479  0.66076213  0.64482182 -0.95375896 -0.54633003]
 [-0.76988834 -0.8831231   0.31778693  1.17664909 -0.16855444]] [[ 0.15974495 -0.52108938  0.8074553   0.12853925]
 [-0.58658051  0.16973668 -0.05806975 -0.89326376]
 [-0.35835674  0.01163109  0.54690236 -0.15015759]
 [-0.43445969  0.4388375   0.67477387  1.00669992]
 [ 0.22190711  0.23990621 -0.56304699  0.82218254]]
4000 0.684755 [[ 0.81882542  0.82253301  0.71326393 -0.99781173 -0.64690965]
 [-0.78024286 -1.18089879  0.44794038  1.37533331 -0.2190403 ]] [[ 0.1618906  -0.52212369  0.80761236 -0.00727804]
 [-0.62525564  0.18219782 -0.00512543 -1.12422264]
 [-0.37974101  0.01346492  0.61702013 -0.13472921]
 [-0.43475848  0.43254897  0.70357978  1.33417702]
 [ 0.21949492  0.23923971 -0.56918323  0.90211952]]
5000 0.643024 [[ 0.85171658  1.38937545  0.83934069 -1.31017625 -0.90491003]
 [-0.83863509 -1.82011437  0.67481512  1.91900074 -0.28317237]] [[ 0.19223663 -0.53717667  0.78133768 -0.36577877]
 [-0.68919975  0.22788647  0.10131876 -1.87488222]
 [-0.42907885  0.03450707  0.77728707 -0.18282205]
 [-0.44689563  0.42439064  0.76639712  2.17600846]
 [ 0.20731129  0.24697189 -0.5757435   1.09122312]]
6000 0.516998 [[ 1.09183979  2.31015491  0.94646549 -2.08695912 -1.25955486]
 [-1.02132463 -2.7796247   1.11433983  2.71073413 -0.31848958]] [[ 0.36561677 -0.80900568  0.52907747 -0.83191264]
 [-0.67392212  0.21864384  0.11456567 -3.22845292]
 [-0.51984751  0.12312097  1.05324543 -0.1198236 ]
 [-0.59091914  0.5903554   1.01413727  3.7344439 ]
 [ 0.141872    0.35389698 -0.56018978  1.4912616 ]]
7000 0.230144 [[ 2.24733901  2.88104391  1.01565039 -2.94653988 -1.52406144]
 [-1.46160042 -3.46098495  1.69672036  2.8600862   0.0758318 ]] [[ 1.0089314  -2.48079491 -0.39121228 -0.95303684]
 [-0.70207727 -0.10389721  0.25492921 -4.16708899]
 [-0.56351823  0.21303873  1.11197484  0.29004368]
 [-1.04132593  1.86098683  1.42795753  4.8270731 ]
 [ 0.01767837  0.72127366 -0.6658811   1.88711059]]
8000 0.0509878 [[ 2.95263577  3.38959885  1.31469834 -3.47235584 -1.63999903]
 [-2.14629865 -4.03987455  1.92947459  3.18308353  0.68922371]] [[ 1.31603324 -3.5252254  -0.95746434 -0.98801726]
 [-1.28707242 -0.18136118  1.322016   -4.93823338]
 [-0.65290093  0.38271356  1.16526413  0.53123593]
 [-1.38757145  2.90834236  1.9234885   5.17796087]
 [ 0.03348464  0.90840387 -0.78736949  2.21367645]]
9000 0.0226877 [[ 3.19137001  3.61979628  1.46607673 -3.69799042 -1.66846132]
 [-2.38367224 -4.30958033  2.01128054  3.30803514  0.96641535]] [[ 1.43008459 -3.82325268 -1.18499613 -0.99471754]
 [-1.62210107 -0.18838213  1.90035188 -5.2304492 ]
 [-0.70819014  0.44056606  1.23058391  0.5818525 ]
 [-1.52888238  3.21121097  2.17703891  5.26014233]
 [ 0.091375    0.96855807 -0.88720882  2.36221695]]
10000 0.0135322 [[ 3.32075     3.7543242   1.55241275 -3.82823753 -1.6877737 ]
 [-2.51254344 -4.46109486  2.05879021  3.38238716  1.12141347]] [[ 1.50470626 -3.96974397 -1.3266325  -0.99846429]
 [-1.82391071 -0.18870907  2.23234177 -5.38218451]
 [-0.74466228  0.47015935  1.28024602  0.5979706 ]
 [-1.61819375  3.35982752  2.34128952  5.29529667]
 [ 0.13846341  0.99626601 -0.95690787  2.44488263]]
11000 0.00933125 [[ 3.40613961  3.84586573  1.61041641 -3.91574311 -1.70367575]
 [-2.59783745 -4.56155586  2.09151411  3.43512058  1.22388339]] [[ 1.55964208 -4.06113577 -1.42645502 -1.00093687]
 [-1.96385789 -0.18773744  2.45419717 -5.47860193]
 [-0.77159411  0.4884958   1.31866872  0.60471678]
 [-1.68275666  3.4518652   2.45900679  5.31542301]
 [ 0.1755984   1.01211452 -1.00798595  2.49921036]]
12000 0.00699918 [[ 3.46866822  3.91390514  1.65326893 -3.98021054 -1.71725893]
 [-2.66038156 -4.63502645  2.11615419  3.47563219  1.29871237]] [[ 1.60274911 -4.12557364 -1.50228775 -1.00267673]
 [-2.06957698 -0.18648183  2.61693549 -5.54715109]
 [-0.79289049  0.50121158  1.34941459  0.60796982]
 [-1.73298752  3.51629543  2.54921341  5.32881021]
 [ 0.20571899  1.02242649 -1.04760063  2.53865576]]
13000 0.00554254 [[ 3.51745749  3.96741438  1.68687642 -4.03061342 -1.72908831]
 [-2.70919895 -4.6921978   2.13576007  3.50830555  1.35690939]] [[ 1.63804007 -4.17447233 -1.56283844 -1.00395536]
 [-2.1539216  -0.18520726  2.74368286 -5.59940338]
 [-0.81048352  0.5106892   1.37477899  0.60964346]
 [-1.77394557  3.56488657  2.62161183  5.33853817]
 [ 0.23086864  1.02971005 -1.07966685  2.56917524]]
14000 0.00455733 [[ 3.55717897  4.01116705  1.71432638 -4.07166719 -1.73954642]
 [-2.74893022 -4.73861313  2.15195918  3.53554773  1.40415168]] [[ 1.6678189  -4.21342802 -1.61291301 -1.00492859]
 [-2.22377372 -0.18398842  2.84655333 -5.64114332]
 [-0.82546055  0.51811159  1.39622962  0.61050874]
 [-1.80844283  3.60339689  2.68168592  5.3460331 ]
 [ 0.2523737   1.03515363 -1.10645485  2.59383845]]
15000 0.00385177 [[ 3.59051466  4.0479722   1.73741317 -4.10610771 -1.74890876]
 [-2.78225279 -4.77745152  2.16571355  3.55882573  1.44370008]] [[ 1.69352365 -4.24555302 -1.65541077 -1.00569427]
 [-2.28320622 -0.18284798  2.93258071 -5.67562485]
 [-0.83849102  0.52413672  1.41473198  0.61092472]
 [-1.8381958   3.63501239  2.7327888   5.35207224]
 [ 0.27111489  1.03939247 -1.12937367  2.6144042 ]]
16000 0.00332436 [[ 3.61913466  4.0796051   1.75726295 -4.1356554  -1.75737369]
 [-2.81083941 -4.81071091  2.17762733  3.57909131  1.4775815 ]] [[ 1.71610117 -4.27273273 -1.69220114 -1.00630832]
 [-2.33481383 -0.18178701  3.00616789 -5.70483398]
 [-0.8500185   0.529163    1.43094957  0.61107308]
 [-1.86432528  3.66165972  2.77710438  5.35706043]
 [ 0.28769764  1.0427947  -1.1493485   2.63196254]]
17000 0.0029167 [[ 3.64414382  4.10726213  1.77462542 -4.16145039 -1.76509666]
 [-2.83579278 -4.83970118  2.1881249   3.59699512  1.50713336]] [[ 1.73621023 -4.29618692 -1.72455311 -1.00679636]
 [-2.38034511 -0.18080115  3.07023501 -5.73006296]
 [-0.86035013  0.53344399  1.4453516   0.61105937]
 [-1.88759935  3.68458033  2.81612563  5.36125708]
 [ 0.30255306  1.04559517 -1.16701555  2.64722705]]
18000 0.00259315 [[ 3.66630125  4.13176441  1.79002178 -4.18428802 -1.77219641]
 [-2.85788703 -4.86533642  2.1974802   3.61300731  1.53327978]] [[ 1.75432336 -4.31675959 -1.75336421 -1.00719929]
 [-2.42103028 -0.17988262  3.1268096  -5.75219011]
 [-0.86970794  0.53715283  1.45827866  0.61094409]
 [-1.90856993  3.70461249  2.85091376  5.36488962]
 [ 0.31599742  1.04793918 -1.18282938  2.66069889]]
19000 0.00233066 [[ 3.68616295  4.15370989  1.80382919 -4.20474148 -1.7787627 ]
 [-2.87766671 -4.88826847  2.20591927  3.62746549  1.55668592]] [[ 1.7707895  -4.33500957 -1.77929199 -1.00755692]
 [-2.4577651  -0.17902552  3.17735028 -5.77183867]
 [-0.87825876  0.54041117  1.46999025  0.61076528]
 [-1.92764163  3.72235489  2.88224578  5.36809063]
 [ 0.32826883  1.04994309 -1.19712472  2.67272687]]
20000 0.0021138 [[ 3.70413589  4.17356157  1.81632805 -4.22322464 -1.78487492]
 [-2.8955524  -4.90897608  2.21359062  3.64063215  1.57784128]] [[ 1.78587902 -4.35140085 -1.80282974 -1.00782335]
 [-2.49122238 -0.17822286  3.22293925 -5.78948212]
 [-0.88612741  0.54330546  1.48067904  0.61052734]
 [-1.94512582  3.73823786  2.91070914  5.37095165]
 [ 0.33955079  1.05167103 -1.21015584  2.68357778]]
[array([[ 0.00185121],
       [ 0.99781692],
       [ 0.9977603 ],
       [ 0.00217223]], dtype=float32), array([[ 0.],
       [ 1.],
       [ 1.],
       [ 0.]], dtype=float32), array([[ True],
       [ True],
       [ True],
       [ True]], dtype=bool), 1.0]
Accuracy: 1.0
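
For reference, the same deep XOR network can be written against the modern Keras API. This is a minimal sketch assuming TensorFlow 2.x (it is not part of the original notebook), and, as the first experiment showed, convergence can still depend on the random initialization:

import numpy as np
import tensorflow as tf

# XOR truth table, one sample per row.
x_data = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=np.float32)
y_data = np.array([[0], [1], [1], [0]], dtype=np.float32)

# 2 -> 5 -> 4 -> 1 sigmoid network, mirroring the deep configuration above.
model = tf.keras.Sequential([
    tf.keras.Input(shape=(2,)),
    tf.keras.layers.Dense(5, activation="sigmoid"),
    tf.keras.layers.Dense(4, activation="sigmoid"),
    tf.keras.layers.Dense(1, activation="sigmoid"),
])

model.compile(optimizer=tf.keras.optimizers.SGD(learning_rate=0.1),
              loss="binary_crossentropy", metrics=["accuracy"])

model.fit(x_data, y_data, epochs=20000, batch_size=4, verbose=0)
print(model.predict(x_data))  # should approach [0, 1, 1, 0] once trained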