In [1]:
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline

In [9]:
# Create sample data: two 1-D Gaussian classes
num_samples_class = 1000
positive_samples = 4 * np.random.randn(num_samples_class) + 4   # class 1: mean 4, sd 4
negative_samples = 2 * np.random.randn(num_samples_class) - 8   # class 0: mean -8, sd 2
x = np.concatenate((negative_samples, positive_samples), axis=0)
y = np.zeros(num_samples_class * 2)
y[num_samples_class:] = 1        # first half is class 0, second half is class 1
y_onehot = np.zeros((num_samples_class * 2, 2))
y_onehot[:num_samples_class, 0] = 1
y_onehot[num_samples_class:, 1] = 1

In [10]:
plt.figure(figsize=(10, 10))
plt.subplot(2, 1, 1)
res = plt.hist(x, bins=100)                                      # pooled samples
plt.subplot(2, 1, 2)
res = plt.hist([positive_samples, negative_samples], bins=100)   # one histogram per class

[Output: two stacked histograms - pooled samples on top, one histogram per class below.]
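
The two classes overlap: the positive cluster (mean 4, sd 4) has a left tail reaching into the negative cluster (mean -8, sd 2), so no threshold separates them perfectly. A quick empirical ceiling check (a sketch; the -2.0 cutoff and the names `threshold`/`pred` are illustrative, not part of the original run):

# Sketch: accuracy of a hand-picked threshold, as a rough upper bound.
# The -2.0 cutoff is an assumed value read off the histograms above.
threshold = -2.0
pred = (x > threshold).astype(float)
print('threshold accuracy ~ {:.3f}'.format((pred == y).mean()))

This lands near the ~0.97 plateau the trained model reaches below.
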
In [11]:
x = np.atleast_2d(x).T    # reshape to a (2000, 1) column vector for the model
print(x.shape)
print(y_onehot.shape)
print(y.shape)


(2000, 1)
(2000, 2)
(2000,)

In [19]:
# Create logistic regression model
w_init = np.random.randn(1)
print('Initial weight value = {}'.format(w_init[0]))
w = tf.Variable(w_init, dtype=tf.float32)
b = tf.Variable(1.0)
weighted_x = w * x + b                    # the logit, shape (2000, 1)
y_prob_pos = tf.nn.sigmoid(weighted_x)    # P(class 1 | x)
y_prob_neg = 1 - y_prob_pos               # P(class 0 | x)
y_prob = tf.concat([y_prob_neg, y_prob_pos], 1)   # (2000, 2), mirrors y_onehot


Initial weight value = -0.9175394857665731
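
The two columns of y_prob mirror the one-hot targets: column 0 holds 1 - sigmoid(wx + b) and column 1 holds sigmoid(wx + b). The same forward pass in plain NumPy, as a sanity check at initialization (a sketch; the `sigmoid` helper and the `p_pos`/`p` names are illustrative, and values match the graph only up to float32 rounding):

# Sketch: NumPy version of the initial forward pass, for cross-checking.
def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

p_pos = sigmoid(w_init[0] * x + 1.0)   # b is initialized to 1.0 above
p = np.hstack([1.0 - p_pos, p_pos])    # same layout as y_prob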

In [20]:
# Loss: L2 (tf.nn.l2_loss is half the *sum* of squared errors, not a mean)
loss = tf.nn.l2_loss(y_prob - y_onehot)
# Accuracy: fraction of samples whose argmax matches the one-hot label
correct_prediction = tf.equal(tf.argmax(y_prob, 1), tf.argmax(y_onehot, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
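
The L2 penalty works on this toy problem, but logistic regression is conventionally fit with cross-entropy applied to the raw logits, which avoids the flat gradients of a squared error on a saturated sigmoid. A minimal TF1-style alternative (a sketch only; the training run below keeps the L2 loss):

# Sketch: conventional cross-entropy loss for this model (not used below).
labels = y.reshape(-1, 1).astype(np.float32)
xent = tf.reduce_mean(
    tf.nn.sigmoid_cross_entropy_with_logits(labels=labels, logits=weighted_x))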

In [21]:
# Optimizer: plain SGD; the rate is small because l2_loss sums over all 2000 samples
optimizer = tf.train.GradientDescentOptimizer(0.0001)
train = optimizer.minimize(loss)
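
minimize() bundles the gradient computation and the variable updates into one op. Written out by hand it is roughly the following (a sketch of plain SGD semantics; `grad_w`, `grad_b`, and `manual_train` are illustrative names):

# Sketch: roughly what minimize() expands to for plain gradient descent.
grad_w, grad_b = tf.gradients(loss, [w, b])
manual_train = tf.group(w.assign_sub(0.0001 * grad_w),
                        b.assign_sub(0.0001 * grad_b))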

In [22]:
# Main loop
accuracy_vals = []
loss_vals = []
with tf.Session() as session:
    session.run(tf.global_variables_initializer())
    init_vals = session.run([loss, w, b])
    print('Initial values: loss={}  w={}  b={}'.format(*init_vals))
    
    for step in range(150):
        print('Step {}'.format(step))
        # the loss fetched alongside `train` reflects the pre-update variables
        # (note that Step 0 below repeats the initial loss)
        vals = session.run([train, loss, accuracy, w, b])
        loss_val, accuracy_val = vals[1:3]
        accuracy_vals.append(accuracy_val)
        loss_vals.append(loss_val)
        print('loss={}  accuracy={}  w={}  b={}'.format(*vals[1:]))
        print()


Initial values: loss=1668.0936279296875  w=[-0.91753948]  b=1.0
Step 0
loss=1668.0936279296875  accuracy=0.11150000244379044  w=[-0.89482242]  b=1.0087300539016724

Step 1
loss=1662.0087890625  accuracy=0.11299999803304672  w=[-0.87074345]  b=1.0176887512207031

Step 2
loss=1655.2130126953125  accuracy=0.11500000208616257  w=[-0.84510338]  b=1.0269001722335815

Step 3
loss=1647.548583984375  accuracy=0.1185000017285347  w=[-0.81765491]  b=1.0363929271697998

Step 4
loss=1638.811767578125  accuracy=0.12150000035762787  w=[-0.788086]  b=1.046201229095459

Step 5
loss=1628.723388671875  accuracy=0.12399999797344208  w=[-0.75599504]  b=1.056366205215454

Step 6
loss=1616.890380859375  accuracy=0.12700000405311584  w=[-0.72085387]  b=1.0669379234313965

Step 7
loss=1602.752685546875  accuracy=0.13050000369548798  w=[-0.68194872]  b=1.0779775381088257

Step 8
loss=1585.463134765625  accuracy=0.13600000739097595  w=[-0.63828444]  b=1.0895600318908691

Step 9
loss=1563.702392578125  accuracy=0.14249999821186066  w=[-0.58841938]  b=1.101775884628296

Step 10
loss=1535.2716064453125  accuracy=0.15049999952316284  w=[-0.53016603]  b=1.114728331565857

Step 11
loss=1496.25927734375  accuracy=0.16249999403953552  w=[-0.46001399]  b=1.128510594367981

Step 12
loss=1439.054443359375  accuracy=0.17249999940395355  w=[-0.3719539]  b=1.1431041955947876

Step 13
loss=1347.1923828125  accuracy=0.19449999928474426  w=[-0.25507811]  b=1.157923936843872

Step 14
loss=1181.400146484375  accuracy=0.27250000834465027  w=[-0.09048638]  b=1.1696035861968994

Step 15
loss=855.7839965820312  accuracy=0.4925000071525574  w=[ 0.13605534]  b=1.1632966995239258

Step 16
loss=310.75335693359375  accuracy=0.6834999918937683  w=[ 0.33673328]  b=1.1427761316299438

Step 17
loss=88.26105499267578  accuracy=0.9750000238418579  w=[ 0.37675837]  b=1.13965904712677

Step 18
loss=75.19526672363281  accuracy=0.9769999980926514  w=[ 0.40252435]  b=1.1384103298187256

Step 19
loss=69.45465850830078  accuracy=0.9764999747276306  w=[ 0.42151076]  b=1.1380817890167236

Step 20
loss=66.24431610107422  accuracy=0.9754999876022339  w=[ 0.43646017]  b=1.1383124589920044

[Steps 21-148 elided: loss decreases monotonically from 64.21 to 52.94 while accuracy hovers between about 0.967 and 0.975.]

Step 149
loss=52.90406799316406  accuracy=0.9710000157356262  w=[ 0.59802371]  b=1.3842774629592896
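
The model switches classes where sigmoid(wx + b) = 0.5, i.e. at x = -b/w. Reading the boundary off the final printed parameters (a sketch; `w_final` and `b_final` are just the Step 149 constants above):

# Sketch: decision boundary implied by the final parameters from Step 149.
w_final, b_final = 0.59802371, 1.3842774629592896
print('decision boundary at x ~ {:.2f}'.format(-b_final / w_final))  # ~ -2.31

That boundary sits between the class means of -8 and +4, consistent with the ~0.97 accuracy plateau.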


In [25]:
plt.plot(accuracy_vals, label='Accuracy')
plt.legend()
plt.show()

[Output: plot of accuracy over training steps.]

In [24]:
plt.plot(loss_vals, label='Loss')
plt.legend()
plt.show()

[Output: plot of loss over training steps.]