In [14]:
import tensorflow as tf
import matplotlib.pyplot as plt

from sklearn.pipeline import Pipeline
from sklearn import datasets, linear_model
from sklearn import cross_validation  # deprecated in newer scikit-learn; train_test_split now lives in sklearn.model_selection
import numpy as np
import pandas as pd
from sklearn import preprocessing

In [31]:
df = pd.read_excel("data0505.xlsx",header=0,dtype=np.longfloat)
# clean up data
df = df.dropna(how = 'all')
df = df.fillna(0)
df = df.round(4)
df.head()


---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-31-a7d594b6a404> in <module>()
----> 1 df = pd.read_excel("data0505.xlsx",header=0,dtype=np.longfloat)
      2 # clean up data
      3 df = df.dropna(how = 'all')
      4 df = df.fillna(0)
      5 df = df.round(4)

/Applications/anaconda/lib/python3.5/site-packages/pandas/io/excel.py in read_excel(io, sheetname, header, skiprows, skip_footer, index_col, names, parse_cols, parse_dates, date_parser, na_values, thousands, convert_float, has_index_names, converters, engine, squeeze, **kwds)
    176         convert_float=convert_float, has_index_names=has_index_names,
    177         skip_footer=skip_footer, converters=converters,
--> 178         squeeze=squeeze, **kwds)
    179 
    180 

/Applications/anaconda/lib/python3.5/site-packages/pandas/io/excel.py in _parse_excel(self, sheetname, header, skiprows, names, skip_footer, index_col, has_index_names, parse_cols, parse_dates, date_parser, na_values, thousands, convert_float, verbose, squeeze, **kwds)
    463                                     skip_footer=skip_footer,
    464                                     squeeze=squeeze,
--> 465                                     **kwds)
    466 
    467                 output[asheetname] = parser.read()

/Applications/anaconda/lib/python3.5/site-packages/pandas/io/parsers.py in TextParser(*args, **kwds)
   1470     """
   1471     kwds['engine'] = 'python'
-> 1472     return TextFileReader(*args, **kwds)
   1473 
   1474 

/Applications/anaconda/lib/python3.5/site-packages/pandas/io/parsers.py in __init__(self, f, engine, **kwds)
    633         self._engine = None
    634 
--> 635         options = self._get_options_with_defaults(engine)
    636 
    637         self.chunksize = options.pop('chunksize', None)

/Applications/anaconda/lib/python3.5/site-packages/pandas/io/parsers.py in _get_options_with_defaults(self, engine)
    670                         raise ValueError(
    671                             'The %r option is not supported with the'
--> 672                             ' %r engine' % (argname, engine))
    673             else:
    674                 value = default

ValueError: The 'dtype' option is not supported with the 'python' engine
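
The error is exactly what the traceback says: read_excel hands the parsed sheet to pandas' pure-Python TextParser, and that engine does not accept a dtype keyword. If extended precision is still wanted, one workaround (an editorial sketch, not part of the original notebook) is to read the sheet normally and cast afterwards:

# Hypothetical workaround: read without dtype, then cast the numeric frame to extended precision
df = pd.read_excel("data0505.xlsx", header=0)
df = df.astype(np.longdouble)  # np.longfloat is just an alias of np.longdouble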

In [28]:
# Same read and clean-up as above, but without the unsupported dtype argument
df = pd.read_excel("data0505.xlsx", header=0)
df = df.dropna(how='all')
df = df.fillna(0)
df = df.round(4)
df.head()

Out[28]:
   SOC  SOH   Power   T  SEI_after  SEI_delta
0   80    0 -1.0000  23        0.0       -0.0
1   80    0 -0.8947  23        0.0       -0.0
2   80    0 -0.7895  23        0.0        0.0
3   80    0 -0.6842  23        0.0       -0.0
4   80    0 -0.5790  23        0.0        0.0

In [22]:
# z-score normalization; any column with zero standard deviation comes out as NaN
df_normalized = (df - df.mean()) / df.std()
# min_max_scaler = preprocessing.MinMaxScaler()
# np_scaled = min_max_scaler.fit_transform(df)
# df_normalized = pd.DataFrame(np_scaled)
df_normalized.head()


Out[22]:
        SOC  SOH     Power   T  SEI_after  SEI_delta
0  1.703816  NaN -1.647432 NaN        NaN        NaN
1  1.703816  NaN -1.473957 NaN        NaN        NaN
2  1.703816  NaN -1.300648 NaN        NaN        NaN
3  1.703816  NaN -1.127173 NaN        NaN        NaN
4  1.703816  NaN -0.953863 NaN        NaN        NaN
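
The NaN columns are a direct consequence of the z-score formula: SOH, T, SEI_after and SEI_delta are constant after the clean-up, so (value - mean)/std is 0/0. A minimal guard, assuming constant columns should simply normalize to zero (an editorial suggestion, not from the original notebook):

# Assumption: constant columns should map to 0 rather than NaN
std = df.std().replace(0, 1)
df_normalized = (df - df.mean()) / std
df_normalized.head()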

In [17]:
x = np.array(df_normalized.iloc[:, 0:3])  # first three columns are SoC, SoH, Power
y = np.array(df_normalized.iloc[:, 5])    # delta SEI
X_train, X_test, Y_train, Y_test = cross_validation.train_test_split(
x, y, test_size=0.2, random_state=42)
total_len = X_train.shape[0]
total_len


Out[17]:
9760

In [20]:
print(str.format('{0:.15f}', y[1]))


0.000000000000000
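
The target value printed above is exactly zero, and the labels shown during training below are zero as well, so before reading too much into a very small MSE it is worth checking how much of y is actually non-zero. A quick diagnostic sketch (editorial, not part of the original notebook):

# Editorial diagnostic: how sparse is the regression target?
print("non-zero targets:", np.count_nonzero(y), "of", y.size)
print("max |y|:", np.abs(y).max())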

In [8]:
# Parameters
learning_rate = 0.001
training_epochs = 50
batch_size = 100
display_step = 1
dropout_rate = 0.1  # defined but never used below
# Network Parameters
n_hidden_1 = 10 # 1st layer number of features
n_hidden_2 = 5 # 2nd layer number of features
n_input = X_train.shape[1]
n_classes = 1

# tf Graph input
x = tf.placeholder("float", [None, n_input])
y = tf.placeholder("float", [None])

In [9]:
# Create model
def multilayer_perceptron(x, weights, biases):
    # Hidden layer with RELU activation
    layer_1 = tf.add(tf.matmul(x, weights['h1']), biases['b1'])
    layer_1 = tf.nn.relu(layer_1)

    # Hidden layer with RELU activation
    layer_2 = tf.add(tf.matmul(layer_1, weights['h2']), biases['b2'])
    layer_2 = tf.nn.relu(layer_2)

    # Output layer with linear activation
    out_layer = tf.matmul(layer_2, weights['out']) + biases['out']
    return out_layer
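
For reference, the forward pass above is just two ReLU layers followed by a linear output. The same computation written with plain NumPy (an illustrative sketch with hypothetical weight arrays, only to make the shapes explicit):

# Illustrative only: the same math as multilayer_perceptron, in NumPy
# W1: (n_input, n_hidden_1), W2: (n_hidden_1, n_hidden_2), W_out: (n_hidden_2, 1)
def mlp_forward_np(x_batch, W1, b1, W2, b2, W_out, b_out):
    relu = lambda a: np.maximum(a, 0)
    h1 = relu(x_batch @ W1 + b1)   # hidden layer 1
    h2 = relu(h1 @ W2 + b2)        # hidden layer 2
    return h2 @ W_out + b_out      # linear output layer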

In [10]:
# Store layers weight & bias
weights = {
    'h1': tf.Variable(tf.random_normal([n_input, n_hidden_1], 0, 0.1)),
    'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2], 0, 0.1)),
    'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes], 0, 0.1))
}
biases = {
    'b1': tf.Variable(tf.random_normal([n_hidden_1], 0, 0.1)),
    'b2': tf.Variable(tf.random_normal([n_hidden_2], 0, 0.1)),
    'out': tf.Variable(tf.random_normal([n_classes], 0, 0.1))
}

In [11]:
# Construct model
pred = multilayer_perceptron(x, weights, biases)

In [12]:
# Define loss (mean squared error) and optimizer
# pred has shape [batch, 1] while y has shape [batch]; the transpose aligns them before broadcasting
cost = tf.reduce_mean(tf.square(tf.transpose(pred) - y))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)

# Launch the graph
with tf.Session() as sess:
    sess.run(tf.initialize_all_variables())

    # Training cycle
    for epoch in range(training_epochs):
        avg_cost = 0.
        total_batch = int(total_len/batch_size)
        # Loop over all batches
        for i in range(total_batch-1):  # note: the last batch is skipped, so avg_cost slightly understates the mean
            batch_x = X_train[i*batch_size:(i+1)*batch_size]
            batch_y = Y_train[i*batch_size:(i+1)*batch_size]
            # Run optimization op (backprop) and cost op (to get loss value)
            _, c, p = sess.run([optimizer, cost, pred], feed_dict={x: batch_x,
                                                          y: batch_y})
            # Compute average loss
            avg_cost += c / total_batch

        # sample prediction
        label_value = batch_y
        estimate = p
        err = label_value-estimate
        print ("num batch:", total_batch)

        # Display logs per epoch step
        if epoch % display_step == 0:
            print ("Epoch:", '%04d' % (epoch+1), "cost=", \
                "{:.9f}".format(avg_cost))
            print ("[*]----------------------------")
            for i in range(3):
                print ("label value:", label_value[i], \
                    "estimated value:", estimate[i])
            print ("[*]============================")

    print ("Optimization Finished!")
    
    # Test model
    # correct_prediction = tf.equal(tf.argmax(pred,0), tf.argmax(y,0))
    # Calculate mean squared error on the held-out test set
    mse = tf.reduce_mean(tf.square(tf.transpose(pred) - y))
    print ("MSE:", mse.eval({x: X_test, y: Y_test}))


num batch: 97
Epoch: 0001 cost= 0.001209991
[*]----------------------------
label value: 0.0 estimated value: [ 0.00404518]
label value: 0.0 estimated value: [ 0.00695085]
label value: 0.0 estimated value: [-0.00441298]
[*]============================
num batch: 97
Epoch: 0002 cost= 0.000012889
[*]----------------------------
label value: 0.0 estimated value: [ 0.0029598]
label value: 0.0 estimated value: [ 0.00522447]
label value: 0.0 estimated value: [-0.00387982]
[*]============================
num batch: 97
Epoch: 0003 cost= 0.000008064
[*]----------------------------
label value: 0.0 estimated value: [ 0.00236373]
label value: 0.0 estimated value: [ 0.00403679]
label value: 0.0 estimated value: [-0.0030103]
[*]============================
num batch: 97
Epoch: 0004 cost= 0.000004712
[*]----------------------------
label value: 0.0 estimated value: [ 0.00176073]
label value: 0.0 estimated value: [ 0.00297401]
label value: 0.0 estimated value: [-0.00228744]
[*]============================
num batch: 97
Epoch: 0005 cost= 0.000002586
[*]----------------------------
label value: 0.0 estimated value: [ 0.00125664]
label value: 0.0 estimated value: [ 0.00207181]
label value: 0.0 estimated value: [-0.00167676]
[*]============================
num batch: 97
Epoch: 0006 cost= 0.000001322
[*]----------------------------
label value: 0.0 estimated value: [ 0.00085248]
label value: 0.0 estimated value: [ 0.00133884]
label value: 0.0 estimated value: [-0.00117343]
[*]============================
num batch: 97
Epoch: 0007 cost= 0.000000626
[*]----------------------------
label value: 0.0 estimated value: [ 0.0005428]
label value: 0.0 estimated value: [ 0.00077347]
label value: 0.0 estimated value: [-0.0007759]
[*]============================
num batch: 97
Epoch: 0008 cost= 0.000000277
[*]----------------------------
label value: 0.0 estimated value: [ 0.00032021]
label value: 0.0 estimated value: [ 0.00036713]
label value: 0.0 estimated value: [-0.00047934]
[*]============================
num batch: 97
Epoch: 0009 cost= 0.000000122
[*]----------------------------
label value: 0.0 estimated value: [ 0.00017164]
label value: 0.0 estimated value: [  9.82955098e-05]
label value: 0.0 estimated value: [-0.00027397]
[*]============================
num batch: 97
Epoch: 0010 cost= 0.000000060
[*]----------------------------
label value: 0.0 estimated value: [  7.88420439e-05]
label value: 0.0 estimated value: [ -6.53229654e-05]
label value: 0.0 estimated value: [-0.00014188]
[*]============================
num batch: 97
Epoch: 0011 cost= 0.000000038
[*]----------------------------
label value: 0.0 estimated value: [  2.47173011e-05]
label value: 0.0 estimated value: [-0.00015584]
label value: 0.0 estimated value: [ -6.20558858e-05]
[*]============================
num batch: 97
Epoch: 0012 cost= 0.000000030
[*]----------------------------
label value: 0.0 estimated value: [ -4.93600965e-06]
label value: 0.0 estimated value: [-0.00020028]
label value: 0.0 estimated value: [ -1.58995390e-05]
[*]============================
num batch: 97
Epoch: 0013 cost= 0.000000027
[*]----------------------------
label value: 0.0 estimated value: [ -2.03326344e-05]
label value: 0.0 estimated value: [-0.00021815]
label value: 0.0 estimated value: [  1.05500221e-05]
[*]============================
num batch: 97
Epoch: 0014 cost= 0.000000025
[*]----------------------------
label value: 0.0 estimated value: [ -2.77869403e-05]
label value: 0.0 estimated value: [-0.00022158]
label value: 0.0 estimated value: [  2.61142850e-05]
[*]============================
num batch: 97
Epoch: 0015 cost= 0.000000024
[*]----------------------------
label value: 0.0 estimated value: [ -3.12067568e-05]
label value: 0.0 estimated value: [-0.00021797]
label value: 0.0 estimated value: [  3.65599990e-05]
[*]============================
num batch: 97
Epoch: 0016 cost= 0.000000023
[*]----------------------------
label value: 0.0 estimated value: [ -3.26894224e-05]
label value: 0.0 estimated value: [-0.00021117]
label value: 0.0 estimated value: [  4.46736813e-05]
[*]============================
num batch: 97
Epoch: 0017 cost= 0.000000021
[*]----------------------------
label value: 0.0 estimated value: [ -3.33562493e-05]
label value: 0.0 estimated value: [-0.00020317]
label value: 0.0 estimated value: [  5.17927110e-05]
[*]============================
num batch: 97
Epoch: 0018 cost= 0.000000020
[*]----------------------------
label value: 0.0 estimated value: [ -3.35872173e-05]
label value: 0.0 estimated value: [-0.00019474]
label value: 0.0 estimated value: [  5.88148832e-05]
[*]============================
num batch: 97
Epoch: 0019 cost= 0.000000019
[*]----------------------------
label value: 0.0 estimated value: [ -3.36728990e-05]
label value: 0.0 estimated value: [-0.00018631]
label value: 0.0 estimated value: [  6.59674406e-05]
[*]============================
num batch: 97
Epoch: 0020 cost= 0.000000017
[*]----------------------------
label value: 0.0 estimated value: [ -3.37138772e-05]
label value: 0.0 estimated value: [-0.00017807]
label value: 0.0 estimated value: [  7.33882189e-05]
[*]============================
num batch: 97
Epoch: 0021 cost= 0.000000016
[*]----------------------------
label value: 0.0 estimated value: [ -3.35760415e-05]
label value: 0.0 estimated value: [-0.00016985]
label value: 0.0 estimated value: [  8.10213387e-05]
[*]============================
num batch: 97
Epoch: 0022 cost= 0.000000015
[*]----------------------------
label value: 0.0 estimated value: [ -3.32482159e-05]
label value: 0.0 estimated value: [-0.00016174]
label value: 0.0 estimated value: [  8.89636576e-05]
[*]============================
num batch: 97
Epoch: 0023 cost= 0.000000014
[*]----------------------------
label value: 0.0 estimated value: [ -3.26558948e-05]
label value: 0.0 estimated value: [-0.00015367]
label value: 0.0 estimated value: [  9.74349678e-05]
[*]============================
num batch: 97
Epoch: 0024 cost= 0.000000013
[*]----------------------------
label value: 0.0 estimated value: [ -3.15904617e-05]
label value: 0.0 estimated value: [-0.00014537]
label value: 0.0 estimated value: [ 0.00010644]
[*]============================
num batch: 97
Epoch: 0025 cost= 0.000000012
[*]----------------------------
label value: 0.0 estimated value: [ -3.02121043e-05]
label value: 0.0 estimated value: [-0.00013707]
label value: 0.0 estimated value: [ 0.00011584]
[*]============================
num batch: 97
Epoch: 0026 cost= 0.000000011
[*]----------------------------
label value: 0.0 estimated value: [ -2.85692513e-05]
label value: 0.0 estimated value: [-0.0001288]
label value: 0.0 estimated value: [ 0.00011111]
[*]============================
num batch: 97
Epoch: 0027 cost= 0.000000010
[*]----------------------------
label value: 0.0 estimated value: [ -2.67848372e-05]
label value: 0.0 estimated value: [-0.00012073]
label value: 0.0 estimated value: [ 0.00010429]
[*]============================
num batch: 97
Epoch: 0028 cost= 0.000000009
[*]----------------------------
label value: 0.0 estimated value: [ -2.50190496e-05]
label value: 0.0 estimated value: [-0.00011299]
label value: 0.0 estimated value: [  9.78857279e-05]
[*]============================
num batch: 97
Epoch: 0029 cost= 0.000000008
[*]----------------------------
label value: 0.0 estimated value: [ -2.33948231e-05]
label value: 0.0 estimated value: [-0.00010575]
label value: 0.0 estimated value: [  9.17762518e-05]
[*]============================
num batch: 97
Epoch: 0030 cost= 0.000000008
[*]----------------------------
label value: 0.0 estimated value: [ -2.17221677e-05]
label value: 0.0 estimated value: [ -9.87946987e-05]
label value: 0.0 estimated value: [  8.61771405e-05]
[*]============================
num batch: 97
Epoch: 0031 cost= 0.000000007
[*]----------------------------
label value: 0.0 estimated value: [ -1.99079514e-05]
label value: 0.0 estimated value: [ -9.19662416e-05]
label value: 0.0 estimated value: [  8.10883939e-05]
[*]============================
num batch: 97
Epoch: 0032 cost= 0.000000006
[*]----------------------------
label value: 0.0 estimated value: [ -1.79074705e-05]
label value: 0.0 estimated value: [ -8.52420926e-05]
label value: 0.0 estimated value: [  7.65696168e-05]
[*]============================
num batch: 97
Epoch: 0033 cost= 0.000000006
[*]----------------------------
label value: 0.0 estimated value: [ -1.56797469e-05]
label value: 0.0 estimated value: [ -7.85738230e-05]
label value: 0.0 estimated value: [  7.26580620e-05]
[*]============================
num batch: 97
Epoch: 0034 cost= 0.000000005
[*]----------------------------
label value: 0.0 estimated value: [ -1.34594738e-05]
label value: 0.0 estimated value: [ -7.22110271e-05]
label value: 0.0 estimated value: [  6.91190362e-05]
[*]============================
num batch: 97
Epoch: 0035 cost= 0.000000005
[*]----------------------------
label value: 0.0 estimated value: [ -1.12429261e-05]
label value: 0.0 estimated value: [ -6.60941005e-05]
label value: 0.0 estimated value: [  6.59227371e-05]
[*]============================
num batch: 97
Epoch: 0036 cost= 0.000000004
[*]----------------------------
label value: 0.0 estimated value: [ -9.14186239e-06]
label value: 0.0 estimated value: [ -6.03497028e-05]
label value: 0.0 estimated value: [  6.29462302e-05]
[*]============================
num batch: 97
Epoch: 0037 cost= 0.000000004
[*]----------------------------
label value: 0.0 estimated value: [ -7.20098615e-06]
label value: 0.0 estimated value: [ -5.49964607e-05]
label value: 0.0 estimated value: [  6.01261854e-05]
[*]============================
num batch: 97
Epoch: 0038 cost= 0.000000004
[*]----------------------------
label value: 0.0 estimated value: [ -5.56558371e-06]
label value: 0.0 estimated value: [ -5.01796603e-05]
label value: 0.0 estimated value: [  5.73322177e-05]
[*]============================
num batch: 97
Epoch: 0039 cost= 0.000000003
[*]----------------------------
label value: 0.0 estimated value: [ -4.33251262e-06]
label value: 0.0 estimated value: [ -4.59924340e-05]
label value: 0.0 estimated value: [  5.44488430e-05]
[*]============================
num batch: 97
Epoch: 0040 cost= 0.000000003
[*]----------------------------
label value: 0.0 estimated value: [ -3.38628888e-06]
label value: 0.0 estimated value: [ -4.23081219e-05]
label value: 0.0 estimated value: [  5.15766442e-05]
[*]============================
num batch: 97
Epoch: 0041 cost= 0.000000003
[*]----------------------------
label value: 0.0 estimated value: [ -2.80141830e-06]
label value: 0.0 estimated value: [ -3.91751528e-05]
label value: 0.0 estimated value: [  4.86150384e-05]
[*]============================
num batch: 97
Epoch: 0042 cost= 0.000000003
[*]----------------------------
label value: 0.0 estimated value: [ -2.55554914e-06]
label value: 0.0 estimated value: [ -3.65562737e-05]
label value: 0.0 estimated value: [  4.55565751e-05]
[*]============================
num batch: 97
Epoch: 0043 cost= 0.000000002
[*]----------------------------
label value: 0.0 estimated value: [ -2.47731805e-06]
label value: 0.0 estimated value: [ -3.42875719e-05]
label value: 0.0 estimated value: [  4.25614417e-05]
[*]============================
num batch: 97
Epoch: 0044 cost= 0.000000002
[*]----------------------------
label value: 0.0 estimated value: [ -2.66358256e-06]
label value: 0.0 estimated value: [ -3.24323773e-05]
label value: 0.0 estimated value: [  3.95327806e-05]
[*]============================
num batch: 97
Epoch: 0045 cost= 0.000000002
[*]----------------------------
label value: 0.0 estimated value: [ -3.15159559e-06]
label value: 0.0 estimated value: [ -3.10689211e-05]
label value: 0.0 estimated value: [  3.64482403e-05]
[*]============================
num batch: 97
Epoch: 0046 cost= 0.000000002
[*]----------------------------
label value: 0.0 estimated value: [ -3.83704901e-06]
label value: 0.0 estimated value: [ -3.00332904e-05]
label value: 0.0 estimated value: [  3.33562493e-05]
[*]============================
num batch: 97
Epoch: 0047 cost= 0.000000002
[*]----------------------------
label value: 0.0 estimated value: [ -4.63053584e-06]
label value: 0.0 estimated value: [ -2.92174518e-05]
label value: 0.0 estimated value: [  3.03126872e-05]
[*]============================
num batch: 97
Epoch: 0048 cost= 0.000000001
[*]----------------------------
label value: 0.0 estimated value: [ -5.52088022e-06]
label value: 0.0 estimated value: [ -2.86288559e-05]
label value: 0.0 estimated value: [  2.73361802e-05]
[*]============================
num batch: 97
Epoch: 0049 cost= 0.000000001
[*]----------------------------
label value: 0.0 estimated value: [ -6.49690628e-06]
label value: 0.0 estimated value: [ -2.82227993e-05]
label value: 0.0 estimated value: [  2.44267285e-05]
[*]============================
num batch: 97
Epoch: 0050 cost= 0.000000001
[*]----------------------------
label value: 0.0 estimated value: [ -7.39470124e-06]
label value: 0.0 estimated value: [ -2.78316438e-05]
label value: 0.0 estimated value: [  2.17147171e-05]
[*]============================
Optimization Finished!
MSE: 1.16811e-09
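
matplotlib is imported at the top but never used; a natural follow-up (an editorial sketch, not part of the original run) is to plot the test-set predictions against the true values. The snippet below would need to run inside the same with tf.Session() block, right before it closes:

# Editorial sketch: visualize test-set predictions (run inside the Session block)
y_pred = sess.run(pred, feed_dict={x: X_test}).flatten()
plt.scatter(Y_test, y_pred, s=5, alpha=0.5)
plt.xlabel("true SEI_delta")
plt.ylabel("predicted SEI_delta")
plt.title("Test-set predictions vs. labels")
plt.show()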

In [ ]: