In [1]:
cd /root/git/SolarDataRESTfulAPI/


/root/git/SolarDataRESTfulAPI

In [2]:
#!/usr/bin/python
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>

# <codecell>

#cd git/SolarDataRESTfulAPI/

# <codecell>

import json
import pandas as pd
import InfluxDBInterface
import time
reload(InfluxDBInterface)
from ElasticsearchInterface import ESinterface
import sys
import mosquitto
import os
import argparse


def EpocToDate(timestamp):
    return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(timestamp))

def SecToHMS(sec):
    sec = int(sec)
    hour = sec / 3600
    minutes = (sec - (hour * 3600)) / 60
    secs = sec % 60
    return "%i h %i min %i s" % (hour, minutes, secs)

def RemoveResets(series):
    #Turn a resettable counter into a monotonic series: keep only the
    #non-negative increments and re-accumulate them.
    FirstValue = series.iloc[0]
    change = series.diff().clip(0)
    change.iloc[0] = FirstValue
    return change.cumsum()
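
#Worked example (hypothetical values): pd.Series([100, 150, 10, 60]) has
#diff() [NaN, 50, -140, 50]; clip(0) zeroes the reset step, the first
#value is restored, and cumsum() gives [100, 150, 150, 200] -- monotonic,
#with accumulation after the reset preserved.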

def CalculateProduction(Site,LogDB,ProductionDB,Recalculate=False):

    #Create property lists
    EnergyProp = LogDB.GetPropertiesPartiallyMatchingAbutNotB(Site,"POWc","Tot")
    PowerProp = LogDB.GetPropertiesPartiallyMatchingAbutNotB(Site,"Pac","Tot")
    
    PreviousLastValidValue = 0
    PreviousLastValidValueTime = 0
    
    #Determine where to resume.
    if Recalculate == False:
        (PreviousLastValidValueTime,PreviousLastValidValue) = ProductionDB.GetLastValue(Site,"Energy")
        TimestampP = ProductionDB.GetLastTimestamp(Site,"Power")

        if PreviousLastValidValueTime is not None and TimestampP is not None:
        
            #Start from where we have both power and energy values.
            if TimestampP < PreviousLastValidValueTime:
                PreviousLastValidValueTime = TimestampP
                
            PreviousLastValidValueTime = PreviousLastValidValueTime / 1000
                
            print "\tResuming calculation from: %s" % EpocToDate(PreviousLastValidValueTime)
                
            #Get last data. 
            dfLog = LogDB.GetDataAfterTime(Site,EnergyProp + PowerProp,PreviousLastValidValueTime,1000)
        else:    
            dfLog = LogDB.GetDataAfterTime(Site,EnergyProp + PowerProp,None,1000)
            print "No previous data starting from first log data."
    else:  
        #Get a log data chunk.
        dfLog = LogDB.GetDataAfterTime(Site,EnergyProp + PowerProp,None,1000)
        
    
    while (dfLog.shape[0] > 1):
    
        #Create a production frame.
        dfProduction = pd.DataFrame(columns = ["Power","Energy"])
        
        
        #Calculate power
        dfProduction["Power"] = dfLog[PowerProp].sum(axis=1)
        
        #Calculate energy
        dfPOWc = dfLog[EnergyProp]
        dfProduction["Energy"] = dfPOWc.apply(RemoveResets).sum(axis=1)
        
        #Add the offset carried over from the previous iteration.

        #Check for overlap: the chunk's smallest counter value marks where
        #it joins the energy total accumulated so far.
        FirstValidValueTime = dfProduction["Energy"].idxmin()
        
        #First time ever... or just NaN values in data. 
        if PreviousLastValidValueTime is None or pd.isnull(FirstValidValueTime):
            offset = 0
        #Normal overlap
        else:   
            offset = PreviousLastValidValue - dfProduction["Energy"][FirstValidValueTime]
        
        dfProduction["Energy"] += offset
        
        #Update database
        ProductionDB.Replace(Site,dfProduction)
        
        #Keep track of counter max.
        MaxEnergyTime = dfProduction["Energy"].idxmax()
        
        if not pd.isnull(MaxEnergyTime):
            PreviousLastValidValue = dfProduction["Energy"][MaxEnergyTime]
            PreviousLastValidValueTime = MaxEnergyTime
        
        dfLog = LogDB.GetNextNRows(dfLog,1000)
        
    return dfLog.index[-1]
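
#A minimal usage sketch (Site, LogDB and ProductionDB as set up in the
#driver cell below):
#
#   until = CalculateProduction(Site, LogDB, ProductionDB, Recalculate=False)
#   print "Processed up to %s" % EpocToDate(until)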

# <codecell>

In [3]:
reload(influxdb)


---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-3-ac07d0f7288e> in <module>()
----> 1 reload(influxdb)

NameError: name 'influxdb' is not defined

In [7]:
if True:

    #Parse arguments
    #parser = argparse.ArgumentParser(add_help=False)
    #parser.add_argument('-h', dest='host', default="localhost", help='MQTT host send results to')
    #parser.add_argument('-t', dest='topic', default="", help='MQTT topic to process')
    #parser.add_argument('-m', dest='message', default="", help='MQTT message to process')

    #args = parser.parse_args()

    #Get location of script
    path = os.path.abspath(os.path.dirname(sys.argv[0]))


    #Set up MQTT
    ip = "localhost"
    port = 1883
    user = "driver"
    password = "1234"
    prefix = "SolarProductionProducer"
    
    mqtt = mosquitto.Mosquitto("ProductionProducer")
    mqtt.prefix = prefix
    mqtt.ip = ip
    mqtt.port = port
    #mqtt.clientId = clientId
    mqtt.user = user
    mqtt.password = password

    if mqtt is not None:
        mqtt.username_pw_set(user,password)
    
    #mqtt.will_set( topic =  "system/" + prefix, payload="Idle", qos=1, retain=True)
    mqtt.connect(ip,keepalive=10)
    mqtt.publish(topic = "system/"+ prefix, payload="Updating", qos=1, retain=True)
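
    #Status protocol: the retained "system/<prefix>" topic acts as a
    #presence flag -- "Updating" while the run is active, "Idle" once it
    #completes (published again near the end of this cell).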


    #Init resources 
    DataLink = InfluxDBInterface.InfluxDBInterface("/root/MQTT-Stage/topics/solardata/Operator/SLB/lastupdate/influxInterfaceCredentials2.json")
    LogDB = DataLink.databases[u'SolarLogdata']
    ProductionDB = DataLink.databases[u'SolarProductionSites']
    #es = ESinterface()

    #Init vars
    Sites = LogDB.ListSeries()
    now = time.time()
    
    
    
    #Loop through all sites (short-circuited by the break below, so no
    #site is actually processed in this run).
    for Site in Sites:

        break
     
        print "Processing %s " % Site 
        
        sys.stdout.flush()

        until = CalculateProduction(Site,LogDB,ProductionDB,False)
        
        until = int(now - until)
        
        hour = until / 3600
        
        minutes = (until - (hour * 3600))/60
        
        secs = until % 60
        
        print "\tFinnished processing up to %i hours %i minutes and %i seconds from script start time" % (hour,minutes,secs)
        
        sys.stdout.flush()

    print "Done"

    sys.stdout.flush()

    mqtt.connect(ip,keepalive=10)
    #mqtt.publish(topic = "solardata/production/at", payload=str((TrailTime,LeadTime)), qos=1, retain=True) 
    mqtt.publish(topic = "solardata/production/lastupdate", payload=now, qos=1, retain=True)    
    mqtt.publish(topic = "system/"+ prefix, payload="Idle", qos=1, retain=True)
    time.sleep(0.5)
    
    del mqtt


Done

In [24]:
ls ~/MQTT-Stage/topics/solardata/Operator/SLB/lastupdate/influxInterfaceCredentials2.json


/root/MQTT-Stage/topics/solardata/Operator/SLB/lastupdate/influxInterfaceCredentials2.json

In [26]:
Recalculate=False
Site=Sites[0]

#Create property lists
EnergyProp = LogDB.GetPropertiesPartiallyMatchingAbutNotB(Site,"POWc","Tot")
PowerProp = LogDB.GetPropertiesPartiallyMatchingAbutNotB(Site,"Pac","Tot")

PreviousLastValidValue = 0
PreviousLastValidValueTime = 0

#Determine where to resume.
if Recalculate == False:
    (PreviousLastValidValueTime,PreviousLastValidValue) = ProductionDB.GetLastValue(Site,"Energy")
    TimestampP = ProductionDB.GetLastTimestamp(Site,"Power")

    if PreviousLastValidValueTime is not None and TimestampP is not None:
    
        #Start from where we have both power and energy values.
        if TimestampP < PreviousLastValidValueTime:
            PreviousLastValidValueTime = TimestampP
            
        PreviousLastValidValueTime = PreviousLastValidValueTime / 1000
            
        print "\tResuming calculation from: %s" % EpocToDate(PreviousLastValidValueTime)
            
        #Get last data. 
        dfLog = LogDB.GetDataAfterTime(Site,EnergyProp + PowerProp,PreviousLastValidValueTime,1000)
    else:    
        dfLog = LogDB.GetDataAfterTime(Site,EnergyProp + PowerProp,None,1000)
        print "No previous data starting from first log data."
else:  
    #Get a log data chunk.
    dfLog = LogDB.GetDataAfterTime(Site,EnergyProp + PowerProp,None,1000)


	Resuming calculation from: 2014-11-30 11:20:00

In [27]:
#Create a production frame.
dfProduction = pd.DataFrame(columns = ["Power","Energy"])


#Calculate power
dfProduction["Power"] = dfLog[PowerProp].sum(axis=1)

#Calculate energy
dfPOWc = dfLog[EnergyProp]
dfProduction["Energy"] = dfPOWc.apply(RemoveResets).sum(axis=1)

#Add the offset carried over from the previous iteration.

#Check for overlap: the chunk's smallest counter value marks where it
#joins the energy total accumulated so far.
FirstValidValueTime = dfProduction["Energy"].idxmin()

#First time ever... or just NaN values in data. 
if PreviousLastValidValueTime is None or pd.isnull(FirstValidValueTime):
    offset = 0
#Normal overlap
else:   
    offset = PreviousLastValidValue - dfProduction["Energy"][FirstValidValueTime]

dfProduction["Energy"] += offset

#Update database
ProductionDB.Replace(Site,dfProduction)

#Keep track of counter max.
MaxEnergyTime = dfProduction["Energy"].idxmax()

if not pd.isnull(MaxEnergyTime):
    PreviousLastValidValue = dfProduction["Energy"][MaxEnergyTime]
    PreviousLastValidValueTime = MaxEnergyTime

dfLog = LogDB.GetNextNRows(dfLog,1000)
    
#return dfLog.index[-1]
if dfLog.shape[0] <= 1:
    print "Ended"


---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-27-3286bbdb942b> in <module>()
     27 
     28 #Update database
---> 29 ProductionDB.Replace(Site,dfProduction)
     30 
     31 #Keep track of counter max.

/root/git/SolarDataRESTfulAPI/InfluxDBInterface.pyc in Replace(self, series, DataFrame, time_precision, Compressed)
    318     To = DataFrame.index[-1]
    319 
--> 320     self.ClearPeriod(series,From,To,time_precision)
    321 
    322     if Compressed:

/root/git/SolarDataRESTfulAPI/InfluxDBInterface.pyc in ClearPeriod(self, series, From, To, time_precision)
    342         return
    343 
--> 344     self.query("delete from %s where time > %i and time < %i" %(series,From*factor,To*factor) )
    345 
    346 

/usr/local/lib/python2.7/dist-packages/influxdb/client.pyc in query(self, query, time_precision, chunked)
    253             method='GET',
    254             params=params,
--> 255             status_code=200
    256             )
    257 

/usr/local/lib/python2.7/dist-packages/influxdb/client.pyc in request(self, url, method, params, data, status_code)
     91             data=data,
     92             headers=self._headers,
---> 93             verify=self._verify_ssl
     94             )
     95 

/usr/lib/python2.7/dist-packages/requests/sessions.pyc in request(self, method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, verify, cert)
    381             'allow_redirects': allow_redirects,
    382         }
--> 383         resp = self.send(prep, **send_kwargs)
    384 
    385         return resp

/usr/lib/python2.7/dist-packages/requests/sessions.pyc in send(self, request, **kwargs)
    484         start = datetime.utcnow()
    485         # Send the request
--> 486         r = adapter.send(request, **kwargs)
    487         # Total elapsed time of the request (approximately)
    488         r.elapsed = datetime.utcnow() - start

/usr/lib/python2.7/dist-packages/requests/adapters.pyc in send(self, request, stream, timeout, verify, cert, proxies)
    328                     decode_content=False,
    329                     retries=self.max_retries,
--> 330                     timeout=timeout
    331                 )
    332 

/usr/lib/python2.7/dist-packages/urllib3/connectionpool.pyc in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, **response_kw)
    540             httplib_response = self._make_request(conn, method, url,
    541                                                   timeout=timeout,
--> 542                                                   body=body, headers=headers)
    543 
    544             # If we're going to release the connection in ``finally:``, then

/usr/lib/python2.7/dist-packages/urllib3/connectionpool.pyc in _make_request(self, conn, method, url, timeout, **httplib_request_kw)
    392         try:
    393             try: # Python 2.7+, use buffering of HTTP responses
--> 394                 httplib_response = conn.getresponse(buffering=True)
    395             except TypeError: # Python 2.6 and older
    396                 httplib_response = conn.getresponse()

/usr/lib/python2.7/httplib.pyc in getresponse(self, buffering)
   1043         response = self.response_class(*args, **kwds)
   1044 
-> 1045         response.begin()
   1046         assert response.will_close != _UNKNOWN
   1047         self.__state = _CS_IDLE

/usr/lib/python2.7/httplib.pyc in begin(self)
    407         # read until we get a non-100 response
    408         while True:
--> 409             version, status, reason = self._read_status()
    410             if status != CONTINUE:
    411                 break

/usr/lib/python2.7/httplib.pyc in _read_status(self)
    363     def _read_status(self):
    364         # Initialize with Simple-Response defaults
--> 365         line = self.fp.readline(_MAXLINE + 1)
    366         if len(line) > _MAXLINE:
    367             raise LineTooLong("header line")

/usr/lib/python2.7/socket.pyc in readline(self, size)
    474             while True:
    475                 try:
--> 476                     data = self._sock.recv(self._rbufsize)
    477                 except error, e:
    478                     if e.args[0] == EINTR:

KeyboardInterrupt: 

In [10]:
dfLog


Out[10]:
POWc001 POWc002 Pac001 Pac002
time
1417345200 1600 1400 336 250
1417345800 1700 1500 348 236
1417346400 1800 1500 336 230
1417347000 1800 1600 295 219
1417347600 1900 1600 313 216
1417348200 2000 1700 390 312
1417348800 2100 1800 404 335
1417349400 2100 1900 388 317
1417350000 2200 1900 327 235
1417350600 2200 2000 199 139
1417351200 2300 2000 197 132
1417351800 2300 2000 219 153
1417352400 2400 2100 209 148
1417353000 2400 2100 118 17
1417353600 2400 2100 52 0
1417354200 2400 2100 0 0
1417354800 2400 2100 0 0
1417355400 2400 2100 0 0
1417356000 2400 2100 0 0

In [12]:
dfProduction


Out[12]:
Power Energy
time
1417345200 586 17251400
1417345800 584 17251600
1417346400 566 17251700
1417347000 514 17251800
1417347600 529 17251900
1417348200 702 17252100
1417348800 739 17252300
1417349400 705 17252400
1417350000 562 17252500
1417350600 338 17252600
1417351200 329 17252700
1417351800 372 17252700
1417352400 357 17252900
1417353000 135 17252900
1417353600 52 17252900
1417354200 0 17252900
1417354800 0 17252900
1417355400 0 17252900
1417356000 0 17252900

In [40]:
ProductionDB.Replace(Site,dfProduction)


---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-40-0f06fd9c40ac> in <module>()
----> 1 ProductionDB.Replace(Site,dfProduction)

/root/git/SolarDataRESTfulAPI/InfluxDBInterface.pyc in Replace(self, series, DataFrame, time_precision, Compressed)
    318     To = DataFrame.index[-1]
    319 
--> 320     self.ClearPeriod(series,From,To,time_precision)
    321 
    322     if Compressed:

/root/git/SolarDataRESTfulAPI/InfluxDBInterface.pyc in ClearPeriod(self, series, From, To, time_precision)
    342         return
    343 
--> 344     self.query("delete from %s where time > %i and time < %i" %(series,From*factor,To*factor) )
    345 
    346 

/usr/local/lib/python2.7/dist-packages/influxdb/client.pyc in query(self, query, time_precision, chunked)
    253             method='GET',
    254             params=params,
--> 255             status_code=200
    256             )
    257 

/usr/local/lib/python2.7/dist-packages/influxdb/client.pyc in request(self, url, method, params, data, status_code)
     91             data=data,
     92             headers=self._headers,
---> 93             verify=self._verify_ssl
     94             )
     95 

/usr/lib/python2.7/dist-packages/requests/sessions.pyc in request(self, method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, verify, cert)
    381             'allow_redirects': allow_redirects,
    382         }
--> 383         resp = self.send(prep, **send_kwargs)
    384 
    385         return resp

/usr/lib/python2.7/dist-packages/requests/sessions.pyc in send(self, request, **kwargs)
    484         start = datetime.utcnow()
    485         # Send the request
--> 486         r = adapter.send(request, **kwargs)
    487         # Total elapsed time of the request (approximately)
    488         r.elapsed = datetime.utcnow() - start

/usr/lib/python2.7/dist-packages/requests/adapters.pyc in send(self, request, stream, timeout, verify, cert, proxies)
    328                     decode_content=False,
    329                     retries=self.max_retries,
--> 330                     timeout=timeout
    331                 )
    332 

/usr/lib/python2.7/dist-packages/urllib3/connectionpool.pyc in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, **response_kw)
    540             httplib_response = self._make_request(conn, method, url,
    541                                                   timeout=timeout,
--> 542                                                   body=body, headers=headers)
    543 
    544             # If we're going to release the connection in ``finally:``, then

/usr/lib/python2.7/dist-packages/urllib3/connectionpool.pyc in _make_request(self, conn, method, url, timeout, **httplib_request_kw)
    392         try:
    393             try: # Python 2.7+, use buffering of HTTP responses
--> 394                 httplib_response = conn.getresponse(buffering=True)
    395             except TypeError: # Python 2.6 and older
    396                 httplib_response = conn.getresponse()

/usr/lib/python2.7/httplib.pyc in getresponse(self, buffering)
   1043         response = self.response_class(*args, **kwds)
   1044 
-> 1045         response.begin()
   1046         assert response.will_close != _UNKNOWN
   1047         self.__state = _CS_IDLE

/usr/lib/python2.7/httplib.pyc in begin(self)
    407         # read until we get a non-100 response
    408         while True:
--> 409             version, status, reason = self._read_status()
    410             if status != CONTINUE:
    411                 break

/usr/lib/python2.7/httplib.pyc in _read_status(self)
    363     def _read_status(self):
    364         # Initialize with Simple-Response defaults
--> 365         line = self.fp.readline(_MAXLINE + 1)
    366         if len(line) > _MAXLINE:
    367             raise LineTooLong("header line")

/usr/lib/python2.7/socket.pyc in readline(self, size)
    474             while True:
    475                 try:
--> 476                     data = self._sock.recv(self._rbufsize)
    477                 except error, e:
    478                     if e.args[0] == EINTR:

KeyboardInterrupt: 

In [32]:
ProductionDB.get_database_list()


---------------------------------------------------------------------------
Exception                                 Traceback (most recent call last)
<ipython-input-32-43170e9e20af> in <module>()
----> 1 ProductionDB.get_database_list()

/usr/local/lib/python2.7/dist-packages/influxdb/client.pyc in get_database_list(self)
    319             url=url,
    320             method='GET',
--> 321             status_code=200
    322             )
    323 

/usr/local/lib/python2.7/dist-packages/influxdb/client.pyc in request(self, url, method, params, data, status_code)
     98         else:
     99             raise Exception(
--> 100                 "{0}: {1}".format(response.status_code, response.content))
    101 
    102     # Writing Data

Exception: 401: Invalid username/password

In [34]:
ProductionDB._password


Out[34]:
u'ryKkSSnveKVpUMROt8kqvZCGJXJveu8MkJO'

In [ ]:
ProductionDB.query("delete from %s where time > %s and time < %s" % (Sites[0],"'2014-11-01'","now()"))


---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-41-7f054bede90c> in <module>()
----> 1 ProductionDB.query("delete from %s where time > %s and time < %s" % (Sites[0],"'2014-11-01'","now()"))

/usr/local/lib/python2.7/dist-packages/influxdb/client.pyc in query(self, query, time_precision, chunked)
    253             method='GET',
    254             params=params,
--> 255             status_code=200
    256             )
    257 

/usr/local/lib/python2.7/dist-packages/influxdb/client.pyc in request(self, url, method, params, data, status_code)
     91             data=data,
     92             headers=self._headers,
---> 93             verify=self._verify_ssl
     94             )
     95 

/usr/lib/python2.7/dist-packages/requests/sessions.pyc in request(self, method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, verify, cert)
    381             'allow_redirects': allow_redirects,
    382         }
--> 383         resp = self.send(prep, **send_kwargs)
    384 
    385         return resp

/usr/lib/python2.7/dist-packages/requests/sessions.pyc in send(self, request, **kwargs)
    484         start = datetime.utcnow()
    485         # Send the request
--> 486         r = adapter.send(request, **kwargs)
    487         # Total elapsed time of the request (approximately)
    488         r.elapsed = datetime.utcnow() - start

/usr/lib/python2.7/dist-packages/requests/adapters.pyc in send(self, request, stream, timeout, verify, cert, proxies)
    328                     decode_content=False,
    329                     retries=self.max_retries,
--> 330                     timeout=timeout
    331                 )
    332 

/usr/lib/python2.7/dist-packages/urllib3/connectionpool.pyc in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, **response_kw)
    540             httplib_response = self._make_request(conn, method, url,
    541                                                   timeout=timeout,
--> 542                                                   body=body, headers=headers)
    543 
    544             # If we're going to release the connection in ``finally:``, then

/usr/lib/python2.7/dist-packages/urllib3/connectionpool.pyc in _make_request(self, conn, method, url, timeout, **httplib_request_kw)
    392         try:
    393             try: # Python 2.7+, use buffering of HTTP responses
--> 394                 httplib_response = conn.getresponse(buffering=True)
    395             except TypeError: # Python 2.6 and older
    396                 httplib_response = conn.getresponse()

/usr/lib/python2.7/httplib.pyc in getresponse(self, buffering)
   1043         response = self.response_class(*args, **kwds)
   1044 
-> 1045         response.begin()
   1046         assert response.will_close != _UNKNOWN
   1047         self.__state = _CS_IDLE

/usr/lib/python2.7/httplib.pyc in begin(self)
    407         # read until we get a non-100 response
    408         while True:
--> 409             version, status, reason = self._read_status()
    410             if status != CONTINUE:
    411                 break

/usr/lib/python2.7/httplib.pyc in _read_status(self)
    363     def _read_status(self):
    364         # Initialize with Simple-Response defaults
--> 365         line = self.fp.readline(_MAXLINE + 1)
    366         if len(line) > _MAXLINE:
    367             raise LineTooLong("header line")

/usr/lib/python2.7/socket.pyc in readline(self, size)
    474             while True:
    475                 try:
--> 476                     data = self._sock.recv(self._rbufsize)
    477                 except error, e:
    478                     if e.args[0] == EINTR:

KeyboardInterrupt: 

In [ ]:
Sites[0]

In [50]:
ProductionDB.ListSeries()


Out[50]:
[u'list_series_result']
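
Note: ListSeries returns the name of the result set ("list_series_result")
rather than the series themselves; the raw "list series" query in In [76]
below exposes the actual series UUIDs in ret[0]["points"].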

In [70]:
try:
    ProductionDB.GetFirstValue(Sites[0])
except Exception, err:
    if err.message.find("400: Couldn't find series:") != -1:
        print 0.0
    else:
        raise err


0.0

In [59]:
err


Out[59]:
Exception("400: Couldn't find series: list_series_result")

In [61]:
err.args


Out[61]:
("400: Couldn't find series: list_series_result",)

In [62]:
err.message


Out[62]:
"400: Couldn't find series: list_series_result"

In [75]:
LogDB.ListSeries()


Out[75]:
[u'list_series_result']

In [76]:
ret = LogDB.query("list series")

In [82]:
ret[0]["points"]


Out[82]:
[[0, u'46d55815-f927-459f-a8e2-8bbcd88008ee'],
 [0, u'e93dc809-3491-11e4-8c21-0800200c9a66']]

In [84]:
for series in ret[0]["points"]:
    print series[1]


46d55815-f927-459f-a8e2-8bbcd88008ee
e93dc809-3491-11e4-8c21-0800200c9a66

In [90]:
LogDB.query("select * from \"%s\" limit 1" % '46d55815-f927-459f-a8e2-8bbcd88008ee' )


Out[90]:
[{u'columns': [u'time',
   u'sequence_number',
   u'Pdc2004',
   u'TmpM013',
   u'Udc1001',
   u'GLOc012',
   u'POWc006',
   u'Pdc1010',
   u'Pdc2007',
   u'Udc1002',
   u'Udc2009',
   u'Erro002',
   u'Pdc1009',
   u'Pdc2001',
   u'Pac007',
   u'Pdc2010',
   u'Udc2006',
   u'Udc1005',
   u'Pac011',
   u'Erro005',
   u'Pdc2006',
   u'Udc1008',
   u'Udc2001',
   u'Erro009',
   u'POWc009',
   u'Udc1007',
   u'Udc2011',
   u'Erro004',
   u'Erro006',
   u'Udc2007',
   u'Pdc1011',
   u'Udc1009',
   u'Pdc2003',
   u'Pdc2005',
   u'TmpM012',
   u'Udc1010',
   u'Udc1011',
   u'Pac003',
   u'Pac004',
   u'POWc004',
   u'GLOc013',
   u'POWc007',
   u'POWc008',
   u'POWc011',
   u'Pdc1006',
   u'Pac006',
   u'Pac010',
   u'Erro003',
   u'Pdc2008',
   u'Udc2005',
   u'POWc002',
   u'Pdc1001',
   u'Udc1006',
   u'Udc2002',
   u'PacTot',
   u'Erro007',
   u'GLOB012',
   u'Udc2003',
   u'Pac005',
   u'Pdc2011',
   u'Udc1003',
   u'Pac009',
   u'Pdc1008',
   u'Udc2004',
   u'POWc003',
   u'Pdc1003',
   u'Pdc1005',
   u'Pdc1007',
   u'Udc1004',
   u'Pac008',
   u'Erro008',
   u'Erro011',
   u'Udc2008',
   u'Udc2010',
   u'Pac002',
   u'Erro001',
   u'Pdc2009',
   u'POWc001',
   u'Pdc2002',
   u'Pac001',
   u'Erro010',
   u'GLOB013',
   u'Pdc1004',
   u'POWc005',
   u'POWc010',
   u'Pdc1002'],
  u'name': u'46d55815-f927-459f-a8e2-8bbcd88008ee',
  u'points': [[1392730800,
    759480001,
    403,
    5,
    658,
    1761,
    24983,
    1317,
    229,
    654,
    335,
    0,
    1349,
    229,
    1469,
    216,
    272,
    512,
    1253,
    0,
    193,
    654,
    334,
    0,
    27158,
    654,
    338,
    0,
    0,
    339,
    1056,
    647,
    503,
    283,
    5,
    659,
    523,
    1507,
    1509,
    18414,
    2059,
    27420,
    27101,
    16384,
    1332,
    1481,
    1469,
    0,
    234,
    422,
    17147,
    1320,
    641,
    342,
    16023,
    0,
    78,
    715,
    1379,
    221,
    523,
    1534,
    1294,
    548,
    27619,
    1114,
    1125,
    1298,
    515,
    1475,
    0,
    0,
    333,
    308,
    1469,
    0,
    236,
    28223,
    229,
    1478,
    0,
    73,
    1140,
    15706,
    26984,
    1314]]}]

In [88]:
"select * from '%s' limit 1" % '46d55815-f927-459f-a8e2-8bbcd88008ee'


Out[88]:
"select * from '46d55815-f927-459f-a8e2-8bbcd88008ee' limit 1"

In [10]:
LogDB.GetLastTimeStamp("46d55815-f927-459f-a8e2-8bbcd88008ee")


---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
<ipython-input-10-21160decfff9> in <module>()
----> 1 LogDB.GetLastTimeStamp("46d55815-f927-459f-a8e2-8bbcd88008ee")

AttributeError: 'InfluxDBlayer' object has no attribute 'GetLastTimeStamp'

In [9]:
LogDB.GetLastTimestamp("46d55815-f927-459f-a8e2-8bbcd88008ee")


Out[9]:
1417355400000
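
Note: the timestamp comes back in milliseconds, which is why
CalculateProduction divides it by 1000 before passing it to EpocToDate.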

In [11]:
from SLB_DataImporter import InfluxFeedLTSInterface


test

In [12]:
InfluxFeedLTSInterface.GetLastTimeStamp(LogDB,"46d55815-f927-459f-a8e2-8bbcd88008ee")


---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-12-aaea8ab93318> in <module>()
----> 1 InfluxFeedLTSInterface.GetLastTimeStamp(LogDB,"46d55815-f927-459f-a8e2-8bbcd88008ee")

TypeError: unbound method GetLastTimeStamp() must be called with InfluxFeedLTSInterface instance as first argument (got InfluxDBlayer instance instead)

In [13]:
from SLB_DataImporter import ParseSLBData

In [30]:
import time 
ParseSLBData("f07t",time.time()-3600*24*11.5,time.time()-3600*24*10.5)


Out[30]:
Time Pac001 Pac002 GLOB003 GLOc003 POWc001 POWc002 TmpM003 NAN Unnamed: 9
0 1.416393e+09 1019 1125 51 208 3900 4200 6 NaN NaN
1 1.416394e+09 1518 1653 72 221 4200 4400 6 NaN NaN
2 1.416394e+09 1963 2113 93 238 4500 4800 8 NaN NaN
3 1.416395e+09 1769 1938 85 253 4800 5100 8 NaN NaN
4 1.416395e+09 2336 2488 111 272 5200 5500 8 NaN NaN
5 1.416396e+09 2790 2916 129 295 5700 6000 9 NaN NaN
6 1.416397e+09 4090 4265 188 328 6300 6700 10 NaN NaN
7 1.416397e+09 3009 3193 140 353 6800 7300 10 NaN NaN
8 1.416398e+09 2230 2339 104 371 7200 7700 9 NaN NaN
9 1.416398e+09 1638 1801 78 385 7500 8000 8 NaN NaN
10 1.416399e+09 1437 1567 68 397 7700 8200 7 NaN NaN
11 1.4164e+09 1271 1419 62 408 7900 8500 6 NaN NaN
12 1.4164e+09 1194 1314 58 418 8100 8700 6 NaN NaN
13 1.416401e+09 1143 1295 56 428 8300 8900 6 NaN NaN
14 1.416401e+09 1226 1386 59 439 8500 9100 6 NaN NaN
15 1.416402e+09 1262 1409 61 450 8700 9400 6 NaN NaN
16 1.416403e+09 1306 1452 63 461 9000 9600 6 NaN NaN
17 1.416403e+09 748 854 39 468 9100 9700 5 NaN NaN
18 1.416404e+09 513 602 27 473 9200 9800 5 NaN NaN
19 1.416404e+09 297 461 20 476 9200 9900 4 NaN NaN
20 1.416405e+09 229 399 20 480 9300 10000 4 NaN NaN
21 1.416406e+09 2 97 10 481 9300 10000 4 NaN NaN
22 1.416406e+09 0 14 5 482 9300 10000 4 NaN NaN
23 1.416407e+09 0 0 1 483 9300 10000 3 NaN NaN
24 1.416407e+09 0 0 0 483 9300 10000 3 NaN NaN
25 1.416408e+09 0 0 0 483 9300 10000 3 NaN NaN
26 1.416409e+09 0 0 0 483 9300 10000 3 NaN NaN
27 1.416409e+09 0 0 0 NaN NaN NaN 0 NaN NaN
28 1.41641e+09 0 0 0 NaN NaN NaN 0 NaN NaN
29 1.41641e+09 0 0 0 NaN NaN NaN 0 NaN NaN
... ... ... ... ... ... ... ... ... ... ...
115 1.416462e+09 0 0 0 NaN NaN NaN 0 NaN NaN
116 1.416463e+09 0 0 0 NaN NaN NaN 0 NaN NaN
117 1.416463e+09 0 0 0 NaN NaN NaN 0 NaN NaN
118 1.416464e+09 0 0 0 NaN NaN NaN 0 NaN NaN
119 1.416464e+09 0 0 0 NaN NaN NaN 0 NaN NaN
120 1.416465e+09 0 0 0 NaN NaN NaN 0 NaN NaN
121 1.416466e+09 0 0 0 NaN NaN NaN 0 NaN NaN
122 1.416466e+09 0 0 0 NaN NaN NaN 0 NaN NaN
123 1.416467e+09 NaN NaN NaN NaN NaN NaN NaN NaN NaN
124 1.416467e+09 0 0 0 0 0 0 5 NaN NaN
125 1.416468e+09 0 0 0 0 0 0 6 NaN NaN
126 1.416469e+09 0 0 0 0 0 0 7 NaN NaN
127 1.416469e+09 0 0 0 0 0 0 7 NaN NaN
128 1.41647e+09 0 0 0 0 0 0 7 NaN NaN
129 1.41647e+09 0 0 1 0 0 0 7 NaN NaN
130 1.416471e+09 0 0 1 0 0 0 7 NaN NaN
131 1.416472e+09 20 31 5 1 0 0 7 NaN NaN
132 1.416472e+09 76 93 9 3 0 0 7 NaN NaN
133 1.416473e+09 11 12 5 3 0 0 7 NaN NaN
134 1.416473e+09 65 76 9 5 0 0 6 NaN NaN
135 1.416474e+09 105 107 10 7 0 0 6 NaN NaN
136 1.416475e+09 65 61 9 8 0 0 6 NaN NaN
137 1.416475e+09 112 120 11 10 0 0 6 NaN NaN
138 1.416476e+09 27 24 6 11 0 0 7 NaN NaN
139 1.416476e+09 7 13 5 12 0 0 7 NaN NaN
140 1.416477e+09 10 15 6 13 0 0 7 NaN NaN
141 1.416478e+09 1 1 5 14 0 0 7 NaN NaN
142 1.416478e+09 2 8 5 15 0 0 7 NaN NaN
143 1.416479e+09 104 145 11 17 100 100 7 NaN NaN
144 1.416479e+09 158 194 13 19 100 100 7 NaN NaN

145 rows × 10 columns
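
Note: the NAN and "Unnamed: 9" columns are most likely artifacts of
trailing delimiters in the SLB export and carry no data.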


In [ ]: