In [1]:
cd git/SolarDataRESTfulAPI/


/root/git/SolarDataRESTfulAPI

In [23]:
data = debugstr
if type(data) == str:
    if data.find(",") != -1:
        data = float(data.replace(",","."))


 27.7

In [134]:
import requests
import pandas
import time
import json
from influxdb import InfluxDBClient
import numpy
import mosquitto 
from IPython.display import clear_output
import sys
from ElasticsearchInterface import ESinterface

debugstr = None


#TODO: switch this file-based lookup to Elasticsearch.
def LoadSiteIds(file="SiteIDs.json"):
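    #Read the static SLB-id -> site-UUID mapping from a JSON file (see Out[8] further down for an example).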
    fp = open(file,"r")
    dic = json.load(fp)
    fp.close()
    return dic

def LoadSLBSiteIds(elasticsearch):
    #Query Elasticsearch for all sites whose data collection operator is SLB.
    sites = elasticsearch.GetHitsMatchingPropDict("solar-sites-index","meta-data",{"data_collection_operator":"SLB"})
    
    #For each hit, map the operator's site id to our system id.
    ret = {}

    for site in sites:
        #print type(site)
        OperatorID = sites[site]["Operator_ID"] +"t"
        OurID = site
        ret[OperatorID] = OurID 
    
    #Return dict mapping SLB operator id -> our site UUID (e.g. "h00t" -> "46d55815-...").
    return ret
    
def ParseSLBData(slb_id="h00t",start=time.time()-(24*60*60),stop=time.time()):
  
  starttime = time.strftime("%y%m%d%H%M",time.localtime(start))
  stoptime = time.strftime("%y%m%d%H%M",time.localtime(stop))
  url = "http://slb.nu/soldata/index.php?KEY=%s&start=%s&stop=%s" %(slb_id,starttime,stoptime)
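
  #The first read skips the 9-line preamble and merges the date and time columns
  #(columns 0 and 1) into one; the second read only grabs the header row (header=7)
  #to recover the column names, which are realigned to the data below.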

  df = pandas.read_csv(url,sep = ";",parse_dates=[[0, 1]],skiprows=9, header = None ,infer_datetime_format = True,na_values = ["     ","    ","  "," ",""])
  cl = pandas.read_csv(url,sep = ";", header = 7,error_bad_lines= False,na_values = [""],nrows=1)

  #Align keys to data and rename time col. 
  cols = cl.keys()
  cols = cols[2:]
  col2 = cols.insert(0,"Time")
  col2 = col2.insert(-1,"NAN")

    
  #Remove the SLB station id prefix from each key (e.g. "h00tM0Pac001" -> "Pac001").
  NewCols = []
    
  lkey = len(slb_id)

  for datakey in col2:
        
    newkey = datakey
    
    #Remove SLB id from key
    if newkey.find(slb_id) != -1:
        newkey = newkey[lkey+1:]
    
        #Remove leading 0
        if newkey[0] == "0":
            newkey = newkey[1:]

    
    NewCols.append(newkey)
      
  #Set data keys as column descriptors
  df.columns = NewCols
  
  #Delete trailing columns with junk. 
  #for key in df.keys()[-5:df.shape[1]-1]:
  #    if key.find(slb_id) == -1:
  #        del df[key]

  #Reformat timestamps
  droplist = []
        
  for i in range(0,df.shape[0]):
    try:
      #print "*" + df["Time"][i]
      timestamp = time.mktime(time.strptime(df["Time"][i],"%y-%m-%d %H:%M"))
      df["Time"][i] = timestamp
    except (TypeError, ValueError):
      #Unparseable timestamp (e.g. the trailing "nan nan" row): drop this row.
      droplist.append(df.index[i])
      
  df = df.drop(droplist)
        
  return df
  

class InfluxFeedLTSInterface(InfluxDBClient):
  def __init__(self,config_file="influx2.json"):

    #Load database credentials
    fp = open(config_file,"r")
    self.config = json.load(fp)
    fp.close()
    
    #Connect
    InfluxDBClient.__init__(self,self.config["host"], self.config["port"], self.config["user"], self.config["password"], self.config["database"])

  def GetLastTimeStamp(self,FluxId):
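    #Query the newest point in the series; InfluxDB returns the timestamp in
    #milliseconds (time_precision='m'), so it is divided by 1000 to get seconds.
    #Returns 0.0 if the series is empty or missing.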

    result = self.query('select time from \"%s\" order desc limit 1;' % FluxId, time_precision='m')

    try:
      return float(result[0]["points"][0][0])/1000.0
    except:
      return 0.0

  def SendToInfluxDB(self,df,FeedId):
    #Series name
    #series = FeedId + "/raw_data" 
    
    rows = 0

    #Save each row
    for i in range(0,df.shape[0]):
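      #Build one point per row: time first (in milliseconds, matching the 'm'
      #precision used in write_points_with_precision below), then each non-NaN value.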
      timestamp = df.irow(i)[0]
      column = ["time"]
      data = [int(timestamp*1000)]
      
      
      #Iterate over the remaining columns: skip NaNs and convert comma-decimal strings to floats.
      for j in range(1,df.shape[1]):
        value = df.iloc[i,j]
        
        #Float
        if type(value) == str:
            if value.find(",") != -1:
                value = float(value.replace(",","."))
        #Nan
        elif numpy.isnan(value):
            continue
        #Add key
        column.append(df.keys()[j])
        data.append(value)

      #If there were only NaNs on this row, continue to the next row.
      if len(column) == 1:
        continue
          
      fdata = [{
          "points": [data],
          "name": FeedId,
          "columns": column
          }]

      self.write_points_with_precision(fdata,"m")
      
      rows += 1
        
    return rows


def Update():
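    #Pull new SLB readings for every SLB-operated site since its last stored point,
    #write them to InfluxDB, and report progress over MQTT.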

    #Set up MQTT
    ip = "localhost"
    port = 1883
    user = "driver"
    password = "1234"
    prefix = "SLBimporter"
    
    mqtt=mosquitto.Mosquitto("SLB importer")
    mqtt.prefix = prefix
    mqtt.ip = ip
    mqtt.port = port
    #mqtt.clientId = clientId
    mqtt.user = user
    mqtt.password = password
                
    if mqtt != None:
        mqtt.username_pw_set(user,password)
    
    #mqtt.will_set( topic =  "system/" + prefix, payload="Idle", qos=1, retain=True)
    mqtt.connect(ip,keepalive=10)
    mqtt.publish(topic = "system/"+ prefix, payload="Updating", qos=1, retain=True)
    
    print "Starting update..."
    
    time.sleep(0.5)

    #Load sites.
    
    #ParseCVS("testdata/h00t_1310160720_1406030410.csv")
    SiteIDs = LoadSLBSiteIds(es)
    #SiteIDs = LoadSiteIds("/root/git/SolarDataRESTfulAPI/SiteIDs.json")

    #print SiteIDs

    Feeds = InfluxFeedLTSInterface()
    
    #Get all data until now + 1h
    StopTime = time.time() + 3600
    
    sum_rows = 0
    
    #Track the oldest (TrailTime) and newest (LeadTime) last-record timestamps across all sites.
    TrailTime = 99999999999999999
    LeadTime = 0
    
    for Site in SiteIDs:
        FeedId = "%s" % SiteIDs[Site]
        StartTime = Feeds.GetLastTimeStamp(FeedId)
        
        if StartTime == 0:
            print "No previous records in: " + FeedId
            print "\tStarting from Oct 2013"
            StartTime = time.mktime(time.strptime("2013-10-01","%Y-%m-%d"))
        else:
            print "Last record in stream: " + FeedId
            print "\tat: " + time.strftime("%Y-%m-%d %H:%M:%S",time.localtime(StartTime))
            
        #time.sleep(0.5)
        sys.stdout.flush()
        
        #Start a tiny bit after the last value.
        Current = StartTime + 0.5        
        PeriodLen = 60*60*24*7
        
        while Current < StopTime:
            
            #Don't request data from the future.
            if (Current + PeriodLen) > StopTime:
                PeriodLen = StopTime - Current
                
            #But keep the period to at least 10 minutes.
            if PeriodLen < 600:
                PeriodLen = 600
            
            print "\tReading SLB data from: " + Site 
            print "\tFrom: " + time.strftime("%Y-%m-%d %H:%M:%S",time.localtime(Current))
            print "\tTo:   " + time.strftime("%Y-%m-%d %H:%M:%S",time.localtime(Current + PeriodLen))
            Data = ParseSLBData(Site,Current,Current + PeriodLen)
            
            #Remove duplicate: the first returned row may repeat the last stored timestamp.
            if Data["Time"][0] == StartTime:
              Data = Data.drop(Data.index[0])
                
            
            
            Current += PeriodLen
            print "Sending data to influx as: " + FeedId
            
            r = Feeds.SendToInfluxDB(Data,FeedId)
            print "%i Rows written" % r
            
            sum_rows += r
            
            AtTime = Feeds.GetLastTimeStamp(FeedId)
            
            if r > 0:
                mqtt.connect(ip,keepalive=10)
                mqtt.publish(topic = "solardata/sites/"+ FeedId + "/at", payload=AtTime, qos=1, retain=True)
            
            if AtTime > LeadTime:
                LeadTime = AtTime
            
            if AtTime < TrailTime:
                TrailTime = AtTime
        
    mqtt.connect(ip,keepalive=10)

    #Update operator data if anything was received.
    if sum_rows > 0:
        mqtt.publish(topic = "solardata/Operator/SLB/at", payload=str((TrailTime,LeadTime)), qos=1, retain=True) 
        mqtt.publish(topic = "solardata/Operator/SLB/lastupdate", payload=StopTime, qos=1, retain=True)    
        
    mqtt.publish(topic = "solardata/Operator/SLB/lastrun", payload=StopTime, qos=1, retain=True)
    mqtt.publish(topic = "system/"+ prefix, payload="Idle", qos=1, retain=True)
    time.sleep(0.5)
    
    del mqtt
    
    print "Finished update!"
    sys.stdout.flush()
    
    return (TrailTime,LeadTime)
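
For reference, a minimal sketch (not executed in this notebook) of the per-row payload that SendToInfluxDB assembles for the 0.8-era influxdb client. The site UUID and values are illustrative, loosely based on the b06t sample further down; influx2.json is assumed to be present:

feeds = InfluxFeedLTSInterface()
fdata = [{
    "points": [[1417081200000, 362.0, 34.0]],        # time in ms, then the non-NaN values
    "name": "2a31fb24-347b-4924-ab89-5c434771a2ae",  # series name = our site UUID
    "columns": ["time", "Pac001", "GLOB003"]
    }]
feeds.write_points_with_precision(fdata, "m")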

In [135]:
if True: # __name__ == "__main__":
    
    es = ESinterface()

    while True:
        Now = time.time()
        
        #try:
        (TrailTime,LeadTime) = Update()
        if False: #except Exception,e: 
            print str(e)
            print "Sleeping 1 min."
            sys.stdout.flush()
            time.sleep(60)
            print "Resuming"
            sys.stdout.flush()
            continue
            
        #Next data from SLB is expected in 10 minutes. No need to do anything before that. 
        NextData = TrailTime + 600
        TimeToNext = NextData - Now
        
        if TimeToNext > 0:
            print "Sleeping %i seconds until next data is due to arrive" % int(TimeToNext)
            sys.stdout.flush()
            time.sleep(TimeToNext)
        else:
            print "Sleeping 2 min to see if new data has arrived."
            sys.stdout.flush()
            time.sleep(120)
            
        clear_output()


Starting update...
Got 25 Hits:
Last record in stream: e93dc809-3491-11e4-8c21-0800200c9a66
	at: 2014-11-30 11:40:00
	Reading SLB data from: b18t
	From: 2014-11-30 11:40:00
	To:   2014-11-30 12:55:47
Sending data to influx as: e93dc809-3491-11e4-8c21-0800200c9a66
0 Rows written
Last record in stream: 46d55815-f927-459f-a8e2-8bbcd88008ee
	at: 2014-11-14 08:20:00
	Reading SLB data from: h00t
	From: 2014-11-14 08:20:00
	To:   2014-11-21 08:20:00
Sending data to influx as: 46d55815-f927-459f-a8e2-8bbcd88008ee
1008 Rows written
	Reading SLB data from: h00t
	From: 2014-11-21 08:20:00
	To:   2014-11-28 08:20:00
Sending data to influx as: 46d55815-f927-459f-a8e2-8bbcd88008ee
1009 Rows written
	Reading SLB data from: h00t
	From: 2014-11-28 08:20:00
	To:   2014-11-30 12:55:47
Sending data to influx as: 46d55815-f927-459f-a8e2-8bbcd88008ee
305 Rows written
Last record in stream: 4cf6c743-8d0b-45f0-aa72-8a0c10315cf5
	at: 2014-11-30 11:30:00
	Reading SLB data from: f09t
	From: 2014-11-30 11:30:00
	To:   2014-11-30 12:55:47
Sending data to influx as: 4cf6c743-8d0b-45f0-aa72-8a0c10315cf5
0 Rows written
Last record in stream: 916b6e8e-1da8-11e4-a510-f23c9173ce4a
	at: 2014-11-30 11:40:00
	Reading SLB data from: b12t
	From: 2014-11-30 11:40:00
	To:   2014-11-30 12:55:47
Sending data to influx as: 916b6e8e-1da8-11e4-a510-f23c9173ce4a
0 Rows written
Last record in stream: e8a13982-f651-11e3-a510-f23c9173ce4a
	at: 2014-11-30 11:30:00
	Reading SLB data from: f11t
	From: 2014-11-30 11:30:00
	To:   2014-11-30 12:55:47
Sending data to influx as: e8a13982-f651-11e3-a510-f23c9173ce4a
0 Rows written
Last record in stream: 71b0e5ec-1da8-11e4-a510-f23c9173ce4a
	at: 2014-11-30 11:40:00
	Reading SLB data from: b08t
	From: 2014-11-30 11:40:00
	To:   2014-11-30 12:55:47
Sending data to influx as: 71b0e5ec-1da8-11e4-a510-f23c9173ce4a
1 Rows written
Last record in stream: 4a39b124-f594-11e3-a510-f23c9173ce4a
	at: 2014-11-30 11:40:00
	Reading SLB data from: b02t
	From: 2014-11-30 11:40:00
	To:   2014-11-30 12:55:47
Sending data to influx as: 4a39b124-f594-11e3-a510-f23c9173ce4a
0 Rows written
Last record in stream: beeb4730-3491-11e4-8c21-0800200c9a66
	at: 2014-11-30 11:40:00
	Reading SLB data from: b14t
	From: 2014-11-30 11:40:00
	To:   2014-11-30 12:55:47
Sending data to influx as: beeb4730-3491-11e4-8c21-0800200c9a66
1 Rows written
Last record in stream: 7f3c36b0-44a6-11e4-916c-0800200c9a66
	at: 2014-11-30 11:40:00
	Reading SLB data from: b16t
	From: 2014-11-30 11:40:00
	To:   2014-11-30 12:55:47
Sending data to influx as: 7f3c36b0-44a6-11e4-916c-0800200c9a66
1 Rows written
Last record in stream: 2a31fb24-347b-4924-ab89-5c434771a2ae
	at: 2014-11-30 11:40:00
	Reading SLB data from: b06t
	From: 2014-11-30 11:40:00
	To:   2014-11-30 12:55:47
Sending data to influx as: 2a31fb24-347b-4924-ab89-5c434771a2ae
0 Rows written
Last record in stream: 845e5e54-1da8-11e4-a510-f23c9173ce4a
	at: 2014-11-30 11:40:00
	Reading SLB data from: b10t
	From: 2014-11-30 11:40:00
	To:   2014-11-30 12:55:47
Sending data to influx as: 845e5e54-1da8-11e4-a510-f23c9173ce4a
1 Rows written
Last record in stream: 709e47a1-ca88-4c22-890f-2407e9cb131a
	at: 2014-11-30 11:30:00
	Reading SLB data from: f06t
	From: 2014-11-30 11:30:00
	To:   2014-11-30 12:55:47
Sending data to influx as: 709e47a1-ca88-4c22-890f-2407e9cb131a
0 Rows written
Last record in stream: f22857f0-3491-11e4-8c21-0800200c9a66
	at: 2014-11-30 11:40:00
	Reading SLB data from: b19t
	From: 2014-11-30 11:40:00
	To:   2014-11-30 12:55:47
Sending data to influx as: f22857f0-3491-11e4-8c21-0800200c9a66
1 Rows written
Last record in stream: 79ea7d0e-1da8-11e4-a510-f23c9173ce4a
	at: 2014-11-30 11:40:00
	Reading SLB data from: b09t
	From: 2014-11-30 11:40:00
	To:   2014-11-30 12:55:47
Sending data to influx as: 79ea7d0e-1da8-11e4-a510-f23c9173ce4a
1 Rows written
Last record in stream: b0dd71a8-efe8-41eb-bab1-633ebaaa778c
	at: 2014-11-30 11:20:00
	Reading SLB data from: f07t
	From: 2014-11-30 11:20:00
	To:   2014-11-30 12:55:47
Sending data to influx as: b0dd71a8-efe8-41eb-bab1-633ebaaa778c
3 Rows written
Last record in stream: 802afb51-c5eb-4230-9b22-9a77ef7260b3
	at: 2014-11-30 11:40:00
	Reading SLB data from: b05t
	From: 2014-11-30 11:40:00
	To:   2014-11-30 12:55:47
Sending data to influx as: 802afb51-c5eb-4230-9b22-9a77ef7260b3
0 Rows written
Last record in stream: c6261166-f651-11e3-a510-f23c9173ce4a
	at: 2014-11-30 11:20:00
	Reading SLB data from: f10t
	From: 2014-11-30 11:20:00
	To:   2014-11-30 12:55:47
Sending data to influx as: c6261166-f651-11e3-a510-f23c9173ce4a
3 Rows written
Last record in stream: 7445f840-44a6-11e4-916c-0800200c9a66
	at: 2014-11-30 11:30:00
	Reading SLB data from: b15t
	From: 2014-11-30 11:30:00
	To:   2014-11-30 12:55:47
Sending data to influx as: 7445f840-44a6-11e4-916c-0800200c9a66
2 Rows written
Last record in stream: 6f075b69-b823-4e6b-8021-dd751cc79eea
	at: 2014-11-30 11:40:00
	Reading SLB data from: b03t
	From: 2014-11-30 11:40:00
	To:   2014-11-30 12:55:47
Sending data to influx as: 6f075b69-b823-4e6b-8021-dd751cc79eea
0 Rows written
Last record in stream: 32383bde-7648-4abe-9dac-44701eabd72f
	at: 2014-11-30 11:30:00
	Reading SLB data from: f04t
	From: 2014-11-30 11:30:00
	To:   2014-11-30 12:55:47
Sending data to influx as: 32383bde-7648-4abe-9dac-44701eabd72f
0 Rows written
Last record in stream: d9c86a10-3491-11e4-8c21-0800200c9a66
	at: 2014-11-30 11:40:00
	Reading SLB data from: b17t
	From: 2014-11-30 11:40:00
	To:   2014-11-30 12:55:47
Sending data to influx as: d9c86a10-3491-11e4-8c21-0800200c9a66
1 Rows written
Last record in stream: f1136e33-d227-4fbd-ae72-d3488d70ba89
	at: 2014-11-30 11:40:00
	Reading SLB data from: b01t
	From: 2014-11-30 11:40:00
	To:   2014-11-30 12:55:47
Sending data to influx as: f1136e33-d227-4fbd-ae72-d3488d70ba89
0 Rows written
Last record in stream: 8b28b202-1da8-11e4-a510-f23c9173ce4a
	at: 2014-11-30 11:50:00
	Reading SLB data from: b11t
	From: 2014-11-30 11:50:00
	To:   2014-11-30 12:55:47
Sending data to influx as: 8b28b202-1da8-11e4-a510-f23c9173ce4a
0 Rows written
Last record in stream: face0d13-b22d-48fe-bec1-d29732604133
	at: 2014-09-30 09:20:00
	Reading SLB data from: f05t
	From: 2014-09-30 09:20:00
	To:   2014-10-07 09:20:00
Sending data to influx as: face0d13-b22d-48fe-bec1-d29732604133
0 Rows written
	Reading SLB data from: f05t
	From: 2014-10-07 09:20:00
	To:   2014-10-14 09:20:00
Sending data to influx as: face0d13-b22d-48fe-bec1-d29732604133
0 Rows written
	Reading SLB data from: f05t
	From: 2014-10-14 09:20:00
	To:   2014-10-21 09:20:00
Sending data to influx as: face0d13-b22d-48fe-bec1-d29732604133
0 Rows written
	Reading SLB data from: f05t
	From: 2014-10-21 09:20:00
	To:   2014-10-28 08:20:00
Sending data to influx as: face0d13-b22d-48fe-bec1-d29732604133
0 Rows written
	Reading SLB data from: f05t
	From: 2014-10-28 08:20:00
	To:   2014-11-04 08:20:00
Sending data to influx as: face0d13-b22d-48fe-bec1-d29732604133
0 Rows written
	Reading SLB data from: f05t
	From: 2014-11-04 08:20:00
	To:   2014-11-11 08:20:00
Sending data to influx as: face0d13-b22d-48fe-bec1-d29732604133
0 Rows written
	Reading SLB data from: f05t
	From: 2014-11-11 08:20:00
	To:   2014-11-18 08:20:00
Sending data to influx as: face0d13-b22d-48fe-bec1-d29732604133
0 Rows written
	Reading SLB data from: f05t
	From: 2014-11-18 08:20:00
	To:   2014-11-25 08:20:00
Sending data to influx as: face0d13-b22d-48fe-bec1-d29732604133
0 Rows written
	Reading SLB data from: f05t
	From: 2014-11-25 08:20:00
	To:   2014-11-30 12:55:47
Sending data to influx as: face0d13-b22d-48fe-bec1-d29732604133
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-135-6a0d0fa87f7c> in <module>()
      7 
      8         #try:
----> 9         (TrailTime,LeadTime) = Update()
     10         if False: #except Exception,e:
     11             print str(e)

<ipython-input-134-fd4d7c1322a1> in Update()
    254             print "Sending data to influx as: " + FeedId
    255 
--> 256             r = Feeds.SendToInfluxDB(Data,FeedId)
    257             print "%i Rows written" % r
    258 

<ipython-input-134-fd4d7c1322a1> in SendToInfluxDB(self, df, FeedId)
    139                 value = float(value.replace(",","."))
    140         #Nan
--> 141         elif numpy.isnan(value):
    142             continue
    143         #Add key

KeyboardInterrupt: 


In [ ]:
print debugstr

In [ ]:
df

In [7]:
es = ESinterface()
sites = LoadSLBSiteIds(es)


Got 19 Hits:

In [ ]:
sites

In [8]:
LoadSiteIds()


Out[8]:
{u'b01t': u'f1136e33-d227-4fbd-ae72-d3488d70ba89',
 u'b02t': u'4a39b124-f594-11e3-a510-f23c9173ce4a',
 u'b03t': u'e63710a4-78d9-4071-9a2e-ad35534355f4',
 u'b04t': u'6f075b69-b823-4e6b-8021-dd751cc79eea',
 u'b05t': u'802afb51-c5eb-4230-9b22-9a77ef7260b3',
 u'b06t': u'2a31fb24-347b-4924-ab89-5c434771a2ae',
 u'f04t': u'32383bde-7648-4abe-9dac-44701eabd72f',
 u'f05t': u'face0d13-b22d-48fe-bec1-d29732604133',
 u'f06t': u'709e47a1-ca88-4c22-890f-2407e9cb131a',
 u'f07t': u'b0dd71a8-efe8-41eb-bab1-633ebaaa778c',
 u'f09t': u'4cf6c743-8d0b-45f0-aa72-8a0c10315cf5',
 u'f10t': u'c6261166-f651-11e3-a510-f23c9173ce4a',
 u'f11t': u'e8a13982-f651-11e3-a510-f23c9173ce4a',
 u'h00t': u'46d55815-f927-459f-a8e2-8bbcd88008ee'}

In [140]:
u"ereäå".encode("ascii","ignore")


Out[140]:
'ere'

In [5]:
slb_id="h00t"
start=time.time()-(24*60*60)
stop=time.time()

In [7]:
starttime = time.strftime("%y%m%d%H%M",time.localtime(start))
stoptime = time.strftime("%y%m%d%H%M",time.localtime(stop))

url = "http://slb.nu/soldata/index.php?KEY=%s&start=%s&stop=%s" %(slb_id,starttime,stoptime)

In [11]:
df = pandas.read_csv(url,sep = ";",parse_dates=[[0, 1]],skiprows=9, header = None ,infer_datetime_format = True,na_values = ["     ","    ","  "," ",""])

In [13]:
cl = pandas.read_csv(url,sep = ";", header = 7,error_bad_lines= False,na_values = [""],nrows=1)

In [14]:
cl


Out[14]:
KEY => Unnamed: 1 h00tM0Pac001 h00tM0Pac002 h00tM0Pac003 h00tM0Pac004 h00tM0Pac005 h00tM0Pac006 h00tM0Pac007 h00tM0Pac008 ... h00tMUdc2003 h00tMUdc2004 h00tMUdc2005 h00tMUdc2006 h00tMUdc2007 h00tMUdc2008 h00tMUdc2009 h00tMUdc2010 h00tMUdc2011 Unnamed: 86
0 Date Time Data Data Data Data Data Data Data Data ... Data Data Data Data Data Data Data Data Data NaN

1 rows × 87 columns


In [16]:
cols = cl.keys()
cols = cols[2:]
col2 = cols.insert(0,"Time")
col2 = col2.insert(-1,"NAN")

In [20]:
col2


Out[20]:
Index([u'Time', u'h00tM0Pac001', u'h00tM0Pac002', u'h00tM0Pac003', u'h00tM0Pac004', u'h00tM0Pac005', u'h00tM0Pac006', u'h00tM0Pac007', u'h00tM0Pac008', u'h00tM0Pac009', u'h00tM0Pac010', u'h00tM0Pac011', u'h00tM0PacTot', u'h00tMErro001', u'h00tMErro002', u'h00tMErro003', u'h00tMErro004', u'h00tMErro005', u'h00tMErro006', u'h00tMErro007', u'h00tMErro008', u'h00tMErro009', u'h00tMErro010', u'h00tMErro011', u'h00tMGLOB012', u'h00tMGLOB013', u'h00tMGLOc012', u'h00tMGLOc013', u'h00tMPOWc001', u'h00tMPOWc002', u'h00tMPOWc003', u'h00tMPOWc004', u'h00tMPOWc005', u'h00tMPOWc006', u'h00tMPOWc007', u'h00tMPOWc008', u'h00tMPOWc009', u'h00tMPOWc010', u'h00tMPOWc011', u'h00tMPdc1001', u'h00tMPdc1002', u'h00tMPdc1003', u'h00tMPdc1004', u'h00tMPdc1005', u'h00tMPdc1006', u'h00tMPdc1007', u'h00tMPdc1008', u'h00tMPdc1009', u'h00tMPdc1010', u'h00tMPdc1011', u'h00tMPdc2001', u'h00tMPdc2002', u'h00tMPdc2003', u'h00tMPdc2004', u'h00tMPdc2005', u'h00tMPdc2006', u'h00tMPdc2007', u'h00tMPdc2008', u'h00tMPdc2009', u'h00tMPdc2010', u'h00tMPdc2011', u'h00tMTmpM012', u'h00tMTmpM013', u'h00tMUdc1001', u'h00tMUdc1002', u'h00tMUdc1003', u'h00tMUdc1004', u'h00tMUdc1005', u'h00tMUdc1006', u'h00tMUdc1007', u'h00tMUdc1008', u'h00tMUdc1009', u'h00tMUdc1010', u'h00tMUdc1011', u'h00tMUdc2001', u'h00tMUdc2002', u'h00tMUdc2003', u'h00tMUdc2004', u'h00tMUdc2005', u'h00tMUdc2006', u'h00tMUdc2007', u'h00tMUdc2008', u'h00tMUdc2009', u'h00tMUdc2010', u'h00tMUdc2011', u'NAN', u'Unnamed: 86'], dtype='object')

In [18]:
cl.keys()


Out[18]:
Index([u'KEY => ', u'Unnamed: 1', u'h00tM0Pac001', u'h00tM0Pac002', u'h00tM0Pac003', u'h00tM0Pac004', u'h00tM0Pac005', u'h00tM0Pac006', u'h00tM0Pac007', u'h00tM0Pac008', u'h00tM0Pac009', u'h00tM0Pac010', u'h00tM0Pac011', u'h00tM0PacTot', u'h00tMErro001', u'h00tMErro002', u'h00tMErro003', u'h00tMErro004', u'h00tMErro005', u'h00tMErro006', u'h00tMErro007', u'h00tMErro008', u'h00tMErro009', u'h00tMErro010', u'h00tMErro011', u'h00tMGLOB012', u'h00tMGLOB013', u'h00tMGLOc012', u'h00tMGLOc013', u'h00tMPOWc001', u'h00tMPOWc002', u'h00tMPOWc003', u'h00tMPOWc004', u'h00tMPOWc005', u'h00tMPOWc006', u'h00tMPOWc007', u'h00tMPOWc008', u'h00tMPOWc009', u'h00tMPOWc010', u'h00tMPOWc011', u'h00tMPdc1001', u'h00tMPdc1002', u'h00tMPdc1003', u'h00tMPdc1004', u'h00tMPdc1005', u'h00tMPdc1006', u'h00tMPdc1007', u'h00tMPdc1008', u'h00tMPdc1009', u'h00tMPdc1010', u'h00tMPdc1011', u'h00tMPdc2001', u'h00tMPdc2002', u'h00tMPdc2003', u'h00tMPdc2004', u'h00tMPdc2005', u'h00tMPdc2006', u'h00tMPdc2007', u'h00tMPdc2008', u'h00tMPdc2009', u'h00tMPdc2010', u'h00tMPdc2011', u'h00tMTmpM012', u'h00tMTmpM013', u'h00tMUdc1001', u'h00tMUdc1002', u'h00tMUdc1003', u'h00tMUdc1004', u'h00tMUdc1005', u'h00tMUdc1006', u'h00tMUdc1007', u'h00tMUdc1008', u'h00tMUdc1009', u'h00tMUdc1010', u'h00tMUdc1011', u'h00tMUdc2001', u'h00tMUdc2002', u'h00tMUdc2003', u'h00tMUdc2004', u'h00tMUdc2005', u'h00tMUdc2006', u'h00tMUdc2007', u'h00tMUdc2008', u'h00tMUdc2009', u'h00tMUdc2010', u'h00tMUdc2011', u'Unnamed: 86'], dtype='object')

In [49]:
data = ParseSLBData()

In [11]:
data.Time.max()


Out[11]:
1417008600.0

In [16]:
def tst(x):
    #print x 
    return 0

In [17]:
data.Time.apply(tst)


Out[17]:
0     0
1     0
2     0
3     0
4     0
5     0
6     0
7     0
8     0
9     0
10    0
11    0
12    0
13    0
14    0
...
130    0
131    0
132    0
133    0
134    0
135    0
136    0
137    0
138    0
139    0
140    0
141    0
142    0
143    0
144    0
Name: Time, Length: 145, dtype: int64

In [18]:
data


Out[18]:
Time Pac001 Pac002 Pac003 Pac004 Pac005 Pac006 Pac007 Pac008 Pac009 ... Udc2004 Udc2005 Udc2006 Udc2007 Udc2008 Udc2009 Udc2010 Udc2011 NAN Unnamed: 86
0 1.416922e+09 166 153 173 171 169 131 174 178 173 ... 281 223 284 284 280 287 263 284 NaN NaN
1 1.416923e+09 153 146 162 161 154 118 156 161 161 ... 279 229 283 283 275 291 253 285 NaN NaN
2 1.416923e+09 94 92 98 98 98 73 96 94 111 ... 276 220 284 282 278 283 254 280 NaN NaN
3 1.416924e+09 61 66 74 69 72 57 70 70 72 ... 275 216 275 274 270 281 250 276 NaN NaN
4 1.416925e+09 34 30 41 30 33 21 33 32 36 ... 263 211 269 268 261 278 244 272 NaN NaN
5 1.416925e+09 0 0 0 0 0 0 0 0 0 ... 262 205 260 266 256 269 238 265 NaN NaN
6 1.416926e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
7 1.416926e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
8 1.416927e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
9 1.416928e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
10 1.416928e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
11 1.416929e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
12 1.416929e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
13 1.41693e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
14 1.416931e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
15 1.416931e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
16 1.416932e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
17 1.416932e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
18 1.416933e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
19 1.416934e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
20 1.416934e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
21 1.416935e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
22 1.416935e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
23 1.416936e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
24 1.416937e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
25 1.416937e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
26 1.416938e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
27 1.416938e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
28 1.416939e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
29 1.41694e+09 0 0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0 NaN NaN
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
115 1.416991e+09 636 597 648 629 629 548 659 657 610 ... 314 252 322 320 310 323 288 319 NaN NaN
116 1.416992e+09 739 701 763 739 732 639 760 753 702 ... 317 249 321 321 316 326 293 321 NaN NaN
117 1.416992e+09 702 665 724 718 720 633 749 741 700 ... 317 251 322 320 312 325 291 321 NaN NaN
118 1.416993e+09 656 611 664 647 647 559 679 671 630 ... 312 250 320 320 310 323 289 318 NaN NaN
119 1.416994e+09 698 653 710 685 690 595 717 712 673 ... 317 253 321 321 313 325 290 320 NaN NaN
120 1.416994e+09 781 736 800 775 772 671 805 791 754 ... 315 252 320 321 315 328 291 320 NaN NaN
121 1.416995e+09 729 679 739 720 724 622 752 747 704 ... 316 252 321 316 312 326 290 320 NaN NaN
122 1.416995e+09 593 549 596 575 581 494 610 604 564 ... 309 250 318 317 306 321 287 317 NaN NaN
123 1.416996e+09 694 643 699 677 684 582 711 707 662 ... 316 254 320 321 310 324 288 320 NaN NaN
124 1.416997e+09 633 586 638 618 624 529 652 649 605 ... 314 250 318 318 307 324 288 318 NaN NaN
125 1.416997e+09 649 602 655 630 637 541 665 663 618 ... 313 251 322 319 312 323 289 320 NaN NaN
126 1.416998e+09 710 656 714 690 698 592 727 724 677 ... 317 251 322 319 311 324 290 319 NaN NaN
127 1.416998e+09 660 613 668 648 657 555 682 679 633 ... 313 250 319 318 312 324 290 321 NaN NaN
128 1.416999e+09 634 587 638 619 628 529 655 649 606 ... 315 252 318 318 309 325 288 318 NaN NaN
129 1.417e+09 742 682 740 714 719 612 755 749 703 ... 315 254 321 320 313 326 292 322 NaN NaN
130 1.417e+09 661 609 665 645 649 554 679 676 634 ... 310 251 318 319 310 324 288 320 NaN NaN
131 1.417001e+09 636 588 641 622 625 524 653 651 611 ... 313 251 317 318 310 324 287 319 NaN NaN
132 1.417001e+09 680 630 684 668 674 567 705 701 653 ... 313 253 320 318 311 324 288 319 NaN NaN
133 1.417002e+09 726 672 730 711 712 600 750 743 695 ... 314 250 322 319 312 325 291 321 NaN NaN
134 1.417003e+09 780 717 779 750 756 640 792 786 732 ... 316 253 320 320 316 325 291 320 NaN NaN
135 1.417003e+09 772 714 776 753 753 635 791 783 732 ... 318 252 322 318 313 326 292 321 NaN NaN
136 1.417004e+09 672 627 678 655 662 555 684 687 642 ... 314 252 322 318 312 323 287 319 NaN NaN
137 1.417004e+09 650 599 654 631 639 535 663 661 620 ... 314 251 319 317 311 323 288 320 NaN NaN
138 1.417005e+09 510 475 518 503 509 424 532 530 492 ... 307 247 313 315 301 319 286 311 NaN NaN
139 1.417006e+09 431 400 438 429 434 359 451 455 421 ... 307 242 310 304 302 314 283 313 NaN NaN
140 1.417006e+09 379 355 388 377 378 309 403 394 368 ... 302 241 311 300 304 313 282 309 NaN NaN
141 1.417007e+09 303 279 304 289 288 225 310 306 279 ... 305 238 303 300 287 304 281 302 NaN NaN
142 1.417007e+09 NaN NaN NaN NaN NaN NaN NaN NaN NaN ... NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
143 1.417008e+09 NaN NaN NaN NaN NaN NaN NaN NaN NaN ... NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
144 1.417009e+09 NaN NaN NaN NaN NaN NaN NaN NaN NaN ... NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN

145 rows × 87 columns


In [19]:
time.localtime()


Out[19]:
time.struct_time(tm_year=2014, tm_mon=11, tm_mday=26, tm_hour=14, tm_min=59, tm_sec=31, tm_wday=2, tm_yday=330, tm_isdst=0)

In [28]:


In [21]:
time.time()


Out[21]:
1417011525.259775

In [23]:
import pytz

In [24]:
import datetime
today = datetime.datetime.now()
insummer = datetime.datetime(2009,8,15,10,0,0)
from pytz import reference
localtime = reference.LocalTimezone()
localtime.tzname(today)


Out[24]:
'CET'

In [25]:
localtime.tzname(insummer)


Out[25]:
'CEST'

In [26]:
import time

In [27]:
time.gmtime()


Out[27]:
time.struct_time(tm_year=2014, tm_mon=11, tm_mday=26, tm_hour=14, tm_min=26, tm_sec=19, tm_wday=2, tm_yday=330, tm_isdst=0)

In [39]:
def TestParseSLBData(slb_id="h00t",start=time.time()-(24*60*60),stop=time.time()):
  
  starttime = time.strftime("%y%m%d%H%M",time.localtime(start))
  stoptime = time.strftime("%y%m%d%H%M",time.localtime(stop))
  url = "http://slb.nu/soldata/index.php?KEY=%s&start=%s&stop=%s" %(slb_id,starttime,stoptime)

  df = pandas.read_csv(url,sep = ";",parse_dates=[[0, 1]],skiprows=9, header = None ,infer_datetime_format = True,na_values = ["     ","    ","  "," ",""])
  cl = pandas.read_csv(url,sep = ";", header = 7,error_bad_lines= False,na_values = [""],nrows=1)

  #Align keys to data and rename time col. 
  cols = cl.keys()
  cols = cols[2:]
  col2 = cols.insert(0,"Time")
  col2 = col2.insert(-1,"NAN")

    
  #Remove SLB station id from key.
  NewCols = []
    
  lkey = len(slb_id)

  for datakey in col2:
        
    newkey = datakey
    
    #Remove SLB id from key
    if newkey.find(slb_id) != -1:
        newkey = newkey[lkey+1:]
    
        #Remove leading 0
        if newkey[0] == "0":
            newkey = newkey[1:]

    
    NewCols.append(newkey)
      
  #Set data keys as column descriptors
  df.columns = NewCols
  
  #Delete trailing columns with junk. 
  #for key in df.keys()[-5:df.shape[1]-1]:
  #    if key.find(slb_id) == -1:
  #        del df[key]
        
  return df

In [41]:
tdata = TestParseSLBData()

In [57]:
tdata.Time


Out[57]:
0     14-11-25 15:50
1     14-11-25 16:00
2     14-11-25 16:10
3     14-11-25 16:20
4     14-11-25 16:30
5     14-11-25 16:40
6     14-11-25 16:50
7     14-11-25 17:00
8     14-11-25 17:10
9     14-11-25 17:20
10    14-11-25 17:30
11    14-11-25 17:40
12    14-11-25 17:50
13    14-11-25 18:00
14    14-11-25 18:10
...
131    14-11-26 13:40
132    14-11-26 13:50
133    14-11-26 14:00
134    14-11-26 14:10
135    14-11-26 14:20
136    14-11-26 14:30
137    14-11-26 14:40
138    14-11-26 14:50
139    14-11-26 15:00
140    14-11-26 15:10
141    14-11-26 15:20
142    14-11-26 15:30
143    14-11-26 15:40
144    14-11-26 15:50
145           nan nan
Name: Time, Length: 146, dtype: object

In [46]:
time.mktime(time.strptime(tdata["Time"][0],"%y-%m-%d %H:%M"))


Out[46]:
1416927000.0

In [48]:
time.localtime(1416927000.0)


Out[48]:
time.struct_time(tm_year=2014, tm_mon=11, tm_mday=25, tm_hour=15, tm_min=50, tm_sec=0, tm_wday=1, tm_yday=329, tm_isdst=0)

In [54]:
data.Time.max()


Out[54]:
1417008600.0

In [64]:
mt =  data.loc[144:,"Time"]

In [65]:
mt


Out[65]:
144    1.417009e+09
Name: Time, dtype: object

In [66]:
mt.values


Out[66]:
array([1417008600.0], dtype=object)

In [93]:
def T2ParseSLBData(slb_id="h00t",start=time.time()-(24*60*60),stop=time.time()):
  
  starttime = time.strftime("%y%m%d%H%M",time.localtime(start))
  stoptime = time.strftime("%y%m%d%H%M",time.localtime(stop))
  url = "http://slb.nu/soldata/index.php?KEY=%s&start=%s&stop=%s" %(slb_id,starttime,stoptime)

  df = pandas.read_csv(url,sep = ";",parse_dates=[[0, 1]],skiprows=9, header = None ,infer_datetime_format = True,na_values = ["     ","    ","  "," ",""])
  cl = pandas.read_csv(url,sep = ";", header = 7,error_bad_lines= False,na_values = [""],nrows=1)

  #Align keys to data and rename time col. 
  cols = cl.keys()
  cols = cols[2:]
  col2 = cols.insert(0,"Time")
  col2 = col2.insert(-1,"NAN")

    
  #Remove SLB station id from key.
  NewCols = []
    
  lkey = len(slb_id)

  for datakey in col2:
        
    newkey = datakey
    
    #Remove SLB id from key
    if newkey.find(slb_id) != -1:
        newkey = newkey[lkey+1:]
    
        #Remove leading 0
        if newkey[0] == "0":
            newkey = newkey[1:]

    
    NewCols.append(newkey)
      
  #Set data keys as column descriptors
  df.columns = NewCols
  
  #Delete trailing columns with junk. 
  #for key in df.keys()[-5:df.shape[1]-1]:
  #    if key.find(slb_id) == -1:
  #        del df[key]

  #Reformat timestamps
  droplist = []
        
  for i in range(0,df.shape[0]):
    try:
      timestamp = time.mktime(time.strptime(str(df["Time"][i]),"%y-%m-%d %H:%M"))
      df["Time"][i] = timestamp
    except ValueError:
      #Unparseable timestamp: mark the row for removal.
      droplist.append(df.index[i])

  df = df.drop(droplist)
      
  return df

In [115]:
data = ParseSLBData("b06t",time.time()-3600,time.time()+3600)

In [116]:
data


Out[116]:
Time Pac001 Pac002 GLOB003 GLOc003 POWc001 POWc002 TmpM003 NAN Unnamed: 9
0 1.417078e+09 467 394 40 36 400 400 3 NaN NaN
1 1.417079e+09 382 304 35 42 500 500 4 NaN NaN
2 1.417079e+09 466 400 42 50 600 600 6 NaN NaN
3 1.41708e+09 390 315 36 56 700 600 6 NaN NaN
4 1.417081e+09 395 299 36 62 800 700 6 NaN NaN
5 1.417081e+09 362 284 34 69 800 800 7 NaN NaN
6 1.417082e+09 NaN NaN NaN NaN NaN NaN NaN NaN NaN
7 1.417082e+09 NaN NaN NaN NaN NaN NaN NaN NaN NaN
8 1.417083e+09 NaN NaN NaN NaN NaN NaN NaN NaN NaN
9 1.417084e+09 NaN NaN NaN NaN NaN NaN NaN NaN NaN
10 1.417084e+09 NaN NaN NaN NaN NaN NaN NaN NaN NaN
11 1.417085e+09 NaN NaN NaN NaN NaN NaN NaN NaN NaN
12 1.417085e+09 NaN NaN NaN NaN NaN NaN NaN NaN NaN

In [117]:
data.Time[5]


Out[117]:
1417081200.0

In [85]:
for i in timeser.iteritems():
    print i


(0, 1416928200.0)
(1, 1416928800.0)
(2, 1416929400.0)
(3, 1416930000.0)
(4, 1416930600.0)
(5, 1416931200.0)
(6, 1416931800.0)
(7, 1416932400.0)
(8, 1416933000.0)
(9, 1416933600.0)
(10, 1416934200.0)
(11, 1416934800.0)
(12, 1416935400.0)
(13, 1416936000.0)
(14, 1416936600.0)
(15, 1416937200.0)
(16, 1416937800.0)
(17, 1416938400.0)
(18, 1416939000.0)
(19, 1416939600.0)
(20, 1416940200.0)
(21, 1416940800.0)
(22, 1416941400.0)
(23, 1416942000.0)
(24, 1416942600.0)
(25, 1416943200.0)
(26, 1416943800.0)
(27, 1416944400.0)
(28, 1416945000.0)
(29, 1416945600.0)
(30, 1416946200.0)
(31, 1416946800.0)
(32, 1416947400.0)
(33, 1416948000.0)
(34, 1416948600.0)
(35, 1416949200.0)
(36, 1416949800.0)
(37, 1416950400.0)
(38, 1416951000.0)
(39, 1416951600.0)
(40, 1416952200.0)
(41, 1416952800.0)
(42, 1416953400.0)
(43, 1416954000.0)
(44, 1416954600.0)
(45, 1416955200.0)
(46, 1416955800.0)
(47, 1416956400.0)
(48, 1416957000.0)
(49, 1416957600.0)
(50, 1416958200.0)
(51, 1416958800.0)
(52, 1416959400.0)
(53, 1416960000.0)
(54, 1416960600.0)
(55, 1416961200.0)
(56, 1416961800.0)
(57, 1416962400.0)
(58, 1416963000.0)
(59, 1416963600.0)
(60, 1416964200.0)
(61, 1416964800.0)
(62, 1416965400.0)
(63, 1416966000.0)
(64, 1416966600.0)
(65, 1416967200.0)
(66, 1416967800.0)
(67, 1416968400.0)
(68, 1416969000.0)
(69, 1416969600.0)
(70, 1416970200.0)
(71, 1416970800.0)
(72, 1416971400.0)
(73, 1416972000.0)
(74, 1416972600.0)
(75, 1416973200.0)
(76, 1416973800.0)
(77, 1416974400.0)
(78, 1416975000.0)
(79, 1416975600.0)
(80, 1416976200.0)
(81, 1416976800.0)
(82, 1416977400.0)
(83, 1416978000.0)
(84, 1416978600.0)
(85, 1416979200.0)
(86, 1416979800.0)
(87, 1416980400.0)
(88, 1416981000.0)
(89, 1416981600.0)
(90, 1416982200.0)
(91, 1416982800.0)
(92, 1416983400.0)
(93, 1416984000.0)
(94, 1416984600.0)
(95, 1416985200.0)
(96, 1416985800.0)
(97, 1416986400.0)
(98, 1416987000.0)
(99, 1416987600.0)
(100, 1416988200.0)
(101, 1416988800.0)
(102, 1416989400.0)
(103, 1416990000.0)
(104, 1416990600.0)
(105, 1416991200.0)
(106, 1416991800.0)
(107, 1416992400.0)
(108, 1416993000.0)
(109, 1416993600.0)
(110, 1416994200.0)
(111, 1416994800.0)
(112, 1416995400.0)
(113, 1416996000.0)
(114, 1416996600.0)
(115, 1416997200.0)
(116, 1416997800.0)
(117, 1416998400.0)
(118, 1416999000.0)
(119, 1416999600.0)
(120, 1417000200.0)
(121, 1417000800.0)
(122, 1417001400.0)
(123, 1417002000.0)
(124, 1417002600.0)
(125, 1417003200.0)
(126, 1417003800.0)
(127, 1417004400.0)
(128, 1417005000.0)
(129, 1417005600.0)
(130, 1417006200.0)
(131, 1417006800.0)
(132, 1417007400.0)
(133, 1417008000.0)
(134, 1417008600.0)
(135, 1417009200.0)
(136, 1417009800.0)
(137, 1417010400.0)
(138, 1417011000.0)
(139, 1417011600.0)
(140, 1417012200.0)
(141, 1417012800.0)
(142, 1417013400.0)
(143, 1417014000.0)
(144, 1417014600.0)

In [120]:
data["Time"].max()


Out[120]:
1417085400.0

In [124]:
data3 = data.loc[2:4]

In [125]:
data3


Out[125]:
Time Pac001 Pac002 GLOB003 GLOc003 POWc001 POWc002 TmpM003 NAN Unnamed: 9
2 1.417079e+09 466 400 42 50 600 600 6 NaN NaN
3 1.41708e+09 390 315 36 56 700 600 6 NaN NaN
4 1.417081e+09 395 299 36 62 800 700 6 NaN NaN

In [126]:
data2.merge(data3,)


Out[126]:
Time Pac001 Pac002 GLOB003 GLOc003 POWc001 POWc002 TmpM003 NAN Unnamed: 9
0 1.417079e+09 466 400 42 50 600 600 6 NaN NaN

In [127]:
import pandas as pd

In [129]:
pd.concat([data2,data3])


Out[129]:
Time Pac001 Pac002 GLOB003 GLOc003 POWc001 POWc002 TmpM003 NAN Unnamed: 9
1 1.417079e+09 382 304 35 42 500 500 4 NaN NaN
2 1.417079e+09 466 400 42 50 600 600 6 NaN NaN
2 1.417079e+09 466 400 42 50 600 600 6 NaN NaN
3 1.41708e+09 390 315 36 56 700 600 6 NaN NaN
4 1.417081e+09 395 299 36 62 800 700 6 NaN NaN

In [130]:
data2


Out[130]:
Time Pac001 Pac002 GLOB003 GLOc003 POWc001 POWc002 TmpM003 NAN Unnamed: 9
1 1.417079e+09 382 304 35 42 500 500 4 NaN NaN
2 1.417079e+09 466 400 42 50 600 600 6 NaN NaN

In [131]:
data3


Out[131]:
Time Pac001 Pac002 GLOB003 GLOc003 POWc001 POWc002 TmpM003 NAN Unnamed: 9
2 1.417079e+09 466 400 42 50 600 600 6 NaN NaN
3 1.41708e+09 390 315 36 56 700 600 6 NaN NaN
4 1.417081e+09 395 299 36 62 800 700 6 NaN NaN

In [133]:
data3["Time"].values


Out[133]:
array([1417079400.0, 1417080000.0, 1417080600.0], dtype=object)

In [143]:
def PeriodToSecs(period):
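    #Convert an InfluxDB-style period/precision string to seconds.
    #Note: "m" is treated as milliseconds (the InfluxDB 0.8 precision code), not minutes.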
    period = period.encode("ascii","ignore")

    if "u" in period:
        return float(period.strip("u"))/1000000
    elif "s" in period:
        return float(period.strip("s"))
    elif "m" in period:
        return float(period.strip("m"))/1000.0
    elif "h" in period:
        return float(period.strip("h"))*3600.0
    elif "d" in period:
        return float(period.strip("d"))*3600.0*24
    elif "w" in period:
        return float(period.strip("w"))*3600.0*24*7
    else:
        return float(period.strip("u"))/1000000
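
A few more conversions, following the function's convention that "m" means milliseconds (illustrative, not run in this notebook):

PeriodToSecs(u"10m")   # -> 0.01
PeriodToSecs(u"2d")    # -> 172800.0
PeriodToSecs(u"1w")    # -> 604800.0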

In [148]:
PeriodToSecs(u"1h")


Out[148]:
3600.0

In [149]:
df


---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-149-01963f080230> in <module>()
      1 
----> 2 df

NameError: name 'df' is not defined

In [ ]: