In [1]:
import pandas as pd
from influxdb import DataFrameClient

"""Instantiate the connection to the InfluxDB client."""
user = 'root'
password = 'root'
dbname = 'base47'
host = 'localhost'
port = 32768
# Temporarily avoid line protocol time conversion issues #412, #426, #431.
protocol = 'json'

client = DataFrameClient(host, port, user, password, dbname)

print("Create pandas DataFrame")
df = pd.DataFrame(data=list(range(30)),
                  index=pd.date_range(start='2017-11-16',
                                      periods=30, freq='H'))


Create pandas DataFrame

In [3]:
print("Create database: " + dbname)
client.create_database(dbname)


Create database: base47
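
If old points should expire automatically, a retention policy can be attached right after creating the database. A minimal sketch, assuming a hypothetical policy name 'demo_policy' that keeps three days of data (create_retention_policy is inherited from InfluxDBClient):

In [ ]:
# Hedged sketch: 'demo_policy' is a hypothetical name. '3d' keeps three
# days of data with replication factor 1; default=True applies it to all
# writes that do not name a policy explicitly.
client.create_retention_policy('demo_policy', '3d', 1, default=True)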

In [9]:
client.query("show databases")


Out[9]:
ResultSet({'(u'databases', None)': [{u'name': u'_internal'}, {u'name': u'test'}, {u'name': u'base'}, {u'name': u'base2'}, {u'name': u'base47'}]})

In [11]:
# Name the column so it becomes the field name in InfluxDB.
df.columns = ['val']

In [12]:
print("Write DataFrame")
client.write_points(df, 'demo', protocol=protocol)


Write DataFrame
Out[12]:
True
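
For larger frames the same call can chunk the upload: batch_size and time_precision are parameters of DataFrameClient.write_points. A sketch of a batched write:

In [ ]:
# Sketch: write in chunks of 1000 points, with timestamps truncated to
# second precision.
client.write_points(df, 'demo', protocol=protocol,
                    batch_size=1000, time_precision='s')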

In [13]:
print("Write DataFrame with Tags")
client.write_points(df, 'demo',
                    {'k1': 'v1', 'k2': 'v2'}, protocol=protocol)


Write DataFrame with Tags
Out[13]:
True

In [14]:
print("Read DataFrame")
client.query("select * from demo")


Read DataFrame
Out[14]:
defaultdict(list, {u'demo':                              k1    k2  val
             2017-11-16 00:00:00+00:00  None  None    0
             2017-11-16 00:00:00+00:00    v1    v2    0
             2017-11-16 01:00:00+00:00  None  None    1
             2017-11-16 01:00:00+00:00    v1    v2    1
             2017-11-16 02:00:00+00:00  None  None    2
             2017-11-16 02:00:00+00:00    v1    v2    2
             2017-11-16 03:00:00+00:00  None  None    3
             2017-11-16 03:00:00+00:00    v1    v2    3
             2017-11-16 04:00:00+00:00  None  None    4
             2017-11-16 04:00:00+00:00    v1    v2    4
             2017-11-16 05:00:00+00:00  None  None    5
             2017-11-16 05:00:00+00:00    v1    v2    5
             2017-11-16 06:00:00+00:00  None  None    6
             2017-11-16 06:00:00+00:00    v1    v2    6
             2017-11-16 07:00:00+00:00  None  None    7
             2017-11-16 07:00:00+00:00    v1    v2    7
             2017-11-16 08:00:00+00:00  None  None    8
             2017-11-16 08:00:00+00:00    v1    v2    8
             2017-11-16 09:00:00+00:00  None  None    9
             2017-11-16 09:00:00+00:00    v1    v2    9
             2017-11-16 10:00:00+00:00  None  None   10
             2017-11-16 10:00:00+00:00    v1    v2   10
             2017-11-16 11:00:00+00:00  None  None   11
             2017-11-16 11:00:00+00:00    v1    v2   11
             2017-11-16 12:00:00+00:00  None  None   12
             2017-11-16 12:00:00+00:00    v1    v2   12
             2017-11-16 13:00:00+00:00  None  None   13
             2017-11-16 13:00:00+00:00    v1    v2   13
             2017-11-16 14:00:00+00:00  None  None   14
             2017-11-16 14:00:00+00:00    v1    v2   14
             2017-11-16 15:00:00+00:00  None  None   15
             2017-11-16 15:00:00+00:00    v1    v2   15
             2017-11-16 16:00:00+00:00  None  None   16
             2017-11-16 16:00:00+00:00    v1    v2   16
             2017-11-16 17:00:00+00:00  None  None   17
             2017-11-16 17:00:00+00:00    v1    v2   17
             2017-11-16 18:00:00+00:00  None  None   18
             2017-11-16 18:00:00+00:00    v1    v2   18
             2017-11-16 19:00:00+00:00  None  None   19
             2017-11-16 19:00:00+00:00    v1    v2   19
             2017-11-16 20:00:00+00:00  None  None   20
             2017-11-16 20:00:00+00:00    v1    v2   20
             2017-11-16 21:00:00+00:00  None  None   21
             2017-11-16 21:00:00+00:00    v1    v2   21
             2017-11-16 22:00:00+00:00  None  None   22
             2017-11-16 22:00:00+00:00    v1    v2   22
             2017-11-16 23:00:00+00:00  None  None   23
             2017-11-16 23:00:00+00:00    v1    v2   23
             2017-11-17 00:00:00+00:00  None  None   24
             2017-11-17 00:00:00+00:00    v1    v2   24
             2017-11-17 01:00:00+00:00  None  None   25
             2017-11-17 01:00:00+00:00    v1    v2   25
             2017-11-17 02:00:00+00:00  None  None   26
             2017-11-17 02:00:00+00:00    v1    v2   26
             2017-11-17 03:00:00+00:00  None  None   27
             2017-11-17 03:00:00+00:00    v1    v2   27
             2017-11-17 04:00:00+00:00  None  None   28
             2017-11-17 04:00:00+00:00    v1    v2   28
             2017-11-17 05:00:00+00:00  None  None   29
             2017-11-17 05:00:00+00:00    v1    v2   29})
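
The result above is a dict keyed by measurement name, so the DataFrame comes straight back with a lookup, and tags can be filtered in the WHERE clause. A sketch:

In [ ]:
# The query result maps measurement name -> DataFrame; tag values are
# single-quoted in InfluxQL.
result = client.query("select * from demo where k1 = 'v1'")
result['demo'].head()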

In [ ]:
print("Delete database: " + dbname)
client.drop_database(dbname)

In [1]:
import numpy as np
import pandas as pd
import time
import requests
url = 'http://localhost:32768/write'
params = {"db": "base", "u": "root", "p": "root"}

In [2]:
def read_data():
    # Skip the CSV header row and split each line into raw string fields.
    with open('data/gas_ft.csv') as f:
        return [x.split(',') for x in f.readlines()[1:]]

a = read_data()

In [3]:
a[0]


Out[3]:
['67', 'm\xc2\xb3', '0.34', '2014-07-10T00:30:00.000Z\n']
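
The sample row suggests the layout: meter id, unit (m³ shown as raw UTF-8 bytes under Python 2), value, and an RFC3339 timestamp. The same file could be loaded with pandas instead; a sketch, with column names assumed from the sample row rather than taken from the file:

In [ ]:
# Hedged alternative to read_data(): the column names are assumptions
# based on the sample row above.
gas = pd.read_csv('data/gas_ft.csv', header=0,
                  names=['id', 'unit', 'value', 'timestamp'],
                  parse_dates=['timestamp'])
gas.head()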

In [22]:
#payload = "elec,id=500 value=24 2018-03-05T19:31:00.000Z\n"
payload = "elec,id=500 value=24 "#+str(pd.to_datetime('2018-03-05T19:29:00.000Z\n').value // 10 ** 9)
r = requests.post(url, params=params, data=payload)
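
InfluxDB answers a successful write with HTTP 204 No Content, so the response is worth checking before moving on:

In [ ]:
# Anything other than 204 carries an error message in the body.
assert r.status_code == 204, r.text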


In [ ]:
# -*- coding: utf-8 -*-
"""Tutorial how to use the class helper `SeriesHelper`."""

from influxdb import InfluxDBClient
from influxdb import SeriesHelper

# InfluxDB connection settings
host = 'localhost'
port = 8086
user = 'root'
password = 'root'
dbname = 'mydb'

myclient = InfluxDBClient(host, port, user, password, dbname)

# Uncomment the following code if the database is not yet created
# myclient.create_database(dbname)
# myclient.create_retention_policy('awesome_policy', '3d', 3, default=True)


class MySeriesHelper(SeriesHelper):
    """Instantiate SeriesHelper to write points to the backend."""

    class Meta:
        """Meta class stores time series helper configuration."""

        # The client should be an instance of InfluxDBClient.
        client = myclient

        # The series name must be a string. Add dependent fields/tags
        # in curly brackets.
        series_name = 'events.stats.{server_name}'

        # Defines all the fields in this time series.
        fields = ['some_stat', 'other_stat']

        # Defines all the tags for the series.
        tags = ['server_name']

        # Defines the number of data points to store prior to writing
        # on the wire.
        bulk_size = 5

        # autocommit must be set to True when using bulk_size
        autocommit = True


# The following will create *five* (immutable) data points.
# Since bulk_size is set to 5, upon the fifth construction call, *all* data
# points will be written on the wire via MySeriesHelper.Meta.client.
MySeriesHelper(server_name='us.east-1', some_stat=159, other_stat=10)
MySeriesHelper(server_name='us.east-1', some_stat=158, other_stat=20)
MySeriesHelper(server_name='us.east-1', some_stat=157, other_stat=30)
MySeriesHelper(server_name='us.east-1', some_stat=156, other_stat=40)
MySeriesHelper(server_name='us.east-1', some_stat=155, other_stat=50)

# To manually submit data points which are not yet written, call commit:
MySeriesHelper.commit()

# To inspect the JSON which will be written, call _json_body_():
MySeriesHelper._json_body_()
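
To confirm the helper's writes landed, the client can query the templated series back; a sketch, using the tag value written above (series names containing dots need double quotes in InfluxQL):

In [ ]:
# Hedged check: series_name was templated as events.stats.{server_name}.
myclient.query('SELECT * FROM "events.stats.us.east-1"')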

In [9]:
for metric in a[:]:
    payload = "elec,id="+str(metric[0])+" value="+str(metric[2])+" "+str(pd.to_datetime(metric[3]).value // 10 ** 9)+"\n"
    #payload = "water,id="+str(metric[0])+" value="+str(metric[2])+"\n"
    r = requests.post(url, params=params, data=payload)


---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-9-1eced962bd0c> in <module>()
      2     payload = "elec,id="+str(metric[0])+" value="+str(metric[2])+" "+str(pd.to_datetime(metric[3]).value // 10 ** 9)+"\n"
      3     #payload = "water,id="+str(metric[0])+" value="+str(metric[2])+"\n"
----> 4     r = requests.post(url, params=params, data=payload)

    ... (requests / urllib3 / httplib frames elided; the interrupt landed
    in socket.recv while waiting for the server's response) ...

KeyboardInterrupt: 
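
One HTTP round-trip per point is why the loop above had to be interrupted by hand. The /write endpoint accepts many points in a single body, separated by newlines, so batching cuts the request count by orders of magnitude. A sketch, reusing the same a, url, and params:

In [ ]:
# Batch the line protocol points: /write takes newline-separated points
# in one body. 5000 points per request is a reasonable chunk size.
lines = ["elec,id=%s value=%s %d" % (m[0], m[2],
                                     pd.to_datetime(m[3]).value // 10 ** 9)
         for m in a]
for i in range(0, len(lines), 5000):
    r = requests.post(url, params=params, data="\n".join(lines[i:i + 5000]))
    assert r.status_code == 204, r.text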

In [9]:
def read_data():
    with open('data/water_ft.csv') as f:
        return [x.split(',') for x in f.readlines()[1:]]

a = read_data()

In [10]:
a[0]


Out[10]:
['68', 'm\xc2\xb3', '1.18', '2014-07-10T00:30:00.000Z\n']

In [ ]:
for metric in a[1000:3000]:
    #payload = "gas,id="+str(metric[0])+" value="+str(metric[2])+" "+str(pd.to_datetime(metric[3]).value // 10 ** 9)+"\n"
    # No timestamp in the payload: the server stamps each point on arrival,
    # so sleep(1) spaces the points one second apart, simulating a live feed.
    payload = "water,id="+str(metric[0])+" value="+str(metric[2])+"\n"
    r = requests.post(url, params=params, data=payload)
    time.sleep(1)
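
When pacing single-point writes like this, reusing one TCP connection with requests.Session avoids a fresh handshake per point; a sketch:

In [ ]:
# Reuse a single connection across the paced writes.
session = requests.Session()
for metric in a[1000:3000]:
    payload = "water,id=" + str(metric[0]) + " value=" + str(metric[2]) + "\n"
    r = session.post(url, params=params, data=payload)
    time.sleep(1)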