In [1]:
import csv
from dateutil import parser
import datetime
import time
from engine.engine import Engine
In [2]:
log = []
with open("data/200log.csv", 'r') as f:
    reader = csv.reader(f)
    next(reader, None)  # skip the header row
    for row in reader:
        url_request = unicode(row[0])
        url_status_code = row[1]
        header_len = row[2]
        response_len = row[3]
        name = unicode(row[4])
        timedate = parser.parse(row[5])  # parse the timestamp string into a datetime
        message = unicode(row[6])
        scraperlog = [timedate, url_request, url_status_code,
                      header_len, response_len, name, message]
        log.append(scraperlog)
print 'done', log[:2]
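The same parse reads a little more clearly if each row is wrapped in a namedtuple, so fields are accessed by name rather than by positional index. This is only a sketch of that alternative, assuming the seven-column layout above; LogRow, its field names, and read_scraper_log are illustrative and not part of the project.

import csv
from collections import namedtuple
from dateutil import parser

# Illustrative row type; field names are assumptions based on the columns parsed above.
LogRow = namedtuple('LogRow', ['timedate', 'url_request', 'url_status_code',
                               'header_len', 'response_len', 'name', 'message'])

def read_scraper_log(path):
    """Parse the scraper log CSV into a list of LogRow records."""
    rows = []
    with open(path, 'r') as f:
        reader = csv.reader(f)
        next(reader, None)  # skip the header row
        for row in reader:
            rows.append(LogRow(
                timedate=parser.parse(row[5]),
                url_request=unicode(row[0]),
                url_status_code=row[1],
                header_len=row[2],
                response_len=row[3],
                name=unicode(row[4]),
                message=unicode(row[6]),
            ))
    return rows

# Usage (same file as above): log = read_scraper_log("data/200log.csv")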
In [5]:
Engine(engine='new_test', scraperlog=log)
Out[5]:
In [6]:
e = Engine(engine='new_test')
e.main()
In [7]:
print e
In [ ]: