In [41]:
import tweepy
import re
import json
import random
import datetime, time, os, sys
import argparse, ConfigParser
import sqlite3 as lite
from time import sleep

# read the Twitter API credentials from a local config file
Config = ConfigParser.ConfigParser()
Config.read('config.cnf')

consumer_key = Config.get('twitterdissertation', 'consumer_key')
consumer_secret = Config.get('twitterdissertation', 'consumer_secret')
access_token = Config.get('twitterdissertation', 'access_token')
access_token_secret = Config.get('twitterdissertation', 'access_token_secret')

# authenticate and set up access to the Twitter API, letting tweepy
# wait out rate limits automatically
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)
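
A quick sanity check that the credentials loaded from config.cnf actually work (a minimal sketch, not part of the original notebook; verify_credentials() is a standard tweepy call):

In [ ]:
# returns the authenticated user when the OAuth tokens are valid
me = api.verify_credentials()
print me.screen_name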

In [1]:
# load the full text of the dissertation
with open('tweetabledissertation.md') as f:
    diss = f.read().strip()

In [2]:
import nltk.data
from nltk import word_tokenize
# load NLTK's pre-trained Punkt sentence tokenizer for English
sent_detector = nltk.data.load('tokenizers/punkt/english.pickle')

In [3]:
# split the dissertation into individual sentences
diss_sent = sent_detector.tokenize(diss)
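
The loop further down rebuilds each sentence into 140-character pieces inline; the same idea can be expressed as a small helper. This is a hypothetical sketch (chunk_sentence is not defined in the original notebook), mirroring the token-accumulation logic used below:

In [ ]:
def chunk_sentence(sent, limit=140):
    # hypothetical helper: split a sentence into tweet-sized chunks by
    # accumulating tokens until the next one would push past the limit
    chunks = []
    tw = ''
    for token in sent.split(' '):
        if len(tw) + len(token) < limit:
            tw = "%s %s" % (tw, token)
        else:
            chunks.append(tw.strip())
            tw = token
    if tw.strip():
        chunks.append(tw.strip())
    return chunks

chunk_sentence(diss_sent[0])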

In [ ]:
tw = "The Public Impact of Latin America's Approach to Open Access, by Juan Pablo Alperin"
api.update_status(tw)

In [4]:
# rebuild the 140-character chunks sentence by sentence to locate the chunk
# containing the phrase 'automatically captures' and report its sentence index
i = 0
try:
    for sent in diss_sent[i:]:
        sent = sent.strip()
        tw = ''
        for token in sent.split(' '):
            if len(tw) + len(token) < 140:
                tw = "%s %s" % (tw, token)
            else:
                # chunk is full: check it before starting a new one
                if 'automatically captures' in tw:
                    print i
                    print tw
                tw = token

        i = i + 1
        # check the final (possibly partial) chunk of the sentence
        if 'automatically captures' in tw:
            print i
            print tw

except KeyboardInterrupt, error:
    print "at sentence: %s" % i
    raise


365
 The online nature of the survey automatically captures the incident and allows for it to be analyzed along with other data about the
didn't find
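
With the chunk located, the remaining sentences can be posted from that point on. This is only a minimal sketch of what that loop might look like, not the notebook's actual posting code: the starting index (365, as reported above), the chunk_sentence helper, and the 60-second pause between tweets are all assumptions.

In [ ]:
# resume posting from the sentence index reported by the search above
start = 365  # assumed to map directly onto diss_sent
for sent in diss_sent[start:]:
    for tw in chunk_sentence(sent.strip()):  # hypothetical helper sketched earlier
        api.update_status(tw)
        sleep(60)  # assumed pacing between tweets to stay well within posting limits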
