Exploring the RedTube JSON API with Python
In [10]:
import requests
import json
import random
import getpass
#import couchdb
import pickle
import getpass
#!flask/bin/python
#from flask import Flask, jsonify
In [11]:
myusr = getpass.getuser()  # login name of the current user; used below to build '/home/<user>' paths
In [12]:
print(myusr)  # sanity check: show which user's home directory will be used
In [2]:
#couch = couchdb.Server()
In [36]:
# Restore the previously pickled video mapping from the user's home directory.
# NOTE(review): pickle.load executes arbitrary code if the file is untrusted — confirm provenance.
with open('/home/{}/prn.pickle'.format(myusr), 'rb') as fh:
    prnlis = pickle.load(fh)
In [13]:
#db = couch.create('redtube')
In [14]:
#db = couch['redtube']
The `requests` and `json` modules are the two main modules used for this; `random` can also be handy.
In [15]:
# Query parameters for the RedTube API: JSON output, the searchVideos
# method, and the first page of results.
payload = dict(output='json', data='redtube.Videos.searchVideos', page=1)
In [16]:
getprn = requests.get('http://api.redtube.com/', params = payload)  # fetch the first page of search results from the API
In [17]:
daprn = getprn.json()  # parse the response body as JSON into a dict
In [18]:
levid = len(daprn['videos'])  # number of videos returned on this page
In [19]:
# Accumulator mapping video_id -> {'tags': ...}; filled by the scrape loop below.
porndick = {}
In [ ]:
In [47]:
#for lev in range(0, levid):
# print(daprn['videos'][lev]['video'])
# prntit = (daprn['videos'][lev]['video']['title'])
# prnnow = prntit.replace(' ', '-')
# prnlow = prnnow.lower()
# print(prnlow)
# try:
# somelis = list()
# for dapr in daprn['videos'][lev]['video']['tags']:
# print(dapr['tag_name'])
# somelis.append(dapr['tag_name'])
# porndick.update({daprn['videos'][lev]['video']['video_id'] : {'tags' : ", ".join(str(x) for x in somelis)}})
#db.save(porndick)
#try:
# db = couch.create(prnlow)
#except PreconditionFailed:
# db = couch[prnlow]
#db.save({daprn['videos'][lev]['video']['video_id'] : {'tags' : ", ".join(str(x) for x in somelis)}})
# except KeyError:
# continue
In [18]:
#for i in db:
# print(i)
In [45]:
#db.save(porndick)
#for i in db:
# print(db[i])
In [54]:
#print(pornd['tags'])
In [8]:
#loaPrn = json.loads(getPrn.text)
#print loaUrl
Convert the response into readable JSON data that you can work with.
In [28]:
lenvid = len(daprn[u'videos'])  # NOTE(review): duplicates `levid` computed earlier — consider reusing it
In [29]:
lenvid  # bare last expression: displays the video count as the cell's output
Out[29]:
In [25]:
#aldic = dict()
In [42]:
# Restore the previously pickled gif-slug -> gif-url mapping.
# NOTE(review): pickle.load executes arbitrary code if the file is untrusted — confirm provenance.
with open('/home/{}/prn3.pickle'.format(myusr), 'rb') as fh:
    aldic = pickle.load(fh)
In [26]:
import shutil
In [46]:
# For every video on this results page: print its URL and title, look up a
# related Giphy GIF keyed on the video's tags, and record results in two dicts:
#   aldic    : gif slug -> gif url
#   porndick : video_id -> {'tags': comma-joined tag names}
for napn in range(0, lenvid):
    video = daprn[u'videos'][napn]['video']
    print(video['url'])
    print(video['title'])
    try:
        tagna = video['tags']  # list of {'tag_name': ...} dicts; KeyError if absent
        # Build a search phrase from the tag names. The original interpolated the
        # raw list repr into the URL, which is not a usable (or URL-safe) query;
        # passing `params=` lets requests encode it properly.
        phrase = ' '.join(t['tag_name'] for t in tagna)
        # NOTE(review): hard-coded public Giphy beta key — move to config/env.
        reqbru = requests.get(
            'http://api.giphy.com/v1/gifs/translate',
            params={'s': phrase, 'api_key': 'dc6zaTOxFJmzC'},
        )
        brujsn = reqbru.json()
        gurl = brujsn['data']['images']['fixed_width']['url']
        print(gurl)
        gslug = brujsn['data']['slug']
        try:
            # BUG FIX: the original indexed daprn['videos'][lev] here, but `lev`
            # only existed in a deleted cell — it raised NameError (which the
            # `except KeyError` below did not catch). Use the current video.
            somelis = []
            for dapr in video['tags']:
                print(dapr['tag_name'])
                somelis.append(dapr['tag_name'])
            porndick.update({video['video_id']: {'tags': ", ".join(str(x) for x in somelis)}})
        except KeyError:
            continue
        aldic.update({gslug: gurl})
    except KeyError:
        # Video had no tags, or the Giphy response lacked the expected keys.
        continue
In [ ]:
# Persist the video_id -> tags mapping so the next run can reload it.
with open('/home/{}/prn.pickle'.format(myusr), 'wb') as out_fh:
    pickle.dump(porndick, out_fh, protocol=pickle.HIGHEST_PROTOCOL)
In [41]:
# Persist the gif-slug -> gif-url mapping so the next run can reload it.
with open('/home/{}/prn3.pickle'.format(myusr), 'wb') as out_fh:
    pickle.dump(aldic, out_fh, protocol=pickle.HIGHEST_PROTOCOL)
In [44]:
#db.save(aldic)