In [222]:
import re
import requests
import praw
import pickle
import itertools
from bs4 import BeautifulSoup
from time import sleep

In [183]:
# Demo: str.title() capitalizes after ANY non-letter, so the apostrophe in
# "Hello's" yields "Hello'S" — motivating the manual capitalization used below.
s = "Hello's"
print(s.title())


Hello'S

In [203]:
s = 'Fox'


Out[203]:
['FOX', 'FOx', 'FoX', 'Fox', 'fOX', 'fOx', 'foX', 'fox']

In [223]:
# Generate every first-letter capitalization combination of the words,
# joined back into space-separated phrases.
lis = ["no", "way", "lol"]
variants = ((word[0].upper() + word[1:], word) for word in lis)
s = [' '.join(combo) for combo in itertools.product(*variants)]
print(s)


['No Way Lol', 'No Way lol', 'No way Lol', 'No way lol', 'no Way Lol', 'no Way lol', 'no way Lol', 'no way lol']

In [224]:
m = re.compile(r"\[\[[^\]]*\]\]")

def supers(text):
    """Render each whitespace-separated word of `text` as reddit superscript.

    Every word is prefixed with '^' and the whole result carries a leading
    space (so it concatenates cleanly onto preceding markdown).
    Empty input yields a single space.
    """
    return " " + " ".join("^" + word for word in text.split())

def findItem(name):
    """Resolve `name` to a wiki item, dropping trailing words until a hit.

    Apostrophes are URL-encoded first.  Tries the full word list, then each
    successively shorter prefix, returning the first non-empty lookup result
    (or "" when nothing matches).
    """
    words = name.replace("'", "%27").split()
    for end in range(len(words), 0, -1):
        result = itemLookup(words[:end])
        if result != "":
            return result
    return ""
def itemLookup(name):
    """Fetch and format the wiki infobox for the item named by `name`.

    name -- list of words; every word is tried both Capitalized (rest
            lowercased) and fully lowercased, joined with '_' as a wiki URL,
            until one does not 404.

    Returns a reddit-markdown description of the item, or "" when no page is
    found or the found page has no item infobox.
    """
    # Candidate URLs: one per capitalization combination.
    # NOTE(review): this is exponential in the word count (up to 2**len(name)
    # HTTP requests) — acceptable for short item names only.
    possibleURLs = list(map('_'.join, itertools.product(
        *((c[0].upper() + c[1:].lower(), c.lower()) for c in name))))
    # Bug fix: `r` was a NameError when `name` (and thus possibleURLs) was
    # empty; initialize it so the guard below handles that case.
    r = None
    for possibleURL in possibleURLs:
        print(possibleURL)  # debug trace of attempted URLs
        # Bug fix: added a timeout — requests.get without one can hang the
        # bot indefinitely on a stalled connection.
        r = requests.get("http://www.pathofexile.gamepedia.com/" + possibleURL,
                         timeout=10)
        if r.status_code != 404:
            break
    if r is None or r.status_code == 404:
        return ""

    soup = BeautifulSoup(r.text, "html.parser")
    itemspan = soup.find("span", {"class": "infobox-page-container"})

    # Break if an invalid page has been reached (no item infobox present)
    if not itemspan:
        return ""

    itemspan = itemspan.find("span", {"class": "item-box"})

    # Item title: concatenate the text children of the header span.
    itemTitleRaw = itemspan.find("span", {"class": "header"}).children
    itemTitle = ""
    for item in itemTitleRaw:
        if item.string:
            itemTitle += item.string + " "
    itemTitle = itemTitle[:-1]  # drop the trailing space

    # Item stats container
    itemStats = itemspan.find("span", {"class": "item-stats"}).find("span")

    # Item mod spans
    itemMods = itemspan.findAll("span", {"class": "-mod"})

    # Item image URL
    itemURL = itemspan.findAll("a", {"class": "image"})[0].img["src"]

    # Header line: bold title linked to the image, plus a wiki link.
    response = ""
    response += "[**" + itemTitle + "**](" + itemURL + ")"
    response += "[[Wiki]](http://www.pathofexile.gamepedia.com/" + possibleURL + ")\n\n"

    # Stats: element children get each text child superscripted; bare text
    # nodes have no .children and raise AttributeError, in which case the
    # node itself is superscripted.
    # Bug fix: was a bare `except:`, which also swallowed unrelated errors
    # (including KeyboardInterrupt); narrowed to the exception this control
    # flow actually relies on.
    for item in itemStats.children:
        try:
            temp = ""
            for child in item.children:
                if child.string:
                    temp += supers(child.string)
            response += temp
            if temp == "":
                response += " ^| "  # separator for structure-only spans
        except AttributeError:
            response += supers(item)
    response += "\n\n"

    # Mods: when there are exactly two mod groups, italicize the first
    # (implicit mod); text-less children act as paragraph breaks.
    for i in range(len(itemMods)):
        for item in itemMods[i].children:
            if item.string:
                if len(itemMods) == 2 and i == 0:
                    response += "*" + item.string + "*"
                else:
                    response += item.string
            else:
                response += "\n\n"
        response += "\n\n"

    return response

In [39]:
findItem("sorcerer boots bob")

In [100]:
r = praw.Reddit('bot1')

In [229]:
MAX_ITEMS = 10  # max [[item]] lookups answered per comment

def respond(lim, rate):
    """Poll /r/test forever, replying to comments that contain [[item]] tags.

    lim  -- number of newest submissions to scan on each pass
    rate -- seconds to sleep between passes

    Replied-to comment ids are persisted to ids.pickle so restarts do not
    double-reply.  Uses the module-level globals `r` (praw session),
    `m` (item-tag regex) and `findItem`.
    """
    with open('ids.pickle', 'rb') as handle:
        ids = pickle.load(handle)
    while True:
        subreddit = r.subreddit("test")
        for submission in subreddit.new(limit=lim):
            # Breadth-first walk of the submission's comment tree.
            comment_queue = submission.comments[:]
            while comment_queue:
                com = comment_queue.pop(0)
                if "[[" in com.body and "]]" in com.body and com.id not in ids:
                    ids.append(com.id)
                    print("Found Comment:" + com.id)
                    reply = ""
                    for item in m.findall(com.body)[:MAX_ITEMS]:
                        temp = findItem(item[2:-2])
                        reply += temp
                        if temp != "":
                            reply += "\n\n---------\n\n"
                    if reply != "":
                        # Bug fix: the footer claimed "up to 7" while the
                        # code answered up to 10; keep both tied to MAX_ITEMS.
                        reply += ("I am a bot. Reply to me with up to %d "
                                  "[[item names]]." % MAX_ITEMS)
                        com.reply(reply)
                    else:
                        print("False Reply ^")
                    # Bug fix: persist immediately after handling a comment —
                    # previously ids were saved only after a full pass, so a
                    # mid-pass crash (e.g. the network error traced below)
                    # lost them and risked double replies on restart.
                    with open('ids.pickle', 'wb') as handle:
                        pickle.dump(ids, handle,
                                    protocol=pickle.HIGHEST_PROTOCOL)
                comment_queue.extend(com.replies)
        sleep(rate)

In [230]:
respond(50, 30)


Found Comment:dbjcylw
Essence_Of_Horror
Essence_Of_horror
Essence_of_Horror
Found Comment:dbjcz0x
Wiki
wiki
Item_Names
Item_names
item_Names
item_names
Item
False Reply ^
---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
C:\Users\liort\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in _make_request(self, conn, method, url, timeout, chunked, **httplib_request_kw)
    384             try:  # Python 2.7, use buffering of HTTP responses
--> 385                 httplib_response = conn.getresponse(buffering=True)
    386             except TypeError:  # Python 2.6 and older

TypeError: getresponse() got an unexpected keyword argument 'buffering'

During handling of the above exception, another exception occurred:

TimeoutError                              Traceback (most recent call last)
C:\Users\liort\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, **response_kw)
    577                                                   body=body, headers=headers,
--> 578                                                   chunked=chunked)
    579 

C:\Users\liort\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in _make_request(self, conn, method, url, timeout, chunked, **httplib_request_kw)
    386             except TypeError:  # Python 2.6 and older
--> 387                 httplib_response = conn.getresponse()
    388         except (SocketTimeout, BaseSSLError, SocketError) as e:

C:\Users\liort\Anaconda3\lib\http\client.py in getresponse(self)
   1196             try:
-> 1197                 response.begin()
   1198             except ConnectionError:

C:\Users\liort\Anaconda3\lib\http\client.py in begin(self)
    296         while True:
--> 297             version, status, reason = self._read_status()
    298             if status != CONTINUE:

C:\Users\liort\Anaconda3\lib\http\client.py in _read_status(self)
    257     def _read_status(self):
--> 258         line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
    259         if len(line) > _MAXLINE:

C:\Users\liort\Anaconda3\lib\socket.py in readinto(self, b)
    574             try:
--> 575                 return self._sock.recv_into(b)
    576             except timeout:

C:\Users\liort\Anaconda3\lib\ssl.py in recv_into(self, buffer, nbytes, flags)
    928                   self.__class__)
--> 929             return self.read(nbytes, buffer)
    930         else:

C:\Users\liort\Anaconda3\lib\ssl.py in read(self, len, buffer)
    790         try:
--> 791             return self._sslobj.read(len, buffer)
    792         except SSLError as x:

C:\Users\liort\Anaconda3\lib\ssl.py in read(self, len, buffer)
    574         if buffer is not None:
--> 575             v = self._sslobj.read(len, buffer)
    576         else:

TimeoutError: [WinError 10060] A connection attempt failed because the connected party did not properly respond after a period of time, or established connection failed because connected host has failed to respond

During handling of the above exception, another exception occurred:

ProtocolError                             Traceback (most recent call last)
C:\Users\liort\Anaconda3\lib\site-packages\requests\adapters.py in send(self, request, stream, timeout, verify, cert, proxies)
    402                     retries=self.max_retries,
--> 403                     timeout=timeout
    404                 )

C:\Users\liort\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, **response_kw)
    622             retries = retries.increment(method, url, error=e, _pool=self,
--> 623                                         _stacktrace=sys.exc_info()[2])
    624             retries.sleep()

C:\Users\liort\Anaconda3\lib\site-packages\requests\packages\urllib3\util\retry.py in increment(self, method, url, response, error, _pool, _stacktrace)
    254             if read is False:
--> 255                 raise six.reraise(type(error), error, _stacktrace)
    256             elif read is not None:

C:\Users\liort\Anaconda3\lib\site-packages\requests\packages\urllib3\packages\six.py in reraise(tp, value, tb)
    308         if value.__traceback__ is not tb:
--> 309             raise value.with_traceback(tb)
    310         raise value

C:\Users\liort\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, **response_kw)
    577                                                   body=body, headers=headers,
--> 578                                                   chunked=chunked)
    579 

C:\Users\liort\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in _make_request(self, conn, method, url, timeout, chunked, **httplib_request_kw)
    386             except TypeError:  # Python 2.6 and older
--> 387                 httplib_response = conn.getresponse()
    388         except (SocketTimeout, BaseSSLError, SocketError) as e:

C:\Users\liort\Anaconda3\lib\http\client.py in getresponse(self)
   1196             try:
-> 1197                 response.begin()
   1198             except ConnectionError:

C:\Users\liort\Anaconda3\lib\http\client.py in begin(self)
    296         while True:
--> 297             version, status, reason = self._read_status()
    298             if status != CONTINUE:

C:\Users\liort\Anaconda3\lib\http\client.py in _read_status(self)
    257     def _read_status(self):
--> 258         line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
    259         if len(line) > _MAXLINE:

C:\Users\liort\Anaconda3\lib\socket.py in readinto(self, b)
    574             try:
--> 575                 return self._sock.recv_into(b)
    576             except timeout:

C:\Users\liort\Anaconda3\lib\ssl.py in recv_into(self, buffer, nbytes, flags)
    928                   self.__class__)
--> 929             return self.read(nbytes, buffer)
    930         else:

C:\Users\liort\Anaconda3\lib\ssl.py in read(self, len, buffer)
    790         try:
--> 791             return self._sslobj.read(len, buffer)
    792         except SSLError as x:

C:\Users\liort\Anaconda3\lib\ssl.py in read(self, len, buffer)
    574         if buffer is not None:
--> 575             v = self._sslobj.read(len, buffer)
    576         else:

ProtocolError: ('Connection aborted.', TimeoutError(10060, 'A connection attempt failed because the connected party did not properly respond after a period of time, or established connection failed because connected host has failed to respond', None, 10060, None))

During handling of the above exception, another exception occurred:

ConnectionError                           Traceback (most recent call last)
C:\Users\liort\Anaconda3\lib\site-packages\prawcore\requestor.py in request(self, *args, **kwargs)
     45         try:
---> 46             return self._http.request(*args, **kwargs)
     47         except Exception as exc:

C:\Users\liort\Anaconda3\lib\site-packages\requests\sessions.py in request(self, method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, verify, cert, json)
    474         send_kwargs.update(settings)
--> 475         resp = self.send(prep, **send_kwargs)
    476 

C:\Users\liort\Anaconda3\lib\site-packages\requests\sessions.py in send(self, request, **kwargs)
    584         # Send the request
--> 585         r = adapter.send(request, **kwargs)
    586 

C:\Users\liort\Anaconda3\lib\site-packages\requests\adapters.py in send(self, request, stream, timeout, verify, cert, proxies)
    452         except (ProtocolError, socket.error) as err:
--> 453             raise ConnectionError(err, request=request)
    454 

ConnectionError: ('Connection aborted.', TimeoutError(10060, 'A connection attempt failed because the connected party did not properly respond after a period of time, or established connection failed because connected host has failed to respond', None, 10060, None))

During handling of the above exception, another exception occurred:

RequestException                          Traceback (most recent call last)
<ipython-input-230-4292e4dc4f5f> in <module>()
----> 1 respond(50, 30)

<ipython-input-229-3816e78ddb76> in respond(lim, rate)
      4     while True:
      5         subreddit = r.subreddit("test")
----> 6         for submission in subreddit.new(limit=lim):
      7             comment_queue = submission.comments[:]
      8             while comment_queue:

C:\Users\liort\Anaconda3\lib\site-packages\praw\models\listing\generator.py in __next__(self)
     43 
     44         if self._listing is None or self._list_index >= len(self._listing):
---> 45             self._next_batch()
     46 
     47         self._list_index += 1

C:\Users\liort\Anaconda3\lib\site-packages\praw\models\listing\generator.py in _next_batch(self)
     53             raise StopIteration()
     54 
---> 55         self._listing = self._reddit.get(self.url, params=self.params)
     56         if isinstance(self._listing, list):
     57             self._listing = self._listing[1]  # for submission duplicates

C:\Users\liort\Anaconda3\lib\site-packages\praw\reddit.py in get(self, path, params)
    305 
    306         """
--> 307         data = self.request('GET', path, params=params)
    308         return self._objector.objectify(data)
    309 

C:\Users\liort\Anaconda3\lib\site-packages\praw\reddit.py in request(self, method, path, params, data, files)
    389         """
    390         return self._core.request(method, path, data=data, files=files,
--> 391                                   params=params)
    392 
    393     def submission(  # pylint: disable=invalid-name,redefined-builtin

C:\Users\liort\Anaconda3\lib\site-packages\prawcore\sessions.py in request(self, method, path, data, files, json, params)
    110         """
    111         if not self._authorizer.is_valid():
--> 112             self._authorizer.refresh()
    113 
    114         headers = {'Authorization': 'bearer {}'

C:\Users\liort\Anaconda3\lib\site-packages\prawcore\auth.py in refresh(self)
    326         """Obtain a new personal-use script type access token."""
    327         self._request_token(grant_type='password', username=self._username,
--> 328                             password=self._password)

C:\Users\liort\Anaconda3\lib\site-packages\prawcore\auth.py in _request_token(self, **data)
    136                const.ACCESS_TOKEN_PATH)
    137         pre_request_time = time.time()
--> 138         response = self._authenticator._post(url, **data)
    139         payload = response.json()
    140         if 'error' in payload:  # Why are these OKAY responses?

C:\Users\liort\Anaconda3\lib\site-packages\prawcore\auth.py in _post(self, url, success_status, **data)
     27     def _post(self, url, success_status=codes['ok'], **data):
     28         response = self._requestor.request('post', url, auth=self._auth(),
---> 29                                            data=sorted(data.items()))
     30         if response.status_code != success_status:
     31             raise ResponseException(response)

C:\Users\liort\Anaconda3\lib\site-packages\prawcore\requestor.py in request(self, *args, **kwargs)
     46             return self._http.request(*args, **kwargs)
     47         except Exception as exc:
---> 48             raise RequestException(exc, args, kwargs)

RequestException: error with request ('Connection aborted.', TimeoutError(10060, 'A connection attempt failed because the connected party did not properly respond after a period of time, or established connection failed because connected host has failed to respond', None, 10060, None))

In [ ]: