In [5]:
import cowbotfunctions

# Article pages on environicsindia.in to fetch and scrape for text and links
baseurl = "http://environicsindia.in/index.php/en/?option=com_content&view=article&layout=&id=1481"
baseurl2 = "http://environicsindia.in/index.php/en/?option=com_content&view=article&layout=&id=1322"

In [ ]:
# Fetch the page text and outgoing links for baseurl2
base2 = cowbotfunctions.gettextandlinks(baseurl2)

In [18]:
base1=cowbotfunctions.gettextandlinks(baseurl)


---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-18-b1d879f2ac3f> in <module>()
----> 1 base1=cowbotfunctions.gettextandlinks(baseurl)

/opt/cowmesh/cowbots/cowbotfunctions.pyc in gettextandlinks(url)
      5 from livdatcsvlib import *
      6 def gettextandlinks(url):
----> 7     webcontent=urllib2.urlopen(url).read()
      8     soup = BeautifulSoup(webcontent,'html.parser')
      9     links=[]

/usr/lib/python2.7/urllib2.pyc in urlopen(url, data, timeout, cafile, capath, cadefault, context)
    152     else:
    153         opener = _opener
--> 154     return opener.open(url, data, timeout)
    155 
    156 def install_opener(opener):

/usr/lib/python2.7/urllib2.pyc in open(self, fullurl, data, timeout)
    427             req = meth(req)
    428 
--> 429         response = self._open(req, data)
    430 
    431         # post-process response

/usr/lib/python2.7/urllib2.pyc in _open(self, req, data)
    445         protocol = req.get_type()
    446         result = self._call_chain(self.handle_open, protocol, protocol +
--> 447                                   '_open', req)
    448         if result:
    449             return result

/usr/lib/python2.7/urllib2.pyc in _call_chain(self, chain, kind, meth_name, *args)
    405             func = getattr(handler, meth_name)
    406 
--> 407             result = func(*args)
    408             if result is not None:
    409                 return result

/usr/lib/python2.7/urllib2.pyc in http_open(self, req)
   1226 
   1227     def http_open(self, req):
-> 1228         return self.do_open(httplib.HTTPConnection, req)
   1229 
   1230     http_request = AbstractHTTPHandler.do_request_

/usr/lib/python2.7/urllib2.pyc in do_open(self, http_class, req, **http_conn_args)
   1199         else:
   1200             try:
-> 1201                 r = h.getresponse(buffering=True)
   1202             except TypeError: # buffering kw not supported
   1203                 r = h.getresponse()

/usr/lib/python2.7/httplib.pyc in getresponse(self, buffering)
   1134 
   1135         try:
-> 1136             response.begin()
   1137             assert response.will_close != _UNKNOWN
   1138             self.__state = _CS_IDLE

/usr/lib/python2.7/httplib.pyc in begin(self)
    451         # read until we get a non-100 response
    452         while True:
--> 453             version, status, reason = self._read_status()
    454             if status != CONTINUE:
    455                 break

/usr/lib/python2.7/httplib.pyc in _read_status(self)
    407     def _read_status(self):
    408         # Initialize with Simple-Response defaults
--> 409         line = self.fp.readline(_MAXLINE + 1)
    410         if len(line) > _MAXLINE:
    411             raise LineTooLong("header line")

/usr/lib/python2.7/socket.pyc in readline(self, size)
    478             while True:
    479                 try:
--> 480                     data = self._sock.recv(self._rbufsize)
    481                 except error, e:
    482                     if e.args[0] == EINTR:

KeyboardInterrupt: 
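
The fetch above hung in socket.recv until it was interrupted by hand: cowbotfunctions.gettextandlinks calls urllib2.urlopen without a timeout, so a server that accepts the connection but never answers blocks indefinitely. Below is a minimal sketch of a timeout-guarded variant. Only lines 5-9 of cowbotfunctions.pyc are visible in the traceback, so the parsing and return shape are assumptions, gettextandlinks_with_timeout is a hypothetical helper name, and BeautifulSoup is assumed to come from the bs4 package.

In [ ]:
import socket
import urllib2
from bs4 import BeautifulSoup  # assumption: cowbotfunctions uses bs4's BeautifulSoup

def gettextandlinks_with_timeout(url, timeout=30):
    # Pass a timeout so a silent server raises an error instead of blocking
    # in socket.recv until a manual KeyboardInterrupt.
    try:
        webcontent = urllib2.urlopen(url, timeout=timeout).read()
    except (urllib2.URLError, socket.timeout) as e:
        print "fetch failed for %s: %s" % (url, e)
        return None, []
    soup = BeautifulSoup(webcontent, 'html.parser')
    # Collect href targets; the real gettextandlinks may select links differently.
    links = [a.get('href') for a in soup.find_all('a', href=True)]
    return soup.get_text(), links

# e.g. base1 = gettextandlinks_with_timeout(baseurl)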

In [17]:
print "h"


h

In [ ]: