import pywikibot
site = pywikibot.Site('test', 'wikipedia')
site
APISite("test", "wikipedia")
page = pywikibot.Page(site, 'test')
page
Page('Test')
page.exists()
True
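Since the page exists, its current wikitext can be read straight from the Page object; a minimal sketch continuing the session above (page.text is the standard pywikibot attribute for the latest wikitext):

print(page.text[:200])        # first 200 characters of the wikitext of [[Test]]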
import urllib2
import re

def GetRevisions(Trains):
    url = "https://en.wikipedia.org/w/api.php?action=query&format=xml&prop=revisions&rvlimit=500&titles=" + pageTitle
    revisions = []                                        #list of all accumulated revisions
    next = ''                                             #information for the next request
    while True:
        response = urllib2.urlopen(url + next).read()     #web request
        revisions += re.findall('<rev [^>]*>', response)  #adds all revisions from the current request to the list

        cont = re.search('<continue rvcontinue="([^"]+)"', response)
        if not cont:                                      #break the loop if 'continue' element missing
            break

        next = "&rvcontinue=" + cont.group(1)             #gets the revision Id from which to start the next request

    return revisions;
---------------------------------------------------------------------------
ModuleNotFoundError                       Traceback (most recent call last)
<ipython-input-7-adb953620b72> in <module>()
----> 1 import urllib2
      2 import re
      3 
      4 def GetRevisions(Trains):
      5     url = "https://en.wikipedia.org/w/api.php?action=query&format=xml&prop=revisions&rvlimit=500&titles=" + pageTitle

ModuleNotFoundError: No module named 'urllib2'
revisions = GetRevisions("Coffee")
---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-8-98679a26749b> in <module>()
----> 1 revisions = GetRevisions("Coffee")

NameError: name 'GetRevisions' is not defined
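Both tracebacks above have the same root cause: urllib2 only exists in Python 2, so the import fails and GetRevisions never gets defined. In Python 3 the equivalent call lives in urllib.request, roughly like this (note that read() returns bytes, which must be decoded before the regular expressions can run):

import urllib.request

url = ("https://en.wikipedia.org/w/api.php?action=query&format=xml"
       "&prop=revisions&rvlimit=500&titles=Coffee")
response = urllib.request.urlopen(url).read().decode('utf-8')   # str, not bytes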
site = pywikibot.Site("en", "wikipedia")
page = pywikibot.Page(site, "pagename")
revs = page.revisions(content=True)
---------------------------------------------------------------------------
NoPage                                    Traceback (most recent call last)
<ipython-input-9-efa1b4cedab0> in <module>()
      1 site = pywikibot.Site("en", "wikipedia")
      2 page = pywikibot.Page(site, "pagename")
----> 3 revs = page.revisions(content=True)

/srv/paws/pwb/pywikibot/tools/__init__.py in wrapper(*__args, **__kw)
   1518                              cls, depth)
   1519                     del __kw[old_arg]
-> 1520             return obj(*__args, **__kw)
   1521 
   1522         if not __debug__:

/srv/paws/pwb/pywikibot/page.py in revisions(self, reverse, total, content, rollback, starttime, endtime)
   1698         self.site.loadrevisions(self, getText=content, rvdir=reverse,
   1699                                 starttime=starttime, endtime=endtime,
-> 1700                                 total=total, rollback=rollback)
   1701         return (self._revisions[rev] for rev in
   1702                 sorted(self._revisions, reverse=not reverse)[:total])

/srv/paws/pwb/pywikibot/site.py in loadrevisions(self, page, getText, revids, startid, endid, starttime, endtime, rvdir, user, excludeuser, section, sysop, step, total, rollback)
   4072                 raise InconsistentTitleReceived(page, pagedata['title'])
   4073             if "missing" in pagedata:
-> 4074                 raise NoPage(page)
   4075             api.update_page(page, pagedata, rvgen.props)
   4076 

NoPage: Page [[en:Pagename]] doesn't exist.
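The NoPage error simply means that "pagename" was copied literally instead of being replaced with a real title. A small guard, reusing page.exists() from the first cell, avoids the exception (the title "Coffee" is only an example):

site = pywikibot.Site("en", "wikipedia")
page = pywikibot.Page(site, "Coffee")          # substitute any real article title
if page.exists():                              # avoid NoPage by checking first
    revs = page.revisions(content=True)
else:
    print("No such page:", page.title())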
site = pywikibot.Site("en", "wikipedia")
page = pywikibot.Page(site, "Trains")
revs = page.revisions(content=True)
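With an existing title the call succeeds and revs is a generator of Revision objects. A short sketch of walking over it, assuming the usual pywikibot Revision attributes (revid, timestamp, user):

for i, rev in enumerate(revs):
    print(rev.revid, rev.timestamp, rev.user)  # metadata of each revision, newest first
    if i >= 4:                                 # stop after the five most recent revisions
        break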
site = pywikibot.Site("en", "wikipedia")
page = pywikibot.Page(site, "Geleen")
revs = page.revisions(content=True)
site = pywikibot.Site("en", "wikipedia")
page = pywikibot.Page(site, "pagename")
revs = page.revisions(content=True)
---------------------------------------------------------------------------
NoPage                                    Traceback (most recent call last)
<ipython-input-12-efa1b4cedab0> in <module>()
      1 site = pywikibot.Site("en", "wikipedia")
      2 page = pywikibot.Page(site, "pagename")
----> 3 revs = page.revisions(content=True)

/srv/paws/pwb/pywikibot/tools/__init__.py in wrapper(*__args, **__kw)
   1518                              cls, depth)
   1519                     del __kw[old_arg]
-> 1520             return obj(*__args, **__kw)
   1521 
   1522         if not __debug__:

/srv/paws/pwb/pywikibot/page.py in revisions(self, reverse, total, content, rollback, starttime, endtime)
   1698         self.site.loadrevisions(self, getText=content, rvdir=reverse,
   1699                                 starttime=starttime, endtime=endtime,
-> 1700                                 total=total, rollback=rollback)
   1701         return (self._revisions[rev] for rev in
   1702                 sorted(self._revisions, reverse=not reverse)[:total])

/srv/paws/pwb/pywikibot/site.py in loadrevisions(self, page, getText, revids, startid, endid, starttime, endtime, rvdir, user, excludeuser, section, sysop, step, total, rollback)
   4072                 raise InconsistentTitleReceived(page, pagedata['title'])
   4073             if "missing" in pagedata:
-> 4074                 raise NoPage(page)
   4075             api.update_page(page, pagedata, rvgen.props)
   4076 

NoPage: Page [[en:Pagename]] doesn't exist.
site = pywikibot.Site("en", "wikipedia")
page = pywikibot.Page(site, "geleen")
revs = page.revisions(content=True)
site = pywikibot.Site("en", "wikipedia")
page = pywikibot.Page(site, Geleen)
revs = page.revisions(content=True)
---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-18-24a3c307c06f> in <module>()
      1 site = pywikibot.Site("en", "wikipedia")
----> 2 page = pywikibot.Page(site, Geleen)
      3 revs = page.revisions(content=True)

NameError: name 'Geleen' is not defined
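Geleen without quotation marks is interpreted as a Python variable, hence the NameError; the title has to be passed as a string:

page = pywikibot.Page(site, "Geleen")          # quotes make it a string literal, not a name
revs = page.revisions(content=True)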
#import pywikibot
site=pywikibot.Site('nl')
page=pywikibot.Page(site,'Schaar')
for onelink in page.linkedPages():
  print(onelink)
WARNING: API error mwoauth-invalid-authorization-invalid-user: The authorization headers in your request are for a user that does not exist here
---------------------------------------------------------------------------
NoUsername                                Traceback (most recent call last)
<ipython-input-22-b5ea766606bc> in <module>()
      1 #import pywikibot
      2 site=pywikibot.Site('nl')
----> 3 page=pywikibot.Page(site,'Schaar')
      4 for onelink in page.linkedPages():
      5   print(onelink)

/srv/paws/pwb/pywikibot/tools/__init__.py in wrapper(*__args, **__kw)
   1518                              cls, depth)
   1519                     del __kw[old_arg]
-> 1520             return obj(*__args, **__kw)
   1521 
   1522         if not __debug__:

/srv/paws/pwb/pywikibot/page.py in __init__(self, source, title, ns)
   2238                 raise ValueError(u'Title must be specified and not empty '
   2239                                  'if source is a Site.')
-> 2240         super(Page, self).__init__(source, title, ns)
   2241 
   2242     @property

/srv/paws/pwb/pywikibot/page.py in __init__(self, source, title, ns)
    199 
    200         if isinstance(source, pywikibot.site.BaseSite):
--> 201             self._link = Link(title, source=source, defaultNamespace=ns)
    202             self._revisions = {}
    203         elif isinstance(source, Page):

/srv/paws/pwb/pywikibot/page.py in __init__(self, text, source, defaultNamespace)
   5363         # See bug T104864, defaultNamespace might have been deleted.
   5364         try:
-> 5365             self._defaultns = self._source.namespaces[defaultNamespace]
   5366         except KeyError:
   5367             self._defaultns = defaultNamespace

/srv/paws/pwb/pywikibot/site.py in namespaces(self)
   1014         """Return dict of valid namespaces on this wiki."""
   1015         if not hasattr(self, '_namespaces'):
-> 1016             self._namespaces = NamespacesDict(self._build_namespaces())
   1017         return self._namespaces
   1018 

/srv/paws/pwb/pywikibot/site.py in _build_namespaces(self)
   2623         # For versions lower than 1.14, APISite needs to override
   2624         # the defaults defined in Namespace.
-> 2625         is_mw114 = MediaWikiVersion(self.version()) >= MediaWikiVersion('1.14')
   2626 
   2627         for nsdata in self.siteinfo.get('namespaces', cache=False).values():

/srv/paws/pwb/pywikibot/site.py in version(self)
   2737             try:
   2738                 version = self.siteinfo.get('generator',
-> 2739                                             expiry=1).split(' ')[1]
   2740             except pywikibot.data.api.APIError:
   2741                 # May occur if you are not logged in (no API read permissions).

/srv/paws/pwb/pywikibot/site.py in get(self, key, get_default, cache, expiry)
   1657                 elif not Siteinfo._is_expired(cached[1], expiry):
   1658                     return copy.deepcopy(cached[0])
-> 1659         preloaded = self._get_general(key, expiry)
   1660         if not preloaded:
   1661             preloaded = self._get_siteinfo(key, expiry)[key]

/srv/paws/pwb/pywikibot/site.py in _get_general(self, key, expiry)
   1603                     .format("', '".join(props)), _logger)
   1604             props += ['general']
-> 1605             default_info = self._get_siteinfo(props, expiry)
   1606             for prop in props:
   1607                 self._cache[prop] = default_info[prop]

/srv/paws/pwb/pywikibot/site.py in _get_siteinfo(self, prop, expiry)
   1528             # warnings are handled later
   1529             request._warning_handler = warn_handler
-> 1530             data = request.submit()
   1531         except api.APIError as e:
   1532             if e.code == 'siunknown_siprop':

/srv/paws/pwb/pywikibot/data/api.py in submit(self)
   2347         cached_available = self._load_cache()
   2348         if not cached_available:
-> 2349             self._data = super(CachedRequest, self).submit()
   2350             self._write_cache(self._data)
   2351         else:

/srv/paws/pwb/pywikibot/data/api.py in submit(self)
   2176                     continue
   2177                 raise NoUsername('Failed OAuth authentication for %s: %s'
-> 2178                                  % (self.site, info))
   2179             if code == 'cirrussearch-too-busy-error':  # T170647
   2180                 self.wait()

NoUsername: Failed OAuth authentication for wikipedia:nl: The authorization headers in your request are for a user that does not exist here
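This last failure is about authentication rather than the code: according to the error message, the OAuth identity used by this PAWS notebook belongs to a user that does not exist on nl.wikipedia, so every API request there is rejected. The same loop can be tried on the test wiki that worked at the top of the notebook (assuming the notebook stays authorised there):

site = pywikibot.Site('test', 'wikipedia')     # wiki where the earlier cells authenticated successfully
page = pywikibot.Page(site, 'Test')
for onelink in page.linkedPages():
    print(onelink)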
 @deprecated_args(throttle=None,
                     change_edit_time=None,
                     expandtemplates=None)
    def get(self, force=False, get_redirect=False, sysop=False):
        """
        Return the wiki-text of the page.

        This will retrieve the page from the server if it has not been
        retrieved yet, or if force is True. This can raise the following
        exceptions that should be caught by the calling code:

        @exception NoPage:         The page does not exist
        @exception IsRedirectPage: The page is a redirect. The argument of the
                                   exception is the title of the page it
                                   redirects to.
        @exception SectionError:   The section does not exist on a page with
                                   a # link

        @param force:           reload all page attributes, including errors.
        @param get_redirect:    return the redirect text, do not follow the
                                redirect, do not raise an exception.
        @param sysop:           if the user has a sysop account, use it to
                                retrieve this page

        @rtype: unicode
        """
        if force:
            del self.latest_revision_id
        try:
            self._getInternals(sysop)
        except pywikibot.IsRedirectPage:
            if not get_redirect:
                raise

        return self.latest_revision.text
  File "<ipython-input-30-e89661a4f349>", line 4
    def get(self, force=False, get_redirect=False, sysop=False):
    ^
IndentationError: unexpected indent
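The cell above pastes the source code of Page.get() (still indented as a method body), which is why Python reports an unexpected indent; the method is meant to be called on a Page object instead. A minimal sketch handling the exceptions its docstring lists:

page = pywikibot.Page(pywikibot.Site("en", "wikipedia"), "Geleen")
try:
    wikitext = page.get()                      # wiki-text of the page, per the docstring above
    print(wikitext[:200])
except pywikibot.NoPage:
    print("The page does not exist")
except pywikibot.IsRedirectPage as err:
    print("Redirect:", err)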
  def lastNonBotUser(self):
        """
        Return name or IP address of last human/non-bot user to edit page.

        Determine the most recent human editor out of the last revisions.
        If it was not able to retrieve a human user, returns None.

        If the edit was done by a bot which is no longer flagged as 'bot',
        i.e. which is not returned by Site.botusers(), it will be returned
        as a non-bot edit.

        @rtype: unicode
        """
        if hasattr(self, '_lastNonBotUser'):
            return self._lastNonBotUser

        self._lastNonBotUser = None
        for entry in self.revisions():
            if entry.user and (not self.site.isBot(entry.user)):
                self._lastNonBotUser = entry.user
                break

        return self._lastNonBotUser
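lastNonBotUser() is likewise an existing Page method, so rather than pasting its source it can simply be called:

page = pywikibot.Page(pywikibot.Site("en", "wikipedia"), "Geleen")
print(page.lastNonBotUser())                   # name or IP of the last human editor, per the docstring above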
import urllib.request
import urllib.parse
import re


def GetRevisions(pageTitle):
    # query the MediaWiki API for the revision history of one page, 500 revisions per request
    url = ("https://en.wikipedia.org/w/api.php?action=query&format=xml"
           "&prop=revisions&rvlimit=500&titles=" + urllib.parse.quote(pageTitle))
    revisions = []                                        # list of all accumulated revisions
    next_param = ''                                       # continuation parameter for the next request
    while True:
        # urllib.urlopen() does not exist in Python 3; use urllib.request.urlopen()
        # and decode the bytes response before running regular expressions on it
        response = urllib.request.urlopen(url + next_param).read().decode('utf-8')
        revisions += re.findall('<rev [^>]*>', response)  # adds all revisions from the current request to the list

        cont = re.search('<continue rvcontinue="([^"]+)"', response)
        if not cont:                                      # break the loop if 'continue' element missing
            break

        next_param = "&rvcontinue=" + cont.group(1)       # revision id from which to start the next request

    return revisions
import urllib2
---------------------------------------------------------------------------
ModuleNotFoundError                       Traceback (most recent call last)
<ipython-input-42-846c1dbd158b> in <module>()
----> 1 import urllib2

ModuleNotFoundError: No module named 'urllib2'
import urllib.
  File "<ipython-input-46-5d06292a3743>", line 1
    import urllib.
                  ^
SyntaxError: invalid syntax
import urllib.request
htmlfile = urllib.request.urlopen("http://google.com")
htmltext = htmlfile.read()
print(htmltext)
b'<!doctype html><html itemscope="" itemtype="http://schema.org/WebPage" lang="en"><head><meta content="Search the world\'s information, including webpages, images, videos and more. ..." name="description"> ... <title>Google</title> ... </html>'
(output truncated; the full byte string is several thousand characters of raw Google homepage HTML, styles and inline script)
import urllib.request
import re

def GetRevisions(Trains):
    url = "https://en.wikipedia.org/w/api.php?action=query&format=xml&prop=revisions&rvlimit=500&titles=" + Trains
    revisions = []                                        # list of all accumulated revisions
    next_param = ''                                       # continuation parameter for the next request
    while True:
        # urllib.request.urlopen() replaces the Python 2 urllib2.urlopen();
        # decode the bytes so the regular expressions below get a str
        response = urllib.request.urlopen(url + next_param).read().decode('utf-8')
        revisions += re.findall('<rev [^>]*>', response)  # adds all revisions from the current request to the list

        cont = re.search('<continue rvcontinue="([^"]+)"', response)
        if not cont:                                      # break the loop if 'continue' element missing
            break

        next_param = "&rvcontinue=" + cont.group(1)       # revision id from which to start the next request

    return revisions
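A usage sketch of the corrected function; any existing title works, "Coffee" just repeats the earlier attempt:

revisions = GetRevisions("Coffee")
print(len(revisions))                          # number of <rev .../> elements collected
print(revisions[0])                            # the most recent revision, since the API returns newest first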
import json

from wikitools import wiki, api

site = wiki.Wiki("http://en.wikipedia.org/w/api.php")
names = ["Sherrod Brown","Maria Cantwell"]
allMembers = []
for name in names:
    params = {'action':'query',
        'titles': name,
        'prop':'revisions',
        'rvprop':'ids|flags|timestamp|userid|user|size|comment|tags',
        'rvlimit':'100'
    }
    req = api.APIRequest(site, params)
    res = req.query(querycontinue=False)
    allMembers.append(res)

with open('/Applications/MAMP/htdocs/python/wikipedia-1.4.0/data/wiki-leg.json', 'w') as outfile:
    json.dump(allMembers, outfile, indent=2)
---------------------------------------------------------------------------
ModuleNotFoundError                       Traceback (most recent call last)
<ipython-input-55-fd541563f23e> in <module>()
      1 import json
      2 
----> 3 from wikitools import wiki, api
      4 
      5 site = wiki.Wiki("http://en.wikipedia.org/w/api.php")

ModuleNotFoundError: No module named 'wikitools'
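wikitools is not installed in this environment (and its older releases are Python 2 only). A sketch of the same query using the requests library instead, assuming requests is available in the notebook; the parameters are the ones from the cell above, with format=json added and the output path shortened:

import json
import requests

API = "https://en.wikipedia.org/w/api.php"
names = ["Sherrod Brown", "Maria Cantwell"]
allMembers = []
for name in names:
    params = {'action': 'query',
              'format': 'json',
              'titles': name,
              'prop': 'revisions',
              'rvprop': 'ids|flags|timestamp|userid|user|size|comment|tags',
              'rvlimit': '100'}
    res = requests.get(API, params=params).json()   # up to 100 revisions of this page
    allMembers.append(res)

with open('wiki-leg.json', 'w') as outfile:          # shortened path; adjust as needed
    json.dump(allMembers, outfile, indent=2)

The pywikibot page.revisions() call shown earlier in the notebook would work just as well for collecting the same metadata.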