import pywikibot
site = pywikibot.Site('americhino', 'wikia')
WARNING: /srv/paws/lib/python3.6/site-packages/ipykernel_launcher.py:1: UserWarning: Site wikia:wikia instantiated using different code "americhino"
  """Entry point for launching an IPython kernel.

site = pywikibot.Site('wikia')
---------------------------------------------------------------------------
UnknownSite                               Traceback (most recent call last)
<ipython-input-3-c9141119b31e> in <module>()
----> 1 site = pywikibot.Site('wikia')

/srv/paws/pwb/pywikibot/__init__.py in Site(code, fam, user, sysop, interface, url)
   1273     key = '%s:%s:%s:%s' % (interface.__name__, fam, code, user)
   1274     if key not in _sites or not isinstance(_sites[key], interface):
-> 1275         _sites[key] = interface(code=code, fam=fam, user=user, sysop=sysop)
   1276         debug(u"Instantiated %s object '%s'"
   1277               % (interface.__name__, _sites[key]), _logger)

/srv/paws/pwb/pywikibot/site.py in __init__(self, code, fam, user, sysop)
   1848     def __init__(self, code, fam=None, user=None, sysop=None):
   1849         """Initializer."""
-> 1850         BaseSite.__init__(self, code, fam, user, sysop)
   1851         self._msgcache = {}
   1852         self._loginstatus = LoginStatus.NOT_ATTEMPTED

/srv/paws/pwb/pywikibot/site.py in __init__(self, code, fam, user, sysop)
    776             else:
    777                 raise UnknownSite(u"Language '%s' does not exist in family %s"
--> 778                                   % (self.__code, self.__family.name))
    779 
    780         self._username = [normalize_username(user), normalize_username(sysop)]

UnknownSite: Language 'wikia' does not exist in family wikipedia
site = pywikibot.Site('test', 'wikipedia')
site
APISite("test", "wikipedia")
page = pywikibot.Page(site, 'test')
page.exists()
True
page.exists()
True
page.text
"Hello added [[test]] world's world.....🕂🎀 It out the Me4ec y☠☠🎇♥\nwtf"
page.text
"Hello added [[test]] world's world.....🕂🎀 It out the Me4ec y☠☠🎇♥\nwtf"
page.text = "lol"
page.text
'lol'
page.save()
Page [[Test]] saved
help(pywikibot.Page.save)
Help on function save in module pywikibot.page:

save(self, summary=None, watch=None, minor=True, botflag=None, force=False, asynchronous=False, callback=None, apply_cosmetic_changes=None, quiet=False, **kwargs, comment='[deprecated name of summary]', sysop=NotImplemented, async='[deprecated name of asynchronous]')
    Save the current contents of page's text to the wiki.
    
    @param summary: The edit summary for the modification (optional, but
        most wikis strongly encourage its use)
    @type summary: unicode
    @param watch: Specify how the watchlist is affected by this edit, set
        to one of "watch", "unwatch", "preferences", "nochange":
        * watch: add the page to the watchlist
        * unwatch: remove the page from the watchlist
        * preferences: use the preference settings (Default)
        * nochange: don't change the watchlist
        If None (default), follow bot account's default settings
    
        For backward compatibility watch parameter may also be boolean:
        if True, add or if False, remove this Page to/from bot
        user's watchlist.
    @type watch: string, bool (deprecated) or None
    @param minor: if True, mark this edit as minor
    @type minor: bool
    @param botflag: if True, mark this edit as made by a bot (default:
        True if user has bot status, False if not)
    @param force: if True, ignore botMayEdit() setting
    @type force: bool
    @param asynchronous: if True, launch a separate thread to save
        asynchronously
    @param callback: a callable object that will be called after the
        page put operation. This object must take two arguments: (1) a
        Page object, and (2) an exception instance, which will be None
        if the page was saved successfully. The callback is intended for
        use by bots that need to keep track of which saves were
        successful.
    @param apply_cosmetic_changes: Overwrites the cosmetic_changes
        configuration value to this value unless it's None.
    @type apply_cosmetic_changes: bool or None
    @param quiet: enable/disable successful save operation message;
        defaults to False.
        In asynchronous mode, if True, it is up to the calling bot to
        manage the output e.g. via callback.
    @type quiet: bool

help(pywikibot.site)
Help on module pywikibot.site in pywikibot:

NAME
    pywikibot.site - Objects representing MediaWiki sites (wikis).

DESCRIPTION
    This module also includes functions to load families, which are
    groups of wikis on the same topic in different languages.

CLASSES
    builtins.object
        LoginStatus
        TokenWallet
    collections.abc.Container(builtins.object)
        Siteinfo
    collections.abc.Iterable(builtins.object)
        Namespace(collections.abc.Iterable, pywikibot.tools.ComparableMixin, pywikibot.tools.UnicodeMixin)
    collections.abc.Mapping(collections.abc.Collection)
        NamespacesDict(collections.abc.Mapping, pywikibot.tools.SelfCallMixin)
    pywikibot.exceptions.Error(pywikibot.tools.UnicodeMixin, builtins.Exception)
        PageInUse
    pywikibot.tools.ComparableMixin(builtins.object)
        BaseSite
            APISite
                DataSite
            NonMWAPISite
            RemovedSite
        Namespace(collections.abc.Iterable, pywikibot.tools.ComparableMixin, pywikibot.tools.UnicodeMixin)
    pywikibot.tools.SelfCallMixin(builtins.object)
        NamespacesDict(collections.abc.Mapping, pywikibot.tools.SelfCallMixin)
    pywikibot.tools.UnicodeMixin(builtins.object)
        Namespace(collections.abc.Iterable, pywikibot.tools.ComparableMixin, pywikibot.tools.UnicodeMixin)
    
    class APISite(BaseSite)
     |  API interface to MediaWiki site.
     |  
     |  Do not instantiate directly; use pywikibot.Site function.
     |  
     |  Method resolution order:
     |      APISite
     |      BaseSite
     |      pywikibot.tools.ComparableMixin
     |      builtins.object
     |  
     |  Methods defined here:
     |  
     |  __getstate__(self)
     |      Remove TokenWallet before pickling, for security reasons.
     |  
     |  __init__(self, code, fam=None, user=None, sysop=None)
     |      Initializer.
     |  
     |  __setstate__(self, attrs)
     |      Restore things removed in __getstate__.
     |  
     |  allcategories(self, start='!', prefix='', total=None, reverse=False, content=False, step=NotImplemented)
     |      Iterate categories used (which need not have a Category page).
     |      
     |      Iterator yields Category objects. Note that, in practice, links that
     |      were found on pages that have been deleted may not have been removed
     |      from the database table, so this method can return false positives.
     |      
     |      @param start: Start at this category title (category need not exist).
     |      @param prefix: Only yield categories starting with this string.
     |      @param reverse: if True, iterate in reverse Unicode lexicographic
     |          order (default: iterate in forward order)
     |      @param content: if True, load the current content of each iterated page
     |          (default False); note that this means the contents of the category
     |          description page, not the pages that are members of the category
     |  
     |  allimages(self, start='!', prefix='', minsize=None, maxsize=None, reverse=False, sha1=None, sha1base36=None, total=None, content=False, step=NotImplemented)
     |      Iterate all images, ordered by image title.
     |      
     |      Yields FilePages, but these pages need not exist on the wiki.
     |      
     |      @param start: start at this title (name need not exist)
     |      @param prefix: only iterate titles starting with this substring
     |      @param minsize: only iterate images of at least this many bytes
     |      @param maxsize: only iterate images of no more than this many bytes
     |      @param reverse: if True, iterate in reverse lexicographic order
     |      @param sha1: only iterate image (it is theoretically possible there
     |          could be more than one) with this sha1 hash
     |      @param sha1base36: same as sha1 but in base 36
     |      @param content: if True, load the current content of each iterated page
     |          (default False); note that this means the content of the image
     |          description page, not the image itself
     |  
     |  alllinks(self, start='!', prefix='', namespace=0, unique=False, fromids=False, total=None, step=NotImplemented)
     |      Iterate all links to pages (which need not exist) in one namespace.
     |      
     |      Note that, in practice, links that were found on pages that have
     |      been deleted may not have been removed from the links table, so this
     |      method can return false positives.
     |      
     |      @param start: Start at this title (page need not exist).
     |      @param prefix: Only yield pages starting with this string.
     |      @param namespace: Iterate pages from this (single) namespace
     |      @type namespace: int or Namespace
     |      @param unique: If True, only iterate each link title once (default:
     |          iterate once for each linking page)
     |      @param fromids: if True, include the pageid of the page containing
     |          each link (default: False) as the '_fromid' attribute of the Page;
     |          cannot be combined with unique
     |      @raises KeyError: the namespace identifier was not resolved
     |      @raises TypeError: the namespace identifier has an inappropriate
     |          type such as bool, or an iterable with more than one namespace
     |  
     |  allpages(self, start='!', prefix='', namespace=0, filterredir=None, filterlanglinks=None, minsize=None, maxsize=None, protect_type=None, protect_level=None, reverse=False, total=None, content=False, throttle=NotImplemented, limit='[deprecated name of total]', step=NotImplemented, includeredirects='[deprecated name of filterredir]')
     |      Iterate pages in a single namespace.
     |      
     |      @param start: Start at this title (page need not exist).
     |      @param prefix: Only yield pages starting with this string.
     |      @param namespace: Iterate pages from this (single) namespace
     |      @type namespace: int or Namespace.
     |      @param filterredir: if True, only yield redirects; if False (and not
     |          None), only yield non-redirects (default: yield both)
     |      @param filterlanglinks: if True, only yield pages with language links;
     |          if False (and not None), only yield pages without language links
     |          (default: yield both)
     |      @param minsize: if present, only yield pages at least this many
     |          bytes in size
     |      @param maxsize: if present, only yield pages at most this many bytes
     |          in size
     |      @param protect_type: only yield pages that have a protection of the
     |          specified type
     |      @type protect_type: str
     |      @param protect_level: only yield pages that have protection at this
     |          level; can only be used if protect_type is specified
     |      @param reverse: if True, iterate in reverse Unicode lexicographic
     |          order (default: iterate in forward order)
     |      @param content: if True, load the current content of each iterated page
     |          (default False)
     |      @raises KeyError: the namespace identifier was not resolved
     |      @raises TypeError: the namespace identifier has an inappropriate
     |          type such as bool, or an iterable with more than one namespace
     |  
     |  allusers(self, start='!', prefix='', group=None, total=None, step=NotImplemented)
     |      Iterate registered users, ordered by username.
     |      
     |      Iterated values are dicts containing 'name', 'editcount',
     |      'registration', and (sometimes) 'groups' keys. 'groups' will be
     |      present only if the user is a member of at least 1 group, and will
     |      be a list of unicodes; all the other values are unicodes and should
     |      always be present.
     |      
     |      @param start: start at this username (name need not exist)
     |      @param prefix: only iterate usernames starting with this substring
     |      @param group: only iterate users that are members of this group
     |      @type group: str
     |  
     |  ancientpages(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Pages, datestamps from Special:Ancientpages.
     |      
     |      @param total: number of pages to return
     |  
     |  assert_valid_iter_params(self, msg_prefix, start, end, reverse)
     |      Validate iterating API parameters.
     |  
     |  blocks(self, starttime=None, endtime=None, reverse=False, blockids=None, users=None, iprange=None, total=None, step=NotImplemented)
     |      Iterate all current blocks, in order of creation.
     |      
     |      The iterator yields dicts containing keys corresponding to the
     |      block properties.
     |      
     |      @see: U{https://www.mediawiki.org/wiki/API:Blocks}
     |      
     |      @note: logevents only logs user blocks, while this method
     |          iterates all blocks including IP ranges.
     |      @note: C{userid} key will be given for mw 1.18+ only
     |      @note: C{iprange} parameter cannot be used together with C{users}.
     |      
     |      @param starttime: start iterating at this Timestamp
     |      @type starttime: pywikibot.Timestamp
     |      @param endtime: stop iterating at this Timestamp
     |      @type endtime: pywikibot.Timestamp
     |      @param reverse: if True, iterate oldest blocks first (default: newest)
     |      @type reverse: bool
     |      @param blockids: only iterate blocks with these id numbers. Numbers
     |          must be separated by '|' if given by a basestring.
     |      @type blockids: basestring, tuple or list
     |      @param users: only iterate blocks affecting these usernames or IPs
     |      @type users: basestring, tuple or list
     |      @param iprange: a single IP or an IP range. Ranges broader than
     |          IPv4/16 or IPv6/19 are not accepted.
     |      @type iprange: str
     |      @param total: total amount of block entries
     |      @type total: int
     |  
     |  blockuser(self, user, expiry, reason, anononly=True, nocreate=True, autoblock=True, noemail=False, reblock=False)
     |      Block a user for certain amount of time and for a certain reason.
     |      
     |      @param user: The username/IP to be blocked without a namespace.
     |      @type user: L{pywikibot.User}
     |      @param expiry: The length or date/time when the block expires. If
     |          'never', 'infinite', 'indefinite' it never does. If the value is
     |          given as a basestring it's parsed by php's strtotime function:
     |      
     |              U{http://php.net/manual/en/function.strtotime.php}
     |      
     |          The relative format is described there:
     |      
     |              U{http://php.net/manual/en/datetime.formats.relative.php}
     |      
     |          It is recommended to not use a basestring if possible to be
     |          independent of the API.
     |      @type expiry: Timestamp/datetime (absolute),
     |          basestring (relative/absolute) or False ('never')
     |      @param reason: The reason for the block.
     |      @type reason: basestring
     |      @param anononly: Disable anonymous edits for this IP.
     |      @type anononly: boolean
     |      @param nocreate: Prevent account creation.
     |      @type nocreate: boolean
     |      @param autoblock: Automatically block the last used IP address and all
     |          subsequent IP addresses from which this account logs in.
     |      @type autoblock: boolean
     |      @param noemail: Prevent user from sending email through the wiki.
     |      @type noemail: boolean
     |      @param reblock: If the user is already blocked, overwrite the existing
     |          block.
     |      @type reblock: boolean
     |      @return: The data retrieved from the API request.
     |      @rtype: dict
     |  
     |  botusers(self, total=None, step=NotImplemented)
     |      Iterate bot users.
     |      
     |      Iterated values are dicts containing 'name', 'userid', 'editcount',
     |      'registration', and 'groups' keys. 'groups' will be present only if
     |      the user is a member of at least 1 group, and will be a list of
     |      unicodes; all the other values are unicodes and should always be
     |      present.
     |  
     |  broken_redirects(self, total=None, step=NotImplemented)
     |      Yield Pages with broken redirects from Special:BrokenRedirects.
     |      
     |      @param total: number of pages to return
     |  
     |  case(self)
     |      Deprecated; use siteinfo or Namespace instance instead.
     |      
     |      Return this site's capitalization rule.
     |  
     |  categories(self, number=10, repeat=False)
     |      DEPRECATED.
     |  
     |  categoryinfo(self, category)
     |      Retrieve data on contents of category.
     |  
     |  categorymembers(self, category, namespaces=None, sortby=None, reverse=False, starttime=None, endtime=None, startsort=None, endsort=None, total=None, content=False, member_type=None, startprefix=None, endprefix=None, step=NotImplemented)
     |      Iterate members of specified category.
     |      
     |      @param category: The Category to iterate.
     |      @param namespaces: If present, only return category members from
     |          these namespaces. To yield subcategories or files, use
     |          parameter member_type instead.
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param sortby: determines the order in which results are generated,
     |          valid values are "sortkey" (default, results ordered by category
     |          sort key) or "timestamp" (results ordered by time page was
     |          added to the category)
     |      @type sortby: str
     |      @param reverse: if True, generate results in reverse order
     |          (default False)
     |      @param starttime: if provided, only generate pages added after this
     |          time; not valid unless sortby="timestamp"
     |      @type starttime: pywikibot.Timestamp
     |      @param endtime: if provided, only generate pages added before this
     |          time; not valid unless sortby="timestamp"
     |      @param startsort: if provided, only generate pages that have a
     |          sortkey >= startsort; not valid if sortby="timestamp"
     |          (Deprecated in MW 1.24)
     |      @type startsort: str
     |      @param endsort: if provided, only generate pages that have a
     |          sortkey <= endsort; not valid if sortby="timestamp"
     |          (Deprecated in MW 1.24)
     |      @type endsort: str
     |      @param startprefix: if provided, only generate pages >= this title
     |          lexically; not valid if sortby="timestamp"; overrides "startsort"
     |          (requires MW 1.18+)
     |      @type startprefix: str
     |      @param endprefix: if provided, only generate pages < this title
     |          lexically; not valid if sortby="timestamp"; overrides "endsort"
     |          (requires MW 1.18+)
     |      @type endprefix: str
     |      @param content: if True, load the current content of each iterated page
     |          (default False)
     |      @type content: bool
     |      @param member_type: member type; if member_type includes 'page' and is
     |          used in conjunction with sortby="timestamp", the API may limit
     |          results to only pages in the first 50 namespaces.
     |      @type member_type: str or iterable of str; values: page, subcat, file
     |      
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises NotImplementedError: startprefix or endprefix parameters are
     |          given but site.version is less than 1.18.
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  checkBlocks(self, sysop=False)
     |      Raise an exception when the user is blocked. DEPRECATED.
     |      
     |      @param sysop: If true, log in to sysop account (if available)
     |      @type sysop: bool
     |      @raises UserBlocked: The logged in user/sysop account is blocked.
     |  
     |  compare(self, old, diff)
     |      Corresponding method to the 'action=compare' API action.
     |      
     |      See: https://en.wikipedia.org/w/api.php?action=help&modules=compare
     |      Use pywikibot.diff's html_comparator() method to parse result.
     |      @param old: starting revision ID, title, Page, or Revision
     |      @type old: int, str, pywikibot.Page, or pywikibot.Page.Revision
     |      @param diff: ending revision ID, title, Page, or Revision
     |      @type diff: int, str, pywikibot.Page, or pywikibot.Page.Revision
     |      @return: Returns an HTML string of a diff between two revisions.
     |      @rtype: str
     |  
     |  create_new_topic(self, page, title, content, format)
     |      Create a new topic on a Flow board.
     |      
     |      @param page: A Flow board
     |      @type page: Board
     |      @param title: The title of the new topic (must be in plaintext)
     |      @type title: unicode
     |      @param content: The content of the topic's initial post
     |      @type content: unicode
     |      @param format: The content format of the value supplied for content
     |      @type format: unicode (either 'wikitext' or 'html')
     |      @return: The metadata of the new topic
     |      @rtype: dict
     |  
     |  data_repository(self)
     |      Return the data repository connected to this site.
     |      
     |      @return: The data repository if one is connected or None otherwise.
     |      @rtype: DataSite or None
     |  
     |  dbName(self)
     |      Return this site's internal id.
     |  
     |  deadendpages(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Page objects retrieved from Special:Deadendpages.
     |      
     |      @param total: number of pages to return
     |  
     |  delete_post(self, post, reason)
     |      Delete a Flow post.
     |      
     |      @param post: A Flow post
     |      @type post: Post
     |      @param reason: The reason to delete the post
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  delete_topic(self, page, reason)
     |      Delete a Flow topic.
     |      
     |      @param page: A Flow topic
     |      @type page: Topic
     |      @param reason: The reason to delete the topic
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  deletedrevs(self, page, start=None, end=None, reverse=None, content=False, total=None, step=NotImplemented, get_text='[deprecated name of content]')
     |      Iterate deleted revisions.
     |      
     |      Each value returned by the iterator will be a dict containing the
     |      'title' and 'ns' keys for a particular Page and a 'revisions' key
     |      whose value is a list of revisions in the same format as
     |      recentchanges (plus a 'content' element if requested). If get_text
     |      is true, the toplevel dict will contain a 'token' key as well.
     |      
     |      @param page: The page to check for deleted revisions
     |      @param start: Iterate revisions starting at this Timestamp
     |      @param end: Iterate revisions ending at this Timestamp
     |      @param reverse: Iterate oldest revisions first (default: newest)
     |      @param content: If True, retrieve the content of each revision and
     |          an undelete token
     |  
     |  deletepage(self, page, reason, summary='[deprecated name of reason]')
     |      Delete page from the wiki. Requires appropriate privilege level.
     |      
     |      @param page: Page to be deleted.
     |      @type page: Page
     |      @param reason: Deletion reason.
     |      @type reason: basestring
     |  
     |  double_redirects(self, total=None, step=NotImplemented)
     |      Yield Pages with double redirects from Special:DoubleRedirects.
     |      
     |      @param total: number of pages to return
     |  
     |  editpage(self, page, summary=None, minor=True, notminor=False, bot=True, recreate=True, createonly=False, nocreate=False, watch=None, **kwargs)
     |      Submit an edit to be saved to the wiki.
     |      
     |      @param page: The Page to be saved.
     |          By default its .text property will be used
     |          as the new text to be saved to the wiki
     |      @param summary: the edit summary
     |      @param minor: if True (default), mark edit as minor
     |      @param notminor: if True, override account preferences to mark edit
     |          as non-minor
     |      @param recreate: if True (default), create new page even if this
     |          title has previously been deleted
     |      @param createonly: if True, raise an error if this title already
     |          exists on the wiki
     |      @param nocreate: if True, raise an error if the page does not exist
     |      @param watch: Specify how the watchlist is affected by this edit, set
     |          to one of "watch", "unwatch", "preferences", "nochange":
     |          * watch: add the page to the watchlist
     |          * unwatch: remove the page from the watchlist
     |          The following settings are supported by mw >= 1.16 only
     |          * preferences: use the preference settings (default)
     |          * nochange: don't change the watchlist
     |      @param bot: if True, mark edit with bot flag
     |      @kwarg text: Overrides Page.text
     |      @type text: unicode
     |      @kwarg section: Edit an existing numbered section or
     |          a new section ('new')
     |      @type section: int or str
     |      @kwarg prependtext: Prepend text. Overrides Page.text
     |      @type text: unicode
     |      @kwarg appendtext: Append text. Overrides Page.text.
     |      @type text: unicode
     |      @kwarg undo: Revision id to undo. Overrides Page.text
     |      @type undo: int
     |      @return: True if edit succeeded, False if it failed
     |      @rtype: bool
     |      @raises Error: No text to be saved
     |      @raises NoPage: recreate is disabled and page does not exist
     |      @raises CaptchaError: config.solve_captcha is False and saving
     |          the page requires solving a captcha
     |  
     |  expand_text(self, text, title=None, includecomments=None, string='[deprecated name of text]')
     |      Parse the given text for preprocessing and rendering.
     |      
     |      e.g. expand templates and strip comments if includecomments
     |      parameter is not True. Keeps text inside
     |      <nowiki></nowiki> tags unchanged etc. Can be used to parse
     |      magic parser words like {{CURRENTTIMESTAMP}}.
     |      
     |      @param text: text to be expanded
     |      @type text: unicode
     |      @param title: page title without section
     |      @type title: unicode
     |      @param includecomments: if True do not strip comments
     |      @type includecomments: bool
     |      @rtype: unicode
     |  
     |  exturlusage(self, url=None, protocol='http', namespaces=None, total=None, content=False, step=NotImplemented)
     |      Iterate Pages that contain links to the given URL.
     |      
     |      @param url: The URL to search for (without the protocol prefix);
     |          this may include a '*' as a wildcard, only at the start of the
     |          hostname
     |      @param protocol: The protocol prefix (default: "http")
     |  
     |  forceLogin = call(*a, **kw)
     |  
     |  getExpandedString = call(*a, **kw)
     |  
     |  getFilesFromAnHash(self, hash_found=None)
     |      Return all files that have the same hash.
     |      
     |      DEPRECATED: Use L{APISite.allimages} instead using 'sha1'.
     |  
     |  getImagesFromAnHash(self, hash_found=None)
     |      Return all images that have the same hash.
     |      
     |      DEPRECATED: Use L{APISite.allimages} instead using 'sha1'.
     |  
     |  getPatrolToken(self, sysop=False)
     |      DEPRECATED: Get patrol token.
     |  
     |  getToken(self, getalways=True, getagain=False, sysop=False)
     |      DEPRECATED: Get edit token.
     |  
     |  get_parsed_page(self, page)
     |      Retrieve parsed text of the page using action=parse.
     |  
     |  get_property_names(self, force=False)
     |      Get property names for pages_with_property().
     |      
     |      @param force: force to retrieve userinfo ignoring cache
     |      @type force: bool
     |  
     |  get_searched_namespaces(self, force=False)
     |      Retrieve the default searched namespaces for the user.
     |      
     |      If no user is logged in, it returns the namespaces used by default.
     |      Otherwise it returns the user preferences. It caches the last result
     |      and returns it, if the username or login status hasn't changed.
     |      
     |      @param force: Whether the cache should be discarded.
     |      @return: The namespaces which are searched by default.
     |      @rtype: C{set} of L{Namespace}
     |  
     |  get_tokens(self, types, all=False)
     |      Preload one or multiple tokens.
     |      
     |      For all MediaWiki versions prior to 1.20, only one token can be
     |      retrieved at once.
     |      For MediaWiki versions since 1.24wmfXXX a new token
     |      system was introduced which reduced the amount of tokens available.
     |      Most of them were merged into the 'csrf' token. If the token type in
     |      the parameter is not known it will default to the 'csrf' token.
     |      
     |      The other token types available are:
     |       - deleteglobalaccount
     |       - patrol (*)
     |       - rollback
     |       - setglobalaccountstatus
     |       - userrights
     |       - watch
     |      
     |       (*) Patrol was added in v1.14.
     |           Until v1.16, the patrol token is same as the edit token.
     |           For v1.17-19, the patrol token must be obtained from the query
     |           list recentchanges.
     |      
     |      @param types: the types of token (e.g., "edit", "move", "delete");
     |          see API documentation for full list of types
     |      @type types: iterable
     |      @param all: load all available tokens, if None only if it can be done
     |          in one request.
     |      @type all: bool
     |      
     |      @return: a dict with retrieved valid tokens.
     |      @rtype: dict
     |  
     |  getcategoryinfo(self, category)
     |      Retrieve data on contents of category.
     |  
     |  getcurrenttime = call(*a, **kw)
     |  
     |  getcurrenttimestamp(self)
     |      Return the server time as a MediaWiki timestamp string.
     |      
     |      It calls L{server_time} first so it queries the server to get the
     |      current server time.
     |      
     |      @return: the server time
     |      @rtype: str (as 'yyyymmddhhmmss')
     |  
     |  getglobaluserinfo(self)
     |      Retrieve globaluserinfo from site and cache it.
     |      
     |      self._globaluserinfo will be a dict with the following keys and values:
     |      
     |        - id: user id (numeric str)
     |        - home: dbname of home wiki
     |        - registration: registration date as Timestamp
     |        - groups: list of groups (could be empty)
     |        - rights: list of rights (could be empty)
     |        - editcount: global editcount
     |  
     |  getmagicwords(self, word)
     |      Return list of localized "word" magic words for the site.
     |  
     |  getredirtarget(self, page)
     |      Return page object for the redirect target of page.
     |      
     |      @param page: page to search redirects for
     |      @type page: pywikibot.page.BasePage
     |      @return: redirect target of page
     |      @rtype: pywikibot.Page
     |      
     |      @raises IsNotRedirectPage: page is not a redirect
     |      @raises RuntimeError: no redirects found
     |      @raises CircularRedirect: page is a circular redirect
     |      @raises InterwikiRedirectPage: the redirect target is
     |          on another site
     |  
     |  getuserinfo(self, force=False)
     |      Retrieve userinfo from site and store in _userinfo attribute.
     |      
     |      self._userinfo will be a dict with the following keys and values:
     |      
     |        - id: user id (numeric str)
     |        - name: username (if user is logged in)
     |        - anon: present if user is not logged in
     |        - groups: list of groups (could be empty)
     |        - rights: list of rights (could be empty)
     |        - message: present if user has a new message on talk page
     |        - blockinfo: present if user is blocked (dict)
     |      
     |      U{https://www.mediawiki.org/wiki/API:Userinfo}
     |      
     |      @param force: force to retrieve userinfo ignoring cache
     |      @type force: bool
     |  
     |  globalusage(self, page, total=None)
     |      Iterate global image usage for a given FilePage.
     |      
     |      @param page: the page to return global image usage for.
     |      @type page: pywikibot.FilePage
     |      @param total: iterate no more than this number of pages in total.
     |      @raises TypeError: input page is not a FilePage.
     |      @raises SiteDefinitionError: Site could not be defined for a returned
     |          entry in API response.
     |  
     |  hasExtension(self, name, unknown=None)
     |      Deprecated; use has_extension instead.
     |      
     |      Determine whether extension `name` is loaded.
     |      
     |              Use L{has_extension} instead!
     |      
     |              @param name: The extension to check for, case insensitive
     |              @type name: str
     |              @param unknown: Old parameter which shouldn't be used anymore.
     |              @return: If the extension is loaded
     |              @rtype: bool
     |  
     |  has_all_mediawiki_messages(self, keys)
     |      Confirm that the site defines a set of MediaWiki messages.
     |      
     |      @param keys: names of MediaWiki messages
     |      @type keys: set of str
     |      
     |      @rtype: bool
     |  
     |  has_api(self)
     |      Deprecated.
     |      
     |      Return whether this site has an API.
     |  
     |  has_extension(self, name)
     |      Determine whether extension `name` is loaded.
     |      
     |      @param name: The extension to check for, case sensitive
     |      @type name: str
     |      @return: If the extension is loaded
     |      @rtype: bool
     |  
     |  has_group(self, group, sysop=False)
     |      Return true if and only if the user is a member of specified group.
     |      
     |      Possible values of 'group' may vary depending on wiki settings,
     |      but will usually include bot.
     |      U{https://www.mediawiki.org/wiki/API:Userinfo}
     |  
     |  has_mediawiki_message(self, key)
     |      Determine if the site defines a MediaWiki message.
     |      
     |      @param key: name of MediaWiki message
     |      @type key: str
     |      
     |      @rtype: bool
     |  
     |  has_right(self, right, sysop=False)
     |      Return true if and only if the user has a specific right.
     |      
     |      Possible values of 'right' may vary depending on wiki settings,
     |      but will usually include:
     |      
     |      * Actions: edit, move, delete, protect, upload
     |      * User levels: autoconfirmed, sysop, bot
     |      
     |      U{https://www.mediawiki.org/wiki/API:Userinfo}
     |  
     |  hide_post(self, post, reason)
     |      Hide a Flow post.
     |      
     |      @param post: A Flow post
     |      @type post: Post
     |      @param reason: The reason to hide the post
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  hide_topic(self, page, reason)
     |      Hide a Flow topic.
     |      
     |      @param page: A Flow topic
     |      @type page: Topic
     |      @param reason: The reason to hide the topic
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  image_repository(self)
     |      Return Site object for image repository e.g. commons.
     |  
     |  imageusage(self, image, namespaces=None, filterredir=None, total=None, content=False, step=NotImplemented)
     |      Iterate Pages that contain links to the given FilePage.
     |      
     |      @param image: the image to search for (FilePage need not exist on
     |          the wiki)
     |      @type image: pywikibot.FilePage
     |      @param namespaces: If present, only iterate pages in these namespaces
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param filterredir: if True, only yield redirects; if False (and not
     |          None), only yield non-redirects (default: yield both)
     |      @param content: if True, load the current content of each iterated page
     |          (default False)
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  isAllowed = call(*a, **kw)
     |  
     |  isBlocked = call(*a, **kw)
     |  
     |  isBot(self, username)
     |      Return True if username is a bot user.
     |  
     |  is_blocked(self, sysop=False)
     |      Return True when logged in user is blocked.
     |      
     |      To check whether a user can perform an action,
     |      the method has_right should be used.
     |      U{https://www.mediawiki.org/wiki/API:Userinfo}
     |      
     |      @param sysop: If true, log in to sysop account (if available)
     |      @type sysop: bool
     |      @rtype: bool
     |  
     |  is_data_repository(self)
     |      Return True if its data repository is itself.
     |  
     |  is_image_repository(self)
     |      Return True if Site object is the image repository.
     |  
     |  is_oauth_token_available(self)
     |      Check whether OAuth token is set for this site.
     |      
     |      @rtype: bool
     |  
     |  is_uploaddisabled(self)
     |      Return True if upload is disabled on site.
     |      
     |      When the version is at least 1.27wmf9, uses general siteinfo.
     |      If not called directly, it is cached by the first attempted
     |      upload action.
     |  
     |  language(self)
     |      Deprecated; use APISite.lang instead.
     |      
     |      Return the code for the language of this Site.
     |  
     |  linksearch(self, siteurl, limit=None, euprotocol=None)
     |      Deprecated; use Site().exturlusage instead.
     |      
     |      Backwards-compatible interface to exturlusage().
     |  
     |  linter_pages(self, lint_categories=None, total=None, namespaces=None, pageids=None, lint_from=None)
     |      Return a generator to pages containing linter errors.
     |      
     |      @param lint_categories: categories of lint errors
     |      @type lint_categories: an iterable that returns values (str),
     |          or a pipe-separated string of values.
     |      
     |      @param total: if not None, yielding this many items in total
     |      @type total: int
     |      
     |      @param namespaces: only iterate pages in these namespaces
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      
     |      @param pageids: only include lint errors from the specified pageids
     |      @type pageids: an iterable that returns pageids (str or int),
     |          or a comma- or pipe-separated string of pageids
     |          (e.g. '945097,1483753, 956608' or '945097|483753|956608')
     |      
     |      @param lint_from: Lint ID to start querying from
     |      @type lint_from: str representing digit or integer
     |      
     |      @return: pages with Linter errors.
     |      @rtype: Iterable[pywikibot.Page]
     |  
     |  list_to_text(self, args)
     |      Convert a list of strings into human-readable text.
     |      
     |      The MediaWiki messages 'and' and 'word-separator' are used as separator
     |      between the last two arguments.
     |      If more than two arguments are given, other arguments are
     |      joined using MediaWiki message 'comma-separator'.
     |      
     |      @param args: text to be expanded
     |      @type args: iterable of unicode
     |      
     |      @rtype: unicode
     |  
     |  live_version(self, force=False)
     |      Deprecated; use version() instead.
     |      
     |      Return the 'real' version number found on [[Special:Version]].
     |      
     |              By default the version number is cached for one day.
     |      
     |              @param force: If the version should be read always from the server and
     |                  never from the cache.
     |              @type force: bool
     |              @return: A tuple containing the major, minor version number and any
     |                  text after that. If an error occurred (0, 0, 0) is returned.
     |              @rtype: int, int, str
     |  
     |  load_board(self, page)
     |      Retrieve the data for a Flow board.
     |      
     |      @param page: A Flow board
     |      @type page: Board
     |      @return: A dict representing the board's metadata.
     |      @rtype: dict
     |  
     |  load_pages_from_pageids(self, pageids)
     |      Return a page generator from pageids.
     |      
     |      Pages are iterated in the same order as in the underlying pageids.
     |      
     |      Pageids are filtered and only one page is returned in case of
     |      duplicate pageids.
     |      
     |      @param pageids: an iterable that returns pageids (str or int),
     |          or a comma- or pipe-separated string of pageids
     |          (e.g. '945097,1483753, 956608' or '945097|483753|956608')
     |  
     |  load_post_current_revision(self, page, post_id, format)
     |      Retrieve the data for a post to a Flow topic.
     |      
     |      @param page: A Flow topic
     |      @type page: Topic
     |      @param post_id: The UUID of the Post
     |      @type post_id: unicode
     |      @param format: The content format used for the returned content
     |      @type format: unicode (either 'wikitext', 'html', or 'fixed-html')
     |      @return: A dict representing the post data for the given UUID.
     |      @rtype: dict
     |  
     |  load_topic(self, page, format)
     |      Retrieve the data for a Flow topic.
     |      
     |      @param page: A Flow topic
     |      @type page: Topic
     |      @param format: The content format to request the data in.
     |      @type format: str (either 'wikitext', 'html', or 'fixed-html')
     |      @return: A dict representing the topic's data.
     |      @rtype: dict
     |  
     |  load_topiclist(self, page, format='wikitext', limit=100, sortby='newest', toconly=False, offset=None, offset_id=None, reverse=False, include_offset=False)
     |      Retrieve the topiclist of a Flow board.
     |      
     |      @param page: A Flow board
     |      @type page: Board
     |      @param format: The content format to request the data in.
     |      @type format: str (either 'wikitext', 'html', or 'fixed-html')
     |      @param limit: The number of topics to fetch in each request.
     |      @type limit: int
     |      @param sortby: Algorithm to sort topics by.
     |      @type sortby: str (either 'newest' or 'updated')
     |      @param toconly: Whether to only include information for the TOC.
     |      @type toconly: bool
     |      @param offset: The timestamp to start at (when sortby is 'updated').
     |      @type offset: Timestamp or equivalent str
     |      @param offset_id: The topic UUID to start at (when sortby is 'newest').
     |      @type offset_id: str (in the form of a UUID)
     |      @param reverse: Whether to reverse the topic ordering.
     |      @type reverse: bool
     |      @param include_offset: Whether to include the offset topic.
     |      @type include_offset: bool
     |      @return: A dict representing the board's topiclist.
     |      @rtype: dict
     |  
     |  loadcoordinfo(self, page)
     |      Load [[mw:Extension:GeoData]] info.
     |  
     |  loadflowinfo(self, page)
     |      Deprecated; check the content model instead.
     |      
     |      
     |      Load Flow-related information about a given page.
     |      
     |      Assumes that the Flow extension is installed.
     |      
     |      @raises APIError: Flow extension is not installed
     |  
     |  loadimageinfo(self, page, history=False, url_width=None, url_height=None, url_param=None)
     |      Load image info from api and save in page attributes.
     |      
     |      Parameters correspond to iiprops in:
     |      [1] U{https://www.mediawiki.org/wiki/API:Imageinfo}
     |      
     |      Parameters validation and error handling left to the API call.
     |      
     |      @param history: if true, return the image's version history
     |      @param url_width: see iiurlwidth in [1]
     |      @param url_height: see iiurlheight in [1]
     |      @param url_param: see iiurlparam in [1]
     |  
     |  loadpageimage(self, page)
     |      Load [[mw:Extension:PageImages]] info.
     |      
     |      @param page: The page for which to obtain the image
     |      @type page: pywikibot.Page
     |      
     |      @raises APIError: PageImages extension is not installed
     |  
     |  loadpageinfo(self, page, preload=False)
     |      Load page info from api and store in page attributes.
     |  
     |  loadpageprops(self, page)
     |      Load page props for the given page.
     |  
     |  loadrevisions(self, page, content=False, revids=None, startid=None, endid=None, starttime=None, endtime=None, rvdir=None, user=None, excludeuser=None, section=None, sysop=False, step=None, total=None, rollback=False, getText='[deprecated name of content]')
     |      Retrieve revision information and store it in page object.
     |      
     |      By default, retrieves the last (current) revision of the page,
     |      unless any of the optional parameters revids, startid, endid,
     |      starttime, endtime, rvdir, user, excludeuser, or limit are
     |      specified. Unless noted below, all parameters not specified
     |      default to False.
     |      
     |      If rvdir is False or not specified, startid must be greater than
     |      endid if both are specified; likewise, starttime must be greater
     |      than endtime. If rvdir is True, these relationships are reversed.
     |      
     |      @param page: retrieve revisions of this Page and hold the data.
     |      @type page: pywikibot.Page
     |      @param content: if True, retrieve the wiki-text of each revision;
     |          otherwise, only retrieve the revision metadata (default)
     |      @type content: bool
     |      @param section: if specified, retrieve only this section of the text
     |          (content must be True); section must be given by number (top of
     |          the article is section 0), not name
     |      @type section: int
     |      @param revids: retrieve only the specified revision ids (raise
     |          Exception if any of revids does not correspond to page)
     |      @type revids: an int, a str or a list of ints or strings
     |      @param startid: retrieve revisions starting with this revid
     |      @param endid: stop upon retrieving this revid
     |      @param starttime: retrieve revisions starting at this Timestamp
     |      @param endtime: stop upon reaching this Timestamp
     |      @param rvdir: if false, retrieve newest revisions first (default);
     |          if true, retrieve earliest first
     |      @param user: retrieve only revisions authored by this user
     |      @param excludeuser: retrieve all revisions not authored by this user
     |      @param sysop: if True, switch to sysop account (if available) to
     |          retrieve this page
     |      @raises ValueError: invalid startid/endid or starttime/endtime values
     |      @raises pywikibot.Error: revids belonging to a different page
     |  
     |  lock_topic(self, page, lock, reason)
     |      Lock or unlock a Flow topic.
     |      
     |      @param page: A Flow topic
     |      @type page: Topic
     |      @param lock: Whether to lock or unlock the topic
     |      @type lock: bool (True corresponds to locking the topic.)
     |      @param reason: The reason to lock or unlock the topic
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  logevents(self, logtype=None, user=None, page=None, namespace=None, start=None, end=None, reverse=False, tag=None, total=None, step=NotImplemented)
     |      Iterate all log entries.
     |      
     |      @note: logevents with logtype='block' only logs user blocks whereas
     |          site.blocks iterates all blocks including IP ranges.
     |      
     |      @param logtype: only iterate entries of this type
     |          (see mediawiki api documentation for available types)
     |      @type logtype: basestring
     |      @param user: only iterate entries that match this user name
     |      @type user: basestring
     |      @param page: only iterate entries affecting this page
     |      @type page: Page or basestring
     |      @param namespace: namespace(s) to retrieve logevents from
     |      @type namespace: int or Namespace or an iterable of them
     |      @note: due to an API limitation, if namespace param contains multiple
     |          namespaces, log entries from all namespaces will be fetched from
     |          the API and will be filtered later during iteration.
     |      @param start: only iterate entries from and after this Timestamp
     |      @type start: Timestamp or ISO date string
     |      @param end: only iterate entries up to and through this Timestamp
     |      @type end: Timestamp or ISO date string
     |      @param reverse: if True, iterate oldest entries first (default: newest)
     |      @type reverse: bool
     |      @param tag: only iterate entries tagged with this tag
     |      @type tag: basestring
     |      @param total: maximum number of events to iterate
     |      @type total: int
     |      @rtype: iterable
     |      
     |      @raises KeyError: the namespace identifier was not resolved
     |      @raises TypeError: the namespace identifier has an inappropriate
     |          type such as bool, or an iterable with more than one namespace
     |  
     |  loggedInAs(self, sysop=False)
     |      Deprecated; use Site.user() instead.
     |      
     |      Return the current username if logged in, otherwise return None.
     |      
     |              DEPRECATED (use .user() method instead)
     |      
     |              @param sysop: if True, test if user is logged in as the sysop user
     |                           instead of the normal user.
     |              @type sysop: bool
     |      
     |              @rtype: bool
     |  
     |  logged_in(self, sysop=False)
     |      Verify the bot is logged into the site as the expected user.
     |      
     |      The expected usernames are those provided as either the user or sysop
     |      parameter at instantiation.
     |      
     |      @param sysop: if True, test if user is logged in as the sysop user
     |                   instead of the normal user.
     |      @type sysop: bool
     |      
     |      @rtype: bool
     |  
     |  login(self, sysop=False, autocreate=False)
     |      Log the user in if not already logged in.
     |      
     |      @param sysop: if true, log in with the sysop account.
     |      @type sysop: bool
     |      
     |      @param autocreate: if true, allow auto-creation of the account
     |                         using unified login
     |      @type autocreate: bool
     |      
     |      @raises NoUsername: Username is not recognised by the site.
     |      @see: U{https://www.mediawiki.org/wiki/API:Login}
     |  
     |  logout(self)
     |      Logout of the site and load details for the logged out user.
     |      
     |      Also logs out of the global account if linked to the user.
     |      U{https://www.mediawiki.org/wiki/API:Logout}
     |      
     |      @raises APIError: Logout is not available when OAuth enabled.
     |  
     |  logpages(self, number=50, mode=None, title=None, user=None, namespace=[], start=None, end=None, tag=None, newer=False, dump=False, offset=None, repeat=NotImplemented)
     |      Iterate log pages. DEPRECATED.
     |      
     |      When dump is enabled, the raw API dict is returned.
     |      
     |      @rtype: tuple of Page, str, int, str
     |  
     |  lonelypages(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Pages retrieved from Special:Lonelypages.
     |      
     |      @param total: number of pages to return
     |  
     |  longpages(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Pages and lengths from Special:Longpages.
     |      
     |      Yields a tuple of Page object, length(int).
     |      
     |      @param total: number of pages to return
     |  
     |  mediawiki_message(self, key, forceReload=NotImplemented)
     |      Fetch the text for a MediaWiki message.
     |      
     |      @param key: name of MediaWiki message
     |      @type key: str
     |      
     |      @rtype: unicode
     |  
     |  mediawiki_messages(self, keys)
     |      Fetch the text of a set of MediaWiki messages.
     |      
     |      If keys is '*' or ['*'], all messages will be fetched. (deprecated)
     |      
     |      The returned dict uses each key to store the associated message.
     |      
     |      @param keys: MediaWiki messages to fetch
     |      @type keys: set of str, '*' or ['*']
     |      
     |      @rtype: dict
     |  
     |  merge_history(self, source, dest, timestamp=None, reason=None)
     |      Merge revisions from one page into another.
     |      
     |      Revisions dating up to the given timestamp in the source will be
     |      moved into the destination page history. History merge fails if
     |      the timestamps of source and dest revisions overlap (all source
     |      revisions must be dated before the earliest dest revision).
     |      
     |      @param source: Source page from which revisions will be merged
     |      @type source: pywikibot.Page
     |      @param dest: Destination page to which revisions will be merged
     |      @type dest: pywikibot.Page
     |      @param timestamp: Revisions from this page dating up to this timestamp
     |          will be merged into the destination page (if not given or False,
     |          all revisions will be merged)
     |      @type timestamp: pywikibot.Timestamp
     |      @param reason: Optional reason for the history merge
     |      @type reason: str
     |  
     |  messages(self, sysop=False)
     |      Return true if the user has new messages, and false otherwise.
     |  
     |  moderate_post(self, post, state, reason)
     |      Moderate a Flow post.
     |      
     |      @param post: A Flow post
     |      @type post: Post
     |      @param state: The new moderation state
     |      @type state: str
     |      @param reason: The reason to moderate the topic
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  moderate_topic(self, page, state, reason)
     |      Moderate a Flow topic.
     |      
     |      @param page: A Flow topic
     |      @type page: Topic
     |      @param state: The new moderation state
     |      @type state: str
     |      @param reason: The reason to moderate the topic
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  movepage(self, page, newtitle, summary, movetalk=True, noredirect=False)
     |      Move a Page to a new title.
     |      
     |      @param page: the Page to be moved (must exist)
     |      @param newtitle: the new title for the Page
     |      @type newtitle: unicode
     |      @param summary: edit summary (required!)
     |      @param movetalk: if True (default), also move the talk page if possible
     |      @param noredirect: if True, suppress creation of a redirect from the
     |          old title to the new one
     |      @return: Page object with the new title
     |      @rtype: pywikibot.Page
     |  
     |  namespace(self, num, all=False)
     |      Return string containing local name of namespace 'num'.
     |      
     |      If optional argument 'all' is true, return all recognized
     |      values for this namespace.
     |      
     |      @param num: Namespace constant.
     |      @type num: int
     |      @param all: If True return a Namespace object. Otherwise
     |          return the namespace name.
     |      @return: local name or Namespace object
     |      @rtype: str or Namespace
     |  
     |  newfiles(self, user=None, start=None, end=None, reverse=False, total=None, lestart='[deprecated name of start]', leend='[deprecated name of end]', leuser='[deprecated name of user]', letitle=NotImplemented, repeat=NotImplemented, number='[deprecated name of total]', step=NotImplemented)
     |      Yield information about newly uploaded files.
     |      
     |      DEPRECATED: Use logevents(logtype='upload') instead.
     |      
     |      Yields a tuple of FilePage, Timestamp, user(unicode), comment(unicode).
     |      
     |      N.B. the API does not provide direct access to Special:Newimages, so
     |      this is derived from the "upload" log events instead.
     |  
     |  newimages(self, *args, **kwargs, number='[deprecated name of total]', repeat=NotImplemented)
     |      Yield information about newly uploaded files.
     |      
     |      DEPRECATED: Use logevents(logtype='upload') instead.
     |  
     |  newpages(self, user=None, returndict=False, start=None, end=None, reverse=False, bot=False, redirect=False, excludeuser=None, patrolled=None, namespaces=None, total=None, number='[deprecated name of total]', repeat=NotImplemented, namespace='[deprecated name of namespaces]', rcshow=NotImplemented, rc_show=NotImplemented, get_redirect=NotImplemented, step=NotImplemented, showBot='[deprecated name of bot]', showRedirects='[deprecated name of redirect]', showPatrolled='[deprecated name of patrolled]')
     |      Yield new articles (as Page objects) from recent changes.
     |      
     |      Starts with the newest article and fetches the number of articles
     |      specified in the first argument.
     |      
     |      The objects yielded are dependent on parameter returndict.
     |      When true, it yields a tuple composed of a Page object and a dict of
     |      attributes.
     |      When false, it yields a tuple composed of the Page object,
     |      timestamp (unicode), length (int), an empty unicode string, username
     |      or IP address (str), comment (unicode).
     |      
     |      @param namespaces: only iterate pages in these namespaces
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  nice_get_address(self, title)
     |      Return shorter URL path to retrieve page titled 'title'.
     |  
     |  notifications(self, **kwargs)
     |      Yield Notification objects from the Echo extension.
     |  
     |  notifications_mark_read(self, **kwargs)
     |      Mark selected notifications as read.
     |      
     |      @return: whether the action was successful
     |      @rtype: bool
     |  
     |  page_can_be_edited(self, page)
     |      Determine if the page can be edited.
     |      
     |      Return True if and only if:
     |        - page is unprotected, and bot has an account for this site, or
     |        - page is protected, and bot has a sysop account for this site.
     |      
     |      @rtype: bool
     |  
     |  page_embeddedin(self, page, filter_redirects=None, namespaces=None, total=None, content=False, step=NotImplemented, filterRedirects='[deprecated name of filter_redirects]')
     |      Iterate all pages that embedded the given page as a template.
     |      
     |      @param page: The Page to get inclusions for.
     |      @param filter_redirects: If True, only return redirects that embed
     |          the given page. If False, only return non-redirect links. If
     |          None, return both (no filtering).
     |      @param namespaces: If present, only return links from the namespaces
     |          in this list.
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param content: if True, load the current content of each iterated page
     |          (default False)
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  page_exists(self, page)
     |      Deprecated; use page.exists() instead.
     |      
     |      Return True if and only if page is an existing page on site.
     |  
     |  page_extlinks(self, page, total=None, step=NotImplemented)
     |      Iterate all external links on page, yielding URL strings.
     |  
     |  page_from_repository(self, item)
     |      Return a Page for this site object specified by wikibase item.
     |      
     |      @param item: id number of the item, e.g. "Q###"
     |      @type item: str
     |      @return: Page, or Category object given by wikibase item number
     |          for this site object.
     |      @rtype: pywikibot.Page or None
     |      
     |      @raises UnknownExtension: site has no wikibase extension
     |      @raises NotImplementedError: method not implemented for a wikibase site
     |  
     |  page_isredirect(self, page)
     |      Return True if and only if page is a redirect.
     |  
     |  page_restrictions(self, page)
     |      Return a dictionary reflecting page protections.
     |  
     |  pagebacklinks(self, page, follow_redirects=False, filter_redirects=None, namespaces=None, total=None, content=False, followRedirects='[deprecated name of follow_redirects]', filterRedirects='[deprecated name of filter_redirects]')
     |      Iterate all pages that link to the given page.
     |      
     |      @param page: The Page to get links to.
     |      @param follow_redirects: Also return links to redirects pointing to
     |          the given page.
     |      @param filter_redirects: If True, only return redirects to the given
     |          page. If False, only return non-redirect links. If None, return
     |          both (no filtering).
     |      @param namespaces: If present, only return links from the namespaces
     |          in this list.
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param total: Maximum number of pages to retrieve in total.
     |      @param content: if True, load the current content of each iterated page
     |          (default False)
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  pagecategories(self, page, total=None, content=False, withSortKey=NotImplemented, step=NotImplemented)
     |      Iterate categories to which page belongs.
     |      
     |      @param content: if True, load the current content of each iterated page
     |          (default False); note that this means the contents of the
     |          category description page, not the pages contained in the category
     |  
     |  pageimages(self, page, total=None, content=False, step=NotImplemented)
     |      Iterate images used (not just linked) on the page.
     |      
     |      @param content: if True, load the current content of each iterated page
     |          (default False); note that this means the content of the image
     |          description page, not the image itself
     |  
     |  pagelanglinks(self, page, total=None, include_obsolete=False, step=NotImplemented)
     |      Iterate all interlanguage links on page, yielding Link objects.
     |      
     |      @param include_obsolete: if true, yield even Link objects whose
     |                               site is obsolete
     |  
     |  pagelinks(self, page, namespaces=None, follow_redirects=False, total=None, content=False, step=NotImplemented)
     |      Iterate internal wikilinks contained (or transcluded) on page.
     |      
     |      @param namespaces: Only iterate pages in these namespaces
     |          (default: all)
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param follow_redirects: if True, yields the target of any redirects,
     |          rather than the redirect page
     |      @param content: if True, load the current content of each iterated page
     |          (default False)
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  pagename2codes(self)
     |      Return list of localized PAGENAMEE tags for the site.
     |  
     |  pagenamecodes(self)
     |      Return list of localized PAGENAME tags for the site.
     |  
     |  pagereferences(self, page, follow_redirects=False, filter_redirects=None, with_template_inclusion=True, only_template_inclusion=False, namespaces=None, total=None, content=False, step=NotImplemented, followRedirects='[deprecated name of follow_redirects]', filterRedirects='[deprecated name of filter_redirects]', onlyTemplateInclusion='[deprecated name of only_template_inclusion]', withTemplateInclusion='[deprecated name of with_template_inclusion]')
     |      Convenience method combining pagebacklinks and page_embeddedin.
     |      
     |      @param namespaces: If present, only return links from the namespaces
     |          in this list.
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  pages_with_property(self, propname, total=None)
     |      Yield Page objects from Special:PagesWithProp.
     |      
     |      @param propname: must be a valid property.
     |      @type propname: str
     |      @param total: number of pages to return
     |      @type total: int or None
     |      @return: return a generator of Page objects
     |      @rtype: iterator
     |  
     |  pagetemplates(self, page, namespaces=None, total=None, content=False, step=NotImplemented)
     |      Iterate templates transcluded (not just linked) on the page.
     |      
     |      @param namespaces: Only iterate pages in these namespaces
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param content: if True, load the current content of each iterated page
     |          (default False)
     |      
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  patrol(self, rcid=None, revid=None, revision=None, token=NotImplemented)
     |      Return a generator of patrolled pages.
     |      
     |      Pages to be patrolled are identified by rcid, revid or revision.
     |      At least one of the parameters is mandatory.
     |      See https://www.mediawiki.org/wiki/API:Patrol.
     |      
     |      @param rcid: an int/string/iterable/iterator providing rcid of pages
     |          to be patrolled.
     |      @type rcid: iterable/iterator which returns a number or string which
     |           contains only digits; it also supports a string (as above) or int
     |      @param revid: an int/string/iterable/iterator providing revid of pages
     |          to be patrolled.
     |      @type revid: iterable/iterator which returns a number or string which
     |           contains only digits; it also supports a string (as above) or int.
     |      @param revision: a Revision/iterable/iterator providing Revision
     |          objects of pages to be patrolled.
     |      @type revision: iterable/iterator which returns a Revision object; it
     |          also supports a single Revision.
     |      @rtype: iterator of dict with 'rcid', 'ns' and 'title'
     |          of the patrolled page.
     |  
     |  prefixindex(self, prefix, namespace=0, includeredirects=True)
     |      Yield all pages with a given prefix. Deprecated.
     |      
     |      Use allpages() with the prefix= parameter instead of this method.
     |  
     |  preloadpages(self, pagelist, groupsize=50, templates=False, langlinks=False, pageprops=False)
     |      Return a generator to a list of preloaded pages.
     |      
     |      Pages are iterated in the same order as in the underlying pagelist.
     |      In case of duplicates in a groupsize batch, return the first entry.
     |      
     |      @param pagelist: an iterable that returns Page objects
     |      @param groupsize: how many Pages to query at a time
     |      @type groupsize: int
     |      @param templates: preload pages (typically templates) transcluded in
     |          the provided pages
     |      @type templates: bool
     |      @param langlinks: preload all language links from the provided pages
     |          to other languages
     |      @type langlinks: bool
     |      @param pageprops: preload various properties defined in page content
     |      @type pageprops: bool
     |  
     |  protect(self, page, protections, reason, expiry=None, **kwargs, summary='[deprecated name of reason]')
     |      (Un)protect a wiki page. Requires administrator status.
     |      
     |      @param protections: A dict mapping type of protection to protection
     |          level of that type. Valid types of protection are 'edit', 'move',
     |          'create', and 'upload'. Valid protection levels (in MediaWiki 1.12)
     |          are '' (equivalent to 'none'), 'autoconfirmed', and 'sysop'.
     |          If None is given, however, that protection will be skipped.
     |      @type protections: dict
     |      @param reason: Reason for the action
     |      @type reason: basestring
     |      @param expiry: When the block should expire. This expiry will be
     |          applied to all protections. If None, 'infinite', 'indefinite',
     |          'never', or '' is given, there is no expiry.
     |      @type expiry: pywikibot.Timestamp, string in GNU timestamp format
     |          (including ISO 8601).
     |  
     |  protectedpages(self, namespace=0, type='edit', level=False, total=None, lvl='[deprecated name of level]')
     |      Return protected pages depending on protection level and type.
     |      
     |      For protection types which aren't 'create' it uses L{APISite.allpages},
     |      while it uses for 'create' the 'query+protectedtitles' module.
     |      
     |      @param namespace: The searched namespace.
     |      @type namespace: int or Namespace or str
     |      @param type: The protection type to search for (default 'edit').
     |      @type type: str
     |      @param level: The protection level (like 'autoconfirmed'). If False it
     |          shows all protection levels.
     |      @type level: str or False
     |      @return: The pages which are protected.
     |      @rtype: Iterable[pywikibot.Page]
     |  
     |  protection_levels(self)
     |      Return the protection levels available on this site.
     |      
     |      @return: protection types available
     |      @rtype: set of unicode instances
     |      @see: L{Siteinfo._get_default()}
     |  
     |  protection_types(self)
     |      Return the protection types available on this site.
     |      
     |      @return: protection types available
     |      @rtype: set of unicode instances
     |      @see: L{Siteinfo._get_default()}
     |  
     |  purgepages(self, pages, **kwargs)
     |      Purge the server's cache for one or multiple pages.
     |      
     |      @param pages: list of Page objects
     |      @return: True if API returned expected response; False otherwise
     |      @rtype: bool
     |  
     |  randompage(self, redirect=False)
     |      DEPRECATED.
     |      
     |      @param redirect: Return a random redirect page
     |      @rtype: pywikibot.Page
     |  
     |  randompages(self, total=None, namespaces=None, redirects=False, content=False, step=NotImplemented)
     |      Iterate a number of random pages.
     |      
     |      Pages are listed in a fixed sequence, only the starting point is
     |      random.
     |      
     |      @param total: the maximum number of pages to iterate
     |      @param namespaces: only iterate pages in these namespaces.
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param redirects: if True, include only redirect pages in results,
     |          False does not include redirects and None (MW 1.26+) include both
     |          types. (default: False)
     |      @type redirects: bool or None
     |      @param content: if True, load the current content of each iterated page
     |          (default False)
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |      @raises AssertionError: unsupported redirects parameter
     |  
     |  randomredirectpage(self)
     |      DEPRECATED: Use Site.randompages() instead.
     |      
     |      @return: Return a random redirect page
     |  
     |  recentchanges(self, start=None, end=None, reverse=False, namespaces=None, pagelist=None, changetype=None, minor=None, bot=None, anon=None, redirect=None, patrolled=None, top_only=False, total=None, user=None, excludeuser=None, tag=None, returndict=NotImplemented, nobots=NotImplemented, rcshow=NotImplemented, rcprop=NotImplemented, rctype='[deprecated name of changetype]', revision=NotImplemented, repeat=NotImplemented, rcstart='[deprecated name of start]', rcend='[deprecated name of end]', rcdir=NotImplemented, step=NotImplemented, includeredirects='[deprecated name of redirect]', namespace='[deprecated name of namespaces]', rcnamespace='[deprecated name of namespaces]', number='[deprecated name of total]', rclimit='[deprecated name of total]', showMinor='[deprecated name of minor]', showBot='[deprecated name of bot]', showAnon='[deprecated name of anon]', showRedirects='[deprecated name of redirect]', showPatrolled='[deprecated name of patrolled]', topOnly='[deprecated name of top_only]')
     |      Iterate recent changes.
     |      
     |      @param start: Timestamp to start listing from
     |      @type start: pywikibot.Timestamp
     |      @param end: Timestamp to end listing at
     |      @type end: pywikibot.Timestamp
     |      @param reverse: if True, start with oldest changes (default: newest)
     |      @type reverse: bool
     |      @param namespaces: only iterate pages in these namespaces
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param pagelist: iterate changes to pages in this list only
     |      @type pagelist: list of Pages
     |      @param changetype: only iterate changes of this type ("edit" for
     |          edits to existing pages, "new" for new pages, "log" for log
     |          entries)
     |      @type changetype: basestring
     |      @param minor: if True, only list minor edits; if False, only list
     |          non-minor edits; if None, list all
     |      @type minor: bool or None
     |      @param bot: if True, only list bot edits; if False, only list
     |          non-bot edits; if None, list all
     |      @type bot: bool or None
     |      @param anon: if True, only list anon edits; if False, only list
     |          non-anon edits; if None, list all
     |      @type anon: bool or None
     |      @param redirect: if True, only list edits to redirect pages; if
     |          False, only list edits to non-redirect pages; if None, list all
     |      @type redirect: bool or None
     |      @param patrolled: if True, only list patrolled edits; if False,
     |          only list non-patrolled edits; if None, list all
     |      @type patrolled: bool or None
     |      @param top_only: if True, only list changes that are the latest
     |          revision (default False)
     |      @type top_only: bool
     |      @param user: if not None, only list edits by this user or users
     |      @type user: basestring|list
     |      @param excludeuser: if not None, exclude edits by this user or users
     |      @type excludeuser: basestring|list
     |      @param tag: a recent changes tag
     |      @type tag: str
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  redirect(self)
     |      Return the localized #REDIRECT keyword.
     |  
     |  redirectRegex(self)
     |      Return a compiled regular expression matching on redirect pages.
     |      
     |      Group 1 in the regex match object will be the target title.
     |  
     |  redirectpages(self, total=None, step=NotImplemented)
     |      Yield redirect pages from Special:ListRedirects.
     |      
     |      @param total: number of pages to return
     |  
     |  reply_to_post(self, page, reply_to_uuid, content, format)
     |      Reply to a post on a Flow topic.
     |      
     |      @param page: A Flow topic
     |      @type page: Topic
     |      @param reply_to_uuid: The UUID of the Post to create a reply to
     |      @type reply_to_uuid: unicode
     |      @param content: The content of the reply
     |      @type content: unicode
     |      @param format: The content format used for the supplied content
     |      @type format: unicode (either 'wikitext' or 'html')
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  resolvemagicwords(self, wikitext)
     |      Replace the {{ns:xx}} marks in a wikitext with the namespace names.
     |      
     |      DEPRECATED.
     |  
     |  restore_post(self, post, reason)
     |      Restore a Flow post.
     |      
     |      @param post: A Flow post
     |      @type post: Post
     |      @param reason: The reason to restore the post
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  restore_topic(self, page, reason)
     |      Restore a Flow topic.
     |      
     |      @param page: A Flow topic
     |      @type page: Topic
     |      @param reason: The reason to restore the topic
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  rollbackpage(self, page, **kwargs)
     |      Roll back page to version before last user's edits.
     |      
     |      The keyword arguments are those supported by the rollback API.
     |      
     |      As a precaution against errors, this method will fail unless
     |      the page history contains at least two revisions, and at least
     |      one that is not by the same user who made the last edit.
     |      
     |      @param page: the Page to be rolled back (must exist)
     |  
     |  search(self, searchstring, namespaces=None, where='text', get_redirects=False, total=None, content=False, number='[deprecated name of total]', step=NotImplemented, key='[deprecated name of searchstring]', getredirects='[deprecated name of get_redirects]')
     |      Iterate Pages that contain the searchstring.
     |      
     |      Note that this may include non-existing Pages if the wiki's database
     |      table contains outdated entries.
     |      
     |      @param searchstring: the text to search for
     |      @type searchstring: unicode
     |      @param where: Where to search; value must be "text", "title" or
     |          "nearmatch" (many wikis do not support title or nearmatch search)
     |      @param namespaces: search only in these namespaces (defaults to all)
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param get_redirects: if True, include redirects in results. Since
     |          version MediaWiki 1.23 it will always return redirects.
     |      @param content: if True, load the current content of each iterated page
     |          (default False)
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  server_time(self)
     |      Return a Timestamp object representing the current server time.
     |      
     |      For wikis with a version newer than 1.16 it uses the 'time' property
     |      of the siteinfo 'general'. It'll force a reload before returning the
     |      time. It requests to expand the text '{{CURRENTTIMESTAMP}}' for older
     |      wikis.
     |      
     |      @return: the current server time
     |      @rtype: L{Timestamp}
     |  
     |  shortpages(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Pages and lengths from Special:Shortpages.
     |      
     |      Yields a tuple of Page object, length(int).
     |      
     |      @param total: number of pages to return
     |  
     |  stash_info(self, file_key, props=False)
     |      Get the stash info for a given file key.
     |  
     |  suppress_post(self, post, reason)
     |      Suppress a Flow post.
     |      
     |      @param post: A Flow post
     |      @type post: Post
     |      @param reason: The reason to suppress the post
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  suppress_topic(self, page, reason)
     |      Suppress a Flow topic.
     |      
     |      @param page: A Flow topic
     |      @type page: Topic
     |      @param reason: The reason to suppress the topic
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  thank_post(self, post)
     |      Corresponding method to the 'action=flowthank' API action.
     |      
     |      @param post: The post to be thanked for.
     |      @type post: Post
     |      @raise APIError: On thanking oneself or other API errors.
     |      @return: The API response.
     |  
     |  thank_revision(self, revid, source=None)
     |      Corresponding method to the 'action=thank' API action.
     |      
     |      @param revid: Revision ID for the revision to be thanked.
     |      @type revid: int
     |      @param source: A source for the thanking operation.
     |      @type source: str
     |      @raise APIError: On thanking oneself or other API errors.
     |      @return: The API response.
     |  
     |  token(self, page, tokentype)
     |      Deprecated; use the 'tokens' property instead.
     |      
     |      Return token retrieved from wiki to allow changing page content.
     |      
     |      @param page: the Page for which a token should be retrieved
     |      @param tokentype: the type of token (e.g., "edit", "move", "delete");
     |          see API documentation for full list of types
     |  
     |  unblockuser(self, user, reason=None)
     |      Remove the block for the user.
     |      
     |      @param user: The username/IP without a namespace.
     |      @type user: L{pywikibot.User}
     |      @param reason: Reason for the unblock.
     |      @type reason: basestring
     |  
     |  uncategorizedcategories(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Categories from Special:Uncategorizedcategories.
     |      
     |      @param total: number of pages to return
     |  
     |  uncategorizedfiles = uncategorizedimages(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |  
     |  uncategorizedimages(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield FilePages from Special:Uncategorizedimages.
     |      
     |      @param total: number of pages to return
     |  
     |  uncategorizedpages(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Pages from Special:Uncategorizedpages.
     |      
     |      @param total: number of pages to return
     |  
     |  uncategorizedtemplates(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Pages from Special:Uncategorizedtemplates.
     |      
     |      @param total: number of pages to return
     |  
     |  unconnected_pages(self, total=None, step=NotImplemented)
     |      Yield Page objects from Special:UnconnectedPages.
     |      
     |      @param total: number of pages to return
     |  
     |  undelete_page(self, page, reason, revisions=None, summary='[deprecated name of reason]')
     |      Undelete page from the wiki. Requires appropriate privilege level.
     |      
     |      @param page: Page to be deleted.
     |      @type page: pywikibot.BasePage
     |      @param revisions: List of timestamps to restore.
     |          If None, restores all revisions.
     |      @type revisions: list
     |      @param reason: Undeletion reason.
     |      @type reason: basestring
     |  
     |  unusedcategories(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Category objects from Special:Unusedcategories.
     |      
     |      @param total: number of pages to return
     |  
     |  unusedfiles(self, total=None, extension=NotImplemented, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield FilePage objects from Special:Unusedimages.
     |      
     |      @param total: number of pages to return
     |  
     |  unusedimages(self, total=None, extension=NotImplemented, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield FilePage objects from Special:Unusedimages.
     |      
     |      DEPRECATED: Use L{APISite.unusedfiles} instead.
     |  
     |  unwatchedpages(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Pages from Special:Unwatchedpages (requires Admin privileges).
     |      
     |      @param total: number of pages to return
     |  
     |  upload(self, filepage, source_filename=None, source_url=None, comment=None, text=None, watch=False, ignore_warnings=False, chunk_size=0, _file_key=None, _offset=0, _verify_stash=None, report_success=None, imagepage='[deprecated name of filepage]')
     |      Upload a file to the wiki.
     |      
     |      Either source_filename or source_url, but not both, must be provided.
     |      
     |      @param filepage: a FilePage object from which the wiki-name of the
     |          file will be obtained.
     |      @param source_filename: path to the file to be uploaded
     |      @param source_url: URL of the file to be uploaded
     |      @param comment: Edit summary; if this is not provided, then
     |          filepage.text will be used. An empty summary is not permitted.
     |          This may also serve as the initial page text (see below).
     |      @param text: Initial page text; if this is not set, then
     |          filepage.text will be used, or comment.
     |      @param watch: If true, add filepage to the bot user's watchlist
     |      @param ignore_warnings: It may be a static boolean, a callable
     |          returning a boolean or an iterable. The callable gets a list of
     |          UploadWarning instances and the iterable should contain the warning
     |          codes for which an equivalent callable would return True if all
     |          UploadWarning codes are in that list. If the result is False it'll
     |          not continue uploading the file and otherwise disable any warning
     |          and reattempt to upload the file. NOTE: If report_success is True
     |          or None it'll raise an UploadWarning exception if the static
     |          boolean is False.
     |      @type ignore_warnings: bool or callable or iterable of str
     |      @param chunk_size: The chunk size in bytes for chunked uploading (see
     |          U{https://www.mediawiki.org/wiki/API:Upload#Chunked_uploading}). It
     |          will only upload in chunks, if the version number is 1.20 or higher
     |          and the chunk size is positive but lower than the file size.
     |      @type chunk_size: int
     |      @param _file_key: Reuses an already uploaded file using the filekey. If
     |          None (default) it will upload the file.
     |      @type _file_key: str or None
     |      @param _offset: When file_key is not None this can be an integer to
     |          continue a previously canceled chunked upload. If False it treats
     |          that as a finished upload. If True it requests the stash info from
     |          the server to determine the offset. By default starts at 0.
     |      @type _offset: int or bool
     |      @param _verify_stash: Requests the SHA1 and file size uploaded and
     |          compares it to the local file. Also verifies that _offset
     |          matches the file size if _offset is an int. If _offset is
     |          False it verifies that the file size matches the local file. If
     |          None it'll verify the stash when a file key and offset are given.
     |      @type _verify_stash: bool or None
     |      @param report_success: If the upload was successful it'll print a
     |          success message and if ignore_warnings is set to False it'll
     |          raise an UploadWarning if a warning occurred. If it's None
     |          (default) it'll be True if ignore_warnings is a bool and False
     |          otherwise. If it's True or None ignore_warnings must be a bool.
     |      @return: It returns True if the upload was successful and False
     |          otherwise.
     |      @rtype: bool
     |  
     |  usercontribs(self, user=None, userprefix=None, start=None, end=None, reverse=False, namespaces=None, minor=None, total=None, top_only=False, step=NotImplemented, showMinor='[deprecated name of minor]')
     |      Iterate contributions by a particular user.
     |      
     |      Iterated values are in the same format as recentchanges.
     |      
     |      @param user: Iterate contributions by this user (name or IP)
     |      @param userprefix: Iterate contributions by all users whose names
     |          or IPs start with this substring
     |      @param start: Iterate contributions starting at this Timestamp
     |      @param end: Iterate contributions ending at this Timestamp
     |      @param reverse: Iterate oldest contributions first (default: newest)
     |      @param namespaces: only iterate pages in these namespaces
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param minor: if True, iterate only minor edits; if False and
     |          not None, iterate only non-minor edits (default: iterate both)
     |      @param total: limit result to this number of pages
     |      @type total: int
     |      @param top_only: if True, iterate only edits which are the latest
     |          revision (default: False)
     |      @raises Error: either user or userprefix must be non-empty
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  users(self, usernames)
     |      Iterate info about a list of users by name or IP.
     |      
     |      @param usernames: a list of user names
     |      @type usernames: list, or other iterable, of unicodes
     |  
     |  validate_tokens(self, types)
     |      Validate if requested tokens are acceptable.
     |      
     |      Valid tokens depend on mw version.
     |  
     |  version(self)
     |      Return live project version number as a string.
     |      
     |      This overwrites the corresponding family method for APISite class. Use
     |      L{pywikibot.tools.MediaWikiVersion} to compare MediaWiki versions.
     |  
     |  wantedcategories(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Pages from Special:Wantedcategories.
     |      
     |      @param total: number of pages to return
     |  
     |  wantedpages(self, total=None, step=NotImplemented)
     |      Yield Pages from Special:Wantedpages.
     |      
     |      @param total: number of pages to return
     |  
     |  watch(self, pages, unwatch=False)
     |      Add or remove pages from watchlist.
     |      
     |      @param pages: A single page or a sequence of pages.
     |      @type pages: A page object, a page-title string, or sequence of them.
     |          Also accepts a single pipe-separated string like 'title1|title2'.
     |      @param unwatch: If True, remove pages from watchlist;
     |          if False add them (default).
     |      @return: True if API returned expected response; False otherwise
     |      @rtype: bool
     |  
     |  watched_pages(self, sysop=False, force=False, total=None, step=NotImplemented)
     |      Return watchlist.
     |      
     |      @param sysop: Returns watchlist of sysop user if true
     |      @type sysop: bool
     |      @param force: Reload watchlist
     |      @type force: bool
     |      @param total: if not None, limit the generator to yielding this many
     |          items in total
     |      @type total: int
     |      @return: list of pages in watchlist
     |      @rtype: list of pywikibot.Page objects
     |  
     |  watchlist_revs(self, start=None, end=None, reverse=False, namespaces=None, minor=None, bot=None, anon=None, total=None, step=NotImplemented, showMinor='[deprecated name of minor]', showAnon='[deprecated name of anon]', showBot='[deprecated name of bot]')
     |      Iterate revisions to pages on the bot user's watchlist.
     |      
     |      Iterated values will be in same format as recentchanges.
     |      
     |      @param start: Iterate revisions starting at this Timestamp
     |      @param end: Iterate revisions ending at this Timestamp
     |      @param reverse: Iterate oldest revisions first (default: newest)
     |      @param namespaces: only iterate pages in these namespaces
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param minor: if True, only list minor edits; if False (and not
     |          None), only list non-minor edits
     |      @param bot: if True, only list bot edits; if False (and not
     |          None), only list non-bot edits
     |      @param anon: if True, only list anon edits; if False (and not
     |          None), only list non-anon edits
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  watchpage(self, page, unwatch=False)
     |      Add or remove page from watchlist.
     |      
     |      DEPRECATED: Use Site().watch() instead.
     |      
     |      @param page: A single page.
     |      @type page: A page object, a page-title string.
     |      @param unwatch: If True, remove page from watchlist;
     |          if False (default), add it.
     |      @return: True if API returned expected response; False otherwise
     |      @rtype: bool
     |  
     |  withoutinterwiki(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Pages without language links from Special:Withoutinterwiki.
     |      
     |      @param total: number of pages to return
     |  
     |  ----------------------------------------------------------------------
     |  Class methods defined here:
     |  
     |  fromDBName(dbname, site=None) from builtins.type
     |      Create a site from a database name using the sitematrix.
     |      
     |      @param dbname: database name
     |      @type dbname: str
     |      @param site: Site to load sitematrix from. (Default meta.wikimedia.org)
     |      @type site: APISite
     |      @return: site object for the database name
     |      @rtype: APISite
     |  
     |  ----------------------------------------------------------------------
     |  Data descriptors defined here:
     |  
     |  article_path
     |      Get the nice article path without $1.
     |  
     |  globaluserinfo
     |      Retrieve userinfo from site and store in _userinfo attribute.
     |      
     |      self._userinfo will be a dict with the following keys and values:
     |      
     |        - id: user id (numeric str)
     |        - name: username (if user is logged in)
     |        - anon: present if user is not logged in
     |        - groups: list of groups (could be empty)
     |        - rights: list of rights (could be empty)
     |        - message: present if user has a new message on talk page
     |        - blockinfo: present if user is blocked (dict)
     |      
     |      U{https://www.mediawiki.org/wiki/API:Userinfo}
     |      
     |      @param force: force to retrieve userinfo ignoring cache
     |      @type force: bool
     |  
     |  has_data_repository
     |      Return True if site has a shared data repository like Wikidata.
     |  
     |  has_image_repository
     |      Return True if site has a shared image repository like Commons.
     |  
     |  has_transcluded_data
     |      Deprecated; use has_data_repository instead.
     |      
     |      Return True if site has a shared data repository like Wikidata.
     |  
     |  lang
     |      Return the code for the language of this Site.
     |  
     |  logtypes
     |      Return a set of log types available on current site.
     |  
     |  months_names
     |      Obtain month names from the site messages.
     |      
     |      The list is zero-indexed, ordered by month in calendar, and should
     |      be in the original site language.
     |      
     |      @return: list of tuples (month name, abbreviation)
     |      @rtype: list
     |  
     |  proofread_index_ns
     |      Return Index namespace for the ProofreadPage extension.
     |  
     |  proofread_levels
     |      Return Quality Levels for the ProofreadPage extension.
     |  
     |  proofread_page_ns
     |      Return Page namespace for the ProofreadPage extension.
     |  
     |  siteinfo
     |      Site information dict.
     |  
     |  userinfo
     |      Retrieve userinfo from site and store in _userinfo attribute.
     |      
     |      self._userinfo will be a dict with the following keys and values:
     |      
     |        - id: user id (numeric str)
     |        - name: username (if user is logged in)
     |        - anon: present if user is not logged in
     |        - groups: list of groups (could be empty)
     |        - rights: list of rights (could be empty)
     |        - message: present if user has a new message on talk page
     |        - blockinfo: present if user is blocked (dict)
     |      
     |      U{https://www.mediawiki.org/wiki/API:Userinfo}
     |      
     |      @param force: force to retrieve userinfo ignoring cache
     |      @type force: bool
     |  
     |  ----------------------------------------------------------------------
     |  Data and other attributes defined here:
     |  
     |  OnErrorExc = <class 'pywikibot.site.OnErrorExc'>
     |      OnErrorExc(exception, on_new_page)
     |  
     |  ----------------------------------------------------------------------
     |  Methods inherited from BaseSite:
     |  
     |  __getattr__(self, attr)
     |      Delegate undefined method calls to the Family object.
     |  
     |  __hash__(self)
     |      Return hashable key.
     |  
     |  __repr__(self)
     |      Return internal representation.
     |  
     |  __str__(self)
     |      Return string representing this Site's name and code.
     |  
     |  category_namespace(self)
     |      Deprecated; use namespaces.CATEGORY.custom_name instead.
     |      
     |      Return local name for the Category namespace.
     |  
     |  category_namespaces(self)
     |      Deprecated; use list(namespaces.CATEGORY) instead.
     |      
     |      Return names for the Category namespace.
     |  
     |  category_on_one_line(self)
     |      Return True if this site wants all category links on one line.
     |  
     |  disambcategory(self)
     |      Return Category in which disambig pages are listed.
     |  
     |  fam(self)
     |      Deprecated; use family attribute instead.
     |      
     |      Return Family object for this Site.
     |  
     |  getNamespaceIndex(self, namespace)
     |      DEPRECATED: Return the Namespace for a given namespace name.
     |  
     |  getSite(self, code)
     |      Return Site object for language 'code' in this Family.
     |  
     |  getUrl(self, path, retry=None, sysop=None, data=None, compress=NotImplemented, no_hostname=NotImplemented, cookies_only=NotImplemented, refer=NotImplemented, back_response=NotImplemented)
     |      DEPRECATED.
     |      
     |      Retained for compatibility only. All arguments except path and data
     |      are ignored.
     |  
     |  image_namespace(self)
     |      Deprecated; use namespaces.FILE.custom_name instead.
     |      
     |      Return local name for the File namespace.
     |  
     |  interwiki(self, prefix)
     |      Return the site for a corresponding interwiki prefix.
     |      
     |      @raises SiteDefinitionError: if the url given in the interwiki table
     |          doesn't match any of the existing families.
     |      @raises KeyError: if the prefix is not an interwiki prefix.
     |  
     |  interwiki_prefix(self, site)
     |      Return the interwiki prefixes going to that site.
     |      
     |      The interwiki prefixes are ordered first by length (shortest first)
     |      and then alphabetically. L{interwiki(prefix)} is not guaranteed to
     |      equal C{site} (i.e. the parameter passed to this function).
     |      
     |      @param site: The targeted site, which might be itself.
     |      @type site: L{BaseSite}
     |      @return: The interwiki prefixes
     |      @rtype: list (guaranteed to be not empty)
     |      @raises KeyError: if there is no interwiki prefix for that site.
     |  
     |  interwiki_putfirst(self)
     |      Return list of language codes for ordering of interwiki links.
     |  
     |  isInterwikiLink(self, text)
     |      Return True if text is in the form of an interwiki link.
     |      
     |      If a link object constructed using "text" as the link text parses as
     |      belonging to a different site, this method returns True.
     |  
     |  languages(self)
     |      Return list of all valid language codes for this site's Family.
     |  
     |  linkto(self, title, othersite=None)
     |      DEPRECATED. Return a wikilink to a page.
     |      
     |      @param title: Title of the page to link to
     |      @type title: unicode
     |      @param othersite: Generate an interwiki link for use on this site.
     |      @type othersite: BaseSite or None
     |      
     |      @rtype: unicode
     |  
     |  local_interwiki(self, prefix)
     |      Return whether the interwiki prefix is local.
     |      
     |      A local interwiki prefix is handled by the target site like a normal
     |      link. So if that link also contains an interwiki link it does follow
     |      it as long as it's a local link.
     |      
     |      @raises SiteDefinitionError: if the url given in the interwiki table
     |          doesn't match any of the existing families.
     |      @raises KeyError: if the prefix is not an interwiki prefix.
     |  
     |  lock_page(self, page, block=True)
     |      Lock page for writing. Must be called before writing any page.
     |      
     |      We don't want different threads trying to write to the same page
     |      at the same time, even to different sections.
     |      
     |      @param page: the page to be locked
     |      @type page: pywikibot.Page
     |      @param block: if true, wait until the page is available to be locked;
     |          otherwise, raise an exception if page can't be locked
     |  
     |  mediawiki_namespace(self)
     |      Deprecated; use namespaces.MEDIAWIKI.custom_name instead.
     |      
     |      Return local name for the MediaWiki namespace.
     |  
     |  normalizeNamespace = call(*a, **kw)
     |  
     |  ns_index(self, namespace)
     |      Deprecated; use APISite.namespaces.lookup_name instead.
     |      
     |      
     |      Return the Namespace for a given namespace name.
     |      
     |      @param namespace: name
     |      @type namespace: unicode
     |      @return: The matching Namespace object on this Site
     |      @rtype: Namespace, or None if invalid
     |  
     |  ns_normalize(self, value)
     |      Return canonical local form of namespace name.
     |      
     |      @param value: A namespace name
     |      @type value: unicode
     |  
     |  postData(self, address, data, sysop=False, compress=True, cookies=None, contentType=NotImplemented)
     |      DEPRECATED.
     |  
     |  postForm(self, address, predata, sysop=False, cookies=None)
     |      DEPRECATED.
     |  
     |  sametitle(self, title1, title2)
     |      Return True if title1 and title2 identify the same wiki page.
     |      
     |      title1 and title2 may be unequal but still identify the same page,
     |      if they use different aliases for the same namespace.
     |  
     |  special_namespace(self)
     |      Deprecated; use namespaces.SPECIAL.custom_name instead.
     |      
     |      Return local name for the Special: namespace.
     |  
     |  template_namespace(self)
     |      Deprecated; use namespaces.TEMPLATE.custom_name instead.
     |      
     |      Return local name for the Template namespace.
     |  
     |  unlock_page(self, page)
     |      Unlock page. Call as soon as a write operation has completed.
     |      
     |      @param page: the page to be locked
     |      @type page: pywikibot.Page
     |  
     |  urlEncode(self, query)
     |      DEPRECATED.
     |  
     |  user(self)
     |      Return the currently-logged in bot username, or None.
     |  
     |  username(self, sysop=False)
     |      Return the username/sysopname used for the site.
     |  
     |  validLanguageLinks(self)
     |      Return list of language codes to be used in interwiki links.
     |  
     |  ----------------------------------------------------------------------
     |  Data descriptors inherited from BaseSite:
     |  
     |  code
     |      The identifying code for this Site equal to the wiki prefix.
     |      
     |      By convention, this is usually an ISO language code, but it does
     |      not have to be.
     |  
     |  doc_subpage
     |      Return the documentation subpage for this Site.
     |      
     |      @rtype: tuple
     |  
     |  family
     |      The Family object for this Site's wiki family.
     |  
     |  namespaces
     |      Return dict of valid namespaces on this wiki.
     |  
     |  nocapitalize
     |      Return whether this site's default title case is case-sensitive.
     |      
     |      DEPRECATED.
     |  
     |  sitename
     |      String representing this Site's name and code.
     |  
     |  throttle
     |      Return this Site's throttle. Initialize a new one if needed.
     |  
     |  ----------------------------------------------------------------------
     |  Methods inherited from pywikibot.tools.ComparableMixin:
     |  
     |  __eq__(self, other)
     |      Compare if self is equal to other.
     |  
     |  __ge__(self, other)
     |      Compare if self is greater equals other.
     |  
     |  __gt__(self, other)
     |      Compare if self is greater than other.
     |  
     |  __le__(self, other)
     |      Compare if self is less equals other.
     |  
     |  __lt__(self, other)
     |      Compare if self is less than other.
     |  
     |  __ne__(self, other)
     |      Compare if self is not equal to other.
     |  
     |  ----------------------------------------------------------------------
     |  Data descriptors inherited from pywikibot.tools.ComparableMixin:
     |  
     |  __dict__
     |      dictionary for instance variables (if defined)
     |  
     |  __weakref__
     |      list of weak references to the object (if defined)
    
    class BaseSite(pywikibot.tools.ComparableMixin)
     |  Site methods that are independent of the communication interface.
     |  
     |  Method resolution order:
     |      BaseSite
     |      pywikibot.tools.ComparableMixin
     |      builtins.object
     |  
     |  Methods defined here:
     |  
     |  __getattr__(self, attr)
     |      Delegate undefined method calls to the Family object.
     |  
     |  __getstate__(self)
     |      Remove Lock based classes before pickling.
     |  
     |  __hash__(self)
     |      Return hashable key.
     |  
     |  __init__(self, code, fam=None, user=None, sysop=None)
     |      Initializer.
     |      
     |      @param code: the site's language code
     |      @type code: str
     |      @param fam: wiki family name (optional)
     |      @type fam: str or Family
     |      @param user: bot user name (optional)
     |      @type user: str
     |      @param sysop: sysop account user name (optional)
     |      @type sysop: str
     |  
     |  __repr__(self)
     |      Return internal representation.
     |  
     |  __setstate__(self, attrs)
     |      Restore things removed in __getstate__.
     |  
     |  __str__(self)
     |      Return string representing this Site's name and code.
     |  
     |  category_namespace(self)
     |      Deprecated; use namespaces.CATEGORY.custom_name instead.
     |      
     |      Return local name for the Category namespace.
     |  
     |  category_namespaces(self)
     |      Deprecated; use list(namespaces.CATEGORY) instead.
     |      
     |      Return names for the Category namespace.
     |  
     |  category_on_one_line(self)
     |      Return True if this site wants all category links on one line.
     |  
     |  disambcategory(self)
     |      Return Category in which disambig pages are listed.
     |  
     |  fam(self)
     |      Deprecated; use family attribute instead.
     |      
     |      Return Family object for this Site.
     |  
     |  getNamespaceIndex(self, namespace)
     |      DEPRECATED: Return the Namespace for a given namespace name.
     |  
     |  getSite(self, code)
     |      Return Site object for language 'code' in this Family.
     |  
     |  getUrl(self, path, retry=None, sysop=None, data=None, compress=NotImplemented, no_hostname=NotImplemented, cookies_only=NotImplemented, refer=NotImplemented, back_response=NotImplemented)
     |      DEPRECATED.
     |      
     |      Retained for compatibility only. All arguments except path and data
     |      are ignored.
     |  
     |  has_api(self)
     |      Deprecated.
     |      
     |      Return whether this site has an API.
     |  
     |  image_namespace(self)
     |      Deprecated; use namespaces.FILE.custom_name instead.
     |      
     |      Return local name for the File namespace.
     |  
     |  interwiki(self, prefix)
     |      Return the site for a corresponding interwiki prefix.
     |      
     |      @raises SiteDefinitionError: if the url given in the interwiki table
     |          doesn't match any of the existing families.
     |      @raises KeyError: if the prefix is not an interwiki prefix.
     |  
     |  interwiki_prefix(self, site)
     |      Return the interwiki prefixes going to that site.
     |      
     |      The interwiki prefixes are ordered first by length (shortest first)
     |      and then alphabetically. L{interwiki(prefix)} is not guaranteed to
     |      equal C{site} (i.e. the parameter passed to this function).
     |      
     |      @param site: The targeted site, which might be itself.
     |      @type site: L{BaseSite}
     |      @return: The interwiki prefixes
     |      @rtype: list (guaranteed to be not empty)
     |      @raises KeyError: if there is no interwiki prefix for that site.
     |  
     |  interwiki_putfirst(self)
     |      Return list of language codes for ordering of interwiki links.
     |  
     |  isInterwikiLink(self, text)
     |      Return True if text is in the form of an interwiki link.
     |      
     |      If a link object constructed using "text" as the link text parses as
     |      belonging to a different site, this method returns True.
     |  
     |  languages(self)
     |      Return list of all valid language codes for this site's Family.
     |  
     |  linkto(self, title, othersite=None)
     |      DEPRECATED. Return a wikilink to a page.
     |      
     |      @param title: Title of the page to link to
     |      @type title: unicode
     |      @param othersite: Generate an interwiki link for use on this site.
     |      @type othersite: BaseSite or None
     |      
     |      @rtype: unicode
     |  
     |  local_interwiki(self, prefix)
     |      Return whether the interwiki prefix is local.
     |      
     |      A local interwiki prefix is handled by the target site like a normal
     |      link. So if that link also contains an interwiki link it does follow
     |      it as long as it's a local link.
     |      
     |      @raises SiteDefinitionError: if the url given in the interwiki table
     |          doesn't match any of the existing families.
     |      @raises KeyError: if the prefix is not an interwiki prefix.
     |  
     |  lock_page(self, page, block=True)
     |      Lock page for writing. Must be called before writing any page.
     |      
     |      We don't want different threads trying to write to the same page
     |      at the same time, even to different sections.
     |      
     |      @param page: the page to be locked
     |      @type page: pywikibot.Page
     |      @param block: if true, wait until the page is available to be locked;
     |          otherwise, raise an exception if page can't be locked
     |  
     |  mediawiki_namespace(self)
     |      Deprecated; use namespaces.MEDIAWIKI.custom_name instead.
     |      
     |      Return local name for the MediaWiki namespace.
     |  
     |  normalizeNamespace = call(*a, **kw)
     |  
     |  ns_index(self, namespace)
     |      Deprecated; use APISite.namespaces.lookup_name instead.
     |      
     |      
     |      Return the Namespace for a given namespace name.
     |      
     |      @param namespace: name
     |      @type namespace: unicode
     |      @return: The matching Namespace object on this Site
     |      @rtype: Namespace, or None if invalid
     |  
     |  ns_normalize(self, value)
     |      Return canonical local form of namespace name.
     |      
     |      @param value: A namespace name
     |      @type value: unicode
     |  
     |  pagename2codes(self)
     |      Return list of localized PAGENAMEE tags for the site.
     |  
     |  pagenamecodes(self)
     |      Return list of localized PAGENAME tags for the site.
     |  
     |  postData(self, address, data, sysop=False, compress=True, cookies=None, contentType=NotImplemented)
     |      DEPRECATED.
     |  
     |  postForm(self, address, predata, sysop=False, cookies=None)
     |      DEPRECATED.
     |  
     |  redirect(self)
     |      Return list of localized redirect tags for the site.
     |  
     |  redirectRegex(self, pattern=None)
     |      Return a compiled regular expression matching on redirect pages.
     |      
     |      Group 1 in the regex match object will be the target title.
     |  
     |  sametitle(self, title1, title2)
     |      Return True if title1 and title2 identify the same wiki page.
     |      
     |      title1 and title2 may be unequal but still identify the same page,
     |      if they use different aliases for the same namespace.
     |  
     |  special_namespace(self)
     |      Deprecated; use namespaces.SPECIAL.custom_name instead.
     |      
     |      Return local name for the Special: namespace.
     |  
     |  template_namespace(self)
     |      Deprecated; use namespaces.TEMPLATE.custom_name instead.
     |      
     |      Return local name for the Template namespace.
     |  
     |  unlock_page(self, page)
     |      Unlock page. Call as soon as a write operation has completed.
     |      
     |      @param page: the page to be locked
     |      @type page: pywikibot.Page
     |  
     |  urlEncode(self, query)
     |      DEPRECATED.
     |  
     |  user(self)
     |      Return the currently-logged in bot username, or None.
     |  
     |  username(self, sysop=False)
     |      Return the username/sysopname used for the site.
     |  
     |  validLanguageLinks(self)
     |      Return list of language codes to be used in interwiki links.
     |  
     |  ----------------------------------------------------------------------
     |  Data descriptors defined here:
     |  
     |  code
     |      The identifying code for this Site equal to the wiki prefix.
     |      
     |      By convention, this is usually an ISO language code, but it does
     |      not have to be.
     |  
     |  doc_subpage
     |      Return the documentation subpage for this Site.
     |      
     |      @rtype: tuple
     |  
     |  family
     |      The Family object for this Site's wiki family.
     |  
     |  lang
     |      The ISO language code for this Site.
     |      
     |      Presumed to be equal to the site code, but this can be overridden.
     |  
     |  namespaces
     |      Return dict of valid namespaces on this wiki.
     |  
     |  nocapitalize
     |      Return whether this site's default title case is case-sensitive.
     |      
     |      DEPRECATED.
     |  
     |  sitename
     |      String representing this Site's name and code.
     |  
     |  throttle
     |      Return this Site's throttle. Initialize a new one if needed.
     |  
     |  ----------------------------------------------------------------------
     |  Methods inherited from pywikibot.tools.ComparableMixin:
     |  
     |  __eq__(self, other)
     |      Compare if self is equal to other.
     |  
     |  __ge__(self, other)
     |      Compare if self is greater equals other.
     |  
     |  __gt__(self, other)
     |      Compare if self is greater than other.
     |  
     |  __le__(self, other)
     |      Compare if self is less equals other.
     |  
     |  __lt__(self, other)
     |      Compare if self is less than other.
     |  
     |  __ne__(self, other)
     |      Compare if self is not equal to other.
     |  
     |  ----------------------------------------------------------------------
     |  Data descriptors inherited from pywikibot.tools.ComparableMixin:
     |  
     |  __dict__
     |      dictionary for instance variables (if defined)
     |  
     |  __weakref__
     |      list of weak references to the object (if defined)
    
    class DataSite(APISite)
     |  Wikibase data capable site.
     |  
     |  Method resolution order:
     |      DataSite
     |      APISite
     |      BaseSite
     |      pywikibot.tools.ComparableMixin
     |      builtins.object
     |  
     |  Methods defined here:
     |  
     |  __getattr__(self, attr)
     |      Provide data access methods.
     |      
     |      Methods provided are get_info, get_sitelinks, get_aliases,
     |      get_labels, get_descriptions, and get_urls.
     |  
     |  __init__(self, *args, **kwargs)
     |      Initializer.
     |  
     |  addClaim(self, item, claim, bot=True, summary=None)
     |      Add a claim.
     |      
     |      @param item: Entity to modify
     |      @type item: WikibasePage
     |      @param claim: Claim to be added
     |      @type claim: pywikibot.Claim
     |      @param bot: Whether to mark the edit as a bot edit
     |      @type bot: bool
     |      @param summary: Edit summary
     |      @type summary: str
     |  
     |  changeClaimTarget(self, claim, snaktype='value', bot=True, summary=None)
     |      Set the claim target to the value of the provided claim target.
     |      
     |      @param claim: The source of the claim target value
     |      @type claim: pywikibot.Claim
     |      @param snaktype: An optional snaktype. Default: 'value'
     |      @type snaktype: str ('value', 'novalue' or 'somevalue')
     |      @param bot: Whether to mark the edit as a bot edit
     |      @type bot: bool
     |      @param summary: Edit summary
     |      @type summary: str
     |  
     |  createNewItemFromPage(self, page, bot=True, **kwargs)
     |      Create a new Wikibase item for a provided page.
     |      
     |      @param page: page to fetch links from
     |      @type page: pywikibot.Page
     |      @param bot: Whether to mark the edit as a bot edit
     |      @type bot: bool
     |      @return: pywikibot.ItemPage of newly created item
     |      @rtype: pywikibot.ItemPage
     |  
     |  data_repository(self)
     |      Override parent method.
     |      
     |      This avoids pointless API queries since the data repository
     |      is this site by definition.
     |      
     |      @return: this Site object
     |      @rtype: DataSite
     |  
     |  editEntity(self, identification, data, bot=True, **kwargs)
     |      Edit entity.
     |      
     |      @param identification: API parameters to use for entity identification
     |      @type identification: dict
     |      @param data: data updates
     |      @type data: dict
     |      @param bot: Whether to mark the edit as a bot edit
     |      @type bot: bool
     |      @return: New entity data
     |      @rtype: dict
     |  
     |  editQualifier(self, claim, qualifier, new=False, bot=True, summary=None, baserevid=None)
     |      Create/Edit a qualifier.
     |      
     |      @param claim: A Claim object to add the qualifier to
     |      @type claim: pywikibot.Claim
     |      @param qualifier: A Claim object to be used as a qualifier
     |      @type qualifier: pywikibot.Claim
     |      @param bot: Whether to mark the edit as a bot edit
     |      @type bot: bool
     |      @param summary: Edit summary
     |      @type summary: str
     |      @param baserevid: Base revision id override, used to detect conflicts.
     |          When omitted, revision of claim.on_item is used. DEPRECATED.
     |      @type baserevid: long
     |  
     |  editSource(self, claim, source, new=False, bot=True, summary=None, baserevid=None)
     |      Create/Edit a source.
     |      
     |      @param claim: A Claim object to add the source to
     |      @type claim: pywikibot.Claim
     |      @param source: A Claim object to be used as a source
     |      @type source: pywikibot.Claim
     |      @param new: Whether to create a new one if the "source" already exists
     |      @type new: bool
     |      @param bot: Whether to mark the edit as a bot edit
     |      @type bot: bool
     |      @param summary: Edit summary
     |      @type summary: str
     |      @param baserevid: Base revision id override, used to detect conflicts.
     |          When omitted, revision of claim.on_item is used. DEPRECATED.
     |      @type baserevid: long
     |  
     |  geo_shape_repository(self)
     |      Return Site object for the geo-shapes repository e.g. commons.
     |  
     |  getPropertyType(self, prop)
     |      Obtain the type of a property.
     |      
     |      This is used specifically because we can cache
     |      the value for a much longer time (near infinite).
     |  
     |  get_item(self, source, **params)
     |      Deprecated; use pywikibot.ItemPage instead.
     |      
     |      Get the data for multiple Wikibase items.
     |  
     |  linkTitles(self, page1, page2, bot=True)
     |      Link two pages together.
     |      
     |      @param page1: First page to link
     |      @type page1: pywikibot.Page
     |      @param page2: Second page to link
     |      @type page2: pywikibot.Page
     |      @param bot: Whether to mark the edit as a bot edit
     |      @type bot: bool
     |      @return: dict API output
     |      @rtype: dict
     |  
     |  loadcontent(self, identification, *props)
     |      Fetch the current content of a Wikibase item.
     |      
     |      This is called loadcontent since
     |      wbgetentities does not support fetching old
     |      revisions. Eventually this will get replaced by
     |      an actual loadrevisions.
     |      
     |      @param identification: Parameters used to identify the page(s)
     |      @type identification: dict
     |      @param props: the optional properties to fetch.
     |  
     |  mergeItems(self, from_item, to_item, ignore_conflicts=None, summary=None, bot=True, ignoreconflicts='[deprecated name of ignore_conflicts]', fromItem='[deprecated name of from_item]', toItem='[deprecated name of to_item]')
     |      Merge two items together.
     |      
     |      @param from_item: Item to merge from
     |      @type from_item: pywikibot.ItemPage
     |      @param to_item: Item to merge into
     |      @type to_item: pywikibot.ItemPage
     |      @param ignore_conflicts: Which type of conflicts
     |          ('description', 'sitelink', and 'statement')
     |          should be ignored
     |      @type ignore_conflicts: list of str
     |      @param summary: Edit summary
     |      @type summary: str
     |      @param bot: Whether to mark the edit as a bot edit
     |      @type bot: bool
     |      @return: dict API output
     |      @rtype: dict
     |  
     |  preload_entities(self, pagelist, groupsize=50)
     |      Yield subclasses of WikibasePage with content prefilled.
     |      
     |      Note that pages will be iterated in a different order
     |      than in the underlying pagelist.
     |      
     |      @param pagelist: an iterable that yields either WikibasePage objects,
     |                       or Page objects linked to an ItemPage.
     |      @param groupsize: how many pages to query at a time
     |      @type groupsize: int
     |  
     |  preloaditempages(self, pagelist, groupsize=50)
     |      DEPRECATED.
     |  
     |  removeClaims(self, claims, bot=True, summary=None, baserevid=None)
     |      Remove claims.
     |      
     |      @param claims: Claims to be removed
     |      @type claims: List[pywikibot.Claim]
     |      @param bot: Whether to mark the edit as a bot edit
     |      @type bot: bool
     |      @param summary: Edit summary
     |      @type summary: str
     |      @param baserevid: Base revision id override, used to detect conflicts.
     |          When omitted, revision of claim.on_item is used. DEPRECATED.
     |      @type baserevid: long
     |  
     |  removeSources(self, claim, sources, bot=True, summary=None, baserevid=None)
     |      Remove sources.
     |      
     |      @param claim: A Claim object to remove the sources from
     |      @type claim: pywikibot.Claim
     |      @param sources: A list of Claim objects that are sources
     |      @type sources: List[pywikibot.Claim]
     |      @param bot: Whether to mark the edit as a bot edit
     |      @type bot: bool
     |      @param summary: Edit summary
     |      @type summary: str
     |      @param baserevid: Base revision id override, used to detect conflicts.
     |          When omitted, revision of claim.on_item is used. DEPRECATED.
     |      @type baserevid: long
     |  
     |  remove_qualifiers(self, claim, qualifiers, bot=True, summary=None, baserevid=None)
     |      Remove qualifiers.
     |      
     |      @param claim: A Claim object to remove the qualifier from
     |      @type claim: pywikibot.Claim
     |      @param qualifiers: Claim objects currently used as a qualifiers
     |      @type qualifiers: List[pywikibot.Claim]
     |      @param bot: Whether to mark the edit as a bot edit
     |      @type bot: bool
     |      @param summary: Edit summary
     |      @type summary: str
     |      @param baserevid: Base revision id override, used to detect conflicts.
     |          When omitted, revision of claim.on_item is used. DEPRECATED.
     |      @type baserevid: long
     |  
     |  save_claim(self, claim, summary=None, bot=True)
     |      Save the whole claim to the wikibase site.
     |      
     |      @param claim: The claim to save
     |      @type claim: pywikibot.Claim
     |      @param bot: Whether to mark the edit as a bot edit
     |      @type bot: bool
     |      @param summary: Edit summary
     |      @type summary: str
     |  
     |  search_entities(self, search, language, total=None, **kwargs, limit='[deprecated name of total]')
     |      Search for pages or properties that contain the given text.
     |      
     |      @param search: Text to find.
     |      @type search: str
     |      @param language: Language to search in.
     |      @type language: str
     |      @param total: Maximum number of pages to retrieve in total, or None in
     |          case of no limit.
     |      @type total: int or None
     |      @return: 'search' list from API output.
     |      @rtype: api.APIGenerator
     |  
     |  set_redirect_target(self, from_item, to_item)
     |      Make a redirect to another item.
     |      
     |      @param to_item: title of target item.
     |      @type to_item: pywikibot.ItemPage
     |      @param from_item: Title of the item to be redirected.
     |      @type from_item: pywikibot.ItemPage
     |  
     |  tabular_data_repository(self)
     |      Return Site object for the tabular-datas repository e.g. commons.
     |  
     |  ----------------------------------------------------------------------
     |  Data descriptors defined here:
     |  
     |  concept_base_uri
     |      Return the base uri for concepts/entities.
     |      
     |      @return: concept base uri
     |      @rtype: str
     |  
     |  item_namespace
     |      Return namespace for items.
     |      
     |      @return: item namespace
     |      @rtype: Namespace
     |  
     |  property_namespace
     |      Return namespace for properties.
     |      
     |      @return: property namespace
     |      @rtype: Namespace
     |  
     |  sparql_endpoint
     |      Return the sparql endpoint url, if any has been set.
     |      
     |      @return: sparql endpoint url
     |      @rtype: str|None
     |  
     |  ----------------------------------------------------------------------
     |  Methods inherited from APISite:
     |  
     |  __getstate__(self)
     |      Remove TokenWallet before pickling, for security reasons.
     |  
     |  __setstate__(self, attrs)
     |      Restore things removed in __getstate__.
     |  
     |  allcategories(self, start='!', prefix='', total=None, reverse=False, content=False, step=NotImplemented)
     |      Iterate categories used (which need not have a Category page).
     |      
     |      Iterator yields Category objects. Note that, in practice, links that
     |      were found on pages that have been deleted may not have been removed
     |      from the database table, so this method can return false positives.
     |      
     |      @param start: Start at this category title (category need not exist).
     |      @param prefix: Only yield categories starting with this string.
     |      @param reverse: if True, iterate in reverse Unicode lexicographic
     |          order (default: iterate in forward order)
     |      @param content: if True, load the current content of each iterated page
     |          (default False); note that this means the contents of the category
     |          description page, not the pages that are members of the category
     |  
     |  allimages(self, start='!', prefix='', minsize=None, maxsize=None, reverse=False, sha1=None, sha1base36=None, total=None, content=False, step=NotImplemented)
     |      Iterate all images, ordered by image title.
     |      
     |      Yields FilePages, but these pages need not exist on the wiki.
     |      
     |      @param start: start at this title (name need not exist)
     |      @param prefix: only iterate titles starting with this substring
     |      @param minsize: only iterate images of at least this many bytes
     |      @param maxsize: only iterate images of no more than this many bytes
     |      @param reverse: if True, iterate in reverse lexicographic order
     |      @param sha1: only iterate image (it is theoretically possible there
     |          could be more than one) with this sha1 hash
     |      @param sha1base36: same as sha1 but in base 36
     |      @param content: if True, load the current content of each iterated page
     |          (default False); note that this means the content of the image
     |          description page, not the image itself
     |  
     |  alllinks(self, start='!', prefix='', namespace=0, unique=False, fromids=False, total=None, step=NotImplemented)
     |      Iterate all links to pages (which need not exist) in one namespace.
     |      
     |      Note that, in practice, links that were found on pages that have
     |      been deleted may not have been removed from the links table, so this
     |      method can return false positives.
     |      
     |      @param start: Start at this title (page need not exist).
     |      @param prefix: Only yield pages starting with this string.
     |      @param namespace: Iterate pages from this (single) namespace
     |      @type namespace: int or Namespace
     |      @param unique: If True, only iterate each link title once (default:
     |          iterate once for each linking page)
     |      @param fromids: if True, include the pageid of the page containing
     |          each link (default: False) as the '_fromid' attribute of the Page;
     |          cannot be combined with unique
     |      @raises KeyError: the namespace identifier was not resolved
     |      @raises TypeError: the namespace identifier has an inappropriate
     |          type such as bool, or an iterable with more than one namespace
     |  
     |  allpages(self, start='!', prefix='', namespace=0, filterredir=None, filterlanglinks=None, minsize=None, maxsize=None, protect_type=None, protect_level=None, reverse=False, total=None, content=False, throttle=NotImplemented, limit='[deprecated name of total]', step=NotImplemented, includeredirects='[deprecated name of filterredir]')
     |      Iterate pages in a single namespace.
     |      
     |      @param start: Start at this title (page need not exist).
     |      @param prefix: Only yield pages starting with this string.
     |      @param namespace: Iterate pages from this (single) namespace
     |      @type namespace: int or Namespace.
     |      @param filterredir: if True, only yield redirects; if False (and not
     |          None), only yield non-redirects (default: yield both)
     |      @param filterlanglinks: if True, only yield pages with language links;
     |          if False (and not None), only yield pages without language links
     |          (default: yield both)
     |      @param minsize: if present, only yield pages at least this many
     |          bytes in size
     |      @param maxsize: if present, only yield pages at most this many bytes
     |          in size
     |      @param protect_type: only yield pages that have a protection of the
     |          specified type
     |      @type protect_type: str
     |      @param protect_level: only yield pages that have protection at this
     |          level; can only be used if protect_type is specified
     |      @param reverse: if True, iterate in reverse Unicode lexicographic
     |          order (default: iterate in forward order)
     |      @param content: if True, load the current content of each iterated page
     |          (default False)
     |      @raises KeyError: the namespace identifier was not resolved
     |      @raises TypeError: the namespace identifier has an inappropriate
     |          type such as bool, or an iterable with more than one namespace
     |  
     |  allusers(self, start='!', prefix='', group=None, total=None, step=NotImplemented)
     |      Iterate registered users, ordered by username.
     |      
     |      Iterated values are dicts containing 'name', 'editcount',
     |      'registration', and (sometimes) 'groups' keys. 'groups' will be
     |      present only if the user is a member of at least 1 group, and will
     |      be a list of unicodes; all the other values are unicodes and should
     |      always be present.
     |      
     |      @param start: start at this username (name need not exist)
     |      @param prefix: only iterate usernames starting with this substring
     |      @param group: only iterate users that are members of this group
     |      @type group: str
     |  
     |  ancientpages(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Pages, datestamps from Special:Ancientpages.
     |      
     |      @param total: number of pages to return
     |  
     |  assert_valid_iter_params(self, msg_prefix, start, end, reverse)
     |      Validate iterating API parameters.
     |  
     |  blocks(self, starttime=None, endtime=None, reverse=False, blockids=None, users=None, iprange=None, total=None, step=NotImplemented)
     |      Iterate all current blocks, in order of creation.
     |      
     |      The iterator yields dicts containing keys corresponding to the
     |      block properties.
     |      
     |      @see: U{https://www.mediawiki.org/wiki/API:Blocks}
     |      
     |      @note: logevents only logs user blocks, while this method
     |          iterates all blocks including IP ranges.
     |      @note: C{userid} key will be given for mw 1.18+ only
     |      @note: C{iprange} parameter cannot be used together with C{users}.
     |      
     |      @param starttime: start iterating at this Timestamp
     |      @type starttime: pywikibot.Timestamp
     |      @param endtime: stop iterating at this Timestamp
     |      @type endtime: pywikibot.Timestamp
     |      @param reverse: if True, iterate oldest blocks first (default: newest)
     |      @type reverse: bool
     |      @param blockids: only iterate blocks with these id numbers. Numbers
     |          must be separated by '|' if given by a basestring.
     |      @type blockids: basestring, tuple or list
     |      @param users: only iterate blocks affecting these usernames or IPs
     |      @type users: basestring, tuple or list
     |      @param iprange: a single IP or an IP range. Ranges broader than
     |          IPv4/16 or IPv6/19 are not accepted.
     |      @type iprange: str
     |      @param total: total amount of block entries
     |      @type total: int
     |  
     |  blockuser(self, user, expiry, reason, anononly=True, nocreate=True, autoblock=True, noemail=False, reblock=False)
     |      Block a user for certain amount of time and for a certain reason.
     |      
     |      @param user: The username/IP to be blocked without a namespace.
     |      @type user: L{pywikibot.User}
     |      @param expiry: The length or date/time when the block expires. If
     |          'never', 'infinite', 'indefinite' it never does. If the value is
     |          given as a basestring it's parsed by php's strtotime function:
     |      
     |              U{http://php.net/manual/en/function.strtotime.php}
     |      
     |          The relative format is described there:
     |      
     |              U{http://php.net/manual/en/datetime.formats.relative.php}
     |      
     |          It is recommended to not use a basestring if possible to be
     |          independent of the API.
     |      @type expiry: Timestamp/datetime (absolute),
     |          basestring (relative/absolute) or False ('never')
     |      @param reason: The reason for the block.
     |      @type reason: basestring
     |      @param anononly: Disable anonymous edits for this IP.
     |      @type anononly: boolean
     |      @param nocreate: Prevent account creation.
     |      @type nocreate: boolean
     |      @param autoblock: Automatically block the last used IP address and all
     |          subsequent IP addresses from which this account logs in.
     |      @type autoblock: boolean
     |      @param noemail: Prevent user from sending email through the wiki.
     |      @type noemail: boolean
     |      @param reblock: If the user is already blocked, overwrite the existing
     |          block.
     |      @type reblock: boolean
     |      @return: The data retrieved from the API request.
     |      @rtype: dict
     |  
     |  botusers(self, total=None, step=NotImplemented)
     |      Iterate bot users.
     |      
     |      Iterated values are dicts containing 'name', 'userid', 'editcount',
     |      'registration', and 'groups' keys. 'groups' will be present only if
     |      the user is a member of at least 1 group, and will be a list of
     |      unicodes; all the other values are unicodes and should always be
     |      present.
     |  
     |  broken_redirects(self, total=None, step=NotImplemented)
     |      Yield Pages with broken redirects from Special:BrokenRedirects.
     |      
     |      @param total: number of pages to return
     |  
     |  case(self)
     |      Deprecated; use siteinfo or Namespace instance instead.
     |      
     |      Return this site's capitalization rule.
     |  
     |  categories(self, number=10, repeat=False)
     |      DEPRECATED.
     |  
     |  categoryinfo(self, category)
     |      Retrieve data on contents of category.
     |  
     |  categorymembers(self, category, namespaces=None, sortby=None, reverse=False, starttime=None, endtime=None, startsort=None, endsort=None, total=None, content=False, member_type=None, startprefix=None, endprefix=None, step=NotImplemented)
     |      Iterate members of specified category.
     |      
     |      @param category: The Category to iterate.
     |      @param namespaces: If present, only return category members from
     |          these namespaces. To yield subcategories or files, use
     |          parameter member_type instead.
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param sortby: determines the order in which results are generated,
     |          valid values are "sortkey" (default, results ordered by category
     |          sort key) or "timestamp" (results ordered by time page was
     |          added to the category)
     |      @type sortby: str
     |      @param reverse: if True, generate results in reverse order
     |          (default False)
     |      @param starttime: if provided, only generate pages added after this
     |          time; not valid unless sortby="timestamp"
     |      @type starttime: pywikibot.Timestamp
     |      @param endtime: if provided, only generate pages added before this
     |          time; not valid unless sortby="timestamp"
     |      @param startsort: if provided, only generate pages that have a
     |          sortkey >= startsort; not valid if sortby="timestamp"
     |          (Deprecated in MW 1.24)
     |      @type startsort: str
     |      @param endsort: if provided, only generate pages that have a
     |          sortkey <= endsort; not valid if sortby="timestamp"
     |          (Deprecated in MW 1.24)
     |      @type endsort: str
     |      @param startprefix: if provided, only generate pages >= this title
     |          lexically; not valid if sortby="timestamp"; overrides "startsort"
     |          (requires MW 1.18+)
     |      @type startprefix: str
     |      @param endprefix: if provided, only generate pages < this title
     |          lexically; not valid if sortby="timestamp"; overrides "endsort"
     |          (requires MW 1.18+)
     |      @type endprefix: str
     |      @param content: if True, load the current content of each iterated page
     |          (default False)
     |      @type content: bool
     |      @param member_type: member type; if member_type includes 'page' and is
     |          used in conjunction with sortby="timestamp", the API may limit
     |          results to only pages in the first 50 namespaces.
     |      @type member_type: str or iterable of str; values: page, subcat, file
     |      
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises NotImplementedError: startprefix or endprefix parameters are
     |          given but site.version is less than 1.18.
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  checkBlocks(self, sysop=False)
     |      Raise an exception when the user is blocked. DEPRECATED.
     |      
     |      @param sysop: If true, log in to sysop account (if available)
     |      @type sysop: bool
     |      @raises UserBlocked: The logged in user/sysop account is blocked.
     |  
     |  compare(self, old, diff)
     |      Corresponding method to the 'action=compare' API action.
     |      
     |      See: https://en.wikipedia.org/w/api.php?action=help&modules=compare
     |      Use pywikibot.diff's html_comparator() method to parse result.
     |      @param old: starting revision ID, title, Page, or Revision
     |      @type old: int, str, pywikibot.Page, or pywikibot.Page.Revision
     |      @param diff: ending revision ID, title, Page, or Revision
     |      @type diff: int, str, pywikibot.Page, or pywikibot.Page.Revision
     |      @return: Returns an HTML string of a diff between two revisions.
     |      @rtype: str
     |  
     |  create_new_topic(self, page, title, content, format)
     |      Create a new topic on a Flow board.
     |      
     |      @param page: A Flow board
     |      @type page: Board
     |      @param title: The title of the new topic (must be in plaintext)
     |      @type title: unicode
     |      @param content: The content of the topic's initial post
     |      @type content: unicode
     |      @param format: The content format of the value supplied for content
     |      @type format: unicode (either 'wikitext' or 'html')
     |      @return: The metadata of the new topic
     |      @rtype: dict
     |  
     |  dbName(self)
     |      Return this site's internal id.
     |  
     |  deadendpages(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Page objects retrieved from Special:Deadendpages.
     |      
     |      @param total: number of pages to return
     |  
     |  delete_post(self, post, reason)
     |      Delete a Flow post.
     |      
     |      @param post: A Flow post
     |      @type post: Post
     |      @param reason: The reason to delete the post
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  delete_topic(self, page, reason)
     |      Delete a Flow topic.
     |      
     |      @param page: A Flow topic
     |      @type page: Topic
     |      @param reason: The reason to delete the topic
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  deletedrevs(self, page, start=None, end=None, reverse=None, content=False, total=None, step=NotImplemented, get_text='[deprecated name of content]')
     |      Iterate deleted revisions.
     |      
     |      Each value returned by the iterator will be a dict containing the
     |      'title' and 'ns' keys for a particular Page and a 'revisions' key
     |      whose value is a list of revisions in the same format as
     |      recentchanges (plus a 'content' element if requested). If get_text
     |      is true, the toplevel dict will contain a 'token' key as well.
     |      
     |      @param page: The page to check for deleted revisions
     |      @param start: Iterate revisions starting at this Timestamp
     |      @param end: Iterate revisions ending at this Timestamp
     |      @param reverse: Iterate oldest revisions first (default: newest)
     |      @param content: If True, retrieve the content of each revision and
     |          an undelete token
     |  
     |  deletepage(self, page, reason, summary='[deprecated name of reason]')
     |      Delete page from the wiki. Requires appropriate privilege level.
     |      
     |      @param page: Page to be deleted.
     |      @type page: Page
     |      @param reason: Deletion reason.
     |      @type reason: basestring
     |  
     |  double_redirects(self, total=None, step=NotImplemented)
     |      Yield Pages with double redirects from Special:DoubleRedirects.
     |      
     |      @param total: number of pages to return
     |  
     |  editpage(self, page, summary=None, minor=True, notminor=False, bot=True, recreate=True, createonly=False, nocreate=False, watch=None, **kwargs)
     |      Submit an edit to be saved to the wiki.
     |      
     |      @param page: The Page to be saved.
     |          By default its .text property will be used
     |          as the new text to be saved to the wiki
     |      @param summary: the edit summary
     |      @param minor: if True (default), mark edit as minor
     |      @param notminor: if True, override account preferences to mark edit
     |          as non-minor
     |      @param recreate: if True (default), create new page even if this
     |          title has previously been deleted
     |      @param createonly: if True, raise an error if this title already
     |          exists on the wiki
     |      @param nocreate: if True, raise an error if the page does not exist
     |      @param watch: Specify how the watchlist is affected by this edit, set
     |          to one of "watch", "unwatch", "preferences", "nochange":
     |          * watch: add the page to the watchlist
     |          * unwatch: remove the page from the watchlist
     |          The following settings are supported by mw >= 1.16 only
     |          * preferences: use the preference settings (default)
     |          * nochange: don't change the watchlist
     |      @param bot: if True, mark edit with bot flag
     |      @kwarg text: Overrides Page.text
     |      @type text: unicode
     |      @kwarg section: Edit an existing numbered section or
     |          a new section ('new')
     |      @type section: int or str
     |      @kwarg prependtext: Prepend text. Overrides Page.text
     |      @type text: unicode
     |      @kwarg appendtext: Append text. Overrides Page.text.
     |      @type text: unicode
     |      @kwarg undo: Revision id to undo. Overrides Page.text
     |      @type undo: int
     |      @return: True if edit succeeded, False if it failed
     |      @rtype: bool
     |      @raises Error: No text to be saved
     |      @raises NoPage: recreate is disabled and page does not exist
     |      @raises CaptchaError: config.solve_captcha is False and saving
     |          the page requires solving a captcha
     |  
     |  expand_text(self, text, title=None, includecomments=None, string='[deprecated name of text]')
     |      Parse the given text for preprocessing and rendering.
     |      
     |      e.g. expand templates and strip comments if includecomments
     |      parameter is not True. Keeps text inside
     |      <nowiki></nowiki> tags unchanged etc. Can be used to parse
     |      magic parser words like {{CURRENTTIMESTAMP}}.
     |      
     |      @param text: text to be expanded
     |      @type text: unicode
     |      @param title: page title without section
     |      @type title: unicode
     |      @param includecomments: if True do not strip comments
     |      @type includecomments: bool
     |      @rtype: unicode
     |  
     |  exturlusage(self, url=None, protocol='http', namespaces=None, total=None, content=False, step=NotImplemented)
     |      Iterate Pages that contain links to the given URL.
     |      
     |      @param url: The URL to search for (without the protocol prefix);
     |          this may include a '*' as a wildcard, only at the start of the
     |          hostname
     |      @param protocol: The protocol prefix (default: "http")
     |  
     |  forceLogin = call(*a, **kw)
     |  
     |  getExpandedString = call(*a, **kw)
     |  
     |  getFilesFromAnHash(self, hash_found=None)
     |      Return all files that have the same hash.
     |      
     |      DEPRECATED: Use L{APISite.allimages} instead using 'sha1'.
     |  
     |  getImagesFromAnHash(self, hash_found=None)
     |      Return all images that have the same hash.
     |      
     |      DEPRECATED: Use L{APISite.allimages} instead using 'sha1'.
     |  
     |  getPatrolToken(self, sysop=False)
     |      DEPRECATED: Get patrol token.
     |  
     |  getToken(self, getalways=True, getagain=False, sysop=False)
     |      DEPRECATED: Get edit token.
     |  
     |  get_parsed_page(self, page)
     |      Retrieve parsed text of the page using action=parse.
     |  
     |  get_property_names(self, force=False)
     |      Get property names for pages_with_property().
     |      
     |      @param force: force to retrieve userinfo ignoring cache
     |      @type force: bool
     |  
     |  get_searched_namespaces(self, force=False)
     |      Retrieve the default searched namespaces for the user.
     |      
     |      If no user is logged in, it returns the namespaces used by default.
     |      Otherwise it returns the user preferences. It caches the last result
     |      and returns it, if the username or login status hasn't changed.
     |      
     |      @param force: Whether the cache should be discarded.
     |      @return: The namespaces which are searched by default.
     |      @rtype: C{set} of L{Namespace}
     |  
     |  get_tokens(self, types, all=False)
     |      Preload one or multiple tokens.
     |      
     |      For all MediaWiki versions prior to 1.20, only one token can be
     |      retrieved at once.
     |      For MediaWiki versions since 1.24wmfXXX a new token
     |      system was introduced which reduced the amount of tokens available.
     |      Most of them were merged into the 'csrf' token. If the token type in
     |      the parameter is not known it will default to the 'csrf' token.
     |      
     |      The other token types available are:
     |       - deleteglobalaccount
     |       - patrol (*)
     |       - rollback
     |       - setglobalaccountstatus
     |       - userrights
     |       - watch
     |      
     |       (*) Patrol was added in v1.14.
     |           Until v1.16, the patrol token is same as the edit token.
     |           For v1.17-19, the patrol token must be obtained from the query
     |           list recentchanges.
     |      
     |      @param types: the types of token (e.g., "edit", "move", "delete");
     |          see API documentation for full list of types
     |      @type types: iterable
     |      @param all: load all available tokens, if None only if it can be done
     |          in one request.
     |      @type all: bool
     |      
     |      @return: a dict with retrieved valid tokens.
     |      @rtype: dict
     |  
     |  getcategoryinfo(self, category)
     |      Retrieve data on contents of category.
     |  
     |  getcurrenttime = call(*a, **kw)
     |  
     |  getcurrenttimestamp(self)
     |      Return the server time as a MediaWiki timestamp string.
     |      
     |      It calls L{server_time} first so it queries the server to get the
     |      current server time.
     |      
     |      @return: the server time
     |      @rtype: str (as 'yyyymmddhhmmss')
     |  
     |  getglobaluserinfo(self)
     |      Retrieve globaluserinfo from site and cache it.
     |      
     |      self._globaluserinfo will be a dict with the following keys and values:
     |      
     |        - id: user id (numeric str)
     |        - home: dbname of home wiki
     |        - registration: registration date as Timestamp
     |        - groups: list of groups (could be empty)
     |        - rights: list of rights (could be empty)
     |        - editcount: global editcount
     |  
     |  getmagicwords(self, word)
     |      Return list of localized "word" magic words for the site.
     |  
     |  getredirtarget(self, page)
     |      Return page object for the redirect target of page.
     |      
     |      @param page: page to search redirects for
     |      @type page: pywikibot.page.BasePage
     |      @return: redirect target of page
     |      @rtype: pywikibot.Page
     |      
     |      @raises IsNotRedirectPage: page is not a redirect
     |      @raises RuntimeError: no redirects found
     |      @raises CircularRedirect: page is a circular redirect
     |      @raises InterwikiRedirectPage: the redirect target is
     |          on another site
     |  
     |  getuserinfo(self, force=False)
     |      Retrieve userinfo from site and store in _userinfo attribute.
     |      
     |      self._userinfo will be a dict with the following keys and values:
     |      
     |        - id: user id (numeric str)
     |        - name: username (if user is logged in)
     |        - anon: present if user is not logged in
     |        - groups: list of groups (could be empty)
     |        - rights: list of rights (could be empty)
     |        - message: present if user has a new message on talk page
     |        - blockinfo: present if user is blocked (dict)
     |      
     |      U{https://www.mediawiki.org/wiki/API:Userinfo}
     |      
     |      @param force: force to retrieve userinfo ignoring cache
     |      @type force: bool
     |  
     |  globalusage(self, page, total=None)
     |      Iterate global image usage for a given FilePage.
     |      
     |      @param page: the page to return global image usage for.
     |      @type page: pywikibot.FilePage
     |      @param total: iterate no more than this number of pages in total.
     |      @raises TypeError: input page is not a FilePage.
     |      @raises SiteDefinitionError: Site could not be defined for a returned
     |          entry in API response.
     |  
     |  hasExtension(self, name, unknown=None)
     |      Deprecated; use has_extension instead.
     |      
     |      Determine whether extension `name` is loaded.
     |      
     |              Use L{has_extension} instead!
     |      
     |              @param name: The extension to check for, case insensitive
     |              @type name: str
     |              @param unknown: Old parameter which shouldn't be used anymore.
     |              @return: If the extension is loaded
     |              @rtype: bool
     |  
     |  has_all_mediawiki_messages(self, keys)
     |      Confirm that the site defines a set of MediaWiki messages.
     |      
     |      @param keys: names of MediaWiki messages
     |      @type keys: set of str
     |      
     |      @rtype: bool
     |  
     |  has_api(self)
     |      Deprecated.
     |      
     |      Return whether this site has an API.
     |  
     |  has_extension(self, name)
     |      Determine whether extension `name` is loaded.
     |      
     |      @param name: The extension to check for, case sensitive
     |      @type name: str
     |      @return: If the extension is loaded
     |      @rtype: bool
     |  
     |  has_group(self, group, sysop=False)
     |      Return true if and only if the user is a member of specified group.
     |      
     |      Possible values of 'group' may vary depending on wiki settings,
     |      but will usually include bot.
     |      U{https://www.mediawiki.org/wiki/API:Userinfo}
     |  
     |  has_mediawiki_message(self, key)
     |      Determine if the site defines a MediaWiki message.
     |      
     |      @param key: name of MediaWiki message
     |      @type key: str
     |      
     |      @rtype: bool
     |  
     |  has_right(self, right, sysop=False)
     |      Return true if and only if the user has a specific right.
     |      
     |      Possible values of 'right' may vary depending on wiki settings,
     |      but will usually include:
     |      
     |      * Actions: edit, move, delete, protect, upload
     |      * User levels: autoconfirmed, sysop, bot
     |      
     |      U{https://www.mediawiki.org/wiki/API:Userinfo}
     |  
     |  hide_post(self, post, reason)
     |      Hide a Flow post.
     |      
     |      @param post: A Flow post
     |      @type post: Post
     |      @param reason: The reason to hide the post
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  hide_topic(self, page, reason)
     |      Hide a Flow topic.
     |      
     |      @param page: A Flow topic
     |      @type page: Topic
     |      @param reason: The reason to hide the topic
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  image_repository(self)
     |      Return Site object for image repository e.g. commons.
     |  
     |  imageusage(self, image, namespaces=None, filterredir=None, total=None, content=False, step=NotImplemented)
     |      Iterate Pages that contain links to the given FilePage.
     |      
     |      @param image: the image to search for (FilePage need not exist on
     |          the wiki)
     |      @type image: pywikibot.FilePage
     |      @param namespaces: If present, only iterate pages in these namespaces
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param filterredir: if True, only yield redirects; if False (and not
     |          None), only yield non-redirects (default: yield both)
     |      @param content: if True, load the current content of each iterated page
     |          (default False)
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  isAllowed = call(*a, **kw)
     |  
     |  isBlocked = call(*a, **kw)
     |  
     |  isBot(self, username)
     |      Return True if username is a bot user.
     |  
     |  is_blocked(self, sysop=False)
     |      Return True when logged in user is blocked.
     |      
     |      To check whether a user can perform an action,
     |      the method has_right should be used.
     |      U{https://www.mediawiki.org/wiki/API:Userinfo}
     |      
     |      @param sysop: If true, log in to sysop account (if available)
     |      @type sysop: bool
     |      @rtype: bool
     |  
     |  is_data_repository(self)
     |      Return True if its data repository is itself.
     |  
     |  is_image_repository(self)
     |      Return True if Site object is the image repository.
     |  
     |  is_oauth_token_available(self)
     |      Check whether OAuth token is set for this site.
     |      
     |      @rtype: bool
     |  
     |  is_uploaddisabled(self)
     |      Return True if upload is disabled on site.
     |      
     |      When the version is at least 1.27wmf9, uses general siteinfo.
     |      If not called directly, it is cached by the first attempted
     |      upload action.
     |  
     |  language(self)
     |      Deprecated; use APISite.lang instead.
     |      
     |      Return the code for the language of this Site.
     |  
     |  linksearch(self, siteurl, limit=None, euprotocol=None)
     |      Deprecated; use Site().exturlusage instead.
     |      
     |      Backwards-compatible interface to exturlusage().
     |  
     |  linter_pages(self, lint_categories=None, total=None, namespaces=None, pageids=None, lint_from=None)
     |      Return a generator to pages containing linter errors.
     |      
     |      @param lint_categories: categories of lint errors
     |      @type lint_categories: an iterable that returns values (str),
     |          or a pipe-separated string of values.
     |      
     |      @param total: if not None, yielding this many items in total
     |      @type total: int
     |      
     |      @param namespaces: only iterate pages in these namespaces
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      
     |      @param pageids: only include lint errors from the specified pageids
     |      @type pageids: an iterable that returns pageids (str or int),
     |          or a comma- or pipe-separated string of pageids
     |          (e.g. '945097,1483753, 956608' or '945097|483753|956608')
     |      
     |      @param lint_from: Lint ID to start querying from
     |      @type lint_from: str representing digit or integer
     |      
     |      @return: pages with Linter errors.
     |      @rtype: Iterable[pywikibot.Page]
     |  
     |  list_to_text(self, args)
     |      Convert a list of strings into human-readable text.
     |      
     |      The MediaWiki messages 'and' and 'word-separator' are used as separator
     |      between the last two arguments.
     |      If more than two arguments are given, other arguments are
     |      joined using MediaWiki message 'comma-separator'.
     |      
     |      @param args: text to be expanded
     |      @type args: iterable of unicode
     |      
     |      @rtype: unicode
     |  
     |  live_version(self, force=False)
     |      Deprecated; use version() instead.
     |      
     |      Return the 'real' version number found on [[Special:Version]].
     |      
     |              By default the version number is cached for one day.
     |      
     |              @param force: If the version should be read always from the server and
     |                  never from the cache.
     |              @type force: bool
     |              @return: A tuple containing the major, minor version number and any
     |                  text after that. If an error occurred (0, 0, 0) is returned.
     |              @rtype: int, int, str
     |  
     |  load_board(self, page)
     |      Retrieve the data for a Flow board.
     |      
     |      @param page: A Flow board
     |      @type page: Board
     |      @return: A dict representing the board's metadata.
     |      @rtype: dict
     |  
     |  load_pages_from_pageids(self, pageids)
     |      Return a page generator from pageids.
     |      
     |      Pages are iterated in the same order as the underlying pageids.
     |      
     |      Pageids are filtered and only one page is returned in case of
     |      duplicate pageids.
     |      
     |      @param pageids: an iterable that returns pageids (str or int),
     |          or a comma- or pipe-separated string of pageids
     |          (e.g. '945097,1483753, 956608' or '945097|483753|956608')
     |  
     |  load_post_current_revision(self, page, post_id, format)
     |      Retrieve the data for a post to a Flow topic.
     |      
     |      @param page: A Flow topic
     |      @type page: Topic
     |      @param post_id: The UUID of the Post
     |      @type post_id: unicode
     |      @param format: The content format used for the returned content
     |      @type format: unicode (either 'wikitext', 'html', or 'fixed-html')
     |      @return: A dict representing the post data for the given UUID.
     |      @rtype: dict
     |  
     |  load_topic(self, page, format)
     |      Retrieve the data for a Flow topic.
     |      
     |      @param page: A Flow topic
     |      @type page: Topic
     |      @param format: The content format to request the data in.
     |      @type format: str (either 'wikitext', 'html', or 'fixed-html')
     |      @return: A dict representing the topic's data.
     |      @rtype: dict
     |  
     |  load_topiclist(self, page, format='wikitext', limit=100, sortby='newest', toconly=False, offset=None, offset_id=None, reverse=False, include_offset=False)
     |      Retrieve the topiclist of a Flow board.
     |      
     |      @param page: A Flow board
     |      @type page: Board
     |      @param format: The content format to request the data in.
     |      @type format: str (either 'wikitext', 'html', or 'fixed-html')
     |      @param limit: The number of topics to fetch in each request.
     |      @type limit: int
     |      @param sortby: Algorithm to sort topics by.
     |      @type sortby: str (either 'newest' or 'updated')
     |      @param toconly: Whether to only include information for the TOC.
     |      @type toconly: bool
     |      @param offset: The timestamp to start at (when sortby is 'updated').
     |      @type offset: Timestamp or equivalent str
     |      @param offset_id: The topic UUID to start at (when sortby is 'newest').
     |      @type offset_id: str (in the form of a UUID)
     |      @param reverse: Whether to reverse the topic ordering.
     |      @type reverse: bool
     |      @param include_offset: Whether to include the offset topic.
     |      @type include_offset: bool
     |      @return: A dict representing the board's topiclist.
     |      @rtype: dict
     |  
     |  loadcoordinfo(self, page)
     |      Load [[mw:Extension:GeoData]] info.
     |  
     |  loadflowinfo(self, page)
     |      Deprecated; check the content model instead.
     |      
     |      
     |      Load Flow-related information about a given page.
     |      
     |      Assumes that the Flow extension is installed.
     |      
     |      @raises APIError: Flow extension is not installed
     |  
     |  loadimageinfo(self, page, history=False, url_width=None, url_height=None, url_param=None)
     |      Load image info from api and save in page attributes.
     |      
     |      Parameters correspond to iiprops in:
     |      [1] U{https://www.mediawiki.org/wiki/API:Imageinfo}
     |      
     |      Parameters validation and error handling left to the API call.
     |      
     |      @param history: if true, return the image's version history
     |      @param url_width: see iiurlwidth in [1]
     |      @param url_height: see iiurlheight in [1]
     |      @param url_param: see iiurlparam in [1]
     |  
     |  loadpageimage(self, page)
     |      Load [[mw:Extension:PageImages]] info.
     |      
     |      @param page: The page for which to obtain the image
     |      @type page: pywikibot.Page
     |      
     |      @raises APIError: PageImages extension is not installed
     |  
     |  loadpageinfo(self, page, preload=False)
     |      Load page info from api and store in page attributes.
     |  
     |  loadpageprops(self, page)
     |      Load page props for the given page.
     |  
     |  loadrevisions(self, page, content=False, revids=None, startid=None, endid=None, starttime=None, endtime=None, rvdir=None, user=None, excludeuser=None, section=None, sysop=False, step=None, total=None, rollback=False, getText='[deprecated name of content]')
     |      Retrieve revision information and store it in page object.
     |      
     |      By default, retrieves the last (current) revision of the page,
     |      unless any of the optional parameters revids, startid, endid,
     |      starttime, endtime, rvdir, user, excludeuser, or limit are
     |      specified. Unless noted below, all parameters not specified
     |      default to False.
     |      
     |      If rvdir is False or not specified, startid must be greater than
     |      endid if both are specified; likewise, starttime must be greater
     |      than endtime. If rvdir is True, these relationships are reversed.
     |      
     |      @param page: retrieve revisions of this Page and hold the data.
     |      @type page: pywikibot.Page
     |      @param content: if True, retrieve the wiki-text of each revision;
     |          otherwise, only retrieve the revision metadata (default)
     |      @type content: bool
     |      @param section: if specified, retrieve only this section of the text
     |          (content must be True); section must be given by number (top of
     |          the article is section 0), not name
     |      @type section: int
     |      @param revids: retrieve only the specified revision ids (raise
     |          Exception if any of revids does not correspond to page)
     |      @type revids: an int, a str or a list of ints or strings
     |      @param startid: retrieve revisions starting with this revid
     |      @param endid: stop upon retrieving this revid
     |      @param starttime: retrieve revisions starting at this Timestamp
     |      @param endtime: stop upon reaching this Timestamp
     |      @param rvdir: if false, retrieve newest revisions first (default);
     |          if true, retrieve earliest first
     |      @param user: retrieve only revisions authored by this user
     |      @param excludeuser: retrieve all revisions not authored by this user
     |      @param sysop: if True, switch to sysop account (if available) to
     |          retrieve this page
     |      @raises ValueError: invalid startid/endid or starttime/endtime values
     |      @raises pywikibot.Error: revids belonging to a different page
     |  
     |  lock_topic(self, page, lock, reason)
     |      Lock or unlock a Flow topic.
     |      
     |      @param page: A Flow topic
     |      @type page: Topic
     |      @param lock: Whether to lock or unlock the topic
     |      @type lock: bool (True corresponds to locking the topic.)
     |      @param reason: The reason to lock or unlock the topic
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  logevents(self, logtype=None, user=None, page=None, namespace=None, start=None, end=None, reverse=False, tag=None, total=None, step=NotImplemented)
     |      Iterate all log entries.
     |      
     |      @note: logevents with logtype='block' only logs user blocks whereas
     |          site.blocks iterates all blocks including IP ranges.
     |      
     |      @param logtype: only iterate entries of this type
     |          (see mediawiki api documentation for available types)
     |      @type logtype: basestring
     |      @param user: only iterate entries that match this user name
     |      @type user: basestring
     |      @param page: only iterate entries affecting this page
     |      @type page: Page or basestring
     |      @param namespace: namespace(s) to retrieve logevents from
     |      @type namespace: int or Namespace or an iterable of them
     |      @note: due to an API limitation, if namespace param contains multiple
     |          namespaces, log entries from all namespaces will be fetched from
     |          the API and will be filtered later during iteration.
     |      @param start: only iterate entries from and after this Timestamp
     |      @type start: Timestamp or ISO date string
     |      @param end: only iterate entries up to and through this Timestamp
     |      @type end: Timestamp or ISO date string
     |      @param reverse: if True, iterate oldest entries first (default: newest)
     |      @type reverse: bool
     |      @param tag: only iterate entries tagged with this tag
     |      @type tag: basestring
     |      @param total: maximum number of events to iterate
     |      @type total: int
     |      @rtype: iterable
     |      
     |      @raises KeyError: the namespace identifier was not resolved
     |      @raises TypeError: the namespace identifier has an inappropriate
     |          type such as bool, or an iterable with more than one namespace
     |  
     |  loggedInAs(self, sysop=False)
     |      Deprecated; use Site.user() instead.
     |      
     |      Return the current username if logged in, otherwise return None.
     |      
     |              DEPRECATED (use .user() method instead)
     |      
     |              @param sysop: if True, test if user is logged in as the sysop user
     |                           instead of the normal user.
     |              @type sysop: bool
     |      
     |              @rtype: bool
     |  
     |  logged_in(self, sysop=False)
     |      Verify the bot is logged into the site as the expected user.
     |      
     |      The expected usernames are those provided as either the user or sysop
     |      parameter at instantiation.
     |      
     |      @param sysop: if True, test if user is logged in as the sysop user
     |                   instead of the normal user.
     |      @type sysop: bool
     |      
     |      @rtype: bool
     |  
     |  login(self, sysop=False, autocreate=False)
     |      Log the user in if not already logged in.
     |      
     |      @param sysop: if true, log in with the sysop account.
     |      @type sysop: bool
     |      
     |      @param autocreate: if true, allow auto-creation of the account
     |                         using unified login
     |      @type autocreate: bool
     |      
     |      @raises NoUsername: Username is not recognised by the site.
     |      @see: U{https://www.mediawiki.org/wiki/API:Login}
     |  
     |  logout(self)
     |      Logout of the site and load details for the logged out user.
     |      
     |      Also logs out of the global account if linked to the user.
     |      U{https://www.mediawiki.org/wiki/API:Logout}
     |      
     |      @raises APIError: Logout is not available when OAuth enabled.
     |  
     |  logpages(self, number=50, mode=None, title=None, user=None, namespace=[], start=None, end=None, tag=None, newer=False, dump=False, offset=None, repeat=NotImplemented)
     |      Iterate log pages. DEPRECATED.
     |      
     |      When dump is enabled, the raw API dict is returned.
     |      
     |      @rtype: tuple of Page, str, int, str
     |  
     |  lonelypages(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Pages retrieved from Special:Lonelypages.
     |      
     |      @param total: number of pages to return
     |  
     |  longpages(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Pages and lengths from Special:Longpages.
     |      
     |      Yields a tuple of Page object, length(int).
     |      
     |      @param total: number of pages to return
     |  
     |  mediawiki_message(self, key, forceReload=NotImplemented)
     |      Fetch the text for a MediaWiki message.
     |      
     |      @param key: name of MediaWiki message
     |      @type key: str
     |      
     |      @rtype unicode
     |  
     |  mediawiki_messages(self, keys)
     |      Fetch the text of a set of MediaWiki messages.
     |      
     |      If keys is '*' or ['*'], all messages will be fetched. (deprecated)
     |      
     |      The returned dict uses each key to store the associated message.
     |      
     |      @param keys: MediaWiki messages to fetch
     |      @type keys: set of str, '*' or ['*']
     |      
     |      @rtype dict
     |  
     |  merge_history(self, source, dest, timestamp=None, reason=None)
     |      Merge revisions from one page into another.
     |      
     |      Revisions dating up to the given timestamp in the source will be
     |      moved into the destination page history. History merge fails if
     |      the timestamps of source and dest revisions overlap (all source
     |      revisions must be dated before the earliest dest revision).
     |      
     |      @param source: Source page from which revisions will be merged
     |      @type source: pywikibot.Page
     |      @param dest: Destination page to which revisions will be merged
     |      @type dest: pywikibot.Page
     |      @param timestamp: Revisions from this page dating up to this timestamp
     |          will be merged into the destination page (if not given or False,
     |          all revisions will be merged)
     |      @type timestamp: pywikibot.Timestamp
     |      @param reason: Optional reason for the history merge
     |      @type reason: str
     |  
     |  messages(self, sysop=False)
     |      Return true if the user has new messages, and false otherwise.
     |  
     |  moderate_post(self, post, state, reason)
     |      Moderate a Flow post.
     |      
     |      @param post: A Flow post
     |      @type post: Post
     |      @param state: The new moderation state
     |      @type state: str
     |      @param reason: The reason to moderate the topic
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  moderate_topic(self, page, state, reason)
     |      Moderate a Flow topic.
     |      
     |      @param page: A Flow topic
     |      @type page: Topic
     |      @param state: The new moderation state
     |      @type state: str
     |      @param reason: The reason to moderate the topic
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  movepage(self, page, newtitle, summary, movetalk=True, noredirect=False)
     |      Move a Page to a new title.
     |      
     |      @param page: the Page to be moved (must exist)
     |      @param newtitle: the new title for the Page
     |      @type newtitle: unicode
     |      @param summary: edit summary (required!)
     |      @param movetalk: if True (default), also move the talk page if possible
     |      @param noredirect: if True, suppress creation of a redirect from the
     |          old title to the new one
     |      @return: Page object with the new title
     |      @rtype: pywikibot.Page
     |  
     |  namespace(self, num, all=False)
     |      Return string containing local name of namespace 'num'.
     |      
     |      If optional argument 'all' is true, return all recognized
     |      values for this namespace.
     |      
     |      @param num: Namespace constant.
     |      @type num: int
     |      @param all: If True return a Namespace object. Otherwise
     |          return the namespace name.
     |      @return: local name or Namespace object
     |      @rtype: str or Namespace
     |  
     |  newfiles(self, user=None, start=None, end=None, reverse=False, total=None, lestart='[deprecated name of start]', leend='[deprecated name of end]', leuser='[deprecated name of user]', letitle=NotImplemented, repeat=NotImplemented, number='[deprecated name of total]', step=NotImplemented)
     |      Yield information about newly uploaded files.
     |      
     |      DEPRECATED: Use logevents(logtype='upload') instead.
     |      
     |      Yields a tuple of FilePage, Timestamp, user(unicode), comment(unicode).
     |      
     |      N.B. the API does not provide direct access to Special:Newimages, so
     |      this is derived from the "upload" log events instead.
     |  
     |  newimages(self, *args, **kwargs, number='[deprecated name of total]', repeat=NotImplemented)
     |      Yield information about newly uploaded files.
     |      
     |      DEPRECATED: Use logevents(logtype='upload') instead.
     |  
     |  newpages(self, user=None, returndict=False, start=None, end=None, reverse=False, bot=False, redirect=False, excludeuser=None, patrolled=None, namespaces=None, total=None, number='[deprecated name of total]', repeat=NotImplemented, namespace='[deprecated name of namespaces]', rcshow=NotImplemented, rc_show=NotImplemented, get_redirect=NotImplemented, step=NotImplemented, showBot='[deprecated name of bot]', showRedirects='[deprecated name of redirect]', showPatrolled='[deprecated name of patrolled]')
     |      Yield new articles (as Page objects) from recent changes.
     |      
     |      Starts with the newest article and fetches the number of articles
     |      specified in the first argument.
     |      
     |      The objects yielded are dependent on parameter returndict.
     |      When true, it yields a tuple composed of a Page object and a dict of
     |      attributes.
     |      When false, it yields a tuple composed of the Page object,
     |      timestamp (unicode), length (int), an empty unicode string, username
     |      or IP address (str), comment (unicode).
     |      
     |      @param namespaces: only iterate pages in these namespaces
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  nice_get_address(self, title)
     |      Return shorter URL path to retrieve page titled 'title'.
     |  
     |  notifications(self, **kwargs)
     |      Yield Notification objects from the Echo extension.
     |  
     |  notifications_mark_read(self, **kwargs)
     |      Mark selected notifications as read.
     |      
     |      @return: whether the action was successful
     |      @rtype: bool
     |  
     |  page_can_be_edited(self, page)
     |      Determine if the page can be edited.
     |      
     |      Return True if and only if:
     |        - page is unprotected, and bot has an account for this site, or
     |        - page is protected, and bot has a sysop account for this site.
     |      
     |      @rtype: bool
     |  
     |  page_embeddedin(self, page, filter_redirects=None, namespaces=None, total=None, content=False, step=NotImplemented, filterRedirects='[deprecated name of filter_redirects]')
     |      Iterate all pages that embedded the given page as a template.
     |      
     |      @param page: The Page to get inclusions for.
     |      @param filter_redirects: If True, only return redirects that embed
     |          the given page. If False, only return non-redirect links. If
     |          None, return both (no filtering).
     |      @param namespaces: If present, only return links from the namespaces
     |          in this list.
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param content: if True, load the current content of each iterated page
     |          (default False)
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  page_exists(self, page)
     |      Deprecated; use page.exists() instead.
     |      
     |      Return True if and only if page is an existing page on site.
     |  
     |  page_extlinks(self, page, total=None, step=NotImplemented)
     |      Iterate all external links on page, yielding URL strings.
     |  
     |  page_from_repository(self, item)
     |      Return a Page for this site object specified by wikibase item.
     |      
     |      @param item: id number of item, "Q###",
     |      @type item: str
     |      @return: Page, or Category object given by wikibase item number
     |          for this site object.
     |      @rtype: pywikibot.Page or None
     |      
     |      @raises UnknownExtension: site has no wikibase extension
     |      @raises NotImplementedError: method not implemented for a wikibase site
     |  
     |  page_isredirect(self, page)
     |      Return True if and only if page is a redirect.
     |  
     |  page_restrictions(self, page)
     |      Return a dictionary reflecting page protections.
     |  
     |  pagebacklinks(self, page, follow_redirects=False, filter_redirects=None, namespaces=None, total=None, content=False, followRedirects='[deprecated name of follow_redirects]', filterRedirects='[deprecated name of filter_redirects]')
     |      Iterate all pages that link to the given page.
     |      
     |      @param page: The Page to get links to.
     |      @param follow_redirects: Also return links to redirects pointing to
     |          the given page.
     |      @param filter_redirects: If True, only return redirects to the given
     |          page. If False, only return non-redirect links. If None, return
     |          both (no filtering).
     |      @param namespaces: If present, only return links from the namespaces
     |          in this list.
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param total: Maximum number of pages to retrieve in total.
     |      @param content: if True, load the current content of each iterated page
     |          (default False)
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  pagecategories(self, page, total=None, content=False, withSortKey=NotImplemented, step=NotImplemented)
     |      Iterate categories to which page belongs.
     |      
     |      @param content: if True, load the current content of each iterated page
     |          (default False); note that this means the contents of the
     |          category description page, not the pages contained in the category
     |  
     |  pageimages(self, page, total=None, content=False, step=NotImplemented)
     |      Iterate images used (not just linked) on the page.
     |      
     |      @param content: if True, load the current content of each iterated page
     |          (default False); note that this means the content of the image
     |          description page, not the image itself
     |  
     |  pagelanglinks(self, page, total=None, include_obsolete=False, step=NotImplemented)
     |      Iterate all interlanguage links on page, yielding Link objects.
     |      
     |      @param include_obsolete: if true, yield even Link objects whose
     |                               site is obsolete
     |  
     |  pagelinks(self, page, namespaces=None, follow_redirects=False, total=None, content=False, step=NotImplemented)
     |      Iterate internal wikilinks contained (or transcluded) on page.
     |      
     |      @param namespaces: Only iterate pages in these namespaces
     |          (default: all)
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param follow_redirects: if True, yields the target of any redirects,
     |          rather than the redirect page
     |      @param content: if True, load the current content of each iterated page
     |          (default False)
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  pagename2codes(self)
     |      Return list of localized PAGENAMEE tags for the site.
     |  
     |  pagenamecodes(self)
     |      Return list of localized PAGENAME tags for the site.
     |  
     |  pagereferences(self, page, follow_redirects=False, filter_redirects=None, with_template_inclusion=True, only_template_inclusion=False, namespaces=None, total=None, content=False, step=NotImplemented, followRedirects='[deprecated name of follow_redirects]', filterRedirects='[deprecated name of filter_redirects]', onlyTemplateInclusion='[deprecated name of only_template_inclusion]', withTemplateInclusion='[deprecated name of with_template_inclusion]')
     |      Convenience method combining pagebacklinks and page_embeddedin.
     |      
     |      @param namespaces: If present, only return links from the namespaces
     |          in this list.
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  pages_with_property(self, propname, total=None)
     |      Yield Page objects from Special:PagesWithProp.
     |      
     |      @param propname: must be a valid property.
     |      @type propname: str
     |      @param total: number of pages to return
     |      @type total: int or None
     |      @return: return a generator of Page objects
     |      @rtype: iterator
     |  
     |  pagetemplates(self, page, namespaces=None, total=None, content=False, step=NotImplemented)
     |      Iterate templates transcluded (not just linked) on the page.
     |      
     |      @param namespaces: Only iterate pages in these namespaces
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param content: if True, load the current content of each iterated page
     |          (default False)
     |      
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  patrol(self, rcid=None, revid=None, revision=None, token=NotImplemented)
     |      Return a generator of patrolled pages.
     |      
     |      Pages to be patrolled are identified by rcid, revid or revision.
     |      At least one of the parameters is mandatory.
     |      See https://www.mediawiki.org/wiki/API:Patrol.
     |      
     |      @param rcid: an int/string/iterable/iterator providing rcid of pages
     |          to be patrolled.
     |      @type rcid: iterable/iterator which returns a number or string which
     |           contains only digits; it also supports a string (as above) or int
     |      @param revid: an int/string/iterable/iterator providing revid of pages
     |          to be patrolled.
     |      @type revid: iterable/iterator which returns a number or string which
     |           contains only digits; it also supports a string (as above) or int.
     |      @param revision: a Revision/iterable/iterator providing Revision
     |          objects of pages to be patrolled.
     |      @type revision: iterable/iterator which returns a Revision object; it
     |          also supports a single Revision.
     |      @rtype: iterator of dict with 'rcid', 'ns' and 'title'
     |          of the patrolled page.
     |  
     |  prefixindex(self, prefix, namespace=0, includeredirects=True)
     |      Yield all pages with a given prefix. Deprecated.
     |      
     |      Use allpages() with the prefix= parameter instead of this method.
     |  
     |  preloadpages(self, pagelist, groupsize=50, templates=False, langlinks=False, pageprops=False)
     |      Return a generator to a list of preloaded pages.
     |      
     |      Pages are iterated in the same order as in the underlying pagelist.
     |      In case of duplicates in a groupsize batch, return the first entry.
     |      
     |      @param pagelist: an iterable that returns Page objects
     |      @param groupsize: how many Pages to query at a time
     |      @type groupsize: int
     |      @param templates: preload pages (typically templates) transcluded in
     |          the provided pages
     |      @type templates: bool
     |      @param langlinks: preload all language links from the provided pages
     |          to other languages
     |      @type langlinks: bool
     |      @param pageprops: preload various properties defined in page content
     |      @type pageprops: bool
     |  
     |  protect(self, page, protections, reason, expiry=None, **kwargs, summary='[deprecated name of reason]')
     |      (Un)protect a wiki page. Requires administrator status.
     |      
     |      @param protections: A dict mapping type of protection to protection
     |          level of that type. Valid types of protection are 'edit', 'move',
     |          'create', and 'upload'. Valid protection levels (in MediaWiki 1.12)
     |          are '' (equivalent to 'none'), 'autoconfirmed', and 'sysop'.
     |          If None is given, however, that protection will be skipped.
     |      @type protections: dict
     |      @param reason: Reason for the action
     |      @type reason: basestring
     |      @param expiry: When the block should expire. This expiry will be
     |          applied to all protections. If None, 'infinite', 'indefinite',
     |          'never', or '' is given, there is no expiry.
     |      @type expiry: pywikibot.Timestamp, string in GNU timestamp format
     |          (including ISO 8601).
     |  
     |  protectedpages(self, namespace=0, type='edit', level=False, total=None, lvl='[deprecated name of level]')
     |      Return protected pages depending on protection level and type.
     |      
     |      For protection types which aren't 'create' it uses L{APISite.allpages},
     |      while it uses for 'create' the 'query+protectedtitles' module.
     |      
     |      @param namespace: The searched namespace.
     |      @type namespace: int or Namespace or str
     |      @param type: The protection type to search for (default 'edit').
     |      @type type: str
     |      @param level: The protection level (like 'autoconfirmed'). If False it
     |          shows all protection levels.
     |      @type level: str or False
     |      @return: The pages which are protected.
     |      @rtype: Iterable[pywikibot.Page]
     |  
     |  protection_levels(self)
     |      Return the protection levels available on this site.
     |      
     |      @return: protection levels available
     |      @rtype: set of unicode instances
     |      @see: L{Siteinfo._get_default()}
     |  
     |  protection_types(self)
     |      Return the protection types available on this site.
     |      
     |      @return: protection types available
     |      @rtype: set of unicode instances
     |      @see: L{Siteinfo._get_default()}
     |  
     |  purgepages(self, pages, **kwargs)
     |      Purge the server's cache for one or multiple pages.
     |      
     |      @param pages: list of Page objects
     |      @return: True if API returned expected response; False otherwise
     |      @rtype: bool
     |  
     |  randompage(self, redirect=False)
     |      DEPRECATED.
     |      
     |      @param redirect: Return a random redirect page
     |      @rtype: pywikibot.Page
     |  
     |  randompages(self, total=None, namespaces=None, redirects=False, content=False, step=NotImplemented)
     |      Iterate a number of random pages.
     |      
     |      Pages are listed in a fixed sequence, only the starting point is
     |      random.
     |      
     |      @param total: the maximum number of pages to iterate
     |      @param namespaces: only iterate pages in these namespaces.
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param redirects: if True, include only redirect pages in results,
     |          False does not include redirects and None (MW 1.26+) include both
     |          types. (default: False)
     |      @type redirects: bool or None
     |      @param content: if True, load the current content of each iterated page
     |          (default False)
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |      @raises AssertionError: unsupported redirects parameter
     |  
     |  randomredirectpage(self)
     |      DEPRECATED: Use Site.randompages() instead.
     |      
     |      @return: Return a random redirect page
     |  
     |  recentchanges(self, start=None, end=None, reverse=False, namespaces=None, pagelist=None, changetype=None, minor=None, bot=None, anon=None, redirect=None, patrolled=None, top_only=False, total=None, user=None, excludeuser=None, tag=None, returndict=NotImplemented, nobots=NotImplemented, rcshow=NotImplemented, rcprop=NotImplemented, rctype='[deprecated name of changetype]', revision=NotImplemented, repeat=NotImplemented, rcstart='[deprecated name of start]', rcend='[deprecated name of end]', rcdir=NotImplemented, step=NotImplemented, includeredirects='[deprecated name of redirect]', namespace='[deprecated name of namespaces]', rcnamespace='[deprecated name of namespaces]', number='[deprecated name of total]', rclimit='[deprecated name of total]', showMinor='[deprecated name of minor]', showBot='[deprecated name of bot]', showAnon='[deprecated name of anon]', showRedirects='[deprecated name of redirect]', showPatrolled='[deprecated name of patrolled]', topOnly='[deprecated name of top_only]')
     |      Iterate recent changes.
     |      
     |      @param start: Timestamp to start listing from
     |      @type start: pywikibot.Timestamp
     |      @param end: Timestamp to end listing at
     |      @type end: pywikibot.Timestamp
     |      @param reverse: if True, start with oldest changes (default: newest)
     |      @type reverse: bool
     |      @param namespaces: only iterate pages in these namespaces
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param pagelist: iterate changes to pages in this list only
     |      @type pagelist: list of Pages
     |      @param changetype: only iterate changes of this type ("edit" for
     |          edits to existing pages, "new" for new pages, "log" for log
     |          entries)
     |      @type changetype: basestring
     |      @param minor: if True, only list minor edits; if False, only list
     |          non-minor edits; if None, list all
     |      @type minor: bool or None
     |      @param bot: if True, only list bot edits; if False, only list
     |          non-bot edits; if None, list all
     |      @type bot: bool or None
     |      @param anon: if True, only list anon edits; if False, only list
     |          non-anon edits; if None, list all
     |      @type anon: bool or None
     |      @param redirect: if True, only list edits to redirect pages; if
     |          False, only list edits to non-redirect pages; if None, list all
     |      @type redirect: bool or None
     |      @param patrolled: if True, only list patrolled edits; if False,
     |          only list non-patrolled edits; if None, list all
     |      @type patrolled: bool or None
     |      @param top_only: if True, only list changes that are the latest
     |          revision (default False)
     |      @type top_only: bool
     |      @param user: if not None, only list edits by this user or users
     |      @type user: basestring|list
     |      @param excludeuser: if not None, exclude edits by this user or users
     |      @type excludeuser: basestring|list
     |      @param tag: a recent changes tag
     |      @type tag: str
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  redirect(self)
     |      Return the localized #REDIRECT keyword.
     |  
     |  redirectRegex(self)
     |      Return a compiled regular expression matching on redirect pages.
     |      
     |      Group 1 in the regex match object will be the target title.
     |  
     |  redirectpages(self, total=None, step=NotImplemented)
     |      Yield redirect pages from Special:ListRedirects.
     |      
     |      @param total: number of pages to return
     |  
     |  reply_to_post(self, page, reply_to_uuid, content, format)
     |      Reply to a post on a Flow topic.
     |      
     |      @param page: A Flow topic
     |      @type page: Topic
     |      @param reply_to_uuid: The UUID of the Post to create a reply to
     |      @type reply_to_uuid: unicode
     |      @param content: The content of the reply
     |      @type content: unicode
     |      @param format: The content format used for the supplied content
     |      @type format: unicode (either 'wikitext' or 'html')
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  resolvemagicwords(self, wikitext)
     |      Replace the {{ns:xx}} marks in a wikitext with the namespace names.
     |      
     |      DEPRECATED.
     |  
     |  restore_post(self, post, reason)
     |      Restore a Flow post.
     |      
     |      @param post: A Flow post
     |      @type post: Post
     |      @param reason: The reason to restore the post
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  restore_topic(self, page, reason)
     |      Restore a Flow topic.
     |      
     |      @param page: A Flow topic
     |      @type page: Topic
     |      @param reason: The reason to restore the topic
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  rollbackpage(self, page, **kwargs)
     |      Roll back page to version before last user's edits.
     |      
     |      The keyword arguments are those supported by the rollback API.
     |      
     |      As a precaution against errors, this method will fail unless
     |      the page history contains at least two revisions, and at least
     |      one that is not by the same user who made the last edit.
     |      
     |      @param page: the Page to be rolled back (must exist)
     |  
     |  search(self, searchstring, namespaces=None, where='text', get_redirects=False, total=None, content=False, number='[deprecated name of total]', step=NotImplemented, key='[deprecated name of searchstring]', getredirects='[deprecated name of get_redirects]')
     |      Iterate Pages that contain the searchstring.
     |      
     |      Note that this may include non-existing Pages if the wiki's database
     |      table contains outdated entries.
     |      
     |      @param searchstring: the text to search for
     |      @type searchstring: unicode
     |      @param where: Where to search; value must be "text", "title" or
     |          "nearmatch" (many wikis do not support title or nearmatch search)
     |      @param namespaces: search only in these namespaces (defaults to all)
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param get_redirects: if True, include redirects in results. Since
     |          version MediaWiki 1.23 it will always return redirects.
     |      @param content: if True, load the current content of each iterated page
     |          (default False)
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  server_time(self)
     |      Return a Timestamp object representing the current server time.
     |      
     |      For wikis with a version newer than 1.16 it uses the 'time' property
     |      of the siteinfo 'general'. It'll force a reload before returning the
     |      time. It requests to expand the text '{{CURRENTTIMESTAMP}}' for older
     |      wikis.
     |      
     |      @return: the current server time
     |      @rtype: L{Timestamp}
     |  
     |  shortpages(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Pages and lengths from Special:Shortpages.
     |      
     |      Yields a tuple of Page object, length(int).
     |      
     |      @param total: number of pages to return
     |  
     |  stash_info(self, file_key, props=False)
     |      Get the stash info for a given file key.
     |  
     |  suppress_post(self, post, reason)
     |      Suppress a Flow post.
     |      
     |      @param post: A Flow post
     |      @type post: Post
     |      @param reason: The reason to suppress the post
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  suppress_topic(self, page, reason)
     |      Suppress a Flow topic.
     |      
     |      @param page: A Flow topic
     |      @type page: Topic
     |      @param reason: The reason to suppress the topic
     |      @type reason: unicode
     |      @return: Metadata returned by the API
     |      @rtype: dict
     |  
     |  thank_post(self, post)
     |      Corresponding method to the 'action=flowthank' API action.
     |      
     |      @param post: The post to be thanked for.
     |      @type post: Post
     |      @raise APIError: On thanking oneself or other API errors.
     |      @return: The API response.
     |  
     |  thank_revision(self, revid, source=None)
     |      Corresponding method to the 'action=thank' API action.
     |      
     |      @param revid: Revision ID for the revision to be thanked.
     |      @type revid: int
     |      @param source: A source for the thanking operation.
     |      @type source: str
     |      @raise APIError: On thanking oneself or other API errors.
     |      @return: The API response.
     |  
     |  token(self, page, tokentype)
     |      Deprecated; use the 'tokens' property instead.
     |      
     |      Return token retrieved from wiki to allow changing page content.
     |      
     |              @param page: the Page for which a token should be retrieved
     |              @param tokentype: the type of token (e.g., "edit", "move", "delete");
     |                  see API documentation for full list of types
     |  
     |  unblockuser(self, user, reason=None)
     |      Remove the block for the user.
     |      
     |      @param user: The username/IP without a namespace.
     |      @type user: L{pywikibot.User}
     |      @param reason: Reason for the unblock.
     |      @type reason: basestring
     |  
     |  uncategorizedcategories(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Categories from Special:Uncategorizedcategories.
     |      
     |      @param total: number of pages to return
     |  
     |  uncategorizedfiles = uncategorizedimages(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield FilePages from Special:Uncategorizedimages.
     |      
     |      @param total: number of pages to return
     |  
     |  uncategorizedimages(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield FilePages from Special:Uncategorizedimages.
     |      
     |      @param total: number of pages to return
     |  
     |  uncategorizedpages(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Pages from Special:Uncategorizedpages.
     |      
     |      @param total: number of pages to return
     |  
     |  uncategorizedtemplates(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Pages from Special:Uncategorizedtemplates.
     |      
     |      @param total: number of pages to return
     |  
     |  unconnected_pages(self, total=None, step=NotImplemented)
     |      Yield Page objects from Special:UnconnectedPages.
     |      
     |      @param total: number of pages to return
     |  
     |  undelete_page(self, page, reason, revisions=None, summary='[deprecated name of reason]')
     |      Undelete page from the wiki. Requires appropriate privilege level.
     |      
     |      @param page: Page to be deleted.
     |      @type page: pywikibot.BasePage
     |      @param revisions: List of timestamps to restore.
     |          If None, restores all revisions.
     |      @type revisions: list
     |      @param reason: Undeletion reason.
     |      @type reason: basestring
     |  
     |  unusedcategories(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Category objects from Special:Unusedcategories.
     |      
     |      @param total: number of pages to return
     |  
     |  unusedfiles(self, total=None, extension=NotImplemented, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield FilePage objects from Special:Unusedimages.
     |      
     |      @param total: number of pages to return
     |  
     |  unusedimages(self, total=None, extension=NotImplemented, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield FilePage objects from Special:Unusedimages.
     |      
     |      DEPRECATED: Use L{APISite.unusedfiles} instead.
     |  
     |  unwatchedpages(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Pages from Special:Unwatchedpages (requires Admin privileges).
     |      
     |      @param total: number of pages to return
     |  
     |  upload(self, filepage, source_filename=None, source_url=None, comment=None, text=None, watch=False, ignore_warnings=False, chunk_size=0, _file_key=None, _offset=0, _verify_stash=None, report_success=None, imagepage='[deprecated name of filepage]')
     |      Upload a file to the wiki.
     |      
     |      Either source_filename or source_url, but not both, must be provided.
     |      
     |      @param filepage: a FilePage object from which the wiki-name of the
     |          file will be obtained.
     |      @param source_filename: path to the file to be uploaded
     |      @param source_url: URL of the file to be uploaded
     |      @param comment: Edit summary; if this is not provided, then
     |          filepage.text will be used. An empty summary is not permitted.
     |          This may also serve as the initial page text (see below).
     |      @param text: Initial page text; if this is not set, then
     |          filepage.text will be used, or comment.
     |      @param watch: If true, add filepage to the bot user's watchlist
     |      @param ignore_warnings: It may be a static boolean, a callable
     |          returning a boolean or an iterable. The callable gets a list of
     |          UploadWarning instances and the iterable should contain the warning
     |          codes for which an equivalent callable would return True if all
     |          UploadWarning codes are in that list. If the result is False it'll
     |          not continue uploading the file and otherwise disable any warning
     |          and reattempt to upload the file. NOTE: If report_success is True
     |          or None it'll raise an UploadWarning exception if the static
     |          boolean is False.
     |      @type ignore_warnings: bool or callable or iterable of str
     |      @param chunk_size: The chunk size in bytes for chunked uploading (see
     |          U{https://www.mediawiki.org/wiki/API:Upload#Chunked_uploading}). It
     |          will only upload in chunks, if the version number is 1.20 or higher
     |          and the chunk size is positive but lower than the file size.
     |      @type chunk_size: int
     |      @param _file_key: Reuses an already uploaded file using the filekey. If
     |          None (default) it will upload the file.
     |      @type _file_key: str or None
     |      @param _offset: When file_key is not None this can be an integer to
     |          continue a previously canceled chunked upload. If False it treats
     |          that as a finished upload. If True it requests the stash info from
     |          the server to determine the offset. By default starts at 0.
     |      @type _offset: int or bool
     |      @param _verify_stash: Requests the SHA1 and file size uploaded and
     |          compares it to the local file. Also verifies that _offset
     |          matches the file size if the _offset is an int. If _offset is
     |          False it verifies that the file size matches the local file. If
     |          None it'll verify the stash when a file key and offset are given.
     |      @type _verify_stash: bool or None
     |      @param report_success: If the upload was successful it'll print a
     |          success message and if ignore_warnings is set to False it'll
     |          raise an UploadWarning if a warning occurred. If it's None
     |          (default) it'll be True if ignore_warnings is a bool and False
     |          otherwise. If it's True or None ignore_warnings must be a bool.
     |      @return: It returns True if the upload was successful and False
     |          otherwise.
     |      @rtype: bool
     |  
     |  usercontribs(self, user=None, userprefix=None, start=None, end=None, reverse=False, namespaces=None, minor=None, total=None, top_only=False, step=NotImplemented, showMinor='[deprecated name of minor]')
     |      Iterate contributions by a particular user.
     |      
     |      Iterated values are in the same format as recentchanges.
     |      
     |      @param user: Iterate contributions by this user (name or IP)
     |      @param userprefix: Iterate contributions by all users whose names
     |          or IPs start with this substring
     |      @param start: Iterate contributions starting at this Timestamp
     |      @param end: Iterate contributions ending at this Timestamp
     |      @param reverse: Iterate oldest contributions first (default: newest)
     |      @param namespaces: only iterate pages in these namespaces
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param minor: if True, iterate only minor edits; if False and
     |          not None, iterate only non-minor edits (default: iterate both)
     |      @param total: limit result to this number of pages
     |      @type total: int
     |      @param top_only: if True, iterate only edits which are the latest
     |          revision (default: False)
     |      @raises Error: either user or userprefix must be non-empty
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  users(self, usernames)
     |      Iterate info about a list of users by name or IP.
     |      
     |      @param usernames: a list of user names
     |      @type usernames: list, or other iterable, of unicodes
     |  
     |  validate_tokens(self, types)
     |      Validate if requested tokens are acceptable.
     |      
     |      Valid tokens depend on mw version.
     |  
     |  version(self)
     |      Return live project version number as a string.
     |      
     |      This overwrites the corresponding family method for APISite class. Use
     |      L{pywikibot.tools.MediaWikiVersion} to compare MediaWiki versions.
     |  
     |  wantedcategories(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Pages from Special:Wantedcategories.
     |      
     |      @param total: number of pages to return
     |  
     |  wantedpages(self, total=None, step=NotImplemented)
     |      Yield Pages from Special:Wantedpages.
     |      
     |      @param total: number of pages to return
     |  
     |  watch(self, pages, unwatch=False)
     |      Add or remove pages from watchlist.
     |      
     |      @param pages: A single page or a sequence of pages.
     |      @type pages: A page object, a page-title string, or sequence of them.
     |          Also accepts a single pipe-separated string like 'title1|title2'.
     |      @param unwatch: If True, remove pages from watchlist;
     |          if False add them (default).
     |      @return: True if API returned expected response; False otherwise
     |      @rtype: bool
     |  
     |  watched_pages(self, sysop=False, force=False, total=None, step=NotImplemented)
     |      Return watchlist.
     |      
     |      @param sysop: Returns watchlist of sysop user if true
     |      @type sysop: bool
     |      @param force: Reload watchlist
     |      @type force: bool
     |      @param total: if not None, limit the generator to yielding this many
     |          items in total
     |      @type total: int
     |      @return: list of pages in watchlist
     |      @rtype: list of pywikibot.Page objects
     |  
     |  watchlist_revs(self, start=None, end=None, reverse=False, namespaces=None, minor=None, bot=None, anon=None, total=None, step=NotImplemented, showMinor='[deprecated name of minor]', showAnon='[deprecated name of anon]', showBot='[deprecated name of bot]')
     |      Iterate revisions to pages on the bot user's watchlist.
     |      
     |      Iterated values will be in same format as recentchanges.
     |      
     |      @param start: Iterate revisions starting at this Timestamp
     |      @param end: Iterate revisions ending at this Timestamp
     |      @param reverse: Iterate oldest revisions first (default: newest)
     |      @param namespaces: only iterate pages in these namespaces
     |      @type namespaces: iterable of basestring or Namespace key,
     |          or a single instance of those types. May be a '|' separated
     |          list of namespace identifiers.
     |      @param minor: if True, only list minor edits; if False (and not
     |          None), only list non-minor edits
     |      @param bot: if True, only list bot edits; if False (and not
     |          None), only list non-bot edits
     |      @param anon: if True, only list anon edits; if False (and not
     |          None), only list non-anon edits
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  watchpage(self, page, unwatch=False)
     |      Add or remove page from watchlist.
     |      
     |      DEPRECATED: Use Site().watch() instead.
     |      
     |      @param page: A single page.
     |      @type page: A page object, a page-title string.
     |      @param unwatch: If True, remove page from watchlist;
     |          if False (default), add it.
     |      @return: True if API returned expected response; False otherwise
     |      @rtype: bool
     |  
     |  withoutinterwiki(self, total=None, number='[deprecated name of total]', step=NotImplemented, repeat=NotImplemented)
     |      Yield Pages without language links from Special:Withoutinterwiki.
     |      
     |      @param total: number of pages to return
     |  
     |  ----------------------------------------------------------------------
     |  Class methods inherited from APISite:
     |  
     |  fromDBName(dbname, site=None) from builtins.type
     |      Create a site from a database name using the sitematrix.
     |      
     |      @param dbname: database name
     |      @type dbname: str
     |      @param site: Site to load sitematrix from. (Default meta.wikimedia.org)
     |      @type site: APISite
     |      @return: site object for the database name
     |      @rtype: APISite
     |  
     |  ----------------------------------------------------------------------
     |  Data descriptors inherited from APISite:
     |  
     |  article_path
     |      Get the nice article path without $1.
     |  
     |  globaluserinfo
     |      Retrieve userinfo from site and store in _userinfo attribute.
     |      
     |      self._userinfo will be a dict with the following keys and values:
     |      
     |        - id: user id (numeric str)
     |        - name: username (if user is logged in)
     |        - anon: present if user is not logged in
     |        - groups: list of groups (could be empty)
     |        - rights: list of rights (could be empty)
     |        - message: present if user has a new message on talk page
     |        - blockinfo: present if user is blocked (dict)
     |      
     |      U{https://www.mediawiki.org/wiki/API:Userinfo}
     |      
     |      @param force: force to retrieve userinfo ignoring cache
     |      @type force: bool
     |  
     |  has_data_repository
     |      Return True if site has a shared data repository like Wikidata.
     |  
     |  has_image_repository
     |      Return True if site has a shared image repository like Commons.
     |  
     |  has_transcluded_data
     |      Deprecated; use has_data_repository instead.
     |      
     |      Return True if site has a shared data repository like Wikidata.
     |  
     |  lang
     |      Return the code for the language of this Site.
     |  
     |  logtypes
     |      Return a set of log types available on current site.
     |  
     |  months_names
     |      Obtain month names from the site messages.
     |      
     |      The list is zero-indexed, ordered by month in calendar, and should
     |      be in the original site language.
     |      
     |      @return: list of tuples (month name, abbreviation)
     |      @rtype: list
     |  
     |  proofread_index_ns
     |      Return Index namespace for the ProofreadPage extension.
     |  
     |  proofread_levels
     |      Return Quality Levels for the ProofreadPage extension.
     |  
     |  proofread_page_ns
     |      Return Page namespace for the ProofreadPage extension.
     |  
     |  siteinfo
     |      Site information dict.
     |  
     |  userinfo
     |      Retrieve userinfo from site and store in _userinfo attribute.
     |      
     |      self._userinfo will be a dict with the following keys and values:
     |      
     |        - id: user id (numeric str)
     |        - name: username (if user is logged in)
     |        - anon: present if user is not logged in
     |        - groups: list of groups (could be empty)
     |        - rights: list of rights (could be empty)
     |        - message: present if user has a new message on talk page
     |        - blockinfo: present if user is blocked (dict)
     |      
     |      U{https://www.mediawiki.org/wiki/API:Userinfo}
     |      
     |      @param force: force to retrieve userinfo ignoring cache
     |      @type force: bool
     |  
     |  ----------------------------------------------------------------------
     |  Data and other attributes inherited from APISite:
     |  
     |  OnErrorExc = <class 'pywikibot.site.OnErrorExc'>
     |      OnErrorExc(exception, on_new_page)
     |  
     |  ----------------------------------------------------------------------
     |  Methods inherited from BaseSite:
     |  
     |  __hash__(self)
     |      Return hashable key.
     |  
     |  __repr__(self)
     |      Return internal representation.
     |  
     |  __str__(self)
     |      Return string representing this Site's name and code.
     |  
     |  category_namespace(self)
     |      Deprecated; use namespaces.CATEGORY.custom_name instead.
     |      
     |      Return local name for the Category namespace.
     |  
     |  category_namespaces(self)
     |      Deprecated; use list(namespaces.CATEGORY) instead.
     |      
     |      Return names for the Category namespace.
     |  
     |  category_on_one_line(self)
     |      Return True if this site wants all category links on one line.
     |  
     |  disambcategory(self)
     |      Return Category in which disambig pages are listed.
     |  
     |  fam(self)
     |      Deprecated; use family attribute instead.
     |      
     |      Return Family object for this Site.
     |  
     |  getNamespaceIndex(self, namespace)
     |      DEPRECATED: Return the Namespace for a given namespace name.
     |  
     |  getSite(self, code)
     |      Return Site object for language 'code' in this Family.
     |  
     |  getUrl(self, path, retry=None, sysop=None, data=None, compress=NotImplemented, no_hostname=NotImplemented, cookies_only=NotImplemented, refer=NotImplemented, back_response=NotImplemented)
     |      DEPRECATED.
     |      
     |      Retained for compatibility only. All arguments except path and data
     |      are ignored.
     |  
     |  image_namespace(self)
     |      Deprecated; use namespaces.FILE.custom_name instead.
     |      
     |      Return local name for the File namespace.
     |  
     |  interwiki(self, prefix)
     |      Return the site for a corresponding interwiki prefix.
     |      
     |      @raises SiteDefinitionError: if the url given in the interwiki table
     |          doesn't match any of the existing families.
     |      @raises KeyError: if the prefix is not an interwiki prefix.
     |  
     |  interwiki_prefix(self, site)
     |      Return the interwiki prefixes going to that site.
     |      
     |      The interwiki prefixes are ordered first by length (shortest first)
     |      and then alphabetically. L{interwiki(prefix)} is not guaranteed to
     |      equal C{site} (i.e. the parameter passed to this function).
     |      
     |      @param site: The targeted site, which might be its own.
     |      @type site: L{BaseSite}
     |      @return: The interwiki prefixes
     |      @rtype: list (guaranteed to be not empty)
     |      @raises KeyError: if there is no interwiki prefix for that site.
     |  
     |  interwiki_putfirst(self)
     |      Return list of language codes for ordering of interwiki links.
     |  
     |  isInterwikiLink(self, text)
     |      Return True if text is in the form of an interwiki link.
     |      
     |      If a link object constructed using "text" as the link text parses as
     |      belonging to a different site, this method returns True.
     |  
     |  languages(self)
     |      Return list of all valid language codes for this site's Family.
     |  
     |  linkto(self, title, othersite=None)
     |      DEPRECATED. Return a wikilink to a page.
     |      
     |      @param title: Title of the page to link to
     |      @type title: unicode
     |      @param othersite: Generate a interwiki link for use on this site.
     |      @type othersite: BaseSite or None
     |      
     |      @rtype: unicode
     |  
     |  local_interwiki(self, prefix)
     |      Return whether the interwiki prefix is local.
     |      
     |      A local interwiki prefix is handled by the target site like a normal
     |      link. So if that link also contains an interwiki link it does follow
     |      it as long as it's a local link.
     |      
     |      @raises SiteDefinitionError: if the url given in the interwiki table
     |          doesn't match any of the existing families.
     |      @raises KeyError: if the prefix is not an interwiki prefix.
     |  
     |  lock_page(self, page, block=True)
     |      Lock page for writing. Must be called before writing any page.
     |      
     |      We don't want different threads trying to write to the same page
     |      at the same time, even to different sections.
     |      
     |      @param page: the page to be locked
     |      @type page: pywikibot.Page
     |      @param block: if true, wait until the page is available to be locked;
     |          otherwise, raise an exception if page can't be locked
     |  
     |  mediawiki_namespace(self)
     |      Deprecated; use namespaces.MEDIAWIKI.custom_name instead.
     |      
     |      Return local name for the MediaWiki namespace.
     |  
     |  normalizeNamespace = call(*a, **kw)
     |  
     |  ns_index(self, namespace)
     |      Deprecated; use APISite.namespaces.lookup_name instead.
     |      
     |      
     |      Return the Namespace for a given namespace name.
     |      
     |      @param namespace: name
     |      @type namespace: unicode
     |      @return: The matching Namespace object on this Site
     |      @rtype: Namespace, or None if invalid
     |  
     |  ns_normalize(self, value)
     |      Return canonical local form of namespace name.
     |      
     |      @param value: A namespace name
     |      @type value: unicode
     |  
     |  postData(self, address, data, sysop=False, compress=True, cookies=None, contentType=NotImplemented)
     |      DEPRECATED.
     |  
     |  postForm(self, address, predata, sysop=False, cookies=None)
     |      DEPRECATED.
     |  
     |  sametitle(self, title1, title2)
     |      Return True if title1 and title2 identify the same wiki page.
     |      
     |      title1 and title2 may be unequal but still identify the same page,
     |      if they use different aliases for the same namespace.
     |  
     |  special_namespace(self)
     |      Deprecated; use namespaces.SPECIAL.custom_name instead.
     |      
     |      Return local name for the Special: namespace.
     |  
     |  template_namespace(self)
     |      Deprecated; use namespaces.TEMPLATE.custom_name instead.
     |      
     |      Return local name for the Template namespace.
     |  
     |  unlock_page(self, page)
     |      Unlock page. Call as soon as a write operation has completed.
     |      
     |      @param page: the page to be locked
     |      @type page: pywikibot.Page
     |  
     |  urlEncode(self, query)
     |      DEPRECATED.
     |  
     |  user(self)
     |      Return the currently-logged in bot username, or None.
     |  
     |  username(self, sysop=False)
     |      Return the username/sysopname used for the site.
     |  
     |  validLanguageLinks(self)
     |      Return list of language codes to be used in interwiki links.
     |  
     |  ----------------------------------------------------------------------
     |  Data descriptors inherited from BaseSite:
     |  
     |  code
     |      The identifying code for this Site equal to the wiki prefix.
     |      
     |      By convention, this is usually an ISO language code, but it does
     |      not have to be.
     |  
     |  doc_subpage
     |      Return the documentation subpage for this Site.
     |      
     |      @rtype: tuple
     |  
     |  family
     |      The Family object for this Site's wiki family.
     |  
     |  namespaces
     |      Return dict of valid namespaces on this wiki.
     |  
     |  nocapitalize
     |      Return whether this site's default title case is case-sensitive.
     |      
     |      DEPRECATED.
     |  
     |  sitename
     |      String representing this Site's name and code.
     |  
     |  throttle
     |      Return this Site's throttle. Initialize a new one if needed.
     |  
     |  ----------------------------------------------------------------------
     |  Methods inherited from pywikibot.tools.ComparableMixin:
     |  
     |  __eq__(self, other)
     |      Compare if self is equal to other.
     |  
     |  __ge__(self, other)
     |      Compare if self is greater equals other.
     |  
     |  __gt__(self, other)
     |      Compare if self is greater than other.
     |  
     |  __le__(self, other)
     |      Compare if self is less equals other.
     |  
     |  __lt__(self, other)
     |      Compare if self is less than other.
     |  
     |  __ne__(self, other)
     |      Compare if self is not equal to other.
     |  
     |  ----------------------------------------------------------------------
     |  Data descriptors inherited from pywikibot.tools.ComparableMixin:
     |  
     |  __dict__
     |      dictionary for instance variables (if defined)
     |  
     |  __weakref__
     |      list of weak references to the object (if defined)
    
    class LoginStatus(builtins.object)
     |  Enum for Login statuses.
     |  
     |  >>> LoginStatus.NOT_ATTEMPTED
     |  -3
     |  >>> LoginStatus.AS_USER
     |  0
     |  >>> LoginStatus.name(-3)
     |  'NOT_ATTEMPTED'
     |  >>> LoginStatus.name(0)
     |  'AS_USER'
     |  
     |  Methods defined here:
     |  
     |  __init__(self, state)
     |      Initializer.
     |  
     |  __repr__(self)
     |      Return internal representation.
     |  
     |  ----------------------------------------------------------------------
     |  Class methods defined here:
     |  
     |  name(search_value) from builtins.type
     |      Return the name of a LoginStatus by its value.
     |  
     |  ----------------------------------------------------------------------
     |  Data descriptors defined here:
     |  
     |  __dict__
     |      dictionary for instance variables (if defined)
     |  
     |  __weakref__
     |      list of weak references to the object (if defined)
     |  
     |  ----------------------------------------------------------------------
     |  Data and other attributes defined here:
     |  
     |  AS_SYSOP = 1
     |  
     |  AS_USER = 0
     |  
     |  IN_PROGRESS = -2
     |  
     |  NOT_ATTEMPTED = -3
     |  
     |  NOT_LOGGED_IN = -1
    
    class Namespace(collections.abc.Iterable, pywikibot.tools.ComparableMixin, pywikibot.tools.UnicodeMixin)
     |  Namespace site data object.
     |  
     |  This is backwards compatible with the structure of entries
     |  in site._namespaces which were a list of::
     |  
     |      [customised namespace,
     |       canonical namespace name?,
     |       namespace alias*]
     |  
     |  If the canonical_name is not provided for a namespace between -2
     |  and 15, the MediaWiki 1.14+ built-in names are used.
     |  Enable use_image_name to use built-in names from MediaWiki 1.13
     |  and earlier as the details.
     |  
     |  Image and File are aliases of each other by default.
     |  
     |  If only one of canonical_name and custom_name are available, both
     |  properties will have the same value.
     |  
     |  Method resolution order:
     |      Namespace
     |      collections.abc.Iterable
     |      pywikibot.tools.ComparableMixin
     |      pywikibot.tools.UnicodeMixin
     |      builtins.object
     |  
     |  Methods defined here:
     |  
     |  __add__(self, other)
     |      Apply addition on the namespace id.
     |  
     |  __contains__(self, item)
     |      Determine if item is a name of this namespace.
     |      
     |      The comparison is case insensitive, and item may have a single
     |      colon on one or both sides of the name.
     |      
     |      @param item: name to check
     |      @type item: basestring
     |      @rtype: bool
     |  
     |  __eq__(self, other)
     |      Compare whether two namespace objects are equal.
     |  
     |  __getitem__(self, index)
     |      Obtain an item from the iterable.
     |  
     |  __hash__(self)
     |      Return the namespace id.
     |  
     |  __index__(self)
     |      Return the namespace id.
     |  
     |  __init__(self, id, canonical_name=None, custom_name=None, aliases=None, use_image_name=False, **kwargs)
     |      Initializer.
     |      
     |      @param custom_name: Name defined in server LocalSettings.php
     |      @type custom_name: unicode
     |      @param canonical_name: Canonical name
     |      @type canonical_name: str
     |      @param aliases: Aliases
     |      @type aliases: list of unicode
     |      @param use_image_name: Use 'Image' as default canonical
     |                             for 'File' namespace
     |      @type use_image_name: bool
     |  
     |  __int__(self)
     |      Return the namespace id.
     |  
     |  __iter__(self)
     |      Return an iterator.
     |  
     |  __len__(self)
     |      Obtain length of the iterable.
     |  
     |  __mod__(self, other)
     |      Apply modulo on the namespace id.
     |  
     |  __ne__(self, other)
     |      Compare whether two namespace objects are not equal.
     |  
     |  __repr__(self)
     |      Return a reconstructable representation.
     |  
     |  __str__(self)
     |      Return the canonical string representation.
     |  
     |  __sub__(self, other)
     |      Apply subtraction on the namespace id.
     |  
     |  __unicode__(self)
     |      Return the custom string representation.
     |  
     |  canonical_prefix(self)
     |      Return the canonical name with required colons.
     |  
     |  custom_prefix(self)
     |      Return the custom name with required colons.
     |  
     |  ----------------------------------------------------------------------
     |  Class methods defined here:
     |  
     |  builtin_namespaces(use_image_name=False, case='first-letter') from abc.ABCMeta
     |      Return a dict of the builtin namespaces.
     |  
     |  lookup_name(name, namespaces=None) from abc.ABCMeta
     |      Deprecated; use NamespacesDict.lookup_name instead.
     |      
     |      
     |      Find the Namespace for a name.
     |      
     |      @param name: Name of the namespace.
     |      @type name: basestring
     |      @param namespaces: namespaces to search
     |                         default: builtins only
     |      @type namespaces: dict of Namespace
     |      @rtype: Namespace or None
     |  
     |  ----------------------------------------------------------------------
     |  Static methods defined here:
     |  
     |  default_case(id, default_case=None)
     |      Return the default fixed case value for the namespace ID.
     |  
     |  normalize_name(name)
     |      Remove an optional colon before and after name.
     |      
     |      TODO: reject illegal characters.
     |  
     |  resolve(identifiers, namespaces=None)
     |      Deprecated; use NamespacesDict.resolve instead.
     |      
     |      
     |      Resolve namespace identifiers to obtain Namespace objects.
     |      
     |      Identifiers may be any value for which int() produces a valid
     |      namespace id, except bool, or any string which Namespace.lookup_name
     |      successfully finds. A numerical string is resolved as an integer.
     |      
     |      @param identifiers: namespace identifiers
     |      @type identifiers: iterable of basestring or Namespace key,
     |          or a single instance of those types
     |      @param namespaces: namespaces to search (default: builtins only)
     |      @type namespaces: dict of Namespace
     |      @return: list of Namespace objects in the same order as the
     |          identifiers
     |      @rtype: list
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  ----------------------------------------------------------------------
     |  Data descriptors defined here:
     |  
     |  __dict__
     |      dictionary for instance variables (if defined)
     |  
     |  __weakref__
     |      list of weak references to the object (if defined)
     |  
     |  ----------------------------------------------------------------------
     |  Data and other attributes defined here:
     |  
     |  CATEGORY = 14
     |  
     |  CATEGORY_TALK = 15
     |  
     |  FILE = 6
     |  
     |  FILE_TALK = 7
     |  
     |  HELP = 12
     |  
     |  HELP_TALK = 13
     |  
     |  MAIN = 0
     |  
     |  MEDIA = -2
     |  
     |  MEDIAWIKI = 8
     |  
     |  MEDIAWIKI_TALK = 9
     |  
     |  PROJECT = 4
     |  
     |  PROJECT_TALK = 5
     |  
     |  SPECIAL = -1
     |  
     |  TALK = 1
     |  
     |  TEMPLATE = 10
     |  
     |  TEMPLATE_TALK = 11
     |  
     |  USER = 2
     |  
     |  USER_TALK = 3
     |  
     |  __abstractmethods__ = frozenset()
     |  
     |  canonical_namespaces = {-2: 'Media', -1: 'Special', 0: '', 1: 'Talk', ...
     |  
     |  ----------------------------------------------------------------------
     |  Class methods inherited from collections.abc.Iterable:
     |  
     |  __subclasshook__(C) from abc.ABCMeta
     |      Abstract classes can override this to customize issubclass().
     |      
     |      This is invoked early on by abc.ABCMeta.__subclasscheck__().
     |      It should return True, False or NotImplemented.  If it returns
     |      NotImplemented, the normal algorithm is used.  Otherwise, it
     |      overrides the normal algorithm (and the outcome is cached).
     |  
     |  ----------------------------------------------------------------------
     |  Methods inherited from pywikibot.tools.ComparableMixin:
     |  
     |  __ge__(self, other)
     |      Compare if self is greater equals other.
     |  
     |  __gt__(self, other)
     |      Compare if self is greater than other.
     |  
     |  __le__(self, other)
     |      Compare if self is less equals other.
     |  
     |  __lt__(self, other)
     |      Compare if self is less than other.
    
    class NamespacesDict(collections.abc.Mapping, pywikibot.tools.SelfCallMixin)
     |  An immutable dictionary containing the Namespace instances.
     |  
     |  It adds a deprecation message when called as the 'namespaces' property of
     |  APISite was callable.
     |  
     |  Method resolution order:
     |      NamespacesDict
     |      collections.abc.Mapping
     |      collections.abc.Collection
     |      collections.abc.Sized
     |      collections.abc.Iterable
     |      collections.abc.Container
     |      pywikibot.tools.SelfCallMixin
     |      builtins.object
     |  
     |  Methods defined here:
     |  
     |  __getattr__(self, attr)
     |      Get the namespace with the given key.
     |      
     |      @param attr: namespace key
     |      @type attr: Namespace, int or str
     |      @rtype: Namespace
     |  
     |  __getitem__(self, key)
     |      Get the namespace with the given key.
     |      
     |      @param key: namespace key
     |      @type key: Namespace, int or str
     |      @rtype: Namespace
     |  
     |  __init__(self, namespaces)
     |      Create new dict using the given namespaces.
     |  
     |  __iter__(self)
     |      Iterate over all namespaces.
     |  
     |  __len__(self)
     |      Get the number of namespaces.
     |  
     |  lookup_name(self, name)
     |      Find the Namespace for a name also checking aliases.
     |      
     |      @param name: Name of the namespace.
     |      @type name: basestring
     |      @rtype: Namespace or None
     |  
     |  lookup_normalized_name(self, name)
     |      Find the Namespace for a name also checking aliases.
     |      
     |      The name has to be normalized and must be lower case.
     |      
     |      @param name: Name of the namespace.
     |      @type name: basestring
     |      @rtype: Namespace or None
     |  
     |  resolve(self, identifiers)
     |      Resolve namespace identifiers to obtain Namespace objects.
     |      
     |      Identifiers may be any value for which int() produces a valid
     |      namespace id, except bool, or any string which Namespace.lookup_name
     |      successfully finds. A numerical string is resolved as an integer.
     |      
     |      @param identifiers: namespace identifiers
     |      @type identifiers: iterable of basestring or Namespace key,
     |          or a single instance of those types
     |      @return: list of Namespace objects in the same order as the
     |          identifiers
     |      @rtype: list
     |      @raises KeyError: a namespace identifier was not resolved
     |      @raises TypeError: a namespace identifier has an inappropriate
     |          type such as NoneType or bool
     |  
     |  ----------------------------------------------------------------------
     |  Data descriptors defined here:
     |  
     |  __dict__
     |      dictionary for instance variables (if defined)
     |  
     |  __weakref__
     |      list of weak references to the object (if defined)
     |  
     |  ----------------------------------------------------------------------
     |  Data and other attributes defined here:
     |  
     |  __abstractmethods__ = frozenset()
     |  
     |  ----------------------------------------------------------------------
     |  Methods inherited from collections.abc.Mapping:
     |  
     |  __contains__(self, key)
     |  
     |  __eq__(self, other)
     |      Return self==value.
     |  
     |  get(self, key, default=None)
     |      D.get(k[,d]) -> D[k] if k in D, else d.  d defaults to None.
     |  
     |  items(self)
     |      D.items() -> a set-like object providing a view on D's items
     |  
     |  keys(self)
     |      D.keys() -> a set-like object providing a view on D's keys
     |  
     |  values(self)
     |      D.values() -> an object providing a view on D's values
     |  
     |  ----------------------------------------------------------------------
     |  Data and other attributes inherited from collections.abc.Mapping:
     |  
     |  __hash__ = None
     |  
     |  __reversed__ = None
     |  
     |  ----------------------------------------------------------------------
     |  Class methods inherited from collections.abc.Collection:
     |  
     |  __subclasshook__(C) from abc.ABCMeta
     |      Abstract classes can override this to customize issubclass().
     |      
     |      This is invoked early on by abc.ABCMeta.__subclasscheck__().
     |      It should return True, False or NotImplemented.  If it returns
     |      NotImplemented, the normal algorithm is used.  Otherwise, it
     |      overrides the normal algorithm (and the outcome is cached).
     |  
     |  ----------------------------------------------------------------------
     |  Methods inherited from pywikibot.tools.SelfCallMixin:
     |  
     |  __call__(self)
     |      Do nothing and just return itself.
    
    class NonMWAPISite(BaseSite)
     |  API interface to non MediaWiki sites.
     |  
     |  Method resolution order:
     |      NonMWAPISite
     |      BaseSite
     |      pywikibot.tools.ComparableMixin
     |      builtins.object
     |  
     |  Methods defined here:
     |  
     |  __getattribute__(self, attr)
     |      Return attribute if present else raise NotImplementedError.
     |  
     |  __init__(self, url)
     |      Initializer.
     |  
     |  ----------------------------------------------------------------------
     |  Methods inherited from BaseSite:
     |  
     |  __getattr__(self, attr)
     |      Delegate undefined methods calls to the Family object.
     |  
     |  __getstate__(self)
     |      Remove Lock based classes before pickling.
     |  
     |  __hash__(self)
     |      Return hashable key.
     |  
     |  __repr__(self)
     |      Return internal representation.
     |  
     |  __setstate__(self, attrs)
     |      Restore things removed in __getstate__.
     |  
     |  __str__(self)
     |      Return string representing this Site's name and code.
     |  
     |  category_namespace(self)
     |      Deprecated; use namespaces.CATEGORY.custom_name instead.
     |      
     |      Return local name for the Category namespace.
     |  
     |  category_namespaces(self)
     |      Deprecated; use list(namespaces.CATEGORY) instead.
     |      
     |      Return names for the Category namespace.
     |  
     |  category_on_one_line(self)
     |      Return True if this site wants all category links on one line.
     |  
     |  disambcategory(self)
     |      Return Category in which disambig pages are listed.
     |  
     |  fam(self)
     |      Deprecated; use family attribute instead.
     |      
     |      Return Family object for this Site.
     |  
     |  getNamespaceIndex(self, namespace)
     |      DEPRECATED: Return the Namespace for a given namespace name.
     |  
     |  getSite(self, code)
     |      Return Site object for language 'code' in this Family.
     |  
     |  getUrl(self, path, retry=None, sysop=None, data=None, compress=NotImplemented, no_hostname=NotImplemented, cookies_only=NotImplemented, refer=NotImplemented, back_response=NotImplemented)
     |      DEPRECATED.
     |      
     |      Retained for compatibility only. All arguments except path and data
     |      are ignored.
     |  
     |  has_api(self)
     |      Deprecated.
     |      
     |      Return whether this site has an API.
     |  
     |  image_namespace(self)
     |      Deprecated; use namespaces.FILE.custom_name instead.
     |      
     |      Return local name for the File namespace.
     |  
     |  interwiki(self, prefix)
     |      Return the site for a corresponding interwiki prefix.
     |      
     |      @raises SiteDefinitionError: if the url given in the interwiki table
     |          doesn't match any of the existing families.
     |      @raises KeyError: if the prefix is not an interwiki prefix.
     |  
     |  interwiki_prefix(self, site)
     |      Return the interwiki prefixes going to that site.
     |      
     |      The interwiki prefixes are ordered first by length (shortest first)
     |      and then alphabetically. L{interwiki(prefix)} is not guaranteed to
     |      equal C{site} (i.e. the parameter passed to this function).
     |      
     |      @param site: The targeted site, which might be its own.
     |      @type site: L{BaseSite}
     |      @return: The interwiki prefixes
     |      @rtype: list (guaranteed to be not empty)
     |      @raises KeyError: if there is no interwiki prefix for that site.
     |  
     |  interwiki_putfirst(self)
     |      Return list of language codes for ordering of interwiki links.
     |  
     |  isInterwikiLink(self, text)
     |      Return True if text is in the form of an interwiki link.
     |      
     |      If a link object constructed using "text" as the link text parses as
     |      belonging to a different site, this method returns True.
     |  
     |  languages(self)
     |      Return list of all valid language codes for this site's Family.
     |  
     |  linkto(self, title, othersite=None)
     |      DEPRECATED. Return a wikilink to a page.
     |      
     |      @param title: Title of the page to link to
     |      @type title: unicode
     |      @param othersite: Generate an interwiki link for use on this site.
     |      @type othersite: BaseSite or None
     |      
     |      @rtype: unicode
     |  
     |  local_interwiki(self, prefix)
     |      Return whether the interwiki prefix is local.
     |      
     |      A local interwiki prefix is handled by the target site like a normal
     |      link. So if that link also contains an interwiki link it does follow
     |      it as long as it's a local link.
     |      
     |      @raises SiteDefinitionError: if the url given in the interwiki table
     |          doesn't match any of the existing families.
     |      @raises KeyError: if the prefix is not an interwiki prefix.
     |  
     |  lock_page(self, page, block=True)
     |      Lock page for writing. Must be called before writing any page.
     |      
     |      We don't want different threads trying to write to the same page
     |      at the same time, even to different sections.
     |      
     |      @param page: the page to be locked
     |      @type page: pywikibot.Page
     |      @param block: if true, wait until the page is available to be locked;
     |          otherwise, raise an exception if page can't be locked
     |  
     |  mediawiki_namespace(self)
     |      Deprecated; use namespaces.MEDIAWIKI.custom_name instead.
     |      
     |      Return local name for the MediaWiki namespace.
     |  
     |  normalizeNamespace = call(*a, **kw)
     |  
     |  ns_index(self, namespace)
     |      Deprecated; use APISite.namespaces.lookup_name instead.
     |      
     |      
     |      Return the Namespace for a given namespace name.
     |      
     |      @param namespace: name
     |      @type namespace: unicode
     |      @return: The matching Namespace object on this Site
     |      @rtype: Namespace, or None if invalid
     |  
     |  ns_normalize(self, value)
     |      Return canonical local form of namespace name.
     |      
     |      @param value: A namespace name
     |      @type value: unicode
     |  
     |  pagename2codes(self)
     |      Return list of localized PAGENAMEE tags for the site.
     |  
     |  pagenamecodes(self)
     |      Return list of localized PAGENAME tags for the site.
     |  
     |  postData(self, address, data, sysop=False, compress=True, cookies=None, contentType=NotImplemented)
     |      DEPRECATED.
     |  
     |  postForm(self, address, predata, sysop=False, cookies=None)
     |      DEPRECATED.
     |  
     |  redirect(self)
     |      Return list of localized redirect tags for the site.
     |  
     |  redirectRegex(self, pattern=None)
     |      Return a compiled regular expression matching on redirect pages.
     |      
     |      Group 1 in the regex match object will be the target title.
     |  
     |  sametitle(self, title1, title2)
     |      Return True if title1 and title2 identify the same wiki page.
     |      
     |      title1 and title2 may be unequal but still identify the same page,
     |      if they use different aliases for the same namespace.
     |  
     |  special_namespace(self)
     |      Deprecated; use namespaces.SPECIAL.custom_name instead.
     |      
     |      Return local name for the Special: namespace.
     |  
     |  template_namespace(self)
     |      Deprecated; use namespaces.TEMPLATE.custom_name instead.
     |      
     |      Return local name for the Template namespace.
     |  
     |  unlock_page(self, page)
     |      Unlock page. Call as soon as a write operation has completed.
     |      
     |      @param page: the page to be locked
     |      @type page: pywikibot.Page
     |  
     |  urlEncode(self, query)
     |      DEPRECATED.
     |  
     |  user(self)
     |      Return the currently-logged in bot username, or None.
     |  
     |  username(self, sysop=False)
     |      Return the username/sysopname used for the site.
     |  
     |  validLanguageLinks(self)
     |      Return list of language codes to be used in interwiki links.
     |  
     |  ----------------------------------------------------------------------
     |  Data descriptors inherited from BaseSite:
     |  
     |  code
     |      The identifying code for this Site equal to the wiki prefix.
     |      
     |      By convention, this is usually an ISO language code, but it does
     |      not have to be.
     |  
     |  doc_subpage
     |      Return the documentation subpage for this Site.
     |      
     |      @rtype: tuple
     |  
     |  family
     |      The Family object for this Site's wiki family.
     |  
     |  lang
     |      The ISO language code for this Site.
     |      
     |      Presumed to be equal to the site code, but this can be overridden.
     |  
     |  namespaces
     |      Return dict of valid namespaces on this wiki.
     |  
     |  nocapitalize
     |      Return whether this site's default title case is case-sensitive.
     |      
     |      DEPRECATED.
     |  
     |  sitename
     |      String representing this Site's name and code.
     |  
     |  throttle
     |      Return this Site's throttle. Initialize a new one if needed.
     |  
     |  ----------------------------------------------------------------------
     |  Methods inherited from pywikibot.tools.ComparableMixin:
     |  
     |  __eq__(self, other)
     |      Compare if self is equal to other.
     |  
     |  __ge__(self, other)
     |      Compare if self is greater equals other.
     |  
     |  __gt__(self, other)
     |      Compare if self is greater than other.
     |  
     |  __le__(self, other)
     |      Compare if self is less equals other.
     |  
     |  __lt__(self, other)
     |      Compare if self is less than other.
     |  
     |  __ne__(self, other)
     |      Compare if self is not equal to other.
     |  
     |  ----------------------------------------------------------------------
     |  Data descriptors inherited from pywikibot.tools.ComparableMixin:
     |  
     |  __dict__
     |      dictionary for instance variables (if defined)
     |  
     |  __weakref__
     |      list of weak references to the object (if defined)
    
    class PageInUse(pywikibot.exceptions.Error)
     |  Page cannot be reserved for writing due to existing lock.
     |  
     |  Method resolution order:
     |      PageInUse
     |      pywikibot.exceptions.Error
     |      pywikibot.tools.UnicodeMixin
     |      builtins.Exception
     |      builtins.BaseException
     |      builtins.object
     |  
     |  Methods inherited from pywikibot.exceptions.Error:
     |  
     |  __init__(self, arg)
     |      Initializer.
     |  
     |  __unicode__(self)
     |      Return a unicode string representation.
     |  
     |  ----------------------------------------------------------------------
     |  Data descriptors inherited from pywikibot.exceptions.Error:
     |  
     |  __weakref__
     |      list of weak references to the object (if defined)
     |  
     |  ----------------------------------------------------------------------
     |  Methods inherited from pywikibot.tools.UnicodeMixin:
     |  
     |  __str__(self)
     |      Return the unicode representation as the str representation.
     |  
     |  ----------------------------------------------------------------------
     |  Data descriptors inherited from pywikibot.tools.UnicodeMixin:
     |  
     |  __dict__
     |      dictionary for instance variables (if defined)
     |  
     |  ----------------------------------------------------------------------
     |  Methods inherited from builtins.Exception:
     |  
     |  __new__(*args, **kwargs) from builtins.type
     |      Create and return a new object.  See help(type) for accurate signature.
     |  
     |  ----------------------------------------------------------------------
     |  Methods inherited from builtins.BaseException:
     |  
     |  __delattr__(self, name, /)
     |      Implement delattr(self, name).
     |  
     |  __getattribute__(self, name, /)
     |      Return getattr(self, name).
     |  
     |  __reduce__(...)
     |      helper for pickle
     |  
     |  __repr__(self, /)
     |      Return repr(self).
     |  
     |  __setattr__(self, name, value, /)
     |      Implement setattr(self, name, value).
     |  
     |  __setstate__(...)
     |  
     |  with_traceback(...)
     |      Exception.with_traceback(tb) --
     |      set self.__traceback__ to tb and return self.
     |  
     |  ----------------------------------------------------------------------
     |  Data descriptors inherited from builtins.BaseException:
     |  
     |  __cause__
     |      exception cause
     |  
     |  __context__
     |      exception context
     |  
     |  __suppress_context__
     |  
     |  __traceback__
     |  
     |  args
    
    class RemovedSite(BaseSite)
     |  Site removed from a family.
     |  
     |  Method resolution order:
     |      RemovedSite
     |      BaseSite
     |      pywikibot.tools.ComparableMixin
     |      builtins.object
     |  
     |  Methods defined here:
     |  
     |  __init__(self, code, fam, user=None, sysop=None)
     |      Initializer.
     |  
     |  ----------------------------------------------------------------------
     |  Methods inherited from BaseSite:
     |  
     |  __getattr__(self, attr)
     |      Delegate undefined methods calls to the Family object.
     |  
     |  __getstate__(self)
     |      Remove Lock based classes before pickling.
     |  
     |  __hash__(self)
     |      Return hashable key.
     |  
     |  __repr__(self)
     |      Return internal representation.
     |  
     |  __setstate__(self, attrs)
     |      Restore things removed in __getstate__.
     |  
     |  __str__(self)
     |      Return string representing this Site's name and code.
     |  
     |  category_namespace(self)
     |      Deprecated; use namespaces.CATEGORY.custom_name instead.
     |      
     |      Return local name for the Category namespace.
     |  
     |  category_namespaces(self)
     |      Deprecated; use list(namespaces.CATEGORY) instead.
     |      
     |      Return names for the Category namespace.
     |  
     |  category_on_one_line(self)
     |      Return True if this site wants all category links on one line.
     |  
     |  disambcategory(self)
     |      Return Category in which disambig pages are listed.
     |  
     |  fam(self)
     |      Deprecated; use family attribute instead.
     |      
     |      Return Family object for this Site.
     |  
     |  getNamespaceIndex(self, namespace)
     |      DEPRECATED: Return the Namespace for a given namespace name.
     |  
     |  getSite(self, code)
     |      Return Site object for language 'code' in this Family.
     |  
     |  getUrl(self, path, retry=None, sysop=None, data=None, compress=NotImplemented, no_hostname=NotImplemented, cookies_only=NotImplemented, refer=NotImplemented, back_response=NotImplemented)
     |      DEPRECATED.
     |      
     |      Retained for compatibility only. All arguments except path and data
     |      are ignored.
     |  
     |  has_api(self)
     |      Deprecated.
     |      
     |      Return whether this site has an API.
     |  
     |  image_namespace(self)
     |      Deprecated; use namespaces.FILE.custom_name instead.
     |      
     |      Return local name for the File namespace.
     |  
     |  interwiki(self, prefix)
     |      Return the site for a corresponding interwiki prefix.
     |      
     |      @raises SiteDefinitionError: if the url given in the interwiki table
     |          doesn't match any of the existing families.
     |      @raises KeyError: if the prefix is not an interwiki prefix.
     |  
     |  interwiki_prefix(self, site)
     |      Return the interwiki prefixes going to that site.
     |      
     |      The interwiki prefixes are ordered first by length (shortest first)
     |      and then alphabetically. L{interwiki(prefix)} is not guaranteed to
     |      equal C{site} (i.e. the parameter passed to this function).
     |      
     |      @param site: The targeted site, which might be its own.
     |      @type site: L{BaseSite}
     |      @return: The interwiki prefixes
     |      @rtype: list (guaranteed to be not empty)
     |      @raises KeyError: if there is no interwiki prefix for that site.
     |  
     |  interwiki_putfirst(self)
     |      Return list of language codes for ordering of interwiki links.
     |  
     |  isInterwikiLink(self, text)
     |      Return True if text is in the form of an interwiki link.
     |      
     |      If a link object constructed using "text" as the link text parses as
     |      belonging to a different site, this method returns True.
     |  
     |  languages(self)
     |      Return list of all valid language codes for this site's Family.
     |  
     |  linkto(self, title, othersite=None)
     |      DEPRECATED. Return a wikilink to a page.
     |      
     |      @param title: Title of the page to link to
     |      @type title: unicode
     |      @param othersite: Generate an interwiki link for use on this site.
     |      @type othersite: BaseSite or None
     |      
     |      @rtype: unicode
     |  
     |  local_interwiki(self, prefix)
     |      Return whether the interwiki prefix is local.
     |      
     |      A local interwiki prefix is handled by the target site like a normal
     |      link. So if that link also contains an interwiki link it does follow
     |      it as long as it's a local link.
     |      
     |      @raises SiteDefinitionError: if the url given in the interwiki table
     |          doesn't match any of the existing families.
     |      @raises KeyError: if the prefix is not an interwiki prefix.
     |  
     |  lock_page(self, page, block=True)
     |      Lock page for writing. Must be called before writing any page.
     |      
     |      We don't want different threads trying to write to the same page
     |      at the same time, even to different sections.
     |      
     |      @param page: the page to be locked
     |      @type page: pywikibot.Page
     |      @param block: if true, wait until the page is available to be locked;
     |          otherwise, raise an exception if page can't be locked
     |  
     |  mediawiki_namespace(self)
     |      Deprecated; use namespaces.MEDIAWIKI.custom_name instead.
     |      
     |      Return local name for the MediaWiki namespace.
     |  
     |  normalizeNamespace = call(*a, **kw)
     |  
     |  ns_index(self, namespace)
     |      Deprecated; use APISite.namespaces.lookup_name instead.
     |      
     |      
     |      Return the Namespace for a given namespace name.
     |      
     |      @param namespace: name
     |      @type namespace: unicode
     |      @return: The matching Namespace object on this Site
     |      @rtype: Namespace, or None if invalid
     |  
     |  ns_normalize(self, value)
     |      Return canonical local form of namespace name.
     |      
     |      @param value: A namespace name
     |      @type value: unicode
     |  
     |  pagename2codes(self)
     |      Return list of localized PAGENAMEE tags for the site.
     |  
     |  pagenamecodes(self)
     |      Return list of localized PAGENAME tags for the site.
     |  
     |  postData(self, address, data, sysop=False, compress=True, cookies=None, contentType=NotImplemented)
     |      DEPRECATED.
     |  
     |  postForm(self, address, predata, sysop=False, cookies=None)
     |      DEPRECATED.
     |  
     |  redirect(self)
     |      Return list of localized redirect tags for the site.
     |  
     |  redirectRegex(self, pattern=None)
     |      Return a compiled regular expression matching on redirect pages.
     |      
     |      Group 1 in the regex match object will be the target title.
     |  
     |  sametitle(self, title1, title2)
     |      Return True if title1 and title2 identify the same wiki page.
     |      
     |      title1 and title2 may be unequal but still identify the same page,
     |      if they use different aliases for the same namespace.
     |  
     |  special_namespace(self)
     |      Deprecated; use namespaces.SPECIAL.custom_name instead.
     |      
     |      Return local name for the Special: namespace.
     |  
     |  template_namespace(self)
     |      Deprecated; use namespaces.TEMPLATE.custom_name instead.
     |      
     |      Return local name for the Template namespace.
     |  
     |  unlock_page(self, page)
     |      Unlock page. Call as soon as a write operation has completed.
     |      
     |      @param page: the page to be locked
     |      @type page: pywikibot.Page
     |  
     |  urlEncode(self, query)
     |      DEPRECATED.
     |  
     |  user(self)
     |      Return the currently-logged in bot username, or None.
     |  
     |  username(self, sysop=False)
     |      Return the username/sysopname used for the site.
     |  
     |  validLanguageLinks(self)
     |      Return list of language codes to be used in interwiki links.
     |  
     |  ----------------------------------------------------------------------
     |  Data descriptors inherited from BaseSite:
     |  
     |  code
     |      The identifying code for this Site equal to the wiki prefix.
     |      
     |      By convention, this is usually an ISO language code, but it does
     |      not have to be.
     |  
     |  doc_subpage
     |      Return the documentation subpage for this Site.
     |      
     |      @rtype: tuple
     |  
     |  family
     |      The Family object for this Site's wiki family.
     |  
     |  lang
     |      The ISO language code for this Site.
     |      
     |      Presumed to be equal to the site code, but this can be overridden.
     |  
     |  namespaces
     |      Return dict of valid namespaces on this wiki.
     |  
     |  nocapitalize
     |      Return whether this site's default title case is case-sensitive.
     |      
     |      DEPRECATED.
     |  
     |  sitename
     |      String representing this Site's name and code.
     |  
     |  throttle
     |      Return this Site's throttle. Initialize a new one if needed.
     |  
     |  ----------------------------------------------------------------------
     |  Methods inherited from pywikibot.tools.ComparableMixin:
     |  
     |  __eq__(self, other)
     |      Compare if self is equal to other.
     |  
     |  __ge__(self, other)
     |      Compare if self is greater equals other.
     |  
     |  __gt__(self, other)
     |      Compare if self is greater than other.
     |  
     |  __le__(self, other)
     |      Compare if self is less equals other.
     |  
     |  __lt__(self, other)
     |      Compare if self is less than other.
     |  
     |  __ne__(self, other)
     |      Compare if self is not equal to other.
     |  
     |  ----------------------------------------------------------------------
     |  Data descriptors inherited from pywikibot.tools.ComparableMixin:
     |  
     |  __dict__
     |      dictionary for instance variables (if defined)
     |  
     |  __weakref__
     |      list of weak references to the object (if defined)
    
    class Siteinfo(collections.abc.Container)
     |  A 'dictionary' like container for siteinfo.
     |  
     |  This class queries the server to get the requested siteinfo property.
     |  Optionally it can cache this directly in the instance so that later
     |  requests don't need to query the server.
     |  
     |  All values of the siteinfo property 'general' are directly available.
     |  
     |  Method resolution order:
     |      Siteinfo
     |      collections.abc.Container
     |      builtins.object
     |  
     |  Methods defined here:
     |  
     |  __call__(self, key='general', force=False, dump=False)
     |      DEPRECATED: Return the entry for key or dump the complete cache.
     |  
     |  __contains__(self, key)
     |      Return whether the value is cached.
     |  
     |  __getitem__(self, key)
     |      Return a siteinfo property, caching and not forcing it.
     |  
     |  __init__(self, site)
     |      Initialise it with an empty cache.
     |  
     |  get(self, key, get_default=True, cache=True, expiry=False)
     |      Return a siteinfo property.
     |      
     |      It will never throw an APIError if the API merely states that the
     |      siteinfo property doesn't exist. Instead it will use the default value.
     |      
     |      @param key: The name of the siteinfo property.
     |      @type key: str
     |      @param get_default: Whether to return the default value if the key
     |          is invalid (if False, a KeyError is thrown instead).
     |      @type get_default: bool
     |      @param cache: Caches the result internally so that future accesses
     |          via this method won't query the server.
     |      @type cache: bool
     |      @param expiry: If the cache is older than the expiry it ignores the
     |          cache and queries the server to get the newest value.
     |      @type expiry: int/float (days), L{datetime.timedelta}, False (never)
     |      @return: The gathered property
     |      @rtype: various
     |      @raises KeyError: If the key is not a valid siteinfo property and the
     |          get_default option is set to False.
     |      @see: L{_get_siteinfo}
     |  
     |  get_requested_time(self, key)
     |      Return when 'key' was successfully requested from the server.
     |      
     |      If the property is actually in the siprop 'general' it returns the
     |      last request from the 'general' siprop.
     |      
     |      @param key: The siprop value or a property of 'general'.
     |      @type key: basestring
     |      @return: The last time the siprop of 'key' was requested.
     |      @rtype: None (never), False (default), L{datetime.datetime} (cached)
     |  
     |  is_recognised(self, key)
     |      Return if 'key' is a valid property name. 'None' if not cached.
     |  
     |  ----------------------------------------------------------------------
     |  Data descriptors defined here:
     |  
     |  __dict__
     |      dictionary for instance variables (if defined)
     |  
     |  __weakref__
     |      list of weak references to the object (if defined)
     |  
     |  ----------------------------------------------------------------------
     |  Data and other attributes defined here:
     |  
     |  BOOLEAN_PROPS = {'general': ['imagewhitelistenabled', 'langconversion'...
     |  
     |  WARNING_REGEX = re.compile('^Unrecognized values? for parameter ["\\\'...
     |  
     |  __abstractmethods__ = frozenset()
     |  
     |  ----------------------------------------------------------------------
     |  Class methods inherited from collections.abc.Container:
     |  
     |  __subclasshook__(C) from abc.ABCMeta
     |      Abstract classes can override this to customize issubclass().
     |      
     |      This is invoked early on by abc.ABCMeta.__subclasscheck__().
     |      It should return True, False or NotImplemented.  If it returns
     |      NotImplemented, the normal algorithm is used.  Otherwise, it
     |      overrides the normal algorithm (and the outcome is cached).
    
    class TokenWallet(builtins.object)
     |  Container for tokens.
     |  
     |  Methods defined here:
     |  
     |  __contains__(self, key)
     |      Return True if the given token name is cached.
     |  
     |  __getitem__(self, key)
     |      Get token value for the given key.
     |  
     |  __init__(self, site)
     |      Initializer.
     |  
     |  __repr__(self)
     |      Return a representation of the internal tokens dictionary.
     |  
     |  __str__(self)
     |      Return a str representation of the internal tokens dictionary.
     |  
     |  load_tokens(self, types, all=False)
     |      Preload one or multiple tokens.
     |      
     |      @param types: the types of token.
     |      @type types: iterable
     |      @param all: load all available tokens; if None, load them only if
     |          it can be done in one request.
     |      @type all: bool
     |  
     |  ----------------------------------------------------------------------
     |  Data descriptors defined here:
     |  
     |  __dict__
     |      dictionary for instance variables (if defined)
     |  
     |  __weakref__
     |      list of weak references to the object (if defined)

FUNCTIONS
    must_be(group=None, right=None)
        Decorator to require a certain user status when method is called.
        
        @param group: The group the logged in user should belong to
                      this parameter can be overridden by
                      keyword argument 'as_group'.
        @type group: str ('user' or 'sysop')
        @param right: The rights the logged in user should have.
                      Not supported yet and thus ignored.
        
        @return: method decorator
    
    need_extension(extension)
        Decorator to require a certain MediaWiki extension.
        
        @param extension: the MediaWiki extension required
        @type extension: unicode
        @return: a decorator to make sure the requirement is satisfied when
            the decorated function is called.
    
    need_version(version)
        Decorator to require a certain MediaWiki version number.
        
        @param version: the mw version number required
        @type version: str
        @return: a decorator to make sure the requirement is satisfied when
            the decorated function is called.
    
    warn(...)
        Issue a warning, or maybe ignore it or raise an exception.

DATA
    PY2 = False
    absolute_import = _Feature((2, 5, 0, 'alpha', 1), (3, 0, 0, 'alpha', 0...
    basestring = (<class 'str'>,)
    unicode_literals = _Feature((2, 6, 0, 'alpha', 2), (3, 0, 0, 'alpha', ...

FILE
    /srv/paws/pwb/pywikibot/site.py


site
APISite("test", "wikipedia")
import mwapi
session = mwapi.Session('https://americhino.wikia.com') user_agent = 'Americhino'
  File "<ipython-input-19-839b9bc5fd66>", line 1
    session = mwapi.Session('https://americhino.wikia.com') user_agent = 'Americhino'
                                                                     ^
SyntaxError: invalid syntax
print(session.get(action='query', meta='userinfo'))
{'query': {'userinfo': {'anon': '', 'name': 'Jtmorgan', 'id': 0}},
 'batchcomplete': ''}
---------------------------------------------------------------------------
JSONDecodeError                           Traceback (most recent call last)
/srv/paws/lib/python3.6/site-packages/mwapi/session.py in _request(self, method, params, files, auth)
    115         try:
--> 116             doc = resp.json()
    117         except ValueError:

/srv/paws/lib/python3.6/site-packages/requests/models.py in json(self, **kwargs)
    895                     pass
--> 896         return complexjson.loads(self.text, **kwargs)
    897 

/usr/lib/python3.6/json/__init__.py in loads(s, encoding, cls, object_hook, parse_float, parse_int, parse_constant, object_pairs_hook, **kw)
    353             parse_constant is None and object_pairs_hook is None and not kw):
--> 354         return _default_decoder.decode(s)
    355     if cls is None:

/usr/lib/python3.6/json/decoder.py in decode(self, s, _w)
    338         """
--> 339         obj, end = self.raw_decode(s, idx=_w(s, 0).end())
    340         end = _w(s, end).end()

/usr/lib/python3.6/json/decoder.py in raw_decode(self, s, idx)
    356         except StopIteration as err:
--> 357             raise JSONDecodeError("Expecting value", s, err.value) from None
    358         return obj, end

JSONDecodeError: Expecting value: line 1 column 1 (char 0)

During handling of the above exception, another exception occurred:

ValueError                                Traceback (most recent call last)
<ipython-input-20-66dbe248cd2d> in <module>()
----> 1 print(session.get(action='query', meta='userinfo'))
      2 {'query': {'userinfo': {'anon': '', 'name': 'Jtmorgan', 'id': 0}},
      3  'batchcomplete': ''}

/srv/paws/lib/python3.6/site-packages/mwapi/session.py in get(self, query_continue, auth, continuation, **params)
    307         return self.request('GET', params=params, auth=auth,
    308                             query_continue=query_continue,
--> 309                             continuation=continuation)
    310 
    311     def post(self, query_continue=None, upload_file=None, auth=None,

/srv/paws/lib/python3.6/site-packages/mwapi/session.py in request(self, method, params, query_continue, files, auth, continuation)
    169         else:
    170             return self._request(method, params=normal_params, auth=auth,
--> 171                                  files=files)
    172 
    173     def continuation(self, method, params=None, query_continue=None,

/srv/paws/lib/python3.6/site-packages/mwapi/session.py in _request(self, method, params, files, auth)
    121                 prefix = resp.text[:350]
    122             raise ValueError("Could not decode as JSON:\n{0}"
--> 123                              .format(prefix))
    124 
    125         if 'error' in doc:

ValueError: Could not decode as JSON:
<!doctype html>
<html lang="en" dir="ltr" class="">
<head>

<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
	<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes">
<meta name="generator" content="MediaWiki 1.19.24" />
<meta name="robots" content="noindex,nofollow" />
<meta name="twitter:card" c
print(session.get(action='query', meta='userinfo'))
{'query': {'userinfo': {'anon': '', 'name': 'Jtmorgan', 'id': 0}},
 'batchcomplete': ''}