import pywikibot

# Connect to test.wikipedia.org (a safe sandbox wiki) and show the
# interactive help for the returned APISite object.
site = pywikibot.Site('test', 'wikipedia')
help(site)
Help on APISite in module pywikibot.site object:

class APISite(BaseSite)
 |  API interface to MediaWiki site.
 |  
 |  Do not instantiate directly; use pywikibot.Site function.
 |  
 |  Method resolution order:
 |      APISite
 |      BaseSite
 |      pywikibot.tools.ComparableMixin
 |      builtins.object
 |  
 |  Methods defined here:
 |  
 |  __getstate__(self)
 |      Remove TokenWallet before pickling, for security reasons.
 |  
 |  __init__(self, code, fam=None, user=None, sysop=None)
 |      Constructor.
 |  
 |  __setstate__(self, attrs)
 |      Restore things removed in __getstate__.
 |  
 |  allcategories(self, start='!', prefix='', total=None, reverse=False, content=False, step=NotImplemented)
 |      Iterate categories used (which need not have a Category page).
 |      
 |      Iterator yields Category objects. Note that, in practice, links that
 |      were found on pages that have been deleted may not have been removed
 |      from the database table, so this method can return false positives.
 |      
 |      @param start: Start at this category title (category need not exist).
 |      @param prefix: Only yield categories starting with this string.
 |      @param reverse: if True, iterate in reverse Unicode lexicographic
 |          order (default: iterate in forward order)
 |      @param content: if True, load the current content of each iterated page
 |          (default False); note that this means the contents of the category
 |          description page, not the pages that are members of the category
 |  
 |  allimages(self, start='!', prefix='', minsize=None, maxsize=None, reverse=False, sha1=None, sha1base36=None, total=None, content=False, step=NotImplemented)
 |      Iterate all images, ordered by image title.
 |      
 |      Yields FilePages, but these pages need not exist on the wiki.
 |      
 |      @param start: start at this title (name need not exist)
 |      @param prefix: only iterate titles starting with this substring
 |      @param minsize: only iterate images of at least this many bytes
 |      @param maxsize: only iterate images of no more than this many bytes
 |      @param reverse: if True, iterate in reverse lexicographic order
 |      @param sha1: only iterate image (it is theoretically possible there
 |          could be more than one) with this sha1 hash
 |      @param sha1base36: same as sha1 but in base 36
 |      @param content: if True, load the current content of each iterated page
 |          (default False); note that this means the content of the image
 |          description page, not the image itself
 |  
 |  alllinks(self, start='!', prefix='', namespace=0, unique=False, fromids=False, total=None, step=NotImplemented)
 |      Iterate all links to pages (which need not exist) in one namespace.
 |      
 |      Note that, in practice, links that were found on pages that have
 |      been deleted may not have been removed from the links table, so this
 |      method can return false positives.
 |      
 |      @param start: Start at this title (page need not exist).
 |      @param prefix: Only yield pages starting with this string.
 |      @param namespace: Iterate pages from this (single) namespace
 |      @type namespace: int or Namespace
 |      @param unique: If True, only iterate each link title once (default:
 |          iterate once for each linking page)
 |      @param fromids: if True, include the pageid of the page containing
 |          each link (default: False) as the '_fromid' attribute of the Page;
 |          cannot be combined with unique
 |      @raises KeyError: the namespace identifier was not resolved
 |      @raises TypeError: the namespace identifier has an inappropriate
 |          type such as bool, or an iterable with more than one namespace
 |  
 |  allpages(self, start='!', prefix='', namespace=0, filterredir=None, filterlanglinks=None, minsize=None, maxsize=None, protect_type=None, protect_level=None, reverse=False, total=None, content=False, throttle=NotImplemented, step=NotImplemented, limit='[deprecated name of total]', includeredirects='[deprecated name of filterredir]')
 |      Iterate pages in a single namespace.
 |      
 |      @param start: Start at this title (page need not exist).
 |      @param prefix: Only yield pages starting with this string.
 |      @param namespace: Iterate pages from this (single) namespace
 |      @type namespace: int or Namespace.
 |      @param filterredir: if True, only yield redirects; if False (and not
 |          None), only yield non-redirects (default: yield both)
 |      @param filterlanglinks: if True, only yield pages with language links;
 |          if False (and not None), only yield pages without language links
 |          (default: yield both)
 |      @param minsize: if present, only yield pages at least this many
 |          bytes in size
 |      @param maxsize: if present, only yield pages at most this many bytes
 |          in size
 |      @param protect_type: only yield pages that have a protection of the
 |          specified type
 |      @type protect_type: str
 |      @param protect_level: only yield pages that have protection at this
 |          level; can only be used if protect_type is specified
 |      @param reverse: if True, iterate in reverse Unicode lexicographic
 |          order (default: iterate in forward order)
 |      @param content: if True, load the current content of each iterated page
 |          (default False)
 |      @raises KeyError: the namespace identifier was not resolved
 |      @raises TypeError: the namespace identifier has an inappropriate
 |          type such as bool, or an iterable with more than one namespace
 |  
 |  allusers(self, start='!', prefix='', group=None, total=None, step=NotImplemented)
 |      Iterate registered users, ordered by username.
 |      
 |      Iterated values are dicts containing 'name', 'editcount',
 |      'registration', and (sometimes) 'groups' keys. 'groups' will be
 |      present only if the user is a member of at least 1 group, and will
 |      be a list of unicodes; all the other values are unicodes and should
 |      always be present.
 |      
 |      @param start: start at this username (name need not exist)
 |      @param prefix: only iterate usernames starting with this substring
 |      @param group: only iterate users that are members of this group
 |      @type group: str
 |  
 |  ancientpages(self, total=None, repeat=NotImplemented, number='[deprecated name of total]', step=NotImplemented)
 |      Yield Pages, datestamps from Special:Ancientpages.
 |      
 |      @param total: number of pages to return
 |  
 |  assert_valid_iter_params(self, msg_prefix, start, end, reverse)
 |      Validate iterating API parameters.
 |  
 |  blocks(self, starttime=None, endtime=None, reverse=False, blockids=None, users=None, iprange=None, total=None, step=NotImplemented)
 |      Iterate all current blocks, in order of creation.
 |      
 |      The iterator yields dicts containing keys corresponding to the
 |      block properties.
 |      
 |      @see: U{https://www.mediawiki.org/wiki/API:Blocks}
 |      
 |      @note: logevents only logs user blocks, while this method
 |          iterates all blocks including IP ranges.
 |      @note: C{userid} key will be given for mw 1.18+ only
 |      @note: C{iprange} parameter cannot be used together with C{users}.
 |      
 |      @param starttime: start iterating at this Timestamp
 |      @type starttime: pywikibot.Timestamp
 |      @param endtime: stop iterating at this Timestamp
 |      @type endtime: pywikibot.Timestamp
 |      @param reverse: if True, iterate oldest blocks first (default: newest)
 |      @type reverse: bool
 |      @param blockids: only iterate blocks with these id numbers. Numbers
 |          must be separated by '|' if given by a basestring.
 |      @type blockids: basestring, tuple or list
 |      @param users: only iterate blocks affecting these usernames or IPs
 |      @type users: basestring, tuple or list
 |      @param iprange: a single IP or an IP range. Ranges broader than
 |          IPv4/16 or IPv6/19 are not accepted.
 |      @type iprange: str
 |      @param total: total amount of block entries
 |      @type total: int
 |  
 |  blockuser(self, user, expiry, reason, anononly=True, nocreate=True, autoblock=True, noemail=False, reblock=False)
 |      Block a user for certain amount of time and for a certain reason.
 |      
 |      @param user: The username/IP to be blocked without a namespace.
 |      @type user: L{pywikibot.User}
 |      @param expiry: The length or date/time when the block expires. If
 |          'never', 'infinite', 'indefinite' it never does. If the value is
 |          given as a basestring it's parsed by php's strtotime function:
 |      
 |              U{http://php.net/manual/en/function.strtotime.php}
 |      
 |          The relative format is described there:
 |      
 |              U{http://php.net/manual/en/datetime.formats.relative.php}
 |      
 |          It is recommended to not use a basestring if possible to be
 |          independent of the API.
 |      @type expiry: Timestamp/datetime (absolute),
 |          basestring (relative/absolute) or False ('never')
 |      @param reason: The reason for the block.
 |      @type reason: basestring
 |      @param anononly: Disable anonymous edits for this IP.
 |      @type anononly: boolean
 |      @param nocreate: Prevent account creation.
 |      @type nocreate: boolean
 |      @param autoblock: Automatically block the last used IP address and all
 |          subsequent IP addresses from which this account logs in.
 |      @type autoblock: boolean
 |      @param noemail: Prevent user from sending email through the wiki.
 |      @type noemail: boolean
 |      @param reblock: If the user is already blocked, overwrite the existing
 |          block.
 |      @type reblock: boolean
 |      @return: The data retrieved from the API request.
 |      @rtype: dict
 |  
 |  botusers(self, total=None, step=NotImplemented)
 |      Iterate bot users.
 |      
 |      Iterated values are dicts containing 'name', 'userid', 'editcount',
 |      'registration', and 'groups' keys. 'groups' will be present only if
 |      the user is a member of at least 1 group, and will be a list of
 |      unicodes; all the other values are unicodes and should always be
 |      present.
 |  
 |  broken_redirects(self, total=None, step=NotImplemented)
 |      Yield Pages with broken redirects from Special:BrokenRedirects.
 |      
 |      @param total: number of pages to return
 |  
 |  case(self)
 |      Deprecated; use siteinfo or Namespace instance instead.
 |      
 |      Return this site's capitalization rule.
 |  
 |  categories(self, number=10, repeat=False)
 |      DEPRECATED.
 |  
 |  categoryinfo(self, category)
 |      Retrieve data on contents of category.
 |  
 |  categorymembers(self, category, namespaces=None, sortby=None, reverse=False, starttime=None, endtime=None, startsort=None, endsort=None, total=None, content=False, member_type=None, step=NotImplemented)
 |      Iterate members of specified category.
 |      
 |      @param category: The Category to iterate.
 |      @param namespaces: If present, only return category members from
 |          these namespaces. To yield subcategories or files, use
 |          parameter member_type instead.
 |      @type namespaces: iterable of basestring or Namespace key,
 |          or a single instance of those types.  May be a '|' separated
 |          list of namespace identifiers.
 |      @param sortby: determines the order in which results are generated,
 |          valid values are "sortkey" (default, results ordered by category
 |          sort key) or "timestamp" (results ordered by time page was
 |          added to the category)
 |      @type sortby: str
 |      @param reverse: if True, generate results in reverse order
 |          (default False)
 |      @param starttime: if provided, only generate pages added after this
 |          time; not valid unless sortby="timestamp"
 |      @type starttime: pywikibot.Timestamp
 |      @param endtime: if provided, only generate pages added before this
 |          time; not valid unless sortby="timestamp"
 |      @type endtime: pywikibot.Timestamp
 |      @param startsort: if provided, only generate pages >= this title
 |          lexically; not valid if sortby="timestamp"
 |      @type startsort: str
 |      @param endsort: if provided, only generate pages <= this title
 |          lexically; not valid if sortby="timestamp"
 |      @type endsort: str
 |      @param content: if True, load the current content of each iterated page
 |          (default False)
 |      @type content: bool
 |      @param member_type: member type; if member_type includes 'page' and is
 |          used in conjunction with sortby="timestamp", the API may limit
 |          results to only pages in the first 50 namespaces.
 |      @type member_type: str or iterable of str; values: page, subcat, file
 |      
 |      @raises KeyError: a namespace identifier was not resolved
 |      @raises TypeError: a namespace identifier has an inappropriate
 |          type such as NoneType or bool
 |  
 |  checkBlocks(self, sysop=False)
 |      Raise an exception when the user is blocked. DEPRECATED.
 |      
 |      @param sysop: If true, log in to sysop account (if available)
 |      @type sysop: bool
 |      @raises UserBlocked: The logged in user/sysop account is blocked.
 |  
 |  compare(self, old, diff)
 |      Corresponding method to the 'action=compare' API action.
 |      
 |      See: https://en.wikipedia.org/w/api.php?action=help&modules=compare
 |      Use pywikibot.diff's html_comparator() method to parse result.
 |      @param old: starting revision ID, title, Page, or Revision
 |      @type old: int, str, pywikibot.Page, or pywikibot.Page.Revision
 |      @param diff: ending revision ID, title, Page, or Revision
 |      @type diff: int, str, pywikibot.Page, or pywikibot.Page.Revision
 |      @return: Returns an HTML string of a diff between two revisions.
 |      @rtype: str
 |  
 |  create_new_topic(self, page, title, content, format)
 |      Create a new topic on a Flow board.
 |      
 |      @param page: A Flow board
 |      @type page: Board
 |      @param title: The title of the new topic (must be in plaintext)
 |      @type title: unicode
 |      @param content: The content of the topic's initial post
 |      @type content: unicode
 |      @param format: The content format of the value supplied for content
 |      @type format: unicode (either 'wikitext' or 'html')
 |      @return: The metadata of the new topic
 |      @rtype: dict
 |  
 |  data_repository(self)
 |      Return the data repository connected to this site.
 |      
 |      @return: The data repository if one is connected or None otherwise.
 |      @rtype: DataSite or None
 |  
 |  dbName(self)
 |      Return this site's internal id.
 |  
 |  deadendpages(self, total=None, repeat=NotImplemented, number='[deprecated name of total]', step=NotImplemented)
 |      Yield Page objects retrieved from Special:Deadendpages.
 |      
 |      @param total: number of pages to return
 |  
 |  delete_post(self, post, reason)
 |      Delete a Flow post.
 |      
 |      @param post: A Flow post
 |      @type post: Post
 |      @param reason: The reason to delete the post
 |      @type reason: unicode
 |      @return: Metadata returned by the API
 |      @rtype: dict
 |  
 |  delete_topic(self, page, reason)
 |      Delete a Flow topic.
 |      
 |      @param page: A Flow topic
 |      @type page: Topic
 |      @param reason: The reason to delete the topic
 |      @type reason: unicode
 |      @return: Metadata returned by the API
 |      @rtype: dict
 |  
 |  deletedrevs(self, page, start=None, end=None, reverse=None, get_text=False, total=None, step=NotImplemented)
 |      Iterate deleted revisions.
 |      
 |      Each value returned by the iterator will be a dict containing the
 |      'title' and 'ns' keys for a particular Page and a 'revisions' key
 |      whose value is a list of revisions in the same format as
 |      recentchanges (plus a 'content' element if requested). If get_text
 |      is true, the toplevel dict will contain a 'token' key as well.
 |      
 |      @param page: The page to check for deleted revisions
 |      @param start: Iterate revisions starting at this Timestamp
 |      @param end: Iterate revisions ending at this Timestamp
 |      @param reverse: Iterate oldest revisions first (default: newest)
 |      @param get_text: If True, retrieve the content of each revision and
 |          an undelete token
 |  
 |  deletepage(self, page, reason, summary='[deprecated name of reason]')
 |      Delete page from the wiki. Requires appropriate privilege level.
 |      
 |      @param page: Page to be deleted.
 |      @type page: Page
 |      @param reason: Deletion reason.
 |      @type reason: basestring
 |  
 |  double_redirects(self, total=None, step=NotImplemented)
 |      Yield Pages with double redirects from Special:DoubleRedirects.
 |      
 |      @param total: number of pages to return
 |  
 |  editpage(self, page, summary=None, minor=True, notminor=False, bot=True, recreate=True, createonly=False, nocreate=False, watch=None, **kwargs)
 |      Submit an edit to be saved to the wiki.
 |      
 |      @param page: The Page to be saved.
 |          By default its .text property will be used
 |          as the new text to be saved to the wiki
 |      @param summary: the edit summary
 |      @param minor: if True (default), mark edit as minor
 |      @param notminor: if True, override account preferences to mark edit
 |          as non-minor
 |      @param recreate: if True (default), create new page even if this
 |          title has previously been deleted
 |      @param createonly: if True, raise an error if this title already
 |          exists on the wiki
 |      @param nocreate: if True, raise an error if the page does not exist
 |      @param watch: Specify how the watchlist is affected by this edit, set
 |          to one of "watch", "unwatch", "preferences", "nochange":
 |          * watch: add the page to the watchlist
 |          * unwatch: remove the page from the watchlist
 |          The following settings are supported by mw >= 1.16 only
 |          * preferences: use the preference settings (default)
 |          * nochange: don't change the watchlist
 |      @param bot: if True, mark edit with bot flag
 |      @kwarg text: Overrides Page.text
 |      @type text: unicode
 |      @kwarg section: Edit an existing numbered section or
 |          a new section ('new')
 |      @type section: int or str
 |      @kwarg prependtext: Prepend text. Overrides Page.text
 |      @type text: unicode
 |      @kwarg appendtext: Append text. Overrides Page.text.
 |      @type text: unicode
 |      @kwarg undo: Revision id to undo. Overrides Page.text
 |      @type undo: int
 |      @return: True if edit succeeded, False if it failed
 |      @rtype: bool
 |      @raises Error: No text to be saved
 |      @raises NoPage: recreate is disabled and page does not exist
 |  
 |  expand_text(self, text, title=None, includecomments=None, string='[deprecated name of text]')
 |      Parse the given text for preprocessing and rendering.
 |      
 |      e.g. expand templates and strip comments if the includecomments
 |      parameter is not True. Keeps text inside
 |      <nowiki></nowiki> tags unchanged etc. Can be used to parse
 |      magic parser words like {{CURRENTTIMESTAMP}}.
 |      
 |      @param text: text to be expanded
 |      @type text: unicode
 |      @param title: page title without section
 |      @type title: unicode
 |      @param includecomments: if True do not strip comments
 |      @type includecomments: bool
 |      @rtype: unicode
 |  
 |  exturlusage(self, url=None, protocol='http', namespaces=None, total=None, content=False, step=NotImplemented)
 |      Iterate Pages that contain links to the given URL.
 |      
 |      @param url: The URL to search for (without the protocol prefix);
 |          this may include a '*' as a wildcard, only at the start of the
 |          hostname
 |      @param protocol: The protocol prefix (default: "http")
 |  
 |  forceLogin = call(*a, **kw)
 |  
 |  getExpandedString = call(*a, **kw)
 |  
 |  getFilesFromAnHash(self, hash_found=None)
 |      Return all files that have the same hash.
 |      
 |      DEPRECATED: Use L{APISite.allimages} instead using 'sha1'.
 |  
 |  getImagesFromAnHash(self, hash_found=None)
 |      Return all images that have the same hash.
 |      
 |      DEPRECATED: Use L{APISite.allimages} instead using 'sha1'.
 |  
 |  getParsedString(self, string, keeptags=None)
 |      Deprecated.
 |      
 |      compat defined keeptags as ['*'].
 |  
 |  getPatrolToken(self, sysop=False)
 |      DEPRECATED: Get patrol token.
 |  
 |  getToken(self, getalways=True, getagain=False, sysop=False)
 |      DEPRECATED: Get edit token.
 |  
 |  get_parsed_page(self, page)
 |      Retrieve parsed text of the page using action=parse.
 |  
 |  get_searched_namespaces(self, force=False)
 |      Retrieve the default searched namespaces for the user.
 |      
 |      If no user is logged in, it returns the namespaces used by default.
 |      Otherwise it returns the user preferences. It caches the last result
 |      and returns it, if the username or login status hasn't changed.
 |      
 |      @param force: Whether the cache should be discarded.
 |      @return: The namespaces which are searched by default.
 |      @rtype: C{set} of L{Namespace}
 |  
 |  get_tokens(self, types, all=False)
 |      Preload one or multiple tokens.
 |      
 |      For all MediaWiki versions prior to 1.20, only one token can be
 |      retrieved at once.
 |      For MediaWiki versions since 1.24wmfXXX a new token
 |      system was introduced which reduced the amount of tokens available.
 |      Most of them were merged into the 'csrf' token. If the token type in
 |      the parameter is not known it will default to the 'csrf' token.
 |      
 |      The other token types available are:
 |       - deleteglobalaccount
 |       - patrol (*)
 |       - rollback
 |       - setglobalaccountstatus
 |       - userrights
 |       - watch
 |      
 |       (*) Patrol was added in v1.14.
 |           Until v1.16, the patrol token is same as the edit token.
 |           For v1.17-19, the patrol token must be obtained from the query
 |           list recentchanges.
 |      
 |      @param types: the types of token (e.g., "edit", "move", "delete");
 |          see API documentation for full list of types
 |      @type  types: iterable
 |      @param all: load all available tokens, if None only if it can be done
 |          in one request.
 |      @type all: bool
 |      
 |      @return: a dict with retrieved valid tokens.
 |      @rtype: dict
 |  
 |  getcategoryinfo(self, category)
 |      Retrieve data on contents of category.
 |  
 |  getcurrenttime = call(*a, **kw)
 |  
 |  getcurrenttimestamp(self)
 |      Return the server time as a MediaWiki timestamp string.
 |      
 |      It calls L{server_time} first so it queries the server to get the
 |      current server time.
 |      
 |      @return: the server time
 |      @rtype: str (as 'yyyymmddhhmmss')
 |  
 |  getglobaluserinfo(self)
 |      Retrieve globaluserinfo from site and cache it.
 |      
 |      self._globaluserinfo will be a dict with the following keys and values:
 |      
 |        - id: user id (numeric str)
 |        - home: dbname of home wiki
 |        - registration: registration date as Timestamp
 |        - groups: list of groups (could be empty)
 |        - rights: list of rights (could be empty)
 |        - editcount: global editcount
 |  
 |  getmagicwords(self, word)
 |      Return list of localized "word" magic words for the site.
 |  
 |  getredirtarget(self, page)
 |      Return page object for the redirect target of page.
 |      
 |      @param page: page to search redirects for
 |      @type page: BasePage
 |      @return: redirect target of page
 |      @rtype: BasePage
 |      
 |      @raises IsNotRedirectPage: page is not a redirect
 |      @raises RuntimeError: no redirects found
 |      @raises CircularRedirect: page is a circular redirect
 |      @raises InterwikiRedirectPage: the redirect target is
 |          on another site
 |  
 |  getuserinfo(self, force=False)
 |      Retrieve userinfo from site and store in _userinfo attribute.
 |      
 |      self._userinfo will be a dict with the following keys and values:
 |      
 |        - id: user id (numeric str)
 |        - name: username (if user is logged in)
 |        - anon: present if user is not logged in
 |        - groups: list of groups (could be empty)
 |        - rights: list of rights (could be empty)
 |        - message: present if user has a new message on talk page
 |        - blockinfo: present if user is blocked (dict)
 |      
 |      U{https://www.mediawiki.org/wiki/API:Userinfo}
 |      
 |      @param force: force to retrieve userinfo ignoring cache
 |      @type force: bool
 |  
 |  hasExtension(self, name, unknown=None)
 |      Deprecated; use has_extension instead.
 |      
 |      Determine whether extension `name` is loaded.
 |      
 |              Use L{has_extension} instead!
 |      
 |              @param name: The extension to check for, case insensitive
 |              @type name: str
 |              @param unknown: Old parameter which shouldn't be used anymore.
 |              @return: If the extension is loaded
 |              @rtype: bool
 |  
 |  has_all_mediawiki_messages(self, keys)
 |      Confirm that the site defines a set of MediaWiki messages.
 |      
 |      @param keys: names of MediaWiki messages
 |      @type keys: set of str
 |      
 |      @rtype: bool
 |  
 |  has_api(self)
 |      Deprecated.
 |      
 |      Return whether this site has an API.
 |  
 |  has_extension(self, name)
 |      Determine whether extension `name` is loaded.
 |      
 |      @param name: The extension to check for, case sensitive
 |      @type name: str
 |      @return: If the extension is loaded
 |      @rtype: bool
 |  
 |  has_group(self, group, sysop=False)
 |      Return true if and only if the user is a member of specified group.
 |      
 |      Possible values of 'group' may vary depending on wiki settings,
 |      but will usually include bot.
 |      U{https://www.mediawiki.org/wiki/API:Userinfo}
 |  
 |  has_mediawiki_message(self, key)
 |      Determine if the site defines a MediaWiki message.
 |      
 |      @param key: name of MediaWiki message
 |      @type key: str
 |      
 |      @rtype: bool
 |  
 |  has_right(self, right, sysop=False)
 |      Return true if and only if the user has a specific right.
 |      
 |      Possible values of 'right' may vary depending on wiki settings,
 |      but will usually include:
 |      
 |      * Actions: edit, move, delete, protect, upload
 |      * User levels: autoconfirmed, sysop, bot
 |      
 |      U{https://www.mediawiki.org/wiki/API:Userinfo}
 |  
 |  hide_post(self, post, reason)
 |      Hide a Flow post.
 |      
 |      @param post: A Flow post
 |      @type post: Post
 |      @param reason: The reason to hide the post
 |      @type reason: unicode
 |      @return: Metadata returned by the API
 |      @rtype: dict
 |  
 |  hide_topic(self, page, reason)
 |      Hide a Flow topic.
 |      
 |      @param page: A Flow topic
 |      @type page: Topic
 |      @param reason: The reason to hide the topic
 |      @type reason: unicode
 |      @return: Metadata returned by the API
 |      @rtype: dict
 |  
 |  image_repository(self)
 |      Return Site object for image repository e.g. commons.
 |  
 |  imageusage(self, image, namespaces=None, filterredir=None, total=None, content=False, step=NotImplemented)
 |      Iterate Pages that contain links to the given FilePage.
 |      
 |      @param image: the image to search for (FilePage need not exist on
 |          the wiki)
 |      @type image: FilePage
 |      @param namespaces: If present, only iterate pages in these namespaces
 |      @type namespaces: iterable of basestring or Namespace key,
 |          or a single instance of those types.  May be a '|' separated
 |          list of namespace identifiers.
 |      @param filterredir: if True, only yield redirects; if False (and not
 |          None), only yield non-redirects (default: yield both)
 |      @param content: if True, load the current content of each iterated page
 |          (default False)
 |      @raises KeyError: a namespace identifier was not resolved
 |      @raises TypeError: a namespace identifier has an inappropriate
 |          type such as NoneType or bool
 |  
 |  isAllowed = call(*a, **kw)
 |  
 |  isBlocked = call(*a, **kw)
 |  
 |  isBot(self, username)
 |      Return True if username is a bot user.
 |  
 |  is_blocked(self, sysop=False)
 |      Return True when logged in user is blocked.
 |      
 |      To check whether a user can perform an action,
 |      the method has_right should be used.
 |      U{https://www.mediawiki.org/wiki/API:Userinfo}
 |      
 |      @param sysop: If true, log in to sysop account (if available)
 |      @type sysop: bool
 |      @rtype: bool
 |  
 |  is_data_repository(self)
 |      Return True if its data repository is itself.
 |  
 |  is_image_repository(self)
 |      Return True if Site object is the image repository.
 |  
 |  is_oauth_token_available(self)
 |      Check whether OAuth token is set for this site.
 |      
 |      @rtype: bool
 |  
 |  is_uploaddisabled(self)
 |      Return True if upload is disabled on site.
 |      
 |      When the version is at least 1.27wmf9, uses general siteinfo.
 |      If not called directly, it is cached by the first attempted
 |      upload action.
 |  
 |  language(self)
 |      Deprecated; use APISite.lang instead.
 |      
 |      Return the code for the language of this Site.
 |  
 |  linksearch(self, siteurl, limit=None, euprotocol=None)
 |      Deprecated; use Site().exturlusage instead.
 |      
 |      Backwards-compatible interface to exturlusage().
 |  
 |  list_to_text(self, args)
 |      Convert a list of strings into human-readable text.
 |      
 |      The MediaWiki messages 'and' and 'word-separator' are used as separator
 |      between the last two arguments.
 |      If more than two arguments are given, other arguments are
 |      joined using MediaWiki message 'comma-separator'.
 |      
 |      @param args: text to be expanded
 |      @type args: iterable of unicode
 |      
 |      @rtype: unicode
 |  
 |  live_version(self, force=False)
 |      Deprecated; use version() instead.
 |      
 |      Return the 'real' version number found on [[Special:Version]].
 |      
 |              By default the version number is cached for one day.
 |      
 |              @param force: If the version should be read always from the server and
 |                  never from the cache.
 |              @type force: bool
 |              @return: A tuple containing the major, minor version number and any
 |                  text after that. If an error occurred (0, 0, 0) is returned.
 |              @rtype: int, int, str
 |  
 |  load_board(self, page)
 |      Retrieve the data for a Flow board.
 |      
 |      @param page: A Flow board
 |      @type page: Board
 |      @return: A dict representing the board's metadata.
 |      @rtype: dict
 |  
 |  load_post_current_revision(self, page, post_id, format)
 |      Retrieve the data for a post to a Flow topic.
 |      
 |      @param page: A Flow topic
 |      @type page: Topic
 |      @param post_id: The UUID of the Post
 |      @type post_id: unicode
 |      @param format: The content format used for the returned content
 |      @type format: unicode (either 'wikitext', 'html', or 'fixed-html')
 |      @return: A dict representing the post data for the given UUID.
 |      @rtype: dict
 |  
 |  load_topic(self, page, format)
 |      Retrieve the data for a Flow topic.
 |      
 |      @param page: A Flow topic
 |      @type page: Topic
 |      @param format: The content format to request the data in.
 |      @type format: str (either 'wikitext', 'html', or 'fixed-html')
 |      @return: A dict representing the topic's data.
 |      @rtype: dict
 |  
 |  load_topiclist(self, page, format='wikitext', limit=100, sortby='newest', toconly=False, offset=None, offset_id=None, reverse=False, include_offset=False)
 |      Retrieve the topiclist of a Flow board.
 |      
 |      @param page: A Flow board
 |      @type page: Board
 |      @param format: The content format to request the data in.
 |      @type format: str (either 'wikitext', 'html', or 'fixed-html')
 |      @param limit: The number of topics to fetch in each request.
 |      @type limit: int
 |      @param sortby: Algorithm to sort topics by.
 |      @type sortby: str (either 'newest' or 'updated')
 |      @param toconly: Whether to only include information for the TOC.
 |      @type toconly: bool
 |      @param offset: The timestamp to start at (when sortby is 'updated').
 |      @type offset: Timestamp or equivalent str
 |      @param offset_id: The topic UUID to start at (when sortby is 'newest').
 |      @type offset_id: str (in the form of a UUID)
 |      @param reverse: Whether to reverse the topic ordering.
 |      @type reverse: bool
 |      @param include_offset: Whether to include the offset topic.
 |      @type include_offset: bool
 |      @return: A dict representing the board's topiclist.
 |      @rtype: dict
 |  
 |  loadcoordinfo(self, page)
 |      Load [[mw:Extension:GeoData]] info.
 |  
 |  loadflowinfo(self, page)
 |      Deprecated; check the content model instead.
 |      
 |      
 |      Load Flow-related information about a given page.
 |      
 |      Assumes that the Flow extension is installed.
 |      
 |      @raises APIError: Flow extension is not installed
 |  
 |  loadimageinfo(self, page, history=False)
 |      Load image info from api and save in page attributes.
 |      
 |      @param history: if true, return the image's version history
 |  
 |  loadpageinfo(self, page, preload=False)
 |      Load page info from api and store in page attributes.
 |  
 |  loadpageprops(self, page)
 |      Load page props for the given page.
 |  
 |  loadrevisions(self, page, getText=False, revids=None, startid=None, endid=None, starttime=None, endtime=None, rvdir=None, user=None, excludeuser=None, section=None, sysop=False, step=None, total=None, rollback=False)
 |      Retrieve and store revision information.
 |      
 |      By default, retrieves the last (current) revision of the page,
 |      unless any of the optional parameters revids, startid, endid,
 |      starttime, endtime, rvdir, user, excludeuser, or limit are
 |      specified. Unless noted below, all parameters not specified
 |      default to False.
 |      
 |      If rvdir is False or not specified, startid must be greater than
 |      endid if both are specified; likewise, starttime must be greater
 |      than endtime. If rvdir is True, these relationships are reversed.
 |      
 |      @param page: retrieve revisions of this Page (required unless ids
 |          is specified)
 |      @param getText: if True, retrieve the wiki-text of each revision;
 |          otherwise, only retrieve the revision metadata (default)
 |      @param section: if specified, retrieve only this section of the text
 |          (getText must be True); section must be given by number (top of
 |          the article is section 0), not name
 |      @type section: int
 |      @param revids: retrieve only the specified revision ids (raise
 |          Exception if any of revids does not correspond to the page)
 |      @type revids: an int, a str or a list of ints or strings
 |      @param startid: retrieve revisions starting with this revid
 |      @param endid: stop upon retrieving this revid
 |      @param starttime: retrieve revisions starting at this Timestamp
 |      @param endtime: stop upon reaching this Timestamp
 |      @param rvdir: if false, retrieve newest revisions first (default);
 |          if true, retrieve earliest first
 |      @param user: retrieve only revisions authored by this user
 |      @param excludeuser: retrieve all revisions not authored by this user
 |      @param sysop: if True, switch to sysop account (if available) to
 |          retrieve this page
 |  
 |  lock_topic(self, page, lock, reason)
 |      Lock or unlock a Flow topic.
 |      
 |      @param page: A Flow topic
 |      @type page: Topic
 |      @param lock: Whether to lock or unlock the topic
 |      @type lock: bool (True corresponds to locking the topic.)
 |      @param reason: The reason to lock or unlock the topic
 |      @type reason: unicode
 |      @return: Metadata returned by the API
 |      @rtype: dict
 |  
 |  logevents(self, logtype=None, user=None, page=None, namespace=None, start=None, end=None, reverse=False, tag=None, total=None, step=NotImplemented)
 |      Iterate all log entries.
 |      
 |      @note: logevents with logtype='block' only logs user blocks whereas
 |          site.blocks iterates all blocks including IP ranges.
 |      
 |      @param logtype: only iterate entries of this type (see wiki
 |          documentation for available types, which will include "block",
 |          "protect", "rights", "delete", "upload", "move", "import",
 |          "patrol", "merge")
 |      @type logtype: basestring
 |      @param user: only iterate entries that match this user name
 |      @type user: basestring
 |      @param page: only iterate entries affecting this page
 |      @type page: Page or basestring
 |      @param namespace: namespace to retrieve logevents from
 |      @type namespace: int or Namespace
 |      @param start: only iterate entries from and after this Timestamp
 |      @type start: Timestamp or ISO date string
 |      @param end: only iterate entries up to and through this Timestamp
 |      @type end: Timestamp or ISO date string
 |      @param reverse: if True, iterate oldest entries first (default: newest)
 |      @type reverse: bool
 |      @param tag: only iterate entries tagged with this tag
 |      @type tag: basestring
 |      @param total: maximum number of events to iterate
 |      @type total: int
 |      @rtype: iterable
 |      
 |      @raises KeyError: the namespace identifier was not resolved
 |      @raises TypeError: the namespace identifier has an inappropriate
 |          type such as bool, or an iterable with more than one namespace
 |  
 |  loggedInAs(self, sysop=False)
 |      Deprecated; use Site.user() instead.
 |      
 |      Return the current username if logged in, otherwise return None.
 |      
 |              DEPRECATED (use .user() method instead)
 |      
 |              @param sysop: if True, test if user is logged in as the sysop user
 |                           instead of the normal user.
 |              @type sysop: bool
 |      
 |              @rtype: bool
 |  
 |  logged_in(self, sysop=False)
 |      Verify the bot is logged into the site as the expected user.
 |      
 |      The expected usernames are those provided as either the user or sysop
 |      parameter at instantiation.
 |      
 |      @param sysop: if True, test if user is logged in as the sysop user
 |                   instead of the normal user.
 |      @type sysop: bool
 |      
 |      @rtype: bool
 |  
 |  login(self, sysop=False)
 |      Log the user in if not already logged in.
 |      
 |      U{https://www.mediawiki.org/wiki/API:Login}
 |  
 |  logout(self)
 |      Logout of the site and load details for the logged out user.
 |      
 |      Also logs out of the global account if linked to the user.
 |      U{https://www.mediawiki.org/wiki/API:Logout}
 |      
 |      @raises APIError: Logout is not available when OAuth enabled.
 |  
 |  logpages(self, number=50, mode=None, title=None, user=None, namespace=[], start=None, end=None, tag=None, newer=False, dump=False, offset=None, repeat=NotImplemented)
 |      Iterate log pages. DEPRECATED.
 |      
 |      When dump is enabled, the raw API dict is returned.
 |      
 |      @rtype: tuple of Page, str, int, str
 |  
 |  lonelypages(self, total=None, repeat=NotImplemented, number='[deprecated name of total]', step=NotImplemented)
 |      Yield Pages retrieved from Special:Lonelypages.
 |      
 |      @param total: number of pages to return
 |  
 |  longpages(self, total=None, repeat=NotImplemented, number='[deprecated name of total]', step=NotImplemented)
 |      Yield Pages and lengths from Special:Longpages.
 |      
 |      Yields a tuple of Page object, length(int).
 |      
 |      @param total: number of pages to return
 |  
 |  mediawiki_message(self, key, forceReload=NotImplemented)
 |      Fetch the text for a MediaWiki message.
 |      
 |      @param key: name of MediaWiki message
 |      @type key: str
 |      
 |      @rtype: unicode
 |  
 |  mediawiki_messages(self, keys)
 |      Fetch the text of a set of MediaWiki messages.
 |      
 |      If keys is '*' or ['*'], all messages will be fetched. (deprecated)
 |      
 |      The returned dict uses each key to store the associated message.
 |      
 |      @param keys: MediaWiki messages to fetch
 |      @type keys: set of str, '*' or ['*']
 |      
 |      @rtype: dict
 |  
 |  messages(self, sysop=False)
 |      Return true if the user has new messages, and false otherwise.
 |  
 |  moderate_post(self, post, state, reason)
 |      Moderate a Flow post.
 |      
 |      @param post: A Flow post
 |      @type post: Post
 |      @param state: The new moderation state
 |      @type state: str
 |      @param reason: The reason to moderate the topic
 |      @type reason: unicode
 |      @return: Metadata returned by the API
 |      @rtype: dict
 |  
 |  moderate_topic(self, page, state, reason)
 |      Moderate a Flow topic.
 |      
 |      @param page: A Flow topic
 |      @type page: Topic
 |      @param state: The new moderation state
 |      @type state: str
 |      @param reason: The reason to moderate the topic
 |      @type reason: unicode
 |      @return: Metadata returned by the API
 |      @rtype: dict
 |  
 |  movepage(self, page, newtitle, summary, movetalk=True, noredirect=False)
 |      Move a Page to a new title.
 |      
 |      @param page: the Page to be moved (must exist)
 |      @param newtitle: the new title for the Page
 |      @type newtitle: unicode
 |      @param summary: edit summary (required!)
 |      @param movetalk: if True (default), also move the talk page if possible
 |      @param noredirect: if True, suppress creation of a redirect from the
 |          old title to the new one
 |      @return: Page object with the new title
 |      @rtype: pywikibot.Page
 |  
 |  namespace(self, num, all=False)
 |      Return string containing local name of namespace 'num'.
 |      
 |      If optional argument 'all' is true, return all recognized
 |      values for this namespace.
 |      
 |      @param num: Namespace constant.
 |      @type num: int
 |      @param all: If True return a Namespace object. Otherwise
 |          return the namespace name.
 |      @return: local name or Namespace object
 |      @rtype: str or Namespace
 |  
 |  newfiles(self, user=None, start=None, end=None, reverse=False, total=None, lestart='[deprecated name of start]', repeat=NotImplemented, leuser='[deprecated name of user]', number='[deprecated name of total]', letitle=NotImplemented, step=NotImplemented, leend='[deprecated name of end]')
 |      Yield information about newly uploaded files.
 |      
 |      Yields a tuple of FilePage, Timestamp, user(unicode), comment(unicode).
 |      
 |      N.B. the API does not provide direct access to Special:Newimages, so
 |      this is derived from the "upload" log events instead.
 |  
 |  newimages(self, *args, **kwargs, repeat=NotImplemented, number='[deprecated name of total]')
 |      Yield information about newly uploaded files.
 |      
 |      DEPRECATED: Use newfiles() instead.
 |  
 |  newpages(self, user=None, returndict=False, start=None, end=None, reverse=False, showBot=False, showRedirects=False, excludeuser=None, showPatrolled=None, namespaces=None, total=None, step=NotImplemented, repeat=NotImplemented, number='[deprecated name of total]', rc_show=NotImplemented, namespace='[deprecated name of namespaces]', rcshow=NotImplemented, get_redirect=NotImplemented)
 |      Yield new articles (as Page objects) from recent changes.
 |      
 |      Starts with the newest article and fetches the number of articles
 |      specified in the first argument.
 |      
 |      The objects yielded are dependent on parameter returndict.
 |      When true, it yields a tuple composed of a Page object and a dict of
 |      attributes.
 |      When false, it yields a tuple composed of the Page object,
 |      timestamp (unicode), length (int), an empty unicode string, username
 |      or IP address (str), comment (unicode).
 |      
 |      @param namespaces: only iterate pages in these namespaces
 |      @type namespaces: iterable of basestring or Namespace key,
 |          or a single instance of those types.  May be a '|' separated
 |          list of namespace identifiers.
 |      @raises KeyError: a namespace identifier was not resolved
 |      @raises TypeError: a namespace identifier has an inappropriate
 |          type such as NoneType or bool
 |  
 |  nice_get_address(self, title)
 |      Return shorter URL path to retrieve page titled 'title'.
 |  
 |  notifications(self, **kwargs)
 |      Yield Notification objects from the Echo extension.
 |  
 |  notifications_mark_read(self, **kwargs)
 |      Mark selected notifications as read.
 |      
 |      @return: whether the action was successful
 |      @rtype: bool
 |  
 |  page_can_be_edited(self, page)
 |      Determine if the page can be edited.
 |      
 |      Return True if and only if:
 |        - page is unprotected, and bot has an account for this site, or
 |        - page is protected, and bot has a sysop account for this site.
 |      
 |      @rtype: bool
 |  
 |  page_embeddedin(self, page, filterRedirects=None, namespaces=None, total=None, content=False, step=NotImplemented)
 |      Iterate all pages that embedded the given page as a template.
 |      
 |      @param page: The Page to get inclusions for.
 |      @param filterRedirects: If True, only return redirects that embed
 |          the given page. If False, only return non-redirect links. If
 |          None, return both (no filtering).
 |      @param namespaces: If present, only return links from the namespaces
 |          in this list.
 |      @type namespaces: iterable of basestring or Namespace key,
 |          or a single instance of those types.  May be a '|' separated
 |          list of namespace identifiers.
 |      @param content: if True, load the current content of each iterated page
 |          (default False)
 |      @raises KeyError: a namespace identifier was not resolved
 |      @raises TypeError: a namespace identifier has an inappropriate
 |          type such as NoneType or bool
 |  
 |  page_exists(self, page)
 |      Return True if and only if page is an existing page on site.
 |  
 |  page_extlinks(self, page, total=None, step=NotImplemented)
 |      Iterate all external links on page, yielding URL strings.
 |  
 |  page_isredirect(self, page)
 |      Return True if and only if page is a redirect.
 |  
 |  page_restrictions(self, page)
 |      Return a dictionary reflecting page protections.
 |  
 |  pagebacklinks(self, page, followRedirects=False, filterRedirects=None, namespaces=None, total=None, content=False)
 |      Iterate all pages that link to the given page.
 |      
 |      @param page: The Page to get links to.
 |      @param followRedirects: Also return links to redirects pointing to
 |          the given page.
 |      @param filterRedirects: If True, only return redirects to the given
 |          page. If False, only return non-redirect links. If None, return
 |          both (no filtering).
 |      @param namespaces: If present, only return links from the namespaces
 |          in this list.
 |      @type namespaces: iterable of basestring or Namespace key,
 |          or a single instance of those types.  May be a '|' separated
 |          list of namespace identifiers.
 |      @param total: Maximum number of pages to retrieve in total.
 |      @param content: if True, load the current content of each iterated page
 |          (default False)
 |      @raises KeyError: a namespace identifier was not resolved
 |      @raises TypeError: a namespace identifier has an inappropriate
 |          type such as NoneType or bool
 |  
 |  pagecategories(self, page, total=None, content=False, step=NotImplemented, withSortKey=NotImplemented)
 |      Iterate categories to which page belongs.
 |      
 |      @param content: if True, load the current content of each iterated page
 |          (default False); note that this means the contents of the
 |          category description page, not the pages contained in the category
 |  
 |  pageimages(self, page, total=None, content=False, step=NotImplemented)
 |      Iterate images used (not just linked) on the page.
 |      
 |      @param content: if True, load the current content of each iterated page
 |          (default False); note that this means the content of the image
 |          description page, not the image itself
 |  
 |  pagelanglinks(self, page, total=None, include_obsolete=False, step=NotImplemented)
 |      Iterate all interlanguage links on page, yielding Link objects.
 |      
 |      @param include_obsolete: if true, yield even Link objects whose
 |                               site is obsolete
 |  
 |  pagelinks(self, page, namespaces=None, follow_redirects=False, total=None, content=False, step=NotImplemented)
 |      Iterate internal wikilinks contained (or transcluded) on page.
 |      
 |      @param namespaces: Only iterate pages in these namespaces (default: all)
 |      @type namespaces: iterable of basestring or Namespace key,
 |          or a single instance of those types.  May be a '|' separated
 |          list of namespace identifiers.
 |      @param follow_redirects: if True, yields the target of any redirects,
 |          rather than the redirect page
 |      @param content: if True, load the current content of each iterated page
 |          (default False)
 |      @raises KeyError: a namespace identifier was not resolved
 |      @raises TypeError: a namespace identifier has an inappropriate
 |          type such as NoneType or bool
 |  
 |  pagename2codes(self)
 |      Return list of localized PAGENAMEE tags for the site.
 |  
 |  pagenamecodes(self)
 |      Return list of localized PAGENAME tags for the site.
 |  
 |  pagereferences(self, page, followRedirects=False, filterRedirects=None, withTemplateInclusion=True, onlyTemplateInclusion=False, namespaces=None, total=None, content=False, step=NotImplemented)
 |      Convenience method combining pagebacklinks and page_embeddedin.
 |      
 |      @param namespaces: If present, only return links from the namespaces
 |          in this list.
 |      @type namespaces: iterable of basestring or Namespace key,
 |          or a single instance of those types.  May be a '|' separated
 |          list of namespace identifiers.
 |      @raises KeyError: a namespace identifier was not resolved
 |      @raises TypeError: a namespace identifier has an inappropriate
 |          type such as NoneType or bool
 |  
 |  pagetemplates(self, page, namespaces=None, total=None, content=False, step=NotImplemented)
 |      Iterate templates transcluded (not just linked) on the page.
 |      
 |      @param namespaces: Only iterate pages in these namespaces
 |      @type namespaces: iterable of basestring or Namespace key,
 |          or a single instance of those types.  May be a '|' separated
 |          list of namespace identifiers.
 |      @param content: if True, load the current content of each iterated page
 |          (default False)
 |      
 |      @raises KeyError: a namespace identifier was not resolved
 |      @raises TypeError: a namespace identifier has an inappropriate
 |          type such as NoneType or bool
 |  
 |  patrol(self, rcid=None, revid=None, revision=None, token=NotImplemented)
 |      Return a generator of patrolled pages.
 |      
 |      Pages to be patrolled are identified by rcid, revid or revision.
 |      At least one of the parameters is mandatory.
 |      See https://www.mediawiki.org/wiki/API:Patrol.
 |      
 |      @param rcid: an int/string/iterable/iterator providing rcid of pages
 |          to be patrolled.
 |      @type rcid: iterable/iterator which returns a number or string which
 |           contains only digits; it also supports a string (as above) or int
 |      @param revid: an int/string/iterable/iterator providing revid of pages
 |          to be patrolled.
 |      @type revid: iterable/iterator which returns a number or string which
 |           contains only digits; it also supports a string (as above) or int.
 |      @param revision: an Revision/iterable/iterator providing Revision object
 |          of pages to be patrolled.
 |      @type revision: iterable/iterator which returns a Revision object; it
 |          also supports a single Revision.
 |      @rtype: iterator of dict with 'rcid', 'ns' and 'title'
 |          of the patrolled page.
 |  
 |  prefixindex(self, prefix, namespace=0, includeredirects=True)
 |      Yield all pages with a given prefix. Deprecated.
 |      
 |      Use allpages() with the prefix= parameter instead of this method.
 |  
 |  preloadpages(self, pagelist, groupsize=50, templates=False, langlinks=False, pageprops=False)
 |      Return a generator to a list of preloaded pages.
 |      
 |      Note that [at least in current implementation] pages may be iterated
 |      in a different order than in the underlying pagelist.
 |      
 |      @param pagelist: an iterable that returns Page objects
 |      @param groupsize: how many Pages to query at a time
 |      @type groupsize: int
 |      @param templates: preload list of templates in the pages
 |      @param langlinks: preload list of language links found in the pages
 |  
 |  protect(self, page, protections, reason, expiry=None, **kwargs, summary='[deprecated name of reason]')
 |      (Un)protect a wiki page. Requires administrator status.
 |      
 |      @param protections: A dict mapping type of protection to protection
 |          level of that type. Valid types of protection are 'edit', 'move',
 |          'create', and 'upload'. Valid protection levels (in MediaWiki 1.12)
 |          are '' (equivalent to 'none'), 'autoconfirmed', and 'sysop'.
 |          If None is given, however, that protection will be skipped.
 |      @type  protections: dict
 |      @param reason: Reason for the action
 |      @type  reason: basestring
 |      @param expiry: When the block should expire. This expiry will be applied
 |          to all protections. If None, 'infinite', 'indefinite', 'never', or ''
 |          is given, there is no expiry.
 |      @type expiry: pywikibot.Timestamp, string in GNU timestamp format
 |          (including ISO 8601).
 |  
 |  protectedpages(self, namespace=0, type='edit', level=False, total=None, lvl='[deprecated name of level]')
 |      Return protected pages depending on protection level and type.
 |      
 |      For protection types which aren't 'create' it uses L{APISite.allpages},
 |      while it uses for 'create' the 'query+protectedtitles' module.
 |      
 |      @param namespace: The searched namespace.
 |      @type namespace: int or Namespace or str
 |      @param type: The protection type to search for (default 'edit').
 |      @type type: str
 |      @param level: The protection level (like 'autoconfirmed'). If False it
 |          shows all protection levels.
 |      @type level: str or False
 |      @return: The pages which are protected.
 |      @rtype: generator of Page
 |  
 |  protection_levels(self)
 |      Return the protection levels available on this site.
 |      
 |      @return: protection types available
 |      @rtype: set of unicode instances
 |      @see: L{Siteinfo._get_default()}
 |  
 |  protection_types(self)
 |      Return the protection types available on this site.
 |      
 |      @return: protection types available
 |      @rtype: set of unicode instances
 |      @see: L{Siteinfo._get_default()}
 |  
 |  purgepages(self, pages, **kwargs)
 |      Purge the server's cache for one or multiple pages.
 |      
 |      @param pages: list of Page objects
 |      @return: True if API returned expected response; False otherwise
 |      @rtype: bool
 |  
 |  randompage(self, redirect=False)
 |      DEPRECATED.
 |      
 |      @param redirect: Return a random redirect page
 |      @rtype: pywikibot.Page
 |  
 |  randompages(self, total=10, namespaces=None, redirects=False, content=False, step=NotImplemented)
 |      Iterate a number of random pages.
 |      
 |      Pages are listed in a fixed sequence, only the starting point is
 |      random.
 |      
 |      @param total: the maximum number of pages to iterate (default: 10)
 |      @param namespaces: only iterate pages in these namespaces.
 |      @type namespaces: iterable of basestring or Namespace key,
 |          or a single instance of those types.  May be a '|' separated
 |          list of namespace identifiers.
 |      @param redirects: if True, include only redirect pages in results
 |          (default: include only non-redirects)
 |      @param content: if True, load the current content of each iterated page
 |          (default False)
 |      @raises KeyError: a namespace identifier was not resolved
 |      @raises TypeError: a namespace identifier has an inappropriate
 |          type such as NoneType or bool
 |  
 |  randomredirectpage(self)
 |      DEPRECATED: Use Site.randompages() instead.
 |      
 |      @return: Return a random redirect page
 |  
 |  recentchanges(self, start=None, end=None, reverse=False, namespaces=None, pagelist=None, changetype=None, showMinor=None, showBot=None, showAnon=None, showRedirects=None, showPatrolled=None, topOnly=False, total=None, user=None, excludeuser=None, returndict=NotImplemented, includeredirects='[deprecated name of showRedirects]', rclimit='[deprecated name of total]', number='[deprecated name of total]', rcshow=NotImplemented, step=NotImplemented, rcstart='[deprecated name of start]', rcnamespace='[deprecated name of namespaces]', repeat=NotImplemented, rcend='[deprecated name of end]', rctype='[deprecated name of changetype]', nobots=NotImplemented, revision=NotImplemented, namespace='[deprecated name of namespaces]', rcprop=NotImplemented, rcdir=NotImplemented)
 |      Iterate recent changes.
 |      
 |      @param start: Timestamp to start listing from
 |      @type start: pywikibot.Timestamp
 |      @param end: Timestamp to end listing at
 |      @type end: pywikibot.Timestamp
 |      @param reverse: if True, start with oldest changes (default: newest)
 |      @type reverse: bool
 |      @param namespaces: only iterate pages in these namespaces
 |      @type namespaces: iterable of basestring or Namespace key,
 |          or a single instance of those types.  May be a '|' separated
 |          list of namespace identifiers.
 |      @param pagelist: iterate changes to pages in this list only
 |      @type pagelist: list of Pages
 |      @param changetype: only iterate changes of this type ("edit" for
 |          edits to existing pages, "new" for new pages, "log" for log
 |          entries)
 |      @type changetype: basestring
 |      @param showMinor: if True, only list minor edits; if False, only list
 |          non-minor edits; if None, list all
 |      @type showMinor: bool or None
 |      @param showBot: if True, only list bot edits; if False, only list
 |          non-bot edits; if None, list all
 |      @type showBot: bool or None
 |      @param showAnon: if True, only list anon edits; if False, only list
 |          non-anon edits; if None, list all
 |      @type showAnon: bool or None
 |      @param showRedirects: if True, only list edits to redirect pages; if
 |          False, only list edits to non-redirect pages; if None, list all
 |      @type showRedirects: bool or None
 |      @param showPatrolled: if True, only list patrolled edits; if False,
 |          only list non-patrolled edits; if None, list all
 |      @type showPatrolled: bool or None
 |      @param topOnly: if True, only list changes that are the latest revision
 |          (default False)
 |      @type topOnly: bool
 |      @param user: if not None, only list edits by this user or users
 |      @type user: basestring|list
 |      @param excludeuser: if not None, exclude edits by this user or users
 |      @type excludeuser: basestring|list
 |      @raises KeyError: a namespace identifier was not resolved
 |      @raises TypeError: a namespace identifier has an inappropriate
 |          type such as NoneType or bool
 |  
 |  redirect(self)
 |      Return the localized #REDIRECT keyword.
 |  
 |  redirectRegex(self)
 |      Return a compiled regular expression matching on redirect pages.
 |      
 |      Group 1 in the regex match object will be the target title.
 |  
 |  redirectpages(self, total=None, step=NotImplemented)
 |      Yield redirect pages from Special:ListRedirects.
 |      
 |      @param total: number of pages to return
 |  
 |  reply_to_post(self, page, reply_to_uuid, content, format)
 |      Reply to a post on a Flow topic.
 |      
 |      @param page: A Flow topic
 |      @type page: Topic
 |      @param reply_to_uuid: The UUID of the Post to create a reply to
 |      @type reply_to_uuid: unicode
 |      @param content: The content of the reply
 |      @type content: unicode
 |      @param format: The content format used for the supplied content
 |      @type format: unicode (either 'wikitext' or 'html')
 |      @return: Metadata returned by the API
 |      @rtype: dict
 |  
 |  resolvemagicwords(self, wikitext)
 |      Replace the {{ns:xx}} marks in a wikitext with the namespace names.
 |      
 |      DEPRECATED.
 |  
 |  restore_post(self, post, reason)
 |      Restore a Flow post.
 |      
 |      @param post: A Flow post
 |      @type post: Post
 |      @param reason: The reason to restore the post
 |      @type reason: unicode
 |      @return: Metadata returned by the API
 |      @rtype: dict
 |  
 |  restore_topic(self, page, reason)
 |      Restore a Flow topic.
 |      
 |      @param page: A Flow topic
 |      @type page: Topic
 |      @param reason: The reason to restore the topic
 |      @type reason: unicode
 |      @return: Metadata returned by the API
 |      @rtype: dict
 |  
 |  rollbackpage(self, page, **kwargs)
 |      Roll back page to version before last user's edits.
 |      
 |      The keyword arguments are those supported by the rollback API.
 |      
 |      As a precaution against errors, this method will fail unless
 |      the page history contains at least two revisions, and at least
 |      one that is not by the same user who made the last edit.
 |      
 |      @param page: the Page to be rolled back (must exist)
 |  
 |  search(self, searchstring, namespaces=None, where='text', get_redirects=False, total=None, content=False, key='[deprecated name of searchstring]', number='[deprecated name of total]', getredirects='[deprecated name of get_redirects]', step=NotImplemented)
 |      Iterate Pages that contain the searchstring.
 |      
 |      Note that this may include non-existing Pages if the wiki's database
 |      table contains outdated entries.
 |      
 |      @param searchstring: the text to search for
 |      @type searchstring: unicode
 |      @param where: Where to search; value must be "text", "title" or
 |          "nearmatch" (many wikis do not support title or nearmatch search)
 |      @param namespaces: search only in these namespaces (defaults to all)
 |      @type namespaces: iterable of basestring or Namespace key,
 |          or a single instance of those types.  May be a '|' separated
 |          list of namespace identifiers.
 |      @param get_redirects: if True, include redirects in results. Since
 |          version MediaWiki 1.23 it will always return redirects.
 |      @param content: if True, load the current content of each iterated page
 |          (default False)
 |      @raises KeyError: a namespace identifier was not resolved
 |      @raises TypeError: a namespace identifier has an inappropriate
 |          type such as NoneType or bool
 |  
 |  server_time(self)
 |      Return a Timestamp object representing the current server time.
 |      
 |      For wikis with a version newer than 1.16 it uses the 'time' property
 |      of the siteinfo 'general'. It'll force a reload before returning the
 |      time. It requests to expand the text '{{CURRENTTIMESTAMP}}' for older
 |      wikis.
 |      
 |      @return: the current server time
 |      @rtype: L{Timestamp}
 |  
 |  shortpages(self, total=None, repeat=NotImplemented, number='[deprecated name of total]', step=NotImplemented)
 |      Yield Pages and lengths from Special:Shortpages.
 |      
 |      Yields a tuple of Page object, length(int).
 |      
 |      @param total: number of pages to return
 |  
 |  stash_info(self, file_key, props=False)
 |      Get the stash info for a given file key.
 |  
 |  suppress_post(self, post, reason)
 |      Suppress a Flow post.
 |      
 |      @param post: A Flow post
 |      @type post: Post
 |      @param reason: The reason to suppress the post
 |      @type reason: unicode
 |      @return: Metadata returned by the API
 |      @rtype: dict
 |  
 |  suppress_topic(self, page, reason)
 |      Suppress a Flow topic.
 |      
 |      @param page: A Flow topic
 |      @type page: Topic
 |      @param reason: The reason to suppress the topic
 |      @type reason: unicode
 |      @return: Metadata returned by the API
 |      @rtype: dict
 |  
 |  token(self, page, tokentype)
 |      Deprecated; use the 'tokens' property instead.
 |      
 |      Return token retrieved from wiki to allow changing page content.
 |      
 |              @param page: the Page for which a token should be retrieved
 |              @param tokentype: the type of token (e.g., "edit", "move", "delete");
 |                  see API documentation for full list of types
 |  
 |  unblockuser(self, user, reason)
 |      Remove the block for the user.
 |      
 |      @param user: The username/IP without a namespace.
 |      @type user: L{pywikibot.User}
 |      @param reason: Reason for the unblock.
 |      @type reason: basestring
 |  
 |  uncategorizedcategories(self, total=None, repeat=NotImplemented, number='[deprecated name of total]', step=NotImplemented)
 |      Yield Categories from Special:Uncategorizedcategories.
 |      
 |      @param total: number of pages to return
 |  
 |  uncategorizedfiles = uncategorizedimages(self, total=None, repeat=NotImplemented, number='[deprecated name of total]', step=NotImplemented)
 |  
 |  uncategorizedimages(self, total=None, repeat=NotImplemented, number='[deprecated name of total]', step=NotImplemented)
 |      Yield FilePages from Special:Uncategorizedimages.
 |      
 |      @param total: number of pages to return
 |  
 |  uncategorizedpages(self, total=None, repeat=NotImplemented, number='[deprecated name of total]', step=NotImplemented)
 |      Yield Pages from Special:Uncategorizedpages.
 |      
 |      @param total: number of pages to return
 |  
 |  uncategorizedtemplates(self, total=None, repeat=NotImplemented, number='[deprecated name of total]', step=NotImplemented)
 |      Yield Pages from Special:Uncategorizedtemplates.
 |      
 |      @param total: number of pages to return
 |  
 |  unconnected_pages(self, total=None, step=NotImplemented)
 |      Yield Page objects from Special:UnconnectedPages.
 |      
 |      @param total: number of pages to return
 |  
 |  undelete_page(self, page, reason, revisions=None, summary='[deprecated name of reason]')
 |      Undelete page from the wiki. Requires appropriate privilege level.
 |      
 |      @param page: Page to be undeleted.
 |      @type page: Page
 |      @param revisions: List of timestamps to restore. If None, restores all revisions.
 |      @type revisions: list
 |      @param reason: Undeletion reason.
 |      @type reason: basestring
 |  
 |  unusedcategories(self, total=None, repeat=NotImplemented, number='[deprecated name of total]', step=NotImplemented)
 |      Yield Category objects from Special:Unusedcategories.
 |      
 |      @param total: number of pages to return
 |  
 |  unusedfiles(self, total=None, step=NotImplemented, extension=NotImplemented, repeat=NotImplemented, number='[deprecated name of total]')
 |      Yield FilePage objects from Special:Unusedimages.
 |      
 |      @param total: number of pages to return
 |  
 |  unusedimages(self, total=None, step=NotImplemented, extension=NotImplemented, repeat=NotImplemented, number='[deprecated name of total]')
 |      Yield FilePage objects from Special:Unusedimages.
 |      
 |      DEPRECATED: Use L{APISite.unusedfiles} instead.
 |  
 |  unwatchedpages(self, total=None, repeat=NotImplemented, number='[deprecated name of total]', step=NotImplemented)
 |      Yield Pages from Special:Unwatchedpages (requires Admin privileges).
 |      
 |      @param total: number of pages to return
 |  
 |  upload(self, filepage, source_filename=None, source_url=None, comment=None, text=None, watch=False, ignore_warnings=False, chunk_size=0, _file_key=None, _offset=0, _verify_stash=None, report_success=None, imagepage='[deprecated name of filepage]')
 |      Upload a file to the wiki.
 |      
 |      Either source_filename or source_url, but not both, must be provided.
 |      
 |      @param filepage: a FilePage object from which the wiki-name of the
 |          file will be obtained.
 |      @param source_filename: path to the file to be uploaded
 |      @param source_url: URL of the file to be uploaded
 |      @param comment: Edit summary; if this is not provided, then
 |          filepage.text will be used. An empty summary is not permitted.
 |          This may also serve as the initial page text (see below).
 |      @param text: Initial page text; if this is not set, then
 |          filepage.text will be used, or comment.
 |      @param watch: If true, add filepage to the bot user's watchlist
 |      @param ignore_warnings: It may be a static boolean, a callable returning
 |          a boolean or an iterable. The callable gets a list of UploadWarning
 |          instances and the iterable should contain the warning codes for
 |          which an equivalent callable would return True if all UploadWarning
 |          codes are in that list. If the result is False it will not continue
 |          uploading the file; otherwise it disables any warning and
 |          reattempts to upload the file. NOTE: If report_success is True or
 |          None it'll raise an UploadWarning exception if the static boolean is
 |          False.
 |      @type ignore_warnings: bool or callable or iterable of str
 |      @param chunk_size: The chunk size in bytes for chunked uploading (see
 |          U{https://www.mediawiki.org/wiki/API:Upload#Chunked_uploading}). It
 |          will only upload in chunks, if the version number is 1.20 or higher
 |          and the chunk size is positive but lower than the file size.
 |      @type chunk_size: int
 |      @param _file_key: Reuses an already uploaded file using the filekey. If
 |          None (default) it will upload the file.
 |      @type _file_key: str or None
 |      @param _offset: When file_key is not None this can be an integer to
 |          continue a previously canceled chunked upload. If False it treats
 |          that as a finished upload. If True it requests the stash info from
 |          the server to determine the offset. By default starts at 0.
 |      @type _offset: int or bool
 |      @param _verify_stash: Requests the SHA1 and file size uploaded and
 |          compares it to the local file. Also verifies that _offset matches
 |          the file size if _offset is an int. If _offset is False
 |          it verifies that the file size matches the local file. If None
 |          it'll verify the stash when a file key and offset is given.
 |      @type _verify_stash: bool or None
 |      @param report_success: If the upload was successful it'll print a
 |          success message and if ignore_warnings is set to False it'll
 |          raise an UploadWarning if a warning occurred. If it's None (default)
 |          it'll be True if ignore_warnings is a bool and False otherwise. If
 |          it's True or None ignore_warnings must be a bool.
 |      @return: It returns True if the upload was successful and False
 |          otherwise.
 |      @rtype: bool
 |  
 |  usercontribs(self, user=None, userprefix=None, start=None, end=None, reverse=False, namespaces=None, showMinor=None, total=None, top_only=False, step=NotImplemented)
 |      Iterate contributions by a particular user.
 |      
 |      Iterated values are in the same format as recentchanges.
 |      
 |      @param user: Iterate contributions by this user (name or IP)
 |      @param userprefix: Iterate contributions by all users whose names
 |          or IPs start with this substring
 |      @param start: Iterate contributions starting at this Timestamp
 |      @param end: Iterate contributions ending at this Timestamp
 |      @param reverse: Iterate oldest contributions first (default: newest)
 |      @param namespaces: only iterate pages in these namespaces
 |      @type namespaces: iterable of basestring or Namespace key,
 |          or a single instance of those types.  May be a '|' separated
 |          list of namespace identifiers.
 |      @param showMinor: if True, iterate only minor edits; if False and
 |          not None, iterate only non-minor edits (default: iterate both)
 |      @param top_only: if True, iterate only edits which are the latest
 |          revision
 |      @raises KeyError: a namespace identifier was not resolved
 |      @raises TypeError: a namespace identifier has an inappropriate
 |          type such as NoneType or bool
 |  
 |  users(self, usernames)
 |      Iterate info about a list of users by name or IP.
 |      
 |      @param usernames: a list of user names
 |      @type usernames: list, or other iterable, of unicodes
 |  
 |  validate_tokens(self, types)
 |      Validate if requested tokens are acceptable.
 |      
 |      Valid tokens depend on mw version.
 |  
 |  version(self)
 |      Return live project version number as a string.
 |      
 |      This overwrites the corresponding family method for APISite class. Use
 |      L{pywikibot.tools.MediaWikiVersion} to compare MediaWiki versions.
 |  
 |  wantedcategories(self, total=None, repeat=NotImplemented, number='[deprecated name of total]', step=NotImplemented)
 |      Yield Pages from Special:Wantedcategories.
 |      
 |      @param total: number of pages to return
 |  
 |  wantedpages(self, total=None, step=NotImplemented)
 |      Yield Pages from Special:Wantedpages.
 |      
 |      @param total: number of pages to return
 |  
 |  watch(self, pages, unwatch=False)
 |      Add or remove pages from watchlist.
 |      
 |      @param pages: A single page or a sequence of pages.
 |      @type pages: A page object, a page-title string, or sequence of them.
 |          Also accepts a single pipe-separated string like 'title1|title2'.
 |      @param unwatch: If True, remove pages from watchlist;
 |          if False add them (default).
 |      @return: True if API returned expected response; False otherwise
 |      @rtype: bool
 |  
 |  watched_pages(self, sysop=False, force=False, total=None, step=NotImplemented)
 |      Return watchlist.
 |      
 |      @param sysop: Returns watchlist of sysop user if true
 |      @type sysop: bool
 |      @param force: If True, reload the watchlist instead of using the cache
 |      @type force: bool
 |      @return: list of pages in watchlist
 |      @rtype: list of pywikibot.Page objects
 |  
 |  watchlist_revs(self, start=None, end=None, reverse=False, namespaces=None, showMinor=None, showBot=None, showAnon=None, total=None, step=NotImplemented)
 |      Iterate revisions to pages on the bot user's watchlist.
 |      
 |      Iterated values will be in same format as recentchanges.
 |      
 |      @param start: Iterate revisions starting at this Timestamp
 |      @param end: Iterate revisions ending at this Timestamp
 |      @param reverse: Iterate oldest revisions first (default: newest)
 |      @param namespaces: only iterate pages in these namespaces
 |      @type namespaces: iterable of basestring or Namespace key,
 |          or a single instance of those types.  May be a '|' separated
 |          list of namespace identifiers.
 |      @param showMinor: if True, only list minor edits; if False (and not
 |          None), only list non-minor edits
 |      @param showBot: if True, only list bot edits; if False (and not
 |          None), only list non-bot edits
 |      @param showAnon: if True, only list anon edits; if False (and not
 |          None), only list non-anon edits
 |      @raises KeyError: a namespace identifier was not resolved
 |      @raises TypeError: a namespace identifier has an inappropriate
 |          type such as NoneType or bool
 |  
 |  watchpage(self, page, unwatch=False)
 |      Add or remove page from watchlist.
 |      
 |      DEPRECATED: Use Site().watch() instead.
 |      
 |      @param page: A single page.
 |      @type page: A page object, a page-title string.
 |      @param unwatch: If True, remove page from watchlist; if False (default),
 |          add it.
 |      @return: True if API returned expected response; False otherwise
 |      @rtype: bool
 |  
 |  withoutinterwiki(self, total=None, repeat=NotImplemented, number='[deprecated name of total]', step=NotImplemented)
 |      Yield Pages without language links from Special:Withoutinterwiki.
 |      
 |      @param total: number of pages to return
 |  
 |  ----------------------------------------------------------------------
 |  Class methods defined here:
 |  
 |  fromDBName(dbname, site=None) from builtins.type
 |      Create a site from a database name using the sitematrix.
 |      
 |      @param dbname: database name
 |      @type dbname: str
 |      @param site: Site to load sitematrix from. (Default meta.wikimedia.org)
 |      @type site: APISite
 |      @return: site object for the database name
 |      @rtype: APISite
 |  
 |  ----------------------------------------------------------------------
 |  Data descriptors defined here:
 |  
 |  article_path
 |      Get the nice article path without $1.
 |  
 |  globaluserinfo
 |      Retrieve userinfo from site and store in _userinfo attribute.
 |      
 |      self._userinfo will be a dict with the following keys and values:
 |      
 |        - id: user id (numeric str)
 |        - name: username (if user is logged in)
 |        - anon: present if user is not logged in
 |        - groups: list of groups (could be empty)
 |        - rights: list of rights (could be empty)
 |        - message: present if user has a new message on talk page
 |        - blockinfo: present if user is blocked (dict)
 |      
 |      U{https://www.mediawiki.org/wiki/API:Userinfo}
 |      
 |      @param force: force to retrieve userinfo ignoring cache
 |      @type force: bool
 |  
 |  has_data_repository
 |      Return True if site has a shared data repository like Wikidata.
 |  
 |  has_image_repository
 |      Return True if site has a shared image repository like Commons.
 |  
 |  has_transcluded_data
 |      Return True if site has a shared data repository like Wikidata.
 |  
 |  lang
 |      Return the code for the language of this Site.
 |  
 |  months_names
 |      Obtain month names from the site messages.
 |      
 |      The list is zero-indexed, ordered by month in calendar, and should
 |      be in the original site language.
 |      
 |      @return: list of tuples (month name, abbreviation)
 |      @rtype: list
 |  
 |  proofread_index_ns
 |      Return Index namespace for the ProofreadPage extension.
 |  
 |  proofread_levels
 |      Return Quality Levels for the ProofreadPage extension.
 |  
 |  proofread_page_ns
 |      Return Page namespace for the ProofreadPage extension.
 |  
 |  siteinfo
 |      Site information dict.
 |  
 |  userinfo
 |      Retrieve userinfo from site and store in _userinfo attribute.
 |      
 |      self._userinfo will be a dict with the following keys and values:
 |      
 |        - id: user id (numeric str)
 |        - name: username (if user is logged in)
 |        - anon: present if user is not logged in
 |        - groups: list of groups (could be empty)
 |        - rights: list of rights (could be empty)
 |        - message: present if user has a new message on talk page
 |        - blockinfo: present if user is blocked (dict)
 |      
 |      U{https://www.mediawiki.org/wiki/API:Userinfo}
 |      
 |      @param force: force to retrieve userinfo ignoring cache
 |      @type force: bool
 |  
 |  ----------------------------------------------------------------------
 |  Data and other attributes defined here:
 |  
 |  OnErrorExc = <class 'pywikibot.site.OnErrorExc'>
 |      OnErrorExc(exception, on_new_page)
 |  
 |  ----------------------------------------------------------------------
 |  Methods inherited from BaseSite:
 |  
 |  __getattr__(self, attr)
 |      Delegate undefined methods calls to the Family object.
 |  
 |  __hash__(self)
 |      Return hashable key.
 |  
 |  __repr__(self)
 |      Return internal representation.
 |  
 |  __str__(self)
 |      Return string representing this Site's name and code.
 |  
 |  category_namespace(self)
 |      Deprecated; use namespaces.CATEGORY.custom_name instead.
 |      
 |      Return local name for the Category namespace.
 |  
 |  category_namespaces(self)
 |      Deprecated; use list(namespaces.CATEGORY) instead.
 |      
 |      Return names for the Category namespace.
 |  
 |  category_on_one_line(self)
 |      Return True if this site wants all category links on one line.
 |  
 |  checkCharset(self, charset)
 |      DEPRECATED.
 |  
 |  cookies(self, sysop=False)
 |      DEPRECATED.
 |  
 |  disambcategory(self)
 |      Return Category in which disambig pages are listed.
 |  
 |  fam(self)
 |      Deprecated; use family attribute instead.
 |      
 |      Return Family object for this Site.
 |  
 |  getNamespaceIndex(self, namespace)
 |      DEPRECATED: Return the Namespace for a given namespace name.
 |  
 |  getSite(self, code)
 |      Return Site object for language 'code' in this Family.
 |  
 |  getUrl(self, path, retry=None, sysop=None, data=None, refer=NotImplemented, cookies_only=NotImplemented, back_response=NotImplemented, no_hostname=NotImplemented, compress=NotImplemented)
 |      DEPRECATED.
 |      
 |      Retained for compatibility only. All arguments except path and data
 |      are ignored.
 |  
 |  image_namespace(self)
 |      Deprecated; use namespaces.FILE.custom_name instead.
 |      
 |      Return local name for the File namespace.
 |  
 |  interwiki(self, prefix)
 |      Return the site for a corresponding interwiki prefix.
 |      
 |      @raises SiteDefinitionError: if the url given in the interwiki table
 |          doesn't match any of the existing families.
 |      @raises KeyError: if the prefix is not an interwiki prefix.
 |  
 |  interwiki_prefix(self, site)
 |      Return the interwiki prefixes going to that site.
 |      
 |      The interwiki prefixes are ordered first by length (shortest first)
 |      and then alphabetically. L{interwiki(prefix)} is not guaranteed to equal
 |      C{site} (i.e. the parameter passed to this function).
 |      
 |      @param site: The targeted site, which might be its own.
 |      @type site: L{BaseSite}
 |      @return: The interwiki prefixes
 |      @rtype: list (guaranteed to be not empty)
 |      @raises KeyError: if there is no interwiki prefix for that site.
 |  
 |  interwiki_putfirst(self)
 |      Return list of language codes for ordering of interwiki links.
 |  
 |  isInterwikiLink(self, text)
 |      Return True if text is in the form of an interwiki link.
 |      
 |      If a link object constructed using "text" as the link text parses as
 |      belonging to a different site, this method returns True.
 |  
 |  languages(self)
 |      Return list of all valid language codes for this site's Family.
 |  
 |  linkto(self, title, othersite=None)
 |      DEPRECATED. Return a wikilink to a page.
 |      
 |      @param title: Title of the page to link to
 |      @type title: unicode
 |      @param othersite: Generate a interwiki link for use on this site.
 |      @type othersite: Site (optional)
 |      
 |      @rtype: unicode
 |  
 |  local_interwiki(self, prefix)
 |      Return whether the interwiki prefix is local.
 |      
 |      A local interwiki prefix is handled by the target site like a normal
 |      link. So if that link also contains an interwiki link it does follow
 |      it as long as it's a local link.
 |      
 |      @raises SiteDefinitionError: if the url given in the interwiki table
 |          doesn't match any of the existing families.
 |      @raises KeyError: if the prefix is not an interwiki prefix.
 |  
 |  lock_page(self, page, block=True)
 |      Lock page for writing.  Must be called before writing any page.
 |      
 |      We don't want different threads trying to write to the same page
 |      at the same time, even to different sections.
 |      
 |      @param page: the page to be locked
 |      @type page: pywikibot.Page
 |      @param block: if true, wait until the page is available to be locked;
 |          otherwise, raise an exception if page can't be locked
 |  
 |  mediawiki_namespace(self)
 |      Deprecated; use namespaces.MEDIAWIKI.custom_name instead.
 |      
 |      Return local name for the MediaWiki namespace.
 |  
 |  normalizeNamespace = call(*a, **kw)
 |  
 |  ns_index(self, namespace)
 |      Deprecated; use APISite.namespaces.lookup_name instead.
 |      
 |      
 |      Return the Namespace for a given namespace name.
 |      
 |      @param namespace: name
 |      @type namespace: unicode
 |      @return: The matching Namespace object on this Site
 |      @rtype: Namespace, or None if invalid
 |  
 |  ns_normalize(self, value)
 |      Return canonical local form of namespace name.
 |      
 |      @param value: A namespace name
 |      @type value: unicode
 |  
 |  postData(self, address, data, contentType=None, sysop=False, compress=True, cookies=None)
 |      DEPRECATED.
 |  
 |  postForm(self, address, predata, sysop=False, cookies=None)
 |      DEPRECATED.
 |  
 |  sametitle(self, title1, title2)
 |      Return True if title1 and title2 identify the same wiki page.
 |      
 |      title1 and title2 may be unequal but still identify the same page,
 |      if they use different aliases for the same namespace.
 |  
 |  solveCaptcha(self, data)
 |      DEPRECATED.
 |  
 |  special_namespace(self)
 |      Deprecated; use namespaces.SPECIAL.custom_name instead.
 |      
 |      Return local name for the Special: namespace.
 |  
 |  template_namespace(self)
 |      Deprecated; use namespaces.TEMPLATE.custom_name instead.
 |      
 |      Return local name for the Template namespace.
 |  
 |  unlock_page(self, page)
 |      Unlock page.  Call as soon as a write operation has completed.
 |      
 |      @param page: the page to be locked
 |      @type page: pywikibot.Page
 |  
 |  updateCookies(self, datas, sysop=False)
 |      DEPRECATED.
 |  
 |  urlEncode(self, query)
 |      DEPRECATED.
 |  
 |  user(self)
 |      Return the currently-logged in bot user, or None.
 |  
 |  username(self, sysop=False)
 |      Return the username/sysopname used for the site.
 |  
 |  validLanguageLinks(self)
 |      Return list of language codes that can be used in interwiki links.
 |  
 |  ----------------------------------------------------------------------
 |  Data descriptors inherited from BaseSite:
 |  
 |  code
 |      The identifying code for this Site.
 |      
 |      By convention, this is usually an ISO language code, but it does
 |      not have to be.
 |  
 |  doc_subpage
 |      Return the documentation subpage for this Site.
 |      
 |      @rtype: tuple
 |  
 |  family
 |      The Family object for this Site's wiki family.
 |  
 |  namespaces
 |      Return dict of valid namespaces on this wiki.
 |  
 |  nocapitalize
 |      Return whether this site's default title case is case-sensitive.
 |      
 |      DEPRECATED.
 |  
 |  sitename
 |      String representing this Site's name and code.
 |  
 |  throttle
 |      Return this Site's throttle.  Initialize a new one if needed.
 |  
 |  ----------------------------------------------------------------------
 |  Methods inherited from pywikibot.tools.ComparableMixin:
 |  
 |  __eq__(self, other)
 |      Compare if self is equal to other.
 |  
 |  __ge__(self, other)
 |      Compare if self is greater equals other.
 |  
 |  __gt__(self, other)
 |      Compare if self is greater than other.
 |  
 |  __le__(self, other)
 |      Compare if self is less equals other.
 |  
 |  __lt__(self, other)
 |      Compare if self is less than other.
 |  
 |  __ne__(self, other)
 |      Compare if self is not equal to other.
 |  
 |  ----------------------------------------------------------------------
 |  Data descriptors inherited from pywikibot.tools.ComparableMixin:
 |  
 |  __dict__
 |      dictionary for instance variables (if defined)
 |  
 |  __weakref__
 |      list of weak references to the object (if defined)

page = pywikibot.Page(site, 'test')
 
page.exists()
True
page = pywikibot.Page(site, 'test')