In [1]:
import pywikibot
import csv
import re
import requests
import urllib.parse
import time
import datetime
from datetime import date

from pywikibot import pagegenerators as pg


def copyright_qids(years):
    """Map years-since-death to Wikidata QIDs for the copyright claim.

    Parameters:
        years (int): calendar years elapsed since the author's death.

    Returns:
        (reason, jurisdiction, status): a tuple of QID strings for
        P887 (based on heuristic), P1001 (applies to jurisdiction) and
        P7763 (copyright status).  The four branches cover every integer,
        so this always returns a tuple.
    """
    if years > 100:
        # Died more than 100 years ago -> oeuvre expired worldwide.
        return ("Q96095092", "Q13780930", "Q71887839")
    if years > 70:
        # Died more than 70 years ago -> expired in countries with
        # copyright terms of 70 years or shorter.
        return ("Q96095089", "Q59542795", "Q71887839")
    if years <= 50:
        # Died 50 years ago or less -> still copyrighted in countries
        # with terms of 50 years or longer.  (Checked before the <= 70
        # branch, matching the original branch order.)
        return ("Q96095541", "Q87048619", "Q73555012")
    # 50 < years <= 70: died less than 70 years ago -> still copyrighted
    # in countries with terms of 70 years or longer.
    return ("Q96095416", "Q60845045", "Q73555012")


# SPARQL query selecting the items to process (file supplied alongside
# this notebook; presumably selects people with a date of death).
with open('copyrightPD100.rq', 'r') as query_file:
    QUERY = query_file.read()

# Create the site/repo once, outside the loop — the original re-created
# them on every iteration for no benefit.
wikidata_site = pywikibot.Site("wikidata", "wikidata")
repo = wikidata_site.data_repository()

# NOTE(review): the saved traceback below shows this call dying with
# MaxlagTimeoutError when the Wikidata servers are lagged; consider
# lowering pywikibot's maxlag retries or simply re-running later.
generator = pg.WikidataSPARQLPageGenerator(QUERY, site=wikidata_site)

for item in generator:
    item_dict = item.get()            # full item data
    clm_dict = item_dict["claims"]    # claim dictionary
    print(item)

    years = None
    try:
        # P570 = date of death.  If several P570 claims qualify, the
        # last one wins — preserved from the original loop.
        for clm in clm_dict["P570"]:
            dod = clm.getTarget()
            # WbTime precision 9 = year; > 8 means year-or-more-precise.
            if dod is not None and dod.precision > 8:
                # Calendar-year difference; may overcount by one when
                # the death anniversary has not yet passed this year.
                years = date.today().year - dod.year
    except KeyError:
        # Item has no P570 claim at all.  (The original used a bare
        # except:; KeyError is the only expected failure here.)
        print('geen geschikte dod')

    print(years)
    if years is None:
        continue

    reason_qid, jurisdiction_qid, status_qid = copyright_qids(years)
    reason = pywikibot.ItemPage(repo, reason_qid)
    jurisdiction = pywikibot.ItemPage(repo, jurisdiction_qid)
    copyright_status = pywikibot.ItemPage(repo, status_qid)
    print(copyright_status)

    # Main claim: P7763 (copyright status as a creator).
    claim = pywikibot.Claim(repo, 'P7763')
    claim.setTarget(copyright_status)

    # P887 = based on heuristic; attached as a source below.
    reason_claim = pywikibot.Claim(repo, 'P887')
    reason_claim.setTarget(reason)

    # P1001 = applies to jurisdiction; attached as a qualifier.
    juri_claim = pywikibot.Claim(repo, 'P1001')
    juri_claim.setTarget(jurisdiction)

    # P813 = retrieved (today's date) for the source block.
    today = datetime.datetime.today()
    refdate = pywikibot.Claim(repo, 'P813')
    refdate.setTarget(
        pywikibot.WbTime(year=today.year, month=today.month, day=today.day))

    claim.addQualifier(juri_claim)
    claim.addSources([refdate, reason_claim], summary=u'Adding sources.')
    item.addClaim(claim, summary=u'Add copyright status based on date of death')
 
Sleeping for 5.0 seconds, 2020-06-07 18:50:46
Sleeping for 5.0 seconds, 2020-06-07 18:50:51
Sleeping for 5.0 seconds, 2020-06-07 18:50:56
Sleeping for 5.7 seconds, 2020-06-07 18:51:01
Sleeping for 7.1 seconds, 2020-06-07 18:51:07
Sleeping for 9.1 seconds, 2020-06-07 18:51:14
Sleeping for 10.6 seconds, 2020-06-07 18:51:23
Sleeping for 12.2 seconds, 2020-06-07 18:51:34
Sleeping for 13.7 seconds, 2020-06-07 18:51:46
Sleeping for 15.2 seconds, 2020-06-07 18:52:00
Sleeping for 18.0 seconds, 2020-06-07 18:52:16
Sleeping for 19.6 seconds, 2020-06-07 18:52:34
Sleeping for 21.3 seconds, 2020-06-07 18:52:54
Sleeping for 24.6 seconds, 2020-06-07 18:53:15
Sleeping for 26.3 seconds, 2020-06-07 18:53:40
---------------------------------------------------------------------------
MaxlagTimeoutError                        Traceback (most recent call last)
<ipython-input-1-2bb272308686> in <module>
     16 
     17 wikidata_site = pywikibot.Site("wikidata", "wikidata")
---> 18 generator = pg.WikidataSPARQLPageGenerator(QUERY, site=wikidata_site)
     19 
     20 for item in generator:

/srv/paws/pwb/pywikibot/pagegenerators.py in WikidataSPARQLPageGenerator(query, site, item_name, endpoint, entity_url, result_type)
   2949     if not endpoint or not entity_url:
   2950         dependencies['repo'] = repo
-> 2951     query_object = sparql.SparqlQuery(**dependencies)
   2952     data = query_object.get_items(query,
   2953                                   item_name=item_name,

/srv/paws/pwb/pywikibot/data/sparql.py in __init__(self, endpoint, entity_url, repo, max_retries, retry_wait)
     61         if repo:
     62             try:
---> 63                 self.endpoint = repo.sparql_endpoint
     64                 self.entity_url = repo.concept_base_uri
     65             except NotImplementedError:

/srv/paws/pwb/pywikibot/site/__init__.py in callee(self, *args, **kwargs)
   1358     def decorator(fn):
   1359         def callee(self, *args, **kwargs):
-> 1360             if MediaWikiVersion(self.version()) < MediaWikiVersion(version):
   1361                 raise NotImplementedError(
   1362                     'Method or function "%s"\n'

/srv/paws/pwb/pywikibot/site/__init__.py in version(self)
   2744             try:
   2745                 version = self.siteinfo.get('generator',
-> 2746                                             expiry=1).split(' ')[1]
   2747             except pywikibot.data.api.APIError:
   2748                 # May occur if you are not logged in (no API read permissions).

/srv/paws/pwb/pywikibot/site/__init__.py in get(self, key, get_default, cache, expiry)
   1680                 elif not Siteinfo._is_expired(cached[1], expiry):
   1681                     return copy.deepcopy(cached[0])
-> 1682         preloaded = self._get_general(key, expiry)
   1683         if not preloaded:
   1684             preloaded = self._get_siteinfo(key, expiry)[key]

/srv/paws/pwb/pywikibot/site/__init__.py in _get_general(self, key, expiry)
   1626                     .format("', '".join(props)), _logger)
   1627             props += ['general']
-> 1628             default_info = self._get_siteinfo(props, expiry)
   1629             for prop in props:
   1630                 self._cache[prop] = default_info[prop]

/srv/paws/pwb/pywikibot/site/__init__.py in _get_siteinfo(self, prop, expiry)
   1549         request._warning_handler = warn_handler
   1550         try:
-> 1551             data = request.submit()
   1552         except api.APIError as e:
   1553             if e.code == 'siunknown_siprop':

/srv/paws/pwb/pywikibot/data/api.py in submit(self)
   2249         cached_available = self._load_cache()
   2250         if not cached_available:
-> 2251             self._data = super(CachedRequest, self).submit()
   2252             self._write_cache(self._data)
   2253         else:

/srv/paws/pwb/pywikibot/data/api.py in submit(self)
   2099             raise unittest.SkipTest(msg)
   2100 
-> 2101         raise MaxlagTimeoutError(msg)
   2102 
   2103     def wait(self):

MaxlagTimeoutError: Maximum retries attempted due to maxlag without success.
In [ ]:
 
In [ ]:
 
In [ ]:
 
In [ ]:
 
In [ ]: