Set up the environment first: imports, site objects, and helper functions

#start of actual script
import pywikibot
from pywikibot import pagegenerators as pg

site = pywikibot.Site("wikidata", "wikidata")
wikidata_site = site #compatibility stuff
repo = site.data_repository()    

def getLabelFromObject(WDObject):
    # Return the English label if present, otherwise fall back to the entity ID
    item_dict = WDObject.get()
    if 'labels' in item_dict and 'en' in item_dict['labels']:
        return item_dict['labels']['en']
    return WDObject.getID()

def getLabelFromWDID(ID): #works for property IDs only; item IDs would need a separate lookup (see the sketch below)
    site = pywikibot.Site("wikidata", "wikidata")
    repo = site.data_repository()
    item = pywikibot.PropertyPage(repo, ID)
    return getLabelFromObject(item)
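
For reference, a minimal sketch of a label lookup that handles both item and property IDs (a hypothetical helper, not used by the script below):

def getLabelFromAnyID(entity_id):
    # Hypothetical helper: pick the page class from the ID prefix,
    # then reuse getLabelFromObject for the actual lookup
    page_class = pywikibot.PropertyPage if entity_id.startswith('P') else pywikibot.ItemPage
    return getLabelFromObject(page_class(repo, entity_id))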

Main script (run the setup cells above first)

# qid = 'Q13406268' # This is the sandbox QID. The query replaces that in production
pidm1 = 'P1112' # main property to migrate from
pidq1 = 'P642' # qualifier property to migrate from
pidm2 = 'P528' # main property to migrate to
pidq2 = 'P972' # qualifier property to migrate to

QUERY = """SELECT DISTINCT ?item
WHERE
{
    ?item wdt:""" + pidm1 + """ ?wdprop ;
             p:""" + pidm1 + """ ?statement .
    ?statement pq:""" + pidq1 + """ ?ofwhat .
}

ORDER BY ASC(?item)
LIMIT 3"""

edit_summary = 'Deprecate ' + pidm1 + '/' + pidq1 + ', move to ' + pidm2 + '/' + pidq2
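# e.g. 'Deprecate P1112/P642, move to P528/P972'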

generator = pg.WikidataSPARQLPageGenerator(QUERY, site=wikidata_site)
generator = site.preloadpages(generator, pageprops=True)

for item in generator:
    # item = pywikibot.ItemPage(repo, qid)

    item_dict = item.get()
    item_label = getLabelFromObject(item)
    qid = item.getID()
    
    print('Now working on ', qid, ' ', item_label)
    
    if pidm1 in item_dict['claims']:
        for claim_object in item_dict['claims'][pidm1]:
            if pidq1 in claim_object.qualifiers: 
                # At this point we know the statement has the right combination of main property and old qualifier
                for qualifier_object in claim_object.qualifiers[pidq1]:
                    qualifier_target = qualifier_object.getTarget()
                    #if (len(allowed_qualifier_targets) > 0 and 
                    #    qualifier_target.getID() not in allowed_qualifier_targets):
                    #    continue

                    # qualifier_target_label = getLabelFromObject(qualifier_target) if type(qualifier_target) == pywikibot.page.ItemPage else '[none]'

                    qualifier_dict = qualifier_object.toJSON()
                    qualifier_dict['property'] =  pidq2 #set up the qualifier change
                    new_qualifier_object = qualifier_object.qualifierFromJSON(site = wikidata_site, data = qualifier_dict)

                    claim_object.addQualifier(new_qualifier_object, summary=edit_summary)
                    # print('Inner loop')


                newclaim = pywikibot.Claim(repo, pidm2)

                # Needed because the two properties use different datatypes:
                # the old claim stores a quantity, the new one stores a string
                newclaim.setTarget(str(claim_object.target.amount))

                # If the datatypes matched, this would have sufficed:
                # newclaim.setTarget(claim_object.getTarget())
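                # A more general sketch (assumes Claim.type and PropertyPage.type
                # both return the datatype name, e.g. 'quantity' or 'string'):
                # if claim_object.type == pywikibot.PropertyPage(repo, pidm2).type:
                #     newclaim.setTarget(claim_object.getTarget())
                # else:
                #     newclaim.setTarget(str(claim_object.target.amount))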

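                # Carry over qualifiers, references and rank from the old claim
                # (the now-redundant old qualifier is removed in the second pass below)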
                newclaim.qualifiers = claim_object.qualifiers
                newclaim.sources = claim_object.sources
                newclaim.rank = claim_object.rank
                item.addClaim(newclaim, summary=edit_summary)
                item.removeClaims(claim_object, summary=edit_summary)    
                # print('Outer loop')


    # Finally, pop all instances of the old qualifier property from the statement
    item = pywikibot.ItemPage(repo, qid)
    item_dict = item.get()
    if pidm2 in item_dict['claims']:
        for claim_object in item_dict['claims'][pidm2]:
            if pidq1 in claim_object.qualifiers: 
                # We've found a combination of new main prop + old qualifier prop. So actually pop it
                for qualifier_object in claim_object.qualifiers[pidq1]:
                    claim_object.removeQualifier(qualifier_object, summary=edit_summary)
                    # print('Alternative loop')
Retrieving 3 pages from wikidata:wikidata.
Now working on  Q1049265   Zekrom
Sleeping for 8.3 seconds, 2019-01-05 15:38:44
Sleeping for 9.4 seconds, 2019-01-05 15:38:53
Sleeping for 8.9 seconds, 2019-01-05 15:39:03
Sleeping for 9.4 seconds, 2019-01-05 15:39:13
Sleeping for 9.0 seconds, 2019-01-05 15:39:23
Sleeping for 9.1 seconds, 2019-01-05 15:39:33
---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-2-237504192f7c> in <module>()
     66 
     67     # Finally, pop all instances of the old qualifier property from the statement
---> 68     item = pywikibot.ItemPage(repo, qid)
     69     item_dict = item.get()
     70     if pidm2 in item_dict['claims']:

NameError: name 'qid' is not defined
QUERY="""SELECT DISTINCT ?item ?itemLabel
WHERE
{
  ?item wdt:P1112 ?wdprop ;
             p:P1112 ?statement .
  ?statement pq:P642 ?ofwhat .
  SERVICE wikibase:label { bd:serviceParam wikibase:language "en,bg"  }    
}
ORDER BY ASC(?item)
LIMIT 1000"""

generator = pg.WikidataSPARQLPageGenerator(QUERY, site=wikidata_site)
generator = site.preloadpages(generator, pageprops=True)
print(QUERY)
SELECT DISTINCT ?item ?itemLabel ?property ?propertyLabel ?value ?asObject ?asObjectLabel
WHERE
{
  wd:P69 wikibase:claim ?p .
  ?prop pq:P31 ?asObject .
  hint:Query hint:optimizer "None" .	
  ?item ?p ?prop . 
  ?property wikibase:claim ?p .  
  ?property wikibase:statementProperty ?ps .
  ?prop ?ps ?value .
  SERVICE wikibase:label { bd:serviceParam wikibase:language "en,bg"  }    
}
ORDER BY ASC(?value) 
LIMIT 1000
type(claim.qualifiers)
collections.OrderedDict
newclaim = pywikibot.Claim(repo, 'P528')

# This line is only needed because the old claim and the new claim have different datatypes
newclaim.setTarget(str(claim.target.amount)) 

# If the datatypes matched, this would have sufficed:
# newclaim.setTarget(claim.getTarget())

newclaim.qualifiers = claim.qualifiers
newclaim.sources = claim.sources
newclaim.rank = claim.rank
item.addClaim(newclaim, summary='Move to def test edit')
item.removeClaims(claim, summary='Deprecate abc test edit')
WARNING: API warning (wbsetclaim) of unknown format: {'messages': [{'name': 'wikibase-conflict-patched', 'parameters': [], 'html': {'*': 'Your edit was patched into the latest version.'}, 'type': 'warning'}]}
Sleeping for 9.4 seconds, 2019-01-05 13:39:22
# Try to create a test statement on the sandbox item
qid = 'Q13406268'
item = pywikibot.ItemPage(repo, qid)

claim = pywikibot.Claim(repo, u'P19')
target = pywikibot.ItemPage(repo, u"Q350")
claim.setTarget(target)
item.addClaim(claim, summary=u'Adding claim')
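
A quick sanity check (a sketch): re-fetch the sandbox item and confirm the test claim is present.

item.get(force=True)           # bypass the cached copy
print('P19' in item.claims)    # should print True if the claim was saved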

Copy of qualifier migration below

#input vars
main_property = 'P69'
qualifier_property = 'P31'
new_qualifier_property = 'P3831'
query_filename = 'qualifier_migrate.rq'
allowed_qualifier_targets = {'Q1080794', 'Q423208', 'Q428602', 'Q1971849'} #use set() (empty) to process every target; otherwise list the allowed targets, e.g. {'Q1234', 'Q5678'}
edit_summary = qualifier_property + ' is no longer used as a qualifier, migrating ' + main_property + '/' + qualifier_property + ' to ' + main_property + '/' + new_qualifier_property
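# e.g. 'P31 is no longer used as a qualifier, migrating P69/P31 to P69/P3831'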
no_promptbox = 0 #set to 1 to operate on entire query automatically, 0 to prompt once per item
QUERY = """SELECT DISTINCT ?item ?itemLabel ?property ?propertyLabel ?value ?asObject ?asObjectLabel
WHERE
{
  wd:""" + main_property + """ wikibase:claim ?p .
  ?prop pq:""" + qualifier_property + """ ?asObject .
  hint:Query hint:optimizer "None" .	
  ?item ?p ?prop . 
  ?property wikibase:claim ?p .  
  ?property wikibase:statementProperty ?ps .
  ?prop ?ps ?value .
  SERVICE wikibase:label { bd:serviceParam wikibase:language "en,bg"  }    
}
ORDER BY ASC(?value) 
LIMIT 1000"""

#start of actual script
import pywikibot
from pywikibot import pagegenerators as pg

site = pywikibot.Site("wikidata", "wikidata")
wikidata_site = site #compatibility stuff
repo = site.data_repository()    

def getLabelFromObject(WDObject):
    # Return the English label if present, otherwise fall back to the entity ID
    item_dict = WDObject.get()
    if 'labels' in item_dict and 'en' in item_dict['labels']:
        return item_dict['labels']['en']
    return WDObject.getID()

def getLabelFromWDID(ID): #works for property IDs only; item IDs would need a separate lookup
    site = pywikibot.Site("wikidata", "wikidata")
    repo = site.data_repository()
    item = pywikibot.PropertyPage(repo, ID)
    return getLabelFromObject(item)

main_property_label = getLabelFromWDID(main_property)
qualifier_property_label = getLabelFromWDID(qualifier_property)
new_qualifier_property_label = getLabelFromWDID(new_qualifier_property)

#replaced by including SPARQL inside this program
#with open(query_filename, 'r') as query_file:
#    QUERY = query_file.read()

generator = pg.WikidataSPARQLPageGenerator(QUERY, site=wikidata_site)
generator = site.preloadpages(generator, pageprops=True)

break_flag = 0
edit_count = 0

for item in generator:
    #operate on the most recent one for testing
    item_dict = item.get()
    item_label = getLabelFromObject(item)
            
    print('Now working on ', item.getID(), ' ', item_label)#, 'Ready? ("yes" to go, "break" to stop)')

    for claim_object in item_dict['claims'][main_property]:
        claim_target = claim_object.getTarget()
        claim_target_label = getLabelFromObject(claim_target) if type(claim_target) == pywikibot.page.ItemPage else '[none]'
        
        if qualifier_property in claim_object.qualifiers:
            for qualifier_object in claim_object.qualifiers[qualifier_property]:
                qualifier_target = qualifier_object.getTarget()
                if (len(allowed_qualifier_targets) > 0 and 
                    qualifier_target.getID() not in allowed_qualifier_targets):
                    continue
                    
                qualifier_target_label = getLabelFromObject(qualifier_target) if type(qualifier_target) == pywikibot.page.ItemPage else '[none]'
                    
                print(item_label, main_property_label, claim_target_label, 
                      qualifier_property_label, qualifier_target_label, 
                      'change to', new_qualifier_property_label, '!')

                acceptable_prompt_set = {'y', 'yes', 'n', 'no', 'break'}
                promptbox = ''
                if (no_promptbox == 1):
                    promptbox = 'yes'
                while (promptbox not in acceptable_prompt_set):
                    promptbox = input()  #skipped when no_promptbox == 1, since promptbox is preset to 'yes'

                if promptbox == 'y' or promptbox == 'yes':
                    qualifier_dict = qualifier_object.toJSON()
                    qualifier_dict['property'] =  new_qualifier_property #set up the qualifier change
                    print('Changing to', new_qualifier_property_label)
                    new_qualifier_object = qualifier_object.qualifierFromJSON(site = wikidata_site, data = qualifier_dict)
                    claim_object.addQualifier(new_qualifier_object, summary=edit_summary)
                    edit_count = edit_count + 1

                elif promptbox == 'n' or promptbox == 'no':
                    print('Skipped')
                    
                elif promptbox == 'break':
                    break_flag = 1
                    break

        if break_flag == 1:
            break
    if break_flag == 1:
        break

print('All done, thanks for using! We edited', edit_count, 'qualifiers.')
Retrieving 50 pages from wikidata:wikidata.
Now working on  Q1464697   Richard Neal
Now working on  Q972029   Gary Ackerman
Now working on  Q40589   Keith Ellison
---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
/srv/paws/lib/python3.6/site-packages/urllib3/connectionpool.py in _make_request(self, conn, method, url, timeout, chunked, **httplib_request_kw)
    376             try:  # Python 2.7, use buffering of HTTP responses
--> 377                 httplib_response = conn.getresponse(buffering=True)
    378             except TypeError:  # Python 2.6 and older, Python 3

TypeError: getresponse() got an unexpected keyword argument 'buffering'

During handling of the above exception, another exception occurred:

KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-17-89ad434841fd> in <module>()
     71     for claim_object in item_dict['claims'][main_property]:
     72         claim_target = claim_object.getTarget()
---> 73         claim_target_label = getLabelFromObject(claim_target) if type(claim_target) ==pywikibot.page.ItemPage else '[none]'
     74 
     75         if qualifier_property in claim_object.qualifiers:

<ipython-input-17-89ad434841fd> in getLabelFromObject(WDObject)
     31 
     32 def getLabelFromObject(WDObject):
---> 33     item_dict = WDObject.get()
     34     item_label = False
     35     if 'labels' in item_dict:

/srv/paws/pwb/pywikibot/page.py in get(self, force, get_redirect, *args, **kwargs)
   4457         @raise NotImplementedError: a value in args or kwargs
   4458         """
-> 4459         data = super(ItemPage, self).get(force, *args, **kwargs)
   4460 
   4461         if self.isRedirectPage() and not get_redirect:

/srv/paws/pwb/pywikibot/page.py in get(self, force, *args, **kwargs)
   3900 
   3901             try:
-> 3902                 data = self.repo.loadcontent(identification)
   3903             except APIError as err:
   3904                 if err.code == 'no-such-entity':

/srv/paws/pwb/pywikibot/site.py in loadcontent(self, identification, *props)
   7672                                     props=props if props else False)
   7673         req = self._simple_request(**params)
-> 7674         data = req.submit()
   7675         if 'success' not in data:
   7676             raise api.APIError(data['errors'])

/srv/paws/pwb/pywikibot/data/api.py in submit(self)
   2186                                                                    paramstring)
   2187             rawdata, use_get = self._http_request(use_get, uri, body, headers,
-> 2188                                                   paramstring)
   2189             if rawdata is None:
   2190                 continue

/srv/paws/pwb/pywikibot/data/api.py in _http_request(self, use_get, uri, body, headers, paramstring)
   1945                 site=self.site, uri=uri,
   1946                 method='GET' if use_get else 'POST',
-> 1947                 body=body, headers=headers)
   1948         except Server504Error:
   1949             pywikibot.log('Caught HTTP 504 error; retrying')

/srv/paws/pwb/pywikibot/tools/__init__.py in wrapper(*__args, **__kw)
   1735                              cls, depth)
   1736                     del __kw[old_arg]
-> 1737             return obj(*__args, **__kw)
   1738 
   1739         if not __debug__:

/srv/paws/pwb/pywikibot/comms/http.py in request(site, uri, method, params, body, headers, data, **kwargs)
    322 
    323     baseuri = site.base_url(uri)
--> 324     r = fetch(baseuri, method, params, body, headers, **kwargs)
    325     site.throttle.retry_after = int(r.response_headers.get('retry-after', 0))
    326     return r.text

/srv/paws/pwb/pywikibot/comms/http.py in fetch(uri, method, params, body, headers, default_error_handling, use_fake_user_agent, data, **kwargs)
    519             headers['user-agent'] = fake_user_agent()
    520 
--> 521     request = _enqueue(uri, method, params, body, headers, **kwargs)
    522     # if there's no data in the answer we're in trouble
    523     assert request._data is not None

/srv/paws/pwb/pywikibot/comms/http.py in _enqueue(uri, method, params, body, headers, data, **kwargs)
    475     request = threadedhttp.HttpRequest(
    476         uri, method, params, body, all_headers, callbacks, **kwargs)
--> 477     _http_process(session, request)
    478     return request
    479 

/srv/paws/pwb/pywikibot/comms/http.py in _http_process(session, http_request)
    389                                    headers=headers, auth=auth, timeout=timeout,
    390                                    verify=not ignore_validation,
--> 391                                    **http_request.kwargs)
    392     except Exception as e:
    393         http_request.data = e

/srv/paws/lib/python3.6/site-packages/requests/sessions.py in request(self, method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, verify, cert, json)
    510         }
    511         send_kwargs.update(settings)
--> 512         resp = self.send(prep, **send_kwargs)
    513 
    514         return resp

/srv/paws/lib/python3.6/site-packages/requests/sessions.py in send(self, request, **kwargs)
    620 
    621         # Send the request
--> 622         r = adapter.send(request, **kwargs)
    623 
    624         # Total elapsed time of the request (approximately)

/srv/paws/lib/python3.6/site-packages/requests/adapters.py in send(self, request, stream, timeout, verify, cert, proxies)
    443                     decode_content=False,
    444                     retries=self.max_retries,
--> 445                     timeout=timeout
    446                 )
    447 

/srv/paws/lib/python3.6/site-packages/urllib3/connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)
    598                                                   timeout=timeout_obj,
    599                                                   body=body, headers=headers,
--> 600                                                   chunked=chunked)
    601 
    602             # If we're going to release the connection in ``finally:``, then

/srv/paws/lib/python3.6/site-packages/urllib3/connectionpool.py in _make_request(self, conn, method, url, timeout, chunked, **httplib_request_kw)
    378             except TypeError:  # Python 2.6 and older, Python 3
    379                 try:
--> 380                     httplib_response = conn.getresponse()
    381                 except Exception as e:
    382                     # Remove the TypeError from the exception chain in Python 3;

/usr/lib/python3.6/http/client.py in getresponse(self)
   1329         try:
   1330             try:
-> 1331                 response.begin()
   1332             except ConnectionError:
   1333                 self.close()

/usr/lib/python3.6/http/client.py in begin(self)
    295         # read until we get a non-100 response
    296         while True:
--> 297             version, status, reason = self._read_status()
    298             if status != CONTINUE:
    299                 break

/usr/lib/python3.6/http/client.py in _read_status(self)
    256 
    257     def _read_status(self):
--> 258         line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
    259         if len(line) > _MAXLINE:
    260             raise LineTooLong("status line")

/usr/lib/python3.6/socket.py in readinto(self, b)
    584         while True:
    585             try:
--> 586                 return self._sock.recv_into(b)
    587             except timeout:
    588                 self._timeout_occurred = True

/usr/lib/python3.6/ssl.py in recv_into(self, buffer, nbytes, flags)
   1007                   "non-zero flags not allowed in calls to recv_into() on %s" %
   1008                   self.__class__)
-> 1009             return self.read(nbytes, buffer)
   1010         else:
   1011             return socket.recv_into(self, buffer, nbytes, flags)

/usr/lib/python3.6/ssl.py in read(self, len, buffer)
    869             raise ValueError("Read on closed or unwrapped SSL socket.")
    870         try:
--> 871             return self._sslobj.read(len, buffer)
    872         except SSLError as x:
    873             if x.args[0] == SSL_ERROR_EOF and self.suppress_ragged_eofs:

/usr/lib/python3.6/ssl.py in read(self, len, buffer)
    629         """
    630         if buffer is not None:
--> 631             v = self._sslobj.read(len, buffer)
    632         else:
    633             v = self._sslobj.read(len)

KeyboardInterrupt: