import sys
import json
import csv
import requests 
import pprint
from tqdm import tqdm_notebook
from urllib.request import urlopen
from multiprocessing.pool import ThreadPool
from time import time as timer
import time

import pandas as pd
import matplotlib.pyplot as plot
from IPython.display import Image

metapibaseurl  = 'https://collectionapi.metmuseum.org/public/collection/v1/objects/{}'

maxthreads = 15   # How many parallel threads to run

# Retry settings for rate-limited API calls. Azure Custom Vision, for example, returns:
# {'statusCode': 429, 'message': 'Rate limit is exceeded. Try again in 1 seconds.'}
# Quotas: https://docs.microsoft.com/en-us/azure/cognitive-services/custom-vision-service/limits-and-quotas
maxtries = 20        # Maximum attempts per request
waitinterval = 1     # Seconds to wait between attempts

# Look up an object's dimensions via the Met API, given its object ID
def metid2dimensions(objectid):
    url = metapibaseurl.format(objectid)   # Build the request URL for this object

    try:
        r = requests.get(url).json()
        dim = r['dimensions']
        return objectid, dim
    except Exception:
        # Missing object, malformed response, or network error: record no dimensions
        return objectid, None
# Quick sanity checks against two known object IDs
metid2dimensions(185860)
metid2dimensions(437396)
# Example output: (437396, '30 7/8 x 27 1/8 in. (78.4 x 68.9 cm)')
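
# Hypothetical retry-aware variant (a sketch only; the cell below still uses
# metid2dimensions). It applies the maxtries / waitinterval settings above to
# back off when the API answers with HTTP 429 or a transient error. The name
# metid2dimensions_with_retry is introduced here purely for illustration.
def metid2dimensions_with_retry(objectid):
    url = metapibaseurl.format(objectid)
    for attempt in range(maxtries):
        try:
            r = requests.get(url)
            if r.status_code == 429:          # Rate limited: wait, then try again
                time.sleep(waitinterval)
                continue
            return objectid, r.json().get('dimensions')   # None if the record has no dimensions
        except Exception:
            time.sleep(waitinterval)          # Network hiccup or invalid JSON: wait, then retry
    return objectid, None                     # Give up after maxtries attempts
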
metobjectsfile = 'met-ids-for-missing-dimensions.txt'
metoutputfile = 'met-dimensions.csv'

with open(metobjectsfile) as f:
    content = f.readlines()                   # One Met object ID per line; slice content[start:finish] to limit a run
    content = [x.strip() for x in content]    # Remove whitespace and newlines

    contentdict = { i : None for i in content }   # Dict keyed by object ID

    # Parallel processing using threads
    start = timer()
    
    results = ThreadPool(maxthreads).imap_unordered(metid2dimensions, content)

    outfile = open(metoutputfile, 'w', buffering=1, newline='')   # buffering=1 flushes each row to disk as it is written
    writer = csv.writer(outfile)   # csv.writer quotes dimension strings that themselves contain commas
    maxitems = len(content)

    for objectid, dimensions in tqdm_notebook(results, total=maxitems):
        writer.writerow([objectid, dimensions])

#       Uncomment to log each result as it arrives:
#       print("%s, %r fetched in %ss" % (objectid, dimensions, timer() - start))

    print("Elapsed Time (parallel threads): %s" % (timer() - start,))
    outfile.close()

# Example output: Elapsed Time (parallel threads): 267.5493984222412
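
# Optional follow-up (a sketch): load the CSV written above into pandas for a
# quick look. The column names 'objectid' and 'dimensions' are supplied here
# because the file is written without a header row.
dims = pd.read_csv(metoutputfile, header=None, names=['objectid', 'dimensions'])
print(dims.head())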