import csv

# (site, count) pairs read from the CSV
word_freq = []
# each site name repeated once per count, to feed a word-cloud (wordle) generator
all_words = []

# Each row of FOIR-site-counts.csv is expected to be: site name, count
with open("FOIR-site-counts.csv") as fin:
    reader = csv.reader(fin)
    for row in reader:
        word_freq.append((row[0], int(row[1])))

# print(word_freq)

# Repeat each site name once per count
for site, count in word_freq:
    all_words.extend([site] * count)
print(all_words)
# Output (truncated): ['Wikipedia', 'Wikipedia', ..., 'Khan Academy', ...,
# 'Stack Overflow', ..., 'Youtube', ..., 'www.nature.com']
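
# For reference, the same expansion can be written as a single comprehension;
# this is a sketch assuming the same (site, count) structure of word_freq:
#
#     all_words = [site for site, count in word_freq for _ in range(count)]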
# Write one word per line so the list can be pasted into a word-cloud tool
with open("FOIR-wordle-list.txt", "w") as fout:
    for w in all_words:
        fout.write(w + "\n")