|
import os
from typing import Dict, List, Optional

import pandas as pd
import wikipedia as wiki
| 5 | + |
# Destination CSV for the cached summaries, written into the current working
# directory by write_data() / checked by pull_state_data().
EXPORT_FILENAME = "city_wikipedia_summaries.csv"

# 50 large U.S. cities, formatted as "City, State" so each entry resolves to
# the city's Wikipedia article title when passed to wikipedia.summary().
CITIES = [
    "New York, New York",
    "Los Angeles, California",
    "Chicago, Illinois",
    "Houston, Texas",
    "Phoenix, Arizona",
    "Philadelphia, Pennsylvania",
    "San Antonio, Texas",
    "San Diego, California",
    "Dallas, Texas",
    "San Jose, California",
    "Austin, Texas",
    "Jacksonville, Florida",
    "Fort Worth, Texas",
    "Columbus, Ohio",
    "Charlotte, North Carolina",
    "San Francisco, California",
    "Indianapolis, Indiana",
    "Seattle, Washington",
    "Denver, Colorado",
    "Washington, D.C.",
    "Boston, Massachusetts",
    "El Paso, Texas",
    "Nashville, Tennessee",
    "Detroit, Michigan",
    "Oklahoma City, Oklahoma",
    "Portland, Oregon",
    "Las Vegas, Nevada",
    "Memphis, Tennessee",
    "Louisville, Kentucky",
    "Baltimore, Maryland",
    "Milwaukee, Wisconsin",
    "Albuquerque, New Mexico",
    "Tucson, Arizona",
    "Fresno, California",
    "Mesa, Arizona",
    "Sacramento, California",
    "Atlanta, Georgia",
    "Kansas City, Missouri",
    "Colorado Springs, Colorado",
    "Miami, Florida",
    "Raleigh, North Carolina",
    "Omaha, Nebraska",
    "Long Beach, California",
    "Virginia Beach, Virginia",
    "Oakland, California",
    "Minneapolis, Minnesota",
    "Tulsa, Oklahoma",
    "Arlington, Texas",
    "Tampa, Florida",
    "New Orleans, Louisiana"
]
| 59 | + |
def get_wikipedia_summary(cities: List[str]) -> Dict[str, str]:
    """Fetch the Wikipedia summary for each city title.

    Args:
        cities: Page titles to look up, e.g. "Austin, Texas".

    Returns:
        Mapping of city title -> summary text. Titles whose lookup failed
        (missing page, disambiguation, network error) are omitted, keeping
        the original best-effort behavior.
    """
    city_summaries: Dict[str, str] = {}
    for city in cities:
        try:
            city_summaries[city] = wiki.summary(city)
        # Narrowed from a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; still best-effort, but now reports the cause.
        except Exception as exc:
            print(f"error retrieving {city}: {exc}")

    return city_summaries
| 69 | + |
| 70 | + |
def write_data(output_dict: Dict[str, str], filename: Optional[str] = None) -> None:
    """Write city -> Wikipedia-summary pairs to a two-column CSV.

    Args:
        output_dict: City titles mapped to their Wikipedia summary text.
        filename: Destination CSV path; defaults to EXPORT_FILENAME.
    """
    # Build the frame directly from the items instead of the previous
    # transpose-and-reset-index dance. The first column was mislabeled
    # 'State' even though the keys are cities; fixed to 'City'.
    df = pd.DataFrame(list(output_dict.items()), columns=["City", "Wiki Summary"])
    df.to_csv(filename or EXPORT_FILENAME, index=False)
| 75 | + |
def pull_state_data() -> None:
    """Download and cache Wikipedia summaries for CITIES.

    Skips the download when EXPORT_FILENAME already exists in the current
    working directory, so repeated runs are cheap.
    """
    # os.path.exists is clearer and cheaper than scanning os.listdir();
    # both only consider the current working directory.
    if os.path.exists(EXPORT_FILENAME):
        print("data already present...skipping download")
    else:
        # Fixed typo ("pullling") and mislabeled subject: these are city
        # summaries, not state summaries.
        print("data not found pulling wikipedia city summaries...")
        write_data(get_wikipedia_summary(CITIES))
| 83 | + |
| 84 | +if __name__ == "__main__": |
| 85 | + pull_state_data() |
0 commit comments