Example #1
def save_cities_timezones():
    LOGGER.info("Retreiving timezones information for all countries...")
    data = adding_lat_and_lng(get_cities_info())
    # NOTE: the table should not be dropped and recreated on every run;
    # change this in the future.

    db = Database("countries.sqlite")
    db.drop_table("timezones")
    db.add_table("timezones", city="VARCHAR", country_name="VARCHAR", country_iso="VARCHAR", timezone="VARCHAR", la="REAL", lng="REAL",utc_offset = "int")


    for city_info in data:
        city = city_info["city"]
        country_name = city_info["country_name"]
        country_iso = city_info["country_iso"]
        timezone = city_info["timezone"]
        lat = city_info["lat"]
        lng = city_info["lng"]
        utc_offset = city_info["utc_offset"]
        LOGGER.success(f"{country_name} was sucefuly save into the timezone table with the following information: {country_iso} and {timezone}")
        db.insert("timezones",city, country_name, country_iso, timezone, lat, lng, utc_offset)
        LOGGER.success{f'{country_name} successfully saved to the database.'}
    db.close_connection()
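All of these examples go through the same lib.database.Database wrapper. Its implementation is not shown on this page, so the sketch below is only an illustration of the interface that the call sites imply (drop_table, add_table with column definitions as keyword arguments, positional insert, close_connection); the real class may differ.

import sqlite3


class Database:
    """Minimal sketch of the lib.database.Database wrapper assumed by the examples."""

    def __init__(self, db_name):
        # Open (or create) the SQLite file and keep a cursor for later statements.
        self.connection = sqlite3.connect(db_name)
        self.cursor = self.connection.cursor()

    def drop_table(self, table_name):
        # Remove the table if it exists so it can be rebuilt from scratch.
        self.cursor.execute(f'DROP TABLE IF EXISTS {table_name}')
        self.connection.commit()

    def add_table(self, table_name, **columns):
        # Column definitions arrive as keyword arguments, e.g. city="VARCHAR", lat="REAL".
        column_defs = ', '.join(f'{name} {col_type}' for name, col_type in columns.items())
        self.cursor.execute(f'CREATE TABLE IF NOT EXISTS {table_name} ({column_defs})')
        self.connection.commit()

    def insert(self, table_name, *values):
        # Positional values must match the column order used in add_table().
        placeholders = ', '.join('?' for _ in values)
        self.cursor.execute(f'INSERT INTO {table_name} VALUES ({placeholders})', values)
        self.connection.commit()

    def close_connection(self):
        self.connection.close()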
Example #2
def save_to_canada():

    db = Database("countries.sqlite")
    db.drop_table("CA")
    db.add_table("CA",
                 country_iso="text",
                 name="text",
                 advisory_text="text",
                 visa_info="text")
    LOGGER.info('Saving CA table into the database')
    #getting the data from all countries
    all_countries = get_all_countries()
    countries_data = advisory_canada(all_countries)

    #saving the data in db
    try:
        for country in countries_data:
            iso = countries_data[country].get('country-iso')
            name = countries_data[country].get('name')
            advisory = countries_data[country].get('advisory-text')
            visa = countries_data[country].get('visa-info')
            LOGGER.info(f'Saving {name} into the CA table')
            db.insert("CA", iso, name, advisory, visa)
            LOGGER.success(
                f'{name} was successfully saved into the CA table with the following information: {advisory}. {visa}'
            )
        LOGGER.success('CA table was successfully saved into the database')
    except Exception as error_msg:
        LOGGER.error(
            f'An error has occurred while saving the countries into the CA table because of the following error: {error_msg}'
        )
    db.close_connection()
Example #3
def save_into_db(data):
    # create a sqlite_advisory object
    db = Database("countries.sqlite")
    db.drop_table("IE")
    db.add_table("IE",
                 country_iso="text",
                 name="text",
                 advisory_text="text",
                 visa_info="text")
    try:
        for country in data:
            iso = data[country].get('country-iso')
            name = data[country].get('name')
            advisory = data[country].get('advisory-text').replace('"', '')
            LOGGER.info(f'Saving {name} into the IE table')
            visa = data[country].get('visa-info')
            db.insert("IE", iso, name, advisory, visa)
            LOGGER.success(
                f"{name} was saved into the IE table with the following information: {visa}. {advisory}"
            )
        LOGGER.info('IE table successfully saved to the database')
    except Exception as error_msg:
        LOGGER.error(
            f'An error has occurred while saving {name} into the IE table because of the following error: {error_msg}'
        )
    db.close_connection()
Example #4
def save_drug_law():

    marijuana = get_countries_canabaislaw()
    cocaine = get_countries_cocainelaw()
    methaphetamine = get_countries_methaphetaminelaw()
    DB = Database(sqlite_db)
    DB.drop_table('drugs')
    DB.add_table('drugs',
                 country_iso='text',
                 name='text',
                 methaphetamine_possession='text',
                 methaphetamine_sale='text',
                 methaphetamine_transport='text',
                 methaphetamine_cultivation='text',
                 cocaine_possession='text',
                 cocaine_sale='text',
                 cocaine_transport='text',
                 cocaine_cultivation='text',
                 canabais_recreational='text',
                 canabais_medical='text')
    drug_info = combine_dictionaries(marijuana,cocaine, methaphetamine)

    for iso in drug_info:
        country_iso = drug_info[iso].get("iso")
        country_name = drug_info[iso].get("name")
        methaphetamine_possession = drug_info[iso].get("methaphetamine_possession")
        methaphetamine_sale = drug_info[iso].get("methaphetamine_sale")
        methaphetamine_transport = drug_info[iso].get("methaphetamine_transport")
        methaphetamine_cultivation = drug_info[iso].get("methaphetamine_cultivation")
        cocaine_possession = drug_info[iso].get("cocaine_possession")
        cocaine_sale = drug_info[iso].get("cocaine_sale")
        cocaine_transport = drug_info[iso].get("cocaine_transport")
        cocaine_cultivation = drug_info[iso].get("cocaine_cultivation")
        canabais_recreational = drug_info[iso].get("canabais_recreational")
        canabais_medical = drug_info[iso].get("canabais_medical")

        LOGGER.info(f"Parsing {country_name} to insert into drug table with the following information: {canabais_recreational}. {canabais_medical}.{cocaine_possession}.{methaphetamine_possession}")
        DB.insert('drugs', country_iso, country_name, methaphetamine_possession, methaphetamine_sale, methaphetamine_transport, methaphetamine_cultivation, cocaine_possession, cocaine_sale, cocaine_transport, cocaine_cultivation, canabais_recreational, canabais_medical)
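The combine_dictionaries() helper used above is not shown in this example. Assuming each argument is a dictionary keyed by ISO code whose values are per-country field dictionaries, a minimal merge could look like the sketch below; the real helper may behave differently.

def combine_dictionaries(*dicts_by_iso):
    # Merge several per-ISO dictionaries into one, with later arguments
    # adding or overwriting fields for the same ISO code. Illustration only.
    combined = {}
    for source in dicts_by_iso:
        for iso, fields in source.items():
            combined.setdefault(iso, {}).update(fields)
    return combined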
Example #5
def save_into_db(data):
    # create a sqlite_advisory object
    db = Database("countries.sqlite")
    db.drop_table("US")
    db.add_table("US",
                 country_iso="text",
                 name="text",
                 advisory_text="text",
                 visa_info="text")
    LOGGER.info('Saving United States table into the database')
    try:
        for country in data:
            iso = data[country].get('country-iso')
            name = data[country].get('name')
            advisory = data[country].get('advisory-text')
            visa = data[country].get('visa-info')
            LOGGER.info(f"Saving {name} into the US table")
            db.insert("US", iso, name, advisory, visa)
            LOGGER.success(
                f"{name} was successfully saved into the US table with the following information: {visa}. {advisory}"
            )
        LOGGER.success('US table has been successfully saved into the database')
    except Exception as error_msg:
        LOGGER.error(
            f'An error has occurred while saving the countries into the US table because of the following error: {error_msg}'
        )
    db.close_connection()


#save_to_united_states()
Example #6
def save_to_MU():
    LOGGER.info('Saving and parsing Mauritius into the database')
    driver = create_driver()
    LOGGER.info('Begin parsing for Mauritius advisory')
    try:
        wiki_visa_url = wiki_visa_url_MU
        wiki_visa_ob = wiki_visa_parser(wiki_visa_url, driver)
        visas = wiki_visa_ob.visa_parser_table()
        LOGGER.success(
            'Parsing for Mauritius advisory has been successfully completed')
    except Exception as error_msg:
        LOGGER.error(
            f'An error has occurred while parsing for Mauritius advisory because of the following error: {error_msg}'
        )
    info = {}
    array_info = []

    # create a sqlite_advisory object
    db = Database("countries.sqlite")
    db.drop_table("MU")
    db.add_table("MU",
                 country_iso="text",
                 name="text",
                 advisory_text="text",
                 visa_info="text")
    LOGGER.info('Saving Mauritius table into the database')
    try:
        for country in visas:
            iso = find_iso_of_country(country)
            if (iso != ""):
                name = country
                LOGGER.info(f'Saving {name}')
                visa = visas[country].get(
                    'visa')  #dictionary for visa info is country{visa:text}
                advisory = "Not available yet"
                info = {
                    "country_iso": iso,
                    "name": name,
                    "advisory": advisory,
                    "visa_info": visa
                }
                array_info.append(info)
                print(name, "     ", visa, "    ", advisory)
                db.insert("MU", iso, name, advisory, visa)
                LOGGER.success(
                    f'{name} was successfully saved to the database with the following information: {visa}. {advisory}.'
                )
        LOGGER.success(
            'Mauritius table successfully saved to the database')
    except Exception as error_msg:
        LOGGER.error(
            f'An error has occurred while saving the Mauritius table to the database because of the following error: {error_msg}'
        )
    db.close_connection()

    quit_driver(driver)

    with open('./advisory-mu.json', 'w') as outfile:
        json.dump(array_info, outfile)
Example #7
def save_to_UK():

    LOGGER.info("Begin parsing and saving for United Kingdom table...")
    driver = create_driver()
    LOGGER.info('Parsing the visa requirements of all countries for United Kingdom advisory')
    try:
      wiki_visa_url ="https://en.wikipedia.org/wiki/Visa_requirements_for_British_citizens"
      wiki_visa_ob = wiki_visa_parser(wiki_visa_url,driver)
      visas = wiki_visa_ob.visa_parser_table()
      data = parse_all_countries_advisory()
      LOGGER.success('Successfully parsed the visa requirements of all countries for United Kingdom advisory')
    except Exception as error_msg:
      LOGGER.error(f'An error has occurred while retrieving the visa requirements of all countries for United Kingdom advisory because of the following error: {error_msg}')
    
    info = {}
    array_info = []
    # create a sqlite_advisory object
    db = Database("countries.sqlite")
    db.drop_table("GB")
    db.add_table("GB", country_iso="text", name="text", advisory_text="text", visa_info="text")
    LOGGER.info('Saving country information into the UK table')

    try:
      for country in visas:
          iso = find_iso_of_country(country)
          if(iso != ""):
              try:
                  name = country
                  advisory = data[iso].get('advisory') #dictionary for the travel advisory is iso{advisory:text}
                  visa_info = visas[country].get('visa') #dictionary for visa info is country{visa:text}
                  info = {
                      "country_iso" : iso,
                      "name": name,
                      "advisory": advisory,
                      "visa_info": visa_info
                  }
                  array_info.append(info)
                  LOGGER.success(f"Saving {name} into the UK table with the following information: {visa_info}. {advisory}")
                  db.insert("GB",iso,name,advisory,visa_info)
                  LOGGER.success(f'{name} successfully saved to the database.')
              except KeyError:
                  LOGGER.warning(f'This country doesn\'t have advisory info: {country}')
                  print("This country doesn't have advisory info: ",country)
                  LOGGER.info(f'Its ISO is {iso}')
                  print("Its ISO is: ",iso)
      LOGGER.success('All countries have been successfully saved into the UK table')
   
    except Exception as error_msg:
      LOGGER.error(f'An error has occurred while saving countries into the UK table because of the following error: {error_msg}')
    db.close_connection()

    with open('./advisory-uk.json', 'w') as outfile:
        json.dump(array_info, outfile)
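Several of these examples (Mauritius, United Kingdom) rely on find_iso_of_country() returning an empty string for names that cannot be resolved. That helper is defined elsewhere in the project; the sketch below only illustrates the assumed contract, and COUNTRY_NAME_TO_ISO is a hypothetical mapping, not part of the original code.

# Hypothetical name-to-ISO lookup; the real project resolves this differently.
COUNTRY_NAME_TO_ISO = {
    "United Kingdom": "GB",
    "Mauritius": "MU",
    "Singapore": "SG",
}


def find_iso_of_country(country_name):
    # Return the ISO code for a country name, or "" when it is unknown,
    # which is what the `if iso != ""` checks in the examples expect.
    return COUNTRY_NAME_TO_ISO.get(country_name.strip(), "")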
Example #8
def save_to_SG():
    LOGGER.info('Saving Singapore into the database')
    driver = create_driver()
    LOGGER.info(
        'Parsing visa requirements for all countries into the Singapore table')
    try:
        wiki_visa_url = wiki_visa_url_SG
        wiki_visa_ob = wiki_visa_parser(wiki_visa_url, driver)
        visas = wiki_visa_ob.visa_parser_table()
        LOGGER.success(
            'Visa requirements have been successfully parsed for the Singapore table'
        )
    except Exception as error_msg:
        LOGGER.error(
            f'An error has occurred while parsing for visa requirements because of the following error: {error_msg}'
        )
    advisories = parse_all_countries_advisories()
    array_info = []

    # create an an sqlite_advisory object
    db = Database("countries.sqlite")
    db.drop_table("SG")
    db.add_table("SG",
                 country_iso="text",
                 name="text",
                 advisory_text="text",
                 visa_info="text")

    array_info = save_info(db, visas, advisories, array_info)

    db.close_connection()
    LOGGER.success('Singapore was successfully saved to the database')
    quit_driver(driver)

    with open('./advisory-sg.json', 'w') as outfile:
        json.dump(array_info, outfile)
Example #9
from helper_class.api_helper import ApiHelper
from helper_class.flags import Flags
from helper_class.logger import Logger
from helper_class.url_helper import UrlHelper
from lib.config import united_nations_api_link, iso_list, sqlite_db
from lib.database import Database
import pandas as pd

# Initialize flags, logger & database
FLAGS = Flags()
LEVEL = FLAGS.get_logger_level()
LOGGER = Logger(level=LEVEL) if LEVEL is not None else Logger()
DB = Database(sqlite_db)

# Create table if it does not exist
DB.add_table('un', country='text', lifeExpectancy='text', infantMortality='text', nbOfPhysicians='text', homicideRate='text', sanitation='text', water='text')

# Parse United Nations data and add to database
for country in iso_list:
    try:
        LOGGER.info(f'Beginning United Nations data parsing for country: {country}')

        url_converter = UrlHelper(united_nations_api_link)
        information_link = url_converter.get_united_nations_api(country)
        LOGGER.info(f'Retrieving information from following link: {information_link}')

        # Scrape United Nations data endpoint with Pandas
        data_tables = pd.read_html(information_link, index_col=0)

        # Pick the specific dataframe, which is always at index 3
        data_table_social = data_tables[3]
Example #10
import datetime

from helper_class.api_helper import ApiHelper
from helper_class.flags import Flags
from helper_class.logger import Logger
from lib.config import sqlite_db
from lib.database import Database

# Initialize flags and logger
FLAGS = Flags()
LEVEL = FLAGS.get_logger_level()
LOGGER = Logger(level=LEVEL) if LEVEL is not None else Logger()

# get current date (especially interested in the week_number)
CURRENT_DATE = datetime.date.today()
YEAR, WEEK_NUMBER, DAY_OF_WEEK = CURRENT_DATE.isocalendar()

# Initialize database
DB = Database(sqlite_db)

# Create table if it does not exist
DB.add_table('financials', country='text', gasoline='text',
             rent='text', groceries='text')

GASOLINE_API = f'http://knoema.com/api/1.0/data/GPPW?time=2019W{WEEK_NUMBER - 1}&country=1000310,1001270,1001770,1000030,1001630,1000000,1000010,1000040,1000060,1000100,1000090,1000080,1000110,1000210,1000150,1000140,1000170,1000260,1000250,1000130,1000280,1001780,1000200,1000240,1000120,1000190,1000220,1000160,1000180,1001820,1001500,1000760,1000350,1000300,1000360,1000370,1000380,1000400,1000320,1000410,1000420,1000570,1000430,1000440,1000450,1000020,1000460,1000490,1000470,1001420,1000500,1000520,1000510,1000530,1000540,1000550,1000600,1000560,1000580,1000620,1000590,1000610,1000630,1000660,1000650,1000390,1000640,1000670,1000700,1000730,1000740,1000690,1000720,1000710,1000680,1000750,1000770,1000790,1000780,1000810,1000830,1000290,1001410,1000820,1000330,1000800,1000840,1000860,1001310,1000890,1001430,1000880,1000870,1000900,1000910,1000850,1000960,1001040,1001030,1001060,1000930,1000920,1000970,1000270,1000990,1000980,1001000,1000940,1001020,1000950,1001080,1001090,1001150,1001130,1001110,1001160,1001100,1001120,1001170,1001190,1001210,1001220,1001180,1001230,1001250,1001240,1001200,1001260,1001280,1001350,1001290,1001300,1001330,1001460,1001490,1001370,1001390,1001380,1001360,1001320,1001340,1001470,1000480,1001480,1000340,1001450,1001550,1001600,1001580,1001590,1001570,1001520,1001540,1001620,1001610,1001660,1001650,1001670,1001680,1001690,1001840,1001700,1001010,1001400,1001710,1001720,1001800,1000050,1000070,1000230,1001140,1001790,1001530,1001510,1001640,1001070,1001050,1001440,1001560&indicator=1000030&measure=1000000&frequencies=W&client_id=5ebf6ad1-8e6c-475c-9344-ca72734558b9'
RENT_API = 'http://knoema.com/api/1.0/data/COOFLIIND2016?time=&uiMode=last&uiParams=1&country=1000020,1000030,1000040,1000050,1000060,1000070,1000080,1000090,1000120,1000130,1000140,1000150,1000160,1000180,1000190,1000200,1000220,1000230,1000260,1001550,1001530,1000270,1000280,1000290,1000300,1000320,1000330,1000340,1000350,1000360,1000370,1000380,1000390,1000410,1000420,1000430,1000450,1000460,1000470,1000480,1000490,1000500,1000510,1000520,1000530,1000540,1000570,1000580,1000600,1000610,1000620,1000630,1000640,1000650,1000660,1000680,1000690,1000700,1000710,1000720,1000730,1000750,1000760,1000770,1000780,1000790,1000800,1000810,1000820,1000830,1000850,1000860,1000870,1000880,1000890,1000900,1000910,1000920,1000930,1000940,1000950,1000960,1000970,1000980,1000990,1001000,1001010,1001040,1001050,1001060,1001070,1001080,1001090,1001100,1001110,1001120,1001130,1001150,1001160,1001170,1001180,1001190,1001200,1001210,1001220,1001230,1001240,1001260,1001270,1001280,1001290,1001300,1001310,1001320,1001330,1001340,1001350,1001360,1001370,1001380,1001400,1001410,1001420,1001430,1001440,1001450,1001460,1001470,1001490,1001500,1001510&indicator=1000010&frequencies=A'
GROCERIES_API = 'http://knoema.com/api/1.0/data/COOFLIIND2016?time=&uiMode=last&uiParams=1&country=1000820,1000020,1000030,1000040,1000050,1000060,1000070,1000080,1000090,1000120,1000130,1000140,1000150,1000160,1000180,1000190,1000200,1000220,1000230,1000260,1001550,1001530,1000270,1000280,1000290,1000300,1000320,1000330,1000340,1000350,1000360,1000370,1000380,1000390,1000410,1000420,1000430,1000450,1000460,1000470,1000480,1000490,1000500,1000510,1000520,1000530,1000540,1000570,1000580,1000600,1000610,1000620,1000630,1000640,1000650,1000660,1000680,1000690,1000700,1000710,1000720,1000730,1000750,1000760,1000770,1000780,1000790,1000800,1000810,1000830,1000850,1000860,1000870,1000880,1000890,1000900,1000910,1000920,1000930,1000940,1000950,1000960,1000970,1000980,1000990,1001000,1001010,1001040,1001050,1001060,1001070,1001080,1001090,1001100,1001110,1001120,1001130,1001150,1001160,1001170,1001180,1001190,1001200,1001210,1001220,1001230,1001240,1001260,1001270,1001280,1001290,1001300,1001310,1001320,1001330,1001340,1001350,1001360,1001370,1001380,1001400,1001410,1001420,1001430,1001440,1001450,1001460,1001470,1001490,1001500,1001510&indicator=1000030&frequencies=A'


def gasoline_data(api_link):
    LOGGER.info('Beginning gasoline price parsing')
    api_url_gasoline = ApiHelper(api_link)
    json_data_gasoline = api_url_gasoline.get_json()['data']
    gasoline_price_dict = {}
    for region in json_data_gasoline:
        try:
            data_to_insert = region['Value']
            LOGGER.success(
Example #11
		?intermediateStatement (ps:P131/p:P131)* ?statement.
		?intermediateStatement pq:P582 ?endTime.
	}
} GROUP BY ?wikidata ORDER BY ASC(?country) ASC(?city) ASC(?operator) DESC(?type)"""

embassy_results = get_results(embassy_endpoint_url, embassy_query)
consulates_results = get_results(consulates_endpoint_url, consulates_query)

# Initialize database
DB = Database(sqlite_db)

# Create table if it does not exist
DB.add_table('embassies',
             country='text',
             city='text',
             operator='text',
             type='text',
             phone='text',
             email='text',
             website='text')

for result in embassy_results["results"]["bindings"]:
    try:
        if 'city' not in result:
            result['city'] = {'value': ''}
        if 'operator' not in result:
            result['operator'] = {'value': ''}
        if 'phone' not in result:
            result['phone'] = {'value': ''}
        if 'email' not in result:
            result['email'] = {'value': ''}
        if 'website' not in result:
Example #12
from helper_class.flags import Flags
from helper_class.logger import Logger
from lib.config import currency_api_link, iso_list_2, sqlite_db
from lib.database import Database
import pandas as pd

# Initialize flags and logger
FLAGS = Flags()
LEVEL = FLAGS.get_logger_level()
LOGGER = Logger(level=LEVEL) if LEVEL is not None else Logger()

# Initialize DB
DB = Database(sqlite_db)

# Removing table if it was already there
DB.drop_table("emergency")

# Create table if it does not exist
DB.add_table('emergency', country='text', police='text', ambulance='text', fire='text')


data_tables = pd.read_html('http://chartsbin.com/view/1983')
data_table = data_tables[0]
latest_year = data_table.columns[1]

for country in iso_list_2:
  try:
    LOGGER.info(f'Getting emergency contacts data for {country}')
    if str(data_table.iloc[iso_list_2.index(country)][1]) == 'nan':
      police = ''
    else:
      police = data_table.iloc[iso_list_2.index(country)][1]
    if str(data_table.iloc[iso_list_2.index(country)][2]) == 'nan':
      ambulance = ''
Example #13
from helper_class.flags import Flags
from helper_class.logger import Logger
from lib.config import covid19_url, sqlite_db  # covid19_url assumed to be defined in lib.config like the other constants
from lib.database import Database
import pandas as pd
from requests import get
from math import isnan

# Initialize flags, logger & database
FLAGS = Flags()
LEVEL = FLAGS.get_logger_level()
LOGGER = Logger(level=LEVEL) if LEVEL is not None else Logger()
DB = Database(sqlite_db)

# Create table if it does not exist
DB.add_table('covid19',
             country='text primary key',
             totalcases='integer',
             newcases='integer',
             totaldeaths='integer',
             newdeaths='integer',
             totalrecovered='integer',
             activecases='integer',
             seriouscritical='integer')

LOGGER.info(f'Retrieving information from following link: {covid19_url}')

# Pretend to be a browser to avoid HTTP 403
header = {
    "User-Agent":
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.75 Safari/537.36",
    "X-Requested-With": "XMLHttpRequest"
}
r = get(covid19_url, headers=header)
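The snippet above is cut off right after the request is made. Assuming the page body contains the per-country figures as an HTML table, one possible continuation (not necessarily what the original script does) is to hand the response text to pandas:

# Continuation sketch, not part of the original snippet. Which table index
# holds the per-country figures is an assumption that would need checking
# against the actual page.
data_tables = pd.read_html(r.text)
covid_table = data_tables[0]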
Example #14
from helper_class.api_helper import ApiHelper
from helper_class.flags import Flags
from helper_class.logger import Logger
from helper_class.url_helper import UrlHelper
from lib.config import currency_api_link, iso_list, sqlite_db
from lib.database import Database

# Initialize flags, logger & database
FLAGS = Flags()
LEVEL = FLAGS.get_logger_level()
LOGGER = Logger(level=LEVEL) if LEVEL is not None else Logger()
DB = Database(sqlite_db)

# Removing table if it was already there
DB.drop_table("currencies")
# Create table if it does not exist
DB.add_table('currencies',
             country='text',
             name='text',
             code='text',
             symbol='text')

# Parse currencies and add to database
for country in iso_list:
    try:
        LOGGER.info(f'Beginning currency parsing for country: {country}')

        url_converter = UrlHelper(currency_api_link)
        information_link = url_converter.get_currency_api(country)
        LOGGER.info(
            f'Retrieving information from following link: {information_link}')

        currency_api = ApiHelper(url_converter.get_currency_api(country))
        LOGGER.info(f'Parsing returned HTML code: {currency_api.get_code()}')