def mexico_all_links(driver):

    # The Mexican website provides its advisories in Spanish,
    # so we only display a link to the source page
    url = 'https://guiadelviajero.sre.gob.mx/'
    driver.get(url)
    soup = BeautifulSoup(driver.page_source, 'lxml')
    reg = regex.compile(r'\/103-ficha-de-paises\/')
    a = soup.find_all('a', attrs={'href': reg})

    links = {}
    iso_es = get_iso_es()
    LOGGER.info(
        'Retrieving the URLs for all countries for the Mexico advisory')
    for att in a:
        try:
            name = att.text.strip()
            iso = iso_es[name]
            href = 'https://guiadelviajero.sre.gob.mx' + att['href']
            href = f"<a href='{href}'>Mexican Government Website</a>"
            links[iso] = {
                'advisory_text': href,
                'country_iso': iso,
                'name': name
            }
            LOGGER.success(f'The URL for {name} was successfully retrieved')
        except Exception as error_msg:
            LOGGER.warning(
                f'The ISO code for {name} was not found because of the following error: {error_msg}'
            )
    LOGGER.success(
        'Successfully retrieved the URLs for all countries of the Mexican advisory'
    )

    # Get the visa requirements for Mexico from Wikipedia, as for other countries
    LOGGER.info(
        'Parsing visa requirements for all countries for the Mexican advisory')
    visas = {}  # fall back to an empty dict if the parsing below fails
    try:
        wiki_visa_ob_MX = wiki_visa_parser(wiki_visa_url_MX, driver)
        visas = wiki_visa_ob_MX.visa_parser_table()
        visas = replace_key_by_iso(visas)
        LOGGER.success(
            'Successfully parsed all countries for the Mexican advisory')
    except Exception as error_msg:
        LOGGER.error(
            f'Failed to parse visa requirements for the Mexican advisory because of the following error: {error_msg}'
        )

    data = {}
    for key in visas:
        try:
            # links[key] and data[key] reference the same dict, so attaching
            # the visa info here also enriches the returned links mapping
            data[key] = links[key]
            data[key]['visa-info'] = visas[key].get('visa-info')
        except Exception as error_msg:
            LOGGER.warning(
                f'The ISO code {key} was not found in the links because of the following error: {error_msg}'
            )

    return links
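
# A minimal usage sketch for the function above (illustrative, not part of
# the original module): assumes Selenium with a local ChromeDriver install.
if __name__ == '__main__':
    from selenium import webdriver
    from selenium.webdriver.chrome.options import Options

    chrome_options = Options()
    chrome_options.add_argument('--headless')  # scrape without opening a window
    chrome_driver = webdriver.Chrome(options=chrome_options)
    try:
        mexico_links = mexico_all_links(chrome_driver)
        LOGGER.info(f'Retrieved advisory links for {len(mexico_links)} countries')
    finally:
        chrome_driver.quit()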

# Example 2

import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from helper_class.flags import Flags
from helper_class.logger import Logger

# Initialize flags, logger & database
FLAGS = Flags()
LEVEL = FLAGS.get_logger_level()
LOGGER = Logger(level=LEVEL) if LEVEL is not None else Logger()


class Email():
    def __init__(self, subject, origin, destination, html):
        LOGGER.info(
            f'Creating an email instance:{subject}, {origin}, {destination}')
        msg = MIMEMultipart('alternative')
        msg['Subject'] = subject
        msg['From'] = origin
        msg['To'] = destination
        htmlMail = MIMEText(html, 'html')
        msg.attach(htmlMail)

        self.sender = origin
        self.recipient = destination
        self.mail_content = msg

    def sendEmail(self, password):
        LOGGER.info(
            f'Sending an email to: {self.recipient} from: {self.sender}')
        # Assumption: Gmail SMTP over SSL; adjust host/port for other providers
        with smtplib.SMTP_SSL('smtp.gmail.com', 465) as server:
            server.login(self.sender, password)
            server.sendmail(self.sender, self.recipient,
                            self.mail_content.as_string())
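
# A short usage sketch for the Email class above (illustrative): the
# addresses and password below are placeholders, not project values.
if __name__ == '__main__':
    mail = Email(subject='Travel advisory update',
                 origin='sender@example.com',
                 destination='recipient@example.com',
                 html='<p>The advisory for your destination has changed.</p>')
    mail.sendEmail('app-specific-password')  # placeholder credential


# Example 3
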
from helper_class.api_helper import ApiHelper
from helper_class.flags import Flags
from helper_class.logger import Logger
from helper_class.url_helper import UrlHelper
from lib.config import united_nations_api_link, iso_list, sqlite_db
from lib.database import Database
import pandas as pd

# Initialize flags, logger & database
FLAGS = Flags()
LEVEL = FLAGS.get_logger_level()
LOGGER = Logger(level=LEVEL) if LEVEL is not None else Logger()
DB = Database(sqlite_db)

# Create table if it does not exist
DB.add_table('un',
             country='text',
             lifeExpectancy='text',
             infantMortality='text',
             nbOfPhysicians='text',
             homicideRate='text',
             sanitation='text',
             water='text')

# Parse United Nations data and add to database
for country in iso_list:
    try:
        LOGGER.info(f'Beginning United Nations data parsing for country: {country}')

        url_converter = UrlHelper(united_nations_api_link)
        information_link = url_converter.get_united_nations_api(country)
        LOGGER.info(f'Retrieving information from following link: {information_link}')

        # Scrape United Nations data endpoint with Pandas
        data_tables = pd.read_html(information_link, index_col=0)

        # Pick the specific dataframe, which always sits at index 3
        data_table_social = data_tables[3]
    except Exception as error_msg:
        LOGGER.warning(
            f'Failed to parse United Nations data for {country}: {error_msg}')

# Example 4

# pip install sparqlwrapper before running this script

from SPARQLWrapper import SPARQLWrapper, JSON
from helper_class.api_helper import ApiHelper
from helper_class.flags import Flags
from helper_class.logger import Logger
from lib.database import Database
from lib.config import sqlite_db

# Initialize flags and logger
FLAGS = Flags()
LEVEL = FLAGS.get_logger_level()
LOGGER = Logger(level=LEVEL) if LEVEL is not None else Logger()


def get_results(endpoint_url, query):
    sparql = SPARQLWrapper(endpoint_url)
    sparql.setQuery(query)
    sparql.setReturnFormat(JSON)
    return sparql.query().convert()
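

# A quick sanity-check sketch for get_results (illustrative only; assumes
# network access to the public Wikidata endpoint, and wd:Q6256 = 'country').
def demo_get_results():
    demo_query = "SELECT ?item WHERE { ?item wdt:P31 wd:Q6256 } LIMIT 3"
    results = get_results("https://query.wikidata.org/sparql", demo_query)
    for row in results["results"]["bindings"]:
        print(row["item"]["value"])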


embassy_endpoint_url = "https://query.wikidata.org/sparql"
consulates_endpoint_url = "https://query.wikidata.org/sparql"

embassy_query = """# Embassies
SELECT DISTINCT #(SAMPLE(?label) as ?label)
	(SAMPLE(?country_label) as ?country)	(SAMPLE(?city_label) as ?city)	(SAMPLE(?address) as ?address)	(SAMPLE(?coordinates) as ?coordinates)
	(SAMPLE(?operator_label) as ?operator)	(SAMPLE(?type_label) as ?type)	(SAMPLE(?phone) as ?phone)		(SAMPLE(?email) as ?email)
	(SAMPLE(?website) as ?website)			(SAMPLE(?image) as ?image)		?wikidata
	#(SAMPLE(?facebook) as ?facebook) (SAMPLE(?twitter) as ?twitter) (SAMPLE(?youtube) as ?youtube) (SAMPLE(?inception) as ?inception)

# Example 5

from helper_class.api_helper import ApiHelper
from helper_class.flags import Flags
from helper_class.logger import Logger
from lib.database import Database
from lib.config import currency_api_link, iso_list_2, sqlite_db
import pandas as pd

# Initialize flags and logger
FLAGS = Flags()
LEVEL = FLAGS.get_logger_level()
LOGGER = Logger(level=LEVEL) if LEVEL is not None else Logger()

# Initialize DB
DB = Database(sqlite_db)

# Remove the table if it already exists
DB.drop_table("emergency")

# Create table if it does not exist
DB.add_table('emergency', country='text', police='text', ambulance='text', fire='text')


data_tables = pd.read_html('http://chartsbin.com/view/1983')
data_table = data_tables[0]
latest_year = data_table.columns[1]
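
# Illustrative sanity check (assumption: the table's rows line up with
# iso_list_2, which the positional iloc lookup below depends on, and the
# column headers after the first are years)
LOGGER.info(f'Emergency table shape: {data_table.shape}; latest year: {latest_year}')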

for country in iso_list_2:
    try:
        LOGGER.info(f'Getting emergency contacts data for {country}')
        # A NaN in the data cell means no number is listed for this country
        if str(data_table.iloc[iso_list_2.index(country)][1]) == 'nan':
            police = ''
    except Exception as error_msg:
        LOGGER.warning(
            f'Failed to get emergency contacts for {country}: {error_msg}')

# Example 6

from helper_class.api_helper import ApiHelper
from helper_class.flags import Flags
from helper_class.logger import Logger
from lib.config import covid19_url, iso_list, sqlite_db, iso_dict
from lib.database import Database
import pandas as pd
from requests import get
from math import isnan

# Initialize flags, logger & database
FLAGS = Flags()
LEVEL = FLAGS.get_logger_level()
LOGGER = Logger(level=LEVEL) if LEVEL is not None else Logger()
DB = Database(sqlite_db)

# Create table if it does not exist
DB.add_table('covid19',
             country='text primary key',
             totalcases='integer',
             newcases='integer',
             totaldeaths='integer',
             newdeaths='integer',
             totalrecovered='integer',
             activecases='integer',
             seriouscritical='integer')

LOGGER.info(f'Retrieving information from following link: {covid19_url}')

# Pretend to be a desktop browser to avoid an HTTP 403 from the site
# (the User-Agent below is a placeholder; any common browser string works)
header = {
    "User-Agent":
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
        "(KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
}

# Example 7

from helper_class.api_helper import ApiHelper
from helper_class.flags import Flags
from helper_class.logger import Logger
from helper_class.url_helper import UrlHelper
from lib.config import currency_api_link, iso_list, sqlite_db
from lib.database import Database

# Initialize flags, logger & database
FLAGS = Flags()
LEVEL = FLAGS.get_logger_level()
LOGGER = Logger(level=LEVEL) if LEVEL is not None else Logger()
DB = Database(sqlite_db)

# Remove the table if it already exists
DB.drop_table("currencies")
# Create table if it does not exist
DB.add_table('currencies',
             country='text',
             name='text',
             code='text',
             symbol='text')

# Parse currencies and add to database
for country in iso_list:
    try:
        LOGGER.info(f'Beginning currency parsing for country: {country}')

        url_converter = UrlHelper(currency_api_link)
        information_link = url_converter.get_currency_api(country)
        LOGGER.info(
            f'Retrieving information from following link: {information_link}')
    except Exception as error_msg:
        LOGGER.error(
            f'Failed to parse currency information for {country}: {error_msg}')