def batch_search():
    # set the batch job run date
    batchRunDate = dt.now()

    print(f'Batch Job Run Date: {batchRunDate}')
    ReqDoc = dbf.retrieve_FlightRequest()

    for row in ReqDoc:
        print(row['Account_Reference'])
        flight_search(row)

def flight_search(flight_request):
    search_dt = dt.today()
    request_id = flight_request['Request_ID']
    info = flight_request['Request_Details']

    # open Skyscanner and wait for the trip-type selector to load
    t.init()
    t.url('https://www.skyscanner.com.sg/')
    tu.wait_for_pageload('//input[@id="fsc-trip-type-selector-return"]')
    fill_search(info)

    # scrape the first result set, then close the browser
    ind = 0
    flight_main = getFlightExcel(info, ind)
    t.wait(10.0)
    t.close()

    # tag the results with the request id and search time, persist, export
    flight_main.update({
        'Request_ID': request_id,
        'Search_Datetime': search_dt
    })
    dbf.newFlightDeals(flight_main)
    outFile = dbf.export_FlightDeals(request_id, search_dt)
    return outFile
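
# The two functions above assume module-level imports along these lines — a
# sketch: dt is datetime and t is TagUI for Python, while dbf, tu,
# fill_search and getFlightExcel are helpers from the surrounding project
# that this snippet does not show.
# from datetime import datetime as dt
# import tagui as t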

def on_message(client, userdata, msg):
    # print the incoming message
    print('')
    print(msg.topic + " " + str(msg.payload))

    # parse the payload into its fields
    data = dbf.parse_mqtt_payload(str(msg.payload))
    print(data)

    # enter the data into the database (cursor is the module-level
    # sqlite3 cursor created further down)
    if len(data) == 5:
        dbf.make_entry(cursor, data[0], data[1], data[2], data[3], data[4])
        print('data entered')
    else:
        dbf.make_entry(cursor, data[0], data[1], data[2], data[3])
        print('data entered')
    dbf.print_latest_entry(cursor, 'readings')
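
# Note: judging by the readings schema created below, the parsed payload
# holds either four sensor values (temperature, light_level, pressure,
# humidity; the date column then defaults to CURRENT_TIMESTAMP) or five
# values with an explicit timestamp — an inference, not confirmed here.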

# Vocabulary lookup helpers: Abfrage_Leo presumably queries the Leo
# dictionary and Abfrage_Larousse the Larousse dictionary, each fed by a
# database query (Datenbankabfrage).
def Translation():
    DB.Abfrage_Leo(DB.DatenbankabfrageTranslation())

def Definition():
    DB.Abfrage_Larousse(DB.DatenbankabfrageDefinition())

def query_database_for_list_of_filtered_locations(categories, profiles):
    connexion = DB_connexion()
    FileLogger.log(
        logging.DEBUG,
        f"{categories} categories and {profiles} profiles extraction start")

    # collect every known location id (the id sits at index 1 in this
    # query's rows)
    set_of_all_location = set()
    list_of_location = connexion.Query_SQL_fetchall(apidae.select_apidae)
    for location in list_of_location:
        set_of_all_location.add(location[1])
    nb_locations = len(set_of_all_location)
    nb_locations_extracted = len(list_of_location)
    FileLogger.log(
        logging.DEBUG,
        f"{nb_locations} different locations in set for "
        f"{nb_locations_extracted} locations extracted")

    # run the four filter queries (edited/raw category and profile lists),
    # accumulating the matching ids and removing them from the full set
    filter_queries = [
        ("1st step",
         apidae.select_apidae_with_categorie_list_edited_and_profil_list_edited),
        ("2nd step",
         apidae.select_apidae_with_categorie_list_edited_and_profil_list),
        ("3rd step",
         apidae.select_apidae_with_categorie_list_and_profil_list_edited),
        ("4th step",
         apidae.select_apidae_with_categorie_list_and_profil_list),
    ]
    set_of_location_id = set()
    for step, query in filter_queries:
        list_of_location = connexion.Query_SQL_fetchall(
            query, [profiles, categories])
        for location in list_of_location:
            set_of_location_id.add(location[0])
            try:
                set_of_all_location.remove(location[0])
            except KeyError:
                FileLogger.log(
                    logging.ERROR,
                    f"{location[0]} no longer in the set of all locations")
        nb_locations_for_json = len(set_of_location_id)
        nb_locations = len(set_of_all_location)
        FileLogger.log(
            logging.DEBUG,
            f"{step}: {nb_locations_for_json} locations for json out of "
            f"{nb_locations} different locations remaining in set for "
            f"{nb_locations_extracted} locations extracted")

    # build the output dict for every selected location
    locations_list = []
    nb_location = 0
    for id_location in set_of_location_id:
        data = connexion.Query_SQL_fetchone(
            apidae.select_apidae_1_id_apidae_with_data_edited, [id_location])
        dict_for_apidae, dict_for_geometry = functions.create_dict_for_lieu_validated(
            data)
        # properties and geometry
        locations_list.append(dict_for_apidae)
        # locations_list.append(dict_for_properties)  # properties only
        nb_location += 1
    FileLogger.log(
        logging.DEBUG,
        f"{nb_locations_for_json} ids in set and {nb_location} locations extracted")
    del connexion
    return nb_location, locations_list
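
# A minimal usage sketch (hypothetical argument values; the real category
# and profile lists come from the caller):
# nb_location, locations_list = query_database_for_list_of_filtered_locations(
#     "1,2,3", "4,5")
# print(f"{nb_location} locations selected for the JSON output")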

# conn is an sqlite3 connection opened elsewhere in the script
cursor = conn.cursor()

# create the readings table if it doesn't exist
cursor.execute('''
    CREATE TABLE IF NOT EXISTS readings (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        date TEXT DEFAULT CURRENT_TIMESTAMP,
        temperature REAL,
        light_level REAL,
        pressure REAL,
        humidity REAL
    );
''')
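
# A sketch of the kind of INSERT that make_entry presumably issues — an
# assumption, since the real SQL lives in the dbf helper module:
# cursor.execute(
#     "INSERT INTO readings (temperature, light_level, pressure, humidity) "
#     "VALUES (?, ?, ?, ?)", (21.5, 300.0, 1013.2, 45.0))
# conn.commit()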

# print the database definition
dbf.print_db_definition(cursor, 'readings')

# define the on_connect callback
def on_connect(client, userdata, flags, rc):
    # report the connection result, then subscribe to the sensor topic
    print("\nConnected with result code " + str(rc))
    client.subscribe("sensorData")

# define the on_message callback
def on_message(client, userdata, msg):
    # print the incoming message and its parsed payload
    print('')
    print(msg.topic + " " + str(msg.payload))
    data = dbf.parse_mqtt_payload(str(msg.payload))
    print(data)
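
# A minimal wiring sketch for the callbacks above, assuming the paho-mqtt
# 1.x client API and a broker on localhost (both assumptions; the original
# snippet does not show the client setup):
import paho.mqtt.client as mqtt

client = mqtt.Client()
client.on_connect = on_connect
client.on_message = on_message
client.connect("localhost", 1883, 60)  # assumed local broker, default port
client.loop_forever()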

import DB_Functions as DB
import Kindle_Functions

# fetch the vocabulary entries; get_Vokabeln presumably returns 0 on success
buff = DB.get_Vokabeln()

if buff == 0:
    Kindle_Functions.Kindle_leeren()  # clear the Kindle vocabulary list
else:
    print("Error, Kindle was not cleared")