def __init__(self, config, loggingHandler):
    """Initialize the bot: load the config sections, open the contact
    store (SQLite or JSON) and connect to Telegram.

    config -- parsed configuration with 'local', 'server' and
              'supermarkets' sections
    loggingHandler -- pre-configured logger used for all diagnostics
    """
    all_settings_dir = "Settings"
    contact_list_path = "myContactList.json"
    noFilaBot_db = "NoFilaBot.db"
    self.config = config
    self.logging = loggingHandler
    # Loading values
    self.localParameters = config['local']
    self.serverInfo = config['server']
    self.mySupermarkets = config['supermarkets']
    # Resolve the SQLite file path relative to the settings directory
    noFilaBot_db = createAbsolutePath(
        os.path.join(all_settings_dir, noFilaBot_db))
    self.mySupermarketsList = self.getMySuperMarket()
    self.logging.info('Supermarkets list extracted')
    # Insert default values: fall back to the SQLite backend when no JSON
    # contact store is requested in the config
    if not 'json_db' in self.localParameters:
        self.localParameters['json_db'] = False
    # Define Contact List
    self.db = DbConnector(noFilaBot_db, self.logging)
    self.contactListPath = createAbsolutePath(
        os.path.join(all_settings_dir, contact_list_path))
    self.readContactList()
    self.smCache = {}  # last supermarket snapshot, reused when caching is on
    # Connecting to Telegram
    self.TmUpdater = Updater(self.localParameters['telegram_token'],
                             use_context=True)
    self.TmDispatcher = self.TmUpdater.dispatcher
    self.bot = self.TmUpdater.bot
    self.logging.info("Connected succesfully to Telegram")
class Monitor:
    """Tracks heartbeat signals and escalates the stored status when they
    go missing for too long."""

    def __init__(self):
        # Number of consecutive checks without a heartbeat seen so far
        self.life_sign_counter = 0

    def init_database(self):
        """Create the DB connector and initialise its schema."""
        self.db_connector = DbConnector()
        self.db_connector.init_database()

    def check_person_for_status(self, heartbeats_missing):
        """Look for a heartbeat within the last hour and persist a status.

        heartbeats_missing -- number of consecutive missed checks tolerated
        (WARNING) before the person is declared MISSING.
        """
        current_time = datetime.datetime.utcnow()
        last_hour = current_time - datetime.timedelta(hours=1)
        if self.db_connector.heartbeat_within_time(last_hour):
            # received: ok, store new status to db and reset the counter
            self.db_connector.store_status(LifeStatus.OK)
            self.life_sign_counter = 0
            # Fixed: these were Python 2 print statements (syntax errors on 3)
            print("ALL OK!")
        else:
            # not received: start incrementing monitor counter
            if self.life_sign_counter < heartbeats_missing:
                self.life_sign_counter += 1
                self.db_connector.store_status(LifeStatus.WARNING)
                print("MISSING HEARTBEAT")
            else:
                # Uh-oh! Person missing!
                self.db_connector.store_status(LifeStatus.MISSING)
                print("UH-OH! PERSON MISSING!")
def __init__(self):
    """Set up the DB connection and cache the dataset's label ids and
    user directory listing."""
    self.connection = DbConnector()
    self.db_connection = self.connection.db_connection
    self.cursor = self.connection.cursor
    # Use a context manager so the file handle is closed deterministically
    # (the original bare open(...).read() leaked the handle).
    with open("dataset/dataset/labeled_ids.txt", "r") as label_file:
        self.labels = label_file.read().split("\n")
    self.users = os.listdir("dataset/dataset/Data")
def __init__(self):
    """Open the MySQL connection and expose its handle and cursor.

    The DB password is taken from the first command-line argument.
    """
    conn = DbConnector(HOST='tdt4225-19.idi.ntnu.no',
                       DATABASE='db',
                       USER='******',
                       PASSWORD=argv[1])
    self.connection = conn
    self.db_connection = conn.db_connection
    self.cursor = conn.cursor
def __init__(self):
    """Connect to the database and prepare empty insertion buffers."""
    conn = DbConnector()
    self.connection = conn
    self.db_connection = conn.db_connection
    self.cursor = conn.cursor
    # Buffers filled during dataset traversal and flushed in bulk later
    self.keysToSkip = []
    self.tpsToAdd = {}
    self.acitivityTpsToAdd = []  # (sic) original attribute name preserved
    self.activitiesToAdd = []
def __init__(self):
    """Connect to MongoDB and preload the labeled-user id list."""
    self.connection = DbConnector()
    self.client = self.connection.client
    self.db = self.connection.db
    # Read all labels right away, to avoid doing it later.
    # A context manager closes the handle (the original bare open() leaked
    # it); [:-1] drops the empty element after the trailing newline.
    with open("dataset/labeled_ids.txt", "r") as label_file:
        self.labels = label_file.read().split("\n")[:-1]
def __init__(self):
    """Open the MongoDB connection and expose the client and database.

    The DB password is taken from the first command-line argument.
    """
    conn = DbConnector(
        HOST='tdt4225-19.idi.ntnu.no',
        DATABASE='my_db',
        USER='******',
        PASSWORD=argv[1]
    )
    self.connection = conn
    self.client = conn.client
    self.db = conn.db
def __init__(self):
    """Upload the dataset to the MySQL database."""
    conn = DbConnector()
    self.connection = conn
    self.db_connection = conn.db_connection
    self.cursor = conn.cursor
    # Manually managed primary-key counters used while bulk inserting
    self.ACTIVITY_ID = 1
    self.TRACKPOINT_ID = 1
def main():
    """Wire up logging, MongoDB and Kafka, then start the experiments monitor."""
    config = env.read_env_variables()
    # Configure logging from the YAML file pointed at by the environment
    with open(config["LOG_CONFIG_FILE"], 'r') as logconfig:
        cfg = yaml.safe_load(logconfig)
    logging.config.dictConfig(cfg)
    logger = logging.getLogger('mainLogger')
    env.log_env_vars(config)
    # Mongo connection the monitor uses to persist experiment data
    conn = DbConnector(logger)
    db = conn.connect_mongo(url=config["MONGO_URL"],
                            db_name=config["MONGODB_DATABASE"],
                            user=config["MONGODB_USER"],
                            password=config["MONGODB_PASSWORD"])
    monitor = ExperimentsMonitor(config["MAIN_TOPIC"], config["KAFKA_SERVERS"],
                                 config["KAFKA_GROUP"], logger, db)
    # Blocks here consuming Kafka messages until interrupted
    monitor.listen()
def __init__(self): """ Disse må endres når jeg får tilgang til VMen vår, for øyeblikket kjører jeg det lokalt """ self.connection = DbConnector(HOST='tdt4225-19.idi.ntnu.no', DATABASE='db', USER='******', PASSWORD=argv[1]) self.db_connection = self.connection.db_connection self.cursor = self.connection.cursor self.fs_helper = FileTraversal()
def __init__(self): """ Passord er sendt på facebook """ self.connection = DbConnector(HOST='tdt4225-19.idi.ntnu.no', DATABASE='my_db', USER='******', PASSWORD=argv[1]) self.db = self.connection.db self.client = self.connection.client self.fs_helper = FileTraversal()
def Monitor(self):
    """Measure root-filesystem usage and record the percentage in the DB."""
    # Get available space on '/'
    stats = os.statvfs('/')
    total_bytes = stats.f_frsize * stats.f_blocks
    free_bytes = stats.f_frsize * stats.f_bavail
    used_percent = int(100 * (total_bytes - free_bytes) / total_bytes)
    logging.info("Disk usage: {0}%".format(used_percent))
    # The connector yields None when the database is unreachable
    with DbConnector() as db:
        if db is None:
            logging.error("Cannot connect to DB")
        else:
            db.UpdateSystem(used_percent)
def __init__(self):
    """Connect to MySQL and initialise the in-memory dataset caches."""
    conn = DbConnector()
    self.connection = conn
    self.db_connection = conn.db_connection
    self.cursor = conn.cursor
    # Caches filled while traversing the dataset
    self.user_ids = {}
    self.labeled_ids = []
    self.activity_data = {}
    self.labeled_data = {}
    # Closed set of valid transportation labels (order preserved)
    self.transportation_modes = [
        'walk', 'taxi', 'car', 'airplane', 'bike', 'subway', 'bus',
        'train', 'other'
    ]
def __init__(self):
    """Connect to the DB and index the dataset's user directories."""
    conn = DbConnector()
    self.connection = conn
    self.db_connection = conn.db_connection
    self.cursor = conn.cursor
    # One sub-directory per user id; sorted for deterministic ordering
    user_dirs = sorted(entry.name for entry in os.scandir("dataset/Data")
                       if entry.is_dir())
    self.subfolders = user_dirs
    self.ids = tuple(user_dirs)
    # Set of all users that are labeled
    self.labeled = set(
        self.file_reader("dataset/labeled_ids.txt", False, None))
    self.long_files = {}
def ProcessFrame(self, aTable, aFrame):
    """Parse a teleinfo frame and save its readings in the database.

    aTable -- destination table name (compared against wDb.RECORDS_M_TABLE)
    aFrame -- raw frame text, one "<label> <value> <checksum>" triple per line
    Returns True when a record was saved, False otherwise.
    """
    wRet = False
    # TODO: test frame integrity
    # Frame to dictionary: keep only well-formed 3-token lines, mapping
    # label -> value (the trailing checksum token is discarded)
    wDic = {}
    for i in aFrame.strip().split('\n'):
        wWord = i.split()
        if 3 == len(wWord):
            wDic[wWord[0]] = wWord[1]
    logging.debug("Frame to process: {0}".format(wDic))
    # Save data in DB (connector yields None when unreachable)
    with DbConnector() as wDb:
        if wDb is None:
            logging.error("Cannot connect to DB")
            return wRet
        if aTable == wDb.RECORDS_M_TABLE:
            # Update the info (subscriber/contract metadata fields)
            logging.info("Update info")
            try:
                # save info
                self.__UpdateInfo(wDb, wDic['ADCO'], wDic['OPTARIF'],
                                  wDic['ISOUSC'], wDic['IMAX'])
            except Exception as e:
                logging.error("Update info error: {0}".format(e))
        # save new record; HCHC/HCHP counters may be absent from the frame
        # NOTE(review): block nesting reconstructed from a collapsed source —
        # confirm whether this save belongs inside the RECORDS_M_TABLE branch.
        try:
            self.__SaveRecord(wDb, aTable,
                              wDic['HCHC'] if 'HCHC' in wDic else None,
                              wDic['HCHP'] if 'HCHP' in wDic else None)
            logging.info("Record saved to {0}".format(aTable))
            wRet = True
        except Exception as e:
            logging.error("Save record error: {0}".format(e))
    return wRet
from DbConnector import DbConnector

# (very) simple mysql cli, for debugging purposes
connection = DbConnector()
while True:
    cmd = input('mysql> ')
    if cmd == 'exit':
        break
    connection.cursor.execute(cmd)
    # Only echo non-empty result sets
    rows = connection.cursor.fetchall()
    if rows:
        print(rows)
from DbConnector import DbConnector

# Ad-hoc manual test script for DbConnector: builds a sample PROTEIN row,
# prints the INSERT statement, and exercises a few query helpers.
db = DbConnector()
actual_id = 'anotherdbTest'
header = 'headertest'
seq = 'seqtest'
iteration = -1
# Sample GO-term annotations, keyed as the uniprot parser emits them
GO_STUF_D = {'go(biological process)': "test;test;test",
             'go(cellular component)': "test2comp;tes2t",
             'go(molecular function)': "IDUYEI*UYWGFWGHF"}
pos_2c = -1
# NOTE(review): values are interpolated directly into the SQL string — fine
# for this fixed test data, but not safe for untrusted input.
q = f"""INSERT INTO PROTEIN VALUES( NULL, '{actual_id}', '{header}', '{seq}', {iteration}, '{GO_STUF_D['go(biological process)']}', '{GO_STUF_D['go(cellular component)']}', '{GO_STUF_D['go(molecular function)']}', {pos_2c}); """
print(q)
#db.commit_query(q)
mess = db.exists_protein('testgfId')
iteration = db.selecx_max_iteration()  # (sic) typo'd name lives in DbConnector
print("iteration = ",iteration, type(iteration))
print(mess)
def __init__(self):
    """Open the database connection and expose its handle and cursor."""
    conn = DbConnector()
    self.connection = conn
    self.db_connection = conn.db_connection
    self.cursor = conn.cursor
def init_database(self):
    """Create the database connector and build the schema.

    The connector is stored on the instance for later queries.
    """
    self.db_connector = DbConnector()
    self.db_connector.init_database()
# NOTE(review): the lines below are the tail of a FASTA-combining loop whose
# beginning lies outside this chunk; indentation reconstructed — confirm.
        if header != '':
            # flush the previously collected record before starting a new one
            out.write(header)
            out.write(seq)
        header = line
        seq = ''
    else:
        # continuation line: accumulate the sequence body
        seq += line


if __name__ == "__main__":
    """ Main prorgam flow """
    print('running from main')
    # todo set verbose default to False again one day or another im gonna find ya im gonna getcha getcha getcha getcha
    # create an object to handle communications with mySQL database
    db = DbConnector()
    # create uniprot handle object
    # todo: find out if 'handle' is the right terminology here
    uniprot_handle = UniProt(verbose=True)
    # determine input file(s)
    # check validity
    # combine multiple fasta's into one for msa
    # set main loop condition
    # this could be NOT running out of results or
    # having more than 50% of results be results we already
    # found
    running = True
import sys
import os

# Make the parent directory importable so DbConnector resolves when this
# test script is run from its own folder.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from DbConnector import DbConnector

# Ad-hoc manual test: exercises duplicate-key handling, row updates, selects.
myDB = DbConnector()
identifiery = 16
try:
    # First insert may collide with an existing primary key
    myDB.commit_query(
        f"INSERT INTO PROTEIN VALUES ({identifiery}, 5, 'hhgtsghsfg', 'tagtgtgatgatg')"
    )
except Exception as e:
    print(type(e))
# Retry with the next id, expected to succeed
identifiery += 1
myDB.commit_query(
    f"INSERT INTO PROTEIN VALUES ({identifiery}, 5, 'hhgtsghsfg', 'tagtgtgatgatg')"
)
#myDB.insert("INSERT INTO PROTEIN (`id`, `MSA_id`, `header`, `sequence`)", [14, 5, 'hhgtsghsfg', 'tagtgtgatgatg'])
myDB.update_row("PROTEIN", 13, {
    "MSA_id": 1337,
    "header": ">headeryboi1",
    "sequence": "atcctg"
})
print(myDB.select_results())
# -*- coding: utf-8 -*- from project import app from flask import render_template, request from requests import get, post, put, delete import requests import xml.etree.ElementTree as ET import json from bson import json_util import sys from DbConnector import DbConnector from datetime import datetime reload(sys) sys.setdefaultencoding('utf-8') provider_db = DbConnector(db='project_db', collection='provider') news_db = DbConnector(db='project_db', collection='news') seqs_db = DbConnector(db='project_db', collection='seqs') @app.route('/') def start(): return render_template('index.html') @app.route('/save_providers') def save_provider(): page = requests.get("https://www.aljazeera.com/xml/rss/all.xml") data = [] tree = ET.fromstring(page.content) for item in tree.iter('item'):
def __init__(self):
    """Connect to MongoDB and load the dataset's user ids and labeled ids."""
    self.connection = DbConnector()
    self.client = self.connection.client
    self.db = self.connection.db
    self.ids = os.listdir("dataset/dataset/Data")
    # Use forward slashes like the listdir call above: the original used a
    # Windows-only backslash raw string, which breaks on POSIX systems.
    # Also close the handle deterministically (the bare open() leaked it).
    with open("dataset/dataset/labeled_ids.txt", "r") as label_file:
        self.labeled_ids = label_file.read().splitlines()
import pymongo
from DbConnector import DbConnector
import sys

# NOTE(review): Python 2-only idiom (sys.setdefaultencoding) — this seeding
# script targets Python 2.
reload(sys)
sys.setdefaultencoding('utf-8')

# One connector per Mongo collection to seed
provider_db = DbConnector(db='project_db', collection='provider')
news_db = DbConnector(db='project_db', collection='news')
seqs_db = DbConnector(db='project_db', collection='seqs')

# Seed documents for the 'provider' collection.
# NOTE(review): the list continues past the end of this chunk.
data = [{
    "_id": 1,
    "name": "Al Jazeera English",
    "link": "https://www.aljazeera.com/xml/rss/all.xml",
    "Data Added": "Sun, 23 Jun 2019 12:32:50 GMT",
    "Data Updated": "Sun, 23 Jun 2019 12:32:50 GMT",
    "Status": 1
}, {
    "_id": 2,
    "name": "Defence Blog",
    "link": "https://defence-blog.com/feed",
    "Data Added": "Sun, 23 Jun 2019 12:32:50 GMT",
    "Data Updated": "Sun, 23 Jun 2019 12:32:50 GMT",
    "Status": 1
}, {
    "_id": 3,
    "name": "The Guardian",
    "link": "https://www.theguardian.com/world/rss",
    "Data Added": "Sun, 23 Jun 2019 12:32:50 GMT",
    "Data Updated": "Sun, 23 Jun 2019 12:32:50 GMT",
    "Status": 1
def main():
    """Create the Geolife MySQL schema and bulk-load users, activities and
    trackpoints from ./dataset."""
    connection = DbConnector()
    t0 = t()

    def create_table(table_name, definition):
        # Idempotent helper so reruns don't fail on existing tables
        query = f'CREATE TABLE IF NOT EXISTS {table_name} ({definition})'
        connection.cursor.execute(query)
        connection.commit()

    # table definitions
    tables = {
        'User': '''
            id CHAR(3) PRIMARY KEY NOT NULL,
            has_labels BOOLEAN NOT NULL
        ''',
        'Activity': '''
            id INT PRIMARY KEY NOT NULL AUTO_INCREMENT,
            user_id CHAR(3) NOT NULL,
            transportation_mode ENUM('walk', 'bike', 'bus', 'car', 'subway',
            'train', 'airplane', 'boat', 'run', 'motorcycle', 'taxi') DEFAULT NULL,
            start_date_time DATETIME NOT NULL,
            end_date_time DATETIME NOT NULL,
            FOREIGN KEY (user_id) REFERENCES User(id) ON DELETE CASCADE
        ''',
        'Trackpoint': '''
            id INT PRIMARY KEY NOT NULL AUTO_INCREMENT,
            activity_id INT NOT NULL,
            lat DOUBLE NOT NULL,
            lon DOUBLE NOT NULL,
            altitude DOUBLE,
            date_days DOUBLE NOT NULL,
            date_time DATETIME NOT NULL,
            FOREIGN KEY (activity_id) REFERENCES Activity(id) ON DELETE CASCADE
        ''',
    }
    # create the tables
    for table_name, table_definition in tables.items():
        create_table(table_name, table_definition)

    working_directory = getcwd()
    data_directory, user_ids, _ = next(
        walk(f'{working_directory}/dataset/Data'))
    # sorting the user IDs makes it (much) easier to deal with has_labels
    user_ids.sort()
    user_data = [[user_id, False] for user_id in user_ids]
    with open(f'{working_directory}/dataset/labeled_ids.txt') as f:
        for line in f:
            # make sure our logic works (would fail e.g. if some person IDs are missing)
            assert line.strip() == user_data[int(line)][0]
            user_data[int(line)][1] = True
    # print(*user_data, sep='\n')
    t1 = t()
    # NOTE(review): this message was split across lines by formatting loss;
    # reconstructed as a single-line f-string — confirm against the original.
    print(f'Inserting users. Time elapsed: {t1 - t0} seconds')
    connection.cursor.executemany(
        'INSERT INTO User (id, has_labels) VALUES (%s, %s)', user_data)
    connection.commit()
    t2 = t()
    print(
        f'{len(user_data)} users inserted in {t2 - t1} seconds ({t2 - t1} total)'
    )
    print('Processing activity and trackpoint data.')
    # read data about activities and trackpoints
    activity_data = []
    trackpoint_data = []
    # manually setting the activity IDs: this allows us to know the foreign
    # keys without needing to insert activities one by one and checking the
    # inserted IDs
    activity_id = 1
    for [user_id, has_labels] in user_data:
        labels = {}
        if has_labels:
            with open(f'{data_directory}/{user_id}/labels.txt', 'r') as f:
                f.readline()  # skip header
                for line in f:
                    start_date, start_time, end_date, end_time, transport_mode = line.strip(
                    ).split()
                    # dates are slightly differently formatted in the labels-files than the .plt-files:
                    start_date = start_date.replace('/', '-')
                    end_date = end_date.replace('/', '-')
                    labels[f'{start_date} {start_time}'] = (
                        f'{end_date} {end_time}', transport_mode)
        # each .plt file is a single activity
        _, _, activity_filenames = next(
            walk(f'{data_directory}/{user_id}/Trajectory'))
        for activity_filename in activity_filenames:
            with open(
                    f'{data_directory}/{user_id}/Trajectory/{activity_filename}'
            ) as activity_file:
                # skip 6 first lines
                for _ in range(6):
                    activity_file.readline()
                lines = activity_file.readlines(
                )  # Doing a full read of the file might be risky
                # but I really don't hope or believe there is any one file too big to open in memory
            if len(
                    lines
            ) > 2500:  # skip activities with more than 2500 trackpoints
                continue
            _, _, _, _, _, start_date, start_time = lines[0].strip().split(
                ',')
            _, _, _, _, _, end_date, end_time = lines[-1].strip().split(
                ',')
            transportation_mode = None
            if f'{start_date} {start_time}' in labels:  # if data is labeled
                _end_date_and_time, mode = labels[
                    f'{start_date} {start_time}']
                end_date_assert, end_time_assert = _end_date_and_time.split(
                    " ")
                lines_assert = lines[-1].strip().split(",")
                # only trust the label when its end timestamp matches the
                # last trackpoint of the file
                if f'{end_date_assert}' == lines_assert[
                        -2] and f'{end_time_assert}' == lines_assert[-1]:
                    # assert end_date_and_time == f'{end_date} {end_time}', f'{end_date_and_time} is not {end_date} {end_time}'
                    # Just making sure
                    transportation_mode = mode
            activity_data.append([
                activity_id, user_id, transportation_mode,
                f'{start_date} {start_time}', f'{end_date} {end_time}'
            ])
            # activity data is dealt with - time to get trackpoint data
            for line in lines:
                lat, lon, _, alt, date_days, date, time = line.strip(
                ).split(',')
                trackpoint_data.append([
                    activity_id, lat, lon, alt, date_days, f'{date} {time}'
                ])
            activity_id += 1
    t3 = t()
    print(
        f'Processing activity and trackpoints took {t3 - t2} seconds. Total time elapsed: {t3 - t0} seconds'
    )
    # Insert activities:
    print('Inserting activities...')
    connection.cursor.executemany(
        'INSERT INTO Activity (id, user_id, transportation_mode, start_date_time, end_date_time) VALUES (%s, %s, %s, %s, %s)',
        activity_data)
    connection.commit()
    t4 = t()
    print(
        f'{len(activity_data)} activities inserted in {t4 - t3} seconds. Total time elapsed: {t4 - t0}'
    )
    # Insert trackpoints:
    print('Inserting trackpoints...')
    # batched to keep each executemany below MySQL packet limits
    batch_size = 100000
    for i in range(0, len(trackpoint_data), batch_size):
        print(
            f'{(100*i/len(trackpoint_data)):.2f}%, {i} out of {len(trackpoint_data)} trackpoints inserted'
        )
        connection.cursor.executemany(
            'INSERT INTO Trackpoint (activity_id, lat, lon, altitude, date_days, date_time) VALUES (%s, %s, %s, %s, %s, %s)',
            trackpoint_data[i:i + batch_size])
        connection.commit()
    t5 = t()
    print(
        f'{len(trackpoint_data)} trackpoints inserted in {t5 - t4} seconds. Total time elapsed: {t5 - t0} seconds'
    )
    # -777 is the dataset's sentinel value for "no altitude"
    connection.cursor.execute(
        'UPDATE Trackpoint SET altitude=NULL WHERE altitude=-777')
    connection.commit()
    connection.close_connection()
def __init__(self):
    """Open the MongoDB connection and expose the client and database."""
    conn = DbConnector()
    self.connection = conn
    self.client = conn.client
    self.db = conn.db
class NoFilaBot:
    """Telegram bot that watches supermarket queue status (filaindiana.it)
    and notifies subscribed chats when a nearby market has a short queue."""

    #Load config and psw
    def __init__(self, config, loggingHandler):
        """Load config sections, open the contact store and connect to Telegram."""
        all_settings_dir = "Settings"
        contact_list_path = "myContactList.json"
        noFilaBot_db = "NoFilaBot.db"
        self.config = config
        self.logging = loggingHandler
        #Loading values
        self.localParameters = config['local']
        self.serverInfo = config['server']
        self.mySupermarkets = config['supermarkets']
        # Resolve the SQLite file path relative to the settings directory
        noFilaBot_db = createAbsolutePath(
            os.path.join(all_settings_dir, noFilaBot_db))
        self.mySupermarketsList = self.getMySuperMarket()
        self.logging.info('Supermarkets list extracted')
        #Insert default values
        if not 'json_db' in self.localParameters:
            self.localParameters['json_db'] = False
        #Define Contact List
        self.db = DbConnector(noFilaBot_db, self.logging)
        self.contactListPath = createAbsolutePath(
            os.path.join(all_settings_dir, contact_list_path))
        self.readContactList()
        self.smCache = {}  # last supermarket snapshot, reused when useCache=True
        #Connecting to Telegram
        self.TmUpdater = Updater(self.localParameters['telegram_token'],
                                 use_context=True)
        self.TmDispatcher = self.TmUpdater.dispatcher
        self.bot = self.TmUpdater.bot
        self.logging.info("Connected succesfully to Telegram")

    #Read my contact list
    def readContactList(self):
        """Load subscribed chat ids from JSON or from the SQLite DB."""
        if self.localParameters['json_db']:
            return self.readJsonContactList()
        else:
            self.myContactList = self.db.getContacts()
            self.logging.info('Contact list loaded')
            return self.myContactList

    #Read my contact list as a json
    def readJsonContactList(self):
        """Load the JSON contact list, falling back to an empty list."""
        try:
            with open(self.contactListPath) as json_file:
                self.myContactList = json.load(json_file)
        except ValueError:
            self.logging.warning(
                'Cannot decode the stored contact list - Using an empty one')
            print("Invalid json [" + str(self.contactListPath) +
                  "] - Use empty one")
            self.myContactList = []
        except FileNotFoundError:
            self.logging.warning(
                'Stored contact list not found - Using an empty one')
            print("Contact list not existent [" + str(self.contactListPath) +
                  "] - Using an empty one")
            self.myContactList = []
        self.logging.info('Contact list loaded')
        return self.myContactList

    #Updates the contact list
    def dumpContactList(self):
        """Persist the in-memory contact list (JSON backend)."""
        self.storeJsonContactList()

    #Add a contact to the contact list
    def addToContactList(self, user):
        """Subscribe a chat id, persisting to JSON or DB as configured."""
        user = str(user)
        self.myContactList.append(user)
        if self.localParameters['json_db']:
            self.dumpContactList()
            self.logging.info('Appended to JSON contact list')
        else:
            print("User to add: " + str(user))
            self.db.insertContact(user)
            self.logging.info('Appended to DB contact list')

    #Remove a contact from the contact list
    def removeFromContactList(self, user):
        """Unsubscribe a chat id from the in-memory list and the store."""
        user = str(user)
        self.logging.info("removeFromContactList - Element before: " +
                          str(len(self.myContactList)))
        self.myContactList.remove(user)
        self.logging.info("removeFromContactList - Element after: " +
                          str(len(self.myContactList)))
        if self.localParameters['json_db']:
            self.dumpContactList()
        else:
            self.db.removeContact(user)

    #Updates the json contact list
    def storeJsonContactList(self):
        """Write the contact list to its JSON file."""
        with open(self.contactListPath, "w") as json_file:
            json.dump(self.myContactList, json_file)

    #Enable the deamon to answer to message
    def start(self):
        """Register handlers and start long-polling Telegram for updates."""
        #Defining handlers
        self.createHandlers()
        self.logging.info("Bot handlers created")
        print("Bot handlers created")
        #Starting bot
        self.TmUpdater.start_polling()
        self.logging.info("Bot is now polling for new messages")

    #The complete function that iterate over all values
    def updateStatus(self, useCache=False, peopleToNotify=None,
                     disableNotify=False):
        """Refresh supermarket data, pick relevant entries and notify chats.

        useCache -- reuse the last snapshot instead of re-querying the server
        peopleToNotify -- chat id, list of ids, or None for all subscribers
        disableNotify -- compute and return updates without sending messages
        Returns the list of relevant supermarket dicts, or None when there is
        no one to notify.
        NOTE(review): callers that call len() on the result would fail on the
        None early-return — confirm.
        """
        if peopleToNotify is None:
            #If no specific user are defined broadcast the message
            peopleToNotify = self.myContactList
            self.logging.info(
                'updateStatus - Starting periodic update broadcasting')
        elif type(peopleToNotify) is not list:
            #If passed a single user transform to list
            peopleToNotify = [peopleToNotify]
            self.logging.info('updateStatus - Notifing selected chat')
        else:
            self.logging.info('updateStatus - Broadcasting to given chats')
        #Check if is requested to notify someone
        if not len(peopleToNotify):
            self.logging.info('updateStatus - No one to update - Skip refresh')
            return
        #Check if a cache refresh is needed
        if not useCache or len(peopleToNotify) < 1:
            self.smCache = self.requestUpdateSupermarkets()
            self.logging.info('updateStatus - Cache refreshed')
        #Parse data
        relevant = self.parseAllSupermarkets(self.smCache)
        self.logging.info('updateStatus - Found ' + str(len(relevant)) +
                          ' relevant updates')
        #Check if the notify is required
        if disableNotify:
            self.logging.info('updateStatus - Update without notify refreshed')
            return relevant
        #Send the notify to subscribed users
        for relSup in relevant:
            for user in peopleToNotify:
                self.sendNotify(user, relSup)
        return relevant

    #Send the request to the server to update the list of open supermarket
    def requestUpdateSupermarkets(self):
        """POST our position to the server and return the parsed JSON reply."""
        payload = {
            'lat': self.localParameters['lat'],
            'long': self.localParameters['long'],
            'debug': self.serverInfo['debug']
        }
        # Browser-like headers expected by the filaindiana.it endpoint
        h = {
            'User-Agent': self.serverInfo['user_agent'],
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'Accept-Language': 'it-IT,it;q=0.8,en-US;q=0.5,en;q=0.3',
            'Content-Type': 'application/json',
            'Origin': 'https://filaindiana.it',
            'Referer': 'https://filaindiana.it/'
        }
        r = requests.post(self.serverInfo['server_handler'],
                          data=json.dumps(payload),
                          headers=h)
        return r.json()

    #Parse all the give supermarkets to discover relevant updates
    def parseAllSupermarkets(self, dct):
        """Filter the raw supermarket list down to fresh, short-queue entries."""
        relevant = []
        for sm in dct:
            #Check the queue and wait time status
            if 'state' in sm and (sm['state']['queue_wait_minutes'] <
                                  self.localParameters['max_wait']
                                  or sm['state']['queue_size_people'] <
                                  self.localParameters['max_people']):
                lastUpdate = self.parseTime(sm['state']['updated_at'])
                elapsed = datetime.now() - lastUpdate
                #Check how old is the last report
                if elapsed.total_seconds(
                ) < self.localParameters['max_age'] * 60:
                    #Check if enough time has passed
                    relevant.append({
                        'id': sm['supermarket']['market_id'],
                        'name': sm['supermarket']['name'],
                        'address': sm['supermarket']['address'],
                        'minutes': sm['state']['queue_wait_minutes'],
                        'people': sm['state']['queue_size_people']
                    })
                else:
                    # report too old: log and skip
                    self.logging.info(
                        str(sm['supermarket']['name']) + "(" +
                        str(sm['supermarket']['address']) + ") - " +
                        str(int(round(elapsed.total_seconds() / 60))) +
                        " minutes from last update - Skip")
            else:
                if not 'state' in sm:
                    self.logging.info(
                        str(sm['supermarket']['name']) + "(" +
                        str(sm['supermarket']['address']) +
                        ") has never received an update")
                else:
                    self.logging.info(
                        str(sm['supermarket']['name']) + "(" +
                        str(sm['supermarket']['address']) + ") - People: " +
                        str(sm['state']['queue_size_people']) + " - Wait: " +
                        str(sm['state']['queue_wait_minutes']))
        self.logging.info("parseAllSupermarkets - Analyzed " + str(len(dct)) +
                          " supermarkets")
        return relevant

    #Parse the time in the log
    def parseTime(self, timeString):
        """Parse a 'YYYY-MM-DD HH:MM:SS' timestamp into a datetime."""
        timeFormat = '%Y-%m-%d %H:%M:%S'
        parsed = datetime.strptime(timeString, timeFormat)
        return parsed

    #Extract list of my supermarkes
    def getMySuperMarket(self):
        """Map market_id -> user-friendly name for the configured markets."""
        return dict((i['market_id'], i['user_friendly_name'])
                    for i in self.mySupermarkets)

    #Notify all open chat with this bot
    def sendNotify(self, user, info):
        """Send one supermarket update to a single chat (MarkdownV2)."""
        self.logging.info('Sending update to: ' + str(user))
        if info['id'] in self.mySupermarketsList.keys():
            nameToUse = self.mySupermarketsList[info['id']]
        else:
            nameToUse = info['name'] + "(" + str(info['address']) + ")"
        #Check if there is need to send notify for all supermarkets
        # NOTE(review): when filter_only_my_markets is true, every market is
        # skipped here — including configured ones; confirm intended.
        if not self.localParameters['filter_only_my_markets']:
            self.sendMessage(
                "*" + helpers.escape_markdown(nameToUse, 2) +
                "* \- Circa *" + str(info['people']) +
                " persone* in fila \(stimati " + str(info['minutes']) +
                " minuti di coda\)", user, telegram.ParseMode.MARKDOWN_V2)
            self.logging.info('Notify sent to ' + str(user))
        else:
            self.logging.info('Ignoring this supermarket [' + info['id'] + ']')

    #Send the selected message
    def sendMessage(self, message, chat=None, parse_mode=None):
        """Send a message (truncated to Telegram's limit) to one chat."""
        mex = str(message)[:4095]  # Telegram caps message length
        if not chat:
            self.logging.error("Missing chat - Message not sent")
            return
        try:
            self.bot.sendMessage(chat, mex, parse_mode=parse_mode)
        except telegram.error.BadRequest:
            self.logging.error("Cannot send message to chat [" + str(chat) +
                               "] - Skip")
        except telegram.error.Unauthorized:
            # chat blocked the bot: drop it from the subscriber list
            self.logging.info("Bot blocked by chat [" + str(chat) +
                              "] - Remove user")
            self.removeFromContactList(chat)

    #Define the approriate handlers
    def createHandlers(self):
        """Register command and text handlers with the dispatcher."""
        #Commands
        self.TmDispatcher.add_handler(
            CommandHandler("start", self.startHandler))
        self.TmDispatcher.add_handler(CommandHandler("stop", self.stopHandler))
        self.TmDispatcher.add_handler(
            CommandHandler("report", self.reportHandler))
        self.logging.info("createHandlers - Created handlers for command")
        #Text message
        self.TmDispatcher.add_handler(
            MessageHandler(Filters.text, self.textHandler))
        self.logging.info("createHandlers - Created handlers for text")

    #Handle a received message
    def textHandler(self, update=None, context=None):
        """Reply to free text with usage instructions."""
        self.logging.info("Received text message - Ignoring")
        update.message.reply_text(
            "Premi 👉 /start per iniziare a monitorare i supermercati 🕵️🛒🔔\n\nVisita ["
            + self.serverInfo['report_site_name'] + "](" +
            self.serverInfo['report_site_url'] +
            ") per inviare le segnalazioni 🙋",
            parse_mode=telegram.ParseMode.MARKDOWN_V2)

    #Start the subscription to the bot
    def startHandler(self, update=None, context=None):
        """Handle /start: subscribe the chat and send a first update."""
        self.logging.info("startHandler - Bot started by: " +
                          str(update.effective_chat))
        if update.effective_chat.id in self.myContactList:
            update.message.reply_text(
                "Ciao " + str(update.effective_chat.first_name) +
                " 👋, controllo se ci sono nuove segnalazioni 🕵️🛒")
        else:
            self.addToContactList(update.effective_chat.id)
            update.message.reply_text(
                "Ciao " + str(update.effective_chat.first_name) +
                " 👋, da adesso sarai aggiornato 🔔 sulla fila dei supermercati nei dintorni. 🕵️🛒\nPremi 🔕 /stop per non ricevere più notifiche"
            )
        relevant = self.updateStatus(useCache=True,
                                     peopleToNotify=update.effective_chat.id)
        #Notify if no useful supermarket has been found
        if not len(relevant):
            update.message.reply_text(
                "Tutti i supermercati nei tuoi dintorni sono chiusi oppure pieni.\nContinuo a controllare! 🕵️🛒\nPremi 🔕 /stop per non ricevere più notifiche"
            )

    #Stop the subscription to the bot
    def stopHandler(self, update=None, context=None):
        """Handle /stop: unsubscribe the chat."""
        chat_id = str(update.effective_chat.id)
        self.logging.info("stopHandler - Bot stopped by: " +
                          str(update.effective_chat))
        if chat_id in self.myContactList:
            self.removeFromContactList(chat_id)
            self.logging.info("stopHandler - " + str(chat_id) +
                              " removed from contact list")
        else:
            self.logging.warning("stopHandler - " + str(chat_id) +
                                 " not in contact list: " +
                                 str(self.myContactList))
        update.message.reply_text(
            "Va bene 👍, niente notifiche 🔕\nPremi 👉 /start per ricominciare ad essere aggiornato 🔔"
        )

    #Used to report the supermarket queue status
    def reportHandler(self, update=None, context=None):
        """Handle /report: point the user at the reporting web site."""
        self.logging.info("reportHandler - Report requested by: " +
                          str(update.effective_chat))
        update.message.reply_text("Visita [" +
                                  self.serverInfo['report_site_name'] +
                                  "](" + self.serverInfo['report_site_url'] +
                                  ") per inviare le segnalazioni 🙋",
                                  parse_mode=telegram.ParseMode.MARKDOWN_V2)