class Collector(): DEFAULT_CONFIG_FILE = "collector.config" def __init__(self): #setup logger self.setupLogger() def setupLogger(self): logging.basicConfig(filename="collector.log") self.logger = logging.getLogger('Collector') fh = logging.FileHandler("collector.log") formatter = logging.Formatter('[%(asctime)s]:') fh.setFormatter(formatter) self.logger.addHandler(fh) self.logger.setLevel(logging.INFO) self.logger.info("Collector Started") def getConfig(self, file = DEFAULT_CONFIG_FILE): configParser = SafeConfigParser() if not configParser.read(file): print "Config file \"", file, "\" not found.\nExiting..." self.logger.error("Config file \"" + file + "\" not found.\nExiting...") exit(0) #check config file for missing DB options if "db_info" not in configParser.sections(): print "Config filuser_ide missing database info.\nExiting..." self.logger.error("Config file missing database info.\nExiting...") exit(0) users = [] try: self.logger.info("Parsing config file: ' " + file + " '") sections = configParser.sections(); sections.remove("db_info") #get db info db_info = {"db_host": configParser.get("db_info", "db_host"), "db_user": configParser.get("db_info", "db_user"), "db_pass": configParser.get("db_info", "db_pass"),} #test db con self.sql = SQL(db_info['db_host'], db_info['db_user'], db_info['db_pass']) #get all users in config file for section in sections: users.append( {"name" : section, "con_key" : configParser.get(section, "con_key"), "con_secret" : configParser.get(section, "con_secret"), "key" : configParser.get(section, "key"), "secret" : configParser.get(section, "secret"), "lists" : configParser.get(section, "lists"), "db" : configParser.get(section, "db")} ) #test db self.sql.testDB(configParser.get(section, "db")) except Exception, e: print e, "\nPlease fix the config file.\nExiting..." self.logger.error(str(e) + "\nPlease fix the config file.\nExiting...") exit(0) return users
def getConfig(self, file=DEFAULT_CONFIG_FILE): configParser = SafeConfigParser() if not configParser.read(file): print "Config file \"", file, "\" not found.\nExiting..." self.logger.error("Config file \"" + file + "\" not found.\nExiting...") exit(0) #check config file for missing DB options if "db_info" not in configParser.sections(): print "Config filuser_ide missing database info.\nExiting..." self.logger.error("Config file missing database info.\nExiting...") exit(0) users = [] try: self.logger.info("Parsing config file: ' " + file + " '") sections = configParser.sections() sections.remove("db_info") #get db info db_info = { "db_host": configParser.get("db_info", "db_host"), "db_user": configParser.get("db_info", "db_user"), "db_pass": configParser.get("db_info", "db_pass"), } #test db con self.sql = SQL(db_info['db_host'], db_info['db_user'], db_info['db_pass']) #get all users in config file for section in sections: users.append({ "name": section, "con_key": configParser.get(section, "con_key"), "con_secret": configParser.get(section, "con_secret"), "key": configParser.get(section, "key"), "secret": configParser.get(section, "secret"), "lists": configParser.get(section, "lists"), "db": configParser.get(section, "db") }) #test db self.sql.testDB(configParser.get(section, "db")) except Exception, e: print e, "\nPlease fix the config file.\nExiting..." self.logger.error( str(e) + "\nPlease fix the config file.\nExiting...") exit(0)
def config():
    """One-time setup: create DB tables, seed them with tweets from a fixed
    set of accounts, then hand control to the scheduler."""
    db_cfg = Login_Settings.database
    tw_cfg = Login_Settings.twitter
    sql = SQL(host=db_cfg['HOST'],
              port=db_cfg['PORT'],
              username=db_cfg['DB_USERNAME'],
              password=db_cfg['DB_PASSWORD'],
              database=db_cfg['DATABASE'])
    # Creates twitter_bot that connects to twitter account
    bot = Bot(consumer_key=tw_cfg['CONSUMER_KEY'],
              consumer_secret=tw_cfg['CONSUMER_SECRET'],
              access_key=tw_cfg['ACCESS_TOKEN'],
              access_secret=tw_cfg['ACCESS_SECRET'])
    sql.table_setup()
    # Inserts initial dataset into database (500 per twitter account).
    # Fetch order preserved from the original implementation.
    fetch = bot.get_user_tweets
    inspire_list = fetch("inspire_us", "inspired") + fetch("InspowerMinds", "inspired")
    weather_list = fetch("NWS", "weather") + fetch("weatherchannel", "weather")
    horoscope_list = fetch("ZodiacFacts", "horoscope")
    romance_list = (fetch("romntic_quotes", "flirt")
                    + fetch("LikeLoveInLove", "flirt")
                    + fetch("RomanticcQuotes", "flirt"))
    sql.insertion_tweet(inspire_list + horoscope_list + weather_list + romance_list)
    sql.close()
    Scheduler_Initializer.initialize()
async def member_handler(message: types.ChatMember):
    """Handle chat-member updates: greet returning chats, register new ones,
    and persist any state change returned by the log helper."""
    global logging
    try:
        db = SQL(db_path)
        text, keyboard = None, None
        user = db.get_user(message['chat']['id'])
        # FIX: reuse `user` instead of a second identical db.get_user() call.
        log_text, update, greeting = Auth.logs.chat_member(message, user)
        if greeting:
            text = 'Добро пожаловать, снова'
        if user is None:
            await asyncio.sleep(1)
            text, keyboard = first_start(message)
        if message['chat']['type'] == 'channel':
            # Channels never receive a text reply.
            text = None
        logging.append(log_text)
        # FIX: side-effecting conditional expression replaced by a statement.
        if update:
            db.update('users', message['chat']['id'], update)
        keyboard = keyboard if message['chat']['type'] != 'channel' else None
        await Auth.async_message(bot.send_message,
                                 id=message['chat']['id'],
                                 text=text,
                                 keyboard=keyboard)
        db.close()
    except Exception:
        # BUG FIX: was `except IndexError and Exception:` -- that expression
        # evaluates to just `Exception`, so this is the actual behavior.
        await Auth.dev.async_except(message)
async def repeat_channel_messages(message: types.Message):
    """Forward fresh battle digests from the digest channel to all users;
    in ordinary chats, register first-time users."""
    global start_message
    try:
        if str(message['chat']['id']) == digest_channel:
            battle = None
            search = re.search(r'Битва (\d{2}/\d{2}/\d{4} \d{2}:\d{2})', message['text'])
            if search:
                battle_stamp = objects.stamper(search.group(1), delta=3,
                                               pattern='%d/%m/%Y %H:%M')
                # Only forward if both the battle time and the message itself
                # are less than 30 minutes old.
                if (time_now() - battle_stamp) < 1800 and (
                        time_now() - dict(message).get('date')) < 1800:
                    battle = Auth.time(battle_stamp, form='normal', sep='/',
                                       tag=italic, seconds=None)
            if battle:
                db = SQL(db_path)
                users = db.get_users()
                stamp = datetime.now(tz).timestamp()
                db.close()
                coroutines = [
                    sender(message, user=user, func=bot.forward_message, id=user['id'])
                    for user in users
                ]
                await asyncio.gather(*coroutines)
                text = bold(
                    chats_to_human(len(users),
                                   round(datetime.now(tz).timestamp() - stamp, 3)))
                start_message = Auth.message(old_message=start_message,
                                             text=f"\n\nСводки {battle}:\n{text}")
        else:
            if str(message['chat']['id']) not in channels:
                db = SQL(db_path)
                text, log_text = None, None
                user = db.get_user(message['chat']['id'])
                if user is None:
                    text, log_text = first_start(message), ' [#Впервые]'
                await sender(message, user=user, text=text, log_text=log_text)
                db.close()
    except Exception:
        # BUG FIX: was `except IndexError and Exception:`, which evaluates
        # to plain `except Exception:` -- now written explicitly.
        await Auth.dev.async_except(message)
def google_files():
    """Background worker: every 5 seconds, re-download Drive images whose
    source file changed since the stored last_update timestamp."""
    while True:
        try:
            # NOTE(review): a new SQL handle is opened each pass and never
            # closed -- confirm SQL() releases resources on GC.
            db = SQL(db_path)
            files = drive_client.files(parents=main_folder)
            for file in files:
                image = db.get_image_by_id(file['id'])
                if image:
                    if image['last_update'] < file['modifiedTime']:
                        drive_client.download_file(file_id=file['id'],
                                                   file_path=image['path'])
                        db.update('images', file['id'],
                                  {'last_update': file['modifiedTime']})
            sleep(5)
        except Exception:
            # BUG FIX: was `except IndexError and Exception:` == `except Exception:`.
            Auth.dev.thread_except()
def run(self, code):
    """Strip embedded SQL insertions out of mixed source text.

    Scans `code` line by line; text between self.sql_insertion_tag and
    self.sql_insertion_end_tag (possibly spanning lines) is collected and,
    unless the opening tag was preceded by '#', executed via SQL(...).
    Everything outside the tags is accumulated and returned as plain Python.

    Raises NameError if an opening tag has no matching end tag.
    """
    self.python = ''
    code = code.splitlines(True)
    i = 0
    while i < len(code):
        # Read each line and identify sql insertions
        line = code[i]
        index = line.find(self.sql_insertion_tag)
        if index > -1:
            # Handle insertion: keep text before the tag, start collecting SQL.
            self.python += line[0:index]
            sql_insert = line[index + len(self.sql_insertion_tag):line.find(self.sql_insertion_end_tag)].strip() + ' '
            # True when the char immediately before the tag is '#' (commented-out SQL).
            commented_sql_insert = line[line.find(self.sql_insertion_tag) - 1:line.find(self.sql_insertion_tag)] == '#'
            index = line.find(self.sql_insertion_end_tag)
            if index == -1:
                # End tag not on this line: advance before entering the scan loop.
                i += 1
                line = code[i]
                index = line.find(self.sql_insertion_end_tag)
                while not index > -1:
                    # Read insertion until EndTag
                    sql_insert += line.strip() + ' '
                    i += 1
                    if i == len(code):
                        raise NameError('SQL: No end sql tag found.')
                    line = code[i]
                    index = line.find(self.sql_insertion_end_tag)
            # Modify database by SQL statement (skipped if the tag was commented out).
            if not commented_sql_insert:
                SQL(self, self.database, sql_insert.strip())
            # Keep whatever follows the end tag on the closing line.
            self.python += line[index + len(self.sql_insertion_end_tag):]
        else:
            # Passively handle python text
            self.python += line
        i += 1
    return self.python
def google_update():
    """Background worker: flush users flagged with pending `updates` to the
    'UNITED USERS' Google sheet, growing the sheet when needed."""
    global google_users_ids
    while True:
        try:
            sleep(2)
            db = SQL(db_path)
            users = db.get_updates()
            if len(users) > 0:
                client = gspread.service_account('google.json')
                worksheet = client.open('UNITED USERS').worksheet(
                    os.environ['folder'])
                for user in users:
                    del user['updates']
                    if str(user['id']) in google_users_ids:
                        text = 'обновлен'
                        row = google_users_ids.index(str(user['id'])) + 1
                    else:
                        text = 'добавлен'
                        row = len(google_users_ids) + 1
                        google_users_ids.append(str(user['id']))
                    google_row = f'A{row}:{ascii_uppercase[len(user)-1]}{row}'
                    try:
                        user_range = worksheet.range(google_row)
                    except Exception as error:
                        # BUG FIX: was `except IndexError and Exception as error:`
                        # which binds plain Exception -- now explicit.
                        if 'exceeds grid limits' in str(error):
                            worksheet.add_rows(1000)
                            user_range = worksheet.range(google_row)
                            sleep(5)
                        else:
                            raise error
                    for index, value, col_type in zip(range(len(user)),
                                                      user.values(),
                                                      users_columns):
                        # Dates are serialized to ISO; None becomes the string 'None'.
                        value = Auth.time(value, form='iso', sep='_') if '<DATE>' in col_type else value
                        value = 'None' if value is None else value
                        user_range[index].value = value
                    worksheet.update_cells(user_range)
                    db.update('users', user['id'], {'updates': 0}, True)
                    Auth.dev.printer(f"Пользователь {text} {user['id']}")
                    sleep(1)
        except Exception:
            # BUG FIX: was `except IndexError and Exception:`.
            Auth.dev.thread_except()
def users_db_creation():
    """Build the local 'users' table from the Google sheet.

    Returns a tuple (zero_user, id_column_list, columns) where the id list
    always starts with the literal 'id' column name.
    """
    database = SQL(db_path)
    account = gspread.service_account('google.json')
    sheet = account.open('UNITED USERS').worksheet(os.environ['folder'])
    rows = sheet.get('A1:Z50000', major_dimension='ROWS')
    header = rows.pop(0)
    table_columns = database.create_table('users', header, additional=True)
    uploaded_ids, columns = database.upload('users', table_columns, rows)
    zero_user = database.get_user(0)
    database.close()
    return zero_user, ['id', *uploaded_ids], columns
async def sender(message,
                 user,
                 text=None,
                 keyboard=None,
                 log_text=None,
                 **a_kwargs):
    """Send (or forward) a message to `user`, log the result and persist
    any DB update returned by the transport helper.

    a_kwargs may carry 'func' to override the default bot.send_message and
    is merged last into the transport kwargs, so it wins on key conflicts.
    Returns the transport response.
    """
    global logging
    # '[#Впервые]' in the incoming log marker means first contact -> dump log.
    dump = True if 'Впервые' in str(log_text) else None
    task = a_kwargs['func'] if a_kwargs.get('func') else bot.send_message
    kwargs = {
        'log': log_text,
        'text': text,
        'user': user,
        'message': message,
        'keyboard': keyboard,
        **a_kwargs
    }
    # log_text is rebound here to whatever the transport helper produced.
    response, log_text, update = await Auth.async_message(task, **kwargs)
    if log_text is not None:
        logging.append(log_text)
        # NOTE(review): grouping reconstructed -- the dump branch is assumed
        # to run only when a log line exists; confirm against the original.
        if dump:
            head, _, _ = Auth.logs.header(Auth.get_me)
            await Auth.async_message(bot.send_message,
                                     id=Auth.logs.dump_chat_id,
                                     text=f'{head}{log_text}')
    if update:
        db = SQL(db_path)
        db.update('users', user['id'], update)
        db.close()
    return response
async def repeat_all_messages(message: types.Message):
    """Catch-all text handler: admin commands for the owner chat,
    first-start registration for unknown chats."""
    try:
        db = SQL(db_path)
        user = db.get_user(message['chat']['id'])
        text, log_text = None, True
        if user:
            # Admin-only commands (owner chat).
            if message['chat']['id'] == idMe:
                if message['text'].lower().startswith('/logs'):
                    text = Auth.logs.text()
                elif message['text'].lower().startswith('/reboot'):
                    text, log_text = Auth.logs.reboot(dispatcher)
                elif message['text'].lower().startswith('/reg'):
                    text = '✅'
        else:
            log_text = ' [#Впервые]'
            text = first_start(message)
        await sender(message, user, text=text, log_text=log_text)
        db.close()
    except Exception:
        # BUG FIX: was `except IndexError and Exception:` == `except Exception:`.
        await Auth.dev.async_except(message)
async def red_messages(message: types.Message):
    """Service-message handler: when a chat migrates (group -> supergroup),
    mask the username and mark the user so delivery stops."""
    try:
        db = SQL(db_path)
        text, keyboard = None, None
        user = db.get_user(message['chat']['id'])
        if user and message['migrate_to_chat_id']:
            db.update('users', user['id'], {
                'username': '******',
                'reaction': '🅾️'
            })
            await sender(message, user, text, keyboard, log_text=True)
        db.close()
    except Exception:
        # BUG FIX: was `except IndexError and Exception:` == `except Exception:`.
        await Auth.dev.async_except(message)
def first_start(message):
    """Register a brand-new chat in the users table.

    Clones the template `zero_user`, fills in identity fields parsed from
    the chat object, and returns the welcome text plus the folders keyboard.
    """
    database = SQL(db_path)
    new_user = deepcopy(zero_user)
    chat_data = message['chat'].to_python()
    _, name, username = Auth.logs.header(chat_data)
    new_user.update({
        'id': message['chat']['id'],
        'name': name,
        'username': username
    })
    database.create_user(new_user)
    database.close()
    return 'Добро пожаловать', keys.folders()
def __init__(self):
    """Main-window setup: build the UI, wire mode/speed buttons and
    connect to the Arduino and the database (legacy PyQt4 SIGNAL API)."""
    QMainWindow.__init__(self)
    self.ventanita = Ui_MainWindow()
    self.ventanita.setupUi(self)
    # Manual mode enables the speed buttons, auto mode disables them.
    self.connect(self.ventanita.radioManual, SIGNAL('clicked()'),
                 self.enableButtons)
    self.connect(self.ventanita.radioAuto, SIGNAL('clicked()'),
                 self.disableButtons)
    self.timer = QTimer()
    # Three fixed speed presets.
    self.connect(self.ventanita.BtnSpd1, SIGNAL('clicked()'), self.speedOne)
    self.connect(self.ventanita.BtnSpd2, SIGNAL('clicked()'), self.speedTwo)
    self.connect(self.ventanita.BtnSpd3, SIGNAL('clicked()'), self.speedThree)
    # Start in manual mode.
    self.ventanita.radioManual.setChecked(True)
    self.arduino = ArduinoSerial()
    self.database = SQL()
def getConfig(self, file = DEFAULT_CONFIG_FILE): configParser = SafeConfigParser() if not configParser.read(file): print "Config file \"", file, "\" not found.\nExiting..." self.logger.error("Config file \"" + file + "\" not found.\nExiting...") exit(0) #check config file for missing DB options if "db_info" not in configParser.sections(): print "Config file missing database info.\nExiting..." self.logger.error("Config file missing database info.\nExiting...") exit(0) users = [] try: self.logger.info("Parsing config file: ' " + file + " '") sections = configParser.sections(); sections.remove("db_info") #get db info db_info = {"db_host": configParser.get("db_info", "db_host"), "db_user": configParser.get("db_info", "db_user"), "db_pass": configParser.get("db_info", "db_pass"),} #test db con self.sql = SQL(db_info['db_host'], db_info['db_user'], db_info['db_pass']) #get all users in config file for section in sections: users.append( {"name" : section, "con_key" : configParser.get(section, "con_key"), "con_secret" : configParser.get(section, "con_secret"), "key" : configParser.get(section, "key"), "secret" : configParser.get(section, "secret"), "lists" : configParser.get(section, "follow"), "track" : configParser.get(section, "track"), "db" : configParser.get(section, "db")} ) #test db self.sql.testDB(configParser.get(section, "db")) except Exception, e: print e, "\nPlease fix the config file.\nExiting..." self.logger.error(str(e) + "\nPlease fix the config file.\nExiting...") exit(0)
def images_db_creation():
    """Mirror the configured Drive image folder into the local 'images'
    table and download every allowed file.

    Returns (sorted names, [(frame, label), ...], drive client, folder id).
    """
    _names = []
    _frames = []
    raw_frames = []
    folder_id = None
    db = SQL(db_path)
    client = Drive('google.json')
    db.create_table(
        'images', ['id <TEXT>', 'name', 'frame', 'path', 'last_update <DATE>'])
    # Optional whitelist of image base names, '/'-separated in the env var.
    allowed = os.environ['allowed'].split('/') if os.environ.get(
        'allowed') else []
    for folder in client.files(only_folders=True):
        if folder['name'] == os.environ.get('folder'):
            folder_id = folder['id']
    # NOTE(review): if no folder name matched, folder_id stays None here --
    # confirm client.files(parents=None) is the intended fallback.
    for file in client.files(parents=folder_id):
        name = re.sub(r'\.jpg', '', file['name'])
        if name in allowed or len(allowed) == 0:
            # File names encode both the frame (digits) and the pair (capitals).
            frame = int(re.sub('[^0-9]', '', name))
            name = re.sub('[^A-Z]', '', name)
            path = f"images/{file['name']}"
            db.create_image({
                'name': name,
                'path': path,
                'frame': frame,
                'id': file['id'],
                'last_update': file['modifiedTime']
            })
            # FIX: side-effecting conditional expressions
            # (`x.append(...) if cond else None`) replaced with statements.
            if name not in _names:
                _names.append(name)
            if frame not in raw_frames:
                raw_frames.append(frame)
            client.download_file(file['id'], path)
    for frame in sorted(raw_frames):
        text = '5 мин' if frame == 5 else '1 час'
        _frames.append((frame, text))
    db.close()
    return sorted(_names), _frames, client, folder_id
async def editor(call, user, text, keyboard, log_text=None):
    """Answer a callback query and edit the originating message in place,
    logging the result and persisting any returned DB update.

    Returns the transport response.
    """
    global logging
    await bot.answer_callback_query(call['id'])
    kwargs = {
        'log': log_text,
        'call': call,
        'text': text,
        'user': user,
        'keyboard': keyboard
    }
    # log_text is rebound to whatever the transport helper produced.
    response, log_text, update = await Auth.async_message(
        bot.edit_message_text, **kwargs)
    if log_text is not None:
        logging.append(log_text)
    if update:
        db = SQL(db_path)
        db.update('users', user['id'], update)
        db.close()
    return response
async def callbacks(call):
    """Route inline-keyboard presses: '<name>_<frame>' selects an image to
    send; otherwise a bare frame press switches the currency keyboard."""
    try:
        db = SQL(db_path)
        keyboard = call['message']['reply_markup']
        user = db.get_user(call['message']['chat']['id'])
        if user:
            text, log_text = None, None
            split = call['data'].split('_')
            if len(split) == 2:
                if split[0] in names:
                    image = db.get_image(name=split[0], frame=split[1])
                    await editor(call, user, text=text, keyboard=keyboard,
                                 log_text=log_text)
                    if image:
                        caption = None
                        # Owner chat also sees the file path and update time.
                        if call['message']['chat']['id'] == idMe:
                            last_update = Auth.logs.time(image['last_update'],
                                                         tag=bold, form='iso')
                            caption = f"Изображение: {bold(image['path'])}\nОбновлено: {last_update}"
                        await sender(call['message'], user,
                                     id=call['message']['chat']['id'],
                                     func=bot.send_photo,
                                     path=image['path'],
                                     caption=caption)
            else:
                for frame, f_text in frames:
                    if str(frame) in call['data']:
                        keyboard = keys.currencies(frame)
                        text = f'Выбран фрейм {f_text}, выберите валюту'
                await editor(call, user, text=text, keyboard=keyboard,
                             log_text=log_text)
        db.close()
    except Exception:
        # BUG FIX: was `except IndexError and Exception:` == `except Exception:`.
        await Auth.dev.async_except(call)
class DatabaseApi():
    """
    Contains the functions for all the database interactions necessary
    for the line to function.
    """

    def __init__(self, dbConfig):
        self.sql = SQL(**dbConfig)

    def submitTestRunData(self, testRunSummary, testMeasurementList,
                          stringDictionary, numericDictionary,
                          fileDictionary, dependencyDict, testRunID=None):
        """
        Submits the test run data to the database as a single transaction.

        Args:
        * testRunSummary (list): The summary of the run (each entry is a string)
        * testMeasurementList (list): The measurement data
        * stringDictionary (dict): Key,value pairs of string data
        * numericDictionary (dict): Key,value pairs of numeric data
        * fileDictionary (dict): Key,value pairs of fileID and its content
        * dependencyDict (dict): The subcomponent dependencies of the UUT
        * testRunID (str): Optional explicit ID; a uuid4 is generated if None.

        Returns: None
        """
        self.sql.conn()
        if testRunID is None:
            testRunID = str(uuid.uuid4())
        s, v = self.prepsForTestRun(testRunSummary, testRunID)
        if s != '': self.sql.execute(s, v)
        s, v = self.prepsForTestMeasurement(testMeasurementList, testRunID)
        if s != '': self.sql.execute(s, v)
        s, v = self.prepsForDictionary(stringDictionary, testRunID, 'StringDictionary')
        if s != '': self.sql.execute(s, v)
        s, v = self.prepsForDictionary(numericDictionary, testRunID, 'DoubleDictionary')
        if s != '': self.sql.execute(s, v)
        s, v = self.prepsForDictionary(fileDictionary, testRunID, 'FileDictionary')
        if s != '': self.sql.execute(s, v)
        s, v = self.prepsForDictionary(dependencyDict, testRunID, 'Components')
        if s != '': self.sql.execute(s, v)
        self.sql.commit()
        self.sql.close()

    def prepsForTestRun(self, testRunSummary, testRunID):
        """
        Prepares the sql statement and values vector for the submission of
        the test run summary. Returns a tuple (s, v).
        """
        s = 'INSERT INTO TestRun'
        s += ' (testRunID,SN,siteID,stationID,testSequenceID,startTimestamp,endTimestamp,lastTestEntered,isPass)'
        s += ' VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s);'
        v = [testRunID] + testRunSummary
        # This converts YYYMMDD-hhmmss to YYY-MM-DD hh:mm:ss,
        # if needed; leaves it the same otherwise.
        v[5] = pUtils.dateTimeToString(pUtils.stringToDateTime(v[5]), 1)
        v[6] = pUtils.dateTimeToString(pUtils.stringToDateTime(v[6]), 1)
        return s, v

    def prepsForTestMeasurement(self, measurementList, testRunID):
        """
        Prepares the sql statement and values vector for the submission of
        the measurement list. Returns ('', []) for an empty list.
        """
        if len(measurementList) == 0: return '', []
        s = 'INSERT INTO TestMeasurement'
        s += ' (testRunID,startTimestamp,endTimestamp,testName,testMeasurementName,dataType,stringMin,stringMeasurement,stringMax,doubleMin,doubleMeasurement,doubleMax,isPass)'
        s += ' VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)'
        s += ',(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)' * (len(measurementList) - 1)
        s += ';'
        v = []
        for measurement in measurementList:
            data = measurement
            if data[4] == 'numeric':
                # Numeric rows duplicate min/meas/max into the double columns.
                formattedMeasurement = [testRunID] + data[0:8] + data[5:8] + data[8:]
            else:
                # If dataType is string or something else just store it as string.
                formattedMeasurement = [testRunID] + data[0:8] + [0, 0, 0] + data[8:]
            # Timestamp normalization, same convention as prepsForTestRun.
            formattedMeasurement[1] = pUtils.dateTimeToString(pUtils.stringToDateTime(formattedMeasurement[1]), 1)
            formattedMeasurement[2] = pUtils.dateTimeToString(pUtils.stringToDateTime(formattedMeasurement[2]), 1)
            v += formattedMeasurement
        return s, v

    def prepsForDictionary(self, d, testRunID, tableName):
        """
        Prepares the sql statement and values vector for the submission of
        the given dictionary into `tableName`. Returns ('', []) when empty.
        """
        if len(d) == 0: return '', []
        s = 'INSERT INTO ' + tableName
        s += ' (testRunID,key,value)'
        s += ' VALUES (%s,%s,%s)'
        s += ',(%s,%s,%s)' * (len(d) - 1)
        s += ';'
        v = []
        for key in d:
            v += [testRunID] + [key] + [d[key]]
        return s, v

    def getNextTestSequenceID(self, SN, processID):
        """
        Route-control query: the test sequence ID that should be run on the
        UUT, or None if the unit is not in the system.
        """
        s = 'SELECT nextTestSequenceID FROM Transitions'
        s += ' WHERE SN = %s'
        s += ' AND processID = %s'
        # BUG FIX: leading space added -- this previously concatenated to
        # "... processID = %sORDER BY ...", an invalid statement.
        s += ' ORDER BY creationTimeStamp DESC Limit 1'
        s += ';'
        v = [SN, processID]
        t = self.sql.quickSqlRead(s, v)
        if len(t) == 1:
            return t[0][0]
        return None

    def addTransitions(self, entryList):
        """
        Route-control query: bulk-insert (SN, processID, nextTestSequenceID)
        entries into the Transitions table. No-op on an empty list.
        """
        if len(entryList) == 0: return
        s = 'INSERT INTO Transitions'
        s += ' (SN,processID,nextTestSequenceID)'
        s += ' VALUES (%s,%s,%s)'
        s += ',(%s,%s,%s)' * (len(entryList) - 1)
        s += ';'
        v = [item for entry in entryList for item in entry]
        self.sql.quickSqlWrite(s, v)

    def getSubcomponentData(self, SN, testSequenceID):
        """
        Retrieves the subcomponents for the specified UUT.

        NOTE(review): returns {} when nothing matched but a *list* of dicts
        otherwise -- kept as-is since callers may depend on it.
        """
        s = 'SELECT key,value FROM Components,TestRun'
        s += ' WHERE SN = %s'
        s += ' AND testSequenceID = %s'
        s += ' AND Components.testRunID = TestRun.testRunID'
        s += ';'
        v = [SN, testSequenceID]
        t = self.sql.quickSqlRead(s, v)
        if len(t) == 0:
            return {}
        d = []
        for key, value in t:
            d.append({'processID': key, 'SN': value})
        return d

    def createDatabase(self, databaseName, owner):
        """
        Creates a database with the given name.

        NOTE(review): `databaseName` is interpolated directly into the
        statement (identifiers cannot be parameterized) -- only call with
        trusted input. `owner` is currently unused (owner is hard-coded
        to postgres).
        """
        v = []
        s = 'CREATE DATABASE "%s"' % databaseName
        s += '\n WITH OWNER = postgres'
        #v.append(owner)
        s += '\n ENCODING = %s'
        v.append('UTF8')
        s += '\n TABLESPACE = pg_default'
        s += '\n LC_COLLATE = %s'
        v.append('en_US.UTF-8')
        s += '\n LC_CTYPE = %s'
        v.append('en_US.UTF-8')
        s += '\n CONNECTION LIMIT = -1;'
        # CREATE DATABASE cannot run inside a transaction -> autocommit.
        from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
        self.sql.conn()
        self.sql.cnx.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        self.sql.execute(s, v)
        self.sql.close()

    def setupMTPdatabase(self):
        """
        Assumes it is connected to an empty database and sets it up as an
        MTP database by replaying scripts/initDB.sql.
        """
        fileFullPath = os.path.join(os.environ['MTP_TESTSTATION'], 'MTP',
                                    'scripts', 'initDB.sql')
        s = pUtils.quickFileRead(fileFullPath)
        v = []
        self.sql.quickSqlWrite(s, v)

    def getTestRunIDlist(self, startRange=None, endRange=None):
        """
        Returns a list of Test Run ID's for the given (open-ended) range
        of endTimestamp values.
        """
        def sqlAppend(first, noFirst, isFirst):
            # First predicate gets WHERE, subsequent ones get AND.
            if isFirst:
                return '\n ' + first
            return '\n ' + noFirst

        isFirst = True
        v = []
        s = 'SELECT testRunID from TestRun'
        if startRange is not None:
            s += sqlAppend('WHERE ', 'AND ', isFirst) + 'endTimestamp > %s'
            v.append(startRange)
            isFirst = False
        if endRange is not None:
            s += sqlAppend('WHERE ', 'AND ', isFirst) + 'endTimestamp < %s'
            v.append(endRange)
            isFirst = False
        s += '\n ;'
        t = self.sql.quickSqlRead(s, v)
        t = [item[0] for item in t]
        return t
# Entry-point script: parse CLI args, load instructions and execute them
# against the in-memory SQL engine.
import argparse

parser = argparse.ArgumentParser(description='parse input args')
parser.add_argument('--data_folder',
                    default='./data',
                    help='specify data folder where .txt files are located')
parser.add_argument('--instruction_path',
                    default='./data/instructions.txt',
                    help='instruction.txt file')
args = parser.parse_args()
data_folder = args.data_folder
instruction_path = args.instruction_path
IO = IOModule()
sql = SQL()
instruction_list = IO.load_instructions(instruction_path)
# NOTE(review): `parser` is rebound here, shadowing the ArgumentParser above.
parser = InstructionParser(data_folder, sql)
for instruct in instruction_list:
    print('exec: ' + instruct)
    table = parser.read_instruct(instruct)
    if table is not None:
        # Register the produced table under its name for later instructions.
        sql.table_dict[table.name] = table
print("done.")
# NOTE(review): the triple-quoted example block below is not terminated in
# this chunk -- it presumably closes beyond this view.
'''
R = IO.load_data('./data/sales1.txt', name='R')
R1 = sql.select(R, [], [['time', '>', '50']], [['qty', '<', '30']], name='R1')
R2 = sql.select(R1, ['saleid', 'qty', 'pricerange'], [], [], 'R2')
R3 = sql.avg(R1, 'qty', 'R3')
# List of General_Post functions (update as you create functions) post_list = info # Tracker that is set to the last time bot posted, does not include interactions # Creates twitter_bot that connects to twitter account bot = Bot(consumer_key=Login_Settings.twitter['CONSUMER_KEY'], consumer_secret=Login_Settings.twitter['CONSUMER_SECRET'], access_key=Login_Settings.twitter['ACCESS_TOKEN'], access_secret=Login_Settings.twitter['ACCESS_SECRET']) # Connects to SQL database sql = SQL(host=Login_Settings.database['HOST'], port=Login_Settings.database['PORT'], username=Login_Settings.database['DB_USERNAME'], password=Login_Settings.database['DB_PASSWORD'], database=Login_Settings.database['DATABASE']) # Creates empty Markov chain for later use chain = Chain(chars=chars, tries=tries, ratio=ratio, tweet_list=["dummy info \n"]) while True: # Gets all the times that the bot is mentioned mentions = bot.get_mentions(latest_mention) # Only runs if there are any new mentions if len(mentions) > 0: for mention in mentions:
from SQL import SQL, SQLReturn
from SQLAppendices import Table, Data

# Connection settings (placeholders; real credentials are supplied elsewhere).
SERVER = '99.999.999.99'
DATABASE = 'NAME_OF_DATABASE'
USERNAME = '******'
PASSWORD = '******'
sqlConn = SQL(SERVER, DATABASE, USERNAME, PASSWORD)


# something more beautiful
class ClassToGetData:
    """Plain value holder for one result row (example1, example2)."""

    def __init__(self, example1, example2):
        self.example1 = example1
        self.example2 = example2


class FatherClass:
    """Demonstrates the callback-based query API: each result row is turned
    into a ClassToGetData and appended to self.dataList."""

    def Callback(self, father, sqlReturn):
        # Called once per row; `father` is the owning instance.
        value = ClassToGetData(sqlReturn.getDataByName('example1'),
                               sqlReturn.getDataByName('example2'))
        father.dataList.append(value)

    def LoadExample(self):
        # Reset the accumulator, then stream rows through Callback.
        self.dataList = []
        self.sqlConnection.RunQuery('select example1, example2 from t_table',
                                    self.Callback)

    def __init__(self):
        self.sqlConnection = SQL(SERVER, DATABASE, USERNAME, PASSWORD)
def __init__(self):
    """Open the SQL connection, register this instance as the connection's
    callback owner, and immediately load the example data set."""
    self.sqlConnection = SQL(SERVER, DATABASE, USERNAME, PASSWORD)
    # The connection keeps a back-reference so row callbacks can reach us.
    self.sqlConnection.father = self
    self.LoadExample()
def push(dirFullPath):
    """Push one test-run directory into the database as a single transaction.

    Each known artifact file in `dirFullPath` is converted to an INSERT via
    its prep function; empty statements (missing/empty artifacts) are skipped.

    Args:
    * dirFullPath (str): directory holding the test-run artifact files.

    Returns: None
    """
    sql = SQL()
    sql.conn()
    testRunID = str(uuid.uuid4())
    # FIX: five copy-pasted stanzas replaced by a data-driven loop,
    # preserving the original file/prep order exactly.
    steps = [
        ('TestRun.csv', prepsForTestRun),
        ('TestMeasurement.csv', prepsForTestMeasurement),
        ('StringDictionary.json', prepsForDictionary),
        ('DoubleDictionary.json', prepsForDictionary),
        ('FileDictionary.json', prepsForDictionary),
    ]
    for fileName, prep in steps:
        s, v = prep(os.path.join(dirFullPath, fileName), testRunID)
        if s != '':
            sql.execute(s, v)
    sql.commit()
    sql.close()
async def repeat_all_messages(message: types.Message):
    """Catch-all text handler: slash commands (admin set for the owner chat),
    'f'/'h' shortcuts, first-start registration, and dump-forwarding of
    anything unrecognized."""
    try:
        db = SQL(db_path)
        user = db.get_user(message['chat']['id'])
        if user:
            keyboard = keys.folders()
            text, response, log_text = None, None, True
            if message['text'].startswith('/'):
                if message['text'].lower().startswith('/st'):
                    text = 'Добро пожаловать, снова'
                # Admin-only commands (owner chat).
                if message['chat']['id'] == idMe:
                    if message['text'].lower().startswith('/info'):
                        text = ''
                        images = db.get_images()
                        now = Auth.logs.time(form='iso', tag=bold)
                        for image in images:
                            name = re.sub('images/', '', image['path'])
                            last_update = Auth.logs.time(image['last_update'],
                                                         tag=bold, form='iso')
                            text += f'{name}: {last_update}\n'
                        text += f"{code('-' * 30)}\nСейчас: {now}"
                    elif message['text'].lower().startswith('/logs'):
                        text = Auth.logs.text()
                    elif message['text'].lower().startswith('/reboot'):
                        text, log_text = Auth.logs.reboot(dispatcher)
                    elif message['text'].lower().startswith('/new'):
                        response = True
                        await Auth.async_message(bot.send_photo,
                                                 id=message['chat']['id'],
                                                 path='images/alt.jpg')
                    elif message['text'].lower().startswith('/reload'):
                        text = 'Успешно отправлено:'
                        query = "SELECT id FROM users WHERE reaction = '✅' AND NOT id = 0"
                        users = db.request(query)
                        for target_user in users:
                            text += f"\n{target_user['id']}"
                            await Auth.async_message(bot.send_message,
                                                     id=target_user['id'],
                                                     text=bold('Бот обновлен'),
                                                     keyboard=keys.folders())
                    if message['text'].lower().startswith('/remove'):
                        await bot.send_message(
                            message['chat']['id'],
                            bold('Окей'),
                            reply_markup=types.ReplyKeyboardRemove(True),
                            parse_mode='HTML')
            elif message['text'].lower().startswith('f'):
                text = bold('Пример сообщения')
            elif message['text'].lower().startswith('h'):
                text = 'Выбор таймфрейма'
                keyboard = keys.frames()
            await sender(message, user, text, keyboard, log_text=log_text)
            if text is None and response is None:
                # Unrecognized input: forward it to the dump chat.
                task = Auth.async_bot.forward_message
                await Auth.logs.async_message(task,
                                              id=Auth.logs.dump_chat_id,
                                              message=message)
        else:
            text, keyboard = first_start(message)
            await sender(message, user, text, keyboard, log_text=' [#Впервые]')
        db.close()
    except Exception:
        # BUG FIX: was `except IndexError and Exception:` == `except Exception:`.
        await Auth.dev.async_except(message)
def __init__(self):
    """Initialize both base classes explicitly.

    NOTE(review): explicit base-class __init__ calls instead of super();
    fine for this diamond-free case, but confirm the MRO if more bases
    are ever added.
    """
    basics.__init__(self)
    SQL.__init__(self)
def convert_to_sql(self):
    """Emit a SQL file generated from this object's CSV description."""
    generator = SQL(self.file_name)
    generator.generate_from_csv(separator=self.separator,
                                header_line=self.header_line,
                                data_lines=self.data_lines)
def __init__(self, dbConfig):
    """Create the database handle.

    Args:
    * dbConfig (dict): connection settings forwarded verbatim to SQL(...).
    """
    self.sql = SQL(**dbConfig)
@app.after_request
def after_request(response):
    """Disable client-side caching on every response."""
    response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
    # NOTE(review): header value is an int; relies on str() coercion by the
    # framework -- confirm, or use the string "0".
    response.headers["Expires"] = 0
    response.headers["Pragma"] = "no-cache"
    return response


# Configure session to use filesystem (instead of signed cookies)
app.config["SESSION_FILE_DIR"] = mkdtemp()
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)

# Configure SQL Library to use SQLite database
db = SQL("sqlite:///NOTME.db")


@app.route("/")
def index():
    # if session is not set, return index, else send to dashboard
    if not session:
        return render_template("index.html")
    return redirect("/dashboard")


@app.route("/dashboard", methods=["GET", "POST"])
@login_required
def data():
    """Dashboard view. NOTE(review): continues beyond this chunk."""
    # check for infection
    infected(session["user_id"])
class Collector(): DEFAULT_CONFIG_FILE = "collector.config" def __init__(self): #setup logger self.setupLogger() def setupLogger(self): logging.basicConfig(filename="collector.log") self.logger = logging.getLogger('Collector') fh = logging.FileHandler("collector.log") formatter = logging.Formatter('[%(asctime)s]:') fh.setFormatter(formatter) self.logger.addHandler(fh) self.logger.setLevel(logging.INFO) self.logger.info("Collector Started") def getConfig(self, file=DEFAULT_CONFIG_FILE): configParser = SafeConfigParser() if not configParser.read(file): print "Config file \"", file, "\" not found.\nExiting..." self.logger.error("Config file \"" + file + "\" not found.\nExiting...") exit(0) #check config file for missing DB options if "db_info" not in configParser.sections(): print "Config filuser_ide missing database info.\nExiting..." self.logger.error("Config file missing database info.\nExiting...") exit(0) users = [] try: self.logger.info("Parsing config file: ' " + file + " '") sections = configParser.sections() sections.remove("db_info") #get db info db_info = { "db_host": configParser.get("db_info", "db_host"), "db_user": configParser.get("db_info", "db_user"), "db_pass": configParser.get("db_info", "db_pass"), } #test db con self.sql = SQL(db_info['db_host'], db_info['db_user'], db_info['db_pass']) #get all users in config file for section in sections: users.append({ "name": section, "con_key": configParser.get(section, "con_key"), "con_secret": configParser.get(section, "con_secret"), "key": configParser.get(section, "key"), "secret": configParser.get(section, "secret"), "lists": configParser.get(section, "lists"), "db": configParser.get(section, "db") }) #test db self.sql.testDB(configParser.get(section, "db")) except Exception, e: print e, "\nPlease fix the config file.\nExiting..." self.logger.error( str(e) + "\nPlease fix the config file.\nExiting...") exit(0) return users