def main():
    """Run the boss dispatcher forever, polling the queue once a minute."""
    worker_index = 0
    while True:
        database = DB('..\\aip.db')
        worker_index = run_boss(database, worker_index)
        print("Sleeping for 1 min")
        # Close the connection before sleeping so the DB file isn't held open.
        database.close_connection()
        time.sleep(60)
        print('Waking...')
def __init__(self, access_token, database_url):
    """Wire up the TVMaze client, the database, and all Telegram handlers."""
    self.api = pytvmaze.TVMaze()
    self.db = DB(database_url)
    self.access_token = access_token
    self.updater = Updater(token=self.access_token, use_context=True)

    # Registration order matters to the dispatcher: the catch-all unknown
    # handler must come last. Order below matches the original add sequence.
    handlers = [
        CommandHandler("start", self.start),
        CommandHandler("search", self.search),
        CallbackQueryHandler(self.add_show, pattern="add_show_*"),
        CallbackQueryHandler(self.remove_show, pattern="remove_show_*"),
        CallbackQueryHandler(self.show_picked, pattern="show_picked_*"),
        CommandHandler("list", self.list_favorites),
        MessageHandler(Filters.command, self.unknown),
    ]
    for handler in handlers:
        self.updater.dispatcher.add_handler(handler)
def main():
    """
    Continuously run worker.

    Get single file assigned to this worker and send it to processing.
    Each iteration: fetch one queued file, log it, dispatch to either a
    client-specific processor (from ../config/client<NN>.ini) or the
    default process_file, then record start/end timestamps and sleep.
    """
    while True:
        db = DB('..\\aip.db')
        file = fetch_from_file_queue(db)
        if file:
            # fetch returns a list of rows; take the first row's id/client.
            file = file[0]
            dm_fileid = file[0]
            client_id = file[1]
            print('Inserting to file_process_log')
            insert_to_process_log(db, file)
            print('Deleting from file queue')
            delete_from_file_queue(db, dm_fileid)
            print('Starting file processing')
            start_time = datetime.now().strftime('%Y:%m:%d %H:%M:%S')
            print(start_time)
            # NOTE(review): string-built SQL condition; dm_fileid comes from
            # our own queue, but a parameterized query would be safer.
            update_condition = f'dm_fileid = {dm_fileid}'
            update_file_process_log(db, 'start_time', start_time, update_condition)
            client_config_path = f'../config/client{client_id}.ini'
            if os.path.exists(client_config_path):
                # Client has a custom processing script configured.
                parser.read(client_config_path)
                try:
                    client_process_script = parser.get('ACTIONS', 'process_file')
                    client_process_script = import_module(
                        f'process.{client_process_script}')
                    client_process_script.process_file(db, dm_fileid)
                except Exception as e:
                    # NOTE(review): appending '' to the row on failure looks
                    # like a marker for later inspection — confirm intent.
                    file.append('')
                    print(e)
            else:
                process_file(db, dm_fileid)
            # BUG FIX: end_time previously used '%Y:%m:%d :%H:%M:%S' (stray
            # " :" before the hour), inconsistent with start_time's format.
            # The +2h offset is kept as-is — presumably a timezone fudge;
            # TODO confirm and replace with an aware datetime.
            end_time = (datetime.now() + timedelta(hours=2)).strftime('%Y:%m:%d %H:%M:%S')
            update_file_process_log(db, 'end_time', end_time, update_condition)
        else:
            print(f'{NAME} could not fetch file from file_queue')
        print(f'{NAME} time to sleep for one minute')
        db.close_connection()
        time.sleep(60)
        print(f'{NAME} wake up time')
async def main():
    """Entry point: bring up the database, then run the leaderboard check."""
    db = DB()
    await db.start()
    await my_test_leaderboard()
def main():
    """Create the pipeline table and poll it forever for this worker's files."""
    db = DB('pipeline')
    name = 'worker01'
    # BUG FIX: the original DDL had a trailing comma after the last column
    # ("endtime char(20),"), which is a SQL syntax error.
    statement = '''
        create table pipeline(
            fileid int,
            clientid int,
            filename char(255),
            source char(50),
            worker char(20),
            starttime char(20),
            endtime char(20)
        );
    '''
    db.execute(statement)
    # BUG FIX: the original put the literal text f{name} into the SQL
    # (the string was not an f-string); this is now a real f-string with
    # the worker name quoted. NOTE(review): this statement is built but
    # never executed, matching the original — confirm whether an
    # db.execute(statement) call is missing.
    statement = f'''
        insert into pipeline values (
            123, 23, 'abc.txt', 'ftp', '{name}',
            '12:12:12 1:1:1', '13:12:12 1:1:1'
        );
    '''
    while True:
        db = DB('pipeline')
        files = get_files(db, name)
        print(files)
def __init__(self) -> None:
    """Load config and storage, then prepare empty slots for runtime services."""
    self.cfg = Config()
    self.db = DB()
    self.tx_storage = TxStorage()

    default_network = NetworkIdents.TESTNET_MULTICHAIN
    self.network_id = self.cfg.as_str('thorchain.network_id', default_network)
    logging.info(f'Starting Chaosnetleaders backend for network {self.network_id!r}')

    # These are created later during startup; None until then.
    self.scanner: Optional[TxScanner] = None
    self.thor: Optional[ThorConnector] = None
    self.value_filler: Optional[ValueFiller] = None
    self.api: Optional[API] = None
from helpers.db import DB db = DB('aip.db') try: statement = ''' insert into file_source select 1 as dm_fileid, 100 as client_id, 'file1.txt' as filename, 'ftp' as source union all select 2 as col1, 100 as col2, 'file2.txt' as col3, 'ftp' as col4 union all select 3 as col1, 100 as col2, 'file3.txt' as col3, 'ftp' as col4 union all select 4 as col1, 200 as col2, 'file4.txt' as col3, 'ftp' as col4 union all select 5 as col1, 200 as col2, 'file5.txt' as col3, 'ftp' as col4 union all select 6 as col1, 200 as col2, 'file6.txt' as col3, 'ftp' as col4 union all select 7 as col1, 200 as col2, 'file7.txt' as col4, 'ftp' as col5 union all select 8 as col1, 300 as col2, 'file8.txt' as col3, 'ftp' as col4 union all select 9 as col1, 300 as col2, 'file9.txt' as col3, 'ftp' as col4 union all select 10 as col1, 300 as col2, 'file10.txt' as col3, 'ftp' col4 union all select 11 as col1, 300 as col2, 'file11.txt' as col3, 'ftp' as col4 union all select 12 as col1, 300 as col2, 'file12.txt' as col3, 'ftp' as col4 union all select 13 as col1, 300 as col2, 'file13.txt' as col3, 'ftp' as col4 union all
# NOTE(review): this chunk reads like two scripts pasted together — the
# unconditional `while True` loop never terminates, so the
# `if __name__ == '__main__'` guard below it is unreachable. Structure is
# preserved as-is; confirm against the original file layout.
# BUG FIX: the insert statement previously contained the literal text
# f{name} (the string was not an f-string); it is now a real f-string
# with the worker name quoted.
statement = f'''
    insert into pipeline values (
        123, 23, 'abc.txt', 'ftp', '{name}',
        '12:12:12 1:1:1', '13:12:12 1:1:1'
    );
'''
while True:
    db = DB('pipeline')
    files = get_files(db, name)
    print(files)

if __name__ == '__main__':
    db = DB('data_source')
    try:
        output = db.execute('''
            select * from data_source;
        ''')
        print(output)
    except Exception as e:
        print(e)
    # main()