def start(message):
    """Send the user their personal keyboard together with its greeting text.

    `utils.keyboard` builds both the reply markup and the message text
    for the given Telegram user id.
    """
    markup, greeting = utils.keyboard(message.from_user.id)
    bot.send_message(message.chat.id, text=greeting, reply_markup=markup)
def anketa_skip_comment(bot, update, user_data):
    """Finish the questionnaire without a free-text comment.

    Echoes the collected name and rating back to the user as HTML and
    terminates the conversation.
    """
    # Register/fetch the sender in the database (needed for its side effect).
    get_or_create_user(db, update.effective_user, update.message)
    summary = """ <b>Имя Фамилия:</b> {anketa_name} <b>Оценка:</b> {anketa_rating}""".format(**user_data)
    update.message.reply_text(summary, parse_mode=ParseMode.HTML, reply_markup=keyboard())
    return ConversationHandler.END
def start(bot, update):
    """Send a message when the command /start is issued."""
    # Register/fetch the sender in the database (side effect only).
    get_or_create_user(db, update.effective_user, update.message)
    greeting = 'Вызван /start, ВНИМАНИЕ БОТ НЕ ЗАСТРАХОВАН ОТ ВВОДА ВСЯКОЙ ХУЙНИ!'
    # Seed this chat with a fresh copy of the prepared master city list,
    # so every /start begins the game from scratch for that chat.
    game_dict[update.message.chat.id] = ORIG_CITIES.copy()
    update.message.reply_text(greeting, reply_markup=keyboard())
def draw_board(bot, update):
    """Send the current 3x3 board to the chat, one message per row."""
    global board
    # The board is a flat 9-cell sequence; rows begin at offsets 0, 3, 6.
    for row_start in (0, 3, 6):
        line = "| {} | {} | {} |".format(
            board[row_start], board[row_start + 1], board[row_start + 2]
        )
        update.message.reply_text(line, reply_markup=keyboard())
def consolidate_results(n_workers, task_iterator, fname, result_locks, debug=False):
    """Merge results from the main, incomplete, and per-worker HDF5 files into one.

    Collects the '/results' frames from `fname`, its `.incomplete.h5` sibling,
    and one `.worker_<id>.h5` file per entry in `result_locks` (worker files are
    read under their lock and moved to `.bak` backups). Rows are grouped by
    'seed': trials whose row count matches `task_iterator.n_tasks_per_trial()`
    are written back to `fname`, the remainder to the `.incomplete.h5` file,
    after which the backups are deleted.

    Parameters:
        n_workers: number of workers (not read directly here; worker files are
            enumerated from `result_locks` instead).
        task_iterator: provides n_tasks_per_trial(), the expected rows per seed.
        fname: path of the consolidated '.h5' results file.
        result_locks: one lock per worker guarding that worker's record file.
        debug: when True, print a summary of the incomplete-results file.
    """
    # Create a helper for extracting files and making backups
    backups, all_results = [], []

    def _append_results(fname, move_existing=True, all_results=all_results,
                        backups=backups, lock=None):
        # Read '/results' from `fname` (if present) into all_results, optionally
        # renaming the file to '<fname>.bak' afterwards. When `lock` is given the
        # whole read/move happens while holding it.
        if os.path.exists(fname):
            if (lock is None) or (lock.acquire()):
                # Get results if they exist
                store = pd.HDFStore(fname)
                if '/results' in store.keys():
                    all_results.append(store['results'])
                store.close()
                # Move the file to a backup if required
                if move_existing:
                    bak_fname = fname + '.bak'
                    # print('\nDoes %s already exist? %r\n' % (bak_fname,os.path.exists(bak_fname)))
                    # if os.path.exists(bak_fname):
                    #     print('\'%s\' already exists.' % bak_fname)
                    #     os.remove(bak_fname)
                    try:
                        os.rename(fname, bak_fname)
                    except Exception as e:
                        # A stale backup blocks the rename (e.g. on Windows);
                        # drop it and retry once.
                        os.remove(bak_fname)
                        os.rename(fname, bak_fname)
                    backups.append(bak_fname)
                # Release the lock if one was specified
                if not (lock is None):
                    lock.release()

    # Get existing completed results
    _append_results(fname, move_existing=False)
    # Get existing incomplete results
    i_fname = fname.replace('.h5', '.incomplete.h5')
    _append_results(i_fname, move_existing=False)
    # Get results from each worker's record file
    for wid, lock in enumerate(result_locks):
        w_fname = fname.replace('.h5', '.worker_%d.h5' % wid)
        _append_results(w_fname, lock=lock)
    # Exit if there are no results
    if len(all_results) == 0:
        return
    # Add the results to a single, new dataframe
    df = pd.concat(all_results, ignore_index=True)
    # Split the results into samples for completed and incompleted trials:
    # a seed is "complete" when it contributed exactly n_tasks_per_trial rows.
    gb = df.groupby(['seed']).size() == task_iterator.n_tasks_per_trial()
    gb = gb.reset_index(level=['seed'])
    gb = gb.rename(columns={0: '_is_complete'})
    df = df.merge(gb, on=['seed'])
    cresults = df.loc[df._is_complete, df.columns != '_is_complete']
    iresults = df.loc[~df._is_complete, df.columns != '_is_complete']
    # Duplicate (seed, name, tid, pid) rows among the incomplete trials indicate
    # corrupted/overlapping records: dump them to a '.reference.h5' file and stop
    # for inspection instead of saving.
    if any(iresults.groupby(['seed', 'name', 'tid', 'pid']).size() > 1):
        print([len(v) for v in all_results])
        with pd.HDFStore(i_fname.replace('incomplete', 'reference')) as store:
            print(i_fname.replace('incomplete', 'reference'))
            store.put('results', iresults)
        # NOTE(review): utils.keyboard() appears to drop into an interactive
        # debug shell here — confirm this is intended outside of debugging.
        utils.keyboard()
    else:
        # Save completed trials
        if len(cresults) > 0:
            with pd.HDFStore(fname) as store:
                store.put('results', cresults)
        # Save incomplete trials
        if len(iresults) > 0:
            with pd.HDFStore(i_fname) as store:
                store.put('results', iresults)
        else:
            # No incomplete rows remain, so a stale incomplete file is obsolete.
            if os.path.exists(i_fname):
                os.remove(i_fname)
        # Delete backups
        for bak in backups:
            os.remove(bak)
    # Print a debug message if requred
    if debug:
        print('...............................................')
        # with pd.HDFStore(fname) as store:
        #     if '/results' in store.keys():
        #         print('Completed results:')
        #         print( store['results'].groupby('seed').size())
        #     else:
        #         print('No complete results.')
        # print()
        with pd.HDFStore(i_fname) as store:
            if '/results' in store.keys():
                print('Incomplete results:')
                print(store['results'].groupby('seed').size())
            else:
                print('No incomplete results.')
        print('...............................................')