def create_menu_buttons(chunk):
    """Build the VK keyboard for one page of the product menu.

    Args:
        chunk: zero-based index of the products page to render.

    Returns:
        The serialized keyboard string from ``VkKeyboard.get_keyboard()``.
    """
    products = moltin.get_products()
    # Five products per page; each product is a (slug, name) pair.
    chunks = list(utils.create_chunks(products, size=5))
    last_chunk = len(chunks) - 1

    keyboard = VkKeyboard(one_time=True)
    # One button per line: label is the product name, payload is its slug.
    for slug, name in chunks[chunk]:
        keyboard.add_button(name, payload=json.dumps(slug))
        keyboard.add_line()

    # Pagination row: show "prev"/"next" only where such a page exists.
    # BUGFIX: the original branched on `chunk == 0` first, so with a single
    # page (chunk == 0 == last_chunk) it still rendered a "next" button
    # pointing at a page that does not exist.
    has_prev = chunk > 0
    has_next = chunk < last_chunk
    if has_prev:
        keyboard.add_button("Предыдущие", payload=json.dumps("prev"))
    if has_next:
        keyboard.add_button("Следущие", payload=json.dumps("next"))
    if has_prev or has_next:
        keyboard.add_line()

    keyboard.add_button("Корзина", payload=json.dumps("basket"),
                        color=VkKeyboardColor.PRIMARY)
    return keyboard.get_keyboard()
# Reading and parsing optional arguments from the command line
# (e.g., --optimization,lr=0.002)
[section_args, field_args, value_args] = read_args_command_line(sys.argv, config)

# Read, parse, and check the config file
cfg_file_proto = config['cfg_proto']['cfg_proto']
[config, name_data, name_arch] = check_cfg(cfg_file, config, cfg_file_proto)
print("- Reading config file......OK!")

# Copy the global cfg file into the output folder
cfg_file = out_folder + '/conf.cfg'
with open(cfg_file, 'w') as configfile:
    config.write(configfile)

# Splitting data into chunks (see out_folder/additional_files)
create_chunks(config)
print("- Chunk creation......OK!\n")

# Create (truncate) the results file. Using a context manager replaces the
# open()/close() pair and guarantees the handle is released; `res_file`
# stays bound (to the now-closed handle) exactly as before.
res_file_path = out_folder + '/res.res'
with open(res_file_path, "w") as res_file:
    pass

# Read cfg file options
is_production = strtobool(config['exp']['production'])
cfg_file_proto_chunk = config['cfg_proto']['cfg_proto_chunk']
run_nn_script = config['exp']['run_nn_script']
cmd = config['exp']['cmd']
N_ep = int(config['exp']['N_epochs_tr'])
tr_data_lst = config['data_use']['train_with'].split(',')
# Where the id -> name lookup for this id type is persisted between runs.
path_to_save = "./data/lookups/{}_lookup.json".format(type_of_ids)

df = pd.read_csv("./data/raw/bgg_GameItem.csv")
df = df[["bgg_id", type_of_ids]]
ids = make_list_and_explode(df, type_of_ids)

# Resume from a previous run: seed the lookup from the saved JSON, if any.
ids_and_names = dict()
try:
    with open(path_to_save) as f:
        ids_and_names.update(json.load(f))
except FileNotFoundError:
    pass

# Drop ids we already resolved so reruns don't scrape redundantly
# (implements the old TODO; same approach as the sibling chunk below).
# NOTE(review): json.load always yields *string* keys — confirm the
# elements of `ids` are strings too, or this subtraction removes nothing.
ids = list(set(ids) - set(ids_and_names.keys()))

# Scrape in batches of 100, pausing between requests to be polite to the API.
chunked_ids = utils.create_chunks(ids, 100)
for id_chunk in chunked_ids:
    try:
        id_and_name = parse_boardgame_id(id_chunk, type_of_ids)
        ids_and_names.update(id_and_name)
    except UnboundLocalError as error:
        print(error)
    except ET.ParseError as error:
        # Malformed XML from the API: log and keep going with the next batch.
        print(error)
    time.sleep(10)

with open(path_to_save, "w") as fp:
    json.dump(ids_and_names, fp)
# Load the raw BGG games table and keep just the two id columns we need.
df = pd.read_csv("./data/bgg_GameItem.csv")
df = df[["bgg_id", type_of_ids]]
ids = utils.extract_ids_from_column(df[type_of_ids])

# Resume support: start from whatever lookup was saved by a previous run.
ids_and_names = dict()
try:
    with open(path_to_save) as f:
        ids_and_names.update(json.load(f))
except FileNotFoundError:
    pass

# Never scrape an id twice — keep only the ids not already in the lookup.
ids = list(set(ids).difference(ids_and_names.keys()))

# Resolve names in batches of 500, sleeping between requests.
for batch in list(utils.create_chunks(ids, 500)):
    try:
        ids_and_names.update(group_id_to_name(batch, type_of_ids))
    except UnboundLocalError as error:
        print(error)
    except ET.ParseError as error:
        print(error)
    time.sleep(10)

# Persist the (possibly grown) lookup back to disk.
with open(path_to_save, "w") as fp:
    json.dump(ids_and_names, fp)