def submit_data(self):
    """Validate the login form and, on success, record the current user.

    Reads the e-mail and password entry widgets, looks the user up through
    ``read_data``/``getUsers``, stores the resolved id in the module-global
    ``current_user`` and in ``db`` (``db.setId``), then closes the login
    window.  On failure an error dialog is shown instead.
    """
    cl = self.cl.get()
    # NOTE(review): the entered password is read but the query compares the
    # password column against the literal '******' -- presumably a
    # placeholder.  Confirm how the password is really checked.
    pl = self.pl.get()
    # SECURITY: the e-mail used to be interpolated verbatim into the SQL
    # text, allowing SQL injection.  Doubling single quotes neutralises the
    # quote-escape vector; switch to a parameterised query once read_data
    # supports bind variables.
    safe_cl = cl.replace("'", "''")
    sql = ("SELECT * FROM users where email='" + safe_cl +
           "' and password='******'")
    flag = read_data(sql)
    if flag == 1:
        global current_user
        print("login successful ")
        active_user = getUsers(sql)
        print("current user id is ")
        print(active_user)
        current_user = active_user
        db.setId(current_user)
        messagebox.showinfo("Login Message", "Successfully Logged In ")
        d = data("current_user")
        d.setName("current_user")
        # Close the login window; the next screen is launched elsewhere.
        self.master.destroy()
    else:
        print("login failed")
        messagebox.showerror("Login Message", "Please Try Again")
def get(self):
    """Handle GET: read one measurement series and return it as a string.

    Expects ``db_name`` and ``measurement`` request arguments (parsed by
    the resource's ``reqparser``).  On a read failure the traceback is
    printed and the original exception is re-raised.
    """
    args = self.reqparser.parse_args()
    db_name = args['db_name']
    measurement = args['measurement']
    try:
        result = read_data(db_name, measurement)
    except Exception:
        print_exc()
        # BUGFIX: was ``raise Exception``, which discarded the original
        # exception type and traceback; a bare ``raise`` re-raises the
        # caught exception intact.
        raise
    return str(result)
def get_snapshots():
    """Get snapshots of EBS data volumes.

    Returns a dictionary mapping snapshot ids to virtual machine names,
    keeping only the most recent ``EC2_DEFAULT_INSTANCE_NUM`` rows.
    """
    rows = db.read_data(DB_FILES["snapshots"])
    # Keep only the newest entries when the file has grown past the limit.
    if len(rows) > EC2_DEFAULT_INSTANCE_NUM:
        rows = rows[-EC2_DEFAULT_INSTANCE_NUM:]
    # Each row is (vm_name, snapshot_id); invert into snapshot_id -> name.
    return {row[1]: row[0] for row in rows}
def login(name, passw):
    """Authenticate an employee from the login form widgets.

    ``name`` and ``passw`` are entry widgets exposing ``.get()``.  Returns
    ``(True, "")`` on success or ``(False, message)`` on failure.  On
    success the employee is marked logged-in via ``write_details`` and
    ``change_status``.
    """
    name, password = name.get(), passw.get()
    # SECURITY: the employee code used to be interpolated verbatim into the
    # SQL text, allowing SQL injection.  Doubling single quotes closes the
    # quote-escape vector; prefer a parameterised query when the read_data
    # helper supports bind variables.
    safe_code = name.replace("'", "''")
    query = f"SELECT * FROM employee WHERE code='{safe_code}';"
    user = read_data(query)
    if not user:
        return (False, "Wrong Credentials.Please Try Again")
    current_user = User(user[0])
    if not crypt.check_password_hash(current_user.password, password):
        return (False, "Wrong Credentials.Please Try Again")
    write_details(current_user.code, True)
    change_status()
    return (True, "")
def get_addresses():
    """Get public IPs.

    Returns a dictionary mapping virtual machine names to the public IPs
    previously associated with them.
    """
    rows = db.read_data(DB_FILES["addresses"])
    # Each row is (vm_name, public_ip).
    return {row[0]: row[1] for row in rows}
def history():
    """Render the history page populated with every stored record."""
    records = read_data()
    return render_template("history.html", data=records)
# NOTE(review): this chunk begins mid-expression -- the first lines are the
# trailing keyword arguments of an ax.annotate(...) call whose opening (and
# the enclosing plot-building function that owns fig/ax0/isTest/plotHeight/
# dpi) lies earlier in the file.
        xycoords='axes fraction', xytext=(1, 1.25),
        textcoords='axes fraction',
        arrowprops=dict(color=spinecolor, width=1, headwidth=7,
                        headlength=10))
    # Stamp the number of plotted time points in the bottom-left corner.
    fig.text(0.01, 0.01, '{0} time points'.format(len(times)),
             color=spinecolor, alpha=0.25)
    # Drop the last x tick label and slant the rest for readability.
    xticklabels = ax0.get_xticklabels()
    ax0.set_xticklabels(xticklabels[:-1], rotation=20, ha="left")
    if isTest:
        # Interactive test run: just display the figure.
        fig.show()
    else:
        # Server run: render the figure to an in-memory PNG and return it
        # base64-encoded together with its pixel height (inches * dpi).
        buf = BytesIO()
        fig.savefig(buf, format="png", facecolor=facecolor)
        return (base64.b64encode(buf.getvalue()).decode("ascii"),
                plotHeight * dpi)


if __name__ == "__main__":
    # Manual smoke test: plot whatever the local db module currently holds.
    import db
    dbdata = db.read_data()
    make_plots_mpl(dbdata)
    print("Done")
def main():
    """Single Flask view for the aquarium dashboard.

    GET renders the dashboard template with the current device state;
    POST is the AJAX endpoint that mutates server-side state (view panels,
    output pins, thermostat, plot/camera ranges, info messages) and
    returns an empty string.
    """
    if not is_known():
        # Unknown client: optionally bounce to the login page, else 403.
        if allowRemote:
            return redirect(url_for('login'))
        else:
            return abort(403)  # forbidden
    if request.method == "POST":
        data = request.get_json(force=True)
        if 'viewpanel' in data:
            # Toggle visibility of one dashboard panel.
            td = data['viewpanel']
            viewstates[td][0] = not viewstates[td][0]
        elif 'pin' in data:
            try:
                name = data['pin']
                state = data['state']
                how = data['how']
                skipset = 'skipset' in data
            except:  # noqa
                # Malformed payload: silently ignore the request.
                return ""
            stateStr = 'on' if state else 'off'
            message = "{0} {1} {2}".format(how, name, stateStr)
            messages.appendleft(message)
            logging.info(message)
            now = datetime.datetime.now()
            messageTimes.appendleft(now.strftime("%Y/%m/%d, %H:%M:%S"))
            # Remember when a heating/cooling pin last switched so the
            # thermostat logic can track its last switching time.
            hPins = supply.thermostat['heatingPins'] + \
                supply.thermostat['coolingPins']
            if name in hPins:
                supply.thermostat['lastSwitch'] = now
            if name == 'feeding':
                # Feeding: switch the configured companion pins off before
                # feeding starts and back on when it ends.
                if state:
                    for nameF in supply.outPins[name]['toSwitchOffBefore']:
                        acquire.set_pin(nameF, 'off')
                else:
                    for nameF in supply.outPins[name]['toSwitchOnAfter']:
                        acquire.set_pin(nameF, 'on')
                supply.feedStartTime = now if state else None
            if not skipset:
                acquire.set_pin(name, stateStr)
        elif 'thermostat' in data:
            # Enable/disable the thermostat and log the change.
            supply.thermostat['active'] = data['state'] == 1
            stateStr = 'on' if supply.thermostat['active'] else 'off'
            name = 'thermostat'
            how = 'Set'
            message = "{0} {1} {2}".format(how, name, stateStr)
            messages.appendleft(message)
            logging.info(message)
            now = datetime.datetime.now()
            messageTimes.appendleft(now.strftime("%Y/%m/%d, %H:%M:%S"))
        elif 'plotdelta' in data:
            # Change the plotted time span.
            td = data['plotdelta']
            plots.currentDelta = td
        elif 'yrange' in data:
            # Flip between automatic and user-fixed y-axis range.
            plots.yrange = "auto" if plots.yrange != "auto" else "user"
        elif 'cameradelta' in data:
            td = data['cameradelta']
            camera.currentDelta = td
        elif 'info' in data:
            # Free-form message from the client, logged and displayed.
            message = data['info']
            messages.appendleft(message)
            logging.info(message)
            now = datetime.datetime.now()
            messageTimes.appendleft(now.strftime("%Y/%m/%d, %H:%M:%S"))
        return ""  # must return something (not None)
    # GET: refresh pin states and compute the remaining feeding countdown.
    acquire.get_pins()
    if supply.feedStartTime is None:
        feedCounter = 0
    else:
        now = datetime.datetime.now()
        feedCounter = (supply.outPins['feeding']['delay'] - (
            now - supply.feedStartTime)).seconds
    plotStr = ''
    plotHeight = 360
    if wantPlot and viewstates['plots'][0]:
        # Build the measurement plot with the configured backend.
        if 'plotly' in wantPlot:
            dbdata = db.read_data()
            plots.make_plots_plotly(dbdata)
        elif 'mpl' in wantPlot:
            td = plots.timeDeltas[plots.currentDelta]
            dbdata = db.read_data(td)
            plotStr, plotHeight = plots.make_plots_mpl(dbdata, td)
    cronTasks = cron.get_cron_tasks() if supply.wantCronTable else {}
    templateData = dict(
        viewstates=viewstates,
        outPins=supply.outPins,
        thermostat=supply.thermostat,
        cronTasks=cronTasks,
        feedCounter=feedCounter,
        temperatures=supply.temperatures,
        temperatureUnit=supply.temperatureUnit,
        temperatureDisplayLimits=supply.temperatureDisplayLimits,
        temperatureColorSectors=supply.temperatureColorSectors,
        sensorsFromArduino=supply.sensorsFromArduino,
        sensorsFromRaspberry=supply.sensorsFromRaspberry,
        inPinsFromArduino=supply.inPinsFromArduino,
        inPinsFromRaspberry=supply.inPinsFromRaspberry,
        gaugePins=supply.gaugePins,
        wantPlot=wantPlot,
        plotStr=plotStr,
        plotHeight=plotHeight,
        plotDeltas=plots.timeDeltas,
        currentPlotDelta=plots.currentDelta,
        yrange=plots.yrange,
        cameraDeltas=camera.timeDeltas,
        currentCameraDelta=camera.currentDelta,
        messages=messages,
        messageTimes=messageTimes,
        version=__version__)
    return render_template('aquarium.html', **templateData)
def store_instances(conn, copy_snapshots=False, idle_only=False):
    """Store instances.

    Detach data volumes from instances, create volume snapshots, create
    AMIs and terminate the instances.  With ``copy_snapshots`` the volume
    snapshots are additionally copied (and the local originals deleted);
    with ``idle_only`` only instances reported idle are processed.
    """
    instances = []
    if idle_only:
        # Get all idle instances
        instances = get_idle_instances(conn)
        if not instances:
            all_instances = get_instances(conn)
            list_instances_info(conn, all_instances)
            output.warning("There is no idle instance at this time.")
            return
    else:
        instances = get_instances(conn, True, "running")
    output.debug("The following idle instances will be stored.")
    list_instances_info(conn, instances)
    output.debug("Preparing to detach data volumes from instances...")
    volumes = get_data_volumes(conn, instances)
    volume_ids = [volume.id for volume in volumes]
    data = []
    # Read previous snapshots data if any
    rows = db.read_data(DB_FILES["snapshots"])
    data += rows
    source_snapshots = []
    old_ami_snapshot_ids = []
    for volume in volumes:
        # Detach the data volume and create snapshots
        instance_id = volume.attach_data.instance_id
        instance = get_instance(conn, instance_id)
        name = instance.tags.get("Name", "-")
        output.debug("Detaching data volume from instance %s..." % name)
        volume.detach()
        while volume.update() != "available":
            time.sleep(EC2_DEFAULT_WAIT_INTERVAL)
        msg = "Creating snapshot of this volume..."
        output.debug(msg)
        snapshot = volume.create_snapshot()
        snapshot_id = snapshot.id
        if copy_snapshots:
            # Copy the snapshot to Amazon S3
            snapshot.update()
            if snapshot.status != "completed":
                time.sleep(EC2_DEFAULT_WAIT_INTERVAL)
                snapshot.update()
            msg = "Copying the snapshot to Amazon S3..."
            output.debug(msg)
            snapshot_id = conn.copy_snapshot(EC2_DEFAULT_REGION, snapshot.id)
            time.sleep(EC2_DEFAULT_WAIT_INTERVAL)
            # Get the copied snapshot and make sure they are completed
            copied_snapshot = conn.get_all_snapshots([snapshot_id])[0]
            copied_snapshot.update()
            if copied_snapshot.status != "completed":
                time.sleep(EC2_DEFAULT_WAIT_INTERVAL)
                copied_snapshot.update()
            # Keep the local original so it can be deleted after the copy
            # succeeds (see the cleanup at the end).
            source_snapshots.append(snapshot)
        data.append([name, snapshot_id])
    db.write_data(DB_FILES["snapshots"], data)
    for instance in instances:
        name = instance.tags.get("Name", "-")
        image = conn.get_image(instance.image_id)
        if image:
            if image.id != EC2_DEFAULT_IMAGE_ID:
                # Custom AMI: remember its root snapshot so it can be
                # cleaned up, then deregister the stale image.
                bdm = image.block_device_mapping
                snapshot_id = bdm["/dev/sda1"].snapshot_id
                old_ami_snapshot_ids.append(snapshot_id)
                msg = "Deleting old AMI of instance %s..." % name
                output.debug(msg)
                image.deregister()
        msg = "Creating AMI from instance %s..." % (name)
        output.debug(msg)
        image_id = instance.create_image(name)
        time.sleep(EC2_DEFAULT_WAIT_INTERVAL)
        image = conn.get_image(image_id)
        while image.update() != "available":
            time.sleep(EC2_DEFAULT_WAIT_INTERVAL)
        public_ip = instance.ip_address
        # NOTE(review): msg is built but never logged here, and the format
        # arguments look swapped (name fills the "public IP %s" slot).
        msg = "Disassociating public IP %s from instance %s..." % (
            name, instance.id)
        conn.disassociate_address(public_ip)
        # After AMI is created, terminate the instance
        msg = "Terminating instance %s (%s)..." % (name, instance.id)
        output.debug(msg)
        instance.terminate()
    output.debug("Waiting for all idle instances terminated before deleting "
                 "their data volumes...")
    for instance in instances:
        while instance.update() == "shutting-down":
            time.sleep(EC2_DEFAULT_WAIT_INTERVAL)
        # NOTE(review): assert is stripped under ``python -O``.
        assert instance.update() == "terminated"
    delete_all_data_volumes(conn, volume_ids=volume_ids)
    if copy_snapshots:
        output.debug("Deleting source snapshots... ")
        for snapshot in source_snapshots:
            snapshot.delete()
    output.debug("Deleting snapshots of old AMIs...")
    for sid in old_ami_snapshot_ids:
        conn.delete_snapshot(sid)
    output.success("All idle instances are stored and backed up.")
def unpack(args):
    """Build a converted working copy of the game's resources.

    Pipeline (each stage reports progress via ``update_progress``):
      1. copy the game content from ``args.src_dir`` to ``args.dst_dir``;
      2. recursively unpack resource archives inside the working copy;
      3. convert the individual file formats (databases, textures, ...);
      4. convert the game maps (MP/SEC plus textures and static scenery)
         and then the models (LNK trees of FIG/BON files);
      5. optionally join the game's text resources into YAML files.

    Stages can be skipped via ``args.skip_copy`` / ``args.skip_extract`` /
    ``args.skip_convert`` / ``args.text_joint``.
    """
    update_progress("STARTED_AT_" + time.strftime("%H:%M:%S"))
    if args.verbose:
        print_log("Source dir: " + args.src_dir)
        print_log("Destination dir: " + args.dst_dir)
        print_log("\nCreate working copy\n")
    count = 0
    if not args.skip_copy:
        # Create the folder tree and copy the game content (extensions
        # listed in ``not_copy`` are skipped).
        for d, dirs, files in os.walk(args.src_dir):
            for file in files:
                if os.path.splitext(file)[1][1:].lower() not in not_copy:
                    dest = os.path.join(args.dst_dir,
                                        os.path.relpath(d, args.src_dir))
                    if not os.path.exists(dest):
                        if args.verbose:
                            print_log("Create folder \"" + dest + "\"")
                        os.makedirs(dest)
                    shutil.copyfile(os.path.join(d, file),
                                    os.path.join(dest, file))
                    count += 1
                    update_progress()
                elif args.verbose:
                    print_log("Skip \"" + file + "\"")
    update_progress("FOLDERS_CONVERTED_" + time.strftime("%H:%M:%S"))
    # Unpack archives for as long as a pass still finds any.
    if args.verbose:
        print_log("{} files copied; no need to read source \
folder anymore".format(count))
        print_log("\nUnpack archives recursively")
    count = 0
    flag = 0 if args.skip_extract else 1
    while flag:
        if count == 1:
            # After the first pass the menu archives are unpacked; mirror a
            # few menu resources into the figures/textures trees where the
            # later conversion stages expect them.
            shutil.copyfile(
                os.path.join(args.dst_dir, "Res", "menus", "unmoco1.anm"),
                os.path.join(args.dst_dir, "Res", "figures", "unmoco1.anm"))
            shutil.copyfile(
                os.path.join(args.dst_dir, "Res", "menus", "unmoco2.anm"),
                os.path.join(args.dst_dir, "Res", "figures", "unmoco2.anm"))
            shutil.copyfile(
                os.path.join(args.dst_dir, "Res", "menus", "unmoco1.bon"),
                os.path.join(args.dst_dir, "Res", "figures", "unmoco1.bon"))
            shutil.copyfile(
                os.path.join(args.dst_dir, "Res", "menus", "unmoco2.bon"),
                os.path.join(args.dst_dir, "Res", "figures", "unmoco2.bon"))
            shutil.copyfile(
                os.path.join(args.dst_dir, "Res", "menus", "unmoco1.mod"),
                os.path.join(args.dst_dir, "Res", "figures", "unmoco1.mod"))
            shutil.copyfile(
                os.path.join(args.dst_dir, "Res", "menus", "unmoco2.mod"),
                os.path.join(args.dst_dir, "Res", "figures", "unmoco2.mod"))
            shutil.copyfile(
                os.path.join(args.dst_dir, "Res", "menus",
                             "mainmenu00labels.mmp"),
                os.path.join(args.dst_dir, "Res", "textures",
                             "mainmenu00labels.mmp"))
            shutil.copyfile(
                os.path.join(args.dst_dir, "Res", "menus", "escmenu00.mmp"),
                os.path.join(args.dst_dir, "Res", "textures",
                             "escmenu00.mmp"))
            shutil.copyfile(
                os.path.join(args.dst_dir, "Res", "menus", "mainmenu00.mmp"),
                os.path.join(args.dst_dir, "Res", "textures",
                             "mainmenu00.mmp"))
            shutil.copyfile(
                os.path.join(args.dst_dir, "Res", "menus",
                             "escmenu00labels.mmp"),
                os.path.join(args.dst_dir, "Res", "textures",
                             "escmenu00labels.mmp"))
            shutil.copyfile(
                os.path.join(args.dst_dir, "Res", "menus", "numbers.mmp"),
                os.path.join(args.dst_dir, "Res", "textures", "numbers.mmp"))
        count += 1
        flag = 0
        if args.verbose:
            print_log("\n{} iteration of file unpacking".format(count))
        arr = []  # NOTE(review): unused; kept as-is
        for d, dirs, files in os.walk(args.dst_dir):
            for file in files:
                if os.path.splitext(file)[1][1:].lower() in archives:
                    with open(os.path.join(d, file), "rb") as f_tst:
                        magic = f_tst.read(4)
                    # Only files carrying the resource-archive magic number
                    # are actually packed archives.
                    if magic == b'\x3C\xE2\x9C\x01':
                        print_log(os.path.join(d, file))
                        update_progress()
                        flag = 1  # found one -> another pass is needed
                        if file[-3:] == "mod":
                            mod.read_info(os.path.join(d, file))
                        elif file[-3:] == "bon":
                            # BON/ANM archives: tree entries carry no
                            # extension; restore it before unpacking.
                            with open(os.path.join(d, file), "rb") as f:
                                tree = res.read_filetree(f)
                                for element in tree:
                                    element[0] += ".bon"
                                res.unpack_res(f, tree, os.path.join(d, file))
                        elif file[-3:] == "anm":
                            with open(os.path.join(d, file), "rb") as f:
                                tree = res.read_filetree(f)
                                for element in tree:
                                    element[0] += ".anm"
                                res.unpack_res(f, tree, os.path.join(d, file))
                        else:
                            with open(os.path.join(d, file), "rb") as f:
                                filetree = res.read_filetree(f)
                                res.unpack_res(f, filetree,
                                               os.path.join(d, file))
                        os.remove(os.path.join(d, file))
    update_progress("ARCHIVES_CONVERTED_" + time.strftime("%H:%M:%S"))
    if args.verbose:
        print_log("\nAfter {} iterations all archives unpacked".format(count))
        print_log("\nStart figures folder reorganisation\n")
    try:
        compact.compact_figs(os.path.join(args.dst_dir, "Res", "figures"))
    except:
        if args.verbose:
            print_log("Can't reorganise figures folder!")
    # File conversion
    if args.verbose:
        print_log("\nFigures folder reorganised\n")
    if not args.skip_convert:
        if args.verbose:
            print_log("\nConvert files\n")
        # Entry layout: name, objname, texture, complection, position,
        # rotation, parts
        static_objs = { }
        maps = []
        figs = []
        count = 0
        for d, dirs, files in os.walk(args.dst_dir):
            for file in files:
                file_e = os.path.splitext(file)[1][1:].lower()
                if file_e in convert:
                    count += 1
                    update_progress()
                    file_n = os.path.splitext(file)[0]
                    print_log(os.path.join(d, file))
                    if file_e == "adb":
                        try:
                            info = adb.read_info(os.path.join(d, file))
                        except:
                            print_log("ADB ERROR in file \"{}\"".format(file))
                            info = None
                        if info != None:
                            with open(os.path.join(d, file) + ".yaml",
                                      "w") as f:
                                f.write(adb.build_yaml(info))
                    elif file_e == "anm":
                        info = anm.read_info(os.path.join(d, file))
                        if info != None:
                            with open(os.path.join(d, file) + ".yaml",
                                      "w") as f:
                                f.write(anm.build_yaml(info))
                    elif file_e == "bon":
                        continue  # model convertion later
                    elif file_e == "cam":
                        info = cam.read_info(os.path.join(d, file))
                        if info != None:
                            with open(os.path.join(d, file) + ".yaml",
                                      "w") as f:
                                f.write(cam.build_yaml(info))
                    elif file_e in [
                            "idb", "ldb", "pdb", "db", "sdb", "udb", "qdb"
                    ]:
                        data = db.read_data(os.path.join(d, file))
                        with open(os.path.join(d, file) + ".csv", "w") as f:
                            f.write(db.build_data(data))
                    elif file_e == "fig":
                        continue  # model convertion later
                    elif file_e == "lnk":
                        figs.append([d, file_n])
                        continue  # model convertion later
                    elif file_e == "mmp":
                        image = mmp.read_image(os.path.join(d, file))
                        image.save(os.path.join(d, file_n) + ".png")
                    elif file_e == "mp":
                        maps.append([d, file_n])
                        continue  # map convertion later
                    elif file_e == "reg":
                        # REG text may be in either DOS (cp866) or Windows
                        # (cp1251) Cyrillic encoding; retry accordingly.
                        try:
                            info = reg.read_info(os.path.join(d, file))
                        except UnicodeEncodeError:
                            reg.ENCODE = "cp1251"
                            info = reg.read_info(os.path.join(d, file))
                            reg.ENCODE = "cp866"
                        if info != None:
                            with open(os.path.join(d, file) + ".yaml",
                                      "w") as f:
                                try:
                                    f.write(reg.build_yaml(info))
                                except UnicodeEncodeError:
                                    reg.ENCODE = "cp1251"
                                    info = reg.read_info(
                                        os.path.join(d, file))
                                    f.write(reg.build_yaml(info))
                                    reg.ENCODE = "cp866"
                    elif file_e == "sec":
                        continue  # map convertion later
                    elif file_e == "mob":
                        info = mob.read_info(os.path.join(d, file))
                        if info != None:
                            with open(os.path.join(d, file) + ".yaml",
                                      "w") as f:
                                f.write(mob.build_yaml(info))
                            # Scan the produced YAML for static scenery
                            buf_objs = []
                            with open(os.path.join(d, file) + ".yaml") as f:
                                cnt = 0
                                for line in f.readlines():
                                    if len(line) > 100:
                                        continue
                                    buf = line.replace("\n", "").\
                                        replace("\r", "").\
                                        replace(" ", "").\
                                        replace("\"", "").split(":")
                                    if cnt > 0:
                                        # Collecting components of the last
                                        # started OBJ... vector
                                        if buf_objs[-1][var_lbl][-cnt] \
                                                is None:
                                            buf_objs[-1][var_lbl][-cnt] = \
                                                float(buf[0][1:])
                                        cnt -= 1
                                        continue
                                    elif cnt < 0:
                                        # Collecting body-part list items
                                        if buf[0][0] == "-":
                                            buf_objs[-1][6].append(
                                                buf[0][1:])
                                            continue
                                        else:
                                            cnt = 0
                                    if buf[0] == "OBJTEMPLATE":
                                        # NOTE(review): reuses the outer
                                        # ``count`` (converted-file counter)
                                        # to make object names unique.
                                        count += 1
                                        buf_objs[-1][0] = buf[1]
                                        buf_objs[-1][1] = buf[1] + "_" + \
                                            file_n + "_{}".format(count)
                                    if buf[0] == "OBJPRIMTXTR" and \
                                            buf_objs[-1][2] is None:
                                        buf_objs[-1][2] = buf[1]
                                    if buf[0] == "OBJCOMPLECTION":
                                        var_lbl = 3
                                        cnt = 3
                                    if buf[0] == "OBJPOSITION":
                                        var_lbl = 4
                                        cnt = 3
                                    if buf[0] == "OBJROTATION":
                                        var_lbl = 5
                                        cnt = 4
                                    if buf[0] == "OBJBODYPARTS":
                                        # A new object record starts here
                                        buf_objs.append([
                                            None, None, None,
                                            [None, None, None],
                                            [None, None, None],
                                            [None, None, None, None], None
                                        ])
                                        if buf[1] != "None":
                                            buf_objs[-1][6] = []
                                            var_lbl = 6
                                            cnt = -1
                            static_objs.update({file_n.lower(): buf_objs})
                    os.remove(os.path.join(d, file))
        update_progress("COMMON_CONVERTED_" + time.strftime("%H:%M:%S"))
        if args.verbose:
            print_log("{} files converted".format(count))
            print_log("\nConvert game maps\n")
        # Map conversion: uses the map's MP and SEC files plus the extra
        # textures unpacked next to them.
        count = 0
        for i in maps:
            if i[1].lower() not in static_objs:
                if os.path.isfile(
                        os.path.join(args.dst_dir, "Maps",
                                     i[1] + ".mob.yaml")):
                    # Scan the map's MOB YAML for static scenery
                    buf_objs = []
                    with open(
                            os.path.join(args.dst_dir, "Maps",
                                         i[1] + ".mob.yaml")) as f:
                        cnt = 0
                        for line in f.readlines():
                            if len(line) > 100:
                                continue
                            buf = line.replace("\n", "").replace("\r", "").\
                                replace(" ", "").replace("\"", "").split(":")
                            if cnt > 0:
                                if buf_objs[-1][var_lbl][-cnt] is None:
                                    buf_objs[-1][var_lbl][-cnt] = float(
                                        buf[0][1:])
                                cnt -= 1
                                continue
                            elif cnt < 0:
                                if buf[0][0] == "-":
                                    buf_objs[-1][6].append(buf[0][1:])
                                    continue
                                else:
                                    cnt = 0
                            if buf[0] == "OBJTEMPLATE":
                                count += 1
                                buf_objs[-1][0] = buf[1]
                                buf_objs[-1][1] = buf[1] + "_" + i[
                                    1] + "_{}".format(count)
                            if buf[0] == "OBJPRIMTXTR" and buf_objs[-1][
                                    2] is None:
                                buf_objs[-1][2] = buf[1]
                            if buf[0] == "OBJCOMPLECTION":
                                var_lbl = 3
                                cnt = 3
                            if buf[0] == "OBJPOSITION":
                                var_lbl = 4
                                cnt = 3
                            if buf[0] == "OBJROTATION":
                                var_lbl = 5
                                cnt = 4
                            if buf[0] == "OBJBODYPARTS":
                                buf_objs.append([
                                    None, None, None, [None, None, None],
                                    [None, None, None],
                                    [None, None, None, None], None
                                ])
                                if buf[1] != "None":
                                    buf_objs[-1][6] = []
                                    var_lbl = 6
                                    cnt = -1
                    static_objs.update({i[1].lower(): buf_objs})
                else:
                    static_objs.update({i[1].lower(): None})
            # Map parameters
            map_info = mp.read_info(os.path.join(i[0], i[1] + ".mp"))
            if args.verbose:
                print_log(
                    os.path.join(i[0], i[1]) +
                    " + {} textures and {}x{} sectors".format(
                        map_info[3], map_info[1], map_info[2]))
            count += map_info[3] + map_info[1] * map_info[2] + 1
            update_progress(map_info[3] + map_info[1] * map_info[2] + 1)
            # Copy the textures
            for j in range(map_info[3]):
                shutil.copyfile(
                    os.path.join(args.dst_dir, "Res", "textures",
                                 i[1] + "{:03}.png".format(j)),
                    os.path.join(i[0], i[1] + "{:03}.png".format(j)))
            if static_objs[i[1].lower()] is not None:
                # Unit positions on the map, with relative Z
                unit_pos = [s_obj[4] for s_obj in static_objs[i[1].lower()]]
                if len(unit_pos) == 0:
                    unit_pos = None
            else:
                unit_pos = None
            # Convert the map and its textures; obtain the units' absolute
            # Z coordinates back from the converter.
            unit_pos = convert_map.convert_map(os.path.join(i[0], i[1]),
                                               unit_pos)
            if static_objs[i[1].lower()] is not None:
                # Convert the static objects
                # name, objname, texture, complection, position, rotation,
                # parts
                list_fpath_inputs = [os.path.join(args.dst_dir, "Res",
                                                  "figures", s_obj[0],
                                                  s_obj[1] + ".dae")
                                     for s_obj in static_objs[i[1].lower()]]
                for k, s_obj in enumerate(static_objs[i[1].lower()]):
                    convert_model.convert_model(
                        os.path.join(args.dst_dir, "Res", "figures",
                                     s_obj[0], s_obj[0]),
                        add_suf=s_obj[1][len(s_obj[0]):],
                        coefs=s_obj[3],
                        root_pos=unit_pos[k],
                        root_rot=s_obj[5],
                        tex_name=s_obj[2],
                        need_parts=s_obj[6])
                # Drop entries whose DAE was never produced.
                ind = 0
                while ind < len(list_fpath_inputs):
                    if os.path.isfile(list_fpath_inputs[ind]):
                        ind += 1
                    else:
                        print_log("File {} not exist".format(
                            list_fpath_inputs.pop(ind)))
                # Create the full map with the static objects merged in
                list_fpath_inputs.append(os.path.join(i[0], i[1]) + ".dae")
                merge_collada.merge_dae_files(
                    list_fpath_inputs,
                    os.path.join(i[0], i[1] + "_full.dae"), i[1] + "_full")
                # Copy the static objects' textures
                for s_obj in static_objs[i[1].lower()]:
                    if not os.path.isfile(
                            os.path.join(i[0], s_obj[2] + ".png")):
                        shutil.copyfile(
                            os.path.join(args.dst_dir, "Res", "textures",
                                         s_obj[2] + ".png"),
                            os.path.join(i[0], s_obj[2] + ".png"))
                # Remove the static DAE files that were merged in
                for st_dae in list_fpath_inputs[:-1]:
                    os.remove(st_dae)
            # Remove the source files
            for j in range(map_info[1]):
                for k in range(map_info[2]):
                    os.remove(os.path.join(i[0], i[1] +
                                           "{:03}{:03}.sec".format(j, k)))
            for j in range(map_info[3]):
                os.remove(os.path.join(i[0], i[1] + "{:03}.png".format(j)))
            os.remove(os.path.join(i[0], i[1] + ".mp"))
        update_progress("MAPS_CONVERTED_" + time.strftime("%H:%M:%S"))
        if args.verbose:
            print_log("{} files converted ({} maps)".format(count, len(maps)))
            print_log("\nConvert models\n")
        # Model conversion: covers LNK, SEC, ANM, BON files
        count = 0
        for i in figs:
            print_log(i[0])
            try:
                if convert_model.convert_model(
                        os.path.join(i[0], i[1])) is not None:
                    continue
            except Exception as e:
                print_log(str(e))
                continue
            # Copy the textures
            for j in textures_link.textures.get(i[1], []):
                try:
                    shutil.copyfile(
                        os.path.join(args.dst_dir, "Res", "textures",
                                     j + ".png"),
                        os.path.join(i[0], j + ".png"))
                except Exception as e:
                    print_log(str(e))
            filelist = convert_model.flat_tree(
                lnk.read_info(os.path.join(i[0], i[1] + ".lnk")))
            count += 1 + len(filelist) * 2
            update_progress(1 + len(filelist) * 2)
            # Remove the source files
            for j in filelist:
                os.remove(os.path.join(i[0], j + ".fig"))
                os.remove(os.path.join(i[0], j + ".bon"))
            os.remove(os.path.join(i[0], i[1] + ".lnk"))
        update_progress("FIGURES_CONVERTED_" + time.strftime("%H:%M:%S"))
        if args.verbose:
            print_log("{} files converted".format(count))
    # Game text conversion.
    # Note: the original game contains one broken file -- "Gipat Medium"
    # (material type: leather) -- which is skipped here.
    count = 0
    if args.text_joint and os.path.isdir(
            os.path.join(args.dst_dir, "Res", "texts")):
        if args.verbose:
            print_log("\nJoint game strings\n")
        with open(os.path.join(args.dst_dir, "Res", "texts", "texts.yaml"),
                  "w") as file:
            file.write(
                text.build_yaml(
                    text.read_info(os.path.join(args.dst_dir, "Res",
                                                "texts"))))
        # Remove the joined extensionless source files.
        for file in os.listdir(os.path.join(args.dst_dir, "Res", "texts")):
            if "." in file or "(" in file:
                continue
            os.remove(os.path.join(args.dst_dir, "Res", "texts", file))
            count += 1
            update_progress()
        with open(
                os.path.join(args.dst_dir, "Res", "textslmp",
                             "textslmp.yaml"), "w") as file:
            file.write(
                text.build_yaml(
                    text.read_info(
                        os.path.join(args.dst_dir, "Res", "textslmp"))))
        for file in os.listdir(os.path.join(args.dst_dir, "Res",
                                            "textslmp")):
            if "." in file or "(" in file:
                continue
            os.remove(os.path.join(args.dst_dir, "Res/textslmp", file))
            count += 1
            update_progress()
    update_progress("TEXTS_CONVERTED_" + time.strftime("%H:%M:%S"))
    if args.verbose:
        print_log("{} files converted".format(count))
def read():
    """Concatenate the string form of column 2 of every stored row."""
    rows = db.read_data()
    return "".join(str(row[2]) for row in rows)