def checkFtpBinary():
    # Check that the expected FTP binary exists
    test = False

    # LizSync config file from ini
    ls = lizsyncConfig()

    # Windows: search for WinSCP
    if psys().lower().startswith('win'):
        test_path = ls.variable('binaries/winscp')
        test_bin = 'WinSCP.com'
        error_message = 'WinSCP binary has not been found in the specified path'
        test = True

    # Linux: search for lftp
    if psys().lower().startswith('linux'):
        test_path = '/usr/bin/'
        test_bin = 'lftp'
        error_message = 'LFTP binary has not been found on your system'
        test = True

    # Run the test on the full path to the binary
    # (only when a platform matched, to avoid an undefined test_path)
    if test:
        ftp_bin = os.path.join(test_path, test_bin)
        if not os.path.isfile(ftp_bin):
            return False, tr(error_message)
    if not test:
        return False, tr('No FTP binary has been found on your system')

    return True, tr('FTP binary has been found on your system')
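# Example usage of checkFtpBinary (a sketch only: the function relies on the
# LizSync plugin environment for lizsyncConfig() and tr(), so the call is
# shown commented out; `feedback` would be a QGIS processing feedback object):
# ok, message = checkFtpBinary()
# if not ok:
#     feedback.reportError(message)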
def autoDetectSteam():
    if psys() == 'Windows':
        path_core = 'C:\\Program Files\\Steam\\steamapps\\common\\RimWorld'
        path_workshop = 'C:\\Program Files\\Steam\\steamapps\\workshop\\content\\294100'
    else:
        # Linux and macOS (Darwin) both fall back to the Linux Steam path
        path_core = '{}/.local/share/Steam/steamapps/common/RimWorld'.format(Path.home())
        path_workshop = '{}/.local/share/Steam/steamapps/workshop/content/294100'.format(Path.home())

    # flags holds either the detected path or a help message, per directory
    flags = [None, None]
    if os.path.exists(path_core):
        directory_core.set(path_core)
        flags[0] = path_core
    else:
        flags[0] = ('Please set RimWorld directory.\n'
                    'Commonly found in /Steam/steamapps/common/Rimworld')
    if os.path.isdir(path_workshop):
        directory_workshop.set(path_workshop)
        flags[1] = path_workshop
    else:
        flags[1] = ('Please set RimWorld mods directory.\n'
                    'Commonly found in /Steam/steamapps/workshop/content/294100')
    return flags
def get_ftp_password(host, port, login):
    # Get the FTP password
    # First check if it is given in the ini file
    ls = lizsyncConfig()
    password = ls.variable('ftp:central/password')

    # If not given, search for it in ~/.netrc
    if not password:
        try:
            auth = netrc.netrc().authenticators(host)
            if auth is not None:
                ftpuser, _, password = auth
        except (netrc.NetrcParseError, IOError):
            m = tr('Could not retrieve password from ~/.netrc file')
            return False, None, m
        if not password:
            m = tr('Could not retrieve password from ~/.netrc file or it is empty')
            return False, None, m
        else:
            # Use None to force the use of the ~/.netrc file,
            # but only for Linux (lftp); WinSCP needs the password itself
            if psys().lower().startswith('linux'):
                password = None

    return True, password, ''
def checkParameterValues(self, parameters, context):
    # First save the given parameters
    self.saveParameterValues(parameters)

    # Check the current project has a file
    path = context.project().absoluteFilePath()
    if not path:
        msg = tr('You must save the current project before running this algorithm')
        return False, msg

    # Check the current project has been exported
    project = context.project()
    project_directory = project.absolutePath()
    output_directory = project_directory + '/' + project.baseName() + '_mobile'
    if not os.path.isdir(output_directory):
        msg = tr(
            'The current project has not been exported to a mobile version.'
            ' You need to use the algorithm "Package project and data from the central server"'
            ' (Processing algorithm id: lizync.package_all)'
        )
        return False, msg

    # Check FTP binary
    status, msg = checkFtpBinary()
    if not status:
        return status, msg

    # Check PostgreSQL binary path
    postgresql_binary_path = parameters[self.POSTGRESQL_BINARY_PATH]
    test_bin = 'psql'
    if psys().lower().startswith('win'):
        test_bin += '.exe'
    has_bin_file = os.path.isfile(os.path.join(postgresql_binary_path, test_bin))
    if not has_bin_file:
        return False, tr('The needed PostgreSQL binaries cannot be found in the specified path')

    # Check ZIP archive path
    database_archive_file = os.path.join(output_directory, 'lizsync.zip')
    if not os.path.exists(database_archive_file):
        return False, tr('The ZIP archive does not exist in the specified path') + ': {0}'.format(database_archive_file)

    # Check connections
    connection_name_central = parameters[self.CONNECTION_NAME_CENTRAL]
    connection_name_clone = parameters[self.CONNECTION_NAME_CLONE]
    ok, uri, msg = getUriFromConnectionName(connection_name_central, True)
    if not ok:
        return False, msg
    ok, uri, msg = getUriFromConnectionName(connection_name_clone, True)
    if not ok:
        return False, msg

    return super(DeployAll, self).checkParameterValues(parameters, context)
def checkParameterValues(self, parameters, context):
    # Check PostgreSQL binary path
    postgresql_binary_path = parameters[self.POSTGRESQL_BINARY_PATH]
    test_bin = 'psql'
    if psys().lower().startswith('win'):
        test_bin += '.exe'
    has_bin_file = os.path.isfile(os.path.join(postgresql_binary_path, test_bin))
    if not has_bin_file:
        return False, tr('The needed PostgreSQL binaries cannot be found in the specified path')

    # Check ZIP archive path
    database_archive_file = self.parameterAsString(parameters, self.ZIP_FILE, context)
    if not os.path.exists(database_archive_file):
        # Fall back to the default package in the system temporary directory
        database_archive_file = os.path.join(tempfile.gettempdir(), 'central_database_package.zip')
        ok = os.path.exists(database_archive_file)
        if not ok:
            return False, tr('The ZIP archive does not exist in the specified path') + ': {0}'.format(database_archive_file)
        parameters[self.ZIP_FILE] = database_archive_file

    # Check connections
    connection_name_central = parameters[self.CONNECTION_NAME_CENTRAL]
    connection_name_clone = parameters[self.CONNECTION_NAME_CLONE]
    ok, uri_central, msg = getUriFromConnectionName(connection_name_central, True)
    if not ok:
        return False, msg
    ok, uri, msg = getUriFromConnectionName(connection_name_clone, True)
    if not ok:
        return False, msg

    # Check we can retrieve host, port, user and password for the central
    # database, since they are used inside the clone to connect to the
    # central database with dblink. Service files are not possible yet
    if uri_central.service():
        msg = tr('Central database connection uses a service file. This is not supported yet')
        return False, msg
    if not uri_central.password():
        password = get_connection_password_from_ini(uri_central)
        uri_central.setPassword(password)
    if not uri_central.password():
        msg = tr('No password found for the central database connection!')
        msg += ' ' + tr('It is needed to let the clone connect to the central'
                        ' database during the synchronisation')
        return False, msg

    return super(DeployDatabaseServerPackage, self).checkParameterValues(parameters, context)
def minput():
    fp = getcwd()
    if psys() == 'Windows':
        fp += '\\input.txt'
    else:
        fp += '/input.txt'
    # Create an empty input file
    inf = open(fp, 'w+')
    inf.close()
    print('Please open %s and type the content into that file.' % fp)
    print('When you are done, ', end='')
    if psys() == 'Windows':
        osys('pause')
    else:
        print('type anything here to continue.')
        input()
    # Process the file line by line (dealWith is defined elsewhere)
    with open(fp, 'r') as f:
        mstr = f.readline()
        while mstr:
            dealWith(mstr)
            mstr = f.readline()
def main():
    VERSION = '1.1.0'
    config = GetConfig()
    mpdc = MPDClient()
    mpdc.timeout = int(config['General']['MPDtimeout'])
    mpdc.idletimeout = int(config['General']['MPDtimeout_idle'])
    try:
        mpdc.connect(config['General']['MPDip'], port=int(config['General']['MPDport']))
    except ConnectionRefusedError:
        print(f"{TermColors.ERROR}ERROR!!! Either mpd isn't running or you have a mistake in the config. Fix ASAP! Exiting!{TermColors.END}")
        exit(-1)

    RPC = Presence(710956455867580427)
    RPC.connect()

    if psys() == 'Windows':
        ossys('cls')
    else:
        ossys('clear')

    print(f'{TermColors.WORKS}MPDDRP v.{VERSION} - https://github.com/AKurushimi/mpddrp{TermColors.END}')

    try:
        while True:
            statusout = mpdc.status()
            csout = mpdc.currentsong()
            if statusout['state'] != 'stop':
                title = csout['title']
                artist = csout['artist']
                album = csout['album']
                timevar = statusout['time'].split(':')
                timenow = str(timedelta(seconds=int(timevar[0])))
                timeall = str(timedelta(seconds=int(timevar[1])))
                if statusout['state'] == 'pause':
                    RPC.update(details=title + ' | ' + album,
                               state='Paused | ' + artist,
                               large_image='mpdlogo',
                               small_image='pause', small_text='Paused')
                elif statusout['state'] == 'play':
                    RPC.update(details=title + ' | ' + album,
                               state=timenow + '/' + timeall + ' | ' + artist,
                               large_image='mpdlogo',
                               small_image='play', small_text='Playing')
            else:
                # Playback stopped: no song metadata is available here
                RPC.update(details='Stopped | MPD', state=' ',
                           large_image='mpdlogo',
                           small_image='stop', small_text='Stopped')
            sleep(1)
    except RuntimeError:
        pass
    except (KeyboardInterrupt, SystemExit):
        RPC.clear()
        RPC.close()
        exit(0)
def func():
    print('''Please choose:
0 -> "encrypt"
anything else -> "decrypt"''')
    choice = input()
    if psys() == 'Windows':
        osys('cls')
    else:
        osys('clear')
    if choice == '0':
        e.func()
    else:
        d.func()
def GetConfig():
    platform = psys()
    username = getuser()
    if platform == 'Windows':
        config_dir = f'C:\\Users\\{username}\\AppData\\Roaming\\mpddrp\\'
    else:
        config_dir = f'/home/{username}/.config/mpddrp/'

    default_config = {
        'General': {
            'MPDip': 'localhost',
            'MPDport': 6600,
            'MPDtimeout': 10,
            'MPDtimeout_idle': 0
        }
    }

    if path.exists(config_dir + 'config.json'):
        configfile = open(config_dir + 'config.json', 'r')
        config = loads(configfile.read())
        configfile.close()
        for default_setting in default_config['General']:
            # Reject settings present in the file but left empty
            for setting in config['General']:
                if config['General'][setting] == '':
                    print(f"{TermColors.ERROR}ERROR!!! {setting} doesn't have a value assigned! Fix ASAP! Exiting!{TermColors.END}")
                    exit(-1)
            # Reject settings missing from the file entirely
            try:
                if config['General'][default_setting]:
                    continue
            except KeyError:
                print(f'{TermColors.ERROR}ERROR!!! Please add {default_setting} in your config file! Fix ASAP! Exiting!{TermColors.END}')
                exit(-1)
    else:
        # Portable replacement for the shell call "mkdir -p",
        # which does not work on Windows
        from os import makedirs
        makedirs(config_dir, exist_ok=True)
        configfile = open(config_dir + 'config.json', 'w')
        configfile.write(dumps(default_config, indent=4, sort_keys=True))
        configfile.close()
        config = default_config
    return config
def vmCheck():
    """Check whether this module is being run from within a virtual machine.

    :return: True if a VirtualBox VM is detected, False otherwise
    :rtype: bool
    """
    from platform import system as psys
    if psys() == 'Linux':
        from os import system as osys
        # Old method:
        # out = osys("grep -q \"^flags.*\\ hypervisor\" /proc/cpuinfo")
        # Slightly more reliable method:
        out = osys('dmesg | grep -q "DMI:.*VirtualBox"')
        return out == 0
    else:
        return False
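# Example usage of vmCheck (a minimal sketch):
if vmCheck():
    print('Running inside a VirtualBox guest')
else:
    print('No VirtualBox guest detected (or not running on Linux)')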
def run_command(cmd, myenv, feedback):
    """Run any command using subprocess."""
    # print(" ".join(cmd))
    if psys().lower().startswith('win'):
        proc = subprocess.Popen(
            ' '.join(cmd),
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            env=myenv,
            # Do not use the following for Windows as it seems
            # to create some encoding errors on Windows
            # universal_newlines=True,
            # encoding='utf8',
            # text=True,
        )
    else:
        proc = subprocess.Popen(
            ' '.join(cmd),
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            env=myenv,
            universal_newlines=True,
            encoding='utf8',
            # text=True,  # not available in old versions of Python 3
        )
    stdout = []
    while proc.poll() is None:
        for line in proc.stdout:
            if line != '':
                try:
                    # On Windows the pipe yields bytes: decode them
                    out = line.strip().decode('utf-8')
                except Exception:
                    # On other platforms the pipe already yields text
                    out = '{}'.format(str(line).strip())
                stdout.append(out)
                feedback.pushInfo(out)
    proc.poll()
    returncode = proc.returncode
    return returncode, stdout
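# Example usage of run_command (a sketch: the real `feedback` object is a QGIS
# QgsProcessingFeedback, so a hypothetical minimal stand-in with a pushInfo()
# method is used here; run_command above and its imports are assumed available):
import os


class _PrintFeedback:
    """Hypothetical stand-in for QgsProcessingFeedback."""
    def pushInfo(self, message):
        print(message)


returncode, stdout = run_command(['echo', 'hello'], {**os.environ}, _PrintFeedback())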
def checkParameterValues(self, parameters, context):
    # Check FTP binary
    status, msg = checkFtpBinary()
    if not status:
        return status, msg

    # Check PostgreSQL binary path
    postgresql_binary_path = parameters[self.POSTGRESQL_BINARY_PATH]
    test_bin = 'psql'
    if psys().lower().startswith('win'):
        test_bin += '.exe'
    has_bin_file = os.path.isfile(os.path.join(postgresql_binary_path, test_bin))
    if not has_bin_file:
        return False, tr('The needed PostgreSQL binaries cannot be found in the specified path')

    # Check ZIP archive path
    database_archive_file = self.parameterAsString(parameters, self.ZIP_FILE, context)
    if not os.path.exists(database_archive_file):
        # Fall back to the default package in the system temporary directory
        database_archive_file = os.path.join(tempfile.gettempdir(), 'central_database_package.zip')
        ok = os.path.exists(database_archive_file)
        if not ok:
            return False, tr('The ZIP archive does not exist in the specified path') + ': {0}'.format(database_archive_file)
        parameters[self.ZIP_FILE] = database_archive_file

    # Check connections
    connection_name_central = parameters[self.CONNECTION_NAME_CENTRAL]
    connection_name_clone = parameters[self.CONNECTION_NAME_CLONE]
    ok, uri, msg = getUriFromConnectionName(connection_name_central, True)
    if not ok:
        return False, msg
    ok, uri, msg = getUriFromConnectionName(connection_name_clone, True)
    if not ok:
        return False, msg

    return super(DeployAll, self).checkParameterValues(parameters, context)
def checkParameterValues(self, parameters, context):
    # Check PostgreSQL binary path
    postgresql_binary_path = parameters[self.POSTGRESQL_BINARY_PATH]
    test_bin = 'psql'
    if psys().lower().startswith('win'):
        test_bin += '.exe'
    has_bin_file = os.path.isfile(os.path.join(postgresql_binary_path, test_bin))
    if not has_bin_file:
        return False, tr('The needed PostgreSQL binaries cannot be found in the specified path')

    # Check connection
    connection_name_central = parameters[self.CONNECTION_NAME_CENTRAL]
    ok, uri_central, msg = getUriFromConnectionName(connection_name_central, True)
    if not ok:
        return False, msg

    # Check we can retrieve host, port, user and password for the central
    # database, since they are used inside the clone to connect to the
    # central database with dblink. Service files are not possible yet
    if uri_central.service():
        msg = tr('Central database connection uses a service file. This is not supported yet')
        return False, msg

    # Check input layers
    layers = self.parameterAsLayerList(parameters, self.PG_LAYERS, context)
    layers = [layer for layer in layers if layer.providerType() == 'postgres']
    if not layers:
        return False, tr('At least one PostgreSQL layer is required')

    return super(PackageCentralDatabase, self).checkParameterValues(parameters, context)
def checkParameterValues(self, parameters, context):
    # Check PostgreSQL binary path
    postgresql_binary_path = parameters[self.POSTGRESQL_BINARY_PATH]
    test_bin = 'psql'
    if psys().lower().startswith('win'):
        test_bin += '.exe'
    has_bin_file = os.path.isfile(os.path.join(postgresql_binary_path, test_bin))
    if not has_bin_file:
        return False, tr('The needed PostgreSQL binaries cannot be found in the specified path')

    # Check connection
    connection_name_central = parameters[self.CONNECTION_NAME_CENTRAL]
    ok, uri, msg = getUriFromConnectionName(connection_name_central, True)
    if not ok:
        return False, msg

    return super(PackageCentralDatabase, self).checkParameterValues(parameters, context)
def pg_dump(feedback, postgresql_binary_path, connection_name, output_file_name,
            schemas, tables=None, additional_parameters=None):
    messages = []
    status = False

    # Check binary
    pgbin = 'pg_dump'
    if psys().lower().startswith('win'):
        pgbin += '.exe'
    pgbin = os.path.join(postgresql_binary_path, pgbin)
    if not os.path.isfile(pgbin):
        messages.append(tr('PostgreSQL pg_dump tool cannot be found in specified path'))
        return False, messages

    # Get connection parameters and check we can connect
    status, uri, error_message = getUriFromConnectionName(connection_name, True)
    if not uri or not status:
        messages.append(tr('Error getting database connection information'))
        messages.append(error_message)
        return status, messages

    # Create the pg_dump connection options
    if uri.service():
        cmdo = [
            'service={0}'.format(uri.service())
        ]
    else:
        cmdo = [
            '-h {0}'.format(uri.host()),
            '-p {0}'.format(uri.port()),
            '-d {0}'.format(uri.database()),
            '-U {0}'.format(uri.username()),
        ]

    # Escape pgbin for Windows
    if psys().lower().startswith('win'):
        pgbin = '"' + pgbin + '"'

    # Build the pg_dump command and add the needed options
    cmd = [
        pgbin
    ] + cmdo + [
        '--verbose',
        '--no-acl',
        '--no-owner',
        '-Fp',
        '-f "{0}"'.format(output_file_name)
    ]

    # Add the given schemas
    for s in schemas:
        cmd.append('-n "{0}"'.format(s))

    # Add the given tables
    if tables:
        quote = "'"
        if psys().lower().startswith('win'):
            quote = ''
        for table in tables:
            cmd.append(
                '-t {quote}{table}{quote}'.format(
                    quote=quote,
                    table=table,
                )
            )

    # Add additional parameters
    if additional_parameters:
        cmd = cmd + additional_parameters

    # Run the command
    # print('PG_DUMP = %s' % ' '.join(cmd))
    # Add password if needed
    myenv = {**os.environ}
    if not uri.service():
        myenv = {**{'PGPASSWORD': uri.password()}, **os.environ}
    try:
        returncode, stdout = run_command(cmd, myenv, feedback)
    except Exception as e:
        status = False
        messages.append(tr('Error dumping database into the file') + ' {0}'.format(output_file_name))
        messages.append(tr('Command exception message') + ': {0}'.format(str(e)))
        return status, messages

    if returncode == 0:
        status = True
        messages.append(tr('Database has been successfully dumped') + ' into {0}'.format(output_file_name))
    else:
        messages.append(tr('Error dumping database into the file') + ' {0}'.format(output_file_name))
        messages.append(tr('Command return code') + ': {0}'.format(returncode))
        messages.append(stdout[-1])
        status = False
    return status, messages
def pg_dump(feedback, postgresql_binary_path, connection_name, output_file_name,
            schemas, additional_parameters=None):
    messages = []
    status = False

    # Check binary
    pgbin = 'pg_dump'
    if psys().lower().startswith('win'):
        pgbin += '.exe'
    pgbin = os.path.join(postgresql_binary_path, pgbin)
    if not os.path.isfile(pgbin):
        messages.append(tr('PostgreSQL pg_dump tool cannot be found in specified path'))
        return False, messages

    # Get connection parameters and check we can connect
    status, uri, error_message = getUriFromConnectionName(connection_name, True)
    if not uri or not status:
        messages.append(tr('Error getting database connection information'))
        messages.append(error_message)
        return status, messages

    # Create the pg_dump connection options
    if uri.service():
        cmdo = ['service={0}'.format(uri.service())]
    else:
        cmdo = [
            '-h {0}'.format(uri.host()),
            '-p {0}'.format(uri.port()),
            '-d {0}'.format(uri.database()),
            '-U {0}'.format(uri.username()),
        ]

    # Escape pgbin for Windows
    if psys().lower().startswith('win'):
        pgbin = '"' + pgbin + '"'

    cmd = [pgbin] + cmdo + [
        '--no-acl',
        '--no-owner',
        '-Fp',
        '-f "{0}"'.format(output_file_name)
    ]

    # Add the given schemas
    for s in schemas:
        cmd.append('-n {0}'.format(s))

    # Add additional parameters
    if additional_parameters:
        cmd = cmd + additional_parameters

    # Run the command
    # print(" ".join(cmd))
    try:
        # Add password if needed
        myenv = {**os.environ}
        if not uri.service():
            myenv = {**{'PGPASSWORD': uri.password()}, **os.environ}
        returncode, stdout = run_command(cmd, myenv, feedback)
        # Success is given by the return code, not by the captured output
        status = returncode == 0
        if status:
            messages.append(tr('Database has been successfully dumped') + ' into {0}'.format(output_file_name))
        else:
            messages.append(tr('Error dumping database') + ' into {0}'.format(output_file_name))
    except Exception:
        status = False
        messages.append(tr('Error dumping database') + ' into {0}'.format(output_file_name))

    return status, messages
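# Example invocation of pg_dump (a sketch only: it assumes a QGIS profile with
# a PostgreSQL connection named 'central' and a feedback object exposing
# pushInfo(), so the call is shown commented out):
# status, messages = pg_dump(
#     feedback,
#     '/usr/bin',                # directory containing the pg_dump binary
#     'central',                 # QGIS connection name
#     '/tmp/central_dump.sql',   # output plain-format SQL file
#     schemas=['lizsync'],
# )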
def ftp_sync(ftphost, ftpport, ftpuser, ftppass, localdir, ftpdir, direction, excludedirs, feedback):
    # LizSync config file from ini
    ls = lizsyncConfig()

    # LINUX: use the lftp command line
    if psys().lower().startswith('linux'):
        try:
            cmd = []
            cmd.append('lftp')
            pass_str = ''
            if ftppass:
                pass_str = ':{}'.format(ftppass)
            cmd.append('ftp://{ftpuser}{pass_str}@{ftphost}:{ftpport}'.format(
                ftpuser=ftpuser, pass_str=pass_str, ftphost=ftphost, ftpport=ftpport))
            cmd.append('-e')
            cmd.append('"')
            cmd.append('set ftp:ssl-allow no; set ssl:verify-certificate no; ')
            cmd.append('mirror')
            if direction == 'to':
                cmd.append('-R')
            cmd.append('--verbose')
            cmd.append('--continue')
            cmd.append('--use-cache')
            # cmd.append('-e')  # to delete everything that is not on the server
            for d in excludedirs.split(','):
                ed = d.strip().strip('/') + '/'
                if ed != '/':
                    cmd.append('-x %s' % ed)
            cmd.append('--ignore-time')
            # lftp needs:
            # * from -> ftpdir (remote FTP server) first
            # * with -R (to) -> localdir (computer) before ftpdir (remote FTP server)
            if direction == 'to':
                cmd.append('{} {}'.format(localdir, ftpdir))
            else:
                cmd.append('{} {}'.format(ftpdir, localdir))
            cmd.append('; quit"')
            feedback.pushInfo('LFTP = %s' % ' '.join(cmd))
            myenv = {**os.environ}
            run_command(cmd, myenv, feedback)
        except Exception:
            m = tr('Error during FTP sync')
            return False, m
        finally:
            feedback.pushInfo(tr('FTP sync done'))

    # WINDOWS: use the WinSCP.com tool
    elif psys().lower().startswith('win'):
        try:
            cmd = []
            winscp_bin = os.path.join(ls.variable('binaries/winscp'), 'WinSCP.com').replace('\\', '/')
            cmd.append('"' + winscp_bin + '"')
            cmd.append('/ini=nul')
            cmd.append('/console')
            cmd.append('/command')
            cmd.append('"option batch off"')
            cmd.append('"option transfer binary"')
            cmd.append('"option confirm off"')
            pass_str = ''
            if ftppass:
                pass_str = ':{}'.format(ftppass)
            cmd.append('"open ftp://{ftpuser}{pass_str}@{ftphost}:{ftpport}"'.format(
                ftpuser=ftpuser, pass_str=pass_str, ftphost=ftphost, ftpport=ftpport))
            cmd.append('"')
            cmd.append('synchronize')
            way = 'local'
            if direction == 'to':
                way = 'remote'
            cmd.append(way)
            # WinSCP always needs the local directory (computer)
            # BEFORE the FTP server remote directory
            cmd.append('{} {}'.format(localdir, ftpdir))
            cmd.append('-mirror')
            # cmd.append('-delete')  # to delete "to" side files not present in the "from" side
            cmd.append('-criteria=time')
            cmd.append('-resumesupport=on')
            ex = []
            for d in excludedirs.split(','):
                ed = d.strip().strip('/') + '/'
                if ed != '/':
                    # For a directory, no need to put * after;
                    # just use the / at the end, for example: data/
                    ex.append('%s' % ed)
            if ex:
                # e.g. | 2010*; 2011*
                # double '""' needed because it is inside the already quoted synchronize subcommand
                cmd.append('-filemask=""|' + ';'.join(ex) + '""')
            cmd.append('"')
            cmd.append('"close"')
            cmd.append('"exit"')
            infomsg = 'WinSCP = %s' % ' '.join(cmd)
            feedback.pushInfo(infomsg.replace(':{}@'.format(ftppass), ':********@'))
            myenv = {**os.environ}
            run_command(cmd, myenv, feedback)
        except Exception:
            m = tr('Error during FTP sync')
            return False, m
        finally:
            feedback.pushInfo(tr('FTP sync done'))

    return True, 'Success'
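# Example invocation of ftp_sync (a sketch only: host, credentials and paths
# are hypothetical, and a QGIS feedback object is assumed):
# ok, message = ftp_sync(
#     'ftp.example.org', 21, 'ftpuser', 'secret',
#     '/home/user/project_mobile/', '/remote/project/',
#     'to', 'data/', feedback)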
def get_const():
    global const
    const = {
        # General
        'auto_sort': True,
        'auto_apply': True,
        'autoprint_atselect': False,
        'autoprint_atsort': False,
        'autoprint_atchange': False,

        # Pandas Display
        'pandas_max_rows': 1000,
        'pandas_max_cols': 50,
        'pandas_disp_width': 250,
        'pandas_hide_warnings': True,

        # Directories
        'dir_sep': '/',
        'data_subdir': 'data',
        'attr_subdir': 'attributes',
        'fnames': {
            'segion': 'superregion.txt',
            'region': 'region.txt',
            'area': 'area.txt',
            'provloc': 'localisation.yml',
        },

        # Encodings
        'histload_prim_enc': 'utf-8',   # Try to load history with this...
        'histload_secn_enc': 'ansi',    # ... and use this if the first couldn't do it
        'histsave_enc': 'ansi',         # Always save history in this encoding
        'sprd_enc': 'utf-8-sig',        # Spreadsheets
        'locl_enc': 'utf-8-sig',        # Localisation
        'all_encodings': ['ansi', 'utf-8', 'utf-8-sig'],

        # Appearance
        'preceding_blank_line': False,
        'show_save_toggle': True,
        'show_save_freq': 350,  # Message every _ provinces saved
        'empty_marker': '',
        'show_save_msg': 'Progress: ',
        'error_prefix': '(Error) ',
        'input_prefix': '[Editor] > ',
        'program_header': 'EU4 Province Editor v0.1',
        'drop_from_print': [
            'filename',
            'discovered',
            # 'modifiers',
        ],

        # Value Related
        'shorten_region_names': True,
        'multival_sep': '&',
        'default_name': 'NoNameFound',
        'default_area': 'NoArea',
        'default_region': 'NoRegion',
        'default_segion': 'NoSegion',
        'value_empty': ['xxx', 'nan', 'no', '0', 'none'],  # lowercase

        # DataFrames / SpreadSheets
        'index_column': 'id',
        'auto_sort_by': ['segion', 'region', 'area', 'tax'],
        'column_order': [
            'id', 'name', 'capital', 'area', 'region', 'segion', 'cores',
            'claims', 'owner', 'cntrl', 'culture', 'religion', 'hre', 'tax',
            'prod', 'manp', 'trade_goods', 'city', 'cost', 'fort',
            'discovered', 'modifiers', 'filename', 'group',
        ],

        # User's functions
        'legal_nonexit_calls': [
            'load', 'save', 'apply', 'operate_on', 'select', 'subselect',
            'append', 'sort', 'set', 'inprov', 'print', 'clear', 'help',
        ],
        'legal_exit_calls': ['exit', 'quit', 'leave'],

        # Localisation
        'locl_key_prefix': 'PROV',
        'lcl_languages': [
            'l_english',
            'l_spanish',
            'l_french',
            'l_german',
        ],

        # Region files
        'none': 'none',
        'skip_at_region_load': '{=}colorareas',
        'regioning_names_suffiexes': {
            'area': '_area',
            'region': '_region',
            'segion': '_superregion',
        },

        # Province Files
        'indent': ' ' * 4,
        'historyfile_keys': {
            'capital': 'capital',
            'cores': 'add_core',
            'claims': 'add_claim',
            'owner': 'owner',
            'cntrl': 'controller',
            'culture': 'culture',
            'religion': 'religion',
            'hre': 'hre',
            'tax': 'base_tax',
            'prod': 'base_production',
            'manp': 'base_manpower',
            'trade_goods': 'trade_goods',
            'city': 'is_city',
            'ntv_size': 'native_size',
            'ntv_ferc': 'native_ferocity',
            'ntv_hstl': 'native_hostileness',
            'cost': 'extra_cost',
            'fort': 'fort_15th',
            'discovered': 'discovered_by',
            'modifiers': ['add_permanent_province_modifier', 'name'],
        },
        'province_attr_keys': {
            'id': 'id',
            'nm': 'name',
            'fn': 'filename',
            'gr': 'group',
            'ar': 'area',
            'rg': 'region',
            'sg': 'segion',
        },
        'additional_save_info': {
            # Lines added to every scope with the specified name
            'add_permanent_province_modifier': 'duration = -1',
        },
    }  # End of const

    ################################
    # Derived values
    cwd = getcwd() + const['dir_sep']
    bck = '\\'
    if bck in cwd:
        cwd = const['dir_sep'].join(cwd.split(bck))
    const['cwd'] = cwd
    if psys() == 'Windows':
        const['terminal_clear'] = 'cls'
    else:
        const['terminal_clear'] = 'clear'
    ################################

    return const
intro = '''---------------------------------
Version 2 2020/03/22
1. Fixed the "iong" issue
---------------------------------'''

# Pinyin -> keyboard key mappings:
# whole syllables, then initials, then finals
ecs = [
    {'zhi': '5', 'chi': 't', 'shi': 'g', 'ri': 'b', 'zi': 'y', 'ci': 'h',
     'si': 'n', 'ying': 'u/', 'yuan': 'm0', 'yue': 'm,', 'yin': 'up',
     'yun': 'mp', 'yi': 'u', 'wu': 'j', 'yu': 'm', 'ye': 'u,'},
    {'zh': '5', 'ch': 't', 'sh': 'g', 'b': '1', 'p': 'q', 'm': 'a', 'f': 'z',
     'd': '2', 't': 'w', 'n': 's', 'l': 'x', 'g': 'e', 'k': 'd', 'h': 'c',
     'j': 'r', 'q': 'f', 'x': 'v', 'r': 'b', 'z': 'y', 'c': 'h', 's': 'n',
     'y': 'u', 'w': 'j'},
    {'ang': ';', 'eng': '/', 'ing': 'u/', 'ong': 'j/', 'ai': '9', 'ei': 'o',
     'ui': 'jo', 'ao': 'l', 'ou': '.', 'iu': 'u.', 'ie': 'u,', 'ue': 'm,',
     'er': '-', 'an': '0', 'en': 'p', 'in': 'up', 'un': 'jp', 'ün': 'mp',
     'a': '8', 'o': 'i', 'e': 'k', 'i': 'u', 'u': 'j', 'ü': 'm', 'ê': ','}
]
tones = {'1': ' ', '2': '6', '3': '3', '4': '4', ' ': '7'}

from platform import system as psys
if psys() == 'Windows':
    from os import system as osys
from os import getcwd
from random import shuffle


def func():
    print('''Please choose:
0 -> quit
1 -> enter multiple lines
anything else -> enter a single line''')
    choice = input()
    print('''Tips:
Enter pinyin, using 1, 2, 3, 4 and a space to mark the first, second,
third, fourth and neutral tones. The neutral tone must be written as a
space, and only as a space. Do not put extra spaces between syllables.
Use full-width punctuation. End every line with "|".
For example:
这是一句不长不短的话。
zhe4shi4yi1ju4bu4chang2bu4duan3de hua4。|''')
    if choice == '0':
        exit()
    elif choice == '1':
def func():
    c = struct()
    wb = op.load_workbook(c.templateName)
    ws = wb[c.templateSheet]
    wb.close()

    # Map column headers (row 2) to column letters
    sid = dict()
    i = 'A'
    ws['A1'] = c.title.format(month=dt.datetime.now().strftime('%m'),
                              day=dt.datetime.now().strftime('%d'))
    for cell in ws[2]:
        sid[cell.value] = i
        i = chr(ord(i) + 1)

    # Map student names (the '姓名' column) to row numbers
    nid, i = dict(), 0
    for name in ws[sid['姓名']]:
        i += 1
        if i <= 2:
            continue
        nid[name.value] = i

    fp = getcwd() + ps + 'input.txt'
    inf = open(fp, 'w+')
    inf.close()
    print('''Please open %s and type the data into that file.
In general, the first line should contain an @.
If the data has sequence numbers, add an I after the @;
if the data has extra information, add a C after the @.
(Both may appear together, case-insensitive; if the result is @I/@i, it may be omitted.)
If none of the above applies, the first line should contain exactly one @.
When you are done, ''' % fp)

    nc, ni = True, True
    if psys() == 'Windows':
        osys('pause')
    else:
        print('type anything here and press Enter to continue...')
        input()

    with open(fp, 'r', encoding='utf-8') as f:
        mstr = f.readline().strip()
        # Parse the optional @I/@C header line
        if mstr and mstr[0] == '@':
            mstr = mstr.upper()
            if 'C' in mstr:
                nc = False
            if 'I' in mstr:
                ni = False
            mstr = f.readline().strip()
        else:
            ni = False
        while mstr:
            try:
                if mstr == '':
                    continue
                if nc and ni:
                    ws['%s%d' % (sid[c.subject], nid[mstr])] = c.correct
                else:
                    # Find the span of Chinese characters (the name)
                    ns = 0
                    if not ni:
                        ns = -1
                        for i in range(0, len(mstr)):
                            ch = mstr[i]
                            if is_Chinese(ch):
                                if ns == -1:
                                    ns = i
                                    break
                    else:
                        ns = 0
                    if not nc:
                        ne = -1
                        for i in range(1, len(mstr) + 1):
                            ch = mstr[-i]
                            if is_Chinese(ch):
                                if ne == -1:
                                    ne = -i + 1
                                    break
                    else:
                        ne = len(mstr)
                    ws['%s%d' % (sid[c.subject], nid[mstr[ns:ne]])] = c.correct if nc else mstr[ne:]
            except Exception:
                err.append(mstr)
            mstr = f.readline().strip()

    if path.exists(fp):
        remove(fp)
    wb.save(c.outputFileName)
    print('The file has been written to %s' % (getcwd() + ps + c.outputFileName))
    if len(err) != 0:
        print('The following entries could not be processed:')
        for mstr in err:
            print(mstr)
def processAlgorithm(self, parameters, context, feedback):
    """
    Here is where the processing itself takes place.
    """
    output = {
        self.OUTPUT_STATUS: 0,
        self.OUTPUT_STRING: ''
    }

    database_archive_file = self.parameterAsString(parameters, self.ZIP_FILE, context)
    connection_name_central = parameters[self.CONNECTION_NAME_CENTRAL]
    connection_name_clone = parameters[self.CONNECTION_NAME_CLONE]
    postgresql_binary_path = parameters[self.POSTGRESQL_BINARY_PATH]
    recreate_clone_server_id = self.parameterAsBool(
        parameters,
        self.RECREATE_CLONE_SERVER_ID,
        context
    )

    # Store parameters
    ls = lizsyncConfig()
    ls.setVariable('general/database_archive_file', database_archive_file)
    ls.setVariable('postgresql:central/name', connection_name_central)
    ls.setVariable('postgresql:clone/name', connection_name_clone)
    ls.setVariable('binaries/postgresql', postgresql_binary_path)
    ls.save()

    # Check archive
    if not os.path.exists(database_archive_file):
        m = tr('Package not found') + ' : %s' % database_archive_file
        raise QgsProcessingException(m)

    msg = ''

    # Uncompress package
    feedback.pushInfo(tr('UNCOMPRESS PACKAGE') + ' {0}'.format(database_archive_file))
    import zipfile
    dir_path = os.path.dirname(os.path.abspath(database_archive_file))
    try:
        with zipfile.ZipFile(database_archive_file) as t:
            t.extractall(dir_path)
            feedback.pushInfo(tr('Package uncompressed successfully'))
    except Exception:
        m = tr('Package extraction error')
        raise QgsProcessingException(m)
    feedback.pushInfo('')

    # Check needed files
    feedback.pushInfo(tr('CHECK UNCOMPRESSED FILES'))
    archive_files = [
        '01_before.sql',
        '02_predata.sql',
        '02_data.sql',
        '03_after.sql',
        '04_lizsync.sql',
        'sync_id.txt',
        'sync_tables.txt'
    ]
    for f in archive_files:
        if not os.path.exists(os.path.join(dir_path, f)):
            m = tr('One mandatory file has not been found in the ZIP archive') + ' - %s' % f
            raise QgsProcessingException(m)
    feedback.pushInfo(tr('All the mandatory files have been successfully found'))
    feedback.pushInfo('')

    # CLONE DATABASE
    # Check if the clone database already has a lizsync structure installed.
    # Get existing data to avoid recreating server_id for this machine
    feedback.pushInfo(tr('GET EXISTING CLONE DATABASE ID TO AVOID RECREATING SERVER_ID FOR THIS CLONE'))
    clone_id = None
    clone_name = None
    sql = '''
        SELECT table_name
        FROM information_schema.tables
        WHERE table_name = 'server_metadata' AND table_schema = 'lizsync';
    '''
    header, data, rowCount, ok, error_message = fetchDataFromSqlQuery(
        connection_name_clone,
        sql
    )
    has_sync = False
    if ok:
        for a in data:
            if a[0] == 'server_metadata':
                has_sync = True
                feedback.pushInfo(tr('Clone database already has sync metadata table'))
    else:
        raise QgsProcessingException(error_message)

    # Get the existing server_id
    if has_sync:
        sql = '''
            SELECT server_id, server_name
            FROM lizsync.server_metadata
            LIMIT 1;
        '''
        header, data, rowCount, ok, error_message = fetchDataFromSqlQuery(
            connection_name_clone,
            sql
        )
        if ok:
            for a in data:
                clone_id = a[0]
                clone_name = a[1]
                feedback.pushInfo(tr('Clone metadata are already set'))
                feedback.pushInfo(tr('* server id') + ' = {0}'.format(clone_id))
                feedback.pushInfo(tr('* server name') + ' = {0}'.format(clone_name))
        else:
            raise QgsProcessingException(error_message)
    feedback.pushInfo('')

    # Get the last synchronization and check that no newer bi-directional
    # (partial sync) or archive deployment (full sync)
    # has been made since the last deployment
    if has_sync and clone_id:
        feedback.pushInfo(tr('CHECK LAST SYNCHRONIZATION'))
        with open(os.path.join(dir_path, 'sync_id.txt')) as f:
            sync_id = f.readline().strip()
        if not sync_id:
            m = tr('No synchronization ID has been found in the file sync_id.txt')
            raise QgsProcessingException(m)
        sql = '''
            SELECT sync_id
            FROM lizsync.history
            WHERE True
            AND sync_time > (
                SELECT sync_time
                FROM lizsync.history
                WHERE sync_id::text = '{sync_id}'
            )
            AND server_from::text = (
                SELECT server_id::text
                FROM lizsync.server_metadata
                LIMIT 1
            )
            AND '{clone_id}' = ANY (server_to)
        '''.format(
            sync_id=sync_id,
            clone_id=clone_id
        )
        last_sync = None
        header, data, rowCount, ok, error_message = fetchDataFromSqlQuery(
            connection_name_central,
            sql
        )
        if not ok:
            m = error_message + ' ' + sql
            raise QgsProcessingException(m)
        for a in data:
            last_sync = a[0]
        if last_sync:
            m = tr(
                'A bi-directional synchronization has already been made on this clone'
                ' since the deployment of this package. Aborting the current deployment.'
            )
            raise QgsProcessingException(m)
        else:
            feedback.pushInfo(tr(
                'No previous bi-directional synchronization found since the deployment'
                ' of this package. Everything is ok.'
            ))

    # Get the synchronized tables from the text file
    feedback.pushInfo(tr('GET THE LIST OF SYNCHRONIZED TABLES FROM THE FILE sync_tables.txt'))
    with open(os.path.join(dir_path, 'sync_tables.txt')) as f:
        tables = f.readline().strip()
    if tables == '':
        m = tr('No table to synchronize')
        raise QgsProcessingException(m)
    feedback.pushInfo(tr('List of tables found in sync_tables.txt') + ': %s' % tables)
    feedback.pushInfo('')

    # CLONE DATABASE
    # Run SQL scripts from the archive with the psql command
    feedback.pushInfo(tr('RUN SQL SCRIPT FROM THE DECOMPRESSED ZIP FILE'))
    sql_files = [
        os.path.join(dir_path, '01_before.sql'),
        os.path.join(dir_path, '02_predata.sql'),
        os.path.join(dir_path, '02_data.sql'),
        os.path.join(dir_path, '03_after.sql'),
        os.path.join(dir_path, '04_lizsync.sql'),
    ]
    for f in sql_files:
        if not os.path.exists(f):
            m = tr('SQL files not found') + ': {}'.format(f)
            raise QgsProcessingException(m)

    # Add an additional SQL file if present
    last_sql = os.path.join(dir_path, '99_last.sql')
    if os.path.exists(last_sql):
        sql_files.append(last_sql)

    # Build clone database connection parameters for psql
    status, uri, error_message = getUriFromConnectionName(connection_name_clone)
    if not status or not uri:
        m = tr('Error getting database connection information')
        m += ' ' + error_message
        raise QgsProcessingException(m)
    if uri.service():
        cmdo = [
            'service={0}'.format(uri.service())
        ]
    else:
        cmdo = [
            '-h {0}'.format(uri.host()),
            '-p {0}'.format(uri.port()),
            '-d {0}'.format(uri.database()),
            '-U {0}'.format(uri.username()),
        ]

    # Build the psql command to run
    pgbin = 'psql'
    if psys().lower().startswith('win'):
        pgbin += '.exe'
    pgbin = os.path.join(
        postgresql_binary_path,
        pgbin
    )
    if psys().lower().startswith('win'):
        pgbin = '"' + pgbin + '"'

    # Run SQL files
    for sql_file in sql_files:
        try:
            short_file_name = sql_file.replace(dir_path, '')
            feedback.pushInfo(tr('Loading file') + ' {0} ...'.format(short_file_name))
            cmd = [
                pgbin
            ] + cmdo + [
                '-v "ON_ERROR_STOP=1"',
                '--no-password',
                '-f "{0}"'.format(sql_file)
            ]
            # feedback.pushInfo('PSQL = %s' % ' '.join(cmd))

            # Add password if needed
            myenv = {**os.environ}
            if not uri.service():
                if not uri.password():
                    password = get_connection_password_from_ini(uri)
                    uri.setPassword(password)
                myenv = {**{'PGPASSWORD': uri.password()}, **os.environ}

            returncode, stdout = run_command(cmd, myenv, feedback)
            if returncode != 0:
                m = tr('Error loading file') + ' {0}'.format(short_file_name)
                raise QgsProcessingException(m)
            msg += '* {0} -> OK'.format(short_file_name)
            feedback.pushInfo('* {0} has been loaded'.format(sql_file.replace(dir_path, '')))

            # Delete SQL scripts
            if os.path.exists(sql_file):
                os.remove(sql_file)
        except Exception as e:
            m = tr('Error loading file') + ' {0}'.format(short_file_name)
            m += ' - Details: ' + str(e)
            raise QgsProcessingException(m)
        finally:
            feedback.pushInfo('')
    feedback.pushInfo('')

    # CLONE DATABASE
    # Add server_id in lizsync.server_metadata if needed
    if not clone_id or recreate_clone_server_id:
        # Generate a new ID
        feedback.pushInfo(tr('ADDING THE SERVER ID IN THE CLONE metadata table'))
        sql = '''
            DELETE FROM lizsync.server_metadata;
            INSERT INTO lizsync.server_metadata (server_name)
            VALUES ( concat('clone', ' ', md5((now())::text) ) )
            RETURNING server_id, server_name
        '''
    else:
        # Keep the already present ID
        feedback.pushInfo(tr('KEEP THE SERVER ID IN THE CLONE metadata table'))
        sql = '''
            DELETE FROM lizsync.server_metadata;
            INSERT INTO lizsync.server_metadata (server_id, server_name)
            VALUES ( '{0}', '{1}' )
            RETURNING server_id, server_name
        '''.format(
            clone_id,
            clone_name
        )
    header, data, rowCount, ok, error_message = fetchDataFromSqlQuery(
        connection_name_clone,
        sql
    )
    if ok:
        for a in data:
            clone_id = a[0]
            clone_name = a[1]
            feedback.pushInfo(tr('Server metadata in the clone database'))
            feedback.pushInfo(tr('* server id') + ' = {0}'.format(clone_id))
            feedback.pushInfo(tr('* server name') + ' = {0}'.format(clone_name))
    else:
        m = tr('Error while adding server id in clone metadata table')
        m += ' ' + error_message
        raise QgsProcessingException(m)
    feedback.pushInfo('')

    # CENTRAL DATABASE
    # Add an item in lizsync.synchronized_tables
    # to know afterwards which tables to use when performing the sync
    feedback.pushInfo(tr('ADDING THE LIST OF SYNCHRONIZED TABLES FOR THIS CLONE IN THE CENTRAL DATABASE'))
    sql = '''
        INSERT INTO lizsync.synchronized_tables AS s
        (server_id, sync_tables)
        VALUES (
            '{0}',
            jsonb_build_array( '{1}' )
        )
        ON CONFLICT ON CONSTRAINT synchronized_tables_pkey
        DO UPDATE
        SET sync_tables = EXCLUDED.sync_tables || s.sync_tables
        ;
    '''.format(
        clone_id,
        "', '".join([a.strip() for a in tables.split(',')])
    )
    # feedback.pushInfo(sql)
    header, data, rowCount, ok, error_message = fetchDataFromSqlQuery(
        connection_name_central,
        sql
    )
    if ok:
        msg = tr('List of synchronized tables added in central database for this clone')
        feedback.pushInfo(msg)
    else:
        m = tr('Error while adding the synchronized tables in the central database')
        m += ' ' + error_message
        raise QgsProcessingException(m)
    feedback.pushInfo('')

    # CLONE DATABASE
    # Add the foreign server and foreign schemas for the synced schemas.
    # We need full connection params: host, port, dbname, user, password
    ok, uri, msg = getUriFromConnectionName(connection_name_central, True)
    if not ok:
        raise QgsProcessingException(msg)
    # Get the password if not found in the URI
    if not uri.password():
        password = get_connection_password_from_ini(uri)
        uri.setPassword(password)
    if not uri.password():
        msg = tr('No password found for the central database connection!')
        raise QgsProcessingException(msg)

    # Add the foreign server in the clone database
    feedback.pushInfo(tr('ADDING THE FOREIGN SERVER AND SCHEMAS ID IN THE CLONE DATABASE'))
    sql = '''
        SELECT lizsync.create_central_server_fdw('{0}','{1}','{2}','{3}', '{4}');
        SELECT lizsync.import_central_server_schemas();
    '''.format(
        uri.host(),
        uri.port(),
        uri.database(),
        uri.username(),
        uri.password()
    )
    # feedback.pushInfo(sql)
    header, data, rowCount, ok, error_message = fetchDataFromSqlQuery(
        connection_name_clone,
        sql
    )
    if ok:
        feedback.pushInfo(tr('Foreign server and schemas have been added in the clone database'))
    else:
        m = tr('Error while adding the foreign server and schemas in the clone database')
        m += ' ' + error_message
        raise QgsProcessingException(m)
    feedback.pushInfo('')

    # CENTRAL DATABASE
    # Add the clone id in the lizsync.history line
    # corresponding to this deployed package
    feedback.pushInfo(tr('ADD CLONE ID IN THE CENTRAL DATABASE HISTORY ITEM FOR THIS ARCHIVE DEPLOYMENT'))
    with open(os.path.join(dir_path, 'sync_id.txt')) as f:
        sync_id = f.readline().strip()
    sql = '''
        UPDATE lizsync.history
        SET server_to = array_append(server_to, '{0}')
        WHERE sync_id = '{1}'
        ;
    '''.format(
        clone_id,
        sync_id
    )
    header, data, rowCount, ok, error_message = fetchDataFromSqlQuery(
        connection_name_central,
        sql
    )
    feedback.pushInfo(sql)
    if ok:
        msg = tr('History item has been successfully updated for this archive deployment in the central database')
        feedback.pushInfo(msg)
    else:
        m = tr('Error while updating the history item for this archive deployment')
        m += ' ' + error_message
        raise QgsProcessingException(m)
    feedback.pushInfo('')

    # Delete txt files
    other_files = [o for o in archive_files if not o.endswith('.sql')]
    for a in other_files:
        f = os.path.join(dir_path, a)
        if os.path.exists(f):
            os.remove(f)

    output = {
        self.OUTPUT_STATUS: 1,
        self.OUTPUT_STRING: tr('The central database ZIP package has been successfully deployed to the clone')
    }
    return output
# -*- coding:utf-8 -*-
import openpyxl as op
from platform import system as psys
from os import system as osys, getcwd, path, remove
import configparser as cp
import datetime as dt

# Path separator for the current platform
ps = '\\' if psys() == 'Windows' else '/'


class struct:
    """Configuration values read from config.ini."""

    def __init__(self):
        c = cp.ConfigParser()
        c.read(path.dirname(path.realpath(__file__)) + '/config.ini',
               encoding='utf-8')
        self.subject = c['DEFAULT']['subject']
        self.outputFileName = c['DEFAULT']['outputFileName']
        self.templateName = c['DEFAULT']['templateName']
        self.templateSheet = c['DEFAULT']['templateSheet']
        self.correct = c['DEFAULT']['correct']
        self.title = c['DEFAULT']['title']


# Entries that could not be processed
err = list()


def is_Chinese(ch):
    """Return True if ch is a CJK unified ideograph."""
    return '\u4e00' <= ch <= '\u9fff'
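# Example usage of is_Chinese (a minimal sketch):
assert is_Chinese('汉')
assert not is_Chinese('A')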