def configure(self, interpretor=None):
    """Install the nginx configuration for the application.

    Copies the bundled nginx.conf and vhost template into /etc/nginx,
    substitutes the FastCGI interpretor address into the vhost (when an
    interpretor is given), resets the log files, and hands ownership of
    the nginx runtime paths to the application user.

    :param interpretor: optional interpretor whose get_address() result
        replaces the FASTCGI_INTERPRETOR_ADDRESS placeholder.
    """
    # Copy nginx configuration
    nginx_config_file = os.path.join(
        self.application.get("source_directory"),
        "php", "frontend", "nginx", "nginx.conf"
    )
    shutil.copyfile(nginx_config_file, "/etc/nginx/nginx.conf")

    # Copy vhost configuration
    shutil.copyfile(self.get_vhost_filepath(), "/etc/nginx/vhost.conf")
    if interpretor is not None:
        address = interpretor.get_address()
        replace("/etc/nginx/vhost.conf", "FASTCGI_INTERPRETOR_ADDRESS", address)

    # Clean log files.  FIX: the original used `map(os.unlink, ...)` for
    # its side effect; map() is lazy on Python 3, so nothing was deleted.
    logs_directory = "/var/log/nginx"
    if not os.path.exists(logs_directory):
        os.makedirs(logs_directory)
    for entry in os.listdir(logs_directory):
        os.unlink(os.path.join(logs_directory, entry))
    for log_file in ["access.log", "error.log"]:
        log_file_path = os.path.join(logs_directory, log_file)
        open(log_file_path, "a").close()

    # Fix user rights
    open("/run/nginx.pid", "a").close()
    os.system(
        "chown -R %s /etc/nginx /var/log/nginx /var/lib/nginx /run/nginx.pid"
        % self.application.get("user"))
def discrete_markov_glucose(time_obj, switch_interval=1, gluc_on_value=1, gluc_off_value=0, p_gluc=0.5, p_gluc_given_on=0.9, p_gluc_given_off=0.2):
    """Simulate glucose levels as a two-state discrete Markov chain.

    The initial state is drawn from Bernoulli(p_gluc); for each interval
    yielded by `time_obj.iter_interval(switch_interval)` the current
    state is repeated across the interval and then transitions with
    probability p_gluc_given_on (if currently on) or p_gluc_given_off
    (if currently off).  The binary states are finally mapped onto
    gluc_on_value / gluc_off_value.
    """
    levels = []
    # prior draw for the initial glucose state (1 = on, 0 = off)
    state = np.random.binomial(1, p_gluc)
    for interval in time_obj.iter_interval(switch_interval):
        levels.extend([state] * len(interval))
        # Markov transition: next-state probability depends on the
        # current state only
        transition_p = p_gluc_given_on if state == 1 else p_gluc_given_off
        state = np.random.binomial(1, transition_p)
    # map the binary states onto the designated on/off values
    levels = utils.replace(levels, 1, gluc_on_value)
    return utils.replace(levels, 0, gluc_off_value)
def configure(self, interpretor=None):
    """Install the nginx configuration for the application.

    Copies nginx.conf and the vhost template into /etc/nginx, points the
    vhost at the FastCGI interpretor when one is given, resets the log
    files and chowns the nginx runtime paths to the application user.

    :param interpretor: optional interpretor whose get_address() result
        replaces the FASTCGI_INTERPRETOR_ADDRESS placeholder.
    """
    # Copy nginx configuration
    nginx_config_file = os.path.join(
        self.application.get('source_directory'),
        'php', 'frontend', 'nginx', 'nginx.conf')
    shutil.copyfile(nginx_config_file, '/etc/nginx/nginx.conf')

    # Copy vhost configuration
    shutil.copyfile(self.get_vhost_filepath(), '/etc/nginx/vhost.conf')
    if interpretor is not None:
        address = interpretor.get_address()
        replace('/etc/nginx/vhost.conf', 'FASTCGI_INTERPRETOR_ADDRESS', address)

    # Clean log files.  FIX: an explicit loop replaces the original
    # `map(os.unlink, ...)`, which is a no-op under Python 3's lazy map().
    logs_directory = '/var/log/nginx'
    if not os.path.exists(logs_directory):
        os.makedirs(logs_directory)
    for entry in os.listdir(logs_directory):
        os.unlink(os.path.join(logs_directory, entry))
    for log_file in ['access.log', 'error.log']:
        log_file_path = os.path.join(logs_directory, log_file)
        open(log_file_path, 'a').close()

    # Fix user rights
    open('/run/nginx.pid', 'a').close()
    os.system(
        'chown -R %s /etc/nginx /var/log/nginx /var/lib/nginx /run/nginx.pid'
        % self.application.get('user'))
def apply(self, deduction):
    """Right-negation rule: for each Not formula on the right side of
    the sequent, move its operand onto the left side and drop the
    negation from the right.

    Yields one (kind, position, Deduction) tuple per applicable formula.
    """
    for position, formula in enumerate(deduction.right):
        if not isinstance(formula, Not):
            continue
        # append the un-negated operand at the end of the left side
        left = utils.replace(deduction.left, len(deduction.left), [formula.left])
        # remove the Not formula from the right side
        right = utils.replace(deduction.right, position, [])
        yield (self.kind, position, Deduction(list(left), list(right)))
def configure(self, interpretor=None):
    """Configure Apache as the application front-end.

    Installs the vhost and security configs, wires in the FastCGI
    interpretor address, exports Apache environment variables to
    /etc/profile, resets run/lock/log directories, enables configured
    modules and fixes ownership for the application user.

    :param interpretor: optional interpretor whose get_address() result
        replaces the FASTCGI_INTERPRETOR_ADDRESS placeholder.
    """
    # Set apache virtual host
    vhost_directory = '/etc/apache2/sites-enabled'
    confs_directory = '/etc/apache2/conf-available'
    # FIX: explicit loop — the original `map(os.unlink, ...)` is a
    # no-op under Python 3 because map() is lazy there.
    for entry in os.listdir(vhost_directory):
        os.unlink(os.path.join(vhost_directory, entry))
    vhost_path = os.path.join(vhost_directory, 'tsuru-vhost.conf')
    security_dst_file = os.path.join(confs_directory, 'security.conf')
    shutil.copyfile(self.get_vhost_filepath(), vhost_path)
    security_src_file = os.path.join(
        self.application.get('source_directory'),
        'php', 'frontend', 'apache', 'security.conf')
    shutil.copyfile(security_src_file, security_dst_file)

    # Set interpretor address if there's any
    if interpretor is not None:
        address = interpretor.get_address()
        replace(vhost_path, 'FASTCGI_INTERPRETOR_ADDRESS', address)
        os.system('a2enmod proxy_fcgi')

    # Empty `ports.conf` file
    open('/etc/apache2/ports.conf', 'w').close()

    # Set Apache environment variables accessible when running through cmd
    with open('/etc/profile', 'a') as profile_file:
        profile_file.write(
            "\n"
            "export APACHE_RUN_USER=%s\n"
            "export APACHE_RUN_GROUP=%s\n"
            "export APACHE_PID_FILE=/var/run/apache2/apache2.pid\n"
            "export APACHE_RUN_DIR=/var/run/apache2\n"
            "export APACHE_LOCK_DIR=/var/lock/apache2\n"
            "export APACHE_LOG_DIR=/var/log/apache2\n"
            "sudo chmod 777 /dev/stdout /dev/stderr\n"
            % (self.application.get('user'), self.application.get('user')))

    # Create directories
    logs_directory = '/var/log/apache2'
    directories = [logs_directory, '/var/lock/apache2', '/var/run/apache2']
    for directory in directories:
        if not os.path.exists(directory):
            os.makedirs(directory)
    # FIX: explicit loop for the same lazy-map reason as above
    for entry in os.listdir(logs_directory):
        os.unlink(os.path.join(logs_directory, entry))
    for log_file in ['access.log', 'error.log']:
        log_file_path = os.path.join(logs_directory, log_file)
        open(log_file_path, 'a').close()

    # Configure modules if needed
    for module in self.configuration.get('modules', []):
        os.system('a2enmod %s' % module)

    # Fix user rights
    os.system(
        'chown -R %s /etc/apache2 /var/run/apache2 /var/log/apache2 /var/lock/apache2'
        % self.application.get('user'))
def apply(self, deduction):
    """Right-conjunction rule: split `A and B` on the right side of the
    sequent into two branch deductions, one per conjunct.

    Yields (kind, position, left-branch Deduction, right-branch
    Deduction) tuples, one per And formula found on the right.
    """
    for position, formula in enumerate(deduction.right):
        if not isinstance(formula, And):
            continue
        branch_a = utils.replace(deduction.right, position, [formula.left])
        branch_b = utils.replace(deduction.right, position, [formula.right])
        yield (self.kind, position,
               Deduction(deduction.left, list(branch_a)),
               Deduction(deduction.left, list(branch_b)))
def configure(self, frontend):
    """Configure the PHP-FPM interpretor for every PHP version in
    php_versions: install pool/fpm configs, set the listen address,
    touch log/env files, and fix ownership for the application user.

    :param frontend: front-end object; if it supports unix-socket
        proxying, a unix socket is used, otherwise TCP 127.0.0.1:9000.
    """
    # If frontend supports unix sockets, use them by default
    self.socket_address = 'unix:/var/run/php/fpm.sock'
    if not frontend.supports_unix_proxy():
        self.socket_address = '127.0.0.1:9000'

    for version in php_versions:
        # Clear pre-configured pools.  FIX: explicit loop — the original
        # `map(os.unlink, ...)` is a no-op under Python 3's lazy map().
        pool_directory = '/etc/php/{}/fpm/pool.d'.format(version)
        for entry in os.listdir(pool_directory):
            os.unlink(os.path.join(pool_directory, entry))

        templates_mapping = {
            'pool.conf': '/etc/php/{}/fpm/pool.d/tsuru.conf',
            'php-fpm.conf': '/etc/php/{}/fpm/php-fpm.conf'
        }
        # FIX: items() instead of the Python-2-only iteritems().  The
        # template names contain no '{}' placeholder, so the original
        # template.format(version) was a no-op and is dropped.
        for template, target in templates_mapping.items():
            shutil.copyfile(
                os.path.join(self.application.get('source_directory'),
                             'php', 'interpretor', 'fpm', template),
                target.format(version))

        # Replace pool listen address (strip the 'unix:' scheme prefix)
        listen_address = self.socket_address
        if listen_address[0:5] == 'unix:':
            listen_address = listen_address[5:]
        replace(templates_mapping['pool.conf'].format(version),
                '_FPM_POOL_LISTEN_', listen_address)
        replace(templates_mapping['php-fpm.conf'].format(version),
                '_PHP_VERSION_', version)

        # Fix user rights
        run_directory = '/var/run/php'
        if not os.path.exists(run_directory):
            os.makedirs(run_directory)
        os.system('chown -R {} /etc/php/{}/fpm /var/run/php'.format(
            self.application.get('user'), version))

        # Clean and touch some files
        for file_path in ['/var/log/php-fpm.log',
                          '/etc/php/{}/fpm/environment.conf'.format(version)]:
            open(file_path, 'a').close()
            os.system('chown %s %s' % (self.application.get('user'), file_path))

        if 'ini_file' in self.configuration:
            shutil.copyfile(
                os.path.join(self.application.get('directory'),
                             self.configuration.get('ini_file')),
                '/etc/php/{}/fpm/php.ini'.format(version))
def apply(self, deduction):
    """Implication-desugaring rule: rewrite `A -> B` as `(not A) or B`
    on either side of the sequent.

    Yields (kind, left_position, right_position, Deduction) tuples; the
    position for the untouched side is -1.
    """
    def desugar(formulas, pos, formula):
        # A -> B is materially equivalent to (not A) or B
        return utils.replace(formulas, pos, [Or(Not(formula.left), formula.right)])

    for pos, formula in enumerate(deduction.left):
        if not isinstance(formula, Then):
            continue
        rewritten = desugar(deduction.left, pos, formula)
        yield (self.kind, pos, -1, Deduction(list(rewritten), deduction.right))

    for pos, formula in enumerate(deduction.right):
        if not isinstance(formula, Then):
            continue
        rewritten = desugar(deduction.right, pos, formula)
        yield (self.kind, -1, pos, Deduction(deduction.left, list(rewritten)))
def configure(self, frontend):
    """Configure the PHP 5 FPM interpretor: install pool/fpm configs,
    set the listen address, touch log/env files and fix ownership.

    :param frontend: front-end object; if it supports unix-socket
        proxying, a unix socket is used, otherwise TCP 127.0.0.1:9000.
    """
    # If frontend supports unix sockets, use them by default
    self.socket_address = 'unix:/var/run/php5/fpm.sock'
    if not frontend.supports_unix_proxy():
        self.socket_address = '127.0.0.1:9000'

    # Clear pre-configured pools.  FIX: explicit loop — the original
    # `map(os.unlink, ...)` is a no-op under Python 3's lazy map().
    pool_directory = '/etc/php5/fpm/pool.d'
    for entry in os.listdir(pool_directory):
        os.unlink(os.path.join(pool_directory, entry))

    templates_mapping = {
        'pool.conf': '/etc/php5/fpm/pool.d/tsuru.conf',
        'php-fpm.conf': '/etc/php5/fpm/php-fpm.conf'
    }
    # FIX: items() instead of the Python-2-only iteritems()
    for template, target in templates_mapping.items():
        shutil.copyfile(
            os.path.join(self.application.get('source_directory'),
                         'php', 'interpretor', 'fpm5', template),
            target)

    # Replace pool listen address (strip the 'unix:' scheme prefix)
    listen_address = self.socket_address
    if listen_address[0:5] == 'unix:':
        listen_address = listen_address[5:]
    replace(templates_mapping['pool.conf'], '_FPM_POOL_LISTEN_', listen_address)

    if 'ini_file' in self.configuration:
        shutil.copyfile(
            os.path.join(self.application.get('directory'),
                         self.configuration.get('ini_file')),
            '/etc/php5/fpm/php.ini')

    # Clean and touch some files
    for file_path in ['/var/log/php5-fpm.log', '/etc/php5/fpm/environment.conf']:
        open(file_path, 'a').close()
        os.system('chown %s %s' % (self.application.get('user'), file_path))

    # Clean run directory
    run_directory = '/var/run/php5'
    if not os.path.exists(run_directory):
        os.makedirs(run_directory)

    # Fix user rights
    os.system('chown -R %s /etc/php5/fpm /var/run/php5' % self.application.get('user'))
def configure(self, interpretor=None):
    """Configure Apache as the application front-end.

    Installs the vhost, wires in the FastCGI interpretor address,
    exports Apache environment variables to /etc/profile, resets
    run/lock/log directories, enables configured modules and fixes
    ownership for the application user.

    :param interpretor: optional interpretor whose get_address() result
        replaces the FASTCGI_INTERPRETOR_ADDRESS placeholder.
    """
    # Set apache virtual host
    vhost_directory = "/etc/apache2/sites-enabled"
    # FIX: explicit loop — the original `map(os.unlink, ...)` is a
    # no-op under Python 3 because map() is lazy there.
    for entry in os.listdir(vhost_directory):
        os.unlink(os.path.join(vhost_directory, entry))
    vhost_path = os.path.join(vhost_directory, "tsuru-vhost.conf")
    shutil.copyfile(self.get_vhost_filepath(), vhost_path)

    # Set interpretor address if there's any
    if interpretor is not None:
        address = interpretor.get_address()
        replace(vhost_path, "FASTCGI_INTERPRETOR_ADDRESS", address)
        os.system("a2enmod proxy_fcgi")

    # Empty `ports.conf` file
    open("/etc/apache2/ports.conf", "w").close()

    # Set Apache environment variables accessible when running through cmd
    with open("/etc/profile", "a") as profile_file:
        profile_file.write(
            "\n"
            "export APACHE_RUN_USER=%s\n"
            "export APACHE_RUN_GROUP=%s\n"
            "export APACHE_PID_FILE=/var/run/apache2/apache2.pid\n"
            "export APACHE_RUN_DIR=/var/run/apache2\n"
            "export APACHE_LOCK_DIR=/var/lock/apache2\n"
            "export APACHE_LOG_DIR=/var/log/apache2\n"
            "sudo chmod 777 /dev/stdout /dev/stderr\n"
            % (self.application.get("user"), self.application.get("user"))
        )

    # Create directories
    logs_directory = "/var/log/apache2"
    directories = [logs_directory, "/var/lock/apache2", "/var/run/apache2"]
    for directory in directories:
        if not os.path.exists(directory):
            os.makedirs(directory)
    # FIX: explicit loop for the same lazy-map reason as above
    for entry in os.listdir(logs_directory):
        os.unlink(os.path.join(logs_directory, entry))
    for log_file in ["access.log", "error.log"]:
        log_file_path = os.path.join(logs_directory, log_file)
        open(log_file_path, "a").close()

    # Configure modules if needed
    for module in self.configuration.get("modules", []):
        os.system("a2enmod %s" % module)

    # Fix user rights
    os.system(
        "chown -R %s /etc/apache2 /var/run/apache2 /var/log/apache2 /var/lock/apache2"
        % self.application.get("user")
    )
def cubicSplineInterpolation():
    """Corrupt a known song, then reconstruct the damaged samples with
    an InterpolatedUnivariateSpline fitted through the surviving
    samples, and write the repaired audio out.
    """
    sample_rate, original = wavfile.read('songs/hakuna_matata.wav')
    damaged = original.copy()
    dz.theEvilMethod(damaged, 0.5)
    # locate the sample positions that survived the corruption
    matches = recognize.cheat(original, damaged)
    known_x, known_y = utils.tovalidxy(damaged, matches)
    spline = InterpolatedUnivariateSpline(known_x, known_y)
    # evaluate the spline at the destroyed positions and patch them in
    missing_x = utils.invalidx(matches)
    utils.replace(damaged, missing_x, spline(missing_x))
    wavfile.write('songs/generator_song/regen_splineUnivariate_song.wav', sample_rate, damaged)
def diff_singer():
    """Populate the module-level __singers mapping for every item
    returned by get_items().

    Known singer names are cached as themselves; unknown ones are
    resolved through __diff_singer() on a normalized name.
    """
    for item in get_items():
        # item unpacks as (beat, we_singer, we_song, da_j, work_code);
        # only the singer name is used in this function
        beat, we_singer, we_song, da_j, work_code = item
        if we_singer in singers:
            # exact match against the known-singer collection: map the
            # name to a single-element list containing itself
            __singers[we_singer] = [we_singer]
        if we_singer not in __singers:
            # NOTE(review): replace() presumably normalizes the name
            # before fuzzy resolution via __diff_singer — confirm
            # against their definitions elsewhere in the project
            __singers[we_singer] = __diff_singer(replace(we_singer))
def preprocess(data_dir="./data"):
    """Preprocess the train/test CSVs under data_dir: clean comment
    text, tokenize, build a vocabulary, and write the processed files
    and vocab back to data_dir.

    :param data_dir: directory containing train.csv and test.csv.
    :return: (train_data, test_data) processed DataFrames.
    """
    print("begin to preprocess...")
    train_data_path = os.path.join(data_dir, "train.csv")
    new_train_data_path = os.path.join(data_dir, "train_prcssd.csv")
    test_data_path = os.path.join(data_dir, "test.csv")
    new_test_data_path = os.path.join(data_dir, "test_prcssd.csv")
    vocab_path = os.path.join(data_dir, "vocab.txt")

    # Load the data
    logging.info("loading data...")
    train_data = pd.read_csv(train_data_path)
    test_data = pd.read_csv(test_data_path)

    # Preprocess: tag each row so the combined frame can be split again
    train_data["tag"] = "train"
    test_data["tag"] = "test"
    # pd.concat replaces the deprecated/removed DataFrame.append
    data = pd.concat([train_data, test_data])
    logging.info("replacing bad words...")
    data["comment_text"] = data.apply(lambda d: my_utils.replace(d["comment_text"]), axis=1)
    logging.info("tokenizing...")
    data["tokens"] = data.apply(lambda d: my_utils.tokenize(d["comment_text"]), axis=1)
    logging.info("making vocabulary...")
    vocab = my_utils.make_vocab(data["tokens"])
    # BUG FIX: the original omitted axis=1 here, so the lambda was
    # applied column-wise and d["tokens"] raised a KeyError.
    data["tokens"] = data.apply(lambda d: " ".join(d["tokens"]), axis=1)
    train_data = data[data.tag == "train"]
    test_data = data[data.tag == "test"]

    # Save
    logging.info("saving...")
    train_data.to_csv(new_train_data_path)
    test_data.to_csv(new_test_data_path)
    my_utils.dump_vocab(vocab, vocab_path)
    logging.info("preprocess finished!")
    return train_data, test_data
def rotations(self):
    """Return self.matrix followed by its chained rotations.

    At each of three steps the most recently generated matrix (NOT the
    original) is rotated 90 degrees and every relabelling permutation
    of the rotated matrix is appended, preserving generation order.
    """
    results = [self.matrix]
    for _ in range(3):
        # rotate whatever was generated last — the chaining is intentional
        rotated = rotate90(results[-1])
        results.extend(replace(rotated, perm)
                       for perm in permutations(range(1, 4)))
    return results
def configure(self, interpretor=None):
    """Configure Apache as the application front-end.

    Installs the vhost, wires in the FastCGI interpretor address,
    exports Apache environment variables to /etc/profile, resets
    run/lock/log directories, enables configured modules and fixes
    ownership for the application user.

    :param interpretor: optional interpretor whose get_address() result
        replaces the FASTCGI_INTERPRETOR_ADDRESS placeholder.
    """
    # Set apache virtual host
    vhost_directory = '/etc/apache2/sites-enabled'
    # FIX: explicit loop — the original `map(os.unlink, ...)` is a
    # no-op under Python 3 because map() is lazy there.
    for entry in os.listdir(vhost_directory):
        os.unlink(os.path.join(vhost_directory, entry))
    vhost_path = os.path.join(vhost_directory, 'tsuru-vhost.conf')
    shutil.copyfile(self.get_vhost_filepath(), vhost_path)

    # Set interpretor address if there's any
    if interpretor is not None:
        address = interpretor.get_address()
        replace(vhost_path, 'FASTCGI_INTERPRETOR_ADDRESS', address)
        os.system('a2enmod proxy_fcgi')

    # Empty `ports.conf` file
    open('/etc/apache2/ports.conf', 'w').close()

    # Set Apache environment variables accessible when running through cmd
    with open('/etc/profile', 'a') as profile_file:
        profile_file.write(
            "\n"
            "export APACHE_RUN_USER=%s\n"
            "export APACHE_RUN_GROUP=%s\n"
            "export APACHE_PID_FILE=/var/run/apache2/apache2.pid\n"
            "export APACHE_RUN_DIR=/var/run/apache2\n"
            "export APACHE_LOCK_DIR=/var/lock/apache2\n"
            "export APACHE_LOG_DIR=/var/log/apache2\n"
            % (self.application.get('user'), self.application.get('user'))
        )

    # Create directories
    logs_directory = '/var/log/apache2'
    directories = [logs_directory, '/var/lock/apache2', '/var/run/apache2']
    for directory in directories:
        if not os.path.exists(directory):
            os.makedirs(directory)
    # FIX: explicit loop for the same lazy-map reason as above
    for entry in os.listdir(logs_directory):
        os.unlink(os.path.join(logs_directory, entry))
    for log_file in ['access.log', 'error.log']:
        log_file_path = os.path.join(logs_directory, log_file)
        open(log_file_path, 'a').close()

    # Configure modules if needed
    for module in self.configuration.get('modules', []):
        os.system('a2enmod %s' % module)

    # Fix user rights
    os.system(
        'chown -R %s /etc/apache2 /var/run/apache2 /var/log/apache2 /var/lock/apache2'
        % self.application.get('user'))
def cubitInterpolation1D():
    """Corrupt a known song, save the damaged copy, then patch the
    destroyed samples with a cubic interp1d fit through the surviving
    samples and write the repaired audio out.
    """
    sample_rate, original = wavfile.read('songs/hakuna_matata.wav')
    damaged = original.copy()
    dz.theEvilMethod(damaged, 0.5)
    wavfile.write('songs/bad_songs/not_good_song.wav', sample_rate, damaged)
    # locate the sample positions that survived the corruption
    matches = recognize.cheat(original, damaged)
    known_x, known_y = utils.tovalidxy(damaged, matches)
    interpolator = interp1d(known_x, known_y, kind='cubic', fill_value='extrapolate')
    # evaluate the fit at the destroyed positions and patch them in
    missing_x = utils.invalidx(matches)
    utils.replace(damaged, missing_x, interpolator(missing_x))
    wavfile.write('songs/generator_song/regen_sinOriginal_song.wav', sample_rate, damaged)
def endElement(self, tag):
    """SAX end-tag handler: check the element just closed for
    translatable text that is missing an i18n:translate attribute, and
    attempt to patch the attribute into the source file in place.

    Logs (at the tag's severity) every occurrence it cannot patch.
    """
    # Recover the element pushed by the matching startElement call.
    tag, attrs, data = self._history.pop()
    data = data.strip()
    if untranslated._translatable(
            data) and not untranslated._tal_replaced_content(tag, attrs):
        # not enclosed
        # Only top-level (not already inside an i18n scope) elements are
        # checked; script/style/html never need translation markers.
        if (self._i18nlevel == 0) and tag not in ['script', 'style', 'html']:
            severity = untranslated._severity(tag, attrs) or ''
            if severity:
                if untranslated.IGNORE_UNTRANSLATED in attrs.keys():
                    # Ignore untranslated data. This is necessary for
                    # including literal content, that does not need to be
                    # translated.
                    pass
                elif not untranslated.CHAMELEON_SUBST.match(data):
                    # Re-parse the source file with BeautifulSoup to find
                    # the concrete occurrences of this tag+attributes.
                    h = HTMLParser.HTMLParser()
                    with open(self._filename, 'r') as source_file:
                        bs = BeautifulSoup.BeautifulSoup(
                            source_file, 'html.parser')
                        source_file.close()
                    # Copy the SAX attributes into a plain dict for the
                    # BeautifulSoup query ('selected' is excluded).
                    attr = {}
                    for key in attrs.keys():
                        if key not in ['selected']:
                            attr[key] = attrs.getValue(key)
                    values = bs.findAll(tag.lower(), attrs=attr)
                    if not values:
                        self.log(
                            'i18n:translate missing for this:\n'
                            '"""\n%s\n"""\nTag:<%s> Attrs:%s'
                            % (data.encode('utf8'), tag, attr), severity)
                    for v in values:
                        if not v.has_attr('i18n:translate'):
                            v.name = tag
                            escaper = EntitySubstitution()
                            # Try both the raw element and an
                            # entity-escaped copy as replacement patterns.
                            substitute = copy(v)
                            if v.string:
                                substitute.string = escaper.substitute_html(
                                    v.string)
                            for i in [v, substitute]:
                                # pattern = element without the attribute,
                                # substring = element with it added
                                pattern = h.unescape(str(i))
                                i['i18n:translate'] = ""
                                substring = h.unescape(str(i))
                                match = replace(
                                    self._filename, str(pattern),
                                    str(substring),
                                    self._parser.getLineNumber())
                                if match:
                                    break
                            if not match:
                                self.log(
                                    'i18n:translate missing for this:\n'
                                    '"""\n%s\n"""\nPattern: %s'
                                    % (data.encode('utf8'), str(pattern)),
                                    severity)
    # Leaving an element inside an i18n scope: pop one nesting level.
    if self._i18nlevel != 0:
        self._i18nlevel -= 1
def main():
    """Read a directory containing json files for Kibana panels,
    beautify them and replace size value in aggregations as specified
    through corresponding params.
    """
    args = parse_args()
    configure_logging(args.debug)
    src_path = args.src_path
    dest_path = args.dest_path
    # both spacing variants of the escaped "size" key are replaced
    old_str1 = '\\"size\\":' + args.old_size
    old_str2 = '\\"size\\": ' + args.old_size
    new_str = '\\"size\\":' + args.new_size
    logging.info('Input path: %s', src_path)
    logging.info('Output path: %s', dest_path)
    logging.info('old str: %s', old_str1)
    logging.info('old str: %s', old_str2)
    logging.info('new str: %s', new_str)

    if os.path.abspath(src_path) == os.path.abspath(dest_path):
        # FIX: corrected 'directiories' typo in the error message
        logging.error('source and destination directories must be different')
        sys.exit(1)

    # Iterate over input files
    json_files = [f for f in os.listdir(src_path) if f.endswith('.json')]
    for filename in json_files:
        # FIX: the original assigned in_file_path twice; duplicate removed
        in_file_path = os.path.join(src_path, filename)
        out_file_path = os.path.join(dest_path, filename)
        logging.info('INPUT FILE: %s', in_file_path)
        logging.info('OUTPUT FILE: %s', out_file_path)
        # First beautify input
        pretty = utils.beautify(filename=in_file_path)
        # Iterate the beautified json string line by line
        pretty_replaced = utils.replace(pretty, old_str1, new_str)
        pretty_replaced = utils.replace(pretty_replaced, old_str2, new_str)
        with open(out_file_path, 'w') as output_file:
            output_file.write(pretty_replaced)

    logging.info('This is the end.')
def main():
    """Read a directory containing json files for Kibana panels,
    beautify them and replace size value in aggregations as specified
    through corresponding params.
    """
    args = parse_args()
    configure_logging(args.debug)
    src_path = args.src_path
    dest_path = args.dest_path
    # both spacing variants of the escaped "size" key are replaced
    old_str1 = '\\"size\\":' + args.old_size
    old_str2 = '\\"size\\": ' + args.old_size
    new_str = '\\"size\\":' + args.new_size
    logging.info('Input path: %s', src_path)
    logging.info('Output path: %s', dest_path)
    logging.info('old str: %s', old_str1)
    logging.info('old str: %s', old_str2)
    logging.info('new str: %s', new_str)

    if os.path.abspath(src_path) == os.path.abspath(dest_path):
        # FIX: corrected 'directiories' typo in the error message
        logging.error('source and destination directories must be different')
        sys.exit(1)

    # Iterate over input files
    json_files = [f for f in os.listdir(src_path) if f.endswith('.json')]
    for filename in json_files:
        # FIX: the original assigned in_file_path twice; duplicate removed
        in_file_path = os.path.join(src_path, filename)
        out_file_path = os.path.join(dest_path, filename)
        logging.info('INPUT FILE: %s', in_file_path)
        logging.info('OUTPUT FILE: %s', out_file_path)
        # First beautify input
        pretty = utils.beautify(filename=in_file_path)
        # Iterate the beautified json string line by line
        pretty_replaced = utils.replace(pretty, old_str1, new_str)
        pretty_replaced = utils.replace(pretty_replaced, old_str2, new_str)
        with open(out_file_path, 'w') as output_file:
            output_file.write(pretty_replaced)

    logging.info('This is the end.')
def is_max(self):
    """Return True when self.current encodes the largest integer value
    among all relabelling permutations of its three successive
    90-degree rotations.
    """
    reference = to_int(self.current)
    candidate = self.current
    for _ in range(3):
        candidate = rotate90(candidate)
        # rotations whose size differs from the original are skipped,
        # but the rotation chain still advances
        if len(candidate) != len(self.current):
            continue
        if any(to_int(replace(candidate, perm)) > reference
               for perm in permutations(range(1, 4))):
            return False
    return True
def is_max(self):
    """Check whether self.current is the canonical (maximal) encoding:
    no relabelling permutation of any of its three successive
    90-degree rotations produces a larger integer value.
    """
    best = to_int(self.current)
    rotated = self.current
    for _ in range(3):
        rotated = rotate90(rotated)
        # size mismatch: this rotation cannot compete, keep rotating
        if len(rotated) != len(self.current):
            continue
        for relabelling in permutations(range(1, 4)):
            if to_int(replace(rotated, relabelling)) > best:
                return False
    return True
def configure(self, frontend):
    """Configure the PHP 5 FPM interpretor: install pool/fpm configs,
    set the listen address, touch log/env files and fix ownership.

    :param frontend: front-end object; if it supports unix-socket
        proxying, a unix socket is used, otherwise TCP 127.0.0.1:9000.
    """
    # If frontend supports unix sockets, use them by default
    self.socket_address = 'unix:/var/run/php5/fpm.sock'
    if not frontend.supports_unix_proxy():
        self.socket_address = '127.0.0.1:9000'

    # Clear pre-configured pools.  FIX: explicit loop — the original
    # `map(os.unlink, ...)` is a no-op under Python 3's lazy map().
    pool_directory = '/etc/php5/fpm/pool.d'
    for entry in os.listdir(pool_directory):
        os.unlink(os.path.join(pool_directory, entry))

    templates_mapping = {
        'pool.conf': '/etc/php5/fpm/pool.d/tsuru.conf',
        'php-fpm.conf': '/etc/php5/fpm/php-fpm.conf'
    }
    # FIX: items() instead of the Python-2-only iteritems()
    for template, target in templates_mapping.items():
        shutil.copyfile(
            os.path.join(self.application.get('source_directory'),
                         'php', 'interpretor', 'fpm5', template),
            target
        )

    # Replace pool listen address (strip the 'unix:' scheme prefix)
    listen_address = self.socket_address
    if listen_address[0:5] == 'unix:':
        listen_address = listen_address[5:]
    replace(templates_mapping['pool.conf'], '_FPM_POOL_LISTEN_', listen_address)

    if 'ini_file' in self.configuration:
        shutil.copyfile(
            os.path.join(self.application.get('directory'),
                         self.configuration.get('ini_file')),
            '/etc/php5/fpm/php.ini'
        )

    # Clean and touch some files
    for file_path in ['/var/log/php5-fpm.log', '/etc/php5/fpm/environment.conf']:
        open(file_path, 'a').close()
        os.system('chown %s %s' % (self.application.get('user'), file_path))

    # Clean run directory
    run_directory = '/var/run/php5'
    if not os.path.exists(run_directory):
        os.makedirs(run_directory)

    # Fix user rights
    os.system('chown -R %s /etc/php5/fpm /var/run/php5' % self.application.get('user'))
def configure(self, frontend):
    """Configure the PHP 5 FPM interpretor: install pool/fpm configs,
    set the listen address, touch log/env files and fix ownership.

    :param frontend: front-end object; if it supports unix-socket
        proxying, a unix socket is used, otherwise TCP 127.0.0.1:9000.
    """
    # If frontend supports unix sockets, use them by default
    self.socket_address = "unix:/var/run/php5/fpm.sock"
    if not frontend.supports_unix_proxy():
        self.socket_address = "127.0.0.1:9000"

    # Clear pre-configured pools.  FIX: explicit loop — the original
    # `map(os.unlink, ...)` is a no-op under Python 3's lazy map().
    pool_directory = "/etc/php5/fpm/pool.d"
    for entry in os.listdir(pool_directory):
        os.unlink(os.path.join(pool_directory, entry))

    templates_mapping = {
        "pool.conf": "/etc/php5/fpm/pool.d/tsuru.conf",
        "php-fpm.conf": "/etc/php5/fpm/php-fpm.conf",
    }
    # FIX: items() instead of the Python-2-only iteritems()
    for template, target in templates_mapping.items():
        shutil.copyfile(
            os.path.join(self.application.get("source_directory"),
                         "php", "interpretor", "fpm5", template),
            target
        )

    # Replace pool listen address (strip the "unix:" scheme prefix)
    listen_address = self.socket_address
    if listen_address[0:5] == "unix:":
        listen_address = listen_address[5:]
    replace(templates_mapping["pool.conf"], "_FPM_POOL_LISTEN_", listen_address)

    if "ini_file" in self.configuration:
        shutil.copyfile(
            os.path.join(self.application.get("directory"),
                         self.configuration.get("ini_file")),
            "/etc/php5/fpm/php.ini",
        )

    # Clean and touch some files
    for file_path in ["/var/log/php5-fpm.log", "/etc/php5/fpm/environment.conf"]:
        open(file_path, "a").close()
        os.system("chown %s %s" % (self.application.get("user"), file_path))

    # Clean run directory
    run_directory = "/var/run/php5"
    if not os.path.exists(run_directory):
        os.makedirs(run_directory)

    # Fix user rights
    os.system("chown -R %s /etc/php5/fpm /var/run/php5" % self.application.get("user"))
def compare(__song_name, __target) -> list:
    """Find the entry in __target whose song name best matches
    __song_name.

    An exact match on the normalized names returns immediately with a
    numeric score of 100.00; otherwise the entry with the highest
    difflib quick_ratio similarity is returned, its score formatted as
    a two-decimal percentage string.  Returns [] when __target is empty.
    """
    best_score = 0
    best_item = []
    for entry in __target:
        nex_code, old, old_jrc, song_name, sub_title, artist, lyrics, song = entry
        # normalize both names before comparing
        query_name, target_name = replace(__song_name), replace(song_name)
        if query_name == target_name:
            return [
                nex_code,
                replace_dot_zero(old, old_jrc),
                song_name,
                sub_title,
                artist,
                lyrics,
                song,
                100.00,
            ]
        score = difflib.SequenceMatcher(None, query_name, target_name).quick_ratio()
        if best_score < score:
            best_score = score
            best_item = [
                nex_code,
                replace_dot_zero(old, old_jrc),
                song_name,
                sub_title,
                artist,
                lyrics,
                song,
                '%.2f' % (score * 100),
            ]
    return best_item
def configure_configfiles(db, user_config):
    """Install PrestaShop's parameters.php and startup.php for a user
    domain and rewrite the DB settings to point at the user's database.

    :param db: mapping with MYSQL_HOST, MYSQL_DATABASE, MYSQL_USER and
        MYSQL_PASSWORD for the user's database.
    :param user_config: mapping with HOST_DOMAIN and PRESTASHOP_RELEASE.
    """
    domain = user_config['HOST_DOMAIN']
    release = user_config['PRESTASHOP_RELEASE']
    user_database = db['MYSQL_DATABASE']
    # name of the template database this release was installed with;
    # its credentials are what we search-and-replace below
    template_db = _database_name(release)

    # parameters config
    domain_dir = domain_path(domain, release)
    parameter_dir = domain_dir + 'app/config/'
    parameter_file = parameter_dir + 'parameters.php'
    if not os.path.isdir(parameter_dir):
        mkdir("-p", parameter_dir)
    parameter_srcpath = _install_dir(release) + 'app/config/parameters.php'
    cp("-rf", parameter_srcpath, parameter_dir)

    # startup script config
    startupscript_dir = domain_dir + 'usr/html/'
    # NOTE(review): startupscript_file is assigned but never used here —
    # presumably kept for symmetry with parameter_file; confirm callers
    startupscript_file = startupscript_dir + 'startup.php'
    if not os.path.isdir(startupscript_dir):
        mkdir("-p", startupscript_dir)
    startupscript_srcpath = _release_dir(release) + 'files/startup.php'
    cp("-rf", startupscript_srcpath, startupscript_dir)

    # Rewrite the template connection settings in parameters.php with
    # the user's own host, port, database and credentials.
    replace(old="'database_host' => 'localhost',",
            new="'database_host' => '{}',".format(db['MYSQL_HOST']),
            file=parameter_file)
    replace(old="'database_port' => '',",
            new="'database_port' => '{}',".format(3306),
            file=parameter_file)
    replace(old="'database_name' => '{}',".format(template_db),
            new="'database_name' => '{}',".format(user_database),
            file=parameter_file)
    replace(old="'database_user' => '{}',".format(MYSQL_USER),
            new="'database_user' => '{}',".format(db['MYSQL_USER']),
            file=parameter_file)
    replace(old="'database_password' => '{}',".format(MYSQL_PASSWORD),
            new="'database_password' => '{}',".format(db['MYSQL_PASSWORD']),
            file=parameter_file)
def configure(self, interpretor=None):
    """Install the nginx configuration for the application.

    Copies nginx.conf and the vhost template into /etc/nginx, points
    the vhost at the FastCGI interpretor when one is given, resets the
    log files and chowns the nginx runtime paths to the application
    user.

    :param interpretor: optional interpretor whose get_address() result
        replaces the FASTCGI_INTERPRETOR_ADDRESS placeholder.
    """
    # Copy nginx configuration
    nginx_config_file = os.path.join(self.application.get('source_directory'),
                                     'php', 'frontend', 'nginx', 'nginx.conf')
    shutil.copyfile(nginx_config_file, '/etc/nginx/nginx.conf')

    # Copy vhost configuration
    shutil.copyfile(self.get_vhost_filepath(), '/etc/nginx/vhost.conf')
    if interpretor is not None:
        address = interpretor.get_address()
        replace('/etc/nginx/vhost.conf', 'FASTCGI_INTERPRETOR_ADDRESS', address)

    # Clean log files.  FIX: explicit loop — the original
    # `map(os.unlink, ...)` is a no-op under Python 3's lazy map().
    logs_directory = '/var/log/nginx'
    if not os.path.exists(logs_directory):
        os.makedirs(logs_directory)
    for entry in os.listdir(logs_directory):
        os.unlink(os.path.join(logs_directory, entry))
    for log_file in ['access.log', 'error.log']:
        log_file_path = os.path.join(logs_directory, log_file)
        open(log_file_path, 'a').close()

    # Fix user rights
    open('/run/nginx.pid', 'a').close()
    os.system('chown -R %s /etc/nginx /var/log/nginx /var/lib/nginx /run/nginx.pid' % self.application.get('user'))
async def weather(ctx, city):
    """Discord command: fetch the current weather for `city` from the
    OpenWeatherMap API and post it as an embed.

    Hyphen/underscore separators in the city name are normalized to
    spaces.  A 404 from the API yields a "City not found." message;
    other error codes are echoed back to the channel.
    """
    # allow hyphen/underscore separated multi-word city names
    city = replace(city, ['-', '_'], ' ')
    url = 'https://api.openweathermap.org/data/2.5/weather'
    params = {
        'q': city,
        'appid': OWM_API,  # API key, defined at module level
        'units': 'metric',
    }
    rjs = requests.get(url=url, params=params).json()
    if int(rjs['cod']) == 200:
        # shift the UTC timestamp by the location's timezone offset to
        # get the local observation time
        current_time = datetime.utcfromtimestamp(rjs['dt']+rjs['timezone'])\
            .strftime('%I:%M %p')
        title = f'Weather at {rjs["name"]}'
        desc = f'Data at {current_time} local time'
        field_params = {
            'Weather': f'{rjs["weather"][0]["description"].capitalize()}',
            'Temperature (\u00b0C)': f'{rjs["main"]["temp"]:.1f} '\
                f'({rjs["main"]["temp_min"]:.1f} - {rjs["main"]["temp_max"]:.1f})',
            'Feels like (\u00b0C)': f'{rjs["main"]["feels_like"]:.1f}',
            # NOTE(review): '\%' is not a recognized escape — Python keeps
            # the backslash, so the message shows a literal "\%"; probably
            # intended as a Discord markdown escape — confirm rendering
            'Humidity': f'{rjs["main"]["humidity"]}\%',
            'Wind speed': f'{rjs["wind"]["speed"]} m/s',
        }
        weather_icon_url = \
            'http://openweathermap.org/img/wn/' + rjs['weather'][0]['icon'] + '.png'
        footer = 'Powered by OpenWeather'
        favicon_url = \
            'https://openweathermap.org/themes/openweathermap/assets/vendor/owm/img/icons/logo_32x32.png'
        embed = discord.Embed(title=title, description=desc, color=0x7289da)
        for key, value in field_params.items():
            embed.add_field(name=key, value=value)
        embed.set_thumbnail(url=weather_icon_url)
        embed.set_footer(text=footer, icon_url=favicon_url)
        await ctx.send(embed=embed)
    elif int(rjs['cod']) == 404:
        await ctx.send('City not found.')
    else:
        await ctx.send(f'{rjs["cod"]}: Weather data could not be requested.')
def create_vocab(data, init_vocab=None):
    """Build a word→index vocabulary over contexts, questions, answers
    and candidate spans in `data`, collecting size statistics along the
    way.

    Also rewrites each paragraph's 'spans' in place: spans longer than
    the observed maximum answer length are dropped, and Penn-Treebank
    bracket tokens (-LRB- etc.) are mapped back to literal brackets.

    :param data: list of paragraph dicts (SQuAD-style preprocessed).
    :param init_vocab: optional existing vocabulary to extend; when
        falsy a fresh one seeded with '<none>'/'<unk>' is created.
    :return: (vocab, stats) where stats records max_span, max_q,
        max_num_span and vocab_size.
    """
    stats = {}
    if not init_vocab:
        # reserved entries: 0 = no-answer marker, 1 = unknown word
        vocab = {'<none>': 0, '<unk>': 1}
    else:
        vocab = init_vocab
    stats['max_span'] = 0
    stats['max_q'] = 0

    def update_vocab(sentence):
        # assign the next free index to every unseen word
        for word in sentence:
            if word in vocab:
                continue
            vocab[word] = len(vocab)

    # First pass: contexts, questions and gold answers; track the
    # longest question and the longest answer span seen.
    for (pi, paragraph) in enumerate(data):
        update_vocab(paragraph['context.tokens'])
        for qa in paragraph['qas']:
            update_vocab(qa['question.tokens'])
            if len(qa['question.tokens']) > stats['max_q']:
                stats['max_q'] = len(qa['question.tokens'])
            for answer in qa['answers']:
                if len(answer['text.tokens']) > stats['max_span']:
                    stats['max_span'] = len(answer['text.tokens'])
                update_vocab(answer['text.tokens'])

    # Second pass: candidate spans.  Note that update_vocab runs BEFORE
    # the length filter, so even dropped spans contribute to the vocab.
    stats['max_num_span'] = 0
    for (pi, paragraph) in enumerate(data):
        new_spans = []
        for spans in paragraph['spans']:
            new_span = []
            for span in spans:
                update_vocab(span)
                if len(span) > stats['max_span']:
                    continue
                # map PTB bracket tokens back to literal brackets
                span = replace(span, '-LRB-', '(')
                span = replace(span, '-RRB-', ')')
                span = replace(span, '-LSB-', '[')
                span = replace(span, '-RSB-', ']')
                span = replace(span, '-LCB-', '{')
                span = replace(span, '-RCB-', '}')
                new_span.append(span)
            new_spans.append(new_span)
        paragraph['spans'] = new_spans
        # track the largest total number of surviving spans per paragraph
        if len(sum(new_spans, [])) > stats['max_num_span']:
            stats['max_num_span'] = len(sum(new_spans, []))

    stats['vocab_size'] = len(vocab)
    return (vocab, stats)
async def wiki(ctx, search_string):
    """Discord command: search Wikipedia for `search_string` and post
    the top result's article URL, or 'No results found.' when the
    search comes back empty.
    """
    # normalize space/hyphen separators to underscores for the query
    query = replace(search_string, [' ', '-'], '_')
    api_url = 'https://en.wikipedia.org/w/api.php'
    payload = {
        'action': 'query',
        'format': 'json',
        'list': 'search',
        'srsearch': query,
        'srlimit': 1,
    }
    response = requests.get(url=api_url, params=payload).json()
    page_url = 'https://en.wikipedia.org/wiki/'
    hits = response['query']['search']
    if not hits:
        await ctx.send('No results found.')
        return
    # article URLs use underscores in place of spaces
    title = hits[0]['title'].replace(' ', '_')
    await ctx.send(page_url + title)
async def randomwiki(ctx, category=None):
    """Discord command: post a link to a random Wikipedia article.

    Without a category, picks from 50 random pages; with one, picks
    from the category's member pages (or reports that the category
    doesn't exist).  Namespace pages (titles containing ':') are
    filtered out.
    """
    def good_title(title):
        # namespace pages ("Category:...", "File:...") contain a colon
        return ':' not in title

    url = 'https://en.wikipedia.org/w/api.php'
    params = {
        'action': 'query',
        'format': 'json',
    }
    if category is None:
        params.update({
            'list': 'random',
            'rnlimit': 50,
        })
    else:
        # BUG FIX: the original called replace(category, ...) before the
        # None check, so invoking the command without a category crashed
        # instead of taking the random-page path.
        category = replace(category, [' ', '-'], '_')
        params.update({
            'list': 'categorymembers',
            'cmtitle': 'Category:' + category,
            'cmtype': 'page',
            'cmlimit': 'max',
        })
    rjs = requests.get(url=url, params=params).json()
    page_url = 'https://en.wikipedia.org/wiki/'
    if category is None:
        all_titles = [ent['title'] for ent in rjs['query']['random']]
    else:
        if not rjs['query']['categorymembers']:
            await ctx.send("Category doesn't exist.")
            return
        all_titles = [ent['title'] for ent in rjs['query']['categorymembers']]
    choice = random.choice(list(filter(good_title, all_titles)))
    # article URLs use underscores in place of spaces
    choice = choice.replace(' ', '_')
    await ctx.send(page_url + choice)
def filter_vocab(data, vocab, stats):
    """Return a deep copy of `data` whose candidate spans have been
    length-filtered against stats['max_span'] and had Penn-Treebank
    bracket tokens mapped back to literal brackets.

    `vocab` is unused here but kept for interface compatibility.
    """
    # PTB bracket token → literal bracket, applied in this fixed order
    bracket_tokens = [
        ('-LRB-', '('), ('-RRB-', ')'),
        ('-LSB-', '['), ('-RSB-', ']'),
        ('-LCB-', '{'), ('-RCB-', '}'),
    ]

    copied = deepcopy(data)
    for paragraph in copied:
        filtered_groups = []
        for group in paragraph['spans']:
            kept = []
            for span in group:
                # drop spans longer than the recorded maximum
                if len(span) > stats['max_span']:
                    continue
                for token, bracket in bracket_tokens:
                    span = replace(span, token, bracket)
                kept.append(span)
            filtered_groups.append(kept)
        paragraph['spans'] = filtered_groups
    return copied
def main():
    """Suppress warnings from deprecated functions (configparser compatibility for python 3)"""
    warnings.simplefilter("ignore", category=DeprecationWarning)
    warnings.filterwarnings("ignore")
    """Specify, parse & assign positional (compulsory) & optional arguments for the script"""
    arg_parser_description = 'Take the hostfile, logging level and SSH details'
    parser = argparse.ArgumentParser(description=arg_parser_description)
    parser_help_text = "Provide a hostfile in ip,username,password format"
    parser.add_argument("hostfile", help=parser_help_text)
    parser.add_argument("--loglevel", help="Provide a log level. Supported values: (Info,Debug);\ Default is Info")
    parser.add_argument("--passwordless", help="Setup passwordless SSH with hosts.\ Supported values: (True,False); Default is False")
    args = parser.parse_args()
    # Path of the user-supplied hostfile (ip,username,password per line).
    Hosts = args.hostfile
    # Specify SSH details
    key_rsa_path = '~/.ssh/id_rsa.pub'
    key_append_path = '~/.ssh/authorized_keys'
    key_gen_cmd = 'echo -e "y\n"|ssh-keygen -q -t rsa -N "" -f ~/.ssh/id_rsa'
    # Create the configparser object
    config = configparser.ConfigParser()
    # Specify path for inventory
    current_path = os.path.dirname(os.path.realpath(sys.argv[0]))
    if "ansible/roles/inventory/files" in current_path:
        # Running from inside the ansible role tree: derive the ansible root.
        parent_dirname = os.path.dirname(os.path.dirname(current_path))
        ansible_path = os.path.dirname(parent_dirname)
        # NOTE(review): this concatenation produces '<ansible_path>ansible.cfg'
        # with no '/' separator -- confirm whether a slash is missing here.
        ansible_cfg_path = ansible_path + 'ansible.cfg'
        default_inventory_path = ansible_path + '/inventory/'
        try:
            # Prefer the inventory location declared in ansible.cfg.
            config.read_file(open(ansible_cfg_path))
            inventory_path = ansible_path + config.get('defaults', 'inventory')
        except IOError:
            # No ansible.cfg: fall back to <ansible_path>/inventory/hosts.
            if os.path.isdir(default_inventory_path) is not True:
                os.makedirs(default_inventory_path)
            inventory_path = default_inventory_path + 'hosts'
    else:
        # Standalone run: create a local ./inventory directory.
        os.makedirs('inventory')
        default_inventory_path = current_path + '/inventory/'
        inventory_path = default_inventory_path + 'hosts'
    # Define logging levels for script execution
    logfile = '%s/host-status.log' % (default_inventory_path)
    if args.loglevel and args.loglevel.upper() == "DEBUG":
        # level=10 is logging.DEBUG
        logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
                            filename=logfile, filemode='a', level=10)
    else:
        # level=20 is logging.INFO (default verbosity)
        logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
                            filename=logfile, filemode='a', level=20)
    # Initiate log file
    clearLogCmd = '> %s' % (logfile)
    executeCmd(clearLogCmd)
    # Initialize dictionary holding supported host codes
    SupportedHostCodes = {
        'localhost': None,
        'mayamaster': 'openebs-mayamasters',
        'mayahost': 'openebs-mayahosts',
        'kubemaster': 'kubernetes-kubemasters',
        'kubeminion': 'kubernetes-kubeminions'
    }
    """Create list of tuples containing individual machine info & initialize localhost password"""
    HostList = []
    local_password = None
    v = Validator()
    with open(Hosts, "rb") as fp:
        for i in fp.readlines():
            tmp = i.split(",")
            # Skip blank lines and commented-out entries.
            if tmp[0] != '\n' and "#" not in tmp[0]:
                ret, msg = v.validateInput(tmp, SupportedHostCodes)
                if not ret:
                    print msg
                    exit()
                if tmp[0] == "localhost":
                    # Field 4 holds the env-var name for the localhost password.
                    local_password = tmp[3].rstrip('\n')
                try:
                    HostList.append((tmp[0], tmp[1], tmp[2],
                                     tmp[3].rstrip('\n')))
                except IndexError as e:
                    info_text = "Unable to parse input, failed with error %s"
                    logging.info(info_text, e)
                    exit()
    if args.passwordless and args.passwordless.upper() == "TRUE":
        # Setup passwordless SSH between the localhost and target hosts
        setupSSH(key_rsa_path, key_append_path, key_gen_cmd, HostList)
        passwdless = True
    else:
        passwdless = False
    # Generate Ansible hosts file from 'machines.in'
    codes = list(SupportedHostCodes)
    inventory = Inventory()
    for i in codes:
        # Collect all hosts tagged with this code, then emit their section.
        codeSubList = []
        for j in HostList:
            if i in j:
                codeSubList.append(j)
        ret, msg = inventory.generateInventory(config, codeSubList,
                                               inventory_path,
                                               SupportedHostCodes, passwdless)
        if not ret:
            print msg
            exit()
    print "Inventory config generated successfully"
    logging.info("Inventory config generated successfully")
    # Insert localhost line into beginning of inventory file
    if local_password:
        lpasswd = "\"{{ lookup('env','%s') }}\"" % (local_password)
        localhostString = """localhost ansible_connection=local ansible_become_pass=%s\n\n""" % (lpasswd)
        with open(inventory_path, 'rb') as f:
            with open('hosts.tmp', 'wb') as f2:
                f2.write(localhostString)
                f2.write(f.read())
        os.rename('hosts.tmp', inventory_path)
    # Sanitize the Ansible inventory file
    replace(inventory_path, " = ", "=")
def create(
    coin_name="Bitcoin",
    coin_code="BTC",
    coin_uuid=None,
    init_subsidy=50,
    halving_interval=210000,
    block1_value=50,
    block_time=600,
    retarget_num=2016,
    max_money=21000000,
    port=8333,
    testport=18333,
    rpcport=8332,
    rpctestport=18332,
    website="example.com",
    address_version=0,
    address_test_version=111,
    timestamp=DEFAULT_TIMESTAMP,
    time=1311305081,
    pubkey=DEFAULT_PUBKEY,
    nonce=3085127155,
    merkle_hash=DEFAULT_MERKLE,
    genesis_hash=DEFAULT_GENESIS,
    splash_image=None,
    icon_ico=None,
    icon_png=None,
):
    """ The main function that creates coins. This function modifies
    the source coins sourcecode to fit the provided options.

    Copies the template coin source into a per-call unique directory
    under SETTINGS.working_dir, substitutes every CM_* placeholder with
    the corresponding option value, and optionally installs splash/icon
    images. Image copy failures are deliberately ignored (best effort).

    Note: address_test_version is accepted for interface compatibility
    but is not substituted anywhere in this body.
    """
    # Bug fix: the original default, coin_uuid=str(uuid.uuid4().hex),
    # was evaluated once at import time, so every default call shared
    # the same "unique" directory and the second call crashed in
    # os.makedirs below. Generate a fresh uuid per call instead.
    if coin_uuid is None:
        coin_uuid = str(uuid.uuid4().hex)
    blocks_per_day = 60 * 60 * 24 / block_time
    # Create working directory if doesn't exist
    if not os.path.exists(SETTINGS.working_dir):
        os.makedirs(SETTINGS.working_dir)
    # Coin source is nested in a random directory, for uniqueness
    random_dir = os.path.join(SETTINGS.working_dir, coin_uuid)
    os.makedirs(random_dir)
    # Copy the sourcecoin source into randomdir
    copy_anything(SETTINGS.source_dir, os.path.join(random_dir, coin_name))
    os.chdir(os.path.join(random_dir, coin_name))
    coindir = os.getcwd()
    # Placeholder -> value substitutions (duplicates from the original
    # list removed; re-applying an identical pair was a no-op anyway).
    reps = [
        ['CM_LowercaseCoinCode', coin_code.lower()],
        ['CM_AllCapsCoinCode', coin_code.upper()],
        ['CM_LowercaseCoinName', coin_name.lower()],
        ['CM_AllCapsCoinName', coin_name.upper()],
        ['CM_UppercaseCoinName', coin_name.title()],
        ['CM_RPCPort', str(rpcport)],
        ['CM_RPCTestnetPort', str(rpctestport)],
        ['CM_Port', str(port)],
        ['CM_TestnetPort', str(testport)],
        ['CM_WebsiteDomain', str(website)],
        ['CM_InitialSubsidy', str(init_subsidy)],
        ['CM_MaxMoney', str(max_money)],
        ['CM_BlocksPerDay', str(blocks_per_day)],
        ['CM_AddressVersion', str(address_version)],
        ['CM_HalvingIntervalBlocks', str(halving_interval)],
        ['CM_GenesisHeadline', str(timestamp)],
        ['CM_GenesisTimecode', str(int(time))],
        ['CM_GenesisPubkey', str(pubkey)],
        ['CM_GenesisNonce', str(int(nonce))],
        ['CM_GenesisMerkleHash', str(merkle_hash)],
        ['CM_GenesisHash', str(genesis_hash)],
        ['CM_RetargetBlocks', str(retarget_num)],
        ['CM_BlockOneValue', str(int(block1_value))],
        ['CM_TargetTimespanSeconds', str(int(block_time))],
    ]
    replace(coindir, reps, ignore_types=[".png", ".ico"])
    # Copy image files (best effort: missing files are ignored)
    if splash_image:
        try:
            shutil.copyfile(splash_image, 'qt/res/images/splash.png')
        except IOError:
            pass
    if icon_ico:
        try:
            shutil.copyfile(icon_ico,
                            'qt/res/icons/{0}.ico'.format(coin_name.lower()))
        except IOError:
            pass
    if icon_png:
        try:
            shutil.copyfile(icon_png,
                            'qt/res/icons/{0}.png'.format(coin_name.lower()))
            shutil.copyfile(icon_png, 'qt/res/icons/toolbar.png')
        except IOError:
            pass
'J编码(下方)', '歌曲名', '副标题', '歌手', '作词', '作曲', '匹配百分比 %', ] ll = len(__header) spam_writer.writerow(__header) for i in get_beats(): beat, singer, song, da_j, jar_code = i try: content = fetch('data/result/%s.html' % beat) items = fetch_info(content) if len(items): percent = difflib.SequenceMatcher( None, replace(song), replace(items[0][2])).quick_ratio() # for item in items: spam_writer.writerow(i + list(items[0]) + [ '%.2f' % (percent * 100), ]) else: spam_writer.writerow(i) except Exception as e: # print(e) spam_writer.writerow(i) pass # print(e)
if __name__ == '__main__': jm = json_maker([], all_json_path, 0) yolo_label = Yolo_label(img_dir, label_dir, cls_list) img_names = get_file_list_from_dir(img_dir, is_full_path=False) img_names.sort() seen_patterns = [] #2. for line : add instance summary and instances lines = csv2list(csv_path, header=False) for line in lines: cls, pattern, cam_num = line cls = cls_list.index(cls) valid_cams = list(map(int, cam_num.split('/'))) for i, img_name in enumerate(img_names): if not check_pattern_exist(img_name, pattern): continue cur_cls, cur_cam_num, cur_scene = replace( pattern, '\g<class> \g<cam_num> \g<scene>', img_name).split() cam_num = int(cur_cam_num) if cam_num not in valid_cams: continue cur_pattern = '%s_%s' % (cur_cls, cur_scene) if cur_pattern not in seen_patterns: scene_num = len(seen_patterns) scene_num = "%08d" % (scene_num) seen_patterns.append(cur_pattern) jm.insert_scene(scene_num) jm.insert_instance_summary(scene_num, 0, cls) else: scene_num = seen_patterns.index(cur_pattern) scene_num = "%08d" % (scene_num) jm.insert_cam(scene_num, cam_num) labels = yolo_label.get_labels(img_name) _, [x1, y1, x2, y2] = labels[0]
for (ind, d) in enumerate(drivers_list)) return True if __name__ == "__main__": start = time.time() folders = os.listdir("data/drivers/") drivers = filter(lambda idd: idd[0] != "." and 0 < int(idd) < 4000, folders) print "reducing %s drivers" % len(drivers) X, _ = utils.load_data() X = preprocessing.scale(X) if len(sys.argv) > 1: cols = int(sys.argv[1]) X = np.delete(X, cols, 1) pca = decomposition.PCA() pca.fit(X) pca.n_components = pca.explained_variance_[ pca.explained_variance_ > 1e-05].shape[0] X = pca.fit_transform(X) save_pcaed_feat(drivers, X) print str(time.time() - start) # rewrite _COMP_PCA in utils # the line to look for is old = "_COMP_PCA = %s" % str(utils._COMP_PCA) new = "_COMP_PCA = %s" % str(pca.n_components) utils.replace("utils.py", old, new) print "reduced to %s-dim and saved" % str(pca.n_components)
def create(coin_name="Bitcoin",
           coin_code="BTC",
           coin_uuid=None,
           init_subsidy=50,
           halving_interval=210000,
           block1_value=50,
           block_time=600,
           retarget_num=2016,
           max_money=21000000,
           port=8333,
           testport=18333,
           rpcport=8332,
           rpctestport=18332,
           website="example.com",
           address_version=0,
           address_test_version=111,
           timestamp=DEFAULT_TIMESTAMP,
           time=1311305081,
           pubkey=DEFAULT_PUBKEY,
           nonce=3085127155,
           merkle_hash=DEFAULT_MERKLE,
           genesis_hash=DEFAULT_GENESIS,
           splash_image=None,
           icon_ico=None,
           icon_png=None,
           ):
    """ The main function that creates coins. This function modifies
    the source coins sourcecode to fit the provided options.

    The template source is copied into a unique per-call directory under
    SETTINGS.working_dir and every CM_* placeholder is replaced with the
    corresponding option. Splash/icon images are installed best-effort.

    Note: address_test_version is accepted but not substituted anywhere
    in this body.
    """
    # Bug fix: the original default, coin_uuid=str(uuid.uuid4().hex),
    # was evaluated once when the function was defined, so every call
    # relying on the default reused the same "unique" directory and the
    # second call failed in os.makedirs. Generate one per call instead.
    if coin_uuid is None:
        coin_uuid = str(uuid.uuid4().hex)
    blocks_per_day = 60*60*24 / block_time
    # Create working directory if doesn't exist
    if not os.path.exists(SETTINGS.working_dir):
        os.makedirs(SETTINGS.working_dir)
    # Coin source is nested in a random directory, for uniqueness
    random_dir = os.path.join(SETTINGS.working_dir, coin_uuid)
    os.makedirs(random_dir)
    # Copy the sourcecoin source into randomdir
    copy_anything(SETTINGS.source_dir, os.path.join(random_dir, coin_name))
    os.chdir(os.path.join(random_dir, coin_name))
    coindir = os.getcwd()
    # Placeholder substitutions (duplicate pairs from the original list
    # dropped; applying the same replacement twice changed nothing).
    reps = [
        ['CM_LowercaseCoinCode', coin_code.lower()],
        ['CM_AllCapsCoinCode', coin_code.upper()],
        ['CM_LowercaseCoinName', coin_name.lower()],
        ['CM_AllCapsCoinName', coin_name.upper()],
        ['CM_UppercaseCoinName', coin_name.title()],
        ['CM_RPCPort', str(rpcport)],
        ['CM_RPCTestnetPort', str(rpctestport)],
        ['CM_Port', str(port)],
        ['CM_TestnetPort', str(testport)],
        ['CM_WebsiteDomain', str(website)],
        ['CM_InitialSubsidy', str(init_subsidy)],
        ['CM_MaxMoney', str(max_money)],
        ['CM_BlocksPerDay', str(blocks_per_day)],
        ['CM_AddressVersion', str(address_version)],
        ['CM_HalvingIntervalBlocks', str(halving_interval)],
        ['CM_GenesisHeadline', str(timestamp)],
        ['CM_GenesisTimecode', str(int(time))],
        ['CM_GenesisPubkey', str(pubkey)],
        ['CM_GenesisNonce', str(int(nonce))],
        ['CM_GenesisMerkleHash', str(merkle_hash)],
        ['CM_GenesisHash', str(genesis_hash)],
        ['CM_RetargetBlocks', str(retarget_num)],
        ['CM_BlockOneValue', str(int(block1_value))],
        ['CM_TargetTimespanSeconds', str(int(block_time))],
    ]
    replace(coindir, reps, ignore_types=[".png", ".ico"])
    # Copy image files (failures are intentionally ignored)
    if splash_image:
        try:
            shutil.copyfile(splash_image, 'qt/res/images/splash.png')
        except IOError:
            pass
    if icon_ico:
        try:
            shutil.copyfile(icon_ico,
                            'qt/res/icons/{0}.ico'.format(coin_name.lower()))
        except IOError:
            pass
    if icon_png:
        try:
            shutil.copyfile(icon_png,
                            'qt/res/icons/{0}.png'.format(coin_name.lower()))
            shutil.copyfile(icon_png, 'qt/res/icons/toolbar.png')
        except IOError:
            pass
def test_010_replace(self):
    """replace() should collapse every ' = ' separator in the file to '='."""
    replace('./test.txt', ' = ', '=')
    with open('./test.txt', 'r') as handle:
        contents = handle.read()
    self.assertEqual(contents, 'foo=bar')
n_proc = cpu_count() Parallel(n_jobs=n_proc)(delayed(p_save_reduced)(d, X[ind*200:(ind+1)*200, :]) for (ind, d) in enumerate(drivers_list)) return True if __name__ == "__main__": start = time.time() folders = os.listdir("data/drivers/") drivers = filter(lambda idd: idd[0] != "." and 0<int(idd)<4000, folders) print "reducing %s drivers" % len(drivers) X, _ = utils.load_data() X = preprocessing.scale(X) if len(sys.argv) > 1: cols = int(sys.argv[1]) X = np.delete(X, cols, 1) pca = decomposition.PCA() pca.fit(X) pca.n_components = pca.explained_variance_[pca.explained_variance_ > 1e-05].shape[0] X = pca.fit_transform(X) save_pcaed_feat(drivers, X) print str(time.time()-start) # rewrite _COMP_PCA in utils # the line to look for is old = "_COMP_PCA = %s" % str(utils._COMP_PCA) new = "_COMP_PCA = %s" % str(pca.n_components) utils.replace("utils.py", old, new) print "reduced to %s-dim and saved" % str(pca.n_components)
def index(request):
    """Django view for the monitor/camera configuration panel.

    Loads the current monitor IP, camera and serial-number settings,
    validates any changed values posted from the form, writes accepted
    changes back to the config files, and re-renders the panel (or the
    reboot page when a network change requires a restart).
    """
    # read current configuration value
    try:
        cfg_mon_file = open(CFG_PATH + CFG_MON_IP_FILENAME)
        str_mon = cfg_mon_file.read()
        current_mon_address, current_mon_netmask = parse_interfaces_cfg(
            str_mon)
    except IOError:
        current_mon_address, current_mon_netmask = MON_ADDRESS_NOT_CONFIGURED, MON_NETMASK_NOT_CONFIGURED
    cam_config_entry = parse_cam_config(CFG_PATH + CFG_CAM_IP_FILENAME)
    print 'Current config loaded.'
    # Flatten the four camera config entries into per-camera locals.
    current_cam_address1 = cam_config_entry[0].address
    current_cam_address2 = cam_config_entry[1].address
    current_cam_address3 = cam_config_entry[2].address
    current_cam_address4 = cam_config_entry[3].address
    current_cam_port1 = cam_config_entry[0].port
    current_cam_port2 = cam_config_entry[1].port
    current_cam_port3 = cam_config_entry[2].port
    current_cam_port4 = cam_config_entry[3].port
    current_cam_stream_m1 = cam_config_entry[0].stream_m
    current_cam_stream_m2 = cam_config_entry[1].stream_m
    current_cam_stream_m3 = cam_config_entry[2].stream_m
    current_cam_stream_m4 = cam_config_entry[3].stream_m
    current_cam_stream_s1 = cam_config_entry[0].stream_s
    current_cam_stream_s2 = cam_config_entry[1].stream_s
    current_cam_stream_s3 = cam_config_entry[2].stream_s
    current_cam_stream_s4 = cam_config_entry[3].stream_s
    current_cam_user1 = cam_config_entry[0].user
    current_cam_user2 = cam_config_entry[1].user
    current_cam_user3 = cam_config_entry[2].user
    current_cam_user4 = cam_config_entry[3].user
    current_cam_pass1 = cam_config_entry[0].password
    current_cam_pass2 = cam_config_entry[1].password
    current_cam_pass3 = cam_config_entry[2].password
    current_cam_pass4 = cam_config_entry[3].password
    current_cam_type1 = cam_config_entry[0].cam_type
    current_cam_type2 = cam_config_entry[1].cam_type
    current_cam_type3 = cam_config_entry[2].cam_type
    current_cam_type4 = cam_config_entry[3].cam_type
    print 'Current config data fetched.'
    # read new values from request and validate them (only if changed)
    # Each POST field falls back to the current value, so "unchanged"
    # below means the field equals what is already configured.
    new_mon_address = request.POST.get('new_mon_address', current_mon_address)
    new_mon_netmask = request.POST.get('new_mon_netmask', current_mon_netmask)
    new_cam_address1 = request.POST.get('new_cam_address1',
                                        current_cam_address1)
    new_cam_address2 = request.POST.get('new_cam_address2',
                                        current_cam_address2)
    new_cam_address3 = request.POST.get('new_cam_address3',
                                        current_cam_address3)
    new_cam_address4 = request.POST.get('new_cam_address4',
                                        current_cam_address4)
    new_cam_port1 = request.POST.get('new_cam_port1', current_cam_port1)
    new_cam_port2 = request.POST.get('new_cam_port2', current_cam_port2)
    new_cam_port3 = request.POST.get('new_cam_port3', current_cam_port3)
    new_cam_port4 = request.POST.get('new_cam_port4', current_cam_port4)
    new_cam_stream_m1 = request.POST.get('new_cam_stream_m1',
                                         current_cam_stream_m1)
    new_cam_stream_m2 = request.POST.get('new_cam_stream_m2',
                                         current_cam_stream_m2)
    new_cam_stream_m3 = request.POST.get('new_cam_stream_m3',
                                         current_cam_stream_m3)
    new_cam_stream_m4 = request.POST.get('new_cam_stream_m4',
                                         current_cam_stream_m4)
    new_cam_stream_s1 = request.POST.get('new_cam_stream_s1',
                                         current_cam_stream_s1)
    new_cam_stream_s2 = request.POST.get('new_cam_stream_s2',
                                         current_cam_stream_s2)
    new_cam_stream_s3 = request.POST.get('new_cam_stream_s3',
                                         current_cam_stream_s3)
    new_cam_stream_s4 = request.POST.get('new_cam_stream_s4',
                                         current_cam_stream_s4)
    new_cam_user1 = request.POST.get('new_cam_user1', current_cam_user1)
    new_cam_user2 = request.POST.get('new_cam_user2', current_cam_user2)
    new_cam_user3 = request.POST.get('new_cam_user3', current_cam_user3)
    new_cam_user4 = request.POST.get('new_cam_user4', current_cam_user4)
    new_cam_pass1 = request.POST.get('new_cam_pass1', current_cam_pass1)
    new_cam_pass2 = request.POST.get('new_cam_pass2', current_cam_pass2)
    new_cam_pass3 = request.POST.get('new_cam_pass3', current_cam_pass3)
    new_cam_pass4 = request.POST.get('new_cam_pass4', current_cam_pass4)
    new_cam_type1 = request.POST.get('new_cam_type1', current_cam_type1)
    new_cam_type2 = request.POST.get('new_cam_type2', current_cam_type2)
    new_cam_type3 = request.POST.get('new_cam_type3', current_cam_type3)
    new_cam_type4 = request.POST.get('new_cam_type4', current_cam_type4)
    new_serial_number = request.POST.get('new_serial_number', '')
    admin_password = request.POST.get('admin_password', '')
    # Serial number changes require the service (admin) password.
    if new_serial_number != '' and admin_password == ADMIN_PASS:
        with open(CFG_PATH + CFG_SERIAL_FILENAME, 'w+') as f:
            f.write(new_serial_number.strip())
        op_status = u'Zmieniono numer seryjny.'
    elif new_serial_number != '' and admin_password != ADMIN_PASS:
        op_status = u'Niepoprawne hasło serwisowe.'
    else:
        op_status = ''
    # Per-field validation messages and change flags.
    redirect_address = ''
    info_mon_address = ''
    info_mon_netmask = ''
    info_cam_data1 = ''
    info_cam_data2 = ''
    info_cam_data3 = ''
    info_cam_data4 = ''
    info_serial_number = ''
    change_mon_address = False
    change_mon_netmask = False
    change_cam_address = False
    reboot_needed = False
    if new_mon_address != current_mon_address:
        if re.match(REGEX_IP_ADDRESS, new_mon_address) and is_valid_ip(new_mon_address):
            change_mon_address = True
        else:
            info_mon_address = ERR_MSG_DETAILS % (
                'IP monitora', new_mon_address, REQUIRED_FORMAT_IP_ADDRESS)
            op_status = ERR_MSG_VALIDATION
    if new_mon_netmask != current_mon_netmask:
        if re.match(REGEX_IP_ADDRESS, new_mon_netmask) and is_valid_ip(new_mon_netmask):
            change_mon_netmask = True
        else:
            # NOTE(review): this message interpolates new_mon_address rather
            # than new_mon_netmask -- looks like a copy/paste slip; confirm.
            info_mon_netmask = ERR_MSG_DETAILS % (
                'Maska monitora', new_mon_address, REQUIRED_FORMAT_IP_ADDRESS)
            op_status = ERR_MSG_VALIDATION
    # cameras addresses
    if new_cam_address1 != current_cam_address1 \
            or new_cam_port1 != current_cam_port1 \
            or new_cam_stream_m1 != current_cam_stream_m1 \
            or new_cam_stream_s1 != current_cam_stream_s1 \
            or new_cam_user1 != current_cam_user1 \
            or new_cam_pass1 != current_cam_pass1 \
            or new_cam_type1 != current_cam_type1:
        if is_valid_ip_field_value(new_cam_address1):
            change_cam_address = True
        else:
            info_cam_data1 = ERR_MSG_DETAILS % (
                'IP kamery 1', new_cam_address1, REQUIRED_FORMAT_IP_ADDRESS)
            op_status = ERR_MSG_VALIDATION
    if new_cam_address2 != current_cam_address2 \
            or new_cam_port2 != current_cam_port2 \
            or new_cam_stream_m2 != current_cam_stream_m2 \
            or new_cam_stream_s2 != current_cam_stream_s2 \
            or new_cam_user2 != current_cam_user2 \
            or new_cam_pass2 != current_cam_pass2 \
            or new_cam_type2 != current_cam_type2:
        if is_valid_ip_field_value(new_cam_address2):
            change_cam_address = True
        else:
            info_cam_data2 = ERR_MSG_DETAILS % (
                'IP kamery 2', new_cam_address2, REQUIRED_FORMAT_IP_ADDRESS)
            op_status = ERR_MSG_VALIDATION
    if new_cam_address3 != current_cam_address3 \
            or new_cam_port3 != current_cam_port3 \
            or new_cam_stream_m3 != current_cam_stream_m3 \
            or new_cam_stream_s3 != current_cam_stream_s3 \
            or new_cam_user3 != current_cam_user3 \
            or new_cam_pass3 != current_cam_pass3 \
            or new_cam_type3 != current_cam_type3:
        if is_valid_ip_field_value(new_cam_address3):
            change_cam_address = True
        else:
            info_cam_data3 = ERR_MSG_DETAILS % (
                'IP kamery 3', new_cam_address3, REQUIRED_FORMAT_IP_ADDRESS)
            op_status = ERR_MSG_VALIDATION
    if new_cam_address4 != current_cam_address4 \
            or new_cam_port4 != current_cam_port4 \
            or new_cam_stream_m4 != current_cam_stream_m4 \
            or new_cam_stream_s4 != current_cam_stream_s4 \
            or new_cam_user4 != current_cam_user4 \
            or new_cam_pass4 != current_cam_pass4 \
            or new_cam_type4 != current_cam_type4:
        if is_valid_ip_field_value(new_cam_address4):
            change_cam_address = True
        else:
            info_cam_data4 = ERR_MSG_DETAILS % (
                'IP kamery 4', new_cam_address4, REQUIRED_FORMAT_IP_ADDRESS)
            op_status = ERR_MSG_VALIDATION
    # update current values
    # Changes are written only when every validation above passed.
    if op_status == '':
        print 'Checking current values...'
        if change_mon_address:
            replace(CFG_PATH + CFG_MON_IP_FILENAME,
                    r'%s %s' % (KEYWORD_ADDRESS, REGEX_IP_FORMAT),
                    KEYWORD_ADDRESS + ' ' + new_mon_address.strip())
            reboot_needed = True
            redirect_address = new_mon_address  # after reboot user should be redirected to the new address he set
            new_mon_address = ''
        if change_mon_netmask:
            replace(CFG_PATH + CFG_MON_IP_FILENAME,
                    r'%s %s' % (KEYWORD_NETMASK, REGEX_IP_FORMAT),
                    KEYWORD_NETMASK + ' ' + new_mon_netmask.strip())
            reboot_needed = True
            new_mon_netmask = ''
        if change_cam_address:
            # Rewrite the whole camera config file, one '|'-separated
            # line per camera.
            with open(CFG_PATH + CFG_CAM_IP_FILENAME, 'w+') as f:
                f.write('%s|%s|%s|%s|%s|%s|%s\n' % (new_cam_address1.strip(),
                                                    port_check(new_cam_port1),
                                                    new_cam_stream_m1.strip(),
                                                    new_cam_stream_s1.strip(),
                                                    new_cam_user1.strip(),
                                                    new_cam_pass1.strip(),
                                                    new_cam_type1.strip()))
                f.write('%s|%s|%s|%s|%s|%s|%s\n' % (new_cam_address2.strip(),
                                                    port_check(new_cam_port2),
                                                    new_cam_stream_m2.strip(),
                                                    new_cam_stream_s2.strip(),
                                                    new_cam_user2.strip(),
                                                    new_cam_pass2.strip(),
                                                    new_cam_type2.strip()))
                f.write('%s|%s|%s|%s|%s|%s|%s\n' % (new_cam_address3.strip(),
                                                    port_check(new_cam_port3),
                                                    new_cam_stream_m3.strip(),
                                                    new_cam_stream_s3.strip(),
                                                    new_cam_user3.strip(),
                                                    new_cam_pass3.strip(),
                                                    new_cam_type3.strip()))
                f.write('%s|%s|%s|%s|%s|%s|%s\n' % (new_cam_address4.strip(),
                                                    port_check(new_cam_port4),
                                                    new_cam_stream_m4.strip(),
                                                    new_cam_stream_s4.strip(),
                                                    new_cam_user4.strip(),
                                                    new_cam_pass4.strip(),
                                                    new_cam_type4.strip()))
                # Redundant inside 'with' (the context manager closes the
                # file), kept as in the original.
                f.close()
            # Clear the form fields now that the change has been applied.
            new_cam_address1 = ''
            new_cam_address2 = ''
            new_cam_address3 = ''
            new_cam_address4 = ''
            new_cam_port1 = ''
            new_cam_port2 = ''
            new_cam_port3 = ''
            new_cam_port4 = ''
            new_cam_stream_m1 = ''
            new_cam_stream_m2 = ''
            new_cam_stream_m3 = ''
            new_cam_stream_m4 = ''
            new_cam_stream_s1 = ''
            new_cam_stream_s2 = ''
            new_cam_stream_s3 = ''
            new_cam_stream_s4 = ''
            new_cam_user1 = ''
            new_cam_user2 = ''
            new_cam_user3 = ''
            new_cam_user4 = ''
            new_cam_pass1 = ''
            new_cam_pass2 = ''
            new_cam_pass3 = ''
            new_cam_pass4 = ''
            new_cam_type1 = CAM_TYPE_HASO_KG1
            new_cam_type2 = CAM_TYPE_HASO_KG1
            new_cam_type3 = CAM_TYPE_HASO_KG1
            new_cam_type4 = CAM_TYPE_HASO_KG1
            btn_esc(
            )  # emulates ESC button pressed - exits from streaming to configuration view
    # read new values and send to view
    try:
        cfg_mon_file = open(CFG_PATH + CFG_MON_IP_FILENAME)
        str_mon = cfg_mon_file.read()
        current_mon_address, current_mon_netmask = parse_interfaces_cfg(
            str_mon)
        print 'New monitor config values: (%s) (%s)' % (current_mon_address,
                                                        current_mon_netmask)
    except IOError:
        current_mon_address, current_mon_netmask = MON_ADDRESS_NOT_CONFIGURED, MON_NETMASK_NOT_CONFIGURED
    cam_config_entry = parse_cam_config(CFG_PATH + CFG_CAM_IP_FILENAME)
    print 'New config loaded.'
    current_cam_address1 = cam_config_entry[0].address
    current_cam_address2 = cam_config_entry[1].address
    current_cam_address3 = cam_config_entry[2].address
    current_cam_address4 = cam_config_entry[3].address
    current_cam_port1 = cam_config_entry[0].port
    current_cam_port2 = cam_config_entry[1].port
    current_cam_port3 = cam_config_entry[2].port
    current_cam_port4 = cam_config_entry[3].port
    current_cam_stream_m1 = cam_config_entry[0].stream_m
    current_cam_stream_m2 = cam_config_entry[1].stream_m
    current_cam_stream_m3 = cam_config_entry[2].stream_m
    current_cam_stream_m4 = cam_config_entry[3].stream_m
    current_cam_stream_s1 = cam_config_entry[0].stream_s
    current_cam_stream_s2 = cam_config_entry[1].stream_s
    current_cam_stream_s3 = cam_config_entry[2].stream_s
    current_cam_stream_s4 = cam_config_entry[3].stream_s
    current_cam_user1 = cam_config_entry[0].user
    current_cam_user2 = cam_config_entry[1].user
    current_cam_user3 = cam_config_entry[2].user
    current_cam_user4 = cam_config_entry[3].user
    current_cam_pass1 = cam_config_entry[0].password
    current_cam_pass2 = cam_config_entry[1].password
    current_cam_pass3 = cam_config_entry[2].password
    current_cam_pass4 = cam_config_entry[3].password
    current_cam_type1 = cam_config_entry[0].cam_type
    current_cam_type2 = cam_config_entry[1].cam_type
    current_cam_type3 = cam_config_entry[2].cam_type
    current_cam_type4 = cam_config_entry[3].cam_type
    print 'New config data fetched.'
    try:
        cfg_serial_file = open(CFG_PATH + CFG_SERIAL_FILENAME)
        current_serial_number = cfg_serial_file.read()
    except IOError:
        current_serial_number = DEFAULT_SERIAL
    # return to same page to set new values or display message
    context = {
        'current_mon_address': current_mon_address.strip(),
        'current_mon_netmask': current_mon_netmask.strip(),
        'current_cam_address1': current_cam_address1.strip(),
        'current_cam_address2': current_cam_address2.strip(),
        'current_cam_address3': current_cam_address3.strip(),
        'current_cam_address4': current_cam_address4.strip(),
        'current_cam_port1': current_cam_port1.strip(),
        'current_cam_port2': current_cam_port2.strip(),
        'current_cam_port3': current_cam_port3.strip(),
        'current_cam_port4': current_cam_port4.strip(),
        'current_cam_stream_m1': current_cam_stream_m1.strip(),
        'current_cam_stream_m2': current_cam_stream_m2.strip(),
        'current_cam_stream_m3': current_cam_stream_m3.strip(),
        'current_cam_stream_m4': current_cam_stream_m4.strip(),
        'current_cam_stream_s1': current_cam_stream_s1.strip(),
        'current_cam_stream_s2': current_cam_stream_s2.strip(),
        'current_cam_stream_s3': current_cam_stream_s3.strip(),
        'current_cam_stream_s4': current_cam_stream_s4.strip(),
        'current_cam_user1': current_cam_user1.strip(),
        'current_cam_user2': current_cam_user2.strip(),
        'current_cam_user3': current_cam_user3.strip(),
        'current_cam_user4': current_cam_user4.strip(),
        'current_cam_pass1': current_cam_pass1.strip(),
        'current_cam_pass2': current_cam_pass2.strip(),
        'current_cam_pass3': current_cam_pass3.strip(),
        'current_cam_pass4': current_cam_pass4.strip(),
        'current_cam_type1': current_cam_type1.strip(),
        'current_cam_type2': current_cam_type2.strip(),
        'current_cam_type3': current_cam_type3.strip(),
        'current_cam_type4': current_cam_type4.strip(),
        'current_serial_number': current_serial_number.strip(),
        'new_mon_address': new_mon_address.strip(),
        'new_mon_netmask': new_mon_netmask.strip(),
        'new_cam_address1': new_cam_address1.strip(),
        'new_cam_address2': new_cam_address2.strip(),
        'new_cam_address3': new_cam_address3.strip(),
        'new_cam_address4': new_cam_address4.strip(),
        'new_cam_port1': new_cam_port1.strip(),
        'new_cam_port2': new_cam_port2.strip(),
        'new_cam_port3': new_cam_port3.strip(),
        'new_cam_port4': new_cam_port4.strip(),
        'new_cam_stream_m1': new_cam_stream_m1.strip(),
        'new_cam_stream_m2': new_cam_stream_m2.strip(),
        'new_cam_stream_m3': new_cam_stream_m3.strip(),
        'new_cam_stream_m4': new_cam_stream_m4.strip(),
        'new_cam_stream_s1': new_cam_stream_s1.strip(),
        'new_cam_stream_s2': new_cam_stream_s2.strip(),
        'new_cam_stream_s3': new_cam_stream_s3.strip(),
        'new_cam_stream_s4': new_cam_stream_s4.strip(),
        'new_cam_user1': new_cam_user1.strip(),
        'new_cam_user2': new_cam_user2.strip(),
        'new_cam_user3': new_cam_user3.strip(),
        'new_cam_user4': new_cam_user4.strip(),
        'new_cam_pass1': new_cam_pass1.strip(),
        'new_cam_pass2': new_cam_pass2.strip(),
        'new_cam_pass3': new_cam_pass3.strip(),
        'new_cam_pass4': new_cam_pass4.strip(),
        'new_cam_type1': new_cam_type1.strip(),
        'new_cam_type2': new_cam_type2.strip(),
        'new_cam_type3': new_cam_type3.strip(),
        'new_cam_type4': new_cam_type4.strip(),
        'info_mon_address': info_mon_address.strip(),
        'info_mon_netmask': info_mon_netmask.strip(),
        'info_cam_data1': info_cam_data1.strip(),
        'info_cam_data2': info_cam_data2.strip(),
        'info_cam_data3': info_cam_data3.strip(),
        'info_cam_data4': info_cam_data4.strip(),
        'info_serial_number': info_serial_number.strip(),
        'redirect_address': redirect_address,
        'op_status': op_status,
    }
    if reboot_needed:
        print 'Reboot required..'
        # Network interfaces and then cfgviewer server itself need to be reboot
        subprocess.call([
            'bash', 'reboot.sh'
        ])  # should fire after some delay and go asynchronously
        return render(request, 'cfgpanel/reboot.html', context)
        # Dead statement after return, preserved from the original.
        pass
    return render(request, 'cfgpanel/index.html', context)
fn = [0, 0.05, 0.1, 0.2, 0.3] print('zerofill') for p in fp: for n in fn: newsamples = samples.copy() damage.zerofill(newsamples, 0.4) matches = recognize.cheat(samples, newsamples, false_positives=p, false_negatives=n) validx, validy = utils.tovalidxy(newsamples, matches) f = interp1d(validx, validy, kind='cubic', fill_value='extrapolate') invalidx = utils.invalidx(matches) fixedy = f(invalidx) utils.replace(newsamples, invalidx, fixedy) print('fp:', p, 'fn:', n, 'mean:', np.mean(evaluate.abserrors(samples, newsamples))) print('noiseadd') for p in fp: for n in fn: newsamples = samples.copy() damage.noiseadd(newsamples, 0.6, rate=0.3) matches = recognize.cheat(samples, newsamples, false_positives=p, false_negatives=n) validx, validy = utils.tovalidxy(newsamples, matches) f = interp1d(validx, validy, kind='cubic', fill_value='extrapolate') invalidx = utils.invalidx(matches)