def process_message(self, peer, mailfrom, rcpttos, data):
    if savemsg:
        filename = "savedmsg/%s-%d.eml" % (datetime.now().strftime("%Y%m%d%H%M%S"), self.no)
        f = open(filename, "w")
        f.write(data)
        f.close()
    if "X-Arf: yes" in data:
        mail = email.message_from_string(data)
        for part in mail.walk():
            ctypes = part.get_params(None, "Content-Type")
            if ctypes and len(ctypes) > 2:
                if ctypes[0] == ("text/plain", "") and ctypes[2] == ("name", "report.txt"):
                    payload = part.get_payload()
                    xarf = load(payload, Loader=Loader)
                    handle_xarf(xarf)
            dtypes = part.get_params(None, "Content-Disposition")
            if dtypes and dtypes[1] == ("filename", "report.txt"):
                payload = b64decode(part.get_payload())
                xarf = load(payload, Loader=Loader)
                handle_xarf(xarf)
    print "Email received at %s" % datetime.now().strftime("%H:%M:%S")
    self.no += 1
def test_name_with_version_specified(testing_workdir, test_config):
    api.skeletonize('sympy', 'pypi', version='0.7.5', config=test_config)
    with open('{}/test-skeleton/sympy-0.7.5/meta.yaml'.format(thisdir)) as f:
        expected = yaml.load(f)
    with open('sympy/meta.yaml') as f:
        actual = yaml.load(f)
    assert expected == actual, (expected, actual)
def _child_main_loop(self, queue):
    while True:
        url = "http://geekhost.net/OK"
        f = urllib.urlopen(url)
        data = f.read()
        #print data
        abcPattern = re.compile(r'OK')
        if abcPattern.match(data):
            queue.put('Already logined')
        else:
            queue.put('Need login')
            LOGIN_URL = 'https://auth-wlc.ntwk.dendai.ac.jp/login.html'
            #LOGIN_URL = 'http://geekhost.net/checkparams.php'
            pd = yaml.load(open('config.yaml').read().decode('utf-8'))
            pd['buttonClicked'] = '4'
            pd['redirect_url'] = 'http://google.com/'
            pd["err_flag"] = "0"
            pd["err_msg"] = ""
            pd["info_flag"] = "0"
            pd["info_msg"] = ""
            params = urllib.urlencode(pd)
            print repr(params)
            up = urllib.urlopen(LOGIN_URL, params)
        # then just sleep
        time.sleep(yaml.load(open('config.yaml').read().decode('utf-8'))['threadtime'])
def load_yaml():
    """
    Load YAML-format configuration files
    :return:
    """
    config = getattr(load_yaml, 'config', None)
    if config:
        return config

    from yaml import load
    import os
    from glob import glob

    cfg_dir = os.path.abspath(os.path.join(os.path.split(__file__)[0], 'conf/'))
    cfg_file = os.path.join(cfg_dir, 'andaman.yaml')
    with open(cfg_file) as f:
        config = load(f)

    # Resolve includes
    if 'include' in config:
        for entry in config['include']:
            for fname in glob(os.path.join(cfg_dir, entry)):
                if fname == cfg_file:
                    continue
                try:
                    with open(fname) as f:
                        include_data = load(f)
                    for k, v in include_data.items():
                        config[k] = v
                except IOError:
                    continue

    setattr(load_yaml, 'config', config)
    return config
def test_yaml_representation_has_all_expected_fields(self):
    """Verify that the YAML representation of reference props is ok."""
    prop = properties.ReferenceProperty(
        'name', references.Reference(
            '5f2c9a1d-1113-49f1-9d1d-29aaa4a520b0', None, None))
    string = yaml.dump(prop)
    data = yaml.load(string)
    self.assertTrue(isinstance(data, dict))
    self.assertEqual(data['uuid'], '5f2c9a1d-1113-49f1-9d1d-29aaa4a520b0')
    self.assertTrue(not 'service' in data)
    self.assertTrue(not 'ref' in data)

    prop = properties.ReferenceProperty(
        'name', references.Reference(
            '5f2c9a1d-1113-49f1-9d1d-29aaa4a520b0', 'issues', None))
    string = yaml.dump(prop)
    data = yaml.load(string)
    self.assertTrue(isinstance(data, dict))
    self.assertEqual(data['uuid'], '5f2c9a1d-1113-49f1-9d1d-29aaa4a520b0')
    self.assertEqual(data['service'], 'issues')
    self.assertTrue(not 'ref' in data)

    prop = properties.ReferenceProperty(
        'name', references.Reference(
            '5f2c9a1d-1113-49f1-9d1d-29aaa4a520b0', 'issues', 'master'))
    string = yaml.dump(prop)
    data = yaml.load(string)
    self.assertTrue(isinstance(data, dict))
    self.assertEqual(data['uuid'], '5f2c9a1d-1113-49f1-9d1d-29aaa4a520b0')
    self.assertEqual(data['service'], 'issues')
    self.assertEqual(data['ref'], 'master')
def get_config(queue_names,
               yaml_filename='/etc/tada/tada.conf',
               hiera_filename='/etc/tada/hiera.yaml',
               validate=False):
    """Read multi-queue config from yaml_filename. Validate its contents.
    Ensure queue_names are all in the list of named queues."""
    try:
        cfg = yaml.load(open(yaml_filename))
    except:
        raise Exception('ERROR: Could not read data-queue config file "{}"'
                        .format(yaml_filename))
    try:
        cfg.update(yaml.load(open(hiera_filename)))
    except Exception as err:
        raise Exception('ERROR: Could not read data-queue config file "{}"; {}'
                        .format(hiera_filename, str(err)))
    if validate:
        validate_config(cfg, qnames=queue_names, fname=yaml_filename)
    #!lut = get_config_lut(cfg)
    #return dict([[q['name'], q] for q in config['queues']])
    lut = cfg
    if validate:
        missing = set(queue_names) - set(lut.keys())
    else:
        missing = set()
    if len(missing) > 0:
        raise Exception(
            'ERROR: Config file "{}" does not contain named queues: {}'
            .format(yaml_filename, missing))
    #return lut, cfg['dirs']
    logging.debug('get_config got: {}'.format(lut))
    return lut, dict()
def _expand_repeats(yaml_str, yaml_data):
    entry = yaml.load(yaml_str)
    depth = _calculate_depth(entry, 0)
    if (depth < 2):
        if entry is not None:
            yaml_data.append(entry)
        return
    entry_val = entry.values()[0]
    name = entry.keys()[0]
    repeats = entry_val["repeats"]
    part_str = ""
    parts = []
    for line in yaml_str.split("\n"):
        if len(line) < 1 or re.search(r"\w", line[0]) or re.search(r"repeats:", line):
            continue
        if re.search(r" \w", line[0:3]):
            part = yaml.load(part_str)
            if part is not None:
                parts.append(part)
            part_str = ""
        else:
            part_str += (line + "\n")
    part = yaml.load(part_str)
    if part is not None:
        parts.append(part)
    for i in xrange(repeats):
        for j, part in enumerate(parts):
            yaml_data.append({"%s_%d_%d" % (name, i, j): part})
    return
def test_cleaner(self):
    c = util.Cleaner(hide_keys=['pass'], hide_values=['xyz', 'zyx'])
    _in = yaml.load("""
        pass: alma
        password: xyz
        public: yaay
        stuff:
          - secret: zyx
          - - zyx
            - alma
        """)
    _out = yaml.load("""
        pass: XXX
        password: XXX
        public: yaay
        stuff:
          - secret: XXX
          - - XXX
            - alma
        """)
    obfuscated = c.deep_copy(_in)
    self.assertEqual(obfuscated, _out)
def setUpClass(self):
    valid_subregion_config = """
        type: subregion
        lats:
            range_min: -20
            range_max: 20
            range_step: 1
        lons:
            range_min: -20
            range_max: 20
            range_step: 1
        output_name: fake_plot_name
    """
    self.valid_subregion = yaml.load(valid_subregion_config)

    missing_keys_subregion_config = """
        type: subregion
    """
    self.missing_keys_subregion = yaml.load(missing_keys_subregion_config)

    self.required_subregion_keys = set([
        'lats',
        'lons',
        'output_name'
    ])
def __read_over(self, overstate):
    '''
    Read in the overstate file
    '''
    if overstate:
        with salt.utils.fopen(overstate) as fp_:
            try:
                # TODO Use render system
                return self.__sort_stages(yaml.load(fp_))
            except Exception:
                return {}
    if self.env not in self.opts['file_roots']:
        return {}
    for root in self.opts['file_roots'][self.env]:
        fn_ = os.path.join(
            root,
            self.opts.get('overstate', 'overstate.sls')
        )
        if not os.path.isfile(fn_):
            continue
        with salt.utils.fopen(fn_) as fp_:
            try:
                # TODO Use render system
                return self.__sort_stages(yaml.load(fp_))
            except Exception:
                return {}
    return {}
def setUpClass(self):
    not_minimal_config = """
        datasets:
    """
    self.not_minimal = yaml.load(not_minimal_config)

    not_well_formed_config = """
        datasets:
            reference:
                data_source: local
                file_count: 1
                path: /a/fake/path/file.py
                variable: pr
            targets:
                - data_source: local
                  file_count: 5
                  file_glob_pattern: something for globbing files here
                  variable: pr
                  optional_args:
                      name: Target1
                - data_source: esgf
                  dataset_id: fake dataset id
                  variable: pr
                  esgf_username: my esgf username
                  esgf_password: my esgf password
        metrics:
            - Bias
            - TemporalStdDev
    """
    self.not_well_formed = yaml.load(not_well_formed_config)
def main():
    # Connection Database
    conf_file_path = sys.path[0] + "/conf/crawl.conf"
    conf_file = open(conf_file_path)
    conf = yaml.load(conf_file)
    host = conf['host']
    port = conf['port']
    user = conf['user']
    password = conf['password']
    database = conf['conf_database']
    tablename = conf['conf_table']

    # Read initial_sql_table
    sql_file_path = sys.path[0] + "/conf/sql.ini"
    sql_file = open(sql_file_path)
    sql = yaml.load(sql_file)

    # SQL Section
    comment_table = sql['top_api_shop_order']

    try:
        itemstage = sys.argv[1]
    except:
        itemstage = 10

    projectlist = getTask(host, port, user, password, database, tablename, itemstage)
    for project in projectlist:
        do_run(host, port, user, password, project[0], project[1], project[2],
               project[3], comment_table)
def main():
    logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s', level=logging.INFO)
    logging.getLogger('requests').setLevel(logging.WARNING)
    setup_signal_handlers()

    # Patroni reads the configuration from the command-line argument if it exists,
    # and from the environment otherwise.
    use_env = False
    use_file = (len(sys.argv) >= 2 and os.path.isfile(sys.argv[1]))
    if not use_file:
        config_env = os.environ.get(Patroni.PATRONI_CONFIG_VARIABLE)
        use_env = config_env is not None
        if not use_env:
            print('Usage: {0} config.yml'.format(sys.argv[0]))
            print('\tPatroni may also read the configuration from the {} environment variable'
                  .format(Patroni.PATRONI_CONFIG_VARIABLE))
            return

    if use_file:
        with open(sys.argv[1], 'r') as f:
            config = yaml.load(f)
    elif use_env:
        config = yaml.load(config_env)

    patroni = Patroni(config)
    try:
        patroni.run()
    except KeyboardInterrupt:
        pass
    finally:
        patroni.api.shutdown()
        patroni.postgresql.stop(checkpoint=False)
        patroni.dcs.delete_leader()
def load_from_dir(self, directory):
    with open("{}/yaml/venues.yaml".format(directory), "r") as f:
        venue_dict = yaml.load(f, Loader=Loader)
        for acronym, name_str in venue_dict.items():
            name, venue_type = name_str.split(":")
            self.venues[acronym] = {
                "name": name,
                "is_acl": (venue_type == "ACL"),
                "is_toplevel": False,
                "slug": slugify(acronym),
                "type": venue_type,
                "years": set(),
                "volumes": [],
            }
    with open("{}/yaml/venues_letters.yaml".format(directory), "r") as f:
        self.letters = yaml.load(f, Loader=Loader)
        for letter, acronym in self.letters.items():
            self.venues[acronym]["is_toplevel"] = True
            self.venues[acronym]["main_letter"] = letter
    with open("{}/yaml/venues_joint_map.yaml".format(directory), "r") as f:
        map_dict = yaml.load(f, Loader=Loader)
        for id_, joint in map_dict.items():
            if isinstance(joint, str):
                joint = [joint]
            self.joint_map[id_] = joint
def main():
    try:
        import yaml
    except ImportError:
        # Don't do anything if yaml module is not available.
        return 0
    if len(sys.argv) != 2:
        raise SystemExit('usage: %s filename' % (sys.argv[0],))
    filename = sys.argv[1]
    try:
        input_file = open(filename, 'rb')
        try:
            yaml.load(input_file)
        finally:
            input_file.close()
    except yaml.error.YAMLError:
        error = sys.exc_info()[1]
        mark = error.problem_mark
        sys.stderr.write('%s:%s:%s: %s\n' % (filename, mark.line + 1,
                                             mark.column + 1, error.problem))
        return 1
    except IOError:
        sys.stderr.write('%s\n' % (sys.exc_info()[1],))
        return 1
def test_disks_flag(self):
    # specifying an EBS mount or PD mount is only valid for EC2/Euca/GCE, so
    # fail on a cluster deployment.
    argv = self.cluster_argv[:] + ["--disks", "ABCDFEG"]
    self.assertRaises(BadConfigurationException, ParseArgs, argv, self.function)

    # if we get a --disk flag, fail if it's not a dict (after base64, yaml load)
    bad_disks_layout = yaml.load("""
        public1,
    """)
    base64ed_bad_disks = base64.b64encode(yaml.dump(bad_disks_layout))
    cloud_argv1 = self.cloud_argv[:] + ["--disks", base64ed_bad_disks]
    self.assertRaises(BadConfigurationException, ParseArgs, cloud_argv1,
                      self.function)

    # passing in a dict should be fine, and result in us seeing the same value
    # for --disks that we passed in.
    disks = {'public1': 'vol-ABCDEFG'}
    good_disks_layout = yaml.load("""
        public1 : vol-ABCDEFG
    """)
    base64ed_good_disks = base64.b64encode(yaml.dump(good_disks_layout))
    cloud_argv2 = self.cloud_argv[:] + ["--disks", base64ed_good_disks]
    actual = ParseArgs(cloud_argv2, self.function).args
    self.assertEquals(disks, actual.disks)
def _main(args, config):
    logging.setup_logging(config)

    fp = config.ircbot_channel_config
    if fp:
        fp = os.path.expanduser(fp)
        if not os.path.exists(fp):
            raise ElasticRecheckException(
                "Unable to read layout config file at %s" % fp)
    else:
        raise ElasticRecheckException(
            "Channel Config must be specified in config file.")

    channel_config = ChannelConfig(yaml.load(open(fp)))
    msgs = MessageConfig(yaml.load(open(fp)))

    if not args.noirc:
        bot = RecheckWatchBot(
            channel_config.channels,
            config=config)
    else:
        bot = None

    recheck = RecheckWatch(
        bot,
        channel_config,
        msgs,
        config=config,
        commenting=not args.nocomment,
    )

    recheck.start()
    if not args.noirc:
        bot.start()
def __init__(self, directory):
    os.chdir(directory)
    if os.path.exists('bakery.yaml'):
        self.configfile = 'bakery.yaml'
        self.config = yaml.load(open('bakery.yaml'))
    elif os.path.exists('bakery.yml'):
        self.config = yaml.load(open('bakery.yml'))
        self.configfile = 'bakery.yml'
    self.commit = self.config.get('commit', 'HEAD')
    self.process_files = self.config.get('process_files', [])
    self.subset = self.config.get('subset', [])
    self.compiler = self.config.get('compiler', 'fontforge')
    self.ttfautohint = self.config.get('ttfautohint', '-l 7 -r 28 -G 50 -x 13 -w "G"')
    self.afdko = self.config.get('afdko', '')
    self.license = self.config.get('license', '')
    self.pyftsubset = self.config.get('pyftsubset', '--notdef-outline --name-IDs=* --hinting')
    self.notes = self.config.get('notes', '')
    self.newfamily = self.config.get('newfamily', '')
    self.widgets = Widgets(self)
def get_jinja_vars(self):
    # order for per-project variables (each overrides the previous):
    # 1. /etc/kolla/globals.yml and passwords.yml
    # 2. config/all.yml
    # 3. config/<project>/defaults/main.yml
    with open(file_utils.find_config_file('passwords.yml'), 'r') as gf:
        global_vars = yaml.load(gf)
    with open(file_utils.find_config_file('globals.yml'), 'r') as gf:
        global_vars.update(yaml.load(gf))

    all_yml_name = os.path.join(self.config_dir, 'all.yml')
    jvars = yaml.load(jinja_utils.jinja_render(all_yml_name, global_vars))
    jvars.update(global_vars)

    for proj in self.get_projects():
        proj_yml_name = os.path.join(self.config_dir, proj,
                                     'defaults', 'main.yml')
        if os.path.exists(proj_yml_name):
            proj_vars = yaml.load(jinja_utils.jinja_render(proj_yml_name,
                                                           jvars))
            jvars.update(proj_vars)
        else:
            LOG.warn('path missing %s' % proj_yml_name)

    # override node_config_directory to empty
    jvars.update({'node_config_directory': ''})
    return jvars
def _cmd_config_set(self, argument):
    """
    set a configvalue. syntax for argument:
    [network=net] [channel=chan] module.setting newvalue
    """
    # how this works:
    # args[x][:8] is checked for network= or channel=. channel= must come after network=
    # args[x][8:] is the network/channelname without the prefix
    # " ".join(args[x:]) joins all arguments after network= and channel= to a string
    # from word x to the end of input
    args = argument.split(" ")
    if len(args) >= 4 and len(args[0]) >= 8 and len(args[1]) >= 8 and \
            args[0][:8] == "network=" and args[1][:8] == "channel=":
        try:
            (module, setting) = args[2].split(".", 1)
            self.parent.getServiceNamed("config").set(
                setting, yaml.load(" ".join(args[3:])), module, args[0][8:], args[1][8:])
            return self.parent.getServiceNamed("config").get(
                setting, "[unset]", module, args[0][8:], args[1][8:])
        except ValueError:
            return "Error: your setting is not in the module.setting form"
    elif len(args) >= 3 and len(args[0]) >= 8 and args[0][:8] == "network=":
        try:
            (module, setting) = args[1].split(".", 1)
            self.parent.getServiceNamed("config").set(
                setting, yaml.load(" ".join(args[2:])), module, args[0][8:])
            return self.parent.getServiceNamed("config").get(
                setting, "[unset]", module, args[0][8:])
        except ValueError:
            return "Error: your setting is not in the module.setting form"
    elif len(argument):
        try:
            (module, setting) = args[0].split(".", 1)
            self.parent.getServiceNamed("config").set(
                args[0], yaml.load(" ".join(args[1:])), module)
            return self.parent.getServiceNamed("config").get(
                setting, "[unset]", module)
        except ValueError:
            return "Error: your setting is not in the module.setting form"
    else:
        return "config set [network=networkname] [channel=#somechannel] setting value"
def test_extra_options(self):
    device = NewDevice(os.path.join(os.path.dirname(__file__), '../devices/kvm01.yaml'))
    kvm_yaml = os.path.join(os.path.dirname(__file__), 'sample_jobs/kvm-inline.yaml')
    with open(kvm_yaml) as sample_job_data:
        job_data = yaml.load(sample_job_data)
    device['actions']['boot']['methods']['qemu']['parameters']['extra'] = yaml.load("""
        - -smp
        - 1
        - -global
        - virtio-blk-device.scsi=off
        - -device virtio-scsi-device,id=scsi
        - --append "console=ttyAMA0 root=/dev/vda rw"
    """)
    self.assertIsInstance(device['actions']['boot']['methods']['qemu']['parameters']['extra'][1], int)
    parser = JobParser()
    job = parser.parse(yaml.dump(job_data), device, 4212, None, None, None,
                       output_dir='/tmp/')
    job.validate()
    boot_image = [action for action in job.pipeline.actions
                  if action.name == 'boot_image_retry'][0]
    boot_qemu = [action for action in boot_image.internal_pipeline.actions
                 if action.name == 'boot_qemu_image'][0]
    qemu = [action for action in boot_qemu.internal_pipeline.actions
            if action.name == 'execute-qemu'][0]
    self.assertIsInstance(qemu.sub_command, list)
    [self.assertIsInstance(item, str) for item in qemu.sub_command]
    self.assertIn('virtio-blk-device.scsi=off', qemu.sub_command)
    self.assertIn('1', qemu.sub_command)
    self.assertNotIn(1, qemu.sub_command)
def get_configuration(default_filename: str = 'defaults.yml',
                      user_filename: str = 'config.yml') -> ConfigTree:
    """
    gets the current configuration, as specified by YAML files

    :param default_filename: name of the default settings file (relative to :file:`configparser.py`)
    :param user_filename: name of the user settings file (relative to :file:`configparser.py`)
    :return: settings tree
    """
    # read defaults
    with open(default_filename, 'r') as file:
        defaults = yaml.load(file)
    logger.info("Successfully parsed {} as default configuration".format(default_filename))
    with open(user_filename, 'r') as file:
        user_config = yaml.load(file)
    logger.info("Successfully parsed {} as user configuration".format(user_filename))

    # apply user config over defaults configuration
    configuration = update_settings_tree(base=defaults, update=user_config)

    # parse MQTT path templates
    for path_name in configuration.MQTT.Path:
        path_template = configuration.MQTT.Path[path_name]
        path = path_template.format(prefix=configuration.MQTT.prefix,
                                    sys_name=configuration.sys_name)
        configuration.MQTT.Path[path_name] = path

    return configuration
def test_unsafe(self):
    dummy = Dummy()

    with self.assertRaises(yaml.representer.RepresenterError):
        yaml.dump_all([dummy])

    with self.assertRaises(yaml.representer.RepresenterError):
        yaml.dump(dummy, Dumper=yDumper)

    # reverse monkey patch and try again
    monkey_patch_pyyaml_reverse()

    with tempfile.TemporaryFile(suffix='.yaml') as f:
        yaml.dump_all([dummy], stream=f)
        f.seek(0)  # rewind

        doc_unsafe = yaml.load(f)
        self.assertTrue(type(doc_unsafe) is Dummy)

        monkey_patch_pyyaml()
        with self.assertRaises(yaml.constructor.ConstructorError):
            f.seek(0)  # rewind
            safe_yaml_load(f)

        with self.assertRaises(yaml.constructor.ConstructorError):
            f.seek(0)  # rewind
            yaml.load(f)
def evaluate(definition, args, account_info, force: bool):
    # extract Senza* meta information
    info = definition.pop("SenzaInfo")
    info["StackVersion"] = args.version

    template = yaml.dump(definition, default_flow_style=False)
    definition = evaluate_template(template, info, [], args, account_info)
    definition = yaml.load(definition)

    components = definition.pop("SenzaComponents", [])

    # merge base template with definition
    BASE_TEMPLATE.update(definition)
    definition = BASE_TEMPLATE

    # evaluate all components
    for component in components:
        componentname, configuration = named_value(component)
        configuration["Name"] = componentname

        componenttype = configuration["Type"]
        componentfn = get_component(componenttype)

        if not componentfn:
            raise click.UsageError('Component "{}" does not exist'.format(componenttype))

        definition = componentfn(definition, configuration, args, info, force)

    # throw executed template to templating engine and provide all information
    # for substitutions
    template = yaml.dump(definition, default_flow_style=False)
    definition = evaluate_template(template, info, components, args, account_info)
    definition = yaml.load(definition)

    return definition
def setUpClass(self):
    valid_contour_config = """
        type: contour
        results_indices:
            - !!python/tuple [0, 0]
        lats:
            range_min: -20
            range_max: 20
            range_step: 1
        lons:
            range_min: -20
            range_max: 20
            range_step: 1
        output_name: wrf_bias_compared_to_knmi
    """
    self.valid_contour = yaml.load(valid_contour_config)

    missing_keys_contour_config = """
        type: contour
    """
    self.missing_keys_contour = yaml.load(missing_keys_contour_config)

    self.required_contour_keys = set([
        'results_indices',
        'lats',
        'lons',
        'output_name'
    ])
def run_condor_jobs(c, config_file, subject_list_file, p_name):
    '''
    '''

    # Import packages
    import commands
    from time import strftime

    try:
        sublist = yaml.load(open(os.path.realpath(subject_list_file), 'r'))
    except:
        raise Exception("Subject list is not in proper YAML format. Please check your file")

    cluster_files_dir = os.path.join(os.getcwd(), 'cluster_files')
    subject_bash_file = os.path.join(cluster_files_dir,
                                     'submit_%s.condor' % str(strftime("%Y_%m_%d_%H_%M_%S")))
    f = open(subject_bash_file, 'w')
    print >>f, "Executable = /usr/bin/python"
    print >>f, "Universe = vanilla"
    print >>f, "transfer_executable = False"
    print >>f, "getenv = True"
    print >>f, "log = %s" % os.path.join(cluster_files_dir,
                                         'c-pac_%s.log' % str(strftime("%Y_%m_%d_%H_%M_%S")))

    sublist = yaml.load(open(os.path.realpath(subject_list_file), 'r'))
    for sidx in range(1, len(sublist) + 1):
        print >>f, "error = %s" % os.path.join(cluster_files_dir,
                                               'c-pac_%s.%s.err' % (str(strftime("%Y_%m_%d_%H_%M_%S")), str(sidx)))
        print >>f, "output = %s" % os.path.join(cluster_files_dir,
                                                'c-pac_%s.%s.out' % (str(strftime("%Y_%m_%d_%H_%M_%S")), str(sidx)))
        print >>f, "arguments = \"-c 'import CPAC; CPAC.pipeline.cpac_pipeline.run( ''%s'',''%s'',''%s'',''%s'',''%s'',''%s'',''%s'')\'\"" % \
            (str(config_file), subject_list_file, str(sidx), c.maskSpecificationFile,
             c.roiSpecificationFile, c.templateSpecificationFile, p_name)
        print >>f, "queue"
    f.close()

    #commands.getoutput('chmod +x %s' % subject_bash_file )
    print commands.getoutput("condor_submit %s " % (subject_bash_file))
def test_checkbox_value(self):
    attrs = '''
        editable:
            storage:
                osd_pool_size:
                    description: desc
                    label: OSD Pool Size
                    type: checkbox
                    value: true
                    weight: 80
    '''
    self.assertNotRaises(errors.InvalidData,
                         AttributesValidator.validate_editable_attributes,
                         yaml.load(attrs))

    attrs = '''
        editable:
            storage:
                osd_pool_size:
                    description: desc
                    label: OSD Pool Size
                    type: checkbox
                    value: 'x'
                    weight: 80
    '''
    self.assertRaises(errors.InvalidData,
                      AttributesValidator.validate_editable_attributes,
                      yaml.load(attrs))
def test_yaml_representation_has_all_expected_fields(self):
    """Verify that the YAML representation of list properties is ok."""
    prop = properties.ListProperty('name', [])
    string = yaml.dump(prop)
    data = yaml.load(string)
    self.assertTrue(isinstance(data, list))
    self.assertEqual(len(data), 0)

    prop = properties.ListProperty('name', [
        properties.IntProperty('name', 5),
        properties.IntProperty('name', -17),
    ])
    string = yaml.dump(prop)
    data = yaml.load(string)
    self.assertTrue(isinstance(data, list))
    self.assertEqual(len(data), 2)
    self.assertEqual(data[0], 5)
    self.assertEqual(data[1], -17)

    prop = properties.ListProperty('name', [
        properties.TextProperty('name', 'foo'),
        properties.TextProperty('name', 'bar'),
        properties.TextProperty('name', 'baz'),
    ])
    string = yaml.dump(prop)
    data = yaml.load(string)
    self.assertTrue(isinstance(data, list))
    self.assertEqual(len(data), 3)
    self.assertEqual(data[0], 'foo')
    self.assertEqual(data[1], 'bar')
    self.assertEqual(data[2], 'baz')
def _install_container_bcbio_system(datadir):
    """Install limited bcbio_system.yaml file for setting core and memory usage.

    Adds any non-specific programs to the exposed bcbio_system.yaml file, only
    when upgrade happening inside a docker container.
    """
    base_file = os.path.join(datadir, "config", "bcbio_system.yaml")
    if not os.path.exists(base_file):
        return
    expose_file = os.path.join(datadir, "galaxy", "bcbio_system.yaml")
    expose = set(["memory", "cores", "jvm_opts"])
    with open(base_file) as in_handle:
        config = yaml.load(in_handle)
    if os.path.exists(expose_file):
        with open(expose_file) as in_handle:
            expose_config = yaml.load(in_handle)
    else:
        expose_config = {"resources": {}}
    for pname, vals in config["resources"].iteritems():
        expose_vals = {}
        for k, v in vals.iteritems():
            if k in expose:
                expose_vals[k] = v
        if len(expose_vals) > 0 and pname not in expose_config["resources"]:
            expose_config["resources"][pname] = expose_vals
    with open(expose_file, "w") as out_handle:
        yaml.safe_dump(expose_config, out_handle, default_flow_style=False,
                       allow_unicode=False)
    return expose_file
def name_to_config(template):
    """Read template file into a dictionary to use as base for all samples.

    Handles well-known template names, pulled from GitHub repository and local files.
    """
    if objectstore.is_remote(template):
        with objectstore.open(template) as in_handle:
            config = yaml.load(in_handle)
        with objectstore.open(template) as in_handle:
            txt_config = in_handle.read()
    elif os.path.isfile(template):
        if template.endswith(".csv"):
            raise ValueError("Expected YAML file for template and found CSV, are arguments switched? %s" % template)
        with open(template) as in_handle:
            txt_config = in_handle.read()
        with open(template) as in_handle:
            config = yaml.load(in_handle)
    else:
        base_url = "https://raw.github.com/chapmanb/bcbio-nextgen/master/config/templates/%s.yaml"
        try:
            with contextlib.closing(urllib2.urlopen(base_url % template)) as in_handle:
                txt_config = in_handle.read()
            with contextlib.closing(urllib2.urlopen(base_url % template)) as in_handle:
                config = yaml.load(in_handle)
        except (urllib2.HTTPError, urllib2.URLError):
            raise ValueError("Could not find template '%s' locally or in standard templates on GitHub"
                             % template)
    return config, txt_config
import yaml
import argparse
import numpy as np
from lifcon import World
from lifcon_dqn import LiftControllerDQN
import mxnet as mx

parser = argparse.ArgumentParser()
parser.add_argument('--world', type=str, help='world configuration')
parser.add_argument('--saveparam', type=str, default=None, help='Write DQN parameter')
parser.add_argument('--seed', type=int, default=0, help='seed')
opt = parser.parse_args()

world_conf = yaml.load(open(opt.world).read())
world = World(world_conf)

mx.random.seed(opt.seed)

dqn = LiftControllerDQN(world.nlifts, world.nfloors, world.lift_inv_speed)
dqn.initialize(mx.init.MSRAPrelu())

liftvecs = mx.nd.array(np.zeros((1, world.nlifts, dqn.nliftinfo)))
sidevec = mx.nd.array(np.zeros((1, dqn.nsideinfo)))
_ = dqn(liftvecs, sidevec)

dqn.save_params(opt.saveparam)
fonts = None
if args.fonts == "accurate":
    try:
        fonts = AccurateFonts()
    except TTFError:
        raise Exception("Failed to load accurate fonts, are you sure you used the correct file names?")
else:
    fonts = FreeFonts()

canvas = canvas.Canvas(args.output_path, pagesize=(SmallCard.WIDTH * 4, SmallCard.HEIGHT))

with open(args.input, 'r') as stream:
    try:
        entries = yaml.load(stream)
    except yaml.YAMLError as exc:
        print(exc)
        exit()

for entry in entries:
    if args.type == "monster":
        card = MonsterCard(
            entry["title"],
            entry["subtitle"],
            entry.get("artist", None),
            entry["image_path"],
            entry["armor_class"],
            entry["max_hit_points"],
            entry["speed"],
            entry["strength"],
    print('---------------------------')
    print('You are in the ' + currentRoom)
    # print the current inventory
    print('Inventory : ' + str(inventory))
    # print an item if there is one
    if "item" in rooms[currentRoom]:
        print('You see a ' + rooms[currentRoom]['item'])
    print("---------------------------")

# an inventory, which is initially empty
inventory = []

# open json file
with open("game.json", "r") as json_file:
    rooms = yaml.load(json_file)

# start the player in the Hall
currentRoom = 'Hall'

showInstructions()

# loop forever
while True:
    showStatus()

    # get the player's next 'move'
    # .split() breaks it up into a list,
    # eg typing 'go east' would give the list:
    # ['go','east']
    move = ''
import sys
import yaml
import Queue

from twisted.python import log
from twisted.internet import reactor
from twisted.internet import protocol

from cloud.common import Struct
from cloud.transport.gsclient import TransportGSClientFactory
from cloud.transport.gsserver import TransportGSServerFactory

# run the worker and twisted reactor
if __name__ == "__main__":
    # read the configuration
    f = open('config.yaml')
    configDict = yaml.load(f)
    f.close()
    config = Struct(configDict)

    # setup logging
    log.startLogging(sys.stdout)

    # create communication channels
    fromGSClientToGSServer = Queue.Queue()
    fromGSServerToGSClient = Queue.Queue()

    # configure
    worker = config.worker
    groundstation = config.groundstation
    if len(sys.argv) > 1:
        groundstation.port = groundstation.port + int(sys.argv[1])
print( f"Acceptance occult S->E: {posterior['results/occult/S->E/is_accepted'][:].mean()}" ) print( f"Acceptance occult E->I: {posterior['results/occult/E->I/is_accepted'][:].mean()}" ) del posterior if __name__ == "__main__": from argparse import ArgumentParser parser = ArgumentParser(description="Run MCMC inference algorithm") parser.add_argument( "-c", "--config", type=str, help="Config file", required=True ) parser.add_argument( "-o", "--output", type=str, help="Output file", required=True ) parser.add_argument( "data_file", type=str, help="Data pickle file", required=True ) args = parser.parse_args() with open(args.config, "r") as f: config = yaml.load(f, Loader=yaml.FullLoader) mcmc(args.data_file, args.output, config["Mcmc"])
def _with_app_prefix(path):
    """Summary

    Args:
        path (TYPE): Description

    Returns:
        TYPE: Description
    """
    if not _APP_PREFIX:
        return path
    return _APP_PREFIX + path


with open('apidoc/template.json', 'r') as f:
    template = yaml.load(f)


@app.route('/')
def _index():
    """
    swagger_from_file: apidoc/index.yaml
    """
    return ''


@app.route(_with_app_prefix('/account/login'), methods=['POST'])
def _login():
    """
    swagger_from_file: apidoc/login.yaml
import sys

import yaml

with open(sys.argv[1]) as f:
    seed = yaml.load(f)

i = 0
snaps = seed['snaps']
while i < len(snaps):
    entry = snaps[i]
    if entry['name'] == 'pc':
        snaps[i] = {
            "name": "pc",
            "unasserted": True,
            "file": "pc_x1.snap",
        }
        break
    i += 1

snaps.append({
    "name": "test-snapd-with-configure",
    "channel": "edge",
    "file": sys.argv[2],
})

with open(sys.argv[1], 'w') as f:
    yaml.dump(seed, stream=f, indent=2, default_flow_style=False)
def cli(input_yaml, template):
    data = yaml.load(input_yaml)

    # Partition `variables` and `environment-variables` lists into
    # "pure" (key)words and regexes to match
    for var_key in _VAR_KIND_LIST:
        data[var_key] = {
            k: sorted(set(v))
            for k, v in zip(_KW_RE_LIST, [*partition_iterable(
                lambda x: _TEMPLATED_NAME.search(x) is None, data[var_key])])
        }
        data[var_key]['re'] = [
            *map(lambda x: try_transform_placeholder_string_to_regex(x),
                 data[var_key]['re'])
        ]

    # Transform properties and make all-properties list
    data['properties'] = {}
    for prop in _PROPERTY_KEYS:
        python_prop_list_name = prop.replace('-', '_')
        props, props_re = partition_iterable(
            lambda x: _TEMPLATED_NAME.search(x) is None, data[prop])
        del data[prop]

        data['properties'][python_prop_list_name] = {
            k: sorted(set(v)) for k, v in zip(_KW_RE_LIST, [props, props_re])
        }
        data['properties'][python_prop_list_name]['re'] = [
            *map(lambda x: try_transform_placeholder_string_to_regex(x),
                 props_re)
        ]
    data['properties']['kinds'] = [
        *map(lambda name: name.replace('-', '_'), _PROPERTY_KEYS)
    ]

    # Make all commands list
    data['commands'] = [
        *map(lambda cmd: transform_command(cmd),
             data['scripting-commands'] + data['project-commands'] + data['ctest-commands'])
    ]

    # Fix node names to be accessible from Jinja template
    data['generator_expressions'] = data['generator-expressions']
    data['deprecated_or_internal_variables'] = data['deprecated-or-internal-variables']
    data['environment_variables'] = data['environment-variables']
    del data['generator-expressions']
    del data['deprecated-or-internal-variables']
    del data['environment-variables']

    env = jinja2.Environment(keep_trailing_newline=True)

    # Register convenience filters
    env.tests['nulary'] = cmd_is_nulary

    tpl = env.from_string(template.read())
    result = tpl.render(data)
    print(result)
                use_mem=meminfo_dict['use_mem'],
                mem_percent=meminfo_dict['mem_percent'])
            g_diskinfo.labels(
                disk_total=diskinfo_dict['disk'][0]['disk_total'],
                disk_free=diskinfo_dict['disk'][1]['disk_free'])
            pushadd_to_gateway(target, job='info_pushgateway', registry=self.registry)
        except Exception as e:
            logging.error(str(e))


if __name__ == '__main__':
    parser = ArgumentParser(description='describe')
    parser.add_argument("--verbose", help="Increase output verbosity",
                        action="store_const", const=logging.DEBUG,
                        default=logging.INFO)
    parser.add_argument('--filename', default='config.yaml')
    args = parser.parse_args()

    filename = os.path.join(os.path.dirname(__file__), args.filename).replace("\\", "/")
    f = open(filename)
    y = yaml.load(f, Loader=yaml.FullLoader)
    target = y['pushgateway']['targets']

    Server_ojb = Server_info()
    Server_ojb.push2gateway()
                    default=False,
                    help="Flag if you want to keep temp directory for debugging purposes")
args = parser.parse_args()

####################################################################################
################################  Main Function      ##############################
####################################################################################

# Check between input from config file and commandline parameter. Use the parameter
# from command line to overwrite any variables assigned through the config file
yamlFile = args.yamlfile
with open(yamlFile, 'r') as stream:
    yml = yaml.load(stream)

##################
### Files
##################

# Assign Infile
if args.infile != None:
    inFile = args.infile
else:
    inFile = AssignVariableFromConfig(yml, False, 'Files', 'Infile', "infile")
if inFile == None:
    sys.exit("Need to specify an inFile")

## define baseName
baseName = os.path.basename(inFile).replace('.csv', '')
import os

import yaml

with open(os.getenv("DB_CFG_FILE"), 'r') as f:
    try:
        params = yaml.load(f)
    except yaml.YAMLError as e:
        raise e
    g_link = Graph()
    g_map = Graph()
else:
    print('The files do not exist')
    exit()

files = []
files.append(args.file1)
files.append(args.file2)
files.append(args.file3)

for file in files:
    if Path(file).suffix == '.rdf':
        g_onto.parse(open(file))
        for s, p, o in g_onto.triples((None, None, None)):
            g_link.add((s, p, o))
    elif Path(file).suffix == '.yml':
        # Also think about JSON for later!
        mapping = yaml.load(open(file), Loader=yaml.FullLoader)
        liste_map = yamlToTriples(mapping)
    elif Path(file).suffix == '.json':
        raw_data = json.load(open(file))
        #pprint.pprint(raw_data[0]["fields"])
    else:
        print('You should have a .rdf, .yml and a .json')
        exit()

nbTriples = 0
for triple in liste_map:
    nbTriples = nbTriples + 1
    g_link.add(triple)
    g_map.add(triple)

#for s, p, o in g_onto.triples((None, rdflib.term.URIRef('http://www.w3.org/2002/07/owl#equivalentClass'), None)):
#    print('---------------------')
#    pprint.pprint(s)
import requests
import json
import yaml
import logging
import time
import os
import re

path = os.environ["WORKDIR"]

try:
    with open(path + "/trigger_plugins/clicksend/dnifconfig.yml", 'r') as ymlfile:
        CFG = yaml.load(ymlfile)
    USERNAME = CFG['trigger_plugin']['CS_USERNAME']
    API_KEY = CFG['trigger_plugin']['CS_API_KEY']
    SOURCE = CFG['trigger_plugin']['CS_SOURCE']
    LANG = CFG['trigger_plugin']['CS_LANG']
    VOICE = CFG['trigger_plugin']['CS_VOICE']
    REQ_INPUT = CFG['trigger_plugin']['CS_REQUIRE_INPUT']
    MACH_DETECT = CFG['trigger_plugin']['CS_MACHINE_DETECTION']
except Exception, e:
    # logging.warning() requires a message argument; log the caught exception
    logging.warning(str(e))


def clicksend_call(cno, msg):
    logging.debug("In clicksend_call")
    logging.debug("Call placed to >>{}<< with message >>{}<<".format(cno, msg))
    messg = str("<speak><prosody volume='x-loud' rate='medium'>" + msg + "</prosody></speak>")
    url = "https://rest.clicksend.com/v3/voice/send"
    headers = {"Content-Type": "application/json"}
def main():
    parser = ArgumentParser()
    parser.add_argument("-f", "--file", dest="testplan_list", required=True,
                        nargs="+", help="Test plan file to be used")
    parser.add_argument("-r", "--repositories", dest="repository_path",
                        default="repositories", help="Test plan file to be used")
    parser.add_argument("-o", "--output", dest="output", default="output",
                        help="Destination directory for generated files")
    parser.add_argument("-i", "--ignore-clone", dest="ignore_clone",
                        action="store_true", default=False,
                        help="Ignore cloning repositories and use previously cloned")
    parser.add_argument("-s", "--single-file-output", dest="single_output",
                        action="store_true", default=False,
                        help="""Render test plan into single HTML file.
                        This option ignores any metadata that is available in test cases""")
    parser.add_argument("-c", "--csv", dest="csv_name", required=False,
                        help="Name of CSV to store overall list of requirements and test. "
                             "If name is absent, the file will not be generated")

    _mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG
    yaml.add_representer(PrependOrderedDict, dict_representer)
    yaml.add_constructor(_mapping_tag, dict_constructor)

    args = parser.parse_args()
    if not os.path.exists(os.path.abspath(args.output)):
        os.makedirs(os.path.abspath(args.output), 0755)

    for testplan in args.testplan_list:
        if os.path.exists(testplan) and os.path.isfile(testplan):
            testplan_file = open(testplan, "r")
            tp_obj = yaml.load(testplan_file.read())
            repo_list = repository_list(tp_obj)
            repositories = {}
            for repo in repo_list:
                repo_url, repo_path = clone_repository(repo, args.repository_path, args.ignore_clone)
                repositories.update({repo_url: repo_path})
            # ToDo: check test plan structure
            tp_version = tp_obj['metadata']['format']
            if tp_version == "Linaro Test Plan v1":
                for requirement in tp_obj['requirements']:
                    check_coverage(requirement, repositories, args)
            if tp_version == "Linaro Test Plan v2":
                if 'manual' in tp_obj['tests'].keys() and tp_obj['tests']['manual'] is not None:
                    for test in tp_obj['tests']['manual']:
                        test_exists(test, repositories, args)
                if 'automated' in tp_obj['tests'].keys() and tp_obj['tests']['automated'] is not None:
                    for test in tp_obj['tests']['automated']:
                        test_exists(test, repositories, args)
            tp_name = tp_obj['metadata']['name'] + ".html"
            tp_file_name = os.path.join(os.path.abspath(args.output), tp_name)
            if tp_version == "Linaro Test Plan v1":
                render(tp_obj, name=tp_file_name)
            if tp_version == "Linaro Test Plan v2":
                render(tp_obj, name=tp_file_name, template="testplan_v2.html")
            testplan_file.close()
def load_yaml_config(filepath):
    with open(filepath) as f:
        return yaml.load(f)
import logging.handlers
import datetime
from datetime import datetime
import yaml
import uuid
from uuid import getnode as get_mac
import json
import requests
import pprint

#TODO: Loop through speed test server list if test fail
#TODO: Robustness if RabbitMQ fails?

# Load configuration file
with open('/opt/SpeedDevice/config.yaml', 'r') as f:
    config = yaml.load(f)

# Global variables
## Device configuration
deviceId = config["device-info"]["deviceId"]
deviceType = config["device-info"]["deviceType"]
macAddress = config["device-info"]["mac-address"]

## Message receiver (Remote REST API)
URI = config["message-service"]["uri"]
postSvc = config["message-service"]["post-svc"]

## Request configuration service (Remote REST API)
configURI = config["request-service"]["uri"]
getConfigSvc = config["request-service"]["get-config-svc"]
updateStatusSvc = config["request-service"]["update-status-svc"]
from flask import Flask

app = Flask(__name__)

import settings
app.config.from_object(settings)

# load yaml config file, customize the blog
import yaml
config = yaml.load(file('app/config.yaml', 'r'))

from app import views
import yaml
import os
from array import *
from ROOT import *

intLum = 150

gROOT.SetBatch(1)

with open('../../python/postprocessing/monoZ/ROOTfiles.yml', 'r') as f_yml:
    dict_yml = yaml.load(f_yml)


def Draw_pT():
    datasets = [
        'TTTo2L2Nu_TuneCP5_13TeV-powheg-pythia8',
        'WWTo2L2Nu_NNPDF31_TuneCP5_13TeV-powheg-pythia8',
        'DYJetsToLL_M-50_TuneCP5_13TeV-amcatnloFXFX-pythia8',
        'WZTo3LNu_TuneCP5_13TeV-amcatnloFXFX-pythia8',
        'ZZTo2L2Nu_13TeV_powheg_pythia8'
    ]
    hex_cols = ['#8dd3c7', '#ffffb3', '#bebada', '#fb8072', '#80b1d3']
    ndatasets = len(datasets)

    stk = THStack("stk", ";Dilepton pT [GeV];Events / GeV")
    bins_Det = array('d', [40, 60, 80, 100, 200, 400, 1000])
    # bins_Det = array('d', [ 50.00, 55.62, 61.25, 66.87, 72.49, 78.12, 84.88, 91.65,
    #     98.42, 105.19, 111.96, 118.73, 125.50, 134.55, 143.60, 152.66, 161.71, 170.77, 179.82, 191.64,
    #     203.46, 215.28, 227.10, 241.96, 256.82, 271.69, 286.55, 305.06, 323.58, 342.10, 367.25, 392.40,
    #     419.66, 446.92, 476.25, 509.53, 542.81, 580.49, 618.98, 657.47, 698.26, 741.03, 790.66,
parser = argparse.ArgumentParser(
    description='Patches an exported Avi Configuration with a configuration patch input as yaml file.',
    usage="""
    python config_patch -c avi_config.json -p test/patch.yml
      output: avi_config.json.patched
    Contents of the patch.yml
    Pool:
      - match_name: cool
        patch:
          name: awesome
    VirtualService:
      - match_name: cool
        patch:
          name: awesome
    """)
parser.add_argument('-c', '--aviconfig',
                    help='Avi configuration in JSON format')
parser.add_argument('-p', '--patchconfig',
                    help='Avi configuration objects to be patched. It is list of patterns and object overrides')

args = parser.parse_args()

with open(args.aviconfig) as f:
    acfg = json.load(f)
with open(args.patchconfig) as f:
    patches = yaml.load(f)

cp = ConfigPatch(acfg, patches)
patched_cfg = cp.patch()

with open(args.aviconfig + '.patched', 'w') as f:
    f.write(json.dumps(patched_cfg, indent=4))
 - 'anlist': (list of ints) a_p for p<20
"""
import os.path
import re
import sys
import os
import pymongo
from sage.all import ZZ, RR, EllipticCurve, prod
from lmfdb.utils import web_latex
from lmfdb.base import getDBConnection

print "getting connection"
C = getDBConnection()
print "authenticating on the elliptic_curves database"
import yaml
pw_dict = yaml.load(open(os.path.join(os.getcwd(), os.extsep, os.extsep, os.extsep, "passwords.yaml")))
username = pw_dict['data']['username']
password = pw_dict['data']['password']
C['elliptic_curves'].authenticate(username, password)
print "setting curves"
curves = C.elliptic_curves.curves
curves2 = C.elliptic_curves.curves2


def parse_tgens(s):
    r"""
    Converts projective coordinates to affine coordinates for generator
    """
    g1 = s.replace('(', ' ').replace(')', ' ').split(':')
    x, y, z = [ZZ(c) for c in g1]
    g = (x / z, y / z)
    return str(g)
def loadYaml(self, filename):
    f = yaml.load(open(filename, 'r'))
    return f
import time

import whois
import yaml
from jinja2 import Environment, FileSystemLoader
from publicsuffix import PublicSuffixList


# whois lookup
def get_whois(host):
    time.sleep(2)
    return whois.whois(host)


# open the YAML file
f = open("./chk_hosts.yml", encoding='utf-8')
chk_hosts = yaml.load(f)
chk_hosts = chk_hosts['chk_host_list']
#print(chk_hosts)

domain_list = []
# normalize subdomains and Japanese (IDN) domains
for chk in chk_hosts:
    hostname = chk['uri']
    if hostname is None:
        continue  # a bare `next` here was a no-op; `continue` is the intent
    normalize_hostname = hostname.encode('idna').decode('utf-8')
    psl = PublicSuffixList()
    domain_list.append(psl.get_public_suffix(normalize_hostname))
#print(domain_list)

uniq_domain_list = list(set(domain_list))
def load_yaml(self, *args):
    dir_path = os.path.dirname(os.path.realpath(__file__))
    o = open(os.path.join(dir_path, *args))
    d = yaml.load(o.read(), Loader=yaml.SafeLoader)
    self.update(d)
#!/usr/bin/env python
import os
import sys

import yaml

# YAML_DIR is the location of the directory where the YAML files are kept
YAML_DIR = "%s/../vars/" % os.path.dirname(os.path.abspath(__file__))

# loop over the YAML files and try to load them
for filename in os.listdir(YAML_DIR):
    yaml_file = "%s%s" % (YAML_DIR, filename)
    if os.path.isfile(yaml_file) and ".yml" in yaml_file:
        try:
            with open(yaml_file) as yamlfile:
                configdata = yaml.load(yamlfile)
        # If there was a problem importing the YAML, we can print
        # an error message, and quit with a non-zero error code
        # (which will trigger our CI system to indicate failure)
        except Exception:
            print("%s failed YAML import" % yaml_file)
            sys.exit(1)

sys.exit(0)
    return config


if __name__ == "__main__":
    rospy.init_node("pid_coeff_publisher", anonymous=True)
    rospy.loginfo("Start pid coefficient publisher")

    pid_coeff_cmd_topic = rospy.get_param("~pid_coeff_cmd_topic")
    default_pid_coeff_file = rospy.get_param("~default_pid_coeff_file",
                                             "../config/PidCoeffDefault.yaml")

    pub = rospy.Publisher(pid_coeff_cmd_topic, Float32MultiArray, queue_size=1)
    srv = Server(PidCoeffConfig, callback)

    try:
        with open(default_pid_coeff_file, 'r') as f:
            def_pid_coeff = yaml.load(f)
        for item in coeff_names:
            PidCoeffConfig.defaults[item] = def_pid_coeff[item]
    except Exception as e:
        rospy.logerr(
            "Failed to setup default pid coefficients. Error : {}".format(e))

    # Send once to initialize:
    #print "!!!! PUBLISH DEFAULT VALUES : {}".format(PidCoeffConfig.defaults)
    # !!! Probably, this does not work !!!
    srv.update_configuration(PidCoeffConfig.defaults)

    rospy.spin()
In [13]: result = send_config_commands(r1, commands)
Connecting to 192.168.100.1...

In [14]: result = send_config_commands(r1, commands, verbose=False)

In [15]:

The script should send the list of commands ``commands`` to all devices from
the devices.yaml file using the send_config_commands function.
'''
import netmiko, yaml


def send_config_commands(device, config_commands, verbose=True):
    with netmiko.ConnectHandler(**device) as ssh:
        ssh.enable()
        if verbose:
            return 'Connecting to {}'.format(
                device['ip'] + '\n' + ssh.send_config_set(config_commands))
        return ssh.send_config_set(config_commands)


if __name__ == '__main__':
    commands = [
        'logging 10.255.255.1', 'logging buffered 20010', 'no logging console'
    ]
    with open('devices.yaml') as f:
        devices = yaml.load(f, Loader=yaml.FullLoader)
    for device in devices:
        print(send_config_commands(device, commands))
from flask import Flask, render_template, url_for, redirect, request, session, flash
from flask_bootstrap import Bootstrap
from flask_mysqldb import MySQL
import yaml
import os
from werkzeug.security import generate_password_hash

app = Flask(__name__)
Bootstrap(app)
mysql = MySQL(app)

# Configure db
db = yaml.load(open('db.yaml'))
app.config['MYSQL_HOST'] = db['mysql_host']
app.config['MYSQL_USER'] = db['mysql_user']
app.config['MYSQL_PASSWORD'] = db['mysql_password']
app.config['MYSQL_DB'] = db['mysql_db']
app.config['MYSQL_CURSORCLASS'] = 'DictCursor'
app.config['SECRET_KEY'] = os.urandom(24)


@app.route('/', methods=['GET', 'POST'])
def index():
    # fruits_arr = ['Apple', 'Orange']
    # return render_template('index.html', fruits=fruits_arr)
    # return redirect(url_for('about'))
    # if request.method == 'POST':
    #     user_name = request.form.get('username')
    #     cur = mysql.connection.cursor()
    #     cur.execute("INSERT INTO user VALUES(%s)", [user_name])
    #     mysql.connection.commit()
import yaml
import dash_html_components as html
import dash_core_components as dcc

from loggers.logger import Logger

# Initialize logger
logger = Logger().getLogger(__file__)

with open(r'configs/zone_settings.yaml') as file:
    zones = yaml.load(file, Loader=yaml.FullLoader)

with open(r'configs/class_settings.yaml') as file:
    classes = yaml.load(file, Loader=yaml.FullLoader)

with open(r'configs/servers.yaml') as file:
    servers = yaml.load(file, Loader=yaml.FullLoader)

serverRegion_div = html.Div(
    className='row',
    children=[
        html.Div(
            className='largeleftblock',
            children=[
                html.Datalist(
                    id='serverlist',
                    children=[html.Option(value=server) for server in servers]
                ),
                dcc.Input(
                    type='text',
                    placeholder='Server',
                    id='serverinput',
    # Candidate prefixes for detectron ops lib path
    prefixes = [_CMAKE_INSTALL_PREFIX, sys.prefix, sys.exec_prefix] + sys.path
    # Candidate subdirs for detectron ops lib
    subdirs = ['lib', 'torch/lib']
    # Try to find detectron ops lib
    for prefix in prefixes:
        for subdir in subdirs:
            ops_path = os.path.join(prefix, subdir, _DETECTRON_OPS_LIB)
            if os.path.exists(ops_path):
                print('Found Detectron ops lib: {}'.format(ops_path))
                return ops_path
    raise Exception('Detectron ops lib not found')


def get_custom_ops_lib():
    """Retrieve custom ops library."""
    det_dir, _ = os.path.split(os.path.dirname(__file__))
    root_dir, _ = os.path.split(det_dir)
    custom_ops_lib = os.path.join(
        root_dir, 'build/libcaffe2_detectron_custom_ops_gpu.so')
    assert os.path.exists(custom_ops_lib), \
        'Custom ops lib not found at \'{}\''.format(custom_ops_lib)
    return custom_ops_lib


# YAML load/dump function aliases
#yaml_load = yaml.load
# Apply hot-fix patch (https://github.com/facebookresearch/Detectron/issues/840#issuecomment-478301208)
yaml_load = lambda x: yaml.load(x, Loader=yaml.Loader)
yaml_dump = yaml.dump
fourcc = cv2.VideoWriter_fourcc('X', 'V', 'I', 'D')
# other options: ('P','I','M','1'), ('D','I','V','X'), ('M','J','P','G'), ('X','V','I','D')
out = cv2.VideoWriter(fn_out, -1, 25.0, (video_info['width'], video_info['height']))

# Initialize the HOG descriptor/person detector. This step costs a lot of processing power.
if dict['pedestrian_detection']:
    hog = cv2.HOGDescriptor()
    hog.setSVMDetector(cv2.HOGDescriptor_getDefaultPeopleDetector())

# Use background subtraction to separate out the scene background.
if dict['motion_detection']:
    fgbg = cv2.createBackgroundSubtractorMOG2(history=300, varThreshold=16, detectShadows=True)

# Read the yaml file (parking space polygons)
with open(fn_yaml, 'r') as stream:
    parking_data = yaml.load(stream)

parking_contours = []
parking_bounding_rects = []
parking_mask = []
parking_data_motion = []
if parking_data != None:
    for park in parking_data:
        points = np.array(park['points'])
        rect = cv2.boundingRect(points)
        points_shifted = points.copy()
        points_shifted[:, 0] = points[:, 0] - rect[0]  # shift contour to region of interest
        points_shifted[:, 1] = points[:, 1] - rect[1]
        parking_contours.append(points)
        parking_bounding_rects.append(rect)
        mask = cv2.drawContours(np.zeros((rect[3], rect[2]), dtype=np.uint8),
                                [points_shifted], contourIdx=-1,
                                color=255, thickness=-1, lineType=cv2.LINE_8)