Code example #1
    def test_package_metadata_config_gen_task(self):
        task = package_metadata_tasks.PackageMetadataConfigGenTask()
        repo_root = os.path.abspath('.')

        task.execute(
            api_name='fake',
            api_version='v1',
            language='python',
            root_dir='%s/googleapis' % repo_root,
            organization_name='google-cloud',
            output_dir=str(self.output_dir),
            proto_deps=['googleapis-common-protos'],
            artifact_type='GAPIC',
            src_proto_path=['path/to/protos'],
            release_level='beta'
        )
        actual_file = os.path.join(str(self.output_dir),
                                   'python_google-cloud-fake-v1_package2.yaml')
        expected_file = 'test/testdata/python_google-cloud-fake-v1_package2.yaml'
        with open(actual_file) as f:
            actual = yaml.safe_load(f)
        with open(expected_file) as f:
            expected = yaml.safe_load(f)
        # Don't compare files directly because yaml doesn't preserve ordering
        try:
            self.assertDictEqual(actual, expected)
        except AssertionError:
            print("comparison failure: actual = " + actual_file + ", expected = " + expected_file)
            raise
Code example #2
 def assertParametersConverted(self, actual, expected):
     print('expected: ' + json.dumps(yaml.safe_load(expected)['parameters']))
     print('actual:   ' + json.dumps(lose_parameters_to_full(yaml.safe_load(actual)['parameters'])))
     self.assertEqual(
         yaml.safe_load(expected),
         {'parameters': lose_parameters_to_full(yaml.safe_load(actual)['parameters'])}
     )
Code example #3
def _write_summary_file(result_json_paths, output_file, ablation=0):
    """
    Function to take a list of paths to individual result
    json files and returns a single file that summarizes
    all of them.

    :param result_json_paths: A list of paths to the
                              individual result json files.
    :type result_json_paths: list

    :returns: The output file to contain a summary of the individual result
              files.
    :rtype: file
    """
    learner_result_dicts = []
    # Map from feature set names to all features in them
    all_features = defaultdict(set)
    logger = logging.getLogger(__name__)
    for json_path in result_json_paths:
        if not exists(json_path):
            logger.error(('JSON results file %s not found. Skipping summary '
                          'creation. You can manually create the summary file'
                          ' after the fact by using the summarize_results '
                          'script.'), json_path)
            return
        else:
            with open(json_path, 'r') as json_file:
                obj = json.load(json_file)
                featureset_name = obj[0]['featureset_name']
                if ablation != 0 and '_minus_' in featureset_name:
                    parent_set = featureset_name.split('_minus_', 1)[0]
                    all_features[parent_set].update(
                        yaml.safe_load(obj[0]['featureset']))
                learner_result_dicts.extend(obj)

    # Build and write header
    header = set(learner_result_dicts[0].keys()) - {'result_table',
                                                    'descriptive'}
    if ablation != 0:
        header.add('ablated_features')
    header = sorted(header)
    writer = csv.DictWriter(output_file, header, extrasaction='ignore',
                            dialect=csv.excel_tab)
    writer.writeheader()

    # Build "ablated_features" list and fix some backward compatible things
    for lrd in learner_result_dicts:
        featureset_name = lrd['featureset_name']
        if ablation != 0:
            parent_set = featureset_name.split('_minus_', 1)[0]
            ablated_features = all_features[parent_set].difference(
                yaml.safe_load(lrd['featureset']))
            lrd['ablated_features'] = ''
            if ablated_features:
                lrd['ablated_features'] = json.dumps(sorted(ablated_features))

        # write out the new learner dict with the readable fields
        writer.writerow(lrd)

    output_file.flush()
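
A minimal usage sketch for the function above; the file names are hypothetical, and the output is opened with newline='' because csv.DictWriter writes tab-separated rows:

result_paths = ['run1.results.json', 'run2.results.json']  # assumed inputs
with open('summary.tsv', 'w', newline='') as out:
    _write_summary_file(result_paths, out)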
Code example #4
File: deploy.py Project: rackerlabs/yoke
    def render_swagger(self):
        LOG.warning("Templating swagger.yml for region %s ...", self.region)
        swagger_file = self.config['apiGateway'].get('swaggerTemplate',
                                                     'template.yml')
        j2_env = Environment(loader=FileSystemLoader(self.project_dir),
                             trim_blocks=True, lstrip_blocks=True)
        first_template = yaml.safe_load(
            j2_env.get_template(swagger_file).render(
                accountId=self.account_id,
                Lambda=self.config['Lambda'],
                apiGateway=self.config['apiGateway'],
                region=self.region,
                stage=self.stage))

        integrations_template = self.apply_templates(first_template)

        # We have to do this twice to template the integrations - I'm sorry.
        j2_env = Environment(loader=DictLoader(
            {'template': json.dumps(integrations_template)}))
        j2_template = j2_env.get_template('template')
        rendered_template = yaml.safe_load(j2_template.render(
            accountId=self.account_id,
            Lambda=self.config['Lambda'],
            apiGateway=self.config['apiGateway'],
            region=self.region,
            stage=self.stage
        ))

        return rendered_template
Code example #5
File: test_config.py Project: knitori/shanghai
 def setUp(self):
     self.fake_yaml = {
         'foo': 123,
         'bar': 'baz',
     }
     self.sample_yaml = ryaml.safe_load(io.StringIO(SAMPLE_YAML))
     self.broken_conf_1 = ryaml.safe_load(io.StringIO(BROKEN_CONF_1))
     self.broken_conf_2 = ryaml.safe_load(io.StringIO(BROKEN_CONF_2))
Code example #6
File: arv_copy.py Project: chapmanb/arvados
def copy_workflow(wf_uuid, src, dst, args):
    # fetch the workflow from the source instance
    wf = src.workflows().get(uuid=wf_uuid).execute(num_retries=args.retries)

    # copy collections and docker images
    if args.recursive:
        wf_def = yaml.safe_load(wf["definition"])
        if wf_def is not None:
            locations = []
            docker_images = {}
            graph = wf_def.get('$graph', None)
            if graph is not None:
                workflow_collections(graph, locations, docker_images)
            else:
                workflow_collections(wf_def, locations, docker_images)

            if locations:
                copy_collections(locations, src, dst, args)

            for image in docker_images:
                copy_docker_image(image, docker_images[image], src, dst, args)

    # copy the workflow itself
    del wf['uuid']
    wf['owner_uuid'] = args.project_uuid
    return dst.workflows().create(body=wf).execute(num_retries=args.retries)
Code example #7
    def run_task(self, fw_spec):
        from functools import reduce
        import operator
        import json
        import ruamel.yaml as yaml

        filename = self['filename']
        mapstring = self['mapstring']
        assert isinstance(filename, str)
        assert isinstance(mapstring, str)
        maplist = mapstring.split('/')

        fmt = filename.split('.')[-1]
        assert fmt in ['json', 'yaml']
        with open(filename, 'r') as inp:
            data = json.load(inp) if fmt == 'json' else yaml.safe_load(inp)

        leaf = reduce(operator.getitem, maplist[:-1], fw_spec)
        if isinstance(data, dict):
            if maplist[-1] not in leaf:
                leaf[maplist[-1]] = data
            else:
                leaf[maplist[-1]].update(data)
        else:
            leaf[maplist[-1]] = data

        return FWAction(update_spec={maplist[0]: fw_spec[maplist[0]]})
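
The reduce/operator.getitem idiom above is what walks fw_spec down the '/'-separated mapstring; a self-contained sketch of just that mechanism:

from functools import reduce
import operator

spec = {'a': {'b': {'c': 1}}}                        # stands in for fw_spec
maplist = 'a/b/c'.split('/')                         # same shape as mapstring
leaf = reduce(operator.getitem, maplist[:-1], spec)  # spec['a']['b']
leaf[maplist[-1]] = 42                               # mutates spec in place
assert spec == {'a': {'b': {'c': 42}}}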
Code example #8
    def parse_value(inc_value, vtype=''):
        '''determine value type passed'''
        true_bools = ['y', 'Y', 'yes', 'Yes', 'YES', 'true', 'True', 'TRUE',
                      'on', 'On', 'ON', ]
        false_bools = ['n', 'N', 'no', 'No', 'NO', 'false', 'False', 'FALSE',
                       'off', 'Off', 'OFF']

        # It came in as a string but you didn't specify value_type as string
        # we will convert to bool if it matches any of the above cases
        if isinstance(inc_value, str) and 'bool' in vtype:
            if inc_value not in true_bools and inc_value not in false_bools:
                raise YeditException('Not a boolean type. str=[{}] vtype=[{}]'.format(inc_value, vtype))
        elif isinstance(inc_value, bool) and 'str' in vtype:
            inc_value = str(inc_value)

        # There is a special case where '' will turn into None after yaml loading it so skip
        if isinstance(inc_value, str) and inc_value == '':
            pass
        # If vtype is not str then go ahead and attempt to yaml load it.
        elif isinstance(inc_value, str) and 'str' not in vtype:
            try:
                inc_value = yaml.safe_load(inc_value)
            except Exception:
                raise YeditException('Could not determine type of incoming value. ' +
                                     'value=[{}] vtype=[{}]'.format(type(inc_value), vtype))

        return inc_value
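
For illustration, the coercion in parse_value comes straight from yaml.safe_load applied to plain strings (a sketch, assuming PyYAML's YAML 1.1 booleans):

import yaml

assert yaml.safe_load('2') == 2          # numeric string -> int
assert yaml.safe_load('on') is True      # YAML 1.1 boolean spelling
assert yaml.safe_load('text') == 'text'  # everything else stays a string
# hence parse_value('2') -> 2, while parse_value('2', vtype='str') -> '2'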
Code example #9
def main(json_config_path, yaml_config_path, prow_config_path, output_dir):
    """Creates test job definitions.

    Converts the test configurations in yaml_config_path to the job definitions
    in json_config_path and the env files in output_dir.
    """
    # TODO(yguo0905): Validate the configurations from yaml_config_path.

    with open(json_config_path) as fp:
        json_config = json.load(fp)
    json_config = remove_generated_jobs(json_config)

    with open(prow_config_path) as fp:
        prow_config = yaml.round_trip_load(fp, preserve_quotes=True)
    remove_generated_prow_configs(prow_config)

    with open(yaml_config_path) as fp:
        yaml_config = yaml.safe_load(fp)

    for job_name, job_config in yaml_config['jobs'].items():
        # Get the envs and args for each job defined under "jobs".
        job, prow = for_each_job(
            output_dir, job_name, job_config, yaml_config)
        json_config[job_name] = job
        prow_config['periodics'].append(prow)

    # Write the job definitions to config.json.
    write_job_defs_file(output_dir, json_config)
    write_prow_configs_file('prow', prow_config)
Code example #10
File: __init__.py Project: Tendrl/gluster_bridge
    def __init__(self, *args, **kwargs):
        self._defs = {}
        super(Definition, self).__init__(*args, **kwargs)

        self.data = pkg_resources.resource_string(__name__, "gluster.yaml")
        self._parsed_defs = yaml.safe_load(self.data)
        self.value = 'clusters/{0}/_NS/definitions'
Code example #11
File: abiinspect.py Project: albalu/pymatgen
def yaml_read_irred_perts(filename, doc_tag="!IrredPerts"):
    """Read the list of irreducible perturbations from file."""
    with YamlTokenizer(filename) as r:
        doc = r.next_doc_with_tag(doc_tag)
        d = yaml.safe_load(doc.text_notag)

        return [AttrDict(**pert) for pert in d["irred_perts"]]
Code example #12
    def test_from_spec(self):
        spec = """jobs:
- jb: custodian.vasp.jobs.VaspJob
  params:
    final: False
    suffix: .relax1
- jb: custodian.vasp.jobs.VaspJob
  params:
    final: True
    suffix: .relax2
    settings_override: {"file": "CONTCAR", "action": {"_file_copy": {"dest": "POSCAR"}}}
jobs_common_params:
  $vasp_cmd: ["mpirun", "-machinefile", "$PBS_NODEFILE", "-np", "24", "/opt/vasp/5.4.1/bin/vasp"]
handlers:
- hdlr: custodian.vasp.handlers.VaspErrorHandler
- hdlr: custodian.vasp.handlers.AliasingErrorHandler
- hdlr: custodian.vasp.handlers.MeshSymmetryErrorHandler
validators:
- vldr: custodian.vasp.validators.VasprunXMLValidator
custodian_params:
  $scratch_dir: $TMPDIR"""

        os.environ["TMPDIR"] = "/tmp/random"
        os.environ["PBS_NODEFILE"] = "whatever"
        d = yaml.safe_load(spec)
        c = Custodian.from_spec(d)
        self.assertEqual(c.jobs[0].vasp_cmd[2], "whatever")
        self.assertEqual(c.scratch_dir, "/tmp/random")
        self.assertEqual(len(c.jobs), 2)
        self.assertEqual(len(c.handlers), 3)
        self.assertEqual(len(c.validators), 1)
Code example #13
File: abiinspect.py Project: albalu/pymatgen
def yaml_read_kpoints(filename, doc_tag="!Kpoints"):
    """Read the K-points from file."""
    with YamlTokenizer(filename) as r:
        doc = r.next_doc_with_tag(doc_tag)
        d = yaml.safe_load(doc.text_notag)

        return np.array(d["reduced_coordinates_of_qpoints"])
Code example #14
File: config.py Project: rackerlabs/yoke
 def load_config_file(self):
     # Read config file lines as list in order to template.
     LOG.warning("Getting config from %s ...", self.project_dir)
     with open(self.yoke_path, 'r') as config_file:
         raw = config_file.readlines()
     raw = self.render_config(raw)
     return yaml.safe_load(raw)
Code example #15
File: command_loading.py Project: gyaresu/dotfiles
    def _GetRefData(self, path):
      """Loads the YAML data from the given reference.

      A YAML reference must refer to a YAML file and an attribute within that
      file to extract.

      Args:
        path: str, The path of the YAML file to import. It must be in the
          form of: package.module:attribute.attribute, where the module path is
          separated from the sub attributes within the YAML by a ':'.

      Raises:
        LayoutException: If the given module or attribute cannot be loaded.

      Returns:
        The referenced YAML data.
      """
      root = os.path.dirname(os.path.dirname(googlecloudsdk.__file__))
      parts = path.split(':')
      if len(parts) != 2:
        raise LayoutException(
            'Invalid Yaml reference: [{}]. References must be in the format: '
            'path(.path)+:attribute(.attribute)*'.format(path))
      yaml_path = os.path.join(root, *parts[0].split('.'))
      yaml_path += '.yaml'
      try:
        data = yaml.safe_load(pkg_resources.GetResourceFromFile(yaml_path))
      except IOError as e:
        raise LayoutException(
            'Failed to load Yaml reference file [{}]: {}'.format(yaml_path, e))

      return self._GetAttribute(data, parts[1], yaml_path)
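
For illustration, a hypothetical reference in the documented format (the path and instance name here are made up):

# Loads <root>/surface/alpha.yaml, then returns data['commands']['list'].
ref_data = loader._GetRefData('surface.alpha:commands.list')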
Code example #16
File: migrations.py Project: devcurmudgeon/spec
def check_definitions_version(from_version, version_file='./VERSION',
                              to_version=None):
    '''Check if migration between 'from_version' and 'to_version' is needed.

    Both 'from_version' and 'to_version' should be whole numbers. The
    'to_version' defaults to from_version + 1.

    This function reads the version marker file specified by 'version_file'.
    Returns True if the version is between 'from_version' and 'to_version',
    indicating that migration needs to be done. Returns False if the version is
    already at or beyond 'to_version'. Raises MigrationOutOfOrderError if the
    version is below 'from_version'.

    If 'version_file' is missing or invalid, it raises VersionFileError. The
    version file is expected to follow the following format:

        version: 1

    '''
    to_version = to_version or (from_version + 1)
    need_to_migrate = False

    if os.path.exists(version_file):
        logging.info("Found version information file: %s" % version_file)

        with open(version_file) as f:
            version_text = f.read()

        if len(version_text) == 0:
            raise VersionFileError(
                "File %s exists but is empty." % version_file)

        try:
            version_info = yaml.safe_load(version_text)
            current_version = version_info['version']

            if current_version >= to_version:
                logging.info(
                    "Already at version %i." % current_version)
            elif current_version < from_version:
                raise MigrationOutOfOrderError(
                    "This tool expects to migrate from version %i to version "
                    "%i of the Baserock Definitions syntax. These definitions "
                    "claim to be version %i." % (
                        from_version, to_version, current_version))
            else:
                logging.info("Need to migrate from %i to %i.",
                             current_version, to_version)
                need_to_migrate = True
        except (KeyError, TypeError, ValueError) as e:
            logging.exception(e)
            raise VersionFileError(
                "Invalid version info: '%s'" % version_text)
    else:
        raise VersionFileError(
            "No file %s was found. Please run the migration scripts in order,"
            "starting from 000-version-info.py." % version_file)

    return need_to_migrate
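
A hedged usage sketch, assuming a VERSION file in the current directory containing "version: 1":

# Checking the 1 -> 2 migration: returns True at version 1, False once the
# file already says version 2 or later, and raises MigrationOutOfOrderError
# below version 1.
if check_definitions_version(1):
    print('running migration 1 -> 2')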
Code example #17
File: spec.py Project: pombredanne/exoline
 def load_spec(args):
     # returns loaded spec and path for script files
     try:
         content, base_url = load_file(args['<spec-yaml>'])
         spec = yaml.safe_load(content)
         return spec, base_url
     except yaml.scanner.ScannerError as ex:
          raise ExoException('Error parsing YAML in {0}\n{1}'.format(args['<spec-yaml>'], ex))
Code example #18
File: cat.py Project: dotmpe/script-mpe
 def __init__(self, fn):
     self.name = fn
      with open(fn) as f:
          self.data = yaml.safe_load(f)
     if not self.data:
         self.data = []
     self.names = {}
     for i, r in enumerate(self.data):
         self.names[r['name']] = i
Code example #19
File: __init__.py Project: ansible/ansible-container
 def get_dependencies_for_role(role_path):
     meta_main_path = os.path.join(role_path, 'meta', 'main.yml')
     if os.path.exists(meta_main_path):
          with open(meta_main_path) as f:
              meta_main = yaml.safe_load(f)
         for dependency in meta_main.get('dependencies', []):
             yield dependency.get('role', None)
     else:
         yield None
Code example #20
    def setUp(self):
        """
        1) Basic check for pymatgen configurations.
        2) Setup all test workflow.
        """
        super(TestNudgedElasticBandWorkflow, self).setUp()
        # Structures used for test:
        parent = PymatgenTest.get_structure("Li2O")
        parent.remove_oxidation_states()
        parent.make_supercell(2)
        ep0, ep1 = get_endpoints_from_index(parent, [0, 1])
        neb_dir = [os.path.join(module_dir, "..", "..", "test_files", "neb_wf", "4", "inputs", "{:02}",
                                "POSCAR").format(i) for i in range(5)]
        self.structures = [Structure.from_file(n) for n in neb_dir]

        # Run fake vasp
        test_yaml = os.path.join(module_dir, "../../test_files/neb_wf/config/neb_unittest.yaml")
        with open(test_yaml, 'r') as stream:
            self.config = yaml.safe_load(stream)
            # Use scratch directory as destination directory for testing
            self.config["common_params"]["_fw_env"] = {"run_dest_root": self.scratch_dir}

        # Config 1: The parent structure & two endpoint indexes provided; need relaxation first.
        self.config_1 = copy.deepcopy(self.config)
        self.config_1["common_params"]["is_optimized"] = False
        self.config_1["common_params"]["wf_name"] = "NEB_test_1"

        # Config 2: The parent structure & two endpoint indexes provided; no need to relax.
        self.config_2 = copy.deepcopy(self.config)
        del self.config_2["fireworks"][0]
        self.config_2["common_params"]["is_optimized"] = True
        self.config_2["common_params"]["wf_name"] = "NEB_test_2"

        # Config 3: Two endpoints provided; need to relax two endpoints.
        self.config_3 = copy.deepcopy(self.config)
        del self.config_3["fireworks"][0]
        self.config_3["common_params"]["is_optimized"] = False
        self.config_3["common_params"]["wf_name"] = "NEB_test_3"

        # Config 4: Two relaxed endpoints provided; no need to relax two endpoints.
        self.config_4 = copy.deepcopy(self.config_3)
        del self.config_4["fireworks"][0]
        self.config_4["common_params"]["is_optimized"] = True
        self.config_4["common_params"]["wf_name"] = "NEB_test_4"

        # Config 5: All images including two endpoints are provided.
        self.config_5 = copy.deepcopy(self.config)
        del self.config_5["fireworks"][0: 2]
        self.config_5["common_params"]["wf_name"] = "NEB_test_5"

        self.wf_1 = wf_nudged_elastic_band([parent], parent, self.config_1)
        self.wf_2 = wf_nudged_elastic_band([parent], parent, self.config_2)
        self.wf_3 = wf_nudged_elastic_band([ep0, ep1], parent, self.config_3)
        self.wf_4 = wf_nudged_elastic_band([ep0, ep1], parent, self.config_4)
        self.wf_5 = wf_nudged_elastic_band(self.structures, parent, self.config_5)

        # Workflow without the config file
        self.wf_6 = wf_nudged_elastic_band(self.structures, parent)
Code example #21
File: refyaml.py Project: drone115b/cog
 def get_ref_ccn( self, ccn ) :
     value =  docio.get_view_body(self.view)
     filename = self.resolve_filename( value, ccn )
      with open( filename, 'rt' ) as f:
          yaml_doc = yaml.safe_load( f )
     import cog.ccn
     refccn = cog.ccn.Context()
     refccn.load_doc( yaml_doc )
     return refccn
Code example #22
File: prosopopee.py Project: titoko/prosopopee
def get_settings():
    error(Path("settings.yaml").exists(), "I can't find a "
          "settings.yaml in the current working directory")

    try:
        with open("settings.yaml", "r") as f:
            settings = yaml.safe_load(f)
    except yaml.YAMLError as exc:
        if hasattr(exc, 'problem_mark'):
            mark = exc.problem_mark
            error(False, "There are something wrong in settings.yaml line %s" % (mark.line))
        else:
            error(False, "There are omething wrong in settings.yaml")

    error(isinstance(settings, dict), "Your settings.yaml should be a dict")

    for key, value in list(DEFAULTS.items()):
        if key not in settings:
            settings[key] = value

    for key, value in list(SETTINGS.items()):
        if key not in settings:
            settings[key] = value

    if settings["settings"].get("ffmpeg"):
        SETTINGS["ffmpeg"].update(settings["settings"]["ffmpeg"])

        conv_video = settings["settings"]["ffmpeg"]["binary"]
    else:
        conv_video = "ffmpeg"

    error(os.system("which gm > /dev/null") == 0, "I can't locate the gm binary, "
          "please install the 'graphicsmagick' package.\n")

    if os.system("which " + conv_video + " > /dev/null") != 0:
        if conv_video == "ffmpeg" and os.system("which avconv > /dev/null") == 0:
            SETTINGS["ffmpeg"]["binary"] = "avconv"
            warning("Video", "I couldn't locate ffmpeg but I could find avconv, "
                             "switching to avconv for video conversion")
        else:
            warning("Video", "I can't locate the " + conv_video + " binary, "
                    "please install the '" + conv_video + "' package.\n")
            warning("Video", "I won't be able to encode video and I will stop if I encounter a video to convert")
            SETTINGS["ffmpeg"] = False

    error(settings.get("title"), "You need to specify a title in your main settings.yaml")

    if (settings["rss"] or settings["share"]) and not settings.get("url"):
        warning("warning", "If you want the rss and/or the social network share to work, "
                "you need to specify the website url in root settings")
        settings["rss"] = False
        settings["share"] = False

    if settings["settings"].get("gm"):
        SETTINGS["gm"].update(settings["settings"]["gm"])

    return settings
Code example #23
def build_index():
    """Create the index of all (YAML) sheets available."""
    from mathmaker import settings
    from ruamel import yaml
    # Below snippet from https://stackoverflow.com/a/21048064/3926735
    # to load roadmap.yaml using OrderedDict instead of dict
    _mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG

    def dict_representer(dumper, data):
        return dumper.represent_dict(data.items())

    def dict_constructor(loader, node):
        return OrderedDict(loader.construct_pairs(node))

    yaml.add_representer(OrderedDict, dict_representer)
    # Register on SafeLoader as well: safe_load() below would otherwise
    # ignore a constructor registered only on the default loader.
    yaml.add_constructor(_mapping_tag, dict_constructor, Loader=yaml.SafeLoader)
    index = dict()
    themes_dirs = [x
                   for x in os.listdir(settings.frameworksdir)
                   if os.path.isdir(settings.frameworksdir + x)]
    for theme in themes_dirs:
        folder_path = os.path.join(settings.frameworksdir, theme)
        folder_files = glob(folder_path + '/*.yaml')
        for folder_path in folder_files:
            subtheme = os.path.splitext(os.path.basename(folder_path))[0]
            with open(folder_path) as f:
                loaded_data = yaml.safe_load(f)
                if loaded_data is None:
                    # Skip empty files; otherwise `folder` would be
                    # undefined (or stale from a previous iteration).
                    continue
                folder = OrderedDict(loaded_data)
                for sheet_name in folder:
                    directive = '_'.join([subtheme, sheet_name])
                    index[directive] = (theme, subtheme, sheet_name)
                    # Automatic add possibly missing sheet integration test
                    sheet_test_dir = Path(os.path.join(settings.testsdir,
                                                       'integration',
                                                       theme,
                                                       subtheme))
                    file_name = subtheme + '_' + sheet_name
                    sheet_file = Path(os.path.join(sheet_test_dir,
                                                   'test_{}.py'
                                                   .format(file_name)))
                    if not sheet_file.is_file():
                        sheet_test_dir.mkdir(parents=True, exist_ok=True)
                        template = TESTFILE_TEMPLATE
                        if (theme == 'mental_calculation'
                            and not sheet_name.startswith('W')):
                            template += \
                                MENTAL_CALCULATION_TESTFILE_TEMPLATE_ADDENDUM
                        with open(sheet_file, 'w') as f:
                            f.write(template.format(theme=theme,
                                                    subtheme=subtheme,
                                                    sheet_name=sheet_name))

    with open(settings.index_path, 'w') as f:
        json.dump(index, f, indent=4)
        f.write('\n')
Code example #24
File: test_init.py Project: mbkumar/pymatgen
 def test_something(self):
     SETTINGS = _load_pmg_settings()
     if os.path.exists(SETTINGS_FILE):
         with open(SETTINGS_FILE, "rt") as f:
             d = yaml.safe_load(f)
             for k, v in d.items():
                 self.assertEqual(v, SETTINGS[k])
     else:
         for k, v in SETTINGS.items():
             self.assertEqual(v, os.environ.get(k))
Code example #25
 def load(self, path_cfg: str):
     with open(path_cfg, 'r') as stream:
         try:
             self.cfg_dict = yaml3ed.safe_load(stream)
         except yaml3ed.YAMLError as exc:
             print(exc)
     self.check()
     self.zbx = ZabbixAgent(self.cfg_dict['zabbix']['url'], self.cfg_dict['zabbix']['login'],
                            self.cfg_dict['zabbix']['password'])
     log.debug('Config loaded')
Code example #26
File: yedit.py Project: georgegoh/openshift-ansible
    def load(self, content_type='yaml'):
        ''' return yaml file '''
        contents = self.read()

        if not contents and not self.content:
            return None

        if self.content:
            if isinstance(self.content, dict):
                self.yaml_dict = self.content
                return self.yaml_dict
            elif isinstance(self.content, str):
                contents = self.content

        # check if it is yaml
        try:
            if content_type == 'yaml' and contents:
                # Try to set format attributes if supported
                try:
                    self.yaml_dict.fa.set_block_style()
                except AttributeError:
                    pass

                # Try to use RoundTripLoader if supported.
                try:
                    self.yaml_dict = yaml.safe_load(contents, yaml.RoundTripLoader)
                except AttributeError:
                    self.yaml_dict = yaml.safe_load(contents)

                # Try to set format attributes if supported
                try:
                    self.yaml_dict.fa.set_block_style()
                except AttributeError:
                    pass

            elif content_type == 'json' and contents:
                self.yaml_dict = json.loads(contents)
        except yaml.YAMLError as err:
            # Error loading yaml or json
            raise YeditException('Problem with loading yaml file. %s' % err)

        return self.yaml_dict
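
The AttributeError dance above is feature detection: merely looking up yaml.RoundTripLoader fails on plain PyYAML, before any parsing happens. A minimal self-contained sketch of the same pattern:

try:
    import ruamel.yaml as yaml     # old ruamel API, has RoundTripLoader
except ImportError:
    import yaml                    # plain PyYAML

contents = 'a: 1  # a comment worth preserving'
try:
    # ruamel's round-trip loader keeps comments, ordering and quoting
    data = yaml.load(contents, yaml.RoundTripLoader)
except AttributeError:
    # PyYAML has no RoundTripLoader attribute; fall back to safe_load
    data = yaml.safe_load(contents)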
Code example #27
File: update_list.py Project: Parsely/serpextract
def main():
    filename = _here('serpextract', 'search_engines.json')
    print('Updating search engine parser definitions.')

    url = urlopen('https://raw.githubusercontent.com/piwik/searchengine-and-social-list/master/SearchEngines.yml')
    piwik_engines = yaml.safe_load(url)
    with open(filename, 'w') as json_file:
        json.dump(piwik_engines, json_file, indent=2, sort_keys=True)

    print('Saved {} search engine parser definitions to {}.'
          .format(len(piwik_engines), filename))
Code example #28
def _parse_and_validate_metrics(metrics, option_name, logger=None):
    """
    Given a string containing a list of metrics, this function
    parses that string into a list and validates the list.

    Parameters
    ----------
    metrics : str
        A string containing a list of metrics
    option_name : str
        The name of the option with which the metrics are associated.
    logger : logging.Logger, optional
        A logging object
        Defaults to ``None``.

    Returns
    -------
    metrics : list of str
        A list of metrics for the given option.

    Raises
    ------
    TypeError
        If the given string cannot be converted to a list.
    ValueError
        If there are any invalid metrics specified.
    """

    # create a logger if one was not passed in
    if not logger:
        logger = logging.getLogger(__name__)

    # make sure the given metrics data type is a list
    # and parse it correctly
    metrics = yaml.safe_load(_fix_json(metrics))
    if not isinstance(metrics, list):
        raise TypeError("{} should be a list, not a {}.".format(option_name,
                                                                type(metrics)))

    # `mean_squared_error` is no longer supported.
    # It has been replaced by `neg_mean_squared_error`.
    if 'mean_squared_error' in metrics:
        raise ValueError("The metric \"mean_squared_error\" "
                         "is no longer supported. "
                         "Please use the metric "
                         "\"neg_mean_squared_error\" instead.")

    invalid_metrics = [metric for metric in metrics if metric not in SCORERS]
    if invalid_metrics:
        raise ValueError('Invalid metric(s) {} '
                         'specified for {}'.format(invalid_metrics,
                                                   option_name))

    return metrics
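
The yaml.safe_load call is what turns the (JSON-ish) metrics string into a real Python list, for example:

import yaml

assert yaml.safe_load('["accuracy", "f1"]') == ['accuracy', 'f1']
assert yaml.safe_load('[accuracy, f1]') == ['accuracy', 'f1']  # quotes optional
assert yaml.safe_load('accuracy') == 'accuracy'  # not a list -> TypeError above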
Code example #29
File: run_vasp.py Project: czhengsci/custodian
def load_class(mod, name):
    toks = name.split("?")
    params = {}
    if len(toks) == 2:
        for p in toks[-1].split(","):
            ptoks = p.split("=")
            params[ptoks[0]] = yaml.safe_load(ptoks[1])
    elif len(toks) > 2:
        print("Bad handler specification")
        sys.exit(-1)
    mod = __import__(mod, globals(), locals(), [toks[0]], 0)
    return getattr(mod, toks[0])(**params)
Code example #30
File: config.py Project: carlosp420/shub
 def load(self, stream):
     """Load Scrapinghub configuration from stream."""
     try:
         yaml_cfg = yaml.safe_load(stream)
         if not yaml_cfg:
             return
         for option in ('projects', 'endpoints', 'apikeys'):
             getattr(self, option).update(yaml_cfg.get(option, {}))
         self.version = yaml_cfg.get('version', self.version)
     except (yaml.YAMLError, AttributeError):
         # AttributeError: stream is valid YAML but not dictionary-like
         raise ConfigParseException
Code example #31
 def __init__(self, yaml_config_path):
     with open(yaml_config_path) as fp:
         self.version_dict = yaml.safe_load(fp)
Code example #32
def pkmain():
    global log
    parser = argparse.ArgumentParser(
        description='MiCADO component to realise scaling policies')
    parser.add_argument('--cfg',
                        dest='cfg_path',
                        default='./config.yaml',
                        help='path to configuration file')
    parser.add_argument('--policy',
                        dest='cfg_policy',
                        help='specifies the policy to execute')
    parser.add_argument('--srv',
                        action='store_true',
                        dest='cfg_srv',
                        default=False,
                        help='run in service mode')
    parser.add_argument('--host',
                        type=str,
                        default='127.0.0.1',
                        help='host to bind service to')
    parser.add_argument('--port',
                        type=int,
                        default=12345,
                        help='port to bind service to')
    args = parser.parse_args()
    #read configuration
    try:
        with open(args.cfg_path, 'r') as c:
            pk_config.config(yaml.safe_load(c))
    except Exception as e:
        print('ERROR: Cannot read configuration file "{0}": {1}'.format(
            args.cfg_path, str(e)))
    config = pk_config.config()
    #initialise logging facility based on the configuration
    try:
        logging.config.dictConfig(config['logging'])
        log = logging.getLogger('pk')
    except Exception as e:
        print('ERROR: Cannot process configuration file "{0}": {1}'.format(
            args.cfg_path, str(e)))
    #read policy file and start periodic policy evaluation in case of command-line mode
    if not args.cfg_srv:
        if not args.cfg_policy:
            log.error(
                'Policy file must be specified for standalone execution!')
            sys.exit(1)
        try:
            policy_yaml = load_policy_from_file(args.cfg_policy)
            start(policy_yaml)
        except KeyboardInterrupt:
            log.warning('Keyboard interruption detected! Shutting down...')
            stop(policy_yaml)
        except Exception:
            log.exception('An error occurred during policy execution:')
            return

    #launch web service and wait for incoming requests
    if args.cfg_srv:
        if args.cfg_policy:
            log.warning(
                'Policy file parameter is unused; in service mode the policy must be defined through the API!'
            )
        pk_rest.init_logging()
        evaluator.init_logging()
        pk_rest.app.run(debug=True, host=args.host, port=args.port)
Code example #33
def read_config(path):
    with open(path, 'rt') as f:
        return yaml.safe_load(f.read())
Code example #34
File: dodo.py Project: claudijd/subhub
def load_serverless(svc):
    with open(f'services/{svc}/serverless.yml') as f:
        return yaml.safe_load(f)
Code example #35
 def setUp(self):
     fixture = os.path.join(os.path.dirname(__file__), "CITATION.cff")
     with open(fixture, "r") as f:
         cffstr = f.read()
         cff_object = yaml.safe_load(cffstr)
         self.so = SchemaorgObject(cff_object, initialize_empty=True)
Code example #36
 def construct_yaml_include(self, loader, node):
     return yaml.safe_load(get_data('data', node.value))
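
A constructor like this only takes effect once it is registered for a tag; a self-contained sketch (the !include tag and file name are illustrative, not from the original project):

import yaml

def construct_include(loader, node):
    # Treat the node's scalar value as a path and load that file inline.
    with open(loader.construct_scalar(node)) as f:
        return yaml.safe_load(f)

yaml.SafeLoader.add_constructor('!include', construct_include)
config = yaml.safe_load('extra: !include extra.yaml')  # assumes extra.yaml exists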
Code example #37
                    break
                self.conn.send(("Commitment values : {0:04d} + {1:10d}\n".format(commit_result[1],commit_result[2])).encode())
                self.conn.send(("You have %d credits remaining\n"%(self.init_credit)).encode())
            except socket.error as e:
                print("Error",e)
                break
        self.conn.close()

if __name__ == '__main__':
    p = ArgumentParser()
    p.add_argument('-c', '--config', type=Path, default=Path('.mkctf.yml'),
                   help="Configuration file.")
    args = p.parse_args()

    with open(args.config) as f:
        conf = safe_load(f)

    tcpHost = '0.0.0.0'
    tcpPort = conf['parameters']['port']
    flag = conf['flag']

    tcpServer = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    tcpServer.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    tcpServer.bind((tcpHost, tcpPort))
    print("server listening on {}:{}".format(tcpHost, tcpPort))
    threads = []

    try:
        while True:
            tcpServer.listen()
            (conn, (ip, port)) = tcpServer.accept()
Code example #38
def yaml_load(file, as_namespace=False):
    if as_namespace:
        return yaml.load(file, Loader=YAMLNamespaceLoader)
    else:
        return yaml.safe_load(file)
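
YAMLNamespaceLoader is project-specific; purely as an illustration (not the project's implementation), attribute-style access can also be layered on top of safe_load:

from types import SimpleNamespace
import yaml

def load_as_namespace(file):
    # Only converts the top level; nested mappings remain plain dicts.
    return SimpleNamespace(**yaml.safe_load(file))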