Example #1
def main(json_config_path, yaml_config_path, prow_config_path, output_dir):
    """Creates test job definitions.

    Converts the test configurations in yaml_config_path to the job definitions
    in json_config_path and the env files in output_dir.
    """
    # TODO(yguo0905): Validate the configurations from yaml_config_path.

    with open(json_config_path) as fp:
        json_config = json.load(fp)
    json_config = remove_generated_jobs(json_config)

    with open(prow_config_path) as fp:
        prow_config = yaml.round_trip_load(fp, preserve_quotes=True)
    remove_generated_prow_configs(prow_config)

    with open(yaml_config_path) as fp:
        yaml_config = yaml.safe_load(fp)

    for job_name, _ in yaml_config['jobs'].items():
        # Get the envs and args for each job defined under "jobs".
        job, prow = for_each_job(
            output_dir, job_name, yaml_config['jobs'][job_name], yaml_config)
        json_config[job_name] = job
        prow_config['periodics'].append(prow)

    # Write the job definitions to config.json.
    write_job_defs_file(output_dir, json_config)
    write_prow_configs_file('prow', prow_config)
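Note: round_trip_load(fp, preserve_quotes=True) parses into comment-preserving containers (CommentedMap/CommentedSeq), which is why the Prow file can be rewritten later without losing comments or quoting, while plain safe_load suffices for the read-only yaml_config. A minimal, self-contained illustration of that guarantee:

from ruamel import yaml

src = 'key: "quoted"  # keep me\n'
data = yaml.round_trip_load(src, preserve_quotes=True)
assert yaml.round_trip_dump(data) == src  # comments and quoting survive the round trip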
Example #2
    def set_env(self, env, config=None):
        try:
            config = yaml.round_trip_load(open(self.config_path))
        except IOError:
            raise AnsibleContainerNotInitializedException()
        except yaml.YAMLError as exc:
            raise AnsibleContainerConfigException(u"Parsing container.yml - %s" % unicode(exc))

        new_services = yaml.compat.ordereddict()
        for service_name, service_config in iteritems(config.get('services') or {}):
            if service_config.get('containers'):
                # If containers is defined, convert it to services, and drop any other keys
                for container in service_config['containers']:
                    if not container.get('container_name'):
                        raise AnsibleContainerConfigException(
                            u"Expecting container to have container_name defined. None found."
                        )
                    new_service_name = "{}-{}".format(service_name, container['container_name'])
                    new_services[new_service_name] = copy.deepcopy(container)
            else:
                new_services[service_name] = copy.deepcopy(service_config)

        config['services'] = new_services
        super(AnsibleContainerConfig, self).set_env(env, config=config)

        if self._config.get('volumes'):
            for vol_key in self._config['volumes']:
                if 'docker' in self._config['volumes'][vol_key]:
                    settings = copy.deepcopy(self._config['volumes'][vol_key][self.engine_name])
                    self._config['volumes'][vol_key] = settings
                else:
                    # remove settings for other engines
                    for engine_name in self.remove_engines:
                        if engine_name in self._config['volumes'][vol_key]:
                            del self._config['volumes'][vol_key][engine_name]
Example #3
def main():  # pragma: no cover
    try:
        args = parse_arguments(*sys.argv[1:])
        schema = yaml.round_trip_load(open(validate.schema_filename()).read())
        source_config = legacy.parse_configuration(
            args.source_config_filename, legacy.CONFIG_FORMAT
        )
        source_config_file_mode = os.stat(args.source_config_filename).st_mode
        source_excludes = (
            open(args.source_excludes_filename).read().splitlines()
            if args.source_excludes_filename
            else []
        )

        destination_config = convert.convert_legacy_parsed_config(
            source_config, source_excludes, schema
        )

        generate.write_configuration(
            args.destination_config_filename, destination_config, mode=source_config_file_mode
        )

        display_result(args)
    except (ValueError, OSError) as error:
        print(error, file=sys.stderr)
        sys.exit(1)
Example #4
 def __get_prow_config(self, test_suite, k8s_version):
     """Returns the Prow config for the job from the given fields."""
     prow_config = yaml.round_trip_load(PROW_CONFIG_TEMPLATE)
     prow_config['name'] = self.job_name
     prow_config['interval'] = self.job['interval']
     # Assumes that the value in --timeout is in minutes.
     timeout = int(next(
         x[10:-1] for x in test_suite['args'] if (
             x.startswith('--timeout='))))
     container = prow_config['spec']['containers'][0]
     if not container['args']:
         container['args'] = []
     if not container['env']:
         container['env'] = []
     # Prow timeout = job timeout + 20min
     container['args'].append('--timeout=%d' % (timeout + 20))
     container['args'].extend(k8s_version.get('args', []))
     container['args'].append('--root=/go/src')
     container['env'].extend([{'name':'GOPATH', 'value': '/go'}])
     # Specify the appropriate kubekins-e2e image. This allows us to use a
     # specific image (containing a particular Go version) to build and
     # trigger the node e2e test to avoid issues like
     # https://github.com/kubernetes/kubernetes/issues/43534.
     if k8s_version.get('prowImage', None):
         container['image'] = k8s_version['prowImage']
     return prow_config
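The slice x[10:-1] strips the 10-character prefix '--timeout=' and the trailing unit letter, leaving the number of minutes. A tiny sketch of the same extraction, assuming a hypothetical flag of the shape '--timeout=90m':

args = ['--node-tests=true', '--timeout=90m']  # hypothetical test_suite args
timeout = int(next(x[10:-1] for x in args if x.startswith('--timeout=')))
assert timeout == 90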
Example #5
    def _get_variables_from_file(self):
        """
        Looks for file relative to base_path. If not found, checks relative to base_path/ansible.
        If file extension is .yml | .yaml, parses as YAML, otherwise parses as JSON.

        :return: ruamel.yaml.compat.ordereddict
        """
        abspath = path.abspath(self.var_file)
        if not path.exists(abspath):
            dirname, filename = path.split(abspath)
            raise AnsibleContainerConfigException(
                u'Variables file "%s" not found. (I looked in "%s" for it.)' % (
                    filename, dirname))
        logger.debug("Use variable file: %s", abspath, file=abspath)

        if path.splitext(abspath)[-1].lower().endswith(('yml', 'yaml')):
            try:
                config = yaml.round_trip_load(open(abspath))
            except yaml.YAMLError as exc:
                raise AnsibleContainerConfigException(u"YAML exception: %s" % unicode(exc))
        else:
            try:
                config = json.load(open(abspath))
            except Exception as exc:
                raise AnsibleContainerConfigException(u"JSON exception: %s" % unicode(exc))
        return six.iteritems(config)
Example #6
File: main.py Project: bmeg/cwltool
def load_job_order(args,   # type: argparse.Namespace
                   stdin,  # type: IO[Any]
                   fetcher_constructor,  # Fetcher
                   overrides,  # type: List[Dict[Text, Any]]
                   tool_file_uri  # type: Text
):
    # type: (...) -> Tuple[Dict[Text, Any], Text, Loader]

    job_order_object = None

    _jobloaderctx = jobloaderctx.copy()
    loader = Loader(_jobloaderctx, fetcher_constructor=fetcher_constructor)  # type: ignore

    if len(args.job_order) == 1 and args.job_order[0][0] != "-":
        job_order_file = args.job_order[0]
    elif len(args.job_order) == 1 and args.job_order[0] == "-":
        job_order_object = yaml.round_trip_load(stdin)
        job_order_object, _ = loader.resolve_all(job_order_object, file_uri(os.getcwd()) + "/")
    else:
        job_order_file = None

    if job_order_object:
        input_basedir = args.basedir if args.basedir else os.getcwd()
    elif job_order_file:
        input_basedir = args.basedir if args.basedir else os.path.abspath(os.path.dirname(job_order_file))
        job_order_object, _ = loader.resolve_ref(job_order_file, checklinks=False)

    if job_order_object and "http://commonwl.org/cwltool#overrides" in job_order_object:
        overrides.extend(resolve_overrides(job_order_object, file_uri(job_order_file), tool_file_uri))
        del job_order_object["http://commonwl.org/cwltool#overrides"]

    if not job_order_object:
        input_basedir = args.basedir if args.basedir else os.getcwd()

    return (job_order_object, input_basedir, loader)
Example #7
def load_sheet(theme, subtheme, sheet_name):
    """
    Retrieve sheet data from yaml file.

    :param theme: the theme where to find the sheet
    :type theme: str
    :param subtheme: the subtheme where to find the sheet
    :type subtheme: str
    :param sheet_name: the name of the sheet
    :type sheet_name: str
    :rtype: OrderedDict
    """
    from ruamel import yaml
    # from ruamel.yaml import YAML
    # yaml = YAML(typ='safe')

    theme_dir = os.path.join(settings.frameworksdir, theme)
    subtheme_file = os.path.join(settings.frameworksdir, theme,
                                 subtheme + '.yaml')
    if os.path.isdir(theme_dir):
        if os.path.isfile(subtheme_file):
            with open(subtheme_file) as file_path:
                file_data = yaml.round_trip_load(file_path)
                if sheet_name in file_data:
                    return file_data[sheet_name]
                else:
                    raise ValueError('No sheet of this name ({}) in the '
                                     'provided theme and subtheme ({}, {}).'
                                     .format(sheet_name, theme, subtheme))
        else:
            raise IOError('Could not find the provided subtheme ({}) in the '
                          'provided theme ({}).'.format(subtheme, theme))
    else:
        raise IOError('Could not find the provided theme ({}) among the '
                      'frameworks.'.format(theme))
Example #8
def process():
    with open('../calliope/config/defaults.yaml', 'r') as f:
        defaults = yaml.round_trip_load(f)

    write_csv(
        './user/includes/default_essentials.csv',
        get_section(defaults['default_tech']['essentials'])
    )
    write_csv(
        './user/includes/default_constraints.csv',
        get_section(defaults['default_tech']['constraints'])
    )
    write_csv(
        './user/includes/default_costs.csv',
        get_section(defaults['default_tech']['costs']['default'])
    )

    with open('../calliope/config/model.yaml', 'r') as f:
        model = yaml.round_trip_load(f)

    write_csv(
        './user/includes/model_settings.csv',
        get_section(model['model'])
    )
    write_csv(
        './user/includes/run_settings.csv',
        get_section(model['run'])
    )

    y = yaml.YAML()

    for tech_group in model['tech_groups']:
        defaults = {
            'essentials': model['tech_groups'][tech_group].get('essentials', {}),
            'constraints': model['tech_groups'][tech_group].get('constraints', {}),
            'costs': model['tech_groups'][tech_group].get('costs', {})
        }
        with open('./user/includes/basetech_{}.yaml'.format(tech_group), 'w') as f:
            f.write(yaml.dump(defaults, Dumper=yaml.RoundTripDumper))

        required_allowed = {
            'required_constraints': y.seq(model['tech_groups'][tech_group].get('required_constraints', [])),
            'allowed_constraints': y.seq(model['tech_groups'][tech_group].get('allowed_constraints', [])),
            'allowed_costs': y.seq(model['tech_groups'][tech_group].get('allowed_costs', []))
        }
        with open('./user/includes/required_allowed_{}.yaml'.format(tech_group), 'w') as f:
            f.write(yaml.dump(required_allowed, indent=4, Dumper=yaml.RoundTripDumper))
Example #9
def get_content_from_role(role_name, relative_path):
    role_path = resolve_role_to_path(role_name)
    metadata_file = os.path.join(role_path, relative_path)
    if os.path.exists(metadata_file):
        with open(metadata_file) as ifs:
            metadata = yaml.round_trip_load(ifs)
        return metadata or yaml.compat.ordereddict()
    return yaml.compat.ordereddict()
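The `metadata or ...` fallback matters because round_trip_load returns None for an empty document, while callers expect a mapping. For instance:

from ruamel import yaml

assert yaml.round_trip_load('') is None      # empty file -> None, hence the fallback
assert len(yaml.compat.ordereddict()) == 0   # the empty mapping returned instead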
Example #10
def generate_sample_configuration(config_filename, schema_filename):
    '''
    Given a target config filename and the path to a schema filename in pykwalify YAML schema
    format, write out a sample configuration file based on that schema.
    '''
    schema = yaml.round_trip_load(open(schema_filename))
    config = _schema_to_sample_configuration(schema)

    write_configuration(config_filename, config)
Example #11
def sorted_boskos_config():
    """Get the sorted boskos configuration."""
    with open(test_infra('boskos/resources.yaml'), 'r') as fp:
        configs = yaml.round_trip_load(fp, preserve_quotes=True)
    for rtype in configs['resources']:
        rtype["names"] = sorted(rtype["names"])
    output = cStringIO.StringIO()
    yaml.round_trip_dump(
        configs, output, default_flow_style=False, width=float("inf"))
    return output
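cStringIO marks this as Python 2 code; under Python 3 the same pattern works with io.StringIO, since round_trip_dump writes text. A sketch of the equivalent:

import io
from ruamel import yaml

output = io.StringIO()
yaml.round_trip_dump({'names': ['a', 'b']}, output,
                     default_flow_style=False, width=float("inf"))
assert output.getvalue().startswith('names:')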
Example #12
def sorted_prow_config(prow_config_path=None):
    """Get the sorted Prow configuration."""
    with open(prow_config_path, 'r') as fp:
        configs = yaml.round_trip_load(fp, preserve_quotes=True)
    configs['periodics'] = sorted_seq(configs['periodics'])
    configs['presubmits'] = sorted_map(configs['presubmits'])
    configs['postsubmits'] = sorted_map(configs['postsubmits'])
    output = cStringIO.StringIO()
    yaml.round_trip_dump(
        configs, output, default_flow_style=False, width=float("inf"))
    return output
Example #13
def successive_merge(contents):
    """
    Successively merge a list of yaml contents by calling merge()
    :param contents: list of yaml contents in str format
    :return: merged yaml in str format
    """
    data = []
    for i in contents:
        data.append(round_trip_load(i, preserve_quotes=True))
    final_data = data[0]  # with a single document there is nothing to merge
    for i in range(-1, -len(contents), -1):
        final_data = merge(data[i - 1], data[i], 'ROOT')
    return final_data
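merge() is defined elsewhere in the project; the loop relies on it merging its second argument into its first, so later documents override earlier ones. A toy stand-in, purely illustrative, could look like:

def merge(base, overlay, _path):
    # naive deep merge: overlay wins on scalar conflicts (illustrative stand-in only)
    for key, value in overlay.items():
        if key in base and isinstance(base[key], dict) and isinstance(value, dict):
            merge(base[key], value, '{}.{}'.format(_path, key))
        else:
            base[key] = value
    return base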
Example #14
def generate_prow_config(job_name, test_suite, job):
    """Returns the Prow config for the job from the given fields."""
    prow_config = yaml.round_trip_load(PROW_CONFIG_TEMPLATE)
    prow_config['name'] = job_name
    prow_config['interval'] = job['interval']
    # Assumes that the value in --timeout is in minutes.
    timeout = int(next(
        x[10:-1] for x in test_suite['args'] if x.startswith('--timeout=')))
    # Prow timeout = job timeout + 20min
    prow_config['spec']['containers'][0]['args'].append(
        '--timeout=%d' % (timeout + 20))
    return prow_config
Example #15
 def __get_prow_config(self, test_suite):
     """Returns the Prow config for the e2e job from the given fields."""
     prow_config = yaml.round_trip_load(PROW_CONFIG_TEMPLATE)
     prow_config['name'] = self.job_name
     prow_config['interval'] = self.job['interval']
     # Assumes that the value in --timeout is in minutes.
     timeout = int(
         next(x[10:-1] for x in test_suite['args']
              if (x.startswith('--timeout='))))
     container = prow_config['spec']['containers'][0]
     if not container['args']:
         container['args'] = []
     container['args'].append('--bare')
     # Prow timeout = job timeout + 20min
     container['args'].append('--timeout=%d' % (timeout + 20))
     return prow_config
Example #16
def import_config(args, input_file=None):
    if not input_file:
        input_file = sys.stdin
    source = input_file.read().strip()
    if source[0] == "{":
        # JSON input
        config = json.loads(source)
    else:
        # YAML input
        config = yaml.round_trip_load(source)

    STATE["stages"] = config["stages"]
    config["config"] = _encrypt_dict(config["config"])
    with open(args.config, "wt") as f:
        if config:
            yaml.round_trip_dump(config, f)
Example #18
    def convert_modules(self, directories):
        to_convert = {}
        to_update = {}

        for directory in directories:
            for filename in os.listdir(directory):
                if not filename.endswith('.bst'):
                    continue

                name = filename[:-len('.bst')]
                fullpath = os.path.join(directory, filename)

                with open(fullpath) as f:
                    element = yaml.round_trip_load(f)

                module_kind = self._get_module_kind(element)
                if module_kind == 'git':
                    to_convert[name] = fullpath, element
                elif module_kind == 'tarball':
                    to_update[name] = fullpath, element

        executor = ThreadPoolExecutor()

        converted = None
        if self.convert:
            converted = {executor.submit(self._convert_one_module, name, True): name for name in to_convert}

        updated = {executor.submit(self._convert_one_module, name, False): name for name in to_update}

        if converted:
            for future in tqdm(as_completed(converted), 'Converting git repos',
                               unit='', total=len(converted)):
                name = converted[future]
                fullpath, element = to_convert[name]
                location, checksum = future.result()

                if location:
                    self._write_bst_file(fullpath, element, location, checksum)

        for future in tqdm(as_completed(updated), 'Updating existing tarballs',
                           unit='', total=len(updated)):
            name = updated[future]
            fullpath, element = to_update[name]
            location, checksum = future.result()

            if location:
                self._write_bst_file(fullpath, element, location, checksum)
Example #19
def init():
    global accountList, config, runTimeItems
    with open(configFileName) as file:
        configStr = file.read()
        # strip non-ASCII characters
        configStr = re.sub(r'[^\u0000-\u007F]', '', configStr)
        config = yaml.round_trip_load(configStr)
        runTimeItems = {
            itemId: {
                'isInStock': False,  # quoted: these are literal dict keys
                'isSnappingUp': False
            }
            for itemId in config['items'].keys()
        }
        for _id, _config in config['accounts'].items():
            accountDict[_id] = account.Account(_id, _config)
        accountList = list(accountDict.values())
Example #20
def process():
    with open("../calliope/config/defaults.yaml", "r") as f:
        defaults = yaml.round_trip_load(f)

    write_csv(
        "./user/includes/default_essentials.csv",
        get_section(defaults["techs"]["default_tech"]["essentials"]),
    )
    write_csv(
        "./user/includes/default_constraints.csv",
        get_section(defaults["techs"]["default_tech"]["constraints"]),
    )
    write_csv(
        "./user/includes/default_costs.csv",
        get_section(defaults["techs"]["default_tech"]["costs"]["default_cost"]),
    )

    write_csv("./user/includes/model_settings.csv", get_section(defaults["model"]))
    write_csv("./user/includes/run_settings.csv", get_section(defaults["run"]))

    y = yaml.YAML()

    for tech_group in defaults["tech_groups"]:
        this_group_defaults = {
            "essentials": defaults["tech_groups"][tech_group].get("essentials", {}),
            "constraints": defaults["tech_groups"][tech_group].get("constraints", {}),
            "costs": defaults["tech_groups"][tech_group].get("costs", {}),
        }
        with open("./user/includes/basetech_{}.yaml".format(tech_group), "w") as f:
            f.write(yaml.dump(this_group_defaults, Dumper=yaml.RoundTripDumper))

        required_allowed = {
            "required_constraints": y.seq(
                defaults["tech_groups"][tech_group].get("required_constraints", [])
            ),
            "allowed_constraints": y.seq(
                defaults["tech_groups"][tech_group].get("allowed_constraints", [])
            ),
            "allowed_costs": y.seq(
                defaults["tech_groups"][tech_group].get("allowed_costs", [])
            ),
        }
        with open(
            "./user/includes/required_allowed_{}.yaml".format(tech_group), "w"
        ) as f:
            f.write(yaml.dump(required_allowed, indent=4, Dumper=yaml.RoundTripDumper))
Example #21
def update_generated_config(path, latest_version):
    with open(path, 'r') as f:
        config = yaml.round_trip_load(f)

    v = latest_version
    suffixes = ['beta', 'stable1', 'stable2', 'stable3']
    for i, s in enumerate(suffixes):
        vs = "%d.%d" % (v[0], v[1] + 1 - i)
        config['k8sVersions'][s]['version'] = vs
        node = config['nodeK8sVersions'][s]
        for j, arg in enumerate(node['args']):
            node['args'][j] = re.sub(
                r'release-\d+\.\d+', 'release-%s' % vs, arg)
        node['prowImage'] = node['prowImage'].rpartition('-')[0] + '-' + vs

    with open(path, 'w') as f:
        yaml.round_trip_dump(config, f)
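The re.sub rewrites any release-X.Y fragment embedded in a node argument to the new minor version. For example, with a hypothetical argument shape:

import re

arg = '--repos=k8s.io/kubernetes=release-1.17'  # hypothetical arg shape
assert re.sub(r'release-\d+\.\d+', 'release-1.18', arg) == '--repos=k8s.io/kubernetes=release-1.18'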
Example #22
def load_job_order(args,                 # type: argparse.Namespace
                   stdin,                # type: IO[Any]
                   fetcher_constructor,  # Fetcher
                   overrides_list,       # type: List[Dict[Text, Any]]
                   tool_file_uri         # type: Text
                  ):  # type: (...) -> Tuple[Optional[MutableMapping[Text, Any]], Text, Loader]

    job_order_object = None
    job_order_file = None

    _jobloaderctx = jobloaderctx.copy()
    loader = Loader(_jobloaderctx, fetcher_constructor=fetcher_constructor)  # type: ignore

    if len(args.job_order) == 1 and args.job_order[0][0] != "-":
        job_order_file = args.job_order[0]
    elif len(args.job_order) == 1 and args.job_order[0] == "-":
        job_order_object = yaml.round_trip_load(stdin)
        job_order_object, _ = loader.resolve_all(job_order_object, file_uri(os.getcwd()) + "/")
    else:
        job_order_file = None

    if job_order_object is not None:
        input_basedir = args.basedir if args.basedir else os.getcwd()
    elif job_order_file is not None:
        input_basedir = args.basedir if args.basedir \
            else os.path.abspath(os.path.dirname(job_order_file))
        job_order_object, _ = loader.resolve_ref(job_order_file, checklinks=False)

    if job_order_object is not None and "http://commonwl.org/cwltool#overrides" in job_order_object:
        ov_uri = file_uri(job_order_file or input_basedir)
        overrides_list.extend(
            resolve_overrides(job_order_object, ov_uri, tool_file_uri))
        del job_order_object["http://commonwl.org/cwltool#overrides"]

    if job_order_object is None:
        input_basedir = args.basedir if args.basedir else os.getcwd()

    if job_order_object is not None and not isinstance(job_order_object, MutableMapping):
        _logger.error(
            'CWL input object at %s is not formatted correctly, it should be a '
            'JSON/YAML dictionary, not %s.\n'
            'Raw input object:\n%s', job_order_file or "stdin",
            type(job_order_object), job_order_object)
        sys.exit(1)
    return (job_order_object, input_basedir, loader)
Example #24
def main(testgrid):
    """/shrug."""

    with open(testgrid) as fp:
        config = yaml.round_trip_load(fp)

    for dashboard in config['dashboards']:
        if any(prefix in dashboard['name'] for prefix in DASHBOARD_PREFIX):
            for tab in dashboard['dashboard_tab']:
                name = tab['test_group_name']
                for key, val in MAP.iteritems():
                    name = name.replace(key, val)
                tab['name'] = name

    # write out yaml
    with open(testgrid, 'w') as fp:
        yaml.dump(config, fp, Dumper=yaml.RoundTripDumper, width=float("inf"))
        fp.write('\n')
Example #25
def _document_load_by_url(loader, url, loadingOptions):
    if url in loadingOptions.idx:
        return _document_load(loader, loadingOptions.idx[url], url, loadingOptions)

    text = loadingOptions.fetcher.fetch_text(url)
    if isinstance(text, bytes):
        textIO = StringIO(text.decode('utf-8'))
    else:
        textIO = StringIO(text)
    textIO.name = url    # type: ignore
    result = yaml.round_trip_load(textIO)
    add_lc_filename(result, url)

    loadingOptions.idx[url] = result

    loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=url)

    return _document_load(loader, result, url, loadingOptions)
Example #26
 def __get_prow_config(self, test_suite, k8s_version):
     """Returns the Prow config for the job from the given fields."""
     prow_config = yaml.round_trip_load(PROW_CONFIG_TEMPLATE)
     prow_config['name'] = self.job_name
     prow_config['interval'] = self.job['interval']
     # Assumes that the value in --timeout is in minutes.
     timeout = int(
         next(x[10:-1] for x in test_suite['args']
              if (x.startswith('--timeout='))))
     container = prow_config['spec']['containers'][0]
     if not container['args']:
         container['args'] = []
     # Prow timeout = job timeout + 20min
     container['args'].append('--timeout=%d' % (timeout + 20))
     container['args'].extend(k8s_version)
     container['args'].append('--root=/go/src')
     container['env'].extend([{'name': 'GOPATH', 'value': '/go'}])
     return prow_config
Example #27
 def __get_prow_config(self, test_suite, k8s_version):
     """Returns the Prow config for the job from the given fields."""
     prow_config = yaml.round_trip_load(PROW_CONFIG_TEMPLATE)
     prow_config['name'] = self.job_name
     prow_config['interval'] = self.job['interval']
     # Assumes that the value in --test-timeout is in minutes.
     timeout = int(next(
         x[15:-1] for x in test_suite['args'] if (
             x.startswith('--test-timeout='))))
     container = prow_config['spec']['containers'][0]
     if not container['args']:
         container['args'] = []
     # Prow timeout = job timeout + 20min
     container['args'].append('--timeout=%d' % (timeout + 20))
     container['args'].extend(k8s_version)
     container['args'].append('--root=/go/src')
     container['env'].extend([{'name':'GOPATH', 'value': '/go'}])
     return prow_config
Example #28
 def __get_prow_config(self, test_suite, k8s_version):
     """Returns the Prow config for the job from the given fields."""
     prow_config = yaml.round_trip_load(PROW_CONFIG_TEMPLATE)
     prow_config['name'] = self.job_name
     # use cluster from test_suite, or job, or not at all
     if 'cluster' in test_suite:
         prow_config['cluster'] = test_suite['cluster']
     elif 'cluster' in self.job:
         prow_config['cluster'] = self.job['cluster']
     # use resources from test_suite, or job, or default
     if 'resources' in test_suite:
         prow_config['resources'] = test_suite['resources']
     elif 'resources' in self.job:
         prow_config['resources'] = self.job['resources']
     # pull interval or cron from job
     if 'interval' in self.job:
         del prow_config['cron']
         prow_config['interval'] = self.job['interval']
     elif 'cron' in self.job:
         del prow_config['cron']
         prow_config['cron'] = self.job['cron']
     else:
         raise Exception("no interval or cron definition found")
     # Assumes that the value in --timeout is in minutes.
     timeout = int(
         next(x[10:-1] for x in test_suite['args']
              if (x.startswith('--timeout='))))
     container = prow_config['spec']['containers'][0]
     if not container['args']:
         container['args'] = []
     if not container['env']:
         container['env'] = []
     # Prow timeout = job timeout + 20min
     container['args'].append('--timeout=%d' % (timeout + 20))
     container['args'].extend(k8s_version.get('args', []))
     container['args'].append('--root=/go/src')
     container['env'].extend([{'name': 'GOPATH', 'value': '/go'}])
     # Specify the appropriate kubekins-e2e image. This allows us to use a
     # specific image (containing a particular Go version) to build and
     # trigger the node e2e test to avoid issues like
     # https://github.com/kubernetes/kubernetes/issues/43534.
     if k8s_version.get('prowImage', None):
         container['image'] = k8s_version['prowImage']
     return prow_config
Example #29
def _to_cwl_tool_object(tool_path=None, tool_object=None, cwl_tool_object=None, raw_process_reference=None, strict_cwl_validation=False, tool_directory=None, uuid=None):
    if uuid is None:
        uuid = str(uuid4())
    schema_loader = _schema_loader(strict_cwl_validation)
    if raw_process_reference is None and tool_path is not None:
        assert cwl_tool_object is None
        assert tool_object is None

        raw_process_reference = schema_loader.raw_process_reference(tool_path)
        cwl_tool = schema_loader.tool(
            raw_process_reference=raw_process_reference,
        )
    elif tool_object is not None:
        assert raw_process_reference is None
        assert cwl_tool_object is None

        # Allow loading tools from YAML...
        from ruamel import yaml as ryaml
        as_str = json.dumps(tool_object)
        tool_object = ryaml.round_trip_load(as_str)
        path = tool_directory
        if path is None:
            path = os.getcwd()
        uri = ref_resolver.file_uri(path) + "/"
        sourceline.add_lc_filename(tool_object, uri)
        raw_process_reference = schema_loader.raw_process_reference_for_object(
            tool_object,
            uri=uri
        )
        cwl_tool = schema_loader.tool(
            raw_process_reference=raw_process_reference,
        )
    else:
        cwl_tool = cwl_tool_object

    if isinstance(cwl_tool, int):
        raise Exception("Failed to load tool.")

    raw_tool = cwl_tool.tool
    # Apply Galaxy hacks to CWL tool representation to bridge semantic differences
    # between Galaxy and cwltool.
    _hack_cwl_requirements(cwl_tool)
    check_requirements(raw_tool)
    return _cwl_tool_object_to_proxy(cwl_tool, uuid, raw_process_reference=raw_process_reference, tool_path=tool_path)
Example #31
def _document_load_by_url(loader, url, loadingOptions):
    if url in loadingOptions.idx:
        return _document_load(loader, loadingOptions.idx[url], url,
                              loadingOptions)

    text = loadingOptions.fetcher.fetch_text(url)
    if isinstance(text, bytes):
        textIO = StringIO(text.decode('utf-8'))
    else:
        textIO = StringIO(text)
    textIO.name = url  # type: ignore
    result = yaml.round_trip_load(textIO, preserve_quotes=True)
    add_lc_filename(result, url)

    loadingOptions.idx[url] = result

    loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=url)

    return _document_load(loader, result, url, loadingOptions)
Example #32
def load() -> schema.Config:
    config = schema.ConfigSchema()
    dump = False
    try:
        with open(path) as f:
            obj = yaml.round_trip_load(f)
    except IOError:
        obj = get_settings()
        dump = True

    conf = config.load(obj)

    if dump:
        data = config.dump(conf)
        print("Saving config to {}".format(path))
        with open(path, "w") as f:
            yaml.round_trip_dump(data, f)

    return conf
Example #33
def normalize_docker_compose(content):
    """
    If content is a CommentedMap, convert key-value strings (e.g. 'foo=bar' or '80:8080')
    to key-value dicts inside each service's `labels` and `environment` fields,
    and delete duplicated volumes and env_file entries (and their preceding comments) for each service
    """
    data = round_trip_load(content, preserve_quotes=True)
    if isinstance(data, CommentedMap):
        keys = [key.lower() for key in data.keys()]
        if 'services' in keys:
            services = data['services']
            for k in services:
                if 'labels' in services[k] and isinstance(services[k]['labels'], CommentedSeq):
                    services[k]['labels'] = convert_commented_seq_to_dict(services[k]['labels'])
                if 'environment' in services[k] and isinstance(services[k]['environment'], CommentedSeq):
                    services[k]['environment'] = convert_commented_seq_to_dict(services[k]['environment'])
                delete_duplicated_items(services[k], 'volumes')
                delete_duplicated_items(services[k], 'env_file')
    return data
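convert_commented_seq_to_dict is a project helper not shown here; judging from the docstring, a minimal stand-in that splits 'foo=bar' (and ':'-separated '80:8080') entries might be:

def convert_commented_seq_to_dict(seq):
    # turn ['foo=bar', '80:8080'] into {'foo': 'bar', '80': '8080'} (illustrative only)
    result = {}
    for item in seq:
        key, sep, value = str(item).partition('=')
        if not sep:
            key, _, value = str(item).partition(':')
        result[key] = value
    return result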
Example #34
 def load(self):
     try:
         with open(self.permission_file, encoding='utf8') as file:
             self.data = yaml.round_trip_load(file)
     except:
         self.server.logger.warning(
             self.server.t('permission_manager.load.fail',
                           self.permission_file))
         self.data = None
     if self.data is None:
         self.data = {
             'default_level': 'user',
             'admin': [],
             'helper': [],
             'user': [],
             'guest': []
         }
     self.unique()
     self.save()
Example #37
    def _get_infra_def(self, tmp):
        """Read infra def and modify the min max instances according to the Tosca policies.
        If the template doesn't have polcy section or it is invalid then set a default value """
        yaml.default_flow_style = False

        try:
            with open(self.infra_def_path_input, 'r') as f:
                infra_def = yaml.round_trip_load(f, preserve_quotes=True)
            infra_def["nodes"][0]["scaling"]["min"] = self.min_instances
            infra_def["nodes"][0]["scaling"]["max"] = self.max_instances
            infra_def["variables"]["master_host_ip"]
        except OSError as e:
            logger.error(e)

        if tmp:
            with open(self.infra_def_path_output_tmp, 'w') as ofile:
                yaml.round_trip_dump(infra_def, ofile)
        else:
            with open(self.infra_def_path_output, 'w') as ofile:
                yaml.round_trip_dump(infra_def, ofile)
Example #38
def best_practice_comment_round_trip(yaml_path):
    """Round-trips the YAML document. If comments (#) are
    incorrectly escaped or do not use literal style, they will
    affect tools that parse the content. Also catches indentation changes,
    extra spaces in unneeded areas, and other minor stylistic changes."""
    with open(yaml_path, mode="r", encoding="utf-8") as f:
        yaml_str1 = f.read()

    yaml_object = yaml.round_trip_load(yaml_str1, preserve_quotes=True)
    yaml_str2 = yaml.round_trip_dump(yaml_object, width=300)

    results = list(difflib.Differ().compare(
        yaml_str1.splitlines(keepends=True),
        yaml_str2.splitlines(keepends=True)))

    for item in results:
        if item.startswith(("+ ", "- ", "? ")):
            pprint.pprint(item)

    assert yaml_str1 == yaml_str2
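Typical use is as a style guard in a test suite: run it over each tracked YAML file and fail the build when ruamel's round trip would alter the text, e.g. (pytest-style, path hypothetical):

def test_pipeline_yaml_round_trips_cleanly():
    best_practice_comment_round_trip('pipelines/build.yml')  # hypothetical path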
Example #39
def load_yaml_file(path, round_tripping=False):
    with io.open(path, 'r', encoding='utf-8') as reader:
        pathdir = os.path.dirname(path)
        newfolder = pathdir + "copy"
        if not os.path.exists(newfolder):  # create the copy directory if it does not exist
            os.mkdir(newfolder)
        if round_tripping:  # round-trip mode preserves the comments
            data = round_trip_load(reader)
            # os.path.join inserts the separator; concatenating with '+' would need an explicit "/"
            with open(os.path.join(newfolder, os.path.basename(path)),
                      "w",
                      encoding="utf-8") as wr:
                round_trip_dump(data, wr, allow_unicode=True)

        else:
            data = safe_load(reader)
            with open(os.path.basename(path), "w", encoding="utf-8") as wr:
                # dump(data, w, allow_unicode=True)
                dump_all([data], wr, allow_unicode=True)
    return data
Example #40
    def __get__(self, instance, owner):
        if instance is None:
            return self

        value = undefined

        app = getattr(instance, '_app', None)

        if self.auto_global and app is not None:
            value = app.cli.globals.get(self.qualified_name, undefined)

            # Only take the value if it's not the default.
            if value is not undefined and value == self.default:
                value = undefined

        if self.environ_name and value is undefined:
            # pylint: disable=invalid-envvar-default
            value = os.getenv(self.environ_name, undefined)

        if value is undefined:
            if issubclass(owner, Config):
                value = instance.get(self.name, self.default)
            else:
                root, levels = instance.get_root(self.name)
                value, found = get_deep(root, *levels)
                if not found:  # pragma: no cover
                    value = self.default

        if value is not None and self.config_type is not None:
            try:
                if isinstance(value, str) and self.config_type == list:
                    value = yaml.round_trip_load(value)
                else:
                    # pylint: disable=not-callable
                    value = self.config_type(value)
            except TypeError as err:
                raise ConfigError(
                    f'Expected type {repr(self.config_type)} for {value}'
                ) from err

        return value
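The list coercion works because any string can be handed to the YAML parser: round_trip_load turns a flow or block sequence into a list-like CommentedSeq, which is what the descriptor then returns. For instance:

from ruamel import yaml

assert list(yaml.round_trip_load('[alpha, beta]')) == ['alpha', 'beta']
assert list(yaml.round_trip_load('- alpha\n- beta')) == ['alpha', 'beta']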
Example #41
def export_config(args, output_file=None):
    if not output_file:
        output_file = sys.stdout
    if os.path.exists(args.config):
        with open(args.config, 'rt') as f:
            config = yaml.round_trip_load(f.read())
        STATE['stages'] = config['stages']
        config['config'] = _decrypt_dict(config['config'])
    else:
        config = {
            'stages': {
                env['name']: {
                    'environment': env['name'],
                    'key': 'enter-key-name-here'
                } for env in STATE['awscreds'].environments
            },
            'config': {}}
    if args.json:
        output_file.write(json.dumps(config, indent=4))
    elif config:
        yaml.round_trip_dump(config, output_file)
Example #42
def generate_sample_configuration(source_filename, destination_filename, schema_filename):
    '''
    Given an optional source configuration filename, and a required destination configuration
    filename, and the path to a schema filename in pykwalify YAML schema format, write out a
    sample configuration file based on that schema. If a source filename is provided, merge the
    parsed contents of that configuration into the generated configuration.
    '''
    schema = yaml.round_trip_load(open(schema_filename))
    source_config = None

    if source_filename:
        source_config = load.load_configuration(source_filename)

    destination_config = merge_source_configuration_into_destination(
        _schema_to_sample_configuration(schema), source_config
    )

    write_configuration(
        destination_filename,
        _comment_out_optional_configuration(render_configuration(destination_config)),
    )
Example #43
def sync(name, metrics):
    """Write markdown docs"""
    metrics_file = Path().resolve().parent / name / "metrics.yaml"
    cur = {}

    if metrics_file.exists():
        cur = yaml.round_trip_load(metrics_file.read_text())

    for m in metrics:
        entry = cur.setdefault(m.title, asdict(m))

        # If the fetched value exists override it, otherwise leave the current one alone.
        if m.description:
            entry["description"] = m.description
        if m.brief:
            entry["brief"] = m.brief
        if m.metric_type:
            entry["metric_type"] = m.metric_type

    with metrics_file.open("wt") as f:
        yaml.round_trip_dump(cur, f)
Example #44
    def _load_data(self, allowed_missing_file) -> bool:
        """
		:param bool allowed_missing_file: If set to True, missing data file will result in a FileNotFoundError(),
		otherwise it will treat it as an empty config gile
		:return: if there is any missing data entry
		:raise: FileNotFoundError
		"""
        if self.file_presents():
            with open(self.__file_path, encoding='utf8') as file:
                users_data = yaml.round_trip_load(file)
        else:
            if not allowed_missing_file:
                raise FileNotFoundError()
            users_data = {}
        self.__has_changes = False
        fixed_result = self.__fix(dict(self.__default_data.get()), users_data)
        with self._data_operation_lock:
            self._data = fixed_result
        if self.__has_changes:
            self.save()
        return self.__has_changes
Example #45
	def load(self):
		"""
		Load the permission file from disk
		"""
		try:
			with open(self.permission_file, encoding='utf8') as file:
				self.data = yaml.round_trip_load(file)
		except:
			self.server.logger.warning(self.server.t('permission_manager.load.fail', self.permission_file))
			self.data = {
				'default_level': 'user',
				'owner': None,
				'admin': None,
				'helper': None,
				'user': None,
				'guest': None
			}
		for name in PermissionLevel.NAME:
			if name not in self.data:
				self.data[name] = None
		self.save()
Example #46
def main(json_config_path, yaml_config_path, prow_config_path, output_dir):
    """Creates test job definitions.

    Converts the test configurations in yaml_config_path to the job definitions
    in json_config_path and the env files in output_dir.
    """
    # TODO(yguo0905): Validate the configurations from yaml_config_path.

    with open(json_config_path) as fp:
        json_config = json.load(fp)
    json_config = remove_generated_jobs(json_config)

    with open(prow_config_path) as fp:
        prow_config = yaml.round_trip_load(fp, preserve_quotes=True)
    remove_generated_prow_configs(prow_config)

    with open(yaml_config_path) as fp:
        yaml_config = yaml.safe_load(fp)

    for job_name, _ in yaml_config['jobs'].items():
        # Get the envs and args for each job defined under "jobs".
        envs, args, prow = for_each_job(
            job_name,
            yaml_config['common'],
            yaml_config['cloudProviders'],
            yaml_config['images'],
            yaml_config['k8sVersions'],
            yaml_config['testSuites'],
            yaml_config['jobs'])
        # Write the extracted envs into an env file for the job.
        env_filename = write_env_file(output_dir, job_name, envs)
        # Add the job to the definitions.
        sig_owners = yaml_config['jobs'][job_name].get('sigOwners')
        json_config[job_name] = get_job_def(env_filename, args, sig_owners)
        prow_config['periodics'].append(prow)

    # Write the job definitions to config.json.
    write_job_defs_file(output_dir, json_config)
    write_prow_configs_file('prow', prow_config)
Example #48
 def fetch(self, url, inject_ids=True):  # type: (unicode, bool) -> Any
     if url in self.idx:
         return self.idx[url]
     try:
         text = self.fetch_text(url)
         if isinstance(text, bytes):
             textIO = StringIO(text.decode('utf-8'))
         else:
             textIO = StringIO(text)
         textIO.name = url    # type: ignore
         result = yaml.round_trip_load(textIO)  # type: ignore
         add_lc_filename(result, url)
     except yaml.parser.ParserError as e:
         raise validate.ValidationException("Syntax error %s" % (e))
     if isinstance(result, CommentedMap) and inject_ids and self.identifiers:
         for identifier in self.identifiers:
             if identifier not in result:
                 result[identifier] = url
             self.idx[self.expand_url(result[identifier], url)] = result
     else:
         self.idx[url] = result
     return result
Example #50
    def cwl_dispatch(self, json):
        try:
            cwlwf, it_is_workflow = load_cwl(
                self.dag.default_args["cwl_workflow"], self.dag.default_args)
            cwl_context = {
                "outdir":
                mkdtemp(dir=get_folder(os.path.abspath(self.tmp_folder)),
                        prefix="dag_tmp_")
            }

            _jobloaderctx = jobloaderctx.copy()
            _jobloaderctx.update(cwlwf.metadata.get("$namespaces", {}))
            loader = Loader(_jobloaderctx)

            try:
                job_order_object = yaml.round_trip_load(
                    io.StringIO(initial_value=dumps(json)))
                job_order_object, _ = loader.resolve_all(
                    job_order_object,
                    file_uri(os.getcwd()) + "/",
                    checklinks=False)
            except Exception as e:
                _logger.error("Job Loader: {}".format(str(e)))

            job_order_object = init_job_order(job_order_object, None, cwlwf,
                                              loader, sys.stdout)

            cwl_context['promises'] = job_order_object

            logging.info('{0}: Final job: \n {1}'.format(
                self.task_id, dumps(cwl_context, indent=4)))

            return cwl_context

        except Exception as e:
            _logger.info('Dispatch Exception {0}: \n {1} {2}'.format(
                self.task_id, type(e), e))
            pass
        return None
Example #51
    def set_env(self, env, config=None):
        try:
            config = yaml.round_trip_load(open(self.config_path))
        except IOError:
            raise AnsibleContainerNotInitializedException()
        except yaml.YAMLError as exc:
            raise AnsibleContainerConfigException(
                u"Parsing container.yml - %s" % exc)

        new_services = yaml.compat.ordereddict()
        for service_name, service_config in iteritems(
                config.get('services') or {}):
            if service_config.get('containers'):
                # If containers is defined, convert it to services, and drop any other keys
                for container in service_config['containers']:
                    if not container.get('container_name'):
                        raise AnsibleContainerConfigException(
                            u"Expecting container to have container_name defined. None found."
                        )
                    new_service_name = "{}-{}".format(
                        service_name, container['container_name'])
                    new_services[new_service_name] = copy.deepcopy(container)
            else:
                new_services[service_name] = copy.deepcopy(service_config)

        config['services'] = new_services
        super(AnsibleContainerConfig, self).set_env(env, config=config)

        if self._config.get('volumes'):
            for vol_key in self._config['volumes']:
                if 'docker' in self._config['volumes'][vol_key]:
                    settings = copy.deepcopy(
                        self._config['volumes'][vol_key][self.engine_name])
                    self._config['volumes'][vol_key] = settings
                else:
                    # remove non-engine settings
                    for engine_name in self.remove_engines:
                        if engine_name in self._config['volumes'][vol_key]:
                            del self._config['volumes'][vol_key][engine_name]
Example #52
def delete_command():
    """
    Sub-command, see main()
    """
    parser = argparse.ArgumentParser(
        description='Delete one item from the input yaml file')
    parser.add_argument('path_to_key', type=str, nargs='+',
                        help='<Required> Yaml item to be deleted, e.g. "foo 0 bar"')
    parser.add_argument('-i', '--input', type=str,
                        help='<Required> Path to the input yaml files', required=True)
    parser.add_argument('-o', '--output', type=str,
                        help='Path to the output file, or stdout by default')

    args = parser.parse_args(sys.argv[2:])
    with open(args.input, 'r') as input_file:
        data = round_trip_load(input_file.read(), preserve_quotes=True)

    output_data, _ = delete_yaml_item(data, args.path_to_key, True)

    # Only close the output handle if we opened it; closing sys.stdout
    # would break any later writes to it.
    if args.output:
        with open(args.output, 'w') as output_file:
            round_trip_dump(output_data, output_file)
    else:
        round_trip_dump(output_data, sys.stdout)
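A minimal round-trip sketch of why these examples reach for round_trip_load/round_trip_dump rather than safe_load: comments and quoting survive the load-edit-dump cycle. The document text and keys below are illustrative only.

import sys
from ruamel.yaml import round_trip_load, round_trip_dump

source = '''\
foo: "quoted"    # this comment survives the round trip
bar:
- 1
- 2
'''
data = round_trip_load(source, preserve_quotes=True)
del data['bar'][0]                 # the kind of edit delete_yaml_item performs
round_trip_dump(data, sys.stdout)  # quotes and the comment are preserved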
Example No. 53
    def _process_section(self, section_value, callback=None, templar=None):
        if not templar:
            templar = self._templar
        processed = yaml.compat.ordereddict()
        for key, value in section_value.items():
            if isinstance(value, basestring):
                # strings can be templated
                processed[key] = templar.template(value)
                if isinstance(processed[key], AnsibleUnsafeText):
                    processed[key] = str(processed[key])
            elif isinstance(value, (list, dict)):
                # if it's a dimensional structure, it's cheaper just to serialize
                # it, treat it like a template, and then deserialize it again
                buffer = BytesIO()  # use bytes explicitly, not unicode
                yaml.round_trip_dump(value, buffer)
                processed[key] = yaml.round_trip_load(
                    templar.template(buffer.getvalue())
                )
            else:
                # ints, booleans, etc.
                processed[key] = value
            if callback:
                callback(processed)
        return processed
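The dump-template-load trick above generalizes: serialize the nested structure to YAML text, run the template engine over the string, and parse it back. A sketch under Python 3, with a trivial str.replace standing in for Ansible's Templar (the variable names are illustrative):

from io import StringIO
from ruamel import yaml

value = {'image': '{{ base_image }}', 'ports': ['{{ web_port }}:80']}
buf = StringIO()
yaml.round_trip_dump(value, buf)             # serialize the nested structure
rendered = (buf.getvalue()
            .replace('{{ base_image }}', 'centos:7')
            .replace('{{ web_port }}', '8080'))  # stand-in for templating
result = yaml.round_trip_load(rendered)      # parse the rendered text back
print(result['image'], result['ports'][0])   # centos:7 8080:80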
Example No. 54
def to_cwl_tool_object(tool_path=None, tool_object=None, persisted_tool=None, strict_cwl_validation=True):
    schema_loader = _schema_loader(strict_cwl_validation)
    if tool_path is not None:
        cwl_tool = schema_loader.tool(
            path=tool_path
        )
    elif tool_object is not None:
        # Allow loading tools from YAML...
        from ruamel import yaml as ryaml
        import json
        as_str = json.dumps(tool_object)
        tool_object = ryaml.round_trip_load(as_str)
        from schema_salad import sourceline
        from schema_salad.ref_resolver import file_uri
        uri = file_uri(os.getcwd()) + "/"
        sourceline.add_lc_filename(tool_object, uri)
        tool_object, _ = schema_loader.raw_document_loader.resolve_all(tool_object, uri)
        raw_process_reference = schema_loader.raw_process_reference_for_object(
            tool_object,
            uri=uri
        )
        cwl_tool = schema_loader.tool(
            raw_process_reference=raw_process_reference,
        )
    else:
        cwl_tool = ToolProxy.from_persistent_representation(persisted_tool)

    if isinstance(cwl_tool, int):
        raise Exception("Failed to load tool.")

    raw_tool = cwl_tool.tool
    # Apply Galaxy hacks to CWL tool representation to bridge semantic differences
    # between Galaxy and cwltool.
    _hack_cwl_requirements(cwl_tool)
    check_requirements(raw_tool)
    return cwl_tool_object_to_proxy(cwl_tool, tool_path=tool_path)
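A sketch of the dict-to-CommentedMap normalization used above (assuming ruamel.yaml and nothing Galaxy-specific): a plain dict carries no source positions, but round-tripping it through json.dumps and round_trip_load yields ruamel types that schema_salad's sourceline can annotate.

import json
from ruamel import yaml as ryaml

tool_object = {'cwlVersion': 'v1.0', 'class': 'CommandLineTool',
               'inputs': [], 'outputs': [], 'baseCommand': 'echo'}
normalized = ryaml.round_trip_load(json.dumps(tool_object))
print(type(normalized).__name__)              # CommentedMap
print(normalized.lc.line, normalized.lc.col)  # line/column data now exists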
Example No. 55
    def load(self):
        with open(self.path, "r") as fh:
            self.data = yaml.round_trip_load(fh, version=(1, 2))
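Pinning version=(1, 2) matters because ruamel.yaml can also resolve scalars under YAML 1.1 rules; one observable difference, assuming a reasonably recent ruamel.yaml:

from ruamel import yaml

doc = "answer: yes\n"
v12 = yaml.round_trip_load(doc, version=(1, 2))
v11 = yaml.round_trip_load(doc, version=(1, 1))
print(repr(v12['answer']), repr(v11['answer']))  # 'yes' True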
Example No. 56
import json
import os

try:
    from ansible.vars import Templar
except ImportError:
    from ansible.template import Templar

from ruamel import yaml
import docker

if 'TO_AC' not in os.environ or 'DISTRO_DATA' not in os.environ:
    raise ImportError('TO_AC and DISTRO_DATA must be in the environment. You '
                      'probably want to run this via "python setup.py test"')

distro_vars = json.loads(os.environ['DISTRO_DATA'])

def _load_role_yaml(*parts):
    """Load one of the generated role's YAML files, closing the handle."""
    path = os.path.join(os.environ['TO_AC'], 'roles', distro_vars['name'], *parts)
    with open(path) as fh:
        return yaml.round_trip_load(fh)

role_defaults = _load_role_yaml('defaults', 'main.yml')
role_meta = _load_role_yaml('meta', 'container.yml')
role_tasks = _load_role_yaml('tasks', 'main.yml')

docker_client = docker.from_env(version='auto')
built_image_name = u'test-%s-%s:latest' % (distro_vars['name'], distro_vars['name'])
built_image_info = docker_client.images.get(built_image_name).attrs
Example No. 57
if not os.path.exists(directory):
    print("Error: {} does not exist.".format(directory))
    exit(1)

if not os.path.isdir(directory):
    print("Error: {} is not a file.".format(directory))
    exit(1)

# Read in the existing YAML data

data = {}

try:
    with open(filename, "r") as fh:
        data = yaml.round_trip_load(fh)
except Exception as e:
    print("Failed to load YAML-format file: {}".format(e))
    exit(1)

dirs_set = set()
files_set = set()

print("Ignoring extensions: {}".format(", ".join(sorted(ignore))))
print("Walking directory tree: {}".format(directory))

for root, dirs, files in os.walk(directory):
    # Walk the tree, sanitising and storing the dirs and files within

    root = root.replace("\\", "/")
    if root.endswith("/"):
Example No. 58
def load_job_order(args, t, stdin, print_input_deps=False, relative_deps=False,
                   stdout=sys.stdout, make_fs_access=None, fetcher_constructor=None):
    # type: (argparse.Namespace, Process, IO[Any], bool, bool, IO[Any], Callable[[Text], StdFsAccess], Callable[[Dict[unicode, unicode], requests.sessions.Session], Fetcher]) -> Union[int, Tuple[Dict[Text, Any], Text]]

    job_order_object = None

    _jobloaderctx = jobloaderctx.copy()
    _jobloaderctx.update(t.metadata.get("$namespaces", {}))
    loader = Loader(_jobloaderctx, fetcher_constructor=fetcher_constructor)

    if len(args.job_order) == 1 and args.job_order[0][0] != "-":
        job_order_file = args.job_order[0]
    elif len(args.job_order) == 1 and args.job_order[0] == "-":
        job_order_object = yaml.round_trip_load(stdin)  # type: ignore
        job_order_object, _ = loader.resolve_all(job_order_object, file_uri(os.getcwd()) + "/")
    else:
        job_order_file = None

    if job_order_object:
        input_basedir = args.basedir if args.basedir else os.getcwd()
    elif job_order_file:
        input_basedir = args.basedir if args.basedir else os.path.abspath(os.path.dirname(job_order_file))
        try:
            job_order_object, _ = loader.resolve_ref(job_order_file, checklinks=False)
        except Exception as e:
            _logger.error(Text(e), exc_info=args.debug)
            return 1
        toolparser = None
    else:
        input_basedir = args.basedir if args.basedir else os.getcwd()
        namemap = {}  # type: Dict[Text, Text]
        records = []  # type: List[Text]
        toolparser = generate_parser(
            argparse.ArgumentParser(prog=args.workflow), t, namemap, records)
        if toolparser:
            if args.tool_help:
                toolparser.print_help()
                return 0
            cmd_line = vars(toolparser.parse_args(args.job_order))
            for record_name in records:
                record = {}
                record_items = {
                    k: v for k, v in cmd_line.iteritems()
                    if k.startswith(record_name)}
                for key, value in record_items.iteritems():
                    record[key[len(record_name) + 1:]] = value
                    del cmd_line[key]
                cmd_line[str(record_name)] = record

            if cmd_line["job_order"]:
                try:
                    input_basedir = args.basedir if args.basedir else os.path.abspath(
                        os.path.dirname(cmd_line["job_order"]))
                    job_order_object, _ = loader.resolve_ref(cmd_line["job_order"])
                except Exception as e:
                    _logger.error(Text(e), exc_info=args.debug)
                    return 1
            else:
                job_order_object = {"id": args.workflow}

            del cmd_line["job_order"]

            job_order_object.update({namemap[k]: v for k, v in cmd_line.items()})

            if _logger.isEnabledFor(logging.DEBUG):
                _logger.debug(u"Parsed job order from command line: %s", json.dumps(job_order_object, indent=4))
        else:
            job_order_object = None

    for inp in t.tool["inputs"]:
        if "default" in inp and (not job_order_object or shortname(inp["id"]) not in job_order_object):
            if not job_order_object:
                job_order_object = {}
            job_order_object[shortname(inp["id"])] = inp["default"]

    if not job_order_object and len(t.tool["inputs"]) > 0:
        if toolparser:
            print(u"\nOptions for {} ".format(args.workflow))
            toolparser.print_help()
        _logger.error("")
        _logger.error("Input object required, use --help for details")
        return 1

    if print_input_deps:
        printdeps(job_order_object, loader, stdout, relative_deps, "",
                  basedir=file_uri(input_basedir + "/"))
        return 0

    def pathToLoc(p):
        if "location" not in p and "path" in p:
            p["location"] = p["path"]
            del p["path"]

    visit_class(job_order_object, ("File", "Directory"), pathToLoc)
    adjustDirObjs(job_order_object, trim_listing)
    normalizeFilesDirs(job_order_object)

    if "cwl:tool" in job_order_object:
        del job_order_object["cwl:tool"]
    if "id" in job_order_object:
        del job_order_object["id"]

    return (job_order_object, input_basedir)
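A toy illustration of the default-filling loop near the end of load_job_order; the simplified shortname here (the real one lives in schema_salad) just takes the last path segment of an input id:

def shortname(inputid):
    # Simplified: the real shortname comes from schema_salad.
    return inputid.split("#")[-1].split("/")[-1]

tool_inputs = [
    {"id": "file:///wf.cwl#main/threads", "default": 4},
    {"id": "file:///wf.cwl#main/infile"},
]
job_order_object = {"infile": "data.txt"}
for inp in tool_inputs:
    if "default" in inp and shortname(inp["id"]) not in job_order_object:
        job_order_object[shortname(inp["id"])] = inp["default"]
print(job_order_object)  # {'infile': 'data.txt', 'threads': 4}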
Example No. 59
    # Inner function of a test decorator: ``func`` (called in the return at
    # the bottom) is the wrapped test method bound in the enclosing scope.
    def wrapped(self, events, keep_client1, keep_client2, keepdocker, *args, **kwargs):
        class Stubs:
            pass
        stubs = Stubs()
        stubs.events = events
        stubs.keepdocker = keepdocker

        def putstub(p, **kwargs):
            return "%s+%i" % (hashlib.md5(p).hexdigest(), len(p))
        keep_client1().put.side_effect = putstub
        keep_client1.put.side_effect = putstub
        keep_client2().put.side_effect = putstub
        keep_client2.put.side_effect = putstub

        stubs.keep_client = keep_client2
        stubs.keepdocker.return_value = [("zzzzz-4zz18-zzzzzzzzzzzzzz3", "")]
        stubs.fake_user_uuid = "zzzzz-tpzed-zzzzzzzzzzzzzzz"

        stubs.api = mock.MagicMock()
        stubs.api._rootDesc = get_rootDesc()

        stubs.api.users().current().execute.return_value = {
            "uuid": stubs.fake_user_uuid,
        }
        stubs.api.collections().list().execute.return_value = {"items": []}
        stubs.api.collections().create().execute.side_effect = ({
            "uuid": "zzzzz-4zz18-zzzzzzzzzzzzzz1",
            "portable_data_hash": "99999999999999999999999999999991+99",
            "manifest_text": ""
        }, {
            "uuid": "zzzzz-4zz18-zzzzzzzzzzzzzz2",
            "portable_data_hash": "99999999999999999999999999999992+99",
            "manifest_text": "./tool 00000000000000000000000000000000+0 0:0:submit_tool.cwl 0:0:blub.txt"
        },
        {
            "uuid": "zzzzz-4zz18-zzzzzzzzzzzzzz4",
            "portable_data_hash": "99999999999999999999999999999994+99",
            "manifest_text": ""
        },
        {
            "uuid": "zzzzz-4zz18-zzzzzzzzzzzzzz5",
            "portable_data_hash": "99999999999999999999999999999995+99",
            "manifest_text": ""
        },
        {
            "uuid": "zzzzz-4zz18-zzzzzzzzzzzzzz6",
            "portable_data_hash": "99999999999999999999999999999996+99",
            "manifest_text": ""
        }
        )
        stubs.api.collections().get().execute.return_value = {
            "portable_data_hash": "99999999999999999999999999999993+99",
            "manifest_text": "./tool 00000000000000000000000000000000+0 0:0:submit_tool.cwl 0:0:blub.txt"
        }

        stubs.expect_job_uuid = "zzzzz-8i9sb-zzzzzzzzzzzzzzz"
        stubs.api.jobs().create().execute.return_value = {
            "uuid": stubs.expect_job_uuid,
            "state": "Queued",
        }

        stubs.expect_container_request_uuid = "zzzzz-xvhdp-zzzzzzzzzzzzzzz"
        stubs.api.container_requests().create().execute.return_value = {
            "uuid": stubs.expect_container_request_uuid,
            "container_uuid": "zzzzz-dz642-zzzzzzzzzzzzzzz",
            "state": "Queued"
        }

        stubs.expect_pipeline_template_uuid = "zzzzz-d1hrv-zzzzzzzzzzzzzzz"
        stubs.api.pipeline_templates().create().execute.return_value = {
            "uuid": stubs.expect_pipeline_template_uuid,
        }
        stubs.expect_job_spec = {
            'runtime_constraints': {
                'docker_image': 'arvados/jobs:'+arvados_cwl.__version__,
                'min_ram_mb_per_node': 1024
            },
            'script_parameters': {
                'x': {
                    'basename': 'blorp.txt',
                    'location': 'keep:99999999999999999999999999999992+99/blorp.txt',
                    'class': 'File'
                },
                'y': {
                    'basename': '99999999999999999999999999999998+99',
                    'location': 'keep:99999999999999999999999999999998+99',
                    'class': 'Directory'
                },
                'z': {
                    'basename': 'anonymous',
                    "listing": [{
                        "basename": "renamed.txt",
                        "class": "File",
                        "location": "keep:99999999999999999999999999999998+99/file1.txt"
                    }],
                    'class': 'Directory'
                },
                'cwl:tool':
                '99999999999999999999999999999994+99/workflow.cwl#main'
            },
            'repository': 'arvados',
            'script_version': 'master',
            'minimum_script_version': '570509ab4d2ef93d870fd2b1f2eab178afb1bad9',
            'script': 'cwl-runner'
        }
        stubs.pipeline_component = stubs.expect_job_spec.copy()
        stubs.expect_pipeline_instance = {
            'name': 'submit_wf.cwl',
            'state': 'RunningOnServer',
            'owner_uuid': None,
            "components": {
                "cwl-runner": {
                    'runtime_constraints': {'docker_image': 'arvados/jobs:'+arvados_cwl.__version__, 'min_ram_mb_per_node': 1024},
                    'script_parameters': {
                        'y': {"value": {'basename': '99999999999999999999999999999998+99', 'location': 'keep:99999999999999999999999999999998+99', 'class': 'Directory'}},
                        'x': {"value": {'basename': 'blorp.txt', 'class': 'File', 'location': 'keep:99999999999999999999999999999992+99/blorp.txt'}},
                        'z': {"value": {'basename': 'anonymous', 'class': 'Directory',
                              'listing': [
                                  {'basename': 'renamed.txt', 'class': 'File', 'location': 'keep:99999999999999999999999999999998+99/file1.txt'}
                              ]}},
                        'cwl:tool': '99999999999999999999999999999994+99/workflow.cwl#main',
                        'arv:enable_reuse': True,
                        'arv:on_error': 'continue'
                    },
                    'repository': 'arvados',
                    'script_version': 'master',
                    'minimum_script_version': '570509ab4d2ef93d870fd2b1f2eab178afb1bad9',
                    'script': 'cwl-runner',
                    'job': {'state': 'Queued', 'uuid': 'zzzzz-8i9sb-zzzzzzzzzzzzzzz'}
                }
            }
        }
        stubs.pipeline_create = copy.deepcopy(stubs.expect_pipeline_instance)
        stubs.expect_pipeline_uuid = "zzzzz-d1hrv-zzzzzzzzzzzzzzz"
        stubs.pipeline_create["uuid"] = stubs.expect_pipeline_uuid
        stubs.pipeline_with_job = copy.deepcopy(stubs.pipeline_create)
        stubs.pipeline_with_job["components"]["cwl-runner"]["job"] = {
            "uuid": "zzzzz-8i9sb-zzzzzzzzzzzzzzz",
            "state": "Queued"
        }
        stubs.api.pipeline_instances().create().execute.return_value = stubs.pipeline_create
        stubs.api.pipeline_instances().get().execute.return_value = stubs.pipeline_with_job

        with open("tests/wf/submit_wf_packed.cwl") as f:
            expect_packed_workflow = yaml.round_trip_load(f)

        stubs.expect_container_spec = {
            'priority': 1,
            'mounts': {
                '/var/spool/cwl': {
                    'writable': True,
                    'kind': 'collection'
                },
                '/var/lib/cwl/workflow.json': {
                    'content': expect_packed_workflow,
                    'kind': 'json'
                },
                'stdout': {
                    'path': '/var/spool/cwl/cwl.output.json',
                    'kind': 'file'
                },
                '/var/lib/cwl/cwl.input.json': {
                    'kind': 'json',
                    'content': {
                        'y': {'basename': '99999999999999999999999999999998+99', 'location': 'keep:99999999999999999999999999999998+99', 'class': 'Directory'},
                        'x': {'basename': u'blorp.txt', 'class': 'File', 'location': u'keep:99999999999999999999999999999992+99/blorp.txt'},
                        'z': {'basename': 'anonymous', 'class': 'Directory', 'listing': [
                            {'basename': 'renamed.txt', 'class': 'File', 'location': 'keep:99999999999999999999999999999998+99/file1.txt'}
                        ]}
                    }
                }
            },
            'state': 'Committed',
            'owner_uuid': None,
            'command': ['arvados-cwl-runner', '--local', '--api=containers', '--no-log-timestamps',
                        '--enable-reuse', '--on-error=continue',
                        '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json'],
            'name': 'submit_wf.cwl',
            'container_image': 'arvados/jobs:'+arvados_cwl.__version__,
            'output_path': '/var/spool/cwl',
            'cwd': '/var/spool/cwl',
            'runtime_constraints': {
                'API': True,
                'vcpus': 1,
                'ram': 1024*1024*1024
            },
            "properties": {}
        }

        stubs.expect_workflow_uuid = "zzzzz-7fd4e-zzzzzzzzzzzzzzz"
        stubs.api.workflows().create().execute.return_value = {
            "uuid": stubs.expect_workflow_uuid,
        }
        def update_mock(**kwargs):
            stubs.updated_uuid = kwargs.get('uuid')
            return mock.DEFAULT
        stubs.api.workflows().update.side_effect = update_mock
        stubs.api.workflows().update().execute.side_effect = lambda **kwargs: {
            "uuid": stubs.updated_uuid,
        }

        return func(self, stubs, *args, **kwargs)
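The create().execute() stubs above lean on MagicMock's side_effect sequencing: successive calls return successive items from the iterable. A minimal standard-library illustration:

from unittest import mock

api = mock.MagicMock()
api.collections().create().execute.side_effect = ({"uuid": "c1"}, {"uuid": "c2"})
print(api.collections().create().execute()["uuid"])  # c1
print(api.collections().create().execute()["uuid"])  # c2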