Example #1
def run_local_topology(name=None, env_name=None, time=0, options=None):
    """Run a topology locally using Flux and `storm jar`."""
    name, topology_file = get_topology_definition(name)
    env_name, env_config = get_env_config(env_name)
    topology_class = get_topology_from_file(topology_file)

    storm_options = resolve_options(options, env_config, topology_class, name)
    if storm_options['topology.acker.executors'] != 0:
        storm_options['topology.acker.executors'] = 1
    storm_options['topology.workers'] = 1

    # Check Storm version is the same
    local_version = local_storm_version()
    project_version = storm_lib_version()
    if local_version != project_version:
        raise ValueError('Local Storm version, {}, is not the same as the '
                         'version in your project.clj, {}. The versions must '
                         'match.'.format(local_version, project_version))

    # Prepare a JAR that has Storm dependencies packaged
    topology_jar = jar_for_deploy(simple_jar=False)

    if time <= 0:
        time = 9223372036854775807  # Max long value in Java

    # Write YAML file
    with NamedTemporaryFile(mode='w', suffix='.yaml', delete=False) as yaml_file:
        topology_flux_dict = topology_class.to_flux_dict(name)
        topology_flux_dict['config'] = storm_options
        yaml.safe_dump(topology_flux_dict, yaml_file)
        cmd = ('storm jar {jar} org.apache.storm.flux.Flux --local --no-splash '
               '--sleep {time} {yaml}'.format(jar=topology_jar,
                                              time=time,
                                              yaml=yaml_file.name))
        local(cmd)
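A minimal, hypothetical sketch of the pattern Example #1 is built around: dump a config dict to a named temporary YAML file and hand its path to an external command (a plain subprocess call stands in for the project-specific local() helper; the function and command names below are illustrative assumptions).

import subprocess
from tempfile import NamedTemporaryFile

import yaml

def run_with_yaml_config(config, command_template):
    # delete=False so the file still exists when the external process opens it
    with NamedTemporaryFile(mode='w', suffix='.yaml', delete=False) as yaml_file:
        yaml.safe_dump(config, yaml_file, default_flow_style=False)
    # e.g. command_template = 'sometool --config {yaml}'
    subprocess.run(command_template.format(yaml=yaml_file.name), shell=True, check=True)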
Example #2
def do_run(problem,
           hparam_set: str = None,
           extra_hparams: dict = None,
           progress: bool = False,
           cuda: bool = False,
           device: str = None,
           log_dir: str = None,
           resume: bool = False,
           start_epoch: int = None,
           checkpoint_prefix: str = 'checkpoint',
           **kwargs):
    problem_cls = registry.get_problem(problem)
    if not hparam_set:
        hparam_set_list = registry.get_problem_hparam(problem)
        assert hparam_set_list
        hparam_set = hparam_set_list[0]

    hparams = registry.get_hparam(hparam_set)()
    if extra_hparams:
        hparams.update(extra_hparams)

    cuda = cuda and torch.cuda.is_available()
    if not cuda:
        device = 'cpu'

    if log_dir:
        os.makedirs(log_dir, exist_ok=True)
        if os.listdir(log_dir):
            warnings.warn('Directory "{}" not empty!'.format(log_dir))

        hparams_file_path = os.path.join(log_dir, 'hparams.yaml')
        args_file_path = os.path.join(log_dir, 'args.yaml')

        args = {
            'problem': problem,
            'seed': kwargs.get('seed', None),
            'checkpoint_prefix': checkpoint_prefix,
        }

        with open(hparams_file_path, 'w') as hparams_file, \
             open(args_file_path, 'w') as args_file:
            yaml.safe_dump(hparams.__dict__,
                           stream=hparams_file,
                           default_flow_style=False)
            yaml.safe_dump(args, stream=args_file, default_flow_style=False)

    problem = problem_cls(hparams,
                          argparse.Namespace(**kwargs),
                          log_dir,
                          device=device,
                          show_progress=progress,
                          checkpoint_prefix=checkpoint_prefix)

    if resume:
        problem.load_checkpoint(log_dir, epoch=start_epoch)

    problem.run()
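Assuming the hparams.yaml/args.yaml layout written above, the files can be read back with yaml.safe_load when resuming a run; a rough, illustrative counterpart (the helper name is an assumption, not part of the original code):

import os

import yaml

def load_run_metadata(log_dir):
    # Mirror of the dump above: recover hparams and args from the log directory
    with open(os.path.join(log_dir, 'hparams.yaml')) as hparams_file:
        hparams = yaml.safe_load(hparams_file)
    with open(os.path.join(log_dir, 'args.yaml')) as args_file:
        args = yaml.safe_load(args_file)
    return hparams, args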
Example #3
def run_local_topology(
    name=None, env_name=None, time=0, options=None, config_file=None
):
    """Run a topology locally using Flux and `storm jar`."""
    name, topology_file = get_topology_definition(name, config_file=config_file)
    config = get_config(config_file=config_file)
    env_name, env_config = get_env_config(env_name, config_file=config_file)
    topology_class = get_topology_from_file(topology_file)

    set_topology_serializer(env_config, config, topology_class)

    storm_options = resolve_options(
        options, env_config, topology_class, name, local_only=True
    )
    if storm_options["topology.acker.executors"] != 0:
        storm_options["topology.acker.executors"] = 1
    storm_options["topology.workers"] = 1

    # Set parallelism based on env_name if necessary
    for spec in topology_class.specs:
        if isinstance(spec.par, dict):
            spec.par = spec.par.get(env_name)

    # Check Storm version is the same
    local_version = local_storm_version()
    project_version = storm_lib_version()
    if local_version != project_version:
        raise ValueError(
            "Local Storm version, {}, is not the same as the "
            "version in your project.clj, {}. The versions must "
            "match.".format(local_version, project_version)
        )

    # Prepare a JAR that has Storm dependencies packaged
    topology_jar = jar_for_deploy(simple_jar=False)

    if time <= 0:
        time = 9223372036854775807  # Max long value in Java

    # Write YAML file
    with show("output"):
        with NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as yaml_file:
            topology_flux_dict = topology_class.to_flux_dict(name)
            topology_flux_dict["config"] = storm_options
            if yaml.version_info < (0, 15):
                yaml.safe_dump(topology_flux_dict, yaml_file, default_flow_style=False)
            else:
                yml = yaml.YAML(typ="safe", pure=True)
                yml.default_flow_style = False
                yml.dump(topology_flux_dict, yaml_file)
            cmd = (
                "storm jar {jar} org.apache.storm.flux.Flux --local --no-splash "
                "--sleep {time} {yaml}".format(
                    jar=topology_jar, time=time, yaml=yaml_file.name
                )
            )
            local(cmd)
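Example #3 branches on yaml.version_info to pick between the legacy module-level API and ruamel.yaml's newer YAML() object; a condensed sketch of just that branch, assuming `import ruamel.yaml as yaml`:

import ruamel.yaml as yaml

def dump_block_yaml(data, stream):
    # ruamel.yaml < 0.15 still exposes the PyYAML-style module functions
    if yaml.version_info < (0, 15):
        yaml.safe_dump(data, stream, default_flow_style=False)
    else:
        # newer ruamel.yaml prefers an explicit YAML instance
        yml = yaml.YAML(typ='safe', pure=True)
        yml.default_flow_style = False
        yml.dump(data, stream)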
Example #4
 def __init__(self, parameters, target_file_path):
     export = {"parameters": {"hexaa_service_entityids": parameters}}
     with open(target_file_path, 'w') as yaml_file:
         ryaml.safe_dump(export,
                         yaml_file,
                         default_flow_style=False,
                         allow_unicode=True,
                         indent=4,
                         width=float('inf'))
Example #5
 def save(self, filename):
     filename = pathlib.Path(filename)
     if filename.suffix == '.json':
         filename.write_text(json.dumps(dict(self)))
     elif filename.suffix in ('.yml', '.yaml'):
         import ruamel.yaml as yaml
         with filename.open('w') as f:
             yaml.safe_dump(dict(self), f)
     else:
         raise NotImplementedError(filename.suffix)
Example #6
def dump_yaml(yaml_name, data: dict):
    """
    Write dictionary-format data to a YAML file.
    :param yaml_name: name of the YAML file to be created
    :param data: data in dictionary form
    :return: no return value
    """
    # Create the YAML file (e.g. env.yaml) with write access and dump the data into it
    with open(yaml_name, "w") as f:
        yaml.safe_dump(data=data, stream=f)
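For instance (illustrative data only):

env = {"base_url": "http://localhost:8080", "timeout": 30}
dump_yaml("env.yaml", env)
# with PyYAML 5.1+ (default_flow_style=False by default), env.yaml then contains:
#   base_url: http://localhost:8080
#   timeout: 30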
Example #7
    def migrate(self, recipe_dir: str, attrs: "AttrsTypedDict",
                **kwargs: Any) -> "MigrationUidTypedDict":
        with indir(recipe_dir + "/.."):
            self.set_build_number("recipe/meta.yaml")
            with open("conda-forge.yml", "r") as f:
                y = safe_load(f)
            y.update(self.additional_keys)
            with open("conda-forge.yml", "w") as f:
                safe_dump(y, f)

        return super().migrate(recipe_dir, attrs, **kwargs)
Example #8
def set_definitions_version(new_version, version_file='./VERSION'):
    '''Update the version information stored in 'version_file'.

    The new version must be a whole number. If 'version_file' doesn't exist,
    it will be created.

    '''
    version_info = {'version': new_version}
    with open(version_file, 'w') as f:
        # If 'default_flow_style' is True (the default) then the output here
        # will look like "{version: 0}" instead of "version: 0".
        yaml.safe_dump(version_info, f, default_flow_style=False)
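A quick sketch of the difference the comment refers to, with both styles spelled out explicitly rather than relying on the library default:

import yaml

version_info = {'version': 0}
# Block style, as used in the example above
print(yaml.safe_dump(version_info, default_flow_style=False), end='')  # version: 0
# Flow style, which the comment warns against for this file
print(yaml.safe_dump(version_info, default_flow_style=True), end='')   # {version: 0}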
Example #9
def to_speaker():
    data = yaml.safe_load(read_text('ttslib.data', 'supported.yaml'))

    d0 = dict()
    for platform, d in data.items():
        for lang, speakers in d.items():
            if speakers:
                d0.setdefault(lang,
                              dict.fromkeys(data.keys()))[platform] = speakers

    with open('../ttslib/data/to_speaker.yaml', 'w') as f:
        yaml.safe_dump(d0, f)
Example #10
def set_definitions_version(new_version, version_file='./VERSION'):
    '''Update the version information stored in 'version_file'.

    The new version must be a whole number. If 'version_file' doesn't exist,
    it will be created.

    '''
    version_info = {'version': new_version}
    with open(version_file, 'w') as f:
        # If 'default_flow_style' is True (the default) then the output here
        # will look like "{version: 0}" instead of "version: 0".
        yaml.safe_dump(version_info, f, default_flow_style=False)
Example #11
 def _set_data(self, code: str) -> NoReturn:
     if len(code.split()) > 0:
         cwd = Path(self._cwl_executor.file_manager.get_files_uri().path)
         data = self._preprocess_data(yaml.load(code, Loader=yaml.Loader))
         self._cwl_executor.validate_input_files(data, cwd)
         code_stream = StringIO()
         yaml.safe_dump(data, code_stream)
         self._yaml_input_data = code_stream.getvalue()
         self.send_response(self.iopub_socket, 'stream', {
             'name': 'stdout',
             'text': 'Add data in memory'
         })
Example #12
    def run_project_config(self, *, project_config=None, **kwargs):

        # First load the project.conf and substitute {project_dir}
        #
        # Save the original project.conf, because we will run more than
        # once in the same temp directory
        #
        project_directory = kwargs["project"]
        project_filename = os.path.join(project_directory, "project.conf")
        project_backup = os.path.join(project_directory, "project.conf.backup")
        project_load_filename = project_filename

        if not os.path.exists(project_backup):
            shutil.copy(project_filename, project_backup)
        else:
            project_load_filename = project_backup

        with open(project_load_filename) as f:
            config = f.read()
        config = config.format(project_dir=project_directory)

        if project_config is not None:

            # If a custom project configuration dictionary was
            # specified, composite it on top of the already
            # substituted base project configuration
            #
            base_config = _yaml.load_data(config)

            # In order to leverage _yaml.composite_dict(), both
            # dictionaries need to be loaded via _yaml.load_data() first
            #
            with tempfile.TemporaryDirectory(
                    dir=project_directory) as scratchdir:

                temp_project = os.path.join(scratchdir, "project.conf")
                with open(temp_project, "w") as f:
                    yaml.safe_dump(project_config, f)

                project_config = _yaml.load(temp_project,
                                            shortname="project.conf")

            project_config._composite(base_config)

            _yaml.roundtrip_dump(base_config, project_filename)

        else:

            # Otherwise, just dump it as is
            with open(project_filename, "w") as f:
                f.write(config)

        return super().run(**kwargs)
Example #13
    def save_server(self, server_id, data=None):
        if not data:
            data = self.data[server_id]

        try:
            if not os.path.exists("data/{}".format(server_id)):
                self.add_server(server_id)

            with open("data/{}/config.yml".format(server_id), "w") as config_fh:
                yaml.safe_dump(data["config"], config_fh)
        except Exception:
            log.exception("Error saving server '{}'".format(server_id))
Example #14
def save_context(context, **kwargs):
    project_dir = yield
    with open(os.path.join(project_dir, '.cookiecutterrc'),
              'w',
              encoding='utf8') as fh:
        fh.write(
            "# Generated by cookiepatcher, a small shim around cookiecutter (pip install cookiepatcher)\n\n"
        )
        yaml.safe_dump(context,
                       fh,
                       indent=4,
                       default_flow_style=False,
                       allow_unicode=True)
Example #15
    def save_trials(self, trials_dir, num=1):
        trials_dir = os.path.abspath(trials_dir)
        os.makedirs(trials_dir, exist_ok=True)
        for trial in self.trials(num=num):
            name = '{}-{}-{}'.format(self.exp_class.__name__, trial['name'],
                                     time.time())
            t_dir = os.path.join(trials_dir, name)

            os.makedirs(t_dir, exist_ok=True)
            with open(os.path.join(t_dir, 'trial.yaml'), 'w') as f:
                trial['name'] = name
                trial['log_dir'] = t_dir
                yaml.safe_dump(trial, stream=f, default_flow_style=False)
Example #16
def renameElements(filename, suffix, debug):
    print("Renaming elemnts in filename: \"{}\"".format(filename))
    elementNamesMap = {}
    with root_open(filename, "UPDATE") as f:
        if debug:
            print("f.ls()  pre:")
            f.ls()
        # Cannot just iterate over GetListOfKeys because the hash list is updated when an element is added to a file...
        # Instead, we copy the keys so the iterable is not updated (this should really be treated as a ROOT bug...)
        keys = f.GetListOfKeys()
        simpleListOfKeys = []
        for key in keys:
            simpleListOfKeys.append(key.GetName())

        # Loop over the available keys. If it is the correction task, then unpack the component lists
        for key in simpleListOfKeys:
            if key.endswith("_" + suffix):
                print(
                    "Skipping the processing of element {} since it has already been processed!"
                    .format(key))
                # Save the name in the proper format to be used in the YAML map
                elementNamesMap[key.replace("_" + suffix, "")] = [key]
                # Don't apply the suffix twice
                continue

            element = f.Get(key)
            print("Processing element: {}".format(element.GetName()))
            # Remove the existing element from the file
            f.Delete(element.GetName() + ";*")
            # Rewrite the name
            if "AliEmcalCorrectionTask" in element.GetName():
                for component in element:
                    elementNamesMap = rewriteWithDifferentName(
                        component, suffix, elementNamesMap)
            else:
                elementNamesMap = rewriteWithDifferentName(
                    element, suffix, elementNamesMap)

        if debug:
            print("keys: {}".format(keys.GetEntries()))
            print("f.ls() post:")
            f.ls()

    # Save the map for use with the comparison script
    # Write the reference map to the same directory as the reference file
    yamlReferenceMapLocation = os.path.join(os.path.dirname(filename),
                                            "referenceMap.yaml")
    print("Writing yaml reference map to \"{}\"".format(
        yamlReferenceMapLocation))
    with open(yamlReferenceMapLocation, "w+b") as f:
        yaml.safe_dump(elementNamesMap, f, default_flow_style=False)
Example #17
def get_alias():
    data = yaml.safe_load(read_text('ttslib.data', 'to_speaker.yaml'))

    d = dict()
    for k in data.keys():
        match_obj = re.fullmatch(r'(..)_(..)', k)
        if match_obj:
            for k1 in match_obj.groups():
                d.setdefault(k1, []).append(k)
        else:
            d.setdefault(k[:2], []).append(k)

    with open('../ttslib/data/alias.yaml', 'w') as f:
        yaml.safe_dump(d, f)
Example #18
def yaml_dump(filepath, conf):
    """Write a configuration to a YAML configuration file

    Arguments:
        filepath {Path} -- Configuration file's path to write to
        conf {dict} -- Configuration values
    """
    with filepath.open('w') as f:
        f.write("""#
# This file was generated using mkctf utility. Do not edit it manually unless
# you know exactly what you're doing. #PEBCAK
#
""")
        yaml.safe_dump(conf, f, default_flow_style=False)
Example #19
def dump_yaml(data, *path):
    def convert(obj):
        if isinstance(obj, dict):
            obj = {k: v for k, v in obj.items() if not k.startswith('_')}
            return {convert(k): convert(v) for k, v in obj.items()}
        if isinstance(obj, list):
            return [convert(x) for x in obj]
        if isinstance(obj, type):
            return obj.__name__
        return obj

    filename = os.path.join(*path)
    ensure_directory(os.path.dirname(filename))
    with open(filename, 'w') as file_:
        yaml.safe_dump(convert(data), file_, default_flow_style=False)
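A hypothetical call showing what convert() strips and rewrites before dumping (underscore-prefixed keys are dropped, classes become their names):

config = {
    'model': dict,                               # a class: dumped as the string 'dict'
    '_cache': object(),                          # underscore-prefixed: dropped
    'layers': [{'units': 64, '_scratch': None}],
}
dump_yaml(config, '/tmp', 'run', 'config.yaml')
# /tmp/run/config.yaml then contains, roughly:
#   layers:
#   - units: 64
#   model: dict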
Example #20
    def write_dois_debian(dois, file):
        parsed_yaml = parse_yaml(file)
        identifiers = parsed_yaml['identifiers']
        # if doi doesn't exist
        if 'doi' not in identifiers:
            identifiers['doi'] = [dois.pop()]

        for doi in dois:
            identifiers['doi'].append(doi)

        parsed_yaml['identifiers'] = identifiers

        with open(file, 'w') as outfile:
            # do something fancy like indentation/mapping/sequence/offset
            yaml.safe_dump(parsed_yaml, outfile, default_flow_style=False)
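The comment alludes to the finer indentation/sequence/offset controls that ruamel.yaml offers; a hedged sketch of what that could look like if ruamel.yaml were used here instead of plain safe_dump (reusing the file and parsed_yaml names from above):

from ruamel.yaml import YAML

yml = YAML()
# two-space mappings, four-space sequences with the dash offset by two
yml.indent(mapping=2, sequence=4, offset=2)
with open(file, 'w') as outfile:
    yml.dump(parsed_yaml, outfile)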
Example #21
    def test_no_change(self, tmpdir, spec, config_file, new_config):
        with open(config_file, "w") as f:
            yaml.safe_dump(new_config,
                           stream=f,
                           default_flow_style=False,
                           encoding="utf-8")

        beer_garden.config.load(["-c", str(config_file)], force=True)
        assert beer_garden.config.get("log.level") == "INFO"

        with open(config_file) as f:
            new_config_value = yaml.safe_load(f)

        assert new_config_value == new_config
        assert len(os.listdir(tmpdir)) == 1
Example #22
def run_local_topology(name=None, env_name=None, time=0, options=None):
    """Run a topology locally using Flux and `storm jar`."""
    name, topology_file = get_topology_definition(name)
    config = get_config()
    env_name, env_config = get_env_config(env_name)
    topology_class = get_topology_from_file(topology_file)

    set_topology_serializer(env_config, config, topology_class)

    storm_options = resolve_options(options, env_config, topology_class, name)
    if storm_options['topology.acker.executors'] != 0:
        storm_options['topology.acker.executors'] = 1
    storm_options['topology.workers'] = 1

    # Check Storm version is the same
    local_version = local_storm_version()
    project_version = storm_lib_version()
    if local_version != project_version:
        raise ValueError('Local Storm version, {}, is not the same as the '
                         'version in your project.clj, {}. The versions must '
                         'match.'.format(local_version, project_version))

    # Prepare a JAR that has Storm dependencies packaged
    topology_jar = jar_for_deploy(simple_jar=False)

    if time <= 0:
        time = 9223372036854775807  # Max long value in Java

    # Write YAML file
    with show('output'):
        with NamedTemporaryFile(mode='w', suffix='.yaml',
                                delete=False) as yaml_file:
            topology_flux_dict = topology_class.to_flux_dict(name)
            topology_flux_dict['config'] = storm_options
            if yaml.version_info < (0, 15):
                yaml.safe_dump(topology_flux_dict,
                               yaml_file,
                               default_flow_style=False)
            else:
                yml = yaml.YAML(typ='safe', pure=True)
                yml.default_flow_style = False
                yml.dump(topology_flux_dict, yaml_file)
            cmd = (
                'storm jar {jar} org.apache.storm.flux.Flux --local --no-splash '
                '--sleep {time} {yaml}'.format(jar=topology_jar,
                                               time=time,
                                               yaml=yaml_file.name))
            local(cmd)
Example #23
def test_model_hot_reloading(app, rasa_default_train_data):
    query = "http://dummy-uri/parse?q=hello&project=my_keyword_model"
    response = yield app.get(query)
    assert response.code == 404, "Project should not exist yet"
    train_u = "http://dummy-uri/train?project=my_keyword_model"
    model_config = {"pipeline": "keyword", "data": rasa_default_train_data}
    model_str = yaml.safe_dump(model_config, default_flow_style=False,
                               allow_unicode=True)
    response = app.post(train_u,
                        headers={b"Content-Type": b"application/x-yml"},
                        data=model_str)
    time.sleep(3)
    app.flush()
    response = yield response
    assert response.code == 200, "Training should end successfully"

    response = app.post(train_u,
                        headers={b"Content-Type": b"application/json"},
                        data=json.dumps(model_config))
    time.sleep(3)
    app.flush()
    response = yield response
    assert response.code == 200, "Training should end successfully"

    response = yield app.get(query)
    assert response.code == 200, "Project should now exist after it got trained"
Example #24
def _write(f, data):
    try:
        _f = open(f, "w")
    except IOError:
        raise KeyError
    else:
        return yaml.safe_dump(data, _f, default_flow_style=False)
Example #25
    def write(self):
        ''' write to file '''
        if not self.filename:
            raise YeditException('Please specify a filename.')

        if self.backup and self.file_exists():
            shutil.copy(self.filename, '{}{}'.format(self.filename, self.backup_ext))

        # Try to set format attributes if supported
        try:
            self.yaml_dict.fa.set_block_style()
        except AttributeError:
            pass

        # Try to use RoundTripDumper if supported.
        if self.content_type == 'yaml':
            try:
                Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
            except AttributeError:
                Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
        elif self.content_type == 'json':
            Yedit._write(self.filename, json.dumps(self.yaml_dict, indent=4, sort_keys=True))
        else:
            raise YeditException('Unsupported content_type: {}.'.format(self.content_type) +
                                 'Please specify a content_type of yaml or json.')

        return (True, self.yaml_dict)
Example #26
    def write(self):
        ''' write to file '''
        if not self.filename:
            raise YeditException('Please specify a filename.')

        if self.backup and self.file_exists():
            shutil.copy(self.filename, self.filename + '.orig')

        # Try to set format attributes if supported
        try:
            self.yaml_dict.fa.set_block_style()
        except AttributeError:
            pass

        # Try to use RoundTripDumper if supported.
        if self.content_type == 'yaml':
            try:
                Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
            except AttributeError:
                Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
        elif self.content_type == 'json':
            Yedit._write(self.filename, json.dumps(self.yaml_dict, indent=4, sort_keys=True))
        else:
            raise YeditException('Unsupported content_type: {}.'.format(self.content_type) +
                                 'Please specify a content_type of yaml or json.')

        return (True, self.yaml_dict)
Example #27
    def test_catastrophe(self, capsys, tmpdir, spec, config_file, old_config):
        with open(config_file, "w") as f:
            yaml.safe_dump(old_config,
                           stream=f,
                           default_flow_style=False,
                           encoding="utf-8")

        with patch("os.rename", Mock(side_effect=[Mock(), ValueError])):
            with pytest.raises(ValueError):
                beer_garden.config.load(["-c", str(config_file)], force=True)

        # Make sure we printed something
        assert capsys.readouterr().err

        # Both the tmp file and the old JSON should still be there.
        assert len(os.listdir(tmpdir)) == 2
Example #28
def test_model_hot_reloading(app, rasa_default_train_data):
    query = "http://dummy-uri/parse?q=hello&project=my_keyword_model"
    response = yield app.get(query)
    assert response.code == 404, "Project should not exist yet"
    train_u = "http://dummy-uri/train?project=my_keyword_model"
    model_config = {"pipeline": "keyword", "data": rasa_default_train_data}
    model_str = yaml.safe_dump(model_config,
                               default_flow_style=False,
                               allow_unicode=True)
    response = app.post(train_u,
                        headers={b"Content-Type": b"application/x-yml"},
                        data=model_str)
    time.sleep(3)
    app.flush()
    response = yield response
    assert response.code == 200, "Training should end successfully"

    response = app.post(train_u,
                        headers={b"Content-Type": b"application/json"},
                        data=json.dumps(model_config))
    time.sleep(3)
    app.flush()
    response = yield response
    assert response.code == 200, "Training should end successfully"

    response = yield app.get(query)
    assert response.code == 200, "Project should now exist after it got trained"
Example #29
def format_base64_as_yaml(source: str) -> str:
    s = yaml.safe_dump(yaml.safe_load(base64.b64decode(source)),
                       default_flow_style=False)

    if not isinstance(s, str):
        raise AssertionError("cannot format base64 string to yaml")
    return s
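A quick, hypothetical round trip through the helper above: the input is a base64-encoded JSON/YAML document, the output is normalized block-style YAML (the payload below is illustrative).

import base64

encoded = base64.b64encode(b'{"replicas": 3, "image": "nginx"}').decode()
print(format_base64_as_yaml(encoded), end='')
# image: nginx
# replicas: 3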
Example #30
    def write(self):
        ''' write to file '''
        if not self.filename:
            raise YeditException('Please specify a filename.')

        if self.backup and self.file_exists():
            shutil.copy(self.filename, self.filename + '.orig')

        # Try to set format attributes if supported
        try:
            self.yaml_dict.fa.set_block_style()
        except AttributeError:
            pass

        # Try to use RoundTripDumper if supported.
        try:
            Yedit._write(
                self.filename,
                yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
        except AttributeError:
            Yedit._write(
                self.filename,
                yaml.safe_dump(self.yaml_dict, default_flow_style=False))

        return (True, self.yaml_dict)
Example #31
    def inner(self, req, resp, *args, **kwargs):
        try:
            data = func(self, req, resp, *args, **kwargs)
        except Exception as e:
            log.exception("Error in API call")

            resp.status = "500 Internal Server Error"
            resp.content_type = "text"
            resp.body = str(e)
            return

        if not data:
            return

        accepts = req.get_header("Accepts")

        if not accepts:
            accepts = "application/json"

        accepts = accepts.lower()

        if accepts == "application/x-yaml":
            resp.content_type = accepts
            resp.body = yaml.safe_dump(data)
        elif accepts == "application/json":
            resp.content_type = accepts
            resp.body = json.dumps(data)
        elif accepts == "application/xml":
            resp.content_type = accepts
            resp.body = dicttoxml(data)
        else:
            resp.status = "400 Bad Request"
            resp.content_type = "text"
            resp.body = "Unknown or unsupported content type: {}\n\n" \
                        "We support application/json, application/xml or application/x-yaml".format(accepts)
Example #32
    def get_styles(self, out_file=None, skip_cell_formatting=True):
        formatting = self._to_styles(self.wb, skip_cell_formatting)

        if out_file is not None:
            return Path(out_file).write_text(yaml.safe_dump(formatting))
        else:
            return formatting
Example #33
def write_file_config(file_config):
    with tempfile.NamedTemporaryFile("w+",
                                     suffix="_tmp_config_file.yml",
                                     delete=False) as f:
        f.write(yaml.safe_dump(file_config))
        f.flush()
        return f
Example #34
    def migrate(self, recipe_dir: str, attrs: "AttrsTypedDict",
                **kwargs: Any) -> "MigrationUidTypedDict":
        with indir(recipe_dir + "/.."):
            self.set_build_number("recipe/meta.yaml")
            with open("conda-forge.yml", "r") as f:
                y = safe_load(f)
            if "provider" not in y:
                y["provider"] = {}
            for k, v in self.arches.items():
                if k not in y["provider"]:
                    y["provider"][k] = v

            with open("conda-forge.yml", "w") as f:
                safe_dump(y, f)

        return super().migrate(recipe_dir, attrs, **kwargs)
Example #35
def write_file_config(file_config):
    with tempfile.NamedTemporaryFile("w+",
                                     suffix="_tmp_config_file.yml",
                                     delete=False) as f:
        f.write(yaml.safe_dump(file_config))
        f.flush()
        return f
Example #36
def parse_shannon_radii():
    with open('periodic_table.yaml', 'r') as f:
        data = yaml.load(f)
    from openpyxl import load_workbook
    import collections
    wb = load_workbook('Shannon Radii.xlsx')
    print(wb.get_sheet_names())
    sheet = wb["Sheet1"]
    i = 2
    radii = collections.defaultdict(dict)
    while sheet["E%d" % i].value:
        if sheet["A%d" % i].value:
            el = sheet["A%d" % i].value
        if sheet["B%d" % i].value:
            charge = int(sheet["B%d" % i].value)
            radii[el][charge] = dict()
        if sheet["C%d" % i].value:
            cn = sheet["C%d" % i].value
            if cn not in radii[el][charge]:
                radii[el][charge][cn] = dict()

        if sheet["D%d" % i].value is not None:
            spin = sheet["D%d" % i].value
        else:
            spin = ""
        # print("%s - %d - %s" % (el, charge, cn))

        radii[el][charge][cn][spin] = {
            "crystal_radius": float(sheet["E%d" % i].value),
            "ionic_radius": float(sheet["F%d" % i].value),
        }
        i += 1

    for el in radii.keys():
        if el in data:
            data[el]["Shannon radii"] = dict(radii[el])

    with open('periodic_table.yaml', 'w') as f:
        yaml.safe_dump(data, f)
    with open('periodic_table.json', 'w') as f:
        json.dump(data, f)
Example #37
def dump(data, stream=None, **kwargs):
  # type: (Any, Optional[IO[AnyStr]], Any) -> str
  """Dumps the given YAML data to the stream.

  Args:
    data: The YAML serializable Python object to dump.
    stream: The stream to write the data to or None to return it as a string.
    **kwargs: Other arguments to the dump method.

  Returns:
    The string representation of the YAML data if stream is None.
  """
  return yaml.safe_dump(
      data, stream=stream, default_flow_style=False, indent=2, **kwargs)
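Illustrating the two modes the docstring describes: with stream=None the serialized YAML comes back as a string, otherwise it is written to the stream (hypothetical data).

import io

# String mode: no stream given, the YAML text is returned
text = dump({'name': 'demo', 'replicas': 2})
assert 'replicas: 2' in text

# Stream mode: the data is written to the file-like object instead
buf = io.StringIO()
dump({'name': 'demo', 'replicas': 2}, stream=buf)
assert 'name: demo' in buf.getvalue()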
Example #38
    def to_format(self, f_format='json', **kwargs):
        """
        returns a String representation in the given format

        Args:
            f_format (str): the format to output to (default json)
        """
        if f_format == 'json':
            return json.dumps(self.to_dict(), default=DATETIME_HANDLER, **kwargs)
        elif f_format == 'yaml':
            # start with the JSON format, and convert to YAML
            return yaml.safe_dump(self.to_dict(), default_flow_style=YAML_STYLE,
                             allow_unicode=True)
        else:
            raise ValueError('Unsupported format {}'.format(f_format))
Example #39
    def write(self):
        ''' write to file '''
        if not self.filename:
            raise YeditException('Please specify a filename.')

        if self.backup and self.file_exists():
            shutil.copy(self.filename, self.filename + '.orig')

        # Try to set format attributes if supported
        try:
            self.yaml_dict.fa.set_block_style()
        except AttributeError:
            pass

        # Try to use RoundTripDumper if supported.
        try:
            Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
        except AttributeError:
            Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))

        return (True, self.yaml_dict)
Example #40
    def create_tmp_file_from_contents(rname, data, ftype='yaml'):
        ''' create a file in tmp with name and contents'''

        tmp = Utils.create_tmpfile(prefix=rname)

        if ftype == 'yaml':
            # AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
            # pylint: disable=no-member
            if hasattr(yaml, 'RoundTripDumper'):
                Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
            else:
                Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))

        elif ftype == 'json':
            Utils._write(tmp, json.dumps(data))
        else:
            Utils._write(tmp, data)

        # Register cleanup when module is done
        atexit.register(Utils.cleanup, [tmp])
        return tmp
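The hasattr() check above is a small compatibility shim: ruamel.yaml exposes RoundTripDumper while plain PyYAML does not. A stripped-down sketch of the same idea, assuming `yaml` may be either library:

def dump_compat(data):
    # Prefer ruamel.yaml's round-trip dumper when it is available,
    # otherwise fall back to PyYAML's safe_dump in block style
    if hasattr(yaml, 'RoundTripDumper'):
        return yaml.dump(data, Dumper=yaml.RoundTripDumper)
    return yaml.safe_dump(data, default_flow_style=False)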
Example #41
def generate_biobox_file_content(args):
    output = {"version" : "0.9.0", "arguments" : args}
    return yaml.safe_dump(output, default_flow_style = False)
Example #42
def save_context(context, **kwargs):
    project_dir = yield
    with io.open(os.path.join(project_dir, '.cookiecutterrc'), 'w', encoding='utf8') as fh:
        fh.write(u"# Generated by cookiepatcher, a small shim around cookiecutter (pip install cookiepatcher)\n\n")
        yaml.safe_dump(context, fh, indent=4, default_flow_style=False, allow_unicode=True)
Example #43
def main(argsl=None,                   # type: List[str]
         args=None,                    # type: argparse.Namespace
         job_order_object=None,        # type: MutableMapping[Text, Any]
         stdin=sys.stdin,              # type: IO[Any]
         stdout=None,                  # type: Union[TextIO, codecs.StreamWriter]
         stderr=sys.stderr,            # type: IO[Any]
         versionfunc=versionstring,    # type: Callable[[], Text]
         logger_handler=None,          #
         custom_schema_callback=None,  # type: Callable[[], None]
         executor=None,                # type: Callable[..., Tuple[Dict[Text, Any], Text]]
         loadingContext=None,          # type: LoadingContext
         runtimeContext=None           # type: RuntimeContext
        ):  # type: (...) -> int
    if not stdout:  # force UTF-8 even if the console is configured differently
        if (hasattr(sys.stdout, "encoding")  # type: ignore
                and sys.stdout.encoding != 'UTF-8'):  # type: ignore
            if six.PY3 and hasattr(sys.stdout, "detach"):
                stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
            else:
                stdout = codecs.getwriter('utf-8')(sys.stdout)  # type: ignore
        else:
            stdout = cast(TextIO, sys.stdout)  # type: ignore

    _logger.removeHandler(defaultStreamHandler)
    if logger_handler:
        stderr_handler = logger_handler
    else:
        stderr_handler = logging.StreamHandler(stderr)
    _logger.addHandler(stderr_handler)
    # pre-declared for finally block
    workflowobj = None
    try:
        if args is None:
            if argsl is None:
                argsl = sys.argv[1:]
            args = arg_parser().parse_args(argsl)

        if runtimeContext is None:
            runtimeContext = RuntimeContext(vars(args))
        else:
            runtimeContext = runtimeContext.copy()

        # If on Windows platform, a default Docker Container is used if not
        # explicitly provided by the user
        if onWindows() and not runtimeContext.default_container:
            # This docker image is a minimal alpine image with bash installed
            # (size 6 mb). source: https://github.com/frol/docker-alpine-bash
            runtimeContext.default_container = windows_default_container_id

        # If caller parsed its own arguments, it may not include every
        # cwltool option, so fill in defaults to avoid crashing when
        # dereferencing them in args.
        for key, val in six.iteritems(get_default_args()):
            if not hasattr(args, key):
                setattr(args, key, val)

        rdflib_logger = logging.getLogger("rdflib.term")
        rdflib_logger.addHandler(stderr_handler)
        rdflib_logger.setLevel(logging.ERROR)
        if args.quiet:
            _logger.setLevel(logging.WARN)
        if runtimeContext.debug:
            _logger.setLevel(logging.DEBUG)
            rdflib_logger.setLevel(logging.DEBUG)
        if args.timestamps:
            formatter = logging.Formatter("[%(asctime)s] %(message)s",
                                          "%Y-%m-%d %H:%M:%S")
            stderr_handler.setFormatter(formatter)

        if args.version:
            print(versionfunc())
            return 0
        _logger.info(versionfunc())

        if args.print_supported_versions:
            print("\n".join(supported_cwl_versions(args.enable_dev)))
            return 0

        if not args.workflow:
            if os.path.isfile("CWLFile"):
                setattr(args, "workflow", "CWLFile")
            else:
                _logger.error("")
                _logger.error("CWL document required, no input file was provided")
                arg_parser().print_help()
                return 1
        if args.relax_path_checks:
            command_line_tool.ACCEPTLIST_RE = command_line_tool.ACCEPTLIST_EN_RELAXED_RE

        if args.ga4gh_tool_registries:
            ga4gh_tool_registries[:] = args.ga4gh_tool_registries
        if not args.enable_ga4gh_tool_registry:
            del ga4gh_tool_registries[:]

        if custom_schema_callback:
            custom_schema_callback()
        elif args.enable_ext:
            res = pkg_resources.resource_stream(__name__, 'extensions.yml')
            use_custom_schema("v1.0", "http://commonwl.org/cwltool", res.read())
            res.close()
        else:
            use_standard_schema("v1.0")
        # Call the function from provenance.py if the provenance flag is enabled.
        if args.provenance:
            if not args.compute_checksum:
                _logger.error("--provenance incompatible with --no-compute-checksum")
                return 1

            runtimeContext.research_obj = ResearchObject(
                temp_prefix_ro=args.tmpdir_prefix,
                # Optionals, might be None
                orcid=args.orcid,
                full_name=args.cwl_full_name)



        if loadingContext is None:
            loadingContext = LoadingContext(vars(args))
        else:
            loadingContext = loadingContext.copy()
        loadingContext.research_obj = runtimeContext.research_obj
        loadingContext.disable_js_validation = \
            args.disable_js_validation or (not args.do_validate)
        loadingContext.construct_tool_object = getdefault(
            loadingContext.construct_tool_object, workflow.default_make_tool)
        loadingContext.resolver = getdefault(loadingContext.resolver, tool_resolver)

        uri, tool_file_uri = resolve_tool_uri(
            args.workflow, resolver=loadingContext.resolver,
            fetcher_constructor=loadingContext.fetcher_constructor)

        try_again_msg = "" if args.debug else ", try again with --debug for more information"

        try:
            job_order_object, input_basedir, jobloader = load_job_order(
                args, stdin, loadingContext.fetcher_constructor,
                loadingContext.overrides_list, tool_file_uri)

            if args.overrides:
                loadingContext.overrides_list.extend(load_overrides(
                    file_uri(os.path.abspath(args.overrides)), tool_file_uri))

            document_loader, workflowobj, uri = fetch_document(
                uri, resolver=loadingContext.resolver,
                fetcher_constructor=loadingContext.fetcher_constructor)

            if args.print_deps:
                printdeps(workflowobj, document_loader, stdout, args.relative_deps, uri)
                return 0

            document_loader, avsc_names, processobj, metadata, uri \
                = validate_document(document_loader, workflowobj, uri,
                                    enable_dev=loadingContext.enable_dev,
                                    strict=loadingContext.strict,
                                    preprocess_only=(args.print_pre or args.pack),
                                    fetcher_constructor=loadingContext.fetcher_constructor,
                                    skip_schemas=args.skip_schemas,
                                    overrides=loadingContext.overrides_list,
                                    do_validate=loadingContext.do_validate)
            if args.pack:
                stdout.write(print_pack(document_loader, processobj, uri, metadata))
                return 0
            if args.provenance and runtimeContext.research_obj:
                # Can't really be combined with args.pack at the same time
                runtimeContext.research_obj.packed_workflow(
                    print_pack(document_loader, processobj, uri, metadata))

            if args.print_pre:
                stdout.write(json_dumps(processobj, indent=4))
                return 0

            loadingContext.overrides_list.extend(metadata.get("cwltool:overrides", []))

            tool = make_tool(document_loader, avsc_names,
                             metadata, uri, loadingContext)
            if args.make_template:
                yaml.safe_dump(generate_input_template(tool), sys.stdout,
                               default_flow_style=False, indent=4,
                               block_seq_indent=2)
                return 0

            if args.validate:
                _logger.info("Tool definition is valid")
                return 0

            if args.print_rdf:
                stdout.write(printrdf(tool, document_loader.ctx, args.rdf_serializer))
                return 0

            if args.print_dot:
                printdot(tool, document_loader.ctx, stdout)
                return 0

        except (validate.ValidationException) as exc:
            _logger.error(u"Tool definition failed validation:\n%s", exc,
                          exc_info=args.debug)
            return 1
        except (RuntimeError, WorkflowException) as exc:
            _logger.error(u"Tool definition failed initialization:\n%s", exc,
                          exc_info=args.debug)
            return 1
        except Exception as exc:
            _logger.error(
                u"I'm sorry, I couldn't load this CWL file%s.\nThe error was: %s",
                try_again_msg,
                exc if not args.debug else "",
                exc_info=args.debug)
            return 1

        if isinstance(tool, int):
            return tool
        # If on MacOS platform, TMPDIR must be set to be under one of the
        # shared volumes in Docker for Mac
        # More info: https://dockstore.org/docs/faq
        if sys.platform == "darwin":
            default_mac_path = "/private/tmp/docker_tmp"
            if runtimeContext.tmp_outdir_prefix == DEFAULT_TMP_PREFIX:
                runtimeContext.tmp_outdir_prefix = default_mac_path

        for dirprefix in ("tmpdir_prefix", "tmp_outdir_prefix", "cachedir"):
            if getattr(runtimeContext, dirprefix) and getattr(runtimeContext, dirprefix) != DEFAULT_TMP_PREFIX:
                sl = "/" if getattr(runtimeContext, dirprefix).endswith("/") or dirprefix == "cachedir" \
                        else ""
                setattr(runtimeContext, dirprefix,
                        os.path.abspath(getattr(runtimeContext, dirprefix)) + sl)
                if not os.path.exists(os.path.dirname(getattr(runtimeContext, dirprefix))):
                    try:
                        os.makedirs(os.path.dirname(getattr(runtimeContext, dirprefix)))
                    except Exception as e:
                        _logger.error("Failed to create directory: %s", e)
                        return 1

        if args.cachedir:
            if args.move_outputs == "move":
                runtimeContext.move_outputs = "copy"
            runtimeContext.tmp_outdir_prefix = args.cachedir

        runtimeContext.secret_store = getdefault(runtimeContext.secret_store, SecretStore())
        runtimeContext.make_fs_access = getdefault(runtimeContext.make_fs_access, StdFsAccess)
        try:
            initialized_job_order_object = init_job_order(
                job_order_object, args, tool, jobloader, stdout,
                print_input_deps=args.print_input_deps,
                relative_deps=args.relative_deps,
                make_fs_access=runtimeContext.make_fs_access,
                input_basedir=input_basedir,
                secret_store=runtimeContext.secret_store)
        except SystemExit as err:
            return err.code

        if not executor:
            if args.parallel:
                executor = MultithreadedJobExecutor()
                runtimeContext.select_resources = executor.select_resources
            else:
                executor = SingleJobExecutor()
        assert executor is not None

        try:
            runtimeContext.basedir = input_basedir
            del args.workflow
            del args.job_order

            conf_file = getattr(args, "beta_dependency_resolvers_configuration", None)  # Text
            use_conda_dependencies = getattr(args, "beta_conda_dependencies", None)  # Text

            if conf_file or use_conda_dependencies:
                runtimeContext.job_script_provider = DependenciesConfiguration(args)

            runtimeContext.find_default_container = functools.partial(
                find_default_container,
                default_container=runtimeContext.default_container,
                use_biocontainers=args.beta_use_biocontainers)
            (out, status) = executor(tool,
                                     initialized_job_order_object,
                                     runtimeContext,
                                     logger=_logger)

            if out is not None:
                def loc_to_path(obj):
                    for field in ("path", "nameext", "nameroot", "dirname"):
                        if field in obj:
                            del obj[field]
                    if obj["location"].startswith("file://"):
                        obj["path"] = uri_file_path(obj["location"])

                visit_class(out, ("File", "Directory"), loc_to_path)

                # Unsetting the Generation from final output object
                visit_class(out, ("File", ), MutationManager().unset_generation)

                if isinstance(out, string_types):
                    stdout.write(out)
                else:
                    stdout.write(json_dumps(out, indent=4,  # type: ignore
                                            ensure_ascii=False))
                stdout.write("\n")
                if hasattr(stdout, "flush"):
                    stdout.flush()  # type: ignore

            if status != "success":
                _logger.warning(u"Final process status is %s", status)
                return 1
            _logger.info(u"Final process status is %s", status)
            return 0

        except (validate.ValidationException) as exc:
            _logger.error(u"Input object failed validation:\n%s", exc,
                          exc_info=args.debug)
            return 1
        except UnsupportedRequirement as exc:
            _logger.error(
                u"Workflow or tool uses unsupported feature:\n%s", exc,
                exc_info=args.debug)
            return 33
        except WorkflowException as exc:
            _logger.error(
                u"Workflow error%s:\n%s", try_again_msg, strip_dup_lineno(six.text_type(exc)),
                exc_info=args.debug)
            return 1
        except Exception as exc:
            _logger.error(
                u"Unhandled error%s:\n  %s", try_again_msg, exc, exc_info=args.debug)
            return 1

    finally:
        if args and runtimeContext and runtimeContext.research_obj \
                and args.rm_tmpdir and workflowobj:
            # Add all related CWL files to the research object (RO)
            prov_dependencies = printdeps(
                workflowobj, document_loader, stdout, args.relative_deps, uri,
                runtimeContext.research_obj)
            prov_dep = prov_dependencies[1]
            assert prov_dep
            runtimeContext.research_obj.generate_snapshot(prov_dep)

            runtimeContext.research_obj.close(args.provenance)

        _logger.removeHandler(stderr_handler)
        _logger.addHandler(defaultStreamHandler)
Example #44
 def _write_yaml(self, config_dict, dest):
     with io.open(dest, 'w', encoding='UTF-8') as f:
         yaml.safe_dump(config_dict, f, default_flow_style=False)
Example #45
 def _write_yaml(self, config_dict, dest):
     with open(dest, 'w') as f:
         yaml.safe_dump(config_dict, f, default_flow_style=False)
Example #46
    def run(self, cmd, args, options):
        if args['--example']:
            s = '''
# Example client specification file
# Specification files are in YAML format (a superset of JSON
# with more readable syntax and support for comments) and
# look like this. They may contain comments that begin
# with a # sign.

# Device client model information
device:
    model: "myModel"
    vendor: "myVendor"

# list of dataports that must exist
dataports:
      # this is the absolute minimum needed to specify a
      # dataport.
    - alias: mystring
      # names are created, but not compared
    - name: Temperature
      # aliases, type, and format are created
      # and compared
      alias: temp
      format: float
      unit: °F
    - name: LED Control
      alias: led6
      format: integer
    - alias: config
      # format should be string, and parseable JSON
      format: string/json
      # initial value (if no other value is read back)
      initial: '{"text": "555-555-1234", "email": "*****@*****.**"}'
    - alias: person
      format: string/json
      # JSON schema specified inline (http://json-schema.org/)
      # format must be string/json to do validate
      # you may also specify a string to reference schema in an
      # external file. E.g. jsonschema: personschema.json
      jsonschema: {"title": "Person Schema",
                   "type": "object",
                   "properties": {"name": {"type": "string"}},
                   "required": ["name"]}
      initial: '{"name":"John Doe"}'
    - alias: place
      # A description of the dataport.
      description: 'This is a place I have been'
      # Dataports are not public by default,
      # but if you want to share one with the world
      public: true

    # any dataports not listed but found in the client
    # are ignored. The spec command does not delete things.

# list of script datarules that must exist
scripts:
    # by default, scripts are datarules with
    # names and aliases set to the file name
    - file: test/files/helloworld.lua
    # you can also set them explicitly
    - file: test/files/helloworld.lua
      alias: greeting
    # you can also place lua code inline
    - alias: singleLineScript
      code: debug('hello from inside lua!')
    # multiline lua scripts should start with | and
    # be indented inside the "code:" key.
    - alias: multilineScript
      code: |
        for x=1,10 do
            debug('hello from for loop ' .. x)
        end
    # simple templating for script aliases and
    # content is also supported.
    - file: test/files/convert.lua
      # if <% id %> is embedded in aliases
      # or script content, the --ids parameter must
      # be passed in. The spec command then expects
      # a script or dataport resource per id passed, substituting
      # each ID for <% id %>. In this example, if the command was:
      #
      # $ exo spec mysensor sensorspec.yaml --ids=A,B
      #
      # ...then the spec command looks for *two* script datarules
      # in mysensor, with aliases convertA.lua and convertB.lua.
      # Additionally, any instances of <% id %> in the content of
      # convert.lua are substituted with A and B before being
      # written to each script datarule.
      #
      alias: convert<% id %>.lua

# list of dispatches that must exist
dispatches:
    - alias: myDispatch
      # email | http_get | http_post | http_put | sms | xmpp
      method: email
      recipient: [email protected]
      message: hello from Exoline spec example!
      subject: hello!
      # may be an RID or alias
      subscribe: mystring

# list of simple datarules that must exist.
# scripts may go here too, but it's better to
# to put them under scripts (above)
datarules:
    - alias: highTemp
      format: float
      subscribe: temp
      rule: {
        "simple": {
          "comparison": "gt",
          "constant": 80,
          "repeat": true
        }
      }
'''
            if not six.PY3:
                s = s.encode('utf-8')
            print(s)
            return

        ExoException = options['exception']
        def load_file(path, base_url=None):
            '''load a file based on a path that may be a filesystem path
            or a URL. Consider it a URL if it starts with two or more
            alphabetic characters followed by a colon'''
            def load_from_url(url):
                # URL. use requests
                r = requests.get(url)
                if r.status_code >= 300:
                    raise ExoException('Failed to read file at URL ' + url)
                return r.text, '/'.join(r.url.split('/')[:-1])

            if re.match('[a-z]{2}[a-z]*:', path):
                return load_from_url(path)
            elif base_url is not None:
                # non-url paths when spec is loaded from URLs
                # are considered relative to that URL
                return load_from_url(base_url + '/' + path)
            else:
                with open(path, 'rb') as f:
                    return f.read(), None


        def load_spec(args):
            # returns loaded spec and path for script files
            try:
                content, base_url = load_file(args['<spec-yaml>'])
                spec = yaml.safe_load(content)
                return spec, base_url
            except yaml.scanner.ScannerError as ex:
                raise ExoException('Error parsing YAML in {0}\n{1}'.format(args['<spec-yaml>'],ex))

        def check_spec(spec, args):
            msgs = []
            for typ in TYPES:
                if typ in spec and plural(typ) not in spec:
                    msgs.append('found "{0}"... did you mean "{1}"?'.format(typ, typ + 's'))
            for dp in spec.get('dataports', []):
                if 'alias' not in dp:
                    msgs.append('dataport is missing alias: {0}'.format(dp))
                    continue
                alias = dp['alias']
                if 'jsonschema' in dp:
                    schema = dp['jsonschema']
                    if isinstance(schema, six.string_types):
                        schema = json.loads(open(schema).read())
                    try:
                        jsonschema.Draft4Validator.check_schema(schema)
                    except Exception as ex:
                        msgs.append('{0} failed jsonschema validation.\n{1}'.format(alias, str(ex)))
            if len(msgs) > 0:
                raise ExoException('Found some problems in spec:\n' + '\n'.join(msgs))

        if args['--check']:
            # Validate all the jsonschema
            spec, base_url = load_spec(args)
            check_spec(spec, args)
            return

        reid = re.compile('<% *id *%>')
        def infoval(input_cik, alias):
            '''Get info and latest value for a resource'''
            return rpc._exomult(
                input_cik,
                [['info', {'alias': alias}, {'description': True, 'basic': True}],
                ['read', {'alias': alias}, {'limit': 1}]])
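            # The batched call above returns two results: the resource info (with
            # 'description' and 'basic' sections) and the latest point as a list of
            # at most one [timestamp, value] pair, e.g. (illustrative) [[1424130000, '72.5']].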

        def check_or_create_description(auth, info, args):
            if 'device' in spec and 'limits' in spec['device']:
                speclimits = spec['device']['limits']
                infolimits = info['description']['limits']
                limits_mismatched = False
                for limit in speclimits:
                    if limit not in infolimits:
                        raise ExoException('spec file includes invalid limit {0}'.format(limit))
                    if speclimits[limit] != infolimits[limit]:
                        limits_mismatched = True
                if limits_mismatched:
                    if create:
                        if 'client_id' not in auth:
                            raise ExoException('limits update for client requires --portal or --domain')

                        rpc.update(auth['cik'], auth['client_id'], {'limits': speclimits})
                        sys.stdout.write('updated limits for client' +
                                         ' RID {0}'.format(auth['client_id']))
                    else:
                        sys.stdout.write(
                            'limits for client {0} do not match spec:\nspec: {1}\nclient: {2}'.format(
                                auth,
                                json.dumps(speclimits, sort_keys=True),
                                json.dumps(infolimits, sort_keys=True)))


        def check_or_create_common(auth, res, info, alias, aliases):
            if info['basic']['type'] != typ:
                raise ExoException('{0} is a {1} but should be a {2}.'.format(alias, info['basic']['type'], typ))

            new_desc = info['description'].copy()
            need_update = False

            if 'public' in res:
                res_pub = res['public']
                desc = info['description']
                if desc['public'] != res_pub:
                    if create:
                        new_desc['public'] = res_pub
                        need_update = True
                    else:
                        sys.stdout.write('spec expects public for {0} to be {1}, but it is not.\n'.format(alias, res_pub))
                        print(json.dumps(res))

            if 'subscribe' in res:
                # Alias *must* be local to this client
                resSub = res['subscribe']
                # Lookup alias/name if need be
                if resSub in aliases:
                    resSub = aliases[resSub]
                desc = info['description']
                if desc['subscribe'] != resSub:
                    if create:
                        new_desc['subscribe'] = resSub
                        need_update = True
                    else:
                        sys.stdout.write('spec expects subscribe for {0} to be {1}, but it is not.\n'.format(alias, resSub))

            if 'preprocess' in res:
                def fromAliases(pair):
                    if pair[1] in aliases:
                        return [pair[0], aliases[pair[1]]]
                    else:
                        return pair
                resPrep = [fromAliases(x) for x in res['preprocess']]
                preprocess = info['description']['preprocess']
                if create:
                    new_desc['preprocess'] = resPrep
                    need_update = True
                else:
                    if preprocess is None or len(preprocess) == 0:
                        sys.stdout.write('spec expects preprocess for {0} to be {1}, but they are missing.\n'.format(alias, resPrep))
                    elif preprocess != resPrep:
                        sys.stdout.write('spec expects preprocess for {0} to be {1}, but they are {2}.\n'.format(alias, resPrep, preprocess))

            if 'retention' in res:
                resRet = {}
                if 'count' in res['retention']:
                    resRet['count'] = res['retention']['count']
                if 'duration' in res['retention']:
                    resRet['duration'] = res['retention']['duration']
                retention = info['description']['retention']
                if create:
                    new_desc['retention'] = resRet
                    need_update = True
                elif retention != resRet:
                    sys.stdout.write('spec expects retention for {0} to be {1}, but they are {2}.\n'.format(alias, resRet, retention))

            if need_update:
                rpc.update(auth, {'alias': alias}, new_desc)

        def get_format(res, default='string'):
            format = res['format'] if 'format' in res else default
            pieces = format.split('/')
            if len(pieces) > 1:
                format = pieces[0]
                format_content = pieces[1]
            else:
                format_content = None
            return format, format_content
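            # Illustrative calls (not from the original source):
            #   get_format({'format': 'string/json'}) -> ('string', 'json')
            #   get_format({}, default='float')       -> ('float', None)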

        def add_desc(key, res, desc, required=False):
            '''add key from spec resource to a 1P resource description'''
            if key in res:
                desc[key] = res[key]
            else:
                if required:
                    raise ExoException('{0} in spec is missing required property {1}.'.format(alias, key))

        def create_resource(auth, typ, desc, alias, msg=''):
            name = res['name'] if 'name' in res else alias
            print('Creating {0} with name: {1}, alias: {2}{3}'.format(
                typ, name, alias, msg))
            rid = rpc.create(auth, typ, desc, name=name)
            rpc.map(auth, rid, alias)
            info, val = infoval(auth, alias)
            aliases[alias] = rid
            return info, val

        def check_or_create_datarule(auth, res, info, val, alias, aliases):
            format, format_content = get_format(res, 'float')
            if not exists and create:
                desc = {'format': format}
                desc['retention'] = {'count': 'infinity', 'duration': 'infinity'}
                add_desc('rule', res, desc, required=True)
                info, val = create_resource(
                    auth,
                    'datarule',
                    desc,
                    alias,
                    msg=', format: {0}, rule: {1}'.format(desc['format'], desc['rule']))

            # check format
            if format != info['description']['format']:
                raise ExoException(
                    '{0} is a {1} but should be a {2}.'.format(
                    alias, info['description']['format'], format))

            # check rule
            infoRule = json.dumps(info['description']['rule'], sort_keys=True)
            specRule = json.dumps(res['rule'], sort_keys=True)
            if infoRule != specRule:
                if create:
                    info['description']['rule'] = res['rule']
                    rpc.update(auth, {'alias': alias}, info['description'])
                    sys.stdout.write('updated rule for {0}\n'.format(alias))
                else:
                    sys.stdout.write(
                        'spec expects rule for {0} to be:\n{1}\n...but it is:\n{2}\n'.format(
                        alias, specRule, infoRule))

            check_or_create_common(auth, res, info, alias, aliases)

        def check_or_create_dataport(auth, res, info, val, alias, aliases):
            format, format_content = get_format(res, 'string')
            if not exists and create:
                desc = {'format': format}
                desc['retention'] = {'count': 'infinity', 'duration': 'infinity'}
                info, val = create_resource(
                    auth,
                    'dataport',
                    desc,
                    alias,
                    msg=', format: {0}'.format(format))

            # check format
            if format != info['description']['format']:
                raise ExoException(
                    '{0} is a {1} but should be a {2}.'.format(
                    alias, info['description']['format'], format))

            # check initial value
            if 'initial' in res and len(val) == 0:
                if create:
                    initialValue = template(res['initial'])
                    print('Writing initial value {0}'.format(initialValue))
                    rpc.write(auth, {'alias': alias}, initialValue)
                    # update values being validated
                    info, val = infoval(auth, alias)
                else:
                    print('Required initial value not found in {0}. Pass --create to write initial value.'.format(alias))

            # check format content (e.g. json)
            if format_content == 'json':
                if format != 'string':
                    raise ExoException(
                        'Invalid spec for {0}. json content type only applies to string, not {1}.'.format(alias, format))
                if len(val) == 0:
                    print('Spec requires {0} be in JSON format, but it is empty.'.format(alias))
                else:
                    obj = None
                    try:
                        obj = json.loads(val[0][1])
                    except:
                        print('Spec requires {0} be in JSON format, but it does not parse as JSON. Value: {1}'.format(
                            alias,
                            val[0][1]))

                    if obj is not None and 'jsonschema' in res:
                        schema = res['jsonschema']
                        if isinstance(schema, six.string_types):
                            schema = json.loads(open(schema).read())
                        try:
                            jsonschema.validate(obj, schema)
                        except Exception as ex:
                            print("{0} failed jsonschema validation.".format(alias))
                            print(ex)

            elif format_content is not None:
                raise ExoException(
                    'Invalid spec for {0}. Unrecognized format content {1}'.format(alias, format_content))

            # check unit
            if 'unit' in res or 'description' in res:
                meta_string = info['description']['meta']
                try:
                    meta = json.loads(meta_string)
                except:
                    meta = None

                def bad_desc_msg(s):
                    desc='""'
                    if 'description' in res:
                        desc = res['description']
                    sys.stdout.write('spec expects description for {0} to be {1}{2}\n'.format(alias, desc, s))
                def bad_unit_msg(s):
                    unit=''
                    if 'unit' in res:
                        unit = res['unit']
                    sys.stdout.write('spec expects unit for {0} to be {1}{2}\n'.format(alias, unit, s))

                if create:
                    if meta is None:
                        meta = {'datasource':{'description':'','unit':''}}
                    if 'datasource' not in meta:
                        meta['datasource'] = {'description':'','unit':''}
                    if 'unit' in res:
                        meta['datasource']['unit'] = res['unit']
                    if 'description' in res:
                        meta['datasource']['description'] = res['description']

                    info['description']['meta'] = json.dumps(meta)
                    rpc.update(auth, {'alias': alias}, info['description'])

                else:
                    if meta is None:
                        sys.stdout.write('spec expects metadata, but the resource has no metadata at all. Pass --create to write metadata.\n')
                    elif 'datasource' not in meta:
                        sys.stdout.write('spec expects datasource in metadata, but it is not there. Pass --create to write metadata.\n')
                    elif 'unit' not in meta['datasource'] and 'unit' in res:
                        bad_unit_msg(', but no unit is specified in metadata. Pass --create to set unit.')
                    elif 'description' not in meta['datasource'] and 'description' in res:
                        bad_desc_msg(', but no description is specified in metadata. Pass --create to set description.')
                    elif 'unit' in res and meta['datasource']['unit'] != res['unit']:
                        bad_unit_msg(', but metadata specifies unit of {0}. Pass --create to update unit.'.format(meta['datasource']['unit']))
                    elif 'description' in res and meta['datasource']['description'] != res['description']:
                        bad_desc_msg(', but metadata specifies description of {0}. Pass --create to update description.'.format(meta['datasource']['description']))

            check_or_create_common(auth, res, info, alias, aliases)

        def check_or_create_dispatch(auth, res, info, alias, aliases):
            if not exists and create:
                desc = {}
                add_desc('method', res, desc, required=True)
                add_desc('recipient', res, desc, required=True)
                add_desc('subject', res, desc)
                add_desc('message', res, desc)
                desc['retention'] = {'count': 'infinity', 'duration': 'infinity'}
                info, val = create_resource(
                    auth,
                    'dispatch',
                    desc,
                    alias,
                    msg=', method: {0}, recipient: {1}'.format(desc['method'], desc['recipient']))

            # check dispatch-specific things
            def check_desc(key, res, desc):
                '''check a specific key and return whether an update is required'''
                if key in res and desc[key] != res[key]:
                    if create:
                        desc[key] = res[key]
                        return True
                    else:
                        sys.stdout.write(
                            'spec expects {0} for {1} to be {2} but it is {3}\n'.format(
                            key, alias, res[key], desc[key]))
                return False

            desc = info['description']
            need_update = False
            need_update = check_desc('method', res, desc) or need_update
            need_update = check_desc('recipient', res, desc) or need_update
            need_update = check_desc('subject', res, desc) or need_update
            need_update = check_desc('message', res, desc) or need_update
            if need_update:
                rpc.update(auth, {'alias': alias}, desc)
                sys.stdout.write('updated {0} to {1}\n'.format(alias, json.dumps(desc, sort_keys=True)))

            check_or_create_common(auth, res, info, alias, aliases)


        input_cik = options['cik']
        rpc = options['rpc']
        asrid = args['--asrid']

        if cmd == 'spec':

            if args['--generate'] is not None:
                spec_file = args['--generate']
                if args['--scripts'] is not None:
                    script_dir = args['--scripts']
                else:
                    script_dir = 'scripts'
                print('Generating spec for {0}.'.format(input_cik))
                print('spec file: {0}, scripts directory: {1}'.format(spec_file, script_dir))

                # generate spec file, download scripts
                spec = {}
                info, listing = rpc._exomult(input_cik,
                    [['info', {'alias': ''}, {'basic': True,
                                              'description': True,
                                              'aliases': True}],
                     ['listing', ['dataport', 'datarule', 'dispatch'], {}, {'alias': ''}]])
                rids = listing['dataport'] + listing['datarule'] + listing['dispatch']

                if len(rids) > 0:
                    child_info = rpc._exomult(input_cik, [['info', rid, {'basic': True, 'description': True}] for rid in rids])
                    for idx, rid in enumerate(rids):
                        myinfo = child_info[idx]
                        name = myinfo['description']['name']
                        def skip_msg(msg):
                            print('Skipping {0} (name: {1}). {2}'.format(rid, name, msg))
                        if rid not in info['aliases']:
                            skip_msg('It needs an alias.')
                            continue

                        # adds properties common to dataports and dispatches:
                        # preprocess, subscribe, retention, meta, public
                        def add_common_things(res):
                            res['name'] = myinfo['description']['name']
                            res['alias'] = info['aliases'][rid][0]
                            preprocess = myinfo['description']['preprocess']
                            if preprocess is not None and len(preprocess) > 0:
                                def toAlias(pair):
                                    if not asrid and pair[1] in info['aliases']:
                                        return [pair[0], info['aliases'][pair[1]][0]]
                                    else:
                                        return pair
                                res['preprocess'] = [toAlias(x) for x in preprocess]


                            subscribe = myinfo['description']['subscribe']
                            if subscribe is not None and subscribe != "":
                                if not asrid and subscribe in info['aliases']:
                                    res['subscribe'] = info['aliases'][subscribe][0]
                                else:
                                    res['subscribe'] = subscribe

                            retention = myinfo['description']['retention']
                            if retention is not None:
                                count = retention['count']
                                duration = retention['duration']
                                if count is not None and duration is not None:
                                    if count == 'infinity':
                                        del retention['count']
                                    if duration == 'infinity':
                                        del retention['duration']
                                    if len(retention) > 0:
                                        res['retention'] = retention
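                            # Illustrative (assumed description): a retention of
                            # {'count': 'infinity', 'duration': 3600} is written to the
                            # spec as retention: {duration: 3600}, and a retention that
                            # is infinite in both fields is omitted from the spec entirely.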

                            meta_string = myinfo['description']['meta']
                            try:
                                meta = json.loads(meta_string)
                                unit = meta['datasource']['unit']
                                if len(unit) > 0:
                                    res['unit'] = unit
                                desc = meta['datasource']['description']
                                if len(desc) > 0:
                                    res['description'] = desc
                            except:
                                # assume unit is not present in metadata
                                pass

                            public = myinfo['description']['public']
                            if public is not None and public:
                                res['public'] = public


                        typ = myinfo['basic']['type']
                        if typ == 'dataport':
                            res = {
                                'format': myinfo['description']['format']
                            }
                            add_common_things(res)
                            spec.setdefault('dataports', []).append(res)

                        elif typ == 'datarule':
                            desc = myinfo['description']
                            is_script = desc['format'] == 'string' and 'rule' in desc and 'script' in desc['rule']
                            if is_script:
                                # make sure the scripts directory exists before writing
                                if not os.path.exists(script_dir):
                                    os.makedirs(script_dir)
                                filename = os.path.join(script_dir, info['aliases'][rid][0])
                                spec.setdefault('scripts', []).append({'file': filename})
                                with open(filename, 'w') as f:
                                    print('Writing {0}...'.format(filename))
                                    f.write(desc['rule']['script'])
                            else:
                                res = {
                                    'rule': desc['rule']
                                }
                                add_common_things(res)
                                spec.setdefault('datarules', []).append(res)

                        elif typ == 'dispatch':
                            desc = myinfo['description']
                            res = {
                                'method': desc['method'],
                                'message': desc['message'],
                                'recipient': desc['recipient'],
                                'subject': desc['subject']
                            }
                            add_common_things(res)
                            spec.setdefault('dispatches', []).append(res)

                with open(spec_file, 'w') as f:
                    print('Writing {0}...'.format(spec_file))
                    yaml.safe_dump(spec, f, encoding='utf-8', indent=4, default_flow_style=False, allow_unicode=True)
                return

            updatescripts = args['--update-scripts']
            create = args['--create']

            def query_yes_no(question, default="yes"):
                """Ask a yes/no question via raw_input() and return their answer.

                "question" is a string that is presented to the user.
                "default" is the presumed answer if the user just hits <Enter>.
                    It must be "yes" (the default), "no" or None (meaning
                    an answer is required of the user).

                The "answer" return value is one of "yes" or "no".
                """
                valid = {"yes":True,   "y":True,  "ye":True,
                         "no":False,     "n":False}
                if default == None:
                    prompt = " [y/n] "
                elif default == "yes":
                    prompt = " [Y/n] "
                elif default == "no":
                    prompt = " [y/N] "
                else:
                    raise ValueError("invalid default answer: '%s'" % default)

                while True:
                    sys.stdout.write(question + prompt)
                    choice = six.moves.input().lower()
                    if default is not None and choice == '':
                        return valid[default]
                    elif choice in valid:
                        return valid[choice]
                    else:
                        sys.stdout.write("Please respond with 'yes' or 'no' "\
                                         "(or 'y' or 'n').\n")

            def generate_aliases_and_data(res, args):
                ids = args['--ids']
                if 'alias' in res:
                    alias = res['alias']
                else:
                    if 'file' in res:
                        alias = os.path.basename(res['file'])
                    else:
                        raise ExoException('Resources in spec must have an alias. (For scripts, "file" will substitute.)')

                if reid.search(alias) is None:
                    yield alias, None
                else:
                    alias_template = alias
                    if ids is None:
                        raise ExoException('This spec requires --ids')
                    ids = ids.split(',')
                    for id, alias in [(id, reid.sub(id, alias_template)) for id in ids]:
                        yield alias, {'id': id}
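            # Illustrative expansion (assumed ids): with --ids A,B and a spec alias of
            # 'convert<% id %>.lua', generate_aliases_and_data yields
            # ('convertA.lua', {'id': 'A'}) and ('convertB.lua', {'id': 'B'});
            # an alias without the <% id %> placeholder yields (alias, None).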

            spec, base_url = load_spec(args)
            check_spec(spec, args)

            device_auths = []
            portal_ciks = []

            iterate_portals = False

            def auth_string(auth):
                if isinstance(auth, dict):
                    return json.dumps(auth)
                else:
                    return auth

            if args['--portal'] == True:
                portal_ciks.append((input_cik,''))
                iterate_portals = True

            if args['--domain'] == True:
                # set iterate_portals flag to true so we can iterate over each portal
                iterate_portals = True
                # Get list of users under a domain
                user_keys = []
                clients = rpc._listing_with_info(input_cik,['client'])

                email_regex = re.compile(r'[^@]+@[^@]+\.[^@]+')
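                # Illustrative (assumed client names): a client named
                # 'jane.doe@example.com' matches and its key is collected below,
                # while one named 'Default Portal' is skipped, so only clients
                # named like email addresses are treated as domain users.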

                for k,v in clients['client'].items():
                    name = v['description']['name']
                    # if name is an email address
                    if email_regex.match(name):
                        user_keys.append(v['key'])


                # Get list of each portal
                for key in user_keys:
                    userlisting = rpc._listing_with_info(key,['client'])
                    for k,v in userlisting['client'].items():
                        portal_ciks.append((v['key'],v['description']['name']))
                    #print(x)


            if iterate_portals == True:
                for portal_cik, portal_name in portal_ciks:
                    # If user passed in the portal flag, but the spec doesn't have
                    # a vendor/model, exit
                    if 'device' not in spec or 'model' not in spec['device'] or 'vendor' not in spec['device']:
                        print("With --portal (or --domain) option, spec file requires a\r\n"
                              "device model and vendor field:\r\n"
                              "e.g.\r\n"
                              "device:\r\n"
                              "    model: modelName\r\n"
                              "    vendor: vendorName\r\n")
                        raise ExoException('--portal flag requires a device model/vendor in spec file')
                    else:

                        # get device vendor and model
                        modelName = spec['device']['model']
                        vendorName = spec['device']['vendor']

                        # If the portal has no name, use the cik as the name
                        if portal_name == '':
                            portal_name = portal_cik
                        print('Looking in ' + portal_name + ' for ' + modelName + '/' + vendorName)
                        # Get all clients in the portal
                        clients = rpc._listing_with_info(portal_cik, ['client'])
                        #print(modelName)
                        # for each client
                        for rid, v in iteritems(clients['client']):
                            # Get meta field
                            validJson = False
                            meta = None
                            try:
                                meta = json.loads(v['description']['meta'])
                                validJson = True
                            except ValueError as e:
                                # no JSON in this meta field
                                validJson = False
                            if validJson == True:
                                # get device type (only 'vendor' type clients have a model and vendor)
                                typ = meta['device']['type']

                                # if the device type is 'vendor'
                                if typ == 'vendor':
                                    # and it matches our vendor/model in the spec file
                                    if meta['device']['vendor'] == vendorName:
                                        if meta['device']['model'] == modelName:
                                            # Append an auth for this device to our list
                                            auth = {
                                                'cik': portal_cik, # v['key'],
                                                'client_id': rid
                                            }
                                            device_auths.append(auth)
                                            print('  found: {0} {1}'.format(v['description']['name'], auth_string(auth)))
            else:
                # only for single client
                device_auths.append(input_cik)

            # Make sure user knows they are about to update multiple devices
            # unless the `-f` flag is passed
            if ((args['--portal'] or args['--domain']) and args['--create']) and not args['-f']:
                res = query_yes_no("You are about to update " + str(len(device_auths)) + " devices, are you sure?")
                if res == False:
                    print('exiting')
                    return

            # for each device in our list of device_auths
            for auth in device_auths:
                try:
                    aliases = {}
                    print("Running spec on: {0}".format(auth_string(auth)))
                    #   apply spec [--create]

                    # Get map of aliases and description
                    info = rpc.info(auth, {'alias': ''}, {'aliases': True, 'description': True})
                    try:
                        for rid, alist in info['aliases'].items():
                            for alias in alist:
                                aliases[alias] = rid
                    except:
                        pass

                    # Check limits
                    check_or_create_description(auth, info, args)

                    for typ in TYPES:
                        for res in spec.get(plural(typ), []):
                            for alias, resource_data in generate_aliases_and_data(res, args):
                                # TODO: handle nonexistence
                                exists = True
                                try:
                                    info, val = infoval(auth, alias)
                                except rpc.RPCException as e:
                                    info = None
                                    val = None
                                    exists = False
                                    print('{0} not found.'.format(alias))
                                    if not create:
                                        print('Pass --create to create it')
                                        continue
                                except pyonep.exceptions.OnePlatformException as ex:
                                    exc = ast.literal_eval(getattr(ex, 'message', str(ex)))

                                    if exc['code'] == 401:
                                        raise Spec401Exception()
                                    else:
                                        raise ex

                                def template(script):
                                    if resource_data is None:
                                        return script
                                    else:
                                        return reid.sub(resource_data['id'], script)
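                                # Illustrative (assumed id): with resource_data of
                                # {'id': 'A'}, every '<% id %>' in the script body is
                                # replaced with 'A' before it is compared or uploaded;
                                # with resource_data of None the script is left untouched.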

                                if typ == 'client':
                                    if not exists:
                                        if create:
                                            print('Client creation is not yet supported')
                                        continue
                                elif typ == 'dataport':
                                    check_or_create_dataport(auth, res, info, val, alias, aliases)
                                elif typ == 'dispatch':
                                    check_or_create_dispatch(auth, res, info, alias, aliases)
                                elif typ == 'datarule':
                                    check_or_create_datarule(auth, res, info, val, alias, aliases)
                                elif typ == 'script':
                                    if 'file' not in res and 'code' not in res:
                                        raise ExoException('{0} is a script, so it needs a "file" or "code" key'.format(alias))
                                    if 'file' in res and 'code' in res:
                                        raise ExoException('{0} specifies both "file" and "code" keys, but they\'re mutually exclusive.'.format(alias))

                                    name = res['name'] if 'name' in res else alias

                                    if 'file' in res:
                                        content, _ = load_file(res['file'], base_url=base_url)
                                        if not six.PY3 or type(content) is bytes:
                                            content = content.decode('utf8')
                                    else:
                                        content = res['code']
                                    if not exists and create:
                                        rpc.upload_script_content([auth], content, name=alias, create=True, filterfn=template)
                                        continue

                                    script_spec = template(content)
                                    script_svr = info['description']['rule']['script']
                                    script_friendly = 'file {0}'.format(res['file']) if 'file' in res else '"code" value in spec'
                                    if script_svr != script_spec:
                                        print('Script for {0} does not match {1}.'.format(alias, script_friendly))
                                        if updatescripts:
                                            print('Uploading script to {0}...'.format(alias))
                                            rpc.upload_script_content([auth], script_spec, name=name, create=False, filterfn=template)
                                        elif not args['--no-diff']:
                                            # show diff
                                            import difflib
                                            differences = '\n'.join(
                                                difflib.unified_diff(
                                                    script_spec.splitlines(),
                                                    script_svr.splitlines(),
                                                    fromfile=script_friendly,
                                                    tofile='info["description"]["rule"]["script"]'))

                                            print(differences)
                                else:
                                    raise ExoException('Found unsupported type {0} in spec.'.format(typ))
                except Spec401Exception as ex:
                    print("******WARNING******* 401 received in spec, is the device expired?")
                    pass
Ejemplo n.º 47
0
        formatter = Terminal256Formatter
    else:
        formatter = NullFormatter

if sys.version_info < (3, 0):
    sys.stdout.write("Sorry, Python 3 og higher required\n")
    sys.exit(1)

try: # ... to read json
    i = args.infile.read()
    d = json.loads( i )
    if args.alwaysjson:
        if pygments:
            i = highlight( i, JsonLexer(), formatter() )
        print( i )
    else:
        out = yaml.safe_dump(d, indent=args.indent, allow_unicode=True )
        if pygments:
            out = highlight( out, YamlLexer(), formatter() )
        print( out )
except Exception:
    try: # ... to read yaml
        d = yaml.safe_load( i )
        out = json.dumps(d, indent=args.indent)
        if pygments:
            out = highlight(out, JsonLexer(), formatter() )
        print(out)
    except Exception:
        print("input error: invalid json or yaml format")

Ejemplo n.º 48
0
File: main.py Project: bmeg/cwltool
def main(argsl=None,  # type: List[str]
         args=None,  # type: argparse.Namespace
         executor=single_job_executor,  # type: Callable[..., Tuple[Dict[Text, Any], Text]]
         makeTool=workflow.defaultMakeTool,  # type: Callable[..., Process]
         selectResources=None,  # type: Callable[[Dict[Text, int]], Dict[Text, int]]
         stdin=sys.stdin,  # type: IO[Any]
         stdout=sys.stdout,  # type: IO[Any]
         stderr=sys.stderr,  # type: IO[Any]
         versionfunc=versionstring,  # type: Callable[[], Text]
         job_order_object=None,  # type: MutableMapping[Text, Any]
         make_fs_access=StdFsAccess,  # type: Callable[[Text], StdFsAccess]
         fetcher_constructor=None,  # type: FetcherConstructorType
         resolver=tool_resolver,
         logger_handler=None,
         custom_schema_callback=None  # type: Callable[[], None]
         ):
    # type: (...) -> int

    _logger.removeHandler(defaultStreamHandler)
    if logger_handler:
        stderr_handler = logger_handler
    else:
        stderr_handler = logging.StreamHandler(stderr)
    _logger.addHandler(stderr_handler)
    try:
        if args is None:
            if argsl is None:
                argsl = sys.argv[1:]
            args = arg_parser().parse_args(argsl)

        # On Windows, use a default Docker container if the user did not explicitly provide one
        if onWindows() and not args.default_container:
            # This Docker image is a minimal Alpine image with bash installed (size ~6 MB). Source: https://github.com/frol/docker-alpine-bash
            args.default_container = windows_default_container_id

        # If the caller provided custom arguments, not every expected
        # option may be set, so fill in no-op defaults to avoid crashing when
        # dereferencing them in args.
        for k, v in six.iteritems({'print_deps': False,
                     'print_pre': False,
                     'print_rdf': False,
                     'print_dot': False,
                     'relative_deps': False,
                     'tmp_outdir_prefix': 'tmp',
                     'tmpdir_prefix': 'tmp',
                     'print_input_deps': False,
                     'cachedir': None,
                     'quiet': False,
                     'debug': False,
                     'js_console': False,
                     'version': False,
                     'enable_dev': False,
                     'enable_ext': False,
                     'strict': True,
                     'skip_schemas': False,
                     'rdf_serializer': None,
                     'basedir': None,
                     'tool_help': False,
                     'workflow': None,
                     'job_order': None,
                     'pack': False,
                     'on_error': 'continue',
                     'relax_path_checks': False,
                     'validate': False,
                     'enable_ga4gh_tool_registry': False,
                     'ga4gh_tool_registries': [],
                     'find_default_container': None,
                     'make_template': False,
                     'overrides': None
        }):
            if not hasattr(args, k):
                setattr(args, k, v)

        if args.quiet:
            _logger.setLevel(logging.WARN)
        if args.debug:
            _logger.setLevel(logging.DEBUG)

        if args.version:
            print(versionfunc())
            return 0
        else:
            _logger.info(versionfunc())

        if args.print_supported_versions:
            print("\n".join(supportedCWLversions(args.enable_dev)))
            return 0

        if not args.workflow:
            if os.path.isfile("CWLFile"):
                setattr(args, "workflow", "CWLFile")
            else:
                _logger.error("")
                _logger.error("CWL document required, no input file was provided")
                arg_parser().print_help()
                return 1
        if args.relax_path_checks:
            draft2tool.ACCEPTLIST_RE = draft2tool.ACCEPTLIST_EN_RELAXED_RE

        if args.ga4gh_tool_registries:
            ga4gh_tool_registries[:] = args.ga4gh_tool_registries
        if not args.enable_ga4gh_tool_registry:
            del ga4gh_tool_registries[:]

        if custom_schema_callback:
            custom_schema_callback()
        elif args.enable_ext:
            res = pkg_resources.resource_stream(__name__, 'extensions.yml')
            use_custom_schema("v1.0", "http://commonwl.org/cwltool", res.read())
            res.close()
        else:
            use_standard_schema("v1.0")

        uri, tool_file_uri = resolve_tool_uri(args.workflow,
                                              resolver=resolver,
                                              fetcher_constructor=fetcher_constructor)

        overrides = []  # type: List[Dict[Text, Any]]

        try:
            job_order_object, input_basedir, jobloader = load_job_order(args,
                                                                        stdin,
                                                                        fetcher_constructor,
                                                                        overrides,
                                                                        tool_file_uri)
        except Exception as e:
            _logger.error(Text(e), exc_info=args.debug)

        if args.overrides:
            overrides.extend(load_overrides(file_uri(os.path.abspath(args.overrides)), tool_file_uri))

        try:
            document_loader, workflowobj, uri = fetch_document(uri, resolver=resolver,
                                                               fetcher_constructor=fetcher_constructor)

            if args.print_deps:
                printdeps(workflowobj, document_loader, stdout, args.relative_deps, uri)
                return 0

            document_loader, avsc_names, processobj, metadata, uri \
                = validate_document(document_loader, workflowobj, uri,
                                    enable_dev=args.enable_dev, strict=args.strict,
                                    preprocess_only=args.print_pre or args.pack,
                                    fetcher_constructor=fetcher_constructor,
                                    skip_schemas=args.skip_schemas,
                                    overrides=overrides)

            if args.print_pre:
                stdout.write(json.dumps(processobj, indent=4))
                return 0

            overrides.extend(metadata.get("cwltool:overrides", []))

            conf_file = getattr(args, "beta_dependency_resolvers_configuration", None)  # Text
            use_conda_dependencies = getattr(args, "beta_conda_dependencies", None)  # Text

            make_tool_kwds = vars(args)

            job_script_provider = None  # type: Callable[[Any, List[str]], Text]
            if conf_file or use_conda_dependencies:
                dependencies_configuration = DependenciesConfiguration(args)  # type: DependenciesConfiguration
                make_tool_kwds["job_script_provider"] = dependencies_configuration

            make_tool_kwds["find_default_container"] = functools.partial(find_default_container, args)
            make_tool_kwds["overrides"] = overrides

            tool = make_tool(document_loader, avsc_names, metadata, uri,
                             makeTool, make_tool_kwds)
            if args.make_template:
                yaml.safe_dump(generate_input_template(tool), sys.stdout,
                               default_flow_style=False, indent=4,
                               block_seq_indent=2)
                return 0

            if args.validate:
                _logger.info("Tool definition is valid")
                return 0

            if args.pack:
                stdout.write(print_pack(document_loader, processobj, uri, metadata))
                return 0

            if args.print_rdf:
                stdout.write(printrdf(tool, document_loader.ctx, args.rdf_serializer))
                return 0

            if args.print_dot:
                printdot(tool, document_loader.ctx, stdout)
                return 0

        except (validate.ValidationException) as exc:
            _logger.error(u"Tool definition failed validation:\n%s", exc,
                          exc_info=args.debug)
            return 1
        except (RuntimeError, WorkflowException) as exc:
            _logger.error(u"Tool definition failed initialization:\n%s", exc,
                          exc_info=args.debug)
            return 1
        except Exception as exc:
            _logger.error(
                u"I'm sorry, I couldn't load this CWL file%s",
                ", try again with --debug for more information.\nThe error was: "
                "%s" % exc if not args.debug else ".  The error was:",
                exc_info=args.debug)
            return 1

        if isinstance(tool, int):
            return tool

        for dirprefix in ("tmpdir_prefix", "tmp_outdir_prefix", "cachedir"):
            if getattr(args, dirprefix) and getattr(args, dirprefix) != 'tmp':
                sl = "/" if getattr(args, dirprefix).endswith("/") or dirprefix == "cachedir" else ""
                setattr(args, dirprefix,
                        os.path.abspath(getattr(args, dirprefix)) + sl)
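                # Illustrative (assumed values): a tmpdir_prefix of 'scratch/run-'
                # becomes an absolute prefix such as '/current/dir/scratch/run-',
                # while cachedir values always get a trailing '/' appended.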
                if not os.path.exists(os.path.dirname(getattr(args, dirprefix))):
                    try:
                        os.makedirs(os.path.dirname(getattr(args, dirprefix)))
                    except Exception as e:
                        _logger.error("Failed to create directory: %s", e)
                        return 1

        if args.cachedir:
            if args.move_outputs == "move":
                setattr(args, 'move_outputs', "copy")
            setattr(args, "tmp_outdir_prefix", args.cachedir)

        try:
            job_order_object = init_job_order(job_order_object, args, tool,
                                              print_input_deps=args.print_input_deps,
                                              relative_deps=args.relative_deps,
                                              stdout=stdout,
                                              make_fs_access=make_fs_access,
                                              loader=jobloader,
                                              input_basedir=input_basedir)
        except SystemExit as e:
            return e.code

        if isinstance(job_order_object, int):
            return job_order_object

        try:
            setattr(args, 'basedir', input_basedir)
            del args.workflow
            del args.job_order
            (out, status) = executor(tool, job_order_object,
                                     makeTool=makeTool,
                                     select_resources=selectResources,
                                     make_fs_access=make_fs_access,
                                     **vars(args))

            # This is the workflow output; it needs to be written to stdout
            if out is not None:

                def locToPath(p):
                    for field in ("path", "nameext", "nameroot", "dirname"):
                        if field in p:
                            del p[field]
                    if p["location"].startswith("file://"):
                        p["path"] = uri_file_path(p["location"])

                visit_class(out, ("File", "Directory"), locToPath)

                # Unset the Generation from the final output object
                visit_class(out, ("File",), MutationManager().unset_generation)

                if isinstance(out, six.string_types):
                    stdout.write(out)
                else:
                    stdout.write(json.dumps(out, indent=4))
                stdout.write("\n")
                stdout.flush()

            if status != "success":
                _logger.warning(u"Final process status is %s", status)
                return 1
            else:
                _logger.info(u"Final process status is %s", status)
                return 0

        except (validate.ValidationException) as exc:
            _logger.error(u"Input object failed validation:\n%s", exc,
                          exc_info=args.debug)
            return 1
        except UnsupportedRequirement as exc:
            _logger.error(
                u"Workflow or tool uses unsupported feature:\n%s", exc,
                exc_info=args.debug)
            return 33
        except WorkflowException as exc:
            _logger.error(
                u"Workflow error, try again with --debug for more "
                "information:\n%s", strip_dup_lineno(six.text_type(exc)), exc_info=args.debug)
            return 1
        except Exception as exc:
            _logger.error(
                u"Unhandled error, try again with --debug for more information:\n"
                "  %s", exc, exc_info=args.debug)
            return 1

    finally:
        _logger.removeHandler(stderr_handler)
        _logger.addHandler(defaultStreamHandler)