Code example #1
def yaml(self):
    """Return standard out as YAML."""
    if self._stdout_bytes:
        if not self._yaml:
            self._yaml = yaml.load(self.output)
        return self._yaml
    else:
        return None
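The method above lazily parses captured standard output and caches the parsed result. As a rough illustration only, here is a minimal, self-contained sketch of the same lazy parse-and-cache pattern; the class name, constructor, and the use of PyYAML's safe_load are illustrative assumptions, not taken from the source.

import yaml

class CommandResult:
    """Illustrative stand-in for the object the method above belongs to."""

    def __init__(self, stdout_bytes: bytes):
        self._stdout_bytes = stdout_bytes
        self._yaml = None

    @property
    def output(self) -> str:
        # Decoded standard output, mirroring the `output` attribute used above.
        return self._stdout_bytes.decode()

    def yaml(self):
        """Return standard out as YAML, parsing it only once."""
        if self._stdout_bytes:
            if self._yaml is None:
                self._yaml = yaml.safe_load(self.output)
            return self._yaml
        return None

result = CommandResult(b"alpha: 1\nbeta: [2, 3]\n")
assert result.yaml() == {"alpha": 1, "beta": [2, 3]}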
Code example #2
File: test_site_catalog.py  Project: jie-jay/pegasus
def test_dumps(sc1):
    expected = json.loads(json.dumps(sc1, cls=_CustomEncoder))

    result = yaml.load(dumps(sc1))
    # file info not needed for test
    del result["x-pegasus"]

    assert result == expected
Code example #3
def loads(s: str, *args, **kwargs) -> ReplicaCatalog:
    """
    Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance containing a ReplicaCatalog document) to a :py:class:`~Pegasus.api.replica_catalog.ReplicaCatalog` object.

    :param s: string to load from
    :type s: str
    :return: deserialized ReplicaCatalog object
    :rtype: ReplicaCatalog
    """
    return _to_rc(yaml.load(s))
Code example #4
def load(fp: TextIO, *args, **kwargs) -> ReplicaCatalog:
    """
    Deserialize ``fp`` (a ``.read()``-supporting file-like object containing a ReplicaCatalog document) to a :py:class:`~Pegasus.api.replica_catalog.ReplicaCatalog` object.

    :param fp: file like object to load from
    :type fp: TextIO
    :return: deserialized ReplicaCatalog object
    :rtype: ReplicaCatalog
    """
    return _to_rc(yaml.load(fp))
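A rough round-trip sketch for the loads/load pair above. It leans on the ReplicaCatalog API and the dumps counterpart that appear in the test examples further down; the import paths and file/LFN names are assumptions, not confirmed by the source.

from Pegasus.api import ReplicaCatalog              # assumed import path
from Pegasus.replica_catalog import dumps, loads    # assumed import path

rc = ReplicaCatalog()
rc.add_replica("local", "f.a", "/data/f.a")

# Serialize to a YAML string, then deserialize it back into an object.
rc_roundtrip = loads(dumps(rc))
print(type(rc_roundtrip))  # Pegasus.api.replica_catalog.ReplicaCatalog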
Code example #5
def loads(s: str, *args, **kwargs) -> Workflow:
    """
    Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance containing a Workflow document) to a :py:class:`~Pegasus.api.workflow.Workflow` object.

    :param s: string to load from
    :type s: str
    :return: deserialized Workflow object
    :rtype: Workflow
    """
    return _to_wf(yaml.load(s))
Code example #6
def load(fp: TextIO, *args, **kwargs) -> Workflow:
    """
    Deserialize ``fp`` (a ``.read()``-supporting file-like object containing a Workflow document) to a :py:class:`~Pegasus.api.workflow.Workflow` object.

    :param fp: file like object to load from
    :type fp: TextIO
    :return: deserialized Workflow object
    :rtype: Workflow
    """
    return _to_wf(yaml.load(fp))
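A rough usage sketch for the Workflow loads/load pair above; the import path and the "workflow.yml" file name are assumptions (a file previously written out by the Pegasus 5 Python API).

from Pegasus.workflow import load    # assumed import path

# "workflow.yml" is a hypothetical file name for a workflow document
# produced by the Pegasus 5 Python API.
with open("workflow.yml") as f:
    wf = load(f)

print(type(wf))  # Pegasus.api.workflow.Workflow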
Code example #7
def loads(s: str, *args, **kwargs) -> TransformationCatalog:
    """
    Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance containing a TransformationCatalog document) to a :py:class:`~Pegasus.api.transformation_catalog.TransformationCatalog` object.

    :param s: string to load from
    :type s: str
    :return: deserialized TransformationCatalog object
    :rtype: TransformationCatalog
    """
    return _to_tc(yaml.load(s))
Code example #8
def load(fp: TextIO, *args, **kwargs) -> TransformationCatalog:
    """
    Deserialize ``fp`` (a ``.read()``-supporting file-like object containing a TransformationCatalog document) to a :py:class:`~Pegasus.api.transformation_catalog.TransformationCatalog` object.

    :param fp: file like object to load from
    :type fp: TextIO
    :return: deserialized TransformationCatalog object
    :rtype: TransformationCatalog
    """
    return _to_tc(yaml.load(fp))
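A rough usage sketch for the TransformationCatalog loads/load pair above, this time feeding the string variant; the import path and the "transformations.yml" file name are assumptions.

from Pegasus.transformation_catalog import loads    # assumed import path

# "transformations.yml" is a hypothetical file name for a transformation
# catalog document in YAML form.
with open("transformations.yml") as f:
    tc = loads(f.read())

print(type(tc))  # Pegasus.api.transformation_catalog.TransformationCatalog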
Code example #9
def load_wf_inputs(input_spec_file_path: str) -> dict:
    try:
        with open(input_spec_file_path) as f:
            wf_inputs = yaml.load(f)

        log.info(
            "Loaded workflow inputs file: {}".format(input_spec_file_path))
    except FileNotFoundError:
        log.exception("Unable to find {}".format(input_spec_file_path))
        sys.exit(1)

    return wf_inputs
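A rough usage sketch; the file name and the keys inside it are purely illustrative, since the source does not show the expected schema of the inputs file.

# Contents of a hypothetical wf_inputs.yml:
#
#   input_fasta: /data/sample.fasta
#   threads: 4
#
wf_inputs = load_wf_inputs("wf_inputs.yml")
print(wf_inputs["threads"])  # 4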
Code example #10
def test_dumps(rc_as_dict):
    rc = ReplicaCatalog()
    rc.add_replica("local", "a", "/a", regex=True)
    rc.add_replica("local", "b", "/b")
    rc.add_replica("local", "c", "/c", checksum_type="sha256", checksum_value="abc123")

    rc_as_dict["replicas"] = sorted(rc_as_dict["replicas"], key=lambda r: r["lfn"])

    result = yaml.load(dumps(rc))
    result["replicas"] = sorted(result["replicas"], key=lambda r: r["lfn"])

    assert result["replicas"] == rc_as_dict["replicas"]
Code example #11
def test_dumps(rc):
    result = _to_rc(yaml.load(dumps(rc)))

    assert len(result.entries) == 2
    assert result.entries[("a", False)].lfn == "a"
    assert result.entries[("a", False)].pfns == {
        _PFN("local", "/a"),
        _PFN("condorpool", "/a"),
    }
    assert result.entries[("a", False)].metadata == {"key": "value"}
    assert result.entries[("a", False)].checksum == {"sha256": "abc123"}

    assert result.entries[("b*", True)].lfn == "b*"
    assert result.entries[("b*", True)].pfns == {_PFN("local", "/b")}
    assert result.entries[("b*", True)].metadata == {"key": "value"}
Code example #12
def loads(s: str, *args, **kwargs) -> Braindump:
    """
    Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance containing a Braindump document) to a Python object.

    :param s: string to load from
    :type s: str
    :return: deserialized Braindump object
    :rtype: Braindump
    """
    _dict = yaml.load(s, *args, **kwargs)

    if not isinstance(_dict, dict):
        raise ValueError("Invalid braindump file.")

    return Braindump(**_dict)
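A rough usage sketch; the submit-directory path and file name are illustrative, and the wf_uuid attribute is assumed to exist on Braindump because the parsed mapping is passed straight to Braindump(**_dict).

# Hypothetical usage: parse the braindump file that Pegasus writes into a
# workflow's submit directory (path and file name are illustrative).
with open("/path/to/submit/dir/braindump.yml") as f:
    bd = loads(f.read())

print(bd.wf_uuid)  # assumed attribute, mirroring the standard braindump key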
Code example #13
def load_tr_specs(tr_specs_file_path: str) -> dict:
    log.info("Validating {}".format(tr_specs_file_path))
    schema = {
        "type": "object",
        "patternProperties": {
            ".+": {
                "type": "object",
                "properties": {
                    "site": {
                        "type": "string"
                    },
                    "is_stageable": {
                        "type": "boolean"
                    },
                },
                "required": ["site", "is_stageable"],
                "additionalPropertes": False,
            }
        },
    }

    try:
        with open(tr_specs_file_path) as f:
            specs = yaml.load(f)

        validate(instance=specs, schema=schema)
    except ValidationError:
        log.exception(
            "Invalid transformation spec file. File should be in the following format:\n"
            "\t\t\t<tr name1>:\n"
            "\t\t\t    site: <site name>\n"
            "\t\t\t    is_stageable: <boolean>\n"
            "\t\t\t<tr name2>:\n"
            "\t\t\t    site: <site name>\n"
            "\t\t\t    is_stageable: <boolean>\n"
            "\t\t\t...\n")
        sys.exit(1)
    except FileNotFoundError:
        log.exception("Unable to find transformation spec file: {}".format(
            tr_specs_file_path))
        sys.exit(1)

    log.info("Successfully loaded {}".format(tr_specs_file_path))

    return specs
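A rough usage sketch; the file name and transformation names are illustrative, but the two keys per entry mirror the schema validated above.

# Contents of a hypothetical tr_specs.yml:
#
#   preprocess:
#       site: local
#       is_stageable: true
#   analyze:
#       site: condorpool
#       is_stageable: false
#
specs = load_tr_specs("tr_specs.yml")
print(specs["preprocess"]["site"])  # local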
Code example #14
File: test_workflow.py  Project: ryantanaka/pegasus
def test_dumps(wf1):
    assert yaml.load(dumps(wf1)) == json.loads(
        json.dumps(wf1, cls=_CustomEncoder))
Code example #15
File: pegasus-graphviz.py  Project: jie-jay/pegasus
def parse_yamlfile(fname, include_files):
    """
    Parse a DAG from a YAML workflow file.
    """
    with open(fname) as f:
        wf = yaml.load(f)

    dag = DAG()

    for job in wf["jobs"]:
        # parse job
        j = Job()

        # compute job
        if job["type"] == "job":
            j.xform = job["name"]
        # subworkflow job
        else:
            j.xform = job["file"]

        j.id = j.label = job["id"]
        dag.nodes[j.id] = j

        # parse uses (files)
        if include_files:
            for use in job["uses"]:
                if use["lfn"] in dag.nodes:
                    f = dag.nodes[use["lfn"]]
                else:
                    f = File()
                    f.id = f.label = use["lfn"]
                    dag.nodes[f.id] = f

                link_type = use["type"]

                if link_type == "input":
                    j.parents.append(f)
                    f.children.append(j)
                elif link_type == "output":
                    j.children.append(f)
                    f.parents.append(j)
                elif link_type == "inout":
                    print(
                        "WARNING: inout file {} of {} creates a cycle.".format(
                            f.id, j.id
                        )
                    )
                    f.children.append(j)
                    f.parents.append(j)
                    j.parents.append(f)
                    j.children.append(f)
                elif link_type == "none":
                    pass
                else:
                    raise Exception("Unrecognized link value: {}".format(link_type))

    for dep in wf["jobDependencies"]:
        for child in dep["children"]:
            dag.nodes[dep["id"]].children.append(dag.nodes[child])
            dag.nodes[child].parents.append(dag.nodes[dep["id"]])

    return dag
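A rough usage sketch (the "workflow.yml" file name is illustrative): build the DAG from a Pegasus 5 workflow YAML file, adding the data files used by each job as graph nodes as well.

dag = parse_yamlfile("workflow.yml", include_files=True)

# Walk the resulting graph; each node carries the parent/child lists
# populated by parse_yamlfile above.
for node_id, node in dag.nodes.items():
    print(node_id, "->", [child.id for child in node.children])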