Example #1
    def _dump_pipeline_file(self, stage):
        data = {}
        if self.exists():
            with open(self.path) as fd:
                data = parse_yaml_for_update(fd.read(), self.path)
        else:
            logger.info("Creating '%s'", self.relpath)
            open(self.path, "w+").close()

        data["stages"] = data.get("stages", {})
        stage_data = serialize.to_pipeline_file(stage)
        existing_entry = stage.name in data["stages"]

        action = "Modifying" if existing_entry else "Adding"
        logger.info("%s stage '%s' in '%s'", action, stage.name, self.relpath)

        if existing_entry:
            orig_stage_data = data["stages"][stage.name]
            # Stage objects don't deal with `meta`, so .dumpd() drops it;
            # carry it over from the existing entry before diffing.
            if "meta" in orig_stage_data:
                stage_data[stage.name]["meta"] = orig_stage_data["meta"]
            # Apply the regenerated data onto the parsed (ruamel) structure
            # so comments and formatting already in the file are preserved.
            apply_diff(stage_data[stage.name], orig_stage_data)
        else:
            data["stages"].update(stage_data)

        dump_yaml(self.path, data)
        self.repo.scm.track_file(self.relpath)
Example #2
def test_run_overwrite_order(tmp_dir, dvc, run_copy):
    from dvc.dvcfile import PIPELINE_FILE

    tmp_dir.gen({"foo": "foo", "foo1": "foo1"})
    run_copy("foo", "bar", name="copy-foo-bar")
    run_copy("bar", "foobar", name="copy-bar-foobar")

    run_copy("foo1", "bar1", name="copy-foo-bar", force=True)

    data = parse_yaml_for_update((tmp_dir / PIPELINE_FILE).read_text(),
                                 PIPELINE_FILE)
    assert list(data["stages"].keys()) == ["copy-foo-bar", "copy-bar-foobar"]
Example #3
    def remove_stage(self, stage):
        if not self.exists():
            return

        with open(self.path) as f:
            d = parse_yaml_for_update(f.read(), self.path)
        self.validate(d, self.path)

        if stage.name not in d:
            return

        logger.debug("Removing '%s' from '%s'", stage.name, self.path)
        del d[stage.name]

        dump_yaml(self.path, d)
Example #4
    def remove_stage(self, stage):
        self._lockfile.remove_stage(stage)
        if not self.exists():
            return

        with open(self.path, "r") as f:
            d = parse_yaml_for_update(f.read(), self.path)

        self.validate(d, self.path)
        if stage.name not in d.get("stages", {}):
            return

        logger.debug("Removing '%s' from '%s'", stage.name, self.path)
        del d["stages"][stage.name]
        dump_yaml(self.path, d)
Example #5
    def dump(self, stage, **kwargs):
        stage_data = serialize.to_lockfile(stage)
        if not self.exists():
            modified = True
            logger.info("Generating lock file '%s'", self.relpath)
            data = stage_data
            open(self.path, "w+").close()
        else:
            with self.repo.tree.open(self.path, "r") as fd:
                data = parse_yaml_for_update(fd.read(), self.path)
            modified = data.get(stage.name, {}) != stage_data.get(
                stage.name, {})
            if modified:
                logger.info("Updating lock file '%s'", self.relpath)
            data.update(stage_data)
        dump_yaml(self.path, data)
        if modified:
            self.repo.scm.track_file(self.relpath)
Example #6
def to_single_stage_file(stage: "Stage"):
    state = stage.dumpd()

    # When we load a stage we parse yaml with a fast parser, which strips
    # off all the comments and formatting. To retain those on update we do
    # a trick here:
    # - reparse the same yaml text with a slow but smart ruamel yaml parser
    # - apply changes to a returned structure
    # - serialize it
    if stage._stage_text is not None:
        saved_state = parse_yaml_for_update(stage._stage_text, stage.path)
        # Stage doesn't work with meta in any way, so .dumpd() doesn't
        # have it. We simply copy it over.
        if "meta" in saved_state:
            state["meta"] = saved_state["meta"]
        apply_diff(state, saved_state)
        state = saved_state
    return state
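
The round-trip trick described in the comment above can be shown in isolation. This is a minimal sketch using ruamel.yaml directly, which is the round-trip parser that `parse_yaml_for_update` wraps in DVC; the YAML text and key names below are purely illustrative.

import io

from ruamel.yaml import YAML

ORIGINAL = """\
# deps are pinned on purpose
stages:
  train:
    cmd: python train.py  # keep in sync with params.yaml
"""

yaml = YAML()  # round-trip mode by default: keeps comments and key order
data = yaml.load(ORIGINAL)

# Mutate the parsed structure the same way the dump()/remove_stage()
# examples above mutate theirs.
data["stages"]["train"]["cmd"] = "python train.py --epochs 10"

buf = io.StringIO()
yaml.dump(data, buf)
print(buf.getvalue())  # comments from ORIGINAL survive the edit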
Example #7
    def _dump_pipeline_file(self, stage):
        data = {}
        if self.exists():
            with open(self.path) as fd:
                data = parse_yaml_for_update(fd.read(), self.path)
        else:
            logger.info("Creating '%s'", self.relpath)
            open(self.path, "w+").close()

        data["stages"] = data.get("stages", {})
        stage_data = serialize.to_pipeline_file(stage)
        if data["stages"].get(stage.name):
            orig_stage_data = data["stages"][stage.name]
            apply_diff(stage_data[stage.name], orig_stage_data)
        else:
            data["stages"].update(stage_data)

        logger.info(
            "Adding stage '%s' to '%s'",
            stage.name,
            self.relpath,
        )
        dump_yaml(self.path, data)
        self.repo.scm.track_file(self.relpath)
Example #8
def read_lock_file(file=PIPELINE_LOCK):
    with open(file) as f:
        data = parse_yaml_for_update(f.read(), file)
    assert isinstance(data, OrderedDict)
    return data
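
Taken together, the examples share one pattern: read the file text, parse it with `parse_yaml_for_update`, mutate the returned structure, and write it back with `dump_yaml`. Below is a condensed, hedged sketch of that pattern; `update_stage_entry` is a hypothetical helper, and the import path assumes the DVC 1.x layout these snippets come from (the exact module may differ between versions).

# Assumption: parse_yaml_for_update and dump_yaml live in dvc.utils.yaml,
# as in the DVC 1.x code the examples above come from.
from dvc.utils.yaml import dump_yaml, parse_yaml_for_update


def update_stage_entry(path, stage_name, new_entry):
    """Hypothetical helper: rewrite one stage entry in a pipeline file
    while keeping the rest of the file (comments included) intact."""
    with open(path) as fd:
        data = parse_yaml_for_update(fd.read(), path)

    data.setdefault("stages", {})[stage_name] = new_entry
    dump_yaml(path, data)


# Usage (paths and values are illustrative only):
# update_stage_entry("dvc.yaml", "train", {"cmd": "python train.py"})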