def dump(self):
    """Serialize this stage to its DVC file and register it with SCM.

    When the original file text is available, comments and formatting are
    preserved by merging the fresh state into a ruamel-parsed structure.
    """
    path = self.path
    self._check_dvc_filename(path)
    logger.debug(
        "Saving information to '{file}'.".format(file=relpath(path))
    )
    state = self.dumpd()
    # The fast loader strips comments and formatting. To retain them on
    # update: reparse the original text with the slow-but-smart ruamel
    # parser, apply our changes to that structure, and serialize it.
    if self._stage_text is not None:
        merged = parse_stage_for_update(self._stage_text, path)
        # Stage ignores "meta" entirely, so .dumpd() never emits it;
        # copy any existing value over verbatim.
        if "meta" in merged:
            state["meta"] = merged["meta"]
        apply_diff(state, merged)
        state = merged
    dump_stage_file(path, state)
    self.repo.scm.track_file(relpath(path))
def dump(self, stage, **kwargs):
    """Write `stage`'s lock entry into the lock file and SCM-track it.

    Existing entries are preserved: the file is reparsed with the
    formatting-preserving parser and only this stage's entry is updated.
    """
    entry = serialize.to_lockfile(stage)
    if self.exists():
        with self.repo.tree.open(self.path, "r") as fobj:
            data = parse_stage_for_update(fobj.read(), self.path)
    else:
        # No lock file yet: start from this stage's entry and touch the
        # file so subsequent opens succeed.
        data = entry
        open(self.path, "w+").close()
    data.update(entry)
    dump_stage_file(self.path, data)
    self.repo.scm.track_file(relpath(self.path))
def to_single_stage_file(stage: "Stage"):
    """Build the dict to be written to a single-stage DVC file.

    If the on-disk text of the stage is still available, comments and
    formatting from it are preserved in the returned structure.
    """
    fresh = stage.dumpd()
    # Loading uses a fast parser that drops comments and formatting. To
    # keep them across an update we reparse the original text with the
    # slower ruamel parser, apply our changes onto that structure, and
    # return it instead of the bare dump.
    if stage._stage_text is None:
        return fresh
    preserved = parse_stage_for_update(stage._stage_text, stage.path)
    # Stage never touches "meta", so .dumpd() omits it; carry any
    # existing value through unchanged.
    if "meta" in preserved:
        fresh["meta"] = preserved["meta"]
    apply_diff(fresh, preserved)
    return preserved
def _dump_pipeline_file(self, stage):
    """Insert or update `stage`'s entry in the pipeline file, then track it."""
    if self.exists():
        with open(self.path) as fobj:
            data = parse_stage_for_update(fobj.read(), self.path)
    else:
        data = {}
        # Create an empty file so later operations can open it.
        open(self.path, "w+").close()
    data["stages"] = data.get("stages", {})
    stage_data = serialize.to_pipeline_file(stage)
    existing = data["stages"].get(stage.name)
    if existing:
        # Diff the new entry onto the parsed one in place so ruamel keeps
        # the original comments and formatting.
        apply_diff(stage_data[stage.name], existing)
    else:
        data["stages"].update(stage_data)
    dump_stage_file(self.path, data)
    self.repo.scm.track_file(relpath(self.path))
def dump(self, stage, **kwargs):
    """Write `stage`'s lock entry; log and SCM-track only when it changed."""
    stage_data = serialize.to_lockfile(stage)
    if self.exists():
        with self.repo.tree.open(self.path, "r") as fobj:
            data = parse_stage_for_update(fobj.read(), self.path)
        # Only this stage's entry matters for change detection.
        modified = data.get(stage.name, {}) != stage_data.get(
            stage.name, {}
        )
        if modified:
            logger.info("Updating lock file '%s'", self.relpath)
    else:
        modified = True
        logger.info("Generating lock file '%s'", self.relpath)
        data = stage_data
        # Touch the file so it exists before we serialize into it.
        open(self.path, "w+").close()
    data.update(stage_data)
    dump_stage_file(self.path, data)
    if modified:
        self.repo.scm.track_file(self.relpath)
def read_lock_file(file=PIPELINE_LOCK):
    """Read and parse `file` (the pipeline lock file by default).

    Returns the parsed structure; the parser is expected to yield an
    OrderedDict, which is asserted before returning.
    """
    with open(file) as fobj:
        raw_text = fobj.read()
    parsed = parse_stage_for_update(raw_text, file)
    assert isinstance(parsed, OrderedDict)
    return parsed