def test_module_save_and_load_roundtrip(self, basic_object, pickle_only, compress_save_file):
    old_obj = basic_object(from_config=True)
    with tempfile.TemporaryDirectory() as root_path:
        path = os.path.join(root_path, 'savefile.flambe')
        save(old_obj, path, compress=compress_save_file, pickle_only=pickle_only)
        # save() appends a suffix depending on the chosen format
        if pickle_only:
            path += '.pkl'
        if compress_save_file:
            path += '.tar.gz'
        new_obj = load(path)
    old_state = old_obj.get_state()
    new_state = new_obj.get_state()
    check_mapping_equivalence(new_state, old_state)
    check_mapping_equivalence(old_state._metadata, new_state._metadata, exclude_config=False)
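# A minimal sketch of the pytest fixtures the roundtrip test relies on. The
# names `pickle_only` and `compress_save_file` are assumed to be defined
# elsewhere in this suite; boolean parametrization as below is one plausible
# way to cover all four save-format combinations per test.
import pytest

@pytest.fixture(params=[True, False])
def pickle_only(request):
    # Run each test once with single-file pickling and once without
    return request.param

@pytest.fixture(params=[True, False])
def compress_save_file(request):
    # Run each test once with tar.gz compression and once without
    return request.param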
def test_module_save_requirements_file(self, mock_freeze, compress_save_file, basic_object):
    mock_freeze.return_value = ['pkgA==1.2.3', 'pkgB']
    old_obj = basic_object(from_config=True)
    with tempfile.TemporaryDirectory() as root_path:
        path = os.path.join(root_path, 'savefile.flambe')
        save(old_obj, path, compress=compress_save_file, pickle_only=False)
        if compress_save_file:
            with tarfile.open(f"{path}.tar.gz", 'r:gz') as tar_gz:
                tar_gz.extractall(path=root_path)
        mock_freeze.assert_called_once()
        # The frozen package list should be written into the save directory
        assert os.path.exists(os.path.join(path, 'requirements.txt'))
        with open(os.path.join(path, 'requirements.txt'), 'r') as f:
            assert f.read() == 'pkgA==1.2.3\npkgB'
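# A hedged sketch of how `mock_freeze` could be supplied. The patch target
# below is hypothetical -- it should point at whatever helper the
# serialization module calls to enumerate installed packages, which may be
# named differently in the real codebase.
from unittest import mock

@pytest.fixture
def mock_freeze():
    with mock.patch('flambe.compile.serialization.freeze') as m:  # hypothetical target
        yield m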
def save_local(self, force) -> None:
    """Save an object locally.

    Parameters
    ----------
    force: bool
        Whether to use a non-empty folder or not

    """
    if (os.path.exists(self.destination) and
            os.listdir(self.destination) and not force):
        raise error.ParsingRunnableError(
            f"Destination {self.destination} folder is not empty. "
            "Use --force to force the usage of this folder or "
            "pick another destination."
        )
    flambe.save(self.compiled_component, self.destination, **self.serialization_args)
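# Usage sketch for the force flag, assuming a `saver` fixture (hypothetical)
# whose destination folder already contains files.
import pytest

def test_save_local_refuses_nonempty_destination(saver):
    # Without force, a non-empty destination is an error
    with pytest.raises(error.ParsingRunnableError):
        saver.save_local(force=False)
    # With force, the existing folder is reused
    saver.save_local(force=True)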
def save_s3(self, force) -> None:
    """Save an object to s3 using awscli.

    Parameters
    ----------
    force: bool
        Whether to use a non-empty bucket folder or not

    """
    url = urlparse(self.destination)
    if url.scheme != 's3' or url.netloc == '':
        raise error.ParsingRunnableError(
            "When uploading to s3, destination should be: "
            "s3://<bucket-name>[/path/to/dir]"
        )
    bucket_name = url.netloc
    s3 = self.get_boto_session().resource('s3')
    bucket = s3.Bucket(bucket_name)
    path = url.path[1:]  # Remove leading '/'
    for content in bucket.objects.all():
        if content.key.startswith(path) and not force:
            raise error.ParsingRunnableError(
                f"Destination {self.destination} is not empty. "
                "Use --force to force the usage of this bucket folder or "
                "pick another destination."
            )
    with tempfile.TemporaryDirectory() as tmpdirname:
        flambe.save(self.compiled_component, tmpdirname, **self.serialization_args)
        try:
            subprocess.check_output(
                f"aws s3 cp --recursive {tmpdirname} {self.destination}".split(),
                stderr=subprocess.STDOUT,
                universal_newlines=True
            )
        except subprocess.CalledProcessError as exc:
            logger.debug(exc.output)
            raise ValueError(
                "Error uploading artifacts to s3. "
                "Check logs for more information"
            )
        else:
            logger.info(cl.BL(f"Done uploading to {self.destination}"))
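# A minimal sketch of the `get_boto_session` helper used above, assuming
# boto3. Credentials and region come from the standard AWS provider chain
# (environment variables, ~/.aws/credentials, or an instance profile); the
# real implementation may configure the session differently.
import boto3

def get_boto_session(self):
    # boto3.Session() resolves credentials from the default provider chain
    return boto3.Session()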
def test_module_save_and_load_single_instance_appears_twice(self, make_classes_2):
    txt = """
!C
one: !A
  akw2: &theb !B
    bkw2: test
    bkw1: 1
  akw1: 8
two: !A
  akw1: 8  # Comment Here
  akw2: *theb
"""
    c = yaml.load(txt)()
    c.one.akw2.bkw1 = 6
    # The YAML anchor &theb should produce a single shared instance
    assert c.one.akw2 is c.two.akw2
    assert c.one.akw2.bkw1 == c.two.akw2.bkw1
    with tempfile.TemporaryDirectory() as path:
        save(c, path)
        state = load_state_from_file(path)  # Ensure the state file itself loads cleanly
        loaded_c = load(path)
    # Instance sharing must survive the save/load roundtrip
    assert loaded_c.one.akw2 is loaded_c.two.akw2
    assert loaded_c.one.akw2.bkw1 == loaded_c.two.akw2.bkw1
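# A hedged sketch of the classes `make_classes_2` might produce. The key
# assumption is that Component subclasses register a YAML tag matching the
# class name (!A, !B, !C), so the anchor &theb resolves to a shared instance;
# the import path and keyword defaults below are illustrative.
from flambe.compile import Component  # assumed import path

class B(Component):
    def __init__(self, bkw1=0, bkw2=''):
        super().__init__()
        self.bkw1 = bkw1
        self.bkw2 = bkw2

class A(Component):
    def __init__(self, akw1=0, akw2=None):
        super().__init__()
        self.akw1 = akw1
        self.akw2 = akw2

class C(Component):
    def __init__(self, one=None, two=None):
        super().__init__()
        self.one = one
        self.two = two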