def test_writing_obj_with_mtl(self):
    local_file = os.path.join(self.tmp_dir, "test_writing_obj_with_mtl.obj")
    m = obj.load(sc(self.obj_with_texure))
    obj.dump(m, local_file)
    self.assertTrue(s3.exists(os.path.splitext(local_file)[0] + '.mtl'))
    self.assertTrue(s3.exists(os.path.splitext(local_file)[0] + '.png'))
def test_s3_exists_return_false_if_with_unmatched_version_id(self):
    # exists() should return False when an unknown versionId is specified.
    unknown_version_id = '5elgojhtA8BGJerqfbciN78eU74SJ9mX'
    self.assertFalse(
        s3.exists(self.existing_versioned_remote_file, version_id=unknown_version_id))
def existing_remote_file(self):
    '''
    In some tests it is convenient to have a file already on s3; in
    others we need it not to be there (e.g. for clarity in the s3.ls
    test).
    '''
    uri = self.remote_file("FOO/A_preexisting_file.md")
    if not s3.exists(uri):
        s3.cp(self.local_file, uri)
    return uri
def test_s3_tmpdir(self):
    def fake_uuid():
        fake_uuid.counter += 1
        return "FAKE-UUID-%d" % fake_uuid.counter
    fake_uuid.counter = 0
    self.assertEqual(
        s3.path.gettmpdir(bucket=TEST_BUCKET, prefix=self.s3_path, uuid_generator=fake_uuid),
        's3://%s/%sFAKE-UUID-1/' % (TEST_BUCKET, self.s3_path))
    self.assertEqual(
        len(list(s3.ls('s3://%s/%sFAKE-UUID-1' % (TEST_BUCKET, self.s3_path)))), 1)
    self.assertTrue(
        s3.exists('s3://%s/%sFAKE-UUID-1/.tempdir' % (TEST_BUCKET, self.s3_path)))
    self.assertEqual(
        s3.path.gettmpdir(bucket=TEST_BUCKET, prefix=self.s3_path, uuid_generator=fake_uuid),
        's3://%s/%sFAKE-UUID-2/' % (TEST_BUCKET, self.s3_path))
    self.assertEqual(
        len(list(s3.ls('s3://%s/%sFAKE-UUID-2' % (TEST_BUCKET, self.s3_path)))), 1)
    self.assertTrue(
        s3.exists('s3://%s/%sFAKE-UUID-2/.tempdir' % (TEST_BUCKET, self.s3_path)))
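# The test above injects a deterministic uuid_generator so the generated
# tmpdir paths are predictable. A minimal sketch of the generator a real
# caller might pass instead -- an assumption for illustration; the default
# generator baiji uses is not shown in this section:
import uuid

def random_uuid_generator():
    # A fresh, collision-resistant directory-name component per call.
    return uuid.uuid4().hex

# s3.path.gettmpdir(bucket=TEST_BUCKET, prefix='scratch/',
#                   uuid_generator=random_uuid_generator)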
def main(self, prefix):
    from baiji.util.console import LabeledSpinner
    if s3.path.islocal(prefix):
        raise ValueError("restore command only works on s3")
    spin = LabeledSpinner()
    for key in s3.ls(prefix, return_full_urls=True, require_s3_scheme=True, list_versions=True):
        if not s3.exists(key):
            spin.drop("Restoring deleted file {}".format(key))
            s3.restore(key)
        else:
            spin.spin(key)
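# A minimal usage sketch for the restore command above (the command-class
# name is hypothetical; only `main` is shown in this section):
#
#     RestoreCommand().main('s3://my-versioned-bucket/some/prefix/')
#
# Keys whose latest version is a delete marker fail s3.exists() and get
# restored; live keys just advance the spinner.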
def existing_versioned_remote_file(self):
    # Use a hardcoded path for the versioned test file on S3 to avoid
    # bookkeeping. The current tests won't make versioned copies of the
    # file; the remote object will either be deleted (and overwritten
    # later) or downloaded locally.
    uri = 's3://{}/FOO/A_preexisting_file.md'.format(VERSIONED_TEST_BUCKET)
    if not s3.exists(uri):
        s3.cp(self.local_file, uri)
    return uri
def test_s3_exists_retries_if_not_found_at_first(self, mock_lookup):
    import warnings
    from baiji.exceptions import EventualConsistencyWarning
    mock_key = "all_we_care_is_that_it's not None"
    mock_lookup.side_effect = [None, None, mock_key]
    with warnings.catch_warnings(record=True) as w:
        # Cause all warnings to always be triggered.
        warnings.simplefilter("always")
        self.assertTrue(s3.exists('s3://foo'))
        # Verify the warning was triggered.
        self.assertEqual(len(w), 1)
        self.assertIs(w[-1].category, EventualConsistencyWarning)
        self.assertEqual(
            str(w[-1].message),
            "S3 is behaving in an eventually consistent way in s3.exists(s3://foo) -- it took 3 attempts to locate the key")
    self.assertEqual(mock_lookup.call_count, 3)
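# A minimal sketch of the retry behavior this test exercises -- an
# assumption about what s3.exists does internally, not baiji's actual
# implementation. The retries_allowed name is borrowed from the
# s3.exists(key, retries_allowed=...) call that appears later in this
# section.
import warnings
from baiji.exceptions import EventualConsistencyWarning

def exists_with_retries(lookup, uri, retries_allowed=3):
    # lookup returns the key, or None if S3 hasn't surfaced it yet.
    for attempt in range(1, retries_allowed + 1):
        if lookup(uri) is not None:
            if attempt > 1:
                warnings.warn(
                    "S3 is behaving in an eventually consistent way in "
                    "s3.exists({}) -- it took {} attempts to locate the key".format(uri, attempt),
                    EventualConsistencyWarning)
            return True
    return False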
def uri(self, path, version=None, allow_local=True, suffixes=None):
    '''
    Default version is the manifest version.
    '''
    path = self.normalize_path(path)
    if version is None:
        version = self.manifest_version(path)
    if self.version_number_is_valid(version):
        base_path, ext = os.path.splitext(path)
        suffixes = '.' + '.'.join(suffixes) if suffixes is not None and len(suffixes) > 0 else ''
        return 's3://' + self.bucket + base_path + '.' + version + suffixes + ext
    elif allow_local and s3.exists(version):
        # version here is a local or s3 path
        return version
    else:
        raise self.KeyNotFound("File not found: {}".format(version))
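# A hypothetical worked example of the versioned-path construction above:
# with bucket 'my-bucket', path '/foo/bar.json', version '1.2.5', and
# suffixes=['a', 'b'], the return value assembles as
#
#     's3://' + 'my-bucket' + '/foo/bar' + '.1.2.5' + '.a.b' + '.json'
#
# i.e. 's3://my-bucket/foo/bar.1.2.5.a.b.json'.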
def load_landmarks(filename):
    import re
    from baiji import s3
    from baiji.serialization import json, pickle, yaml
    from lace.serialization import lmrk, meshlab_pickedpoints
    if not s3.exists(filename):
        raise ValueError("Landmark file %s not found" % filename)
    if re.search(r"\.ya?ml$", filename):
        return yaml.load(filename)
    elif re.search(r"\.json$", filename):
        return json.load(filename)
    elif re.search(r"\.pkl$", filename):
        return pickle.load(filename)
    elif re.search(r"\.lmrk$", filename):
        return lmrk.load(filename)
    elif re.search(r"\.pp$", filename):
        return meshlab_pickedpoints.load(filename)
    else:
        raise ValueError("Landmark file %s is of unknown format" % filename)
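# A minimal usage sketch (paths hypothetical): dispatch is purely on the
# file extension, and the file may live locally or on s3.
#
#     landmarks = load_landmarks('s3://my-bucket/subject/landmarks.yml')
#     landmarks = load_landmarks('/tmp/scan_landmarks.pp')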
def main(self, key):
    if not s3.exists(key, retries_allowed=self.retries):
        return -1
def _load(fd, mesh=None):
    from collections import OrderedDict
    from baiji import s3
    from lace.mesh import Mesh
    from lace.cache import sc
    import lace.serialization.obj.objutils as objutils  # pylint: disable=no-name-in-module
    v, vt, vn, vc, f, ft, fn, mtl_path, landm, segm = objutils.read(fd.name)
    if not mesh:
        mesh = Mesh()
    if v.size != 0:
        mesh.v = v
    if f.size != 0:
        mesh.f = f
    if vn.size != 0:
        mesh.vn = vn
    if vt.size != 0:
        mesh.vt = vt
    if vc.size != 0:
        mesh.vc = vc
    if fn.size != 0:
        mesh.fn = fn
    if ft.size != 0:
        mesh.ft = ft
    if segm:
        mesh.segm = OrderedDict([(k, v if isinstance(v, list) else v.tolist()) for k, v in segm.items()])

    def path_relative_to_mesh(filename):
        # The OBJ file we're loading may have come from a local path, an s3
        # url, or a file cached by sc. Since OBJ defines materials and
        # texture files with paths relative to the OBJ itself, we need to
        # cope with the various possibilities, and if it's a cached file,
        # make sure that the material and texture have been downloaded as
        # well.
        #
        # If an absolute path is given and the file is missing, try looking
        # in the same directory as the mesh; this recovers the most common
        # intention when an absolute path is used.
        #
        # NB: We do not support loading material & texture info from objs
        # read from filelike objects without a location on the filesystem;
        # what would the relative file names mean in that case, anyway?
        # (Unless we're given a valid absolute path, in which case go for it.)
        import os
        import re
        # The second term here lets us detect windows absolute paths when
        # we're running on posix.
        if filename == os.path.abspath(filename) or re.match(r'^.\:(\\|/)', filename):
            if s3.exists(filename):
                return filename
            else:
                filename = s3.path.basename(filename)
        if hasattr(fd, 'remotename'):
            mesh_path = fd.remotename
        elif hasattr(fd, 'name'):
            mesh_path = fd.name
        else:
            return None
        path = s3.path.join(s3.path.dirname(mesh_path), filename)
        if sc.is_cachefile(mesh_path):
            try:
                return sc(path)
            except s3.KeyNotFound:
                return None
        return path

    mesh.materials_filepath = None
    if mtl_path:
        materials_filepath = path_relative_to_mesh(mtl_path.strip())
        if materials_filepath and s3.exists(materials_filepath):
            with s3.open(materials_filepath, 'r') as f:
                mesh.materials_file = f.readlines()
            mesh.materials_filepath = materials_filepath
    if hasattr(mesh, 'materials_file'):
        mesh.texture_filepaths = {
            line.split(None, 1)[0].strip(): path_relative_to_mesh(line.split(None, 1)[1].strip())
            for line in mesh.materials_file if line.startswith('map_K')
        }
        if 'map_Ka' in mesh.texture_filepaths:
            mesh.texture_filepath = mesh.texture_filepaths['map_Ka']
        elif 'map_Kd' in mesh.texture_filepaths:
            mesh.texture_filepath = mesh.texture_filepaths['map_Kd']
    if landm:
        mesh.landm = landm
    return mesh
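# A minimal usage sketch (path hypothetical): _load is reached through the
# serialization front end, which opens the file and passes the file-like
# object in, as in the obj.load call in the test at the top of this
# section.
#
#     from lace.serialization import obj
#     mesh = obj.load('s3://my-bucket/meshes/scan.obj')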
def test_exists_returns_false_for_nonexistent_bucket(self):
    self.assertFalse(s3.exists('s3://foo-bar-baz-please-this-is-not-a-bucket-amirite'))
def test_s3_exists_return_false_if_the_file_never_shows_up(self, mock_lookup):
    mock_lookup.return_value = None
    self.assertFalse(s3.exists('s3://foo'))
    self.assertEqual(mock_lookup.call_count, 3)
def test_s3_exists_does_not_retry_if_found_immediately(self, mock_lookup):
    mock_key = "all_we_care_is_that_it's not None"
    mock_lookup.return_value = mock_key
    self.assertTrue(s3.exists('s3://foo'))
    self.assertEqual(mock_lookup.call_count, 1)
def assert_s3_exists(self, path):
    self.assertTrue(self.retriable_s3_call(lambda: s3.exists(path)))
def test_uploading_a_directory_without_slash(self):
    from baiji.util.shutillib import mkdir_p
    mkdir_p(os.path.join(self.tmp_dir, "foo"))
    with self.assertRaises(ValueError):
        s3.cp(os.path.join(self.tmp_dir, "foo"), self.remote_file("foo"))
    self.assertFalse(s3.exists(self.remote_file("")))
def assert_s3_does_not_exist(self, path):
    self.assertFalse(self.retriable_s3_call(lambda: s3.exists(path)))
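# retriable_s3_call is used by the two asserts above but is not shown in
# this section. A minimal standalone sketch of what such a helper might
# look like -- an assumption, not the actual test-suite implementation:
import time

def retriable_s3_call(fn, attempts=3, delay=1.0):
    # Give an eventually consistent call a few chances to return a truthy
    # result before settling on the final answer.
    for _ in range(attempts - 1):
        result = fn()
        if result:
            return result
        time.sleep(delay)
    return fn()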
def _dump(f, obj, flip_faces=False, ungroup=False, comments=None, split_normals=False, write_mtl=True):  # pylint: disable=redefined-outer-name
    '''
    write_mtl: When True and the mesh has a texture, include a mtllib
        reference in the .obj and write a .mtl alongside it.
    '''
    import os
    import numpy as np
    from baiji import s3
    ff = -1 if flip_faces else 1

    def write_face_to_obj_file(obj, faces, face_index, obj_file):
        vertex_indices = faces[face_index][::ff] + 1
        write_normals = obj.fn is not None or (obj.vn is not None and obj.vn.shape == obj.v.shape)
        write_texture = obj.ft is not None and obj.vt is not None
        if write_normals and obj.fn is not None:
            normal_indices = obj.fn[face_index][::ff] + 1
            assert len(normal_indices) == len(vertex_indices)
        elif write_normals:
            # fn is unspecified but there are per-vertex normals; assume
            # their ordering matches v.
            normal_indices = faces[face_index][::ff] + 1
        if write_texture:
            texture_indices = obj.ft[face_index][::ff] + 1
            assert len(texture_indices) == len(vertex_indices)
        # Valid obj face lines are: v, v/vt, v//vn, v/vt/vn
        if write_normals and write_texture:
            pattern = '%d/%d/%d'
            value = tuple(np.array([vertex_indices, texture_indices, normal_indices]).T.flatten())
        elif write_normals:
            pattern = '%d//%d'
            value = tuple(np.array([vertex_indices, normal_indices]).T.flatten())
        elif write_texture:
            pattern = '%d/%d'
            value = tuple(np.array([vertex_indices, texture_indices]).T.flatten())
        else:
            pattern = '%d'
            value = tuple(vertex_indices)
        obj_file.write(('f ' + ' '.join([pattern] * len(vertex_indices)) + '\n') % value)

    if comments is not None:
        if isinstance(comments, basestring):
            comments = [comments]
        for comment in comments:
            for line in comment.split("\n"):
                f.write("# %s\n" % line)
    if write_mtl and hasattr(obj, 'texture_filepath') and obj.texture_filepath is not None:
        save_to = s3.path.dirname(f.name)
        mtl_name = os.path.splitext(s3.path.basename(f.name))[0]
        mtl_filename = mtl_name + '.mtl'
        f.write('mtllib %s\n' % mtl_filename)
        f.write('usemtl %s\n' % mtl_name)
        texture_filename = mtl_name + os.path.splitext(obj.texture_filepath)[1]
        if not s3.exists(s3.path.join(save_to, texture_filename)):
            s3.cp(obj.texture_filepath, s3.path.join(save_to, texture_filename))
        obj.write_mtl(s3.path.join(save_to, mtl_filename), mtl_name, texture_filename)
    if obj.vc is not None:
        for r, c in zip(obj.v, obj.vc):
            f.write('v %f %f %f %f %f %f\n' % (r[0], r[1], r[2], c[0], c[1], c[2]))
    elif obj.v is not None:
        for r in obj.v:
            f.write('v %f %f %f\n' % (r[0], r[1], r[2]))
    if obj.vn is not None:
        if split_normals:
            for vn_idx in obj.fn:
                for i in (0, 1, 2):
                    r = obj.vn[vn_idx[i]]
                    f.write('vn %f %f %f\n' % (r[0], r[1], r[2]))
        else:
            for r in obj.vn:
                f.write('vn %f %f %f\n' % (r[0], r[1], r[2]))
    if obj.ft is not None and obj.vt is not None:
        for r in obj.vt:
            if len(r) == 3:
                f.write('vt %f %f %f\n' % (r[0], r[1], r[2]))
            else:
                f.write('vt %f %f\n' % (r[0], r[1]))
    if obj.f4 is not None:
        faces = obj.f4
    elif obj.f is not None:
        faces = obj.f
    else:
        faces = None
    if obj.segm is not None and not ungroup:
        if faces is not None:
            # An array of strings.
            group_names = np.array(obj.segm.keys())
            # A 2d array of booleans indicating which face is in which group.
            group_mask = np.zeros((len(group_names), len(faces)), dtype=bool)
            for i, segm_faces in enumerate(obj.segm.itervalues()):
                group_mask[i][segm_faces] = True
            # In an OBJ file, "g" changes the current state. This is a
            # slice of group_mask that represents the current state.
            current_group_mask = np.zeros((len(group_names),), dtype=bool)
            for face_index in range(len(faces)):
                # If the group has changed from the previous face, write
                # the group entry.
                this_group_mask = group_mask[:, face_index]
                if any(current_group_mask != this_group_mask):
                    current_group_mask = this_group_mask
                    f.write('g %s\n' % ' '.join(group_names[current_group_mask]))
                write_face_to_obj_file(obj, faces, face_index, f)
    else:
        if faces is not None:
            for face_index in range(len(faces)):
                write_face_to_obj_file(obj, faces, face_index, f)
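# A hypothetical fragment of what _dump emits for a mesh with texture
# coordinates, normals, and one segment (indices are 1-based, faces use
# the v/vt/vn form):
#
#     v 0.000000 0.000000 0.000000
#     vn 0.000000 0.000000 1.000000
#     vt 0.500000 0.500000
#     g head
#     f 1/1/1 2/2/2 3/3/3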