def test_s3_open_exclusive_write_local_file_throws_error_for_double_open(self):
    """A second 'x'-mode open of the same local path raises KeyExists."""
    path = os.path.join(self.tmp_dir, "exclusive_write_test_local_3")
    self.assert_s3_does_not_exist(path)
    with s3.open(path, 'x'):
        # The first open created the file, so the exclusive re-open must fail.
        with self.assertRaises(s3.KeyExists):
            with s3.open(path, 'x'):
                pass
def test_s3_open_exclusive_write_local_file_throws_error_for_double_open(self):
    """A second 'x'-mode open of the same local path raises KeyExists."""
    path = os.path.join(self.tmp_dir, "exclusive_write_test_local_3")
    self.assert_s3_does_not_exist(path)
    with s3.open(path, 'x'):
        # The first open created the file, so the exclusive re-open must fail.
        with self.assertRaises(s3.KeyExists):
            with s3.open(path, 'x'):
                pass
def assertFilesEqual(self, candidate_path, truth_path):
    """Assert that two (local or remote) files have byte-identical contents."""
    from baiji import s3
    contents = []
    # Read candidate first, then truth, matching the argument order below.
    for source in (candidate_path, truth_path):
        with s3.open(source, 'rb') as fp:
            contents.append(fp.read())
    self.assertTextEqual(contents[0], contents[1], candidate_path, truth_path)
def test_s3_open_write_does_not_upload_if_exception_raised(self, upload):
    """No upload happens when the 'w'-mode context exits via an exception."""
    target = self.remote_file("write_test_1")
    with self.assertRaises(AttributeError):
        with s3.open(target, 'w'):
            raise AttributeError()
    self.assertFalse(upload.called)
    # Sanity check: a clean exit does trigger the upload.
    with s3.open(target, 'w'):
        pass
    self.assertTrue(upload.called)
def test_s3_open_write_does_not_upload_if_exception_raised(self, upload):
    """No upload happens when the 'w'-mode context exits via an exception."""
    target = self.remote_file("write_test_1")
    with self.assertRaises(AttributeError):
        with s3.open(target, 'w'):
            raise AttributeError()
    self.assertFalse(upload.called)
    # Sanity check: a clean exit does trigger the upload.
    with s3.open(target, 'w'):
        pass
    self.assertTrue(upload.called)
def test_s3_open_exclusive_write_does_not_raise_error_for_nonexistent_local_file(self):
    """'x' mode on a local path that does not exist yet creates it."""
    target = os.path.join(self.tmp_dir, "test_s3_open_exclusive_write_does_not_raise_error_for_nonexistent_local_file")
    self.assert_s3_does_not_exist(target)
    payload = 'FOO!\n'
    with s3.open(target, 'x') as f:
        f.write(payload)
    self.assert_s3_exists(target)
def test_s3_open_read_local_file_without_context_manager(self):
    """s3.open works as a plain file object when not used with `with`."""
    self.assert_s3_exists(self.local_file)
    handle = s3.open(self.local_file, 'r')
    # For a local path, the handle exposes the original path as .name.
    self.assertEqual(handle.name, self.local_file)
    self.assertEqual(self.truth, handle.read())
    handle.close()
def test_s3_open_read_local_file_without_context_manager(self):
    """s3.open works as a plain file object when not used with `with`."""
    self.assert_s3_exists(self.local_file)
    handle = s3.open(self.local_file, 'r')
    # For a local path, the handle exposes the original path as .name.
    self.assertEqual(handle.name, self.local_file)
    self.assertEqual(self.truth, handle.read())
    handle.close()
def test_s3_open_exclusive_write_does_not_raise_error_for_nonexistent_remote_file(self):
    """'x' mode on a remote key that does not exist yet creates it."""
    target = self.remote_file(str(uuid.uuid4()))
    self.assert_s3_does_not_exist(target)
    payload = 'FOO!\n'
    with s3.open(target, 'x') as f:
        f.write(payload)
    self.assert_s3_exists(target)
def test_s3_open_exclusive_write_does_not_raise_error_for_nonexistent_remote_file(self):
    """'x' mode on a remote key that does not exist yet creates it."""
    target = self.remote_file(str(uuid.uuid4()))
    self.assert_s3_does_not_exist(target)
    payload = 'FOO!\n'
    with s3.open(target, 'x') as f:
        f.write(payload)
    self.assert_s3_exists(target)
def test_s3_open_exclusive_write_does_not_raise_error_for_nonexistent_local_file(self):
    """'x' mode on a local path that does not exist yet creates it."""
    target = os.path.join(self.tmp_dir, "test_s3_open_exclusive_write_does_not_raise_error_for_nonexistent_local_file")
    self.assert_s3_does_not_exist(target)
    payload = 'FOO!\n'
    with s3.open(target, 'x') as f:
        f.write(payload)
    self.assert_s3_exists(target)
def test_s3_open_read_remote_file_with_context_manager(self):
    """Remote reads go through a local temp file that is removed on exit."""
    key = self.remote_file("openable")
    self.assert_s3_exists(key)
    with s3.open(key, 'r') as f:
        staging_path = f.name
        self.assertEqual(self.truth, f.read())
    # The backing temp file must be cleaned up once the context exits.
    self.assertFalse(os.path.exists(staging_path))
def test_s3_open_read_versioned_remote_file(self):
    """A specific version_id can be opened; its temp file is removed on exit."""
    target = self.existing_versioned_remote_file
    current_version = s3.info(target)['version_id']
    with s3.open(target, 'r', version_id=current_version) as f:
        staging_path = f.name
    self.assertFalse(os.path.exists(staging_path))
def test_s3_open_read_versioned_remote_file(self):
    """A specific version_id can be opened; its temp file is removed on exit."""
    target = self.existing_versioned_remote_file
    current_version = s3.info(target)['version_id']
    with s3.open(target, 'r', version_id=current_version) as f:
        staging_path = f.name
    self.assertFalse(os.path.exists(staging_path))
def test_s3_open_read_remote_file_with_context_manager(self):
    """Remote reads go through a local temp file that is removed on exit."""
    key = self.remote_file("openable")
    self.assert_s3_exists(key)
    with s3.open(key, 'r') as f:
        staging_path = f.name
        self.assertEqual(self.truth, f.read())
    # The backing temp file must be cleaned up once the context exits.
    self.assertFalse(os.path.exists(staging_path))
def test_s3_open_write_update_does_not_raise_error_for_nonexistent_local_file(self):
    """'w+' on a fresh local path supports write, seek back, and read."""
    target = os.path.join(self.tmp_dir, "test_s3_open_write_update_does_not_raise_error_for_nonexistent_local_file")
    self.assert_s3_does_not_exist(target)
    payload = 'FOO!\n'
    with s3.open(target, 'w+') as f:
        f.write(payload)
        f.seek(0)
        self.assertEqual(payload, f.read())
    self.assert_s3_exists(target)
def test_s3_open_write_update_does_not_raise_error_for_nonexistent_local_file(self):
    """'w+' on a fresh local path supports write, seek back, and read."""
    target = os.path.join(self.tmp_dir, "test_s3_open_write_update_does_not_raise_error_for_nonexistent_local_file")
    self.assert_s3_does_not_exist(target)
    payload = 'FOO!\n'
    with s3.open(target, 'w+') as f:
        f.write(payload)
        f.seek(0)
        self.assertEqual(payload, f.read())
    self.assert_s3_exists(target)
def test_s3_open_write_local_file_creates_parent_dirs(self):
    """Writing to a local path whose directory is missing creates the directory."""
    parent_dir = os.path.join(self.tmp_dir, "subdir")
    target = os.path.join(parent_dir, "write_test_subdir")
    self.assert_s3_does_not_exist(target)
    self.assert_s3_does_not_exist(parent_dir)
    with s3.open(target, 'w') as f:
        f.write(self.truth)
    self.assert_s3_exists(target)
    self.assert_s3_exists(parent_dir)
def test_s3_open_write_local_file_creates_parent_dirs(self):
    """Writing to a local path whose directory is missing creates the directory."""
    parent_dir = os.path.join(self.tmp_dir, "subdir")
    target = os.path.join(parent_dir, "write_test_subdir")
    self.assert_s3_does_not_exist(target)
    self.assert_s3_does_not_exist(parent_dir)
    with s3.open(target, 'w') as f:
        f.write(self.truth)
    self.assert_s3_exists(target)
    self.assert_s3_exists(parent_dir)
def test_s3_open_write_local_file_with_context_manager(self):
    """'w' mode on a local path writes directly to that path."""
    target = os.path.join(self.tmp_dir, "write_test_local_1")
    self.assert_s3_does_not_exist(target)
    with s3.open(target, 'w') as f:
        self.assertEqual(f.name, target)
        f.write(self.truth)
    self.assert_s3_exists(target)
    # Re-read with the plain builtin to confirm the content round-tripped.
    with open(target) as f:
        self.assertEqual(self.truth, f.read())
def write_mtl(path, material_name, texture_name):
    """Write a minimal Wavefront .mtl material file.

    Args:
        path: local path or s3 url to write the .mtl file to.
        material_name: name recorded in the `newmtl` statement.
        texture_name: image filename referenced by the map_K* statements.
    """
    from baiji import s3
    with s3.open(path, 'w') as f:
        f.write('newmtl %s\n' % material_name)
        # Phong coefficients copied from another obj, no idea about what they do.
        # MTL keywords are case-sensitive: the spec defines Ka/Kd/Ks, so the
        # previous lowercase ka/kd/ks lines were invalid and ignored by
        # conforming parsers.
        f.write('Ka 0.329412 0.223529 0.027451\n')
        f.write('Kd 0.780392 0.568627 0.113725\n')
        f.write('Ks 0.992157 0.941176 0.807843\n')
        f.write('illum 0\n')
        f.write('map_Ka %s\n' % texture_name)
        f.write('map_Kd %s\n' % texture_name)
        f.write('map_Ks %s\n' % texture_name)
def test_s3_open_write_local_file_with_context_manager(self):
    """'w' mode on a local path writes directly to that path."""
    target = os.path.join(self.tmp_dir, "write_test_local_1")
    self.assert_s3_does_not_exist(target)
    with s3.open(target, 'w') as f:
        self.assertEqual(f.name, target)
        f.write(self.truth)
    self.assert_s3_exists(target)
    # Re-read with the plain builtin to confirm the content round-tripped.
    with open(target) as f:
        self.assertEqual(self.truth, f.read())
def ensure_file_open_and_call(path_or_fp, fn, mode='r', *args, **kwargs):
    """Invoke fn(f, *args, **kwargs) where f is an open file object.

    A string argument is treated as a path and opened via baiji.s3 (closed
    automatically afterwards); a file-like argument is passed through as-is
    and flushed, but not closed.
    """
    from baiji import s3
    if isinstance(path_or_fp, basestring):  # py2 string check
        with s3.open(path_or_fp, mode) as f:
            return fn(f, *args, **kwargs)
    looks_file_like = hasattr(path_or_fp, 'read') and hasattr(path_or_fp, 'seek')
    if isinstance(path_or_fp, file) or looks_file_like:
        result = fn(path_or_fp, *args, **kwargs)
        if hasattr(path_or_fp, 'flush'):
            path_or_fp.flush()
        return result
    raise ValueError('Object {} does not appear to be a path or a file like object'.format(path_or_fp))
def test_s3_open_write_remote_file_with_context_manager(self):
    """'w' on a remote key stages to a temp file, uploads on exit, and cleans up."""
    remote_key = self.remote_file("write_test_1")
    local_copy = os.path.join(self.tmp_dir, "write_test_1")
    self.assert_s3_does_not_exist(remote_key)
    with s3.open(remote_key, 'w') as f:
        staging_path = f.name
        f.write(self.truth)
    self.assertFalse(os.path.exists(staging_path))
    self.assert_s3_exists(remote_key)
    # Download and confirm the uploaded contents round-tripped.
    s3.cp(remote_key, local_copy)
    with open(local_copy) as f:
        self.assertEqual(self.truth, f.read())
def test_s3_open_write_remote_file_with_context_manager(self):
    """'w' on a remote key stages to a temp file, uploads on exit, and cleans up."""
    remote_key = self.remote_file("write_test_1")
    local_copy = os.path.join(self.tmp_dir, "write_test_1")
    self.assert_s3_does_not_exist(remote_key)
    with s3.open(remote_key, 'w') as f:
        staging_path = f.name
        f.write(self.truth)
    self.assertFalse(os.path.exists(staging_path))
    self.assert_s3_exists(remote_key)
    # Download and confirm the uploaded contents round-tripped.
    s3.cp(remote_key, local_copy)
    with open(local_copy) as f:
        self.assertEqual(self.truth, f.read())
def ensure_file_open_and_call(path_or_fp, fn, mode='r', *args, **kwargs):
    """Invoke fn(f, *args, **kwargs) where f is an open file object.

    A string argument is treated as a path and opened via baiji.s3 (closed
    automatically afterwards); a file-like argument is passed through as-is
    and flushed, but not closed.
    """
    import six
    from baiji import s3
    if isinstance(path_or_fp, six.string_types):
        with s3.open(path_or_fp, mode) as f:
            return fn(f, *args, **kwargs)
    # `file_types` is defined at module level (outside this function).
    looks_file_like = hasattr(path_or_fp, 'read') and hasattr(path_or_fp, 'seek')
    if isinstance(path_or_fp, file_types) or looks_file_like:
        result = fn(path_or_fp, *args, **kwargs)
        if hasattr(path_or_fp, 'flush'):
            path_or_fp.flush()
        return result
    raise ValueError(
        'Object {} does not appear to be a path or a file like object'.format(path_or_fp))
def reload_texture_image(self):
    """(Re)load ``self._texture_image`` from ``self.texture_filepath``.

    The file is fetched through baiji.s3 (so the path may be local or an
    s3 url) and read with OpenCV. If the loaded image is not square, or its
    side is not one of the supported power-of-two texture sizes, it is
    resized to a square of the closest supported size.
    Sets ``self._texture_image`` to None when there is no texture filepath.
    """
    import cv2
    import numpy as np
    from baiji import s3
    if not self.texture_filepath:
        self._texture_image = None
    else:
        # image is loaded as image_height-by-image_width-by-3 array in BGR color order.
        with s3.open(self.texture_filepath) as f:
            self._texture_image = cv2.imread(f.name)
        texture_sizes = [32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384]
        # cv2.imread returns None on a failed read; skip resizing in that case.
        if self._texture_image is not None:
            h, w = self._texture_image.shape[:2]
            if h != w or h not in texture_sizes or w not in texture_sizes:
                # Pick the supported size closest to the larger image dimension.
                closest_texture_size_idx = (np.abs(
                    np.array(texture_sizes) - max(self._texture_image.shape))).argmin()
                sz = texture_sizes[closest_texture_size_idx]
                self._texture_image = cv2.resize(self._texture_image, (sz, sz))
def test_s3_open_exclusive_write_remote_file_throws_error_for_double_open(self):
    """A second 'x'-mode open of the same remote key raises KeyExists."""
    key = self.remote_file("exclusive_write_test_3")
    self.assert_s3_does_not_exist(key)
    with s3.open(key, 'x'):
        # The first open claimed the key, so the exclusive re-open must fail.
        with self.assertRaises(s3.KeyExists):
            with s3.open(key, 'x'):
                pass
def test_s3_open_exclusive_write_local_file_throws_error_for_existing_file(self):
    """'x' mode refuses to open a local file that already exists."""
    existing = self.local_file
    self.assert_s3_exists(existing)
    with self.assertRaises(s3.KeyExists):
        s3.open(existing, 'x')
def test_s3_open_read_raises_error_for_nonexistent_remote_file(self):
    """Reading a remote key that does not exist raises KeyNotFound."""
    missing = self.remote_file(str(uuid.uuid4()))
    self.assert_s3_does_not_exist(missing)
    with self.assertRaises(s3.KeyNotFound):
        s3.open(missing, 'r')
def test_s3_open_read_raises_error_for_nonexistent_local_file(self):
    """Reading a local path that does not exist raises KeyNotFound."""
    missing = os.path.join(self.tmp_dir, "test_s3_open_read_raises_error_for_nonexistent_local_file")
    self.assert_s3_does_not_exist(missing)
    with self.assertRaises(s3.KeyNotFound):
        s3.open(missing, 'r')
def test_s3_open_local_underlying_error_raises_ioerror_with_errno_and_strerror(self):
    """Opening a directory for write surfaces the OS error as an IOError
    with errno and strerror populated."""
    import errno
    with self.assertRaises(IOError) as ctx:
        s3.open(os.getcwd(), 'w')
    # assertEqual, not the deprecated assertEquals alias.
    self.assertEqual(ctx.exception.errno, errno.EISDIR)
    self.assertIn('Is a directory:', ctx.exception.strerror)
def create_random_file_at(self, bases, path):
    """Write one shared random payload to `path` under every base in `bases`."""
    from .test_s3 import random_data
    payload = random_data()
    for base in bases:
        with s3.open(s3.path.join(base, path), 'w') as f:
            f.write(payload)
def _load(fd, mesh=None):
    """Populate (or create) a Mesh from an OBJ file descriptor.

    Reads geometry, material, landmark, and segmentation data via
    objutils.read(fd.name) and assigns each non-empty array onto the mesh.
    Material and texture paths from the OBJ are resolved relative to the
    mesh's own location (local path, s3 url, or sc cache file).

    Args:
        fd: an open file-like object with a .name (and possibly .remotename)
            attribute pointing at the OBJ on disk.
        mesh: optional existing Mesh to populate; a new Mesh is created when
            falsy.

    Returns:
        The populated Mesh.
    """
    from collections import OrderedDict
    from baiji import s3
    from lace.mesh import Mesh
    from lace.cache import sc
    import lace.serialization.obj.objutils as objutils  # pylint: disable=no-name-in-module
    v, vt, vn, vc, f, ft, fn, mtl_path, landm, segm = objutils.read(fd.name)
    if not mesh:
        mesh = Mesh()
    # Only assign attributes that the OBJ actually contained.
    if v.size != 0:
        mesh.v = v
    if f.size != 0:
        mesh.f = f
    if vn.size != 0:
        mesh.vn = vn
    if vt.size != 0:
        mesh.vt = vt
    if vc.size != 0:
        mesh.vc = vc
    if fn.size != 0:
        mesh.fn = fn
    if ft.size != 0:
        mesh.ft = ft
    if segm:
        # Normalize segmentation values to plain lists (they may be arrays).
        mesh.segm = OrderedDict([(k, v if isinstance(v, list) else v.tolist()) for k, v in segm.items()])
    def path_relative_to_mesh(filename):
        # The OBJ file we're loading may have come from a local path, an s3 url,
        # or a file cached by sc. Since OBJ defines materials and texture files
        # with paths relative to the OBJ itself, we need to cope with the various
        # possibilities and if it's a cached file make sure that the material and
        # texture have been downloaded as well.
        #
        # If an absolute path is given and the file is missing, try looking in the same directory;
        # this lets you find the most common intention when an abs path is used.
        #
        # NB: We do not support loading material & texture info from objs read
        # from filelike objects without a location on the filesystem; what would
        # the relative file names mean in that case, anyway? (unless we're given
        # a valid absolute path, in which case go for it)
        import os
        import re
        # The second term here let's us detect windows absolute paths when we're running on posix
        if filename == os.path.abspath(filename) or re.match(r'^.\:(\\|/)', filename):
            if s3.exists(filename):
                return filename
            else:
                # Absolute but missing: fall back to basename-relative lookup below.
                filename = s3.path.basename(filename)
        if hasattr(fd, 'remotename'):
            mesh_path = fd.remotename
        elif hasattr(fd, 'name'):
            mesh_path = fd.name
        else:
            # No filesystem location for the mesh; relative paths are meaningless.
            return None
        path = s3.path.join(s3.path.dirname(mesh_path), filename)
        if sc.is_cachefile(mesh_path):
            try:
                # Ensure the sibling file is downloaded into the cache too.
                return sc(path)
            except s3.KeyNotFound:
                return None
        return path
    mesh.materials_filepath = None
    if mtl_path:
        materials_filepath = path_relative_to_mesh(mtl_path.strip())
        if materials_filepath and s3.exists(materials_filepath):
            with s3.open(materials_filepath, 'r') as f:
                mesh.materials_file = f.readlines()
            mesh.materials_filepath = materials_filepath
    if hasattr(mesh, 'materials_file'):
        # Map each map_K* keyword to its resolved texture path.
        mesh.texture_filepaths = {
            line.split(None, 1)[0].strip(): path_relative_to_mesh(line.split(None, 1)[1].strip())
            for line in mesh.materials_file if line.startswith('map_K')
        }
        # Prefer the ambient map, then the diffuse map, as the primary texture.
        if 'map_Ka' in mesh.texture_filepaths:
            mesh.texture_filepath = mesh.texture_filepaths['map_Ka']
        elif 'map_Kd' in mesh.texture_filepaths:
            mesh.texture_filepath = mesh.texture_filepaths['map_Kd']
    if landm:
        mesh.landm = landm
    return mesh
def test_s3_open_exclusive_write_remote_file_throws_error_for_existing_file(self):
    """'x' mode refuses to open a remote key that already exists."""
    existing = self.remote_file("openable")
    self.assert_s3_exists(existing)
    with self.assertRaises(s3.KeyExists):
        s3.open(existing, 'x')
def test_s3_open_read_raises_error_for_nonexistent_local_file(self):
    """Reading a local path that does not exist raises KeyNotFound."""
    missing = os.path.join(self.tmp_dir, "test_s3_open_read_raises_error_for_nonexistent_local_file")
    self.assert_s3_does_not_exist(missing)
    with self.assertRaises(s3.KeyNotFound):
        s3.open(missing, 'r')
def test_s3_open_write_update_raises_error_for_nonexistent_remote_file(self):
    """'w+' is not supported for remote keys and raises NotImplementedError."""
    missing = self.remote_file(str(uuid.uuid4()))
    self.assert_s3_does_not_exist(missing)
    with self.assertRaises(NotImplementedError):
        s3.open(missing, 'w+')
def test_s3_open_exclusive_write_remote_file_throws_error_for_double_open(self):
    """A second 'x'-mode open of the same remote key raises KeyExists."""
    key = self.remote_file("exclusive_write_test_3")
    self.assert_s3_does_not_exist(key)
    with s3.open(key, 'x'):
        # The first open claimed the key, so the exclusive re-open must fail.
        with self.assertRaises(s3.KeyExists):
            with s3.open(key, 'x'):
                pass
def test_s3_open_exclusive_write_local_file_throws_error_for_existing_file(self):
    """'x' mode refuses to open a local file that already exists."""
    existing = self.local_file
    self.assert_s3_exists(existing)
    with self.assertRaises(s3.KeyExists):
        s3.open(existing, 'x')
def test_s3_open_exclusive_write_remote_file_throws_error_for_existing_file(self):
    """'x' mode refuses to open a remote key that already exists."""
    existing = self.remote_file("openable")
    self.assert_s3_exists(existing)
    with self.assertRaises(s3.KeyExists):
        s3.open(existing, 'x')
def test_s3_open_local_underlying_error_raises_ioerror_with_errno_and_strerror(self):
    """Opening a directory for write surfaces the OS error as an IOError
    with errno and strerror populated."""
    import errno
    with self.assertRaises(IOError) as ctx:
        s3.open(os.getcwd(), 'w')
    # assertEqual, not the deprecated assertEquals alias.
    self.assertEqual(ctx.exception.errno, errno.EISDIR)
    self.assertIn('Is a directory:', ctx.exception.strerror)
def test_s3_open_write_calls_upload(self, upload):
    """The upload happens at context exit, not while the handle is open."""
    target = self.remote_file("write_test_1")
    with s3.open(target, 'w'):
        self.assertFalse(upload.called)
    self.assertTrue(upload.called)
def test_s3_open_write_calls_upload(self, upload):
    """The upload happens at context exit, not while the handle is open."""
    target = self.remote_file("write_test_1")
    with s3.open(target, 'w'):
        self.assertFalse(upload.called)
    self.assertTrue(upload.called)
def test_s3_open_read_versioned_remote_file_with_unkown_version_id_raise_key_not_found(self):
    """Requesting a version_id that does not exist raises KeyNotFound."""
    target = self.existing_versioned_remote_file
    bogus_version = '5elgojhtA8BGJerqfbciN78eU74SJ9mX'
    with self.assertRaises(s3.KeyNotFound):
        s3.open(target, 'r', version_id=bogus_version)
def test_s3_open_read_does_not_call_upload(self, upload):
    """Read-only opens must never trigger an upload."""
    readable = self.remote_file("openable")
    self.assert_s3_exists(readable)
    with s3.open(readable, 'r'):
        pass
    self.assertFalse(upload.called)
def test_s3_open_write_update_raises_error_for_nonexistent_remote_file(self):
    """'w+' is not supported for remote keys and raises NotImplementedError."""
    missing = self.remote_file(str(uuid.uuid4()))
    self.assert_s3_does_not_exist(missing)
    with self.assertRaises(NotImplementedError):
        s3.open(missing, 'w+')
def test_s3_open_read_raises_error_for_nonexistent_remote_file(self):
    """Reading a remote key that does not exist raises KeyNotFound."""
    missing = self.remote_file(str(uuid.uuid4()))
    self.assert_s3_does_not_exist(missing)
    with self.assertRaises(s3.KeyNotFound):
        s3.open(missing, 'r')
def create_random_file_at(self, bases, path):
    """Write one shared random payload to `path` under every base in `bases`."""
    from bltest.random_data import random_data
    payload = random_data()
    for base in bases:
        with s3.open(s3.path.join(base, path), 'w') as f:
            f.write(payload)
def test_s3_open_read_versioned_remote_file_with_unkown_version_id_raise_key_not_found(self):
    """Requesting a version_id that does not exist raises KeyNotFound."""
    target = self.existing_versioned_remote_file
    bogus_version = '5elgojhtA8BGJerqfbciN78eU74SJ9mX'
    with self.assertRaises(s3.KeyNotFound):
        s3.open(target, 'r', version_id=bogus_version)
def test_s3_open_read_does_not_call_upload(self, upload):
    """Read-only opens must never trigger an upload."""
    readable = self.remote_file("openable")
    self.assert_s3_exists(readable)
    with s3.open(readable, 'r'):
        pass
    self.assertFalse(upload.called)