def test_entry_read_after_write_or_tag(tempdir):
    '''Entry read after write or tag'''

    # After a write, read returns exactly the written data
    written_path = os.path.join(tempdir, 'thing1')
    written = fsfs.get_entry(written_path)
    written.write(key='value')
    assert written.read() == {'key': 'value'}

    # After only tagging, read returns an empty dict
    tagged_path = os.path.join(tempdir, 'thing2')
    tagged = fsfs.get_entry(tagged_path)
    tagged.tag('thingy')
    assert tagged.read() == {}
def setup_workspace(ctx, app):
    '''Setup a workspace for the current task'''

    workspace = app.task.workspaces.name(app.host).one()
    if not workspace and not app.default_workspace:
        raise Disable('Could not find workspace for %s' % app.host)

    created = False
    if not workspace:
        # Build the workspace path from the project's path template
        path_template = api.get_path_template('workspace')
        template = api.get_template(app.default_workspace, 'workspace')
        path = path_template.format(
            dict(task=app.task.path, workspace=app.host))

        if os.path.exists(path):
            # Directory already on disk: tag it and seed it with the
            # template's tags and data
            import fsfs
            workspace = fsfs.get_entry(path)
            workspace.tag(*template.tags)
            workspace.write(**template.read())
        else:
            # Fresh workspace copied from the template — report as artifact
            workspace = template.copy(path)
            created = True

    ctx.workspace = workspace
    app.cwd = workspace.path

    # Only a newly-copied workspace is returned (as an artifact);
    # otherwise the function returns None.
    if created:
        return workspace
def ensure_workspace(ctx, task, name, template):
    '''Setup a workspace for the current task'''

    # Disable when the workspace already exists or no template is given
    existing = task.workspaces.name(name).one()
    if existing:
        raise Disable('Workspace already exists.')
    if not template:
        raise Disable('Could not find a workspace for %s' % task)

    path_template = api.get_path_template('workspace')
    template = api.get_template(template, 'workspace')
    path = path_template.format(dict(
        task=task.path,
        workspace=name,
    ))

    if os.path.exists(path):
        # Directory already on disk: turn it into a tagged entry seeded
        # with the template's data
        import fsfs
        workspace = fsfs.get_entry(path)
        workspace.tag(*template.tags)
        workspace.write(**template.read())
        return workspace

    # Otherwise materialize a fresh copy of the template
    return template.copy(path)
def delete(root, remove_root):
    '''Delete an entry'''

    entry = fsfs.get_entry(root)
    if not entry.exists:
        raise UsageError(f('{root} is not an Entry.'))

    # Ask for confirmation before destroying anything on disk
    prompt = 'Are you sure you want to delete {}?'.format(entry.name)
    if click.confirm(prompt):
        fsfs.delete(root, remove_root=remove_root)
def test_entryfactory(tempdir):
    '''Custom EntryFactory'''

    entry_path = util.unipath(tempdir, 'entry')
    fsfs.set_entry_factory(CustomFactory)

    entry = fsfs.get_entry(entry_path)

    # The factory caches entry objects per path
    assert entry is fsfs.get_entry(entry_path)

    # An untagged entry resolves to the factory's default Entry type
    assert type(entry) == CustomFactory.EntryProxy
    assert type(entry.obj()) == CustomFactory.Entry

    # Tagging as 'project' swaps the proxied object to the Project type
    entry.tag('project')
    assert type(entry) == CustomFactory.EntryProxy
    assert type(entry.obj()) == CustomFactory.get_type('project')
    assert hasattr(entry, 'project_method')

    # Untagging returns the proxied object to the default type
    entry.untag('project')
    assert type(entry) == CustomFactory.EntryProxy
    assert type(entry.obj()) == CustomFactory.Entry
    assert not hasattr(entry, 'project_method')

    # Tagging as 'asset' exposes asset methods
    entry.tag('asset')
    assert type(entry) == CustomFactory.EntryProxy
    assert type(entry.obj()) == CustomFactory.get_type('asset')
    assert hasattr(entry, 'asset_method')

    # A moved entry is relinked on the next read
    new_entry_path = util.unipath(tempdir, 'supercool')
    assert samefile(entry.path, entry_path)
    os.rename(entry.path, new_entry_path)
    entry.read()  # Triggers entry project to relink entry
    assert samefile(entry.path, new_entry_path)

    # Restore DefaultFactory
    fsfs.set_entry_factory(fsfs.DefaultFactory)
def commit_asset(asset_item):
    '''Make new asset'''

    template = asset_item['template']
    if template:
        # Materialize the asset from its template
        return template.copy(asset_item['path'])

    # No template: create a bare entry and tag it
    asset = fsfs.get_entry(asset_item['path'])
    asset.tag(*asset_item['tags'])
    return asset
def test_move_entry_raises(tempdir):
    '''Copy or Move Entry to existing location raises'''

    source_path = util.unipath(tempdir, 'entry')
    occupied_path = util.unipath(tempdir, 'move')
    os.makedirs(occupied_path)

    entry = fsfs.get_entry(source_path)
    entry.tag('generic')

    # Both move and copy refuse to clobber an existing destination
    assert_raises(OSError, entry.move, occupied_path)
    assert_raises(OSError, entry.copy, occupied_path)
def commit_task(task_item):
    '''Create new task'''

    template = task_item['template']
    if template:
        # Materialize the task from its template
        return template.copy(task_item['path'])

    # No template: create a bare entry and tag it
    task = fsfs.get_entry(task_item['path'])
    task.tag(*task_item['tags'])
    return task
def commit_workspace(workspace_item):
    '''Create new workspace'''

    template = workspace_item['template']
    if template:
        # Materialize the workspace from its template
        return template.copy(workspace_item['path'])

    # No template: create a bare entry and tag it
    workspace = fsfs.get_entry(workspace_item['path'])
    workspace.tag(*workspace_item['tags'])
    return workspace
def commit_shot(shot_item):
    '''Create new shot'''

    template = shot_item['template']
    if template:
        # Materialize the shot from its template
        return template.copy(shot_item['path'])

    # No template: create a bare entry and tag it
    shot = fsfs.get_entry(shot_item['path'])
    shot.tag(*shot_item['tags'])
    return shot
def commit_collection(collection_item):
    '''Make new collection'''

    template = collection_item['template']
    if template:
        # Materialize the collection from its template
        return template.copy(collection_item['path'])

    # No template: create a bare entry and tag it
    collection = fsfs.get_entry(collection_item['path'])
    collection.tag(*collection_item['tags'])
    return collection
def commit_sequence(sequence_item):
    '''Make new sequence'''

    template = sequence_item['template']
    if template:
        # Materialize the sequence from its template
        return template.copy(sequence_item['path'])

    # No template: create a bare entry and tag it
    sequence = fsfs.get_entry(sequence_item['path'])
    sequence.tag(*sequence_item['tags'])
    return sequence
def stage_asset_type(project, collection, name):
    '''Stage new asset_type Entry'''

    # Resolve the asset_type location from the project's path template
    fields = dict(
        project=project.path,
        collection=collection,
        asset_type=name,
    )
    path_template = api.get_path_template('asset_type')
    return fsfs.get_entry(path_template.format(fields))
def test_new_uuid_after_copy(tempdir):
    '''Copy Entry creates a new uuid'''

    source_path = util.unipath(tempdir, 'entry')
    copy_path = util.unipath(tempdir, 'copy')

    source = fsfs.get_entry(source_path)
    source.tag('generic')
    copied = source.copy(copy_path)

    # The copy lives at the destination with a brand new uuid,
    # and exactly one uuid file exists in its data dir
    assert copied.path == copy_path
    assert copied.uuid != source.uuid
    assert len(glob.glob(copied.data.path + '/uuid_*')) == 1
def test_copy_entry(tempdir):
    '''Copy Entry with files'''

    source_path = util.unipath(tempdir, 'entry')
    copy_path = util.unipath(tempdir, 'copy')
    source, manifest, data_manifest = random_entry(source_path)

    copied = source.copy(copy_path)

    # The factory caches the copied entry and assigns it a new uuid
    assert copied is fsfs.get_entry(copy_path)
    assert copied.uuid != source.uuid

    # Every file and data file from the source exists in the copy
    for relpath in manifest + data_manifest:
        assert os.path.exists(util.unipath(copied.path, relpath))
def parameters(ctx):
    '''Return the parameter spec, defaulting root from the context.'''

    params = {
        'root': {
            'label': 'Root',
            'required': True,
            'type': fsfs.Entry,
            'help': 'Root directory to set cpenv modules for',
        },
    }

    if ctx:
        # Default to the deepest entry in context, else the cwd entry
        entry = ctx.get_deepest_entry()
        params['root']['default'] = entry or fsfs.get_entry(os.getcwd())

    return params
def random_entry(entry_path):
    '''Build a random entry tree on disk and return it with its manifests.'''

    # Top-level files and data files/globs with randomized names
    files = [fake_name() for _ in range(4)]
    data_globs = ['/'.join([fsfs.get_data_root(), 'globs', fake_name()])
                  for _ in range(4)]
    data_files = ['/'.join([fsfs.get_data_root(), 'files', fake_name()])
                  for _ in range(4)]

    # Four random child entries, each with its own files and data
    children = {
        fake_name(): {
            'files': [fake_name() for _ in range(4)],
            'data_globs': [fake_name() for _ in range(4)],
            'data_files': [fake_name() for _ in range(4)],
        }
        for _ in range(4)
    }

    data_manifest = data_globs + data_files
    manifest = files
    for child, data in children.items():
        for name in data['files']:
            manifest.append('/'.join([child, name]))
        for name in data['data_files'] + data['data_globs']:
            data_manifest.append('/'.join([child, fsfs.get_data_root(), name]))

    # Tag the parent entry and each child entry
    entry = fsfs.get_entry(entry_path)
    entry.tag('parent')
    for child_name in children:
        child_entry = fsfs.get_entry(util.unipath(entry.path, child_name))
        child_entry.tag('child')

    # Touch every manifest path so the tree actually exists on disk
    for relpath in manifest + data_manifest:
        util.touch(util.unipath(entry.path, relpath))

    return entry, manifest, data_manifest
def test_custom_uuid(tempdir):
    '''Assign custom uuid using Entry.uuid setter'''

    entry = fsfs.get_entry(util.unipath(tempdir, 'entry'))
    entry.tag('generic')
    original_uuid = entry.uuid

    # Setting the uuid property replaces the generated id
    entry.uuid = 'custom_uuid'
    assert entry.uuid != original_uuid
    assert entry.uuid == 'custom_uuid'
def test_move_entry(tempdir):
    '''Move Entry'''

    source_path = util.unipath(tempdir, 'entry')
    dest_path = util.unipath(tempdir, 'move')
    entry, manifest, data_manifest = random_entry(source_path)
    uuid_before = entry.uuid

    entry.move(dest_path)

    # The same cached entry object now points at the new location
    assert entry.name == 'move'
    assert entry.path != source_path
    assert entry.path == dest_path
    assert entry is fsfs.get_entry(dest_path)

    # Moving preserves the uuid, unlike copying
    assert entry.uuid == uuid_before

    # All files and data files moved along with the entry
    for relpath in manifest + data_manifest:
        assert os.path.exists(util.unipath(entry.path, relpath))
def write(root, data, delkeys):
    '''Write metadata

    Arguments:
        root: Path of the Entry to write to
        data: Iterable of (key, value) pairs to write
        delkeys: Keys to remove from the Entry before writing
    '''

    entry = fsfs.get_entry(root)

    if delkeys:
        entry.remove(*delkeys)

    data = {k: v for k, v in data}
    try:
        entry.write(**data)
    except Exception as e:
        print('Failed to write data: ')
        print(dict(data))
        # BUG FIX: Exception.message does not exist in Python 3 (and was
        # deprecated in 2.6+) — accessing it raised AttributeError inside
        # this handler. Printing the exception itself is always safe.
        print(e)
    else:
        print(f('Wrote data to {root}'))
def run(self, args, *extra_args):
    '''Write metadata to the entry at args.root.

    Removes args.delkeys first, then safely evaluates each value in
    args.data and writes the resulting mapping to the entry.
    '''

    import fsfs
    from fsfs.cli import safe_eval

    entry = fsfs.get_entry(args.root)

    if args.delkeys:
        entry.remove(*args.delkeys)

    data = {k: safe_eval(v) for k, v in args.data}
    try:
        entry.write(**data)
    except Exception as e:
        print('Failed to write data: ')
        print(dict(data))
        # BUG FIX: Exception.message does not exist in Python 3 —
        # accessing it raised AttributeError inside this handler.
        # Printing the exception itself is always safe.
        print(e)
    else:
        print('Wrote data to ' + args.root)
def test_id_generator(tempdir):
    '''Custom id generator'''

    def make_id(count=[0]):
        # Mutable default acts as a counter shared across calls
        _id = str(count[0])
        count[0] += 1
        return _id

    fsfs.set_id_generator(make_id)

    # Each newly tagged entry receives the next sequential id
    entries = []
    for index in range(10):
        entry = fsfs.get_entry(util.unipath(tempdir, 'entry_' + str(index)))
        entry.tag('generic')
        entries.append(entry)

    for index, entry in enumerate(entries):
        assert entry.uuid == str(index)

    # Restore the default id policy for subsequent tests
    fsfs.set_default_policy()
def parameters(ctx):
    '''Return the parameter spec, defaulting root from the context.'''

    params = {
        'root': {
            'label': 'Root',
            'required': True,
            'type': types.Entry,
            'help': 'Root directory to set cpenv modules for',
        },
        'modules': {
            'label': 'Modules',
            'required': True,
            'type': types.String,
            'help': 'Space separated list of cpenv modules'
        },
    }

    # Without a context there is nothing to derive a default root from
    if not ctx:
        return params

    # Default to the deepest entry in context, else the cwd entry
    entry = ctx.get_deepest_entry()
    params['root']['default'] = entry or fsfs.get_entry(os.getcwd())
    return params
def from_env(cls, exclude=None):
    '''Create new context from environment variables'''

    exclude = exclude or []
    data = dict(
        root=os.environ.get('CONSTRUCT_ROOT', DEFAULT_ROOT),
        host=os.environ.get('CONSTRUCT_HOST', DEFAULT_HOST),
    )

    # Each entry key maps to a CONSTRUCT_<KEY> environment variable
    # holding the path of an fsfs entry; skip excluded and unset keys.
    for key in cls.entry_keys:
        if key in exclude:
            continue
        raw = os.environ.get('CONSTRUCT_' + key.upper(), None)
        if raw:
            data[key] = fsfs.get_entry(raw)

    return cls(**data)
def test_read_write(tempdir):
    '''Entry read and write.'''

    fake = ProjectFaker(root=tempdir)
    project_path = fake.project_path()
    fsfs.tag(project_path, 'project')

    # First read will be empty
    project_data = fsfs.read(project_path)
    assert project_data == {}

    # Write updates the cached data and mtime in EntryData, so subsequent
    # reads do not need to touch disk and return the same dict object.
    fsfs.write(project_path, hello='world!')
    assert fsfs.read(project_path) is project_data
    assert project_data == {'hello': 'world!'}

    # Writing another key still serves the same cached dict on read
    fsfs.write(project_path, integer=10)
    assert fsfs.read(project_path) is project_data
    assert project_data == {'hello': 'world!', 'integer': 10}

    # A keyed read returns a new dict containing only those keys
    assert fsfs.read(project_path, 'integer') is not project_data

    # Mutate the data file externally so its mtime advances past the cache
    entry = fsfs.get_entry(project_path)
    time.sleep(0.1)
    with open(entry.data.file, 'w') as f:
        data = dict(hello='wurld!')
        f.write(json.dumps(data))

    # The stale cache is detected (cached mtime < disk mtime) and read
    # returns a freshly loaded dict.
    assert fsfs.read(project_path) is not project_data
def load(self):
    '''Register cli actions, their tasks, and launcher hooks.'''

    # cli actions — each action with its tasks, registered in order
    registrations = [
        (CpenvSet, [validate_cpenv_modules, write_cpenv_modules]),
        (CpenvShow, [get_cpenv, show_cpenv]),
        (CpenvList, [list_cpenv_modules]),
        (CpenvShell, [get_cpenv, launch_cpenv_shell]),
        (CpenvEdit, [edit_cpenv_modules]),
    ]
    for action, tasks in registrations:
        self.add_action(action)
        for task in tasks:
            self.add_task(action, task)

    # Extend cpenv_launcher to activate cpenv modules before launch
    from construct_launcher.constants import BEFORE_LAUNCH

    self.add_task(
        'launch.*',
        get_cpenv,
        arg_getters=[
            lambda ctx: ctx.get_deepest_entry() or fsfs.get_entry(os.getcwd())
        ],
        priority=BEFORE_LAUNCH,
    )
    self.add_task(
        'launch.*',
        activate_cpenv_modules,
        priority=BEFORE_LAUNCH,
    )
def test_entry_read_before_write_or_tag(tempdir):
    '''Entry read before write or tag raises OSError'''

    path = os.path.join(tempdir, 'thing')
    entry = fsfs.get_entry(path)
    # BUG FIX: the docstring promises an OSError but the original body
    # only called entry.read() — if it raised, the test errored instead of
    # passing, and if it didn't, nothing was verified. Assert the raise
    # explicitly (assert_raises is already used elsewhere in this file).
    assert_raises(OSError, entry.read)