def create_project_definition(collection, data):
    """Insert a new project document into ``collection``.

    The ``schema`` key is stamped onto ``data`` before validation so
    that ``schema.validate`` succeeds.

    Args:
        collection (pymongo.collection.Collection): target collection
        data (dict): project data to store

    Returns:
        bson.ObjectId: id of the inserted project document

    Raises:
        RuntimeError: if a project with the same name already exists
    """
    # Stamp and validate against the project schema
    data["schema"] = "avalon-core:project-2.0"
    schema.validate(data)

    # Project names must be unique within the collection
    existing = collection.find_one({"type": "project",
                                    "name": data["name"]})
    if existing:
        raise RuntimeError("Project with name `%s` already exists in this "
                           "collection `%s`" % (data["name"],
                                                collection.name))

    result = collection.insert_one(data)
    assert result.acknowledged, ("Could not create project definition, "
                                 "please contact a Pipeline TD!")

    return result.inserted_id
def parse_container(track_item, validate=True):
    """Build container data from a track item's pype tag.

    Args:
        track_item (hiero.core.TrackItem): a containerised track item
        validate (bool)[optional]: run the avalon schema validation

    Returns:
        dict: container schema data, or None when the tag data is
            missing or incomplete
    """
    # Tag metadata converted to plain key names
    data = lib.get_track_item_pype_data(track_item)

    if validate and data and data.get("schema"):
        schema.validate(data)

    if not isinstance(data, dict):
        return

    # Bail out when any required key is absent
    required = ['schema', 'id', 'name', 'namespace', 'loader',
                'representation']
    missing = [key for key in required if key not in data]
    if missing:
        return

    container = dict((key, data[key]) for key in required)
    container["objectName"] = track_item.name()

    # Keep a direct reference to the node object itself
    container["_track_item"] = track_item

    return container
def setup():
    # Test-suite fixture: prepares a temporary project root, the avalon
    # environment and a database project before tests run.
    # NOTE(review): `self` is not a parameter here -- presumably a
    # module-level namespace object supplied by the test harness; confirm.
    self._tempdir = tempfile.mkdtemp()
    api.register_root(self._tempdir)

    # Setup environment
    os.environ["AVALON_CONFIG"] = "polly"
    os.environ["AVALON_PROJECT"] = PROJECT_NAME
    os.environ["AVALON_ASSET"] = ASSET_NAME
    os.environ["AVALON_TASK"] = TASK_NAME
    os.environ["AVALON_ASSETPATH"] = (
        "{root}/{project}/{silo}/{asset}".format(
            root=api.registered_root(),
            project=PROJECT_NAME,
            asset=ASSET_NAME,
            silo="assets"))
    os.environ["AVALON_SILO"] = "assets"

    # Install the host integration and point io at the test project
    api.install(maya)
    io.activate_project(PROJECT_NAME)

    # Both fixtures must be schema-valid before they are persisted
    schema.validate(self._config)
    schema.validate(self._inventory)

    inventory.save(
        name=PROJECT_NAME,
        config=self._config,
        inventory=self._inventory
    )
def insert_many(self, items, ordered=True):
    """Validate every document and bulk-insert into the active project.

    Args:
        items (list): documents to insert, each a schema-valid dict
        ordered (bool): forwarded to pymongo's ``insert_many``

    Returns:
        pymongo.results.InsertManyResult
    """
    # Refuse anything that is not a list of schema-valid dicts
    assert isinstance(items, list), "`items` must be of type <list>"
    for document in items:
        assert isinstance(document, dict), "`item` must be of type <dict>"
        schema.validate(document)

    collection = self._database[self.Session["AVALON_PROJECT"]]
    return collection.insert_many(items, ordered=ordered)
def insert_many(self, items, *args, **kwargs):
    """Validate every document and bulk-insert into the active project.

    Any extra positional/keyword arguments are forwarded to pymongo's
    ``insert_many``.

    Returns:
        pymongo.results.InsertManyResult
    """
    # Refuse anything that is not a list of schema-valid dicts
    assert isinstance(items, list), "`items` must be of type <list>"
    for document in items:
        assert isinstance(document, dict), "`item` must be of type <dict>"
        schema.validate(document)

    collection = self._database[self.active_project()]
    return collection.insert_many(items, *args, **kwargs)
def _write(root, name, data):
    """Validate `data` and write it to ``<root>/.<name>.toml``.

    Args:
        root (str): directory to write into
        name (str): basename; the file becomes ``.<name>.toml``
        data (dict): schema-valid data to serialise

    Returns:
        dict: the (unmodified) input data

    Raises:
        IOError: if the file cannot be written
    """
    # BUG FIX: validate *before* touching disk. The original dumped the
    # file first and validated afterwards, so invalid data still left a
    # written file behind. (The original `except IOError: raise` was a
    # no-op and has been removed.)
    schema.validate(data)

    fname = os.path.join(root, ".%s.toml" % name)
    with open(fname, "w") as f:
        toml.dump(data, f)

    return data
def _save_config_1_0(project_name, data):
    """Merge `data` into the project document's config and persist it.

    Apps and tasks are replaced outright while template entries are
    merged into the existing template mapping.
    """
    document = io.find_one({"type": "project"})

    config = document["config"]
    for key in ("apps", "tasks"):
        config[key] = data.get(key, [])
    config["template"].update(data.get("template", {}))

    schema.validate(document)
    io.save(document)
def _save_config_1_0(project_name, data):
    """Replace the stored project config with values from `data`.

    Template entries are merged into the existing template mapping;
    every other section is overwritten, then the document is swapped
    in-place via ``replace_one``.
    """
    _filter = {"type": "project"}
    document = io.find_one(_filter)

    config = document["config"]
    for key in ("apps", "tasks", "families", "groups"):
        config[key] = data.get(key, [])
    config["template"].update(data.get("template", {}))

    schema.validate(document)
    io.replace_one(_filter, document)
def create_asset(data):
    """Create an asset document under the current project.

    Requires:
        {"name": "uniquecode", "silo": "assets"}

    Optional:
        {"data": {}}
    """
    # Work on a copy so the caller's dict keeps its popped keys
    data = data.copy()

    project = io.find_one({"type": "project"})
    if project is None:
        raise RuntimeError("Project must exist prior to creating assets")

    # Parent under the given visual parent id, else directly under the
    # project itself
    visual_parent = data.pop("visualParent", None)

    asset = {
        "schema": "avalon-core:asset-2.0",
        "parent": project['_id'],
        "name": data.pop("name"),
        "silo": data.pop("silo"),
        "visualParent": visual_parent,
        "type": "asset",
        "data": data
    }

    # Asset *must* have a name and silo
    assert asset['name'], "Asset has no name"
    assert asset['silo'], "Asset has no silo"

    # Names must be unique across the project
    if io.find_one({"name": asset['name'], "type": "asset"}) is not None:
        raise RuntimeError("Asset named {} already "
                           "exists.".format(asset['name']))

    schema.validate(asset)
    io.insert_one(asset)
def test_save_toml():
    # Round-trips a TOML-authored config through schema validation and
    # `inventory.save`. NOTE(review): `self` here is presumably a
    # module-level namespace object used by the test harness; confirm.
    text = """\
schema = "avalon-core:config-1.0"
[[apps]]
name = "maya2016"
label = "Autodesk Maya 2016"
[[apps]]
name = "nuke10"
label = "The Foundry Nuke 10.0"
[[tasks]]
name = "model"
[[tasks]]
name = "render"
[[tasks]]
name = "animation"
[[tasks]]
name = "rig"
[[tasks]]
name = "lookdev"
[[tasks]]
name = "layout"
[template]
work = "{root}/{project}/f02_prod/{silo}"
publish = "{root}/{project}/f02_prod/{silo}"
[copy]"""

    config_ = toml.loads(text)
    # The parsed TOML must already satisfy the config-1.0 schema
    schema.validate(config_)

    inventory.save(
        name=self._project["name"],
        config=config_,
        inventory=self._inventory
    )
def save(name, config, inventory):
    """Write `config` and `inventory` to database as `name`

    Given a configuration and inventory, this function writes the changes
    to the current database.

    Arguments:
        name (str): Project name
        config (dict): Current configuration
        inventory (dict): Current inventory

    Raises:
        schema.SchemaError: on a missing or unrecognised schema id
    """
    # Deep-copy so callers never see our mutations
    config = copy.deepcopy(config)
    inventory = copy.deepcopy(inventory)

    if "config" not in config.get("schema", ""):
        raise schema.SchemaError("Missing schema for config")

    if "inventory" not in inventory.get("schema", ""):
        raise schema.SchemaError("Missing schema for inventory")

    # Dispatch each document to the writer matching its schema id
    handlers = {
        "avalon-core:inventory-1.0": _save_inventory_1_0,
        "avalon-core:config-1.0": _save_config_1_0
    }

    for data in (inventory, config):
        try:
            schema_ = data.get("schema")
            handler = handlers[schema_]
        except KeyError:
            # BUG FIX: the original interpolated the `schema` *module*
            # (and left a stray ")") instead of the document's schema id.
            raise schema.SchemaError(
                "ERROR: Missing handler for %s" % schema_)
        else:
            schema.validate(data)
            print("Saving %s.." % schema_)
            handler(name, data)
def parse_container(container, validate=True):
    """Return the full container data stored on a container node.

    Args:
        container (hou.Node): a container node name
        validate (bool): toggle schema validation of the parsed data

    Returns:
        dict: the container schema data for this container node
    """
    data = lib.read(container)

    # Containers created before schemas existed receive a default id
    data.setdefault("schema", "avalon-core:container-1.0")

    # Transient, node-derived entries
    data["objectName"] = container.path()
    data["node"] = container

    if validate:
        schema.validate(data)

    return data
def test_load():
    """Loading produces compatible results for saving"""
    # NOTE(review): `self` is presumably a module-level namespace object
    # used by the test harness; confirm.
    inventory.save(
        name=self._project["name"],
        config=self._config,
        inventory=self._inventory
    )

    # What was saved must load back as schema-valid documents
    _config, _inventory = inventory.load(PROJECT_NAME)
    schema.validate(_config)
    schema.validate(_inventory)

    # Saving the loaded data must round-trip cleanly a second time
    inventory.save(
        name=self._project["name"],
        config=_config,
        inventory=_inventory
    )

    _config, _inventory = inventory.load(PROJECT_NAME)
    schema.validate(_config)
    schema.validate(_inventory)
def import_to_avalon(session, entity, ft_project, av_project,
                     custom_attributes):
    # Synchronise one ftrack entity (project or asset) into the avalon
    # database. Returns a dict with an optional 'errors' list and, for
    # projects, the resulting 'project' document. Recurses once to import
    # the project itself when an asset arrives before its project exists.
    database = get_avalon_database()
    project_name = ft_project['full_name']
    output = {}
    errors = []

    ca_mongoid = get_ca_mongoid()
    # Validate if entity has custom attribute avalon_mongo_id
    if ca_mongoid not in entity['custom_attributes']:
        msg = (
            'Custom attribute "{}" for "{}" is not created'
            ' or don\'t have set permissions for API'
        ).format(ca_mongoid, entity['name'])
        errors.append({'Custom attribute error': msg})
        output['errors'] = errors
        return output

    # Validate if entity name match REGEX in schema
    try:
        avalon_check_name(entity)
    except ValidationError:
        # NOTE(review): the "{}" placeholder is never filled in -- the
        # message is appended without .format(entity['name']); confirm
        # whether that is intentional.
        msg = '"{}" includes unsupported symbols like "dash" or "space"'
        errors.append({'Unsupported character': msg})
        output['errors'] = errors
        return output

    entity_type = entity.entity_type
    # Project ////////////////////////////////////////////////////////////
    if entity_type in ['Project']:
        type = 'project'

        config = get_project_config(entity)
        schema.validate(config)

        av_project_code = None
        if av_project is not None and 'code' in av_project['data']:
            av_project_code = av_project['data']['code']
        ft_project_code = ft_project['name']

        if av_project is None:
            # First import: create the avalon project document
            project_schema = pypelib.get_avalon_project_template_schema()
            item = {
                'schema': project_schema,
                'type': type,
                'name': project_name,
                'data': dict(),
                'config': config,
                'parent': None,
            }
            schema.validate(item)
            database[project_name].insert_one(item)
            av_project = database[project_name].find_one({'type': type})

        elif (
            av_project['name'] != project_name or
            (av_project_code is not None and
             av_project_code != ft_project_code)
        ):
            # Renaming an already-imported project would break avalon;
            # revert the ftrack-side change and report an error.
            msg = (
                'You can\'t change {0} "{1}" to "{2}"'
                ', avalon wouldn\'t work properly!'
                '\n{0} was changed back!'
            )
            if av_project['name'] != project_name:
                entity['full_name'] = av_project['name']
                errors.append({
                    'Changed name error': msg.format(
                        'Project name', av_project['name'], project_name
                    )
                })
            if (
                av_project_code is not None and
                av_project_code != ft_project_code
            ):
                entity['name'] = av_project_code
                errors.append({
                    'Changed name error': msg.format(
                        'Project code', av_project_code, ft_project_code
                    )
                })

            session.commit()

            output['errors'] = errors
            return output

        else:
            # not override existing templates!
            templates = av_project['config'].get('template', None)
            if templates is not None:
                for key, value in config['template'].items():
                    if (
                        key in templates and
                        templates[key] is not None and
                        templates[key] != value
                    ):
                        config['template'][key] = templates[key]

        projectId = av_project['_id']

        data = get_data(entity, session, custom_attributes)

        # Refresh name/config/data on the stored project document
        database[project_name].update_many(
            {'_id': ObjectId(projectId)},
            {'$set': {
                'name': project_name,
                'config': config,
                'data': data,
            }})

        entity['custom_attributes'][ca_mongoid] = str(projectId)
        session.commit()

        output['project'] = av_project

        return output

    # Asset - /////////////////////////////////////////////////////////
    if av_project is None:
        # Ensure the project exists before importing any of its assets
        result = import_to_avalon(
            session, ft_project, ft_project, av_project, custom_attributes
        )

        if 'errors' in result:
            output['errors'] = result['errors']
            return output
        elif 'project' not in result:
            msg = 'During project import went something wrong'
            errors.append({'Unexpected error': msg})
            output['errors'] = errors
            return output

        av_project = result['project']
        output['project'] = result['project']

    projectId = av_project['_id']
    data = get_data(entity, session, custom_attributes)

    # 1. hierarchical entity have silo set to None
    silo = None
    if len(data['parents']) > 0:
        silo = data['parents'][0]

    name = entity['name']

    avalon_asset = None
    # existence of this custom attr is already checked
    if ca_mongoid not in entity['custom_attributes']:
        msg = '"{}" don\'t have "{}" custom attribute'
        errors.append({
            'Missing Custom attribute': msg.format(entity_type, ca_mongoid)
        })
        output['errors'] = errors
        return output

    # Normalise the stored mongo id; empty it when it cannot parse
    mongo_id = entity['custom_attributes'][ca_mongoid]
    mongo_id = mongo_id.replace(' ', '').replace('\n', '')
    try:
        ObjectId(mongo_id)
    except Exception:
        mongo_id = ''

    # NOTE(review): `is not ''` compares identity, not equality -- this
    # should be `!=` (works only via CPython string interning and raises
    # SyntaxWarning on modern Python); confirm and fix separately.
    if mongo_id is not '':
        avalon_asset = database[project_name].find_one(
            {'_id': ObjectId(mongo_id)}
        )

    if avalon_asset is None:
        # No id match -- fall back to lookup by name
        avalon_asset = database[project_name].find_one({
            'type': 'asset',
            'name': name
        })
        if avalon_asset is None:
            # Brand new asset: create and remember the inserted id
            asset_schema = pypelib.get_avalon_asset_template_schema()
            item = {
                'schema': asset_schema,
                'name': name,
                'silo': silo,
                'parent': ObjectId(projectId),
                'type': 'asset',
                'data': data
            }
            schema.validate(item)
            mongo_id = database[project_name].insert_one(item).inserted_id

        # Raise error if it seems to be different ent. with same name
        elif (
            avalon_asset['data']['parents'] != data['parents'] or
            avalon_asset['silo'] != silo
        ):
            msg = (
                'In Avalon DB already exists entity with name "{0}"'
            ).format(name)
            errors.append({'Entity name duplication': msg})
            output['errors'] = errors
            return output

        # Store new ID (in case that asset was removed from DB)
        else:
            mongo_id = avalon_asset['_id']

    else:
        # Existing asset found by id -- guard against rename/move
        if avalon_asset['name'] != entity['name']:
            if silo is None or changeability_check_childs(entity) is False:
                msg = (
                    'You can\'t change name {} to {}'
                    ', avalon wouldn\'t work properly!'
                    '\n\nName was changed back!'
                    '\n\nCreate new entity if you want to change name.'
                ).format(avalon_asset['name'], entity['name'])
                entity['name'] = avalon_asset['name']
                session.commit()
                errors.append({'Changed name error': msg})

        if (
            avalon_asset['silo'] != silo or
            avalon_asset['data']['parents'] != data['parents']
        ):
            old_path = '/'.join(avalon_asset['data']['parents'])
            new_path = '/'.join(data['parents'])

            msg = (
                'You can\'t move with entities.'
                '\nEntity "{}" was moved from "{}" to "{}"'
                '\n\nAvalon won\'t work properly, {}!'
            )

            # Try to move the ftrack entity back under its stored parent
            moved_back = False
            if 'visualParent' in avalon_asset['data']:
                if silo is None:
                    asset_parent_id = avalon_asset['parent']
                else:
                    asset_parent_id = avalon_asset['data']['visualParent']

                asset_parent = database[project_name].find_one(
                    {'_id': ObjectId(asset_parent_id)}
                )
                ft_parent_id = asset_parent['data']['ftrackId']
                try:
                    entity['parent_id'] = ft_parent_id
                    session.commit()
                    msg = msg.format(
                        avalon_asset['name'], old_path, new_path,
                        'entity was moved back'
                    )
                    moved_back = True

                except Exception:
                    moved_back = False

            if moved_back is False:
                msg = msg.format(
                    avalon_asset['name'], old_path, new_path,
                    'please move it back'
                )

            errors.append({'Hierarchy change error': msg})

    if len(errors) > 0:
        output['errors'] = errors
        return output

    # Finally sync the avalon asset document with the ftrack state
    database[project_name].update_many(
        {'_id': ObjectId(mongo_id)},
        {'$set': {
            'name': name,
            'silo': silo,
            'data': data,
            'parent': ObjectId(projectId)
        }})

    entity['custom_attributes'][ca_mongoid] = str(mongo_id)
    session.commit()

    return output
def insert_one(self, item, *args, **kwargs):
    """Validate `item` against its schema and insert it.

    Extra positional/keyword arguments are forwarded to pymongo's
    ``insert_one``.

    Returns:
        pymongo.results.InsertOneResult
    """
    assert isinstance(item, dict), "item must be of type <dict>"
    schema.validate(item)

    collection = self._database[self.active_project()]
    return collection.insert_one(item, *args, **kwargs)
def launch(self, name):
    """Launch `app`

    Resolves the application definition for `name`, builds the work
    directory and environment from the current frame, then spawns the
    executable and a thread that streams its stdout to the terminal.

    Arguments:
        name (str): Name of app

    Returns:
        dict: the tracked process record ({"app", "thread", "popen"}),
            or None on any logged failure.
    """
    application_definition = lib.which_app(name)

    if application_definition is None:
        return terminal.log(
            "Application Definition for '%s' not found." % name,
            terminal.ERROR)

    try:
        with open(application_definition) as f:
            app = toml.load(f)
            terminal.log(json.dumps(app, indent=4), terminal.DEBUG)
            schema.validate(app, "application")
    except (schema.ValidationError,
            schema.SchemaError,
            toml.TomlDecodeError) as e:
        terminal.log("Application definition was invalid.", terminal.ERROR)
        terminal.log("%s" % e, terminal.ERROR)
        return terminal.log(
            " - %s" % application_definition, terminal.ERROR)

    executable = lib.which(app["executable"])

    if executable is None:
        return terminal.log(
            "'%s' not found on your PATH\n%s"
            % (app["executable"], os.getenv("PATH")), terminal.ERROR)

    frame = self.current_frame()
    frame["environment"]["root"] = self._root

    template_private = frame["config"]["template"]["work"]

    try:
        # Expand the work template with user/app plus the frame env
        workdir = template_private.format(**dict(
            user=getpass.getuser(),
            app=app["application_dir"],
            **frame["environment"]))
    except KeyError as e:
        return terminal.log("Missing environment variable: %s" % e,
                            terminal.ERROR)

    # TODO(marcus): These shouldn't be necessary
    # once the templates are used.
    # ------------------------------------------------------------------
    # Derive partial paths (up to silo/asset/task) from the template
    template_rootpath = template_private.split("{silo}")[0]
    template_assetpath = template_private.split("{asset}")[0] + "{asset}"
    template_taskpath = template_private.split("{task}")[0] + "{task}"

    silospath = template_rootpath.format(**frame["environment"])
    assetpath = template_assetpath.format(**frame["environment"])
    taskpath = template_taskpath.format(**frame["environment"])

    frame["environment"]["silospath"] = silospath
    frame["environment"]["assetpath"] = assetpath
    frame["environment"]["taskpath"] = taskpath
    frame["environment"]["workdir"] = workdir
    # ------------------------------------------------------------------

    # TODO(marcus): These will eventually replace the name-based
    # references currently stored in the environment.
    frame["environment"]["_project"] = frame["project"]
    frame["environment"]["_asset"] = frame["asset"]

    # Overlay frame values onto the OS environment as AVALON_* keys
    environment = os.environ.copy()
    environment = dict(environment, **{
        "AVALON_" + key.upper(): str(value)
        for key, value in frame["environment"].items()
    })

    try:
        app = lib.dict_format(app, **environment)
    except KeyError as e:
        terminal.log(
            "Application error: variable %s "
            "not found in application .json" % e, terminal.ERROR)
        terminal.log(json.dumps(environment, indent=4, sort_keys=True),
                     terminal.ERROR)
        return terminal.log(
            "This is typically a bug in the pipeline, "
            "ask your developer.", terminal.ERROR)

    for key, value in app.get("environment", {}).items():
        if isinstance(value, list):
            # Treat list values as application_definition variables
            environment[key] = os.pathsep.join(value)

        elif isinstance(value, six.string_types):
            if PY2:
                # Protect against unicode in the environment
                encoding = sys.getfilesystemencoding()
                environment[key] = value.encode(encoding)
            else:
                environment[key] = value

        else:
            terminal.log(
                "'%s': Unsupported environment variable in %s"
                % (value, application_definition), terminal.ERROR)
            raise TypeError("Unsupported environment variable")

    try:
        os.makedirs(workdir)
        terminal.log("Creating working directory '%s'" % workdir,
                     terminal.INFO)

    except OSError as e:
        # An already existing working directory is fine.
        if e.errno == errno.EEXIST:
            terminal.log("Existing working directory found.",
                         terminal.INFO)

        else:
            terminal.log("Could not create working directory.",
                         terminal.ERROR)
            return terminal.log(traceback.format_exc(), terminal.ERROR)

    else:
        terminal.log("Creating default directories..", terminal.DEBUG)
        for dirname in app.get("default_dirs", []):
            terminal.log(" - %s" % dirname, terminal.DEBUG)
            os.makedirs(os.path.join(workdir, dirname))

    # Perform application copy
    for src, dst in app.get("copy", {}).items():
        dst = os.path.join(workdir, dst)

        try:
            terminal.log("Copying %s -> %s" % (src, dst))
            shutil.copy(src, dst)
        except OSError as e:
            terminal.log("Could not copy application file: %s" % e,
                         terminal.ERROR)
            terminal.log(" - %s -> %s" % (src, dst), terminal.ERROR)

    # Merge config-side args with the definition's arguments.
    # NOTE(review): the generator variable `app` shadows the outer `app`
    # dict (scoped to the genexp only, so harmless but confusing).
    item = next(app for app in frame["config"]["apps"]
                if app["name"] == name)
    args = item.get("args", []) + app.get("arguments", [])

    try:
        popen = lib.launch(
            executable=executable,
            args=args,
            environment=environment,
        )
    except ValueError:
        return terminal.log(traceback.format_exc())

    except OSError:
        return terminal.log(traceback.format_exc())

    except Exception as e:
        terminal.log("Something unexpected happened..")
        return terminal.log(traceback.format_exc())

    else:
        terminal.log(json.dumps(environment, indent=4, sort_keys=True),
                     terminal.DEBUG)
        terminal.log("Launching {executable} {args}".format(
            executable=executable, args=" ".join(args)))

    process = {}

    # Stream the child's stdout into the terminal from a Qt thread
    class Thread(QtCore.QThread):
        messaged = Signal(str)

        def run(self):
            for line in lib.stream(process["popen"].stdout):
                self.messaged.emit(line.rstrip())
            self.messaged.emit("%s killed."
                               % process["app"]["executable"])

    thread = Thread()
    thread.messaged.connect(lambda line: terminal.log(line, terminal.INFO))

    process.update({
        "app": app,
        "thread": thread,
        "popen": popen
    })

    self._processes.append(process)

    thread.start()
    return process
def create_asset(self):
    # Create an asset from the dialog inputs: validates the name and
    # parent selection, creates a matching ftrack AssetBuild (+tasks),
    # inserts the avalon asset document, optionally links an outlink
    # entity and launches the selected app.
    name_input = self.data['inputs']['name']
    name = name_input.text()
    test_name = name.replace(' ', '')
    error_message = None
    message = QtWidgets.QMessageBox(self)
    message.setWindowTitle("Some errors has occured")
    message.setIcon(QtWidgets.QMessageBox.Critical)
    # TODO: show error messages on any error
    if self.valid_parent is not True and test_name == '':
        error_message = "Name is not set and Parent is not selected"
    elif self.valid_parent is not True:
        error_message = "Parent is not selected"
    elif test_name == '':
        error_message = "Name is not set"

    if error_message is not None:
        message.setText(error_message)
        message.show()
        return

    # Name must be unique in the avalon database
    test_name_exists = io.find({
        'type': 'asset',
        'name': name
    })
    existing_assets = [x for x in test_name_exists]
    if len(existing_assets) > 0:
        message.setText("Entered Asset name is occupied")
        message.show()
        return

    # When an app should be opened afterwards a task must be selected
    checkbox_app = self.data['inputs']['open_app']
    if checkbox_app is not None and checkbox_app.isChecked() is True:
        task_view = self.data["view"]["tasks"]
        task_model = self.data["model"]["tasks"]
        try:
            index = task_view.selectedIndexes()[0]
            task_name = task_model.itemData(index)[0]
        except Exception:
            message.setText("Please select task")
            message.show()
            return

    # Get ftrack session
    if self.session is None:
        session = ftrack_api.Session()
        self.session = session
    else:
        session = self.session

    # Get Ftrack project entity
    project_name = io.Session['AVALON_PROJECT']
    project_query = 'Project where full_name is "{}"'.format(project_name)
    try:
        ft_project = session.query(project_query).one()
    except Exception:
        message.setText("Ftrack project was not found")
        message.show()
        return

    # Get Ftrack entity of parent
    ft_parent = None
    assets_model = self.data["model"]["assets"]
    selected = assets_model.get_selected_assets()
    parent = io.find_one({"_id": selected[0], "type": "asset"})
    asset_id = parent.get('data', {}).get('ftrackId', None)
    asset_entity_type = parent.get('data', {}).get('entityType', None)
    asset_query = '{} where id is "{}"'
    if asset_id is not None and asset_entity_type is not None:
        try:
            ft_parent = session.query(asset_query.format(
                asset_entity_type, asset_id)
            ).one()
        except Exception:
            ft_parent = None

    if ft_parent is None:
        ft_parent = self.get_ftrack_asset(parent, ft_project)

    if ft_parent is None:
        message.setText("Parent's Ftrack entity was not found")
        message.show()
        return

    asset_build_combo = self.data['inputs']['assetbuild']
    asset_type_name = asset_build_combo.currentText()
    asset_type_query = 'Type where name is "{}"'.format(asset_type_name)
    try:
        asset_type = session.query(asset_type_query).one()
    except Exception:
        message.setText("Selected Asset Build type does not exists")
        message.show()
        return

    # Name must also be unique among the ftrack parent's children
    for children in ft_parent['children']:
        if children['name'] == name:
            message.setText("Entered Asset name is occupied")
            message.show()
            return

    # Resolve the task types for the chosen task template
    task_template_combo = self.data['inputs']['tasktemplate']
    task_template = task_template_combo.currentText()
    tasks = []
    for template in self.config_data['task_templates']:
        if template['name'] == task_template:
            tasks = template['task_types']
            break

    # Every requested task type must exist in the ftrack project schema
    available_task_types = []
    task_types = ft_project['project_schema']['_task_type_schema']
    for task_type in task_types['types']:
        available_task_types.append(task_type['name'])

    not_possible_tasks = []
    for task in tasks:
        if task not in available_task_types:
            not_possible_tasks.append(task)

    if len(not_possible_tasks) != 0:
        message.setText((
            "These Task types weren't found"
            " in Ftrack project schema:\n{}").format(
            ', '.join(not_possible_tasks))
        )
        message.show()
        return

    # Create asset build
    asset_build_data = {
        'name': name,
        'project_id': ft_project['id'],
        'parent_id': ft_parent['id'],
        'type': asset_type
    }

    new_entity = session.create('AssetBuild', asset_build_data)

    # Create the template's tasks under the new asset build.
    # NOTE(review): `type` shadows the builtin here.
    task_data = {
        'project_id': ft_project['id'],
        'parent_id': new_entity['id']
    }
    for task in tasks:
        type = session.query(
            'Type where name is "{}"'.format(task)
        ).one()
        task_data['type_id'] = type['id']
        task_data['name'] = task
        session.create('Task', task_data)

    # Build the avalon asset document
    av_project = io.find_one({'type': 'project'})

    hiearchy_items = []
    hiearchy_items.extend(self.get_avalon_parent(parent))
    hiearchy_items.append(parent['name'])

    hierarchy = os.path.sep.join(hiearchy_items)

    new_asset_data = {
        'ftrackId': new_entity['id'],
        'entityType': new_entity.entity_type,
        'visualParent': parent['_id'],
        'tasks': tasks,
        'parents': hiearchy_items,
        'hierarchy': hierarchy
    }
    new_asset_info = {
        'parent': av_project['_id'],
        'name': name,
        'schema': "openpype:asset-3.0",
        'type': 'asset',
        'data': new_asset_data
    }

    # Backwards compatibility (add silo from parent if is silo project)
    if self.silos:
        new_asset_info["silo"] = parent["silo"]

    try:
        schema.validate(new_asset_info)
    except Exception:
        message.setText((
            'Asset information are not valid'
            ' to create asset in avalon database'
        ))
        message.show()
        # Roll back the ftrack entities created above
        session.rollback()
        return

    io.insert_one(new_asset_info)
    session.commit()

    # Optionally link an "outlink" entity to the new asset build
    outlink_cb = self.data['inputs']['outlink_cb']
    if outlink_cb.isChecked() is True:
        outlink_input = self.data['inputs']['outlink']
        outlink_name = outlink_input.text()
        outlink_asset = io.find_one({
            'type': 'asset',
            'name': outlink_name
        })
        outlink_ft_id = outlink_asset.get('data', {}).get('ftrackId', None)
        outlink_entity_type = outlink_asset.get(
            'data', {}
        ).get('entityType', None)
        # NOTE(review): `asset_query.format()` is called with NO
        # arguments, leaving the "{}" placeholders unfilled -- it should
        # receive (outlink_entity_type, outlink_ft_id). Also, when the
        # ids are None, `outlink_entity` below is referenced before
        # assignment (UnboundLocalError). Both look like genuine bugs.
        if outlink_ft_id is not None and outlink_entity_type is not None:
            try:
                outlink_entity = session.query(asset_query.format()).one()
            except Exception:
                outlink_entity = None

        if outlink_entity is None:
            outlink_entity = self.get_ftrack_asset(
                outlink_asset, ft_project
            )

        if outlink_entity is None:
            message.setText("Outlink's Ftrack entity was not found")
            message.show()
            return

        link_data = {
            'from_id': new_entity['id'],
            'to_id': outlink_entity['id']
        }
        session.create('TypedContextLink', link_data)
        session.commit()

    # Launch the chosen app in the new asset's selected task context,
    # then restore the original context
    if checkbox_app is not None and checkbox_app.isChecked() is True:
        origin_asset = api.Session.get('AVALON_ASSET', None)
        origin_task = api.Session.get('AVALON_TASK', None)
        asset_name = name
        task_view = self.data["view"]["tasks"]
        task_model = self.data["model"]["tasks"]
        try:
            index = task_view.selectedIndexes()[0]
        except Exception:
            message.setText("No task is selected. App won't be launched")
            message.show()
            return
        task_name = task_model.itemData(index)[0]
        try:
            api.update_current_task(task=task_name, asset=asset_name)
            self.open_app()
        finally:
            if origin_task is not None and origin_asset is not None:
                api.update_current_task(
                    task=origin_task, asset=origin_asset
                )

    message.setWindowTitle("Asset Created")
    message.setText("Asset Created successfully")
    message.setIcon(QtWidgets.QMessageBox.Information)
    message.show()
def test_save():
    """Saving works well under normal circumstances"""
    # NOTE(review): `self` is presumably a module-level namespace object
    # used by the test harness; confirm.
    config_ = {
        "schema": "avalon-core:config-1.0",
        "apps": [
            {"name": "app1"},
            {"name": "app2"},
        ],
        "tasks": [
            {"name": "task1"},
            {"name": "task2"},
        ],
        "template": {
            "work": "{root}/{project}/{silo}/{asset}/work/"
                    "{task}/{user}/{app}",
            "publish": "{root}/{project}/{silo}/{asset}/publish/"
                       "{subset}/v{version:0>3}/{subset}.{representation}"
        },
        "copy": {}
    }

    inventory_ = {
        "schema": "avalon-core:inventory-1.0",
        "assets": [
            {"name": "asset1"},
            {"name": "asset2"}
        ],
        "film": [
            {"name": "shot1"},
            {"name": "shot2"},
        ]
    }

    # Both fixtures must be schema-valid before saving
    schema.validate(config_)
    schema.validate(inventory_)

    _id = io.insert_one(self._project).inserted_id
    project = io.find_one({"_id": _id})

    assert_equals(project["config"], self._project["config"])

    inventory.save(
        name=self._project["name"],
        config=config_,
        inventory=inventory_
    )

    # The stored config equals the input minus its "schema" key
    project = io.find_one({"_id": _id})
    config_.pop("schema")
    assert_equals(project["config"], config_)

    # Every inventory asset must exist as a child of the project
    for asset in inventory_["assets"]:
        assert io.find_one({
            "type": "asset",
            "parent": project["_id"],
            "name": asset["name"]
        })
def _from_environment(self):
    """Assemble a Session mapping from ``AVALON_*`` environment variables.

    Each known key falls back to its default when unset; keys that
    resolve to None are omitted entirely. The result is validated
    against the session schema, but a validation failure is only
    logged as a warning.

    Returns:
        dict: the session key/value mapping
    """
    defaults = (
        # Root directory of projects on disk
        ("AVALON_PROJECTS", None),
        # Name of current Project
        ("AVALON_PROJECT", ""),
        # Name of current Asset
        ("AVALON_ASSET", ""),
        # Name of current silo
        ("AVALON_SILO", ""),
        # Name of current task
        ("AVALON_TASK", None),
        # Name of current app
        ("AVALON_APP", None),
        # Path to working directory
        ("AVALON_WORKDIR", None),
        # Name of current Config
        # TODO(marcus): Establish a suitable default config
        ("AVALON_CONFIG", "no_config"),
        # Name of Avalon in graphical user interfaces
        # Use this to customise the visual appearance of Avalon
        # to better integrate with your surrounding pipeline
        ("AVALON_LABEL", "Avalon"),
        # Used during any connections to the outside world
        ("AVALON_TIMEOUT", "1000"),
        # Address to Asset Database
        ("AVALON_MONGO", "mongodb://localhost:27017"),
        # Name of database used in MongoDB
        ("AVALON_DB", "avalon"),
        # Address to Sentry
        ("AVALON_SENTRY", None),
        # Address to Deadline Web Service
        # E.g. http://192.167.0.1:8082
        ("AVALON_DEADLINE", None),
        # Enable features not necessarily stable. The user's own risk
        ("AVALON_EARLY_ADOPTER", None),
        # Address of central asset repository, contains
        # the following interface:
        #   /upload
        #   /download
        #   /manager (optional)
        ("AVALON_LOCATION", "http://127.0.0.1"),
        # Boolean of whether to upload published material
        # to central asset repository
        ("AVALON_UPLOAD", None),
        # Generic username and password
        ("AVALON_USERNAME", "avalon"),
        ("AVALON_PASSWORD", "secret"),
        # Unique identifier for instances in working files
        ("AVALON_INSTANCE_ID", "avalon.instance"),
        ("AVALON_CONTAINER_ID", "avalon.container"),
        # Enable debugging
        ("AVALON_DEBUG", None),
    )

    Session = {}
    for key, default in defaults:
        value = os.getenv(key, default)
        if value is not None:
            Session[key] = value

    Session["schema"] = "avalon-core:session-2.0"
    try:
        schema.validate(Session)
    except schema.ValidationError as e:
        # TODO(marcus): Make this mandatory
        self.log.warning(e)

    return Session
def insert_one(self, item):
    """Validate `item` and insert it into the active project collection.

    Returns:
        pymongo.results.InsertOneResult
    """
    assert isinstance(item, dict), "item must be of type <dict>"
    schema.validate(item)

    project = self.Session["AVALON_PROJECT"]
    return self._database[project].insert_one(item)
def test_save():
    """Saving works well under normal circumstances"""
    config_ = {
        "schema": "avalon-core:config-1.0",
        "apps": [
            {"name": "app1"},
            {"name": "app2"},
        ],
        # BUG FIX: the original wrote this as a *set literal of dicts*
        # ({ {...}, {...} }), which raises `TypeError: unhashable type:
        # 'dict'` as soon as the literal is evaluated. The intended form
        # is a mapping of task name -> task data.
        "tasks": {
            "Animation": {
                "short_name": "anim"
            },
            "Modeling": {
                "short_name": "mdl"
            },
        },
        "template": {
            "work": "{root}/{project}/{silo}/{asset}/work/"
                    "{task}/{user}/{app}",
            "publish": "{root}/{project}/{silo}/{asset}/publish/"
                       "{subset}/v{version:0>3}/{subset}.{representation}"
        },
        "families": [{
            "name": "avalon.model",
            "label": "Model",
            "icon": "cube"
        }],
        "groups": [
            {
                "name": "charCaches",
                "icon": "diamond",
                "color": "#C4CEDC",
                "order": -99
            },
        ],
        "copy": {}
    }

    inventory_ = {
        "schema": "avalon-core:inventory-1.0",
        "assets": [
            {"name": "asset1"},
            {"name": "asset2"}
        ],
        "film": [
            {"name": "shot1"},
            {"name": "shot2"},
        ]
    }

    # Both fixtures must be schema-valid before saving
    schema.validate(config_)
    schema.validate(inventory_)

    _id = io.insert_one(self._project).inserted_id
    project = io.find_one({"_id": _id})

    assert_equals(project["config"], self._project["config"])

    inventory.save(
        name=self._project["name"],
        config=config_,
        inventory=inventory_
    )

    # The stored config equals the input minus its "schema" key
    project = io.find_one({"_id": _id})
    config_.pop("schema")
    assert_equals(project["config"], config_)

    # Every inventory asset must exist as a child of the project
    for asset in inventory_["assets"]:
        assert io.find_one({
            "type": "asset",
            "parent": project["_id"],
            "name": asset["name"]
        })
def process(self, instance):
    """Integrate a "master" version of the instance's published subset.

    Creates (or replaces) a single ``master_version`` document pointing
    at the source version and mirrors the published representation files
    into the project's ``master`` anatomy location. All database changes
    are collected into one ``bulk_write`` call that only executes after
    the files were copied, and a previous master directory is renamed to
    a ``.BACKUP`` folder first so it can be restored if anything fails.

    Args:
        instance (pyblish.api.Instance): Instance carrying
            "published_representations", context "anatomy" data and
            optionally "versionEntity", "transfers" and "hardlinks".

    Raises:
        AssertionError: When backup folders are exhausted or the current
            master files cannot be replaced (permissions).
        Exception: Integrity error when a representation's files mix
            frame collections and single files; any error during the
            copy/write phase is re-raised after restoring the backup.
    """
    self.log.debug(
        "--- Integration of Master version for subset `{}` begins.".format(
            instance.data.get("subset", str(instance))
        )
    )
    published_repres = instance.data.get("published_representations")
    if not published_repres:
        self.log.debug(
            "*** There are not published representations on the instance."
        )
        return

    project_name = api.Session["AVALON_PROJECT"]

    # TODO raise error if master not set?
    anatomy = instance.context.data["anatomy"]
    if "master" not in anatomy.templates:
        self.log.warning("!!! Anatomy does not have set `master` key!")
        return

    if "path" not in anatomy.templates["master"]:
        self.log.warning((
            "!!! There is not set `path` template in `master` anatomy"
            " for project \"{}\"."
        ).format(project_name))
        return

    master_template = anatomy.templates["master"]["path"]
    self.log.debug("`Master` template check was successful. `{}`".format(
        master_template
    ))

    master_publish_dir = self.get_publish_dir(instance)

    src_version_entity = instance.data.get("versionEntity")

    # Drop representations explicitly ignored by name
    filtered_repre_ids = []
    for repre_id, repre_info in published_repres.items():
        repre = repre_info["representation"]
        if repre["name"].lower() in self.ignored_representation_names:
            self.log.debug(
                "Filtering representation with name: `{}`".format(
                    repre["name"].lower()
                )
            )
            filtered_repre_ids.append(repre_id)

    for repre_id in filtered_repre_ids:
        published_repres.pop(repre_id, None)

    if not published_repres:
        self.log.debug(
            "*** All published representations were filtered by name."
        )
        return

    if src_version_entity is None:
        self.log.debug((
            "Published version entity was not sent in representation data."
            " Querying entity from database."
        ))
        src_version_entity = (
            self.version_from_representations(published_repres)
        )

    if not src_version_entity:
        self.log.warning((
            "!!! Can't find origin version in database."
            " Skipping Master version publish."
        ))
        return

    # Collect every destination path touched by the publish (transfers
    # and hardlinks), de-duplicated and normalized
    all_copied_files = []
    transfers = instance.data.get("transfers", list())
    for _src, dst in transfers:
        dst = os.path.normpath(dst)
        if dst not in all_copied_files:
            all_copied_files.append(dst)

    hardlinks = instance.data.get("hardlinks", list())
    for _src, dst in hardlinks:
        dst = os.path.normpath(dst)
        if dst not in all_copied_files:
            all_copied_files.append(dst)

    all_repre_file_paths = []
    for repre_info in published_repres.values():
        published_files = repre_info.get("published_files") or []
        for file_path in published_files:
            file_path = os.path.normpath(file_path)
            if file_path not in all_repre_file_paths:
                all_repre_file_paths.append(file_path)

    # TODO this is not best practice of getting resources for publish
    # WARNING due to this we must remove all files from master publish dir
    instance_publish_dir = os.path.normpath(
        instance.data["publishDir"]
    )
    # Non-representation resources from publishDir follow the same
    # directory move into the master location
    other_file_paths_mapping = []
    for file_path in all_copied_files:
        # Check if it is from publishDir
        if not file_path.startswith(instance_publish_dir):
            continue

        if file_path in all_repre_file_paths:
            continue

        dst_filepath = file_path.replace(
            instance_publish_dir, master_publish_dir
        )
        other_file_paths_mapping.append((file_path, dst_filepath))

    # Current version
    old_version, old_repres = (
        self.current_master_ents(src_version_entity)
    )

    old_repres_by_name = {
        repre["name"].lower(): repre for repre in old_repres
    }

    # Reuse the existing master version id so downstream references to
    # the master version stay stable
    if old_version:
        new_version_id = old_version["_id"]
    else:
        new_version_id = io.ObjectId()

    new_master_version = {
        "_id": new_version_id,
        "version_id": src_version_entity["_id"],
        "parent": src_version_entity["parent"],
        "type": "master_version",
        "schema": "pype:master_version-1.0"
    }

    schema.validate(new_master_version)

    # Don't make changes in database until everything is O.K.
    bulk_writes = []

    if old_version:
        self.log.debug("Replacing old master version.")
        bulk_writes.append(
            ReplaceOne(
                {"_id": new_master_version["_id"]},
                new_master_version
            )
        )
    else:
        self.log.debug("Creating first master version.")
        bulk_writes.append(
            InsertOne(new_master_version)
        )

    # Separate old representations into `to replace` and `to delete`
    old_repres_to_replace = {}
    old_repres_to_delete = {}
    for repre_info in published_repres.values():
        repre = repre_info["representation"]
        repre_name_low = repre["name"].lower()
        if repre_name_low in old_repres_by_name:
            old_repres_to_replace[repre_name_low] = (
                old_repres_by_name.pop(repre_name_low)
            )

    if old_repres_by_name:
        old_repres_to_delete = old_repres_by_name

    # Find representations previously archived for this master version so
    # they can be un-archived instead of re-created.
    # FIX: query type was misspelled "archived_repsentation"; this method
    # itself archives representations with type "archived_representation"
    # (see below), so the misspelled query could never match them.
    archived_repres = list(io.find({
        "type": "archived_representation",
        "parent": new_version_id
    }))
    archived_repres_by_name = {}
    for repre in archived_repres:
        repre_name_low = repre["name"].lower()
        archived_repres_by_name[repre_name_low] = repre

    # Move the current master dir aside as ".BACKUP" (with a numeric
    # suffix if necessary) so it can be restored on failure
    backup_master_publish_dir = None
    if os.path.exists(master_publish_dir):
        backup_master_publish_dir = master_publish_dir + ".BACKUP"
        max_idx = 10
        idx = 0
        _backup_master_publish_dir = backup_master_publish_dir
        while os.path.exists(_backup_master_publish_dir):
            self.log.debug((
                "Backup folder already exists."
                " Trying to remove \"{}\""
            ).format(_backup_master_publish_dir))
            try:
                shutil.rmtree(_backup_master_publish_dir)
                backup_master_publish_dir = _backup_master_publish_dir
                break
            except Exception:
                self.log.info((
                    "Could not remove previous backup folder."
                    " Trying to add index to folder name"
                ))

            _backup_master_publish_dir = (
                backup_master_publish_dir + str(idx)
            )
            if not os.path.exists(_backup_master_publish_dir):
                backup_master_publish_dir = _backup_master_publish_dir
                break

            if idx > max_idx:
                raise AssertionError((
                    "Backup folders are fully occupied to max index \"{}\""
                ).format(max_idx))

            idx += 1

        self.log.debug("Backup folder path is \"{}\"".format(
            backup_master_publish_dir
        ))
        try:
            os.rename(master_publish_dir, backup_master_publish_dir)
        except PermissionError:
            raise AssertionError((
                "Could not create master version because it is not"
                " possible to replace current master files."
            ))

    try:
        src_to_dst_file_paths = []
        for repre_info in published_repres.values():

            # Skip if new repre does not have published repre files
            published_files = repre_info["published_files"]
            if len(published_files) == 0:
                continue

            # Prepare anatomy data
            anatomy_data = repre_info["anatomy_data"]
            anatomy_data.pop("version", None)

            # Get filled path to repre context
            anatomy_filled = anatomy.format(anatomy_data)
            template_filled = anatomy_filled["master"]["path"]

            repre_data = {
                "path": str(template_filled),
                "template": master_template
            }
            repre_context = template_filled.used_values
            for key in self.db_representation_context_keys:
                if (
                    key in repre_context or
                    key not in anatomy_data
                ):
                    continue

                repre_context[key] = anatomy_data[key]

            # Prepare new repre
            repre = copy.deepcopy(repre_info["representation"])
            repre["parent"] = new_master_version["_id"]
            repre["context"] = repre_context
            repre["data"] = repre_data
            repre.pop("_id", None)

            schema.validate(repre)

            repre_name_low = repre["name"].lower()
            # Replace current representation
            if repre_name_low in old_repres_to_replace:
                old_repre = old_repres_to_replace.pop(repre_name_low)
                repre["_id"] = old_repre["_id"]
                bulk_writes.append(
                    ReplaceOne(
                        {"_id": old_repre["_id"]},
                        repre
                    )
                )

            # Unarchive representation
            elif repre_name_low in archived_repres_by_name:
                archived_repre = archived_repres_by_name.pop(
                    repre_name_low
                )
                old_id = archived_repre["old_id"]
                repre["_id"] = old_id
                bulk_writes.append(
                    ReplaceOne(
                        {"old_id": old_id},
                        repre
                    )
                )

            # Create representation
            else:
                repre["_id"] = io.ObjectId()
                bulk_writes.append(
                    InsertOne(repre)
                )

            # Prepare paths of source and destination files
            if len(published_files) == 1:
                src_to_dst_file_paths.append(
                    (published_files[0], template_filled)
                )
                continue

            collections, remainders = clique.assemble(published_files)
            if remainders or not collections or len(collections) > 1:
                raise Exception((
                    "Integrity error. Files of published representation "
                    "is combination of frame collections and single files."
                    "Collections: `{}` Single files: `{}`"
                ).format(str(collections), str(remainders)))

            src_col = collections[0]

            # Get head and tail for collection by filling the template
            # with a recognisable frame placeholder and splitting on it
            frame_splitter = "_-_FRAME_SPLIT_-_"
            anatomy_data["frame"] = frame_splitter
            _anatomy_filled = anatomy.format(anatomy_data)
            _template_filled = _anatomy_filled["master"]["path"]
            head, tail = _template_filled.split(frame_splitter)
            padding = int(
                anatomy.templates["render"].get(
                    "frame_padding",
                    anatomy.templates["render"].get("padding")
                )
            )

            dst_col = clique.Collection(
                head=head, padding=padding, tail=tail
            )
            dst_col.indexes.clear()
            dst_col.indexes.update(src_col.indexes)
            for src_file, dst_file in zip(src_col, dst_col):
                src_to_dst_file_paths.append(
                    (src_file, dst_file)
                )

        self.path_checks = []

        # Copy(hardlink) paths of source and destination files
        # TODO should we *only* create hardlinks?
        # TODO should we keep files for deletion until this is successful?
        for src_path, dst_path in src_to_dst_file_paths:
            self.copy_file(src_path, dst_path)

        for src_path, dst_path in other_file_paths_mapping:
            self.copy_file(src_path, dst_path)

        # Archive not replaced old representations
        for repre_name_low, repre in old_repres_to_delete.items():
            # Replace archived representation (This is backup)
            # - should not happen to have both repre and archived repre
            if repre_name_low in archived_repres_by_name:
                archived_repre = archived_repres_by_name.pop(
                    repre_name_low
                )
                repre["old_id"] = repre["_id"]
                repre["_id"] = archived_repre["_id"]
                repre["type"] = archived_repre["type"]
                bulk_writes.append(
                    ReplaceOne(
                        {"_id": archived_repre["_id"]},
                        repre
                    )
                )

            else:
                repre["old_id"] = repre["_id"]
                repre["_id"] = io.ObjectId()
                repre["type"] = "archived_representation"
                bulk_writes.append(
                    InsertOne(repre)
                )

        # All file operations succeeded; commit every DB change at once
        if bulk_writes:
            io._database[io.Session["AVALON_PROJECT"]].bulk_write(
                bulk_writes
            )

        # Remove backuped previous master
        if (
            backup_master_publish_dir is not None and
            os.path.exists(backup_master_publish_dir)
        ):
            shutil.rmtree(backup_master_publish_dir)

    except Exception:
        # Restore previous master files before propagating the error
        if (
            backup_master_publish_dir is not None and
            os.path.exists(backup_master_publish_dir)
        ):
            os.rename(backup_master_publish_dir, master_publish_dir)
        self.log.error((
            "!!! Creating of Master version failed."
            " Previous master version maybe lost some data!"
        ))
        raise

    self.log.debug((
        "--- Master version integration for subset `{}`"
        " seems to be successful."
    ).format(
        instance.data.get("subset", str(instance))
    ))