def get_subset(self, instance):
    """Return the subset document for this instance, creating it if absent.

    Looks the subset up under the context's asset document; when no match
    exists, inserts a minimal avalon-core:subset-2.0 document and re-reads it.
    """
    asset_id = instance.context.data["assetDoc"]["_id"]
    subset_name = instance.data["subset"]

    subset = io.find_one({
        "type": "subset",
        "parent": asset_id,
        "name": subset_name,
    })
    if subset is not None:
        return subset

    self.log.info("Subset '%s' not found, creating.." % subset_name)
    inserted = io.insert_one({
        "schema": "avalon-core:subset-2.0",
        "type": "subset",
        "name": subset_name,
        "data": {},
        "parent": asset_id,
    })
    # Re-fetch so the caller gets the document exactly as stored.
    return io.find_one({"_id": inserted.inserted_id})
def write_database(self, instance, version, representations):
    """Write version and representations to database

    Should write version documents until files collecting passed
    without error.
    """
    # Write version
    #
    self.log.info(
        "Registering version {} to database ...".format(version["name"]))

    # Honour a pre-generated id when an earlier plugin reserved one.
    if "pregeneratedVersionId" in instance.data:
        version["_id"] = instance.data["pregeneratedVersionId"]

    version_id = io.insert_one(version).inserted_id

    # Write representations
    #
    self.log.info(
        "Registering {} representations ...".format(len(representations)))

    # Parent every representation to the freshly inserted version,
    # then insert them all in a single batch.
    for repre_doc in representations:
        repre_doc["parent"] = version_id
    io.insert_many(representations)

    return version_id
def get_subset(self, asset, instance):
    """Return the subset document under ``asset``, creating one when missing."""
    subset_name = instance.data["subset"]

    subset = io.find_one({
        "type": "subset",
        "parent": asset["_id"],
        "name": subset_name,
    })
    if subset is not None:
        return subset

    self.log.info("Subset '%s' not found, creating.." % subset_name)
    self.log.debug("families. %s" % instance.data.get('families'))
    self.log.debug("families. %s" % type(instance.data.get('families')))

    new_id = io.insert_one({
        "schema": "pype:subset-3.0",
        "type": "subset",
        "name": subset_name,
        "data": {"families": instance.data.get('families')},
        "parent": asset["_id"],
    }).inserted_id

    # Re-fetch so the caller gets the stored document.
    return io.find_one({"_id": new_id})
def process(self, instance):
    """Integrate collected subset/version/representation docs into the db.

    Aborts when any earlier publish result failed (atomic publish).  Writes
    subset and version documents only when they do not already exist;
    otherwise refreshes the existing version's timestamp and the stored
    representation data.
    """
    context = instance.context

    # Guard: every prior plugin result must have succeeded.
    if not all(result["success"] for result in context.data["results"]):
        self.log.warning("Atomicity not held, aborting.")
        return

    # Integrate representations' to database
    self.log.info("Integrating representations to database ...")

    asset = context.data["assetDoc"]
    subset, version, representations = instance.data["toDatabase"]

    # Write subset if not exists
    subset_filter = {"parent": asset["_id"], "name": subset["name"]}
    if io.find_one(subset_filter) is None:
        io.insert_one(subset)

    # Write version if not exists
    version_filter = {"parent": subset["_id"], "name": version["name"]}
    existed_version = io.find_one(version_filter)

    if existed_version is None:
        # Write version and representations to database
        version_id = self.write_database(instance, version, representations)
        instance.data["insertedVersionId"] = version_id

        # Update dependent
        self.update_dependent(instance, version_id)
        return

    self.log.info("Version existed, representation file has been "
                  "overwritten.")

    # Update version document "data.time"
    io.update_many(
        {"_id": existed_version["_id"]},
        {"$set": {"data.time": context.data["time"]}})

    # Update representation documents "data"
    for representation in representations:
        io.update_many(
            {
                "name": representation["name"],
                "parent": existed_version["_id"],
            },
            {"$set": {"data": representation["data"]}})
def get_subset(self, asset, instance):
    """Fetch (or create) the subset document and sync its group and families.

    Creation seeds the families list from the instance; afterwards the
    subset group and families are (re)written on every call so existing
    subsets stay in sync with the instance data.
    """
    subset_name = instance.data["subset"]
    subset = io.find_one({
        "type": "subset",
        "parent": asset["_id"],
        "name": subset_name,
    })

    if subset is None:
        self.log.info("Subset '%s' not found, creating ..." % subset_name)
        self.log.debug("families. %s" % instance.data.get('families'))
        self.log.debug("families. %s" % type(instance.data.get('families')))

        # Primary family first, then additional ones without duplicates.
        families = []
        family = instance.data.get("family")
        if family:
            families.append(family)
        for extra_family in (instance.data.get("families") or []):
            if extra_family not in families:
                families.append(extra_family)

        new_id = io.insert_one({
            "schema": "openpype:subset-3.0",
            "type": "subset",
            "name": subset_name,
            "data": {"families": families},
            "parent": asset["_id"],
        }).inserted_id
        subset = io.find_one({"_id": new_id})

    # add group if available
    subset_group = instance.data.get("subsetGroup")
    if subset_group:
        io.update_many(
            {'type': 'subset', '_id': io.ObjectId(subset["_id"])},
            {'$set': {'data.subsetGroup': subset_group}})

    # Update families on subset.
    families = [instance.data["family"]]
    families.extend(instance.data.get("families", []))
    io.update_many(
        {"type": "subset", "_id": io.ObjectId(subset["_id"])},
        {"$set": {"data.families": families}})

    return subset
def create_asset(data):
    """Create asset

    Requires:
        {"name": "uniquecode", "silo": "assets"}

    Optional:
        {"data": {}}

    Raises RuntimeError when no project exists or the name is taken.
    """
    payload = data.copy()

    project = io.find_one({"type": "project"})
    if project is None:
        raise RuntimeError("Project must exist prior to creating assets")

    name = payload.pop("name")
    silo = payload.pop("silo")
    # Link to parent by id if provided, otherwise parent to the project
    visual_parent = payload.pop("visualParent", None)

    # Asset *must* have a name and silo
    assert name, "Asset has no name"
    assert silo, "Asset has no silo"

    asset = {
        "schema": "avalon-core:asset-2.0",
        "parent": project['_id'],
        "name": name,
        "silo": silo,
        "visualParent": visual_parent,
        "type": "asset",
        "data": payload,
    }

    # Ensure it has a unique name
    if io.find_one({"name": name, "type": "asset"}) is not None:
        raise RuntimeError("Asset named {} already "
                           "exists.".format(name))

    schema.validate(asset)
    io.insert_one(asset)
def create_avalon_asset(self, name, data):
    """Insert an avalon asset document and return it as stored in the db."""
    item = {
        "schema": "avalon-core:asset-3.0",
        "name": name,
        "parent": self.project["_id"],
        "type": "asset",
        "data": data,
    }
    self.log.debug("Creating asset: {}".format(item))

    inserted_id = io.insert_one(item).inserted_id
    return io.find_one({"_id": inserted_id})
def create_project(name):
    """Insert a new project document and return its inserted id.

    Raises RuntimeError when a project with ``name`` already exists.
    """
    if io.find_one({"type": "project", "name": name}):
        raise RuntimeError("%s already exists" % name)

    document = {
        "schema": "avalon-core:project-2.0",
        "type": "project",
        "name": name,
        "data": dict(),
        # Seed the project with the default configuration.
        "config": DEFAULTS["config"],
        "parent": None,
    }
    return io.insert_one(document).inserted_id
def create_asset(name, silo, data, parent):
    """Insert a new asset document under ``parent`` and return its id.

    ``parent`` must be an ObjectId; raises RuntimeError on duplicate name.
    """
    assert isinstance(parent, io.ObjectId)
    if io.find_one({"type": "asset", "name": name}):
        raise RuntimeError("%s already exists" % name)

    document = {
        "schema": "avalon-core:asset-2.0",
        "name": name,
        "silo": silo,
        "parent": parent,
        "type": "asset",
        "data": data,
    }
    return io.insert_one(document).inserted_id
def get_subset(self, asset, instance):
    """Fetch the subset document under ``asset``; create it when missing.

    Also writes ``data.subsetGroup`` when the instance carries one.
    """
    subset_name = instance.data["subset"]
    subset = io.find_one({
        "type": "subset",
        "parent": asset["_id"],
        "name": subset_name,
    })

    if subset is None:
        self.log.info("Subset '%s' not found, creating.." % subset_name)
        self.log.debug("families. %s" % instance.data.get('families'))
        self.log.debug("families. %s" % type(instance.data.get('families')))

        new_id = io.insert_one({
            "schema": "pype:subset-3.0",
            "type": "subset",
            "name": subset_name,
            "data": {"families": instance.data.get("families", [])},
            "parent": asset["_id"],
        }).inserted_id
        subset = io.find_one({"_id": new_id})

    # add group if available
    subset_group = instance.data.get("subsetGroup")
    if subset_group:
        io.update_many(
            {'type': 'subset', '_id': io.ObjectId(subset["_id"])},
            {'$set': {'data.subsetGroup': subset_group}})

    return subset
def register(self, instance):
    """Register the instance's subset, version and representations in the db
    and queue file transfers from the staging directory to publish paths.

    Side effects: inserts/updates version and representation documents,
    archives replaced representations, appends [src, dst] pairs to
    ``instance.data["transfers"]`` and fills
    ``instance.data["published_representations"]``.

    FIX: the ``resolution_height`` and ``fps`` template keys were guarded by
    ``if resolution_width:`` (copy-paste) — each is now guarded by its own
    value.
    """
    # Required environment variables
    anatomy_data = instance.data["anatomyData"]

    io.install()

    context = instance.context

    project_entity = instance.data["projectEntity"]

    context_asset_name = context.data["assetEntity"]["name"]

    asset_name = instance.data["asset"]
    asset_entity = instance.data.get("assetEntity")
    # Re-query the asset when the instance targets a different asset than
    # the publish context.
    if not asset_entity or asset_entity["name"] != context_asset_name:
        asset_entity = io.find_one({
            "type": "asset",
            "name": asset_name,
            "parent": project_entity["_id"]
        })
        assert asset_entity, (
            "No asset found by the name \"{0}\" in project \"{1}\""
        ).format(asset_name, project_entity["name"])

        instance.data["assetEntity"] = asset_entity

        # update anatomy data with asset specific keys
        # - name should already been set
        hierarchy = ""
        parents = asset_entity["data"]["parents"]
        if parents:
            hierarchy = "/".join(parents)
        anatomy_data["hierarchy"] = hierarchy

    task_name = instance.data.get("task")
    if task_name:
        anatomy_data["task"] = task_name

    anatomy_data["family"] = instance.data.get("family")

    stagingdir = instance.data.get("stagingDir")
    if not stagingdir:
        self.log.info((
            "{0} is missing reference to staging directory."
            " Will try to get it from representation."
        ).format(instance))
    else:
        self.log.debug(
            "Establishing staging directory @ {0}".format(stagingdir))

    # Ensure at least one file is set up for transfer in staging dir.
    repres = instance.data.get("representations")
    assert repres, "Instance has no files to transfer"
    assert isinstance(repres, (list, tuple)), (
        "Instance 'files' must be a list, got: {0} {1}".format(
            str(type(repres)), str(repres)))

    subset = self.get_subset(asset_entity, instance)
    instance.data["subsetEntity"] = subset

    version_number = instance.data["version"]
    self.log.debug("Next version: v{}".format(version_number))

    version_data = self.create_version_data(context, instance)

    version_data_instance = instance.data.get('versionData')
    if version_data_instance:
        version_data.update(version_data_instance)

    # TODO rename method from `create_version` to
    # `prepare_version` or similar...
    version = self.create_version(
        subset=subset,
        version_number=version_number,
        data=version_data)
    self.log.debug("Creating version ...")

    new_repre_names_low = [_repre["name"].lower() for _repre in repres]

    existing_version = io.find_one({
        'type': 'version',
        'parent': subset["_id"],
        'name': version_number
    })

    if existing_version is None:
        version_id = io.insert_one(version).inserted_id
    else:
        # Check if instance have set `append` mode which cause that
        # only replicated representations are set to archive
        append_repres = instance.data.get("append", False)

        # Update version data
        # TODO query by _id and
        io.update_many({
            'type': 'version',
            'parent': subset["_id"],
            'name': version_number
        }, {'$set': version})
        version_id = existing_version['_id']

        # Find representations of existing version and archive them
        current_repres = list(io.find({
            "type": "representation",
            "parent": version_id
        }))
        bulk_writes = []
        for repre in current_repres:
            if append_repres:
                # archive only duplicated representations
                if repre["name"].lower() not in new_repre_names_low:
                    continue
            # Representation must change type,
            # `_id` must be stored to other key and replaced with new
            # - that is because new representations should have same ID
            repre_id = repre["_id"]
            bulk_writes.append(DeleteOne({"_id": repre_id}))
            repre["orig_id"] = repre_id
            repre["_id"] = io.ObjectId()
            repre["type"] = "archived_representation"
            bulk_writes.append(InsertOne(repre))

        # bulk updates
        if bulk_writes:
            io._database[io.Session["AVALON_PROJECT"]].bulk_write(
                bulk_writes)

    version = io.find_one({"_id": version_id})
    instance.data["versionEntity"] = version

    existing_repres = list(io.find({
        "parent": version_id,
        "type": "archived_representation"
    }))

    instance.data['version'] = version['name']

    intent_value = instance.context.data.get("intent")
    if intent_value and isinstance(intent_value, dict):
        intent_value = intent_value.get("value")

    if intent_value:
        anatomy_data["intent"] = intent_value

    anatomy = instance.context.data['anatomy']

    # Find the representations to transfer amongst the files
    # Each should be a single representation (as such, a single extension)
    representations = []
    destination_list = []

    if 'transfers' not in instance.data:
        instance.data['transfers'] = []

    template_name = self.template_name_from_instance(instance)

    published_representations = {}
    for idx, repre in enumerate(instance.data["representations"]):
        published_files = []

        # create template data for Anatomy
        template_data = copy.deepcopy(anatomy_data)
        if intent_value is not None:
            template_data["intent"] = intent_value

        resolution_width = repre.get("resolutionWidth")
        resolution_height = repre.get("resolutionHeight")
        fps = instance.data.get("fps")

        # FIX: guard each key by its own value (previously all three were
        # guarded by `resolution_width`).
        if resolution_width:
            template_data["resolution_width"] = resolution_width
        if resolution_height:
            template_data["resolution_height"] = resolution_height
        if fps:
            template_data["fps"] = fps

        files = repre['files']
        if repre.get('stagingDir'):
            stagingdir = repre['stagingDir']
        if repre.get("outputName"):
            template_data["output"] = repre['outputName']

        template = os.path.normpath(
            anatomy.templates[template_name]["path"])

        sequence_repre = isinstance(files, list)
        repre_context = None
        if sequence_repre:
            self.log.debug("files: {}".format(files))
            src_collections, remainder = clique.assemble(files)
            self.log.debug("src_tail_collections: {}".format(
                str(src_collections)))
            src_collection = src_collections[0]

            # Assert that each member has identical suffix
            src_head = src_collection.format("{head}")
            src_tail = src_collection.format("{tail}")

            # fix dst_padding
            valid_files = [x for x in files if src_collection.match(x)]
            padd_len = len(
                valid_files[0].replace(src_head, "").replace(src_tail, ""))
            src_padding_exp = "%0{}d".format(padd_len)

            test_dest_files = list()
            for i in [1, 2]:
                template_data["representation"] = repre['ext']
                template_data["frame"] = src_padding_exp % i
                anatomy_filled = anatomy.format(template_data)
                template_filled = anatomy_filled[template_name]["path"]
                if repre_context is None:
                    repre_context = template_filled.used_values
                test_dest_files.append(os.path.normpath(template_filled))
            template_data["frame"] = repre_context["frame"]

            self.log.debug("test_dest_files: {}".format(
                str(test_dest_files)))

            dst_collections, remainder = clique.assemble(test_dest_files)
            dst_collection = dst_collections[0]
            dst_head = dst_collection.format("{head}")
            dst_tail = dst_collection.format("{tail}")

            index_frame_start = None

            if repre.get("frameStart"):
                frame_start_padding = int(anatomy.templates["render"].get(
                    "frame_padding",
                    anatomy.templates["render"].get("padding")))
                index_frame_start = int(repre.get("frameStart"))

            # exception for slate workflow
            if index_frame_start and "slate" in instance.data["families"]:
                index_frame_start -= 1

            dst_padding_exp = src_padding_exp
            dst_start_frame = None
            for i in src_collection.indexes:
                # TODO 1.) do not count padding in each index iteration
                # 2.) do not count dst_padding from src_padding before
                #   index_frame_start check
                src_padding = src_padding_exp % i

                src_file_name = "{0}{1}{2}".format(
                    src_head, src_padding, src_tail)

                dst_padding = src_padding_exp % i

                if index_frame_start:
                    dst_padding_exp = "%0{}d".format(frame_start_padding)
                    dst_padding = dst_padding_exp % index_frame_start
                    index_frame_start += 1

                dst = "{0}{1}{2}".format(
                    dst_head, dst_padding, dst_tail).replace("..", ".")

                self.log.debug("destination: `{}`".format(dst))
                src = os.path.join(stagingdir, src_file_name)

                self.log.debug("source: {}".format(src))
                instance.data["transfers"].append([src, dst])

                published_files.append(dst)

                # for adding first frame into db
                if not dst_start_frame:
                    dst_start_frame = dst_padding

            # Store used frame value to template data
            template_data["frame"] = dst_start_frame
            dst = "{0}{1}{2}".format(
                dst_head, dst_start_frame, dst_tail).replace("..", ".")
            repre['published_path'] = dst

        else:
            # Single file
            template_data.pop("frame", None)
            fname = files
            assert not os.path.isabs(fname), (
                "Given file name is a full path")
            template_data["representation"] = repre['ext']

            src = os.path.join(stagingdir, fname)
            anatomy_filled = anatomy.format(template_data)
            template_filled = anatomy_filled[template_name]["path"]
            repre_context = template_filled.used_values
            dst = os.path.normpath(template_filled).replace("..", ".")

            instance.data["transfers"].append([src, dst])
            published_files.append(dst)
            repre['published_path'] = dst
            self.log.debug("__ dst: {}".format(dst))

        repre["publishedFiles"] = published_files

        for key in self.db_representation_context_keys:
            value = template_data.get(key)
            if not value:
                continue
            repre_context[key] = template_data[key]

        # Use previous representation's id if there are any
        repre_id = None
        repre_name_low = repre["name"].lower()
        for _repre in existing_repres:
            # NOTE should we check lowered names?
            if repre_name_low == _repre["name"]:
                repre_id = _repre["orig_id"]
                break

        # Create new id if existing representations does not match
        if repre_id is None:
            repre_id = io.ObjectId()

        representation = {
            "_id": repre_id,
            "schema": "pype:representation-2.0",
            "type": "representation",
            "parent": version_id,
            "name": repre['name'],
            "data": {'path': dst, 'template': template},
            "dependencies": instance.data.get("dependencies", "").split(),

            # Imprint shortcut to context
            # for performance reasons.
            "context": repre_context
        }

        if repre.get("outputName"):
            representation["context"]["output"] = repre['outputName']

        if sequence_repre and repre.get("frameStart"):
            representation['context']['frame'] = (
                dst_padding_exp % int(repre.get("frameStart")))

        self.log.debug("__ representation: {}".format(representation))
        destination_list.append(dst)
        self.log.debug("__ destination_list: {}".format(destination_list))
        instance.data['destination_list'] = destination_list
        representations.append(representation)
        published_representations[repre_id] = {
            "representation": representation,
            "anatomy_data": template_data,
            "published_files": published_files
        }
        self.log.debug("__ representations: {}".format(representations))

    # Remove old representations if there are any (before insertion of new)
    if existing_repres:
        repre_ids_to_remove = [repre["_id"] for repre in existing_repres]
        io.delete_many({"_id": {"$in": repre_ids_to_remove}})

    self.log.debug("__ representations: {}".format(representations))
    for rep in instance.data["representations"]:
        self.log.debug("__ represNAME: {}".format(rep['name']))
        self.log.debug("__ represPATH: {}".format(rep['published_path']))
    io.insert_many(representations)
    instance.data["published_representations"] = (
        published_representations)
    # self.log.debug("Representation: {}".format(representations))
    self.log.info("Registered {} items".format(len(representations)))
def create_asset(self):
    """Validate dialog inputs, create the asset in ftrack and avalon,
    optionally create an outlink and launch the selected app.

    FIXES:
    - ``outlink_entity`` was unbound (UnboundLocalError) when the outlink
      asset had no ftrackId/entityType; it is now initialised to None.
    - ``asset_query.format()`` was called with no arguments for a
      two-placeholder template (always raised, so the fast lookup never
      worked); it now receives the entity type and id.
    - local ``type`` renamed to avoid shadowing the builtin.
    """
    name_input = self.data['inputs']['name']
    name = name_input.text()
    test_name = name.replace(' ', '')
    error_message = None
    message = QtWidgets.QMessageBox(self)
    message.setWindowTitle("Some errors has occured")
    message.setIcon(QtWidgets.QMessageBox.Critical)
    # TODO: show error messages on any error
    if self.valid_parent is not True and test_name == '':
        error_message = "Name is not set and Parent is not selected"
    elif self.valid_parent is not True:
        error_message = "Parent is not selected"
    elif test_name == '':
        error_message = "Name is not set"

    if error_message is not None:
        message.setText(error_message)
        message.show()
        return

    # Reject names that already exist in the avalon database.
    test_name_exists = io.find({
        'type': 'asset',
        'name': name
    })
    existing_assets = [x for x in test_name_exists]
    if len(existing_assets) > 0:
        message.setText("Entered Asset name is occupied")
        message.show()
        return

    # When the app should be launched afterwards, a task must be selected.
    checkbox_app = self.data['inputs']['open_app']
    if checkbox_app is not None and checkbox_app.isChecked() is True:
        task_view = self.data["view"]["tasks"]
        task_model = self.data["model"]["tasks"]
        try:
            index = task_view.selectedIndexes()[0]
            task_name = task_model.itemData(index)[0]
        except Exception:
            message.setText("Please select task")
            message.show()
            return

    # Get ftrack session (lazily created and cached on the dialog)
    if self.session is None:
        session = ftrack_api.Session()
        self.session = session
    else:
        session = self.session

    # Get Ftrack project entity
    project_name = io.Session['AVALON_PROJECT']
    project_query = 'Project where full_name is "{}"'.format(project_name)
    try:
        ft_project = session.query(project_query).one()
    except Exception:
        message.setText("Ftrack project was not found")
        message.show()
        return

    # Get Ftrack entity of parent
    ft_parent = None
    assets_model = self.data["model"]["assets"]
    selected = assets_model.get_selected_assets()
    parent = io.find_one({"_id": selected[0], "type": "asset"})
    asset_id = parent.get('data', {}).get('ftrackId', None)
    asset_entity_type = parent.get('data', {}).get('entityType', None)
    asset_query = '{} where id is "{}"'
    if asset_id is not None and asset_entity_type is not None:
        try:
            ft_parent = session.query(
                asset_query.format(asset_entity_type, asset_id)).one()
        except Exception:
            ft_parent = None

    # Fall back to a lookup by the avalon document when the id query failed.
    if ft_parent is None:
        ft_parent = self.get_ftrack_asset(parent, ft_project)

    if ft_parent is None:
        message.setText("Parent's Ftrack entity was not found")
        message.show()
        return

    asset_build_combo = self.data['inputs']['assetbuild']
    asset_type_name = asset_build_combo.currentText()
    asset_type_query = 'Type where name is "{}"'.format(asset_type_name)
    try:
        asset_type = session.query(asset_type_query).one()
    except Exception:
        message.setText("Selected Asset Build type does not exists")
        message.show()
        return

    # The name must also be free among the ftrack parent's children.
    for children in ft_parent['children']:
        if children['name'] == name:
            message.setText("Entered Asset name is occupied")
            message.show()
            return

    task_template_combo = self.data['inputs']['tasktemplate']
    task_template = task_template_combo.currentText()
    tasks = []
    for template in self.config_data['task_templates']:
        if template['name'] == task_template:
            tasks = template['task_types']
            break

    # Every template task type must exist in the ftrack project schema.
    available_task_types = []
    task_types = ft_project['project_schema']['_task_type_schema']
    for task_type in task_types['types']:
        available_task_types.append(task_type['name'])

    not_possible_tasks = []
    for task in tasks:
        if task not in available_task_types:
            not_possible_tasks.append(task)

    if len(not_possible_tasks) != 0:
        message.setText((
            "These Task types weren't found"
            " in Ftrack project schema:\n{}").format(
                ', '.join(not_possible_tasks)))
        message.show()
        return

    # Create asset build
    asset_build_data = {
        'name': name,
        'project_id': ft_project['id'],
        'parent_id': ft_parent['id'],
        'type': asset_type
    }
    new_entity = session.create('AssetBuild', asset_build_data)

    task_data = {
        'project_id': ft_project['id'],
        'parent_id': new_entity['id']
    }
    for task in tasks:
        task_type_entity = session.query(
            'Type where name is "{}"'.format(task)).one()
        task_data['type_id'] = task_type_entity['id']
        task_data['name'] = task
        session.create('Task', task_data)

    # Mirror the new asset into the avalon database.
    av_project = io.find_one({'type': 'project'})
    hiearchy_items = []
    hiearchy_items.extend(self.get_avalon_parent(parent))
    hiearchy_items.append(parent['name'])
    hierarchy = os.path.sep.join(hiearchy_items)
    new_asset_data = {
        'ftrackId': new_entity['id'],
        'entityType': new_entity.entity_type,
        'visualParent': parent['_id'],
        'tasks': tasks,
        'parents': hiearchy_items,
        'hierarchy': hierarchy
    }
    new_asset_info = {
        'parent': av_project['_id'],
        'name': name,
        'schema': "openpype:asset-3.0",
        'type': 'asset',
        'data': new_asset_data
    }

    # Backwards compatibility (add silo from parent if is silo project)
    if self.silos:
        new_asset_info["silo"] = parent["silo"]

    try:
        schema.validate(new_asset_info)
    except Exception:
        message.setText((
            'Asset information are not valid'
            ' to create asset in avalon database'))
        message.show()
        # Keep ftrack consistent with avalon: undo the ftrack entities.
        session.rollback()
        return

    io.insert_one(new_asset_info)
    session.commit()

    # Optional outlink to another asset's ftrack entity.
    outlink_cb = self.data['inputs']['outlink_cb']
    if outlink_cb.isChecked() is True:
        outlink_input = self.data['inputs']['outlink']
        outlink_name = outlink_input.text()
        outlink_asset = io.find_one({
            'type': 'asset',
            'name': outlink_name
        })
        outlink_ft_id = outlink_asset.get('data', {}).get('ftrackId', None)
        outlink_entity_type = outlink_asset.get(
            'data', {}).get('entityType', None)
        # FIX: initialise so the fallback below can run when id/type is
        # missing (previously raised UnboundLocalError).
        outlink_entity = None
        if outlink_ft_id is not None and outlink_entity_type is not None:
            try:
                # FIX: pass the query arguments (was `.format()` with none).
                outlink_entity = session.query(asset_query.format(
                    outlink_entity_type, outlink_ft_id)).one()
            except Exception:
                outlink_entity = None

        if outlink_entity is None:
            outlink_entity = self.get_ftrack_asset(
                outlink_asset, ft_project)

        if outlink_entity is None:
            message.setText("Outlink's Ftrack entity was not found")
            message.show()
            return

        link_data = {
            'from_id': new_entity['id'],
            'to_id': outlink_entity['id']
        }
        session.create('TypedContextLink', link_data)
        session.commit()

    # Launch the app in the new asset's context, restoring the original
    # context afterwards.
    if checkbox_app is not None and checkbox_app.isChecked() is True:
        origin_asset = api.Session.get('AVALON_ASSET', None)
        origin_task = api.Session.get('AVALON_TASK', None)
        asset_name = name
        task_view = self.data["view"]["tasks"]
        task_model = self.data["model"]["tasks"]
        try:
            index = task_view.selectedIndexes()[0]
        except Exception:
            message.setText("No task is selected. App won't be launched")
            message.show()
            return
        task_name = task_model.itemData(index)[0]
        try:
            api.update_current_task(task=task_name, asset=asset_name)
            self.open_app()
        finally:
            if origin_task is not None and origin_asset is not None:
                api.update_current_task(
                    task=origin_task, asset=origin_asset)

    message.setWindowTitle("Asset Created")
    message.setText("Asset Created successfully")
    message.setIcon(QtWidgets.QMessageBox.Information)
    message.show()
def process(self, instance):
    """Copy the "thumbnail" representation to the thumbnail root and link it.

    Creates a thumbnail document and writes ``data.thumbnail_id`` on both
    the version and asset documents.  Silently skips when prerequisites
    (env var, templates, representation, source file) are missing.

    FIX: the final debug message logged the version id while claiming to
    report the asset; it now logs the asset id.
    """
    if not os.environ.get("AVALON_THUMBNAIL_ROOT"):
        self.log.warning("AVALON_THUMBNAIL_ROOT is not set."
                         " Skipping thumbnail integration.")
        return

    published_repres = instance.data.get("published_representations")
    if not published_repres:
        self.log.debug(
            "There are no published representations on the instance.")
        return

    project_name = api.Session["AVALON_PROJECT"]

    anatomy = instance.context.data["anatomy"]
    if "publish" not in anatomy.templates:
        self.log.warning("Anatomy is missing the \"publish\" key!")
        return

    if "thumbnail" not in anatomy.templates["publish"]:
        self.log.warning((
            "There is no \"thumbnail\" template set for the project \"{}\""
        ).format(project_name))
        return

    # Find the representation named "thumbnail" among published ones.
    thumb_repre = None
    thumb_repre_anatomy_data = None
    for repre_info in published_repres.values():
        repre = repre_info["representation"]
        if repre["name"].lower() == "thumbnail":
            thumb_repre = repre
            thumb_repre_anatomy_data = repre_info["anatomy_data"]
            break

    if not thumb_repre:
        self.log.debug(
            "There is not representation with name \"thumbnail\"")
        return

    io.install()
    thumbnail_template = anatomy.templates["publish"]["thumbnail"]

    version = io.find_one({"_id": thumb_repre["parent"]})
    if not version:
        raise AssertionError(
            "There does not exist version with id {}".format(
                str(thumb_repre["parent"])))

    # Get full path to thumbnail file from representation
    src_full_path = os.path.normpath(thumb_repre["data"]["path"])
    if not os.path.exists(src_full_path):
        self.log.warning("Thumbnail file was not found. Path: {}".format(
            src_full_path))
        return

    filename, file_extension = os.path.splitext(src_full_path)
    # Create id for mongo entity now to fill anatomy template
    thumbnail_id = ObjectId()

    # Prepare anatomy template fill data
    template_data = copy.deepcopy(thumb_repre_anatomy_data)
    template_data.update({
        "_id": str(thumbnail_id),
        "thumbnail_root": os.environ.get("AVALON_THUMBNAIL_ROOT"),
        "ext": file_extension[1:],
        "thumbnail_type": "thumbnail"
    })

    anatomy_filled = anatomy.format(template_data)
    template_filled = anatomy_filled["publish"]["thumbnail"]

    dst_full_path = os.path.normpath(str(template_filled))
    self.log.debug("Copying file .. {} -> {}".format(
        src_full_path, dst_full_path))
    dirname = os.path.dirname(dst_full_path)
    try:
        os.makedirs(dirname)
    except OSError as e:
        # Directory may already exist; re-raise anything else.
        if e.errno != errno.EEXIST:
            tp, value, tb = sys.exc_info()
            six.reraise(tp, value, tb)

    shutil.copy(src_full_path, dst_full_path)

    # Clean template data from keys that are dynamic
    template_data.pop("_id")
    template_data.pop("thumbnail_root")

    repre_context = template_filled.used_values
    for key in self.required_context_keys:
        value = template_data.get(key)
        if not value:
            continue
        repre_context[key] = template_data[key]

    thumbnail_entity = {
        "_id": thumbnail_id,
        "type": "thumbnail",
        "schema": "pype:thumbnail-1.0",
        "data": {
            "template": thumbnail_template,
            "template_data": repre_context
        }
    }

    # Create thumbnail entity
    io.insert_one(thumbnail_entity)
    self.log.debug("Creating entity in database {}".format(
        str(thumbnail_entity)))

    # Set thumbnail id for version
    io.update_many(
        {"_id": version["_id"]},
        {"$set": {"data.thumbnail_id": thumbnail_id}})
    self.log.debug("Setting thumbnail for version \"{}\" <{}>".format(
        version["name"], str(version["_id"])))

    asset_entity = instance.data["assetEntity"]
    io.update_many(
        {"_id": asset_entity["_id"]},
        {"$set": {"data.thumbnail_id": thumbnail_id}})
    # FIX: log the asset id, not the version id.
    self.log.debug("Setting thumbnail for asset \"{}\" <{}>".format(
        asset_entity["name"], str(asset_entity["_id"])))
def test_save():
    """Saving works well under normal circumstances

    FIX: "tasks" was written as a set literal containing dicts, which
    raises ``TypeError: unhashable type: 'dict'`` when the literal is
    evaluated; it is now a list of the same entries.
    NOTE(review): the schema may actually expect a plain mapping
    ``{"Animation": {...}, ...}`` — confirm against the config schema.
    """
    config_ = {
        "schema": "avalon-core:config-1.0",
        "apps": [
            {"name": "app1"},
            {"name": "app2"},
        ],
        "tasks": [
            {"Animation": {"short_name": "anim"}},
            {"Modeling": {"short_name": "mdl"}},
        ],
        "template": {
            "work": "{root}/{project}/{silo}/{asset}/work/"
                    "{task}/{user}/{app}",
            "publish": "{root}/{project}/{silo}/{asset}/publish/"
                       "{subset}/v{version:0>3}/{subset}.{representation}"
        },
        "families": [{
            "name": "avalon.model",
            "label": "Model",
            "icon": "cube"
        }],
        "groups": [
            {
                "name": "charCaches",
                "icon": "diamond",
                "color": "#C4CEDC",
                "order": -99
            },
        ],
        "copy": {}
    }

    inventory_ = {
        "schema": "avalon-core:inventory-1.0",
        "assets": [
            {"name": "asset1"},
            {"name": "asset2"}
        ],
        "film": [
            {"name": "shot1"},
            {"name": "shot2"},
        ]
    }

    # Both payloads must pass schema validation before saving.
    schema.validate(config_)
    schema.validate(inventory_)

    # NOTE(review): `self` here presumably refers to a module-level
    # fixture (e.g. `self = sys.modules[__name__]`) — confirm in the
    # surrounding test module.
    _id = io.insert_one(self._project).inserted_id
    project = io.find_one({"_id": _id})
    assert_equals(project["config"], self._project["config"])

    inventory.save(
        name=self._project["name"],
        config=config_,
        inventory=inventory_
    )

    project = io.find_one({"_id": _id})
    # Saved config is stored without its "schema" key.
    config_.pop("schema")
    assert_equals(project["config"], config_)

    # Every inventoried asset must exist under the project.
    for asset in inventory_["assets"]:
        assert io.find_one({
            "type": "asset",
            "parent": project["_id"],
            "name": asset["name"]
        })
def main():
    """Synchronise Kitsu (gazu) projects, assets, episodes, sequences and
    shots into the Avalon database.

    Two phases: first build avalon-shaped project/asset documents from the
    gazu API, then install/update them through the ``avalon`` module.
    Mutates ``os.environ`` (AVALON_* keys) while switching projects.
    """
    projects = {}
    objects = {}
    objects_count = 0
    # All known task types, reduced to the {"name": ...} shape avalon uses.
    tasks = [{"name": task["name"]} for task in gazu.task.all_task_types()]

    for project in gazu.project.all_projects():
        # Ensure project["code"] consistency.
        project_name = get_consistent_name(project["name"])
        if project["code"] != project_name:
            proj = {}
            proj["code"] = project_name
            proj["id"] = project["id"]
            project = gazu.project.update_project(proj)
            print("Updating Project Code...")

        # Collect assets.
        assets = []
        for asset in gazu.asset.all_assets_for_project(project):
            # Faking a parent for better hierarchy structure, until
            # folders are supported in Kitsu.
            asset["parents"] = ["assets"]
            assets.append(asset)

        # Collect shots and parents.
        episodes = []
        sequences = []
        shots = []
        for episode in (gazu.shot.all_episodes_for_project(project) or []):
            episode["code"] = get_consistent_name(episode["name"])
            episode["parent"] = project

            # Faking a parent for better hierarchy structure, until
            # folders are supported in Kitsu.
            episode["parents"] = ["episodes"]
            episodes.append(episode)

            for sequence in gazu.shot.all_sequences_for_episode(episode):
                sequence["code"] = get_consistent_name(sequence["name"])
                sequence["parent"] = episode
                sequence["parents"] = episode["parents"] + [episode["code"]]
                sequence["label"] = sequence["name"]
                # Names are namespaced by their parents to stay unique.
                sequence["name"] = "{0}_{1}".format(
                    episode["code"], sequence["code"]
                )
                sequence["visualParent"] = episode["name"]
                sequences.append(sequence)

                for shot in gazu.shot.all_shots_for_sequence(sequence):
                    shot["code"] = get_consistent_name(shot["name"])
                    shot["parent"] = sequence
                    shot["parents"] = (
                        sequence["parents"] + [sequence["code"]]
                    )
                    shot["label"] = shot["name"]
                    shot["name"] = "{0}_{1}_{2}".format(
                        episode["code"], sequence["code"], shot["code"]
                    )
                    shot["visualParent"] = sequence["name"]
                    shot["tasks"] = gazu.task.all_tasks_for_shot(shot)
                    shots.append(shot)

        # Each entity list is mapped to the silo it should live in.
        silos = [
            [assets, "assets"],
            [episodes, "shots"],
            [sequences, "shots"],
            [shots, "shots"]
        ]
        entities = {}
        for assets, silo in silos:
            for asset in assets:
                entity_type = gazu.entity.get_entity_type(
                    asset["entity_type_id"]
                )
                data = {
                    "schema": "avalon-core:asset-2.0",
                    "name": get_consistent_name(asset["name"]),
                    "silo": silo,
                    "type": "asset",
                    "parent": project["code"],
                    "data": {
                        "label": asset.get("label", asset["name"]),
                        "group": entity_type["name"],
                        "parents": asset["parents"]
                    }
                }
                if asset.get("visualParent"):
                    data["data"]["visualParent"] = asset["visualParent"]
                if asset.get("tasks"):
                    data["data"]["tasks"] = [
                        task["task_type_name"] for task in asset["tasks"]
                    ]
                entities[data["name"]] = data
                objects_count += 1

        objects[project["code"]] = entities

        projects[project["code"]] = {
            "schema": "avalon-core:project-2.0",
            "type": "project",
            "name": project["code"],
            "data": {
                "label": project["name"],
                "code": project["code"]
            },
            "parent": None,
            "config": {
                "schema": "avalon-core:config-1.0",
                "apps": [
                    {"name": "maya2015", "label": "Autodesk Maya 2015"},
                    {"name": "maya2016", "label": "Autodesk Maya 2016"},
                    {"name": "maya2017", "label": "Autodesk Maya 2017"},
                    {"name": "nuke10", "label": "The Foundry Nuke 10.0"}
                ],
                "tasks": tasks,
                "template": {
                    "work": "{root}/{project}/{silo}/{asset}/work/"
                            "{task}/{app}",
                    "publish": "{root}/{project}/{silo}/{asset}/publish/"
                               "{subset}/v{version:0>3}/"
                               "{subset}.{representation}"
                }
            }
        }

    print("Found:")
    print("- %d projects" % len(projects))
    print("- %d assets" % objects_count)

    os.environ["AVALON_PROJECTS"] = r""
    os.environ["AVALON_PROJECT"] = "temp"
    os.environ["AVALON_ASSET"] = "bruce"
    os.environ["AVALON_SILO"] = "assets"
    os.environ["AVALON_CONFIG"] = "polly"
    os.environ["AVALON_MONGO"] = os.environ.get(
        "AVALON_MONGO", "mongodb://127.0.0.1:27017"
    )

    print("Fetching Avalon data..")
    avalon.install()

    # Snapshot existing projects and their assets so the sync below can
    # tell updates apart from fresh installs.
    existing_projects = {}
    existing_objects = {}
    for project in avalon.projects():
        existing_projects[project["name"]] = project

        # Update project
        # NOTE(review): the uninstall/install cycle appears to re-point the
        # avalon connection at the project set in AVALON_PROJECT — confirm.
        os.environ["AVALON_PROJECT"] = project["name"]
        avalon.uninstall()
        avalon.install()

        # Collect assets
        assets = {}
        for asset in avalon.find({"type": "asset"}):
            assets[asset["name"]] = asset
        existing_objects[project["name"]] = assets

    print("Synchronising..")
    for name, project in projects.items():
        if project["name"] in existing_projects:
            # Update task types
            existing_project = existing_projects[project["name"]]
            existing_project_task_types = (
                existing_project["config"]["tasks"]
            )
            if existing_project_task_types != tasks:
                print(
                    "Updating tasks types on \"{0}\" to:\n{1}".format(
                        project["name"], tasks
                    )
                )
                existing_project["config"]["tasks"] = tasks
                os.environ["AVALON_PROJECT"] = project["name"]
                avalon.uninstall()
                avalon.install()
                avalon.replace_one({"type": "project"}, existing_project)
            continue

        print("Installing project: %s" % project["name"])
        os.environ["AVALON_PROJECT"] = project["name"]
        avalon.uninstall()
        avalon.install()
        avalon.insert_one(project)

    # NOTE(review): assigning to `project["code"]` as the loop target is
    # valid but unusual — it rewrites the key on the last `project` dict
    # from the loop above each iteration; the loop only reads it back.
    for project["code"], assets in objects.items():
        os.environ["AVALON_PROJECT"] = project["code"]
        avalon.uninstall()
        avalon.install()
        for asset_name, asset in assets.items():
            if asset_name in existing_objects.get(project["code"], {}):
                # Update tasks
                if asset["data"].get("tasks"):
                    existing_project = existing_objects[project["code"]]
                    existing_asset = existing_project[asset_name]
                    existing_tasks = existing_asset["data"].get(
                        "tasks", []
                    )
                    if existing_tasks != asset["data"]["tasks"]:
                        tasks = asset["data"]["tasks"]
                        print(
                            "Updating tasks on \"{0} / {1}\" "
                            "to:\n{2}".format(
                                project["code"], asset_name, tasks
                            )
                        )
                        existing_asset["data"]["tasks"] = tasks
                        avalon.replace_one(
                            {"type": "asset", "name": asset_name},
                            existing_asset
                        )
                continue

            # Resolve the textual parent reference to a database id.
            asset["parent"] = avalon.locate([asset["parent"]])

            if asset["data"].get("visualParent"):
                asset["data"]["visualParent"] = avalon.find_one(
                    {
                        "type": "asset",
                        "name": asset["data"]["visualParent"]
                    }
                )["_id"]

            print(
                "Installing asset: \"{0} / {1}\"".format(
                    project["code"], asset_name
                )
            )
            avalon.insert_one(asset)

    print("Success")
def register(self, instance):
    """Register the instance's version and representations in the Avalon
    database and queue file transfers to the publish location.

    For each entry of ``instance.data["representations"]`` the destination
    path is resolved through the project Anatomy templates, source ->
    destination pairs are appended to ``instance.data["transfers"]`` and a
    ``pype:representation-2.0`` document is built.  All representation
    documents are inserted at the end in one ``insert_many`` call.

    Side effects on ``instance.data``: sets "version", "transfers",
    "destination_list" and a "published_path" on every representation dict.

    Raises:
        AssertionError: when no representations are present, when a single
            file name is absolute, or when project/asset documents are
            missing.
    """
    # Required environment variables (instance values take precedence
    # over the session for asset/task).
    PROJECT = api.Session["AVALON_PROJECT"]
    ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
    TASK = instance.data.get("task") or api.Session["AVALON_TASK"]
    LOCATION = api.Session["AVALON_LOCATION"]

    context = instance.context

    # Atomicity
    #
    # Guarantee atomic publishes - each asset contains
    # an identical set of members.
    #
    # NOTE(review): the atomicity check below is deliberately disabled —
    # publishing proceeds even if earlier plugins reported failures.
    # for result in context.data["results"]:
    #     if not result["success"]:
    #         self.log.debug(result)
    #         exc_type, exc_value, exc_traceback = result["error_info"]
    #         extracted_traceback = traceback.extract_tb(exc_traceback)[-1]
    #         self.log.debug(
    #             "Error at line {}: \"{}\"".format(
    #                 extracted_traceback[1], result["error"]
    #             )
    #         )
    # assert all(result["success"] for result in context.data["results"]), (
    #     "Atomicity not held, aborting.")

    # Assemble staging directory; a missing value may be supplied per
    # representation further below.
    stagingdir = instance.data.get("stagingDir")
    if not stagingdir:
        self.log.info('''{} is missing reference to staging directory Will try to get it from representation'''.format(instance))

    # extra check if stagingDir actually exists and is available
    self.log.debug("Establishing staging directory @ %s" % stagingdir)

    # Ensure at least one file is set up for transfer in staging dir.
    repres = instance.data.get("representations", None)
    assert repres, "Instance has no files to transfer"
    assert isinstance(repres, (list, tuple)), (
        "Instance 'files' must be a list, got: {0}".format(repres))

    # FIXME: io is not initialized at this point for shell host
    io.install()
    project = io.find_one({"type": "project"})
    asset = io.find_one({
        "type": "asset",
        "name": ASSET,
        "parent": project["_id"]
    })
    assert all([project, asset]), ("Could not find current project or "
                                   "asset '%s'" % ASSET)

    subset = self.get_subset(asset, instance)

    # get next version (highest existing version name + 1)
    latest_version = io.find_one(
        {
            "type": "version",
            "parent": subset["_id"]
        },
        {"name": True},
        sort=[("name", -1)]
    )

    next_version = 1
    if latest_version is not None:
        next_version += latest_version["name"]

    # An explicit version on the instance overrides the computed one.
    if instance.data.get('version'):
        next_version = int(instance.data.get('version'))

    # NOTE(review): verification against the assumed destination version
    # is disabled — presumably because explicit versions may not match.
    # self.log.info("Verifying version from assumed destination")
    # assumed_data = instance.data["assumedTemplateData"]
    # assumed_version = assumed_data["version"]
    # if assumed_version != next_version:
    #     raise AttributeError("Assumed version 'v{0:03d}' does not match"
    #                          "next version in database "
    #                          "('v{1:03d}')".format(assumed_version,
    #                                                next_version))

    self.log.debug("Next version: v{0:03d}".format(next_version))

    version_data = self.create_version_data(context, instance)

    # Per-instance version data overrides the computed defaults.
    version_data_instance = instance.data.get('versionData')
    if version_data_instance:
        version_data.update(version_data_instance)

    version = self.create_version(subset=subset,
                                  version_number=next_version,
                                  locations=[LOCATION],
                                  data=version_data)

    self.log.debug("Creating version ...")
    # Upsert behaviour: reuse an existing version document of the same
    # number instead of inserting a duplicate.
    existing_version = io.find_one({
        'type': 'version',
        'parent': subset["_id"],
        'name': next_version
    })
    if existing_version is None:
        version_id = io.insert_one(version).inserted_id
    else:
        io.update_many({
            'type': 'version',
            'parent': subset["_id"],
            'name': next_version
        }, {'$set': version})
        version_id = existing_version['_id']

    instance.data['version'] = version['name']

    # Write to disk: resolve the asset's parent folder hierarchy for the
    # template's {hierarchy} token.
    root = api.registered_root()
    hierarchy = ""
    parents = io.find_one({
        "type": 'asset',
        "name": ASSET
    })['data']['parents']
    if parents and len(parents) > 0:
        # hierarchy = os.path.sep.join(hierarchy)
        hierarchy = os.path.join(*parents)

    anatomy = instance.context.data['anatomy']

    # Find the representations to transfer amongst the files
    # Each should be a single representation (as such, a single extension)
    representations = []
    destination_list = []

    template_name = 'publish'
    if 'transfers' not in instance.data:
        instance.data['transfers'] = []

    for idx, repre in enumerate(instance.data["representations"]):

        # create template data for Anatomy
        template_data = {
            "root": root,
            "project": {
                "name": PROJECT,
                "code": project['data']['code']
            },
            "silo": asset.get('silo'),
            "task": TASK,
            "asset": ASSET,
            "family": instance.data['family'],
            "subset": subset["name"],
            "version": int(version["name"]),
            "hierarchy": hierarchy
        }

        files = repre['files']

        # Representation-level overrides for staging dir and template.
        if repre.get('stagingDir'):
            stagingdir = repre['stagingDir']
        if repre.get('anatomy_template'):
            template_name = repre['anatomy_template']
        template = os.path.normpath(
            anatomy.templates[template_name]["path"])

        # A list of files denotes a frame sequence (collection).
        sequence_repre = isinstance(files, list)

        if sequence_repre:
            src_collections, remainder = clique.assemble(files)
            self.log.debug("src_tail_collections: {}".format(
                str(src_collections)))
            src_collection = src_collections[0]

            # Assert that each member has identical suffix
            src_head = src_collection.format("{head}")
            src_tail = src_collection.format("{tail}")

            # fix dst_padding: derive the frame padding width from the
            # first file name that actually matches the collection.
            valid_files = [x for x in files if src_collection.match(x)]
            padd_len = len(
                valid_files[0].replace(src_head, "").replace(src_tail, ""))
            src_padding_exp = "%0{}d".format(padd_len)

            # Format two probe frames through Anatomy so the destination
            # head/tail can be recovered with clique.
            test_dest_files = list()
            for i in [1, 2]:
                template_data["representation"] = repre['ext']
                template_data["frame"] = src_padding_exp % i
                anatomy_filled = anatomy.format(template_data)
                test_dest_files.append(
                    os.path.normpath(
                        anatomy_filled[template_name]["path"]))

            self.log.debug("test_dest_files: {}".format(
                str(test_dest_files)))

            dst_collections, remainder = clique.assemble(test_dest_files)
            dst_collection = dst_collections[0]
            dst_head = dst_collection.format("{head}")
            dst_tail = dst_collection.format("{tail}")

            index_frame_start = None

            # Optional renumbering: restart destination frames at
            # repre["frameStart"], padded to the width of frameEnd.
            if repre.get("frameStart"):
                frame_start_padding = len(str(repre.get("frameEnd")))
                index_frame_start = int(repre.get("frameStart"))

            dst_padding_exp = src_padding_exp
            dst_start_frame = None
            for i in src_collection.indexes:
                src_padding = src_padding_exp % i

                # for adding first frame into db
                if not dst_start_frame:
                    dst_start_frame = src_padding

                src_file_name = "{0}{1}{2}".format(
                    src_head, src_padding, src_tail)

                dst_padding = src_padding_exp % i

                if index_frame_start:
                    dst_padding_exp = "%0{}d".format(frame_start_padding)
                    dst_padding = dst_padding_exp % index_frame_start
                    index_frame_start += 1

                # ".." can appear when head already ends with a dot.
                dst = "{0}{1}{2}".format(
                    dst_head, dst_padding, dst_tail).replace("..", ".")
                self.log.debug("destination: `{}`".format(dst))
                src = os.path.join(stagingdir, src_file_name)
                self.log.debug("source: {}".format(src))
                instance.data["transfers"].append([src, dst])

            # The path stored in the db points at the first frame.
            dst = "{0}{1}{2}".format(
                dst_head, dst_start_frame, dst_tail).replace("..", ".")
            repre['published_path'] = dst

        else:
            # Single file representation.
            template_data.pop("frame", None)
            fname = files
            assert not os.path.isabs(fname), (
                "Given file name is a full path")
            template_data["representation"] = repre['ext']

            if repre.get("outputName"):
                template_data["output"] = repre['outputName']

            src = os.path.join(stagingdir, fname)
            anatomy_filled = anatomy.format(template_data)
            dst = os.path.normpath(
                anatomy_filled[template_name]["path"]).replace("..", ".")

            instance.data["transfers"].append([src, dst])

            repre['published_path'] = dst
            self.log.debug("__ dst: {}".format(dst))

        representation = {
            "schema": "pype:representation-2.0",
            "type": "representation",
            "parent": version_id,
            "name": repre['name'],
            "data": {
                'path': dst,
                'template': template
            },
            "dependencies": instance.data.get("dependencies", "").split(),

            # Imprint shortcut to context
            # for performance reasons.
            "context": {
                "root": root,
                "project": {
                    "name": PROJECT,
                    "code": project['data']['code']
                },
                'task': TASK,
                "silo": asset.get('silo'),
                "asset": ASSET,
                "family": instance.data['family'],
                "subset": subset["name"],
                "version": version["name"],
                "hierarchy": hierarchy,
                "representation": repre['ext']
            }
        }

        # Sequences also carry their first frame in the context.
        if sequence_repre and repre.get("frameStart"):
            representation['context']['frame'] = repre.get("frameStart")

        self.log.debug("__ representation: {}".format(representation))
        destination_list.append(dst)
        self.log.debug("__ destination_list: {}".format(destination_list))
        instance.data['destination_list'] = destination_list
        representations.append(representation)
        self.log.debug("__ representations: {}".format(representations))

    self.log.debug("__ representations: {}".format(representations))
    for rep in instance.data["representations"]:
        self.log.debug("__ represNAME: {}".format(rep['name']))
        self.log.debug("__ represPATH: {}".format(rep['published_path']))
    io.insert_many(representations)
    # self.log.debug("Representation: {}".format(representations))
    self.log.info("Registered {} items".format(len(representations)))
def register(self, instance):
    """Register subset, version and representation documents and queue
    file transfers (avalon-core 2.0 schema variant).

    Destination paths are built from the project's
    ``config.template.publish`` string template rather than Anatomy.
    Appends source/destination pairs to ``instance.data["transfers"]``
    and inserts one representation document per entry of
    ``instance.data["files"]``.

    Raises:
        AssertionError: on failed upstream plugins, a missing staging
            directory, missing project/asset documents, absolute file
            names, or mixed suffixes inside one collection.
        AttributeError: when the version number assumed at collection
            time no longer matches the next version in the database.
    """
    # Required environment variables
    PROJECT = api.Session["AVALON_PROJECT"]
    ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
    LOCATION = api.Session["AVALON_LOCATION"]

    context = instance.context

    # Atomicity
    #
    # Guarantee atomic publishes - each asset contains
    # an identical set of members.
    assert all(result["success"] for result in context.data["results"]), (
        "Atomicity not held, aborting.")

    # Assemble
    stagingdir = instance.data.get("stagingDir")
    assert stagingdir, ("Incomplete instance \"%s\": "
                        "Missing reference to staging area." % instance)

    # extra check if stagingDir actually exists and is available
    self.log.debug("Establishing staging directory @ %s" % stagingdir)

    # Only the publish template is projected from the project document.
    project = io.find_one({"type": "project"},
                          projection={"config.template.publish": True})
    asset = io.find_one({
        "type": "asset",
        "name": ASSET,
        "parent": project["_id"]
    })

    assert all([project, asset]), ("Could not find current project or "
                                   "asset '%s'" % ASSET)

    subset = self.get_subset(asset, instance)

    # get next version (highest existing version name + 1)
    latest_version = io.find_one(
        {
            "type": "version",
            "parent": subset["_id"]
        },
        {"name": True},
        sort=[("name", -1)]
    )

    next_version = 1
    if latest_version is not None:
        next_version += latest_version["name"]

    self.log.info("Verifying version from assumed destination")

    assumed_data = instance.data["assumedTemplateData"]
    assumed_version = assumed_data["version"]
    if assumed_version != next_version:
        raise AttributeError("Assumed version 'v{0:03d}' does not match"
                             "next version in database "
                             "('v{1:03d}')".format(assumed_version,
                                                   next_version))

    self.log.debug("Next version: v{0:03d}".format(next_version))

    version_data = self.create_version_data(context, instance)
    version = self.create_version(subset=subset,
                                  version_number=next_version,
                                  locations=[LOCATION],
                                  data=version_data)

    self.log.debug("Creating version ...")
    version_id = io.insert_one(version).inserted_id

    # Write to disk
    root = api.registered_root()
    template_data = {
        "root": root,
        "project": PROJECT,
        "silo": asset['silo'],
        "asset": ASSET,
        "subset": subset["name"],
        "version": version["name"]
    }

    template_publish = project["config"]["template"]["publish"]

    # Find the representations to transfer amongst the files
    # Each should be a single representation (as such, a single extension)
    representations = []
    for files in instance.data["files"]:

        if isinstance(files, list):
            # Collection: a sequence of files sharing one extension.
            collection = files
            # Assert that each member has identical suffix
            _, ext = os.path.splitext(collection[0])
            assert all(ext == os.path.splitext(name)[1]
                       for name in collection), (
                "Files had varying suffixes, this is a bug")

            assert not any(os.path.isabs(name) for name in collection)

            template_data["representation"] = ext[1:]

            for fname in collection:
                src = os.path.join(stagingdir, fname)
                # Template yields the directory; file name is appended.
                dst = os.path.join(
                    template_publish.format(**template_data),
                    fname)
                instance.data["transfers"].append([src, dst])

        else:
            # Single file: template yields the full destination path.
            fname = files
            assert not os.path.isabs(fname), (
                "Given file name is a full path")

            _, ext = os.path.splitext(fname)
            template_data["representation"] = ext[1:]

            src = os.path.join(stagingdir, fname)
            dst = template_publish.format(**template_data)
            instance.data["transfers"].append([src, dst])

        representation = {
            "schema": "avalon-core:representation-2.0",
            "type": "representation",
            "parent": version_id,
            "name": ext[1:],
            "data": {},
            "dependencies": instance.data.get("dependencies", "").split(),

            # Imprint shortcut to context
            # for performance reasons.
            "context": {
                "project": PROJECT,
                "asset": ASSET,
                "silo": asset['silo'],
                "subset": subset["name"],
                "version": version["name"],
                "representation": ext[1:]
            }
        }
        representations.append(representation)

    self.log.info("Registering {} items".format(len(representations)))
    io.insert_many(representations)
def test_save():
    """Saving works well under normal circumstances.

    Validates a config and inventory against their schemas, seeds the
    database with the module's project fixture, runs ``inventory.save``
    and checks that the stored project config and all inventory assets
    round-trip through the database.

    NOTE(review): ``self`` below is not a parameter of this function;
    avalon-core test modules conventionally alias the module object as
    ``self`` (``self = sys.modules[__name__]``) — confirm that alias
    exists in the part of the file outside this view.
    """
    config_ = {
        "schema": "avalon-core:config-1.0",
        "apps": [
            {"name": "app1"},
            {"name": "app2"},
        ],
        "tasks": [
            {"name": "task1"},
            {"name": "task2"},
        ],
        "template": {
            "work": "{root}/{project}/{silo}/{asset}/work/"
                    "{task}/{user}/{app}",
            "publish": "{root}/{project}/{silo}/{asset}/publish/"
                       "{subset}/v{version:0>3}/{subset}.{representation}"
        },
        "copy": {}
    }

    inventory_ = {
        "schema": "avalon-core:inventory-1.0",
        "assets": [{
            "name": "asset1"
        }, {
            "name": "asset2"
        }],
        "film": [
            {"name": "shot1"},
            {"name": "shot2"},
        ]
    }

    # Both documents must be schema-valid before saving.
    schema.validate(config_)
    schema.validate(inventory_)

    # Seed the project fixture and confirm its config round-trips.
    _id = io.insert_one(self._project).inserted_id
    project = io.find_one({"_id": _id})

    assert_equals(project["config"], self._project["config"])

    inventory.save(
        name=self._project["name"],
        config=config_,
        inventory=inventory_
    )

    # After save, the stored config equals the new one (sans "schema").
    project = io.find_one({"_id": _id})
    config_.pop("schema")
    assert_equals(project["config"], config_)

    # Every inventory asset must now exist under the project.
    for asset in inventory_["assets"]:
        assert io.find_one({
            "type": "asset",
            "parent": project["_id"],
            "name": asset["name"]
        })
def register(self, instance):
    """Register subset, version and representation documents and queue
    file transfers, resolving paths through ``anatomy.format`` with the
    ``anatomy_filled.render.path`` accessor.

    Iterates ``instance.data["files"]``; lists are treated as frame
    sequences (assembled with clique), anything else as a single file.
    Appends source/destination pairs to ``instance.data["transfers"]``
    and collects ``pype:representation-2.0`` documents inserted at the
    end via ``insert_many``.

    Raises:
        AssertionError: on failed upstream plugins, a missing staging
            directory, missing project/asset documents or absolute file
            names.
        AttributeError: when the assumed version does not match the next
            version in the database.
    """
    # Required environment variables
    PROJECT = api.Session["AVALON_PROJECT"]
    ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
    LOCATION = api.Session["AVALON_LOCATION"]

    context = instance.context

    # Atomicity
    #
    # Guarantee atomic publishes - each asset contains
    # an identical set of members.
    assert all(result["success"] for result in context.data["results"]), (
        "Atomicity not held, aborting.")

    # Assemble
    stagingdir = instance.data.get("stagingDir")
    assert stagingdir, ("Incomplete instance \"%s\": "
                        "Missing reference to staging area." % instance)

    # extra check if stagingDir actually exists and is available
    self.log.debug("Establishing staging directory @ %s" % stagingdir)

    project = io.find_one({"type": "project"})
    asset = io.find_one({
        "type": "asset",
        "name": ASSET,
        "parent": project["_id"]
    })

    assert all([project, asset]), ("Could not find current project or "
                                   "asset '%s'" % ASSET)

    subset = self.get_subset(asset, instance)

    # get next version (highest existing version name + 1)
    latest_version = io.find_one(
        {
            "type": "version",
            "parent": subset["_id"]
        },
        {"name": True},
        sort=[("name", -1)]
    )

    next_version = 1
    if latest_version is not None:
        next_version += latest_version["name"]

    self.log.info("Verifying version from assumed destination")

    assumed_data = instance.data["assumedTemplateData"]
    assumed_version = assumed_data["version"]
    if assumed_version != next_version:
        raise AttributeError("Assumed version 'v{0:03d}' does not match"
                             "next version in database "
                             "('v{1:03d}')".format(assumed_version,
                                                   next_version))

    # NOTE(review): an explicit instance version overrides next_version
    # AFTER the assumed-version check above, so it is never verified.
    if instance.data.get('version'):
        next_version = int(instance.data.get('version'))

    self.log.debug("Next version: v{0:03d}".format(next_version))

    version_data = self.create_version_data(context, instance)
    version = self.create_version(subset=subset,
                                  version_number=next_version,
                                  locations=[LOCATION],
                                  data=version_data)

    self.log.debug("Creating version ...")
    version_id = io.insert_one(version).inserted_id

    # Write to disk: resolve the asset's parent folder hierarchy for the
    # template's {hierarchy} token.
    root = api.registered_root()
    hierarchy = ""
    parents = io.find_one({
        "type": 'asset',
        "name": ASSET
    })['data']['parents']
    if parents and len(parents) > 0:
        # hierarchy = os.path.sep.join(hierarchy)
        hierarchy = os.path.join(*parents)

    template_data = {
        "root": root,
        "project": {
            "name": PROJECT,
            "code": project['data']['code']
        },
        "silo": asset['silo'],
        "task": api.Session["AVALON_TASK"],
        "asset": ASSET,
        "family": instance.data['family'],
        "subset": subset["name"],
        "version": int(version["name"]),
        "hierarchy": hierarchy
    }

    # template_publish = project["config"]["template"]["publish"]
    anatomy = instance.context.data['anatomy']

    # Find the representations to transfer amongst the files
    # Each should be a single representation (as such, a single extension)
    representations = []
    destination_list = []
    for files in instance.data["files"]:

        if isinstance(files, list):
            # Collection (frame sequence).
            src_collections, remainder = clique.assemble(files)
            src_collection = src_collections[0]

            # Assert that each member has identical suffix
            src_head = src_collection.format("{head}")
            src_tail = ext = src_collection.format("{tail}")

            # Format two probe frames through Anatomy so the destination
            # head/tail can be recovered with clique.
            test_dest_files = list()
            for i in [1, 2]:
                template_data["representation"] = src_tail[1:]
                template_data["frame"] = src_collection.format(
                    "{padding}") % i
                anatomy_filled = anatomy.format(template_data)
                test_dest_files.append(anatomy_filled.render.path)

            dst_collections, remainder = clique.assemble(test_dest_files)
            dst_collection = dst_collections[0]
            dst_head = dst_collection.format("{head}")
            dst_tail = dst_collection.format("{tail}")

            for i in src_collection.indexes:
                src_padding = src_collection.format("{padding}") % i
                src_file_name = "{0}{1}{2}".format(
                    src_head, src_padding, src_tail)
                dst_padding = dst_collection.format("{padding}") % i
                dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail)

                src = os.path.join(stagingdir, src_file_name)
                instance.data["transfers"].append([src, dst])

        else:
            # Single file.
            template_data.pop("frame", None)
            # NOTE(review): popping "frame" off the anatomy object itself
            # looks suspicious — presumably intended for template_data
            # only; confirm the Anatomy type supports pop().
            anatomy.pop("frame", None)
            fname = files
            self.log.info("fname: {}".format(fname))
            assert not os.path.isabs(fname), (
                "Given file name is a full path")

            _, ext = os.path.splitext(fname)
            template_data["representation"] = ext[1:]

            src = os.path.join(stagingdir, fname)
            anatomy_filled = anatomy.format(template_data)
            dst = anatomy_filled.render.path
            instance.data["transfers"].append([src, dst])

        # Store a padded-placeholder path and the raw template in the db.
        template_data["frame"] = "#####"
        anatomy_filled = anatomy.format(template_data)
        path_to_save = anatomy_filled.render.path
        template = anatomy.render.fullpath
        self.log.debug('ext[1:]: {}'.format(ext[1:]))

        representation = {
            "schema": "pype:representation-2.0",
            "type": "representation",
            "parent": version_id,
            "name": ext[1:],
            "data": {
                'path': path_to_save,
                'template': template
            },
            "dependencies": instance.data.get("dependencies", "").split(),

            # Imprint shortcut to context
            # for performance reasons.
            "context": {
                "root": root,
                "project": {
                    "name": PROJECT,
                    "code": project['data']['code']
                },
                "task": api.Session["AVALON_TASK"],
                "silo": asset['silo'],
                "asset": ASSET,
                "family": instance.data['family'],
                "subset": subset["name"],
                "version": int(version["name"]),
                "hierarchy": hierarchy,
                "representation": ext[1:]
            }
        }
        destination_list.append(dst)
        instance.data['destination_list'] = destination_list
        representations.append(representation)

    self.log.info("Registering {} items".format(len(representations)))
    io.insert_many(representations)
def process(self, instance):
    """Integrate a publish: create subset/version/representation documents
    and hardlink (or copy) staged files into the publish template path.

    Unlike the ``register``-style integrators, this plugin performs the
    file writes itself via a local ``copy`` helper (filelink with a
    shutil.copy fallback) and inserts one representation document per
    entry of ``instance.data["files"]`` as it goes.

    Side effects: appends each destination to ``instance.data["output"]``
    and stores the new version id in
    ``context.data["published_version"]``.

    Raises:
        AssertionError: on failed upstream plugins, a missing staging
            directory, missing project/asset documents, or mixed suffixes
            inside one collection.
    """
    import os
    import errno
    import shutil

    from pprint import pformat

    from avalon import api, io
    from avalon.vendor import filelink

    # Required environment variables
    PROJECT = api.Session["AVALON_PROJECT"]
    ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
    SILO = api.Session["AVALON_SILO"]
    LOCATION = api.Session["AVALON_LOCATION"]

    context = instance.context

    # Atomicity
    #
    # Guarantee atomic publishes - each asset contains
    # an identical set of members.
    assert all(result["success"] for result in context.data["results"]), (
        "Atomicity not held, aborting.")

    # Assemble
    stagingdir = instance.data.get("stagingDir")
    assert stagingdir, ("Incomplete instance \"%s\": "
                       "Missing reference to staging area." % instance)

    self.log.debug("Establishing staging directory @ %s" % stagingdir)

    project = io.find_one({"type": "project"})
    asset = io.find_one({"name": ASSET})

    assert all([project, asset]), ("Could not find current project or "
                                   "asset '%s'" % ASSET)

    subset = io.find_one({
        "type": "subset",
        "parent": asset["_id"],
        "name": instance.data["subset"]
    })

    # Create the subset on first publish of this name.
    if subset is None:
        subset_name = instance.data["subset"]
        self.log.info("Subset '%s' not found, creating.." % subset_name)

        _id = io.insert_one({
            "schema": "avalon-core:subset-2.0",
            "type": "subset",
            "name": subset_name,
            "data": {},
            "parent": asset["_id"]
        }).inserted_id

        subset = io.find_one({"_id": _id})

    # Next version = highest existing version name + 1.
    latest_version = io.find_one(
        {
            "type": "version",
            "parent": subset["_id"]
        },
        {"name": True},
        sort=[("name", -1)]
    )

    next_version = 1
    if latest_version is not None:
        next_version += latest_version["name"]

    self.log.debug("Next version: %i" % next_version)

    version = {
        "schema": "avalon-core:version-2.0",
        "type": "version",
        "parent": subset["_id"],
        "name": next_version,
        "locations": [LOCATION] if LOCATION else [],
        "data": {
            "families": (instance.data.get("families", list()) +
                         [instance.data["family"]]),

            # Enable overriding with current information from instance
            "time": instance.data.get("time", context.data["time"]),
            "author": instance.data.get("user", context.data["user"]),
            "source": instance.data.get(
                "source", context.data["currentFile"]).replace(
                api.registered_root(), "{root}").replace("\\", "/"),
            "comment": context.data.get("comment")
        }
    }

    self.log.debug("Creating version: %s" % pformat(version))
    version_id = io.insert_one(version).inserted_id

    # Write to disk
    template_data = {
        "root": api.registered_root(),
        "project": PROJECT,
        "silo": SILO,
        "asset": ASSET,
        "subset": subset["name"],
        "version": version["name"],
    }

    template_publish = project["config"]["template"]["publish"]

    if "output" not in instance.data:
        instance.data["output"] = list()

    def copy(src, dst):
        # Hardlink src to dst, creating parent directories as needed;
        # falls back to a plain copy when linking fails.
        dirname = os.path.dirname(dst)
        try:
            os.makedirs(dirname)
        except OSError as e:
            if e.errno == errno.EEXIST:
                pass
            else:
                self.log.critical("An unexpected error occurred.")
                raise

        try:
            filelink.create(src, dst)
            self.log.info("Linking %s -> %s" % (src, dst))
        except Exception:
            # Revert to a normal copy
            # TODO(marcus): Once filelink is proven stable,
            # improve upon or remove this fallback.
            shutil.copy(src, dst)
            self.log.info("Linking failed, copying %s -> %s" % (src, dst))

    # NOTE(review): the loop variable `_` is overwritten by the splitext
    # unpack inside the body; harmless since `collection`/`fname` alias
    # it first, but worth renaming.
    for _ in instance.data["files"]:

        if isinstance(_, list):
            # Collection (sequence of files sharing one extension).
            collection = _

            # Assert that each member has identical suffix
            _, ext = os.path.splitext(collection[0])
            assert all(ext == os.path.splitext(name)[1]
                       for name in collection), (
                "Files had varying suffixes, this is a bug")

            template_data["representation"] = ext[1:]

            for fname in collection:
                src = os.path.join(stagingdir, fname)
                dst = os.path.join(
                    template_publish.format(**template_data),
                    fname)
                copy(src, dst)
                instance.data["output"].append(dst)

        else:
            # Single file.
            fname = _

            _, ext = os.path.splitext(fname)
            template_data["representation"] = ext[1:]

            src = os.path.join(stagingdir, fname)
            dst = template_publish.format(**template_data)
            copy(src, dst)
            instance.data["output"].append(dst)

        representation = {
            "schema": "avalon-core:representation-2.0",
            "type": "representation",
            "parent": version_id,
            "name": template_data["representation"],
            "data": {},
            "dependencies": instance.data.get("dependencies", "").split(),

            # Imprint shortcut to context for performance reasons.
            "context": {
                "project": PROJECT,
                "asset": ASSET,
                "silo": SILO,
                "subset": subset["name"],
                "version": version["name"],
                "representation": template_data["representation"]
            }
        }

        io.insert_one(representation)

    context.data["published_version"] = str(version_id)
    self.log.info("Successfully integrated \"%s\" to \"%s\"" % (instance,
                                                                dst))
def process(self, instance):
    """Integrate the instance's subset/version/representation documents.

    Reads the pre-built ``instance.data["toDatabase"]`` triple and writes
    whichever documents do not exist yet.  When the version already
    exists:

    * under progressive publishing
      (``context.data["_progressivePublishing"]``) only the version's
      ``data.time`` — and, when present, the ``data.progress.current``
      counter — are updated, and only when this run produced output;
    * otherwise the files were overwritten in place, so ``data.time`` and
      each representation's ``data`` are refreshed.

    Aborts quietly (warning + return) when any upstream plugin failed.
    """
    context = instance.context
    if not all(result["success"] for result in context.data["results"]):
        self.log.warning("Atomicity not held, aborting.")
        return

    # Integrate representations' to database
    self.log.info("Integrating representations to database ...")

    asset = context.data["assetDoc"]
    subset, version, representations = instance.data["toDatabase"]

    # Write subset if not exists.
    # (Renamed from `filter`: avoid shadowing the builtin and match the
    # `filter_` naming convention used further down in this method.)
    subset_filter = {"parent": asset["_id"], "name": subset["name"]}
    if io.find_one(subset_filter) is None:
        io.insert_one(subset)

    # Write version if not exists
    version_filter = {"parent": subset["_id"], "name": version["name"]}
    existed_version = io.find_one(version_filter)

    if existed_version is None:
        # Write version and representations to database
        version_id = self.write_database(instance, version, representations)
        instance.data["insertedVersionId"] = version_id

        # Update dependent
        self.update_dependent(instance, version_id)
        return

    if context.data.get("_progressivePublishing"):
        if instance.data.get("_progressiveOutput") is None:
            pass  # Not given any output, no progress change

        else:
            self.log.info("Update version publish progress.")
            # Update version document "data.time"
            filter_ = {"_id": existed_version["_id"]}
            update = {"$set": {"data.time": context.data["time"]}}

            if "progress" in version["data"]:
                # Update version document "progress.current"
                progress = version["data"]["progress"]["current"]
                update["$inc"] = {"data.progress.current": progress}
            else:
                pass  # progress == -1, no progress update needed.

            io.update_many(filter_, update)

    else:
        self.log.info("Version existed, representation file has been "
                      "overwritten.")
        # Update version document "data.time"
        filter_ = {"_id": existed_version["_id"]}
        update = {"$set": {"data.time": context.data["time"]}}
        io.update_many(filter_, update)

        # Update representation documents "data"
        for representation in representations:
            filter_ = {
                "name": representation["name"],
                "parent": existed_version["_id"],
            }
            update = {"$set": {"data": representation["data"]}}
            io.update_many(filter_, update)
def main():
    """Synchronise projects and assets from a gazu (CGWire/Kitsu) server
    into the Avalon database.

    Builds avalon-core 2.0 project/asset documents from every gazu
    project (assets go to the "assets" silo, shots to "shots"), then
    inserts whichever documents do not already exist in Avalon.
    Already-present projects/assets are skipped, never updated.

    NOTE(review): the AVALON_* environment values below are hard-coded
    demo settings; AVALON_MONGO now respects a pre-set environment value
    (consistent with the sibling sync script) instead of clobbering it.
    """
    projects = []
    objects = []

    for project in gazu.project.all_projects():
        assets = gazu.asset.all_assets_for_project(project)
        shots = gazu.shot.all_shots_for_project(project)

        # Assets and shots map to the two Avalon silos.
        for assets, silo in ((assets, "assets"), (shots, "shots")):
            for asset in assets:
                objects.append({
                    "schema": "avalon-core:asset-2.0",
                    "name": asset["name"].replace(" ", ""),  # remove spaces
                    "silo": silo,
                    "data": {},
                    "type": "asset",
                    "parent": project["name"],
                })

        projects.append({
            "schema": "avalon-core:project-2.0",
            "type": "project",
            "name": project["name"],
            "data": {},
            "parent": None,
            "config": {
                "schema": "avalon-core:config-1.0",
                "apps": [
                    {
                        "name": "maya2015",
                        "label": "Autodesk Maya 2015"
                    },
                    {
                        "name": "maya2016",
                        "label": "Autodesk Maya 2016"
                    },
                    {
                        "name": "maya2017",
                        "label": "Autodesk Maya 2017"
                    },
                    {
                        "name": "nuke10",
                        "label": "The Foundry Nuke 10.0"
                    }
                ],
                "tasks": [
                    {"name": task["name"]}
                    for task in gazu.task.all_task_types()
                ],
                "template": {
                    "work": "{root}/{project}/{silo}/{asset}/work/"
                            "{task}/{app}",
                    "publish": "{root}/{project}/{silo}/{asset}/publish/"
                               "{subset}/v{version:0>3}/"
                               "{subset}.{representation}"
                }
            }
        })

    print("Found:")
    print("- %d projects" % len(projects))
    print("- %d assets" % len(objects))

    os.environ["AVALON_PROJECTS"] = r""
    os.environ["AVALON_PROJECT"] = "temp"
    os.environ["AVALON_ASSET"] = "bruce"
    os.environ["AVALON_SILO"] = "assets"
    os.environ["AVALON_CONFIG"] = "polly"
    # Respect an externally configured database address; fall back to the
    # original hard-coded default only when none is set.
    os.environ["AVALON_MONGO"] = os.environ.get(
        "AVALON_MONGO", "mongodb://192.168.99.100:27017"
    )

    existing_projects = {}
    existing_assets = {}

    print("Fetching Avalon data..")
    avalon.install()

    for project in avalon.projects():
        existing_projects[project["name"]] = project

    for asset in avalon.find({"type": "asset"}):
        existing_assets[asset["name"]] = asset

    print("Synchronising..")

    # Insert only the projects that do not exist yet; installing switches
    # the session to the new project before inserting.
    for project in projects:
        if project["name"] in existing_projects:
            continue

        print("Installing project: %s" % project["name"])
        os.environ["AVALON_PROJECT"] = project["name"]
        avalon.uninstall()
        avalon.install()
        avalon.insert_one(project)

    # Insert only the assets that do not exist yet, resolving the parent
    # project document reference first.
    for asset in objects:
        if asset["name"] in existing_assets:
            continue

        asset["parent"] = avalon.locate([asset["parent"]])
        print("Installing asset: %s" % asset["name"])
        avalon.insert_one(asset)

    print("Success")