def write_database(self, instance, version, representations):
    """Persist a version document and its representations.

    The version document is inserted first (reusing a pre-generated id
    when the publishing instance reserved one), after which every
    representation is parented under the new version and bulk-inserted.

    Returns:
        The ``_id`` of the inserted version document.
    """
    self.log.info(
        "Registering version {} to database ...".format(version["name"])
    )

    # An earlier plugin may have reserved an id for this version;
    # honour it so links created ahead of time stay valid.
    if "pregeneratedVersionId" in instance.data:
        version["_id"] = instance.data["pregeneratedVersionId"]
    version_id = io.insert_one(version).inserted_id

    self.log.info(
        "Registering {} representations ...".format(len(representations))
    )

    # Parent every representation to the version we just created,
    # then insert them in one bulk call.
    for repre_doc in representations:
        repre_doc["parent"] = version_id
    io.insert_many(representations)

    return version_id
def register(self, instance):
    """Register the instance's representations in the database and plan
    file transfers into the publish location.

    Workflow:
      1. Resolve asset/task context and enrich ``anatomyData``.
      2. Create (or update) the version document; archive any existing
         representations of that version.
      3. For every representation, fill the anatomy template, queue
         ``instance.data["transfers"]`` and build a representation doc.
      4. Replace old representations with the new ones in the database.

    Fix over previous revision: ``resolution_height`` and ``fps`` were
    guarded by ``if resolution_width:`` (copy-paste error), so they were
    written to template data based on the wrong value.
    """
    # Required environment variables
    anatomy_data = instance.data["anatomyData"]

    io.install()

    context = instance.context

    project_entity = instance.data["projectEntity"]

    context_asset_name = context.data["assetEntity"]["name"]

    asset_name = instance.data["asset"]
    asset_entity = instance.data.get("assetEntity")
    # Re-query the asset when the instance has none, or when it publishes
    # under a different asset than the context's.
    if not asset_entity or asset_entity["name"] != context_asset_name:
        asset_entity = io.find_one({
            "type": "asset",
            "name": asset_name,
            "parent": project_entity["_id"]
        })
        assert asset_entity, (
            "No asset found by the name \"{0}\" in project \"{1}\""
        ).format(asset_name, project_entity["name"])

        instance.data["assetEntity"] = asset_entity

        # update anatomy data with asset specific keys
        # - name should already been set
        hierarchy = ""
        parents = asset_entity["data"]["parents"]
        if parents:
            hierarchy = "/".join(parents)

        anatomy_data["hierarchy"] = hierarchy

    task_name = instance.data.get("task")
    if task_name:
        anatomy_data["task"] = task_name

    anatomy_data["family"] = instance.data.get("family")

    stagingdir = instance.data.get("stagingDir")
    if not stagingdir:
        self.log.info(
            ("{0} is missing reference to staging directory."
             " Will try to get it from representation.").format(instance)
        )
    else:
        self.log.debug(
            "Establishing staging directory @ {0}".format(stagingdir)
        )

    # Ensure at least one file is set up for transfer in staging dir.
    repres = instance.data.get("representations")
    assert repres, "Instance has no files to transfer"
    assert isinstance(repres, (list, tuple)), (
        "Instance 'files' must be a list, got: {0} {1}".format(
            str(type(repres)), str(repres)
        )
    )

    subset = self.get_subset(asset_entity, instance)
    instance.data["subsetEntity"] = subset

    version_number = instance.data["version"]
    self.log.debug("Next version: v{}".format(version_number))

    version_data = self.create_version_data(context, instance)

    version_data_instance = instance.data.get('versionData')
    if version_data_instance:
        version_data.update(version_data_instance)

    # TODO rename method from `create_version` to
    # `prepare_version` or similar...
    version = self.create_version(
        subset=subset,
        version_number=version_number,
        data=version_data
    )

    self.log.debug("Creating version ...")

    new_repre_names_low = [_repre["name"].lower() for _repre in repres]

    existing_version = io.find_one({
        'type': 'version',
        'parent': subset["_id"],
        'name': version_number
    })

    if existing_version is None:
        version_id = io.insert_one(version).inserted_id
    else:
        # Check if instance have set `append` mode which cause that
        # only replicated representations are set to archive
        append_repres = instance.data.get("append", False)

        # Update version data
        # TODO query by _id and
        io.update_many(
            {
                'type': 'version',
                'parent': subset["_id"],
                'name': version_number
            },
            {'$set': version}
        )
        version_id = existing_version['_id']

        # Find representations of existing version and archive them
        current_repres = list(io.find({
            "type": "representation",
            "parent": version_id
        }))
        bulk_writes = []
        for repre in current_repres:
            if append_repres:
                # archive only duplicated representations
                if repre["name"].lower() not in new_repre_names_low:
                    continue
            # Representation must change type,
            # `_id` must be stored to other key and replaced with new
            # - that is because new representations should have same ID
            repre_id = repre["_id"]
            bulk_writes.append(DeleteOne({"_id": repre_id}))
            repre["orig_id"] = repre_id
            repre["_id"] = io.ObjectId()
            repre["type"] = "archived_representation"
            bulk_writes.append(InsertOne(repre))

        # bulk updates
        if bulk_writes:
            io._database[io.Session["AVALON_PROJECT"]].bulk_write(
                bulk_writes
            )

    version = io.find_one({"_id": version_id})
    instance.data["versionEntity"] = version

    existing_repres = list(io.find({
        "parent": version_id,
        "type": "archived_representation"
    }))

    instance.data['version'] = version['name']

    intent_value = instance.context.data.get("intent")
    if intent_value and isinstance(intent_value, dict):
        intent_value = intent_value.get("value")

    if intent_value:
        anatomy_data["intent"] = intent_value

    anatomy = instance.context.data['anatomy']

    # Find the representations to transfer amongst the files
    # Each should be a single representation (as such, a single extension)
    representations = []
    destination_list = []

    if 'transfers' not in instance.data:
        instance.data['transfers'] = []

    template_name = self.template_name_from_instance(instance)

    published_representations = {}
    for repre in instance.data["representations"]:
        published_files = []

        # create template data for Anatomy
        template_data = copy.deepcopy(anatomy_data)
        if intent_value is not None:
            template_data["intent"] = intent_value

        resolution_width = repre.get("resolutionWidth")
        resolution_height = repre.get("resolutionHeight")
        fps = instance.data.get("fps")

        # FIX: each optional key is now guarded by its own value
        # (previously all three were guarded by `resolution_width`).
        if resolution_width:
            template_data["resolution_width"] = resolution_width
        if resolution_height:
            template_data["resolution_height"] = resolution_height
        if fps:
            template_data["fps"] = fps

        files = repre['files']
        if repre.get('stagingDir'):
            stagingdir = repre['stagingDir']
        if repre.get("outputName"):
            template_data["output"] = repre['outputName']

        template = os.path.normpath(
            anatomy.templates[template_name]["path"])

        sequence_repre = isinstance(files, list)
        repre_context = None
        if sequence_repre:
            self.log.debug("files: {}".format(files))
            src_collections, remainder = clique.assemble(files)
            self.log.debug(
                "src_tail_collections: {}".format(str(src_collections)))
            src_collection = src_collections[0]

            # Assert that each member has identical suffix
            src_head = src_collection.format("{head}")
            src_tail = src_collection.format("{tail}")

            # fix dst_padding
            valid_files = [x for x in files if src_collection.match(x)]
            padd_len = len(
                valid_files[0].replace(src_head, "").replace(src_tail, "")
            )
            src_padding_exp = "%0{}d".format(padd_len)

            # Fill the template for two probe frames so clique can derive
            # the destination head/tail around the frame number.
            test_dest_files = list()
            for i in [1, 2]:
                template_data["representation"] = repre['ext']
                template_data["frame"] = src_padding_exp % i
                anatomy_filled = anatomy.format(template_data)
                template_filled = anatomy_filled[template_name]["path"]
                if repre_context is None:
                    repre_context = template_filled.used_values
                test_dest_files.append(os.path.normpath(template_filled))
            template_data["frame"] = repre_context["frame"]

            self.log.debug(
                "test_dest_files: {}".format(str(test_dest_files)))

            dst_collections, remainder = clique.assemble(test_dest_files)
            dst_collection = dst_collections[0]
            dst_head = dst_collection.format("{head}")
            dst_tail = dst_collection.format("{tail}")

            index_frame_start = None

            if repre.get("frameStart"):
                frame_start_padding = int(
                    anatomy.templates["render"].get(
                        "frame_padding",
                        anatomy.templates["render"].get("padding")
                    )
                )
                index_frame_start = int(repre.get("frameStart"))

            # exception for slate workflow
            if index_frame_start and "slate" in instance.data["families"]:
                index_frame_start -= 1

            dst_padding_exp = src_padding_exp
            dst_start_frame = None
            for i in src_collection.indexes:
                # TODO 1.) do not count padding in each index iteration
                # 2.) do not count dst_padding from src_padding before
                #     index_frame_start check
                src_padding = src_padding_exp % i

                src_file_name = "{0}{1}{2}".format(
                    src_head, src_padding, src_tail)

                dst_padding = src_padding_exp % i

                # Re-index destination frames when a frameStart is set.
                if index_frame_start:
                    dst_padding_exp = "%0{}d".format(frame_start_padding)
                    dst_padding = dst_padding_exp % index_frame_start
                    index_frame_start += 1

                dst = "{0}{1}{2}".format(
                    dst_head, dst_padding, dst_tail).replace("..", ".")
                self.log.debug("destination: `{}`".format(dst))
                src = os.path.join(stagingdir, src_file_name)
                self.log.debug("source: {}".format(src))
                instance.data["transfers"].append([src, dst])

                published_files.append(dst)

                # for adding first frame into db
                if not dst_start_frame:
                    dst_start_frame = dst_padding

            # Store used frame value to template data
            template_data["frame"] = dst_start_frame
            dst = "{0}{1}{2}".format(
                dst_head, dst_start_frame, dst_tail).replace("..", ".")
            repre['published_path'] = dst

        else:
            # Single file representation.
            template_data.pop("frame", None)
            fname = files
            assert not os.path.isabs(fname), (
                "Given file name is a full path")
            template_data["representation"] = repre['ext']

            src = os.path.join(stagingdir, fname)
            anatomy_filled = anatomy.format(template_data)
            template_filled = anatomy_filled[template_name]["path"]
            repre_context = template_filled.used_values
            dst = os.path.normpath(template_filled).replace("..", ".")

            instance.data["transfers"].append([src, dst])
            published_files.append(dst)
            repre['published_path'] = dst
        self.log.debug("__ dst: {}".format(dst))

        repre["publishedFiles"] = published_files

        # Imprint whitelisted context keys for database queries.
        for key in self.db_representation_context_keys:
            value = template_data.get(key)
            if not value:
                continue
            repre_context[key] = template_data[key]

        # Use previous representation's id if there are any
        repre_id = None
        repre_name_low = repre["name"].lower()
        for _repre in existing_repres:
            # NOTE should we check lowered names?
            if repre_name_low == _repre["name"]:
                repre_id = _repre["orig_id"]
                break

        # Create new id if existing representations does not match
        if repre_id is None:
            repre_id = io.ObjectId()

        representation = {
            "_id": repre_id,
            "schema": "pype:representation-2.0",
            "type": "representation",
            "parent": version_id,
            "name": repre['name'],
            "data": {'path': dst, 'template': template},
            "dependencies": instance.data.get("dependencies", "").split(),

            # Imprint shortcut to context
            # for performance reasons.
            "context": repre_context
        }

        if repre.get("outputName"):
            representation["context"]["output"] = repre['outputName']

        if sequence_repre and repre.get("frameStart"):
            representation['context']['frame'] = (
                dst_padding_exp % int(repre.get("frameStart")))

        self.log.debug("__ representation: {}".format(representation))
        destination_list.append(dst)
        self.log.debug("__ destination_list: {}".format(destination_list))
        instance.data['destination_list'] = destination_list
        representations.append(representation)
        published_representations[repre_id] = {
            "representation": representation,
            "anatomy_data": template_data,
            "published_files": published_files
        }
        self.log.debug("__ representations: {}".format(representations))

    # Remove old representations if there are any (before insertion of new)
    if existing_repres:
        repre_ids_to_remove = []
        for repre in existing_repres:
            repre_ids_to_remove.append(repre["_id"])
        io.delete_many({"_id": {"$in": repre_ids_to_remove}})

    self.log.debug("__ representations: {}".format(representations))
    for rep in instance.data["representations"]:
        self.log.debug("__ represNAME: {}".format(rep['name']))
        self.log.debug("__ represPATH: {}".format(rep['published_path']))
    io.insert_many(representations)
    instance.data["published_representations"] = (
        published_representations
    )
    self.log.info("Registered {} items".format(len(representations)))
def register(self, instance):
    """Create subset/version documents and queue publish file transfers.

    Older integrator revision driven by ``api.Session`` environment and
    ``instance.data["files"]`` (not ``representations``). Asserts that
    every plugin in the context succeeded, derives the next version
    number, fills anatomy templates per file/sequence and registers one
    representation document per entry of ``instance.data["files"]``.
    """
    # Required environment variables
    PROJECT = api.Session["AVALON_PROJECT"]
    ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
    LOCATION = api.Session["AVALON_LOCATION"]

    context = instance.context

    # Atomicity: guarantee atomic publishes - each asset contains an
    # identical set of members. Abort if any earlier plugin failed.
    assert all(result["success"] for result in context.data["results"]), (
        "Atomicity not held, aborting.")

    # Assemble: all sources must already sit in the staging directory.
    stagingdir = instance.data.get("stagingDir")
    assert stagingdir, ("Incomplete instance \"%s\": "
                        "Missing reference to staging area." % instance)

    # extra check if stagingDir actually exists and is available
    self.log.debug("Establishing staging directory @ %s" % stagingdir)

    project = io.find_one({"type": "project"})
    asset = io.find_one({"type": "asset",
                         "name": ASSET,
                         "parent": project["_id"]})
    # NOTE(review): if `project` is None the find_one above already
    # raised on `project["_id"]`, so this assert only guards `asset`.
    assert all([project, asset]), ("Could not find current project or "
                                   "asset '%s'" % ASSET)

    subset = self.get_subset(asset, instance)

    # get next version
    latest_version = io.find_one({"type": "version",
                                  "parent": subset["_id"]},
                                 {"name": True},
                                 sort=[("name", -1)])

    next_version = 1
    if latest_version is not None:
        next_version += latest_version["name"]

    self.log.info("Verifying version from assumed destination")

    assumed_data = instance.data["assumedTemplateData"]
    assumed_version = assumed_data["version"]
    if assumed_version != next_version:
        raise AttributeError("Assumed version 'v{0:03d}' does not match"
                             "next version in database "
                             "('v{1:03d}')".format(assumed_version,
                                                   next_version))

    # An explicit version on the instance overrides the computed one.
    if instance.data.get('version'):
        next_version = int(instance.data.get('version'))

    self.log.debug("Next version: v{0:03d}".format(next_version))

    version_data = self.create_version_data(context, instance)
    version = self.create_version(subset=subset,
                                  version_number=next_version,
                                  locations=[LOCATION],
                                  data=version_data)

    self.log.debug("Creating version ...")
    version_id = io.insert_one(version).inserted_id

    # Write to disk: build template data shared by all representations.
    root = api.registered_root()
    hierarchy = ""
    parents = io.find_one({
        "type": 'asset',
        "name": ASSET
    })['data']['parents']
    if parents and len(parents) > 0:
        # hierarchy = os.path.sep.join(hierarchy)
        hierarchy = os.path.join(*parents)

    template_data = {"root": root,
                     "project": {"name": PROJECT,
                                 "code": project['data']['code']},
                     "silo": asset['silo'],
                     "task": api.Session["AVALON_TASK"],
                     "asset": ASSET,
                     "family": instance.data['family'],
                     "subset": subset["name"],
                     "version": int(version["name"]),
                     "hierarchy": hierarchy}

    # template_publish = project["config"]["template"]["publish"]
    anatomy = instance.context.data['anatomy']

    # Find the representations to transfer amongst the files
    # Each should be a single representation (as such, a single extension)
    representations = []
    destination_list = []

    for files in instance.data["files"]:
        # A list entry is a frame sequence; anything else is one file.
        if isinstance(files, list):
            src_collections, remainder = clique.assemble(files)
            src_collection = src_collections[0]

            # Assert that each member has identical suffix
            src_head = src_collection.format("{head}")
            src_tail = ext = src_collection.format("{tail}")

            # Fill the template for two probe frames so clique can
            # derive the destination head/tail around the frame number.
            test_dest_files = list()
            for i in [1, 2]:
                template_data["representation"] = src_tail[1:]
                template_data["frame"] = src_collection.format(
                    "{padding}") % i
                anatomy_filled = anatomy.format(template_data)
                test_dest_files.append(anatomy_filled.render.path)

            dst_collections, remainder = clique.assemble(test_dest_files)
            dst_collection = dst_collections[0]
            dst_head = dst_collection.format("{head}")
            dst_tail = dst_collection.format("{tail}")

            # Queue one transfer per source frame, keeping frame numbers.
            for i in src_collection.indexes:
                src_padding = src_collection.format("{padding}") % i
                src_file_name = "{0}{1}{2}".format(src_head,
                                                   src_padding,
                                                   src_tail)
                dst_padding = dst_collection.format("{padding}") % i
                dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail)

                src = os.path.join(stagingdir, src_file_name)
                # NOTE(review): assumes instance.data["transfers"] was
                # initialized by an earlier plugin — verify, otherwise
                # this raises KeyError.
                instance.data["transfers"].append([src, dst])
        else:
            # Single file
            template_data.pop("frame", None)
            # NOTE(review): popping "frame" from the anatomy object
            # itself looks like a copy-paste of the line above — confirm
            # the anatomy type actually supports/needs `pop`.
            anatomy.pop("frame", None)

            fname = files
            self.log.info("fname: {}".format(fname))
            assert not os.path.isabs(fname), (
                "Given file name is a full path")
            _, ext = os.path.splitext(fname)
            template_data["representation"] = ext[1:]

            src = os.path.join(stagingdir, fname)
            anatomy_filled = anatomy.format(template_data)
            dst = anatomy_filled.render.path
            instance.data["transfers"].append([src, dst])

        # Store a "#####"-frame path plus the raw template in the
        # representation document.
        template_data["frame"] = "#####"
        anatomy_filled = anatomy.format(template_data)
        path_to_save = anatomy_filled.render.path
        template = anatomy.render.fullpath
        self.log.debug('ext[1:]: {}'.format(ext[1:]))
        representation = {
            "schema": "pype:representation-2.0",
            "type": "representation",
            "parent": version_id,
            "name": ext[1:],
            "data": {'path': path_to_save, 'template': template},
            "dependencies": instance.data.get("dependencies", "").split(),

            # Imprint shortcut to context
            # for performance reasons.
            "context": {
                "root": root,
                "project": {"name": PROJECT,
                            "code": project['data']['code']},
                "task": api.Session["AVALON_TASK"],
                "silo": asset['silo'],
                "asset": ASSET,
                "family": instance.data['family'],
                "subset": subset["name"],
                "version": int(version["name"]),
                "hierarchy": hierarchy,
                "representation": ext[1:]
            }
        }

        destination_list.append(dst)
        instance.data['destination_list'] = destination_list
        representations.append(representation)

    self.log.info("Registering {} items".format(len(representations)))
    io.insert_many(representations)
def register(self, instance):
    """Register the instance in the database using the legacy
    ``project["config"]["template"]["publish"]`` string template.

    Oldest integrator revision: no anatomy object — destinations come
    from ``str.format`` on the project's publish template. One
    representation document is created per entry of
    ``instance.data["files"]``.
    """
    # Required environment variables
    PROJECT = api.Session["AVALON_PROJECT"]
    ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
    LOCATION = api.Session["AVALON_LOCATION"]

    context = instance.context

    # Atomicity: guarantee atomic publishes - each asset contains an
    # identical set of members. Abort if any earlier plugin failed.
    assert all(result["success"] for result in context.data["results"]), (
        "Atomicity not held, aborting.")

    # Assemble: all sources must already sit in the staging directory.
    stagingdir = instance.data.get("stagingDir")
    assert stagingdir, ("Incomplete instance \"%s\": "
                        "Missing reference to staging area." % instance)

    # extra check if stagingDir actually exists and is available
    self.log.debug("Establishing staging directory @ %s" % stagingdir)

    # Only the publish template is needed from the project document.
    project = io.find_one({"type": "project"},
                          projection={"config.template.publish": True})
    asset = io.find_one({"type": "asset",
                         "name": ASSET,
                         "parent": project["_id"]})
    assert all([project, asset]), ("Could not find current project or "
                                   "asset '%s'" % ASSET)

    subset = self.get_subset(asset, instance)

    # get next version
    latest_version = io.find_one({"type": "version",
                                  "parent": subset["_id"]},
                                 {"name": True},
                                 sort=[("name", -1)])

    next_version = 1
    if latest_version is not None:
        next_version += latest_version["name"]

    self.log.info("Verifying version from assumed destination")

    assumed_data = instance.data["assumedTemplateData"]
    assumed_version = assumed_data["version"]
    if assumed_version != next_version:
        raise AttributeError("Assumed version 'v{0:03d}' does not match"
                             "next version in database "
                             "('v{1:03d}')".format(assumed_version,
                                                   next_version))

    self.log.debug("Next version: v{0:03d}".format(next_version))

    version_data = self.create_version_data(context, instance)
    version = self.create_version(subset=subset,
                                  version_number=next_version,
                                  locations=[LOCATION],
                                  data=version_data)

    self.log.debug("Creating version ...")
    version_id = io.insert_one(version).inserted_id

    # Write to disk: template data shared by all representations.
    root = api.registered_root()
    template_data = {"root": root,
                     "project": PROJECT,
                     "silo": asset['silo'],
                     "asset": ASSET,
                     "subset": subset["name"],
                     "version": version["name"]}

    template_publish = project["config"]["template"]["publish"]

    # Find the representations to transfer amongst the files
    # Each should be a single representation (as such, a single extension)
    representations = []
    for files in instance.data["files"]:
        # A list entry is a collection of frames; otherwise one file.
        if isinstance(files, list):
            collection = files
            # Assert that each member has identical suffix
            _, ext = os.path.splitext(collection[0])
            assert all(ext == os.path.splitext(name)[1]
                       for name in collection), (
                "Files had varying suffixes, this is a bug")

            assert not any(os.path.isabs(name) for name in collection)

            template_data["representation"] = ext[1:]

            # Destination keeps the original file name under the
            # formatted publish directory.
            for fname in collection:
                src = os.path.join(stagingdir, fname)
                dst = os.path.join(
                    template_publish.format(**template_data),
                    fname)
                instance.data["transfers"].append([src, dst])

        else:
            # Single file
            fname = files
            assert not os.path.isabs(fname), (
                "Given file name is a full path")
            _, ext = os.path.splitext(fname)
            template_data["representation"] = ext[1:]

            src = os.path.join(stagingdir, fname)
            dst = template_publish.format(**template_data)
            instance.data["transfers"].append([src, dst])

        representation = {
            "schema": "avalon-core:representation-2.0",
            "type": "representation",
            "parent": version_id,
            "name": ext[1:],
            "data": {},
            "dependencies": instance.data.get("dependencies", "").split(),

            # Imprint shortcut to context
            # for performance reasons.
            "context": {
                "project": PROJECT,
                "asset": ASSET,
                "silo": asset['silo'],
                "subset": subset["name"],
                "version": version["name"],
                "representation": ext[1:]
            }
        }
        representations.append(representation)

    self.log.info("Registering {} items".format(len(representations)))
    io.insert_many(representations)
def register(self, instance):
    """Register the instance's representations using anatomy templates.

    Intermediate integrator revision: session-driven context, anatomy
    templates, ``instance.data["representations"]`` as the source of
    files, optional existing-version update.

    Fixes over previous revision:
      * ``dst_start_frame`` (used for ``repre['published_path']``) was
        captured from the *source* padding before ``frameStart``
        re-indexing, so the stored path could point at a frame that was
        never written; it is now captured from the final destination
        padding.
      * ``template_name`` is reset per representation so one repre's
        ``anatomy_template`` no longer leaks into the following ones.
    """
    # Required environment variables
    PROJECT = api.Session["AVALON_PROJECT"]
    ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
    TASK = instance.data.get("task") or api.Session["AVALON_TASK"]
    LOCATION = api.Session["AVALON_LOCATION"]

    context = instance.context

    # NOTE: the atomicity check ("all results succeeded") present in
    # older revisions is intentionally disabled here.

    stagingdir = instance.data.get("stagingDir")
    if not stagingdir:
        self.log.info(
            ("{} is missing reference to staging directory. "
             "Will try to get it from representation.").format(instance)
        )

    # extra check if stagingDir actually exists and is available
    self.log.debug("Establishing staging directory @ %s" % stagingdir)

    # Ensure at least one file is set up for transfer in staging dir.
    repres = instance.data.get("representations", None)
    assert repres, "Instance has no files to transfer"
    assert isinstance(repres, (list, tuple)), (
        "Instance 'files' must be a list, got: {0}".format(repres))

    # FIXME: io is not initialized at this point for shell host
    io.install()
    project = io.find_one({"type": "project"})
    asset = io.find_one({"type": "asset",
                         "name": ASSET,
                         "parent": project["_id"]})
    assert all([project, asset]), ("Could not find current project or "
                                   "asset '%s'" % ASSET)

    subset = self.get_subset(asset, instance)

    # get next version
    latest_version = io.find_one({"type": "version",
                                  "parent": subset["_id"]},
                                 {"name": True},
                                 sort=[("name", -1)])

    next_version = 1
    if latest_version is not None:
        next_version += latest_version["name"]

    # An explicit version on the instance overrides the computed one.
    if instance.data.get('version'):
        next_version = int(instance.data.get('version'))

    self.log.debug("Next version: v{0:03d}".format(next_version))

    version_data = self.create_version_data(context, instance)

    version_data_instance = instance.data.get('versionData')
    if version_data_instance:
        version_data.update(version_data_instance)

    version = self.create_version(subset=subset,
                                  version_number=next_version,
                                  locations=[LOCATION],
                                  data=version_data)

    self.log.debug("Creating version ...")
    existing_version = io.find_one({
        'type': 'version',
        'parent': subset["_id"],
        'name': next_version
    })
    if existing_version is None:
        version_id = io.insert_one(version).inserted_id
    else:
        # Re-publish over an existing version in place.
        io.update_many({
            'type': 'version',
            'parent': subset["_id"],
            'name': next_version
        }, {'$set': version})
        version_id = existing_version['_id']

    instance.data['version'] = version['name']

    # Write to disk: resolve hierarchy and anatomy for destinations.
    root = api.registered_root()
    hierarchy = ""
    parents = io.find_one({
        "type": 'asset',
        "name": ASSET
    })['data']['parents']
    if parents and len(parents) > 0:
        # hierarchy = os.path.sep.join(hierarchy)
        hierarchy = os.path.join(*parents)

    anatomy = instance.context.data['anatomy']

    # Find the representations to transfer amongst the files
    # Each should be a single representation (as such, a single extension)
    representations = []
    destination_list = []

    if 'transfers' not in instance.data:
        instance.data['transfers'] = []

    for repre in instance.data["representations"]:
        # create template data for Anatomy
        template_data = {"root": root,
                         "project": {"name": PROJECT,
                                     "code": project['data']['code']},
                         "silo": asset.get('silo'),
                         "task": TASK,
                         "asset": ASSET,
                         "family": instance.data['family'],
                         "subset": subset["name"],
                         "version": int(version["name"]),
                         "hierarchy": hierarchy}

        files = repre['files']
        if repre.get('stagingDir'):
            stagingdir = repre['stagingDir']

        # FIX: reset per representation; previously a custom
        # `anatomy_template` persisted into following representations.
        template_name = repre.get('anatomy_template') or 'publish'
        template = os.path.normpath(
            anatomy.templates[template_name]["path"])

        sequence_repre = isinstance(files, list)

        if sequence_repre:
            src_collections, remainder = clique.assemble(files)
            self.log.debug(
                "src_tail_collections: {}".format(str(src_collections)))
            src_collection = src_collections[0]

            # Assert that each member has identical suffix
            src_head = src_collection.format("{head}")
            src_tail = src_collection.format("{tail}")

            # fix dst_padding
            valid_files = [x for x in files if src_collection.match(x)]
            padd_len = len(
                valid_files[0].replace(src_head, "").replace(src_tail, "")
            )
            src_padding_exp = "%0{}d".format(padd_len)

            # Fill the template for two probe frames so clique can
            # derive the destination head/tail around the frame number.
            test_dest_files = list()
            for i in [1, 2]:
                template_data["representation"] = repre['ext']
                template_data["frame"] = src_padding_exp % i
                anatomy_filled = anatomy.format(template_data)
                test_dest_files.append(
                    os.path.normpath(
                        anatomy_filled[template_name]["path"]))

            self.log.debug(
                "test_dest_files: {}".format(str(test_dest_files)))

            dst_collections, remainder = clique.assemble(test_dest_files)
            dst_collection = dst_collections[0]
            dst_head = dst_collection.format("{head}")
            dst_tail = dst_collection.format("{tail}")

            index_frame_start = None

            if repre.get("frameStart"):
                frame_start_padding = len(str(repre.get("frameEnd")))
                index_frame_start = int(repre.get("frameStart"))

            dst_padding_exp = src_padding_exp
            dst_start_frame = None
            for i in src_collection.indexes:
                src_padding = src_padding_exp % i

                src_file_name = "{0}{1}{2}".format(src_head,
                                                   src_padding,
                                                   src_tail)

                dst_padding = src_padding_exp % i

                # Re-index destination frames when a frameStart is set.
                if index_frame_start:
                    dst_padding_exp = "%0{}d".format(frame_start_padding)
                    dst_padding = dst_padding_exp % index_frame_start
                    index_frame_start += 1

                dst = "{0}{1}{2}".format(
                    dst_head, dst_padding, dst_tail).replace("..", ".")
                self.log.debug("destination: `{}`".format(dst))
                src = os.path.join(stagingdir, src_file_name)
                self.log.debug("source: {}".format(src))
                instance.data["transfers"].append([src, dst])

                # FIX: capture the first *destination* frame (after
                # re-indexing) for the published_path, not the source
                # frame number.
                if not dst_start_frame:
                    dst_start_frame = dst_padding

            dst = "{0}{1}{2}".format(
                dst_head, dst_start_frame, dst_tail).replace("..", ".")
            repre['published_path'] = dst

        else:
            # Single file representation.
            template_data.pop("frame", None)
            fname = files
            assert not os.path.isabs(fname), (
                "Given file name is a full path")
            template_data["representation"] = repre['ext']

            if repre.get("outputName"):
                template_data["output"] = repre['outputName']

            src = os.path.join(stagingdir, fname)
            anatomy_filled = anatomy.format(template_data)
            dst = os.path.normpath(
                anatomy_filled[template_name]["path"]).replace("..", ".")

            instance.data["transfers"].append([src, dst])
            repre['published_path'] = dst
        self.log.debug("__ dst: {}".format(dst))

        representation = {
            "schema": "pype:representation-2.0",
            "type": "representation",
            "parent": version_id,
            "name": repre['name'],
            "data": {'path': dst, 'template': template},
            "dependencies": instance.data.get("dependencies", "").split(),

            # Imprint shortcut to context
            # for performance reasons.
            "context": {
                "root": root,
                "project": {"name": PROJECT,
                            "code": project['data']['code']},
                'task': TASK,
                "silo": asset.get('silo'),
                "asset": ASSET,
                "family": instance.data['family'],
                "subset": subset["name"],
                "version": version["name"],
                "hierarchy": hierarchy,
                "representation": repre['ext']
            }
        }

        if sequence_repre and repre.get("frameStart"):
            representation['context']['frame'] = repre.get("frameStart")

        self.log.debug("__ representation: {}".format(representation))
        destination_list.append(dst)
        self.log.debug("__ destination_list: {}".format(destination_list))
        instance.data['destination_list'] = destination_list
        representations.append(representation)
        self.log.debug("__ representations: {}".format(representations))

    self.log.debug("__ representations: {}".format(representations))
    for rep in instance.data["representations"]:
        self.log.debug("__ represNAME: {}".format(rep['name']))
        self.log.debug("__ represPATH: {}".format(rep['published_path']))
    io.insert_many(representations)
    self.log.info("Registered {} items".format(len(representations)))