def avalon_api_publish(data, gui=True):
    '''Launches Pyblish (GUI by default)

    :param data: Should include data for pyblish and standalone collector
    :type data: dict
    :param gui: Pyblish will be launched in GUI mode if set to True
    :type gui: bool
    '''
    io.install()

    # Create a randomly-named folder in temp to stage the publish data.
    chars = "".join(random.choice(string.ascii_letters) for _ in range(15))
    staging_dir = tempfile.mkdtemp(chars)

    # Serialize the input data to a json file *inside* the staging dir.
    # FIX: plain string concatenation lacked a path separator, so the json
    # landed next to the staging dir instead of inside it.
    json_data_path = os.path.join(
        staging_dir, os.path.basename(staging_dir) + '.json'
    )
    with open(json_data_path, 'w') as outfile:
        json.dump(data, outfile)

    args = ["-pp", os.pathsep.join(pyblish.api.registered_paths())]

    # Tell the standalone publisher subprocess where to find its input.
    envcopy = os.environ.copy()
    envcopy["PYBLISH_HOSTS"] = "standalonepublisher"
    envcopy["SAPUBLISH_INPATH"] = json_data_path

    if gui:
        av_publish.show()
    else:
        # Headless mode: run pyblish in a subprocess with the prepared env.
        execute(
            [sys.executable, "-u", "-m", "pyblish"] + args,
            env=envcopy
        )

    io.uninstall()
def cli_publish(data, gui=True):
    """Publish via a standalone pyblish subprocess (pyblish_pype GUI).

    :param data: data for pyblish and the standalone collector
    :type data: dict
    :param gui: launch the pyblish GUI when True
    :type gui: bool
    :return: False (success evaluation is not implemented yet — see TODO)
    :rtype: bool
    """
    io.install()

    pyblish.api.deregister_all_plugins()
    # Registers Global pyblish plugins
    pype.install()
    # Registers Standalone pyblish plugins
    for path in PUBLISH_PATHS:
        pyblish.api.register_plugin_path(path)

    # Register per-project plugin paths when configured via environment.
    project_plugins_paths = os.environ.get("PYPE_PROJECT_PLUGINS")
    project_name = os.environ["AVALON_PROJECT"]
    if project_plugins_paths and project_name:
        for path in project_plugins_paths.split(os.pathsep):
            if not path:
                continue
            plugin_path = os.path.join(path, project_name, "plugins")
            if os.path.exists(plugin_path):
                pyblish.api.register_plugin_path(plugin_path)
                api.register_plugin_path(api.Loader, plugin_path)
                api.register_plugin_path(api.Creator, plugin_path)

    # Create a randomly-named staging folder in temp.
    chars = "".join(random.choice(string.ascii_letters) for _ in range(15))
    staging_dir = tempfile.mkdtemp(chars)

    # FIX: the json files are now placed *inside* the staging dir; the
    # previous string concatenation dropped them beside it.
    # json the subprocess writes its result to
    return_data_path = os.path.join(
        staging_dir, os.path.basename(staging_dir) + 'return.json'
    )
    # json carrying the input data
    json_data_path = os.path.join(
        staging_dir, os.path.basename(staging_dir) + '.json'
    )
    with open(json_data_path, 'w') as outfile:
        json.dump(data, outfile)

    args = ["-pp", os.pathsep.join(pyblish.api.registered_paths())]
    if gui:
        args += ["gui"]

    envcopy = os.environ.copy()
    envcopy["PYBLISH_HOSTS"] = "standalonepublisher"
    envcopy["SAPUBLISH_INPATH"] = json_data_path
    envcopy["SAPUBLISH_OUTPATH"] = return_data_path
    envcopy["PYBLISH_GUI"] = "pyblish_pype"

    execute(
        [sys.executable, "-u", "-m", "pyblish"] + args,
        env=envcopy
    )

    # FIX: read the result from the return file (SAPUBLISH_OUTPATH); the
    # original re-read its own input json, never the subprocess output.
    result = {}
    if os.path.exists(return_data_path):
        with open(return_data_path, "r") as f:
            result = json.load(f)

    io.uninstall()

    # TODO: check if pyblish was successful; if successful return True
    print('Check result here')
    return False
def on_profile_changed(self, package):
    """Re-initialize session state after the active profile changes.

    :param package: profile package; its ``name`` becomes the new
        AVALON_PROJECT value.
    """
    # TODO: should not be touching os.environ
    os.environ["AVALON_PROJECT"] = package.name
    # Reconnect the database session so it picks up the new project.
    io.uninstall()
    io.install()
    # Refresh widgets that depend on the project context.
    self._widgets["task"].get_task_icons()
    self._widgets["asset"].refresh()
    # Drop cached environment data and re-display.
    self._env.clear()
    self.clear_env()
    self.reveal()
def cli_publish(data, gui=True):
    """Publish via a standalone pyblish subprocess (pyblish_lite GUI).

    :param data: data for pyblish and the standalone collector
    :type data: dict
    :param gui: launch the pyblish GUI when True
    :type gui: bool
    :return: False (success evaluation is not implemented yet — see TODO)
    :rtype: bool
    """
    io.install()

    # Create a randomly-named staging folder in temp.
    chars = "".join(random.choice(string.ascii_letters) for _ in range(15))
    staging_dir = tempfile.mkdtemp(chars)

    # FIX: the json files are now placed *inside* the staging dir; the
    # previous string concatenation dropped them beside it.
    # json the subprocess writes its result to
    return_data_path = os.path.join(
        staging_dir, os.path.basename(staging_dir) + 'return.json'
    )
    # json carrying the input data
    json_data_path = os.path.join(
        staging_dir, os.path.basename(staging_dir) + '.json'
    )
    with open(json_data_path, 'w') as outfile:
        json.dump(data, outfile)

    args = [
        "-pp", os.pathsep.join(pyblish.api.registered_paths())
    ]
    if gui:
        args += ["gui"]

    envcopy = os.environ.copy()
    envcopy["PYBLISH_HOSTS"] = "standalonepublisher"
    envcopy["SAPUBLISH_INPATH"] = json_data_path
    envcopy["SAPUBLISH_OUTPATH"] = return_data_path
    envcopy["PYBLISH_GUI"] = "pyblish_lite"

    execute(
        [sys.executable, "-u", "-m", "pyblish"] + args,
        env=envcopy
    )

    # FIX: read the result from the return file (SAPUBLISH_OUTPATH); the
    # original re-read its own input json, never the subprocess output.
    result = {}
    if os.path.exists(return_data_path):
        with open(return_data_path, "r") as f:
            result = json.load(f)

    io.uninstall()

    # TODO: check if pyblish was successful; if successful return True
    print('Check result here')
    return False
def set_context(project, asset, task, app):
    '''Sets context for pyblish (must be done before pyblish is launched)

    :param project: Name of `Project` where instance should be published
    :type project: str
    :param asset: Name of `Asset` where instance should be published
    :type asset: str
    :param task: Task name; an empty string is stored when falsy
    :type task: str
    :param app: Application name stored as AVALON_APP
    :type app: str
    '''
    # Mirror every value into both os.environ and io.Session so the
    # subprocess and the in-process session agree on the context.
    os.environ["AVALON_PROJECT"] = project
    io.Session["AVALON_PROJECT"] = project
    os.environ["AVALON_ASSET"] = asset
    io.Session["AVALON_ASSET"] = asset
    if not task:
        task = ''
    os.environ["AVALON_TASK"] = task
    io.Session["AVALON_TASK"] = task

    io.install()

    av_project = io.find_one({'type': 'project'})
    av_asset = io.find_one({
        "type": 'asset',
        "name": asset
    })

    # Build the hierarchy path from the asset's parents, if any.
    parents = av_asset['data']['parents']
    hierarchy = ''
    if parents:
        hierarchy = os.path.sep.join(parents)

    os.environ["AVALON_HIERARCHY"] = hierarchy
    io.Session["AVALON_HIERARCHY"] = hierarchy

    # Look up the project code once instead of twice.
    project_code = av_project['data'].get('code', '')
    os.environ["AVALON_PROJECTCODE"] = project_code
    io.Session["AVALON_PROJECTCODE"] = project_code

    io.Session["current_dir"] = os.path.normpath(os.getcwd())

    os.environ["AVALON_APP"] = app
    io.Session["AVALON_APP"] = app

    io.uninstall()
def cli_publish(data, publish_paths, gui=True):
    """Publish by running the project publish.py script in a subprocess.

    :param data: data for pyblish and the standalone collector
    :type data: dict
    :param publish_paths: pyblish plugin paths passed via PUBLISH_PATHS
    :type publish_paths: list
    :param gui: kept for interface compatibility (not used here)
    :type gui: bool
    :return: False (success evaluation is not implemented)
    :rtype: bool
    """
    PUBLISH_SCRIPT_PATH = os.path.join(
        os.path.dirname(os.path.dirname(__file__)), "publish.py"
    )
    io.install()

    # Create a randomly-named staging folder in temp.
    chars = "".join(random.choice(string.ascii_letters) for _ in range(15))
    staging_dir = tempfile.mkdtemp(chars)

    # Serialize the input data to a json file *inside* the staging dir.
    # FIX: string concatenation previously dropped it beside the dir.
    json_data_path = os.path.join(
        staging_dir, os.path.basename(staging_dir) + '.json'
    )
    with open(json_data_path, 'w') as outfile:
        json.dump(data, outfile)

    envcopy = os.environ.copy()
    envcopy["PYBLISH_HOSTS"] = "standalonepublisher"
    envcopy["SAPUBLISH_INPATH"] = json_data_path
    envcopy["PYBLISHGUI"] = "pyblish_pype"
    envcopy["PUBLISH_PATHS"] = os.pathsep.join(publish_paths)
    if data.get("family", "").lower() == "editorial":
        envcopy["PYBLISH_SUSPEND_LOGS"] = "1"

    project_name = os.environ["AVALON_PROJECT"]
    # FIX: the applied project environments were previously assigned to a
    # misspelled name (``env_copy``) and discarded, so the overrides never
    # reached the subprocess.
    envcopy = apply_project_environments_value(project_name, envcopy)

    args = get_pype_execute_args("run", PUBLISH_SCRIPT_PATH)
    execute(args, env=envcopy)

    result = {}
    # NOTE(review): this reads the input json back — presumably publish.py
    # overwrites SAPUBLISH_INPATH with its result; confirm against the
    # publish.py script.
    if os.path.exists(json_data_path):
        with open(json_data_path, "r") as f:
            result = json.load(f)

    log.info(f"Publish result: {result}")

    io.uninstall()

    return False
def main():
    """Synchronise gazu (Kitsu) projects and assets into avalon.

    Fetches every project, its assets and shots from gazu, converts them
    into avalon documents and inserts any that do not already exist.
    """
    projects = []
    objects = []

    for project in gazu.project.all_projects():
        assets = gazu.asset.all_assets_for_project(project)
        shots = gazu.shot.all_shots_for_project(project)

        # FIX: the loop variable previously shadowed the outer ``assets``
        # list; a distinct name avoids the rebinding.
        for entities, silo in ((assets, "assets"), (shots, "shots")):
            for asset in entities:
                objects.append({
                    "schema": "avalon-core:asset-2.0",
                    "name": asset["name"].replace(" ", ""),  # remove spaces
                    "silo": silo,
                    "data": {},
                    "type": "asset",
                    "parent": project["name"],
                })

        projects.append({
            "schema": "avalon-core:project-2.0",
            "type": "project",
            "name": project["name"],
            "data": {},
            "parent": None,
            "config": {
                "schema": "avalon-core:config-1.0",
                "apps": [
                    {
                        "name": "maya2015",
                        "label": "Autodesk Maya 2015"
                    },
                    {
                        "name": "maya2016",
                        "label": "Autodesk Maya 2016"
                    },
                    {
                        "name": "maya2017",
                        "label": "Autodesk Maya 2017"
                    },
                    {
                        "name": "nuke10",
                        "label": "The Foundry Nuke 10.0"
                    }
                ],
                "tasks": [
                    {"name": task["name"]}
                    for task in gazu.task.all_task_types()
                ],
                "template": {
                    "work": "{root}/{project}/{silo}/{asset}/work/"
                            "{task}/{app}",
                    "publish": "{root}/{project}/{silo}/{asset}/publish/"
                               "{subset}/v{version:0>3}/{subset}.{representation}"
                }
            }
        })

    print("Found:")
    print("- %d projects" % len(projects))
    print("- %d assets" % len(objects))

    os.environ["AVALON_PROJECTS"] = r""
    os.environ["AVALON_PROJECT"] = "temp"
    os.environ["AVALON_ASSET"] = "bruce"
    os.environ["AVALON_SILO"] = "assets"
    os.environ["AVALON_CONFIG"] = "polly"
    os.environ["AVALON_MONGO"] = "mongodb://192.168.99.100:27017"

    existing_projects = {}
    existing_assets = {}

    print("Fetching Avalon data..")
    avalon.install()

    for project in avalon.projects():
        existing_projects[project["name"]] = project

    for asset in avalon.find({"type": "asset"}):
        existing_assets[asset["name"]] = asset

    print("Synchronising..")
    for project in projects:
        if project["name"] in existing_projects:
            continue

        print("Installing project: %s" % project["name"])
        os.environ["AVALON_PROJECT"] = project["name"]
        # Re-install to switch the session to the new project.
        avalon.uninstall()
        avalon.install()
        avalon.insert_one(project)

    for asset in objects:
        if asset["name"] in existing_assets:
            continue

        # Resolve the textual parent name into a database reference.
        asset["parent"] = avalon.locate([asset["parent"]])
        print("Installing asset: %s" % asset["name"])
        avalon.insert_one(asset)

    print("Success")
def main():
    """Synchronise gazu projects, episodes, sequences and shots into avalon.

    Builds avalon project/asset documents from gazu data, then inserts new
    documents and updates task lists on existing ones.
    """
    projects = {}
    objects = {}
    objects_count = 0
    tasks = [{"name": task["name"]} for task in gazu.task.all_task_types()]

    for project in gazu.project.all_projects():
        # Ensure project["code"] consistency.
        project_name = get_consistent_name(project["name"])
        if project["code"] != project_name:
            proj = {}
            proj["code"] = project_name
            proj["id"] = project["id"]
            project = gazu.project.update_project(proj)
            print("Updating Project Code...")

        # Collect assets.
        assets = []
        for asset in gazu.asset.all_assets_for_project(project):
            # Faking a parent for better hierarchy structure, until folders
            # are supported in Kitsu.
            asset["parents"] = ["assets"]
            assets.append(asset)

        # Collect shots and parents.
        episodes = []
        sequences = []
        shots = []
        for episode in (gazu.shot.all_episodes_for_project(project) or []):
            episode["code"] = get_consistent_name(episode["name"])
            episode["parent"] = project
            # Faking a parent for better hierarchy structure, until folders
            # are supported in Kitsu.
            episode["parents"] = ["episodes"]
            episodes.append(episode)
            for sequence in gazu.shot.all_sequences_for_episode(episode):
                sequence["code"] = get_consistent_name(sequence["name"])
                sequence["parent"] = episode
                sequence["parents"] = episode["parents"] + [episode["code"]]
                sequence["label"] = sequence["name"]
                sequence["name"] = "{0}_{1}".format(
                    episode["code"], sequence["code"]
                )
                sequence["visualParent"] = episode["name"]
                sequences.append(sequence)
                for shot in gazu.shot.all_shots_for_sequence(sequence):
                    shot["code"] = get_consistent_name(shot["name"])
                    shot["parent"] = sequence
                    shot["parents"] = sequence["parents"] + [sequence["code"]]
                    shot["label"] = shot["name"]
                    shot["name"] = "{0}_{1}_{2}".format(
                        episode["code"], sequence["code"], shot["code"]
                    )
                    shot["visualParent"] = sequence["name"]
                    shot["tasks"] = gazu.task.all_tasks_for_shot(shot)
                    shots.append(shot)

        silos = [
            [assets, "assets"],
            [episodes, "shots"],
            [sequences, "shots"],
            [shots, "shots"]
        ]
        entities = {}
        for assets, silo in silos:
            for asset in assets:
                entity_type = gazu.entity.get_entity_type(
                    asset["entity_type_id"]
                )
                data = {
                    "schema": "avalon-core:asset-2.0",
                    "name": get_consistent_name(asset["name"]),
                    "silo": silo,
                    "type": "asset",
                    "parent": project["code"],
                    "data": {
                        "label": asset.get("label", asset["name"]),
                        "group": entity_type["name"],
                        "parents": asset["parents"]
                    }
                }
                if asset.get("visualParent"):
                    data["data"]["visualParent"] = asset["visualParent"]
                if asset.get("tasks"):
                    data["data"]["tasks"] = [
                        task["task_type_name"] for task in asset["tasks"]
                    ]
                entities[data["name"]] = data
                objects_count += 1

        objects[project["code"]] = entities

        projects[project["code"]] = {
            "schema": "avalon-core:project-2.0",
            "type": "project",
            "name": project["code"],
            "data": {
                "label": project["name"],
                "code": project["code"]
            },
            "parent": None,
            "config": {
                "schema": "avalon-core:config-1.0",
                "apps": [
                    {
                        "name": "maya2015",
                        "label": "Autodesk Maya 2015"
                    },
                    {
                        "name": "maya2016",
                        "label": "Autodesk Maya 2016"
                    },
                    {
                        "name": "maya2017",
                        "label": "Autodesk Maya 2017"
                    },
                    {
                        "name": "nuke10",
                        "label": "The Foundry Nuke 10.0"
                    }
                ],
                "tasks": tasks,
                "template": {
                    "work": "{root}/{project}/{silo}/{asset}/work/"
                            "{task}/{app}",
                    "publish": "{root}/{project}/{silo}/{asset}/publish/"
                               "{subset}/v{version:0>3}/{subset}.{representation}"
                }
            }
        }

    print("Found:")
    print("- %d projects" % len(projects))
    print("- %d assets" % objects_count)

    os.environ["AVALON_PROJECTS"] = r""
    os.environ["AVALON_PROJECT"] = "temp"
    os.environ["AVALON_ASSET"] = "bruce"
    os.environ["AVALON_SILO"] = "assets"
    os.environ["AVALON_CONFIG"] = "polly"
    os.environ["AVALON_MONGO"] = os.environ.get(
        "AVALON_MONGO", "mongodb://127.0.0.1:27017"
    )

    print("Fetching Avalon data..")
    avalon.install()

    existing_projects = {}
    existing_objects = {}
    for project in avalon.projects():
        existing_projects[project["name"]] = project

        # Switch the session to this project before querying its assets.
        os.environ["AVALON_PROJECT"] = project["name"]
        avalon.uninstall()
        avalon.install()

        # Collect assets.
        assets = {}
        for asset in avalon.find({"type": "asset"}):
            assets[asset["name"]] = asset

        existing_objects[project["name"]] = assets

    print("Synchronising..")
    for name, project in projects.items():
        if project["name"] in existing_projects:
            # Update task types on the existing project when they differ.
            existing_project = existing_projects[project["name"]]
            existing_project_task_types = existing_project["config"]["tasks"]
            if existing_project_task_types != tasks:
                print(
                    "Updating tasks types on \"{0}\" to:\n{1}".format(
                        project["name"], tasks
                    )
                )
                existing_project["config"]["tasks"] = tasks
                os.environ["AVALON_PROJECT"] = project["name"]
                avalon.uninstall()
                avalon.install()
                avalon.replace_one({"type": "project"}, existing_project)
            continue

        print("Installing project: %s" % project["name"])
        os.environ["AVALON_PROJECT"] = project["name"]
        avalon.uninstall()
        avalon.install()
        avalon.insert_one(project)

    # FIX: the original iterated with ``for project["code"], assets in
    # objects.items()`` which mutated a loop variable leaked from the
    # previous loop (and raised NameError when ``projects`` was empty);
    # use a plain local name instead.
    for project_code, assets in objects.items():
        os.environ["AVALON_PROJECT"] = project_code
        avalon.uninstall()
        avalon.install()
        for asset_name, asset in assets.items():
            if asset_name in existing_objects.get(project_code, {}):
                # Update tasks on the existing asset when they differ.
                if asset["data"].get("tasks"):
                    existing_project = existing_objects[project_code]
                    existing_asset = existing_project[asset_name]
                    existing_tasks = existing_asset["data"].get("tasks", [])
                    if existing_tasks != asset["data"]["tasks"]:
                        # FIX: a local name here; the original reassigned
                        # the module-wide ``tasks`` list used for project
                        # comparison above.
                        asset_tasks = asset["data"]["tasks"]
                        print(
                            "Updating tasks on \"{0} / {1}\" to:\n{2}".format(
                                project_code, asset_name, asset_tasks
                            )
                        )
                        existing_asset["data"]["tasks"] = asset_tasks
                        avalon.replace_one(
                            {"type": "asset", "name": asset_name},
                            existing_asset
                        )
                continue

            # Resolve textual parent references into database references.
            asset["parent"] = avalon.locate([asset["parent"]])
            if asset["data"].get("visualParent"):
                asset["data"]["visualParent"] = avalon.find_one(
                    {"type": "asset", "name": asset["data"]["visualParent"]}
                )["_id"]

            print(
                "Installing asset: \"{0} / {1}\"".format(
                    project_code, asset_name
                )
            )
            avalon.insert_one(asset)

    print("Success")