def __init__(self, **kwargs):
    """Validate the open Maya scene and prepare farm-submission state.

    Keyword args (all optional):
        assembly: truthy to render a shaded assembly with Redshift.
        export_extensions (list): extensions/resolutions to export.
        usd_step_frame (int): USD export chunk size (default 20).
        render: render backend class (default SlurmRender.SlurmRender).
        job_on_hold (bool): submit the job in a held state.

    Raises:
        Exception: scene is Maya Binary, Redshift is unavailable while
            assembly is requested, or playback starts at a negative frame.
    """
    # Farm export only supports Maya ASCII scenes.
    scene_path = pm.sceneName()
    _, ext = os.path.splitext(scene_path)
    if ext == '.mb':
        pm.confirmDialog(
            message=
            "There is no option to export on render farm the *.mb file. Please, save file as Maya ASCII and try again.",
            button=["ok"])
        raise Exception("File must be MAYA ASCII")

    self._assembly = kwargs.get('assembly', False)
    # Assembly rendering is Redshift-only: fail fast when the renderer is
    # not available.
    if self._assembly and pm.renderer('redshift', exists=True) == 0:
        pm.confirmDialog(
            message=
            "Render shaded option uses Redshift renderer. Please, turn on redshift renderer and try again.",
            button=["ok"])
        raise Exception("Turn on REDSHIFT")

    # Query the playback range once and reuse it below.
    start_frame = int(pm.playbackOptions(q=True, min=True))
    end_frame = int(pm.playbackOptions(q=True, max=True))
    if start_frame < 0:
        pm.confirmDialog(
            message="Negative frames are not supported. Sorry...",
            button=["ok"])
        raise Exception("Negative frames are not supported")

    self._errors = {}  # label -> accumulated error text, reported later
    self._exportables = kwargs.get('export_extensions', [])
    self._json_rig = None
    self._usd_step_frame = kwargs.get('usd_step_frame', 20)
    self._nodes = []
    self._graph = HaGraph(graph_items_args=[])
    self._graph.set_render(kwargs.get('render', SlurmRender.SlurmRender))
    # Random hash keeps job names and scratch dirs unique per submission.
    self._jobname_hash = random_hash_string()

    # Reuse the scene path queried above instead of asking Maya twice.
    _, name = os.path.split(scene_path)
    self.scene_name = name
    self._export_sets = {}
    self.basename, ext = os.path.splitext(name)
    # World-writable scratch dir so farm hosts running as other users can
    # write into it.
    self.tempdir = tempfile.mkdtemp(
        prefix="_".join([os.getlogin(), self.scene_name,
                         self._jobname_hash + "_"]),
        dir=SCRATCH)
    os.chmod(self.tempdir, 0o0777)
    self.assembly_json = AssemblyJson()
    # Baseline parameters shared by every farm node added to the graph.
    self.global_params = dict(
        queue='3d',
        group='allhosts',
        start_frame=start_frame,
        end_frame=end_frame,
        job_on_hold=kwargs.get('job_on_hold', False),
        priority=-500,
        jobname=self.scene_name,
        exclude_list=[],
        usd_step_frame=self._usd_step_frame,
        assembly_json_file=os.path.join(self.tempdir, "assembly.json"))
def _get_graph(self, **kwargs):
    """Build a render graph containing a single Maya farm node.

    The node is configured and rendered from *kwargs*, then wrapped in a
    fresh HaGraph which is returned to the caller for submission.
    """
    # Removed: unused `job = os.getenv('JOB_CURRENT', 'none')` and dead
    # commented-out code copied from the Nuke variant of this method.
    graph = HaGraph(graph_items_args=[])
    mayanode = MayaFarm(**kwargs)
    mayanode.render(**kwargs)
    graph.add_node(mayanode)
    return graph
def _get_graph(self, **kwargs):
    """Build a render graph wrapping the scene's Nuke Write nodes.

    When proxy mode is enabled, every Write node must declare a proxy
    file name; otherwise an Exception is raised. The script is saved
    before the graph is assembled. Missing 'target_list' /
    'output_picture' kwargs are filled in from the Write nodes.
    """
    write_node_list = self._write_node_list()
    # Proxy renders write to the node's proxy path, so it must be set.
    if nuke.root().knob('proxy').value():
        for write_node in write_node_list:
            if write_node.knob('proxy').value() == "":
                err = 'You must specify a proxy file name to write in "%s" node' % write_node.name()
                raise Exception(err)
    nuke.scriptSave()
    graph = HaGraph(graph_items_args=[])
    if 'target_list' not in kwargs:
        kwargs['target_list'] = [x.name() for x in write_node_list]
    if 'output_picture' not in kwargs:
        # Default the output picture to the first target's evaluated file path.
        kwargs['output_picture'] = str(
            nuke.root().node(kwargs['target_list'][0]).knob(
                'file').getEvaluatedValue())
    graph.add_node(NukeWrapper(**kwargs))
    return graph
def on_click(self, sender, evtid):
    '''Runs per selected image in Clarisse GUI.

    Builds a graph with one ClarisseWrapper node per selected image,
    submits it through the Slurm render backend, and updates the button
    label to confirm submission.
    '''
    render_archive = self.parent.parms['render_archive']
    selected_images = self.parent.parms['selected_images']
    graph = HaGraph()
    graph.set_render(SlurmRender.SlurmRender)
    for image in selected_images:
        graph.add_node(ClarisseWrapper(image, str(render_archive)))
    graph.render()
    self.set_label('Render sent!')
class HaContextMaya(object):
    """Context manager that builds and submits a Maya render-farm job graph.

    On construction it validates the open scene; while the context is
    active, callers queue export chains via add_geometry_data(); on exit
    the accumulated graph (optionally topped with an assembly + Redshift
    render) is submitted.

    NOTE(review): this class uses Python 2-only constructs (`print`
    statements, ``dict.iteritems``) and therefore requires Python 2.
    """

    def __init__(self, **kwargs):
        """Validate the current scene and prepare submission state.

        Keyword args (all optional): assembly, export_extensions,
        usd_step_frame (default 20), render (default
        SlurmRender.SlurmRender), job_on_hold.

        Raises:
            Exception: scene is .mb, Redshift missing while assembly is
                requested, or the playback range starts below frame 0.
        """
        # Farm export only supports Maya ASCII scenes.
        script_name = pm.sceneName()
        _, ext = os.path.splitext(script_name)
        if ext == '.mb':
            pm.confirmDialog(
                message=
                "There is no option to export on render farm the *.mb file. Please, save file as Maya ASCII and try again.",
                button=["ok"])
            raise Exception("File must be MAYA ASCII")
        self._assembly = kwargs.get('assembly', False)
        if self._assembly == True:
            # Assembly rendering is Redshift-only; bail out early when the
            # renderer is not available.
            if pm.renderer('redshift', exists=True) == 0:
                pm.confirmDialog(
                    message=
                    "Render shaded option uses Redshift renderer. Please, turn on redshift renderer and try again.",
                    button=["ok"])
                raise Exception("Turn on REDSHIFT")
        # The pipeline cannot handle playback ranges starting below frame 0.
        if int(pm.playbackOptions(q=True, min=True)) < 0:
            pm.confirmDialog(
                message="Negative frames are not supported. Sorry...",
                button=["ok"])
            raise Exception("Negative frames are not supported")
        self._errors = {}  # label -> accumulated error text, reported in __exit__
        self._exportables = kwargs.get('export_extensions', [])
        self._json_rig = None
        self._usd_step_frame = kwargs.get('usd_step_frame', 20)
        self._nodes = []
        self._graph = HaGraph(graph_items_args=[])
        self._graph.set_render(kwargs.get('render', SlurmRender.SlurmRender))
        # Random hash keeps job names / scratch dirs unique per submission.
        self._jobname_hash = random_hash_string()
        script_name = pm.sceneName()
        _, name = os.path.split(script_name)
        self.scene_name = name
        self._export_sets = {}
        self.basename, ext = os.path.splitext(name)
        # Per-submission scratch directory, world-writable so farm hosts
        # running as other users can write into it.
        self.tempdir = tempfile.mkdtemp(prefix="_".join(
            [os.getlogin(), self.scene_name, self._jobname_hash + "_"]),
            dir=SCRATCH)
        os.chmod(self.tempdir, 0o0777)
        self.assembly_json = AssemblyJson()
        # Baseline parameters shared by every node added to the graph.
        self.global_params = dict(
            queue='3d',
            group='allhosts',
            start_frame=int(pm.playbackOptions(q=True, min=True)),
            end_frame=int(pm.playbackOptions(q=True, max=True)),
            job_on_hold=kwargs.get('job_on_hold', False),
            priority=-500,
            jobname=self.scene_name,
            exclude_list=[],
            usd_step_frame=self._usd_step_frame,
            assembly_json_file=self.tempdir + os.sep + "assembly.json")

    def __enter__(self):
        """Save the scene to scratch storage and enter the context."""
        self.global_params['scenefilepath'] = save_to_scratch()
        return self

    def __exit__(self, type, value, traceback):
        """Report collected errors, write the assembly json, submit the graph.

        NOTE(review): the exception arguments are ignored, so the graph is
        submitted even when the with-body raised — confirm this is intended.
        """
        if self._errors != {}:
            # Surface every accumulated error in a single dialog, then
            # abort the submission.
            _err = ""
            for n, m in self._errors.iteritems():
                _err += n + ":\n"
                _err += m + "\n\n"
            pm.confirmDialog(message=_err, button=["ok"], title="Error!")
            raise Exception(str(self._errors))
        with open(self.global_params['assembly_json_file'], "w") as json_file:
            json.dump(self.assembly_json.to_dict(), json_file, indent=4)
        print "[ HA MESSAGE ] Assembly json", self.global_params[
            'assembly_json_file']
        if self._assembly == True:
            # Chain an assembly node (depending on every graph item) and a
            # Redshift render node on top of it.
            global_params = copy.copy(self.global_params)
            assembly = ANMassembly(**global_params)
            for n in self._graph.graph_items:
                assembly.add(n)
            self.add_node(assembly)
            # pbnode = ASPlayBlast(**global_params)
            # pbnode.add(assembly)
            # self.add_node(pbnode)
            rsnode = RSRender(**global_params)
            rsnode.add(assembly)
            self.add_node(rsnode)
        self._graph.render()

    def is_shortname_exportable(self, short_name, reference_path):
        """Return True when *short_name* should be exported for the rig at
        *reference_path*.

        Side effects: updates self._export_sets and self.assembly_json;
        missing resolution files are accumulated into self._errors (to be
        reported in __exit__) rather than raised here.

        Raises:
            Exception: neither rig API (1.0.0 resolution() nor 1.1.0
                extension()) could classify *short_name*.
        """
        # TODO: refactor this
        _json_rig = JsonRigObject.JsonRigObject.create(reference_path)
        _exportables = copy.copy(self._exportables)
        for ext in _exportables:
            _ext_sets = _json_rig.export_sets(ext)
            try:
                self.assembly_json.add_resolution(ext, _json_rig.asset_name(),
                                                  _ext_sets)
            except:
                # Best effort: older rig descriptors may not support this.
                pass
            self._export_sets.update(_ext_sets)
        print '[ HA DEBUG ]', self._export_sets
        try:
            for _res, _path in _json_rig:
                if not os.path.exists(_path):
                    # Accumulate so every missing file is reported at once.
                    self._errors['FILE NOT FOUND'] = self._errors.get(
                        'FILE NOT FOUND', '') + _path + "\n"
        except:
            pass
        if _json_rig.force == True:
            return True
        # API 1.0.0 exposes resolution(); API 1.1.0 exposes extension().
        # Try both before giving up.
        try:
            return _json_rig.resolution(short_name) in _exportables
        except:
            pass
        try:
            return _json_rig.extension(short_name) in _exportables
        except:
            pass
        raise Exception('[ HA ERROR ] Failed run API 1.0.0 and 1.1.0')

    def add_geometry_data(self, target, file_path, *args, **kwargs):
        """Queue the export chain for *target* writing to *file_path*.

        Creates per-target scratch directories and adds four chained farm
        nodes: reference replace -> USD generate -> USD stitch -> Alembic
        convert. No-op when *target* is None. Optional kwarg: namespace.
        """
        if target == None:
            return
        namespace = kwargs.get('namespace', '')
        self.assembly_json.add_scene_item(namespace, target, file_path)
        tempdir = self.tempdir + os.sep + namespace + os.sep + target
        if not os.path.exists(tempdir):
            os.makedirs(tempdir)
            os.chmod(tempdir, 0o0777)
        if not os.path.exists(tempdir + os.sep + "usdx"):
            os.makedirs(tempdir + os.sep + "usdx")
            os.chmod(tempdir + os.sep + "usdx", 0o0777)
        jobname, _ = os.path.splitext(os.path.basename(file_path))
        global_params = copy.copy(self.global_params)
        # Per-target overrides layered on top of the shared global params.
        _global_params = dict(
            target_list=[target],
            jobname=jobname,
            hamak_set_maya_name=target,
            scratch_info_filepath=tempdir + os.sep + 'sharedserver.info',
            scratch_ma_filepath=tempdir + os.sep + self.scene_name,
            scratch_usdx_directory=tempdir + os.sep + "usdx",
            scratch_usdc_filepath=tempdir + os.sep + "out.usdc",
            generate_assembly_json=tempdir + os.sep + 'assembly.json',
            abc_output_filename=file_path,
            hamak_set_maya_exportname='"%s:%s"' %
            (namespace, self._export_sets[target]))
        global_params.update(_global_params)
        # Dependency chain: each node depends on the previous one.
        ref = REFreplace(**global_params)
        self.add_node(ref)
        mayanode = USDGenerate(**global_params)
        mayanode.add(ref)
        self.add_node(mayanode)
        usdfarm = USDstitch(**global_params)
        usdfarm.add(mayanode)
        self.add_node(usdfarm)
        abcnode = ABCconvert(**global_params)
        abcnode.add(usdfarm)
        self.add_node(abcnode)

    def add_node(self, node):
        """Tag *node* with this submission's hash and add it to the graph."""
        node.parms['job_name'] << {"jobname_hash": self._jobname_hash}
        self._graph.add_node(node)