def _constructJob(self):
    """Create an af.Job configured from this node's UI parameters.

    Reads the job name, branch, priority, task limits and host masks
    from the node parameters and returns the resulting job object.
    """
    # Evaluate every UI parameter up front, then configure the job.
    name = self['job_name'].evaluateString()
    branch = self['job_branch'].evaluateString()
    priority = self['priority'].evaluateInt()
    max_tasks = self['afanasy_max_running_tasks'].evaluateInt()
    max_tasks_per_host = \
        self['afanasy_max_running_tasks_per_host'].evaluateInt()
    hosts_mask = self['afanasy_hosts_mask'].evaluateString()
    hosts_mask_exclude = self['afanasy_hosts_mask_exclude'].evaluateString()

    job = af.Job(name)
    job.setBranch(branch)
    job.setPriority(priority)
    job.setMaxRunningTasks(max_tasks)
    job.setMaxRunTasksPerHost(max_tasks_per_host)
    job.setHostsMask(hosts_mask)
    job.setHostsMaskExclude(hosts_mask_exclude)
    return job
def __init__(self, name='', description=''):
    """Initialize an Afanasy render-job wrapper with default settings.

    :param name: job name passed to af.Job
    :param description: job description (currently unused here)
    """
    # RenderJob.__init__(self, name, description)

    # The wrapped Afanasy job object.
    self.af_job = af.Job(name)
    self.dispatcher = 'afanasy'

    # Variable capacity settings, applied via
    # block.setVariableCapacity( capmin, capmax )
    self.use_var_capacity = False
    self.capacity_coeff_min = 1.0
    self.capacity_coeff_max = 1.0

    # -1 presumably means "unset / no limit" — TODO confirm against af API.
    self.tasks_max_run_time = -1
    self.max_running_tasks = -1
    self.max_running_tasks_per_host = -1

    # Host / dependency masks (empty string = no mask).
    self.hostsmask = ''
    self.hostsexcl = ''
    self.depmask = ''
    self.depglbl = ''
    self.need_os = ''

    # Pre Command - Command to execute on job registration.
    #
    # Note, that this command is executed by server, and not from tasks
    # working directory.
    #
    # Use absolute paths here or even transfer paths if your server has
    # another file system than renders.
    #
    # Commands are executed in a special thread with a commands queue.
    #
    # This means if somebody executes 'sleep 1000', other commands
    # execution (and jobs registration) will be delayed by 1000 seconds
    # (only delayed, not lost).
    #
    # Try not to use Pre Command at all. You always can create one more
    # task(block) and make other tasks(blocks) depend on it.
    self.command_pre = ''

    # Post Command - Command executed on job deletion.
    # These commands are executed on render farm hosts by a special
    # system job.
    # Working directory of such system task will be the first block
    # working folder.
    self.command_post = ''

    # Distributed (multi-host) rendering settings.
    self.distributed = False
    self.sameHostMaster = True
    self.hosts_min = 0
    self.hosts_max = 0
    self.threads_limit = 0
    self.port = 39010  # mentalray standalone slave port
    self.hosts = ''
def submitAsJob(self, graph_file, node_path):
    """
    [virtual] Called when the scheduler should cook the entire TOP
    Network as a standalone job, by pressing the 'Submit as Job'
    button on the scheduler node UI.

    Creates a job which cooks that TOP graph using hython.
    Returns the status URI for the submitted job - just to open
    manager Web GUI.  (This implementation always returns None.)

    graph_file      Path to a .hip file containing the TOP Network,
                    relative to $PDG_DIR.
    node_path       Op path to the TOP Network
    """
    self._log("submitAsJob({},{})".format(graph_file, node_path))

    # Construct a command for hython + topcook script
    cmd = 'hython'
    # Use PDG licence
    cmd += ' --pdg'
    # Specify script that cooks graph
    cmd += ' "%s/pdgjob/topcook.py"' % os.getenv('HHP')
    # Set verbosity level
    cmd += ' --verbosity 2'
    # Set hip file:
    cmd += ' --hip "%s"' % hou.hipFile.path()
    # Set top network to cook
    cmd += ' --toppath "%s"' % node_path

    # Construct a job from the 'gj_*' (graph job) UI parameters:
    job = af.Job(self['gj_name'].evaluateString())
    job.setBranch(self['job_branch'].evaluateString())
    job.setPriority(self['gj_priority'].evaluateInt())
    job.setDependMask(self['gj_depend_mask'].evaluateString())
    job.setDependMaskGlobal(self['gj_depend_mask_global'].evaluateString())
    job.setHostsMask(self['gj_hosts_mask'].evaluateString())
    job.setHostsMaskExclude(self['gj_hosts_mask_exclude'].evaluateString())
    if self['gj_start_paused'].evaluateInt():
        job.setPaused()

    # Block: a single block holding the whole-graph cook task.
    block = af.Block('PDG-GRAPH', self['gj_service'].evaluateString())
    block.setCapacity(self['gj_capacity'].evaluateInt())
    block.addTicket(self['gj_ticket'].evaluateString(), 1)

    # Task: one task running the hython command built above.
    task = af.Task(node_path)
    task.setCommand(cmd)
    task.setEnv('AF_USERNAME', cgruconfig.VARS['USERNAME'])

    # Append task and block and send job
    block.tasks.append(task)
    job.blocks.append(block)
    job.send()

    return None
def nuke_sendJobs(self, writename, framefirst, framelast, framepertask, seqname):
    """Send a render job from Nuke to CGRU/Afanasy.

    :param writename: name of the Write node to render
    :param framefirst: first frame of the range
    :param framelast: last frame of the range
    :param framepertask: number of frames per Afanasy task
    :param seqname: output sequence pattern registered as block files
    """
    # Hoisted: query root info once instead of three separate calls.
    # Assumes indices are [0] scene stamp, [1] working dir, [2] script
    # path — TODO confirm against nukeRootinfos().
    rootinfos = self.nukeRootinfos()

    job_name = '{}_{}'.format(writename, rootinfos[0])
    job = af.Job(job_name)
    job.setMaxRunningTasks(15)

    block = af.Block('Nuke_Render', 'nuke')
    block.setWorkingDirectory(rootinfos[1])
    # -i: interactive license; -X: execute only the given Write node;
    # @#@,@#@ is replaced by Afanasy with each task's frame range.
    block.setCommand('nuke -i -X {} -x {} @#@,@#@'.format(writename, rootinfos[2]))
    block.setFiles([seqname])
    block.setNumeric(framefirst, framelast, framepertask)
    job.blocks.append(block)

    # Submit in paused state if the UI checkbox is ticked.
    if self.job_paused.isChecked():
        job.offline()
    job.send()
def createJob(i_app, i_params):
    """Create an Afanasy job and a temporary project file name.

    :param i_app: application wrapper used to query project parameters
    :param i_params: dict of parameters; optional 'af_*' keys configure
        the job, 'nodelabel' is appended to the job name
    :return: dict with the constructed 'job' and temp project path 'prj'
    """
    # Construct job name:
    name = ''
    if 'af_job_name' in i_params:
        name = i_params['af_job_name']
    ext = '.ntp'
    if name == '':
        name, ext = os.path.splitext(
            i_app.getProjectParam('projectName').getValue())
    name += '.' + i_params['nodelabel']
    job = af.Job(name)

    # Generate temp project filename:
    # BUG FIX: was '%y%M%d%H%M%S' — '%M' is minutes; the month '%m' was
    # clearly intended for a y/m/d H:M:S timestamp.
    prj = name
    prj += '.' + str(time.strftime('%y%m%d%H%M%S')) + str(
        (time.time() - int(time.time())))[2:5]
    prj += ext
    prj = os.path.join(i_app.getProjectParam('projectPath').getValue(), prj)

    # Temp project should be deleted at job deletion:
    job.setCmdPost('deletefiles "%s"' % prj)

    # Set job parameters (each only when provided and meaningful):
    if 'af_platform' in i_params and len(i_params['af_platform']):
        job.setNeedOS(i_params['af_platform'])
    if 'af_priority' in i_params and i_params['af_priority'] >= 0:
        job.setPriority(i_params['af_priority'])
    if 'af_hostsmask' in i_params and len(i_params['af_hostsmask']):
        job.setHostsMask(i_params['af_hostsmask'])
    if 'af_hostsmask_exclude' in i_params and len(
            i_params['af_hostsmask_exclude']):
        job.setHostsMaskExclude(i_params['af_hostsmask_exclude'])
    if 'af_dependmask' in i_params and len(i_params['af_dependmask']):
        job.setDependMask(i_params['af_dependmask'])
    if 'af_dependmask_global' in i_params and len(
            i_params['af_dependmask_global']):
        job.setDependMaskGlobal(i_params['af_dependmask_global'])
    if 'af_job_paused' in i_params and i_params['af_job_paused']:
        job.setOffline()

    # Construct blocks traversing through network:
    traverseChilds(job, i_params, prj)

    # Childs were reversed, see above
    job.blocks.reverse()

    return {'job': job, 'prj': prj}
def genJob(self, blockparams):
    """Generate and submit an Afanasy job for the given block parameters.

    Saves the scene to a temporary hip file, configures the job from the
    node settings and sends it to the Afanasy server.

    :param blockparams: sequence of objects providing genBlock(hipfile)
    """
    if VERBOSE:
        # FIX: was a Python 2 'print' statement; use the call form for
        # consistency with the rest of this block (valid in Py2 and Py3).
        print('Generating job on "%s"' % self.job_name)

    if len(blockparams) < 1:
        print('Can`t generate job without any blocks on "%s"' %
              self.afnode.name())
        return

    # Calculate temporary hip name:
    ftime = time.time()
    tmphip = hou.hipFile.name() + '_' + afcommon.filterFileName(
        self.job_name) + time.strftime('.%m%d-%H%M%S-') + str(
        ftime - int(ftime))[2:5] + ".hip"

    # use mwrite, because hou.hipFile.save(tmphip)
    # changes current scene file name to tmphip,
    # at least in version 9.1.115
    hou.hscript('mwrite -n "%s"' % tmphip)

    job = af.Job()
    job.setName(self.job_name)
    if self.start_paused:
        job.offLine()
    if self.platform != '':
        # 'any' means "no OS requirement" on the farm.
        if self.platform == 'any':
            job.setNeedOS('')
        else:
            job.setNeedOS(self.platform)
    if self.priority != -1:
        job.setPriority(self.priority)
    if self.depend_mask != '':
        job.setDependMask(self.depend_mask)
    if self.depend_mask_global != '':
        job.setDependMaskGlobal(self.depend_mask_global)
    if self.max_runtasks > -1:
        job.setMaxRunningTasks(self.max_runtasks)
    if self.maxperhost > -1:
        job.setMaxRunTasksPerHost(self.maxperhost)
    if self.hosts_mask != '':
        job.setHostsMask(self.hosts_mask)
    if self.hosts_mask_exclude != '':
        job.setHostsMaskExclude(self.hosts_mask_exclude)

    for blockparam in blockparams:
        job.blocks.append(blockparam.genBlock(tmphip))

    # The temporary hip is deleted when the job is deleted.
    job.setCmdPost('deletefiles "%s"' % tmphip)

    if VERBOSE:
        job.output(True)
    job.send()
def submit_job(job_name, block_name, command):
    """Submits an Afanasy job with a single one-task block.

    :param job_name: name of the job
    :param block_name: name of the block
    :param command: command as a sequence of strings, joined with spaces
    :raises RuntimeError: if the Afanasy server rejects the submission
    """
    import af
    block = af.Block(block_name, 'maya')
    block.setCommand(" ".join(command))
    block.setNumeric(1, 1, 1, 1)

    job = af.Job(job_name)
    job.blocks = [block]
    status, data = job.send()

    if not status:
        # BUG FIX: the original constructed the RuntimeError but never
        # raised it, so failures were silently ignored.
        raise RuntimeError('Something went wrong!')
#!/usr/bin/env python # -*- coding: utf-8 -*- import af job = af.Job('Blender Test') block = af.Block('render', 'blender') block.setCommand('blender -b scene.blend -s @#@ -e @#@ -a') block.setNumeric(1, 20, 2) job.blocks.append(block) print('') job.output(True) print('') job.send()
JobNames.append(os.path.basename(convert['output'])) else: convert['warning'] = 'No images found' OUT['convert'].append(convert) for i in range(0, len(Jobs)): if MkDirs[i]: if Options.verbose: print('mkdir ' + MkDirs[i]) if not Options.debug and not os.path.isdir(MkDirs[i]): os.makedirs(MkDirs[i]) if Options.afanasy: job = af.Job('CVT ' + JobNames[i]) block = af.Block('convert') job.blocks.append(block) if Options.afuser != '': job.setUserName(Options.afuser) if Options.afmax != -1: job.setMaxRunningTasks(Options.afmax) if Options.afcap != -1: block.setCapacity(Options.afcap) if Options.afmph != -1: block.setMaxRunTasksPerHost(Options.afmph)
def genJob(self, blockparams):
    """Build and submit an Afanasy job for the given block parameters.

    Optionally saves the scene to a temporary hip file (per the node's
    'render_temp_hip' parameter), configures the job from the node
    settings, appends one block per entry of blockparams and sends it.
    """
    if VERBOSE:
        print('Generating job on "%s"' % self.job_name)

    if len(blockparams) < 1:
        print('Can`t generate job without any blocks on "%s"' %
              self.afnode.name())
        return

    renderhip = hou.hipFile.name()
    if self.afnode.parm('render_temp_hip').eval():
        # Calculate temporary hip name:
        ftime = time.time()
        renderhip = '%s_%s%s%s.hip' % (
            renderhip,
            afcommon.filterFileName(self.job_name),
            time.strftime('.%m%d-%H%M%S-'),
            str(ftime - int(ftime))[2:5])
        # use mwrite, because hou.hipFile.save(renderhip)
        # changes current scene file name to renderhip,
        # at least in version 9.1.115
        hou.hscript('mwrite -n "%s"' % renderhip)

    job = af.Job()
    job.setName(self.job_name)
    if self.start_paused:
        job.offLine()
    if self.preview_approval:
        job.setPPApproval()
    if self.platform != '':
        # 'any' means "no OS requirement" on the farm.
        if self.platform == 'any':
            job.setNeedOS('')
        else:
            job.setNeedOS(self.platform)
    if self.priority != -1:
        job.setPriority(self.priority)
    if self.depend_mask != '':
        job.setDependMask(self.depend_mask)
    if self.depend_mask_global != '':
        job.setDependMaskGlobal(self.depend_mask_global)
    if self.max_runtasks > -1:
        job.setMaxRunningTasks(self.max_runtasks)
    if self.maxperhost > -1:
        job.setMaxRunTasksPerHost(self.maxperhost)
    if self.hosts_mask != '':
        job.setHostsMask(self.hosts_mask)
    if self.hosts_mask_exclude != '':
        job.setHostsMaskExclude(self.hosts_mask_exclude)

    job.setFolder('input', os.path.dirname(hou.hipFile.name()))

    images = None
    for blockparam in blockparams:
        job.blocks.append(blockparam.genBlock(renderhip))
        # Set output folder from the first block with images to preview:
        if images is None and blockparam.preview != '':
            images = blockparam.preview
            job.setFolder('output', os.path.dirname(images))

    if self.afnode.parm('render_temp_hip').eval():
        # The temporary hip is deleted when the job is deleted.
        job.setCmdPost('deletefiles "%s"' % renderhip)

    if VERBOSE:
        job.output(True)
    job.send()
#!/usr/bin/env python # coding=utf8 import os import af job = af.Job('Nuke Test') block1 = af.Block( 'preview', 'nuke') block1.setCommand('nuke -X preview -x scene.nk %1,%2') block1.setNumeric( 1, 20, 2) block1.setTasksDependMask( 'final') block1.setFiles('render/preview.%04d.jpg') block2 = af.Block( 'final', 'nuke') block2.setCommand('nuke -X final -x scene.nk %1,%2') block2.setNumeric( 1, 20, 1) block2.setTasksDependMask( 'key|back') block2.setFiles('render/final.%04d.exr') block3 = af.Block( 'key', 'nuke') block3.setCommand('nuke -X key -x scene.nk %1,%2') block3.setNumeric( 1, 20, 3) block3.setFiles('render/key.%04d.exr') block4 = af.Block( 'back', 'nuke') block4.setCommand('nuke -X back -x scene.nk %1,%2') block4.setNumeric( 1, 20, 3) block4.setFiles('render/back.%04d.exr') job.blocks.append( block1)
def launch(self, *args, **kwargs):
    """Launch renderer command.

    Runs a series of sanity checks on the current Maya scene (Arnold
    ignore flags, renderable cameras, renderer-specific sample limits),
    reads the submission settings from the UI, saves a time-stamped copy
    of the scene and submits one or more Afanasy jobs according to the
    'separate layers' mode.
    """
    # do nothing if there is no window (called externally)
    if not self.window:
        return

    # warn the user about the ignore settings
    try:
        dAO = pm.PyNode('defaultArnoldRenderOptions')

        ignore_attrs = [
            'ignoreSubdivision', 'ignoreDisplacement', 'ignoreBump',
            'ignoreMotionBlur'
        ]

        attr_values = [(attr, dAO.getAttr(attr)) for attr in ignore_attrs
                       if dAO.getAttr(attr) is True]

        if any(attr_values):
            msg_text = '<br>'.join(
                map(lambda x: '%s: %s' % (x[0], x[1]), attr_values))

            response = pm.confirmDialog(
                title='Ignore These Settings?',
                message=
                'You have ignored:<br><br>%s<br><br><b>Is that ok?</b>' %
                msg_text,
                button=['Yes', 'No'],
                defaultButton='No',
                cancelButton='No',
                dismissString='No')

            if response == 'No':
                return
    except (pm.MayaNodeError, pm.MayaAttributeError):
        # no Arnold
        pass

    # check if rendering with persp camera
    try:
        wrong_camera_names = [
            'perspShape', 'topShape', 'sideShape', 'fontShape',
            'persp1Shape', 'perspShape1',
        ]
        renderable_cameras = [
            node for node in pm.ls(type='camera')
            if node.getAttr('renderable')
        ]
        if any(
                map(lambda x: x.name() in wrong_camera_names,
                    renderable_cameras)):
            response = pm.confirmDialog(
                title='Rendering with Persp?',
                message=
                'You are rendering with <b>Persp Camera<b><br><br>Is that ok?</b>',
                button=['Yes', 'No'],
                defaultButton='No',
                cancelButton='No',
                dismissString='No')

            if response == 'No':
                return

        if len(renderable_cameras) > 1:
            response = pm.confirmDialog(
                title='Rendering more than one Camera?',
                message=
                'You are rendering <b>more than one camera<b><br><br>Is that ok?</b>',
                button=['Yes', 'No'],
                defaultButton='No',
                cancelButton='No',
                dismissString='No')

            if response == 'No':
                return
        elif len(renderable_cameras) == 0:
            pm.confirmDialog(
                title='No <b>Renderable</b> camera!!!',
                message='There is no <b>renderable camera<b>!!!',
                button=['Ok'],
                defaultButton='Ok',
                cancelButton='Ok',
                dismissString='Ok')
            return
    except pm.MayaNodeError:
        # no default render globals node
        pass

    drg = pm.PyNode('defaultRenderGlobals')
    render_engine = drg.getAttr('currentRenderer')

    # RENDERER SPECIFIC CHECKS
    if render_engine == 'redshift':
        # if the renderer is RedShift
        # check if unifiedDisableDivision is 1 which will take too much
        # time to render
        dro = pm.PyNode('redshiftOptions')
        if dro.unifiedDisableDivision.get() == 1:
            response = pm.confirmDialog(
                title=
                "Enabled **Don't Automatically Reduce Samples of Other Effects**",
                message=
                'It is not allowed to render with the following option is enabled:<br>'
                '<br>'
                "Don't Automatically Reduce Samples of Other Effects: Enabled<br>"
                "<br>"
                "Please DISABLE it!",
                button=['OK'],
                defaultButton='OK',
                cancelButton='OK',
                dismissString='OK')
            return

        # Check dome light backgrounds
        domes_to_fix = []
        rs_domes = pm.ls(type='RedshiftDomeLight')
        if rs_domes:
            for rs_dome in rs_domes:
                if rs_dome.getAttr('background_enable') == 1 \
                   or rs_dome.getAttr('backPlateEnabled') == 1:
                    domes_to_fix.append(rs_dome.name())

        if domes_to_fix:
            message = 'Some DomeLights have <b>BackGround Render ' \
                      'Enabled</b>:' \
                      '<br><br>%s<br><br>' \
                      'Are you Sure?' % '<br>'.join(domes_to_fix)

            response = pm.confirmDialog(
                title='Dome Lights with Background Enabled?',
                message=message,
                button=['Yes', 'No'],
                defaultButton='No',
                cancelButton='No',
                dismissString='No')

            if response == 'No':
                return

        # abort on license fail
        dro.abortOnLicenseFail.set(1)
    elif render_engine == 'arnold':
        # check if the samples are too high
        dAO = pm.PyNode('defaultArnoldRenderOptions')

        aa_samples = dAO.AASamples.get()
        diff_samples = dAO.GIDiffuseSamples.get()
        try:
            glossy_samples = dAO.GIGlossySamples.get()
        except AttributeError:
            # attribute renamed in newer Arnold versions
            glossy_samples = dAO.GISpecularSamples.get()
        if int(pm.about(v=1)) >= 2017:
            sss_samples = dAO.GISssSamples.get()
        else:
            sss_samples = dAO.sssBssrdfSamples.get()

        # effective sample counts scale with AASamples squared
        total_diff_samples = aa_samples**2 * diff_samples**2
        total_glossy_samples = aa_samples**2 * glossy_samples**2
        total_sss_samples = aa_samples**2 * sss_samples**2

        max_allowed_diff_samples = 225
        max_allowed_glossy_samples = 100
        max_allowed_sss_samples = 800

        if total_diff_samples > max_allowed_diff_samples:
            pm.confirmDialog(
                title="Too Much Diffuse Samples!!!",
                message='You are using too much DIFFUSE SAMPLES (>%s)<br>'
                        '<br>'
                        'Please either reduce AA samples of Diffuse '
                        'Samples!!!' % max_allowed_diff_samples,
                button=['OK'],
                defaultButton='OK',
                cancelButton='OK',
                dismissString='OK')
            return

        if total_glossy_samples > max_allowed_glossy_samples:
            pm.confirmDialog(
                title="Too Much Glossy Samples!!!",
                message='You are using too much GLOSSY SAMPLES (>%s)<br>'
                        '<br>'
                        'Please either reduce AA samples of Glossy '
                        'Samples!!!' % max_allowed_glossy_samples,
                button=['OK'],
                defaultButton='OK',
                cancelButton='OK',
                dismissString='OK')
            return

        if total_sss_samples > max_allowed_sss_samples:
            pm.confirmDialog(
                title="Too Much SSS Samples!!!",
                message='You are using too much SSS SAMPLES (>%s)<br>'
                        '<br>'
                        'Please either reduce AA samples of SSS '
                        'Samples!!!' % max_allowed_sss_samples,
                button=['OK'],
                defaultButton='OK',
                cancelButton='OK',
                dismissString='OK')
            return

        # check Light Samples
        # check point lights with zero radius but more than one samples
        all_point_lights = pm.ls(type='pointLight')
        ridiculous_point_lights = []
        for point_light in all_point_lights:
            if point_light.aiRadius.get(
            ) < 0.1 and point_light.aiSamples.get() > 1:
                ridiculous_point_lights.append(point_light)

        if ridiculous_point_lights:
            pm.confirmDialog(
                title="Unnecessary Samples on Point Lights!!!",
                message='You are using too much SAMPLES (>1)<br>'
                        '<br>'
                        'on <b>Point lights with zero radius</b><br>'
                        '<br>'
                        'Please reduce the samples to 1',
                button=['OK'],
                defaultButton='OK',
                cancelButton='OK',
                dismissString='OK')
            return

        # Check area lights with more than 2 samples
        all_area_lights = pm.ls(type=['areaLight', 'aiAreaLight'])
        ridiculous_area_lights = []
        for area_light in all_area_lights:
            if area_light.aiSamples.get() > 2:
                ridiculous_area_lights.append(area_light)

        if ridiculous_area_lights:
            pm.confirmDialog(
                title="Unnecessary Samples on Area Lights!!!",
                message='You are using too much SAMPLES (>2) on<br>'
                        '<br>'
                        '<b>Area Lights</b><br>'
                        '<br>'
                        'Please reduce the samples to 2',
                button=['OK'],
                defaultButton='OK',
                cancelButton='OK',
                dismissString='OK')
            return

        # Check directional lights with angle == 0 and samples > 1
        all_directional_lights = pm.ls(type='directionalLight')
        ridiculous_directional_lights = []
        dir_sample_attr_name = 'aiSamples'
        # if pm.about(v=1) == "2014":
        #     dir_sample_attr_name = 'aiSamples'
        for directional_light in all_directional_lights:
            if directional_light.aiAngle.get(
            ) == 0 and directional_light.attr(
                    dir_sample_attr_name).get() > 1:
                ridiculous_directional_lights.append(directional_light)

        if ridiculous_directional_lights:
            pm.confirmDialog(
                title="Unnecessary Samples on Directional Lights!!!",
                message='You are using too much SAMPLES (>1) on <br>'
                        '<br>'
                        '<b>Directional lights with zero angle</b><br>'
                        '<br>'
                        'Please reduce the samples to 1',
                button=['OK'],
                defaultButton='OK',
                cancelButton='OK',
                dismissString='OK')
            return

    # get values
    start_frame = pm.intField('cgru_afanasy__start_frame', q=1, v=1)
    end_frame = pm.intField('cgru_afanasy__end_frame', q=1, v=1)
    frames_per_task = \
        pm.intField('cgru_afanasy__frames_per_task', q=1, v=1)
    by_frame = pm.intField('cgru_afanasy__by_frame', q=1, v=1)
    depend_mask_global = pm.textField('cgru_afanasy__depend_mask_global',
                                      q=1, text=True)
    hosts_mask = pm.textField('cgru_afanasy__hosts_mask', q=1, text=True)
    hosts_exclude = pm.textField('cgru_afanasy__hosts_exclude', q=1,
                                 text=True)
    separate_layers = \
        pm.radioButtonGrp('cgru_afanasy__separate_layers', q=1, sl=1)
    pause = pm.checkBox('cgru_afanasy__paused', q=1, v=1)
    life_time = pm.intField('cgru_afanasy__life_time', q=1, v=1)
    annotation = pm.textField('cgru_afanasy__annotation', q=1, text=True)
    submit_multiple_times = pm.intField(
        'cgru_afanasy__submit_multiple_times', q=1, v=1)
    errors_avoid_host = pm.intField('cgru_afanasy__errors_avoid_host',
                                    q=1, v=1)
    errors_retries = pm.intField('cgru_afanasy__errors_retries', q=1, v=1)
    errors_task_same_host = pm.intField(
        'cgru_afanasy__errors_task_same_host', q=1, v=1)
    errors_forgive_time = pm.intField('cgru_afanasy__errors_forgive_time',
                                      q=1, v=1)
    generate_previews = pm.checkBox('cgru_afanasy__generate_previews',
                                    q=1, v=1)

    # check values
    if start_frame > end_frame:
        temp = end_frame
        end_frame = start_frame
        start_frame = temp

    frames_per_task = max(1, frames_per_task)
    by_frame = max(1, by_frame)

    # store without quota sign
    depend_mask_global = depend_mask_global.replace('"', '')
    hosts_mask = hosts_mask.replace('"', '')
    hosts_exclude = hosts_exclude.replace('"', '')

    # store field values
    pm.optionVar['cgru_afanasy__start_frame_ov'] = start_frame
    pm.optionVar['cgru_afanasy__end_frame_ov'] = end_frame
    pm.optionVar['cgru_afanasy__frames_per_task_ov'] = frames_per_task
    pm.optionVar['cgru_afanasy__by_frame_ov'] = by_frame
    pm.optionVar['cgru_afanasy__depend_mask_global_ov'] = \
        depend_mask_global
    pm.optionVar['cgru_afanasy__hosts_mask_ov'] = hosts_mask
    pm.optionVar['cgru_afanasy__hosts_exclude_ov'] = hosts_exclude
    pm.optionVar['cgru_afanasy__separate_layers_ov'] = separate_layers
    pm.optionVar['cgru_afanasy__life_time_ov'] = life_time
    pm.optionVar['cgru_afanasy__annotation_ov'] = annotation
    pm.optionVar[
        'cgru_afanasy__submit_multiple_times_ov'] = submit_multiple_times
    pm.optionVar['cgru_afanasy__errors_avoid_host_ov'] = errors_avoid_host
    pm.optionVar['cgru_afanasy__errors_retries_ov'] = errors_retries
    pm.optionVar[
        'cgru_afanasy__errors_task_same_host_ov'] = errors_task_same_host
    # NOTE(review): key has a doubled 'errors_' prefix; kept as-is because
    # the read side presumably uses the same key.
    pm.optionVar[
        'cgru_afanasy__errors_errors_forgive_time_ov'] = errors_forgive_time
    pm.optionVar['cgru_afanasy__paused_ov'] = pause
    pm.optionVar['cgru_afanasy__generate_previews_ov'] = generate_previews

    # get paths
    scene_name = pm.sceneName()
    datetime = '%s%s' % (time.strftime('%y%m%d-%H%M%S-'),
                         str(time.time() - int(time.time()))[2:5])
    filename = '%s.%s.mb' % (scene_name, datetime)

    project_path = pm.workspace(q=1, rootDirectory=1)

    # outputs = \
    #     pm.renderSettings(fullPath=1, firstImageName=1, lastImageName=1)

    # get output paths, set the RenderPass token to Beauty,
    # this will at least guarantee to get something
    outputs = \
        pm.renderSettings(
            fullPath=1, firstImageName=1, lastImageName=1,
            leaveUnmatchedTokens=1, customTokenString="RenderPass=Beauty"
        )

    # job_name = os.path.basename(scene_name)
    job_name = self.generate_job_name()

    logger.debug('%ss %se %sr' % (start_frame, end_frame, by_frame))
    logger.debug('scene = %s' % scene_name)
    logger.debug('file = %s' % filename)
    logger.debug('job_name = %s' % job_name)
    logger.debug('project_path = %s' % project_path)
    logger.debug('outputs = %s' % outputs)
    logger.debug('annotation = %s' % annotation)
    logger.debug('separate_layers = %s' % separate_layers)
    logger.debug('errors_avoid_host = %s' % errors_avoid_host)
    logger.debug('errors_retries = %s' % errors_retries)
    logger.debug('errors_task_same_host = %s' % errors_task_same_host)
    logger.debug('errors_forgive_time = %s' % errors_forgive_time)
    logger.debug('generate_previews = %s' % generate_previews)

    if pm.checkBox('cgru_afanasy__close', q=1, v=1):
        pm.deleteUI(self.window)

    stored_log_level = None
    if render_engine == 'arnold':
        # set the verbosity level to warning+info
        aro = pm.PyNode('defaultArnoldRenderOptions')
        stored_log_level = aro.getAttr('log_verbosity')
        aro.setAttr('log_verbosity', 2)
        # set output to console
        aro.setAttr("log_to_console", 1)
    elif render_engine == 'redshift':
        # set the verbosity level to detailed+info
        redshift = pm.PyNode('redshiftOptions')
        stored_log_level = redshift.logLevel.get()
        redshift.logLevel.set(2)

    # save file
    pm.saveAs(filename, force=1, type='mayaBinary')

    # rename back to original name
    pm.renameFile(scene_name)

    # create the render command
    mrc = MayaRenderCommandBuilder(name=job_name, file_full_path=filename,
                                   render_engine=render_engine,
                                   project=project_path,
                                   by_frame=by_frame)

    # submit renders
    jobs = []
    blocks = []

    #
    # separate_layers:
    # 1 -> None  -> submit one job with a single block with all layers
    # 2 -> Block -> submit one job with multiple blocks
    # 3 -> Job   -> submit multiple jobs with a single block per layer
    #
    if separate_layers in [1, 2]:
        job = af.Job(job_name)
        jobs.append(job)

    if separate_layers in [2, 3]:
        # render each layer separately
        rlm = pm.PyNode('renderLayerManager')
        layers = [
            layer for layer in rlm.connections(type=pm.nt.RenderLayer)
            if layer.renderable.get()
        ]

        for layer in layers:
            mrc_layer = copy.copy(mrc)
            layer_name = layer.name()
            mrc_layer.name = layer_name
            mrc_layer.render_layer = layer_name

            # create a new block for this layer
            block = af.Block(
                layer_name,
                renderer_to_block_type.get(render_engine, 'maya'))

            # Fix the output path for this layer
            # by replacing the "masterLayer" with the layer name
            # without rs_ at the beginning
            # NOTE(review): this is an alias, not a copy -- the item
            # assignments below mutate 'outputs' in place, which also
            # affects subsequent layers; confirm this is intended.
            layer_outputs = outputs
            if layer_name != 'defaultRenderLayer':
                layer_outputs[0] = outputs[0].replace(
                    'masterLayer', layer_name.replace('rs_', ''))
                layer_outputs[1] = outputs[1].replace(
                    'masterLayer', layer_name.replace('rs_', ''))

            if generate_previews:
                outputs_split = afcommon.patternFromDigits(
                    afcommon.patternFromStdC(
                        afcommon.patternFromPaths(
                            layer_outputs[0],
                            layer_outputs[1]))).split(';')
                block.setFiles(outputs_split)

            block.setNumeric(start_frame, end_frame, frames_per_task,
                             by_frame)
            command = mrc_layer.build_command()

            block.setErrorsAvoidHost(errors_avoid_host)
            block.setErrorsRetries(errors_retries)
            block.setErrorsTaskSameHost(errors_task_same_host)
            block.setErrorsForgiveTime(errors_forgive_time)

            block.setCommand(command)

            if separate_layers == 2:
                blocks.append(block)
            else:
                job = af.Job('%s - %s' % (job_name, layer_name))
                # add blocks
                job.blocks = [block]
                jobs.append(job)
    else:
        # create only one block
        block = af.Block(
            'All Layers',
            renderer_to_block_type.get(render_engine, 'maya'))

        if generate_previews:
            block.setFiles(
                afcommon.patternFromDigits(
                    afcommon.patternFromStdC(
                        afcommon.patternFromPaths(
                            outputs[0],
                            outputs[1]))).split(';'))

        block.setNumeric(start_frame, end_frame, frames_per_task, by_frame)
        command = mrc.build_command()
        block.setCommand(command)
        blocks.append(block)

    for job in jobs:
        job.setAnnotation(annotation)
        job.setFolder('input', os.path.dirname(filename))
        job.setFolder('output', os.path.dirname(outputs[0]))
        job.setDependMaskGlobal(depend_mask_global)
        job.setHostsMask(hosts_mask)
        job.setHostsMaskExclude(hosts_exclude)
        if life_time > 0:
            job.setTimeLife(life_time * 3600)
        else:
            # default job lifetime: 240 hours
            job.setTimeLife(240 * 3600)

        # the saved scene copy is deleted when the job is deleted
        job.setCmdPost('deletefiles -s "%s"' % os.path.abspath(filename))
        if pause:
            job.offline()

        # add blocks
        if separate_layers in [1, 2]:
            job.blocks.extend(blocks)

        for i in range(submit_multiple_times):
            orig_job_name = job.data['name']
            job.setName('%s - %03i' % (orig_job_name, i + 1))
            status, data = job.send()

            # restore job name
            job.setName(orig_job_name)
            if not status:
                pm.PopupError('Something went wrong!')

    # restore log level
    if render_engine == 'arnold':
        aro = pm.PyNode('defaultArnoldRenderOptions')
        aro.setAttr('log_verbosity', stored_log_level)
        # disable set output to console
        aro.setAttr("log_to_console", 0)
    elif render_engine == 'redshift':
        redshift = pm.PyNode('redshiftOptions')
        redshift.logLevel.set(stored_log_level)
        # disable abort on license fail
        redshift.abortOnLicenseFail.set(0)
def afanasySubmitJob_main(self, **connections):
    """Build and submit an Afanasy job from **connections values.

    Reads every setting from the connections dict with a fallback
    default, creates the main block plus optional autoDOD and Email
    blocks, and sends the job.

    :return: the job Name on success, None on any failure
    """
    # Read optional connection values, falling back to defaults.
    # NOTE(review): the bare 'except' clauses swallow every error,
    # including typos; narrowing to (KeyError, TypeError, ValueError)
    # would be safer.
    try:
        HostMask = str(connections["HostMask"])
    except:
        HostMask = ""
    try:
        HostExcludeMask = str(connections["HostExcludeMask"])
    except:
        HostExcludeMask = ""
    try:
        DODCommand = str(connections["DODCommand"])
    except:
        DODCommand = ""
    try:
        autoDOD = int(connections["autoDOD"])
    except:
        autoDOD = 0
    try:
        EmailCommand = str(connections["EmailCommand"])
    except:
        EmailCommand = ""
    try:
        MaxHosts = int(connections["MaxHosts"])
    except:
        MaxHosts = 8
    try:
        Comment = str(connections["Comment"])
    except:
        Comment = ""
    try:
        PostCommand = str(connections["PostCommand"])
    except:
        PostCommand = ""
    try:
        Command = str(connections["Command"])
    except:
        Command = ""
    try:
        workDir = str(connections["workDir"])
    except:
        workDir = ""
    try:
        Name = str(connections["Name"])
    except:
        Name = ""
    try:
        Type = str(connections["Type"])
    except:
        Type = ""
    try:
        Priority = int(connections["Priority"])
    except:
        Priority = 99
    try:
        startFrame = int(connections["startFrame"])
    except:
        startFrame = 1
    try:
        endFrame = int(connections["endFrame"])
    except:
        endFrame = 10
    try:
        blockSize = int(connections["blockSize"])
    except:
        blockSize = 1
    try:
        DependOn = str(connections["DependOn"])
    except:
        DependOn = ""
    try:
        BlockName = str(connections["BlockName"])
    except:
        BlockName = ""
    try:
        Capacity = int(connections["Capacity"])
    except:
        Capacity = 1000
    try:
        memory = int(connections["memory"])
    except:
        memory = 4096
    try:
        maxMult = int(connections["maxMult"])
    except:
        maxMult = 4

    # NOTE(review): 'Priority' is read above but never applied to the job.
    try:
        if Type == "":
            Type = "generic"

        job = af.Job(Name)
        if Comment != "":
            job.setDescription(Comment)
        else:
            job.setDescription('Afanasy Job.')
        job.setMaxHosts(MaxHosts)
        if HostMask != "":
            job.setHostsMask(HostMask)
        if HostExcludeMask != "":
            job.setHostsMaskExclude(HostExcludeMask)

        # Main render block.
        block1 = job.addBlock(Name + " " + BlockName, Type)
        block1.setCommand(Command)
        if DependOn != "":
            block1.setTasksDependMask(DependOn)
        block1.setWorkingDirectory(workDir)
        if PostCommand != "":
            block1.setCmdPost(PostCommand)
        block1.setNeedMemory(memory)
        block1.setVariableCapacity(1, maxMult)
        block1.setCapacity(Capacity)
        block1.setNumeric(startFrame, endFrame, blockSize)

        # Optional auto-DOD block, depends on the main block.
        if autoDOD != 0:
            block_dod = job.addBlock(Name + " " + "autoDOD", "nuke")
            block_dod.setCommand(DODCommand)
            block_dod.setDependMask(Name + " " + BlockName)
            block_dod.setWorkingDirectory(workDir)
            block_dod.setNeedMemory(memory)
            block_dod.setCapacity(500)
            block_dod.setNumeric(startFrame, endFrame, 10)

        # Optional e-mail notification block, depends on the last block.
        if EmailCommand != "":
            block_email = job.addBlock(Name + " " + "Email", "generic")
            block_email.setCommand(EmailCommand)
            if autoDOD != 0:
                block_email.setDependMask(Name + " " + "autoDOD")
            else:
                block_email.setDependMask(Name + " " + BlockName)
            block_email.setWorkingDirectory(workDir)
            block_email.setCapacity(10)
            block_email.setNumeric(1, 1, 1)

        job.send()
        return Name
    except:
        # NOTE(review): any submission error is silently converted to None.
        return None
    pass
def launch(self, *args, **kwargs):
    """launch renderer command

    Reads the submission options from the UI fields, saves a timestamped
    copy of the scene, builds one af.Block per renderable layer (or a
    single block), and sends the job to Afanasy.
    """
    # do nothing if there is no window (called externally)
    if not self.window:
        return

    # warn the user about the ignore settings
    try:
        dAO = pm.PyNode('defaultArnoldRenderOptions')

        ignore_attrs = [
            'ignoreSubdivision', 'ignoreDisplacement', 'ignoreBump',
            'ignoreMotionBlur'
        ]

        # collect only the attributes that are actually switched on
        attr_values = [(attr, dAO.getAttr(attr)) for attr in ignore_attrs
                       if dAO.getAttr(attr) is True]

        if any(attr_values):
            msg_text = '<br>'.join(
                map(lambda x: '%s: %s' % (x[0], x[1]), attr_values))

            response = pm.confirmDialog(
                title='Ignore These Settings?',
                message=
                'You have ignored:<br><br>%s<br><br><b>Is that ok?</b>' %
                msg_text,
                button=['Yes', 'No'],
                defaultButton='No',
                cancelButton='No',
                dismissString='No')

            if response == 'No':
                return
    except pm.MayaNodeError:
        # no Arnold
        pass

    # check if rendering with persp camera
    try:
        wrong_camera_names = [
            'perspShape', 'topShape', 'sideShape', 'fontShape',
            'persp1Shape', 'perspShape1',
        ]
        renderable_cameras = [
            node for node in pm.ls(type='camera')
            if node.getAttr('renderable')
        ]
        if any(
                map(lambda x: x.name() in wrong_camera_names,
                    renderable_cameras)):
            response = pm.confirmDialog(
                title='Rendering with Persp?',
                message=
                'You are rendering with <b>Persp Camera<b><br><br>Is that ok?</b>',
                button=['Yes', 'No'],
                defaultButton='No',
                cancelButton='No',
                dismissString='No')

            if response == 'No':
                return

        if len(renderable_cameras) > 1:
            response = pm.confirmDialog(
                title='Rendering more than one Camera?',
                message=
                'You are rendering <b>more than one camera<b><br><br>Is that ok?</b>',
                button=['Yes', 'No'],
                defaultButton='No',
                cancelButton='No',
                dismissString='No')

            if response == 'No':
                return
        elif len(renderable_cameras) == 0:
            pm.confirmDialog(
                title='No <b>Renderable</b> camera!!!',
                message='There is no <b>renderable camera<b>!!!',
                button=['Ok'],
                defaultButton='Ok',
                cancelButton='Ok',
                dismissString='Ok')
            return
    except pm.MayaNodeError:
        # no default render globals node
        pass

    # get values from the UI fields
    start_frame = pm.intField('cgru_afanasy__start_frame', q=1, v=1)
    end_frame = pm.intField('cgru_afanasy__end_frame', q=1, v=1)
    frames_per_task = \
        pm.intField('cgru_afanasy__frames_per_task', q=1, v=1)
    by_frame = pm.intField('cgru_afanasy__by_frame', q=1, v=1)
    hosts_mask = pm.textField('cgru_afanasy__hosts_mask', q=1, text=True)
    hosts_exclude = pm.textField('cgru_afanasy__hosts_exclude', q=1, text=True)
    separate_layers = \
        pm.checkBox('cgru_afanasy__separate_layers', q=1, v=1)
    pause = pm.checkBox('cgru_afanasy__paused', q=1, v=1)
    life_time = pm.intField('cgru_afanasy__life_time', q=1, v=1)

    # check values: swap a reversed frame range
    if start_frame > end_frame:
        temp = end_frame
        end_frame = start_frame
        start_frame = temp

    frames_per_task = max(1, frames_per_task)
    by_frame = max(1, by_frame)

    # store without quota sign
    hosts_mask = hosts_mask.replace('"', '')
    hosts_exclude = hosts_exclude.replace('"', '')

    # store field values so they survive to the next session
    pm.optionVar['cgru_afanasy__start_frame_ov'] = start_frame
    pm.optionVar['cgru_afanasy__end_frame_ov'] = end_frame
    pm.optionVar['cgru_afanasy__frames_per_task_ov'] = frames_per_task
    pm.optionVar['cgru_afanasy__by_frame_ov'] = by_frame
    pm.optionVar['cgru_afanasy__hosts_mask_ov'] = hosts_mask
    pm.optionVar['cgru_afanasy__hosts_exclude_ov'] = hosts_exclude
    pm.optionVar['cgru_afanasy__separate_layers_ov'] = separate_layers
    pm.optionVar['cgru_afanasy__life_time_ov'] = life_time

    # get paths; the datetime suffix makes the scene copy name unique
    scene_name = pm.sceneName()
    datetime = '%s%s' % (time.strftime('%y%m%d-%H%M%S-'),
                         str(time.time() - int(time.time()))[2:5])

    filename = '%s.%s.mb' % (scene_name, datetime)

    project_path = pm.workspace(q=1, rootDirectory=1)

    # get output paths, set the RenderPass token to Beauty,
    # this will at least guarantee to get something
    outputs = \
        pm.renderSettings(
            fullPath=1, firstImageName=1, lastImageName=1,
            leaveUnmatchedTokens=1, customTokenString="RenderPass=Beauty"
        )

    job_name = os.path.basename(scene_name)

    logger.debug('%ss %se %sr' % (start_frame, end_frame, by_frame))
    logger.debug('scene = %s' % scene_name)
    logger.debug('file = %s' % filename)
    logger.debug('job_name = %s' % job_name)
    logger.debug('project_path = %s' % project_path)
    logger.debug('outputs = %s' % outputs)

    if pm.checkBox('cgru_afanasy__close', q=1, v=1):
        pm.deleteUI(self.window)

    drg = pm.PyNode('defaultRenderGlobals')
    render_engine = drg.getAttr('currentRenderer')

    job = af.Job(job_name)

    stored_log_level = None
    if render_engine == 'arnold':
        # set the verbosity level to warning+info
        aro = pm.PyNode('defaultArnoldRenderOptions')
        stored_log_level = aro.getAttr('log_verbosity')
        aro.setAttr('log_verbosity', 1)
        # set output to console
        aro.setAttr("log_to_console", 1)
    elif render_engine == 'redshift':
        # set the verbosity level to detailed+info
        redshift = pm.PyNode('redshiftOptions')
        stored_log_level = redshift.logLevel.get()
        redshift.logLevel.set(2)

    # save file as the unique timestamped copy the farm will render
    pm.saveAs(filename, force=1, type='mayaBinary')
    # rename back to original name
    pm.renameFile(scene_name)

    # create the render command
    mrc = MayaRenderCommandBuilder(name=job_name,
                                   file_full_path=filename,
                                   render_engine=render_engine,
                                   project=project_path,
                                   by_frame=by_frame)

    # submit renders
    blocks = []
    if separate_layers:
        # render each layer separately
        rlm = pm.PyNode('renderLayerManager')
        layers = [
            layer for layer in rlm.connections()
            if layer.renderable.get()
        ]
        for layer in layers:
            mrc_layer = copy.copy(mrc)
            layer_name = layer.name()
            mrc_layer.name = layer_name
            mrc_layer.render_layer = layer_name

            # create a new block for this layer
            block = af.Block(
                layer_name,
                renderer_to_block_type.get(render_engine, 'maya'))
            block.setFiles(
                afcommon.patternFromDigits(
                    afcommon.patternFromStdC(
                        afcommon.patternFromPaths(
                            outputs[0], outputs[1]))).split(';'))
            block.setNumeric(start_frame, end_frame, frames_per_task,
                             by_frame)
            block.setCommand(mrc_layer.build_command())

            blocks.append(block)
    else:
        # create only one block
        block = af.Block('All Layers',
                         renderer_to_block_type.get(render_engine, 'maya'))
        block.setFiles(
            afcommon.patternFromDigits(
                afcommon.patternFromStdC(
                    afcommon.patternFromPaths(
                        outputs[0], outputs[1]))).split(';'))
        block.setNumeric(start_frame, end_frame, frames_per_task, by_frame)
        block.setCommand(mrc.build_command())

        blocks.append(block)

    job.setFolder('input', os.path.dirname(filename))
    job.setFolder('output', os.path.dirname(outputs[0]))
    job.setHostsMask(hosts_mask)
    job.setHostsMaskExclude(hosts_exclude)
    if life_time > 0:
        job.setTimeLife(life_time * 3600)

    # delete the temporary scene copy once the job is deleted
    job.setCmdPost('deletefiles "%s"' % os.path.abspath(filename))
    if pause:
        job.offline()

    # add blocks
    job.blocks.extend(blocks)
    status, data = job.send()
    if not status:
        pm.PopupError('Something went wrong!')
        print('data: %s' % data)

    # restore log level
    if render_engine == 'arnold':
        aro = pm.PyNode('defaultArnoldRenderOptions')
        aro.setAttr('log_verbosity', stored_log_level)
        # disable set output to console
        aro.setAttr("log_to_console", 0)
    elif render_engine == 'redshift':
        redshift = pm.PyNode('redshiftOptions')
        redshift.logLevel.set(stored_log_level)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Minimal example: submit a Maya batch render of frames 1-5 to Afanasy."""
import os
import af

render_block = af.Block('render', 'maya')
render_block.setCommand(
    'maya -batch -file %s/scene.mb -command "afanasyBatch(@#@,@#@,1,1)"'
    % os.getcwd())
render_block.setNumeric(1, 5, 2)

maya_job = af.Job('Maya Example')
maya_job.blocks.append(render_block)

# Print the constructed job for inspection, then submit it.
print('')
maya_job.output(True)
print('')
maya_job.send()
Out.append({'shot': Out_Shot}) # Skip on test or an existing shot: if Options.test or exists: continue # Copy an empty template for a shot: try: shutil.copytree(Options.template, shot_dest) except: errExit('Can`t create "%s"' % shot_dest) if Options.afanasy: job = af.Job('PUT ' + Options.dest) job.setUserName(Options.afuser) job.setMaxRunningTasks(Options.afmax) job.setMaxRunTasksPerHost(1) block = af.Block('put') block.setCapacity(Options.afcap) job.blocks.append(block) Put = os.environ['CGRU_LOCATION'] + '/utilities/put.py' Put = 'python "%s"' % os.path.normpath(Put) for shot in Out: if 'shot' in shot: shot = shot['shot']
import af

# Houdini tile-render example: first generate IFD files per frame, then
# render every frame split into divx * divy mantra tiles.
scene = 'scene.hip'
rop = '/out/ifd'
ifd = 'render/scene.@####@.ifd'    # Afanasy frame-pattern form
ifdd = 'render/scene.%04d.ifd'     # printf form for explicit frames
img = 'render/img.@####@.exr'
imgd = 'render/img.%04d.exr'
f_start = 1
f_finish = 10
divx = 3
divy = 2
tiles = divx * divy

job = af.Job('Houdini Test: Tile Render')

# Block 1: hbatch tasks producing one IFD per frame.
b_genifd = af.Block('generate ifd', 'hbatch')
b_genifd.setCommand('hrender_af -s @#@ -e @#@ %s %s' % (scene, rop))
b_genifd.setNumeric(f_start, f_finish)

# Block 2: mantra tile tasks; waits on the IFD block.
b_render = af.Block('render tiles', 'mantra')
b_render.setCommand('mantrarender tc %(divx)d %(divy)d @#@' % vars())
b_render.setTasksDependMask('generate ifd')
# negative frames-per-task groups *tiles* tasks per frame
b_render.setFramesPerTask(-tiles)
for f in range(f_start, f_finish + 1):
    cmd = ' -R -f ' + ifdd % f
    for t in range(0, tiles):
        task = af.Task('%d tile %d' % (f, t))
        task.setCommand(str(t) + cmd)
        task.setFiles((imgd % f) + ('.tile_%d.exr' % t))
def SubmitButton_OnClicked():
    """Softimage|XSI submit handler: read options, tweak per-pass renderer
    settings, copy the scene to a temp file and send one Afanasy job per pass.
    """
    opSet = Application.ActiveSceneRoot.Properties('afSubmitProperties')
    if (opSet == None):
        Application.LogMessage('Error: Can\'t find options.')
        PPG.Close()
        # NOTE(review): no "return" here — the code below will still run and
        # fail on opSet.Parameters(...); confirm a return is intended.

    # Save scene:
    #Application.SaveScene()
    scene = Application.ActiveProject.ActiveScene
    scenefile = scene.Filename.Value
    if not os.path.isfile(scenefile):
        Application.LogMessage('Error: you need to save first.', 2)
        return

    # Read all submission options from the property set:
    range_frompass = opSet.Parameters('afRange_frompass').Value
    range_forcepass = opSet.Parameters('afRange_forcepass').Value
    frame_start = opSet.Parameters('afFrame_start').Value
    frame_end = opSet.Parameters('afFrame_end').Value
    frame_by = opSet.Parameters('afFrame_by').Value
    frame_fpt = opSet.Parameters('afFrame_fpt').Value
    passesOption = opSet.Parameters('afRenderPass').Value
    jobname = opSet.Parameters('afJobName').Value
    priority = opSet.Parameters('afPriority').Value
    capacity = opSet.Parameters('afCapacity').Value
    simulate = opSet.Parameters('afSimulate').Value
    paused = opSet.Parameters('afStartPaused').Value
    maxhosts = opSet.Parameters('afMaxHosts').Value
    maxruntime = opSet.Parameters('afMaxRunTime').Value
    hostsmask = opSet.Parameters('afHostsMask').Value
    hostsmaskexclude = opSet.Parameters('afHostsMaskExclude').Value
    dependmask = opSet.Parameters('afDependMask').Value
    dependmaskglobal = opSet.Parameters('afDependMaskGlobal').Value
    varirender = opSet.Parameters('afVariRender').Value
    varirender_attr = opSet.Parameters('afVariRenderAttr').Value
    varirender_start = opSet.Parameters('afVariRenderStart').Value
    varirender_step = opSet.Parameters('afVariRenderStep').Value
    varirender_count = opSet.Parameters('afVariRenderCount').Value
    ArnoldWatermarked = opSet.Parameters('afArnoldWater').Value
    TempScenePath = opSet.Parameters('afTempScenePath').Value
    ArnoldAutoThread = opSet.Parameters('afArnoldAutoThread').Value
    UseTemp = opSet.Parameters('afUseTemp').Value
    SkipFrame = opSet.Parameters('afSkipFrame').Value
    ArnoldThreadsBool = opSet.Parameters('afArnoldThreadsBool').Value
    ArnoldThreadsCount = opSet.Parameters('afArnoldThreadsCount').Value
    Bucket = opSet.Parameters('afBucket').Value
    Stillimage = opSet.Parameters('afStillimage').Value
    Progressive = opSet.Parameters('afProgressive').Value
    RayReserved = opSet.Parameters('afRayReserved').Value

    # Sanitize the frame range values:
    if frame_end < frame_start:
        frame_end = frame_start
    if frame_by < 1:
        frame_by = 1
    if frame_fpt < 1:
        frame_fpt = 1

    # Collect the pass names to submit:
    passes = []
    if passesOption == '_all_':
        for cpass in scene.Passes:
            passes.append(cpass.Name)
    elif passesOption == '_selected_':
        selection = []
        for selected in Application.Selection:
            selection.append(selected.Name)
        for cpass in scene.Passes:
            if cpass.Name in selection:
                passes.append(cpass.Name)
    elif passesOption == '_current_':
        passes.append(scene.ActivePass.Name)
    else:
        passes.append(passesOption)

    padding = Application.GetValue('Passes.RenderOptions.FramePadding')
    sceneRenderer = Application.GetValue('Passes.RenderOptions.Renderer')
    Application.logmessage(sceneRenderer)
    """
    # CHECK SETTINGS
    # Frame Set
    range_setting = Application.GetValue('Passes.RenderOptions.FrameRangeSource')
    Application.logmessage('BLAAAAAA: %s' % range_setting)
    if Application.GetValue('Passes.%s.FrameRangeSource' % cpass) == 1:
        Application.LogMessage('Sorry _Frame Set_ is not yet supported....aborting..., Pass:%s ' % cpass, 2)
        return
    # Timeline
    if Application.GetValue('Passes.%s.FrameRangeSource' % cpass) == 2:
        Application.LogMessage('Sorry _Timeline_ is not supported....aborting..., Pass:%s ' % cpass, 2)
        return
    """
    #just run through to set new settings....
    for cpass in passes:
        # Get framebuffers:
        for ps in scene.Passes:
            if ps.Name != cpass:
                continue
            curRenderer = Application.GetValue('Passes.%s.Renderer' % cpass)
            if (curRenderer == ''):
                curRenderer = sceneRenderer
            Application.logmessage('Scene Renderer: %s' % curRenderer)

            # SkipFrames
            if (SkipFrame == True):
                Application.SetValue(
                    'Passes.%s.FrameSkipRendered' % cpass, True, '')
                Application.logmessage('skip frames: ON')
            else:
                Application.SetValue(
                    'Passes.%s.FrameSkipRendered' % cpass, False, '')
                Application.logmessage('skip frames: OFF')

            # MENTAL RAY SPECIFIC
            if (curRenderer == 'Mental Ray'):
                nudel = Application.SetValue(
                    'Passes.%s.mentalray.TileSize' % cpass, Bucket, '')

            # ARNOLD SPECIFIC
            if (curRenderer == 'Arnold Render'):
                Application.logmessage(
                    'found Arnold pass, setting new values.....')
                # reset license values
                Log('-- just to be sure, reset of license checkboxes --')
                nudel = Application.SetValue(
                    'Passes.%s.Arnold_Render_Options.skip_license_check'
                    % cpass, True, '')
                nudel = Application.SetValue(
                    'Passes.%s.Arnold_Render_Options.skip_license_check'
                    % cpass, False, '')
                nudel = Application.SetValue(
                    'Passes.%s.Arnold_Render_Options.abort_on_license_fail'
                    % cpass, False, '')
                # Arnold watermarked?
                if ArnoldWatermarked:
                    nudel = Application.SetValue(
                        'Passes.%s.Arnold_Render_Options.skip_license_check'
                        % cpass, True, '')
                    Application.logmessage('set to watermarked.....')
                else:
                    nudel = Application.SetValue(
                        'Passes.%s.Arnold_Render_Options.abort_on_license_fail'
                        % cpass, True, '')
                # force autothread?
                if ArnoldAutoThread:
                    nudel = Application.SetValue(
                        'Passes.%s.Arnold_Render_Options.autodetect_threads'
                        % cpass, 1, '')
                else:
                    if ArnoldThreadsBool:
                        Application.logmessage('limiting threads......')
                        nudel = Application.SetValue(
                            'Passes.%s.Arnold_Render_Options.autodetect_threads'
                            % cpass, 0, '')
                        nudel = Application.SetValue(
                            'Passes.%s.Arnold_Render_Options.threads'
                            % cpass, ArnoldThreadsCount, '')
                # bucketsize
                Application.logmessage('forcing bucketsize....')
                nudel = Application.SetValue(
                    'Passes.%s.Arnold_Render_Options.bucket_size'
                    % cpass, Bucket, '')
                if Stillimage:
                    Application.logmessage(
                        'tiled EXRs, no autocrop, top tiles')
                    nudel = Application.SetValue(
                        'Passes.%s.Arnold_Render_Options.bucket_scanning'
                        % cpass, "top", '')
                    nudel = Application.SetValue(
                        'Passes.%s.Arnold_Render_Options.output_exr_tiled'
                        % cpass, 1, '')
                    nudel = Application.SetValue(
                        'Passes.%s.Arnold_Render_Options.output_exr_autocrop'
                        % cpass, False, '')
                else:
                    Application.logmessage(
                        'scanline EXRs, autocrop, spiral tiles')
                    nudel = Application.SetValue(
                        'Passes.%s.Arnold_Render_Options.bucket_scanning'
                        % cpass, "spiral", '')
                    nudel = Application.SetValue(
                        'Passes.%s.Arnold_Render_Options.output_exr_tiled'
                        % cpass, 0, '')
                    nudel = Application.SetValue(
                        'Passes.%s.Arnold_Render_Options.output_exr_autocrop'
                        % cpass, True, '')
                # force logging
                nudel = Application.SetValue(
                    'Passes.%s.Arnold_Render_Options.enable_log_console'
                    % cpass, 1, '')
                nudel = Application.SetValue(
                    'Passes.%s.Arnold_Render_Options.enable_log_file'
                    % cpass, 1, '')
                nudel = Application.SetValue(
                    'Passes.%s.Arnold_Render_Options.log_level'
                    % cpass, 2, '')

            # REDSHIFT SPECIFIC
            if (curRenderer == 'Redshift'):
                Application.logmessage(
                    'found Redshift pass, setting new values.....')
                if Progressive:
                    nudel = Application.SetValue(
                        'Passes.%s.Redshift_Options.ProgressiveRenderingEnabled'
                        % cpass, 1, '')
                else:
                    nudel = Application.SetValue(
                        'Passes.%s.Redshift_Options.ProgressiveRenderingEnabled'
                        % cpass, 0, '')
                if Stillimage:
                    Application.logmessage(
                        'tiled EXRs, no autocrop, top tiles')
                    nudel = Application.SetValue(
                        'Passes.%s.Redshift_Options.ExrIsTiled' % cpass, 1, '')
                    nudel = Application.SetValue(
                        'Passes.%s.Redshift_Options.Autocrop' % cpass,
                        False, '')
                    nudel = Application.SetValue(
                        'Passes.%s.Redshift_Options.BucketOrder' % cpass,
                        2, '')
                else:
                    Application.logmessage(
                        'scanline EXRs, autocrop, spiral tiles')
                    nudel = Application.SetValue(
                        'Passes.%s.Redshift_Options.ExrIsTiled' % cpass, 0, '')
                    nudel = Application.SetValue(
                        'Passes.%s.Redshift_Options.Autocrop' % cpass,
                        True, '')
                    nudel = Application.SetValue(
                        'Passes.%s.Redshift_Options.BucketOrder' % cpass,
                        1, '')
                # force logging
                nudel = Application.SetValue(
                    'Passes.%s.Redshift_Options.LogLevel' % cpass, 2, '')
                # bucketsize
                Application.logmessage('forcing bucketsize....')
                nudel = Application.SetValue(
                    'Passes.%s.Redshift_Options.BucketSize' % cpass,
                    Bucket, '')
                # rayreserved
                Application.logmessage('forcing rayreservedmemory....')
                nudel = Application.SetValue(
                    'Passes.%s.Redshift_Options.NumGPUMBToReserveForRays'
                    % cpass, RayReserved, '')

    # save the changes
    Application.SaveScene()
    framenumbers = []

    # run through and output the jobs ---------------------------------------------------------
    for cpass in passes:
        images = []
        images_array = []

        # Get frame range:
        cp_frame_start = frame_start
        cp_frame_end = frame_end
        cp_frame_by = frame_by
        if not range_forcepass:
            # Frame Range
            if Application.GetValue(
                    'Passes.%s.FrameRangeSource' % cpass) == 0:
                cp_frame_start = Application.GetValue(
                    'Passes.%s.FrameStart' % cpass)
                cp_frame_end = Application.GetValue(
                    'Passes.%s.FrameEnd' % cpass)
                cp_frame_by = Application.GetValue(
                    'Passes.%s.FrameStep' % cpass)
                Application.LogMessage('found _Frame Range_ setting...')
            # Frame Set
            if Application.GetValue(
                    'Passes.%s.FrameRangeSource' % cpass) == 1:
                Application.LogMessage(
                    'Sorry _Frame Set_ is not yet supported....aborting..., Pass:%s '
                    % cpass, 2)
                return
            # Timeline
            if Application.GetValue(
                    'Passes.%s.FrameRangeSource' % cpass) == 2:
                #cp_frame_start = Application.GetValue('PlayControl.In')
                #cp_frame_end = Application.GetValue('PlayControl.Out')
                #cp_frame_by = 1
                #Application.LogMessage('found _Timeline_ setting...')
                Application.LogMessage(
                    'Sorry _Timeline_ is not supported....aborting..., Pass:%s '
                    % cpass, 2)
                return
            # Scene Render Options
            if Application.GetValue(
                    'Passes.%s.FrameRangeSource' % cpass) == 3:
                cp_frame_start = Application.GetValue(
                    'Passes.RenderOptions.FrameStart')
                cp_frame_end = Application.GetValue(
                    'Passes.RenderOptions.FrameEnd')
                cp_frame_by = Application.GetValue(
                    'Passes.RenderOptions.FrameStep')
                Application.LogMessage(
                    'found _Scene Render Options_ setting...')

        # Get framebuffers: collect output image paths with the frame number
        # replaced by the 'PADolgaPAD' placeholder (re-padded further below).
        for ps in scene.Passes:
            if ps.Name != cpass:
                continue
            curRenderer = Application.GetValue('Passes.%s.Renderer' % cpass)
            if (curRenderer == ''):
                curRenderer = sceneRenderer
            Log(curRenderer)
            for fb in ps.Framebuffers:
                if fb.Enabled.Value:
                    format = fb.Format.Value
                    filename = fb.ResolvedFilename.Value
                    pattern = r'\d+.' + format + '$'
                    match = re.search(pattern, filename)
                    if match is not None:
                        part = match.group(0)
                        match = re.search(r'\d+', part)
                        if match is not None:
                            num = match.group(0)
                            pad = 'PADolgaPAD'
                            framenumbers.append(num)
                            newpart = part.replace(num, pad)
                            filename = filename.replace(part, newpart)
                        Log('renderfilename Path: "%s"' % filename)
                        #images.append(filename)
                        images_array.append(filename)
                    else:
                        Application.LogMessage(
                            'Can`t solve "%s". Exiting.....' % filename, 2)
                        return

        # Copy scene to temporary file:
        curjobname = jobname
        if len(passes) > 1:
            curjobname += '-%s' % cpass
        ftime = time.time()
        if UseTemp:
            tmpscene = TempScenePath + curjobname + time.strftime(
                '.%m%d-%H%M%S-') + str(ftime - int(ftime))[2:5] + '.scn'
        else:
            tmpscene = scenefile + '.' + curjobname + time.strftime(
                '.%m%d-%H%M%S-') + str(ftime - int(ftime))[2:5] + '.scn'
        try:
            # shutil.copyfile( scenefile, tmpscene)
            if sys.platform == 'win32':
                os.popen(
                    'cmd /C copy "%s" "%s" /Y' % (scenefile, tmpscene), 'r')
                Application.LogMessage(
                    'Windows platform detected.. using copy "%s" "%s" /Y'
                    % (scenefile, tmpscene))
            else:
                shutil.copy(scenefile, tmpscene)
        except:
            Application.LogMessage('Unable to copy temporary scene:', 2)
            Application.LogMessage(tmpscene)
            Application.LogMessage(str(sys.exc_info()[1]))
            return
        if not os.path.isfile(tmpscene):
            Application.LogMessage('Error: Can\'t save temporary scene.', 2)
            return

        # Construct job:
        # -----------------------------------------------------------------------------
        if (curRenderer == 'Arnold Render'):
            if ArnoldWatermarked:
                Application.LogMessage(
                    'Sending Arnold pass "%s" watermarked, range: %d-%d,%d'
                    % (cpass, cp_frame_start, cp_frame_end, cp_frame_by))
            else:
                Application.LogMessage(
                    'Sending Arnold pass "%s" licensed, range: %d-%d,%d'
                    % (cpass, cp_frame_start, cp_frame_end, cp_frame_by))
        if (curRenderer == 'mental ray'):
            Application.LogMessage(
                'Sending MentalRay pass "%s", range: %d-%d,%d'
                % (cpass, cp_frame_start, cp_frame_end, cp_frame_by))
        if (curRenderer == 'Redshift'):
            Application.LogMessage(
                'Sending Redshift pass "%s", range: %d-%d,%d'
                % (cpass, cp_frame_start, cp_frame_end, cp_frame_by))

        #xsibatch -script "%XSI_CGRU_PATH%\afrender.py" -lang Python -main afRenderCurPass -args
        #-scenePath "%CD%\project\Scenes\scene.scn" -startFrame 1 -endFrame 2 -step 1 -simulate 0 -setAttr torus.polymsh.geom.enduangle -setValue 120
        blocknames = []
        blockcmds = []
        blockimages = []
        #blockframenumbers = []
        images_str = ''
        cmd = os.environ['XSI_CGRU_PATH']
        cmd = os.path.join(cmd, 'afrender.py')
        cmd = 'xsibatch -script %s' % cmd
        cmd += ' -lang Python -main afRender -args'
        cmd += ' -scene "%s"' % tmpscene
        cmd += ' -start @#@ -end @#@ -step ' + str(cp_frame_by)
        cmd += ' -simulate'
        if simulate:
            cmd += ' 1'
        else:
            cmd += ' 0'
        cmd += ' -renderPass ' + cpass

        if varirender:
            # One block per attribute value step:
            cmd += ' -attr ' + varirender_attr + ' -value '
            value = varirender_start
            for i in range(0, varirender_count):
                blockcmds.append(cmd + str(value))
                blocknames.append('var_value [%d]' % value)
                images_str = ''
                # NOTE(review): iterates 'images', which is never populated
                # above (only 'images_array' is) — confirm intended.
                for img in images:
                    img_dir = os.path.dirname(img)
                    img_name = os.path.basename(img)
                    img_dir = os.path.join(img_dir, str(value))
                    img = os.path.join(img_dir, img_name)
                    if images_str != '':
                        images_str += ';'
                    images_str += img
                    images_array.append(img)
                blockimages.append(images_str)
                value += varirender_step
        else:
            # label the Blocks
            if (curRenderer == 'Arnold Render'):
                if ArnoldWatermarked:
                    blockname = 'Arnold watermarked'
                else:
                    blockname = 'Arnold LICENSED'
            if (curRenderer == 'mental ray'):
                blockname = 'Mental Ray'
            if (curRenderer == 'Redshift'):
                blockname = 'Redshift'
            #Fallback, every other renderer
            if (curRenderer != 'Redshift') and (
                    curRenderer != 'mental ray') and (curRenderer !=
                                                      'Arnold Render'):
                blockname = 'Mental Ray'
                Application.LogMessage(
                    'Warning: selected Renderer not directly supported, Afanasy will treat it like a MentalRay job, but will try to render it with the selected renderer'
                )

        #Log(images_array)
        job = af.Job(curjobname + ' -- ' + cpass)
        # delete the temporary scene when the job is deleted
        job.setCmdPost(str('deletefiles "%s"' % os.path.abspath(tmpscene)))
        if priority != -1:
            job.setPriority(priority)
        if maxhosts != -1:
            job.setMaxRunningTasks(maxhosts)
        if hostsmask != None and hostsmask != '':
            job.setHostsMask(hostsmask)
        if hostsmaskexclude != None and hostsmaskexclude != '':
            job.setHostsMaskExclude(hostsmaskexclude)
        if dependmask != None and dependmask != '':
            job.setDependMask(dependmask)
        if dependmaskglobal != None and dependmaskglobal != '':
            job.setDependMaskGlobal(dependmaskglobal)
        if paused:
            job.offLine()
        if len(blocknames) == 0:
            Log("inside the blocknames is zero ifblock")
            blocknames.append(blockname)
            blockcmds.append(cmd)
            #blockimages.append( images_str)
            blockimages.append(images_array)

        # SET IMAGE PREVIEWS --------------------------------------------------
        i = 0
        fb_index = 0
        for blockname in blocknames:
            # counter for the image array
            currentFrame = cp_frame_start
            for framebufferPaths in blockimages[i]:
                Log('FOR BLOCK: "%d"' % fb_index)
                Log('array size: "%d"' % len(images_array))
                #Every Frame gets the correct padded first frame of the sequence for viewers like djv
                if padding > 1:
                    p = 0
                    Log('original framebufferPaths: "%s"' % framebufferPaths)
                    Log('padding: "%d"' % padding)
                    if len(str(currentFrame)) < padding:
                        pad = str(currentFrame)
                        for p in range(0, (padding - len(str(currentFrame)))):
                            pad = '0' + pad
                    else:
                        pad = str(currentFrame)
                    framebufferPaths = framebufferPaths.replace(
                        'PADolgaPAD', pad)
                    blockimages[i][fb_index] = str(framebufferPaths)
                    Log('blockimage after : "%s"' % blockimages[i][fb_index])
                    Log('framebufferPaths after : "%s"' % framebufferPaths)
                    Log('pad after replace: "%s"' % pad)
                fb_index = fb_index + 1
            #m = m + cp_frame_by
            i = i + 1

        # Set SERVICES --------------------------------------------------
        i = 0
        for blockname in blocknames:
            Log(' blockname: "%s"' % blockname)
            if (curRenderer == 'Arnold Render'):
                if ArnoldWatermarked:
                    block = af.Block(blockname, 'xsi_arnold_watermarked')
                else:
                    block = af.Block(blockname, 'xsi_arnold')
            if (curRenderer == 'mental ray'):
                block = af.Block(blockname, 'xsi')
            if (curRenderer == 'Redshift'):
                block = af.Block(blockname, 'xsi_redshift')
            block.setCommand(str(blockcmds[i]))
            block.setFiles(blockimages[i])
            block.setNumeric(cp_frame_start, cp_frame_end, frame_fpt,
                             cp_frame_by)
            if capacity != -1:
                block.setCapacity(capacity)
            if maxruntime != 0:
                block.setTasksMaxRunTime(int(maxruntime * 3600))
            job.blocks.append(block)
            i += 1

        # Send job:
        if not job.send()[0]:
            Application.LogMessage('Error: Can\'t send job to server.')
            os.remove(tmpscene)
# -*- coding: utf-8 -*-
"""Smallest possible Afanasy submission: one job, one block, one task."""
import af

example_task = af.Task('simple task')
example_task.setCommand('ls -l')

task_block = af.Block('block of tasks')
task_block.setWorkingDirectory('/home')
task_block.tasks.append(example_task)

example_job = af.Job('example job')
example_job.blocks.append(task_block)
example_job.send()
def SubmitButton_OnClicked():
    """Softimage|XSI submit handler (simpler variant): read options, copy
    the scene to a temp file and send one Afanasy job per selected pass.
    """
    opSet = Application.ActiveSceneRoot.Properties('afSubmitProperties')
    if opSet is None:
        Application.LogMessage('Error: Can\'t find options.')
        PPG.Close()
        # NOTE(review): no "return" here — the code below will still run and
        # fail on opSet.Parameters(...); confirm a return is intended.

    # Save scene:
    Application.SaveScene()
    scene = Application.ActiveProject.ActiveScene
    scenefile = scene.Filename.Value
    if not os.path.isfile(scenefile):
        Application.LogMessage('Error: Can\'t save scene.')
        return

    # Read all submission options from the property set:
    range_frompass = opSet.Parameters('afRange_frompass').Value
    range_forcepass = opSet.Parameters('afRange_forcepass').Value
    frame_start = opSet.Parameters('afFrame_start').Value
    frame_end = opSet.Parameters('afFrame_end').Value
    frame_by = opSet.Parameters('afFrame_by').Value
    frame_fpt = opSet.Parameters('afFrame_fpt').Value
    passesOption = opSet.Parameters('afRenderPass').Value
    jobname = opSet.Parameters('afJobName').Value
    priority = opSet.Parameters('afPriority').Value
    capacity = opSet.Parameters('afCapacity').Value
    simulate = opSet.Parameters('afSimulate').Value
    paused = opSet.Parameters('afStartPaused').Value
    maxhosts = opSet.Parameters('afMaxHosts').Value
    maxruntime = opSet.Parameters('afMaxRunTime').Value
    hostsmask = opSet.Parameters('afHostsMask').Value
    hostsmaskexclude = opSet.Parameters('afHostsMaskExclude').Value
    dependmask = opSet.Parameters('afDependMask').Value
    dependmaskglobal = opSet.Parameters('afDependMaskGlobal').Value
    varirender = opSet.Parameters('afVariRender').Value
    varirender_attr = opSet.Parameters('afVariRenderAttr').Value
    varirender_start = opSet.Parameters('afVariRenderStart').Value
    varirender_step = opSet.Parameters('afVariRenderStep').Value
    varirender_count = opSet.Parameters('afVariRenderCount').Value

    # Sanitize the frame range values:
    if frame_end < frame_start:
        frame_end = frame_start
    if frame_by < 1:
        frame_by = 1
    if frame_fpt < 1:
        frame_fpt = 1

    # Collect the pass names to submit:
    passes = []
    if passesOption == '_all_':
        for cpass in scene.Passes:
            passes.append(cpass.Name)
    elif passesOption == '_selected_':
        selection = []
        for selected in Application.Selection:
            selection.append(selected.Name)
        for cpass in scene.Passes:
            if cpass.Name in selection:
                passes.append(cpass.Name)
    elif passesOption == '_current_':
        passes.append(scene.ActivePass.Name)
    else:
        passes.append(passesOption)

    padding = Application.GetValue('Passes.RenderOptions.FramePadding')

    for cpass in passes:
        images = []
        # Get framebuffers: convert each resolved filename's frame number
        # into a printf-style pattern ('%0<padding>d').
        for ps in scene.Passes:
            if ps.Name != cpass:
                continue
            for fb in ps.Framebuffers:
                if fb.Enabled.Value:
                    format = fb.Format.Value
                    filename = fb.ResolvedFilename.Value
                    pattern = r'\d+.' + format + '$'
                    match = re.search(pattern, filename)
                    if match is not None:
                        part = match.group(0)
                        match = re.search(r'\d+', part)
                        if match is not None:
                            num = match.group(0)
                            pad = '%'
                            if padding > 1:
                                pad += '0' + str(padding)
                            pad += 'd'
                            newpart = part.replace(num, pad)
                            filename = filename.replace(part, newpart)
                        images.append(filename)
                    else:
                        Application.LogMessage('Can`t solve "%s"' % filename)

        # Copy scene to temporary file:
        curjobname = jobname
        if len(passes) > 1:
            curjobname += '-%s' % cpass
        ftime = time.time()
        tmpscene = '%s.%s%s%s.scn' % (scenefile, curjobname,
                                      time.strftime('.%m%d-%H%M%S-'),
                                      str(ftime - int(ftime))[2:5])
        try:
            shutil.copyfile(scenefile, tmpscene)
        except Exception as e:
            Application.LogMessage('Unable to copy temporary scene:')
            Application.LogMessage(tmpscene)
            Application.LogMessage(str(e))
            return
        if not os.path.isfile(tmpscene):
            Application.LogMessage('Error: Can\'t save temporary scene.')
            return

        # Get frame range:
        cp_frame_start = frame_start
        cp_frame_end = frame_end
        cp_frame_by = frame_by
        if not range_forcepass:
            if range_frompass:
                if Application.GetValue(
                        'Passes.%s.FrameRangeSource' % cpass) == 0:
                    cp_frame_start = \
                        Application.GetValue('Passes.%s.FrameStart' % cpass)
                    cp_frame_end = \
                        Application.GetValue('Passes.%s.FrameEnd' % cpass)
                    cp_frame_by = \
                        Application.GetValue('Passes.%s.FrameStep' % cpass)

        # Construct job:
        Application.LogMessage(
            'Sending "%s" pass, range: %d-%d,%d'
            % (cpass, cp_frame_start, cp_frame_end, cp_frame_by))

        #xsibatch -script "%XSI_CGRU_PATH%\afrender.py" -lang Python -main afRenderCurPass -args
        #-scenePath "%CD%\project\Scenes\scene.scn" -startFrame 1 -endFrame 2 -step 1 -simulate 0 -setAttr torus.polymsh.geom.enduangle -setValue 120
        blocknames = []
        blockcmds = []
        blockimages = []
        cmd = os.environ['XSI_CGRU_PATH']
        cmd = os.path.join(cmd, 'afrender.py')
        cmd = 'xsibatch -script %s' % cmd
        cmd += ' -lang Python -main afRender -args'
        cmd += ' -scene "%s"' % tmpscene
        cmd += ' -start @#@ -end @#@ -step ' + str(cp_frame_by)
        cmd += ' -simulate'
        if simulate:
            cmd += ' 1'
        else:
            cmd += ' 0'
        cmd += ' -renderPass ' + cpass

        if varirender:
            # One block per attribute value step:
            cmd += ' -attr ' + varirender_attr + ' -value '
            value = varirender_start
            for i in range(0, varirender_count):
                blockcmds.append(cmd + str(value))
                blocknames.append('variant[%d]' % value)
                images_str = ''
                for img in images:
                    img_dir = os.path.dirname(img)
                    img_name = os.path.basename(img)
                    img_dir = os.path.join(img_dir, str(value))
                    img = os.path.join(img_dir, img_name)
                    if images_str != '':
                        images_str += ';'
                    images_str += img
                blockimages.append(images_str)
                value += varirender_step
        else:
            blockname = 'xsi'
            images_str = ''
            for img in images:
                if images_str != '':
                    images_str += ';'
                images_str += img

        job = af.Job(curjobname)
        # delete the temporary scene when the job is deleted
        job.setCmdPost(str('deletefiles "%s"' % os.path.abspath(tmpscene)))
        if priority != -1:
            job.setPriority(priority)
        if maxhosts != -1:
            job.setMaxHosts(maxhosts)
        if hostsmask is not None and hostsmask != '':
            job.setHostsMask(hostsmask)
        if hostsmaskexclude is not None and hostsmaskexclude != '':
            job.setHostsMaskExclude(hostsmaskexclude)
        if dependmask is not None and dependmask != '':
            job.setDependMask(dependmask)
        if dependmaskglobal is not None and dependmaskglobal != '':
            job.setDependMaskGlobal(dependmaskglobal)
        if paused:
            job.offLine()
        if len(blocknames) == 0:
            blocknames.append(blockname)
            blockcmds.append(cmd)
            blockimages.append(images_str)

        i = 0
        for blockname in blocknames:
            block = af.Block(blockname, 'xsi')
            block.setCommand(str(blockcmds[i]))
            block.setFiles([str(blockimages[i])])
            block.setNumeric(cp_frame_start, cp_frame_end, frame_fpt,
                             cp_frame_by)
            if capacity != -1:
                block.setCapacity(capacity)
            if maxruntime != 0:
                block.setTasksMaxRunTime(int(maxruntime * 3600))
            job.blocks.append(block)
            i += 1

        # Send job:
        if not job.send()[0]:
            Application.LogMessage('Error: Can\'t send job to server.')
            os.remove(tmpscene)
# Load the data that PHP sent us try: data = json.loads(sys.argv[1]) except: print "ERROR" sys.exit(1) # Generate some data to send to PHP result = {'status': 'Yes!'} #folder = "/var/www/owncloud/" + data["folder"] # Send it to stdout (to PHP) # Create a job job = af.Job(data["scene"]) job.setUserName(data["user"]) # Set maximum tasks that can be executed simultaneously job.setMaxRunningTasks(5) # Create a block with provided name and service type block = af.Block('blenderRender', 'blender') # Set block tasks working directory block.setWorkingDirectory('/var/www/html/owncloud/data/' + data["user"] + '/files' + data["directory"] + '/') # Set block tasks command block.setCommand('blender -b \"/var/www/html/owncloud/data/' + data["user"] + '/files/' + data["file_path"] +
# NOTE(review): fragment of a maintenance-job submit script; `Args`, `Options`
# and `Parser` (an optparse/argparse parser, presumably) are defined above the
# visible chunk, and the job is sent below it — verify against the full file.

# Command-line layout: all arguments but the last are render host names,
# the last argument is the command to execute on each of them.
Renders = Args[:-1]
if len(Renders) == 0:
    Parser.error("No renders found.")

Command = Args[-1]

# Default job name: first word of the command.
JobName = Options.jobname
if JobName is None:
    JobName = Command.split(' ')[0]

if Options.verbose:
    print('JobName: %s' % JobName)
    print('Renders: %s' % (','.join(Renders)))
    print('Command: %s' % Command)

# A maintenance job: one task per render host, same command everywhere.
job = af.Job(JobName)
job.setMaintenance()

block = af.Block('Maintenance', Options.service)
block.setParser(Options.parser)
block.setCommand(Command)

for render in Renders:
    task = af.Task(render)
    block.tasks.append(task)

job.blocks.append(block)

if Options.maxruntasks:
    job.setMaxRunningTasks(Options.maxruntasks)
# NOTE(review): fragment of an Afanasy test script that creates jobs with
# "empty" tasks; `options` and `jobname` are defined above the visible chunk,
# and block construction continues below it.

numtasks = options.numtasks
increment = options.increment
verbose = options.verbose
xcopy = options.xcopy

frames = options.frames.split(',')
if options.frames != '':
    # One block per explicitly listed frame range.
    numblocks = len(frames)

# At least one copy of the job:
if xcopy < 1:
    xcopy = 1

if jobname == '':
    jobname = '_empty_'

job = af.Job(jobname)
job.setDescription('afanasy test - empty tasks')
# Test job folders shown in the GUI:
job.setFolder('pwd', os.getcwd())
job.setFolder('test', '/this/is/a/test')
if options.folder and len(options.folder):
    job.setFolder('folder', options.folder)

# Block names: colon-separated labels, or a single default 'block'.
blocknames = []
if options.labels != '':
    blocknames = options.labels.split(':')
else:
    blocknames.append('block')

# Block service types, colon-separated (may be empty).
blocktypes = []
if options.services != '':
    blocktypes = options.services.split(':')
# -*- coding: utf-8 -*-
"""Submit a simple 3ds Max render test job to Afanasy.

Renders frames 1-10 of ``scene.max`` from the current working
directory, one frame per task.
"""
import os
import af

# @#@ markers are replaced by Afanasy with per-task first/last frame numbers.
render_command = (
    '3dsmaxcmd "%s\\scene.max" -start:@#@ -end:@#@ -v:5 -showRFW:0 '
    '-o:"render/from_script.0000.jpg"'
) % os.getcwd()

frames_block = af.Block('Frames', 'max')
frames_block.setCommand(render_command)
frames_block.setNumeric(1, 10, 1)

test_job = af.Job('3d MAX Test')
test_job.blocks.append(frames_block)
test_job.send()
# Cleanup mode: if Options.cleanup: print('Clean up completed.') sys.exit(0) print('Jobs Pack = %d' % Options.jobspack) print('Tasks Number = %d' % Options.tasksnum) if Options.nopost: print('No post command') # Create temporary folder: os.mkdir(TmpDir) # Create a job template: job = af.Job() for b in range(0, Options.blocksnum): block = af.Block() job.blocks.append(block) if Options.capacity: block.setCapacity(Options.capacity) if not Options.notasks: block.setNumeric(1, Options.tasksnum) cmd = 'task.py' cmd = os.path.join(os.getcwd(), cmd) cmd = 'python "%s"' % cmd cmd += ' -s @#@ -e @#@ -t 1 @####@ @#####@ @#####@ @#####@' block.setCommand(cmd, False) block.setFiles(['file_a.@#@.@####@', 'file_b.@#@.@####@']) job.setNeedOS('')
# NOTE(review): fragment of a movie-making submit script; `cmd`, `images`,
# `mname`, `task`, `name`, `scene`, `priority`, `maxruntasks`,
# `maxtasksperhost` and `hostsmask` are defined above the visible chunk, and
# the final `if hostsmask != '':` is truncated — its body lies beyond view.

# Convert Afanasy padding markers (@#...#@) back to plain '#' padding for the
# moviemaker command line:
cmd += ' "%s"' % images.replace('@#', '#').replace('#@', '#')
# Resolve a relative movie name against the images' parent folder:
if not os.path.isabs(mname):
    mname = os.path.join(
        os.path.dirname(os.path.dirname(images)),
        mname)
cmd += ' "%s"' % mname
task.setCommand(cmd)

# Create a Job:
job = af.Job(name)
job.setPriority(priority)
job.setFolder('input', os.path.dirname(scene))
if images != '':
    job.setFolder('output', os.path.dirname(images))
# -1 means "parameter not set by the user":
if maxruntasks != -1:
    job.setMaxRunningTasks(maxruntasks)
if maxtasksperhost != -1:
    job.setMaxRunTasksPerHost(maxtasksperhost)
if hostsmask != '':
def genJob(self, blockparams):
    """Build and send an Afanasy job from this Houdini afanasy node.

    blockparams -- sequence of per-block parameter objects; each must
    provide genBlock(renderhip) and a `preview` images attribute.

    Returns None. Aborts (plain `return`) when there are no blocks or the
    user cancels the wait-time dialog.
    """
    if VERBOSE:
        print('Generating job on "%s"' % self.job_name)

    if len(blockparams) < 1:
        print('Can`t generate job without any blocks on "%s"'
              % self.afnode.name())
        return

    job = af.Job()
    job.setName(self.job_name)

    # Optional scheduled start: compute today's HH:MM as absolute seconds.
    if self.afnode.parm('wait_time').eval():
        hours = int(self.afnode.parm('wait_time_hours').eval())
        minutes = int(self.afnode.parm('wait_time_minutes').eval())
        hours = max(0, min(hours, 23))
        minutes = max(0, min(minutes, 59))
        now_sec = int(time.time())
        # Start of the current local day, in epoch seconds:
        now_day = int((now_sec - time.timezone) / (24 * 3600)) \
            * (24 * 3600) + time.timezone
        sec = now_sec % 60
        wait_sec = now_day + (hours * 3600) + (minutes * 60) + sec
        # Requested time already passed today — offer a one-day offset:
        if wait_sec <= now_sec:
            if hou.ui.displayMessage(
                    ('Now is greater than %d:%d\nOffset by 1 day?'
                        % (hours, minutes)),
                    buttons=('Offset', 'Abort'),
                    default_choice=0, close_choice=1,
                    title=('Wait Time')) == 0:
                wait_sec += (24 * 3600)
            else:
                return
        job.setWaitTime(wait_sec)

    renderhip = hou.hipFile.name()
    if self.afnode.parm('render_temp_hip').eval():
        # Calculate temporary hip name:
        ftime = time.time()
        renderhip = '%s/%s%s%s.hip' % (
            os.path.dirname(renderhip),
            afcommon.filterFileName(self.job_name),
            time.strftime('.%m%d-%H%M%S-'),
            str(ftime - int(ftime))[2:5])
        # use mwrite, because hou.hipFile.save(renderhip)
        # changes current scene file name to renderhip,
        # at least in version 9.1.115
        hou.hscript('mwrite -n "%s"' % renderhip)

    # Optional job parameters; '' / -1 mean "not set by the user":
    if self.start_paused:
        job.offLine()
    if self.preview_approval:
        job.setPPApproval()
    if self.platform != '':
        if self.platform == 'any':
            job.setNeedOS('')
        else:
            job.setNeedOS(self.platform)
    if self.job_branch != '':
        job.setBranch(self.job_branch)
    if self.priority != -1:
        job.setPriority(self.priority)
    if self.depend_mask != '':
        job.setDependMask(self.depend_mask)
    if self.depend_mask_global != '':
        job.setDependMaskGlobal(self.depend_mask_global)
    if self.max_runtasks > -1:
        job.setMaxRunningTasks(self.max_runtasks)
    if self.maxperhost > -1:
        job.setMaxRunTasksPerHost(self.maxperhost)
    if self.hosts_mask != '':
        job.setHostsMask(self.hosts_mask)
    if self.hosts_mask_exclude != '':
        job.setHostsMaskExclude(self.hosts_mask_exclude)

    job.setFolder('input', os.path.dirname(hou.hipFile.name()))

    images = None
    for blockparam in blockparams:
        job.blocks.append(blockparam.genBlock(renderhip))
        # Set ouput folder from the first block with images to preview:
        if images is None and blockparam.preview != '':
            images = blockparam.preview
            job.setFolder('output', os.path.dirname(images))

    # Server deletes the temporary hip when the job is deleted:
    if self.afnode.parm('render_temp_hip').eval():
        job.setCmdPost('deletefiles "%s"' % renderhip)

    if VERBOSE:
        job.output(True)

    job.send()
# -*- coding: utf-8 -*-
"""Submit a Natron render test job to Afanasy.

Cooks the 'w_jpg' writer of ``scene.ntp`` from the current working
directory over frames 1-20, two frames per task.
"""
import af
import os

scene_path = os.path.join(os.getcwd(), 'scene.ntp')

# @#@-@#@ is replaced by Afanasy with the per-task frame range.
render_block = af.Block('w_jpg', 'natron')
render_block.setCommand('natron -b -w w_jpg @#@-@#@ ' + scene_path)
render_block.setNumeric(1, 20, 2)

natron_job = af.Job('Natron job.py')
natron_job.blocks.append(render_block)
natron_job.send()
def execute(self, context):
    """Submit the current Blender scene as an Afanasy render job.

    Saves a timestamped copy of the .blend, builds one block per enabled
    render layer (or a single block), optionally appends a movie-encoding
    block, sends the job and reports the result.

    Fixes vs. original:
    - setDependMask() now receives the last render block's NAME (a mask
      string), not the af.Block object itself.
    - Temp text-block cleanup checks text.name (not the Text object) and
      iterates a copy while removing.
    - The movie block is only added when output images are known, avoiding
      an AttributeError on images=None.

    Returns {'FINISHED'}.
    """
    sce = context.scene
    cgru_props = sce.cgru
    rd = context.scene.render
    images = None
    engine_string = sce.render.engine
    sceneModified = False  # whether the original scene was modified below

    # set selected pool (static)
    CGRU_Submit.selected_pool = cgru_props.pools

    # Import Afanasy module:
    import af

    # Calculate temporary scene path:
    scenefile = bpy.data.filepath
    if scenefile.endswith('.blend'):
        scenefile = scenefile[:-6]
    renderscenefile = "%s.%s.blend" % (
        scenefile, time.strftime('%Y%m%d%H%M%S'))

    # Make all Local and pack all textures and objects
    if cgru_props.packLinkedObjects:
        bpy.ops.object.make_local(type='ALL')
        sceneModified = True
    if cgru_props.relativePaths:
        bpy.ops.file.make_paths_relative()
        sceneModified = True
    if cgru_props.packTextures:
        bpy.ops.file.pack_all()
        sceneModified = True

    # Get job name; if empty, use the scene file name:
    jobname = cgru_props.jobname
    if not jobname:
        jobname = os.path.basename(scenefile)
        # Try to cut standart '.blend' extension:
        if jobname.endswith('.blend'):
            jobname = jobname[:-6]

    # Get frames settings:
    fstart = sce.frame_start
    fend = sce.frame_end
    finc = sce.frame_step
    fpertask = cgru_props.fpertask
    sequential = cgru_props.sequential
    # Check frames settings:
    if fpertask < 1:
        fpertask = 1
    if fend < fstart:
        fend = fstart

    # Create a job:
    job = af.Job(jobname)

    servicename = 'blender'

    # Collect render layer names when splitting is requested; otherwise a
    # single unnamed entry produces one block.
    renderlayer_names = []
    layers = bpy.context.scene.render.layers
    if cgru_props.splitRenderLayers and len(layers) > 1:
        for layer in layers:
            if layer.use:
                renderlayer_names.append(layer.name)
    else:
        renderlayer_names.append('')

    block_name = None  # name of the last created render block
    for renderlayer_name in renderlayer_names:
        block = None
        images = None

        # Create block (and, when splitting, a per-layer text datablock
        # that restricts rendering to that layer):
        if cgru_props.splitRenderLayers and len(layers) > 1:
            txt_block = bpy.data.texts.new("layer_%s" % renderlayer_name)
            txt_block.write(LAYER_TEXT_BLOCK.format(renderlayer_name))
            block_name = "layer_%s" % renderlayer_name
        else:
            block_name = engine_string
        block = af.Block(block_name, servicename)

        # Check current render engine
        if engine_string == 'BLENDER_RENDER':
            block.setParser('blender_render')
        elif engine_string == 'CYCLES':
            block.setParser('blender_cycles')

        if cgru_props.filepath != '':
            # Inject the layer name before the '#' frame padding:
            pos = cgru_props.filepath.find('#')
            if pos != -1:
                if cgru_props.filepath[pos - 1] in '._- ':
                    images = "{0}{1}{2}".format(
                        cgru_props.filepath[:pos - 1],
                        renderlayer_name,
                        cgru_props.filepath[pos - 1:])
                else:
                    images = "{0}{1}{2}".format(
                        cgru_props.filepath[:pos],
                        renderlayer_name,
                        cgru_props.filepath[pos:])
            else:
                images = "{0}{1}".format(
                    cgru_props.filepath, renderlayer_name)

            # '#' runs become Afanasy @#...#@ padding markers:
            output_images = re.sub(r'(#+)', r'@\1@', images)
            # '//' is Blender's relative-path prefix:
            if output_images.startswith('//'):
                output_images = os.path.join(
                    os.path.dirname(renderscenefile),
                    output_images.replace('//', ''))
            if rd.file_extension not in output_images:
                block.setFiles([output_images + rd.file_extension])
            else:
                block.setFiles([output_images])

        if cgru_props.splitRenderLayers and len(layers) > 1:
            python_options = ' --python-text "layer_%s"' % renderlayer_name
        else:
            python_options = ''

        cmd = CMD_TEMPLATE.format(
            blend_scene=renderscenefile,
            render_engine=engine_string,
            python_options=python_options,
            output_options=' -o "%s" ' % images if images else '',
            frame_inc=finc)

        block.setCommand(cmd)
        block.setNumeric(fstart, fend, fpertask, finc)
        block.setSequential(sequential)
        block.setHostsMaskExclude(getHostsMaskExclude())

        job.blocks.append(block)

    # Optional movie-encoding block, depending on the last render block.
    # Skipped when no output images are configured (images is None).
    if cgru_props.make_movie and images:
        movie_block = af.Block(cgru_props.mov_name + '-movie', 'movgen')
        # FIX: depend mask must be a block-name mask string, not the
        # af.Block object itself.
        movie_block.setDependMask(block_name)
        movie_task = af.Task(cgru_props.mov_name)
        movie_block.tasks.append(movie_task)
        cmd = os.getenv('CGRU_LOCATION')
        cmd = os.path.join(cmd, 'utilities', 'moviemaker', 'makemovie.py')
        cmd = 'python "%s"' % cmd
        cmd += ' --codec "%s"' % cgru_props.mov_codecs
        cmd += ' -r "%sx%s"' % (cgru_props.mov_width,
                                cgru_props.mov_height)
        # Convert @#...#@ padding back to plain '#' for the moviemaker:
        cmd += ' "%s"' % images.replace('@#', '#').replace('#@', '#')
        cmd += ' "%s"' % cgru_props.mov_name
        movie_task.setCommand(cmd)
        job.blocks.append(movie_block)

    # Set job running parameters; -1 / '' mean "not set by the user":
    if cgru_props.maxruntasks > -1:
        job.setMaxRunningTasks(cgru_props.maxruntasks)
    if cgru_props.priority > -1:
        job.setPriority(cgru_props.priority)
    if cgru_props.dependmask != '':
        job.setDependMask(cgru_props.dependmask)
    if cgru_props.dependmaskglobal != '':
        job.setDependMaskGlobal(cgru_props.dependmaskglobal)
    if cgru_props.hostsmask != '':
        job.setHostsMask(cgru_props.hostsmask)
    if cgru_props.hostsmaskexclude != '':
        job.setHostsMaskExclude(cgru_props.hostsmaskexclude)
    if cgru_props.pause:
        job.offLine()
    if cgru_props.previewPendingApproval:
        job.setPPApproval()

    # Make server to delete temporary file after job deletion:
    job.setCmdPost('deletefiles "%s"' % os.path.abspath(renderscenefile))

    # Print job information:
    job.output(True)

    # Save Temporary file
    bpy.ops.wm.save_as_mainfile(filepath=renderscenefile, copy=True)

    # Clean up temp text blocks.
    # FIX: match on text.name (not the Text object) and iterate a copy,
    # since removing while iterating the bpy collection is unsafe.
    if cgru_props.splitRenderLayers and len(layers) > 1:
        for text in list(bpy.data.texts):
            if "layer_" in text.name:
                bpy.data.texts.remove(text)

    # Send job to server:
    result = job.send()
    if not result[0]:
        msg = ("An error occurred when submitting job to Afanasy."
               "Check console.")
        self.report({'ERROR'}, msg)
    else:
        msg = "Job id:%s successfully submit to Afanasy."
        self.report({'INFO'}, msg % result[1]['id'])

    # if opriginal scene is modified - we need to reload the scene file
    if sceneModified:
        bpy.ops.wm.open_mainfile(filepath=scenefile + ".blend")

    return {'FINISHED'}
# NOTE(review): fragment of a cut/movie assembly submit script. The first two
# statements are the tail of a per-sequence loop whose header lies above the
# visible chunk (indentation reconstructed — confirm against the full file);
# `Shots`, `Options`, `OutDir`, `TmpFiles`, `movie_name` and `CutName` are
# also defined above, and more blocks are appended below.
    f += num_frames
    file_counter += num_frames

# Progress lines are JSON fragments consumed by a listening front-end:
print('{"progress":"%d sequences found"},' % len(Shots))
print('{"progress":"%d files found"},' % file_counter)

# Build the encode command from the script's own location:
cmd_encode = os.path.join(os.path.dirname(sys.argv[0]), 'makemovie.py')
cmd_encode = 'python "%s"' % os.path.normpath(cmd_encode)
cmd_encode += ' -f %s' % Options.fps
cmd_encode += ' -c %s' % Options.codec
cmd_encode += ' "%s"' % os.path.join(OutDir, TmpFiles)
cmd_encode += ' "%s"' % movie_name

# Afanasy job creation:
job = af.Job('CUT ' + CutName)
job.setMaxRunningTasks(Options.afmaxtasks)
job.setMaxRunTasksPerHost(Options.afperhost)
if Options.afuser != '':
    job.setUserName(Options.afuser)

# Delete previous sequence block:
# Only created when a previous output folder exists.
delete_name = None
if os.path.isdir(OutDir):
    delete_name = 'delete'
    block = af.Block(delete_name)
    block.setCapacity(1)
    task = af.Task(delete_name + ' ' + os.path.basename(OutDir))
    task.setCommand('deletefiles "%s"' % OutDir)
    block.tasks.append(task)
    job.blocks.append(block)