def createBlock(i_afparams, i_wparams, i_prj, i_mask, i_prefix):
    """Build a 'natron' af.Block from afanasy and writer node parameters."""
    block = af.Block(i_prefix + i_wparams['nodelabel'], 'natron')
    block.setNumeric(
        i_afparams['af_frame_first'],
        i_afparams['af_frame_last'],
        i_afparams['af_frame_pertast'],
        i_afparams['af_frame_increment'])

    if 'af_frame_sequential' in i_afparams:
        block.setSequential(i_afparams['af_frame_sequential'])

    # Optional limits: applied only when present and not the -1 sentinel.
    if i_afparams.get('af_capacity', -1) != -1:
        block.setCapacity(i_afparams['af_capacity'])
    if i_afparams.get('af_maxtasks', -1) != -1:
        block.setMaxRunningTasks(i_afparams['af_maxtasks'])
    if i_afparams.get('af_maxtasks_perhost', -1) != -1:
        block.setMaxRunTasksPerHost(i_afparams['af_maxtasks_perhost'])

    if len(i_mask):
        # Whole-range mode waits for the entire masked block; otherwise
        # dependencies are resolved per task.
        if i_afparams.get('af_multi_wholerange'):
            block.setDependMask(i_mask)
        else:
            block.setTasksDependMask(i_mask)

    block.setFiles(i_wparams['files'])

    command = os.getenv('NATRON_AF_RENDER', 'natron -b')
    command += ' -w "%s"' % i_wparams['nodename']
    command += ' @#@-@#@'
    command += ' "%s"' % i_prj
    block.setCommand(command)
    return block
def genBlock(self, hipfilename):
    """Build and return an af.Block for this afanasy node.

    NOTE(review): 'self.cmd % vars()' substitutes *local* variable names
    (e.g. hipfilename) into the command template, so local naming in this
    function is part of the behavior — do not rename locals.
    """
    if VERBOSE:
        if self.ropnode:
            print('Generating block for "%s" from "%s"' %
                  (self.ropnode.path(), self.afnode.path()))
        else:
            print('Generating command block from "%s"' % (self.afnode.path()))

    block = af.Block(self.name, self.type)
    block.setParser(self.parser)
    block.setCommand(self.cmd % vars(), self.cmd_useprefix)
    if self.preview != '':
        block.setFiles([self.preview])

    if self.numeric:
        # Numeric block: tasks are generated from the frame range.
        block.setNumeric(self.frame_first, self.frame_last,
                         self.frame_pertask, self.frame_inc)
    else:
        # Explicit task list instead of a numeric frame range.
        t = 0
        for cmd in self.tasks_cmds:
            task = af.Task(self.tasks_names[t])
            task.setCommand(cmd)
            if len(self.tasks_previews):
                task.setFiles([self.tasks_previews[t]])
            block.tasks.append(task)
            t += 1
        block.setFramesPerTask(self.frame_pertask)

    block.setSequential(self.frame_sequential)
    block.setCapacity(self.capacity)
    if self.capacity_min != -1 or self.capacity_max != -1:
        block.setVariableCapacity(self.capacity_min, self.capacity_max)
    block.setTasksMaxRunTime(self.maxruntime)

    # Per-block scheduling limits — applied only when this block is a
    # sub-block of a multi-block job; presumably the caller applies the
    # same settings at job level otherwise — TODO confirm.
    if self.subblock:
        if self.max_runtasks > -1:
            block.setMaxRunningTasks(self.max_runtasks)
        if self.maxperhost > -1:
            block.setMaxRunTasksPerHost(self.maxperhost)
        if self.hosts_mask != '':
            block.setHostsMask(self.hosts_mask)
        if self.hosts_mask_exclude != '':
            block.setHostsMaskExclude(self.hosts_mask_exclude)
        if self.dependmask != '':
            if self.fullrangedepend:
                block.setDependMask(self.dependmask)
            else:
                block.setTasksDependMask(self.dependmask)
        if self.subtaskdepend:
            block.setDependSubTask()

    return block
def nuke_sendJobs(self, writename, framefirst, framelast, framepertask, seqname):
    """send jobs from nuke to cgru"""
    root_infos = self.nukeRootinfos()

    render_job = af.Job('{}_{}'.format(writename, root_infos[0]))
    render_job.setMaxRunningTasks(15)

    render_block = af.Block('Nuke_Render', 'nuke')
    render_block.setWorkingDirectory(root_infos[1])
    render_block.setCommand(
        'nuke -i -X {} -x {} @#@,@#@'.format(writename, root_infos[2]))
    render_block.setFiles([seqname])
    render_block.setNumeric(framefirst, framelast, framepertask)

    render_job.blocks.append(render_block)
    if self.job_paused.isChecked():
        render_job.offline()
    render_job.send()
def submitAsJob(self, graph_file, node_path):
    """
    [virtual] Called when the scheduler should cook the entire TOP Network
    as a standalone job, by pressing the 'Submit as Job' button on the
    scheduler node UI.

    Creates a job which cooks that TOP graph using hython.
    Returns the status URI for the submitted job - just to open manager
    Web GUI.

    graph_file      Path to a .hip file containing the TOP Network,
                    relative to $PDG_DIR.
    node_path       Op path to the TOP Network
    """
    self._log("submitAsJob({},{})".format(graph_file, node_path))

    # Construct a command for hython + topcook script:
    cmd = 'hython'
    # Use PDG licence
    cmd += ' --pdg'
    # Specify script that cooks graph
    cmd += ' "%s/pdgjob/topcook.py"' % os.getenv('HHP')
    # Set verbosity level
    cmd += ' --verbosity 2'
    # Set hip file:
    cmd += ' --hip "%s"' % hou.hipFile.path()
    # Set top network to cook
    cmd += ' --toppath "%s"' % node_path

    # Construct a job from the scheduler node UI parameters:
    job = af.Job(self['gj_name'].evaluateString())
    job.setBranch(self['job_branch'].evaluateString())
    job.setPriority(self['gj_priority'].evaluateInt())
    job.setDependMask(self['gj_depend_mask'].evaluateString())
    job.setDependMaskGlobal(self['gj_depend_mask_global'].evaluateString())
    job.setHostsMask(self['gj_hosts_mask'].evaluateString())
    job.setHostsMaskExclude(self['gj_hosts_mask_exclude'].evaluateString())
    if self['gj_start_paused'].evaluateInt():
        job.setPaused()

    # Block: a single block with a single task cooking the whole graph.
    block = af.Block('PDG-GRAPH', self['gj_service'].evaluateString())
    block.setCapacity(self['gj_capacity'].evaluateInt())
    block.addTicket(self['gj_ticket'].evaluateString(), 1)

    # Task
    task = af.Task(node_path)
    task.setCommand(cmd)
    task.setEnv('AF_USERNAME', cgruconfig.VARS['USERNAME'])

    # Append task and block and send job
    block.tasks.append(task)
    job.blocks.append(block)
    job.send()

    # NOTE(review): the docstring promises a status URI, but None is
    # returned — the manager Web GUI is not opened from here.
    return None
def submit_job(job_name, block_name, command):
    """Submit a single-task Afanasy job.

    :param job_name: job name shown in the Afanasy GUI
    :param block_name: name of the single 'maya' block
    :param command: iterable of command tokens, joined with spaces
    :raises RuntimeError: if the server rejects the job
    :return: None
    """
    import af

    block = af.Block(block_name, 'maya')
    block.setCommand(" ".join(command))
    block.setNumeric(1, 1, 1, 1)

    job = af.Job(job_name)
    job.blocks = [block]

    status, data = job.send()
    if not status:
        # FIX: the original constructed RuntimeError(...) without raising
        # it, so submission failures were silently ignored.
        raise RuntimeError('Something went wrong!')
def _constructBlock(self, work_item):
    """Create an af.Block for one PDG work item, applying the scheduler's
    per-node parameter overrides (capacity, masks, task limits, env)."""
    service, parser, tickets = self._getWorkItemServiceParserTickets(
        work_item)
    block = af.Block(work_item.node.name, service)
    block.setParser(parser)
    for name in tickets:
        block.addTicket(name, tickets[name])
    block.setCapacity(
        self.evaluateIntOverride(work_item.node, self.parmprefix, 'capacity',
                                 work_item, -1))
    block.setHostsMask(
        self.evaluateStringOverride(work_item.node, self.parmprefix,
                                    'hosts_mask', work_item, ''))
    block.setHostsMaskExclude(
        self.evaluateStringOverride(work_item.node, self.parmprefix,
                                    'hosts_mask_exclude', work_item, ''))
    block.setMaxRunningTasks(
        self.evaluateIntOverride(work_item.node, self.parmprefix,
                                 'max_running_tasks', work_item, -1))
    block.setMaxRunTasksPerHost(
        self.evaluateIntOverride(work_item.node, self.parmprefix,
                                 'max_running_tasks_per_host', work_item, -1))
    # Presumably a MB parameter converted to KB for afanasy; note that an
    # unset override (-1) is sent as -1024 — TODO confirm any negative
    # value means "no memory requirement".
    block.setNeedMemory(
        self.evaluateIntOverride(work_item.node, self.parmprefix,
                                 'need_memory', work_item, -1) * 1024)
    block.setTaskMinRunTime(
        self.evaluateIntOverride(work_item.node, self.parmprefix,
                                 'task_min_run_time', work_item, -1))
    # Float parameter in hours, converted to whole seconds.
    block.setTaskMaxRunTime(
        int(
            self.evaluateFloatOverride(work_item.node, self.parmprefix,
                                       'task_max_run_time', work_item,
                                       -1) * 3600.0))
    # 'removekeys' is unused here; only additions are applied to the block.
    env_dict, removekeys = self.resolveEnvParams(self.parmprefix, work_item,
                                                 False)
    for name in env_dict:
        block.setEnv(name, env_dict[name])
    return block
def __init__(self, name='', service=None, parentJob=None, local=False):
    """Wrap an af.Block, optionally inheriting range and working
    directory from a parent job wrapper."""
    self.af_block = af.Block(name, service)

    # Defaults; frame-range attributes are only set when a parent exists.
    self.isNumeric = True
    self.capacity = 1000
    self.hostsmask = ''
    self.parentJob = parentJob
    self.distributed = False
    self.sameHostMaster = False
    self.hosts_min = 0
    self.hosts_max = 0

    if parentJob is None:
        return

    # Setup general parameters from the parent job.
    self.start = parentJob.start
    self.stop = parentJob.stop
    self.step = parentJob.step
    self.task_size = parentJob.task_size
    self.af_block.setWorkingDirectory(parentJob.work_dir)
    if local:
        # Pin the block to the machine that submitted the parent job.
        self.af_block.setHostsMask(parentJob.af_job.data['host_name'])
#!/usr/bin/env python
# coding=utf8
"""Afanasy Nuke example: four render blocks chained with task
dependencies — 'preview' waits for 'final', which waits for 'key'
and 'back'."""

import os

import af

job = af.Job('Nuke Test')

block1 = af.Block('preview', 'nuke')
block1.setCommand('nuke -X preview -x scene.nk %1,%2')
block1.setNumeric(1, 20, 2)
block1.setTasksDependMask('final')
# FIX: setFiles() expects a list of patterns; a bare string is extended
# into the files list character by character (every other call site in
# these scripts passes a list).
block1.setFiles(['render/preview.%04d.jpg'])

block2 = af.Block('final', 'nuke')
block2.setCommand('nuke -X final -x scene.nk %1,%2')
block2.setNumeric(1, 20, 1)
block2.setTasksDependMask('key|back')
block2.setFiles(['render/final.%04d.exr'])

block3 = af.Block('key', 'nuke')
block3.setCommand('nuke -X key -x scene.nk %1,%2')
block3.setNumeric(1, 20, 3)
block3.setFiles(['render/key.%04d.exr'])

block4 = af.Block('back', 'nuke')
block4.setCommand('nuke -X back -x scene.nk %1,%2')
block4.setNumeric(1, 20, 3)
block4.setFiles(['render/back.%04d.exr'])

job.blocks.append(block1)
# Finish the encode command: fps, codec, input pattern and output movie.
cmd_encode += ' -f %s' % Options.fps
cmd_encode += ' -c %s' % Options.codec
cmd_encode += ' "%s"' % os.path.join(OutDir, TmpFiles)
cmd_encode += ' "%s"' % movie_name

# Afanasy job creation:
job = af.Job('CUT ' + CutName)
job.setMaxRunningTasks(Options.afmaxtasks)
job.setMaxRunTasksPerHost(Options.afperhost)
if Options.afuser != '':
    job.setUserName(Options.afuser)

# Delete previous sequence block (only when the output folder exists):
delete_name = None
if os.path.isdir(OutDir):
    delete_name = 'delete'
    block = af.Block(delete_name)
    block.setCapacity(1)
    task = af.Task(delete_name + ' ' + os.path.basename(OutDir))
    task.setCommand('deletefiles "%s"' % OutDir)
    block.tasks.append(task)
    job.blocks.append(block)

# Convert block:
block = af.Block('convert', Options.afservice)
if delete_name:
    # Conversion waits until the cleanup block has finished.
    block.setDependMask(delete_name)
counter = 0
for cmd in commands:
    task = af.Task(task_names[counter])
    task.setCommand(cmd)
    block.tasks.append(task)
    counter += 1
def genBlock(self, hipfilename):
    """Build and return an af.Block for this afanasy ROP node.

    NOTE(review): 'self.cmd % vars()' substitutes *local* names
    (hipfilename, auxargs) into the command template, so local naming
    in this function is part of the behavior — do not rename locals.
    """
    if VERBOSE:
        if self.ropnode:
            print('Generating block for "%s" from "%s"' %
                  (self.ropnode.path(), self.afnode.path()))
        else:
            print('Generating command block from "%s"' % (self.afnode.path()))

    auxargs = self.auxargs

    # Place hipfilename and auxargs
    cmd = self.cmd % vars()

    block = af.Block(self.name, self.service)
    block.setParser(self.parser)
    block.setCommand(cmd, self.cmd_useprefix)
    if self.preview != '':
        block.setFiles([self.preview])

    if self.numeric:
        # Numeric block: tasks generated from the frame range.
        block.setNumeric(self.frame_first, self.frame_last,
                         self.frame_pertask, self.frame_inc)
    else:
        # Explicit task list instead of a numeric frame range.
        t = 0
        for cmd in self.tasks_cmds:
            task = af.Task(self.tasks_names[t])
            task.setCommand(cmd)
            if len(self.tasks_previews):
                task.setFiles([self.tasks_previews[t]])
            block.tasks.append(task)
            t += 1
        block.setFramesPerTask(self.frame_pertask)

    block.setSequential(self.frame_sequential)
    block.setCapacity(self.capacity)
    if self.capacity_min != -1 or self.capacity_max != -1:
        block.setVariableCapacity(self.capacity_min, self.capacity_max)
    block.setTaskMaxRunTime(self.maxruntime)
    block.setTaskMinRunTime(self.minruntime)

    # Delete files in a block post command:
    if len(self.delete_files):
        post_cmd = 'deletefiles'
        for files in self.delete_files:
            # Turn @...@ frame placeholders into '*' wildcards.
            post_cmd += ' "%s"' % re.sub('@#*@', '*', files)
        block.setCmdPost(post_cmd)

    # Per-block scheduling limits — applied only when this block is a
    # sub-block of a multi-block job; presumably the caller applies the
    # same settings at job level otherwise — TODO confirm.
    if self.subblock:
        if self.max_runtasks > -1:
            block.setMaxRunningTasks(self.max_runtasks)
        if self.maxperhost > -1:
            block.setMaxRunTasksPerHost(self.maxperhost)
        if self.hosts_mask != '':
            block.setHostsMask(self.hosts_mask)
        if self.hosts_mask_exclude != '':
            block.setHostsMaskExclude(self.hosts_mask_exclude)
        if self.dependmask != '':
            if self.fullrangedepend:
                block.setDependMask(self.dependmask)
            else:
                block.setTasksDependMask(self.dependmask)
        if self.subtaskdepend:
            block.setDependSubTask()

    if self.min_memory > -1:
        block.setNeedMemory(self.min_memory)

    return block
# Load-test helper: build a template job with many identical blocks.
if Options.cleanup:
    print('Clean up completed.')
    sys.exit(0)

print('Jobs Pack = %d' % Options.jobspack)
print('Tasks Number = %d' % Options.tasksnum)
if Options.nopost:
    print('No post command')

# Create temporary folder:
os.mkdir(TmpDir)

# Create a job template:
job = af.Job()
for b in range(0, Options.blocksnum):
    block = af.Block()
    job.blocks.append(block)
    if Options.capacity:
        block.setCapacity(Options.capacity)
    if not Options.notasks:
        block.setNumeric(1, Options.tasksnum)
        # Each task runs the local task.py with frame placeholders.
        cmd = 'task.py'
        cmd = os.path.join(os.getcwd(), cmd)
        cmd = 'python "%s"' % cmd
        cmd += ' -s @#@ -e @#@ -t 1 @####@ @#####@ @#####@ @#####@'
        block.setCommand(cmd, False)
        block.setFiles(['file_a.@#@.@####@', 'file_b.@#@.@####@'])

# Empty OS mask: any render host OS is accepted.
job.setNeedOS('')
counter = 0
# Build the job's blocks; names/types fall back to the last given entry
# (with the block index appended for names).
for b in range(numblocks):
    blockname = 'block'
    blocktype = 'generic'
    if len(blocknames) > b:
        blockname = blocknames[b]
    else:
        blockname = blocknames[len(blocknames) - 1] + str(b)
    if len(blocktypes) > b:
        blocktype = blocktypes[b]
    else:
        blocktype = blocktypes[len(blocktypes) - 1]
    block = af.Block(blockname, blocktype)
    job.blocks.append(block)
    if options.parser != '':
        block.setParser(options.parser)
    if b > 0:
        # NOTE(review): the *previous* block is made to depend on this
        # (later) one, i.e. blocks listed first run last — confirm this
        # ordering is intended.
        job.blocks[b - 1].setTasksDependMask(blockname)
        if options.subdep:
            job.blocks[b].setDependSubTask()
    if options.maxtime:
        block.setTasksMaxRunTime(options.maxtime)
    if options.capacity != 0:
        block.setCapacity(options.capacity)
# Generate some data to send to PHP result = {'status': 'Yes!'} #folder = "/var/www/owncloud/" + data["folder"] # Send it to stdout (to PHP) # Create a job job = af.Job(data["scene"]) job.setUserName(data["user"]) # Set maximum tasks that can be executed simultaneously job.setMaxRunningTasks(5) # Create a block with provided name and service type block = af.Block('blenderRender', 'blender') # Set block tasks working directory block.setWorkingDirectory('/var/www/html/owncloud/data/' + data["user"] + '/files' + data["directory"] + '/') # Set block tasks command block.setCommand('blender -b \"/var/www/html/owncloud/data/' + data["user"] + '/files/' + data["file_path"] + '\" -o \"/var/www/html/owncloud/Nube_Multimedia/' + data["pathSave"] + '/img\" -s @#@ -e @#@ -j 1 -a') # Set block tasks preview command arguments block.setFiles([ "/var/www/html/owncloud/html/Nube_Multimedia/" + data['pathSave'] + "img" ])
else: convert['warning'] = 'No images found' OUT['convert'].append(convert) for i in range(0, len(Jobs)): if MkDirs[i]: if Options.verbose: print('mkdir ' + MkDirs[i]) if not Options.debug and not os.path.isdir(MkDirs[i]): os.makedirs(MkDirs[i]) if Options.afanasy: job = af.Job('CVT ' + JobNames[i]) block = af.Block('convert') job.blocks.append(block) if Options.afuser != '': job.setUserName(Options.afuser) if Options.afmax != -1: job.setMaxRunningTasks(Options.afmax) if Options.afcap != -1: block.setCapacity(Options.afcap) if Options.afmph != -1: block.setMaxRunTasksPerHost(Options.afmph) if Options.afmrt != -1:
# Houdini tile-render test: generate IFDs, then render each frame in
# divx*divy mantra tiles (tile joining presumably follows this chunk).
scene = 'scene.hip'
rop = '/out/ifd'
ifd = 'render/scene.@####@.ifd'
ifdd = 'render/scene.%04d.ifd'
img = 'render/img.@####@.exr'
imgd = 'render/img.%04d.exr'
f_start = 1
f_finish = 10
divx = 3
divy = 2
tiles = divx * divy

job = af.Job('Houdini Test: Tile Render')

b_genifd = af.Block('generate ifd', 'hbatch')
b_genifd.setCommand('hrender_af -s @#@ -e @#@ %s %s' % (scene, rop))
b_genifd.setNumeric(f_start, f_finish)

b_render = af.Block('render tiles', 'mantra')
b_render.setCommand('mantrarender tc %(divx)d %(divy)d @#@' % vars())
b_render.setTasksDependMask('generate ifd')
# Negative frames-per-task: several tasks (one per tile) per frame.
b_render.setFramesPerTask(-tiles)
for f in range(f_start, f_finish + 1):
    cmd = ' -R -f ' + ifdd % f
    for t in range(0, tiles):
        task = af.Task('%d tile %d' % (f, t))
        task.setCommand(str(t) + cmd)
        # NOTE(review): setFiles is given a bare string here; other
        # scripts pass a list — confirm af.Task.setFiles accepts both.
        task.setFiles((imgd % f) + ('.tile_%d.exr' % t))
        b_render.tasks.append(task)
# -*- coding: utf-8 -*-
"""Minimal Afanasy example: one job containing one block with one task."""
import af

task = af.Task('simple task')
task.setCommand('ls -l')

block = af.Block('block of tasks')
block.setWorkingDirectory('/home')
block.tasks.append(task)

job = af.Job('example job')
job.blocks.append(block)
job.send()
def SubmitButton_OnClicked():
    """Submit the current XSI scene to Afanasy.

    Reads submission options from the 'afSubmitProperties' property set,
    saves the scene, and for each selected render pass copies the scene
    to a temporary file and sends an xsibatch render job (one block per
    vari-render variant, or a single 'xsi' block).
    """
    opSet = Application.ActiveSceneRoot.Properties('afSubmitProperties')
    if opSet is None:
        Application.LogMessage('Error: Can\'t find options.')
        PPG.Close()
        # FIX: previously execution fell through and crashed on
        # opSet.Parameters(...) below.
        return

    # Save scene:
    Application.SaveScene()
    scene = Application.ActiveProject.ActiveScene
    scenefile = scene.Filename.Value
    if not os.path.isfile(scenefile):
        Application.LogMessage('Error: Can\'t save scene.')
        return

    # Submission options:
    range_frompass = opSet.Parameters('afRange_frompass').Value
    range_forcepass = opSet.Parameters('afRange_forcepass').Value
    frame_start = opSet.Parameters('afFrame_start').Value
    frame_end = opSet.Parameters('afFrame_end').Value
    frame_by = opSet.Parameters('afFrame_by').Value
    frame_fpt = opSet.Parameters('afFrame_fpt').Value
    passesOption = opSet.Parameters('afRenderPass').Value
    jobname = opSet.Parameters('afJobName').Value
    priority = opSet.Parameters('afPriority').Value
    capacity = opSet.Parameters('afCapacity').Value
    simulate = opSet.Parameters('afSimulate').Value
    paused = opSet.Parameters('afStartPaused').Value
    maxhosts = opSet.Parameters('afMaxHosts').Value
    maxruntime = opSet.Parameters('afMaxRunTime').Value
    hostsmask = opSet.Parameters('afHostsMask').Value
    hostsmaskexclude = opSet.Parameters('afHostsMaskExclude').Value
    dependmask = opSet.Parameters('afDependMask').Value
    dependmaskglobal = opSet.Parameters('afDependMaskGlobal').Value
    varirender = opSet.Parameters('afVariRender').Value
    varirender_attr = opSet.Parameters('afVariRenderAttr').Value
    varirender_start = opSet.Parameters('afVariRenderStart').Value
    varirender_step = opSet.Parameters('afVariRenderStep').Value
    varirender_count = opSet.Parameters('afVariRenderCount').Value

    # Sanitize frame range:
    if frame_end < frame_start:
        frame_end = frame_start
    if frame_by < 1:
        frame_by = 1
    if frame_fpt < 1:
        frame_fpt = 1

    # Collect render passes to submit:
    passes = []
    if passesOption == '_all_':
        for cpass in scene.Passes:
            passes.append(cpass.Name)
    elif passesOption == '_selected_':
        selection = []
        for selected in Application.Selection:
            selection.append(selected.Name)
        for cpass in scene.Passes:
            if cpass.Name in selection:
                passes.append(cpass.Name)
    elif passesOption == '_current_':
        passes.append(scene.ActivePass.Name)
    else:
        passes.append(passesOption)

    padding = Application.GetValue('Passes.RenderOptions.FramePadding')

    for cpass in passes:
        images = []
        # Get framebuffers, turning frame numbers into %0Nd patterns:
        for ps in scene.Passes:
            if ps.Name != cpass:
                continue
            for fb in ps.Framebuffers:
                if fb.Enabled.Value:
                    format = fb.Format.Value
                    filename = fb.ResolvedFilename.Value
                    pattern = r'\d+.' + format + '$'
                    match = re.search(pattern, filename)
                    if match is not None:
                        part = match.group(0)
                        match = re.search(r'\d+', part)
                        if match is not None:
                            num = match.group(0)
                            pad = '%'
                            if padding > 1:
                                pad += '0' + str(padding)
                            pad += 'd'
                            newpart = part.replace(num, pad)
                            filename = filename.replace(part, newpart)
                        images.append(filename)
                    else:
                        Application.LogMessage('Can`t solve "%s"' % filename)

        # Copy scene to temporary file:
        curjobname = jobname
        if len(passes) > 1:
            curjobname += '-%s' % cpass
        ftime = time.time()
        tmpscene = '%s.%s%s%s.scn' % (scenefile, curjobname,
                                      time.strftime('.%m%d-%H%M%S-'),
                                      str(ftime - int(ftime))[2:5])
        try:
            shutil.copyfile(scenefile, tmpscene)
        except Exception as e:
            Application.LogMessage('Unable to copy temporary scene:')
            Application.LogMessage(tmpscene)
            Application.LogMessage(str(e))
            return
        if not os.path.isfile(tmpscene):
            Application.LogMessage('Error: Can\'t save temporary scene.')
            return

        # Get frame range (optionally taken from the pass itself):
        cp_frame_start = frame_start
        cp_frame_end = frame_end
        cp_frame_by = frame_by
        if not range_forcepass:
            if range_frompass:
                if Application.GetValue(
                        'Passes.%s.FrameRangeSource' % cpass) == 0:
                    cp_frame_start = \
                        Application.GetValue('Passes.%s.FrameStart' % cpass)
                    cp_frame_end = \
                        Application.GetValue('Passes.%s.FrameEnd' % cpass)
                    cp_frame_by = \
                        Application.GetValue('Passes.%s.FrameStep' % cpass)

        # Construct job:
        Application.LogMessage(
            'Sending "%s" pass, range: %d-%d,%d' %
            (cpass, cp_frame_start, cp_frame_end, cp_frame_by))

        # Example of the generated command:
        # xsibatch -script "%XSI_CGRU_PATH%\afrender.py" -lang Python
        #   -main afRenderCurPass -args
        #   -scenePath "%CD%\project\Scenes\scene.scn" -startFrame 1
        #   -endFrame 2 -step 1 -simulate 0
        #   -setAttr torus.polymsh.geom.enduangle -setValue 120
        blocknames = []
        blockcmds = []
        blockimages = []

        cmd = os.environ['XSI_CGRU_PATH']
        cmd = os.path.join(cmd, 'afrender.py')
        cmd = 'xsibatch -script %s' % cmd
        cmd += ' -lang Python -main afRender -args'
        cmd += ' -scene "%s"' % tmpscene
        cmd += ' -start @#@ -end @#@ -step ' + str(cp_frame_by)
        cmd += ' -simulate'
        if simulate:
            cmd += ' 1'
        else:
            cmd += ' 0'
        cmd += ' -renderPass ' + cpass

        if varirender:
            # One block per attribute variation; images of each variant
            # go to a per-value subfolder.
            cmd += ' -attr ' + varirender_attr + ' -value '
            value = varirender_start
            for i in range(0, varirender_count):
                blockcmds.append(cmd + str(value))
                blocknames.append('variant[%d]' % value)
                images_str = ''
                for img in images:
                    img_dir = os.path.dirname(img)
                    img_name = os.path.basename(img)
                    img_dir = os.path.join(img_dir, str(value))
                    img = os.path.join(img_dir, img_name)
                    if images_str != '':
                        images_str += ';'
                    images_str += img
                blockimages.append(images_str)
                value += varirender_step
        else:
            blockname = 'xsi'
            images_str = ''
            for img in images:
                if images_str != '':
                    images_str += ';'
                images_str += img

        job = af.Job(curjobname)
        job.setCmdPost(str('deletefiles "%s"' % os.path.abspath(tmpscene)))
        if priority != -1:
            job.setPriority(priority)
        if maxhosts != -1:
            job.setMaxHosts(maxhosts)
        if hostsmask is not None and hostsmask != '':
            job.setHostsMask(hostsmask)
        if hostsmaskexclude is not None and hostsmaskexclude != '':
            job.setHostsMaskExclude(hostsmaskexclude)
        if dependmask is not None and dependmask != '':
            job.setDependMask(dependmask)
        if dependmaskglobal is not None and dependmaskglobal != '':
            job.setDependMaskGlobal(dependmaskglobal)
        if paused:
            # FIX: was job.offLine(); the af.Job method used throughout
            # these scripts is offline().
            job.offline()

        # No vari-render: fall back to the single 'xsi' block.
        if len(blocknames) == 0:
            blocknames.append(blockname)
            blockcmds.append(cmd)
            blockimages.append(images_str)

        i = 0
        for blockname in blocknames:
            block = af.Block(blockname, 'xsi')
            block.setCommand(str(blockcmds[i]))
            block.setFiles([str(blockimages[i])])
            block.setNumeric(cp_frame_start, cp_frame_end, frame_fpt,
                             cp_frame_by)
            if capacity != -1:
                block.setCapacity(capacity)
            if maxruntime != 0:
                block.setTasksMaxRunTime(int(maxruntime * 3600))
            job.blocks.append(block)
            i += 1

        # Send job; on success the job's post command deletes the
        # temporary scene, so it is only removed here on failure.
        if not job.send()[0]:
            Application.LogMessage('Error: Can\'t send job to server.')
            os.remove(tmpscene)
file_counter += 1 commands.append(cmd) task_names.append(os.path.basename(image)) print('{"progress":"%d sequences found"},' % len(Shots)) print('{"progress":"%d files found"},' % file_counter) cmd_encode = os.path.join(os.path.dirname(sys.argv[0]), 'makemovie.py') cmd_encode = 'python "%s"' % os.path.normpath(cmd_encode) cmd_encode += ' -f %s' % Options.fps cmd_encode += ' -c %s' % Options.codec cmd_encode += ' "%s"' % os.path.join(OutDir, TmpFiles) cmd_encode += ' "%s"' % movie_name job = af.Job('CUT ' + CutName) block = af.Block('convert') counter = 0 for cmd in commands: task = af.Task(task_names[counter]) task.setCommand(cmd) block.tasks.append(task) counter += 1 block.setCapacity(100) block.setMaxRunTasksPerHost(2) block.setTasksMaxRunTime(20) job.blocks.append(block) block = af.Block('encode') block.setDependMask('convert') task = af.Task('encode') task.setCommand(cmd_encode)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Submit a Blender test job, printing its JSON before sending."""
import af

render_block = af.Block('render', 'blender')
render_block.setCommand('blender -b scene.blend -s @#@ -e @#@ -a')
render_block.setNumeric(1, 20, 2)

job = af.Job('Blender Test')
job.blocks.append(render_block)

print('')
job.output(True)
print('')

job.send()
Parser.error("No renders found.") Command = Args[-1] JobName = Options.jobname if JobName is None: JobName = Command.split(' ')[0] if Options.verbose: print('JobName: %s' % JobName) print('Renders: %s' % (','.join(Renders))) print('Command: %s' % Command) job = af.Job(JobName) job.setMaintenance() block = af.Block('Maintenance', Options.service) block.setParser(Options.parser) block.setCommand(Command) for render in Renders: task = af.Task(render) block.tasks.append(task) job.blocks.append(block) if Options.maxruntasks: job.setMaxRunningTasks(Options.maxruntasks) if Options.offline: job.offline()
continue # Copy an empty template for a shot: try: shutil.copytree(Options.template, shot_dest) except: errExit('Can`t create "%s"' % shot_dest) if Options.afanasy: job = af.Job('PUT ' + Options.dest) job.setUserName(Options.afuser) job.setMaxRunningTasks(Options.afmax) job.setMaxRunTasksPerHost(1) block = af.Block('put') block.setCapacity(Options.afcap) job.blocks.append(block) Put = os.environ['CGRU_LOCATION'] + '/utilities/put.py' Put = 'python "%s"' % os.path.normpath(Put) for shot in Out: if 'shot' in shot: shot = shot['shot'] else: continue if 'exists' in shot: continue
# # Creating a Job: # Create a Block(s) af first: blocks = [] if blockname == '': blockname = scenetype if blocktype == '': blocktype = scenetype if len(cmds) == 0: cmds.append(cmd) blocknames.append(blockname) if len(blockparsers) == 0 and blockparser != '': blockparsers.append(blockparser) i = 0 for cmd in cmds: block = af.Block(blocknames[i], blocktype) if len(blockparsers): block.setParser(blockparsers[i]) block.setWorkingDirectory(pwd) block.setNumeric(s, e, fpt, by) if scenetype == 'max': block.setCommand(cmd, False, False) else: block.setCommand(cmd) block.setCapacity(capacity) block.setVariableCapacity(capmin, capmax) if maxruntime != 0: block.setTasksMaxRunTime(maxruntime) if images != '': block.setFiles( afcommon.patternFromDigits(afcommon.patternFromStdC(images))) blocks.append(block) i += 1
def launch(self, *args, **kwargs):
    """launch renderer command

    Reads the submission UI fields, saves a timestamped copy of the
    scene, and sends one Afanasy block per renderable layer (or a single
    block for all layers). Renderer log verbosity is lowered for the
    render and restored afterwards.
    """
    # do nothing if there is no window (called externally)
    if not self.window:
        return

    # warn the user about the ignore settings
    try:
        dAO = pm.PyNode('defaultArnoldRenderOptions')
        ignore_attrs = [
            'ignoreSubdivision', 'ignoreDisplacement', 'ignoreBump',
            'ignoreMotionBlur'
        ]
        attr_values = [(attr, dAO.getAttr(attr)) for attr in ignore_attrs
                       if dAO.getAttr(attr) is True]
        if any(attr_values):
            msg_text = '<br>'.join(
                map(lambda x: '%s: %s' % (x[0], x[1]), attr_values))
            response = pm.confirmDialog(
                title='Ignore These Settings?',
                message=
                'You have ignored:<br><br>%s<br><br><b>Is that ok?</b>' %
                msg_text,
                button=['Yes', 'No'],
                defaultButton='No',
                cancelButton='No',
                dismissString='No')
            if response == 'No':
                return
    except pm.MayaNodeError:
        # no Arnold
        pass

    # check if rendering with persp camera
    try:
        wrong_camera_names = [
            'perspShape',
            'topShape',
            'sideShape',
            'fontShape',
            'persp1Shape',
            'perspShape1',
        ]
        renderable_cameras = [
            node for node in pm.ls(type='camera') if node.getAttr('renderable')
        ]
        if any(
                map(lambda x: x.name() in wrong_camera_names,
                    renderable_cameras)):
            response = pm.confirmDialog(
                title='Rendering with Persp?',
                message=
                'You are rendering with <b>Persp Camera<b><br><br>Is that ok?</b>',
                button=['Yes', 'No'],
                defaultButton='No',
                cancelButton='No',
                dismissString='No')
            if response == 'No':
                return

        if len(renderable_cameras) > 1:
            response = pm.confirmDialog(
                title='Rendering more than one Camera?',
                message=
                'You are rendering <b>more than one camera<b><br><br>Is that ok?</b>',
                button=['Yes', 'No'],
                defaultButton='No',
                cancelButton='No',
                dismissString='No')
            if response == 'No':
                return
        elif len(renderable_cameras) == 0:
            pm.confirmDialog(
                title='No <b>Renderable</b> camera!!!',
                message='There is no <b>renderable camera<b>!!!',
                button=['Ok'],
                defaultButton='Ok',
                cancelButton='Ok',
                dismissString='Ok')
            return
    except pm.MayaNodeError:
        # no default render globals node
        pass

    # get values from the submission UI
    start_frame = pm.intField('cgru_afanasy__start_frame', q=1, v=1)
    end_frame = pm.intField('cgru_afanasy__end_frame', q=1, v=1)
    frames_per_task = \
        pm.intField('cgru_afanasy__frames_per_task', q=1, v=1)
    by_frame = pm.intField('cgru_afanasy__by_frame', q=1, v=1)
    hosts_mask = pm.textField('cgru_afanasy__hosts_mask', q=1, text=True)
    hosts_exclude = pm.textField('cgru_afanasy__hosts_exclude', q=1, text=True)
    separate_layers = \
        pm.checkBox('cgru_afanasy__separate_layers', q=1, v=1)
    pause = pm.checkBox('cgru_afanasy__paused', q=1, v=1)
    life_time = pm.intField('cgru_afanasy__life_time', q=1, v=1)

    # check values
    if start_frame > end_frame:
        temp = end_frame
        end_frame = start_frame
        start_frame = temp

    frames_per_task = max(1, frames_per_task)
    by_frame = max(1, by_frame)

    # store without quota sign
    hosts_mask = hosts_mask.replace('"', '')
    hosts_exclude = hosts_exclude.replace('"', '')

    # store field values for the next session
    pm.optionVar['cgru_afanasy__start_frame_ov'] = start_frame
    pm.optionVar['cgru_afanasy__end_frame_ov'] = end_frame
    pm.optionVar['cgru_afanasy__frames_per_task_ov'] = frames_per_task
    pm.optionVar['cgru_afanasy__by_frame_ov'] = by_frame
    pm.optionVar['cgru_afanasy__hosts_mask_ov'] = hosts_mask
    pm.optionVar['cgru_afanasy__hosts_exclude_ov'] = hosts_exclude
    pm.optionVar['cgru_afanasy__separate_layers_ov'] = separate_layers
    pm.optionVar['cgru_afanasy__life_time_ov'] = life_time

    # get paths; the copy gets a unique timestamped name
    scene_name = pm.sceneName()
    datetime = '%s%s' % (time.strftime('%y%m%d-%H%M%S-'),
                         str(time.time() - int(time.time()))[2:5])
    filename = '%s.%s.mb' % (scene_name, datetime)
    project_path = pm.workspace(q=1, rootDirectory=1)

    # get output paths, set the RenderPass token to Beauty,
    # this will at least guarantee to get something
    outputs = \
        pm.renderSettings(
            fullPath=1,
            firstImageName=1,
            lastImageName=1,
            leaveUnmatchedTokens=1,
            customTokenString="RenderPass=Beauty"
        )

    job_name = os.path.basename(scene_name)

    logger.debug('%ss %se %sr' % (start_frame, end_frame, by_frame))
    logger.debug('scene = %s' % scene_name)
    logger.debug('file = %s' % filename)
    logger.debug('job_name = %s' % job_name)
    logger.debug('project_path = %s' % project_path)
    logger.debug('outputs = %s' % outputs)

    if pm.checkBox('cgru_afanasy__close', q=1, v=1):
        pm.deleteUI(self.window)

    drg = pm.PyNode('defaultRenderGlobals')
    render_engine = drg.getAttr('currentRenderer')

    job = af.Job(job_name)

    # Stash the renderer's log level; restored at the end.
    stored_log_level = None
    if render_engine == 'arnold':
        # set the verbosity level to warning+info
        aro = pm.PyNode('defaultArnoldRenderOptions')
        stored_log_level = aro.getAttr('log_verbosity')
        aro.setAttr('log_verbosity', 1)
        # set output to console
        aro.setAttr("log_to_console", 1)
    elif render_engine == 'redshift':
        # set the verbosity level to detailed+info
        redshift = pm.PyNode('redshiftOptions')
        stored_log_level = redshift.logLevel.get()
        redshift.logLevel.set(2)

    # save file
    pm.saveAs(filename, force=1, type='mayaBinary')

    # rename back to original name
    pm.renameFile(scene_name)

    # create the render command
    mrc = MayaRenderCommandBuilder(name=job_name,
                                   file_full_path=filename,
                                   render_engine=render_engine,
                                   project=project_path,
                                   by_frame=by_frame)

    # submit renders
    blocks = []
    if separate_layers:
        # render each layer separately
        rlm = pm.PyNode('renderLayerManager')
        layers = [
            layer for layer in rlm.connections() if layer.renderable.get()
        ]
        for layer in layers:
            mrc_layer = copy.copy(mrc)
            layer_name = layer.name()
            mrc_layer.name = layer_name
            mrc_layer.render_layer = layer_name

            # create a new block for this layer
            block = af.Block(
                layer_name, renderer_to_block_type.get(render_engine, 'maya'))
            block.setFiles(
                afcommon.patternFromDigits(
                    afcommon.patternFromStdC(
                        afcommon.patternFromPaths(outputs[0],
                                                  outputs[1]))).split(';'))
            block.setNumeric(start_frame, end_frame, frames_per_task,
                             by_frame)
            block.setCommand(mrc_layer.build_command())
            blocks.append(block)
    else:
        # create only one block
        block = af.Block('All Layers',
                         renderer_to_block_type.get(render_engine, 'maya'))
        block.setFiles(
            afcommon.patternFromDigits(
                afcommon.patternFromStdC(
                    afcommon.patternFromPaths(outputs[0],
                                              outputs[1]))).split(';'))
        block.setNumeric(start_frame, end_frame, frames_per_task, by_frame)
        block.setCommand(mrc.build_command())
        blocks.append(block)

    job.setFolder('input', os.path.dirname(filename))
    job.setFolder('output', os.path.dirname(outputs[0]))
    job.setHostsMask(hosts_mask)
    job.setHostsMaskExclude(hosts_exclude)
    if life_time > 0:
        job.setTimeLife(life_time * 3600)

    # The scene copy is deleted by the job's post command after it is done.
    job.setCmdPost('deletefiles "%s"' % os.path.abspath(filename))
    if pause:
        job.offline()

    # add blocks
    job.blocks.extend(blocks)

    status, data = job.send()
    if not status:
        pm.PopupError('Something went wrong!')
        print('data: %s' % data)

    # restore the stashed log level
    if render_engine == 'arnold':
        aro = pm.PyNode('defaultArnoldRenderOptions')
        aro.setAttr('log_verbosity', stored_log_level)
        # disable set output to console
        aro.setAttr("log_to_console", 0)
    elif render_engine == 'redshift':
        redshift = pm.PyNode('redshiftOptions')
        redshift.logLevel.set(stored_log_level)
def genBlock(self, hipfilename):
    """Build and return an af.Block for this afanasy ROP node.

    :param hipfilename: path of the hip file to render; it is
        interpolated into the command template via ``self.cmd % vars()``
        together with ``auxargs``, so both must stay local names that
        the template can reference.
    :return: a fully configured ``af.Block``.
    """
    if VERBOSE:
        if self.ropnode:
            print('Generating block for "%s" from "%s"' %
                  (self.ropnode.path(), self.afnode.path()))
        else:
            print('Generating command block from "%s"' %
                  (self.afnode.path()))

    auxargs = self.auxargs

    # Place hipfilename and auxargs:
    # vars() exposes the local names for %-interpolation into the
    # command template.
    cmd = self.cmd % vars()

    block = af.Block(self.name, self.service)
    block.setParser(self.parser)
    block.setCommand(cmd, self.cmd_useprefix)

    if self.preview != '' and self.generate_previews:
        block.setFiles([self.preview])

    if self.numeric:
        # Numeric block: tasks are generated from the frame range.
        block.setNumeric(
            self.frame_first, self.frame_last, self.frame_pertask,
            self.frame_inc)
    else:
        # Explicit task list: one af.Task per stored command, with an
        # optional preview file per task.
        t = 0
        for cmd in self.tasks_cmds:
            task = af.Task(self.tasks_names[t])
            task.setCommand(cmd)
            if len(self.tasks_previews) and self.generate_previews:
                task.setFiles([self.tasks_previews[t]])
            block.tasks.append(task)
            t += 1
        block.setFramesPerTask(self.frame_pertask)

    block.setSequential(self.frame_sequential)
    block.setCapacity(self.capacity)
    if self.capacity_min != -1 or self.capacity_max != -1:
        block.setVariableCapacity(self.capacity_min, self.capacity_max)

    # Runtime limits: maxruntime / progress_timeout are multiplied by
    # 3600, so the UI values are presumably in hours — TODO confirm.
    if self.minruntime > 0.01:
        block.setTaskMinRunTime(self.minruntime)
    if self.maxruntime > 0.01:
        block.setTaskMaxRunTime(int(self.maxruntime * 3600.0))
    if self.progress_timeout > 0.001:
        block.setTaskProgressChangeTimeout(
            int(self.progress_timeout * 3600.0))

    if self.file_check_enable:
        block.checkRenderedFiles(
            self.file_check_size_mb_min, self.file_check_size_mb_max)
    if self.file_check_skip_existing:
        block.skipExistingFiles()

    # Delete files in a block post command:
    if len(self.delete_files):
        post_cmd = 'deletefiles'
        for files in self.delete_files:
            # Turn afanasy frame-padding patterns (@####@) into shell
            # wildcards so every frame's file matches.
            post_cmd += ' "%s"' % re.sub('@#*@', '*', files)
        block.setCmdPost(post_cmd)

    # Per-block limits and masks are only applied when this block is a
    # sub-block of a multi-block job; otherwise the same settings are
    # presumably applied at the job level elsewhere — TODO confirm.
    if self.subblock:
        if self.max_runtasks > -1:
            block.setMaxRunningTasks(self.max_runtasks)
        if self.maxperhost > -1:
            block.setMaxRunTasksPerHost(self.maxperhost)
        if self.hosts_mask != '':
            block.setHostsMask(self.hosts_mask)
        if self.hosts_mask_exclude != '':
            block.setHostsMaskExclude(self.hosts_mask_exclude)
        if self.dependmask != '':
            if self.fullrangedepend:
                # Whole block waits for the masked blocks to finish.
                block.setDependMask(self.dependmask)
            else:
                # Per-task (frame-by-frame) dependency.
                block.setTasksDependMask(self.dependmask)
        if self.subtaskdepend:
            block.setDependSubTask()

    if self.min_memory > -1:
        # Scaled by 1024 — presumably GB in the UI to MB for the
        # server; confirm against the af API.
        block.setNeedMemory(self.min_memory * 1024)

    # Process Tickets
    if self.tickets_use:
        if self.tickets_auto:
            # Tickets collected automatically into self.tickets.
            for ticket in self.tickets:
                block.addTicket(ticket, self.tickets[ticket])
        if self.ticket_mem:
            block.addTicket('MEM', self.ticket_mem)
        if self.tickets_aux_use and self.tickets_aux_data is not None and len(
                self.tickets_aux_data):
            # Auxiliary tickets: comma separated "NAME:COUNT" pairs.
            for ticket in self.tickets_aux_data.split(','):
                ticket = ticket.strip().split(':')
                if len(ticket) != 2:
                    hou.ui.displayMessage(
                        'Invalid ticket data: "%s".' % ticket)
                    continue
                block.addTicket(ticket[0], int(ticket[1]))

    return block
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Minimal example: submit a 5-frame Maya batch render to Afanasy."""
import os
import af

# The @#@ placeholders are replaced by Afanasy with each task's
# first/last frame.
render_cmd = ('maya -batch -file %s/scene.mb'
              ' -command "afanasyBatch(@#@,@#@,1,1)"' % os.getcwd())

example_job = af.Job('Maya Example')
render_block = af.Block('render', 'maya')
render_block.setCommand(render_cmd)
render_block.setNumeric(1, 5, 2)
example_job.blocks.append(render_block)

# Show the constructed job before sending it.
print('')
example_job.output(True)
print('')
example_job.send()
# -*- coding: utf-8 -*-
"""Minimal example: submit a 10-frame 3ds Max render to Afanasy."""
import os
import af

# @#@ markers are substituted by Afanasy with the task frame range.
max_cmd = (
    '3dsmaxcmd "%s\\scene.max" -start:@#@ -end:@#@ -v:5 -showRFW:0 '
    '-o:"render/from_script.0000.jpg"' % os.getcwd()
)

test_job = af.Job('3d MAX Test')
frames_block = af.Block('Frames', 'max')
frames_block.setCommand(max_cmd)
frames_block.setNumeric(1, 10, 1)
test_job.blocks.append(frames_block)
test_job.send()
def launch(self, *args, **kwargs):
    """Validate the scene and submit render job(s) to Afanasy.

    Workflow:
      1. Run renderer-specific sanity checks (Arnold ignore flags and
         sample counts, Redshift sample-division and dome lights,
         renderable cameras) and let the user abort via dialogs.
      2. Read all submission parameters from the UI and persist them in
         ``pm.optionVar`` for the next session.
      3. Save a timestamped copy of the scene and build the render
         command(s) with ``MayaRenderCommandBuilder``.
      4. Create ``af.Job``/``af.Block`` objects — one job, one block per
         layer, or one job per layer depending on the "separate layers"
         mode — and send them to the Afanasy server.

    Called as a UI button callback; ``*args``/``**kwargs`` are ignored.
    Returns None.
    """
    # do nothing if there is no window (called externally)
    if not self.window:
        return

    # warn the user about the Arnold "ignore" toggles, which silently
    # change the rendered result
    try:
        dAO = pm.PyNode('defaultArnoldRenderOptions')
        ignore_attrs = [
            'ignoreSubdivision',
            'ignoreDisplacement',
            'ignoreBump',
            'ignoreMotionBlur'
        ]
        attr_values = [
            (attr, dAO.getAttr(attr))
            for attr in ignore_attrs
            if dAO.getAttr(attr) is True
        ]
        if any(attr_values):
            msg_text = '<br>'.join(
                map(lambda x: '%s: %s' % (x[0], x[1]), attr_values))
            response = pm.confirmDialog(
                title='Ignore These Settings?',
                message='You have ignored:<br><br>%s<br><br>'
                        '<b>Is that ok?</b>' % msg_text,
                button=['Yes', 'No'],
                defaultButton='No',
                cancelButton='No',
                dismissString='No')
            if response == 'No':
                return
    except (pm.MayaNodeError, pm.MayaAttributeError):
        # no Arnold
        pass

    # check if rendering with persp camera
    try:
        wrong_camera_names = [
            'perspShape', 'topShape', 'sideShape', 'fontShape',
            'persp1Shape', 'perspShape1',
        ]
        renderable_cameras = [
            node for node in pm.ls(type='camera')
            if node.getAttr('renderable')
        ]
        if any(map(lambda x: x.name() in wrong_camera_names,
                   renderable_cameras)):
            response = pm.confirmDialog(
                title='Rendering with Persp?',
                message='You are rendering with <b>Persp Camera<b>'
                        '<br><br>Is that ok?</b>',
                button=['Yes', 'No'],
                defaultButton='No',
                cancelButton='No',
                dismissString='No')
            if response == 'No':
                return

        if len(renderable_cameras) > 1:
            response = pm.confirmDialog(
                title='Rendering more than one Camera?',
                message='You are rendering <b>more than one camera<b>'
                        '<br><br>Is that ok?</b>',
                button=['Yes', 'No'],
                defaultButton='No',
                cancelButton='No',
                dismissString='No')
            if response == 'No':
                return
        elif len(renderable_cameras) == 0:
            pm.confirmDialog(
                title='No <b>Renderable</b> camera!!!',
                message='There is no <b>renderable camera<b>!!!',
                button=['Ok'],
                defaultButton='Ok',
                cancelButton='Ok',
                dismissString='Ok')
            return
    except pm.MayaNodeError:
        # no default render globals node
        pass

    drg = pm.PyNode('defaultRenderGlobals')
    render_engine = drg.getAttr('currentRenderer')

    # RENDERER SPECIFIC CHECKS
    if render_engine == 'redshift':
        # check if unifiedDisableDivision is 1 which will take too much
        # time to render
        dro = pm.PyNode('redshiftOptions')
        if dro.unifiedDisableDivision.get() == 1:
            pm.confirmDialog(
                title="Enabled **Don't Automatically Reduce Samples "
                      "of Other Effects**",
                message='It is not allowed to render with the following '
                        'option is enabled:<br>'
                        '<br>'
                        "Don't Automatically Reduce Samples of Other "
                        "Effects: Enabled<br>"
                        "<br>"
                        "Please DISABLE it!",
                button=['OK'],
                defaultButton='OK',
                cancelButton='OK',
                dismissString='OK')
            return

        # Check dome light backgrounds
        domes_to_fix = []
        rs_domes = pm.ls(type='RedshiftDomeLight')
        if rs_domes:
            for rs_dome in rs_domes:
                if rs_dome.getAttr('background_enable') == 1 \
                   or rs_dome.getAttr('backPlateEnabled') == 1:
                    domes_to_fix.append(rs_dome.name())

        if domes_to_fix:
            message = 'Some DomeLights have <b>BackGround Render ' \
                      'Enabled</b>:' \
                      '<br><br>%s<br><br>' \
                      'Are you Sure?' % '<br>'.join(domes_to_fix)
            response = pm.confirmDialog(
                title='Dome Lights with Background Enabled?',
                message=message,
                button=['Yes', 'No'],
                defaultButton='No',
                cancelButton='No',
                dismissString='No')
            if response == 'No':
                return

        # abort on license fail
        dro.abortOnLicenseFail.set(1)
    elif render_engine == 'arnold':
        # check if the samples are too high
        dAO = pm.PyNode('defaultArnoldRenderOptions')

        aa_samples = dAO.AASamples.get()
        diff_samples = dAO.GIDiffuseSamples.get()
        try:
            glossy_samples = dAO.GIGlossySamples.get()
        except AttributeError:
            # attribute was renamed in newer MtoA versions
            glossy_samples = dAO.GISpecularSamples.get()
        if int(pm.about(v=1)) >= 2017:
            sss_samples = dAO.GISssSamples.get()
        else:
            sss_samples = dAO.sssBssrdfSamples.get()

        # effective sample count is (AA * per-ray samples) squared
        total_diff_samples = aa_samples**2 * diff_samples**2
        total_glossy_samples = aa_samples**2 * glossy_samples**2
        total_sss_samples = aa_samples**2 * sss_samples**2

        max_allowed_diff_samples = 225
        max_allowed_glossy_samples = 100
        max_allowed_sss_samples = 800

        if total_diff_samples > max_allowed_diff_samples:
            pm.confirmDialog(
                title="Too Much Diffuse Samples!!!",
                message='You are using too much DIFFUSE SAMPLES (>%s)<br>'
                        '<br>'
                        'Please either reduce AA samples of Diffuse '
                        'Samples!!!' % max_allowed_diff_samples,
                button=['OK'],
                defaultButton='OK',
                cancelButton='OK',
                dismissString='OK')
            return

        if total_glossy_samples > max_allowed_glossy_samples:
            pm.confirmDialog(
                title="Too Much Glossy Samples!!!",
                message='You are using too much GLOSSY SAMPLES (>%s)<br>'
                        '<br>'
                        'Please either reduce AA samples of Glossy '
                        'Samples!!!' % max_allowed_glossy_samples,
                button=['OK'],
                defaultButton='OK',
                cancelButton='OK',
                dismissString='OK')
            return

        if total_sss_samples > max_allowed_sss_samples:
            pm.confirmDialog(
                title="Too Much SSS Samples!!!",
                message='You are using too much SSS SAMPLES (>%s)<br>'
                        '<br>'
                        'Please either reduce AA samples of SSS '
                        'Samples!!!' % max_allowed_sss_samples,
                button=['OK'],
                defaultButton='OK',
                cancelButton='OK',
                dismissString='OK')
            return

        # check Light Samples
        # check point lights with zero radius but more than one samples
        all_point_lights = pm.ls(type='pointLight')
        ridiculous_point_lights = []
        for point_light in all_point_lights:
            if point_light.aiRadius.get() < 0.1 \
               and point_light.aiSamples.get() > 1:
                ridiculous_point_lights.append(point_light)

        if ridiculous_point_lights:
            pm.confirmDialog(
                title="Unnecessary Samples on Point Lights!!!",
                message='You are using too much SAMPLES (>1)<br>'
                        '<br>'
                        'on <b>Point lights with zero radius</b><br>'
                        '<br>'
                        'Please reduce the samples to 1',
                button=['OK'],
                defaultButton='OK',
                cancelButton='OK',
                dismissString='OK')
            return

        # Check area lights with more than 2 samples
        all_area_lights = pm.ls(type=['areaLight', 'aiAreaLight'])
        ridiculous_area_lights = []
        for area_light in all_area_lights:
            if area_light.aiSamples.get() > 2:
                ridiculous_area_lights.append(area_light)

        if ridiculous_area_lights:
            pm.confirmDialog(
                title="Unnecessary Samples on Area Lights!!!",
                message='You are using too much SAMPLES (>2) on<br>'
                        '<br>'
                        '<b>Area Lights</b><br>'
                        '<br>'
                        'Please reduce the samples to 2',
                button=['OK'],
                defaultButton='OK',
                cancelButton='OK',
                dismissString='OK')
            return

        # Check directional lights with angle == 0 and samples > 1
        all_directional_lights = pm.ls(type='directionalLight')
        ridiculous_directional_lights = []
        dir_sample_attr_name = 'aiSamples'
        for directional_light in all_directional_lights:
            if directional_light.aiAngle.get() == 0 \
               and directional_light.attr(dir_sample_attr_name).get() > 1:
                ridiculous_directional_lights.append(directional_light)

        if ridiculous_directional_lights:
            pm.confirmDialog(
                title="Unnecessary Samples on Directional Lights!!!",
                message='You are using too much SAMPLES (>1) on <br>'
                        '<br>'
                        '<b>Directional lights with zero angle</b><br>'
                        '<br>'
                        'Please reduce the samples to 1',
                button=['OK'],
                defaultButton='OK',
                cancelButton='OK',
                dismissString='OK')
            return

    # get values
    start_frame = pm.intField('cgru_afanasy__start_frame', q=1, v=1)
    end_frame = pm.intField('cgru_afanasy__end_frame', q=1, v=1)
    frames_per_task = \
        pm.intField('cgru_afanasy__frames_per_task', q=1, v=1)
    by_frame = pm.intField('cgru_afanasy__by_frame', q=1, v=1)
    depend_mask_global = pm.textField(
        'cgru_afanasy__depend_mask_global', q=1, text=True)
    hosts_mask = pm.textField('cgru_afanasy__hosts_mask', q=1, text=True)
    hosts_exclude = pm.textField(
        'cgru_afanasy__hosts_exclude', q=1, text=True)
    separate_layers = \
        pm.radioButtonGrp('cgru_afanasy__separate_layers', q=1, sl=1)
    pause = pm.checkBox('cgru_afanasy__paused', q=1, v=1)
    life_time = pm.intField('cgru_afanasy__life_time', q=1, v=1)
    annotation = pm.textField('cgru_afanasy__annotation', q=1, text=True)
    submit_multiple_times = pm.intField(
        'cgru_afanasy__submit_multiple_times', q=1, v=1)
    errors_avoid_host = pm.intField(
        'cgru_afanasy__errors_avoid_host', q=1, v=1)
    errors_retries = pm.intField('cgru_afanasy__errors_retries', q=1, v=1)
    errors_task_same_host = pm.intField(
        'cgru_afanasy__errors_task_same_host', q=1, v=1)
    errors_forgive_time = pm.intField(
        'cgru_afanasy__errors_forgive_time', q=1, v=1)
    generate_previews = pm.checkBox(
        'cgru_afanasy__generate_previews', q=1, v=1)

    # check values
    if start_frame > end_frame:
        start_frame, end_frame = end_frame, start_frame

    frames_per_task = max(1, frames_per_task)
    by_frame = max(1, by_frame)

    # store without quota sign
    depend_mask_global = depend_mask_global.replace('"', '')
    hosts_mask = hosts_mask.replace('"', '')
    hosts_exclude = hosts_exclude.replace('"', '')

    # store field values
    pm.optionVar['cgru_afanasy__start_frame_ov'] = start_frame
    pm.optionVar['cgru_afanasy__end_frame_ov'] = end_frame
    pm.optionVar['cgru_afanasy__frames_per_task_ov'] = frames_per_task
    pm.optionVar['cgru_afanasy__by_frame_ov'] = by_frame
    pm.optionVar['cgru_afanasy__depend_mask_global_ov'] = \
        depend_mask_global
    pm.optionVar['cgru_afanasy__hosts_mask_ov'] = hosts_mask
    pm.optionVar['cgru_afanasy__hosts_exclude_ov'] = hosts_exclude
    pm.optionVar['cgru_afanasy__separate_layers_ov'] = separate_layers
    pm.optionVar['cgru_afanasy__life_time_ov'] = life_time
    pm.optionVar['cgru_afanasy__annotation_ov'] = annotation
    pm.optionVar[
        'cgru_afanasy__submit_multiple_times_ov'] = submit_multiple_times
    pm.optionVar['cgru_afanasy__errors_avoid_host_ov'] = errors_avoid_host
    pm.optionVar['cgru_afanasy__errors_retries_ov'] = errors_retries
    pm.optionVar[
        'cgru_afanasy__errors_task_same_host_ov'] = errors_task_same_host
    # NOTE(review): key kept as-is ("errors_errors_...") — the UI reads
    # back this exact optionVar name for its defaults.
    pm.optionVar[
        'cgru_afanasy__errors_errors_forgive_time_ov'] = errors_forgive_time
    pm.optionVar['cgru_afanasy__paused_ov'] = pause
    pm.optionVar['cgru_afanasy__generate_previews_ov'] = generate_previews

    # get paths
    scene_name = pm.sceneName()
    # timestamp plus three fractional-second digits, to make the saved
    # copy's name unique
    datetime = '%s%s' % (
        time.strftime('%y%m%d-%H%M%S-'),
        str(time.time() - int(time.time()))[2:5]
    )

    filename = '%s.%s.mb' % (scene_name, datetime)

    project_path = pm.workspace(q=1, rootDirectory=1)

    # get output paths, set the RenderPass token to Beauty,
    # this will at least guarantee to get something
    outputs = \
        pm.renderSettings(
            fullPath=1,
            firstImageName=1,
            lastImageName=1,
            leaveUnmatchedTokens=1,
            customTokenString="RenderPass=Beauty"
        )

    job_name = self.generate_job_name()

    logger.debug('%ss %se %sr' % (start_frame, end_frame, by_frame))
    logger.debug('scene = %s' % scene_name)
    logger.debug('file = %s' % filename)
    logger.debug('job_name = %s' % job_name)
    logger.debug('project_path = %s' % project_path)
    logger.debug('outputs = %s' % outputs)
    logger.debug('annotation = %s' % annotation)
    logger.debug('separate_layers = %s' % separate_layers)
    logger.debug('errors_avoid_host = %s' % errors_avoid_host)
    logger.debug('errors_retries = %s' % errors_retries)
    logger.debug('errors_task_same_host = %s' % errors_task_same_host)
    logger.debug('errors_forgive_time = %s' % errors_forgive_time)
    logger.debug('generate_previews = %s' % generate_previews)

    if pm.checkBox('cgru_afanasy__close', q=1, v=1):
        pm.deleteUI(self.window)

    stored_log_level = None
    if render_engine == 'arnold':
        # set the verbosity level to warning+info
        aro = pm.PyNode('defaultArnoldRenderOptions')
        stored_log_level = aro.getAttr('log_verbosity')
        aro.setAttr('log_verbosity', 2)
        # set output to console
        aro.setAttr("log_to_console", 1)
    elif render_engine == 'redshift':
        # set the verbosity level to detailed+info
        redshift = pm.PyNode('redshiftOptions')
        stored_log_level = redshift.logLevel.get()
        redshift.logLevel.set(2)

    # save a copy of the scene under the timestamped name...
    pm.saveAs(filename, force=1, type='mayaBinary')

    # ...and rename the open scene back to its original name
    pm.renameFile(scene_name)

    # create the render command
    mrc = MayaRenderCommandBuilder(
        name=job_name,
        file_full_path=filename,
        render_engine=render_engine,
        project=project_path,
        by_frame=by_frame)

    # submit renders
    jobs = []
    blocks = []

    #
    # separate_layers:
    # 1 -> None  -> submit one job with a single block with all layers
    # 2 -> Block -> submit one job with multiple blocks
    # 3 -> Job   -> submit multiple jobs with a single block per layer
    #
    if separate_layers in [1, 2]:
        job = af.Job(job_name)
        jobs.append(job)

    if separate_layers in [2, 3]:
        # render each layer separately
        rlm = pm.PyNode('renderLayerManager')
        layers = [
            layer for layer in rlm.connections(type=pm.nt.RenderLayer)
            if layer.renderable.get()
        ]
        for layer in layers:
            mrc_layer = copy.copy(mrc)
            layer_name = layer.name()
            mrc_layer.name = layer_name
            mrc_layer.render_layer = layer_name

            # create a new block for this layer
            block = af.Block(
                layer_name,
                renderer_to_block_type.get(render_engine, 'maya'))

            # Fix the output path for this layer by replacing the
            # "masterLayer" with the layer name without "rs_" at the
            # beginning.
            #
            # BUGFIX: take a *copy* of ``outputs``.  The previous code
            # aliased the list (``layer_outputs = outputs``), so the
            # first layer's replacement mutated ``outputs`` itself and
            # every following layer — and the job 'output' folder set
            # below — ended up with the first layer's path.
            layer_outputs = list(outputs)
            if layer_name != 'defaultRenderLayer':
                layer_outputs[0] = outputs[0].replace(
                    'masterLayer', layer_name.replace('rs_', ''))
                layer_outputs[1] = outputs[1].replace(
                    'masterLayer', layer_name.replace('rs_', ''))

            if generate_previews:
                outputs_split = afcommon.patternFromDigits(
                    afcommon.patternFromStdC(
                        afcommon.patternFromPaths(
                            layer_outputs[0],
                            layer_outputs[1]))).split(';')
                block.setFiles(outputs_split)

            block.setNumeric(
                start_frame, end_frame, frames_per_task, by_frame)
            command = mrc_layer.build_command()

            block.setErrorsAvoidHost(errors_avoid_host)
            block.setErrorsRetries(errors_retries)
            block.setErrorsTaskSameHost(errors_task_same_host)
            block.setErrorsForgiveTime(errors_forgive_time)

            block.setCommand(command)

            if separate_layers == 2:
                blocks.append(block)
            else:
                # one job per layer
                job = af.Job('%s - %s' % (job_name, layer_name))
                job.blocks = [block]
                jobs.append(job)
    else:
        # create only one block
        block = af.Block(
            'All Layers',
            renderer_to_block_type.get(render_engine, 'maya'))

        if generate_previews:
            block.setFiles(
                afcommon.patternFromDigits(
                    afcommon.patternFromStdC(
                        afcommon.patternFromPaths(
                            outputs[0], outputs[1]))).split(';'))

        block.setNumeric(start_frame, end_frame, frames_per_task, by_frame)
        command = mrc.build_command()
        block.setCommand(command)
        blocks.append(block)

    for job in jobs:
        job.setAnnotation(annotation)
        job.setFolder('input', os.path.dirname(filename))
        job.setFolder('output', os.path.dirname(outputs[0]))
        job.setDependMaskGlobal(depend_mask_global)
        job.setHostsMask(hosts_mask)
        job.setHostsMaskExclude(hosts_exclude)
        if life_time > 0:
            job.setTimeLife(life_time * 3600)
        else:
            # default life time: 240 hours
            job.setTimeLife(240 * 3600)

        # make the server delete the temporary scene copy with the job
        job.setCmdPost('deletefiles -s "%s"' % os.path.abspath(filename))
        if pause:
            job.offline()

        # add blocks
        if separate_layers in [1, 2]:
            job.blocks.extend(blocks)

        for i in range(submit_multiple_times):
            orig_job_name = job.data['name']
            job.setName('%s - %03i' % (orig_job_name, i + 1))
            status, data = job.send()

            # restore job name
            job.setName(orig_job_name)
            if not status:
                pm.PopupError('Something went wrong!')

    # restore log level
    if render_engine == 'arnold':
        aro = pm.PyNode('defaultArnoldRenderOptions')
        aro.setAttr('log_verbosity', stored_log_level)
        # disable set output to console
        aro.setAttr("log_to_console", 0)
    elif render_engine == 'redshift':
        redshift = pm.PyNode('redshiftOptions')
        redshift.logLevel.set(stored_log_level)
        # disable abort on license fail
        redshift.abortOnLicenseFail.set(0)
# NOTE(review): this is the block-construction part of a larger submit
# script; names such as `cmds`, `cmd`, `blockname`, `blocktype`,
# `blocknames`, `blockparser`, `blockparsers`, `scenetype`, `pwd`, `s`,
# `e`, `fpt`, `by`, `seq` and `capacity` are defined earlier, outside
# this excerpt.
blocks = []

# Fall back to the scene type for an empty block name/type.
if blockname == '':
    blockname = scenetype
if blocktype == '':
    blocktype = scenetype

# With no explicit command list, submit the single default command.
if len(cmds) == 0:
    cmds.append(cmd)
    blocknames.append(blockname)
if len(blockparsers) == 0 and blockparser != '':
    blockparsers.append(blockparser)

# Build one numeric af.Block per command.
for i, cmd in enumerate(cmds):
    block = af.Block(blocknames[i], blocktype)
    if len(blockparsers):
        block.setParser(blockparsers[i])
    block.setWorkingDirectory(pwd)
    block.setNumeric(s, e, fpt, by)
    if seq != 1:
        block.setSequential(seq)
    if scenetype == 'max':
        # Extra False flags presumably disable command prefixing for
        # 3ds Max — confirm against af.Block.setCommand signature.
        block.setCommand(cmd, False, False)
    else:
        block.setCommand(cmd)
    block.setCapacity(capacity)
def execute(self, context):
    """Blender operator entry point: submit the current scene to Afanasy.

    Packs/localizes scene data as configured, saves a timestamped copy
    of the .blend file, builds one af.Block per enabled render layer
    (or a single block), optionally appends a movie-making block, and
    sends the job to the Afanasy server.

    :param context: Blender context supplying the scene and settings.
    :return: ``{'FINISHED'}`` (Blender operator convention).
    """
    sce = context.scene
    cgru_props = sce.cgru
    rd = context.scene.render

    images = None

    engine_string = sce.render.engine

    # tracks whether the original scene was modified and must be
    # reloaded at the end
    sceneModified = False

    # set selected pool (static)
    CGRU_Submit.selected_pool = cgru_props.pools

    # Import Afanasy module:
    import af

    # Calculate temporary scene path:
    scenefile = bpy.data.filepath
    if scenefile.endswith('.blend'):
        scenefile = scenefile[:-6]
    renderscenefile = "%s.%s.blend" % (
        scenefile, time.strftime('%Y%m%d%H%M%S'))

    # Make all Local and pack all textures and objects
    if cgru_props.packLinkedObjects:
        bpy.ops.object.make_local(type='ALL')
        sceneModified = True
    if cgru_props.relativePaths:
        bpy.ops.file.make_paths_relative()
        sceneModified = True
    if cgru_props.packTextures:
        bpy.ops.file.pack_all()
        sceneModified = True

    # Get job name:
    jobname = cgru_props.jobname
    # If job name is empty use scene file name:
    if not jobname:
        jobname = os.path.basename(scenefile)
        # Try to cut standart '.blend' extension:
        if jobname.endswith('.blend'):
            jobname = jobname[:-6]

    # Get frames settings:
    fstart = sce.frame_start
    fend = sce.frame_end
    finc = sce.frame_step
    fpertask = cgru_props.fpertask
    sequential = cgru_props.sequential

    # Check frames settings:
    if fpertask < 1:
        fpertask = 1
    if fend < fstart:
        fend = fstart

    # Create a job:
    job = af.Job(jobname)

    servicename = 'blender'

    renderlayer_names = []
    layers = bpy.context.scene.render.layers
    if cgru_props.splitRenderLayers and len(layers) > 1:
        # one block per enabled render layer
        for layer in layers:
            if layer.use:
                renderlayer_names.append(layer.name)
    else:
        # single block; empty layer name leaves paths untouched
        renderlayer_names.append('')

    for renderlayer_name in renderlayer_names:
        block = None
        images = None

        # Create block
        if cgru_props.splitRenderLayers and len(layers) > 1:
            # a per-layer text datablock drives layer selection on the
            # render node (see LAYER_TEXT_BLOCK template)
            txt_block = bpy.data.texts.new("layer_%s" % renderlayer_name)
            txt_block.write(LAYER_TEXT_BLOCK.format(renderlayer_name))
            block = af.Block("layer_%s" % renderlayer_name, servicename)
        else:
            block = af.Block(engine_string, servicename)

        # Check current render engine
        if engine_string == 'BLENDER_RENDER':
            block.setParser('blender_render')
        elif engine_string == 'CYCLES':
            block.setParser('blender_cycles')

        if cgru_props.filepath != '':
            # Insert the layer name just before the frame-number
            # padding ('#') in the output path.
            pos = cgru_props.filepath.find('#')
            if pos != -1:
                if cgru_props.filepath[pos - 1] in '._- ':
                    images = "{0}{1}{2}".format(
                        cgru_props.filepath[:pos - 1],
                        renderlayer_name,
                        cgru_props.filepath[pos - 1:])
                else:
                    images = "{0}{1}{2}".format(
                        cgru_props.filepath[:pos],
                        renderlayer_name,
                        cgru_props.filepath[pos:])
            else:
                images = "{0}{1}".format(
                    cgru_props.filepath, renderlayer_name)

            # wrap '#' padding in '@' markers for afanasy patterns
            output_images = re.sub(r'(#+)', r'@\1@', images)
            # expand Blender's '//' relative prefix
            if output_images.startswith('//'):
                output_images = os.path.join(
                    os.path.dirname(renderscenefile),
                    output_images.replace('//', ''))
            if rd.file_extension not in output_images:
                block.setFiles([output_images + rd.file_extension])
            else:
                block.setFiles([output_images])

        if cgru_props.splitRenderLayers and len(layers) > 1:
            python_options = ' --python-text "layer_%s"' % renderlayer_name
        else:
            python_options = ''

        cmd = CMD_TEMPLATE.format(
            blend_scene=renderscenefile,
            render_engine=engine_string,
            python_options=python_options,
            output_options=' -o "%s" ' % images if images else '',
            frame_inc=finc)

        block.setCommand(cmd)
        block.setNumeric(fstart, fend, fpertask, finc)
        block.setSequential(sequential)
        block.setHostsMaskExclude(getHostsMaskExclude())

        job.blocks.append(block)

    if cgru_props.make_movie:
        # extra block that assembles the rendered frames into a movie
        movie_block = af.Block(cgru_props.mov_name + '-movie', 'movgen')
        # NOTE(review): a Block object is passed where a name mask is
        # presumably expected — verify against af.Block.setDependMask.
        movie_block.setDependMask(job.blocks[-1])
        movie_task = af.Task(cgru_props.mov_name)
        movie_block.tasks.append(movie_task)
        cmd = os.getenv('CGRU_LOCATION')
        cmd = os.path.join(cmd, 'utilities', 'moviemaker', 'makemovie.py')
        cmd = 'python "%s"' % cmd
        cmd += ' --codec "%s"' % cgru_props.mov_codecs
        cmd += ' -r "%sx%s"' % (cgru_props.mov_width, cgru_props.mov_height)
        cmd += ' "%s"' % images.replace('@#', '#').replace('#@', '#')
        cmd += ' "%s"' % cgru_props.mov_name
        movie_task.setCommand(cmd)
        job.blocks.append(movie_block)

    # Set job running parameters:
    if cgru_props.maxruntasks > -1:
        job.setMaxRunningTasks(cgru_props.maxruntasks)
    if cgru_props.priority > -1:
        job.setPriority(cgru_props.priority)
    if cgru_props.dependmask != '':
        job.setDependMask(cgru_props.dependmask)
    if cgru_props.dependmaskglobal != '':
        job.setDependMaskGlobal(cgru_props.dependmaskglobal)
    if cgru_props.hostsmask != '':
        job.setHostsMask(cgru_props.hostsmask)
    if cgru_props.hostsmaskexclude != '':
        job.setHostsMaskExclude(cgru_props.hostsmaskexclude)
    if cgru_props.pause:
        # NOTE(review): the Maya submitter calls job.offline() — verify
        # which spelling the af module actually provides.
        job.offLine()
    if cgru_props.previewPendingApproval:
        job.setPPApproval()

    # Make server to delete temporary file after job deletion:
    job.setCmdPost('deletefiles "%s"' % os.path.abspath(renderscenefile))

    # Print job information:
    job.output(True)

    # Save Temporary file
    bpy.ops.wm.save_as_mainfile(filepath=renderscenefile, copy=True)

    # Clean up temp text blocks
    if cgru_props.splitRenderLayers and len(layers) > 1:
        for text in bpy.data.texts:
            # NOTE(review): membership test on a bpy Text datablock;
            # presumably `text.name` was intended — confirm.
            if "layer_" in text:
                bpy.data.texts.remove(text)

    # Send job to server:
    result = job.send()
    if not result[0]:
        # NOTE(review): adjacent literals concatenate with no space
        # between the sentences.
        msg = ("An error occurred when submitting job to Afanasy."
               "Check console.")
        self.report({'ERROR'}, msg)
    else:
        msg = "Job id:%s successfully submit to Afanasy."
        self.report({'INFO'}, msg % result[1]['id'])

    # if the original scene was modified - we need to reload the scene file
    if sceneModified:
        bpy.ops.wm.open_mainfile(filepath=scenefile + ".blend")

    return {'FINISHED'}
# -*- coding: utf-8 -*-
"""Minimal example: submit a 20-frame Natron render to Afanasy."""
import af
import os

scene_path = os.path.join(os.getcwd(), 'scene.ntp')

# @#@-@#@ is replaced by Afanasy with each task's frame range.
natron_job = af.Job('Natron job.py')
write_block = af.Block('w_jpg', 'natron')
write_block.setCommand('natron -b -w w_jpg @#@-@#@ ' + scene_path)
write_block.setNumeric(1, 20, 2)
natron_job.blocks.append(write_block)
natron_job.send()