def _set_data(self, data):
    """
    Portions of the job spec should be generated by the system here
    (each time new data is loaded).
    """
    now = str(datetime.datetime.now())
    self._job_data = data
    if not data.get('uuid', False):
        self._job_data['uuid'] = JobSpec.new_uuid()
    self._job_data['date'] = now
    self._job_data['version'] = rrt.get_version()
    self._job_data['user'] = getpass.getuser()
    self._job_data['logs'] = os.path.join(JOB_LOGS_UNC,
                                          getpass.getuser(),
                                          self._job_data['uuid'],
                                          self._job_data['title'] + '.*.txt')
    for k in ['renderer', 'title', 'project', 'scene', 'start', 'end', 'step', 'output']:
        if not self._job_data.get(k, False):
            raise JobSpecError("%s cannot be blank." % k)
    try:
        self._job_data['net_share'] = get_share(self._job_data['project'])
    except Exception:
        raise JobSpecError("Can't find network share for project '%s'." % self._job_data['project'])
    try:
        self._job_data['net_drive'] = os.path.splitdrive(self._job_data['project'])[0]
    except Exception:
        raise JobSpecError("Can't find drive letter in project path: '%s'" % self._job_data['project'])
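# --- illustration only, not part of the original module ---
# A minimal sketch of the job data dict that _set_data() validates above; every key in
# the required-field loop must be present and non-blank. Names, values, and paths here
# are hypothetical.
example_job_data = {
    'renderer': 'maya',
    'title': 'shot_010',
    'project': r'X:\projects\demo_project',
    'scene': r'X:\projects\demo_project\scenes\shot_010.mb',
    'start': 1,
    'end': 100,
    'step': 1,
    'output': r'X:\projects\demo_project\images',
}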
def initializePlugin(mobject):
    mplugin = OpenMayaMPx.MFnPlugin(mobject, "Ringling College", get_version(), "Any")
    try:
        mplugin.registerCommand(kPluginCmdName, cmdCreator)
    except:
        sys.stderr.write("Failed to register command: %s\n" % kPluginCmdName)
        raise
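# Not in the original source: a Maya plug-in normally pairs initializePlugin with an
# uninitializePlugin that deregisters the same command on unload. A minimal sketch,
# assuming the same kPluginCmdName defined elsewhere in this module:
def uninitializePlugin(mobject):
    mplugin = OpenMayaMPx.MFnPlugin(mobject)
    try:
        mplugin.deregisterCommand(kPluginCmdName)
    except:
        sys.stderr.write("Failed to deregister command: %s\n" % kPluginCmdName)
        raise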
def _set_data(self, data):
    """
    Portions of the job spec should be generated by the system here
    (each time new data is loaded).
    """
    now = str(datetime.datetime.now())
    self._job_data = data
    self._job_data['date'] = now
    self._job_data['version'] = rrt.get_version()
    self._job_data['user'] = getpass.getuser()
    self._job_data['logs'] = os.path.join(JOB_LOGS_UNC,
                                          getpass.getuser(),
                                          '{job_id}',  # hpc-spool injects the job id into the path right before the job is submitted
                                          'logs',
                                          self._job_data['title'] + '.*.txt')
    self._job_data['stats'] = os.path.join(JOB_STATS_UNC,
                                           getpass.getuser(),
                                           '{job_id}',  # hpc-spool injects the job id into the path right before the job is submitted
                                           'stats',
                                           self._job_data['title'].replace(' ', '') + '.*.xml')
    for k in ['renderer', 'title', 'project', 'scene', 'start', 'end', 'step', 'output']:
        if not self._job_data.get(k, False):
            raise JobSpecError("%s cannot be blank." % k)
    try:
        self._job_data['net_share'] = get_share(self._job_data['project']) + 'hpc'
    except Exception:
        raise JobSpecError("Can't find network share for project '%s'." % self._job_data['project'])
    try:
        self._job_data['net_drive'] = os.path.splitdrive(self._job_data['project'])[0]
    except Exception:
        raise JobSpecError("Can't find drive letter in project path: '%s'" % self._job_data['project'])
def _set_data(self, data):
    """
    Portions of the job spec should be generated by the system here
    (each time new data is loaded).
    """
    now = str(datetime.datetime.now())
    self._job_data = data
    self._job_data["date"] = now
    self._job_data["version"] = rrt.get_version()
    self._job_data["user"] = getpass.getuser()
    self._job_data["logs"] = os.path.join(
        "D:\\hpc\\",
        getpass.getuser(),
        "{job_id}",  # hpc-spool injects the job id into the path right before the job is submitted
        "logs",
        self._job_data["title"] + ".*.txt",
    )
    self._job_data["stats"] = os.path.join(
        "D:\\hpc\\",
        getpass.getuser(),
        "{job_id}",  # hpc-spool injects the job id into the path right before the job is submitted
        "stats",
        self._job_data["title"].replace(" ", "") + ".*.xml",
    )
    for k in ["renderer", "title", "project", "scene", "start", "end", "step", "output"]:
        if not self._job_data.get(k, False):
            raise JobSpecError("%s cannot be blank." % k)
    try:
        self._job_data["net_share"] = get_share(self._job_data["project"])
    except Exception:
        raise JobSpecError("Can't find network share for project '%s'." % self._job_data["project"])
    try:
        self._job_data["net_drive"] = os.path.splitdrive(self._job_data["project"])[0]
    except Exception:
        raise JobSpecError("Can't find drive letter in project path: '%s'" % self._job_data["project"])
def __init__(self):
    LOG.info("Starting " + rrt.get_version())
    LOG.debug("Params: %r" % self._env)
    #LOG.debug("Params: %r" % env())
    jobtype = self._env['RENDERER']
    #jobtype = os.getenv('RENDERER', None)
    if jobtype not in self.__delegates__:
        raise MissingDelegateError
    self._delegate = self.__delegates__[jobtype]
    LOG.debug("Got delegate: %s" % self._delegate)
    __import__(self._delegate, globals(), locals())
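# Illustration only (not from the original source): __delegates__ is expected to map a
# RENDERER value to the importable module that handles that job type. The keys and
# module paths below are hypothetical placeholders.
# __delegates__ = {
#     'maya': 'rrt.hpc.delegates.maya',
#     'max': 'rrt.hpc.delegates.max',
# }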
import os

import rrt

LOG = rrt.get_log()
LOG.info("Starting %s" % rrt.get_version())

from rrt.hpc import env
ENV = env()

from maya import cmds

posix = lambda s: s.replace('\\', '/')

proj = posix(ENV['PROJECT'])
node_proj = posix(ENV['NODE_PROJECT'])

map_pairs = [
    (node_proj, proj),
]
for name in os.listdir(ENV['PROJECT']):
    full = os.path.join(ENV['PROJECT'], name)
    if os.path.isdir(full):
        map_pairs.append(('//' + name, posix(full)))
        map_pairs.append((node_proj + '/' + name, posix(full)))
        map_pairs.append((node_proj + '//' + name, posix(full)))

LOG.debug("Dirmaps:")
for m in map_pairs:
    LOG.debug(m)
    cmds.dirmap(mapDirectory=m)
cmds.dirmap(enable=True)
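# Illustration only, with hypothetical values: given
#   ENV['PROJECT']      = 'R:\\shows\\demo'
#   ENV['NODE_PROJECT'] = 'D:\\hpc\\demo'
# and a 'scenes' subdirectory under the project, the loop above registers mappings such as
#   ('D:/hpc/demo', 'R:/shows/demo')
#   ('//scenes', 'R:/shows/demo/scenes')
#   ('D:/hpc/demo/scenes', 'R:/shows/demo/scenes')
# so Maya's dirmap resolves node-local paths back to the network project share.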