def build_command(self):
    """Assemble the iconvert command line from the currently set arguments."""
    # Flags recognized by iconvert, in the order they should be emitted.
    flag_names = (
        "v", "threads", "d", "g", "tile", "scanline", "compression",
        "quality", "no_copy_image", "adjust_time", "caption", "keyword",
        "clear_keywords", "attrib", "orientation", "rotcw", "rotccw",
        "rot180", "inplace", "sRGB", "separate", "contig", "no_clobber")
    # Subset of flags that carry a value (single value or 2-tuple).
    value_names = frozenset((
        "threads", "d", "g", "tile", "compression", "quality",
        "caption", "keyword", "attrib", "orientation"))

    cmd = [conf.get("Iconvert", "bin")]
    for name in flag_names:
        if not self.isArgSet(name):
            continue
        # Single-letter options use a short dash, long options a double dash.
        prefix = "-" if len(name) == 1 else "--"
        cmd.append(prefix + name)
        if name in value_names:
            value = self.getArg(name)
            if isinstance(value, tuple):
                cmd.extend(value[:2])
            else:
                cmd.append(value)
    return cmd
def getOutputSeq(scene_file, format):
    """
    A utility function for converting a scene file name into an
    output sequence name.
    """
    stem = os.path.basename(scene_file)
    stem = os.path.splitext(stem)[0]
    sequence = "%s.#.%s" % (stem, format)
    return os.path.join(conf.get("bp.output_dir"), stem, sequence)
def getOutputSeq(scene_file, format):
    """
    A utility function for converting a scene file name into an
    output sequence name.
    """
    name, _ = os.path.splitext(os.path.basename(scene_file))
    return os.path.join(
        conf.get("bp.output_dir"),
        name,
        "%s.#.%s" % (name, format))
def mkdir(path, check=True):
    """
    Make the given directory.

    If check is True and the path already exists, the mkdir is
    skipped and True is returned.
    """
    if check and os.path.exists(path):
        return True
    # Copy rather than append so the list held by conf is not mutated
    # across calls.
    command = conf.get("system.mkdir") + [path]
    # BUG FIX: the assembled mkdir command was discarded and the bare
    # path string was passed to system() instead.
    system(command)
def _execute(self, frames):
    """Render the given frames with a background Blender process."""
    threads = os.environ.get("PLOW_THREADS", "1")
    cmd = [
        conf.get("Blender", "bin"),
        "-b",
        self.getInput("scene_file").path,
        "-noaudio",
        "-noglsl",
        "-nojoystick",
        "-t", threads,
    ]
    for frame in frames:
        cmd += ["-f", str(frame)]
    self.system(cmd)
def _execute(self):
    """
    Run Blender with the setup script and register the outputs it
    reports back through a JSON side-channel file.
    """
    layer = self.getLayer()
    cmd = [conf.get("Blender", "bin")]
    cmd.append("-b")
    cmd.append(layer.getInput("scene_file").path)
    cmd.append("--python")
    cmd.append(os.path.join(
        os.path.dirname(__file__), "setup", "blender_setup.py"))

    # The setup script writes its discovered outputs here; the pid keeps
    # concurrent runs from clobbering each other's file.
    output_path = "%s/blender_outputs_%d.json" % (self.getTempDir(), os.getpid())
    os.environ["PLOW_BLENDER_SETUP_PATH"] = output_path
    self.system(cmd)

    # BUG FIX: json.load(open(...)) leaked the file handle; a context
    # manager closes it deterministically.
    with open(output_path, "r") as fp:
        outputs = json.load(fp)
    for output in outputs:
        layer.addOutput(output["pass"], output["path"], output)
def getLogDir(self):
    """Return the log directory for this job."""
    base = conf.get("bp.log_dir", JOB_NAME=self.__name)
    return os.path.join(base, self.__id)
def setName(self, name):
    """Set this job's name, expanded through the job-name template."""
    expanded = conf.get("bp.job_name_template", JOB_NAME=name)
    self.__name = expanded
def serialize(runner):
    """
    Convert the job from the internal blueprint structure to a plow
    JobSpec.

    Task containers become plow layers whose tasks carry the merged
    dependencies of both the task and its container; regular layers map
    one-to-one onto plow layers.
    """
    job = runner.getJob()
    # NOTE: an unused "base_name" local (runner.getArg("name", ...)) was
    # removed; the job name and log dir come straight from the job.
    job_name = job.getName()
    log_dir = job.getLogDir()

    spec = plow.JobSpec()
    spec.project = os.environ.get("PLOW_PROJECT", conf.get("bp.project"))
    spec.username = getpass.getuser()
    spec.uid = os.getuid()
    spec.paused = runner.getArg("pause")
    spec.name = job_name
    spec.logPath = log_dir
    spec.layers = []
    spec.env = {
        "BLUEPRINT_SCRIPTS_PATH": conf.get("bp.scripts_dir"),
        "BLUEPRINT_ARCHIVE": job.getPath()
    }
    spec.env.update(runner.getArg("env"))

    for layer in job.getLayers():
        if isinstance(layer, blueprint.Task):
            # These are added via their task containers
            continue
        elif isinstance(layer, blueprint.TaskContainer):
            task_cnt_spec = createLayerSpec(layer)
            task_cnt_spec.command = [
                conf.get("bp.scripts_dir") + "/env_wrapper.sh",
                "taskrun",
                "-debug",
                "-task",
                "%{TASK}",
                os.path.join(job.getPath(), "blueprint.yaml")
            ]
            task_cnt_spec.tasks = []
            spec.layers.append(task_cnt_spec)
            for task in layer.getTasks():
                task_spec = plow.TaskSpec()
                task_spec.name = task.getName()
                # A task depends on both its own and its container's deps.
                task_spec.depends = []
                task_spec.depends += setupTaskDepends(job, task)
                task_spec.depends += setupTaskDepends(job, layer)
                task_cnt_spec.tasks.append(task_spec)
        else:
            lspec = createLayerSpec(layer)
            lspec.depends = setupLayerDepends(job, layer)
            lspec.range = layer.getFrameRange()
            lspec.chunk = layer.getChunk()
            lspec.command = [
                conf.get("bp.scripts_dir") + "/env_wrapper.sh",
                "taskrun",
                "-debug",
                "-layer",
                layer.getName(),
                os.path.join(job.getPath(), "blueprint.yaml"),
                "-frame",
                "%{FRAME}"
            ]
            spec.layers.append(lspec)

    logger.debug(str(spec))
    return spec
def serialize(runner):
    """
    Convert the job from the internal blueprint structure to a plow
    JobSpec.

    TaskContainers are serialized as plow layers with explicit task
    lists; all other layers become frame-range driven plow layers.
    """
    job = runner.getJob()
    # An unused "base_name" local was dropped here; it was computed via
    # runner.getArg("name", ...) but never read.
    job_name = job.getName()
    log_dir = job.getLogDir()

    spec = plow.JobSpec()
    spec.project = os.environ.get("PLOW_PROJECT", conf.get("bp.project"))
    spec.username = getpass.getuser()
    spec.uid = os.getuid()
    spec.paused = runner.getArg("pause")
    spec.name = job_name
    spec.logPath = log_dir
    spec.layers = []
    spec.env = {"BLUEPRINT_SCRIPTS_PATH": conf.get("bp.scripts_dir"),
                "BLUEPRINT_ARCHIVE": job.getPath()}
    spec.env.update(runner.getArg("env"))

    for layer in job.getLayers():
        if isinstance(layer, blueprint.Task):
            # These are added via their task containers
            continue
        elif isinstance(layer, blueprint.TaskContainer):
            task_cnt_spec = createLayerSpec(layer)
            task_cnt_spec.command = [
                conf.get("bp.scripts_dir") + "/env_wrapper.sh",
                "taskrun",
                "-debug",
                "-task",
                "%{TASK}",
                os.path.join(job.getPath(), "blueprint.yaml"),
            ]
            task_cnt_spec.tasks = []
            spec.layers.append(task_cnt_spec)
            for task in layer.getTasks():
                task_spec = plow.TaskSpec()
                task_spec.name = task.getName()
                # Each task inherits its container's dependencies too.
                task_spec.depends = []
                task_spec.depends += setupTaskDepends(job, task)
                task_spec.depends += setupTaskDepends(job, layer)
                task_cnt_spec.tasks.append(task_spec)
        else:
            lspec = createLayerSpec(layer)
            lspec.depends = setupLayerDepends(job, layer)
            lspec.range = layer.getFrameRange()
            lspec.chunk = layer.getChunk()
            lspec.command = [
                conf.get("bp.scripts_dir") + "/env_wrapper.sh",
                "taskrun",
                "-debug",
                "-layer",
                layer.getName(),
                os.path.join(job.getPath(), "blueprint.yaml"),
                "-frame",
                "%{FRAME}",
            ]
            spec.layers.append(lspec)

    logger.debug(str(spec))
    return spec
def serialize(runner):
    """
    Convert the job from the internal blueprint structure to a plow
    JobSpec.

    Blueprint Tasks are grouped into shared plow layers keyed by their
    "group" argument; all other layers map one-to-one.
    """
    job = runner.getJob()
    # Unused "base_name" local removed (was runner.getArg("name", ...)).
    job_name = job.getName()
    log_dir = job.getLogDir()

    spec = plow.JobSpec()
    spec.project = os.environ.get("PLOW_PROJECT", conf.get("bp.project"))
    spec.username = getpass.getuser()
    spec.uid = os.getuid()
    spec.paused = runner.getArg("pause")
    spec.name = job_name
    spec.logPath = log_dir
    spec.layers = []
    spec.env = {
        "BLUEPRINT_SCRIPTS_PATH": conf.get("bp.scripts_dir"),
        "BLUEPRINT_ARCHIVE": job.getPath()
    }
    spec.env.update(runner.getArg("env"))

    # Plow layers created to store blueprint tasks, keyed by group name.
    task_layers = {}

    for layer in job.getLayers():
        if isinstance(layer, blueprint.Task):
            # BUG FIX: use a single group key for lookup, naming and
            # storage (the original looked up getArg("group") but stored
            # under getGroup()); dict.has_key() is Python-2 only.
            group = layer.getArg("group", "default")
            if group not in task_layers:
                task_layer = createLayerSpec(layer)
                task_layer.name = group
                task_layer.tasks = []
                # BUG FIX: the new layer was never registered in
                # task_layers and was appended to spec.layers on every
                # iteration, producing duplicate layers per group.
                task_layers[group] = task_layer
                spec.layers.append(task_layer)
            else:
                task_layer = task_layers[group]
                # Merge in the tags for the other layer. Probably not the best option.
                task_layer.tags.update(layer.getArg("tags", set()))
            # Use the highest values on any task.
            # BUG FIX: "task" was referenced before assignment here; the
            # resource requests come from the blueprint layer itself.
            task_layer.minCores = max(task_layer.minCores, layer.getArg("threads", 1))
            task_layer.minRamMb = max(task_layer.minRamMb, layer.getArg("ram"))
            task_layer.range = layer.getArg(
                "frame_range", runner.getArg("frame_range", "1000"))
            task_layer.command = [
                conf.get("bp.scripts_dir") + "/env_wrapper.sh",
                "taskrun",
                "-debug",
                "-task",
                "%{TASK}",
                os.path.join(job.getPath(), "blueprint.yaml"),
                "%{RANGE}",
            ]
            task = plow.TaskSpec()
            task.name = layer.getName()
            task.depends = setupTaskDepends(job, layer)
            task_layer.tasks.append(task)
        else:
            lspec = createLayerSpec(layer)
            lspec.depends = setupLayerDepends(job, layer)
            lspec.range = layer.getArg(
                "frame_range", runner.getArg("frame_range", "1000"))
            lspec.command = [
                conf.get("bp.scripts_dir") + "/env_wrapper.sh",
                "taskrun",
                "-debug",
                "-layer",
                layer.getName(),
                os.path.join(job.getPath(), "blueprint.yaml"),
                "%{RANGE}",
            ]
            spec.layers.append(lspec)

    return spec
def serialize(runner):
    """
    Convert the job from the internal blueprint structure to a plow
    JobSpec.

    Blueprint Tasks sharing a "group" argument are collected into one
    plow layer; other layers are serialized individually.
    """
    job = runner.getJob()
    # Dropped the unused "base_name" local (runner.getArg("name", ...)).
    job_name = job.getName()
    log_dir = job.getLogDir()

    spec = plow.JobSpec()
    spec.project = os.environ.get("PLOW_PROJECT", conf.get("bp.project"))
    spec.username = getpass.getuser()
    spec.uid = os.getuid()
    spec.paused = runner.getArg("pause")
    spec.name = job_name
    spec.logPath = log_dir
    spec.layers = []
    spec.env = {
        "BLUEPRINT_SCRIPTS_PATH": conf.get("bp.scripts_dir"),
        "BLUEPRINT_ARCHIVE": job.getPath()
    }
    spec.env.update(runner.getArg("env"))

    # Task layers get created to store blueprint tasks, keyed by group.
    task_layers = {}

    for layer in job.getLayers():
        if isinstance(layer, blueprint.Task):
            # BUG FIX: one consistent group key (the original mixed
            # getArg("group") and getGroup()); dict.has_key() does not
            # exist in Python 3.
            group = layer.getArg("group", "default")
            if group not in task_layers:
                task_layer = createLayerSpec(layer)
                task_layer.name = group
                task_layer.tasks = []
                # BUG FIX: register the layer so later tasks in the same
                # group reuse it, and append it to spec.layers only once.
                task_layers[group] = task_layer
                spec.layers.append(task_layer)
            else:
                task_layer = task_layers[group]
                # Merge in the tags for the other layer. Probably not the best option.
                task_layer.tags.update(layer.getArg("tags", set()))
            # Use the highest values on any task.
            # BUG FIX: "task" was used before assignment; read the
            # resource settings from the blueprint layer.
            task_layer.minCores = max(task_layer.minCores, layer.getArg("threads", 1))
            task_layer.minRamMb = max(task_layer.minRamMb, layer.getArg("ram"))
            task_layer.range = layer.getArg(
                "frame_range", runner.getArg("frame_range", "1000"))
            task_layer.command = [
                conf.get("bp.scripts_dir") + "/env_wrapper.sh",
                "taskrun",
                "-debug",
                "-task",
                "%{TASK}",
                os.path.join(job.getPath(), "blueprint.yaml"),
                "%{RANGE}",
            ]
            task = plow.TaskSpec()
            task.name = layer.getName()
            task.depends = setupTaskDepends(job, layer)
            task_layer.tasks.append(task)
        else:
            lspec = createLayerSpec(layer)
            lspec.depends = setupLayerDepends(job, layer)
            lspec.range = layer.getArg(
                "frame_range", runner.getArg("frame_range", "1000"))
            lspec.command = [
                conf.get("bp.scripts_dir") + "/env_wrapper.sh",
                "taskrun",
                "-debug",
                "-layer",
                layer.getName(),
                os.path.join(job.getPath(), "blueprint.yaml"),
                "%{RANGE}",
            ]
            spec.layers.append(lspec)

    return spec
def serialize(runner):
    """
    Convert the job from the internal blueprint structure to a plow
    JobSpec.

    Job name and log dir come from the "templates" config section;
    blueprint Tasks are grouped into shared plow layers by their
    "group" argument.
    """
    job = runner.getJob()
    base_name = runner.getArg("job_name", job.getName())
    job_name = conf.get("templates", "job_name", JOB_NAME=base_name)
    log_dir = conf.get("templates", "log_dir", JOB_NAME=base_name)

    spec = plow.JobSpecT()
    spec.project = os.environ.get("PLOW_PROJECT", conf.get("defaults", "project"))
    spec.username = getpass.getuser()
    spec.uid = os.getuid()
    # BUG FIX: the argument name was misspelled "pasued", so the pause
    # flag was never read.
    spec.paused = runner.getArg("pause")
    spec.name = job_name
    spec.logPath = log_dir
    spec.layers = []

    # Task layers get created to store blueprint tasks, keyed by group.
    task_layers = {}

    for layer in job.getLayers():
        if isinstance(layer, blueprint.Task):
            # BUG FIX: consistent group key (original looked up
            # getArg("group") but stored under getGroup());
            # dict.has_key() is Python-2 only.
            group = layer.getArg("group", "default")
            if group not in task_layers:
                task_layer = createLayerSpec(layer)
                task_layer.name = group
                task_layer.tasks = []
                # BUG FIX: register the new layer and append it to the
                # spec exactly once instead of on every iteration.
                task_layers[group] = task_layer
                spec.layers.append(task_layer)
            else:
                task_layer = task_layers[group]
                # Merge in the tags for the other layer. Probably not the best option.
                task_layer.tags.update(layer.getArg("tags", set()))
            # Use the highest values on any task.
            # BUG FIX: "task" was referenced before assignment; the
            # resource requests belong to the blueprint layer.
            task_layer.minCores = max(task_layer.minCores, layer.getArg("threads", 1))
            task_layer.maxCores = max(task_layer.maxCores, layer.getArg("max_threads", 0))
            task_layer.minRamMb = max(task_layer.minRamMb, layer.getArg("ram"))
            task_layer.command = [
                "%s/plow_wrapper.sh" % os.path.dirname(__file__),
                "%s/bin/taskrun" % os.environ.get("PLOW_ROOT", "/usr/local"),
                "-debug",
                os.path.join(job.getPath(), "blueprint.yaml"),
                "-task",
                "%{TASK}"
            ]
            task = plow.TaskSpecT()
            task.name = layer.getName()
            task.depends = setupTaskDepends(job, layer)
            task_layer.tasks.append(task)
        else:
            lspec = createLayerSpec(layer)
            lspec.depends = setupLayerDepends(job, layer)
            lspec.range = layer.getArg("frame_range", runner.getArg("frame_range", None))
            lspec.command = [
                "%s/plow_wrapper.sh" % os.path.dirname(__file__),
                "%s/bin/taskrun" % os.environ.get("PLOW_ROOT", "/usr/local"),
                "-debug",
                os.path.join(job.getPath(), "blueprint.yaml"),
                "-layer",
                layer.getName(),
                "-range",
                "%{RANGE}"
            ]
            spec.layers.append(lspec)

    return spec
def __init__(self, job):
    """Set up the archive location for the given job and create it."""
    self.__job = job
    archive_root = conf.get("bp.archive_dir", JOB_NAME=job.getName())
    self.__path = os.path.join(archive_root, job.getId())
    self.__create()
def getLogDir(self):
    """Return the directory this job's logs are written under."""
    log_root = conf.get("bp.log_dir", JOB_NAME=self.__name)
    return os.path.join(log_root, self.__id)