Example 1
    def include_script(self, target_expr):

        if isinstance(target_expr, basestring):
            # Name may be relative to the local stdlib.
            if target_expr.startswith('http'):
                try:
                    _, script_str = httplib2.Http().request(target_expr)
                except Exception as e:
                    print >> sys.stderr, "Include HTTP failed:", repr(e)
                    raise
            else:
                try:
                    with open(
                            resource_filename(
                                Requirement.parse("ciel-skywriting"),
                                os.path.join("lib/ciel/skywriting/stdlib/",
                                             target_expr)), "r") as fp:
                        script_str = fp.read()
                except Exception as e:
                    print >> sys.stderr, "Include file failed:", repr(e)
                    raise
        elif isinstance(target_expr, SWRealReference):
            script_str = self.get_string_for_ref(target_expr)
        else:
            raise BlameUserException(
                'Invalid object %s passed as the argument of include' %
                repr(target_expr))

        try:
            return CloudScriptParser().parse(script_str)
        except Exception as e:
            print >> sys.stderr, 'Error parsing included script:', repr(e)
            raise BlameUserException(
                'The included script did not parse successfully')
Example 2
    def build_task_descriptor(cls,
                              task_descriptor,
                              parent_task_record,
                              jar_lib=None,
                              args=None,
                              class_name=None,
                              object_ref=None,
                              n_outputs=1,
                              is_tail_spawn=False,
                              **kwargs):
        # More good stuff goes here.
        if jar_lib is None and kwargs.get("process_record_id", None) is None:
            raise BlameUserException("All Java2 invocations must either specify jar libs or an existing process ID")
        if class_name is None and object_ref is None and kwargs.get("process_record_id", None) is None:
            raise BlameUserException("All Java2 invocations must specify either a class_name or an object_ref, or else give a process ID")
        
        if jar_lib is not None:
            task_descriptor["task_private"]["jar_lib"] = jar_lib
            for jar_ref in jar_lib:
                task_descriptor["dependencies"].append(jar_ref)

        if not is_tail_spawn:
            sha = hashlib.sha1()
            hash_update_with_structure(sha, [args, n_outputs])
            hash_update_with_structure(sha, class_name)
            hash_update_with_structure(sha, object_ref)
            hash_update_with_structure(sha, jar_lib)
            name_prefix = "java2:%s:" % (sha.hexdigest())
            task_descriptor["expected_outputs"] = ["%s%d" % (name_prefix, i) for i in range(n_outputs)]            
        
        if class_name is not None:
            task_descriptor["task_private"]["class_name"] = class_name
        if object_ref is not None:
            task_descriptor["task_private"]["object_ref"] = object_ref
            task_descriptor["dependencies"].append(object_ref)
        if args is not None:
            task_descriptor["task_private"]["args"] = args
        add_package_dep(parent_task_record.package_ref, task_descriptor)
        
        return ProcExecutor.build_task_descriptor(task_descriptor, parent_task_record, n_extra_outputs=0, is_tail_spawn=is_tail_spawn, accept_ref_list_for_single=True, **kwargs)
Example 3
    def check_args_valid(cls, args, n_outputs):

        ProcessRunningExecutor.check_args_valid(args, n_outputs)
        if n_outputs != 1:
            raise BlameUserException("Stdinout executor must have one output")
        if "command_line" not in args:
            raise BlameUserException(
                'Incorrect arguments to the stdinout executor: %s' %
                repr(args))
Example 4
 def check_args_valid(cls, args, n_outputs):
     if "inputs" in args:
         for ref in args["inputs"]:
             if not isinstance(ref, SWRealReference):
                 raise BlameUserException(
                     "Simple executors need args['inputs'] to be a list of references. %s is not a reference."
                     % ref)
Example 5
    def build_task_descriptor(cls,
                              task_descriptor,
                              parent_task_record,
                              process_record_id=None,
                              is_fixed=False,
                              command=None,
                              proc_pargs=[],
                              proc_kwargs={},
                              force_n_outputs=None,
                              n_extra_outputs=0,
                              extra_dependencies=[],
                              is_tail_spawn=False,
                              accept_ref_list_for_single=False):

        #if process_record_id is None and start_command is None:
        #    raise BlameUserException("ProcExecutor tasks must specify either process_record_id or start_command")

        if process_record_id is not None:
            task_descriptor["task_private"]["id"] = process_record_id
        if command is not None:
            task_descriptor["task_private"]["command"] = command
        task_descriptor["task_private"]["proc_pargs"] = proc_pargs
        task_descriptor["task_private"]["proc_kwargs"] = proc_kwargs
        task_descriptor["dependencies"].extend(extra_dependencies)

        task_private_id = ("%s:_private" % task_descriptor["task_id"])
        if is_fixed:
            task_private_ref = SW2_FixedReference(task_private_id,
                                                  get_own_netloc())
            write_fixed_ref_string(
                pickle.dumps(task_descriptor["task_private"]),
                task_private_ref)
        else:
            task_private_ref = ref_from_string(
                pickle.dumps(task_descriptor["task_private"]), task_private_id)
        parent_task_record.publish_ref(task_private_ref)

        task_descriptor["task_private"] = task_private_ref
        task_descriptor["dependencies"].append(task_private_ref)

        if force_n_outputs is not None:
            if "expected_outputs" in task_descriptor and len(
                    task_descriptor["expected_outputs"]) > 0:
                raise BlameUserException(
                    "Task already had outputs, but force_n_outputs is set")
            task_descriptor["expected_outputs"] = [
                "%s:out:%d" % (task_descriptor["task_id"], i)
                for i in range(force_n_outputs)
            ]

        if not is_tail_spawn:
            if len(task_descriptor["expected_outputs"]
                   ) == 1 and not accept_ref_list_for_single:
                return SW2_FutureReference(
                    task_descriptor["expected_outputs"][0])
            else:
                return [
                    SW2_FutureReference(refid)
                    for refid in task_descriptor["expected_outputs"]
                ]
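
The method above fills in, pickles, and then replaces the "task_private" entry of a caller-supplied descriptor. As a point of reference, here is a minimal sketch of the descriptor shape it appears to expect, inferred only from the keys read and written in the snippet; the field values are illustrative placeholders, not real CIEL data.

# Hypothetical skeleton of the task_descriptor consumed by
# ProcExecutor.build_task_descriptor above. Only keys that the snippet
# reads or writes are shown; the values are illustrative assumptions.
task_descriptor = {
    "task_id": "task-0001",   # used to derive the ":_private" and ":out:N" names
    "task_private": {},       # populated with id/command/proc_pargs/proc_kwargs,
                              # then swapped for a reference to its pickled form
    "dependencies": [],       # extended with extra_dependencies and the private ref
    # "expected_outputs" is generated here when force_n_outputs is given,
    # so a fresh descriptor can leave it out.
}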
Example 6
    def build_task_descriptor(cls,
                              task_descriptor,
                              parent_task_record,
                              sw_file_ref=None,
                              start_env=None,
                              start_args=None,
                              cont_ref=None,
                              n_extra_outputs=0,
                              is_tail_spawn=False,
                              **kwargs):

        if sw_file_ref is None and cont_ref is None:
            raise BlameUserException(
                "Skywriting tasks must specify either a continuation object or a .sw file"
            )
        if n_extra_outputs > 0:
            raise BlameUserException(
                "Skywriting can't deal with extra outputs")

        if not is_tail_spawn:
            task_descriptor["expected_outputs"] = [
                "%s:retval" % task_descriptor["task_id"]
            ]
            task_descriptor["task_private"]["ret_output"] = 0

        if cont_ref is not None:
            task_descriptor["task_private"]["cont"] = cont_ref
            task_descriptor["dependencies"].append(cont_ref)
        else:
            # External call: SW file should be started from the beginning.
            task_descriptor["task_private"]["swfile_ref"] = sw_file_ref
            task_descriptor["dependencies"].append(sw_file_ref)
            task_descriptor["task_private"]["start_env"] = start_env
            task_descriptor["task_private"]["start_args"] = start_args
        add_package_dep(parent_task_record.package_ref, task_descriptor)

        return ProcExecutor.build_task_descriptor(task_descriptor,
                                                  parent_task_record,
                                                  is_tail_spawn=is_tail_spawn,
                                                  is_fixed=False,
                                                  **kwargs)
Example 7
    def build_task_descriptor(cls,
                              task_descriptor,
                              parent_task_record,
                              args,
                              n_outputs,
                              is_tail_spawn=False,
                              handler_name=None):

        # This is needed to work around the fact that stdinout has its own
        # implementation of build_task_descriptor, so we can't rely on
        # cls.handler_name to find the actual executor.
        if handler_name is None:
            handler_name = cls.handler_name

        if is_tail_spawn and len(
                task_descriptor["expected_outputs"]) != n_outputs:
            raise BlameUserException(
                "SimpleExecutor being built with delegated outputs %s but n_outputs=%d"
                % (task_descriptor["expected_outputs"], n_outputs))

        # Throw early if the args are bad
        cls.check_args_valid(args, n_outputs)

        # Discover required ref IDs for this executor
        reqd_refs = cls.get_required_refs(args)
        task_descriptor["dependencies"].extend(reqd_refs)

        sha = hashlib.sha1()
        hash_update_with_structure(sha, [args, n_outputs])
        name_prefix = "%s:%s:" % (handler_name, sha.hexdigest())

        # Name our outputs
        if not is_tail_spawn:
            task_descriptor["expected_outputs"] = [
                "%s%d" % (name_prefix, i) for i in range(n_outputs)
            ]

        # Add the args dict
        args_name = "%ssimple_exec_args" % name_prefix
        args_ref = ref_from_object(args, "pickle", args_name)
        parent_task_record.publish_ref(args_ref)
        task_descriptor["dependencies"].append(args_ref)
        task_descriptor["task_private"]["simple_exec_args"] = args_ref

        BaseExecutor.build_task_descriptor(task_descriptor, parent_task_record)

        if is_tail_spawn:
            return None
        else:
            return [
                SW2_FutureReference(x)
                for x in task_descriptor["expected_outputs"]
            ]
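
A recurring pattern in these build_task_descriptor methods is deterministic output naming: the expected output IDs are derived from a SHA-1 digest of the invocation (args plus n_outputs, and in other executors the class name, object ref, jar libs or binary), so repeating the same invocation yields the same names. The sketch below imitates that pattern with a hypothetical structure_digest helper that hashes a canonical JSON encoding; CIEL's hash_update_with_structure helper is not shown here and may walk the structure differently.

import hashlib
import json

def structure_digest(structure):
    # Hypothetical stand-in for CIEL's hash_update_with_structure: derive a
    # stable digest by hashing a canonical (sorted-key) JSON encoding of the
    # nested args structure. Illustrative only.
    sha = hashlib.sha1()
    sha.update(json.dumps(structure, sort_keys=True).encode("utf-8"))
    return sha.hexdigest()

# Deterministic output names in the style of the snippet above, using the
# stdinout executor's handler name and an example args dict.
args = {"command_line": ["wc", "-l"]}
n_outputs = 1
name_prefix = "stdinout:%s:" % structure_digest([args, n_outputs])
expected_outputs = ["%s%d" % (name_prefix, i) for i in range(n_outputs)]
print(expected_outputs)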
Example 8
    def build_task_descriptor(cls,
                              task_descriptor,
                              parent_task_record,
                              binary,
                              fn_ref=None,
                              args=None,
                              n_outputs=1,
                              is_tail_spawn=False,
                              **kwargs):
        if binary is None:
            raise BlameUserException(
                "All Haskell invocations must specify a binary")

        if not isabs(binary):
            binary = join(expanduser("~/.cabal/bin"), binary)

        task_descriptor["task_private"]["binary"] = binary
        if fn_ref is not None:
            task_descriptor["task_private"]["fn_ref"] = fn_ref
            task_descriptor["dependencies"].append(fn_ref)

        if not is_tail_spawn:
            sha = hashlib.sha1()
            hash_update_with_structure(sha, [args, n_outputs])
            hash_update_with_structure(sha, binary)
            hash_update_with_structure(sha, fn_ref)
            name_prefix = "hsk:%s:" % (sha.hexdigest())
            task_descriptor["expected_outputs"] = [
                "%s%d" % (name_prefix, i) for i in range(n_outputs)
            ]

        if args is not None:
            task_descriptor["task_private"]["args"] = args

        return ProcExecutor.build_task_descriptor(
            task_descriptor,
            parent_task_record,
            n_extra_outputs=0,
            is_tail_spawn=is_tail_spawn,
            accept_ref_list_for_single=True,
            **kwargs)
Example 9
    def check_args_valid(cls, args, n_outputs):

        FilenamesOnStdinExecutor.check_args_valid(args, n_outputs)
        if "lib" not in args or "class" not in args:
            raise BlameUserException('Incorrect arguments to the dotnet executor: %s' % repr(args))
Example 10
 def check_args_valid(cls, args, n_outputs):
     SimpleExecutor.check_args_valid(args, n_outputs)
     if "inputs" not in args or n_outputs != 1:
         raise BlameUserException(
             'Incorrect arguments to the sync executor: %s' % repr(args))
Example 11
 def check_args_valid(cls, args, n_outputs):
     if "class_name" not in args and "object_ref" not in args:
         raise BlameUserException("All Java2 invocations must specify either a class_name or an object_ref")
     if "jar_lib" not in args:
         raise BlameUserException("All Java2 invocations must specify a jar_lib")
Example 12
    def _guarded_run(self, task_private, task_descriptor, task_record):
        self.task_private = task_private
        self.task_record = task_record
        self.task_descriptor = task_descriptor
        self.expected_outputs = list(self.task_descriptor['expected_outputs'])

        self.error_details = None

        if "id" in task_private:
            id = task_private['id']
            self.process_record = self.process_pool.get_process_record(id)
        else:
            self.process_record = self.process_pool.get_soft_cache_process(
                self.__class__, task_descriptor["dependencies"])
            if self.process_record is None:
                self.process_record = self.process_pool.create_process_record(
                    None, "json")
                if "command" in task_private:
                    if isinstance(task_private["command"], SWRealReference):
                        # Command has been passed in as a reference.
                        command = [
                            retrieve_filename_for_ref(task_private["command"],
                                                      self.task_record, False)
                        ]
                        os.chmod(command[0], stat.S_IRWXU)
                    else:
                        command = [task_private["command"]]
                else:
                    command = self.get_command()
                command.extend([
                    "--write-fifo",
                    self.process_record.get_read_fifo_name(), "--read-fifo",
                    self.process_record.get_write_fifo_name()
                ])
                new_proc_env = os.environ.copy()
                new_proc_env.update(self.get_env())

                new_proc = subprocess.Popen(command,
                                            env=new_proc_env,
                                            close_fds=True)
                self.process_record.set_pid(new_proc.pid)

        # XXX: This will block until the attached process opens the pipes.
        reader = self.process_record.get_read_fifo()
        writer = self.process_record.get_write_fifo()
        self.reader = reader
        self.writer = writer

        #ciel.log('Got reader and writer FIFOs', 'PROC', logging.INFO)

        write_framed_json(("start_task", task_private), writer)

        try:
            if self.process_record.protocol == 'line':
                finished = self.line_event_loop(reader, writer)
            elif self.process_record.protocol == 'json':
                finished = self.json_event_loop(reader, writer)
            else:
                raise BlameUserException('Unsupported protocol: %s' %
                                         self.process_record.protocol)

        except MissingInputException:
            self.process_pool.delete_process_record(self.process_record)
            raise
Example 13
 def check_args_valid(cls, args, n_outputs):
     
     SimpleExecutor.check_args_valid(args, n_outputs)
     if "urls" not in args or "version" not in args or len(args["urls"]) != n_outputs:
         raise BlameUserException('Incorrect arguments to the grab executor: %s' % repr(args))
Example 14
 def check_args_valid(cls, args, n_outputs):
     ProcessRunningExecutor.check_args_valid(args, n_outputs)
     if "command_line" not in args:
         raise BlameUserException(
             'Incorrect arguments to the env executor: %s' % repr(args))
Example 15
 def check_args_valid(cls, args, n_outputs):
     if "binary" not in args:
         raise BlameUserException(
             "All Haskell invocations must specify a binary")