Code example #1
0
def job_info(job: Job):
    """Print a human-readable summary of a job to stdout.

    Shows the job id, its creation arguments, all input paths (with the id of
    the creating job where one exists), all output paths, and the job/work
    directories.

    :param Job job: job to describe
    """
    from sisyphus import tools

    print("Job id: %s" % job._sis_id())
    print("Arguments:")
    for key, value in job._sis_kwargs.items():
        print("  %s : %s" % (key, str(value)))

    print("Inputs:")
    for attr, attr_value in job.__dict__.items():
        # skip internal bookkeeping attributes
        if attr.startswith('_sis_'):
            continue
        for path in tools.extract_paths(attr_value):
            # inputs are paths NOT created by this job
            if path.creator is job:
                continue
            if path.creator is None:
                print("  %s : %s" % (attr, path.path))
            else:
                print("  %s : %s %s" %
                      (attr, path.creator._sis_id(), path.path))

    print("Outputs:")
    for attr, attr_value in job.__dict__.items():
        if attr.startswith('_sis_'):
            continue
        for path in tools.extract_paths(attr_value):
            # outputs are paths created by this job itself
            if path.creator is job:
                print("  %s : %s" % (attr, path.path))

    print("Job dir: %s" % os.path.abspath(job._sis_path()))
    print("Work dir: %s" % job._sis_path(gs.WORK_DIR))
Code example #2
0
 def __init__(self, name, inputs):
     """Create a named target from the paths reachable inside *inputs*.

     :param str name: name of this target
     :param inputs: arbitrary object; every Path found inside it is required
     """
     self.name = name
     required = extract_paths(inputs)
     self._required = required
     # deterministic ordering for display / iteration
     self.required_full_list = sorted(required)
Code example #3
0
async def async_run(obj: Any):
    """
    Run and setup all jobs that are contained inside object and all jobs that are necessary.

    Registers *obj* as an output target in the graph and then polls until
    every Path extracted from it is available.

    :param obj: arbitrary object; every Path found inside it is awaited
    :return: None, once all extracted paths are available
    """
    # Register obj so the scheduler knows these outputs are wanted.
    sis_graph.add_target(graph.OutputTarget(name='async_run', inputs=obj))
    # Start with only the paths that are not finished yet.
    all_paths = {p for p in extract_paths(obj) if not p.available()}
    with async_context():
        while all_paths:
            await asyncio.sleep(gs.WAIT_PERIOD_BETWEEN_CHECKS)
            # Drop paths that became available since the last check.
            all_paths = {p for p in all_paths if not p.available()}
Code example #4
0
    def _sis_init(self, args, kwargs, parsed_args):
        """One-time internal setup of all ``_sis_*`` bookkeeping attributes.

        Runs the user-level ``__init__`` itself and afterwards collects every
        input path reachable from the instance dict.

        :param tuple args: positional arguments forwarded to the user ``__init__``
        :param dict kwargs: keyword arguments forwarded to the user ``__init__``
        :param dict parsed_args: fully resolved argument mapping (name -> value)
        """

        # Passing a Job (instead of one of its output Paths) as an argument is
        # usually a mistake; warn but continue.
        for key, arg in parsed_args.items():
            if isinstance(arg, Job):
                logging.warning(
                    "A Job instance was used as argument \"%s\" in \"%s\", this might result in undesired behavior"
                    % (key, self.__class__))

        self._sis_aliases = None
        self._sis_alias_prefixes = set()
        self._sis_vis_name = None
        self._sis_output_dirs = set()
        self._sis_outputs = {}
        self._sis_keep_value = None
        self._sis_hold_job = False

        self._sis_blocks = set()
        self._sis_kwargs = parsed_args
        self._sis_task_rqmt_overwrite = {}

        self._sis_job_lock = Job.get_lock()
        self._sis_is_finished = False
        self._sis_setup_since_restart = False

        # Optionally run jobs in a cleaned environment, keeping/setting only
        # the configured variables.
        self._sis_environment = None
        if gs.CLEANUP_ENVIRONMENT:
            self._sis_environment = tools.EnvironmentModifier()
            self._sis_environment.keep(gs.DEFAULT_ENVIRONMENT_KEEP)
            self._sis_environment.set(gs.DEFAULT_ENVIRONMENT_SET)

        if gs.AUTO_SET_JOB_INIT_ATTRIBUTES:
            self.set_attrs(parsed_args)
        # _sis_inputs is created before the user __init__ runs, then refilled below.
        self._sis_inputs = set()
        self.__init__(*args, **kwargs)

        # Every path reachable from the instance that was NOT created by this
        # job is an input dependency of this job.
        self._sis_inputs.update({
            p
            for p in tools.extract_paths(self.__dict__) if p.creator != self
        })

        for i in self._sis_inputs:
            i.add_user(self)

        self._sis_quiet = False
        self._sis_cleanable_cache = False
        self._sis_cleaned_or_not_cleanable = False
        self._sis_needed_for_which_targets = set()

        self._sis_stacktrace = []
Code example #5
0
    def _sis_init(self, args, kwargs, parsed_args):
        """One-time internal setup of all ``_sis_*`` bookkeeping attributes.

        Runs the user-level ``__init__`` itself and afterwards collects every
        input path reachable from the instance dict.

        :param tuple args: positional arguments forwarded to the user ``__init__``
        :param dict kwargs: keyword arguments forwarded to the user ``__init__``
        :param dict parsed_args: fully resolved argument mapping (name -> value)
        """

        self._sis_aliases = None
        self._sis_alias_prefixes = set()
        self._sis_vis_name = None
        self._sis_output_dirs = set()
        self._sis_outputs = {}
        self._sis_keep_value = None

        self._sis_blocks = set()
        self._sis_tracebacks = set()

        self._sis_kwargs = parsed_args
        self._sis_task_rqmt_overwrite = {}

        self._sis_work_dir = gs.WORK_DIR
        self._sis_job_lock = multiprocessing.Lock()
        self._sis_is_finished = False

        # Optionally run jobs in a cleaned environment, keeping/setting only
        # the configured variables.
        if gs.CLEANUP_ENVIRONMENT:
            from sisyphus.toolkit import EnvironmentModifier
            self._sis_environment = EnvironmentModifier()
            self._sis_environment.keep(gs.DEFAULT_ENVIRONMENT_KEEP)
            self._sis_environment.set(gs.DEFAULT_ENVIRONMENT_SET)

        if gs.AUTO_SET_JOB_INIT_ATTRIBUTES:
            self.set_attrs(parsed_args)
        # _sis_inputs is created before the user __init__ runs, then refilled below.
        self._sis_inputs = set()
        self.__init__(*args, **kwargs)

        # Every path reachable from the instance that was NOT created by this
        # job is an input dependency of this job.
        self._sis_inputs.update({p for p in tools.extract_paths(self.__dict__) if p.creator != self})

        for i in self._sis_inputs:
            i.add_user(self)

        self._sis_quiet = False
        self._sis_cleanable_cache = False
        self._sis_needed_for_which_targets = set()
Code example #6
0
    def __call__(cls, *args, **kwargs):
        """Create a Job, ensuring that each distinct job exists only once.

        The job identity is the hash of its fully resolved ``__init__``
        arguments plus the recipe-relative class path; a second call with
        equal arguments returns the cached instance.

        :param cls: the Job subclass being instantiated
        :return: new or cached Job instance
        """
        try:
            # 'sis_tags' is consumed here and must not reach __init__;
            # tags are restricted to filesystem-safe characters.
            if 'sis_tags' in kwargs:
                tags = kwargs['sis_tags']
                if tags is not None:
                    for tag in tags:
                        for char in tag:
                            assert char in '-.0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz'
                del kwargs['sis_tags']
            else:
                tags = None
            parsed_args = get_args(cls.__init__, args, kwargs)
        except TypeError:
            logging.error(
                'Wrong input arguments or missing __init__ function?\n'
                'Class: %s\nArguments: %s %s' %
                (str(cls), str(args), str(kwargs)))
            raise

        # create key: <recipe-relative module path>/<ClassName>.<arg hash>
        sis_hash = cls._sis_hash_static(parsed_args)
        module_name = cls.__module__
        recipe_prefix = gs.RECIPE_PREFIX + '.'
        assert module_name.startswith(recipe_prefix)
        sis_name = os.path.join(
            module_name[len(recipe_prefix):].replace('.', os.path.sep),
            cls.__name__)
        sis_id = "%s.%s" % (sis_name, sis_hash)

        # Update tags: if none were given, inherit the tags of all input paths
        if tags is None:
            tags = set()
            for p in tools.extract_paths(parsed_args):
                tags.update(p.tags)

        # check cache: an equal sis_id means this exact job already exists
        if sis_id in created_jobs:
            job = created_jobs[sis_id]
        else:
            # create new object
            job = super(Job, cls).__new__(cls)
            assert isinstance(job, Job)
            job._sis_tags = tags

            # store _sis_id
            job._sis_id_cache = sis_id

            # deepcopy so later mutation of the caller's argument objects
            # cannot change the job's recorded state
            job._sis_init(*copy.deepcopy((args, kwargs, parsed_args)))
            created_jobs[sis_id] = job

        # Add block
        # skip in worker mode
        if block.active_blocks:
            for b in block.active_blocks:
                b.add_job(job)
                job._sis_add_block(b)

        # Update alias prefixes
        job._sis_alias_prefixes.add(gs.ALIAS_AND_OUTPUT_SUBDIR)

        # add stacktrace information, if set to None or -1 use full stack
        stack_depth = gs.JOB_ADD_STACKTRACE_WITH_DEPTH + 1 if gs.JOB_ADD_STACKTRACE_WITH_DEPTH >= 0 else None
        # Fixed condition: the original `stack_depth > 1 or None` made the
        # `or None` branch dead code and raised TypeError (None > 1) whenever
        # stack_depth was None (i.e. when a full stack was requested).
        if stack_depth is None or stack_depth > 1:
            # add +1 for the traceback command itself, and remove it later
            stacktrace = traceback.extract_stack(limit=stack_depth)[:-1]
            job._sis_stacktrace.append(stacktrace)

        return job
Code example #7
0
File: flow.py  Project: christophmluscher/i6_core
 def add_hidden_input(self, input):
     """Register extra path dependencies that do not appear as explicit inputs.

     Used when a Path is converted to a string before being added to the
     network, so the dependency would otherwise be lost.
     """
     paths = extract_paths(input)
     self.hidden_inputs.update(paths)
Code example #8
0
 def update_values(self, report_values):
     """Replace the stored report values (when given) and refresh required paths.

     :param report_values: new report values, or None to keep the current ones
     """
     if report_values is not None:
         self._report_values = report_values
     required = extract_paths(self._report_values)
     self._required = required
     # deterministic ordering for display / iteration
     self.required_full_list = sorted(required)
Code example #9
0
    def __call__(cls, *args, **kwargs):
        """Create a Job, ensuring that each distinct job is created only once.

        The job identity is the hash of its fully resolved ``__init__``
        arguments plus the recipe-relative class path; a second call with
        equal arguments returns the cached instance.

        :param cls: the Job subclass being instantiated
        :return: new or cached Job instance
        """
        try:
            # 'sis_tags' is consumed here and must not reach __init__;
            # tags are restricted to filesystem-safe characters.
            if 'sis_tags' in kwargs:
                tags = kwargs['sis_tags']
                if tags is not None:
                    for tag in tags:
                        for char in tag:
                            assert char in '-.0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz'
                del kwargs['sis_tags']
            else:
                tags = None
            parsed_args = get_args(cls.__init__, args, kwargs)
        except TypeError as e:
            logging.error(
                'Wrong input arguments or missing __init__ function?\n'
                'Class: %s\nArguments: %s %s' %
                (str(cls), str(args), str(kwargs)))
            raise

        # create key: <recipe-relative module path>/<ClassName>.<arg hash>
        sis_hash = cls._sis_hash_static(parsed_args)
        module_name = cls.__module__
        recipe_prefix = gs.RECIPE_DIR + '.'
        assert module_name.startswith(recipe_prefix)
        sis_name = os.path.join(module_name[len(recipe_prefix):].replace('.', os.path.sep), cls.__name__)
        sis_id = "%s.%s" % (sis_name, sis_hash)

        # Update tags: if none were given, inherit the tags of all input paths
        if tags is None:
            tags = set()
            for p in tools.extract_paths(parsed_args):
                tags.update(p.tags)

        # check cache: an equal sis_id means this exact job already exists
        if sis_id in created_jobs:
            job = created_jobs[sis_id]
        else:
            # create new object
            job = super(Job, cls).__new__(cls)
            assert isinstance(job, Job)
            job._sis_tags = tags

            # store _sis_id
            job._sis_id_cache = sis_id

            # deepcopy so later mutation of the caller's argument objects
            # cannot change the job's recorded state
            job._sis_init(*copy.deepcopy((args, kwargs, parsed_args)))
            created_jobs[sis_id] = job

        # Add block
        # skip in worker mode
        if block.active_blocks:
            for b in block.active_blocks:
                b.add_job(job)
                job._sis_add_block(b)
            if gs.LOG_TRACEBACKS:
                # 6:-1 to remove Sisyphus related traces
                job._sis_tracebacks.add(tuple(traceback.format_stack()[6:-1]))

        # Update alias prefixes
        job._sis_alias_prefixes.add(gs.ALIAS_AND_OUTPUT_SUBDIR)

        return job