def _task_expl_groups(self, expls):
        if not self._config.group_by_error_messages:
            return [((task, ), msg) for task, msg in six.iteritems(expls)]

        groups = collections.defaultdict(list)
        for task, msg in six.iteritems(expls):
            groups[msg].append(task)
        return [(tasks, msg) for msg, tasks in six.iteritems(groups)]
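A standalone sketch of the grouping behaviour above, using made-up task names and error messages (not taken from the original codebase):

import collections

expls = {'TaskA(x=1)': 'disk full', 'TaskB(y=2)': 'disk full', 'TaskC()': 'timeout'}
groups = collections.defaultdict(list)
for task, msg in expls.items():
    groups[msg].append(task)  # one entry per distinct error message
print([(tasks, msg) for msg, tasks in groups.items()])
# e.g. [(['TaskA(x=1)', 'TaskB(y=2)'], 'disk full'), (['TaskC()'], 'timeout')]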
Example #2
 def _forward_attributes(self):
     # forward configured attributes to the task
     for reporter_attr, task_attr in six.iteritems(self.forward_reporter_attributes):
         setattr(self.task, task_attr, getattr(self.status_reporter, reporter_attr))
     try:
         yield self
     finally:
         # reset attributes again
         for reporter_attr, task_attr in six.iteritems(self.forward_reporter_attributes):
             setattr(self.task, task_attr, None)
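Example #3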
    def _find_or_create_task(self, task):
        with self._session() as session:
            if task.record_id is not None:
                logger.debug("Finding task with record_id [%d]",
                             task.record_id)

                task_record = session.query(TaskRecord).get(task.record_id)

                if not task_record:
                    raise Exception(
                        "Task with record_id, but no matching Task Record!")

                yield (task_record, session)
            else:
                task_record = TaskRecord(task_id=task._task.id,
                                         name=task.task_family,
                                         host=task.host)
                for (k, v) in six.iteritems(task.parameters):
                    task_record.parameters[k] = TaskParameter(name=k, value=v)

                session.add(task_record)

                yield (task_record, session)

            if task.host:
                task_record.host = task.host

        task.record_id = task_record.id
Example #4
    def no_unpicklable_properties(self):
        """
        Remove unpicklable properties before dumping the task and restore them afterwards.

        This method can be called in a subtask's dump method to ensure unpicklable
        properties won't break the dump.

        This method is a context manager and can be used as below:

        .. code-block:: python

            class DummyTask(luigi.Task):

                def _dump(self):
                    with self.no_unpicklable_properties():
                        pickle.dumps(self)

        """
        unpicklable_properties = tuple(
            finestrino.worker.TaskProcess.forward_reporter_attributes.values())
        reserved_properties = {}
        for property_name in unpicklable_properties:
            if hasattr(self, property_name):
                reserved_properties[property_name] = getattr(
                    self, property_name)
                setattr(self, property_name, 'placeholder_during_pickling')

        yield

        for property_name, value in six.iteritems(reserved_properties):
            setattr(self, property_name, value)
Example #5
def flatten(struct):
    """
    Creates a flat list of all the items in structured output (dicts, lists, items):

    .. code-block:: python

        >>> sorted(flatten(['foo', ['bar', 'troll']]))
        ['bar', 'foo', 'troll']
    """
    if struct is None:
        return []

    flat = []

    if isinstance(struct, dict):
        for _, result in six.iteritems(struct):
            flat += flatten(result)

        return flat

    if isinstance(struct, six.string_types):
        return [struct]

    try:
        iterator = iter(struct)
    except TypeError:
        return [struct]

    for result in iterator:
        flat += flatten(result)

    return flat
Example #6
    def _generate_retry_policy(self, task_retry_policy_dict):
        retry_policy_dict = self._config._get_retry_policy()._asdict()
        # print("+++ Retry Policy Dict +++ %s" %retry_policy_dict)

        retry_policy_dict.update({k: v for k, v in six.iteritems(task_retry_policy_dict) if v is not None})
        # print("+++ Retry Policy Dict 2 +++ %s" %retry_policy_dict)
        
        return RetryPolicy(**retry_policy_dict)
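A minimal sketch of the merge performed above, assuming RetryPolicy behaves like a namedtuple (the field names below are illustrative, not necessarily the scheduler's real ones):

import collections

# stand-in for the scheduler's RetryPolicy; real field names may differ
RetryPolicy = collections.namedtuple('RetryPolicy', ['retry_count', 'disable_window'])

defaults = RetryPolicy(retry_count=3, disable_window=3600)._asdict()
task_overrides = {'retry_count': 5, 'disable_window': None}  # None values keep the default

defaults.update({k: v for k, v in task_overrides.items() if v is not None})
print(RetryPolicy(**defaults))  # RetryPolicy(retry_count=5, disable_window=3600)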
Example #7
 def _used_resources(self):
     used_resources = collections.defaultdict(int)
     if self._resources is not None:
         for task in self._state.get_active_tasks_by_status(RUNNING):
             resources_running = getattr(task, "resources_running", task.resources)
             if resources_running:
                 for resource, amount in six.iteritems(resources_running):
                     used_resources[resource] += amount
     return used_resources
Example #8
    def _has_resources(self, needed_resources, used_resources):
        if needed_resources is None:
            return True

        available_resources = self._resources or {}
        for resource, amount in six.iteritems(needed_resources):
            if amount + used_resources[resource] > available_resources.get(resource, 1):
                return False
        
        return True
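A self-contained sketch of the same check, showing that a resource missing from the configured pool defaults to a capacity of 1 (names and amounts are made up):

import collections

def has_resources(needed, used, available):
    # mirrors _has_resources: unspecified resources default to a capacity of 1
    if needed is None:
        return True
    return all(amount + used[resource] <= available.get(resource, 1)
               for resource, amount in needed.items())

used = collections.defaultdict(int, {'gpu': 1})
print(has_resources({'gpu': 1}, used, {'gpu': 2}))  # True: 1 + 1 <= 2
print(has_resources({'db': 2}, used, {'gpu': 2}))   # False: 'db' defaults to capacity 1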
Example #9
    def _get_param_visibilities(self):
        param_visibilities = {}

        params = dict(self.get_params())

        for param_name, param_value in six.iteritems(self.param_kwargs):
            if params[param_name].visibility != ParameterVisibility.PRIVATE:
                param_visibilities[param_name] = params[
                    param_name].visibility.serialize()

        return param_visibilities
Example #10
    def run(self):
        artist_count = defaultdict(int)

        for t in self.input():
            with t.open('r') as in_file:
                for line in in_file:
                    _, artist, track = line.strip().split()
                    artist_count[artist] += 1

        with self.output().open('w') as out_file:
            for artist, count in six.iteritems(artist_count):
                out_file.write('{}\t{}\n'.format(artist, count))
Example #11
    def get_param_values(cls, params, args, kwargs):
        result = {}

        params_dict = dict(params)

        task_family = cls.get_task_family()

        # In case any exceptions are thrown
        exc_desc = '%s[args=%s, kwargs=%s]' % (task_family, args, kwargs)

        # Fill in the positional arguments
        positional_params = [(n, p) for n, p in params if p.positional]

        for i, arg in enumerate(args):
            if i >= len(positional_params):
                raise parameter.UnknownParameterException(
                    '%s: takes at most %d parameters (%d given)' %
                    (exc_desc, len(positional_params), len(args)))

            param_name, param_obj = positional_params[i]
            result[param_name] = param_obj.normalize(arg)

        # Then the keyword arguments
        for param_name, arg in six.iteritems(kwargs):
            if param_name in result:
                raise parameter.DuplicateParameterException(
                    '%s: parameter %s was already set as a positional parameter' %
                    (exc_desc, param_name))
            if param_name not in params_dict:
                raise parameter.UnknownParameterException(
                    '%s: unknown parameter %s' % (exc_desc, param_name))
            result[param_name] = params_dict[param_name].normalize(arg)

        # Then use the defaults for anything not filled in
        for param_name, param_obj in params:
            if param_name not in result:
                if not param_obj.has_task_value(task_family, param_name):
                    raise parameter.MissingParameterException(
                        "%s: requires the '%s' parameter to be set" %
                        (exc_desc, param_name))
                result[param_name] = param_obj.task_value(
                    task_family, param_name)

        def list_to_tuple(x):
            if isinstance(x, list) or isinstance(x, set):
                return tuple(x)
            else:
                return x

        # Sort it by the correct order and make a list
        return [(param_name, list_to_tuple(result[param_name]))
                for param_name, param_obj in params]
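Example #12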
    def find_all_by_parameters(self, task_name, session=None, **task_params):
        """
        Find tasks with the given task_name and the same parameters as the kwargs.
        """
        with self._session(session) as session:
            query = session.query(TaskRecord).join(TaskEvent).filter(
                TaskRecord.name == task_name)

            for (k, v) in six.iteritems(task_params):
                alias = sqlalchemy.orm.aliased(TaskParameter)
                query = query.join(alias).filter(alias.name == k,
                                                 alias.value == v)

            tasks = query.order_by(TaskEvent.ts)

            for task in tasks:
                # Sanity checks
                assert all(
                    k in task.parameters and v == str(task.parameters[k].value)
                    for (k, v) in six.iteritems(task_params))

                yield task
Example #13
 def _apply_regex(self, regex, input):
     import re
     re_match = re.match(regex, input)
     if re_match and any(re_match.groups()):
         kwargs = {}
         has_val = False
         for k, v in six.iteritems(re_match.groupdict(default="0")):
             val = int(v)
             if val > -1:
                 has_val = True
                 kwargs[k] = val
         # only build the timedelta once every named group has been inspected
         if has_val:
             return datetime.timedelta(**kwargs)
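A standalone sketch of the same idea, turning named regex groups into datetime.timedelta keyword arguments; the regex and input below are illustrative, not taken from the original module:

import datetime
import re

def apply_regex(regex, value):
    re_match = re.match(regex, value)
    if re_match and any(re_match.groups()):
        kwargs = {k: int(v) for k, v in re_match.groupdict(default="0").items()}
        return datetime.timedelta(**kwargs)

print(apply_regex(r'(?:(?P<hours>\d+)h)?(?:(?P<minutes>\d+)m)?', '2h30m'))  # 2:30:00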
Example #14
 def _key(self, task_name, family, unbatched_args):
     if self._config.batch_mode == 'all':
         return task_name
     elif self._config.batch_mode == 'family':
         return family
     elif self._config.batch_mode == 'unbatched_params':
         param_str = six.u(', ').join(
             six.u('{}={}').format(*kv)
             for kv in six.iteritems(unbatched_args))
         return six.u('{}({})').format(family, param_str)
     else:
         raise ValueError(
             'Unknown batch mode for batch notifier: {}'.format(
                 self._config.batch_mode))
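For the 'unbatched_params' mode, the generated key looks roughly like this (family and parameters below are hypothetical):

family = 'MyTask'
unbatched_args = {'date': '2020-01-01', 'branch': 'main'}
param_str = ', '.join('{}={}'.format(*kv) for kv in unbatched_args.items())
print('{}({})'.format(family, param_str))  # e.g. MyTask(date=2020-01-01, branch=main)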
Example #15
 def send_email(self):
     try:
         for owner, failures in six.iteritems(self._fail_counts):
             self._send_email(
                 fail_counts=failures,
                 disable_counts=self._disabled_counts[owner],
                 scheduling_counts=self._scheduling_fail_counts[owner],
                 fail_expls=self._fail_expls[owner],
                 owner=owner,
             )
     finally:
         self._update_next_send()
         self._fail_counts.clear()
         self._disabled_counts.clear()
         self._scheduling_fail_counts.clear()
         self._fail_expls.clear()
Example #16
 def _email_body(self, fail_counts, disable_counts, scheduling_counts,
                 fail_expls):
     expls = {(name, fail_count, disable_counts[name],
               scheduling_counts[name]): self._expl_body(fail_expls[name])
              for name, fail_count in six.iteritems(fail_counts)}
     expl_groups = sorted(self._task_expl_groups(expls),
                          key=self._expls_key)
     body_lines = []
     for tasks, msg in expl_groups:
         body_lines.append(self._format_tasks(tasks))
         body_lines.append(msg)
     body = six.u('\n').join(filter(None, body_lines)).rstrip()
     if self._email_format == 'html':
         return six.u('<ul>\n{}\n</ul>').format(body)
     else:
         return body
Example #17
    def get_all_params(cls):
        """ 
        Compiles and returns all parameters for all :py:class:`Task`.

        :return: a generator of (task_name, is_without_section, param_name, param_obj) tuples
        """
        for task_name, task_cls in six.iteritems(cls._get_reg()):
            #print('TaskName=%s && TaskClass=%s' % (task_name, task_cls))

            if task_cls == cls.AMBIGUOUS_CLASS:
                continue

            for param_name, param_obj in task_cls.get_params():
                #print('TaskCls:ParamName=%s , ParamObj=%s' % (param_name, param_obj))
                yield task_name, (
                    not task_cls.use_cmdline_section), param_name, param_obj
Example #18
def getpaths(struct):
    """
    Maps all Tasks in a structured data object to their .output().
    """
    if isinstance(struct, Task):
        return struct.output()
    elif isinstance(struct, dict):
        return struct.__class__(
            (k, getpaths(v)) for k, v in six.iteritems(struct))
    elif isinstance(struct, (list, tuple)):
        return struct.__class__(getpaths(r) for r in struct)
    else:
        # Remaining case: assume struct is iterable
        try:
            return [getpaths(r) for r in struct]
        except TypeError:
            raise Exception("Cannot map %s to Task/dict/list" % str(struct))
    def run(self):
        process_per_user = defaultdict(int)

        with self.input().open('r') as infile:
            for line in infile:
                username = line.split()[0]
                process_per_user[username] += 1

            toplist = sorted(
                six.iteritems(process_per_user), 
                key=lambda x: x[1],
                reverse=True,
            )

            with self.output().open('w') as outfile:
                for user, n_processes in toplist:
                    print(n_processes, user, file=outfile)
Example #20
    def announce_scheduling_failure(self, task_name, family, params, expl, owners, **kwargs):
        if not self._config.batch_emails:
            return 

        worker_id = kwargs['worker']
        batched_params, _ = self._state.get_batcher(worker_id, family)

        if batched_params:
            unbatched_params = {
                param: value
                for param, value in six.iteritems(params) \
                    if param not in batched_params
            }
        else:
            unbatched_params = params

        self._email_batcher.add_scheduling_fail(task_name, family, unbatched_params, expl, owners)
Example #21
 def trigger_event(self, event, *args, **kwargs):
     """Trigger that calls all of the specified events associated with this class.
     
     Arguments:
         event {[type]} -- [description]
     """
     for event_class, event_callbacks in six.iteritems(
             self._event_callbacks):
         if not isinstance(self, event_class):
             continue
         for callback in event_callbacks.get(event, []):
             try:
                 # callbacks are protected
                 callback(*args, **kwargs)
             except KeyboardInterrupt:
                 return
             except BaseException:
                 logger.exception("Error in event callback for %r", event)
Example #22
    def _purge_children(self):
        """
        Find dead children and put a response on the result queue.

        :return:
        """
        for task_id, p in six.iteritems(self._running_tasks):
            if not p.is_alive() and p.exitcode:
                error_msg = 'Task {} died unexpectedly with exit code {}'.format(task_id, p.exitcode)
                p.task.trigger_event(Event.PROCESS_FAILURE, p.task, error_msg)
            elif p.timeout_time is not None and time.time() > float(p.timeout_time) and p.is_alive():
                p.terminate()
                error_msg = 'Task {} timed out after {} seconds and was terminated.'.format(task_id, p.task.worker_timeout)
                p.task.trigger_event(Event.TIMEOUT, p.task, error_msg)
            else:
                continue

            logger.info(error_msg)
            self._task_result_queue.put((task_id, FAILED, error_msg, [], []))
Example #23
    def to_str_params(self, only_significant=False, only_public=False):
        """Convert all parameters to a str->str hash
        
        Keyword Arguments:
            only_significant {bool} -- [description] (default: {False})
            only_public {bool} -- [description] (default: {False})
        """
        params_str = {}
        params = dict(self.get_params())
        for param_name, param_value in six.iteritems(self.param_kwargs):
            if (((not only_significant) or params[param_name].significant)
                    and ((not only_public) or params[param_name].visibility
                         == ParameterVisibility.PUBLIC)
                    and params[param_name].visibility !=
                    ParameterVisibility.PRIVATE):
                params_str[param_name] = params[param_name].serialize(
                    param_value)

        return params_str
Example #24
 def _warn_on_wrong_param_types(self):
     params = dict(self.get_params())
     for param_name, param_value in six.iteritems(self.param_kwargs):
         params[param_name]._warn_on_wrong_param_type(
             param_name, param_value)
Example #25
    def add_task(self, task_id=None, status=PENDING, runnable=True,
                 deps=None, new_deps=None, expl=None, resources=None,
                 priority=0, family='', module=None, params=None, param_visibilities=None, accepts_messages=False,
                 assistant=False, tracking_url=None, worker=None, batchable=None,
                 batch_id=None, retry_policy_dict=None, owners=None, **kwargs):
        """
        * add task identified by task_id if it doesn't exist
        * if deps is not None, update dependency list
        * update status of task
        * add additional workers/stakeholders
        * update priority when needed
        """
        assert worker is not None
        worker_id = worker
        worker = self._update_worker(worker_id)

        resources = {} if resources is None else resources.copy()

        if retry_policy_dict is None:
            retry_policy_dict = {}

        retry_policy = self._generate_retry_policy(retry_policy_dict)
        #print("++++++ Retry Policy : %s" % retry_policy)

        # print("++++ Is Worker Enabled ? %s ++++" % worker.enabled)
        if worker.enabled:
            _default_task = self._make_task(
                task_id=task_id, status=PENDING, deps=deps, resources=resources,
                priority=priority, family=family, module=module, params=params, param_visibilities=param_visibilities,
            )
        else:
            _default_task = None

        # print("++++++ Default Task is %s +++++++++" % _default_task)
        task = self._state.get_task(task_id, setdefault=_default_task)

        # print("TASK STATUS IS %s" % task.status)
        if task is None or (task.status != RUNNING and not worker.enabled):
            return

        # for setting priority, we'll sometimes create tasks with unset family and params
        if not task.family:
            task.family = family
        if not getattr(task, 'module', None):
            task.module = module
        if not getattr(task, 'param_visibilities', None):
            task.param_visibilities = _get_default(param_visibilities, {})
        if not task.params:
            task.set_params(params)

        # print("&&&& Task Family %s &&&&" % task.family)
        # print("&&&& Task Module %s &&&&" % task.module)
        # print("&&&& Param Visibility is ... %s &&&&&" % task.param_visibilities)
        # print("&&&& Task Params is ... %s &&&&&" % task.params)

        if batch_id is not None:
            task.batch_id = batch_id
        # print("&&&& Task Batch ID is ... %s &&&&&" % task.batch_id)
        
        if status == RUNNING and not task.worker_running:
            task.worker_running = worker_id
            if batch_id:
                # copy resources_running of the first batch task
                batch_tasks = self._state.get_batch_running_tasks(batch_id)
                task.resources_running = batch_tasks[0].resources_running.copy()
            task.time_running = time.time()

        if accepts_messages is not None:
            task.accepts_messages = accepts_messages

        if tracking_url is not None or task.status != RUNNING:
            task.tracking_url = tracking_url
            if task.batch_id is not None:
                for batch_task in self._state.get_batch_running_tasks(task.batch_id):
                    batch_task.tracking_url = tracking_url
        # print("&&&& Tracking URL ... %s &&&&&" % task.tracking_url)

        if batchable is not None:
            task.batchable = batchable

        if task.remove is not None:
            task.remove = None  # unmark task for removal so it isn't removed after being added

        if expl is not None:
            task.expl = expl
            if task.batch_id is not None:
                for batch_task in self._state.get_batch_running_tasks(task.batch_id):
                    batch_task.expl = expl

        task_is_not_running = task.status not in (RUNNING, BATCH_RUNNING)
        task_started_a_run = status in (DONE, FAILED, RUNNING)
        running_on_this_worker = task.worker_running == worker_id
        if task_is_not_running or (task_started_a_run and running_on_this_worker) or new_deps:
            # don't allow re-scheduling of task while it is running, it must either fail or succeed on the worker actually running it
            if status != task.status or status == PENDING:
                # Update the DB only if there was an actual change, to prevent noise.
                # We also check for status == PENDING b/c that's the default value
                # (so checking only for status != task.status would lie)
                self._update_task_history(task, status)

            #print("_____ Task is   _____ %s" % task)
            #print("_____ Status is _____ %s" % status)
            #print("_____ Config is _____ %s" % self._config)

            self._state.set_status(task, PENDING if status == SUSPENDED else status, self._config)

        if status == FAILED and self._config.batch_emails:
            batched_params, _ = self._state.get_batcher(worker_id, family)
            if batched_params:
                unbatched_params = {
                    param: value
                    for param, value in six.iteritems(task.params)
                    if param not in batched_params
                }
            else:
                unbatched_params = task.params
            try:
                expl_raw = json.loads(expl)
            except ValueError:
                expl_raw = expl

            self._email_batcher.add_failure(
                task.pretty_id, task.family, unbatched_params, expl_raw, owners)
            if task.status == DISABLED:
                self._email_batcher.add_disable(
                    task.pretty_id, task.family, unbatched_params, owners)

        if deps is not None:
            task.deps = set(deps)

        if new_deps is not None:
            task.deps.update(new_deps)

        if resources is not None:
            task.resources = resources

        if worker.enabled and not assistant:
            task.stakeholders.add(worker_id)

            # Task dependencies might not exist yet. Let's create dummy tasks for them for now.
            # Otherwise the task dependencies might end up being pruned if scheduling takes a long time
            for dep in task.deps or []:
                t = self._state.get_task(dep, setdefault=self._make_task(task_id=dep, status=UNKNOWN, deps=None, priority=priority))
                t.stakeholders.add(worker_id)

        self._update_priority(task, priority, worker_id)

        # Because some tasks (non-dynamic dependencies) are `_make_task`ed
        # before we know their retry_policy, we always set it here
        task.retry_policy = retry_policy

        if runnable and status != FAILED and worker.enabled:
            task.workers.add(worker_id)
            self._state.get_worker(worker_id).tasks.add(task)
            task.runnable = runnable
Example #26
    def get_work(self, host=None, assistant=False, current_tasks=None, worker=None, **kwargs):
        if self._config.prune_on_get_work:
            self.prune()

        assert worker is not None

        worker_id = worker
        worker = self._update_worker(worker_id, 
            worker_reference={'host': host}, 
            get_work=True)

        if not worker.enabled:
            reply = {'n_pending_tasks': 0, 'running_tasks': [], 'task_id': None,
                'n_unique_pending': 0, 'worker_state': worker.state,
            }

            return reply

        # print("The Old Man and the Gun")

        if assistant:
            self.add_worker(worker_id, [('assistant', assistant)])

        batched_params, unbatched_params, batched_tasks, max_batch_size = None, None, [], 1
        best_task = None
        # print("Current Tasks are .... %s" % current_tasks)

        if current_tasks is not None:
            ct_set = set(current_tasks)

            for task in sorted(self._state.get_active_tasks_by_status(RUNNING), key=self._rank):
                if task.worker_running == worker_id and task.id not in ct_set:
                    best_task = task 

        if current_tasks is not None:
            # batch running tasks that were not claimed since the last get_work go back into the pool
            self._reset_orphaned_batch_running_tasks(worker_id)

        greedy_resources = collections.defaultdict(int)

        worker = self._state.get_worker(worker_id)
        # print("Worker is ... %s" % worker)
        # print("Is Trivial Worker ???? .... %s" % worker.is_trivial_worker(self._state))

        if self._paused:
            relevant_tasks = []
        elif worker.is_trivial_worker(self._state):
            relevant_tasks = worker.get_tasks(self._state, PENDING, RUNNING)
            # print("Relevant Tasks are ... %s" % relevant_tasks)

            used_resources = collections.defaultdict(int)
            greedy_workers = dict() # if there are no resources, then they can grab any task
        else:
            relevant_tasks = self._state.get_active_tasks_by_status(PENDING, RUNNING)
            used_resources = self._used_resources()
            activity_limit = time.time() - self._config.worker_disconnect_delay
            active_workers = self._state.get_active_workers(last_get_work_gt=activity_limit)
            greedy_workers = dict((worker.id, worker.info.get('workers', 1)) for worker in active_workers)

        tasks = list(relevant_tasks)
        #print("Tasks List is ... %s" %tasks)
        tasks.sort(key=self._rank, reverse=True)
        #print("Sorted Tasks are ... %s" %tasks)

        for task in tasks:
            #print("******** Pending Tasks in Scheduler Queue are ... %s (%s) , resources &&&&&&&&&&&&&&&&&&&&" % (task.id, task.status))

            if (best_task and batched_params and task.family == best_task.family and 
                len(batched_tasks) < max_batch_size and task.is_batchable() and all(
                    task.params.get(name) == value for name, value in unbatched_params.items()
                ) and task.resources == best_task.resources and self._schedulable(task)):
                
                for name, params in batched_params.items():
                    params.append(task.params.get(name))
                
                batched_tasks.append(task)

            if best_task:
                continue

            if task.status == RUNNING and (task.worker_running in greedy_workers):
                greedy_workers[task.worker_running] -= 1

                for resource, amount in six.iteritems((getattr(task, 'resources_running', task.resources) or {})):
                    greedy_resources[resource] += amount

            # print("Self._Schedulable(task) is .... %s" % self._schedulable(task))
            # print("Task Resources are ... %s (greedy %s)" % (task.resources, greedy_resources))
            # print("self._has_resources ???? %s" % self._has_resources(task.resources, greedy_resources))

            if self._schedulable(task) and self._has_resources(task.resources, greedy_resources):
                in_workers = (assistant and task.runnable) or worker_id in task.workers
                # print("In Workers ... %d" % in_workers)
                # print( "Has Resources %d" %self._has_resources(task.resources, used_resources))
                #return
                  
                if in_workers and self._has_resources(task.resources, used_resources):
                    best_task = task
                    batch_param_names, max_batch_size = self._state.get_batcher(worker_id, task.family)

                    if batch_param_names and task.is_batchable():
                        # print("Inside IoTs")
                        #return 

                        try:
                            batched_params = {
                                name: [task.params[name]] for name in batch_param_names
                            }
                            unbatched_params = {
                                name: value for name, value in task.params.items()
                                if name not in batched_params 
                            }
                            batched_tasks.append(task)
                        except KeyError:
                            batched_params, unbatched_params = None, None
                else:
                    # print("Run Forest Run ...")
                    #return 

                    workers = itertools.chain(task.workers, [worker_id]) if assistant else task.workers

                    for task_worker in workers:
                        if greedy_workers.get(task_worker, 0) > 0:
                            # use up a worker 
                            greedy_workers[task_worker] -= 1

                            # keep track of the resources used in greedy scheduling
                            for resource, amount in six.iteritems(task.resources or {}):
                                greedy_resources[resource] += amount

                            break

        reply = self.count_pending(worker_id)
        # print("Reply = %s" % reply)
        
        # print("Batched Tasks Length === %d" %(len(batched_tasks)))
        # print("Best Task is === %s" %best_task)

        if len(batched_tasks) > 1:
            batch_string = '|'.join(task.id for task in batched_tasks)
            batch_id = hashlib.md5(batch_string.encode('utf-8')).hexdigest()

            for task in batched_tasks:
                self._state.set_batch_running(task, batch_id, worker_id)

            combined_params = best_task.params.copy()
            combined_params.update(batched_params)

            reply['task_id'] = None
            reply['task_family'] = best_task.family
            reply['task_module'] = getattr(best_task, 'module', None)
            reply['task_params'] = combined_params
            reply['batch_id'] = batch_id
            reply['batch_task_ids'] = [task.id for task in batched_tasks]
        
        elif best_task:
            # print("Inside Best Task !!!")
            # print("Best Task is ... %s" %best_task)
            #return 

            self.update_metrics_task_started(best_task)
            self._state.set_status(best_task, RUNNING, self._config)
            best_task.worker_running = worker_id
            best_task.resources_running = best_task.resources.copy()
            best_task.time_running = time.time()
            self._update_task_history(best_task, RUNNING, host=host)

            reply['task_id'] = best_task.id
            reply['task_family'] = best_task.family
            reply['task_module'] = getattr(best_task, 'module', None)
            reply['task_params'] = best_task.params

        else:
            reply['task_id'] = None

        # print("7777777 Reply is %s" % reply)
        
        return reply