def _optimize_inline_tasks(self):
    """Group tasks by (abstraction, function) pair, then schedule any
    over-sized group in chunks, each chunk running as its own inline
    sub-DAG (InlineNest)."""
    # Inlining is disabled unless the script asks for more than one task.
    if CurrentScript().inline_tasks <= 1:
        return
    debug(D_NEST, 'Inlining tasks for {0}'.format(self))

    # Bin every task under its (abstraction, function) key.
    bins = collections.defaultdict(list)
    for task in self.tasks:
        bins[(task[0], task[1])].append(task)

    # Rebuild self.tasks: small bins stay scheduled here, large bins are
    # broken into chunks and pushed into inline nests.
    self.tasks = []
    for (abstraction, function), bucket in bins.items():
        chunk_size = max(CurrentScript().inline_tasks, abstraction.group)
        if chunk_size >= len(bucket):
            # Bin fits within the inline limit; keep tasks in this nest.
            self.tasks.extend(bucket)
            continue
        for chunk in groups(bucket, chunk_size):
            with InlineNest() as inline_nest:
                for task in chunk:
                    inline_nest.schedule(*task)
                inline_nest.compile()
            # Invoke the compiled sub-DAG under the abstraction's options.
            with abstraction.options:
                inline_nest()
def _optimize_inline_tasks(self):
    """ Group tasks by abstraction and function and then break them into
    sub-groups and schedule the sub-groups as sub DAGs.
    """
    if CurrentScript().inline_tasks <= 1:
        return
    debug(D_NEST, 'Inlining tasks for {0}'.format(self))

    # Group tasks into bins based on Function.
    task_dict = collections.defaultdict(list)
    for task in self.tasks:
        abstraction = task[0]
        function = task[1]
        task_dict[(abstraction, function)].append(task)

    # For each set of tasks, split the set into small sub-groups; for each
    # sub-group, create a new InlineNest and schedule the tasks there.
    self.tasks = []
    # task_dict is not mutated inside this loop, so iterate the items view
    # directly instead of materializing a throwaway list (ruff PERF101).
    for (abstraction, function), tasks in task_dict.items():
        # Chunk size is the larger of the script-wide inline limit and the
        # abstraction's own grouping factor.
        inline_tasks = max(CurrentScript().inline_tasks, abstraction.group)
        if inline_tasks < len(tasks):
            for group in groups(tasks, inline_tasks):
                with InlineNest() as inline_nest:
                    for task in group:
                        inline_nest.schedule(*task)
                    inline_nest.compile()
                # Run the compiled sub-DAG under the abstraction's options.
                with abstraction.options:
                    inline_nest()
        else:
            # Small enough to stay scheduled directly in this nest.
            for task in tasks:
                self.tasks.append(task)
def _generate(self):
    """Yield reduction tasks that repeatedly fold fixed-size groups of
    inputs into intermediate stash files until one final output remains."""
    with self:
        debug(D_ABSTRACTION, 'Generating Abstraction {0}'.format(self))

        func = parse_function(self.function)
        pending = parse_input_list(self.inputs)
        includes = parse_input_list(self.includes)
        nest = CurrentNest()

        # Resolve the final output path relative to the nest's work dir.
        target = self.outputs
        if not os.path.isabs(target):
            target = os.path.join(nest.work_dir, target)

        # Reduce in rounds: each round folds chunks of the current inputs
        # into fresh intermediate files, which become the next round's inputs.
        while len(pending) > self.group:
            survivors = []
            for chunk in groups(pending, self.group):
                intermediate = next(nest.stash)
                survivors.append(intermediate)
                with Options(local=self.options.local,
                             collect=chunk if self.collect else None):
                    yield func(chunk, intermediate, None, includes)
            pending = survivors

        # Final round writes the user-visible output file.
        with Options(local=self.options.local,
                     collect=pending if self.collect else None):
            yield func(pending, target, None, includes)
def _generate(self):
    """Yield one mapper task per group of inputs; each task writes its
    results to a fresh stash file drawn from the current nest."""
    with self:
        debug(D_ABSTRACTION, 'Generating Abstraction {0}'.format(self))

        mapper = parse_function(self.mapper, PythonMapper)
        inputs = parse_input_list(self.inputs)
        includes = parse_input_list(self.includes)
        nest = CurrentNest()
        # NOTE(review): the original also assigned `output = self.outputs`
        # but never read it; removed as a dead local — confirm self.outputs
        # is a plain attribute with no side-effecting property getter.

        for map_input in groups(inputs, self.group):
            map_output = next(nest.stash)
            with Options(local=self.options.local,
                         collect=map_input if self.collect else None):
                yield mapper(map_input, map_output, includes)