def create_tasks(self):
    """Creates instances of all configured tasks"""
    from flexget.task import Task
    # Clear tasks dict
    self.tasks = {}

    # Backwards compatibility with feeds key
    if 'feeds' in self.config:
        log.warning('`feeds` key has been deprecated and replaced by `tasks`. Please update your config.')
        if 'tasks' in self.config:
            log.error('You have defined both `feeds` and `tasks`. Stop that.')
        self.config['tasks'] = self.config.pop('feeds')
    # construct task list
    # NOTE: Python 2 `.keys()` returns a list, so it is safe to mutate the
    # config dict (pop/re-insert below) while iterating over this snapshot.
    tasks = self.config.get('tasks', {}).keys()
    for name in tasks:
        # Make sure numeric task names are turned into strings. #1763, #1961
        # The config entry is re-keyed as well so the lookup below succeeds.
        if not isinstance(name, basestring):
            self.config['tasks'][unicode(name)] = self.config['tasks'].pop(name)
            name = unicode(name)
        # create task
        task = Task(self, name, self.config['tasks'][name])
        # if task name is prefixed with _ it's disabled
        if name.startswith('_'):
            task.enabled = False
        self.tasks[name] = task
def on_task_input(self, task, config):
    """Run the named target task as a subtask, returning its accepted entries.

    :param task: The currently executing parent task.
    :param config: Name of the target task (a key in the config `tasks` section).
    :returns: Fresh :class:`Entry` copies of the subtask's accepted entries.
    """
    target_task_name = config
    subtask_name = '{}>{}'.format(task.name, target_task_name)
    # Shallow-copy the target task's config so the `seen` default below does
    # not mutate the live manager config (which would leak into normal runs
    # of the target task).
    subtask_config = dict(task.manager.config['tasks'].get(target_task_name, {}))
    # TODO: This seen disabling is sorta hacky, is there a better way?
    subtask_config.setdefault('seen', False)
    input_task = Task(
        task.manager,
        subtask_name,
        config=subtask_config,
        # TODO: Do we want to pass other options through?
        # TODO: Manual plugin semantics and allow_manual are confusing. Make it less confusing somehow?
        options={'allow_manual': True, 'tasks': [subtask_name]},
        output=task.output,
        session_id=task.session_id,
        priority=task.priority,
    )
    logger.verbose('Running task `{}` as subtask.', target_task_name)
    input_task.execute()
    logger.verbose('Finished running subtask `{}`.', target_task_name)

    # Create fresh entries to reset state and strip association to old task
    return [Entry(e) for e in input_task.accepted]
def handle_phase(task, config):
    """Apply configured conditions/actions to this task's entries.

    NOTE(review): closes over `self` and `phase` from an enclosing scope not
    visible here — presumably a per-phase handler factory; confirm in context.
    """
    if task.name not in self.task_phases:
        log.debug('No config dict was generated for this task.')
        return
    entry_actions = {
        'accept': Entry.accept,
        'reject': Entry.reject,
        'fail': Entry.fail}
    for item in self.task_phases[task.name][phase]:
        # Each item is a single-key dict: {requirement: action} (Python 2 .items()).
        requirement, action = item.items()[0]
        passed_entries = [e for e in task.entries if self.check_condition(requirement, e)]
        if passed_entries:
            if isinstance(action, basestring):
                # Simple entry action (accept, reject or fail) was specified as a string
                for entry in passed_entries:
                    entry_actions[action](entry, 'Matched requirement: %s' % requirement)
            else:
                # Other plugins were specified to run on this entry
                fake_task = Task(task.manager, task.name, task.config)
                fake_task.session = task.session
                # This entry still belongs to our feed, accept/reject etc. will carry through.
                fake_task.all_entries[:] = passed_entries

                try:
                    for plugin_name, plugin_config in action.iteritems():
                        plugin = get_plugin_by_name(plugin_name)
                        method = plugin.phase_handlers[phase]
                        method(fake_task, plugin_config)
                # NOTE(review): this try/except re-raises unchanged — effectively a no-op.
                except Exception:
                    raise
def handle_phase(task, config):
    """Apply configured conditions/actions to this task's entries.

    NOTE(review): closes over `self`, `phase` and `plugin` from an enclosing
    scope not visible here — presumably a per-phase handler factory.
    """
    entry_actions = {
        'accept': Entry.accept,
        'reject': Entry.reject,
        'fail': Entry.fail}
    for item in config:
        # Each item is a single-key dict: {requirement: action}.
        requirement, action = list(item.items())[0]
        # Generator: conditions are evaluated lazily as entries are consumed.
        passed_entries = (e for e in task.entries if self.check_condition(requirement, e))
        if isinstance(action, str):
            # String actions only make sense during the filter phase.
            if not phase == 'filter':
                continue
            # Simple entry action (accept, reject or fail) was specified as a string
            for entry in passed_entries:
                entry_actions[action](entry, 'Matched requirement: %s' % requirement)
        else:
            # Other plugins were specified to run on this entry
            fake_task = Task(task.manager, task.name, config=action, options=task.options)
            fake_task.session = task.session
            # This entry still belongs to our feed, accept/reject etc. will carry through.
            fake_task.all_entries[:] = passed_entries

            methods = {}
            for plugin_name, plugin_config in action.items():
                p = plugin.get_plugin_by_name(plugin_name)
                method = p.phase_handlers.get(phase)
                if method:
                    methods[method] = (fake_task, plugin_config)
            # Run the methods in priority order
            # (phase handler objects sort by their priority — presumably Event
            # instances; reverse=True runs highest priority first.)
            for method in sorted(methods, reverse=True):
                method(*methods[method])
def update_tasks(self):
    """Updates instances of all configured tasks from config"""
    from flexget.task import Task

    tasks_config = self.config['tasks']
    if not isinstance(tasks_config, dict):
        log.critical('Tasks is in wrong datatype, please read configuration guides')
        return

    # Sync every task defined in the config with our instance cache.
    for name in self.config.get('tasks', {}):
        task_config = self.config['tasks'][name]
        if not isinstance(task_config, dict):
            continue
        disabled = name.startswith('_')
        existing = self.tasks.get(name)
        if existing is not None:
            # Refresh the existing instance with a private copy of its config.
            existing.config = deepcopy(task_config)
            if not disabled:
                existing.enabled = True
        else:
            # No instance yet — create one; a leading underscore disables it.
            new_task = Task(self, name, deepcopy(task_config))
            if disabled:
                new_task.enabled = False
            self.tasks[name] = new_task

    # Drop instances whose config entry has disappeared.
    stale = [n for n in self.tasks if n not in tasks_config]
    for name in stale:
        del self.tasks[name]
def create_tasks(self):
    """Creates instances of all configured tasks"""
    from flexget.task import Task
    # Clear tasks dict
    self.tasks = {}

    # Backwards compatibility with feeds key
    if 'feeds' in self.config:
        log.warning(
            '`feeds` key has been deprecated and replaced by `tasks`. Please update your config.'
        )
        if 'tasks' in self.config:
            log.error(
                'You have defined both `feeds` and `tasks`. Stop that.')
        self.config['tasks'] = self.config.pop('feeds')

    # construct task list
    # NOTE: Python 2 `.keys()` returns a list, so mutating the config dict
    # below while iterating this snapshot is safe.
    tasks = self.config.get('tasks', {}).keys()
    for name in tasks:
        # Make sure numeric task names are turned into strings. #1763
        # Re-key the config entry as well; otherwise the lookup below would
        # raise KeyError, since the config dict still holds the numeric key.
        if not isinstance(name, basestring):
            self.config['tasks'][unicode(name)] = self.config['tasks'].pop(name)
            name = unicode(name)
        # create task
        task = Task(self, name, self.config['tasks'][name])
        # if task name is prefixed with _ it's disabled
        if name.startswith('_'):
            task.enabled = False
        self.tasks[name] = task
def handle_phase(task, config):
    """Apply configured conditions/actions to this task's entries.

    NOTE(review): closes over `self`, `phase` and `plugin` from an enclosing
    scope not visible here — presumably a per-phase handler factory.
    """
    entry_actions = {'accept': Entry.accept,
                     'reject': Entry.reject,
                     'fail': Entry.fail}
    for item in config:
        # Each item is a single-key dict: {requirement: action}.
        requirement, action = list(item.items())[0]
        # Generator: conditions are evaluated lazily as entries are consumed.
        passed_entries = (e for e in task.entries if self.check_condition(requirement, e))
        if isinstance(action, str):
            # String actions only make sense during the filter phase.
            if not phase == 'filter':
                continue
            # Simple entry action (accept, reject or fail) was specified as a string
            for entry in passed_entries:
                entry_actions[action](entry, 'Matched requirement: %s' % requirement)
        else:
            # Other plugins were specified to run on this entry
            fake_task = Task(task.manager, task.name, config=action, options=task.options)
            fake_task.session = task.session
            # This entry still belongs to our feed, accept/reject etc. will carry through.
            fake_task.all_entries[:] = passed_entries
            methods = {}
            for plugin_name, plugin_config in action.items():
                p = plugin.get_plugin_by_name(plugin_name)
                method = p.phase_handlers.get(phase)
                if method:
                    methods[method] = (fake_task, plugin_config)
            # Run the methods in priority order
            # (phase handler objects sort by priority; reverse=True runs
            # the highest priority first.)
            for method in sorted(methods, reverse=True):
                method(*methods[method])
def update_tasks(self):
    """Updates instances of all configured tasks from config"""
    from flexget.task import Task
    if not isinstance(self.config['tasks'], dict):
        log.critical(
            'Tasks is in wrong datatype, please read configuration guides')
        return

    # construct task list
    for name in self.config.get('tasks', {}):
        # Skip malformed entries whose value is not a task config dict.
        if not isinstance(self.config['tasks'][name], dict):
            continue
        if name in self.tasks:
            # This task already has an instance, update it
            # deepcopy isolates the instance from later config mutations.
            self.tasks[name].config = deepcopy(self.config['tasks'][name])
            if not name.startswith('_'):
                self.tasks[name].enabled = True
        else:
            # Create task
            task = Task(self, name, deepcopy(self.config['tasks'][name]))
            # If task name is prefixed with _ it's disabled
            if name.startswith('_'):
                task.enabled = False
            self.tasks[name] = task

    # Delete any task instances that are no longer in the config
    # (list comprehension snapshots the names so we can delete while iterating)
    for name in [n for n in self.tasks if n not in self.config['tasks']]:
        del self.tasks[name]
def execute_task(self, name, abort_ok=False, options=None):
    """Run one named task from the test config, replacing any previous task.

    :param name: Task name as it appears under `tasks` in the config.
    :param abort_ok: When True, a TaskAbort is swallowed instead of re-raised.
    :param options: Optional execution options passed through to the Task.
    """
    log.info('********** Running task: %s ********** ' % name)
    task_config = self.manager.config['tasks'][name]
    # Close the previous task's session (if any) before replacing it.
    if hasattr(self, 'task') and hasattr(self, 'session'):
        self.task.session.close()  # pylint: disable-msg=E0203
    self.task = Task(self.manager, name, config=task_config, options=options)
    try:
        self.task.execute()
    except TaskAbort:
        if not abort_ok:
            raise
def execute(self, options=None, output=None, loglevel=None, priority=1, suppress_warnings=None):
    """
    Run all (can be limited with options) tasks from the config.

    :param options: Either an :class:`argparse.Namespace` instance, or a dict, containing options for execution
    :param output: If a file-like object is specified here, log messages and stdout from the execution will be written to it.
    :param priority: If there are other executions waiting to be run, they will be run in priority order, lowest first.
    :param suppress_warnings: Allows suppressing log warning about missing plugin in key phases
    :returns: a list of :class:`threading.Event` instances which will be set when each respective task has finished running
    """
    # Normalize options: a dict overlays the default execute namespace.
    if options is None:
        options = copy.copy(self.options.execute)
    elif isinstance(options, dict):
        options_namespace = copy.copy(self.options.execute)
        options_namespace.__dict__.update(options)
        options = options_namespace
    task_names = self.tasks
    # Only reload config if daemon
    config_hash = self.hash_config()
    if self.is_daemon and self.autoreload_config and self.config_file_hash != config_hash:
        log.info('Config change detected. Reloading.')
        try:
            self.load_config(output_to_console=False, config_file_hash=config_hash)
            log.info('Config successfully reloaded!')
        except Exception as e:
            log.error('Reloading config failed: %s', e)
    # Handle --tasks
    if options.tasks:
        # Consider * the same as not specifying tasks at all (makes sure manual plugin still works)
        if options.tasks == ['*']:
            options.tasks = None
        else:
            # Create list of tasks to run, preserving order
            task_names = []
            for arg in options.tasks:
                # Case-insensitive glob match against configured task names.
                matches = [t for t in self.tasks if fnmatch.fnmatchcase(str(t).lower(), arg.lower())]
                if not matches:
                    msg = '`%s` does not match any tasks' % arg
                    log.error(msg)
                    if output:
                        output.write(msg)
                    continue
                task_names.extend(m for m in matches if m not in task_names)
            # Set the option as a list of matching task names so plugins can use it easily
            options.tasks = task_names
    # TODO: 1.2 This is a hack to make task priorities work still, not sure if it's the best one
    task_names = sorted(task_names, key=lambda t: self.config['tasks'][t].get('priority', 65535))

    finished_events = []
    for task_name in task_names:
        task = Task(
            self, task_name,
            options=options,
            output=output,
            loglevel=loglevel,
            priority=priority,
            suppress_warnings=suppress_warnings
        )
        self.task_queue.put(task)
        finished_events.append((task.id, task.name, task.finished_event))
    return finished_events
def search(self, entry, config=None):
    """Search the configured RSS feed for each of the entry's search strings.

    :param entry: Entry being searched for; `search_strings` (or `title`) supplies queries.
    :param config: Raw plugin config, expanded via the rss plugin's build_config.
    :returns: Set of entries produced by the rss input handler across all queries.
    """
    from flexget.utils.template import environment
    from flexget.manager import manager
    # URL-encode each search string (Python 2 urllib).
    search_strings = [
        urllib.quote(normalize_unicode(s).encode('utf-8'))
        for s in entry.get('search_strings', [entry['title']])
    ]
    rss_plugin = plugin.get_plugin_by_name('rss')
    entries = set()
    rss_config = rss_plugin.instance.build_config(config)
    # Compile the url template once; it is re-rendered per search string.
    template = environment.from_string(rss_config['url'])
    rss_config['all_entries'] = True
    for search_string in search_strings:
        # Create a fake task to pass to the rss plugin input handler
        task = Task(manager, 'search_rss_task', config={})
        rss_config['url'] = template.render({'search_term': search_string})
        # TODO: capture some other_fields to try to find seed/peer/content_size numbers?
        try:
            results = rss_plugin.phase_handlers['input'](task, rss_config)
        except plugin.PluginError as e:
            # Best-effort: log and continue with the remaining search strings.
            log.error('Error attempting to get rss for %s: %s', rss_config['url'], e)
        else:
            entries.update(results)
    return entries
def execute(self, options=None, output=None, loglevel=None, priority=1):
    """
    Run all (can be limited with options) tasks from the config.

    :param options: Either an :class:`argparse.Namespace` instance, or a dict, containing options for execution
    :param output: If a file-like object is specified here, log messages and stdout from the execution will be written to it.
    :param priority: If there are other executions waiting to be run, they will be run in priority order, lowest first.
    :returns: a list of :class:`threading.Event` instances which will be set when each respective task has finished running
    """
    # Normalize options: a dict overlays the default execute namespace.
    if options is None:
        options = copy.copy(self.options.execute)
    elif isinstance(options, dict):
        options_namespace = copy.copy(self.options.execute)
        options_namespace.__dict__.update(options)
        options = options_namespace
    task_names = self.tasks
    # Handle --tasks
    if options.tasks:
        # Consider * the same as not specifying tasks at all (makes sure manual plugin still works)
        if options.tasks == ['*']:
            options.tasks = None
        else:
            # Create list of tasks to run, preserving order
            task_names = []
            for arg in options.tasks:
                # Case-insensitive glob match (Python 2 unicode coercion).
                matches = [
                    t for t in self.tasks if fnmatch.fnmatchcase(
                        unicode(t).lower(), arg.lower())
                ]
                if not matches:
                    msg = '`%s` does not match any tasks' % arg
                    log.error(msg)
                    if output:
                        output.write(msg)
                    continue
                task_names.extend(m for m in matches if m not in task_names)
            # Set the option as a list of matching task names so plugins can use it easily
            options.tasks = task_names
    # TODO: 1.2 This is a hack to make task priorities work still, not sure if it's the best one
    task_names = sorted(
        task_names, key=lambda t: self.config['tasks'][t].get('priority', 65535))

    finished_events = []
    for task_name in task_names:
        task = Task(self, task_name, options=options, output=output,
                    loglevel=loglevel, priority=priority)
        self.task_queue.put(task)
        finished_events.append((task.id, task.name, task.finished_event))
    return finished_events
def execute_task(self, name, abort_ok=False):
    """Run one named task from the test config via the manager.

    :param name: Task name as it appears under `tasks` in the config.
    :param abort_ok: When True, an aborted task is not treated as a failure.
    """
    log.info('********** Running task: %s ********** ' % name)
    task_config = self.manager.config['tasks'][name]
    # Close the previous task's session (if any) before replacing it.
    if hasattr(self, 'task') and hasattr(self, 'session'):
        self.task.session.close()  # pylint: disable-msg=E0203
    self.task = Task(self.manager, name, task_config)
    self.manager.execute(tasks=[self.task])
    if not abort_ok:
        assert not self.task.aborted, 'Task should not have aborted.'
def test_send_message_for_Event(self, get_topic):
    """A non-test-mode run publishes one rendered message per accepted entry."""
    fake_manager = Mock()
    fake_manager.config = {'tasks': {}}
    task = Mock(wraps=Task(fake_manager, 'fake'))
    task.options.test = False
    entry = Mock()
    task.accepted = [entry]

    emitter = sns.SNSNotificationEmitter({'aws_region': 'test', 'sns_topic_arn': 'arn'})
    emitter.send_notifications(task)

    topic = get_topic.return_value
    topic.publish.assert_called_once_with(Message=entry.render.return_value)
def execute(task_name: str, abort: bool = False, options: Union[dict, argparse.Namespace, None] = None) -> Task:
    """
    Use to execute one test task from config.

    :param task_name: Name of task to execute.
    :param abort: If `True` expect (and require) this task to abort.
    :param options: Options for the execution.
    :returns: The executed :class:`Task` instance.
    """
    logger.info('********** Running task: {} ********** ', task_name)
    config = manager.config['tasks'][task_name]
    task = Task(manager, task_name, config=config, options=options)
    try:
        if abort:
            # The task is required to abort; anything else fails the test.
            with pytest.raises(TaskAbort):
                task.execute()
        else:
            task.execute()
    finally:
        # Best-effort cleanup; the session may never have been opened.
        try:
            task.session.close()
        except Exception:
            pass
    return task
def execute(task_name, abort=False, options=None):
    """
    Use to execute one test task from config.

    :param abort: If `True` expect (and require) this task to abort.
    """
    log.info('********** Running task: %s ********** ' % task_name)
    task_config = manager.config['tasks'][task_name]
    task = Task(manager, task_name, config=task_config, options=options)
    try:
        if not abort:
            task.execute()
        else:
            # The task is required to abort; anything else fails the test.
            with pytest.raises(TaskAbort):
                task.execute()
    finally:
        # Best-effort cleanup; the session may never have been opened.
        try:
            task.session.close()
        except Exception:
            pass
    return task
def edit_text(root, name):
    """Render/handle the raw-YAML editor for one config item (task/preset).

    :param root: Config section name, e.g. ``tasks`` (singularized for display).
    :param name: Name of the item within that section.
    """
    config_type = root.rstrip('s')
    context = {'name': name, 'root': root, 'config_type': config_type}
    if request.method == 'POST':
        context['config'] = request.form['config']
        try:
            # HACK/SECURITY: yaml.load without an explicit SafeLoader can
            # construct arbitrary Python objects from user-supplied text;
            # yaml.safe_load would be the safe choice here.
            config = yaml.load(request.form['config'])
        except yaml.scanner.ScannerError as e:
            flash('Invalid YAML document: %s' % e, 'error')
            log.exception(e)
        else:
            # valid yaml, now run validator
            errors = Task.validate_config(config)
            if errors:
                for error in errors:
                    flash(error, 'error')
                # Keep the user's raw text so they can fix it in place.
                context['config'] = request.form['config']
            else:
                manager.config[root][name] = config
                manager.save_config()
                context['config'] = yaml.dump(config, default_flow_style=False)
                if request.form.get('name') != name:
                    # Renaming
                    new_name = request.form.get('name')
                    if new_name in manager.config[root]:
                        flash(
                            '%s with name %s already exists' %
                            (config_type.capitalize(), new_name), 'error')
                    else:
                        # Do the rename
                        manager.config[root][new_name] = manager.config[root][name]
                        del manager.config[root][name]
                        manager.save_config()
                        flash(
                            '%s %s renamed to %s.' %
                            (config_type.capitalize(), name, new_name), 'success')
                        return redirect(
                            url_for('.edit_text', root=root, name=new_name))
                else:
                    flash('Configuration saved', 'success')
    else:
        # GET: show the current config serialized as YAML (empty if unset).
        config = manager.config[root][name]
        if config:
            context['config'] = yaml.dump(config, default_flow_style=False)
        else:
            context['config'] = ''
    context['related'] = get_related(root, name)
    return render_template('configure/edit_text.html', **context)
def test_dry_run_does_not_send_message(self, get_topic):
    """In test (dry-run) mode the message is rendered but never published."""
    fake_manager = Mock()
    fake_manager.config = {'tasks': {}}
    task = Mock(wraps=Task(fake_manager, 'fake'))
    task.options.test = True
    entry = Mock()
    task.accepted = [entry]

    emitter = sns.SNSNotificationEmitter({'aws_region': 'test', 'sns_topic_arn': 'arn'})
    emitter.send_notifications(task)

    entry.render.assert_called_once_with(sns.DEFAULT_TEMPLATE_VALUE)
    topic = get_topic.return_value
    assert not topic.publish.called
def search(self, entry, config=None):
    """Search the configured RSS feed using the entry's title as the query.

    :param entry: Entry being searched for; its `title` supplies the query.
    :param config: Raw plugin config, expanded via the rss plugin's build_config.
    :returns: Entries produced by the rss input handler for the rendered url.
    """
    from flexget.utils.template import environment
    from flexget.manager import manager
    query = entry['title']
    # URL-encode the query (Python 2 urllib).
    search_string = urllib.quote(normalize_unicode(query).encode('utf-8'))
    rss_plugin = get_plugin_by_name('rss')
    # Create a fake task to pass to the rss plugin input handler
    task = Task(manager, 'search_rss_task', {})
    # Use a copy of the config, so we don't overwrite jinja url when filling in search term
    config = rss_plugin.instance.build_config(config).copy()
    template = environment.from_string(config['url'])
    config['url'] = template.render({'search_term': search_string})
    config['all_entries'] = True
    # TODO: capture some other_fields to try to find seed/peer/content_size numbers?
    return rss_plugin.phase_handlers['input'](task, config)
def edit_text(root, name):
    """Render/handle the raw-YAML editor for one config item (task/preset).

    :param root: Config section name, e.g. ``tasks`` (singularized for display).
    :param name: Name of the item within that section.
    """
    config_type = root.rstrip('s')
    context = {
        'name': name,
        'root': root,
        'config_type': config_type}
    if request.method == 'POST':
        context['config'] = request.form['config']
        try:
            # HACK/SECURITY: yaml.load without an explicit SafeLoader can
            # construct arbitrary Python objects from user-supplied text;
            # yaml.safe_load would be the safe choice here.
            config = yaml.load(request.form['config'])
        except yaml.scanner.ScannerError as e:
            flash('Invalid YAML document: %s' % e, 'error')
            log.exception(e)
        else:
            # valid yaml, now run validator
            errors = Task.validate_config(config)
            if errors:
                for error in errors:
                    flash(error, 'error')
                # Keep the user's raw text so they can fix it in place.
                context['config'] = request.form['config']
            else:
                manager.config[root][name] = config
                manager.save_config()
                context['config'] = yaml.dump(config, default_flow_style=False)
                if request.form.get('name') != name:
                    # Renaming
                    new_name = request.form.get('name')
                    if new_name in manager.config[root]:
                        flash('%s with name %s already exists' %
                              (config_type.capitalize(), new_name), 'error')
                    else:
                        # Do the rename
                        manager.config[root][new_name] = manager.config[root][name]
                        del manager.config[root][name]
                        manager.save_config()
                        flash('%s %s renamed to %s.' %
                              (config_type.capitalize(), name, new_name), 'success')
                        return redirect(url_for('edit_text', root=root, name=new_name))
                else:
                    flash('Configuration saved', 'success')
    else:
        # GET: show the current config serialized as YAML (empty if unset).
        config = manager.config[root][name]
        if config:
            context['config'] = yaml.dump(config, default_flow_style=False)
        else:
            context['config'] = ''
    context['related'] = get_related(root, name)
    return render_template('configure/edit_text.html', **context)
def search(self, query, comparator, config=None):
    """Search the configured RSS feed, filtering results with the comparator.

    :param query: Title/text being searched for.
    :param comparator: Matcher object; supplies the search string and scores results.
    :param config: Raw plugin config, expanded via the rss plugin's build_config.
    :returns: List of matching entries, each annotated with `search_ratio`.
    """
    from flexget.utils.template import environment
    from flexget.manager import manager
    comparator.set_seq1(query)
    # URL-encode the comparator's normalized search string (Python 2 urllib).
    search_string = urllib.quote(comparator.search_string().encode('utf-8'))
    rss_plugin = get_plugin_by_name('rss')
    # Create a fake task to pass to the rss plugin input handler
    task = Task(manager, 'search_rss_task', {})
    # Use a copy of the config, so we don't overwrite jinja url when filling in search term
    config = rss_plugin.instance.build_config(config).copy()
    template = environment.from_string(config['url'])
    config['url'] = template.render({'search_term': search_string})
    # TODO: capture some other_fields to try to find seed/peer/content_size numbers?
    entries = []
    for entry in rss_plugin.phase_handlers['input'](task, config):
        # Keep only results the comparator considers a match, recording the score.
        if comparator.matches(entry['title']):
            entry['search_ratio'] = comparator.ratio()
            entries.append(entry)
    return entries
def run(self):
    """Scheduler main loop: pull jobs off the run queue and execute them.

    Runs until `_shutdown_now` is set; when `_shutdown_when_finished` is set,
    shuts down once the queue drains.
    """
    from flexget.task import Task, TaskAbort
    while not self._shutdown_now:
        if self.run_schedules:
            self.queue_pending_jobs()
        # Grab the first job from the run queue and do it
        try:
            # Short timeout so shutdown flags are re-checked twice a second.
            job = self.run_queue.get(timeout=0.5)
        except Queue.Empty:
            if self._shutdown_when_finished:
                self._shutdown_now = True
            continue
        if job.output:
            # Hook up our log and stdout to give back to the requester
            old_stdout, old_stderr = sys.stdout, sys.stderr
            sys.stdout, sys.stderr = Tee(job.output, sys.stdout), Tee(
                job.output, sys.stderr)
            # TODO: Use a filter to capture only the logging for this execution?
            streamhandler = logging.StreamHandler(job.output)
            streamhandler.setFormatter(FlexGetFormatter())
            logging.getLogger().addHandler(streamhandler)
        try:
            Task(self.manager, job.task, options=job.options).execute()
        except TaskAbort as e:
            log.debug('task %s aborted: %r' % (job.task, e))
        finally:
            # Always mark the job done and restore the streams/handlers,
            # even if the task raised.
            self.run_queue.task_done()
            job.finished_event.set()
            if job.output:
                sys.stdout, sys.stderr = old_stdout, old_stderr
                logging.getLogger().removeHandler(streamhandler)
    remaining_jobs = self.run_queue.qsize()
    if remaining_jobs:
        log.warning(
            'Scheduler shut down with %s jobs remaining in the queue to run.' %
            remaining_jobs)
    log.debug('scheduler shut down')
class FlexGetBase(object):
    """Base class for FlexGet tests: builds a MockManager from `__yaml__`
    and provides helpers to execute tasks and dump their entries."""

    __yaml__ = """# Yaml goes here"""

    # Set this to True to get a UNIQUE tmpdir; the tmpdir is created on
    # setup as "./tmp/<testname>" and automatically removed on teardown.
    #
    # The instance variable __tmp__ is set to the absolute name of the tmpdir
    # (ending with "os.sep"), and any occurrence of "__tmp__" in __yaml__ or
    # a @with_filecopy destination is also replaced with it.
    __tmp__ = False

    def __init__(self):
        self.log = log
        self.manager = None
        self.task = None
        self.database_uri = None
        self.base_path = os.path.dirname(__file__)

    def setup(self):
        """Set up test env"""
        setup_once()
        if self.__tmp__:
            self.__tmp__ = util.maketemp() + '/'
            # Substitute the real tmpdir path into the raw YAML config.
            self.__yaml__ = self.__yaml__.replace("__tmp__", self.__tmp__)
        self.manager = MockManager(self.__yaml__, self.__class__.__name__, db_uri=self.database_uri)

    def teardown(self):
        try:
            # Best-effort: there may be no task/session if setup failed early.
            try:
                self.task.session.close()
            except:
                pass
            self.manager.shutdown()
            self.manager.__del__()
        finally:
            if self.__tmp__:
                import shutil
                log.trace('Removing tmpdir %r' % self.__tmp__)
                shutil.rmtree(self.__tmp__.rstrip(os.sep))

    def execute_task(self, name, abort_ok=False, options=None):
        """Use to execute one test task from config"""
        log.info('********** Running task: %s ********** ' % name)
        config = self.manager.config['tasks'][name]
        if hasattr(self, 'task'):
            if hasattr(self, 'session'):
                self.task.session.close() # pylint: disable-msg=E0203
        self.task = Task(self.manager, name, config=config, options=options)
        try:
            self.task.execute()
        except TaskAbort:
            if not abort_ok:
                raise

    def dump(self):
        """Helper method for debugging"""
        from flexget.plugins.output.dump import dump
        #from flexget.utils.tools import sanitize
        # entries = sanitize(self.task.entries)
        # accepted = sanitize(self.task.accepted)
        # rejected = sanitize(self.task.rejected)
        # NOTE(review): all three sections dump `self.task.entries` — the
        # accepted/rejected lists are never used; confirm this is intended.
        print '\n-- ENTRIES: -----------------------------------------------------'
        # print yaml.safe_dump(entries)
        dump(self.task.entries, True)
        print '-- ACCEPTED: ----------------------------------------------------'
        # print yaml.safe_dump(accepted)
        dump(self.task.entries, True)
        print '-- REJECTED: ----------------------------------------------------'
        # print yaml.safe_dump(rejected)
        dump(self.task.entries, True)
def execute(
    self,
    options: Union[dict, argparse.Namespace, None] = None,
    priority: int = 1,
    suppress_warnings: Union[Sequence[str], None] = None,
) -> List[Tuple[str, str, threading.Event]]:
    """
    Run all (can be limited with options) tasks from the config.

    :param options: Either an :class:`argparse.Namespace` instance, or a dict, containing options for execution
    :param priority: If there are other executions waiting to be run, they will be run in priority order, lowest first.
    :param suppress_warnings: Allows suppressing log warning about missing plugin in key phases
    :returns: a list of :class:`threading.Event` instances which will be set when each respective task has finished running
    """
    # Normalize options: a dict overlays the default execute namespace.
    if options is None:
        options = copy.copy(self.options.execute)
    elif isinstance(options, dict):
        options_namespace = copy.copy(self.options.execute)
        options_namespace.__dict__.update(options)
        options = options_namespace
    task_names = self.tasks
    # Only reload config if daemon
    config_hash = self.hash_config()
    if self.is_daemon and self.autoreload_config and self.config_file_hash != config_hash:
        logger.info('Config change detected. Reloading.')
        try:
            self.load_config(output_to_console=False, config_file_hash=config_hash)
            logger.info('Config successfully reloaded!')
        except Exception as e:
            logger.error('Reloading config failed: {}', e)
    # Handle --tasks
    if options.tasks:
        # Consider '*' the same as not specifying any tasks.
        # (So manual plugin doesn't consider them explicitly enabled.)
        if options.tasks == ['*']:
            options.tasks = None
        else:
            task_names = []
            for task in options.tasks:
                try:
                    # Preserve order; skip names already matched.
                    task_names.extend(
                        m for m in self.matching_tasks(task) if m not in task_names
                    )
                except ValueError as e:
                    logger.error(e)
                    continue
            options.tasks = task_names
    # TODO: 1.2 This is a hack to make task priorities work still, not sure if it's the best one
    task_names = sorted(
        task_names, key=lambda t: self.config['tasks'][t].get('priority', 65535)
    )

    finished_events = []
    for task_name in task_names:
        task = Task(
            self,
            task_name,
            options=options,
            output=get_console_output(),
            session_id=flexget.log.get_log_session_id(),
            priority=priority,
            suppress_warnings=suppress_warnings,
        )
        self.task_queue.put(task)
        finished_events.append((task.id, task.name, task.finished_event))
    return finished_events
class FlexGetBase(object):
    """Base class for FlexGet tests: builds a MockManager from `__yaml__`,
    applies registered config/task mutation hooks, and provides helpers to
    execute tasks and dump their entries."""

    __yaml__ = """# Yaml goes here"""

    # Set this to True to get a UNIQUE tmpdir; the tmpdir is created on
    # setup as "./tmp/<testname>" and automatically removed on teardown.
    #
    # The instance variable __tmp__ is set to the absolute name of the tmpdir
    # (ending with "os.sep"), and any occurrence of "__tmp__" in __yaml__ or
    # a @with_filecopy destination is also replaced with it.
    __tmp__ = False

    def __init__(self):
        self.log = log
        self.manager = None
        self.task = None
        self.database_uri = None
        self.base_path = os.path.dirname(__file__)
        # Hooks applied during setup: whole-config mutators and
        # per-task-definition mutators, respectively.
        self.config_functions = []
        self.tasks_functions = []

    def add_config_function(self, config_function):
        # Register a callable that receives the full manager config in setup.
        self.config_functions.append(config_function)

    def add_tasks_function(self, tasks_function):
        # Register a callable that receives (task_name, task_definition) pairs.
        self.tasks_functions.append(tasks_function)

    def setup(self):
        """Set up test env"""
        setup_once()
        if self.__tmp__:
            self.__tmp__ = util.maketemp() + '/'
            # Substitute the real tmpdir path into the raw YAML config.
            self.__yaml__ = self.__yaml__.replace("__tmp__", self.__tmp__)
        self.manager = MockManager(self.__yaml__, self.__class__.__name__, db_uri=self.database_uri)
        for config_function in self.config_functions:
            config_function(self.manager.config)
        if self.tasks_functions and 'tasks' in self.manager.config:
            for task_name, task_definition in self.manager.config['tasks'].items():
                for task_function in self.tasks_functions:
                    task_function(task_name, task_definition)

    def teardown(self):
        try:
            # Best-effort: there may be no task/session if setup failed early.
            try:
                self.task.session.close()
            except:
                pass
            self.manager.shutdown()
            self.manager.__del__()
        finally:
            if self.__tmp__:
                import shutil
                log.trace('Removing tmpdir %r' % self.__tmp__)
                shutil.rmtree(self.__tmp__.rstrip(os.sep))

    def execute_task(self, name, abort_ok=False, options=None):
        """Use to execute one test task from config"""
        log.info('********** Running task: %s ********** ' % name)
        config = self.manager.config['tasks'][name]
        if hasattr(self, 'task'):
            if hasattr(self, 'session'):
                self.task.session.close() # pylint: disable-msg=E0203
        self.task = Task(self.manager, name, config=config, options=options)
        try:
            self.task.execute()
        except TaskAbort:
            if not abort_ok:
                raise

    def dump(self):
        """Helper method for debugging"""
        from flexget.plugins.output.dump import dump
        #from flexget.utils.tools import sanitize
        # entries = sanitize(self.task.entries)
        # accepted = sanitize(self.task.accepted)
        # rejected = sanitize(self.task.rejected)
        # NOTE(review): all three sections dump `self.task.entries` — the
        # accepted/rejected lists are never used; confirm this is intended.
        print '\n-- ENTRIES: -----------------------------------------------------'
        # print yaml.safe_dump(entries)
        dump(self.task.entries, True)
        print '-- ACCEPTED: ----------------------------------------------------'
        # print yaml.safe_dump(accepted)
        dump(self.task.entries, True)
        print '-- REJECTED: ----------------------------------------------------'
        # print yaml.safe_dump(rejected)
        dump(self.task.entries, True)