def from_dict(cls, context_options_dict):
    """Build a context instance from a dict produced by Context.to_dict."""
    import copy

    options = copy.deepcopy(context_options_dict)

    tasks_inserted = options.pop('_tasks_inserted', False)

    # Re-hydrate dotted-path strings back into live references.
    inserter_path = options.pop('insert_tasks', None)
    if inserter_path:
        options['insert_tasks'] = path_to_reference(inserter_path)

    # The constructor expects a reference to the persistence engine.
    engine_path = options.pop('persistence_engine', None)
    if engine_path:
        options['persistence_engine'] = path_to_reference(engine_path)

    # If there are callbacks, reconstitute them.
    encoded_callbacks = options.pop('callbacks', None)
    if encoded_callbacks:
        options['callbacks'] = decode_callbacks(encoded_callbacks)

    context = cls(**options)
    context._tasks_inserted = tasks_inserted

    return context
def test_runs_std_imported(self, parser_mock):
    """Ensure run_job is able to correctly run bundled python functions."""
    from furious.job_utils import path_to_reference

    resolved = path_to_reference("email.parser.Parser")

    self.assertIs(parser_mock, resolved)
def _get_configured_module(option_name, known_modules=None, verify_exists=True):
    """Return the module named by the configuration value of option_name.

    The configured value is looked up in known_modules first; a value not
    found there is treated as a dotted module path directly.

    Args:
        option_name: name of persistence module
        known_modules: dictionary of module names and module paths, ie:
            {'ndb': 'furious.extras.appengine.ndb_persistence'}
        verify_exists: boolean of whether to ensure the keys and modules
            exist or just return if they don't.
    Returns:
        module of the module path matching the name in known_modules
    """
    from furious.job_utils import path_to_reference

    config = get_config()

    if not (verify_exists or option_name in config):
        return

    option_value = config[option_name]

    # If no known_modules were given, treat the value as a raw path.
    aliases = known_modules or {}

    return path_to_reference(aliases.get(option_value) or option_value)
def async_from_options(options):
    """Rebuild an Async (or Async subclass) instance from an options dict."""
    type_path = options.pop('_type', 'furious.async.Async')
    async_class = path_to_reference(type_path)

    return async_class.from_dict(options)
def test_runs_builtin(self, dir_mock):
    """Ensure builtins are able to be loaded and correctly run."""
    from furious.job_utils import path_to_reference

    resolved = path_to_reference("dir")

    self.assertIs(dir_mock, resolved)
def _decorate_job(self):
    """Return the job's target function.

    A subclass may override `Async._decorate_job` in order to wrap the
    original target using a decorator function.
    """
    return path_to_reference(self.job[0])
def test_casts_unicode_name_to_str(self):
    """Ensure unicode module_paths do not cause an error."""
    from furious.job_utils import path_to_reference

    loaded = path_to_reference(u'furious.tests.dummy_module.dumb')

    from furious.tests.dummy_module import dumb
    self.assertIs(dumb, loaded)
def test_casts_unicode_name_to_str(self):
    """Ensure unicode module_paths do not cause an error."""
    from furious.job_utils import path_to_reference

    result = path_to_reference(u'furious.tests.dummy_module.dumb')

    from furious.tests.dummy_module import dumb

    self.assertIs(dumb, result)
def test_runs_classmethod(self):
    """Ensure classmethods are able to be loaded and correctly run."""
    from furious.job_utils import path_to_reference

    ThrowAway.i_was_ran = False

    runner = path_to_reference(
        'furious.tests.test_job_utils.ThrowAway.run_me')
    runner()

    self.assertTrue(ThrowAway.i_was_ran)
def decode_async_options(options):
    """Reverse the JSON-encoding transformations on an Async options dict."""
    decoded = copy.deepcopy(options)

    # JSON can't carry datetimes; the eta round-trips as a timestamp.
    eta = decoded.get('task_args', {}).get('eta')
    if eta:
        from datetime import datetime
        decoded['task_args']['eta'] = datetime.fromtimestamp(eta)

    # If there are callbacks, reconstitute them.
    callbacks = decoded.get('callbacks', {})
    if callbacks:
        decoded['callbacks'] = decode_callbacks(callbacks)

    if '__context_checker' in options:
        decoded['_context_checker'] = path_to_reference(
            options['__context_checker'])

    if '__process_results' in options:
        decoded['_process_results'] = path_to_reference(
            options['__process_results'])

    return decoded
def _prepare_persistence_engine(self): """Load the specified persistence engine, or the default if none is set. """ if self._persistence_engine: return persistence_engine = self._options.get('persistence_engine') if persistence_engine: self._persistence_engine = path_to_reference(persistence_engine) return from furious.config import get_default_persistence_engine self._persistence_engine = get_default_persistence_engine()
def decode_async_options(options):
    """Decode Async options from JSON decoding.

    Reverses the transformations applied when the options were encoded for
    JSON: restores the datetime eta, reconstitutes callbacks, and re-imports
    the dotted-path hook references.

    Args:
        options: dict of JSON-decoded Async options.

    Returns:
        A new dict of options ready to construct an Async with.
    """
    async_options = copy.deepcopy(options)

    # JSON don't like datetimes.
    eta = async_options.get('task_args', {}).get('eta')
    if eta:
        from datetime import datetime
        async_options['task_args']['eta'] = datetime.fromtimestamp(eta)

    # If there are callbacks, reconstitute them.
    callbacks = async_options.get('callbacks', {})
    if callbacks:
        async_options['callbacks'] = decode_callbacks(callbacks)

    if '__context_checker' in options:
        _checker = options['__context_checker']
        async_options['_context_checker'] = path_to_reference(_checker)

    # Consistency fix: the encoder stores the results processor under
    # '__process_results'; decode it back to a reference the same way the
    # context checker is handled above (the other decode_async_options
    # variant in this codebase already does this).
    if '__process_results' in options:
        _processor = options['__process_results']
        async_options['_process_results'] = path_to_reference(_processor)

    return async_options
def run_job():
    """Execute the job carried by the currently-executing Async.

    Resolves the job's dotted function path, runs it, and records an
    AsyncResult (SUCCESS, ABORT, or ERROR) on the async. Results processing
    and context completion checking run afterwards.

    Raises:
        Exception: if the async carries no job to execute.
        AbortAndRestart: re-raised so the task queue retries the job.
    """
    # NOTE: 'async' became a reserved keyword in Python 3.7; the local must
    # use a different name or this module fails to even compile.
    async_job = get_current_async()

    async_options = async_job.get_options()

    job = async_options.get('job')
    if not job:
        raise Exception('This async contains no job to execute!')

    function_path, args, kwargs = job
    if args is None:
        args = ()
    if kwargs is None:
        kwargs = {}

    function = path_to_reference(function_path)

    try:
        async_job.executing = True
        async_job.result = AsyncResult(payload=function(*args, **kwargs),
                                       status=AsyncResult.SUCCESS)
    except Abort as abort:
        logging.info('Async job was aborted: %r', abort)
        async_job.result = AsyncResult(status=AsyncResult.ABORT)

        # QUESTION: In this eventuality, we should probably tell the context
        # we are "complete" and let it handle completion checking.
        _handle_context_completion_check(async_job)
        return
    except AbortAndRestart as restart:
        logging.info('Async job was aborted and restarted: %r', restart)
        raise
    except Exception as e:
        async_job.result = AsyncResult(payload=encode_exception(e),
                                       status=AsyncResult.ERROR)

    _handle_results(async_options)
    _handle_context_completion_check(async_job)
def run_job():
    """Execute the job carried by the currently-executing Async.

    Resolves the job's dotted function path, runs it, and stores the result
    (or encoded exception) on the async, then invokes the configured results
    processor; if that processor returns an Async or Context, it is started.

    Raises:
        Exception: if the async carries no job to execute.
        AbortAndRestart: re-raised so the task queue retries the job.
    """
    # NOTE: 'async' became a reserved keyword in Python 3.7; the original
    # local name (written here as 'async .attr' to dodge the parser) must be
    # renamed or this module fails to even compile.
    async_job = get_current_async()

    async_options = async_job.get_options()

    job = async_options.get('job')
    if not job:
        raise Exception('This async contains no job to execute!')

    function_path, args, kwargs = job
    if args is None:
        args = ()
    if kwargs is None:
        kwargs = {}

    function = path_to_reference(function_path)

    try:
        async_job.executing = True
        async_job.result = function(*args, **kwargs)
    except Abort as abort:
        logging.info('Async job was aborted: %r', abort)
        async_job.result = None
        return
    except AbortAndRestart as restart:
        logging.info('Async job was aborted and restarted: %r', restart)
        raise
    except Exception as e:
        async_job.result = encode_exception(e)

    results_processor = async_options.get('_process_results')
    if not results_processor:
        results_processor = _process_results

    processor_result = results_processor()
    if isinstance(processor_result, (Async, Context)):
        processor_result.start()
def run_job():
    """Execute the job carried by the currently-executing Async.

    Resolves the job's dotted function path, runs it, and stores the result
    (or encoded exception) on the async, then invokes the configured results
    processor; if that processor returns an Async or Context, it is started.

    Raises:
        Exception: if the async carries no job to execute.
        AbortAndRestart: re-raised so the task queue retries the job.
    """
    # NOTE: 'async' became a reserved keyword in Python 3.7; the local must
    # use a different name or this module fails to even compile.
    async_job = get_current_async()

    async_options = async_job.get_options()

    job = async_options.get('job')
    if not job:
        raise Exception('This async contains no job to execute!')

    function_path, args, kwargs = job
    if args is None:
        args = ()
    if kwargs is None:
        kwargs = {}

    function = path_to_reference(function_path)

    try:
        async_job.executing = True
        async_job.result = function(*args, **kwargs)
    except Abort as abort:
        logging.info('Async job was aborted: %r', abort)
        async_job.result = None
        return
    except AbortAndRestart as restart:
        logging.info('Async job was aborted and restarted: %r', restart)
        raise
    except Exception as e:
        async_job.result = encode_exception(e)

    results_processor = async_options.get('_process_results')
    if not results_processor:
        results_processor = _process_results

    processor_result = results_processor()
    if isinstance(processor_result, (Async, Context)):
        processor_result.start()