def run_job(cls, job_class_path, job_id, db_class_path, db_config, db_tablenames,
            *args, **kwargs):
    """Run a job once, recording the execution in the datastore.

    :param str job_class_path: String for job class, e.g., 'myscheduler.jobs.a_job.NiceJob'
    :param str job_id: Job id
    :param str db_class_path: String for datastore class, e.g. 'datastores.DatastoreSqlite'
    :param dict db_config: dictionary containing values for db connection
    :param dict db_tablenames: dictionary containing the names for the jobs,
        executions, or audit logs table
    :param args: List of args provided to the job class to be run
    :param kwargs: Keyword arguments
    :return: string execution id, or None when importing/scheduling the job failed
    :rtype: str or None
    """
    execution_id = utils.generate_uuid()
    datastore = utils.get_datastore_instance(db_class_path, db_config, db_tablenames)
    # Record the execution up-front with a generic description; it is refined
    # below once the concrete job class has been imported successfully.
    datastore.add_execution(execution_id, job_id,
                            constants.EXECUTION_STATUS_SCHEDULED,
                            description=JobBase.get_scheduled_description())
    try:
        job_class = utils.import_from_path(job_class_path)
        datastore.update_execution(execution_id,
                                   state=constants.EXECUTION_STATUS_SCHEDULED,
                                   description=job_class.get_scheduled_description())
        cls.run_scheduler_job(job_class, job_id, execution_id, datastore, *args, **kwargs)
    except Exception:
        # Previously this handler swallowed the failure silently, which made
        # import/scheduling errors impossible to diagnose. Log the traceback,
        # then preserve the original contract: mark the execution as failed
        # and return None.
        import logging
        logging.getLogger(__name__).exception(
            'Failed to run job %s (%s)', job_id, job_class_path)
        datastore.update_execution(
            execution_id,
            state=constants.EXECUTION_STATUS_SCHEDULED_ERROR,
            description=JobBase.get_scheduled_error_description(),
            result=JobBase.get_scheduled_error_result())
        return None
    return execution_id
def _run_job(self, job_id):
    """Kicks off a job.

    :param str job_id: Job id.
    :return: A dictionary with the only field of execution_id.
    :rtype: dict
    """
    # Reject requests for jobs the scheduler does not know about.
    job = self.scheduler_manager.get_job(job_id)
    if not job:
        self.set_status(400)
        return {'error': 'Job not found: %s' % job_id}

    # Gather everything the scheduler needs to launch this job.
    job_name = utils.get_job_name(job)
    job_args = utils.get_job_args(job)
    job_kwargs = job.kwargs
    scheduler = utils.import_from_path(settings.SCHEDULER_CLASS)
    execution_id = scheduler.run_job(
        job_name, job_id, settings.DATABASE_CLASS,
        self.datastore.db_config, self.datastore.table_names,
        *job_args, **job_kwargs)

    # Record who triggered this manual run in the audit trail.
    self.datastore.add_audit_log(job_id, job.name, constants.AUDIT_LOG_CUSTOM_RUN,
                                 user=self.username, description=execution_id)

    return {'execution_id': execution_id}
def __init__(self, scheduler_class_path, datastore_class_path, db_config=None,
             db_tablenames=None,
             job_coalesce=constants.DEFAULT_JOB_COALESCE,
             job_misfire_grace_sec=constants.DEFAULT_JOB_MISFIRE_GRACE_SEC,
             job_max_instances=constants.DEFAULT_JOB_MAX_INSTANCES,
             thread_pool_size=constants.DEFAULT_THREAD_POOL_SIZE,
             timezone=constants.DEFAULT_TIMEZONE):
    """
    :param str scheduler_class_path: string path for scheduler, e.g.
        'mysched.FancyScheduler'
    :param str datastore_class_path: string path for datastore, e.g.
        'datastore.SQLDatastore'
    :param dict db_config: dictionary containing values for db connection
    :param dict db_tablenames: dictionary containing the names for the jobs,
        executions, or audit logs table, e.g. {
            'executions_tablename': 'scheduler_executions',
            'jobs_tablename': 'scheduler_jobs',
            'auditlogs_tablename': 'scheduler_auditlogs'
        }
        If any of these keys is not provided, the default table name is
        selected from constants.py
    :param bool job_coalesce: True by default
    :param int job_misfire_grace_sec: Integer number of seconds
    :param int job_max_instances: Int number of instances
    :param int thread_pool_size: Int thread pool size
    :param str timezone: str timezone to schedule jobs in, e.g. 'UTC'
    """
    # The datastore doubles as the (single) job store for the scheduler.
    datastore = utils.get_datastore_instance(
        datastore_class_path, db_config, db_tablenames)
    scheduler_class = utils.import_from_path(scheduler_class_path)
    self.sched = scheduler_class(
        datastore_class_path,
        jobstores={'default': datastore},
        executors={'default': pool.ThreadPoolExecutor(thread_pool_size)},
        job_defaults={
            'coalesce': job_coalesce,
            'misfire_grace_time': job_misfire_grace_sec,
            'max_instances': job_max_instances,
        },
        timezone=timezone)
def test_class_import_from_path(self):
    """import_from_path should load a module and expose its attributes."""
    settings_module = utils.import_from_path('ndscheduler.default_settings_test')
    self.assertEqual(settings_module.DEBUG, True)