def set_up(self):
        """Register the default project and queue a freshly-minted job.

        Generates a random job id, stores it on the test instance, stamps it
        onto the global foundations job, and pushes a queue message so the
        consumers under test have a job to observe.
        """
        from uuid import uuid4

        from foundations_contrib.global_state import current_foundations_job, message_router
        from foundations_events.producers.jobs import QueueJob

        foundations.set_project_name('default')
        self._job_id = str(uuid4())
        current_foundations_job().job_id = self._job_id

        producer = QueueJob(message_router, current_foundations_job())
        producer.push_message()
    def set_up(self):
        """Clean residual state, then queue a job under the default project.

        Runs the acceptance cleanup hook first, names the pipeline context
        after a fresh random job id, and announces the job as queued.
        """
        from uuid import uuid4

        from acceptance.cleanup import cleanup
        from foundations_contrib.global_state import current_foundations_context, message_router
        from foundations_events.producers.jobs import QueueJob

        cleanup()

        foundations.set_project_name('default')
        self._job_id = str(uuid4())

        context = current_foundations_context().pipeline_context()
        context.file_name = self._job_id
        QueueJob(message_router, context).push_message()
    def _make_job(self):
        """Announce this job's context as queued and then running."""
        from foundations_contrib.global_state import message_router
        from foundations_events.producers.jobs import QueueJob, RunJob

        # Lifecycle order matters: the queue message must precede the run message.
        for producer_class in (QueueJob, RunJob):
            producer_class(message_router, self._context).push_message()
 def _make_running_job(klass, job_name, user, tags=None, start_timestamp=None):
     """Drive a job through the queued and running states.

     Stamps the shared foundations job with the given name and user, pushes
     the queue then run messages (faking the start time in between), applies
     any tags, and finally restores the clock.
     """
     job = klass._foundations_job
     job.job_id = job_name
     job.user_name = user
     router = klass._message_router
     QueueJob(router, job).push_message()
     klass._fake_start_time(start_timestamp)
     RunJob(router, job).push_message()
     klass._set_tags(job_name, tags)
     klass._restore_time(start_timestamp, None)
    def test_queue_job_consumers(self):
        """Verify every Redis key the QueueJob producer's consumers write.

        Pushing a queue message should record the job's parameter names,
        membership in the per-project and global queued-job sets, its
        serialized parameters, state, project, user, and creation time,
        and register the project in the tracked-project listing.
        """
        from foundations_contrib.utils import byte_string
        from foundations_contrib.models.project_listing import ProjectListing
        from foundations_events.producers.jobs import QueueJob
        from time import time

        expected_job_parameters = {'random_job_data': self._str_random_uuid()}
        self._context.provenance.job_run_data = expected_job_parameters

        self._redis.sadd('simple', 'value')

        QueueJob(self._message_router, self._context).push_message()
        current_time = time()

        # Parameter names are tracked per project.
        parameter_key = "projects:{}:job_parameter_names".format(
            self._project_name)
        job_parameter_names = self._redis.smembers(parameter_key)
        self.assertEqual(set([b"random_job_data"]), job_parameter_names)

        running_jobs_key = "project:{}:jobs:running".format(self._project_name)
        running_and_completed_jobs = self._redis.smembers(running_jobs_key)
        expected_jobs = set([byte_string(self._job_id)])
        self.assertEqual(expected_jobs, running_and_completed_jobs)

        queued_job_key = "project:{}:jobs:queued".format(self._project_name)
        queued_jobs = self._redis.smembers(queued_job_key)
        self.assertEqual(set([byte_string(self._job_id)]), queued_jobs)

        # This key has no placeholder; the previous no-op
        # `.format(self._project_name)` call has been removed.
        global_queued_job_key = "projects:global:jobs:queued"
        global_queued_jobs = self._redis.smembers(global_queued_job_key)
        self.assertEqual(set([byte_string(self._job_id)]), global_queued_jobs)

        job_parameters_key = "jobs:{}:parameters".format(self._job_id)
        job_parameters = self._get_and_deserialize(job_parameters_key)
        self.assertEqual(expected_job_parameters, job_parameters)

        job_state_key = "jobs:{}:state".format(self._job_id)
        state = self._redis.get(job_state_key)
        self.assertEqual(b"queued", state)

        job_project_key = "jobs:{}:project".format(self._job_id)
        job_project_name = self._redis.get(job_project_key)
        self.assertEqual(byte_string(self._project_name), job_project_name)

        # The project must appear in the tracked-project listing, created
        # within the last few seconds of the push.
        tracked_projects = ProjectListing.list_projects(self._redis)
        project_listing = tracked_projects[0]
        self.assertEqual(self._project_name, project_listing["name"])
        self.assertLess(current_time - project_listing["created_at"], 5)

        job_user_key = "jobs:{}:user".format(self._job_id)
        job_user = self._redis.get(job_user_key)
        self.assertEqual(byte_string(self._user), job_user)

        creation_time_key = "jobs:{}:creation_time".format(self._job_id)
        string_creation_time = self._redis.get(creation_time_key)
        creation_time = float(string_creation_time.decode())
        self.assertLess(current_time - creation_time, 5)
 def _make_completed_job(klass, job_name, user, tags=None, start_timestamp=None, end_timestamp=None, **kwargs):
     """Walk a job through queued -> running -> completed.

     Any extra keyword arguments become the job's run parameters. Start and
     end timestamps are faked around the run/complete messages and the clock
     is restored afterwards.
     """
     router = klass._message_router
     job = klass._foundations_job
     job.job_id = job_name
     job.user_name = user
     job.provenance.job_run_data = kwargs
     QueueJob(router, job).push_message()
     klass._fake_start_time(start_timestamp)
     RunJob(router, job).push_message()
     klass._set_tags(job_name, tags)
     klass._fake_end_time(end_timestamp)
     CompleteJob(router, job).push_message()
     klass._restore_time(start_timestamp, end_timestamp)
 def _make_running_job(klass, job_name, user, tags=None, start_timestamp=None):
     """Emit queue and run messages for *job_name*, then restore the clock.

     Names the shared pipeline context after the job, records the owning
     user on its provenance, fakes the start time between the queue and run
     messages, and applies any tags before restoring time.
     """
     context = klass._pipeline_context
     context.file_name = job_name
     context.provenance.user_name = user
     router = klass._message_router
     QueueJob(router, context).push_message()
     klass._fake_start_time(start_timestamp)
     RunJob(router, context).push_message()
     klass._set_tags(job_name, tags)
     klass._restore_time(start_timestamp, None)
# Beispiel #8
# 0
def set_up_job_environment():
    """Mark this process as a deployment, announce the job, and install exit hooks.

    Flags the configuration as a deployment, logs the effective config,
    transitions the current pipeline context's job state, pushes the queue
    and run messages, and registers the at-exit and exception handlers.
    """
    import atexit

    from foundations_contrib.global_state import (
        config_manager,
        current_foundations_context,
        message_router,
    )
    from foundations_events.producers.jobs import QueueJob, RunJob

    config_manager["_is_deployment"] = True
    _get_logger().debug(
        f"Foundations has been run with the following configuration:\n"
        f"{yaml.dump(config_manager.config(), default_flow_style=False)}")

    pipeline_context = current_foundations_context().pipeline_context()
    _set_job_state(pipeline_context)

    # Lifecycle order matters: queued must be announced before running.
    for producer_class in (QueueJob, RunJob):
        producer_class(message_router, pipeline_context).push_message()

    atexit.register(_at_exit_callback)
    _set_up_exception_handling()
 def _make_queued_job(klass, job_name, user):
     """Push only the queue message for a job owned by *user*."""
     job = klass._foundations_job
     job.job_id = job_name
     job.provenance.user_name = user
     QueueJob(klass._message_router, job).push_message()
 def _make_queued_job(klass, job_name, user):
     """Queue (but do not run) a job identified by *job_name*."""
     context = klass._pipeline_context
     context.file_name = job_name
     context.provenance.user_name = user
     QueueJob(klass._message_router, context).push_message()