Example #1
    def test_job_observable_remove_observer(self):
        project = self.project_data['project']
        new_dir = data_mocking.create_directory(
            {
                'project': project,
                'user': self.project_data['users'][0],
            }, self.session)
        job = data_mocking.create_job(
            {
                'project': project,
                'completion_directory_id': new_dir.id
            },
            session=self.session)

        job_observable = task_file_observers.JobObservable(
            session=self.session, log={}, job=job)

        dir_observer = task_file_observers.DirectoryJobObserver(
            session=self.session,
            log={},
            directory=new_dir,
            job_observable=job_observable)
        job_observable._remove_observer(dir_observer)
        self.session.commit()
        self.session.flush()
        updated_job = self.session.query(Job).filter(Job.id == job.id).first()
        self.assertEqual(updated_job.completion_directory_id, None)
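
A minimal, purely illustrative sketch of what these assertions imply: the JobObservable constructor registers one observer for the job's completion directory (Example #4 below checks this), and _remove_observer both drops that observer and detaches the job from the directory. The real task_file_observers implementation may differ.

# Hypothetical sketch only; not the real task_file_observers code.
class DirectoryJobObserver:
    def __init__(self, session, log, directory, job_observable):
        self.session = session
        self.log = log
        self.directory = directory
        self.job_observable = job_observable


class JobObservable:
    def __init__(self, session, log, job):
        self.session = session
        self.log = log
        self.job = job
        self.dir_observer_list = []
        if job.completion_directory_id:
            # One observer per completion directory, registered up front.
            self.dir_observer_list.append(
                DirectoryJobObserver(
                    session=session,
                    log=log,
                    directory=job.completion_directory,
                    job_observable=self))

    def _remove_observer(self, observer):
        # Drop the observer and detach the job from its completion
        # directory, which is the state the test asserts on.
        if observer in self.dir_observer_list:
            self.dir_observer_list.remove(observer)
        self.job.completion_directory_id = None
        self.session.add(self.job)
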
Example #2
    def process_sync_actions(self, session, sync_action):
        """
            Executes sync action depending on the type of action
        :param session:
        :param sync_action:
        :return:
        """
        log = regular_log.default()
        sync_event = sync_action.sync_event
        sync_events_manager = SyncEventManager(session=session,
                                               sync_event=sync_event)
        logger.debug('Processing new sync event.')
        if sync_event.event_trigger_type == 'task_completed':
            completed_task = sync_event.completed_task
            job_observable = task_file_observers.JobObservable(
                session=session,
                log=log,
                job=completed_task.job,
                task=completed_task,
                sync_events_manager=sync_events_manager)
            job_observable.notify_all_observers(defer=False)
        elif sync_event.event_trigger_type == 'file_operation':
            logger.debug('Processing file_operation sync event.')
            destination_directory = sync_event.dataset_destination
            source_directory = None
            file = sync_event.file
            if sync_event.event_effect_type in ['file_copy', 'file_move']:
                logger.debug('Processing file_copy sync event.')
                if sync_event.event_effect_type == 'file_copy':
                    # we need to provide the source dir for validation of incoming dir.
                    source_directory = sync_event.dataset_source
                    file = sync_event.new_file_copy

                job_dir_sync_manager = job_dir_sync_utils.JobDirectorySyncManager(
                    session=session,
                    log=log,
                    directory=destination_directory,
                )
                # we need to provide the source dir, so validation of incoming
                # directory does not fail when checking the directory the file is coming from.
                logger.debug('Syncing file on jobs...')
                job_dir_sync_manager.add_file_to_all_jobs(
                    file=file,
                    source_dir=source_directory,
                    create_tasks=True,
                )
            else:
                logger.info(
                    '{} event effect not supported for processing.'.format(
                        sync_event.event_effect_type))
        else:
            logger.info(
                '{} event trigger not supported for processing.'.format(
                    sync_event.event_trigger_type))
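
A hedged sketch of how a caller might drive this handler over a batch of pending sync actions. Only process_sync_actions(session, sync_action) comes from the method above; run_pending_sync_actions and pending_sync_actions() are hypothetical names used for illustration.

# Hypothetical driver loop. pending_sync_actions() is an assumed query helper,
# `manager` is whatever object exposes process_sync_actions(), and `logger`
# is the module-level logger used in the code above.
def run_pending_sync_actions(session, manager):
    for sync_action in pending_sync_actions(session):  # assumed helper
        try:
            manager.process_sync_actions(session=session,
                                         sync_action=sync_action)
            session.commit()
        except Exception:
            session.rollback()
            logger.exception('Failed to process sync_action {}'.format(
                sync_action.id))
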
Example #3
    def test_notify_task_completion(self):
        project = self.project_data['project']
        new_dir = data_mocking.create_directory(
            {
                'project': project,
                'user': self.project_data['users'][0],
            }, self.session)
        old_dir = data_mocking.create_directory(
            {
                'project': project,
                'user': self.project_data['users'][0],
            }, self.session)

        job = data_mocking.create_job(
            {
                'project': project,
                'completion_directory_id': new_dir.id,
                'output_dir_action': 'copy'
            },
            session=self.session)
        file = data_mocking.create_file({'project_id': project.id},
                                        self.session)
        task_1 = data_mocking.create_task(
            {
                'name': 'task1',
                'job': job,
                'file': file,
                'incoming_directory_id': old_dir.id
            }, self.session)
        with patch.object(
                regular_methods,
                'transmit_interservice_request_after_commit') as mock:
            job_observable = task_file_observers.JobObservable(
                session=self.session,
                log=regular_log.default(),
                job=job,
                task=task_1)
            dir_observer = task_file_observers.DirectoryJobObserver(
                session=self.session,
                log=regular_log.default(),
                directory=new_dir,
                job_observable=job_observable)
            job_observable.add_new_directory_observer(dir_observer)
            job_observable.notify_all_observers(defer=False)
            self.session.commit()
            file_link = self.session.query(WorkingDirFileLink).filter(
                WorkingDirFileLink.working_dir_id == new_dir.id).all()
            mock.assert_called_once()
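
The patch.object(regular_methods, 'transmit_interservice_request_after_commit') idiom above recurs whenever observer notification is exercised in tests. A small context-manager helper could keep that boilerplate in one place; the helper name is hypothetical and not part of the test suite.

from contextlib import contextmanager
from unittest.mock import patch


@contextmanager
def mock_interservice_transmit():
    # Hypothetical test helper: replaces the real inter-service call (the same
    # regular_methods module imported by the test above) with a mock and
    # yields it so tests can run e.g. mock.assert_called_once().
    with patch.object(
            regular_methods,
            'transmit_interservice_request_after_commit') as mock:
        yield mock
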
Example #4
    def test_job_observable_creation(self):
        project = self.project_data['project']
        completion_dir = data_mocking.create_directory(
            {
                'project': project,
                'user': self.project_data['users'][0],
            }, self.session)
        print('completion_dir', completion_dir.id)
        job = data_mocking.create_job(
            {
                'name': 'my-test-job',
                'completion_directory_id': completion_dir.id
            }, self.session)
        job_observable = task_file_observers.JobObservable(
            session=self.session, log={}, job=job)
        self.assertEqual(len(job_observable.dir_observer_list), 1)
        dir_observer = job_observable.dir_observer_list[0]
        self.assertEqual(dir_observer.directory.id,
                         job.completion_directory_id)
        self.assertEqual(dir_observer.job_observable, job_observable)
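
Because the constructor already registers the completion-directory observer, callers typically just construct the observable and notify it; the explicit add_new_directory_observer call in Example #3 appears to register an additional observer on top of that. A hedged usage sketch, condensed from Example #5 (session, job and task are assumed to exist in the caller's scope):

# Hedged usage sketch; variables come from the caller's scope, as in Example #5.
job_observable = task_file_observers.JobObservable(
    session=session,
    log=regular_log.default(),
    job=job,
    task=task)
job_observable.notify_all_observers(defer=True)
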
Example #5
def task_complete(session, task, new_file, project):
    """

    Also handles new_file here,
    so for example if the child tasks ditacte that we do a review afterwards,
    we don't flag the file as complete

    This solves the "complete" problem, since the file is not really complete till it's reviewed
    right?
    And presumably we are going to hide controls anyway if in a different status

    """

    # TODO prevent completion of already complete tasks?

    # Check child tasks
    # Should create the event listeners before changing status

    child_list = task.child_list(session)
    if task.status == 'complete':
        return True, new_file

    if child_list:
        for child in child_list:
            if child.task_type == "review":
                # Unlock review task
                child.status = 'available'
                session.add(child)

                # task.status = 'in_review'

                # Currently removing the in-review concept since there is no way to get out of it
                task.status = 'complete'
                """
                this is not supported yet, but if we did 
                want to copy in advance it could go here.

                Note that we would need to call 
                File.update_file_from_existing()
                directly, as we already have the review TASK created
                just not the file changed.
                """

    else:
        task.status = 'complete'

        job = task.job

        # Careful, this is only relevant for normal
        # tasks, not exams?
        if task.job_type == 'Normal' \
                and job.file_handling == "isolation":
            merge_task(session=session, job=job, task=task)

        # this stuff could be applicable to exams and normal maybe

        new_file = new_file.toggle_flag_shared(session)

        # Only assign this here,
        # since we presumably don't want files showing up in the
        # project dir until complete.
        session.add(new_file)
        new_file.job_id = job.id

    task.time_completed = datetime.datetime.utcnow()

    job = task.job
    session.add(job)

    job.stat_count_complete += 1

    # Record member completed?

    ##### Handle transactions

    # TODO "If transactions enabled?"

    # QUESTION Cache from file here?

    if task.job_type == 'Normal':

        if job.share_type == "Market":

            if task.is_live == True:
                result = task_complete_transaction_normal(session=session,
                                                          task=task)

    if task.job_type == 'Exam':
        result = task_complete_exam(session=session, task=task)
    # QUESTION
    # Notify Observers of task completion
    log = regular_log.default()
    sync_event_manager = SyncEventManager.create_sync_event_and_manager(
        session=session,
        dataset_source_id=None,
        dataset_destination=None,
        description=None,
        file=task.file,
        job=task.job,
        input=None,
        project=task.job.project,
        created_task=None,
        completed_task=task,
        new_file_copy=None,
        transfer_action=None,
        event_effect_type='',
        event_trigger_type='task_completed',
        status='init',
        member_created=None)
    logger.debug('Created sync_event {}'.format(
        sync_event_manager.sync_event.id))
    if job.completion_directory and job.output_dir_action in ['copy', 'move']:
        job_observable = task_file_observers.JobObservable(
            session=session,
            log=log,
            job=job,
            task=task,
            sync_events_manager=sync_event_manager)
        job_observable.notify_all_observers(defer=True)

    Event.new_deferred(
        session=session,
        kind='task_completed',
        project_id=task.project_id,
        member_id=get_member(session).id if get_member(session) else None,
        task_id=task.id,
        wait_for_commit=True)
    job.job_complete_core(session)

    return True, new_file
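
Since the observers are notified with defer=True and the 'task_completed' event is created with wait_for_commit=True, the caller is expected to commit the session after task_complete() returns so the deferred work actually runs. A hedged sketch of such a caller; the wrapper name is hypothetical.

def complete_task_and_commit(session, task, new_file, project):
    # Hypothetical wrapper: commit after task_complete() so that the deferred
    # observer notification and the deferred 'task_completed' event fire.
    success, new_file = task_complete(
        session=session, task=task, new_file=new_file, project=project)
    session.commit()
    return success, new_file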