def create(interface, data=None):
    """Instantiate an appropriately typed Job data based on version

    :param interface: The job interface used to decide which data class applies
    :param data: Optional raw job data passed through to the constructor
    :returns: A Seed-style ``JobData`` when the interface is Seed, otherwise a
        legacy ``JobData_1_0``
    """
    # Legacy (pre-Seed) interfaces keep the old 1.0 data representation.
    if not JobInterfaceSunset.is_seed(interface):
        return JobData_1_0(data)
    return JobData(data)
def queue_new_job_for_user(self, job_type, data):
    """Creates a new job for the given type and data at the request of a user.

    The new job is immediately placed on the queue. The given job_type model
    must have already been saved in the database (it must have an ID). The new
    job, event, job_exe, and queue models are saved in the database in an
    atomic transaction. If the data is invalid, a
    :class:`job.configuration.data.exceptions.InvalidData` will be thrown.

    :param job_type: The type of the new job to create and queue
    :type job_type: :class:`job.models.JobType`
    :param data: JSON description defining the job data to run on
    :type data: dict
    :returns: The ID of the new job
    :rtype: int
    """
    # User identity is deliberately masked in the trigger event description.
    description = {'user': '******'}
    event = TriggerEvent.objects.create_trigger_event('USER', None, description, timezone.now())

    # TODO: Remove old JobData in v6 when we transition to only Seed job types
    # Guard against data being None: a membership test on None raises
    # TypeError. For dict input this branch behaves exactly as before.
    if data and 'version' in data and '6' == data['version']:
        job_data = JobData(data)
    else:
        job_data = JobData_1_0(data)

    job_id = self.queue_new_job(job_type, job_data, event).id
    return job_id
def test_retrieve_input_data_files_success_multiple_input_file(
        self, retrieve_files, join):
    """A 'multiple' input with two file IDs maps to both retrieved paths."""
    retrieve_files.return_value = {
        1: '/scale/input/TEST_FILE_INPUT1',
        2: '/scale/input/TEST_FILE_INPUT2',
    }
    seed_inputs = [SeedInputFiles({'name': 'TEST_FILE_INPUT',
                                   'multiple': True,
                                   'required': True,
                                   'mediaTypes': [],
                                   'partial': False})]
    data = JobData({'files': {'TEST_FILE_INPUT': [1, 2]}})

    retrieved = data.retrieve_input_data_files(seed_inputs)

    expected = {'TEST_FILE_INPUT': ['/scale/input/TEST_FILE_INPUT1',
                                    '/scale/input/TEST_FILE_INPUT2']}
    self.assertEqual(retrieved, expected)
def test_retrieve_input_data_files_missing_plurality_mismatch(self, retrieve_files, join):
    """Raises when an ID is present in data but nothing is actually retrieved."""
    retrieve_files.return_value = {}
    seed_inputs = [SeedInputFiles({'name': 'TEST_FILE_INPUT',
                                   'multiple': True,
                                   'required': True,
                                   'mediaTypes': [],
                                   'partial': False})]
    data = JobData({'files': {'TEST_FILE_INPUT': [1]}})

    with self.assertRaises(Exception):
        data.retrieve_input_data_files(seed_inputs)
def test_retrieve_input_data_files_failure_multiple_for_single_input_file(self, retrieve_files, join):
    """Raises when two files are supplied for an input declared non-multiple."""
    retrieve_files.return_value = {
        1: '/scale/input/TEST_FILE_INPUT1',
        2: '/scale/input/TEST_FILE_INPUT2',
    }
    seed_inputs = [SeedInputFiles({'name': 'TEST_FILE_INPUT',
                                   'multiple': False,
                                   'required': True,
                                   'mediaTypes': [],
                                   'partial': False})]
    data = JobData({'files': {'TEST_FILE_INPUT': [1, 2]}})

    with self.assertRaises(Exception):
        data.retrieve_input_data_files(seed_inputs)
def test_retrieve_input_data_files_missing_file_not_required(self, retrieve_files, join):
    """An absent optional input yields an empty result rather than an error."""
    retrieve_files.return_value = {}
    seed_inputs = [SeedInputFiles({'name': 'TEST_FILE_INPUT',
                                   'multiple': False,
                                   'required': False,
                                   'mediaTypes': [],
                                   'partial': False})]
    data = JobData({'files': {}})

    retrieved = data.retrieve_input_data_files(seed_inputs)

    self.assertEqual(retrieved, {})
def test_store_output_files(self, dummy_store, isfile):
    """Stored output files are recorded in the results keyed by output name."""
    workspace = storage_test_utils.create_workspace()

    metadata = ProductFileMetadata('OUTPUT_TIFFS', 'outfile0.tif', media_type='image/tiff')
    files = {'OUTPUT_TIFFS': [metadata]}

    configuration = JobConfiguration()
    configuration.add_output_workspace('OUTPUT_TIFFS', workspace.name)
    job_exe = Mock()
    job_exe.job_type.get_job_configuration.return_value = configuration

    results = JobResults()._store_output_data_files(files, JobData({}), job_exe)

    self.assertEqual({'OUTPUT_TIFFS': [1]}, results.files)
def handle(self, *args, **options):
    """See :meth:`django.core.management.base.BaseCommand.handle`.

    This method starts the command.
    """
    # Job identity comes from the environment set up by the scheduler.
    job_id = int(os.environ.get('SCALE_JOB_ID'))
    exe_num = int(os.environ.get('SCALE_EXE_NUM'))

    logger.info('Command starting: scale_pre_steps - Job ID: %d, Execution Number: %d', job_id, exe_num)
    try:
        job_exe = self._get_job_exe(job_id, exe_num)

        interface = job_exe.job_type.get_job_interface()
        exe_config = job_exe.get_execution_configuration()

        logger.info('Validating mounts...')
        interface.validate_populated_mounts(exe_config)

        logger.info('Validating settings...')
        interface.validate_populated_settings(exe_config)

        logger.info('Validating outputs and workspaces...')
        interface.validate_workspace_for_outputs(exe_config)

        self._generate_input_metadata(job_exe)

        # Re-wrap the stored data dict in a fresh JobData before pre-steps.
        job_data = JobData(job_exe.job.get_job_data().get_dict())

        logger.info('Setting up input files...')
        interface.perform_pre_steps(job_data)

        logger.info('Ready to execute job: %s', exe_config.get_args('main'))
    except ScaleError as err:
        # Known Scale errors carry their own logging and exit code.
        err.log()
        sys.exit(err.exit_code)
    except Exception as ex:
        # Map unexpected exceptions onto a registered error if one exists.
        exit_code = GENERAL_FAIL_EXIT_CODE
        err = get_error_by_exception(ex.__class__.__name__)
        if err:
            err.log()
            exit_code = err.exit_code
        else:
            logger.exception('Error performing pre-job steps')
        sys.exit(exit_code)

    logger.info('Command completed: scale_pre_steps')
def test_retrieve_files_with_id(self, filter, download):
    """Retrieved paths are built by joining each directory with its file name."""
    filter.return_value = [
        Mock(id=1, file_name='input_thing_file'),
        Mock(id=2, file_name='input_other_file'),
    ]
    dirs_by_id = {
        1: ("/scale/input/INPUT_THING", False),
        2: ("/scale/input/INPUT_OTHER", True),
    }
    data = JobData({'files': {}})

    retrieved = data._retrieve_files(dirs_by_id)

    expected = {
        1: "/scale/input/INPUT_THING/input_thing_file",
        2: "/scale/input/INPUT_OTHER/input_other_file",
    }
    self.assertEqual(retrieved, expected)