def setUp(self):
    """Create three seed job types and a set of RUNNING jobs with fixed
    ``last_status_change`` timestamps so duration-based queries see
    deterministic longest/shortest entries per job type.
    """
    django.setup()

    manifest1 = job_test_utils.create_seed_manifest(name='type-1', jobVersion='1.0.0')
    self.job_type_1 = job_test_utils.create_seed_job_type(manifest=manifest1)
    manifest2 = job_test_utils.create_seed_manifest(name='type-2', jobVersion='2.0.0')
    self.job_type_2 = job_test_utils.create_seed_job_type(manifest=manifest2)
    # Same name as job_type_1 but a different version, so a distinct job type.
    manifest3 = job_test_utils.create_seed_manifest(name='type-1', jobVersion='2.0.0')
    self.job_type_3 = job_test_utils.create_seed_job_type(manifest=manifest3)

    # Fixed, timezone-aware timestamps.  For each pair the "longest" entry
    # has the earlier epoch value (i.e. has been in its status longer).
    self.entry_1_longest = datetime.datetime.utcfromtimestamp(500000).replace(tzinfo=timezone.utc)
    self.entry_1_shortest = datetime.datetime.utcfromtimestamp(650000).replace(tzinfo=timezone.utc)
    self.entry_2_longest = datetime.datetime.utcfromtimestamp(600000).replace(tzinfo=timezone.utc)
    self.entry_2_shortest = datetime.datetime.utcfromtimestamp(750000).replace(tzinfo=timezone.utc)
    self.entry_3_longest = datetime.datetime.utcfromtimestamp(700000).replace(tzinfo=timezone.utc)
    self.entry_3_shortest = datetime.datetime.utcfromtimestamp(800000).replace(tzinfo=timezone.utc)

    # Job counts per type: 2 for type_1, 3 for type_2, 4 for type_3
    # (type_3 deliberately repeats both timestamps).
    job_test_utils.create_job(job_type=self.job_type_1, status='RUNNING', last_status_change=self.entry_1_longest)
    job_test_utils.create_job(job_type=self.job_type_1, status='RUNNING', last_status_change=self.entry_1_shortest)
    job_test_utils.create_job(job_type=self.job_type_2, status='RUNNING', last_status_change=self.entry_2_shortest)
    job_test_utils.create_job(job_type=self.job_type_2, status='RUNNING', last_status_change=self.entry_2_longest)
    job_test_utils.create_job(job_type=self.job_type_2, status='RUNNING', last_status_change=self.entry_2_shortest)
    job_test_utils.create_job(job_type=self.job_type_3, status='RUNNING', last_status_change=self.entry_3_shortest)
    job_test_utils.create_job(job_type=self.job_type_3, status='RUNNING', last_status_change=self.entry_3_longest)
    job_test_utils.create_job(job_type=self.job_type_3, status='RUNNING', last_status_change=self.entry_3_longest)
    job_test_utils.create_job(job_type=self.job_type_3, status='RUNNING', last_status_change=self.entry_3_shortest)
def setUp(self):
    """Build three seed job types with differing file interfaces, plus one
    plain-text file for use as input in the tests of this class.
    """
    django.setup()

    # Job type 1: one text input, one PNG output.
    self.input_name_1 = 'Test_Input_1'
    self.output_name_1 = 'Test_Output_1'
    file_inputs = [{'name': self.input_name_1, 'mediaTypes': ['text/plain']}]
    file_outputs = [{
        'name': self.output_name_1,
        'mediaType': 'image/png',
        'pattern': '*_.png'
    }]
    manifest_one = job_test_utils.create_seed_manifest(
        command='my_cmd args', inputs_files=file_inputs, inputs_json=[],
        outputs_files=file_outputs, outputs_json=[])
    self.job_type_1 = job_test_utils.create_seed_job_type(manifest=manifest_one)

    # Job type 2: accepts two image media types, emits plain text.
    self.input_name_2 = 'Test_Input_2'
    self.output_name_2 = 'Test_Output_2'
    file_inputs = [{
        'name': self.input_name_2,
        'mediaTypes': ['image/png', 'image/tiff']
    }]
    file_outputs = [{
        'name': self.output_name_2,
        'mediaType': 'text/plain',
        'pattern': '*_.txt'
    }]
    manifest_two = job_test_utils.create_seed_manifest(
        command='my_cmd args', inputs_files=file_inputs, inputs_json=[],
        outputs_files=file_outputs, outputs_json=[])
    self.job_type_2 = job_test_utils.create_seed_job_type(manifest=manifest_two)

    # Job type 3: input only, no output files.
    self.input_name_3 = 'Test_Input_3'
    self.output_name_3 = 'Test_Output_3'
    file_inputs = [{'name': self.input_name_3, 'mediaTypes': ['text/plain']}]
    manifest_three = job_test_utils.create_seed_manifest(
        command='my_cmd args', inputs_files=file_inputs, inputs_json=[],
        outputs_files=[], outputs_json=[])
    self.job_type_3 = job_test_utils.create_seed_job_type(manifest=manifest_three)

    self.file_1 = storage_test_utils.create_file(media_type='text/plain')
def test_uuid_use_properties(self):
    """Tests setting UUIDs on products with different property values."""
    json_inputs = [
        {'name': 'property1', 'type': 'string'},
        {'name': 'property2', 'type': 'string'}
    ]
    seed_manifest = job_test_utils.create_seed_manifest(
        name='test-job', inputs_json=json_inputs, command='my_command')
    seed_manifest['job']['interface']['inputs']['files'] = []
    seed_job_type = job_test_utils.create_seed_job_type(manifest=seed_manifest)

    # First execution: property1 -> 'value1'.
    first_job = job_test_utils.create_job(job_type=seed_job_type)
    first_exe = job_test_utils.create_job_exe(job=first_job)
    first_data = first_exe.job.get_input_data()
    first_data.add_value(JsonValue('property1', 'value1'))
    first_data.add_value(JsonValue('property2', 'value2'))
    first_exe.job.input = convert_data_to_v6_json(first_data).get_dict()

    # Second execution: only property1 differs -> 'diffvalue1'.
    second_job = job_test_utils.create_job(job_type=seed_job_type)
    second_exe = job_test_utils.create_job_exe(job=second_job)
    second_data = second_exe.job.get_input_data()
    second_data.add_value(JsonValue('property1', 'diffvalue1'))
    second_data.add_value(JsonValue('property2', 'value2'))
    second_exe.job.input = convert_data_to_v6_json(second_data).get_dict()

    first_products = ProductFile.objects.upload_files(
        self.files, [self.source_file.id], first_exe, self.workspace)
    second_products = ProductFile.objects.upload_files(
        self.files, [self.source_file.id], second_exe, self.workspace)

    # Make sure the product files have different UUIDs
    self.assertIsNotNone(first_products[0].uuid)
    self.assertIsNotNone(first_products[1].uuid)
    self.assertNotEqual(first_products[0].uuid, second_products[0].uuid)
    self.assertNotEqual(first_products[1].uuid, second_products[1].uuid)
def setUp(self):
    """Create a recipe, a seed job type with two text outputs, and a RUNNING
    job/execution attached to the recipe.

    Fix: removed dead locals ``cmd``/``cmd_args`` that were assigned but
    never used (the command string was hard-coded in the manifest call).
    """
    django.setup()

    self.recipe_type = recipe_utils.create_recipe_type_v6()
    self.recipe = recipe_utils.create_recipe(recipe_type=self.recipe_type)

    outputs = [{
        'name': 'arg1',
        'pattern': '*_.txt'
    }, {
        'name': 'arg2',
        'pattern': '*_.txt'
    }]
    manifest = job_utils.create_seed_manifest(command='command args', outputs_files=outputs)
    self.job_type = job_utils.create_seed_job_type(job_version='1.0', manifest=manifest)

    # NOTE(review): recipe_type/recipe are created a second time here,
    # overwriting the attributes set above and leaving the first pair as
    # orphan rows in the test database — TODO confirm this duplication is
    # intentional before removing it (row counts may matter to the tests).
    self.recipe_type = recipe_utils.create_recipe_type_v6()
    self.recipe = recipe_utils.create_recipe(recipe_type=self.recipe_type)

    self.event = TriggerEvent.objects.create_trigger_event('TEST', None, {}, now())
    self.job = job_utils.create_job(job_type=self.job_type, event=self.event,
                                    status='RUNNING', recipe=self.recipe)
    self.job_exe = job_utils.create_job_exe(job=self.job, status='RUNNING')
def setUp(self):
    """Stand up a workspace with mocked upload/delete, a source file, a job
    execution carrying JSON input properties and source metadata, and product
    file metadata fixtures used by the upload tests.
    """
    django.setup()

    def upload_files(file_uploads):
        # Mock side effect: persist each file model without real storage I/O.
        for file_upload in file_uploads:
            file_upload.file.save()

    def delete_files(files):
        # Mock side effect: just re-save the models; nothing is deleted.
        for scale_file in files:
            scale_file.save()

    self.workspace = storage_test_utils.create_workspace()
    self.workspace.upload_files = MagicMock(side_effect=upload_files)
    self.workspace.delete_files = MagicMock(side_effect=delete_files)

    self.source_file = source_test_utils.create_source(file_name='input1.txt', workspace=self.workspace)

    inputs_json = [
        {'name': 'property1', 'type': 'string'},
        {'name': 'property2', 'type': 'string'}
    ]
    manifest = job_test_utils.create_seed_manifest(inputs_json=inputs_json, command='my_command')
    # Presumably clears file inputs the manifest helper adds by default, so
    # the interface has JSON properties only — TODO confirm against helper.
    manifest['job']['interface']['inputs']['files'] = []
    job_type = job_test_utils.create_seed_job_type(manifest=manifest)
    self.job_exe = job_test_utils.create_job_exe(job_type=job_type)
    data = self.job_exe.job.get_input_data()
    data.add_value(JsonValue('property1', 'value1'))
    data.add_value(JsonValue('property2', 'value2'))
    self.job_exe.job.input = convert_data_to_v6_json(data).get_dict()
    # Source metadata on the job, distinct from the per-file metadata below.
    self.job_exe.job.source_sensor_class = 'classA'
    self.job_exe.job.source_sensor = '1'
    self.job_exe.job.source_collection = '12345'
    self.job_exe.job.source_task = 'my-task'
    self.job_exe.job.save()

    # A second execution with no input data or source metadata.
    self.job_exe_no = job_test_utils.create_job_exe()

    self.local_path_1 = os.path.join(SCALE_JOB_EXE_OUTPUT_PATH, 'local/1/file.txt')
    self.local_path_2 = os.path.join(SCALE_JOB_EXE_OUTPUT_PATH, 'local/2/file.json')
    self.local_path_3 = os.path.join(SCALE_JOB_EXE_OUTPUT_PATH, 'local/3/file.h5')

    # files[1] carries its own source_* values (classB/2/12346/my-task-2),
    # overriding the job-level metadata set above for that product.
    self.files = [
        ProductFileMetadata(output_name='output_name_1',
                            local_path=self.local_path_1,
                            remote_path='remote/1/file.txt'),
        ProductFileMetadata(output_name='output_name_2',
                            local_path=self.local_path_2,
                            media_type='application/x-custom-json',
                            remote_path='remote/2/file.json',
                            source_sensor_class='classB',
                            source_sensor='2',
                            source_collection='12346',
                            source_task='my-task-2'),
    ]
    self.files_no = [
        ProductFileMetadata(output_name='output_name_3',
                            local_path=self.local_path_3,
                            media_type='image/x-hdf5-image',
                            remote_path='remote/3/file.h5')
    ]
def test_batch_link(self):
    """Tests calling ProductFileManager.upload_files() successfully when associated with a batch"""
    manifest = job_test_utils.create_seed_manifest(name='scale-batch-creator')
    job_type = job_test_utils.create_seed_job_type(manifest=manifest)
    job_exe = job_test_utils.create_job_exe(job_type=job_type)
    batch = batch_test_utils.create_batch()
    job_exe.batch = batch
    # NOTE(review): only the batch is saved here, so the job_exe.batch
    # assignment is never persisted — upload_files must be reading the
    # in-memory job_exe object.  Confirm whether job_exe.save() was intended.
    batch.save()
    # Created for its database side effect; the returned model is unused.
    recipe_job = recipe_test_utils.create_recipe_job(job=job_exe.job)
    products_no = ProductFile.objects.upload_files(self.files_no, [self.source_file.id],
                                                   self.job_exe_no, self.workspace)
    products = ProductFile.objects.upload_files(self.files, [self.source_file.id, products_no[0].id],
                                                job_exe, self.workspace)
    self.assertEqual(batch.id, products[0].batch_id)
def setUp(self):
    """Create a tagged source file, a seed job type, and two recipe types
    (v6 and v7 definitions) that consume a single INPUT_FILE.
    """
    django.setup()
    add_message_backend(AMQPMessagingBackend)
    self.workspace = storage_test_utils.create_workspace()
    self.source_file = ScaleFile.objects.create(file_name='input_file', file_type='SOURCE',
                                                media_type='text/plain', file_size=10,
                                                data_type_tags=['type1'], file_path='the_path',
                                                workspace=self.workspace)
    # 'type1' was already supplied at creation; these calls add 'type2' and
    # 'type3' (add of an existing tag is presumably a no-op — confirm).
    self.source_file.add_data_type_tag('type1')
    self.source_file.add_data_type_tag('type2')
    self.source_file.add_data_type_tag('type3')
    manifest = job_test_utils.create_seed_manifest(
        inputs_files=[{'name': 'INPUT_FILE', 'media_types': ['text/plain'], 'required': True, 'multiple': True}],
        inputs_json=[])
    self.jt1 = job_test_utils.create_seed_job_type(manifest=manifest)
    # v6 recipe definition: one job node ('node_a') fed by the recipe input.
    v6_recipe_type_def = {'version': '6',
                          'input': {'files': [{'name': 'INPUT_FILE', 'media_types': ['text/plain'],
                                               'required': True, 'multiple': True}], 'json': []},
                          'nodes': {'node_a': {'dependencies': [],
                                               'input': {'INPUT_FILE': {'type': 'recipe', 'input': 'INPUT_FILE'}},
                                               'node_type': {'node_type': 'job', 'job_type_name': self.jt1.name,
                                                             'job_type_version': self.jt1.version,
                                                             'job_type_revision': 1}}}}
    self.recipe = recipe_test_utils.create_recipe_type_v6(name='test-recipe', definition=v6_recipe_type_def)
    # Same graph, declared with the v7 definition version.
    v7_recipe_type_def = {'version': '7',
                          'input': {'files': [{'name': 'INPUT_FILE', 'media_types': ['text/plain'],
                                               'required': True, 'multiple': True}], 'json': []},
                          'nodes': {'node_a': {'dependencies': [],
                                               'input': {'INPUT_FILE': {'type': 'recipe', 'input': 'INPUT_FILE'}},
                                               'node_type': {'node_type': 'job', 'job_type_name': self.jt1.name,
                                                             'job_type_version': self.jt1.version,
                                                             'job_type_revision': 1}}}}
    self.recipe_v7 = recipe_test_utils.create_recipe_type_v6(name='test-recipe-v7', definition=v7_recipe_type_def)
def setUp(self):
    """Create two workspaces (one inactive), a RUNNING job with an execution,
    and the remote base path derived from the job type's name and version.
    """
    django.setup()

    self.workspace_1 = Workspace.objects.create(name='Test workspace 1')
    self.workspace_2 = Workspace.objects.create(name='Test workspace 2', is_active=False)

    seed_manifest = job_utils.create_seed_manifest(name='Type-1')
    seed_job_type = job_utils.create_seed_job_type(manifest=seed_manifest)
    trigger_event = TriggerEvent.objects.create_trigger_event('TEST', None, {}, now())
    self.job = job_utils.create_job(job_type=seed_job_type, event=trigger_event,
                                    status='RUNNING', last_status_change=now())
    self.job_exe = job_utils.create_job_exe(job=self.job, status='RUNNING',
                                            timeout=1, queued=now())

    # e.g. jobs/<sanitized type name>/<sanitized type version>
    type_name = get_valid_filename(self.job.job_type.name)
    type_version = get_valid_filename(self.job.job_type.version)
    self.remote_base_path = os.path.join('jobs', type_name, type_version)
def setUp(self):
    """Create a seed job type, a RUNNING job, and a RUNNING execution
    configured with a default output workspace.

    Fix: removed dead locals ``cmd``/``cmd_args`` that were assigned but
    never used (the command string is hard-coded in the manifest call).
    """
    django.setup()

    timeout = 60  # execution timeout passed to create_job_exe

    manifest = job_utils.create_seed_manifest(command='command run test')
    self.seed_job_type = job_utils.create_seed_job_type(manifest=manifest)
    self.event = TriggerEvent.objects.create_trigger_event('TEST', None, {}, now())
    self.seed_job = job_utils.create_job(job_type=self.seed_job_type,
                                         event=self.event, status='RUNNING')

    config = {
        'output_workspaces': {
            'default': storage_utils.create_workspace().name
        }
    }
    self.seed_exe = job_utils.create_job_exe(job=self.seed_job, status='RUNNING',
                                             timeout=timeout, queued=now(),
                                             configuration=config)
def setUp(self):
    """Create the scale-daily-metrics job type and the processor under test."""
    django.setup()
    metrics_manifest = job_test_utils.create_seed_manifest(name='scale-daily-metrics')
    self.job_type = job_test_utils.create_seed_job_type(manifest=metrics_manifest)
    self.processor = DailyMetricsProcessor()
def setUp(self):
    """Create three workspace files, two seed job types (one accepting an
    input metadata manifest), jobs for both, and RUNNING executions whose
    configurations are built for them.

    Fix: removed dead locals ``cmd``/``cmd_args`` that were assigned but
    never used (the command string is hard-coded in the manifest call).
    """
    django.setup()

    timeout = 60  # execution timeout passed to create_job_exe

    workspace = storage_utils.create_workspace(base_url="http://test.com/")
    self.file_1 = storage_utils.create_file(workspace=workspace, file_path="path/1/file1.txt")
    self.file_2 = storage_utils.create_file(workspace=workspace, file_path="path/2/file2.txt")
    self.file_3 = storage_utils.create_file(workspace=workspace, file_path="path/3/file3.txt")
    # Lookup by id, as expected by QueuedExecutionConfigurator below.
    input_files = {
        self.file_1.id: self.file_1,
        self.file_2.id: self.file_2,
        self.file_3.id: self.file_3
    }

    manifest = job_utils.create_seed_manifest(command='command run test')
    # Variant manifest: bumped version plus an extra input file that carries
    # the input metadata manifest.
    imm = copy.deepcopy(manifest)
    imm['job']['jobVersion'] = '1.0.1'
    imm['job']['interface']['inputs']['files'].append({'name': 'INPUT_METADATA_MANIFEST'})
    self.seed_job_type = job_utils.create_seed_job_type(manifest=manifest)
    self.seed_job_type_metadata = job_utils.create_seed_job_type(manifest=imm)

    self.event = TriggerEvent.objects.create_trigger_event('TEST', None, {}, now())
    self.seed_job = job_utils.create_job(job_type=self.seed_job_type,
                                         event=self.event, status='RUNNING')

    self.data_dict = {
        'json': {
            'input_1': 'my_val'
        },
        'files': {
            'input_2': [self.file_1.id],
            'input_3': [self.file_2.id, self.file_3.id]
        }
    }
    self.seed_job_meta = job_utils.create_job(job_type=self.seed_job_type_metadata,
                                              event=self.event, input=self.data_dict,
                                              status='RUNNING')

    config = {
        'output_workspaces': {
            'default': storage_utils.create_workspace().name
        }
    }
    self.seed_exe = job_utils.create_job_exe(job=self.seed_job, status='RUNNING',
                                             timeout=timeout, queued=now(),
                                             configuration=config)

    # Build the queued execution configuration that resolves the metadata
    # job's input file ids against the models created above.
    configurator = QueuedExecutionConfigurator(input_files)
    exe_config = configurator.configure_queued_job(self.seed_job_meta)
    self.seed_exe_meta = job_utils.create_job_exe(job=self.seed_job_meta, status='RUNNING',
                                                  timeout=timeout, queued=now(),
                                                  configuration=exe_config.get_dict())
def test_execute_with_data(self):
    """Tests calling ProcessJobInput.execute() successfully when the job already has data populated"""
    workspace = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(workspace=workspace, file_size=104857600.0)
    file_2 = storage_test_utils.create_file(workspace=workspace, file_size=987654321.0)
    inputs = [{
        'name': 'Input 1',
        'mediaTypes': ['text/plain'],
    }, {
        'name': 'Input 2',
        'mediaTypes': ['text/plain'],
    }]
    outputs = [{
        'name': 'Output 1',
        'mediaType': 'image/png',
    }]
    manifest = job_test_utils.create_seed_manifest(command='my_command', inputs_files=inputs, outputs_files=outputs)
    job_type = job_test_utils.create_seed_job_type(manifest=manifest)
    # Version-1.0 input data dict: both files wired to inputs, one output
    # workspace mapping.
    input_dict = {
        'version': '1.0',
        'input_data': [{
            'name': 'Input 1',
            'file_id': file_1.id
        }, {
            'name': 'Input 2',
            'file_id': file_2.id
        }],
        'output_data': [{
            'name': 'Output 1',
            'workspace_id': workspace.id
        }]}
    job = job_test_utils.create_job(job_type=job_type, num_exes=0, status='PENDING',
                                    input_file_size=None, input=input_dict)

    # Create message
    message = ProcessJobInput()
    message.job_id = job.id

    # Execute message
    result = message.execute()
    self.assertTrue(result)

    job = Job.objects.get(id=job.id)
    # Check for queued jobs message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'queued_jobs')
    self.assertFalse(message.new_messages[0].requeue)
    # Check job for expected input_file_size
    # (104857600 + 987654321 bytes ~= 1042 MiB — presumably stored in MiB;
    # confirm against ProcessJobInput).
    self.assertEqual(job.input_file_size, 1042.0)

    # Make sure job input file models are created
    job_input_files = JobInputFile.objects.filter(job_id=job.id)
    self.assertEqual(len(job_input_files), 2)
    for job_input_file in job_input_files:
        if job_input_file.job_input == 'Input 1':
            self.assertEqual(job_input_file.input_file_id, file_1.id)
        elif job_input_file.job_input == 'Input 2':
            self.assertEqual(job_input_file.input_file_id, file_2.id)
        else:
            self.fail('Invalid input name: %s' % job_input_file.job_input)

    # Test executing message again
    message_json_dict = message.to_json()
    message = ProcessJobInput.from_json(message_json_dict)
    result = message.execute()
    self.assertTrue(result)

    # Still should have queued jobs message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'queued_jobs')
    self.assertFalse(message.new_messages[0].requeue)

    # Make sure job input file models are unchanged
    job_input_files = JobInputFile.objects.filter(job_id=job.id)
    self.assertEqual(len(job_input_files), 2)
def test_successful_recipe_kickoff(self, mock_msg_mgr, mock_msg_mgr_rc, mock_msg_mgr_q):
    """Tests successfully producing an ingest that immediately calls a recipe"""
    # --- Part 1: Strike-driven ingest kicks off the v7 recipe ---
    strike_config = {
        'version': '7',
        'workspace': self.workspace.name,
        'monitor': {'type': 'dir-watcher', 'transfer_suffix': '_tmp'},
        'files_to_ingest': [{
            'filename_regex': 'input_file',
            'data_types': ['image_type'],
            'new_workspace': self.workspace.name,
            'new_file_path': 'my/path'
        }],
        'recipe': {
            'name': self.recipe_v7.name
        },
    }
    config = StrikeConfigurationV6(strike_config).get_configuration()
    strike = Strike.objects.create_strike('my_name', 'my_title', 'my_description', config)
    ingest = ingest_test_utils.create_ingest(source_file=self.source_file)

    # Call method to test
    IngestRecipeHandler().process_ingested_source_file(ingest.id, strike, self.source_file, now())
    self.assertEqual(Recipe.objects.count(), 1)
    self.assertEqual(Recipe.objects.first().recipe_type.name, self.recipe_v7.name)

    # Verify ingest event and trigger event objects were created
    from ingest.models import IngestEvent
    events = IngestEvent.objects.all().values()
    self.assertEqual(len(events), 1)
    self.assertEqual(events[0]['type'], 'STRIKE')

    # --- Part 2: Scan-driven ingest kicks off the same recipe ---
    # Create scan
    scan_config = {
        'workspace': self.workspace.name,
        'scanner': {
            'type': 'dir'
        },
        'files_to_ingest': [{
            'filename_regex': 'input_file',
            'data_types': ['type1'],
            'new_file_path': os.path.join('my', 'path'),
            'new_workspace': self.workspace.name,
        }],
        'recipe': {
            'name': self.recipe_v7.name,
        },
    }
    scan_configuration = ScanConfigurationV6(scan_config).get_configuration()
    scan = Scan.objects.create_scan('my_name', 'my_title', 'my_description', scan_configuration)

    # Call method to test
    IngestRecipeHandler().process_ingested_source_file(ingest.id, scan, self.source_file, now())
    self.assertEqual(Recipe.objects.count(), 2)
    self.assertEqual(Recipe.objects.last().recipe_type.name, self.recipe_v7.name)

    # Verify events were created
    events = IngestEvent.objects.all().values()
    self.assertEqual(len(events), 2)
    self.assertEqual(events[1]['type'], 'SCAN')

    # --- Part 3: edit the recipe type, then ingest pinned to revision 1 ---
    # Update the recipe then call ingest with revision 1
    manifest = job_test_utils.create_seed_manifest(
        inputs_files=[{'name': 'INPUT_FILE', 'media_types': ['text/plain'], 'required': True, 'multiple': True}],
        inputs_json=[])
    jt2 = job_test_utils.create_seed_job_type(manifest=manifest)
    # New definition adds a second job node ('node_b') using jt2.
    definition = {'version': '7',
                  'input': {'files': [{'name': 'INPUT_FILE', 'media_types': ['text/plain'],
                                       'required': True, 'multiple': True}], 'json': []},
                  'nodes': {'node_a': {'dependencies': [],
                                       'input': {'INPUT_FILE': {'type': 'recipe', 'input': 'INPUT_FILE'}},
                                       'node_type': {'node_type': 'job', 'job_type_name': self.jt1.name,
                                                     'job_type_version': self.jt1.version,
                                                     'job_type_revision': 1}},
                            'node_b': {'dependencies': [],
                                       'input': {'INPUT_FILE': {'type': 'recipe', 'input': 'INPUT_FILE'}},
                                       'node_type': {'node_type': 'job', 'job_type_name': jt2.name,
                                                     'job_type_version': jt2.version,
                                                     'job_type_revision': 1}}}}
    recipe_test_utils.edit_recipe_type_v6(recipe_type=self.recipe, definition=definition)
    # Pin the strike to revision 1 (the pre-edit definition).
    strike_config['recipe'] = {
        'name': self.recipe.name,
        'revision_num': 1,
    }
    config = StrikeConfigurationV6(strike_config).get_configuration()
    strike = Strike.objects.create_strike('my_name_2', 'my_title_2', 'my_description_2', config)
    ingest = ingest_test_utils.create_ingest(source_file=self.source_file)

    # Call method to test
    IngestRecipeHandler().process_ingested_source_file(ingest.id, strike, self.source_file, now())
    self.assertEqual(Recipe.objects.count(), 3)
    self.assertEqual(Recipe.objects.first().recipe_type.name, self.recipe.name)

    # Verify events were created
    events = IngestEvent.objects.all().values()
    self.assertEqual(len(events), 3)
    self.assertEqual(events[2]['type'], 'STRIKE')