def setUp(self):
    """Create two job executions whose published products link back to four source files."""
    django.setup()

    self.src_file_1 = source_test_utils.create_source()
    self.src_file_2 = source_test_utils.create_source()
    self.src_file_3 = source_test_utils.create_source()
    self.src_file_4 = source_test_utils.create_source()

    # First execution: products 1 and 2 each descend from source files 1 and 2
    self.job_exe_1 = job_test_utils.create_job_exe()
    self.recipe_job_1 = recipe_test_utils.create_recipe_job(job=self.job_exe_1.job)
    self.product_1 = prod_test_utils.create_product(self.job_exe_1, has_been_published=True)
    self.product_2 = prod_test_utils.create_product(self.job_exe_1, has_been_published=True)
    for parent in (self.src_file_1, self.src_file_2):
        for child in (self.product_1, self.product_2):
            FileAncestryLink.objects.create(ancestor=parent, descendant=child,
                                            job_exe=self.job_exe_1, job=self.job_exe_1.job,
                                            recipe=self.recipe_job_1.recipe)

    # Second execution: product 3 descends from source files 3 and 4
    self.job_exe_2 = job_test_utils.create_job_exe()
    self.recipe_job_2 = recipe_test_utils.create_recipe_job(job=self.job_exe_2.job)
    self.product_3 = prod_test_utils.create_product(self.job_exe_2, has_been_published=True)
    for parent in (self.src_file_3, self.src_file_4):
        FileAncestryLink.objects.create(ancestor=parent, descendant=self.product_3,
                                        job_exe=self.job_exe_2, job=self.job_exe_2.job,
                                        recipe=self.recipe_job_2.recipe)
def setUp(self):
    """Create one parsed and one unparsed source file with different data times."""
    django.setup()

    self.source1 = source_test_utils.create_source(data_started='2016-01-01T00:00:00Z',
                                                   data_ended='2016-01-01T00:00:00Z',
                                                   file_name='test.txt', is_parsed=True)
    self.source2 = source_test_utils.create_source(data_started='2017-01-01T00:00:00Z',
                                                   data_ended='2017-01-01T00:00:00Z',
                                                   is_parsed=False)
def test_get_sources_data_time(self):
    """Tests calling get_sources() using data time"""
    # Four sources with varying data-time ranges
    time_ranges = [
        ('2016-01-01T00:00:00Z', '2016-02-01T00:00:00Z'),
        ('2016-02-01T00:00:00Z', '2016-02-01T00:00:00Z'),
        ('2016-01-01T00:00:00Z', '2016-03-01T00:00:00Z'),
        ('2016-01-01T00:00:00Z', '2016-04-01T00:00:00Z'),
    ]
    for started, ended in time_ranges:
        source_test_utils.create_source(data_started=started, data_ended=ended)

    # Both bounds supplied
    results = SourceFile.objects.get_sources(started='2015-12-01T00:00:00Z',
                                             ended='2016-01-15T00:00:00Z', time_field='data')
    self.assertEqual(len(results), 3)

    # Only a start bound
    results = SourceFile.objects.get_sources(started='2016-02-15T00:00:00Z', time_field='data')
    self.assertEqual(len(results), 2)

    # Only an end bound
    results = SourceFile.objects.get_sources(ended='2016-01-15T00:00:00Z', time_field='data')
    self.assertEqual(len(results), 3)
def setUp(self):
    """Create a source file plus best-effort ingest and product fixtures.

    The ingest and product apps may not be available in every test
    configuration, so those fixtures are created inside try blocks and the
    attributes are left as None when creation fails.
    """
    django.setup()

    self.source = source_test_utils.create_source()

    # Fix: a bare "except:" also swallows SystemExit/KeyboardInterrupt;
    # "except Exception:" preserves the best-effort intent safely.
    try:
        import ingest.test.utils as ingest_test_utils
        self.ingest = ingest_test_utils.create_ingest(source_file=self.source)
    except Exception:
        self.ingest = None

    try:
        import product.test.utils as product_test_utils
        self.product1 = product_test_utils.create_product(is_superseded=True)
        product_test_utils.create_file_link(ancestor=self.source, descendant=self.product1)
    except Exception:
        self.product1 = None

    try:
        import product.test.utils as product_test_utils
        self.product2 = product_test_utils.create_product()
        product_test_utils.create_file_link(ancestor=self.source, descendant=self.product2)
    except Exception:
        self.product2 = None
def setUp(self):
    """Create an in-progress ingest, its job execution id, and a source file."""
    django.setup()

    self.ingest = ingest_test_utils.create_ingest(status='INGESTING')
    # Fix: the original passed the Job model instance to the "job_id" lookup,
    # which expects a primary key; use the job's id explicitly.
    self.job_exe_id = JobExecution.objects.get(job_id=self.ingest.job.id).id
    self.source_file = source_test_utils.create_source(workspace=self.ingest.workspace)
def setUp(self):
    """Create a source file plus best-effort ingest and product fixtures.

    The ingest and product apps may not be available in every test
    configuration; their fixtures are created best-effort and the
    attributes left as None when unavailable.
    """
    django.setup()

    self.source = source_test_utils.create_source()

    # Fix: a bare "except:" also swallows SystemExit/KeyboardInterrupt;
    # "except Exception:" preserves the best-effort intent safely.
    try:
        import ingest.test.utils as ingest_test_utils
        self.ingest = ingest_test_utils.create_ingest(source_file=self.source)
    except Exception:
        self.ingest = None

    try:
        import product.test.utils as product_test_utils
        self.product1 = product_test_utils.create_product(is_superseded=True)
        product_test_utils.create_file_link(ancestor=self.source, descendant=self.product1)
    except Exception:
        self.product1 = None

    try:
        import product.test.utils as product_test_utils
        self.product2 = product_test_utils.create_product()
        product_test_utils.create_file_link(ancestor=self.source, descendant=self.product2)
    except Exception:
        self.product2 = None
def setUp(self):
    """Build a mocked workspace, one source file, and two job executions
    (the second marked non-operational)."""
    django.setup()

    self.workspace = storage_test_utils.create_workspace()
    self.workspace.upload_files = MagicMock()
    self.workspace.delete_files = MagicMock()

    self.upload_dir = os.path.join('upload', 'dir')
    self.work_dir = os.path.join('work', 'dir')
    self.workspace_work_dir = ScaleFile.objects._get_workspace_work_dir(self.work_dir,
                                                                        self.workspace)

    self.source_file = source_test_utils.create_source(file_name=u'input1.txt',
                                                       workspace=self.workspace)

    self.job_exe = job_test_utils.create_job_exe()
    self.job_exe_no = job_test_utils.create_job_exe()
    # Flip the second execution's job and job type to non-operational atomically
    with transaction.atomic():
        self.job_exe_no.job.is_operational = False
        self.job_exe_no.job.job_type.is_operational = False
        self.job_exe_no.job.save()
        self.job_exe_no.job.job_type.save()

    self.files = [
        (u'local/1/file.txt', u'remote/1/file.txt', None),
        (u'local/2/file.json', u'remote/2/file.json', u'application/x-custom-json'),
    ]
    self.files_no = [
        (u'local/3/file.h5', u'remote/3/file.h5', u'image/x-hdf5-image'),
    ]
def test_media_type_match(self):
    '''Tests calling IngestTriggerCondition.is_condition_met() with a matching media type'''
    src = source_test_utils.create_source(media_type='text/plain')
    cond = IngestTriggerCondition('text/plain', None)
    self.assertEqual(cond.is_condition_met(src), True)
def test_media_type_match(self):
    """Tests calling IngestTriggerCondition.is_condition_met() with a matching media type"""
    src = source_test_utils.create_source(media_type='text/plain')
    cond = IngestTriggerCondition('text/plain', None)
    self.assertEqual(cond.is_condition_met(src), True)
def test_no_conditions(self):
    '''Tests calling IngestTriggerCondition.is_condition_met() with no conditions'''
    src = source_test_utils.create_source(media_type='text/plain')
    # With neither media type nor data types configured, any file matches
    cond = IngestTriggerCondition(None, None)
    self.assertEqual(cond.is_condition_met(src), True)
def setUp(self):
    """Create a parsed source file linked to one published product."""
    django.setup()

    from product.test import utils as product_test_utils

    self.country = storage_test_utils.create_country()
    self.job_type1 = job_test_utils.create_job_type(name='test1', category='test-1',
                                                    is_operational=True)
    self.job1 = job_test_utils.create_job(job_type=self.job_type1)
    self.job_exe1 = job_test_utils.create_job_exe(job=self.job1)

    self.src_file = source_test_utils.create_source(data_started='2016-01-01T00:00:00Z',
                                                    data_ended='2016-01-01T00:00:00Z',
                                                    file_name='test.txt', is_parsed=True)
    self.product1 = product_test_utils.create_product(job_exe=self.job_exe1,
                                                      has_been_published=True,
                                                      is_published=True,
                                                      file_name='test_prod.txt',
                                                      countries=[self.country])
    product_test_utils.create_file_link(ancestor=self.src_file, descendant=self.product1,
                                        job=self.job1, job_exe=self.job_exe1)
def create_ingest(file_name='test.txt', status='TRANSFERRING', transfer_started=None, transfer_ended=None,
                  ingest_started=None, ingest_ended=None, data_started=None, data_ended=None,
                  workspace=None, strike=None, scan=None, source_file=None):
    """Create and return an Ingest model for unit testing.

    Missing arguments are filled with sensible defaults: a workspace and
    source file are created on demand, and the transfer/ingest timestamps
    are derived from the requested status.
    """
    if not workspace:
        workspace = storage_test_utils.create_workspace()
    if not source_file:
        source_file = source_test_utils.create_source(file_name=file_name, data_started=data_started,
                                                      data_ended=data_ended, workspace=workspace)
    if not transfer_started:
        transfer_started = timezone.now()
    # Statuses past TRANSFERRING imply the ingest has started (and possibly ended)
    if status not in ['QUEUED', 'TRANSFERRING'] and not ingest_started:
        ingest_started = timezone.now()
    if status not in ['QUEUED', 'TRANSFERRING', 'INGESTING'] and not ingest_ended:
        ingest_ended = timezone.now()

    # Fix: bare "except:" also swallows SystemExit/KeyboardInterrupt; fall back
    # to a generic job type only for ordinary failures.
    try:
        job_type = Ingest.objects.get_ingest_job_type()
    except Exception:
        job_type = job_utils.create_job_type()

    job = job_utils.create_job(job_type=job_type)
    job_utils.create_job_exe(job=job)

    return Ingest.objects.create(file_name=file_name, file_size=source_file.file_size, status=status,
                                 job=job, bytes_transferred=source_file.file_size,
                                 transfer_started=transfer_started, transfer_ended=transfer_ended,
                                 media_type='text/plain', ingest_started=ingest_started,
                                 ingest_ended=ingest_ended, data_started=source_file.data_started,
                                 data_ended=source_file.data_ended, workspace=workspace, strike=strike,
                                 scan=scan, source_file=source_file)
def test_no_conditions(self):
    """Tests calling IngestTriggerCondition.is_condition_met() with no conditions"""
    src = source_test_utils.create_source(media_type='text/plain')
    # With neither media type nor data types configured, any file matches
    cond = IngestTriggerCondition(None, None)
    self.assertEqual(cond.is_condition_met(src), True)
def test_media_type_mismatch(self):
    """Tests calling IngestTriggerCondition.is_condition_met() with a mismatched media type"""
    src = source_test_utils.create_source(media_type='text/plain')
    cond = IngestTriggerCondition('application/json', None)
    self.assertEqual(cond.is_condition_met(src), False)
def setUp(self):
    """Create a source file with four descendant products across two jobs:
    one operational/published and three batch products in various states."""
    django.setup()

    from batch.test import utils as batch_test_utils
    from product.test import utils as product_test_utils

    self.country = storage_test_utils.create_country()
    self.src_file = source_test_utils.create_source()

    # Operational job with a published product
    self.job_type1 = job_test_utils.create_job_type(name='test1', category='test-1',
                                                    is_operational=True)
    self.job1 = job_test_utils.create_job(job_type=self.job_type1)
    self.job_exe1 = job_test_utils.create_job_exe(job=self.job1)
    self.product1 = product_test_utils.create_product(job_exe=self.job_exe1,
                                                      has_been_published=True, is_published=True,
                                                      file_name='test.txt', countries=[self.country])
    product_test_utils.create_file_link(ancestor=self.src_file, descendant=self.product1,
                                        job=self.job1, job_exe=self.job_exe1)

    # Non-operational job whose products belong to a batch
    self.batch = batch_test_utils.create_batch()
    self.job_type2 = job_test_utils.create_job_type(name='test2', category='test-2',
                                                    is_operational=False)
    self.job2 = job_test_utils.create_job(job_type=self.job_type2)
    self.job_exe2 = job_test_utils.create_job_exe(job=self.job2)

    # Unpublished product
    self.product2a = product_test_utils.create_product(job_exe=self.job_exe2,
                                                       has_been_published=True, is_published=False,
                                                       countries=[self.country])
    product_test_utils.create_file_link(ancestor=self.src_file, descendant=self.product2a,
                                        job=self.job2, job_exe=self.job_exe2, batch=self.batch)
    # Published but superseded product
    self.product2b = product_test_utils.create_product(job_exe=self.job_exe2,
                                                       has_been_published=True, is_published=True,
                                                       is_superseded=True, countries=[self.country])
    product_test_utils.create_file_link(ancestor=self.src_file, descendant=self.product2b,
                                        job=self.job2, job_exe=self.job_exe2, batch=self.batch)
    # Current published product
    self.product2c = product_test_utils.create_product(job_exe=self.job_exe2,
                                                       has_been_published=True, is_published=True,
                                                       countries=[self.country])
    product_test_utils.create_file_link(ancestor=self.src_file, descendant=self.product2c,
                                        job=self.job2, job_exe=self.job_exe2, batch=self.batch)
def setUp(self):
    """Build a mocked workspace, one source file, and two job executions
    (the second marked non-operational)."""
    django.setup()

    self.workspace = storage_test_utils.create_workspace()
    self.workspace.upload_files = MagicMock()
    self.workspace.delete_files = MagicMock()

    self.upload_dir = os.path.join('upload', 'dir')
    self.work_dir = os.path.join('work', 'dir')
    self.workspace_work_dir = ScaleFile.objects._get_workspace_work_dir(self.work_dir,
                                                                        self.workspace)

    self.source_file = source_test_utils.create_source(file_name=u'input1.txt',
                                                       workspace=self.workspace)

    self.job_exe = job_test_utils.create_job_exe()
    self.job_exe_no = job_test_utils.create_job_exe()
    # Flip the second execution's job and job type to non-operational atomically
    with transaction.atomic():
        self.job_exe_no.job.is_operational = False
        self.job_exe_no.job.job_type.is_operational = False
        self.job_exe_no.job.save()
        self.job_exe_no.job.job_type.save()

    self.files = [
        (u'local/1/file.txt', u'remote/1/file.txt', None),
        (u'local/2/file.json', u'remote/2/file.json', u'application/x-custom-json'),
    ]
    self.files_no = [
        (u'local/3/file.h5', u'remote/3/file.h5', u'image/x-hdf5-image'),
    ]
def test_media_type_mismatch(self):
    '''Tests calling IngestTriggerCondition.is_condition_met() with a mismatched media type'''
    src = source_test_utils.create_source(media_type='text/plain')
    cond = IngestTriggerCondition('application/json', None)
    self.assertEqual(cond.is_condition_met(src), False)
def create_ingest(file_name='test.txt', status='TRANSFERRING', transfer_started=None, transfer_ended=None,
                  ingest_started=None, ingest_ended=None, data_started=None, data_ended=None,
                  workspace=None, strike=None, source_file=None):
    """Create and return an Ingest model for unit testing.

    Missing arguments are filled with defaults: workspace, strike, and
    source file are created on demand, and the transfer/ingest timestamps
    are derived from the requested status.
    """
    if not workspace:
        workspace = storage_test_utils.create_workspace()
    if not strike:
        strike = create_strike()
    if not source_file:
        source_file = source_test_utils.create_source(file_name=file_name, data_started=data_started,
                                                      data_ended=data_ended, workspace=workspace)
    if not transfer_started:
        transfer_started = timezone.now()
    # Statuses past TRANSFERRING imply the ingest has started (and possibly ended)
    if status not in ['QUEUED', 'TRANSFERRING'] and not ingest_started:
        ingest_started = timezone.now()
    if status not in ['QUEUED', 'TRANSFERRING', 'INGESTING'] and not ingest_ended:
        ingest_ended = timezone.now()

    # Fix: bare "except:" also swallows SystemExit/KeyboardInterrupt; fall back
    # to a generic job type only for ordinary failures.
    try:
        job_type = Ingest.objects.get_ingest_job_type()
    except Exception:
        job_type = job_utils.create_job_type()

    job = job_utils.create_job(job_type=job_type)
    job_utils.create_job_exe(job=job)

    return Ingest.objects.create(file_name=file_name, file_size=source_file.file_size, status=status,
                                 job=job, bytes_transferred=source_file.file_size,
                                 transfer_started=transfer_started, transfer_ended=transfer_ended,
                                 media_type='text/plain', ingest_started=ingest_started,
                                 ingest_ended=ingest_ended, workspace=workspace, strike=strike,
                                 source_file=source_file)
def setUp(self):
    """Create a source file linked to three jobs: running, pending, and superseded."""
    django.setup()

    from product.test import utils as product_test_utils

    self.src_file = source_test_utils.create_source()

    # Running batch-creator job
    self.job_type1 = job_test_utils.create_job_type(name='scale-batch-creator', version='1.0',
                                                    category='test-1')
    self.job1 = job_test_utils.create_job(job_type=self.job_type1, status='RUNNING')
    self.job_exe1 = job_test_utils.create_job_exe(job=self.job1)
    product_test_utils.create_file_link(ancestor=self.src_file, job=self.job1,
                                        job_exe=self.job_exe1)

    # Pending job
    self.job_type2 = job_test_utils.create_job_type(name='test2', version='1.0', category='test-2')
    self.job2 = job_test_utils.create_job(job_type=self.job_type2, status='PENDING')
    self.job_exe2 = job_test_utils.create_job_exe(job=self.job2)
    product_test_utils.create_file_link(ancestor=self.src_file, job=self.job2,
                                        job_exe=self.job_exe2)

    # Superseded job
    self.job3 = job_test_utils.create_job(is_superseded=True)
    self.job_exe3 = job_test_utils.create_job_exe(job=self.job3)
    product_test_utils.create_file_link(ancestor=self.src_file, job=self.job3,
                                        job_exe=self.job_exe3)
def setUp(self):
    """Create a two-job recipe type, input data referencing a source file,
    and register a mocked queue event processor."""
    django.setup()

    workspace = storage_test_utils.create_workspace()
    source_file = source_test_utils.create_source(workspace=workspace)
    self.event = trigger_test_utils.create_trigger_event()

    # Job 1: consumes a plain-text file, produces PNGs
    interface_1 = {
        "version": "1.0",
        "command": "test_command",
        "command_arguments": "test_arg",
        "input_data": [{"name": "Test Input 1", "type": "file", "media_types": ["text/plain"]}],
        "output_data": [{"name": "Test Output 1", "type": "files", "media_type": "image/png"}],
    }
    self.job_type_1 = job_test_utils.create_job_type(interface=interface_1)

    # Job 2: consumes image files produced by job 1
    interface_2 = {
        "version": "1.0",
        "command": "test_command",
        "command_arguments": "test_arg",
        "input_data": [{"name": "Test Input 2", "type": "files",
                        "media_types": ["image/png", "image/tiff"]}],
        "output_data": [{"name": "Test Output 2", "type": "file"}],
    }
    self.job_type_2 = job_test_utils.create_job_type(interface=interface_2)

    definition = {
        "version": "1.0",
        "input_data": [{"name": "Recipe Input", "type": "file", "media_types": ["text/plain"]}],
        "jobs": [
            {
                "name": "Job 1",
                "job_type": {"name": self.job_type_1.name, "version": self.job_type_1.version},
                "recipe_inputs": [{"recipe_input": "Recipe Input", "job_input": "Test Input 1"}],
            },
            {
                "name": "Job 2",
                "job_type": {"name": self.job_type_2.name, "version": self.job_type_2.version},
                "dependencies": [
                    {"name": "Job 1",
                     "connections": [{"output": "Test Output 1", "input": "Test Input 2"}]}
                ],
            },
        ],
    }
    recipe_definition = RecipeDefinition(definition)
    recipe_definition.validate_job_interfaces()
    self.recipe_type = recipe_test_utils.create_recipe_type(definition=definition)

    self.data = {
        "version": "1.0",
        "input_data": [{"name": "Recipe Input", "file_id": source_file.id}],
        "workspace_id": workspace.id,
    }

    # Register a fake processor
    self.mock_processor = MagicMock(QueueEventProcessor)
    Queue.objects.register_processor(lambda: self.mock_processor)
def test_has_not_data_types(self):
    """Tests calling ParseTriggerCondition.is_condition_met() with a source file that has all
    required data types"""
    src = source_test_utils.create_source(media_type='text/plain')
    src.add_data_type_tag('C')
    # The "not" set {A, B} has no overlap with the file's tags, so it matches
    cond = ParseTriggerCondition(None, set([]), set([]), set(['A', 'B']))
    self.assertEqual(cond.is_condition_met(src), True)
def test_calculate_stats(self):
    '''Tests calculating individual statistics for a metrics entry.'''
    # NOTE(review): these datetimes are naive; if the project runs with
    # USE_TZ enabled they should carry tzinfo -- confirm against settings.
    strike = ingest_test_utils.create_strike()
    source_file = source_test_utils.create_source(file_size=200)

    # One ingest per status so every counter gets exercised
    ingest_test_utils.create_ingest(strike=strike, source_file=source_file, status='INGESTED',
                                    transfer_started=datetime.datetime(2015, 1, 1),
                                    transfer_ended=datetime.datetime(2015, 1, 1, 0, 10),
                                    ingest_started=datetime.datetime(2015, 1, 1),
                                    ingest_ended=datetime.datetime(2015, 1, 1, 1))
    ingest_test_utils.create_ingest(strike=strike, status='INGESTED',
                                    transfer_started=datetime.datetime(2015, 1, 1),
                                    transfer_ended=datetime.datetime(2015, 1, 1, 0, 20),
                                    ingest_started=datetime.datetime(2015, 1, 1),
                                    ingest_ended=datetime.datetime(2015, 1, 1, 2))
    ingest_test_utils.create_ingest(strike=strike, status='ERRORED',
                                    transfer_started=datetime.datetime(2015, 1, 1),
                                    transfer_ended=datetime.datetime(2015, 1, 1, 0, 30),
                                    ingest_started=datetime.datetime(2015, 1, 1),
                                    ingest_ended=datetime.datetime(2015, 1, 1, 3))
    ingest_test_utils.create_ingest(strike=strike, status='DEFERRED',
                                    transfer_started=datetime.datetime(2015, 1, 1),
                                    transfer_ended=datetime.datetime(2015, 1, 1, 0, 40),
                                    ingest_started=datetime.datetime(2015, 1, 1),
                                    ingest_ended=datetime.datetime(2015, 1, 1, 4))
    ingest_test_utils.create_ingest(strike=strike, status='DUPLICATE',
                                    transfer_started=datetime.datetime(2015, 1, 1),
                                    transfer_ended=datetime.datetime(2015, 1, 1, 0, 50),
                                    ingest_started=datetime.datetime(2015, 1, 1),
                                    ingest_ended=datetime.datetime(2015, 1, 1, 5))

    MetricsIngest.objects.calculate(datetime.date(2015, 1, 1))

    entries = MetricsIngest.objects.filter(occurred=datetime.date(2015, 1, 1))
    self.assertEqual(len(entries), 1)

    entry = entries.first()
    self.assertEqual(entry.occurred, datetime.date(2015, 1, 1))
    self.assertEqual(entry.deferred_count, 1)
    self.assertEqual(entry.ingested_count, 2)
    self.assertEqual(entry.errored_count, 1)
    self.assertEqual(entry.duplicate_count, 1)
    self.assertEqual(entry.total_count, 5)

    self.assertEqual(entry.file_size_sum, 600)
    self.assertEqual(entry.file_size_min, 100)
    self.assertEqual(entry.file_size_max, 200)
    self.assertEqual(entry.file_size_avg, 120)

    self.assertEqual(entry.transfer_time_sum, 9000)
    self.assertEqual(entry.transfer_time_min, 600)
    self.assertEqual(entry.transfer_time_max, 3000)
    self.assertEqual(entry.transfer_time_avg, 1800)

    self.assertEqual(entry.ingest_time_sum, 10800)
    self.assertEqual(entry.ingest_time_min, 3600)
    self.assertEqual(entry.ingest_time_max, 7200)
    self.assertEqual(entry.ingest_time_avg, 5400)
def test_calculate_stats(self):
    """Tests calculating individual statistics for a metrics entry."""
    strike = ingest_test_utils.create_strike()
    source_file = source_test_utils.create_source(file_size=200)

    # One ingest per status so every counter gets exercised
    ingest_test_utils.create_ingest(strike=strike, source_file=source_file, status='INGESTED',
                                    transfer_started=datetime.datetime(2015, 1, 1, tzinfo=utc),
                                    transfer_ended=datetime.datetime(2015, 1, 1, 0, 10, tzinfo=utc),
                                    ingest_started=datetime.datetime(2015, 1, 1, tzinfo=utc),
                                    ingest_ended=datetime.datetime(2015, 1, 1, 1, tzinfo=utc))
    ingest_test_utils.create_ingest(strike=strike, status='INGESTED',
                                    transfer_started=datetime.datetime(2015, 1, 1, tzinfo=utc),
                                    transfer_ended=datetime.datetime(2015, 1, 1, 0, 20, tzinfo=utc),
                                    ingest_started=datetime.datetime(2015, 1, 1, tzinfo=utc),
                                    ingest_ended=datetime.datetime(2015, 1, 1, 2, tzinfo=utc))
    ingest_test_utils.create_ingest(strike=strike, status='ERRORED',
                                    transfer_started=datetime.datetime(2015, 1, 1, tzinfo=utc),
                                    transfer_ended=datetime.datetime(2015, 1, 1, 0, 30, tzinfo=utc),
                                    ingest_started=datetime.datetime(2015, 1, 1, tzinfo=utc),
                                    ingest_ended=datetime.datetime(2015, 1, 1, 3, tzinfo=utc))
    ingest_test_utils.create_ingest(strike=strike, status='DEFERRED',
                                    transfer_started=datetime.datetime(2015, 1, 1, tzinfo=utc),
                                    transfer_ended=datetime.datetime(2015, 1, 1, 0, 40, tzinfo=utc),
                                    ingest_started=datetime.datetime(2015, 1, 1, tzinfo=utc),
                                    ingest_ended=datetime.datetime(2015, 1, 1, 4, tzinfo=utc))
    ingest_test_utils.create_ingest(strike=strike, status='DUPLICATE',
                                    transfer_started=datetime.datetime(2015, 1, 1, tzinfo=utc),
                                    transfer_ended=datetime.datetime(2015, 1, 1, 0, 50, tzinfo=utc),
                                    ingest_started=datetime.datetime(2015, 1, 1, tzinfo=utc),
                                    ingest_ended=datetime.datetime(2015, 1, 1, 5, tzinfo=utc))

    MetricsIngest.objects.calculate(datetime.date(2015, 1, 1))

    entries = MetricsIngest.objects.filter(occurred=datetime.date(2015, 1, 1))
    self.assertEqual(len(entries), 1)

    entry = entries.first()
    self.assertEqual(entry.occurred, datetime.date(2015, 1, 1))
    self.assertEqual(entry.deferred_count, 1)
    self.assertEqual(entry.ingested_count, 2)
    self.assertEqual(entry.errored_count, 1)
    self.assertEqual(entry.duplicate_count, 1)
    self.assertEqual(entry.total_count, 5)

    self.assertEqual(entry.file_size_sum, 600)
    self.assertEqual(entry.file_size_min, 100)
    self.assertEqual(entry.file_size_max, 200)
    self.assertEqual(entry.file_size_avg, 120)

    self.assertEqual(entry.transfer_time_sum, 9000)
    self.assertEqual(entry.transfer_time_min, 600)
    self.assertEqual(entry.transfer_time_max, 3000)
    self.assertEqual(entry.transfer_time_avg, 1800)

    self.assertEqual(entry.ingest_time_sum, 10800)
    self.assertEqual(entry.ingest_time_min, 3600)
    self.assertEqual(entry.ingest_time_max, 7200)
    self.assertEqual(entry.ingest_time_avg, 5400)
def test_does_not_have_data_types(self):
    '''Tests calling IngestTriggerCondition.is_condition_met() with a source file that does not
    have all required data types'''
    src = source_test_utils.create_source(media_type='text/plain')
    src.add_data_type_tag('A')
    src.add_data_type_tag('B')
    # Required tag 'C' is missing from the file
    cond = IngestTriggerCondition(None, set(['A', 'B', 'C']))
    self.assertEqual(cond.is_condition_met(src), False)
def test_does_not_have_data_types(self):
    """Tests calling IngestTriggerCondition.is_condition_met() with a source file that does not
    have all required data types"""
    src = source_test_utils.create_source(media_type='text/plain')
    src.add_data_type_tag('A')
    src.add_data_type_tag('B')
    # Required tag 'C' is missing from the file
    cond = IngestTriggerCondition(None, set(['A', 'B', 'C']))
    self.assertEqual(cond.is_condition_met(src), False)
def setUp(self):
    """Create a source file with one queued (strike-owned) and one completed ingest."""
    django.setup()

    self.source_file = source_test_utils.create_source()

    from ingest.test import utils as ingest_test_utils
    self.strike = ingest_test_utils.create_strike()
    self.ingest1 = ingest_test_utils.create_ingest(source_file=self.source_file,
                                                   status='QUEUED', strike=self.strike)
    self.ingest2 = ingest_test_utils.create_ingest(source_file=self.source_file,
                                                   status='INGESTED')
def test_media_type_incorrect(self):
    '''Tests calling ParseTriggerCondition.is_condition_met() with a source file that only has
    the correct data types'''
    src = source_test_utils.create_source(media_type='text/plain')
    for tag in ('A', 'B', 'C'):
        src.add_data_type_tag(tag)
    # All required tags present but the media type does not match
    cond = ParseTriggerCondition('application/json', set(['A', 'B', 'C']))
    self.assertEqual(cond.is_condition_met(src), False)
def test_data_types_incorrect(self):
    """Tests calling IngestTriggerCondition.is_condition_met() with a source file that only has
    the correct media type"""
    src = source_test_utils.create_source(media_type='text/plain')
    for tag in ('A', 'B', 'C'):
        src.add_data_type_tag(tag)
    # Media type matches but required tag 'D' is missing
    cond = IngestTriggerCondition('text/plain', set(['A', 'B', 'C', 'D']))
    self.assertEqual(cond.is_condition_met(src), False)
def test_both_correct(self):
    """Tests calling IngestTriggerCondition.is_condition_met() with a source file that meets
    both criteria"""
    src = source_test_utils.create_source(media_type='text/plain')
    for tag in ('A', 'B', 'C'):
        src.add_data_type_tag(tag)
    cond = IngestTriggerCondition('text/plain', set(['A', 'B', 'C']))
    self.assertEqual(cond.is_condition_met(src), True)
def test_data_types_incorrect(self):
    '''Tests calling ParseTriggerCondition.is_condition_met() with a source file that only has
    the correct media type'''
    src = source_test_utils.create_source(media_type='text/plain')
    for tag in ('A', 'B', 'C'):
        src.add_data_type_tag(tag)
    # Media type matches but required tag 'D' is missing
    cond = ParseTriggerCondition('text/plain', set(['A', 'B', 'C', 'D']))
    self.assertEqual(cond.is_condition_met(src), False)
def test_both_correct(self):
    '''Tests calling IngestTriggerCondition.is_condition_met() with a source file that meets
    both criteria'''
    src = source_test_utils.create_source(media_type='text/plain')
    for tag in ('A', 'B', 'C'):
        src.add_data_type_tag(tag)
    cond = IngestTriggerCondition('text/plain', set(['A', 'B', 'C']))
    self.assertEqual(cond.is_condition_met(src), True)
def setUp(self):
    """Build a three-generation file ancestry tree plus a stand-alone product."""
    django.setup()

    # Generation 1
    self.file_1 = source_test_utils.create_source(file_name='my_file_1.txt')
    self.file_2 = source_test_utils.create_source(file_name='my_file_2.txt')
    self.file_8 = source_test_utils.create_source(file_name='my_file_8.txt')

    # Generation 2
    job_exe_1 = job_test_utils.create_job_exe()
    recipe_job_1 = recipe_test_utils.create_recipe_job(job=job_exe_1.job)
    self.file_3 = prod_test_utils.create_product(job_exe=job_exe_1)
    self.file_4 = prod_test_utils.create_product(job_exe=job_exe_1)
    self.file_5 = prod_test_utils.create_product(job_exe=job_exe_1)

    # Generation 3
    job_exe_2 = job_test_utils.create_job_exe()
    recipe_job_2 = recipe_test_utils.create_recipe_job(job=job_exe_2.job)
    self.file_6 = prod_test_utils.create_product(job_exe=job_exe_2)

    # Stand alone file
    self.file_7 = prod_test_utils.create_product()

    # First job links generation 1 to 2
    gen_1_to_2 = [
        (self.file_1, self.file_3),
        (self.file_1, self.file_4),
        (self.file_1, self.file_5),
        (self.file_2, self.file_4),
        (self.file_2, self.file_5),
    ]
    for parent, child in gen_1_to_2:
        FileAncestryLink.objects.create(ancestor=parent, descendant=child, job_exe=job_exe_1,
                                        job=job_exe_1.job, recipe=recipe_job_1.recipe)

    # Second job links generation 2 to 3 (plus an indirect link back to generation 1)
    FileAncestryLink.objects.create(ancestor=self.file_3, descendant=self.file_6,
                                    job_exe=job_exe_2, job=job_exe_2.job,
                                    recipe=recipe_job_2.recipe)
    FileAncestryLink.objects.create(ancestor=self.file_1, descendant=self.file_6,
                                    job_exe=job_exe_2, job=job_exe_2.job,
                                    recipe=recipe_job_2.recipe,
                                    ancestor_job_exe=job_exe_1, ancestor_job=job_exe_1.job)
def test_media_type_incorrect(self):
    '''Tests calling IngestTriggerCondition.is_condition_met() with a source file that only has
    the correct data types'''
    src = source_test_utils.create_source(media_type='text/plain')
    for tag in ('A', 'B', 'C'):
        src.add_data_type_tag(tag)
    # All required tags present but the media type does not match
    cond = IngestTriggerCondition('application/json', set(['A', 'B', 'C']))
    self.assertEqual(cond.is_condition_met(src), False)
def test_successful(self):
    """Tests calling QueueManager.queue_new_job() successfully with a Seed job type"""
    workspace = storage_test_utils.create_workspace()
    source_file = source_test_utils.create_source(workspace=workspace)
    event = trigger_test_utils.create_trigger_event()

    # Minimal Seed manifest with one file input and one multi-file output
    manifest = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'test-job',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': 'Test Job',
            'description': 'This is a test job',
            'maintainer': {
                'name': 'John Doe',
                'email': '*****@*****.**'
            },
            'timeout': 10,
            'interface': {
                'command': '',
                'inputs': {
                    'files': [{
                        'name': 'input_a'
                    }]
                },
                'outputs': {
                    'files': [{
                        'name': 'output_a',
                        'multiple': True,
                        'pattern': '*.png'
                    }]
                }
            }
        }
    }
    job_type = job_test_utils.create_seed_job_type(manifest=manifest)

    data_dict = {
        'version': '1.0',
        'input_data': [{
            'name': 'input_a',
            'file_id': source_file.id,
        }],
        'output_data': [{
            'name': 'output_a',
            'workspace_id': workspace.id
        }]
    }

    queued_job = Queue.objects.queue_new_job(job_type, JobData(data_dict), event)
    self.assertEqual(queued_job.status, 'QUEUED')
def test_no_any_data_types(self):
    """
    Tests calling ParseTriggerCondition.is_condition_met() with a source file that has no match
    with any_of_data_types
    """
    src = source_test_utils.create_source(media_type='text/plain')
    src.add_data_type_tag('F')
    # File tag 'F' is not in the any-of set {A, B, C}
    cond = ParseTriggerCondition(None, set([]), set(['A', 'B', 'C']), set([]))
    self.assertEqual(cond.is_condition_met(src), False)
def test_has_not_data_types(self):
    """
    Tests calling IngestTriggerCondition.is_condition_met() with a source file that a match
    with not_data_types
    """
    src = source_test_utils.create_source(media_type='text/plain')
    src.add_data_type_tag('A')
    # File tag 'A' appears in the not-data-types set, so the condition fails
    cond = IngestTriggerCondition(None, set([]), set([]), set(['A', 'B', 'C']))
    self.assertEqual(cond.is_condition_met(src), False)
def test_has_data_types(self):
    '''Tests calling ParseTriggerCondition.is_condition_met() with a source file that has all
    required data types'''
    src = source_test_utils.create_source(media_type='text/plain')
    # File carries a superset of the required tags
    for tag in ('A', 'B', 'C', 'D', 'E'):
        src.add_data_type_tag(tag)
    cond = ParseTriggerCondition(None, set(['A', 'B', 'C']))
    self.assertEqual(cond.is_condition_met(src), True)
def test_uuid_use_input_files(self):
    """Tests setting UUIDs on products with different source input files."""
    other_source = source_test_utils.create_source(file_name='input2.txt',
                                                   workspace=self.workspace)

    products1 = ProductFile.objects.upload_files(self.files, [self.source_file.id],
                                                 self.job_exe, self.workspace)
    products2 = ProductFile.objects.upload_files(self.files, [other_source.id],
                                                 self.job_exe, self.workspace)

    # Make sure the source files are taken into account
    self.assertIsNotNone(products1[0].uuid)
    self.assertIsNotNone(products1[1].uuid)
    self.assertNotEqual(products1[0].uuid, products2[0].uuid)
    self.assertNotEqual(products1[1].uuid, products2[1].uuid)
def test_has_all_data_types(self):
    """
    Tests calling IngestTriggerCondition.is_condition_met() with a source file that has all
    three data type conditions
    """
    src = source_test_utils.create_source(media_type='text/plain')
    src.add_data_type_tag('A')
    src.add_data_type_tag('B')
    # all-of {A} satisfied, any-of {A, B} satisfied, not {C} absent -> met
    cond = IngestTriggerCondition(None, set(['A']), set(['A', 'B']), set(['C']))
    self.assertEqual(cond.is_condition_met(src), True)
def setUp(self):
    """Creates a mocked workspace, a source file, a seed job type with JSON inputs, and product file metadata fixtures."""
    django.setup()

    # Mocked workspace upload: just persist each file record instead of doing real I/O
    def upload_files(file_uploads):
        for file_upload in file_uploads:
            file_upload.file.save()

    # Mocked workspace delete: persist the file records instead of doing real I/O
    def delete_files(files):
        for scale_file in files:
            scale_file.save()

    self.workspace = storage_test_utils.create_workspace()
    self.workspace.upload_files = MagicMock(side_effect=upload_files)
    self.workspace.delete_files = MagicMock(side_effect=delete_files)
    self.source_file = source_test_utils.create_source(file_name='input1.txt', workspace=self.workspace)

    # Seed manifest with two JSON string inputs and no file inputs
    inputs_json = [
        {'name': 'property1', 'type': 'string'},
        {'name': 'property2', 'type': 'string'}
    ]
    manifest = job_test_utils.create_seed_manifest(inputs_json=inputs_json, command='my_command')
    manifest['job']['interface']['inputs']['files'] = []
    job_type = job_test_utils.create_seed_job_type(manifest=manifest)
    self.job_exe = job_test_utils.create_job_exe(job_type=job_type)

    # Populate the job's input data (v6 JSON form) and its source metadata fields
    data = self.job_exe.job.get_input_data()
    data.add_value(JsonValue('property1', 'value1'))
    data.add_value(JsonValue('property2', 'value2'))
    self.job_exe.job.input = convert_data_to_v6_json(data).get_dict()
    self.job_exe.job.source_sensor_class = 'classA'
    self.job_exe.job.source_sensor = '1'
    self.job_exe.job.source_collection = '12345'
    self.job_exe.job.source_task = 'my-task'
    self.job_exe.job.save()

    # Second execution with no configured inputs/metadata
    self.job_exe_no = job_test_utils.create_job_exe()

    # Local paths under the job execution output dir, paired with remote upload targets
    self.local_path_1 = os.path.join(SCALE_JOB_EXE_OUTPUT_PATH, 'local/1/file.txt')
    self.local_path_2 = os.path.join(SCALE_JOB_EXE_OUTPUT_PATH, 'local/2/file.json')
    self.local_path_3 = os.path.join(SCALE_JOB_EXE_OUTPUT_PATH, 'local/3/file.h5')
    self.files = [
        ProductFileMetadata(output_name='output_name_1', local_path=self.local_path_1,
                            remote_path='remote/1/file.txt'),
        # Second product overrides media type and all source metadata fields
        ProductFileMetadata(output_name='output_name_2', local_path=self.local_path_2,
                            media_type='application/x-custom-json', remote_path='remote/2/file.json',
                            source_sensor_class='classB', source_sensor='2',
                            source_collection='12346', source_task='my-task-2'),
    ]
    self.files_no = [
        ProductFileMetadata(output_name='output_name_3',
                            local_path=self.local_path_3, media_type='image/x-hdf5-image',
                            remote_path='remote/3/file.h5')
    ]
def setUp(self):
    """Builds a source file and a product lineage (source -> ancestor -> product -> descendant) with full link coverage."""
    django.setup()

    self.source = source_test_utils.create_source()
    self.ancestor = product_test_utils.create_product(file_name='test_ancestor.txt')
    self.descendant = product_test_utils.create_product(file_name='test_descendant.txt')
    self.product = product_test_utils.create_product(file_name='test_product.txt')

    # Link every ancestor/descendant pair implied by the lineage
    pairs = [
        (self.source, self.ancestor),
        (self.source, self.product),
        (self.source, self.descendant),
        (self.ancestor, self.product),
        (self.ancestor, self.descendant),
        (self.product, self.descendant),
    ]
    for up, down in pairs:
        product_test_utils.create_file_link(ancestor=up, descendant=down)
def create_strike_ingest_event(ingest=None, strike=None, source_file=None, description=None, when=None):
    """Creates a strike ingest trigger event for unit testing.

    Any argument left as None is filled in with a freshly created default.

    :param ingest: Optional ingest; created from source_file when omitted
    :param strike: Optional strike process; created when omitted
    :param source_file: Optional source file; created in a new workspace when omitted
    :param description: Optional event description dict; derived from source_file when omitted
    :param when: Optional event timestamp; defaults to now
    :returns: The new ingest event
    """
    if not strike:
        strike = create_strike()
    if not source_file:
        workspace = storage_test_utils.create_workspace()
        # Bug fix: the created source was previously discarded, leaving source_file
        # as None and crashing on source_file.id below
        source_file = source_test_utils.create_source(workspace=workspace)
    if not description:
        description = {
            'version': '1.0',
            'file_id': source_file.id,
            'file_name': source_file.file_name
        }
    if not when:
        when = timezone.now()
    if not ingest:
        ingest = create_ingest(source_file=source_file)

    return IngestEvent.objects.create_strike_ingest_event(ingest.id, strike, description, when)
def setUp(self):
    """Creates a mocked workspace, a source file, and operational/non-operational job executions with product metadata."""
    django.setup()

    # Replace real workspace I/O with save-only stand-ins
    def fake_upload(file_uploads):
        for file_upload in file_uploads:
            file_upload.file.save()

    def fake_delete(files):
        for scale_file in files:
            scale_file.save()

    self.workspace = storage_test_utils.create_workspace()
    self.workspace.upload_files = MagicMock(side_effect=fake_upload)
    self.workspace.delete_files = MagicMock(side_effect=fake_delete)
    self.source_file = source_test_utils.create_source(file_name='input1.txt', workspace=self.workspace)

    # Operational execution with two property inputs
    self.job_exe = job_test_utils.create_job_exe()
    data = self.job_exe.job.get_job_data()
    data.add_property_input('property1', 'value1')
    data.add_property_input('property2', 'value2')
    self.job_exe.job.data = data.get_dict()
    self.job_exe.job.save()

    # Non-operational execution (both job and job type flagged)
    self.job_exe_no = job_test_utils.create_job_exe()
    with transaction.atomic():
        self.job_exe_no.job.is_operational = False
        self.job_exe_no.job.job_type.is_operational = False
        self.job_exe_no.job.save()
        self.job_exe_no.job.job_type.save()

    self.local_path_1 = os.path.join(SCALE_JOB_EXE_OUTPUT_PATH, 'local/1/file.txt')
    self.local_path_2 = os.path.join(SCALE_JOB_EXE_OUTPUT_PATH, 'local/2/file.json')
    self.local_path_3 = os.path.join(SCALE_JOB_EXE_OUTPUT_PATH, 'local/3/file.h5')
    self.files = [
        ProductFileMetadata('output_name_1', self.local_path_1, remote_path='remote/1/file.txt'),
        ProductFileMetadata('output_name_2', self.local_path_2, 'application/x-custom-json', 'remote/2/file.json'),
    ]
    self.files_no = [
        ProductFileMetadata('output_name_3', self.local_path_3, 'image/x-hdf5-image', 'remote/3/file.h5')
    ]
def setUp(self):
    """Creates a workspace, a source file, recipe input data, and a mocked queue event processor."""
    django.setup()

    workspace = storage_test_utils.create_workspace()
    source_file = source_test_utils.create_source(workspace=workspace)

    self.data = {
        "version": "1.0",
        "input_data": [
            {"name": "Recipe Input", "file_id": source_file.id}
        ],
        "workspace_id": workspace.id,
    }
    self.job_type = job_test_utils.create_job_type()

    # Register a fake processor so queue events can be observed
    self.mock_processor = MagicMock(QueueEventProcessor)
    Queue.objects.register_processor(lambda: self.mock_processor)
def setUp(self):
    """Creates a workspace, a source file, recipe input data, and a mocked queue event processor."""
    django.setup()

    workspace = storage_test_utils.create_workspace()
    source_file = source_test_utils.create_source(workspace=workspace)

    input_entry = {
        'name': 'Recipe Input',
        'file_id': source_file.id,
    }
    self.data = {
        'version': '1.0',
        'input_data': [input_entry],
        'workspace_id': workspace.id,
    }
    self.job_type = job_test_utils.create_job_type()

    # Register a fake processor so queue events can be observed
    self.mock_processor = MagicMock(QueueEventProcessor)
    Queue.objects.register_processor(lambda: self.mock_processor)
def setUp(self):
    """Creates a mocked workspace, a source file, job executions, and (path, remote, media type) upload tuples."""
    django.setup()

    # Save-only stand-ins for real workspace I/O
    def fake_upload(file_uploads):
        for file_upload in file_uploads:
            file_upload.file.save()

    def fake_delete(files):
        for scale_file in files:
            scale_file.save()

    self.workspace = storage_test_utils.create_workspace()
    self.workspace.upload_files = MagicMock(side_effect=fake_upload)
    self.workspace.delete_files = MagicMock(side_effect=fake_delete)
    self.source_file = source_test_utils.create_source(file_name='input1.txt', workspace=self.workspace)

    self.job_exe = job_test_utils.create_job_exe()

    # Non-operational execution (both job and job type flagged)
    self.job_exe_no = job_test_utils.create_job_exe()
    with transaction.atomic():
        self.job_exe_no.job.is_operational = False
        self.job_exe_no.job.job_type.is_operational = False
        self.job_exe_no.job.save()
        self.job_exe_no.job.job_type.save()

    self.local_path_1 = os.path.join(SCALE_JOB_EXE_OUTPUT_PATH, 'local/1/file.txt')
    self.local_path_2 = os.path.join(SCALE_JOB_EXE_OUTPUT_PATH, 'local/2/file.json')
    self.local_path_3 = os.path.join(SCALE_JOB_EXE_OUTPUT_PATH, 'local/3/file.h5')

    # (local path, remote path, media type or None)
    self.files = [
        (self.local_path_1, 'remote/1/file.txt', None),
        (self.local_path_2, 'remote/2/file.json', 'application/x-custom-json'),
    ]
    self.files_no = [
        (self.local_path_3, 'remote/3/file.h5', 'image/x-hdf5-image'),
    ]
def setUp(self):
    """Creates an ingest in INGESTING status, its job execution id, and a source file in the ingest workspace."""
    django.setup()

    self.ingest = ingest_test_utils.create_ingest(status='INGESTING')
    # Bug fix: look up by the job's primary key; the raw `job_id` column lookup
    # does not accept a model instance
    self.job_exe_id = JobExecution.objects.get(job_id=self.ingest.job.id).id
    self.source_file = source_test_utils.create_source(workspace=self.ingest.workspace)
def setUp(self):
    """Creates one parsed and one unparsed source file."""
    django.setup()

    # Parsed file with a fixed name, plus an unparsed counterpart
    self.source1 = source_test_utils.create_source(file_name='test.txt', is_parsed=True)
    self.source2 = source_test_utils.create_source(is_parsed=False)
def setUp(self):
    """Creates a queued ingest and a source file in the same workspace."""
    django.setup()

    self.ingest = ingest_test_utils.create_ingest(status='QUEUED')
    self.source_file = source_test_utils.create_source(workspace=self.ingest.workspace)
def setUp(self):
    """Builds a two-job recipe type (text -> PNG -> file), recipe input data, and a mocked queue event processor."""
    django.setup()

    workspace = storage_test_utils.create_workspace()
    source_file = source_test_utils.create_source(workspace=workspace)
    self.event = trigger_test_utils.create_trigger_event()

    # Job 1: consumes one plain-text file, produces PNG files
    interface_1 = {
        'version': '1.0',
        'command': 'test_command',
        'command_arguments': 'test_arg',
        'input_data': [{
            'name': 'Test Input 1',
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'output_data': [{
            'name': 'Test Output 1',
            'type': 'files',
            'media_type': 'image/png',
        }]
    }
    self.job_type_1 = job_test_utils.create_job_type(interface=interface_1)

    # Job 2: consumes PNG/TIFF files, produces a single file
    interface_2 = {
        'version': '1.0',
        'command': 'test_command',
        'command_arguments': 'test_arg',
        'input_data': [{
            'name': 'Test Input 2',
            'type': 'files',
            'media_types': ['image/png', 'image/tiff'],
        }],
        'output_data': [{
            'name': 'Test Output 2',
            'type': 'file',
        }]
    }
    self.job_type_2 = job_test_utils.create_job_type(interface=interface_2)

    # Recipe: feed the recipe input into Job 1, then pipe Job 1's output into Job 2
    definition = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'type': 'file',
            'media_types': ['text/plain'],
        }],
        'jobs': [{
            'name': 'Job 1',
            'job_type': {
                'name': self.job_type_1.name,
                'version': self.job_type_1.version,
            },
            'recipe_inputs': [{
                'recipe_input': 'Recipe Input',
                'job_input': 'Test Input 1',
            }]
        }, {
            'name': 'Job 2',
            'job_type': {
                'name': self.job_type_2.name,
                'version': self.job_type_2.version,
            },
            'dependencies': [{
                'name': 'Job 1',
                'connections': [{
                    'output': 'Test Output 1',
                    'input': 'Test Input 2',
                }]
            }]
        }]
    }
    # Validate that the job interfaces actually chain before registering the type
    recipe_definition = RecipeDefinition(definition)
    recipe_definition.validate_job_interfaces()
    self.recipe_type = recipe_test_utils.create_recipe_type(definition=definition)

    # Recipe input data pointing at the created source file and workspace
    data = {
        'version': '1.0',
        'input_data': [{
            'name': 'Recipe Input',
            'file_id': source_file.id,
        }],
        'workspace_id': workspace.id,
    }
    self.data = RecipeData(data)

    # Register a fake processor so queue events can be observed
    self.mock_processor = MagicMock(QueueEventProcessor)
    Queue.objects.register_processor(lambda: self.mock_processor)