def test_geo_metadata(self, mock_upload_files, mock_create_file_ancestry_links):
    """Tests calling ProductDataFileStore.store_files() successfully with geospatial metadata"""

    geo_metadata = {
        "data_started": "2015-05-15T10:34:12Z",
        "data_ended": "2015-05-15T10:36:12Z",
        "geo_json": {
            "type": "Polygon",
            "coordinates": [[[1.0, 10.0], [2.0, 10.0], [2.0, 20.0], [1.0, 20.0], [1.0, 10.0]]],
        },
    }
    upload_dir = 'upload_dir'
    work_dir = 'work_dir'
    parent_ids = {98, 99}
    local_path_1 = os.path.join('my', 'path', 'one', 'my_test.txt')
    remote_path_1 = os.path.join(ProductDataFileStore()._calculate_remote_path(self.job_exe, parent_ids),
                                 local_path_1)
    media_type_1 = 'text/plain'
    local_path_2 = os.path.join('my', 'path', 'one', 'my_test.json')
    remote_path_2 = os.path.join(ProductDataFileStore()._calculate_remote_path(self.job_exe, parent_ids),
                                 local_path_2)
    media_type_2 = 'application/json'
    data_files = {self.workspace_1.id: [(local_path_1, media_type_1, geo_metadata),
                                        (local_path_2, media_type_2)]}

    ProductDataFileStore().store_files(upload_dir, work_dir, data_files, parent_ids, self.job_exe)

    files_to_store = [(local_path_1, remote_path_1, media_type_1, geo_metadata),
                      (local_path_2, remote_path_2, media_type_2)]
    mock_upload_files.assert_called_with(upload_dir, work_dir, files_to_store, parent_ids, self.job_exe,
                                         self.workspace_1)
def test_geo_metadata(self, mock_upload_files, mock_create_file_ancestry_links):
    """Tests calling ProductDataFileStore.store_files() successfully with geospatial metadata"""

    geo_metadata = {
        "data_started": "2015-05-15T10:34:12Z",
        "data_ended": "2015-05-15T10:36:12Z",
        "geo_json": {
            "type": "Polygon",
            "coordinates": [[[1.0, 10.0], [2.0, 10.0], [2.0, 20.0], [1.0, 20.0], [1.0, 10.0]]],
        },
    }
    parent_ids = {98, 99}
    local_path_1 = os.path.join('my', 'path', 'one', 'my_test.txt')
    full_local_path_1 = os.path.join(SCALE_JOB_EXE_OUTPUT_PATH, local_path_1)
    remote_path_1 = os.path.join(ProductDataFileStore()._calculate_remote_path(self.job_exe, parent_ids),
                                 local_path_1)
    media_type_1 = 'text/plain'
    job_output_1 = 'mock_output_1'
    local_path_2 = os.path.join('my', 'path', 'one', 'my_test.json')
    full_local_path_2 = os.path.join(SCALE_JOB_EXE_OUTPUT_PATH, local_path_2)
    remote_path_2 = os.path.join(ProductDataFileStore()._calculate_remote_path(self.job_exe, parent_ids),
                                 local_path_2)
    media_type_2 = 'application/json'
    job_output_2 = 'mock_output_2'

    metadata_1 = ProductFileMetadata(output_name=job_output_1, local_path=full_local_path_1,
                                     remote_path=remote_path_1, media_type=media_type_1, geojson=geo_metadata)
    metadata_2 = ProductFileMetadata(output_name=job_output_2, local_path=full_local_path_2,
                                     remote_path=remote_path_2, media_type=media_type_2)
    data_files = {self.workspace_1.id: [metadata_1, metadata_2]}

    ProductDataFileStore().store_files(data_files, parent_ids, self.job_exe)

    files_to_store = [metadata_1, metadata_2]
    mock_upload_files.assert_called_with(files_to_store, parent_ids, self.job_exe, self.workspace_1)
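# For context, a minimal sketch of the ProductFileMetadata value object as the
# test above uses it. This is an assumption for illustration -- the real class
# may define additional fields -- but keyword construction with these five
# names matches the calls above, with geojson defaulting to None so that
# metadata_2 can omit it:
#
#     from collections import namedtuple
#
#     ProductFileMetadata = namedtuple(
#         'ProductFileMetadata',
#         ['output_name', 'local_path', 'remote_path', 'media_type', 'geojson'])
#     ProductFileMetadata.__new__.__defaults__ = (None,)  # geojson is optional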
def test_successful(self):
    """Tests calling ProductDataFileStore.get_workspaces() successfully"""

    workspace_ids = [self.workspace_1.id, self.workspace_2.id, self.invalid_workspace_id]

    results = ProductDataFileStore().get_workspaces(workspace_ids)

    self.assertDictEqual(results, {self.workspace_1.id: True, self.workspace_2.id: False})
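# The assertion above implies the get_workspaces() contract: each workspace ID
# that exists maps to its is_active flag, and unknown IDs (invalid_workspace_id
# above) are dropped from the result rather than raising an error. A minimal
# sketch of an implementation meeting that contract, assuming a Django
# Workspace model with an is_active field (an assumption for illustration):
#
#     def get_workspaces(self, workspace_ids):
#         """Maps each existing workspace ID to whether that workspace is active"""
#         workspaces = Workspace.objects.filter(id__in=list(workspace_ids))
#         return {workspace.id: workspace.is_active for workspace in workspaces}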
def ready(self):
    """Registers the product implementations with other applications."""
    from job.configuration.data.data_file import DATA_FILE_STORE
    from product.configuration.product_data_file import ProductDataFileStore

    # Register product files for the data file store
    DATA_FILE_STORE['DATA_FILE_STORE'] = ProductDataFileStore()
def test_successful_recipe_path(self, mock_upload_files, mock_create_file_ancestry_links):
    """Tests calling ProductDataFileStore.store_files() successfully with a job that is in a recipe"""

    job_exe_in_recipe = job_utils.create_job_exe(status='RUNNING')
    recipe = recipe_utils.create_recipe()
    _recipe_job = recipe_utils.create_recipe_job(recipe=recipe, job_name='My Job', job=job_exe_in_recipe.job)
    remote_base_path_with_recipe = os.path.join('recipes', get_valid_filename(recipe.recipe_type.name),
                                                get_valid_filename(recipe.recipe_type.version), 'jobs',
                                                get_valid_filename(job_exe_in_recipe.job.job_type.name),
                                                get_valid_filename(job_exe_in_recipe.job.job_type.version))

    local_path_1 = os.path.join('my', 'path', 'one', 'my_test.txt')
    media_type_1 = 'text/plain'
    local_path_2 = os.path.join('my', 'path', 'one', 'my_test.json')
    media_type_2 = 'application/json'
    local_path_3 = os.path.join('my', 'path', 'three', 'my_test.png')
    media_type_3 = 'image/png'
    local_path_4 = os.path.join('my', 'path', 'four', 'my_test.xml')
    media_type_4 = None

    # Set up mocks
    def new_upload_files(upload_dir, work_dir, file_entries, input_file_ids, job_exe, workspace):
        results = []
        for file_entry in file_entries:
            # Check base remote path for recipe type and job type information
            self.assertTrue(file_entry[1].startswith(remote_base_path_with_recipe))
            if file_entry[0] == local_path_1:
                mock_1 = MagicMock()
                mock_1.id = 1
                results.append(mock_1)
            elif file_entry[0] == local_path_2:
                mock_2 = MagicMock()
                mock_2.id = 2
                results.append(mock_2)
            elif file_entry[0] == local_path_3:
                mock_3 = MagicMock()
                mock_3.id = 3
                results.append(mock_3)
            elif file_entry[0] == local_path_4:
                mock_4 = MagicMock()
                mock_4.id = 4
                results.append(mock_4)
        return results
    mock_upload_files.side_effect = new_upload_files

    data_files = {self.workspace_1.id: [(local_path_1, media_type_1), (local_path_2, media_type_2)],
                  self.workspace_2.id: [(local_path_3, media_type_3), (local_path_4, media_type_4)]}
    parent_ids = {98, 99}  # Dummy values
    upload_dir = 'upload_dir'

    ProductDataFileStore().store_files(upload_dir, 'work_dir', data_files, parent_ids, job_exe_in_recipe)
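# The base path asserted above implies a remote layout of
# recipes/<recipe type name>/<recipe type version>/jobs/<job type name>/<job type version>/...
# for jobs that run inside a recipe. A minimal sketch of the recipe branch of
# _calculate_remote_path() consistent with this test; the RecipeJob lookup and
# any trailing path components are assumptions for illustration, not the real
# implementation:
#
#     def _calculate_remote_path(self, job_exe, input_file_ids):
#         job_type = job_exe.job.job_type
#         job_path = os.path.join('jobs', get_valid_filename(job_type.name),
#                                 get_valid_filename(job_type.version))
#         recipe_job = RecipeJob.objects.filter(job=job_exe.job).first()
#         if not recipe_job:
#             return job_path
#         recipe_type = recipe_job.recipe.recipe_type
#         return os.path.join('recipes', get_valid_filename(recipe_type.name),
#                             get_valid_filename(recipe_type.version), job_path)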
def ready(self):
    """Registers the product implementations with other applications."""
    from job.configuration.data.data_file import DATA_FILE_STORE
    from product.configuration.product_data_file import ProductDataFileStore
    from product.queue_processor import ProductProcessor
    from queue.models import Queue

    # Register product files for the data file store
    DATA_FILE_STORE['DATA_FILE_STORE'] = ProductDataFileStore()

    # Register the queue processor for publishing products
    Queue.objects.register_processor(ProductProcessor)
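# DATA_FILE_STORE acts as a one-slot registry: job code that needs to store or
# validate product files looks the implementation up at run time instead of
# importing the product app directly, which keeps the job app free of a direct
# dependency on the product app. A minimal sketch of a consumer, assuming the
# registry is the plain dict populated above (hypothetical caller, for
# illustration):
#
#     from job.configuration.data.data_file import DATA_FILE_STORE
#
#     data_file_store = DATA_FILE_STORE['DATA_FILE_STORE']
#     workspace_results = data_file_store.get_workspaces(workspace_ids)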
def test_successful(self, mock_upload_files, mock_create_file_ancestry_links):
    """Tests calling ProductDataFileStore.store_files() successfully"""

    local_path_1 = os.path.join('my', 'path', 'one', 'my_test.txt')
    media_type_1 = 'text/plain'
    local_path_2 = os.path.join('my', 'path', 'one', 'my_test.json')
    media_type_2 = 'application/json'
    local_path_3 = os.path.join('my', 'path', 'three', 'my_test.png')
    media_type_3 = 'image/png'
    local_path_4 = os.path.join('my', 'path', 'four', 'my_test.xml')
    media_type_4 = None

    # Set up mocks
    def new_upload_files(upload_dir, work_dir, file_entries, input_file_ids, job_exe, workspace):
        results = []
        for file_entry in file_entries:
            # Check base remote path for job type name and version
            self.assertTrue(file_entry[1].startswith(self.remote_base_path))
            if file_entry[0] == local_path_1:
                mock_1 = MagicMock()
                mock_1.id = 1
                results.append(mock_1)
            elif file_entry[0] == local_path_2:
                mock_2 = MagicMock()
                mock_2.id = 2
                results.append(mock_2)
            elif file_entry[0] == local_path_3:
                mock_3 = MagicMock()
                mock_3.id = 3
                results.append(mock_3)
            elif file_entry[0] == local_path_4:
                mock_4 = MagicMock()
                mock_4.id = 4
                results.append(mock_4)
        return results
    mock_upload_files.side_effect = new_upload_files

    data_files = {self.workspace_1.id: [(local_path_1, media_type_1), (local_path_2, media_type_2)],
                  self.workspace_2.id: [(local_path_3, media_type_3), (local_path_4, media_type_4)]}
    parent_ids = {98, 99}
    upload_dir = 'upload_dir'

    results = ProductDataFileStore().store_files(upload_dir, 'work_dir', data_files, parent_ids, self.job_exe)

    self.assertDictEqual(results, {os.path.join(upload_dir, local_path_1): 1,
                                   os.path.join(upload_dir, local_path_2): 2,
                                   os.path.join(upload_dir, local_path_3): 3,
                                   os.path.join(upload_dir, local_path_4): 4})
    mock_create_file_ancestry_links.assert_called_once_with(parent_ids, {1, 2, 3, 4}, self.job_exe)