def create_and_add_data(self, hub_uuid, data_type, test_data):
    """
    Make sure that we can create files and add data to them

    To be called by another function that has set up the
    environment for the test
    """
    chunks_to_write = self.load_chunks(test_data)
    chunk_dates = self.get_dates_from_chunks(chunks_to_write)

    logs = {}
    for date in chunk_dates:
        log_location = self.DATA_FILE_LOCATION.format(
            hub_uuid, data_type, date)
        self.assertFalse(os.path.exists(log_location))
        logs[date] = log_location

    if not os.path.exists(self.DATA_FILE_DIRECTORY):
        num_existing_files = 0
    else:
        num_existing_files = len(os.listdir(self.DATA_FILE_DIRECTORY))

    request = self.create_post_request(hub_uuid, data_type, chunks_to_write)
    resp = views.post_datafile(request, self.project.key)
    resp_json = json.loads(resp.content)

    self.assertEqual(int(resp_json["chunks_written"]),
                     int(resp_json["chunks_received"]))

    num_files_created = (len(os.listdir(self.DATA_FILE_DIRECTORY))
                         - num_existing_files)
    self.assertEqual(len(chunk_dates), num_files_created)

    # since chunks are sorted before being written to file,
    # we sort the chunks we expect and group them by date
    sorted_chunks = self.sort_chunks(chunks_to_write)
    grouped_chunks = self.group_chunks(sorted_chunks)

    self.assertEqual(len(grouped_chunks), len(logs))
    for log in logs.values():
        self.assertTrue(os.path.exists(log))

    actual_chunks_written = 0
    for date, expected_chunks in grouped_chunks.items():
        actual_chunks = self.load_chunks(logs[date])
        self.assertEqual(len(actual_chunks), len(expected_chunks))
        actual_chunks_written += len(actual_chunks)
        self.assertEqual(actual_chunks, expected_chunks)

    self.assertEqual(int(resp_json["chunks_written"]),
                     actual_chunks_written)

    return logs.values()
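# A minimal sketch of how a test might drive the helper above. The hub
# UUID, the "sensor" data type, and TEST_DATA_MULTIPLE_DATES are
# illustrative assumptions, not names taken from the real suite.
def test_create_data_files_for_multiple_dates(self):
    """
    Hypothetical caller that sets up the environment and delegates
    the create-and-verify work to create_and_add_data
    """
    hub_uuid = "hypothetical-hub-uuid"  # assumed identifier
    created_logs = self.create_and_add_data(
        hub_uuid, "sensor", self.TEST_DATA_MULTIPLE_DATES)
    # the helper returns the log paths it verified, so follow-up
    # assertions or cleanup can reuse them
    for log in created_logs:
        self.assertTrue(os.path.exists(log))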
def append_data(self, hub_uuid, data_type, test_data):
    """
    Make sure we can add data to existing files

    To be called by another function that has set up the
    environment for the test
    """
    chunks_to_write = self.load_chunks(test_data)
    chunk_dates = self.get_dates_from_chunks(chunks_to_write)

    logs = {}
    num_existing_chunks = {}
    # gather the number of existing chunks in each log file
    # so we can be sure we wrote the correct amount to each file
    for date in chunk_dates:
        log_location = self.DATA_FILE_LOCATION.format(
            hub_uuid, data_type, date)
        self.assertTrue(os.path.exists(log_location))
        num_existing_chunks[date] = len(self.load_chunks(log_location))
        logs[date] = log_location

    request = self.create_post_request(hub_uuid, data_type, chunks_to_write)
    resp = views.post_datafile(request, self.project.key)
    resp_json = json.loads(resp.content)

    self.assertEqual(int(resp_json["chunks_written"]),
                     int(resp_json["chunks_received"]))

    sorted_chunks = self.sort_chunks(chunks_to_write)
    grouped_chunks = self.group_chunks(sorted_chunks)

    for date, expected_chunks in grouped_chunks.items():
        log_location = logs[date]
        actual_chunks = self.load_chunks(log_location)
        self.assertEqual(len(actual_chunks),
                         len(expected_chunks) + num_existing_chunks[date])
        self.assertEqual(actual_chunks[-len(expected_chunks):],
                         expected_chunks)
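# A sketch of the intended call order for the two helpers above:
# create_and_add_data first builds the per-date files, then append_data
# posts a second batch against the same hub and data type. The TEST_DATA_*
# attributes and data type string are assumptions for illustration.
def test_append_to_existing_data_files(self):
    """
    Hypothetical caller verifying that a second POST appends to the
    files rather than overwriting them
    """
    hub_uuid = "hypothetical-hub-uuid"  # assumed identifier
    self.create_and_add_data(
        hub_uuid, "sensor", self.TEST_DATA_MULTIPLE_DATES)
    # posting again for the same dates should append to the files
    # created above
    self.append_data(hub_uuid, "sensor", self.TEST_DATA_MULTIPLE_DATES)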
def create_and_add_data(self, hub, data_type, log_location, test_data):
    """
    Make sure that we can create files and add data to them
    """
    self.assertFalse(os.path.exists(log_location))

    expected_chunks = self.load_chunks(test_data)
    request = self.create_post_request(hub, data_type, expected_chunks)
    resp = views.post_datafile(request, 'test-project')
    resp_json = json.loads(resp.content)

    self.assertTrue(os.path.exists(log_location))
    actual_chunks = self.load_chunks(log_location)
    self.assertEqual(len(actual_chunks), len(expected_chunks))
    self.assertEqual(int(resp_json["chunks_written"]),
                     len(expected_chunks))
    self.assertEqual(int(resp_json["chunks_written"]),
                     int(resp_json["chunks_received"]))
    self.assertEqual(actual_chunks, expected_chunks)
def append_data(self, hub, data_type, log_location, test_data):
    """
    Make sure that we can add data to existing files
    """
    self.assertTrue(os.path.exists(log_location))
    num_existing_chunks = len(self.load_chunks(log_location))

    chunks_to_write = self.load_chunks(test_data)
    request = self.create_post_request(hub, data_type, chunks_to_write)
    resp = views.post_datafile(request, 'test-project')
    resp_json = json.loads(resp.content)

    actual_chunks = self.load_chunks(log_location)
    self.assertEqual(int(resp_json["chunks_written"]),
                     int(resp_json["chunks_received"]))
    # the number of chunks in the data file should equal the sum of
    # the number of chunks to write and the existing chunks
    self.assertEqual(len(actual_chunks),
                     len(chunks_to_write) + num_existing_chunks)
    # the last chunk in the file should be the same as the last chunk
    # in the list of chunks to write
    self.assertEqual(actual_chunks[-1], chunks_to_write[-1])
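# The two single-file variants above take an explicit log_location rather
# than deriving one path per date. A sketch of pairing them; the path,
# self.hub, and the TEST_DATA_* attributes are assumed for illustration.
def test_create_then_append_single_file(self):
    """
    Hypothetical caller for the single-file create/append helpers
    """
    # assumed path; real tests would derive this from their fixtures
    log_location = os.path.join(self.DATA_FILE_DIRECTORY,
                                "hub1_sensor.log")
    self.create_and_add_data(
        self.hub, "sensor", log_location, self.TEST_DATA)
    # a second batch posted against the same path should be appended
    self.append_data(
        self.hub, "sensor", log_location, self.TEST_DATA_APPEND)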