def test_upload_dataset_hdf5_invalid_structure(self):
    """Uploading an HDF5 file that lacks the expected structure must be rejected.

    The file contains only an 'x' dataset (no 'y'), so the server is
    expected to respond with 415 Unsupported Media Type.
    """
    user_1 = 'u1'
    d1 = self.create_dataset_metadata(True, user_1)
    token = self.create_token(user_1)
    headers = self.get_auth_headers(token)
    # Create a structurally invalid HDF5 fixture (missing the 'y' dataset).
    test_file_path = storage.get_tmp_path('test')
    with h5py.File(test_file_path, 'w') as f:
        _ = f.create_dataset('x', shape=(1, ))
    try:
        with open(test_file_path, 'rb') as file_io:
            resp = self.upload(d1.id, file_io, headers)
    finally:
        # Always remove the temp fixture, even if the upload raises;
        # otherwise failed runs leak files into the tmp directory.
        os.remove(test_file_path)
    self.assertEqual(resp.status, falcon.HTTP_415)
def test_upload_dataset_hdf5_too_large(self):
    """An upload whose declared Content-Length exceeds the limit must be rejected.

    A valid HDF5 file (with both 'x' and 'y' datasets) is sent, but the
    Content-Length header claims an absurd size (10**20 bytes), so the
    server is expected to respond with 413 Payload Too Large.
    """
    user_1 = 'u1'
    d1 = self.create_dataset_metadata(True, user_1)
    token = self.create_token(user_1)
    headers = self.get_auth_headers(token)
    # Declare an oversized payload; the body itself stays tiny.
    headers.update({'Content-Length': str(10**20)})
    # Create a structurally valid HDF5 fixture.
    test_file_path = storage.get_tmp_path('test')
    with h5py.File(test_file_path, 'w') as f:
        _ = f.create_dataset('x', shape=(1, ))
        _ = f.create_dataset('y', shape=(1, ))
    try:
        with open(test_file_path, 'rb') as file_io:
            resp = self.upload(d1.id, file_io, headers)
    finally:
        # Always remove the temp fixture, even if the upload raises;
        # otherwise failed runs leak files into the tmp directory.
        os.remove(test_file_path)
    self.assertEqual(resp.status, falcon.HTTP_413)