def test_dir_with_content(self):
    rv = self.fileops.create_folder(tools.root, '/d1')
    rv = self.file.put(tools.root, '/d1/f1', tools.generate_random_data())
    rv = self.file.put(tools.root, '/d1/f2', tools.generate_random_data())
    self.assertEqual(rv.status_code, 200)
    rv = self.file.metadata(tools.root, '/d1')
    self.assertEqual(rv.status_code, 200)
    src_metadata = rv.json
    rv = self.fileops.copy(tools.root, '/d1', '/d2/')
    self.assertEqual(rv.status_code, 200)
    dst_metadata = rv.json
    # Can't compare tree revisions: they change with the instance
    src_metadata.pop('rev')
    dst_metadata.pop('rev')
    self.assertEqual(src_metadata.pop('path'), '/d1')
    self.assertEqual(src_metadata['contents'][0].pop('path'), '/d1/f1')
    self.assertEqual(src_metadata['contents'][1].pop('path'), '/d1/f2')
    self.assertEqual(dst_metadata.pop('path'), '/d2')
    self.assertEqual(dst_metadata['contents'][0].pop('path'), '/d2/f1')
    self.assertEqual(dst_metadata['contents'][1].pop('path'), '/d2/f2')
    self.assertDictEqual(src_metadata, dst_metadata)
def test_copy_as_subfile(self):
    rv = self.fileops.create_folder(tools.root, '/d1')
    rv = self.file.put(tools.root, '/d1/f1', tools.generate_random_data())
    rv = self.file.put(tools.root, '/d1/f2', tools.generate_random_data())
    self.assertEqual(rv.status_code, 200)
    rv = self.fileops.copy(tools.root, '/d1/f1', '/d1/f2/f2')
    self.assertEqual(rv.status_code, 403)
def test_dup_filepath(self):
    data1 = tools.generate_random_data()
    rv = self.file.put(tools.root, 'f1', data1)
    self.assertEqual(rv.status_code, 200)
    self.assertValidFileMetadata(rv.json, tools.root, '/f1', data1)
    data2 = tools.generate_random_data()
    rv = self.file.put(tools.root, 'f1', data2)
    self.assertEqual(rv.status_code, 200)
    self.assertValidFileMetadata(rv.json, tools.root, '/f1', data2)
def test_delete_twice(self):
    rv = self.fileops.create_folder(tools.root, '/d1')
    rv = self.file.put(tools.root, '/d1/f1', tools.generate_random_data())
    rv = self.fileops.delete(tools.root, '/d1/f1')
    self.assertEqual(rv.status_code, 200)
    rv = self.fileops.delete(tools.root, '/d1/f1')
    self.assertEqual(rv.status_code, 404)
def test_mimetype(self):
    data = tools.generate_random_data()
    rv = self.file.put(tools.root, 'f1.pdf', data)
    rv = self.file.metadata(tools.root, 'f1.pdf')
    self.assertEqual(rv.status_code, 200)
    self.assertEqual(rv.json['mime_type'], 'application/pdf')
    self.assertValidFileMetadata(rv.json, tools.root, '/f1.pdf', data)
def test_conflict_unknown_parent_rev(self):
    data = [tools.generate_random_data() for _ in range(2)]
    rv = self.file.put(tools.root, '/f1.txt', data[0])
    rv = self.file.put(tools.root, '/f1.txt', data[1], parent_rev='_')
    self.assertEqual(rv.status_code, 200)
    self.assertValidFileMetadata(rv.json, tools.root,
                                 '/f1 (conflicted copy).txt', data[1])
def test_embedded_metadata(self):
    data = tools.generate_random_data()
    rv = self.file.put(tools.root, 'f1', data)
    rv = self.file.get(tools.root, 'f1')
    self.assertEqual(rv.status_code, 200)
    self.assertEqual(rv.data, data)
    json_data = json.loads(rv.headers['x-datastore-metadata'])
    self.assertValidFileMetadata(json_data, tools.root, '/f1', data)
def test_subdir(self):
    rv = self.fileops.create_folder(tools.root, '/d1/')
    self.assertEqual(rv.status_code, 200)
    data = tools.generate_random_data()
    rv = self.file.put(tools.root, '/d1/f1', data)
    self.assertEqual(rv.status_code, 200)
    self.assertValidFileMetadata(rv.json, tools.root, '/d1/f1', data)
def test_create(self):
    rv = self.file.put(tools.root, 'f1', tools.generate_random_data())
    self.assertEqual(rv.status_code, 200)
    rv = self.file.shares(tools.root, 'f1')
    self.assertEqual(rv.status_code, 200)
    self.assertIn('link', rv.json)
    self.assertIn('expires', rv.json)
def test_copy_overwrite_dir(self):
    rv = self.fileops.create_folder(tools.root, '/d1')
    rv = self.fileops.create_folder(tools.root, '/d2')
    rv = self.file.put(tools.root, '/d1/f1', tools.generate_random_data())
    self.assertEqual(rv.status_code, 200)
    rv = self.fileops.copy(tools.root, '/d1/f1', '/d2/')
    self.assertEqual(rv.status_code, 200)
def test_conflict_good_parent_rev(self):
    data = [tools.generate_random_data() for _ in range(2)]
    rv = self.file.put(tools.root, '/f1.txt', data[0])
    self.assertEqual(rv.status_code, 200)
    rev = rv.json['rev']
    rv = self.file.put(tools.root, '/f1.txt', data[1], parent_rev=rev)
    self.assertEqual(rv.status_code, 200)
    self.assertValidFileMetadata(rv.json, tools.root, '/f1.txt', data[1])
def test_recursive_folder(self):
    rv = self.fileops.create_folder(tools.root, '/d1')
    rv = self.fileops.create_folder(tools.root, '/d1/d2')
    rv = self.file.put(tools.root, '/d1/f1', tools.generate_random_data())
    rv = self.fileops.delete(tools.root, '/d1')
    self.assertEqual(rv.status_code, 200)
    for item in ['/d1', '/d1/d2', '/d1/f1']:
        rv = self.file.metadata(tools.root, item)
        self.assertTrue(rv.json['is_deleted'])
def test_override_expire_values(self):
    rv = self.file.put(tools.root, 'f1', tools.generate_random_data())
    rv = self.file.shares(tools.root, 'f1', expire_days=0)
    self.assertEqual(rv.status_code, 406)
    rv = self.file.shares(tools.root, 'f1', expire_days=1)
    self.assertEqual(rv.status_code, 200)
    rv = self.file.shares(tools.root, 'f1', expire_days=10000)
    self.assertEqual(rv.status_code, 200)
    rv = self.file.shares(tools.root, 'f1', expire_days=10001)
    self.assertEqual(rv.status_code, 406)
def test_get_link(self):
    data = tools.generate_random_data()
    rv = self.file.put(tools.root, 'f1', data)
    rv = self.file.shares(tools.root, 'f1')
    self.assertEqual(rv.status_code, 200)
    json_data = rv.json
    rv = self.file.app.get(json_data['link'])
    self.assertEqual(rv.status_code, 200)
    self.assertEqual(rv.data, data)
def test_deep(self):
    rv = self.fileops.create_folder(tools.root, '/d1')
    rv = self.fileops.create_folder(tools.root, '/d1/d2/')
    rv = self.fileops.create_folder(tools.root, '/d1/d2/d3/')
    data = tools.generate_random_data()
    rv = self.file.put(tools.root, '/d1/d2/d3/f1', data)
    rv = self.file.get(tools.root, '/d1/d2/d3/f1')
    self.assertEqual(rv.status_code, 200)
    self.assertEqual(rv.data, data)
def test_dup_content(self):
    data = tools.generate_random_data()
    rv = self.file.put(tools.root, 'f1', data)
    self.assertEqual(rv.status_code, 200)
    self.assertValidFileMetadata(rv.json, tools.root, '/f1', data)
    rv = self.file.put(tools.root, 'f1', data)
    self.assertEqual(rv.status_code, 200)
    self.assertValidFileMetadata(rv.json, tools.root, '/f1', data)
def test_conflict_without_overwrite(self):
    data = [tools.generate_random_data() for _ in range(3)]
    rv = self.file.put(tools.root, '/f1.txt', data[0])
    rv = self.file.put(tools.root, '/f1.txt', data[1], overwrite=False)
    self.assertEqual(rv.status_code, 200)
    self.assertValidFileMetadata(rv.json, tools.root, '/f1 (1).txt', data[1])
    rv = self.file.put(tools.root, '/f1.txt', data[2], overwrite=False)
    self.assertEqual(rv.status_code, 200)
    self.assertValidFileMetadata(rv.json, tools.root, '/f1 (2).txt', data[2])
def test_get_expired_link(self):
    data = tools.generate_random_data()
    rv = self.file.put(tools.root, 'f1', data)
    rv = self.file.shares(tools.root, 'f1')
    self.assertEqual(rv.status_code, 200)
    old_date = datetime.utcnow() + timedelta(days=1, seconds=1)
    with mock.patch('datastore.api.files.shares.datetime') as m:
        m.utcnow.return_value = old_date
        rv = self.file.app.get(urllib2.quote(rv.json['link']))
        self.assertEqual(rv.status_code, 404)
def test_delete_link(self):
    rv = self.file.put(tools.root, 'f1', tools.generate_random_data())
    rv = self.file.shares(tools.root, 'f1')
    self.assertEqual(rv.status_code, 200)
    key = rv.json['key']
    rv = self.file.remove_shares(key)
    self.assertEqual(rv.status_code, 200)
    rv = self.file.get(key)
    self.assertEqual(rv.status_code, 404)
def generate_data_column(self, column, number_of_row, id):
    """
    Generate a value according to the column type.

    :param column: schema of the column to process
    :param number_of_row: number of rows expected to be generated for this table
    :type number_of_row: int
    :param id: id of the last generated row
    :type id: int
    :return: a randomly generated value
    :rtype: str|int
    """
    generated_data = None
    if column['name'] == 'id':
        generated_data = id + 1
    elif Key.type in column:
        if column[Key.type] == Key.foreignkey:
            if Key.foreignkey in column:
                # todo: handle self-referencing tables (a column whose foreign
                # key points to the table currently being generated)
                foreign_column = column[Key.foreignkey][Key.column_name]
                foreign_table = column[Key.foreignkey][Key.table_name]
                if foreign_table not in self.data:
                    # Generate the referenced table first so a value can be picked from it.
                    self.data[foreign_table] = self.generate_table_data(foreign_table)
                one_random_element_in_foreign_data = tools.take_one_element_in_array(
                    self.data[foreign_table])
                generated_data = one_random_element_in_foreign_data[foreign_column]
            else:
                print('[Error] column is of type foreign key but has no destination table')
                exit(1)  # todo
        else:
            generated_datas = tools.generate_random_data(
                column[Key.type],
                number_of_row,
                column[Key.reference_file] if Key.reference_file in column else '',
                self.ref_data[column[Key.ref]]
                if Key.ref in column and column[Key.ref] in self.ref_data else '')
            generated_data = generated_datas[Key.data]
            if Key.ref_list_data in generated_datas and Key.ref in column:
                # Cache the reference data so later columns with the same ref reuse it.
                self.ref_data[column[Key.ref]] = generated_datas[Key.ref_list_data]
    return generated_data
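# Illustrative sketch (not part of the original code): one way generate_data_column
# might be driven for a foreign-key column. The column-schema layout and Key
# constants shown here are assumptions inferred from the accesses made in the
# method above, not a documented format.
#
#     column = {
#         'name': 'author_id',
#         Key.type: Key.foreignkey,
#         Key.foreignkey: {
#             Key.table_name: 'authors',
#             Key.column_name: 'id',
#         },
#     }
#     # Generates the 'authors' table if it is not cached yet, then picks the
#     # 'id' of one random row as the foreign-key value for this column.
#     value = self.generate_data_column(column, number_of_row=100, id=last_id)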
def test_limit(self):
    rv = self.file.put(tools.root, 'f1', tools.generate_random_data())
    self.assertEqual(rv.status_code, 200)
    rv = self.file.revisions(tools.root, 'f1', rev_limit=0)
    self.assertEqual(rv.status_code, 406)
    rv = self.file.revisions(tools.root, 'f1', rev_limit=1)
    self.assertEqual(rv.status_code, 200)
    rv = self.file.revisions(tools.root, 'f1', rev_limit=1000)
    self.assertEqual(rv.status_code, 200)
    rv = self.file.revisions(tools.root, 'f1', rev_limit=1001)
    self.assertEqual(rv.status_code, 406)
def test_put_resets_deleted(self):
    data = [tools.generate_random_data() for _ in range(4)]
    rv = self.file.put(tools.root, '/f1.txt', data[0])
    self.assertEqual(rv.status_code, 200)
    rv = self.fileops.delete(tools.root, '/f1.txt')
    self.assertEqual(rv.status_code, 200)
    self.assertDictContainsSubset({'is_deleted': True}, rv.json)
    rv = self.file.put(tools.root, '/f1.txt', data[0])
    self.assertEqual(rv.status_code, 200)
    self.assertFalse(rv.json.get('is_deleted', False))
def test_hash_change_subdir(self):
    rv = self.fileops.create_folder(tools.root, 'd1')
    rv = self.fileops.create_folder(tools.root, 'd1/d2')
    rv = self.file.metadata(tools.root, 'd1')
    self.assertEqual(rv.status_code, 200)
    options = {'hash': rv.json['hash']}
    data = tools.generate_random_data()
    rv = self.file.put(tools.root, 'd1/d2/f1', data)
    self.assertEqual(rv.status_code, 200)
    rv = self.file.metadata(tools.root, 'd1', **options)
    self.assertEqual(rv.status_code, 304)
def test_file__std(self):
    rv = self.fileops.create_folder(tools.root, '/d1')
    rv = self.file.put(tools.root, '/d1/f1', tools.generate_random_data())
    rv = self.fileops.delete(tools.root, '/d1/f1')
    self.assertEqual(rv.status_code, 200)
    self.assertIn('rev', rv.json)
    self.assertIn('modified', rv.json)
    self.assertDictContainsSubset({
        'is_deleted': True,
        'path': '/d1/f1',
        'root': tools.root,
    }, rv.json)
def test_hash_put_file(self):
    rv = self.fileops.create_folder(tools.root, 'd1')
    rv = self.file.metadata(tools.root, 'd1')
    self.assertEqual(rv.status_code, 200)
    options = {'hash': rv.json['hash']}
    data = tools.generate_random_data()
    rv = self.file.put(tools.root, '/d1/f1', data)
    self.assertEqual(rv.status_code, 200)
    rv = self.file.metadata(tools.root, 'd1', **options)
    self.assertEqual(rv.status_code, 200)
    self.assertValidDirMetadata(rv.json, tools.root, '/d1')
def test_conflict_with_conflicted_copy(self):
    data = [tools.generate_random_data() for _ in range(4)]
    rv = self.file.put(tools.root, '/f1.txt', data[0])
    rv = self.file.put(tools.root, '/f1.txt', data[1], parent_rev='_')
    rv = self.file.put(tools.root, '/f1.txt', data[2], overwrite=False,
                       parent_rev='_')
    self.assertEqual(rv.status_code, 200)
    self.assertValidFileMetadata(rv.json, tools.root,
                                 '/f1 (conflicted copy) (1).txt', data[2])
    rv = self.file.put(tools.root, '/f1.txt', data[3], overwrite=True,
                       parent_rev='_')
    self.assertEqual(rv.status_code, 200)
    self.assertValidFileMetadata(rv.json, tools.root,
                                 '/f1 (conflicted copy) (2).txt', data[3])
def test_hash_metadata_param(self):
    data = tools.generate_random_data()
    rv = self.fileops.create_folder(tools.root, 'd1')
    rv = self.file.put(tools.root, '/d1/f1', data)
    self.assertEqual(rv.status_code, 200)
    rv = self.file.metadata(tools.root, 'd1')
    self.assertEqual(rv.status_code, 200)
    options = {'hash': rv.json['hash']}
    rv = self.file.metadata(tools.root, 'd1', **options)
    self.assertEqual(rv.status_code, 304)
    options.update(include_deleted=True)
    rv = self.file.metadata(tools.root, 'd1', **options)
    self.assertEqual(rv.status_code, 304)
def test_override_expire(self):
    rv = self.file.put(tools.root, 'f1', tools.generate_random_data())
    rv = self.file.shares(tools.root, 'f1', expire_days=3)
    self.assertEqual(rv.status_code, 200)
    link = urllib2.quote(rv.json['link'])
    old_date = datetime.utcnow() + timedelta(days=3, seconds=-1)
    with mock.patch('datastore.api.files.shares.datetime') as m:
        m.utcnow.return_value = old_date
        rv = self.file.app.get(link)
        self.assertEqual(rv.status_code, 200)
    old_date = datetime.utcnow() + timedelta(days=3, seconds=+1)
    with mock.patch('datastore.api.files.shares.datetime') as m:
        m.utcnow.return_value = old_date
        rv = self.file.app.get(link)
        self.assertEqual(rv.status_code, 404)
def test_std(self):
    data = [tools.generate_random_data() for _ in range(5)]
    rv = self.file.chunked_upload(data[0])
    self.assertEqual(rv.status_code, 200)
    self.assertValidChunkResponse(rv.json, offset=len(data[0]))
    id_ = rv.json['upload_id']
    for chunk in data[1:]:
        offset = rv.json['offset']
        rv = self.file.chunked_upload(chunk, offset=offset, upload_id=id_)
        self.assertEqual(rv.status_code, 200)
        self.assertValidChunkResponse(rv.json, upload_id=id_,
                                      offset=offset + len(chunk))
    rv = self.file.commit_chunked_upload(tools.root, '/f1', upload_id=id_)
    self.assertEqual(rv.status_code, 200)
    self.assertValidFileMetadata(rv.json, tools.root, '/f1', ''.join(data))
def test_simple_file(self):
    data = tools.generate_random_data()
    rv = self.fileops.create_folder(tools.root, '/d1')
    rv = self.fileops.create_folder(tools.root, '/d2')
    rv = self.file.put(tools.root, '/d1/f1', data)
    self.assertEqual(rv.status_code, 200)
    rv = self.fileops.move(tools.root, '/d1/f1', '/d2/f2')
    self.assertEqual(rv.status_code, 200)
    self.assertValidFileMetadata(rv.json, tools.root, '/d2/f2', data)
    rv = self.file.metadata(tools.root, '/d1/f1')
    self.assertEqual(rv.status_code, 200)
    self.assertDictContainsSubset({'is_deleted': True}, rv.json)
    rv = self.file.get(tools.root, '/d2/f2')
    self.assertEqual(rv.status_code, 200)
    self.assertEqual(rv.data, data)
def test_simple_file(self):
    rv = self.fileops.create_folder(tools.root, '/d1')
    rv = self.file.put(tools.root, '/d1/f1', tools.generate_random_data())
    self.assertEqual(rv.status_code, 200)
    src_metadata = rv.json
    rv = self.fileops.copy(tools.root, '/d1/f1', '/d1/f2')
    self.assertEqual(rv.status_code, 200)
    dst_metadata = rv.json
    self.assertEqual(src_metadata.pop('path'), '/d1/f1')
    self.assertEqual(dst_metadata.pop('path'), '/d1/f2')
    self.assertDictEqual(src_metadata, dst_metadata)
    # Verify through /metadata that the folder does contain both files.
    rv = self.file.metadata(tools.root, 'd1')
    dir_metadata = rv.json
    sub_elements = [c['path'] for c in dir_metadata['contents']]
    self.assertIn('/d1/f1', sub_elements)
    self.assertIn('/d1/f2', sub_elements)