    def testListCallsContextStoreToListFiles(self):
        exp_list = [Minifile(file_id="1"),
                    Minifile(file_id="2")]
        self.context_store_mock.list.return_value = exp_list

        self.storage.list()

        self.context_store_mock.list.assert_called()
    def testListReturnsListOfFiles(self):
        exp_list = [Minifile(file_id="1"),
                    Minifile(file_id="2")]
        self.context_store_mock.list.return_value = exp_list

        act_list = self.storage.list()

        self.assertEqual(exp_list, act_list)
    def testMinifileFromJson(self):
        exp_file = Minifile(file_id='1',
                            file_name='name',
                            chunk_ids=['1', '2', '3'],
                            chunk_size=10)

        ext_dict = exp_file.to_dict()

        self.assertEqual(exp_file, Minifile().from_dict(ext_dict))
    def testMinifileToDict(self):
        minifile = Minifile(file_id='1',
                            file_name='name',
                            chunk_ids=['1', '2', '3'],
                            chunk_size=10)

        d = {
            'file_id': minifile.file_id,
            'file_name': minifile.file_name,
            'chunk_size': minifile.chunk_size,
            'chunk_ids': minifile.chunk_ids
        }

        self.assertEqual(d, minifile.to_dict())
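The two serialization tests above assume a simple Minifile data holder; a minimal sketch consistent with the dictionary keys used here (field and method names inferred from the tests, not the original class) could look like this:

class Minifile:
    # Sketch of the metadata container assumed by the tests; not the original class.
    def __init__(self, file_id=None, file_name=None, chunk_ids=None,
                 chunk_size=None, file_stream=None):
        self.file_id = file_id
        self.file_name = file_name
        self.chunk_ids = chunk_ids if chunk_ids is not None else []
        self.chunk_size = chunk_size
        self.file_stream = file_stream

    def to_dict(self):
        # Serialize only the fields that are set, so an empty Minifile maps to {}
        d = {}
        if self.file_id is not None:
            d['file_id'] = self.file_id
        if self.file_name is not None:
            d['file_name'] = self.file_name
        if self.chunk_size is not None:
            d['chunk_size'] = self.chunk_size
        if self.chunk_ids:
            d['chunk_ids'] = self.chunk_ids
        return d

    def from_dict(self, d):
        # Populate the fields from a dictionary and return self for chaining
        self.file_id = d.get('file_id')
        self.file_name = d.get('file_name')
        self.chunk_ids = d.get('chunk_ids', [])
        self.chunk_size = d.get('chunk_size')
        return self

    def __eq__(self, other):
        return isinstance(other, Minifile) and self.to_dict() == other.to_dict()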
    def testLoadCallsObjectStoreForEachChunk(self):
        mbf = Minifile(chunk_ids=[1, 2, 3, 4, 5])

        self.object_store_mock.load.return_value = KeyValue('id', b'12345')

        self.storage._load(mbf)
        self.object_store_mock.load.assert_has_calls([call(1), call(2), call(3), call(4), call(5)])
    def testDeleteThrowsKeyErrorIfChunkNotFound(self):
        metadata = Minifile(chunk_ids=[1])

        def delete_raise_key_error(id):
            raise KeyError("ObjectStore: Chunk not found id = %s" % id)
        self.storage._object_store.delete = delete_raise_key_error

        self.assertRaises(KeyError, self.storage._delete, metadata)
    def testDeleteCallsContextStoreToLoadFileMetadata(self):
        # Disable the internal _delete method, as it is not relevant to this test
        def disable_internal_delete(any):
            pass

        self.storage._delete = disable_internal_delete
        exp_file = Minifile(file_id="id")
        self.context_store_mock.load.return_value = exp_file
        self.storage.delete(exp_file.file_id)

        self.context_store_mock.load.assert_has_calls([call(exp_file.file_id)])
    def testSaveCallsContextStoreToSaveFileMetadata(self):
        # Disable the internal _save method, as it is not relevant to this test
        def disable_internal_save(any):
            pass

        self.storage._save = disable_internal_save
        file = Minifile()

        self.storage.save(file)

        self.context_store_mock.save.assert_has_calls([call(file)])
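The FileStorage tests in this section share a fixture that injects mocked stores; the exact setUp is not shown on this page, but a minimal version matching the attribute names used above (the FileStorage constructor signature is an assumption) would be:

import unittest
from unittest.mock import MagicMock, call

class FileStorageTest(unittest.TestCase):
    def setUp(self):
        # Hypothetical wiring: FileStorage is assumed to accept the two stores
        # as constructor arguments; the real signature may differ.
        self.context_store_mock = MagicMock()
        self.object_store_mock = MagicMock()
        self.storage = FileStorage(context_store=self.context_store_mock,
                                   object_store=self.object_store_mock)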
def list_files():
    headers = {
        'cache-control': "no-cache",
    }

    response = requests.request("GET", based_uri, headers=headers)

    # Validating response
    assert response.status_code == 200

    file_dicts = response.json()
    return [Minifile().from_dict(d) for d in file_dicts]
    def testLoadReturnsMinifile(self):
        # Disable the internal _load method, as it is not relevant to this test
        def disable_internal_load(any):
            pass

        self.storage._load = disable_internal_load
        exp_file = Minifile(file_id="id", file_name="file")

        self.context_store_mock.load.return_value = exp_file
        act_file = self.storage.load(exp_file.file_id)

        self.assertEqual(exp_file, act_file)
    def testDeleteReturnsMinifileOfDeletedFile(self):
        # Disable the internal _delete method, as it is not relevant to this test
        def disable_internal_delete(any):
            pass

        self.storage._delete = disable_internal_delete
        exp_file = Minifile(file_id="id")
        self.context_store_mock.load.return_value = exp_file

        act_file = self.storage.delete(exp_file.file_id)

        self.assertEqual(exp_file, act_file)
def upload_file(file_name):
    # Open the file in a context manager so the handle is closed after the upload
    with open(file_name, 'rb') as stream:
        files = {'file': (file_name, stream)}
        response = requests.request("POST", based_uri + '/upload', files=files)

    # Validating response
    assert response.status_code == 200

    minifile = Minifile().from_dict(response.json())

    assert minifile.file_id
    assert minifile.file_name == file_name

    return minifile
def delete_file(file_id):
    url = based_uri + "/delete/" + file_id

    headers = {
        'cache-control': "no-cache",
    }

    response = requests.request("DELETE", url, headers=headers)
    assert response.status_code == 200

    minifile = Minifile().from_dict(response.json())

    return minifile
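Together, upload_file, list_files and delete_file form a small round-trip client for the REST API; a usage sketch (the based_uri value is hypothetical and depends on where the service runs) might be:

based_uri = "http://localhost:5000/files"  # assumed endpoint, adjust to the actual deployment

uploaded = upload_file("example.txt")
print("uploaded:", uploaded.file_id, uploaded.file_name)

for stored in list_files():
    print("stored:", stored.file_id, stored.file_name)

deleted = delete_file(uploaded.file_id)
assert deleted.file_id == uploaded.file_id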
    def testSaveCallsObjectStoreForEachChunk(self):

        # Make chunk id generation deterministic for the assertions below
        FileStorage.generate_object_id = lambda x: 'id'

        stream = io.BytesIO('12345'.encode())
        mbf = Minifile(file_stream=stream, chunk_size=1)

        self.storage._save(mbf)

        self.object_store_mock.save.assert_has_calls([call(KeyValue('id', b'1')),
                                                      call(KeyValue('id', b'2')),
                                                      call(KeyValue('id', b'3')),
                                                      call(KeyValue('id', b'4')),
                                                      call(KeyValue('id', b'5'))])
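The chunking behaviour asserted here, one object-store write per chunk_size slice of the stream with a freshly generated id recorded on the Minifile, could be implemented roughly as below. This is a sketch inferred from the tests (including the MIN/MAX_CHUNK_SIZE and empty-stream tests further down), not the project's actual _save; KeyValue and generate_object_id are taken from the tests themselves.

    def _save(self, minifile):
        # Sketch of the _save logic implied by the tests; not the original implementation.
        if not (FileStorage.MIN_CHUNK_SIZE <= minifile.chunk_size <= FileStorage.MAX_CHUNK_SIZE):
            raise ValueError("chunk_size out of range")
        while True:
            chunk = minifile.file_stream.read(minifile.chunk_size)
            if not chunk:
                break  # empty stream -> no chunks and no object-store calls
            chunk_id = self.generate_object_id()
            self._object_store.save(KeyValue(chunk_id, chunk))
            minifile.chunk_ids.append(chunk_id)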
    def testLoadReturnsStream(self):
        exp_stream = io.BytesIO('12345'.encode())
        chunk_size = 1

        mbf = Minifile(chunk_size=chunk_size,
                       file_stream=exp_stream,
                       chunk_ids=[1, 2, 3, 4, 5])

        # Workaround: replace the method directly, as this behaviour was hard to express with the Mock library
        def load_return_exp_stream(id):
            return KeyValue(id, exp_stream.read(chunk_size))

        self.storage._object_store.load = load_return_exp_stream
        self.storage._load(mbf)

        act_stream = mbf.file_stream
        exp_stream.seek(0)
        self.assertEqual(act_stream.read(), exp_stream.read())
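Symmetrically, the load tests expect _load to fetch every chunk id from the object store and rebuild the file stream; a sketch consistent with those assertions (assuming KeyValue exposes its payload as .value, and again not the original implementation) is:

    def _load(self, minifile):
        # Sketch: rebuild the stream from its chunks, one object-store read per id.
        if not minifile.chunk_ids:
            raise ValueError("file metadata has no chunks")
        buffer = io.BytesIO()
        for chunk_id in minifile.chunk_ids:
            key_value = self._object_store.load(chunk_id)
            buffer.write(key_value.value)
        buffer.seek(0)
        minifile.file_stream = buffer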
    def testSaveSplitsStreamInTenChunks(self):
        file = Minifile(file_stream=io.BytesIO('1234567890'.encode()), chunk_size=1)

        self.storage._save(file)

        self.assertEqual(len(file.chunk_ids), 10)
    def testSaveThrowsValueErrorIfChunkSizeLessThanMinChunkSize(self):
        self.assertRaises(ValueError, self.storage._save, Minifile(chunk_size=FileStorage.MIN_CHUNK_SIZE - 1))
    def testMinifileToDictWhenNoFields(self):
        self.assertEqual({}, Minifile().to_dict())
    def testSaveThrowsValueErrorIfChunkSizeGreaterThanMaxChunkSize(self):
        self.assertRaises(ValueError, self.storage._save, Minifile(chunk_size=FileStorage.MAX_CHUNK_SIZE + 1))
    def list(self):
        return [
            Minifile().from_dict(json.loads(obj.value))
            for obj in ContextKeyValue.objects().all()
        ]
    def load(self, file_id):
        return Minifile().from_dict(
            json.loads(ContextKeyValue.get(key=file_id).value))
    def testSaveGeneratesNoChunksIfStreamIsEmpty(self):
        file = Minifile(file_stream=io.BytesIO(), chunk_size=1)

        self.storage._save(file)

        self.assertTrue(len(file.chunk_ids) == 0)
    def testLoadThrowsValueErrorIfFileMetadataHasNoChunks(self):
        mbf = Minifile()
        self.assertRaises(ValueError, self.storage._load, mbf)
    def list(self):
        return [
            Minifile().from_dict(json.loads(obj))
            for obj in self._memory_db.values()
        ]
    def load(self, file_id):
        res = requests.get(self.rest_endpoint + '/%s' % file_id)
        if res.status_code != 200:
            raise IOError('Could not load the file')
        return Minifile().from_dict(res.json())
    def testDeleteThrowsValueErrorIfFileMetadataHasNoChunks(self):
        mbf = Minifile()
        self.assertRaises(ValueError, self.storage._delete, mbf)
    def testDeleteCallsObjectStoreDeleteForEachChunkId(self):
        mbf = Minifile(chunk_ids=[1, 2, 3])

        calls = [call(1), call(2), call(3)]
        self.storage._delete(mbf)
        self.object_store_mock.delete.assert_has_calls(calls)
    def testSaveDoesNotCallObjectStoreIfStreamIsEmpty(self):
        mbf = Minifile(file_stream=io.BytesIO(), chunk_size=1)

        self.storage._save(mbf)

        self.object_store_mock.save.assert_not_called()
    def list(self):
        res = requests.get(self.rest_endpoint)
        if res.status_code != 200:
            raise IOError('Could not list files')
        return [Minifile().from_dict(x) for x in res.json()]
    def load(self, file_id):
        return Minifile().from_dict(json.loads(self._memory_db[file_id]))
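The in-memory context store above only shows list and load; hedged counterparts for save and delete over the same _memory_db dict (method names assumed to mirror the other store implementations) might look like:

    def save(self, minifile):
        # Sketch: persist the metadata as a JSON string keyed by file_id.
        self._memory_db[minifile.file_id] = json.dumps(minifile.to_dict())

    def delete(self, file_id):
        # Sketch: remove and return the deleted metadata; raises KeyError if missing.
        return Minifile().from_dict(json.loads(self._memory_db.pop(file_id)))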