def setUp():
    '''Build the store instances under test and ensure the remote test directories exist.

    Appends four wrapped stores to the module-level ``io_apis`` list:
    each of the two caching wrappers applied to a Sugarsync backend and to a
    metadata-caching Dropbox backend.
    '''
    dropbox_store = DropboxStore(get_dropbox_config())
    sugarsync_config = get_sugarsync_config()
    # Apply each caching wrapper to both backends; a fresh SugarsyncStore per
    # wrapper, but the single dropbox_store shared behind fresh metadata caches.
    for wrapper in (TransparentChunkMultiprocessingCachingStore,
                    TransparentMultiprocessingCachingStore):
        io_apis.append(wrapper(SugarsyncStore(sugarsync_config)))
        io_apis.append(wrapper(MetadataCachingStore(dropbox_store)))
    time.sleep(10)
    for api in io_apis:
        for directory in (REMOTE_TESTDIR_PART1, REMOTE_TESTDIR):
            try:
                api.create_directory(directory)
            except AlreadyExistsError:
                pass  # directory already present from an earlier run - fine
def _initialize_store(self):
    '''Parametrize the store implementation with the settings in the configuration file.

    Reads the store section of the configuration (service name, cache
    parameters), merges it with the service authentication data, creates the
    raw store via ``self.__get_new_store``, and then wraps it in caching /
    monitoring layers depending on the configured cache times. Finally the
    finished store is handed to ``self.pyfusebox``.
    '''
    self.logger.debug("_initialize_store:")
    conf = self.get_store_config_data()
    service = conf['name']
    self.logger.debug("got service name")
    cache_time = int(conf.get('cache', 240))
    # Renamed from ``type`` to avoid shadowing the builtin; non-empty value
    # selects the chunk-based caching wrapper.
    store_type = conf.get('type', '')  # chunk
    # BUGFIX: was conf.get(('max_chunk_size', 4)) - a tuple key, which always
    # returned None instead of the configured value or the default of 4.
    max_chunk_size = conf.get('max_chunk_size', 4)
    metadata_cache_time = int(conf.get('metadata_cache', 0))
    cache_size = int(conf.get('cache_size', 2000))
    hard_cache_size_limit = int(conf.get('hard_cache_size_limit', 10000))
    cache_id = str(conf.get('cache_id', random.random()))
    cache_dir = str(
        conf.get('cache_dir',
                 os.path.expanduser("~") + '/.cache/cloudfusion'))
    # Remove trailing slash(es); rstrip also copes with an empty configured
    # path, where the old cache_dir[-1] indexing would raise IndexError.
    cache_dir = cache_dir.rstrip('/')
    self.logger.debug("got cache parameter")
    auth = self.get_service_auth_data()
    self._unify_auth(auth)
    # workaround; Dropbox needs access to cache_id to create a temporary
    # directory with its name, to distinguish sessions
    auth['cache_id'] = cache_id
    bucket_name = auth.get('bucket_name', 'cloudfusion')
    auth['bucket_name'] = bucket_name
    self.logger.debug("got auth data: %s" % auth)
    config = auth
    config['cache_dir'] = cache_dir
    store = self.__get_new_store(service, config)  # catch error?
    self.logger.debug("initialized store")
    # Wrapper selection: chunk store if a type is configured, otherwise the
    # combination of data/metadata caching implied by the cache times.
    if store_type != '':
        store = TransparentChunkMultiprocessingCachingStore(
            MetadataCachingStore(store, 24 * 60 * 60 * 365), cache_time,
            cache_size, hard_cache_size_limit, cache_id, max_chunk_size,
            cache_dir)
    elif cache_time > 0 and metadata_cache_time > 0:
        store = TransparentMultiprocessingCachingStore(
            MetadataCachingStore(store, metadata_cache_time), cache_time,
            cache_size, hard_cache_size_limit, cache_id, cache_dir)
    elif cache_time > 0:
        store = TransparentMultiprocessingCachingStore(
            store, cache_time, cache_size, hard_cache_size_limit, cache_id,
            cache_dir)
    elif metadata_cache_time > 0:
        store = MetadataCachingStore(store, metadata_cache_time)
    self.pyfusebox.store = store
    self.logger.debug("initialized service")
    self.pyfusebox.store_initialized = True
def test_dropbox():
    '''Yield the generic store tests for a plain and a metadata-caching DropboxStore.'''
    global store
    store = DropboxStore(get_dropbox_config())
    # (store under test, human-readable label for the generated tests)
    cases = [
        (store, "DropboxStore"),
        (MetadataCachingStore(store), "MetaDataCache DropboxStore"),
    ]
    for subject, label in cases:
        for test in _generate_store_tests(subject, label):
            yield test
def test_local():
    '''Yield the generic store tests for a LocalHDStore under every caching wrapper combination.

    Space tests are skipped for all variants since a local directory has no
    meaningful quota.
    '''
    global store
    store = LocalHDStore(get_local_config())
    # (store under test, human-readable label for the generated tests)
    cases = [
        (store, "LocalHDStore"),
        (MetadataCachingStore(store), "MetaDataCache LocalHDStore"),
        (TransparentMultiprocessingCachingStore(store),
         "TransparentCachingStore LocalHDStore"),
        (TransparentMultiprocessingCachingStore(MetadataCachingStore(store)),
         "TransparentCachingStore MetaDataCache LocalHDStore"),
        (TransparentChunkMultiprocessingCachingStore(store),
         "TransparentChunkCachingStore LocalHDStore"),
        (TransparentChunkMultiprocessingCachingStore(MetadataCachingStore(store)),
         "TransparentChunkCachingStore MetaDataCache LocalHDStore"),
    ]
    for subject, label in cases:
        for test in _generate_store_tests(subject, label,
                                          include_space_tests=False):
            yield test
def test_gdrive():
    '''Yield store tests for GoogleDrive plus its chunk- and metadata-caching wrappers.'''
    global store
    store = GoogleDrive(get_gdrive_config())
    metadatacache_store = MetadataCachingStore(store)
    chunkcache_store = ChunkMultiprocessingCachingStore(store)
    # (store under test, label, whether to run the free-space tests)
    cases = [
        (store, "GoogleDrive", True),
        (chunkcache_store, "ChunkCachingStore GoogleDrive", True),
        (metadatacache_store, "MetaDataCache GoogleDrive", False),
    ]
    for subject, label, with_space in cases:
        if with_space:
            generated = _generate_store_tests(subject, label)
        else:
            generated = _generate_store_tests(subject, label,
                                              include_space_tests=False)
        for test in generated:
            yield test
def test_google():
    '''Yield store, bulk-metadata, and metadata-cache tests for the Google Storage backend.

    The second argument of the generator helpers is only a human-readable
    label used in the generated test descriptions.
    '''
    global store
    config = get_google_config()
    store = BulkGetMetadataGoogleStore(config)
    metadatacache_store = MetadataCachingStore(store)
    for test in _generate_store_tests(store, "BulkGetMetadataGoogleStore"):
        yield test
    # BUGFIX: label previously said "BulkGetMetadataAmazonStore" (copy/paste
    # slip) although the store under test is the Google store.
    for test in _generate_bulk_get_metadata_tests(
            store, "BulkGetMetadataGoogleStore"):
        yield test
    for test in _generate_store_tests(
            metadatacache_store,
            "MetaDataCache BulkGetMetadataGoogleStore",
            include_space_tests=False):
        yield test
def test_webdav_gmx():
    '''Yield store, bulk-metadata, and caching-wrapper tests for the GMX WebDAV backend.

    Space tests are skipped for the store-test variants.
    '''
    global store
    store = BulkGetMetadataWebdavStore(get_webdav_gmx_config())
    metadatacache_store = MetadataCachingStore(store)
    transparent_store = TransparentMultiprocessingCachingStore(
        MetadataCachingStore(store))
    for test in _generate_store_tests(store,
                                      "BulkGetMetadataWebdavStore gmx",
                                      include_space_tests=False):
        yield test
    for test in _generate_bulk_get_metadata_tests(
            store, "BulkGetMetadataWebdavStore gmx"):
        yield test
    # (wrapped store, label) pairs that share the same call shape.
    wrapped_cases = [
        (metadatacache_store, "MetaDataCache BulkGetMetadataWebdavStore gmx"),
        (transparent_store, "TransparentCachingStore MetadataCache gmx"),
    ]
    for subject, label in wrapped_cases:
        for test in _generate_store_tests(subject, label,
                                          include_space_tests=False):
            yield test
def test_webdav_yandex():
    '''Yield store, bulk-metadata, and chunk-caching tests for the Yandex WebDAV backend.'''
    global store
    store = BulkGetMetadataWebdavStore(get_webdav_yandex_config())
    transparent_store = TransparentChunkMultiprocessingCachingStore(
        MetadataCachingStore(store))
    for test in _generate_store_tests(store,
                                      "BulkGetMetadataWebdavStore yandex"):
        yield test
    for test in _generate_bulk_get_metadata_tests(
            store, "BulkGetMetadataWebdavStore yandex"):
        yield test
    for test in _generate_store_tests(
            transparent_store,
            "TransparentChunkCachingStore MetadataCache yandex",
            include_space_tests=False):
        yield test