def __init__(self, namespace, concurrency=50, error_file=None, rebuild_file=None):
    """Prepare clients, report writers and counters for a namespace check.

    :param namespace: name of the namespace to check
    :param concurrency: size of the greenthread pool
    :param error_file: optional path where errors are appended (CSV)
    :param rebuild_file: optional path where rebuild entries are appended
    """
    self.pool = GreenPool(concurrency)

    self.error_file = error_file
    if self.error_file:
        # Append mode: keep reports from previous runs.
        self.error_writer = csv.writer(open(self.error_file, 'a'),
                                       delimiter=' ')

    self.rebuild_file = rebuild_file
    if self.rebuild_file:
        self.rebuild_writer = csv.writer(open(self.rebuild_file, 'a'),
                                         delimiter='|')

    conf = {'namespace': namespace}
    self.account_client = AccountClient(conf)
    self.container_client = ContainerClient(conf)
    self.blob_client = BlobClient()

    # Statistics gathered while checking.
    for counter in ('accounts_checked', 'containers_checked',
                    'objects_checked', 'chunks_checked',
                    'account_not_found', 'container_not_found',
                    'object_not_found', 'chunk_not_found',
                    'account_exceptions', 'container_exceptions',
                    'object_exceptions', 'chunk_exceptions'):
        setattr(self, counter, 0)

    self.list_cache = {}
    self.running = {}
def init(self):
    """Configure the worker from ``self.conf`` and load event handlers.

    :raises ValueError: when 'handlers_conf' is missing from the conf
    """
    conf = self.conf
    self.concurrency = int_value(conf.get('concurrency'), 10)
    self.tube = conf.get("tube", DEFAULT_TUBE)
    refresh = int_value(conf.get('acct_refresh_interval'), 3600)
    self.app_env['account_client'] = AccountClient(
        conf, logger=self.logger, refresh_delay=refresh,
        pool_connections=3,  # 1 account, 1 proxy, 1 extra
    )
    self.app_env['rdir_client'] = RdirClient(
        conf, logger=self.logger,
        pool_maxsize=self.concurrency,  # 1 cnx per greenthread per host
    )
    if 'handlers_conf' not in conf:
        raise ValueError("'handlers_conf' path not defined in conf")
    self.handlers = loadhandlers(
        conf.get('handlers_conf'), global_conf=conf, app=self)
    # Warn about options that no longer have any effect.
    deprecated = ('acct_update', 'rdir_update',
                  'retries_per_second', 'batch_size')
    for opt in deprecated:
        if opt in conf:
            self.logger.warn('Deprecated option: %s', opt)
    super(EventWorker, self).init()
def __init__(self, app, conf, **kwargs):
    """Wire a logger and an account client into the filter."""
    logger = get_logger(conf)
    self.logger = logger
    super(AccountUpdateFilter, self).__init__(
        app, conf, logger=logger, **kwargs)
    self.account = AccountClient(conf, logger=logger)
def __init__(self, conf, accounts=None, **kwargs):
    """Set up the rebuilder state and its account client.

    :param conf: configuration dictionary
    :param accounts: the accounts to process (input)
    """
    super(AccountRebuilder, self).__init__(conf, **kwargs)
    self._accounts_to_refresh = set()
    self._accounts_refreshed = eventlet.Queue()
    # Input of the rebuild process.
    self.accounts = accounts
    self.account_client = AccountClient(self.conf, logger=self.logger)
def _build_account_client(self, **kwargs):
    """Build an AccountClient whose network layer is mocked out.

    The fake request always answers with a one-container listing.
    """
    fake_endpoint = "http://1.2.3.4:8000"
    fake_resp = FakeApiResponse()
    listing_body = {"listing": [['ct', 0, 0, 0]]}
    client = AccountClient(
        {'namespace': 'fake'},
        endpoint=fake_endpoint, proxy_endpoint=fake_endpoint, **kwargs)
    client._direct_request = Mock(return_value=(fake_resp, listing_body))
    client._get_account_addr = Mock(return_value=fake_endpoint)
    return client
def test_create_without_account(self):
    """Creating a container must autocreate its (missing) account."""
    account = random_str(32)
    name = random_str(32)
    client = AccountClient(self.conf)
    # The account must not exist before the container is created.
    self.assertRaises(exc.NotFound, client.account_show, account)
    self.api.create(account, name)
    # Give the account service time to consume the creation event.
    time.sleep(0.5)
    self.assertEqual(client.account_show(account)['id'], account)
    # Clean up what this test created.
    self.api.delete(account, name)
    client.account_delete(account)
def setUp(self):
    """Create a fresh account holding two containers."""
    super(TestAccountClient, self).setUp()
    self.account_id = "test_account_%f" % time.time()
    self.account_client = AccountClient(self.conf)
    self.container_client = ContainerClient(self.conf)
    self.account_client.account_create(self.account_id)
    for ref in ("container1", "container2"):
        self.container_client.container_create(
            acct=self.account_id, ref=ref)
    # Wait for the container events to be processed.
    time.sleep(.5)
def __init__(self, namespace, **kwargs):
    """Build the meta2 'content' endpoint and the clients used to check it."""
    proxy = load_namespace_conf(namespace).get('proxy')
    # "http:/" gains its second slash when joined with the next part.
    endpoint = "/".join(["http:/", proxy, "v3.0", namespace, "content"])
    super(CheckMeta2, self).__init__(
        namespace, "meta2", endpoint=endpoint, **kwargs)
    self.account = AccountClient({"namespace": self.ns})
    self.container = ContainerClient({"namespace": self.ns})
    self.directory = DirectoryClient({"namespace": self.ns})
    self.reference = random_buffer('0123456789ABCDEF', 64)
def __init__(self, namespace, concurrency=50, error_file=None,
             rebuild_file=None, full=True, limit_listings=0,
             request_attempts=1):
    """Prepare clients, report writers and counters for a namespace check.

    :param namespace: name of the namespace to check
    :param concurrency: size of the greenthread pool
    :param error_file: optional path where errors are appended (CSV)
    :param rebuild_file: optional path where rebuild entries are appended
    :param full: whether to run a full check
    :param limit_listings: 0 -> do not limit,
        1 -> limit account listings (list of containers),
        2 -> limit container listings (list of objects).
        Optimisation for when only one object or one container is checked.
    :param request_attempts: how many times each request may be attempted
    """
    self.pool = GreenPool(concurrency)
    self.error_file = error_file
    self.full = bool(full)
    self.limit_listings = limit_listings

    if self.error_file:
        # Append mode: keep reports from previous runs.
        self.error_writer = csv.writer(open(self.error_file, 'a'),
                                       delimiter=' ')

    self.rebuild_file = rebuild_file
    if self.rebuild_file:
        self.rebuild_writer = csv.writer(open(self.rebuild_file, 'a'),
                                         delimiter='|')

    conf = {'namespace': namespace}
    retries = request_attempts - 1
    self.account_client = AccountClient(conf, max_retries=retries)
    self.container_client = ContainerClient(
        conf, max_retries=retries, request_attempts=request_attempts)
    self.blob_client = BlobClient(conf=conf)

    # Statistics gathered while checking.
    for counter in ('accounts_checked', 'containers_checked',
                    'objects_checked', 'chunks_checked',
                    'account_not_found', 'container_not_found',
                    'object_not_found', 'chunk_not_found',
                    'account_exceptions', 'container_exceptions',
                    'object_exceptions', 'chunk_exceptions'):
        setattr(self, counter, 0)

    self.list_cache = {}
    self.running = {}
def main(myid, queue, concurrency, delay=5.0, duration=DURATION):
    """Run one benchmark process.

    Spawns `concurrency` greenthreads running `create_loop` (defined
    elsewhere; presumably it creates containers and pushes their names
    into `results` -- TODO confirm), collects results for `duration`
    seconds while printing a progress line every `delay` seconds, then
    deletes the created containers and pushes the measured update rate
    into `queue`.
    """
    counter = 0  # updates collected since the last progress report
    created = list()  # every result collected from the workers
    # Oversize the queue so workers rarely block on a full queue.
    results = LightQueue(concurrency * 10)
    pool = GreenPool(concurrency)
    api = AccountClient({'namespace': NS}, pool_maxsize=concurrency+1)
    now = start = checkpoint = time.time()
    pool.starmap(create_loop,
                 [(api, 'buck-%d-%d' % (myid, n), results)
                  for n in range(concurrency)])
    while now - start < duration:
        try:
            res = results.get(timeout=delay)
            created.append(res)
            counter += 1
        except Empty:
            # No result within `delay`: fall through to the progress
            # report check below anyway.
            pass
        if now - checkpoint > delay:
            print("Proc %d: %d updates in %fs, %f updates per second." % (
                myid, counter, now - checkpoint,
                counter / (now - checkpoint)))
            counter = 0
            checkpoint = now
        now = time.time()
    # Time is up: stop the workers, then drain what they already queued.
    for coro in pool.coroutines_running:
        coro.kill()
    while not results.empty():
        created.append(results.get(block=False))
    end = time.time()
    rate = len(created) / (end - start)
    print("Proc %d: end. %d updates in %fs, %f updates per second." % (
        myid, len(created), end - start, rate))
    time.sleep(2)
    print("Proc %d: cleaning..." % myid)
    del_req = {'dtime': time.time()}
    # Do not delete twice (or an exception is raised)
    uniq_ct = set(created)
    for _ in pool.starmap(api.container_update,
                          [(ACCOUNT, n, del_req) for n in uniq_ct]):
        pass
    pool.waitall()
    queue.put(rate)
    return 0
def __init__(self, conf, logger):
    """Store the configuration and build clients, counters and tunables.

    :param conf: configuration dictionary (must contain CONF_ACCOUNT)
    :param logger: logger instance
    """
    self.conf = conf
    self.logger = logger
    self.account = conf[CONF_ACCOUNT]
    self.container_client = ContainerClient(conf)
    self.account_client = AccountClient(conf)
    self.content_factory = ContentFactory(conf)

    # Run statistics.
    self.passes = 0
    self.errors = 0
    self.last_reported = 0
    self.contents_run_time = 0
    self.total_contents_processed = 0

    # Tunables, read from the configuration with defaults.
    self.report_interval = int_value(conf.get('report_interval'), 3600)
    self.max_contents_per_second = int_value(
        conf.get('contents_per_second'), 30)
    self.container_fetch_limit = int_value(
        conf.get('container_fetch_limit'), 100)
    self.content_fetch_limit = int_value(
        conf.get('content_fetch_limit'), 100)
    self.outdated_threshold = int_value(
        conf.get(CONF_OUTDATED_THRESHOLD), 9999999999)
    self.new_policy = conf.get(CONF_NEW_POLICY)
def setUp(self):
    """Create a fresh account (with retries) holding two containers."""
    super(TestAccountClient, self).setUp()
    self.account_id = "test_account_%f" % time.time()
    self.account_client = AccountClient(self.conf)
    self.container_client = ContainerClient(self.conf)
    # The account service may not be ready yet: try up to 4 times,
    # pausing between attempts, and re-raise on the last failure.
    attempts = 4
    for attempt in range(attempts):
        try:
            self.account_client.account_create(self.account_id)
            break
        except ClientException:
            if attempt >= attempts - 1:
                raise
            time.sleep(2)
    for ref in ("container1", "container2"):
        self.container_client.container_create(
            account=self.account_id, reference=ref)
    # Wait for the container events to be processed.
    time.sleep(.5)
def __init__(self, namespace, logger=None, **kwargs):
    """
    Initialize the object storage API.

    :param namespace: name of the namespace to interract with
    :type namespace: `str`

    :keyword connection_timeout: connection timeout towards rawx services
    :type connection_timeout: `float` seconds
    :keyword read_timeout: timeout for rawx responses and data reads from
        the caller (when uploading)
    :type read_timeout: `float` seconds
    :keyword write_timeout: timeout for rawx write requests
    :type write_timeout: `float` seconds
    :keyword pool_manager: a pooled connection manager that will be used
        for all HTTP based APIs (except rawx)
    :type pool_manager: `urllib3.PoolManager`
    """
    self.namespace = namespace
    conf = {"namespace": namespace}
    self.logger = logger or get_logger(conf)
    # Keep only the recognized timeout keywords, converted to float.
    timeout_keys = self.__class__.TIMEOUT_KEYS
    self.timeouts = {tok: float_value(tov, None)
                     for tok, tov in kwargs.items()
                     if tok in timeout_keys}

    from oio.account.client import AccountClient
    from oio.container.client import ContainerClient
    from oio.directory.client import DirectoryClient
    self.directory = DirectoryClient(conf, logger=self.logger, **kwargs)
    self.container = ContainerClient(conf, logger=self.logger, **kwargs)
    # For AccountClient, "endpoint" designates the account service
    # itself, not the proxy, so move the caller's endpoint over to
    # proxy_endpoint before building the client.
    acct_kwargs = dict(kwargs)
    acct_kwargs["proxy_endpoint"] = acct_kwargs.pop("endpoint", None)
    self.account = AccountClient(conf, logger=self.logger, **acct_kwargs)
action='store_true', default=False, help='Start a controller bond to the given addresses') parser.add_argument("endpoints", metavar='ENDPOINT', type=str, nargs='+', help='Endpoints to connect/bind to') args = parser.parse_args() zctx = zmq.Context() if args.controller: s = ObjectStorageApi("benchmark") #Creating account ac = AccountClient({"namespace": "benchmark"}) retry = 3 for i in range(retry + 1): try: ac.account_create("benchmark_account") break except ClientException: if i < retry: time.sleep(2) else: raise #Creating Container s.container_create(account="benchmark_account", reference="container1") controller(zctx, args.endpoints, ac, s)
def account_client(self):
    """Lazily build, cache and return the account client."""
    if self._account_client is not None:
        return self._account_client
    from oio.account.client import AccountClient
    self._account_client = AccountClient(
        self.client_conf, pool_manager=self.pool_manager)
    return self._account_client
def setUpClass(cls):
    """Build one shared account client and expose it to the app env."""
    super(TestReplicateFilter, cls).setUpClass()
    client = AccountClient({'namespace': cls._cls_ns})
    cls.account_client = client
    _App.app_env['account_client'] = client
def init(self):
    """Create the account client used by this component, reusing its
    configuration and logger."""
    self.account = AccountClient(self.conf, logger=self.logger)