Example #1
 def chunk_get(self, url, **kwargs):
     req_id = kwargs.get('req_id')
     if not req_id:
         req_id = utils.request_id()
     reader = ChunkReader([{'url': url}], READ_BUFFER_SIZE,
                          {'X-oio-req-id': req_id})
     # This must be done now if we want to access headers
     stream = reader.stream()
     headers = extract_headers_meta(reader.headers)
     return headers, stream

 def object_fetch(self, account, container, obj, ranges=None, headers=None, key_file=None):
     if not headers:
         headers = dict()
     if "X-oio-req-id" not in headers:
         headers["X-oio-req-id"] = utils.request_id()
     meta, raw_chunks = self.object_analyze(account, container, obj, headers=headers)
     chunk_method = meta["chunk-method"]
     storage_method = STORAGE_METHODS.load(chunk_method)
     chunks = _sort_chunks(raw_chunks, storage_method.ec)
     meta["container_id"] = utils.name2cid(account, container).upper()
     meta["ns"] = self.namespace
     if storage_method.ec:
         stream = self._fetch_stream_ec(meta, chunks, ranges, storage_method, headers)
     elif storage_method.backblaze:
         stream = self._fetch_stream_backblaze(meta, chunks, ranges, storage_method, key_file)
     else:
         stream = self._fetch_stream(meta, chunks, ranges, storage_method, headers)
     return meta, stream
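
Both methods above fall back to utils.request_id() when the caller does not supply an id. A minimal usage sketch (not from the source), assuming `client` is an instance of the class these methods belong to and that request_id() lives in oio.common.utils:

from oio.common import utils  # assumed import path for request_id()

def fetch_object(client, account='my-account', container='my-container',
                 obj='my-object'):
    # Generate one request id up front so every related call can be
    # correlated in the service logs.
    req_id = utils.request_id()
    meta, stream = client.object_fetch(account, container, obj,
                                       headers={'X-oio-req-id': req_id})
    return meta, b''.join(stream)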
Example #3
    def take_action(self, parsed_args):
        self.log.debug('take_action(%s)', parsed_args)

        reqid = request_id(prefix='CLI-BUCKET-')
        acct_client = self.app.client_manager.storage.account
        metadata = dict()
        if parsed_args.replicate is not None:
            metadata[BUCKET_PROP_REPLI_ENABLED] = str(parsed_args.replicate)
        data = acct_client.bucket_update(parsed_args.bucket,
                                         metadata=metadata,
                                         to_delete=None,
                                         reqid=reqid)

        if parsed_args.formatter == 'table':
            from oio.common.easy_value import convert_size

            data['bytes'] = convert_size(data['bytes'])
            data['mtime'] = Timestamp(data.get('mtime', 0.0)).isoformat
        return zip(*sorted(data.items()))
Example #4
    def container_touch(self, account, container, headers=None, **kwargs):
        """
        Trigger a notification about the container state.

        :param account: account the container belongs to
        :type account: `str`
        :param container: name of the container
        :type container: `str`
        :keyword headers: extra headers to send to the proxy
        :type headers: `dict`
        """
        if not headers:
            headers = dict()
        if 'X-oio-req-id' not in headers:
            headers['X-oio-req-id'] = utils.request_id()
        self.container.container_touch(account,
                                       container,
                                       headers=headers,
                                       **kwargs)
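
A hedged usage sketch for container_touch(), assuming `client` is an instance of the class above; the caller supplies its own request id so the touch can be traced:

from oio.common import utils  # assumed import path for request_id()

def touch_container(client, account, container, reqid=None):
    # Reuse the caller's request id if provided, otherwise generate one.
    headers = {'X-oio-req-id': reqid or utils.request_id()}
    client.container_touch(account, container, headers=headers)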
Example #5
    def update_index(self, path, chunk_id):
        with open(path) as file_:
            try:
                meta = None
                if self.convert_chunks and self.converter:
                    _, meta = self.converter.convert_chunk(file_, chunk_id)
                if meta is None:
                    meta, _ = read_chunk_metadata(file_, chunk_id)
            except exc.MissingAttribute as err:
                raise exc.FaultyChunk(err)

            data = {'mtime': int(time.time())}
            headers = {REQID_HEADER: request_id('blob-indexer-')}
            self.index_client.chunk_push(self.volume_id,
                                         meta['container_id'],
                                         meta['content_id'],
                                         meta['chunk_id'],
                                         headers=headers,
                                         **data)
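
The indexer tags its requests with a prefixed request id through the REQID_HEADER constant, which resolves to 'X-oio-req-id' elsewhere in these examples. A tiny sketch of that pattern (the import paths are assumptions):

from oio.common.constants import REQID_HEADER  # assumed location
from oio.common.utils import request_id        # assumed location

# The prefix makes it obvious in the logs which component issued the request.
headers = {REQID_HEADER: request_id('blob-indexer-')}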
Example #6
 def chunk_copy(self, from_url, to_url, **kwargs):
     stream = None
     req_id = kwargs.get('req_id')
     if not req_id:
         req_id = utils.request_id()
     try:
         meta, stream = self.chunk_get(from_url, req_id=req_id)
         meta['chunk_id'] = to_url.split('/')[-1]
         # FIXME: the original keys are the good ones.
         # ReplicatedChunkWriteHandler should be modified to accept them.
         meta['id'] = meta['content_id']
         meta['version'] = meta['content_version']
         meta['chunk_method'] = meta['content_chunkmethod']
         meta['policy'] = meta['content_policy']
         copy_meta = self.chunk_put(to_url, meta, stream, req_id=req_id)
         return copy_meta
     finally:
         if stream:
             stream.close()
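
chunk_copy() only generates a request id when the caller did not pass one. A hypothetical driver reusing a single id for a batch of copies:

from oio.common import utils  # assumed import path for request_id()

def copy_chunks(client, url_pairs):
    # `client` is assumed to be an instance of the class above;
    # `url_pairs` is an iterable of (from_url, to_url) tuples.
    req_id = utils.request_id()
    for from_url, to_url in url_pairs:
        client.chunk_copy(from_url, to_url, req_id=req_id)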
Example #8
    def test_0_upload_ok(self):
        """Check that no event is emitted when everything is ok."""
        # Check we have enough service locations.
        self._aggregate_rawx_by_place()

        # Upload an object.
        container = self._random_user()
        reqid = request_id('perfectible-')
        self.api.object_create(self.account, container,
                               obj_name='perfect',
                               data='whatever',
                               policy='THREECOPIES',
                               headers={REQID_HEADER: reqid})

        # Wait on the oio-improve beanstalk tube.
        self.beanstalkd.watch(DEFAULT_IMPROVER_TUBE)
        # Ensure we do not receive any event.
        self.assertRaises(ResponseError, self.beanstalkd.reserve,
                          timeout=REASONABLE_EVENT_DELAY)
Example #9
    def test_upload_warn_dist(self):
        """
        Check that an event is emitted when the warning distance is reached.
        """
        self.wait_for_score(('rawx', ))
        # Check we have enough service locations.
        by_place = self._aggregate_rawx_by_place()

        # Lock all services of the 3rd location.
        banned_loc = list(by_place.keys())[2]
        self._lock_services('rawx', by_place[banned_loc])

        # Upload an object.
        container = self._random_user()
        reqid = request_id('perfectible-')
        self.api.object_create(self.account,
                               container,
                               obj_name='perfectible',
                               data=b'whatever',
                               policy='THREECOPIES',
                               headers={REQID_HEADER: reqid})

        # Wait on the oio-improve beanstalk tube.
        event = self._wait_for_event(timeout=REASONABLE_EVENT_DELAY * 2)

        # Check the content of the event.
        self.assertEqual('storage.content.perfectible', event.event_type)
        self.assertEqual(reqid, event.reqid)
        self.assertEqual(self.account, event.url['account'])
        self.assertEqual(container, event.url['user'])
        self.assertEqual('perfectible', event.url['path'])
        mc = event.data
        self.assertEqual(0, mc['pos'])  # only one metachunk in this test
        lowest_dist = 4
        warn_dist = 4
        for chunk in mc['chunks']:
            qual = chunk['quality']
            if qual['final_dist'] < lowest_dist:
                lowest_dist = qual['final_dist']
            if qual['warn_dist'] < warn_dist:
                warn_dist = qual['warn_dist']
            self.assertEqual(qual['expected_slot'], qual['final_slot'])
        self.assertLessEqual(lowest_dist, warn_dist)
Example #10
    def update_index(self, path, chunk_id):
        with open(path) as f:
            try:
                meta = None
                if self.convert_chunks and self.converter:
                    _, meta = self.converter.convert_chunk(f, chunk_id)
                if meta is None:
                    meta, _ = read_chunk_metadata(f, chunk_id)
            except exc.MissingAttribute as e:
                raise exc.FaultyChunk('Missing extended attribute %s' % e)

            data = {'mtime': int(time.time())}
            headers = {'X-oio-req-id': 'blob-indexer-' + request_id()[:-13]}
            self.index_client.chunk_push(self.volume_id,
                                         meta['container_id'],
                                         meta['content_id'],
                                         meta['chunk_id'],
                                         headers=headers,
                                         **data)
Example #11
 def _should_notify(self, account, container):
     if not self.check_account:
         return True
     now = monotonic_time()
     enabled, last_update = self.cache.get((account, container), (None, 0))
     if now - last_update > self.cache_duration:
         try:
             ctinfo = self.account.container_show(
                 account,
                 container,
                 force_master=self.force_master,
                 connection_timeout=self.connection_timeout,
                 read_timeout=self.read_timeout,
                 reqid=request_id('ev-repl-'))
             enabled = ctinfo.get(BUCKET_PROP_REPLI_ENABLED, False)
             self.cache[(account, container)] = (enabled, now)
         except Exception:
             self.logger.exception(
                 "Not updating the cached value %s=%s for %s/%s",
                 BUCKET_PROP_REPLI_ENABLED, enabled, account, container)
     return enabled
    def test_upload_fallback(self):
        """
        Test that an event is emitted when a fallback service slot is used.
        """
        by_slot = self._aggregate_rawx_by_slot()
        if len(by_slot['rawx-odd']) < 3:
            self.skip('This test requires at least 3 services '
                      'in the "rawx-odd" slot')

        # Lock all services of the 'rawx-even' slot.
        banned_slot = 'rawx-even'
        self._lock_services('rawx', by_slot[banned_slot])

        # Upload an object.
        container = self._random_user()
        reqid = request_id('perfectible-')
        self.api.object_create(self.account,
                               container,
                               obj_name='perfectible',
                               data=b'whatever',
                               policy='THREECOPIES',
                               headers={REQID_HEADER: reqid})

        # Wait on the oio-improve beanstalk tube.
        event = self._wait_for_event(timeout=REASONABLE_EVENT_DELAY * 2)

        # Check the content of the event.
        self.assertEqual('storage.content.perfectible', event.event_type)
        self.assertEqual(reqid, event.reqid)
        self.assertEqual(self.account, event.url['account'])
        self.assertEqual(container, event.url['user'])
        self.assertEqual('perfectible', event.url['path'])
        mc = event.data
        self.assertEqual(0, mc['pos'])  # only one metachunk in this test
        slot_matches = list()
        for chunk in mc['chunks']:
            qual = chunk['quality']
            slot_matches.append(qual['final_slot'] == qual['expected_slot'])
            self.assertNotEqual(qual['final_slot'], banned_slot)
        self.assertIn(False, slot_matches)
    def test_blob_improver_threecopies(self):
        by_slot = self._aggregate_rawx_by_slot()
        if len(by_slot['rawx-odd']) < 3:
            self.skip('This test requires at least 3 services '
                      'in the "rawx-odd" slot')
        # Ensure the distance between services won't be a problem.
        self._aggregate_rawx_by_place()

        # Lock all services of the 'rawx-even' slot.
        banned_slot = 'rawx-even'
        self._lock_services('rawx', by_slot[banned_slot])

        # Upload an object.
        container = self._random_user()
        reqid = request_id('perfectible-')
        chunks, _, _ = self.api.object_create(self.account,
                                              container,
                                              obj_name='perfectible',
                                              data='whatever',
                                              policy='THREECOPIES',
                                              reqid=reqid)

        # Wait for the "perfectible" event to be emitted,
        # but do not consume it.
        job = self.beanstalkd.wait_for_ready_job(
            DEFAULT_IMPROVER_TUBE, timeout=REASONABLE_EVENT_DELAY)
        self.assertIsNotNone(job)
        # "Unlock" the services of the 'rawx-even' slot.
        self._lock_services('rawx', by_slot[banned_slot], score=100)

        self._call_blob_improver_subprocess()

        # Check some changes have been done on the object.
        _, new_chunks = self.api.object_locate(self.account, container,
                                               'perfectible')
        self.assertNotEqual(chunks, new_chunks)
        # Ensure no new "perfectible" event is emitted.
        job = self.beanstalkd.wait_for_ready_job(
            DEFAULT_IMPROVER_TUBE, timeout=REASONABLE_EVENT_DELAY)
        self.assertIsNone(job)
Example #14
    def object_touch(self, account, container, obj, headers=None, **kwargs):
        """
        Trigger a notification about an object
        (as if it just had been created).

        :param account: name of the account the object belongs to
        :type account: `str`
        :param container: name of the container holding the object
        :type container: `str`
        :param obj: name of the object to touch
        :param headers: extra headers to pass to the proxy

        """
        if not headers:
            headers = dict()
        if 'X-oio-req-id' not in headers:
            headers['X-oio-req-id'] = utils.request_id()
        self.container.content_touch(account,
                                     container,
                                     obj,
                                     headers=headers,
                                     **kwargs)
Example #15
    def process(self, env, cb):
        event = Event(env)
        if event.event_type == EventTypes.CONTENT_DELETED:
            url = event.env.get('url')
            chunks = []
            content_headers = list()

            for item in event.data:
                if item.get('type') == 'chunks':
                    chunks.append(item)
                if item.get("type") == 'contents_headers':
                    content_headers.append(item)
            if len(chunks):
                reqid = request_id()
                if not content_headers:
                    chunk_method = guess_storage_method(chunks[0]['id']) + '/'
                else:
                    chunk_method = content_headers[0]['chunk-method']
                handler, storage_method = self._load_handler(chunk_method)
                handler(url, chunks, content_headers, storage_method, reqid)
                return self.app(env, cb)

        return self.app(env, cb)
Example #17
    def process(self, beanstalkd_job):
        job_id = beanstalkd_job['job_id']
        job_config = beanstalkd_job['job_config']
        job_params = job_config['params']

        task = self.tasks.get(job_id)
        if task is not None and task.params_have_changed(job_params):
            task = None
        if task is None:
            job_type = beanstalkd_job['job_type']
            task_class = JOB_TYPES[job_type].TASK_CLASS
            task = task_class(self.conf, job_params, logger=self.logger)
            self.tasks[job_id] = task

        tasks_per_second = job_config['tasks_per_second']
        tasks = beanstalkd_job['tasks']

        task_errors = Counter()
        task_results = Counter()

        tasks_run_time = 0
        for task_id, task_payload in iteritems(tasks):
            tasks_run_time = ratelimit(
                    tasks_run_time, tasks_per_second)

            reqid = job_id + request_id('-')
            reqid = reqid[:STRLEN_REQID]
            try:
                task_result = task.process(task_id, task_payload, reqid=reqid)
                task_results.update(task_result)
            except Exception as exc:
                self.logger.warn('[job_id=%s] Fail to process task %s: %s',
                                 job_id, task_id, exc)
                task_errors[type(exc).__name__] += 1

        return job_id, list(tasks.keys()), task_results, task_errors, \
            beanstalkd_job['beanstalkd_reply']
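
The worker above derives a per-task request id by appending a fresh suffix to the job id and truncating the result to STRLEN_REQID. A standalone sketch of that derivation (the constant's value below is an assumption; the real one comes from oio's constants):

from oio.common.utils import request_id  # assumed import path

STRLEN_REQID = 63                        # assumed value for this sketch
job_id = 'imaginary-job-0001'            # hypothetical job id
reqid = (job_id + request_id('-'))[:STRLEN_REQID]
assert len(reqid) <= STRLEN_REQID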
    def object_create(
        self,
        account,
        container,
        file_or_path=None,
        data=None,
        etag=None,
        obj_name=None,
        content_type=None,
        content_encoding=None,
        metadata=None,
        policy=None,
        headers=None,
        key_file=None,
        **_kwargs
    ):
        """
        Create an object in `container` of `account` with data taken from
        either `data` (str or generator) or `file_or_path` (path to a file
        or file-like object).
        The object will be named after `obj_name` if specified, or after
        the base name of `file_or_path`.
        """
        if (data, file_or_path) == (None, None):
            raise exc.MissingData()
        src = data if data is not None else file_or_path
        if src is file_or_path:
            if isinstance(file_or_path, basestring):
                if not os.path.exists(file_or_path):
                    raise exc.FileNotFound("File '%s' not found." % file_or_path)
                file_name = os.path.basename(file_or_path)
            else:
                try:
                    file_name = os.path.basename(file_or_path.name)
                except AttributeError:
                    file_name = None
            obj_name = obj_name or file_name
        elif isgenerator(src):
            file_or_path = utils.GeneratorReader(src)
            src = file_or_path
        if not obj_name:
            raise exc.MissingName("No name for the object has been specified")

        sysmeta = {"mime_type": content_type, "content_encoding": content_encoding, "etag": etag}

        if not headers:
            headers = dict()
        if "X-oio-req-id" not in headers:
            headers["X-oio-req-id"] = utils.request_id()

        if src is data:
            return self._object_create(
                account,
                container,
                obj_name,
                StringIO(data),
                sysmeta,
                metadata=metadata,
                policy=policy,
                headers=headers,
                key_file=key_file,
            )
        elif hasattr(file_or_path, "read"):
            return self._object_create(
                account,
                container,
                obj_name,
                src,
                sysmeta,
                metadata=metadata,
                policy=policy,
                headers=headers,
                key_file=key_file,
            )
        else:
            with open(file_or_path, "rb") as f:
                return self._object_create(
                    account,
                    container,
                    obj_name,
                    f,
                    sysmeta,
                    metadata=metadata,
                    policy=policy,
                    headers=headers,
                    key_file=key_file,
                )
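
A hypothetical call to the object_create() variant above, uploading in-memory data with a caller-chosen request id (`api` is assumed to be an instance of the surrounding class):

from oio.common import utils  # assumed import path for request_id()

def upload_bytes(api, account, container, name, payload):
    headers = {'X-oio-req-id': utils.request_id()}
    return api.object_create(account, container, obj_name=name, data=payload,
                             content_type='application/octet-stream',
                             headers=headers)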
Example #19
    def move_perfectible_from_event(self,
                                    event,
                                    dry_run=False,
                                    max_attempts=3,
                                    **kwargs):
        """
        Move one or more "perfectible" chunks described in a
        "storage.content.perfectible" event.
        """
        url = event['url']
        reqid = request_id(self.rebuilder.reqid_prefix)
        descr = self.rebuilder._item_to_string(event)
        self.logger.info('Working on %s (reqid=%s)', descr, reqid)
        # There are chances that the set of chunks of the object has
        # changed between the time the event has been emitted and now.
        # It seems a good idea to reload the object metadata and compare.
        content = self.content_factory.get(url['id'],
                                           url['content'],
                                           account=url.get('account'),
                                           container_name=url.get('user'),
                                           reqid=reqid)
        for chunk in event['data']['chunks']:
            found = content.chunks.filter(url=chunk['id']).one()
            if not found:
                raise exceptions.PreconditionFailed(
                    "Chunk %s not found in %s" % (chunk['id'], descr))
            # Chunk quality information is not saved along with object
            # metadata, thus we must fill it now.
            found.quality = chunk['quality']

        moveable = [chunk for chunk in content.chunks if chunk.imperfections]
        moveable.sort(key=lambda x: x.imperfections)

        moves = list()
        errors = list()

        if not moveable:
            self.logger.info('Nothing to do for %s', descr)
            return moves, errors

        for chunk in moveable:
            try:
                src = str(chunk.url)
                # Must do a copy or bad things will happen.
                raw_src = dict(chunk.raw())
                self.logger.debug("Working on %s: %s", src,
                                  chunk.imperfections)
                # TODO(FVE): try to improve all chunks of a metachunk
                # in a single pass
                dst = content.move_chunk(chunk,
                                         check_quality=True,
                                         dry_run=dry_run,
                                         reqid=reqid,
                                         max_attempts=max_attempts,
                                         **kwargs)
                self.logger.debug("%s replaced by %s", src, dst['url'])
                moves.append((raw_src, dst))
            except exceptions.OioException as err:
                self.logger.warn("Could not improve %s: %s", chunk, err)
                errors.append(err)
        return moves, errors
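
move_perfectible_from_event() only reads a few fields of the "storage.content.perfectible" event. The minimal shape it expects, reconstructed from the accesses above (all values are placeholders):

perfectible_event = {
    'url': {
        'id': '0123456789ABCDEF',          # container id
        'content': 'FEDCBA9876543210',     # content id
        'account': 'my-account',
        'user': 'my-container',
    },
    'data': {
        'chunks': [
            {'id': 'http://127.0.0.1:6010/0123ABCD',
             'quality': {'expected_slot': 'rawx-even',
                         'final_slot': 'rawx-odd',
                         'final_dist': 1,
                         'warn_dist': 2}},
        ],
    },
}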
Example #20
 def request_id(self, prefix='CLI-'):
     """
     Get an ID for requests generated by this application.
     """
     return request_id(prefix)
Example #21
 def ensure_request_id_wrapper(*args, **kwargs):
     headers = kwargs['headers']
     if 'X-oio-req-id' not in headers:
         headers['X-oio-req-id'] = request_id()
     return func(*args, **kwargs)
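
The wrapper above refers to a `func` captured from an enclosing decorator that is not shown. A hedged reconstruction of what that decorator likely looks like (the name ensure_request_id and the use of functools.wraps are assumptions):

from functools import wraps

from oio.common.utils import request_id  # assumed import path

def ensure_request_id(func):
    @wraps(func)
    def ensure_request_id_wrapper(*args, **kwargs):
        headers = kwargs['headers']
        if 'X-oio-req-id' not in headers:
            headers['X-oio-req-id'] = request_id()
        return func(*args, **kwargs)
    return ensure_request_id_wrapper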
Example #22
    def object_create(self,
                      account,
                      container,
                      file_or_path=None,
                      data=None,
                      etag=None,
                      obj_name=None,
                      mime_type=None,
                      metadata=None,
                      policy=None,
                      headers=None,
                      key_file=None,
                      **_kwargs):
        """
        Create an object in *container* of *account* with data taken from
        either *data* (`str` or `generator`) or *file_or_path* (path to a file
        or file-like object).
        The object will be named after *obj_name* if specified, or after
        the base name of *file_or_path*.

        :param account: name of the account where to create the object
        :type account: `str`
        :param container: name of the container where to create the object
        :type container: `str`
        :param file_or_path: file-like object or path to a file from which
            to read object data
        :type file_or_path: `str` or file-like object
        :param data: object data (if `file_or_path` is not set)
        :type data: `str` or `generator`
        :keyword etag: entity tag of the object
        :type etag: `str`
        :keyword obj_name: name of the object to create. If not set, will use
            the base name of `file_or_path`.
        :keyword mime_type: MIME type of the object
        :type mime_type: `str`
        :keyword metadata: a dictionary of properties
        :type metadata: `dict`
        :keyword policy: name of the storage policy
        :type policy: `str`
        :param headers: extra headers to pass to the proxy
        :type headers: `dict`
        :keyword key_file:
        """
        if (data, file_or_path) == (None, None):
            raise exc.MissingData()
        src = data if data is not None else file_or_path
        if src is file_or_path:
            if isinstance(file_or_path, basestring):
                if not os.path.exists(file_or_path):
                    raise exc.FileNotFound("File '%s' not found." %
                                           file_or_path)
                file_name = os.path.basename(file_or_path)
            else:
                try:
                    file_name = os.path.basename(file_or_path.name)
                except AttributeError:
                    file_name = None
            obj_name = obj_name or file_name
        elif isgenerator(src):
            file_or_path = utils.GeneratorIO(src)
            src = file_or_path
        if not obj_name:
            raise exc.MissingName("No name for the object has been specified")

        sysmeta = {'mime_type': mime_type, 'etag': etag}

        if not headers:
            headers = dict()
        if 'X-oio-req-id' not in headers:
            headers['X-oio-req-id'] = utils.request_id()

        if src is data:
            return self._object_create(account,
                                       container,
                                       obj_name,
                                       BytesIO(data),
                                       sysmeta,
                                       properties=metadata,
                                       policy=policy,
                                       headers=headers,
                                       key_file=key_file)
        elif hasattr(file_or_path, "read"):
            return self._object_create(account,
                                       container,
                                       obj_name,
                                       src,
                                       sysmeta,
                                       properties=metadata,
                                       policy=policy,
                                       headers=headers,
                                       key_file=key_file)
        else:
            with open(file_or_path, "rb") as f:
                return self._object_create(account,
                                           container,
                                           obj_name,
                                           f,
                                           sysmeta,
                                           properties=metadata,
                                           policy=policy,
                                           headers=headers,
                                           key_file=key_file)
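
A hypothetical call to this object_create(), uploading a local file; obj_name is omitted, so the base name of the path is used, as the docstring states (`api` is assumed to be an instance of the surrounding class):

from oio.common import utils  # assumed import path for request_id()

def upload_file(api, account, container, path):
    headers = {'X-oio-req-id': utils.request_id()}
    return api.object_create(account, container, file_or_path=path,
                             mime_type='text/plain',
                             headers=headers)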
Example #23
    def test_HEAD_chunk(self):
        length = 100
        chunkid = random_chunk_id()
        chunkdata = random_buffer(string.printable, length).encode('utf-8')
        chunkurl = self._rawx_url(chunkid)
        self._check_not_present(chunkurl)
        headers = self._chunk_attr(chunkid, chunkdata)
        metachunk_size = 9 * length
        metachunk_hash = md5(chunkdata).hexdigest()
        # TODO should also include meta-chunk-hash
        trailers = {'x-oio-chunk-meta-metachunk-size': str(metachunk_size),
                    'x-oio-chunk-meta-metachunk-hash': metachunk_hash}
        # Initial put that must succeed
        resp, body = self._http_request(chunkurl, 'PUT', chunkdata, headers,
                                        trailers)
        self.assertEqual(201, resp.status)

        # default HEAD
        resp, body = self._http_request(chunkurl, 'HEAD', "", {})
        self.assertEqual(200, resp.status)

        # Check the hash
        resp, body = self._http_request(
            chunkurl, 'HEAD', '',
            {'x-oio-check-hash': True})
        self.assertEqual(200, resp.status)

        # Check the hash with valid header
        resp, body = self._http_request(
            chunkurl, 'HEAD', '',
            {'x-oio-check-hash': True,
             'x-oio-chunk-meta-chunk-hash':
                headers['x-oio-chunk-meta-chunk-hash']})
        self.assertEqual(200, resp.status)

        # Check the hash with invalid header
        resp, body = self._http_request(
            chunkurl, 'HEAD', '',
            {'x-oio-check-hash': True,
             'x-oio-chunk-meta-chunk-hash': 'xxx'})
        self.assertEqual(412, resp.status)
        resp, body = self._http_request(
            chunkurl, 'HEAD', '',
            {'x-oio-check-hash': True,
             'x-oio-chunk-meta-chunk-hash': 'A'*32})
        self.assertEqual(412, resp.status)

        # Corrupt the chunk
        corrupted_data = b'chunk is dead'
        with open(self._chunk_path(chunkid), "wb") as fp:
            fp.write(corrupted_data)

        # Check the hash with corrupted chunk
        resp, body = self._http_request(
            chunkurl, 'HEAD', '',
            {'x-oio-check-hash': True})
        self.assertEqual(412, resp.status)

        if not self._compression():
            # Check the hash with corrupted chunk and valid header
            newh = md5(corrupted_data).hexdigest()
            resp, body = self._http_request(
                chunkurl, 'HEAD', '',
                {'x-oio-check-hash': True,
                 'x-oio-chunk-meta-chunk-hash': newh})
            self.assertEqual(200, resp.status)

        # Check the hash with corrupted chunk and invalid header
        resp, body = self._http_request(
            chunkurl, 'HEAD', '',
            {'x-oio-check-hash': True,
             'x-oio-chunk-meta-chunk-hash': 'xxx'})
        self.assertEqual(412, resp.status)
        resp, body = self._http_request(
            chunkurl, 'HEAD', '',
            {'x-oio-check-hash': True,
             'x-oio-chunk-meta-chunk-hash': 'A'*32})
        self.assertEqual(412, resp.status)

        # Check without xattr
        chunkid_woattr = chunkid[:3] + random_chunk_id()[3:]
        chunkurl_woattr = self._rawx_url(chunkid_woattr)
        with open(self._chunk_path(chunkid_woattr), "wb") as fp:
            fp.write(b"without xattrs")
        resp, body = self._http_request(
            chunkurl_woattr, 'HEAD', "",
            {'X-oio-check-hash': "true",
             REQID_HEADER: request_id('test_HEAD_chunk')})
        # If the size xattr is missing, we cannot read the chunk
        self.assertEqual(500, resp.status)
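
The test drives the rawx service through a private helper. A standalone Python 3 sketch of the checksum-verifying HEAD request it exercises, using only the standard library (the chunk URL is a placeholder; the header names come from the test itself):

from urllib.error import HTTPError
from urllib.request import Request, urlopen

def head_chunk_check_hash(chunk_url, reqid):
    req = Request(chunk_url, method='HEAD', headers={
        'X-oio-check-hash': 'true',
        'X-oio-req-id': reqid,
    })
    try:
        with urlopen(req) as resp:
            return resp.status       # 200: stored hash matches the data
    except HTTPError as err:
        return err.code              # 412: checksum mismatch, as tested above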
Example #24
    def process(self, env, cb):
        event = Event(env)
        headers = {
            REQID_HEADER: event.reqid or request_id('account-update-')
        }

        try:
            if event.event_type in CONTAINER_EVENTS:
                mtime = event.when / 1000000.0  # convert to seconds
                data = event.data
                url = event.env.get('url')
                body = dict()
                if event.event_type == EventTypes.CONTAINER_STATE:
                    body['objects'] = data.get('object-count', 0)
                    body['bytes'] = data.get('bytes-count', 0)
                    body['damaged_objects'] = data.get('damaged-objects', 0)
                    body['missing_chunks'] = data.get('missing-chunks', 0)
                    body['mtime'] = mtime
                elif event.event_type == EventTypes.CONTAINER_NEW:
                    body['mtime'] = mtime
                self.account.container_update(
                    url.get('account'), url.get('user'), body,
                    connection_timeout=self.connection_timeout,
                    read_timeout=self.read_timeout, headers=headers)
            elif event.event_type == EventTypes.ACCOUNT_SERVICES:
                url = event.env.get('url')
                if isinstance(event.data, list):
                    # Legacy format: list of services
                    new_services = event.data
                else:
                    # New format: dictionary with new and deleted services
                    new_services = event.data.get('services') or list()
                m2_services = [x for x in new_services
                               if x.get('type') == 'meta2']
                if not m2_services:
                    # No service in charge, container has been deleted
                    self.account.container_update(
                        url.get('account'), url.get('user'),
                        {'dtime': event.when / 1000000.0},
                        connection_timeout=self.connection_timeout,
                        read_timeout=self.read_timeout, headers=headers)
                else:
                    try:
                        self.account.account_create(
                            url.get('account'),
                            connection_timeout=self.connection_timeout,
                            read_timeout=self.read_timeout, headers=headers)
                    except OioTimeout as exc:
                        # The account will be autocreated by the next event,
                        # just warn and continue.
                        self.logger.warn(
                            'Failed to create account %s (reqid=%s): %s',
                            url.get('account'), headers[REQID_HEADER], exc)
        except OioTimeout as exc:
            msg = 'account update failure: %s' % str(exc)
            resp = EventError(event=Event(env), body=msg)
            return resp(env, cb)
        except ClientException as exc:
            if (exc.http_status == 409 and
                    "No update needed" in exc.message):
                self.logger.info(
                    "Discarding event %s (job_id=%s, reqid=%s): %s",
                    event.event_type, event.job_id,
                    headers[REQID_HEADER], exc.message)
            else:
                msg = 'account update failure: %s' % str(exc)
                resp = EventError(event=Event(env), body=msg)
                return resp(env, cb)
        return self.app(env, cb)
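
For EventTypes.CONTAINER_STATE events, the filter maps the event payload onto the account-service body. A sketch of that mapping on a made-up payload (the key names mirror the filter code above):

data = {'object-count': 12, 'bytes-count': 4096,
        'damaged-objects': 0, 'missing-chunks': 0}
when = 1600000000000000                 # event timestamp, in microseconds

body = {
    'objects': data.get('object-count', 0),
    'bytes': data.get('bytes-count', 0),
    'damaged_objects': data.get('damaged-objects', 0),
    'missing_chunks': data.get('missing-chunks', 0),
    'mtime': when / 1000000.0,          # converted to seconds
}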
Example #25
    def take_action(self, parsed_args):
        self.log.debug('take_action(%s)', parsed_args)

        kwargs = {'reqid': request_id(prefix='CLI-BUCKET-')}
        if parsed_args.prefix:
            kwargs['prefix'] = parsed_args.prefix
        if parsed_args.marker:
            kwargs['marker'] = parsed_args.marker
        if parsed_args.limit:
            kwargs['limit'] = parsed_args.limit

        account = self.app.client_manager.account
        acct_client = self.app.client_manager.storage.account
        storage = self.app.client_manager.storage

        if parsed_args.full_listing:
            listing = depaginate(acct_client.bucket_list,
                                 listing_key=lambda x: x['listing'],
                                 marker_key=lambda x: x.get('next_marker'),
                                 truncated_key=lambda x: x['truncated'],
                                 account=account,
                                 **kwargs)
        else:
            acct_meta = acct_client.bucket_list(account, **kwargs)
            listing = acct_meta['listing']

        columns = (
            'Name',
            'Bytes',
            'Objects',
            'Mtime',
        )

        def versioning(bucket):
            try:
                data = storage.container_get_properties(account,
                                                        bucket,
                                                        reqid=kwargs['reqid'])
            except NoSuchContainer:
                self.log.info('Bucket %s does not exist', bucket)
                return "Error"

            sys = data['system']
            # WARN it does not reflect namespace versioning if enabled
            status = sys.get(M2_PROP_VERSIONING_POLICY, None)
            if status is None or int(status) == 0:
                return "Suspended"
            else:
                return "Enabled"

        if parsed_args.versioning:
            columns += ('Versioning', )

            def enrich(listing):
                for v in listing:
                    v['versioning'] = versioning(v['name'])
                    yield v

            listing = enrich(listing)

        return columns, ([v[k.lower()] for k in columns] for v in listing)
 def object_delete(self, account, container, obj, headers=None):
     uri = self._make_uri("content/delete")
     params = self._make_params(account, container, obj)
     if headers is None:
         headers = {}
     if "X-oio-req-id" not in headers:
         headers["X-oio-req-id"] = utils.request_id()
     resp, resp_body = self._request("POST", uri, params=params, headers=headers)
Example #27
    def take_action(self, parsed_args):
        self.log.debug('take_action(%s)', parsed_args)

        reqid = request_id(prefix='CLI-BUCKET-')
        acct_client = self.app.client_manager.storage.account
        acct_client.bucket_refresh(parsed_args.bucket, reqid=reqid)