Example 1
def _init():
    flavor = os.environ.get('SETTINGS_FLAVOR', 'dev')
    config_path = os.environ.get('DOCKER_REGISTRY_CONFIG', 'config.yml')

    if not os.path.isabs(config_path):
        config_path = os.path.join(os.path.dirname(__file__), '../../',
                                   'config', config_path)
    try:
        f = open(config_path)
    except Exception:
        raise exceptions.FileNotFoundError('Heads-up! File is missing: %s' %
                                           config_path)

    conf = Config(f.read())
    if flavor:
        conf = conf[flavor]
        conf.flavor = flavor

    if conf.privileged_key:
        try:
            f = open(conf.privileged_key)
        except Exception:
            raise exceptions.FileNotFoundError(
                'Heads-up! File is missing: %s' % conf.privileged_key)

        try:
            conf.privileged_key = rsa.PublicKey.load_pkcs1(f.read())
        except Exception:
            raise exceptions.ConfigError('Key at %s is not a valid RSA key' %
                                         conf.privileged_key)

    if conf.index_endpoint:
        conf.index_endpoint = conf.index_endpoint.strip('/')

    return conf
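
The loader above is driven entirely by two environment variables. A minimal sketch of how a caller might select a flavor before initializing; the 'prod' flavor name is an illustration only, not something defined by the code above:

import os

# Hypothetical caller: choose the flavor and config file before loading.
# Both variables are optional; the loader falls back to 'dev' and 'config.yml'.
os.environ['SETTINGS_FLAVOR'] = 'prod'
os.environ['DOCKER_REGISTRY_CONFIG'] = 'config.yml'

conf = _init()
print(conf.flavor)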
Example 2
 def stream_read(self, path, bytes_range=None):
     path = self._init_path(path)
     nb_bytes = 0
     total_size = 0
     key = self._boto_bucket.lookup(path)
     if not key:
         raise exceptions.FileNotFoundError('%s is not there' % path)
     if bytes_range:
         key._last_position = bytes_range[0]
         total_size = bytes_range[1] - bytes_range[0] + 1
     while True:
         if bytes_range:
             # Bytes Range is enabled
             buf_size = self.buffer_size
             if nb_bytes + buf_size > total_size:
                 # We make sure we don't read out of the range
                 buf_size = total_size - nb_bytes
             if buf_size > 0:
                 buf = key.read(buf_size)
                 nb_bytes += len(buf)
             else:
                 # We're at the end of the range
                 buf = ''
         else:
             buf = key.read(self.buffer_size)
         if not buf:
             break
         yield buf
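
For reference, a minimal sketch of a consumer of the generator above, assuming the inclusive (start, end) convention for bytes_range used in the code; the store instance and the path are placeholders, not names from the project:

# Hypothetical usage: read bytes 0..1023 inclusive (at most 1024 bytes).
chunks = []
for buf in store.stream_read('images/layer', bytes_range=(0, 1023)):
    chunks.append(buf)
data = ''.join(chunks)
assert len(data) <= 1024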
Example 3
 def get_size(self, path):
     path = self._init_path(path)
     try:
         headers = self.head_store(path)
         return headers['size']
     except Exception:
         raise exceptions.FileNotFoundError('%s is not there' % path)
Example 4
    def getfileinfo(self, path):
        logger.debug("getfileinfo: %s" % path)

        resp = self._retry(self._getfileinfo, path)

        if resp.status_code == 200:
            j = resp.json()

            key = "fragment-info"
            if key in j:
                fragments_info = j[key]
                max_range, fragments_info = self._check_fragments(
                    fragments_info)
                if not max_range or not fragments_info:
                    raise exceptions.UnspecifiedError("fragment-info Error!")
                return max_range, fragments_info
            else:
                raise exceptions.UnspecifiedError(
                    "fileinfo does not contain fragment-info!")

        elif resp.status_code == 404:
            raise exceptions.FileNotFoundError("File Not Found!")
        else:
            raise exceptions.UnspecifiedError(
                "getfileinfo: unknown status code: %d" % resp.status_code)
Example 5
 def get_store(self, path, chunk_size=None):
     try:
         _, obj = self._swift_connection.get_object(
             self._swift_container, path, resp_chunk_size=chunk_size)
         return obj
     except Exception:
         raise exceptions.FileNotFoundError('%s is not there' % path)
Example 6
    def stream_read(self, path, bytes_range=None):
        try:
            f = io.BytesIO()
            nb_bytes = 0  # bytes of the requested range read so far
            self._blob.get_blob_to_file(self._container, path, f)

            if bytes_range:
                f.seek(bytes_range[0])
                total_size = bytes_range[1] - bytes_range[0] + 1
            else:
                f.seek(0)

            while True:
                buf = None
                if bytes_range:
                    # Bytes Range is enabled
                    buf_size = self.buffer_size
                    if nb_bytes + buf_size > total_size:
                        # We make sure we don't read out of the range
                        buf_size = total_size - nb_bytes
                    if buf_size > 0:
                        buf = f.read(buf_size)
                        nb_bytes += len(buf)
                    else:
                        # We're at the end of the range
                        buf = ''
                else:
                    buf = f.read(self.buffer_size)

                if not buf:
                    break

                yield buf
        except IOError:
            raise exceptions.FileNotFoundError('%s is not there' % path)
Example 7
def _init():
    flavor = os.environ.get('SETTINGS_FLAVOR', 'dev')
    config_path = os.environ.get('DOCKER_REGISTRY_CONFIG', 'config.yml')

    if not os.path.isabs(config_path):
        config_path = os.path.join(os.path.dirname(__file__), '../../',
                                   'config', config_path)
    try:
        f = open(config_path)
    except Exception:
        raise exceptions.FileNotFoundError(
            'Heads-up! File is missing: %s' % config_path)

    conf = Config(f.read())
    if flavor:
        if flavor not in conf:
            raise exceptions.ConfigError(
                'The specified flavor (%s) is missing in your config file (%s)'
                % (flavor, config_path))
        conf = conf[flavor]
        conf.flavor = flavor

    if conf.privileged_key:
        try:
            f = open(conf.privileged_key)
        except Exception:
            raise exceptions.FileNotFoundError(
                'Heads-up! File is missing: %s' % conf.privileged_key)

        try:
            pk = f.read().split('\n')
            pk = 'MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A' + ''.join(pk[1:-2])
            pk = [pk[i: i + 64] for i in range(0, len(pk), 64)]
            pk = ('-----BEGIN PUBLIC KEY-----\n' + '\n'.join(pk) +
                  '\n-----END PUBLIC KEY-----')
            bio = BIO.MemoryBuffer(pk)
            conf.privileged_key = RSA.load_pub_key_bio(bio)
        except Exception:
            raise exceptions.ConfigError(
                'Key at %s is not a valid RSA key' % conf.privileged_key)
        f.close()

    if conf.index_endpoint:
        conf.index_endpoint = conf.index_endpoint.strip('/')

    return conf
Example 8
    def s_read(self, path, offset=0, size=0):
        r = self._session.read_data(path, offset=offset, size=size)
        r.wait()
        err = r.error()
        if err.code != 0:
            raise exceptions.FileNotFoundError("No such file %s" % path)

        res = r.get()[0]
        return str(res.data)
Example 9
    def list_directory(self, path=None):
        if path is None:
            path = ''
        elif not path.endswith('/'):
            path += '/'  # path=a would list a/b.txt as well as 'abc.txt'

        blobs = list(self._blob.list_blobs(self._container, path))
        if not blobs:
            raise exceptions.FileNotFoundError('%s is not there' % path)

        return [b.name for b in blobs]
Example 10
    def _downloader(self, fragments):
        logger.debug("begin download multi parts.")

        for fragment in fragments:
            fragment_index = fragment['Index']
            fragment_begin = fragment['Start']
            fragment_end = fragment['End']

            try:
                logger.debug("begin download part fragment_index: %d" %
                             fragment_index)
                resp = self.conn.download(self.path,
                                          fragment_index,
                                          (fragment_begin, fragment_end),
                                          stream=False)
                if resp.status_code == 200:
                    content = resp.content
                    self._fragment_tempfiles[fragment_index] = _TempFile(
                        mode='w+b', prefix=self.tmpdir)
                    f = self._fragment_tempfiles[fragment_index].file
                    f.write(content)

                    # seek to 0, ready to be read
                    f.seek(0)
                    self._refresh_max_completed_byte(fragment_index,
                                                     fragment_end)
                    logger.debug(
                        "download part success!!! fragment_index: %d" %
                        fragment_index)

                elif resp.status_code == 404:
                    logger.debug(
                        "download part, file not found, path: %s, fragment_index: %d"
                        % (self.path, fragment_index))
                    raise exceptions.FileNotFoundError(
                        "fetch_part FileNotFound!")
                else:
                    logger.debug("unexpected status code: %d" %
                                 resp.status_code)
                    raise exceptions.UnspecifiedError(
                        "unexpected status code: %d" % resp.status_code)
            except Exception:
                logger.error("download part failed, path: %s, index: %d" %
                             (self.path, fragment_index))
                self._set_error()
                return

            # docker read timeout
            now = time.time()
            if now - self._get_last_read_time() > 300:
                logger.debug(
                    "speedy: no read for a long time; the reader may have exited")
                self._set_error()
                self.clear()
                return

        logger.debug("end download multi parts.")
Example 11
def _get_image_layer(image_id, headers=None, bytes_range=None):
    if headers is None:
        headers = {}

    headers['Content-Type'] = 'application/octet-stream'
    accel_uri_prefix = cfg.nginx_x_accel_redirect
    path = store.image_layer_path(image_id)
    if accel_uri_prefix:
        if store.scheme == 'file':
            accel_uri = '/'.join([accel_uri_prefix, path])
            headers['X-Accel-Redirect'] = accel_uri
            logger.debug('send accelerated {0} ({1})'.format(
                accel_uri, headers))
            return flask.Response('', headers=headers)
        else:
            logger.warn('nginx_x_accel_redirect config set,'
                        ' but storage is not LocalStorage')

    # If store allows us to just redirect the client let's do that, we'll
    # offload a lot of expensive I/O and get faster I/O
    if cfg.storage_redirect:
        try:
            content_redirect_url = store.content_redirect_url(path)
            if content_redirect_url:
                return flask.redirect(content_redirect_url, 302)
        except IOError as e:
            logger.debug(str(e))

    status = None
    layer_size = 0

    if not store.exists(path):
        raise exceptions.FileNotFoundError("Image layer absent from store")
    try:
        layer_size = store.get_size(path)
    except exceptions.FileNotFoundError:
        # XXX why would that fail given we know the layer exists?
        pass
    if bytes_range and bytes_range[1] == -1 and not layer_size == 0:
        bytes_range = (bytes_range[0], layer_size)

    if bytes_range:
        content_length = bytes_range[1] - bytes_range[0] + 1
        if not _valid_bytes_range(bytes_range):
            return flask.Response(status=416, headers=headers)
        status = 206
        content_range = (bytes_range[0], bytes_range[1], layer_size)
        headers['Content-Range'] = '{0}-{1}/{2}'.format(*content_range)
        headers['Content-Length'] = content_length
    elif layer_size > 0:
        headers['Content-Length'] = layer_size
    else:
        return flask.Response(status=416, headers=headers)
    return flask.Response(store.stream_read(path, bytes_range),
                          headers=headers,
                          status=status)
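
_get_image_layer expects bytes_range as an inclusive (start, end) tuple, with an end of -1 meaning "to the end of the layer". A rough sketch of how a plain "Range: bytes=start-end" header could be mapped onto that tuple; this parser is an illustration, not the project's own helper:

def _parse_range_header(value):
    # 'bytes=0-499' -> (0, 499); 'bytes=500-' -> (500, -1)
    if not value or not value.startswith('bytes='):
        return None
    start, _, end = value[len('bytes='):].partition('-')
    return (int(start), int(end) if end else -1)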
Example 12
    def list_directory(self, path=None):
        if path is None:  # pragma: no cover
            path = ""

        if not self.exists(path) and path:
            raise exceptions.FileNotFoundError(
                'No such directory: \'{0}\''.format(path))

        for item in self.s_find(('docker', path)):
            yield item
Example 13
 def get_content(self, path):
     path = self.getfullpath(path)
     try:
         res = self._oss.get_object(self.osscfg.bucket, path)
         if res.status == 200:
             return res.read()
         else:
             raise IOError('read %s failed, status: %s' %
                           (path, res.status))
     except Exception:
         raise exceptions.FileNotFoundError("File not found %s" % path)
Example 14
 def get_content(self, path):
     if not self.exists(path):
         raise exceptions.FileNotFoundError("File not found %s" % path)
     path = self._init_path(path)
     output = StringIO.StringIO()
     try:
         for buf in self.get_store(path, self.buffer_size):
             output.write(buf)
         return output.getvalue()
     finally:
         output.close()
Example 15
def load():
    global _config
    if _config is not None:
        return _config
    data = None
    config_path = os.environ.get('DOCKER_REGISTRY_CONFIG', 'config.yml')
    if not os.path.isabs(config_path):
        config_path = os.path.join(os.path.dirname(__file__), '../../',
                                   'config', config_path)
    try:
        f = open(config_path)
    except Exception:
        raise exceptions.FileNotFoundError(
            'Heads-up! File is missing: %s' % config_path)

    try:
        data = yaml.load(f)
    except Exception:
        raise exceptions.ConfigError(
            'Config file (%s) is not valid yaml' % config_path)

    config = data.get('common', {})
    flavor = os.environ.get('SETTINGS_FLAVOR', 'dev')
    config.update(data.get(flavor, {}))
    config['flavor'] = flavor
    config = convert_env_vars(config)
    if 'privileged_key' in config:
        try:
            f = open(config['privileged_key'])
        except Exception:
            raise exceptions.FileNotFoundError(
                'Heads-up! File is missing: %s' % config['privileged_key'])

        try:
            config['privileged_key'] = rsa.PublicKey.load_pkcs1(f.read())
        except Exception:
            raise exceptions.ConfigError(
                'Key at %s is not a valid RSA key' % config['privileged_key'])

    _config = Config(config)
    return _config
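
For illustration, the kind of mapping the loader expects yaml.load to return; only the 'common' section, the flavor names and 'privileged_key' are implied by the code above, the remaining keys are made up:

# load() merges data['common'] with data[flavor] and then sets config['flavor'].
data = {
    'common': {'privileged_key': '/path/to/key.pem'},   # hypothetical path
    'dev': {'loglevel': 'debug'},                        # hypothetical key
    'prod': {'loglevel': 'info'},
}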
Example 16
    def stream_read(self, path, bytes_range=None):
        logger.debug("read range %s from %s", str(bytes_range), path)
        if not self.exists(path):
            raise exceptions.FileNotFoundError(
                'No such directory: \'{0}\''.format(path))

        if bytes_range is None:
            yield self.s_read(path)
        else:
            offset = bytes_range[0]
            size = bytes_range[1] - bytes_range[0] + 1
            yield self.s_read(path, offset=offset, size=size)
Example 17
 def get_size(self, path):
     logger.debug("get_size of %s", path)
     r = self._session.lookup(path)
     r.wait()
     lookups = r.get()
     err = r.error()
     if err.code != 0:
         raise exceptions.FileNotFoundError("Unable to get size of %s %s" %
                                            (path, err))
     size = lookups[0].size
     logger.debug("size of %s = %d", path, size)
     return size
Example 18
    def list_directory(self, path=None):
        logger.debug("list directory path: %s" % path)

        resp = self.speedy_conn.list_directory(path)
        if resp.status_code == 200:
            j = resp.json()
            return j["file-list"]
        elif resp.status_code == 404:
            raise exceptions.FileNotFoundError("no such directory: %s" % path)
        else:
            raise exceptions.UnspecifiedError("unexcept status code: %d" %
                                              path)
Example 19
    def remove(self, path):
        is_blob = self.exists(path)
        if is_blob:
            self._blob.delete_blob(self._container, path)
            return

        blobs = list(self._blob.list_blobs(self._container, path))
        if not blobs:
            raise exceptions.FileNotFoundError('%s is not there' % path)

        for b in blobs:
            self._blob.delete_blob(self._container, b.name)
Example 20
 def list_directory(self, path=None):
     try:
         path = self._init_path(path)
         if path and not path.endswith('/'):
             path += '/'
         files = list(self._bucket.listdir(prefix=path))
         if not files:
             raise Exception('empty')
         for item in files:
             yield item[0]
     except Exception:
         raise exceptions.FileNotFoundError('%s is not there' % path)
Example 21
    def get_size(self, path):
        path = self.getfullpath(path)
        logger.debug("get_size of %s", path)
        headers = {}
        r = self._oss.head_object(self.osscfg.bucket, path, headers)
        if (r.status / 100) != 2:
            raise exceptions.FileNotFoundError("Unable to get size of %s" %
                                               path)

        header_map = convert_header2map(r.getheaders())
        size = safe_get_element("content-length", header_map)
        logger.debug("size of %s = %d", path, size)
        return size
Example 22
    def get_store(self, path, chunk_size=None):
        try:
            response = self._bucket[path]
        except KeyNotFound:
            raise exceptions.FileNotFoundError('%s is not there' % path)

        try:
            while True:
                chunk = response.read(chunk_size)
                if not chunk:
                    break
                yield chunk
        except Exception:
            raise IOError("Could not get content: %s" % path)
Example 23
 def stream_read(self, path, bytes_range=None):
     """Method to stream read."""
     if not self.exists(path):
         raise exceptions.FileNotFoundError("File not found %s" % path)
     path = self._init_path(path)
     res = self.mos.get_object(self.bucket, path)
     if res.status == 200:
         block = res.read(self.buffer_size)
         while len(block) > 0:
             yield block
             block = res.read(self.buffer_size)
     else:
         raise IOError('read %s failed, status: %s' % (path, res.status))
Example 24
 def get_size(self, path):
     """Method to get the size."""
     path = self._init_path(path)
     logging.debug("Get file size of %s" % path)
     try:
         file_size = self.mos.get_object_filesize(self.bucket, path)
     except Exception:
         raise exceptions.ConnectionError("Unable to get size of %s" % path)
     if file_size is None:
         # Raised outside the try block so it is not swallowed by the
         # generic handler above and re-raised as a ConnectionError.
         raise exceptions.FileNotFoundError("Unable to get size of %s" % path)
     logging.debug("Size of %s is %s" % (path, file_size))
     return file_size
Example 25
    def remove(self, path):
        logger.debug("remove path: %s" % path)

        resp = self.speedy_conn.delete(path)
        if resp.status_code == 204:
            logger.debug("speedy remove success: %s" % path)
        elif resp.status_code == 404:
            logger.warning("speedy remove, file not found: %s" % path)
            raise exceptions.FileNotFoundError("%s is not here" % path)
        else:
            logger.error("speedy remove, unexpected status code %d" %
                         resp.status_code)
            raise exceptions.UnspecifiedError(
                "speedy remove, unexpected status code %d" % resp.status_code)
Example 26
    def stream_read(self, path, bytes_range=None):
        path = self.getfullpath(path)
        logger.debug("read from %s", path)
        if not self.exists(path):
            raise exceptions.FileNotFoundError(
                'No such directory: \'{0}\''.format(path))

        res = self._oss.get_object(self.osscfg.bucket, path)
        if res.status == 200:
            block = res.read(self.buffer_size)
            while len(block) > 0:
                yield block
                block = res.read(self.buffer_size)
        else:
            raise IOError('read %s failed, status: %s' % (path, res.status))
Example 27
    def remove(self, path):
        path = self._init_path(path)

        is_dir = False
        for item in self._bucket.listdir(prefix=path + '/'):
            self._bucket.delete(item[0])
            is_dir = True

        if not is_dir:
            try:
                self._bucket.info(path)
            except Exception:
                raise exceptions.FileNotFoundError('%s is not there' % path)

        self._bucket.delete(path)
Example 28
 def list_directory(self, path=None):
     try:
         path = self._init_path(path)
         if path and not path.endswith('/'):
             path += '/'
         _, directory = self._swift_connection.get_container(
             container=self._swift_container,
             path=path)
         if not directory:
             raise Exception('empty')
         for inode in directory:
             # trim extra trailing slashes
             if inode['name'].endswith('/'):
                 inode['name'] = inode['name'][:-1]
             yield inode['name'].replace(self._root_path[1:] + '/', '', 1)
     except Exception:
         raise exceptions.FileNotFoundError('%s is not there' % path)
Example 29
    def s_remove(self, key):
        fail = False
        r = self._session.remove(key)
        r.wait()
        err = r.error()
        if err.code != 0:
            logger.warning("Unable to remove key %s %s", key, err.message)
            fail = True

        r = self._session.set_indexes(key, [], [])
        r.wait()
        err = r.error()
        if err.code != 0:
            logger.warning("Unable to remove key %s indexes %s", key,
                           err.message)
        if fail:
            raise exceptions.FileNotFoundError("No such file %s" % key)
Example 30
    def _lsdir(self, path):
        """list all content in the target dir
        returns a list of filenames

        :param path: dir path
        """
        dir_path = remove_slash(path or "")
        logger.info("list dir for {0}".format(dir_path))
        res = self._request(GET, dir_path)

        for key, val in self.index_header.items():
            if res.headers.get(key) != val:
                raise de.FileNotFoundError("{0} is not there".format(dir_path))

        return [
            os.path.join(dir_path, fname) for fname in res.text.splitlines()
            if fname
        ]