Example #1
def get_swift_uri(test, image_id):
    # Apparently we must use HTTPS with Cloud Files now, otherwise
    # we will get a 301 Moved.... :(
    user = swiftclient.quote("%(swift_store_user)s" % test.__dict__)
    creds = user + ":%(swift_store_key)s" % test.__dict__
    uri = "swift+https://" + creds
    uri += "@%(swift_store_auth_address)s/%(swift_store_container)s/" % test.__dict__
    uri += image_id
    uri = uri.replace("@http://", "@")
    uri = uri.replace("@https://", "@")
    return uri
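A note on the example above: get_swift_uri() only formats a Swift URI out of attributes that the test object already carries; swiftclient.quote() is there to percent-encode characters such as ':' in the user name. A minimal, hypothetical driver (the attribute names mirror the format strings, but the class and values are made up for illustration):

import swiftclient  # needed by get_swift_uri() above


class FakeTest(object):
    """Hypothetical stand-in for the functional-test fixture."""
    def __init__(self):
        self.swift_store_user = 'tenant:user'          # ':' will be quoted to %3A
        self.swift_store_key = 'secretkey'
        self.swift_store_auth_address = 'auth.example.com/v1.0'
        self.swift_store_container = 'glance'


uri = get_swift_uri(FakeTest(), 'e51e2176-0000-0000-0000-000000000000')
print(uri)
# swift+https://tenant%3Auser:secretkey@auth.example.com/v1.0/glance/e51e2176-...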
Example #2
def get_swift_uri(test, image_id):
    # Apparently we must use HTTPS with Cloud Files now, otherwise
    # we will get a 301 Moved.... :(
    user = swiftclient.quote('%(swift_store_user)s' % test.__dict__)
    creds = (user + ':%(swift_store_key)s' % test.__dict__)
    uri = 'swift+https://' + creds
    uri += ('@%(swift_store_auth_address)s/%(swift_store_container)s/' %
            test.__dict__)
    uri += image_id
    uri = uri.replace('@http://', '@')
    uri = uri.replace('@https://', '@')
    return uri
Example #3
def browse():
    global htdocs, templates

    cnx = get_session()
    container_info = cnx.get_container(get_container())
    restartneeded = get_reg_value('HKLM', APP_REG_KEY, 'restartneeded')
    internal_error = get_reg_value('HKLM', APP_REG_KEY, 'internal_error', False)
    file_data = {
        'template_lookup': [templates,],
        'restartneeded': restartneeded,
        'internalerror': internal_error,
        'dirs': []
    }
    for files in container_info[1]:
        file_data['dirs'].append(
            {
                'path': files['name'],
                'path_encoded': quote(files['name'], safe='').lstrip('/'),
                'changed': files['last_modified'],
                'size': files['bytes']
            }
        )
    return file_data
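For context on browse(): get_session() presumably returns a python-swiftclient Connection, whose get_container() yields a (headers, listing) tuple; the listing is a list of dicts with keys such as 'name', 'bytes' and 'last_modified', and the loop simply re-shapes those records for the template layer. A small stand-alone sketch of the same re-shaping over a hand-built listing (the data is invented, and the standard-library quote() is used in place of whatever the module actually imports):

from urllib.parse import quote

# shape of container_info[1] as returned by get_container()
listing = [
    {'name': 'docs/readme.txt', 'bytes': 512,
     'last_modified': '2014-01-01T00:00:00.000000'},
]

dirs = []
for obj in listing:
    dirs.append({
        'path': obj['name'],
        # safe='' also encodes '/', so the whole name becomes one URL segment
        'path_encoded': quote(obj['name'], safe='').lstrip('/'),
        'changed': obj['last_modified'],
        'size': obj['bytes'],
    })

print(dirs[0]['path_encoded'])  # docs%2Freadme.txt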
Example #4
                if 'etag' in headers:
                    headers['etag'] = headers['etag'].strip('"')
                headers['x-timestamp'] = row['created_at']
                headers['x-container-sync-key'] = sync_key
                put_object(sync_to, name=row['name'], headers=headers,
                           contents=FileLikeIter(body),
                           proxy=self.proxy)
                self.container_puts += 1
                self.logger.increment('puts')
                self.logger.timing_since('puts.timing', start_time)
        except ClientException as err:
            if err.http_status == HTTP_UNAUTHORIZED:
                self.logger.info(
                    _('Unauth %(sync_from)r => %(sync_to)r'),
                    {'sync_from': '%s/%s' %
                        (quote(info['account']), quote(info['container'])),
                     'sync_to': sync_to})
            elif err.http_status == HTTP_NOT_FOUND:
                self.logger.info(
                    _('Not found %(sync_from)r => %(sync_to)r \
                      - object %(obj_name)r'),
                    {'sync_from': '%s/%s' %
                        (quote(info['account']), quote(info['container'])),
                     'sync_to': sync_to, 'obj_name': row['name']})
            else:
                self.logger.exception(
                    _('ERROR Syncing %(db_file)s %(row)s'),
                    {'db_file': broker.db_file, 'row': row})
            self.container_failures += 1
            self.logger.increment('failures')
            return False
Example #5
    def container_sync_row(self, row, sync_to, sync_key, broker, info):
        """
        Sends the update indicated by the row to the sync_to container.

        :param row: The updated row in the local database triggering the sync
                    update.
        :param sync_to: The URL to the remote container.
        :param sync_key: The X-Container-Sync-Key to use when sending requests
                         to the other container.
        :param broker: The local container database broker.
        :param info: The get_info result from the local container database
                     broker.
        :returns: True on success
        """
        try:
            start_time = time()
            if row['deleted']:
                try:
                    delete_object(sync_to, name=row['name'],
                                  headers={'x-timestamp': row['created_at'],
                                           'x-container-sync-key': sync_key},
                                  proxy=self.proxy)
                except ClientException as err:
                    if err.http_status != HTTP_NOT_FOUND:
                        raise
                self.container_deletes += 1
                self.logger.increment('deletes')
                self.logger.timing_since('deletes.timing', start_time)
            else:
                part, nodes = self.object_ring.get_nodes(
                    info['account'], info['container'],
                    row['name'])
                shuffle(nodes)
                exc = None
                looking_for_timestamp = float(row['created_at'])
                timestamp = -1
                headers = body = None
                for node in nodes:
                    try:
                        these_headers, this_body = direct_get_object(
                            node, part, info['account'], info['container'],
                            row['name'], resp_chunk_size=65536)
                        this_timestamp = float(these_headers['x-timestamp'])
                        if this_timestamp > timestamp:
                            timestamp = this_timestamp
                            headers = these_headers
                            body = this_body
                    except ClientException as err:
                        # If any errors are not 404, make sure we report the
                        # non-404 one. We don't want to mistakenly assume the
                        # object no longer exists just because one says so and
                        # the others errored for some other reason.
                        if not exc or exc.http_status == HTTP_NOT_FOUND:
                            exc = err
                    except (Exception, Timeout) as err:
                        exc = err
                if timestamp < looking_for_timestamp:
                    if exc:
                        raise exc
                    raise Exception(
                        _('Unknown exception trying to GET: %(node)r '
                          '%(account)r %(container)r %(object)r'),
                        {'node': node, 'part': part,
                         'account': info['account'],
                         'container': info['container'],
                         'object': row['name']})
                for key in ('date', 'last-modified'):
                    if key in headers:
                        del headers[key]
                if 'etag' in headers:
                    headers['etag'] = headers['etag'].strip('"')
                headers['x-timestamp'] = row['created_at']
                headers['x-container-sync-key'] = sync_key
                put_object(sync_to, name=row['name'], headers=headers,
                           contents=FileLikeIter(body),
                           proxy=self.proxy)
                self.container_puts += 1
                self.logger.increment('puts')
                self.logger.timing_since('puts.timing', start_time)
        except ClientException as err:
            if err.http_status == HTTP_UNAUTHORIZED:
                self.logger.info(
                    _('Unauth %(sync_from)r => %(sync_to)r'),
                    {'sync_from': '%s/%s' %
                        (quote(info['account']), quote(info['container'])),
                     'sync_to': sync_to})
            elif err.http_status == HTTP_NOT_FOUND:
                self.logger.info(
                    _('Not found %(sync_from)r => %(sync_to)r \
                      - object %(obj_name)r'),
                    {'sync_from': '%s/%s' %
                        (quote(info['account']), quote(info['container'])),
                     'sync_to': sync_to, 'obj_name': row['name']})
            else:
                self.logger.exception(
                    _('ERROR Syncing %(db_file)s %(row)s'),
                    {'db_file': broker.db_file, 'row': row})
            self.container_failures += 1
            self.logger.increment('failures')
            return False
        except (Exception, Timeout) as err:
            self.logger.exception(
                _('ERROR Syncing %(db_file)s %(row)s'),
                {'db_file': broker.db_file, 'row': row})
            self.container_failures += 1
            self.logger.increment('failures')
            return False
        return True
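The GET loop in the middle of container_sync_row() is a "newest copy wins" scan: every primary node for the object is asked for it, the copy with the highest x-timestamp is kept, and the most interesting error is remembered so that a plain 404 from one node cannot hide a real failure on another. A stripped-down sketch of just that selection logic, with the Swift-specific pieces replaced by stand-ins (fetch() plays the role of direct_get_object()):

import random


class NotFound(Exception):
    """Stand-in for a ClientException carrying a 404 status."""


def newest_copy(nodes, fetch, wanted_timestamp):
    """Return the (headers, body) of the newest copy found on any node."""
    random.shuffle(nodes)
    exc = None
    best_ts, best = -1, None
    for node in nodes:
        try:
            headers, body = fetch(node)
            ts = float(headers['x-timestamp'])
            if ts > best_ts:
                best_ts, best = ts, (headers, body)
        except NotFound as err:
            # only remember a 404 if nothing worse has been seen yet
            if exc is None:
                exc = err
        except Exception as err:
            exc = err
    if best_ts < wanted_timestamp:
        # no copy at least as new as the row being synced was found
        raise exc or Exception('object not found on any node')
    return best

The real method goes further when logging the failure (it distinguishes 401 and 404 ClientExceptions from everything else), but the timestamp comparison and the "prefer the non-404 error" rule are the heart of it.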
Example #6
    def container_sync_row(self, row, sync_to, sync_key, broker, info):
        """
        Sends the update indicated by the row to the sync_to container.

        :param row: The updated row in the local database triggering the sync
                    update.
        :param sync_to: The URL to the remote container.
        :param sync_key: The X-Container-Sync-Key to use when sending requests
                         to the other container.
        :param broker: The local container database broker.
        :param info: The get_info result from the local container database
                     broker.
        :returns: True on success
        """
        try:
            start_time = time()
            if row["deleted"]:
                try:
                    delete_object(
                        sync_to,
                        name=row["name"],
                        headers={"x-timestamp": row["created_at"], "x-container-sync-key": sync_key},
                        proxy=self.proxy,
                    )
                except ClientException as err:
                    if err.http_status != HTTP_NOT_FOUND:
                        raise
                self.container_deletes += 1
                self.logger.increment("deletes")
                self.logger.timing_since("deletes.timing", start_time)
            else:
                part, nodes = self.object_ring.get_nodes(info["account"], info["container"], row["name"])
                shuffle(nodes)
                exc = None
                looking_for_timestamp = float(row["created_at"])
                timestamp = -1
                headers = body = None
                for node in nodes:
                    try:
                        these_headers, this_body = direct_get_object(
                            node, part, info["account"], info["container"], row["name"], resp_chunk_size=65536
                        )
                        this_timestamp = float(these_headers["x-timestamp"])
                        if this_timestamp > timestamp:
                            timestamp = this_timestamp
                            headers = these_headers
                            body = this_body
                    except ClientException as err:
                        # If any errors are not 404, make sure we report the
                        # non-404 one. We don't want to mistakenly assume the
                        # object no longer exists just because one says so and
                        # the others errored for some other reason.
                        if not exc or exc.http_status == HTTP_NOT_FOUND:
                            exc = err
                    except (Exception, Timeout) as err:
                        exc = err
                if timestamp < looking_for_timestamp:
                    if exc:
                        raise exc
                    raise Exception(
                        _("Unknown exception trying to GET: %(node)r " "%(account)r %(container)r %(object)r"),
                        {
                            "node": node,
                            "part": part,
                            "account": info["account"],
                            "container": info["container"],
                            "object": row["name"],
                        },
                    )
                for key in ("date", "last-modified"):
                    if key in headers:
                        del headers[key]
                if "etag" in headers:
                    headers["etag"] = headers["etag"].strip('"')
                headers["x-timestamp"] = row["created_at"]
                headers["x-container-sync-key"] = sync_key
                put_object(sync_to, name=row["name"], headers=headers, contents=FileLikeIter(body), proxy=self.proxy)
                self.container_puts += 1
                self.logger.increment("puts")
                self.logger.timing_since("puts.timing", start_time)
        except ClientException as err:
            if err.http_status == HTTP_UNAUTHORIZED:
                self.logger.info(
                    _("Unauth %(sync_from)r => %(sync_to)r"),
                    {"sync_from": "%s/%s" % (quote(info["account"]), quote(info["container"])), "sync_to": sync_to},
                )
            elif err.http_status == HTTP_NOT_FOUND:
                self.logger.info(
                    _(
                        "Not found %(sync_from)r => %(sync_to)r \
                      - object %(obj_name)r"
                    ),
                    {
                        "sync_from": "%s/%s" % (quote(info["account"]), quote(info["container"])),
                        "sync_to": sync_to,
                        "obj_name": row["name"],
                    },
                )
            else:
                self.logger.exception(_("ERROR Syncing %(db_file)s %(row)s"), {"db_file": str(broker), "row": row})
            self.container_failures += 1
            self.logger.increment("failures")
            return False
        except (Exception, Timeout) as err:
            self.logger.exception(_("ERROR Syncing %(db_file)s %(row)s"), {"db_file": str(broker), "row": row})
            self.container_failures += 1
            self.logger.increment("failures")
            return False
        return True
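One detail worth calling out in this variant: direct_get_object() hands the object body back as an iterator of chunks (resp_chunk_size=65536), while put_object() expects something file-like, and FileLikeIter (which in Swift's tree lives in swift.common.utils) is the adapter in between. A rough, hypothetical stand-in just to show what such an adapter does, not the real class:

class IterAsFile(object):
    """Minimal file-like wrapper over an iterable of byte chunks."""

    def __init__(self, iterable):
        self._iter = iter(iterable)
        self._buf = b''

    def read(self, size=-1):
        # read everything that is left
        if size is None or size < 0:
            data, self._buf = self._buf + b''.join(self._iter), b''
            return data
        # otherwise top up the buffer until 'size' bytes are available
        while len(self._buf) < size:
            try:
                self._buf += next(self._iter)
            except StopIteration:
                break
        data, self._buf = self._buf[:size], self._buf[size:]
        return data


chunks = (b'x' * 10 for _ in range(3))
f = IterAsFile(chunks)
print(len(f.read(25)), len(f.read(25)))  # 25 5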
Example #7
def exec_account(url, token=None, container=None, name=None, contents=None,
                 content_length=None, etag=None, chunk_size=None,
                 content_type=None, headers=None, http_conn=None, proxy=None,
                 query_string=None, response_dict=None):
    """
    Execute a job

    :param url: storage URL
    :param token: auth token; if None, no token will be sent
    :param container: container name that the object is in; if None, the
                      container name is expected to be part of the url
    :param name: object name to put; if None, the object name is expected to be
                 part of the url
    :param contents: a string or a file like object to read object data from;
                     if None, a zero-byte put will be done
    :param content_length: value to send as content-length header; also limits
                           the amount read from contents; if None, it will be
                           computed via the contents or chunked transfer
                           encoding will be used
    :param etag: etag of contents; if None, no etag will be sent
    :param chunk_size: chunk size of data to write; it defaults to 65536;
                       used only if the contents object has a 'read'
                       method, e.g. file-like objects, ignored otherwise
    :param content_type: value to send as content-type header; if None, no
                         content-type will be set (remote end will likely try
                         to auto-detect it)
    :param headers: additional headers to include in the request, if any
    :param http_conn: HTTP connection object (If None, it will create the
                      conn object)
    :param proxy: proxy to connect through, if any; None by default; str of the
                  format 'http://127.0.0.1:8888' to set one
    :param query_string: if set, appended to the generated path after a '?'
    :param response_dict: an optional dictionary into which to place
                     the response - status, reason and headers
    :returns: (body, headers) tuple
    :raises ClientException: HTTP POST request failed
    """
    if http_conn:
        parsed, conn = http_conn
    else:
        parsed, conn = http_connection(url, proxy=proxy)
    path = parsed.path
    if container:
        path = '%s/%s' % (path.rstrip('/'), quote(container))
    if name:
        path = '%s/%s' % (path.rstrip('/'), quote(name))
    if query_string:
        path += '?' + query_string
    if headers:
        headers = dict(headers)
    else:
        headers = {}
    if token:
        headers['X-Auth-Token'] = token
    if etag:
        headers['ETag'] = etag.strip('"')
    if content_length is not None:
        headers['Content-Length'] = str(content_length)
    else:
        for n, v in headers.items():
            if n.lower() == 'content-length':
                content_length = int(v)
    if content_type is not None:
        headers['Content-Type'] = content_type
    if not contents:
        headers['Content-Length'] = '0'
    headers['X-Zerovm-Execute'] = '1.0'
    if hasattr(contents, 'read'):
        if chunk_size is None:
            chunk_size = 65536
        if content_length is None:
            def chunk_reader():
                while True:
                    data = contents.read(chunk_size)
                    if not data:
                        break
                    yield data
            conn.request('POST', path, data=chunk_reader(), headers=headers)
        else:
            # Fixes https://github.com/kennethreitz/requests/issues/1648
            data = LengthWrapper(contents, content_length)
            conn.request('POST', path, data=data, headers=headers)
    else:
        if chunk_size is not None:
            warn_msg = ('%s object has no "read" method, ignoring chunk_size'
                        % type(contents).__name__)
            warnings.warn(warn_msg, stacklevel=2)
        conn.request('POST', path, contents, headers)
    resp = conn.getresponse()
    body = resp.read()
    headers = {'X-Auth-Token': token}
    http_log(('%s%s' % (url.replace(parsed.path, ''), path), 'POST',),
             {'headers': headers}, resp, body)

    store_response(resp, response_dict)

    if resp.status < 200 or resp.status >= 300:
        raise ClientException('Object POST failed', http_scheme=parsed.scheme,
                              http_host=conn.host, http_path=path,
                              http_status=resp.status, http_reason=resp.reason,
                              http_response_content=body)
    return body, dict(resp.getheaders())
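exec_account() is clearly modelled on swiftclient's put_object(): build the object path, choose a body strategy depending on whether contents has a read() method (chunked generator vs. a length-wrapped file), then issue a POST carrying the X-Zerovm-Execute: 1.0 header. A hedged usage sketch; the storage URL, token and job payload below are invented, and a ZeroVM-enabled proxy is assumed to be listening:

import io
import json

# hypothetical job description; the real schema is defined by zerocloud, not here
job = json.dumps([{'name': 'hello', 'exec': {'path': 'swift://~/container/hello.nexe'}}])

resp = {}
body, headers = exec_account(
    'https://proxy.example.com/v1/AUTH_test',   # storage URL (made up)
    token='AUTH_tk_example',                    # auth token (made up)
    contents=io.BytesIO(job.encode('utf-8')),   # has read(), so it is streamed in chunks
    content_type='application/json',
    response_dict=resp,
)
print(resp.get('status'))
print(body[:200])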
Example #8
    def container_sync_row(self, row, sync_to, sync_key, broker, info):
        """
        Sends the update indicated by the row to the sync_to container.

        :param row: The updated row in the local database triggering the sync
                    update.
        :param sync_to: The URL to the remote container.
        :param sync_key: The X-Container-Sync-Key to use when sending requests
                         to the other container.
        :param broker: The local container database broker.
        :param info: The get_info result from the local container database
                     broker.
        :returns: True on success
        """
        try:
            start_time = time()
            if row['deleted']:
                try:
                    delete_object(sync_to,
                                  name=row['name'],
                                  headers={
                                      'x-timestamp': row['created_at'],
                                      'x-container-sync-key': sync_key
                                  },
                                  proxy=self.proxy)
                except ClientException as err:
                    if err.http_status != HTTP_NOT_FOUND:
                        raise
                self.container_deletes += 1
                self.logger.increment('deletes')
                self.logger.timing_since('deletes.timing', start_time)
            else:
                part, nodes = self.object_ring.get_nodes(
                    info['account'], info['container'], row['name'])
                shuffle(nodes)
                exc = None
                looking_for_timestamp = float(row['created_at'])
                timestamp = -1
                headers = body = None
                for node in nodes:
                    try:
                        these_headers, this_body = direct_get_object(
                            node,
                            part,
                            info['account'],
                            info['container'],
                            row['name'],
                            resp_chunk_size=65536)
                        this_timestamp = float(these_headers['x-timestamp'])
                        if this_timestamp > timestamp:
                            timestamp = this_timestamp
                            headers = these_headers
                            body = this_body
                    except ClientException as err:
                        # If any errors are not 404, make sure we report the
                        # non-404 one. We don't want to mistakenly assume the
                        # object no longer exists just because one says so and
                        # the others errored for some other reason.
                        if not exc or exc.http_status == HTTP_NOT_FOUND:
                            exc = err
                    except (Exception, Timeout) as err:
                        exc = err
                if timestamp < looking_for_timestamp:
                    if exc:
                        raise exc
                    raise Exception(
                        _('Unknown exception trying to GET: %(node)r '
                          '%(account)r %(container)r %(object)r'), {
                              'node': node,
                              'part': part,
                              'account': info['account'],
                              'container': info['container'],
                              'object': row['name']
                          })
                for key in ('date', 'last-modified'):
                    if key in headers:
                        del headers[key]
                if 'etag' in headers:
                    headers['etag'] = headers['etag'].strip('"')
                headers['x-timestamp'] = row['created_at']
                headers['x-container-sync-key'] = sync_key
                put_object(sync_to,
                           name=row['name'],
                           headers=headers,
                           contents=FileLikeIter(body),
                           proxy=self.proxy)
                self.container_puts += 1
                self.logger.increment('puts')
                self.logger.timing_since('puts.timing', start_time)
        except ClientException as err:
            if err.http_status == HTTP_UNAUTHORIZED:
                self.logger.info(
                    _('Unauth %(sync_from)r => %(sync_to)r'), {
                        'sync_from':
                        '%s/%s' %
                        (quote(info['account']), quote(info['container'])),
                        'sync_to':
                        sync_to
                    })
            elif err.http_status == HTTP_NOT_FOUND:
                self.logger.info(
                    _('Not found %(sync_from)r => %(sync_to)r \
                      - object %(obj_name)r'), {
                        'sync_from':
                        '%s/%s' %
                        (quote(info['account']), quote(info['container'])),
                        'sync_to':
                        sync_to,
                        'obj_name':
                        row['name']
                    })
            else:
                self.logger.exception(_('ERROR Syncing %(db_file)s %(row)s'), {
                    'db_file': str(broker),
                    'row': row
                })
            self.container_failures += 1
            self.logger.increment('failures')
            return False
        except (Exception, Timeout) as err:
            self.logger.exception(_('ERROR Syncing %(db_file)s %(row)s'), {
                'db_file': str(broker),
                'row': row
            })
            self.container_failures += 1
            self.logger.increment('failures')
            return False
        return True
Example #9
         for key in ("date", "last-modified"):
             if key in headers:
                 del headers[key]
         if "etag" in headers:
             headers["etag"] = headers["etag"].strip('"')
         headers["x-timestamp"] = row["created_at"]
         headers["x-container-sync-key"] = sync_key
         put_object(sync_to, name=row["name"], headers=headers, contents=FileLikeIter(body), proxy=self.proxy)
         self.container_puts += 1
         self.logger.increment("puts")
         self.logger.timing_since("puts.timing", start_time)
 except ClientException, err:
     if err.http_status == HTTP_UNAUTHORIZED:
         self.logger.info(
             _("Unauth %(sync_from)r => %(sync_to)r"),
             {"sync_from": "%s/%s" % (quote(info["account"]), quote(info["container"])), "sync_to": sync_to},
         )
     elif err.http_status == HTTP_NOT_FOUND:
         self.logger.info(
             _(
                 "Not found %(sync_from)r => %(sync_to)r \
               - object %(obj_name)r"
             ),
             {
                 "sync_from": "%s/%s" % (quote(info["account"]), quote(info["container"])),
                 "sync_to": sync_to,
                 "obj_name": row["name"],
             },
         )
     else:
         self.logger.exception(_("ERROR Syncing %(db_file)s %(row)s"), {"db_file": broker.db_file, "row": row})