Example #1
    def post(self):
        """
        Create a new organization.
        """
        user = get_authenticated_user()
        org_data = request.get_json()
        existing = None

        try:
            existing = model.organization.get_organization(org_data["name"])
        except model.InvalidOrganizationException:
            pass

        if not existing:
            existing = model.user.get_user(org_data["name"])

        if existing:
            msg = "A user or organization with this name already exists"
            raise request_error(message=msg)

        if features.MAILING and not org_data.get("email"):
            raise request_error(message="Email address is required")

        # If recaptcha is enabled, then verify the user is a human.
        if features.RECAPTCHA:
            recaptcha_response = org_data.get("recaptcha_response", "")
            result = recaptcha2.verify(app.config["RECAPTCHA_SECRET_KEY"],
                                       recaptcha_response, get_request_ip())

            if not result["success"]:
                return {
                    "message":
                    "Are you a bot? If not, please revalidate the captcha."
                }, 400

        is_possible_abuser = ip_resolver.is_ip_possible_threat(
            get_request_ip())
        try:
            model.organization.create_organization(
                org_data["name"],
                org_data.get("email"),
                user,
                email_required=features.MAILING,
                is_possible_abuser=is_possible_abuser,
            )
            return "Created", 201
        except model.DataModelException as ex:
            raise request_error(exception=ex)
Example #2
    def get_repository_build(self, uuid):
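        # Look up the build record, compute the caller's read/write/admin permission flags, and wrap everything in a RepositoryBuild datatype.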
        try:
            build = model.build.get_repository_build(uuid)
        except model.InvalidRepositoryBuildException as e:
            raise InvalidRepositoryBuildException(str(e))

        repo_namespace = build.repository_namespace_user_username
        repo_name = build.repository_name

        can_read = ReadRepositoryPermission(repo_namespace, repo_name).can()
        can_write = ModifyRepositoryPermission(repo_namespace, repo_name).can()
        can_admin = AdministerRepositoryPermission(repo_namespace,
                                                   repo_name).can()
        job_config = get_job_config(build.job_config)
        phase, status, error = _get_build_status(build)
        url = userfiles.get_file_url(self.resource_key,
                                     get_request_ip(),
                                     requires_cors=True)

        return RepositoryBuild(
            build.uuid, build.logs_archived,
            repo_namespace, repo_name, can_write, can_read,
            _create_user(build.pull_robot), build.resource_key,
            BuildTrigger(build.trigger.uuid, build.trigger.service.name,
                         _create_user(build.trigger.pull_robot), can_read,
                         can_admin,
                         True), build.display_name, build.display_name,
            build.started, job_config, phase, status, error, url)
Example #3
def get_image_layer(namespace, repository, image_id, headers):
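    # Serve a V1 image layer: prefer a direct-download redirect from storage and fall back to streaming the blob through the registry.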
    permission = ReadRepositoryPermission(namespace, repository)
    repository_ref = registry_model.lookup_repository(namespace, repository, kind_filter="image")

    logger.debug("Checking repo permissions")
    if permission.can() or (repository_ref is not None and repository_ref.is_public):
        if repository_ref is None:
            abort(404)

        legacy_image = registry_model.get_legacy_image(repository_ref, image_id, include_blob=True)
        if legacy_image is None:
            abort(404, "Image %(image_id)s not found", issue="unknown-image", image_id=image_id)

        path = legacy_image.blob.storage_path
        image_pulled_bytes.labels("v1").inc(legacy_image.blob.compressed_size)

        try:
            logger.debug("Looking up the direct download URL for path: %s", path)
            direct_download_url = store.get_direct_download_url(
                legacy_image.blob.placements, path, get_request_ip()
            )
            if direct_download_url:
                logger.debug("Returning direct download URL")
                resp = redirect(direct_download_url)
                return resp

            # Close the database handle here for this process before we send the long download.
            database.close_db_filter(None)
            logger.debug("Streaming layer data")
            return Response(store.stream_read(legacy_image.blob.placements, path), headers=headers)
        except (IOError, AttributeError):
            logger.exception("Image layer data not found")
            abort(404, "Image %(image_id)s not found", issue="unknown-image", image_id=image_id)

    abort(403)
Example #4
        def decorated(*args, **kwargs):
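            # Resolve the namespace name from kwargs or the first positional argument, then enforce any region blacklist before calling the wrapped view.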
            if namespace_name_kwarg:
                namespace_name = kwargs[namespace_name_kwarg]
            else:
                namespace_name = args[0]

            region_blacklist = registry_model.get_cached_namespace_region_blacklist(
                model_cache, namespace_name
            )
            if region_blacklist:
                # Resolve the IP information and block if on the namespace's blacklist.
                remote_ip = get_request_ip()
                resolved_ip_info = ip_resolver.resolve_ip(remote_ip)
                logger.debug("Resolved IP information for IP %s: %s", remote_ip, resolved_ip_info)

                if (
                    resolved_ip_info
                    and resolved_ip_info.country_iso_code
                    and resolved_ip_info.country_iso_code in region_blacklist
                ):
                    if error_class:
                        raise error_class()

                    abort(403, "Pulls of this data have been restricted geographically")

            return wrapped(*args, **kwargs)
Example #5
    def download_url(cls, package_name, digest):
        # Resolve the blob's storage locations and return a signed direct-download URL for the requester's IP.
        blobpath = cls.upload_url(digest)
        locations = model.get_blob_locations(digest)
        if not locations:
            raise_package_not_found(package_name, digest)
        return storage.get_direct_download_url(locations, blobpath,
                                               get_request_ip())
Example #6
def log_action(kind,
               user_or_orgname,
               metadata=None,
               repo=None,
               repo_name=None):
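    # Assemble the audit metadata (including OAuth token context, when present) and record the action along with the requester's IP.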
    if not metadata:
        metadata = {}

    oauth_token = get_validated_oauth_token()
    if oauth_token:
        metadata["oauth_token_id"] = oauth_token.id
        metadata[
            "oauth_token_application_id"] = oauth_token.application.client_id
        metadata["oauth_token_application"] = oauth_token.application.name

    performer = get_authenticated_user()

    if repo_name is not None:
        repo = data_model.repository.get_repository(user_or_orgname, repo_name)

    logs_model.log_action(
        kind,
        user_or_orgname,
        repository=repo,
        performer=performer,
        ip=get_request_ip(),
        metadata=metadata,
    )
Example #7
def buildlogs(build_uuid):
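    # Verify the caller may view this build's logs, then redirect to the archived log file or return the live log entries as JSON.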
    found_build = model.build.get_repository_build(build_uuid)
    if not found_build:
        abort(403)

    repo = found_build.repository
    has_permission = ModifyRepositoryPermission(repo.namespace_user.username,
                                                repo.name).can()
    if features.READER_BUILD_LOGS and not has_permission:
        if ReadRepositoryPermission(
                repo.namespace_user.username,
                repo.name).can() or model.repository.repository_is_public(
                    repo.namespace_user.username, repo.name):
            has_permission = True

    if not has_permission:
        abort(403)

    # If the logs have been archived, just return a URL of the completed archive
    if found_build.logs_archived:
        return redirect(
            log_archive.get_file_url(found_build.uuid, get_request_ip()))

    _, logs = build_logs.get_log_entries(found_build.uuid, 0)
    response = jsonify({"logs": [log for log in logs]})

    response.headers[
        "Content-Disposition"] = "attachment;filename=" + found_build.uuid + ".json"
    return response
Example #8
def build_status_view(build_obj):
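    # Serialize a repository build into the dict used by build status API responses.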
    phase, status, error = _get_build_status(build_obj)
    repo_namespace = build_obj.repository.namespace_user.username
    repo_name = build_obj.repository.name

    can_read = ReadRepositoryPermission(repo_namespace, repo_name).can()
    can_write = ModifyRepositoryPermission(repo_namespace, repo_name).can()
    can_admin = AdministerRepositoryPermission(repo_namespace, repo_name).can()

    job_config = get_job_config(build_obj)

    resp = {
        'id':
        build_obj.uuid,
        'phase':
        phase,
        'started':
        format_date(build_obj.started),
        'display_name':
        build_obj.display_name,
        'status':
        status or {},
        'subdirectory':
        job_config.get('build_subdir', ''),
        'dockerfile_path':
        job_config.get('build_subdir', ''),
        'context':
        job_config.get('context', ''),
        'tags':
        job_config.get('docker_tags', []),
        'manual_user':
        job_config.get('manual_user', None),
        'is_writer':
        can_write,
        'trigger':
        trigger_view(build_obj.trigger, can_read, can_admin, for_build=True),
        'trigger_metadata':
        job_config.get('trigger_metadata', None) if can_read else None,
        'resource_key':
        build_obj.resource_key,
        'pull_robot':
        user_view(build_obj.pull_robot) if build_obj.pull_robot else None,
        'repository': {
            'namespace': repo_namespace,
            'name': repo_name
        },
        'error':
        error,
    }

    if can_write or features.READER_BUILD_LOGS:
        if build_obj.resource_key is not None:
            resp['archive_url'] = user_files.get_file_url(
                build_obj.resource_key, get_request_ip(), requires_cors=True)
        elif job_config.get('archive_url', None):
            resp['archive_url'] = job_config['archive_url']

    return resp
Example #9
def build_status_view(build_obj):
    phase, status, error = _get_build_status(build_obj)
    repo_namespace = build_obj.repository.namespace_user.username
    repo_name = build_obj.repository.name

    can_read = ReadRepositoryPermission(repo_namespace, repo_name).can()
    can_write = ModifyRepositoryPermission(repo_namespace, repo_name).can()
    can_admin = AdministerRepositoryPermission(repo_namespace, repo_name).can()

    job_config = get_job_config(build_obj)

    resp = {
        "id":
        build_obj.uuid,
        "phase":
        phase,
        "started":
        format_date(build_obj.started),
        "display_name":
        build_obj.display_name,
        "status":
        status or {},
        "subdirectory":
        job_config.get("build_subdir", ""),
        "dockerfile_path":
        job_config.get("build_subdir", ""),
        "context":
        job_config.get("context", ""),
        "tags":
        job_config.get("docker_tags", []),
        "manual_user":
        job_config.get("manual_user", None),
        "is_writer":
        can_write,
        "trigger":
        trigger_view(build_obj.trigger, can_read, can_admin, for_build=True),
        "trigger_metadata":
        job_config.get("trigger_metadata", None) if can_read else None,
        "resource_key":
        build_obj.resource_key,
        "pull_robot":
        user_view(build_obj.pull_robot) if build_obj.pull_robot else None,
        "repository": {
            "namespace": repo_namespace,
            "name": repo_name
        },
        "error":
        error,
    }

    if can_write or features.READER_BUILD_LOGS:
        if build_obj.resource_key is not None:
            resp["archive_url"] = user_files.get_file_url(
                build_obj.resource_key, get_request_ip(), requires_cors=True)
        elif job_config.get("archive_url", None):
            resp["archive_url"] = job_config["archive_url"]

    return resp
Example #10
    def post(self):
        """ Create a new organization. """
        user = get_authenticated_user()
        org_data = request.get_json()
        existing = None

        try:
            existing = model.organization.get_organization(org_data['name'])
        except model.InvalidOrganizationException:
            pass

        if not existing:
            existing = model.user.get_user(org_data['name'])

        if existing:
            msg = 'A user or organization with this name already exists'
            raise request_error(message=msg)

        if features.MAILING and not org_data.get('email'):
            raise request_error(message='Email address is required')

        # If recaptcha is enabled, then verify the user is a human.
        if features.RECAPTCHA:
            recaptcha_response = org_data.get('recaptcha_response', '')
            result = recaptcha2.verify(app.config['RECAPTCHA_SECRET_KEY'],
                                       recaptcha_response, get_request_ip())

            if not result['success']:
                return {
                    'message':
                    'Are you a bot? If not, please revalidate the captcha.'
                }, 400

        is_possible_abuser = ip_resolver.is_ip_possible_threat(
            get_request_ip())
        try:
            model.organization.create_organization(
                org_data['name'],
                org_data.get('email'),
                user,
                email_required=features.MAILING,
                is_possible_abuser=is_possible_abuser)
            return 'Created', 201
        except model.DataModelException as ex:
            raise request_error(exception=ex)
Example #11
def delete_service_key(service, kid):
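    # Deletion is allowed when the request is signed by the key being deleted (self-signed) or by an approved key for the same service.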
    jwt_header = request.headers.get(JWT_HEADER_NAME, "")
    match = jwtutil.TOKEN_REGEX.match(jwt_header)
    if match is None:
        abort(400)

    encoded_jwt = match.group(1)

    signer_kid = _signer_kid(encoded_jwt)
    signer_key = _lookup_service_key(service, signer_kid, approved_only=False)

    self_signed = kid == signer_kid
    approved_key_for_service = signer_key.approval is not None

    if self_signed or approved_key_for_service:
        _validate_jwt(encoded_jwt, signer_key.jwk, service)

        try:
            model.delete_service_key(kid)
        except ServiceKeyDoesNotExist:
            abort(404)

        logs_model.log_action(
            "service_key_delete",
            ip=get_request_ip(),
            metadata={
                "kid": kid,
                "signer_kid": signer_key.kid,
                "service": service,
                "name": signer_key.name,
                "user_agent": request.headers.get("User-Agent"),
                "ip": get_request_ip(),
            },
        )

        return make_response("", 204)

    abort(403)
Example #12
    def post(self):
        """
        Request a password recovery email.
        """
        def redact(value):
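            # Keep roughly the first and last thirds of the value and mask the middle characters with bullets.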
            threshold = max((len(value) / 3) - 1, 1)
            v = ""
            for i in range(0, len(value)):
                if i < threshold or i >= len(value) - threshold:
                    v = v + value[i]
                else:
                    v = v + "\u2022"

            return v

        recovery_data = request.get_json()

        # If recaptcha is enabled, then verify the user is a human.
        if features.RECAPTCHA:
            recaptcha_response = recovery_data.get("recaptcha_response", "")
            result = recaptcha2.verify(app.config["RECAPTCHA_SECRET_KEY"],
                                       recaptcha_response, get_request_ip())

            if not result["success"]:
                return {
                    "message":
                    "Are you a bot? If not, please revalidate the captcha."
                }, 400

        email = recovery_data["email"]
        user = model.user.find_user_by_email(email)
        if not user:
            return {
                "status": "sent",
            }

        if user.organization:
            send_org_recovery_email(user,
                                    model.organization.get_admin_users(user))
            return {
                "status": "org",
                "orgemail": email,
                "orgname": redact(user.username),
            }

        confirmation_code = model.user.create_reset_password_email_code(email)
        send_recovery_email(email, confirmation_code)
        return {
            "status": "sent",
        }
Example #13
def delete_service_key(service, kid):
    jwt_header = request.headers.get(JWT_HEADER_NAME, '')
    match = jwtutil.TOKEN_REGEX.match(jwt_header)
    if match is None:
        abort(400)

    encoded_jwt = match.group(1)

    signer_kid = _signer_kid(encoded_jwt)
    signer_key = _lookup_service_key(service, signer_kid, approved_only=False)

    self_signed = kid == signer_kid
    approved_key_for_service = signer_key.approval is not None

    if self_signed or approved_key_for_service:
        _validate_jwt(encoded_jwt, signer_key.jwk, service)

        try:
            model.delete_service_key(kid)
        except ServiceKeyDoesNotExist:
            abort(404)

        logs_model.log_action('service_key_delete',
                              ip=get_request_ip(),
                              metadata={
                                  'kid': kid,
                                  'signer_kid': signer_key.kid,
                                  'service': service,
                                  'name': signer_key.name,
                                  'user_agent':
                                  request.headers.get('User-Agent'),
                                  'ip': get_request_ip(),
                              })

        return make_response('', 204)

    abort(403)
Example #14
def get_image_layer(namespace, repository, image_id, headers):
    permission = ReadRepositoryPermission(namespace, repository)
    repository_ref = registry_model.lookup_repository(namespace,
                                                      repository,
                                                      kind_filter='image')

    logger.debug('Checking repo permissions')
    if permission.can() or (repository_ref is not None
                            and repository_ref.is_public):
        if repository_ref is None:
            abort(404)

        legacy_image = registry_model.get_legacy_image(repository_ref,
                                                       image_id,
                                                       include_blob=True)
        if legacy_image is None:
            abort(404,
                  'Image %(image_id)s not found',
                  issue='unknown-image',
                  image_id=image_id)

        path = legacy_image.blob.storage_path
        metric_queue.pull_byte_count.Inc(legacy_image.blob.compressed_size,
                                         labelvalues=['v1'])

        try:
            logger.debug('Looking up the direct download URL for path: %s',
                         path)
            direct_download_url = store.get_direct_download_url(
                legacy_image.blob.placements, path, get_request_ip())
            if direct_download_url:
                logger.debug('Returning direct download URL')
                resp = redirect(direct_download_url)
                return resp

            # Close the database handle here for this process before we send the long download.
            database.close_db_filter(None)
            logger.debug('Streaming layer data')
            return Response(store.stream_read(legacy_image.blob.placements,
                                              path),
                            headers=headers)
        except (IOError, AttributeError):
            logger.exception('Image layer data not found')
            abort(404,
                  'Image %(image_id)s not found',
                  issue='unknown-image',
                  image_id=image_id)

    abort(403)
Example #15
    def post(self):
        """ Request a password recovery email."""
        def redact(value):
            threshold = max((len(value) / 3) - 1, 1)
            v = ''
            for i in range(0, len(value)):
                if i < threshold or i >= len(value) - threshold:
                    v = v + value[i]
                else:
                    v = v + u'\u2022'

            return v

        recovery_data = request.get_json()

        # If recaptcha is enabled, then verify the user is a human.
        if features.RECAPTCHA:
            recaptcha_response = recovery_data.get('recaptcha_response', '')
            result = recaptcha2.verify(app.config['RECAPTCHA_SECRET_KEY'],
                                       recaptcha_response, get_request_ip())

            if not result['success']:
                return {
                    'message':
                    'Are you a bot? If not, please revalidate the captcha.'
                }, 400

        email = recovery_data['email']
        user = model.user.find_user_by_email(email)
        if not user:
            return {
                'status': 'sent',
            }

        if user.organization:
            send_org_recovery_email(user,
                                    model.organization.get_admin_users(user))
            return {
                'status': 'org',
                'orgemail': email,
                'orgname': redact(user.username),
            }

        confirmation_code = model.user.create_reset_password_email_code(email)
        send_recovery_email(email, confirmation_code)
        return {
            'status': 'sent',
        }
Example #16
    def captcha_func():
        recaptcha_response = request.values.get("recaptcha_response", "")
        result = recaptcha2.verify(app.config["RECAPTCHA_SECRET_KEY"],
                                   recaptcha_response, get_request_ip())

        if not result["success"]:
            abort(400)

        # Save that the captcha was verified.
        session["captcha_verified"] = int(time.time())

        # Redirect to the normal OAuth flow again, so that the user can now create an account.
        csrf_token = generate_csrf_token(OAUTH_CSRF_TOKEN_NAME)
        login_scopes = login_service.get_login_scopes()
        auth_url = login_service.get_auth_url(url_scheme_and_hostname, "",
                                              csrf_token, login_scopes)
        return redirect(auth_url)
Example #17
def download_blob(namespace_name, repo_name, digest):
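    # Serve a blob for a V2 pull: redirect to a direct download URL when storage supports it, otherwise stream the blob contents.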
    # Find the blob.
    blob = registry_model.get_cached_repo_blob(model_cache, namespace_name, repo_name, digest)
    if blob is None:
        raise BlobUnknown()

    # Build the response headers.
    headers = {"Docker-Content-Digest": digest}

    # If our storage supports range requests, let the client know.
    if storage.get_supports_resumable_downloads(blob.placements):
        headers["Accept-Ranges"] = "bytes"

    image_pulled_bytes.labels("v2").inc(blob.compressed_size)

    # Short-circuit by redirecting if the storage supports it.
    path = blob.storage_path
    logger.debug("Looking up the direct download URL for path: %s", path)
    direct_download_url = storage.get_direct_download_url(blob.placements, path, get_request_ip())
    if direct_download_url:
        logger.debug("Returning direct download URL")
        resp = redirect(direct_download_url)
        resp.headers.extend(headers)
        return resp

    # Close the database connection before we stream the download.
    logger.debug("Closing database connection before streaming layer data")
    headers.update(
        {
            "Content-Length": blob.compressed_size,
            "Content-Type": BLOB_CONTENT_TYPE,
        }
    )

    with database.CloseForLongOperation(app.config):
        # Stream the response to the client.
        return Response(
            storage.stream_read(blob.placements, path),
            headers=headers,
        )
Example #18
def download_blob(namespace_name, repo_name, digest):
    # Find the blob.
    blob = registry_model.get_cached_repo_blob(model_cache, namespace_name,
                                               repo_name, digest)
    if blob is None:
        raise BlobUnknown()

    # Build the response headers.
    headers = {'Docker-Content-Digest': digest}

    # If our storage supports range requests, let the client know.
    if storage.get_supports_resumable_downloads(blob.placements):
        headers['Accept-Ranges'] = 'bytes'

    metric_queue.pull_byte_count.Inc(blob.compressed_size, labelvalues=['v2'])

    # Short-circuit by redirecting if the storage supports it.
    path = blob.storage_path
    logger.debug('Looking up the direct download URL for path: %s', path)
    direct_download_url = storage.get_direct_download_url(
        blob.placements, path, get_request_ip())
    if direct_download_url:
        logger.debug('Returning direct download URL')
        resp = redirect(direct_download_url)
        resp.headers.extend(headers)
        return resp

    # Close the database connection before we stream the download.
    logger.debug('Closing database connection before streaming layer data')
    # Note: dict.update() returns None, so merge the content headers into the dict
    # before constructing the Response rather than inline in the keyword argument.
    headers.update({
        'Content-Length': blob.compressed_size,
        'Content-Type': BLOB_CONTENT_TYPE,
    })

    with database.CloseForLongOperation(app.config):
        # Stream the response to the client.
        return Response(
            storage.stream_read(blob.placements, path),
            headers=headers,
        )
Example #19
def get_logs_or_log_url(build):
    # If the logs have been archived, just return a URL of the completed archive
    if build.logs_archived:
        return {
            "logs_url":
            log_archive.get_file_url(build.uuid,
                                     get_request_ip(),
                                     requires_cors=True)
        }
    start = int(request.args.get("start", 0))

    try:
        count, logs = build_logs.get_log_entries(build.uuid, start)
    except BuildStatusRetrievalError:
        count, logs = (0, [])

    response_obj = {}
    response_obj.update({
        "start": start,
        "total": count,
        "logs": [log for log in logs],
    })

    return response_obj
Example #20
    def post(self):
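        # Superusers can create a pre-shared service key; the private key is returned once and never persisted, and the key is auto-approved.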
        if SuperUserPermission().can():
            body = request.get_json()
            key_name = body.get('name', '')
            if not validate_service_key_name(key_name):
                raise InvalidRequest('Invalid service key friendly name: %s' %
                                     key_name)

            # Ensure we have a valid expiration date if specified.
            expiration_date = body.get('expiration', None)
            if expiration_date is not None:
                try:
                    expiration_date = datetime.utcfromtimestamp(
                        float(expiration_date))
                except ValueError as ve:
                    raise InvalidRequest('Invalid expiration date: %s' % ve)

                if expiration_date <= datetime.now():
                    raise InvalidRequest(
                        'Expiration date cannot be in the past')

            # Create the metadata for the key.
            user = get_authenticated_user()
            metadata = body.get('metadata', {})
            metadata.update({
                'created_by': 'Quay Superuser Panel',
                'creator': user.username,
                'ip': get_request_ip(),
            })

            # Generate a key with a private key that we *never save*.
            (private_key,
             key_id) = pre_oci_model.generate_service_key(body['service'],
                                                          expiration_date,
                                                          metadata=metadata,
                                                          name=key_name)
            # Auto-approve the service key.
            pre_oci_model.approve_service_key(key_id,
                                              user,
                                              ServiceKeyApprovalType.SUPERUSER,
                                              notes=body.get('notes', ''))

            # Log the creation and auto-approval of the service key.
            key_log_metadata = {
                'kid': key_id,
                'preshared': True,
                'service': body['service'],
                'name': key_name,
                'expiration_date': expiration_date,
                'auto_approved': True,
            }

            log_action('service_key_create', None, key_log_metadata)
            log_action('service_key_approve', None, key_log_metadata)

            return jsonify({
                'kid':
                key_id,
                'name':
                key_name,
                'service':
                body['service'],
                'public_key':
                private_key.publickey().exportKey('PEM'),
                'private_key':
                private_key.exportKey('PEM'),
            })

        raise Unauthorized()
Example #21
    def post(self):
        if SuperUserPermission().can():
            body = request.get_json()
            key_name = body.get("name", "")
            if not validate_service_key_name(key_name):
                raise InvalidRequest("Invalid service key friendly name: %s" % key_name)

            # Ensure we have a valid expiration date if specified.
            expiration_date = body.get("expiration", None)
            if expiration_date is not None:
                try:
                    expiration_date = datetime.utcfromtimestamp(float(expiration_date))
                except ValueError as ve:
                    raise InvalidRequest("Invalid expiration date: %s" % ve)

                if expiration_date <= datetime.now():
                    raise InvalidRequest("Expiration date cannot be in the past")

            # Create the metadata for the key.
            user = get_authenticated_user()
            metadata = body.get("metadata", {})
            metadata.update(
                {
                    "created_by": "Quay Superuser Panel",
                    "creator": user.username,
                    "ip": get_request_ip(),
                }
            )

            # Generate a key with a private key that we *never save*.
            (private_key, key_id) = pre_oci_model.generate_service_key(
                body["service"], expiration_date, metadata=metadata, name=key_name
            )
            # Auto-approve the service key.
            pre_oci_model.approve_service_key(
                key_id, user, ServiceKeyApprovalType.SUPERUSER, notes=body.get("notes", "")
            )

            # Log the creation and auto-approval of the service key.
            key_log_metadata = {
                "kid": key_id,
                "preshared": True,
                "service": body["service"],
                "name": key_name,
                "expiration_date": expiration_date,
                "auto_approved": True,
            }

            log_action("service_key_create", None, key_log_metadata)
            log_action("service_key_approve", None, key_log_metadata)

            return jsonify(
                {
                    "kid": key_id,
                    "name": key_name,
                    "service": body["service"],
                    "public_key": private_key.publickey().exportKey("PEM").decode("ascii"),
                    "private_key": private_key.exportKey("PEM").decode("ascii"),
                }
            )

        raise Unauthorized()
Example #22
def start_build(repository, prepared_build, pull_robot_name=None):
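    # Validate that the repository accepts builds, create the build record and its scoped access token, enqueue the job, then log and notify.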
    # Ensure that builds are only run in image repositories.
    if repository.kind.name != "image":
        raise Exception(
            "Attempt to start a build for application repository %s" %
            repository.id)

    # Ensure the repository isn't in mirror or read-only mode.
    if repository.state != RepositoryState.NORMAL:
        raise Exception(
            ("Attempt to start a build for a non-normal repository: %s %s" %
             (repository.id, repository.state)))

    # Ensure that disabled triggers are not run.
    if prepared_build.trigger is not None and not prepared_build.trigger.enabled:
        raise BuildTriggerDisabledException

    if repository.namespace_user.maximum_queued_builds_count is not None:
        queue_item_canonical_name = [repository.namespace_user.username]
        alive_builds = dockerfile_build_queue.num_alive_jobs(
            queue_item_canonical_name)
        if alive_builds >= repository.namespace_user.maximum_queued_builds_count:
            logger.debug(
                "Prevented queueing of build under namespace %s due to reaching max: %s",
                repository.namespace_user.username,
                repository.namespace_user.maximum_queued_builds_count,
            )
            raise MaximumBuildsQueuedException()

    host = app.config["SERVER_HOSTNAME"]
    repo_path = "%s/%s/%s" % (host, repository.namespace_user.username,
                              repository.name)

    new_token = model.token.create_access_token(
        repository,
        "write",
        kind="build-worker",
        friendly_name="Repository Build Token")
    logger.debug(
        "Creating build %s with repo %s tags %s",
        prepared_build.build_name,
        repo_path,
        prepared_build.tags,
    )

    job_config = {
        "docker_tags":
        prepared_build.tags,
        "registry":
        host,
        "build_subdir":
        prepared_build.subdirectory,
        "context":
        prepared_build.context,
        "trigger_metadata":
        prepared_build.metadata or {},
        "is_manual":
        prepared_build.is_manual,
        "manual_user":
        get_authenticated_user().username
        if get_authenticated_user() else None,
        "archive_url":
        prepared_build.archive_url,
    }

    with app.config["DB_TRANSACTION_FACTORY"](db):
        build_request = model.build.create_repository_build(
            repository,
            new_token,
            job_config,
            prepared_build.dockerfile_id,
            prepared_build.build_name,
            prepared_build.trigger,
            pull_robot_name=pull_robot_name,
        )

        pull_creds = model.user.get_pull_credentials(
            pull_robot_name) if pull_robot_name else None

        json_data = json.dumps({
            "build_uuid": build_request.uuid,
            "pull_credentials": pull_creds
        })

        queue_id = dockerfile_build_queue.put(
            [repository.namespace_user.username, repository.name],
            json_data,
            retries_remaining=3)

        build_request.queue_id = queue_id
        build_request.save()

    # Add the build to the repo's log and spawn the build_queued notification.
    event_log_metadata = {
        "build_id":
        build_request.uuid,
        "docker_tags":
        prepared_build.tags,
        "repo":
        repository.name,
        "namespace":
        repository.namespace_user.username,
        "is_manual":
        prepared_build.is_manual,
        "manual_user":
        get_authenticated_user().username
        if get_authenticated_user() else None,
    }

    if prepared_build.trigger:
        event_log_metadata["trigger_id"] = prepared_build.trigger.uuid
        event_log_metadata[
            "trigger_kind"] = prepared_build.trigger.service.name
        event_log_metadata["trigger_metadata"] = prepared_build.metadata or {}

    logs_model.log_action(
        "build_dockerfile",
        repository.namespace_user.username,
        ip=get_request_ip(),
        metadata=event_log_metadata,
        repository=repository,
    )

    # TODO: remove when more endpoints have been converted to using interfaces
    repo = AttrDict({
        "namespace_name": repository.namespace_user.username,
        "name": repository.name,
    })

    spawn_notification(
        repo,
        "build_queued",
        event_log_metadata,
        subpage="build/%s" % build_request.uuid,
        pathargs=["build", build_request.uuid],
    )

    return build_request
Example #23
def put_service_key(service, kid):
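    # A self-signed key is created and awaits approval; a request signed by an existing approved key rotates (replaces) that key.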
    metadata = {"ip": get_request_ip()}

    rotation_duration = request.args.get("rotation", None)
    expiration_date = request.args.get("expiration", None)
    if expiration_date is not None:
        try:
            expiration_date = datetime.utcfromtimestamp(float(expiration_date))
        except ValueError:
            logger.exception("Error parsing expiration date on key")
            abort(400)

    try:
        jwk = request.get_json()
    except ValueError:
        logger.exception("Error parsing JWK")
        abort(400)

    jwt_header = request.headers.get(JWT_HEADER_NAME, "")
    match = jwtutil.TOKEN_REGEX.match(jwt_header)
    if match is None:
        logger.error("Could not find matching bearer token")
        abort(400)

    encoded_jwt = match.group(1)

    _validate_jwk(jwk)

    signer_kid = _signer_kid(encoded_jwt, allow_none=True)
    if kid == signer_kid or signer_kid is None:
        # The key is self-signed. Create a new instance and await approval.
        _validate_jwt(encoded_jwt, jwk, service)
        model.create_service_key("",
                                 kid,
                                 service,
                                 jwk,
                                 metadata,
                                 expiration_date,
                                 rotation_duration=rotation_duration)

        logs_model.log_action(
            "service_key_create",
            ip=get_request_ip(),
            metadata={
                "kid": kid,
                "preshared": False,
                "service": service,
                "name": "",
                "expiration_date": expiration_date,
                "user_agent": request.headers.get("User-Agent"),
                "ip": get_request_ip(),
            },
        )

        return make_response("", 202)

    # Key is going to be rotated.
    metadata.update({"created_by": "Key Rotation"})
    signer_key = _lookup_service_key(service, signer_kid)
    signer_jwk = signer_key.jwk

    _validate_jwt(encoded_jwt, signer_jwk, service)

    try:
        model.replace_service_key(signer_key.kid, kid, jwk, metadata,
                                  expiration_date)
    except ServiceKeyDoesNotExist:
        abort(404)

    logs_model.log_action(
        "service_key_rotate",
        ip=get_request_ip(),
        metadata={
            "kid": kid,
            "signer_kid": signer_key.kid,
            "service": service,
            "name": signer_key.name,
            "expiration_date": expiration_date,
            "user_agent": request.headers.get("User-Agent"),
            "ip": get_request_ip(),
        },
    )

    return make_response("", 200)
Example #24
def start_build(repository, prepared_build, pull_robot_name=None):
    # Ensure that builds are only run in image repositories.
    if repository.kind.name != 'image':
        raise Exception(
            'Attempt to start a build for application repository %s' %
            repository.id)

    # Ensure the repository isn't in mirror or read-only mode.
    if repository.state != RepositoryState.NORMAL:
        raise Exception(
            ('Attempt to start a build for a non-normal repository: %s %s' %
             (repository.id, repository.state)))

    # Ensure that disabled triggers are not run.
    if prepared_build.trigger is not None and not prepared_build.trigger.enabled:
        raise BuildTriggerDisabledException

    if repository.namespace_user.maximum_queued_builds_count is not None:
        queue_item_canonical_name = [repository.namespace_user.username]
        alive_builds = dockerfile_build_queue.num_alive_jobs(
            queue_item_canonical_name)
        if alive_builds >= repository.namespace_user.maximum_queued_builds_count:
            logger.debug(
                'Prevented queueing of build under namespace %s due to reaching max: %s',
                repository.namespace_user.username,
                repository.namespace_user.maximum_queued_builds_count)
            raise MaximumBuildsQueuedException()

    host = app.config['SERVER_HOSTNAME']
    repo_path = '%s/%s/%s' % (host, repository.namespace_user.username,
                              repository.name)

    new_token = model.token.create_access_token(
        repository,
        'write',
        kind='build-worker',
        friendly_name='Repository Build Token')
    logger.debug('Creating build %s with repo %s tags %s',
                 prepared_build.build_name, repo_path, prepared_build.tags)

    job_config = {
        'docker_tags':
        prepared_build.tags,
        'registry':
        host,
        'build_subdir':
        prepared_build.subdirectory,
        'context':
        prepared_build.context,
        'trigger_metadata':
        prepared_build.metadata or {},
        'is_manual':
        prepared_build.is_manual,
        'manual_user':
        get_authenticated_user().username
        if get_authenticated_user() else None,
        'archive_url':
        prepared_build.archive_url
    }

    with app.config['DB_TRANSACTION_FACTORY'](db):
        build_request = model.build.create_repository_build(
            repository,
            new_token,
            job_config,
            prepared_build.dockerfile_id,
            prepared_build.build_name,
            prepared_build.trigger,
            pull_robot_name=pull_robot_name)

        pull_creds = model.user.get_pull_credentials(
            pull_robot_name) if pull_robot_name else None

        json_data = json.dumps({
            'build_uuid': build_request.uuid,
            'pull_credentials': pull_creds
        })

        queue_id = dockerfile_build_queue.put(
            [repository.namespace_user.username, repository.name],
            json_data,
            retries_remaining=3)

        build_request.queue_id = queue_id
        build_request.save()

    # Add the queueing of the build to the metrics queue.
    metric_queue.repository_build_queued.Inc(
        labelvalues=[repository.namespace_user.username, repository.name])

    # Add the build to the repo's log and spawn the build_queued notification.
    event_log_metadata = {
        'build_id':
        build_request.uuid,
        'docker_tags':
        prepared_build.tags,
        'repo':
        repository.name,
        'namespace':
        repository.namespace_user.username,
        'is_manual':
        prepared_build.is_manual,
        'manual_user':
        get_authenticated_user().username if get_authenticated_user() else None
    }

    if prepared_build.trigger:
        event_log_metadata['trigger_id'] = prepared_build.trigger.uuid
        event_log_metadata[
            'trigger_kind'] = prepared_build.trigger.service.name
        event_log_metadata['trigger_metadata'] = prepared_build.metadata or {}

    logs_model.log_action('build_dockerfile',
                          repository.namespace_user.username,
                          ip=get_request_ip(),
                          metadata=event_log_metadata,
                          repository=repository)

    # TODO: remove when more endpoints have been converted to using interfaces
    repo = AttrDict({
        'namespace_name': repository.namespace_user.username,
        'name': repository.name,
    })

    spawn_notification(repo,
                       'build_queued',
                       event_log_metadata,
                       subpage='build/%s' % build_request.uuid,
                       pathargs=['build', build_request.uuid])

    return build_request
Example #25
def put_service_key(service, kid):
    metadata = {'ip': get_request_ip()}

    rotation_duration = request.args.get('rotation', None)
    expiration_date = request.args.get('expiration', None)
    if expiration_date is not None:
        try:
            expiration_date = datetime.utcfromtimestamp(float(expiration_date))
        except ValueError:
            logger.exception('Error parsing expiration date on key')
            abort(400)

    try:
        jwk = request.get_json()
    except ValueError:
        logger.exception('Error parsing JWK')
        abort(400)

    jwt_header = request.headers.get(JWT_HEADER_NAME, '')
    match = jwtutil.TOKEN_REGEX.match(jwt_header)
    if match is None:
        logger.error('Could not find matching bearer token')
        abort(400)

    encoded_jwt = match.group(1)

    _validate_jwk(jwk)

    signer_kid = _signer_kid(encoded_jwt, allow_none=True)
    if kid == signer_kid or signer_kid is None:
        # The key is self-signed. Create a new instance and await approval.
        _validate_jwt(encoded_jwt, jwk, service)
        model.create_service_key('',
                                 kid,
                                 service,
                                 jwk,
                                 metadata,
                                 expiration_date,
                                 rotation_duration=rotation_duration)

        logs_model.log_action('service_key_create',
                              ip=get_request_ip(),
                              metadata={
                                  'kid': kid,
                                  'preshared': False,
                                  'service': service,
                                  'name': '',
                                  'expiration_date': expiration_date,
                                  'user_agent':
                                  request.headers.get('User-Agent'),
                                  'ip': get_request_ip(),
                              })

        return make_response('', 202)

    # Key is going to be rotated.
    metadata.update({'created_by': 'Key Rotation'})
    signer_key = _lookup_service_key(service, signer_kid)
    signer_jwk = signer_key.jwk

    _validate_jwt(encoded_jwt, signer_jwk, service)

    try:
        model.replace_service_key(signer_key.kid, kid, jwk, metadata,
                                  expiration_date)
    except ServiceKeyDoesNotExist:
        abort(404)

    logs_model.log_action('service_key_rotate',
                          ip=get_request_ip(),
                          metadata={
                              'kid': kid,
                              'signer_kid': signer_key.kid,
                              'service': service,
                              'name': signer_key.name,
                              'expiration_date': expiration_date,
                              'user_agent': request.headers.get('User-Agent'),
                              'ip': get_request_ip(),
                          })

    return make_response('', 200)
Example #26
    def post(self):
        """
        Create a new user.
        """
        if app.config["AUTHENTICATION_TYPE"] != "Database":
            abort(404)

        user_data = request.get_json()

        invite_code = user_data.get("invite_code", "")
        existing_user = model.user.get_nonrobot_user(user_data["username"])
        if existing_user:
            raise request_error(message="The username already exists")

        # Ensure an e-mail address was specified if required.
        if features.MAILING and not user_data.get("email"):
            raise request_error(message="Email address is required")

        # If invite-only user creation is turned on and no invite code was sent, return an error.
        # Technically, this is handled by the can_create_user call below as well, but it makes
        # a nicer error.
        if features.INVITE_ONLY_USER_CREATION and not invite_code:
            raise request_error(message="Cannot create non-invited user")

        # Ensure that this user can be created.
        blacklisted_domains = app.config.get("BLACKLISTED_EMAIL_DOMAINS", [])
        if not can_create_user(user_data.get("email"), blacklisted_domains=blacklisted_domains):
            raise request_error(
                message="Creation of a user account for this e-mail is disabled; please contact an administrator"
            )

        # If recaptcha is enabled, then verify the user is a human.
        if features.RECAPTCHA:
            recaptcha_response = user_data.get("recaptcha_response", "")
            result = recaptcha2.verify(
                app.config["RECAPTCHA_SECRET_KEY"], recaptcha_response, get_request_ip()
            )

            if not result["success"]:
                return {"message": "Are you a bot? If not, please revalidate the captcha."}, 400

        is_possible_abuser = ip_resolver.is_ip_possible_threat(get_request_ip())
        try:
            prompts = model.user.get_default_user_prompts(features)
            new_user = model.user.create_user(
                user_data["username"],
                user_data["password"],
                user_data.get("email"),
                auto_verify=not features.MAILING,
                email_required=features.MAILING,
                is_possible_abuser=is_possible_abuser,
                prompts=prompts,
            )

            email_address_confirmed = handle_invite_code(invite_code, new_user)
            if features.MAILING and not email_address_confirmed:
                confirmation_code = model.user.create_confirm_email_code(new_user)
                send_confirmation_email(new_user.username, new_user.email, confirmation_code)
                return {"awaiting_verification": True}
            else:
                success, headers = common_login(new_user.uuid)
                if not success:
                    return {"message": "Could not login. Is your account inactive?"}, 403

                return user_view(new_user), 200, headers
        except model.user.DataModelException as ex:
            raise request_error(exception=ex)
Example #27
def track_and_log(event_name,
                  repo_obj,
                  analytics_name=None,
                  analytics_sample=1,
                  **kwargs):
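    # Record the event in the user events system and analytics (sampled), then write an audit log entry enriched with the resolved request IP.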
    repo_name = repo_obj.name
    namespace_name = repo_obj.namespace_name
    metadata = {
        "repo": repo_name,
        "namespace": namespace_name,
        "user-agent": request.user_agent.string,
    }
    metadata.update(kwargs)

    is_free_namespace = False
    if hasattr(repo_obj, "is_free_namespace"):
        is_free_namespace = repo_obj.is_free_namespace

    # Add auth context metadata.
    analytics_id = "anonymous"
    auth_context = get_authenticated_context()
    if auth_context is not None:
        analytics_id, context_metadata = auth_context.analytics_id_and_public_metadata(
        )
        metadata.update(context_metadata)

    # Publish the user event (if applicable)
    logger.debug("Checking publishing %s to the user events system",
                 event_name)
    if auth_context and auth_context.has_nonrobot_user:
        logger.debug("Publishing %s to the user events system", event_name)
        user_event_data = {
            "action": event_name,
            "repository": repo_name,
            "namespace": namespace_name,
        }

        event = userevents.get_event(auth_context.authed_user.username)
        event.publish_event_data("docker-cli", user_event_data)

    # Save the action to mixpanel.
    if random.random() < analytics_sample:
        if analytics_name is None:
            analytics_name = event_name

        logger.debug("Logging the %s to analytics engine", analytics_name)

        request_parsed = urlparse(request.url_root)
        extra_params = {
            "repository": "%s/%s" % (namespace_name, repo_name),
            "user-agent": request.user_agent.string,
            "hostname": request_parsed.hostname,
        }

        analytics.track(analytics_id, analytics_name, extra_params)

    # Add the resolved information to the metadata.
    logger.debug("Resolving IP address %s", get_request_ip())
    resolved_ip = ip_resolver.resolve_ip(get_request_ip())
    if resolved_ip is not None:
        metadata["resolved_ip"] = resolved_ip._asdict()

    logger.debug("Resolved IP address %s", get_request_ip())

    # Log the action to the database.
    logger.debug("Logging the %s to logs system", event_name)
    try:
        logs_model.log_action(
            event_name,
            namespace_name,
            performer=get_authenticated_user(),
            ip=get_request_ip(),
            metadata=metadata,
            repository=repo_obj,
            is_free_namespace=is_free_namespace,
        )
        logger.debug("Track and log of %s complete", event_name)
    except ReadOnlyModeException:
        pass