Example #1
def delete_channel(namespace, package_name, channel_name):
  _check_channel_name(channel_name)
  reponame = repo_name(namespace, package_name)
  result = cnr_registry.delete_channel(reponame, channel_name, channel_class=Channel)
  logs_model.log_action('delete_tag', namespace, repository_name=package_name,
                        metadata={'channel': channel_name})
  return jsonify(result)
Example #2
def emit_log(mirror,
             log_kind,
             verb,
             message,
             tag=None,
             tags=None,
             stdout=None,
             stderr=None):
    logs_model.log_action(
        log_kind,
        namespace_name=mirror.repository.namespace_user.username,
        repository_name=mirror.repository.name,
        metadata={
            "verb": verb,
            "namespace": mirror.repository.namespace_user.username,
            "repo": mirror.repository.name,
            "message": message,
            "tag": tag,
            "tags": tags,
            "stdout": stdout,
            "stderr": stderr,
        },
    )

    if log_kind in (
            "repo_mirror_sync_started",
            "repo_mirror_sync_failed",
            "repo_mirror_sync_success",
    ):
        spawn_notification(wrap_repository(mirror.repository), log_kind,
                           {"message": message})
Example #3
def generate_key(service, name, expiration_date=None, notes=None):
  metadata = {
    'created_by': 'CLI tool',
  }

  # Generate a key with a private key that we *never save*.
  (private_key, key) = model.service_keys.generate_service_key(service, expiration_date,
                                                               metadata=metadata,
                                                               name=name)
  # Auto-approve the service key.
  model.service_keys.approve_service_key(key.kid, ServiceKeyApprovalType.AUTOMATIC, notes=notes or '')

  # Log the creation and auto-approval of the service key.
  key_log_metadata = {
    'kid': key.kid,
    'preshared': True,
    'service': service,
    'name': name,
    'expiration_date': expiration_date,
    'auto_approved': True,
  }

  logs_model.log_action('service_key_create', metadata=key_log_metadata)
  logs_model.log_action('service_key_approve', metadata=key_log_metadata)
  return private_key, key.kid
Example #4
def log_action(kind,
               user_or_orgname,
               metadata=None,
               repo=None,
               repo_name=None):
    if not metadata:
        metadata = {}

    oauth_token = get_validated_oauth_token()
    if oauth_token:
        metadata["oauth_token_id"] = oauth_token.id
        metadata["oauth_token_application_id"] = oauth_token.application.client_id
        metadata["oauth_token_application"] = oauth_token.application.name

    performer = get_authenticated_user()

    if repo_name is not None:
        repo = data_model.repository.get_repository(user_or_orgname, repo_name)

    logs_model.log_action(
        kind,
        user_or_orgname,
        repository=repo,
        performer=performer,
        ip=get_request_ip(),
        metadata=metadata,
    )
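A hedged sketch of calling the log_action wrapper above from a request handler; it assumes an active Flask request context (so get_authenticated_user, get_validated_oauth_token, and get_request_ip resolve), and the kind, organization, and repository names are invented for illustration.

# Hypothetical invocation from an API endpoint: record a permission change on a
# repository under an organization. All names below are placeholders.
log_action(
    "change_repo_permission",
    "someorg",
    metadata={"username": "someuser", "role": "read"},
    repo_name="somerepo",
)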
Example #5
def generate_key(service, name, expiration_date=None, notes=None):
    metadata = {
        "created_by": "CLI tool",
    }

    # Generate a key with a private key that we *never save*.
    (private_key,
     key) = model.service_keys.generate_service_key(service,
                                                    expiration_date,
                                                    metadata=metadata,
                                                    name=name)
    # Auto-approve the service key.
    model.service_keys.approve_service_key(key.kid,
                                           ServiceKeyApprovalType.AUTOMATIC,
                                           notes=notes or "")

    # Log the creation and auto-approval of the service key.
    key_log_metadata = {
        "kid": key.kid,
        "preshared": True,
        "service": service,
        "name": name,
        "expiration_date": expiration_date,
        "auto_approved": True,
    }

    logs_model.log_action("service_key_create", metadata=key_log_metadata)
    logs_model.log_action("service_key_approve", metadata=key_log_metadata)
    return private_key, key.kid
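A minimal sketch of driving generate_key above from a CLI flow; the service name, key name, expiration, and note are illustrative only, and it assumes the module's own imports (model, logs_model, ServiceKeyApprovalType) are already available.

from datetime import datetime, timedelta

# Hypothetical CLI usage: create and auto-approve a key that expires in 30 days.
expiration = datetime.utcnow() + timedelta(days=30)
private_key, kid = generate_key("quay", "cli-generated-key", expiration,
                                notes="generated for local testing")
print("Created service key with kid:", kid)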
Example #6
def push(namespace, package_name):
    reponame = repo_name(namespace, package_name)

    if not REPOSITORY_NAME_REGEX.match(package_name):
        logger.debug("Found invalid repository name CNR push: %s", reponame)
        raise InvalidUsage("invalid repository name: %s" % reponame)

    values = request.get_json(force=True, silent=True) or {}
    private = values.get("visibility", "private")

    owner = get_authenticated_user()
    if not Package.exists(reponame):
        if not CreateRepositoryPermission(namespace).can():
            raise Forbidden(
                "Unauthorized access for: %s" % reponame,
                {
                    "package": reponame,
                    "scopes": ["create"]
                },
            )
        Package.create_repository(reponame, private, owner)
        logs_model.log_action("create_repo",
                              namespace,
                              repository_name=package_name)

    if not ModifyRepositoryPermission(namespace, package_name).can():
        raise Forbidden("Unauthorized access for: %s" % reponame, {
            "package": reponame,
            "scopes": ["push"]
        })

    if not "release" in values:
        raise InvalidUsage("Missing release")

    if not "media_type" in values:
        raise InvalidUsage("Missing media_type")

    if not "blob" in values:
        raise InvalidUsage("Missing blob")

    release_version = str(values["release"])
    media_type = values["media_type"]
    force = request.args.get("force", "false") == "true"

    blob = Blob(reponame, values["blob"])
    app_release = cnr_registry.push(
        reponame,
        release_version,
        media_type,
        blob,
        force,
        package_class=Package,
        user=owner,
        visibility=private,
    )
    logs_model.log_action("push_repo",
                          namespace,
                          repository_name=package_name,
                          metadata={"release": release_version})
    return jsonify(app_release)
Example #7
def pull(namespace, package_name, release, media_type):
  logger.debug('Pull of release %s of app repository %s/%s', release, namespace, package_name)
  reponame = repo_name(namespace, package_name)
  data = cnr_registry.pull(reponame, release, media_type, Package, blob_class=Blob)
  logs_model.log_action('pull_repo', namespace, repository_name=package_name,
                        metadata={'release': release, 'mediatype': media_type})
  json_format = request.args.get('format', None) == 'json'
  return _pull(data, json_format)
Example #8
def add_channel_release(namespace, package_name, channel_name, release):
  _check_channel_name(channel_name, release)
  reponame = repo_name(namespace, package_name)
  result = cnr_registry.add_channel_release(reponame, channel_name, release, channel_class=Channel,
                                            package_class=Package)
  logs_model.log_action('create_tag', namespace, repository_name=package_name,
                        metadata={'channel': channel_name, 'release': release})
  return jsonify(result)
Example #9
def delete_package(namespace, package_name, release, media_type):
    reponame = repo_name(namespace, package_name)
    result = cnr_registry.delete_package(reponame, release, media_type, package_class=Package)
    logs_model.log_action(
        "delete_tag",
        namespace,
        repository_name=package_name,
        metadata={"release": release, "mediatype": media_type},
    )
    return jsonify(result)
Example #10
def log_action(self, kind, namespace_name, repository_name, performer, ip, metadata):
    repository = model.repository.get_repository(namespace_name, repository_name)
    logs_model.log_action(
        kind,
        namespace_name,
        performer=performer,
        ip=ip,
        metadata=metadata,
        repository=repository,
    )
Example #11
def pull(namespace, package_name, release, media_type):
    logger.debug("Pull of release %s of app repository %s/%s", release, namespace, package_name)
    reponame = repo_name(namespace, package_name)
    data = cnr_registry.pull(reponame, release, media_type, Package, blob_class=Blob)
    logs_model.log_action(
        "pull_repo",
        namespace,
        repository_name=package_name,
        metadata={"release": release, "mediatype": media_type},
    )
    json_format = request.args.get("format", None) == "json"
    return _pull(data, json_format)
Example #12
def delete_channel_release(namespace, package_name, channel_name, release):
    _check_channel_name(channel_name, release)
    reponame = repo_name(namespace, package_name)
    result = cnr_registry.delete_channel_release(
        reponame, channel_name, release, channel_class=Channel, package_class=Package
    )
    logs_model.log_action(
        "delete_tag",
        namespace,
        repository_name=package_name,
        metadata={"channel": channel_name, "release": release},
    )
    return jsonify(result)
Example #13
def __generate_service_key(
    kid,
    name,
    user,
    timestamp,
    approval_type,
    expiration=None,
    metadata=None,
    service="sample_service",
    rotation_duration=None,
):
    _, key = model.service_keys.generate_service_key(
        service,
        expiration,
        kid=kid,
        name=name,
        metadata=metadata,
        rotation_duration=rotation_duration,
    )

    if approval_type is not None:
        model.service_keys.approve_service_key(key.kid,
                                               approval_type,
                                               notes="The **test** approval")

        key_metadata = {
            "kid": kid,
            "preshared": True,
            "service": service,
            "name": name,
            "expiration_date": expiration,
            "auto_approved": True,
        }

        logs_model.log_action("service_key_approve",
                              None,
                              performer=user,
                              timestamp=timestamp,
                              metadata=key_metadata)

        logs_model.log_action("service_key_create",
                              None,
                              performer=user,
                              timestamp=timestamp,
                              metadata=key_metadata)
Example #14
def push(namespace, package_name):
  reponame = repo_name(namespace, package_name)

  if not REPOSITORY_NAME_REGEX.match(package_name):
    logger.debug('Found invalid repository name CNR push: %s', reponame)
    raise InvalidUsage('invalid repository name: %s' % reponame)

  values = request.get_json(force=True, silent=True) or {}
  private = values.get('visibility', 'private')

  owner = get_authenticated_user()
  if not Package.exists(reponame):
    if not CreateRepositoryPermission(namespace).can():
      raise Forbidden("Unauthorized access for: %s" % reponame,
                      {"package": reponame,
                       "scopes": ['create']})
    Package.create_repository(reponame, private, owner)
    logs_model.log_action('create_repo', namespace, repository_name=package_name)

  if not ModifyRepositoryPermission(namespace, package_name).can():
    raise Forbidden("Unauthorized access for: %s" % reponame,
                    {"package": reponame,
                     "scopes": ['push']})

  if 'release' not in values:
    raise InvalidUsage('Missing release')

  if 'media_type' not in values:
    raise InvalidUsage('Missing media_type')

  if 'blob' not in values:
    raise InvalidUsage('Missing blob')

  release_version = str(values['release'])
  media_type = values['media_type']
  force = request.args.get('force', 'false') == 'true'

  blob = Blob(reponame, values['blob'])
  app_release = cnr_registry.push(reponame, release_version, media_type, blob, force,
                                  package_class=Package, user=owner, visibility=private)
  logs_model.log_action('push_repo', namespace, repository_name=package_name,
                        metadata={'release': release_version})
  return jsonify(app_release)
Example #15
def __generate_service_key(kid,
                           name,
                           user,
                           timestamp,
                           approval_type,
                           expiration=None,
                           metadata=None,
                           service='sample_service',
                           rotation_duration=None):
    _, key = model.service_keys.generate_service_key(
        service,
        expiration,
        kid=kid,
        name=name,
        metadata=metadata,
        rotation_duration=rotation_duration)

    if approval_type is not None:
        model.service_keys.approve_service_key(key.kid,
                                               approval_type,
                                               notes='The **test** approval')

        key_metadata = {
            'kid': kid,
            'preshared': True,
            'service': service,
            'name': name,
            'expiration_date': expiration,
            'auto_approved': True
        }

        logs_model.log_action('service_key_approve',
                              None,
                              performer=user,
                              timestamp=timestamp,
                              metadata=key_metadata)

        logs_model.log_action('service_key_create',
                              None,
                              performer=user,
                              timestamp=timestamp,
                              metadata=key_metadata)
Example #16
def delete_service_key(service, kid):
    jwt_header = request.headers.get(JWT_HEADER_NAME, "")
    match = jwtutil.TOKEN_REGEX.match(jwt_header)
    if match is None:
        abort(400)

    encoded_jwt = match.group(1)

    signer_kid = _signer_kid(encoded_jwt)
    signer_key = _lookup_service_key(service, signer_kid, approved_only=False)

    self_signed = kid == signer_kid
    approved_key_for_service = signer_key.approval is not None

    if self_signed or approved_key_for_service:
        _validate_jwt(encoded_jwt, signer_key.jwk, service)

        try:
            model.delete_service_key(kid)
        except ServiceKeyDoesNotExist:
            abort(404)

        logs_model.log_action(
            "service_key_delete",
            ip=get_request_ip(),
            metadata={
                "kid": kid,
                "signer_kid": signer_key.kid,
                "service": service,
                "name": signer_key.name,
                "user_agent": request.headers.get("User-Agent"),
                "ip": get_request_ip(),
            },
        )

        return make_response("", 204)

    abort(403)
Example #17
def delete_service_key(service, kid):
    jwt_header = request.headers.get(JWT_HEADER_NAME, '')
    match = jwtutil.TOKEN_REGEX.match(jwt_header)
    if match is None:
        abort(400)

    encoded_jwt = match.group(1)

    signer_kid = _signer_kid(encoded_jwt)
    signer_key = _lookup_service_key(service, signer_kid, approved_only=False)

    self_signed = kid == signer_kid
    approved_key_for_service = signer_key.approval is not None

    if self_signed or approved_key_for_service:
        _validate_jwt(encoded_jwt, signer_key.jwk, service)

        try:
            model.delete_service_key(kid)
        except ServiceKeyDoesNotExist:
            abort(404)

        logs_model.log_action('service_key_delete',
                              ip=get_request_ip(),
                              metadata={
                                  'kid': kid,
                                  'signer_kid': signer_key.kid,
                                  'service': service,
                                  'name': signer_key.name,
                                  'user_agent':
                                  request.headers.get('User-Agent'),
                                  'ip': get_request_ip(),
                              })

        return make_response('', 204)

    abort(403)
Example #18
def put_service_key(service, kid):
    metadata = {"ip": get_request_ip()}

    rotation_duration = request.args.get("rotation", None)
    expiration_date = request.args.get("expiration", None)
    if expiration_date is not None:
        try:
            expiration_date = datetime.utcfromtimestamp(float(expiration_date))
        except ValueError:
            logger.exception("Error parsing expiration date on key")
            abort(400)

    try:
        jwk = request.get_json()
    except ValueError:
        logger.exception("Error parsing JWK")
        abort(400)

    jwt_header = request.headers.get(JWT_HEADER_NAME, "")
    match = jwtutil.TOKEN_REGEX.match(jwt_header)
    if match is None:
        logger.error("Could not find matching bearer token")
        abort(400)

    encoded_jwt = match.group(1)

    _validate_jwk(jwk)

    signer_kid = _signer_kid(encoded_jwt, allow_none=True)
    if kid == signer_kid or signer_kid is None:
        # The key is self-signed. Create a new instance and await approval.
        _validate_jwt(encoded_jwt, jwk, service)
        model.create_service_key("",
                                 kid,
                                 service,
                                 jwk,
                                 metadata,
                                 expiration_date,
                                 rotation_duration=rotation_duration)

        logs_model.log_action(
            "service_key_create",
            ip=get_request_ip(),
            metadata={
                "kid": kid,
                "preshared": False,
                "service": service,
                "name": "",
                "expiration_date": expiration_date,
                "user_agent": request.headers.get("User-Agent"),
                "ip": get_request_ip(),
            },
        )

        return make_response("", 202)

    # Key is going to be rotated.
    metadata.update({"created_by": "Key Rotation"})
    signer_key = _lookup_service_key(service, signer_kid)
    signer_jwk = signer_key.jwk

    _validate_jwt(encoded_jwt, signer_jwk, service)

    try:
        model.replace_service_key(signer_key.kid, kid, jwk, metadata,
                                  expiration_date)
    except ServiceKeyDoesNotExist:
        abort(404)

    logs_model.log_action(
        "service_key_rotate",
        ip=get_request_ip(),
        metadata={
            "kid": kid,
            "signer_kid": signer_key.kid,
            "service": service,
            "name": signer_key.name,
            "expiration_date": expiration_date,
            "user_agent": request.headers.get("User-Agent"),
            "ip": get_request_ip(),
        },
    )

    return make_response("", 200)
Example #19
def populate_database(minimal=False):
    logger.debug("Populating the DB with test data.")

    # Check if the data already exists. If so, we skip. This can happen between calls from the
    # "old style" tests and the new py.test's.
    try:
        User.get(username="******")
        logger.debug("DB already populated")
        return
    except User.DoesNotExist:
        pass

    # Note: databases set up with the "real" schema (via Alembic) will not have these types
    # defined, so we create them here if necessary.
    try:
        ImageStorageLocation.get(name="local_eu")
        ImageStorageLocation.get(name="local_us")
    except ImageStorageLocation.DoesNotExist:
        ImageStorageLocation.create(name="local_eu")
        ImageStorageLocation.create(name="local_us")

    try:
        NotificationKind.get(name="test_notification")
    except NotificationKind.DoesNotExist:
        NotificationKind.create(name="test_notification")

    new_user_1 = model.user.create_user("devtable", "password",
                                        "*****@*****.**")
    new_user_1.verified = True
    new_user_1.stripe_id = TEST_STRIPE_ID
    new_user_1.save()

    if minimal:
        logger.debug(
            "Skipping most db population because user requested mininal db")
        return

    UserRegion.create(user=new_user_1,
                      location=ImageStorageLocation.get(name="local_us"))
    model.release.set_region_release("quay", "us", "v0.1.2")

    model.user.create_confirm_email_code(new_user_1,
                                         new_email="*****@*****.**")

    disabled_user = model.user.create_user("disabled", "password",
                                           "*****@*****.**")
    disabled_user.verified = True
    disabled_user.enabled = False
    disabled_user.save()

    dtrobot = model.user.create_robot("dtrobot", new_user_1)
    dtrobot2 = model.user.create_robot("dtrobot2", new_user_1)

    new_user_2 = model.user.create_user("public", "password",
                                        "*****@*****.**")
    new_user_2.verified = True
    new_user_2.save()

    new_user_3 = model.user.create_user("freshuser", "password",
                                        "*****@*****.**")
    new_user_3.verified = True
    new_user_3.save()

    another_robot = model.user.create_robot("anotherrobot", new_user_3)

    new_user_4 = model.user.create_user("randomuser", "password",
                                        "*****@*****.**")
    new_user_4.verified = True
    new_user_4.save()

    new_user_5 = model.user.create_user("unverified", "password",
                                        "*****@*****.**")
    new_user_5.save()

    reader = model.user.create_user("reader", "password", "*****@*****.**")
    reader.verified = True
    reader.save()

    creatoruser = model.user.create_user("creator", "password",
                                         "*****@*****.**")
    creatoruser.verified = True
    creatoruser.save()

    outside_org = model.user.create_user("outsideorg", "password",
                                         "*****@*****.**")
    outside_org.verified = True
    outside_org.save()

    model.notification.create_notification(
        "test_notification",
        new_user_1,
        metadata={
            "some": "value",
            "arr": [1, 2, 3],
            "obj": {
                "a": 1,
                "b": 2
            }
        },
    )

    from_date = datetime.utcnow()
    to_date = from_date + timedelta(hours=1)
    notification_metadata = {
        "from_date": formatdate(calendar.timegm(from_date.utctimetuple())),
        "to_date": formatdate(calendar.timegm(to_date.utctimetuple())),
        "reason": "database migration",
    }
    model.notification.create_notification("maintenance",
                                           new_user_1,
                                           metadata=notification_metadata)

    __generate_repository(
        new_user_4,
        "randomrepo",
        "Random repo repository.",
        False,
        [],
        (4, [], ["latest", "prod"]),
    )

    simple_repo = __generate_repository(
        new_user_1,
        "simple",
        "Simple repository.",
        False,
        [],
        (4, [], ["latest", "prod"]),
    )

    # Add some labels to the latest tag's manifest.
    repo_ref = RepositoryReference.for_repo_obj(simple_repo)
    tag = registry_model.get_repo_tag(repo_ref, "latest")
    manifest = registry_model.get_manifest_for_tag(tag)
    assert manifest

    first_label = registry_model.create_manifest_label(manifest, "foo", "bar",
                                                       "manifest")
    registry_model.create_manifest_label(manifest, "foo", "baz", "api")
    registry_model.create_manifest_label(manifest, "anotherlabel", "1234",
                                         "internal")
    registry_model.create_manifest_label(manifest, "jsonlabel",
                                         '{"hey": "there"}', "internal",
                                         "application/json")

    label_metadata = {
        "key": "foo",
        "value": "bar",
        "id": first_label._db_id,
        "manifest_digest": manifest.digest,
    }

    logs_model.log_action(
        "manifest_label_add",
        new_user_1.username,
        performer=new_user_1,
        timestamp=datetime.now(),
        metadata=label_metadata,
        repository=simple_repo,
    )

    model.blob.initiate_upload(new_user_1.username, simple_repo.name,
                               str(uuid4()), "local_us", {})
    model.notification.create_repo_notification(simple_repo, "repo_push",
                                                "quay_notification", {}, {})

    __generate_repository(
        new_user_1,
        "sharedtags",
        "Shared tags repository",
        False,
        [(new_user_2, "read"), (dtrobot[0], "read")],
        (
            2,
            [
                (3, [], ["v2.0", "v2.1", "v2.2"]),
                (
                    1,
                    [(1, [(1, [], ["prod", "581a284"])
                          ], ["staging", "8423b58"]), (1, [], None)],
                    None,
                ),
            ],
            None,
        ),
    )

    __generate_repository(
        new_user_1,
        "history",
        "Historical repository.",
        False,
        [],
        (4, [(2, [], "#latest"), (3, [], "latest")], None),
    )

    __generate_repository(
        new_user_1,
        "complex",
        "Complex repository with many branches and tags.",
        False,
        [(new_user_2, "read"), (dtrobot[0], "read")],
        (
            2,
            [(3, [], "v2.0"),
             (1, [(1, [(2, [], ["prod"])], "staging"), (1, [], None)], None)],
            None,
        ),
    )

    __generate_repository(
        new_user_1,
        "gargantuan",
        None,
        False,
        [],
        (
            2,
            [
                (3, [], "v2.0"),
                (1, [(1, [(1, [], ["latest", "prod"])], "staging"),
                     (1, [], None)], None),
                (20, [], "v3.0"),
                (5, [], "v4.0"),
                (1, [(1, [], "v5.0"), (1, [], "v6.0")], None),
            ],
            None,
        ),
    )

    trusted_repo = __generate_repository(
        new_user_1,
        "trusted",
        "Trusted repository.",
        False,
        [],
        (4, [], ["latest", "prod"]),
    )
    trusted_repo.trust_enabled = True
    trusted_repo.save()

    publicrepo = __generate_repository(
        new_user_2,
        "publicrepo",
        "Public repository pullable by the world.",
        True,
        [],
        (10, [], "latest"),
    )

    __generate_repository(outside_org, "coolrepo", "Some cool repo.", False,
                          [], (5, [], "latest"))

    __generate_repository(
        new_user_1,
        "shared",
        "Shared repository, another user can write.",
        False,
        [(new_user_2, "write"), (reader, "read")],
        (5, [], "latest"),
    )

    __generate_repository(
        new_user_1,
        "text-full-repo",
        "This is a repository for testing text search",
        False,
        [(new_user_2, "write"), (reader, "read")],
        (5, [], "latest"),
    )

    building = __generate_repository(
        new_user_1,
        "building",
        "Empty repository which is building.",
        False,
        [(new_user_2, "write"), (reader, "read")],
        (0, [], None),
    )

    new_token = model.token.create_access_token(building, "write",
                                                "build-worker")

    trigger = model.build.create_build_trigger(building,
                                               "github",
                                               "123authtoken",
                                               new_user_1,
                                               pull_robot=dtrobot[0])
    trigger.config = json.dumps({
        "build_source": "jakedt/testconnect",
        "subdir": "",
        "dockerfile_path": "Dockerfile",
        "context": "/",
    })
    trigger.save()

    repo = "ci.devtable.com:5000/%s/%s" % (building.namespace_user.username,
                                           building.name)
    job_config = {
        "repository": repo,
        "docker_tags": ["latest"],
        "build_subdir": "",
        "trigger_metadata": {
            "commit": "3482adc5822c498e8f7db2e361e8d57b3d77ddd9",
            "ref": "refs/heads/master",
            "default_branch": "master",
        },
    }

    model.repository.star_repository(new_user_1, simple_repo)

    record = model.repository.create_email_authorization_for_repo(
        new_user_1.username, "simple", "*****@*****.**")
    record.confirmed = True
    record.save()

    model.repository.create_email_authorization_for_repo(
        new_user_1.username, "simple", "*****@*****.**")

    build2 = model.build.create_repository_build(
        building,
        new_token,
        job_config,
        "68daeebd-a5b9-457f-80a0-4363b882f8ea",
        "build-name",
        trigger,
    )
    build2.uuid = "deadpork-dead-pork-dead-porkdeadpork"
    build2.save()

    build3 = model.build.create_repository_build(
        building,
        new_token,
        job_config,
        "f49d07f9-93da-474d-ad5f-c852107c3892",
        "build-name",
        trigger,
    )
    build3.uuid = "deadduck-dead-duck-dead-duckdeadduck"
    build3.save()

    build1 = model.build.create_repository_build(
        building, new_token, job_config, "701dcc3724fb4f2ea6c31400528343cd",
        "build-name", trigger)
    build1.uuid = "deadbeef-dead-beef-dead-beefdeadbeef"
    build1.save()

    org = model.organization.create_organization("buynlarge",
                                                 "*****@*****.**",
                                                 new_user_1)
    org.stripe_id = TEST_STRIPE_ID
    org.save()

    liborg = model.organization.create_organization(
        "library", "*****@*****.**", new_user_1)
    liborg.save()

    titiorg = model.organization.create_organization("titi",
                                                     "*****@*****.**",
                                                     new_user_1)
    titiorg.save()

    thirdorg = model.organization.create_organization(
        "sellnsmall", "*****@*****.**", new_user_1)
    thirdorg.save()

    model.user.create_robot("coolrobot", org)

    oauth_app_1 = model.oauth.create_application(
        org,
        "Some Test App",
        "http://localhost:8000",
        "http://localhost:8000/o2c.html",
        client_id="deadbeef",
    )

    model.oauth.create_application(
        org,
        "Some Other Test App",
        "http://quay.io",
        "http://localhost:8000/o2c.html",
        client_id="deadpork",
        description="This is another test application",
    )

    model.oauth.create_user_access_token(new_user_1,
                                         "deadbeef",
                                         "repo:admin",
                                         access_token="%s%s" %
                                         ("b" * 40, "c" * 40))

    oauth_credential = Credential.from_string("dswfhasdf1")
    OAuthAuthorizationCode.create(
        application=oauth_app_1,
        code="Z932odswfhasdf1",
        scope="repo:admin",
        data='{"somejson": "goeshere"}',
        code_name="Z932odswfhasdf1Z932o",
        code_credential=oauth_credential,
    )

    model.user.create_robot("neworgrobot", org)

    ownerbot = model.user.create_robot("ownerbot", org)[0]
    creatorbot = model.user.create_robot("creatorbot", org)[0]

    owners = model.team.get_organization_team("buynlarge", "owners")
    owners.description = "Owners have unfetterd access across the entire org."
    owners.save()

    org_repo = __generate_repository(
        org,
        "orgrepo",
        "Repository owned by an org.",
        False,
        [(outside_org, "read")],
        (4, [], ["latest", "prod"]),
    )

    __generate_repository(
        org,
        "anotherorgrepo",
        "Another repository owned by an org.",
        False,
        [],
        (4, [], ["latest", "prod"]),
    )

    creators = model.team.create_team("creators", org, "creator",
                                      "Creators of orgrepo.")

    reader_team = model.team.create_team("readers", org, "member",
                                         "Readers of orgrepo.")
    model.team.add_or_invite_to_team(new_user_1, reader_team, outside_org)
    model.permission.set_team_repo_permission(reader_team.name,
                                              org_repo.namespace_user.username,
                                              org_repo.name, "read")

    model.team.add_user_to_team(new_user_2, reader_team)
    model.team.add_user_to_team(reader, reader_team)
    model.team.add_user_to_team(ownerbot, owners)
    model.team.add_user_to_team(creatorbot, creators)
    model.team.add_user_to_team(creatoruser, creators)

    sell_owners = model.team.get_organization_team("sellnsmall", "owners")
    sell_owners.description = "Owners have unfettered access across the entire org."
    sell_owners.save()

    model.team.add_user_to_team(new_user_4, sell_owners)

    sync_config = {
        "group_dn": "cn=Test-Group,ou=Users",
        "group_id": "somegroupid"
    }
    synced_team = model.team.create_team("synced", org, "member",
                                         "Some synced team.")
    model.team.set_team_syncing(synced_team, "ldap", sync_config)

    another_synced_team = model.team.create_team("synced", thirdorg, "member",
                                                 "Some synced team.")
    model.team.set_team_syncing(another_synced_team, "ldap",
                                {"group_dn": "cn=Test-Group,ou=Users"})

    __generate_repository(
        new_user_1,
        "superwide",
        None,
        False,
        [],
        [
            (10, [], "latest2"),
            (2, [], "latest3"),
            (2, [(1, [], "latest11"), (2, [], "latest12")], "latest4"),
            (2, [], "latest5"),
            (2, [], "latest6"),
            (2, [], "latest7"),
            (2, [], "latest8"),
            (2, [], "latest9"),
            (2, [], "latest10"),
            (2, [], "latest13"),
            (2, [], "latest14"),
            (2, [], "latest15"),
            (2, [], "latest16"),
            (2, [], "latest17"),
            (2, [], "latest18"),
        ],
    )

    mirror_repo = __generate_repository(
        new_user_1,
        "mirrored",
        "Mirrored repository.",
        False,
        [(dtrobot[0], "write"), (dtrobot2[0], "write")],
        (4, [], ["latest", "prod"]),
    )
    mirror_rule = model.repo_mirror.create_mirroring_rule(
        mirror_repo, ["latest", "3.3*"])
    mirror_args = (mirror_repo, mirror_rule, dtrobot[0], "quay.io/coreos/etcd",
                   60 * 60 * 24)
    mirror_kwargs = {
        "external_registry_username": "******",
        "external_registry_password": "******",
        "external_registry_config": {},
        "is_enabled": True,
        "sync_start_date": datetime.utcnow(),
    }
    mirror = model.repo_mirror.enable_mirroring_for_repository(
        *mirror_args, **mirror_kwargs)

    read_only_repo = __generate_repository(
        new_user_1,
        "readonly",
        "Read-Only Repo.",
        False,
        [],
        (4, [], ["latest", "prod"]),
    )
    read_only_repo.state = RepositoryState.READ_ONLY
    read_only_repo.save()

    model.permission.add_prototype_permission(org,
                                              "read",
                                              activating_user=new_user_1,
                                              delegate_user=new_user_2)
    model.permission.add_prototype_permission(org,
                                              "read",
                                              activating_user=new_user_1,
                                              delegate_team=reader_team)
    model.permission.add_prototype_permission(org,
                                              "write",
                                              activating_user=new_user_2,
                                              delegate_user=new_user_1)

    today = datetime.today()
    week_ago = today - timedelta(6)
    six_ago = today - timedelta(5)
    four_ago = today - timedelta(4)
    yesterday = datetime.combine(date.today(),
                                 datetime.min.time()) - timedelta(hours=6)

    __generate_service_key("kid1", "somesamplekey", new_user_1, today,
                           ServiceKeyApprovalType.SUPERUSER)
    __generate_service_key(
        "kid2",
        "someexpiringkey",
        new_user_1,
        week_ago,
        ServiceKeyApprovalType.SUPERUSER,
        today + timedelta(days=14),
    )

    __generate_service_key("kid3", "unapprovedkey", new_user_1, today, None)

    __generate_service_key(
        "kid4",
        "autorotatingkey",
        new_user_1,
        six_ago,
        ServiceKeyApprovalType.KEY_ROTATION,
        today + timedelta(days=1),
        rotation_duration=timedelta(hours=12).total_seconds(),
    )

    __generate_service_key(
        "kid5",
        "key for another service",
        new_user_1,
        today,
        ServiceKeyApprovalType.SUPERUSER,
        today + timedelta(days=14),
        service="different_sample_service",
    )

    __generate_service_key(
        "kid6",
        "someexpiredkey",
        new_user_1,
        week_ago,
        ServiceKeyApprovalType.SUPERUSER,
        today - timedelta(days=1),
    )

    __generate_service_key(
        "kid7",
        "somewayexpiredkey",
        new_user_1,
        week_ago,
        ServiceKeyApprovalType.SUPERUSER,
        today - timedelta(days=30),
    )

    # Add the test pull key as pre-approved for local and unittest registry testing.
    # Note: this must match the private key found in the local/test config.
    _TEST_JWK = {
        "e": "AQAB",
        "kty": "RSA",
        "n": "yqdQgnelhAPMSeyH0kr3UGePK9oFOmNfwD0Ymnh7YYXr21VHWwyM2eVW3cnLd9KXywDFtGSe9oFDbnOuMCdUowdkBcaHju-isbv5KEbNSoy_T2Rip-6L0cY63YzcMJzv1nEYztYXS8wz76pSK81BKBCLapqOCmcPeCvV9yaoFZYvZEsXCl5jjXN3iujSzSF5Z6PpNFlJWTErMT2Z4QfbDKX2Nw6vJN6JnGpTNHZvgvcyNX8vkSgVpQ8DFnFkBEx54PvRV5KpHAq6AsJxKONMo11idQS2PfCNpa2hvz9O6UZe-eIX8jPo5NW8TuGZJumbdPT_nxTDLfCqfiZboeI0Pw",
    }

    key = model.service_keys.create_service_key("test_service_key",
                                                "test_service_key", "quay",
                                                _TEST_JWK, {}, None)

    model.service_keys.approve_service_key(
        key.kid,
        ServiceKeyApprovalType.SUPERUSER,
        notes="Test service key for local/test registry testing",
    )

    # Add an app specific token.
    token = model.appspecifictoken.create_token(new_user_1, "some app")
    token.token_name = "a" * 60
    token.token_secret = "b" * 60
    token.save()

    logs_model.log_action(
        "org_create_team",
        org.username,
        performer=new_user_1,
        timestamp=week_ago,
        metadata={"team": "readers"},
    )

    logs_model.log_action(
        "org_set_team_role",
        org.username,
        performer=new_user_1,
        timestamp=week_ago,
        metadata={
            "team": "readers",
            "role": "read"
        },
    )

    logs_model.log_action(
        "create_repo",
        org.username,
        performer=new_user_1,
        repository=org_repo,
        timestamp=week_ago,
        metadata={
            "namespace": org.username,
            "repo": "orgrepo"
        },
    )

    logs_model.log_action(
        "change_repo_permission",
        org.username,
        performer=new_user_2,
        repository=org_repo,
        timestamp=six_ago,
        metadata={
            "username": new_user_1.username,
            "repo": "orgrepo",
            "role": "admin"
        },
    )

    logs_model.log_action(
        "change_repo_permission",
        org.username,
        performer=new_user_1,
        repository=org_repo,
        timestamp=six_ago,
        metadata={
            "username": new_user_2.username,
            "repo": "orgrepo",
            "role": "read"
        },
    )

    logs_model.log_action(
        "add_repo_accesstoken",
        org.username,
        performer=new_user_1,
        repository=org_repo,
        timestamp=four_ago,
        metadata={
            "repo": "orgrepo",
            "token": "deploytoken"
        },
    )

    logs_model.log_action(
        "push_repo",
        org.username,
        performer=new_user_2,
        repository=org_repo,
        timestamp=today,
        metadata={
            "username": new_user_2.username,
            "repo": "orgrepo"
        },
    )

    logs_model.log_action(
        "pull_repo",
        org.username,
        performer=new_user_2,
        repository=org_repo,
        timestamp=today,
        metadata={
            "username": new_user_2.username,
            "repo": "orgrepo"
        },
    )

    logs_model.log_action(
        "pull_repo",
        org.username,
        repository=org_repo,
        timestamp=today,
        metadata={
            "token": "sometoken",
            "token_code": "somecode",
            "repo": "orgrepo"
        },
    )

    logs_model.log_action(
        "delete_tag",
        org.username,
        performer=new_user_2,
        repository=org_repo,
        timestamp=today,
        metadata={
            "username": new_user_2.username,
            "repo": "orgrepo",
            "tag": "sometag"
        },
    )

    logs_model.log_action(
        "pull_repo",
        org.username,
        repository=org_repo,
        timestamp=today,
        metadata={
            "token_code": "somecode",
            "repo": "orgrepo"
        },
    )

    logs_model.log_action(
        "pull_repo",
        new_user_2.username,
        repository=publicrepo,
        timestamp=yesterday,
        metadata={
            "token_code": "somecode",
            "repo": "publicrepo"
        },
    )

    logs_model.log_action(
        "build_dockerfile",
        new_user_1.username,
        repository=building,
        timestamp=today,
        metadata={
            "repo": "building",
            "namespace": new_user_1.username,
            "trigger_id": trigger.uuid,
            "config": json.loads(trigger.config),
            "service": trigger.service.name,
        },
    )

    model.message.create([{
        "content": "We love you, Quay customers!",
        "severity": "info",
        "media_type": "text/plain",
    }])

    model.message.create([{
        "content": "This is a **development** install of Quay",
        "severity": "warning",
        "media_type": "text/markdown",
    }])

    fake_queue = WorkQueue("fakequeue", tf)
    fake_queue.put(["canonical", "job", "name"], "{}")

    model.user.create_user_prompt(new_user_4, "confirm_username")

    for to_count in Repository.select():
        model.repositoryactioncount.count_repository_actions(
            to_count, datetime.utcnow())
        model.repositoryactioncount.update_repository_score(to_count)
Example #20
def test_export_logs(initialized_db, storage_engine, has_logs):
    # Delete all existing logs.
    database.LogEntry3.delete().execute()

    repo = model.repository.get_repository("devtable", "simple")
    user = model.user.get_user("devtable")

    now = datetime.now()
    if has_logs:
        # Add new logs over a multi-day period.
        for index in range(-10, 10):
            logs_model.log_action(
                "push_repo",
                "devtable",
                user,
                "0.0.0.0",
                {"index": index},
                repo,
                timestamp=now + timedelta(days=index),
            )

    worker = ExportActionLogsWorker(None)
    called = [{}]

    @urlmatch(netloc=r"testcallback")
    def handle_request(url, request):
        called[0] = json.loads(request.body)
        return {"status_code": 200, "content": "{}"}

    def format_date(datetime):
        return datetime.strftime("%m/%d/%Y")

    with HTTMock(handle_request):
        worker._process_queue_item(
            {
                "export_id": "someid",
                "repository_id": repo.id,
                "namespace_id": repo.namespace_user.id,
                "namespace_name": "devtable",
                "repository_name": "simple",
                "start_time": format_date(now + timedelta(days=-10)),
                "end_time": format_date(now + timedelta(days=10)),
                "callback_url": "http://testcallback/",
                "callback_email": None,
            },
            storage_engine,
        )

    assert called[0]
    assert called[0]["export_id"] == "someid"
    assert called[0]["status"] == "success"

    url = called[0]["exported_data_url"]

    if url.find("http://localhost:5000/exportedlogs/") == 0:
        storage_id = url[len("http://localhost:5000/exportedlogs/"):]
    else:
        assert (url.find(
            "https://some_bucket.s3.amazonaws.com:443/some/path/exportedactionlogs/"
        ) == 0)
        storage_id, _ = url[len(
            "https://some_bucket.s3.amazonaws.com:443/some/path/exportedactionlogs/"
        ):].split("?")

    created = storage_engine.get_content(storage_engine.preferred_locations,
                                         "exportedactionlogs/" + storage_id)
    created_json = json.loads(created)

    if has_logs:
        found = set()
        for log in created_json["logs"]:
            if log.get("terminator"):
                continue

            found.add(log["metadata"]["index"])

        for index in range(-10, 10):
            assert index in found
    else:
        assert created_json["logs"] == [{"terminator": True}]
Example #21
def start_build(repository, prepared_build, pull_robot_name=None):
    # Ensure that builds are only run in image repositories.
    if repository.kind.name != "image":
        raise Exception(
            "Attempt to start a build for application repository %s" %
            repository.id)

    # Ensure the repository isn't in mirror or read-only mode.
    if repository.state != RepositoryState.NORMAL:
        raise Exception(
            ("Attempt to start a build for a non-normal repository: %s %s" %
             (repository.id, repository.state)))

    # Ensure that disabled triggers are not run.
    if prepared_build.trigger is not None and not prepared_build.trigger.enabled:
        raise BuildTriggerDisabledException

    if repository.namespace_user.maximum_queued_builds_count is not None:
        queue_item_canonical_name = [repository.namespace_user.username]
        alive_builds = dockerfile_build_queue.num_alive_jobs(
            queue_item_canonical_name)
        if alive_builds >= repository.namespace_user.maximum_queued_builds_count:
            logger.debug(
                "Prevented queueing of build under namespace %s due to reaching max: %s",
                repository.namespace_user.username,
                repository.namespace_user.maximum_queued_builds_count,
            )
            raise MaximumBuildsQueuedException()

    host = app.config["SERVER_HOSTNAME"]
    repo_path = "%s/%s/%s" % (host, repository.namespace_user.username,
                              repository.name)

    new_token = model.token.create_access_token(
        repository,
        "write",
        kind="build-worker",
        friendly_name="Repository Build Token")
    logger.debug(
        "Creating build %s with repo %s tags %s",
        prepared_build.build_name,
        repo_path,
        prepared_build.tags,
    )

    job_config = {
        "docker_tags": prepared_build.tags,
        "registry": host,
        "build_subdir": prepared_build.subdirectory,
        "context": prepared_build.context,
        "trigger_metadata": prepared_build.metadata or {},
        "is_manual": prepared_build.is_manual,
        "manual_user": get_authenticated_user().username if get_authenticated_user() else None,
        "archive_url": prepared_build.archive_url,
    }

    with app.config["DB_TRANSACTION_FACTORY"](db):
        build_request = model.build.create_repository_build(
            repository,
            new_token,
            job_config,
            prepared_build.dockerfile_id,
            prepared_build.build_name,
            prepared_build.trigger,
            pull_robot_name=pull_robot_name,
        )

        pull_creds = model.user.get_pull_credentials(
            pull_robot_name) if pull_robot_name else None

        json_data = json.dumps({
            "build_uuid": build_request.uuid,
            "pull_credentials": pull_creds
        })

        queue_id = dockerfile_build_queue.put(
            [repository.namespace_user.username, repository.name],
            json_data,
            retries_remaining=3)

        build_request.queue_id = queue_id
        build_request.save()

    # Add the build to the repo's log and spawn the build_queued notification.
    event_log_metadata = {
        "build_id": build_request.uuid,
        "docker_tags": prepared_build.tags,
        "repo": repository.name,
        "namespace": repository.namespace_user.username,
        "is_manual": prepared_build.is_manual,
        "manual_user": get_authenticated_user().username if get_authenticated_user() else None,
    }

    if prepared_build.trigger:
        event_log_metadata["trigger_id"] = prepared_build.trigger.uuid
        event_log_metadata["trigger_kind"] = prepared_build.trigger.service.name
        event_log_metadata["trigger_metadata"] = prepared_build.metadata or {}

    logs_model.log_action(
        "build_dockerfile",
        repository.namespace_user.username,
        ip=get_request_ip(),
        metadata=event_log_metadata,
        repository=repository,
    )

    # TODO: remove when more endpoints have been converted to using interfaces
    repo = AttrDict({
        "namespace_name": repository.namespace_user.username,
        "name": repository.name,
    })

    spawn_notification(
        repo,
        "build_queued",
        event_log_metadata,
        subpage="build/%s" % build_request.uuid,
        pathargs=["build", build_request.uuid],
    )

    return build_request
Example #22
def populate_database(minimal=False, with_storage=False):
    logger.debug('Populating the DB with test data.')

    # Check if the data already exists. If so, we skip. This can happen between calls from the
    # "old style" tests and the new py.test's.
    try:
        User.get(username='******')
        logger.debug('DB already populated')
        return
    except User.DoesNotExist:
        pass

    # Note: databases set up with the "real" schema (via Alembic) will not have these types
    # defined, so we create them here if necessary.
    try:
        ImageStorageLocation.get(name='local_eu')
        ImageStorageLocation.get(name='local_us')
    except ImageStorageLocation.DoesNotExist:
        ImageStorageLocation.create(name='local_eu')
        ImageStorageLocation.create(name='local_us')

    try:
        NotificationKind.get(name='test_notification')
    except NotificationKind.DoesNotExist:
        NotificationKind.create(name='test_notification')

    new_user_1 = model.user.create_user('devtable', 'password',
                                        '*****@*****.**')
    new_user_1.verified = True
    new_user_1.stripe_id = TEST_STRIPE_ID
    new_user_1.save()

    if minimal:
        logger.debug(
            'Skipping most db population because user requested minimal db')
        return

    UserRegion.create(user=new_user_1,
                      location=ImageStorageLocation.get(name='local_us'))
    model.release.set_region_release('quay', 'us', 'v0.1.2')

    model.user.create_confirm_email_code(new_user_1,
                                         new_email='*****@*****.**')

    disabled_user = model.user.create_user('disabled', 'password',
                                           '*****@*****.**')
    disabled_user.verified = True
    disabled_user.enabled = False
    disabled_user.save()

    dtrobot = model.user.create_robot('dtrobot', new_user_1)
    dtrobot2 = model.user.create_robot('dtrobot2', new_user_1)

    new_user_2 = model.user.create_user('public', 'password',
                                        '*****@*****.**')
    new_user_2.verified = True
    new_user_2.save()

    new_user_3 = model.user.create_user('freshuser', 'password',
                                        '*****@*****.**')
    new_user_3.verified = True
    new_user_3.save()

    another_robot = model.user.create_robot('anotherrobot', new_user_3)

    new_user_4 = model.user.create_user('randomuser', 'password',
                                        '*****@*****.**')
    new_user_4.verified = True
    new_user_4.save()

    new_user_5 = model.user.create_user('unverified', 'password',
                                        '*****@*****.**')
    new_user_5.save()

    reader = model.user.create_user('reader', 'password', '*****@*****.**')
    reader.verified = True
    reader.save()

    creatoruser = model.user.create_user('creator', 'password',
                                         '*****@*****.**')
    creatoruser.verified = True
    creatoruser.save()

    outside_org = model.user.create_user('outsideorg', 'password',
                                         '*****@*****.**')
    outside_org.verified = True
    outside_org.save()

    model.notification.create_notification('test_notification',
                                           new_user_1,
                                           metadata={
                                               'some': 'value',
                                               'arr': [1, 2, 3],
                                               'obj': {
                                                   'a': 1,
                                                   'b': 2
                                               }
                                           })

    from_date = datetime.utcnow()
    to_date = from_date + timedelta(hours=1)
    notification_metadata = {
        'from_date': formatdate(calendar.timegm(from_date.utctimetuple())),
        'to_date': formatdate(calendar.timegm(to_date.utctimetuple())),
        'reason': 'database migration'
    }
    model.notification.create_notification('maintenance',
                                           new_user_1,
                                           metadata=notification_metadata)

    __generate_repository(with_storage, new_user_4, 'randomrepo',
                          'Random repo repository.', False, [],
                          (4, [], ['latest', 'prod']))

    simple_repo = __generate_repository(with_storage, new_user_1, 'simple',
                                        'Simple repository.', False, [],
                                        (4, [], ['latest', 'prod']))

    # Add some labels to the latest tag's manifest.
    tag_manifest = model.tag.load_tag_manifest(new_user_1.username, 'simple',
                                               'latest')
    first_label = model.label.create_manifest_label(tag_manifest, 'foo', 'bar',
                                                    'manifest')
    model.label.create_manifest_label(tag_manifest, 'foo', 'baz', 'api')
    model.label.create_manifest_label(tag_manifest, 'anotherlabel', '1234',
                                      'internal')
    model.label.create_manifest_label(tag_manifest, 'jsonlabel',
                                      '{"hey": "there"}', 'internal')

    label_metadata = {
        'key': 'foo',
        'value': 'bar',
        'id': first_label.id,
        'manifest_digest': tag_manifest.digest
    }

    logs_model.log_action('manifest_label_add',
                          new_user_1.username,
                          performer=new_user_1,
                          timestamp=datetime.now(),
                          metadata=label_metadata,
                          repository=tag_manifest.tag.repository)

    model.blob.initiate_upload(new_user_1.username, simple_repo.name,
                               str(uuid4()), 'local_us', {})
    model.notification.create_repo_notification(simple_repo, 'repo_push',
                                                'quay_notification', {}, {})

    __generate_repository(
        with_storage, new_user_1, 'sharedtags', 'Shared tags repository',
        False, [(new_user_2, 'read'), (dtrobot[0], 'read')],
        (2, [(3, [], ['v2.0', 'v2.1', 'v2.2']),
             (1, [(1, [(1, [], ['prod', '581a284'])], ['staging', '8423b58']),
                  (1, [], None)], None)], None))

    __generate_repository(with_storage, new_user_1, 'history',
                          'Historical repository.', False, [],
                          (4, [(2, [], '#latest'), (3, [], 'latest')], None))

    __generate_repository(with_storage, new_user_1, 'complex',
                          'Complex repository with many branches and tags.',
                          False, [(new_user_2, 'read'), (dtrobot[0], 'read')],
                          (2, [(3, [], 'v2.0'),
                               (1, [(1, [(2, [], ['prod'])], 'staging'),
                                    (1, [], None)], None)], None))

    __generate_repository(
        with_storage, new_user_1, 'gargantuan', None, False,
        [], (2, [(3, [], 'v2.0'),
                 (1, [(1, [(1, [], ['latest', 'prod'])], 'staging'),
                      (1, [], None)], None), (20, [], 'v3.0'), (5, [], 'v4.0'),
                 (1, [(1, [], 'v5.0'), (1, [], 'v6.0')], None)], None))

    trusted_repo = __generate_repository(with_storage, new_user_1, 'trusted',
                                         'Trusted repository.', False, [],
                                         (4, [], ['latest', 'prod']))
    trusted_repo.trust_enabled = True
    trusted_repo.save()

    publicrepo = __generate_repository(
        with_storage, new_user_2, 'publicrepo',
        'Public repository pullable by the world.', True, [],
        (10, [], 'latest'))

    __generate_repository(with_storage, outside_org, 'coolrepo',
                          'Some cool repo.', False, [], (5, [], 'latest'))

    __generate_repository(with_storage, new_user_1, 'shared',
                          'Shared repository, another user can write.', False,
                          [(new_user_2, 'write'),
                           (reader, 'read')], (5, [], 'latest'))

    __generate_repository(with_storage, new_user_1, 'text-full-repo',
                          'This is a repository for testing text search',
                          False, [(new_user_2, 'write'),
                                  (reader, 'read')], (5, [], 'latest'))

    building = __generate_repository(with_storage, new_user_1, 'building',
                                     'Empty repository which is building.',
                                     False, [(new_user_2, 'write'),
                                             (reader, 'read')], (0, [], None))

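    # Create a build access token and a GitHub build trigger for the 'building' repository.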
    new_token = model.token.create_access_token(building, 'write',
                                                'build-worker')

    trigger = model.build.create_build_trigger(building,
                                               'github',
                                               '123authtoken',
                                               new_user_1,
                                               pull_robot=dtrobot[0])
    trigger.config = json.dumps({
        'build_source': 'jakedt/testconnect',
        'subdir': '',
        'dockerfile_path': 'Dockerfile',
        'context': '/',
    })
    trigger.save()

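    # Build job configuration shared by the repository builds created below.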
    repo = 'ci.devtable.com:5000/%s/%s' % (building.namespace_user.username,
                                           building.name)
    job_config = {
        'repository': repo,
        'docker_tags': ['latest'],
        'build_subdir': '',
        'trigger_metadata': {
            'commit': '3482adc5822c498e8f7db2e361e8d57b3d77ddd9',
            'ref': 'refs/heads/master',
            'default_branch': 'master'
        }
    }

    model.repository.star_repository(new_user_1, simple_repo)

    record = model.repository.create_email_authorization_for_repo(
        new_user_1.username, 'simple', '*****@*****.**')
    record.confirmed = True
    record.save()

    model.repository.create_email_authorization_for_repo(
        new_user_1.username, 'simple', '*****@*****.**')

    build2 = model.build.create_repository_build(
        building, new_token, job_config,
        '68daeebd-a5b9-457f-80a0-4363b882f8ea', 'build-name', trigger)
    build2.uuid = 'deadpork-dead-pork-dead-porkdeadpork'
    build2.save()

    build3 = model.build.create_repository_build(
        building, new_token, job_config,
        'f49d07f9-93da-474d-ad5f-c852107c3892', 'build-name', trigger)
    build3.uuid = 'deadduck-dead-duck-dead-duckdeadduck'
    build3.save()

    build1 = model.build.create_repository_build(
        building, new_token, job_config, '701dcc3724fb4f2ea6c31400528343cd',
        'build-name', trigger)
    build1.uuid = 'deadbeef-dead-beef-dead-beefdeadbeef'
    build1.save()

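    # Create several organizations, along with their robots, teams, and OAuth applications.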
    org = model.organization.create_organization('buynlarge',
                                                 '*****@*****.**',
                                                 new_user_1)
    org.stripe_id = TEST_STRIPE_ID
    org.save()

    liborg = model.organization.create_organization(
        'library', '*****@*****.**', new_user_1)
    liborg.save()

    titiorg = model.organization.create_organization('titi',
                                                     '*****@*****.**',
                                                     new_user_1)
    titiorg.save()

    thirdorg = model.organization.create_organization(
        'sellnsmall', '*****@*****.**', new_user_1)
    thirdorg.save()

    model.user.create_robot('coolrobot', org)

    oauth_app_1 = model.oauth.create_application(
        org,
        'Some Test App',
        'http://localhost:8000',
        'http://localhost:8000/o2c.html',
        client_id='deadbeef')

    model.oauth.create_application(
        org,
        'Some Other Test App',
        'http://quay.io',
        'http://localhost:8000/o2c.html',
        client_id='deadpork',
        description='This is another test application')

    model.oauth.create_access_token_for_testing(new_user_1,
                                                'deadbeef',
                                                'repo:admin',
                                                access_token='%s%s' %
                                                ('b' * 40, 'c' * 40))

    oauth_credential = Credential.from_string('dswfhasdf1')
    OAuthAuthorizationCode.create(application=oauth_app_1,
                                  code='Z932odswfhasdf1',
                                  scope='repo:admin',
                                  data='{"somejson": "goeshere"}',
                                  code_name='Z932odswfhasdf1Z932o',
                                  code_credential=oauth_credential)

    model.user.create_robot('neworgrobot', org)

    ownerbot = model.user.create_robot('ownerbot', org)[0]
    creatorbot = model.user.create_robot('creatorbot', org)[0]

    owners = model.team.get_organization_team('buynlarge', 'owners')
    owners.description = 'Owners have unfettered access across the entire org.'
    owners.save()

    org_repo = __generate_repository(with_storage, org, 'orgrepo',
                                     'Repository owned by an org.', False,
                                     [(outside_org, 'read')],
                                     (4, [], ['latest', 'prod']))

    __generate_repository(with_storage, org, 'anotherorgrepo',
                          'Another repository owned by an org.', False, [],
                          (4, [], ['latest', 'prod']))

    creators = model.team.create_team('creators', org, 'creator',
                                      'Creators of orgrepo.')

    reader_team = model.team.create_team('readers', org, 'member',
                                         'Readers of orgrepo.')
    model.team.add_or_invite_to_team(new_user_1, reader_team, outside_org)
    model.permission.set_team_repo_permission(reader_team.name,
                                              org_repo.namespace_user.username,
                                              org_repo.name, 'read')

    model.team.add_user_to_team(new_user_2, reader_team)
    model.team.add_user_to_team(reader, reader_team)
    model.team.add_user_to_team(ownerbot, owners)
    model.team.add_user_to_team(creatorbot, creators)
    model.team.add_user_to_team(creatoruser, creators)

    sell_owners = model.team.get_organization_team('sellnsmall', 'owners')
    sell_owners.description = 'Owners have unfettered access across the entire org.'
    sell_owners.save()

    model.team.add_user_to_team(new_user_4, sell_owners)

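    # Mark a couple of teams as synced against an LDAP group.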
    sync_config = {
        'group_dn': 'cn=Test-Group,ou=Users',
        'group_id': 'somegroupid'
    }
    synced_team = model.team.create_team('synced', org, 'member',
                                         'Some synced team.')
    model.team.set_team_syncing(synced_team, 'ldap', sync_config)

    another_synced_team = model.team.create_team('synced', thirdorg, 'member',
                                                 'Some synced team.')
    model.team.set_team_syncing(another_synced_team, 'ldap',
                                {'group_dn': 'cn=Test-Group,ou=Users'})

    __generate_repository(with_storage, new_user_1, 'superwide', None,
                          False, [], [(10, [], 'latest2'), (2, [], 'latest3'),
                                      (2, [(1, [], 'latest11'),
                                           (2, [], 'latest12')], 'latest4'),
                                      (2, [], 'latest5'), (2, [], 'latest6'),
                                      (2, [], 'latest7'), (2, [], 'latest8'),
                                      (2, [], 'latest9'), (2, [], 'latest10'),
                                      (2, [], 'latest13'), (2, [], 'latest14'),
                                      (2, [], 'latest15'), (2, [], 'latest16'),
                                      (2, [], 'latest17'),
                                      (2, [], 'latest18')])

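    # Set up a repository mirrored from an external registry (quay.io/coreos/etcd).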
    mirror_repo = __generate_repository(with_storage, new_user_1, 'mirrored',
                                        'Mirrored repository.', False,
                                        [(dtrobot[0], 'write'),
                                         (dtrobot2[0], 'write')],
                                        (4, [], ['latest', 'prod']))
    mirror_rule = model.repo_mirror.create_mirroring_rule(
        mirror_repo, ['latest', '3.3*'])
    mirror_args = (mirror_repo, mirror_rule, dtrobot[0], 'quay.io/coreos/etcd',
                   60 * 60 * 24)
    mirror_kwargs = {
        'external_registry_username': '******',
        'external_registry_password': '******',
        'external_registry_config': {},
        'is_enabled': True,
        'sync_start_date': datetime.utcnow()
    }
    mirror = model.repo_mirror.enable_mirroring_for_repository(
        *mirror_args, **mirror_kwargs)

    read_only_repo = __generate_repository(with_storage, new_user_1,
                                           'readonly', 'Read-Only Repo.',
                                           False, [],
                                           (4, [], ['latest', 'prod']))
    read_only_repo.state = RepositoryState.READ_ONLY
    read_only_repo.save()

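    # Add prototype permissions delegating access to users and a team within the org.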
    model.permission.add_prototype_permission(org,
                                              'read',
                                              activating_user=new_user_1,
                                              delegate_user=new_user_2)
    model.permission.add_prototype_permission(org,
                                              'read',
                                              activating_user=new_user_1,
                                              delegate_team=reader_team)
    model.permission.add_prototype_permission(org,
                                              'write',
                                              activating_user=new_user_2,
                                              delegate_user=new_user_1)

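    # Reference timestamps used when seeding service keys and action log entries.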
    today = datetime.today()
    week_ago = today - timedelta(6)
    six_ago = today - timedelta(5)
    four_ago = today - timedelta(4)
    yesterday = datetime.combine(date.today(),
                                 datetime.min.time()) - timedelta(hours=6)

    __generate_service_key('kid1', 'somesamplekey', new_user_1, today,
                           ServiceKeyApprovalType.SUPERUSER)
    __generate_service_key('kid2', 'someexpiringkey', new_user_1, week_ago,
                           ServiceKeyApprovalType.SUPERUSER,
                           today + timedelta(days=14))

    __generate_service_key('kid3', 'unapprovedkey', new_user_1, today, None)

    __generate_service_key(
        'kid4',
        'autorotatingkey',
        new_user_1,
        six_ago,
        ServiceKeyApprovalType.KEY_ROTATION,
        today + timedelta(days=1),
        rotation_duration=timedelta(hours=12).total_seconds())

    __generate_service_key('kid5',
                           'key for another service',
                           new_user_1,
                           today,
                           ServiceKeyApprovalType.SUPERUSER,
                           today + timedelta(days=14),
                           service='different_sample_service')

    __generate_service_key('kid6', 'someexpiredkey', new_user_1, week_ago,
                           ServiceKeyApprovalType.SUPERUSER,
                           today - timedelta(days=1))

    __generate_service_key('kid7', 'somewayexpiredkey', new_user_1, week_ago,
                           ServiceKeyApprovalType.SUPERUSER,
                           today - timedelta(days=30))

    # Add the test pull key as pre-approved for local and unittest registry testing.
    # Note: this must match the private key found in the local/test config.
    _TEST_JWK = {
        'e': 'AQAB',
        'kty': 'RSA',
        'n': 'yqdQgnelhAPMSeyH0kr3UGePK9oFOmNfwD0Ymnh7YYXr21VHWwyM2eVW3cnLd9KXywDFtGSe9oFDbnOuMCdUowdkBcaHju-isbv5KEbNSoy_T2Rip-6L0cY63YzcMJzv1nEYztYXS8wz76pSK81BKBCLapqOCmcPeCvV9yaoFZYvZEsXCl5jjXN3iujSzSF5Z6PpNFlJWTErMT2Z4QfbDKX2Nw6vJN6JnGpTNHZvgvcyNX8vkSgVpQ8DFnFkBEx54PvRV5KpHAq6AsJxKONMo11idQS2PfCNpa2hvz9O6UZe-eIX8jPo5NW8TuGZJumbdPT_nxTDLfCqfiZboeI0Pw'
    }

    key = model.service_keys.create_service_key('test_service_key',
                                                'test_service_key', 'quay',
                                                _TEST_JWK, {}, None)

    model.service_keys.approve_service_key(
        key.kid,
        ServiceKeyApprovalType.SUPERUSER,
        notes='Test service key for local/test registry testing')

    # Add an app specific token.
    token = model.appspecifictoken.create_token(new_user_1, 'some app')
    token.token_name = 'a' * 60
    token.token_secret = 'b' * 60
    token.save()

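    # Seed the action log with a variety of organization and repository events.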
    logs_model.log_action('org_create_team',
                          org.username,
                          performer=new_user_1,
                          timestamp=week_ago,
                          metadata={'team': 'readers'})

    logs_model.log_action('org_set_team_role',
                          org.username,
                          performer=new_user_1,
                          timestamp=week_ago,
                          metadata={
                              'team': 'readers',
                              'role': 'read'
                          })

    logs_model.log_action('create_repo',
                          org.username,
                          performer=new_user_1,
                          repository=org_repo,
                          timestamp=week_ago,
                          metadata={
                              'namespace': org.username,
                              'repo': 'orgrepo'
                          })

    logs_model.log_action('change_repo_permission',
                          org.username,
                          performer=new_user_2,
                          repository=org_repo,
                          timestamp=six_ago,
                          metadata={
                              'username': new_user_1.username,
                              'repo': 'orgrepo',
                              'role': 'admin'
                          })

    logs_model.log_action('change_repo_permission',
                          org.username,
                          performer=new_user_1,
                          repository=org_repo,
                          timestamp=six_ago,
                          metadata={
                              'username': new_user_2.username,
                              'repo': 'orgrepo',
                              'role': 'read'
                          })

    logs_model.log_action('add_repo_accesstoken',
                          org.username,
                          performer=new_user_1,
                          repository=org_repo,
                          timestamp=four_ago,
                          metadata={
                              'repo': 'orgrepo',
                              'token': 'deploytoken'
                          })

    logs_model.log_action('push_repo',
                          org.username,
                          performer=new_user_2,
                          repository=org_repo,
                          timestamp=today,
                          metadata={
                              'username': new_user_2.username,
                              'repo': 'orgrepo'
                          })

    logs_model.log_action('pull_repo',
                          org.username,
                          performer=new_user_2,
                          repository=org_repo,
                          timestamp=today,
                          metadata={
                              'username': new_user_2.username,
                              'repo': 'orgrepo'
                          })

    logs_model.log_action('pull_repo',
                          org.username,
                          repository=org_repo,
                          timestamp=today,
                          metadata={
                              'token': 'sometoken',
                              'token_code': 'somecode',
                              'repo': 'orgrepo'
                          })

    logs_model.log_action('delete_tag',
                          org.username,
                          performer=new_user_2,
                          repository=org_repo,
                          timestamp=today,
                          metadata={
                              'username': new_user_2.username,
                              'repo': 'orgrepo',
                              'tag': 'sometag'
                          })

    logs_model.log_action('pull_repo',
                          org.username,
                          repository=org_repo,
                          timestamp=today,
                          metadata={
                              'token_code': 'somecode',
                              'repo': 'orgrepo'
                          })

    logs_model.log_action('pull_repo',
                          new_user_2.username,
                          repository=publicrepo,
                          timestamp=yesterday,
                          metadata={
                              'token_code': 'somecode',
                              'repo': 'publicrepo'
                          })

    logs_model.log_action('build_dockerfile',
                          new_user_1.username,
                          repository=building,
                          timestamp=today,
                          metadata={
                              'repo': 'building',
                              'namespace': new_user_1.username,
                              'trigger_id': trigger.uuid,
                              'config': json.loads(trigger.config),
                              'service': trigger.service.name
                          })

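    # Create global messages with different severities and media types.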
    model.message.create([{
        'content': 'We love you, Quay customers!',
        'severity': 'info',
        'media_type': 'text/plain'
    }])

    model.message.create([{
        'content': 'This is a **development** install of Quay',
        'severity': 'warning',
        'media_type': 'text/markdown'
    }])

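    # Enqueue a placeholder item on a fake work queue.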
    fake_queue = WorkQueue('fakequeue', tf)
    fake_queue.put(['canonical', 'job', 'name'], '{}')

    model.user.create_user_prompt(new_user_4, 'confirm_username')

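    # Backfill action counts and search scores for every repository.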
    while True:
        to_count = model.repositoryactioncount.find_uncounted_repository()
        if not to_count:
            break

        model.repositoryactioncount.count_repository_actions(to_count)
        model.repositoryactioncount.update_repository_score(to_count)
Example #23
0
def track_and_log(event_name,
                  repo_obj,
                  analytics_name=None,
                  analytics_sample=1,
                  **kwargs):
    repo_name = repo_obj.name
    namespace_name = repo_obj.namespace_name
    metadata = {
        "repo": repo_name,
        "namespace": namespace_name,
        "user-agent": request.user_agent.string,
    }
    metadata.update(kwargs)

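    # Whether the repository lives in a free namespace; forwarded to logs_model.log_action below.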
    is_free_namespace = False
    if hasattr(repo_obj, "is_free_namespace"):
        is_free_namespace = repo_obj.is_free_namespace

    # Add auth context metadata.
    analytics_id = "anonymous"
    auth_context = get_authenticated_context()
    if auth_context is not None:
        analytics_id, context_metadata = auth_context.analytics_id_and_public_metadata()
        metadata.update(context_metadata)

    # Publish the user event (if applicable)
    logger.debug("Checking publishing %s to the user events system",
                 event_name)
    if auth_context and auth_context.has_nonrobot_user:
        logger.debug("Publishing %s to the user events system", event_name)
        user_event_data = {
            "action": event_name,
            "repository": repo_name,
            "namespace": namespace_name,
        }

        event = userevents.get_event(auth_context.authed_user.username)
        event.publish_event_data("docker-cli", user_event_data)

    # Save the action to mixpanel.
    if random.random() < analytics_sample:
        if analytics_name is None:
            analytics_name = event_name

        logger.debug("Logging the %s to analytics engine", analytics_name)

        request_parsed = urlparse(request.url_root)
        extra_params = {
            "repository": "%s/%s" % (namespace_name, repo_name),
            "user-agent": request.user_agent.string,
            "hostname": request_parsed.hostname,
        }

        analytics.track(analytics_id, analytics_name, extra_params)

    # Add the resolved information to the metadata.
    logger.debug("Resolving IP address %s", get_request_ip())
    resolved_ip = ip_resolver.resolve_ip(get_request_ip())
    if resolved_ip is not None:
        metadata["resolved_ip"] = resolved_ip._asdict()

    logger.debug("Resolved IP address %s", get_request_ip())

    # Log the action to the database.
    logger.debug("Logging the %s to logs system", event_name)
    try:
        logs_model.log_action(
            event_name,
            namespace_name,
            performer=get_authenticated_user(),
            ip=get_request_ip(),
            metadata=metadata,
            repository=repo_obj,
            is_free_namespace=is_free_namespace,
        )
        logger.debug("Track and log of %s complete", event_name)
    except ReadOnlyModeException:
        pass
Example #24
0
def start_build(repository, prepared_build, pull_robot_name=None):
    # Ensure that builds are only run in image repositories.
    if repository.kind.name != 'image':
        raise Exception(
            'Attempt to start a build for application repository %s' %
            repository.id)

    # Ensure the repository isn't in mirror or read-only mode.
    if repository.state != RepositoryState.NORMAL:
        raise Exception(
            ('Attempt to start a build for a non-normal repository: %s %s' %
             (repository.id, repository.state)))

    # Ensure that disabled triggers are not run.
    if prepared_build.trigger is not None and not prepared_build.trigger.enabled:
        raise BuildTriggerDisabledException

    if repository.namespace_user.maximum_queued_builds_count is not None:
        queue_item_canonical_name = [repository.namespace_user.username]
        alive_builds = dockerfile_build_queue.num_alive_jobs(
            queue_item_canonical_name)
        if alive_builds >= repository.namespace_user.maximum_queued_builds_count:
            logger.debug(
                'Prevented queueing of build under namespace %s due to reaching max: %s',
                repository.namespace_user.username,
                repository.namespace_user.maximum_queued_builds_count)
            raise MaximumBuildsQueuedException()

    host = app.config['SERVER_HOSTNAME']
    repo_path = '%s/%s/%s' % (host, repository.namespace_user.username,
                              repository.name)

    new_token = model.token.create_access_token(
        repository,
        'write',
        kind='build-worker',
        friendly_name='Repository Build Token')
    logger.debug('Creating build %s with repo %s tags %s',
                 prepared_build.build_name, repo_path, prepared_build.tags)

    job_config = {
        'docker_tags': prepared_build.tags,
        'registry': host,
        'build_subdir': prepared_build.subdirectory,
        'context': prepared_build.context,
        'trigger_metadata': prepared_build.metadata or {},
        'is_manual': prepared_build.is_manual,
        'manual_user': get_authenticated_user().username if get_authenticated_user() else None,
        'archive_url': prepared_build.archive_url
    }

    with app.config['DB_TRANSACTION_FACTORY'](db):
        build_request = model.build.create_repository_build(
            repository,
            new_token,
            job_config,
            prepared_build.dockerfile_id,
            prepared_build.build_name,
            prepared_build.trigger,
            pull_robot_name=pull_robot_name)

        pull_creds = model.user.get_pull_credentials(
            pull_robot_name) if pull_robot_name else None

        json_data = json.dumps({
            'build_uuid': build_request.uuid,
            'pull_credentials': pull_creds
        })

        queue_id = dockerfile_build_queue.put(
            [repository.namespace_user.username, repository.name],
            json_data,
            retries_remaining=3)

        build_request.queue_id = queue_id
        build_request.save()

    # Add the queueing of the build to the metrics queue.
    metric_queue.repository_build_queued.Inc(
        labelvalues=[repository.namespace_user.username, repository.name])

    # Add the build to the repo's log and spawn the build_queued notification.
    event_log_metadata = {
        'build_id': build_request.uuid,
        'docker_tags': prepared_build.tags,
        'repo': repository.name,
        'namespace': repository.namespace_user.username,
        'is_manual': prepared_build.is_manual,
        'manual_user': get_authenticated_user().username if get_authenticated_user() else None
    }

    if prepared_build.trigger:
        event_log_metadata['trigger_id'] = prepared_build.trigger.uuid
        event_log_metadata['trigger_kind'] = prepared_build.trigger.service.name
        event_log_metadata['trigger_metadata'] = prepared_build.metadata or {}

    logs_model.log_action('build_dockerfile',
                          repository.namespace_user.username,
                          ip=get_request_ip(),
                          metadata=event_log_metadata,
                          repository=repository)

    # TODO: remove when more endpoints have been converted to using interfaces
    repo = AttrDict({
        'namespace_name': repository.namespace_user.username,
        'name': repository.name,
    })

    spawn_notification(repo,
                       'build_queued',
                       event_log_metadata,
                       subpage='build/%s' % build_request.uuid,
                       pathargs=['build', build_request.uuid])

    return build_request
Example #25
0
def put_service_key(service, kid):
    metadata = {'ip': get_request_ip()}

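    # Optional rotation duration and expiration timestamp are taken from the query string.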
    rotation_duration = request.args.get('rotation', None)
    expiration_date = request.args.get('expiration', None)
    if expiration_date is not None:
        try:
            expiration_date = datetime.utcfromtimestamp(float(expiration_date))
        except ValueError:
            logger.exception('Error parsing expiration date on key')
            abort(400)

    try:
        jwk = request.get_json()
    except ValueError:
        logger.exception('Error parsing JWK')
        abort(400)

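    # A bearer JWT must accompany the request: self-signed for new keys, or signed by an existing key for rotation.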
    jwt_header = request.headers.get(JWT_HEADER_NAME, '')
    match = jwtutil.TOKEN_REGEX.match(jwt_header)
    if match is None:
        logger.error('Could not find matching bearer token')
        abort(400)

    encoded_jwt = match.group(1)

    _validate_jwk(jwk)

    signer_kid = _signer_kid(encoded_jwt, allow_none=True)
    if kid == signer_kid or signer_kid is None:
        # The key is self-signed. Create a new instance and await approval.
        _validate_jwt(encoded_jwt, jwk, service)
        model.create_service_key('',
                                 kid,
                                 service,
                                 jwk,
                                 metadata,
                                 expiration_date,
                                 rotation_duration=rotation_duration)

        logs_model.log_action('service_key_create',
                              ip=get_request_ip(),
                              metadata={
                                  'kid': kid,
                                  'preshared': False,
                                  'service': service,
                                  'name': '',
                                  'expiration_date': expiration_date,
                                  'user_agent':
                                  request.headers.get('User-Agent'),
                                  'ip': get_request_ip(),
                              })

        return make_response('', 202)

    # Key is going to be rotated.
    metadata.update({'created_by': 'Key Rotation'})
    signer_key = _lookup_service_key(service, signer_kid)
    signer_jwk = signer_key.jwk

    _validate_jwt(encoded_jwt, signer_jwk, service)

    try:
        model.replace_service_key(signer_key.kid, kid, jwk, metadata,
                                  expiration_date)
    except ServiceKeyDoesNotExist:
        abort(404)

    logs_model.log_action('service_key_rotate',
                          ip=get_request_ip(),
                          metadata={
                              'kid': kid,
                              'signer_kid': signer_key.kid,
                              'service': service,
                              'name': signer_key.name,
                              'expiration_date': expiration_date,
                              'user_agent': request.headers.get('User-Agent'),
                              'ip': get_request_ip(),
                          })

    return make_response('', 200)
Example #26
0
def test_export_logs(initialized_db, storage_engine, has_logs):
    # Delete all existing logs.
    database.LogEntry3.delete().execute()

    repo = model.repository.get_repository('devtable', 'simple')
    user = model.user.get_user('devtable')

    now = datetime.now()
    if has_logs:
        # Add new logs over a multi-day period.
        for index in range(-10, 10):
            logs_model.log_action('push_repo',
                                  'devtable',
                                  user,
                                  '0.0.0.0', {'index': index},
                                  repo,
                                  timestamp=now + timedelta(days=index))

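    # Run the export worker against a mocked callback endpoint.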
    worker = ExportActionLogsWorker(None)
    called = [{}]

    @urlmatch(netloc=r'testcallback')
    def handle_request(url, request):
        called[0] = json.loads(request.body)
        return {'status_code': 200, 'content': '{}'}

    def format_date(dt):
        return dt.strftime("%m/%d/%Y")

    with HTTMock(handle_request):
        worker._process_queue_item(
            {
                'export_id': 'someid',
                'repository_id': repo.id,
                'namespace_id': repo.namespace_user.id,
                'namespace_name': 'devtable',
                'repository_name': 'simple',
                'start_time': format_date(now + timedelta(days=-10)),
                'end_time': format_date(now + timedelta(days=10)),
                'callback_url': 'http://testcallback/',
                'callback_email': None,
            }, storage_engine)

    assert called[0]
    assert called[0][u'export_id'] == 'someid'
    assert called[0][u'status'] == 'success'

    url = called[0][u'exported_data_url']

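    # The exported-data URL points either at local storage or at S3, depending on the storage engine under test.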
    if url.find('http://localhost:5000/exportedlogs/') == 0:
        storage_id = url[len('http://localhost:5000/exportedlogs/'):]
    else:
        assert url.find(
            'https://some_bucket.s3.amazonaws.com/some/path/exportedactionlogs/'
        ) == 0
        storage_id, _ = url[len(
            'https://some_bucket.s3.amazonaws.com/some/path/exportedactionlogs/'
        ):].split('?')

    created = storage_engine.get_content(storage_engine.preferred_locations,
                                         'exportedactionlogs/' + storage_id)
    created_json = json.loads(created)

    if has_logs:
        found = set()
        for log in created_json['logs']:
            if log.get('terminator'):
                continue

            found.add(log['metadata']['index'])

        for index in range(-10, 10):
            assert index in found
    else:
        assert created_json['logs'] == [{'terminator': True}]