Example #1
def get_user_by_id(user_db_id):
    try:
        return User.get(User.id == user_db_id, User.organization == False)
    except User.DoesNotExist:
        return None
Example #2
def is_valid(self):
    try:
        list(User.select().limit(1))
        return True
    except Exception:
        return False
Example #3
def find_user_by_email(email):
    try:
        return User.get(User.email == email)
    except User.DoesNotExist:
        return None
Example #4
def get_namespace_user(username):
    try:
        return User.get(User.username == username)
    except User.DoesNotExist:
        return None
Example #5
def create_user_noverify(username,
                         email,
                         email_required=True,
                         prompts=tuple(),
                         is_possible_abuser=False):
    if email_required:
        if not validate_email(email):
            raise InvalidEmailAddressException("Invalid email address: %s" %
                                               email)
    else:
        # If email addresses are not required and none was specified, then we just use a unique
        # ID to ensure that the database consistency check remains intact.
        email = email or str(uuid.uuid4())

    (username_valid, username_issue) = validate_username(username)
    if not username_valid:
        raise InvalidUsernameException("Invalid namespace %s: %s" %
                                       (username, username_issue))

    try:
        existing = User.get((User.username == username)
                            | (User.email == email))
        logger.debug("Existing user with same username or email.")

        # A user already exists with either the same username or email
        if existing.username == username:
            assert not existing.robot

            msg = (
                "Username has already been taken by an organization and cannot be reused: %s"
                % username)
            if not existing.organization:
                msg = "Username has already been taken by user cannot be reused: %s" % username

            raise InvalidUsernameException(msg)

        raise InvalidEmailAddressException("Email has already been used: %s" %
                                           email)
    except User.DoesNotExist:
        # This is actually the happy path
        logger.debug("Email and username are unique!")

    # Create the user.
    try:
        default_expr_s = _convert_to_s(
            config.app_config["DEFAULT_TAG_EXPIRATION"])
        default_max_builds = config.app_config.get(
            "DEFAULT_NAMESPACE_MAXIMUM_BUILD_COUNT")
        threat_max_builds = config.app_config.get(
            "THREAT_NAMESPACE_MAXIMUM_BUILD_COUNT")

        if is_possible_abuser and threat_max_builds is not None:
            default_max_builds = threat_max_builds

        new_user = User.create(
            username=username,
            email=email,
            removed_tag_expiration_s=default_expr_s,
            maximum_queued_builds_count=default_max_builds,
        )
        for prompt in prompts:
            create_user_prompt(new_user, prompt)

        return new_user
    except Exception as ex:
        raise DataModelException(ex)
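A minimal usage sketch for the function above, assuming the module-level logger and the exception classes shown in the example; the service-account wrapper itself is hypothetical.

def create_service_account(username):
    # Hypothetical helper: with email_required=False and no email given, the function
    # above substitutes a random UUID so the unique-email constraint stays satisfied.
    try:
        return create_user_noverify(username, email=None, email_required=False)
    except (InvalidUsernameException, InvalidEmailAddressException) as ex:
        logger.warning("Could not create service account %s: %s", username, ex)
        return None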
Example #6
def get_minimum_user_id():
    return User.select(fn.Min(User.id)).tuples().get()[0]
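The .tuples().get()[0] call above pulls the first column of the first result row; an equivalent sketch using Peewee's scalar() shortcut (assuming a Peewee version that provides it) is:

def get_minimum_user_id_scalar():
    # scalar() returns the first column of the first result row directly.
    return User.select(fn.Min(User.id)).scalar()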
Example #7
def test_readreplica(init_db_path, tmpdir_factory):
    primary_file = str(tmpdir_factory.mktemp("data").join("primary.db"))
    replica_file = str(tmpdir_factory.mktemp("data").join("replica.db"))

    # Copy the initialized database to two different locations.
    shutil.copy2(init_db_path, primary_file)
    shutil.copy2(init_db_path, replica_file)

    db_config = {
        'DB_URI': 'sqlite:///{0}'.format(primary_file),
        'DB_READ_REPLICAS': [
            {
                'DB_URI': 'sqlite:///{0}'.format(replica_file)
            },
        ],
        "DB_CONNECTION_ARGS": {
            'threadlocals': True,
            'autorollback': True,
        },
        "DB_TRANSACTION_FACTORY": lambda x: FakeTransaction(),
        "FOR_TESTING": True,
        "DATABASE_SECRET_KEY": "anothercrazykey!",
    }

    # Initialize the DB with the primary and the replica.
    configure(db_config)
    assert not read_only_config.obj.is_readonly
    assert read_only_config.obj.read_replicas

    # Ensure we can read the data.
    devtable_user = User.get(username='devtable')
    assert devtable_user.username == 'devtable'

    # Configure with a bad primary. Reading should still work since we're hitting the replica.
    db_config['DB_URI'] = 'sqlite:///does/not/exist'
    configure(db_config)

    assert not read_only_config.obj.is_readonly
    assert read_only_config.obj.read_replicas

    devtable_user = User.get(username='devtable')
    assert devtable_user.username == 'devtable'

    # Try to change some data. This should fail because the primary is broken.
    with pytest.raises(OperationalError):
        devtable_user.email = 'newlychanged'
        devtable_user.save()

    # Fix the primary and try again.
    db_config['DB_URI'] = 'sqlite:///{0}'.format(primary_file)
    configure(db_config)

    assert not read_only_config.obj.is_readonly
    assert read_only_config.obj.read_replicas

    devtable_user.email = 'newlychanged'
    devtable_user.save()

    # Mark the system as readonly.
    db_config['DB_URI'] = 'sqlite:///{0}'.format(primary_file)
    db_config['REGISTRY_STATE'] = 'readonly'
    configure(db_config)

    assert read_only_config.obj.is_readonly
    assert read_only_config.obj.read_replicas

    # Ensure all write operations raise a readonly mode exception.
    with pytest.raises(ReadOnlyModeException):
        devtable_user.email = 'newlychanged2'
        devtable_user.save()

    with pytest.raises(ReadOnlyModeException):
        User.create(username='******')

    with pytest.raises(ReadOnlyModeException):
        User.delete().where(User.username == 'foo').execute()

    with pytest.raises(ReadOnlyModeException):
        User.update(username='******').where(User.username == 'foo').execute()

    # Reset the config on the DB, so we don't mess up other tests.
    configure({
        'DB_URI': 'sqlite:///{0}'.format(primary_file),
        "DB_CONNECTION_ARGS": {
            'threadlocals': True,
            'autorollback': True,
        },
        "DB_TRANSACTION_FACTORY": lambda x: FakeTransaction(),
        "DATABASE_SECRET_KEY": "anothercrazykey!",
    })
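FakeTransaction, passed as DB_TRANSACTION_FACTORY above, is not defined in this excerpt; a plausible minimal shape is a no-op context manager, sketched here as an assumption:

class FakeTransaction(object):
    # Hypothetical stand-in: entering/exiting does nothing, so test code can wrap
    # statements in a "transaction" without touching SQLite's real transaction state.
    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        return False  # do not swallow exceptions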
Example #8
def verify_user(username_or_email, password):
    """ Verifies that the given username/email + password pair is valid. If the username or e-mail
      address is invalid, returns None. If the password specified does not match for the given user,
      either returns None or raises TooManyLoginAttemptsException if there have been too many
      invalid login attempts. Returns the user object if the login was valid.
  """

    # Make sure we didn't get any unicode for the username.
    try:
        str(username_or_email)
    except ValueError:
        return None

    # Fetch the user with the matching username or e-mail address.
    try:
        fetched = User.get((User.username == username_or_email)
                           | (User.email == username_or_email))
    except User.DoesNotExist:
        return None

    # If the user has any invalid login attempts, check to see if we are within the exponential
    # backoff window for the user. If so, we raise an exception indicating that the user cannot
    # login.
    now = datetime.utcnow()
    if fetched.invalid_login_attempts > 0:
        can_retry_at = exponential_backoff(fetched.invalid_login_attempts,
                                           EXPONENTIAL_BACKOFF_SCALE,
                                           fetched.last_invalid_login)

        if can_retry_at > now:
            retry_after = can_retry_at - now
            raise TooManyLoginAttemptsException("Too many login attempts.",
                                                retry_after.total_seconds())

    # Hash the given password and compare it to the specified password.
    if (fetched.password_hash and hash_password(
            password, fetched.password_hash) == fetched.password_hash):

        # If the user previously had any invalid login attempts, clear them out now.
        if fetched.invalid_login_attempts > 0:
            try:
                (User.update(invalid_login_attempts=0).where(
                    User.id == fetched.id).execute())

                # Mark that the user was accessed.
                _basequery.update_last_accessed(fetched)
            except ReadOnlyModeException:
                pass

        # Return the valid user.
        return fetched

    # Otherwise, update the user's invalid login attempts.
    try:
        (User.update(
            invalid_login_attempts=User.invalid_login_attempts + 1,
            last_invalid_login=now).where(User.id == fetched.id).execute())
    except ReadOnlyModeException:
        pass

    # We weren't able to authorize the user
    return None
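A hedged sketch of a caller for verify_user; the handler shape and the retry_after attribute name on TooManyLoginAttemptsException are assumptions, since only the constructor arguments are visible above.

def login_handler(username_or_email, password):
    try:
        user = verify_user(username_or_email, password)
    except TooManyLoginAttemptsException as ex:
        # Assumes the exception keeps the backoff window (in seconds) as ex.retry_after.
        return {"error": "Too many login attempts"}, 429, {"Retry-After": str(int(ex.retry_after))}

    if user is None:
        return {"error": "Invalid credentials"}, 403

    return {"username": user.username}, 200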
Example #9
def __get_org_admin_users(org):
    return (User.select().join(TeamMember).join(Team).join(TeamRole).where(
        Team.organization == org, TeamRole.name == 'admin',
        User.robot == False).distinct())
Example #10
def list_team_users(team):
    """ Returns an iterator of all the *users* found in a team. Does not include robots. """
    return (User.select().join(TeamMember).join(Team).where(
        Team.id == team, User.robot == False))
Example #11
def get_organization_team_members(teamid):
    joined = User.select().join(TeamMember).join(Team)
    query = joined.where(Team.id == teamid)
    return query
Example #12
def get_organization(name):
    try:
        return User.get(username=name, organization=True)
    except User.DoesNotExist:
        raise InvalidOrganizationException('Organization does not exist: %s' %
                                           name)
Example #13
def has_users():
    """
    Return False if there are no users in the database yet.
    """
    return bool(User.select().limit(1))
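The bool(... limit(1)) idiom above can also be written with Peewee's exists() helper; a one-line equivalent sketch:

def has_users_exists():
    # exists() returns True if any row matches the query.
    return User.select().exists()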
Example #14
def lookup_robot(robot_username):
    try:
        return User.get(username=robot_username, robot=True)
    except User.DoesNotExist:
        raise InvalidRobotException("Could not find robot with username: %s" %
                                    robot_username)
Example #15
def upgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    # NOTE: Disconnects the Alembic database connection. We do this because the Peewee calls below
    # use a *different* connection, and if we leave the alembic connection open, it'll time out.
    # See: https://github.com/sqlalchemy/alembic/issues/630
    op.get_bind().execute("COMMIT")
    op.get_bind().invalidate()

    from app import app

    if app.config.get("SETUP_COMPLETE", False) or tester.is_testing():
        # AccessToken.
        logger.info("Backfilling encrypted credentials for access tokens")
        for access_token in _iterate(AccessToken,
                                     ((AccessToken.token_name >> None) |
                                      (AccessToken.token_name == ""))):
            logger.info(
                "Backfilling encrypted credentials for access token %s",
                access_token.id)
            assert access_token.code is not None
            assert access_token.code[:ACCESS_TOKEN_NAME_PREFIX_LENGTH]
            assert access_token.code[ACCESS_TOKEN_NAME_PREFIX_LENGTH:]

            token_name = access_token.code[:ACCESS_TOKEN_NAME_PREFIX_LENGTH]
            token_code = _decrypted(
                access_token.code[ACCESS_TOKEN_NAME_PREFIX_LENGTH:])

            (AccessToken.update(
                token_name=token_name, token_code=token_code).where(
                    AccessToken.id == access_token.id,
                    AccessToken.code == access_token.code).execute())

        assert AccessToken.select().where(
            AccessToken.token_name >> None).count() == 0

        # Robots.
        logger.info("Backfilling encrypted credentials for robots")
        while True:
            has_row = False
            query = (User.select().join(
                RobotAccountToken, JOIN.LEFT_OUTER).where(
                    User.robot == True,
                    RobotAccountToken.id >> None).limit(BATCH_SIZE))

            for robot_user in query:
                logger.info("Backfilling encrypted credentials for robot %s",
                            robot_user.id)
                has_row = True
                try:
                    RobotAccountToken.create(
                        robot_account=robot_user,
                        token=_decrypted(robot_user.email),
                        fully_migrated=False,
                    )
                except IntegrityError:
                    break

            if not has_row:
                break

        # RepositoryBuildTrigger
        logger.info(
            "Backfilling encrypted credentials for repo build triggers")
        for repo_build_trigger in _iterate(
                RepositoryBuildTrigger,
            (RepositoryBuildTrigger.fully_migrated == False)):
            logger.info(
                "Backfilling encrypted credentials for repo build trigger %s",
                repo_build_trigger.id)

            (RepositoryBuildTrigger.update(
                secure_auth_token=_decrypted(repo_build_trigger.auth_token),
                secure_private_key=_decrypted(repo_build_trigger.private_key),
                fully_migrated=True,
            ).where(
                RepositoryBuildTrigger.id == repo_build_trigger.id,
                RepositoryBuildTrigger.uuid == repo_build_trigger.uuid,
            ).execute())

        assert (RepositoryBuildTrigger.select().where(
            RepositoryBuildTrigger.fully_migrated == False).count()) == 0

        # AppSpecificAuthToken
        logger.info(
            "Backfilling encrypted credentials for app specific auth tokens")
        for token in _iterate(
                AppSpecificAuthToken,
            ((AppSpecificAuthToken.token_name >> None)
             | (AppSpecificAuthToken.token_name == "")
             | (AppSpecificAuthToken.token_secret >> None)),
        ):
            logger.info(
                "Backfilling encrypted credentials for app specific auth %s",
                token.id)
            assert token.token_code[AST_TOKEN_NAME_PREFIX_LENGTH:]

            token_name = token.token_code[:AST_TOKEN_NAME_PREFIX_LENGTH]
            token_secret = _decrypted(
                token.token_code[AST_TOKEN_NAME_PREFIX_LENGTH:])
            assert token_name
            assert token_secret

            (AppSpecificAuthToken.update(
                token_name=token_name, token_secret=token_secret).where(
                    AppSpecificAuthToken.id == token.id,
                    AppSpecificAuthToken.token_code == token.token_code,
                ).execute())

        assert (AppSpecificAuthToken.select().where(
            AppSpecificAuthToken.token_name >> None).count()) == 0

        # OAuthAccessToken
        logger.info("Backfilling credentials for OAuth access tokens")
        for token in _iterate(
                OAuthAccessToken,
            ((OAuthAccessToken.token_name >> None) |
             (OAuthAccessToken.token_name == "")),
        ):
            logger.info("Backfilling credentials for OAuth access token %s",
                        token.id)
            token_name = token.access_token[:OAUTH_ACCESS_TOKEN_PREFIX_LENGTH]
            token_code = Credential.from_string(
                token.access_token[OAUTH_ACCESS_TOKEN_PREFIX_LENGTH:])
            assert token_name
            assert token.access_token[OAUTH_ACCESS_TOKEN_PREFIX_LENGTH:]

            (OAuthAccessToken.update(
                token_name=token_name, token_code=token_code).where(
                    OAuthAccessToken.id == token.id,
                    OAuthAccessToken.access_token == token.access_token,
                ).execute())

        assert (OAuthAccessToken.select().where(
            OAuthAccessToken.token_name >> None).count()) == 0

        # OAuthAuthorizationCode
        logger.info("Backfilling credentials for OAuth auth code")
        for code in _iterate(
                OAuthAuthorizationCode,
            ((OAuthAuthorizationCode.code_name >> None) |
             (OAuthAuthorizationCode.code_name == "")),
        ):
            logger.info("Backfilling credentials for OAuth auth code %s",
                        code.id)
            user_code = code.code or random_string_generator(
                AUTHORIZATION_CODE_PREFIX_LENGTH * 2)()
            code_name = user_code[:AUTHORIZATION_CODE_PREFIX_LENGTH]
            code_credential = Credential.from_string(
                user_code[AUTHORIZATION_CODE_PREFIX_LENGTH:])
            assert code_name
            assert user_code[AUTHORIZATION_CODE_PREFIX_LENGTH:]

            (OAuthAuthorizationCode.update(
                code_name=code_name, code_credential=code_credential).where(
                    OAuthAuthorizationCode.id == code.id).execute())

        assert (OAuthAuthorizationCode.select().where(
            OAuthAuthorizationCode.code_name >> None).count()) == 0

        # OAuthApplication
        logger.info("Backfilling secret for OAuth applications")
        for app in _iterate(OAuthApplication,
                            OAuthApplication.fully_migrated == False):
            logger.info("Backfilling secret for OAuth application %s", app.id)
            client_secret = app.client_secret or str(uuid.uuid4())
            secure_client_secret = _decrypted(client_secret)

            (OAuthApplication.update(
                secure_client_secret=secure_client_secret,
                fully_migrated=True).where(
                    OAuthApplication.id == app.id,
                    OAuthApplication.fully_migrated == False).execute())

        assert (OAuthApplication.select().where(
            OAuthApplication.fully_migrated == False).count()) == 0
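The _iterate helper used throughout the backfill above is not shown in this excerpt; one plausible shape is keyset pagination over the primary key, sketched below as an assumption rather than the migration's actual helper.

def _iterate(model_class, clause, batch_size=1000):
    # Hypothetical batched iterator: walk rows matching `clause` in id order so the
    # backfill never materializes the whole table at once.
    last_id = 0
    while True:
        rows = list(model_class.select()
                    .where(clause, model_class.id > last_id)
                    .order_by(model_class.id)
                    .limit(batch_size))
        if not rows:
            return
        for row in rows:
            yield row
        last_id = rows[-1].id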
Example #16
def get_user_or_org_by_customer_id(customer_id):
    try:
        return User.get(User.stripe_id == customer_id)
    except User.DoesNotExist:
        return None
Example #17
def populate_database(minimal=False, with_storage=False):
    logger.debug("Populating the DB with test data.")

    # Check if the data already exists. If so, we skip. This can happen between calls from the
    # "old style" tests and the new py.test's.
    try:
        User.get(username="******")
        logger.debug("DB already populated")
        return
    except User.DoesNotExist:
        pass

    # Note: databases set up with a "real" schema (via Alembic) will not have these types
    # defined, so we create them here if necessary.
    try:
        ImageStorageLocation.get(name="local_eu")
        ImageStorageLocation.get(name="local_us")
    except ImageStorageLocation.DoesNotExist:
        ImageStorageLocation.create(name="local_eu")
        ImageStorageLocation.create(name="local_us")

    try:
        NotificationKind.get(name="test_notification")
    except NotificationKind.DoesNotExist:
        NotificationKind.create(name="test_notification")

    new_user_1 = model.user.create_user("devtable", "password",
                                        "*****@*****.**")
    new_user_1.verified = True
    new_user_1.stripe_id = TEST_STRIPE_ID
    new_user_1.save()

    if minimal:
        logger.debug(
            "Skipping most db population because user requested mininal db")
        return

    UserRegion.create(user=new_user_1,
                      location=ImageStorageLocation.get(name="local_us"))
    model.release.set_region_release("quay", "us", "v0.1.2")

    model.user.create_confirm_email_code(new_user_1,
                                         new_email="*****@*****.**")

    disabled_user = model.user.create_user("disabled", "password",
                                           "*****@*****.**")
    disabled_user.verified = True
    disabled_user.enabled = False
    disabled_user.save()

    dtrobot = model.user.create_robot("dtrobot", new_user_1)
    dtrobot2 = model.user.create_robot("dtrobot2", new_user_1)

    new_user_2 = model.user.create_user("public", "password",
                                        "*****@*****.**")
    new_user_2.verified = True
    new_user_2.save()

    new_user_3 = model.user.create_user("freshuser", "password",
                                        "*****@*****.**")
    new_user_3.verified = True
    new_user_3.save()

    another_robot = model.user.create_robot("anotherrobot", new_user_3)

    new_user_4 = model.user.create_user("randomuser", "password",
                                        "*****@*****.**")
    new_user_4.verified = True
    new_user_4.save()

    new_user_5 = model.user.create_user("unverified", "password",
                                        "*****@*****.**")
    new_user_5.save()

    reader = model.user.create_user("reader", "password", "*****@*****.**")
    reader.verified = True
    reader.save()

    creatoruser = model.user.create_user("creator", "password",
                                         "*****@*****.**")
    creatoruser.verified = True
    creatoruser.save()

    outside_org = model.user.create_user("outsideorg", "password",
                                         "*****@*****.**")
    outside_org.verified = True
    outside_org.save()

    model.notification.create_notification(
        "test_notification",
        new_user_1,
        metadata={
            "some": "value",
            "arr": [1, 2, 3],
            "obj": {
                "a": 1,
                "b": 2
            }
        },
    )

    from_date = datetime.utcnow()
    to_date = from_date + timedelta(hours=1)
    notification_metadata = {
        "from_date": formatdate(calendar.timegm(from_date.utctimetuple())),
        "to_date": formatdate(calendar.timegm(to_date.utctimetuple())),
        "reason": "database migration",
    }
    model.notification.create_notification("maintenance",
                                           new_user_1,
                                           metadata=notification_metadata)

    __generate_repository(
        with_storage,
        new_user_4,
        "randomrepo",
        "Random repo repository.",
        False,
        [],
        (4, [], ["latest", "prod"]),
    )

    simple_repo = __generate_repository(
        with_storage,
        new_user_1,
        "simple",
        "Simple repository.",
        False,
        [],
        (4, [], ["latest", "prod"]),
    )

    # Add some labels to the latest tag's manifest.
    tag_manifest = model.tag.load_tag_manifest(new_user_1.username, "simple",
                                               "latest")
    first_label = model.label.create_manifest_label(tag_manifest, "foo", "bar",
                                                    "manifest")
    model.label.create_manifest_label(tag_manifest, "foo", "baz", "api")
    model.label.create_manifest_label(tag_manifest, "anotherlabel", "1234",
                                      "internal")
    model.label.create_manifest_label(tag_manifest, "jsonlabel",
                                      '{"hey": "there"}', "internal")

    label_metadata = {
        "key": "foo",
        "value": "bar",
        "id": first_label.id,
        "manifest_digest": tag_manifest.digest,
    }

    logs_model.log_action(
        "manifest_label_add",
        new_user_1.username,
        performer=new_user_1,
        timestamp=datetime.now(),
        metadata=label_metadata,
        repository=tag_manifest.tag.repository,
    )

    model.blob.initiate_upload(new_user_1.username, simple_repo.name,
                               str(uuid4()), "local_us", {})
    model.notification.create_repo_notification(simple_repo, "repo_push",
                                                "quay_notification", {}, {})

    __generate_repository(
        with_storage,
        new_user_1,
        "sharedtags",
        "Shared tags repository",
        False,
        [(new_user_2, "read"), (dtrobot[0], "read")],
        (
            2,
            [
                (3, [], ["v2.0", "v2.1", "v2.2"]),
                (
                    1,
                    [(1, [(1, [], ["prod", "581a284"])
                          ], ["staging", "8423b58"]), (1, [], None)],
                    None,
                ),
            ],
            None,
        ),
    )

    __generate_repository(
        with_storage,
        new_user_1,
        "history",
        "Historical repository.",
        False,
        [],
        (4, [(2, [], "#latest"), (3, [], "latest")], None),
    )

    __generate_repository(
        with_storage,
        new_user_1,
        "complex",
        "Complex repository with many branches and tags.",
        False,
        [(new_user_2, "read"), (dtrobot[0], "read")],
        (
            2,
            [(3, [], "v2.0"),
             (1, [(1, [(2, [], ["prod"])], "staging"), (1, [], None)], None)],
            None,
        ),
    )

    __generate_repository(
        with_storage,
        new_user_1,
        "gargantuan",
        None,
        False,
        [],
        (
            2,
            [
                (3, [], "v2.0"),
                (1, [(1, [(1, [], ["latest", "prod"])], "staging"),
                     (1, [], None)], None),
                (20, [], "v3.0"),
                (5, [], "v4.0"),
                (1, [(1, [], "v5.0"), (1, [], "v6.0")], None),
            ],
            None,
        ),
    )

    trusted_repo = __generate_repository(
        with_storage,
        new_user_1,
        "trusted",
        "Trusted repository.",
        False,
        [],
        (4, [], ["latest", "prod"]),
    )
    trusted_repo.trust_enabled = True
    trusted_repo.save()

    publicrepo = __generate_repository(
        with_storage,
        new_user_2,
        "publicrepo",
        "Public repository pullable by the world.",
        True,
        [],
        (10, [], "latest"),
    )

    __generate_repository(with_storage, outside_org, "coolrepo",
                          "Some cool repo.", False, [], (5, [], "latest"))

    __generate_repository(
        with_storage,
        new_user_1,
        "shared",
        "Shared repository, another user can write.",
        False,
        [(new_user_2, "write"), (reader, "read")],
        (5, [], "latest"),
    )

    __generate_repository(
        with_storage,
        new_user_1,
        "text-full-repo",
        "This is a repository for testing text search",
        False,
        [(new_user_2, "write"), (reader, "read")],
        (5, [], "latest"),
    )

    building = __generate_repository(
        with_storage,
        new_user_1,
        "building",
        "Empty repository which is building.",
        False,
        [(new_user_2, "write"), (reader, "read")],
        (0, [], None),
    )

    new_token = model.token.create_access_token(building, "write",
                                                "build-worker")

    trigger = model.build.create_build_trigger(building,
                                               "github",
                                               "123authtoken",
                                               new_user_1,
                                               pull_robot=dtrobot[0])
    trigger.config = json.dumps({
        "build_source": "jakedt/testconnect",
        "subdir": "",
        "dockerfile_path": "Dockerfile",
        "context": "/",
    })
    trigger.save()

    repo = "ci.devtable.com:5000/%s/%s" % (building.namespace_user.username,
                                           building.name)
    job_config = {
        "repository": repo,
        "docker_tags": ["latest"],
        "build_subdir": "",
        "trigger_metadata": {
            "commit": "3482adc5822c498e8f7db2e361e8d57b3d77ddd9",
            "ref": "refs/heads/master",
            "default_branch": "master",
        },
    }

    model.repository.star_repository(new_user_1, simple_repo)

    record = model.repository.create_email_authorization_for_repo(
        new_user_1.username, "simple", "*****@*****.**")
    record.confirmed = True
    record.save()

    model.repository.create_email_authorization_for_repo(
        new_user_1.username, "simple", "*****@*****.**")

    build2 = model.build.create_repository_build(
        building,
        new_token,
        job_config,
        "68daeebd-a5b9-457f-80a0-4363b882f8ea",
        "build-name",
        trigger,
    )
    build2.uuid = "deadpork-dead-pork-dead-porkdeadpork"
    build2.save()

    build3 = model.build.create_repository_build(
        building,
        new_token,
        job_config,
        "f49d07f9-93da-474d-ad5f-c852107c3892",
        "build-name",
        trigger,
    )
    build3.uuid = "deadduck-dead-duck-dead-duckdeadduck"
    build3.save()

    build1 = model.build.create_repository_build(
        building, new_token, job_config, "701dcc3724fb4f2ea6c31400528343cd",
        "build-name", trigger)
    build1.uuid = "deadbeef-dead-beef-dead-beefdeadbeef"
    build1.save()

    org = model.organization.create_organization("buynlarge",
                                                 "*****@*****.**",
                                                 new_user_1)
    org.stripe_id = TEST_STRIPE_ID
    org.save()

    liborg = model.organization.create_organization(
        "library", "*****@*****.**", new_user_1)
    liborg.save()

    titiorg = model.organization.create_organization("titi",
                                                     "*****@*****.**",
                                                     new_user_1)
    titiorg.save()

    thirdorg = model.organization.create_organization(
        "sellnsmall", "*****@*****.**", new_user_1)
    thirdorg.save()

    model.user.create_robot("coolrobot", org)

    oauth_app_1 = model.oauth.create_application(
        org,
        "Some Test App",
        "http://localhost:8000",
        "http://localhost:8000/o2c.html",
        client_id="deadbeef",
    )

    model.oauth.create_application(
        org,
        "Some Other Test App",
        "http://quay.io",
        "http://localhost:8000/o2c.html",
        client_id="deadpork",
        description="This is another test application",
    )

    model.oauth.create_access_token_for_testing(new_user_1,
                                                "deadbeef",
                                                "repo:admin",
                                                access_token="%s%s" %
                                                ("b" * 40, "c" * 40))

    oauth_credential = Credential.from_string("dswfhasdf1")
    OAuthAuthorizationCode.create(
        application=oauth_app_1,
        code="Z932odswfhasdf1",
        scope="repo:admin",
        data='{"somejson": "goeshere"}',
        code_name="Z932odswfhasdf1Z932o",
        code_credential=oauth_credential,
    )

    model.user.create_robot("neworgrobot", org)

    ownerbot = model.user.create_robot("ownerbot", org)[0]
    creatorbot = model.user.create_robot("creatorbot", org)[0]

    owners = model.team.get_organization_team("buynlarge", "owners")
    owners.description = "Owners have unfetterd access across the entire org."
    owners.save()

    org_repo = __generate_repository(
        with_storage,
        org,
        "orgrepo",
        "Repository owned by an org.",
        False,
        [(outside_org, "read")],
        (4, [], ["latest", "prod"]),
    )

    __generate_repository(
        with_storage,
        org,
        "anotherorgrepo",
        "Another repository owned by an org.",
        False,
        [],
        (4, [], ["latest", "prod"]),
    )

    creators = model.team.create_team("creators", org, "creator",
                                      "Creators of orgrepo.")

    reader_team = model.team.create_team("readers", org, "member",
                                         "Readers of orgrepo.")
    model.team.add_or_invite_to_team(new_user_1, reader_team, outside_org)
    model.permission.set_team_repo_permission(reader_team.name,
                                              org_repo.namespace_user.username,
                                              org_repo.name, "read")

    model.team.add_user_to_team(new_user_2, reader_team)
    model.team.add_user_to_team(reader, reader_team)
    model.team.add_user_to_team(ownerbot, owners)
    model.team.add_user_to_team(creatorbot, creators)
    model.team.add_user_to_team(creatoruser, creators)

    sell_owners = model.team.get_organization_team("sellnsmall", "owners")
    sell_owners.description = "Owners have unfettered access across the entire org."
    sell_owners.save()

    model.team.add_user_to_team(new_user_4, sell_owners)

    sync_config = {
        "group_dn": "cn=Test-Group,ou=Users",
        "group_id": "somegroupid"
    }
    synced_team = model.team.create_team("synced", org, "member",
                                         "Some synced team.")
    model.team.set_team_syncing(synced_team, "ldap", sync_config)

    another_synced_team = model.team.create_team("synced", thirdorg, "member",
                                                 "Some synced team.")
    model.team.set_team_syncing(another_synced_team, "ldap",
                                {"group_dn": "cn=Test-Group,ou=Users"})

    __generate_repository(
        with_storage,
        new_user_1,
        "superwide",
        None,
        False,
        [],
        [
            (10, [], "latest2"),
            (2, [], "latest3"),
            (2, [(1, [], "latest11"), (2, [], "latest12")], "latest4"),
            (2, [], "latest5"),
            (2, [], "latest6"),
            (2, [], "latest7"),
            (2, [], "latest8"),
            (2, [], "latest9"),
            (2, [], "latest10"),
            (2, [], "latest13"),
            (2, [], "latest14"),
            (2, [], "latest15"),
            (2, [], "latest16"),
            (2, [], "latest17"),
            (2, [], "latest18"),
        ],
    )

    mirror_repo = __generate_repository(
        with_storage,
        new_user_1,
        "mirrored",
        "Mirrored repository.",
        False,
        [(dtrobot[0], "write"), (dtrobot2[0], "write")],
        (4, [], ["latest", "prod"]),
    )
    mirror_rule = model.repo_mirror.create_mirroring_rule(
        mirror_repo, ["latest", "3.3*"])
    mirror_args = (mirror_repo, mirror_rule, dtrobot[0], "quay.io/coreos/etcd",
                   60 * 60 * 24)
    mirror_kwargs = {
        "external_registry_username": "******",
        "external_registry_password": "******",
        "external_registry_config": {},
        "is_enabled": True,
        "sync_start_date": datetime.utcnow(),
    }
    mirror = model.repo_mirror.enable_mirroring_for_repository(
        *mirror_args, **mirror_kwargs)

    read_only_repo = __generate_repository(
        with_storage,
        new_user_1,
        "readonly",
        "Read-Only Repo.",
        False,
        [],
        (4, [], ["latest", "prod"]),
    )
    read_only_repo.state = RepositoryState.READ_ONLY
    read_only_repo.save()

    model.permission.add_prototype_permission(org,
                                              "read",
                                              activating_user=new_user_1,
                                              delegate_user=new_user_2)
    model.permission.add_prototype_permission(org,
                                              "read",
                                              activating_user=new_user_1,
                                              delegate_team=reader_team)
    model.permission.add_prototype_permission(org,
                                              "write",
                                              activating_user=new_user_2,
                                              delegate_user=new_user_1)

    today = datetime.today()
    week_ago = today - timedelta(6)
    six_ago = today - timedelta(5)
    four_ago = today - timedelta(4)
    yesterday = datetime.combine(date.today(),
                                 datetime.min.time()) - timedelta(hours=6)

    __generate_service_key("kid1", "somesamplekey", new_user_1, today,
                           ServiceKeyApprovalType.SUPERUSER)
    __generate_service_key(
        "kid2",
        "someexpiringkey",
        new_user_1,
        week_ago,
        ServiceKeyApprovalType.SUPERUSER,
        today + timedelta(days=14),
    )

    __generate_service_key("kid3", "unapprovedkey", new_user_1, today, None)

    __generate_service_key(
        "kid4",
        "autorotatingkey",
        new_user_1,
        six_ago,
        ServiceKeyApprovalType.KEY_ROTATION,
        today + timedelta(days=1),
        rotation_duration=timedelta(hours=12).total_seconds(),
    )

    __generate_service_key(
        "kid5",
        "key for another service",
        new_user_1,
        today,
        ServiceKeyApprovalType.SUPERUSER,
        today + timedelta(days=14),
        service="different_sample_service",
    )

    __generate_service_key(
        "kid6",
        "someexpiredkey",
        new_user_1,
        week_ago,
        ServiceKeyApprovalType.SUPERUSER,
        today - timedelta(days=1),
    )

    __generate_service_key(
        "kid7",
        "somewayexpiredkey",
        new_user_1,
        week_ago,
        ServiceKeyApprovalType.SUPERUSER,
        today - timedelta(days=30),
    )

    # Add the test pull key as pre-approved for local and unittest registry testing.
    # Note: this must match the private key found in the local/test config.
    _TEST_JWK = {
        "e":
        "AQAB",
        "kty":
        "RSA",
        "n":
        "yqdQgnelhAPMSeyH0kr3UGePK9oFOmNfwD0Ymnh7YYXr21VHWwyM2eVW3cnLd9KXywDFtGSe9oFDbnOuMCdUowdkBcaHju-isbv5KEbNSoy_T2Rip-6L0cY63YzcMJzv1nEYztYXS8wz76pSK81BKBCLapqOCmcPeCvV9yaoFZYvZEsXCl5jjXN3iujSzSF5Z6PpNFlJWTErMT2Z4QfbDKX2Nw6vJN6JnGpTNHZvgvcyNX8vkSgVpQ8DFnFkBEx54PvRV5KpHAq6AsJxKONMo11idQS2PfCNpa2hvz9O6UZe-eIX8jPo5NW8TuGZJumbdPT_nxTDLfCqfiZboeI0Pw",
    }

    key = model.service_keys.create_service_key("test_service_key",
                                                "test_service_key", "quay",
                                                _TEST_JWK, {}, None)

    model.service_keys.approve_service_key(
        key.kid,
        ServiceKeyApprovalType.SUPERUSER,
        notes="Test service key for local/test registry testing",
    )

    # Add an app specific token.
    token = model.appspecifictoken.create_token(new_user_1, "some app")
    token.token_name = "a" * 60
    token.token_secret = "b" * 60
    token.save()

    logs_model.log_action(
        "org_create_team",
        org.username,
        performer=new_user_1,
        timestamp=week_ago,
        metadata={"team": "readers"},
    )

    logs_model.log_action(
        "org_set_team_role",
        org.username,
        performer=new_user_1,
        timestamp=week_ago,
        metadata={
            "team": "readers",
            "role": "read"
        },
    )

    logs_model.log_action(
        "create_repo",
        org.username,
        performer=new_user_1,
        repository=org_repo,
        timestamp=week_ago,
        metadata={
            "namespace": org.username,
            "repo": "orgrepo"
        },
    )

    logs_model.log_action(
        "change_repo_permission",
        org.username,
        performer=new_user_2,
        repository=org_repo,
        timestamp=six_ago,
        metadata={
            "username": new_user_1.username,
            "repo": "orgrepo",
            "role": "admin"
        },
    )

    logs_model.log_action(
        "change_repo_permission",
        org.username,
        performer=new_user_1,
        repository=org_repo,
        timestamp=six_ago,
        metadata={
            "username": new_user_2.username,
            "repo": "orgrepo",
            "role": "read"
        },
    )

    logs_model.log_action(
        "add_repo_accesstoken",
        org.username,
        performer=new_user_1,
        repository=org_repo,
        timestamp=four_ago,
        metadata={
            "repo": "orgrepo",
            "token": "deploytoken"
        },
    )

    logs_model.log_action(
        "push_repo",
        org.username,
        performer=new_user_2,
        repository=org_repo,
        timestamp=today,
        metadata={
            "username": new_user_2.username,
            "repo": "orgrepo"
        },
    )

    logs_model.log_action(
        "pull_repo",
        org.username,
        performer=new_user_2,
        repository=org_repo,
        timestamp=today,
        metadata={
            "username": new_user_2.username,
            "repo": "orgrepo"
        },
    )

    logs_model.log_action(
        "pull_repo",
        org.username,
        repository=org_repo,
        timestamp=today,
        metadata={
            "token": "sometoken",
            "token_code": "somecode",
            "repo": "orgrepo"
        },
    )

    logs_model.log_action(
        "delete_tag",
        org.username,
        performer=new_user_2,
        repository=org_repo,
        timestamp=today,
        metadata={
            "username": new_user_2.username,
            "repo": "orgrepo",
            "tag": "sometag"
        },
    )

    logs_model.log_action(
        "pull_repo",
        org.username,
        repository=org_repo,
        timestamp=today,
        metadata={
            "token_code": "somecode",
            "repo": "orgrepo"
        },
    )

    logs_model.log_action(
        "pull_repo",
        new_user_2.username,
        repository=publicrepo,
        timestamp=yesterday,
        metadata={
            "token_code": "somecode",
            "repo": "publicrepo"
        },
    )

    logs_model.log_action(
        "build_dockerfile",
        new_user_1.username,
        repository=building,
        timestamp=today,
        metadata={
            "repo": "building",
            "namespace": new_user_1.username,
            "trigger_id": trigger.uuid,
            "config": json.loads(trigger.config),
            "service": trigger.service.name,
        },
    )

    model.message.create([{
        "content": "We love you, Quay customers!",
        "severity": "info",
        "media_type": "text/plain",
    }])

    model.message.create([{
        "content": "This is a **development** install of Quay",
        "severity": "warning",
        "media_type": "text/markdown",
    }])

    fake_queue = WorkQueue("fakequeue", tf)
    fake_queue.put(["canonical", "job", "name"], "{}")

    model.user.create_user_prompt(new_user_4, "confirm_username")

    while True:
        to_count = model.repositoryactioncount.find_uncounted_repository()
        if not to_count:
            break

        model.repositoryactioncount.count_repository_actions(
            to_count,
            datetime.utcnow().day)
        model.repositoryactioncount.update_repository_score(to_count)
Example #18
def get_all_repo_users_transitive_via_teams(namespace_name, repository_name):
    return (User.select().distinct().join(TeamMember).join(Team).join(
        RepositoryPermission).join(Repository).join(
            Namespace, on=(Repository.namespace_user == Namespace.id)).where(
                Namespace.username == namespace_name,
                Repository.name == repository_name))
Example #19
def list_team_robots(team):
    """ Returns an iterator of all the *robots* found in a team. Does not include users. """
    return User.select().join(TeamMember).join(Team).where(
        Team.id == team, User.robot == True)
Example #20
def mark_namespace_for_deletion(user, queues, namespace_gc_queue, force=False):
    """
    Marks a namespace (as referenced by the given user) for deletion.

    A queue item will be added to delete the namespace's repositories and storage, while the
    namespace itself will be renamed, disabled, and delinked from other tables.
    """
    if not user.enabled:
        return None

    if not force and not user.organization:
        # Ensure that the user is not the sole admin for any organizations. If so, then the user
        # cannot be deleted before those organizations are deleted or reassigned.
        organizations = get_solely_admined_organizations(user)
        if len(organizations) > 0:
            message = (
                "Cannot delete %s as you are the only admin for organizations: "
                % user.username)
            for index, org in enumerate(organizations):
                if index > 0:
                    message = message + ", "

                message = message + org.username

            raise DataModelException(message)

    # Delete all queue items for the user.
    for queue in queues:
        queue.delete_namespaced_items(user.username)

    # Delete non-repository related items. This operation is very quick, so we can do so here.
    _delete_user_linked_data(user)

    with db_transaction():
        original_username = user.username
        user = db_for_update(User.select().where(User.id == user.id)).get()

        # Mark the namespace as deleted and ready for GC.
        try:
            marker = DeletedNamespace.create(
                namespace=user,
                original_username=original_username,
                original_email=user.email)
        except IntegrityError:
            return

        # Disable the namespace itself, and replace its various unique fields with UUIDs.
        user.enabled = False
        user.username = str(uuid4())
        user.email = str(uuid4())
        user.save()

    # Add a queueitem to delete the namespace.
    marker.queue_id = namespace_gc_queue.put(
        [str(user.id)],
        json.dumps({
            "marker_id": marker.id,
            "original_username": original_username,
        }),
    )
    marker.save()
    return marker.id
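A hedged sketch of a caller: mark_namespace_for_deletion returns None when the namespace is already disabled (or the DeletedNamespace insert lost a race) and the marker id otherwise; the wrapper below is hypothetical.

def delete_namespace(user, queues, namespace_gc_queue):
    # Hypothetical wrapper distinguishing "nothing to do" from "queued for GC".
    marker_id = mark_namespace_for_deletion(user, queues, namespace_gc_queue)
    if marker_id is None:
        logger.info("Namespace %s was not marked for deletion", user.username)
        return False
    logger.info("Namespace %s queued for GC under marker %s", user.username, marker_id)
    return True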
Example #21
def _namespace_id_for_username(username):
    try:
        return User.get(username=username).id
    except User.DoesNotExist:
        return None
Example #22
def is_username_unique(test_username):
    try:
        User.get((User.username == test_username))
        return False
    except User.DoesNotExist:
        return True
Example #23
def upgrade(tables, tester, progress_reporter):
    op = ProgressWrapper(original_op, progress_reporter)

    from app import app

    if app.config.get("SETUP_COMPLETE", False) or tester.is_testing():
        # Empty all access token names to fix the bug where we put the wrong name and code
        # in for some tokens.
        (AccessToken.update(token_name=None).where(
            ~(AccessToken.token_name >> None),
            AccessToken.temporary == False).execute())

        # AccessToken.
        logger.info("Backfilling encrypted credentials for access tokens")
        for access_token in _iterate(AccessToken,
                                     ((AccessToken.token_name >> None) |
                                      (AccessToken.token_name == ""))):
            logger.info(
                "Backfilling encrypted credentials for access token %s",
                access_token.id)
            assert access_token.code is not None
            assert access_token.code[:ACCESS_TOKEN_NAME_PREFIX_LENGTH]
            assert access_token.code[ACCESS_TOKEN_NAME_PREFIX_LENGTH:]

            token_name = access_token.code[:ACCESS_TOKEN_NAME_PREFIX_LENGTH]
            token_code = _decrypted(
                access_token.code[ACCESS_TOKEN_NAME_PREFIX_LENGTH:])

            (AccessToken.update(
                token_name=token_name, token_code=token_code).where(
                    AccessToken.id == access_token.id,
                    AccessToken.code == access_token.code).execute())

        assert AccessToken.select().where(
            AccessToken.token_name >> None).count() == 0

        # Robots.
        logger.info("Backfilling encrypted credentials for robots")
        while True:
            has_row = False
            query = (User.select().join(
                RobotAccountToken, JOIN.LEFT_OUTER).where(
                    User.robot == True,
                    RobotAccountToken.id >> None).limit(BATCH_SIZE))

            for robot_user in query:
                logger.info("Backfilling encrypted credentials for robot %s",
                            robot_user.id)
                has_row = True
                try:
                    RobotAccountToken.create(
                        robot_account=robot_user,
                        token=_decrypted(robot_user.email),
                        fully_migrated=False,
                    )
                except IntegrityError:
                    break

            if not has_row:
                break

        # RepositoryBuildTrigger
        logger.info(
            "Backfilling encrypted credentials for repo build triggers")
        for repo_build_trigger in _iterate(
                RepositoryBuildTrigger,
            (RepositoryBuildTrigger.fully_migrated == False)):
            logger.info(
                "Backfilling encrypted credentials for repo build trigger %s",
                repo_build_trigger.id)

            (RepositoryBuildTrigger.update(
                secure_auth_token=_decrypted(repo_build_trigger.auth_token),
                secure_private_key=_decrypted(repo_build_trigger.private_key),
                fully_migrated=True,
            ).where(
                RepositoryBuildTrigger.id == repo_build_trigger.id,
                RepositoryBuildTrigger.uuid == repo_build_trigger.uuid,
            ).execute())

        assert (RepositoryBuildTrigger.select().where(
            RepositoryBuildTrigger.fully_migrated == False).count()) == 0

        # AppSpecificAuthToken
        logger.info(
            "Backfilling encrypted credentials for app specific auth tokens")
        for token in _iterate(
                AppSpecificAuthToken,
            ((AppSpecificAuthToken.token_name >> None)
             | (AppSpecificAuthToken.token_name == "")
             | (AppSpecificAuthToken.token_secret >> None)),
        ):
            logger.info(
                "Backfilling encrypted credentials for app specific auth %s",
                token.id)
            assert token.token_code[AST_TOKEN_NAME_PREFIX_LENGTH:]

            token_name = token.token_code[:AST_TOKEN_NAME_PREFIX_LENGTH]
            token_secret = _decrypted(
                token.token_code[AST_TOKEN_NAME_PREFIX_LENGTH:])
            assert token_name
            assert token_secret

            (AppSpecificAuthToken.update(
                token_name=token_name, token_secret=token_secret).where(
                    AppSpecificAuthToken.id == token.id,
                    AppSpecificAuthToken.token_code == token.token_code,
                ).execute())

        assert (AppSpecificAuthToken.select().where(
            AppSpecificAuthToken.token_name >> None).count()) == 0

        # OAuthAccessToken
        logger.info("Backfilling credentials for OAuth access tokens")
        for token in _iterate(
                OAuthAccessToken,
            ((OAuthAccessToken.token_name >> None) |
             (OAuthAccessToken.token_name == "")),
        ):
            logger.info("Backfilling credentials for OAuth access token %s",
                        token.id)
            token_name = token.access_token[:OAUTH_ACCESS_TOKEN_PREFIX_LENGTH]
            token_code = Credential.from_string(
                token.access_token[OAUTH_ACCESS_TOKEN_PREFIX_LENGTH:])
            assert token_name
            assert token.access_token[OAUTH_ACCESS_TOKEN_PREFIX_LENGTH:]

            (OAuthAccessToken.update(
                token_name=token_name, token_code=token_code).where(
                    OAuthAccessToken.id == token.id,
                    OAuthAccessToken.access_token == token.access_token,
                ).execute())

        assert (OAuthAccessToken.select().where(
            OAuthAccessToken.token_name >> None).count()) == 0

        # OAuthAuthorizationCode
        logger.info("Backfilling credentials for OAuth auth code")
        for code in _iterate(
                OAuthAuthorizationCode,
            ((OAuthAuthorizationCode.code_name >> None) |
             (OAuthAuthorizationCode.code_name == "")),
        ):
            logger.info("Backfilling credentials for OAuth auth code %s",
                        code.id)
            user_code = code.code or random_string_generator(
                AUTHORIZATION_CODE_PREFIX_LENGTH * 2)()
            code_name = user_code[:AUTHORIZATION_CODE_PREFIX_LENGTH]
            code_credential = Credential.from_string(
                user_code[AUTHORIZATION_CODE_PREFIX_LENGTH:])
            assert code_name
            assert user_code[AUTHORIZATION_CODE_PREFIX_LENGTH:]

            (OAuthAuthorizationCode.update(
                code_name=code_name, code_credential=code_credential).where(
                    OAuthAuthorizationCode.id == code.id).execute())

        assert (OAuthAuthorizationCode.select().where(
            OAuthAuthorizationCode.code_name >> None).count()) == 0

        # OAuthApplication
        logger.info("Backfilling secret for OAuth applications")
        for app in _iterate(OAuthApplication,
                            OAuthApplication.fully_migrated == False):
            logger.info("Backfilling secret for OAuth application %s", app.id)
            client_secret = app.client_secret or str(uuid.uuid4())
            secure_client_secret = _decrypted(client_secret)

            (OAuthApplication.update(
                secure_client_secret=secure_client_secret,
                fully_migrated=True).where(
                    OAuthApplication.id == app.id,
                    OAuthApplication.fully_migrated == False).execute())

        assert (OAuthApplication.select().where(
            OAuthApplication.fully_migrated == False).count()) == 0
Example #24
def get_user(username):
    try:
        return User.get(User.username == username, User.organization == False)
    except User.DoesNotExist:
        return None
Example #25
def get_user_organizations(username):
    UserAlias = User.alias()
    return (User.select().distinct().join(Team).join(TeamMember).join(
        UserAlias, on=(UserAlias.id == TeamMember.user)).where(
            User.organization == True, UserAlias.username == username))
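For orientation, the alias() self-join above treats the outer User rows as organizations and UserAlias as the team member matched by username; a minimal usage sketch (the username is hypothetical):

# List the organizations that "devtable" belongs to through team membership.
for org in get_user_organizations("devtable"):
    print(org.username)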
Example #26
def get_user_or_org(username):
    try:
        return User.get(User.username == username, User.robot == False)
    except User.DoesNotExist:
        return None
Example #27
def get_robot_count():
    return User.select().where(User.robot == True).count()
Example #28
def get_namespace_user_by_user_id(namespace_user_db_id):
    try:
        return User.get(User.id == namespace_user_db_id, User.robot == False)
    except User.DoesNotExist:
        raise InvalidUsernameException("User with id does not exist: %s" %
                                       namespace_user_db_id)
Example #29
def has_users(self):
    return bool(list(User.select().limit(1)))