def create_token(user, title, expiration=_default_expiration_duration_opt):
    """
    Creates and returns an app specific token for the given user.

    If no expiration is specified (including `None`), then the default from
    config is used.
    """
    # The default is a sentinel meaning "not specified"; compare by identity
    # (`is`) so a caller-supplied value that merely compares equal to the
    # sentinel can never be misinterpreted as "use the default".
    if expiration is _default_expiration_duration_opt:
        duration = _default_expiration_duration()
        expiration = duration + datetime.now() if duration else None

    token_code = random_string_generator(TOKEN_NAME_PREFIX_LENGTH + MINIMUM_TOKEN_SUFFIX_LENGTH)()
    token_name = token_code[:TOKEN_NAME_PREFIX_LENGTH]
    token_secret = token_code[TOKEN_NAME_PREFIX_LENGTH:]

    assert token_name
    assert token_secret

    # TODO(remove-unenc): Remove legacy handling.
    old_token_code = (token_code
                      if ActiveDataMigration.has_flag(ERTMigrationFlags.WRITE_OLD_FIELDS)
                      else None)

    return AppSpecificAuthToken.create(
        user=user,
        title=title,
        expiration=expiration,
        token_name=token_name,
        token_secret=DecryptedValue(token_secret),
        token_code=old_token_code,
    )
def discard_authorization_code(self, client_id, full_code):
    """
    Deletes the authorization code for the given client and code string,
    if one exists. Silently does nothing when no matching code is found.
    """
    code_name = full_code[:AUTHORIZATION_CODE_PREFIX_LENGTH]

    # Preferred path: match on the stored code-name prefix.
    prefix_query = (
        OAuthAuthorizationCode.select()
        .join(OAuthApplication)
        .where(
            OAuthApplication.client_id == client_id,
            OAuthAuthorizationCode.code_name == code_name,
        ))

    try:
        prefix_query.get().delete_instance()
        return
    except OAuthAuthorizationCode.DoesNotExist:
        pass

    # Legacy: full code.
    # TODO(remove-unenc): Remove legacy fallback.
    if not ActiveDataMigration.has_flag(ERTMigrationFlags.READ_OLD_FIELDS):
        return

    legacy_query = (
        OAuthAuthorizationCode.select()
        .join(OAuthApplication)
        .where(
            OAuthApplication.client_id == client_id,
            OAuthAuthorizationCode.code == full_code,
        ))

    try:
        legacy_query.get().delete_instance()
    except OAuthAuthorizationCode.DoesNotExist:
        pass
def from_authorization_code(self, client_id, full_code, scope):
    """
    Looks up the stored data string for an authorization code issued to the
    given client under the given scope.

    Returns None when no matching code exists or when the code's secret
    portion fails credential verification.
    """
    code_name = full_code[:AUTHORIZATION_CODE_PREFIX_LENGTH]
    code_credential = full_code[AUTHORIZATION_CODE_PREFIX_LENGTH:]

    try:
        found = (
            OAuthAuthorizationCode.select().join(OAuthApplication).where(
                OAuthApplication.client_id == client_id,
                OAuthAuthorizationCode.code_name == code_name,
                OAuthAuthorizationCode.scope == scope,
            ).get())

        # Verify the secret suffix against the stored credential.
        if not found.code_credential.matches(code_credential):
            return None

        logger.debug("Returning data: %s", found.data)
        return found.data
    except OAuthAuthorizationCode.DoesNotExist:
        # Fallback to the legacy lookup of the full code.
        # TODO(remove-unenc): Remove legacy fallback.
        if ActiveDataMigration.has_flag(ERTMigrationFlags.READ_OLD_FIELDS):
            try:
                found = (OAuthAuthorizationCode.select().join(
                    OAuthApplication).where(
                        OAuthApplication.client_id == client_id,
                        OAuthAuthorizationCode.code == full_code,
                        OAuthAuthorizationCode.scope == scope,
                    ).get())

                logger.debug("Returning data: %s", found.data)
                return found.data
            except OAuthAuthorizationCode.DoesNotExist:
                return None
        else:
            return None
def confirm_user_email(token):
    """
    Confirms the e-mail address referenced by the given confirmation token.

    Marks the owning user as verified and, when the confirmation carries a new
    e-mail address different from the user's current one, applies it.

    Returns a tuple of (user, new_email, old_email); old_email is None when no
    address change occurred.

    Raises DataModelException when the token is invalid or the new address is
    already in use.
    """
    # TODO(remove-unenc): Remove allow_public_only once migrated.
    allow_public_only = ActiveDataMigration.has_flag(ERTMigrationFlags.READ_OLD_FIELDS)
    result = decode_public_private_token(token, allow_public_only=allow_public_only)
    if not result:
        raise DataModelException("Invalid email confirmation code")

    try:
        code = EmailConfirmation.get(
            EmailConfirmation.code == result.public_code,
            EmailConfirmation.email_confirm == True
        )
    except EmailConfirmation.DoesNotExist:
        raise DataModelException("Invalid email confirmation code")

    if result.private_token and not code.verification_code.matches(result.private_token):
        raise DataModelException("Invalid email confirmation code")

    user = code.user
    user.verified = True

    old_email = None
    new_email = code.new_email

    # Only switch addresses when the new one actually differs from the user's
    # current e-mail. Comparing against the (always-None) old_email here would
    # make a re-confirmation of the user's own address find the user's own
    # record below and incorrectly raise "already used".
    if new_email and new_email != user.email:
        if find_user_by_email(new_email):
            raise DataModelException("E-mail address already used")

        old_email = user.email
        user.email = new_email

    # Persist the user change and consume the confirmation code atomically.
    with db_transaction():
        user.save()
        code.delete_instance()

    return user, new_email, old_email
def app_view(application):
    """
    Returns the API view of the given OAuth application.

    Sensitive fields (client secret, redirect URI, avatar e-mail) are exposed
    only when the caller administers the owning organization.
    """
    org_name = application.organization.username
    is_admin = AdministerOrganizationPermission(org_name).can()

    client_secret = None
    if is_admin:
        # TODO(remove-unenc): Remove legacy lookup.
        if application.secure_client_secret is not None:
            client_secret = application.secure_client_secret.decrypt()

        if (ActiveDataMigration.has_flag(ERTMigrationFlags.READ_OLD_FIELDS)
                and client_secret is None):
            client_secret = application.client_secret

    # Admins must always end up with a secret; non-admins never do.
    assert (client_secret is not None) == is_admin

    return {
        "name": application.name,
        "description": application.description,
        "application_uri": application.application_uri,
        "client_id": application.client_id,
        "client_secret": client_secret,
        "redirect_uri": application.redirect_uri if is_admin else None,
        "avatar_email": application.avatar_email if is_admin else None,
    }
def validate_reset_code(token):
    """
    Validates and consumes a password-reset token.

    Returns the owning user (marking them verified if they were not already)
    or None when the token is unknown, fails verification, or has expired.
    Expired codes are deleted as a side effect.
    """
    # TODO(remove-unenc): Remove allow_public_only once migrated.
    legacy_ok = ActiveDataMigration.has_flag(ERTMigrationFlags.READ_OLD_FIELDS)
    decoded = decode_public_private_token(token, allow_public_only=legacy_ok)
    if not decoded:
        return None

    # Find the reset code.
    try:
        code = EmailConfirmation.get(
            EmailConfirmation.code == decoded.public_code,
            EmailConfirmation.pw_reset == True
        )
    except EmailConfirmation.DoesNotExist:
        return None

    if decoded.private_token and not code.verification_code.matches(decoded.private_token):
        return None

    # Make sure the code is not expired.
    lifetime = convert_to_timedelta(config.app_config["USER_RECOVERY_TOKEN_LIFETIME"])
    if code.created + lifetime < datetime.now():
        code.delete_instance()
        return None

    # Verify the user and consume the code atomically.
    user = code.user
    with db_transaction():
        if not user.verified:
            user.verified = True
            user.save()

        code.delete_instance()

    return user
def get_full_token_string(token):
    """
    Returns the full token string for the given app specific token: the
    token-name prefix concatenated with the decrypted secret.
    """
    # TODO(remove-unenc): Remove legacy handling.
    reads_old = ActiveDataMigration.has_flag(ERTMigrationFlags.READ_OLD_FIELDS)
    if reads_old and not token.token_name:
        # Legacy tokens stored the entire code in token_code.
        return token.token_code

    assert token.token_name
    return "%s%s" % (token.token_name, token.token_secret.decrypt())
def verify_robot(robot_username, password):
    """
    Verifies the given robot username and password, returning the robot user
    on success.

    Raises InvalidRobotException when the name is malformed, the credentials
    do not match, or the robot's owner does not exist or is disabled.
    """
    # Generic failure message, defined up front: previously it was only
    # assigned inside certain branches, so the DoesNotExist handler below
    # could raise NameError instead of the intended InvalidRobotException.
    # Deliberately vague so callers can't distinguish "no such robot" from
    # "wrong password".
    msg = ('Could not find robot with username: %s and supplied password.' %
           robot_username)

    try:
        password = remove_unicode(password)
    except UnicodeEncodeError:
        raise InvalidRobotException(msg)

    result = parse_robot_username(robot_username)
    if result is None:
        raise InvalidRobotException('%s is an invalid robot name' % robot_username)

    robot = lookup_robot(robot_username)
    assert robot.robot

    # Lookup the token for the robot.
    try:
        token_data = RobotAccountToken.get(robot_account=robot)
        if not token_data.token.matches(password):
            raise InvalidRobotException(msg)
    except RobotAccountToken.DoesNotExist:
        # TODO(remove-unenc): Remove once migrated.
        if not ActiveDataMigration.has_flag(ERTMigrationFlags.READ_OLD_FIELDS):
            raise InvalidRobotException(msg)

        if password.find('robot:') >= 0:
            # Just to be sure.
            raise InvalidRobotException(msg)

        # Legacy path: the robot credential lived in FederatedLogin.
        query = (User
                 .select()
                 .join(FederatedLogin)
                 .join(LoginService)
                 .where(FederatedLogin.service_ident == password,
                        LoginService.name == 'quayrobot',
                        User.username == robot_username))

        try:
            robot = query.get()
        except User.DoesNotExist:
            raise InvalidRobotException(msg)

    # Find the owner user and ensure it is not disabled.
    try:
        owner = User.get(User.username == result[0])
    except User.DoesNotExist:
        raise InvalidRobotException('Robot %s owner does not exist' % robot_username)

    if not owner.enabled:
        raise InvalidRobotException('This user has been disabled. Please contact your administrator.')

    # Mark that the robot was accessed.
    _basequery.update_last_accessed(robot)
    return robot
def reset_client_secret(application):
    """
    Generates a fresh client secret for the given OAuth application, persists
    it, and returns the updated application.
    """
    new_secret = random_string_generator(length=40)()

    # TODO(remove-unenc): Remove legacy field.
    writes_old = ActiveDataMigration.has_flag(ERTMigrationFlags.WRITE_OLD_FIELDS)
    if writes_old:
        application.client_secret = new_secret

    application.secure_client_secret = DecryptedValue(new_secret)
    application.save()
    return application
def retrieve_robot_token(robot):
    """
    Returns the decrypted token for the given robot.
    """
    try:
        return RobotAccountToken.get(robot_account=robot).token.decrypt()
    except RobotAccountToken.DoesNotExist:
        if not ActiveDataMigration.has_flag(ERTMigrationFlags.READ_OLD_FIELDS):
            raise

        # For legacy only: the token used to live in the e-mail field.
        return robot.email
def validate_client_secret(self, client_id, client_secret):
    """
    Returns whether the given secret matches the one registered for the OAuth
    application with the given client ID; False when no such application
    exists.
    """
    try:
        application = OAuthApplication.get(client_id=client_id)
    except OAuthApplication.DoesNotExist:
        return False

    # TODO(remove-unenc): Remove legacy check.
    if ActiveDataMigration.has_flag(ERTMigrationFlags.READ_OLD_FIELDS):
        if application.secure_client_secret is None:
            return application.client_secret == client_secret

    assert application.secure_client_secret is not None
    return application.secure_client_secret.matches(client_secret)
def update_build_trigger(trigger, config, auth_token=None, write_token=None):
    """
    Updates the configuration (and optionally the auth/write tokens) of the
    given build trigger and saves it.
    """
    serialized_config = json.dumps(config or {})
    trigger.config = serialized_config

    if auth_token is not None:
        # TODO(remove-unenc): Remove legacy field.
        if ActiveDataMigration.has_flag(ERTMigrationFlags.WRITE_OLD_FIELDS):
            trigger.auth_token = auth_token

        trigger.secure_auth_token = DecryptedValue(auth_token)

    if write_token is not None:
        trigger.write_token = write_token

    trigger.save()
def auth_token(self):
    """ Returns the auth token for the trigger. """
    trigger = self.trigger

    # NOTE: This check is for testing.
    if isinstance(trigger.auth_token, str):
        return trigger.auth_token

    # TODO(remove-unenc): Remove legacy field.
    if trigger.secure_auth_token is not None:
        return trigger.secure_auth_token.decrypt()

    if ActiveDataMigration.has_flag(ERTMigrationFlags.READ_OLD_FIELDS):
        return trigger.auth_token

    return None
def access_valid_token(token_code):
    """
    Looks up an unexpired app specific token with the given token code.

    If found, the token's last_accessed field is set to now and the token is
    returned. If not found, returns None.
    """
    token_code = remove_unicode(token_code)

    prefix = token_code[:TOKEN_NAME_PREFIX_LENGTH]
    if len(prefix) != TOKEN_NAME_PREFIX_LENGTH:
        # Input shorter than the prefix length cannot be a valid token.
        return None

    suffix = token_code[TOKEN_NAME_PREFIX_LENGTH:]

    # Lookup the token by its prefix.
    try:
        token = (AppSpecificAuthToken.select(
            AppSpecificAuthToken, User).join(User).where(
                AppSpecificAuthToken.token_name == prefix,
                # `expiration >> None` is peewee's "IS NULL" (no expiry).
                ((AppSpecificAuthToken.expiration > datetime.now())
                 | (AppSpecificAuthToken.expiration >> None)),
            ).get())

        # Verify the secret suffix against the stored credential.
        if not token.token_secret.matches(suffix):
            return None

        assert len(prefix) == TOKEN_NAME_PREFIX_LENGTH
        assert len(suffix) >= MINIMUM_TOKEN_SUFFIX_LENGTH
        update_last_accessed(token)
        return token
    except AppSpecificAuthToken.DoesNotExist:
        pass

    # TODO(remove-unenc): Remove legacy handling.
    if ActiveDataMigration.has_flag(ERTMigrationFlags.READ_OLD_FIELDS):
        try:
            # Legacy tokens stored the full plaintext code in token_code.
            token = (AppSpecificAuthToken.select(
                AppSpecificAuthToken, User).join(User).where(
                    AppSpecificAuthToken.token_code == token_code,
                    ((AppSpecificAuthToken.expiration > datetime.now())
                     | (AppSpecificAuthToken.expiration >> None)),
                ).get())

            update_last_accessed(token)
            return token
        except AppSpecificAuthToken.DoesNotExist:
            return None

    return None
def create_application(org, name, application_uri, redirect_uri, **kwargs):
    """
    Creates a new OAuth application under the given organization.

    A client secret is generated unless one is supplied via the
    `client_secret` keyword argument.
    """
    client_secret = kwargs.pop("client_secret", random_string_generator(length=40)())

    # TODO(remove-unenc): Remove legacy field.
    writes_old = ActiveDataMigration.has_flag(ERTMigrationFlags.WRITE_OLD_FIELDS)
    old_client_secret = client_secret if writes_old else None

    return OAuthApplication.create(
        organization=org,
        name=name,
        application_uri=application_uri,
        redirect_uri=redirect_uri,
        client_secret=old_client_secret,
        secure_client_secret=DecryptedValue(client_secret),
        **kwargs)
def create_build_trigger(repo, service_name, auth_token, user, pull_robot=None, config=None):
    """
    Creates and returns a new build trigger of the given service type on the
    repository, connected to the given user.
    """
    service = BuildTriggerService.get(name=service_name)

    # TODO(remove-unenc): Remove legacy field.
    writes_old = ActiveDataMigration.has_flag(ERTMigrationFlags.WRITE_OLD_FIELDS)
    old_auth_token = auth_token if writes_old else None

    secure_auth_token = DecryptedValue(auth_token) if auth_token else None

    return RepositoryBuildTrigger.create(
        repository=repo,
        service=service,
        auth_token=old_auth_token,
        secure_auth_token=secure_auth_token,
        connected_user=user,
        pull_robot=pull_robot,
        config=json.dumps(config or {}),
    )
def persist_authorization_code(self, client_id, full_code, scope):
    """
    Stores a new authorization code for the given client and scope.

    The code is split into a lookup name (prefix) and a credential (suffix);
    the plaintext full code is only written while the legacy migration flag
    is active.
    """
    oauth_app = OAuthApplication.get(client_id=client_id)
    data = self._generate_data_string()

    assert len(full_code) >= (AUTHORIZATION_CODE_PREFIX_LENGTH * 2)
    code_name = full_code[:AUTHORIZATION_CODE_PREFIX_LENGTH]
    code_credential = full_code[AUTHORIZATION_CODE_PREFIX_LENGTH:]

    # TODO(remove-unenc): Remove legacy fallback.
    legacy_code = None
    if ActiveDataMigration.has_flag(ERTMigrationFlags.WRITE_OLD_FIELDS):
        legacy_code = code_name + code_credential

    OAuthAuthorizationCode.create(
        application=oauth_app,
        code=legacy_code,
        scope=scope,
        code_name=code_name,
        code_credential=Credential.from_string(code_credential),
        data=data)
def validate_access_token(access_token):
    """
    Looks up and returns the OAuth access token (with its user pre-joined)
    matching the given token string, or None when no valid match exists.
    """
    assert isinstance(access_token, basestring)

    token_name = access_token[:ACCESS_TOKEN_PREFIX_LENGTH]
    if not token_name:
        return None

    token_code = access_token[ACCESS_TOKEN_PREFIX_LENGTH:]
    if not token_code:
        return None

    try:
        found = (OAuthAccessToken.select(
            OAuthAccessToken, User).join(User).where(
                OAuthAccessToken.token_name == token_name).get())

        # Verify the secret suffix against the stored credential.
        if found.token_code is None or not found.token_code.matches(
                token_code):
            return None

        return found
    except OAuthAccessToken.DoesNotExist:
        pass

    # Legacy lookup.
    # TODO(remove-unenc): Remove this once migrated.
    if ActiveDataMigration.has_flag(ERTMigrationFlags.READ_OLD_FIELDS):
        try:
            assert access_token
            # Legacy tokens stored the full plaintext in access_token.
            found = (OAuthAccessToken.select(
                OAuthAccessToken, User).join(User).where(
                    OAuthAccessToken.access_token == access_token).get())
            return found
        except OAuthAccessToken.DoesNotExist:
            return None

    return None
def start_build(self, build_job):
    """ Starts a build.

    Validates the component state, assembles the build arguments from the
    queued job, and invokes the remote builder. This is a trollius coroutine;
    it exits via `raise Return()`.
    """
    if self._component_status not in (ComponentStatus.WAITING, ComponentStatus.RUNNING):
        logger.debug(
            'Could not start build for component %s (build %s, worker version: %s): %s',
            self.builder_realm, build_job.repo_build.uuid,
            self._worker_version, self._component_status)
        raise Return()

    logger.debug(
        'Starting build for component %s (build %s, worker version: %s)',
        self.builder_realm, build_job.repo_build.uuid, self._worker_version)

    self._current_job = build_job
    self._build_status = StatusHandler(self.build_logs, build_job.repo_build.uuid)
    self._image_info = {}

    yield From(self._set_status(ComponentStatus.BUILDING))

    # Send the notification that the build has started.
    build_job.send_notification('build_start')

    # Parse the build configuration.
    try:
        build_config = build_job.build_config
    except BuildJobLoadException as irbe:
        yield From(
            self._build_failure('Could not load build job information', irbe))
        raise Return()

    base_image_information = {}

    # Add the pull robot information, if any.
    if build_job.pull_credentials:
        base_image_information['username'] = build_job.pull_credentials.get('username', '')
        base_image_information['password'] = build_job.pull_credentials.get('password', '')

    # Retrieve the repository's fully qualified name.
    repo = build_job.repo_build.repository
    repository_name = repo.namespace_user.username + '/' + repo.name

    # Parse the build queue item into build arguments.
    # build_package: URL to the build package to download and untar/unzip.
    # defaults to empty string to avoid requiring a pointer on the builder.
    # sub_directory: The location within the build package of the Dockerfile and the build context.
    # repository: The repository for which this build is occurring.
    # registry: The registry for which this build is occuring (e.g. 'quay.io').
    # pull_token: The token to use when pulling the cache for building.
    # push_token: The token to use to push the built image.
    # tag_names: The name(s) of the tag(s) for the newly built image.
    # base_image: The image name and credentials to use to conduct the base image pull.
    #   username: The username for pulling the base image (if any).
    #   password: The password for pulling the base image (if any).
    context, dockerfile_path = self.extract_dockerfile_args(build_config)
    build_arguments = {
        'build_package': build_job.get_build_package_url(self.user_files),
        'context': context,
        'dockerfile_path': dockerfile_path,
        'repository': repository_name,
        'registry': self.registry_hostname,
        'pull_token': build_job.repo_build.access_token.get_code(),
        'push_token': build_job.repo_build.access_token.get_code(),
        'tag_names': build_config.get('docker_tags', ['latest']),
        'base_image': base_image_information,
    }

    # If the trigger has a private key, it's using git, thus we should add
    # git data to the build args.
    #   url: url used to clone the git repository
    #   sha: the sha1 identifier of the commit to check out
    #   private_key: the key used to get read access to the git repository

    # TODO(remove-unenc): Remove legacy field.
    private_key = None
    if (build_job.repo_build.trigger is not None and
            build_job.repo_build.trigger.secure_private_key is not None):
        private_key = build_job.repo_build.trigger.secure_private_key.decrypt()

    if (ActiveDataMigration.has_flag(ERTMigrationFlags.READ_OLD_FIELDS) and
            private_key is None and
            build_job.repo_build.trigger is not None):
        private_key = build_job.repo_build.trigger.private_key

    if private_key is not None:
        build_arguments['git'] = {
            'url': build_config['trigger_metadata'].get('git_url', ''),
            'sha': BuildComponent._commit_sha(build_config),
            'private_key': private_key or '',
        }

    # If the build args have no buildpack, mark it as a failure before sending
    # it to a builder instance. Use .get() here: the 'git' key is only present
    # when a private key was found above, and indexing it directly would raise
    # KeyError in exactly the case this check is meant to catch.
    if not build_arguments['build_package'] and not build_arguments.get('git'):
        logger.error('%s: insufficient build args: %s',
                     self._current_job.repo_build.uuid, build_arguments)
        yield From(
            self._build_failure(
                'Insufficient build arguments. No buildpack available.'))
        raise Return()

    # Invoke the build.
    logger.debug('Invoking build: %s', self.builder_realm)
    logger.debug('With Arguments: %s', build_arguments)

    def build_complete_callback(result):
        """ This function is used to execute a coroutine as the callback. """
        trollius.ensure_future(self._build_complete(result))

    self.call("io.quay.builder.build",
              **build_arguments).add_done_callback(build_complete_callback)

    # Set the heartbeat for the future. If the builder never receives the build call,
    # then this will cause a timeout after 30 seconds. We know the builder has registered
    # by this point, so it makes sense to have a timeout.
    self._last_heartbeat = datetime.datetime.utcnow() + BUILD_HEARTBEAT_DELAY
def list_entity_robot_permission_teams(self, prefix, include_token=False,
                                       include_permissions=False, limit=None):
    """
    Returns Robot (or, with include_permissions, RobotWithPermissions) views
    for all robots under the given prefix.

    The underlying query yields one row per (robot, team, repository)
    combination; rows for the same robot are merged as the loop runs.
    """
    tuples = model.user.list_entity_robot_permission_teams(prefix, limit=limit,
                                                           include_permissions=include_permissions)

    robots = {}
    robot_teams = set()

    for robot_tuple in tuples:
        robot_name = robot_tuple.get(User.username)
        if robot_name not in robots:
            token = None
            if include_token:
                # TODO(remove-unenc): Remove branches once migrated.
                if robot_tuple.get(RobotAccountToken.token):
                    token = robot_tuple.get(RobotAccountToken.token).decrypt()

                if token is None and ActiveDataMigration.has_flag(ERTMigrationFlags.READ_OLD_FIELDS):
                    # Legacy: the token lived in the federated-login ident.
                    token = robot_tuple.get(FederatedLogin.service_ident)
                    assert not token.startswith('robot:')

            robot_dict = {
                'name': robot_name,
                'token': token,
                'created': robot_tuple.get(User.creation_date),
                'last_accessed': (robot_tuple.get(User.last_accessed)
                                  if features.USER_LAST_ACCESSED else None),
                'description': robot_tuple.get(RobotAccountMetadata.description),
                'unstructured_metadata': robot_tuple.get(RobotAccountMetadata.unstructured_json),
            }

            if include_permissions:
                robot_dict.update({
                    'teams': [],
                    'repositories': [],
                })

            robots[robot_name] = Robot(robot_dict['name'], robot_dict['token'],
                                       robot_dict['created'], robot_dict['last_accessed'],
                                       robot_dict['description'],
                                       robot_dict['unstructured_metadata'])

        if include_permissions:
            # NOTE: robot_dict is (re)assigned only when a robot is first
            # seen; subsequent rows for the same robot rely on the query
            # returning that robot's rows consecutively so the carried-over
            # robot_dict still refers to the current robot.
            team_name = robot_tuple.get(TeamTable.name)
            repository_name = robot_tuple.get(Repository.name)

            if team_name is not None:
                check_key = robot_name + ':' + team_name
                if check_key not in robot_teams:
                    robot_teams.add(check_key)

                    robot_dict['teams'].append(Team(
                        team_name,
                        avatar.get_data(team_name, team_name, 'team')
                    ))

            if repository_name is not None:
                if repository_name not in robot_dict['repositories']:
                    robot_dict['repositories'].append(repository_name)

            # Rebuild the view on every row so it reflects the accumulated
            # teams/repositories lists.
            robots[robot_name] = RobotWithPermissions(robot_dict['name'], robot_dict['token'],
                                                      robot_dict['created'],
                                                      (robot_dict['last_accessed']
                                                       if features.USER_LAST_ACCESSED else None),
                                                      robot_dict['teams'],
                                                      robot_dict['repositories'],
                                                      robot_dict['description'])

    return robots.values()
def post(self, namespace_name, repo_name, trigger_uuid):
    """ Activate the specified build trigger. """
    trigger = get_trigger(trigger_uuid)
    handler = BuildTriggerHandler.get_handler(trigger)
    # NOTE(review): the guard fires when the handler IS active, yet the
    # message says the config "is not sufficient for activation" — confirm
    # whether the condition or the message is the intended one.
    if handler.is_active():
        raise InvalidRequest('Trigger config is not sufficient for activation.')

    user_permission = UserAdminPermission(trigger.connected_user.username)
    if user_permission.can():
        # Update the pull robot (if any).
        pull_robot_name = request.get_json().get('pull_robot', None)
        if pull_robot_name:
            try:
                pull_robot = model.user.lookup_robot(pull_robot_name)
            except model.InvalidRobotException:
                raise NotFound()

            # Make sure the user has administer permissions for the robot's namespace.
            (robot_namespace, _) = parse_robot_username(pull_robot_name)
            if not AdministerOrganizationPermission(robot_namespace).can():
                raise Unauthorized()

            # Make sure the namespace matches that of the trigger.
            if robot_namespace != namespace_name:
                raise Unauthorized()

            # Set the pull robot.
            trigger.pull_robot = pull_robot

        # Update the config.
        new_config_dict = request.get_json()['config']

        # Create a write-scoped delegate token the webhook will use to push.
        write_token_name = 'Build Trigger: %s' % trigger.service.name
        write_token = model.token.create_delegate_token(namespace_name, repo_name,
                                                        write_token_name, 'write')

        try:
            path = url_for('webhooks.build_trigger_webhook', trigger_uuid=trigger.uuid)
            authed_url = _prepare_webhook_url(app.config['PREFERRED_URL_SCHEME'], '$token',
                                              write_token.get_code(),
                                              app.config['SERVER_HOSTNAME'], path)

            handler = BuildTriggerHandler.get_handler(trigger, new_config_dict)
            final_config, private_config = handler.activate(authed_url)

            if 'private_key' in private_config:
                trigger.secure_private_key = DecryptedValue(private_config['private_key'])

                # TODO(remove-unenc): Remove legacy field.
                if ActiveDataMigration.has_flag(ERTMigrationFlags.WRITE_OLD_FIELDS):
                    trigger.private_key = private_config['private_key']
        except TriggerException as exc:
            # Activation failed: clean up the delegate token we created above.
            write_token.delete_instance()
            raise request_error(message=exc.message)

        # Save the updated config.
        update_build_trigger(trigger, final_config, write_token=write_token)

        # Log the trigger setup.
        repo = model.repository.get_repository(namespace_name, repo_name)
        log_action('setup_repo_trigger', namespace_name,
                   {'repo': repo_name, 'namespace': namespace_name,
                    'trigger_id': trigger.uuid, 'service': trigger.service.name,
                    'pull_robot': trigger.pull_robot.username if trigger.pull_robot else None,
                    'config': final_config},
                   repo=repo)

        return trigger_view(trigger, can_admin=True)
    else:
        raise Unauthorized()