def get_authc_info(self, identifier):
    """
    Function defined in the interface. Returns a dict:

    {
      'account_locked': bool,
      'authc_info': {
        '<str cred type>': {
          'credential': <value>,
          'failed_attempts': <int>
        }
      },
      'account_id': <str>
    }

    This differs from the password-flow lookup in that it uses the user's uuid
    rather than the username, since tokens are tied to the uuid so that the
    token lifecycle is bound to a specific instance of a username.

    :param identifier: the user's uuid as signed/encoded in the token
    :return: populated dict defined above or empty structured dict above
    """
    result_account = {
        "account_locked": None,
        "authc_info": {},
        "account_id": None,
        "anchore_identity": None,
    }

    with session_scope() as db:
        idp = idp_factory.for_session(db)
        try:
            identity, creds = idp.lookup_user_by_uuid(identifier)
        except Exception:
            logger.exception("Error looking up user")
            identity = None
            creds = None

        result_account["account_locked"] = False
        if identity:
            result_account["anchore_identity"] = identity
            result_account["authc_info"]["jwt"] = {
                "credential": identity.user_uuid,
                "failed_attempts": [],
            }

    return result_account
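
# Illustrative sketch (hypothetical values) of the dict the JWT-flow lookup
# above returns for a known uuid: the 'credential' is the uuid itself, which
# the token flow can compare against the uuid encoded in the presented token.
def _example_jwt_authc_info():
    return {
        "account_locked": False,
        "authc_info": {
            "jwt": {
                "credential": "11111111-2222-3333-4444-555555555555",  # hypothetical uuid
                "failed_attempts": [],
            }
        },
        "account_id": None,
        "anchore_identity": "<identity object here>",  # placeholder for the looked-up identity
    }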
def system_subscriptions_get():
    try:
        request_inputs = anchore_engine.apis.do_request_prep(
            connexion.request, default_params={}
        )
        with db.session_scope() as session:
            (
                return_object,
                httpcode,
            ) = anchore_engine.services.catalog.catalog_impl.system_subscriptions(
                session, request_inputs
            )
    except Exception as err:
        logger.exception("Error fetching subscriptions")
        httpcode = 500
        return_object = str(err)

    return return_object, httpcode
def is_permitted(self, identifiers, permission_s):
    """
    :type identifiers: SimpleRealmCollection
    """
    # Fail all if not configured
    if not self.enabled or not self.client:
        return [(p, False) for p in permission_s]

    result_list = []  # List of tuples (required_perm, is_permitted)
    identifier = identifiers.primary_identifier
    if isinstance(identifier, IdentityContext):
        username = identifier.username
    else:
        username = identifier

    actions = {}
    for required_perm in permission_s:
        required_permission = CaseSensitivePermission(wildcard_string=required_perm)
        actions[Action(domain=','.join(required_permission.domain),
                       action=','.join(required_permission.action),
                       target=','.join(required_permission.target))] = required_perm

    if actions:
        try:
            resp = self.client.authorize(principal=username,
                                         action_s=list(actions.keys()))
            for i in resp.allowed:
                result_list.append((actions[i], True))
            for i in resp.denied:
                result_list.append((actions[i], False))
        except Exception as e:
            logger.exception('Unexpected error invoking authorization plugin via client: {}'.format(e))
            logger.error(
                'Authorization plugin invocation error. Could not perform a proper authz check. '
                'Please check configuration and/or authz service status: {}'.format(self.client.url))
            raise e

    return result_list
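
# Standalone sketch (plain strings standing in for Action objects) of the
# allow/deny partitioning pattern above: keep a reverse map from the
# wire-format action back to the caller's original permission string, then
# label each action the service returns with True/False.
def _partition_authz_response(action_to_perm, allowed, denied):
    results = [(action_to_perm[a], True) for a in allowed]
    results += [(action_to_perm[a], False) for a in denied]
    return results

# _partition_authz_response({'acct,listImages,*': 'acct:listImages:*'},
#                           allowed=['acct,listImages,*'], denied=[])
# -> [('acct:listImages:*', True)]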
def describe_policy():
    """
    Return a dictionary/json description of the set of available gates and their triggers.

    :return: dict/json description of the gates and triggers
    """
    try:
        doc = []
        for k, v in Gate.registry.items():
            g = GateSpec()
            g.name = k
            g.description = v.description if hasattr(v, 'description') else ''
            g.triggers = []

            for t in v.__triggers__:
                tr = TriggerSpec()
                tr.name = t.__trigger_name__
                tr.description = t.__description__ if t.__description__ else ''
                tr.parameters = []

                params = t._parameters()
                if params:
                    for param in params.values():
                        tps = TriggerParamSpec()
                        tps.name = param.name
                        tps.description = param.description
                        tps.validator = param.validator.json()
                        tps.required = param.required
                        tr.parameters.append(tps)

                g.triggers.append(tr)

            doc.append(g.to_dict())

        return doc, 200
    except Exception as e:
        log.exception('Error describing gate system')
        abort(500, 'Internal error describing gate configuration')
def initialize_system_identities(self):
    """
    Ensure the basic system identities (system and admin) are present.
    Uses self.mgr for all queries/updates; the transaction is managed externally.

    :return: boolean status
    """
    # system user
    try:
        if not self.mgr.get_account(localconfig.SYSTEM_ACCOUNT_NAME):
            self.mgr.create_account(localconfig.SYSTEM_ACCOUNT_NAME,
                                    AccountTypes.service,
                                    'system@system',
                                    creator=localconfig.SYSTEM_IDENTITY_BOOTSTRAPPER)

        if not self.mgr.get_user(localconfig.SYSTEM_USERNAME):
            self.mgr.create_user(localconfig.SYSTEM_ACCOUNT_NAME,
                                 localconfig.SYSTEM_USERNAME,
                                 creator_name=localconfig.SYSTEM_IDENTITY_BOOTSTRAPPER)
            self.mgr.add_user_credential(creator_name=localconfig.SYSTEM_IDENTITY_BOOTSTRAPPER,
                                         username=localconfig.SYSTEM_USERNAME,
                                         credential_type=UserAccessCredentialTypes.password)
    except Exception as err:
        logger.exception('Error initializing system identities')
        raise Exception(
            "Initialization failed: could not fetch/add anchore-system user from/to DB - exception: " + str(err))

    # admin user
    try:
        if not self.mgr.get_account(localconfig.ADMIN_ACCOUNT_NAME):
            self.mgr.create_account(localconfig.ADMIN_ACCOUNT_NAME,
                                    AccountTypes.admin,
                                    'admin@system',
                                    creator=localconfig.SYSTEM_IDENTITY_BOOTSTRAPPER)

        if not self.mgr.get_user(localconfig.ADMIN_USERNAME):
            self.mgr.create_user(localconfig.ADMIN_ACCOUNT_NAME,
                                 localconfig.ADMIN_USERNAME,
                                 creator_name=localconfig.SYSTEM_IDENTITY_BOOTSTRAPPER)
            self.mgr.add_user_credential(creator_name=localconfig.SYSTEM_IDENTITY_BOOTSTRAPPER,
                                         username=localconfig.ADMIN_USERNAME,
                                         credential_type=UserAccessCredentialTypes.password,
                                         value=localconfig.ADMIN_USER_DEFAULT_PASSWORD)

        return True
    except Exception as err:
        logger.exception('Error initializing system identities')
        raise Exception(
            "Initialization failed: could not fetch/add admin user from/to DB - exception: " + str(err))
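
# The bootstrap above is idempotent: each identity is created only if absent,
# so re-running initialization against an already-populated DB is a no-op.
# A minimal sketch of that get-or-create shape (hypothetical callables):
def _ensure_identity(get, create):
    return get() or create()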
def run_feeds_update(cls, json_obj=None, force_flush=False) -> Optional[List[FeedSyncResult]]:
    """
    Creates a task and runs it, optionally under a lease on another thread if locking is enabled.

    :return: list of FeedSyncResult, or None if no task could be built from json_obj
    """
    try:
        vulnerabilities_provider = get_vulnerabilities_provider()
        sync_configs = compute_selected_configs_to_sync(
            vulnerabilities_provider.get_config_name(),
            get_section_for_vulnerabilities(),
            vulnerabilities_provider.get_default_sync_config(),
        )
        if json_obj:
            task = cls.from_json(json_obj)
            if not task:
                return None
            task.sync_configs = sync_configs
        else:
            task = FeedsUpdateTask(sync_configs=sync_configs, flush=force_flush)

        result = []
        if cls.locking_enabled:
            # Workaround: the lambda appends to `result` so the return value of
            # task.execute() can be retrieved from the leased thread execution
            run_target_with_lease(
                account=None,
                lease_id="feed_sync",
                ttl=90,
                target=lambda: result.append(task.execute()),
            )
            if result:
                result = result[0]
        else:
            result = task.execute()

        return result
    except Exception:
        logger.exception("Error executing feeds update")
        raise
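
# Standalone sketch of the result-capture workaround used above: a callable
# executed on another thread (or under a lease) cannot return a value
# directly, so the lambda closes over a mutable list the caller still holds.
import threading

def _run_and_capture(target):
    result = []
    worker = threading.Thread(target=lambda: result.append(target()))
    worker.start()
    worker.join()
    return result[0] if result else None

# _run_and_capture(lambda: 40 + 2) -> 42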
def process_preflight():
    """
    Execute the preflight functions, aborting service startup if any of them
    raises an uncaught exception.

    :return:
    """
    preflight_check_functions = [init_db_content, init_feed_registry]

    for fn in preflight_check_functions:
        try:
            fn()
        except Exception as e:
            logger.exception(
                "Preflight checks failed with error: {}. Aborting service startup".format(e)
            )
            sys.exit(1)
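
# Minimal runnable sketch of the fail-fast preflight loop above; the checks
# here are hypothetical stand-ins for init_db_content and init_feed_registry.
import sys

def _run_preflight(checks):
    for check in checks:
        try:
            check()
        except Exception as e:
            print("Preflight check failed: {}. Aborting startup".format(e))
            sys.exit(1)

# _run_preflight([lambda: None])  # all pass; a raising check would exit(1)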
def create_user_credential(accountname, username, credential):
    """
    POST /accounts/{accountname}/users/{username}/credentials

    :param accountname: str account id for the account record
    :param username: str username
    :param credential: json object of the credential type
    :return: credential json object
    """
    try:
        with session_scope() as session:
            mgr = manager_factory.for_session(session)
            user = verify_user(username, accountname, mgr)

            # For now, only support passwords via the api
            if credential['type'] != 'password':
                return make_response_error('Invalid credential type', in_httpcode=400), 400

            if not credential.get('value'):
                return make_response_error(
                    'Invalid credential value, must be non-null and non-empty',
                    in_httpcode=400), 400

            try:
                cred_type = UserAccessCredentialTypes(credential['type'])
            except Exception:
                return make_response_error(errmsg='Invalid credential type', in_httpcode=400), 400

            cred = mgr.add_user_credential(username=username,
                                           credential_type=cred_type,
                                           value=credential['value'])

            return credential_db_to_msg(cred), 200
    except UserNotFoundError as ex:
        return make_response_error('User not found', in_httpcode=404), 404
    except AccountNotFoundError as ex:
        return make_response_error('Account not found', in_httpcode=404), 404
    except Exception as e:
        logger.exception('API Error')
        return make_response_error(
            'Internal error creating credential for account {}'.format(accountname)), 500
def update_policy(policyId, bodycontent):
    """
    PUT /policies/{policyId}

    Updates a policy

    :param policyId: id of the policy to update; must match the id in the body
    :param bodycontent: the policy bundle payload
    :return:
    """
    try:
        request_inputs = anchore_engine.apis.do_request_prep(connexion.request, default_params={})
        user_id = request_inputs['userId']
        bundle_policyId = bodycontent.get('policyId')
        active = bodycontent.get('active', False)

        if not bundle_policyId:
            raise Exception("must include 'policyId' in the json payload for this operation")

        if policyId != bundle_policyId:
            raise Exception('Id mismatch between route and bundle content. {} != {}'.format(policyId, bundle_policyId))

        policybundle = bodycontent.get('policybundle')

        with db.session_scope() as dbsession:
            record = db_policybundle.get(user_id, policyId, session=dbsession)
            if not record:
                return anchore_engine.common.helpers.make_response_error(
                    "Existing policyId not found to update", in_httpcode=404), 404

            return save_policy(user_id, policyId, active, policybundle, dbsession), 200
    except Exception as err:
        logger.exception('Uncaught exception')
        return str(err), 500
def get_archived_analysis(imageDigest):
    """
    GET /archives/images/{digest}

    :param imageDigest:
    :return:
    """
    try:
        with db.session_scope() as session:
            return_object = db_archived_images.get(session, ApiRequestContextProxy.namespace(), imageDigest)
            if not return_object:
                return make_response_error('Not found in archive', in_httpcode=404), 404

            return archived_img_to_msg(return_object), 200
    except Exception as err:
        logger.exception('Error fetching archived image analysis')
        return make_response_error(str(err), in_httpcode=500), 500
def get_account(accountname):
    """
    GET /accounts/{accountname}

    :param accountname:
    :return:
    """
    try:
        with session_scope() as session:
            mgr = manager_factory.for_session(session)
            account = verify_account(accountname, mgr)
            return account_db_to_msg(account), 200
    except AccountNotFoundError as ex:
        return make_response_error('Account not found', in_httpcode=404), 404
    except Exception as ex:
        logger.exception('API Error')
        return make_response_error('Error getting account', in_httpcode=500), 500
def _process_mapping(self, bundle_exec, image_object, tag):
    # Execute the mapping to find the policy and whitelists to execute next
    try:
        if self.mapping:
            bundle_exec.executed_mapping = self.mapping.execute(image_object, tag)
        else:
            # No mapping configured: the image cannot be routed to a policy,
            # so force a failure decision rather than silently passing
            bundle_exec.executed_mapping = None
            bundle_exec.bundle_decision = BundleDecision(policy_decisions=[FailurePolicyDecision()])

        return bundle_exec
    except PolicyError as e:
        logger.exception('Error executing bundle mapping')
        bundle_exec.abort_with_failure(e)
        return bundle_exec
    except Exception as e:
        logger.exception('Error executing bundle mapping')
        bundle_exec.abort_with_failure(PolicyError.caused_by(e))
        return bundle_exec
def _process_api_spec(self):
    try:
        swagger_content = UserFacingApiService.parse_swagger(
            os.path.join(self.__spec_dir__, self.__spec_file__))
        actions = UserFacingApiService.build_action_map(swagger_content)

        missing = [item for item in actions.items() if item[1] is None]
        if missing:
            raise Exception(
                'API Spec validation error: All operations must have a x-anchore-authz-action label. '
                'Missing for: {}'.format(missing))
        else:
            self._authz_actions = actions
    except Exception as ex:
        logger.exception('Error loading swagger spec for authz action parsing. Cannot proceed')
        raise ex
def process_preflight():
    """
    Execute the preflight functions, aborting service startup if any of them
    raises an uncaught exception.

    :return:
    """
    config = localconfig.get_config()

    # read the global feed disable parameter
    feed_sync_enabled = config.get('feeds', {}).get('sync_enabled', True)

    # get the list of feeds if they have been explicitly configured in config.yaml
    feed_enabled_status = config.get('feeds', {}).get('selective_sync', {}).get('feeds', {})

    # check to see if the engine is configured to sync at least one data feed
    at_least_one = any(feed_enabled_status.values())

    # toggle credential validation based on whether any feeds are configured to sync
    skip_credential_validate = False
    if not feed_sync_enabled or not at_least_one:
        logger.info("Engine is configured to skip data feed syncs - skipping feed sync client check")
        skip_credential_validate = True

    preflight_check_functions = [_init_db_content]
    if not skip_credential_validate:
        preflight_check_functions.append(_check_feed_client_credentials)

    for fn in preflight_check_functions:
        try:
            fn()
        except Exception as e:
            logger.exception('Preflight checks failed with error: {}. Aborting service startup'.format(e))
            sys.exit(1)
def add_image(image_metadata=None, tag=None, digest=None, created_at=None, from_archive=False, allow_dockerfile_update=False):
    try:
        if image_metadata is None:
            image_metadata = {}

        request_inputs = anchore_engine.apis.do_request_prep(
            connexion.request,
            default_params={
                'tag': tag,
                'digest': digest,
                'created_at': created_at,
                'allow_dockerfile_update': allow_dockerfile_update
            })

        if from_archive:
            task = archiver.RestoreArchivedImageTask(
                account=ApiRequestContextProxy.namespace(), image_digest=digest)
            task.start()

            request_inputs['params'] = {}
            request_inputs['method'] = 'GET'

            with db.session_scope() as session:
                return_object, httpcode = anchore_engine.services.catalog.catalog_impl.image_imageDigest(
                    session, request_inputs, digest)
        else:
            with db.session_scope() as session:
                return_object, httpcode = anchore_engine.services.catalog.catalog_impl.image(
                    session, request_inputs, bodycontent=image_metadata)
    except Exception as err:
        logger.exception('Error processing image add')
        httpcode = 500
        return_object = str(err)

    return return_object, httpcode
def archive_image_analysis(imageReferences):
    """
    POST /archives/images

    body = [digest1, digest2, ... ]
    """
    try:
        if not imageReferences or len(imageReferences) > 100:
            return make_response_error(
                'Bad Request. Must include a list of digests between 1 and 100 entries long',
                in_httpcode=400), 400

        results = []
        for digest in imageReferences:
            try:
                # Do the synchronous part to start the state transition
                task = ArchiveImageTask(
                    account=ApiRequestContextProxy.namespace(), image_digest=digest)
                result_status, result_detail = task.run()
                results.append({
                    'digest': task.image_digest,
                    'status': result_status,
                    'detail': result_detail
                })
            except Exception as ex:
                logger.exception('Unexpected uncaught exception from the archive task execution')
                results.append({
                    'digest': digest,
                    'status': 'error',
                    'detail': str(ex)
                })

        return results, 200
    except Exception as err:
        logger.exception('Error processing image archive request')
        return make_response_error(err, in_httpcode=500), 500
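
# Sketch of the per-item error isolation above: a failing digest becomes an
# 'error' entry in the results rather than failing the whole batch, so the
# endpoint always reports per-digest outcomes. archive_one is a hypothetical
# stand-in for the ArchiveImageTask run.
def _archive_all(digests, archive_one):
    results = []
    for digest in digests:
        try:
            status, detail = archive_one(digest)
            results.append({'digest': digest, 'status': status, 'detail': detail})
        except Exception as ex:
            results.append({'digest': digest, 'status': 'error', 'detail': str(ex)})
    return results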
def is_permitted(self, identifiers, permission_s):
    """
    :type identifiers: SimpleRealmCollection
    """
    # If a service account or admin account user, use the default handler, not external calls
    if ExternalAuthzRealm.__account_type_provider__ and callable(ExternalAuthzRealm.__account_type_provider__) and \
            ExternalAuthzRealm.__account_type_provider__(identifiers.primary_identifier) in [AccountTypes.service, AccountTypes.admin]:
        logger.debug('Detected admin or service account, using internal authz')
        return super().is_permitted(identifiers, permission_s)

    result_list = []  # List of tuples (required_perm, is_permitted)
    identifier = identifiers.primary_identifier

    actions = {}
    for required_perm in permission_s:
        required_permission = DefaultPermission(wildcard_string=required_perm)
        actions[Action(domain=','.join(required_permission.domain),
                       action=','.join(required_permission.action),
                       target=','.join(required_permission.target))] = required_perm

    if actions:
        try:
            resp = self.__client__.authorize(principal=identifier,
                                             action_s=list(actions.keys()))
            for i in resp.allowed:
                result_list.append((actions[i], True))
            for i in resp.denied:
                result_list.append((actions[i], False))
        except Exception as e:
            logger.exception('Unexpected error invoking authorization plugin via client: {}'.format(e))
            logger.error(
                'Authorization plugin invocation error. Could not perform a proper authz check. '
                'Please check configuration and/or authz service status: {}'.format(self.__client__.url))
            raise e

    return result_list
def get_authc_info(self, identifier):
    """
    Function defined in the interface. Returns a dict:

    {
      'account_locked': bool,
      'authc_info': {
        '<str cred type>': {
          'credential': <value>,
          'failed_attempts': <int>
        }
      },
      'account_id': <str>
    }

    :param identifier: the username to look up
    :return: populated dict defined above or empty structured dict above
    """
    result_account = {
        'account_locked': None,
        'authc_info': {},
        'account_id': None,
        'anchore_identity': None  # Used to transmit more than just username
    }

    with session_scope() as db:
        idp = idp_factory.for_session(db)
        try:
            identity, creds = idp.lookup_user(identifier)
        except Exception:
            logger.exception('Error looking up user')
            identity = None
            creds = None

        result_account['account_locked'] = False
        if identity:
            result_account['anchore_identity'] = identity

        if creds:
            result_account['authc_info'] = {
                cred.type.value: {'credential': cred.value, 'failed_attempts': []}
                for cred in creds
            }

    return result_account
def import_archive(imageDigest, archive_file):
    from anchore_engine.services.catalog import archiver

    try:
        digest = imageDigest
        task = archiver.RestoreArchivedImageTaskFromArchiveTarfile(
            account=ApiRequestContextProxy.namespace(),
            fileobj=archive_file,
            image_digest=digest,
        )
        task.start()
    except Exception as ex:
        logger.exception("Failed to import image archive")
        return (
            make_response_error("Error importing image archive: {}".format(ex), in_httpcode=500),
            500,
        )

    return "Success", 200
def get_all_feed_groups_detached(feed_name):
    """
    Looks up the named feed and returns its FeedGroupMetadata objects as
    returned by the db, but detached from the session.

    :return: list of detached FeedGroupMetadata objects
    """
    db_session = get_session()
    try:
        feeds = lookup_feed(db_session, feed_name)
        response = []
        for f in feeds:
            if f.groups:
                response.extend([g.to_detached() for g in f.groups])

        return response
    except Exception as e:
        logger.exception("Could not get feed metadata")
        raise e
    finally:
        db_session.rollback()
def delete_account(accountname):
    """
    DELETE /account/{accountname}

    :param accountname:
    :return:
    """
    try:
        with session_scope() as session:
            mgr = manager_factory.for_session(session)
            account = verify_account(accountname, mgr)
            resp = mgr.delete_account(accountname)
            return None, 204
    except AccountNotFoundError as ex:
        return make_response_error('Account not found', in_httpcode=404), 404
    except Exception as e:
        logger.exception('API Error')
        return make_response_error('Error deleting account', in_httpcode=500), 500
def do_auth():
    try:
        resp = get_authorizer().inline_authz([])
        if resp is not None:
            if isinstance(resp, tuple):
                if isinstance(resp[0], dict):
                    return Response(json.dumps(resp[0]), status=resp[1], content_type='application/json')
                else:
                    return Response(resp[0], status=resp[1])
            return resp
    except Exception:
        logger.exception('Rejected')

    logger.info("Authc rejected!")
    return Response('Unauthorized',
                    status=401,
                    headers=[('WWW-Authenticate', 'basic realm="Authentication required"')])
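
# The authorizer above may hand back a full Response object, a (dict, status)
# tuple, or a (str, status) tuple. A plain-Python sketch of that
# normalization with no Flask dependency (the 3-tuple return shape here is
# illustrative only):
def _normalize_authz_response(resp):
    if isinstance(resp, tuple):
        body, status = resp[0], resp[1]
        if isinstance(body, dict):
            return ('application/json', body, status)
        return ('text/plain', body, status)
    return resp  # already a response-like object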
def _get_report_generated_by(self, grype_response):
    generated_by = {"scanner": self.__class__.__name__}
    try:
        descriptor_dict = grype_response.get("descriptor", {})
        db_dict = descriptor_dict.get("db", {})
        generated_by.update(
            {
                "grype_version": descriptor_dict.get("version"),
                "db_checksum": db_dict.get("checksum"),
                "db_schema_version": db_dict.get("schemaVersion"),
                "db_built_at": db_dict.get("built"),
            }
        )
    except (AttributeError, ValueError):
        logger.exception("Ignoring error parsing report metadata from grype response")

    return generated_by
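
# Hypothetical grype-style response fragment showing the fields the parser
# above extracts; any absent key simply comes back as None via .get():
_sample_grype_response = {
    "descriptor": {
        "version": "0.34.7",  # hypothetical grype version
        "db": {
            "checksum": "sha256:abc123...",  # hypothetical checksum
            "schemaVersion": 5,
            "built": "2022-01-01T00:00:00Z",
        },
    }
}
# For this input, generated_by would carry grype_version="0.34.7",
# db_schema_version=5, etc., alongside the scanner class name.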
def create_operation():
    """
    POST /imports/images

    :return:
    """
    try:
        client = internal_client_for(
            CatalogClient, userId=ApiRequestContextProxy.namespace()
        )
        resp = client.create_image_import()
        return resp, 200
    except api_exceptions.AnchoreApiError as ex:
        return (
            make_response_error(ex, in_httpcode=ex.__response_code__),
            ex.__response_code__,
        )
    except Exception as ex:
        logger.exception("Unexpected error in api processing")
        return make_response_error(ex, in_httpcode=500), 500
def save_token(token, request):
    logger.debug('Saving token: {}'.format(token))
    try:
        if request.user:
            user_id = request.user.username
        else:
            user_id = None

        client = request.client
        tok = OAuth2Token(client_id=client.client_id, user_id=user_id, **token)

        db = get_session()
        db.add(tok)
        db.commit()
        logger.info('Saved new token')
    except Exception:
        logger.exception('Exception saving token')
        raise
def list_users(accountname):
    """
    GET /accounts/{accountname}/users

    :param accountname:
    :return:
    """
    try:
        with session_scope() as session:
            mgr = manager_factory.for_session(session)
            users = mgr.list_users(accountname)
            if users is None:
                return make_response_error('No such account'), 404

            response = list(map(user_db_to_msg, users))
            return response, 200
    except Exception as ex:
        logger.exception('API Error')
        return make_response_error(errmsg=str(ex)), 500
def delete_account(accountname):
    """
    DELETE /account/{accountname}

    :param accountname:
    :return:
    """
    try:
        with session_scope() as session:
            mgr = manager_factory.for_session(session)
            try:
                resp = mgr.delete_account(accountname)
                return '', 200
            except Exception as e:
                return make_response_error('Internal error deleting account {}'.format(accountname)), 500
    except Exception as ex:
        logger.exception('API Error')
        return make_response_error(errmsg=str(ex)), 500
def handle_feed_sync_trigger(*args, **kwargs):
    """
    Checks to see if there is a task for a feed sync in the queue and, if not, adds one.
    The interval for firing this should be longer than the expected feed sync duration.

    :param args:
    :param kwargs:
    :return:
    """
    system_user = _system_creds()

    logger.info('init args: {}'.format(kwargs))
    cycle_time = kwargs['mythread']['cycle_timer']

    while True:
        try:
            all_ready = anchore_engine.clients.common.check_services_ready(['simplequeue'])
            if not all_ready:
                logger.info("simplequeue service not yet ready, will retry")
            else:
                logger.info('Feed Sync Trigger activated')
                if not simplequeue.is_inqueue(userId=system_user,
                                              name=feed_sync_queuename,
                                              inobj=feed_sync_msg):
                    try:
                        simplequeue.enqueue(userId=system_user,
                                            name=feed_sync_queuename,
                                            inobj=feed_sync_msg)
                    except Exception:
                        logger.exception('Could not enqueue message for a feed sync')
                logger.info('Feed Sync Trigger done, waiting for next cycle.')
        except Exception as e:
            logger.exception('Error caught in feed sync trigger handler. Will continue. Exception: {}'.format(e))

        time.sleep(cycle_time)

    return True
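
# Sketch of the enqueue-if-absent pattern above, with a set standing in for
# the simplequeue service: repeated trigger firings cannot pile up duplicate
# sync tasks because a task is only added when no identical one is pending.
def _enqueue_once(pending, msg_key, enqueue):
    if msg_key not in pending:
        enqueue(msg_key)
        pending.add(msg_key)

# pending = set()
# _enqueue_once(pending, 'feed_sync', print)  # enqueues
# _enqueue_once(pending, 'feed_sync', print)  # no-op, already pending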
def delete_archived_analysis(imageDigest, force=False):
    """
    DELETE /archives/images/{digest}

    :param imageDigest: image digest to delete the archive for
    :return:
    """
    # Transition the image record to the 'deleting' state before removing artifacts
    try:
        if force:
            start_statuses = ['archiving', 'archived', 'deleting', 'deleted']
        else:
            start_statuses = ['archived', 'deleting']

        with session_scope() as session:
            resp = db_archived_images.update_image_status(
                session, ApiRequestContextProxy.namespace(), imageDigest,
                start_statuses, 'deleting')
            if resp is None:
                return make_response_error('Not found in archive', in_httpcode=404), 404
    except Exception as ex:
        logger.exception("Error deleting archive for image {}/{}".format(
            ApiRequestContextProxy.namespace(), imageDigest))
        return make_response_error('Invalid object state: {}'.format(ex), in_httpcode=400), 400

    try:
        task = DeleteArchivedImageTask(
            account=ApiRequestContextProxy.namespace(), image_digest=imageDigest)
        task.run()

        resp = None
        return resp, 200
    except Exception as ex:
        logger.exception('Failed deleting archived image')
        return make_response_error('Error deleting image archive: {}'.format(ex), in_httpcode=500), 500
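
# Sketch of the status-guard above: 'force' widens the set of states a delete
# may start from, and the conditional update from one of those states to
# 'deleting' acts as a compare-and-set that keeps concurrent deletes safe.
def _next_delete_status(current_status, force=False):
    allowed = (
        {'archiving', 'archived', 'deleting', 'deleted'}
        if force
        else {'archived', 'deleting'}
    )
    return 'deleting' if current_status in allowed else None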
def refresh(self):
    """
    Refreshes the cache state (not the entry itself) for this initialized request.

    Has the stateful side effect of flushing invalid entries from the cache:
    if a valid entry exists it is loaded, and any invalid entry is deleted.

    :return:
    """
    session = get_session()
    match = None

    for result in self._lookup():
        if self._should_evaluate(result) != EvaluationCacheManager.CacheStatus.valid:
            self._delete_entry(result)
        else:
            match = result

    session.flush()

    if match:
        if match.is_archive_ref():
            bucket, key = match.archive_tuple()
            try:
                with self._catalog_client.timeout_context(
                    self._default_catalog_conn_timeout,
                    self._default_catalog_read_timeout,
                ) as timeout_client:
                    data = timeout_client.get_document(bucket, key)
            except Exception:
                log.exception(
                    "Unexpected error getting document {}/{} from archive".format(bucket, key))
                data = None
        else:
            data = match.result.get("result")
    else:
        data = None

    return data
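
# Sketch of the refresh policy above: every stale entry is evicted as a side
# effect of the scan, and at most the last valid entry survives to be loaded.
def _refresh_entries(entries, is_valid, delete_entry):
    match = None
    for entry in entries:
        if not is_valid(entry):
            delete_entry(entry)
        else:
            match = entry
    return match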