def get(self):
    '''List all APIs'''
    parser = reqparse.RequestParser()
    parser.add_argument('filterPrefix',
                        type=inputs.regex('.{2,}'),
                        required=False,
                        trim=True,
                        help='Filter prefix')
    parser.add_argument('filterContains',
                        type=inputs.regex('.{2,}'),
                        required=False,
                        trim=True,
                        help='Filter contains')
    args = parser.parse_args()
    if args['filterPrefix']:
        _apis = []
        for item in apis:
            if item['api'].lower().startswith(args['filterPrefix'].lower()) or \
                    item['title'].lower().startswith(args['filterPrefix'].lower()):
                _apis.append(item)
        return _apis
    if args['filterContains']:
        _apis = []
        for item in apis:
            if args['filterContains'].lower() in item['api'].lower() or \
                    args['filterContains'].lower() in item['title'].lower():
                _apis.append(item)
        return _apis
    return apis
def ledger_name_parser_plain(default=config.LEDGER_NAME, parser=None, location='form'):
    help_text = 'Ledger name (default: "{}")'.format(default) if default else 'Ledger name'
    parser = reqparse.RequestParser() if not parser else parser
    parser.add_argument(
        'ledger_name',
        type=inputs.regex('(?!^.*--)(?!^[0-9]+$)(?!^-)(?!.*-$)^[A-Za-z0-9-]+$'),
        help=help_text,
        location=location,
        default=default,
        required=True,
        case_sensitive=True,
        trim=True)
    return parser
def table_name_parser_plain(default=config.LEDGER_NAME, parser=None, location='form'):
    parser = reqparse.RequestParser() if not parser else parser
    parser = ledger_name_parser_choices_or_plain(default=default,
                                                 parser=parser,
                                                 location=location)
    parser.add_argument('table_name',
                        type=inputs.regex('^[A-Za-z_]{1}[A-Za-z0-9_]{1,127}$'),
                        help='A valid QLDB table name',
                        location=location,
                        default=None,
                        required=True,
                        nullable=False,
                        case_sensitive=True,
                        trim=True)
    return parser
def table_index_parser_plain(default=config.LEDGER_NAME, parser=None, location='form'):
    parser = reqparse.RequestParser() if not parser else parser
    parser = table_name_parser_plain(default=default,
                                     parser=parser,
                                     location=location)
    parser.add_argument(
        'index_attribute',
        type=inputs.regex('^[A-Za-z_]{1}[A-Za-z0-9_]{1,127}$'),
        help='The table field name on which to create the index.',
        location=location,
        default=None,
        required=True,
        nullable=False,
        case_sensitive=True,
        trim=True)
    return parser
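# Hedged usage sketch (not part of the snippets above): a hypothetical
# flask_restful Resource that consumes the chained parser factories.
# table_index_parser_plain() layers the ledger, table, and index arguments
# onto a single RequestParser, so one parse_args() call validates all three
# regexes and responds with HTTP 400 if any of them fails.
from flask_restful import Resource


class TableIndexResource(Resource):
    def post(self):
        args = table_index_parser_plain().parse_args()
        return {
            'ledger_name': args['ledger_name'],
            'table_name': args['table_name'],
            'index_attribute': args['index_attribute'],
        }, 201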
class IngredientListResource(Resource):
    """Ingredient List Resource"""
    param_parser = pagination_parser.copy()
    param_parser.add_argument(
        "filter",
        type=inputs.regex(r"^\w\w\w+$"),
        help="Search for an ingredient containing this text - must be at least 3 characters!",
        required=False,
        trim=True,
    )

    @ns.expect(param_parser)
    def get(self):
        args = self.param_parser.parse_args()
        schema = IngredientSchema(many=True)
        ingredients = set_sort_order(Ingredient.query, Ingredient, **args)

        if args.get("filter") is not None:
            ingredients = set_search_filter(
                ingredients,
                Ingredient,
                search_field="name",
                search_value=args.get("filter"),
            ).all()
            return {
                "_meta": {},
                "_links": {},
                "message": f"Found {len(ingredients)} ingredient{'' if len(ingredients) == 1 else 's'} matching '{args.get('filter')}'",
                "data": schema.dump(ingredients, many=True),
            }, 200

        ingredients = ingredients.paginate(args.get("page"), args.get("per_page"))
        return {
            **generate_query_metadata(ingredients),
            **generate_link_metadata(ingredients, "api_v1.list_ingredients", **args),
            "message": f"Returning {ingredients.total} ingredients",
            "data": schema.dump(ingredients.items, many=True),
        }, 200
def get(self):
    probe_parser = reqparse.RequestParser()
    probe_parser.add_argument('dest_addr', type=inputs.ip, location='args', required=True)
    probe_parser.add_argument('dest_port', type=inputs.int_range(0, 65535), location='args', required=True)
    probe_parser.add_argument('src_port', type=inputs.int_range(0, 65535), location='args', required=False)
    probe_parser.add_argument('proto', choices=('tcp', 'udp'), default='tcp', location='args', required=False)
    probe_parser.add_argument('ts', type=float, location='args', required=True)
    probe_parser.add_argument('h', type=inputs.regex('^[0-9a-f]{32,512}$'), location='args', required=True)
    args = probe_parser.parse_args(strict=True)

    remote_addr = request.remote_addr
    req_counter = args['ts']

    context = zmq.Context()
    socket = context.socket(zmq.PUSH)
    socket.connect(app.config.get('ZMQ_BIND'))

    # Validate HMAC and timestamp or return HTTP 403
    try:
        validate_request(args)
        app.logger.info('authenticated dispatch request from %s '
                        'with valid HMAC', remote_addr)
    except RequestValidationError as e:
        msg = 'received invalid request from %s: %s'
        app.logger.warning(msg, remote_addr, e)
        abort(403, e)

    persist_counter(app.config['DB_PATH'], req_counter)
    app.logger.debug('stored new value %s in counter database', req_counter)

    # Remove counter from args and pass to task runner
    del args['ts']
    socket.send_json(args)
    app.logger.debug('passed dispatch request parameters to message queue')
    return args
def get(self):
    '''List all groups'''
    parser = reqparse.RequestParser()
    parser.add_argument('filterPrefix',
                        type=inputs.regex('.{2,}'),
                        required=False,
                        trim=True,
                        help='Filter prefix')
    args = parser.parse_args()
    query = None
    if args['filterPrefix']:
        query = 'email:%s* name:%s*' % (_escape_query_arg(args['filterPrefix']),
                                        _escape_query_arg(args['filterPrefix']))
    credentials = Credentials(
        token.get_access_token_for_scopes(self.config, [
            'https://www.googleapis.com/auth/admin.directory.group.readonly'
        ]))
    group_service = build('admin',
                          'directory_v1',
                          credentials=credentials,
                          cache=get_discovery_cache())
    results = group_service.groups().list(customer=self.customer_id,
                                          maxResults=10,
                                          query=query).execute()
    if 'groups' in results:
        _ret = []
        for group in results['groups']:
            _group = {
                'id': group['id'],
                'email': group['email'],
                'name': group['name'],
                'description': group['description'],
            }
            _ret.append(_group)
        return _ret
    return []
class Parser:
    # Session
    SessionPostRequest = reqparse.RequestParser()
    SessionPostRequest.add_argument("password", type=str, required=True, location="json")

    # Audit
    AuditListGetRequest = reqparse.RequestParser()
    AuditListGetRequest.add_argument("q", type=str, default="", location="args")
    AuditListGetRequest.add_argument("page", type=inputs.natural, default=0, location="args")
    AuditListGetRequest.add_argument(
        "count",
        type=inputs.int_range(1, AUDIT_LIST_MAX_COUNT),
        default=AUDIT_LIST_DEFAULT_COUNT,
        location="args",
    )

    AuditListPostRequest = reqparse.RequestParser()
    AuditListPostRequest.add_argument("name", type=inputs.regex("^.{1,128}$"), required=True, location="json")
    AuditListPostRequest.add_argument("description", type=inputs.regex("^.{,128}$"), default="", location="json")

    AuditItemGetRequest = reqparse.RequestParser()
    AuditItemGetRequest.add_argument("include_results", type=inputs.boolean, default=False, location="args")

    AuditPatchRequest = AuditListPostRequest

    # Scan
    ScanPostRequest = reqparse.RequestParser()
    ScanPostRequest.add_argument("name", type=inputs.regex("^.{1,128}$"), required=True, location="json")
    ScanPostRequest.add_argument("description", type=inputs.regex("^.{,128}$"), default="", location="json")
    ScanPostRequest.add_argument("target", type=inputs.regex("^.{1,128}$"), required=True, location="json")
    ScanPostRequest.add_argument("detection_module", type=inputs.regex("^.{1,128}$"), required=True, location="json")
    ScanPostRequest.add_argument("detection_mode", type=inputs.regex("^.{1,128}$"), required=True, location="json")

    ScanSchedulePostRequest = reqparse.RequestParser()
    ScanSchedulePostRequest.add_argument("scheduled_at", type=inputs.datetime_from_iso8601, required=True, location="json")
    ScanSchedulePostRequest.add_argument(
        "max_duration",
        type=inputs.int_range(1, SCAN_MAX_DURATION_IN_HOUR),
        required=True,
        location="json",
    )
    ScanSchedulePostRequest.add_argument("rrule", type=inputs.regex("^RRULE:.{,128}$"), default="", location="json")

    # Integration
    IntegrationPatchRequest = reqparse.RequestParser()
    IntegrationPatchRequest.add_argument("url", type=inputs.URL(schemes=["https"], check=False), required=True, location="json")
    IntegrationPatchRequest.add_argument("verbose", type=inputs.boolean, default=False, location="json")
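# Hedged usage sketch (not from the project that defines Parser above): a
# hypothetical endpoint showing how one of these shared parsers is typically
# consumed. Values for "name"/"description" that fail the regex types cause
# parse_args() to respond with HTTP 400.
from flask_restful import Resource


class AuditListResource(Resource):
    def post(self):
        args = Parser.AuditListPostRequest.parse_args()
        return {"name": args["name"], "description": args["description"]}, 201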
def get(self):
    '''List all users'''
    parser = reqparse.RequestParser()
    parser.add_argument('filterPrefix',
                        type=inputs.regex('.{2,}'),
                        required=False,
                        trim=True,
                        help='Filter prefix')
    parser.add_argument('filterContains',
                        type=inputs.regex('.{2,}'),
                        required=False,
                        trim=True,
                        help='Filter contains')
    args = parser.parse_args()
    query = None
    if args['filterPrefix']:
        query = 'email:"%s*"' % (_escape_query_arg(args['filterPrefix']))
    if args['filterContains']:
        query = '"%s"' % (_escape_query_arg(args['filterContains']))
    credentials = Credentials(
        token.get_access_token_for_scopes(self.config, [
            'https://www.googleapis.com/auth/admin.directory.user.readonly'
        ]))
    user_service = build('admin',
                         'directory_v1',
                         credentials=credentials,
                         cache=get_discovery_cache())
    results = user_service.users().list(customer=self.customer_id,
                                        query=query,
                                        projection='full',
                                        maxResults=10).execute()
    if 'users' in results:
        _ret = []
        for user in results['users']:
            if 'suspended' in user and user['suspended']:
                continue
            title = ''
            if 'organizations' in user:
                if 'title' in user['organizations'][0]:
                    title = user['organizations'][0]['title']
            _user = {
                'email': user['primaryEmail'],
                'name': user['name']['fullName'],
                'title': title,
            }
            try:
                photo_results = user_service.users().photos().get(
                    userKey=user['primaryEmail']).execute()
                if 'photoData' in photo_results:
                    _user['photo'] = photo_results['photoData']
                    _user['photo_mimetype'] = photo_results['mimeType']
                    _user['photo_width'] = photo_results['width']
                    _user['photo_height'] = photo_results['height']
            except Exception:
                pass
            _ret.append(_user)
        return _ret
    return []
def test_schema(self):
    assert inputs.regex(r'^[0-9]+$').__schema__ == {
        'type': 'string',
        'pattern': '^[0-9]+$'
    }
def test_bad_pattern(self):
    with pytest.raises(re.error):
        inputs.regex('[')
def test_bad_input(self, value):
    num_only = inputs.regex(r'^[0-9]+$')
    with pytest.raises(ValueError):
        num_only(value)
def test_valid_input(self, value):
    num_only = inputs.regex(r'^[0-9]+$')
    assert num_only(value) == value
def test_schema(self):
    assert inputs.regex(r"^[0-9]+$").__schema__ == {
        "type": "string",
        "pattern": "^[0-9]+$",
    }
# https://regexr.com/
# date_regx_format = f'^(0[1-9]|1[0-9]|2[0-9]|3[0-1])(-)(0[1-9]|1[0-2])(-){current_year}+$'
# time_regx_format = f'^([01]?[0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])+$'
# date_regx_format = f'^(0[1-9]|1[0-9]|2[0-9]|3[0-1])-(0[1-9]|1[0-2])-{current_year}+$'  # 01-01-2021

day_regx = '(0[1-9]|1[0-9]|2[0-9]|3[0-1])'
month_regx = '(0[1-9]|1[0-2])'
date_regx_format = f'^{current_year}-{month_regx}-{day_regx}+$'  # 2021-01-01
time_regx_format = f'^([01]?[0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])+$'  # 00:00:00
datetime_regx_format = date_regx_format.replace('+$', ' ') + time_regx_format.replace('^', '')
date_datetime_regx_format = date_regx_format + '|' + datetime_regx_format

date_range_req = reqparse.RequestParser()
date_range_req.add_argument('start',
                            type=inputs.regex(date_regx_format),
                            help=f'use format: {date_hint}',
                            required=True)
date_range_req.add_argument('end',
                            type=inputs.regex(date_regx_format),
                            help=f'use format: {date_hint}',
                            required=True)

limit_req = reqparse.RequestParser()
limit_req.add_argument('limit',
                       type=int,
                       required=False,
                       default=50,
                       help='results limit')
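# Hedged sketch of how the composed date pattern behaves at runtime (assumes
# current_year == 2021, matching the comments above). inputs.regex() returns
# the value unchanged when the pattern matches and raises ValueError otherwise,
# which reqparse converts into an HTTP 400 response for 'start'/'end'.
date_validator = inputs.regex(date_regx_format)
date_validator('2021-01-01')      # matches year-month-day, returned as-is
try:
    date_validator('01-01-2021')  # day-first order does not match the pattern
except ValueError:
    pass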