def test_check(self):
    """With check=True the validator verifies that the domain resolves via DNS."""
    dns_checked = inputs.URL(check=True, ip=True)
    assert dns_checked('http://www.google.com') == 'http://www.google.com', 'Should check domain'
    # This test will fail on a network where this address is defined
    self.assert_bad_url(dns_checked, 'http://this-domain-should-not-exist.com', 'Domain does not exists')
def test_check(self):
    """check=True should make the validator confirm the domain actually resolves."""
    resolver = inputs.URL(check=True, ip=True)
    good = resolver("http://www.google.com")
    assert good == "http://www.google.com", "Should check domain"
    # This test will fail on a network where this address is defined
    self.assert_bad_url(
        resolver,
        "http://this-domain-should-not-exist.com",
        "Domain does not exists",
    )
def test_reject_port(self, url):
    """URLs with an explicit port are rejected when port=False (the default)."""
    # Enable ip and auth so that only the custom port can trigger the rejection
    validator = inputs.URL(ip=True, auth=True)
    self.assert_bad_url(validator, url, 'Custom port is not allowed')
def test_allow_local(self, url):
    """local=True lets localhost-style URLs pass through unchanged."""
    local_ok = inputs.URL(ip=True, local=True)
    assert url == local_ok(url)
def test_valid_restricted_schemes(self, url):
    """URLs whose scheme is on the whitelist are returned as-is."""
    scheme_whitelist = inputs.URL(schemes=("sip", "irc"))
    assert url == scheme_whitelist(url)
def test_schema(self):
    """The default URL validator exposes a string/url JSON schema."""
    expected = {'type': 'string', 'format': 'url'}
    assert inputs.URL().__schema__ == expected
def test_excluded_domains(self, url):
    """Domains on the exclude list are rejected."""
    blocker = inputs.URL(exclude=['example.com', 'www.example.com'])
    self.assert_bad_url(blocker, url, 'Domain is not allowed')
def test_invalid_restricted_domains(self, url):
    """URLs outside the domain whitelist are rejected."""
    domain_whitelist = inputs.URL(domains=['example.com', 'www.example.com'])
    self.assert_bad_url(domain_whitelist, url, 'Domain is not allowed')
def test_invalid_restricted_schemes(self, url):
    """URLs using a scheme outside the whitelist are rejected."""
    allowed_schemes = inputs.URL(schemes=('sip', 'irc'))
    self.assert_bad_url(allowed_schemes, url, 'Protocol is not allowed')
def test_bad_urls(self, url):
    """Malformed URLs are rejected even under the most permissive settings."""
    # Every optional feature is enabled, so a rejection can only mean the URL is bad
    permissive = inputs.URL(ip=True, auth=True, port=True)
    self.assert_bad_url(permissive, url)
def test_valid_values_default(self, url):
    """Well-formed URLs pass through the default validator untouched."""
    default_validator = inputs.URL()
    assert url == default_validator(url)
def test_schema(self):
    """A default URL field advertises itself as a string with 'url' format."""
    schema = inputs.URL().__schema__
    assert schema == {"type": "string", "format": "url"}
def test_excluded_domains(self, url):
    """URLs pointing at a blacklisted domain must be refused."""
    denylist = inputs.URL(exclude=["example.com", "www.example.com"])
    self.assert_bad_url(denylist, url, "Domain is not allowed")
def test_invalid_restricted_domains(self, url):
    """A URL on a domain missing from the whitelist must be refused."""
    allowlist = inputs.URL(domains=["example.com", "www.example.com"])
    self.assert_bad_url(allowlist, url, "Domain is not allowed")
def test_invalid_restricted_schemes(self, url):
    """A URL whose scheme is not whitelisted must be refused."""
    sip_or_irc = inputs.URL(schemes=("sip", "irc"))
    self.assert_bad_url(sip_or_irc, url, "Protocol is not allowed")
def test_allow_port(self, url):
    """port=True accepts URLs carrying an explicit port."""
    with_port = inputs.URL(port=True)
    assert url == with_port(url)
def test_valid_restricted_schemes(self, url):
    """A URL with a whitelisted scheme is accepted unchanged."""
    restricted = inputs.URL(schemes=('sip', 'irc'))
    result = restricted(url)
    assert result == url
def test_bad_urls_with_suggestion(self, url):
    """Scheme-less URLs are rejected with an 'http://' prefix suggestion."""
    plain = inputs.URL()
    self.assert_bad_url(plain, url, 'Did you mean: http://{0}')
def test_valid_restricted_domains(self, url):
    """URLs on the domain whitelist are accepted unchanged."""
    allowed_only = inputs.URL(domains=['example.com', 'www.example.com'])
    assert url == allowed_only(url)
def test_reject_ip(self, url):
    """IP-address hosts are rejected by the default validator (ip=False)."""
    no_ip = inputs.URL()
    self.assert_bad_url(no_ip, url, 'IP is not allowed')
def test_valid_excluded_domains(self, url):
    """URLs not on the exclude list pass through unchanged."""
    excluder = inputs.URL(exclude=['example.com', 'www.example.com'])
    assert url == excluder(url)
def test_reject_auth(self, url):
    """Embedded credentials are rejected when auth=False (the default)."""
    # ip and port are enabled so only the auth part can trigger the rejection
    no_auth = inputs.URL(ip=True, port=True)
    self.assert_bad_url(no_auth, url, 'Authentication is not allowed')
def test_allow_auth(self, url):
    """auth=True accepts URLs embedding user credentials."""
    with_auth = inputs.URL(auth=True)
    assert url == with_auth(url)
def test_reject_local(self, url):
    """Localhost URLs are rejected when local=False (the default)."""
    # Enable ip so that only the localhost check can trigger the rejection
    validator = inputs.URL(ip=True)
    self.assert_bad_url(validator, url, 'Localhost is not allowed')
class Parser:
    """Namespace grouping the reqparse request parsers for every API endpoint.

    Each attribute is a ready-to-use ``reqparse.RequestParser`` configured
    for one request; they are shared class-level objects, not per-instance.
    """

    # Session
    SessionPostRequest = reqparse.RequestParser()
    SessionPostRequest.add_argument("password", type=str, required=True, location="json")

    # Audit
    AuditListGetRequest = reqparse.RequestParser()
    AuditListGetRequest.add_argument("q", type=str, default="", location="args")
    AuditListGetRequest.add_argument("page", type=inputs.natural, default=0, location="args")
    AuditListGetRequest.add_argument(
        "count",
        type=inputs.int_range(1, AUDIT_LIST_MAX_COUNT),
        default=AUDIT_LIST_DEFAULT_COUNT,
        location="args",
    )
    AuditListPostRequest = reqparse.RequestParser()
    AuditListPostRequest.add_argument("name", type=inputs.regex("^.{1,128}$"), required=True, location="json")
    AuditListPostRequest.add_argument("description", type=inputs.regex("^.{,128}$"), default="", location="json")
    AuditItemGetRequest = reqparse.RequestParser()
    AuditItemGetRequest.add_argument("include_results", type=inputs.boolean, default=False, location="args")
    # PATCH accepts the same payload as audit creation (alias, same parser object)
    AuditPatchRequest = AuditListPostRequest

    # Scan
    ScanPostRequest = reqparse.RequestParser()
    ScanPostRequest.add_argument("name", type=inputs.regex("^.{1,128}$"), required=True, location="json")
    ScanPostRequest.add_argument("description", type=inputs.regex("^.{,128}$"), default="", location="json")
    ScanPostRequest.add_argument("target", type=inputs.regex("^.{1,128}$"), required=True, location="json")
    ScanPostRequest.add_argument("detection_module", type=inputs.regex("^.{1,128}$"), required=True, location="json")
    ScanPostRequest.add_argument("detection_mode", type=inputs.regex("^.{1,128}$"), required=True, location="json")
    ScanSchedulePostRequest = reqparse.RequestParser()
    ScanSchedulePostRequest.add_argument("scheduled_at", type=inputs.datetime_from_iso8601, required=True, location="json")
    ScanSchedulePostRequest.add_argument(
        "max_duration", type=inputs.int_range(1, SCAN_MAX_DURATION_IN_HOUR), required=True, location="json")
    ScanSchedulePostRequest.add_argument("rrule", type=inputs.regex("^RRULE:.{,128}$"), default="", location="json")

    # Integration
    IntegrationPatchRequest = reqparse.RequestParser()
    IntegrationPatchRequest.add_argument("url", type=inputs.URL(schemes=["https"], check=False), required=True, location="json")
    IntegrationPatchRequest.add_argument("verbose", type=inputs.boolean, default=False, location="json")
# NOTE(review): update_status_args is a RequestParser created earlier in the
# file (not visible in this chunk) — these calls extend it.
update_status_args.add_argument('id', type=str, required=True, default="", help='Item/draft id')
update_status_args.add_argument(
    'status', type=str, required=True, default="",
    help='Status to be updated (review - accepted - published)')

# Parser for the harvest endpoint; the URL type is deliberately permissive
# (any http/https target, localhost, explicit ports and raw IPs allowed).
harvest_args = reqparse.RequestParser()
harvest_args.add_argument('url', type=inputs.URL(schemes=['http', 'https'], local=True, port=True, ip=True),
                          required=True, help='Harvest url')
harvest_args.add_argument('harvester', type=str, required=True, default="opertusmundi",
                          help='Target catalogue type (opertusmundi/ckan/csw)')

# Parser for the ISO XML conversion endpoint.
iso_arg = reqparse.RequestParser()
iso_arg.add_argument('xml', type=str, required=True, help='Iso xml for conversion')