def compile(pattern: str, flags: re.RegexFlag = re.RegexFlag(0), force_raw: bool = False) -> Optional['SimplePattern']:
    """Try to reduce a regex *pattern* to a cheap string-matcher.

    Recognises the four simple shapes ``^...$`` (equals), ``^...``
    (startswith), ``...$`` (endswith) and a bare literal (contains).
    Returns None when the pattern uses any other regex feature.

    Args:
        pattern: The regular expression source text.
        flags: Only ``re.IGNORECASE`` (exactly) is honoured; it makes the
            matcher compare case-insensitively.
        force_raw: Treat *pattern* as a literal, skipping the escape check.

    Returns:
        A SimplePattern, or None if the pattern is not "simple".
    """
    ignorecase = flags == re.IGNORECASE
    s_pattern = pattern.lower() if ignorecase else pattern
    if not pattern:
        # An empty regex matches at every position, which the "contains"
        # matcher reproduces exactly ('' in s is always True).
        # Previously pattern[0] below raised IndexError for this input.
        return SimplePattern(matcher=matcher_contains, pattern=s_pattern, ignorecase=ignorecase)
    esc = ""
    if not force_raw:
        # If escaping only added backslashes around the anchors, the body
        # is a plain literal and a string matcher is equivalent.
        esc = re.escape(pattern)
    first, last = pattern[0], pattern[-1]
    if first == '^' and last == '$' and (force_raw or esc == f"\\^{pattern[1:-1]}\\$"):
        s_pattern = s_pattern[1:-1]
        func = matcher_equals
    elif first == '^' and (force_raw or esc == f"\\^{pattern[1:]}"):
        s_pattern = s_pattern[1:]
        func = matcher_startswith
    elif last == '$' and (force_raw or esc == f"{pattern[:-1]}\\$"):
        s_pattern = s_pattern[:-1]
        func = matcher_endswith
    elif force_raw or esc == pattern:
        func = matcher_contains
    else:
        # Not a simple pattern
        return None
    return SimplePattern(matcher=func, pattern=s_pattern, ignorecase=ignorecase)
def _parse_flags(raw_statement: str, allow_unknown_flags: bool = False ) -> Tuple[re.RegexFlag, bool, bool]: re_flags = { "a": re.ASCII, "i": re.IGNORECASE, "m": re.MULTILINE, "s": re.DOTALL, "t": re.TEMPLATE, } flags = re.UNICODE is_global = False no_underline = False for char in raw_statement.lower(): try: flags += re_flags[char] except KeyError: if char == "g": is_global = True elif char == "u": no_underline = True elif not allow_unknown_flags: raise ValueError(f"Unknown flag {char}") elif char not in string.ascii_lowercase: break return re.RegexFlag(flags), is_global, no_underline
def parse_data(self) -> None:
    """Parse the raw mapping data into flags, templates and rules."""
    # Reset to the neutral flag value first — presumably _get_flags (or
    # something it calls) may consult self.default_flags while resolving
    # the configured value; TODO confirm, otherwise this line is redundant.
    self.default_flags = re.RegexFlag(0)
    self.default_flags = self._get_flags(self["default_flags"])
    # Templates are built before rules; NOTE(review): looks like rules may
    # reference templates by name — verify against _make_rule.
    self.templates = {name: self._make_template(name, tpl) for name, tpl in self["templates"].items()}
    self.rules = {name: self._make_rule(name, rule) for name, rule in self["rules"].items()}
def apply_transformation(self, transformation_input):
    """Append ``self.match_fragment`` to the style of every character that
    belongs to an occurrence of any entry in ``self.selected_entries``.

    Returns a Transformation over the (possibly restyled) fragments.
    """
    (
        buffer_control,
        document,
        lineno,
        source_to_display,
        fragments,
        _,
        _,
    ) = transformation_input.unpack()
    # Skip entirely when nothing is selected or the app is finishing.
    if self.selected_entries and not get_app().is_done:
        # For each search match, replace the style string.
        line_text = fragment_list_to_text(fragments)
        # Explode into one fragment per character so matches can be
        # restyled at character granularity.
        fragments = explode_text_fragments(fragments)
        # Alternation of the literal (escaped) entries; matched
        # case-sensitively (flags fixed to the empty RegexFlag).
        pattern = "|".join(re.escape(key) for key in self.selected_entries)
        matches = re.finditer(pattern, line_text, flags=re.RegexFlag(0))
        for match in matches:
            for i in range(match.start(), match.end()):
                old_fragment, text, *_ = fragments[i]
                fragments[i] = (
                    old_fragment + self.match_fragment,
                    fragments[i][1],
                )
    return Transformation(fragments)
def compile(cls, pattern, flags=0):
    """Build an instance of *cls* holding *pattern* and its compiled sre code.

    The stored flags combine the caller-supplied *flags* with any inline
    flags discovered by the parser (``p.state.flags``).
    """
    parsed = sre_parse.parse(pattern)
    instance = cls()
    instance.pattern = pattern
    instance.code = sre_compile._code(parsed, flags)
    instance.flags = re.RegexFlag(flags | parsed.state.flags)
    return instance
def apply_transformation(
        self, transformation_input: TransformationInput) -> Transformation:
    """Highlight all occurrences of the active search text in this line.

    Each matched character gets the 'searchmatch' style class appended;
    the match containing the cursor gets the 'current' variant instead.
    """
    (
        buffer_control,
        document,
        lineno,
        source_to_display,
        fragments,
        _,
        _,
    ) = transformation_input.unpack()
    search_text = self._get_search_text(buffer_control)
    # Style strings are appended to the existing fragment style; the
    # surrounding spaces keep class names separated.
    searchmatch_fragment = " class:%s " % (self._classname, )
    searchmatch_current_fragment = " class:%s " % (
        self._classname_current, )
    if search_text and not get_app().is_done:
        # For each search match, replace the style string.
        line_text = fragment_list_to_text(fragments)
        # One fragment per character, so matches can be styled per-char.
        fragments = explode_text_fragments(fragments)
        if buffer_control.search_state.ignore_case():
            flags = re.IGNORECASE
        else:
            flags = re.RegexFlag(0)
        # Get cursor column.
        cursor_column: Optional[int]
        if document.cursor_position_row == lineno:
            cursor_column = source_to_display(document.cursor_position_col)
        else:
            cursor_column = None
        # The search text is matched literally (escaped), not as a regex.
        for match in re.finditer(re.escape(search_text), line_text,
                                 flags=flags):
            if cursor_column is not None:
                on_cursor = match.start() <= cursor_column < match.end()
            else:
                on_cursor = False
            for i in range(match.start(), match.end()):
                old_fragment, text, *_ = fragments[i]
                if on_cursor:
                    fragments[i] = (
                        old_fragment + searchmatch_current_fragment,
                        fragments[i][1],
                    )
                else:
                    fragments[i] = (
                        old_fragment + searchmatch_fragment,
                        fragments[i][1],
                    )
    return Transformation(fragments)
def build_tokenizer(
        normal_tokens: Tuple[Tuple[TokenName, Optional[str]], ...],
        flags: re.RegexFlag = re.RegexFlag(0),
        *,
        ignored_tokens: Tuple[Tuple[IgnoredToken, str], ...] = (),
        middleware: Optional[Dict[TokenName, Callable[[str], Tuple[TokenName, str]]]] = None,
) -> Tokenizer[TokenName, IgnoredToken]:
    """Build a Tokenizer from token-name/pattern pairs.

    Args:
        normal_tokens: (name, pattern) pairs; entries that are falsy are
            filtered out before building the combined regexp.
        flags: Regex flags for the combined pattern.
        ignored_tokens: (name, pattern) pairs that are matched but dropped.
        middleware: Optional per-token post-processing callables.

    Returns:
        The configured Tokenizer.
    """
    # A shared mutable `{}` default is the classic Python pitfall; use a
    # None sentinel instead (backward-compatible for all callers).
    if middleware is None:
        middleware = {}
    return Tokenizer(build_regexp(
        tuple(filter(None, normal_tokens)) + ignored_tokens, flags),
        ignore=[name for name, _pattern in ignored_tokens],
        middleware=_make_middleware(middleware))  # type: ignore
def _get_flags(flags: Union[str, List[str]]) -> re.RegexFlag: output = re.RegexFlag(0) for flag in flags: flag = flag.lower() if flag == "i" or flag == "ignorecase": output |= re.IGNORECASE elif flag == "s" or flag == "dotall": output |= re.DOTALL elif flag == "x" or flag == "verbose": output |= re.VERBOSE elif flag == "m" or flag == "multiline": output |= re.MULTILINE elif flag == "l" or flag == "locale": output |= re.LOCALE elif flag == "u" or flag == "unicode": output |= re.UNICODE elif flag == "a" or flag == "ascii": output |= re.ASCII return output
def compile(pattern: str, flags: re.RegexFlag = re.RegexFlag(0),
            force_raw: bool = False) -> Optional['SimplePattern']:
    """Try to reduce a regex *pattern* to a cheap string predicate.

    Recognises ``^...$`` (equals), ``^...`` (startswith), ``...$``
    (endswith) and a bare literal (contains); anything else returns None.

    Args:
        pattern: The regular expression source text.
        flags: Only ``re.IGNORECASE`` (exactly) is honoured; it lowercases
            the stored pattern so callers can compare lowered values.
        force_raw: Treat *pattern* as a literal, skipping the escape check.
    """
    ignorecase = flags == re.IGNORECASE
    s_pattern = pattern.lower() if ignorecase else pattern
    if not pattern:
        # An empty regex matches everywhere; the "contains" predicate
        # reproduces that ('' in val is always True). Previously
        # pattern[0] below raised IndexError for an empty string.
        return SimplePattern(lambda val: s_pattern in val, ignorecase=ignorecase)
    esc = ""
    if not force_raw:
        # If escaping only added backslashes around the anchors, the body
        # is a plain literal and a string predicate is equivalent.
        esc = re.escape(pattern)
    first, last = pattern[0], pattern[-1]
    if first == '^' and last == '$' and (force_raw or esc == f"\\^{pattern[1:-1]}\\$"):
        s_pattern = s_pattern[1:-1]
        return SimplePattern(lambda val: val == s_pattern, ignorecase=ignorecase)
    elif first == '^' and (force_raw or esc == f"\\^{pattern[1:]}"):
        s_pattern = s_pattern[1:]
        return SimplePattern(lambda val: val.startswith(s_pattern), ignorecase=ignorecase)
    elif last == '$' and (force_raw or esc == f"{pattern[:-1]}\\$"):
        s_pattern = s_pattern[:-1]
        return SimplePattern(lambda val: val.endswith(s_pattern), ignorecase=ignorecase)
    elif force_raw or esc == pattern:
        return SimplePattern(lambda val: s_pattern in val, ignorecase=ignorecase)
    # Not a simple pattern.
    return None
def parse_regex_flags(raw_flags: str = 'gim'):
    """
    parse flags user input and convert them to re flags.

    Args:
        raw_flags: string chars representing er flags

    Returns:
        (re flags, whether to return multiple matches)
    """
    # compatibility with original MatchRegex script.
    cleaned = raw_flags.lstrip('-')
    # 'g' is not a Python re flag: it only selects "all matches" mode.
    multiple_matches = 'g' in cleaned
    combined = re.RegexFlag(0)
    for letter in cleaned.replace('g', ''):
        if letter not in LETTER_TO_REGEX_FLAGS:
            raise ValueError(f'Invalid regex flag "{letter}".\n'
                             f'Supported flags are {", ".join(LETTER_TO_REGEX_FLAGS.keys())}')
        combined |= LETTER_TO_REGEX_FLAGS[letter]
    return combined, multiple_matches
# Auto-generated Django migration: field alterations on Person and Event.
class Migration(migrations.Migration):

    dependencies = [
        ('workshops', '0225_remove_event_admin_fee'),
    ]

    operations = [
        migrations.AlterField(
            model_name='person',
            name='twitter',
            field=models.CharField(blank=True, max_length=100, null=True,
                                   unique=True,
                                   verbose_name='Twitter username'),
        ),
        migrations.AlterField(
            model_name='person',
            name='username',
            # re.RegexFlag(256) is re.ASCII (makemigrations serialises
            # flags numerically).
            field=models.CharField(max_length=100, unique=True, validators=[
                django.core.validators.RegexValidator(
                    '^[\\w\\-_]+$', flags=re.RegexFlag(256))
            ]),
        ),
        migrations.AlterField(
            model_name='event',
            name='address',
            field=models.CharField(
                blank=True, default='',
                help_text='Required in order for this event to be "published".',
                max_length=350),
        ),
    ]
def encode(self, arg: typing.Union[Field, typing.Type[SchemaBase], typing.Type[Schema]]) -> typing.Union[bool, dict]:
    """Convert a field or schema class into its JSON Schema representation.

    Returns True for ``Any`` (accepts everything), False for ``NeverMatch``
    (accepts nothing), otherwise a JSON Schema fragment as a dict.
    Referenced definitions are accumulated in ``self.definitions`` as a
    side effect.

    Raises:
        ValueError: For regex flags that JSON Schema cannot express, or
            for field types with no JSON Schema equivalent.
    """
    # JSON Schema boolean forms: `true` matches any instance, `false` none.
    if isinstance(arg, Any):
        return True
    if isinstance(arg, NeverMatch):
        return False
    data: dict = {}
    if isinstance(arg, Field):
        field = arg
    elif isinstance(arg, SchemaDefinitions):
        field = None
        # NOTE(review): `field` is None here, so `field.items()` raises
        # AttributeError — this almost certainly should be `arg.items()`.
        for key, value in field.items():
            self.definitions[key] = self.encode(value)
        return {}
    else:
        # A schema class: obtain its composite field representation.
        try:
            field = arg.make_schema()
        except AttributeError:
            field = arg.make_validator()
    if isinstance(field, Reference):
        data["$ref"] = f"#/{self.definition_base}/{field.target_string}"
        # Encode the target once; the membership test breaks recursion.
        # NOTE(review): membership is checked with `field.target` but the
        # entry is stored under `field.target_string` — confirm these agree.
        if field.target not in self.definitions:
            self.definitions[field.target_string] = self.encode(field.target)
    elif isinstance(field, String):
        data["type"] = ["string", "null"] if field.allow_null else "string"
        data.update(self.get_standard_properties(field))
        # A non-blank string implies at least one character.
        if field.min_length is not None or not field.allow_blank:
            data["minLength"] = field.min_length or 1
        if field.max_length is not None:
            data["maxLength"] = field.max_length
        if field.pattern_regex is not None:
            # Only the default (UNICODE-only) flags can be represented in
            # JSON Schema; anything else is rejected.
            if field.pattern_regex.flags != re.RegexFlag.UNICODE:
                flags = re.RegexFlag(field.pattern_regex.flags)
                raise ValueError(
                    f"Cannot convert regular expression with non-standard flags "
                    f"to JSON schema: {flags!s}"
                )
            data["pattern"] = field.pattern_regex.pattern
        if field.format is not None:
            data["format"] = field.format
    elif isinstance(field, (Integer, Float, Decimal)):
        base_type = "integer" if isinstance(field, Integer) else "number"
        data["type"] = [base_type, "null"] if field.allow_null else base_type
        data.update(self.get_standard_properties(field))
        if field.minimum is not None:
            data["minimum"] = field.minimum
        if field.maximum is not None:
            data["maximum"] = field.maximum
        if field.exclusive_minimum is not None:
            data["exclusiveMinimum"] = field.exclusive_minimum
        if field.exclusive_maximum is not None:
            data["exclusiveMaximum"] = field.exclusive_maximum
        if field.multiple_of is not None:
            data["multipleOf"] = field.multiple_of
    elif isinstance(field, Boolean):
        data["type"] = ["boolean", "null"] if field.allow_null else "boolean"
        data.update(self.get_standard_properties(field))
    elif isinstance(field, Array):
        data["type"] = ["array", "null"] if field.allow_null else "array"
        data.update(self.get_standard_properties(field))
        if field.min_items is not None:
            data["minItems"] = field.min_items
        if field.max_items is not None:
            data["maxItems"] = field.max_items
        if field.items is not None:
            # A list/tuple of items means positional (tuple) validation.
            if isinstance(field.items, (list, tuple)):
                data["items"] = [self.encode(item) for item in field.items]
            else:
                data["items"] = self.encode(field.items)
        if field.additional_items is not None:
            if isinstance(field.additional_items, bool):
                data["additionalItems"] = field.additional_items
            else:
                data["additionalItems"] = self.encode(field.additional_items)
        # Emitted for any value other than the literal False (incl. truthy).
        if field.unique_items is not False:
            data["uniqueItems"] = True
    elif isinstance(field, Object):
        data["type"] = ["object", "null"] if field.allow_null else "object"
        data.update(self.get_standard_properties(field))
        if field.properties:
            data["properties"] = {
                key: self.encode(value)
                for key, value in field.properties.items()
            }
        if field.pattern_properties:
            data["patternProperties"] = {
                key: self.encode(value)
                for key, value in field.pattern_properties.items()
            }
        if field.additional_properties is not None:
            if isinstance(field.additional_properties, bool):
                data["additionalProperties"] = field.additional_properties
            else:
                data["additionalProperties"] = self.encode(field.additional_properties)
        if field.property_names is not None:
            data["propertyNames"] = self.encode(field.property_names)
        if field.max_properties is not None:
            data["maxProperties"] = field.max_properties
        if field.min_properties is not None:
            data["minProperties"] = field.min_properties
        if field.required:
            data["required"] = field.required
        # Schema classes may flag read-only properties via their _meta.
        # NOTE(review): assumes "properties" was set above — an Object with
        # read_only keys but empty properties would KeyError; confirm.
        if is_schema(arg) and hasattr(arg, '_meta'):
            if arg._meta.read_only:
                for key in arg._meta.read_only:
                    if key in data["properties"]:
                        data["properties"][key]["readOnly"] = True
    elif isinstance(field, Choice):
        data["enum"] = [key for key, value in field.choices]
        data["enumNames"] = [value for key, value in field.choices]
        data.update(self.get_standard_properties(field))
    elif isinstance(field, Const):
        data["const"] = field.const
        data.update(self.get_standard_properties(field))
    elif isinstance(field, Union):
        data["anyOf"] = [
            self.encode(item) for item in field.any_of
        ]
        data.update(self.get_standard_properties(field))
    elif isinstance(field, OneOf):
        data["oneOf"] = [
            self.encode(item) for item in field.one_of
        ]
        data.update(self.get_standard_properties(field))
    elif isinstance(field, AllOf):
        data["allOf"] = [
            self.encode(item) for item in field.all_of
        ]
        data.update(self.get_standard_properties(field))
    elif isinstance(field, IfThenElse):
        data["if"] = self.encode(field.if_clause)
        if field.then_clause is not None:
            data["then"] = self.encode(field.then_clause)
        if field.else_clause is not None:
            data["else"] = self.encode(field.else_clause)
        data.update(self.get_standard_properties(field))
    elif isinstance(field, Not):
        data["not"] = self.encode(field.negated)
        data.update(self.get_standard_properties(field))
    elif field is not None:
        name = type(field).__qualname__
        raise ValueError(f"Cannot convert field type {name!r} to JSON Schema")
    return data
'bitcoin': re.compile(r'(?i)\b(?P<url>(?:(?:bitcoin|btc):)?[13][a-z0-9]{27,34})\b', re.ASCII), # ETH 'ethereum': re.compile(r'(?i)\b(?P<url>(?:(?:ethereum|eth):)?(?:0x)?[0-9a-f]{40})\b', re.ASCII), # HTTP(S) and other *regular* URLs, e.g. WebSocket, IRC, etc. #re.compile(r'(?P<url>((https?|wss?|irc):)?(//)?\w+(\.\w+)+/?\S*)', re.UNICODE), # bitcoin / data / ed2k / magnet / mail / script / tel, etc. #re.compile(r'(?P<url>(bitcoin|data|ed2k|magnet|mailto|script|tel):\w+)', re.ASCII), } URL_PAT.update({ scheme: re.compile(pattern, re.RegexFlag(flags) | re.ASCII) # pattern string + compiling flags for scheme, pattern, flags in json.loads(os.getenv('DARC_URL_PAT', '[]')) }) def match_proxy(proxy: str) -> bool: """Check if proxy type in black list. Args: proxy: Proxy type to be checked. Returns: If ``proxy`` in black list. Note:
def _reflags(self) -> re.RegexFlag: flags = re.RegexFlag(0) for key, flag in self._flagmap.items(): if key in self._flags: flags |= flag return flags
def test_unknown_flag(self) -> None:
    """An unrecognised flag name is ignored, yielding the empty flag value."""
    result = regex.create_regex_flags(["NOT-A-FLAG"])
    assert result == re.RegexFlag(0)
# Auto-generated initial Django migration for the cahgen app:
# creates CardsList, PackProfile, PDF and RenderSpec plus their relations.
class Migration(migrations.Migration):

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='CardsList',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('name', models.CharField(max_length=50)),
                ('cards', models.TextField(default='Example card')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='PackProfile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                # re.RegexFlag(2) is re.IGNORECASE (serialised numerically).
                ('value', models.CharField(max_length=6, validators=[
                    django.core.validators.RegexValidator(
                        flags=re.RegexFlag(2), regex='[0-9a-f]{6}')
                ])),
                ('color_name', models.CharField(max_length=30, null=True)),
                ('owner', models.ForeignKey(
                    on_delete=django.db.models.deletion.CASCADE,
                    related_name='packprofiles',
                    to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='PDF',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('uuid', models.UUIDField(default=uuid.uuid4, editable=False,
                                          unique=True, verbose_name='ID')),
                ('generated_content', models.BinaryField()),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='RenderSpec',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('name', models.CharField(max_length=50)),
                ('packs', models.ManyToManyField(to='cahgen.CardsList')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.AddField(
            model_name='pdf',
            name='render_spec',
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to='cahgen.RenderSpec'),
        ),
        migrations.AddField(
            model_name='cardslist',
            name='profile',
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to='cahgen.PackProfile'),
        ),
    ]
def test_empty_list(self) -> None:
    """With no flag names given, the combined value is the empty flag."""
    result = regex.create_regex_flags([])
    assert result == re.RegexFlag(0)
def create_regex_flags(flag_strings: List[str]) -> re.RegexFlag:
    """Combine named ``re`` flags (e.g. ``"IGNORECASE"``) into one value.

    Unknown names are ignored. Names that resolve to a non-flag attribute
    of ``re`` (e.g. ``"compile"``) are also ignored — previously such names
    were OR-ed in blindly, raising a TypeError at best.

    Args:
        flag_strings: Flag attribute names as found on the ``re`` module.

    Returns:
        The bitwise OR of all recognised flags (RegexFlag(0) if none).
    """
    combined = re.RegexFlag(0)
    for name in flag_strings:
        value = getattr(re, name, None)
        # Only accept genuine RegexFlag members; this also makes the
        # return type exact, removing the need for a cast/type-ignore.
        if isinstance(value, re.RegexFlag):
            combined |= value
    return combined
# Auto-generated Django migration (peeringdb_server): bulk AlterField pass
# over help_texts, choices and validators. Original (generated) typos in
# help_text strings are preserved deliberately — they are runtime data.
class Migration(migrations.Migration):

    dependencies = [
        ('peeringdb_server', '0028_ixlan_remove_auto_increment'),
    ]

    operations = [
        migrations.AlterField(
            model_name='commandlinetool',
            name='created',
            field=models.DateTimeField(
                auto_now_add=True,
                help_text='command was run at this date and time'),
        ),
        migrations.AlterField(
            model_name='commandlinetool',
            name='status',
            field=models.CharField(choices=[('done', 'Done'),
                                            ('waiting', 'Waiting'),
                                            ('running', 'Running')],
                                   default='done', max_length=255),
        ),
        migrations.AlterField(
            model_name='commandlinetool',
            name='tool',
            field=models.CharField(choices=[
                ('pdb_renumber_lans', 'Renumber IP Space'),
                ('pdb_fac_merge', 'Merge Facilities'),
                ('pdb_fac_merge_undo', 'Merge Facilities: UNDO'),
                ('pdb_undelete', 'Restore Object(s)'),
                ('pdb_ixf_ixp_member_import', 'IX-F Import')
            ], help_text='name of the tool', max_length=255),
        ),
        migrations.AlterField(
            model_name='commandlinetool',
            name='user',
            field=models.ForeignKey(
                help_text='the user that ran this command',
                on_delete=django.db.models.deletion.CASCADE,
                related_name='clt_history', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='facility',
            name='geocode_date',
            field=models.DateTimeField(
                blank=True, help_text='Last time of attempted geocode',
                null=True),
        ),
        migrations.AlterField(
            model_name='facility',
            name='geocode_error',
            field=models.TextField(
                blank=True,
                help_text='Error message of previous geocode attempt',
                null=True),
        ),
        migrations.AlterField(
            model_name='facility',
            name='geocode_status',
            field=models.BooleanField(
                default=False,
                help_text=
                "Has this object's latitude and longitude been syncronized to it's address fields"
            ),
        ),
        migrations.AlterField(
            model_name='facility',
            name='latitude',
            field=models.DecimalField(blank=True, decimal_places=6,
                                      help_text='Latitude', max_digits=9,
                                      null=True),
        ),
        migrations.AlterField(
            model_name='facility',
            name='longitude',
            field=models.DecimalField(blank=True, decimal_places=6,
                                      help_text='Longitude', max_digits=9,
                                      null=True),
        ),
        migrations.AlterField(
            model_name='internetexchange',
            name='media',
            field=models.CharField(choices=[('Ethernet', 'Ethernet'),
                                            ('ATM', 'ATM'),
                                            ('Multiple', 'Multiple')],
                                   max_length=128),
        ),
        migrations.AlterField(
            model_name='internetexchange',
            name='region_continent',
            field=models.CharField(choices=[('North America', 'North America'),
                                            ('Asia Pacific', 'Asia Pacific'),
                                            ('Europe', 'Europe'),
                                            ('South America', 'South America'),
                                            ('Africa', 'Africa'),
                                            ('Australia', 'Australia'),
                                            ('Middle East', 'Middle East')],
                                   max_length=255),
        ),
        migrations.AlterField(
            model_name='ixlanprefix',
            name='protocol',
            field=models.CharField(choices=[('IPv4', 'IPv4'),
                                            ('IPv6', 'IPv6')],
                                   max_length=64),
        ),
        migrations.AlterField(
            model_name='network',
            name='allow_ixp_update',
            field=models.BooleanField(
                default=False,
                help_text=
                'Sepcifies whether an ixp is allowed to add a netixlan entry for this network via their ixp_member data'
            ),
        ),
        migrations.AlterField(
            model_name='network',
            name='info_never_via_route_servers',
            field=models.BooleanField(
                default=False,
                help_text=
                'Indicates if this network will announce its routes via rout servers or not'
            ),
        ),
        migrations.AlterField(
            model_name='network',
            name='info_ratio',
            field=models.CharField(blank=True, choices=[
                ('', 'Not Disclosed'),
                ('Not Disclosed', 'Not Disclosed'),
                ('Heavy Outbound', 'Heavy Outbound'),
                ('Mostly Outbound', 'Mostly Outbound'),
                ('Balanced', 'Balanced'),
                ('Mostly Inbound', 'Mostly Inbound'),
                ('Heavy Inbound', 'Heavy Inbound')
            ], default='Not Disclosed', max_length=45),
        ),
        migrations.AlterField(
            model_name='network',
            name='info_scope',
            field=models.CharField(blank=True,
                                   choices=[('', 'Not Disclosed'),
                                            ('Not Disclosed', 'Not Disclosed'),
                                            ('Regional', 'Regional'),
                                            ('North America', 'North America'),
                                            ('Asia Pacific', 'Asia Pacific'),
                                            ('Europe', 'Europe'),
                                            ('South America', 'South America'),
                                            ('Africa', 'Africa'),
                                            ('Australia', 'Australia'),
                                            ('Middle East', 'Middle East'),
                                            ('Global', 'Global')],
                                   default='Not Disclosed',
                                   max_length=39),
        ),
        migrations.AlterField(
            model_name='network',
            name='info_traffic',
            field=models.CharField(blank=True,
                                   choices=[('', 'Not Disclosed'),
                                            ('0-20 Mbps', '0-20 Mbps'),
                                            ('20-100Mbps', '20-100Mbps'),
                                            ('100-1000Mbps', '100-1000Mbps'),
                                            ('1-5Gbps', '1-5Gbps'),
                                            ('5-10Gbps', '5-10Gbps'),
                                            ('10-20Gbps', '10-20Gbps'),
                                            ('20-50 Gbps', '20-50 Gbps'),
                                            ('50-100 Gbps', '50-100 Gbps'),
                                            ('100+ Gbps', '100+ Gbps'),
                                            ('100-200 Gbps', '100-200 Gbps'),
                                            ('200-300 Gbps', '200-300 Gbps'),
                                            ('300-500 Gbps', '300-500 Gbps'),
                                            ('500-1000 Gbps', '500-1000 Gbps'),
                                            ('1 Tbps+', '1 Tbps+'),
                                            ('10 Tbps+', '10 Tbps+')],
                                   max_length=39),
        ),
        migrations.AlterField(
            model_name='network',
            name='info_type',
            field=models.CharField(blank=True,
                                   choices=[('', 'Not Disclosed'),
                                            ('Not Disclosed', 'Not Disclosed'),
                                            ('NSP', 'NSP'),
                                            ('Content', 'Content'),
                                            ('Cable/DSL/ISP', 'Cable/DSL/ISP'),
                                            ('Enterprise', 'Enterprise'),
                                            ('Educational/Research', 'Educational/Research'),
                                            ('Non-Profit', 'Non-Profit'),
                                            ('Route Server', 'Route Server')],
                                   default='Not Disclosed',
                                   max_length=60),
        ),
        migrations.AlterField(
            model_name='network',
            name='policy_contracts',
            field=models.CharField(blank=True,
                                   choices=[('Not Required', 'Not Required'),
                                            ('Private Only', 'Private Only'),
                                            ('Required', 'Required')],
                                   max_length=36),
        ),
        migrations.AlterField(
            model_name='network',
            name='policy_general',
            field=models.CharField(blank=True,
                                   choices=[('Open', 'Open'),
                                            ('Selective', 'Selective'),
                                            ('Restrictive', 'Restrictive'),
                                            ('No', 'No')],
                                   max_length=72),
        ),
        migrations.AlterField(
            model_name='network',
            name='policy_locations',
            field=models.CharField(blank=True,
                                   choices=[('Not Required', 'Not Required'),
                                            ('Preferred', 'Preferred'),
                                            ('Required - US', 'Required - US'),
                                            ('Required - EU', 'Required - EU'),
                                            ('Required - International', 'Required - International')],
                                   max_length=72),
        ),
        migrations.AlterField(
            model_name='networkcontact',
            name='role',
            field=models.CharField(choices=[('Abuse', 'Abuse'),
                                            ('Maintenance', 'Maintenance'),
                                            ('Policy', 'Policy'),
                                            ('Technical', 'Technical'),
                                            ('NOC', 'NOC'),
                                            ('Public Relations', 'Public Relations'),
                                            ('Sales', 'Sales')],
                                   max_length=27),
        ),
        migrations.AlterField(
            model_name='networkcontact',
            name='visible',
            field=models.CharField(choices=[('Private', 'Private'),
                                            ('Users', 'Users'),
                                            ('Public', 'Public')],
                                   default='Public', max_length=64),
        ),
        migrations.AlterField(
            model_name='organization',
            name='latitude',
            field=models.DecimalField(blank=True, decimal_places=6,
                                      help_text='Latitude', max_digits=9,
                                      null=True),
        ),
        migrations.AlterField(
            model_name='organization',
            name='logo',
            field=models.FileField(
                blank=True,
                help_text=
                'Allows you to upload and set a logo image file for this organization',
                null=True, upload_to='logos/'),
        ),
        migrations.AlterField(
            model_name='organization',
            name='longitude',
            field=models.DecimalField(blank=True, decimal_places=6,
                                      help_text='Longitude', max_digits=9,
                                      null=True),
        ),
        migrations.AlterField(
            model_name='partnership',
            name='logo',
            field=models.FileField(
                blank=True,
                help_text=
                'Allows you to upload and set a logo image file for this partnership',
                null=True, upload_to='logos/'),
        ),
        migrations.AlterField(
            model_name='sponsorshiporganization',
            name='logo',
            field=models.FileField(
                blank=True,
                help_text=
                'Allows you to upload and set a logo image file for this sponsorship',
                null=True, upload_to='logos/'),
        ),
        migrations.AlterField(
            model_name='user',
            name='status',
            field=models.CharField(default='ok', max_length=254,
                                   verbose_name='status'),
        ),
        migrations.AlterField(
            model_name='user',
            name='username',
            # re.RegexFlag(32) is re.UNICODE (serialised numerically).
            field=models.CharField(
                help_text='Required. Letters, digits and [@.+-/_=|] only.',
                max_length=254, unique=True,
                validators=[
                    django.core.validators.RegexValidator(
                        '^[\\w\\.@+-=|/]+$', 'Enter a valid username.',
                        'invalid', flags=re.RegexFlag(32))
                ],
                verbose_name='username'),
        ),
        migrations.AlterField(
            model_name='userorgaffiliationrequest',
            name='asn',
            field=django_inet.models.ASNField(
                blank=True, help_text='The ASN entered by the user',
                null=True),
        ),
        migrations.AlterField(
            model_name='userorgaffiliationrequest',
            name='org',
            field=models.ForeignKey(
                blank=True,
                help_text=
                'This organization in our database that was derived from the provided ASN or organization name. If this is empty it means no matching organization was found.',
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name='affiliation_requests',
                to='peeringdb_server.Organization'),
        ),
        migrations.AlterField(
            model_name='userorgaffiliationrequest',
            name='org_name',
            field=models.CharField(
                blank=True,
                help_text='The organization name entered by the user',
                max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name='userorgaffiliationrequest',
            name='status',
            field=models.CharField(choices=[('pending', 'Pending'),
                                            ('approved', 'Approved'),
                                            ('denied', 'Denied')],
                                   help_text='Status of this request',
                                   max_length=254),
        ),
        migrations.AlterField(
            model_name='userorgaffiliationrequest',
            name='user',
            field=models.ForeignKey(
                help_text='The user that made the request',
                on_delete=django.db.models.deletion.CASCADE,
                related_name='affiliation_requests',
                to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='verificationqueueitem',
            name='user',
            field=models.ForeignKey(
                blank=True,
                help_text=
                'The item that this queue is attached to was created by this user',
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name='vqitems', to=settings.AUTH_USER_MODEL),
        ),
    ]
# Auto-generated initial Django migration for the deceases app: creates the
# MPTT trees (BodyPart, Symptom), the Decease catalogue and the patient
# link tables. Typos in generated validator messages are preserved — they
# are runtime data, not comments.
class Migration(migrations.Migration):

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('accounts', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='BodyPart',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=256, unique=True)),
                # lft/rght/tree_id/level are django-mptt bookkeeping columns.
                ('lft', models.PositiveIntegerField(db_index=True, editable=False)),
                ('rght', models.PositiveIntegerField(db_index=True, editable=False)),
                ('tree_id', models.PositiveIntegerField(db_index=True, editable=False)),
                ('level', models.PositiveIntegerField(db_index=True, editable=False)),
                ('parent', mptt.fields.TreeForeignKey(
                    blank=True, null=True,
                    on_delete=django.db.models.deletion.CASCADE,
                    related_name='children', to='deceases.BodyPart')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Decease',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                ('name', models.CharField(db_index=True, max_length=256, unique=True)),
                ('chronic', models.BooleanField(default=False)),
                ('duration', models.PositiveSmallIntegerField(default=10)),
                # Percentages, capped at 100 by validator.
                ('contagiousness', models.PositiveSmallIntegerField(validators=[
                    django.core.validators.MaxValueValidator(100)
                ])),
                ('malignancy', models.PositiveSmallIntegerField(validators=[
                    django.core.validators.MaxValueValidator(100)
                ])),
                ('description', models.TextField()),
                ('diagnostics', models.TextField(blank=True, null=True)),
                ('treatment', models.TextField(blank=True, null=True)),
                ('passing', models.TextField(blank=True, null=True)),
                ('recommendations', models.TextField(blank=True, null=True)),
                ('occurrence', models.PositiveIntegerField(default=1)),
            ],
            options={
                'ordering': ['name'],
            },
        ),
        migrations.CreateModel(
            name='DeceaseAgeGapGender',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                ('number', models.PositiveIntegerField(
                    verbose_name=
                    'number of people in average to get decease from 10^6')),
                ('age_gap', models.ForeignKey(
                    on_delete=django.db.models.deletion.CASCADE,
                    to='accounts.AgeGap')),
                ('decease', models.ForeignKey(
                    on_delete=django.db.models.deletion.CASCADE,
                    to='deceases.Decease')),
                ('gender', models.ForeignKey(
                    on_delete=django.db.models.deletion.CASCADE,
                    to='accounts.Gender')),
            ],
        ),
        migrations.CreateModel(
            name='DeceaseSymptom',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                ('chances', models.PositiveSmallIntegerField(
                    default=50,
                    validators=[
                        django.core.validators.MaxValueValidator(100)
                    ])),
                ('occurrence', models.PositiveIntegerField(default=1)),
                ('decease', models.ForeignKey(
                    on_delete=django.db.models.deletion.CASCADE,
                    to='deceases.Decease')),
            ],
        ),
        migrations.CreateModel(
            name='PatientDecease',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                ('start_date', models.DateField(default=django.utils.timezone.now)),
                ('end_date', models.DateField(blank=True, null=True)),
                ('cured', models.BooleanField(default=False)),
                ('author', models.ForeignKey(
                    blank=True,
                    on_delete=django.db.models.deletion.PROTECT,
                    to=settings.AUTH_USER_MODEL)),
                ('decease', models.ForeignKey(
                    on_delete=django.db.models.deletion.PROTECT,
                    to='deceases.Decease')),
                ('patient', models.ForeignKey(
                    on_delete=django.db.models.deletion.PROTECT,
                    to='accounts.Patient')),
            ],
        ),
        migrations.CreateModel(
            name='PatientSymptomDecease',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                ('patient_decease', models.ForeignKey(
                    on_delete=django.db.models.deletion.PROTECT,
                    related_name='symptoms',
                    to='deceases.PatientDecease')),
            ],
        ),
        migrations.CreateModel(
            name='Sphere',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=256, unique=True,
                                          verbose_name='name')),
            ],
            options={
                'ordering': ['name'],
            },
        ),
        migrations.CreateModel(
            name='Symptom',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                ('name', models.CharField(db_index=True, max_length=512, unique=True)),
                # re.RegexFlag(32) is re.UNICODE (serialised numerically).
                ('aliases', models.TextField(
                    blank=True, null=True,
                    validators=[
                        django.core.validators.RegexValidator(
                            flags=re.RegexFlag(32),
                            message=
                            'Should countain names separated with coma(spaces are available)',
                            regex='^([\\w ]+ {0,2}, {0,2})*([\\w ]+ {0,2})$')
                    ])),
                # django-mptt bookkeeping columns.
                ('lft', models.PositiveIntegerField(db_index=True, editable=False)),
                ('rght', models.PositiveIntegerField(db_index=True, editable=False)),
                ('tree_id', models.PositiveIntegerField(db_index=True, editable=False)),
                ('level', models.PositiveIntegerField(db_index=True, editable=False)),
                ('body_part', models.ForeignKey(
                    blank=True, null=True,
                    on_delete=django.db.models.deletion.PROTECT,
                    to='deceases.BodyPart')),
                ('parent', mptt.fields.TreeForeignKey(
                    blank=True, null=True,
                    on_delete=django.db.models.deletion.CASCADE,
                    related_name='children', to='deceases.Symptom')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.AddField(
            model_name='patientsymptomdecease',
            name='symptom',
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                to='deceases.Symptom'),
        ),
        migrations.AddField(
            model_name='deceasesymptom',
            name='symptom',
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                to='deceases.Symptom'),
        ),
        migrations.AddField(
            model_name='decease',
            name='sphere',
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                to='deceases.Sphere'),
        ),
        migrations.AddField(
            model_name='decease',
            name='symptoms',
            field=models.ManyToManyField(through='deceases.DeceaseSymptom',
                                         to='deceases.Symptom'),
        ),
        migrations.AlterUniqueTogether(
            name='deceasesymptom',
            unique_together={('symptom', 'decease')},
        ),
    ]
def build_regexp(lookup: Iterable[Tuple[str, str]],
                 flags: re.RegexFlag = re.RegexFlag(0)):
    """Compile the combined pattern derived from *lookup* using *flags*."""
    source = build_regexp_source(lookup)
    return re.compile(source, flags)
class Migration(migrations.Migration):
    """Auto-generated Django migration for the ``peeringdb_server`` app.

    Consists entirely of ``AlterField`` operations that refresh field
    attributes (``choices``, ``help_text``, ``max_length`` and similar) on
    existing models; no tables or columns are created or dropped here.
    """

    dependencies = [
        ("peeringdb_server", "0028_ixlan_remove_auto_increment"),
    ]

    operations = [
        migrations.AlterField(
            model_name="commandlinetool",
            name="created",
            field=models.DateTimeField(
                auto_now_add=True, help_text="command was run at this date and time"
            ),
        ),
        migrations.AlterField(
            model_name="commandlinetool",
            name="status",
            field=models.CharField(
                choices=[
                    ("done", "Done"),
                    ("waiting", "Waiting"),
                    ("running", "Running"),
                ],
                default="done",
                max_length=255,
            ),
        ),
        migrations.AlterField(
            model_name="commandlinetool",
            name="tool",
            field=models.CharField(
                choices=[
                    ("pdb_renumber_lans", "Renumber IP Space"),
                    ("pdb_fac_merge", "Merge Facilities"),
                    ("pdb_fac_merge_undo", "Merge Facilities: UNDO"),
                    ("pdb_undelete", "Restore Object(s)"),
                    ("pdb_ixf_ixp_member_import", "IX-F Import"),
                ],
                help_text="name of the tool",
                max_length=255,
            ),
        ),
        migrations.AlterField(
            model_name="commandlinetool",
            name="user",
            field=models.ForeignKey(
                help_text="the user that ran this command",
                on_delete=django.db.models.deletion.CASCADE,
                related_name="clt_history",
                to=settings.AUTH_USER_MODEL,
            ),
        ),
        migrations.AlterField(
            model_name="facility",
            name="geocode_date",
            field=models.DateTimeField(
                blank=True, help_text="Last time of attempted geocode", null=True
            ),
        ),
        migrations.AlterField(
            model_name="facility",
            name="geocode_error",
            field=models.TextField(
                blank=True,
                help_text="Error message of previous geocode attempt",
                null=True,
            ),
        ),
        migrations.AlterField(
            model_name="facility",
            name="geocode_status",
            field=models.BooleanField(
                default=False,
                help_text="Has this object's latitude and longitude been synchronized to its address fields",
            ),
        ),
        migrations.AlterField(
            model_name="facility",
            name="latitude",
            field=models.DecimalField(
                blank=True,
                decimal_places=6,
                help_text="Latitude",
                max_digits=9,
                null=True,
            ),
        ),
        migrations.AlterField(
            model_name="facility",
            name="longitude",
            field=models.DecimalField(
                blank=True,
                decimal_places=6,
                help_text="Longitude",
                max_digits=9,
                null=True,
            ),
        ),
        migrations.AlterField(
            model_name="internetexchange",
            name="media",
            field=models.CharField(
                choices=[
                    ("Ethernet", "Ethernet"),
                    ("ATM", "ATM"),
                    ("Multiple", "Multiple"),
                ],
                max_length=128,
            ),
        ),
        migrations.AlterField(
            model_name="internetexchange",
            name="region_continent",
            field=models.CharField(
                choices=[
                    ("North America", "North America"),
                    ("Asia Pacific", "Asia Pacific"),
                    ("Europe", "Europe"),
                    ("South America", "South America"),
                    ("Africa", "Africa"),
                    ("Australia", "Australia"),
                    ("Middle East", "Middle East"),
                ],
                max_length=255,
            ),
        ),
        migrations.AlterField(
            model_name="ixlanprefix",
            name="protocol",
            field=models.CharField(
                choices=[("IPv4", "IPv4"), ("IPv6", "IPv6")], max_length=64
            ),
        ),
        migrations.AlterField(
            model_name="network",
            name="allow_ixp_update",
            field=models.BooleanField(
                default=False,
                help_text="Specifies whether an ixp is allowed to add a netixlan entry for this network via their ixp_member data",
            ),
        ),
        migrations.AlterField(
            model_name="network",
            name="info_never_via_route_servers",
            field=models.BooleanField(
                default=False,
                # NOTE(review): "rout servers" looks like a typo for
                # "route servers" — but this string is recorded migration
                # state; fix it in the model and a fresh migration, not here.
                help_text="Indicates if this network will announce its routes via rout servers or not",
            ),
        ),
        migrations.AlterField(
            model_name="network",
            name="info_ratio",
            field=models.CharField(
                blank=True,
                choices=[
                    ("", "Not Disclosed"),
                    ("Not Disclosed", "Not Disclosed"),
                    ("Heavy Outbound", "Heavy Outbound"),
                    ("Mostly Outbound", "Mostly Outbound"),
                    ("Balanced", "Balanced"),
                    ("Mostly Inbound", "Mostly Inbound"),
                    ("Heavy Inbound", "Heavy Inbound"),
                ],
                default="Not Disclosed",
                max_length=45,
            ),
        ),
        migrations.AlterField(
            model_name="network",
            name="info_scope",
            field=models.CharField(
                blank=True,
                choices=[
                    ("", "Not Disclosed"),
                    ("Not Disclosed", "Not Disclosed"),
                    ("Regional", "Regional"),
                    ("North America", "North America"),
                    ("Asia Pacific", "Asia Pacific"),
                    ("Europe", "Europe"),
                    ("South America", "South America"),
                    ("Africa", "Africa"),
                    ("Australia", "Australia"),
                    ("Middle East", "Middle East"),
                    ("Global", "Global"),
                ],
                default="Not Disclosed",
                max_length=39,
            ),
        ),
        migrations.AlterField(
            model_name="network",
            name="info_traffic",
            field=models.CharField(
                blank=True,
                choices=[
                    ("", "Not Disclosed"),
                    ("0-20 Mbps", "0-20 Mbps"),
                    ("20-100Mbps", "20-100Mbps"),
                    ("100-1000Mbps", "100-1000Mbps"),
                    ("1-5Gbps", "1-5Gbps"),
                    ("5-10Gbps", "5-10Gbps"),
                    ("10-20Gbps", "10-20Gbps"),
                    ("20-50 Gbps", "20-50 Gbps"),
                    ("50-100 Gbps", "50-100 Gbps"),
                    ("100+ Gbps", "100+ Gbps"),
                    ("100-200 Gbps", "100-200 Gbps"),
                    ("200-300 Gbps", "200-300 Gbps"),
                    ("300-500 Gbps", "300-500 Gbps"),
                    ("500-1000 Gbps", "500-1000 Gbps"),
                    ("1 Tbps+", "1 Tbps+"),
                    ("10 Tbps+", "10 Tbps+"),
                ],
                max_length=39,
            ),
        ),
        migrations.AlterField(
            model_name="network",
            name="info_type",
            field=models.CharField(
                blank=True,
                choices=[
                    ("", "Not Disclosed"),
                    ("Not Disclosed", "Not Disclosed"),
                    ("NSP", "NSP"),
                    ("Content", "Content"),
                    ("Cable/DSL/ISP", "Cable/DSL/ISP"),
                    ("Enterprise", "Enterprise"),
                    ("Educational/Research", "Educational/Research"),
                    ("Non-Profit", "Non-Profit"),
                    ("Route Server", "Route Server"),
                ],
                default="Not Disclosed",
                max_length=60,
            ),
        ),
        migrations.AlterField(
            model_name="network",
            name="policy_contracts",
            field=models.CharField(
                blank=True,
                choices=[
                    ("Not Required", "Not Required"),
                    ("Private Only", "Private Only"),
                    ("Required", "Required"),
                ],
                max_length=36,
            ),
        ),
        migrations.AlterField(
            model_name="network",
            name="policy_general",
            field=models.CharField(
                blank=True,
                choices=[
                    ("Open", "Open"),
                    ("Selective", "Selective"),
                    ("Restrictive", "Restrictive"),
                    ("No", "No"),
                ],
                max_length=72,
            ),
        ),
        migrations.AlterField(
            model_name="network",
            name="policy_locations",
            field=models.CharField(
                blank=True,
                choices=[
                    ("Not Required", "Not Required"),
                    ("Preferred", "Preferred"),
                    ("Required - US", "Required - US"),
                    ("Required - EU", "Required - EU"),
                    ("Required - International", "Required - International"),
                ],
                max_length=72,
            ),
        ),
        migrations.AlterField(
            model_name="networkcontact",
            name="role",
            field=models.CharField(
                choices=[
                    ("Abuse", "Abuse"),
                    ("Maintenance", "Maintenance"),
                    ("Policy", "Policy"),
                    ("Technical", "Technical"),
                    ("NOC", "NOC"),
                    ("Public Relations", "Public Relations"),
                    ("Sales", "Sales"),
                ],
                max_length=27,
            ),
        ),
        migrations.AlterField(
            model_name="networkcontact",
            name="visible",
            field=models.CharField(
                choices=[
                    ("Private", "Private"),
                    ("Users", "Users"),
                    ("Public", "Public"),
                ],
                default="Public",
                max_length=64,
            ),
        ),
        migrations.AlterField(
            model_name="organization",
            name="latitude",
            field=models.DecimalField(
                blank=True,
                decimal_places=6,
                help_text="Latitude",
                max_digits=9,
                null=True,
            ),
        ),
        migrations.AlterField(
            model_name="organization",
            name="logo",
            field=models.FileField(
                blank=True,
                help_text="Allows you to upload and set a logo image file for this organization",
                null=True,
                upload_to="logos/",
            ),
        ),
        migrations.AlterField(
            model_name="organization",
            name="longitude",
            field=models.DecimalField(
                blank=True,
                decimal_places=6,
                help_text="Longitude",
                max_digits=9,
                null=True,
            ),
        ),
        migrations.AlterField(
            model_name="partnership",
            name="logo",
            field=models.FileField(
                blank=True,
                help_text="Allows you to upload and set a logo image file for this partnership",
                null=True,
                upload_to="logos/",
            ),
        ),
        migrations.AlterField(
            model_name="sponsorshiporganization",
            name="logo",
            field=models.FileField(
                blank=True,
                help_text="Allows you to upload and set a logo image file for this sponsorship",
                null=True,
                upload_to="logos/",
            ),
        ),
        migrations.AlterField(
            model_name="user",
            name="status",
            field=models.CharField(default="ok", max_length=254, verbose_name="status"),
        ),
        migrations.AlterField(
            model_name="user",
            name="username",
            field=models.CharField(
                help_text="Required. Letters, digits and [@.+-/_=|] only.",
                max_length=254,
                unique=True,
                validators=[
                    django.core.validators.RegexValidator(
                        "^[\\w\\.@+-=|/]+$",
                        "Enter a valid username.",
                        "invalid",
                        # re.RegexFlag(32) == re.UNICODE, serialized
                        # numerically by the migration writer.
                        flags=re.RegexFlag(32),
                    )
                ],
                verbose_name="username",
            ),
        ),
        migrations.AlterField(
            model_name="userorgaffiliationrequest",
            name="asn",
            field=django_inet.models.ASNField(
                blank=True, help_text="The ASN entered by the user", null=True
            ),
        ),
        migrations.AlterField(
            model_name="userorgaffiliationrequest",
            name="org",
            field=models.ForeignKey(
                blank=True,
                help_text="This organization in our database that was derived from the provided ASN or organization name. If this is empty it means no matching organization was found.",
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="affiliation_requests",
                to="peeringdb_server.Organization",
            ),
        ),
        migrations.AlterField(
            model_name="userorgaffiliationrequest",
            name="org_name",
            field=models.CharField(
                blank=True,
                help_text="The organization name entered by the user",
                max_length=255,
                null=True,
            ),
        ),
        migrations.AlterField(
            model_name="userorgaffiliationrequest",
            name="status",
            field=models.CharField(
                choices=[
                    ("pending", "Pending"),
                    ("approved", "Approved"),
                    ("denied", "Denied"),
                ],
                help_text="Status of this request",
                max_length=254,
            ),
        ),
        migrations.AlterField(
            model_name="userorgaffiliationrequest",
            name="user",
            field=models.ForeignKey(
                help_text="The user that made the request",
                on_delete=django.db.models.deletion.CASCADE,
                related_name="affiliation_requests",
                to=settings.AUTH_USER_MODEL,
            ),
        ),
        migrations.AlterField(
            model_name="verificationqueueitem",
            name="user",
            field=models.ForeignKey(
                blank=True,
                help_text="The item that this queue is attached to was created by this user",
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="vqitems",
                to=settings.AUTH_USER_MODEL,
            ),
        ),
    ]
class Migration(migrations.Migration):
    """Auto-generated Django migration for the ``competition`` app.

    Creates the ``University`` model, removes two fields from ``Group``,
    adds a ``group`` foreign key to ``Runner``, and relaxes
    ``Runner.identification`` to a plain ``CharField``.
    """

    dependencies = [
        ('competition', '0008_auto_20191008_2044'),
    ]

    operations = [
        migrations.CreateModel(
            name='University',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('full_name', models.CharField(max_length=30)),
                ('abbreviation', models.CharField(max_length=5)),
                ('id_example', models.CharField(max_length=8)),
                # NOTE(review): re.RegexFlag(2) == re.IGNORECASE; passing it
                # as verbose_name looks like a mis-serialized kwarg (likely
                # meant as a regex flag) — confirm against the current model
                # before relying on this migration's recorded state.
                ('id_regex', regex_field.fields.RegexField(max_length=128, verbose_name=re.RegexFlag(2))),
            ],
        ),
        migrations.RemoveField(
            model_name='group',
            name='members',
        ),
        migrations.RemoveField(
            model_name='group',
            name='speedy_team',
        ),
        migrations.AddField(
            model_name='runner',
            name='group',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='competition.Group'),
        ),
        migrations.AlterField(
            model_name='runner',
            name='identification',
            field=models.CharField(max_length=8),
        ),
    ]
def to_json_schema(arg: typing.Union[Field, typing.Type[Schema]], _definitions: dict = None) -> typing.Union[bool, dict]:
    """Convert a validation field (or Schema class) into a JSON Schema.

    Returns ``True`` for ``Any`` (matches everything), ``False`` for
    ``NeverMatch`` (matches nothing), otherwise a dict in JSON Schema form.

    ``_definitions`` is internal recursion state: a shared mapping that
    collects ``#/definitions/...`` entries produced while walking nested
    fields. Callers should not pass it; when it is ``None`` this call is the
    root, and the collected definitions are attached to the result under
    ``"definitions"`` before returning.

    Raises:
        ValueError: for a ``String`` pattern compiled with non-default regex
            flags, or for a field type with no JSON Schema equivalent.
    """
    # Trivial schemas: bare booleans per the JSON Schema spec.
    if isinstance(arg, Any):
        return True
    elif isinstance(arg, NeverMatch):
        return False
    data: dict = {}
    is_root = _definitions is None
    definitions = {} if _definitions is None else _definitions
    # Normalize the argument to a Field instance (or None for a bare
    # SchemaDefinitions mapping, which only contributes named definitions).
    if isinstance(arg, Field):
        field = arg
    elif isinstance(arg, SchemaDefinitions):
        field = None
        for key, value in arg.items():
            definitions[key] = to_json_schema(value, _definitions=definitions)
    else:
        # Presumably a Schema class — TODO confirm; make_validator() yields
        # the Field describing it.
        field = arg.make_validator()
    # Dispatch on concrete field type. Branch order matters: Reference must
    # be handled before the scalar/container types it may wrap.
    if isinstance(field, Reference):
        data["$ref"] = f"#/definitions/{field.target_string}"
        definitions[field.target_string] = to_json_schema(
            field.target, _definitions=definitions)
    elif isinstance(field, String):
        data["type"] = ["string", "null"] if field.allow_null else "string"
        data.update(get_standard_properties(field))
        # allow_blank=False implies a minimum length of 1 even when
        # min_length was not given explicitly.
        if field.min_length is not None or not field.allow_blank:
            data["minLength"] = field.min_length or 1
        if field.max_length is not None:
            data["maxLength"] = field.max_length
        if field.pattern_regex is not None:
            # Only plain-UNICODE patterns translate; JSON Schema has no way
            # to express Python regex flags.
            if field.pattern_regex.flags != re.RegexFlag.UNICODE:
                flags = re.RegexFlag(field.pattern_regex.flags)
                raise ValueError(
                    f"Cannot convert regular expression with non-standard flags "
                    f"to JSON schema: {flags!s}")
            data["pattern"] = field.pattern_regex.pattern
        if field.format is not None:
            data["format"] = field.format
    elif isinstance(field, (Integer, Float, Decimal)):
        base_type = "integer" if isinstance(field, Integer) else "number"
        data["type"] = [base_type, "null"] if field.allow_null else base_type
        data.update(get_standard_properties(field))
        if field.minimum is not None:
            data["minimum"] = field.minimum
        if field.maximum is not None:
            data["maximum"] = field.maximum
        if field.exclusive_minimum is not None:
            data["exclusiveMinimum"] = field.exclusive_minimum
        if field.exclusive_maximum is not None:
            data["exclusiveMaximum"] = field.exclusive_maximum
        if field.multiple_of is not None:
            data["multipleOf"] = field.multiple_of
    elif isinstance(field, Boolean):
        data["type"] = ["boolean", "null"] if field.allow_null else "boolean"
        data.update(get_standard_properties(field))
    elif isinstance(field, Array):
        data["type"] = ["array", "null"] if field.allow_null else "array"
        data.update(get_standard_properties(field))
        if field.min_items is not None:
            data["minItems"] = field.min_items
        if field.max_items is not None:
            data["maxItems"] = field.max_items
        if field.items is not None:
            # A list/tuple of items means positional (tuple) validation;
            # a single field applies to every element.
            if isinstance(field.items, (list, tuple)):
                data["items"] = [
                    to_json_schema(item, _definitions=definitions)
                    for item in field.items
                ]
            else:
                data["items"] = to_json_schema(field.items,
                                               _definitions=definitions)
        if field.additional_items is not None:
            if isinstance(field.additional_items, bool):
                data["additionalItems"] = field.additional_items
            else:
                data["additionalItems"] = to_json_schema(
                    field.additional_items, _definitions=definitions)
        if field.unique_items is not False:
            data["uniqueItems"] = True
    elif isinstance(field, Object):
        data["type"] = ["object", "null"] if field.allow_null else "object"
        data.update(get_standard_properties(field))
        if field.properties:
            data["properties"] = {
                key: to_json_schema(value, _definitions=definitions)
                for key, value in field.properties.items()
            }
        if field.pattern_properties:
            data["patternProperties"] = {
                key: to_json_schema(value, _definitions=definitions)
                for key, value in field.pattern_properties.items()
            }
        if field.additional_properties is not None:
            if isinstance(field.additional_properties, bool):
                data["additionalProperties"] = field.additional_properties
            else:
                data["additionalProperties"] = to_json_schema(
                    field.additional_properties, _definitions=definitions)
        if field.property_names is not None:
            data["propertyNames"] = to_json_schema(field.property_names,
                                                   _definitions=definitions)
        if field.max_properties is not None:
            data["maxProperties"] = field.max_properties
        if field.min_properties is not None:
            data["minProperties"] = field.min_properties
        if field.required:
            data["required"] = field.required
    elif isinstance(field, Choice):
        data["enum"] = [key for key, value in field.choices]
        data.update(get_standard_properties(field))
    elif isinstance(field, Const):
        data["const"] = field.const
        data.update(get_standard_properties(field))
    elif isinstance(field, Union):
        data["anyOf"] = [
            to_json_schema(item, _definitions=definitions)
            for item in field.any_of
        ]
        data.update(get_standard_properties(field))
    elif isinstance(field, OneOf):
        data["oneOf"] = [
            to_json_schema(item, _definitions=definitions)
            for item in field.one_of
        ]
        data.update(get_standard_properties(field))
    elif isinstance(field, AllOf):
        data["allOf"] = [
            to_json_schema(item, _definitions=definitions)
            for item in field.all_of
        ]
        data.update(get_standard_properties(field))
    elif isinstance(field, IfThenElse):
        data["if"] = to_json_schema(field.if_clause, _definitions=definitions)
        if field.then_clause is not None:
            data["then"] = to_json_schema(field.then_clause,
                                          _definitions=definitions)
        if field.else_clause is not None:
            data["else"] = to_json_schema(field.else_clause,
                                          _definitions=definitions)
        data.update(get_standard_properties(field))
    elif isinstance(field, Not):
        data["not"] = to_json_schema(field.negated, _definitions=definitions)
        data.update(get_standard_properties(field))
    elif field is not None:
        # Anything unrecognized (and not a SchemaDefinitions pass-through)
        # has no JSON Schema representation.
        name = type(field).__qualname__
        raise ValueError(f"Cannot convert field type {name!r} to JSON Schema")
    # Only the root call attaches the accumulated shared definitions.
    if is_root and definitions:
        data["definitions"] = definitions
    return data
MIME_WHITE_LIST, PROXY_BLACK_LIST, PROXY_FALLBACK, PROXY_WHITE_LIST) from darc.error import render_error from darc.link import Link, parse_link, urljoin, urlsplit # Regular expression patterns to match all reasonable URLs. URL_PAT = [ # HTTP(S) and other *regular* URLs, e.g. WebSocket, IRC, etc. re.compile(r'(?P<url>((https?|wss?|irc):)?(//)?\w+(\.\w+)+/?\S*)', re.UNICODE), # bitcoin / data / ed2k / magnet / mail / script / tel, etc. re.compile(r'(?P<url>(bitcoin|data|ed2k|magnet|mailto|script|tel):\w+)', re.ASCII), ] URL_PAT.extend( re.compile(pattern, re.RegexFlag( flags)) # pattern string + compiling flags for pattern, flags in json.loads(os.getenv('DARC_URL_PAT', '[]'))) def match_proxy(proxy: str) -> bool: """Check if proxy type in black list. Args: proxy: Proxy type to be checked. Returns: If ``proxy`` in black list. Note: If ``proxy`` is ``script``, then it will always return :data:`True`.