def report(self, msg: str, context: Context, severity: str, file: str = None,
           origin: Context = None) -> None:
    """Report an error or note (unless disabled)."""
    if self.disable_count <= 0:
        self.errors.report(context.get_line() if context else -1,
                           context.get_column() if context else -1,
                           msg.strip(), severity=severity, file=file,
                           origin_line=origin.get_line() if origin else None)
def fail(self, msg: str, context: Context, *, code: Optional[ErrorCode] = None) -> None:
    self.errors.report(context.get_line(), context.get_column(), msg, code=code)
def deserialize(cls, info: TypeInfo, data: JsonDict) -> 'Attribute':
    """Return the Attribute that was serialized."""
    return Attribute(
        data['name'],
        info,
        data['has_default'],
        data['init'],
        Converter(data['converter_name'], data['converter_is_attr_converters_optional']),
        Context(line=data['context_line'], column=data['context_column']))
def run(self) -> None:
    for module_name, module_file in self.api.modules.items():
        for defn in iter_over_classdefs(module_file):
            for lvalue, rvalue in iter_call_assignments(defn):
                if is_related_field(rvalue, module_file):
                    try:
                        ref_to_fullname = extract_ref_to_fullname(rvalue,
                                                                  module_file=module_file,
                                                                  all_modules=self.api.modules)
                    except helpers.SelfReference:
                        ref_to_fullname = defn.fullname
                    except helpers.InvalidModelString as exc:
                        self.api.fail(f'Invalid value for a to= parameter: {exc.model_string!r}',
                                      Context(line=rvalue.line))
                        return None

                    if self.model_classdef.fullname == ref_to_fullname:
                        if 'related_name' in rvalue.arg_names:
                            related_name_expr = rvalue.args[rvalue.arg_names.index('related_name')]
                            if not isinstance(related_name_expr, StrExpr):
                                return None
                            related_name = related_name_expr.value
                            typ = get_related_field_type(rvalue, self.api, defn.info)
                            if typ is None:
                                return None
                            self.add_new_node_to_model_class(related_name, typ)
def report(self, msg: str, context: Context, severity: str, file: str = None) -> None:
    """Report an error or note (unless disabled)."""
    if self.disable_count <= 0:
        self.errors.report(context.get_line(), msg.strip(), severity=severity, file=file)
def deserialize(cls, info: TypeInfo, data: JsonDict,
                api: SemanticAnalyzerPluginInterface) -> 'Attribute':
    """Return the Attribute that was serialized."""
    raw_init_type = data['init_type']
    init_type = deserialize_and_fixup_type(raw_init_type, api) if raw_init_type else None

    return Attribute(
        data['name'],
        info,
        data['has_default'],
        data['init'],
        data['kw_only'],
        Converter(data['converter_name'], data['converter_is_attr_converters_optional']),
        Context(line=data['context_line'], column=data['context_column']),
        init_type)
def collect_attributes(self) -> Optional[List[DataclassAttribute]]:
    """Collect all attributes declared in the dataclass and its parents.

    All assignments of the form

        a: SomeType
        b: SomeOtherType = ...

    are collected.
    """
    # First, collect attributes belonging to the current class.
    ctx = self._ctx
    cls = self._ctx.cls
    attrs: List[DataclassAttribute] = []
    known_attrs: Set[str] = set()
    kw_only = _get_decorator_bool_argument(ctx, 'kw_only', False)
    for stmt in cls.defs.body:
        # Any assignment that doesn't use the new type declaration
        # syntax can be ignored out of hand.
        if not (isinstance(stmt, AssignmentStmt) and stmt.new_syntax):
            continue

        # a: int, b: str = 1, 'foo' is not supported syntax so we
        # don't have to worry about it.
        lhs = stmt.lvalues[0]
        if not isinstance(lhs, NameExpr):
            continue

        sym = cls.info.names.get(lhs.name)
        if sym is None:
            # This name is likely blocked by a star import. We don't need to defer because
            # defer() is already called by mark_incomplete().
            continue

        node = sym.node
        if isinstance(node, PlaceholderNode):
            # This node is not ready yet.
            return None
        assert isinstance(node, Var)

        # x: ClassVar[int] is ignored by dataclasses.
        if node.is_classvar:
            continue

        # x: InitVar[int] is turned into x: int and is removed from the class.
        is_init_var = False
        node_type = get_proper_type(node.type)
        if (isinstance(node_type, Instance) and
                node_type.type.fullname == 'dataclasses.InitVar'):
            is_init_var = True
            node.type = node_type.args[0]

        if self._is_kw_only_type(node_type):
            kw_only = True

        has_field_call, field_args = _collect_field_args(stmt.rvalue, ctx)

        is_in_init_param = field_args.get('init')
        if is_in_init_param is None:
            is_in_init = True
        else:
            is_in_init = bool(ctx.api.parse_bool(is_in_init_param))

        has_default = False
        # Ensure that something like x: int = field() is rejected
        # after an attribute with a default.
        if has_field_call:
            has_default = 'default' in field_args or 'default_factory' in field_args

        # All other assignments are already type checked.
        elif not isinstance(stmt.rvalue, TempNode):
            has_default = True

        if not has_default:
            # Make all non-default attributes implicit because they are de-facto set
            # on self in the generated __init__(), not in the class body.
            sym.implicit = True

        is_kw_only = kw_only
        # Use the kw_only field arg if it is provided. Otherwise use the
        # kw_only value from the decorator parameter.
        field_kw_only_param = field_args.get('kw_only')
        if field_kw_only_param is not None:
            is_kw_only = bool(ctx.api.parse_bool(field_kw_only_param))

        known_attrs.add(lhs.name)
        attrs.append(
            DataclassAttribute(
                name=lhs.name,
                is_in_init=is_in_init,
                is_init_var=is_init_var,
                has_default=has_default,
                line=stmt.line,
                column=stmt.column,
                type=sym.type,
                info=cls.info,
                kw_only=is_kw_only,
            ))

    # Next, collect attributes belonging to any class in the MRO
    # as long as those attributes weren't already collected. This
    # makes it possible to overwrite attributes in subclasses.
    # copy() because we potentially modify all_attrs below and if this code requires debugging
    # we'll have unmodified attrs laying around.
    all_attrs = attrs.copy()
    for info in cls.info.mro[1:-1]:
        if 'dataclass' not in info.metadata:
            continue

        super_attrs = []
        # Each class depends on the set of attributes in its dataclass ancestors.
        ctx.api.add_plugin_dependency(make_wildcard_trigger(info.fullname))

        for data in info.metadata["dataclass"]["attributes"]:
            name: str = data["name"]
            if name not in known_attrs:
                attr = DataclassAttribute.deserialize(info, data, ctx.api)
                attr.expand_typevar_from_subtype(ctx.cls.info)
                known_attrs.add(name)
                super_attrs.append(attr)
            elif all_attrs:
                # How early in the attribute list an attribute appears is determined by the
                # reverse MRO, not simply MRO.
                # See https://docs.python.org/3/library/dataclasses.html#inheritance for
                # details.
                for attr in all_attrs:
                    if attr.name == name:
                        all_attrs.remove(attr)
                        super_attrs.append(attr)
                        break
        all_attrs = super_attrs + all_attrs
        all_attrs.sort(key=lambda a: a.kw_only)

    # Ensure that arguments without a default don't follow
    # arguments that have a default.
    found_default = False
    # Ensure that the KW_ONLY sentinel is only provided once
    found_kw_sentinel = False
    for attr in all_attrs:
        # If we find any attribute that is_in_init, not kw_only, and that
        # doesn't have a default after one that does have one,
        # then that's an error.
        if found_default and attr.is_in_init and not attr.has_default and not attr.kw_only:
            # If the issue comes from merging different classes, report it
            # at the class definition point.
            context = (Context(line=attr.line, column=attr.column)
                       if attr in attrs else ctx.cls)
            ctx.api.fail(
                'Attributes without a default cannot follow attributes with one',
                context,
            )

        found_default = found_default or (attr.has_default and attr.is_in_init)
        if found_kw_sentinel and self._is_kw_only_type(attr.type):
            context = (Context(line=attr.line, column=attr.column)
                       if attr in attrs else ctx.cls)
            ctx.api.fail(
                'There may not be more than one field with the KW_ONLY type',
                context,
            )
        found_kw_sentinel = found_kw_sentinel or self._is_kw_only_type(attr.type)

    return all_attrs
def collect_attributes(self) -> Optional[List[DataclassAttribute]]:
    """Collect all attributes declared in the dataclass and its parents.

    All assignments of the form

        a: SomeType
        b: SomeOtherType = ...

    are collected.
    """
    # First, collect attributes belonging to the current class.
    ctx = self._ctx
    cls = self._ctx.cls
    attrs: List[DataclassAttribute] = []
    known_attrs: Set[str] = set()
    for stmt in cls.defs.body:
        # Any assignment that doesn't use the new type declaration
        # syntax can be ignored out of hand.
        if not (isinstance(stmt, AssignmentStmt) and stmt.new_syntax):
            continue

        # a: int, b: str = 1, 'foo' is not supported syntax so we
        # don't have to worry about it.
        lhs = stmt.lvalues[0]
        if not isinstance(lhs, NameExpr):
            continue

        sym = cls.info.names.get(lhs.name)
        if sym is None:
            # This name is likely blocked by a star import. We don't need
            # to defer because defer() is already called by mark_incomplete().
            continue

        node = sym.node
        if isinstance(node, PlaceholderNode):
            # This node is not ready yet.
            return None
        assert isinstance(node, Var)

        # x: ClassVar[int] is ignored by dataclasses.
        if node.is_classvar:
            continue

        # x: InitVar[int] is turned into x: int and is removed from the class.
        is_init_var = False
        node_type = get_proper_type(node.type)
        if (isinstance(node_type, Instance) and
                node_type.type.fullname == "dataclasses.InitVar"):
            is_init_var = True
            node.type = node_type.args[0]

        has_field_call, field_args = _collect_field_args(stmt.rvalue)

        is_in_init_param = field_args.get("init")
        if is_in_init_param is None:
            is_in_init = True
        else:
            is_in_init = bool(ctx.api.parse_bool(is_in_init_param))

        # fields with a resolver are never put in the __init__ method
        if "resolver" in field_args:
            is_in_init = False

        has_default = False
        # Ensure that something like x: int = field() is rejected
        # after an attribute with a default.
        if has_field_call:
            has_default = "default" in field_args or "default_factory" in field_args

        # All other assignments are already type checked.
        elif not isinstance(stmt.rvalue, TempNode):
            has_default = True

        if not has_default:
            # Make all non-default attributes implicit because they are de-facto set
            # on self in the generated __init__(), not in the class body.
            sym.implicit = True

        known_attrs.add(lhs.name)

        params = dict(
            name=lhs.name,
            is_in_init=is_in_init,
            is_init_var=is_init_var,
            has_default=has_default,
            line=stmt.line,
            column=stmt.column,
            type=sym.type,
        )

        # add support for mypy >= 0.800 without breaking backwards compatibility
        # https://github.com/python/mypy/pull/9380/file
        # https://github.com/strawberry-graphql/strawberry/issues/678
        try:
            attribute = DataclassAttribute(**params)  # type: ignore
        except TypeError:
            params["info"] = cls.info
            attribute = DataclassAttribute(**params)  # type: ignore

        attrs.append(attribute)

    # Next, collect attributes belonging to any class in the MRO
    # as long as those attributes weren't already collected. This
    # makes it possible to overwrite attributes in subclasses.
    # copy() because we potentially modify all_attrs below and if
    # this code requires debugging we'll have unmodified attrs laying around.
    all_attrs = attrs.copy()
    for info in cls.info.mro[1:-1]:
        if "dataclass" not in info.metadata:
            continue

        super_attrs = []
        # Each class depends on the set of attributes in its dataclass ancestors.
        ctx.api.add_plugin_dependency(make_wildcard_trigger(info.fullname))

        for data in info.metadata["dataclass"]["attributes"]:
            name = data["name"]  # type: str
            if name not in known_attrs:
                attr = DataclassAttribute.deserialize(info, data, ctx.api)
                known_attrs.add(name)
                super_attrs.append(attr)
            elif all_attrs:
                # How early in the attribute list an attribute appears is
                # determined by the reverse MRO, not simply MRO.
                # See https://docs.python.org/3/library/dataclasses.html#inheritance
                # for details.
                for attr in all_attrs:
                    if attr.name == name:
                        all_attrs.remove(attr)
                        super_attrs.append(attr)
                        break
        all_attrs = super_attrs + all_attrs

    # Ensure that arguments without a default don't follow
    # arguments that have a default.
    found_default = False
    for attr in all_attrs:
        # If we find any attribute that is_in_init but that
        # doesn't have a default after one that does have one,
        # then that's an error.
        if found_default and attr.is_in_init and not attr.has_default:
            # If the issue comes from merging different classes, report it
            # at the class definition point.
            context = (Context(line=attr.line, column=attr.column)
                       if attr in attrs else ctx.cls)
            ctx.api.fail(
                "Attributes without a default cannot follow attributes with one",
                context,
            )

        found_default = found_default or (attr.has_default and attr.is_in_init)

    return all_attrs
def fail(self, msg: str, ctx: Context, *, blocker: bool = False) -> None:
    self.errors.report(ctx.get_line(), ctx.get_column(), msg)
def collect_attributes(self) -> List[DataclassAttribute]:
    """Collect all attributes declared in the dataclass and its parents.

    All assignments of the form

        a: SomeType
        b: SomeOtherType = ...

    are collected.
    """
    # First, collect attributes belonging to the current class.
    ctx = self._ctx
    cls = self._ctx.cls
    attrs = []  # type: List[DataclassAttribute]
    known_attrs = set()  # type: Set[str]
    for stmt in cls.defs.body:
        # Any assignment that doesn't use the new type declaration
        # syntax can be ignored out of hand.
        if not (isinstance(stmt, AssignmentStmt) and stmt.new_syntax):
            continue

        # a: int, b: str = 1, 'foo' is not supported syntax so we
        # don't have to worry about it.
        lhs = stmt.lvalues[0]
        if not isinstance(lhs, NameExpr):
            continue

        node = cls.info.names[lhs.name].node
        assert isinstance(node, Var)

        # x: ClassVar[int] is ignored by dataclasses.
        if node.is_classvar:
            continue

        # x: InitVar[int] is turned into x: int and is removed from the class.
        is_init_var = False
        if (isinstance(node.type, Instance) and
                node.type.type.fullname() == 'dataclasses.InitVar'):
            is_init_var = True
            node.type = node.type.args[0]

        has_field_call, field_args = _collect_field_args(stmt.rvalue)

        is_in_init_param = field_args.get('init')
        if is_in_init_param is None:
            is_in_init = True
        else:
            is_in_init = bool(ctx.api.parse_bool(is_in_init_param))

        has_default = False
        # Ensure that something like x: int = field() is rejected
        # after an attribute with a default.
        if has_field_call:
            has_default = 'default' in field_args or 'default_factory' in field_args

        # All other assignments are already type checked.
        elif not isinstance(stmt.rvalue, TempNode):
            has_default = True

        known_attrs.add(lhs.name)
        attrs.append(
            DataclassAttribute(
                name=lhs.name,
                is_in_init=is_in_init,
                is_init_var=is_init_var,
                has_default=has_default,
                line=stmt.line,
                column=stmt.column,
            ))

    # Next, collect attributes belonging to any class in the MRO
    # as long as those attributes weren't already collected. This
    # makes it possible to overwrite attributes in subclasses.
    super_attrs = []
    init_method = cls.info.get_method('__init__')
    for info in cls.info.mro[1:-1]:
        if 'dataclass' not in info.metadata:
            continue

        for name, data in info.metadata['dataclass']['attributes'].items():
            if name not in known_attrs:
                attr = DataclassAttribute.deserialize(info, data)
                if attr.is_init_var and isinstance(init_method, FuncDef):
                    # InitVars are removed from classes so, in order for them to be inherited
                    # properly, we need to re-inject them into subclasses' sym tables here.
                    # To do that, we look 'em up from the parents' __init__. These variables
                    # are subsequently removed from the sym table at the end of
                    # DataclassTransformer.transform.
                    for arg, arg_name in zip(init_method.arguments, init_method.arg_names):
                        if arg_name == attr.name:
                            cls.info.names[attr.name] = SymbolTableNode(MDEF, arg.variable)

                known_attrs.add(name)
                super_attrs.append(attr)

    all_attrs = super_attrs + attrs

    # Ensure that arguments without a default don't follow
    # arguments that have a default.
    found_default = False
    for attr in all_attrs:
        # If we find any attribute that is_in_init but that
        # doesn't have a default after one that does have one,
        # then that's an error.
        if found_default and attr.is_in_init and not attr.has_default:
            ctx.api.fail(
                'Attributes without a default cannot follow attributes with one',
                Context(line=attr.line, column=attr.column),
            )

        found_default = found_default or attr.has_default

    return all_attrs
    if format_ops is None:
        return None

    last_end = 0
    for spec in specifiers:
        cur_start = spec.start_pos
        literals.append(format_str[last_end:cur_start])
        last_end = cur_start + len(spec.whole_seq)
    literals.append(format_str[last_end:])

    return literals, format_ops


# The empty Context as an argument for parse_format_value().
# It wouldn't be used since the code has passed the type-checking.
EMPTY_CONTEXT: Final = Context()


def tokenizer_format_call(
        format_str: str) -> Optional[Tuple[List[str], List[FormatOp]]]:
    """Tokenize a str.format() format string.

    The core function parse_format_value() is shared with mypy.
    With these specifiers, we then parse the literal substrings
    of the original format string and convert `ConversionSpecifier`
    to `FormatOp`.

    Return:
        A list of string literals and a list of FormatOps. The literals
        are interleaved with FormatOps and the length of returned literals
        should be exactly one more than FormatOps.
def collect_attributes(self) -> List[DataclassAttribute]:
    """Collect all attributes declared in the dataclass and its parents.

    All assignments of the form

        a: SomeType
        b: SomeOtherType = ...

    are collected.
    """
    # First, collect attributes belonging to the current class.
    ctx = self._ctx
    cls = self._ctx.cls
    attrs = []  # type: List[DataclassAttribute]
    known_attrs = set()  # type: Set[str]
    for stmt in cls.defs.body:
        # Any assignment that doesn't use the new type declaration
        # syntax can be ignored out of hand.
        if not (isinstance(stmt, AssignmentStmt) and stmt.new_syntax):
            continue

        # a: int, b: str = 1, 'foo' is not supported syntax so we
        # don't have to worry about it.
        lhs = stmt.lvalues[0]
        if not isinstance(lhs, NameExpr):
            continue

        sym = cls.info.names.get(lhs.name)
        if sym is None:
            # This name is likely blocked by a star import. We don't need to defer because
            # defer() is already called by mark_incomplete().
            assert ctx.api.options.new_semantic_analyzer
            continue

        node = sym.node
        if isinstance(node, PlaceholderNode):
            # This node is not ready yet.
            continue
        assert isinstance(node, Var)

        # x: ClassVar[int] is ignored by dataclasses.
        if node.is_classvar:
            continue

        # x: InitVar[int] is turned into x: int and is removed from the class.
        is_init_var = False
        if (
            isinstance(node.type, Instance) and
            node.type.type.fullname() == 'dataclasses.InitVar'
        ):
            is_init_var = True
            node.type = node.type.args[0]

        has_field_call, field_args = _collect_field_args(stmt.rvalue)

        is_in_init_param = field_args.get('init')
        if is_in_init_param is None:
            is_in_init = True
        else:
            is_in_init = bool(ctx.api.parse_bool(is_in_init_param))

        has_default = False
        # Ensure that something like x: int = field() is rejected
        # after an attribute with a default.
        if has_field_call:
            has_default = 'default' in field_args or 'default_factory' in field_args

        # All other assignments are already type checked.
        elif not isinstance(stmt.rvalue, TempNode):
            has_default = True

        known_attrs.add(lhs.name)
        attrs.append(DataclassAttribute(
            name=lhs.name,
            is_in_init=is_in_init,
            is_init_var=is_init_var,
            has_default=has_default,
            line=stmt.line,
            column=stmt.column,
        ))

    # Next, collect attributes belonging to any class in the MRO
    # as long as those attributes weren't already collected. This
    # makes it possible to overwrite attributes in subclasses.
    # copy() because we potentially modify all_attrs below and if this code requires debugging
    # we'll have unmodified attrs laying around.
    all_attrs = attrs.copy()
    init_method = cls.info.get_method('__init__')
    for info in cls.info.mro[1:-1]:
        if 'dataclass' not in info.metadata:
            continue

        super_attrs = []
        # Each class depends on the set of attributes in its dataclass ancestors.
        ctx.api.add_plugin_dependency(make_wildcard_trigger(info.fullname()))

        for name, data in info.metadata['dataclass']['attributes'].items():
            if name not in known_attrs:
                attr = DataclassAttribute.deserialize(info, data)
                if attr.is_init_var and isinstance(init_method, FuncDef):
                    # InitVars are removed from classes so, in order for them to be inherited
                    # properly, we need to re-inject them into subclasses' sym tables here.
                    # To do that, we look 'em up from the parents' __init__. These variables
                    # are subsequently removed from the sym table at the end of
                    # DataclassTransformer.transform.
                    for arg, arg_name in zip(init_method.arguments, init_method.arg_names):
                        if arg_name == attr.name:
                            cls.info.names[attr.name] = SymbolTableNode(MDEF, arg.variable)

                known_attrs.add(name)
                super_attrs.append(attr)
            else:
                # How early in the attribute list an attribute appears is determined by the
                # reverse MRO, not simply MRO.
                # See https://docs.python.org/3/library/dataclasses.html#inheritance for
                # details.
                (attr,) = [a for a in all_attrs if a.name == name]
                all_attrs.remove(attr)
                super_attrs.append(attr)
        all_attrs = super_attrs + all_attrs

    # Ensure that arguments without a default don't follow
    # arguments that have a default.
    found_default = False
    for attr in all_attrs:
        # If we find any attribute that is_in_init but that
        # doesn't have a default after one that does have one,
        # then that's an error.
        if found_default and attr.is_in_init and not attr.has_default:
            ctx.api.fail(
                'Attributes without a default cannot follow attributes with one',
                Context(line=attr.line, column=attr.column),
            )

        found_default = found_default or (attr.has_default and attr.is_in_init)

    return all_attrs
def fail(self, msg: str, context: Context) -> None:
    self.errors.report(context.get_line(), context.get_column(), msg)
def get_error_context(node: SymbolNode) -> Context:
    context = Context()
    context.set_line(node)
    return context
def fail(self, msg: str, context: Context) -> None:
    """Report an error message (unless disabled)."""
    if self.disable_count <= 0:
        self.errors.report(context.get_line(), msg.strip())