def test_to_named_class_def(self) -> None:
    """Test that find_ast_type gives the wrapped named class back"""
    # `expr`/`identifier_name` are extra fields on this project's ClassDef
    # shim — presumably cdd's patched AST constructors; None keeps them inert.
    class_def = ClassDef(
        name="foo",
        bases=tuple(),
        keywords=tuple(),
        decorator_list=[],
        body=[],
        expr=None,
        identifier_name=None,
    )
    # Two classes in the module: lookup by name must return `class_def`
    # ("foo"), not the first entry ("bar").
    run_ast_test(
        self,
        find_ast_type(
            Module(
                body=[
                    ClassDef(
                        name="bar",
                        bases=tuple(),
                        keywords=tuple(),
                        decorator_list=[],
                        body=[],
                        expr=None,
                        identifier_name=None,
                    ),
                    class_def,
                ],
                stmt=None,
            ),
            node_name="foo",
        ),
        class_def,
        skip_black=True,
    )
def visit_ClassDef(self, node: ast.ClassDef) -> ast.ClassDef:
    """Inject the resource base class into every visited class definition.

    Classes that already declare bases get `core.CustomResource` prepended;
    base-less classes become subclasses of `core.ExternalResource`.
    """
    self.generic_visit(node)
    if node.bases:
        resource_cls = core.CustomResource
        trailing = list(node.bases)
    else:
        resource_cls = core.ExternalResource
        trailing = []
    node.bases = [ast.Name(id=_n(resource_cls), ctx=ast.Load()), *trailing]
    return node
def visit_ClassDef(self, node: ast.ClassDef):
    """Flag dataclass-decorated classes and lift their annotated fields.

    When the class carries a `dataclass` decorator, every top-level
    AnnAssign is visited with dataclass=True and then removed from the
    body; the outcome is stored on the node as `is_dataclass`.
    """
    decorator_ids = [get_id(d) for d in node.decorator_list]
    node.is_dataclass = (
        len(node.decorator_list) > 0 and "dataclass" in decorator_ids
    )
    if node.is_dataclass:
        fields = [stmt for stmt in node.body if isinstance(stmt, ast.AnnAssign)]
        for field in fields:
            self.visit_AnnAssign(field, dataclass=True)
        node.body = [stmt for stmt in node.body if stmt not in fields]
    self.generic_visit(node)
def test_to_code(self) -> None:
    """ Tests to_source in Python 3.9 and < 3.9 """
    class_def = ClassDef(
        name="Classy",
        bases=tuple(),
        decorator_list=[],
        body=[],
        keywords=tuple(),
        identifier_name=None,
        expr=None,
    )
    # Patch the version the module *thinks* it runs under. On an actual
    # < 3.9 interpreter `ast.unparse` does not exist, hence the
    # AttributeError arm when PY_GTE_3_9 is False.
    with patch("cdd.source_transformer.version_info", (3, 9, 0)):
        import cdd.source_transformer

        self.assertEqual(
            cdd.source_transformer.to_code(class_def).rstrip("\n"),
            "class Classy:",
        ) if PY_GTE_3_9 else self.assertRaises(
            AttributeError, lambda: cdd.source_transformer.to_code(class_def))
    with patch("cdd.source_transformer.version_info", (3, 8, 0)):
        import cdd.source_transformer

        # On >= 3.9 the 3.8 fallback path is not exercisable, so this arm
        # degenerates to comparing the constant with itself; on < 3.9 the
        # pre-unparse fallback is the thing under test.
        self.assertEqual(
            "class Classy:"
            if PY_GTE_3_9
            else cdd.source_transformer.to_code(class_def).rstrip("\n"),
            "class Classy:",
        )
def add_class_doc_to_ast(ast_class: ast.ClassDef) -> None:
    """Regenerate and attach docstrings for a class and its methods.

    Builds a CrawtoClass from the ClassDef's fields plus its current
    docstring, renders the class-level docstring and each per-method
    docstring, and splices every rendered docstring in as the first body
    statement, replacing any existing one. The ClassDef is mutated in place.

    Parameters
    ----------
    ast_class : ast.ClassDef
        Class node to (re)document; modified in place.

    Returns
    -------
    None
    """
    classdef = CrawtoClass(**ast_class.__dict__, doc_string=ast.get_docstring(ast_class))
    new_class_doc = [ast.parse(classdef.docs).body[0]]
    # An existing docstring is always body[0]; drop it before inserting the
    # regenerated one.
    if ast.get_docstring(ast_class):
        ast_class.body.pop(0)
    ast_class.body = new_class_doc + ast_class.body
    for member in ast_class.body:
        # isinstance (not `type(...) is`) so FunctionDef subclasses match too;
        # membership test directly on the dict instead of `.keys()`.
        if isinstance(member, ast.FunctionDef) and member.name in classdef.method_docs_dict:
            method_docstring = classdef.method_docs_dict[member.name]
            new_method_doc = [ast.parse(method_docstring).body[0]]
            if ast.get_docstring(member):
                member.body.pop(0)
            member.body = new_method_doc + member.body
def make_python_contract(contract_name, abi):
    """Build a ClassDef AST wrapping a contract described by its ABI.

    The class body is an `__init__` (from make_init) followed by one method
    per ABI function, in sorted name order. Overloaded functions are not
    supported: only the highest-arity signature per name is kept.
    `constructor` is tracked but currently unused (events are a TODO).
    """
    constructor = None
    functions = {}
    for entry in abi:
        kind = entry["type"]
        if kind == "constructor":
            constructor = entry
        elif kind == "function":
            name = entry["name"]
            current = functions.get(name)
            # Keep the variant with the most inputs — potentially more useful.
            if current is None or len(entry["inputs"]) > len(current["inputs"]):
                functions[name] = entry
    members = [make_init()]
    for name, entry in sorted(functions.items()):
        members.append(make_function(name, [a["name"] for a in entry["inputs"]]))
    return ClassDef(bases=[], name=contract_name, body=members, decorator_list=[])
def visit_ClassDef(self, node: ast.ClassDef) -> ast.ClassDef:
    """
    Any `class name` definition.
    """
    # Record the class docstring, replace the class name with a placeholder
    # token, then recurse so nested definitions get the same treatment.
    self.register_docstring(node)
    node.name = self.add_placeholder(node.name)
    self.generic_visit(node)
    return node
def test_find_ast_type_fails(self) -> None:
    """Test that `find_ast_type` throws the right errors"""
    # Non-AST inputs are rejected outright.
    self.assertRaises(NotImplementedError, lambda: find_ast_type(None))
    self.assertRaises(NotImplementedError, lambda: find_ast_type(""))
    # An empty module contains nothing to find.
    self.assertRaises(TypeError, lambda: find_ast_type(Module(body=[], stmt=None)))
    # Ambiguous: two ClassDefs and no name to disambiguate by.
    self.assertRaises(
        NotImplementedError,
        lambda: find_ast_type(
            Module(
                body=[
                    ClassDef(expr=None, identifier_name=None),
                    ClassDef(expr=None, identifier_name=None),
                ],
                stmt=None,
            )
        ),
    )
# Build the AST classes for one presentation slide: a dataclass-decorated
# "Slide{i}Content" holder (delegated to make_slide_content_class) plus a
# "Slide{i}(SlideData)" wrapper with an Optional `contents` field and the
# slide index stored as `slide_pos`. When the slide has notes matching
# note_name_regx, empty "<Name>Slide"/"<Name>Content" alias subclasses are
# appended. Returns the list of ClassDef nodes.
def make_slide_class(slide: Slide, slide_index: int) -> list: slide_class_name = f"Slide{slide_index}" content_class_name = f"{slide_class_name}Content" klass = [ make_slide_content_class(content_class_name, slide.shapes), ClassDef( name=slide_class_name, bases=[Name(id='SlideData', ctx=Load())], keywords=[], body=[ AnnAssign( target=Name(id='contents', ctx=Store()), annotation=ast_optional_subscript(content_class_name), value=ast_default_field(None), simple=1 ), AnnAssign( target=Name(id='slide_pos', ctx=Store()), annotation=Name(id='int', ctx=Load()), value=Constant(value=slide_index, kind=None), simple=1 ) ], decorator_list=[dataclass_decorator]), ] if slide.has_notes_slide: note = slide.notes_slide.notes_text_frame.text if note_name_regx.match(note): name = note_name_regx.search(note).group('name') alias_slide_class = ClassDef( name=f'{name}Slide', bases=[Name(id=slide_class_name, ctx=Load())], keywords=[], body=[Pass()], decorator_list=[], ) alias_content_class = ClassDef( name=f'{name}Content', bases=[Name(id=content_class_name, ctx=Load())], keywords=[], body=[Pass()], decorator_list=[], ) klass += [alias_slide_class, alias_content_class] return klass
def visit_ClassDef(self, node: ast.ClassDef) -> Any:
    """Rewrite the class body, expanding each view member in place."""
    node.body = [
        self._expand_view(member) if _is_view(member) else member
        for member in node.body
    ]
    return node
def visit_ClassDef(self, node: ast.ClassDef) -> ast.ClassDef:
    """Prepend location-metadata assignments to the class body.

    After visiting children, injects `_ig_filename`, `_ig_lines`,
    `_ig_columns` and `_ig_source` assignments whose values come from
    self.location(node).
    """
    self.generic_visit(node)
    loc = self.location(node)
    preamble = []
    for field in ("filename", "lines", "columns", "source"):
        preamble.append(
            ast.Assign(
                targets=[ast.Name(id=f"_ig_{field}", ctx=ast.Store())],
                value=getattr(loc, field),
            )
        )
    node.body[:0] = preamble
    return node
def _visit_cls(self: 'ASTTagger', node: ast.ClassDef):
    """Tag a class definition with its own symbol table.

    Bases, keywords and decorators are visited in the *enclosing* scope;
    only the class body is visited inside the freshly entered scope.
    """
    bases = visit_suite(self.visit, node.bases)
    keywords = visit_suite(self.visit, node.keywords)
    decorator_list = visit_suite(self.visit, node.decorator_list)
    # The class name itself binds in the enclosing scope, not in the class body.
    self.symtable.entered.add(node.name)
    new = self.symtable.enter_new()
    # Names implicitly defined inside every class namespace.
    new.entered.add('__module__')
    new.entered.add('__qualname__')  # pep-3155 nested name.
    new_tagger = ASTTagger(new)
    new.cts.add(ContextType.ClassDef)
    body = visit_suite(new_tagger.visit, node.body)
    node.bases = bases
    node.keywords = keywords
    node.decorator_list = decorator_list
    node.body = body
    return Tag(node, new)
def visit_ClassDef(self, node: ast.ClassDef) -> ast.ClassDef:
    """Append a `profile` decorator to every method of marked classes.

    A class is "marked" when its decorator list contains a bare Name
    `add_profile_decorator_to_class_methods`; only direct FunctionDef
    members are decorated.
    """
    node = self.generic_visit(node)  # type: ignore

    def is_marker(decorator) -> bool:
        return (isinstance(decorator, ast.Name)
                and decorator.id == "add_profile_decorator_to_class_methods")

    if any(is_marker(deco) for deco in node.decorator_list):
        rewritten = []
        for member in node.body:
            if isinstance(member, ast.FunctionDef):
                member.decorator_list.append(ast.Name("profile", ast.Load()))
            rewritten.append(member)
        node.body = rewritten
    return node
def test_find_ast_type(self) -> None:
    """Test that `find_ast_type` gives the wrapped class back"""
    # A module with exactly one ClassDef: find_ast_type must return it
    # unchanged (compared via run_ast_test; black formatting skipped).
    class_def = ClassDef(
        name="",
        bases=tuple(),
        keywords=tuple(),
        decorator_list=[],
        body=[],
        expr=None,
        identifier_name=None,
    )
    run_ast_test(
        self,
        find_ast_type(Module(body=[class_def], stmt=None)),
        class_def,
        skip_black=True,
    )
# Build a module AST deriving `child` from `parent`: imports the parent type
# (from arcor2.object_types.abstract when it is a built-in type, otherwise
# from a sibling module via a relative, depascalized import), then defines
# the child class with `_ABSTRACT = False` followed by a trailing `pass`.
# TODO (from source): add a docstring with the description, if provided.
def new_object_type(parent: ObjectTypeMeta, child: ObjectTypeMeta) -> AST: assert parent.type == child.base tree = Module(body=[], type_ignores=[]) if parent.type in built_in_types_names(): import_from = arcor2.object_types.abstract.__name__ else: import_from = f".{humps.depascalize(parent.type)}" tree.body.append( ImportFrom(module=import_from, names=[alias(name=parent.type, asname=None)], level=0)) c = ClassDef( name=child.type, bases=[get_name(parent.type)], keywords=[], body=[ Assign( targets=[Name(id="_ABSTRACT", ctx=Store())], value=NameConstant(value=False, kind=None), type_comment=None, ) ], decorator_list=[], ) # TODO add docstring with description (if provided) c.body.append(Pass()) tree.body.append(c) return tree
# Module-level AST fixture: an SQLAlchemy-style declarative `Config` class
# (subclass of `Base`, __tablename__ "config_tbl") with Column assignments
# for dataset_name / tfds_dir / K / as_numpy / data_loader_kwargs and a
# `__repr__` built from a str.format Call. The extra keywords
# (expr=None, identifier_name=None, lineno=None, **maybe_type_comment)
# match this project's patched AST constructors — presumably cdd's shims;
# verify against the local AST helpers.
config_decl_base_ast = ClassDef( name="Config", bases=[Name("Base", Load())], keywords=[], body=[ Expr(set_value(_docstring_header_and_return_str)), Assign(targets=[Name("__tablename__", Store())], value=set_value("config_tbl"), expr=None, lineno=None, **maybe_type_comment), Assign(targets=[Name("dataset_name", Store())], value=Call( func=Name("Column", Load()), args=[Name("String", Load())], keywords=[ keyword(arg="doc", value=set_value("name of dataset"), identifier=None), keyword(arg="default", value=set_value("mnist"), identifier=None), keyword(arg="primary_key", value=set_value(True), identifier=None), ], expr=None, expr_func=None, ), expr=None, lineno=None, **maybe_type_comment), Assign(targets=[Name("tfds_dir", Store())], value=Call( func=Name("Column", Load()), args=[Name("String", Load())], keywords=[ keyword( arg="doc", value=set_value("directory to look for models in"), identifier=None, ), keyword( arg="default", value=set_value("~/tensorflow_datasets"), identifier=None, ), keyword(arg="nullable", value=set_value(False), identifier=None), ], expr=None, expr_func=None, ), expr=None, lineno=None, **maybe_type_comment), Assign( targets=[Name("K", Store())], value=Call( func=Name("Column", Load()), args=[ Call( func=Name("Enum", Load()), args=[set_value("np"), set_value("tf")], keywords=[ keyword(arg="name", value=set_value("K"), identifier=None) ], expr=None, expr_func=None, ) ], keywords=[ keyword( arg="doc", value=set_value("backend engine, e.g., `np` or `tf`"), identifier=None, ), keyword(arg="default", value=set_value("np"), identifier=None), keyword(arg="nullable", value=set_value(False), identifier=None), ], expr=None, expr_func=None, ), expr=None, lineno=None, **maybe_type_comment), Assign(targets=[Name("as_numpy", Store())], value=Call( func=Name("Column", Load()), args=[Name("Boolean", Load())], keywords=[ keyword( arg="doc", value=set_value("Convert to numpy ndarrays"), identifier=None, ), keyword(arg="default", value=set_value(None), 
identifier=None), keyword(arg="nullable", value=set_value(True), identifier=None), ], expr=None, expr_func=None, ), expr=None, lineno=None, **maybe_type_comment), Assign( targets=[Name("data_loader_kwargs", Store())], value=Call( func=Name("Column", Load()), args=[Name("JSON", Load())], keywords=[ keyword( arg="doc", value=set_value( "pass this as arguments to data_loader function"), identifier=None, ), keyword(arg="default", value=set_value(None), identifier=None), keyword(arg="nullable", value=set_value(True), identifier=None), ], expr=None, expr_func=None, ), expr=None, lineno=None, **maybe_type_comment), FunctionDef( name="__repr__", args=arguments( posonlyargs=[], arg=None, args=[ arg(arg="self", annotation=None, expr=None, identifier_arg=None, **maybe_type_comment) ], kwonlyargs=[], kw_defaults=[], defaults=[], vararg=None, kwarg=None, ), body=[ Expr(set_value(docstring_repr_str)), Return( value=Call( func=Attribute( set_value( "Config(dataset_name={dataset_name!r}, tfds_dir={tfds_dir!r}, K={K!r}, " "as_numpy={as_numpy!r}, data_loader_kwargs={data_loader_kwargs!r})" ), "format", Load(), ), args=[], keywords=[ keyword( arg="dataset_name", value=Attribute(Name("self", Load()), "dataset_name", Load()), identifier=None, ), keyword( arg="tfds_dir", value=Attribute(Name("self", Load()), "tfds_dir", Load()), identifier=None, ), keyword( arg="K", value=Attribute(Name("self", Load()), "K", Load()), identifier=None, ), keyword( arg="as_numpy", value=Attribute(Name("self", Load()), "as_numpy", Load()), identifier=None, ), keyword( arg="data_loader_kwargs", value=Attribute(Name("self", Load()), "data_loader_kwargs", Load()), identifier=None, ), ], expr=None, expr_func=None, ), expr=None, ), ], decorator_list=[], arguments_args=None, identifier_name=None, stmt=None, lineno=None, returns=None, **maybe_type_comment), ], decorator_list=[], expr=None, identifier_name=None, )
alias(name='ChartCategoryData', asname=None), alias(name='ChartBubbleData', asname=None), alias(name='ChartXYData', asname=None), alias(name='TableData', asname=None), ], level=0), ClassDef( name='Slide1Content', bases=[], keywords=[], body=[ Assign( targets=[Name(id='title', ctx=Store())], value=text_data_value, type_comment=None), Assign( targets=[Name(id='subtitle', ctx=Store())], value=text_data_value, type_comment=None), Assign( targets=[Name(id='date', ctx=Store())], value=text_data_value, type_comment=None) ], decorator_list=[dataclass_decorator] ), ClassDef( name='Slide1', bases=[Name(id='SlideData', ctx=Load())], keywords=[], body=[ Assign(
# AST literal of a mocked class `C(object)` whose single method
# `function_name` carries Literal/Optional-annotated parameters, a docstring,
# a `print(5 * 5)`, an If with an early `return 5`, and a final Return of two
# `np.empty(0)` calls; the return annotation is
# Union[Tuple[tf.data.Dataset, tf.data.Dataset], Tuple[np.ndarray, np.ndarray]].
# NOTE(review): appears to be one element of a larger fixture expression —
# confirm the surrounding context before treating it as standalone.
ClassDef( name="C", bases=[Name("object", Load())], keywords=[], body=[ Expr(set_value(" C class (mocked!) ")), FunctionDef( name="function_name", args=arguments( posonlyargs=[], vararg=None, args=[ set_arg("self"), set_arg(arg="dataset_name", annotation=Name("str", Load())), set_arg( arg="tfds_dir", annotation=Subscript( Name("Optional", Load()), set_slice(Name("str", Load())), Load(), ), ), set_arg( arg="K", annotation=Subscript( Name("Literal", Load()), set_slice( Tuple( elts=list(map(set_value, ("np", "tf"))), ctx=Load(), expr=None, ) ), Load(), ), ), set_arg( arg="as_numpy", annotation=Subscript( Name("Optional", Load()), set_slice(Name("bool", Load())), Load(), ), ), ], kwonlyargs=[], kw_defaults=[], kwarg=set_arg("data_loader_kwargs"), defaults=list( map(set_value, ("mnist", "~/tensorflow_datasets", "np", None)) ), arg=None, ), body=[ Expr( set_value( docstring_no_type_no_default_str, ) ), Expr( Call( func=Name("print", Load()), args=[ BinOp( set_value(5), Mult(), set_value(5), ) ], keywords=[], expr=None, expr_func=None, ) ), If( test=set_value(True), body=[ Expr( Call( func=Name("print", Load()), args=[set_value(True)], keywords=[], expr=None, expr_func=None, ) ), Return( value=set_value(5), expr=None, ), ], orelse=[], expr_test=None, stmt=None, ), Return( value=Tuple( elts=[ Call( func=Attribute(Name("np", Load()), "empty", Load()), args=[set_value(0)], keywords=[], expr=None, expr_func=None, ) ] * 2, ctx=Load(), expr=None, ), expr=None, ), ], decorator_list=[], returns=Subscript( Name("Union", Load()), set_slice( Tuple( [ Subscript( Name("Tuple", Load()), set_slice( Tuple( [ Attribute( Attribute( Name("tf", Load()), "data", Load(), ), "Dataset", Load(), ) ] * 2, Load(), expr=None, ) ), Load(), ), Subscript( Name("Tuple", Load()), set_slice( Tuple( [ Attribute( Name("np", Load()), "ndarray", Load(), ) ] * 2, Load(), expr=None, ) ), Load(), ), ], Load(), ) ), Load(), ), arguments_args=None, identifier_name=None, stmt=None, lineno=None, **maybe_type_comment 
), ], decorator_list=[], expr=None, identifier_name=None, )
# NOTE(review): this function mutates `intermediate_repr` in place (merges
# "returns" into "params" and deletes the "returns" key) — callers should not
# reuse the IR dict afterwards. The RewriteName pass rewrites references to
# parameter names inside any `_internal` body before it is emitted.
def class_( intermediate_repr, emit_call=False, class_name="ConfigClass", class_bases=("object", ), decorator_list=None, emit_default_doc=False, ): """ Construct a class :param intermediate_repr: a dictionary of form { "name": Optional[str], "type": Optional[str], "doc": Optional[str], "params": OrderedDict[str, {'typ': str, 'doc': Optional[str], 'default': Any}] "returns": Optional[OrderedDict[Literal['return_type'], {'typ': str, 'doc': Optional[str], 'default': Any}),)]] } :type intermediate_repr: ```dict``` :param emit_call: Whether to emit a `__call__` method from the `_internal` IR subdict :type emit_call: ```bool``` :param class_name: name of class :type class_name: ```str``` :param class_bases: bases of class (the generated class will inherit these) :type class_bases: ```Iterable[str]``` :param decorator_list: List of decorators :type decorator_list: ```Optional[Union[List[Str], List[]]]``` :param emit_default_doc: Whether help/docstring should include 'With default' text :type emit_default_doc: ```bool``` :return: Class AST of the docstring :rtype: ```ClassDef``` """ returns = (intermediate_repr["returns"] if "return_type" in intermediate_repr.get("returns", {}) else OrderedDict()) param_names = frozenset(intermediate_repr["params"].keys()) if returns: intermediate_repr["params"].update(returns) del intermediate_repr["returns"] internal_body = intermediate_repr.get("_internal", {}).get("body", []) # TODO: Add correct classmethod/staticmethod to decorate function using `annotate_ancestry` and first-field checks # Such that the `self.` or `cls.` rewrite only applies to non-staticmethods # assert internal_body, "Expected `internal_body` to have contents" if internal_body and param_names: internal_body = list( map( ast.fix_missing_locations, map(RewriteName(param_names).visit, internal_body), )) return ClassDef( bases=list(map(rpartial(Name, Load()), class_bases)), body=list( chain.from_iterable(( (Expr( set_value( to_docstring( intermediate_repr, 
indent_level=0, emit_separating_tab=False, emit_default_doc=emit_default_doc, emit_types=False, ).replace("\n:param ", "{tab}:cvar ".format(tab=tab)).replace( "{tab}:cvar ".format(tab=tab), "\n{tab}:cvar ".format(tab=tab), 1, ).rstrip())), ), map(param2ast, intermediate_repr["params"].items()), iter((_make_call_meth( internal_body, returns["return_type"]["default"] if "default" in ( (returns or { "return_type": iter(()) }).get("return_type") or iter(())) else None, param_names, ), ) if emit_call and internal_body else tuple()), ))), decorator_list=list(map(rpartial(Name, Load()), decorator_list)) if decorator_list else [], keywords=[], name=class_name, expr=None, identifier_name=None, )
def find_immutability_flag(self, node: ClassDef) -> bool:
    """Strip mutability-marker decorators from *node* and report the result.

    Returns True when no such decorator was present (the class counts as
    immutable), False when at least one decorator was removed.
    """
    kept = [deco for deco in node.decorator_list if not is_mutable(deco)]
    removed_count = len(node.decorator_list) - len(kept)
    node.decorator_list = kept
    return removed_count == 0
# Expand a `Contract` class (only classes literally named "Contract" are
# touched, and only when no `deploy` method already exists): lifts the
# AnnAssign fields into a generated `Storage` dataclass stored on
# self.storage_dataclass, rewrites each method via ExpandStorageInEntrypoints,
# forces every method to return self.storage (adding a `Storage` return
# annotation when absent), and prepends a synthesized `deploy()` returning
# `Storage()`.
# NOTE(review): the return statement is appended to the *transformed* node
# while `returns` is set on the original `body_node` — identical only when
# the visitor mutates in place; confirm.
def visit_ClassDef(self, node: ast.ClassDef) -> Any: # only expand contract methods if node.name != "Contract": return node # if there is a `deploy` method, do nothing for body_node in node.body: if type(body_node) == ast.FunctionDef: if body_node.name == "deploy": return node # Factor body ast.AnnAssign into dataclass storage_keys_spec = {} new_node_body = [] for i, body_node in enumerate(node.body): if type(body_node) == ast.AnnAssign: storage_keys_spec[body_node.target.id] = body_node.annotation else: new_node_body.append(body_node) node.body = new_node_body self.storage_dataclass = make_dataclass('Storage', storage_keys_spec) # For all methods, update `self.<storage_key>` into `self.storage.<key>` # and add return `self.storage` new_body = [] for body_node in node.body: new_body_node = ExpandStorageInEntrypoints().visit(body_node) if type(body_node) == ast.FunctionDef: if not body_node.returns: body_node.returns = ast.Name(id='Storage', ctx=ast.Load()) return_storage_node = ast.Return(value=ast.Attribute( value=ast.Name(id='self', ctx=ast.Load()), attr='storage', ctx=ast.Load())) new_body_node.body.append(return_storage_node) new_body.append(new_body_node) node.body = new_body # Create deploy function deploy_function_node = ast.FunctionDef( name='deploy', args=ast.arguments(posonlyargs=[], args=[], vararg=None, kwonlyargs=[], kw_defaults=[], kwarg=None, defaults=[]), body=[ ast.Return( value=ast.Call(func=ast.Name(id='Storage', ctx=ast.Load()), args=[], keywords=[])) ], decorator_list=[], returns=None, type_comment=None, ) node.body = [deploy_function_node] + node.body return node
ClassDef( name="C", bases=[Name("object", Load())], keywords=[], body=[ Expr(set_value(" C class (mocked!) ")), FunctionDef( name="function_name", args=arguments( posonlyargs=[], vararg=None, args=[ set_arg("self"), set_arg(arg="dataset_name", annotation=Name("str", Load())), set_arg( arg="tfds_dir", annotation=Subscript( Name("Optional", Load()), set_slice(Name("str", Load())), Load(), ), ), set_arg( arg="K", annotation=Subscript( Name("Literal", Load()), set_slice( Tuple( elts=list(map(set_value, ("np", "tf"))), ctx=Load(), expr=None, )), Load(), ), ), set_arg( arg="as_numpy", annotation=Subscript( Name("Optional", Load()), set_slice(Name("bool", Load())), Load(), ), ), ], kwonlyargs=[], kw_defaults=[], kwarg=set_arg("data_loader_kwargs"), defaults=list( map(set_value, ("mnist", "~/tensorflow_datasets", "np", None))), arg=None, ), body=[ Expr( set_value( "\n Acquire from the official tensorflow_datasets model zoo," " or the ophthalmology focussed ml-prepare library\n\n " ":param dataset_name: name of dataset.\n\n " ":param tfds_dir: directory to look for models in.\n\n " ":param K: backend engine, e.g., `np` or `tf`.\n\n " ":param as_numpy: Convert to numpy ndarrays.\n\n " ":param data_loader_kwargs: pass this as arguments to data_loader function\n\n " ":return: Train and tests dataset splits.\n ", )), Expr( Call( func=Name("print", Load()), args=[BinOp( set_value(5), Mult(), set_value(5), )], keywords=[], expr=None, expr_func=None, )), If( test=set_value(True), body=[ Expr( Call( func=Name("print", Load()), args=[set_value(True)], keywords=[], expr=None, expr_func=None, )), Return( value=set_value(5), expr=None, ), ], orelse=[], expr_test=None, stmt=None, ), Return( value=Tuple( elts=[ Call( func=Attribute(Name("np", Load()), "empty", Load()), args=[set_value(0)], keywords=[], expr=None, expr_func=None, ) ] * 2, ctx=Load(), expr=None, ), expr=None, ), ], decorator_list=[], returns=Subscript( Name("Union", Load()), set_slice( Tuple( [ Subscript( Name("Tuple", 
Load()), set_slice( Tuple( [ Attribute( Attribute( Name("tf", Load()), "data", Load(), ), "Dataset", Load(), ) ] * 2, Load(), expr=None, )), Load(), ), Subscript( Name("Tuple", Load()), set_slice( Tuple( [ Attribute( Name("np", Load()), "ndarray", Load(), ) ] * 2, Load(), expr=None, )), Load(), ), ], Load(), )), Load(), ), arguments_args=None, identifier_name=None, stmt=None, lineno=None, **maybe_type_comment), ], decorator_list=[], expr=None, identifier_name=None, ))
class_ast = ClassDef( bases=[Name("object", Load())], body=[ Expr( set_value( "\n Acquire from the official tensorflow_datasets model zoo," " or the ophthalmology focussed ml-prepare library\n\n " ':cvar dataset_name: name of dataset. Defaults to "mnist"\n ' ':cvar tfds_dir: directory to look for models in. Defaults to "~/tensorflow_datasets"\n ' ':cvar K: backend engine, e.g., `np` or `tf`. Defaults to "np"\n ' ":cvar as_numpy: Convert to numpy ndarrays. Defaults to None\n " ":cvar data_loader_kwargs: pass this as arguments to data_loader function\n " ":cvar return_type: Train and tests dataset splits. Defaults to (np.empty(0), np.empty(0))", ) ), AnnAssign( annotation=Name( "str", Load(), ), simple=1, target=Name("dataset_name", Store()), value=set_value("mnist"), expr=None, expr_annotation=None, expr_target=None, ), AnnAssign( annotation=Name( "str", Load(), ), simple=1, target=Name("tfds_dir", Store()), value=set_value( "~/tensorflow_datasets", ), expr=None, expr_annotation=None, expr_target=None, ), AnnAssign( annotation=Subscript( Name( "Literal", Load(), ), Index( value=Tuple( elts=list( map( set_value, ( "np", "tf", ), ) ), ctx=Load(), expr=None, ) ), Load(), ), simple=1, target=Name("K", Store()), value=set_value("np"), expr=None, expr_target=None, expr_annotation=None, ), AnnAssign( annotation=Subscript( Name( "Optional", Load(), ), Index(value=Name("bool", Load())), Load(), ), simple=1, target=Name("as_numpy", Store()), value=set_value(None), expr=None, expr_target=None, expr_annotation=None, ), AnnAssign( annotation=Subscript( Name("Optional", Load()), set_slice(Name("dict", Load())), Load() ), simple=1, target=Name( "data_loader_kwargs", Store(), ), value=set_value(None), expr=None, expr_target=None, expr_annotation=None, ), AnnAssign( annotation=Subscript( Name("Union", Load()), Index( value=Tuple( ctx=Load(), elts=[ Subscript( Name("Tuple", Load()), Index( value=Tuple( ctx=Load(), elts=[ Attribute( Attribute( Name("tf", Load()), "data", Load(), ), 
"Dataset", Load(), ) ] * 2, expr=None, ) ), Load(), ), Subscript( Name("Tuple", Load()), Index( Tuple( ctx=Load(), elts=[ Attribute( Name("np", Load()), "ndarray", Load(), ) ] * 2, expr=None, ) ), Load(), ), ], expr=None, ) ), Load(), ), simple=1, target=Name("return_type", Store()), value=Tuple( ctx=Load(), elts=[ Call( args=[set_value(0)], func=Attribute( Name("np", Load()), "empty", Load(), ), keywords=[], expr=None, expr_func=None, ) ] * 2, expr=None, ), expr=None, expr_target=None, expr_annotation=None, ), ], decorator_list=[], keywords=[], name="ConfigClass", expr=None, identifier_name=None, )
# AST fixture: ClassDef "ClassMock" whose body is a docstring Expr, two
# annotated class attributes (a: int = 5, b: float = 0.0) and a pre-built
# annotated function node (function_type_annotated).
class_with_internal_annotated = ClassDef( name="ClassMock", bases=tuple(), keywords=tuple(), decorator_list=[], body=[ _class_doc_str_expr, AnnAssign( annotation=Name( "int", Load(), ), simple=1, target=Name("a", Store()), value=set_value(5), expr=None, expr_target=None, expr_annotation=None, lineno=None, ), AnnAssign( annotation=Name( "float", Load(), ), simple=1, target=Name("b", Store()), value=set_value(0.0), expr=None, expr_target=None, expr_annotation=None, lineno=None, ), function_type_annotated, ], expr=None, identifier_name=None, )
# Generate source code (via tree_to_str) for a module exposing a project's
# action points: import statements first, then per action point a class that
# caches `_position` (and `_<name>` joints/orientations) in __init__ from the
# `res.project` cache, with nested "<AP>Joints"/"<AP>Poses" classes whose
# properties return deepcopies; finally an aggregating `ActionPoints` class
# whose __init__ binds one attribute per action point (a bare Pass when the
# project has none).
def global_action_points_class(project: CachedProject) -> str: tree = Module(body=[]) tree.body.append( ImportFrom( module=arcor2.data.common.__name__, names=[ alias(name=ActionPoint.__name__, asname=None), alias(name=Position.__name__, asname=None), alias(name=Pose.__name__, asname=None), alias(name=ProjectRobotJoints.__name__, asname=None), ], level=0, )) tree.body.append( ImportFrom( module=copy.__name__, names=[alias(name=copy.deepcopy.__name__, asname=None)], level=0, )) tree.body.append( ImportFrom( module=RES_MODULE, names=[alias(name=RES_CLS, asname=None)], level=0, )) aps_init_body: List[Union[Assign, Pass]] = [] for ap in project.action_points: ap_cls_body: List[Assign] = [ Assign( targets=[ Attribute(value=Name(id="self", ctx=Load()), attr="_position", ctx=Store()) ], value=Attribute( value=Call( func=Attribute( value=Attribute(value=Name(id="res", ctx=Load()), attr="project", ctx=Load()), attr=CachedProject.bare_action_point.__name__, ctx=Load(), ), args=[Str(s=ap.id, kind="")], keywords=[], ), attr="position", ctx=Load(), ), type_comment=None, ) ] ap_type_name = humps.pascalize(ap.name) ap_joints_init_body: List[Assign] = [] for joints in project.ap_joints(ap.id): ap_joints_init_body.append( Assign( targets=[ Attribute(value=Name(id="self", ctx=Load()), attr=f"_{joints.name}", ctx=Store()) ], value=Call( func=Attribute( value=Attribute(value=Name(id="res", ctx=Load()), attr="project", ctx=Load()), attr="joints", ctx=Load(), ), args=[Str(s=joints.id, kind="")], keywords=[], ), type_comment=None, )) if ap_joints_init_body: ap_joints_cls_def = ClassDef( name=f"{ap_type_name}Joints", bases=[], keywords=[], body=[ FunctionDef( name="__init__", args=arguments( args=[ arg(arg="self", annotation=None, type_comment=None), arg(arg="res", annotation=Name(id=RES_CLS, ctx=Load()), type_comment=None), ], vararg=None, kwonlyargs=[], kw_defaults=[], kwarg=None, defaults=[], ), body=ap_joints_init_body, decorator_list=[], returns=None, type_comment=None, ) ], 
decorator_list=[], ) for joints in project.ap_joints(ap.id): ap_joints_cls_def.body.append( FunctionDef( name=joints.name, args=arguments( args=[ arg(arg="self", annotation=None, type_comment=None) ], vararg=None, kwonlyargs=[], kw_defaults=[], kwarg=None, defaults=[], ), body=[ Return(value=Call( func=Name(id=copy.deepcopy.__name__, ctx=Load()), args=[ Attribute(value=Name(id="self", ctx=Load()), attr=f"_{joints.name}", ctx=Load()) ], keywords=[], )) ], decorator_list=[Name(id="property", ctx=Load())], returns=Name(id=ProjectRobotJoints.__name__, ctx=Load()), type_comment=None, )) tree.body.append(ap_joints_cls_def) ap_cls_body.append( Assign( targets=[ Attribute(value=Name(id="self", ctx=Load()), attr="joints", ctx=Store()) ], value=Call( func=Name(id=f"{ap_type_name}Joints", ctx=Load()), args=[Name(id="res", ctx=Load())], keywords=[], ), type_comment=None, )) ap_orientations_init_body: List[Assign] = [] for ori in project.ap_orientations(ap.id): ap_orientations_init_body.append( Assign( targets=[ Attribute(value=Name(id="self", ctx=Load()), attr=f"_{ori.name}", ctx=Store()) ], value=Call( func=Attribute( value=Attribute(value=Name(id="res", ctx=Load()), attr="project", ctx=Load()), attr="pose", ctx=Load(), ), args=[Str(s=ori.id, kind="")], keywords=[], ), type_comment=None, )) if ap_orientations_init_body: ap_orientations_cls_def = ClassDef( name=f"{ap_type_name}Poses", bases=[], keywords=[], body=[ FunctionDef( name="__init__", args=arguments( args=[ arg(arg="self", annotation=None, type_comment=None), arg(arg="res", annotation=Name(id=RES_CLS, ctx=Load()), type_comment=None), ], vararg=None, kwonlyargs=[], kw_defaults=[], kwarg=None, defaults=[], ), body=ap_orientations_init_body, decorator_list=[], returns=None, type_comment=None, ) ], decorator_list=[], ) for ori in project.ap_orientations(ap.id): ap_orientations_cls_def.body.append( FunctionDef( name=ori.name, args=arguments( args=[ arg(arg="self", annotation=None, type_comment=None) ], vararg=None, 
kwonlyargs=[], kw_defaults=[], kwarg=None, defaults=[], ), body=[ Return(value=Call( func=Name(id=copy.deepcopy.__name__, ctx=Load()), args=[ Attribute(value=Name(id="self", ctx=Load()), attr=f"_{ori.name}", ctx=Load()) ], keywords=[], )) ], decorator_list=[Name(id="property", ctx=Load())], returns=Name(id=Pose.__name__, ctx=Load()), type_comment=None, )) tree.body.append(ap_orientations_cls_def) ap_cls_body.append( Assign( targets=[ Attribute(value=Name(id="self", ctx=Load()), attr="poses", ctx=Store()) ], value=Call( func=Name(id=f"{ap_type_name}Poses", ctx=Load()), args=[Name(id="res", ctx=Load())], keywords=[], ), type_comment=None, )) ap_cls_def = ClassDef( name=ap_type_name, bases=[], keywords=[], body=[ FunctionDef( name="__init__", args=arguments( args=[ arg(arg="self", annotation=None, type_comment=None), arg(arg="res", annotation=Name(id=RES_CLS, ctx=Load()), type_comment=None), ], vararg=None, kwonlyargs=[], kw_defaults=[], kwarg=None, defaults=[], ), body=ap_cls_body, decorator_list=[], returns=None, type_comment=None, ) ], decorator_list=[], ) # add copy property for position ap_cls_def.body.append( FunctionDef( name="position", args=arguments( args=[arg(arg="self", annotation=None, type_comment=None)], vararg=None, kwonlyargs=[], kw_defaults=[], kwarg=None, defaults=[], ), body=[ Return(value=Call( func=Name(id=copy.deepcopy.__name__, ctx=Load()), args=[ Attribute(value=Name(id="self", ctx=Load()), attr="_position", ctx=Load()) ], keywords=[], )) ], decorator_list=[Name(id="property", ctx=Load())], returns=Name(id=Position.__name__, ctx=Load()), type_comment=None, )) tree.body.append(ap_cls_def) aps_init_body.append( Assign( targets=[ Attribute(value=Name(id="self", ctx=Load()), attr=ap.name, ctx=Store()) ], value=Call(func=Name(id=ap_type_name, ctx=Load()), args=[Name(id="res", ctx=Load())], keywords=[]), type_comment=None, )) if not aps_init_body: # there are no action points aps_init_body.append(Pass()) aps_cls_def = ClassDef( name="ActionPoints", 
bases=[], keywords=[], body=[ FunctionDef( name="__init__", args=arguments( args=[ arg(arg="self", annotation=None, type_comment=None), arg(arg="res", annotation=Name(id=RES_CLS, ctx=Load()), type_comment=None), ], vararg=None, kwonlyargs=[], kw_defaults=[], kwarg=None, defaults=[], ), body=aps_init_body, decorator_list=[], returns=None, type_comment=None, ) ], decorator_list=[], ) tree.body.append(aps_cls_def) return tree_to_str(tree)
def sqlalchemy(
    intermediate_repr,
    emit_repr=True,
    class_name="Config",
    class_bases=("Base", ),
    decorator_list=None,
    table_name=None,
    docstring_format="rest",
    word_wrap=True,
    emit_original_whitespace=False,
    emit_default_doc=True,
):
    """
    Construct an SQLAlchemy declarative class

    :param intermediate_repr: a dictionary of form
        {  "name": Optional[str],
           "type": Optional[str],
           "doc": Optional[str],
           "params": OrderedDict[str, {'typ': str, 'doc': Optional[str], 'default': Any}]
           "returns": Optional[OrderedDict[Literal['return_type'],
                                           {'typ': str, 'doc': Optional[str], 'default': Any}),)]] }
    :type intermediate_repr: ```dict```

    :param emit_repr: Whether to generate a `__repr__` method
    :type emit_repr: ```bool```

    :param class_name: name of class
    :type class_name: ```str```

    :param class_bases: bases of class (the generated class will inherit these)
    :type class_bases: ```Iterable[str]```

    :param decorator_list: List of decorators
    :type decorator_list: ```Optional[Union[List[Str], List[]]]```

    :param table_name: Table name, defaults to `class_name`
    :type table_name: ```str```

    :param docstring_format: Format of docstring
    :type docstring_format: ```Literal['rest', 'numpydoc', 'google']```

    :param word_wrap: Whether to word-wrap. Set `DOCTRANS_LINE_LENGTH` to configure length.
    :type word_wrap: ```bool```

    :param emit_original_whitespace: Whether to emit an original whitespace (in docstring) or strip it out
    :type emit_original_whitespace: ```bool```

    :param emit_default_doc: Whether help/docstring should include 'With default' text
    :type emit_default_doc: ```bool```

    :returns: SQLalchemy declarative class AST
    :rtype: ```ClassDef```
    """
    return ClassDef(
        name=class_name,
        # Each base name becomes a bare `Name` load, e.g. `Base`
        bases=list(
            map(lambda class_base: Name(class_base, Load()), class_bases)),
        decorator_list=decorator_list or [],
        keywords=[],
        body=list(
            filter(
                # `filter(None, ...)` drops the optional docstring Expr and
                # optional `__repr__` when they are emitted as `None` below
                None,
                (
                    # Class docstring: the IR's top-level doc concatenated with
                    # its return documentation, both rendered via `docstring`.
                    # NOTE(review): when `doc` is falsy this condition calls
                    # `.get` on intermediate_repr["returns"] — presumably
                    # "returns" is always a dict here; confirm with callers.
                    Expr(
                        set_value(
                            add(*map(
                                partial(
                                    docstring,
                                    docstring_format=docstring_format,
                                    emit_default_doc=emit_default_doc,
                                    emit_original_whitespace=
                                    emit_original_whitespace,
                                    emit_separating_tab=True,
                                    emit_types=True,
                                    indent_level=1,
                                    word_wrap=word_wrap,
                                ),
                                (
                                    {
                                        "doc": intermediate_repr["doc"],
                                        "params": OrderedDict(),
                                        "returns": None,
                                    },
                                    {
                                        "doc": "",
                                        "params": OrderedDict(),
                                        "returns": intermediate_repr["returns"],
                                    },
                                ),
                            )))) if intermediate_repr["doc"]
                    or intermediate_repr["returns"].get(
                        "return_type", {}).get("doc") else None,
                    # `__tablename__ = "<table_name or class_name>"`
                    Assign(targets=[Name("__tablename__", Store())],
                           value=set_value(table_name or class_name),
                           expr=None,
                           lineno=None,
                           **maybe_type_comment),
                    # One `<param> = Column(...)` assignment per IR param
                    *map(
                        lambda param: Assign(targets=[Name(param[0], Store())],
                                             value=
                                             param_to_sqlalchemy_column_call(
                                                 param, include_name=False),
                                             expr=None,
                                             lineno=None,
                                             **maybe_type_comment),
                        intermediate_repr["params"].items(),
                    ),
                    # Optional generated `__repr__`
                    generate_repr_method(intermediate_repr["params"],
                                         class_name, docstring_format)
                    if emit_repr else None,
                ),
            )),
        expr=None,
        identifier_name=None,
    )
def test_valid_node_corner_cases(self):
    """ClassDef/FunctionDef nodes are valid regardless of their (unknown) name."""
    from ast import ClassDef, FunctionDef

    unguessable = 'noone_can_guess_this_name_srs'
    for node_factory in (ClassDef, FunctionDef):
        self.assertTrue(self.cmd._valid_node(node_factory(name=unguessable)))
def visit_ClassDef(self, node: ast.ClassDef) -> Any:
    """Record the class's name, then rewrite every statement in its body.

    Mutates ``node`` in place (its ``body`` list is replaced with the
    visited statements) and returns the same node.
    """
    self.defined_class_names.append(node.name)
    node.body = list(map(self.visit, node.body))
    return node
def class_(
    intermediate_repr,
    emit_call=False,
    class_name="ConfigClass",
    class_bases=("object", ),
    decorator_list=None,
    word_wrap=True,
    docstring_format="rest",
    emit_original_whitespace=False,
    emit_default_doc=False,
):
    """
    Construct a class

    :param intermediate_repr: a dictionary of form
        {  "name": Optional[str],
           "type": Optional[str],
           "doc": Optional[str],
           "params": OrderedDict[str, {'typ': str, 'doc': Optional[str], 'default': Any}]
           "returns": Optional[OrderedDict[Literal['return_type'],
                                           {'typ': str, 'doc': Optional[str], 'default': Any}),)]] }
    :type intermediate_repr: ```dict```

    :param emit_call: Whether to emit a `__call__` method from the `_internal` IR subdict
    :type emit_call: ```bool```

    :param class_name: name of class
    :type class_name: ```str```

    :param class_bases: bases of class (the generated class will inherit these)
    :type class_bases: ```Iterable[str]```

    :param decorator_list: List of decorators
    :type decorator_list: ```Optional[Union[List[Str], List[]]]```

    :param word_wrap: Whether to word-wrap. Set `DOCTRANS_LINE_LENGTH` to configure length.
    :type word_wrap: ```bool```

    :param docstring_format: Format of docstring
    :type docstring_format: ```Literal['rest', 'numpydoc', 'google']```

    :param emit_original_whitespace: Whether to emit original whitespace or strip it out (in docstring)
    :type emit_original_whitespace: ```bool```

    :param emit_default_doc: Whether help/docstring should include 'With default' text
    :type emit_default_doc: ```bool```

    :returns: Class AST
    :rtype: ```ClassDef```
    """
    assert isinstance(
        intermediate_repr,
        dict), "{intermediate_repr_type_name} != dict".format(
            intermediate_repr_type_name=type(intermediate_repr).__name__)
    # `returns` is the IR's return spec when present, else an empty dict;
    # membership is tested against the dict's keys (or an empty iterator)
    returns = (intermediate_repr["returns"]
               if "return_type" in ((intermediate_repr or {}).get("returns")
                                    or iter(())) else OrderedDict())
    param_names = frozenset(intermediate_repr["params"].keys())
    # NOTE: mutates `intermediate_repr` in place — the return spec is folded
    # into "params" and the "returns" key is deleted
    if returns:
        intermediate_repr["params"].update(returns)
        del intermediate_repr["returns"]
    internal_body = intermediate_repr.get("_internal", {}).get("body", [])
    # TODO: Add correct classmethod/staticmethod to decorate function using `annotate_ancestry` and first-field checks
    # Such that the `self.` or `cls.` rewrite only applies to non-staticmethods
    # assert internal_body, "Expected `internal_body` to have contents"
    if param_names:
        # Rewrite bare names in the internal body to `self.<name>` style
        # (RewriteName is a project NodeTransformer — semantics assumed; verify)
        if internal_body:
            internal_body = list(
                map(
                    ast.fix_missing_locations,
                    map(RewriteName(param_names).visit, internal_body),
                ))
    elif (returns or {"return_type": None}).get("return_type") is not None:
        internal_body = returns["return_type"]
    indent_level = 1
    sep = indent_level * tab
    # Pre-bound docstring emitter shared by the doc and params/returns halves
    _emit_docstring = partial(
        docstring,
        docstring_format=docstring_format,
        indent_level=indent_level,
        emit_default_doc=emit_default_doc,
        emit_separating_tab=True,
        emit_types=False,
        word_wrap=word_wrap,
    )
    return ClassDef(
        bases=list(map(rpartial(Name, Load()), class_bases)),
        # Body = docstring Expr + one AST assign per param + optional __call__
        body=list(
            chain.from_iterable((
                (Expr(
                    set_value(
                        "\n{sep}".format(sep=sep).join((
                            # Top-level doc, params/returns suppressed
                            _emit_docstring(
                                {
                                    "doc": intermediate_repr.get("doc", ""),
                                    "params": OrderedDict(),
                                    "returns": None,
                                },
                                emit_original_whitespace=
                                emit_original_whitespace,
                            ),
                            # Params/returns rendered then rewritten from
                            # `:param`/`:returns:` to class-level `:cvar`
                            # markers (only the first `:returns:` is replaced)
                            _emit_docstring(
                                {
                                    "doc": "",
                                    "params": intermediate_repr.get("params"),
                                    "returns": intermediate_repr.get(
                                        "returns"),
                                },
                                emit_original_whitespace=False,
                            ).replace(
                                "\n{sep}:param ".format(sep=sep),
                                ":cvar ",
                            ).replace(
                                "\n{sep}:returns:".format(sep=sep),
                                ":cvar return_type:",
                                1,
                            ).rstrip(),
                        )),
                    )),
                ),
                map(param2ast, intermediate_repr["params"].items()),
                # Optional `__call__`: only when requested AND there is an
                # internal body; default comes from the return spec if present
                iter(((_make_call_meth(
                    internal_body,
                    returns["return_type"]["default"] if "default" in (
                        (returns or {
                            "return_type": iter(())
                        }).get("return_type") or iter(())) else None,
                    param_names,
                    docstring_format=docstring_format,
                    word_wrap=word_wrap,
                ), ) or iter(())) if emit_call and internal_body else iter(
                    ())),
            ))),
        decorator_list=list(map(rpartial(Name, Load()), decorator_list))
        if decorator_list else [],
        keywords=[],
        name=class_name,
        expr=None,
        identifier_name=None,
    )
return AnnAssign( target=Name(id=name_to_slugify(shape.name), ctx=Store()), annotation=value, value=ast_default_field(None), simple=1, ) def make_slide_content_class(name: str, shapes: SlideShapes): body = [] for shape in shapes: if field := make_data_field(shape): body.append(field) return ClassDef( name=name, bases=[], keywords=[], body=body, decorator_list=[dataclass_decorator] ) note_name_regx = re.compile(r"^name:\s{0,3}(?P<name>[a-zA-Z0-9_]*)\s*\n?") def make_slide_class(slide: Slide, slide_index: int) -> list: slide_class_name = f"Slide{slide_index}" content_class_name = f"{slide_class_name}Content" klass = [ make_slide_content_class(content_class_name, slide.shapes), ClassDef( name=slide_class_name, bases=[Name(id='SlideData', ctx=Load())],