def generate_resolvers(
    collection,
    disambiguations,
    guards,
    typename,
    query_name,
    is_aggregation=False,
    **kwargs,
):
    single_resolver = populate_string(
        single_item_resolver,
        dict(
            # query_name=query_name,
            # type_name=typename,
            typename=typename,
            collection=collection,
            resolver_path="Query." + query_name,
            disambiguations=disambiguations,
            guards_before=[g for g in guards if g["when"] == "before"],
            guards_after=[g for g in guards if g["when"] == "after"],
            **resolvers_dependencies,
            **kwargs,
        ),
    )
    many_resolver = populate_string(
        many_items_resolvers,
        dict(
            # query_name=query_name,
            typename=typename,
            collection=collection,
            resolver_path="Query." + query_name + "Nodes",
            disambiguations=disambiguations,
            guards_before=[g for g in guards if g["when"] == "before"],
            guards_after=[g for g in guards if g["when"] == "after"],
            **resolvers_dependencies,
            **kwargs,
        ),
    )
    return single_resolver, many_resolver
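# Usage sketch (hypothetical values): `guards` entries are dicts carrying a
# "when" key of "before" or "after", as implied by the filters above; the
# resolver templates and resolvers_dependencies are module-level globals.
# single_src, many_src = generate_resolvers(
#     collection="users",
#     disambiguations=[],
#     guards=[{"when": "before"}],
#     typename="User",
#     query_name="user",
# )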
def generate_relation_boilerplate(
    where_filter,
    touch,
    schema,
    relation_type,
    relationName,
    fromType,
    toType,
    implemented_types,
    pipeline,
    collection,
    resolver_filename,
):
    relation_template = (
        to_one_relation if relation_type == "to_one" else to_many_relation
    )
    relation_sdl = populate_string(
        relation_template,
        dict(toType=toType, fromType=fromType, relationName=relationName),
    )
    if relation_type == "to_many" and toType not in implemented_types:
        relation_sdl += populate_string(
            to_many_relation_boilerplate,
            dict(
                toType=toType,
                fromType=fromType,
                relationName=relationName,
                fields=get_scalar_fields(schema, toType),
            ),
        )
    touch(f"generated/sdl/{fromType}_{relationName}.graphql", relation_sdl, index=True)
    relation_template = (
        single_relation_resolver
        if relation_type == "to_one"
        else many_relations_resolver
    )
    relation_resolver = populate_string(
        relation_template,
        dict(
            where_filter=where_filter,
            pipeline=pipeline,
            collection=collection,
            resolver_path=fromType + "." + relationName,
            disambiguations=[],
            # TODO relation guards
            guards_before=[],
            guards_after=[],
            map_fields_to_types=dict(get_scalar_fields(schema, toType)),
            **resolvers_dependencies,
        ),
    )
    touch(f"generated/resolvers/{resolver_filename}.py", relation_resolver)
def object(self, children):
    types = "\n".join([x for x, _, _ in children]) + "\n"
    arguments = ",\n".join([x for _, x, _ in children])
    initializers = ",\n".join([x for _, _, x in children])
    template = indent_to(
        "",
        """
        class $key(dictlike):
            $annotation
            ${{indent_to(' ', types)}}
            def __init__(
                self,
                *,
                ${{indent_to(' ', arguments)}},
                **kwargs
            ):
                super().__init__(
                    ${{indent_to(' ', initializers)}},
                    **kwargs
                )
        """,
    )
    return populate_string(
        template,
        dict(
            types=types,
            arguments=arguments,
            initializers=initializers,
            indent_to=indent_to,
        ),
    )
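# Sketch of the assumed input: `children` is an iterable of 3-tuples of already
# rendered strings (field annotation, __init__ argument, initializer); the
# concrete strings below are illustrative only.
# children = [
#     ("name: str", "name: str", "name=name"),
#     ("age: int", "age: int", "age=age"),
# ]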
def obj_from_yaml(
    script,
    variables={},
):
    if isinstance(script, str):
        script = populate_string(
            script,
            data=variables,
            do_repr=True,
            evaluator=evaluate,
            INDICATOR_START='{{',
            INDICATOR_END='}}',
        )
        print(script)
        try:
            data = yaml.safe_load(script)
            if not isinstance(data, dict):
                print(f'script is not dict, {data}')
                return {}
            return data
        # except Stop:
        #     print('script called stop()')
        #     return {}
        except Exception as e:
            print(f'error loading the script, {e}')
            return {}
    else:
        raise NotImplementedError()
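# Usage sketch (assuming populate_string evaluates `{{ ... }}` expressions
# against `variables`, as configured above, with do_repr quoting the results):
# obj_from_yaml("collection: {{ name }}\nexposed: true", variables={"name": "users"})
# is expected to yield something like {"collection": "users", "exposed": True}.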
def generate_type_sdl(schema, typename, guards, query_name, is_aggregation=False):
    return populate_string(
        graphql_query,
        dict(
            query_name=query_name,
            type_name=typename,
            fields=get_scalar_fields(schema, typename),
            # scalars=scalars,
        ),
    )
def test_yaml():
    s = """
    ciao: ok
    other:
        key: ${{ str( 'sdfsdf"sdfsdf' ) }}
    another:
        obj:
            ciao: 4
        another: ok
    ok
    ---
    ${{ None }}
    """
    print(populate_string(s, do_repr=True))
def test_python():
    s = """
    class Ciao:
        "${{ ( "heeeey" ) }}"
        def ${{cosa}}():
            return ${{ret}} or ${{ret}}
        "${{ return_iterpolated() }}"
    """
    x = populate_string(
        s, dict(cosa='func', ret='True', return_iterpolated=lambda: '${{ ciao }}'))
    print(x)
def test_1():
    s = '''
    def ${{ name }}():
        ${{ 'return ' + 'xxx' if horny else 'xoxoxo' }}
    ${{ indent_to(' ', ''.join([
        """
        def fuck_you():
            return 'fucku'
        """
        for i in range(3)
    ])) }}
    '''
    y = populate_string(s, dict(horny=True, name='boooo'))
    print(y)
def generate_from_config(config, config_path, root_dir_path):
    types = config.get("types", {})
    jwt_config = config.get("jwt", {})
    relations = config.get("relations", [])
    root_dir_path = root_dir_path or "generated"
    if os.path.exists(root_dir_path):
        rmtree(root_dir_path)
    db_url = config.get("db_url", "")
    touch = make_touch(base=os.path.abspath(root_dir_path))
    main_graphql_schema = get_types_schema(config, here=config_path)
    # TODO add other scalars from the skema
    scalars = {*SCALAR_TYPES, *get_graphql_scalars(main_graphql_schema)}
    scalars = list(scalars)
    touch(f"checksum", make_checksum(config, config_path))
    touch(f"__init__.py", "")
    touch(f"engine.py", engine)
    touch(
        f"__main__.py",
        populate_string(
            main,
            dict(
                root_dir_name=root_dir_path.split("/")[-1],
                db_url=db_url,
                resolver_names=get_resolver_filenames(config),
            ),
        ),
    )
    touch(f"generated/__init__.py", "")
    touch(f"generated/logger.py", logger)
    touch(
        f"generated/middleware/__init__.py",
        populate_string(
            jwt_middleware,
            dict(
                jwt_header=jwt_config.get('header_name') or 'Authorization',
                jwt_sheme=jwt_config.get('header_scheme', 'Bearer'),
                jwt_required=bool(jwt_config.get('required')),
                jwt_secret=jwt_config.get('secret', None),
                jwt_algorithms=jwt_config.get('algorithms', ['HS256']),
            ),
        ),
    )
    touch(f"generated/resolvers/__init__.py", resolvers_init)
    touch(
        f"generated/resolvers/support.py",
        populate_string(
            resolvers_support,
            dict(scalars=[
                x for x in scalars if x not in SCALARS_ALREADY_IMPLEMENTED
            ]),
        ),
    )
    touch(
        f"generated/scalars.py",
        populate_string(
            scalars_implementations,
            dict(scalars=[
                x for x in scalars if x not in SCALARS_ALREADY_IMPLEMENTED
            ]),
        ),
    )
    touch(f"generated/sdl/general.graphql",
          populate_string(general_graphql, dict(scalars=scalars)),
          index=True)
    touch(f"generated/sdl/main.graphql", main_graphql_schema, index=True)
    implemented_types = []
    for typename, type_config in types.items():
        type_config = type_config or {}
        if not type_config.get("exposed", True):
            continue
        disambiguations = type_config.get("disambiguations", {})
        disambiguations = make_disambiguations_objects(disambiguations)
        disambiguations = lmap(add_disambiguations_defaults, disambiguations)
        generate_type_boilerplate(
            touch=touch,
            schema=main_graphql_schema,
            collection=type_config.get("collection", ""),
            typename=typename,
            guards=lmap(add_guards_defaults, type_config.get("guards", [])),
            pipeline=type_config.get("pipeline", []),
            disambiguations=disambiguations,
        )
        implemented_types += [typename]
    for relation in relations:
        toType = relation["to"]
        generate_relation_boilerplate(
            touch=touch,
            schema=main_graphql_schema,
            fromType=relation["from"],
            where_filter=relation["where"],
            toType=toType,
            pipeline=types[toType].get("pipeline", []),
            collection=types[toType].get("collection", ""),
            relationName=relation.get("field"),
            relation_type=relation.get("relation_type", "to_one"),
            implemented_types=implemented_types,
            resolver_filename=get_relation_filename(relation),
        )
        implemented_types += [toType]
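# Sketch of the config shape implied by the lookups above; all names and values
# are illustrative, not taken from a real project.
# config = {
#     "db_url": "mongodb://localhost:27017/db",
#     "jwt": {"header_name": "Authorization", "header_scheme": "Bearer",
#             "required": False, "algorithms": ["HS256"]},
#     "types": {
#         "User": {"collection": "users", "exposed": True,
#                  "guards": [], "pipeline": [], "disambiguations": {}},
#     },
#     "relations": [
#         {"from": "User", "to": "User", "field": "friends",
#          "relation_type": "to_many", "where": {}},
#     ],
# }
# generate_from_config(config, config_path="config.yml", root_dir_path="generated")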
def generate_terraform(file, project, region, credentials, build, bucket, stack_name):
    assert project
    credentials = os.path.abspath(credentials)
    file = os.path.abspath(file)
    try:
        data = get_stdout(f"docker-compose -f {file} config")
    except ProcessException as e:
        printred(e.message)
        return
    config = yaml.safe_load(data)
    if not stack_name:
        stack_name = os.path.basename(
            os.path.normpath(os.path.abspath(os.path.dirname(file))))
    if not valid_name(stack_name):
        raise Exception(
            "Service name must start with a letter and contain at most 63 lowercase letters, numbers, or hyphens"
        )
    plan = ""
    if bucket:
        # TODO use other bucket provider than gcp
        plan = populate_string(
            REMOTE_STATE,
            dict(credentials=credentials, bucket=bucket, stack_name=stack_name),
        )
    plan += populate_string(
        CREDENTIALS,
        dict(region=region, projectId=project, credentials=credentials))
    url_mappings = {}
    for service_name, service in config.get("services", {}).items():
        if not valid_name(service_name):
            raise Exception(
                "Service name must start with a letter and contain at most 63 lowercase letters, numbers, or hyphens"
            )
        if not service.get("image"):
            printred("all services need an image to be deployed to Cloud Run")
            return
        if build and service.get("build"):
            code, _, _ = subprocess_call(
                f"docker-compose -f {file} build {service_name}")
            if code:
                printred("failed building")
                return
            print("pushing to registry")
            code, _, _ = subprocess_call(
                f"docker-compose -f {file} push {service_name}")
            if code:
                printred("cannot push image")
                return
        namespaced_service_name = stack_name + "-" + service_name
        output_url = namespaced_service_name + SERVICE_URL_POSTFIX
        url_mappings[output_url] = service_name
        variables = dict(
            environment=get_environment(service),
            service_name=namespaced_service_name,
            image=service.get("image", ""),
            command=parse_command(service.get("entrypoint", [])),
            args=parse_command(service.get("command", [])),
            region=region,
            projectId=project,
            output_url=output_url,
        )
        populated_service = populate_string(SERVICE_PLAN, variables)
        plan += "\n" + populated_service
        plan += populate_string(PUBLIC_SERVICE,
                                dict(service_name=namespaced_service_name))
    return plan, url_mappings
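# Usage sketch (hypothetical paths and project id): render a plan and write it
# to a Terraform working directory; the function returns None on failure.
# result = generate_terraform(
#     file="docker-compose.yml",
#     project="my-gcp-project",
#     region="us-central1",
#     credentials="service-account.json",
#     build=False,
#     bucket=None,
#     stack_name=None,
# )
# if result:
#     plan, url_mappings = result
#     with open("main.tf", "w") as f:
#         f.write(plan)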