def handle(args):
    """List the Silence project templates published on the configured GitHub org."""
    query_url = f"https://api.github.com/orgs/{settings.GITHUB_TEMPLATES_OWNER}/repos"

    try:
        repo_data = requests.get(query_url).json()
    except Exception as exc:
        logger.debug(traceback.format_exc())
        logger.error(
            "An error has occurred when querying GitHub's API to obtain the list of templates."
        )
        if not settings.DEBUG_ENABLED:
            logger.error("Add --debug to see the full stack trace.")
        sys.exit(1)

    # Keep only repos that follow the template naming convention,
    # stripping the prefix to get the short template name.
    templates = []
    for repo in repo_data:
        repo_name = repo["name"].lower()
        if repo_name.startswith("silence-template"):
            templates.append({
                "name": repo_name.replace("silence-template-", ""),
                "desc": repo["description"],
            })

    templates = sorted(templates, key=lambda tmpl: tmpl["name"])

    print("Available templates:")
    default_name = settings.DEFAULT_TEMPLATE_NAME.lower()
    for tmpl in templates:
        name = tmpl["name"]
        default = " (default)" if name == default_name else ""
        desc = f': {tmpl["desc"]}' if tmpl["desc"] else ""
        print(f" · {name}{default}{desc}")
def update(q, params=None):
    """Run a modifying SQL statement (INSERT/UPDATE/DELETE).

    Returns a dict of the form {"lastId": <last modified/inserted row id>}.
    Raises DatabaseError wrapping any underlying DB exception.
    """
    logger.debug(f'Executing SQL operation "{q}" with params {params}')

    # Fetch the connection and get a cursor
    connection = get_conn()
    cur = connection.cursor(DictCursor)

    try:
        # Execute the statement, binding parameters only when provided
        exec_args = (q, params) if params else (q,)
        cur.execute(*exec_args)
        connection.commit()

        # Return the ID of the row that was modified or inserted
        last_id = cur.lastrowid
        logger.debug(f"Last modified row ID: {last_id}")
        return {"lastId": last_id}
    except Exception as exc:
        # If anything happens, wrap the exceptions in a DatabaseError
        raise DatabaseError(exc) from exc
    finally:
        # Always release the cursor and the connection
        cur.close()
        connection.close()
def load_user_endpoints():
    """Discover and import every .py endpoint module in 'endpoints/' (or legacy 'api/').

    Importing each module triggers its endpoint registration side effects.

    Raises:
        RuntimeError: if an endpoint module cannot be imported (chained to the
            original ImportError so the real cause is preserved).
    """
    logger.debug("Looking for custom endpoints...")

    # Load every .py file inside the api/ folder
    for folder in ("api", "endpoints"):
        try:
            pyfiles = [f for f in listdir(folder) if f.endswith(".py")]
        except FileNotFoundError:
            # Both folders are optional; skip silently if missing
            continue

        if folder == "api" and pyfiles:
            # 'api/' is deprecated in favor of 'endpoints/'
            logger.warning(
                "Please rename the folder that contains your endpoints to 'endpoints/' instead of 'api/'"
            )
            logger.warning(
                "Support for the 'api/' folder will be dropped in the future.")

        for pyfile in pyfiles:
            module_name = folder + "." + splitext(pyfile)[0]
            logger.debug(f"Found endpoint file: {module_name}")
            try:
                importlib.import_module(module_name)
            except ImportError as exc:
                # FIX: chain the original exception so the underlying import
                # failure is visible in the traceback instead of being swallowed
                raise RuntimeError(
                    f"Could not load the API file {module_name}") from exc
def load_user_endpoints():
    """Load every JSON endpoint description and register each endpoint with the server."""
    logger.debug("Looking for custom endpoints...")

    # Load every .json file inside the endpoints/ (and endpoints/auto/) folders,
    # creating endpoints/ if it does not exist yet
    endpoints_dir = getcwd() + "/endpoints"
    if not path.isdir(endpoints_dir):
        mkdir(endpoints_dir)
    auto_dir = endpoints_dir + "/auto"

    # User-written endpoints first, then auto-generated ones (if any)
    json_paths = [f"{endpoints_dir}/{f}" for f in listdir(endpoints_dir) if f.endswith('.json')]
    if path.isdir(auto_dir):
        json_paths += [f"{auto_dir}/{f}" for f in listdir(auto_dir) if f.endswith('.json')]

    for json_file in json_paths:
        with open(json_file, "r") as fp:
            endpoint_descriptions = list(json.load(fp).values())

        for ep in endpoint_descriptions:
            # Forward only the optional keys that are actually present,
            # so setup_endpoint's defaults apply otherwise
            optional_kwargs = {
                key: ep.get(key)
                for key in ("auth_required", "allowed_roles", "description", "request_body_params")
                if ep.get(key) is not None
            }
            server_endpoint.setup_endpoint(ep['route'], ep['method'], ep['sql'],
                                           **optional_kwargs)
def check_session(allowed_roles):
    """Validate the request's session token and the user's role.

    Reads the "Token" request header, decodes it, and checks the user's role
    against `allowed_roles` ("*" allows any role).

    Returns:
        The primary-key value of the logged-in user.

    Raises:
        HTTPError: 401 if the token is missing/invalid/expired or the
            user's role is not allowed.
    """
    token = request.headers.get("Token", default=None)
    if not token:
        raise HTTPError(401, "Unauthorized")

    try:
        user_data = check_token(token)
        users_table = settings.USER_AUTH_DATA['table']
        primary = get_primary_key(users_table)
        res = user_data[primary]

        # Check if the user's role is allowed to access this endpoint
        role_col_name = settings.USER_AUTH_DATA.get("role", None)
        if role_col_name:
            # Only check the role if we know the role column.
            # Find the role of the user from the user data (case-insensitive
            # match on the column name).
            user_role = next((v for k, v in user_data.items()
                              if k.lower() == role_col_name.lower()), None)
            logger.debug(
                f"Allowed roles are {allowed_roles} and the user role is {user_role}"
            )
            if user_role not in allowed_roles and "*" not in allowed_roles:
                raise HTTPError(401, "Unauthorized")
        return res
    except TokenError as exc:
        # FIX: chain the token error so the root cause survives in tracebacks
        raise HTTPError(401, str(exc)) from exc
def get_user_endpoints():
    """Return the (route, method) pair of every user-defined JSON endpoint."""
    logger.debug("Looking for user endpoints")

    # Look inside the endpoints/ folder, creating it if it does not exist
    endpoints_dir = getcwd() + "/endpoints"
    if not path.isdir(endpoints_dir):
        mkdir(endpoints_dir)

    json_files = [
        f"{endpoints_dir}/{fname}"
        for fname in listdir(endpoints_dir)
        if fname.endswith('.json')
    ]

    # Collect one (route, method) tuple per declared endpoint
    pairs = []
    for json_file in json_files:
        with open(json_file, "r") as fp:
            for ep in json.load(fp).values():
                pairs.append((ep['route'], ep['method']))
    return pairs
def get_views():
    """Return a dict mapping each database view name to its list of columns."""
    res = query(q="SHOW FULL TABLES WHERE table_type = 'VIEW';")
    views = {}
    for row in res:
        # The first column of each result row holds the view name.
        # FIX: compute it once instead of building list(row.values()) twice.
        view_name = next(iter(row.values()))
        views[view_name] = get_table_cols(view_name)
    logger.debug(f"views in database: {views}")
    return views
def get_tables():
    """Return a dict mapping each database table name to its list of columns."""
    res = query(q="SHOW FULL TABLES WHERE table_type = 'BASE TABLE';")
    tables = {}
    for row in res:
        # The first column of each result row holds the table name.
        # FIX: compute it once instead of building list(row.values()) twice.
        table_name = next(iter(row.values()))
        tables[table_name] = get_table_cols(table_name)
    logger.debug(f"tables in database: {tables}")
    return tables
def login():
    """Handle a login request: validate credentials and return a session token.

    Returns:
        (flask.Response, int): JSON {"sessionToken": ..., "user": ...} and 200.

    Raises:
        HTTPError: 400 on missing fields / unknown user / wrong password,
            401 if the user is deactivated, 500 on a misconfigured table.
    """
    USERS_TABLE, IDENTIFIER_FIELD, PASSWORD_FIELD, ROLE_FIELD, ACTIVE_FIELD = get_login_settings()

    # Ensure that the user has sent the required fields
    form = request.json if request.is_json else request.form
    form = filter_fields_db(form, USERS_TABLE)
    username = form.get(IDENTIFIER_FIELD, None)
    password = form.get(PASSWORD_FIELD, None)

    if not username or not password:
        raise HTTPError(
            400,
            f"Both '{IDENTIFIER_FIELD}' and '{PASSWORD_FIELD}' are required")

    # SECURITY FIX: never log the plaintext password
    logger.debug(f"Login request from user {username}")

    # Look if there is an user with such username
    q = get_login_query(USERS_TABLE, IDENTIFIER_FIELD, username)
    users = dal.api_safe_query(q)

    if not users:
        logger.debug(f"The identifier {username} was not found")
        raise HTTPError(400, "User not found")

    # The identifier field should be unique (/register also takes care of that)
    # so we can just extract the first one
    user = users[0]

    # Check if the user's password matches the provided one
    if PASSWORD_FIELD not in user:
        raise HTTPError(500, f"The user has no attribute '{PASSWORD_FIELD}'")

    password_ok = (settings.ALLOW_CLEAR_PASSWORDS and user[PASSWORD_FIELD] == password) \
        or check_password_hash(user[PASSWORD_FIELD], password)

    if not password_ok:
        logger.debug("Incorrect password")
        raise HTTPError(400, "The password is not correct")

    # If a column has been specified for the "is active" field, and the check
    # is enabled in the settings, check that the user has not been deactivated
    if ACTIVE_FIELD is not None and settings.CHECK_USER_IS_ACTIVE:
        if not user[ACTIVE_FIELD]:
            logger.debug("The user is deactivated, login denied")
            raise HTTPError(401, "This user has been deactivated")

    # If we've reached here the login is successful, generate a session token
    # and return it with the logged user's info (minus the stored password)
    logger.debug("Login OK")
    token = create_token(user)
    del user[PASSWORD_FIELD]
    res = {"sessionToken": token, "user": user}
    return jsonify(res), 200
def handle(args):
    """Start the Silence server for the current project."""
    # Imported lazily so the CLI does not pay server-import cost for other commands
    from silence.server import manager as server_manager

    logger.info(f"Silence v{__version__}")
    logger.debug(f"Current settings:\n{settings}")

    # Warn the user if a newer release exists on PyPI
    latest = check_for_new_version()
    if latest:
        logger.warning(
            f"A new Silence version (v{latest}) is available. Run 'pip install --upgrade Silence' to upgrade."
        )

    server_manager.setup()
    server_manager.run()
def check_token(token):
    """Decode a session token and return the user data serialized inside it.

    Raises:
        TokenError: if the token has expired or its signature is invalid
            (chained to the underlying itsdangerous exception).
    """
    # Log only the edges of the token, not the whole secret
    logger.debug(f"Checking received token {token[:6]}[...]{token[-6:]}")
    try:
        user_data = auth.loads(token, max_age=settings.MAX_TOKEN_AGE)
        logger.debug("The token is correct")
        return user_data
    except SignatureExpired as exc:
        logger.debug("The token has expired")
        # FIX: chain the original exception for easier debugging
        raise TokenError("The session token has expired") from exc
    except BadSignature as exc:
        logger.debug("The token is not valid")
        raise TokenError("The session token is not valid") from exc
def check_for_new_version():
    """Check PyPI for a newer Silence release.

    Returns:
        The latest version string if it differs from the running version,
        False otherwise (also when the check is disabled or any error occurs).
    """
    if not settings.CHECK_FOR_UPDATES:
        return False

    logger.debug("Checking for new updates...")

    try:
        # FIX: add a timeout so a slow/unreachable PyPI cannot hang the CLI
        data = requests.get("https://pypi.org/pypi/Silence/json", timeout=5).json()
        latest_version = data["info"]["version"]
    except Exception as exc:
        # Best-effort check: any failure is muted and reported as "no update"
        logger.debug("Exception occurred when checking for updates (muted):")
        logger.debug(str(exc))
        return False

    # Strip any local suffix (e.g. "1.0-dev") before comparing
    current_version = __version__.split("-")[0]

    if current_version != latest_version:
        logger.debug("New version available")
        return latest_version
    else:
        logger.debug("Running latest version")
        return False
def register():
    """Handle a registration request: create the user and return a session token.

    Returns:
        (flask.Response, int): JSON {"sessionToken": ..., "user": ...} and 200.

    Raises:
        HTTPError: 400 on missing fields or duplicate identifier.
    """
    USERS_TABLE, IDENTIFIER_FIELD, PASSWORD_FIELD, ROLE_FIELD, ACTIVE_FIELD = get_login_settings()

    # Ensure that the user has sent the required fields
    form = request.json if request.is_json else request.form
    form = filter_fields_db(form, USERS_TABLE)
    username = form.get(IDENTIFIER_FIELD, None)
    password = form.get(PASSWORD_FIELD, None)

    if not username or not password:
        raise HTTPError(
            400,
            f"Both '{IDENTIFIER_FIELD}' and '{PASSWORD_FIELD}' are required")

    # SECURITY FIX: redact the plaintext password before logging the form data
    safe_form = {k: ("***" if k == PASSWORD_FIELD else v) for k, v in form.items()}
    logger.debug(f"Register request with data {safe_form}")

    # Ensure that the identifier is unique
    login_q = get_login_query(USERS_TABLE, IDENTIFIER_FIELD, username)
    other_users = dal.api_safe_query(login_q)

    if other_users:
        logger.debug(f"The identifier {username} already exists")
        raise HTTPError(
            400,
            f"There already exists another user with that {IDENTIFIER_FIELD}")

    # Create the user object, replacing the password with the hashed one
    user = dict(form)
    user[PASSWORD_FIELD] = generate_password_hash(password)

    # Assign a default role to the user, if specified in the settings
    if settings.DEFAULT_ROLE_REGISTER:
        user[ROLE_FIELD] = settings.DEFAULT_ROLE_REGISTER

    # Assign a default active status, if the activity check is on and none has
    # been provided
    if ACTIVE_FIELD and ACTIVE_FIELD not in user:
        user[ACTIVE_FIELD] = settings.DEFAULT_ACTIVE_STATUS

    # Try to insert it in the DB
    # Since the /register endpoint must adapt to any possible table,
    # we assume that the user knows what they're doing and submits the
    # appropriate fields. Otherwise, the DB will just complain.
    register_q = get_register_user_query(USERS_TABLE, user)
    dal.api_safe_update(register_q)

    # Fetch the newly created user from the DB (some fields may have been
    # automatically generated, like its ID). It should now exist.
    user = dal.api_safe_query(login_q)[0]

    # If we've reached here the register is successful, generate a session token
    # and return it with the logged user's info (minus the stored password)
    logger.debug("Register OK")
    token = create_token(user)
    del user[PASSWORD_FIELD]
    res = {"sessionToken": token, "user": user}
    return jsonify(res), 200
def query(q, params=None):
    """Run a read-only SQL query and return every fetched row.

    Raises DatabaseError wrapping any underlying DB exception.
    """
    logger.debug(f'Executing SQL query "{q}" with params {params}')

    # Fetch the connection and get a dict-producing cursor
    connection = get_conn()
    cur = connection.cursor(DictCursor)

    try:
        # Execute the query, binding parameters only when provided
        exec_args = (q, params) if params else (q,)
        cur.execute(*exec_args)

        rows = cur.fetchall()
        logger.debug(f"Query result: {rows}")
        return rows
    except Exception as exc:
        # Wrap any failure in our own DatabaseError type
        raise DatabaseError(exc) from exc
    finally:
        # Always release the cursor and the connection
        cur.close()
        connection.close()
def setup():
    """Configure the Flask application: config values, logging, error
    handlers, MIME types, and the API/web endpoint registration."""
    # Configures the web server
    APP.secret_key = settings.SECRET_KEY
    APP.config["SESSION_TYPE"] = "filesystem"
    APP.config["SEND_FILE_MAX_AGE_DEFAULT"] = settings.HTTP_CACHE_TIME

    # Mute Flask's startup messages by replacing click's output functions
    def noop(*args, **kwargs):
        pass
    click.echo = noop
    click.secho = noop

    # Add our Flask filter to customize Flask logging messages
    logging.getLogger("werkzeug").addFilter(FlaskFilter())

    # Override the default JSON encoder so that it works with the Decimal type
    APP.json_encoder = SilenceJSONEncoder

    # Manually set up the MIME type for .js files
    # This patches a known issue on Windows, where the MIME type for JS files
    # is sometimes incorrectly set to text/plain in the registry
    mimetypes.add_type("application/javascript", ".js", strict=True)

    # Set up the error handle for our custom exception type
    @APP.errorhandler(HTTPError)
    def handle_HTTPError(error):
        # Serialize the HTTPError into a JSON response with its status code
        response = jsonify(error.to_dict())
        response.status_code = error.status_code
        return response

    # Set up the generic Exception handler for server errors
    @APP.errorhandler(Exception)
    def handle_generic_error(exc):
        # Pass through our own HTTP error exception
        if isinstance(exc, HTTPError):
            return exc

        # Create a similar JSON response for Werkzeug's exceptions
        if isinstance(exc, HTTPException):
            code = exc.code
            res = jsonify({"message": exc.description, "code": code})
            return res, code

        # We're facing an uncontrolled server exception: log it with its
        # traceback and wrap it in a generic 500 HTTPError response
        logger.exception(exc)
        exc_type = type(exc).__name__
        msg = str(exc)
        err = HTTPError(500, msg, exc_type)
        return handle_HTTPError(err)

    # Check if clear text passwords can be used for login, and show a warning
    # if that is the case
    if settings.ALLOW_CLEAR_PASSWORDS:
        logger.warning(
            "This project allows clear text passwords in the DB to be used for login\n" +
            "(ALLOW_CLEAR_PASSWORDS is set to True)\n" +
            "This is NOT RECOMMENDED outside testing purposes.")

    # Load the user-provided API endpoints and the default ones
    if settings.RUN_API:
        load_default_endpoints()
        load_user_endpoints()
        if settings.SHOW_ENDPOINT_LIST:
            API_SUMMARY.print_endpoints()

    # Load the web static files
    if settings.RUN_WEB:
        logger.debug("Setting up web server")

        # Serve the SPA entry point at the root URL
        @APP.route("/")
        def root():
            return APP.send_static_file("index.html")

        # Serve any other static asset by its path
        @APP.route("/<path:path>")
        def other_path(path):
            return APP.send_static_file(path)
def create_token(data):
    """Serialize `data` into a freshly signed session token and return it."""
    new_token = auth.dumps(data)
    # Log only the edges of the token, not the whole secret
    logger.debug(f"Created new token {new_token[:6]}[...]{new_token[-6:]}")
    return new_token
def setup():
    """Configure the Flask application: config values, logging, error
    handlers, and the API/web endpoint registration."""
    # Configures the web server
    APP.secret_key = settings.SECRET_KEY
    APP.config["SESSION_TYPE"] = "filesystem"

    # Mute Flask's startup messages by replacing click's output functions
    def noop(*args, **kwargs):
        pass
    click.echo = noop
    click.secho = noop

    # Add our Flask filter to customize Flask logging messages
    logging.getLogger("werkzeug").addFilter(FlaskFilter())

    # Override the default JSON encoder so that it works with the Decimal type
    APP.json_encoder = SilenceJSONEncoder

    # Set up the error handle for our custom exception type
    @APP.errorhandler(HTTPError)
    def handle_HTTPError(error):
        # Serialize the HTTPError into a JSON response with its status code
        response = jsonify(error.to_dict())
        response.status_code = error.status_code
        return response

    # Set up the generic Exception handler for server errors
    @APP.errorhandler(Exception)
    def handle_generic_error(exc):
        # Pass through our own HTTP error exception
        if isinstance(exc, HTTPError):
            return exc

        # Create a similar JSON response for Werkzeug's exceptions
        if isinstance(exc, HTTPException):
            code = exc.code
            res = jsonify({"message": exc.description, "code": code})
            return res, code

        # We're facing an uncontrolled server exception: log it with its
        # traceback and wrap it in a generic 500 HTTPError response
        logger.exception(exc)
        exc_type = type(exc).__name__
        msg = str(exc)
        err = HTTPError(500, msg, exc_type)
        return handle_HTTPError(err)

    # Load the user-provided API endpoints and the default ones
    if settings.RUN_API:
        load_default_endpoints()
        load_user_endpoints()

    # Load the web static files
    if settings.RUN_WEB:
        logger.debug("Setting up web server")

        # Serve the SPA entry point at the root URL
        @APP.route("/")
        def root():
            return APP.send_static_file("index.html")

        # Serve any other static asset by its path
        @APP.route("/<path:path>")
        def other_path(path):
            return APP.send_static_file(path)
def setup_endpoint(route, method, sql, auth_required=False, allowed_roles=None,
                   description=None, request_body_params=None):
    """Register a SQL-backed API endpoint on the Flask app.

    Args:
        route: Silence-style route, possibly with $param placeholders.
        method: HTTP verb ("GET", "POST", "PUT", "DELETE").
        sql: SQL statement with Silence-style $param placeholders.
        auth_required: whether a valid session token is required.
        allowed_roles: roles allowed to call the endpoint; defaults to ["*"] (any).
        description: human-readable description for the API summary.
        request_body_params: parameter names expected in the request body.
    """
    # FIX: avoid mutable default arguments (shared between calls);
    # None sentinels keep the observable defaults identical
    if allowed_roles is None:
        allowed_roles = ["*"]
    if request_body_params is None:
        request_body_params = []

    logger.debug(f"Setting up endpoint {method} {route}")

    # if the query is requesting the logged user.
    # NOTE(review): this marker looks like it should be "$loggedId" (see the
    # warning text below) — confirm against repo history before changing it.
    logged_user = "******" in sql
    if logged_user and not auth_required:
        logger.warning(
            "You're using $loggedId but are not requesting authorization, in endpoint: "
            + str(route))

    # Construct the API route taking the prefix into account
    route_prefix = settings.API_PREFIX
    if route_prefix.endswith("/"):
        route_prefix = route_prefix[:-1]  # Drop the final /
    full_route = route_prefix + route

    # Warn if the pair SQL operation - HTTP verb is not the proper one
    check_method(sql, method, route)

    # Warn if the values of auth_required and allowed_roles don't make sense together
    check_auth_roles(auth_required, allowed_roles, method, route)

    # Extract the list of parameters that the user expects to receive
    # in the URL and in the SQL string
    sql_params = extract_params(sql)
    url_params = extract_params(route)

    # Get the required SQL operation
    sql_op = get_sql_op(sql)

    # If it's a SELECT or a DELETE, make sure that all SQL params can be
    # obtained from the url
    if sql_op in (SQL.SELECT, SQL.DELETE):
        check_params_match(sql_params, url_params, route)

    # If it's an INSERT or an UPDATE, make sure that all SQL params can be
    # obtained from the url AND the request body
    # (FIX: the original comment wrongly said "SELECT or DELETE" here)
    if sql_op in (SQL.INSERT, SQL.UPDATE):
        check_params_match(sql_params, url_params + request_body_params, route)

    # The handler function that will be passed to flask
    def route_handler(*args, **kwargs):
        # If this endpoint requires authentication, check that the
        # user has provided a session token and that it is valid
        if auth_required:
            userId = check_session(allowed_roles)

        # Collect all url pattern params
        request_url_params_dict = kwargs

        # If endpoint requires the logged userId it adds the pair (loggedId, loggedUserId)
        if logged_user:
            if not auth_required:
                userId = check_session(allowed_roles)
            # FIX: both branches of the original if/else assigned the same
            # value, so a single assignment is equivalent (userId may be None)
            request_url_params_dict["loggedId"] = userId

        # Convert the silence-style placeholders in the SQL query to proper MySQL placeholders
        query_string = silence_to_mysql(sql)

        # Default outputs
        res = None
        status = 200

        # SELECT/GET operations
        if sql_op == SQL.SELECT:
            # The URL params have been checked to be enough to fill all SQL params
            url_pattern_params = tuple(request_url_params_dict[param]
                                       for param in sql_params)
            res = dal.api_safe_query(query_string, url_pattern_params)

            # Filter these results according to the URL query string, if there is one
            # Possible TO-DO: do this by directly editing the SQL query for extra efficiency
            res = filter_query_results(res, request.args)

            # In our teaching context, it is safe to assume that if the URL ends
            # with a parameter and we have no results, we should return a 404 code
            if RE_QUERY_PARAM.match(route) and not res:
                raise HTTPError(404, "Not found")
        else:
            # POST/PUT/DELETE operations
            # Construct a dict for all params expected in the request body,
            # setting them to None if they have not been provided
            form = request.json if request.is_json else request.form
            body_params = {
                param: form.get(param, None)
                for param in request_body_params
            }

            # We have checked that sql_params is a subset of url_params U body_params,
            # construct a joint param object and use it to fill the SQL placeholders
            for param in url_params:
                body_params[param] = request_url_params_dict[param]

            if logged_user and auth_required:
                body_params["loggedId"] = userId

            # FIX: this tuple was built twice in the original; once is enough
            param_tuple = tuple(body_params[param] for param in sql_params)

            # Run the execute query
            res = dal.api_safe_update(query_string, param_tuple)

        return jsonify(res), status

    # flaskify_url() adapts the URL so that all $variables are converted to Flask-style <variables>
    server_manager.APP.add_url_rule(flaskify_url(full_route), method + route,
                                    route_handler, methods=[method])

    server_manager.API_SUMMARY.register_endpoint({
        "route": full_route,
        "method": method.upper(),
        "description": description
    })
def create_tests():
    """Generate a skeleton .http CRUD test suite for every table in the database.

    The generated files land in tests/auto/ (recreated from scratch on every
    run) and must be completed with valid data by the developer.
    """
    # Folder handling: wipe and recreate tests/auto
    curr_dir = getcwd()
    test_dir = curr_dir + "/tests/auto"
    logger.debug(f"Selected test directory --> {test_dir}")
    try:
        rmtree(test_dir)
    except FileNotFoundError:
        logger.debug("Folder is not there, creating it.")
    logger.debug(f"re-creating directory --> {test_dir}")
    Path(test_dir).mkdir(parents=True, exist_ok=True)

    # Test files creation
    tables = get_tables()
    # Locate the auth table (case-insensitive) and strip its primary key from
    # the attribute list used to build login request bodies
    table_name_auth = next(t for t in tables.keys()
                           if t.lower() == settings.USER_AUTH_DATA["table"].lower())
    pk_auth = get_primary_key(table_name_auth)
    auth_table_attributes = get_table_cols(table_name_auth)
    auth_table_attributes.remove(pk_auth)

    for table in list(tables.items()):
        pk = get_primary_key(table[0])
        try:
            table[1].remove(pk)
        # the auth table will already have its primary key removed.
        except:
            pass
        name = table[0].lower()
        logger.info(f"Generating test for {name}")
        table_name = next(t for t in tables if t.lower() == name)
        table_attributes = get_table_cols(table_name)

        # TEST HEADER
        # NOTE(review): the exact line layout of these templates could not be
        # recovered from the collapsed source — confirm against repo history.
        TEST = f"""
### THIS IS AN AUTO-GENERATED TEST SUITE, IT NEEDS TO BE COMPLETED WITH VALID DATA
### THESE ARE NOT ALL YOU NEED, MORE OF THEM MUST BE CREATED TO EVALUATE THE FUNCTIONAL
### REQUIREMENTS OF THE PROJECT AT HAND, THESE TEST ONLY TEST THE CRUD PORTION OF THE ENTITY.

### Silence is a DEAL research team project, more info about us in https://deal.us.es

@BASE = http://127.0.0.1:8080{settings.API_PREFIX}

### Auxiliary query
### Positive test
### Test 00: Get all existing {name}
### This query is used in several of the below tests it should be executed first.
# @name {name}
GET {{{{BASE}}}}/{name}

### Login a(n) {table_name_auth} and save the generated token
### This token is used in several of the below tests it should be executed second.
# @name login
POST {{{{BASE}}}}/login
Content-Type: application/json

{{
"""
        # Build the login body from the configured identifier/password columns
        t_args = [settings.USER_AUTH_DATA["identifier"],
                  settings.USER_AUTH_DATA["password"]]
        TEST += add_table_args(t_args)
        TEST += f"""}}

###
@token = {{{{login.response.body.sessionToken}}}}
"""
        TEST += f"""
### TESTS BEGIN

### Test 01: Get one existing {name} by its id.
### positive test
@{name[:3]}Id = {{{{{name}.response.body.0.{pk}}}}}
GET {{{{BASE}}}}/{name}/{{{{{name[:3]}Id}}}}
Content-Type: application/json

### Test 02: Try get one existing {name} by its nonexistent id.
### negative test
GET {{{{BASE}}}}/{name}/999999999
Content-Type: application/json

### Test 03: Add a new {name} successfully
### Positive test
### We assume that the token has been aquired by the login request.
# @name new{name}
POST {{{{BASE}}}}/{name}
Content-Type: application/json
Token: {{{{token}}}}

{{
"""
        TEST += add_table_args(table_attributes)
        TEST += f"""}}

### Check the created {name}
@new{name}id = {{{{new{name}.response.body.lastId}}}}
GET {{{{BASE}}}}/{name}/{{{{new{name}id}}}}
Content-Type: application/json

### Test 04: Add a new {name} without a session token
### Negative test
POST {{{{BASE}}}}/{name}
Content-Type: application/json

{{
"""
        TEST += add_table_args(table_attributes)
        TEST += f"""}}

### Test 05: Modify an existing {name}
### Positive test
@{name[:3]}Id = {{{{{name}.response.body.0.{pk}}}}}
PUT {{{{BASE}}}}/{name}/{{{{{name[:3]}Id}}}}
Content-Type: application/json
Token: {{{{token}}}}

{{
"""
        TEST += add_table_args(table_attributes)
        TEST += f"""}}

### Check the modified {name}
GET {{{{BASE}}}}/{name}/{{{{new{name}id}}}}
Content-Type: application/json

### Test 06: Try to modify an existing {name} without a session token
### Negative test
@{name[:3]}Id = {{{{{name}.response.body.0.{pk}}}}}
PUT {{{{BASE}}}}/{name}/{{{{{name[:3]}Id}}}}
Content-Type: application/json

{{
"""
        TEST += add_table_args(table_attributes)
        TEST += f"""}}

### Test 07: Delete an existing {name}
### Positive test
### Create a new {name}, which we will delete
# @name created{name[:3]}ToDelete
POST {{{{BASE}}}}/{name}
Content-Type: application/json
Token: {{{{token}}}}

{{
"""
        TEST += add_table_args(table_attributes)
        TEST += f"""}}

### Check the created {name}
@{name[:3]}delId = {{{{created{name[:3]}ToDelete.response.body.lastId}}}}
GET {{{{BASE}}}}/{name}/{{{{{name[:3]}delId}}}}
Content-Type: application/json

### Delete the {name}
DELETE {{{{BASE}}}}/{name}/{{{{{name[:3]}delId}}}}
Token: {{{{token}}}}

### Check the deleted {name}
GET {{{{BASE}}}}/{name}/{{{{{name[:3]}delId}}}}
Content-Type: application/json

### Test 08: Try to delete a {name} without a session token
### Negative test
DELETE {{{{BASE}}}}/{name}/{{{{{name[:3]}Id}}}}
"""

        # WRITE TEST TO FILE.
        with open(test_dir + f"/{name}.http", "w") as test:
            test.write(TEST)
def create_entity_endpoints(existing_routes_method_pairs):
    """Auto-generate CRUD endpoint definitions for every DB table and view.

    Writes the generated endpoint JSON files to endpoints/auto/ (recreated
    from scratch) and an API summary file per entity, skipping any
    (route, method) pair already present in `existing_routes_method_pairs`.
    """
    # Folder handling: wipe and recreate endpoints/auto
    curr_dir = getcwd()
    endpoints_dir = curr_dir + "/endpoints"
    auto_dir = endpoints_dir + "/auto"
    logger.debug(f"Selected endpoint directory --> {auto_dir}")
    try:
        rmtree(auto_dir)
    except FileNotFoundError:
        logger.debug("Folder is not there, creating it.")
    logger.debug(f"re-creating directory --> {auto_dir}")
    mkdir(auto_dir)

    # Endpoint files creation
    tables = get_tables()
    for table in list(tables.items()):
        pk = get_primary_key(table[0])
        is_AI = is_auto_increment(table[0], pk)
        endpoints = {}
        table[1].remove(pk)
        name = table[0].lower()
        ep_tuples = []
        logger.info(f"Generating endpoints for {name}")

        get_all_route = f"/{name}"
        if (get_all_route, "GET") not in existing_routes_method_pairs:
            endpoints["getAll"] = generate_get_all(get_all_route, table)
            ep_tuples.append(("getAll", get_all_route, "GET", pk,
                              endpoints["getAll"]["description"]))

        get_by_id_route = f"/{name}/${pk}"
        if (get_by_id_route, "GET") not in existing_routes_method_pairs:
            endpoints["getById"] = generate_get_by_id(get_by_id_route, table, pk)
            ep_tuples.append(("getById", get_by_id_route, "GET", pk,
                              endpoints["getById"]["description"]))

        create_route = f"/{name}"
        if (create_route, "POST") not in existing_routes_method_pairs:
            endpoints["create"] = generate_create(create_route, table, pk, is_AI)
            ep_tuples.append(("create", create_route, "POST", pk,
                              endpoints["create"]["description"]))

        # FIX: renamed the misspelled local 'udpate_route'
        update_route = f"/{name}/${pk}"
        if (update_route, "PUT") not in existing_routes_method_pairs:
            endpoints["update"] = generate_update(update_route, table, pk)
            ep_tuples.append(("update", update_route, "PUT", pk,
                              endpoints["update"]["description"]))

        delete_route = f"/{name}/${pk}"
        if (delete_route, "DELETE") not in existing_routes_method_pairs:
            endpoints["delete"] = generate_delete(delete_route, table, pk)
            ep_tuples.append(("delete", delete_route, "DELETE", pk,
                              endpoints["delete"]["description"]))

        generate_API_file_for_endpoints(ep_tuples, name)
        dicts_to_file(endpoints, name, auto_dir)

    views = get_views()
    for view in list(views.items()):
        endpoints = {}
        ep_tuples = []
        name = view[0].lower()
        logger.info(f"Generating endpoints for {name}")

        get_all_route = f"/{name}"
        if (get_all_route, "GET") not in existing_routes_method_pairs:
            endpoints["getAll"] = generate_get_all(get_all_route, view)
            # NOTE(review): 'pk' here is stale — it is the primary key of the
            # LAST table iterated above (views have no PK), and raises
            # NameError if the DB has no tables. Preserved for compatibility;
            # confirm whether None should be passed instead.
            ep_tuples.append(("getAll", get_all_route, "GET", pk,
                              endpoints["getAll"]["description"]))

        generate_API_file_for_endpoints(ep_tuples, name)
        dicts_to_file(endpoints, name, auto_dir)