import connexion
from connexion.resolver import Resolver
from connexion.utils import get_function_from_name
from flask_cors import CORS
from os import path
from pathlib import Path
from specsynthase.specbuilder import SpecBuilder
from swagger_ui_bundle import swagger_ui_3_path

from db.mongo import MongoDb

spec_dir = "./specs"
current_dir = path.dirname(__file__)
spec_path = Path(f"{current_dir}/{spec_dir}").resolve()

spec = SpecBuilder()\
    .add_spec(spec_path.joinpath("cloud.yaml")) \
    .add_spec(spec_path.joinpath("vm.yaml")) \
    .add_spec(spec_path.joinpath("flavor.yaml")) \
    .add_spec(spec_path.joinpath("image.yaml"))

options = {'swagger_path': swagger_ui_3_path}

app = connexion.FlaskApp(__name__, specification_dir=spec_dir, options=options)

# Inject the `controllers` namespace into `operationId`s specified in the spec
# files. This allows the (potentially many) controllers to be organized into
# their own folder and to potentially have multiple versions of the controller
# classes in different folders.
custom_resolver = Resolver(
    function_resolver=lambda fname: get_function_from_name(f"controllers.{fname}")
)

app.add_api(spec, resolver=custom_resolver)

CORS(app.app)


@app.route("/")
def home():
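The resolver above only prefixes the operationId before the normal lookup. A minimal, standalone sketch of that behaviour (the example operationId "vm.list_vms" and the controllers package are assumptions used only for illustration):

from connexion.utils import get_function_from_name

def resolve_controller(operation_id: str):
    # Same lookup the lambda above performs: prefix the operationId with the
    # `controllers` package, then import the module and fetch the function.
    return get_function_from_name(f"controllers.{operation_id}")

# e.g. an operationId of "vm.list_vms" in any of the combined specs resolves to
# the callable controllers.vm.list_vms:
# print(resolve_controller("vm.list_vms"))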
#!/usr/bin/env python3
import connexion
from flask_cors import CORS
from kafka import KafkaConsumer

app = connexion.FlaskApp(__name__, specification_dir='../../openapi/')
app.add_api('specification.yaml')

# CORS added to support health check monitoring in the Cucumber dashboard
CORS(app.app)
#!/usr/bin/env python3
import logging

import connexion
from connexion import NoContent

logging.basicConfig(level=logging.DEBUG)

app = connexion.FlaskApp(__name__)
app.add_api('hello_service.yml', validate_responses=True)

# from http://coderobot.downley.net/swagger-driven-testing-in-python.html
# Set the WSGI application callable to allow using uWSGI:
#   uwsgi --http :8080 -w app
application = app.app

if __name__ == '__main__':
    app.run(port=8090)
def main():
    """
    Swagger server entrypoint function. It uses the argparse library to build
    a friendly command line interface.
    """
    logging.basicConfig(level=logging.INFO)

    python_dir = os.path.dirname(os.path.relpath(__file__))
    specification_dir = os.path.join(python_dir, 'swagger')
    swagger_file = os.path.abspath(
        os.path.join(specification_dir, 'swagger.yaml'))

    parser = argparse.ArgumentParser()
    parser.add_argument('--factory-abi', dest='factory', required=True,
                        help='Smart contract Factory JSON interface')
    parser.add_argument('--election-abi', dest='election', required=True,
                        help='Smart contract Election JSON interface')
    parser.add_argument('--address', required=True,
                        help='Smart contract address')
    parser.add_argument('--infura', required=True,
                        help='Infura URL to build the provider')
    parser.add_argument('-p', '--port', type=int, default=8080,
                        help='Port to run the API')
    parser.add_argument('--swagger-file', dest='swagger_file',
                        default=swagger_file)
    parser.add_argument('-d', '--debug', action='store_true', default=False)
    args = parser.parse_args()

    swagger_file = os.path.abspath(args.swagger_file)
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)

    app = connexion.FlaskApp(__name__,
                             specification_dir=os.path.dirname(swagger_file),
                             debug=args.debug)

    with open(args.factory, 'r') as _file:
        json_repr = json.loads(_file.read())
        if isinstance(json_repr, dict) and 'abi' in json_repr:
            factory_abi = json_repr['abi']
        else:
            factory_abi = json_repr

    with open(args.election, 'r') as _file:
        json_repr = json.loads(_file.read())
        if isinstance(json_repr, dict) and 'abi' in json_repr:
            election_abi = json_repr['abi']
        else:
            election_abi = json_repr

    provider = Web3.HTTPProvider(args.infura)
    factory_contract = Ethereum(provider, args.address, factory_abi)

    app.app.json_encoder = encoder.JSONEncoder
    app.add_api(os.path.basename(swagger_file),
                arguments={'title': 'Ethereum Voting'},
                pythonic_params=True)
    app.app.config['FACTORY_CONTRACT'] = factory_contract
    app.app.config['ELECTION_ABI'] = election_abi
    app.run(port=args.port)
def test_api_service():
    """
    Creates a mocked interface for each specified swagger spec and runs a
    server in a forked process to ensure the swagger validates fully.
    :return:
    """
    port = 8081
    for swagger in service_swaggers:
        pid = os.fork()
        if not pid:
            name = swagger.rsplit('/', 3)[2]
            logger.info('Starting server for: {} at: {}'.format(name, swagger))
            resolver = MockResolver(mock_all='all')
            api_extra_args = {'resolver': resolver}
            app = connexion.FlaskApp(name,
                                     swagger_json=False,
                                     swagger_ui=False)
            app.add_api(swagger,
                        resolver_error=True,
                        validate_responses=True,
                        strict_validation=True,
                        **api_extra_args)
            app.run(port=port)
        else:
            try:
                logger.info('Waiting to ping server with pid: {}'.format(pid))
                # Let the api initialize
                retries = 3
                killed = False
                for i in range(retries):
                    time.sleep(2)
                    proc_id, status = os.waitpid(pid, os.WNOHANG)
                    logger.info('Child pid: {}. Status = {}'.format(pid, status))
                    if proc_id == 0 and status == 0:
                        try:
                            logger.info('Killing pid {}'.format(pid))
                            os.kill(pid, signal.SIGTERM)
                            killed = True
                        except ProcessLookupError:
                            logger.info('Process {} not found, skipping kill'.format(pid))
                        finally:
                            break
                    elif status != 0 and killed:
                        logger.info('Confirmed child pid killed')
                        break
                    else:
                        pytest.fail('Mock service for {} failed to start properly'.format(swagger))
                        break
            except ProcessLookupError:
                logger.info('Process {} not found. Exiting cleanly'.format(pid))
            except (KeyboardInterrupt, Exception):
                raise
            finally:
                try:
                    os.kill(pid, signal.SIGKILL)
                except ProcessLookupError:
                    # This is expected
                    pass
                except Exception:
                    logger.exception("Failed to kill child process")
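Forking and signalling real processes is one way to smoke-test the mocked specs; a lighter in-process variant is sketched below. This is a hedged sketch, not the project's test: the probe path is arbitrary and only confirms the app routes at all, since a broken spec would already have raised inside add_api.

import connexion
from connexion.mock import MockResolver

def check_spec_boots(swagger_path):
    # Build the same mocked API in-process instead of forking a server.
    resolver = MockResolver(mock_all='all')
    app = connexion.FlaskApp('spec-check')
    app.add_api(swagger_path,
                resolver=resolver,
                validate_responses=True,
                strict_validation=True)
    client = app.app.test_client()
    # Any spec parsing/resolution error surfaces in add_api above; a 404/405
    # here simply shows the WSGI app is wired up and answering.
    assert client.get('/this-path-should-not-exist').status_code in (404, 405)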
def create():
    path = pathlib.Path(__file__).parent.parent.parent / 'swagger'
    app = connexion.FlaskApp(__name__, port=8000, specification_dir=str(path))
    app.add_api('api.yaml', arguments={'title': 'Hello World Example'})
    return app
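A factory like create() is usually consumed by a small runner module. The sketch below is hedged: the import path `app_factory` is a placeholder for wherever create() actually lives.

from app_factory import create  # hypothetical module name

connexion_app = create()          # builds the connexion.FlaskApp configured above
application = connexion_app.app   # underlying Flask WSGI app, e.g. for gunicorn/uWSGI

if __name__ == '__main__':
    connexion_app.run()           # serves on the port passed to FlaskApp (8000 here)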
if __name__ == "__main__":
    configure_logger()
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--insecure-add-idp",
        dest="insecure_idp",
        required=False,
        help="Point to the IdP metadata URL",
        default=False,
    )
    args = parser.parse_args()

    zapp = connexion.FlaskApp(__name__, port=443, specification_dir="./")
    zapp.app.config["SECRET_KEY"] = "onelogindemopytoolkit"
    zapp.app.config["SAML_PATH"] = pjoin(getcwd(), "saml")

    current_ip = gethostbyname(gethostname())
    base_url = "https://{current_ip}".format(current_ip=current_ip)
    zapp.app.config["entityId"] = pjoin(base_url, "metadata")

    custom_base_path = pjoin(zapp.app.config["SAML_PATH"], "settings.json")
    with open(custom_base_path) as fh:
        # safe_load avoids PyYAML's unsafe default loader (plain yaml.load now
        # requires an explicit Loader argument).
        saml_config = yaml.safe_load(fh.read())

    zapp.app.config["x509cert"] = saml_config["sp"]["x509cert"]
    zapp.app.config["privateKey"] = saml_config["sp"]["privateKey"]

    if args.insecure_idp:
        zapp.app.config["idp_url"] = args.insecure_idp
def create_app(config_object='config.DevelopmentConfig'):
    app = connexion.FlaskApp(__name__, specification_dir='api/specification/')

    # This is required for the Flask CLI and the FLASK_APP env variable to work
    flask_app = app.app
    flask_app.secret_key = os.environ.get('APP_SECRET_KEY')
    flask_app.config.from_object(config_object)

    # Specsynthase's SpecBuilder combines multiple Swagger yaml files into one.
    # This is purely used for modularity.
    # https://github.com/zalando/connexion/issues/254
    api_spec = _build_swagger_spec(SpecBuilder())

    # In case you don't want to show the swagger_ui for private endpoints,
    # you might want to split this into two APIs.
    enable_swagger = flask_app.config['ENABLE_SWAGGER_UI']
    app.add_api(api_spec, options={"swagger_ui": enable_swagger})

    db.init_app(flask_app)
    JWT.init_app(flask_app)
    migrate = Migrate(flask_app, db, compare_type=True)

    # CORS settings for allowing suggestions from a Skosmos client
    CORS(flask_app, resources={
        r"/api/suggestions": {
            "origins": os.environ.get('SKOSMOS_URI'),
            "allow_headers": ['Content-Type', 'Access-Control-Allow-Origin'],
            "methods": ['POST', 'OPTIONS']
        }
    })

    @flask_app.shell_context_processor
    def shell_context():  # pylint: disable=unused-variable
        # pylint: disable=import-outside-toplevel
        from pprint import pprint
        from api import models
        # pylint: enable=import-outside-toplevel
        return {
            'app': app,
            'db': models.db,
            'pprint': pprint,
            'models': models
        }

    @flask_app.cli.command()
    def prune():  # pylint: disable=unused-variable
        """
        Prunes the JWT token blacklist.

        Run this from the command line (or periodically from crontab, for example):
        >> flask prune
        """
        no_pruned = prune_expired_tokens()
        click.echo('{} expired tokens pruned from the database.'.format(no_pruned))

    @flask_app.cli.command()
    @click.argument('name')
    @click.argument('email')
    @click.argument('password')
    def create_admin(name, email, password):  # pylint: disable=unused-variable
        """
        Creates an admin user.

        Run this from the command line:
        >> flask create_admin
        """
        user = User(name=name, email=email, password=password,
                    role=UserRoles.ADMIN)
        db.session.add(user)
        db.session.commit()
        click.echo('Created admin user {}.'.format(email))

    return app
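The _build_swagger_spec helper is referenced but not shown here; the following is a hypothetical sketch of what it might look like (the api/specification/ layout is taken from the specification_dir above, the glob pattern is an assumption).

from pathlib import Path

def _build_swagger_spec(spec_builder):
    # Feed every YAML fragment in the specification folder into Specsynthase's
    # SpecBuilder so the modular files are merged into a single spec object.
    spec_dir = Path(__file__).parent / 'api' / 'specification'
    for fragment in sorted(spec_dir.glob('*.yaml')):
        spec_builder.add_spec(fragment)
    return spec_builder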
import connexion

options = {'swagger_url': '/apidocs'}
app = connexion.FlaskApp('bitshares-explorer-api', options=options)

from flask_cors import CORS
CORS(app.app)

from services.cache import cache
cache.init_app(app.app)

app.add_api('api.yaml')

import services.profiler
services.profiler.init_app(app.app)

application = app.app

if __name__ == "__main__":
    app.run(host='0.0.0.0', port=5000)
        api_token) + " (" + api_token + ")"
        res = submit_web_notification(message, api_token)
        return jsonify(res)
    else:
        return log("[ERROR] Invalid token", api_token)


# ----- Main entry point ----- #
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--port",
        type=int,
        default=9090,
        help="set the port that will be used to deploy the service")
    args = parser.parse_args()

    app = connexion.FlaskApp(__name__, port=args.port,
                             specification_dir=APP_BASE_DIR)
    CORS(app.app)
    app.add_api('ml_service-api.yaml',
                arguments={'title': 'Machine Learning Model Service'})
    dash_utils.init_dashboard(app.app)

    if HTTPS_ENABLED:
        context = (join(APP_BASE_DIR, 'certificate_mas.pem'),
                   join(APP_BASE_DIR, 'key_mas.pem'))
        app.run(debug=DEBUG_ENABLED, ssl_context=context)
    else:
        app.run(debug=DEBUG_ENABLED)
    else:
        return 'Bad Request', 400, {
            'empty-error': 'The cc parameter must not be empty'
        }


def delete_user(cc):
    if cc.strip():
        user = db.users.find_one({
            "cc": cc,
        })
        if user is not None:
            db.users.delete_one({
                "cc": cc,
            })
            return 'OK', 200
        else:
            return 'Not found', 404, {
                'exists-error': 'The user with the given cc was not found'
            }
    else:
        return 'Bad Request', 400, {
            'empty-error': 'The cc parameter must not be empty'
        }


if __name__ == '__main__':
    app = connexion.FlaskApp(__name__, port=5000, specification_dir='openapi/')
    app.add_api('indexer.yaml', arguments={'title': 'user api'})
    app.run()
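For a quick in-process check of a handler like delete_user, connexion's underlying Flask app exposes a test client. The sketch below assumes indexer.yaml maps DELETE /users/{cc} to this module's delete_user; the path and cc value are placeholders.

import connexion

app = connexion.FlaskApp(__name__, specification_dir='openapi/')
app.add_api('indexer.yaml', arguments={'title': 'user api'})
client = app.app.test_client()

resp = client.delete('/users/12345')   # path assumed from the spec
print(resp.status_code)                # 200 when the user exists, 404 otherwise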
import pathlib

import connexion
from flask import g, request
import structlog

try:
    import ijson.backends.yajl2_cffi as ijson
except ImportError:
    import ijson

from entityservice.logger_setup import setup_logging

# Logging setup
setup_logging()

# Define the WSGI application object.
# Note we explicitly do this before importing our own code.
con_app = connexion.FlaskApp(__name__, specification_dir='api_def', debug=True)
app = con_app.app

import entityservice.views
from entityservice.tracing import initialize_tracer
from flask_opentracing import FlaskTracer

from entityservice import database as db
from entityservice.serialization import generate_scores
from entityservice.object_store import connect_to_object_store
from entityservice.settings import Config as config
from entityservice.utils import fmt_bytes, iterable_to_stream

con_app.add_api(pathlib.Path("swagger.yaml"),
                base_path='/',
                strict_validation=config.CONNEXION_STRICT_VALIDATION,
                validate_responses=config.CONNEXION_RESPONSE_VALIDATION)
import connexion

if __name__ == '__main__':
    import sys
    if sys.platform == 'win32':
        import asyncio
        asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())

    app = connexion.FlaskApp(__name__, server='tornado')
    app.add_api('api.yaml', arguments={'title': 'My API', 'version': 'v1.0'})
    app.run(port=8999)
        vms.append(vm)

    # Generate machine status JSON file
    with open(machine_status_file, 'w') as f:
        f.write(json.dumps(vms))
    return True


if __name__ == '__main__':
    options = {
        "swagger_ui": True
    }
    app = connexion.FlaskApp(
        __name__,
        specification_dir='specs/',
        options=options
    )
    app.add_api("swagger.yaml")
    # Enable CORS on the underlying Flask app
    CORS(app.app)

    # DB mapping
    set_sql_debug(True)
    db.generate_mapping(create_tables=True)

    # Schedule the sync task hourly
    if os.getenv('AUTO_SYNC', 'false').lower() == 'true':
        scheduler = APScheduler()
        scheduler.add_job(func=svc_job, id='svc_job', trigger='interval',
                          hours=1, replace_existing=True)
        # scheduler.add_job(func=machine_check_job, id='machine_check_job',
        #                   trigger='interval', seconds=600, replace_existing=True)
        # scheduler.add_job(func=wechat_robot_job, id='wechat_robot_job',
        #                   trigger='cron', hour=1, minute=30, replace_existing=True)  # UTC date
        scheduler.start()
def create_app():
    """
    This method creates the Flask application.
    :return: Flask App Object
    """
    global db
    global app
    global migrate
    global api_app

    # first initialize the logger
    init_logger()

    api_app = connexion.FlaskApp(
        __name__,
        server='flask',
        specification_dir='openapi/',
    )

    # getting the flask app
    app = api_app.app

    flask_env = os.getenv('FLASK_ENV', 'None')
    if flask_env == 'development':
        config_object = 'config.DevConfig'
    elif flask_env == 'testing':
        config_object = 'config.TestConfig'
    elif flask_env == 'production':
        config_object = 'config.ProdConfig'
    else:
        raise RuntimeError(
            "%s is not recognized as a valid app environment. You have to set up the environment!" % flask_env)

    # Load config
    env = Environments(app)
    env.from_object(config_object)

    # creating redis instance
    create_redis(app)

    # loading communications
    import gooutsafe.comm as comm
    if flask_env != 'production':
        # disable communication for testing purposes
        comm.disabled = True
    else:
        comm.init_rabbit_mq(app)

    # registering db
    db = SQLAlchemy(
        app=app
    )

    # requiring the list of models
    import gooutsafe.models

    # creating migrate
    migrate = Migrate(
        app=app,
        db=db
    )

    # checking the environment
    if flask_env == 'testing':
        # we need to populate the db
        db.create_all()

    # registering to the api app all specifications
    register_specifications(api_app)

    return app
import os

import connexion

# Import formats for validation
import have_i_not_been_owned.api.formats
from have_i_not_been_owned.api.formats.json_encoder import CustomJSONEncoder

_cwd = os.path.abspath(os.path.dirname(__file__))

connexion_app = connexion.FlaskApp(__name__,
                                   specification_dir=os.path.join(_cwd, 'resources', 'schemas'))

# Set up a custom JSON encoder that can handle ObjectIDs.
connexion_app.app.json_encoder = CustomJSONEncoder

connexion_app.add_api('openapi.yaml')

# Expose the underlying Flask WSGI app for uWSGI, connexion, et al.
app = connexion_app.app

if __name__ == '__main__':
    connexion_app.run()
import re
import string
import sys
from datetime import datetime

import numpy as np
import pandas as pd

import connexion
from flask import Flask, jsonify

from Pipeline_mine import *
from XmlToJson import xmlToJson
from Train import *

# Instantiate our Flask app object
app = connexion.FlaskApp(__name__, port=8080, specification_dir='swagger/')
application = app.app


@app.route("/", methods=["GET", "POST"])
def health():
    # Test to make sure our service is actually healthy
    return "Message: Service is OK"


@app.route("/", methods=["GET", "POST"])
def predict(recordToTest):
    df = pd.DataFrame()
    df = GetJsonFromRecords(recordToTest)
    df1 = pd.DataFrame()
    df1 = df.copy()
import connexion
import traceback
import uuid

from anchore_engine.db import end_session
from flask import g

SWAGGER_DIR = 'swagger/'
SWAGGER_FILE = 'swagger.yaml'

try:
    # Set up the api server and routing defined by the swagger file.
    # Uses the x-swagger-router-controller directive to set which module handles the routes.
    application = connexion.FlaskApp(__name__, specification_dir=SWAGGER_DIR)
    application.add_api(SWAGGER_FILE)
except Exception:
    traceback.print_exc()
    raise

# Do some log config etc.
flask_app = application.app


@flask_app.before_request
def setup_session():
    """
    Preflight operation to set a request-specific db session into the request-global context.

    :return:
    """
    # flask_app.logger.debug('Setting up session on request init')
    return
def create_app():
    application = connexion.FlaskApp(__name__)
    application.add_api('swagger.yaml')
    return application.app
def main():
    app = connexion.FlaskApp(__name__, specification_dir='./swagger/')
    app.app.json_encoder = encoder.JSONEncoder
    app.add_api('swagger.yaml', arguments={'title': 'Mail REST API'})
    app.run(port=8080)
import http

import connexion
import connexion_buzz


class MyException(connexion_buzz.ConnexionBuzz):
    status_code = http.HTTPStatus.BAD_REQUEST


def index():
    raise MyException("basic test")


app = connexion.FlaskApp(__name__, specification_dir="openapi/")
app.app.register_error_handler(
    connexion_buzz.ConnexionBuzz,
    connexion_buzz.ConnexionBuzz.build_error_handler(),
)
app.add_api("my_api.yaml")
app.run(port=8080)
def main():
    logging.getLogger('connexion.operation').setLevel('INFO')
    ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
    YAML_DIR = config['SERVICE']['yaml_directory']
    METRIC_YAML = config['SERVICE']['metrics_yaml']
    METRIC_YML_PATH = os.path.join(ROOT_DIR, YAML_DIR, METRIC_YAML)
    SPDX_URL = config['EXTERNAL']['spdx_license_github']
    DATACITE_API_REPO = config['EXTERNAL']['datacite_api_repo']
    RE3DATA_API = config['EXTERNAL']['re3data_api']
    METADATACATALOG_API = config['EXTERNAL']['metadata_catalog']
    LOV_API = config['EXTERNAL']['lov_api']
    LOD_CLOUDNET = config['EXTERNAL']['lod_cloudnet']
    # BIOPORTAL_REST = config['EXTERNAL']['bioportal_rest']
    # BIOPORTAL_APIKEY = config['EXTERNAL']['bioportal_apikey']
    data_files_limit = int(config['SERVICE']['data_files_limit'])
    metric_specification = config['SERVICE']['metric_specification']

    # TODO: further implementation on authentication needed
    usr = config['USER']['usr']
    pwd = config['USER']['pwd']
    authen.service_username = usr
    authen.service_password = pwd

    preproc = Preprocessor()
    preproc.retrieve_metrics_yaml(METRIC_YML_PATH, data_files_limit, metric_specification)
    logger.info('Total metrics defined: {}'.format(preproc.get_total_metrics()))

    isDebug = config.getboolean('SERVICE', 'debug_mode')
    preproc.retrieve_licenses(SPDX_URL, isDebug)
    preproc.retrieve_datacite_re3repos(RE3DATA_API, DATACITE_API_REPO, isDebug)
    preproc.retrieve_metadata_standards(METADATACATALOG_API, isDebug)
    # preproc.retrieve_linkedvocabs(lov_api=LOV_API, lodcloud_api=LOD_CLOUDNET,
    #                               bioportal_api=BIOPORTAL_REST, bioportal_key=BIOPORTAL_APIKEY,
    #                               isDebugMode=False)
    preproc.retrieve_linkedvocabs(lov_api=LOV_API, lodcloud_api=LOD_CLOUDNET, isDebugMode=isDebug)
    preproc.retrieve_default_namespaces()

    logger.info('Total SPDX licenses: {}'.format(preproc.get_total_licenses()))
    logger.info('Total re3repositories found from datacite api: {}'.format(
        len(preproc.getRE3repositories())))
    logger.info('Total subject areas of imported metadata standards: {}'.format(
        len(preproc.metadata_standards)))
    logger.info('Total LD vocabs imported: {}'.format(len(preproc.getLinkedVocabs())))
    logger.info('Total default namespaces specified: {}'.format(
        len(preproc.getDefaultNamespaces())))

    # You can also use Tornado or gevent as the HTTP server; to do so, set server to 'tornado' or 'gevent'.
    app = connexion.FlaskApp(__name__, specification_dir=YAML_DIR)
    API_YAML = os.path.join(ROOT_DIR, YAML_DIR, config['SERVICE']['swagger_yaml'])
    app.app.json_encoder = encoder.JSONEncoder
    app.add_api(API_YAML,
                arguments={
                    'title': 'F-UJI : FAIRsFAIR Research Data Object Assessment Service'
                },
                validate_responses=True)
    app.app.wsgi_app = ProxyFix(app.app.wsgi_app)
    app.run(host=config['SERVICE']['service_host'],
            port=int(config['SERVICE']['service_port']))
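As the comment above notes, connexion's run() can hand the app to Tornado or gevent instead of the Flask development server. A minimal, self-contained sketch (the spec directory, file name and port are placeholders, and gevent must be installed):

import connexion

demo = connexion.FlaskApp(__name__, specification_dir='yaml/')
demo.add_api('swagger.yaml')

if __name__ == '__main__':
    # server may be 'flask' (default), 'tornado' or 'gevent'
    demo.run(port=8080, server='gevent')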
def main():
    app = connexion.FlaskApp(__name__, specification_dir='./swagger/')
    app.add_api('swagger.yaml')
    app.add_error_handler(Unauthorized, generic_render)
    # app.app.before_request(before_request)
    app.run(host=HOST, port=PORT)
import connexion
import traceback

from twisted.internet import reactor
from twisted.internet.task import LoopingCall
from twisted.web.wsgi import WSGIResource

# anchore modules
from anchore_engine.clients import catalog, localanchore, simplequeue
import anchore_engine.configuration.localconfig
import anchore_engine.services.common
import anchore_engine.subsys.taskstate
from anchore_engine.subsys import logger
import anchore_engine.clients.policy_engine
from anchore_engine.services.policy_engine.api.models import (
    ImageUpdateNotification, FeedUpdateNotification, ImageVulnerabilityListing,
    ImageIngressRequest, ImageIngressResponse, LegacyVulnerabilityReport)

try:
    application = connexion.FlaskApp(__name__, specification_dir='swagger/')
    application.add_api('swagger.yaml')
    app = application
except Exception as err:
    traceback.print_exc()
    raise err


# service funcs (must be here)
def createService(sname, config):
    global app

    try:
        myconfig = config['services'][sname]
    except Exception as err:
        raise err
        fileMetadata['name'] = file.filename
        fileMetadata['size'] = len(content)
        fileMetadata['mimetype'] = file.content_type
        # fileMetadata['mimetype'] = magic.Magic(mime=True).from_file(file)
        # fileMetadata['mimetype'] = MimeTypes().guess_type(file)

        # What gets extracted depends on the file type
        fileType = filetype.guess(file)
        if fileType is None:
            # If the type is unknown, assume a text-like file (csv, json, xml, txt, etc.)
            try:
                fileMetadata['content'] = content.decode()
            except Exception:
                return 'File format not supported'
        else:
            # The file type is known
            fileMetadata['extension'] = fileType.extension
    else:
        return 'File not supported: please upload a csv, png or txt file'

    return jsonify(fileMetadata), 200


if __name__ == "__main__":
    app = connexion.FlaskApp(__name__, port=9090, specification_dir='')
    app.add_api('api.yml')
    # The secret key lives on the underlying Flask app
    app.app.secret_key = 'super secret key'
    app.run(debug=True, port=8000, host="0.0.0.0")
cors_enabled = cfg.CORS_ENABLED

log = logging.getLogger('werkzeug')
log.disabled = True

logging.Formatter.converter = gmtime
log_format = '%(asctime)-15s.%(msecs)03dZ %(levelname)-7s [%(threadName)-10s] : %(name)s - %(message)s'
if debug:
    logging.basicConfig(datefmt='%Y-%m-%dT%H:%M:%S', format=log_format, level=logging.DEBUG)
else:
    logging.basicConfig(datefmt='%Y-%m-%dT%H:%M:%S', format=log_format, level=logging.INFO)

app = connexion.FlaskApp(__name__, debug=debug, specification_dir=cfg.API_LOC)
app.add_api('contribution.yaml',
            base_path=cfg.CONTRIBUTION_URL_PREFIX,
            arguments={'title': 'Rokwire'},
            resolver=RokwireResolver('controllers'),
            resolver_error=501,
            strict_validation=True)

if cors_enabled:
    CORS(app.app)

if __name__ == '__main__':
    app.run(port=5000, host=None, server='flask', debug=debug)
from os import environ as env

import connexion
from dotenv import find_dotenv, load_dotenv
from flask_cors import CORS
from flask_login import LoginManager

from constants import GENERAL, GOODREADS

# load env variables
ENV_FILE = find_dotenv()
if ENV_FILE:
    load_dotenv(ENV_FILE)

# initialize LoginManager class
login_manager = LoginManager()

# Create the application instance
APP_INSTANCE = connexion.FlaskApp(__name__)

# Read the api_config.yml file to configure the endpoints
APP_INSTANCE.add_api('api_config.yml')
FLASK_APP = APP_INSTANCE.app

# get sensitive env vars from the untracked .env file
GOODREADS_KEY = env.get(GOODREADS.KEY)
GOODREADS_SECRET = env.get(GOODREADS.SECRET)
SECRET_KEY = env.get(GENERAL.FLASK_SECRET)

# make sure the env vars exist
if GOODREADS_KEY is None or GOODREADS_SECRET is None or SECRET_KEY is None:
    raise ValueError('You need to specify {}, {}, {} in the .env file'.format(
        GOODREADS.KEY, GOODREADS.SECRET, GENERAL.FLASK_SECRET))
def create_app(config=None) -> connexion.FlaskApp:
    # configure_logger()
    logger.info("creating flask app", config=config)

    region = "localhost"
    host = f"http://dynamodb:{CFG.DYNALITE_PORT}" if is_docker() else CFG.DYNALITE_URL
    stripe.api_key = CFG.STRIPE_API_KEY

    logger.debug("aws", aws=CFG.AWS_EXECUTION_ENV)
    if CFG.AWS_EXECUTION_ENV:
        region = "us-west-2"
        host = None
    logger.info("app", port=CFG.DYNALITE_PORT, table_name=CFG.DELETED_USER_TABLE)

    options = dict(swagger_ui=CFG.SWAGGER_UI)
    app = connexion.FlaskApp(__name__, specification_dir="./", options=options)
    app.add_api(
        "swagger.yaml",
        pass_context_arg_name="request",
        strict_validation=True,
        validate_responses=True,
    )

    app.app.subhub_account = SubHubAccount(
        table_name=CFG.USER_TABLE, region=region, host=host
    )
    app.app.subhub_deleted_users = SubHubDeletedAccount(
        table_name=CFG.DELETED_USER_TABLE, region=region, host=host
    )
    if not app.app.subhub_account.model.exists():
        app.app.subhub_account.model.create_table(
            read_capacity_units=1, write_capacity_units=1, wait=True
        )
    if not app.app.subhub_deleted_users.model.exists():
        app.app.subhub_deleted_users.model.create_table(
            read_capacity_units=1, write_capacity_units=1, wait=True
        )

    # Setup error handlers
    @app.app.errorhandler(SubHubError)
    def display_subhub_errors(e: SubHubError):
        if e.status_code == 500:
            logger.error("display sub errors", error=e)
        response = jsonify(e.to_dict())
        response.status_code = e.status_code
        return response

    for error in (
        stripe.error.APIConnectionError,
        stripe.error.APIError,
        stripe.error.RateLimitError,
        stripe.error.IdempotencyError,
    ):
        app.app.errorhandler(error)(intermittent_stripe_error)

    for error in (stripe.error.AuthenticationError,):
        app.app.errorhandler(error)(server_stripe_error)

    for error in (
        stripe.error.InvalidRequestError,
        stripe.error.StripeErrorWithParamCode,
    ):
        app.app.errorhandler(error)(server_stripe_error_with_params)

    for error in (stripe.error.CardError,):
        app.app.errorhandler(error)(server_stripe_card_error)

    for error in (pynamodb.exceptions.GetError,):
        app.app.errorhandler(error)(database_connection_error)

    @app.app.before_request
    def before_request():
        headers = dump_safe_headers(request.headers)
        logger.bind(correlation_id=extract_safe(headers, "X-Amzn-Trace-Id"))
        logger.debug("Request headers", headers=headers)
        logger.debug("Request body", body=request.get_data())
        g.subhub_account = current_app.subhub_account
        g.subhub_deleted_users = current_app.subhub_deleted_users
        g.app_system_id = None
        if CFG.PROFILING_ENABLED:
            if "profile" in request.args and not hasattr(sys, "_called_from_test"):
                from pyinstrument import Profiler

                g.profiler = Profiler()
                g.profiler.start()

    @app.app.after_request
    def after_request(response):
        logger.unbind("correlation_id")
        if not hasattr(g, "profiler") or hasattr(sys, "_called_from_test"):
            return response
        if CFG.PROFILING_ENABLED:
            g.profiler.stop()
            output_html = g.profiler.output_html()
            return app.app.make_response(output_html)
        return response

    CORS(app.app)
    return app
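The for-loops above register one handler per Stripe/pynamodb exception type by calling errorhandler() directly rather than using it as a decorator. A tiny self-contained illustration of that registration style (DemoError and the handler are hypothetical):

from flask import Flask, jsonify

flask_app = Flask(__name__)

class DemoError(Exception):
    """Hypothetical error type used only for this illustration."""

def handle_demo_error(e):
    return jsonify(error=str(e)), 503

# errorhandler() returns a decorator, so calling it directly registers the
# handler; this is equivalent to decorating handle_demo_error with
# @flask_app.errorhandler(DemoError).
flask_app.errorhandler(DemoError)(handle_demo_error)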
save_orbit_files()
flyTo_initialize()

# Save stats
from stats import *
dict_stats = main()
dict_stats1 = main1()
dict_stats2 = main2()
img1 = save_stats_as_img(dict_stats1)
img1.save('./static/stats/info1.png')
img2 = save_stats_as_img(dict_stats2)
img2.save('./static/stats/info2.png')

# Create the application instance
app = connexion.FlaskApp(__name__, specification_dir='./')

# Read the swagger.yml file to configure the endpoints
app.add_api('swagger.yml')

# Add CORS support
CORS(app.app)


# Create a URL route in our application for "/"
@app.route('/')
def home():
    """
    This function just responds to the browser URL localhost:5000/
def createService(sname, config):
    global monitor_threads, monitors, servicename

    try:
        application = connexion.FlaskApp(__name__, specification_dir='swagger/')
        flask_app = application.app
        flask_app.url_map.strict_slashes = False
        anchore_engine.subsys.metrics.init_flask_metrics(flask_app, servicename=servicename)
        application.add_api('swagger.yaml')
    except Exception as err:
        traceback.print_exc()
        raise err

    try:
        myconfig = config['services'][sname]
        servicename = sname
    except Exception as err:
        raise err

    try:
        kick_timer = int(myconfig['cycle_timer_seconds'])
    except Exception:
        kick_timer = 1

    doapi = False
    try:
        if myconfig['listen'] and myconfig['port'] and myconfig['endpoint_hostname']:
            doapi = True
    except Exception:
        doapi = False

    kwargs = {}
    kwargs['kick_timer'] = kick_timer
    kwargs['monitors'] = monitors
    kwargs['monitor_threads'] = monitor_threads
    kwargs['servicename'] = servicename

    if doapi:
        # start up flask service
        flask_site = WSGIResource(reactor, reactor.getThreadPool(), application=flask_app)
        realroot = Resource()
        realroot.putChild(
            b"v1",
            anchore_engine.services.common.getAuthResource(flask_site, sname, config))
        realroot.putChild(b"health", anchore_engine.services.common.HealthResource())
        # This will rewrite any calls that do not have an explicit version to the
        # base path before they are processed by flask.
        root = rewrite.RewriterResource(realroot, default_version_rewrite)
        ret_svc = anchore_engine.services.common.createServiceAPI(root, sname, config)

        # start up the monitor as a looping call
        lc = LoopingCall(anchore_engine.services.common.monitor, **kwargs)
        lc.start(1)
    else:
        # start up the monitor as a timer service
        svc = internet.TimerService(1, anchore_engine.services.common.monitor, **kwargs)
        svc.setName(sname)
        ret_svc = svc

    return ret_svc
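The WSGIResource plumbing above embeds the Flask WSGI app inside anchore's twisted service machinery. Stripped of the anchore-specific resources, the same embedding looks roughly like this (a hedged sketch; the route and port are arbitrary):

from flask import Flask
from twisted.internet import reactor
from twisted.web.server import Site
from twisted.web.wsgi import WSGIResource

flask_app = Flask(__name__)

@flask_app.route("/health")
def health():
    return "OK"

# Wrap the WSGI app so twisted's reactor can serve it on a thread pool.
resource = WSGIResource(reactor, reactor.getThreadPool(), flask_app)
site = Site(resource)

if __name__ == "__main__":
    reactor.listenTCP(8080, site)
    reactor.run()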