def create_app(db_url):
    """Create and configure the organisations Flask application.

    Wires the database session onto the app, registers the per-request
    session teardown and the API, and optionally attaches a pydevd remote
    debugger driven by environment variables.

    :param db_url: connection url to the database being used
    :returns: the initialized and created app instance
    """
    app = Flask(__name__)
    (app.db_session, app.db_metadata, app.db_engine) = init_db(db_url)

    @app.teardown_request
    def shutdown_session(exception=None):
        # Return the scoped session to the pool after every request so
        # database connections are not leaked.
        app.db_session.remove()

    create_api(app, API_VERSION)

    # support for remote debugging in Intellij and pycharm
    #
    # Set IDEA_ORGANISATIONS_REMOTE_DEBUG_ON to True in your environment
    # prior to starting the application to get remote debugging.
    #
    # Set IDEA_REMOTE_DEBUG_SERVER to the ip/hostname of the machine running the
    # debug server.
    #
    # Set IDEA_ORGANISATIONS_REMOTE_DEBUG_PORT to the port of the debug server process.
    #
    # For the remote debugging to work you will also have to make sure
    # the pycharm-debug.egg is on your path (check your environment file).
    if os.environ.get('IDEA_ORGANISATIONS_REMOTE_DEBUG_ON') == 'True':
        server = os.environ.get('IDEA_REMOTE_DEBUG_SERVER')
        port = os.environ.get('IDEA_ORGANISATIONS_REMOTE_DEBUG_PORT')
        app.logger.info("Idea remote debugging is on! Will connect to debug server running on %s:%s"
                        % (server, port))
        import pydevd
        pydevd.settrace(server, port=int(port), stdoutToServer=True,
                        stderrToServer=True)

    return app
def create_app(db_url):
    """Create and configure the Flask application bound to ``db_url``.

    Registers the session teardown hook, the Jinja2 template filters used
    by the views (``strftime``, ``nl2br``, ``sort_vedtak``), the API and
    the auth bouncer, and optionally attaches a pydevd remote debugger.

    :param db_url: connection url to the database being used
    :returns: the initialized and created app instance
    """
    app = Flask(__name__)
    (app.db_session, app.db_metadata, app.db_engine) = init_db(db_url)
    app.debug = os.environ.get('DEBUG') == 'True'
    # One or more consecutive line breaks mark a paragraph boundary.
    _paragraph_re = re.compile(r'(?:\r\n|\r|\n){1,}')

    @app.teardown_request
    def shutdown_session(exception=None):
        # Return the scoped session after every request.
        app.db_session.remove()

    @app.template_filter('strftime')
    def _jinja2_filter_datetime(date, in_format='%Y-%m-%d', out_format='%d-%m-%Y'):
        # Reformat a date string; falsy input yields None (renders empty).
        if date:
            date = datetime.datetime.strptime(date, in_format)
            return date.strftime(out_format)

    @app.template_filter('nl2br')
    def _nl2br(value):
        # Escape the value, then wrap paragraphs in <p> and newlines in <br>.
        result = u'\n\n'.join(u'<p>%s</p>' % p.replace('\n', '<br>\n') \
            for p in _paragraph_re.split(escape(value)))
        result = Markup(result)
        return result

    @app.template_filter('sort_vedtak')
    def _jinja2_filter_sort_vedtak(vedtak):
        # Sort vedtak newest-first on 'vedtaksdato'; entries without a date
        # sort as "now" (ISO string) so they come out first.
        if len(vedtak) == 0:
            return []
        else:
            # NOTE(review): `key=id` sorts by object identity (memory
            # address), so the tie-break order is nondeterministic —
            # presumably `lambda v: v.get('id')` was intended; confirm
            # before changing.
            id_sorted = sorted(vedtak, key=id)
            s = sorted(id_sorted, reverse=True,
                       key=lambda v: v.get('vedtaksdato') if v.get('vedtaksdato')
                       else datetime.datetime.now().isoformat())
            return s

    create_api(app, API_VERSION)
    create_bouncer(app)

    # support for remote debugging in Intellij and pycharm
    #
    # Set IDEA_SAK_REMOTE_DEBUG_ON to True in your environment
    # prior to starting the application to get remote debugging.
    #
    # Set IDEA_REMOTE_DEBUG_SERVER to the ip/hostname of the machine running the
    # debug server.
    #
    # Set IDEA_SAK_REMOTE_DEBUG_PORT to the port of the debug server process.
    #
    # For the remote debugging to work you will also have to make sure
    # the pycharm-debug.egg is on your path (check your environment file).
    if os.environ.get('IDEA_SAK_REMOTE_DEBUG_ON') == 'True':
        server = os.environ.get('IDEA_REMOTE_DEBUG_SERVER')
        port = os.environ.get('IDEA_SAK_REMOTE_DEBUG_PORT')
        app.logger.info("Idea remote debugging is on! Will connect to debug server running on %s:%s"
                        % (server, port))
        import pydevd
        pydevd.settrace(server, port=int(port), suspend=False,
                        stdoutToServer=True, stderrToServer=True)

    return app
def test_config():
    """The factory honours TESTING: off by default, on when configured."""
    default_app = create_api()
    assert not default_app.testing

    overrides = {
        'TESTING': True,
        # empty URI silences a missing-database warning
        'SQLALCHEMY_DATABASE_URI': ''
    }
    configured_app = create_api(overrides)
    assert configured_app.testing
def create_app(username, password, matrikkel_url, matrikkel_user, matrikkel_pass):
    """Build the Flask app protected by HTTP basic auth.

    :param username: basic-auth username required for every request
    :param password: basic-auth password required for every request
    :param matrikkel_url: endpoint of the matrikkel service
    :param matrikkel_user: username for the matrikkel service
    :param matrikkel_pass: password for the matrikkel service
    :returns: the configured Flask application
    """
    app = Flask(__name__)

    # Force basic auth on every route, not only decorated ones.
    app.config.update(
        BASIC_AUTH_FORCE=True,
        BASIC_AUTH_USERNAME=username,
        BASIC_AUTH_PASSWORD=password,
    )

    create_api(app, API_VERSION, matrikkel_url, matrikkel_user, matrikkel_pass)

    # Outside debug mode, make sure log records still reach a stream.
    if not app.debug:
        app.logger.addHandler(StreamHandler())

    return app
def create_app(db_url):
    """Create the Flask app wired to the database at ``db_url``.

    :param db_url: connection url to the database being used
    :returns: the configured Flask application
    """
    app = Flask(__name__)
    app.db_session, app.db_metadata, app.db_engine = init_db(db_url)
    app.debug = os.environ.get('DEBUG') == 'True'

    @app.teardown_request
    def shutdown_session(exception=None):
        # Release the scoped session after each request.
        app.db_session.remove()

    # Ensure log output is visible when not running in debug mode.
    if not app.debug:
        app.logger.addHandler(StreamHandler())

    create_api(app, API_VERSION)
    create_bouncer(app)
    return app
def create_app():
    """Application factory: configure Flask, the API, errors and logging."""
    flask_app = Flask(__name__)
    flask_app.config.from_object(selected_config)

    create_api(flask_app)
    register_error_handlers(flask_app)

    # Route log output both to file and to the console at the configured level.
    configurator = LogConfigurator(current_config)
    handlers = [
        configurator.create_file_handler(),
        configurator.create_console_handler(),
    ]
    logging.basicConfig(handlers=handlers,
                        level=configurator.get_current_log_level())
    return flask_app
def setUp(self):
    """Create the test app, expose a client factory and push an app context."""
    self.app = create_api("testing")
    self.client = self.app.test_client
    # Bind the app to the current context for the duration of the test.
    context = self.app.app_context()
    context.push()
def create_app(db_url):
    """Build the Flask application bound to the database at ``db_url``.

    :param db_url: connection url to the database being used
    :returns: the configured Flask application
    """
    app = Flask(__name__)
    app.db_session, app.db_metadata, app.db_engine = init_db(db_url)

    @app.teardown_request
    def shutdown_session(exception=None):
        # Hand the scoped session back after every request.
        app.db_session.remove()

    create_api(app, API_VERSION)

    # Outside debug mode Flask adds no handler of its own.
    if not app.debug:
        app.logger.addHandler(StreamHandler())

    check_environment(app)
    return app
def initialize_app():
    """Verify configuration, prepare storage and return the API application."""
    verify_parameters()
    db = storage.prepare_storage(database_connector())
    ensure_data(db)
    # The API receives a session factory rather than the raw storage handle.
    session_factory = storage.DBSessionFactory(db)
    return api.create_api([session_factory])
def test_create_api_right_token_in_headers():
    """The CSRF token scraped at startup is sent with the login request."""
    responses.add(responses.GET, "https://www.hackerrank.com/", body=TEST_PAGE)
    hr_api = create_api()
    responses.add(responses.POST, 'https://www.hackerrank.com/auth/login',
                  json={"status": True})
    hr_api.login("42", "42")
    # calls[1] is the login POST; calls[0] was the initial page fetch.
    login_request = responses.calls[1].request
    assert login_request.headers["X-CSRF-Token"] == "spam"
def setUp(self):
    """Create the Flask test app and rebuild the database schema."""
    self.current_api = create_api('config')
    with self.current_api.app_context():
        # Drop every table, then recreate the schema from scratch.
        db.drop_all()
        db.create_all()
def cli() -> None:
    """Log in with env credentials and write every submission to disk."""
    user = os.environ.get("LOGIN")
    secret = os.environ.get("PASSWORD")
    api = create_api()
    api.login(user, secret)
    for submission in api.submissions():
        # Fetch the full challenge for each submission before writing it out.
        challenge = api.challenge(submission.slug)
        to_file(submission, challenge)
def create_app():
    """WSGI entry point to the api."""
    app = Flask(__name__)
    load_config(app)

    # Verbose SQL logging in debug mode, plain INFO logging otherwise.
    log_format = '%(levelname)s:%(message)s'
    if app.config["DEBUG"]:
        logging.basicConfig(format=log_format, level=logging.DEBUG)
        logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
    else:
        logging.basicConfig(format=log_format, level=logging.INFO)

    logging.debug("Creating application in thread with pid: %d", os.getpid())

    @app.before_first_request
    def load_database():
        # Defer database setup until the first request hits this worker.
        initialize_database(app.config["DB_URL"])

    create_api(app)
    return app
def setUp(self):
    """Create the test app, push a context, and stub out the upload module."""
    self.app = create_api("testing")
    self.client = self.app.test_client
    # Bind the app to the current context.
    context = self.app.app_context()
    context.push()
    # Replace api.upload with a mock for the duration of the test.
    self.upload_patch = mock.patch("api.upload")
    self.mock_upload = self.upload_patch.start()
def initialize_app():
    """Wire up storage, seed data and build the API with a root error handler."""
    configurables.verify(REQUIRED_OPTIONS)
    db_store = db.prepare_storage(configurables.database_connector(), 1200)
    ensure_data(db_store())
    app = api.create_api([db.SessionManager(db_store)])
    # The root error handler differs between production and development.
    app.add_error_handler(Exception, configurables.root_error_handler())
    return app
def setUp(self):
    """Build the app and client, then rebuild the database with fixtures."""
    self.api = create_api()
    self.api.app_context().push()
    self.client = self.api.test_client
    # Every request in the suite authenticates with the same bearer token.
    token = os.getenv("TOKEN")
    self.headers = {'Authorization': f'Bearer {token}'}
    self.db = db
    self.db.init_app(self.api)
    # Flush any open transaction before rebuilding the schema.
    self.db.session.commit()
    self.db.drop_all()
    self.db.create_all()
    self.inserts()
def api():
    """Yield a test app whose database contains the standard test user."""
    config_overrides = {
        'TESTING': True,
        'SQLALCHEMY_DATABASE_URI': 'postgresql://*****:*****@localhost/app_name_sandbox',
        'SQLALCHEMY_TRACK_MODIFICATIONS': False,
        'SECRET_KEY': 'test'
    }
    api = create_api(config_overrides)
    with api.app_context():
        # Seed the well-known test user exactly once.
        existing = User.query.filter_by(email='*****@*****.**').first()
        if existing is None:
            db.session.add(User('*****@*****.**', 'test'))
            db.session.commit()
    yield api
def create_app(challenge_dir=None, static_dir=None):
    """Assemble the FastAPI app: static files, API router and error handling.

    :param challenge_dir: directory the state keeper loads challenges from
    :param static_dir: optional directory served under ``/static``
    :returns: the configured FastAPI application
    """
    app = FastAPI()

    if static_dir:
        app.mount("/static", StaticFiles(directory=static_dir), name="static")

    keeper = StateKeeper(challenge_dir)
    app.include_router(create_api(keeper), prefix="/api")

    @app.get("/")
    async def redirect_static():
        # The UI lives in the static tree; send the root there.
        return RedirectResponse(url="/static/index.html")

    @app.exception_handler(ResourceNotFoundError)
    async def handle_resource_not_found(request: Request, exc: ResourceNotFoundError):
        return JSONResponse(status_code=404, content={"message": "Resource not found"})

    return app
def create_app(**config):
    """Create the AngularFlask application and mount the API blueprint.

    Keyword arguments override the default Flask configuration.  Module-level
    side effect: rebinds the global ``app``.

    :returns: the configured application
    """
    global app
    app = AngularFlask(__name__, static_folder='static', static_url_path='')

    # The endpoints module is optional; run without it when absent.  The
    # original bare `except:` also swallowed unrelated errors (syntax errors
    # in endpoints, KeyboardInterrupt); only missing-module is expected here.
    try:
        import endpoints
        app.config['ENDPOINTS'] = endpoints.ENDPOINTS
    except ImportError:
        pass

    app.config.update(config)
    app.db = api.auth_layer.init(config.get('CLIENT', None))
    app.register_blueprint(api.create_api(app), url_prefix='/api')

    @app.before_request
    def add_user():
        """Resolve the authenticated user onto the request, or abort 403."""
        if not app.config.get('REQUIRE_USER', False):
            return
        if 'REMOTE_USER' in request.environ:
            # "DOMAIN\\username" from the web server; keep the bare username.
            username = request.environ['REMOTE_USER'].split('\\')[1]
        elif 'TEST_USER' in app.config:
            # Bug fix: the membership test is on app.config, so read the value
            # from app.config too (the original read request.environ, which the
            # guard never checked).
            username = app.config['TEST_USER']
        else:
            abort(403)
        request.user = app.db.users.find_one({'username': username})
        if request.user is None:
            abort(403)

    @app.route('/')
    def index():
        return app.send_static_file('index.html')

    return app
def appclient():
    """
    :return: test client for API app.
    :rtype: flask.testing.FlaskClient
    """
    app = api.create_api()
    client = app.test_client()

    # Helpers for testing API responses
    def validate_response(response, response_code=200,
                          content_type='application/vnd.api+json'):
        """
        Standard checks for any response from the server.

        :param flask.Response response: Response to request from flask test client.
        :param int response_code: Expected response code value.
        :param str content_type: Expected response MIME type (Should be JSONAPI)
        """
        assert response.status_code == response_code
        # Every response must carry the fixed CORS and caching policy.
        assert response.headers['Access-Control-Allow-Headers'] == \
            'Authorization, Content-Type, X-Requested-With'
        assert response.headers[
            'Access-Control-Allow-Methods'] == 'GET, POST, PATCH, DELETE'
        assert response.headers['Access-Control-Allow-Origin'] == '*'
        assert response.headers['Access-Control-Max-Age'] == '86400'
        assert response.headers['Cache-Control'] == 'private, max-age=60'
        assert response.headers['Content-Type'] == content_type
        # 204 No Content responses have an empty body by definition.
        if response_code != 204:
            assert int(response.headers['Content-Length']) > 0
        assert response.headers['Strict-Transport-Security'] == 'max-age=31536000; includeSubDomains'  # pylint: disable=line-too-long
        # Caching must be governed by Cache-Control alone, not legacy headers.
        assert 'Expires' not in response.headers
        assert 'Pragma' not in response.headers
        assert 'Vary' not in response.headers

    # Expose the validator on the client so tests can call it directly.
    client.validate_response = validate_response
    return client
# -*- coding: utf-8 -*- import os from flask import Flask, send_from_directory, g, request from flask.ext.sqlalchemy import SQLAlchemy import api, database web_app = Flask(__name__, static_folder='www/static') web_app.config.from_object(os.environ['APP_SETTINGS']) database.AppRepository.db = SQLAlchemy(web_app) app_directory = os.path.join(os.getcwd(), 'app') www_directory = os.path.join(app_directory, 'www') mock_directory = os.path.join(app_directory, 'mocks') api.create_api(web_app) def set_domain(): if web_app.config['DEVELOPMENT']: return 'http://127.0.0.1:8000' return 'https://pivocram.herokuapp.com' DOMAIN = set_domain() @web_app.before_request def before_request(): """ Check if the token received in header is still a valid token from an user. """
import sys
from datetime import datetime

# Make the project package importable when run from this directory.
sys.path.insert(0, '../')

from api import create_api, db
from api.clients.models import Client
from api.clients.services import ClientService
from api.finances.services import RecordService

api = create_api('config')

if __name__ == "__main__":
    with api.app_context():
        # Drop every table in the database.
        db.drop_all()
        # (Re)create the database schema.
        db.create_all()

        # Query smoke test.
        # NOTE(review): the result is never used — presumably a quick check
        # that the models map correctly; confirm before removing.
        users = Client.query.all()

        # Seed the database with a batch of clients, committed in one go.
        for i in range(1000):
            username, email = f'admin{i}', f'admin{i}@finances.com'
            u = Client(username, email)
            db.session.add(u)
        db.session.commit()

        data = ClientService.get_all()
"""A module with the sole purpose of running the Sanic Web API.""" import os from api import create_api if __name__ == "__main__": API = create_api() API.run(host='0.0.0.0', port=int(os.environ.get("PORT", 5000)))
def create_app(config_name):
    """Application factory for the Open Targets REST API.

    Loads configuration, wires Redis / Elasticsearch / Mixpanel handlers,
    mounts the versioned API blueprints and swagger docs, and installs the
    CORS and cache-control response headers.

    :param config_name: key into the ``config`` mapping selecting the
        configuration class (e.g. ``'development'``)
    :returns: the configured Flask application
    """
    app = Flask(__name__, static_url_path='')
    # This first loads the configuration from eg. config['development'] which
    # corresponds to the DevelopmentConfig class in the config.py
    app.config.from_object(config[config_name])
    # Then you can override the values with the contents of the file the
    # OPENTARGETS_API_LOCAL_SETTINGS environment variable points to.
    # For eg:
    # $ export OPENTARGETS_API_LOCAL_SETTINGS=/path/to/settings.cfg
    #
    # where settings.cfg looks like:
    #
    # DEBUG = False
    # SECRET_KEY = 'foo'
    #
    app.config.from_envvar("OPENTARGETS_API_LOCAL_SETTINGS", silent=True)

    config[config_name].init_app(app)
    api_version = app.config['API_VERSION']
    api_version_minor = app.config['API_VERSION_MINOR']

    app.logger.info('looking for elasticsearch at: %s' % app.config['ELASTICSEARCH_URL'])

    app.extensions['redis-core'] = Redis(app.config['REDIS_SERVER_PATH'], db=0)  #served data
    app.extensions['redis-service'] = Redis(app.config['REDIS_SERVER_PATH'], db=1)  #cache, rate limit and internal things
    app.extensions['redis-user'] = Redis(app.config['REDIS_SERVER_PATH'], db=2)  # user info
    '''setup cache'''
    # Disable Redis persistence: this instance is a pure cache.
    app.extensions['redis-service'].config_set('save','')
    app.extensions['redis-service'].config_set('appendonly', 'no')
    icache = InternalCache(app.extensions['redis-service'],
                           str(api_version_minor))
    ip2org = IP2Org(icache)
    if app.config['ELASTICSEARCH_URL']:
        es = Elasticsearch(app.config['ELASTICSEARCH_URL'],
                           # # sniff before doing anything
                           # sniff_on_start=True,
                           # # refresh nodes after a node fails to respond
                           # sniff_on_connection_fail=True,
                           # # and also every 60 seconds
                           # sniffer_timeout=60
                           timeout=60 * 20,
                           maxsize=32,
                           )
    else:
        es = None
    '''elasticsearch handlers'''
    app.extensions['esquery'] = esQuery(
        es,
        DataTypes(app),
        DataSourceScoring(app),
        index_data=app.config['ELASTICSEARCH_DATA_INDEX_NAME'],
        index_drug=app.config['ELASTICSEARCH_DRUG_INDEX_NAME'],
        index_efo=app.config['ELASTICSEARCH_EFO_LABEL_INDEX_NAME'],
        index_eco=app.config['ELASTICSEARCH_ECO_INDEX_NAME'],
        index_genename=app.config['ELASTICSEARCH_GENE_NAME_INDEX_NAME'],
        index_expression=app.config['ELASTICSEARCH_EXPRESSION_INDEX_NAME'],
        index_reactome=app.config['ELASTICSEARCH_REACTOME_INDEX_NAME'],
        index_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_INDEX_NAME'],
        index_search=app.config['ELASTICSEARCH_DATA_SEARCH_INDEX_NAME'],
        index_relation=app.config['ELASTICSEARCH_DATA_RELATION_INDEX_NAME'],
        docname_data=app.config['ELASTICSEARCH_DATA_DOC_NAME'],
        docname_drug=app.config['ELASTICSEARCH_DRUG_DOC_NAME'],
        docname_efo=app.config['ELASTICSEARCH_EFO_LABEL_DOC_NAME'],
        docname_eco=app.config['ELASTICSEARCH_ECO_DOC_NAME'],
        docname_genename=app.config['ELASTICSEARCH_GENE_NAME_DOC_NAME'],
        docname_expression=app.config['ELASTICSEARCH_EXPRESSION_DOC_NAME'],
        docname_reactome=app.config['ELASTICSEARCH_REACTOME_REACTION_DOC_NAME'],
        docname_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_DOC_NAME'],
        docname_search=app.config['ELASTICSEARCH_DATA_SEARCH_DOC_NAME'],
        # docname_search_target=app.config['ELASTICSEARCH_DATA_SEARCH_TARGET_DOC_NAME'],
        # docname_search_disease=app.config['ELASTICSEARCH_DATA_SEARCH_DISEASE_DOC_NAME'],
        docname_relation=app.config['ELASTICSEARCH_DATA_RELATION_DOC_NAME'],
        log_level=app.logger.getEffectiveLevel(),
        cache=icache
        )
    app.extensions['es_access_store'] = esStore(es,
                                                eventlog_index=app.config['ELASTICSEARCH_LOG_EVENT_INDEX_NAME'],
                                                ip2org=ip2org,
                                                )
    '''mixpanel handlers'''
    if Config.MIXPANEL_TOKEN:
        mp = Mixpanel(Config.MIXPANEL_TOKEN, consumer=AsyncBufferedConsumer())
        app.extensions['mixpanel']= mp
        app.extensions['mp_access_store'] = MixPanelStore(
            mp,
            ip2org=ip2org,
            )
    # NOTE(review): placement relative to the Mixpanel block is ambiguous in
    # the original formatting; the proxy handler is assumed unconditional.
    app.extensions['proxy'] = ProxyHandler(
        allowed_targets=app.config['PROXY_SETTINGS']['allowed_targets'],
        allowed_domains=app.config['PROXY_SETTINGS']['allowed_domains'],
        allowed_request_domains=app.config['PROXY_SETTINGS']['allowed_request_domains'])
    # basepath = app.config['PUBLIC_API_BASE_PATH']+api_version
    # cors =
    CORS(app, resources=r'/api/*', allow_headers='Content-Type,Auth-Token')
    ''' define cache'''
    # cache = Cache(config={'CACHE_TYPE': 'simple'})
    # cache.init_app(latest_blueprint)
    # latest_blueprint.cache = cache
    # latest_blueprint.extensions['cache'] = cache
    # app.cache = SimpleCache()
    app.cache = FileSystemCache('/tmp/cttv-rest-api-cache', threshold=100000,
                                default_timeout=60*60, mode=777)

    '''load ip name resolution'''
    ip_resolver = defaultdict(lambda: "PUBLIC")
    ip_list_file = app.config['IP_RESOLVER_LIST_PATH']
    # Fall back to the parent directory when running from a subdirectory.
    if not os.path.exists(ip_list_file):
        ip_list_file = '../' + ip_list_file
    if os.path.exists(ip_list_file):
        with open(ip_list_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                net = IPNetwork(row['ip'])
                ip_resolver[net] = row['org']
    else:
        app.logger.warning('cannot find IP list for IP resolver. All traffic will be logged as PUBLIC')
    app.config['IP_RESOLVER'] = ip_resolver

    '''compress http response'''
    compress = Compress()
    compress.init_app(app)

    latest_blueprint = Blueprint('latest', __name__)
    current_version_blueprint = Blueprint(str(api_version), __name__)
    current_minor_version_blueprint = Blueprint(str(api_version_minor), __name__)

    specpath = '/cttv'

    if app.config['PROFILE'] == True:
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])

    '''set the right prefixes'''
    create_api(latest_blueprint, api_version, specpath)
    create_api(current_version_blueprint, api_version, specpath)
    create_api(current_minor_version_blueprint, api_version_minor, specpath)

    # app.register_blueprint(latest_blueprint, url_prefix='/latest/platform')
    app.register_blueprint(current_version_blueprint, url_prefix='/v'+str(api_version) + '/platform')
    app.register_blueprint(current_minor_version_blueprint, url_prefix='/v'+str(api_version_minor) + '/platform')

    '''serve the static docs'''
    # NOTE(review): `file(...)` is a Python 2 builtin — this module presumably
    # still targets Python 2; under Python 3 this line needs `open(...)`.
    openapi_def = yaml.load(file('app/static/openapi.template.yaml', 'r'))
    app.logger.info('parsing swagger from app/static/openapi.template.yaml')

    #inject the description into the docs
    with open("api-description.md", "r") as f:
        desc = f.read()
    openapi_def['info']['description'] = desc
    openapi_def['basePath'] = '/v%s' % str(api_version)

    @app.route('/v%s/platform/swagger' % str(api_version))
    def serve_swagger(apiversion=api_version):
        return jsonify(openapi_def)

    @app.route('/v%s/platform/docs/swagger-ui' % str(api_version))
    def render_swaggerui(apiversion=api_version):
        return render_template('swaggerui.html',api_version=apiversion)

    '''pre and post-request'''
    @app.before_request
    def before_request():
        # Timestamp used by the after-request hook below.
        g.request_start = datetime.now()

    @app.after_request
    def after(resp):
        try:
            now = datetime.now()
            took = int(round((now - g.request_start).total_seconds()))
            resp.headers.add('Access-Control-Allow-Origin', '*')
            resp.headers.add('Access-Control-Allow-Headers','Content-Type,Auth-Token')
            resp.headers.add('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE,OPTIONS')
            if do_not_cache(request):# do not cache in the browser
                resp.headers.add('Cache-Control', "no-cache, must-revalidate, max-age=0")
            else:
                cache = 30 * 24 * 60 * 60  # cache for thirty days
                resp.headers.add('Cache-Control', "no-transform, max-age=%i"%(cache))
            return resp
        except Exception as e:
            # NOTE(review): Logger.exception takes %-style args; the extra
            # str(e) argument matches no placeholder in the message.
            app.logger.exception('failed request teardown function', str(e))
            return resp

    # Override the HTTP exception handler.
    app.handle_http_exception = get_http_exception_handler(app)
    return app
from flask import Flask
from flask_cors import CORS
from flask_restful import Resource, Api

from api import create_api

# Flask app with CORS enabled on every route; the REST resources are
# registered on the flask_restful Api object by create_api.
app = Flask(__name__)
CORS(app)
api = Api(app)
create_api(api)

if __name__ == '__main__':
    # Listen on all interfaces (e.g. for containerised deployments).
    app.run(host='0.0.0.0', port=5000)
def create_app():
    """Factory producing the Flask app with config, database and API wired."""
    application = Flask(__name__)
    application.config.from_object(Config)
    db.init_app(application)
    create_api(application)
    return application
def create_app(config_name):
    """Application factory for the Open Targets REST API (rate-limited variant).

    Configures Flask from ``config[config_name]`` (overridable via the
    OPENTARGETS_API_LOCAL_SETTINGS env var), wires Redis / Elasticsearch /
    Mixpanel handlers, loads API-key rate limits and IP resolution tables,
    registers versioned blueprints plus swagger routes, and installs
    pre/post-request hooks that report usage quotas through response headers.

    :param config_name: key into the ``config`` mapping selecting the
        configuration class
    :returns: the configured Flask application
    """
    app = Flask(__name__, static_url_path='')
    app.config.from_object(config[config_name])
    app.config.from_envvar("OPENTARGETS_API_LOCAL_SETTINGS", silent=True)
    config[config_name].init_app(app)
    api_version = app.config['API_VERSION']
    api_version_minor = app.config['API_VERSION_MINOR']

    # log_level = logging.INFO
    # if app.config['DEBUG']:
    #     log_level = logging.DEBUG

    # Flask has a default logger which works well and pushes to stderr
    # if you want to add different handlers (to file, or logstash, or whatever)
    # you can use code similar to the one below and set the error level accordingly.

    # logHandler = logging.StreamHandler()
    # formatter = jsonlogger.JsonFormatter()
    # logHandler.setFormatter(formatter)
    # loghandler.setLevel(logging.INFO)
    # app.logger.addHandler(logHandler)

    # or for LOGSTASH
    # app.logger.addHandler(logstash.LogstashHandler(app.config['LOGSTASH_HOST'], app.config['LOGSTASH_PORT'], version=1))

    app.logger.info('looking for elasticsearch at: %s' % app.config['ELASTICSEARCH_URL'])
    print('looking for elasticsearch at: %s' % app.config['ELASTICSEARCH_URL'])

    app.extensions['redis-core'] = Redis(app.config['REDIS_SERVER_PATH'], db=0)  #served data
    app.extensions['redis-service'] = Redis(
        app.config['REDIS_SERVER_PATH'], db=1)  #cache, rate limit and internal things
    app.extensions['redis-user'] = Redis(app.config['REDIS_SERVER_PATH'], db=2)  # user info
    '''setup cache'''
    # Disable Redis persistence: this instance is a pure cache.
    app.extensions['redis-service'].config_set('save', '')
    app.extensions['redis-service'].config_set('appendonly', 'no')
    icache = InternalCache(app.extensions['redis-service'],
                           str(api_version_minor))
    ip2org = IP2Org(icache)
    es = Elasticsearch(
        app.config['ELASTICSEARCH_URL'],
        # # sniff before doing anything
        # sniff_on_start=True,
        # # refresh nodes after a node fails to respond
        # sniff_on_connection_fail=True,
        # # and also every 60 seconds
        # sniffer_timeout=60
        timeout=60 * 20,
        maxsize=100,
    )
    '''elasticsearch handlers'''
    app.extensions['esquery'] = esQuery(
        es,
        DataTypes(app),
        DataSourceScoring(app),
        index_data=app.config['ELASTICSEARCH_DATA_INDEX_NAME'],
        index_efo=app.config['ELASTICSEARCH_EFO_LABEL_INDEX_NAME'],
        index_eco=app.config['ELASTICSEARCH_ECO_INDEX_NAME'],
        index_genename=app.config['ELASTICSEARCH_GENE_NAME_INDEX_NAME'],
        index_expression=app.config['ELASTICSEARCH_EXPRESSION_INDEX_NAME'],
        index_reactome=app.config['ELASTICSEARCH_REACTOME_INDEX_NAME'],
        index_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_INDEX_NAME'],
        index_search=app.config['ELASTICSEARCH_DATA_SEARCH_INDEX_NAME'],
        index_relation=app.config['ELASTICSEARCH_DATA_RELATION_INDEX_NAME'],
        docname_data=app.config['ELASTICSEARCH_DATA_DOC_NAME'],
        docname_efo=app.config['ELASTICSEARCH_EFO_LABEL_DOC_NAME'],
        docname_eco=app.config['ELASTICSEARCH_ECO_DOC_NAME'],
        docname_genename=app.config['ELASTICSEARCH_GENE_NAME_DOC_NAME'],
        docname_expression=app.config['ELASTICSEARCH_EXPRESSION_DOC_NAME'],
        docname_reactome=app.config['ELASTICSEARCH_REACTOME_REACTION_DOC_NAME'],
        docname_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_DOC_NAME'],
        docname_search=app.config['ELASTICSEARCH_DATA_SEARCH_DOC_NAME'],
        # docname_search_target=app.config['ELASTICSEARCH_DATA_SEARCH_TARGET_DOC_NAME'],
        # docname_search_disease=app.config['ELASTICSEARCH_DATA_SEARCH_DISEASE_DOC_NAME'],
        docname_relation=app.config['ELASTICSEARCH_DATA_RELATION_DOC_NAME'],
        log_level=app.logger.getEffectiveLevel(),
        cache=icache)
    app.extensions['es_access_store'] = esStore(
        es,
        eventlog_index=app.config['ELASTICSEARCH_LOG_EVENT_INDEX_NAME'],
        ip2org=ip2org,
    )
    '''mixpanel handlers'''
    if Config.MIXPANEL_TOKEN:
        mp = Mixpanel(Config.MIXPANEL_TOKEN, consumer=AsyncBufferedConsumer())
        app.extensions['mixpanel'] = mp
        app.extensions['mp_access_store'] = MixPanelStore(
            mp,
            ip2org=ip2org,
        )
    # NOTE(review): placement relative to the Mixpanel block is ambiguous in
    # the original formatting; the proxy handler is assumed unconditional.
    app.extensions['proxy'] = ProxyHandler(
        allowed_targets=app.config['PROXY_SETTINGS']['allowed_targets'],
        allowed_domains=app.config['PROXY_SETTINGS']['allowed_domains'],
        allowed_request_domains=app.config['PROXY_SETTINGS']
        ['allowed_request_domains'])
    basepath = app.config['PUBLIC_API_BASE_PATH'] + api_version
    # cors =
    CORS(app, resources=r'/api/*', allow_headers='Content-Type,Auth-Token')
    ''' define cache'''
    # cache = Cache(config={'CACHE_TYPE': 'simple'})
    # cache.init_app(latest_blueprint)
    # latest_blueprint.cache = cache
    # latest_blueprint.extensions['cache'] = cache
    # app.cache = SimpleCache()
    app.cache = FileSystemCache('/tmp/cttv-rest-api-cache',
                                threshold=100000,
                                default_timeout=60 * 60,
                                mode=777)
    '''Set usage limiter '''
    # limiter = Limiter(global_limits=["2000 per hour", "20 per second"])
    # limiter.init_app(app)# use redis to store limits
    '''Load api keys in redis'''
    rate_limit_file = app.config['USAGE_LIMIT_PATH']
    # Fall back to the parent directory when running from a subdirectory.
    if not os.path.exists(rate_limit_file):
        rate_limit_file = '../' + rate_limit_file
    if os.path.exists(rate_limit_file):
        with open(rate_limit_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                auth_key = AuthKey(**row)
                app.extensions['redis-user'].hmset(auth_key.get_key(),
                                                   auth_key.__dict__)
        print('INFO - succesfully loaded rate limit file')
    else:
        print('ERROR - cannot find rate limit file')
        app.logger.error(
            'cannot find rate limit file: %s. RATE LIMIT QUOTA LOAD SKIPPED!'
            % rate_limit_file)
    '''load ip name resolution'''
    ip_resolver = defaultdict(lambda: "PUBLIC")
    ip_list_file = app.config['IP_RESOLVER_LIST_PATH']
    if not os.path.exists(ip_list_file):
        ip_list_file = '../' + ip_list_file
    if os.path.exists(ip_list_file):
        with open(ip_list_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                net = IPNetwork(row['ip'])
                ip_resolver[net] = row['org']
    else:
        app.logger.warning(
            'cannot find IP list for IP resolver. All traffic will be logged as PUBLIC'
        )
    app.config['IP_RESOLVER'] = ip_resolver
    '''compress http response'''
    compress = Compress()
    compress.init_app(app)
    latest_blueprint = Blueprint('latest', __name__)
    current_version_blueprint = Blueprint(str(api_version), __name__)
    current_minor_version_blueprint = Blueprint(str(api_version_minor),
                                                __name__)
    specpath = '/cttv'
    if app.config['PROFILE'] == True:
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])
    create_api(latest_blueprint, api_version, specpath)
    create_api(current_version_blueprint, api_version, specpath)
    create_api(current_minor_version_blueprint, api_version_minor, specpath)
    app.register_blueprint(latest_blueprint, url_prefix='/api/latest')
    app.register_blueprint(current_version_blueprint,
                           url_prefix='/api/' + str(api_version))
    app.register_blueprint(current_minor_version_blueprint,
                           url_prefix='/api/' + str(api_version_minor))

    # All /api-docs variants redirect to the same swagger UI.
    @app.route('/api-docs/%s' % str(api_version_minor))
    def docs_current_minor_version():
        return redirect('/api/swagger/index.html')

    @app.route('/api-docs/%s' % str(api_version))
    def docs_current_version():
        return redirect('/api/swagger/index.html')

    @app.route('/api-docs')
    def docs():
        return redirect('/api/swagger/index.html')

    def serve_swagger():
        return app.send_static_file('docs/swagger/swagger.yaml')

    @app.route('/api/docs/swagger.yaml')
    def send_swagger():
        return serve_swagger()

    @app.route('/api/latest/docs/swagger.yaml')
    def send_swagger_latest():
        return serve_swagger()

    @app.route('/api/' + str(api_version) + '/docs/swagger.yaml')
    def send_swagger_current_cersion():
        return serve_swagger()

    @app.before_request
    def before_request():
        # Timestamp used by the after-request hook below.
        g.request_start = datetime.now()

    @app.after_request
    def after(resp):
        # Measure the request, charge its weight against the caller's rate
        # limits, and expose quota/caching information as response headers.
        rate_limiter = RateLimiter()
        now = datetime.now()
        took = (now - g.request_start).total_seconds() * 1000
        if took > 500:
            cache_time = str(
                int(3600 * took)
            )  # set cache to last one hour for each second spent in the request
            resp.headers.add('X-Accel-Expires', cache_time)
        took = int(round(took))
        LogApiCallWeight(took)
        # if took < RateLimiter.DEFAULT_CALL_WEIGHT:
        #     took = RateLimiter.DEFAULT_CALL_WEIGHT
        current_values = increment_call_rate(took, rate_limiter)
        now = datetime.now()
        ceil10s = round(ceil_dt_to_future_time(now, 10), 2)
        ceil1h = round(ceil_dt_to_future_time(now, 3600), 2)
        usage_left_10s = rate_limiter.short_window_rate - current_values['short']
        usage_left_1h = rate_limiter.long_window_rate - current_values['long']
        min_ceil = ceil10s
        if usage_left_1h < 0:
            min_ceil = ceil1h
        if (usage_left_10s < 0) or (usage_left_1h < 0):
            resp.headers.add('Retry-After', min_ceil)
        resp.headers.add('X-API-Took', took)
        resp.headers.add('X-Usage-Limit-10s', rate_limiter.short_window_rate)
        resp.headers.add('X-Usage-Limit-1h', rate_limiter.long_window_rate)
        resp.headers.add('X-Usage-Remaining-10s', usage_left_10s)
        resp.headers.add('X-Usage-Remaining-1h', usage_left_1h)
        # resp.headers.add('X-Usage-Limit-Reset-10s', ceil10s)
        # resp.headers.add('X-Usage-Limit-Reset-1h', ceil1h)
        resp.headers.add('Access-Control-Allow-Origin', '*')
        resp.headers.add('Access-Control-Allow-Headers',
                         'Content-Type,Auth-Token')
        if do_not_cache(request):  # do not cache in the browser
            resp.headers.add('Cache-Control',
                             "no-cache, must-revalidate, max-age=0")
        else:
            resp.headers.add(
                'Cache-Control',
                "no-transform, public, max-age=%i, s-maxage=%i" %
                (took * 1800 / 1000, took * 9000 / 1000))
        return resp

    return app
def create_app(config_name):
    """Application factory for the REST API.

    Builds the Flask app, loads layered configuration, wires Redis /
    Elasticsearch / Mixpanel backends, loads rate-limit quotas, registers
    the versioned API blueprints and installs the per-request hooks.

    :param config_name: key into ``config`` selecting the settings class
                        (e.g. 'development' -> DevelopmentConfig in config.py)
    :returns: the fully initialised Flask app instance
    """
    app = Flask(__name__, static_url_path='')
    # This first loads the configuration from eg. config['development'] which
    # corresponds to the DevelopmentConfig class in the config.py
    app.config.from_object(config[config_name])
    # Then you can override the values with the contents of the file the
    # OPENTARGETS_API_LOCAL_SETTINGS environment variable points to.
    # For eg:
    # $ export OPENTARGETS_API_LOCAL_SETTINGS=/path/to/settings.cfg
    #
    # where settings.cfg looks like:
    #
    # DEBUG = False
    # SECRET_KEY = 'foo'
    #
    app.config.from_envvar("OPENTARGETS_API_LOCAL_SETTINGS", silent=True)
    config[config_name].init_app(app)
    api_version = app.config['API_VERSION']
    api_version_minor = app.config['API_VERSION_MINOR']
    app.logger.info('looking for elasticsearch at: %s' % app.config['ELASTICSEARCH_URL'])

    # Three logical redis databases on the same server.
    app.extensions['redis-core'] = Redis(app.config['REDIS_SERVER_PATH'], db=0)  # served data
    app.extensions['redis-service'] = Redis(app.config['REDIS_SERVER_PATH'], db=1)  # cache, rate limit and internal things
    app.extensions['redis-user'] = Redis(app.config['REDIS_SERVER_PATH'], db=2)  # user info

    '''setup cache'''
    # The service db only holds ephemeral state, so turn off redis persistence for it.
    app.extensions['redis-service'].config_set('save', '')
    app.extensions['redis-service'].config_set('appendonly', 'no')
    icache = InternalCache(app.extensions['redis-service'], str(api_version_minor))
    ip2org = IP2Org(icache)

    if app.config['ELASTICSEARCH_URL']:
        es = Elasticsearch(app.config['ELASTICSEARCH_URL'],
                           # # sniff before doing anything
                           # sniff_on_start=True,
                           # # refresh nodes after a node fails to respond
                           # sniff_on_connection_fail=True,
                           # # and also every 60 seconds
                           # sniffer_timeout=60
                           timeout=60 * 20,
                           maxsize=32,
                           )
    else:
        # No ES configured: handlers below are still created and must cope with es=None.
        es = None

    '''elasticsearch handlers'''
    app.extensions['esquery'] = esQuery(es,
                                        DataTypes(app),
                                        DataSourceScoring(app),
                                        index_data=app.config['ELASTICSEARCH_DATA_INDEX_NAME'],
                                        index_efo=app.config['ELASTICSEARCH_EFO_LABEL_INDEX_NAME'],
                                        index_eco=app.config['ELASTICSEARCH_ECO_INDEX_NAME'],
                                        index_genename=app.config['ELASTICSEARCH_GENE_NAME_INDEX_NAME'],
                                        index_expression=app.config['ELASTICSEARCH_EXPRESSION_INDEX_NAME'],
                                        index_reactome=app.config['ELASTICSEARCH_REACTOME_INDEX_NAME'],
                                        index_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_INDEX_NAME'],
                                        index_search=app.config['ELASTICSEARCH_DATA_SEARCH_INDEX_NAME'],
                                        index_relation=app.config['ELASTICSEARCH_DATA_RELATION_INDEX_NAME'],
                                        docname_data=app.config['ELASTICSEARCH_DATA_DOC_NAME'],
                                        docname_efo=app.config['ELASTICSEARCH_EFO_LABEL_DOC_NAME'],
                                        docname_eco=app.config['ELASTICSEARCH_ECO_DOC_NAME'],
                                        docname_genename=app.config['ELASTICSEARCH_GENE_NAME_DOC_NAME'],
                                        docname_expression=app.config['ELASTICSEARCH_EXPRESSION_DOC_NAME'],
                                        docname_reactome=app.config['ELASTICSEARCH_REACTOME_REACTION_DOC_NAME'],
                                        docname_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_DOC_NAME'],
                                        docname_search=app.config['ELASTICSEARCH_DATA_SEARCH_DOC_NAME'],
                                        # docname_search_target=app.config['ELASTICSEARCH_DATA_SEARCH_TARGET_DOC_NAME'],
                                        # docname_search_disease=app.config['ELASTICSEARCH_DATA_SEARCH_DISEASE_DOC_NAME'],
                                        docname_relation=app.config['ELASTICSEARCH_DATA_RELATION_DOC_NAME'],
                                        log_level=app.logger.getEffectiveLevel(),
                                        cache=icache
                                        )
    app.extensions['es_access_store'] = esStore(es,
                                                eventlog_index=app.config['ELASTICSEARCH_LOG_EVENT_INDEX_NAME'],
                                                ip2org=ip2org,
                                                )

    '''mixpanel handlers'''
    if Config.MIXPANEL_TOKEN:
        mp = Mixpanel(Config.MIXPANEL_TOKEN, consumer=AsyncBufferedConsumer())
        app.extensions['mixpanel'] = mp
        app.extensions['mp_access_store'] = MixPanelStore(mp,
                                                          ip2org=ip2org,
                                                          )

    app.extensions['proxy'] = ProxyHandler(allowed_targets=app.config['PROXY_SETTINGS']['allowed_targets'],
                                           allowed_domains=app.config['PROXY_SETTINGS']['allowed_domains'],
                                           allowed_request_domains=app.config['PROXY_SETTINGS']['allowed_request_domains'])

    # basepath = app.config['PUBLIC_API_BASE_PATH']+api_version
    # cors = CORS(app, resources=r'/api/*', allow_headers='Content-Type,Auth-Token')

    ''' define cache'''
    # cache = Cache(config={'CACHE_TYPE': 'simple'})
    # cache.init_app(latest_blueprint)
    # latest_blueprint.cache = cache
    # latest_blueprint.extensions['cache'] = cache
    # app.cache = SimpleCache()
    app.cache = FileSystemCache('/tmp/cttv-rest-api-cache', threshold=100000, default_timeout=60*60, mode=777)

    '''Set usage limiter '''
    # limiter = Limiter(global_limits=["2000 per hour", "20 per second"])
    # limiter.init_app(app)  # use redis to store limits

    '''Load api keys in redis'''
    rate_limit_file = app.config['USAGE_LIMIT_PATH']
    if not os.path.exists(rate_limit_file):
        rate_limit_file = '../' + rate_limit_file
    csvfile = None
    if Config.GITHUB_AUTH_TOKEN:
        # Prefer the canonical quota file from the private github repo.
        r = requests.get('https://api.github.com/repos/opentargets/rest_api_auth/contents/rate_limit.csv',
                         headers={'Authorization': 'token %s' % Config.GITHUB_AUTH_TOKEN,
                                  'Accept': 'application/vnd.github.v3.raw'})
        if r.ok:
            csvfile = r.text.split('\n')
            app.logger.info('Retrieved rate limit file from github remote')
        else:
            app.logger.warning('Cannot retrieve rate limit file from remote, SKIPPED!')
    elif os.path.exists(rate_limit_file):
        csvfile = open(rate_limit_file)
        app.logger.info('Using dummy rate limit file')
    if csvfile is None:
        app.logger.error('cannot find rate limit file: %s. RATE LIMIT QUOTA LOAD SKIPPED!' % rate_limit_file)
    else:
        reader = csv.DictReader(csvfile)
        for row in reader:
            auth_key = AuthKey(**row)
            app.extensions['redis-user'].hmset(auth_key.get_key(), auth_key.__dict__)
        try:
            csvfile.close()
        except Exception:
            # csvfile may be a plain list of lines (github path) with no close();
            # was a bare `except:` — narrowed so SystemExit/KeyboardInterrupt pass through.
            pass
        app.logger.info('succesfully loaded rate limit file')

    '''load ip name resolution'''
    # Any address not matched below is attributed to "PUBLIC".
    ip_resolver = defaultdict(lambda: "PUBLIC")
    ip_list_file = app.config['IP_RESOLVER_LIST_PATH']
    if not os.path.exists(ip_list_file):
        ip_list_file = '../' + ip_list_file
    if os.path.exists(ip_list_file):
        with open(ip_list_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                net = IPNetwork(row['ip'])
                ip_resolver[net] = row['org']
    else:
        app.logger.warning('cannot find IP list for IP resolver. All traffic will be logged as PUBLIC')
    app.config['IP_RESOLVER'] = ip_resolver

    '''compress http response'''
    compress = Compress()
    compress.init_app(app)

    latest_blueprint = Blueprint('latest', __name__)
    current_version_blueprint = Blueprint(str(api_version), __name__)
    current_minor_version_blueprint = Blueprint(str(api_version_minor), __name__)

    specpath = '/cttv'

    if app.config['PROFILE'] == True:
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])

    '''set the right prefixes'''
    create_api(latest_blueprint, api_version, specpath)
    create_api(current_version_blueprint, api_version, specpath)
    create_api(current_minor_version_blueprint, api_version_minor, specpath)

    # app.register_blueprint(latest_blueprint, url_prefix='/latest/platform')
    app.register_blueprint(current_version_blueprint, url_prefix='/v' + str(api_version) + '/platform')
    app.register_blueprint(current_minor_version_blueprint, url_prefix='/v' + str(api_version_minor) + '/platform')

    '''serve the static docs'''
    try:
        ''' NOTE: this file gets created only at deployment time '''
        # BUGFIX: the py2-only builtin file() was replaced with open(), and
        # yaml.safe_load is used instead of yaml.load (no arbitrary object
        # construction from the document, and no Loader-missing warning).
        openapi_def = yaml.safe_load(open('app/static/openapi.yaml', 'r'))
        app.logger.info('parsing swagger from static/openapi.yaml')
    except IOError:
        '''if we are not deployed, then simply use the template'''
        openapi_def = yaml.safe_load(open('openapi.template.yaml', 'r'))
        app.logger.error('parsing swagger from openapi.template.yaml')
    with open("api-description.md", "r") as f:
        desc = f.read()
    openapi_def['info']['description'] = desc
    openapi_def['basePath'] = '/v%s' % str(api_version)

    @app.route('/v%s/platform/swagger' % str(api_version))
    def serve_swagger(apiversion=api_version):
        return jsonify(openapi_def)

    @app.route('/v%s/platform/docs' % str(api_version))
    def render_redoc(apiversion=api_version):
        return render_template('docs.html', api_version=apiversion)

    '''pre and post-request'''
    @app.before_request
    def before_request():
        # Stamp the start time so `after` can compute the request cost.
        g.request_start = datetime.now()

    @app.after_request
    def after(resp):
        try:
            rate_limiter = RateLimiter()
            now = datetime.now()
            took = (now - g.request_start).total_seconds() * 1000
            if took > 500:
                # set cache to last one hour for each second spent in the request
                cache_time = str(int(3600 * took))
                resp.headers.add('X-Accel-Expires', cache_time)
            took = int(round(took))
            LogApiCallWeight(took)
            # if took < RateLimiter.DEFAULT_CALL_WEIGHT:
            #     took = RateLimiter.DEFAULT_CALL_WEIGHT
            current_values = increment_call_rate(took, rate_limiter)
            now = datetime.now()
            ceil10s = round(ceil_dt_to_future_time(now, 10), 2)
            ceil1h = round(ceil_dt_to_future_time(now, 3600), 2)
            usage_left_10s = rate_limiter.short_window_rate - current_values['short']
            usage_left_1h = rate_limiter.long_window_rate - current_values['long']
            # Tell the client when to retry against the window that is exhausted.
            min_ceil = ceil10s
            if usage_left_1h < 0:
                min_ceil = ceil1h
            if (usage_left_10s < 0) or (usage_left_1h < 0):
                resp.headers.add('Retry-After', min_ceil)
            resp.headers.add('X-API-Took', took)
            resp.headers.add('X-Usage-Limit-10s', rate_limiter.short_window_rate)
            resp.headers.add('X-Usage-Limit-1h', rate_limiter.long_window_rate)
            resp.headers.add('X-Usage-Remaining-10s', usage_left_10s)
            resp.headers.add('X-Usage-Remaining-1h', usage_left_1h)
            # resp.headers.add('X-Usage-Limit-Reset-10s', ceil10s)
            # resp.headers.add('X-Usage-Limit-Reset-1h', ceil1h)
            resp.headers.add('Access-Control-Allow-Origin', '*')
            resp.headers.add('Access-Control-Allow-Headers', 'Content-Type,Auth-Token')
            resp.headers.add('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE,OPTIONS')
            if do_not_cache(request):  # do not cache in the browser
                resp.headers.add('Cache-Control', "no-cache, must-revalidate, max-age=0")
            else:
                resp.headers.add('Cache-Control',
                                 "no-transform, public, max-age=%i, s-maxage=%i" % (took * 1800 / 1000, took * 9000 / 1000))
            return resp
        except Exception as e:
            # BUGFIX: the message had no %s conversion for the extra argument,
            # which made the logging call itself fail to format.
            app.logger.exception('failed request teardown function: %s', str(e))
            return resp

    # Override the HTTP exception handler.
    app.handle_http_exception = get_http_exception_handler(app)
    return app
import sys
from datetime import datetime

sys.path.insert(0, '../')

from api import create_api, db
from api.finances.services import RecordService
from api.clients.services import ClientService
from api.finances.models import Record
from api.clients.models import Client, Phone

# create the flask app object
current_api = create_api('config')

with current_api.app_context():
    # drop every table in the database
    db.drop_all()
    # (re)create the database schema
    db.create_all()

    # fetch the records currently stored (expected empty after the rebuild)
    records = RecordService.get_all()
    print(f'records {list(records)}')

    # create two users and add them to the database
    client_1 = ClientService.add('admin', '*****@*****.**')
    print(client_1)
    client_2 = ClientService.add('admin1', '*****@*****.**')
    # BUGFIX: the original printed client_1 a second time (copy-paste error);
    # report the client that was just created instead.
    print(client_2)
def scheduled_qotd():
    """Post the quote of the day, with service endpoint and key taken from the environment."""
    # Resolve the quotes service location and credentials.
    url = os.getenv("QUOTES_API_URL")
    key = os.getenv("QUOTES_API_KEY")
    # Build a fresh API client and send out today's quote.
    client = create_api()
    tweet_quote_of_the_day(client, url, key)
"""A module with the sole purpose of running the Flask Web API.""" import os from api import create_api if __name__ == "__main__": API = create_api(__name__) API.run(host='0.0.0.0', port=int(os.environ.get("PORT", 5000)))
from api import create_api
from api.ws import socketio

if __name__ == '__main__':
    # Serve through the Socket.IO server so websocket traffic is handled too.
    flask_app = create_api()
    socketio.run(flask_app, host="0.0.0.0", port=5000)
from api import create_api

# Module-level WSGI application object; WSGI servers import this as `app`.
app = create_api()

if __name__ == "__main__":
    # Development server, reachable from any interface on port 4000.
    app.run(host="0.0.0.0", port=4000)
# BUGFIX: the flask.ext.* namespace was removed in Flask 1.0; importing the
# extension package directly works with every Flask/Flask-SQLAlchemy version.
from flask_sqlalchemy import SQLAlchemy
from app import database, config as config_module

config = config_module.get_config()

# NOTE(review): `Flask` and `os` are not imported in this span — presumably
# imported earlier in the file; verify, otherwise this raises NameError.
web_app = Flask(__name__)
web_app.config.from_object(os.environ['APP_SETTINGS'])
database.AppRepository.db = SQLAlchemy(web_app)

app_directory = os.path.join(os.getcwd(), 'app')
template_directory = os.path.join(app_directory, 'templates')
# NOTE(review): name is misspelled ("donwload") but kept — code outside this
# view may reference it.
donwload_directory = os.path.join(app_directory, 'downloads')
template_admin_directory = os.path.join(template_directory, 'admin')

import api, models
api.create_api(web_app)

# Per-role session header/user names used by the auth layer.
HEADERS = {
    'admin': {
        'token': 'XSRFU-TOKEN',
        'user': '******'
    },
    'atleta': {
        'token': 'XSRF-TOKEN',
        'user': '******'
    }
}

DOMAIN = 'concept2.com.br'
if web_app.config['DEVELOPMENT']:
    DOMAIN = '127.0.0.1:8000'
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import Flask, render_template

from api import create_api
from constants import PALINDROME_DB_URI, SECRET_KEY

application = Flask(__name__)
create_api(application)


@application.before_first_request
def config_app():
    """Lazily apply secret key and database URI exactly once, on first request."""
    if application.config.get('INITIALIZED'):
        return
    application.config['SECRET_KEY'] = SECRET_KEY
    application.config['SQLALCHEMY_DATABASE_URI'] = PALINDROME_DB_URI
    application.config['INITIALIZED'] = True


@application.route('/', methods=['GET'])
def index():
    """Serve the single-page front end."""
    return render_template('index.html')


if __name__ == '__main__':
    # TODO remove debug statement.
    application.debug = True
    application.run()
'formatter': 'default' } }, 'root': { 'level': 'INFO', 'handlers': ['wsgi'] } }) app = create_app() migrate = Migrate(app, db) marshmallow = Marshmallow(app) from api import create_api, schema create_api(app) @app.shell_context_processor def make_shell_context(): return dict(app=app, db=db, Volumetrics=Volumetrics, Realization=Realization, Case=Case, Location=Location, Field=Field) @app.cli.command() def empty_database():
from flask import Flask, request, jsonify
from flask_sqlalchemy import SQLAlchemy
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand

from api.models import User
from api import create_api, db

# NOTE(review): Flask/request/jsonify/SQLAlchemy/User appear unused here —
# possibly kept for shell sessions or side effects; confirm before removing.

# Management entry point: wrap the app with Flask-Script and register the
# Flask-Migrate command group (e.g. `python manage.py db upgrade`).
app = create_api('config')
manager = Manager(app)
Migrate(app, db)
manager.add_command('db', MigrateCommand)

if __name__ == '__main__':
    # Dispatch command-line arguments to the registered commands.
    manager.run()
import argparse, os

from api import create_api

# Command-line options for the document server.
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--xapers-root', help="Xapers root directory", default="~/.xapers/docs")
parser.add_argument('--host', help="Host", default="0.0.0.0")
parser.add_argument('-p', '--port', help="Port", default=5000, type=int)
args = parser.parse_args()
# Expand ~ and force a trailing separator on the root path.
args.xapers_root = os.path.join(os.path.expanduser(args.xapers_root), '')
# NOTE(review): Python 2 print statement — this file is not Python 3 compatible.
print "Args:", args

# NOTE(review): Flask, CORS and render_template are not imported in this view —
# presumably brought in elsewhere; verify, otherwise these raise NameError.
app = Flask(__name__, static_folder="../client/dist", static_url_path="", template_folder="../client/dist")
CORS(app)
app.register_blueprint(create_api(args), url_prefix="/api")

@app.route("/")
def index():
    # Serve the built client bundle's entry page.
    return render_template("index.html")

if __name__ == "__main__":
    # Serve via eventlet's WSGI server instead of Flask's dev server.
    from eventlet import wsgi
    import eventlet
    wsgi.server(eventlet.listen((args.host, args.port)), app)
    #app.run()
from utils import cors
from utils.auth import auth

import api

# Create app
# NOTE(review): Flask, db and Compress are not imported in this span —
# presumably imported above this view; verify.
app = Flask(__name__)

# Load configuration
app.config.from_object('config')
# Allow non-ASCII characters to pass through JSON responses unescaped.
app.config['JSON_AS_ASCII'] = False

# Attach database
db.init_app(app)

# Create REST API
api.create_api(app)

# Enable compression
Compress(app)

# Enable Cross-Origin headers
cors.enable(app)

# Login endpoint
@app.route('/login')
def login():
    username = auth();
    if username:
        # NOTE(review): '%' applied to a literal with no conversion specifier
        # raises TypeError at runtime — the "******" looks like a redacted
        # '%s' placeholder; confirm the intended template before shipping.
        # NOTE(review): when auth() fails nothing is returned, so Flask will
        # error with "view function did not return a response" — confirm an
        # explicit 401 is not expected here.
        return '{"login": "******" }' % (username)

# Start flask as standalone