from flask_bootstrap import Bootstrap from flask_limiter import Limiter from flask_limiter.util import get_remote_address from flask_login import LoginManager from flask_mail import Mail from .config import config bootstrap = Bootstrap() mail = Mail() login_manager = LoginManager() login_manager.session_protection = 'strong' login_manager.login_view = 'auth.login' limiter = Limiter(key_func=get_remote_address, default_limits=["100 per minute", "5 per second"]) def create_app(config_name='default'): app = Flask(__name__) app.config.from_object(config[config_name]) config[config_name].init_app(app) from .models import db # noqa bootstrap.init_app(app) mail.init_app(app) db.init_app(app) login_manager.init_app(app) limiter.init_app(app) from .main import main as main_blueprint # noqa
def create_app():
    """Flask application factory.

    Reads configuration from environment variables, wires up rate limiting,
    caching, CORS, JWT auth and all REST resources, and returns the
    configured Flask app.
    """
    app = Flask(__name__, static_folder="js")
    app.config["PROPAGATE_EXCEPTIONS"] = True
    app.config["TREE"] = os.getenv("TREE")
    # Boolean() presumably coerces the env-var string to a bool — TODO confirm.
    app.config["GRAMPS_EXCLUDE_PRIVATE"] = Boolean(
        os.getenv("GRAMPS_EXCLUDE_PRIVATE"))
    app.config["GRAMPS_EXCLUDE_LIVING"] = Boolean(
        os.getenv("GRAMPS_EXCLUDE_LIVING"))
    # (The original assigned app.config["TREE"] a second time here; the
    # duplicate assignment was removed.)
    if app.config["TREE"] is None or app.config["TREE"] == "":
        raise ValueError("You have to set the `TREE` environment variable.")
    app.config["GRAMPS_S3_BUCKET_NAME"] = os.getenv("GRAMPS_S3_BUCKET_NAME")
    app.config["PASSWORD"] = os.getenv("PASSWORD", "")
    app.config["GRAMPS_USER_DB_URI"] = os.getenv("GRAMPS_USER_DB_URI", "")
    app.config["GRAMPS_AUTH_PROVIDER"] = os.getenv("GRAMPS_AUTH_PROVIDER", "")
    # Select the auth backend: single shared password, none, or SQL user DB.
    if app.config["GRAMPS_AUTH_PROVIDER"] == "password":
        auth_provider = SingleUser(password=app.config["PASSWORD"])
    elif app.config["GRAMPS_AUTH_PROVIDER"] == "none":
        auth_provider = None
    else:
        auth_provider = SQLAuth(db_uri=app.config["GRAMPS_USER_DB_URI"])
    app.logger.setLevel(logging.INFO)
    app.logger.info("Opening family tree '{}'".format(app.config["TREE"]))
    limiter = Limiter(app, key_func=get_remote_address)
    # called once here in case Db's constructor raises
    Db(app.config["TREE"])
    CORS(app)
    Compress(app)
    api = Api(app)
    cache = Cache(app, config={
        "CACHE_TYPE": "filesystem",
        "CACHE_DIR": "appcache"
    })

    # JWT setup: tokens may arrive via headers or the query string.
    app.config["JWT_TOKEN_LOCATION"] = ["headers", "query_string"]
    app.config["JWT_ACCESS_TOKEN_EXPIRES"] = datetime.timedelta(minutes=15)
    app.config["JWT_REFRESH_TOKEN_EXPIRES"] = datetime.timedelta(days=30)
    app.config["JWT_SECRET_KEY"] = get_jwt_secret_key()
    jwt = JWTManager(app)  # noqa: F841 -- registers JWT handlers on the app

    @app.route("/", methods=["GET", "POST"])
    def send_js_index():
        """Serve the SPA entry point."""
        return send_from_directory(app.static_folder, "index.html")

    @app.route("/<path:path>", methods=["GET", "POST"])
    def send_js(path):
        """Serve static assets; fall back to index.html for SPA routes."""
        if path and os.path.exists(os.path.join(app.static_folder, path)):
            return send_from_directory(app.static_folder, path)
        else:
            return send_from_directory(app.static_folder, "index.html")

    @app.route("/api/login", methods=["POST"])
    @limiter.limit("1/second")
    def login():
        """Issue access/refresh tokens for valid credentials."""
        if app.config["GRAMPS_AUTH_PROVIDER"] == "none":
            ret = {"access_token": "1", "refresh_token": "1"}
            return jsonify(ret), 200
        if not request.is_json:
            return jsonify({"msg": "Missing JSON in request"}), 400
        username = request.json.get("username", None)
        password = request.json.get("password", None)
        # (removed an unused `from .auth import User` import here)
        if not auth_provider.authorized(username, password):
            return jsonify({"msg": "Wrong username or password"}), 401
        ret = {
            "access_token": create_access_token(identity=username),
            "refresh_token": create_refresh_token(identity=username),
        }
        return jsonify(ret), 200

    def jwt_required_ifauth(fn):
        """Check JWT unless authentication is disabled"""
        @wraps(fn)
        def wrapper(*args, **kwargs):
            if app.config["GRAMPS_AUTH_PROVIDER"] != "none":
                verify_jwt_in_request()
            return fn(*args, **kwargs)
        return wrapper

    def jwt_refresh_token_required_ifauth(fn):
        """Check the JWT refresh token unless authentication is disabled"""
        @wraps(fn)
        def wrapper(*args, **kwargs):
            if app.config["GRAMPS_AUTH_PROVIDER"] != "none":
                verify_jwt_refresh_token_in_request()
            return fn(*args, **kwargs)
        return wrapper

    @app.route("/api/refresh", methods=["POST"])
    @jwt_refresh_token_required_ifauth
    def refresh():
        """Exchange a refresh token for a fresh access token."""
        if app.config["GRAMPS_AUTH_PROVIDER"] == "none":
            ret = {"access_token": "1"}
            return jsonify(ret), 200
        current_user = get_jwt_identity()
        ret = {"access_token": create_access_token(identity=current_user)}
        return jsonify(ret), 200

    parser = reqparse.RequestParser()
    parser.add_argument("strings", type=str)
    parser.add_argument("fmt", type=str)
    # Fix: `lang` was read via args.get("lang") in Translate but never
    # declared, so it was always None; declare it so the parameter works.
    parser.add_argument("lang", type=str)

    @app.before_request
    def before_request():
        # Lazily (re)open the database connection for each request.
        if not get_db().dbstate.is_open():
            get_db().open()

    app.teardown_appcontext(close_db)

    class ProtectedResource(Resource):
        """Base class: all methods require a JWT unless auth is disabled."""
        method_decorators = [jwt_required_ifauth]

    class People(ProtectedResource):
        @cache.cached()
        def get(self):
            return get_people(get_db())

    class Families(ProtectedResource):
        @cache.cached()
        def get(self):
            return get_families(get_db())

    class Events(ProtectedResource):
        @cache.cached()
        def get(self):
            return get_events(get_db())

    class Places(ProtectedResource):
        @cache.cached()
        def get(self):
            return get_places(get_db())

    class Citations(ProtectedResource):
        @cache.cached()
        def get(self):
            return get_citations(get_db())

    class Sources(ProtectedResource):
        @cache.cached()
        def get(self):
            return get_sources(get_db())

    class Repositories(ProtectedResource):
        @cache.cached()
        def get(self):
            return get_repositories(get_db())

    class MediaObjects(ProtectedResource):
        @cache.cached()
        def get(self):
            return get_media(get_db())

    class DbInfo(ProtectedResource):
        @cache.cached()
        def get(self):
            return get_db_info(get_db())

    class FullTree(ProtectedResource):
        @cache.cached()
        def get(self):
            """Return every object category in one payload."""
            return {
                "people": get_people(get_db()),
                "families": get_families(get_db()),
                "events": get_events(get_db()),
                "places": get_places(get_db()),
                "citations": get_citations(get_db()),
                "sources": get_sources(get_db()),
                "repositories": get_repositories(get_db()),
                "media": get_media(get_db()),
                "dbinfo": get_db_info(get_db()),
            }

    class Translate(Resource):
        # Fix: cache key must include the query string, otherwise all
        # translation requests shared one cached response.
        @cache.cached(query_string=True)
        def get(self):
            args = parser.parse_args()
            try:
                strings = json.loads(args["strings"])
                lang = args.get("lang")
            except (json.decoder.JSONDecodeError, TypeError, ValueError) as e:
                return {"error": str(e)}
            return {"data": get_translation(strings, lang=lang)}

    class Languages(Resource):
        @cache.cached()
        def get(self):
            return {"data": get_languages()}

    class Note(ProtectedResource):
        @cache.cached(query_string=True)
        def get(self, gramps_id):
            args = parser.parse_args()
            fmt = args.get("fmt") or "html"
            return get_note(get_db(), gramps_id, fmt=fmt)

    api.add_resource(People, "/api/people")
    api.add_resource(Families, "/api/families")
    api.add_resource(Events, "/api/events")
    api.add_resource(Places, "/api/places")
    api.add_resource(Citations, "/api/citations")
    api.add_resource(Sources, "/api/sources")
    api.add_resource(MediaObjects, "/api/mediaobjects")
    api.add_resource(Repositories, "/api/repositories")
    api.add_resource(Translate, "/api/translate")
    api.add_resource(Languages, "/api/languages")
    api.add_resource(DbInfo, "/api/dbinfo")
    api.add_resource(FullTree, "/api/tree")
    api.add_resource(Note, "/api/note/<string:gramps_id>")

    def get_media_handler(handle):
        """Return an S3 or local-file handler for the media object."""
        info = get_media_info(get_db(), handle)
        if app.config["GRAMPS_S3_BUCKET_NAME"]:
            return S3Handler(handle, info,
                             bucket_name=app.config["GRAMPS_S3_BUCKET_NAME"])
        else:
            return FileHandler(handle, info)

    @app.route("/api/media/<string:handle>")
    @jwt_required_ifauth
    def show_image(handle):
        handler = get_media_handler(handle)
        return handler.send_file()

    @app.route("/api/thumbnail/<string:handle>/<int:size>")
    @jwt_required_ifauth
    @cache.cached()
    def show_thumbnail_square(handle, size):
        handler = get_media_handler(handle)
        return handler.send_thumbnail_square(size)

    @app.route(
        "/api/thumbnail/<string:handle>/<int:size>/<int:x1>/<int:y1>/<int:x2>/<int:y2>"
    )
    @jwt_required_ifauth
    @cache.cached()
    def show_thumbnail_square_cropped(handle, size, x1, y1, x2, y2):
        handler = get_media_handler(handle)
        return handler.send_thumbnail_square_cropped(size, x1, y1, x2, y2)

    return app
'unloyalFans', 'timeUnderMindControl', 'timesMindControlled', 'timeOnFire', 'timesLitOnFire', 'airTime', 'timesJumped', 'disarms', 'timesDisarmed', 'quacks', 'timeWithMouthOpen', 'timeSpentOnMines', 'minesSteppedOn', 'timeSpentReloadingOldTimeyWeapons', 'presentsOpened', 'respectGivenToDead', 'funeralsPerformed', 'funeralsRecieved', 'timePreaching', 'conversions', 'timesConverted', 'lastPlayed', 'lastWon', 'lastKillTime' ] app = Flask(__name__) app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL') db = SQLAlchemy(app) limiter = Limiter(app, key_func=get_remote_address, default_limits=["300 per day", "70 per hour"]) class Duck(db.Model): __tablename__ = 'ducks' steam_id = db.Column(db.BigInteger, primary_key=True, unique=True, nullable=False) steam_name = db.Column(db.String(64), unique=False, nullable=True) real_name = db.Column(db.String(64), unique=False, nullable=True) stats = db.Column(postgresql.JSON, unique=False, nullable=True) auth = db.Column(db.String(16), unique=False, nullable=True) updated = db.Column(db.DateTime, default=datetime.datetime.now)
# Define the application app = Flask(__name__, static_url_path='') Firehose(app) # Adds the HTTP/2 Server Push functionality to app # firehose = Firehose(connector=Custom_connector()) # firehose.init_app(app) # Configurations app.config.from_object('config') # set limit of 5.5MB for file uploads # in practice, this is ~4MB (5.5 / 1.37) # after the data URI is saved to disk app.config['MAX_CONTENT_LENGTH'] = 5.5 * 1024 * 1024 github = GitHub(app) limiter = Limiter(app, key_func=get_remote_address) # import views after we initialize our github object import webcompat.views # noqa from webcompat import webhooks # noqa # register blueprints from webcompat.api.endpoints import api_bp # noqa from webcompat.api.uploads import uploads # noqa from webcompat.error_handlers import error_handlers # noqa for blueprint in [api_bp, error_handlers, uploads]: app.register_blueprint(blueprint) # Start Logging Handlers
from flask_limiter import Limiter from flask_limiter.util import get_remote_address, get_ipaddr from flask_cors import CORS, cross_origin from queue import Queue import threading import waitingtimes import hashlib import json app = Flask(__name__) q_detail = Queue() limiter = Limiter(app, key_func=get_ipaddr, default_limits=["10000 per day", "1000 per hour"]) cors = CORS(app) app.config['CORS_HEADERS'] = 'Content-Type' # rate limit 10/sec @app.route("/geocode", methods=["GET"]) @cross_origin() def get_geocode_address(): """ retrieve information, from lat and lng, about the location """ lat = request.args.get("lat") lng = request.args.get("lng") if (lng == None or lat == None):
from app import app
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address
# Fix: the `flask.ext.*` namespace was removed in Flask 1.0; HEADERS is
# exported directly by flask_limiter.
from flask_limiter import HEADERS

# Rate limiter keyed by client IP, emitting rate-limit response headers.
limiter = Limiter(
    app,
    key_func=get_remote_address,
    headers_enabled=True,
)

# Use custom header names instead of the default X-RateLimit-* ones.
limiter.header_mapping = {
    HEADERS.LIMIT: "X-My-Limit",
    HEADERS.RESET: "X-My-Reset",
    HEADERS.REMAINING: "X-My-Remaining",
}

# One limit shared by every route decorated with it, scoped to the API.
shared_limiter = limiter.shared_limit(app.config['GLOBAL_RATE_LIMITS'],
                                      scope="salic_api")
# Fix: the `flask.ext.*` import namespace was removed in Flask 1.0; use the
# extensions' own package names.
from flask_sqlalchemy import SQLAlchemy
from flask_cache import Cache
from config import Production
from hbc import debug_logging, online_logging, access_logging

# create a flask application - this ``app`` object will be used to handle
app = Flask(__name__)
app.config.from_object(Production())

api = Api(app)
db = SQLAlchemy(app)
auth = HTTPBasicAuth()

debug_logging(u'logs/error.log')
access_logging(u'logs/access.log')
logger = logging.getLogger('root')
access_logger = logging.getLogger('access')

# Fix: `global_limits` was deprecated and removed from Flask-Limiter;
# `default_limits` is the supported spelling of the same option.
limiter = Limiter(app, headers_enabled=True, default_limits=["10/minute"])
limiter.header_mapping = {
    HEADERS.LIMIT: "X-RateLimit-Limit",
    HEADERS.RESET: "X-RateLimit-Reset",
    HEADERS.REMAINING: "X-RateLimit-Remaining"
}
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
def make_web_app(self):
    """Build and return the faucet's Flask WSGI app.

    Configures SQLAlchemy against this instance's SQLite file, defines the
    single ``Recipient`` table, and registers the rate-limited ``/`` and
    ``/register`` routes.

    Raises:
        OSError: if the NUCYPHER_FELIX_DB_SECRET env var is unset.
    """
    from flask import request
    from flask_sqlalchemy import SQLAlchemy

    # WSGI/Flask Service
    # Short, stable identifier derived from this instance's stamp bytes.
    short_name = bytes(self.stamp).hex()[:6]
    self.rest_app = Flask(f"faucet-{short_name}",
                          template_folder=TEMPLATES_DIR)

    # Flask Settings
    self.rest_app.config[
        'SQLALCHEMY_DATABASE_URI'] = f'sqlite:///{self.db_filepath}'
    self.rest_app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    try:
        self.rest_app.secret_key = sha256(
            os.environ['NUCYPHER_FELIX_DB_SECRET'].encode())  # uses envvar
    except KeyError:
        raise OSError(
            "The 'NUCYPHER_FELIX_DB_SECRET' is not set. Export your application secret and try again."
        )

    # Database
    self.db = SQLAlchemy(self.rest_app)

    # Database Tables
    class Recipient(self.db.Model):
        """
        The one and only table in Felix's database;
        Used to track recipients and airdrop metadata.
        """
        __tablename__ = 'recipient'

        id = self.db.Column(self.db.Integer, primary_key=True)
        address = self.db.Column(self.db.String, unique=True, nullable=False)
        joined = self.db.Column(self.db.DateTime, nullable=False,
                                default=datetime.utcnow)
        total_received = self.db.Column(self.db.String, default='0',
                                        nullable=False)
        # NOTE(review): default=0 (int) on a String column is inconsistent
        # with total_received's default='0' — confirm intent.
        last_disbursement_amount = self.db.Column(self.db.String,
                                                  nullable=False, default=0)
        last_disbursement_time = self.db.Column(self.db.DateTime,
                                                nullable=True, default=None)
        is_staking = self.db.Column(self.db.Boolean, nullable=False,
                                    default=False)

        def __repr__(self):
            return f'{self.__class__.__name__}(id={self.id})'

    self.Recipient = Recipient  # Bind to outer class

    # Flask decorators
    rest_app = self.rest_app
    # Per-client-IP rate limiting with X-RateLimit-* response headers.
    limiter = Limiter(self.rest_app, key_func=get_remote_address,
                      headers_enabled=True)

    #
    # REST Routes
    #

    @rest_app.route("/", methods=['GET'])
    @limiter.limit("100/day;20/hour;1/minute")
    def home():
        # Landing page, tightly rate limited per IP.
        rendering = render_template(self.TEMPLATE_NAME)
        return rendering

    @rest_app.route("/register", methods=['POST'])
    @limiter.limit("5 per day")
    def register():
        """Handle new recipient registration via POST request."""
        try:
            new_address = request.form['address']
        except KeyError:
            return Response(status=400)  # TODO

        if not eth_utils.is_checksum_address(new_address):
            return Response(status=400)  # TODO

        if new_address in self.reserved_addresses:
            return Response(status=400)  # TODO

        try:
            with ThreadedSession(self.db_engine) as session:

                existing = Recipient.query.filter_by(
                    address=new_address).all()
                if existing:
                    # Address already exists; Abort
                    self.log.debug(f"{new_address} is already enrolled.")
                    return Response(status=400)

                # Create the record
                # NOTE(review): uses naive local time here while the column
                # default is datetime.utcnow — confirm which is intended.
                recipient = Recipient(address=new_address,
                                      joined=datetime.now())
                session.add(recipient)
                session.commit()

        except Exception as e:
            # Pass along exceptions to the logger
            self.log.critical(str(e))
            raise

        else:
            return Response(status=200)  # TODO

    return rest_app
# ---- CONFIG ----#
config = Config()

app = Flask(__name__)
app.config.update(
    SECRET_KEY=config.SECRET_KEY,
    SQLALCHEMY_DATABASE_URI="sqlite:///zinharo.sqlite",
    SQLALCHEMY_TRACK_MODIFICATIONS=False,
)

db = SQLAlchemy(app)  # flask-sqlalchemy (db)
bcrypt = Bcrypt(app)  # BCrypt (pw hashing)
jwt = JWTManager(app)  # JSON-web-tokens (account auth)
ma = Marshmallow(app)  # Marshmallow (api parsing)
limiter = Limiter(
    app, key_func=get_remote_address, default_limits=["200 per day", "50 per hour"]
)  # API ratelimit


@app.errorhandler(404)
def error_404(_exception):
    """Return a JSON payload for unknown endpoints."""
    return {"status": "endpoint not found"}, 404


@app.route(f"{config.API_PREFIX}/min_version/")
def api_minversion():
    """Report the minimum API client version supported by this server."""
    return {"status": "success", "body": {"min_version": config.MIN_API_VERSION}}, 200
:return: The IP. """ return request.headers.get('X-Forwarded-For', request.environ['REMOTE_ADDR']) def get_apikey(): api_key = request.args.get('apikey', None) if api_key: api_key = api_key.strip() return api_key if MULTIUSER: limiter = Limiter(app, key_func=get_apikey, default_limits=["6 per minute"]) else: limiter = None def limit_decorate(): """ Decorates functions depending on multiuser mode. """ return limiter.limit(rate_limit_from_api_key) if MULTIUSER else lambda x: x def rate_limit_from_api_key(): api_key = get_apikey() if api_key: user = User.query.filter_by(api_key=api_key).first() if user:
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address
from flask import current_app

# Limits applied to every route unless exempted.
default_limits = ['2/second', '200/hour', '500/day']

limiter = Limiter(
    key_func=get_remote_address,
    default_limits=default_limits,
)


@limiter.request_filter
def limiter_filter():
    """Request filter: returning True exempts the request from rate limits."""
    # True when the client IP is on the configured whitelist.
    dont_filter = get_remote_address() in (current_app.config.get('WHITE_LIST') or ())
    # NOTE(review): `not DEBUG` exempts ALL requests outside debug mode, so
    # rate limiting only takes effect when DEBUG is on — this looks inverted
    # (one would expect limits in production); confirm intent.
    if not current_app.config['DEBUG'] or dont_filter:
        return True
class RequestBroker:
    """Flask front-end that forwards queries to RabbitMQ workers and serves
    their cached results."""

    app = None

    def __init__(self, name):
        self.app = Flask(name)
        self.app.config['JSON_AS_ASCII'] = False
        # Process-shared counter used to assign unique query ids.
        self.query_id = Value('i', 0)
        self.cache = CacheDict(cache_len=1000)
        self.connection_producer = RmqProducer(
            EXCHANGES.CONNECTION_QUERY.value)
        self.meeting_producer = RmqProducer(EXCHANGES.MEETING_QUERY.value)
        self.sequence_producer = RmqProducer(EXCHANGES.SEQUENCE_QUERY.value)
        self.connection_consumer = RmqConsumer(
            EXCHANGES.FLASK_SERVER_CONNECTION.value,
            self.consume_rabbit_results)
        self.meeting_consumer = RmqConsumer(
            EXCHANGES.FLASK_SERVER_MEETING.value, self.consume_rabbit_results)
        self.sequence_consumer = RmqConsumer(
            EXCHANGES.FLASK_SERVER_SEQUENCE.value,
            self.consume_rabbit_results)
        self.connection_consumer_thread = Thread(
            target=self.connection_consumer.start, args=[])
        self.meeting_consumer_thread = Thread(
            target=self.meeting_consumer.start, args=[])
        self.sequence_consumer_thread = Thread(
            target=self.sequence_consumer.start, args=[])
        self.data_manager = FlaskDataManager()
        self.stops = None
        self.last_update_date = None
        self.limiter = Limiter(self.app, key_func=get_remote_address,
                               default_limits=["100 per hour"])
        # Result polling and stop listing are cheap reads; don't limit them.
        self.limiter.exempt(self.handle_get_query)
        self.limiter.exempt(self.handle_get_stops)

    def update_data(self):
        """Refresh the cached stop list from the data manager."""
        data = self.data_manager.get_updated_data()
        self.stops = data['stops']
        self.last_update_date = self.data_manager.last_data_update

    def run(self):
        self.app.run(threaded=True, host='0.0.0.0', port=5000)

    def add_endpoint(self, endpoint=None, endpoint_name=None, handler=None,
                     methods=None):
        """Register a URL rule on the wrapped Flask app."""
        self.app.add_url_rule(endpoint, endpoint_name, handler,
                              methods=methods)

    def start(self):
        """Start producers/consumers, register all routes, and serve forever."""
        self.data_manager.start()
        if self.data_is_loaded():
            self.update_data()
        self.connection_producer.start()
        self.meeting_producer.start()
        self.sequence_producer.start()
        self.connection_consumer_thread.start()
        self.meeting_consumer_thread.start()
        self.sequence_consumer_thread.start()
        self.add_endpoint('/connection', 'connection',
                          self.handle_post_connection, ['POST'])
        self.add_endpoint('/meeting', 'meeting', self.handle_post_meeting,
                          ['POST'])
        self.add_endpoint('/sequence', 'sequence', self.handle_post_sequence,
                          ['POST'])
        self.add_endpoint('/result/<query_id>', 'results',
                          self.handle_get_query, ['GET'])
        # Fix: plain string (the original used an f-string with no
        # placeholders).
        self.add_endpoint('/stops', 'stops', self.handle_get_stops, ['GET'])
        logger.info('SolverBroker: started')
        self.run()

    def consume_rabbit_results(self, result):
        """Store a finished worker result in the cache, keyed by query id."""
        result["result"]["is_done"] = True
        self.cache[result["query_id"]] = {
            "result": result["result"],
            "error": result["error"]
        }

    def handle_get_query(self, query_id):
        """Return the (possibly still pending) result for *query_id*."""
        try:
            query_id = int(query_id)
        except ValueError:
            return jsonify(ErrorCodes.BAD_QUERY_ID_TYPE.value), 400
        try:
            result = self.cache[query_id]
        except KeyError:
            return jsonify(ErrorCodes.BAD_QUERY_ID_VALUE.value), 400
        if result["result"][
                "is_done"] and result["error"] != ErrorCodes.OK.value:
            if result["error"] in [
                    ErrorCodes.INTERNAL_SERVER_ERROR.value,
                    ErrorCodes.INTERNAL_DATA_NOT_LOADED.value
            ]:
                return jsonify(result["error"]), 500
            return jsonify(result["error"]), 400
        return jsonify(result["result"]), 202

    def handle_get_stops(self):
        """Return the current stop list, refreshing it first if stale."""
        if not self.data_is_loaded():
            return jsonify(ErrorCodes.INTERNAL_DATA_NOT_LOADED.value), 500
        if self.last_update_date is None or \
                self.last_update_date < self.data_manager.last_data_update:
            self.update_data()
        return jsonify(self.stops), 202

    def handle_post_connection(self):
        request_json = request.get_json()
        result = self.handle_query_post(
            self.connection_producer, request_json, ConnectionQuery,
            ErrorCodes.BAD_CONNECTION_JSON_FORMAT.value)
        return result

    def handle_post_meeting(self):
        request_json = request.get_json()
        result = self.handle_query_post(
            self.meeting_producer, request_json, MeetingQuery,
            ErrorCodes.BAD_MEETING_JSON_FORMAT.value)
        return result

    def handle_post_sequence(self):
        request_json = request.get_json()
        result = self.handle_query_post(
            self.sequence_producer, request_json, SequenceQuery,
            ErrorCodes.BAD_SEQUENCE_JSON_FORMAT.value)
        return result

    def handle_query_post(self, producer, request_json, query_class,
                          parsing_error_message):
        """Validate, enqueue, and register a new query; return its id."""
        if not query_class.validate(request_json):
            return jsonify(parsing_error_message), 400
        with self.query_id.get_lock():
            self.query_id.value += 1
            query_id = self.query_id.value
        request_json["query_id"] = query_id
        producer.send_msg(request_json)
        self.cache[query_id] = {"result": {"is_done": False}}
        return jsonify({"query_id": query_id}), 202

    def data_is_loaded(self):
        """True when the data manager has all the pickles it needs."""
        if self.data_manager.data_loaded:
            return True
        else:
            # Fix: Logger.warn is a deprecated alias of Logger.warning.
            logger.warning(
                f"RequestBroker: Some pickles in data directory are missing this service won't "
                f"work without them. Wait for DataProvider to finish processing GTFS files."
            )
            return False
from flask import Flask, jsonify
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address
import json

app = Flask(__name__)

# NOTE(review): the Redis connection credentials were redacted in the
# original source ("redis://*****:*****"); restore the real URI (or read it
# from the environment) before deploying.
limiter = Limiter(app,
                  key_func=get_remote_address,
                  default_limits=["25 per day"],
                  storage_uri="redis://localhost:6379")


@app.route('/rate-limited')
@limiter.limit("100/30seconds", error_message=json.dumps({
    "data": "You hit the rate limit",
    "error": 429
}))
def index():
    """Endpoint with a custom 100-per-30-seconds limit."""
    return jsonify({'response': 'This is a rate limited response'})


@app.route('/')
def index2():
    """Endpoint covered only by the default limits."""
    return jsonify({'response': 'Are we rated limited?'})


@app.route('/unlimited')
@limiter.exempt
def index3():
    """Endpoint exempt from all rate limits."""
    return jsonify({'response': 'We are not rate limited'})
from flask_limiter.util import get_ipaddr

from auth import auth
import config
import models
from resources.courses import courses_api
from resources.reviews import reviews_api
from resources.users import users_api

app = Flask(__name__)
app.register_blueprint(courses_api)
app.register_blueprint(reviews_api, url_prefix='/api/v1')
app.register_blueprint(users_api, url_prefix='/api/v1')

# Fix: `global_limits` was deprecated and removed from Flask-Limiter;
# `default_limits` is the supported spelling of the same option.
limiter = Limiter(app, default_limits=[config.DEFAULT_RATE],
                  key_func=get_ipaddr)
# Users blueprint gets a tighter daily cap.
limiter.limit("40/day")(users_api)
# Only mutating methods on courses/reviews are limited, per method.
limiter.limit(config.DEFAULT_RATE, per_method=True,
              methods=["post", "put", "delete"])(courses_api)
limiter.limit(config.DEFAULT_RATE, per_method=True,
              methods=["post", "put", "delete"])(reviews_api)
#limiter.exempt(courses_api)
#limiter.exempt(reviews_api)


@app.route('/')
def hello_world():
    return 'Hello World'


@app.route('/api/v1/users/token', methods=['GET'])
@auth.login_required
def get_auth_token():
    """Return a fresh auth token for the logged-in user."""
    token = g.user.generate_auth_token()
    return jsonify({'token': token.decode('ascii')})
from flask_limiter.util import get_remote_address from config import config from flask import render_template from flask_restful import reqparse import display from datetime import datetime import requests server_settings = config('webconfig.ini', 'Server') app = Flask(__name__) limiter = Limiter( app, key_func=get_remote_address, default_limits=[ "{0} per minute".format(server_settings['rate_limit_minute']), "{0} per second".format(server_settings['rate_limit_second']) ]) title = config('webconfig.ini', 'Static')['title'] @app.before_request def limit_remote_addr(): if request.remote_addr != server_settings['allow_ip']: abort(403) # Forbidden @app.route('/', methods=['GET', 'PUT', 'POST', 'DELETE']) def welcome():
import time import os import sys import re reload(sys) sys.setdefaultencoding("utf-8") #######flask app = Flask(__name__) app.wsgi_app = ProxyFix(app.wsgi_app) app.secret_key = os.path.join(config_dir, 'PyOne' + GetConfig('password')) cache = Cache(app, config={'CACHE_TYPE': 'redis'}) limiter = Limiter( app, key_func=get_remote_address, default_limits=["200/minute", "50/second"], ) ################################################################################ ###################################功能函数##################################### ################################################################################ def md5(string): a = hashlib.md5() a.update(string.encode(encoding='utf-8')) return a.hexdigest() def GetTotal(path='A:/'): key = 'total:{}'.format(path)
if config.email.use_mandrill: app.config['MANDRILL_API_KEY'] = config.email.mandrill_api_key app.config['MANDRILL_DEFAULT_FROM'] = config.email.from_ # add sqlalchemy middleware db = SQLAlchemy(app) # add flask_kvsession middleware app.config['SESSION_KEY_BITS'] = 128 engine = create_engine('sqlite:///beerme/sessions.sqlite') metadata = MetaData(bind=engine) store = SQLAlchemyStore(engine, metadata, 'kvstore') metadata.create_all() KVSessionExtension(store, app) # add flask csrf middleware csrf = SeaSurf(app) # add rate limiting middleware limiter = Limiter(app) auth_limit = limiter.shared_limit("5/minute;1/second", scope="auth") # app constants SATOSHIS = 1e8 class User(db.Model): id = db.Column(db.Integer, primary_key=True) email = db.Column(db.String, unique=True) class Beer(db.Model): id = db.Column(db.Integer, primary_key=True) brew = db.Column(db.String) table = db.Column(db.String) name = db.Column(db.String) price_satoshis = db.Column(db.Integer)
# -*- coding: utf-8 -*-
# by dl
from flask import Flask
from flask_cors import CORS
from flask_cache import Cache
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address
from Config import cache_config

app = Flask(__name__)

# Cache (Redis backend).
# Fix: Cache(app, ...) already initialises the extension, so the original's
# extra cache.init_app(app) call was redundant and has been removed.
cache = Cache(app, config=cache_config.Redis, with_jinja2_ext=False)

# CORS (allow credentialed cross-origin requests)
CORS(app, supports_credentials=True)

# Rate limiting, keyed by client IP
limiter = Limiter(app, key_func=get_remote_address,
                  default_limits=['200 per day', '50 per hour'])

# Load controllers (registers routes via import side effects)
import Controller.BaseController
def register_extensions(app):
    """ register extensions to the app

    Wires up compression, databases, rate limiting, i18n, login management,
    initial DB population, and optional forced HTTPS.
    """
    app.jinja_env.add_extension('jinja2.ext.do')  # Global values in jinja

    # Uncomment to enable profiler
    # See scripts/profile_analyzer.py to analyze output
    # app = setup_profiler(app)

    # Compress app responses with gzip
    compress = Compress()
    compress.init_app(app)

    # Influx db time-series database
    db.init_app(app)
    influx_db.init_app(app)

    # Limit authentication blueprint requests to 200 per minute
    limiter = Limiter(app, key_func=get_ip_address)
    limiter.limit("200/minute")(routes_authentication.blueprint)

    # Language translations
    babel = Babel(app)

    @babel.localeselector
    def get_locale():
        # Prefer the logged-in user's saved language when it is one of the
        # supported LANGUAGES.
        try:
            user = User.query.filter(
                User.id == flask_login.current_user.id).first()
            if user and user.language != '':
                for key in LANGUAGES:
                    if key == user.language:
                        return key
        # Bypass endpoint test error "'AnonymousUserMixin' object has no attribute 'id'"
        except AttributeError:
            pass
        # Fall back to the browser's Accept-Language header.
        return request.accept_languages.best_match(LANGUAGES.keys())

    # User login management
    login_manager = flask_login.LoginManager()
    login_manager.init_app(app)

    @login_manager.user_loader
    def user_loader(user_id):
        # Return the User for the session id, or None if it no longer exists.
        user = User.query.filter(User.id == user_id).first()
        if not user:
            return
        return user

    @login_manager.unauthorized_handler
    def unauthorized():
        flash(gettext('Please log in to access this page'), "error")
        return redirect(url_for('routes_authentication.do_login'))

    # Create and populate database if it doesn't exist
    with app.app_context():
        db.create_all()
        populate_db()

        # This is disabled because there's a bug that messes up user databases
        # The upgrade script will execute alembic to upgrade the database
        # alembic_upgrade_db()

    # Check user option to force all web connections to use SSL
    # Fail if the URI is empty (pytest is running)
    if app.config['SQLALCHEMY_DATABASE_URI'] != 'sqlite://':
        with session_scope(app.config['SQLALCHEMY_DATABASE_URI']) as new_session:
            misc = new_session.query(Misc).first()
            if misc and misc.force_https:
                SSLify(app)
def get_basic_auth_from_request() -> Optional[str]: # `pcapi.utis.login_manager` cannot be imported at module-scope, # because the application context may not be available and that # module needs it. from pcapi.utils.login_manager import get_request_authorization auth = get_request_authorization() if not auth or not auth.username: return None return auth.username rate_limiter = Limiter( strategy="fixed-window-elastic-expiry", key_func= get_remote_address, # The default is a deprecated function that raises warning logs ) def ip_rate_limiter(**kwargs) -> Callable: base_kwargs = { "key_func": get_remote_address, "scope": "ip_limiter", "error_message": "rate limit by ip exceeded", } base_kwargs.update(kwargs) return rate_limiter.shared_limit(settings.RATE_LIMIT_BY_IP, **base_kwargs) def email_rate_limiter(**kwargs) -> Callable:
app = Flask(__name__)
app.config['PREFERRED_URL_SCHEME'] = 'https'
app.config['SERVER_NAME'] = os.environ.get('SERVER_NAME')
app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY')
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL')
db.app = app
db.init_app(app)


# CSS directives must be inlined in HTML emails.
@app.template_filter('inline_styles')
def inline_styles(html):
    return premailer.transform(html)


# Fix: `global_limits` was deprecated and removed from Flask-Limiter;
# `default_limits` is the supported spelling of the same option.
limiter = Limiter(app, default_limits=['5/second'])
limiter.limit('100/hour')(ui)
limiter.limit('10/minute')(api)
limiter.limit('100/day')(api)
app.register_blueprint(ui)
app.register_blueprint(api)

if __name__ == '__main__':
    # Fix: parenthesised print works on both Python 2 and 3 (the original
    # bare `print` statement is a SyntaxError on Python 3).
    print("Running in debug mode")
    app.debug = True
    app.run()
else:
    handler = logging.StreamHandler()
    handler.setLevel(logging.INFO)
    app.logger.setLevel(logging.INFO)
"""
An unofficial, RESTful API for NIST's NVD.
Copyright (C) 2020 [email protected]
"""
from flask import Flask, request
from api.resources import search
from flask_restful import Api
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address

app = Flask(__name__)
api = Api(app)

# Rate Limiting
limiter = Limiter(app, key_func=get_remote_address, default_limits=[
    '50/hour', '200/day'])
# Shared per-endpoint limit applied to every CVE resource below.
rate = limiter.limit('1/second, 5/minute', error_message={
    'Rate Limit Exceeded': '1/second, 5/minutes, 50/hour, 200/day'})


@limiter.request_filter
def ip_whitelist():
    # Returning True exempts the request: localhost is never rate limited.
    return request.remote_addr == "127.0.0.1"


search.CVE.decorators.append(rate)
search.CVE_Year.decorators.append(rate)
search.CVE_Modified.decorators.append(rate)
search.CVE_Recent.decorators.append(rate)
search.CVE_All.decorators.append(rate)
search.Schema.decorators.append(rate)
from mycodo.mycodo_client import DaemonControl from mycodo.mycodo_flask.routes_authentication import clear_cookie_auth from mycodo.mycodo_flask.utils import utils_general from mycodo.utils.influx import query_string from mycodo.utils.system_pi import assure_path_exists from mycodo.utils.system_pi import str_is_float blueprint = Blueprint('routes_general', __name__, static_folder='../static', template_folder='../templates') logger = logging.getLogger(__name__) influx_db = InfluxDB() limiter = Limiter() @blueprint.route('/') def home(): """Load the default landing page""" if flask_login.current_user.is_authenticated: if flask_login.current_user.landing_page == 'live': return redirect(url_for('routes_page.page_live')) elif flask_login.current_user.landing_page == 'dashboard': return redirect(url_for('routes_page.page_dashboard')) return redirect(url_for('routes_page.page_live')) return clear_cookie_auth() @blueprint.route('/settings', methods=('GET', 'POST'))
login_manager.login_view = 'login' @app.route('/healthcheck') def healthcheck(): return "OK", 200 def get_userid(): user_id = flask_login.current_user.id return user_id if MULTIUSER: limiter = Limiter( app, key_func=get_userid ) else: limiter = None def limit_api_resets(f): """ Decorates functions depending on multiuser mode. """ if MULTIUSER: @limiter.limit("1 per minute") @wraps(f) def wrapper(*args, **kwargs): return f(*args, **kwargs) return wrapper else: @wraps(f)
from flask_limiter.util import get_ipaddr

from auth import auth
import config
import models
from resources.courses import courses_api
from resources.reviews import reviews_api
from resources.users import users_api

app = Flask(__name__)
app.register_blueprint(courses_api, url_prefix='/api/v1')
app.register_blueprint(reviews_api, url_prefix='/api/v1')
app.register_blueprint(users_api, url_prefix='/api/v1')

# Fix: `global_limits` was deprecated and removed from Flask-Limiter;
# `default_limits` is the supported spelling of the same option.
limiter = Limiter(app, default_limits=[config.DEFAULT_RATE],
                  key_func=get_ipaddr)
# Users blueprint gets a tighter daily cap.
limiter.limit("20/day")(users_api)
# Only mutating methods on courses/reviews are limited, per method.
limiter.limit(config.DEFAULT_RATE, per_method=True,
              methods=["post", "put", "delete"])(courses_api)
limiter.limit(config.DEFAULT_RATE, per_method=True,
              methods=["post", "put", "delete"])(reviews_api)


@app.route('/')
def hello_world():
    return "Hello World"
import os
import logging

from flask import flash, Flask, render_template, redirect
from flask import request
from flask_limiter import Limiter
from werkzeug.utils import secure_filename

app = Flask(__name__)
app.secret_key = "secret-key"

# Upload directory.  FIX: the original used ".\store" — "\s" happens not to be
# an escape sequence so it worked by accident, but a backslash-built path is
# Windows-only; build it portably instead.
path = os.path.join(".", "store")
app.config["path"] = path

# FIX: Flask config keys are case-sensitive and Flask only honors the
# upper-case MAX_CONTENT_LENGTH key; the original "Max_Content_length" entry
# was silently ignored, so the 10 MiB request-body cap was never enforced.
app.config["MAX_CONTENT_LENGTH"] = 1024 * 1024 * 10

limiter = Limiter(app, global_limits=["5 per minute"])

# Permitted upload extensions (compared lower-cased).
imageExt = {"jpg", "png", "bmp", "jpeg"}


def allowed_file(filename):
    """Return True iff *filename* has an extension in imageExt."""
    return "." in filename and filename.rsplit(".", 1)[1].lower() in imageExt


@app.route("/")
def web():
    """Serve the upload form."""
    return render_template("upload.html")


@app.route("/upload", methods=["POST"])
def upload():
    # Defensive check; the route only accepts POST anyway.
    if request.method != "POST":
        flash("Use appropriate method")
        return "method is wrong"  # FIX: typo "worng" in user-facing message
    logging.error(request.files)
port=redis_url.port, db=redis_db, password=redis_password) return r setup_logging() redis_connection = create_redis_connection() mail = Mail() migrate = Migrate() mail.init_mail(settings.all_settings()) statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX) limiter = Limiter(key_func=get_ipaddr, storage_uri=settings.LIMITER_STORAGE) import_query_runners(settings.QUERY_RUNNERS) import_destinations(settings.DESTINATIONS) from redash.version_check import reset_new_version_status reset_new_version_status() class SlugConverter(BaseConverter): def to_python(self, value): # This is ay workaround for when we enable multi-org and some files are being called by the index rule: # for path in settings.STATIC_ASSETS_PATHS: # full_path = safe_join(path, value) # if os.path.isfile(full_path): # raise ValidationError()
environ['SCRIPT_NAME'] = script_name path_info = environ['PATH_INFO'] if path_info.startswith(script_name): environ['PATH_INFO'] = path_info[len(script_name):] scheme = environ.get('HTTP_X_SCHEME', '') if scheme: environ['wsgi.url_scheme'] = scheme return self.app(environ, start_response) app = Flask(__name__, static_folder='web') app.json_encoder = ApiJSONEncoder app.wsgi_app = ReverseProxied(app.wsgi_app) # type: ignore limiter = Limiter(app, key_func=get_remote_address, default_limits=["3/40seconds"]) @app.route('/') @limiter.exempt def index(): return app.send_static_file('index.html') @app.route('/timeseries', methods=['GET']) def timeseries(): result_df = InfluxdbQuery(request.args).query() return result_df.to_json(orient='split', index=False)
import os from flask import Flask, render_template, request from flask_limiter import Limiter from flask_limiter.util import get_remote_address from dao import Dao from util import get_links, handle_extras app = Flask(__name__) version = "v0.4" # rate limiting limiter = Limiter( app, key_func=get_remote_address, default_limits=["15 per minute"], ) # data access object dao = Dao() @app.route('/', methods=['POST', 'GET']) def index(): render_parameters = {} if request.method == 'POST': if 'button_get_links' in request.form: render_parameters = get_links(request) if 'link_list' in render_parameters:
# Caching cache = Cache() # Redis redis_store = FlaskRedis() # Debugtoolbar debugtoolbar = DebugToolbarExtension() # Migrations alembic = Alembic() # Themes themes = Themes() # PluginManager plugin_manager = PluginManager() # Babel babel = Babel() # CSRF csrf = CSRFProtect() # Rate Limiting limiter = Limiter(auto_check=False, key_func=get_remote_address) # Celery celery = Celery("flaskbb")
from flask import Flask, json, jsonify, request import Helper.InputHandler as inp import modle.response_model as modelRes import random from flask_limiter import Limiter from flask_limiter.util import get_remote_address import psycopg2 app = Flask(__name__) limiter = Limiter( app, key_func=get_remote_address) # config flask limiter with flask app #connect to data using psycopg2 library conn = psycopg2.connect( "host='localhost' port='5432' dbname='postgres' user='******' password='******'" ) import sys import os if sys.version_info >= (3, ): import urllib.request as urllib2 import urllib.parse as urlparse else: import urllib2 import urlparse cur = conn.cursor() cwd = os.getcwd() fields = ["speed", "status", "total_size", "eta", "left_size"] APIs = {}
# Flask setup static_folder = os.path.abspath( os.path.join( os.path.dirname(os.path.realpath(__file__)), "../../frontends/case-triage/build/", )) app = Flask(__name__, static_folder=static_folder) app.secret_key = get_local_secret("case_triage_secret_key") CSRFProtect(app).exempt(e2e_blueprint) register_error_handlers(app) limiter = Limiter( app, key_func=get_remote_address, default_limits=["15 per second"], storage_uri=get_rate_limit_storage_uri(), ) if in_development(): db_url = local_postgres_helpers.postgres_db_url_from_env_vars() else: db_url = SQLAlchemyEngineManager.get_server_postgres_instance_url( database_key=SQLAlchemyDatabaseKey.for_schema(SchemaType.CASE_TRIAGE)) app.config["SESSION_COOKIE_HTTPONLY"] = True app.config["SESSION_COOKIE_SECURE"] = True app.config["SESSION_COOKIE_SAMESITE"] = "Strict" app.config["MAX_CONTENT_LENGTH"] = 16 * 1024 * 1024 # 16 MiB max body size setup_scoped_sessions(app, db_url)
import logging import sqlite3 import logger import problemdb import teamdb from api import api from flask import Flask, jsonify, make_response, render_template, session, redirect, url_for from flask_limiter import Limiter from decorators import admins_only, redirect_if_not_logged_in app = Flask(__name__) limiter = Limiter(app) @app.errorhandler(429) def error_handler(optional_argument=""): if "tid" in session: logger.log("spam", logger.CRITICAL, "%s is using the api too quickly!", session["tid"]) return make_response(jsonify(message="Slow down!"), 200) app.debug = True app.secret_key = open(".secret_key", "r").read() app.jinja_env.trim_blocks = True limiter.limit("10/minute", error_message=error_handler, exempt_when=lambda: is_admin())(api) app.register_blueprint(api) conn = sqlite3.connect("introctf.db", check_same_thread=False) conn.text_factory = str @app.route('/')
Compress(app) # app.config["CACHE_REDIS_URL"] app.config["RATELIMIT_STORAGE_URL"] = 'memory://' app.config["RATELIMIT_KEY_PREFIX"] = "flask_limiting_" app.config["RATELIMIT_ENABLED"] = bool( int(environ.get("RATELIMIT_ENABLED", True))) def limiter_key_func(): return request.remote_addr limiter = Limiter(app, key_func=limiter_key_func, default_limits=["100/minute"], headers_enabled=True, strategy="fixed-window") # setup db pool_size = int(environ.get("PG_POOL_SIZE", 10)) engines = { "leader": create_engine(app.config['DATABASE_URL'], pool_size=pool_size, pool_use_lifo=True), "followers": [ create_engine(x, pool_size=pool_size, pool_use_lifo=True) for x in app.config['SQLALCHEMY_READ_URIS'] if x ] if any(i for i in app.config['SQLALCHEMY_READ_URIS']) else [ create_engine(app.config['DATABASE_URL'],
from datetime import datetime, timedelta import pylast import redis import random import os from flask.ext.sqlalchemy import SQLAlchemy insults = ("you weeb", "you scrub", "you neckbeard", "you pleb", "you newb", "dani stop fapping please", "nuck broke it", "sucks to be you", "dani pls", "no", "the cake is probably a lie", "STOP IT PLS", "pomf", "you wop") redis = redis.StrictRedis(host='localhost', port=6379) app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///data.db' db = SQLAlchemy(app) limiter = Limiter(app, strategy="moving-window", storage_uri="redis://localhost:6379", key_func = lambda : request.args['user_name']) def verify_command(key): if key == token_key: return True else: return False lastfm_network = pylast.LastFMNetwork(api_key=config.lastfm_api_key, api_secret=config.lastfm_api_secret) @app.errorhandler(429) def ratelimit_handler(e): return random.choice(insults) + ", ratelimit exceeded %ss" % e.description, 429 @limiter.request_filter def channel_whitelist():
from sqlalchemy.sql import select from project.models.database import init_db, db_session, engine from project.models.models import Contact, Data from project import application import project.controllers.stream_controller from tempfile import gettempdir # configure session to use filesystem (instead of signed cookies) application.config["SESSION_FILE_DIR"] = gettempdir() application.config["SESSION_PERMANENT"] = False application.config["SESSION_TYPE"] = "filesystem" Session(application) # avoid ddos attack limiter = Limiter(application, key_func=get_remote_address, default_limits=["200 per day", "50 per hour"]) @application.teardown_appcontext def shutdown_session(exception=None): db_session.remove() # ensure responses aren't cached if application.config["DEBUG"]: @application.after_request def after_request(response): response.headers[ "Cache-Control"] = "no-cache, no-store, must-revalidate"
def send_imessage(message, buddy): #TODO: Allow `"` to be sent. message = message.replace('"', "'") cmd = ('osascript<<END\n' 'tell application "Messages"\n' ' set targetService to 1st service whose service type = iMessage\n' ' set targetBuddy to buddy "{0}" of targetService\n' ' send "{1}" to targetBuddy\n' 'end tell\n' 'END') cmd = cmd.format(buddy,message) return str(os.system(cmd)) app = Flask(__name__) limiter = Limiter(key_func=get_remote_address) limiter.init_app(app) @app.route('/') def index(): return render_template('index.html') @app.route('/test') def test(): print 'Print hit.' return 'Return hit.' @app.route('/post', methods = ['POST']) @limiter.limit("1/second") def post(): if request.headers['Content-Type'] == 'text/plain':
from werkzeug import check_password_hash, generate_password_hash import pymongo from utils import safe_pickle_dump, strip_version, isvalidid, Config # various globals # ----------------------------------------------------------------------------- # database configuration if os.path.isfile('secret_key.txt'): SECRET_KEY = open('secret_key.txt', 'r').read() else: SECRET_KEY = 'devkey, should be in a file' app = Flask(__name__) app.config.from_object(__name__) limiter = Limiter(app, global_limits=["100 per hour", "20 per minute"]) # ----------------------------------------------------------------------------- # utilities for database interactions # ----------------------------------------------------------------------------- # to initialize the database: sqlite3 as.db < schema.sql def connect_db(): sqlite_db = sqlite3.connect(Config.database_path) sqlite_db.row_factory = sqlite3.Row # to return dicts rather than tuples return sqlite_db def query_db(query, args=(), one=False): """Queries the database and returns a list of dictionaries.""" cur = g.db.execute(query, args)
from flask import render_template
from flask_limiter import Limiter
from flask_limiter.util import get_ipaddr

import config
import models
from resources.companies import companies_api
from resources.marinas import marinas_api
from resources.users import users_api

app = Flask(__name__)
app.register_blueprint(companies_api, url_prefix='/api/v1')
app.register_blueprint(marinas_api, url_prefix='/api/v1')
app.register_blueprint(users_api, url_prefix='/api/v1')

# Rate limiting: a global default, plus tighter per-blueprint limits.
limiter = Limiter(app, global_limits=[config.DEFAULT_RATE], key_func=get_ipaddr)
limiter.limit('40/day')(users_api)
# FIX: the original was missing the closing ')' of limiter.limit(...), so
# `methods=['post', 'put', 'delete'](companies_api)` attempted to *call the
# list* — a hard error instead of applying the limit to the blueprint.
limiter.limit(config.DEFAULT_RATE,
              per_method=True,
              methods=['post', 'put', 'delete'])(companies_api)
# limiter.exempt(companies_api)
# limiter.exempt(marinas_api)


@app.route('/')
@app.route('/index')
def index():
    return render_template('index.html')


@app.route('/api/v1/users/token', methods=['GET'])
@auth.login_required
def get_auth_token():
    token = g.user.generate_auth_token()
import json import os from flask import Flask, request from flask_caching import Cache from flask_limiter import Limiter from flask_limiter.util import get_remote_address from parse_reponse import response app = Flask(__name__) basedir = os.path.abspath(os.path.dirname(__file__)) limiter = Limiter( app, key_func=get_remote_address, default_limits=["10 per minute", "1 per second"], ) config = { "DEBUG": True, # some Flask specific configs "CACHE_TYPE": "simple", # Flask-Caching related configs "CACHE_DEFAULT_TIMEOUT": 300 } #RATE_LIMIT_FUNCS = {'ip': lambda: get_remote_address} app.config.from_mapping(config) cache = Cache(app)
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(module)s %(funcName)s : %(message)s') fh = logging.FileHandler(ConfigClass.flask_logfile) fh.setLevel(ConfigClass.flask_log_level) fh.setFormatter(formatter) logger.addHandler(fh) sh = logging.StreamHandler() sh.setLevel(ConfigClass.flask_log_level) sh.setFormatter(formatter) logger.addHandler(sh) # Initialize Flask Limiter extension limiter = Limiter(app, headers_enabled=ConfigClass.xrate_limit_headers_enabled, strategy='fixed-window-elastic-expiry', storage_uri=ConfigClass.redis_cache_uri) # set rate limits for each module/blueprint limiter.limit("1/second;5/minute")(api_blueprint) limiter.limit("1/second;10/minute")(users_blueprint) # register blueprints app.register_blueprint(api_blueprint) app.register_blueprint(users_blueprint) if __name__ == '__main__': if ConfigClass.debug_mode: app.run(host=ConfigClass.debug_host, port=ConfigClass.debug_port, debug=True) else: app.run(debug=False)
from flask_restful import Resource, Api from flask_limiter import Limiter from decorators import * from utils import * from models import * app = Flask(__name__) api = Api(app) api.route = types.MethodType(api_route, api) app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://*****:*****@localhost/mydatabase' db = SQLAlchemy(app) limiter = Limiter(app, key_func=get_apikey) limite_compartido = limiter.shared_limit("100/hour", scope="all") @api.route('/v1/') class Hello(Resource): decorators = [require_appkey, limite_compartido] def get(self): return { 'status':'OK', 'data': [ {'hello': 'world'}, ] } @limiter.request_filter def ip_whitelist():