def process(self, q_item):
    """Execute one queued HTTP job and record its outcome on the queue item.

    Returns the decoded error payload on failure, otherwise the JSON
    response body; either way the queue item is updated and persisted.
    """
    if q_item.method == 'post':
        payload = json.loads(q_item.payload)
        result = self.post(url=q_item.url, data=payload)
    else:
        result = self.get(url=q_item.url)

    # self.post/self.get presumably record failures under session['error'];
    # the presence of that key marks this job as failed.
    if 'error' in session:
        error = session.pop('error')
        q_item.status = "error"
        q_item.response = json.dumps(error)
        q_item.retry_count = 0 if q_item.retry_count is None else q_item.retry_count + 1
        Queue.update(q_item)
        return error

    # Success path: optionally cache the payload, then persist the result.
    if q_item.cache_name:
        Cache.set_data(q_item.cache_name, result, is_unique=q_item.cache_is_unique)
    q_item.status = "ok"
    q_item.response = json.dumps(result)
    Queue.update(q_item)
    return result
def get_user(self, email):
    """Return the user record for *email*, preferring the 'get_users' cache.

    On a cache miss the user is fetched from the API, appended to the
    cached list, and the cache is rewritten before returning.
    """
    cached_users = Cache.get_data('get_users', default=[])
    hit = next((u for u in cached_users if u['email'] == email), None)
    if hit is not None:
        return hit
    fetched = self.get(url='user/{}'.format(email))
    cached_users.append(fetched)
    Cache.set_data('get_users', cached_users, is_unique=True)
    return fetched
async def post(self):
    """Create an event from the request JSON and forward paid-status statistics.

    Flow: persist the event, lazily warm the PAYD_CONDITIONS cache, evaluate
    whether the event is a paid conversion, push a statistics record when the
    click join succeeded, and always respond with the serialized event.
    """
    data = await self.request.json()
    event = await self.request.app.objects.create(self.model, **data)
    # Rebuild the paid-conditions cache on first use (or after eviction).
    if not Cache.get('PAYD_CONDITIONS'):
        await Rate.create_payd_conditions(self.request.app)
    is_payd, full_info = await event.is_payd(self.request.app)
    # is_payd is None when the event has no matching click row (see is_payd()).
    if is_payd is not None:
        statistics_data = {
            'date': str(full_info['date']),
            'partner': full_info['partner'],
            'ad': full_info['ad'],
            'country': full_info['country'],
            'is_payd': is_payd,
            'reg': event.is_reg_event,
            'tutorial': event.is_tutorial_event,
            'project': event.project,
        }
        # Disabled partner postback forwarding, kept for reference:
        # if is_payd:
        #     sended = await self.send_to_partner(full_info)
        #     statistics_data['postbacks'] = int(sended)
        await self.send_to_statistics(statistics_data)
    return web.Response(
        content_type='application/json',
        body=json.dumps(self.serialize(event)).encode(),
    )
async def is_payd(self, app):
    """Decide whether this event counts as a paid conversion.

    Joins the event with its originating click and checks the cached
    PAYD_CONDITIONS mapping (partner -> country -> project -> {event names}).

    Returns:
        (None, {}) when no matching event/click row exists; otherwise
        (payd: bool, info: dict) with attribution data from the join.
    """
    # Equivalent raw SQL, kept for reference:
    # SELECT * FROM event
    # INNER JOIN click ON (click.user_id = event.user_id)
    # WHERE event.user_id = {};
    qs = (Event.select(Event, Click).join(
        Click,
        peewee.JOIN.INNER,
        on=(Click.user_id == Event.user_id).alias('click')))
    try:
        obj = await app.objects.get(qs, id=self.id)
    except (Event.DoesNotExist, Click.DoesNotExist):
        # No joined click row means no attribution is possible.
        return None, {}
    try:
        payd = obj.event in Cache.get(
            'PAYD_CONDITIONS',
            {})[obj.click.partner][obj.click.country][obj.project]
    except KeyError:
        # A missing partner/country/project key means "not a paid condition".
        payd = False
    return (payd, {
        'event': obj.event,
        'user_id': obj.user_id,
        'date': obj.timestamp.date(),
        'ad': obj.click.ad,
        'country': obj.click.country,
        'partner': obj.click.partner,
        'project': obj.project,
    })
def get_users(self):
    """Enqueue a background refresh of the user list and return the cached copy.

    The queued job fetches ``users`` via GET (30s backoff) and stores the
    result under the ``get_users`` cache key; until it completes, callers
    get whatever is currently cached (an empty list by default).
    """
    # Fix: 'get users' was an f-string with no placeholders (lint F541).
    Queue.add('get users',
              url='users',
              method='get',
              backoff_duration=30,
              cache_name="get_users",
              cache_is_unique=True)
    return Cache.get_data('get_users', default=[])
def decorated(*args, **kwargs):
    """Cache-aware wrapper around the decorated function ``f``.

    Resolution order:
    1. Under TESTING config, bypass the cache entirely (db_call or f).
    2. With ``from_cache`` in dkwargs, scan the named cache(s) for a single
       record whose ``dkwargs['key']`` field matches ``str(args[1])``.
    3. Otherwise read the whole-result cache under ``f.__name__`` and, when
       ``update_daily`` is set, refresh it once the data is over a day old.
    4. On a cache miss, compute via db_call/f and (for whole-result caching
       only) store the result.
    """
    if current_app.config['TESTING']:
        # Test runs always hit the source of truth, never the cache.
        if 'db_call' in dkwargs:
            data = dkwargs['db_call'](*args, **kwargs)
        else:
            data = f(*args, **kwargs)
        return data
    data = None
    if 'from_cache' in dkwargs:
        for cache_name in dkwargs['from_cache'].split(','):
            data_cache = Cache.get_data(cache_name)
            if data_cache:
                if len(args) == 2:
                    data = [
                        d for d in data_cache
                        if d[dkwargs['key']] == str(args[1])
                    ]
                    if len(data) > 0:
                        data = data[0]
                        break
                else:
                    current_app.logger.info("from_cache not 2 args")
    else:
        data = Cache.get_data(f.__name__)
        if data and dkwargs.get('update_daily'):
            updated_on = Cache.get_updated_on(f.__name__)
            kwargs['func'] = f
            if 'decorator' in dkwargs:
                kwargs['decorator'] = dkwargs['decorator']
            if 'sort_by' in dkwargs:
                kwargs['sort_by'] = dkwargs['sort_by']
            # BUG FIX: timedelta.seconds is only the within-day remainder
            # (always < 86400), so the old `.seconds > 60 * 60 * 24` test
            # could never be true and the daily refresh never ran.
            # total_seconds() measures the full age of the cached data.
            if (datetime.utcnow() -
                    updated_on).total_seconds() > 60 * 60 * 24:
                # update pages once a day
                update_cache(*args, **kwargs)
    if not data:
        if 'db_call' in dkwargs:
            data = dkwargs['db_call'](*args, **kwargs)
        else:
            data = f(*args, **kwargs)
        if 'from_cache' not in dkwargs:
            Cache.set_data(f.__name__, data)
    return data
async def create_payd_conditions(cls, app):
    """Build and cache the PAYD_CONDITIONS lookup.

    Shape: partner -> country -> project name -> set of paid event names,
    built from the Rate/ProjectRate/Project join. The mapping is stored
    under the 'PAYD_CONDITIONS' cache key and also returned.
    """
    conditions = await app.objects.execute(
        Rate.select(Rate, ProjectRate, Project).join(ProjectRate).join(Project))
    payd_conditions = {}
    for cond in conditions:
        project_name = cond.projectrate.project.name
        # setdefault walks/creates each nesting level in one pass; every
        # container is populated immediately, matching the original builds.
        events = (payd_conditions
                  .setdefault(cond.partner, {})
                  .setdefault(cond.country, {})
                  .setdefault(project_name, set()))
        events.add(cond.event)
    Cache.set('PAYD_CONDITIONS', payd_conditions)
    return payd_conditions
def test_dataset(window_size: int, expected: int) -> None:
    """Check dataset length and sample shapes for a given window size."""
    cache = Cache("/store/tmp")
    frame = cache("load-train", load)("/store/data/train.csv")
    dataset = Dataset(frame[:20], window_size=window_size, stride=5)
    assert len(dataset) == expected
    first_x, first_y = dataset[0]
    assert first_x.shape == (1, window_size)
    assert first_y.shape == (window_size, )
def test_dataset_total_size() -> None:
    """All samples together must cover the 20 source rows exactly."""
    cache = Cache("/store/tmp")
    frame = cache("load-train", load)("/store/data/train.csv")[:20]
    dataset = Dataset(frame, window_size=5, stride=5)
    # Each sample contributes its time dimension; the windows tile the input.
    total = sum(dataset[i][0].shape[1] for i in range(len(dataset)))
    assert total == 20
def __init__(self, launcher, name, shard_ids, max_shards):
    """Prepare a bot cluster: bot kwargs, per-cluster logger, no process yet."""
    self.launcher = launcher
    self.process = None
    # Everything the bot process needs to start, bundled for later spawning.
    self.kwargs = dict(
        intents=INTENTS,
        allowed_mentions=NO_MENTIONS,
        case_insensitive=True,
        token=TOKEN,
        shard_ids=shard_ids,
        shard_count=max_shards,
        cluster_name=name,
        cache=Cache(),
        db=Database(
            os.getenv("DB_NAME"),
            os.getenv("DB_USER"),
            os.getenv("DB_PASSWORD"),
        ),
        theme_color=config.THEME_COLOR,
        dark_theme_color=config.DARK_THEME_COLOR,
        error_color=config.ERROR_COLOR,
        initial_extensions=EXTENSIONS,
    )
    self.name = name
    # Cluster-scoped logger writing to both stderr and the launcher log file.
    self.log = logging.getLogger(f"Cluster#{name}")
    self.log.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        "[%(asctime)s %(name)s/%(levelname)s] %(message)s")
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(formatter)
    file_handler = logging.FileHandler("logs/cluster-Launcher.log",
                                       encoding="utf-8")
    file_handler.setFormatter(formatter)
    self.log.handlers = [stream_handler, file_handler]
    self.log.info(f"Initialized with shard ids {shard_ids}, "
                  f"total shards {max_shards}")
def update_cache(*args, **kwargs):
    """Recompute a cached dataset from the db and store it if it changed.

    kwargs must contain 'func' (the *_from_db fetcher); optional 'decorator'
    wraps the fetcher, and optional 'sort_by' re-sorts the data after any
    review entities are merged back in. Older cache versions are purged last.
    """
    func = kwargs.pop('func')
    # The cache key drops the "_from_db" suffix so reads and writes share a name.
    cache___name__ = func.__name__.replace("_from_db", "")
    cached_data = Cache.get_data(cache___name__)
    if 'decorator' in kwargs:
        # NOTE(review): 'decorator' (and 'sort_by' when no review data exists)
        # remain in kwargs and are forwarded to func below — presumably the
        # fetchers tolerate extra keyword arguments; confirm.
        func = kwargs['decorator'](func)
    data = func(*args, **kwargs)
    review_data = Cache.get_review_entities(func.__name__)
    if review_data and 'sort_by' in kwargs:
        sort_by = kwargs.pop('sort_by')
        # BUG FIX: list.extend() returns None, so the previous
        # `data = sort_by(data.extend(review_data))` always passed None to
        # sort_by. Extend in place, then sort the merged list.
        data.extend(review_data)
        data = sort_by(data)
    if cached_data != data:
        current_app.logger.info('Cache updated from db')
        Cache.set_data(cache___name__, data)
    else:
        current_app.logger.info('Cache does not need updating for {}'.format(
            func.__name__))
    Cache.purge_older_versions(func.__name__)
# Regression test for the dataset size produced by the preprocessing pipeline.
import pytest
from app.dataset import Dataset
from app.entities import Annotations
import typing as t
from app.cache import Cache
from app.preprocess import load_labels, get_annotations

# Module-level cache shared across tests; backed by /store/tmp on disk.
cache = Cache("/store/tmp")


def test_dataset() -> None:
    """Dataset built from the cached training annotations has the expected size."""
    # Labels and annotations go through the cache so repeated runs are fast.
    labels = cache("labels", load_labels)("/store/dataset/labels.csv")
    annotations = cache("train_annotations", get_annotations)("/store/dataset/train.csv", labels)
    d = Dataset(annotations)
    # Expected row count of the training annotations fixture.
    assert len(d) == 142119
def purge_cache():
    """Drop the cached user list and report how many entries were deleted."""
    deleted_count = Cache.purge_cache('get_users')
    return jsonify({"deleted": deleted_count})
# Yes, yes i know Redis should not be used to store persistent data. # This program is made for the sole purpose of practicing Redis w/ python. import flask from app import config import app.cron_jobs as cron from app.cache import Cache from app.forms import * from app.auth import require_auth from redis_utils import redis_utils as redis_utils from flask import request, render_template, flash, session app = flask.Flask(__name__) app.config.from_mapping(config.app_config()) cache = Cache() #cron.job_clean_cache() @app.route("/index") def index_page(): return render_template('base.html') @app.route("/login", methods=['GET', 'POST']) @cache.evict() def login_form(): form = LoginForm() title = 'Redis Login' try: if session['username']:
import os from urllib.parse import urljoin, urlsplit from sanic.exceptions import InvalidUsage from sanic.response import HTTPResponse, redirect from app.cache import Cache from app.logger import set_logger from app.scheduler import Scheduler REDIS_HOST = os.getenv('REDIS_HOST') REDIS_PORT = os.getenv('REDIS_PORT') ADDRESS = f"redis://{REDIS_HOST}:{REDIS_PORT}" SERVICE_NAME = os.getenv('SERVICE', 'balancer') cache = Cache.setup_cache(ADDRESS) sc = Scheduler.setup_scheduler(cache) logger = set_logger(SERVICE_NAME, file_logging=True) async def index(request): file_url = request.args.get('video') logger.debug('file_url: %s', file_url) if file_url: splitted = urlsplit(file_url) origin_host = splitted.netloc path = splitted.path else: logger.error('no file_url')
def admin_events(selected_event_id=None, api_message=None):
    """Admin view for creating and updating events.

    Renders the events admin page and, on a valid form submit, builds an
    event payload from the form, optionally attaches an uploaded image
    (base64-encoded, size-checked), and creates or updates the event through
    the API client. Review-cache bookkeeping tracks unapproved events.
    """
    events = api_client.get_limited_events()
    event_types = api_client.get_event_types()
    speakers = api_client.get_speakers()
    venues = api_client.get_venues()
    session['events'] = events
    form = EventForm()
    temp_event = None
    # NOTE(review): both names initially bind the *same* list object;
    # reject_reasons is rebound below before any append, so no aliasing
    # occurs in practice — but this is fragile.
    errors = reject_reasons = []
    form.set_events_form(events, event_types, speakers, venues)
    if form.validate_on_submit():
        # Prefer a freshly uploaded image; otherwise keep the existing filename.
        if form.image_filename.data:
            filename = form.image_filename.data.filename
        else:
            filename = form.existing_image_filename.data
        reject_reasons = json.loads(form.reject_reasons_json.data)
        if form.reject_reason.data:
            reject_reasons.append(
                {
                    'reason': form.reject_reason.data,
                    'created_by': session['user']['id']
                }
            )
        # Assemble the event payload from the form; fee fields default to 0.
        event = {
            'event_id': form.events.data,
            'event_type_id': form.event_type.data,
            'title': form.title.data,
            'sub_title': form.sub_title.data,
            'description': form.description.data,
            'image_filename': filename,
            'fee': int(form.fee.data) if form.fee.data else 0,
            'conc_fee': int(form.conc_fee.data) if form.conc_fee.data else 0,
            'multi_day_fee': int(form.multi_day_fee.data) if form.multi_day_fee.data else 0,
            'multi_day_conc_fee': int(form.multi_day_conc_fee.data) if form.multi_day_conc_fee.data else 0,
            'venue_id': form.venue.data,
            'event_dates': form.event_dates.data,
            'start_time': form.start_time.data,
            'end_time': form.end_time.data,
            'event_state': form.submit_type.data,
            'reject_reasons': reject_reasons,
            'remote_access': form.remote_access.data,
            'remote_pw': form.remote_pw.data,
            'show_banner_text': form.show_banner_text.data
        }
        adjusted_event = event.copy()
        from html import escape
        # HTML-escape the description before it is sent to the API.
        adjusted_event['description'] = escape(event['description'])
        adjusted_event['event_dates'] = json.loads(str(event['event_dates']))
        file_request = request.files.get('image_filename')
        if file_request:
            file_data = file_request.read()
            # NOTE(review): the image bytes are base64-encoded *twice* here —
            # presumably the API expects that; confirm before changing.
            file_data_encoded = base64.b64encode(file_data)
            file_data_encoded = base64.b64encode(file_data_encoded).decode('utf-8')
            _file_size = size_from_b64(str(file_data_encoded))
            if _file_size > current_app.config['MAX_IMAGE_SIZE']:
                _file_size_mb = round(_file_size/(1024*1024), 1)
                _max_size_mb = current_app.config['MAX_IMAGE_SIZE']/(1024*1024)
                errors.append("Image {} file size ({} mb) is larger than max ({} mb)".format(
                    file_request.filename, _file_size_mb, _max_size_mb))
            else:
                adjusted_event['image_data'] = file_data_encoded
        if not errors:
            # Remove empty values from the payload (explicit 0 fees are kept).
            for key, value in event.items():
                if value != 0 and not value:
                    del adjusted_event[key]
            try:
                message = None
                if event.get('event_id'):
                    response = api_client.update_event(event['event_id'], adjusted_event)
                    message = 'event updated'
                    if event['event_state'] != "approved" and not form.cache_switch.data:
                        # Not yet approved: park the event in the review cache.
                        Cache.set_review_entity('get_events_in_future', event.get('event_id'))
                    else:
                        # Approved (or cache switch set): drop it from review
                        # and refresh the public events cache.
                        # NOTE(review): placement of update_cache() inside this
                        # else-branch is reconstructed from a flattened paste —
                        # confirm against version history.
                        Cache.delete_review_entity('get_events_in_future', event.get('event_id'))
                        update_cache(
                            func=api_client.get_events_in_future_from_db,
                            decorator=only_show_approved_events,
                            approved_only=True)
                else:
                    # No cache update needed here: an event is never in
                    # approved state when first created.
                    response = api_client.add_event(adjusted_event)
                if 'error' in session:
                    raise HTTPError(response, message=session.pop('error'))
                return redirect(url_for('main.admin_events', selected_event_id=response.get('id'), api_message=message))
            except HTTPError as e:
                current_app.logger.error(e)
                # Keep the submitted payload so the form can be repopulated.
                temp_event = json.dumps(event)
                if "message" in e.message:
                    errors = e.message['message']
                else:
                    errors = json.dumps(e.message)
    return render_template(
        'views/admin/events.html',
        form=form,
        images_url=current_app.config['IMAGES_URL'],
        selected_event_id=selected_event_id,
        message=api_message,
        temp_event=temp_event,
        errors=json.dumps(errors)
    )
# BUG FIX: os.environ is used below but `os` was never imported in this
# module's imports, which raises NameError at import time.
import os

from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from slack_sdk import WebClient
from slack_sdk.signature import SignatureVerifier

from app.cache import Cache

# Extension singletons, initialized against the app elsewhere (init_app pattern).
db = SQLAlchemy()
migrate = Migrate()
client = WebClient(token=os.environ["SLACK_API_TOKEN"])
signature_verifier = SignatureVerifier(os.environ["SLACK_SIGNING_SECRET"])
# redis_client = redis.Redis(host=os.environ.get("REDIS_HOST", "localhost"), port=os.environ.get("REDIS_PORT", 6379), db=0)
app_cache = Cache()


class Config:
    """Flask configuration; database settings are read from the environment."""

    # General Config
    # SECRET_KEY = environ.get('SECRET_KEY')
    # FLASK_APP = environ.get('FLASK_APP')
    # FLASK_ENV = environ.get('FLASK_ENV')
    # Database
    SQLALCHEMY_DATABASE_URI = os.environ.get("SQLALCHEMY_DATABASE_URI")
    SQLALCHEMY_ECHO = False
    SQLALCHEMY_TRACK_MODIFICATIONS = False


# Custom encoder to serialize datetime objects