class Config(object):
    """Flask configuration populated from environment variables."""

    ENV = env_config('ENV', default='production')
    DEBUG = env_config('DEBUG', default=False, cast=bool)
    SECRET_KEY = env_config('SECRET_KEY')
    MONGODB_SETTINGS = {
        'host': env_config('MONGO_URL'),
    }
def client():
    """Pytest-style fixture: yield a Flask test client, then drop the test DB.

    Yields:
        flask.testing.FlaskClient: a client bound to the app under test.
    """
    # Fix: the original set `app.Testing`, which Flask silently ignores;
    # the real flag is the lowercase `testing` attribute (equivalent to
    # app.config['TESTING'] = True).
    app.testing = True
    test_client = app.test_client()
    yield test_client
    # Teardown: drop the database named by MONGO_URL's last path segment.
    database_url = env_config('MONGO_URL')
    database_name = database_url.split('/')[-1]
    db.connection.drop_database(database_name)
class Config:
    """Celery settings: env-overridable Redis broker, UTC, results disabled."""

    BROKER_URL = env_config('WIDUKIND_CELERY_BROKER', 'redis://localhost:6379/0')
    # CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'
    BROKER_TRANSPORT_OPTIONS = {
        'fanout_prefix': True,
        'fanout_patterns': True,
    }
    # BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 3600}  # 1 hour.
    CELERY_ENABLE_UTC = True
    CELERY_TIMEZONE = 'UTC'
    CELERY_IGNORE_RESULT = True
    CELERY_RESULT_PERSISTENT = False
    CELERY_DISABLE_RATE_LIMITS = True
    # TODO: msgpack
    CELERY_ACCEPT_CONTENT = ['pickle', 'json']
class Config:
    """Base Flask configuration (mail + secrets), sourced from the environment."""

    SECRET_KEY = env_config('SECRET_KEY', default='very secret string')
    MAIL_SERVER = env_config('MAIL_SERVER', default='smtp.googlemail.com')
    # Fix: without cast=int an environment override arrived as a string,
    # while SMTP clients expect an integer port (the default 587 was int).
    # NOTE(review): the env var is spelled 'MAILPORT' (no underscore) —
    # kept as-is for backward compatibility with existing deployments.
    MAIL_PORT = env_config('MAILPORT', default=587, cast=int)
    MAIL_USE_TLS = env_config('MAIL_USE_TLS', default='true').lower() in ('true', 'on', '1')
    MAIL_USERNAME = env_config('MAIL_USERNAME')
    MAIL_PASSWORD = env_config('MAIL_PASSWORD')
    FLASKY_MAIL_SUBJECT_PREFIX = '[Flasky]'
    FLASKY_MAIL_SENDER = 'Flasky Admin <*****@*****.**>'
    FLASKY_ADMIN = env_config('FLASKY_ADMIN')
    SQLALCHEMY_TRACK_MODIFICATIONS = False

    @staticmethod
    def init_app(app):
        """Hook for subclasses to perform config-specific app initialization."""
        pass
def remind_users_of_upcoming_flights():
    """Email a SendGrid reminder to every ticket holder whose flight is
    PENDING and departs tomorrow (any time between 00:00 and 23:59)."""
    sg = SendGridAPIClient(env_config('SENDGRID_KEY'))
    tomorrow = datetime.now() + timedelta(days=1)
    # Pending flights departing tomorrow, bounded to the whole calendar day.
    pending_flights = Flight.objects(
        departure_time__gte=tomorrow.replace(hour=0, minute=0),
        departure_time__lte=tomorrow.replace(hour=23, minute=59),
        status=flight_statuses.PENDING
    )
    booked_tickets = FlightTicket.objects(
        flight__in=pending_flights
    )
    for ticket in booked_tickets:
        # SendGrid v3 mail-send payload, one message per ticket.
        message = {
            'personalizations': [
                {
                    'to': [
                        {'email': ticket.user.email}
                    ],
                    'subject': 'Flight reminder'
                }
            ],
            'from': {
                'email': '*****@*****.**',
                'name': 'Flight Booker'
            },
            'content': [
                {
                    'type': 'text/plain',
                    # NOTE(review): '%-I' (unpadded hour) is a glibc strftime
                    # extension — not portable to Windows; confirm deployment OS.
                    'value': ''' Hi {0}, \n\nThis is a reminder of your flight scheduled to depart tomorrow by {1}. '''.format(
                        ticket.user.full_name,
                        ticket.flight.departure_time.strftime('%-I:%M %p')
                    )
                }
            ]
        }
        sg.send(message)
def options():
    """Parse CLI arguments for the Mongrey migration tool; return them as a dict."""
    # TODO: allow a free per-file source/destination: local directory/mailbox, ...
    # NOTE(review): the `version=` kwarg was removed from argparse in Python 3;
    # this presumably targets Python 2 — confirm before porting.
    parser = argparse.ArgumentParser(description='Mongrey Migration',
                                     prog=os.path.basename(sys.argv[0]),
                                     version="mongrey-%s" % (version.__VERSION__),
                                     add_help=True)
    parser.add_argument('-P', '--basepath',
                        dest="basepath",
                        default="/var/rs/addons/postfix/etc",
                        help='Base Path for search file. default: %(default)s')
    parser.add_argument('--settings',
                        dest="settings_path",
                        default=DEFAULT_CONFIG['settings_path'],
                        help='load settings from YAML file')
    parser.add_argument('--db',
                        dest="mongrey_db",
                        default=env_config('MONGREY_DB', 'sqlite:////var/lib/mongrey/mongrey.db'),
                        help='Mongrey DB. default: %(default)s')
    parser.add_argument('-D', '--debug', action="store_true")
    parser.add_argument('-n', '--dry-run', dest="dry_run", action="store_true")
    # Positional argument: which system to migrate from.
    parser.add_argument(choices=[
                            'radicalspam',
                            'postgrey',
                        ],
                        dest='command',
                        help="Migration From ?")
    # NOTE(review): _get_kwargs() is private argparse API; vars(namespace)
    # is the public equivalent.
    return dict(parser.parse_args()._get_kwargs())
def options():
    """Parse CLI arguments for the Mongrey migration tool; return them as a dict.

    Variant of the sibling tool: the default DB is a relative sqlite path.
    """
    # TODO: allow a free per-file source/destination: local directory/mailbox, ...
    # NOTE(review): the `version=` kwarg was removed from argparse in Python 3;
    # this presumably targets Python 2 — confirm before porting.
    parser = argparse.ArgumentParser(description='Mongrey Migration',
                                     prog=os.path.basename(sys.argv[0]),
                                     version="mongrey-%s" % (version.__VERSION__),
                                     add_help=True)
    parser.add_argument('-P', '--basepath',
                        dest="basepath",
                        default="/var/rs/addons/postfix/etc",
                        help='Base Path for search file. default: %(default)s')
    parser.add_argument('--settings',
                        dest="settings_path",
                        default=DEFAULT_CONFIG['settings_path'],
                        help='load settings from YAML file')
    parser.add_argument('--db',
                        dest="mongrey_db",
                        default=env_config('MONGREY_DB', 'sqlite:///mongrey.db'),
                        help='Mongrey DB. default: %(default)s')
    parser.add_argument('-D', '--debug', action="store_true")
    parser.add_argument('-n', '--dry-run', dest="dry_run", action="store_true")
    # Positional argument: which system to migrate from.
    parser.add_argument(choices=[
                            'radicalspam',
                            'postgrey',
                        ],
                        dest='command',
                        help="Migration From ?")
    # NOTE(review): _get_kwargs() is private argparse API; vars(namespace)
    # is the public equivalent.
    return dict(parser.parse_args()._get_kwargs())
# -*- coding: utf-8 -*- import os import sys import argparse from decouple import config as env_config from .. import version from .. import utils DEFAULT_CONFIG = { 'settings_path': env_config('MONGREY_SERVER_SETTINGS', None), 'db_settings': { 'host': env_config('MONGREY_DB', 'sqlite:////var/lib/mongrey/mongrey.db'), } } def radicalspam_migration(basepath=None, models=None, dry_run=False): from . import radicalspam results = {} files_found = [] files_not_found = [] for f, settings in radicalspam.RS_FILES.iteritems(): model_klass = models[settings[0]] func = settings[1] filepath = os.path.abspath(os.path.join(basepath, f))
# -*- coding: utf-8 -*- from hashlib import md5 from decouple import config as env_config from celery import Celery from celery.utils.log import get_task_logger from widukind_tasks.mongolock import MongoLock, MongoLockLocked from widukind_tasks import utils logger = get_task_logger(__name__) CELERY_MODE = env_config('WIDUKIND_CELERY_MODE', 'prod') app = Celery() task_locker = None class Config: BROKER_URL = env_config('WIDUKIND_CELERY_BROKER', 'redis://localhost:6379/0') #CELERY_RESULT_BACKEND = 'redis://localhost:6379/0' BROKER_TRANSPORT_OPTIONS = { 'fanout_prefix': True, 'fanout_patterns': True, } #BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 3600} # 1 hour. CELERY_ENABLE_UTC = True CELERY_TIMEZONE = 'UTC' CELERY_IGNORE_RESULT = True
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os

from decouple import config as env_config, Csv

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# No default on purpose: decouple raises if SECRET_KEY is unset.
SECRET_KEY = env_config('SECRET_KEY')

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env_config('DEBUG', cast=bool)

# Comma-separated env value parsed into a Python list.
ALLOWED_HOSTS = env_config('ALLOWED_HOSTS', cast=Csv())


# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
# -*- coding: utf-8 -*- import os import sys import argparse from decouple import config as env_config from .. import version from .. import utils DEFAULT_CONFIG = { 'settings_path': env_config('MONGREY_SERVER_SETTINGS', None), 'db_settings': { 'host': env_config('MONGREY_DB', 'sqlite:///mongrey.db'), 'options': { 'threadlocals': True } }, } def radicalspam_migration(basepath=None, models=None, dry_run=False): from . import radicalspam results = {} files_found = [] files_not_found = []
import datetime

import bcrypt
from decouple import config as env_config
from mongoengine import signals
import jwt

from api.app import db

# JWT signing secret, read once at import time.
secret = env_config('SECRET_KEY')


class User(db.Document):
    """Application user persisted in MongoDB via mongoengine."""

    full_name = db.StringField(required=True)
    email = db.EmailField(required=True, unique=True)
    phone_number = db.StringField(required=True)
    address = db.StringField(required=True)
    # Stored bcrypt-hashed; see pre_save below.
    password = db.StringField(required=True)
    passport_photo = db.FileField()
    passport_photo_url = db.StringField()
    created_at = db.DateTimeField(default=datetime.datetime.now)
    updated_at = db.DateTimeField(default=datetime.datetime.now)

    @classmethod
    def pre_save(cls, sender, document, **kwargs):
        """mongoengine pre_save signal handler: bcrypt-hash the password.

        Hashes when 'password' is among the changed fields, or when no
        changed fields are reported (presumably a brand-new document —
        confirm against mongoengine's change tracking).
        """
        changed_fields = document._get_changed_fields()
        if 'password' in changed_fields or not changed_fields:
            hashed_password = bcrypt.hashpw(document.password.encode('utf-8'),
                                            bcrypt.gensalt())
            document.password = hashed_password.decode('utf-8')
def get_mongo_url():
    """Return the MongoDB URL from WIDUKIND_MONGODB_URL, or the local default."""
    return env_config("WIDUKIND_MONGODB_URL", default="mongodb://localhost/widukind")
def get_trainer_config():
    """Build the training configuration (env-variable overridable).

    Returns the full CUDA config, or a small CPU-sized debug config when
    torch reports CUDA as unavailable.
    """
    config = AttrDict({
        'n_epochs': env_config('N_EPOCHS', default=3, cast=int),
        'writer_comment': env_config('WRITER_COMMENT', default='', cast=str),
        'train_batch_size': env_config('TRAIN_BATCH_SIZE', default=128, cast=int),
        'batch_split': env_config('BATCH_SPLIT', default=32, cast=int),
        'test_batch_size': env_config('TEST_BATCH_SIZE', default=8, cast=int),
        'lr': 6.25e-5,
        'lr_warmup': 0.002,  # a fraction of total training (epoch * train_set_length) if linear_schedule == True
        'weight_decay': 0.01,
        's2s_weight': env_config('S2S_WEIGHT', default=1, cast=float),
        'lm_weight': env_config('LM_WEIGHT', default=0, cast=float),
        'risk_weight': env_config('RISK_WEIGHT', default=0, cast=float),
        'hits_weight': env_config('HITS_WEIGHT', default=0, cast=float),
        'negative_samples': env_config('NEGATIVE_SAMPLES', default=0, cast=int),
        'n_jobs': 4,
        'label_smoothing': env_config('LABEL_SMOOTHING', default=0.1, cast=float),
        'clip_grad': None,
        'test_period': 1,
        'seed': 0,
        'device': 'cuda',
        'zero_shot': env_config('ZERO_SHOT', default=False, cast=bool),
        'persona_augment': env_config('PERSONA_AUGMENT', default=False, cast=bool),
        'persona_aug_syn_proba': env_config('PERSONA_AUG_SYN_PROBA', default=0.0, cast=float),
        'apex_loss_scale': env_config('APEX_LOSS_SCALE', default=None, cast=cast2(str)),  # e.g. '128', 'dynamic'
        'linear_schedule': env_config('LINEAR_SCHEDULE', default=True, cast=bool),
        'evaluate_full_sequences': env_config('EVALUATE_FULL_SEQUENCES', default=True, cast=bool),
        'limit_eval_size': env_config('LIMIT_EVAL_TIME', default=-1, cast=int),
        'limit_train_size': env_config('LIMIT_TRAIN_TIME', default=-1, cast=int),
        'risk_metric': env_config('RISK_METRIC', default='f1', cast=str),
        # e.g. './checkpoints/last_checkpoint' — now that several experiments are
        # saved, put the path of the checkpoint file you want to load here.
        'load_last': env_config(
            'LOAD_LAST',
            default='',
            cast=str
        ),
        # Provenance of the current git checkout (requires `repo` in scope).
        'repo_id': str(repo),
        'repo_sha': str(repo.head.object.hexsha),
        'repo_branch': str(repo.active_branch),
        'openai_parameters_dir': './parameters',
        'last_checkpoint_path': 'last_checkpoint',  # these are now in the ./runs/XXX/ experiments folders
        'eval_references_file': 'eval_references_file',
        'eval_predictions_file': 'eval_predictions_file',
        'interrupt_checkpoint_path': 'interrupt_checkpoint',  # these are now in the ./runs/XXX/ experiments folders
        'train_datasets': env_config('TRAIN_DATASETS', default='datasets/ConvAI2/train_self_original.txt', cast=Csv(str)),
        'train_datasets_cache': 'train_cache.bin',
        'test_datasets': env_config('TEST_DATASETS', default='datasets/ConvAI2/valid_self_original.txt', cast=Csv(str)),
        'test_datasets_cache': 'test_cache.bin'
    })

    # Lightweight overrides for CPU-only runs (local debugging).
    local_config = deepcopy(config)
    local_config.train_batch_size = 2
    local_config.batch_split = 1
    local_config.test_batch_size = 3
    local_config.negative_samples = 2
    local_config.n_jobs = 0
    local_config.device = 'cpu'
    local_config.risk_weight = 1
    local_config.zero_shot = True
    local_config.fp16 = False
    local_config.train_datasets_cache = './datasets/train_datasets_cache.bin'
    local_config.test_datasets_cache = './datasets/test_datasets_cache.bin'

    return config if torch.cuda.is_available() else local_config
# Third-Party libraries from flask import Flask, jsonify from flask_restx import Api from flask_cors import CORS from decouple import config as env_config from marshmallow import ValidationError as MarshmallowValidationError # middlewares from api import api_blueprint from api.middlewares import middleware_blueprint from api.middlewares.base_validator import ValidationError # Config from config import config config_name = env_config('FLASK_ENV', 'production') api = Api(api_blueprint, doc=False) def initialize_errorhandlers(application): """Initialize error handlers""" application.register_blueprint(middleware_blueprint) application.register_blueprint(api_blueprint) def create_app(config=config[config_name]): """creates a flask app object from a config object""" app = Flask(__name__) CORS(app) app.config.from_object(config)
logger = logging.getLogger(__name__)

# Candidate settings-file locations (system-wide first here; presumably the
# loader takes the first match — verify against the loading code).
DEFAULT_SETTINGS_PATH = [
    '/etc/mongrey/server.yml',
    '~/mongrey/server.yml',
]

DEFAULT_FIXTURES_PATH = [
    '/var/lib/mongrey/server-fixtures.yml',
    '~/mongrey/server-fixtures.yml',
]

DEFAULT_CONFIG = {
    'settings_path': env_config('MONGREY_SERVER_SETTINGS', None),
    'fixtures_path': env_config('MONGREY_SERVER_FIXTURES', None),
    'host': env_config('MONGREY_HOST', '127.0.0.1', cast=str),
    'port': env_config('MONGREY_PORT', 9999, cast=int),
    # Comma-separated string parsed by utils.to_list.
    'allow_hosts': env_config('MONGREY_ALLOW_HOSTS', '127.0.0.1, ::1', cast=utils.to_list),
    'security_by_host': env_config('MONGREY_SECURITY_BY_HOST', True, cast=bool),
    # Greenlet pool size and listen backlog for the gevent server.
    'spawn': env_config('MONGREY_SPAWN', 50, cast=int),
    'backlog': env_config('MONGREY_BACKLOG', 256, cast=int),
    'connection_timeout':
logger = logging.getLogger(__name__)

# Candidate settings-file locations (per-user path first in this variant).
DEFAULT_SETTINGS_PATH = [
    '~/mongrey/server.yml',
    '/etc/mongrey/server.yml',
]

# TODO: /var/lib/mongrey/fixtures.yml and/or /opt/mongrey/fixtures.yml
DEFAULT_FIXTURES_PATH = [
    '~/mongrey/server-fixtures.yml',
    '/etc/mongrey/server-fixtures.yml',
]

DEFAULT_CONFIG = {
    'settings_path': env_config('MONGREY_SERVER_SETTINGS', None),
    'fixtures_path': env_config('MONGREY_SERVER_FIXTURES', None),
    'host': env_config('MONGREY_HOST', '127.0.0.1', cast=str),
    'port': env_config('MONGREY_PORT', 9999, cast=int),
    # Comma-separated string parsed by utils.to_list.
    'allow_hosts': env_config('MONGREY_ALLOW_HOSTS', '127.0.0.1, ::1', cast=utils.to_list),
    'security_by_host': env_config('MONGREY_SECURITY_BY_HOST', True, cast=bool),
    'spawn': env_config('MONGREY_SPAWN', 50, cast=int),
    'backlog': env_config('MONGREY_BACKLOG', 256, cast=int),
import os
import slack
from decouple import config as env_config

# Slack API token, read from the environment at import time.
SLACK_TOKEN = env_config('SLACK_TOKEN')


class Channel:
    """Lightweight wrapper around a Slack channel-info payload."""

    def __init__(self, channel_info):
        self.id = channel_info["id"]
        self.name = channel_info["name"]
        self.members = channel_info["members"]


class SlackHelper:
    """Thin convenience wrapper over the Slack WebClient."""

    def __init__(self):
        self.slack_token = SLACK_TOKEN
        self.slack_client = slack.WebClient(SLACK_TOKEN, timeout=30)

    def get_channel_info(self, channel="CNETSCVL7"):
        """Fetch info for a channel and print its member list.

        Generalized: the channel id used to be hard-coded; the default
        preserves the original behavior for existing callers.

        NOTE(review): channels_info is a legacy Slack API method —
        conversations_info is its replacement; confirm before migrating.
        """
        channel_object = self.slack_client.channels_info(
            channel=channel
        )
        print(channel_object['channel']['members'])
        return ""
import os

from decouple import config as env_config

# Process working directory vs. the directory containing this file.
BASEDIR = os.getcwd()
WORKDIR = os.path.abspath(os.path.dirname(__file__))

FLASK_ENV = env_config("FLASK_ENV")
FLASK_APP = env_config("FLASK_APP")
# NOTE(review): returned as a string — add cast=int if a numeric port is expected.
FLASK_RUN_PORT = env_config("FLASK_RUN_PORT")
SECRET_KEY = env_config("SECRET_KEY")
# NOTE(review): DEBUG is hard-coded on — disable for production deployments.
DEBUG = True
JWT_SECRET_KEY = env_config("JWT_SECRET_KEY")
LOG_LEVEL = "DEBUG"
class Config(object):
    """Base configuration; environment-driven with development defaults."""

    ENV = env_config('ENV', default='development')
    DEBUG = env_config('DEBUG', default=False, cast=bool)
from decouple import config as env_config
from api import create_app
from flask_script import Manager

# Build the app for the environment named in ENV (no default: raises if unset).
app = create_app(env_config('ENV'))
manager = Manager(app)


@manager.command
def run_server():
    """Run the dev server on HOST:5500 with debug enabled (not for production)."""
    app.run(env_config('HOST'), port=5500, debug=True)


if __name__ == "__main__":
    manager.run()
# -*- coding: utf-8 -*- import os from decouple import config as env_config bind = '0.0.0.0:8080' daemon = False chdir = "/code" preload = env_config('WIDUKIND_API_PRELOAD', False, cast=bool) proxy_protocol = env_config('WIDUKIND_API_PROXY_PROTOCOL', True, cast=bool) proxy_allow_ips = env_config('WIDUKIND_API_PROXY_ALLOW_IPS', "127.0.0.1") forwarded_allow_ips = env_config('WIDUKIND_API_FORWARDED_ALLOW_IPS', "*") workers = env_config('WIDUKIND_API_WORKERS', 1, cast=int) worker_class = env_config('WIDUKIND_API_WORKER_CLASS', 'gevent_wsgi') worker_connections = env_config('WIDUKIND_API_WORKER_CONNECTIONS', 5, cast=int) backlog = env_config('WIDUKIND_API_BACKLOG', 2048, cast=int) timeout = env_config('WIDUKIND_API_TIMEOUT', 30, cast=int) keepalive = env_config('WIDUKIND_API_KEEPALIVE', 2, cast=int)
def run_server():
    """Launch the Flask dev server on the configured HOST, port 5500, debug on."""
    host = env_config('HOST')
    app.run(host, port=5500, debug=True)
def get_es_url():
    """Return the Elasticsearch URL, preferring the WIDUKIND_ES_URL env var."""
    return env_config("WIDUKIND_ES_URL", default="http://localhost:9200")
import os
from flask_migrate import Migrate
from app import create_app, db
from app.models import User, Role
from decouple import config as env_config

# Select the config by the FLASK_CONFIG env var; 'default' when unset.
app = create_app(env_config('FLASK_CONFIG', default='default'))
migrate = Migrate(app, db)


@app.shell_context_processor
def make_shell_context():
    """Expose the db handle and core models inside `flask shell` sessions."""
    return dict(db=db, User=User, Role=Role)


@app.cli.command()
def test():
    """Run the unit tests."""
    import unittest
    tests = unittest.TestLoader().discover('tests')
    unittest.TextTestRunner(verbosity=2).run(tests)
def get_model_config():
    """Build the transformer model configuration, starting from the OpenAI
    transformer defaults with environment-variable overrides."""
    default_config = openai_transformer_config()
    config = AttrDict({
        'bpe_vocab_path': './parameters/bpe.vocab',
        'bpe_codes_path': './parameters/bpe.code',
        'checkpoint_path': './checkpoints/last_checkpoint',  # Keep the checkpoint folder for the checkpoints of the agents
        'n_layers': default_config.n_layers,
        'n_pos_embeddings': 512,
        'embeddings_size': default_config.embeddings_size,
        'n_heads': default_config.n_heads,
        'dropout': default_config.dropout,
        'embed_dropout': default_config.embed_dropout,
        'attn_dropout': default_config.attn_dropout,
        'ff_dropout': default_config.ff_dropout,
        'normalize_embeddings': env_config('NORMALIZE_EMBEDDINGS', default=False, cast=bool),
        'max_seq_len': 128,
        # Beam-search decoding knobs.
        'beam_size': env_config('BEAM_SIZE', default=3, cast=int),
        'diversity_coef': env_config('DIVERSITY_COEF', default=0, cast=int),
        'diversity_groups': env_config('DIVERSITY_GROUP', default=1, cast=int),
        'annealing_topk': env_config('ANNEALING_TOPK', default=None, cast=cast2(int)),
        'annealing': env_config('ANNEALING', default=0, cast=float),
        'length_penalty': env_config('LENGTH_PENALTY', default=0.6, cast=float),
        'n_segments': None,
        'constant_embedding': env_config('CONSTANT_EMBEDDINGS', default=False, cast=bool),
        'multiple_choice_head': env_config('MULTIPLE_CHOICE_HEAD', default=False, cast=bool),
        'share_models': env_config('SHARE_MODELS', default=True, cast=bool),
        'successive_attention': env_config('SUCCESSIVE_ATTENTION', default=False, cast=bool),
        'sparse_embeddings': env_config('SPARSE_EMBEDDINGS', default=True, cast=bool),
        'shared_attention': env_config('SHARED_ATTENTION', default=True, cast=bool),
        'dialog_embeddings': env_config('DIALOG_EMBEDDINGS', default=True, cast=bool),
        'single_input': env_config('SINGLE_INPUT', default=False, cast=bool),
        'use_start_end': env_config('USE_START_END', default=False, cast=bool),
        'apex_level': env_config('APEX_LEVEL', default=None, cast=cast2(str)),  # 'O0', 'O1', 'O2', 'O3',
        'bs_temperature': env_config('BS_TEMPERATURE', default=1, cast=float),
        'bs_nucleus_p': env_config('BS_NUCLEUS_P', default=0, cast=float)
    })
    return config
class DevelopmentConfig(Config):
    """Development configuration: debug enabled, local SQLite fallback DB."""

    DEBUG = True
    SQLALCHEMY_DATABASE_URI = env_config(
        'DEV_DATABASE_URL',
        default='sqlite:///' + os.path.join(basedir, 'data-dev.sqlite'),
    )
try:
    import multiprocessing
    # Classic gunicorn sizing heuristic: (2 * cores) + 1 workers.
    CPU_COUNT = (multiprocessing.cpu_count() * 2) + 1
except (ImportError, NotImplementedError):
    # Fix: a bare `except:` also swallowed SystemExit/KeyboardInterrupt.
    # cpu_count() raises NotImplementedError when the count is undeterminable.
    CPU_COUNT = 1

from decouple import config as env_config

# Gunicorn settings for the shorturl service; every tunable can be
# overridden through SHORTURL_* environment variables.
bind = '0.0.0.0:8080'
daemon = False
chdir = "/code"

proxy_protocol = env_config('SHORTURL_PROXY_PROTOCOL', True, cast=bool)
proxy_allow_ips = env_config('SHORTURL_PROXY_ALLOW_IPS', "127.0.0.1")
forwarded_allow_ips = env_config('SHORTURL_FORWARDED_ALLOW_IPS', "*")
workers = env_config('SHORTURL_WORKERS', CPU_COUNT, cast=int)
worker_class = env_config('SHORTURL_WORKER_CLASS', 'gevent_wsgi')
worker_connections = env_config('SHORTURL_WORKER_CONNECTIONS', 200, cast=int)
backlog = env_config('SHORTURL_BACKLOG', 2048, cast=int)
timeout = env_config('SHORTURL_TIMEOUT', 30, cast=int)
class TestingConfig(Config):
    """Testing configuration: in-memory SQLite unless TEST_DATABASE_URL is set."""

    TESTING = True
    SQLALCHEMY_DATABASE_URI = env_config('TEST_DATABASE_URL', default='sqlite://')
class ProductionConfig(Config):
    """Production configuration: DATABASE_URL, or a local SQLite file fallback."""

    SQLALCHEMY_DATABASE_URI = env_config(
        'DATABASE_URL',
        default='sqlite:///' + os.path.join(basedir, 'data.sqlite'),
    )
from pymongo import MongoClient
import numpy as np
import pandas as pd
from sklearn.feature_extraction.text import TfidfVectorizer
from nltk.tokenize import word_tokenize
from nltk.corpus import stopwords
from nltk.tokenize import RegexpTokenizer
import string
from decouple import config as env_config

# Mongo connection; connect=False defers the actual connection until the
# first operation (safe for pre-fork servers).
MONGO_URL = env_config('MONGO_URL')
client = MongoClient(MONGO_URL, connect=False)
db = client.bacp_counselling


def get_therapist_from_db():
    '''
    Query the DB and return all the therapist saved there
    '''
    trying_collection = db.trying_collection
    all_therapist = list(trying_collection.find({}))
    return all_therapist


def get_initial_score(all_therapist, language, ethnicity, lgbt, gender):
    '''
    loop through the therapists and score them based on language, ethnicity and lgbtq choice
    '''