def send_html_mail(you, subject, food_page):
    """Email the HTML file at *food_page* to *you*, inline and as an attachment.

    Credentials come from the FOOD_EMAIL / FOOD_EMAIL_PASS environment
    variables; the message goes out through Zoho SMTP over STARTTLS.

    :param you: recipient address
    :param subject: message subject line
    :param food_page: path to the HTML file to send
    """
    me = env('FOOD_EMAIL')
    me_pass = env('FOOD_EMAIL_PASS')

    msg = MIMEMultipart('alternative')
    msg['Subject'] = subject
    msg['From'] = me
    msg['To'] = you

    with open(food_page, 'r') as f:
        html_file = f.read()

    # Attach the raw HTML file so the recipient can save it as-is.
    a = MIMEBase('application', 'octet-stream')
    a.set_payload(html_file)
    a.add_header('Content-Disposition', 'attachment', filename=food_page)
    msg.attach(a)

    # Also include the HTML as the displayable message body.
    msg.attach(MIMEText(html_file, 'html'))

    s = smtplib.SMTP("smtp.zoho.com", 587)
    try:
        s.ehlo()
        s.starttls()
        s.login(me, me_pass)
        s.sendmail(me, you, msg.as_string())
    finally:
        # BUG FIX: the original called close(), which drops the socket without
        # sending the SMTP QUIT command; quit() terminates the session cleanly
        # and is now guaranteed to run even if login/sendmail raises.
        s.quit()
def upload_option_precendence(path, version, token):
    """Resolve path/version/token: CLI options win, RSDOC_* env vars are the fallback.

    Aborts with a red usage message listing every missing value.
    Returns the resolved (path, version, token) tuple.
    """
    final_path = path if path is not None else env('RSDOC_PATH', default=None)
    final_version = version if version is not None else env('RSDOC_VERSION', default=None)
    final_token = token if token is not None else env('RSDOC_TOKEN', default=None)

    # Handle missing environment variables or command line options
    # to let the user know what they are doing wrong.
    errors = []
    if final_path is None:
        errors.append(' RSDOC_PATH or --path needed')
    if final_version is None:
        errors.append(' RSDOC_VERSION or --version needed')
    if final_token is None:
        errors.append(' RSDOC_TOKEN or --token needed')

    if errors:
        click.secho('rsdoc usage errors:', fg='red')
        click.secho("\n".join(errors), fg='red')
        raise click.Abort()

    return final_path, final_version, final_token
def redis_connect(privileged=False, decode_responses=True, **kwargs):
    """Connect to the Redis DB with the right credentials.

    :param privileged: when True, authenticate as the agent user (REDIS_USER
        or AGENT_ID); otherwise connect with the public '******' credentials.
    :param decode_responses: passed through to redis.Redis — when True, keys
        and values are decoded as UTF-8 strings instead of bytes.
    :param kwargs: extra keyword arguments forwarded to redis.Redis.
    :return: a configured redis.Redis client.
    """
    # Fetch and split REDIS_ADDRESS ("host:port") once, not twice.
    redis_host, redis_port = os.getenv('REDIS_ADDRESS', '127.0.0.1:6379').split(':', 1)
    if privileged:
        redis_username = os.getenv('REDIS_USER', os.getenv('AGENT_ID'))
        if not redis_username:
            print(
                "redis_connect: REDIS_USER and AGENT_ID are not set in the environment!",
                file=sys.stderr)
            # Try to parse the node agent environment as fallback:
            env.read_envfile('/var/lib/nethserver/node/state/agent.env')
            redis_username = env('AGENT_ID', default='default')
            redis_password = env('REDIS_PASSWORD', default='nopass')
        else:
            redis_password = os.environ['REDIS_PASSWORD']  # Fatal if missing!
    else:
        redis_username = '******'
        redis_password = '******'
    return redis.Redis(
        host=redis_host,
        port=int(redis_port),  # redis-py expects an int port, not the raw string
        db=0,
        username=redis_username,
        password=redis_password,
        decode_responses=decode_responses,
        # we assume Redis keys and value strings are encoded UTF-8. Enabling this
        # option implicitly converts to UTF-8 strings instead of binary strings
        # (e.g. {b'key': b'value'} != {'key':'value'})
        **kwargs)
def track_amplitude(chat_id: int, event: str, event_properties=None, timestamp=None):
    """Send *event* for the user behind *chat_id* to Amplitude.

    Silently does nothing when AMPLITUDE_API_KEY is not configured.
    """
    api_key = env('AMPLITUDE_API_KEY', default=None)
    if api_key is None:
        return

    amplitude = AmplitudeLogger(api_key)
    user = user_get_by_chat_id(chat_id=chat_id)
    # Attach a snapshot of the user's profile as Amplitude user properties.
    profile = {
        'Telegram chat ID': user.chat_id,
        'Name': user.full_name,
        'Telegram user name': user.user_name,
        'Daily catalog request limit': user.daily_catalog_requests_limit,
        'Subscribed to WB categories updates': user.subscribe_to_wb_categories_updates,
    }
    amplitude.log(
        user_id=chat_id,
        event=event,
        user_properties=profile,
        event_properties=event_properties,
        timestamp=timestamp,
    )
def vad_task(**kwargs):
    """Airflow task: run voice-activity detection (VAD) on resampled audio.

    Pulls the upstream resample task's output via XCom, shells out to the
    VAD script, and returns metadata for downstream tasks.
    """
    ti = kwargs['ti']
    # Result of the per-microphone resample task that ran before us.
    parent_data = ti.xcom_pull(task_ids='resample_%s' % kwargs['params']['mic_name'])
    resample_dir = parent_data['output_dir']
    output_dir = parent_data['file_dir'] + '/1_clean_vad'
    create_dir_if_not_exists(output_dir)
    env.read_envfile()
    vad_dir = env("VAD_DIR")
    vad_script = env("VAD_SCRIPT")
    my_env = os.environ.copy()
    # Pin a minimal, predictable PATH for the subprocess.
    my_env["PATH"] = "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
    # Unique intermediate id: run timestamp + session number + mic name.
    intm_id = kwargs['ts_nodash'] + 'session' + str(
        kwargs['params']['session_num']) + kwargs['params']['mic_name']
    vad_command = ["bash", vad_script, resample_dir, output_dir, intm_id]
    # Raises CalledProcessError (failing the task) if the script exits non-zero.
    subprocess.check_call(vad_command, env=my_env, cwd=vad_dir)
    return {
        'task_type': 'vad',
        'output_dir': output_dir,
        'file_id': parent_data['file_id']
    }
class Config(object):
    """
    The base configuration option. Contains the defaults.
    """
    DEBUG = False
    DEVELOPMENT = False
    STAGING = False
    PRODUCTION = False
    TESTING = False
    ETAG = True
    CSRF_ENABLED = True
    SERVER_URL = env('SERVER_URL', default=None)
    SQLALCHEMY_DATABASE_URI = env('DATABASE_URL', default=None)
    DATABASE_QUERY_TIMEOUT = 0.1
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    # API configs
    # NOTE: this runs at import time — the whole process exits if
    # DATABASE_URL is unset or empty.
    if not SQLALCHEMY_DATABASE_URI:
        print('`DATABASE_URL` either not exported or empty')
        exit()
    BASE_DIR = basedir
    FORCE_SSL = os.getenv('FORCE_SSL', 'no') == 'yes'
    if FORCE_SSL:
        PREFERRED_URL_SCHEME = 'https'
class TestingConfig(Config):
    """
    The configuration for a test suit
    """
    TESTING = True
    SQLALCHEMY_RECORD_QUERIES = True
    # Point at the dedicated test server/database instead of the defaults.
    SERVER_URL = env('TEST_SERVER_URL', default=None)
    SQLALCHEMY_DATABASE_URI = env('TEST_DATABASE_URL', default=None)
def __init__(self, project_id):
    """Load '.env' and open an authenticated GitLab API client for *project_id*."""
    Env.read_envfile('.env')
    debug('Initializing')
    self.project_id = project_id
    self.gitlab = gitlab.Gitlab(env('GITLAB_ENDPOINT'), env('GITLAB_TOKEN'))
def load_conf():
    """Build the ClickHouse connection config from CH_* environment variables."""
    clickhouse = {
        'host': env("CH_URL"),
        'user': env("CH_USER"),
        'password': env("CH_PASS"),
        'database': env("CH_DB"),
        'port': 9440,  # secure native-protocol port
    }
    return {'clickhouse': clickhouse}
def create_task(title, description=None, color_id=None):
    """Build a Kanboard 'createTask' request for the configured project/column/swimlane."""
    project_id = env('KANBOARD_PROJECT_ID')
    column_id = env('KANBOARD_COLUMN_ID', default=None)
    swimlane_id = env('KANBOARD_SWIMLANE_ID', default=None)
    return Request(
        'createTask',
        title=title,
        description=description,
        color_id=color_id,
        project_id=project_id,
        column_id=column_id,
        swimlane_id=swimlane_id,
    )
def test_dict():
    """env.dict() and env(cast=dict) parse 'k=v,k=v' strings; subcast casts values."""
    dict_str = dict(key1='val1', key2='val2')
    assert_type_value(dict, dict_str, env.dict('DICT_STR'))
    assert_type_value(dict, dict_str, env('DICT_STR', cast=dict))
    dict_int = dict(key1=1, key2=2)
    assert_type_value(dict, dict_int, env('DICT_INT', cast=dict, subcast=int))
    assert_type_value(dict, dict_int, env.dict('DICT_INT', subcast=int))
    # An empty variable parses to an empty dict.
    assert_type_value(dict, {}, env.dict('BLANK'))
class Config(object):
    """
    The base configuration option. Contains the defaults.
    """
    DEBUG = False
    DEVELOPMENT = False
    STAGING = False
    PRODUCTION = False
    TESTING = False
    CACHING = False
    PROFILE = False
    SQLALCHEMY_RECORD_QUERIES = False
    FLASK_ADMIN_SWATCH = 'lumen'
    VERSION = VERSION_NAME
    SQLALCHEMY_TRACK_MODIFICATIONS = True
    ERROR_404_HELP = False
    CSRF_ENABLED = True
    SERVER_NAME = env('SERVER_NAME', default=None)
    CORS_HEADERS = 'Content-Type'
    SQLALCHEMY_DATABASE_URI = env('DATABASE_URL', default=None)
    SERVE_STATIC = env.bool('SERVE_STATIC', default=False)
    DATABASE_QUERY_TIMEOUT = 0.1
    SENTRY_DSN = env('SENTRY_DSN', default=None)
    ENABLE_ELASTICSEARCH = env.bool('ENABLE_ELASTICSEARCH', default=False)
    ELASTICSEARCH_HOST = env('ELASTICSEARCH_HOST', default='localhost:9200')
    REDIS_URL = env('REDIS_URL', default='redis://localhost:6379/0')
    # API configs
    SOFT_DELETE = True
    PROPOGATE_ERROR = env.bool('PROPOGATE_ERROR', default=False)
    DASHERIZE_API = True
    API_PROPOGATE_UNCAUGHT_EXCEPTIONS = env.bool(
        'API_PROPOGATE_UNCAUGHT_EXCEPTIONS', default=True)
    ETAG = True
    # NOTE: runs at import time — the whole process exits when DATABASE_URL
    # is unset or empty.
    if not SQLALCHEMY_DATABASE_URI:
        print('`DATABASE_URL` either not exported or empty')
        exit()
    BASE_DIR = basedir
    FORCE_SSL = os.getenv('FORCE_SSL', 'no') == 'yes'
    # Local static-file serving paths (only used when SERVE_STATIC is on).
    if SERVE_STATIC:
        UPLOADS_FOLDER = BASE_DIR + '/static/uploads/'
        TEMP_UPLOADS_FOLDER = BASE_DIR + '/static/uploads/temp/'
        UPLOAD_FOLDER = UPLOADS_FOLDER
        STATIC_URL = '/static/'
        STATIC_ROOT = 'staticfiles'
        STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'), )
    if FORCE_SSL:
        PREFERRED_URL_SCHEME = 'https'
class Config:
    """Application settings resolved from the environment."""
    DEBUG: bool = env('DEBUG', cast=bool, default=False)
    SQLALCHEMY_URL = env('SQLALCHEMY_URL')  # required — raises if unset
    # The raw string is converted to a pytz timezone object after parsing.
    TIMEZONE: tzinfo = env('TIMEZONE', default='America/New_York',
                           postprocessor=lambda val: pytz.timezone(val))
def do_something():
    """Greet the user after validating the PYDUCK_USER/PYDUCK_PASSWORD credentials."""
    credentials = (env("PYDUCK_USER", default=""),
                   env("PYDUCK_PASSWORD", default=""))
    if not auth.is_user_valid(*credentials):
        print("You are NOT authorized to use this library")
        return
    print("Welcome to the library")
def getpickles(env_key):
    "Helper to unpickle from file at env_key, returns empty list on errors"
    try:
        # BUG FIX: the original left the file handle open (open() inside
        # pickle.load with no close); a context manager closes it reliably.
        with open(env(env_key), 'rb') as fh:
            return pickle.load(fh)
    except FileNotFoundError:
        app.logger.warning(
            'Could not load cached values from "{}"->{!r}'.format(
                env_key, env(env_key)))
        return []
def read_settings_async():
    """Read MongoDB connection settings from the process environment / .env file."""
    from envparse import env
    env.read_envfile()
    return {
        "connection_string": env("MONGODB_CONNECTION_STRING"),
        "db": env("MONGODB_DB"),
    }
class Props:
    """DNS proxy settings; second positional arg of env() is the fallback default."""
    # Upstream DNS-over-TLS resolver.
    RESOLVER_HOSTNAME = env('RESOLVER_HOSTNAME', "cloudflare-dns.com")
    RESOLVER_IP = env('RESOLVER_IP', "1.1.1.1")
    RESOLVER_PORT = int(env('RESOLVER_PORT', 853))  # DoT port
    # Local listening socket.
    APP_BIND_HOST = env('APP_BIND_HOST', "0.0.0.0")
    APP_BIND_PORT = int(env('APP_BIND_PORT', 53))
    # Seconds to wait on worker threads.
    THREAD_TIMEOUT = 1
def read_settings_async():
    """Read ClickHouse connection settings from the process environment / .env file."""
    from envparse import env
    env.read_envfile()
    return {
        "url": env("CH_URL"),
        "user": env("CH_USER"),
        "password": env("CH_PASS"),
    }
def detect_mp_by_job_id(job_id: str):
    """Map a Scrapinghub job id ('project/spider/job') to its marketplace.

    :param job_id: job identifier whose middle segment is the spider id.
    :return: (code, display name, transformer) for a known spider, or
        (None, None, None) for unknown spiders or malformed ids.
    """
    matches = re.findall(r'\d+\/(\d+)\/\d+', job_id)
    if not matches:
        # BUG FIX: the original indexed [0] unconditionally and raised
        # IndexError on ids that don't match the pattern; treat them as
        # unknown, consistent with the existing fall-through return.
        return None, None, None
    spider = matches[0]
    if spider == env('SH_WB_SPIDER'):
        return 'WB', 'Wildberries', wb_transformer()
    if spider == env('SH_OZON_SPIDER'):
        return 'Ozon', 'Ozon', ozon_transformer()
    return None, None, None
def say_hi():
    """Quack back when PYDUCK_USER/PYDUCK_PASSWORD identify a known user."""
    user = env("PYDUCK_USER", default="")
    password = env("PYDUCK_PASSWORD", default="")
    if not auth.is_user_valid(user, password):
        print(
            "I do not quack with strangers! (you are NOT authorized to use this library)"
        )
        return
    print("Quack-quack!")
def test_list():
    """env.list() and env(cast=list) split comma lists; subcast casts elements."""
    list_str = ['foo', 'bar']
    assert_type_value(list, list_str, env('LIST_STR', cast=list))
    assert_type_value(list, list_str, env.list('LIST_STR'))
    # Whitespace around separators is stripped.
    assert_type_value(list, list_str, env.list('LIST_STR_WITH_SPACES'))
    list_int = [1, 2, 3]
    assert_type_value(list, list_int, env('LIST_INT', cast=list, subcast=int))
    assert_type_value(list, list_int, env.list('LIST_INT', subcast=int))
    assert_type_value(list, list_int,
                      env.list('LIST_INT_WITH_SPACES', subcast=int))
    # An empty variable parses to an empty list.
    assert_type_value(list, [], env.list('BLANK', subcast=int))
class Config:
    """Application settings resolved from the environment."""
    DEBUG: bool = env("DEBUG", cast=bool, default=False)
    SQLALCHEMY_URL = env("SQLALCHEMY_URL")  # required — raises if unset
    # The raw string is converted to a pytz timezone object after parsing.
    TIMEZONE: tzinfo = env(
        "TIMEZONE",
        default="America/New_York",
        postprocessor=lambda val: pytz.timezone(val),
    )
def start_chatting():
    """Configure auth, then quack back when the env credentials are valid."""
    user = env("PYDUCK_USER", default="")
    password = env("PYDUCK_PASSWORD", default="")
    auth_functions.configure()
    if not auth_functions.is_user_valid(user, password):
        print(
            "I do not quaching with strangers! (you are NOT authorized to use this library)"
        )
        return
    print("Quach-quach!")
def output(self):
    """
    We are marking the import as completed in our Postgres DB
    """
    # Marker row keyed by (table, update_id); connection details come from
    # the PGSQL_* environment variables. NOTE(review): PostgresTarget is
    # presumably luigi's — confirm against the file's imports.
    return PostgresTarget(
        host=env('PGSQL_HOST_AND_PORT'),
        database=env('PGSQL_DATABASE'),
        user=env('PGSQL_USER'),
        password=env('PGSQL_PASSWORD'),
        table=self.table,
        update_id=self.update_id
    )
def output(self):
    """
    We are marking the cleanup as completed in our Postgres DB because
    the MySQL DB will be destroyed
    """
    # Fixed table name marks whole-database removal; keyed by task_id so
    # each task instance gets its own completion marker.
    return PostgresTarget(
        host=env('PGSQL_HOST_AND_PORT'),
        database=env('PGSQL_DATABASE'),
        user=env('PGSQL_USER'),
        password=env('PGSQL_PASSWORD'),
        table='_removed_whole_database_',
        update_id=self.task_id
    )
def __init__(
    # NOTE: these env() defaults are evaluated once, when the function object
    # is created at import time — later environment changes have no effect.
    self,
    endpoint=env("CALUMA_ENDPOINT", default="http://caluma:8000/graphql"),
    oidc_client_id=env("OIDC_CLIENT_ID", default=None),
    oidc_client_secret=env("OIDC_CLIENT_SECRET", default=None),
    oidc_token_uri=env("OIDC_TOKEN_URI", default=None),
):
    """Store Caluma endpoint and OIDC credentials; the token is fetched lazily."""
    self.endpoint = endpoint
    self.oidc_client_id = oidc_client_id
    self.oidc_client_secret = oidc_client_secret
    self.oidc_token_uri = oidc_token_uri
    self._token = None  # populated on first authenticated request
def run(self):
    """Poll GitLab builds forever, notifying on new builds and status changes.

    Optional FILTER_USERNAME / FILTER_STAGE environment variables restrict
    which builds are considered; POLL_INTERVAL (seconds) paces the loop.
    Returns (stopping the loop) if listing builds fails.
    """
    debug('Running')
    while True:
        try:
            debug('Getting builds')
            builds = self.gitlab.project_builds.list(
                project_id=self.project_id)
        except Exception as e:
            debug(e, err=True)
            return

        debug('Analyzing builds')
        filter_username = env('FILTER_USERNAME', default=None)
        filter_stage = env('FILTER_STAGE', default=None)

        for build in builds:
            if filter_username and build.user.username != filter_username:
                # Build wasn't started by the specified user
                debug(' > Build #{} not started by user {}'.format(
                    build.id, filter_username))
                continue
            if filter_stage and build.stage != filter_stage:
                # BUG FIX: the original log line re-read env('filter_stage')
                # — a lower-case, never-set variable with no default, which
                # would raise here. Use the already-bound local instead.
                debug(' > Build #{} not in stage {}'.format(
                    build.id, filter_stage))
                continue

            if build.id not in self.builds_list:
                # Build isn't in the internal list: remember it and notify.
                debug(' > Build #{} not in the internal list'.format(build.id))
                self.builds_list[build.id] = build.status
                self.notify(build)
            elif self.builds_list[build.id] != build.status:
                # Known build whose status changed: update and notify.
                debug(' > Build #{} status changed'.format(build.id))
                self.builds_list[build.id] = build.status
                self.notify(build)
            else:
                # Build status unchanged: next build
                debug(' > Build #{} status unchanged'.format(build.id))
                continue

        time.sleep(env('POLL_INTERVAL', cast=int))
def output(self):
    """
    We are marking the completion of the transform step as completed in our
    Postgres DB because the MariaDB is not present at the moment of checking
    completeness of the task
    """
    # Marker row keyed by (table, update_id); connection details come from
    # the PGSQL_* environment variables.
    return PostgresTarget(
        host=env('PGSQL_HOST_AND_PORT'),
        database=env('PGSQL_DATABASE'),
        user=env('PGSQL_USER'),
        password=env('PGSQL_PASSWORD'),
        table=self.table,
        update_id=self.update_id
    )
def send_mail(to, subject, text, attachment=None, attachment_name=''):
    """Send a plain-text email through Postmark, optionally with one binary attachment.

    Raises on a non-success Postmark response; returns the send result.
    """
    message = pystmark.Message(
        sender=env('MAIL_FROM'), to=to, subject=subject, text=text)
    if attachment is not None:
        message.attach_binary(attachment.read(), attachment_name)
    result = pystmark.send(message, api_key=env('POSTMARK_API_KEY'))
    result.raise_for_status()
    return result
def _telegram_update():
    """Build a telegram Update fixture from the surname-less JSON message."""
    telegram_json = telegram_json_message_without_surname()
    bot = Bot(env('TELEGRAM_API_TOKEN'))
    update = Update.de_json(json.loads(telegram_json), bot)
    return update
def _parse(self, schema=None):
    """parse the schema and set django settings

    Each schema entry maps a variable name to either a simple cast type,
    a spec dict, or a list/tuple of spec dicts tried in order.
    """
    if schema is not None:
        self._schema.update(schema)
    schema = schema or self._schema
    # BUG FIX: dict.iteritems() is Python-2-only; .items() works on both 2 and 3.
    for var, spec in schema.items():
        value = None
        override = None
        ignore_none = None
        if type(spec) is dict:
            passed, value = self._process_spec(var, spec)
            override = spec.get('override')
            ignore_none = spec.get('ignore_none')
        elif type(spec) is list or type(spec) is tuple:
            # First spec item that parses successfully wins.
            for spec_item in spec:
                passed, value = self._process_spec(var, spec_item)
                if passed:
                    override = spec_item.get('override')
                    ignore_none = spec_item.get('ignore_none')
                    break
        else:
            # simple cast
            value = env(var, cast=spec)
        # Only forward flags the spec actually provided.
        kwargs = {}
        if override is not None:
            kwargs['override'] = override
        if ignore_none is not None:
            kwargs['ignore_none'] = ignore_none
        setting_value(var, value, **kwargs)
def test_schema():
    """An Env built with a schema pre-declares casts/defaults per variable."""
    env = Env(STR=str,
              STR_DEFAULT=dict(cast=str, default='default'),
              INT=int,
              LIST_STR=list,
              LIST_INT=dict(cast=list, subcast=int))
    assert_type_value(str, 'foo', env('STR'))
    assert_type_value(str, 'default', env('STR_DEFAULT'))
    assert_type_value(int, 42, env('INT'))
    assert_type_value(list, ['foo', 'bar'], env('LIST_STR'))
    assert_type_value(list, [1, 2, 3], env('LIST_INT'))
    # Overrides: call-site cast/default take precedence over the schema.
    assert_type_value(str, '42', env('INT', cast=str))
    assert_type_value(str, 'manual_default',
                      env('STR_DEFAULT', default='manual_default'))
def get_titanic(drill=False):
    """Fetch the company identified by the SIREN env var from the Firmapi API.

    With drill=True, the company's 'radie' flag is randomized so the
    downstream alerting path can be exercised without a real event.
    Raises on malformed or non-success API responses.
    """
    titanic_status_response = requests.get(ENDPOINT+'companies/'+env('SIREN'))
    titanic_status_response_json = titanic_status_response.json()
    if 'status' not in titanic_status_response_json:
        raise Exception('Illegal Firmapi response')
    if titanic_status_response_json['status'] != 'success':
        raise Exception(titanic_status_response_json['message'])
    if drill:
        # Coin-flip the flag in drill mode.
        titanic_status_response_json['company']['radie'] = bool(random.getrandbits(1))
    return titanic_status_response_json['company']
def set_env():
    """Set, Override settings values

    Declares the env-var schema, then applies per-provider settings
    (Mailgun, SendGrid, Redis To Go, Mongolab, MongoHQ, Cloudant,
    Memcachier) based on which provider variables are present.
    """
    env_vars.add(
        # project
        PROJECT_NAME=dict(default='Project'),
        # sites
        SITE_DOMAIN=dict(default=None, ignore_none=True),
        SITE_NAME=dict(default=None, ignore_none=True),
        # emails
        DEFAULT_FROM_EMAIL=dict(default=None, ignore_none=True),
        EMAIL_SUBJECT_PREFIX=dict(default=None, ignore_none=True),
        EMAIL_BACKEND=dict(default=None, ignore_none=True),
        SERVER_EMAIL=dict(default=None, ignore_none=True),
        EMAIL_HOST=dict(default=None, ignore_none=True),
        EMAIL_PORT=dict(default=None, ignore_none=True),
        EMAIL_HOST_USER=dict(default=None, ignore_none=True),
        EMAIL_HOST_PASSWORD=dict(default=None, ignore_none=True),
        EMAIL_USE_TLS=dict(cast=cast_bool, default=None, ignore_none=True),
        EMAIL_USE_SSL=dict(cast=cast_bool, default=None, ignore_none=True),
        EMAIL_SSL_KEYFILE=dict(default=None, ignore_none=True),
        EMAIL_SSL_CERTFILE=dict(default=None, ignore_none=True),
        EMAIL_TIMEOUT=dict(default=None, ignore_none=True),
        # admins, managers
        ADMINS=dict(cast=cast_eval, default=None, ignore_none=True),
        MANAGERS=dict(cast=cast_eval, default=None, ignore_none=True),
        # Sentry
        SENTRY_DSN=dict(default=None),
        # Exceptional
        EXCEPTIONAL_API_KEY=dict(default=None),
        # GoogleFed
        GOOGLE_DOMAIN=dict(default=None),
        # Celery w/ RabbitMQ or Celery w/ RedisCloud
        BROKER_URL=[dict(name='CLOUDAMQP_URL', only_if='CLOUDAMQP_URL'),
                    dict(name='RABBITMQ_URL', only_if='RABBITMQ_URL'),
                    dict(name='REDISCLOUD_URL', only_if='REDISCLOUD_URL')],
        BROKER_TRANSPORT=dict(name='BROKER_TRANSPORT',
                              only_if='BROKER_TRANSPORT')
    )
    # DATABASE_URL
    setting_value('DATABASES', {'default': dj_database_url.config()})
    # Mailgun
    if 'MAILGUN_SMTP_SERVER' in environ:
        setting_value('MAILGUN_API_KEY', env('MAILGUN_API_KEY'))
        setting_value('MAILGUN_DOMAIN', env('MAILGUN_DOMAIN'))
        setting_value('MAILGUN_PUBLIC_KEY', env('MAILGUN_PUBLIC_KEY'))
        setting_value('MAILGUN_SMTP_LOGIN', env('MAILGUN_SMTP_LOGIN'))
        setting_value('MAILGUN_SMTP_PASSWORD', env('MAILGUN_SMTP_PASSWORD'))
        setting_value('MAILGUN_SMTP_PORT', env('MAILGUN_SMTP_PORT', cast=int))
        setting_value('MAILGUN_SMTP_SERVER', env('MAILGUN_SMTP_SERVER'))
        use_mail_gun_api = env('MAILGUN_USE_API', cast=cast_bool, default=True)
        use_mail_gun_smtp = env('MAILGUN_USE_SMTP', cast=cast_bool,
                                default=True)
        # for use api
        if use_mail_gun_api:
            # django-mailgun:
            setting_value('MAILGUN_ACCESS_KEY', settings.MAILGUN_API_KEY)
            setting_value('MAILGUN_SERVER_NAME', settings.MAILGUN_DOMAIN)
        if use_mail_gun_smtp:
            # for django smtp server: use smtp
            setting_value('EMAIL_HOST', settings.MAILGUN_SMTP_SERVER)
            setting_value('EMAIL_PORT', settings.MAILGUN_SMTP_PORT)
            setting_value('EMAIL_HOST_USER', settings.MAILGUN_SMTP_LOGIN)
            setting_value('EMAIL_HOST_PASSWORD', settings.MAILGUN_SMTP_PASSWORD)
            setting_value('MAIL_USE_TLS',
                          env('MAIL_USE_TLS', cast=cast_bool, default=True))
    # SendGrid
    if 'SENDGRID_USERNAME' in environ:
        # this is enough to use https://github.com/elbuo8/sendgrid-django
        # don't forget to set EMAIL_BACKEND
        setting_value('SENDGRID_USERNAME', env('SENDGRID_USERNAME'))
        setting_value('SENDGRID_PASSWORD', env('SENDGRID_PASSWORD'))
        setting_value('SENDGRID_API_KEY', env('SENDGRID_API_KEY', default=None))
        setting_value('SENDGRID_SMTP_LOGIN',
                      env('SENDGRID_SMTP_LOGIN',
                          default=settings.SENDGRID_USERNAME))
        setting_value('SENDGRID_SMTP_PASSWORD',
                      env('SENDGRID_SMTP_PASSWORD',
                          default=settings.SENDGRID_PASSWORD))
        # BUG FIX: the SendGrid port was read from env('MAILGUN_SMTP_PORT') —
        # a copy-paste slip from the Mailgun block above. Read the SendGrid
        # variable; the 587 fallback keeps existing deployments working.
        setting_value('SENDGRID_SMTP_PORT',
                      env('SENDGRID_SMTP_PORT', cast=int, default=587))
        setting_value('SENDGRID_SMTP_SERVER',
                      env('SENDGRID_SMTP_SERVER', default='smtp.sendgrid.net'))
        send_grid_use_smtp = env('SENDGRID_USE_SMTP', cast=cast_bool,
                                 default=True)
        if send_grid_use_smtp:
            setting_value('EMAIL_HOST', settings.SENDGRID_SMTP_SERVER)
            setting_value('EMAIL_PORT', settings.SENDGRID_SMTP_PORT)
            setting_value('EMAIL_HOST_USER', settings.SENDGRID_SMTP_LOGIN)
            setting_value('EMAIL_HOST_PASSWORD',
                          settings.SENDGRID_SMTP_PASSWORD)
            setting_value('MAIL_USE_TLS',
                          env('MAIL_USE_TLS', cast=cast_bool, default=True))
    # Postmark
    # if 'POSTMARK_SMTP_SERVER' in environ:
    #     setting_value('SMTP_SERVER', 'POSTMARK_SMTP_SERVER')
    #     setting_value('SMTP_LOGIN', environ.get('POSTMARK_API_KEY'))
    #     setting_value('SMTP_PASSWORD', environ.get('POSTMARK_API_KEY'))
    #     setting_value('MAIL_SERVER', 'POSTMARK_SMTP_SERVER')
    #     setting_value('MAIL_USERNAME', environ.get('POSTMARK_API_KEY'))
    #     setting_value('MAIL_PASSWORD', environ.get('POSTMARK_API_KEY'))
    #     setting_value('MAIL_USE_TLS', True)
    # Redis To Go
    redis_url = environ.get('REDISTOGO_URL')
    if redis_url:
        url = urlparse(redis_url)
        setting_value('REDIS_HOST', url.hostname)
        setting_value('REDIS_PORT', url.port)
        setting_value('REDIS_PASSWORD', url.password)
    # Mongolab
    mongolab_uri = environ.get('MONGOLAB_URI')
    if mongolab_uri:
        url = urlparse(mongolab_uri)
        setting_value('MONGO_URI', mongolab_uri)
        setting_value('MONGODB_USER', url.username)
        setting_value('MONGODB_USERNAME', url.username)
        setting_value('MONGODB_PASSWORD', url.password)
        setting_value('MONGODB_HOST', url.hostname)
        setting_value('MONGODB_PORT', url.port)
        setting_value('MONGODB_DB', url.path[1:])
    # MongoHQ
    mongohq_uri = environ.get('MONGOHQ_URL')
    if mongohq_uri:
        url = urlparse(mongohq_uri)
        setting_value('MONGO_URI', mongohq_uri)
        setting_value('MONGODB_USER', url.username)
        setting_value('MONGODB_PASSWORD', url.password)
        setting_value('MONGODB_HOST', url.hostname)
        setting_value('MONGODB_PORT', url.port)
        setting_value('MONGODB_DB', url.path[1:])
    # Cloudant
    cloudant_uri = environ.get('CLOUDANT_URL')
    if cloudant_uri:
        setting_value('COUCHDB_SERVER', cloudant_uri)
    # Memcachier
    setting_value('CACHE_MEMCACHED_SERVERS',
                  env('MEMCACHIER_SERVERS', default=None))
    setting_value('CACHE_MEMCACHED_USERNAME',
                  env('MEMCACHIER_USERNAME', default=None))
    setting_value('CACHE_MEMCACHED_PASSWORD',
                  env('MEMCACHIER_PASSWORD', default=None))
from envparse import env

# Required settings — env() raises if any of these are unset.
# Stepik API OAuth credentials.
STEPIK_BASE_URL = env('STEPIK_BASE_URL')
STEPIK_CLIENT_ID = env('STEPIK_CLIENT_ID')
STEPIK_CLIENT_SECRET = env('STEPIK_CLIENT_SECRET')
# MediaWiki credentials and endpoints.
WIKI_LOGIN = env('WIKI_LOGIN')
WIKI_PASSWORD = env('WIKI_PASSWORD')
WIKI_BASE_URL = env('WIKI_BASE_URL')
WIKI_API_PATH = env('WIKI_API_PATH', default='/api.php')
# Third-party service keys.
UPLOAD_CARE_PUB_KEY = env('UPLOAD_CARE_PUB_KEY')
YANDEX_SPEECH_KIT_KEY = env('YANDEX_SPEECH_KIT_KEY')
import os import random import sys import datetime from envparse import env BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) def get_secret_key(secret_key): if not secret_key: return "".join([random.choice("abcdefghijklmnopqrstuvwxyz0123456789!@#$^&*(-_=+)") for i in range(50)]) return secret_key SECRET_KEY = env('SECRET_KEY', preprocessor=get_secret_key, default=None) DEBUG = env.bool('DJANGO_DEBUG', default=False) ALLOWED_HOSTS = env('ALLOWED_HOSTS', cast=list, default=['localhost', '127.0.0.1', '.lesspass.com']) ADMIN = [('Guillaume Vincent', '*****@*****.**'), ] INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework',
For the full list of settings and their values, see https://docs.djangoproject.com/en/1.9/ref/settings/ """ import os from envparse import env # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = env('SECRET_KEY', default='9t!sc1+#0zh@%c+xyiq&dlg_ri-_*7jfl!tg!lt#c0+1s9e6%p') HASHID_FIELD_SALT = env('HASHID_FIELD_SALT', default='6^y3*fv!fq4z@n2-i!hy-hgi0x*+3qmf@ylqtkv&x932(-8wi+') HASHID_FIELD_ALLOW_INT = False DEBUG = False INTERNAL_IPS = [ '127.0.0.1', ] ALLOWED_HOSTS = [ 'localhost', 'maxisushi.kz', 'www.maxisushi.kz',
def index():
    """Render the landing page with analytics/maps keys taken from the environment."""
    context = {
        'ANALYTICS': env('ANALYTICS', default='UA-XXXXXX-1'),
        'MAPS_API': env('MAPS_API', default='123456789'),
    }
    return render_template('index.html', **context)
def __init__(self):
    """Cache the required DSHIELD environment variable (presumably an API key — confirm)."""
    self._KEY = env('DSHIELD')
def test_default_none():
    """A missing variable with default=None yields None, not an error."""
    assert_type_value(type(None), None, env('NOT_PRESENT', default=None))
def test_str():
    """env() with no cast and env.str() both return the raw string value."""
    expected = str(env_vars['STR'])
    assert_type_value(str, expected, env('STR'))
    assert_type_value(str, expected, env.str('STR'))
def create_settings():
    """Create the global 'Open Event' Setting row; in development, seed the
    payment and OAuth credentials from the .env file."""
    get_or_create(Setting, app_name='Open Event')
    if current_app.config['DEVELOPMENT']:
        # get the stripe keys from the env file and save it in the settings.
        env.read_envfile()
        # All keys default to None so a partially filled .env still works.
        stripe_secret_key = env('STRIPE_SECRET_KEY', default=None)
        stripe_publishable_key = env('STRIPE_PUBLISHABLE_KEY', default=None)
        stripe_client_id = env('STRIPE_CLIENT_ID', default=None)
        paypal_sandbox_client = env('PAYPAL_SANDBOX_CLIENT', default=None)
        paypal_sandbox_secret = env('PAYPAL_SANDBOX_SECRET', default=None)
        fb_client_id = env('FACEBOOK_CLIENT_ID', default=None)
        fb_client_secret = env('FACEBOOK_CLIENT_SECRET', default=None)
        google_client_id = env('GOOGLE_CLIENT_ID', default=None)
        google_client_secret = env('GOOGLE_CLIENT_SECRET', default=None)
        tw_consumer_key = env('TWITTER_CONSUMER_KEY', default=None)
        tw_consumer_secret = env('TWITTER_CONSUMER_SECRET', default=None)
        in_client_id = env('INSTAGRAM_CLIENT_ID', default=None)
        in_client_secret = env('INSTAGRAM_CLIENT_SECRET', default=None)
        # Fetch (or create) the row again and persist the values on it.
        setting, _ = get_or_create(Setting, app_name='Open Event')
        setting.stripe_client_id = stripe_client_id
        setting.stripe_publishable_key = stripe_publishable_key
        setting.stripe_secret_key = stripe_secret_key
        setting.paypal_sandbox_client = paypal_sandbox_client
        setting.paypal_sandbox_secret = paypal_sandbox_secret
        setting.fb_client_id = fb_client_id
        setting.fb_client_secret = fb_client_secret
        setting.google_client_id = google_client_id
        setting.google_client_secret = google_client_secret
        setting.tw_consumer_key = tw_consumer_key
        setting.tw_consumer_secret = tw_consumer_secret
        setting.in_client_id = in_client_id
        setting.in_client_secret = in_client_secret
        db.session.add(setting)
        db.session.commit()
envparse.env used to create default values that are overriden by environment variables (where present). This is how settings are managed in Docker containers, http://12factor.net style. """ import os import os.path from envparse import env import sys from datetime import timedelta BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = env( 'SECRET_KEY', default='this_is_not_a_real_secret_key_234db#1k2l#GfnGqn') DEBUG = env('DEBUG', default=False) ALLOWED_HOSTS = [ '127.0.0.1', 'localhost', env('DJANGO_ALLOWED_HOST', default='presidentbusiness.com')] INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'djcelery', 'celery',
logging.basicConfig() logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) from imapclient import IMAPClient env.read_envfile() if __name__ == '__main__': logger.info('start IMAP') server = IMAPClient(env('HOST'), port=env('PORT', default=None), use_uid=True, ssl=env.bool('SSL', default=True)) logger.info(' [+] LOGIN') server.login(env('USERNAME'), env('PASSWORD')) select_info = server.select_folder('INBOX') print('%d messages in INBOX' % select_info['EXISTS']) messages = server.search(['NOT', 'DELETED']) print("%d messages that aren't deleted" % len(messages)) print() print("Messages:") # https://tools.ietf.org/html/rfc3501.html#section-6.4.5 response = server.fetch(messages, ['FLAGS', 'ENVELOPE', ]) for msgid, data in response.iteritems():
def test_preprocessor():
    """preprocessor runs on the raw string before any cast is applied."""
    assert_type_value(str, 'FOO', env('STR', preprocessor=lambda v: v.upper()))
def proxied_value():
    """A '{{OTHER_VAR}}'-style proxied variable resolves to the target's value."""
    assert_type_value(str, 'bar', env('PROXIED'))
from envparse import env

env.read_envfile()  # Read .env

# Anything other than 'production' enables debug mode.
ENV = env.str('NODE_ENV', default='production')
DEBUG = ENV != 'production'

CACHE = {
    'STRATEGY': 'redis',
    'PARAMS': {
        'host': env('REDIS_HOST', default='localhost'),
        'port': env.int('REDIS_PORT', default=6379),
    }
}

ROUTES = {
    'URL_PREFIX': '/v1/'
}

# Required — env() raises if these are unset.
GITHUB = {
    'CLIENT_ID': env('SIR_GITHUB_CLIENT_ID'),
    'CLIENT_SECRET': env('SIR_GITHUB_CLIENT_SECRET'),
}
import re from string import Template import random import smtplib from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText from email.mime.base import MIMEBase import os import sys import getopt import argparse from envparse import env env.read_envfile() user_agent = env('USER_AGENT') client_id = env('CLIENT_ID') client_secret = env('CLIENT_SECRET') r = praw.Reddit( client_id=client_id, client_secret=client_secret, user_agent=user_agent) def get_urls(subreddit): """ Returns a list of urls depending on what subreddit """ submissions = r.subreddit(subreddit).hot(limit=100) links = [x for x in submissions]
def test_var_not_present():
    """A missing variable with no default raises ConfigurationError."""
    with pytest.raises(ConfigurationError):
        env('NOT_PRESENT')
import sunazymuth from flask import Flask from flask import request from flask import render_template from flask import redirect app = Flask(__name__) ### # APP ENVIRONMENT SETUP ### # Check if we are on local development machine (default is prod) FLASK_ENV = env('FLASK_ENV', default='production') sha = env('GIT_COMMIT_SHA1', default='') SENTRY_DSN = env('SENTRY_DSN', default='') sentry_sdk.init( dsn=SENTRY_DSN, environment=FLASK_ENV, release=sha, integrations=[FlaskIntegration()] ) # Redirect herokuapp to custom domain @app.before_request def redirect_domain(): urlparts = urlparse(request.url)
def run(drill):
    """Check whether the company ('the Titanic') was deregistered and, if so,
    email every address in SURVIVORS once; a SIREN-named '.sank' sentinel file
    prevents re-sending. With drill=True the status is randomized upstream."""
    logging.basicConfig(
        format='%(asctime)s - %(levelname)s - %(message)s',
        datefmt='%d/%m/%Y %H:%M:%S',
        stream=sys.stdout
    )
    logging.getLogger().setLevel(logging.INFO)
    Env.read_envfile('.env')
    # SURVIVORS is a comma-separated list of recipient addresses.
    survivors = env('SURVIVORS', cast=list, subcast=str)
    smtp_login = env('SMTP_LOGIN')
    smtp_password = env('SMTP_PASSWORD')
    smtp_host = env('SMTP_HOST')
    smtp_port = env('SMTP_PORT')
    smtp_tls = env.bool('SMTP_TLS', default=False)
    if drill:
        debug('[Drill mode]')
    try:
        # Sentinel file: existing means the alert was already sent.
        sank_file = env('SIREN')+'.sank'
        if os.path.exists(sank_file):
            raise Exception('Titanic already sank, aborting')
        debug('Getting Titanic\'s status...')
        titanic = get_titanic(drill)
        debug('Checking Titanic\'s status...')
        if titanic['radie']:
            since = arrow.get(titanic['last_legal_update']).format('MMM, D YYYY')
            debug('Titanic sunk! {} isn\'t no more since {}!'.format(titanic['names']['best'], since))
            debug('There\'s {} survivors to contact.'.format(len(survivors)))
            debug('Sending telegrams to the survivors...')
            envelope = Envelope(
                from_addr=(smtp_login, 'The Titanic'),
                to_addr=survivors,
                subject='Titanic sunk!',
                html_body="""<h1>Ohmy, Titanic sunk!</h1>
<p>There it is. {company} isn\'t, officially since <b>{since}</b>.</p>
<p style="text-align: center"><img src="https://media.giphy.com/media/hmxZRW8mhs4ak/giphy.gif"></p>
<p>Thank you all.</p>
<hr>
<p><small>This email was automatically sent by the <a href="https://github.com/EpocDotFr/titanic">Titanic</a>. Please don\'t reply.</small></p>""".format(company=titanic['names']['best'], since=since)
            )
            envelope.send(smtp_host, login=smtp_login, password=smtp_password, port=smtp_port, tls=smtp_tls)
            debug('Telegrams sent successfully!')
            # Create the sentinel so the alert is never sent twice.
            open(sank_file, 'a').close()
        else:
            debug('Titanic still floating')
    except Exception as e:
        debug(' > {}'.format(e), err=True)
import os
import socket
import sys

from envparse import env

DEBUG = env("DEBUG", cast=bool, default=False)
LOG_LEVEL = env("LOG_LEVEL", default="INFO")
APP_NAME = env("APP_NAME", default="flags")
ADMIN_MODE = env("ADMIN_MODE", cast=bool, default=False)
PRODUCTION_MODE = env("PRODUCTION_MODE", cast=bool, default=True)

# If DEFAULT_VALUE is True, then features are Enabled unless stated otherwise
# If DEFAULT_VALUE is False, then features are Disabled unless state otherwise
DEFAULT_VALUE = env("DEFAULT_VALUE", cast=bool, default=True)

# Comma-separated ZooKeeper ensemble addresses.
ZK_HOSTS = env("ZK_HOSTS", default="localhost:2181")
# Time in seconds to wait for zookeeper connection to succeed.
ZK_CONNECTION_TIMEOUT = 5

# Local HTTP bind address.
HOST = "localhost"
PORT = 9595

# ZooKeeper key layout: /<PREFIX>/<VERSION>/<FEATURES_KEY|SEGMENTS_KEY>.
PREFIX = env("PREFIX", default="flags")
VERSION = env("VERSION", default="v1")
FEATURES_KEY = env("FEATURES_KEY", default="features")
SEGMENTS_KEY = env("SEGMENTS_KEY", default="segments")

RESPONSE_MODE_BASIC = "basic"
RESPONSE_MODE_ADVANCED = "advanced"

ROOT_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
def create_app():
    """Assemble and configure the Flask application.

    Wires blueprints, DB migrations, caching, JWT auth, Celery, CORS,
    optional static serving, Sentry, Redis and Elasticsearch.
    Returns (app, manager, db, jwt).
    """
    BlueprintsManager.register(app)
    Migrate(app, db)

    # Config class is selected via APP_CONFIG (dotted path).
    app.config.from_object(env('APP_CONFIG', default='config.ProductionConfig'))
    db.init_app(app)
    _manager = Manager(app)
    _manager.add_command('db', MigrateCommand)

    if app.config['CACHING']:
        cache.init_app(app, config={'CACHE_TYPE': 'simple'})
    else:
        cache.init_app(app, config={'CACHE_TYPE': 'null'})

    stripe.api_key = 'SomeStripeKey'
    app.secret_key = 'super secret key'
    app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False
    app.config['FILE_SYSTEM_STORAGE_FILE_VIEW'] = 'static'

    app.logger.addHandler(logging.StreamHandler(sys.stdout))
    app.logger.setLevel(logging.ERROR)

    # set up jwt
    app.config['JWT_AUTH_USERNAME_KEY'] = 'email'
    app.config['JWT_EXPIRATION_DELTA'] = timedelta(seconds=24 * 60 * 60)
    app.config['JWT_AUTH_URL_RULE'] = '/auth/session'
    _jwt = JWT(app, jwt_authenticate, jwt_identity)

    # setup celery
    app.config['CELERY_BROKER_URL'] = app.config['REDIS_URL']
    app.config['CELERY_RESULT_BACKEND'] = app.config['CELERY_BROKER_URL']

    CORS(app, resources={r"/*": {"origins": "*"}})
    AuthManager.init_login(app)

    if app.config['TESTING'] and app.config['PROFILE']:
        # Profiling
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])

    # nextgen api
    with app.app_context():
        from app.api.bootstrap import api_v1
        from app.api.uploads import upload_routes
        from app.api.exports import export_routes
        from app.api.imports import import_routes
        from app.api.celery_tasks import celery_routes
        from app.api.auth import auth_routes
        from app.api.event_copy import event_copy
        app.register_blueprint(api_v1)
        app.register_blueprint(event_copy)
        app.register_blueprint(upload_routes)
        app.register_blueprint(export_routes)
        app.register_blueprint(import_routes)
        app.register_blueprint(celery_routes)
        app.register_blueprint(auth_routes)

    sa.orm.configure_mappers()

    if app.config['SERVE_STATIC']:
        app.add_url_rule('/static/<path:filename>',
                         endpoint='static',
                         view_func=app.send_static_file)

    # sentry
    if 'SENTRY_DSN' in app.config:
        sentry.init_app(app, dsn=app.config['SENTRY_DSN'])

    # redis
    redis_store.init_app(app)

    # elasticsearch
    if app.config['ENABLE_ELASTICSEARCH']:
        es.init_app(app)
        with app.app_context():
            # Best-effort index rebuild; ignore broker/worker failures.
            try:
                cron_rebuild_events_elasticsearch.delay()
            except Exception:
                pass

    return app, _manager, db, _jwt
def test_var_not_present_with_default():
    """A missing variable falls back to the provided default."""
    default_val = 'default val'
    # BUG FIX: the original `assert default_val, env(...)` only asserted that
    # default_val is truthy — the env() call was the assert *message* — so the
    # test could never fail. Compare the two values instead.
    assert default_val == env('NOT_PRESENT', default=default_val)
def test_int():
    """env(cast=int) and env.int() both convert the raw string to int."""
    expected = int(env_vars['INT'])
    assert_type_value(int, expected, env('INT', cast=int))
    assert_type_value(int, expected, env.int('INT'))