def __init__(self, hs, media_repo, media_storage):
    super().__init__()

    self.auth = hs.get_auth()
    self.clock = hs.get_clock()
    self.filepaths = media_repo.filepaths
    self.max_spider_size = hs.config.max_spider_size
    self.server_name = hs.hostname
    self.store = hs.get_datastore()
    self.client = SimpleHttpClient(
        hs,
        treq_args={"browser_like_redirects": True},
        ip_whitelist=hs.config.url_preview_ip_range_whitelist,
        ip_blacklist=hs.config.url_preview_ip_range_blacklist,
        http_proxy=os.getenvb(b"http_proxy"),
        https_proxy=os.getenvb(b"HTTPS_PROXY"),
    )
    self.media_repo = media_repo
    self.primary_base_path = media_repo.primary_base_path
    self.media_storage = media_storage

    self.url_preview_url_blacklist = hs.config.url_preview_url_blacklist

    # memory cache mapping urls to an ObservableDeferred returning
    # JSON-encoded OG metadata
    self._cache = ExpiringCache(
        cache_name="url_previews",
        clock=self.clock,
        # don't spider URLs more often than once an hour
        expiry_ms=60 * 60 * 1000,
    )

    self._cleaner_loop = self.clock.looping_call(
        self._start_expire_url_cache_data, 10 * 1000
    )
def setup(self, mode='tag_exists', psuccess=1.):

    self.mode = mode
    self.psuccess = psuccess

    self.dataset_id = 'XYZ'
    self.token = '123'

    self.tag_url = os.getenvb(
        b'LIGHTLY_SERVER_LOCATION',
        b'https://api.lightly.ai'
    ).decode()
    self.tag_url += f'/users/datasets/{self.dataset_id}/tags/'

    self.tags = [
        {
            'name': 'initial-tag',
            '_id': '123'
        },
        {
            'name': 'test-tag',
            '_id': '456'
        },
    ]

    self.dst_url = os.getenvb(
        b'LIGHTLY_SERVER_LOCATION',
        b'https://api.lightly.ai'
    ).decode()
    self.dst_url += f'/users/datasets/{self.dataset_id}/tags/123/download'

    self.samples = 'sample_1.jpg\nsample_2.jpg'
def get_proxied_http_client(self) -> SimpleHttpClient:
    """
    An HTTP client that uses configured HTTP(S) proxies.
    """
    return SimpleHttpClient(
        self,
        http_proxy=os.getenvb(b"http_proxy"),
        https_proxy=os.getenvb(b"HTTPS_PROXY"),
    )
class ConfigMap:
    SECRET_KEY = 'CHANGEME'
    # os.getenvb() requires bytes keys and returns bytes (or None if unset)
    TG_TOKEN = os.getenvb(b'TG_TOKEN')
    TG_CHAT_ID = os.getenvb(b'TG_CHAT_ID')
    DEBUG = True
    BASIC_AUTH_USERNAME = os.getenvb(b'BA_UNAME')
    BASIC_AUTH_PASSWORD = os.getenvb(b'BA_UPASS')
    BASIC_AUTH_FORCE = True
    LOG_FILE = '/var/log/am-tg.log'
def get_proxied_blacklisted_http_client(self) -> SimpleHttpClient:
    """
    An HTTP client that uses configured HTTP(S) proxies and blacklists IPs
    based on the IP range blacklist/whitelist.
    """
    return SimpleHttpClient(
        self,
        ip_whitelist=self.config.ip_range_whitelist,
        ip_blacklist=self.config.ip_range_blacklist,
        http_proxy=os.getenvb(b"http_proxy"),
        https_proxy=os.getenvb(b"HTTPS_PROXY"),
    )
def __init__(
    self,
    hs: "HomeServer",
    media_repo: "MediaRepository",
    media_storage: MediaStorage,
):
    super().__init__()

    self.auth = hs.get_auth()
    self.clock = hs.get_clock()
    self.filepaths = media_repo.filepaths
    self.max_spider_size = hs.config.max_spider_size
    self.server_name = hs.hostname
    self.store = hs.get_datastore()
    self.client = SimpleHttpClient(
        hs,
        treq_args={"browser_like_redirects": True},
        ip_whitelist=hs.config.url_preview_ip_range_whitelist,
        ip_blacklist=hs.config.url_preview_ip_range_blacklist,
        http_proxy=os.getenvb(b"http_proxy"),
        https_proxy=os.getenvb(b"HTTPS_PROXY"),
    )
    self.media_repo = media_repo
    self.primary_base_path = media_repo.primary_base_path
    self.media_storage = media_storage

    # We run the background jobs if we're the instance specified (or no
    # instance is specified, where we assume there is only one instance
    # serving media).
    instance_running_jobs = hs.config.media.media_instance_running_background_jobs
    self._worker_run_media_background_jobs = (
        instance_running_jobs is None
        or instance_running_jobs == hs.get_instance_name()
    )

    self.url_preview_url_blacklist = hs.config.url_preview_url_blacklist
    self.url_preview_accept_language = hs.config.url_preview_accept_language

    # memory cache mapping urls to an ObservableDeferred returning
    # JSON-encoded OG metadata
    self._cache = ExpiringCache(
        cache_name="url_previews",
        clock=self.clock,
        # don't spider URLs more often than once an hour
        expiry_ms=ONE_HOUR,
    )

    if self._worker_run_media_background_jobs:
        self._cleaner_loop = self.clock.looping_call(
            self._start_expire_url_cache_data, 10 * 1000
        )
def setup(self, n_data=1000):

    self.dataset_id = 'XYZ'
    self.token = 'secret'

    # set up url
    self.dst_url = os.getenvb(
        b'LIGHTLY_SERVER_LOCATION',
        b'https://api.lightly.ai'
    ).decode()
    self.emb_url = f'{self.dst_url}/users/datasets/{self.dataset_id}/embeddings'
    self.tag_url = f'{self.dst_url}/users/datasets/{self.dataset_id}/tags/?token={self.token}'

    # create a dataset
    self.dataset = torchvision.datasets.FakeData(size=n_data,
                                                 image_size=(3, 32, 32))

    self.folder_path = tempfile.mkdtemp()
    self.path_to_embeddings = os.path.join(self.folder_path, 'embeddings.csv')

    sample_names = [f'img_{i}.jpg' for i in range(n_data)]
    labels = [0] * len(sample_names)
    lightly.utils.save_embeddings(
        self.path_to_embeddings,
        np.random.randn(n_data, 16),
        labels,
        sample_names
    )
def setup(self, n_data=1000):

    # set up url
    self.dataset_id = 'XYZ'
    self.token = 'secret'
    self.dst_url = os.getenvb(
        b'LIGHTLY_SERVER_LOCATION',
        b'https://api.lightly.ai'
    ).decode()
    self.gettag_url = f'{self.dst_url}/users/datasets/{self.dataset_id}/tags/?token={self.token}'
    self.sample_url = f'{self.dst_url}/users/datasets/{self.dataset_id}/samples/'
    self.signed_url = 'https://www.this-is-a-signed-url.com'
    self.dataset_url = f'{self.dst_url}/users/datasets/{self.dataset_id}?token={self.token}'
    self.maketag_url = f'{self.dst_url}/users/datasets/{self.dataset_id}/tags'
    self.getquota_url = f'{self.dst_url}/users/quota'

    # create a dataset
    self.dataset = torchvision.datasets.FakeData(size=n_data,
                                                 image_size=(3, 32, 32))
    self.folder_path = tempfile.mkdtemp()

    sample_names = [f'img_{i}.jpg' for i in range(n_data)]
    self.sample_names = sample_names
    for sample_idx in range(n_data):
        data = self.dataset[sample_idx]
        path = os.path.join(self.folder_path, sample_names[sample_idx])
        data[0].save(path)
def get_config_path(filename: str) -> List[str]:
    """Get candidate config file paths."""
    paths = ['/etc/']
    if os.geteuid():  # non-root users also get a per-user config directory
        if os.getenv('XDG_CONFIG_HOME'):
            # os.getenvb() requires a bytes key and returns bytes; decode to str
            # so the path joins cleanly with the str filename below
            paths.append(os.getenvb(b'XDG_CONFIG_HOME').decode())
        else:
            paths.append(os.path.expanduser('~/.config'))
    # return a real list to match the List[str] annotation
    return [os.path.join(path, filename) for path in paths]
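A quick sanity check of the resolver above. This is only an illustrative sketch: the `app.conf` filename and the `XDG_CONFIG_HOME` value are made up, and the second path only appears when the process is not running as root.

import os

# hypothetical values, purely for illustration
os.environ['XDG_CONFIG_HOME'] = '/home/alice/.config'

print(get_config_path('app.conf'))
# for a non-root user this yields:
#   ['/etc/app.conf', '/home/alice/.config/app.conf']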
def setup(self, psuccess=1.):

    self.psuccess = psuccess

    # set up url
    self.dst_url = os.getenvb(
        b'LIGHTLY_SERVER_LOCATION',
        b'https://api.lightly.ai'
    ).decode()
    # route
    self.dst_url += '/sample/route/to/put'
def get_tail_args(self):
    """
    Work out the tail args
    """
    tail_args = os.getenvb(b'MAGCODE_TAIL_ARGS')
    # Need to be able to detect environment setting of no args ''
    if (tail_args is not None):
        return tail_args.decode()
    return settings['tail_args']
def get_diff_args(self):
    """
    Work out the diff args
    """
    diff_args = os.getenvb(b'MAGCODE_DIFF_ARGS')
    # Need to be able to detect environment setting of no args ''
    if (diff_args is not None):
        return diff_args.decode()
    return settings['diff_args']
def setup(self, psuccess=1.):

    self.psuccess = psuccess

    # set up url
    self.dst_url = os.getenvb(
        b'LIGHTLY_SERVER_LOCATION',
        b'https://api.lightly.ai'
    ).decode()
    # route
    self.dst_url += '/pip/version'
    self.version = '0.0.0'
def get_tail(self):
    """
    Find a tail binary
    """
    tail = os.getenvb(b'MAGCODE_TAIL')
    if (tail):
        return tail.decode()
    tail = '/bin/tail'
    if os.path.isfile(tail):
        return tail
    return '/usr/bin/tail'
def get_pager_args(self):
    """
    Return pager args

    MUST be called after get_pager
    """
    pager_args = os.getenvb(b'MAGCODE_PAGER_ARGS')
    # Need to be able to detect environment setting of no args ''
    if (pager_args is not None):
        return pager_args.decode()
    return settings['pager_args']
def create():
    connectionType = os.getenvb(b'SELPI_CONNECTION_TYPE')
    if connectionType == b'Serial':
        return ConnectionSerial()
    elif connectionType == b'SelectLive':
        return ConnectionSelectLive()
    elif connectionType == b'TCP':
        return ConnectionTCP()
    else:
        raise NotImplementedError(
            "Connection type not implemented: '"
            + connectionType.decode('ascii') + "'")
def setup(self, psuccess=1.):

    self.psuccess = psuccess

    self.dataset_id = 'XYZ'
    self.token = '123'

    # set up url
    self.dst_url = os.getenvb(
        b'LIGHTLY_SERVER_LOCATION',
        b'https://api.lightly.ai'
    ).decode()
    # route
    self.dst_url += f'/users/datasets/{self.dataset_id}/tags'
def get_editor(self):
    """
    Work out the user's preference of editor, and return that
    """
    if not self.admin_mode:
        return settings['editor']
    editor = os.getenvb(b'VISUAL')
    if (editor):
        return editor.decode()
    editor = os.getenvb(b'EDITOR')
    if (editor):
        return editor.decode()
    editor = '/usr/bin/editor'
    if os.path.isfile(editor):
        return editor
    # Fall back if none of the above is around...
    return '/usr/bin/vi'
def _get_editor(self):
    """
    Work out the user's preference of editor, and return that
    """
    editor = os.getenvb(b'VISUAL')
    if (editor):
        return editor
    editor = os.getenvb(b'EDITOR')
    if (editor):
        return editor
    editor = b'/usr/bin/sensible-editor'
    if os.path.isfile(editor):
        return editor
    editor = b'/usr/bin/editor'
    if os.path.isfile(editor):
        return editor
    # Fall back if none of the above is around...
    return b'/usr/bin/vi'
def getenv(key: str, default: str):
    """Return the value of the environment variable key if it exists, or
    default if it doesn't.
    """
    try:
        return os.getenvb(key.encode(), default.encode()).decode()
    except Exception:
        pass
    try:
        return os.getenv(key, default)
    except Exception:
        pass
    return default
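The wrapper above prefers the bytes-level lookup and falls back to os.getenv(). A minimal sketch of why both paths exist: os.getenvb() takes bytes keys and returns bytes, and it is only available where os.environb is (i.e. not on Windows). The GREETING variable below is an arbitrary example.

import os

os.environ['GREETING'] = 'hello'          # arbitrary example variable

# str key in, str value out (or the default)
print(os.getenv('GREETING', 'fallback'))  # 'hello'

# bytes key in, bytes value out; raises TypeError for str keys and does not
# exist on platforms without os.environb (e.g. Windows)
if hasattr(os, 'getenvb'):
    print(os.getenvb(b'GREETING', b'fallback'))  # b'hello'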
def setup(self, psuccess=1.):

    self.psuccess = psuccess

    self.dataset_id = 'XYZ'
    self.token = '123'
    self.filename = 'filename'
    self.thumbname = 'thumbname'
    self.metadata = {'example': 0.1, 'metadata': 'this'}

    self.dst_url = os.getenvb(
        b'LIGHTLY_SERVER_LOCATION',
        b'https://api.lightly.ai'
    ).decode()
    self.dst_url += f'/users/datasets/{self.dataset_id}/samples/'
def clean_path(path):
    # This must be done before the isabs test, or else you will get a false negative
    path = os.path.expanduser(path)

    # Support Just's changing of CWD, to keep relative paths for the user.
    if os.getenv('JUST_USER_CWD') is not None:
        # os.path.abspath, with a tweak
        path = os.fspath(path)
        if not os.path.isabs(path):
            if isinstance(path, bytes):
                # bytes paths need the bytes-level lookup (bytes key, bytes value)
                cwd = os.getenvb(b'JUST_USER_CWD')
            else:
                cwd = os.getenv('JUST_USER_CWD')
            path = os.path.join(cwd, path)
        return os.path.normpath(path)
    return os.path.abspath(path)
def test_routes(self):

    dataset_id = 'XYZ'
    sample_id = 'xyz'
    tag_id = 'abc'

    dst_url = os.getenvb(
        b'LIGHTLY_SERVER_LOCATION',
        b'https://api.lightly.ai'
    ).decode()

    # pip
    self.assertEqual(routes.pip.service._prefix(), f'{dst_url}/pip')

    # users
    self.assertEqual(routes.users.service._prefix(), f'{dst_url}/users')

    # datasets
    self.assertEqual(routes.users.datasets.service._prefix(),
                     f'{dst_url}/users/datasets')
    self.assertEqual(
        routes.users.datasets.service._prefix(dataset_id=dataset_id),
        f'{dst_url}/users/datasets/{dataset_id}')

    # embeddings
    self.assertEqual(routes.users.datasets.embeddings.service._prefix(),
                     f'{dst_url}/users/datasets/embeddings')
    self.assertEqual(
        routes.users.datasets.embeddings.service._prefix(
            dataset_id=dataset_id),
        f'{dst_url}/users/datasets/{dataset_id}/embeddings')

    # samples
    self.assertEqual(routes.users.datasets.samples.service._prefix(),
                     f'{dst_url}/users/datasets/samples')
    self.assertEqual(
        routes.users.datasets.samples.service._prefix(
            dataset_id=dataset_id),
        f'{dst_url}/users/datasets/{dataset_id}/samples')
    self.assertEqual(
        routes.users.datasets.samples.service._prefix(
            dataset_id=dataset_id, sample_id=sample_id),
        f'{dst_url}/users/datasets/{dataset_id}/samples/{sample_id}')

    # tags
    self.assertEqual(routes.users.datasets.tags.service._prefix(),
                     f'{dst_url}/users/datasets/tags')
    self.assertEqual(
        routes.users.datasets.tags.service._prefix(dataset_id=dataset_id),
        f'{dst_url}/users/datasets/{dataset_id}/tags')
    self.assertEqual(
        routes.users.datasets.tags.service._prefix(dataset_id=dataset_id,
                                                   tag_id=tag_id),
        f'{dst_url}/users/datasets/{dataset_id}/tags/{tag_id}')
def _append_to_default_paths(env_var_name, paths):
    return subprocess.check_output(
        [
            'sh', '-c',
            '. {} && echo -n "${}"'.format(
                shlex.quote(_setup_sh_path), env_var_name
            )
        ],
        env={
            env_var_name: os.getenvb(env_var_name.encode('ascii'), b'') +
                          b':' +
                          b':'.join([p.encode('utf-8') for p in paths])
        }
    )
def get_pager(self):
    """
    Work out the user's preference of pager and return that
    """
    pager_args = settings['pager_args']
    if not self.admin_mode:
        pager = settings['pager']
        if (os.path.realpath(pager).endswith('less') and not pager_args):
            settings['pager_args'] = LESS_ARGS
        return settings['pager']
    pager = os.getenvb(b'PAGER')
    if (pager):
        pager = pager.decode()
        if pager.endswith('less') and not pager_args:
            settings['pager_args'] = '-EX'
        return pager
    pager = '/usr/bin/pager'
    if os.path.isfile(pager):
        if (os.path.realpath(pager).endswith('less') and not pager_args):
            settings['pager_args'] = LESS_ARGS
        return pager
    # Fall backs if none of the above is around
    # Try less
    pager = '/usr/bin/less'
    if os.path.isfile(pager):
        if not pager_args:
            settings['pager_args'] = LESS_ARGS
        return pager
    # Try more
    pager = '/usr/bin/more'
    if os.path.isfile(pager):
        return pager
    # In Debian it's here...
    pager = '/bin/more'
    if os.path.isfile(pager):
        return pager
    # This is the pathological default...
    return '/bin/cat'
def get_diff(self):
    """
    Work out where the system's diff is
    """
    diff = os.getenvb(b'MAGCODE_DIFF')
    if (diff):
        return diff.decode()
    # Colorize diff output
    diff = '/usr/bin/colordiff'
    if os.path.isfile(diff):
        return diff
    diff = '/usr/local/bin/colordiff'
    if os.path.isfile(diff):
        return diff
    # where the binary is on FreeBSD and Linux
    return '/usr/bin/diff'
def setup(self, psuccess=1.):

    self.psuccess = psuccess

    self.dataset_id = 'XYZ'
    self.token = '123'
    self.data_1 = {
        'embeddingName': 'default',
        'embeddings': [0.1 * i for i in range(16)],
        'token': '123',
        'datasetId': 'XYZ',
        'append': 0
    }

    self.dst_url = os.getenvb(
        b'LIGHTLY_SERVER_LOCATION',
        b'https://api.lightly.ai'
    ).decode()
    self.dst_url += f'/users/datasets/{self.dataset_id}/embeddings'
def setup(self, n_data=1000, psuccess=1.):

    self.filename = 'filename'
    self.dataset_id = 'XYZ'
    self.sample_id = 'ABC'
    self.token = 'secret'
    self.psuccess = psuccess

    # set up url
    self.dst_url = os.getenvb(
        b'LIGHTLY_SERVER_LOCATION',
        b'https://api.lightly.ai'
    ).decode()
    # route
    self.dst_url += f'/users/datasets/{self.dataset_id}'
    self.dst_url += f'/samples/{self.sample_id}/writeurl'
    # query
    self.dst_url += f'?fileName={self.filename}&token={self.token}'

    # create a dataset
    self.dataset = torchvision.datasets.FakeData(size=n_data,
                                                 image_size=(3, 32, 32))
def __get__(self, inst, owner=None):
    """Get the `self.variable` from the environment and apply the converter."""
    rv = os.getenvb(self.variable)
    if rv:
        return self.convert(rv)
def getb(key: bytes) -> bytes:
    return os.getenvb(b'SELPI_' + key)
def run(args):
    address = os.getenvb(b'SELPI_PROXY_BIND_ADDRESS')
    port = int(os.getenvb(b'SELPI_PROXY_BIND_PORT'))
    Proxy().bind(address, port)
import os

from dotenv import find_dotenv, load_dotenv
from pytz import timezone


def getenv_boolean(var_name, default_value=False):
    result = default_value
    env_value = os.getenv(var_name)
    if env_value is not None:
        result = env_value.upper() in ("TRUE", "1")
    return result


# load env
env_file_path = find_dotenv(raise_error_if_not_found=True)
load_dotenv(env_file_path)

# config settings
API_V1_STR = "/api/v1"

TIMEZONE = timezone('Asia/Shanghai')

SECRET_KEY = os.getenvb(b"SECRET_KEY", os.urandom(32))

ACCESS_TOKEN_EXPIRE_MINUTES = 60 * 24 * 8  # 60 minutes * 24 hours * 8 days = 8 days

SERVER_NAME = os.getenv("SERVER_NAME")
SERVER_HOST = os.getenv("SERVER_HOST")
BACKEND_CORS_ORIGINS = os.getenv("BACKEND_CORS_ORIGINS")
PROJECT_NAME = os.getenv("PROJECT_NAME")

POSTGRES_SERVER = os.getenv("POSTGRES_SERVER")
POSTGRES_USER = os.getenv("POSTGRES_USER")
POSTGRES_PASSWORD = os.getenv("POSTGRES_PASSWORD")
POSTGRES_DB = os.getenv("POSTGRES_DB")
SQLALCHEMY_DATABASE_URI = (
    f"postgresql://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{POSTGRES_SERVER}/{POSTGRES_DB}"
)
os.utime(path, times)  # Set the access and modification times of the file at path.
os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])  # Walk the directory tree rooted at top, top-down or bottom-up, yielding the file names in each folder.
os.write(fd, str)  # Write the string to file descriptor fd; returns the number of bytes actually written.
os.name  # A string identifying the current platform: 'nt' on Windows, 'posix' on Linux.
os.getcwd()  # Get the current working directory, i.e. the directory the Python script is running in.
os.getpid()
os.getcwdb()
os.getcwdu()
os.getegid()
os.geteuid()
os.getpid()
os.getppid()
os.getenv()
os.getenvb()
os.abort()
os.chmod()
os.chown()
os.close()
os.cpu_count()
os.kill()
os.open()
os.getgid()
os.chdir("dirname")  # Change the current working directory of the script; like cd in the shell.
os.curdir  # The string the OS uses for the current directory: '.'
os.pardir  # The string the OS uses for the parent directory: '..'
os.mkdir('dirname')  # Create a single-level directory; like mkdir dirname in the shell.
os.makedirs('dirname1/dirname2')  # Create nested directories recursively.
os.rmdir('dirname')  # Remove a single empty directory; raises an error if it is not empty; like rmdir dirname in the shell.
os.removedirs('dirname1')  # If the directory is empty, remove it, then recurse upward removing each parent directory that becomes empty, and so on.
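A short, self-contained sketch exercising a few of the directory helpers listed above; the directory names are arbitrary and everything happens inside a throwaway temporary directory.

import os
import tempfile

base = tempfile.mkdtemp()                # throwaway working area
os.chdir(base)
print(os.getcwd())                       # current working directory (the temp dir)

os.makedirs('dirname1/dirname2')         # create nested directories recursively
for top, dirs, files in os.walk(base):   # walk the tree top-down
    print(top, dirs, files)

os.removedirs('dirname1/dirname2')       # remove dirname2, then the now-empty dirname1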
import os


def getenv_boolean(var_name, default_value=False):
    result = default_value
    env_value = os.getenv(var_name)
    if env_value is not None:
        result = env_value.upper() in ("TRUE", "1")
    return result


API_V1_STR = "/api/v1"

SECRET_KEY = os.getenvb(b"SECRET_KEY")
if not SECRET_KEY:
    SECRET_KEY = os.urandom(32)

ACCESS_TOKEN_EXPIRE_MINUTES = 60 * 24 * 8  # 60 minutes * 24 hours * 8 days = 8 days

SERVER_NAME = os.getenv("SERVER_NAME")
SERVER_HOST = os.getenv("SERVER_HOST")
BACKEND_CORS_ORIGINS = os.getenv(
    "BACKEND_CORS_ORIGINS"
)  # a string of origins separated by commas, e.g: "http://localhost, http://localhost:4200, http://localhost:3000, http://localhost:8080, http://local.dockertoolbox.tiangolo.com"
PROJECT_NAME = os.getenv("PROJECT_NAME")
SENTRY_DSN = os.getenv("SENTRY_DSN")

POSTGRES_SERVER = os.getenv("POSTGRES_SERVER")
POSTGRES_USER = os.getenv("POSTGRES_USER")
POSTGRES_PASSWORD = os.getenv("POSTGRES_PASSWORD")
POSTGRES_DB = os.getenv("POSTGRES_DB")
def build_proxied_http_client(self):
    return SimpleHttpClient(
        self,
        http_proxy=os.getenvb(b"http_proxy"),
        https_proxy=os.getenvb(b"HTTPS_PROXY"),
    )
import os

API_V1_STR = '/api/v1'

SECRET_KEY = os.getenvb(b'SECRET_KEY')
if not SECRET_KEY:
    SECRET_KEY = os.urandom(32)

ACCESS_TOKEN_EXPIRE_MINUTES = 60 * 24 * 8  # 60 minutes * 24 hours * 8 days

SERVER_NAME = os.getenv('SERVER_NAME')
SENTRY_DSN = os.getenv('SENTRY_DSN')

POSTGRES_PASSWORD = os.getenv('POSTGRES_PASSWORD')
FIRST_SUPERUSER = os.getenv('FIRST_SUPERUSER')
FIRST_SUPERUSER_PASSWORD = os.getenv('FIRST_SUPERUSER_PASSWORD')
def importkey(self, phandle, pauth, objauth, privkey, objattrs=None,
              seal=None, alg=None, passin=None):

    if privkey and len(privkey) > 0:
        exists = os.path.isfile(privkey)
        if not exists:
            raise RuntimeError("File '%s' path is invalid or is missing" % privkey)
    else:
        sys.exit("Invalid file path")

    _, priv = mkstemp(prefix='', suffix='.priv', dir=self._tmp)
    _, pub = mkstemp(prefix='', suffix='.pub', dir=self._tmp)

    # If the key is an OpenSSH key, convert it to PEM format
    pem_priv_name = None
    with open(privkey, "rb") as f:
        privey_data = f.read()
        if privey_data.startswith(b'-----BEGIN OPENSSH PRIVATE KEY-----'):
            if passin:
                # Parse passin to extract the password
                if passin.startswith('env:'):
                    password_bytes = os.getenvb(passin[4:].encode())
                elif passin.startswith('file:'):
                    with open(passin[5:], 'rb') as f:
                        password_bytes = f.read()
                elif passin.startswith('pass:'******'ecc'
            elif isinstance(ssh_key, RSAPrivateKey):
                alg = 'rsa'
            else:
                raise NotImplementedError(
                    "Unsupported SSH key type {}".format(type(ssh_key)))

            pem_key = ssh_key.private_bytes(
                encoding=serialization.Encoding.PEM,
                format=serialization.PrivateFormat.TraditionalOpenSSL,
                encryption_algorithm=enc_alg)

            pem_priv_fd, pem_priv_name = mkstemp(prefix='', suffix='.privpem',
                                                 dir=self._tmp)
            os.write(pem_priv_fd, pem_key)
            os.close(pem_priv_fd)
            privkey = pem_priv_name
        elif alg is None:
            # Guess the key algorithm from the PEM header
            if privey_data.startswith(b'-----BEGIN EC PARAMETERS-----'):
                alg = 'ecc'
            elif privey_data.startswith(b'-----BEGIN EC PRIVATE KEY-----'):
                alg = 'ecc'
            elif privey_data.startswith(b'-----BEGIN RSA PRIVATE KEY-----'):
                alg = 'rsa'
            else:
                raise RuntimeError(
                    "Unable to detect key type, use --algorithm to specify it")

    parent_path = str(phandle)
    cmd = [
        'tpm2_import', '-V', '-C', parent_path, '-i', privkey, '-u', pub,
        '-r', priv
    ]

    if pauth and len(pauth) > 0:
        cmd.extend(['-P', pauth])

    if objauth and len(objauth) > 0:
        cmd.extend(['-p', objauth])

    if objattrs != None:
        cmd.extend(['-a', objattrs])

    if seal != None:
        cmd.extend(['-i', '-'])

    if alg != None:
        cmd.extend(['-G', alg])

    if passin is not None:
        cmd.extend(['--passin', passin])

    p = Popen(cmd, stdout=PIPE, stderr=PIPE, stdin=PIPE, env=os.environ)
    stdout, stderr = p.communicate(input=seal)
    rc = p.wait()

    if pem_priv_name is not None:
        os.remove(pem_priv_name)

    if (rc != 0):
        os.remove(pub)
        os.remove(priv)
        print("command: %s" % str(" ".join(cmd)))
        raise RuntimeError("Could not execute tpm2_import: %s" % stderr)

    return priv, pub, stdout
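The --passin handling in the import routine above follows the OpenSSL-style passphrase convention (env:VAR, file:PATH, pass:VALUE). Below is a minimal, standalone sketch of just that dispatch; the helper name is made up, and since the pass: branch is partly elided (masked) in the source above, its body here is an assumption.

import os

def resolve_passin(passin: str) -> bytes:
    """Resolve an OpenSSL-style passphrase argument to raw password bytes (illustrative helper)."""
    if passin.startswith('env:'):
        # read the password from an environment variable, as bytes
        return os.getenvb(passin[4:].encode())
    if passin.startswith('file:'):
        # read the password from a file
        with open(passin[5:], 'rb') as f:
            return f.read()
    if passin.startswith('pass:'):
        # password given directly on the command line (assumed; elided in the source)
        return passin[5:].encode()
    raise ValueError("Unsupported passphrase format: %r" % passin)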
STATIC_URL = '/static/'

if not DEBUG:
    STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.CachedStaticFilesStorage'

# Email stuff
ADMINS = (('DjangoCon Team', '*****@*****.**'),)
SERVER_EMAIL = '*****@*****.**'
EMAIL_SUBJECT_PREFIX = '[2016.djangocon.eu] '

# Tinyblog stuff
TINYBLOG_ROOT_DIR = os.path.join(BASE_DIR, 'tinyblog', 'articles')

# Tito webhooks
TITO_SHARED_SECRET = os.getenvb(b'TITO_SHARED_SECRET', b'')

# Slack integration
SLACK_API_TOKENS = {
    'djangoconeu': os.getenv('SLACK_TOKEN_DJANGOCONEU'),
    'djangoconeu-attendees': os.getenv('SLACK_TOKEN_DJANGOCONEU_ATTENDEES'),
}

if DEBUG:
    # Use `python -m http.server 8888` from the uploads/ directory to serve
    MEDIA_ROOT = os.path.join(BASE_DIR, 'uploads')
    MEDIA_URL = 'http://localhost:8888/'
    EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
else:
    SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')