# Internationalization override from the environment config.
USE_I18N = ENV_TOKENS.get('USE_I18N', USE_I18N)

# Additional installed apps
INSTALLED_APPS += tuple(ENV_TOKENS.get('ADDL_INSTALLED_APPS', []))

# Feature flags: 'FEATURES' wins, with legacy 'MITX_FEATURES' as fallback.
ENV_FEATURES = ENV_TOKENS.get('FEATURES', ENV_TOKENS.get('MITX_FEATURES', {}))
FEATURES.update(ENV_FEATURES)

WIKI_ENABLED = ENV_TOKENS.get('WIKI_ENABLED', WIKI_ENABLED)

local_loglevel = ENV_TOKENS.get('LOCAL_LOGLEVEL', 'INFO')
LOGGING = get_logger_config(
    LOG_DIR,
    logging_env=ENV_TOKENS['LOGGING_ENV'],
    syslog_addr=(ENV_TOKENS['SYSLOG_SERVER'], 514),
    local_loglevel=local_loglevel,
    debug=False,
    service_variant=SERVICE_VARIANT,
)

COURSE_LISTINGS = ENV_TOKENS.get('COURSE_LISTINGS', {})
SUBDOMAIN_BRANDING = ENV_TOKENS.get('SUBDOMAIN_BRANDING', {})
VIRTUAL_UNIVERSITIES = ENV_TOKENS.get('VIRTUAL_UNIVERSITIES', [])
META_UNIVERSITIES = ENV_TOKENS.get('META_UNIVERSITIES', {})
COMMENTS_SERVICE_URL = ENV_TOKENS.get("COMMENTS_SERVICE_URL", '')
COMMENTS_SERVICE_KEY = ENV_TOKENS.get("COMMENTS_SERVICE_KEY", '')
CERT_QUEUE = ENV_TOKENS.get("CERT_QUEUE", 'test-pull')
ZENDESK_URL = ENV_TOKENS.get("ZENDESK_URL")
FEEDBACK_SUBMISSION_EMAIL = ENV_TOKENS.get("FEEDBACK_SUBMISSION_EMAIL")
MKTG_URLS = ENV_TOKENS.get('MKTG_URLS', MKTG_URLS)

# git repo loading environment
# Honor an externally supplied private certificate directory, if present.
_cert_private_dir = os.environ.get('CERT_PRIVATE_DIR')
if _cert_private_dir is not None:
    CERT_PRIVATE_DIR = path(_cert_private_dir)

# This directory and file must exist in CERT_PRIVATE_DIR
# if you are using custom templates and custom cert config
TEMPLATE_DATA_SUBDIR = 'template_data'
CERT_DATA_FILE = 'cert-data.yml'

# DEFAULTS
DEBUG = False

LOGGING = get_logger_config(
    ENV_ROOT,
    logging_env="dev",
    local_loglevel="INFO",
    dev_env=True,
    debug=False,
)

# Default for the gpg dir
# Specify the CERT_KEY_ID before running the test suite
CERT_GPG_DIR = '{0}/.gnupg'.format(os.environ['HOME'])

# dummy key:
# https://raw.githubusercontent.com/edx/configuration/master/playbooks/roles/certs/files/example-private-key.txt
CERT_KEY_ID = 'FEF8D954'  # or leave blank to skip gpg signing
# CERT_KEY_ID = ''

# Specify the default name of the certificate PDF
CERT_FILENAME = 'Certificate.pdf'
# Honor an externally supplied private certificate directory, if present.
_cert_private_dir = os.environ.get('CERT_PRIVATE_DIR')
if _cert_private_dir is not None:
    CERT_PRIVATE_DIR = path(_cert_private_dir)

# This directory and file must exist in CERT_PRIVATE_DIR
# if you are using custom templates and custom cert config
TEMPLATE_DATA_SUBDIR = 'template_data'
CERT_DATA_FILE = 'cert-data.yml'

# DEFAULTS
DEBUG = False

# This needs to be set on MacOS or anywhere you want logging to simply go
# to an output file.
LOGGING_DEV_ENV = True

LOGGING = get_logger_config(
    ENV_ROOT,
    logging_env="dev",
    local_loglevel="INFO",
    dev_env=LOGGING_DEV_ENV,
    debug=False,
)

# Default for the gpg dir
# Specify the CERT_KEY_ID before running the test suite
CERT_GPG_DIR = '{0}/.gnupg'.format(os.environ['HOME'])

# dummy key:
# https://raw.githubusercontent.com/edx/configuration/master/playbooks/roles/certs/files/example-private-key.txt
CERT_KEY_ID = 'FEF8D954'  # or leave blank to skip gpg signing
# CERT_KEY_ID = ''

# Specify the default name of the certificate PDF
CERT_FILENAME = 'Certificate.pdf'
""" # We intentionally define lots of variables that aren't used, and # want to import all variables from base settings files # pylint: disable=W0401, W0614 from .test import * from logsettings import get_logger_config ENABLE_JASMINE = True DEBUG = True LOGGING = get_logger_config(TEST_ROOT / "log", logging_env="dev", tracking_filename="tracking.log", dev_env=True, debug=True, local_loglevel='ERROR', console_loglevel='ERROR') PIPELINE_JS['js-test-source'] = { 'source_filenames': sum([ pipeline_group['source_filenames'] for group_name, pipeline_group in sorted(PIPELINE_JS.items(), key=lambda item: item[1].get('test_order', 1e100)) if group_name != 'spec' ], []), 'output_filename': 'js/lms-test-source.js' }
UPLOAD_PATH_PREFIX = ENV_TOKENS.get('UPLOAD_PATH_PREFIX', UPLOAD_PATH_PREFIX)
UPLOAD_URL_EXPIRE = ENV_TOKENS.get('UPLOAD_URL_EXPIRE', UPLOAD_URL_EXPIRE)

# Deprecated, use UPLOAD_BUCKET and UPLOAD_PATH_PREFIX instead
S3_BUCKET = ENV_TOKENS.get('S3_BUCKET', UPLOAD_BUCKET)
S3_PATH_PREFIX = ENV_TOKENS.get('S3_PATH_PREFIX', UPLOAD_PATH_PREFIX)

ALLOWED_HOSTS = ENV_TOKENS.get('ALLOWED_HOSTS', ALLOWED_HOSTS)
LOG_DIR = ENV_TOKENS['LOG_DIR']
local_loglevel = ENV_TOKENS.get('LOCAL_LOGLEVEL', 'INFO')
TIME_ZONE = ENV_TOKENS.get('TIME_ZONE', TIME_ZONE)

LOGGING = get_logger_config(
    LOG_DIR,
    logging_env=ENV_TOKENS['LOGGING_ENV'],
    syslog_addr=(ENV_TOKENS['SYSLOG_SERVER'], 514),
    local_loglevel=local_loglevel,
    debug=False,
)

# RabbitMQ connection parameters; host/vhost are encoded to ascii byte strings.
RABBIT_HOST = ENV_TOKENS.get('RABBIT_HOST', RABBIT_HOST).encode('ascii')
RABBIT_PORT = ENV_TOKENS.get('RABBIT_PORT', RABBIT_PORT)
RABBIT_VHOST = ENV_TOKENS.get('RABBIT_VHOST', RABBIT_VHOST).encode('ascii')
RABBIT_TLS = ENV_TOKENS.get('RABBIT_TLS', RABBIT_TLS)

with open(ENV_ROOT / CONFIG_PREFIX + "auth.json") as auth_file:
    AUTH_TOKENS = json.load(auth_file)

DATABASES = AUTH_TOKENS['DATABASES']

# The normal database user does not have enough permissions to run migrations.
# Migrations are run with separate credentials, given as DB_MIGRATION_*
# environment variables
FEATURES[ 'REQUIRE_COURSE_EMAIL_AUTH'] = False # Give all courses email (don't require django-admin perms) FEATURES['ENABLE_HINTER_INSTRUCTOR_VIEW'] = True FEATURES['ENABLE_INSTRUCTOR_BETA_DASHBOARD'] = True FEATURES['MULTIPLE_ENROLLMENT_ROLES'] = True FEATURES['ENABLE_SHOPPING_CART'] = True FEATURES['AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'] = True FEATURES['ENABLE_S3_GRADE_DOWNLOADS'] = True FEEDBACK_SUBMISSION_EMAIL = "*****@*****.**" WIKI_ENABLED = True LOGGING = get_logger_config(ENV_ROOT / "log", logging_env="dev", local_loglevel="DEBUG", dev_env=True, debug=True) # If there is a database called 'read_replica', you can use the use_read_replica_if_available # function in util/query.py, which is useful for very large database reads DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ENV_ROOT / "db" / "edx.db", } } CACHES = { # This is the cache used for most things. # In staging/prod envs, the sessions also live here.
} # We want Bulk Email running on the high-priority queue, so we define the # routing key that points to it. At the moment, the name is the same. # We have to reset the value here, since we have changed the value of the queue name. BULK_EMAIL_ROUTING_KEY = HIGH_PRIORITY_QUEUE LANGUAGE_DICT = dict(LANGUAGES) # Additional installed apps for app in ADDL_INSTALLED_APPS: INSTALLED_APPS += (app, ) LOGGING = get_logger_config(LOG_DIR, logging_env=LOGGING_ENV, local_loglevel=LOCAL_LOGLEVEL, debug=False, service_variant=SERVICE_VARIANT) for name, value in ENV_TOKENS.get("CODE_JAIL", {}).items(): oldvalue = CODE_JAIL.get(name) if isinstance(oldvalue, dict): for subname, subvalue in value.items(): oldvalue[subname] = subvalue else: CODE_JAIL[name] = value if FEATURES.get('AUTH_USE_CAS'): AUTHENTICATION_BACKENDS = ( 'django.contrib.auth.backends.ModelBackend', 'django_cas.backends.CASBackend',
import json
import os
import sys

from logsettings import get_logger_config
from path import Path

ROOT_PATH = Path(__file__).dirname()
REPO_PATH = ROOT_PATH
ENV_ROOT = REPO_PATH.dirname()

# DEFAULTS
DEBUG = False

LOGGING = get_logger_config(ENV_ROOT / "log",
                            logging_env="dev",
                            local_loglevel="DEBUG",
                            dev_env=True,
                            debug=True)

GRADER_ROOT = os.path.abspath(os.path.join(ENV_ROOT, 'data/6.00x/graders'))

# Dev setting.
DO_SANDBOXING = False

# AWS
if os.path.isfile(ENV_ROOT / "env.json"):
    # FIX: `json` was used below without being imported in this chunk
    # (NameError at import time); `import json` added above. The bare
    # Python-2-only `print` statement is replaced with the function form,
    # which behaves identically for a single argument on Python 2 and 3.
    print("Opening env.json file")
    with open(ENV_ROOT / "env.json") as env_file:
        ENV_TOKENS = json.load(env_file)
import json
import os
import sys

from logsettings import get_logger_config
from path import path

ROOT_PATH = path(__file__).dirname()
REPO_PATH = ROOT_PATH
ENV_ROOT = REPO_PATH.dirname()

# DEFAULTS
DEBUG = False

LOGGING = get_logger_config(ENV_ROOT / "log",
                            logging_env="dev",
                            local_loglevel="DEBUG",
                            dev_env=True,
                            debug=True)

GRADER_ROOT = os.path.abspath(os.path.join(ENV_ROOT, 'data/6.00x/graders'))

# Dev setting.
DO_SANDBOXING = False

# AWS
if os.path.isfile(ENV_ROOT / "env.json"):
    # FIX: neither `os` nor `json` was imported in this chunk although both
    # are used (NameError at import time); both imports added above. The
    # bare Python-2-only `print` statement is replaced with the function
    # form, identical for a single argument on Python 2 and 3.
    print("Opening env.json file")
    with open(ENV_ROOT / "env.json") as env_file:
        ENV_TOKENS = json.load(env_file)
CERT_PRIVATE_DIR = path(os.environ['CERT_PRIVATE_DIR'])

# This directory and file must exist in CERT_PRIVATE_DIR
# if you are using custom templates and custom cert config
TEMPLATE_DATA_SUBDIR = 'template_data'
CERT_DATA_FILE = 'cert-data.yml'

# DEFAULTS
DEBUG = False

# This needs to be set on MacOS or anywhere you want logging to simply go
# to an output file.
LOGGING_DEV_ENV = True

# Turn on logging in dev stack
# FIX: the keyword was spelled LOCAL_LOGLEVEL; every other call site of
# get_logger_config uses local_loglevel, so the uppercase spelling would
# raise TypeError (unexpected keyword argument) at import time.
LOGGING = get_logger_config(ENV_ROOT,
                            logging_env="dev",
                            local_loglevel="INFO",
                            dev_env=LOGGING_DEV_ENV,
                            debug=False)

# Directory in which to store Logs
LOG_DIR = '/var/tmp'

# Local Logging Level
LOCAL_LOGLEVEL = 'INFO'

# Default for the gpg dir
# Specify the CERT_KEY_ID before running the test suite
CERT_GPG_DIR = '{0}/.gnupg'.format(os.environ['HOME'])

# dummy key:
# https://raw.githubusercontent.com/edx/configuration/master/playbooks/roles/certs/files/example-private-key.txt
CERT_KEY_ID = 'FEF8D954'

# or leave blank to skip gpg signing
# CERT_KEY_ID = ''
# Remove the items that should be used to update dicts, and apply them separately rather # than pumping them into the local vars. dict_updates = { key: config_from_yaml.pop(key, None) for key in DICT_UPDATE_KEYS } for key, value in dict_updates.items(): if value: vars()[key].update(value) vars().update(config_from_yaml) LOGGING = get_logger_config(LOG_DIR, logging_env=LOGGING_ENV, syslog_addr=(SYSLOG_SERVER, 514), local_loglevel=LOCAL_LOGLEVEL, debug=False) # The normal database user does not have enough permissions to run migrations. # Migrations are run with separate credentials, given as DB_MIGRATION_* # environment variables DATABASES['default'].update({ 'ENGINE': os.environ.get('DB_MIGRATION_ENGINE', DATABASES['default']['ENGINE']), 'USER': os.environ.get('DB_MIGRATION_USER', DATABASES['default']['USER']), 'PASSWORD': os.environ.get('DB_MIGRATION_PASS', DATABASES['default']['PASSWORD']), 'NAME': os.environ.get('DB_MIGRATION_NAME', DATABASES['default']['NAME']),
# Additional installed apps
INSTALLED_APPS += tuple(ENV_TOKENS.get("ADDL_INSTALLED_APPS", []))

# Feature flags ("MITX_FEATURES" is the legacy spelling).
ENV_FEATURES = ENV_TOKENS.get("FEATURES", ENV_TOKENS.get("MITX_FEATURES", {}))
FEATURES.update(ENV_FEATURES)

WIKI_ENABLED = ENV_TOKENS.get("WIKI_ENABLED", WIKI_ENABLED)

local_loglevel = ENV_TOKENS.get("LOCAL_LOGLEVEL", "INFO")
LOGGING = get_logger_config(
    LOG_DIR,
    logging_env=ENV_TOKENS["LOGGING_ENV"],
    local_loglevel=local_loglevel,
    debug=False,
    service_variant=SERVICE_VARIANT,
)

COURSE_LISTINGS = ENV_TOKENS.get("COURSE_LISTINGS", {})
SUBDOMAIN_BRANDING = ENV_TOKENS.get("SUBDOMAIN_BRANDING", {})
VIRTUAL_UNIVERSITIES = ENV_TOKENS.get("VIRTUAL_UNIVERSITIES", [])
META_UNIVERSITIES = ENV_TOKENS.get("META_UNIVERSITIES", {})
COMMENTS_SERVICE_URL = ENV_TOKENS.get("COMMENTS_SERVICE_URL", "")
COMMENTS_SERVICE_KEY = ENV_TOKENS.get("COMMENTS_SERVICE_KEY", "")
CERT_QUEUE = ENV_TOKENS.get("CERT_QUEUE", "test-pull")
ZENDESK_URL = ENV_TOKENS.get("ZENDESK_URL")
FEEDBACK_SUBMISSION_EMAIL = ENV_TOKENS.get("FEEDBACK_SUBMISSION_EMAIL")
MKTG_URLS = ENV_TOKENS.get("MKTG_URLS", MKTG_URLS)
import json
import os

from logsettings import get_logger_config
from path import path

ROOT_PATH = path(__file__).dirname()
REPO_PATH = ROOT_PATH
ENV_ROOT = REPO_PATH.dirname()
TEMPLATE_DIR = '{0}/template_data'.format(REPO_PATH)

# DEFAULTS
DEBUG = False

LOGGING = get_logger_config(
    ENV_ROOT / "log",
    logging_env="dev",
    local_loglevel="DEBUG",
    dev_env=True,
    debug=True,
)

# Default long names, these can be overridden in
# env.json
# Full list of courses:
# 'BerkeleyX/CS169.1x/2012_Fall',
# 'BerkeleyX/CS169.2x/2012_Fall',
# 'BerkeleyX/CS188.1x/2012_Fall',
# 'BerkeleyX/CS184.1x/2012_Fall',
# 'HarvardX/CS50x/2012',
# 'HarvardX/PH207x/2012_Fall',
# 'MITx/3.091x/2012_Fall',
# 'MITx/6.002x/2012_Fall',
# 'MITx/6.00x/2012_Fall',
# Load the non-secret environment configuration.
with open(CONFIG_ROOT / CONFIG_PREFIX + "env.json") as env_file:
    ENV_TOKENS = json.load(env_file)

XQUEUES = ENV_TOKENS['XQUEUES']
XQUEUE_WORKERS_PER_QUEUE = ENV_TOKENS['XQUEUE_WORKERS_PER_QUEUE']
WORKER_COUNT = ENV_TOKENS.get('WORKER_COUNT', XQUEUE_WORKERS_PER_QUEUE * 2)

S3_BUCKET = ENV_TOKENS.get('S3_BUCKET', S3_BUCKET)
S3_PATH_PREFIX = ENV_TOKENS.get('S3_PATH_PREFIX', S3_PATH_PREFIX)

LOG_DIR = ENV_TOKENS['LOG_DIR']
local_loglevel = ENV_TOKENS.get('LOCAL_LOGLEVEL', 'INFO')
TIME_ZONE = ENV_TOKENS.get('TIME_ZONE', TIME_ZONE)

LOGGING = get_logger_config(
    LOG_DIR,
    logging_env=ENV_TOKENS['LOGGING_ENV'],
    local_loglevel=local_loglevel,
    debug=False,
)

# Rabbit parameters are encoded to ascii byte strings.
RABBIT_HOST = ENV_TOKENS.get('RABBIT_HOST', RABBIT_HOST).encode('ascii')
RABBIT_VHOST = ENV_TOKENS.get('RABBIT_VHOST', RABBIT_VHOST).encode('ascii')

# Secret things: passwords, access keys, etc.
with open(CONFIG_ROOT / CONFIG_PREFIX + "auth.json") as auth_file:
    AUTH_TOKENS = json.load(auth_file)

DATABASES = AUTH_TOKENS['DATABASES']
AWS_ACCESS_KEY_ID = AUTH_TOKENS["AWS_ACCESS_KEY_ID"]
AWS_SECRET_ACCESS_KEY = AUTH_TOKENS["AWS_SECRET_ACCESS_KEY"]
REQUESTS_BASIC_AUTH = AUTH_TOKENS["REQUESTS_BASIC_AUTH"]
RABBITMQ_USER = AUTH_TOKENS.get('RABBITMQ_USER', 'guest').encode('ascii')
""" This config file runs the simplest dev environment""" # We intentionally define lots of variables that aren't used, and # want to import all variables from base settings files # pylint: disable=W0401, W0614 from .common import * from logsettings import get_logger_config DEBUG = True TEMPLATE_DEBUG = DEBUG LOGGING = get_logger_config(ENV_ROOT / "log", logging_env="dev", tracking_filename="tracking.log", dev_env=True, debug=True) modulestore_options = { 'default_class': 'xmodule.raw_module.RawDescriptor', 'fs_root': GITHUB_REPO_ROOT, 'render_template': 'edxmako.shortcuts.render_to_string', } MODULESTORE = { 'default': { 'ENGINE': 'xmodule.modulestore.draft.DraftModuleStore', 'DOC_STORE_CONFIG': DOC_STORE_CONFIG, 'OPTIONS': modulestore_options }, 'direct': {
ROOT_URLCONF = "xqueue.urls"

# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = "xqueue.wsgi.application"

TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    os.path.join(ROOT_PATH, "templates"),
)

INSTALLED_APPS = (
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.sites",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    # Uncomment the next line to enable the admin:
    # 'django.contrib.admin',
    # Uncomment the next line to enable admin documentation:
    # 'django.contrib.admindocs',
    "queue",
    "south",
)

LOGIN_URL = "/xqueue/login"

LOGGING = get_logger_config(
    ENV_ROOT / "log",
    logging_env="dev",
    dev_env=True,
    debug=True,
)
# Peer-grading calibration bounds, overridable from the environment config.
PEER_GRADER_MAXIMUM_TO_CALIBRATE = int(
    ENV_TOKENS.get("PEER_GRADER_MAXIMUM_TO_CALIBRATE", PEER_GRADER_MAXIMUM_TO_CALIBRATE))
PEER_GRADER_MIN_NORMALIZED_CALIBRATION_ERROR = float(
    ENV_TOKENS.get("PEER_GRADER_MIN_NORMALIZED_CALIBRATION_ERROR", PEER_GRADER_MIN_NORMALIZED_CALIBRATION_ERROR))

#Submission Expiration
EXPIRE_SUBMISSIONS_AFTER = int(ENV_TOKENS.get('EXPIRE_SUBMISSIONS_AFTER', EXPIRE_SUBMISSIONS_AFTER))
RESET_SUBMISSIONS_AFTER = int(ENV_TOKENS.get('RESET_SUBMISSIONS_AFTER', RESET_SUBMISSIONS_AFTER))

#Time zone (shows up in logs)
TIME_ZONE = ENV_TOKENS.get('TIME_ZONE', TIME_ZONE)

local_loglevel = ENV_TOKENS.get('LOCAL_LOGLEVEL', 'INFO')
LOG_DIR = ENV_TOKENS.get("LOG_DIR", ENV_ROOT / "log")
# NOTE(review): local_loglevel and LOG_DIR are computed just above but are
# not passed to get_logger_config here, unlike sibling configs — confirm
# whether they should be.
LOGGING = get_logger_config(debug=DEBUG)

# Secret things: passwords, access keys, etc.
with open(CONFIG_ROOT / CONFIG_PREFIX + "auth.json") as auth_file:
    AUTH_TOKENS = json.load(auth_file)

SECRET_KEY = AUTH_TOKENS.get("SECRET_KEY")
XQUEUE_INTERFACE = AUTH_TOKENS['XQUEUE_INTERFACE']
GRADING_CONTROLLER_INTERFACE = AUTH_TOKENS['GRADING_CONTROLLER_INTERFACE']
DATABASES = AUTH_TOKENS['DATABASES']
AWS_ACCESS_KEY_ID = AUTH_TOKENS.get("AWS_ACCESS_KEY_ID", "")
AWS_SECRET_ACCESS_KEY = AUTH_TOKENS.get("AWS_SECRET_ACCESS_KEY", "")

#Celery settings
BROKER_URL = AUTH_TOKENS.get("BROKER_URL", "")
BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 3600}
# NOTE(review): the result backend deliberately reads the BROKER_URL key —
# confirm this reuse is intended.
CELERY_RESULT_BACKEND = AUTH_TOKENS.get("BROKER_URL", "")
# Timezone overrides TIME_ZONE = ENV_TOKENS.get("TIME_ZONE", TIME_ZONE) # Translation overrides LANGUAGES = ENV_TOKENS.get("LANGUAGES", LANGUAGES) LANGUAGE_CODE = ENV_TOKENS.get("LANGUAGE_CODE", LANGUAGE_CODE) USE_I18N = ENV_TOKENS.get("USE_I18N", USE_I18N) ENV_FEATURES = ENV_TOKENS.get("FEATURES", ENV_TOKENS.get("MITX_FEATURES", {})) for feature, value in ENV_FEATURES.items(): FEATURES[feature] = value LOGGING = get_logger_config( LOG_DIR, logging_env=ENV_TOKENS["LOGGING_ENV"], syslog_addr=(ENV_TOKENS["SYSLOG_SERVER"], 514), debug=False, service_variant=SERVICE_VARIANT, ) # theming start: PLATFORM_NAME = ENV_TOKENS.get("PLATFORM_NAME", "edX") # Event Tracking if "TRACKING_IGNORE_URL_PATTERNS" in ENV_TOKENS: TRACKING_IGNORE_URL_PATTERNS = ENV_TOKENS.get("TRACKING_IGNORE_URL_PATTERNS") ################ SECURE AUTH ITEMS ############################### # Secret things: passwords, access keys, etc. with open(CONFIG_ROOT / CONFIG_PREFIX + "auth.json") as auth_file:
# Service-variant configs live in files prefixed with the variant name.
if SERVICE_VARIANT:
    CONFIG_PREFIX = SERVICE_VARIANT + "."

with open(CONFIG_ROOT / CONFIG_PREFIX + "env.json") as env_file:
    ENV_TOKENS = json.load(env_file)

XQUEUES = ENV_TOKENS['XQUEUES']
XQUEUE_WORKERS_PER_QUEUE = ENV_TOKENS['XQUEUE_WORKERS_PER_QUEUE']

S3_BUCKET_PREFIX = ENV_TOKENS.get('S3_BUCKET_PREFIX', S3_BUCKET_PREFIX)

LOG_DIR = ENV_TOKENS['LOG_DIR']
local_loglevel = ENV_TOKENS.get('LOCAL_LOGLEVEL', 'INFO')
TIME_ZONE = ENV_TOKENS.get('TIME_ZONE', TIME_ZONE)

# NOTE(review): syslog port 5140 and debug=True differ from the sibling
# configs in this codebase, which use port 514 and debug=False — confirm
# both are intentional for this deployment.
LOGGING = get_logger_config(LOG_DIR,
                            logging_env=ENV_TOKENS['LOGGING_ENV'],
                            syslog_addr=(ENV_TOKENS['SYSLOG_SERVER'], 5140),
                            local_loglevel=local_loglevel,
                            debug=True)

# Encoded to an ascii byte string, matching the other Rabbit settings.
RABBIT_HOST = ENV_TOKENS.get('RABBIT_HOST', RABBIT_HOST).encode('ascii')
RABBITMQ_VIRTUAL_HOST = ENV_TOKENS.get('RABBIT_VHOST')

# Secret things: passwords, access keys, etc.
with open(CONFIG_ROOT / CONFIG_PREFIX + "auth.json") as auth_file:
    AUTH_TOKENS = json.load(auth_file)

DATABASES = AUTH_TOKENS['DATABASES']
AWS_ACCESS_KEY_ID = AUTH_TOKENS["AWS_ACCESS_KEY_ID"]
AWS_SECRET_ACCESS_KEY = AUTH_TOKENS["AWS_SECRET_ACCESS_KEY"]
REQUESTS_BASIC_AUTH = AUTH_TOKENS["REQUESTS_BASIC_AUTH"]
RABBITMQ_USER = AUTH_TOKENS.get('RABBITMQ_USER', 'guest').encode('ascii')
# allow for environments to specify what cookie name our login subsystem should use # this is to fix a bug regarding simultaneous logins between edx.org and edge.edx.org which can # happen with some browsers (e.g. Firefox) if SESSION_COOKIE_NAME: # NOTE, there's a bug in Django (http://bugs.python.org/issue18012) which necessitates this being a str() SESSION_COOKIE_NAME = str(SESSION_COOKIE_NAME) # Additional installed apps for app in ADDL_INSTALLED_APPS: INSTALLED_APPS += (app,) LOGGING = get_logger_config(LOG_DIR, local_loglevel=LOCAL_LOGLEVEL, logging_env=LOGGING_ENV, debug=False, service_variant=SERVICE_VARIANT) if AUTH_USE_CAS: AUTHENTICATION_BACKENDS = ( 'django.contrib.auth.backends.ModelBackend', 'django_cas.backends.CASBackend', ) INSTALLED_APPS += ('django_cas',) MIDDLEWARE_CLASSES += ('django_cas.middleware.CASMiddleware',) if CAS_ATTRIBUTE_CALLBACK: import importlib CAS_USER_DETAILS_RESOLVER = getattr( importlib.import_module(CAS_ATTRIBUTE_CALLBACK['module']), CAS_ATTRIBUTE_CALLBACK['function']
LANGUAGES = ENV_TOKENS.get('LANGUAGES', LANGUAGES) LANGUAGE_CODE = ENV_TOKENS.get('LANGUAGE_CODE', LANGUAGE_CODE) USE_I18N = ENV_TOKENS.get('USE_I18N', USE_I18N) ENV_FEATURES = ENV_TOKENS.get('FEATURES', ENV_TOKENS.get('MITX_FEATURES', {})) for feature, value in ENV_FEATURES.items(): FEATURES[feature] = value # Additional installed apps for app in ENV_TOKENS.get('ADDL_INSTALLED_APPS', []): INSTALLED_APPS += (app, ) WIKI_ENABLED = ENV_TOKENS.get('WIKI_ENABLED', WIKI_ENABLED) LOGGING = get_logger_config(LOG_DIR, logging_env=ENV_TOKENS['LOGGING_ENV'], debug=False, service_variant=SERVICE_VARIANT) #theming start: PLATFORM_NAME = ENV_TOKENS.get('PLATFORM_NAME', 'edX') # Event Tracking if "TRACKING_IGNORE_URL_PATTERNS" in ENV_TOKENS: TRACKING_IGNORE_URL_PATTERNS = ENV_TOKENS.get( "TRACKING_IGNORE_URL_PATTERNS") # Django CAS external authentication settings CAS_EXTRA_LOGIN_PARAMS = ENV_TOKENS.get("CAS_EXTRA_LOGIN_PARAMS", None) if FEATURES.get('AUTH_USE_CAS'): CAS_SERVER_URL = ENV_TOKENS.get("CAS_SERVER_URL", None) AUTHENTICATION_BACKENDS = (
# Load the YAML configuration file.
with codecs.open(CONFIG_FILE, encoding='utf-8') as f:
    config_from_yaml = yaml.safe_load(f)

# Pull dict-typed settings out first and merge them into the existing dicts,
# rather than letting vars().update() replace those dicts wholesale.
dict_updates = {
    key: config_from_yaml.pop(key, None)
    for key in DICT_UPDATE_KEYS
}
for key, value in dict_updates.items():
    if value:
        vars()[key].update(value)

# Everything else overrides the module-level defaults directly.
vars().update(config_from_yaml)

LOGGING = get_logger_config(
    LOG_DIR,
    logging_env=LOGGING_ENV,
    syslog_addr=(SYSLOG_SERVER, 514),
    local_loglevel=LOCAL_LOGLEVEL,
    debug=False,
)

# The normal database user does not have enough permissions to run migrations.
# Migrations are run with separate credentials, given as DB_MIGRATION_*
# environment variables
DATABASES['default'].update({
    'ENGINE': os.environ.get('DB_MIGRATION_ENGINE', DATABASES['default']['ENGINE']),
    'USER': os.environ.get('DB_MIGRATION_USER', DATABASES['default']['USER']),
    'PASSWORD': os.environ.get('DB_MIGRATION_PASS', DATABASES['default']['PASSWORD']),
    'NAME': os.environ.get('DB_MIGRATION_NAME', DATABASES['default']['NAME']),
    'HOST': os.environ.get('DB_MIGRATION_HOST', DATABASES['default']['HOST']),
    'PORT': os.environ.get('DB_MIGRATION_PORT', DATABASES['default']['PORT']),
})
# This directory and file must exist in CERT_PRIVATE_DIR
# if you are using custom templates and custom cert config
TEMPLATE_DATA_SUBDIR = 'template_data'
CERT_DATA_FILE = 'cert-data.yml'

# DEFAULTS
DEBUG = False

# This needs to be set on MacOS or anywhere you want logging to simply go
# to an output file.
LOGGING_DEV_ENV = True

# Turn on logging in dev stack
# FIX: the keyword was spelled LOCAL_LOGLEVEL; every other call site of
# get_logger_config uses local_loglevel, so the uppercase spelling would
# raise TypeError (unexpected keyword argument) at import time.
LOGGING = get_logger_config(ENV_ROOT,
                            logging_env="dev",
                            local_loglevel="INFO",
                            dev_env=LOGGING_DEV_ENV,
                            debug=False)

# Directory in which to store Logs
LOG_DIR = '/var/tmp'

# Local Logging Level
LOCAL_LOGLEVEL = 'INFO'

# Default for the gpg dir
# Specify the CERT_KEY_ID before running the test suite
CERT_GPG_DIR = '{0}/.gnupg'.format(os.environ['HOME'])

# dummy key:
# https://raw.githubusercontent.com/edx/configuration/master/playbooks/roles/certs/files/example-private-key.txt
CERT_KEY_ID = 'FEF8D954'

# or leave blank to skip gpg signing
from settings import * from logsettings import get_logger_config import os import os.path import json from uuid import uuid4 log_dir = REPO_PATH / "log" try: os.makedirs(log_dir) except: pass LOGGING = get_logger_config(log_dir, logging_env="test", dev_env=True, debug=True) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.mysql', 'NAME': 'test_xqueue', 'TEST': { # We need to use TEST['NAME'] here, # otherwise Django tests will use an in-memory database. # In-memory databases do not support access from # multiple threads, which the integration tests need. # We also need to choose *unique* names to avoid # conflicts in the Jenkins server 'NAME': 'test_xqueue_%s' % uuid4().hex, },
from settings import *
from logsettings import get_logger_config
import logging

# Quiet down the (very chatty) south migration logger during tests.
south_logger = logging.getLogger('south')
south_logger.setLevel(logging.INFO)

log_dir = REPO_PATH / "log"
try:
    os.makedirs(log_dir)
except OSError:
    # FIX: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt. Only the expected "directory already exists /
    # cannot be created" filesystem error is ignored now.
    pass

LOGGING = get_logger_config(log_dir,
                            logging_env="test",
                            debug=True)

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'test_essaydb',
    }
}

RESET_SUBMISSIONS_AFTER = 0  # seconds
EXPIRE_SUBMISSIONS_AFTER = 0  # seconds
MIN_TO_USE_PEER = 2
MIN_TO_USE_ML = 3

TEST_PATH = os.path.abspath(os.path.join(REPO_PATH, "tests"))
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'xqueue.wsgi.application'

TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    os.path.join(ROOT_PATH, 'templates'),
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Uncomment the next line to enable the admin:
    # 'django.contrib.admin',
    # Uncomment the next line to enable admin documentation:
    # 'django.contrib.admindocs',
    'queue',
    'south',
)

LOGIN_URL = '/xqueue/login'

LOGGING = get_logger_config(
    ENV_ROOT / "log",
    logging_env="dev",
    dev_env=True,
    debug=True,
)
""" This config file runs the simplest dev environment""" # We intentionally define lots of variables that aren't used, and # want to import all variables from base settings files # pylint: disable=W0401, W0614 from .common import * from logsettings import get_logger_config DEBUG = True TEMPLATE_DEBUG = DEBUG LOGGING = get_logger_config(ENV_ROOT / "log", logging_env="dev", tracking_filename="tracking.log", dev_env=True, debug=True) modulestore_options = { 'default_class': 'xmodule.raw_module.RawDescriptor', 'host': 'localhost', 'db': 'xmodule', 'collection': 'modulestore', 'fs_root': GITHUB_REPO_ROOT, 'render_template': 'mitxmako.shortcuts.render_to_string', } MODULESTORE = { 'default': { 'ENGINE': 'xmodule.modulestore.draft.DraftModuleStore', 'OPTIONS': modulestore_options
'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.messages', 'django.contrib.staticfiles', # Uncomment the next line to enable the admin: 'django.contrib.admin', # Uncomment the next line to enable admin documentation: # 'django.contrib.admindocs', 'controller', 'staff_grading', 'south', 'peer_grading', 'ml_grading', 'metrics', 'djcelery', ) LOGGING = get_logger_config(debug=True) SESSION_COOKIE_NAME = "controller_session_id" AWS_ACCESS_KEY_ID = "" AWS_SECRET_ACCESS_KEY = "" #Celery settings BROKER_URL = 'redis://localhost:6379/6' BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 3600} CELERY_RESULT_BACKEND = 'redis://localhost:6379/6'
'django.contrib.messages', 'django.contrib.staticfiles', # Uncomment the next line to enable the admin: 'django.contrib.admin', # Uncomment the next line to enable admin documentation: # 'django.contrib.admindocs', 'controller', 'staff_grading', 'south', 'peer_grading', 'ml_grading', 'metrics', 'djcelery', ) LOGGING = get_logger_config(debug=True) SESSION_COOKIE_NAME="controller_session_id" AWS_ACCESS_KEY_ID= "" AWS_SECRET_ACCESS_KEY= "" #Celery settings BROKER_URL = 'redis://localhost:6379/6' BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 3600} CELERY_RESULT_BACKEND = 'redis://localhost:6379/6' # Cache settings for ml grading pending counts. See controller/grader_interface.py RECHECK_EMPTY_ML_GRADE_QUEUE_DELAY = 60 # Maximum number of graders for any single submission.
# (continuation: this assignment is presumably the body of a
# `with open(... "env.json") as env_file:` statement from the preceding
# chunk — TODO confirm indentation against the full file)
    ENV_TOKENS = json.load(env_file)

XQUEUES = ENV_TOKENS['XQUEUES']
XQUEUE_WORKERS_PER_QUEUE = ENV_TOKENS['XQUEUE_WORKERS_PER_QUEUE']
WORKER_COUNT = ENV_TOKENS.get('WORKER_COUNT', XQUEUE_WORKERS_PER_QUEUE * 2)

S3_BUCKET = ENV_TOKENS.get('S3_BUCKET', S3_BUCKET)
S3_PATH_PREFIX = ENV_TOKENS.get('S3_PATH_PREFIX', S3_PATH_PREFIX)
ALLOWED_HOSTS = ENV_TOKENS.get('ALLOWED_HOSTS', ALLOWED_HOSTS)

LOG_DIR = ENV_TOKENS['LOG_DIR']
local_loglevel = ENV_TOKENS.get('LOCAL_LOGLEVEL', 'INFO')
TIME_ZONE = ENV_TOKENS.get('TIME_ZONE', TIME_ZONE)

LOGGING = get_logger_config(LOG_DIR,
                            logging_env=ENV_TOKENS['LOGGING_ENV'],
                            local_loglevel=local_loglevel,
                            debug=False)

# Rabbit host/vhost are encoded to ascii byte strings.
RABBIT_HOST = ENV_TOKENS.get('RABBIT_HOST', RABBIT_HOST).encode('ascii')
RABBIT_VHOST = ENV_TOKENS.get('RABBIT_VHOST', RABBIT_VHOST).encode('ascii')

# Secret things: passwords, access keys, etc.
with open(CONFIG_ROOT / CONFIG_PREFIX + "auth.json") as auth_file:
    AUTH_TOKENS = json.load(auth_file)

DATABASES = AUTH_TOKENS['DATABASES']
AWS_ACCESS_KEY_ID = AUTH_TOKENS["AWS_ACCESS_KEY_ID"]
AWS_SECRET_ACCESS_KEY = AUTH_TOKENS["AWS_SECRET_ACCESS_KEY"]
REQUESTS_BASIC_AUTH = AUTH_TOKENS["REQUESTS_BASIC_AUTH"]
RABBITMQ_USER = AUTH_TOKENS.get('RABBITMQ_USER', 'guest').encode('ascii')
RABBITMQ_PASS = AUTH_TOKENS.get('RABBITMQ_PASS', 'guest').encode('ascii')
FEATURES['ENABLE_INSTRUCTOR_EMAIL'] = True # Enable email for all Studio courses FEATURES['REQUIRE_COURSE_EMAIL_AUTH'] = False # Give all courses email (don't require django-admin perms) FEATURES['ENABLE_HINTER_INSTRUCTOR_VIEW'] = True FEATURES['ENABLE_INSTRUCTOR_BETA_DASHBOARD'] = True FEATURES['MULTIPLE_ENROLLMENT_ROLES'] = True FEATURES['ENABLE_SHOPPING_CART'] = True FEATURES['AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'] = True FEATURES['ENABLE_S3_GRADE_DOWNLOADS'] = True FEEDBACK_SUBMISSION_EMAIL = "*****@*****.**" WIKI_ENABLED = True LOGGING = get_logger_config(ENV_ROOT / "log", logging_env="dev", local_loglevel="DEBUG", dev_env=True, debug=True) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ENV_ROOT / "db" / "edx.db", } } CACHES = { # This is the cache used for most things. # In staging/prod envs, the sessions also live here. 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
""" # We intentionally define lots of variables that aren't used, and # want to import all variables from base settings files # pylint: disable=W0401, W0614 from .test import * from logsettings import get_logger_config ENABLE_JASMINE = True DEBUG = True LOGGING = get_logger_config(TEST_ROOT / "log", logging_env="dev", tracking_filename="tracking.log", dev_env=True, debug=True, local_loglevel='ERROR', console_loglevel='ERROR') PIPELINE_JS['js-test-source'] = { 'source_filenames': sum([ pipeline_group['source_filenames'] for group_name, pipeline_group in sorted(PIPELINE_JS.items(), key=lambda item: item[1].get('test_order', 1e100)) if group_name != 'spec' ], []), 'output_filename': 'js/cms-test-source.js' } PIPELINE_JS['spec'] = {
LANGUAGES = ENV_TOKENS.get('LANGUAGES', LANGUAGES) LANGUAGE_CODE = ENV_TOKENS.get('LANGUAGE_CODE', LANGUAGE_CODE) USE_I18N = ENV_TOKENS.get('USE_I18N', USE_I18N) ENV_FEATURES = ENV_TOKENS.get('FEATURES', ENV_TOKENS.get('MITX_FEATURES', {})) for feature, value in ENV_FEATURES.items(): FEATURES[feature] = value # Additional installed apps for app in ENV_TOKENS.get('ADDL_INSTALLED_APPS', []): INSTALLED_APPS += (app,) WIKI_ENABLED = ENV_TOKENS.get('WIKI_ENABLED', WIKI_ENABLED) LOGGING = get_logger_config(LOG_DIR, logging_env=ENV_TOKENS['LOGGING_ENV'], debug=False, service_variant=SERVICE_VARIANT) #theming start: PLATFORM_NAME = ENV_TOKENS.get('PLATFORM_NAME', 'edX') # Event Tracking if "TRACKING_IGNORE_URL_PATTERNS" in ENV_TOKENS: TRACKING_IGNORE_URL_PATTERNS = ENV_TOKENS.get("TRACKING_IGNORE_URL_PATTERNS") # Django CAS external authentication settings CAS_EXTRA_LOGIN_PARAMS = ENV_TOKENS.get("CAS_EXTRA_LOGIN_PARAMS", None) if FEATURES.get('AUTH_USE_CAS'): CAS_SERVER_URL = ENV_TOKENS.get("CAS_SERVER_URL", None) AUTHENTICATION_BACKENDS = ( 'django.contrib.auth.backends.ModelBackend',
PEER_GRADER_MIN_NORMALIZED_CALIBRATION_ERROR = float( ENV_TOKENS.get("PEER_GRADER_MIN_NORMALIZED_CALIBRATION_ERROR", PEER_GRADER_MIN_NORMALIZED_CALIBRATION_ERROR)) #Submission Expiration EXPIRE_SUBMISSIONS_AFTER = int( ENV_TOKENS.get('EXPIRE_SUBMISSIONS_AFTER', EXPIRE_SUBMISSIONS_AFTER)) RESET_SUBMISSIONS_AFTER = int( ENV_TOKENS.get('RESET_SUBMISSIONS_AFTER', RESET_SUBMISSIONS_AFTER)) #Time zone (shows up in logs) TIME_ZONE = ENV_TOKENS.get('TIME_ZONE', TIME_ZONE) local_loglevel = ENV_TOKENS.get('LOCAL_LOGLEVEL', 'INFO') LOG_DIR = ENV_TOKENS.get("LOG_DIR", ENV_ROOT / "log") LOGGING = get_logger_config(debug=DEBUG) with open(CONFIG_ROOT / CONFIG_PREFIX + "auth.json") as auth_file: AUTH_TOKENS = json.load(auth_file) SECRET_KEY = AUTH_TOKENS.get("SECRET_KEY") XQUEUE_INTERFACE = AUTH_TOKENS['XQUEUE_INTERFACE'] GRADING_CONTROLLER_INTERFACE = AUTH_TOKENS['GRADING_CONTROLLER_INTERFACE'] DATABASES = AUTH_TOKENS['DATABASES'] AWS_ACCESS_KEY_ID = AUTH_TOKENS.get("AWS_ACCESS_KEY_ID", "") AWS_SECRET_ACCESS_KEY = AUTH_TOKENS.get("AWS_SECRET_ACCESS_KEY", "") #Celery settings BROKER_URL = AUTH_TOKENS.get("BROKER_URL", "") BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 3600} CELERY_RESULT_BACKEND = AUTH_TOKENS.get("BROKER_URL", "")