This commit adds the requisite settings and startup features to enable integration of themes into the edX platform. It does not yet provide hooks in any of the templates, but it does cause the main `lms/static/sass/application.scss` file to `@import` a theme's base Sass. Template hooks will come later. CHANGELOG --------- Define a new `MITX_FEATURE`, `USE_CUSTOM_THEME`, that, when enabled, can be used in templates to determine whether custom theme templates should be used instead of the defaults. Also define a new setting, `THEME_NAME`, which will be used to locate theme-specific files. Establish the convention that themes are stored outside of the `REPO_ROOT`, inside the `ENV_ROOT`, in a directory named `themes/`; `themes/<THEME_NAME>` stores the files for a particular theme. Provide a function, `enable_theme`, that modifies the template and static asset load paths appropriately to include the theme's files. Move the main LMS Sass file to a Mako template that conditionally `@import`s the theme's base Sass file when a theme is enabled. Add logic to the assets Rakefile to properly preprocess any Sass/Mako templates before compiling them.
210 lines
7.2 KiB
Python
"""
This is the default template for our main set of AWS servers. This does NOT
cover the content machines, which use content.py

Common traits:
* Use memcached, and cache-backed sessions
* Use a MySQL 5.1 database
"""

# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=W0401, W0614

import json

# NOTE: the star import deliberately comes before `import os` and the
# logsettings import so that any same-named symbols exported by the base
# settings are resolved the same way as before -- do not regroup per PEP 8
# without checking what `common` exports.
from .common import *
from logsettings import get_logger_config
import os
# SERVICE_VARIANT names the variant this process runs as; it is
# specified as an environment variable. Typically this is set
# in the service's upstart script and corresponds exactly to the service name.
# Service variants apply config differences via env and auth JSON files,
# the names of which correspond to the variant.
# (NOTE(review): the first comment line above was reconstructed -- the
# pasted source started mid-sentence; confirm against the original file.)
SERVICE_VARIANT = os.environ.get('SERVICE_VARIANT', None)

# When no variant is specified, we attempt to load an unvaried
# config set (plain "env.json" / "auth.json").
CONFIG_PREFIX = ""

if SERVICE_VARIANT:
    CONFIG_PREFIX = SERVICE_VARIANT + "."
################################ ALWAYS THE SAME ##############################

DEBUG = False
TEMPLATE_DEBUG = False

EMAIL_BACKEND = 'django_ses.SESBackend'
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'

# Enable Berkeley forums
MITX_FEATURES['ENABLE_DISCUSSION_SERVICE'] = True

# IMPORTANT: With this enabled, the server must always be behind a proxy that
# strips the header HTTP_X_FORWARDED_PROTO from client requests. Otherwise,
# a user can fool our server into thinking it was an https connection.
# See
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-proxy-ssl-header
# for other warnings.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
###################################### CELERY ################################

# Don't use a connection pool, since connections are dropped by ELB.
BROKER_POOL_LIMIT = 0
BROKER_CONNECTION_TIMEOUT = 1

# For the Result Store, use the django cache named 'celery'
CELERY_RESULT_BACKEND = 'cache'
CELERY_CACHE_BACKEND = 'celery'

# When the broker is behind an ELB, use a heartbeat to refresh the
# connection and to detect if it has been dropped.
BROKER_HEARTBEAT = 10.0
BROKER_HEARTBEAT_CHECKRATE = 2

# Each worker should only fetch one message at a time
CELERYD_PREFETCH_MULTIPLIER = 1

# Skip djcelery migrations, since we don't use the database as the broker
SOUTH_MIGRATION_MODULES = {
    'djcelery': 'ignore',
}

# Rename the exchange and queues for each variant so that multiple
# variants on one broker do not share queues.

QUEUE_VARIANT = CONFIG_PREFIX.lower()

CELERY_DEFAULT_EXCHANGE = 'edx.{0}core'.format(QUEUE_VARIANT)

HIGH_PRIORITY_QUEUE = 'edx.{0}core.high'.format(QUEUE_VARIANT)
DEFAULT_PRIORITY_QUEUE = 'edx.{0}core.default'.format(QUEUE_VARIANT)
LOW_PRIORITY_QUEUE = 'edx.{0}core.low'.format(QUEUE_VARIANT)

CELERY_DEFAULT_QUEUE = DEFAULT_PRIORITY_QUEUE
CELERY_DEFAULT_ROUTING_KEY = DEFAULT_PRIORITY_QUEUE

CELERY_QUEUES = {
    HIGH_PRIORITY_QUEUE: {},
    LOW_PRIORITY_QUEUE: {},
    DEFAULT_PRIORITY_QUEUE: {}
}
########################## NON-SECURE ENV CONFIG ##############################
# Things like server locations, ports, etc.

# ENV_ROOT comes from the base settings; `/` here is path-object joining
# (presumably a path.py Path -- confirm against common.py), not division.
with open(ENV_ROOT / CONFIG_PREFIX + "env.json") as env_file:
    ENV_TOKENS = json.load(env_file)

SITE_NAME = ENV_TOKENS['SITE_NAME']
SESSION_COOKIE_DOMAIN = ENV_TOKENS.get('SESSION_COOKIE_DOMAIN')

BOOK_URL = ENV_TOKENS['BOOK_URL']
MEDIA_URL = ENV_TOKENS['MEDIA_URL']
LOG_DIR = ENV_TOKENS['LOG_DIR']

CACHES = ENV_TOKENS['CACHES']

# Email overrides
DEFAULT_FROM_EMAIL = ENV_TOKENS.get('DEFAULT_FROM_EMAIL', DEFAULT_FROM_EMAIL)
DEFAULT_FEEDBACK_EMAIL = ENV_TOKENS.get('DEFAULT_FEEDBACK_EMAIL', DEFAULT_FEEDBACK_EMAIL)
ADMINS = ENV_TOKENS.get('ADMINS', ADMINS)
SERVER_EMAIL = ENV_TOKENS.get('SERVER_EMAIL', SERVER_EMAIL)

# Theme overrides: when a theme name is configured, adjust the template and
# static asset load paths to include the theme's files.
# `enable_theme` is expected to be provided by `from .common import *`.
THEME_NAME = ENV_TOKENS.get('THEME_NAME', None)
if THEME_NAME is not None:
    enable_theme(THEME_NAME)

# Timezone overrides
TIME_ZONE = ENV_TOKENS.get('TIME_ZONE', TIME_ZONE)

for feature, value in ENV_TOKENS.get('MITX_FEATURES', {}).items():
    MITX_FEATURES[feature] = value

WIKI_ENABLED = ENV_TOKENS.get('WIKI_ENABLED', WIKI_ENABLED)
local_loglevel = ENV_TOKENS.get('LOCAL_LOGLEVEL', 'INFO')

LOGGING = get_logger_config(LOG_DIR,
                            logging_env=ENV_TOKENS['LOGGING_ENV'],
                            syslog_addr=(ENV_TOKENS['SYSLOG_SERVER'], 514),
                            local_loglevel=local_loglevel,
                            debug=False,
                            service_variant=SERVICE_VARIANT)

COURSE_LISTINGS = ENV_TOKENS.get('COURSE_LISTINGS', {})
SUBDOMAIN_BRANDING = ENV_TOKENS.get('SUBDOMAIN_BRANDING', {})
VIRTUAL_UNIVERSITIES = ENV_TOKENS.get('VIRTUAL_UNIVERSITIES', [])
META_UNIVERSITIES = ENV_TOKENS.get('META_UNIVERSITIES', {})
COMMENTS_SERVICE_URL = ENV_TOKENS.get("COMMENTS_SERVICE_URL", '')
COMMENTS_SERVICE_KEY = ENV_TOKENS.get("COMMENTS_SERVICE_KEY", '')
CERT_QUEUE = ENV_TOKENS.get("CERT_QUEUE", 'test-pull')
ZENDESK_URL = ENV_TOKENS.get("ZENDESK_URL")
FEEDBACK_SUBMISSION_EMAIL = ENV_TOKENS.get("FEEDBACK_SUBMISSION_EMAIL")
MKTG_URLS = ENV_TOKENS.get('MKTG_URLS', MKTG_URLS)

# Merge CODE_JAIL overrides one level deep: when both sides hold a dict,
# update it key-by-key instead of replacing it wholesale.
for name, value in ENV_TOKENS.get("CODE_JAIL", {}).items():
    oldvalue = CODE_JAIL.get(name)
    if isinstance(oldvalue, dict):
        for subname, subvalue in value.items():
            oldvalue[subname] = subvalue
    else:
        CODE_JAIL[name] = value

COURSES_WITH_UNSAFE_CODE = ENV_TOKENS.get("COURSES_WITH_UNSAFE_CODE", [])
############################## SECURE AUTH ITEMS ###############
# Secret things: passwords, access keys, etc.

with open(ENV_ROOT / CONFIG_PREFIX + "auth.json") as auth_file:
    AUTH_TOKENS = json.load(auth_file)

SECRET_KEY = AUTH_TOKENS['SECRET_KEY']

AWS_ACCESS_KEY_ID = AUTH_TOKENS["AWS_ACCESS_KEY_ID"]
AWS_SECRET_ACCESS_KEY = AUTH_TOKENS["AWS_SECRET_ACCESS_KEY"]
AWS_STORAGE_BUCKET_NAME = 'edxuploads'

DATABASES = AUTH_TOKENS['DATABASES']

XQUEUE_INTERFACE = AUTH_TOKENS['XQUEUE_INTERFACE']

# Get the MODULESTORE from auth.json, but if it doesn't exist,
# use the one from common.py
MODULESTORE = AUTH_TOKENS.get('MODULESTORE', MODULESTORE)
CONTENTSTORE = AUTH_TOKENS.get('CONTENTSTORE', CONTENTSTORE)

OPEN_ENDED_GRADING_INTERFACE = AUTH_TOKENS.get('OPEN_ENDED_GRADING_INTERFACE',
                                               OPEN_ENDED_GRADING_INTERFACE)

PEARSON_TEST_USER = "pearsontest"
PEARSON_TEST_PASSWORD = AUTH_TOKENS.get("PEARSON_TEST_PASSWORD")

# Pearson hash for import/export
PEARSON = AUTH_TOKENS.get("PEARSON")

# Datadog for events!
DATADOG_API = AUTH_TOKENS.get("DATADOG_API")

# Analytics dashboard server
ANALYTICS_SERVER_URL = ENV_TOKENS.get("ANALYTICS_SERVER_URL")
ANALYTICS_API_KEY = AUTH_TOKENS.get("ANALYTICS_API_KEY", "")

# Zendesk
ZENDESK_USER = AUTH_TOKENS.get("ZENDESK_USER")
ZENDESK_API_KEY = AUTH_TOKENS.get("ZENDESK_API_KEY")

# Celery Broker: non-secret location pieces come from the env file, the
# credentials from the auth file.
CELERY_BROKER_TRANSPORT = ENV_TOKENS.get("CELERY_BROKER_TRANSPORT", "")
CELERY_BROKER_HOSTNAME = ENV_TOKENS.get("CELERY_BROKER_HOSTNAME", "")
CELERY_BROKER_USER = AUTH_TOKENS.get("CELERY_BROKER_USER", "")
CELERY_BROKER_PASSWORD = AUTH_TOKENS.get("CELERY_BROKER_PASSWORD", "")

BROKER_URL = "{0}://{1}:{2}@{3}".format(CELERY_BROKER_TRANSPORT,
                                        CELERY_BROKER_USER,
                                        CELERY_BROKER_PASSWORD,
                                        CELERY_BROKER_HOSTNAME)