Add OpenStack Swift support
Refactor all S3 code to use the django storage API and add swift settings for openstack deployments
This commit is contained in:
@@ -317,6 +317,20 @@ else:
|
||||
|
||||
DATABASES = AUTH_TOKENS['DATABASES']
|
||||
|
||||
# The normal database user does not have enough permissions to run migrations.
|
||||
# Migrations are run with separate credentials, given as DB_MIGRATION_*
|
||||
# environment variables
|
||||
for name, database in DATABASES.items():
|
||||
if name != 'read_replica':
|
||||
database.update({
|
||||
'ENGINE': os.environ.get('DB_MIGRATION_ENGINE', database['ENGINE']),
|
||||
'USER': os.environ.get('DB_MIGRATION_USER', database['USER']),
|
||||
'PASSWORD': os.environ.get('DB_MIGRATION_PASS', database['PASSWORD']),
|
||||
'NAME': os.environ.get('DB_MIGRATION_NAME', database['NAME']),
|
||||
'HOST': os.environ.get('DB_MIGRATION_HOST', database['HOST']),
|
||||
'PORT': os.environ.get('DB_MIGRATION_PORT', database['PORT']),
|
||||
})
|
||||
|
||||
MODULESTORE = convert_module_store_setting_if_needed(AUTH_TOKENS.get('MODULESTORE', MODULESTORE))
|
||||
|
||||
MODULESTORE_FIELD_OVERRIDE_PROVIDERS = ENV_TOKENS.get(
|
||||
|
||||
@@ -1,39 +0,0 @@
|
||||
"""
|
||||
A Django settings file for use on AWS while running
|
||||
database migrations, since we don't want to normally run the
|
||||
LMS with enough privileges to modify the database schema.
|
||||
"""
|
||||
|
||||
# We intentionally define lots of variables that aren't used, and
|
||||
# want to import all variables from base settings files
|
||||
# pylint: disable=wildcard-import, unused-wildcard-import
|
||||
|
||||
# Import everything from .aws so that our settings are based on those.
|
||||
from .aws import *
|
||||
import os
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
|
||||
|
||||
def get_db_overrides(db_name):
|
||||
"""
|
||||
Now that we have multiple databases, we want to look up from the environment
|
||||
for both databases.
|
||||
"""
|
||||
db_overrides = dict(
|
||||
PASSWORD=os.environ.get('DB_MIGRATION_PASS', None),
|
||||
ENGINE=os.environ.get('DB_MIGRATION_ENGINE', DATABASES[db_name]['ENGINE']),
|
||||
USER=os.environ.get('DB_MIGRATION_USER', DATABASES[db_name]['USER']),
|
||||
NAME=os.environ.get('DB_MIGRATION_NAME', DATABASES[db_name]['NAME']),
|
||||
HOST=os.environ.get('DB_MIGRATION_HOST', DATABASES[db_name]['HOST']),
|
||||
PORT=os.environ.get('DB_MIGRATION_PORT', DATABASES[db_name]['PORT']),
|
||||
)
|
||||
|
||||
if db_overrides['PASSWORD'] is None:
|
||||
raise ImproperlyConfigured("No database password was provided for running "
|
||||
"migrations. This is fatal.")
|
||||
return db_overrides
|
||||
|
||||
for db in DATABASES:
|
||||
# You never migrate a read_replica
|
||||
if db != 'read_replica':
|
||||
DATABASES[db].update(get_db_overrides(db))
|
||||
@@ -69,7 +69,7 @@
|
||||
"AUTH_USE_OPENID_PROVIDER": true,
|
||||
"CERTIFICATES_HTML_VIEW": true,
|
||||
"ENABLE_DISCUSSION_SERVICE": true,
|
||||
"ENABLE_S3_GRADE_DOWNLOADS": true,
|
||||
"ENABLE_GRADE_DOWNLOADS": true,
|
||||
"ENTRANCE_EXAMS": true,
|
||||
"MILESTONES_APP": true,
|
||||
"PREVIEW_LMS_BASE": "preview.localhost:8003",
|
||||
|
||||
5
cms/envs/openstack.py
Normal file
5
cms/envs/openstack.py
Normal file
@@ -0,0 +1,5 @@
|
||||
"""
|
||||
Settings for OpenStack deployments.
|
||||
"""
|
||||
|
||||
from .aws import * # pylint: disable=wildcard-import, unused-wildcard-import
|
||||
@@ -6,6 +6,7 @@ from contextlib import contextmanager
|
||||
from django.dispatch import Signal
|
||||
from markupsafe import escape
|
||||
from mock import Mock, patch
|
||||
import moto
|
||||
|
||||
|
||||
@contextmanager
|
||||
@@ -107,3 +108,18 @@ def skip_signal(signal, **kwargs):
|
||||
signal.disconnect(**kwargs)
|
||||
yield
|
||||
signal.connect(**kwargs)
|
||||
|
||||
|
||||
class MockS3Mixin(object):
|
||||
"""
|
||||
TestCase mixin that stubs S3 using the moto library. Note that this will
|
||||
activate httpretty, which will monkey patch socket.
|
||||
"""
|
||||
def setUp(self):
|
||||
super(MockS3Mixin, self).setUp()
|
||||
self._mock_s3 = moto.mock_s3()
|
||||
self._mock_s3.start()
|
||||
|
||||
def tearDown(self):
|
||||
self._mock_s3.stop()
|
||||
super(MockS3Mixin, self).tearDown()
|
||||
|
||||
@@ -2975,7 +2975,7 @@ class TestInstructorAPILevelsDataDump(SharedModuleStoreTestCase, LoginEnrollment
|
||||
|
||||
def test_list_report_downloads(self):
|
||||
url = reverse('list_report_downloads', kwargs={'course_id': self.course.id.to_deprecated_string()})
|
||||
with patch('instructor_task.models.LocalFSReportStore.links_for') as mock_links_for:
|
||||
with patch('instructor_task.models.DjangoStorageReportStore.links_for') as mock_links_for:
|
||||
mock_links_for.return_value = [
|
||||
('mock_file_name_1', 'https://1.mock.url'),
|
||||
('mock_file_name_2', 'https://2.mock.url'),
|
||||
|
||||
@@ -12,22 +12,18 @@ file and check it in at the same time as your model changes. To do that,
|
||||
ASSUMPTIONS: modules have unique IDs, even across different module_types
|
||||
|
||||
"""
|
||||
from cStringIO import StringIO
|
||||
from gzip import GzipFile
|
||||
from uuid import uuid4
|
||||
import csv
|
||||
import json
|
||||
import hashlib
|
||||
import os.path
|
||||
import urllib
|
||||
|
||||
from boto.s3.connection import S3Connection
|
||||
from boto.s3.key import Key
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.files.base import ContentFile
|
||||
from django.db import models, transaction
|
||||
|
||||
from openedx.core.storage import get_storage
|
||||
from xmodule_django.models import CourseKeyField
|
||||
|
||||
|
||||
@@ -192,11 +188,28 @@ class ReportStore(object):
|
||||
Return one of the ReportStore subclasses depending on django
|
||||
configuration. Look at subclasses for expected configuration.
|
||||
"""
|
||||
storage_type = getattr(settings, config_name).get("STORAGE_TYPE")
|
||||
if storage_type.lower() == "s3":
|
||||
return S3ReportStore.from_config(config_name)
|
||||
elif storage_type.lower() == "localfs":
|
||||
return LocalFSReportStore.from_config(config_name)
|
||||
# Convert old configuration parameters to those expected by
|
||||
# DjangoStorageReportStore for backward compatibility
|
||||
config = getattr(settings, config_name, {})
|
||||
storage_type = config.get('STORAGE_TYPE', '').lower()
|
||||
if storage_type == 's3':
|
||||
return DjangoStorageReportStore(
|
||||
storage_class='storages.backends.s3boto.S3BotoStorage',
|
||||
storage_kwargs={
|
||||
'bucket': config['BUCKET'],
|
||||
'location': config['ROOT_PATH'],
|
||||
'querystring_expire': 300,
|
||||
'gzip': True,
|
||||
},
|
||||
)
|
||||
elif storage_type == 'localfs':
|
||||
return DjangoStorageReportStore(
|
||||
storage_class='django.core.files.storage.FileSystemStorage',
|
||||
storage_kwargs={
|
||||
'location': config['ROOT_PATH'],
|
||||
},
|
||||
)
|
||||
return DjangoStorageReportStore.from_config(config_name)
|
||||
|
||||
def _get_utf8_encoded_rows(self, rows):
|
||||
"""
|
||||
@@ -208,204 +221,78 @@ class ReportStore(object):
|
||||
yield [unicode(item).encode('utf-8') for item in row]
|
||||
|
||||
|
||||
class S3ReportStore(ReportStore):
|
||||
class DjangoStorageReportStore(ReportStore):
|
||||
"""
|
||||
Reports store backed by S3. The directory structure we use to store things
|
||||
is::
|
||||
|
||||
`{bucket}/{root_path}/{sha1 hash of course_id}/filename`
|
||||
|
||||
We might later use subdirectories or metadata to do more intelligent
|
||||
grouping and querying, but right now it simply depends on its own
|
||||
conventions on where files are stored to know what to display. Clients using
|
||||
this class can name the final file whatever they want.
|
||||
ReportStore implementation that delegates to django's storage api.
|
||||
"""
|
||||
def __init__(self, bucket_name, root_path):
|
||||
self.root_path = root_path
|
||||
|
||||
conn = S3Connection(
|
||||
settings.AWS_ACCESS_KEY_ID,
|
||||
settings.AWS_SECRET_ACCESS_KEY
|
||||
)
|
||||
|
||||
self.bucket = conn.get_bucket(bucket_name)
|
||||
def __init__(self, storage_class=None, storage_kwargs=None):
|
||||
if storage_kwargs is None:
|
||||
storage_kwargs = {}
|
||||
self.storage = get_storage(storage_class, **storage_kwargs)
|
||||
|
||||
@classmethod
|
||||
def from_config(cls, config_name):
|
||||
"""
|
||||
The expected configuration for an `S3ReportStore` is to have a
|
||||
`GRADES_DOWNLOAD` dict in settings with the following fields::
|
||||
By default, the default file storage specified by the `DEFAULT_FILE_STORAGE`
|
||||
setting will be used. To configure the storage used, add a dict in
|
||||
settings with the following fields::
|
||||
|
||||
STORAGE_TYPE : "s3"
|
||||
BUCKET : Your bucket name, e.g. "reports-bucket"
|
||||
ROOT_PATH : The path you want to store all course files under. Do not
|
||||
use a leading or trailing slash. e.g. "staging" or
|
||||
"staging/2013", not "/staging", or "/staging/"
|
||||
STORAGE_CLASS : The import path of the storage class to use. If
|
||||
not set, the DEFAULT_FILE_STORAGE setting will be used.
|
||||
STORAGE_KWARGS : An optional dict of kwargs to pass to the storage
|
||||
constructor. This can be used to specify a
|
||||
different S3 bucket or root path, for example.
|
||||
|
||||
Since S3 access relies on boto, you must also define `AWS_ACCESS_KEY_ID`
|
||||
and `AWS_SECRET_ACCESS_KEY` in settings.
|
||||
Reference the setting name when calling `.from_config`.
|
||||
"""
|
||||
return cls(
|
||||
getattr(settings, config_name).get("BUCKET"),
|
||||
getattr(settings, config_name).get("ROOT_PATH")
|
||||
getattr(settings, config_name).get('STORAGE_CLASS'),
|
||||
getattr(settings, config_name).get('STORAGE_KWARGS'),
|
||||
)
|
||||
|
||||
def key_for(self, course_id, filename):
|
||||
"""Return the S3 key we would use to store and retrieve the data for the
|
||||
given filename."""
|
||||
hashed_course_id = hashlib.sha1(course_id.to_deprecated_string())
|
||||
|
||||
key = Key(self.bucket)
|
||||
key.key = "{}/{}/{}".format(
|
||||
self.root_path,
|
||||
hashed_course_id.hexdigest(),
|
||||
filename
|
||||
)
|
||||
|
||||
return key
|
||||
|
||||
def store(self, course_id, filename, buff, config=None):
|
||||
def store(self, course_id, filename, buff):
|
||||
"""
|
||||
Store the contents of `buff` in a directory determined by hashing
|
||||
`course_id`, and name the file `filename`. `buff` is typically a
|
||||
`StringIO`, but can be anything that implements `.getvalue()`.
|
||||
|
||||
This method assumes that the contents of `buff` are gzip-encoded (it
|
||||
will add the appropriate headers to S3 to make the decompression
|
||||
transparent via the browser). Filenames should end in whatever
|
||||
suffix makes sense for the original file, so `.txt` instead of `.gz`
|
||||
`course_id`, and name the file `filename`. `buff` can be any file-like
|
||||
object, ready to be read from the beginning.
|
||||
"""
|
||||
key = self.key_for(course_id, filename)
|
||||
|
||||
_config = config if config else {}
|
||||
|
||||
content_type = _config.get('content_type', 'text/csv')
|
||||
content_encoding = _config.get('content_encoding', 'gzip')
|
||||
|
||||
data = buff.getvalue()
|
||||
key.size = len(data)
|
||||
key.content_encoding = content_encoding
|
||||
key.content_type = content_type
|
||||
|
||||
# Just setting the content encoding and type above should work
|
||||
# according to the docs, but when experimenting, this was necessary for
|
||||
# it to actually take.
|
||||
key.set_contents_from_string(
|
||||
data,
|
||||
headers={
|
||||
"Content-Encoding": content_encoding,
|
||||
"Content-Length": len(data),
|
||||
"Content-Type": content_type,
|
||||
}
|
||||
)
|
||||
path = self.path_to(course_id, filename)
|
||||
self.storage.save(path, buff)
|
||||
|
||||
def store_rows(self, course_id, filename, rows):
|
||||
"""
|
||||
Given a `course_id`, `filename`, and `rows` (each row is an iterable of
|
||||
strings), create a buffer that is a gzip'd csv file, and then `store()`
|
||||
that buffer.
|
||||
|
||||
Even though we store it in gzip format, browsers will transparently
|
||||
download and decompress it. Filenames should end in `.csv`, not `.gz`.
|
||||
Given a course_id, filename, and rows (each row is an iterable of
|
||||
strings), write the rows to the storage backend in csv format.
|
||||
"""
|
||||
output_buffer = StringIO()
|
||||
gzip_file = GzipFile(fileobj=output_buffer, mode="wb")
|
||||
csvwriter = csv.writer(gzip_file)
|
||||
csvwriter.writerows(self._get_utf8_encoded_rows(rows))
|
||||
gzip_file.close()
|
||||
|
||||
self.store(course_id, filename, output_buffer)
|
||||
|
||||
def links_for(self, course_id):
|
||||
"""
|
||||
For a given `course_id`, return a list of `(filename, url)` tuples. `url`
|
||||
can be plugged straight into an href
|
||||
"""
|
||||
course_dir = self.key_for(course_id, '')
|
||||
return [
|
||||
(key.key.split("/")[-1], key.generate_url(expires_in=300))
|
||||
for key in sorted(self.bucket.list(prefix=course_dir.key), reverse=True, key=lambda k: k.last_modified)
|
||||
]
|
||||
|
||||
|
||||
class LocalFSReportStore(ReportStore):
|
||||
"""
|
||||
LocalFS implementation of a ReportStore. This is meant for debugging
|
||||
purposes and is *absolutely not for production use*. Use S3ReportStore for
|
||||
that. We use this in tests and for local development. When it generates
|
||||
links, it will make file:/// style links. That means you actually have to
|
||||
copy them and open them in a separate browser window, for security reasons.
|
||||
This lets us do the cheap thing locally for debugging without having to open
|
||||
up a separate URL that would only be used to send files in dev.
|
||||
"""
|
||||
def __init__(self, root_path):
|
||||
"""
|
||||
Initialize with root_path where we're going to store our files. We
|
||||
will build a directory structure under this for each course.
|
||||
"""
|
||||
self.root_path = root_path
|
||||
if not os.path.exists(root_path):
|
||||
os.makedirs(root_path)
|
||||
|
||||
@classmethod
|
||||
def from_config(cls, config_name):
|
||||
"""
|
||||
Generate an instance of this object from Django settings. It assumes
|
||||
that there is a dict in settings named GRADES_DOWNLOAD and that it has
|
||||
a ROOT_PATH that maps to an absolute file path that the web app has
|
||||
write permissions to. `LocalFSReportStore` will create any intermediate
|
||||
directories as needed. Example::
|
||||
|
||||
STORAGE_TYPE : "localfs"
|
||||
ROOT_PATH : /tmp/edx/report-downloads/
|
||||
"""
|
||||
return cls(getattr(settings, config_name).get("ROOT_PATH"))
|
||||
|
||||
def path_to(self, course_id, filename):
|
||||
"""Return the full path to a given file for a given course."""
|
||||
return os.path.join(self.root_path, urllib.quote(course_id.to_deprecated_string(), safe=''), filename)
|
||||
|
||||
def store(self, course_id, filename, buff, config=None): # pylint: disable=unused-argument
|
||||
"""
|
||||
Given the `course_id` and `filename`, store the contents of `buff` in
|
||||
that file. Overwrite anything that was there previously. `buff` is
|
||||
assumed to be a StringIO objecd (or anything that can flush its contents
|
||||
to string using `.getvalue()`).
|
||||
"""
|
||||
full_path = self.path_to(course_id, filename)
|
||||
directory = os.path.dirname(full_path)
|
||||
if not os.path.exists(directory):
|
||||
os.mkdir(directory)
|
||||
|
||||
with open(full_path, "wb") as f:
|
||||
f.write(buff.getvalue())
|
||||
|
||||
def store_rows(self, course_id, filename, rows):
|
||||
"""
|
||||
Given a course_id, filename, and rows (each row is an iterable of strings),
|
||||
write this data out.
|
||||
"""
|
||||
output_buffer = StringIO()
|
||||
output_buffer = ContentFile('')
|
||||
csvwriter = csv.writer(output_buffer)
|
||||
csvwriter.writerows(self._get_utf8_encoded_rows(rows))
|
||||
|
||||
output_buffer.seek(0)
|
||||
self.store(course_id, filename, output_buffer)
|
||||
|
||||
def links_for(self, course_id):
|
||||
"""
|
||||
For a given `course_id`, return a list of `(filename, url)` tuples. `url`
|
||||
can be plugged straight into an href. Note that `LocalFSReportStore`
|
||||
will generate `file://` type URLs, so you'll need to copy the URL and
|
||||
open it in a new browser window. Again, this class is only meant for
|
||||
local development.
|
||||
For a given `course_id`, return a list of `(filename, url)` tuples.
|
||||
Calls the `url` method of the underlying storage backend. Returned
|
||||
urls can be plugged straight into an href
|
||||
"""
|
||||
course_dir = self.path_to(course_id, '')
|
||||
if not os.path.exists(course_dir):
|
||||
course_dir = self.path_to(course_id)
|
||||
try:
|
||||
_, filenames = self.storage.listdir(course_dir)
|
||||
except OSError:
|
||||
# Django's FileSystemStorage fails with an OSError if the course
|
||||
# dir does not exist; other storage types return an empty list.
|
||||
return []
|
||||
files = [(filename, os.path.join(course_dir, filename)) for filename in os.listdir(course_dir)]
|
||||
files.sort(key=lambda (filename, full_path): os.path.getmtime(full_path), reverse=True)
|
||||
|
||||
files = [(filename, os.path.join(course_dir, filename)) for filename in filenames]
|
||||
files.sort(key=lambda f: self.storage.modified_time(f[1]), reverse=True)
|
||||
return [
|
||||
(filename, ("file://" + urllib.quote(full_path)))
|
||||
(filename, self.storage.url(full_path))
|
||||
for filename, full_path in files
|
||||
]
|
||||
|
||||
def path_to(self, course_id, filename=''):
|
||||
"""
|
||||
Return the full path to a given file for a given course.
|
||||
"""
|
||||
hashed_course_id = hashlib.sha1(course_id.to_deprecated_string()).hexdigest()
|
||||
return os.path.join(hashed_course_id, filename)
|
||||
|
||||
@@ -649,10 +649,6 @@ def upload_exec_summary_to_store(data_dict, report_name, course_id, generated_at
|
||||
timestamp_str=generated_at.strftime("%Y-%m-%d-%H%M")
|
||||
),
|
||||
output_buffer,
|
||||
config={
|
||||
'content_type': 'text/html',
|
||||
'content_encoding': None,
|
||||
}
|
||||
)
|
||||
tracker.emit(REPORT_REQUESTED_EVENT_NAME, {"report_type": report_name})
|
||||
|
||||
|
||||
@@ -11,7 +11,6 @@ from uuid import uuid4
|
||||
|
||||
from celery.states import SUCCESS, FAILURE
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.conf import settings
|
||||
from django.test.testcases import TestCase
|
||||
from django.contrib.auth.models import User
|
||||
from lms.djangoapps.lms_xblock.runtime import quote_slashes
|
||||
@@ -292,9 +291,14 @@ class TestReportMixin(object):
|
||||
Cleans up after tests that place files in the reports directory.
|
||||
"""
|
||||
def tearDown(self):
|
||||
reports_download_path = settings.GRADES_DOWNLOAD['ROOT_PATH']
|
||||
if os.path.exists(reports_download_path):
|
||||
shutil.rmtree(reports_download_path)
|
||||
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
|
||||
try:
|
||||
reports_download_path = report_store.storage.path('')
|
||||
except NotImplementedError:
|
||||
pass # storage backend does not use the local filesystem
|
||||
else:
|
||||
if os.path.exists(reports_download_path):
|
||||
shutil.rmtree(reports_download_path)
|
||||
|
||||
def verify_rows_in_csv(self, expected_rows, file_index=0, verify_order=True, ignore_other_columns=False):
|
||||
"""
|
||||
@@ -317,7 +321,8 @@ class TestReportMixin(object):
|
||||
"""
|
||||
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
|
||||
report_csv_filename = report_store.links_for(self.course.id)[file_index][0]
|
||||
with open(report_store.path_to(self.course.id, report_csv_filename)) as csv_file:
|
||||
report_path = report_store.path_to(self.course.id, report_csv_filename)
|
||||
with report_store.storage.open(report_path) as csv_file:
|
||||
# Expand the dict reader generator so we don't lose it's content
|
||||
csv_rows = [row for row in unicodecsv.DictReader(csv_file)]
|
||||
|
||||
@@ -337,6 +342,7 @@ class TestReportMixin(object):
|
||||
"""
|
||||
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
|
||||
report_csv_filename = report_store.links_for(self.course.id)[0][0]
|
||||
with open(report_store.path_to(self.course.id, report_csv_filename)) as csv_file:
|
||||
report_path = report_store.path_to(self.course.id, report_csv_filename)
|
||||
with report_store.storage.open(report_path) as csv_file:
|
||||
rows = unicodecsv.reader(csv_file, encoding='utf-8')
|
||||
return rows.next()
|
||||
|
||||
@@ -3,55 +3,32 @@ Tests for instructor_task/models.py.
|
||||
"""
|
||||
|
||||
from cStringIO import StringIO
|
||||
import mock
|
||||
import time
|
||||
from datetime import datetime
|
||||
from unittest import TestCase
|
||||
|
||||
from instructor_task.models import LocalFSReportStore, S3ReportStore
|
||||
import boto
|
||||
from django.conf import settings
|
||||
from django.test import SimpleTestCase, override_settings
|
||||
from mock import patch
|
||||
|
||||
from common.test.utils import MockS3Mixin
|
||||
from instructor_task.models import ReportStore
|
||||
from instructor_task.tests.test_base import TestReportMixin
|
||||
from opaque_keys.edx.locator import CourseLocator
|
||||
|
||||
|
||||
class MockKey(object):
|
||||
"""
|
||||
Mocking a boto S3 Key object.
|
||||
"""
|
||||
def __init__(self, bucket):
|
||||
self.last_modified = datetime.now()
|
||||
self.bucket = bucket
|
||||
LOCAL_SETTINGS = {
|
||||
'STORAGE_KWARGS': {
|
||||
'location': settings.GRADES_DOWNLOAD['ROOT_PATH'],
|
||||
},
|
||||
}
|
||||
|
||||
def set_contents_from_string(self, contents, headers): # pylint: disable=unused-argument
|
||||
""" Expected method on a Key object. """
|
||||
self.bucket.store_key(self)
|
||||
|
||||
def generate_url(self, expires_in): # pylint: disable=unused-argument
|
||||
""" Expected method on a Key object. """
|
||||
return "http://fake-edx-s3.edx.org/"
|
||||
|
||||
|
||||
class MockBucket(object):
|
||||
""" Mocking a boto S3 Bucket object. """
|
||||
def __init__(self, _name):
|
||||
self.keys = []
|
||||
|
||||
def store_key(self, key):
|
||||
""" Not a Bucket method, created just to store the keys in the Bucket for testing purposes. """
|
||||
self.keys.append(key)
|
||||
|
||||
def list(self, prefix): # pylint: disable=unused-argument
|
||||
""" Expected method on a Bucket object. """
|
||||
return self.keys
|
||||
|
||||
|
||||
class MockS3Connection(object):
|
||||
""" Mocking a boto S3 Connection """
|
||||
def __init__(self, access_key, secret_key):
|
||||
pass
|
||||
|
||||
def get_bucket(self, bucket_name):
|
||||
""" Expected method on an S3Connection object. """
|
||||
return MockBucket(bucket_name)
|
||||
S3_SETTINGS = {
|
||||
'STORAGE_CLASS': 'storages.backends.s3boto.S3BotoStorage',
|
||||
'STORAGE_KWARGS': {
|
||||
'bucket': settings.GRADES_DOWNLOAD['BUCKET'],
|
||||
'location': settings.GRADES_DOWNLOAD['ROOT_PATH'],
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
class ReportStoreTestMixin(object):
|
||||
@@ -59,6 +36,7 @@ class ReportStoreTestMixin(object):
|
||||
Mixin for report store tests.
|
||||
"""
|
||||
def setUp(self):
|
||||
super(ReportStoreTestMixin, self).setUp()
|
||||
self.course_id = CourseLocator(org="testx", course="coursex", run="runx")
|
||||
|
||||
def create_report_store(self):
|
||||
@@ -73,6 +51,8 @@ class ReportStoreTestMixin(object):
|
||||
in reverse chronological order.
|
||||
"""
|
||||
report_store = self.create_report_store()
|
||||
self.assertEqual(report_store.links_for(self.course_id), [])
|
||||
|
||||
report_store.store(self.course_id, 'old_file', StringIO())
|
||||
time.sleep(1) # Ensure we have a unique timestamp.
|
||||
report_store.store(self.course_id, 'middle_file', StringIO())
|
||||
@@ -85,23 +65,57 @@ class ReportStoreTestMixin(object):
|
||||
)
|
||||
|
||||
|
||||
class LocalFSReportStoreTestCase(ReportStoreTestMixin, TestReportMixin, TestCase):
|
||||
class LocalFSReportStoreTestCase(ReportStoreTestMixin, TestReportMixin, SimpleTestCase):
|
||||
"""
|
||||
Test the LocalFSReportStore model.
|
||||
Test the old LocalFSReportStore configuration.
|
||||
"""
|
||||
def create_report_store(self):
|
||||
""" Create and return a LocalFSReportStore. """
|
||||
return LocalFSReportStore.from_config(config_name='GRADES_DOWNLOAD')
|
||||
"""
|
||||
Create and return a DjangoStorageReportStore using the old
|
||||
LocalFSReportStore configuration.
|
||||
"""
|
||||
return ReportStore.from_config(config_name='GRADES_DOWNLOAD')
|
||||
|
||||
|
||||
@mock.patch('instructor_task.models.S3Connection', new=MockS3Connection)
|
||||
@mock.patch('instructor_task.models.Key', new=MockKey)
|
||||
@mock.patch('instructor_task.models.settings.AWS_SECRET_ACCESS_KEY', create=True, new="access_key")
|
||||
@mock.patch('instructor_task.models.settings.AWS_ACCESS_KEY_ID', create=True, new="access_id")
|
||||
class S3ReportStoreTestCase(ReportStoreTestMixin, TestReportMixin, TestCase):
|
||||
@patch.dict(settings.GRADES_DOWNLOAD, {'STORAGE_TYPE': 's3'})
|
||||
class S3ReportStoreTestCase(MockS3Mixin, ReportStoreTestMixin, TestReportMixin, SimpleTestCase):
|
||||
"""
|
||||
Test the S3ReportStore model.
|
||||
Test the old S3ReportStore configuration.
|
||||
"""
|
||||
def create_report_store(self):
|
||||
""" Create and return a S3ReportStore. """
|
||||
return S3ReportStore.from_config(config_name='GRADES_DOWNLOAD')
|
||||
"""
|
||||
Create and return a DjangoStorageReportStore using the old
|
||||
S3ReportStore configuration.
|
||||
"""
|
||||
connection = boto.connect_s3()
|
||||
connection.create_bucket(settings.GRADES_DOWNLOAD['BUCKET'])
|
||||
return ReportStore.from_config(config_name='GRADES_DOWNLOAD')
|
||||
|
||||
|
||||
@override_settings(GRADES_DOWNLOAD=LOCAL_SETTINGS)
|
||||
class DjangoStorageReportStoreLocalTestCase(ReportStoreTestMixin, TestReportMixin, SimpleTestCase):
|
||||
"""
|
||||
Test the DjangoStorageReportStore implementation using the local
|
||||
filesystem.
|
||||
"""
|
||||
def create_report_store(self):
|
||||
"""
|
||||
Create and return a DjangoStorageReportStore configured to use the
|
||||
local filesystem for storage.
|
||||
"""
|
||||
return ReportStore.from_config(config_name='GRADES_DOWNLOAD')
|
||||
|
||||
|
||||
@override_settings(GRADES_DOWNLOAD=S3_SETTINGS)
|
||||
class DjangoStorageReportStoreS3TestCase(MockS3Mixin, ReportStoreTestMixin, TestReportMixin, SimpleTestCase):
|
||||
"""
|
||||
Test the DjangoStorageReportStore implementation using S3 stubs.
|
||||
"""
|
||||
def create_report_store(self):
|
||||
"""
|
||||
Create and return a DjangoStorageReportStore configured to use S3 for
|
||||
storage.
|
||||
"""
|
||||
connection = boto.connect_s3()
|
||||
connection.create_bucket(settings.GRADES_DOWNLOAD['STORAGE_KWARGS']['bucket'])
|
||||
return ReportStore.from_config(config_name='GRADES_DOWNLOAD')
|
||||
|
||||
@@ -79,7 +79,8 @@ class InstructorGradeReportTestCase(TestReportMixin, InstructorTaskCourseTestCas
|
||||
self.assertDictContainsSubset({'attempted': 2, 'succeeded': 2, 'failed': 0}, result)
|
||||
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
|
||||
report_csv_filename = report_store.links_for(course_id)[0][0]
|
||||
with open(report_store.path_to(course_id, report_csv_filename)) as csv_file:
|
||||
report_path = report_store.path_to(course_id, report_csv_filename)
|
||||
with report_store.storage.open(report_path) as csv_file:
|
||||
for row in unicodecsv.DictReader(csv_file):
|
||||
if row.get('username') == username:
|
||||
self.assertEqual(row[column_header], expected_cell_content)
|
||||
@@ -564,7 +565,8 @@ class TestInstructorDetailedEnrollmentReport(TestReportMixin, InstructorTaskCour
|
||||
"""
|
||||
report_store = ReportStore.from_config(config_name='FINANCIAL_REPORTS')
|
||||
report_csv_filename = report_store.links_for(self.course.id)[0][0]
|
||||
with open(report_store.path_to(self.course.id, report_csv_filename)) as csv_file:
|
||||
report_path = report_store.path_to(self.course.id, report_csv_filename)
|
||||
with report_store.storage.open(report_path) as csv_file:
|
||||
# Expand the dict reader generator so we don't lose it's content
|
||||
for row in unicodecsv.DictReader(csv_file):
|
||||
if row.get('Username') == username:
|
||||
@@ -994,7 +996,8 @@ class TestExecutiveSummaryReport(TestReportMixin, InstructorTaskCourseTestCase):
|
||||
Verify grade report data.
|
||||
"""
|
||||
report_html_filename = report_store.links_for(self.course.id)[0][0]
|
||||
with open(report_store.path_to(self.course.id, report_html_filename)) as html_file:
|
||||
report_path = report_store.path_to(self.course.id, report_html_filename)
|
||||
with report_store.storage.open(report_path) as html_file:
|
||||
html_file_data = html_file.read()
|
||||
for data in expected_data:
|
||||
self.assertTrue(data in html_file_data)
|
||||
@@ -1087,7 +1090,8 @@ class TestCourseSurveyReport(TestReportMixin, InstructorTaskCourseTestCase):
|
||||
Verify course survey data.
|
||||
"""
|
||||
report_csv_filename = report_store.links_for(self.course.id)[0][0]
|
||||
with open(report_store.path_to(self.course.id, report_csv_filename)) as csv_file:
|
||||
report_path = report_store.path_to(self.course.id, report_csv_filename)
|
||||
with report_store.storage.open(report_path) as csv_file:
|
||||
csv_file_data = csv_file.read()
|
||||
for data in expected_data:
|
||||
self.assertIn(data, csv_file_data)
|
||||
@@ -1169,7 +1173,8 @@ class TestTeamStudentReport(TestReportMixin, InstructorTaskCourseTestCase):
|
||||
self.assertDictContainsSubset({'attempted': 2, 'succeeded': 2, 'failed': 0}, result)
|
||||
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
|
||||
report_csv_filename = report_store.links_for(self.course.id)[0][0]
|
||||
with open(report_store.path_to(self.course.id, report_csv_filename)) as csv_file:
|
||||
report_path = report_store.path_to(self.course.id, report_csv_filename)
|
||||
with report_store.storage.open(report_path) as csv_file:
|
||||
for row in unicodecsv.DictReader(csv_file):
|
||||
if row.get('username') == username:
|
||||
self.assertEqual(row['team'], expected_team)
|
||||
@@ -1539,7 +1544,8 @@ class TestGradeReportEnrollmentAndCertificateInfo(TestReportMixin, InstructorTas
|
||||
upload_grades_csv(None, None, self.course.id, None, 'graded')
|
||||
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
|
||||
report_csv_filename = report_store.links_for(self.course.id)[0][0]
|
||||
with open(report_store.path_to(self.course.id, report_csv_filename)) as csv_file:
|
||||
report_path = report_store.path_to(self.course.id, report_csv_filename)
|
||||
with report_store.storage.open(report_path) as csv_file:
|
||||
for row in unicodecsv.DictReader(csv_file):
|
||||
if row.get('username') == username:
|
||||
csv_row_data = [row[column] for column in self.columns_to_check]
|
||||
@@ -2213,7 +2219,7 @@ class TestInstructorOra2Report(SharedModuleStoreTestCase):
|
||||
with patch('instructor_task.tasks_helper.OraAggregateData.collect_ora2_data') as mock_collect_data:
|
||||
mock_collect_data.return_value = (test_header, test_rows)
|
||||
|
||||
with patch('instructor_task.models.LocalFSReportStore.store_rows') as mock_store_rows:
|
||||
with patch('instructor_task.models.DjangoStorageReportStore.store_rows') as mock_store_rows:
|
||||
return_val = upload_ora2_data(None, None, self.course.id, None, 'generated')
|
||||
|
||||
# pylint: disable=maybe-no-member
|
||||
|
||||
@@ -3,31 +3,35 @@ Tests for django admin commands in the verify_student module
|
||||
|
||||
Lots of imports from verify_student's model tests, since they cover similar ground
|
||||
"""
|
||||
import boto
|
||||
from nose.tools import assert_equals
|
||||
from mock import patch
|
||||
|
||||
from django.test import TestCase
|
||||
from django.conf import settings
|
||||
|
||||
from common.test.utils import MockS3Mixin
|
||||
from student.tests.factories import UserFactory
|
||||
from lms.djangoapps.verify_student.models import SoftwareSecurePhotoVerification
|
||||
from django.core.management import call_command
|
||||
from lms.djangoapps.verify_student.tests.test_models import (
|
||||
MockKey, MockS3Connection, mock_software_secure_post,
|
||||
mock_software_secure_post_error, FAKE_SETTINGS,
|
||||
mock_software_secure_post, mock_software_secure_post_error, FAKE_SETTINGS,
|
||||
)
|
||||
|
||||
|
||||
# Lots of patching to stub in our own settings, S3 substitutes, and HTTP posting
|
||||
# Lots of patching to stub in our own settings, and HTTP posting
|
||||
@patch.dict(settings.VERIFY_STUDENT, FAKE_SETTINGS)
|
||||
@patch('lms.djangoapps.verify_student.models.S3Connection', new=MockS3Connection)
|
||||
@patch('lms.djangoapps.verify_student.models.Key', new=MockKey)
|
||||
@patch('lms.djangoapps.verify_student.models.requests.post', new=mock_software_secure_post)
|
||||
class TestVerifyStudentCommand(TestCase):
|
||||
class TestVerifyStudentCommand(MockS3Mixin, TestCase):
|
||||
"""
|
||||
Tests for django admin commands in the verify_student module
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
super(TestVerifyStudentCommand, self).setUp()
|
||||
connection = boto.connect_s3()
|
||||
connection.create_bucket(FAKE_SETTINGS['SOFTWARE_SECURE']['S3_BUCKET'])
|
||||
|
||||
def create_and_submit(self, username):
|
||||
"""
|
||||
Helper method that lets us create new SoftwareSecurePhotoVerifications
|
||||
|
||||
@@ -11,6 +11,7 @@ photo verification process as generic as possible.
|
||||
import functools
|
||||
import json
|
||||
import logging
|
||||
import os.path
|
||||
from datetime import datetime, timedelta
|
||||
from email.utils import formatdate
|
||||
|
||||
@@ -25,12 +26,13 @@ from django.contrib.auth.models import User
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.core.cache import cache
|
||||
from django.core.files.base import ContentFile
|
||||
from django.dispatch import receiver
|
||||
from django.db import models, transaction
|
||||
from django.utils.functional import cached_property
|
||||
from django.utils.translation import ugettext as _, ugettext_lazy
|
||||
|
||||
from boto.s3.connection import S3Connection
|
||||
from boto.s3.key import Key
|
||||
from openedx.core.storage import get_storage
|
||||
from simple_history.models import HistoricalRecords
|
||||
from config_models.models import ConfigurationModel
|
||||
from course_modes.models import CourseMode
|
||||
@@ -616,9 +618,10 @@ class SoftwareSecurePhotoVerification(PhotoVerification):
|
||||
@status_before_must_be("created")
|
||||
def upload_face_image(self, img_data):
|
||||
"""
|
||||
Upload an image of the user's face to S3. `img_data` should be a raw
|
||||
Upload an image of the user's face. `img_data` should be a raw
|
||||
bytestream of a PNG image. This method will take the data, encrypt it
|
||||
using our FACE_IMAGE_AES_KEY, encode it with base64 and save it to S3.
|
||||
using our FACE_IMAGE_AES_KEY, encode it with base64 and save it to the
|
||||
storage backend.
|
||||
|
||||
Yes, encoding it to base64 adds compute and disk usage without much real
|
||||
benefit, but that's what the other end of this API is expecting to get.
|
||||
@@ -633,17 +636,18 @@ class SoftwareSecurePhotoVerification(PhotoVerification):
|
||||
aes_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["FACE_IMAGE_AES_KEY"]
|
||||
aes_key = aes_key_str.decode("hex")
|
||||
|
||||
s3_key = self._generate_s3_key("face")
|
||||
s3_key.set_contents_from_string(encrypt_and_encode(img_data, aes_key))
|
||||
path = self._get_path("face")
|
||||
buff = ContentFile(encrypt_and_encode(img_data, aes_key))
|
||||
self._storage.save(path, buff)
|
||||
|
||||
@status_before_must_be("created")
|
||||
def upload_photo_id_image(self, img_data):
|
||||
"""
|
||||
Upload the user's photo ID image to S3. `img_data` should be a raw
|
||||
Upload the user's photo ID image. `img_data` should be a raw
|
||||
bytestream of a PNG image. This method will take the data, encrypt it
|
||||
using a randomly generated AES key, encode it with base64 and save it to
|
||||
S3. The random key is also encrypted using Software Secure's public RSA
|
||||
key and stored in our `photo_id_key` field.
|
||||
using a randomly generated AES key, encode it with base64 and save it
|
||||
to the storage backend. The random key is also encrypted using Software
|
||||
Secure's public RSA key and stored in our `photo_id_key` field.
|
||||
|
||||
Yes, encoding it to base64 adds compute and disk usage without much real
|
||||
benefit, but that's what the other end of this API is expecting to get.
|
||||
@@ -662,9 +666,10 @@ class SoftwareSecurePhotoVerification(PhotoVerification):
|
||||
rsa_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["RSA_PUBLIC_KEY"]
|
||||
rsa_encrypted_aes_key = rsa_encrypt(aes_key, rsa_key_str)
|
||||
|
||||
# Upload this to S3
|
||||
s3_key = self._generate_s3_key("photo_id")
|
||||
s3_key.set_contents_from_string(encrypt_and_encode(img_data, aes_key))
|
||||
# Save this to the storage backend
|
||||
path = self._get_path("photo_id")
|
||||
buff = ContentFile(encrypt_and_encode(img_data, aes_key))
|
||||
self._storage.save(path, buff)
|
||||
|
||||
# Update our record fields
|
||||
self.photo_id_key = rsa_encrypted_aes_key.encode('base64')
|
||||
@@ -752,31 +757,42 @@ class SoftwareSecurePhotoVerification(PhotoVerification):
|
||||
string: The expiring URL for the image.
|
||||
|
||||
"""
|
||||
s3_key = self._generate_s3_key(name, override_receipt_id=override_receipt_id)
|
||||
return s3_key.generate_url(self.IMAGE_LINK_DURATION)
|
||||
path = self._get_path(name, override_receipt_id=override_receipt_id)
|
||||
return self._storage.url(path)
|
||||
|
||||
def _generate_s3_key(self, prefix, override_receipt_id=None):
|
||||
@cached_property
|
||||
def _storage(self):
|
||||
"""
|
||||
Generates a key for an s3 bucket location
|
||||
|
||||
Example: face/4dd1add9-6719-42f7-bea0-115c008c4fca
|
||||
Return the configured django storage backend.
|
||||
"""
|
||||
conn = S3Connection(
|
||||
settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["AWS_ACCESS_KEY"],
|
||||
settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["AWS_SECRET_KEY"]
|
||||
)
|
||||
bucket = conn.get_bucket(settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["S3_BUCKET"])
|
||||
config = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]
|
||||
|
||||
# Override the receipt ID if one is provided.
|
||||
# This allows us to construct S3 keys for images submitted in previous attempts
|
||||
# (used for reverification, where we send a new face photo with the same photo ID
|
||||
# from a previous attempt).
|
||||
# Default to the S3 backend for backward compatibility
|
||||
storage_class = config.get("STORAGE_CLASS", "storages.backends.s3boto.S3BotoStorage")
|
||||
storage_kwargs = config.get("STORAGE_KWARGS", {})
|
||||
|
||||
# Map old settings to the parameters expected by the storage backend
|
||||
if "AWS_ACCESS_KEY" in config:
|
||||
storage_kwargs["access_key"] = config["AWS_ACCESS_KEY"]
|
||||
if "AWS_SECRET_KEY" in config:
|
||||
storage_kwargs["secret_key"] = config["AWS_SECRET_KEY"]
|
||||
if "S3_BUCKET" in config:
|
||||
storage_kwargs["bucket"] = config["S3_BUCKET"]
|
||||
storage_kwargs["querystring_expire"] = self.IMAGE_LINK_DURATION
|
||||
|
||||
return get_storage(storage_class, **storage_kwargs)
|
||||
|
||||
def _get_path(self, prefix, override_receipt_id=None):
|
||||
"""
|
||||
Returns the path to a resource with this instance's `receipt_id`.
|
||||
|
||||
If `override_receipt_id` is given, the path to that resource will be
|
||||
retrieved instead. This allows us to retrieve images submitted in
|
||||
previous attempts (used for reverification, where we send a new face
|
||||
photo with the same photo ID from a previous attempt).
|
||||
"""
|
||||
receipt_id = self.receipt_id if override_receipt_id is None else override_receipt_id
|
||||
|
||||
key = Key(bucket)
|
||||
key.key = "{}/{}".format(prefix, receipt_id)
|
||||
|
||||
return key
|
||||
return os.path.join(prefix, receipt_id)
|
||||
|
||||
def _encrypted_user_photo_key_str(self):
|
||||
"""
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
from datetime import timedelta, datetime
|
||||
import json
|
||||
|
||||
import boto
|
||||
import ddt
|
||||
from django.conf import settings
|
||||
from django.db import IntegrityError
|
||||
@@ -13,6 +14,7 @@ from nose.tools import assert_is_none, assert_equals, assert_raises, assert_true
|
||||
import pytz
|
||||
import requests.exceptions
|
||||
|
||||
from common.test.utils import MockS3Mixin
|
||||
from student.tests.factories import UserFactory
|
||||
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
|
||||
from xmodule.modulestore.tests.factories import CourseFactory
|
||||
@@ -50,41 +52,6 @@ iwIDAQAB
|
||||
}
|
||||
|
||||
|
||||
class MockKey(object):
|
||||
"""
|
||||
Mocking a boto S3 Key object. It's a really dumb mock because once we
|
||||
write data to S3, we never read it again. We simply generate a link to it
|
||||
and pass that to Software Secure. Because of that, we don't even implement
|
||||
the ability to pull back previously written content in this mock.
|
||||
|
||||
Testing that the encryption/decryption roundtrip on the data works is in
|
||||
test_ssencrypt.py
|
||||
"""
|
||||
def __init__(self, bucket):
|
||||
self.bucket = bucket
|
||||
|
||||
def set_contents_from_string(self, contents):
|
||||
self.contents = contents
|
||||
|
||||
def generate_url(self, duration):
|
||||
return "http://fake-edx-s3.edx.org/"
|
||||
|
||||
|
||||
class MockBucket(object):
|
||||
"""Mocking a boto S3 Bucket object."""
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
|
||||
|
||||
class MockS3Connection(object):
|
||||
"""Mocking a boto S3 Connection"""
|
||||
def __init__(self, access_key, secret_key):
|
||||
pass
|
||||
|
||||
def get_bucket(self, bucket_name):
|
||||
return MockBucket(bucket_name)
|
||||
|
||||
|
||||
def mock_software_secure_post(url, headers=None, data=None, **kwargs):
|
||||
"""
|
||||
Mocks our interface when we post to Software Secure. Does basic assertions
|
||||
@@ -129,13 +96,16 @@ def mock_software_secure_post_unavailable(url, headers=None, data=None, **kwargs
|
||||
raise requests.exceptions.ConnectionError
|
||||
|
||||
|
||||
# Lots of patching to stub in our own settings, S3 substitutes, and HTTP posting
|
||||
# Lots of patching to stub in our own settings, and HTTP posting
|
||||
@patch.dict(settings.VERIFY_STUDENT, FAKE_SETTINGS)
|
||||
@patch('lms.djangoapps.verify_student.models.S3Connection', new=MockS3Connection)
|
||||
@patch('lms.djangoapps.verify_student.models.Key', new=MockKey)
|
||||
@patch('lms.djangoapps.verify_student.models.requests.post', new=mock_software_secure_post)
|
||||
@ddt.ddt
|
||||
class TestPhotoVerification(ModuleStoreTestCase):
|
||||
class TestPhotoVerification(MockS3Mixin, ModuleStoreTestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(TestPhotoVerification, self).setUp()
|
||||
connection = boto.connect_s3()
|
||||
connection.create_bucket(FAKE_SETTINGS['SOFTWARE_SECURE']['S3_BUCKET'])
|
||||
|
||||
def test_state_transitions(self):
|
||||
"""
|
||||
|
||||
@@ -18,6 +18,7 @@ Common traits:
|
||||
|
||||
import datetime
|
||||
import json
|
||||
import warnings
|
||||
|
||||
import dateutil
|
||||
|
||||
@@ -191,6 +192,14 @@ ENV_FEATURES = ENV_TOKENS.get('FEATURES', {})
|
||||
for feature, value in ENV_FEATURES.items():
|
||||
FEATURES[feature] = value
|
||||
|
||||
# Backward compatibility for deprecated feature names
|
||||
if 'ENABLE_S3_GRADE_DOWNLOADS' in FEATURES:
|
||||
warnings.warn(
|
||||
"'ENABLE_S3_GRADE_DOWNLOADS' is deprecated. Please use 'ENABLE_GRADE_DOWNLOADS' instead",
|
||||
DeprecationWarning,
|
||||
)
|
||||
FEATURES['ENABLE_GRADE_DOWNLOADS'] = FEATURES['ENABLE_S3_GRADE_DOWNLOADS']
|
||||
|
||||
CMS_BASE = ENV_TOKENS.get('CMS_BASE', 'studio.edx.org')
|
||||
|
||||
ALLOWED_HOSTS = [
|
||||
@@ -492,6 +501,20 @@ FILE_UPLOAD_STORAGE_PREFIX = ENV_TOKENS.get('FILE_UPLOAD_STORAGE_PREFIX', FILE_U
|
||||
# function in util/query.py, which is useful for very large database reads
|
||||
DATABASES = AUTH_TOKENS['DATABASES']
|
||||
|
||||
# The normal database user does not have enough permissions to run migrations.
# Migrations are run with separate credentials, given as DB_MIGRATION_*
# environment variables
for db_alias, db_config in DATABASES.items():
    # Read replicas are never migrated, so their credentials stay untouched.
    if db_alias == 'read_replica':
        continue
    for setting_name, env_var in (
        ('ENGINE', 'DB_MIGRATION_ENGINE'),
        ('USER', 'DB_MIGRATION_USER'),
        ('PASSWORD', 'DB_MIGRATION_PASS'),
        ('NAME', 'DB_MIGRATION_NAME'),
        ('HOST', 'DB_MIGRATION_HOST'),
        ('PORT', 'DB_MIGRATION_PORT'),
    ):
        # Fall back to the value already configured for this database when
        # the migration-specific environment variable is not set.
        db_config[setting_name] = os.environ.get(env_var, db_config[setting_name])
|
||||
|
||||
XQUEUE_INTERFACE = AUTH_TOKENS['XQUEUE_INTERFACE']
|
||||
|
||||
# Get the MODULESTORE from auth.json, but if it doesn't exist,
|
||||
|
||||
@@ -1,39 +0,0 @@
|
||||
"""
|
||||
A Django settings file for use on AWS while running
|
||||
database migrations, since we don't want to normally run the
|
||||
LMS with enough privileges to modify the database schema.
|
||||
"""
|
||||
|
||||
# We intentionally define lots of variables that aren't used, and
|
||||
# want to import all variables from base settings files
|
||||
# pylint: disable=wildcard-import, unused-wildcard-import
|
||||
|
||||
# Import everything from .aws so that our settings are based on those.
|
||||
from .aws import *
|
||||
import os
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
|
||||
|
||||
def get_db_overrides(db_name):
    """
    Build the migration-credential overrides for the named database.

    Each value is read from a DB_MIGRATION_* environment variable, falling
    back to the existing DATABASES entry for that database. The password has
    no fallback: migrations must be given an explicit password.

    Raises ImproperlyConfigured when DB_MIGRATION_PASS is not set.
    """
    defaults = DATABASES[db_name]
    env_names = {
        'ENGINE': 'DB_MIGRATION_ENGINE',
        'USER': 'DB_MIGRATION_USER',
        'NAME': 'DB_MIGRATION_NAME',
        'HOST': 'DB_MIGRATION_HOST',
        'PORT': 'DB_MIGRATION_PORT',
    }
    db_overrides = {
        setting: os.environ.get(env_var, defaults[setting])
        for setting, env_var in env_names.items()
    }
    db_overrides['PASSWORD'] = os.environ.get('DB_MIGRATION_PASS', None)

    if db_overrides['PASSWORD'] is None:
        raise ImproperlyConfigured("No database password was provided for running "
                                   "migrations. This is fatal.")
    return db_overrides
|
||||
|
||||
# You never migrate a read_replica
for db_name in DATABASES:
    if db_name != 'read_replica':
        DATABASES[db_name].update(get_db_overrides(db_name))
|
||||
@@ -77,7 +77,7 @@
|
||||
"ENABLE_PAYMENT_FAKE": true,
|
||||
"ENABLE_VERIFIED_CERTIFICATES": true,
|
||||
"ENABLE_DISCUSSION_SERVICE": true,
|
||||
"ENABLE_S3_GRADE_DOWNLOADS": true,
|
||||
"ENABLE_GRADE_DOWNLOADS": true,
|
||||
"ENABLE_THIRD_PARTY_AUTH": true,
|
||||
"ENABLE_COMBINED_LOGIN_REGISTRATION": true,
|
||||
"PREVIEW_LMS_BASE": "preview.localhost:8003",
|
||||
|
||||
@@ -197,9 +197,9 @@ FEATURES = {
|
||||
# when enrollment exceeds this number
|
||||
'MAX_ENROLLMENT_INSTR_BUTTONS': 200,
|
||||
|
||||
# Grade calculation started from the new instructor dashboard will write
|
||||
# grades CSV files to S3 and give links for downloads.
|
||||
'ENABLE_S3_GRADE_DOWNLOADS': False,
|
||||
# Grade calculation started from the instructor dashboard will write grades
|
||||
# CSV files to the configured storage backend and give links for downloads.
|
||||
'ENABLE_GRADE_DOWNLOADS': False,
|
||||
|
||||
# whether to use password policy enforcement or not
|
||||
'ENFORCE_PASSWORD_POLICY': True,
|
||||
|
||||
@@ -24,7 +24,7 @@ FEATURES['ENABLE_MANUAL_GIT_RELOAD'] = True
|
||||
FEATURES['ENABLE_SERVICE_STATUS'] = True
|
||||
FEATURES['ENABLE_SHOPPING_CART'] = True
|
||||
FEATURES['AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'] = True
|
||||
FEATURES['ENABLE_S3_GRADE_DOWNLOADS'] = True
|
||||
FEATURES['ENABLE_GRADE_DOWNLOADS'] = True
|
||||
FEATURES['ENABLE_PAYMENT_FAKE'] = True
|
||||
|
||||
|
||||
|
||||
27
lms/envs/openstack.py
Normal file
27
lms/envs/openstack.py
Normal file
@@ -0,0 +1,27 @@
|
||||
"""
Settings for OpenStack deployments.

Reads Swift credentials from AUTH_TOKENS and selects the default django
storage backend: an explicit override if given, Swift when credentials are
present, or local filesystem storage as the fallback.
"""

from .aws import *  # pylint: disable=wildcard-import, unused-wildcard-import

# Swift authentication credentials (v1 auth by default; set
# SWIFT_AUTH_VERSION for other auth versions).
SWIFT_AUTH_URL = AUTH_TOKENS.get('SWIFT_AUTH_URL')
SWIFT_AUTH_VERSION = AUTH_TOKENS.get('SWIFT_AUTH_VERSION', 1)
SWIFT_USERNAME = AUTH_TOKENS.get('SWIFT_USERNAME')
SWIFT_KEY = AUTH_TOKENS.get('SWIFT_KEY')
SWIFT_TENANT_NAME = AUTH_TOKENS.get('SWIFT_TENANT_NAME')
SWIFT_TENANT_ID = AUTH_TOKENS.get('SWIFT_TENANT_ID')

# Store uploads in the same container/prefix configured for file uploads.
SWIFT_CONTAINER_NAME = FILE_UPLOAD_STORAGE_BUCKET_NAME
SWIFT_NAME_PREFIX = FILE_UPLOAD_STORAGE_PREFIX

# Expiring signed download links ("temp URLs").
SWIFT_USE_TEMP_URLS = AUTH_TOKENS.get('SWIFT_USE_TEMP_URLS', False)
SWIFT_TEMP_URL_KEY = AUTH_TOKENS.get('SWIFT_TEMP_URL_KEY')
SWIFT_TEMP_URL_DURATION = AUTH_TOKENS.get('SWIFT_TEMP_URL_DURATION', 1800)  # seconds

# Always define SWIFT_EXTRA_OPTIONS so that code reading this setting
# without a default does not fail when no region is configured; only
# populate it when a region name is provided.
SWIFT_EXTRA_OPTIONS = {}
if AUTH_TOKENS.get('SWIFT_REGION_NAME'):
    SWIFT_EXTRA_OPTIONS['region_name'] = AUTH_TOKENS['SWIFT_REGION_NAME']

# Choose the default storage backend: honor an explicit override first,
# use Swift when credentials are configured, otherwise local filesystem.
if AUTH_TOKENS.get('DEFAULT_FILE_STORAGE'):
    DEFAULT_FILE_STORAGE = AUTH_TOKENS.get('DEFAULT_FILE_STORAGE')
elif SWIFT_AUTH_URL and SWIFT_USERNAME and SWIFT_KEY:
    DEFAULT_FILE_STORAGE = 'swift.storage.SwiftStorage'
else:
    DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
|
||||
@@ -67,7 +67,7 @@ FEATURES['ENABLE_SHOPPING_CART'] = True
|
||||
FEATURES['ENABLE_VERIFIED_CERTIFICATES'] = True
|
||||
|
||||
# Enable this feature for course staff grade downloads, to enable acceptance tests
|
||||
FEATURES['ENABLE_S3_GRADE_DOWNLOADS'] = True
|
||||
FEATURES['ENABLE_GRADE_DOWNLOADS'] = True
|
||||
FEATURES['ALLOW_COURSE_STAFF_GRADE_DOWNLOADS'] = True
|
||||
|
||||
GRADES_DOWNLOAD['ROOT_PATH'] += "-{}".format(os.getpid())
|
||||
|
||||
@@ -19,7 +19,7 @@ from openedx.core.djangolib.markup import HTML, Text
|
||||
<p><input type="button" name="list-anon-ids" value="${_("Get Student Anonymized IDs CSV")}" data-csv="true" class="csv" data-endpoint="${ section_data['get_anon_ids_url'] }" class="${'is-disabled' if disable_buttons else ''}" aria-disabled="${'true' if disable_buttons else 'false'}" ></p>
|
||||
</div>
|
||||
|
||||
%if settings.FEATURES.get('ENABLE_S3_GRADE_DOWNLOADS'):
|
||||
%if settings.FEATURES.get('ENABLE_GRADE_DOWNLOADS'):
|
||||
<div class="reports-download-container action-type-container">
|
||||
<hr>
|
||||
<h3 class="hd hd-3">${_("Reports")}</h3>
|
||||
|
||||
@@ -3,6 +3,9 @@ Django storage backends for Open edX.
|
||||
"""
|
||||
from django_pipeline_forgiving.storages import PipelineForgivingStorage
|
||||
from django.contrib.staticfiles.storage import StaticFilesStorage
|
||||
from django.core.files.storage import get_storage_class
|
||||
from django.utils.lru_cache import lru_cache
|
||||
|
||||
from pipeline.storage import NonPackagingMixin
|
||||
from require.storage import OptimizedFilesMixin
|
||||
from openedx.core.djangoapps.theming.storage import (
|
||||
@@ -39,3 +42,16 @@ class DevelopmentStorage(
|
||||
so that we can skip packaging and optimization.
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
@lru_cache()
|
||||
def get_storage(storage_class=None, **kwargs):
|
||||
"""
|
||||
Returns a storage instance with the given class name and kwargs. If the
|
||||
class name is not given, an instance of the default storage is returned.
|
||||
Instances are cached so that if this function is called multiple times
|
||||
with the same arguments, the same instance is returned. This is useful if
|
||||
the storage implementation makes http requests when instantiated, for
|
||||
example.
|
||||
"""
|
||||
return get_storage_class(storage_class)(**kwargs)
|
||||
|
||||
@@ -89,14 +89,14 @@ python-social-auth==0.2.12
|
||||
pytz==2015.2
|
||||
pysrt==0.4.7
|
||||
PyYAML==3.10
|
||||
requests==2.7.0
|
||||
requests==2.9.1
|
||||
requests-oauthlib==0.4.1
|
||||
scipy==0.14.0
|
||||
Shapely==1.2.16
|
||||
singledispatch==3.4.0.2
|
||||
sorl-thumbnail==12.3
|
||||
sortedcontainers==0.9.2
|
||||
stevedore==0.14.1
|
||||
stevedore==1.10.0
|
||||
sure==1.2.3
|
||||
sympy==0.7.1
|
||||
xmltodict==0.4.1
|
||||
|
||||
6
requirements/edx/openstack.txt
Normal file
6
requirements/edx/openstack.txt
Normal file
@@ -0,0 +1,6 @@
|
||||
#
|
||||
# Dependencies for OpenStack deployments.
|
||||
#
|
||||
|
||||
# OpenStack swift backend for django storage API
|
||||
django-storage-swift==1.2.9
|
||||
Reference in New Issue
Block a user