merge out from master
This commit is contained in:
@@ -211,7 +211,11 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase):
|
||||
new_loc = descriptor.location._replace(org='MITx', course='999')
|
||||
print "Checking {0} should now also be at {1}".format(descriptor.location.url(), new_loc.url())
|
||||
resp = self.client.get(reverse('edit_unit', kwargs={'location': new_loc.url()}))
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
def test_bad_contentstore_request(self):
|
||||
resp = self.client.get('http://localhost:8001/c4x/CDX/123123/asset/&images_circuits_Lab7Solution2.png')
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
def test_delete_course(self):
|
||||
import_from_xml(modulestore(), 'common/test/data/', ['full'])
|
||||
@@ -328,11 +332,11 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase):
|
||||
self.assertEqual(wrapper.counter, 4)
|
||||
|
||||
# make sure we pre-fetched a known sequential which should be at depth=2
|
||||
self.assertTrue(Location(['i4x', 'edX', 'full', 'sequential',
|
||||
self.assertTrue(Location(['i4x', 'edX', 'full', 'sequential',
|
||||
'Administrivia_and_Circuit_Elements', None]) in course.system.module_data)
|
||||
|
||||
# make sure we don't have a specific vertical which should be at depth=3
|
||||
self.assertFalse(Location(['i4x', 'edX', 'full', 'vertical', 'vertical_58',
|
||||
self.assertFalse(Location(['i4x', 'edX', 'full', 'vertical', 'vertical_58',
|
||||
None]) in course.system.module_data)
|
||||
|
||||
def test_export_course_with_unknown_metadata(self):
|
||||
@@ -556,7 +560,7 @@ class ContentStoreTest(ModuleStoreTestCase):
|
||||
module_store.update_children(parent.location, parent.children + [new_component_location.url()])
|
||||
|
||||
# flush the cache
|
||||
module_store.get_cached_metadata_inheritance_tree(new_component_location, -1)
|
||||
module_store.refresh_cached_metadata_inheritance_tree(new_component_location)
|
||||
new_module = module_store.get_item(new_component_location)
|
||||
|
||||
# check for grace period definition which should be defined at the course level
|
||||
@@ -571,7 +575,7 @@ class ContentStoreTest(ModuleStoreTestCase):
|
||||
module_store.update_metadata(new_module.location, own_metadata(new_module))
|
||||
|
||||
# flush the cache and refetch
|
||||
module_store.get_cached_metadata_inheritance_tree(new_component_location, -1)
|
||||
module_store.refresh_cached_metadata_inheritance_tree(new_component_location)
|
||||
new_module = module_store.get_item(new_component_location)
|
||||
|
||||
self.assertEqual(timedelta(1), new_module.lms.graceperiod)
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
import datetime
|
||||
import json
|
||||
import copy
|
||||
from util import converters
|
||||
from util.converters import jsdate_to_time
|
||||
|
||||
from django.contrib.auth.models import User
|
||||
from django.test.client import Client
|
||||
@@ -15,69 +13,13 @@ from models.settings.course_details import (CourseDetails,
|
||||
from models.settings.course_grading import CourseGradingModel
|
||||
from contentstore.utils import get_modulestore
|
||||
|
||||
from django.test import TestCase
|
||||
from .utils import ModuleStoreTestCase
|
||||
from xmodule.modulestore.tests.factories import CourseFactory
|
||||
|
||||
from models.settings.course_metadata import CourseMetadata
|
||||
from xmodule.modulestore.xml_importer import import_from_xml
|
||||
from xmodule.modulestore.django import modulestore
|
||||
import time
|
||||
|
||||
|
||||
# YYYY-MM-DDThh:mm:ss.s+/-HH:MM
|
||||
class ConvertersTestCase(TestCase):
|
||||
@staticmethod
|
||||
def struct_to_datetime(struct_time):
|
||||
return datetime.datetime(struct_time.tm_year, struct_time.tm_mon,
|
||||
struct_time.tm_mday, struct_time.tm_hour,
|
||||
struct_time.tm_min, struct_time.tm_sec, tzinfo=UTC())
|
||||
|
||||
def compare_dates(self, date1, date2, expected_delta):
|
||||
dt1 = ConvertersTestCase.struct_to_datetime(date1)
|
||||
dt2 = ConvertersTestCase.struct_to_datetime(date2)
|
||||
self.assertEqual(dt1 - dt2, expected_delta, str(date1) + "-"
|
||||
+ str(date2) + "!=" + str(expected_delta))
|
||||
|
||||
def test_iso_to_struct(self):
|
||||
'''Test conversion from iso compatible date strings to struct_time'''
|
||||
self.compare_dates(converters.jsdate_to_time("2013-01-01"),
|
||||
converters.jsdate_to_time("2012-12-31"),
|
||||
datetime.timedelta(days=1))
|
||||
self.compare_dates(converters.jsdate_to_time("2013-01-01T00"),
|
||||
converters.jsdate_to_time("2012-12-31T23"),
|
||||
datetime.timedelta(hours=1))
|
||||
self.compare_dates(converters.jsdate_to_time("2013-01-01T00:00"),
|
||||
converters.jsdate_to_time("2012-12-31T23:59"),
|
||||
datetime.timedelta(minutes=1))
|
||||
self.compare_dates(converters.jsdate_to_time("2013-01-01T00:00:00"),
|
||||
converters.jsdate_to_time("2012-12-31T23:59:59"),
|
||||
datetime.timedelta(seconds=1))
|
||||
self.compare_dates(converters.jsdate_to_time("2013-01-01T00:00:00Z"),
|
||||
converters.jsdate_to_time("2012-12-31T23:59:59Z"),
|
||||
datetime.timedelta(seconds=1))
|
||||
self.compare_dates(
|
||||
converters.jsdate_to_time("2012-12-31T23:00:01-01:00"),
|
||||
converters.jsdate_to_time("2013-01-01T00:00:00+01:00"),
|
||||
datetime.timedelta(hours=1, seconds=1))
|
||||
|
||||
def test_struct_to_iso(self):
|
||||
'''
|
||||
Test converting time reprs to iso dates
|
||||
'''
|
||||
self.assertEqual(
|
||||
converters.time_to_isodate(
|
||||
time.strptime("2012-12-31T23:59:59Z", "%Y-%m-%dT%H:%M:%SZ")),
|
||||
"2012-12-31T23:59:59Z")
|
||||
self.assertEqual(
|
||||
converters.time_to_isodate(
|
||||
jsdate_to_time("2012-12-31T23:59:59Z")),
|
||||
"2012-12-31T23:59:59Z")
|
||||
self.assertEqual(
|
||||
converters.time_to_isodate(
|
||||
jsdate_to_time("2012-12-31T23:00:01-01:00")),
|
||||
"2013-01-01T00:00:01Z")
|
||||
|
||||
from xmodule.fields import Date
|
||||
|
||||
class CourseTestCase(ModuleStoreTestCase):
|
||||
def setUp(self):
|
||||
@@ -206,17 +148,24 @@ class CourseDetailsViewTest(CourseTestCase):
|
||||
self.assertEqual(details['intro_video'], encoded.get('intro_video', None), context + " intro_video not ==")
|
||||
self.assertEqual(details['effort'], encoded['effort'], context + " efforts not ==")
|
||||
|
||||
@staticmethod
|
||||
def struct_to_datetime(struct_time):
|
||||
return datetime.datetime(struct_time.tm_year, struct_time.tm_mon,
|
||||
struct_time.tm_mday, struct_time.tm_hour,
|
||||
struct_time.tm_min, struct_time.tm_sec, tzinfo=UTC())
|
||||
|
||||
def compare_date_fields(self, details, encoded, context, field):
|
||||
if details[field] is not None:
|
||||
date = Date()
|
||||
if field in encoded and encoded[field] is not None:
|
||||
encoded_encoded = jsdate_to_time(encoded[field])
|
||||
dt1 = ConvertersTestCase.struct_to_datetime(encoded_encoded)
|
||||
encoded_encoded = date.from_json(encoded[field])
|
||||
dt1 = CourseDetailsViewTest.struct_to_datetime(encoded_encoded)
|
||||
|
||||
if isinstance(details[field], datetime.datetime):
|
||||
dt2 = details[field]
|
||||
else:
|
||||
details_encoded = jsdate_to_time(details[field])
|
||||
dt2 = ConvertersTestCase.struct_to_datetime(details_encoded)
|
||||
details_encoded = date.from_json(details[field])
|
||||
dt2 = CourseDetailsViewTest.struct_to_datetime(details_encoded)
|
||||
|
||||
expected_delta = datetime.timedelta(0)
|
||||
self.assertEqual(dt1 - dt2, expected_delta, str(dt1) + "!=" + str(dt2) + " at " + context)
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
from xmodule.modulestore.django import modulestore
|
||||
from xmodule.modulestore import Location
|
||||
from xmodule.modulestore.exceptions import ItemNotFoundError
|
||||
from xmodule.modulestore.inheritance import own_metadata
|
||||
@@ -6,9 +5,9 @@ import json
|
||||
from json.encoder import JSONEncoder
|
||||
import time
|
||||
from contentstore.utils import get_modulestore
|
||||
from util.converters import jsdate_to_time, time_to_date
|
||||
from models.settings import course_grading
|
||||
from contentstore.utils import update_item
|
||||
from xmodule.fields import Date
|
||||
import re
|
||||
import logging
|
||||
|
||||
@@ -81,8 +80,14 @@ class CourseDetails(object):
|
||||
|
||||
dirty = False
|
||||
|
||||
# In the descriptor's setter, the date is converted to JSON using Date's to_json method.
|
||||
# Calling to_json on something that is already JSON doesn't work. Since reaching directly
|
||||
# into the model is nasty, convert the JSON Date to a Python date, which is what the
|
||||
# setter expects as input.
|
||||
date = Date()
|
||||
|
||||
if 'start_date' in jsondict:
|
||||
converted = jsdate_to_time(jsondict['start_date'])
|
||||
converted = date.from_json(jsondict['start_date'])
|
||||
else:
|
||||
converted = None
|
||||
if converted != descriptor.start:
|
||||
@@ -90,7 +95,7 @@ class CourseDetails(object):
|
||||
descriptor.start = converted
|
||||
|
||||
if 'end_date' in jsondict:
|
||||
converted = jsdate_to_time(jsondict['end_date'])
|
||||
converted = date.from_json(jsondict['end_date'])
|
||||
else:
|
||||
converted = None
|
||||
|
||||
@@ -99,7 +104,7 @@ class CourseDetails(object):
|
||||
descriptor.end = converted
|
||||
|
||||
if 'enrollment_start' in jsondict:
|
||||
converted = jsdate_to_time(jsondict['enrollment_start'])
|
||||
converted = date.from_json(jsondict['enrollment_start'])
|
||||
else:
|
||||
converted = None
|
||||
|
||||
@@ -108,7 +113,7 @@ class CourseDetails(object):
|
||||
descriptor.enrollment_start = converted
|
||||
|
||||
if 'enrollment_end' in jsondict:
|
||||
converted = jsdate_to_time(jsondict['enrollment_end'])
|
||||
converted = date.from_json(jsondict['enrollment_end'])
|
||||
else:
|
||||
converted = None
|
||||
|
||||
@@ -178,6 +183,6 @@ class CourseSettingsEncoder(json.JSONEncoder):
|
||||
elif isinstance(obj, Location):
|
||||
return obj.dict()
|
||||
elif isinstance(obj, time.struct_time):
|
||||
return time_to_date(obj)
|
||||
return Date().to_json(obj)
|
||||
else:
|
||||
return JSONEncoder.default(self, obj)
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
from xmodule.modulestore import Location
|
||||
from contentstore.utils import get_modulestore
|
||||
import re
|
||||
from util import converters
|
||||
from datetime import timedelta
|
||||
|
||||
|
||||
|
||||
@@ -81,7 +81,7 @@ $(document).ready(function () {
|
||||
});
|
||||
|
||||
// general link management - smooth scrolling page links
|
||||
$('a[rel*="view"]').bind('click', linkSmoothScroll);
|
||||
$('a[rel*="view"][href^="#"]').bind('click', smoothScrollLink);
|
||||
|
||||
|
||||
// toggling overview section details
|
||||
@@ -148,7 +148,7 @@ $(document).ready(function () {
|
||||
});
|
||||
});
|
||||
|
||||
function linkSmoothScroll(e) {
|
||||
function smoothScrollLink(e) {
|
||||
(e).preventDefault();
|
||||
|
||||
$.smoothScroll({
|
||||
|
||||
@@ -151,7 +151,7 @@
|
||||
<figcaption class="description">Simple two-level outline to organize your couse. Drag and drop, and see your course at a glance.</figcaption>
|
||||
</figure>
|
||||
|
||||
<a href="#" rel="view" class="action action-modal-close">
|
||||
<a href="" rel="view" class="action action-modal-close">
|
||||
<i class="ss-icon ss-symbolicons-block icon icon-close">␡</i>
|
||||
<span class="label">close modal</span>
|
||||
</a>
|
||||
@@ -164,7 +164,7 @@
|
||||
<figcaption class="description">Quickly create videos, text snippets, inline discussions, and a variety of problem types.</figcaption>
|
||||
</figure>
|
||||
|
||||
<a href="#" rel="view" class="action action-modal-close">
|
||||
<a href="" rel="view" class="action action-modal-close">
|
||||
<i class="ss-icon ss-symbolicons-block icon icon-close">␡</i>
|
||||
<span class="label">close modal</span>
|
||||
</a>
|
||||
@@ -177,7 +177,7 @@
|
||||
<figcaption class="description">Simply set the date of a section or subsection, and Studio will publish it to your students for you.</figcaption>
|
||||
</figure>
|
||||
|
||||
<a href="#" rel="view" class="action action-modal-close">
|
||||
<a href="" rel="view" class="action action-modal-close">
|
||||
<i class="ss-icon ss-symbolicons-block icon icon-close">␡</i>
|
||||
<span class="label">close modal</span>
|
||||
</a>
|
||||
|
||||
@@ -5,6 +5,7 @@ from django.http import HttpResponse, Http404, HttpResponseNotModified
|
||||
|
||||
from xmodule.contentstore.django import contentstore
|
||||
from xmodule.contentstore.content import StaticContent, XASSET_LOCATION_TAG
|
||||
from xmodule.modulestore import InvalidLocationError
|
||||
from cache_toolbox.core import get_cached_content, set_cached_content
|
||||
from xmodule.exceptions import NotFoundError
|
||||
|
||||
@@ -13,7 +14,14 @@ class StaticContentServer(object):
|
||||
def process_request(self, request):
|
||||
# look to see if the request is prefixed with 'c4x' tag
|
||||
if request.path.startswith('/' + XASSET_LOCATION_TAG + '/'):
|
||||
loc = StaticContent.get_location_from_path(request.path)
|
||||
try:
|
||||
loc = StaticContent.get_location_from_path(request.path)
|
||||
except InvalidLocationError:
|
||||
# return a 'Bad Request' to browser as we have a malformed Location
|
||||
response = HttpResponse()
|
||||
response.status_code = 400
|
||||
return response
|
||||
|
||||
# first look in our cache so we don't have to round-trip to the DB
|
||||
content = get_cached_content(loc)
|
||||
if content is None:
|
||||
|
||||
@@ -325,7 +325,12 @@ def change_enrollment(request):
|
||||
"course:{0}".format(course_num),
|
||||
"run:{0}".format(run)])
|
||||
|
||||
enrollment, created = CourseEnrollment.objects.get_or_create(user=user, course_id=course.id)
|
||||
try:
|
||||
enrollment, created = CourseEnrollment.objects.get_or_create(user=user, course_id=course.id)
|
||||
except IntegrityError:
|
||||
# If we've already created this enrollment in a separate transaction,
|
||||
# then just continue
|
||||
pass
|
||||
return {'success': True}
|
||||
|
||||
elif action == "unenroll":
|
||||
@@ -369,14 +374,14 @@ def login_user(request, error=""):
|
||||
try:
|
||||
user = User.objects.get(email=email)
|
||||
except User.DoesNotExist:
|
||||
log.warning("Login failed - Unknown user email: {0}".format(email))
|
||||
log.warning(u"Login failed - Unknown user email: {0}".format(email))
|
||||
return HttpResponse(json.dumps({'success': False,
|
||||
'value': 'Email or password is incorrect.'})) # TODO: User error message
|
||||
|
||||
username = user.username
|
||||
user = authenticate(username=username, password=password)
|
||||
if user is None:
|
||||
log.warning("Login failed - password for {0} is invalid".format(email))
|
||||
log.warning(u"Login failed - password for {0} is invalid".format(email))
|
||||
return HttpResponse(json.dumps({'success': False,
|
||||
'value': 'Email or password is incorrect.'}))
|
||||
|
||||
@@ -392,7 +397,7 @@ def login_user(request, error=""):
|
||||
log.critical("Login failed - Could not create session. Is memcached running?")
|
||||
log.exception(e)
|
||||
|
||||
log.info("Login success - {0} ({1})".format(username, email))
|
||||
log.info(u"Login success - {0} ({1})".format(username, email))
|
||||
|
||||
try_change_enrollment(request)
|
||||
|
||||
@@ -400,7 +405,7 @@ def login_user(request, error=""):
|
||||
|
||||
return HttpResponse(json.dumps({'success': True}))
|
||||
|
||||
log.warning("Login failed - Account not active for user {0}, resending activation".format(username))
|
||||
log.warning(u"Login failed - Account not active for user {0}, resending activation".format(username))
|
||||
|
||||
reactivation_email_for_user(user)
|
||||
not_activated_msg = "This account has not been activated. We have " + \
|
||||
|
||||
@@ -1,37 +0,0 @@
|
||||
import time
|
||||
import datetime
|
||||
import calendar
|
||||
import dateutil.parser
|
||||
|
||||
|
||||
def time_to_date(time_obj):
|
||||
"""
|
||||
Convert a time.time_struct to a true universal time (can pass to js Date
|
||||
constructor)
|
||||
"""
|
||||
return calendar.timegm(time_obj) * 1000
|
||||
|
||||
|
||||
def time_to_isodate(source):
|
||||
'''Convert to an iso date'''
|
||||
if isinstance(source, time.struct_time):
|
||||
return time.strftime('%Y-%m-%dT%H:%M:%SZ', source)
|
||||
elif isinstance(source, datetime):
|
||||
return source.isoformat() + 'Z'
|
||||
|
||||
|
||||
def jsdate_to_time(field):
|
||||
"""
|
||||
Convert a universal time (iso format) or msec since epoch to a time obj
|
||||
"""
|
||||
if field is None:
|
||||
return field
|
||||
elif isinstance(field, basestring):
|
||||
d = dateutil.parser.parse(field)
|
||||
return d.utctimetuple()
|
||||
elif isinstance(field, (int, long, float)):
|
||||
return time.gmtime(field / 1000)
|
||||
elif isinstance(field, time.struct_time):
|
||||
return field
|
||||
else:
|
||||
raise ValueError("Couldn't convert %r to time" % field)
|
||||
@@ -1961,9 +1961,10 @@ class ImageResponse(LoncapaResponse):
|
||||
self.ielements = self.inputfields
|
||||
self.answer_ids = [ie.get('id') for ie in self.ielements]
|
||||
|
||||
|
||||
def get_score(self, student_answers):
|
||||
correct_map = CorrectMap()
|
||||
expectedset = self.get_answers()
|
||||
expectedset = self.get_mapped_answers()
|
||||
for aid in self.answer_ids: # loop through IDs of <imageinput>
|
||||
# fields in our stanza
|
||||
given = student_answers[
|
||||
@@ -2018,11 +2019,42 @@ class ImageResponse(LoncapaResponse):
|
||||
break
|
||||
return correct_map
|
||||
|
||||
def get_answers(self):
|
||||
return (
|
||||
def get_mapped_answers(self):
|
||||
'''
|
||||
Returns the internal representation of the answers
|
||||
|
||||
Input:
|
||||
None
|
||||
Returns:
|
||||
tuple (dict, dict) -
|
||||
rectangles (dict) - a map of inputs to the defined rectangle for that input
|
||||
regions (dict) - a map of inputs to the defined region for that input
|
||||
'''
|
||||
answers = (
|
||||
dict([(ie.get('id'), ie.get(
|
||||
'rectangle')) for ie in self.ielements]),
|
||||
dict([(ie.get('id'), ie.get('regions')) for ie in self.ielements]))
|
||||
return answers
|
||||
|
||||
def get_answers(self):
|
||||
'''
|
||||
Returns the external representation of the answers
|
||||
|
||||
Input:
|
||||
None
|
||||
Returns:
|
||||
dict (str, (str, str)) - a map of inputs to a tuple of their rectange
|
||||
and their regions
|
||||
'''
|
||||
answers = {}
|
||||
for ie in self.ielements:
|
||||
ie_id = ie.get('id')
|
||||
answers[ie_id] = (ie.get('rectangle'), ie.get('regions'))
|
||||
|
||||
return answers
|
||||
|
||||
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
|
||||
@@ -2087,8 +2119,8 @@ class AnnotationResponse(LoncapaResponse):
|
||||
correct_option = self._find_option_with_choice(
|
||||
inputfield, 'correct')
|
||||
if correct_option is not None:
|
||||
answer_map[inputfield.get(
|
||||
'id')] = correct_option.get('description')
|
||||
input_id = inputfield.get('id')
|
||||
answer_map[input_id] = correct_option.get('description')
|
||||
return answer_map
|
||||
|
||||
def _get_max_points(self):
|
||||
|
||||
@@ -36,6 +36,10 @@ class ResponseTest(unittest.TestCase):
|
||||
correct_map = problem.grade_answers(input_dict)
|
||||
self.assertEquals(correct_map.get_correctness('1_2_1'), expected_correctness)
|
||||
|
||||
def assert_answer_format(self, problem):
|
||||
answers = problem.get_question_answers()
|
||||
self.assertTrue(answers['1_2_1'] is not None)
|
||||
|
||||
def assert_multiple_grade(self, problem, correct_answers, incorrect_answers):
|
||||
for input_str in correct_answers:
|
||||
result = problem.grade_answers({'1_2_1': input_str}).get_correctness('1_2_1')
|
||||
@@ -166,6 +170,13 @@ class ImageResponseTest(ResponseTest):
|
||||
incorrect_inputs = ["[0,0]", "[600,300]"]
|
||||
self.assert_multiple_grade(problem, correct_inputs, incorrect_inputs)
|
||||
|
||||
def test_show_answer(self):
|
||||
rectangle_str = "(100,100)-(200,200)"
|
||||
region_str = "[[10,10], [20,10], [20, 30]]"
|
||||
|
||||
problem = self.build_problem(regions=region_str, rectangle=rectangle_str)
|
||||
self.assert_answer_format(problem)
|
||||
|
||||
|
||||
class SymbolicResponseTest(unittest.TestCase):
|
||||
def test_sr_grade(self):
|
||||
|
||||
@@ -635,8 +635,17 @@ class CourseDescriptor(CourseFields, SequenceDescriptor):
|
||||
|
||||
@property
|
||||
def start_date_text(self):
|
||||
def try_parse_iso_8601(text):
|
||||
try:
|
||||
result = datetime.strptime(text, "%Y-%m-%dT%H:%M")
|
||||
result = result.strftime("%b %d, %Y")
|
||||
except ValueError:
|
||||
result = text.title()
|
||||
|
||||
return result
|
||||
|
||||
if isinstance(self.advertised_start, basestring):
|
||||
return self.advertised_start
|
||||
return try_parse_iso_8601(self.advertised_start)
|
||||
elif self.advertised_start is None and self.start is None:
|
||||
return 'TBD'
|
||||
else:
|
||||
|
||||
@@ -14,7 +14,6 @@ class Date(ModelType):
|
||||
'''
|
||||
Date fields know how to parse and produce json (iso) compatible formats.
|
||||
'''
|
||||
# NB: these are copies of util.converters.*
|
||||
def from_json(self, field):
|
||||
"""
|
||||
Parse an optional metadata key containing a time: if present, complain
|
||||
|
||||
@@ -9,6 +9,7 @@ from fs.osfs import OSFS
|
||||
from itertools import repeat
|
||||
from path import path
|
||||
from datetime import datetime
|
||||
from operator import attrgetter
|
||||
|
||||
from importlib import import_module
|
||||
from xmodule.errortracker import null_error_tracker, exc_info_to_str
|
||||
@@ -96,6 +97,7 @@ class MongoKeyValueStore(KeyValueStore):
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
MongoUsage = namedtuple('MongoUsage', 'id, def_id')
|
||||
|
||||
|
||||
@@ -107,7 +109,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
|
||||
references to metadata_inheritance_tree
|
||||
"""
|
||||
def __init__(self, modulestore, module_data, default_class, resources_fs,
|
||||
error_tracker, render_template, metadata_inheritance_tree = None):
|
||||
error_tracker, render_template, metadata_cache=None):
|
||||
"""
|
||||
modulestore: the module store that can be used to retrieve additional modules
|
||||
|
||||
@@ -132,9 +134,12 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
|
||||
# cdodge: other Systems have a course_id attribute defined. To keep things consistent, let's
|
||||
# define an attribute here as well, even though it's None
|
||||
self.course_id = None
|
||||
self.metadata_inheritance_tree = metadata_inheritance_tree
|
||||
self.metadata_cache = metadata_cache
|
||||
|
||||
def load_item(self, location):
|
||||
"""
|
||||
Return an XModule instance for the specified location
|
||||
"""
|
||||
location = Location(location)
|
||||
json_data = self.module_data.get(location)
|
||||
if json_data is None:
|
||||
@@ -165,8 +170,8 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
|
||||
|
||||
model_data = DbModel(kvs, class_, None, MongoUsage(self.course_id, location))
|
||||
module = class_(self, location, model_data)
|
||||
if self.metadata_inheritance_tree is not None:
|
||||
metadata_to_inherit = self.metadata_inheritance_tree.get('parent_metadata', {}).get(location.url(), {})
|
||||
if self.metadata_cache is not None:
|
||||
metadata_to_inherit = self.metadata_cache.get(metadata_cache_key(location), {}).get('parent_metadata', {}).get(location.url(), {})
|
||||
inherit_metadata(module, metadata_to_inherit)
|
||||
return module
|
||||
except:
|
||||
@@ -196,16 +201,19 @@ def location_to_query(location, wildcard=True):
|
||||
return query
|
||||
|
||||
|
||||
def namedtuple_to_son(namedtuple, prefix=''):
|
||||
def namedtuple_to_son(ntuple, prefix=''):
|
||||
"""
|
||||
Converts a namedtuple into a SON object with the same key order
|
||||
"""
|
||||
son = SON()
|
||||
for idx, field_name in enumerate(namedtuple._fields):
|
||||
son[prefix + field_name] = namedtuple[idx]
|
||||
for idx, field_name in enumerate(ntuple._fields):
|
||||
son[prefix + field_name] = ntuple[idx]
|
||||
return son
|
||||
|
||||
|
||||
metadata_cache_key = attrgetter('org', 'course')
|
||||
|
||||
|
||||
class MongoModuleStore(ModuleStoreBase):
|
||||
"""
|
||||
A Mongodb backed ModuleStore
|
||||
@@ -228,7 +236,6 @@ class MongoModuleStore(ModuleStoreBase):
|
||||
if user is not None and password is not None:
|
||||
self.collection.database.authenticate(user, password)
|
||||
|
||||
|
||||
# Force mongo to report errors, at the expense of performance
|
||||
self.collection.safe = True
|
||||
|
||||
@@ -258,10 +265,15 @@ class MongoModuleStore(ModuleStoreBase):
|
||||
query = {
|
||||
'_id.org': location.org,
|
||||
'_id.course': location.course,
|
||||
'_id.category': {'$in': [ 'course', 'chapter', 'sequential', 'vertical']}
|
||||
'_id.category': {'$in': ['course', 'chapter', 'sequential', 'vertical']}
|
||||
}
|
||||
# we just want the Location, children, and metadata
|
||||
record_filter = {'_id': 1, 'definition.children': 1, 'metadata': 1}
|
||||
# we just want the Location, children, and inheritable metadata
|
||||
record_filter = {'_id': 1, 'definition.children': 1}
|
||||
|
||||
# just get the inheritable metadata since that is all we need for the computation
|
||||
# this minimizes both data pushed over the wire
|
||||
for attr in INHERITABLE_METADATA:
|
||||
record_filter['metadata.{0}'.format(attr)] = 1
|
||||
|
||||
# call out to the DB
|
||||
resultset = self.collection.find(query, record_filter)
|
||||
@@ -278,7 +290,11 @@ class MongoModuleStore(ModuleStoreBase):
|
||||
|
||||
# now traverse the tree and compute down the inherited metadata
|
||||
metadata_to_inherit = {}
|
||||
|
||||
def _compute_inherited_metadata(url):
|
||||
"""
|
||||
Helper method for computing inherited metadata for a specific location url
|
||||
"""
|
||||
my_metadata = {}
|
||||
# check for presence of metadata key. Note that a given module may not yet be fully formed.
|
||||
# example: update_item -> update_children -> update_metadata sequence on new item create
|
||||
@@ -293,7 +309,7 @@ class MongoModuleStore(ModuleStoreBase):
|
||||
|
||||
# go through all the children and recurse, but only if we have
|
||||
# in the result set. Remember results will not contain leaf nodes
|
||||
for child in results_by_url[url].get('definition',{}).get('children',[]):
|
||||
for child in results_by_url[url].get('definition', {}).get('children', []):
|
||||
if child in results_by_url:
|
||||
new_child_metadata = copy.deepcopy(my_metadata)
|
||||
new_child_metadata.update(results_by_url[child].get('metadata', {}))
|
||||
@@ -304,42 +320,52 @@ class MongoModuleStore(ModuleStoreBase):
|
||||
# this is likely a leaf node, so let's record what metadata we need to inherit
|
||||
metadata_to_inherit[child] = my_metadata
|
||||
|
||||
|
||||
if root is not None:
|
||||
_compute_inherited_metadata(root)
|
||||
|
||||
return {'parent_metadata': metadata_to_inherit,
|
||||
'timestamp' : datetime.now()}
|
||||
'timestamp': datetime.now()}
|
||||
|
||||
def get_cached_metadata_inheritance_tree(self, location, force_refresh=False):
|
||||
def get_cached_metadata_inheritance_trees(self, locations, force_refresh=False):
|
||||
'''
|
||||
TODO (cdodge) This method can be deleted when the 'split module store' work has been completed
|
||||
'''
|
||||
key_name = '{0}/{1}'.format(location.org, location.course)
|
||||
|
||||
tree = None
|
||||
if self.metadata_inheritance_cache is not None:
|
||||
tree = self.metadata_inheritance_cache.get(key_name)
|
||||
trees = {}
|
||||
if locations and self.metadata_inheritance_cache is not None and not force_refresh:
|
||||
trees = self.metadata_inheritance_cache.get_many(list(set([metadata_cache_key(loc) for loc in locations])))
|
||||
else:
|
||||
# This is to help guard against an accident prod runtime without a cache
|
||||
logging.warning('Running MongoModuleStore without metadata_inheritance_cache. This should not happen in production!')
|
||||
logging.warning('Running MongoModuleStore without metadata_inheritance_cache. '
|
||||
'This should not happen in production!')
|
||||
|
||||
if tree is None or force_refresh:
|
||||
tree = self.get_metadata_inheritance_tree(location)
|
||||
if self.metadata_inheritance_cache is not None:
|
||||
self.metadata_inheritance_cache.set(key_name, tree)
|
||||
to_cache = {}
|
||||
for loc in locations:
|
||||
cache_key = metadata_cache_key(loc)
|
||||
if cache_key not in trees:
|
||||
to_cache[cache_key] = trees[cache_key] = self.get_metadata_inheritance_tree(loc)
|
||||
|
||||
return tree
|
||||
if to_cache and self.metadata_inheritance_cache is not None:
|
||||
self.metadata_inheritance_cache.set_many(to_cache)
|
||||
|
||||
return trees
|
||||
|
||||
def refresh_cached_metadata_inheritance_tree(self, location):
|
||||
"""
|
||||
Refresh the cached metadata inheritance tree for the org/course combination
|
||||
for location
|
||||
"""
|
||||
pseudo_course_id = '/'.join([location.org, location.course])
|
||||
if pseudo_course_id not in self.ignore_write_events_on_courses:
|
||||
self.get_cached_metadata_inheritance_tree(location, force_refresh = True)
|
||||
self.get_cached_metadata_inheritance_trees([location], force_refresh=True)
|
||||
|
||||
def clear_cached_metadata_inheritance_tree(self, location):
|
||||
key_name = '{0}/{1}'.format(location.org, location.course)
|
||||
"""
|
||||
Delete the cached metadata inheritance tree for the org/course combination
|
||||
for location
|
||||
"""
|
||||
if self.metadata_inheritance_cache is not None:
|
||||
self.metadata_inheritance_cache.delete(key_name)
|
||||
self.metadata_inheritance_cache.delete(metadata_cache_key(location))
|
||||
|
||||
def _clean_item_data(self, item):
|
||||
"""
|
||||
@@ -367,7 +393,7 @@ class MongoModuleStore(ModuleStoreBase):
|
||||
data[Location(item['location'])] = item
|
||||
|
||||
if depth == 0:
|
||||
break;
|
||||
break
|
||||
|
||||
# Load all children by id. See
|
||||
# http://www.mongodb.org/display/DOCS/Advanced+Queries#AdvancedQueries-%24or
|
||||
@@ -385,7 +411,18 @@ class MongoModuleStore(ModuleStoreBase):
|
||||
|
||||
return data
|
||||
|
||||
def _load_item(self, item, data_cache, should_apply_metadata_inheritence=True):
|
||||
def _cache_metadata_inheritance(self, items, depth, force_refresh=False):
|
||||
"""
|
||||
Retrieves all course metadata inheritance trees needed to load items
|
||||
"""
|
||||
|
||||
locations = [
|
||||
Location(item['location']) for item in items
|
||||
if not (item['location']['category'] == 'course' and depth == 0)
|
||||
]
|
||||
return self.get_cached_metadata_inheritance_trees(locations, force_refresh=force_refresh)
|
||||
|
||||
def _load_item(self, item, data_cache, metadata_cache):
|
||||
"""
|
||||
Load an XModuleDescriptor from item, using the children stored in data_cache
|
||||
"""
|
||||
@@ -397,11 +434,6 @@ class MongoModuleStore(ModuleStoreBase):
|
||||
|
||||
resource_fs = OSFS(root)
|
||||
|
||||
metadata_inheritance_tree = None
|
||||
|
||||
if should_apply_metadata_inheritence:
|
||||
metadata_inheritance_tree = self.get_cached_metadata_inheritance_tree(Location(item['location']))
|
||||
|
||||
# TODO (cdodge): When the 'split module store' work has been completed, we should remove
|
||||
# the 'metadata_inheritance_tree' parameter
|
||||
system = CachingDescriptorSystem(
|
||||
@@ -411,7 +443,7 @@ class MongoModuleStore(ModuleStoreBase):
|
||||
resource_fs,
|
||||
self.error_tracker,
|
||||
self.render_template,
|
||||
metadata_inheritance_tree = metadata_inheritance_tree
|
||||
metadata_cache,
|
||||
)
|
||||
return system.load_item(item['location'])
|
||||
|
||||
@@ -421,11 +453,11 @@ class MongoModuleStore(ModuleStoreBase):
|
||||
to specified depth
|
||||
"""
|
||||
data_cache = self._cache_children(items, depth)
|
||||
inheritance_cache = self._cache_metadata_inheritance(items, depth)
|
||||
|
||||
# if we are loading a course object, if we're not prefetching children (depth != 0) then don't
|
||||
# bother with the metadata inheritence
|
||||
return [self._load_item(item, data_cache,
|
||||
should_apply_metadata_inheritence=(item['location']['category'] != 'course' or depth != 0)) for item in items]
|
||||
# bother with the metadata inheritence
|
||||
return [self._load_item(item, data_cache, inheritance_cache) for item in items]
|
||||
|
||||
def get_courses(self):
|
||||
'''
|
||||
@@ -559,7 +591,8 @@ class MongoModuleStore(ModuleStoreBase):
|
||||
raise Exception('Could not find course at {0}'.format(course_search_location))
|
||||
|
||||
if found_cnt > 1:
|
||||
raise Exception('Found more than one course at {0}. There should only be one!!! Dump = {1}'.format(course_search_location, courses))
|
||||
raise Exception('Found more than one course at {0}. There should only be one!!! '
|
||||
'Dump = {1}'.format(course_search_location, courses))
|
||||
|
||||
return courses[0]
|
||||
|
||||
@@ -631,7 +664,7 @@ class MongoModuleStore(ModuleStoreBase):
|
||||
|
||||
self._update_single_item(location, {'metadata': metadata})
|
||||
# recompute (and update) the metadata inheritance tree which is cached
|
||||
self.refresh_cached_metadata_inheritance_tree(loc)
|
||||
self.refresh_cached_metadata_inheritance_tree(loc)
|
||||
|
||||
def delete_item(self, location):
|
||||
"""
|
||||
@@ -654,7 +687,7 @@ class MongoModuleStore(ModuleStoreBase):
|
||||
# from overriding our default value set in the init method.
|
||||
safe=self.collection.safe)
|
||||
# recompute (and update) the metadata inheritance tree which is cached
|
||||
self.refresh_cached_metadata_inheritance_tree(Location(location))
|
||||
self.refresh_cached_metadata_inheritance_tree(Location(location))
|
||||
|
||||
def get_parent_locations(self, location, course_id):
|
||||
'''Find all locations that are the parents of this location in this
|
||||
@@ -675,4 +708,10 @@ class MongoModuleStore(ModuleStoreBase):
|
||||
|
||||
# DraftModuleStore is first, because it needs to intercept calls to MongoModuleStore
|
||||
class DraftMongoModuleStore(DraftModuleStore, MongoModuleStore):
|
||||
"""
|
||||
Version of MongoModuleStore with draft capability mixed in
|
||||
"""
|
||||
"""
|
||||
Version of MongoModuleStore with draft capability mixed in
|
||||
"""
|
||||
pass
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import pymongo
|
||||
|
||||
from nose.tools import assert_equals, assert_raises, assert_not_equals, with_setup
|
||||
from mock import Mock
|
||||
from nose.tools import assert_equals, assert_raises, assert_not_equals, with_setup, assert_false
|
||||
from pprint import pprint
|
||||
|
||||
from xmodule.modulestore import Location
|
||||
@@ -102,3 +103,58 @@ class TestMongoModuleStore(object):
|
||||
def test_path_to_location(self):
|
||||
'''Make sure that path_to_location works'''
|
||||
check_path_to_location(self.store)
|
||||
|
||||
def test_metadata_inheritance_query_count(self):
|
||||
'''
|
||||
When retrieving items from mongo, we should only query the cache a number of times
|
||||
equal to the number of courses being retrieved from.
|
||||
|
||||
We should also not query
|
||||
'''
|
||||
self.store.metadata_inheritance_cache = Mock()
|
||||
get_many = self.store.metadata_inheritance_cache.get_many
|
||||
set_many = self.store.metadata_inheritance_cache.set_many
|
||||
get_many.return_value = {('edX', 'toy'): {}}
|
||||
|
||||
self.store.get_item(Location("i4x://edX/toy/course/2012_Fall"), depth=0)
|
||||
assert_false(get_many.called)
|
||||
assert_false(set_many.called)
|
||||
get_many.reset_mock()
|
||||
|
||||
self.store.get_item(Location("i4x://edX/toy/course/2012_Fall"), depth=3)
|
||||
get_many.assert_called_with([('edX', 'toy')])
|
||||
assert_equals(0, set_many.call_count)
|
||||
get_many.reset_mock()
|
||||
|
||||
self.store.get_items(Location('i4x', 'edX', None, 'course', None), depth=0)
|
||||
assert_false(get_many.called)
|
||||
assert_false(set_many.called)
|
||||
get_many.reset_mock()
|
||||
|
||||
self.store.get_items(Location('i4x', 'edX', None, 'course', None), depth=3)
|
||||
assert_equals(1, get_many.call_count)
|
||||
assert_equals([('edX', 'simple'), ('edX', 'toy')], sorted(get_many.call_args[0][0]))
|
||||
assert_equals(1, set_many.call_count)
|
||||
assert_equals([('edX', 'simple')], sorted(set_many.call_args[0][0].keys()))
|
||||
get_many.reset_mock()
|
||||
|
||||
self.store.get_items(Location('i4x', 'edX', None, None, None), depth=0)
|
||||
assert_equals(1, get_many.call_count)
|
||||
assert_equals([('edX', 'simple'), ('edX', 'toy')], sorted(get_many.call_args[0][0]))
|
||||
assert_equals(1, set_many.call_count)
|
||||
assert_equals([('edX', 'simple')], sorted(set_many.call_args[0][0].keys()))
|
||||
get_many.reset_mock()
|
||||
|
||||
def test_metadata_inheritance_query_count_forced_refresh(self):
|
||||
self.store.metadata_inheritance_cache = Mock()
|
||||
get_many = self.store.metadata_inheritance_cache.get_many
|
||||
set_many = self.store.metadata_inheritance_cache.set_many
|
||||
get_many.return_value = {('edX', 'toy'): {}}
|
||||
|
||||
self.store.get_cached_metadata_inheritance_trees(
|
||||
[Location("i4x://edX/toy/course/2012_Fall"), Location("i4x://edX/simple/course/2012_Fall")],
|
||||
True
|
||||
)
|
||||
assert_false(get_many.called)
|
||||
assert_equals(1, set_many.call_count)
|
||||
assert_equals([('edX', 'simple'), ('edX', 'toy')], sorted(set_many.call_args[0][0].keys()))
|
||||
|
||||
@@ -24,7 +24,7 @@ MAX_ATTEMPTS = 1
|
||||
MAX_SCORE = 1
|
||||
|
||||
#The highest score allowed for the overall xmodule and for each rubric point
|
||||
MAX_SCORE_ALLOWED = 3
|
||||
MAX_SCORE_ALLOWED = 50
|
||||
|
||||
#If true, default behavior is to score module as a practice problem. Otherwise, no grade at all is shown in progress
|
||||
#Metadata overrides this.
|
||||
@@ -363,7 +363,15 @@ class CombinedOpenEndedV1Module():
|
||||
"""
|
||||
self.update_task_states()
|
||||
html = self.current_task.get_html(self.system)
|
||||
return_html = rewrite_links(html, self.rewrite_content_links)
|
||||
return_html = html
|
||||
try:
|
||||
#Without try except block, get this error:
|
||||
# File "/home/vik/mitx_all/mitx/common/lib/xmodule/xmodule/x_module.py", line 263, in rewrite_content_links
|
||||
# if link.startswith(XASSET_SRCREF_PREFIX):
|
||||
# Placing try except so that if the error is fixed, this code will start working again.
|
||||
return_html = rewrite_links(html, self.rewrite_content_links)
|
||||
except:
|
||||
pass
|
||||
return return_html
|
||||
|
||||
def get_current_attributes(self, task_number):
|
||||
@@ -782,7 +790,7 @@ class CombinedOpenEndedV1Descriptor():
|
||||
template_dir_name = "combinedopenended"
|
||||
|
||||
def __init__(self, system):
|
||||
self.system =system
|
||||
self.system = system
|
||||
|
||||
@classmethod
|
||||
def definition_from_xml(cls, xml_object, system):
|
||||
|
||||
@@ -36,7 +36,7 @@ ALLOWABLE_IMAGE_SUFFIXES = [
|
||||
]
|
||||
|
||||
#Maximum allowed dimensions (x and y) for an uploaded image
|
||||
MAX_ALLOWED_IMAGE_DIM = 1500
|
||||
MAX_ALLOWED_IMAGE_DIM = 2000
|
||||
|
||||
#Dimensions to which image is resized before it is evaluated for color count, etc
|
||||
MAX_IMAGE_DIM = 150
|
||||
@@ -178,7 +178,7 @@ class URLProperties(object):
|
||||
Runs all available url tests
|
||||
@return: True if URL passes tests, false if not.
|
||||
"""
|
||||
url_is_okay = self.check_suffix() and self.check_if_parses() and self.check_domain()
|
||||
url_is_okay = self.check_suffix() and self.check_if_parses()
|
||||
return url_is_okay
|
||||
|
||||
def check_domain(self):
|
||||
|
||||
@@ -357,10 +357,6 @@ class OpenEndedChild(object):
|
||||
if get_data['can_upload_files'] in ['true', '1']:
|
||||
has_file_to_upload = True
|
||||
file = get_data['student_file'][0]
|
||||
if self.system.track_fuction:
|
||||
self.system.track_function('open_ended_image_upload', {'filename': file.name})
|
||||
else:
|
||||
log.info("No tracking function found when uploading image.")
|
||||
uploaded_to_s3, image_ok, s3_public_url = self.upload_image_to_s3(file)
|
||||
if uploaded_to_s3:
|
||||
image_tag = self.generate_image_tag_from_url(s3_public_url, file.name)
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import unittest
|
||||
from time import strptime
|
||||
|
||||
from fs.memoryfs import MemoryFS
|
||||
|
||||
from mock import Mock, patch
|
||||
@@ -108,7 +109,22 @@ class IsNewCourseTestCase(unittest.TestCase):
|
||||
print "Comparing %s to %s" % (a, b)
|
||||
assertion(a_score, b_score)
|
||||
|
||||
@patch('xmodule.course_module.time.gmtime')
|
||||
def test_start_date_text(self, gmtime_mock):
|
||||
gmtime_mock.return_value = NOW
|
||||
|
||||
settings = [
|
||||
# start, advertized, result
|
||||
('2012-12-02T12:00', None, 'Dec 02, 2012'),
|
||||
('2012-12-02T12:00', '2011-11-01T12:00', 'Nov 01, 2011'),
|
||||
('2012-12-02T12:00', 'Spring 2012', 'Spring 2012'),
|
||||
('2012-12-02T12:00', 'November, 2011', 'November, 2011'),
|
||||
]
|
||||
|
||||
for s in settings:
|
||||
d = self.get_dummy_course(start=s[0], advertised_start=s[1])
|
||||
print "Checking start=%s advertised=%s" % (s[0], s[1])
|
||||
self.assertEqual(d.start_date_text, s[2])
|
||||
|
||||
@patch('xmodule.course_module.time.gmtime')
|
||||
def test_is_newish(self, gmtime_mock):
|
||||
|
||||
80
common/lib/xmodule/xmodule/tests/test_fields.py
Normal file
80
common/lib/xmodule/xmodule/tests/test_fields.py
Normal file
@@ -0,0 +1,80 @@
|
||||
"""Tests for Date class defined in fields.py."""
|
||||
import datetime
|
||||
import unittest
|
||||
from django.utils.timezone import UTC
|
||||
from xmodule.fields import Date
|
||||
import time
|
||||
|
||||
class DateTest(unittest.TestCase):
|
||||
date = Date()
|
||||
|
||||
@staticmethod
|
||||
def struct_to_datetime(struct_time):
|
||||
return datetime.datetime(struct_time.tm_year, struct_time.tm_mon,
|
||||
struct_time.tm_mday, struct_time.tm_hour,
|
||||
struct_time.tm_min, struct_time.tm_sec, tzinfo=UTC())
|
||||
|
||||
def compare_dates(self, date1, date2, expected_delta):
|
||||
dt1 = DateTest.struct_to_datetime(date1)
|
||||
dt2 = DateTest.struct_to_datetime(date2)
|
||||
self.assertEqual(dt1 - dt2, expected_delta, str(date1) + "-"
|
||||
+ str(date2) + "!=" + str(expected_delta))
|
||||
|
||||
def test_from_json(self):
|
||||
'''Test conversion from iso compatible date strings to struct_time'''
|
||||
self.compare_dates(
|
||||
DateTest.date.from_json("2013-01-01"),
|
||||
DateTest.date.from_json("2012-12-31"),
|
||||
datetime.timedelta(days=1))
|
||||
self.compare_dates(
|
||||
DateTest.date.from_json("2013-01-01T00"),
|
||||
DateTest.date.from_json("2012-12-31T23"),
|
||||
datetime.timedelta(hours=1))
|
||||
self.compare_dates(
|
||||
DateTest.date.from_json("2013-01-01T00:00"),
|
||||
DateTest.date.from_json("2012-12-31T23:59"),
|
||||
datetime.timedelta(minutes=1))
|
||||
self.compare_dates(
|
||||
DateTest.date.from_json("2013-01-01T00:00:00"),
|
||||
DateTest.date.from_json("2012-12-31T23:59:59"),
|
||||
datetime.timedelta(seconds=1))
|
||||
self.compare_dates(
|
||||
DateTest.date.from_json("2013-01-01T00:00:00Z"),
|
||||
DateTest.date.from_json("2012-12-31T23:59:59Z"),
|
||||
datetime.timedelta(seconds=1))
|
||||
self.compare_dates(
|
||||
DateTest.date.from_json("2012-12-31T23:00:01-01:00"),
|
||||
DateTest.date.from_json("2013-01-01T00:00:00+01:00"),
|
||||
datetime.timedelta(hours=1, seconds=1))
|
||||
|
||||
def test_return_None(self):
|
||||
self.assertIsNone(DateTest.date.from_json(""))
|
||||
self.assertIsNone(DateTest.date.from_json(None))
|
||||
self.assertIsNone(DateTest.date.from_json(['unknown value']))
|
||||
|
||||
def test_old_due_date_format(self):
|
||||
current = datetime.datetime.today()
|
||||
self.assertEqual(
|
||||
time.struct_time((current.year, 3, 12, 12, 0, 0, 1, 71, 0)),
|
||||
DateTest.date.from_json("March 12 12:00"))
|
||||
self.assertEqual(
|
||||
time.struct_time((current.year, 12, 4, 16, 30, 0, 2, 338, 0)),
|
||||
DateTest.date.from_json("December 4 16:30"))
|
||||
|
||||
def test_to_json(self):
|
||||
'''
|
||||
Test converting time reprs to iso dates
|
||||
'''
|
||||
self.assertEqual(
|
||||
DateTest.date.to_json(
|
||||
time.strptime("2012-12-31T23:59:59Z", "%Y-%m-%dT%H:%M:%SZ")),
|
||||
"2012-12-31T23:59:59Z")
|
||||
self.assertEqual(
|
||||
DateTest.date.to_json(
|
||||
DateTest.date.from_json("2012-12-31T23:59:59Z")),
|
||||
"2012-12-31T23:59:59Z")
|
||||
self.assertEqual(
|
||||
DateTest.date.to_json(
|
||||
DateTest.date.from_json("2012-12-31T23:00:01-01:00")),
|
||||
"2013-01-01T00:00:01Z")
|
||||
|
||||
35
common/static/js/capa/symbolic_mathjax_preprocessor.js
Normal file
35
common/static/js/capa/symbolic_mathjax_preprocessor.js
Normal file
@@ -0,0 +1,35 @@
|
||||
/* This file defines a processor in between the student's math input
|
||||
(AsciiMath) and what is read by MathJax. It allows for our own
|
||||
customizations, such as use of the syntax "a_b__x" in superscripts, or
|
||||
possibly coloring certain variables, etc&.
|
||||
|
||||
It is used in the <textline> definition like the following:
|
||||
|
||||
<symbolicresponse expect="a_b^c + b_x__d" size="30">
|
||||
<textline math="1"
|
||||
preprocessorClassName="SymbolicMathjaxPreprocessor"
|
||||
preprocessorSrc="/static/js/capa/symbolic_mathjax_preprocessor.js"/>
|
||||
</symbolicresponse>
|
||||
*/
|
||||
window.SymbolicMathjaxPreprocessor = function () {
|
||||
this.fn = function (eqn) {
|
||||
// flags and config
|
||||
var superscriptsOn = true;
|
||||
|
||||
if (superscriptsOn) {
|
||||
// find instances of "__" and make them superscripts ("^") and tag them
|
||||
// as such. Specifcally replace instances of "__X" or "__{XYZ}" with
|
||||
// "^{CHAR$1}", marking superscripts as different from powers
|
||||
|
||||
// a zero width space--this is an invisible character that no one would
|
||||
// use, that gets passed through MathJax and to the server
|
||||
var c = "\u200b";
|
||||
eqn = eqn.replace(/__(?:([^\{])|\{([^\}]+)\})/g, '^{' + c + '$1$2}');
|
||||
|
||||
// NOTE: MathJax supports '\class{name}{mathcode}' but not for asciimath
|
||||
// input, which is too bad. This would be preferable to this char tag
|
||||
}
|
||||
|
||||
return eqn;
|
||||
};
|
||||
};
|
||||
40
doc/public/course_data_formats/symbolic_response.rst
Normal file
40
doc/public/course_data_formats/symbolic_response.rst
Normal file
@@ -0,0 +1,40 @@
|
||||
#################
|
||||
Symbolic Response
|
||||
#################
|
||||
|
||||
This document plans to document features that the current symbolic response
|
||||
supports. In general it allows the input and validation of math expressions,
|
||||
up to commutativity and some identities.
|
||||
|
||||
|
||||
********
|
||||
Features
|
||||
********
|
||||
|
||||
This is a partial list of features, to be revised as we go along:
|
||||
* sub and superscripts: an expression following the ``^`` character
|
||||
indicates exponentiation. To use superscripts in variables, the syntax
|
||||
is ``b_x__d`` for the variable ``b`` with subscript ``x`` and super
|
||||
``d``.
|
||||
|
||||
An example of a problem::
|
||||
|
||||
<symbolicresponse expect="a_b^c + b_x__d" size="30">
|
||||
<textline math="1"
|
||||
preprocessorClassName="SymbolicMathjaxPreprocessor"
|
||||
preprocessorSrc="/static/js/capa/symbolic_mathjax_preprocessor.js"/>
|
||||
</symbolicresponse>
|
||||
|
||||
It's a bit of a pain to enter that.
|
||||
|
||||
* The script-style math variant. What would be outputted in latex if you
|
||||
entered ``\mathcal{N}``. This is used in some variables.
|
||||
|
||||
An example::
|
||||
|
||||
<symbolicresponse expect="scriptN_B + x" size="30">
|
||||
<textline math="1"/>
|
||||
</symbolicresponse>
|
||||
|
||||
There is no fancy preprocessing needed, but if you had superscripts or
|
||||
something, you would need to include that part.
|
||||
@@ -8,6 +8,7 @@ from functools import partial
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.exceptions import PermissionDenied
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.http import Http404
|
||||
from django.http import HttpResponse
|
||||
@@ -208,9 +209,6 @@ def get_module_for_descriptor(user, request, descriptor, model_data_cache, cours
|
||||
'waittime': settings.XQUEUE_WAITTIME_BETWEEN_REQUESTS
|
||||
}
|
||||
|
||||
def get_or_default(key, default):
|
||||
getattr(settings, key, default)
|
||||
|
||||
#This is a hacky way to pass settings to the combined open ended xmodule
|
||||
#It needs an S3 interface to upload images to S3
|
||||
#It needs the open ended grading interface in order to get peer grading to be done
|
||||
@@ -226,12 +224,11 @@ def get_module_for_descriptor(user, request, descriptor, model_data_cache, cours
|
||||
open_ended_grading_interface['mock_staff_grading'] = settings.MOCK_STAFF_GRADING
|
||||
if is_descriptor_combined_open_ended:
|
||||
s3_interface = {
|
||||
'access_key' : get_or_default('AWS_ACCESS_KEY_ID',''),
|
||||
'secret_access_key' : get_or_default('AWS_SECRET_ACCESS_KEY',''),
|
||||
'storage_bucket_name' : get_or_default('AWS_STORAGE_BUCKET_NAME','')
|
||||
'access_key' : getattr(settings,'AWS_ACCESS_KEY_ID',''),
|
||||
'secret_access_key' : getattr(settings,'AWS_SECRET_ACCESS_KEY',''),
|
||||
'storage_bucket_name' : getattr(settings,'AWS_STORAGE_BUCKET_NAME','openended')
|
||||
}
|
||||
|
||||
|
||||
def inner_get_module(descriptor):
|
||||
"""
|
||||
Delegate to get_module. It does an access check, so may return None
|
||||
@@ -412,6 +409,9 @@ def modx_dispatch(request, dispatch, location, course_id):
|
||||
if not Location.is_valid(location):
|
||||
raise Http404("Invalid location")
|
||||
|
||||
if not request.user.is_authenticated():
|
||||
raise PermissionDenied
|
||||
|
||||
# Check for submitted files and basic file size checks
|
||||
p = request.POST.copy()
|
||||
if request.FILES:
|
||||
|
||||
107
lms/djangoapps/courseware/tests/test_login.py
Normal file
107
lms/djangoapps/courseware/tests/test_login.py
Normal file
@@ -0,0 +1,107 @@
|
||||
from django.test import TestCase
|
||||
from django.test.client import Client
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.contrib.auth.models import User
|
||||
from student.models import Registration, UserProfile
|
||||
import json
|
||||
|
||||
class LoginTest(TestCase):
|
||||
'''
|
||||
Test student.views.login_user() view
|
||||
'''
|
||||
|
||||
def setUp(self):
|
||||
|
||||
# Create one user and save it to the database
|
||||
self.user = User.objects.create_user('test', 'test@edx.org', 'test_password')
|
||||
self.user.is_active = True
|
||||
self.user.save()
|
||||
|
||||
# Create a registration for the user
|
||||
Registration().register(self.user)
|
||||
|
||||
# Create a profile for the user
|
||||
UserProfile(user=self.user).save()
|
||||
|
||||
# Create the test client
|
||||
self.client = Client()
|
||||
|
||||
# Store the login url
|
||||
self.url = reverse('login')
|
||||
|
||||
def test_login_success(self):
|
||||
response = self._login_response('test@edx.org', 'test_password')
|
||||
self._assert_response(response, success=True)
|
||||
|
||||
def test_login_success_unicode_email(self):
|
||||
unicode_email = u'test@edx.org' + unichr(40960)
|
||||
|
||||
self.user.email = unicode_email
|
||||
self.user.save()
|
||||
|
||||
response = self._login_response(unicode_email, 'test_password')
|
||||
self._assert_response(response, success=True)
|
||||
|
||||
|
||||
def test_login_fail_no_user_exists(self):
|
||||
response = self._login_response('not_a_user@edx.org', 'test_password')
|
||||
self._assert_response(response, success=False,
|
||||
value='Email or password is incorrect')
|
||||
|
||||
def test_login_fail_wrong_password(self):
|
||||
response = self._login_response('test@edx.org', 'wrong_password')
|
||||
self._assert_response(response, success=False,
|
||||
value='Email or password is incorrect')
|
||||
|
||||
def test_login_not_activated(self):
|
||||
|
||||
# De-activate the user
|
||||
self.user.is_active = False
|
||||
self.user.save()
|
||||
|
||||
# Should now be unable to login
|
||||
response = self._login_response('test@edx.org', 'test_password')
|
||||
self._assert_response(response, success=False,
|
||||
value="This account has not been activated")
|
||||
|
||||
|
||||
def test_login_unicode_email(self):
|
||||
unicode_email = u'test@edx.org' + unichr(40960)
|
||||
response = self._login_response(unicode_email, 'test_password')
|
||||
self._assert_response(response, success=False)
|
||||
|
||||
def test_login_unicode_password(self):
|
||||
unicode_password = u'test_password' + unichr(1972)
|
||||
response = self._login_response('test@edx.org', unicode_password)
|
||||
self._assert_response(response, success=False)
|
||||
|
||||
def _login_response(self, email, password):
|
||||
post_params = {'email': email, 'password': password}
|
||||
return self.client.post(self.url, post_params)
|
||||
|
||||
def _assert_response(self, response, success=None, value=None):
|
||||
'''
|
||||
Assert that the response had status 200 and returned a valid
|
||||
JSON-parseable dict.
|
||||
|
||||
If success is provided, assert that the response had that
|
||||
value for 'success' in the JSON dict.
|
||||
|
||||
If value is provided, assert that the response contained that
|
||||
value for 'value' in the JSON dict.
|
||||
'''
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
try:
|
||||
response_dict = json.loads(response.content)
|
||||
except ValueError:
|
||||
self.fail("Could not parse response content as JSON: %s"
|
||||
% str(response.content))
|
||||
|
||||
if success is not None:
|
||||
self.assertEqual(response_dict['success'], success)
|
||||
|
||||
if value is not None:
|
||||
msg = ("'%s' did not contain '%s'" %
|
||||
(str(response_dict['value']), str(value)))
|
||||
self.assertTrue(value in response_dict['value'], msg)
|
||||
@@ -8,8 +8,8 @@ from django.test.client import RequestFactory
|
||||
from django.test.utils import override_settings
|
||||
|
||||
from xmodule.modulestore.exceptions import ItemNotFoundError
|
||||
from xmodule.modulestore.django import modulestore
|
||||
import courseware.module_render as render
|
||||
from xmodule.modulestore.django import modulestore, _MODULESTORES
|
||||
from courseware.tests.tests import LoginEnrollmentTestCase
|
||||
from courseware.model_data import ModelDataCache
|
||||
|
||||
@@ -40,7 +40,6 @@ TEST_DATA_XML_MODULESTORE = xml_store_config(TEST_DATA_DIR)
|
||||
class ModuleRenderTestCase(LoginEnrollmentTestCase):
|
||||
def setUp(self):
|
||||
self.location = ['i4x', 'edX', 'toy', 'chapter', 'Overview']
|
||||
self._MODULESTORES = {}
|
||||
self.course_id = 'edX/toy/2012_Fall'
|
||||
self.toy_course = modulestore().get_course(self.course_id)
|
||||
|
||||
@@ -91,12 +90,23 @@ class ModuleRenderTestCase(LoginEnrollmentTestCase):
|
||||
self.assertEquals(render.get_score_bucket(11, 10), 'incorrect')
|
||||
self.assertEquals(render.get_score_bucket(-1, 10), 'incorrect')
|
||||
|
||||
def test_anonymous_modx_dispatch(self):
|
||||
dispatch_url = reverse(
|
||||
'modx_dispatch',
|
||||
args=[
|
||||
'edX/toy/2012_Fall',
|
||||
'i4x://edX/toy/videosequence/Toy_Videos',
|
||||
'goto_position'
|
||||
]
|
||||
)
|
||||
response = self.client.post(dispatch_url, {'position': 2})
|
||||
self.assertEquals(403, response.status_code)
|
||||
|
||||
|
||||
@override_settings(MODULESTORE=TEST_DATA_XML_MODULESTORE)
|
||||
class TestTOC(TestCase):
|
||||
"""Check the Table of Contents for a course"""
|
||||
def setUp(self):
|
||||
self._MODULESTORES = {}
|
||||
|
||||
# Toy courses should be loaded
|
||||
self.course_name = 'edX/toy/2012_Fall'
|
||||
|
||||
@@ -1,10 +1,24 @@
|
||||
from comment_client import CommentClientError
|
||||
from django_comment_client.utils import JsonError
|
||||
import json
|
||||
import logging
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AjaxExceptionMiddleware(object):
|
||||
"""
|
||||
Middleware that captures CommentClientErrors during ajax requests
|
||||
and tranforms them into json responses
|
||||
"""
|
||||
def process_exception(self, request, exception):
|
||||
"""
|
||||
Processes CommentClientErrors in ajax requests. If the request is an ajax request,
|
||||
returns a http response that encodes the error as json
|
||||
"""
|
||||
if isinstance(exception, CommentClientError) and request.is_ajax():
|
||||
return JsonError(json.loads(exception.message))
|
||||
try:
|
||||
return JsonError(json.loads(exception.message))
|
||||
except ValueError:
|
||||
return JsonError(exception.message)
|
||||
return None
|
||||
|
||||
@@ -1,7 +1,3 @@
|
||||
import string
|
||||
import random
|
||||
import collections
|
||||
|
||||
from django.test import TestCase
|
||||
|
||||
import comment_client
|
||||
@@ -13,17 +9,19 @@ class AjaxExceptionTestCase(TestCase):
|
||||
|
||||
# TODO: check whether the correct error message is produced.
|
||||
# The error message should be the same as the argument to CommentClientError
|
||||
def setUp(self):
|
||||
self.a = middleware.AjaxExceptionMiddleware()
|
||||
self.request1 = django.http.HttpRequest()
|
||||
self.request0 = django.http.HttpRequest()
|
||||
self.exception1 = comment_client.CommentClientError('{}')
|
||||
self.exception0 = ValueError()
|
||||
self.request1.META['HTTP_X_REQUESTED_WITH'] = "XMLHttpRequest"
|
||||
self.request0.META['HTTP_X_REQUESTED_WITH'] = "SHADOWFAX"
|
||||
def setUp(self):
|
||||
self.a = middleware.AjaxExceptionMiddleware()
|
||||
self.request1 = django.http.HttpRequest()
|
||||
self.request0 = django.http.HttpRequest()
|
||||
self.exception1 = comment_client.CommentClientError('{}')
|
||||
self.exception2 = comment_client.CommentClientError('Foo!')
|
||||
self.exception0 = ValueError()
|
||||
self.request1.META['HTTP_X_REQUESTED_WITH'] = "XMLHttpRequest"
|
||||
self.request0.META['HTTP_X_REQUESTED_WITH'] = "SHADOWFAX"
|
||||
|
||||
def test_process_exception(self):
|
||||
self.assertIsInstance(self.a.process_exception(self.request1, self.exception1), middleware.JsonError)
|
||||
self.assertIsNone(self.a.process_exception(self.request1, self.exception0))
|
||||
self.assertIsNone(self.a.process_exception(self.request0, self.exception1))
|
||||
self.assertIsNone(self.a.process_exception(self.request0, self.exception0))
|
||||
def test_process_exception(self):
|
||||
self.assertIsInstance(self.a.process_exception(self.request1, self.exception1), middleware.JsonError)
|
||||
self.assertIsInstance(self.a.process_exception(self.request1, self.exception2), middleware.JsonError)
|
||||
self.assertIsNone(self.a.process_exception(self.request1, self.exception0))
|
||||
self.assertIsNone(self.a.process_exception(self.request0, self.exception1))
|
||||
self.assertIsNone(self.a.process_exception(self.request0, self.exception0))
|
||||
|
||||
@@ -111,7 +111,7 @@ def peer_grading(request, course_id):
|
||||
#Get the peer grading modules currently in the course
|
||||
items = modulestore().get_items(['i4x', None, course_id_parts[1], 'peergrading', None])
|
||||
#See if any of the modules are centralized modules (ie display info from multiple problems)
|
||||
items = [i for i in items if i.metadata.get("use_for_single_location", True) in false_dict]
|
||||
items = [i for i in items if getattr(i,"use_for_single_location", True) in false_dict]
|
||||
#Get the first one
|
||||
item_location = items[0].location
|
||||
#Generate a url for the first module and redirect the user to it
|
||||
|
||||
@@ -74,6 +74,15 @@ def to_latex(x):
|
||||
# LatexPrinter._print_dot = _print_dot
|
||||
xs = latex(x)
|
||||
xs = xs.replace(r'\XI', 'XI') # workaround for strange greek
|
||||
|
||||
# substitute back into latex form for scripts
|
||||
# literally something of the form
|
||||
# 'scriptN' becomes '\\mathcal{N}'
|
||||
# note: can't use something akin to the _print_hat method above because we sometimes get 'script(N)__B' or more complicated terms
|
||||
xs = re.sub(r'script([a-zA-Z0-9]+)',
|
||||
'\\mathcal{\\1}',
|
||||
xs)
|
||||
|
||||
#return '<math>%s{}{}</math>' % (xs[1:-1])
|
||||
if xs[0] == '$':
|
||||
return '[mathjax]%s[/mathjax]<br>' % (xs[1:-1]) # for sympy v6
|
||||
@@ -106,6 +115,7 @@ def my_sympify(expr, normphase=False, matrix=False, abcsym=False, do_qubit=False
|
||||
'i': sympy.I, # lowercase i is also sqrt(-1)
|
||||
'Q': sympy.Symbol('Q'), # otherwise it is a sympy "ask key"
|
||||
'I': sympy.Symbol('I'), # otherwise it is sqrt(-1)
|
||||
'N': sympy.Symbol('N'), # or it is some kind of sympy function
|
||||
#'X':sympy.sympify('Matrix([[0,1],[1,0]])'),
|
||||
#'Y':sympy.sympify('Matrix([[0,-I],[I,0]])'),
|
||||
#'Z':sympy.sympify('Matrix([[1,0],[0,-1]])'),
|
||||
@@ -247,6 +257,127 @@ class formula(object):
|
||||
fix_hat(k)
|
||||
fix_hat(xml)
|
||||
|
||||
def flatten_pmathml(xml):
|
||||
''' Give the text version of certain PMathML elements
|
||||
|
||||
Sometimes MathML will be given with each letter separated (it
|
||||
doesn't know if its implicit multiplication or what). From an xml
|
||||
node, find the (text only) variable name it represents. So it takes
|
||||
<mrow>
|
||||
<mi>m</mi>
|
||||
<mi>a</mi>
|
||||
<mi>x</mi>
|
||||
</mrow>
|
||||
and returns 'max', for easier use later on.
|
||||
'''
|
||||
tag = gettag(xml)
|
||||
if tag == 'mn': return xml.text
|
||||
elif tag == 'mi': return xml.text
|
||||
elif tag == 'mrow': return ''.join([flatten_pmathml(y) for y in xml])
|
||||
raise Exception, '[flatten_pmathml] unknown tag %s' % tag
|
||||
|
||||
def fix_mathvariant(parent):
|
||||
'''Fix certain kinds of math variants
|
||||
|
||||
Literally replace <mstyle mathvariant="script"><mi>N</mi></mstyle>
|
||||
with 'scriptN'. There have been problems using script_N or script(N)
|
||||
'''
|
||||
for child in parent:
|
||||
if (gettag(child) == 'mstyle' and child.get('mathvariant') == 'script'):
|
||||
newchild = etree.Element('mi')
|
||||
newchild.text = 'script%s' % flatten_pmathml(child[0])
|
||||
parent.replace(child, newchild)
|
||||
fix_mathvariant(child)
|
||||
fix_mathvariant(xml)
|
||||
|
||||
|
||||
# find "tagged" superscripts
|
||||
# they have the character \u200b in the superscript
|
||||
# replace them with a__b so snuggle doesn't get confused
|
||||
def fix_superscripts(xml):
|
||||
''' Look for and replace sup elements with 'X__Y' or 'X_Y__Z'
|
||||
|
||||
In the javascript, variables with '__X' in them had an invisible
|
||||
character inserted into the sup (to distinguish from powers)
|
||||
E.g. normal:
|
||||
<msubsup>
|
||||
<mi>a</mi>
|
||||
<mi>b</mi>
|
||||
<mi>c</mi>
|
||||
</msubsup>
|
||||
to be interpreted '(a_b)^c' (nothing done by this method)
|
||||
|
||||
And modified:
|
||||
<msubsup>
|
||||
<mi>b</mi>
|
||||
<mi>x</mi>
|
||||
<mrow>
|
||||
<mo>​</mo>
|
||||
<mi>d</mi>
|
||||
</mrow>
|
||||
</msubsup>
|
||||
to be interpreted 'a_b__c'
|
||||
|
||||
also:
|
||||
<msup>
|
||||
<mi>x</mi>
|
||||
<mrow>
|
||||
<mo>​</mo>
|
||||
<mi>B</mi>
|
||||
</mrow>
|
||||
</msup>
|
||||
to be 'x__B'
|
||||
'''
|
||||
for k in xml:
|
||||
tag = gettag(k)
|
||||
|
||||
# match things like the last example--
|
||||
# the second item in msub is an mrow with the first
|
||||
# character equal to \u200b
|
||||
if (tag == 'msup' and
|
||||
len(k) == 2 and gettag(k[1]) == 'mrow' and
|
||||
gettag(k[1][0]) == 'mo' and k[1][0].text == u'\u200b'): # whew
|
||||
|
||||
# replace the msup with 'X__Y'
|
||||
k[1].remove(k[1][0])
|
||||
newk = etree.Element('mi')
|
||||
newk.text = '%s__%s' % (flatten_pmathml(k[0]), flatten_pmathml(k[1]))
|
||||
xml.replace(k, newk)
|
||||
|
||||
# match things like the middle example-
|
||||
# the third item in msubsup is an mrow with the first
|
||||
# character equal to \u200b
|
||||
if (tag == 'msubsup' and
|
||||
len(k) == 3 and gettag(k[2]) == 'mrow' and
|
||||
gettag(k[2][0]) == 'mo' and k[2][0].text == u'\u200b'): # whew
|
||||
|
||||
# replace the msubsup with 'X_Y__Z'
|
||||
k[2].remove(k[2][0])
|
||||
newk = etree.Element('mi')
|
||||
newk.text = '%s_%s__%s' % (flatten_pmathml(k[0]), flatten_pmathml(k[1]), flatten_pmathml(k[2]))
|
||||
xml.replace(k, newk)
|
||||
|
||||
fix_superscripts(k)
|
||||
fix_superscripts(xml)
|
||||
|
||||
# Snuggle returns an error when it sees an <msubsup>
|
||||
# replace such elements with an <msup>, except the first element is of
|
||||
# the form a_b. I.e. map a_b^c => (a_b)^c
|
||||
# Snuggle rejects <msubsup> elements, so rewrite each one as an <msup>
# whose base is the flattened subscripted name: a_b^c => (a_b)^c.
def fix_msubsup(parent):
    for node in parent:
        if gettag(node) == 'msubsup' and len(node) == 3:
            # build the flattened 'a_b' base
            base = etree.Element('mi')
            base.text = '%s_%s' % (flatten_pmathml(node[0]), flatten_pmathml(node[1]))
            # assemble the replacement <msup>: base followed by the exponent
            replacement = etree.Element('msup')
            replacement.append(base)
            replacement.append(node[2])
            parent.replace(node, replacement)
        # recurse into the child (the original node, as before)
        fix_msubsup(node)
|
||||
fix_msubsup(xml)
|
||||
|
||||
self.xml = xml
|
||||
return self.xml
|
||||
|
||||
@@ -257,6 +388,7 @@ class formula(object):
|
||||
try:
|
||||
xml = self.preprocess_pmathml(self.expr)
|
||||
except Exception, err:
|
||||
log.warning('Err %s while preprocessing; expr=%s' % (err, self.expr))
|
||||
return "<html>Error! Cannot process pmathml</html>"
|
||||
pmathml = etree.tostring(xml, pretty_print=True)
|
||||
self.the_pmathml = pmathml
|
||||
|
||||
115
lms/lib/symmath/test_formula.py
Normal file
115
lms/lib/symmath/test_formula.py
Normal file
@@ -0,0 +1,115 @@
|
||||
"""
|
||||
Tests of symbolic math
|
||||
"""
|
||||
|
||||
|
||||
import unittest
|
||||
import formula
|
||||
import re
|
||||
from lxml import etree
|
||||
|
||||
def stripXML(xml):
    """Return *xml* with newlines removed and whitespace between tags collapsed."""
    no_newlines = xml.replace('\n', '')
    return re.sub(r'\> +\<', '><', no_newlines)
|
||||
|
||||
class FormulaTest(unittest.TestCase):
    """Tests of formula.preprocess_pmathml's presentation-MathML rewrites."""

    # for readability later: wrappers forming a complete MathML document
    mathml_start = '<math xmlns="http://www.w3.org/1998/Math/MathML"><mstyle displaystyle="true">'
    mathml_end = '</mstyle></math>'

    def setUp(self):
        self.formulaInstance = formula.formula('')

    def _check_preprocess(self, expr, expected):
        """Wrap `expr` and `expected` in a full MathML document, run
        preprocess_pmathml over the parsed `expr`, and assert the serialized
        result equals the wrapped `expected`.

        Factors out the identical wrap/parse/preprocess/serialize/assert
        sequence previously duplicated verbatim in every test method.
        """
        expr = stripXML(self.mathml_start + expr + self.mathml_end)
        expected = stripXML(self.mathml_start + expected + self.mathml_end)

        xml = etree.fromstring(expr)
        xml = self.formulaInstance.preprocess_pmathml(xml)
        test = etree.tostring(xml)

        self.assertEqual(test, expected)

    def test_replace_mathvariants(self):
        # <mstyle mathvariant="..."> is flattened into a variant-prefixed <mi>
        expr = '''
    <mstyle mathvariant="script">
      <mi>N</mi>
    </mstyle>'''

        expected = '<mi>scriptN</mi>'

        self._check_preprocess(expr, expected)

    def test_fix_simple_superscripts(self):
        # an <msup> whose exponent <mrow> begins with a zero-width-space <mo>
        # marks a '__' variable name rather than a power
        # (&#x200b; is the XML entity for the invisible U+200B character)
        expr = '''
    <msup>
      <mi>a</mi>
      <mrow>
        <mo>&#x200b;</mo>
        <mi>b</mi>
      </mrow>
    </msup>'''

        expected = '<mi>a__b</mi>'

        self._check_preprocess(expr, expected)

    def test_fix_complex_superscripts(self):
        # the same zero-width-space marker inside an <msubsup>: a_b__c
        expr = '''
    <msubsup>
      <mi>a</mi>
      <mi>b</mi>
      <mrow>
        <mo>&#x200b;</mo>
        <mi>c</mi>
      </mrow>
    </msubsup>'''

        expected = '<mi>a_b__c</mi>'

        self._check_preprocess(expr, expected)

    def test_fix_msubsup(self):
        # a plain <msubsup> becomes an <msup> with a flattened 'a_b' base
        expr = '''
    <msubsup>
      <mi>a</mi>
      <mi>b</mi>
      <mi>c</mi>
    </msubsup>'''

        expected = '<msup><mi>a_b</mi><mi>c</mi></msup>'  # which is (a_b)^c

        self._check_preprocess(expr, expected)
|
||||
Reference in New Issue
Block a user