diff --git a/common/djangoapps/student/models.py b/common/djangoapps/student/models.py index 8220e5507c..7b4a5fb9be 100644 --- a/common/djangoapps/student/models.py +++ b/common/djangoapps/student/models.py @@ -271,8 +271,9 @@ class TestCenterUserForm(ModelForm): new_user = self.save(commit=False) # create additional values here: new_user.user_updated_at = datetime.utcnow() + new_user.upload_status = '' new_user.save() - log.info("Updated demographic information for user's test center exam registration: username \"{}\" ".format(new_user.username)) + log.info("Updated demographic information for user's test center exam registration: username \"{}\" ".format(new_user.user.username)) # add validation: @@ -533,6 +534,7 @@ class TestCenterRegistrationForm(ModelForm): registration = self.save(commit=False) # create additional values here: registration.user_updated_at = datetime.utcnow() + registration.upload_status = '' registration.save() log.info("Updated registration information for user's test center exam registration: username \"{}\" course \"{}\", examcode \"{}\"".format(registration.testcenter_user.user.username, registration.course_id, registration.exam_series_code)) diff --git a/common/djangoapps/student/views.py b/common/djangoapps/student/views.py index 8696c2ba28..61b49e6022 100644 --- a/common/djangoapps/student/views.py +++ b/common/djangoapps/student/views.py @@ -27,7 +27,7 @@ from bs4 import BeautifulSoup from django.core.cache import cache from django_future.csrf import ensure_csrf_cookie, csrf_exempt -from student.models import (Registration, UserProfile, TestCenterUser, TestCenterUserForm, +from student.models import (Registration, UserProfile, TestCenterUser, TestCenterUserForm, TestCenterRegistration, TestCenterRegistrationForm, PendingNameChange, PendingEmailChange, CourseEnrollment, unique_id_for_user, @@ -42,7 +42,7 @@ from xmodule.modulestore.django import modulestore #from datetime import date from collections import namedtuple -from 
courseware.courses import get_courses +from courseware.courses import get_courses, sort_by_announcement from courseware.access import has_access from statsd import statsd @@ -78,10 +78,7 @@ def index(request, extra_context={}, user=None): domain = request.META.get('HTTP_HOST') courses = get_courses(None, domain=domain) - - # Sort courses by how far are they from they start day - key = lambda course: course.days_until_start - courses = sorted(courses, key=key, reverse=True) + courses = sort_by_announcement(courses) # Get the 3 most recent news top_news = _get_news(top=3) @@ -211,7 +208,7 @@ def _cert_info(user, course, cert_status): def dashboard(request): user = request.user enrollments = CourseEnrollment.objects.filter(user=user) - + # Build our courses list for the user, but ignore any courses that no longer # exist (because the course IDs have changed). Still, we don't delete those # enrollments, because it could have been a data push snafu. @@ -473,7 +470,7 @@ def _do_create_account(post_vars): except (ValueError, KeyError): # If they give us garbage, just ignore it instead # of asking them to put an integer. - profile.year_of_birth = None + profile.year_of_birth = None try: profile.save() except Exception: @@ -613,7 +610,7 @@ def exam_registration_info(user, course): exam_info = course.current_test_center_exam if exam_info is None: return None - + exam_code = exam_info.exam_series_code registrations = get_testcenter_registration(user, course.id, exam_code) if registrations: @@ -621,7 +618,7 @@ def exam_registration_info(user, course): else: registration = None return registration - + @login_required @ensure_csrf_cookie def begin_exam_registration(request, course_id): @@ -632,19 +629,22 @@ def begin_exam_registration(request, course_id): user = request.user try: - course = (course_from_id(course_id)) + course = course_from_id(course_id) except ItemNotFoundError: - # TODO: do more than just log!! The rest will fail, so we should fail right now. 
- log.error("User {0} enrolled in non-existent course {1}" - .format(user.username, course_id)) + log.error("User {0} enrolled in non-existent course {1}".format(user.username, course_id)) + raise Http404 # get the exam to be registered for: # (For now, we just assume there is one at most.) + # if there is no exam now (because someone bookmarked this stupid page), + # then return a 404: exam_info = course.current_test_center_exam + if exam_info is None: + raise Http404 # determine if the user is registered for this course: registration = exam_registration_info(user, course) - + # we want to populate the registration page with the relevant information, # if it already exists. Create an empty object otherwise. try: @@ -652,7 +652,7 @@ def begin_exam_registration(request, course_id): except TestCenterUser.DoesNotExist: testcenteruser = TestCenterUser() testcenteruser.user = user - + context = {'course': course, 'user': user, 'testcenteruser': testcenteruser, @@ -669,17 +669,24 @@ def create_exam_registration(request, post_override=None): Called by form in test_center_register.html ''' post_vars = post_override if post_override else request.POST - - # first determine if we need to create a new TestCenterUser, or if we are making any update + + # first determine if we need to create a new TestCenterUser, or if we are making any update # to an existing TestCenterUser. username = post_vars['username'] user = User.objects.get(username=username) course_id = post_vars['course_id'] - course = (course_from_id(course_id)) # assume it will be found.... - + course = course_from_id(course_id) # assume it will be found.... + + # make sure that any demographic data values received from the page have been stripped. 
+ # Whitespace is not an acceptable response for any of these values + demographic_data = {} + for fieldname in TestCenterUser.user_provided_fields(): + if fieldname in post_vars: + demographic_data[fieldname] = (post_vars[fieldname]).strip() + try: testcenter_user = TestCenterUser.objects.get(user=user) - needs_updating = testcenter_user.needs_update(post_vars) + needs_updating = testcenter_user.needs_update(demographic_data) log.info("User {0} enrolled in course {1} {2}updating demographic info for exam registration".format(user.username, course_id, "" if needs_updating else "not ")) except TestCenterUser.DoesNotExist: # do additional initialization here: @@ -689,9 +696,9 @@ def create_exam_registration(request, post_override=None): # perform validation: if needs_updating: - # first perform validation on the user information + # first perform validation on the user information # using a Django Form. - form = TestCenterUserForm(instance=testcenter_user, data=post_vars) + form = TestCenterUserForm(instance=testcenter_user, data=demographic_data) if form.is_valid(): form.update_and_save() else: @@ -700,7 +707,7 @@ def create_exam_registration(request, post_override=None): response_data['field_errors'] = form.errors response_data['non_field_errors'] = form.non_field_errors() return HttpResponse(json.dumps(response_data), mimetype="application/json") - + # create and save the registration: needs_saving = False exam = course.current_test_center_exam @@ -710,12 +717,12 @@ def create_exam_registration(request, post_override=None): registration = registrations[0] # NOTE: we do not bother to check here to see if the registration has changed, # because at the moment there is no way for a user to change anything about their - # registration. They only provide an optional accommodation request once, and + # registration. They only provide an optional accommodation request once, and # cannot make changes to it thereafter. 
# It is possible that the exam_info content has been changed, such as the # scheduled exam dates, but those kinds of changes should not be handled through - # this registration screen. - + # this registration screen. + else: accommodation_request = post_vars.get('accommodation_request','') registration = TestCenterRegistration.create(testcenter_user, exam, accommodation_request) @@ -723,7 +730,7 @@ def create_exam_registration(request, post_override=None): log.info("User {0} enrolled in course {1} creating new exam registration".format(user.username, course_id)) if needs_saving: - # do validation of registration. (Mainly whether an accommodation request is too long.) + # do validation of registration. (Mainly whether an accommodation request is too long.) form = TestCenterRegistrationForm(instance=registration, data=post_vars) if form.is_valid(): form.update_and_save() @@ -733,14 +740,14 @@ def create_exam_registration(request, post_override=None): response_data['field_errors'] = form.errors response_data['non_field_errors'] = form.non_field_errors() return HttpResponse(json.dumps(response_data), mimetype="application/json") - + # only do the following if there is accommodation text to send, # and a destination to which to send it. 
# TODO: still need to create the accommodation email templates # if 'accommodation_request' in post_vars and 'TESTCENTER_ACCOMMODATION_REQUEST_EMAIL' in settings: # d = {'accommodation_request': post_vars['accommodation_request'] } -# +# # # composes accommodation email # subject = render_to_string('emails/accommodation_email_subject.txt', d) # # Email subject *must not* contain newlines diff --git a/common/lib/xmodule/xmodule/combined_open_ended_module.py b/common/lib/xmodule/xmodule/combined_open_ended_module.py index a35573032a..73caeb2358 100644 --- a/common/lib/xmodule/xmodule/combined_open_ended_module.py +++ b/common/lib/xmodule/xmodule/combined_open_ended_module.py @@ -24,8 +24,6 @@ import open_ended_module from combined_open_ended_rubric import CombinedOpenEndedRubric from .stringify import stringify_children -from mitxmako.shortcuts import render_to_string - log = logging.getLogger("mitx.courseware") # Set the default number of max attempts. Should be 1 for production @@ -142,7 +140,7 @@ class CombinedOpenEndedModule(XModule): # completion (doesn't matter if you self-assessed correct/incorrect). 
self._max_score = int(self.metadata.get('max_score', MAX_SCORE)) - rubric_renderer = CombinedOpenEndedRubric(True) + rubric_renderer = CombinedOpenEndedRubric(system, True) success, rubric_feedback = rubric_renderer.render_rubric(stringify_children(definition['rubric'])) if not success: error_message="Could not parse rubric : {0}".format(definition['rubric']) @@ -327,7 +325,7 @@ class CombinedOpenEndedModule(XModule): Output: HTML rendered directly via Mako """ context = self.get_context() - html = render_to_string('combined_open_ended.html', context) + html = self.system.render_template('combined_open_ended.html', context) return html def get_html_base(self): @@ -377,17 +375,17 @@ class CombinedOpenEndedModule(XModule): self.static_data, instance_state=task_state) last_response = task.latest_answer() last_score = task.latest_score() - last_post_assessment = task.latest_post_assessment() + last_post_assessment = task.latest_post_assessment(self.system) last_post_feedback = "" if task_type == "openended": - last_post_assessment = task.latest_post_assessment(short_feedback=False, join_feedback=False) + last_post_assessment = task.latest_post_assessment(self.system, short_feedback=False, join_feedback=False) if isinstance(last_post_assessment, list): eval_list = [] for i in xrange(0, len(last_post_assessment)): - eval_list.append(task.format_feedback_with_evaluation(last_post_assessment[i])) + eval_list.append(task.format_feedback_with_evaluation(self.system, last_post_assessment[i])) last_post_evaluation = "".join(eval_list) else: - last_post_evaluation = task.format_feedback_with_evaluation(last_post_assessment) + last_post_evaluation = task.format_feedback_with_evaluation(self.system, last_post_assessment) last_post_assessment = last_post_evaluation last_correctness = task.is_last_response_correct() max_score = task.max_score() @@ -450,7 +448,7 @@ class CombinedOpenEndedModule(XModule): self.update_task_states() response_dict = self.get_last_response(task_number) 
context = {'results': response_dict['post_assessment'], 'task_number': task_number + 1} - html = render_to_string('combined_open_ended_results.html', context) + html = self.system.render_template('combined_open_ended_results.html', context) return {'html': html, 'success': True} def handle_ajax(self, dispatch, get): @@ -603,4 +601,4 @@ class CombinedOpenEndedDescriptor(XmlDescriptor, EditingDescriptor): for child in ['task']: add_child(child) - return elt \ No newline at end of file + return elt diff --git a/common/lib/xmodule/xmodule/combined_open_ended_rubric.py b/common/lib/xmodule/xmodule/combined_open_ended_rubric.py index 37ce18e4f1..07844aec35 100644 --- a/common/lib/xmodule/xmodule/combined_open_ended_rubric.py +++ b/common/lib/xmodule/xmodule/combined_open_ended_rubric.py @@ -1,14 +1,14 @@ -from mitxmako.shortcuts import render_to_string import logging from lxml import etree log=logging.getLogger(__name__) -class CombinedOpenEndedRubric: +class CombinedOpenEndedRubric(object): - def __init__ (self, view_only = False): + def __init__ (self, system, view_only = False): self.has_score = False self.view_only = view_only + self.system = system ''' render_rubric: takes in an xml string and outputs the corresponding @@ -23,7 +23,7 @@ class CombinedOpenEndedRubric: success = False try: rubric_categories = self.extract_categories(rubric_xml) - html = render_to_string('open_ended_rubric.html', + html = self.system.render_template('open_ended_rubric.html', {'categories' : rubric_categories, 'has_score': self.has_score, 'view_only': self.view_only}) diff --git a/common/lib/xmodule/xmodule/course_module.py b/common/lib/xmodule/xmodule/course_module.py index 499247cc2d..bc171ca5b9 100644 --- a/common/lib/xmodule/xmodule/course_module.py +++ b/common/lib/xmodule/xmodule/course_module.py @@ -1,4 +1,5 @@ import logging +from math import exp from lxml import etree from path import path # NOTE (THK): Only used for detecting presence of syllabus import requests @@ -183,35 +184,66 
@@ class CourseDescriptor(SequenceDescriptor): @property def is_new(self): - # The course is "new" if either if the metadata flag is_new is - # true or if the course has not started yet + """ + Returns if the course has been flagged as new in the metadata. If + there is no flag, return a heuristic value considering the + announcement and the start dates. + """ flag = self.metadata.get('is_new', None) if flag is None: - return self.days_until_start > 1 + # Use a heuristic if the course has not been flagged + announcement, start, now = self._sorting_dates() + if announcement and (now - announcement).days < 30: + # The course has been announced for less that month + return True + elif (now - start).days < 1: + # The course has not started yet + return True + else: + return False elif isinstance(flag, basestring): return flag.lower() in ['true', 'yes', 'y'] else: return bool(flag) @property - def days_until_start(self): - def convert_to_datetime(timestamp): + def sorting_score(self): + """ + Returns a number that can be used to sort the courses according + the how "new"" they are. The "newness"" score is computed using a + heuristic that takes into account the announcement and + (advertized) start dates of the course if available. + + The lower the number the "newer" the course. + """ + # Make courses that have an announcement date shave a lower + # score than courses than don't, older courses should have a + # higher score. 
+ announcement, start, now = self._sorting_dates() + scale = 300.0 # about a year + if announcement: + days = (now - announcement).days + score = -exp(-days/scale) + else: + days = (now - start).days + score = exp(days/scale) + return score + + def _sorting_dates(self): + # utility function to get datetime objects for dates used to + # compute the is_new flag and the sorting_score + def to_datetime(timestamp): return datetime.fromtimestamp(time.mktime(timestamp)) - start_date = convert_to_datetime(self.start) + def get_date(field): + timetuple = self._try_parse_time(field) + return to_datetime(timetuple) if timetuple else None - # Try to use course advertised date if we can parse it - advertised_start = self.metadata.get('advertised_start', None) - if advertised_start: - try: - start_date = datetime.strptime(advertised_start, - "%Y-%m-%dT%H:%M") - except ValueError: - pass # Invalid date, keep using 'start'' + announcement = get_date('announcement') + start = get_date('advertised_start') or to_datetime(self.start) + now = to_datetime(time.gmtime()) - now = convert_to_datetime(time.gmtime()) - days_until_start = (start_date - now).days - return days_until_start + return announcement, start, now @lazyproperty def grading_context(self): @@ -387,9 +419,9 @@ class CourseDescriptor(SequenceDescriptor): self.first_eligible_appointment_date = self._try_parse_time('First_Eligible_Appointment_Date') if self.first_eligible_appointment_date is None: raise ValueError("First appointment date must be specified") - # TODO: If defaulting the last appointment date, it should be the + # TODO: If defaulting the last appointment date, it should be the # *end* of the same day, not the same time. It's going to be used as the - # end of the exam overall, so we don't want the exam to disappear too soon. + # end of the exam overall, so we don't want the exam to disappear too soon. # It's also used optionally as the registration end date, so time matters there too. 
self.last_eligible_appointment_date = self._try_parse_time('Last_Eligible_Appointment_Date') # or self.first_eligible_appointment_date if self.last_eligible_appointment_date is None: @@ -403,7 +435,7 @@ class CourseDescriptor(SequenceDescriptor): raise ValueError("First appointment date must be before last appointment date") if self.registration_end_date > self.last_eligible_appointment_date: raise ValueError("Registration end date must be before last appointment date") - + def _try_parse_time(self, key): """ @@ -434,7 +466,7 @@ class CourseDescriptor(SequenceDescriptor): def is_registering(self): now = time.gmtime() return now >= self.registration_start_date and now <= self.registration_end_date - + @property def first_eligible_appointment_date_text(self): return time.strftime("%b %d, %Y", self.first_eligible_appointment_date) @@ -451,7 +483,7 @@ class CourseDescriptor(SequenceDescriptor): def current_test_center_exam(self): exams = [exam for exam in self.test_center_exams if exam.has_started_registration() and not exam.has_ended()] if len(exams) > 1: - # TODO: output some kind of warning. This should already be + # TODO: output some kind of warning. This should already be # caught if we decide to do validation at load time. 
return exams[0] elif len(exams) == 1: diff --git a/common/lib/xmodule/xmodule/open_ended_module.py b/common/lib/xmodule/xmodule/open_ended_module.py index 80cf6c4988..799e0ca29e 100644 --- a/common/lib/xmodule/xmodule/open_ended_module.py +++ b/common/lib/xmodule/xmodule/open_ended_module.py @@ -30,7 +30,6 @@ from xmodule.modulestore import Location from capa.util import * import openendedchild -from mitxmako.shortcuts import render_to_string from numpy import median from datetime import datetime @@ -256,7 +255,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild): @param system: Modulesystem @return: Boolean True (not useful currently) """ - new_score_msg = self._parse_score_msg(score_msg) + new_score_msg = self._parse_score_msg(score_msg, system) if not new_score_msg['valid']: score_msg['feedback'] = 'Invalid grader reply. Please contact the course staff.' @@ -370,7 +369,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild): return u"\n".join([feedback_list_part1, feedback_list_part2]) - def _format_feedback(self, response_items): + def _format_feedback(self, response_items, system): """ Input: Dictionary called feedback. Must contain keys seen below. 
@@ -382,14 +381,14 @@ class OpenEndedModule(openendedchild.OpenEndedChild): rubric_feedback="" feedback = self._convert_longform_feedback_to_html(response_items) if response_items['rubric_scores_complete']==True: - rubric_renderer = CombinedOpenEndedRubric(True) + rubric_renderer = CombinedOpenEndedRubric(system, True) success, rubric_feedback = rubric_renderer.render_rubric(response_items['rubric_xml']) if not response_items['success']: return system.render_template("open_ended_error.html", {'errors': feedback}) - feedback_template = render_to_string("open_ended_feedback.html", { + feedback_template = system.render_template("open_ended_feedback.html", { 'grader_type': response_items['grader_type'], 'score': "{0} / {1}".format(response_items['score'], self.max_score()), 'feedback': feedback, @@ -399,7 +402,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild): return feedback_template - def _parse_score_msg(self, score_msg, join_feedback=True): + def _parse_score_msg(self, score_msg, system, join_feedback=True): """ Grader reply is a JSON-dump of the following dict { 'correct': True/False, @@ -451,7 +454,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild): 'rubric_scores_complete' : score_result['rubric_scores_complete'][i], 'rubric_xml' : score_result['rubric_xml'][i], } - feedback_items.append(self._format_feedback(new_score_result)) + feedback_items.append(self._format_feedback(new_score_result, system)) if join_feedback: feedback = "".join(feedback_items) else: @@ -459,7 +462,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild): score = int(median(score_result['score'])) else: #This is for instructor and ML grading - feedback = self._format_feedback(score_result) + feedback = self._format_feedback(score_result, system) score = score_result['score'] self.submission_id = score_result['submission_id'] @@ -467,7 +470,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild): 
return {'valid': True, 'score': score, 'feedback': feedback} - def latest_post_assessment(self, short_feedback=False, join_feedback=True): + def latest_post_assessment(self, system, short_feedback=False, join_feedback=True): """ Gets the latest feedback, parses, and returns @param short_feedback: If the long feedback is wanted or not @@ -476,7 +479,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild): if not self.history: return "" - feedback_dict = self._parse_score_msg(self.history[-1].get('post_assessment', ""), join_feedback=join_feedback) + feedback_dict = self._parse_score_msg(self.history[-1].get('post_assessment', ""), system, join_feedback=join_feedback) if not short_feedback: return feedback_dict['feedback'] if feedback_dict['valid'] else '' if feedback_dict['valid']: @@ -484,14 +487,14 @@ class OpenEndedModule(openendedchild.OpenEndedChild): json.loads(self.history[-1].get('post_assessment', ""))) return short_feedback if feedback_dict['valid'] else '' - def format_feedback_with_evaluation(self, feedback): + def format_feedback_with_evaluation(self, system, feedback): """ Renders a given html feedback into an evaluation template @param feedback: HTML feedback @return: Rendered html """ context = {'msg': feedback, 'id': "1", 'rows': 50, 'cols': 50} - html = render_to_string('open_ended_evaluation.html', context) + html = system.render_template('open_ended_evaluation.html', context) return html def handle_ajax(self, dispatch, get, system): @@ -583,7 +586,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild): if self.state != self.INITIAL: latest = self.latest_answer() previous_answer = latest if latest is not None else self.initial_display - post_assessment = self.latest_post_assessment() + post_assessment = self.latest_post_assessment(system) score = self.latest_score() correct = 'correct' if self.is_submission_correct(score) else 'incorrect' else: diff --git a/common/lib/xmodule/xmodule/openendedchild.py 
b/common/lib/xmodule/xmodule/openendedchild.py index 2ba9528237..88fed61c6d 100644 --- a/common/lib/xmodule/xmodule/openendedchild.py +++ b/common/lib/xmodule/xmodule/openendedchild.py @@ -35,7 +35,7 @@ MAX_ATTEMPTS = 1 # Overriden by max_score specified in xml. MAX_SCORE = 1 -class OpenEndedChild(): +class OpenEndedChild(object): """ States: @@ -123,7 +123,7 @@ class OpenEndedChild(): return None return self.history[-1].get('score') - def latest_post_assessment(self): + def latest_post_assessment(self, system): """None if not available""" if not self.history: return "" diff --git a/common/lib/xmodule/xmodule/self_assessment_module.py b/common/lib/xmodule/xmodule/self_assessment_module.py index de842091c9..9caba5a6fe 100644 --- a/common/lib/xmodule/xmodule/self_assessment_module.py +++ b/common/lib/xmodule/xmodule/self_assessment_module.py @@ -122,8 +122,7 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild): if self.state == self.INITIAL: return '' - rubric_renderer = CombinedOpenEndedRubric(True) - + rubric_renderer = CombinedOpenEndedRubric(system, True) success, rubric_html = rubric_renderer.render_rubric(self.rubric) # we'll render it @@ -149,7 +148,7 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild): if self.state == self.DONE: # display the previous hint - latest = self.latest_post_assessment() + latest = self.latest_post_assessment(system) hint = latest if latest is not None else '' else: hint = '' diff --git a/common/lib/xmodule/xmodule/tests/test_course_module.py b/common/lib/xmodule/xmodule/tests/test_course_module.py index 63eaec1f61..712b095696 100644 --- a/common/lib/xmodule/xmodule/tests/test_course_module.py +++ b/common/lib/xmodule/xmodule/tests/test_course_module.py @@ -1,5 +1,5 @@ import unittest -from time import strptime, gmtime +from time import strptime from fs.memoryfs import MemoryFS from mock import Mock, patch @@ -39,52 +39,81 @@ class DummySystem(ImportSystem): class IsNewCourseTestCase(unittest.TestCase): """Make sure 
the property is_new works on courses""" @staticmethod - def get_dummy_course(start, is_new=None, load_error_modules=True): + def get_dummy_course(start, announcement=None, is_new=None): """Get a dummy course""" - system = DummySystem(load_error_modules) - is_new = '' if is_new is None else 'is_new="{0}"'.format(is_new).lower() + system = DummySystem(load_error_modules=True) + + def to_attrb(n, v): + return '' if v is None else '{0}="{1}"'.format(n, v).lower() + + is_new = to_attrb('is_new', is_new) + announcement = to_attrb('announcement', announcement) start_xml = ''' Two houses, ... - '''.format(org=ORG, course=COURSE, start=start, is_new=is_new) + '''.format(org=ORG, course=COURSE, start=start, is_new=is_new, + announcement=announcement) return system.process_xml(start_xml) @patch('xmodule.course_module.time.gmtime') - def test_non_started_yet(self, gmtime_mock): - descriptor = self.get_dummy_course(start='2013-01-05T12:00') + def test_sorting_score(self, gmtime_mock): gmtime_mock.return_value = NOW - assert(descriptor.is_new == True) - assert(descriptor.days_until_start == 4) + dates = [('2012-10-01T12:00', '2012-09-01T12:00'), # 0 + ('2012-12-01T12:00', '2012-11-01T12:00'), # 1 + ('2013-02-01T12:00', '2012-12-01T12:00'), # 2 + ('2013-02-01T12:00', '2012-11-10T12:00'), # 3 + ('2013-02-01T12:00', None), # 4 + ('2013-03-01T12:00', None), # 5 + ('2013-04-01T12:00', None), # 6 + ('2012-11-01T12:00', None), # 7 + ('2012-09-01T12:00', None), # 8 + ('1990-01-01T12:00', None), # 9 + ('2013-01-02T12:00', None), # 10 + ('2013-01-10T12:00', '2012-12-31T12:00'), # 11 + ('2013-01-10T12:00', '2013-01-01T12:00'), # 12 + ] + + data = [] + for i, d in enumerate(dates): + descriptor = self.get_dummy_course(start=d[0], announcement=d[1]) + score = descriptor.sorting_score + data.append((score, i)) + + result = [d[1] for d in sorted(data)] + assert(result == [12, 11, 2, 3, 1, 0, 6, 5, 4, 10, 7, 8, 9]) + @patch('xmodule.course_module.time.gmtime') - def test_already_started(self, 
gmtime_mock): - gmtime_mock.return_value = NOW - - descriptor = self.get_dummy_course(start='2012-12-02T12:00') - assert(descriptor.is_new == False) - assert(descriptor.days_until_start < 0) - - @patch('xmodule.course_module.time.gmtime') - def test_is_new_set(self, gmtime_mock): + def test_is_new(self, gmtime_mock): gmtime_mock.return_value = NOW descriptor = self.get_dummy_course(start='2012-12-02T12:00', is_new=True) - assert(descriptor.is_new == True) - assert(descriptor.days_until_start < 0) + assert(descriptor.is_new is True) descriptor = self.get_dummy_course(start='2013-02-02T12:00', is_new=False) - assert(descriptor.is_new == False) - assert(descriptor.days_until_start > 0) + assert(descriptor.is_new is False) descriptor = self.get_dummy_course(start='2013-02-02T12:00', is_new=True) - assert(descriptor.is_new == True) - assert(descriptor.days_until_start > 0) + assert(descriptor.is_new is True) + + descriptor = self.get_dummy_course(start='2013-01-15T12:00') + assert(descriptor.is_new is True) + + descriptor = self.get_dummy_course(start='2013-03-01T12:00') + assert(descriptor.is_new is True) + + descriptor = self.get_dummy_course(start='2012-10-15T12:00') + assert(descriptor.is_new is False) + + descriptor = self.get_dummy_course(start='2012-12-31T12:00') + assert(descriptor.is_new is True) diff --git a/common/lib/xmodule/xmodule/tests/test_import.py b/common/lib/xmodule/xmodule/tests/test_import.py index 90ec112f19..554e89ac74 100644 --- a/common/lib/xmodule/xmodule/tests/test_import.py +++ b/common/lib/xmodule/xmodule/tests/test_import.py @@ -339,19 +339,6 @@ class ImportTestCase(unittest.TestCase): self.assertRaises(etree.XMLSyntaxError, system.process_xml, bad_xml) - def test_selfassessment_import(self): - ''' - Check to see if definition_from_xml in self_assessment_module.py - works properly. Pulls data from the self_assessment directory in the test data directory. 
- ''' - - modulestore = XMLModuleStore(DATA_DIR, course_dirs=['self_assessment']) - - sa_id = "edX/sa_test/2012_Fall" - location = Location(["i4x", "edX", "sa_test", "selfassessment", "SampleQuestion"]) - sa_sample = modulestore.get_instance(sa_id, location) - #10 attempts is hard coded into SampleQuestion, which is the url_name of a selfassessment xml tag - self.assertEqual(sa_sample.metadata['attempts'], '10') def test_graphicslidertool_import(self): ''' diff --git a/common/lib/xmodule/xmodule/tests/test_self_assessment.py b/common/lib/xmodule/xmodule/tests/test_self_assessment.py index d89190b1e0..565483c586 100644 --- a/common/lib/xmodule/xmodule/tests/test_self_assessment.py +++ b/common/lib/xmodule/xmodule/tests/test_self_assessment.py @@ -4,6 +4,7 @@ import unittest from xmodule.self_assessment_module import SelfAssessmentModule from xmodule.modulestore import Location +from lxml import etree from . import test_system @@ -26,22 +27,37 @@ class SelfAssessmentTest(unittest.TestCase): state = json.dumps({'student_answers': ["Answer 1", "answer 2", "answer 3"], 'scores': [0, 1], 'hints': ['o hai'], - 'state': SelfAssessmentModule.ASSESSING, + 'state': SelfAssessmentModule.INITIAL, 'attempts': 2}) + rubric = ''' + + Response Quality + + + ''' + + prompt = etree.XML("Text") + static_data = { + 'max_attempts': 10, + 'rubric': etree.XML(rubric), + 'prompt': prompt, + 'max_score': 1 + } + module = SelfAssessmentModule(test_system, self.location, self.definition, self.descriptor, - state, {}, metadata=self.metadata) + static_data, state, metadata=self.metadata) self.assertEqual(module.get_score()['score'], 0) - self.assertTrue('answer 3' in module.get_html()) - self.assertFalse('answer 2' in module.get_html()) - module.save_assessment({'assessment': '0'}) - self.assertEqual(module.state, module.REQUEST_HINT) + module.save_answer({'student_answer': "I am an answer"}, test_system) + self.assertEqual(module.state, module.ASSESSING) - module.save_hint({'hint': 'hint for 
ans 3'}) + module.save_assessment({'assessment': '0'}, test_system) + self.assertEqual(module.state, module.POST_ASSESSMENT) + module.save_hint({'hint': 'this is a hint'}, test_system) self.assertEqual(module.state, module.DONE) d = module.reset({}) @@ -49,6 +65,6 @@ class SelfAssessmentTest(unittest.TestCase): self.assertEqual(module.state, module.INITIAL) # if we now assess as right, skip the REQUEST_HINT state - module.save_answer({'student_answer': 'answer 4'}) - module.save_assessment({'assessment': '1'}) + module.save_answer({'student_answer': 'answer 4'}, test_system) + module.save_assessment({'assessment': '1'}, test_system) self.assertEqual(module.state, module.DONE) diff --git a/lms/djangoapps/courseware/access.py b/lms/djangoapps/courseware/access.py index c7e09526c9..a176d2a171 100644 --- a/lms/djangoapps/courseware/access.py +++ b/lms/djangoapps/courseware/access.py @@ -338,6 +338,10 @@ def course_beta_test_group_name(location): """ return 'beta_testers_{0}'.format(Location(location).course) +# nosetests thinks that anything with _test_ in the name is a test. +# Correct this (https://nose.readthedocs.org/en/latest/finding_tests.html) +course_beta_test_group_name.__test__ = False + def _course_instructor_group_name(location): """ diff --git a/lms/djangoapps/courseware/courses.py b/lms/djangoapps/courseware/courses.py index 7c0d30ebd8..1090c208d1 100644 --- a/lms/djangoapps/courseware/courses.py +++ b/lms/djangoapps/courseware/courses.py @@ -64,6 +64,7 @@ def course_image_url(course): path = course.metadata['data_dir'] + "/images/course_image.jpg" return try_staticfiles_lookup(path) + def find_file(fs, dirs, filename): """ Looks for a filename in a list of dirs on a filesystem, in the specified order. 
@@ -80,6 +81,7 @@ def find_file(fs, dirs, filename): return filepath raise ResourceNotFoundError("Could not find {0}".format(filename)) + def get_course_about_section(course, section_key): """ This returns the snippet of html to be rendered on the course about page, @@ -234,4 +236,18 @@ def get_courses(user, domain=None): courses = [c for c in courses if has_access(user, c, 'see_exists')] courses = sorted(courses, key=lambda course:course.number) + + return courses + + +def sort_by_announcement(courses): + """ + Sorts a list of courses by their announcement date. If the date is + not available, sort them by their start date. + """ + + # Sort courses by how far they are from their start day + key = lambda course: course.sorting_score + courses = sorted(courses, key=key) + return courses diff --git a/lms/djangoapps/courseware/views.py b/lms/djangoapps/courseware/views.py index 9e52e2b281..b3775eb663 100644 --- a/lms/djangoapps/courseware/views.py +++ b/lms/djangoapps/courseware/views.py @@ -17,7 +17,8 @@ from django.views.decorators.cache import cache_control from courseware import grades from courseware.access import has_access -from courseware.courses import (get_courses, get_course_with_access, get_courses_by_university) +from courseware.courses import (get_courses, get_course_with_access, + get_courses_by_university, sort_by_announcement) import courseware.tabs as tabs from courseware.models import StudentModuleCache from module_render import toc_for_course, get_module, get_instance_module @@ -67,11 +68,8 @@ def courses(request): ''' Render "find courses" page. The course selection work is done in courseware.courses. 
''' - courses = get_courses(request.user, domain=request.META.get('HTTP_HOST')) - - # Sort courses by how far are they from they start day - key = lambda course: course.days_until_start - courses = sorted(courses, key=key, reverse=True) + courses = get_courses(request.user, request.META.get('HTTP_HOST')) + courses = sort_by_announcement(courses) return render_to_response("courseware/courses.html", {'courses': courses}) @@ -438,10 +436,7 @@ def university_profile(request, org_id): # Only grab courses for this org... courses = get_courses_by_university(request.user, domain=request.META.get('HTTP_HOST'))[org_id] - - # Sort courses by how far are they from they start day - key = lambda course: course.days_until_start - courses = sorted(courses, key=key, reverse=True) + courses = sort_by_announcement(courses) context = dict(courses=courses, org_id=org_id) template_file = "university_profile/{0}.html".format(org_id).lower() diff --git a/lms/djangoapps/instructor/views.py b/lms/djangoapps/instructor/views.py index ddb31bf871..2cf3bbb0a9 100644 --- a/lms/djangoapps/instructor/views.py +++ b/lms/djangoapps/instructor/views.py @@ -111,6 +111,7 @@ def instructor_dashboard(request, course_id): except Group.DoesNotExist: group = Group(name=grpname) # create the group group.save() + return group def get_beta_group(course): """ diff --git a/lms/djangoapps/open_ended_grading/grading_service.py b/lms/djangoapps/open_ended_grading/grading_service.py index 3b17a015fe..e8af5f09f6 100644 --- a/lms/djangoapps/open_ended_grading/grading_service.py +++ b/lms/djangoapps/open_ended_grading/grading_service.py @@ -13,6 +13,8 @@ from util.json_request import expect_json from xmodule.course_module import CourseDescriptor from xmodule.combined_open_ended_rubric import CombinedOpenEndedRubric from lxml import etree +from mitxmako.shortcuts import render_to_string +from xmodule.x_module import ModuleSystem log = logging.getLogger(__name__) @@ -29,6 +31,7 @@ class GradingService(object): self.url = 
config['url'] self.login_url = self.url + '/login/' self.session = requests.session() + self.system = ModuleSystem(None, None, None, render_to_string, None) def _login(self): """ @@ -109,7 +112,7 @@ class GradingService(object): response_json = json.loads(response) if response_json.has_key('rubric'): rubric = response_json['rubric'] - rubric_renderer = CombinedOpenEndedRubric(False) + rubric_renderer = CombinedOpenEndedRubric(self.system, False) success, rubric_html = rubric_renderer.render_rubric(rubric) if not success: error_message = "Could not render rubric: {0}".format(rubric) diff --git a/lms/static/sass/multicourse/_dashboard.scss b/lms/static/sass/multicourse/_dashboard.scss index c22bc14105..4555a426d3 100644 --- a/lms/static/sass/multicourse/_dashboard.scss +++ b/lms/static/sass/multicourse/_dashboard.scss @@ -426,6 +426,14 @@ font-size: 1.2rem; font-weight: bold; } + + strong { + font-weight: 700; + + a { + font-weight: 700; + } + } } .actions { diff --git a/lms/static/sass/multicourse/_testcenter-register.scss b/lms/static/sass/multicourse/_testcenter-register.scss index 961fffd5d0..6d85fc167f 100644 --- a/lms/static/sass/multicourse/_testcenter-register.scss +++ b/lms/static/sass/multicourse/_testcenter-register.scss @@ -232,8 +232,9 @@ $red: rgb(178, 6, 16); } input, textarea { + height: 100%; width: 100%; - padding: $baseline ($baseline*.75); + padding: ($baseline/2); &.long { width: 100%; diff --git a/lms/templates/dashboard.html b/lms/templates/dashboard.html index 0182a8edf1..8ec58a6a28 100644 --- a/lms/templates/dashboard.html +++ b/lms/templates/dashboard.html @@ -243,22 +243,15 @@ % endif % if registration.is_rejected:
-

Your - registration for the Pearson exam - has been rejected. Please check the information you provided, and try to correct any demographic errors. Otherwise - contact edX for further help.

- Contact exam-help@edx.org +

Your registration for the Pearson exam has been rejected. Please see your registration status details. Otherwise contact edX at exam-help@edx.org for further help.

% endif % if not registration.is_accepted and not registration.is_rejected:
-

Your - registration for the Pearson exam - is pending. Within a few days, you should see a confirmation number here, which can be used to schedule your exam.

+

Your registration for the Pearson exam is pending. Within a few days, you should see a confirmation number here, which can be used to schedule your exam.

% endif % endif - % endif <% diff --git a/lms/templates/test_center_register.html b/lms/templates/test_center_register.html index 03883d907c..f6c53c0e89 100644 --- a/lms/templates/test_center_register.html +++ b/lms/templates/test_center_register.html @@ -128,7 +128,7 @@ % if registration.registration_is_rejected:

Your registration for the Pearson exam has been rejected

-

Please see your registration status details for more information.

+

Please see your registration status details for more information.

% endif @@ -246,25 +246,25 @@
  • - +
    - +
    - +
  • - +
    - +
  • @@ -474,7 +474,7 @@

    Questions

    -

    If you have a specific question pertaining to your registration, you may contact exam-help@edx.org.

    +

    If you have a specific question pertaining to your registration, you may contact edX at exam-help@edx.org.

    diff --git a/requirements.txt b/requirements.txt index 08cfe57e2e..bc019ab54c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -51,8 +51,7 @@ pygraphviz==1.1 pil==1.1.7 nltk==2.0.4 dogstatsd-python==0.2.1 -# Taking out MySQL-python for now because it requires mysql to be installed, so breaks updates on content folks' envs. -# MySQL-python +MySQL-python==1.2.4c1 sphinx==1.1.3 Shapely==1.2.16 ipython==0.13.1