From ecf04b3e493ff5c9c3f14de98f691cb3ec086576 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Fri, 28 Dec 2012 11:29:29 -0500 Subject: [PATCH 01/49] Refactor existing grading logic into a new app. --- lms/djangoapps/instructor/views.py | 21 ------ lms/djangoapps/open_ended_grading/__init__.py | 0 .../open_ended_grading/grading_service.py | 71 +++++++++++++++++++ .../peer_grading_service.py | 15 ++++ .../staff_grading.py} | 0 .../staff_grading_service.py | 69 +++--------------- lms/djangoapps/open_ended_grading/tests.py | 16 +++++ lms/djangoapps/open_ended_grading/views.py | 57 +++++++++++++++ lms/envs/common.py | 1 + lms/urls.py | 10 +-- 10 files changed, 176 insertions(+), 84 deletions(-) create mode 100644 lms/djangoapps/open_ended_grading/__init__.py create mode 100644 lms/djangoapps/open_ended_grading/grading_service.py create mode 100644 lms/djangoapps/open_ended_grading/peer_grading_service.py rename lms/djangoapps/{instructor/grading.py => open_ended_grading/staff_grading.py} (100%) rename lms/djangoapps/{instructor => open_ended_grading}/staff_grading_service.py (84%) create mode 100644 lms/djangoapps/open_ended_grading/tests.py create mode 100644 lms/djangoapps/open_ended_grading/views.py diff --git a/lms/djangoapps/instructor/views.py b/lms/djangoapps/instructor/views.py index 79cf0caaf3..2bad058ad8 100644 --- a/lms/djangoapps/instructor/views.py +++ b/lms/djangoapps/instructor/views.py @@ -28,7 +28,6 @@ from xmodule.modulestore.exceptions import InvalidLocationError, ItemNotFoundErr from xmodule.modulestore.search import path_to_location import track.views -from .grading import StaffGrading log = logging.getLogger(__name__) @@ -414,26 +413,6 @@ def get_student_grade_summary_data(request, course, course_id, get_grades=True, -@cache_control(no_cache=True, no_store=True, must_revalidate=True) -def staff_grading(request, course_id): - """ - Show the instructor grading interface. 
- """ - course = get_course_with_access(request.user, course_id, 'staff') - - grading = StaffGrading(course) - - ajax_url = reverse('staff_grading', kwargs={'course_id': course_id}) - if not ajax_url.endswith('/'): - ajax_url += '/' - - return render_to_response('instructor/staff_grading.html', { - 'view_html': grading.get_html(), - 'course': course, - 'course_id': course_id, - 'ajax_url': ajax_url, - # Checked above - 'staff_access': True, }) @cache_control(no_cache=True, no_store=True, must_revalidate=True) diff --git a/lms/djangoapps/open_ended_grading/__init__.py b/lms/djangoapps/open_ended_grading/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/lms/djangoapps/open_ended_grading/grading_service.py b/lms/djangoapps/open_ended_grading/grading_service.py new file mode 100644 index 0000000000..be15ae08ee --- /dev/null +++ b/lms/djangoapps/open_ended_grading/grading_service.py @@ -0,0 +1,71 @@ +# This class gives a common interface for logging into +# the graing controller +import json +import logging +import requests +from requests.exceptions import RequestException, ConnectionError, HTTPError +import sys + +from django.conf import settings +from django.http import HttpResponse, Http404 + +from courseware.access import has_access +from util.json_request import expect_json +from xmodule.course_module import CourseDescriptor + +log = logging.getLogger(__name__) + +class GradingServiceError(Exception): + pass + +class GradingService(object): + """ + Interface to staff grading backend. + """ + def __init__(self, config): + self.username = config['username'] + self.password = config['password'] + self.url = config['url'] + self.login_url = self.url + '/login/' + self.session = requests.session() + + def _login(self): + """ + Log into the staff grading service. + + Raises requests.exceptions.HTTPError if something goes wrong. + + Returns the decoded json dict of the response. 
+ """ + response = self.session.post(self.login_url, + {'username': self.username, + 'password': self.password,}) + + response.raise_for_status() + + return response.json + + + def _try_with_login(self, operation): + """ + Call operation(), which should return a requests response object. If + the request fails with a 'login_required' error, call _login() and try + the operation again. + + Returns the result of operation(). Does not catch exceptions. + """ + response = operation() + if (response.json + and response.json.get('success') == False + and response.json.get('error') == 'login_required'): + # apparrently we aren't logged in. Try to fix that. + r = self._login() + if r and not r.get('success'): + log.warning("Couldn't log into staff_grading backend. Response: %s", + r) + # try again + response = operation() + response.raise_for_status() + + return response + diff --git a/lms/djangoapps/open_ended_grading/peer_grading_service.py b/lms/djangoapps/open_ended_grading/peer_grading_service.py new file mode 100644 index 0000000000..cad23a072c --- /dev/null +++ b/lms/djangoapps/open_ended_grading/peer_grading_service.py @@ -0,0 +1,15 @@ +import json +import logging +import requests +from requests.exceptions import RequestException, ConnectionError, HTTPError +import sys + +from django.conf import settings +from django.http import HttpResponse, Http404 + +from courseware.access import has_access +from util.json_request import expect_json +from xmodule.course_module import CourseDescriptor + +log = logging.getLogger(__name__) + diff --git a/lms/djangoapps/instructor/grading.py b/lms/djangoapps/open_ended_grading/staff_grading.py similarity index 100% rename from lms/djangoapps/instructor/grading.py rename to lms/djangoapps/open_ended_grading/staff_grading.py diff --git a/lms/djangoapps/instructor/staff_grading_service.py b/lms/djangoapps/open_ended_grading/staff_grading_service.py similarity index 84% rename from lms/djangoapps/instructor/staff_grading_service.py 
rename to lms/djangoapps/open_ended_grading/staff_grading_service.py index ea8f0de074..6d0cea983b 100644 --- a/lms/djangoapps/instructor/staff_grading_service.py +++ b/lms/djangoapps/open_ended_grading/staff_grading_service.py @@ -7,6 +7,8 @@ import logging import requests from requests.exceptions import RequestException, ConnectionError, HTTPError import sys +from grading_service import GradingService +from grading_service import GradingServiceError from django.conf import settings from django.http import HttpResponse, Http404 @@ -18,9 +20,6 @@ from xmodule.course_module import CourseDescriptor log = logging.getLogger(__name__) -class GradingServiceError(Exception): - pass - class MockStaffGradingService(object): """ @@ -57,62 +56,16 @@ class MockStaffGradingService(object): return self.get_next(course_id, 'fake location', grader_id) -class StaffGradingService(object): +class StaffGradingService(GradingService): """ Interface to staff grading backend. """ def __init__(self, config): - self.username = config['username'] - self.password = config['password'] - self.url = config['url'] - - self.login_url = self.url + '/login/' + super(StaffGradingService, self).__init__(config) self.get_next_url = self.url + '/get_next_submission/' self.save_grade_url = self.url + '/save_grade/' self.get_problem_list_url = self.url + '/get_problem_list/' - self.session = requests.session() - - - def _login(self): - """ - Log into the staff grading service. - - Raises requests.exceptions.HTTPError if something goes wrong. - - Returns the decoded json dict of the response. - """ - response = self.session.post(self.login_url, - {'username': self.username, - 'password': self.password,}) - - response.raise_for_status() - - return response.json - - - def _try_with_login(self, operation): - """ - Call operation(), which should return a requests response object. If - the request fails with a 'login_required' error, call _login() and try - the operation again. 
- - Returns the result of operation(). Does not catch exceptions. - """ - response = operation() - if (response.json - and response.json.get('success') == False - and response.json.get('error') == 'login_required'): - # apparrently we aren't logged in. Try to fix that. - r = self._login() - if r and not r.get('success'): - log.warning("Couldn't log into staff_grading backend. Response: %s", - r) - # try again - response = operation() - response.raise_for_status() - - return response def get_problem_list(self, course_id, grader_id): """ @@ -203,11 +156,11 @@ class StaffGradingService(object): return r.text -# don't initialize until grading_service() is called--means that just +# don't initialize until staff_grading_service() is called--means that just # importing this file doesn't create objects that may not have the right config _service = None -def grading_service(): +def staff_grading_service(): """ Return a staff grading service instance--if settings.MOCK_STAFF_GRADING is True, returns a mock one, otherwise a real one. @@ -308,12 +261,12 @@ def get_problem_list(request, course_id): """ _check_access(request.user, course_id) try: - response = grading_service().get_problem_list(course_id, request.user.id) + response = staff_grading_service().get_problem_list(course_id, request.user.id) return HttpResponse(response, mimetype="application/json") except GradingServiceError: log.exception("Error from grading service. 
server url: {0}" - .format(grading_service().url)) + .format(staff_grading_service().url)) return HttpResponse(json.dumps({'success': False, 'error': 'Could not connect to grading service'})) @@ -323,10 +276,10 @@ def _get_next(course_id, grader_id, location): Implementation of get_next (also called from save_grade) -- returns a json string """ try: - return grading_service().get_next(course_id, location, grader_id) + return staff_grading_service().get_next(course_id, location, grader_id) except GradingServiceError: log.exception("Error from grading service. server url: {0}" - .format(grading_service().url)) + .format(staff_grading_service().url)) return json.dumps({'success': False, 'error': 'Could not connect to grading service'}) @@ -364,7 +317,7 @@ def save_grade(request, course_id): location = p['location'] skipped = 'skipped' in p try: - result_json = grading_service().save_grade(course_id, + result_json = staff_grading_service().save_grade(course_id, grader_id, p['submission_id'], p['score'], diff --git a/lms/djangoapps/open_ended_grading/tests.py b/lms/djangoapps/open_ended_grading/tests.py new file mode 100644 index 0000000000..501deb776c --- /dev/null +++ b/lms/djangoapps/open_ended_grading/tests.py @@ -0,0 +1,16 @@ +""" +This file demonstrates writing tests using the unittest module. These will pass +when you run "manage.py test". + +Replace this with more appropriate tests for your application. +""" + +from django.test import TestCase + + +class SimpleTest(TestCase): + def test_basic_addition(self): + """ + Tests that 1 + 1 always equals 2. 
+ """ + self.assertEqual(1 + 1, 2) diff --git a/lms/djangoapps/open_ended_grading/views.py b/lms/djangoapps/open_ended_grading/views.py new file mode 100644 index 0000000000..9066f8323a --- /dev/null +++ b/lms/djangoapps/open_ended_grading/views.py @@ -0,0 +1,57 @@ +# Grading Views + +from collections import defaultdict +import csv +import logging +import os +import urllib + +from django.conf import settings +from django.contrib.auth.models import User, Group +from django.http import HttpResponse +from django_future.csrf import ensure_csrf_cookie +from django.views.decorators.cache import cache_control +from mitxmako.shortcuts import render_to_response +from django.core.urlresolvers import reverse + +from courseware import grades +from courseware.access import has_access, get_access_group_name +from courseware.courses import get_course_with_access +from django_comment_client.models import Role, FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA +from django_comment_client.utils import has_forum_access +from psychometrics import psychoanalyze +from student.models import CourseEnrollment +from xmodule.course_module import CourseDescriptor +from xmodule.modulestore import Location +from xmodule.modulestore.django import modulestore +from xmodule.modulestore.exceptions import InvalidLocationError, ItemNotFoundError, NoPathToItem +from xmodule.modulestore.search import path_to_location +import track.views + +from .staff_grading import StaffGrading + + +log = logging.getLogger(__name__) + +template_imports = {'urllib': urllib} +@cache_control(no_cache=True, no_store=True, must_revalidate=True) +def staff_grading(request, course_id): + """ + Show the instructor grading interface. 
+ """ + course = get_course_with_access(request.user, course_id, 'staff') + + grading = StaffGrading(course) + + ajax_url = reverse('staff_grading', kwargs={'course_id': course_id}) + if not ajax_url.endswith('/'): + ajax_url += '/' + + return render_to_response('instructor/staff_grading.html', { + 'view_html': grading.get_html(), + 'course': course, + 'course_id': course_id, + 'ajax_url': ajax_url, + # Checked above + 'staff_access': True, }) + diff --git a/lms/envs/common.py b/lms/envs/common.py index 26941f7e01..1cf22a6323 100644 --- a/lms/envs/common.py +++ b/lms/envs/common.py @@ -600,6 +600,7 @@ INSTALLED_APPS = ( 'util', 'certificates', 'instructor', + 'open_ended_grading', 'psychometrics', 'licenses', diff --git a/lms/urls.py b/lms/urls.py index baa720028b..f04af88e72 100644 --- a/lms/urls.py +++ b/lms/urls.py @@ -241,15 +241,15 @@ if settings.COURSEWARE_ENABLED: url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/enroll_students$', 'instructor.views.enroll_students', name='enroll_students'), url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/staff_grading$', - 'instructor.views.staff_grading', name='staff_grading'), + 'open_ended_grading.views.staff_grading', name='staff_grading'), url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/staff_grading/get_next$', - 'instructor.staff_grading_service.get_next', name='staff_grading_get_next'), + 'open_ended_grading.staff_grading_service.get_next', name='staff_grading_get_next'), url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/staff_grading/save_grade$', - 'instructor.staff_grading_service.save_grade', name='staff_grading_save_grade'), + 'open_ended_grading.staff_grading_service.save_grade', name='staff_grading_save_grade'), url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/staff_grading/save_grade$', - 'instructor.staff_grading_service.save_grade', name='staff_grading_save_grade'), + 'open_ended_grading.staff_grading_service.save_grade', name='staff_grading_save_grade'), url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/staff_grading/get_problem_list$', - 
'instructor.staff_grading_service.get_problem_list', name='staff_grading_get_problem_list'), + 'open_ended_grading.staff_grading_service.get_problem_list', name='staff_grading_get_problem_list'), ) # discussion forums live within courseware, so courseware must be enabled first From 125945de9019adf7b989c1f65f48e92937801741 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Fri, 28 Dec 2012 13:34:07 -0500 Subject: [PATCH 02/49] Refactor get and post logic into new GradingService --- .../open_ended_grading/grading_service.py | 32 ++++++++++++- .../staff_grading_service.py | 45 +++++-------------- 2 files changed, 41 insertions(+), 36 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/grading_service.py b/lms/djangoapps/open_ended_grading/grading_service.py index be15ae08ee..3c92c5bddd 100644 --- a/lms/djangoapps/open_ended_grading/grading_service.py +++ b/lms/djangoapps/open_ended_grading/grading_service.py @@ -1,5 +1,4 @@ -# This class gives a common interface for logging into -# the graing controller +# This class gives a common interface for logging into the grading controller import json import logging import requests @@ -45,6 +44,35 @@ class GradingService(object): return response.json + def post(self, url, allow_redirects, data): + """ + Make a post request to the grading controller + """ + try: + op = lambda: self.session.post(url, data=data, + allow_redirects=allow_redirects) + r = self._try_with_login(op) + except (RequestException, ConnectionError, HTTPError) as err: + # reraise as promised GradingServiceError, but preserve stacktrace. 
+ raise GradingServiceError, str(err), sys.exc_info()[2] + + return r.text + + def get(self, url, allow_redirects, params): + """ + Make a get request to the grading controller + """ + op = lambda: self.session.get(url, + allow_redirects=allow_redirects, + params=params) + try: + r = self._try_with_login(op) + except (RequestException, ConnectionError, HTTPError) as err: + # reraise as promised GradingServiceError, but preserve stacktrace. + raise GradingServiceError, str(err), sys.exc_info()[2] + + return r.text + def _try_with_login(self, operation): """ diff --git a/lms/djangoapps/open_ended_grading/staff_grading_service.py b/lms/djangoapps/open_ended_grading/staff_grading_service.py index 6d0cea983b..5d56a90064 100644 --- a/lms/djangoapps/open_ended_grading/staff_grading_service.py +++ b/lms/djangoapps/open_ended_grading/staff_grading_service.py @@ -83,17 +83,8 @@ class StaffGradingService(GradingService): Raises: GradingServiceError: something went wrong with the connection. """ - op = lambda: self.session.get(self.get_problem_list_url, - allow_redirects = False, - params={'course_id': course_id, - 'grader_id': grader_id}) - try: - r = self._try_with_login(op) - except (RequestException, ConnectionError, HTTPError) as err: - # reraise as promised GradingServiceError, but preserve stacktrace. - raise GradingServiceError, str(err), sys.exc_info()[2] - - return r.text + params = {'course_id': course_id,'grader_id': grader_id} + return self.get(self.get_problem_list_url, False, params) def get_next(self, course_id, location, grader_id): @@ -114,17 +105,10 @@ class StaffGradingService(GradingService): Raises: GradingServiceError: something went wrong with the connection. 
""" - op = lambda: self.session.get(self.get_next_url, + return self.get(self.get_next_url, allow_redirects=False, params={'location': location, 'grader_id': grader_id}) - try: - r = self._try_with_login(op) - except (RequestException, ConnectionError, HTTPError) as err: - # reraise as promised GradingServiceError, but preserve stacktrace. - raise GradingServiceError, str(err), sys.exc_info()[2] - - return r.text def save_grade(self, course_id, grader_id, submission_id, score, feedback, skipped): @@ -139,22 +123,15 @@ class StaffGradingService(GradingService): Raises: GradingServiceError if there's a problem connecting. """ - try: - data = {'course_id': course_id, - 'submission_id': submission_id, - 'score': score, - 'feedback': feedback, - 'grader_id': grader_id, - 'skipped': skipped} + data = {'course_id': course_id, + 'submission_id': submission_id, + 'score': score, + 'feedback': feedback, + 'grader_id': grader_id, + 'skipped': skipped} - op = lambda: self.session.post(self.save_grade_url, data=data, - allow_redirects=False) - r = self._try_with_login(op) - except (RequestException, ConnectionError, HTTPError) as err: - # reraise as promised GradingServiceError, but preserve stacktrace. 
- raise GradingServiceError, str(err), sys.exc_info()[2] - - return r.text + return self.post(self.save_grade_url, data=data, + allow_redirects=False) # don't initialize until staff_grading_service() is called--means that just # importing this file doesn't create objects that may not have the right config From d23eb93fbb159c91ec2bf6a66e0825622a566f04 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Fri, 28 Dec 2012 13:35:35 -0500 Subject: [PATCH 03/49] add new peer grading service with some rudimentary logic --- .../peer_grading_service.py | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/lms/djangoapps/open_ended_grading/peer_grading_service.py b/lms/djangoapps/open_ended_grading/peer_grading_service.py index cad23a072c..e3b2e823a7 100644 --- a/lms/djangoapps/open_ended_grading/peer_grading_service.py +++ b/lms/djangoapps/open_ended_grading/peer_grading_service.py @@ -6,6 +6,8 @@ import sys from django.conf import settings from django.http import HttpResponse, Http404 +from grading_service import GradingService +from grading_service import GradingServiceError from courseware.access import has_access from util.json_request import expect_json @@ -13,3 +15,26 @@ from xmodule.course_module import CourseDescriptor log = logging.getLogger(__name__) +class PeerGradingService(GradingService): + """ + Interface with the grading controller for peer grading + """ + def __init__(self, config): + super(PeerGradingService, self).__init__(config) + self.get_next_submission_url = self.url + '/get_next_submission/' + self.save_grade_url = self.url + '/save_grade/' + self.is_student_calibrated_url = self.url + '/is_student_calibrated/' + self.show_calibration_essay = self.url + '/show_calibration_essay/' + self.save_calibration_essay = self.url + '/save_calibration_essay/' + + def get_next_submission(self, problem_location, grader_id): + return self.get(self.get_next_submission_url, False, + {'location': problem_location, 'grader_id': grader_id}) + + def 
save_grade(self, grader_id, submission_id, score, feedback, submission_key): + data = {'grader_id' : grader_id, + 'submission_id' : submission_id, + 'score' : score, + 'feedback' : feedback, + 'submission_key', submission_key} + return self.post(self.save_grade_url, False, data) From 652a8eb440af46e2f377b7970c5e0faea2a96f49 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Fri, 28 Dec 2012 15:48:06 -0500 Subject: [PATCH 04/49] New peer grading view on the lms side --- .../peer_grading_service.py | 56 ++++++++++++++++++- lms/djangoapps/open_ended_grading/views.py | 22 +++++++- lms/envs/common.py | 6 +- lms/templates/instructor/staff_grading.html | 4 ++ lms/templates/peer_grading/peer_grading.html | 22 ++++++++ lms/urls.py | 5 ++ 6 files changed, 112 insertions(+), 3 deletions(-) create mode 100644 lms/templates/peer_grading/peer_grading.html diff --git a/lms/djangoapps/open_ended_grading/peer_grading_service.py b/lms/djangoapps/open_ended_grading/peer_grading_service.py index e3b2e823a7..b2b1fab5d3 100644 --- a/lms/djangoapps/open_ended_grading/peer_grading_service.py +++ b/lms/djangoapps/open_ended_grading/peer_grading_service.py @@ -36,5 +36,59 @@ class PeerGradingService(GradingService): 'submission_id' : submission_id, 'score' : score, 'feedback' : feedback, - 'submission_key', submission_key} + 'submission_key': submission_key} return self.post(self.save_grade_url, False, data) + + +def peer_grading_service(): + """ + Return a peer grading service instance--if settings.MOCK_PEER_GRADING is True, + returns a mock one, otherwise a real one. + + Caches the result, so changing the setting after the first call to this + function will have no effect. + """ + global _service + if _service is not None: + return _service + + _service = PeerGradingService(settings.PEER_GRADING_INTERFACE) + + return _service + +def _err_response(msg): + """ + Return a HttpResponse with a json dump with success=False, and the given error message. 
+ """ + return HttpResponse(json.dumps({'success': False, 'error': msg}), + mimetype="application/json") + +def get_next_submission(request, course_id): + required = set(['location']) + if request.method != 'POST': + raise Http404 + actual = set(request.POST.keys()) + missing = required - actual + if len(missing) > 0: + return _err_response('Missing required keys {0}'.format( + ', '.join(missing))) + grader_id = request.user.id + p = request.POST + location = p['location'] + + return HttpResponse(_get_next(course_id, request.user.id, location), + mimetype="application/json") + +def _get_next_submission(course_id, grader_id, location): + """ + Implementation of get_next (also called from save_grade) -- returns a json string + """ + try: + return peer_grading_service().get_next_submission(location, grader_id) + except GradingServiceError: + log.exception("Error from grading service. server url: {0}" + .format(staff_grading_service().url)) + return json.dumps({'success': False, + 'error': 'Could not connect to grading service'}) + + diff --git a/lms/djangoapps/open_ended_grading/views.py b/lms/djangoapps/open_ended_grading/views.py index 9066f8323a..41eb0fbccf 100644 --- a/lms/djangoapps/open_ended_grading/views.py +++ b/lms/djangoapps/open_ended_grading/views.py @@ -48,10 +48,30 @@ def staff_grading(request, course_id): ajax_url += '/' return render_to_response('instructor/staff_grading.html', { - 'view_html': grading.get_html(), + 'view_html': '', 'course': course, 'course_id': course_id, 'ajax_url': ajax_url, # Checked above 'staff_access': True, }) + +def peer_grading(request, course_id): + ''' + Show a peer grading interface + ''' + course = get_course_with_access(request.user, course_id, 'load') + + ajax_url = reverse('peer_grading', kwargs={'course_id': course_id}) + if not ajax_url.endswith('/'): + ajax_url += '/' + + return render_to_response('peer_grading/peer_grading.html', { + 'view_html': '', + 'course': course, + 'course_id': course_id, + 'ajax_url': 
ajax_url, + # Checked above + 'staff_access': False, }) + + diff --git a/lms/envs/common.py b/lms/envs/common.py index 1cf22a6323..d18c82b754 100644 --- a/lms/envs/common.py +++ b/lms/envs/common.py @@ -418,6 +418,7 @@ main_vendor_js = [ discussion_js = sorted(glob2.glob(PROJECT_ROOT / 'static/coffee/src/discussion/**/*.coffee')) staff_grading_js = sorted(glob2.glob(PROJECT_ROOT / 'static/coffee/src/staff_grading/**/*.coffee')) +peer_grading_js = sorted(glob2.glob(PROJECT_ROOT / 'static/coffee/src/peer_grading/**/*.coffee')) # Load javascript from all of the available xmodules, and @@ -526,8 +527,11 @@ PIPELINE_JS = { 'staff_grading' : { 'source_filenames': [pth.replace(PROJECT_ROOT / 'static/', '') for pth in staff_grading_js], 'output_filename': 'js/staff_grading.js' + }, + 'peer_grading' : { + 'source_filenames': [pth.replace(PROJECT_ROOT / 'static/', '') for pth in peer_grading_js], + 'output_filename': 'js/peer_grading.js' } - } PIPELINE_DISABLE_WRAPPER = True diff --git a/lms/templates/instructor/staff_grading.html b/lms/templates/instructor/staff_grading.html index 33580c6267..085480a332 100644 --- a/lms/templates/instructor/staff_grading.html +++ b/lms/templates/instructor/staff_grading.html @@ -24,6 +24,8 @@
+ +

Instructions

@@ -35,6 +37,8 @@
+ +

diff --git a/lms/templates/peer_grading/peer_grading.html b/lms/templates/peer_grading/peer_grading.html new file mode 100644 index 0000000000..19753c7ad4 --- /dev/null +++ b/lms/templates/peer_grading/peer_grading.html @@ -0,0 +1,22 @@ +<%inherit file="/main.html" /> +<%block name="bodyclass">${course.css_class} +<%namespace name='static' file='/static_content.html'/> + +<%block name="headextra"> + <%static:css group='course'/> + + +<%block name="title">${course.number} Peer Grading + +<%include file="/courseware/course_navigation.html" args="active_page='staff_grading'" /> + +<%block name="js_extra"> + <%static:js group='peer_grading'/> + + +
+
+
+
+
+
diff --git a/lms/urls.py b/lms/urls.py index f04af88e72..4f5dfeb666 100644 --- a/lms/urls.py +++ b/lms/urls.py @@ -250,6 +250,11 @@ if settings.COURSEWARE_ENABLED: 'open_ended_grading.staff_grading_service.save_grade', name='staff_grading_save_grade'), url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/staff_grading/get_problem_list$', 'open_ended_grading.staff_grading_service.get_problem_list', name='staff_grading_get_problem_list'), + url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading/get_next_submission$', + 'open_ended_grading.peer_grading_service.get_next_submission', name='peer_grading_get_next_submission'), + url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading$', + 'open_ended_grading.views.peer_grading', name='peer_grading'), + ) # discussion forums live within courseware, so courseware must be enabled first From deffa188ff9b9e44a543739f16f98f96834eba55 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Wed, 2 Jan 2013 09:43:21 -0500 Subject: [PATCH 05/49] Fix some minor problems and add new key for peer grading. 
--- lms/envs/dev.py | 7 +++++++ lms/templates/peer_grading/peer_grading.html | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/lms/envs/dev.py b/lms/envs/dev.py index 0ad42f67d3..058c67fa4d 100644 --- a/lms/envs/dev.py +++ b/lms/envs/dev.py @@ -110,6 +110,13 @@ STAFF_GRADING_INTERFACE = { 'password': 'abcd', } +################################# Peer grading config ##################### + +PEER_GRADING_INTERFACE = { + 'url': 'http://127.0.0.1:3033/peer_grading', + 'username': 'lms', + 'password': 'abcd', + } ################################ LMS Migration ################################# MITX_FEATURES['ENABLE_LMS_MIGRATION'] = True MITX_FEATURES['ACCESS_REQUIRE_STAFF_FOR_COURSE'] = False # require that user be in the staff_* group to be able to enroll diff --git a/lms/templates/peer_grading/peer_grading.html b/lms/templates/peer_grading/peer_grading.html index 19753c7ad4..0254d4cd67 100644 --- a/lms/templates/peer_grading/peer_grading.html +++ b/lms/templates/peer_grading/peer_grading.html @@ -15,7 +15,7 @@
-
+
From 37f261f906411508e1d04031cb08d07664278196 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Wed, 2 Jan 2013 15:01:59 -0500 Subject: [PATCH 06/49] Move peer grading so that there are the individual problem pages and the problem list page --- .../peer_grading_service.py | 35 ++++++++-- lms/djangoapps/open_ended_grading/views.py | 68 +++++++++++++++++++ .../src/peer_grading/peer_grading.coffee | 9 +++ .../peer_grading/peer_grading_problem.coffee | 17 +++++ lms/static/sass/course/_staff_grading.scss | 3 +- lms/templates/peer_grading/peer_grading.html | 15 +++- .../peer_grading/peer_grading_problem.html | 22 ++++++ lms/urls.py | 2 + 8 files changed, 164 insertions(+), 7 deletions(-) create mode 100644 lms/static/coffee/src/peer_grading/peer_grading.coffee create mode 100644 lms/static/coffee/src/peer_grading/peer_grading_problem.coffee create mode 100644 lms/templates/peer_grading/peer_grading_problem.html diff --git a/lms/djangoapps/open_ended_grading/peer_grading_service.py b/lms/djangoapps/open_ended_grading/peer_grading_service.py index b2b1fab5d3..1b78a32c5d 100644 --- a/lms/djangoapps/open_ended_grading/peer_grading_service.py +++ b/lms/djangoapps/open_ended_grading/peer_grading_service.py @@ -24,12 +24,14 @@ class PeerGradingService(GradingService): self.get_next_submission_url = self.url + '/get_next_submission/' self.save_grade_url = self.url + '/save_grade/' self.is_student_calibrated_url = self.url + '/is_student_calibrated/' - self.show_calibration_essay = self.url + '/show_calibration_essay/' - self.save_calibration_essay = self.url + '/save_calibration_essay/' + self.show_calibration_essay_url = self.url + '/show_calibration_essay/' + self.save_calibration_essay_url = self.url + '/save_calibration_essay/' + self.get_problem_list_url = self.url + '/get_problem_list/' def get_next_submission(self, problem_location, grader_id): - return self.get(self.get_next_submission_url, False, + response = self.get(self.get_next_submission_url, False, {'location': 
problem_location, 'grader_id': grader_id}) + return response def save_grade(self, grader_id, submission_id, score, feedback, submission_key): data = {'grader_id' : grader_id, @@ -39,6 +41,29 @@ class PeerGradingService(GradingService): 'submission_key': submission_key} return self.post(self.save_grade_url, False, data) + def is_student_calibrated(self, problem_location, grader_id): + params = {'problem_id' : problem_location, 'student_id': grader_id} + return self.get(self.is_student_calibrated_url, False, params) + + def show_calibration_essay(self, problem_location, grader_id): + params = {'problem_id' : problem_location, 'student_id': grader_id} + return self.get(self.show_calibration_essay_url, False, params) + + def save_calibration_essay(self, problem_location, grader_id, calibration_essay_id, submission_key, score, feedback): + data = {'location': problem_location, + 'student_id': grader_id, + 'calibration_essay_id': calibration_essay_id, + 'submission_key': submission_key, + 'score': score, + 'feedback': feedback} + return self.post(self.save_calibration_essay_url, False, data) + + def get_problem_list(self, course_id, grader_id): + params = {'course_id': course_id, 'student_id': grader_id} + response = self.get(self.get_problem_list_url, False, params) + log.debug("Response! 
{0}".format(response)) + return response + def peer_grading_service(): """ @@ -64,6 +89,9 @@ def _err_response(msg): mimetype="application/json") def get_next_submission(request, course_id): + """ + TODO: fill in this documentation + """ required = set(['location']) if request.method != 'POST': raise Http404 @@ -91,4 +119,3 @@ def _get_next_submission(course_id, grader_id, location): return json.dumps({'success': False, 'error': 'Could not connect to grading service'}) - diff --git a/lms/djangoapps/open_ended_grading/views.py b/lms/djangoapps/open_ended_grading/views.py index 41eb0fbccf..d4967aa0e9 100644 --- a/lms/djangoapps/open_ended_grading/views.py +++ b/lms/djangoapps/open_ended_grading/views.py @@ -26,6 +26,10 @@ from xmodule.modulestore import Location from xmodule.modulestore.django import modulestore from xmodule.modulestore.exceptions import InvalidLocationError, ItemNotFoundError, NoPathToItem from xmodule.modulestore.search import path_to_location + +from peer_grading_service import PeerGradingService +from grading_service import GradingServiceError +import json import track.views from .staff_grading import StaffGrading @@ -34,6 +38,8 @@ from .staff_grading import StaffGrading log = logging.getLogger(__name__) template_imports = {'urllib': urllib} +peer_gs = PeerGradingService(settings.PEER_GRADING_INTERFACE) + @cache_control(no_cache=True, no_store=True, must_revalidate=True) def staff_grading(request, course_id): """ @@ -62,6 +68,22 @@ def peer_grading(request, course_id): ''' course = get_course_with_access(request.user, course_id, 'load') + # call problem list service + success = False + error_text = "" + try: + problem_list_text = peer_gs.get_problem_list(course_id, request.user.id) + problem_list_json = json.loads(problem_list_text) + success = problem_list_json['success'] + if 'error' in problem_list_json: + error_text = problem_list_json['error'] + + problem_list = problem_list_json['problem_list'] + + except GradingServiceError: + error_text = 
"Error occured while contacting the grading service" + success = False + ajax_url = reverse('peer_grading', kwargs={'course_id': course_id}) if not ajax_url.endswith('/'): ajax_url += '/' @@ -71,6 +93,52 @@ def peer_grading(request, course_id): 'course': course, 'course_id': course_id, 'ajax_url': ajax_url, + 'success': success, + 'problem_list': problem_list, + 'error_text': error_text, + # Checked above + 'staff_access': False, }) + + +def peer_grading_problem(request, course_id, problem_location): + ''' + Show individual problem interface + ''' + course = get_course_with_access(request.user, course_id, 'load') + + # TODO: make sure that we show calibration or next submission correctly + # TODO: figure out if we want to make this page pure ajax or not + + problem_info_text = "" + error_text = "" + # if we are still in calibration + + # show a calibration essay + + # else, show an actual problem + try: + problem_info_text = peer_gs.get_next_submission(problem_location, request.user.id) + log.debug(problem_info_text) + problem_info = json.loads(problem_info_text) + success = problem_info['success'] + if 'error' in problem_info: + error_text = problem_info['error'] + except GradingServiceError: + success = False + + + ajax_url = reverse('peer_grading', kwargs={'course_id': course_id}) + if not ajax_url.endswith('/'): + ajax_url += '/' + + return render_to_response('peer_grading/peer_grading_problem.html', { + 'view_html': '', + 'course': course, + 'course_id': course_id, + 'success' : success, + 'problem_info': problem_info_text, + 'ajax_url': ajax_url, + 'error_text': error_text, # Checked above 'staff_access': False, }) diff --git a/lms/static/coffee/src/peer_grading/peer_grading.coffee b/lms/static/coffee/src/peer_grading/peer_grading.coffee new file mode 100644 index 0000000000..46f0206bbf --- /dev/null +++ b/lms/static/coffee/src/peer_grading/peer_grading.coffee @@ -0,0 +1,9 @@ +class PeerGrading + constructor: (backend) -> + @problem_list = $('.problem-list') 
+ @error_container = $('.error-container') + @error_container.toggle(not @error_container.is(':empty')) + + +backend = {} +$(document).ready(() -> new PeerGrading(backend)) diff --git a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee new file mode 100644 index 0000000000..461089b79c --- /dev/null +++ b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee @@ -0,0 +1,17 @@ +class PeerGradingProblemBackend + constructor: (ajax_url, mock_backend) -> + @mock_backend = mock_backend + +class PeerGradingProblem + constructor: (backend) -> + @error_container = $('.error-container') + + @render_problem() + + render_problem: () -> + # do this when it makes sense + @error_container.toggle(not @error_container.is(':empty')) + + +backend = {} +$(document).ready(() -> new PeerGradingProblem(backend)) diff --git a/lms/static/sass/course/_staff_grading.scss b/lms/static/sass/course/_staff_grading.scss index f1b6c5845d..bb7f6cef45 100644 --- a/lms/static/sass/course/_staff_grading.scss +++ b/lms/static/sass/course/_staff_grading.scss @@ -1,4 +1,5 @@ -div.staff-grading { +div.staff-grading, +div.peer-grading{ textarea.feedback-area { height: 75px; margin: 20px; diff --git a/lms/templates/peer_grading/peer_grading.html b/lms/templates/peer_grading/peer_grading.html index 0254d4cd67..b5a1408fde 100644 --- a/lms/templates/peer_grading/peer_grading.html +++ b/lms/templates/peer_grading/peer_grading.html @@ -16,7 +16,18 @@
-
-
+
${error_text}
+

Peer Grading

+

Instructions

+

+ % if success: + + %endif
diff --git a/lms/templates/peer_grading/peer_grading_problem.html b/lms/templates/peer_grading/peer_grading_problem.html new file mode 100644 index 0000000000..7ea47f04f7 --- /dev/null +++ b/lms/templates/peer_grading/peer_grading_problem.html @@ -0,0 +1,22 @@ + +<%inherit file="/main.html" /> +<%block name="bodyclass">${course.css_class} +<%namespace name='static' file='/static_content.html'/> + +<%block name="headextra"> + <%static:css group='course'/> + + +<%block name="title">${course.number} Peer Grading. + +<%include file="/courseware/course_navigation.html" args="active_page='staff_grading'" /> + +<%block name="js_extra"> + <%static:js group='peer_grading'/> + + +
+
+
${error_text}
+
+
diff --git a/lms/urls.py b/lms/urls.py index 4f5dfeb666..7f30224ad5 100644 --- a/lms/urls.py +++ b/lms/urls.py @@ -254,6 +254,8 @@ if settings.COURSEWARE_ENABLED: 'open_ended_grading.peer_grading_service.get_next_submission', name='peer_grading_get_next_submission'), url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading$', 'open_ended_grading.views.peer_grading', name='peer_grading'), + url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading/problem/(?P.*)$', + 'open_ended_grading.views.peer_grading_problem', name='peer_grading_problem'), ) From c89ff2ac0ee2245e571819adaa57c402d2ca8f96 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Wed, 2 Jan 2013 17:44:58 -0500 Subject: [PATCH 07/49] Basic peer grading view using mocks and some cleanup in the peer grading service --- .../peer_grading_service.py | 48 ++++++++-- lms/djangoapps/open_ended_grading/views.py | 25 +---- .../src/peer_grading/peer_grading.coffee | 7 +- .../peer_grading/peer_grading_problem.coffee | 91 +++++++++++++++++-- lms/templates/peer_grading/peer_grading.html | 2 +- .../peer_grading/peer_grading_problem.html | 54 ++++++++++- lms/urls.py | 9 +- 7 files changed, 191 insertions(+), 45 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/peer_grading_service.py b/lms/djangoapps/open_ended_grading/peer_grading_service.py index 1b78a32c5d..daccdd53af 100644 --- a/lms/djangoapps/open_ended_grading/peer_grading_service.py +++ b/lms/djangoapps/open_ended_grading/peer_grading_service.py @@ -88,18 +88,28 @@ def _err_response(msg): return HttpResponse(json.dumps({'success': False, 'error': msg}), mimetype="application/json") +def _check_required(request, required): + actual = set(request.POST.keys()) + missing = required - actual + if len(missing) > 0: + return False, "Missing required keys: {0}".format(', '.join(missing)) + else: + return True, "" + +def _check_post(request): + if request.method != 'POST': + raise Http404 + + def get_next_submission(request, course_id): """ TODO: fill in this documentation """ + 
_check_post(request) required = set(['location']) - if request.method != 'POST': - raise Http404 - actual = set(request.POST.keys()) - missing = required - actual - if len(missing) > 0: - return _err_response('Missing required keys {0}'.format( - ', '.join(missing))) + success, message = _check_required(request, required) + if not success: + return _err_response(message) grader_id = request.user.id p = request.POST location = p['location'] @@ -119,3 +129,27 @@ def _get_next_submission(course_id, grader_id, location): return json.dumps({'success': False, 'error': 'Could not connect to grading service'}) + +def show_calibration_essay(request, course_id): + """ + TODO: fill in this documentation + """ + _check_post(request) + + required = set(['location']) + success, message = _check_required(request, required) + if not success: + return _err_response(message) + + grader_id = request.user.id + p = request.POST + location = p['location'] + try: + response = peer_grading_service().show_calibration_essay(location, grader_id) + return HttpResponse(response, mimetype="application/json") + except GradingServiceError: + log.exception("Error from grading service. 
server url: {0}" + .format(staff_grading_service().url)) + return json.dumps({'success': False, + 'error': 'Could not connect to grading service'}) + diff --git a/lms/djangoapps/open_ended_grading/views.py b/lms/djangoapps/open_ended_grading/views.py index d4967aa0e9..6d41d47812 100644 --- a/lms/djangoapps/open_ended_grading/views.py +++ b/lms/djangoapps/open_ended_grading/views.py @@ -106,27 +106,6 @@ def peer_grading_problem(request, course_id, problem_location): ''' course = get_course_with_access(request.user, course_id, 'load') - # TODO: make sure that we show calibration or next submission correctly - # TODO: figure out if we want to make this page pure ajax or not - - problem_info_text = "" - error_text = "" - # if we are still in calibration - - # show a calibration essay - - # else, show an actual problem - try: - problem_info_text = peer_gs.get_next_submission(problem_location, request.user.id) - log.debug(problem_info_text) - problem_info = json.loads(problem_info_text) - success = problem_info['success'] - if 'error' in problem_info: - error_text = problem_info['error'] - except GradingServiceError: - success = False - - ajax_url = reverse('peer_grading', kwargs={'course_id': course_id}) if not ajax_url.endswith('/'): ajax_url += '/' @@ -134,11 +113,9 @@ def peer_grading_problem(request, course_id, problem_location): return render_to_response('peer_grading/peer_grading_problem.html', { 'view_html': '', 'course': course, + 'problem_location': problem_location, 'course_id': course_id, - 'success' : success, - 'problem_info': problem_info_text, 'ajax_url': ajax_url, - 'error_text': error_text, # Checked above 'staff_access': False, }) diff --git a/lms/static/coffee/src/peer_grading/peer_grading.coffee b/lms/static/coffee/src/peer_grading/peer_grading.coffee index 46f0206bbf..c20944252c 100644 --- a/lms/static/coffee/src/peer_grading/peer_grading.coffee +++ b/lms/static/coffee/src/peer_grading/peer_grading.coffee @@ -1,9 +1,10 @@ class PeerGrading 
constructor: (backend) -> - @problem_list = $('.problem-list') @error_container = $('.error-container') @error_container.toggle(not @error_container.is(':empty')) + @message_container = $('.message-container') + @message_container.toggle(not @message_container.is(':empty')) -backend = {} -$(document).ready(() -> new PeerGrading(backend)) +mock_backend = false +$(document).ready(() -> new PeerGrading(mock_backend)) diff --git a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee index 461089b79c..5b7aef18c7 100644 --- a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee +++ b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee @@ -1,17 +1,96 @@ class PeerGradingProblemBackend constructor: (ajax_url, mock_backend) -> @mock_backend = mock_backend + @ajax_url = ajax_url + + post: (cmd, data, callback) -> + if @mock_backend + callback(@mock(cmd, data)) + else + # TODO: replace with postWithPrefix when that's loaded + $.post(@ajax_url + cmd, data, callback) + .error => callback({success: false, error: "Error occured while performing this operation"}) + + mock: (cmd, data) -> + if cmd == 'is_student_calibrated' + # change to test each version + response = + success: true + calibrated: false + else if cmd == 'show_calibration_essay' + response = + success: true + submission_id: 1 + submission_key: 'abcd' + student_response: 'I am a fake response' + prompt: 'Answer this question' + rubric: 'This is a rubric.' 
+ max_score: 4 + + + return response + class PeerGradingProblem constructor: (backend) -> + @prompt_wrapper = $('.prompt-wrapper') + @backend = backend + + # ugly hack to prevent this code from trying to run on the + # general peer grading page + if( @prompt_wrapper.length == 0) + return + + # get the location of the problem + @location = $('.peer-grading').data('location') + + # get the other elements we want to fill in + @submission_container = $('.submission-container') + @prompt_container = $('.prompt-container') + @rubric_container = $('.rubric-container') + @error_container = $('.error-container') - @render_problem() - - render_problem: () -> - # do this when it makes sense - @error_container.toggle(not @error_container.is(':empty')) + @is_calibrated_check() -backend = {} + is_calibrated_check: () => + @backend.post('is_student_calibrated', {}, @calibration_check_callback) + + + fetch_calibration_essay: ()=> + @backend.post('show_calibration_essay', {location: @location}, @render_calibration) + + render_calibration: (response) => + if response.success + #TODO: fill this in + + @submission_container.html("

Calibration Essay

") + @submission_container.append(response.student_response) + @prompt_container.html(response.prompt) + @rubric_container.html(response.rubric) + + else + @error_container.show() + @error_container.html(response.error) + + render_submission: (response) -> + #TODO: fill this in + + calibration_check_callback: (response) => + if response.success + # check whether or not we're still calibrating + if response.calibrated + @fetch_submission() + @calibration = false + else + @fetch_calibration_essay() + @calibration = true + + + + +mock_backend = true +ajax_url = $('.peer-grading').data('ajax_url') +backend = new PeerGradingProblemBackend(ajax_url, mock_backend) $(document).ready(() -> new PeerGradingProblem(backend)) diff --git a/lms/templates/peer_grading/peer_grading.html b/lms/templates/peer_grading/peer_grading.html index b5a1408fde..02c69954b7 100644 --- a/lms/templates/peer_grading/peer_grading.html +++ b/lms/templates/peer_grading/peer_grading.html @@ -19,7 +19,7 @@
${error_text}

Peer Grading

Instructions

-

+

Here are a list of problems that need to be peer graded for this course.

% if success:
    %for problem in problem_list: diff --git a/lms/templates/peer_grading/peer_grading_problem.html b/lms/templates/peer_grading/peer_grading_problem.html index 7ea47f04f7..9e6e44e699 100644 --- a/lms/templates/peer_grading/peer_grading_problem.html +++ b/lms/templates/peer_grading/peer_grading_problem.html @@ -15,8 +15,58 @@ <%static:js group='peer_grading'/> +
    -
    -
    ${error_text}
    +
    +
    +
    + +
    +
    +

    Calibration

    +
    +
    +

    Grading

    +
    +
    + +
    +
    +

    Question

    +
    +
    +
    +
    +

    Grading Rubric

    +
    +
    +
    + +
    + + +
    +

    Grading

    + +
    +
    +

    +
    +
    +
    +
    +

    +

    + +
    + + +
    + +
    + +
    +
    diff --git a/lms/urls.py b/lms/urls.py index 7f30224ad5..4e4fafcc23 100644 --- a/lms/urls.py +++ b/lms/urls.py @@ -250,12 +250,17 @@ if settings.COURSEWARE_ENABLED: 'open_ended_grading.staff_grading_service.save_grade', name='staff_grading_save_grade'), url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/staff_grading/get_problem_list$', 'open_ended_grading.staff_grading_service.get_problem_list', name='staff_grading_get_problem_list'), - url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading/get_next_submission$', - 'open_ended_grading.peer_grading_service.get_next_submission', name='peer_grading_get_next_submission'), + + + # Peer Grading url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading$', 'open_ended_grading.views.peer_grading', name='peer_grading'), url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading/problem/(?P.*)$', 'open_ended_grading.views.peer_grading_problem', name='peer_grading_problem'), + url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading/get_next_submission$', + 'open_ended_grading.peer_grading_service.get_next_submission', name='peer_grading_get_next_submission'), + url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading/show_calibration_essay$', + 'open_ended_grading.peer_grading_service.show_calibration_essay', name='peer_grading_show_calibration_essay'), ) From bd47b0c79ae6778a11a4bb9cca98312235432026 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Thu, 3 Jan 2013 12:59:34 -0500 Subject: [PATCH 08/49] New urls and corresponding views in the grading service --- .../peer_grading_service.py | 85 +++++++++++++++++-- lms/urls.py | 7 +- 2 files changed, 86 insertions(+), 6 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/peer_grading_service.py b/lms/djangoapps/open_ended_grading/peer_grading_service.py index daccdd53af..cd4a94f26d 100644 --- a/lms/djangoapps/open_ended_grading/peer_grading_service.py +++ b/lms/djangoapps/open_ended_grading/peer_grading_service.py @@ -118,9 +118,6 @@ def get_next_submission(request, course_id): mimetype="application/json") def 
_get_next_submission(course_id, grader_id, location): - """ - Implementation of get_next (also called from save_grade) -- returns a json string - """ try: return peer_grading_service().get_next_submission(location, grader_id) except GradingServiceError: @@ -129,6 +126,57 @@ def _get_next_submission(course_id, grader_id, location): return json.dumps({'success': False, 'error': 'Could not connect to grading service'}) +def save_grade(request, course_id): + """ + TODO: fill in this documentation + """ + _check_post(request) + required = set(['location', 'grader_id', 'submission_id', 'submission_key', 'score', 'feedback']) + success, message = _check_required(request, required) + if not success: + return _err_response(message) + grader_id = request.user.id + p = request.POST + location = p['location'] + submission_id = p['submission_id'] + score = p['score'] + feedback = p['feedback'] + submission_key = p['submission_key'] + try: + response = peer_grading_service().save_grade(grader_id, submission_id, + score, feedback, submission_key) + return HttpResponse(response, mimetype="application/json") + except GradingServiceError: + log.exception("Error from grading service. server url: {0}" + .format(staff_grading_service().url)) + return json.dumps({'success': False, + 'error': 'Could not connect to grading service'}) + + + +def is_student_calibrated(request, course_id): + """ + TODO: fill in this documentation + """ + _check_post(request) + required = set(['location']) + success, message = _check_required(request, required) + if not success: + return _err_response(message) + grader_id = request.user.id + p = request.POST + location = p['location'] + + try: + response = peer_grading_service().is_student_calibrated(location, grader_id) + return HttpResponse(response, mimetype="application/json") + except GradingServiceError: + log.exception("Error from grading service. 
server url: {0}" + .format(staff_grading_service().url)) + return json.dumps({'success': False, + 'error': 'Could not connect to grading service'}) + + def show_calibration_essay(request, course_id): """ @@ -144,12 +192,39 @@ def show_calibration_essay(request, course_id): grader_id = request.user.id p = request.POST location = p['location'] + return HttpResponse(_next_calibration_essay(course_id, grader_id, location), + mimetype="application/json") + +def _next_calibration_essay(course_id, grader_id, location): try: - response = peer_grading_service().show_calibration_essay(location, grader_id) - return HttpResponse(response, mimetype="application/json") + return peer_grading_service().show_calibration_essay(location, grader_id) except GradingServiceError: log.exception("Error from grading service. server url: {0}" .format(staff_grading_service().url)) return json.dumps({'success': False, 'error': 'Could not connect to grading service'}) + +def save_calibration_essay(request, course_id): + """ + """ + _check_post(request) + + required = set(['location', 'calibration_essay_id', 'submission_key', 'score', 'feedback']) + success, message = _check_required(request, required) + if not success: + return _err_response(message) + grader_id = request.user.id + p = request.POST + location = p['location'] + calibration_essay_id = p['calibration_essay_id'] + submission_key = p['submission_key'] + score = p['score'] + feedback = p['feedback'] + + try: + response = peer_grading_service().save_calibration_essay(location, grader_id, calibration_essay_id, submission_key, score, feedback) + return HttpResponse(response, mimetype="application/json") + except GradingServiceError: + log.exception("Error saving calibration grade") + return _err_response('Could not connect to grading service') diff --git a/lms/urls.py b/lms/urls.py index 4e4fafcc23..ed12aa9682 100644 --- a/lms/urls.py +++ b/lms/urls.py @@ -261,7 +261,12 @@ if settings.COURSEWARE_ENABLED: 
'open_ended_grading.peer_grading_service.get_next_submission', name='peer_grading_get_next_submission'), url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading/show_calibration_essay$', 'open_ended_grading.peer_grading_service.show_calibration_essay', name='peer_grading_show_calibration_essay'), - + url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading/is_student_calibrated$', + 'open_ended_grading.peer_grading_service.is_student_calibrated', name='peer_grading_is_student_calibrated'), + url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading/save_grade$', + 'open_ended_grading.peer_grading_service.save_grade', name='peer_grading_save_grade'), + url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading/save_calibration_essay$', + 'open_ended_grading.peer_grading_service.save_calibration_essay', name='peer_grading_save_calibration_essay'), ) # discussion forums live within courseware, so courseware must be enabled first From 962d83d71b81f20a85c122ee83afeeb233a989a6 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Thu, 3 Jan 2013 13:00:02 -0500 Subject: [PATCH 09/49] Display a message when there are no problems to grade --- lms/templates/peer_grading/peer_grading.html | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/lms/templates/peer_grading/peer_grading.html b/lms/templates/peer_grading/peer_grading.html index 02c69954b7..ee338cecbb 100644 --- a/lms/templates/peer_grading/peer_grading.html +++ b/lms/templates/peer_grading/peer_grading.html @@ -21,13 +21,19 @@

    Instructions

    Here are a list of problems that need to be peer graded for this course.

    % if success: - + % if len(problem_list) == 0: +
    + Nothing to grade! +
    + %else: + + %endif %endif
From 1c1034c63ba8ba75239e3084fbaa97107bd76f51 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Thu, 3 Jan 2013 13:47:03 -0500 Subject: [PATCH 10/49] Pass location as a parameter, not as a part of the url --- lms/djangoapps/open_ended_grading/views.py | 7 ++++++- lms/urls.py | 2 +- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/views.py b/lms/djangoapps/open_ended_grading/views.py index 6d41d47812..1026888987 100644 --- a/lms/djangoapps/open_ended_grading/views.py +++ b/lms/djangoapps/open_ended_grading/views.py @@ -71,6 +71,7 @@ def peer_grading(request, course_id): # call problem list service success = False error_text = "" + problem_list = [] try: problem_list_text = peer_gs.get_problem_list(course_id, request.user.id) problem_list_json = json.loads(problem_list_text) @@ -83,6 +84,9 @@ def peer_grading(request, course_id): except GradingServiceError: error_text = "Error occured while contacting the grading service" success = False + except ValueError: + error_text = "Could not get problem list" + success = False ajax_url = reverse('peer_grading', kwargs={'course_id': course_id}) if not ajax_url.endswith('/'): @@ -100,11 +104,12 @@ def peer_grading(request, course_id): 'staff_access': False, }) -def peer_grading_problem(request, course_id, problem_location): +def peer_grading_problem(request, course_id): ''' Show individual problem interface ''' course = get_course_with_access(request.user, course_id, 'load') + problem_location = request.GET.get("location") ajax_url = reverse('peer_grading', kwargs={'course_id': course_id}) if not ajax_url.endswith('/'): diff --git a/lms/urls.py b/lms/urls.py index ed12aa9682..5bc55d6f10 100644 --- a/lms/urls.py +++ b/lms/urls.py @@ -255,7 +255,7 @@ if settings.COURSEWARE_ENABLED: # Peer Grading url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading$', 'open_ended_grading.views.peer_grading', name='peer_grading'), - url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading/problem/(?P.*)$', + 
url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading/problem$', 'open_ended_grading.views.peer_grading_problem', name='peer_grading_problem'), url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading/get_next_submission$', 'open_ended_grading.peer_grading_service.get_next_submission', name='peer_grading_get_next_submission'), From c0001597a3f8f2ade4c183c252648e436788fee1 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Thu, 3 Jan 2013 13:47:29 -0500 Subject: [PATCH 11/49] Update some of the templates so that they don't break --- lms/templates/peer_grading/peer_grading.html | 2 +- lms/templates/peer_grading/peer_grading_problem.html | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/lms/templates/peer_grading/peer_grading.html b/lms/templates/peer_grading/peer_grading.html index ee338cecbb..484bb94182 100644 --- a/lms/templates/peer_grading/peer_grading.html +++ b/lms/templates/peer_grading/peer_grading.html @@ -29,7 +29,7 @@ diff --git a/lms/templates/peer_grading/peer_grading_problem.html b/lms/templates/peer_grading/peer_grading_problem.html index 9e6e44e699..116da94ece 100644 --- a/lms/templates/peer_grading/peer_grading_problem.html +++ b/lms/templates/peer_grading/peer_grading_problem.html @@ -17,7 +17,7 @@
-
+
@@ -53,6 +53,8 @@

+ +

From becffd4dbb5a75410309fe6da0d9181de2493dff Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Thu, 3 Jan 2013 14:08:56 -0500 Subject: [PATCH 12/49] Updated html and javascript for new pages as well as a fix for the peer grading service --- .../peer_grading_service.py | 1 + .../peer_grading/peer_grading_problem.coffee | 146 ++++++++++++++++-- .../peer_grading/peer_grading_problem.html | 6 +- 3 files changed, 135 insertions(+), 18 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/peer_grading_service.py b/lms/djangoapps/open_ended_grading/peer_grading_service.py index cd4a94f26d..70d0721b3b 100644 --- a/lms/djangoapps/open_ended_grading/peer_grading_service.py +++ b/lms/djangoapps/open_ended_grading/peer_grading_service.py @@ -65,6 +65,7 @@ class PeerGradingService(GradingService): return response +_service = None def peer_grading_service(): """ Return a peer grading service instance--if settings.MOCK_PEER_GRADING is True, diff --git a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee index 5b7aef18c7..7c0921cc98 100644 --- a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee +++ b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee @@ -18,14 +18,27 @@ class PeerGradingProblemBackend success: true calibrated: false else if cmd == 'show_calibration_essay' + #response = + # success: false + # error: "There was an error" response = success: true submission_id: 1 submission_key: 'abcd' - student_response: 'I am a fake response' + student_response: 'I am a fake calibration response' prompt: 'Answer this question' rubric: 'This is a rubric.' max_score: 4 + else if cmd == 'get_next_submission' + response = + success: true + submission_id: 1 + submission_key: 'abcd' + student_response: 'I am a fake student response' + prompt: 'Answer this question' + rubric: 'This is a rubric.' 
+ max_score: 4 + return response @@ -48,35 +61,51 @@ class PeerGradingProblem @submission_container = $('.submission-container') @prompt_container = $('.prompt-container') @rubric_container = $('.rubric-container') + @instructions_panel = $('.instructions-panel') + @content_panel = $('.content-panel') @error_container = $('.error-container') + @submission_key_input = $("input[name='submission-key']") + @essay_id_input = $("input[name='essay-id']") + + @score_selection_container = $('.score-selection-container') + @score = null + + @submit_button = $('.submit-button') + @action_button = $('.action-button') + + @action_button.click -> document.location.reload(true) + @is_calibrated_check() + ########## + # + # Ajax calls to the backend + # + ########## is_calibrated_check: () => - @backend.post('is_student_calibrated', {}, @calibration_check_callback) + @backend.post('is_student_calibrated', {location: @location}, @calibration_check_callback) - - fetch_calibration_essay: ()=> + fetch_calibration_essay: () => @backend.post('show_calibration_essay', {location: @location}, @render_calibration) - render_calibration: (response) => - if response.success - #TODO: fill this in + fetch_submission_essay: () => + @backend.post('get_next_submission', {location: @location}, @render_submission) - @submission_container.html("

Calibration Essay

") - @submission_container.append(response.student_response) - @prompt_container.html(response.prompt) - @rubric_container.html(response.rubric) + submit_calibration_essay: ()-> + #TODO: onclick of the submit button. submits the calibration essay grade - else - @error_container.show() - @error_container.html(response.error) - - render_submission: (response) -> - #TODO: fill this in + submit_grade: () -> + #TODO: onclick of the submit button. submits the grade + + ########## + # + # Callbacks for various events + # + ########## calibration_check_callback: (response) => if response.success # check whether or not we're still calibrating @@ -86,6 +115,89 @@ class PeerGradingProblem else @fetch_calibration_essay() @calibration = true + else if response.error + @render_error(response.error) + else + @render_error("Error contacting the grading service") + + + submission_callback: (response) => + if response.success + @is_calibrated_check() + else + if response.error + @render_error(response.error) + else + @render_error("Error occurred while submitting grade") + + graded_callback: (event) => + @score = event.target.value + @show_submit_button() + + + + ########## + # + # Rendering methods and helpers + # + ########## + render_calibration: (response) => + if response.success + + # load in all the data + @submission_container.html("

Calibration Essay

") + @render_submission_data(response) + + # TODO: indicate that we're in calibration mode + + else + if response.error + @render_error(response.error) + else + @render_error("An error occurred while contacting the grading server") + + render_submission_data: (response) => + @content_panel.show() + @submission_container.append(response.student_response) + @prompt_container.html(response.prompt) + @rubric_container.html(response.rubric) + @submission_key_input.val(response.submission_key) + @essay_id_input.val(response.submission_id) + @setup_score_selection(response.max_score) + @submit_button.hide() + @action_button.hide() + + + render_submission: (response) => + #TODO: fill this in + @submit_button.hide() + @render_submission_data(response) + + render_error: (error_message) => + @error_container.show() + @error_container.html(error_message) + @content_panel.hide() + + show_submit_button: () => + @submit_button.show() + + setup_score_selection: (max_score) => + # first, get rid of all the old inputs, if any. + @score_selection_container.html('Choose score: ') + + # Now create new labels and inputs for each possible score. + for score in [0..max_score] + id = 'score-' + score + label = """""" + + input = """ + + """ # " fix broken parsing in emacs + @score_selection_container.append(input + label) + + # And now hook up an event handler again + $("input[name='score-selection']").change @graded_callback + diff --git a/lms/templates/peer_grading/peer_grading_problem.html b/lms/templates/peer_grading/peer_grading_problem.html index 116da94ece..dd577c1295 100644 --- a/lms/templates/peer_grading/peer_grading_problem.html +++ b/lms/templates/peer_grading/peer_grading_problem.html @@ -21,6 +21,7 @@
+

Calibration

@@ -68,7 +69,10 @@
-
+
+
+
+ From 078b2a5b95e742c49108a0c01abbb395fbb4d129 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Thu, 3 Jan 2013 16:19:04 -0500 Subject: [PATCH 13/49] Fix up some minor state issues and complete basic JS functionality for page --- .../peer_grading_service.py | 6 +- .../peer_grading/peer_grading_problem.coffee | 88 +++++++++++++++---- lms/static/sass/course/_staff_grading.scss | 5 ++ 3 files changed, 78 insertions(+), 21 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/peer_grading_service.py b/lms/djangoapps/open_ended_grading/peer_grading_service.py index 70d0721b3b..af04b0c43d 100644 --- a/lms/djangoapps/open_ended_grading/peer_grading_service.py +++ b/lms/djangoapps/open_ended_grading/peer_grading_service.py @@ -132,7 +132,7 @@ def save_grade(request, course_id): TODO: fill in this documentation """ _check_post(request) - required = set(['location', 'grader_id', 'submission_id', 'submission_key', 'score', 'feedback']) + required = set(['location', 'submission_id', 'submission_key', 'score', 'feedback']) success, message = _check_required(request, required) if not success: return _err_response(message) @@ -211,14 +211,14 @@ def save_calibration_essay(request, course_id): """ _check_post(request) - required = set(['location', 'calibration_essay_id', 'submission_key', 'score', 'feedback']) + required = set(['location', 'submission_id', 'submission_key', 'score', 'feedback']) success, message = _check_required(request, required) if not success: return _err_response(message) grader_id = request.user.id p = request.POST location = p['location'] - calibration_essay_id = p['calibration_essay_id'] + calibration_essay_id = p['submission_id'] submission_key = p['submission_key'] score = p['score'] feedback = p['feedback'] diff --git a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee index 7c0921cc98..20a7948aa2 100644 --- 
a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee +++ b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee @@ -16,7 +16,7 @@ class PeerGradingProblemBackend # change to test each version response = success: true - calibrated: false + calibrated: true else if cmd == 'show_calibration_essay' #response = # success: false @@ -38,7 +38,12 @@ class PeerGradingProblemBackend prompt: 'Answer this question' rubric: 'This is a rubric.' max_score: 4 - + else if cmd == 'save_calibration_essay' + response = + success: true + else if cmd == 'save_grade' + response = + success: true return response @@ -61,13 +66,15 @@ class PeerGradingProblem @submission_container = $('.submission-container') @prompt_container = $('.prompt-container') @rubric_container = $('.rubric-container') - @instructions_panel = $('.instructions-panel') + @calibration_panel = $('.calibration-panel') + @grading_panel = $('.grading-panel') @content_panel = $('.content-panel') @error_container = $('.error-container') @submission_key_input = $("input[name='submission-key']") @essay_id_input = $("input[name='essay-id']") + @feedback_area = $('.feedback-area') @score_selection_container = $('.score-selection-container') @score = null @@ -94,11 +101,23 @@ class PeerGradingProblem fetch_submission_essay: () => @backend.post('get_next_submission', {location: @location}, @render_submission) - submit_calibration_essay: ()-> - #TODO: onclick of the submit button. submits the calibration essay grade + construct_data: () => + data = + score: @score + location: @location + submission_id: @essay_id_input.val() + submission_key: @submission_key_input.val() + feedback: @feedback_area.val() + return data - submit_grade: () -> - #TODO: onclick of the submit button. 
submits the grade + + submit_calibration_essay: ()=> + data = @construct_data() + @backend.post('save_calibration_essay', data, @submission_callback) + + submit_grade: () => + data = @construct_data() + @backend.post('save_grade', data, @submission_callback) ########## @@ -110,7 +129,7 @@ class PeerGradingProblem if response.success # check whether or not we're still calibrating if response.calibrated - @fetch_submission() + @fetch_submission_essay() @calibration = false else @fetch_calibration_essay() @@ -147,18 +166,55 @@ class PeerGradingProblem # load in all the data @submission_container.html("

Calibration Essay

") @render_submission_data(response) - # TODO: indicate that we're in calibration mode + @calibration_panel.addClass('current-state') + @grading_panel.removeClass('current-state') + # clear out all of the existing text + @calibration_panel.find('p').remove() + @grading_panel.find('p').remove() + + # add in new text + + + @submit_button.click @submit_calibration_essay + + else if response.error + @render_error(response.error) else - if response.error - @render_error(response.error) - else - @render_error("An error occurred while contacting the grading server") + @render_error("An error occurred while retrieving the next calibration essay") + + render_submission: (response) => + if response.success + #TODO: fill this in + @submit_button.hide() + @submission_container.html("

Submitted Essay

") + @render_submission_data(response) + + @calibration_panel.removeClass('current-state') + @grading_panel.addClass('current-state') + + # clear out all of the existing text + @calibration_panel.find('p').remove() + @grading_panel.find('p').remove() + + @submit_button.click @submit_grade + else if response.error + @render_error(response.error) + else + @render_error("An error occured when retrieving the next submission.") + + + make_paragraphs: (text) -> + paragraph_split = text.split(/\n\s*\n/) + new_text = '' + for paragraph in paragraph_split + new_text += "

#{paragraph}

" + return new_text render_submission_data: (response) => @content_panel.show() - @submission_container.append(response.student_response) + @submission_container.append(@make_paragraphs(response.student_response)) @prompt_container.html(response.prompt) @rubric_container.html(response.rubric) @submission_key_input.val(response.submission_key) @@ -168,10 +224,6 @@ class PeerGradingProblem @action_button.hide() - render_submission: (response) => - #TODO: fill this in - @submit_button.hide() - @render_submission_data(response) render_error: (error_message) => @error_container.show() diff --git a/lms/static/sass/course/_staff_grading.scss b/lms/static/sass/course/_staff_grading.scss index bb7f6cef45..d2006e10a7 100644 --- a/lms/static/sass/course/_staff_grading.scss +++ b/lms/static/sass/course/_staff_grading.scss @@ -82,6 +82,11 @@ div.peer-grading{ margin-bottom:5px; font-size: .8em; } + + .current-state + { + background: #eee; + } padding: 40px; } From fe86c25f7218182089c90d41a0857009957ec1b0 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Thu, 3 Jan 2013 17:23:30 -0500 Subject: [PATCH 14/49] Bug fixes for the JS and the peer grading service --- .../open_ended_grading/peer_grading_service.py | 10 +++++----- .../src/peer_grading/peer_grading_problem.coffee | 7 +++---- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/peer_grading_service.py b/lms/djangoapps/open_ended_grading/peer_grading_service.py index af04b0c43d..6b35f55f01 100644 --- a/lms/djangoapps/open_ended_grading/peer_grading_service.py +++ b/lms/djangoapps/open_ended_grading/peer_grading_service.py @@ -33,12 +33,13 @@ class PeerGradingService(GradingService): {'location': problem_location, 'grader_id': grader_id}) return response - def save_grade(self, grader_id, submission_id, score, feedback, submission_key): + def save_grade(self, location, grader_id, submission_id, score, feedback, submission_key): data = {'grader_id' : grader_id, 'submission_id' : 
submission_id, 'score' : score, 'feedback' : feedback, - 'submission_key': submission_key} + 'submission_key': submission_key, + 'location': location} return self.post(self.save_grade_url, False, data) def is_student_calibrated(self, problem_location, grader_id): @@ -61,7 +62,6 @@ class PeerGradingService(GradingService): def get_problem_list(self, course_id, grader_id): params = {'course_id': course_id, 'student_id': grader_id} response = self.get(self.get_problem_list_url, False, params) - log.debug("Response! {0}".format(response)) return response @@ -115,7 +115,7 @@ def get_next_submission(request, course_id): p = request.POST location = p['location'] - return HttpResponse(_get_next(course_id, request.user.id, location), + return HttpResponse(_get_next_submission(course_id, request.user.id, location), mimetype="application/json") def _get_next_submission(course_id, grader_id, location): @@ -144,7 +144,7 @@ def save_grade(request, course_id): feedback = p['feedback'] submission_key = p['submission_key'] try: - response = peer_grading_service().save_grade(grader_id, submission_id, + response = peer_grading_service().save_grade(location, grader_id, submission_id, score, feedback, submission_key) return HttpResponse(response, mimetype="application/json") except GradingServiceError: diff --git a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee index 20a7948aa2..04b47671a1 100644 --- a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee +++ b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee @@ -45,7 +45,6 @@ class PeerGradingProblemBackend response = success: true - return response @@ -101,9 +100,9 @@ class PeerGradingProblem fetch_submission_essay: () => @backend.post('get_next_submission', {location: @location}, @render_submission) - construct_data: () => + construct_data: () -> data = - score: @score + score: $('input[name="score-selection"]:checked').val() 
location: @location submission_id: @essay_id_input.val() submission_key: @submission_key_input.val() @@ -254,7 +253,7 @@ class PeerGradingProblem -mock_backend = true +mock_backend = false ajax_url = $('.peer-grading').data('ajax_url') backend = new PeerGradingProblemBackend(ajax_url, mock_backend) $(document).ready(() -> new PeerGradingProblem(backend)) From c555bf5a8d470a3a3a21784859c6e6769d49c878 Mon Sep 17 00:00:00 2001 From: ichuang Date: Thu, 3 Jan 2013 22:57:24 -0500 Subject: [PATCH 15/49] fix capa to add contextualize_text for optionresponse and multiplechoice --- common/lib/capa/capa/responsetypes.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/common/lib/capa/capa/responsetypes.py b/common/lib/capa/capa/responsetypes.py index 3e79ca2084..1bc34b70a3 100644 --- a/common/lib/capa/capa/responsetypes.py +++ b/common/lib/capa/capa/responsetypes.py @@ -629,7 +629,7 @@ class MultipleChoiceResponse(LoncapaResponse): # define correct choices (after calling secondary setup) xml = self.xml cxml = xml.xpath('//*[@id=$id]//choice[@correct="true"]', id=xml.get('id')) - self.correct_choices = [choice.get('name') for choice in cxml] + self.correct_choices = [contextualize_text(choice.get('name'), self.context) for choice in cxml] def mc_setup_response(self): ''' @@ -723,7 +723,7 @@ class OptionResponse(LoncapaResponse): return cmap def get_answers(self): - amap = dict([(af.get('id'), af.get('correct')) for af in self.answer_fields]) + amap = dict([(af.get('id'), contextualize_text(af.get('correct'), self.context)) for af in self.answer_fields]) # log.debug('%s: expected answers=%s' % (unicode(self),amap)) return amap From b5f34a9da6d7d02c0964bd4a02b84de95dad8421 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Fri, 4 Jan 2013 11:03:24 -0500 Subject: [PATCH 16/49] Show special text when calibrating or when grading. 
--- .../peer_grading/peer_grading_problem.coffee | 30 +++++++++++------ lms/static/sass/course/_staff_grading.scss | 33 +++++++++++++++++-- .../peer_grading/peer_grading_problem.html | 12 +++++++ 3 files changed, 63 insertions(+), 12 deletions(-) diff --git a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee index 04b47671a1..d269a23c1e 100644 --- a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee +++ b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee @@ -69,6 +69,8 @@ class PeerGradingProblem @grading_panel = $('.grading-panel') @content_panel = $('.content-panel') + @grading_wrapper =$('.grading-wrapper') + @error_container = $('.error-container') @submission_key_input = $("input[name='submission-key']") @@ -76,7 +78,6 @@ class PeerGradingProblem @feedback_area = $('.feedback-area') @score_selection_container = $('.score-selection-container') - @score = null @submit_button = $('.submit-button') @action_button = $('.action-button') @@ -138,6 +139,13 @@ class PeerGradingProblem else @render_error("Error contacting the grading service") + calibration_callback: (response) => + if response.success + # display correct grade + @grading_wrapper.hide() + + else if response.error + @render_error(response.error) submission_callback: (response) => if response.success @@ -149,7 +157,6 @@ class PeerGradingProblem @render_error("Error occurred while submitting grade") graded_callback: (event) => - @score = event.target.value @show_submit_button() @@ -170,12 +177,14 @@ class PeerGradingProblem @grading_panel.removeClass('current-state') # clear out all of the existing text - @calibration_panel.find('p').remove() - @grading_panel.find('p').remove() - - # add in new text + @calibration_panel.find('.calibration-text').show() + @grading_panel.find('.calibration-text').show() + @calibration_panel.find('.grading-text').hide() + @grading_panel.find('.grading-text').hide() + # TODO: 
add in new text + @submit_button.unbind('click') @submit_button.click @submit_calibration_essay else if response.error @@ -194,9 +203,12 @@ class PeerGradingProblem @grading_panel.addClass('current-state') # clear out all of the existing text - @calibration_panel.find('p').remove() - @grading_panel.find('p').remove() + @calibration_panel.find('.calibration-text').hide() + @grading_panel.find('.calibration-text').hide() + @calibration_panel.find('.grading-text').show() + @grading_panel.find('.grading-text').show() + @submit_button.unbind('click') @submit_button.click @submit_grade else if response.error @render_error(response.error) @@ -251,8 +263,6 @@ class PeerGradingProblem - - mock_backend = false ajax_url = $('.peer-grading').data('ajax_url') backend = new PeerGradingProblemBackend(ajax_url, mock_backend) diff --git a/lms/static/sass/course/_staff_grading.scss b/lms/static/sass/course/_staff_grading.scss index d2006e10a7..2ca355e304 100644 --- a/lms/static/sass/course/_staff_grading.scss +++ b/lms/static/sass/course/_staff_grading.scss @@ -83,10 +83,39 @@ div.peer-grading{ font-size: .8em; } - .current-state + .instructions-panel { - background: #eee; + + > div + { + padding: 10px; + margin: 0px; + border: 1px solid black; + } + .calibration-panel + { + float:left; + width:47%; + } + .grading-panel + { + float:right; + width: 47%; + } + .current-state + { + background: #eee; + } + &:after + { + content:"."; + display:block; + height:0; + visibility: hidden; + clear:both; + } } + padding: 40px; } diff --git a/lms/templates/peer_grading/peer_grading_problem.html b/lms/templates/peer_grading/peer_grading_problem.html index dd577c1295..c29d509bb8 100644 --- a/lms/templates/peer_grading/peer_grading_problem.html +++ b/lms/templates/peer_grading/peer_grading_problem.html @@ -25,9 +25,21 @@

Calibration

+
+

You are being calibrated on this problem

+
+
+

You have successfully calibrated on this problem

+

Grading

+
+

You cannot start grading until you have finished calibrating

+
+
+

Now that you are done calibrating, you can now start grading.

+
From ad63c492bedc75e6dab86bb3de1c117315805fd1 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Fri, 4 Jan 2013 12:42:38 -0500 Subject: [PATCH 17/49] Minor updates to the page to handle showing Calibration feedback. Also, allow for the hiding of the Prompt and the Rubric. --- lms/envs/common.py | 1 + .../peer_grading/peer_grading_problem.coffee | 17 ++++++++++++++--- lms/static/sass/course/_staff_grading.scss | 5 ++++- .../peer_grading/peer_grading_problem.html | 8 +++++++- 4 files changed, 26 insertions(+), 5 deletions(-) diff --git a/lms/envs/common.py b/lms/envs/common.py index a24422df50..3b83b708aa 100644 --- a/lms/envs/common.py +++ b/lms/envs/common.py @@ -497,6 +497,7 @@ PIPELINE_JS = { for pth in sorted(glob2.glob(PROJECT_ROOT / 'static/coffee/src/**/*.coffee'))\ if (pth not in courseware_only_js and pth not in discussion_js and + pth not in peer_grading_js and pth not in staff_grading_js) ] + [ 'js/form.ext.js', diff --git a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee index d269a23c1e..6437742aac 100644 --- a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee +++ b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee @@ -16,7 +16,7 @@ class PeerGradingProblemBackend # change to test each version response = success: true - calibrated: true + calibrated: false else if cmd == 'show_calibration_essay' #response = # success: false @@ -41,6 +41,7 @@ class PeerGradingProblemBackend else if cmd == 'save_calibration_essay' response = success: true + correct_score: 2 else if cmd == 'save_grade' response = success: true @@ -70,6 +71,7 @@ class PeerGradingProblem @content_panel = $('.content-panel') @grading_wrapper =$('.grading-wrapper') + @calibration_feedback_panel = $('.calibration-feedback') @error_container = $('.error-container') @@ -81,8 +83,14 @@ class PeerGradingProblem @submit_button = $('.submit-button') @action_button = $('.action-button') + 
@calibration_feedback_button = $('.calibration-feedback-button') + Collapsible.setCollapsibles(@content_panel) @action_button.click -> document.location.reload(true) + @calibration_feedback_button.click => + @calibration_feedback_panel.hide() + @grading_wrapper.show() + @is_calibrated_check @is_calibrated_check() @@ -113,7 +121,7 @@ class PeerGradingProblem submit_calibration_essay: ()=> data = @construct_data() - @backend.post('save_calibration_essay', data, @submission_callback) + @backend.post('save_calibration_essay', data, @calibration_callback) submit_grade: () => data = @construct_data() @@ -143,6 +151,8 @@ class PeerGradingProblem if response.success # display correct grade @grading_wrapper.hide() + @calibration_feedback_panel.show() + @calibration_feedback_panel.prepend("

The correct grade is: #{response.correct_score}

") else if response.error @render_error(response.error) @@ -233,6 +243,7 @@ class PeerGradingProblem @setup_score_selection(response.max_score) @submit_button.hide() @action_button.hide() + @calibration_feedback_panel.hide() @@ -263,7 +274,7 @@ class PeerGradingProblem -mock_backend = false +mock_backend = true ajax_url = $('.peer-grading').data('ajax_url') backend = new PeerGradingProblemBackend(ajax_url, mock_backend) $(document).ready(() -> new PeerGradingProblem(backend)) diff --git a/lms/static/sass/course/_staff_grading.scss b/lms/static/sass/course/_staff_grading.scss index 2ca355e304..06a83651d7 100644 --- a/lms/static/sass/course/_staff_grading.scss +++ b/lms/static/sass/course/_staff_grading.scss @@ -37,7 +37,6 @@ div.peer-grading{ } .prompt-information-container, - .submission-wrapper, .rubric-wrapper, .grading-container { @@ -50,6 +49,10 @@ div.peer-grading{ padding: 15px; margin-left: 0px; } + .submission-wrapper + { + padding: 15px; + } .meta-info-wrapper { background-color: #eee; diff --git a/lms/templates/peer_grading/peer_grading_problem.html b/lms/templates/peer_grading/peer_grading_problem.html index c29d509bb8..08fd33be66 100644 --- a/lms/templates/peer_grading/peer_grading_problem.html +++ b/lms/templates/peer_grading/peer_grading_problem.html @@ -43,7 +43,11 @@ -
+
+
+ Display problem information +
+

Question

@@ -56,6 +60,7 @@
+
@@ -85,6 +90,7 @@
+
From befe36f6263dcd793c4d4b54d32469714f754c01 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Fri, 4 Jan 2013 14:11:48 -0500 Subject: [PATCH 18/49] Improvements to calibration feedback step --- .../peer_grading/peer_grading_problem.coffee | 49 +++++++++++++++---- lms/static/sass/course/_staff_grading.scss | 20 ++++++++ .../peer_grading/peer_grading_problem.html | 9 +++- 3 files changed, 66 insertions(+), 12 deletions(-) diff --git a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee index 6437742aac..4463c1ca85 100644 --- a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee +++ b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee @@ -25,9 +25,26 @@ class PeerGradingProblemBackend success: true submission_id: 1 submission_key: 'abcd' - student_response: 'I am a fake calibration response' - prompt: 'Answer this question' - rubric: 'This is a rubric.' + student_response: ''' + Contrary to popular belief, Lorem Ipsum is not simply random text. It has roots in a piece of classical Latin literature from 45 BC, making it over 2000 years old. Richard McClintock, a Latin professor at Hampden-Sydney College in Virginia, looked up one of the more obscure Latin words, consectetur, from a Lorem Ipsum passage, and going through the cites of the word in classical literature, discovered the undoubtable source. Lorem Ipsum comes from sections 1.10.32 and 1.10.33 of "de Finibus Bonorum et Malorum" (The Extremes of Good and Evil) by Cicero, written in 45 BC. This book is a treatise on the theory of ethics, very popular during the Renaissance. The first line of Lorem Ipsum, "Lorem ipsum dolor sit amet..", comes from a line in section 1.10.32. + +The standard chunk of Lorem Ipsum used since the 1500s is reproduced below for those interested. 
Sections 1.10.32 and 1.10.33 from "de Finibus Bonorum et Malorum" by Cicero are also reproduced in their exact original form, accompanied by English versions from the 1914 translation by H. Rackham. + ''' + prompt: ''' +

S11E3: Metal Bands

+

Shown below are schematic band diagrams for two different metals. Both diagrams appear different, yet both of the elements are undisputably metallic in nature.

+

* Why is it that both sodium and magnesium behave as metals, even though the s-band of magnesium is filled?

+

This is a self-assessed open response question. Please use as much space as you need in the box below to answer the question.

+ ''' + rubric: ''' +
    +
  • Metals tend to be good electronic conductors, meaning that they have a large number of electrons which are able to access empty (mobile) energy states within the material.
  • +
  • Sodium has a half-filled s-band, so there are a number of empty states immediately above the highest occupied energy levels within the band.
  • +
  • Magnesium has a full s-band, but the the s-band and p-band overlap in magnesium. Thus are still a large number of available energy states immediately above the s-band highest occupied energy level.
  • +
+ +

Please score your response according to how many of the above components you identified:

+ ''' max_score: 4 else if cmd == 'get_next_submission' response = @@ -80,6 +97,7 @@ class PeerGradingProblem @feedback_area = $('.feedback-area') @score_selection_container = $('.score-selection-container') + @score = null @submit_button = $('.submit-button') @action_button = $('.action-button') @@ -90,7 +108,7 @@ class PeerGradingProblem @calibration_feedback_button.click => @calibration_feedback_panel.hide() @grading_wrapper.show() - @is_calibrated_check + @is_calibrated_check() @is_calibrated_check() @@ -111,7 +129,7 @@ class PeerGradingProblem construct_data: () -> data = - score: $('input[name="score-selection"]:checked').val() + score: @score location: @location submission_id: @essay_id_input.val() submission_key: @submission_key_input.val() @@ -149,11 +167,7 @@ class PeerGradingProblem calibration_callback: (response) => if response.success - # display correct grade - @grading_wrapper.hide() - @calibration_feedback_panel.show() - @calibration_feedback_panel.prepend("

The correct grade is: #{response.correct_score}

") - + @render_calibration_feedback(response) else if response.error @render_error(response.error) @@ -167,6 +181,7 @@ class PeerGradingProblem @render_error("Error occurred while submitting grade") graded_callback: (event) => + @score = event.target.value @show_submit_button() @@ -246,6 +261,20 @@ class PeerGradingProblem @calibration_feedback_panel.hide() + render_calibration_feedback: (response) => + # display correct grade + #@grading_wrapper.hide() + @calibration_feedback_panel.show() + calibration_wrapper = $('.calibration-feedback-wrapper') + calibration_wrapper.html("

The score you gave was: #{@score}. The correct score is: #{response.correct_score}

") + score = parseInt(@score) + correct_score = parseInt(response.correct_score) + + if score == correct_score + calibration_wrapper.append("

Congratulations! Your score matches the correct one!

") + else + calibration_wrapper.append("

Please try to understand the grading critera better so that you will be more accurate next time.

") + render_error: (error_message) => @error_container.show() diff --git a/lms/static/sass/course/_staff_grading.scss b/lms/static/sass/course/_staff_grading.scss index 06a83651d7..39868ecd22 100644 --- a/lms/static/sass/course/_staff_grading.scss +++ b/lms/static/sass/course/_staff_grading.scss @@ -38,6 +38,7 @@ div.peer-grading{ .prompt-information-container, .rubric-wrapper, + .calibration-feedback-wrapper, .grading-container { border: 1px solid gray; @@ -51,6 +52,14 @@ div.peer-grading{ } .submission-wrapper { + h3 + { + margin-bottom: 15px; + } + p + { + margin-left:10px; + } padding: 15px; } .meta-info-wrapper @@ -119,6 +128,17 @@ div.peer-grading{ } } + + .collapsible + { + margin-left: 0px; + header + { + margin-top:20px; + margin-bottom:20px; + font-size: 1.2em; + } + } padding: 40px; } diff --git a/lms/templates/peer_grading/peer_grading_problem.html b/lms/templates/peer_grading/peer_grading_problem.html index 08fd33be66..31e8efe290 100644 --- a/lms/templates/peer_grading/peer_grading_problem.html +++ b/lms/templates/peer_grading/peer_grading_problem.html @@ -45,7 +45,7 @@
- Display problem information + Show Problem Details
@@ -90,7 +90,12 @@
-
+
+

How did I do?

+
+
+ +
From fa09e25a1a381333cf3eb9059c2bb40a782f007b Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Fri, 4 Jan 2013 15:18:39 -0500 Subject: [PATCH 19/49] Add interstitial page between calibration and grading. Make individual Prompt and Rubric Sections hideable. --- .../peer_grading/peer_grading_problem.coffee | 47 +++++++++++++------ lms/static/sass/course/_staff_grading.scss | 9 +++- .../peer_grading/peer_grading_problem.html | 26 ++++++---- 3 files changed, 55 insertions(+), 27 deletions(-) diff --git a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee index 4463c1ca85..a341b55238 100644 --- a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee +++ b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee @@ -2,6 +2,7 @@ class PeerGradingProblemBackend constructor: (ajax_url, mock_backend) -> @mock_backend = mock_backend @ajax_url = ajax_url + @mock_cnt = 0 post: (cmd, data, callback) -> if @mock_backend @@ -16,11 +17,12 @@ class PeerGradingProblemBackend # change to test each version response = success: true - calibrated: false + calibrated: @mock_cnt >= 2 else if cmd == 'show_calibration_essay' #response = # success: false # error: "There was an error" + @mock_cnt++ response = success: true submission_id: 1 @@ -58,7 +60,7 @@ The standard chunk of Lorem Ipsum used since the 1500s is reproduced below for t else if cmd == 'save_calibration_essay' response = success: true - correct_score: 2 + actual_score: 2 else if cmd == 'save_grade' response = success: true @@ -89,6 +91,8 @@ class PeerGradingProblem @grading_wrapper =$('.grading-wrapper') @calibration_feedback_panel = $('.calibration-feedback') + @interstitial_page = $('.interstitial-page') + @interstitial_page.hide() @error_container = $('.error-container') @@ -98,10 +102,12 @@ class PeerGradingProblem @score_selection_container = $('.score-selection-container') @score = null + @calibration = null @submit_button = 
$('.submit-button') @action_button = $('.action-button') @calibration_feedback_button = $('.calibration-feedback-button') + @interstitial_page_button = $('.interstitial-page-button') Collapsible.setCollapsibles(@content_panel) @action_button.click -> document.location.reload(true) @@ -110,6 +116,10 @@ class PeerGradingProblem @grading_wrapper.show() @is_calibrated_check() + @interstitial_page_button.click => + @interstitial_page.hide() + @is_calibrated_check() + @is_calibrated_check() @@ -154,12 +164,15 @@ class PeerGradingProblem calibration_check_callback: (response) => if response.success # check whether or not we're still calibrating - if response.calibrated - @fetch_submission_essay() + if response.calibrated and (@calibration == null or @calibration == false) @calibration = false + @fetch_submission_essay() + else if response.calibrated and @calibration == true + @calibration = false + @render_interstitial_page() else - @fetch_calibration_essay() @calibration = true + @fetch_calibration_essay() else if response.error @render_error(response.error) else @@ -262,20 +275,24 @@ class PeerGradingProblem render_calibration_feedback: (response) => - # display correct grade + # display correct grade #@grading_wrapper.hide() - @calibration_feedback_panel.show() - calibration_wrapper = $('.calibration-feedback-wrapper') - calibration_wrapper.html("

The score you gave was: #{@score}. The correct score is: #{response.correct_score}

") - score = parseInt(@score) - correct_score = parseInt(response.correct_score) + @calibration_feedback_panel.show() + calibration_wrapper = $('.calibration-feedback-wrapper') + calibration_wrapper.html("

The score you gave was: #{@score}. The actual score is: #{response.actual_score}

") + score = parseInt(@score) + actual_score = parseInt(response.actual_score) + #TODO: maybe do another variation depending on whether or not students are close to correct - if score == correct_score - calibration_wrapper.append("

Congratulations! Your score matches the correct one!

") - else - calibration_wrapper.append("

Please try to understand the grading critera better so that you will be more accurate next time.

") + if score == actual_score + calibration_wrapper.append("

Congratulations! Your score matches the actual one!

") + else + calibration_wrapper.append("

Please try to understand the grading critera better so that you will be more accurate next time.

") + render_interstitial_page: () => + @content_panel.hide() + @interstitial_page.show() render_error: (error_message) => @error_container.show() @error_container.html(error_message) diff --git a/lms/static/sass/course/_staff_grading.scss b/lms/static/sass/course/_staff_grading.scss index 39868ecd22..075db8dbdc 100644 --- a/lms/static/sass/course/_staff_grading.scss +++ b/lms/static/sass/course/_staff_grading.scss @@ -36,8 +36,8 @@ div.peer-grading{ } } - .prompt-information-container, - .rubric-wrapper, + .prompt-container, + .rubric-container, .calibration-feedback-wrapper, .grading-container { @@ -103,6 +103,11 @@ div.peer-grading{ padding: 10px; margin: 0px; border: 1px solid black; + h3 + { + text-align:center; + text-transform:uppercase; + } } .calibration-panel { diff --git a/lms/templates/peer_grading/peer_grading_problem.html b/lms/templates/peer_grading/peer_grading_problem.html index 31e8efe290..cd01ddcfa2 100644 --- a/lms/templates/peer_grading/peer_grading_problem.html +++ b/lms/templates/peer_grading/peer_grading_problem.html @@ -43,23 +43,22 @@ -
-
- Show Problem Details -
-
-
-

Question

+
+ -
-

Grading Rubric

+
+
View Rubric
+
+
-
@@ -96,6 +95,13 @@ + +
+

Congratulations!

+

You have now completed calibration. You are now ready to start grading.

+ +
+ From 093560e85963e96636d1cd4bed074360ce0f2d32 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Fri, 4 Jan 2013 16:36:28 -0500 Subject: [PATCH 20/49] Updates to copy and styling and a few bugfixes --- .../peer_grading/peer_grading_problem.coffee | 21 ++++++++++++++---- lms/static/sass/course/_staff_grading.scss | 22 +++++++++++++++---- .../peer_grading/peer_grading_problem.html | 17 +++++++------- 3 files changed, 44 insertions(+), 16 deletions(-) diff --git a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee index a341b55238..43cd38e20d 100644 --- a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee +++ b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee @@ -54,8 +54,21 @@ The standard chunk of Lorem Ipsum used since the 1500s is reproduced below for t submission_id: 1 submission_key: 'abcd' student_response: 'I am a fake student response' - prompt: 'Answer this question' - rubric: 'This is a rubric.' + prompt: ''' +

S11E3: Metal Bands

+

Shown below are schematic band diagrams for two different metals. Both diagrams appear different, yet both of the elements are undisputably metallic in nature.

+

* Why is it that both sodium and magnesium behave as metals, even though the s-band of magnesium is filled?

+

This is a self-assessed open response question. Please use as much space as you need in the box below to answer the question.

+ ''' + rubric: ''' +
    +
  • Metals tend to be good electronic conductors, meaning that they have a large number of electrons which are able to access empty (mobile) energy states within the material.
  • +
  • Sodium has a half-filled s-band, so there are a number of empty states immediately above the highest occupied energy levels within the band.
  • +
  • Magnesium has a full s-band, but the the s-band and p-band overlap in magnesium. Thus are still a large number of available energy states immediately above the s-band highest occupied energy level.
  • +
+ +

Please score your response according to how many of the above components you identified:

+ ''' max_score: 4 else if cmd == 'save_calibration_essay' response = @@ -208,7 +221,7 @@ class PeerGradingProblem if response.success # load in all the data - @submission_container.html("

Calibration Essay

") + @submission_container.html("

Training Essay

") @render_submission_data(response) # TODO: indicate that we're in calibration mode @calibration_panel.addClass('current-state') @@ -288,7 +301,7 @@ class PeerGradingProblem calibration_wrapper.append("

Congratulations! Your score matches the actual one!

") else calibration_wrapper.append("

Please try to understand the grading critera better so that you will be more accurate next time.

") - + @submit_button.hide() render_interstitial_page: () => @content_panel.hide() diff --git a/lms/static/sass/course/_staff_grading.scss b/lms/static/sass/course/_staff_grading.scss index 075db8dbdc..47c437513b 100644 --- a/lms/static/sass/course/_staff_grading.scss +++ b/lms/static/sass/course/_staff_grading.scss @@ -36,8 +36,8 @@ div.peer-grading{ } } - .prompt-container, - .rubric-container, + .prompt-information-container, + .rubric-wrapper, .calibration-feedback-wrapper, .grading-container { @@ -102,11 +102,17 @@ div.peer-grading{ { padding: 10px; margin: 0px; - border: 1px solid black; + background: #eee; + height: 10em; h3 { text-align:center; text-transform:uppercase; + color: #777; + } + p + { + color: #777; } } .calibration-panel @@ -121,7 +127,11 @@ div.peer-grading{ } .current-state { - background: #eee; + background: #0F6B8A; + h3, p + { + color: white; + } } &:after { @@ -145,5 +155,9 @@ div.peer-grading{ } } + .interstitial-grading + { + } padding: 40px; } + diff --git a/lms/templates/peer_grading/peer_grading_problem.html b/lms/templates/peer_grading/peer_grading_problem.html index cd01ddcfa2..40c0146535 100644 --- a/lms/templates/peer_grading/peer_grading_problem.html +++ b/lms/templates/peer_grading/peer_grading_problem.html @@ -22,37 +22,38 @@
+

Peer Grading

-

Calibration

+

Learning to Grade

-

You are being calibrated on this problem

+

Before you can do any proper peer grading, you first need to understand how your own grading compares to that of the instrutor. Once your grades begin to match the instructor's, you will move on to grading your peers!

-

You have successfully calibrated on this problem

+

You have successfully managed to calibrate your answers to that of the instructors and have moved onto the next step in the peer grading process.

Grading

-

You cannot start grading until you have finished calibrating

+

You cannot start grading until you have graded a sufficient number of training problems and have been able to demonstrate that your scores closely match that of the instructor.

-

Now that you are done calibrating, you can now start grading.

+

Now that you have finished your training, you are now allowed to grade your peers. Please keep in mind that students are allowed to respond to the grades and feedback they receive.

-
View Rubric
+
Rubric
@@ -98,7 +99,7 @@

Congratulations!

-

You have now completed calibration. You are now ready to start grading.

+

You have now completed the calibration step. You are now ready to start grading.

From e410703e33d66caafc6cde68bcc4d24497491e04 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Fri, 4 Jan 2013 17:20:03 -0500 Subject: [PATCH 21/49] Minor bug fixes and improvements --- .../peer_grading/peer_grading_problem.coffee | 35 ++++++++++++++----- lms/static/sass/course/_staff_grading.scss | 4 ++- .../peer_grading/peer_grading_problem.html | 6 +++- 3 files changed, 34 insertions(+), 11 deletions(-) diff --git a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee index 43cd38e20d..0ffb5dc76d 100644 --- a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee +++ b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee @@ -53,7 +53,11 @@ The standard chunk of Lorem Ipsum used since the 1500s is reproduced below for t success: true submission_id: 1 submission_key: 'abcd' - student_response: 'I am a fake student response' + student_response: '''Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed nec tristique ante. Proin at mauris sapien, quis varius leo. Morbi laoreet leo nisi. Morbi aliquam lacus ante. Cras iaculis velit sed diam mattis a fermentum urna luctus. Duis consectetur nunc vitae felis facilisis eget vulputate risus viverra. Cras consectetur ullamcorper lobortis. Nam eu gravida lorem. Nulla facilisi. Nullam quis felis enim. Mauris orci lectus, dictum id cursus in, vulputate in massa. + +Phasellus non varius sem. Nullam commodo lacinia odio sit amet egestas. Donec ullamcorper sapien sagittis arcu volutpat placerat. Phasellus ut pretium ante. Nam dictum pulvinar nibh dapibus tristique. Sed at tellus mi, fringilla convallis justo. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus tristique rutrum nulla sed eleifend. Praesent at nunc arcu. Mauris condimentum faucibus nibh, eget commodo quam viverra sed. Morbi in tincidunt dolor. Morbi sed augue et augue interdum fermentum. + +Curabitur tristique purus ac arcu consequat cursus. 
Cras diam felis, dignissim quis placerat at, aliquet ac metus. Mauris vulputate est eu nibh imperdiet varius. Cras aliquet rhoncus elit a laoreet. Mauris consectetur erat et erat scelerisque eu faucibus dolor consequat. Nam adipiscing sagittis nisl, eu mollis massa tempor ac. Nulla scelerisque tempus blandit. Phasellus ac ipsum eros, id posuere arcu. Nullam non sapien arcu. Vivamus sit amet lorem justo, ac tempus turpis. Suspendisse pharetra gravida imperdiet. Pellentesque lacinia mi eu elit luctus pellentesque. Sed accumsan libero a magna elementum varius. Nunc eget pellentesque metus. ''' prompt: '''

S11E3: Metal Bands

Shown below are schematic band diagrams for two different metals. Both diagrams appear different, yet both of the elements are undisputably metallic in nature.

@@ -101,6 +105,8 @@ class PeerGradingProblem @calibration_panel = $('.calibration-panel') @grading_panel = $('.grading-panel') @content_panel = $('.content-panel') + @grading_message = $('.grading-message') + @grading_message.hide() @grading_wrapper =$('.grading-wrapper') @calibration_feedback_panel = $('.calibration-feedback') @@ -176,10 +182,12 @@ class PeerGradingProblem ########## calibration_check_callback: (response) => if response.success - # check whether or not we're still calibrating + # if we haven't been calibrating before if response.calibrated and (@calibration == null or @calibration == false) @calibration = false @fetch_submission_essay() + # If we were calibrating before and no longer need to, + # show the interstitial page else if response.calibrated and @calibration == true @calibration = false @render_interstitial_page() @@ -200,6 +208,8 @@ class PeerGradingProblem submission_callback: (response) => if response.success @is_calibrated_check() + @grading_message.fadeIn() + @grading_message.html("

Grade sent successfully.

") else if response.error @render_error(response.error) @@ -207,6 +217,7 @@ class PeerGradingProblem @render_error("Error occurred while submitting grade") graded_callback: (event) => + @grading_message.hide() @score = event.target.value @show_submit_button() @@ -217,6 +228,7 @@ class PeerGradingProblem # Rendering methods and helpers # ########## + # renders a calibration essay render_calibration: (response) => if response.success @@ -227,13 +239,12 @@ class PeerGradingProblem @calibration_panel.addClass('current-state') @grading_panel.removeClass('current-state') - # clear out all of the existing text + # Display the right text @calibration_panel.find('.calibration-text').show() @grading_panel.find('.calibration-text').show() @calibration_panel.find('.grading-text').hide() @grading_panel.find('.grading-text').hide() - # TODO: add in new text @submit_button.unbind('click') @submit_button.click @submit_calibration_essay @@ -243,9 +254,9 @@ class PeerGradingProblem else @render_error("An error occurred while retrieving the next calibration essay") + # Renders a student submission to be graded render_submission: (response) => if response.success - #TODO: fill this in @submit_button.hide() @submission_container.html("

Submitted Essay

") @render_submission_data(response) @@ -253,7 +264,7 @@ class PeerGradingProblem @calibration_panel.removeClass('current-state') @grading_panel.addClass('current-state') - # clear out all of the existing text + # Display the correct text @calibration_panel.find('.calibration-text').hide() @grading_panel.find('.calibration-text').hide() @calibration_panel.find('.grading-text').show() @@ -276,12 +287,14 @@ class PeerGradingProblem render_submission_data: (response) => @content_panel.show() + @submission_container.append(@make_paragraphs(response.student_response)) @prompt_container.html(response.prompt) @rubric_container.html(response.rubric) @submission_key_input.val(response.submission_key) @essay_id_input.val(response.submission_id) @setup_score_selection(response.max_score) + @submit_button.hide() @action_button.hide() @calibration_feedback_panel.hide() @@ -290,17 +303,21 @@ class PeerGradingProblem render_calibration_feedback: (response) => # display correct grade #@grading_wrapper.hide() - @calibration_feedback_panel.show() + @calibration_feedback_panel.slideDown() calibration_wrapper = $('.calibration-feedback-wrapper') calibration_wrapper.html("

The score you gave was: #{@score}. The actual score is: #{response.actual_score}

") + + score = parseInt(@score) actual_score = parseInt(response.actual_score) - #TODO: maybe do another variation depending on whether or not students are close to correct if score == actual_score - calibration_wrapper.append("

Congratulations! Your score matches the actual one!

") + calibration_wrapper.append("

Congratulations! Your score matches the actual score!

") else calibration_wrapper.append("

Please try to understand the grading critera better so that you will be more accurate next time.

") + + # disable score selection and submission from the grading interface + $("input[name='score-selection']").attr('disabled', true) @submit_button.hide() render_interstitial_page: () => diff --git a/lms/static/sass/course/_staff_grading.scss b/lms/static/sass/course/_staff_grading.scss index 47c437513b..b99056a7f5 100644 --- a/lms/static/sass/course/_staff_grading.scss +++ b/lms/static/sass/course/_staff_grading.scss @@ -80,7 +80,8 @@ div.peer-grading{ } } } - .message-container + .message-container, + .grading-message { background-color: $yellow; padding: 10px; @@ -98,6 +99,7 @@ div.peer-grading{ .instructions-panel { + margin-right:20px; > div { padding: 10px; diff --git a/lms/templates/peer_grading/peer_grading_problem.html b/lms/templates/peer_grading/peer_grading_problem.html index 40c0146535..9c11574f8d 100644 --- a/lms/templates/peer_grading/peer_grading_problem.html +++ b/lms/templates/peer_grading/peer_grading_problem.html @@ -19,7 +19,6 @@
-

Peer Grading

@@ -87,9 +86,13 @@
+
+
+ +

How did I do?

@@ -97,6 +100,7 @@
+

Congratulations!

You have now completed the calibration step. You are now ready to start grading.

From bf3669395b675a6cdbdd893ed869bc943daf41de Mon Sep 17 00:00:00 2001 From: ichuang Date: Sat, 5 Jan 2013 16:47:40 +0000 Subject: [PATCH 22/49] update symmath snuggletex server --- lms/lib/symmath/formula.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lms/lib/symmath/formula.py b/lms/lib/symmath/formula.py index 1698b004d9..fb5b210a27 100644 --- a/lms/lib/symmath/formula.py +++ b/lms/lib/symmath/formula.py @@ -422,7 +422,8 @@ class formula(object): def GetContentMathML(self, asciimath, mathml): # URL = 'http://192.168.1.2:8080/snuggletex-webapp-1.2.2/ASCIIMathMLUpConversionDemo' - URL = 'http://127.0.0.1:8080/snuggletex-webapp-1.2.2/ASCIIMathMLUpConversionDemo' + # URL = 'http://127.0.0.1:8080/snuggletex-webapp-1.2.2/ASCIIMathMLUpConversionDemo' + URL = 'http://math-xserver.mitx.mit.edu/snuggletex-webapp-1.2.2/ASCIIMathMLUpConversionDemo' if 1: payload = {'asciiMathInput': asciimath, From 9a0d85667ca0027d5a4b69f44c9ae56cbc37c5a5 Mon Sep 17 00:00:00 2001 From: ichuang Date: Sat, 5 Jan 2013 17:24:54 +0000 Subject: [PATCH 23/49] symmath use https, no server cert verify --- lms/lib/symmath/formula.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lms/lib/symmath/formula.py b/lms/lib/symmath/formula.py index fb5b210a27..bab0ab3691 100644 --- a/lms/lib/symmath/formula.py +++ b/lms/lib/symmath/formula.py @@ -423,7 +423,7 @@ class formula(object): def GetContentMathML(self, asciimath, mathml): # URL = 'http://192.168.1.2:8080/snuggletex-webapp-1.2.2/ASCIIMathMLUpConversionDemo' # URL = 'http://127.0.0.1:8080/snuggletex-webapp-1.2.2/ASCIIMathMLUpConversionDemo' - URL = 'http://math-xserver.mitx.mit.edu/snuggletex-webapp-1.2.2/ASCIIMathMLUpConversionDemo' + URL = 'https://math-xserver.mitx.mit.edu/snuggletex-webapp-1.2.2/ASCIIMathMLUpConversionDemo' if 1: payload = {'asciiMathInput': asciimath, @@ -431,7 +431,7 @@ class formula(object): #'asciiMathML':unicode(mathml).encode('utf-8'), } headers = {'User-Agent': "Mozilla/5.0 
(Windows; U; Windows NT 5.1; en-US; rv:1.8.1.13) Gecko/20080311 Firefox/2.0.0.13"} - r = requests.post(URL, data=payload, headers=headers) + r = requests.post(URL, data=payload, headers=headers, verify=False) r.encoding = 'utf-8' ret = r.text #print "encoding: ",r.encoding From 2293a37f7d60eaeae7ba6bb06dc6c4a4f57696f6 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Mon, 7 Jan 2013 09:17:09 -0500 Subject: [PATCH 24/49] Move over old Staff Grading tests to new app. --- lms/djangoapps/instructor/tests.py | 93 ------------------ lms/djangoapps/open_ended_grading/tests.py | 104 +++++++++++++++++++-- 2 files changed, 98 insertions(+), 99 deletions(-) diff --git a/lms/djangoapps/instructor/tests.py b/lms/djangoapps/instructor/tests.py index 865a97951e..2d17cee47d 100644 --- a/lms/djangoapps/instructor/tests.py +++ b/lms/djangoapps/instructor/tests.py @@ -25,7 +25,6 @@ from django_comment_client.models import Role, FORUM_ROLE_ADMINISTRATOR, \ FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA, FORUM_ROLE_STUDENT from django_comment_client.utils import has_forum_access -from instructor import staff_grading_service from courseware.access import _course_staff_group_name import courseware.tests.tests as ct from xmodule.modulestore.django import modulestore @@ -100,7 +99,6 @@ def action_name(operation, rolename): return '{0} forum {1}'.format(operation, FORUM_ADMIN_ACTION_SUFFIX[rolename]) -_mock_service = staff_grading_service.MockStaffGradingService() @override_settings(MODULESTORE=ct.TEST_DATA_XML_MODULESTORE) class TestInstructorDashboardForumAdmin(ct.PageLoader): @@ -223,94 +221,3 @@ class TestInstructorDashboardForumAdmin(ct.PageLoader): self.assertTrue(response.content.find('{0}'.format(roles))>=0, 'not finding roles "{0}"'.format(roles)) -@override_settings(MODULESTORE=ct.TEST_DATA_XML_MODULESTORE) -class TestStaffGradingService(ct.PageLoader): - ''' - Check that staff grading service proxy works. 
Basically just checking the - access control and error handling logic -- all the actual work is on the - backend. - ''' - def setUp(self): - xmodule.modulestore.django._MODULESTORES = {} - - self.student = 'view@test.com' - self.instructor = 'view2@test.com' - self.password = 'foo' - self.location = 'TestLocation' - self.create_account('u1', self.student, self.password) - self.create_account('u2', self.instructor, self.password) - self.activate_user(self.student) - self.activate_user(self.instructor) - - self.course_id = "edX/toy/2012_Fall" - self.toy = modulestore().get_course(self.course_id) - def make_instructor(course): - group_name = _course_staff_group_name(course.location) - g = Group.objects.create(name=group_name) - g.user_set.add(ct.user(self.instructor)) - - make_instructor(self.toy) - - self.mock_service = staff_grading_service.grading_service() - - self.logout() - - def test_access(self): - """ - Make sure only staff have access. - """ - self.login(self.student, self.password) - - # both get and post should return 404 - for view_name in ('staff_grading_get_next', 'staff_grading_save_grade'): - url = reverse(view_name, kwargs={'course_id': self.course_id}) - self.check_for_get_code(404, url) - self.check_for_post_code(404, url) - - - def test_get_next(self): - self.login(self.instructor, self.password) - - url = reverse('staff_grading_get_next', kwargs={'course_id': self.course_id}) - data = {'location': self.location} - - r = self.check_for_post_code(200, url, data) - d = json.loads(r.content) - self.assertTrue(d['success']) - self.assertEquals(d['submission_id'], self.mock_service.cnt) - self.assertIsNotNone(d['submission']) - self.assertIsNotNone(d['num_graded']) - self.assertIsNotNone(d['min_for_ml']) - self.assertIsNotNone(d['num_pending']) - self.assertIsNotNone(d['prompt']) - self.assertIsNotNone(d['ml_error_info']) - self.assertIsNotNone(d['max_score']) - self.assertIsNotNone(d['rubric']) - - - def test_save_grade(self): - 
self.login(self.instructor, self.password) - - url = reverse('staff_grading_save_grade', kwargs={'course_id': self.course_id}) - - data = {'score': '12', - 'feedback': 'great!', - 'submission_id': '123', - 'location': self.location} - r = self.check_for_post_code(200, url, data) - d = json.loads(r.content) - self.assertTrue(d['success'], str(d)) - self.assertEquals(d['submission_id'], self.mock_service.cnt) - - def test_get_problem_list(self): - self.login(self.instructor, self.password) - - url = reverse('staff_grading_get_problem_list', kwargs={'course_id': self.course_id}) - data = {} - - r = self.check_for_post_code(200, url, data) - d = json.loads(r.content) - self.assertTrue(d['success'], str(d)) - self.assertIsNotNone(d['problem_list']) - - diff --git a/lms/djangoapps/open_ended_grading/tests.py b/lms/djangoapps/open_ended_grading/tests.py index 501deb776c..0636452779 100644 --- a/lms/djangoapps/open_ended_grading/tests.py +++ b/lms/djangoapps/open_ended_grading/tests.py @@ -6,11 +6,103 @@ Replace this with more appropriate tests for your application. """ from django.test import TestCase +from instructor import staff_grading_service +from django.core.urlresolvers import reverse +from django.contrib.auth.models import Group + +from courseware.access import _course_staff_group_name +import courseware.tests.tests as ct +from xmodule.modulestore.django import modulestore +import xmodule.modulestore.django + +_mock_service = staff_grading_service.MockStaffGradingService() + +@override_settings(MODULESTORE=ct.TEST_DATA_XML_MODULESTORE) +class TestStaffGradingService(ct.PageLoader): + ''' + Check that staff grading service proxy works. Basically just checking the + access control and error handling logic -- all the actual work is on the + backend. 
+ ''' + def setUp(self): + xmodule.modulestore.django._MODULESTORES = {} + + self.student = 'view@test.com' + self.instructor = 'view2@test.com' + self.password = 'foo' + self.location = 'TestLocation' + self.create_account('u1', self.student, self.password) + self.create_account('u2', self.instructor, self.password) + self.activate_user(self.student) + self.activate_user(self.instructor) + + self.course_id = "edX/toy/2012_Fall" + self.toy = modulestore().get_course(self.course_id) + def make_instructor(course): + group_name = _course_staff_group_name(course.location) + g = Group.objects.create(name=group_name) + g.user_set.add(ct.user(self.instructor)) + + make_instructor(self.toy) + + self.mock_service = staff_grading_service.grading_service() + + self.logout() + + def test_access(self): + """ + Make sure only staff have access. + """ + self.login(self.student, self.password) + + # both get and post should return 404 + for view_name in ('staff_grading_get_next', 'staff_grading_save_grade'): + url = reverse(view_name, kwargs={'course_id': self.course_id}) + self.check_for_get_code(404, url) + self.check_for_post_code(404, url) -class SimpleTest(TestCase): - def test_basic_addition(self): - """ - Tests that 1 + 1 always equals 2. 
- """ - self.assertEqual(1 + 1, 2) + def test_get_next(self): + self.login(self.instructor, self.password) + + url = reverse('staff_grading_get_next', kwargs={'course_id': self.course_id}) + data = {'location': self.location} + + r = self.check_for_post_code(200, url, data) + d = json.loads(r.content) + self.assertTrue(d['success']) + self.assertEquals(d['submission_id'], self.mock_service.cnt) + self.assertIsNotNone(d['submission']) + self.assertIsNotNone(d['num_graded']) + self.assertIsNotNone(d['min_for_ml']) + self.assertIsNotNone(d['num_pending']) + self.assertIsNotNone(d['prompt']) + self.assertIsNotNone(d['ml_error_info']) + self.assertIsNotNone(d['max_score']) + self.assertIsNotNone(d['rubric']) + + + def test_save_grade(self): + self.login(self.instructor, self.password) + + url = reverse('staff_grading_save_grade', kwargs={'course_id': self.course_id}) + + data = {'score': '12', + 'feedback': 'great!', + 'submission_id': '123', + 'location': self.location} + r = self.check_for_post_code(200, url, data) + d = json.loads(r.content) + self.assertTrue(d['success'], str(d)) + self.assertEquals(d['submission_id'], self.mock_service.cnt) + + def test_get_problem_list(self): + self.login(self.instructor, self.password) + + url = reverse('staff_grading_get_problem_list', kwargs={'course_id': self.course_id}) + data = {} + + r = self.check_for_post_code(200, url, data) + d = json.loads(r.content) + self.assertTrue(d['success'], str(d)) + self.assertIsNotNone(d['problem_list']) From faa23732562117d0e490d6af665e698236bc612a Mon Sep 17 00:00:00 2001 From: Robert Chirwa Date: Fri, 4 Jan 2013 13:34:59 -0500 Subject: [PATCH 25/49] Fix typo in the job page MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Carlos Andrés Rocha --- lms/templates/static_templates/jobs.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lms/templates/static_templates/jobs.html b/lms/templates/static_templates/jobs.html 
index d783403970..f2752a0939 100644 --- a/lms/templates/static_templates/jobs.html +++ b/lms/templates/static_templates/jobs.html @@ -38,7 +38,7 @@

EdX is looking to add new talent to our team!

Our mission is to give a world-class education to everyone, everywhere, regardless of gender, income or social status

-

Today, EdX.org, a not-for-profit provides hundreds of thousands of people from around the globe with access free education.  We offer amazing quality classes by the best professors from the best schools. We enable our members to uncover a new passion that will transform their lives and their communities.

+

Today, EdX.org, a not-for-profit provides hundreds of thousands of people from around the globe with access to free education.  We offer amazing quality classes by the best professors from the best schools. We enable our members to uncover a new passion that will transform their lives and their communities.

Around the world-from coast to coast, in over 192 countries, people are making the decision to take one or several of our courses. As we continue to grow our operations, we are looking for talented, passionate people with great ideas to join the edX team. We aim to create an environment that is supportive, diverse, and as fun as our brand. If you're results-oriented, dedicated, and ready to contribute to an unparalleled member experience for our community, we really want you to apply.

As part of the edX team, you’ll receive:

    From 56f1c6aad93533d1d6e68ffe1e06cd6188816641 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carlos=20Andr=C3=A9s=20Rocha?= Date: Mon, 7 Jan 2013 10:14:00 -0500 Subject: [PATCH 26/49] Fixed extra characters in faq page --- lms/templates/static_templates/faq.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lms/templates/static_templates/faq.html b/lms/templates/static_templates/faq.html index 030eaa5013..b0543df264 100644 --- a/lms/templates/static_templates/faq.html +++ b/lms/templates/static_templates/faq.html @@ -21,7 +21,7 @@

    What is edX?

    edX is a not-for-profit enterprise of its founding partners, the Massachusetts Institute of Technology (MIT) and Harvard University that offers online learning to on-campus students and to millions of people around the world. To do so, edX is building an open-source online learning platform and hosts an online web portal at www.edx.org for online education.

    -

    EdX currently offers HarvardX, MITx and BerkeleyX classes online for free. Beginning in fall 2013, edX will offer WellesleyX and GeorgetownX classes online for free. The University of Texas System includes nine universities and six health institutions. The edX institutions aim to extend their collective reach to build a global community of online students. Along with offering online courses, the three universities undertake research on how students learn and how technology can transform learning – both on-campus and online throughout the world.

    +

    EdX currently offers HarvardX, MITx and BerkeleyX classes online for free. Beginning in fall 2013, edX will offer WellesleyX and GeorgetownX classes online for free. The University of Texas System includes nine universities and six health institutions. The edX institutions aim to extend their collective reach to build a global community of online students. Along with offering online courses, the three universities undertake research on how students learn and how technology can transform learning both on-campus and online throughout the world.

    From 0c6e8c4820876efdb94173135e315a6f158f7624 Mon Sep 17 00:00:00 2001 From: Jay Zoldak Date: Mon, 7 Jan 2013 14:08:18 -0500 Subject: [PATCH 27/49] Clarify that you need to start up the mongo daemon --- doc/development.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/doc/development.md b/doc/development.md index ebc56fbf1b..36827aafeb 100644 --- a/doc/development.md +++ b/doc/development.md @@ -19,6 +19,11 @@ Use the MacPorts package `mongodb` or the Homebrew formula `mongodb` ## Initializing Mongodb +First start up the mongo daemon. E.g. to start it up in the background +using a config file: + + mongod --config /usr/local/etc/mongod.conf & + Check out the course data directories that you want to work with into the `GITHUB_REPO_ROOT` (by default, `../data`). Then run the following command: From e1ec4eec71061646771af5a5754487c7300cfa30 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Mon, 7 Jan 2013 14:46:32 -0500 Subject: [PATCH 28/49] Get tests to pass by creating a mock peer grading service --- .../peer_grading_service.py | 25 ++++++++++++++++++- lms/djangoapps/open_ended_grading/tests.py | 9 +++++-- lms/djangoapps/open_ended_grading/views.py | 8 +++++- lms/envs/common.py | 2 ++ lms/envs/test.py | 1 + 5 files changed, 41 insertions(+), 4 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/peer_grading_service.py b/lms/djangoapps/open_ended_grading/peer_grading_service.py index 6b35f55f01..37611df4f9 100644 --- a/lms/djangoapps/open_ended_grading/peer_grading_service.py +++ b/lms/djangoapps/open_ended_grading/peer_grading_service.py @@ -15,6 +15,26 @@ from xmodule.course_module import CourseDescriptor log = logging.getLogger(__name__) +class MockPeerGradingService(object): + # TODO: make this return real results + def get_next_submission(self, problem_location, grader_id): + return {'success': true} + + def save_grade(self, location, grader_id, submission_id, score, feedback, submission_key): + return {'success': true} + + def 
is_student_calibrated(self, problem_location, grader_id): + return {'success': true} + + def show_calibration_essay(self, problem_location, grader_id): + return {'success': true} + + def save_calibration_essay(self, problem_location, grader_id, calibration_essay_id, submission_key, score, feedback): + return {'success': true} + + def get_problem_list(self, course_id, grader_id): + return {'success': true} + class PeerGradingService(GradingService): """ Interface with the grading controller for peer grading @@ -78,7 +98,10 @@ def peer_grading_service(): if _service is not None: return _service - _service = PeerGradingService(settings.PEER_GRADING_INTERFACE) + if settings.MOCK_PEER_GRADING: + _service = MockPeerGradingService() + else: + _service = PeerGradingService(settings.PEER_GRADING_INTERFACE) return _service diff --git a/lms/djangoapps/open_ended_grading/tests.py b/lms/djangoapps/open_ended_grading/tests.py index 0636452779..30a58f6ee8 100644 --- a/lms/djangoapps/open_ended_grading/tests.py +++ b/lms/djangoapps/open_ended_grading/tests.py @@ -6,7 +6,7 @@ Replace this with more appropriate tests for your application. 
""" from django.test import TestCase -from instructor import staff_grading_service +from open_ended_grading import staff_grading_service from django.core.urlresolvers import reverse from django.contrib.auth.models import Group @@ -14,6 +14,11 @@ from courseware.access import _course_staff_group_name import courseware.tests.tests as ct from xmodule.modulestore.django import modulestore import xmodule.modulestore.django +from nose import SkipTest +from mock import patch, Mock +import json + +from override_settings import override_settings _mock_service = staff_grading_service.MockStaffGradingService() @@ -45,7 +50,7 @@ class TestStaffGradingService(ct.PageLoader): make_instructor(self.toy) - self.mock_service = staff_grading_service.grading_service() + self.mock_service = staff_grading_service.staff_grading_service() self.logout() diff --git a/lms/djangoapps/open_ended_grading/views.py b/lms/djangoapps/open_ended_grading/views.py index 1026888987..887fe82aec 100644 --- a/lms/djangoapps/open_ended_grading/views.py +++ b/lms/djangoapps/open_ended_grading/views.py @@ -28,6 +28,7 @@ from xmodule.modulestore.exceptions import InvalidLocationError, ItemNotFoundErr from xmodule.modulestore.search import path_to_location from peer_grading_service import PeerGradingService +from peer_grading_service import MockPeerGradingService from grading_service import GradingServiceError import json import track.views @@ -38,7 +39,10 @@ from .staff_grading import StaffGrading log = logging.getLogger(__name__) template_imports = {'urllib': urllib} -peer_gs = PeerGradingService(settings.PEER_GRADING_INTERFACE) +if settings.MOCK_PEER_GRADING: + peer_gs = MockPeerGradingService() +else: + peer_gs = PeerGradingService(settings.PEER_GRADING_INTERFACE) @cache_control(no_cache=True, no_store=True, must_revalidate=True) def staff_grading(request, course_id): @@ -62,6 +66,7 @@ def staff_grading(request, course_id): 'staff_access': True, }) +@cache_control(no_cache=True, no_store=True, 
must_revalidate=True) def peer_grading(request, course_id): ''' Show a peer grading interface @@ -104,6 +109,7 @@ def peer_grading(request, course_id): 'staff_access': False, }) +@cache_control(no_cache=True, no_store=True, must_revalidate=True) def peer_grading_problem(request, course_id): ''' Show individual problem interface diff --git a/lms/envs/common.py b/lms/envs/common.py index 3b83b708aa..2354975cf0 100644 --- a/lms/envs/common.py +++ b/lms/envs/common.py @@ -333,6 +333,8 @@ STAFF_GRADING_INTERFACE = None # Used for testing, debugging MOCK_STAFF_GRADING = False +################################# Peer grading config ##################### +PEER_GRADING_INTERFACE = None ################################# Jasmine ################################### JASMINE_TEST_DIRECTORY = PROJECT_ROOT + '/static/coffee' diff --git a/lms/envs/test.py b/lms/envs/test.py index c72c8b98bf..e9e4a43c6f 100644 --- a/lms/envs/test.py +++ b/lms/envs/test.py @@ -62,6 +62,7 @@ XQUEUE_WAITTIME_BETWEEN_REQUESTS = 5 # seconds # Don't rely on a real staff grading backend MOCK_STAFF_GRADING = True +MOCK_PEER_GRADING = True # TODO (cpennington): We need to figure out how envs/test.py can inject things # into common.py so that we don't have to repeat this sort of thing From d3fd6ac151e9a79d5a9118c6f076084849078765 Mon Sep 17 00:00:00 2001 From: Jay Zoldak Date: Mon, 7 Jan 2013 15:23:08 -0500 Subject: [PATCH 29/49] Clarify the syntax of the rake fasttest command --- doc/development.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/development.md b/doc/development.md index 36827aafeb..56415b691e 100644 --- a/doc/development.md +++ b/doc/development.md @@ -42,8 +42,12 @@ This runs all the tests (long, uses collectstatic): If if you aren't changing static files, can run `rake test` once, then run - rake fasttest_{lms,cms} + rake fasttest_lms +or + + rake fasttest_cms + xmodule can be tested independently, with this: rake test_common/lib/xmodule From 
b7473f8017abe48a36c55f565000c63c67ac038d Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Mon, 7 Jan 2013 15:40:01 -0500 Subject: [PATCH 30/49] Make mock service return useful data --- .../peer_grading_service.py | 31 ++++++++++++++----- 1 file changed, 24 insertions(+), 7 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/peer_grading_service.py b/lms/djangoapps/open_ended_grading/peer_grading_service.py index 37611df4f9..4b4601fa91 100644 --- a/lms/djangoapps/open_ended_grading/peer_grading_service.py +++ b/lms/djangoapps/open_ended_grading/peer_grading_service.py @@ -16,24 +16,41 @@ from xmodule.course_module import CourseDescriptor log = logging.getLogger(__name__) class MockPeerGradingService(object): - # TODO: make this return real results def get_next_submission(self, problem_location, grader_id): - return {'success': true} + return json.dumps({'success': True, + 'submission_id':1, + 'submission_key': "", + 'student_response': 'fake student response', + 'prompt': 'fake submission prompt', + 'rubric': 'fake rubric', + 'max_score': 4}) def save_grade(self, location, grader_id, submission_id, score, feedback, submission_key): - return {'success': true} + return json.dumps({'success': True}) def is_student_calibrated(self, problem_location, grader_id): - return {'success': true} + return json.dumps({'success': True, 'calibrated': True}) def show_calibration_essay(self, problem_location, grader_id): - return {'success': true} + return json.dumps({'success': True, + 'submission_id':1, + 'submission_key': '', + 'student_response': 'fake student response', + 'prompt': 'fake submission prompt', + 'rubric': 'fake rubric', + 'max_score': 4}) def save_calibration_essay(self, problem_location, grader_id, calibration_essay_id, submission_key, score, feedback): - return {'success': true} + return {'success': True, 'actual_score': 2} def get_problem_list(self, course_id, grader_id): - return {'success': true} + return json.dumps({'success': True, + 'problem_list': 
[ + json.dumps({'location': 'i4x://MITx/3.091x/problem/open_ended_demo1', \ + 'problem_name': "Problem 1", 'num_graded': 3, 'num_pending': 5}), + json.dumps({'location': 'i4x://MITx/3.091x/problem/open_ended_demo2', \ + 'problem_name': "Problem 2", 'num_graded': 1, 'num_pending': 5}) + ]}) class PeerGradingService(GradingService): """ From bbc5cd50bc618371f481d946007c198ef6d2868c Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Mon, 7 Jan 2013 15:49:48 -0500 Subject: [PATCH 31/49] Add in default value for MOCK_PEER_GRADING --- lms/envs/common.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lms/envs/common.py b/lms/envs/common.py index 2354975cf0..88cf09502d 100644 --- a/lms/envs/common.py +++ b/lms/envs/common.py @@ -335,6 +335,7 @@ MOCK_STAFF_GRADING = False ################################# Peer grading config ##################### PEER_GRADING_INTERFACE = None +MOCK_PEER_GRADING = False ################################# Jasmine ################################### JASMINE_TEST_DIRECTORY = PROJECT_ROOT + '/static/coffee' From 02247172a2fb5fd83d0eb5f81e5f6faaab2c162d Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Mon, 7 Jan 2013 15:55:14 -0500 Subject: [PATCH 32/49] Fix some interstitial page CSS --- lms/static/sass/course/_staff_grading.scss | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/lms/static/sass/course/_staff_grading.scss b/lms/static/sass/course/_staff_grading.scss index b99056a7f5..816c0efd50 100644 --- a/lms/static/sass/course/_staff_grading.scss +++ b/lms/static/sass/course/_staff_grading.scss @@ -157,8 +157,13 @@ div.peer-grading{ } } - .interstitial-grading + .interstitial-page { + text-align: center; + input[type=button] + { + margin-top: 20px; + } } padding: 40px; } From 19401f29d917f261aa968e396e975e7f48fcdf87 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Tue, 8 Jan 2013 10:22:07 -0500 Subject: [PATCH 33/49] Minor javascript updates --- lms/static/coffee/src/peer_grading/peer_grading_problem.coffee | 3 ++- 1 file 
changed, 2 insertions(+), 1 deletion(-) diff --git a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee index 0ffb5dc76d..85b5f064a4 100644 --- a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee +++ b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee @@ -325,6 +325,7 @@ class PeerGradingProblem @interstitial_page.show() render_error: (error_message) => @error_container.show() + @calibration_feedback_panel.hide() @error_container.html(error_message) @content_panel.hide() @@ -350,7 +351,7 @@ class PeerGradingProblem -mock_backend = true +mock_backend = false ajax_url = $('.peer-grading').data('ajax_url') backend = new PeerGradingProblemBackend(ajax_url, mock_backend) $(document).ready(() -> new PeerGradingProblem(backend)) From fc0a7614bf6e1c189613cac69a5ecdd5f887567a Mon Sep 17 00:00:00 2001 From: David Ormsbee Date: Tue, 8 Jan 2013 11:21:22 -0500 Subject: [PATCH 34/49] Load courses in dir name order, keep separate ParentTracker per course. --- .../xmodule/xmodule/modulestore/__init__.py | 2 +- .../lib/xmodule/xmodule/modulestore/mongo.py | 2 +- .../lib/xmodule/xmodule/modulestore/search.py | 2 +- common/lib/xmodule/xmodule/modulestore/xml.py | 19 ++++++++++--------- 4 files changed, 13 insertions(+), 12 deletions(-) diff --git a/common/lib/xmodule/xmodule/modulestore/__init__.py b/common/lib/xmodule/xmodule/modulestore/__init__.py index 5b94add68f..42c516a199 100644 --- a/common/lib/xmodule/xmodule/modulestore/__init__.py +++ b/common/lib/xmodule/xmodule/modulestore/__init__.py @@ -345,7 +345,7 @@ class ModuleStore(object): ''' raise NotImplementedError - def get_parent_locations(self, location): + def get_parent_locations(self, location, course_id): '''Find all locations that are the parents of this location. Needed for path_to_location(). 
diff --git a/common/lib/xmodule/xmodule/modulestore/mongo.py b/common/lib/xmodule/xmodule/modulestore/mongo.py index baa4e7870c..cc4d1a7cda 100644 --- a/common/lib/xmodule/xmodule/modulestore/mongo.py +++ b/common/lib/xmodule/xmodule/modulestore/mongo.py @@ -309,7 +309,7 @@ class MongoModuleStore(ModuleStoreBase): self._update_single_item(location, {'metadata': metadata}) - def get_parent_locations(self, location): + def get_parent_locations(self, location, course_id): '''Find all locations that are the parents of this location. Needed for path_to_location(). diff --git a/common/lib/xmodule/xmodule/modulestore/search.py b/common/lib/xmodule/xmodule/modulestore/search.py index f9901e8bfe..4a5ece6854 100644 --- a/common/lib/xmodule/xmodule/modulestore/search.py +++ b/common/lib/xmodule/xmodule/modulestore/search.py @@ -64,7 +64,7 @@ def path_to_location(modulestore, course_id, location): # isn't found so we don't have to do it explicitly. Call this # first to make sure the location is there (even if it's a course, and # we would otherwise immediately exit). - parents = modulestore.get_parent_locations(loc) + parents = modulestore.get_parent_locations(loc, course_id) # print 'Processing loc={0}, path={1}'.format(loc, path) if loc.category == "course": diff --git a/common/lib/xmodule/xmodule/modulestore/xml.py b/common/lib/xmodule/xmodule/modulestore/xml.py index 6b3ff9bff4..bee3ca4440 100644 --- a/common/lib/xmodule/xmodule/modulestore/xml.py +++ b/common/lib/xmodule/xmodule/modulestore/xml.py @@ -275,14 +275,15 @@ class XMLModuleStore(ModuleStoreBase): class_ = getattr(import_module(module_path), class_name) self.default_class = class_ - self.parent_tracker = ParentTracker() + # self.parent_tracker = ParentTracker() + self.parent_trackers = defaultdict(ParentTracker) # If we are specifically asked for missing courses, that should # be an error. 
If we are asked for "all" courses, find the ones # that have a course.xml if course_dirs is None: - course_dirs = [d for d in os.listdir(self.data_dir) if - os.path.exists(self.data_dir / d / "course.xml")] + course_dirs = sorted([d for d in os.listdir(self.data_dir) if + os.path.exists(self.data_dir / d / "course.xml")]) for course_dir in course_dirs: self.try_load_course(course_dir) @@ -307,7 +308,7 @@ class XMLModuleStore(ModuleStoreBase): if course_descriptor is not None: self.courses[course_dir] = course_descriptor self._location_errors[course_descriptor.location] = errorlog - self.parent_tracker.make_known(course_descriptor.location) + self.parent_trackers[course_descriptor.id].make_known(course_descriptor.location) else: # Didn't load course. Instead, save the errors elsewhere. self.errored_courses[course_dir] = errorlog @@ -432,7 +433,7 @@ class XMLModuleStore(ModuleStoreBase): course_dir, policy, tracker, - self.parent_tracker, + self.parent_trackers[course_id], self.load_error_modules, ) @@ -541,7 +542,7 @@ class XMLModuleStore(ModuleStoreBase): """ raise NotImplementedError("XMLModuleStores are read-only") - def get_parent_locations(self, location): + def get_parent_locations(self, location, course_id): '''Find all locations that are the parents of this location. Needed for path_to_location(). @@ -552,7 +553,7 @@ class XMLModuleStore(ModuleStoreBase): be empty if there are no parents. 
''' location = Location.ensure_fully_specified(location) - if not self.parent_tracker.is_known(location): - raise ItemNotFoundError(location) + if not self.parent_trackers[course_id].is_known(location): + raise ItemNotFoundError("{0} not in {1}".format(location, course_id)) - return self.parent_tracker.parents(location) + return self.parent_trackers[course_id].parents(location) From 8952eda98d0953c210eb159f24fd0ddce26cfb81 Mon Sep 17 00:00:00 2001 From: David Ormsbee Date: Tue, 8 Jan 2013 13:34:22 -0500 Subject: [PATCH 35/49] XML loaded courses should no longer be able to throw NoPathToItem. The parent trees are built on a per-course basis (so it'll throw ItemNotFoundError if an item exists in a different course from the one you're asking after). NoPathToItem can still happen with MongoDB backed courseware. They store things like the info page as HTML snippets which are orphaned from other content. --- .../xmodule/modulestore/tests/test_modulestore.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/common/lib/xmodule/xmodule/modulestore/tests/test_modulestore.py b/common/lib/xmodule/xmodule/modulestore/tests/test_modulestore.py index c1d1d50a53..64816581ce 100644 --- a/common/lib/xmodule/xmodule/modulestore/tests/test_modulestore.py +++ b/common/lib/xmodule/xmodule/modulestore/tests/test_modulestore.py @@ -23,12 +23,3 @@ def check_path_to_location(modulestore): for location in not_found: assert_raises(ItemNotFoundError, path_to_location, modulestore, course_id, location) - # Since our test files are valid, there shouldn't be any - # elements with no path to them. But we can look for them in - # another course. - no_path = ( - "i4x://edX/simple/video/Lost_Video", - ) - for location in no_path: - assert_raises(NoPathToItem, path_to_location, modulestore, course_id, location) - From 051339afb03252d8166fd816cf5cf81961df1a58 Mon Sep 17 00:00:00 2001 From: David Ormsbee Date: Tue, 8 Jan 2013 13:54:44 -0500 Subject: [PATCH 36/49] Tiny cleanup.
--- common/lib/xmodule/xmodule/modulestore/xml.py | 1 - 1 file changed, 1 deletion(-) diff --git a/common/lib/xmodule/xmodule/modulestore/xml.py b/common/lib/xmodule/xmodule/modulestore/xml.py index bee3ca4440..f967cd0a7f 100644 --- a/common/lib/xmodule/xmodule/modulestore/xml.py +++ b/common/lib/xmodule/xmodule/modulestore/xml.py @@ -275,7 +275,6 @@ class XMLModuleStore(ModuleStoreBase): class_ = getattr(import_module(module_path), class_name) self.default_class = class_ - # self.parent_tracker = ParentTracker() self.parent_trackers = defaultdict(ParentTracker) # If we are specifically asked for missing courses, that should From f0f25296b2cfe050e42161eb5caa530dedb61de3 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Tue, 8 Jan 2013 15:54:49 -0500 Subject: [PATCH 37/49] Update documentation --- .../peer_grading_service.py | 94 ++++++++++++++++--- .../staff_grading_service.py | 2 +- 2 files changed, 81 insertions(+), 15 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/peer_grading_service.py b/lms/djangoapps/open_ended_grading/peer_grading_service.py index 4b4601fa91..0b75997d91 100644 --- a/lms/djangoapps/open_ended_grading/peer_grading_service.py +++ b/lms/djangoapps/open_ended_grading/peer_grading_service.py @@ -144,7 +144,21 @@ def _check_post(request): def get_next_submission(request, course_id): """ - TODO: fill in this documentation + Makes a call to the grading controller for the next essay that should be graded + Returns a json dict with the following keys: + + 'success': bool + + 'submission_id': a unique identifier for the submission, to be passed back + with the grade. + + 'submission': the submission, rendered as read-only html for grading + + 'rubric': the rubric, also rendered as html. + + 'submission_key': a key associated with the submission for validation reasons + + 'error': if success is False, will have an error message with more info. 
""" _check_post(request) required = set(['location']) @@ -155,12 +169,10 @@ def get_next_submission(request, course_id): p = request.POST location = p['location'] - return HttpResponse(_get_next_submission(course_id, request.user.id, location), - mimetype="application/json") - -def _get_next_submission(course_id, grader_id, location): try: - return peer_grading_service().get_next_submission(location, grader_id) + response = peer_grading_service().get_next_submission(location, grader_id) + return HttpResponse(response, + mimetype="application/json") except GradingServiceError: log.exception("Error from grading service. server url: {0}" .format(staff_grading_service().url)) @@ -169,7 +181,18 @@ def _get_next_submission(course_id, grader_id, location): def save_grade(request, course_id): """ - TODO: fill in this documentation + Saves the grade of a given submission. + Input: + The request should have the following keys: + location - problem location + submission_id - id associated with this submission + submission_key - submission key given for validation purposes + score - the grade that was given to the submission + feedback - the feedback from the student + Returns + A json object with the following keys: + success: bool indicating whether the save was a success + error: if there was an error in the submission, this is the error message """ _check_post(request) required = set(['location', 'submission_id', 'submission_key', 'score', 'feedback']) @@ -197,7 +220,20 @@ def save_grade(request, course_id): def is_student_calibrated(request, course_id): """ - TODO: fill in this documentation + Calls the grading controller to see if the given student is calibrated + on the given problem + + Input: + In the request, we need the following arguments: + location - problem location + + Returns: + Json object with the following keys + success - bool indicating whether or not the call was successful + calibrated - true if the grader has fully calibrated and can now move on to 
grading + - false if the grader is still working on calibration problems + total_calibrated_on_so_far - the number of calibration essays for this problem + that this grader has graded """ _check_post(request) required = set(['location']) @@ -221,7 +257,26 @@ def is_student_calibrated(request, course_id): def show_calibration_essay(request, course_id): """ - TODO: fill in this documentation + Fetch the next calibration essay from the grading controller and return it + Inputs: + In the request + location - problem location + + Returns: + A json dict with the following keys + 'success': bool + + 'submission_id': a unique identifier for the submission, to be passed back + with the grade. + + 'submission': the submission, rendered as read-only html for grading + + 'rubric': the rubric, also rendered as html. + + 'submission_key': a key associated with the submission for validation reasons + + 'error': if success is False, will have an error message with more info. + """ _check_post(request) @@ -233,12 +288,9 @@ def show_calibration_essay(request, course_id): grader_id = request.user.id p = request.POST location = p['location'] - return HttpResponse(_next_calibration_essay(course_id, grader_id, location), - mimetype="application/json") - -def _next_calibration_essay(course_id, grader_id, location): try: - return peer_grading_service().show_calibration_essay(location, grader_id) + response = peer_grading_service().show_calibration_essay(location, grader_id) + return HttpResponse(response, mimetype="application/json") except GradingServiceError: log.exception("Error from grading service. server url: {0}" .format(staff_grading_service().url)) @@ -248,6 +300,20 @@ def _next_calibration_essay(course_id, grader_id, location): def save_calibration_essay(request, course_id): """ + Saves the grader's grade of a given calibration. 
+ Input: + The request should have the following keys: + location - problem location + submission_id - id associated with this submission + submission_key - submission key given for validation purposes + score - the grade that was given to the submission + feedback - the feedback from the student + Returns + A json object with the following keys: + success: bool indicating whether the save was a success + error: if there was an error in the submission, this is the error message + actual_score: the score that the instructor gave to this calibration essay + """ _check_post(request) diff --git a/lms/djangoapps/open_ended_grading/staff_grading_service.py b/lms/djangoapps/open_ended_grading/staff_grading_service.py index 5d56a90064..f2d4c5ee19 100644 --- a/lms/djangoapps/open_ended_grading/staff_grading_service.py +++ b/lms/djangoapps/open_ended_grading/staff_grading_service.py @@ -178,7 +178,7 @@ def _check_access(user, course_id): def get_next(request, course_id): """ Get the next thing to grade for course_id and with the location specified - in the . + in the request. Returns a json dict with the following keys: From 1122cdb2862b3ef13c6a438a8bf4baaa350811f1 Mon Sep 17 00:00:00 2001 From: David Ormsbee Date: Tue, 8 Jan 2013 16:37:31 -0500 Subject: [PATCH 38/49] Added more comments in response to code review. 
--- common/lib/xmodule/xmodule/modulestore/__init__.py | 4 ++-- common/lib/xmodule/xmodule/modulestore/mongo.py | 4 ++-- common/lib/xmodule/xmodule/modulestore/xml.py | 8 +++++--- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/common/lib/xmodule/xmodule/modulestore/__init__.py b/common/lib/xmodule/xmodule/modulestore/__init__.py index 42c516a199..f86a6e9600 100644 --- a/common/lib/xmodule/xmodule/modulestore/__init__.py +++ b/common/lib/xmodule/xmodule/modulestore/__init__.py @@ -346,8 +346,8 @@ class ModuleStore(object): raise NotImplementedError def get_parent_locations(self, location, course_id): - '''Find all locations that are the parents of this location. Needed - for path_to_location(). + '''Find all locations that are the parents of this location in this + course. Needed for path_to_location(). returns an iterable of things that can be passed to Location. ''' diff --git a/common/lib/xmodule/xmodule/modulestore/mongo.py b/common/lib/xmodule/xmodule/modulestore/mongo.py index cc4d1a7cda..4c7ef3c050 100644 --- a/common/lib/xmodule/xmodule/modulestore/mongo.py +++ b/common/lib/xmodule/xmodule/modulestore/mongo.py @@ -310,8 +310,8 @@ class MongoModuleStore(ModuleStoreBase): self._update_single_item(location, {'metadata': metadata}) def get_parent_locations(self, location, course_id): - '''Find all locations that are the parents of this location. Needed - for path_to_location(). + '''Find all locations that are the parents of this location in this + course. Needed for path_to_location(). If there is no data at location in this modulestore, raise ItemNotFoundError. diff --git a/common/lib/xmodule/xmodule/modulestore/xml.py b/common/lib/xmodule/xmodule/modulestore/xml.py index f967cd0a7f..04f3a94d1b 100644 --- a/common/lib/xmodule/xmodule/modulestore/xml.py +++ b/common/lib/xmodule/xmodule/modulestore/xml.py @@ -279,7 +279,9 @@ class XMLModuleStore(ModuleStoreBase): # If we are specifically asked for missing courses, that should # be an error. 
If we are asked for "all" courses, find the ones - # that have a course.xml + # that have a course.xml. We sort the dirs in alpha order so we always + # read things in the same order (OS differences in load order have + # bitten us in the past.) if course_dirs is None: course_dirs = sorted([d for d in os.listdir(self.data_dir) if os.path.exists(self.data_dir / d / "course.xml")]) @@ -542,8 +544,8 @@ class XMLModuleStore(ModuleStoreBase): raise NotImplementedError("XMLModuleStores are read-only") def get_parent_locations(self, location, course_id): - '''Find all locations that are the parents of this location. Needed - for path_to_location(). + '''Find all locations that are the parents of this location in this + course. Needed for path_to_location(). If there is no data at location in this modulestore, raise ItemNotFoundError. From 950d39838729ba77a94fda7b5c2c582a51901f95 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Wed, 9 Jan 2013 10:19:42 -0500 Subject: [PATCH 39/49] Updates to the CSS --- .../src/peer_grading/peer_grading_problem.coffee | 2 ++ lms/static/sass/course/_staff_grading.scss | 15 ++++----------- 2 files changed, 6 insertions(+), 11 deletions(-) diff --git a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee index 85b5f064a4..119144d96a 100644 --- a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee +++ b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee @@ -129,6 +129,8 @@ class PeerGradingProblem @interstitial_page_button = $('.interstitial-page-button') Collapsible.setCollapsibles(@content_panel) + + # Set up the click event handlers @action_button.click -> document.location.reload(true) @calibration_feedback_button.click => @calibration_feedback_panel.hide() diff --git a/lms/static/sass/course/_staff_grading.scss b/lms/static/sass/course/_staff_grading.scss index 816c0efd50..92fa760d4a 100644 --- a/lms/static/sass/course/_staff_grading.scss +++ 
b/lms/static/sass/course/_staff_grading.scss @@ -120,29 +120,22 @@ div.peer-grading{ .calibration-panel { float:left; - width:47%; + width:48%; } .grading-panel { float:right; - width: 47%; + width: 48%; } .current-state { - background: #0F6B8A; + background: #1D9DD9; h3, p { color: white; } } - &:after - { - content:"."; - display:block; - height:0; - visibility: hidden; - clear:both; - } + @include clearfix; } From c4d1b2e643aacad659076a6f6bfef8449efa72b2 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Wed, 9 Jan 2013 14:41:23 -0500 Subject: [PATCH 40/49] Use correct user id and make the reload button a back button --- .../open_ended_grading/peer_grading_service.py | 11 ++++++----- .../src/peer_grading/peer_grading_problem.coffee | 2 +- lms/templates/peer_grading/peer_grading_problem.html | 2 +- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/peer_grading_service.py b/lms/djangoapps/open_ended_grading/peer_grading_service.py index 0b75997d91..b8d8d2dbe6 100644 --- a/lms/djangoapps/open_ended_grading/peer_grading_service.py +++ b/lms/djangoapps/open_ended_grading/peer_grading_service.py @@ -12,6 +12,7 @@ from grading_service import GradingServiceError from courseware.access import has_access from util.json_request import expect_json from xmodule.course_module import CourseDescriptor +from student.models import unique_id_for_user log = logging.getLogger(__name__) @@ -165,7 +166,7 @@ def get_next_submission(request, course_id): success, message = _check_required(request, required) if not success: return _err_response(message) - grader_id = request.user.id + grader_id = unique_id_for_user(request.user) p = request.POST location = p['location'] @@ -199,7 +200,7 @@ def save_grade(request, course_id): success, message = _check_required(request, required) if not success: return _err_response(message) - grader_id = request.user.id + grader_id = unique_id_for_user(request.user) p = request.POST location = p['location'] 
submission_id = p['submission_id'] @@ -240,7 +241,7 @@ def is_student_calibrated(request, course_id): success, message = _check_required(request, required) if not success: return _err_response(message) - grader_id = request.user.id + grader_id = unique_id_for_user(request.user) p = request.POST location = p['location'] @@ -285,7 +286,7 @@ def show_calibration_essay(request, course_id): if not success: return _err_response(message) - grader_id = request.user.id + grader_id = unique_id_for_user(request.user) p = request.POST location = p['location'] try: @@ -321,7 +322,7 @@ def save_calibration_essay(request, course_id): success, message = _check_required(request, required) if not success: return _err_response(message) - grader_id = request.user.id + grader_id = unique_id_for_user(request.user) p = request.POST location = p['location'] calibration_essay_id = p['submission_id'] diff --git a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee index 85b5f064a4..77cdd04b15 100644 --- a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee +++ b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee @@ -129,7 +129,7 @@ class PeerGradingProblem @interstitial_page_button = $('.interstitial-page-button') Collapsible.setCollapsibles(@content_panel) - @action_button.click -> document.location.reload(true) + @action_button.click -> history.back() @calibration_feedback_button.click => @calibration_feedback_panel.hide() @grading_wrapper.show() diff --git a/lms/templates/peer_grading/peer_grading_problem.html b/lms/templates/peer_grading/peer_grading_problem.html index 9c11574f8d..d493e84ace 100644 --- a/lms/templates/peer_grading/peer_grading_problem.html +++ b/lms/templates/peer_grading/peer_grading_problem.html @@ -107,6 +107,6 @@
- + From 13c692c29b538e4084edaf68d69f8d6abbb77405 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Wed, 9 Jan 2013 14:59:59 -0500 Subject: [PATCH 41/49] Show back button when we see an error. --- lms/static/coffee/src/peer_grading/peer_grading_problem.coffee | 1 + 1 file changed, 1 insertion(+) diff --git a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee index 3a7b7c515a..639fcca947 100644 --- a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee +++ b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee @@ -330,6 +330,7 @@ class PeerGradingProblem @calibration_feedback_panel.hide() @error_container.html(error_message) @content_panel.hide() + @action_button.show() show_submit_button: () => @submit_button.show() From 19bc6574871c507484af69c6541e9530bf7fef73 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Thu, 10 Jan 2013 12:44:54 -0500 Subject: [PATCH 42/49] Use correct version of the grader id. 
--- .../open_ended_grading/staff_grading_service.py | 9 +++++---- lms/djangoapps/open_ended_grading/views.py | 3 ++- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/staff_grading_service.py b/lms/djangoapps/open_ended_grading/staff_grading_service.py index f2d4c5ee19..521983eeee 100644 --- a/lms/djangoapps/open_ended_grading/staff_grading_service.py +++ b/lms/djangoapps/open_ended_grading/staff_grading_service.py @@ -16,6 +16,7 @@ from django.http import HttpResponse, Http404 from courseware.access import has_access from util.json_request import expect_json from xmodule.course_module import CourseDescriptor +from student.models import unique_id_for_user log = logging.getLogger(__name__) @@ -206,11 +207,11 @@ def get_next(request, course_id): if len(missing) > 0: return _err_response('Missing required keys {0}'.format( ', '.join(missing))) - grader_id = request.user.id + grader_id = unique_id_for_user(request.user) p = request.POST location = p['location'] - return HttpResponse(_get_next(course_id, request.user.id, location), + return HttpResponse(_get_next(course_id, grader_id, location), mimetype="application/json") @@ -238,7 +239,7 @@ def get_problem_list(request, course_id): """ _check_access(request.user, course_id) try: - response = staff_grading_service().get_problem_list(course_id, request.user.id) + response = staff_grading_service().get_problem_list(course_id, unique_id_for_user(request.user)) return HttpResponse(response, mimetype="application/json") except GradingServiceError: @@ -287,7 +288,7 @@ def save_grade(request, course_id): return _err_response('Missing required keys {0}'.format( ', '.join(missing))) - grader_id = request.user.id + grader_id = unique_id_for_user(request.user) p = request.POST diff --git a/lms/djangoapps/open_ended_grading/views.py b/lms/djangoapps/open_ended_grading/views.py index 887fe82aec..e1aaf7011f 100644 --- a/lms/djangoapps/open_ended_grading/views.py +++ 
b/lms/djangoapps/open_ended_grading/views.py @@ -21,6 +21,7 @@ from django_comment_client.models import Role, FORUM_ROLE_ADMINISTRATOR, FORUM_R from django_comment_client.utils import has_forum_access from psychometrics import psychoanalyze from student.models import CourseEnrollment +from student.models import unique_id_for_user from xmodule.course_module import CourseDescriptor from xmodule.modulestore import Location from xmodule.modulestore.django import modulestore @@ -78,7 +79,7 @@ def peer_grading(request, course_id): error_text = "" problem_list = [] try: - problem_list_text = peer_gs.get_problem_list(course_id, request.user.id) + problem_list_text = peer_gs.get_problem_list(course_id, unique_id_for_user(request.user)) problem_list_json = json.loads(problem_list_text) success = problem_list_json['success'] if 'error' in problem_list_json: From 4909b84966242a41c4cabd75ecc1007f2f2b4ba8 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Thu, 10 Jan 2013 14:51:32 -0500 Subject: [PATCH 43/49] Clean up code and make the arguments for get and post more logical. 
--- .../open_ended_grading/grading_service.py | 4 ++-- .../peer_grading_service.py | 24 ++++++++++++------- .../staff_grading_service.py | 6 ++--- 3 files changed, 19 insertions(+), 15 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/grading_service.py b/lms/djangoapps/open_ended_grading/grading_service.py index 3c92c5bddd..96bd931448 100644 --- a/lms/djangoapps/open_ended_grading/grading_service.py +++ b/lms/djangoapps/open_ended_grading/grading_service.py @@ -44,7 +44,7 @@ class GradingService(object): return response.json - def post(self, url, allow_redirects, data): + def post(self, url, data, allow_redirects=False): """ Make a post request to the grading controller """ @@ -58,7 +58,7 @@ class GradingService(object): return r.text - def get(self, url, allow_redirects, params): + def get(self, url, params, allow_redirects=False): """ Make a get request to the grading controller """ diff --git a/lms/djangoapps/open_ended_grading/peer_grading_service.py b/lms/djangoapps/open_ended_grading/peer_grading_service.py index b8d8d2dbe6..14065d54e4 100644 --- a/lms/djangoapps/open_ended_grading/peer_grading_service.py +++ b/lms/djangoapps/open_ended_grading/peer_grading_service.py @@ -16,6 +16,10 @@ from student.models import unique_id_for_user log = logging.getLogger(__name__) +""" +This is a mock peer grading service that can be used for unit tests +without making actual service calls to the grading controller +""" class MockPeerGradingService(object): def get_next_submission(self, problem_location, grader_id): return json.dumps({'success': True, @@ -26,7 +30,8 @@ class MockPeerGradingService(object): 'rubric': 'fake rubric', 'max_score': 4}) - def save_grade(self, location, grader_id, submission_id, score, feedback, submission_key): + def save_grade(self, location, grader_id, submission_id, + score, feedback, submission_key): return json.dumps({'success': True}) def is_student_calibrated(self, problem_location, grader_id): @@ -41,15 +46,16 @@ class 
MockPeerGradingService(object): 'rubric': 'fake rubric', 'max_score': 4}) - def save_calibration_essay(self, problem_location, grader_id, calibration_essay_id, submission_key, score, feedback): + def save_calibration_essay(self, problem_location, grader_id, + calibration_essay_id, submission_key, score, feedback): return {'success': True, 'actual_score': 2} def get_problem_list(self, course_id, grader_id): return json.dumps({'success': True, 'problem_list': [ - json.dumps({'location': 'i4x://MITx/3.091x/problem/open_ended_demo1', \ + json.dumps({'location': 'i4x://MITx/3.091x/problem/open_ended_demo1', 'problem_name': "Problem 1", 'num_graded': 3, 'num_pending': 5}), - json.dumps({'location': 'i4x://MITx/3.091x/problem/open_ended_demo2', \ + json.dumps({'location': 'i4x://MITx/3.091x/problem/open_ended_demo2', 'problem_name': "Problem 2", 'num_graded': 1, 'num_pending': 5}) ]}) @@ -78,15 +84,15 @@ class PeerGradingService(GradingService): 'feedback' : feedback, 'submission_key': submission_key, 'location': location} - return self.post(self.save_grade_url, False, data) + return self.post(self.save_grade_url, data) def is_student_calibrated(self, problem_location, grader_id): params = {'problem_id' : problem_location, 'student_id': grader_id} - return self.get(self.is_student_calibrated_url, False, params) + return self.get(self.is_student_calibrated_url, params) def show_calibration_essay(self, problem_location, grader_id): params = {'problem_id' : problem_location, 'student_id': grader_id} - return self.get(self.show_calibration_essay_url, False, params) + return self.get(self.show_calibration_essay_url, params) def save_calibration_essay(self, problem_location, grader_id, calibration_essay_id, submission_key, score, feedback): data = {'location': problem_location, @@ -95,11 +101,11 @@ class PeerGradingService(GradingService): 'submission_key': submission_key, 'score': score, 'feedback': feedback} - return self.post(self.save_calibration_essay_url, False, data) + 
return self.post(self.save_calibration_essay_url, data) def get_problem_list(self, course_id, grader_id): params = {'course_id': course_id, 'student_id': grader_id} - response = self.get(self.get_problem_list_url, False, params) + response = self.get(self.get_problem_list_url, params) return response diff --git a/lms/djangoapps/open_ended_grading/staff_grading_service.py b/lms/djangoapps/open_ended_grading/staff_grading_service.py index 521983eeee..5c6cec17eb 100644 --- a/lms/djangoapps/open_ended_grading/staff_grading_service.py +++ b/lms/djangoapps/open_ended_grading/staff_grading_service.py @@ -85,7 +85,7 @@ class StaffGradingService(GradingService): GradingServiceError: something went wrong with the connection. """ params = {'course_id': course_id,'grader_id': grader_id} - return self.get(self.get_problem_list_url, False, params) + return self.get(self.get_problem_list_url, params) def get_next(self, course_id, location, grader_id): @@ -107,7 +107,6 @@ class StaffGradingService(GradingService): GradingServiceError: something went wrong with the connection. 
""" return self.get(self.get_next_url, - allow_redirects=False, params={'location': location, 'grader_id': grader_id}) @@ -131,8 +130,7 @@ class StaffGradingService(GradingService): 'grader_id': grader_id, 'skipped': skipped} - return self.post(self.save_grade_url, data=data, - allow_redirects=False) + return self.post(self.save_grade_url, data=data) # don't initialize until staff_grading_service() is called--means that just # importing this file doesn't create objects that may not have the right config From 0bfb1feed4af59965a9187351ffaf3507f28bd41 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Thu, 10 Jan 2013 15:06:16 -0500 Subject: [PATCH 44/49] Log better exceptions --- .../peer_grading_service.py | 21 +++++++++++-------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/peer_grading_service.py b/lms/djangoapps/open_ended_grading/peer_grading_service.py index 14065d54e4..5da243b0b5 100644 --- a/lms/djangoapps/open_ended_grading/peer_grading_service.py +++ b/lms/djangoapps/open_ended_grading/peer_grading_service.py @@ -181,8 +181,8 @@ def get_next_submission(request, course_id): return HttpResponse(response, mimetype="application/json") except GradingServiceError: - log.exception("Error from grading service. server url: {0}" - .format(staff_grading_service().url)) + log.exception("Error getting next submission. server url: {0} location: {1}, grader_id: {2}" + .format(staff_grading_service().url, location, grader_id)) return json.dumps({'success': False, 'error': 'Could not connect to grading service'}) @@ -218,8 +218,11 @@ def save_grade(request, course_id): score, feedback, submission_key) return HttpResponse(response, mimetype="application/json") except GradingServiceError: - log.exception("Error from grading service. server url: {0}" - .format(staff_grading_service().url)) + log.exception("""Error saving grade. 
server url: {0}, location: {1}, submission_id:{2}, + submission_key: {3}, score: {4}""" + .format(staff_grading_service().url, + location, submission_id, submission_key, score) + ) return json.dumps({'success': False, 'error': 'Could not connect to grading service'}) @@ -255,8 +258,8 @@ response = peer_grading_service().is_student_calibrated(location, grader_id) return HttpResponse(response, mimetype="application/json") except GradingServiceError: - log.exception("Error from grading service. server url: {0}" - .format(staff_grading_service().url)) + log.exception("Error from grading service. server url: {0}, grader_id: {1}, location: {2}" + .format(staff_grading_service().url, grader_id, location)) return json.dumps({'success': False, 'error': 'Could not connect to grading service'}) @@ -299,8 +302,8 @@ response = peer_grading_service().show_calibration_essay(location, grader_id) return HttpResponse(response, mimetype="application/json") except GradingServiceError: - log.exception("Error from grading service. server url: {0}" - .format(staff_grading_service().url)) + log.exception("Error from grading service. 
server url: {0}, location: {1}" + .format(staff_grading_service().url, location)) return json.dumps({'success': False, 'error': 'Could not connect to grading service'}) @@ -340,5 +343,5 @@ response = peer_grading_service().save_calibration_essay(location, grader_id, calibration_essay_id, submission_key, score, feedback) return HttpResponse(response, mimetype="application/json") except GradingServiceError: - log.exception("Error saving calibration grade") + log.exception("Error saving calibration grade, location: {0}, submission_id: {1}, submission_key: {2}, grader_id: {3}".format(location, calibration_essay_id, submission_key, grader_id)) return _err_response('Could not connect to grading service') From 084a3c33c8c821568a85c1b6b66db1d9147c26d3 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Thu, 10 Jan 2013 15:20:46 -0500 Subject: [PATCH 45/49] Update comments and remove some unnecessary code --- lms/djangoapps/open_ended_grading/tests.py | 5 ++--- lms/djangoapps/open_ended_grading/views.py | 25 +--------------------- 2 files changed, 3 insertions(+), 27 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/tests.py b/lms/djangoapps/open_ended_grading/tests.py index 30a58f6ee8..0c4376a44b 100644 --- a/lms/djangoapps/open_ended_grading/tests.py +++ b/lms/djangoapps/open_ended_grading/tests.py @@ -1,8 +1,7 @@ """ -This file demonstrates writing tests using the unittest module. These will pass -when you run "manage.py test". +Tests for open ended grading interfaces -Replace this with more appropriate tests for your application. +django-admin.py test --settings=lms.envs.test --pythonpath=. 
lms/djangoapps/open_ended_grading """ from django.test import TestCase diff --git a/lms/djangoapps/open_ended_grading/views.py b/lms/djangoapps/open_ended_grading/views.py index e1aaf7011f..d3ce7e167c 100644 --- a/lms/djangoapps/open_ended_grading/views.py +++ b/lms/djangoapps/open_ended_grading/views.py @@ -1,39 +1,20 @@ # Grading Views -from collections import defaultdict -import csv import logging -import os import urllib from django.conf import settings -from django.contrib.auth.models import User, Group -from django.http import HttpResponse -from django_future.csrf import ensure_csrf_cookie from django.views.decorators.cache import cache_control from mitxmako.shortcuts import render_to_response from django.core.urlresolvers import reverse -from courseware import grades -from courseware.access import has_access, get_access_group_name -from courseware.courses import get_course_with_access -from django_comment_client.models import Role, FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA -from django_comment_client.utils import has_forum_access -from psychometrics import psychoanalyze -from student.models import CourseEnrollment from student.models import unique_id_for_user -from xmodule.course_module import CourseDescriptor -from xmodule.modulestore import Location -from xmodule.modulestore.django import modulestore -from xmodule.modulestore.exceptions import InvalidLocationError, ItemNotFoundError, NoPathToItem -from xmodule.modulestore.search import path_to_location +from courseware.courses import get_course_with_access from peer_grading_service import PeerGradingService from peer_grading_service import MockPeerGradingService from grading_service import GradingServiceError import json -import track.views - from .staff_grading import StaffGrading @@ -52,14 +33,11 @@ def staff_grading(request, course_id): """ course = get_course_with_access(request.user, course_id, 'staff') - grading = StaffGrading(course) - ajax_url = 
reverse('staff_grading', kwargs={'course_id': course_id}) if not ajax_url.endswith('/'): ajax_url += '/' return render_to_response('instructor/staff_grading.html', { - 'view_html': '', 'course': course, 'course_id': course_id, 'ajax_url': ajax_url, @@ -99,7 +77,6 @@ def peer_grading(request, course_id): ajax_url += '/' return render_to_response('peer_grading/peer_grading.html', { - 'view_html': '', 'course': course, 'course_id': course_id, 'ajax_url': ajax_url, From ef6d77b116c28c0d050d9309d9234d2643d2145d Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Thu, 10 Jan 2013 15:33:17 -0500 Subject: [PATCH 46/49] Clean up names and refactor out some common logic --- lms/djangoapps/open_ended_grading/views.py | 37 +++++++++++++--------- 1 file changed, 22 insertions(+), 15 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/views.py b/lms/djangoapps/open_ended_grading/views.py index d3ce7e167c..858c9a4fd5 100644 --- a/lms/djangoapps/open_ended_grading/views.py +++ b/lms/djangoapps/open_ended_grading/views.py @@ -26,6 +26,18 @@ if settings.MOCK_PEER_GRADING: else: peer_gs = PeerGradingService(settings.PEER_GRADING_INTERFACE) +""" +Reverses the URL from the name and the course id, and then adds a trailing slash if +it does not exist yet + +""" +def _reverse_with_slash(url_name, course_id): + ajax_url = reverse(url_name, kwargs={'course_id': course_id}) + if not ajax_url.endswith('/'): + ajax_url += '/' + return ajax_url + + @cache_control(no_cache=True, no_store=True, must_revalidate=True) def staff_grading(request, course_id): """ @@ -33,9 +45,7 @@ def staff_grading(request, course_id): """ course = get_course_with_access(request.user, course_id, 'staff') - ajax_url = reverse('staff_grading', kwargs={'course_id': course_id}) - if not ajax_url.endswith('/'): - ajax_url += '/' + ajax_url = _reverse_with_slash('staff_grading', course_id) return render_to_response('instructor/staff_grading.html', { 'course': course, @@ -57,24 +67,23 @@ def peer_grading(request, 
course_id): error_text = "" problem_list = [] try: - problem_list_text = peer_gs.get_problem_list(course_id, unique_id_for_user(request.user)) - problem_list_json = json.loads(problem_list_text) - success = problem_list_json['success'] - if 'error' in problem_list_json: - error_text = problem_list_json['error'] + problem_list_json = peer_gs.get_problem_list(course_id, unique_id_for_user(request.user)) + problem_list_dict = json.loads(problem_list_json) + success = problem_list_dict['success'] + if 'error' in problem_list_dict: + error_text = problem_list_dict['error'] - problem_list = problem_list_json['problem_list'] + problem_list = problem_list_dict['problem_list'] except GradingServiceError: error_text = "Error occured while contacting the grading service" success = False + # catch error if the json loads fails except ValueError: error_text = "Could not get problem list" success = False - ajax_url = reverse('peer_grading', kwargs={'course_id': course_id}) - if not ajax_url.endswith('/'): - ajax_url += '/' + ajax_url = _reverse_with_slash('peer_grading', course_id) return render_to_response('peer_grading/peer_grading.html', { 'course': course, @@ -95,9 +104,7 @@ def peer_grading_problem(request, course_id): course = get_course_with_access(request.user, course_id, 'load') problem_location = request.GET.get("location") - ajax_url = reverse('peer_grading', kwargs={'course_id': course_id}) - if not ajax_url.endswith('/'): - ajax_url += '/' + ajax_url = _reverse_with_slash('peer_grading', course_id) return render_to_response('peer_grading/peer_grading_problem.html', { 'view_html': '', From 8f21d7a738a415d58acc8fb7d0712caad7e7ee43 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carlos=20Andr=C3=A9s=20Rocha?= Date: Thu, 10 Jan 2013 14:48:27 -0500 Subject: [PATCH 47/49] Add property to course module to check if a course is new The property can be set in the policy metadata. If it is not specified then it is set to true if the course has not started yet. 
Also adds a property to check how many days are left until the course starts. --- common/djangoapps/student/views.py | 2 +- common/lib/xmodule/xmodule/course_module.py | 41 +++++++-- .../xmodule/tests/test_course_module.py | 90 +++++++++++++++++++ lms/djangoapps/courseware/courses.py | 30 ------- lms/djangoapps/courseware/views.py | 4 +- lms/templates/course.html | 2 +- 6 files changed, 130 insertions(+), 39 deletions(-) create mode 100644 common/lib/xmodule/xmodule/tests/test_course_module.py diff --git a/common/djangoapps/student/views.py b/common/djangoapps/student/views.py index 06c59d7937..39805fd85f 100644 --- a/common/djangoapps/student/views.py +++ b/common/djangoapps/student/views.py @@ -78,7 +78,7 @@ def index(request, extra_context={}, user=None): courses = get_courses(None, domain=domain) # Sort courses by how far are they from they start day - key = lambda course: course.metadata['days_to_start'] + key = lambda course: course.days_until_start courses = sorted(courses, key=key, reverse=True) # Get the 3 most recent news diff --git a/common/lib/xmodule/xmodule/course_module.py b/common/lib/xmodule/xmodule/course_module.py index 5253d2976f..163e40b343 100644 --- a/common/lib/xmodule/xmodule/course_module.py +++ b/common/lib/xmodule/xmodule/course_module.py @@ -1,9 +1,9 @@ -from fs.errors import ResourceNotFoundError import logging from lxml import etree -from path import path # NOTE (THK): Only used for detecting presence of syllabus +from path import path # NOTE (THK): Only used for detecting presence of syllabus import requests import time +from datetime import datetime from xmodule.util.decorators import lazyproperty from xmodule.graders import load_grading_policy @@ -13,6 +13,7 @@ from xmodule.timeparse import parse_time, stringify_time log = logging.getLogger(__name__) + class CourseDescriptor(SequenceDescriptor): module_class = SequenceModule @@ -165,6 +166,38 @@ class CourseDescriptor(SequenceDescriptor): def show_calculator(self): return 
self.metadata.get("show_calculator", None) == "Yes" + @property + def is_new(self): + # The course is "new" if either the metadata flag is_new is + # true or if the course has not started yet + flag = self.metadata.get('is_new', None) + if flag is None: + return self.days_until_start > 1 + elif isinstance(flag, basestring): + return flag.lower() in ['true', 'yes', 'y'] + else: + return bool(flag) + + @property + def days_until_start(self): + def convert_to_datetime(timestamp): + return datetime.fromtimestamp(time.mktime(timestamp)) + + start_date = convert_to_datetime(self.start) + + # Try to use course advertised date if we can parse it + advertised_start = self.metadata.get('advertised_start', None) + if advertised_start: + try: + start_date = datetime.strptime(advertised_start, + "%Y-%m-%dT%H:%M") + except ValueError: + pass # Invalid date, keep using 'start' + + now = convert_to_datetime(time.gmtime()) + days_until_start = (start_date - now).days + return days_until_start + @lazyproperty def grading_context(self): """ @@ -244,7 +277,6 @@ class CourseDescriptor(SequenceDescriptor): raise ValueError("{0} is not a course location".format(loc)) return "/".join([loc.org, loc.course, loc.name]) - @property def id(self): """Return the course_id for this course""" @@ -258,7 +290,7 @@ class CourseDescriptor(SequenceDescriptor): # form text... 
if parsed_advertised_start is None and \ ('advertised_start' in self.metadata): - return self.metadata['advertised_start'] + return self.metadata['advertised_start'] displayed_start = parsed_advertised_start or self.start @@ -341,4 +373,3 @@ class CourseDescriptor(SequenceDescriptor): @property def org(self): return self.location.org - diff --git a/common/lib/xmodule/xmodule/tests/test_course_module.py b/common/lib/xmodule/xmodule/tests/test_course_module.py new file mode 100644 index 0000000000..63eaec1f61 --- /dev/null +++ b/common/lib/xmodule/xmodule/tests/test_course_module.py @@ -0,0 +1,90 @@ +import unittest +from time import strptime, gmtime +from fs.memoryfs import MemoryFS + +from mock import Mock, patch + +from xmodule.modulestore.xml import ImportSystem, XMLModuleStore + + +ORG = 'test_org' +COURSE = 'test_course' + +NOW = strptime('2013-01-01T01:00:00', '%Y-%m-%dT%H:%M:00') + + +class DummySystem(ImportSystem): + @patch('xmodule.modulestore.xml.OSFS', lambda dir: MemoryFS()) + def __init__(self, load_error_modules): + + xmlstore = XMLModuleStore("data_dir", course_dirs=[], + load_error_modules=load_error_modules) + course_id = "/".join([ORG, COURSE, 'test_run']) + course_dir = "test_dir" + policy = {} + error_tracker = Mock() + parent_tracker = Mock() + + super(DummySystem, self).__init__( + xmlstore, + course_id, + course_dir, + policy, + error_tracker, + parent_tracker, + load_error_modules=load_error_modules, + ) + + +class IsNewCourseTestCase(unittest.TestCase): + """Make sure the property is_new works on courses""" + @staticmethod + def get_dummy_course(start, is_new=None, load_error_modules=True): + """Get a dummy course""" + + system = DummySystem(load_error_modules) + is_new = '' if is_new is None else 'is_new="{0}"'.format(is_new).lower() + + start_xml = ''' + + + Two houses, ... 
+ + + '''.format(org=ORG, course=COURSE, start=start, is_new=is_new) + + return system.process_xml(start_xml) + + @patch('xmodule.course_module.time.gmtime') + def test_non_started_yet(self, gmtime_mock): + descriptor = self.get_dummy_course(start='2013-01-05T12:00') + gmtime_mock.return_value = NOW + assert(descriptor.is_new == True) + assert(descriptor.days_until_start == 4) + + @patch('xmodule.course_module.time.gmtime') + def test_already_started(self, gmtime_mock): + gmtime_mock.return_value = NOW + + descriptor = self.get_dummy_course(start='2012-12-02T12:00') + assert(descriptor.is_new == False) + assert(descriptor.days_until_start < 0) + + @patch('xmodule.course_module.time.gmtime') + def test_is_new_set(self, gmtime_mock): + gmtime_mock.return_value = NOW + + descriptor = self.get_dummy_course(start='2012-12-02T12:00', is_new=True) + assert(descriptor.is_new == True) + assert(descriptor.days_until_start < 0) + + descriptor = self.get_dummy_course(start='2013-02-02T12:00', is_new=False) + assert(descriptor.is_new == False) + assert(descriptor.days_until_start > 0) + + descriptor = self.get_dummy_course(start='2013-02-02T12:00', is_new=True) + assert(descriptor.is_new == True) + assert(descriptor.days_until_start > 0) diff --git a/lms/djangoapps/courseware/courses.py b/lms/djangoapps/courseware/courses.py index dc530bdebc..7c0d30ebd8 100644 --- a/lms/djangoapps/courseware/courses.py +++ b/lms/djangoapps/courseware/courses.py @@ -233,35 +233,5 @@ def get_courses(user, domain=None): courses = branding.get_visible_courses(domain) courses = [c for c in courses if has_access(user, c, 'see_exists')] - # Add metadata about the start day and if the course is new - for course in courses: - days_to_start = _get_course_days_to_start(course) - - metadata = course.metadata - metadata['days_to_start'] = days_to_start - metadata['is_new'] = course.metadata.get('is_new', days_to_start > 1) - courses = sorted(courses, key=lambda course:course.number) return courses - - -def 
_get_course_days_to_start(course): - from datetime import datetime as dt - from time import mktime, gmtime - - convert_to_datetime = lambda ts: dt.fromtimestamp(mktime(ts)) - - start_date = convert_to_datetime(course.start) - - # If the course has a valid advertised date, use that instead - advertised_start = course.metadata.get('advertised_start', None) - if advertised_start: - try: - start_date = dt.strptime(advertised_start, "%Y-%m-%dT%H:%M") - except ValueError: - pass # Invalid date, keep using course.start - - now = convert_to_datetime(gmtime()) - days_to_start = (start_date - now).days - - return days_to_start diff --git a/lms/djangoapps/courseware/views.py b/lms/djangoapps/courseware/views.py index f6e87dfe9f..9e52e2b281 100644 --- a/lms/djangoapps/courseware/views.py +++ b/lms/djangoapps/courseware/views.py @@ -70,7 +70,7 @@ def courses(request): courses = get_courses(request.user, domain=request.META.get('HTTP_HOST')) # Sort courses by how far are they from they start day - key = lambda course: course.metadata['days_to_start'] + key = lambda course: course.days_until_start courses = sorted(courses, key=key, reverse=True) return render_to_response("courseware/courses.html", {'courses': courses}) @@ -440,7 +440,7 @@ def university_profile(request, org_id): domain=request.META.get('HTTP_HOST'))[org_id] # Sort courses by how far are they from they start day - key = lambda course: course.metadata['days_to_start'] + key = lambda course: course.days_until_start courses = sorted(courses, key=key, reverse=True) context = dict(courses=courses, org_id=org_id) diff --git a/lms/templates/course.html b/lms/templates/course.html index a3217d2da5..a2eff572e1 100644 --- a/lms/templates/course.html +++ b/lms/templates/course.html @@ -5,7 +5,7 @@ %> <%page args="course" />
- %if course.metadata.get('is_new'): + %if course.is_new: New %endif From 81bb2dc979f22cb157b2d437869370f4ea3cbae7 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Thu, 10 Jan 2013 16:45:19 -0500 Subject: [PATCH 48/49] Better and clearer comments along with some fixes for code review issues --- .../peer_grading_service.py | 8 ++++ .../src/peer_grading/peer_grading.coffee | 9 ++-- .../peer_grading/peer_grading_problem.coffee | 44 ++++++++++++++++--- 3 files changed, 51 insertions(+), 10 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/peer_grading_service.py b/lms/djangoapps/open_ended_grading/peer_grading_service.py index 5da243b0b5..859499ff7e 100644 --- a/lms/djangoapps/open_ended_grading/peer_grading_service.py +++ b/lms/djangoapps/open_ended_grading/peer_grading_service.py @@ -1,3 +1,11 @@ +""" +This module provides an interface on the grading-service backend +for peer grading + +Use peer_grading_service() to get the version specified +in settings.PEER_GRADING_INTERFACE + +""" import json import logging import requests diff --git a/lms/static/coffee/src/peer_grading/peer_grading.coffee b/lms/static/coffee/src/peer_grading/peer_grading.coffee index c20944252c..0736057df8 100644 --- a/lms/static/coffee/src/peer_grading/peer_grading.coffee +++ b/lms/static/coffee/src/peer_grading/peer_grading.coffee @@ -1,10 +1,13 @@ +# This is a simple class that just hides the error container +# and message container when they are empty +# Can (and should be) expanded upon when our problem list +# becomes more sophisticated class PeerGrading - constructor: (backend) -> + constructor: () -> @error_container = $('.error-container') @error_container.toggle(not @error_container.is(':empty')) @message_container = $('.message-container') @message_container.toggle(not @message_container.is(':empty')) -mock_backend = false -$(document).ready(() -> new PeerGrading(mock_backend)) +$(document).ready(() -> new PeerGrading()) diff --git 
a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee index 77cdd04b15..e815a05d64 100644 --- a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee +++ b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee @@ -1,3 +1,20 @@ +################################## +# +# This is the JS that renders the peer grading problem page. +# Fetches the correct problem and/or calibration essay +# and sends back the grades +# +# Should not be run when we don't have a location to send back +# to the server +# +# PeerGradingProblemBackend - +# makes all the ajax requests and provides a mock interface +# for testing purposes +# +# PeerGradingProblem - +# handles the rendering and user interactions with the interface +# +################################## class PeerGradingProblemBackend constructor: (ajax_url, mock_backend) -> @mock_backend = mock_backend @@ -8,7 +25,7 @@ class PeerGradingProblemBackend if @mock_backend callback(@mock(cmd, data)) else - # TODO: replace with postWithPrefix when that's loaded + # if this post request fails, the error callback will catch it $.post(@ajax_url + cmd, data, callback) .error => callback({success: false, error: "Error occured while performing this operation"}) @@ -90,13 +107,13 @@ class PeerGradingProblem @prompt_wrapper = $('.prompt-wrapper') @backend = backend - # ugly hack to prevent this code from trying to run on the - # general peer grading page - if( @prompt_wrapper.length == 0) - return # get the location of the problem @location = $('.peer-grading').data('location') + # prevent this code from trying to run + # when we don't have a location + if(!@location) + return # get the other elements we want to fill in @submission_container = $('.submission-container') @@ -180,6 +197,8 @@ class PeerGradingProblem # Callbacks for various events # ########## + + # called after we perform an is_student_calibrated check calibration_check_callback: 
(response) => if response.success # if we haven't been calibrating before @@ -199,12 +218,17 @@ class PeerGradingProblem else @render_error("Error contacting the grading service") + + # called after we submit a calibration score calibration_callback: (response) => if response.success @render_calibration_feedback(response) else if response.error @render_error(response.error) + else + @render_error("Error saving calibration score") + # called after we submit a submission score submission_callback: (response) => if response.success @is_calibrated_check() @@ -216,6 +240,7 @@ class PeerGradingProblem else @render_error("Error occurred while submitting grade") + # called after a grade is selected on the interface graded_callback: (event) => @grading_message.hide() @score = event.target.value @@ -240,6 +265,8 @@ class PeerGradingProblem @grading_panel.removeClass('current-state') # Display the right text + # both versions of the text are written into the template itself + # we only need to show/hide the correct ones at the correct time @calibration_panel.find('.calibration-text').show() @grading_panel.find('.calibration-text').show() @calibration_panel.find('.grading-text').hide() @@ -265,6 +292,8 @@ class PeerGradingProblem @grading_panel.addClass('current-state') # Display the correct text + # both versions of the text are written into the template itself + # we only need to show/hide the correct ones at the correct time @calibration_panel.find('.calibration-text').hide() @grading_panel.find('.calibration-text').hide() @calibration_panel.find('.grading-text').show() @@ -285,6 +314,7 @@ class PeerGradingProblem new_text += "

#{paragraph}

" return new_text + # render common information between calibration and grading render_submission_data: (response) => @content_panel.show() @@ -302,7 +332,6 @@ class PeerGradingProblem render_calibration_feedback: (response) => # display correct grade - #@grading_wrapper.hide() @calibration_feedback_panel.slideDown() calibration_wrapper = $('.calibration-feedback-wrapper') calibration_wrapper.html("

The score you gave was: #{@score}. The actual score is: #{response.actual_score}

") @@ -314,7 +343,7 @@ class PeerGradingProblem if score == actual_score calibration_wrapper.append("

Congratulations! Your score matches the actual score!

") else - calibration_wrapper.append("

Please try to understand the grading critera better so that you will be more accurate next time.

") + calibration_wrapper.append("

Please try to understand the grading critera better to be more accurate next time.

") # disable score selection and submission from the grading interface $("input[name='score-selection']").attr('disabled', true) @@ -323,6 +352,7 @@ class PeerGradingProblem render_interstitial_page: () => @content_panel.hide() @interstitial_page.show() + render_error: (error_message) => @error_container.show() @calibration_feedback_panel.hide() From 9479ef3c849a9281dab6b660ab4fb303b8d641f7 Mon Sep 17 00:00:00 2001 From: Chris Dodge Date: Fri, 11 Jan 2013 14:29:53 -0500 Subject: [PATCH 49/49] fix up call signatures for get_parent_locations() --- cms/djangoapps/contentstore/views.py | 6 +++--- common/lib/xmodule/xmodule/course_module.py | 1 - common/lib/xmodule/xmodule/modulestore/draft.py | 4 ++-- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/cms/djangoapps/contentstore/views.py b/cms/djangoapps/contentstore/views.py index 063c69e950..b7f8aba1ba 100644 --- a/cms/djangoapps/contentstore/views.py +++ b/cms/djangoapps/contentstore/views.py @@ -202,7 +202,7 @@ def edit_subsection(request, location): if item.location.category != 'sequential': return HttpResponseBadRequest() - parent_locs = modulestore().get_parent_locations(location) + parent_locs = modulestore().get_parent_locations(location, None) # we're for now assuming a single parent if len(parent_locs) != 1: @@ -285,10 +285,10 @@ def edit_unit(request, location): # this will need to change to check permissions correctly so as # to pick the correct parent subsection - containing_subsection_locs = modulestore().get_parent_locations(location) + containing_subsection_locs = modulestore().get_parent_locations(location, None) containing_subsection = modulestore().get_item(containing_subsection_locs[0]) - containing_section_locs = modulestore().get_parent_locations(containing_subsection.location) + containing_section_locs = modulestore().get_parent_locations(containing_subsection.location, None) containing_section = modulestore().get_item(containing_section_locs[0]) # cdodge hack. 
We're having trouble previewing drafts via jump_to redirect diff --git a/common/lib/xmodule/xmodule/course_module.py b/common/lib/xmodule/xmodule/course_module.py index 259e193315..8b4db799cb 100644 --- a/common/lib/xmodule/xmodule/course_module.py +++ b/common/lib/xmodule/xmodule/course_module.py @@ -6,7 +6,6 @@ import requests import time from datetime import datetime -from xmodule.graders import load_grading_policy from xmodule.modulestore import Location from xmodule.seq_module import SequenceDescriptor, SequenceModule from xmodule.timeparse import parse_time, stringify_time diff --git a/common/lib/xmodule/xmodule/modulestore/draft.py b/common/lib/xmodule/xmodule/modulestore/draft.py index 5ad663cec2..4b0b5c8abf 100644 --- a/common/lib/xmodule/xmodule/modulestore/draft.py +++ b/common/lib/xmodule/xmodule/modulestore/draft.py @@ -160,13 +160,13 @@ class DraftModuleStore(ModuleStoreBase): return super(DraftModuleStore, self).delete_item(as_draft(location)) - def get_parent_locations(self, location): + def get_parent_locations(self, location, course_id): '''Find all locations that are the parents of this location. Needed for path_to_location(). returns an iterable of things that can be passed to Location. ''' - return super(DraftModuleStore, self).get_parent_locations(location) + return super(DraftModuleStore, self).get_parent_locations(location, course_id) def publish(self, location, published_by_id): """