From e7450874557b48c5479dad6bc9f008c6c50100ce Mon Sep 17 00:00:00 2001 From: Jay Zoldak Date: Tue, 29 Jan 2013 14:30:15 -0500 Subject: [PATCH 001/126] Fix permissions bug and add test cases for django comment client permissions. --- .../django_comment_client/permissions.py | 7 +- lms/djangoapps/django_comment_client/tests.py | 111 --------------- .../django_comment_client/tests/__init__.py | 0 .../tests/test_permissions.py | 128 ++++++++++++++++++ 4 files changed, 133 insertions(+), 113 deletions(-) delete mode 100644 lms/djangoapps/django_comment_client/tests.py create mode 100644 lms/djangoapps/django_comment_client/tests/__init__.py create mode 100644 lms/djangoapps/django_comment_client/tests/test_permissions.py diff --git a/lms/djangoapps/django_comment_client/permissions.py b/lms/djangoapps/django_comment_client/permissions.py index b95a890dda..b583c3fe74 100644 --- a/lms/djangoapps/django_comment_client/permissions.py +++ b/lms/djangoapps/django_comment_client/permissions.py @@ -29,6 +29,7 @@ def has_permission(user, permission, course_id=None): CONDITIONS = ['is_open', 'is_author'] +# data may be a json file def check_condition(user, condition, course_id, data): def check_open(user, condition, course_id, data): try: @@ -61,8 +62,10 @@ def check_conditions_permissions(user, permissions, course_id, **kwargs): def test(user, per, operator="or"): if isinstance(per, basestring): if per in CONDITIONS: - return check_condition(user, per, course_id, kwargs) + return check_condition(user, per, course_id, kwargs['data']) return cached_has_permission(user, per, course_id=course_id) + # TODO: refactor this to be more clear. + # e.g. the "and operator in" bit on the next line is not needed? 
elif isinstance(per, list) and operator in ["and", "or"]: results = [test(user, x, operator="and") for x in per] if operator == "or": @@ -102,4 +105,4 @@ def check_permissions_by_view(user, course_id, content, name): p = VIEW_PERMISSIONS[name] except KeyError: logging.warning("Permission for view named %s does not exist in permissions.py" % name) - return check_conditions_permissions(user, p, course_id, content=content) + return check_conditions_permissions(user, p, course_id, data=content) diff --git a/lms/djangoapps/django_comment_client/tests.py b/lms/djangoapps/django_comment_client/tests.py deleted file mode 100644 index ac059a1e3f..0000000000 --- a/lms/djangoapps/django_comment_client/tests.py +++ /dev/null @@ -1,111 +0,0 @@ -from django.contrib.auth.models import User, Group -from django.core.urlresolvers import reverse -from django.test import TestCase -from django.test.client import RequestFactory -from django.conf import settings - -from mock import Mock - -from override_settings import override_settings - -import xmodule.modulestore.django - -from student.models import CourseEnrollment - -from django.db.models.signals import m2m_changed, pre_delete, pre_save, post_delete, post_save -from django.dispatch.dispatcher import _make_id -import string -import random -from .permissions import has_permission -from .models import Role, Permission - -from xmodule.modulestore.django import modulestore -from xmodule.modulestore import Location -from xmodule.modulestore.xml_importer import import_from_xml -from xmodule.modulestore.xml import XMLModuleStore - -import comment_client - -from courseware.tests.tests import PageLoader, TEST_DATA_XML_MODULESTORE - -#@override_settings(MODULESTORE=TEST_DATA_XML_MODULESTORE) -#class TestCohorting(PageLoader): -# """Check that cohorting works properly""" -# -# def setUp(self): -# xmodule.modulestore.django._MODULESTORES = {} -# -# # Assume courses are there -# self.toy = modulestore().get_course("edX/toy/2012_Fall") -# -# # 
Create two accounts -# self.student = 'view@test.com' -# self.student2 = 'view2@test.com' -# self.password = 'foo' -# self.create_account('u1', self.student, self.password) -# self.create_account('u2', self.student2, self.password) -# self.activate_user(self.student) -# self.activate_user(self.student2) -# -# def test_create_thread(self): -# my_save = Mock() -# comment_client.perform_request = my_save -# -# resp = self.client.post( -# reverse('django_comment_client.base.views.create_thread', -# kwargs={'course_id': 'edX/toy/2012_Fall', -# 'commentable_id': 'General'}), -# {'some': "some", -# 'data': 'data'}) -# self.assertTrue(my_save.called) -# -# #self.assertEqual(resp.status_code, 200) -# #self.assertEqual(my_save.something, "expected", "complaint if not true") -# -# self.toy.metadata["cohort_config"] = {"cohorted": True} -# -# # call the view again ... -# -# # assert that different things happened - - - -class PermissionsTestCase(TestCase): - def random_str(self, length=15, chars=string.ascii_uppercase + string.digits): - return ''.join(random.choice(chars) for x in range(length)) - - def setUp(self): - - self.course_id = "edX/toy/2012_Fall" - - self.moderator_role = Role.objects.get_or_create(name="Moderator", course_id=self.course_id)[0] - self.student_role = Role.objects.get_or_create(name="Student", course_id=self.course_id)[0] - - self.student = User.objects.create(username=self.random_str(), - password="123456", email="john@yahoo.com") - self.moderator = User.objects.create(username=self.random_str(), - password="123456", email="staff@edx.org") - self.moderator.is_staff = True - self.moderator.save() - self.student_enrollment = CourseEnrollment.objects.create(user=self.student, course_id=self.course_id) - self.moderator_enrollment = CourseEnrollment.objects.create(user=self.moderator, course_id=self.course_id) - - def tearDown(self): - self.student_enrollment.delete() - self.moderator_enrollment.delete() - -# Do we need to have this? 
We shouldn't be deleting students, ever -# self.student.delete() -# self.moderator.delete() - - def testDefaultRoles(self): - self.assertTrue(self.student_role in self.student.roles.all()) - self.assertTrue(self.moderator_role in self.moderator.roles.all()) - - def testPermission(self): - name = self.random_str() - self.moderator_role.add_permission(name) - self.assertTrue(has_permission(self.moderator, name, self.course_id)) - - self.student_role.add_permission(name) - self.assertTrue(has_permission(self.student, name, self.course_id)) diff --git a/lms/djangoapps/django_comment_client/tests/__init__.py b/lms/djangoapps/django_comment_client/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/lms/djangoapps/django_comment_client/tests/test_permissions.py b/lms/djangoapps/django_comment_client/tests/test_permissions.py new file mode 100644 index 0000000000..44e1a3a128 --- /dev/null +++ b/lms/djangoapps/django_comment_client/tests/test_permissions.py @@ -0,0 +1,128 @@ +import string +import random +import collections + +import factory +from django.test import TestCase + +from django.contrib.auth.models import User +from student.models import UserProfile, CourseEnrollment +from django_comment_client.models import Role, Permission +import django_comment_client.permissions as p + +class UserFactory(factory.Factory): + FACTORY_FOR = User + username = 'robot' + password = '123456' + email = 'robot@edx.org' + is_active = True + is_staff = False + +class CourseEnrollmentFactory(factory.Factory): + FACTORY_FOR = CourseEnrollment + user = factory.SubFactory(UserFactory) + course_id = 'edX/toy/2012_Fall' + +class RoleFactory(factory.Factory): + FACTORY_FOR = Role + name = 'Student' + course_id = 'edX/toy/2012_Fall' + +class PermissionFactory(factory.Factory): + FACTORY_FOR = Permission + name = 'create_comment' + + +class PermissionsTestCase(TestCase): + def setUp(self): + self.course_id = "edX/toy/2012_Fall" + + self.student_role = 
RoleFactory(name='Student') + self.moderator_role = RoleFactory(name='Moderator') + self.student = UserFactory(username='student', email='student@edx.org') + self.moderator = UserFactory(username='moderator', email='staff@edx.org', is_staff=True) + self.update_thread_permission = PermissionFactory(name='update_thread') + self.update_thread_permission.roles.add(self.student_role) + self.update_thread_permission.roles.add(self.moderator_role) + self.manage_moderator_permission = PermissionFactory(name='manage_moderator') + self.manage_moderator_permission.roles.add(self.moderator_role) + self.student_enrollment = CourseEnrollmentFactory(user=self.student) + self.moderator_enrollment = CourseEnrollmentFactory(user=self.moderator) + + self.student_open_thread = {'content': { + 'closed': False, + 'user_id': str(self.student.id)} + } + self.student_closed_thread = {'content': { + 'closed': True, + 'user_id': str(self.student.id)} + } + + def test_user_has_permission(self): + s_ut = p.has_permission(self.student, 'update_thread', self.course_id) + m_ut = p.has_permission(self.moderator, 'update_thread', self.course_id) + s_mm = p.has_permission(self.student, 'manage_moderator', self.course_id) + m_mm = p.has_permission(self.moderator, 'manage_moderator', self.course_id) + self.assertTrue(s_ut) + self.assertTrue(m_ut) + self.assertFalse(s_mm) + self.assertTrue(m_mm) + + def test_check_conditions(self): + # Checks whether the discussion thread is open, or whether the author is user + s_o = p.check_condition(self.student, 'is_open', self.course_id, self.student_open_thread) + s_a = p.check_condition(self.student, 'is_author', self.course_id, self.student_open_thread) + m_c = p.check_condition(self.moderator, 'is_open', self.course_id, self.student_closed_thread) + m_a = p.check_condition(self.moderator,'is_author', self.course_id, self.student_open_thread) + self.assertTrue(s_o) + self.assertTrue(s_a) + self.assertFalse(m_c) + self.assertFalse(m_a) + + def 
test_check_conditions_and_permissions(self): + # Check conditions + ret = p.check_conditions_permissions(self.student, + 'is_open', + self.course_id, + data=self.student_open_thread) + self.assertTrue(ret) + + # Check permissions + ret = p.check_conditions_permissions(self.student, + 'update_thread', + self.course_id, + data=self.student_open_thread) + self.assertTrue(ret) + + # Check that a list of permissions/conditions will be OR'd + ret = p.check_conditions_permissions(self.moderator, + ['is_open','manage_moderator'], + self.course_id, + data=self.student_open_thread) + self.assertTrue(ret) + + # Check that a list of permissions will be OR'd + ret = p.check_conditions_permissions(self.student, + ['update_thread','manage_moderator'], + self.course_id, + data=self.student_open_thread) + self.assertTrue(ret) + + # Check that a list of list of permissions will be AND'd + ret = p.check_conditions_permissions(self.student, + [['update_thread','manage_moderator']], + self.course_id, + data=self.student_open_thread) + self.assertFalse(ret) + + def test_check_permissions_by_view(self): + ret = p.check_permissions_by_view(self.student, self.course_id, + self.student_open_thread, 'openclose_thread') + self.assertFalse(ret) + + # Check a view permission that includes both a condition and a permission + self.vote_permission = PermissionFactory(name='vote') + self.vote_permission.roles.add(self.student_role) + ret = p.check_permissions_by_view(self.student, self.course_id, + self.student_open_thread, 'vote_for_comment') + self.assertTrue(ret) \ No newline at end of file From 1f049410ae5a2638260b196d8f3bcd3d8ef11351 Mon Sep 17 00:00:00 2001 From: Jay Zoldak Date: Tue, 29 Jan 2013 14:31:27 -0500 Subject: [PATCH 002/126] Add tests for django comment client utils --- .../django_comment_client/tests/test_utils.py | 89 +++++++++++++++++++ lms/djangoapps/django_comment_client/utils.py | 1 + 2 files changed, 90 insertions(+) create mode 100644 
lms/djangoapps/django_comment_client/tests/test_utils.py diff --git a/lms/djangoapps/django_comment_client/tests/test_utils.py b/lms/djangoapps/django_comment_client/tests/test_utils.py new file mode 100644 index 0000000000..2e24cbd837 --- /dev/null +++ b/lms/djangoapps/django_comment_client/tests/test_utils.py @@ -0,0 +1,89 @@ +import string +import random +import collections + +from django.test import TestCase + +import factory +from django.contrib.auth.models import User +from student.models import UserProfile, CourseEnrollment +from django_comment_client.models import Role, Permission + +import django_comment_client.models as models +import django_comment_client.utils as utils + +import xmodule.modulestore.django as django + +class UserFactory(factory.Factory): + FACTORY_FOR = User + username = 'robot' + password = '123456' + email = 'robot@edx.org' + is_active = True + is_staff = False + +class CourseEnrollmentFactory(factory.Factory): + FACTORY_FOR = CourseEnrollment + user = factory.SubFactory(UserFactory) + course_id = 'edX/toy/2012_Fall' + +class RoleFactory(factory.Factory): + FACTORY_FOR = Role + name = 'Student' + course_id = 'edX/toy/2012_Fall' + +class PermissionFactory(factory.Factory): + FACTORY_FOR = Permission + name = 'create_comment' + +class DictionaryTestCase(TestCase): + def test_extract(self): + d = {'cats': 'meow', 'dogs': 'woof'} + k = ['cats', 'dogs', 'hamsters'] + expected = {'cats': 'meow', 'dogs': 'woof', 'hamsters': None} + self.assertEqual(utils.extract(d, k), expected) + + def test_strip_none(self): + d = {'cats': 'meow', 'dogs': 'woof', 'hamsters': None} + expected = {'cats': 'meow', 'dogs': 'woof'} + self.assertEqual(utils.strip_none(d), expected) + + def test_strip_blank(self): + d = {'cats': 'meow', 'dogs': 'woof', 'hamsters': ' ', 'yetis': ''} + expected = {'cats': 'meow', 'dogs': 'woof'} + self.assertEqual(utils.strip_blank(d), expected) + + def test_merge_dict(self): + d1 ={'cats': 'meow', 'dogs': 'woof'} + d2 ={'lions': 
'roar','ducks': 'quack'} + expected ={'cats': 'meow', 'dogs': 'woof','lions': 'roar','ducks': 'quack'} + self.assertEqual(utils.merge_dict(d1, d2), expected) + +class AccessUtilsTestCase(TestCase): + def setUp(self): + self.course_id = 'edX/toy/2012_Fall' + self.student_role = RoleFactory(name='Student', course_id=self.course_id) + self.moderator_role = RoleFactory(name='Moderator', course_id=self.course_id) + self.student1 = UserFactory(username='student', email='student@edx.org') + self.student1_enrollment = CourseEnrollmentFactory(user=self.student1) + self.student_role.users.add(self.student1) + self.student2 = UserFactory(username='student2', email='student2@edx.org') + self.student2_enrollment = CourseEnrollmentFactory(user=self.student2) + self.moderator = UserFactory(username='moderator', email='staff@edx.org', is_staff=True) + self.moderator_enrollment = CourseEnrollmentFactory(user=self.moderator) + self.moderator_role.users.add(self.moderator) + + def test_get_role_ids(self): + ret = utils.get_role_ids(self.course_id) + expected = {u'Moderator': [3], u'Student': [1, 2], 'Staff': [3]} + self.assertEqual(ret, expected) + + def test_has_forum_access(self): + ret = utils.has_forum_access('student', self.course_id, 'Student') + self.assertTrue(ret) + + ret = utils.has_forum_access('not_a_student', self.course_id, 'Student') + self.assertFalse(ret) + + ret = utils.has_forum_access('student', self.course_id, 'NotARole') + self.assertFalse(ret) diff --git a/lms/djangoapps/django_comment_client/utils.py b/lms/djangoapps/django_comment_client/utils.py index 3c9669ac37..b58e3b30e6 100644 --- a/lms/djangoapps/django_comment_client/utils.py +++ b/lms/djangoapps/django_comment_client/utils.py @@ -35,6 +35,7 @@ def strip_blank(dic): return isinstance(v, str) and len(v.strip()) == 0 return dict([(k, v) for k, v in dic.iteritems() if not _is_blank(v)]) +# TODO should we be checking if d1 and d2 have the same keys with different values? 
def merge_dict(dic1, dic2): return dict(dic1.items() + dic2.items()) From 593b038780e261f10e9dbc704e16db0df5700a47 Mon Sep 17 00:00:00 2001 From: Jay Zoldak Date: Tue, 29 Jan 2013 14:43:28 -0500 Subject: [PATCH 003/126] Add tests for django-comment-client helpers --- lms/djangoapps/django_comment_client/helpers.py | 3 +++ .../django_comment_client/tests/test_helpers.py | 15 +++++++++++++++ 2 files changed, 18 insertions(+) create mode 100644 lms/djangoapps/django_comment_client/tests/test_helpers.py diff --git a/lms/djangoapps/django_comment_client/helpers.py b/lms/djangoapps/django_comment_client/helpers.py index 96fd82d37c..0a1e8639ef 100644 --- a/lms/djangoapps/django_comment_client/helpers.py +++ b/lms/djangoapps/django_comment_client/helpers.py @@ -12,6 +12,9 @@ import pystache_custom as pystache import urllib import os +# This method is used to pluralize the words "discussion" and "comment" +# when referring to how many discussion threads or comments the user +# has contributed to. 
def pluralize(singular_term, count): if int(count) >= 2 or int(count) == 0: return singular_term + 's' diff --git a/lms/djangoapps/django_comment_client/tests/test_helpers.py b/lms/djangoapps/django_comment_client/tests/test_helpers.py new file mode 100644 index 0000000000..bd67830841 --- /dev/null +++ b/lms/djangoapps/django_comment_client/tests/test_helpers.py @@ -0,0 +1,15 @@ +import string +import random +import collections + +from django.test import TestCase + +from django_comment_client.helpers import pluralize + +class PluralizeTestCase(TestCase): + + def testPluralize(self): + self.term = "cat" + self.assertEqual(pluralize(self.term, 0), "cats") + self.assertEqual(pluralize(self.term, 1), "cat") + self.assertEqual(pluralize(self.term, 2), "cats") From d81da9df4dacbc30f85935248b827aa90e4b1610 Mon Sep 17 00:00:00 2001 From: Jay Zoldak Date: Tue, 29 Jan 2013 14:44:37 -0500 Subject: [PATCH 004/126] Add tests for django comment client mustache helpers --- .../django_comment_client/mustache_helpers.py | 2 ++ .../tests/test_mustache_helpers.py | 26 +++++++++++++++++++ 2 files changed, 28 insertions(+) create mode 100644 lms/djangoapps/django_comment_client/tests/test_mustache_helpers.py diff --git a/lms/djangoapps/django_comment_client/mustache_helpers.py b/lms/djangoapps/django_comment_client/mustache_helpers.py index 6f04ca527c..9756294696 100644 --- a/lms/djangoapps/django_comment_client/mustache_helpers.py +++ b/lms/djangoapps/django_comment_client/mustache_helpers.py @@ -5,6 +5,8 @@ import urllib import sys import inspect +# This method is used to pluralize the words "discussion" and "comment" +# which is why you need to tack on an "s" for the case of 0 or two or more. 
def pluralize(content, text): num, word = text.split(' ') num = int(num or '0') diff --git a/lms/djangoapps/django_comment_client/tests/test_mustache_helpers.py b/lms/djangoapps/django_comment_client/tests/test_mustache_helpers.py new file mode 100644 index 0000000000..8638aba67e --- /dev/null +++ b/lms/djangoapps/django_comment_client/tests/test_mustache_helpers.py @@ -0,0 +1,26 @@ +import string +import random +import collections + +from django.test import TestCase + +import django_comment_client.mustache_helpers as mustache_helpers + +class PluralizeTestCase(TestCase): + + def test_pluralize(self): + self.text1 = '0 goat' + self.text2 = '1 goat' + self.text3 = '7 goat' + self.content = 'unused argument' + self.assertEqual(mustache_helpers.pluralize(self.content, self.text1), 'goats') + self.assertEqual(mustache_helpers.pluralize(self.content, self.text2), 'goat') + self.assertEqual(mustache_helpers.pluralize(self.content, self.text3), 'goats') + +class CloseThreadTextTestCase(TestCase): + + def test_close_thread_text(self): + self.contentClosed = {'closed': True} + self.contentOpen = {'closed': False} + self.assertEqual(mustache_helpers.close_thread_text(self.contentClosed), 'Re-open thread') + self.assertEqual(mustache_helpers.close_thread_text(self.contentOpen), 'Close thread') From 053547453ec641feb755247a90cc61560a1f44c0 Mon Sep 17 00:00:00 2001 From: Jay Zoldak Date: Tue, 29 Jan 2013 14:45:35 -0500 Subject: [PATCH 005/126] Add tests for django-comment-client models --- .../django_comment_client/models.py | 8 +-- .../tests/test_models.py | 54 +++++++++++++++++++ 2 files changed, 59 insertions(+), 3 deletions(-) create mode 100644 lms/djangoapps/django_comment_client/tests/test_models.py diff --git a/lms/djangoapps/django_comment_client/models.py b/lms/djangoapps/django_comment_client/models.py index 10c05c75e9..3c3ab2bb53 100644 --- a/lms/djangoapps/django_comment_client/models.py +++ b/lms/djangoapps/django_comment_client/models.py @@ -46,11 +46,13 @@ class 
Role(models.Model): def add_permission(self, permission): self.permissions.add(Permission.objects.get_or_create(name=permission)[0]) + def has_permission(self, permission): course = get_course_by_id(self.course_id) - if self.name == FORUM_ROLE_STUDENT and \ - (permission.startswith('edit') or permission.startswith('update') or permission.startswith('create')) and \ - (not course.forum_posts_allowed): + changing_comments = permission.startswith('edit') or \ + permission.startswith('update') or permission.startswith('create') + in_blackout_period = not course.forum_posts_allowed + if (self.name == FORUM_ROLE_STUDENT) and in_blackout_period and changing_comments: return False return self.permissions.filter(name=permission).exists() diff --git a/lms/djangoapps/django_comment_client/tests/test_models.py b/lms/djangoapps/django_comment_client/tests/test_models.py new file mode 100644 index 0000000000..3c9b05b202 --- /dev/null +++ b/lms/djangoapps/django_comment_client/tests/test_models.py @@ -0,0 +1,54 @@ +import django_comment_client.models as models +import django_comment_client.permissions as permissions +from django.test import TestCase +from nose.plugins.skip import SkipTest +from courseware.courses import get_course_by_id + +class RoleClassTestCase(TestCase): + def setUp(self): + self.course_id = "edX/toy/2012_Fall" + self.student_role = models.Role.objects.create(name="Student", + course_id=self.course_id) + + def test_unicode(self): + self.assertEqual(str(self.student_role), "Student for edX/toy/2012_Fall") + + self.admin_for_all = models.Role.objects.create(name="Administrator") + self.assertEqual(str(self.admin_for_all), "Administrator for all courses") + + def test_has_permission(self): + self.student_role.add_permission("delete_thread") + self.TA_role = models.Role.objects.create(name="Community TA", + course_id=self.course_id) + self.assertTrue(self.student_role.has_permission("delete_thread")) + self.assertFalse(self.TA_role.has_permission("delete_thread")) 
+ + # Toy course does not have a blackout period defined. + def test_students_can_create_if_not_during_blackout(self): + self.student_role.add_permission("create_comment") + self.assertTrue(self.student_role.has_permission("create_comment")) + + def test_students_cannot_create_during_blackout(self): + # Not sure how to set up these conditions + raise SkipTest() + + def test_inherit_permissions(self): + self.student_role.add_permission("delete_thread") + self.TA_role = models.Role.objects.create(name="Community TA", + course_id=self.course_id) + self.TA_role.inherit_permissions(self.student_role) + self.assertTrue(self.TA_role.has_permission("delete_thread")) + + # TODO: You should not be able to inherit permissions across courses? + def test_inherit_permissions_across_courses(self): + raise SkipTest() + self.student_role.add_permission("delete_thread") + self.course_id_2 = "MITx/6.002x/2012_Fall" + self.admin_role = models.Role.objects.create(name="Administrator", + course_id=self.course_id_2) + self.admin_role.inherit_permissions(self.student_role) + +class PermissionClassTestCase(TestCase): + def test_unicode(self): + self.permission = permissions.Permission.objects.create(name="test") + self.assertEqual(str(self.permission), "test") From 1a398980d05af4daf5410c7c40f22c2e8f52d5f2 Mon Sep 17 00:00:00 2001 From: Jay Zoldak Date: Tue, 29 Jan 2013 14:47:05 -0500 Subject: [PATCH 006/126] Add tests for diango-comment-client middleware --- .../tests/test_middleware.py | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 lms/djangoapps/django_comment_client/tests/test_middleware.py diff --git a/lms/djangoapps/django_comment_client/tests/test_middleware.py b/lms/djangoapps/django_comment_client/tests/test_middleware.py new file mode 100644 index 0000000000..e3249551b3 --- /dev/null +++ b/lms/djangoapps/django_comment_client/tests/test_middleware.py @@ -0,0 +1,28 @@ +import string +import random +import collections + +from django.test import TestCase 
+ +import comment_client +import django.http +import django_comment_client.middleware as middleware + +class AjaxExceptionTestCase(TestCase): + +# TODO: check whether the correct error message is produced. +# The error message should be the same as the argument to CommentClientError + def setUp(self): + self.a = middleware.AjaxExceptionMiddleware() + self.request1 = django.http.HttpRequest() + self.request0 = django.http.HttpRequest() + self.exception1 = comment_client.CommentClientError('{}') + self.exception0 = ValueError() + self.request1.META['HTTP_X_REQUESTED_WITH'] = "XMLHttpRequest" + self.request0.META['HTTP_X_REQUESTED_WITH'] = "SHADOWFAX" + + def test_process_exception(self): + self.assertIsInstance(self.a.process_exception(self.request1, self.exception1), middleware.JsonError) + self.assertIsNone(self.a.process_exception(self.request1, self.exception0)) + self.assertIsNone(self.a.process_exception(self.request0, self.exception1)) + self.assertIsNone(self.a.process_exception(self.request0, self.exception0)) From 5f538f078b4e7b02a3f328d114635971205267eb Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Wed, 30 Jan 2013 19:50:36 -0500 Subject: [PATCH 007/126] Pass flag through from LMS to grading controller --- .../open_ended_grading/peer_grading_service.py | 10 ++++++---- .../src/peer_grading/peer_grading_problem.coffee | 8 ++++++-- lms/templates/peer_grading/peer_grading_problem.html | 1 + 3 files changed, 13 insertions(+), 6 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/peer_grading_service.py b/lms/djangoapps/open_ended_grading/peer_grading_service.py index caa349125d..76f54bb12c 100644 --- a/lms/djangoapps/open_ended_grading/peer_grading_service.py +++ b/lms/djangoapps/open_ended_grading/peer_grading_service.py @@ -88,7 +88,7 @@ class PeerGradingService(GradingService): {'location': problem_location, 'grader_id': grader_id}) return json.dumps(self._render_rubric(response)) - def save_grade(self, location, grader_id, submission_id, score, 
feedback, submission_key, rubric_scores): + def save_grade(self, location, grader_id, submission_id, score, feedback, submission_key, rubric_scores, submission_flagged): data = {'grader_id' : grader_id, 'submission_id' : submission_id, 'score' : score, @@ -96,7 +96,8 @@ class PeerGradingService(GradingService): 'submission_key': submission_key, 'location': location, 'rubric_scores': rubric_scores, - 'rubric_scores_complete': True} + 'rubric_scores_complete': True, + 'submission_flagged' : submission_flagged} return self.post(self.save_grade_url, data) def is_student_calibrated(self, problem_location, grader_id): @@ -224,7 +225,7 @@ def save_grade(request, course_id): error: if there was an error in the submission, this is the error message """ _check_post(request) - required = set(['location', 'submission_id', 'submission_key', 'score', 'feedback', 'rubric_scores[]']) + required = set(['location', 'submission_id', 'submission_key', 'score', 'feedback', 'rubric_scores[]', 'submission_flagged']) success, message = _check_required(request, required) if not success: return _err_response(message) @@ -236,9 +237,10 @@ def save_grade(request, course_id): feedback = p['feedback'] submission_key = p['submission_key'] rubric_scores = p.getlist('rubric_scores[]') + submission_flagged = p['submission_flagged'] try: response = peer_grading_service().save_grade(location, grader_id, submission_id, - score, feedback, submission_key, rubric_scores) + score, feedback, submission_key, rubric_scores, submission_flagged) return HttpResponse(response, mimetype="application/json") except GradingServiceError: log.exception("""Error saving grade. 
server url: {0}, location: {1}, submission_id:{2}, diff --git a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee index c4b87eb30e..ab16b34d12 100644 --- a/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee +++ b/lms/static/coffee/src/peer_grading/peer_grading_problem.coffee @@ -175,6 +175,7 @@ class PeerGradingProblem @submission_container = $('.submission-container') @prompt_container = $('.prompt-container') @rubric_container = $('.rubric-container') + @flag_student_container = $('.flag-student-container') @calibration_panel = $('.calibration-panel') @grading_panel = $('.grading-panel') @content_panel = $('.content-panel') @@ -201,6 +202,7 @@ class PeerGradingProblem @action_button = $('.action-button') @calibration_feedback_button = $('.calibration-feedback-button') @interstitial_page_button = $('.interstitial-page-button') + @flag_student_checkbox = $('.flag-checkbox') Collapsible.setCollapsibles(@content_panel) @@ -252,7 +254,8 @@ class PeerGradingProblem location: @location submission_id: @essay_id_input.val() submission_key: @submission_key_input.val() - feedback: @feedback_area.val() + feedback: @feedback_area.val() + submission_flagged: @flag_student_checkbox.is(':checked') return data @@ -352,7 +355,7 @@ class PeerGradingProblem @grading_panel.find('.calibration-text').show() @calibration_panel.find('.grading-text').hide() @grading_panel.find('.grading-text').hide() - + @flag_student_container.hide() @submit_button.unbind('click') @submit_button.click @submit_calibration_essay @@ -379,6 +382,7 @@ class PeerGradingProblem @grading_panel.find('.calibration-text').hide() @calibration_panel.find('.grading-text').show() @grading_panel.find('.grading-text').show() + @flag_student_container.show() @submit_button.unbind('click') @submit_button.click @submit_grade diff --git a/lms/templates/peer_grading/peer_grading_problem.html 
b/lms/templates/peer_grading/peer_grading_problem.html index cb9ed1c0fb..04ee7415ec 100644 --- a/lms/templates/peer_grading/peer_grading_problem.html +++ b/lms/templates/peer_grading/peer_grading_problem.html @@ -72,6 +72,7 @@

+

Flag this submission for review by course staff (use if the submission contains inappropriate content):

From 78696c62b8ae3268d5cd73eb6a6f6db2d987d7c5 Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Wed, 30 Jan 2013 20:36:09 -0500 Subject: [PATCH 008/126] Work on open ended flagged problems --- .../controller_query_service.py | 20 +++++++ lms/djangoapps/open_ended_grading/views.py | 55 +++++++++++++++++++ .../open_ended_flagged_problems.html | 52 ++++++++++++++++++ 3 files changed, 127 insertions(+) create mode 100644 lms/templates/open_ended_problems/open_ended_flagged_problems.html diff --git a/lms/djangoapps/open_ended_grading/controller_query_service.py b/lms/djangoapps/open_ended_grading/controller_query_service.py index 7d515e2475..7c75d44287 100644 --- a/lms/djangoapps/open_ended_grading/controller_query_service.py +++ b/lms/djangoapps/open_ended_grading/controller_query_service.py @@ -21,6 +21,8 @@ class ControllerQueryService(GradingService): self.is_unique_url = self.url + '/is_name_unique/' self.combined_notifications_url = self.url + '/combined_notifications/' self.grading_status_list_url = self.url + '/get_grading_status_list/' + self.flagged_problem_list_url = self.url + '/get_flagged_problem_list/' + self.take_action_on_flags_url = self.url + '/take_action_on_flags/' def check_if_name_is_unique(self, location, problem_id, course_id): params = { @@ -57,3 +59,21 @@ class ControllerQueryService(GradingService): response = self.get(self.grading_status_list_url, params) return response + + def get_flagged_problem_list(self, course_id): + params = { + 'course_id' : course_id, + } + + response = self.get(self.flagged_problem_list_url, params) + return response + + def take_action_on_flags(self, course_id, student_id, submission_id): + params = { + 'course_id' : course_id, + 'student_id' : student_id, + 'submission_id' : submission_id, + } + + response = self.post(self.take_action_on_flags_url, params) + return response diff --git a/lms/djangoapps/open_ended_grading/views.py b/lms/djangoapps/open_ended_grading/views.py index 2ebd8778e6..fd43921761 100644 --- 
a/lms/djangoapps/open_ended_grading/views.py +++ b/lms/djangoapps/open_ended_grading/views.py @@ -193,6 +193,61 @@ def student_problem_list(request, course_id): # Checked above 'staff_access': False, }) +@cache_control(no_cache=True, no_store=True, must_revalidate=True) +def flagged_problem_list(request, course_id): + ''' + Show a student problem list + ''' + course = get_course_with_access(request.user, course_id, 'staff') + student_id = unique_id_for_user(request.user) + + # call problem list service + success = False + error_text = "" + problem_list = [] + base_course_url = reverse('courses') + + try: + problem_list_json = controller_qs.get_grading_status_list(course_id, unique_id_for_user(request.user)) + problem_list_dict = json.loads(problem_list_json) + success = problem_list_dict['success'] + if 'error' in problem_list_dict: + error_text = problem_list_dict['error'] + + problem_list = problem_list_dict['problem_list'] + + for i in xrange(0,len(problem_list)): + problem_url_parts = search.path_to_location(modulestore(), course.id, problem_list[i]['location']) + problem_url = base_course_url + "/" + for z in xrange(0,len(problem_url_parts)): + part = problem_url_parts[z] + if part is not None: + if z==1: + problem_url += "courseware/" + problem_url += part + "/" + + problem_list[i].update({'actual_url' : problem_url}) + + except GradingServiceError: + error_text = "Error occured while contacting the grading service" + success = False + # catch error if if the json loads fails + except ValueError: + error_text = "Could not get problem list" + success = False + + ajax_url = _reverse_with_slash('open_ended_problems', course_id) + + return render_to_response('open_ended_problems/open_ended_problems.html', { + 'course': course, + 'course_id': course_id, + 'ajax_url': ajax_url, + 'success': success, + 'problem_list': problem_list, + 'error_text': error_text, + # Checked above + 'staff_access': False, }) + @cache_control(no_cache=True, no_store=True, 
must_revalidate=True) def combined_notifications(request, course_id): course = get_course_with_access(request.user, course_id, 'load') diff --git a/lms/templates/open_ended_problems/open_ended_flagged_problems.html b/lms/templates/open_ended_problems/open_ended_flagged_problems.html new file mode 100644 index 0000000000..447a34ff45 --- /dev/null +++ b/lms/templates/open_ended_problems/open_ended_flagged_problems.html @@ -0,0 +1,52 @@ +<%inherit file="/main.html" /> +<%block name="bodyclass">${course.css_class} +<%namespace name='static' file='/static_content.html'/> + +<%block name="headextra"> +<%static:css group='course'/> + + +<%block name="title">${course.number} Flagged Open Ended Problems + +<%include file="/courseware/course_navigation.html" args="active_page='flagged_open_ended_problems'" /> + +
+
+
${error_text}
+

Flagged Open Ended Problems

+

Instructions

+

Here is a list of open ended problems for this course that have been flagged by students as potentially inappropriate.

+ % if success: + % if len(flagged_list) == 0: +
+ No flagged problems exist. +
+ %else: + + + + + + + + %for problem in problem_list: + + + + + + + %endfor +
Problem NameStudent IDStudent ResponseSubmission ID
+ ${problem['problem_name']} + + ${problem['student_id']} + + ${problem['student_response']} + + ${problem['submission_id']} +
+ %endif + %endif +
+
From dd72297f73ed9af5e22a8703709a25cd53bb74e6 Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Wed, 30 Jan 2013 20:39:53 -0500 Subject: [PATCH 009/126] Wire up flagged problem list --- lms/djangoapps/open_ended_grading/views.py | 21 ++++--------------- .../open_ended_flagged_problems.html | 2 +- lms/urls.py | 4 ++++ 3 files changed, 9 insertions(+), 18 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/views.py b/lms/djangoapps/open_ended_grading/views.py index fd43921761..6a4131dc6f 100644 --- a/lms/djangoapps/open_ended_grading/views.py +++ b/lms/djangoapps/open_ended_grading/views.py @@ -208,25 +208,13 @@ def flagged_problem_list(request, course_id): base_course_url = reverse('courses') try: - problem_list_json = controller_qs.get_grading_status_list(course_id, unique_id_for_user(request.user)) + problem_list_json = controller_qs.get_flagged_problem_list(course_id) problem_list_dict = json.loads(problem_list_json) success = problem_list_dict['success'] if 'error' in problem_list_dict: error_text = problem_list_dict['error'] - problem_list = problem_list_dict['problem_list'] - - for i in xrange(0,len(problem_list)): - problem_url_parts = search.path_to_location(modulestore(), course.id, problem_list[i]['location']) - problem_url = base_course_url + "/" - for z in xrange(0,len(problem_url_parts)): - part = problem_url_parts[z] - if part is not None: - if z==1: - problem_url += "courseware/" - problem_url += part + "/" - - problem_list[i].update({'actual_url' : problem_url}) + problem_list = problem_list_dict['flagged_submissions'] except GradingServiceError: error_text = "Error occured while contacting the grading service" @@ -238,7 +226,7 @@ def flagged_problem_list(request, course_id): ajax_url = _reverse_with_slash('open_ended_problems', course_id) - return render_to_response('open_ended_problems/open_ended_problems.html', { + return render_to_response('open_ended_problems/open_ended_flagged_problems.html', { 'course': course, 'course_id': course_id, 
'ajax_url': ajax_url, @@ -246,14 +234,13 @@ def flagged_problem_list(request, course_id): 'problem_list': problem_list, 'error_text': error_text, # Checked above - 'staff_access': False, }) + 'staff_access': True, }) @cache_control(no_cache=True, no_store=True, must_revalidate=True) def combined_notifications(request, course_id): course = get_course_with_access(request.user, course_id, 'load') user = request.user notifications = open_ended_notifications.combined_notifications(course, user) - log.debug(notifications) response = notifications['response'] notification_tuples=open_ended_notifications.NOTIFICATION_TYPES diff --git a/lms/templates/open_ended_problems/open_ended_flagged_problems.html b/lms/templates/open_ended_problems/open_ended_flagged_problems.html index 447a34ff45..054c028071 100644 --- a/lms/templates/open_ended_problems/open_ended_flagged_problems.html +++ b/lms/templates/open_ended_problems/open_ended_flagged_problems.html @@ -8,7 +8,7 @@ <%block name="title">${course.number} Flagged Open Ended Problems -<%include file="/courseware/course_navigation.html" args="active_page='flagged_open_ended_problems'" /> +<%include file="/courseware/course_navigation.html" args="active_page='open_ended_flagged_problems'" />
diff --git a/lms/urls.py b/lms/urls.py index 4b3cc94cab..f122635821 100644 --- a/lms/urls.py +++ b/lms/urls.py @@ -285,6 +285,10 @@ if settings.COURSEWARE_ENABLED: # Open Ended problem list url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/open_ended_problems$', 'open_ended_grading.views.student_problem_list', name='open_ended_problems'), + + # Open Ended flagged problem list + url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/open_ended_flagged_problems$', + 'open_ended_grading.views.flagged_problem_list', name='open_ended_flagged_problems'), # Cohorts management url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/cohorts$', From b6f77f6f8e8f1ba5d2cab867d50b7baf3e7e9334 Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Wed, 30 Jan 2013 20:43:57 -0500 Subject: [PATCH 010/126] Fix rendering for flagged problems view --- .../open_ended_problems/open_ended_flagged_problems.html | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lms/templates/open_ended_problems/open_ended_flagged_problems.html b/lms/templates/open_ended_problems/open_ended_flagged_problems.html index 054c028071..bf70906637 100644 --- a/lms/templates/open_ended_problems/open_ended_flagged_problems.html +++ b/lms/templates/open_ended_problems/open_ended_flagged_problems.html @@ -28,6 +28,7 @@ Student ID Student Response Submission ID + Location %for problem in problem_list: @@ -43,6 +44,9 @@ ${problem['submission_id']} + + ${problem['location']} + %endfor From e89196e0dd89bed02de4b153c74e7426088de2c0 Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Wed, 30 Jan 2013 20:47:00 -0500 Subject: [PATCH 011/126] Add in flagged problem list return --- lms/djangoapps/open_ended_grading/views.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/views.py b/lms/djangoapps/open_ended_grading/views.py index 6a4131dc6f..f1a9d41974 100644 --- a/lms/djangoapps/open_ended_grading/views.py +++ b/lms/djangoapps/open_ended_grading/views.py @@ -158,8 +158,9 @@ def student_problem_list(request, 
course_id): success = problem_list_dict['success'] if 'error' in problem_list_dict: error_text = problem_list_dict['error'] - - problem_list = problem_list_dict['problem_list'] + problem_list = [] + else: + problem_list = problem_list_dict['problem_list'] for i in xrange(0,len(problem_list)): problem_url_parts = search.path_to_location(modulestore(), course.id, problem_list[i]['location']) @@ -213,8 +214,9 @@ def flagged_problem_list(request, course_id): success = problem_list_dict['success'] if 'error' in problem_list_dict: error_text = problem_list_dict['error'] - - problem_list = problem_list_dict['flagged_submissions'] + problem_list=[] + else: + problem_list = problem_list_dict['flagged_submissions'] except GradingServiceError: error_text = "Error occured while contacting the grading service" From f89e36b5b9bd7fc21fa26f636635f39804c0019e Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Wed, 30 Jan 2013 20:54:02 -0500 Subject: [PATCH 012/126] Flagged list renders properly --- .../open_ended_flagged_problems.html | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/lms/templates/open_ended_problems/open_ended_flagged_problems.html b/lms/templates/open_ended_problems/open_ended_flagged_problems.html index bf70906637..eb7b34d8e6 100644 --- a/lms/templates/open_ended_problems/open_ended_flagged_problems.html +++ b/lms/templates/open_ended_problems/open_ended_flagged_problems.html @@ -17,18 +17,17 @@

Instructions

Here is a list of open ended problems for this course that have been flagged by students as potentially inappropriate.

% if success: - % if len(flagged_list) == 0: + % if len(problem_list) == 0:
No flagged problems exist.
%else: - + - - - + + %for problem in problem_list: @@ -38,14 +37,11 @@ - %endfor From b248a0957254071c4b0db85cd1aaa0300fe74e8e Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Wed, 30 Jan 2013 21:06:54 -0500 Subject: [PATCH 013/126] Work on adding in ban actions --- .../controller_query_service.py | 4 ++- .../peer_grading_service.py | 2 +- lms/djangoapps/open_ended_grading/views.py | 27 +++++++++++++++++++ .../open_ended_flagged_problems.html | 16 +++++++---- 4 files changed, 42 insertions(+), 7 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/controller_query_service.py b/lms/djangoapps/open_ended_grading/controller_query_service.py index 7c75d44287..d40c9b4428 100644 --- a/lms/djangoapps/open_ended_grading/controller_query_service.py +++ b/lms/djangoapps/open_ended_grading/controller_query_service.py @@ -68,12 +68,14 @@ class ControllerQueryService(GradingService): response = self.get(self.flagged_problem_list_url, params) return response - def take_action_on_flags(self, course_id, student_id, submission_id): + def take_action_on_flags(self, course_id, student_id, submission_id, action_type): params = { 'course_id' : course_id, 'student_id' : student_id, 'submission_id' : submission_id, + 'action_type' : action_type } response = self.post(self.take_action_on_flags_url, params) return response + diff --git a/lms/djangoapps/open_ended_grading/peer_grading_service.py b/lms/djangoapps/open_ended_grading/peer_grading_service.py index 76f54bb12c..994ba0b2be 100644 --- a/lms/djangoapps/open_ended_grading/peer_grading_service.py +++ b/lms/djangoapps/open_ended_grading/peer_grading_service.py @@ -377,4 +377,4 @@ def save_calibration_essay(request, course_id): return HttpResponse(response, mimetype="application/json") except GradingServiceError: log.exception("Error saving calibration grade, location: {0}, submission_id: {1}, submission_key: {2}, grader_id: {3}".format(location, submission_id, submission_key, grader_id)) - return _err_response('Could not 
connect to grading service') + return _err_response('Could not connect to grading service') \ No newline at end of file diff --git a/lms/djangoapps/open_ended_grading/views.py b/lms/djangoapps/open_ended_grading/views.py index f1a9d41974..717a33ec61 100644 --- a/lms/djangoapps/open_ended_grading/views.py +++ b/lms/djangoapps/open_ended_grading/views.py @@ -287,5 +287,32 @@ def combined_notifications(request, course_id): return render_to_response('open_ended_problems/combined_notifications.html', combined_dict ) + +def take_action_on_flags(request, course_id): + """ + + """ + if request.method != 'POST': + raise Http404 + + + required = ['submission_id', 'action_type', 'student_id'] + for key in required: + if key not in request.POST: + return HttpResponse(json.dumps({'success': False, 'error': 'Missing key {0}'.format(key)}), + mimetype="application/json") + + p = request.POST + submission_id = p['submission_id'] + action_type = p['action_type'] + student_id = p['student_id'] + + try: + controller_qs = ControllerQueryService() + response = controller_qs.save_calibration_essay(course_id, student_id, course_id, action_type) + return HttpResponse(response, mimetype="application/json") + except GradingServiceError: + log.exception("Error saving calibration grade, location: {0}, submission_id: {1}, submission_key: {2}, grader_id: {3}".format(location, submission_id, submission_key, grader_id)) + return _err_response('Could not connect to grading service') diff --git a/lms/templates/open_ended_problems/open_ended_flagged_problems.html b/lms/templates/open_ended_problems/open_ended_flagged_problems.html index eb7b34d8e6..97f1e03c18 100644 --- a/lms/templates/open_ended_problems/open_ended_flagged_problems.html +++ b/lms/templates/open_ended_problems/open_ended_flagged_problems.html @@ -25,9 +25,9 @@
Problem NameName Student IDStudent ResponseSubmission IDLocationIDResponse
${problem['student_id']} - ${problem['student_response']} - ${problem['submission_id']} - ${problem['location']} + ${problem['student_response']}
- - + + %for problem in problem_list: @@ -35,13 +35,19 @@ ${problem['problem_name']} + + - %endfor From 17fce100bb0a41e43a2c8aed6127c9f3e7cb0ed3 Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Wed, 30 Jan 2013 21:10:48 -0500 Subject: [PATCH 014/126] Add in ban actions to table --- lms/djangoapps/open_ended_grading/views.py | 2 +- .../open_ended_problems/open_ended_flagged_problems.html | 8 ++++---- lms/urls.py | 2 ++ 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/views.py b/lms/djangoapps/open_ended_grading/views.py index 717a33ec61..aaee9f4c98 100644 --- a/lms/djangoapps/open_ended_grading/views.py +++ b/lms/djangoapps/open_ended_grading/views.py @@ -309,7 +309,7 @@ def take_action_on_flags(request, course_id): try: controller_qs = ControllerQueryService() - response = controller_qs.save_calibration_essay(course_id, student_id, course_id, action_type) + response = controller_qs.take_action_on_flags(course_id, student_id, course_id, action_type) return HttpResponse(response, mimetype="application/json") except GradingServiceError: log.exception("Error saving calibration grade, location: {0}, submission_id: {1}, submission_key: {2}, grader_id: {3}".format(location, submission_id, submission_key, grader_id)) diff --git a/lms/templates/open_ended_problems/open_ended_flagged_problems.html b/lms/templates/open_ended_problems/open_ended_flagged_problems.html index 97f1e03c18..2397e1a70e 100644 --- a/lms/templates/open_ended_problems/open_ended_flagged_problems.html +++ b/lms/templates/open_ended_problems/open_ended_flagged_problems.html @@ -26,8 +26,8 @@ - - + + %for problem in problem_list: @@ -38,10 +38,10 @@ ${problem['student_response']} + From e41172d55df9f1a0cb142b6a59625eef59dfa519 Mon Sep 17 00:00:00 2001 From: Victor Shnayder Date: Sun, 20 Jan 2013 11:50:51 -0500 Subject: [PATCH 026/126] Add start of test framework for capa --- .../xmodule/xmodule/tests/test_capa_module.py | 60 +++++++++++++++++++ 1 file 
changed, 60 insertions(+) create mode 100644 common/lib/xmodule/xmodule/tests/test_capa_module.py diff --git a/common/lib/xmodule/xmodule/tests/test_capa_module.py b/common/lib/xmodule/xmodule/tests/test_capa_module.py new file mode 100644 index 0000000000..148fd893ff --- /dev/null +++ b/common/lib/xmodule/xmodule/tests/test_capa_module.py @@ -0,0 +1,60 @@ +import json +from mock import Mock +import unittest + +from xmodule.capa_module import CapaModule +from xmodule.modulestore import Location +from lxml import etree + +from . import test_system + +class CapaFactory(object): + """ + A helper class to create problem modules with various parameters for testing. + """ + + sample_problem_xml = """ + + +

What is pi, to two decimal places?

+
+ + + +
+""" + + num = 0 + @staticmethod + def next_num(): + CapaFactory.num += 1 + return CapaFactory.num + + @staticmethod + def create(): + definition = {'data': CapaFactory.sample_problem_xml,} + location = Location(["i4x", "edX", "capa_test", "problem", + "SampleProblem{0}".format(CapaFactory.next_num())]) + metadata = {} + descriptor = Mock(weight="1") + instance_state = None + + module = CapaModule(test_system, location, + definition, descriptor, + instance_state, None, metadata=metadata) + + return module + + + +class CapaModuleTest(unittest.TestCase): + + def test_import(self): + module = CapaFactory.create() + self.assertEqual(module.get_score()['score'], 0) + + other_module = CapaFactory.create() + self.assertEqual(module.get_score()['score'], 0) + self.assertNotEqual(module.url_name, other_module.url_name, + "Factory should be creating unique names for each problem") + From 025b074b87b5fc60c712292d541449d0d470152b Mon Sep 17 00:00:00 2001 From: Victor Shnayder Date: Sun, 20 Jan 2013 12:17:22 -0500 Subject: [PATCH 027/126] Add simple test for showanswer, fix test_system --- common/lib/xmodule/xmodule/tests/__init__.py | 2 +- .../xmodule/xmodule/tests/test_capa_module.py | 60 ++++++++++++++++++- 2 files changed, 59 insertions(+), 3 deletions(-) diff --git a/common/lib/xmodule/xmodule/tests/__init__.py b/common/lib/xmodule/xmodule/tests/__init__.py index a07f1ddfaf..1f323834a9 100644 --- a/common/lib/xmodule/xmodule/tests/__init__.py +++ b/common/lib/xmodule/xmodule/tests/__init__.py @@ -26,7 +26,7 @@ test_system = ModuleSystem( # "render" to just the context... 
render_template=lambda template, context: str(context), replace_urls=Mock(), - user=Mock(), + user=Mock(is_staff=False), filestore=Mock(), debug=True, xqueue={'interface':None, 'callback_url':'/', 'default_queuename': 'testqueue', 'waittime': 10}, diff --git a/common/lib/xmodule/xmodule/tests/test_capa_module.py b/common/lib/xmodule/xmodule/tests/test_capa_module.py index 148fd893ff..7537cb537c 100644 --- a/common/lib/xmodule/xmodule/tests/test_capa_module.py +++ b/common/lib/xmodule/xmodule/tests/test_capa_module.py @@ -1,7 +1,9 @@ import json from mock import Mock +from pprint import pprint import unittest + from xmodule.capa_module import CapaModule from xmodule.modulestore import Location from lxml import etree @@ -31,13 +33,59 @@ class CapaFactory(object): return CapaFactory.num @staticmethod - def create(): + def create(graceperiod=None, + due=None, + max_attempts=None, + showanswer=None, + rerandomize=None, + force_save_button=None, + attempts=None, + problem_state=None, + ): + """ + All parameters are optional, and are added to the created problem if specified. + + Arguments: + graceperiod: + due: + max_attempts: + showanswer: + force_save_button: + rerandomize: all strings, as specified in the policy for the problem + + problem_state: a dict to to be serialized into the instance_state of the + module. + + attempts: also added to instance state. Should be a number. 
+ """ definition = {'data': CapaFactory.sample_problem_xml,} location = Location(["i4x", "edX", "capa_test", "problem", "SampleProblem{0}".format(CapaFactory.next_num())]) metadata = {} + if graceperiod is not None: + metadata['graceperiod'] = graceperiod + if due is not None: + metadata['due'] = due + if max_attempts is not None: + metadata['attempts'] = max_attempts + if showanswer is not None: + metadata['showanswer'] = showanswer + if force_save_button is not None: + metadata['force_save_button'] = force_save_button + if rerandomize is not None: + metadata['rerandomize'] = rerandomize + + descriptor = Mock(weight="1") - instance_state = None + instance_state_dict = {} + if problem_state is not None: + instance_state_dict = problem_state + if attempts is not None: + instance_state_dict['attempts'] = attempts + if len(instance_state_dict) > 0: + instance_state = json.dumps(instance_state_dict) + else: + instance_state = None module = CapaModule(test_system, location, definition, descriptor, @@ -58,3 +106,11 @@ class CapaModuleTest(unittest.TestCase): self.assertNotEqual(module.url_name, other_module.url_name, "Factory should be creating unique names for each problem") + def test_showanswer(self): + """ + Make sure the show answer logic does the right thing. 
+ """ + # default, no due date, showanswer 'closed' + problem = CapaFactory.create() + pprint(problem.__dict__) + self.assertFalse(problem.answer_available()) From ea091a6eb83b09fbc5bafbe4f0f5011b69c8db7b Mon Sep 17 00:00:00 2001 From: Victor Shnayder Date: Sun, 20 Jan 2013 12:49:05 -0500 Subject: [PATCH 028/126] Add tests for showanswer --- .../xmodule/xmodule/tests/test_capa_module.py | 68 +++++++++++++++++-- 1 file changed, 62 insertions(+), 6 deletions(-) diff --git a/common/lib/xmodule/xmodule/tests/test_capa_module.py b/common/lib/xmodule/xmodule/tests/test_capa_module.py index 7537cb537c..506c7faf9f 100644 --- a/common/lib/xmodule/xmodule/tests/test_capa_module.py +++ b/common/lib/xmodule/xmodule/tests/test_capa_module.py @@ -1,9 +1,9 @@ +import datetime import json from mock import Mock from pprint import pprint import unittest - from xmodule.capa_module import CapaModule from xmodule.modulestore import Location from lxml import etree @@ -56,7 +56,7 @@ class CapaFactory(object): problem_state: a dict to to be serialized into the instance_state of the module. - attempts: also added to instance state. Should be a number. + attempts: also added to instance state. Will be converted to an int. """ definition = {'data': CapaFactory.sample_problem_xml,} location = Location(["i4x", "edX", "capa_test", "problem", @@ -81,7 +81,9 @@ class CapaFactory(object): if problem_state is not None: instance_state_dict = problem_state if attempts is not None: - instance_state_dict['attempts'] = attempts + # converting to int here because I keep putting "0" and "1" in the tests + # since everything else is a string. 
+ instance_state_dict['attempts'] = int(attempts) if len(instance_state_dict) > 0: instance_state = json.dumps(instance_state_dict) else: @@ -97,6 +99,17 @@ class CapaFactory(object): class CapaModuleTest(unittest.TestCase): + + def setUp(self): + now = datetime.datetime.now() + day_delta = datetime.timedelta(days=1) + self.yesterday_str = str(now - day_delta) + self.today_str = str(now) + self.tomorrow_str = str(now + day_delta) + + # in the capa grace period format, not in time delta format + self.two_day_delta_str = "2 days" + def test_import(self): module = CapaFactory.create() self.assertEqual(module.get_score()['score'], 0) @@ -106,11 +119,54 @@ class CapaModuleTest(unittest.TestCase): self.assertNotEqual(module.url_name, other_module.url_name, "Factory should be creating unique names for each problem") - def test_showanswer(self): + def test_showanswer_default(self): """ Make sure the show answer logic does the right thing. """ - # default, no due date, showanswer 'closed' + # default, no due date, showanswer 'closed', so problem is open, and show_answer + # not visible. 
problem = CapaFactory.create() - pprint(problem.__dict__) self.assertFalse(problem.answer_available()) + + + def test_showanswer_attempted(self): + problem = CapaFactory.create(showanswer='attempted') + self.assertFalse(problem.answer_available()) + problem.attempts = 1 + self.assertTrue(problem.answer_available()) + + + def test_showanswer_closed(self): + + # can see after attempts used up + used_all_attempts = CapaFactory.create(showanswer='closed', + max_attempts="1", + attempts="1") + self.assertTrue(used_all_attempts.answer_available()) + + + # can see after due date + after_due_date = CapaFactory.create(showanswer='closed', + max_attempts="1", + attempts="0", + due=self.yesterday_str) + self.assertTrue(after_due_date.answer_available()) + + # can't see because attempts left + attempts_left_open = CapaFactory.create(showanswer='closed', + max_attempts="1", + attempts="0", + due=self.tomorrow_str) + self.assertFalse(attempts_left_open.answer_available()) + + # Can't see because grace period hasn't expired + still_in_grace = CapaFactory.create(showanswer='closed', + max_attempts="1", + attempts="0", + due=self.yesterday_str, + graceperiod=self.two_day_delta_str) + self.assertFalse(still_in_grace.answer_available()) + + + + From 6088a926cc0697094c1bd6ae095581895fcc4563 Mon Sep 17 00:00:00 2001 From: Victor Shnayder Date: Sun, 20 Jan 2013 17:35:03 -0500 Subject: [PATCH 029/126] Add showanswer="past_due" and tests --- common/lib/xmodule/xmodule/capa_module.py | 35 ++++++++------ .../xmodule/xmodule/tests/test_capa_module.py | 47 ++++++++++++++++++- 2 files changed, 65 insertions(+), 17 deletions(-) diff --git a/common/lib/xmodule/xmodule/capa_module.py b/common/lib/xmodule/xmodule/capa_module.py index f33da6e3a4..6d258e61ed 100644 --- a/common/lib/xmodule/xmodule/capa_module.py +++ b/common/lib/xmodule/xmodule/capa_module.py @@ -389,38 +389,43 @@ class CapaModule(XModule): }) return json.dumps(d, cls=ComplexEncoder) + def is_past_due(self): + """ + Is it now past 
this problem's due date, including grace period? + """ + return (self.close_date is not None and + datetime.datetime.utcnow() > self.close_date) + def closed(self): ''' Is the student still allowed to submit answers? ''' if self.attempts == self.max_attempts: return True - if self.close_date is not None and datetime.datetime.utcnow() > self.close_date: + if self.is_past_due(): return True return False def answer_available(self): - ''' Is the user allowed to see an answer? + ''' + Is the user allowed to see an answer? ''' if self.show_answer == '': return False - - if self.show_answer == "never": + elif self.show_answer == "never": return False - - # Admins can see the answer, unless the problem explicitly prevents it - if self.system.user_is_staff: + elif self.system.user_is_staff: + # This i after the 'never' check because admins can see the answer + # unless the problem explicitly prevents it return True - - if self.show_answer == 'attempted': + elif self.show_answer == 'attempted': return self.attempts > 0 - - if self.show_answer == 'answered': + elif self.show_answer == 'answered': return self.lcp.done - - if self.show_answer == 'closed': + elif self.show_answer == 'closed': return self.closed() - - if self.show_answer == 'always': + elif self.show_answer == 'past_due': + return self.is_past_due() + elif self.show_answer == 'always': return True return False diff --git a/common/lib/xmodule/xmodule/tests/test_capa_module.py b/common/lib/xmodule/xmodule/tests/test_capa_module.py index 506c7faf9f..e8f639e3c9 100644 --- a/common/lib/xmodule/xmodule/tests/test_capa_module.py +++ b/common/lib/xmodule/xmodule/tests/test_capa_module.py @@ -138,10 +138,11 @@ class CapaModuleTest(unittest.TestCase): def test_showanswer_closed(self): - # can see after attempts used up + # can see after attempts used up, even with due date in the future used_all_attempts = CapaFactory.create(showanswer='closed', max_attempts="1", - attempts="1") + attempts="1", + due=self.tomorrow_str) 
self.assertTrue(used_all_attempts.answer_available()) @@ -152,6 +153,7 @@ class CapaModuleTest(unittest.TestCase): due=self.yesterday_str) self.assertTrue(after_due_date.answer_available()) + # can't see because attempts left attempts_left_open = CapaFactory.create(showanswer='closed', max_attempts="1", @@ -169,4 +171,45 @@ class CapaModuleTest(unittest.TestCase): + def test_showanswer_past_due(self): + """ + With showanswer="past_due" should only show answer after the problem is closed + for everyone--e.g. after due date + grace period. + """ + + # can see after attempts used up, even with due date in the future + used_all_attempts = CapaFactory.create(showanswer='past_due', + max_attempts="1", + attempts="1", + due=self.tomorrow_str) + self.assertFalse(used_all_attempts.answer_available()) + + + # can see after due date + past_due_date = CapaFactory.create(showanswer='past_due', + max_attempts="1", + attempts="0", + due=self.yesterday_str) + self.assertTrue(past_due_date.answer_available()) + + + # can't see because attempts left + attempts_left_open = CapaFactory.create(showanswer='past_due', + max_attempts="1", + attempts="0", + due=self.tomorrow_str) + self.assertFalse(attempts_left_open.answer_available()) + + # Can't see because grace period hasn't expired, even though have no more + # attempts. 
+ still_in_grace = CapaFactory.create(showanswer='past_due', + max_attempts="1", + attempts="1", + due=self.yesterday_str, + graceperiod=self.two_day_delta_str) + self.assertFalse(still_in_grace.answer_available()) + + + + From f3f509da3b7a63b9d5a14939c02f9a9780104337 Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Thu, 31 Jan 2013 12:45:48 -0500 Subject: [PATCH 030/126] Fix input area styling --- .../xmodule/xmodule/css/combinedopenended/display.scss | 5 +++-- .../xmodule/js/src/combinedopenended/display.coffee | 2 +- lms/static/coffee/src/open_ended/open_ended.coffee | 9 +++++---- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/common/lib/xmodule/xmodule/css/combinedopenended/display.scss b/common/lib/xmodule/xmodule/css/combinedopenended/display.scss index 41896e6173..38fd6ba01c 100644 --- a/common/lib/xmodule/xmodule/css/combinedopenended/display.scss +++ b/common/lib/xmodule/xmodule/css/combinedopenended/display.scss @@ -442,12 +442,13 @@ section.open-ended-child { margin: 10px; } - span.short-form-response { - padding: 9px; + div.short-form-response { background: #F6F6F6; border: 1px solid #ddd; border-top: 0; margin-bottom: 20px; + overflow-y: auto; + height: 200px; @include clearfix; } diff --git a/common/lib/xmodule/xmodule/js/src/combinedopenended/display.coffee b/common/lib/xmodule/xmodule/js/src/combinedopenended/display.coffee index 2aabd35771..89954deb23 100644 --- a/common/lib/xmodule/xmodule/js/src/combinedopenended/display.coffee +++ b/common/lib/xmodule/xmodule/js/src/combinedopenended/display.coffee @@ -351,5 +351,5 @@ class @CombinedOpenEnded answer_id = @answer_area.attr('id') answer_val = @answer_area.val() new_text = '' - new_text = "#{answer_val}" + new_text = "
#{answer_val}
" @answer_area.replaceWith(new_text) diff --git a/lms/static/coffee/src/open_ended/open_ended.coffee b/lms/static/coffee/src/open_ended/open_ended.coffee index 558d712c46..aff1e5fc67 100644 --- a/lms/static/coffee/src/open_ended/open_ended.coffee +++ b/lms/static/coffee/src/open_ended/open_ended.coffee @@ -44,13 +44,14 @@ class OpenEnded .error => callback({success: false, error: "Error occured while performing this operation"}) after_action_wrapper: (target, action_type) -> + tr_parent = target.parent().parent() + tr_children = tr_parent.children() + action_taken = tr_children[4].firstElementChild + action_taken.innerText = "#{action_type} done for student." return @handle_after_action handle_after_action: (data) -> - tr_parent = target.parent().parent() - tr_children = tr_parent.children() - action_taken = tr_children[4].children()[0] - action_taken.replaceWith('
#{action_type} done for student.
') + blah = "blah" gentle_alert: (msg) => if $('.message-container').length From 52f3e9daafa96ee5a589e79e09aaa5611d58c229 Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Thu, 31 Jan 2013 18:05:57 -0500 Subject: [PATCH 031/126] Start moving peer grading to xmodule --- .../js/src/peergrading/peer_grading.coffee | 27 + .../peergrading/peer_grading_problem.coffee | 478 ++++++++++++++++++ .../xmodule/xmodule/peer_grading_module.py | 439 ++++++++++++++++ .../xmodule/xmodule/peer_grading_service.py | 256 ++++++++++ 4 files changed, 1200 insertions(+) create mode 100644 common/lib/xmodule/xmodule/js/src/peergrading/peer_grading.coffee create mode 100644 common/lib/xmodule/xmodule/js/src/peergrading/peer_grading_problem.coffee create mode 100644 common/lib/xmodule/xmodule/peer_grading_module.py create mode 100644 common/lib/xmodule/xmodule/peer_grading_service.py diff --git a/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading.coffee b/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading.coffee new file mode 100644 index 0000000000..ed79ba9c71 --- /dev/null +++ b/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading.coffee @@ -0,0 +1,27 @@ +# This is a simple class that just hides the error container +# and message container when they are empty +# Can (and should be) expanded upon when our problem list +# becomes more sophisticated +class PeerGrading + constructor: () -> + @error_container = $('.error-container') + @error_container.toggle(not @error_container.is(':empty')) + + @message_container = $('.message-container') + @message_container.toggle(not @message_container.is(':empty')) + + @problem_list = $('.problem-list') + @construct_progress_bar() + + construct_progress_bar: () => + problems = @problem_list.find('tr').next() + problems.each( (index, element) => + problem = $(element) + progress_bar = problem.find('.progress-bar') + bar_value = parseInt(problem.data('graded')) + bar_max = parseInt(problem.data('required')) + bar_value + 
progress_bar.progressbar({value: bar_value, max: bar_max}) + ) + + +$(document).ready(() -> new PeerGrading()) diff --git a/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading_problem.coffee b/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading_problem.coffee new file mode 100644 index 0000000000..ab16b34d12 --- /dev/null +++ b/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading_problem.coffee @@ -0,0 +1,478 @@ +################################## +# +# This is the JS that renders the peer grading problem page. +# Fetches the correct problem and/or calibration essay +# and sends back the grades +# +# Should not be run when we don't have a location to send back +# to the server +# +# PeerGradingProblemBackend - +# makes all the ajax requests and provides a mock interface +# for testing purposes +# +# PeerGradingProblem - +# handles the rendering and user interactions with the interface +# +################################## +class PeerGradingProblemBackend + constructor: (ajax_url, mock_backend) -> + @mock_backend = mock_backend + @ajax_url = ajax_url + @mock_cnt = 0 + + post: (cmd, data, callback) -> + if @mock_backend + callback(@mock(cmd, data)) + else + # if this post request fails, the error callback will catch it + $.post(@ajax_url + cmd, data, callback) + .error => callback({success: false, error: "Error occured while performing this operation"}) + + mock: (cmd, data) -> + if cmd == 'is_student_calibrated' + # change to test each version + response = + success: true + calibrated: @mock_cnt >= 2 + else if cmd == 'show_calibration_essay' + #response = + # success: false + # error: "There was an error" + @mock_cnt++ + response = + success: true + submission_id: 1 + submission_key: 'abcd' + student_response: ''' + Contrary to popular belief, Lorem Ipsum is not simply random text. It has roots in a piece of classical Latin literature from 45 BC, making it over 2000 years old. 
Richard McClintock, a Latin professor at Hampden-Sydney College in Virginia, looked up one of the more obscure Latin words, consectetur, from a Lorem Ipsum passage, and going through the cites of the word in classical literature, discovered the undoubtable source. Lorem Ipsum comes from sections 1.10.32 and 1.10.33 of "de Finibus Bonorum et Malorum" (The Extremes of Good and Evil) by Cicero, written in 45 BC. This book is a treatise on the theory of ethics, very popular during the Renaissance. The first line of Lorem Ipsum, "Lorem ipsum dolor sit amet..", comes from a line in section 1.10.32. + +The standard chunk of Lorem Ipsum used since the 1500s is reproduced below for those interested. Sections 1.10.32 and 1.10.33 from "de Finibus Bonorum et Malorum" by Cicero are also reproduced in their exact original form, accompanied by English versions from the 1914 translation by H. Rackham. + ''' + prompt: ''' +

S11E3: Metal Bands

+

Shown below are schematic band diagrams for two different metals. Both diagrams appear different, yet both of the elements are undisputably metallic in nature.

+

* Why is it that both sodium and magnesium behave as metals, even though the s-band of magnesium is filled?

+

This is a self-assessed open response question. Please use as much space as you need in the box below to answer the question.

+ ''' + rubric: ''' +
NameStudent IDID ResponseUnflagBan
- ${problem['student_id']} + ${problem['student_response']} + + + + ${problem['submission_id']} - ${problem['student_response']} + + ${problem['student_id']}
Name ResponseUnflagBan
- + Unflag - + Ban ${problem['submission_id']} diff --git a/lms/urls.py b/lms/urls.py index f122635821..41e8e9fff1 100644 --- a/lms/urls.py +++ b/lms/urls.py @@ -287,6 +287,8 @@ if settings.COURSEWARE_ENABLED: 'open_ended_grading.views.student_problem_list', name='open_ended_problems'), # Open Ended flagged problem list + url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/open_ended_flagged_problems$', + 'open_ended_grading.views.flagged_problem_list', name='open_ended_flagged_problems'), url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/open_ended_flagged_problems$', 'open_ended_grading.views.flagged_problem_list', name='open_ended_flagged_problems'), From 4c164795691c126ef3d08d5be0e7cba04cfabd0d Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Wed, 30 Jan 2013 21:22:59 -0500 Subject: [PATCH 015/126] Working on frontend JS for posting --- lms/djangoapps/open_ended_grading/views.py | 2 +- .../coffee/src/open_ended/open_ended.coffee | 33 +++++++++++++++++++ .../open_ended_flagged_problems.html | 8 +++-- 3 files changed, 40 insertions(+), 3 deletions(-) create mode 100644 lms/static/coffee/src/open_ended/open_ended.coffee diff --git a/lms/djangoapps/open_ended_grading/views.py b/lms/djangoapps/open_ended_grading/views.py index aaee9f4c98..137cff7803 100644 --- a/lms/djangoapps/open_ended_grading/views.py +++ b/lms/djangoapps/open_ended_grading/views.py @@ -309,7 +309,7 @@ def take_action_on_flags(request, course_id): try: controller_qs = ControllerQueryService() - response = controller_qs.take_action_on_flags(course_id, student_id, course_id, action_type) + response = controller_qs.take_action_on_flags(course_id, student_id, submission_id, action_type) return HttpResponse(response, mimetype="application/json") except GradingServiceError: log.exception("Error saving calibration grade, location: {0}, submission_id: {1}, submission_key: {2}, grader_id: {3}".format(location, submission_id, submission_key, grader_id)) diff --git a/lms/static/coffee/src/open_ended/open_ended.coffee 
b/lms/static/coffee/src/open_ended/open_ended.coffee new file mode 100644 index 0000000000..f45efeb8a7 --- /dev/null +++ b/lms/static/coffee/src/open_ended/open_ended.coffee @@ -0,0 +1,33 @@ +# This is a simple class that just hides the error container +# and message container when they are empty +# Can (and should be) expanded upon when our problem list +# becomes more sophisticated +class OpenEnded + constructor: (ajax_url) -> + @ajax_url = ajax_url + @error_container = $('.error-container') + @error_container.toggle(not @error_container.is(':empty')) + + @message_container = $('.message-container') + @message_container.toggle(not @message_container.is(':empty')) + + @problem_list = $('.problem-list') + + @ban_button = $('.ban-button') + @unflag_button = $('.unflag-button') + @ban_button.click @ban + @unflag_button.click @unflag + + unflag: (event) => + event.preventDefault() + + ban: (event) => + event.preventDefault() + + post: (cmd, data, callback) -> + # if this post request fails, the error callback will catch it + $.post(@ajax_url + cmd, data, callback) + .error => callback({success: false, error: "Error occured while performing this operation"}) + +ajax_url = $('.open-ended-problems').data('ajax_url') +$(document).ready(() -> new OpenEnded(ajax_url)) diff --git a/lms/templates/open_ended_problems/open_ended_flagged_problems.html b/lms/templates/open_ended_problems/open_ended_flagged_problems.html index 2397e1a70e..9265ad4663 100644 --- a/lms/templates/open_ended_problems/open_ended_flagged_problems.html +++ b/lms/templates/open_ended_problems/open_ended_flagged_problems.html @@ -10,6 +10,10 @@ <%include file="/courseware/course_navigation.html" args="active_page='open_ended_flagged_problems'" /> +<%block name="js_extra"> + <%static:js group='open_ended'/> + +
${error_text}
@@ -38,10 +42,10 @@ ${problem['student_response']}
- Unflag + Unflag - Ban + Ban ${problem['submission_id']} From 10c7155d4d061d51eee8572a4a5b6fd5f1da03eb Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Wed, 30 Jan 2013 21:28:42 -0500 Subject: [PATCH 016/126] Add open ended to JS pipeline --- lms/envs/common.py | 7 ++++++- lms/static/coffee/src/open_ended/open_ended.coffee | 9 +++++++++ lms/urls.py | 4 ++-- 3 files changed, 17 insertions(+), 3 deletions(-) diff --git a/lms/envs/common.py b/lms/envs/common.py index 16472795e0..426c29c7d0 100644 --- a/lms/envs/common.py +++ b/lms/envs/common.py @@ -438,6 +438,7 @@ main_vendor_js = [ discussion_js = sorted(rooted_glob(PROJECT_ROOT / 'static', 'coffee/src/discussion/**/*.coffee')) staff_grading_js = sorted(rooted_glob(PROJECT_ROOT / 'static', 'coffee/src/staff_grading/**/*.coffee')) peer_grading_js = sorted(rooted_glob(PROJECT_ROOT / 'static','coffee/src/peer_grading/**/*.coffee')) +open_ended_js = sorted(rooted_glob(PROJECT_ROOT / 'static','coffee/src/open_ended/**/*.coffee')) PIPELINE_CSS = { 'application': { @@ -468,7 +469,7 @@ PIPELINE_JS = { 'source_filenames': sorted( set(rooted_glob(COMMON_ROOT / 'static', 'coffee/src/**/*.coffee') + rooted_glob(PROJECT_ROOT / 'static', 'coffee/src/**/*.coffee')) - - set(courseware_js + discussion_js + staff_grading_js + peer_grading_js) + set(courseware_js + discussion_js + staff_grading_js + peer_grading_js + open_ended_js) ) + [ 'js/form.ext.js', 'js/my_courses_dropdown.js', @@ -501,6 +502,10 @@ PIPELINE_JS = { 'peer_grading' : { 'source_filenames': peer_grading_js, 'output_filename': 'js/peer_grading.js' + }, + 'open_ended' : { + 'source_filenames': open_ended_js, + 'output_filename': 'js/open_ended.js' } } diff --git a/lms/static/coffee/src/open_ended/open_ended.coffee b/lms/static/coffee/src/open_ended/open_ended.coffee index f45efeb8a7..5c0f455ce7 100644 --- a/lms/static/coffee/src/open_ended/open_ended.coffee +++ b/lms/static/coffee/src/open_ended/open_ended.coffee @@ -20,14 +20,23 @@ class OpenEnded unflag: (event) 
=> event.preventDefault() + @gentle_alert "Unflag" ban: (event) => event.preventDefault() + @gentle_alert "Ban" post: (cmd, data, callback) -> # if this post request fails, the error callback will catch it $.post(@ajax_url + cmd, data, callback) .error => callback({success: false, error: "Error occured while performing this operation"}) + gentle_alert: (msg) => + if $('.message-container').length + $('.message-container').remove() + alert_elem = "
" + msg + "
" + $('.error-container').after(alert_elem) + $('.message-container').css(opacity: 0).animate(opacity: 1, 700) + ajax_url = $('.open-ended-problems').data('ajax_url') $(document).ready(() -> new OpenEnded(ajax_url)) diff --git a/lms/urls.py b/lms/urls.py index 41e8e9fff1..260f55dd05 100644 --- a/lms/urls.py +++ b/lms/urls.py @@ -289,8 +289,8 @@ if settings.COURSEWARE_ENABLED: # Open Ended flagged problem list url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/open_ended_flagged_problems$', 'open_ended_grading.views.flagged_problem_list', name='open_ended_flagged_problems'), - url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/open_ended_flagged_problems$', - 'open_ended_grading.views.flagged_problem_list', name='open_ended_flagged_problems'), + url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/open_ended_flagged_problems/take_action_on_flag$', + 'open_ended_grading.views.take_action_on_flags', name='open_ended_flagged_problems_take_action'), # Cohorts management url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/cohorts$', From 8e9ec501a777a374491db1c48b5cd5a8c461df24 Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Wed, 30 Jan 2013 21:44:21 -0500 Subject: [PATCH 017/126] Implement flagging, fix urls --- lms/djangoapps/open_ended_grading/views.py | 3 +-- lms/static/coffee/src/open_ended/open_ended.coffee | 11 ++++++++++- lms/urls.py | 2 +- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/views.py b/lms/djangoapps/open_ended_grading/views.py index 137cff7803..a0ef8239f3 100644 --- a/lms/djangoapps/open_ended_grading/views.py +++ b/lms/djangoapps/open_ended_grading/views.py @@ -226,7 +226,7 @@ def flagged_problem_list(request, course_id): error_text = "Could not get problem list" success = False - ajax_url = _reverse_with_slash('open_ended_problems', course_id) + ajax_url = _reverse_with_slash('open_ended_flagged_problems', course_id) return render_to_response('open_ended_problems/open_ended_flagged_problems.html', { 'course': course, @@ -308,7 +308,6 @@ def 
take_action_on_flags(request, course_id): student_id = p['student_id'] try: - controller_qs = ControllerQueryService() response = controller_qs.take_action_on_flags(course_id, student_id, submission_id, action_type) return HttpResponse(response, mimetype="application/json") except GradingServiceError: diff --git a/lms/static/coffee/src/open_ended/open_ended.coffee b/lms/static/coffee/src/open_ended/open_ended.coffee index 5c0f455ce7..07b84c8af5 100644 --- a/lms/static/coffee/src/open_ended/open_ended.coffee +++ b/lms/static/coffee/src/open_ended/open_ended.coffee @@ -24,13 +24,22 @@ class OpenEnded ban: (event) => event.preventDefault() - @gentle_alert "Ban" + parent_tr = $(event.target).parent().parent() + tr_children = parent_tr.children() + action_type = "ban" + submission_id = tr_children[4].innerText + student_id = tr_children[5].innerText + @gentle_alert student_id + @post('take_action_on_flags', {'submission_id' : submission_id, 'student_id' : student_id, 'action_type' : action_type}, @handle_after_action) post: (cmd, data, callback) -> # if this post request fails, the error callback will catch it $.post(@ajax_url + cmd, data, callback) .error => callback({success: false, error: "Error occured while performing this operation"}) + handle_after_action: (data) -> + @gentle_alert data + gentle_alert: (msg) => if $('.message-container').length $('.message-container').remove() diff --git a/lms/urls.py b/lms/urls.py index 260f55dd05..e4494e0166 100644 --- a/lms/urls.py +++ b/lms/urls.py @@ -289,7 +289,7 @@ if settings.COURSEWARE_ENABLED: # Open Ended flagged problem list url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/open_ended_flagged_problems$', 'open_ended_grading.views.flagged_problem_list', name='open_ended_flagged_problems'), - url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/open_ended_flagged_problems/take_action_on_flag$', + url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/open_ended_flagged_problems/take_action_on_flags$', 'open_ended_grading.views.take_action_on_flags', 
name='open_ended_flagged_problems_take_action'), # Cohorts management From b4c80da22498f36aa6da563e96f0fef5e34dbd2d Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Wed, 30 Jan 2013 21:50:24 -0500 Subject: [PATCH 018/126] Trim whitespace --- lms/djangoapps/open_ended_grading/views.py | 6 +++++- lms/static/coffee/src/open_ended/open_ended.coffee | 1 - 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/views.py b/lms/djangoapps/open_ended_grading/views.py index a0ef8239f3..984c544d93 100644 --- a/lms/djangoapps/open_ended_grading/views.py +++ b/lms/djangoapps/open_ended_grading/views.py @@ -25,6 +25,8 @@ import open_ended_notifications from xmodule.modulestore.django import modulestore from xmodule.modulestore import search +from django.http import HttpResponse, Http404 + log = logging.getLogger(__name__) template_imports = {'urllib': urllib} @@ -306,7 +308,9 @@ def take_action_on_flags(request, course_id): submission_id = p['submission_id'] action_type = p['action_type'] student_id = p['student_id'] - + student_id = student_id.strip(' \t\n\r') + submission_id = submission_id.strip(' \t\n\r') + action_type = action_type.lower().strip(' \t\n\r') try: response = controller_qs.take_action_on_flags(course_id, student_id, submission_id, action_type) return HttpResponse(response, mimetype="application/json") diff --git a/lms/static/coffee/src/open_ended/open_ended.coffee b/lms/static/coffee/src/open_ended/open_ended.coffee index 07b84c8af5..e54198e2aa 100644 --- a/lms/static/coffee/src/open_ended/open_ended.coffee +++ b/lms/static/coffee/src/open_ended/open_ended.coffee @@ -29,7 +29,6 @@ class OpenEnded action_type = "ban" submission_id = tr_children[4].innerText student_id = tr_children[5].innerText - @gentle_alert student_id @post('take_action_on_flags', {'submission_id' : submission_id, 'student_id' : student_id, 'action_type' : action_type}, @handle_after_action) post: (cmd, data, callback) -> From 
2affd4760bef18906c858451dcaa8b80af688485 Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Wed, 30 Jan 2013 21:53:33 -0500 Subject: [PATCH 019/126] Add in wiring for unflag action --- lms/static/coffee/src/open_ended/open_ended.coffee | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/lms/static/coffee/src/open_ended/open_ended.coffee b/lms/static/coffee/src/open_ended/open_ended.coffee index e54198e2aa..227cf2fd76 100644 --- a/lms/static/coffee/src/open_ended/open_ended.coffee +++ b/lms/static/coffee/src/open_ended/open_ended.coffee @@ -20,7 +20,12 @@ class OpenEnded unflag: (event) => event.preventDefault() - @gentle_alert "Unflag" + parent_tr = $(event.target).parent().parent() + tr_children = parent_tr.children() + action_type = "unflag" + submission_id = tr_children[4].innerText + student_id = tr_children[5].innerText + @post('take_action_on_flags', {'submission_id' : submission_id, 'student_id' : student_id, 'action_type' : action_type}, @handle_after_action) ban: (event) => event.preventDefault() From b0e46085586524caf67b7c76b8907ee23403388d Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Wed, 30 Jan 2013 21:56:52 -0500 Subject: [PATCH 020/126] Fix callback alert --- lms/static/coffee/src/open_ended/open_ended.coffee | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lms/static/coffee/src/open_ended/open_ended.coffee b/lms/static/coffee/src/open_ended/open_ended.coffee index 227cf2fd76..45de3a4fcc 100644 --- a/lms/static/coffee/src/open_ended/open_ended.coffee +++ b/lms/static/coffee/src/open_ended/open_ended.coffee @@ -42,7 +42,7 @@ class OpenEnded .error => callback({success: false, error: "Error occured while performing this operation"}) handle_after_action: (data) -> - @gentle_alert data + @gentle_alert data.data gentle_alert: (msg) => if $('.message-container').length From ada9ff7f27925b21da41e26873b39b8f04a61ed0 Mon Sep 17 00:00:00 2001 From: Victor Shnayder Date: Thu, 31 Jan 2013 10:45:11 -0500 Subject: [PATCH 
021/126] Fix randomization bug in capa. (Note: capa_problem was still doing randomization internally, but now it does what's actually intended) --- common/lib/xmodule/xmodule/capa_module.py | 33 ++++++++++++++++------- 1 file changed, 23 insertions(+), 10 deletions(-) diff --git a/common/lib/xmodule/xmodule/capa_module.py b/common/lib/xmodule/xmodule/capa_module.py index f33da6e3a4..27bf0c4cb1 100644 --- a/common/lib/xmodule/xmodule/capa_module.py +++ b/common/lib/xmodule/xmodule/capa_module.py @@ -2,6 +2,7 @@ import cgi import datetime import dateutil import dateutil.parser +import hashlib import json import logging import traceback @@ -25,6 +26,22 @@ log = logging.getLogger("mitx.courseware") #----------------------------------------------------------------------------- TIMEDELTA_REGEX = re.compile(r'^((?P\d+?) day(?:s?))?(\s)?((?P\d+?) hour(?:s?))?(\s)?((?P\d+?) minute(?:s)?)?(\s)?((?P\d+?) second(?:s)?)?$') +# Generated this many different variants of problems with rerandomize=per_student +NUM_RANDOMIZATION_BINS = 20 + +def randomization_bin(seed, problem_id): + """ + Pick a randomization bin for the problem given the user's seed and a problem id. + + We do this because we only want e.g. 20 randomizations of a problem to make analytics + interesting. To avoid having sets of students that always get the same problems, + we'll combine the system's per-student seed with the problem id in picking the bin. + """ + h = hashlib.sha1() + h.update(str(seed)) + h.update(str(problem_id)) + # get the first few digits of the hash, convert to an int, then mod. + return int(h.hexdigest()[:7], 16) % NUM_RANDOMIZATION_BINS def only_one(lst, default="", process=lambda x: x): """ @@ -138,13 +155,9 @@ class CapaModule(XModule): if self.rerandomize == 'never': self.seed = 1 - elif self.rerandomize == "per_student" and hasattr(self.system, 'id'): - # TODO: This line is badly broken: - # (1) We're passing student ID to xmodule. - # (2) There aren't bins of students. 
-- we only want 10 or 20 randomizations, and want to assign students - # to these bins, and may not want cohorts. So e.g. hash(your-id, problem_id) % num_bins. - # - analytics really needs small number of bins. - self.seed = system.id + elif self.rerandomize == "per_student" and hasattr(self.system, 'seed'): + # see comment on randomization_bin + self.seed = randomization_bin(system.seed, self.location.url) else: self.seed = None @@ -669,18 +682,18 @@ class CapaDescriptor(RawDescriptor): # TODO (vshnayder): do problems have any other metadata? Do they # actually use type and points? metadata_attributes = RawDescriptor.metadata_attributes + ('type', 'points') - + def get_context(self): _context = RawDescriptor.get_context(self) _context.update({'markdown': self.metadata.get('markdown', '')}) return _context - + @property def editable_metadata_fields(self): """Remove metadata from the editable fields since it has its own editor""" subset = super(CapaDescriptor,self).editable_metadata_fields if 'markdown' in subset: - subset.remove('markdown') + subset.remove('markdown') return subset From 78f9f63466e6ffce90df07385c80378168af74c7 Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Thu, 31 Jan 2013 11:19:15 -0500 Subject: [PATCH 022/126] Add in notification type for flagged submissions --- .../open_ended_grading/open_ended_notifications.py | 3 ++- lms/djangoapps/open_ended_grading/views.py | 6 ++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/lms/djangoapps/open_ended_grading/open_ended_notifications.py b/lms/djangoapps/open_ended_grading/open_ended_notifications.py index 43259f3e1b..fec893894f 100644 --- a/lms/djangoapps/open_ended_grading/open_ended_notifications.py +++ b/lms/djangoapps/open_ended_grading/open_ended_notifications.py @@ -19,7 +19,8 @@ KEY_PREFIX = "open_ended_" NOTIFICATION_TYPES = ( ('student_needs_to_peer_grade', 'peer_grading', 'Peer Grading'), ('staff_needs_to_grade', 'staff_grading', 'Staff Grading'), - 
('new_student_grading_to_view', 'open_ended_problems', 'Problems you have submitted') + ('new_student_grading_to_view', 'open_ended_problems', 'Problems you have submitted'), + ('flagged_submissions_exist', 'open_ended_flagged_problems', 'Flagged Submissions') ) def staff_grading_notifications(course, user): diff --git a/lms/djangoapps/open_ended_grading/views.py b/lms/djangoapps/open_ended_grading/views.py index 984c544d93..1777f26e2e 100644 --- a/lms/djangoapps/open_ended_grading/views.py +++ b/lms/djangoapps/open_ended_grading/views.py @@ -56,12 +56,14 @@ def _reverse_without_slash(url_name, course_id): DESCRIPTION_DICT = { 'Peer Grading': "View all problems that require peer assessment in this particular course.", 'Staff Grading': "View ungraded submissions submitted by students for the open ended problems in the course.", - 'Problems you have submitted': "View open ended problems that you have previously submitted for grading." + 'Problems you have submitted': "View open ended problems that you have previously submitted for grading.", + 'Flagged Submissions' : "View submissions that have been flagged by students as inappropriate." 
} ALERT_DICT = { 'Peer Grading': "New submissions to grade", 'Staff Grading': "New submissions to grade", - 'Problems you have submitted': "New grades have been returned" + 'Problems you have submitted': "New grades have been returned", + 'Flagged Submissions' : "Submissions have been flagged for review" } @cache_control(no_cache=True, no_store=True, must_revalidate=True) def staff_grading(request, course_id): From e431378f46a812afc5a562cedd87babe7beb5bec Mon Sep 17 00:00:00 2001 From: Victor Shnayder Date: Thu, 31 Jan 2013 12:14:01 -0500 Subject: [PATCH 023/126] Add first pass at a randomize module --- common/lib/xmodule/setup.py | 1 + .../lib/xmodule/xmodule/randomize_module.py | 122 ++++++++++++++++++ 2 files changed, 123 insertions(+) create mode 100644 common/lib/xmodule/xmodule/randomize_module.py diff --git a/common/lib/xmodule/setup.py b/common/lib/xmodule/setup.py index 29227c3188..446078ffcf 100644 --- a/common/lib/xmodule/setup.py +++ b/common/lib/xmodule/setup.py @@ -28,6 +28,7 @@ setup( "error = xmodule.error_module:ErrorDescriptor", "problem = xmodule.capa_module:CapaDescriptor", "problemset = xmodule.seq_module:SequenceDescriptor", + "randomize = xmodule.randomize_module:RandomizeDescriptor", "section = xmodule.backcompat_module:SemanticSectionDescriptor", "sequential = xmodule.seq_module:SequenceDescriptor", "slides = xmodule.backcompat_module:TranslateCustomTagDescriptor", diff --git a/common/lib/xmodule/xmodule/randomize_module.py b/common/lib/xmodule/xmodule/randomize_module.py new file mode 100644 index 0000000000..0bc26c21bf --- /dev/null +++ b/common/lib/xmodule/xmodule/randomize_module.py @@ -0,0 +1,122 @@ +import json +import logging +import random + +from xmodule.mako_module import MakoModuleDescriptor +from xmodule.x_module import XModule +from xmodule.xml_module import XmlDescriptor +from xmodule.modulestore import Location +from xmodule.seq_module import SequenceDescriptor + +from pkg_resources import resource_string + +log = 
logging.getLogger('mitx.' + __name__) + +class RandomizeModule(XModule): + """ + Chooses a random child module. Chooses the same one every time for each student. + + Example: + + + + + + + User notes: + + - If you're randomizing amongst graded modules, each of them MUST be worth the same + number of points. Otherwise, the earth will be overrun by monsters from the + deeps. You have been warned. + + Technical notes: + - There is more dark magic in this code than I'd like. The whole varying-children + + grading interaction is a tangle between super and subclasses of descriptors and + modules. +""" + + def __init__(self, system, location, definition, descriptor, + instance_state=None, shared_state=None, **kwargs): + XModule.__init__(self, system, location, definition, descriptor, + instance_state, shared_state, **kwargs) + + # NOTE: calling self.get_children() creates a circular reference-- + # it calls get_child_descriptors() internally, but that doesn't work until + # we've picked a choice + num_choices = len(self.descriptor.get_children()) + + self.choice = None + if instance_state is not None: + state = json.loads(instance_state) + self.choice = state.get('choice', None) + if self.choice > num_choices: + # Oops. Children changed. Reset. 
+ self.choice = None + + if self.choice is None: + # choose one based on the system seed, or randomly if that's not available + if num_choices > 0: + if system.seed is not None: + self.choice = system.seed % num_choices + else: + self.choice = random.randrange(0, num_choices) + + log.debug("********* self.choice = %s", self.choice) + if self.choice is not None: + self.child_descriptor = self.descriptor.get_children()[self.choice] + # Now get_children() should return a list with one element + log.debug("children of randomize module (should be only 1): %s", + self.get_children()) + self.child = self.get_children()[0] + else: + self.child_descriptor = None + self.child = None + + + def get_instance_state(self): + return json.dumps({'choice': self.choice}) + + + def get_child_descriptors(self): + """ + For grading--return just the chosen child. + """ + if self.child_descriptor is None: + return [] + + return [self.child_descriptor] + + + def get_html(self): + if self.child is None: + # raise error instead? In fact, could complain on descriptor load... + return "
Nothing to randomize between
" + + return self.child.get_html() + + def get_icon_class(self): + return self.child.get_icon_class() if self.child else 'other' + + +class RandomizeDescriptor(SequenceDescriptor): + # the editing interface can be the same as for sequences -- just a container + module_class = RandomizeModule + + filename_extension = "xml" + + stores_state = True + + def definition_to_xml(self, resource_fs): + xml_object = etree.Element('randomize') + for child in self.get_children(): + xml_object.append( + etree.fromstring(child.export_to_xml(resource_fs))) + return xml_object + + def has_dynamic_children(self): + """ + Grading needs to know that only one of the children is actually "real". This + makes it use module.get_child_descriptors(). + """ + return True + From e0fb906c0692da41253ac1fe9411c6703f981f44 Mon Sep 17 00:00:00 2001 From: Victor Shnayder Date: Thu, 31 Jan 2013 12:14:20 -0500 Subject: [PATCH 024/126] add note about potential bug in verticals. No time to investigate at the moment... --- common/lib/xmodule/xmodule/vertical_module.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/common/lib/xmodule/xmodule/vertical_module.py b/common/lib/xmodule/xmodule/vertical_module.py index 397bd3e136..14105b41d0 100644 --- a/common/lib/xmodule/xmodule/vertical_module.py +++ b/common/lib/xmodule/xmodule/vertical_module.py @@ -48,3 +48,5 @@ class VerticalDescriptor(SequenceDescriptor): js = {'coffee': [resource_string(__name__, 'js/src/vertical/edit.coffee')]} js_module_name = "VerticalDescriptor" + # TODO (victor): Does this need its own definition_to_xml method? Otherwise it looks + # like verticals will get exported as sequentials... 
From 63d4ac8c442615c45435b2186dfbaa590891037f Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Thu, 31 Jan 2013 12:17:21 -0500 Subject: [PATCH 025/126] Working on some flagging coffeescript --- .../coffee/src/open_ended/open_ended.coffee | 22 +++++++++++++------ .../open_ended_flagged_problems.html | 3 +++ 2 files changed, 18 insertions(+), 7 deletions(-) diff --git a/lms/static/coffee/src/open_ended/open_ended.coffee b/lms/static/coffee/src/open_ended/open_ended.coffee index 45de3a4fcc..558d712c46 100644 --- a/lms/static/coffee/src/open_ended/open_ended.coffee +++ b/lms/static/coffee/src/open_ended/open_ended.coffee @@ -23,26 +23,34 @@ class OpenEnded parent_tr = $(event.target).parent().parent() tr_children = parent_tr.children() action_type = "unflag" - submission_id = tr_children[4].innerText - student_id = tr_children[5].innerText - @post('take_action_on_flags', {'submission_id' : submission_id, 'student_id' : student_id, 'action_type' : action_type}, @handle_after_action) + submission_id = tr_children[5].innerText + student_id = tr_children[6].innerText + callback_func = @after_action_wrapper($(event.target), action_type) + @post('take_action_on_flags', {'submission_id' : submission_id, 'student_id' : student_id, 'action_type' : action_type}, callback_func) ban: (event) => event.preventDefault() parent_tr = $(event.target).parent().parent() tr_children = parent_tr.children() action_type = "ban" - submission_id = tr_children[4].innerText - student_id = tr_children[5].innerText - @post('take_action_on_flags', {'submission_id' : submission_id, 'student_id' : student_id, 'action_type' : action_type}, @handle_after_action) + submission_id = tr_children[5].innerText + student_id = tr_children[6].innerText + callback_func = @after_action_wrapper($(event.target), action_type) + @post('take_action_on_flags', {'submission_id' : submission_id, 'student_id' : student_id, 'action_type' : action_type}, callback_func) post: (cmd, data, callback) -> # if this post request 
fails, the error callback will catch it $.post(@ajax_url + cmd, data, callback) .error => callback({success: false, error: "Error occured while performing this operation"}) + after_action_wrapper: (target, action_type) -> + return @handle_after_action + handle_after_action: (data) -> - @gentle_alert data.data + tr_parent = target.parent().parent() + tr_children = tr_parent.children() + action_taken = tr_children[4].children()[0] + action_taken.replaceWith('
#{action_type} done for student.
') gentle_alert: (msg) => if $('.message-container').length diff --git a/lms/templates/open_ended_problems/open_ended_flagged_problems.html b/lms/templates/open_ended_problems/open_ended_flagged_problems.html index 9265ad4663..ec892da43c 100644 --- a/lms/templates/open_ended_problems/open_ended_flagged_problems.html +++ b/lms/templates/open_ended_problems/open_ended_flagged_problems.html @@ -47,6 +47,9 @@
Ban +
+
${problem['submission_id']}
+ + + + + + + + + + + + + + + + + +
Purpose + + + + + + + +
Organization + + + + + + + +
+ ''' + max_score: 4 + else if cmd == 'get_next_submission' + response = + success: true + submission_id: 1 + submission_key: 'abcd' + student_response: '''Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed nec tristique ante. Proin at mauris sapien, quis varius leo. Morbi laoreet leo nisi. Morbi aliquam lacus ante. Cras iaculis velit sed diam mattis a fermentum urna luctus. Duis consectetur nunc vitae felis facilisis eget vulputate risus viverra. Cras consectetur ullamcorper lobortis. Nam eu gravida lorem. Nulla facilisi. Nullam quis felis enim. Mauris orci lectus, dictum id cursus in, vulputate in massa. + +Phasellus non varius sem. Nullam commodo lacinia odio sit amet egestas. Donec ullamcorper sapien sagittis arcu volutpat placerat. Phasellus ut pretium ante. Nam dictum pulvinar nibh dapibus tristique. Sed at tellus mi, fringilla convallis justo. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus tristique rutrum nulla sed eleifend. Praesent at nunc arcu. Mauris condimentum faucibus nibh, eget commodo quam viverra sed. Morbi in tincidunt dolor. Morbi sed augue et augue interdum fermentum. + +Curabitur tristique purus ac arcu consequat cursus. Cras diam felis, dignissim quis placerat at, aliquet ac metus. Mauris vulputate est eu nibh imperdiet varius. Cras aliquet rhoncus elit a laoreet. Mauris consectetur erat et erat scelerisque eu faucibus dolor consequat. Nam adipiscing sagittis nisl, eu mollis massa tempor ac. Nulla scelerisque tempus blandit. Phasellus ac ipsum eros, id posuere arcu. Nullam non sapien arcu. Vivamus sit amet lorem justo, ac tempus turpis. Suspendisse pharetra gravida imperdiet. Pellentesque lacinia mi eu elit luctus pellentesque. Sed accumsan libero a magna elementum varius. Nunc eget pellentesque metus. ''' + prompt: ''' +

S11E3: Metal Bands

+

Shown below are schematic band diagrams for two different metals. Both diagrams appear different, yet both of the elements are indisputably metallic in nature.

+

* Why is it that both sodium and magnesium behave as metals, even though the s-band of magnesium is filled?

+

This is a self-assessed open response question. Please use as much space as you need in the box below to answer the question.

+ ''' + rubric: ''' + + + + + + + + + + + + + + + + + + +
Purpose + + + + + + + +
Organization + + + + + + + +
+ ''' + max_score: 4 + else if cmd == 'save_calibration_essay' + response = + success: true + actual_score: 2 + else if cmd == 'save_grade' + response = + success: true + + return response + + +class PeerGradingProblem + constructor: (backend) -> + @prompt_wrapper = $('.prompt-wrapper') + @backend = backend + + + # get the location of the problem + @location = $('.peer-grading').data('location') + # prevent this code from trying to run + # when we don't have a location + if(!@location) + return + + # get the other elements we want to fill in + @submission_container = $('.submission-container') + @prompt_container = $('.prompt-container') + @rubric_container = $('.rubric-container') + @flag_student_container = $('.flag-student-container') + @calibration_panel = $('.calibration-panel') + @grading_panel = $('.grading-panel') + @content_panel = $('.content-panel') + @grading_message = $('.grading-message') + @grading_message.hide() + + @grading_wrapper =$('.grading-wrapper') + @calibration_feedback_panel = $('.calibration-feedback') + @interstitial_page = $('.interstitial-page') + @interstitial_page.hide() + + @error_container = $('.error-container') + + @submission_key_input = $("input[name='submission-key']") + @essay_id_input = $("input[name='essay-id']") + @feedback_area = $('.feedback-area') + + @score_selection_container = $('.score-selection-container') + @rubric_selection_container = $('.rubric-selection-container') + @grade = null + @calibration = null + + @submit_button = $('.submit-button') + @action_button = $('.action-button') + @calibration_feedback_button = $('.calibration-feedback-button') + @interstitial_page_button = $('.interstitial-page-button') + @flag_student_checkbox = $('.flag-checkbox') + + Collapsible.setCollapsibles(@content_panel) + + # Set up the click event handlers + @action_button.click -> history.back() + @calibration_feedback_button.click => + @calibration_feedback_panel.hide() + @grading_wrapper.show() + @is_calibrated_check() + + 
@interstitial_page_button.click => + @interstitial_page.hide() + @is_calibrated_check() + + @is_calibrated_check() + + + ########## + # + # Ajax calls to the backend + # + ########## + is_calibrated_check: () => + @backend.post('is_student_calibrated', {location: @location}, @calibration_check_callback) + + fetch_calibration_essay: () => + @backend.post('show_calibration_essay', {location: @location}, @render_calibration) + + fetch_submission_essay: () => + @backend.post('get_next_submission', {location: @location}, @render_submission) + + # finds the scores for each rubric category + get_score_list: () => + # find the number of categories: + num_categories = $('table.rubric tr').length + + score_lst = [] + # get the score for each one + for i in [0..(num_categories-1)] + score = $("input[name='score-selection-#{i}']:checked").val() + score_lst.push(score) + + return score_lst + + construct_data: () -> + data = + rubric_scores: @get_score_list() + score: @grade + location: @location + submission_id: @essay_id_input.val() + submission_key: @submission_key_input.val() + feedback: @feedback_area.val() + submission_flagged: @flag_student_checkbox.is(':checked') + return data + + + submit_calibration_essay: ()=> + data = @construct_data() + @backend.post('save_calibration_essay', data, @calibration_callback) + + submit_grade: () => + data = @construct_data() + @backend.post('save_grade', data, @submission_callback) + + + ########## + # + # Callbacks for various events + # + ########## + + # called after we perform an is_student_calibrated check + calibration_check_callback: (response) => + if response.success + # if we haven't been calibrating before + if response.calibrated and (@calibration == null or @calibration == false) + @calibration = false + @fetch_submission_essay() + # If we were calibrating before and no longer need to, + # show the interstitial page + else if response.calibrated and @calibration == true + @calibration = false + @render_interstitial_page() + 
else + @calibration = true + @fetch_calibration_essay() + else if response.error + @render_error(response.error) + else + @render_error("Error contacting the grading service") + + + # called after we submit a calibration score + calibration_callback: (response) => + if response.success + @render_calibration_feedback(response) + else if response.error + @render_error(response.error) + else + @render_error("Error saving calibration score") + + # called after we submit a submission score + submission_callback: (response) => + if response.success + @is_calibrated_check() + @grading_message.fadeIn() + @grading_message.html("

Grade sent successfully.

") + else + if response.error + @render_error(response.error) + else + @render_error("Error occurred while submitting grade") + + # called after a grade is selected on the interface + graded_callback: (event) => + @grade = $("input[name='grade-selection']:checked").val() + if @grade == undefined + return + # check to see whether or not any categories have not been scored + num_categories = $('table.rubric tr').length + for i in [0..(num_categories-1)] + score = $("input[name='score-selection-#{i}']:checked").val() + if score == undefined + return + # show button if we have scores for all categories + @show_submit_button() + + + + ########## + # + # Rendering methods and helpers + # + ########## + # renders a calibration essay + render_calibration: (response) => + if response.success + + # load in all the data + @submission_container.html("

Training Essay

") + @render_submission_data(response) + # TODO: indicate that we're in calibration mode + @calibration_panel.addClass('current-state') + @grading_panel.removeClass('current-state') + + # Display the right text + # both versions of the text are written into the template itself + # we only need to show/hide the correct ones at the correct time + @calibration_panel.find('.calibration-text').show() + @grading_panel.find('.calibration-text').show() + @calibration_panel.find('.grading-text').hide() + @grading_panel.find('.grading-text').hide() + @flag_student_container.hide() + + @submit_button.unbind('click') + @submit_button.click @submit_calibration_essay + + else if response.error + @render_error(response.error) + else + @render_error("An error occurred while retrieving the next calibration essay") + + # Renders a student submission to be graded + render_submission: (response) => + if response.success + @submit_button.hide() + @submission_container.html("

Submitted Essay

") + @render_submission_data(response) + + @calibration_panel.removeClass('current-state') + @grading_panel.addClass('current-state') + + # Display the correct text + # both versions of the text are written into the template itself + # we only need to show/hide the correct ones at the correct time + @calibration_panel.find('.calibration-text').hide() + @grading_panel.find('.calibration-text').hide() + @calibration_panel.find('.grading-text').show() + @grading_panel.find('.grading-text').show() + @flag_student_container.show() + + @submit_button.unbind('click') + @submit_button.click @submit_grade + else if response.error + @render_error(response.error) + else + @render_error("An error occured when retrieving the next submission.") + + + make_paragraphs: (text) -> + paragraph_split = text.split(/\n\s*\n/) + new_text = '' + for paragraph in paragraph_split + new_text += "

#{paragraph}

" + return new_text + + # render common information between calibration and grading + render_submission_data: (response) => + @content_panel.show() + + @submission_container.append(@make_paragraphs(response.student_response)) + @prompt_container.html(response.prompt) + @rubric_selection_container.html(response.rubric) + @submission_key_input.val(response.submission_key) + @essay_id_input.val(response.submission_id) + @setup_score_selection(response.max_score) + + @submit_button.hide() + @action_button.hide() + @calibration_feedback_panel.hide() + + + render_calibration_feedback: (response) => + # display correct grade + @calibration_feedback_panel.slideDown() + calibration_wrapper = $('.calibration-feedback-wrapper') + calibration_wrapper.html("

The score you gave was: #{@grade}. The actual score is: #{response.actual_score}

") + + + score = parseInt(@grade) + actual_score = parseInt(response.actual_score) + + if score == actual_score + calibration_wrapper.append("

Congratulations! Your score matches the actual score!

") + else + calibration_wrapper.append("

Please try to understand the grading criteria better to be more accurate next time.

") + + # disable score selection and submission from the grading interface + $("input[name='score-selection']").attr('disabled', true) + @submit_button.hide() + + render_interstitial_page: () => + @content_panel.hide() + @interstitial_page.show() + + render_error: (error_message) => + @error_container.show() + @calibration_feedback_panel.hide() + @error_container.html(error_message) + @content_panel.hide() + @action_button.show() + + show_submit_button: () => + @submit_button.show() + + setup_score_selection: (max_score) => + + # first, get rid of all the old inputs, if any. + @score_selection_container.html(""" +

Overall Score

+

Choose an overall score for this submission.

+ """) + + # Now create new labels and inputs for each possible score. + for score in [0..max_score] + id = 'score-' + score + label = """""" + + input = """ + + """ # " fix broken parsing in emacs + @score_selection_container.append(input + label) + + # And now hook up an event handler again + $("input[name='score-selection']").change @graded_callback + $("input[name='grade-selection']").change @graded_callback + + + +mock_backend = false +ajax_url = $('.peer-grading').data('ajax_url') +backend = new PeerGradingProblemBackend(ajax_url, mock_backend) +$(document).ready(() -> new PeerGradingProblem(backend)) diff --git a/common/lib/xmodule/xmodule/peer_grading_module.py b/common/lib/xmodule/xmodule/peer_grading_module.py new file mode 100644 index 0000000000..8002a8d923 --- /dev/null +++ b/common/lib/xmodule/xmodule/peer_grading_module.py @@ -0,0 +1,439 @@ +""" +This module provides an interface on the grading-service backend +for peer grading + +Use peer_grading_service() to get the version specified +in settings.PEER_GRADING_INTERFACE + +""" +import json +import logging +import requests +from requests.exceptions import RequestException, ConnectionError, HTTPError +import sys + +from django.conf import settings +from django.http import HttpResponse, Http404 +from grading_service import GradingService +from grading_service import GradingServiceError + +from courseware.access import has_access +from util.json_request import expect_json +from xmodule.course_module import CourseDescriptor +from xmodule.combined_open_ended_rubric import CombinedOpenEndedRubric +from student.models import unique_id_for_user +from lxml import etree + +import copy +from fs.errors import ResourceNotFoundError +import itertools +import json +import logging +from lxml import etree +from lxml.html import rewrite_links +from path import path +import os +import sys + +from pkg_resources import resource_string +from .capa_module import only_one, ComplexEncoder + +from peer_grading_service import 
peer_grading_service + +log = logging.getLogger(__name__) + +class PeerGradingModule(XModule): + _VERSION = 1 + + js = {'coffee': [resource_string(__name__, 'js/src/combinedopenended/display.coffee'), + resource_string(__name__, 'js/src/collapsible.coffee'), + resource_string(__name__, 'js/src/javascript_loader.coffee'), + ]} + js_module_name = "PeerGrading" + + css = {'scss': [resource_string(__name__, 'css/combinedopenended/display.scss')]} + + def __init__(self, system, location, definition, descriptor, + instance_state=None, shared_state=None, **kwargs): + XModule.__init__(self, system, location, definition, descriptor, + instance_state, shared_state, **kwargs) + + # Load instance state + if instance_state is not None: + instance_state = json.loads(instance_state) + else: + instance_state = {} + + #We need to set the location here so the child modules can use it + system.set('location', location) + self.peer_gs = peer_grading_service() + log.debug(self.system) + + def _err_response(self, msg): + """ + Return a HttpResponse with a json dump with success=False, and the given error message. + """ + return HttpResponse(json.dumps({'success': False, 'error': msg}), + mimetype="application/json") + + def _check_required(self, get, required): + actual = set(get.keys()) + missing = required - actual + if len(missing) > 0: + return False, "Missing required keys: {0}".format(', '.join(missing)) + else: + return True, "" + + def get_html(self): + """ + Needs to be implemented by inheritors. Renders the HTML that students see. + @return: + """ + pass + + def handle_ajax(self, dispatch, get): + """ + Needs to be implemented by child modules. Handles AJAX events. 
+ @return: + """ + + handlers = { + 'get_next_submission': self.get_next_submission, + 'show_calibration_essay': self.show_calibration_essay, + 'save_post_assessment': self.message_post, + 'is_student_calibrated': self.is_student_calibrated, + 'save_grade': self.save_grade, + 'save_calibration_essay' : self.save_calibration_essay, + } + + if dispatch not in handlers: + return 'Error' + + before = self.get_progress() + d = handlers[dispatch](get) + after = self.get_progress() + d.update({ + 'progress_changed': after != before, + 'progress_status': Progress.to_js_status_str(after), + }) + return json.dumps(d, cls=ComplexEncoder) + + def get_next_submission(self, get): + """ + Makes a call to the grading controller for the next essay that should be graded + Returns a json dict with the following keys: + + 'success': bool + + 'submission_id': a unique identifier for the submission, to be passed back + with the grade. + + 'submission': the submission, rendered as read-only html for grading + + 'rubric': the rubric, also rendered as html. + + 'submission_key': a key associated with the submission for validation reasons + + 'error': if success is False, will have an error message with more info. + """ + _check_post(request) + required = set(['location']) + success, message = _check_required(request, required) + if not success: + return _err_response(message) + grader_id = unique_id_for_user(request.user) + p = request.POST + location = p['location'] + + try: + response = peer_grading_service().get_next_submission(location, grader_id) + return HttpResponse(response, + mimetype="application/json") + except GradingServiceError: + log.exception("Error getting next submission. server url: {0} location: {1}, grader_id: {2}" + .format(peer_grading_service().url, location, grader_id)) + return json.dumps({'success': False, + 'error': 'Could not connect to grading service'}) + + def save_grade(self, get): + """ + Saves the grade of a given submission. 
+ Input: + The request should have the following keys: + location - problem location + submission_id - id associated with this submission + submission_key - submission key given for validation purposes + score - the grade that was given to the submission + feedback - the feedback from the student + Returns + A json object with the following keys: + success: bool indicating whether the save was a success + error: if there was an error in the submission, this is the error message + """ + _check_post(request) + required = set(['location', 'submission_id', 'submission_key', 'score', 'feedback', 'rubric_scores[]', 'submission_flagged']) + success, message = _check_required(request, required) + if not success: + return _err_response(message) + grader_id = unique_id_for_user(request.user) + p = request.POST + location = p['location'] + submission_id = p['submission_id'] + score = p['score'] + feedback = p['feedback'] + submission_key = p['submission_key'] + rubric_scores = p.getlist('rubric_scores[]') + submission_flagged = p['submission_flagged'] + try: + response = peer_grading_service().save_grade(location, grader_id, submission_id, + score, feedback, submission_key, rubric_scores, submission_flagged) + return HttpResponse(response, mimetype="application/json") + except GradingServiceError: + log.exception("""Error saving grade. 
server url: {0}, location: {1}, submission_id:{2}, + submission_key: {3}, score: {4}""" + .format(peer_grading_service().url, + location, submission_id, submission_key, score) + ) + return json.dumps({'success': False, + 'error': 'Could not connect to grading service'}) + + + + def is_student_calibrated(self, get): + """ + Calls the grading controller to see if the given student is calibrated + on the given problem + + Input: + In the request, we need the following arguments: + location - problem location + + Returns: + Json object with the following keys + success - bool indicating whether or not the call was successful + calibrated - true if the grader has fully calibrated and can now move on to grading + - false if the grader is still working on calibration problems + total_calibrated_on_so_far - the number of calibration essays for this problem + that this grader has graded + """ + _check_post(request) + required = set(['location']) + success, message = _check_required(request, required) + if not success: + return _err_response(message) + grader_id = unique_id_for_user(request.user) + p = request.POST + location = p['location'] + + try: + response = peer_grading_service().is_student_calibrated(location, grader_id) + return HttpResponse(response, mimetype="application/json") + except GradingServiceError: + log.exception("Error from grading service. server url: {0}, grader_id: {0}, location: {1}" + .format(peer_grading_service().url, grader_id, location)) + return json.dumps({'success': False, + 'error': 'Could not connect to grading service'}) + + + + def show_calibration_essay(self, get): + """ + Fetch the next calibration essay from the grading controller and return it + Inputs: + In the request + location - problem location + + Returns: + A json dict with the following keys + 'success': bool + + 'submission_id': a unique identifier for the submission, to be passed back + with the grade. 
+ + 'submission': the submission, rendered as read-only html for grading + + 'rubric': the rubric, also rendered as html. + + 'submission_key': a key associated with the submission for validation reasons + + 'error': if success is False, will have an error message with more info. + + """ + _check_post(request) + + required = set(['location']) + success, message = _check_required(request, required) + if not success: + return _err_response(message) + + grader_id = unique_id_for_user(request.user) + p = request.POST + location = p['location'] + try: + response = peer_grading_service().show_calibration_essay(location, grader_id) + return HttpResponse(response, mimetype="application/json") + except GradingServiceError: + log.exception("Error from grading service. server url: {0}, location: {0}" + .format(peer_grading_service().url, location)) + return json.dumps({'success': False, + 'error': 'Could not connect to grading service'}) + # if we can't parse the rubric into HTML, + except etree.XMLSyntaxError: + log.exception("Cannot parse rubric string. Raw string: {0}" + .format(rubric)) + return json.dumps({'success': False, + 'error': 'Error displaying submission'}) + + + def save_calibration_essay(self, get): + """ + Saves the grader's grade of a given calibration. 
+ Input: + The request should have the following keys: + location - problem location + submission_id - id associated with this submission + submission_key - submission key given for validation purposes + score - the grade that was given to the submission + feedback - the feedback from the student + Returns + A json object with the following keys: + success: bool indicating whether the save was a success + error: if there was an error in the submission, this is the error message + actual_score: the score that the instructor gave to this calibration essay + + """ + _check_post(request) + + required = set(['location', 'submission_id', 'submission_key', 'score', 'feedback', 'rubric_scores[]']) + success, message = _check_required(request, required) + if not success: + return _err_response(message) + grader_id = unique_id_for_user(request.user) + p = request.POST + location = p['location'] + calibration_essay_id = p['submission_id'] + submission_key = p['submission_key'] + score = p['score'] + feedback = p['feedback'] + rubric_scores = p.getlist('rubric_scores[]') + + try: + response = peer_grading_service().save_calibration_essay(location, grader_id, calibration_essay_id, + submission_key, score, feedback, rubric_scores) + return HttpResponse(response, mimetype="application/json") + except GradingServiceError: + log.exception("Error saving calibration grade, location: {0}, submission_id: {1}, submission_key: {2}, grader_id: {3}".format(location, submission_id, submission_key, grader_id)) + return _err_response('Could not connect to grading service') + def peer_grading(self, request, course_id): + ''' + Show a peer grading interface + ''' + + # call problem list service + success = False + error_text = "" + problem_list = [] + try: + problem_list_json = self.peer_gs.get_problem_list(course_id, unique_id_for_user(request.user)) + problem_list_dict = json.loads(problem_list_json) + success = problem_list_dict['success'] + if 'error' in problem_list_dict: + error_text = 
problem_list_dict['error'] + + problem_list = problem_list_dict['problem_list'] + + except GradingServiceError: + error_text = "Error occured while contacting the grading service" + success = False + # catch error if if the json loads fails + except ValueError: + error_text = "Could not get problem list" + success = False + + ajax_url = _reverse_with_slash('peer_grading', course_id) + + return self.system.render_template('peer_grading/peer_grading.html', { + 'course': course, + 'course_id': course_id, + 'ajax_url': ajax_url, + 'success': success, + 'problem_list': problem_list, + 'error_text': error_text, + # Checked above + 'staff_access': False, }) + + + def peer_grading_problem(request, course_id): + ''' + Show individual problem interface + ''' + course = get_course_with_access(request.user, course_id, 'load') + problem_location = request.GET.get("location") + + ajax_url = _reverse_with_slash('peer_grading', course_id) + + return render_to_response('peer_grading/peer_grading_problem.html', { + 'view_html': '', + 'course': course, + 'problem_location': problem_location, + 'course_id': course_id, + 'ajax_url': ajax_url, + # Checked above + 'staff_access': False, }) + +class PeerGradingDescriptor(XmlDescriptor, EditingDescriptor): + """ + Module for adding combined open ended questions + """ + mako_template = "widgets/html-edit.html" + module_class = CombinedOpenEndedModule + filename_extension = "xml" + + stores_state = True + has_score = True + template_dir_name = "peer_grading" + + js = {'coffee': [resource_string(__name__, 'js/src/html/edit.coffee')]} + js_module_name = "HTMLEditingDescriptor" + + @classmethod + def definition_from_xml(cls, xml_object, system): + """ + Pull out the individual tasks, the rubric, and the prompt, and parse + + Returns: + { + 'rubric': 'some-html', + 'prompt': 'some-html', + 'task_xml': dictionary of xml strings, + } + """ + expected_children = [] + for child in expected_children: + if len(xml_object.xpath(child)) == 0: + raise 
ValueError("Peer grading definition must include at least one '{0}' tag".format(child)) + + def parse_task(k): + """Assumes that xml_object has child k""" + return [stringify_children(xml_object.xpath(k)[i]) for i in xrange(0, len(xml_object.xpath(k)))] + + def parse(k): + """Assumes that xml_object has child k""" + return xml_object.xpath(k)[0] + + return {} + + + def definition_to_xml(self, resource_fs): + '''Return an xml element representing this definition.''' + elt = etree.Element('peergrading') + + def add_child(k): + child_str = '<{tag}>{body}'.format(tag=k, body=self.definition[k]) + child_node = etree.fromstring(child_str) + elt.append(child_node) + + for child in ['task']: + add_child(child) + + return elt \ No newline at end of file diff --git a/common/lib/xmodule/xmodule/peer_grading_service.py b/common/lib/xmodule/xmodule/peer_grading_service.py new file mode 100644 index 0000000000..e2a5d72b6c --- /dev/null +++ b/common/lib/xmodule/xmodule/peer_grading_service.py @@ -0,0 +1,256 @@ +from .capa_module import only_one, ComplexEncoder +from .editing_module import EditingDescriptor +from .html_checker import check_html +from progress import Progress +from .stringify import stringify_children +from .x_module import XModule +from .xml_module import XmlDescriptor +from xmodule.modulestore import Location +import self_assessment_module +import open_ended_module +from combined_open_ended_rubric import CombinedOpenEndedRubric, RubricParsingError +from .stringify import stringify_children +import json +import logging +import requests +from requests.exceptions import RequestException, ConnectionError, HTTPError +import sys + +from django.conf import settings +from django.http import HttpResponse, Http404 + +from courseware.access import has_access +from util.json_request import expect_json +from xmodule.course_module import CourseDescriptor +from combined_open_ended_rubric import CombinedOpenEndedRubric, RubricParsingError +from lxml import etree + + + +from 
django.conf import settings + +class PeerGradingService(): + """ + Interface with the grading controller for peer grading + """ + def __init__(self, config): + self.username = config['username'] + self.password = config['password'] + self.url = config['url'] + self.login_url = self.url + '/login/' + self.session = requests.session() + self.get_next_submission_url = self.url + '/get_next_submission/' + self.save_grade_url = self.url + '/save_grade/' + self.is_student_calibrated_url = self.url + '/is_student_calibrated/' + self.show_calibration_essay_url = self.url + '/show_calibration_essay/' + self.save_calibration_essay_url = self.url + '/save_calibration_essay/' + self.get_problem_list_url = self.url + '/get_problem_list/' + self.get_notifications_url = self.url + '/get_notifications/' + + def get_next_submission(self, problem_location, grader_id): + response = self.get(self.get_next_submission_url, + {'location': problem_location, 'grader_id': grader_id}) + return json.dumps(self._render_rubric(response)) + + def save_grade(self, location, grader_id, submission_id, score, feedback, submission_key, rubric_scores, submission_flagged): + data = {'grader_id' : grader_id, + 'submission_id' : submission_id, + 'score' : score, + 'feedback' : feedback, + 'submission_key': submission_key, + 'location': location, + 'rubric_scores': rubric_scores, + 'rubric_scores_complete': True, + 'submission_flagged' : submission_flagged} + return self.post(self.save_grade_url, data) + + def is_student_calibrated(self, problem_location, grader_id): + params = {'problem_id' : problem_location, 'student_id': grader_id} + return self.get(self.is_student_calibrated_url, params) + + def show_calibration_essay(self, problem_location, grader_id): + params = {'problem_id' : problem_location, 'student_id': grader_id} + response = self.get(self.show_calibration_essay_url, params) + return json.dumps(self._render_rubric(response)) + + def save_calibration_essay(self, problem_location, grader_id, 
calibration_essay_id, submission_key, + score, feedback, rubric_scores): + data = {'location': problem_location, + 'student_id': grader_id, + 'calibration_essay_id': calibration_essay_id, + 'submission_key': submission_key, + 'score': score, + 'feedback': feedback, + 'rubric_scores[]': rubric_scores, + 'rubric_scores_complete': True} + return self.post(self.save_calibration_essay_url, data) + + def get_problem_list(self, course_id, grader_id): + params = {'course_id': course_id, 'student_id': grader_id} + response = self.get(self.get_problem_list_url, params) + return response + + def get_notifications(self, course_id, grader_id): + params = {'course_id': course_id, 'student_id': grader_id} + response = self.get(self.get_notifications_url, params) + return response + + def _login(self): + """ + Log into the staff grading service. + + Raises requests.exceptions.HTTPError if something goes wrong. + + Returns the decoded json dict of the response. + """ + response = self.session.post(self.login_url, + {'username': self.username, + 'password': self.password,}) + + response.raise_for_status() + + return response.json + + def post(self, url, data, allow_redirects=False): + """ + Make a post request to the grading controller + """ + try: + op = lambda: self.session.post(url, data=data, + allow_redirects=allow_redirects) + r = self._try_with_login(op) + except (RequestException, ConnectionError, HTTPError) as err: + # reraise as promised GradingServiceError, but preserve stacktrace. + raise GradingServiceError, str(err), sys.exc_info()[2] + + return r.text + + def get(self, url, params, allow_redirects=False): + """ + Make a get request to the grading controller + """ + log.debug(params) + op = lambda: self.session.get(url, + allow_redirects=allow_redirects, + params=params) + try: + r = self._try_with_login(op) + except (RequestException, ConnectionError, HTTPError) as err: + # reraise as promised GradingServiceError, but preserve stacktrace. 
+ raise GradingServiceError, str(err), sys.exc_info()[2] + + return r.text + + + def _try_with_login(self, operation): + """ + Call operation(), which should return a requests response object. If + the request fails with a 'login_required' error, call _login() and try + the operation again. + + Returns the result of operation(). Does not catch exceptions. + """ + response = operation() + if (response.json + and response.json.get('success') == False + and response.json.get('error') == 'login_required'): + # apparrently we aren't logged in. Try to fix that. + r = self._login() + if r and not r.get('success'): + log.warning("Couldn't log into peer grading backend. Response: %s", + r) + # try again + response = operation() + response.raise_for_status() + + return response + + def _render_rubric(self, response, view_only=False): + """ + Given an HTTP Response with the key 'rubric', render out the html + required to display the rubric and put it back into the response + + returns the updated response as a dictionary that can be serialized later + + """ + try: + response_json = json.loads(response) + if 'rubric' in response_json: + rubric = response_json['rubric'] + rubric_renderer = CombinedOpenEndedRubric(self.system, False) + success, rubric_html = rubric_renderer.render_rubric(rubric) + response_json['rubric'] = rubric_html + return response_json + # if we can't parse the rubric into HTML, + except etree.XMLSyntaxError, RubricParsingError: + log.exception("Cannot parse rubric string. 
Raw string: {0}" + .format(rubric)) + return {'success': False, + 'error': 'Error displaying submission'} + except ValueError: + log.exception("Error parsing response: {0}".format(response)) + return {'success': False, + 'error': "Error displaying submission"} + +""" +This is a mock peer grading service that can be used for unit tests +without making actual service calls to the grading controller +""" +class MockPeerGradingService(object): + def get_next_submission(self, problem_location, grader_id): + return json.dumps({'success': True, + 'submission_id':1, + 'submission_key': "", + 'student_response': 'fake student response', + 'prompt': 'fake submission prompt', + 'rubric': 'fake rubric', + 'max_score': 4}) + + def save_grade(self, location, grader_id, submission_id, + score, feedback, submission_key): + return json.dumps({'success': True}) + + def is_student_calibrated(self, problem_location, grader_id): + return json.dumps({'success': True, 'calibrated': True}) + + def show_calibration_essay(self, problem_location, grader_id): + return json.dumps({'success': True, + 'submission_id':1, + 'submission_key': '', + 'student_response': 'fake student response', + 'prompt': 'fake submission prompt', + 'rubric': 'fake rubric', + 'max_score': 4}) + + def save_calibration_essay(self, problem_location, grader_id, + calibration_essay_id, submission_key, score, feedback): + return {'success': True, 'actual_score': 2} + + def get_problem_list(self, course_id, grader_id): + return json.dumps({'success': True, + 'problem_list': [ + json.dumps({'location': 'i4x://MITx/3.091x/problem/open_ended_demo1', + 'problem_name': "Problem 1", 'num_graded': 3, 'num_pending': 5}), + json.dumps({'location': 'i4x://MITx/3.091x/problem/open_ended_demo2', + 'problem_name': "Problem 2", 'num_graded': 1, 'num_pending': 5}) + ]}) + +_service = None +def peer_grading_service(): + """ + Return a peer grading service instance--if settings.MOCK_PEER_GRADING is True, + returns a mock one, otherwise a 
real one. + + Caches the result, so changing the setting after the first call to this + function will have no effect. + """ + global _service + if _service is not None: + return _service + + if settings.MOCK_PEER_GRADING: + _service = MockPeerGradingService() + else: + _service = PeerGradingService(settings.PEER_GRADING_INTERFACE) + + return _service From bdb82cda657adcc87aca9d5a4c83b122139451a4 Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Thu, 31 Jan 2013 18:23:30 -0500 Subject: [PATCH 032/126] Strip out JS, old urls --- .../js/src/peergrading/peer_grading.coffee | 2 +- .../xmodule/xmodule/peer_grading_module.py | 63 ++++++++++--------- .../xmodule/xmodule/peer_grading_service.py | 5 +- lms/envs/common.py | 7 +-- lms/templates/peer_grading/peer_grading.html | 16 ----- .../peer_grading/peer_grading_problem.html | 18 ------ lms/urls.py | 17 ----- 7 files changed, 39 insertions(+), 89 deletions(-) diff --git a/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading.coffee b/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading.coffee index ed79ba9c71..a82353b7ef 100644 --- a/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading.coffee +++ b/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading.coffee @@ -9,7 +9,7 @@ class PeerGrading @message_container = $('.message-container') @message_container.toggle(not @message_container.is(':empty')) - + @problem_list = $('.problem-list') @construct_progress_bar() diff --git a/common/lib/xmodule/xmodule/peer_grading_module.py b/common/lib/xmodule/xmodule/peer_grading_module.py index 8002a8d923..f6e5af6752 100644 --- a/common/lib/xmodule/xmodule/peer_grading_module.py +++ b/common/lib/xmodule/xmodule/peer_grading_module.py @@ -38,14 +38,15 @@ import sys from pkg_resources import resource_string from .capa_module import only_one, ComplexEncoder -from peer_grading_service import peer_grading_service +from peer_grading_service import peer_grading_service, GradingServiceError log = logging.getLogger(__name__) 
class PeerGradingModule(XModule): _VERSION = 1 - js = {'coffee': [resource_string(__name__, 'js/src/combinedopenended/display.coffee'), + js = {'coffee': [resource_string(__name__, 'js/src/peergrading/peer_grading.coffee'), + resource_string(__name__, 'js/src/peergrading/peer_grading_problem.coffee'), resource_string(__name__, 'js/src/collapsible.coffee'), resource_string(__name__, 'js/src/javascript_loader.coffee'), ]} @@ -66,6 +67,7 @@ class PeerGradingModule(XModule): #We need to set the location here so the child modules can use it system.set('location', location) + self.system = system self.peer_gs = peer_grading_service() log.debug(self.system) @@ -104,20 +106,22 @@ class PeerGradingModule(XModule): 'is_student_calibrated': self.is_student_calibrated, 'save_grade': self.save_grade, 'save_calibration_essay' : self.save_calibration_essay, + 'show_problem' : self.peer_grading_problem, } if dispatch not in handlers: return 'Error' - before = self.get_progress() d = handlers[dispatch](get) - after = self.get_progress() - d.update({ - 'progress_changed': after != before, - 'progress_status': Progress.to_js_status_str(after), - }) + return json.dumps(d, cls=ComplexEncoder) + def get_progress(self): + pass + + def get_score(self): + pass + def get_next_submission(self, get): """ Makes a call to the grading controller for the next essay that should be graded @@ -146,12 +150,12 @@ class PeerGradingModule(XModule): location = p['location'] try: - response = peer_grading_service().get_next_submission(location, grader_id) + response = self.peer_gs.get_next_submission(location, grader_id) return HttpResponse(response, mimetype="application/json") except GradingServiceError: log.exception("Error getting next submission. 
server url: {0} location: {1}, grader_id: {2}" - .format(peer_grading_service().url, location, grader_id)) + .format(self.peer_gs.url, location, grader_id)) return json.dumps({'success': False, 'error': 'Could not connect to grading service'}) @@ -185,20 +189,18 @@ class PeerGradingModule(XModule): rubric_scores = p.getlist('rubric_scores[]') submission_flagged = p['submission_flagged'] try: - response = peer_grading_service().save_grade(location, grader_id, submission_id, + response = self.peer_gs.save_grade(location, grader_id, submission_id, score, feedback, submission_key, rubric_scores, submission_flagged) return HttpResponse(response, mimetype="application/json") except GradingServiceError: log.exception("""Error saving grade. server url: {0}, location: {1}, submission_id:{2}, submission_key: {3}, score: {4}""" - .format(peer_grading_service().url, + .format(self.peer_gs.url, location, submission_id, submission_key, score) ) return json.dumps({'success': False, 'error': 'Could not connect to grading service'}) - - def is_student_calibrated(self, get): """ Calls the grading controller to see if the given student is calibrated @@ -226,16 +228,14 @@ class PeerGradingModule(XModule): location = p['location'] try: - response = peer_grading_service().is_student_calibrated(location, grader_id) + response = self.peer_gs.is_student_calibrated(location, grader_id) return HttpResponse(response, mimetype="application/json") except GradingServiceError: log.exception("Error from grading service. 
server url: {0}, grader_id: {0}, location: {1}" - .format(peer_grading_service().url, grader_id, location)) + .format(self.peer_gs.url, grader_id, location)) return json.dumps({'success': False, 'error': 'Could not connect to grading service'}) - - def show_calibration_essay(self, get): """ Fetch the next calibration essay from the grading controller and return it @@ -270,11 +270,11 @@ class PeerGradingModule(XModule): p = request.POST location = p['location'] try: - response = peer_grading_service().show_calibration_essay(location, grader_id) + response = self.peer_gs.show_calibration_essay(location, grader_id) return HttpResponse(response, mimetype="application/json") except GradingServiceError: log.exception("Error from grading service. server url: {0}, location: {0}" - .format(peer_grading_service().url, location)) + .format(self.peer_gs.url, location)) return json.dumps({'success': False, 'error': 'Could not connect to grading service'}) # if we can't parse the rubric into HTML, @@ -318,13 +318,14 @@ class PeerGradingModule(XModule): rubric_scores = p.getlist('rubric_scores[]') try: - response = peer_grading_service().save_calibration_essay(location, grader_id, calibration_essay_id, + response = self.peer_gs.save_calibration_essay(location, grader_id, calibration_essay_id, submission_key, score, feedback, rubric_scores) return HttpResponse(response, mimetype="application/json") except GradingServiceError: log.exception("Error saving calibration grade, location: {0}, submission_id: {1}, submission_key: {2}, grader_id: {3}".format(location, submission_id, submission_key, grader_id)) return _err_response('Could not connect to grading service') - def peer_grading(self, request, course_id): + + def peer_grading(self, get = None): ''' Show a peer grading interface ''' @@ -334,7 +335,7 @@ class PeerGradingModule(XModule): error_text = "" problem_list = [] try: - problem_list_json = self.peer_gs.get_problem_list(course_id, unique_id_for_user(request.user)) + 
problem_list_json = self.peer_gs.get_problem_list(course_id, self.system.anonymous_student_id) problem_list_dict = json.loads(problem_list_json) success = problem_list_dict['success'] if 'error' in problem_list_dict: @@ -350,7 +351,7 @@ class PeerGradingModule(XModule): error_text = "Could not get problem list" success = False - ajax_url = _reverse_with_slash('peer_grading', course_id) + ajax_url = self.system.ajax_url return self.system.render_template('peer_grading/peer_grading.html', { 'course': course, @@ -363,16 +364,20 @@ class PeerGradingModule(XModule): 'staff_access': False, }) - def peer_grading_problem(request, course_id): + def peer_grading_problem(self, get = None): ''' Show individual problem interface ''' - course = get_course_with_access(request.user, course_id, 'load') - problem_location = request.GET.get("location") + if get == None: + problem_location = self.system.location + elif get.get('location') is not None: + problem_location = get.get('location') + else: + problem_location = self.system.location - ajax_url = _reverse_with_slash('peer_grading', course_id) + ajax_url = self.system.ajax_url - return render_to_response('peer_grading/peer_grading_problem.html', { + return self.system.render_template('peer_grading/peer_grading_problem.html', { 'view_html': '', 'course': course, 'problem_location': problem_location, diff --git a/common/lib/xmodule/xmodule/peer_grading_service.py b/common/lib/xmodule/xmodule/peer_grading_service.py index e2a5d72b6c..5fc4686533 100644 --- a/common/lib/xmodule/xmodule/peer_grading_service.py +++ b/common/lib/xmodule/xmodule/peer_grading_service.py @@ -25,10 +25,11 @@ from xmodule.course_module import CourseDescriptor from combined_open_ended_rubric import CombinedOpenEndedRubric, RubricParsingError from lxml import etree - - from django.conf import settings +class GradingServiceError(Exception): + pass + class PeerGradingService(): """ Interface with the grading controller for peer grading diff --git 
a/lms/envs/common.py b/lms/envs/common.py index 426c29c7d0..edbec26933 100644 --- a/lms/envs/common.py +++ b/lms/envs/common.py @@ -437,7 +437,6 @@ main_vendor_js = [ discussion_js = sorted(rooted_glob(PROJECT_ROOT / 'static', 'coffee/src/discussion/**/*.coffee')) staff_grading_js = sorted(rooted_glob(PROJECT_ROOT / 'static', 'coffee/src/staff_grading/**/*.coffee')) -peer_grading_js = sorted(rooted_glob(PROJECT_ROOT / 'static','coffee/src/peer_grading/**/*.coffee')) open_ended_js = sorted(rooted_glob(PROJECT_ROOT / 'static','coffee/src/open_ended/**/*.coffee')) PIPELINE_CSS = { @@ -469,7 +468,7 @@ PIPELINE_JS = { 'source_filenames': sorted( set(rooted_glob(COMMON_ROOT / 'static', 'coffee/src/**/*.coffee') + rooted_glob(PROJECT_ROOT / 'static', 'coffee/src/**/*.coffee')) - - set(courseware_js + discussion_js + staff_grading_js + peer_grading_js + open_ended_js) + set(courseware_js + discussion_js + staff_grading_js + open_ended_js) ) + [ 'js/form.ext.js', 'js/my_courses_dropdown.js', @@ -499,10 +498,6 @@ PIPELINE_JS = { 'source_filenames': staff_grading_js, 'output_filename': 'js/staff_grading.js' }, - 'peer_grading' : { - 'source_filenames': peer_grading_js, - 'output_filename': 'js/peer_grading.js' - }, 'open_ended' : { 'source_filenames': open_ended_js, 'output_filename': 'js/open_ended.js' diff --git a/lms/templates/peer_grading/peer_grading.html b/lms/templates/peer_grading/peer_grading.html index bd32b33ec2..fff753da41 100644 --- a/lms/templates/peer_grading/peer_grading.html +++ b/lms/templates/peer_grading/peer_grading.html @@ -1,19 +1,3 @@ -<%inherit file="/main.html" /> -<%block name="bodyclass">${course.css_class} -<%namespace name='static' file='/static_content.html'/> - -<%block name="headextra"> - <%static:css group='course'/> - - -<%block name="title">${course.number} Peer Grading - -<%include file="/courseware/course_navigation.html" args="active_page='peer_grading'" /> - -<%block name="js_extra"> - <%static:js group='peer_grading'/> - -
${error_text}
diff --git a/lms/templates/peer_grading/peer_grading_problem.html b/lms/templates/peer_grading/peer_grading_problem.html index 04ee7415ec..f314b9733a 100644 --- a/lms/templates/peer_grading/peer_grading_problem.html +++ b/lms/templates/peer_grading/peer_grading_problem.html @@ -1,21 +1,3 @@ - -<%inherit file="/main.html" /> -<%block name="bodyclass">${course.css_class} -<%namespace name='static' file='/static_content.html'/> - -<%block name="headextra"> - <%static:css group='course'/> - - -<%block name="title">${course.number} Peer Grading. - -<%include file="/courseware/course_navigation.html" args="active_page='peer_grading'" /> - -<%block name="js_extra"> - <%static:js group='peer_grading'/> - - -
diff --git a/lms/urls.py b/lms/urls.py index e4494e0166..6e8d08e256 100644 --- a/lms/urls.py +++ b/lms/urls.py @@ -265,23 +265,6 @@ if settings.COURSEWARE_ENABLED: url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/staff_grading/get_problem_list$', 'open_ended_grading.staff_grading_service.get_problem_list', name='staff_grading_get_problem_list'), - - # Peer Grading - url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading$', - 'open_ended_grading.views.peer_grading', name='peer_grading'), - url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading/problem$', - 'open_ended_grading.views.peer_grading_problem', name='peer_grading_problem'), - url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading/get_next_submission$', - 'open_ended_grading.peer_grading_service.get_next_submission', name='peer_grading_get_next_submission'), - url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading/show_calibration_essay$', - 'open_ended_grading.peer_grading_service.show_calibration_essay', name='peer_grading_show_calibration_essay'), - url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading/is_student_calibrated$', - 'open_ended_grading.peer_grading_service.is_student_calibrated', name='peer_grading_is_student_calibrated'), - url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading/save_grade$', - 'open_ended_grading.peer_grading_service.save_grade', name='peer_grading_save_grade'), - url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/peer_grading/save_calibration_essay$', - 'open_ended_grading.peer_grading_service.save_calibration_essay', name='peer_grading_save_calibration_essay'), - # Open Ended problem list url(r'^courses/(?P[^/]+/[^/]+/[^/]+)/open_ended_problems$', 'open_ended_grading.views.student_problem_list', name='open_ended_problems'), From 59ba308354388ff8ab8fb4853b72563d968337f5 Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Thu, 31 Jan 2013 18:28:15 -0500 Subject: [PATCH 033/126] Add in peer grading entry point --- common/lib/xmodule/setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/common/lib/xmodule/setup.py 
b/common/lib/xmodule/setup.py index 29227c3188..06df6b1123 100644 --- a/common/lib/xmodule/setup.py +++ b/common/lib/xmodule/setup.py @@ -26,6 +26,7 @@ setup( "html = xmodule.html_module:HtmlDescriptor", "image = xmodule.backcompat_module:TranslateCustomTagDescriptor", "error = xmodule.error_module:ErrorDescriptor", + "peergrading = xmodule.peer_grading_module:PeerGradingDescriptor", "problem = xmodule.capa_module:CapaDescriptor", "problemset = xmodule.seq_module:SequenceDescriptor", "section = xmodule.backcompat_module:SemanticSectionDescriptor", From 4825ad7a1557076b257f0f8d69f0578f18c4742d Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Thu, 31 Jan 2013 18:35:11 -0500 Subject: [PATCH 034/126] Fix module imports --- .../xmodule/xmodule/peer_grading_module.py | 23 ++++++++----------- 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/common/lib/xmodule/xmodule/peer_grading_module.py b/common/lib/xmodule/xmodule/peer_grading_module.py index f6e5af6752..cbcba607eb 100644 --- a/common/lib/xmodule/xmodule/peer_grading_module.py +++ b/common/lib/xmodule/xmodule/peer_grading_module.py @@ -9,34 +9,30 @@ in settings.PEER_GRADING_INTERFACE import json import logging import requests -from requests.exceptions import RequestException, ConnectionError, HTTPError import sys from django.conf import settings from django.http import HttpResponse, Http404 -from grading_service import GradingService -from grading_service import GradingServiceError -from courseware.access import has_access -from util.json_request import expect_json -from xmodule.course_module import CourseDescriptor -from xmodule.combined_open_ended_rubric import CombinedOpenEndedRubric -from student.models import unique_id_for_user +from combined_open_ended_rubric import CombinedOpenEndedRubric from lxml import etree import copy -from fs.errors import ResourceNotFoundError import itertools import json import logging -from lxml import etree from lxml.html import rewrite_links -from path import path 
import os -import sys from pkg_resources import resource_string from .capa_module import only_one, ComplexEncoder +from .editing_module import EditingDescriptor +from .html_checker import check_html +from progress import Progress +from .stringify import stringify_children +from .x_module import XModule +from .xml_module import XmlDescriptor +from xmodule.modulestore import Location from peer_grading_service import peer_grading_service, GradingServiceError @@ -391,7 +387,7 @@ class PeerGradingDescriptor(XmlDescriptor, EditingDescriptor): Module for adding combined open ended questions """ mako_template = "widgets/html-edit.html" - module_class = CombinedOpenEndedModule + module_class = PeerGradingModule filename_extension = "xml" stores_state = True @@ -413,6 +409,7 @@ class PeerGradingDescriptor(XmlDescriptor, EditingDescriptor): 'task_xml': dictionary of xml strings, } """ + log.debug("In definition") expected_children = [] for child in expected_children: if len(xml_object.xpath(child)) == 0: From edce6edb995bbd8087961e0d6fde910383f2dfdd Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Thu, 31 Jan 2013 18:38:55 -0500 Subject: [PATCH 035/126] Clean up peer grading service imports --- .../lib/xmodule/xmodule/peer_grading_service.py | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/common/lib/xmodule/xmodule/peer_grading_service.py b/common/lib/xmodule/xmodule/peer_grading_service.py index 5fc4686533..172a981a96 100644 --- a/common/lib/xmodule/xmodule/peer_grading_service.py +++ b/common/lib/xmodule/xmodule/peer_grading_service.py @@ -1,15 +1,3 @@ -from .capa_module import only_one, ComplexEncoder -from .editing_module import EditingDescriptor -from .html_checker import check_html -from progress import Progress -from .stringify import stringify_children -from .x_module import XModule -from .xml_module import XmlDescriptor -from xmodule.modulestore import Location -import self_assessment_module -import open_ended_module -from 
combined_open_ended_rubric import CombinedOpenEndedRubric, RubricParsingError -from .stringify import stringify_children import json import logging import requests @@ -19,13 +7,10 @@ import sys from django.conf import settings from django.http import HttpResponse, Http404 -from courseware.access import has_access -from util.json_request import expect_json -from xmodule.course_module import CourseDescriptor from combined_open_ended_rubric import CombinedOpenEndedRubric, RubricParsingError from lxml import etree -from django.conf import settings +log=logging.getLogger(__name__) class GradingServiceError(Exception): pass From 4608bb274ea83bf2a96631f97fbba34b966cd648 Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Thu, 31 Jan 2013 18:43:13 -0500 Subject: [PATCH 036/126] Fix system passing issues --- common/lib/xmodule/xmodule/peer_grading_module.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/common/lib/xmodule/xmodule/peer_grading_module.py b/common/lib/xmodule/xmodule/peer_grading_module.py index cbcba607eb..6416c8d6af 100644 --- a/common/lib/xmodule/xmodule/peer_grading_module.py +++ b/common/lib/xmodule/xmodule/peer_grading_module.py @@ -38,6 +38,9 @@ from peer_grading_service import peer_grading_service, GradingServiceError log = logging.getLogger(__name__) +USE_FOR_SINGLE_LOCATION = False +TRUE_DICT = [True, "True", "true", "TRUE"] + class PeerGradingModule(XModule): _VERSION = 1 @@ -67,6 +70,10 @@ class PeerGradingModule(XModule): self.peer_gs = peer_grading_service() log.debug(self.system) + self.use_for_single_location = self.metadata.get('use_for_single_location', USE_FOR_SINGLE_LOCATION) + if isinstance(self.use_for_single_location, basestring): + self.use_for_single_location = (self.use_for_single_location in TRUE_DICT) + def _err_response(self, msg): """ Return a HttpResponse with a json dump with success=False, and the given error message. 
@@ -331,7 +338,7 @@ class PeerGradingModule(XModule): error_text = "" problem_list = [] try: - problem_list_json = self.peer_gs.get_problem_list(course_id, self.system.anonymous_student_id) + problem_list_json = self.peer_gs.get_problem_list(self.system.course_id, self.system.anonymous_student_id) problem_list_dict = json.loads(problem_list_json) success = problem_list_dict['success'] if 'error' in problem_list_dict: @@ -351,7 +358,7 @@ class PeerGradingModule(XModule): return self.system.render_template('peer_grading/peer_grading.html', { 'course': course, - 'course_id': course_id, + 'course_id': self.system.course_id, 'ajax_url': ajax_url, 'success': success, 'problem_list': problem_list, @@ -377,7 +384,7 @@ class PeerGradingModule(XModule): 'view_html': '', 'course': course, 'problem_location': problem_location, - 'course_id': course_id, + 'course_id': self.system.course_id, 'ajax_url': ajax_url, # Checked above 'staff_access': False, }) From d8b94f91bd67ddd8c5dca10b91027a12df75ba46 Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Thu, 31 Jan 2013 18:53:17 -0500 Subject: [PATCH 037/126] Don't pass course to templates --- common/lib/xmodule/xmodule/peer_grading_module.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/common/lib/xmodule/xmodule/peer_grading_module.py b/common/lib/xmodule/xmodule/peer_grading_module.py index 6416c8d6af..87469dba51 100644 --- a/common/lib/xmodule/xmodule/peer_grading_module.py +++ b/common/lib/xmodule/xmodule/peer_grading_module.py @@ -94,7 +94,10 @@ class PeerGradingModule(XModule): Needs to be implemented by inheritors. Renders the HTML that students see. 
@return: """ - pass + if not self.use_for_single_location: + return self.peer_grading() + else: + return self.peer_grading_problem({'location' : self.system.location}) def handle_ajax(self, dispatch, get): """ @@ -357,7 +360,6 @@ class PeerGradingModule(XModule): ajax_url = self.system.ajax_url return self.system.render_template('peer_grading/peer_grading.html', { - 'course': course, 'course_id': self.system.course_id, 'ajax_url': ajax_url, 'success': success, @@ -382,7 +384,6 @@ class PeerGradingModule(XModule): return self.system.render_template('peer_grading/peer_grading_problem.html', { 'view_html': '', - 'course': course, 'problem_location': problem_location, 'course_id': self.system.course_id, 'ajax_url': ajax_url, From c2f644656de792c678993b1c020d098348950d45 Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Thu, 31 Jan 2013 19:12:49 -0500 Subject: [PATCH 038/126] Fix HTML return --- .../js/src/peergrading/peer_grading.coffee | 16 ++++++++++++++- .../xmodule/xmodule/peer_grading_module.py | 20 +++++++++++-------- lms/templates/peer_grading/peer_grading.html | 4 ++-- .../peer_grading/peer_grading_problem.html | 2 +- 4 files changed, 30 insertions(+), 12 deletions(-) diff --git a/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading.coffee b/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading.coffee index a82353b7ef..113f5e02a6 100644 --- a/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading.coffee +++ b/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading.coffee @@ -4,12 +4,18 @@ # becomes more sophisticated class PeerGrading constructor: () -> + @peer_grading_container = $('.peer-grading') + @peer_grading_outer_container = $('.peer-grading-container') + @ajax_url = peer_grading_container.data('ajax-url') @error_container = $('.error-container') @error_container.toggle(not @error_container.is(':empty')) @message_container = $('.message-container') @message_container.toggle(not @message_container.is(':empty')) + @problem_button = 
$('.problem-button') + @problem_button.click show_results + @problem_list = $('.problem-list') @construct_progress_bar() @@ -22,6 +28,14 @@ class PeerGrading bar_max = parseInt(problem.data('required')) + bar_value progress_bar.progressbar({value: bar_value, max: bar_max}) ) - + + show_results: (event) => + location_to_fetch = $(event.target).data('location') + data = {'location' : location_to_fetch} + $.postWithPrefix "#{@ajax_url}problem", data, (response) => + if response.success + @peer_grading_outer_container.after(response.html).remove() + else + @gentle_alert response.error $(document).ready(() -> new PeerGrading()) diff --git a/common/lib/xmodule/xmodule/peer_grading_module.py b/common/lib/xmodule/xmodule/peer_grading_module.py index 87469dba51..c5a08e0812 100644 --- a/common/lib/xmodule/xmodule/peer_grading_module.py +++ b/common/lib/xmodule/xmodule/peer_grading_module.py @@ -74,6 +74,10 @@ class PeerGradingModule(XModule): if isinstance(self.use_for_single_location, basestring): self.use_for_single_location = (self.use_for_single_location in TRUE_DICT) + self.ajax_url = self.system.ajax_url + if not self.ajax_url.endswith("/"): + self.ajax_url = self.ajax_url + "/" + def _err_response(self, msg): """ Return a HttpResponse with a json dump with success=False, and the given error message. 
@@ -108,11 +112,10 @@ class PeerGradingModule(XModule): handlers = { 'get_next_submission': self.get_next_submission, 'show_calibration_essay': self.show_calibration_essay, - 'save_post_assessment': self.message_post, 'is_student_calibrated': self.is_student_calibrated, 'save_grade': self.save_grade, 'save_calibration_essay' : self.save_calibration_essay, - 'show_problem' : self.peer_grading_problem, + 'problem' : self.peer_grading_problem, } if dispatch not in handlers: @@ -357,9 +360,8 @@ class PeerGradingModule(XModule): error_text = "Could not get problem list" success = False - ajax_url = self.system.ajax_url - - return self.system.render_template('peer_grading/peer_grading.html', { + ajax_url = self.ajax_url + html = self.system.render_template('peer_grading/peer_grading.html', { 'course_id': self.system.course_id, 'ajax_url': ajax_url, 'success': success, @@ -368,6 +370,7 @@ class PeerGradingModule(XModule): # Checked above 'staff_access': False, }) + return html def peer_grading_problem(self, get = None): ''' @@ -380,9 +383,8 @@ class PeerGradingModule(XModule): else: problem_location = self.system.location - ajax_url = self.system.ajax_url - - return self.system.render_template('peer_grading/peer_grading_problem.html', { + ajax_url = self.ajax_url + html = self.system.render_template('peer_grading/peer_grading_problem.html', { 'view_html': '', 'problem_location': problem_location, 'course_id': self.system.course_id, @@ -390,6 +392,8 @@ class PeerGradingModule(XModule): # Checked above 'staff_access': False, }) + return {'html' : html, 'success' : True} + class PeerGradingDescriptor(XmlDescriptor, EditingDescriptor): """ Module for adding combined open ended questions diff --git a/lms/templates/peer_grading/peer_grading.html b/lms/templates/peer_grading/peer_grading.html index fff753da41..99ef288e5f 100644 --- a/lms/templates/peer_grading/peer_grading.html +++ b/lms/templates/peer_grading/peer_grading.html @@ -1,4 +1,4 @@ -
+
${error_text}

Peer Grading

@@ -22,7 +22,7 @@ %for problem in problem_list: - ${problem['problem_name']} + ${problem['problem_name']} ${problem['num_graded']} diff --git a/lms/templates/peer_grading/peer_grading_problem.html b/lms/templates/peer_grading/peer_grading_problem.html index f314b9733a..9646b861c1 100644 --- a/lms/templates/peer_grading/peer_grading_problem.html +++ b/lms/templates/peer_grading/peer_grading_problem.html @@ -1,4 +1,4 @@ -
+
From c1583dbba2861434fb37635d031f7b2b7a61c50b Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Thu, 31 Jan 2013 19:57:35 -0500 Subject: [PATCH 039/126] Properly load javascript, fix templates to work with xmodule, modify AJAX handlers --- .../js/src/peergrading/peer_grading.coffee | 14 +- .../peergrading/peer_grading_problem.coffee | 229 +++++++++--------- .../xmodule/xmodule/peer_grading_module.py | 73 +++--- .../xmodule/xmodule/peer_grading_service.py | 2 + lms/templates/peer_grading/peer_grading.html | 2 +- .../peer_grading/peer_grading_problem.html | 2 +- 6 files changed, 159 insertions(+), 163 deletions(-) diff --git a/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading.coffee b/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading.coffee index 113f5e02a6..b8196838f3 100644 --- a/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading.coffee +++ b/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading.coffee @@ -2,11 +2,11 @@ # and message container when they are empty # Can (and should be) expanded upon when our problem list # becomes more sophisticated -class PeerGrading - constructor: () -> +class @PeerGrading + constructor: (element) -> @peer_grading_container = $('.peer-grading') @peer_grading_outer_container = $('.peer-grading-container') - @ajax_url = peer_grading_container.data('ajax-url') + @ajax_url = @peer_grading_container.data('ajax-url') @error_container = $('.error-container') @error_container.toggle(not @error_container.is(':empty')) @@ -14,7 +14,7 @@ class PeerGrading @message_container.toggle(not @message_container.is(':empty')) @problem_button = $('.problem-button') - @problem_button.click show_results + @problem_button.click @show_results @problem_list = $('.problem-list') @construct_progress_bar() @@ -35,7 +35,7 @@ class PeerGrading $.postWithPrefix "#{@ajax_url}problem", data, (response) => if response.success @peer_grading_outer_container.after(response.html).remove() + backend = new 
PeerGradingProblemBackend(@ajax_url, false) + new PeerGradingProblem(backend) else - @gentle_alert response.error - -$(document).ready(() -> new PeerGrading()) + @gentle_alert response.error \ No newline at end of file diff --git a/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading_problem.coffee b/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading_problem.coffee index ab16b34d12..ee98905cda 100644 --- a/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading_problem.coffee +++ b/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading_problem.coffee @@ -7,7 +7,7 @@ # Should not be run when we don't have a location to send back # to the server # -# PeerGradingProblemBackend - +# PeerGradingProblemBackend - # makes all the ajax requests and provides a mock interface # for testing purposes # @@ -15,7 +15,7 @@ # handles the rendering and user interactions with the interface # ################################## -class PeerGradingProblemBackend +class @PeerGradingProblemBackend constructor: (ajax_url, mock_backend) -> @mock_backend = mock_backend @ajax_url = ajax_url @@ -32,141 +32,140 @@ class PeerGradingProblemBackend mock: (cmd, data) -> if cmd == 'is_student_calibrated' # change to test each version - response = - success: true + response = + success: true calibrated: @mock_cnt >= 2 else if cmd == 'show_calibration_essay' - #response = + #response = # success: false # error: "There was an error" @mock_cnt++ - response = + response = success: true submission_id: 1 submission_key: 'abcd' student_response: ''' - Contrary to popular belief, Lorem Ipsum is not simply random text. It has roots in a piece of classical Latin literature from 45 BC, making it over 2000 years old. Richard McClintock, a Latin professor at Hampden-Sydney College in Virginia, looked up one of the more obscure Latin words, consectetur, from a Lorem Ipsum passage, and going through the cites of the word in classical literature, discovered the undoubtable source. 
Lorem Ipsum comes from sections 1.10.32 and 1.10.33 of "de Finibus Bonorum et Malorum" (The Extremes of Good and Evil) by Cicero, written in 45 BC. This book is a treatise on the theory of ethics, very popular during the Renaissance. The first line of Lorem Ipsum, "Lorem ipsum dolor sit amet..", comes from a line in section 1.10.32. + Contrary to popular belief, Lorem Ipsum is not simply random text. It has roots in a piece of classical Latin literature from 45 BC, making it over 2000 years old. Richard McClintock, a Latin professor at Hampden-Sydney College in Virginia, looked up one of the more obscure Latin words, consectetur, from a Lorem Ipsum passage, and going through the cites of the word in classical literature, discovered the undoubtable source. Lorem Ipsum comes from sections 1.10.32 and 1.10.33 of "de Finibus Bonorum et Malorum" (The Extremes of Good and Evil) by Cicero, written in 45 BC. This book is a treatise on the theory of ethics, very popular during the Renaissance. The first line of Lorem Ipsum, "Lorem ipsum dolor sit amet..", comes from a line in section 1.10.32. -The standard chunk of Lorem Ipsum used since the 1500s is reproduced below for those interested. Sections 1.10.32 and 1.10.33 from "de Finibus Bonorum et Malorum" by Cicero are also reproduced in their exact original form, accompanied by English versions from the 1914 translation by H. Rackham. - ''' + The standard chunk of Lorem Ipsum used since the 1500s is reproduced below for those interested. Sections 1.10.32 and 1.10.33 from "de Finibus Bonorum et Malorum" by Cicero are also reproduced in their exact original form, accompanied by English versions from the 1914 translation by H. Rackham. + ''' prompt: ''' -

S11E3: Metal Bands

-

Shown below are schematic band diagrams for two different metals. Both diagrams appear different, yet both of the elements are undisputably metallic in nature.

-

* Why is it that both sodium and magnesium behave as metals, even though the s-band of magnesium is filled?

-

This is a self-assessed open response question. Please use as much space as you need in the box below to answer the question.

- ''' +

S11E3: Metal Bands

+

Shown below are schematic band diagrams for two different metals. Both diagrams appear different, yet both of the elements are undisputably metallic in nature.

+

* Why is it that both sodium and magnesium behave as metals, even though the s-band of magnesium is filled?

+

This is a self-assessed open response question. Please use as much space as you need in the box below to answer the question.

+ ''' rubric: ''' - - - - - - - - - - - - - - - - - - -
Purpose - - - - - - - -
Organization - - - - - - - -
- ''' + + + + + + + + + + + + + + + + + + +
Purpose + + + + + + + +
Organization + + + + + + + +
+ ''' max_score: 4 else if cmd == 'get_next_submission' - response = + response = success: true submission_id: 1 submission_key: 'abcd' student_response: '''Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed nec tristique ante. Proin at mauris sapien, quis varius leo. Morbi laoreet leo nisi. Morbi aliquam lacus ante. Cras iaculis velit sed diam mattis a fermentum urna luctus. Duis consectetur nunc vitae felis facilisis eget vulputate risus viverra. Cras consectetur ullamcorper lobortis. Nam eu gravida lorem. Nulla facilisi. Nullam quis felis enim. Mauris orci lectus, dictum id cursus in, vulputate in massa. -Phasellus non varius sem. Nullam commodo lacinia odio sit amet egestas. Donec ullamcorper sapien sagittis arcu volutpat placerat. Phasellus ut pretium ante. Nam dictum pulvinar nibh dapibus tristique. Sed at tellus mi, fringilla convallis justo. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus tristique rutrum nulla sed eleifend. Praesent at nunc arcu. Mauris condimentum faucibus nibh, eget commodo quam viverra sed. Morbi in tincidunt dolor. Morbi sed augue et augue interdum fermentum. + Phasellus non varius sem. Nullam commodo lacinia odio sit amet egestas. Donec ullamcorper sapien sagittis arcu volutpat placerat. Phasellus ut pretium ante. Nam dictum pulvinar nibh dapibus tristique. Sed at tellus mi, fringilla convallis justo. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus tristique rutrum nulla sed eleifend. Praesent at nunc arcu. Mauris condimentum faucibus nibh, eget commodo quam viverra sed. Morbi in tincidunt dolor. Morbi sed augue et augue interdum fermentum. -Curabitur tristique purus ac arcu consequat cursus. Cras diam felis, dignissim quis placerat at, aliquet ac metus. Mauris vulputate est eu nibh imperdiet varius. Cras aliquet rhoncus elit a laoreet. Mauris consectetur erat et erat scelerisque eu faucibus dolor consequat. Nam adipiscing sagittis nisl, eu mollis massa tempor ac. 
Nulla scelerisque tempus blandit. Phasellus ac ipsum eros, id posuere arcu. Nullam non sapien arcu. Vivamus sit amet lorem justo, ac tempus turpis. Suspendisse pharetra gravida imperdiet. Pellentesque lacinia mi eu elit luctus pellentesque. Sed accumsan libero a magna elementum varius. Nunc eget pellentesque metus. ''' + Curabitur tristique purus ac arcu consequat cursus. Cras diam felis, dignissim quis placerat at, aliquet ac metus. Mauris vulputate est eu nibh imperdiet varius. Cras aliquet rhoncus elit a laoreet. Mauris consectetur erat et erat scelerisque eu faucibus dolor consequat. Nam adipiscing sagittis nisl, eu mollis massa tempor ac. Nulla scelerisque tempus blandit. Phasellus ac ipsum eros, id posuere arcu. Nullam non sapien arcu. Vivamus sit amet lorem justo, ac tempus turpis. Suspendisse pharetra gravida imperdiet. Pellentesque lacinia mi eu elit luctus pellentesque. Sed accumsan libero a magna elementum varius. Nunc eget pellentesque metus. ''' prompt: ''' -

S11E3: Metal Bands

-

Shown below are schematic band diagrams for two different metals. Both diagrams appear different, yet both of the elements are undisputably metallic in nature.

-

* Why is it that both sodium and magnesium behave as metals, even though the s-band of magnesium is filled?

-

This is a self-assessed open response question. Please use as much space as you need in the box below to answer the question.

- ''' +

S11E3: Metal Bands

+

Shown below are schematic band diagrams for two different metals. Both diagrams appear different, yet both of the elements are undisputably metallic in nature.

+

* Why is it that both sodium and magnesium behave as metals, even though the s-band of magnesium is filled?

+

This is a self-assessed open response question. Please use as much space as you need in the box below to answer the question.

+ ''' rubric: ''' - - - - - - - - - - - - - - - - - - -
Purpose - - - - - - - -
Organization - - - - - - - -
- ''' + + + + + + + + + + + + + + + + + + +
Purpose + + + + + + + +
Organization + + + + + + + +
+ ''' max_score: 4 else if cmd == 'save_calibration_essay' - response = + response = success: true actual_score: 2 else if cmd == 'save_grade' - response = + response = success: true return response - -class PeerGradingProblem +class @PeerGradingProblem constructor: (backend) -> @prompt_wrapper = $('.prompt-wrapper') @backend = backend - + # get the location of the problem @location = $('.peer-grading').data('location') - # prevent this code from trying to run + # prevent this code from trying to run # when we don't have a location if(!@location) return @@ -208,7 +207,7 @@ class PeerGradingProblem # Set up the click event handlers @action_button.click -> history.back() - @calibration_feedback_button.click => + @calibration_feedback_button.click => @calibration_feedback_panel.hide() @grading_wrapper.show() @is_calibrated_check() @@ -266,7 +265,7 @@ class PeerGradingProblem submit_grade: () => data = @construct_data() @backend.post('save_grade', data, @submission_callback) - + ########## # @@ -301,7 +300,7 @@ class PeerGradingProblem @render_calibration_feedback(response) else if response.error @render_error(response.error) - else + else @render_error("Error saving calibration score") # called after we submit a submission score @@ -330,8 +329,8 @@ class PeerGradingProblem # show button if we have scores for all categories @show_submit_button() - - + + ########## # # Rendering methods and helpers @@ -344,7 +343,7 @@ class PeerGradingProblem # load in all the data @submission_container.html("

Training Essay

") @render_submission_data(response) - # TODO: indicate that we're in calibration mode + # TODO: indicate that we're in calibration mode @calibration_panel.addClass('current-state') @grading_panel.removeClass('current-state') @@ -428,12 +427,12 @@ class PeerGradingProblem if score == actual_score calibration_wrapper.append("

Congratulations! Your score matches the actual score!

") else - calibration_wrapper.append("

Please try to understand the grading critera better to be more accurate next time.

") + calibration_wrapper.append("

Please try to understand the grading critera better to be more accurate next time.

") # disable score selection and submission from the grading interface $("input[name='score-selection']").attr('disabled', true) @submit_button.hide() - + render_interstitial_page: () => @content_panel.hide() @interstitial_page.show() @@ -449,7 +448,7 @@ class PeerGradingProblem @submit_button.show() setup_score_selection: (max_score) => - + # first, get rid of all the old inputs, if any. @score_selection_container.html("""

Overall Score

@@ -460,7 +459,7 @@ class PeerGradingProblem for score in [0..max_score] id = 'score-' + score label = """""" - + input = """ """ # " fix broken parsing in emacs @@ -470,9 +469,7 @@ class PeerGradingProblem $("input[name='score-selection']").change @graded_callback $("input[name='grade-selection']").change @graded_callback - - -mock_backend = false -ajax_url = $('.peer-grading').data('ajax_url') -backend = new PeerGradingProblemBackend(ajax_url, mock_backend) -$(document).ready(() -> new PeerGradingProblem(backend)) +#mock_backend = false +#ajax_url = $('.peer-grading').data('ajax_url') +#backend = new PeerGradingProblemBackend(ajax_url, mock_backend) +#$(document).ready(() -> new PeerGradingProblem(backend)) diff --git a/common/lib/xmodule/xmodule/peer_grading_module.py b/common/lib/xmodule/xmodule/peer_grading_module.py index c5a08e0812..be09751e29 100644 --- a/common/lib/xmodule/xmodule/peer_grading_module.py +++ b/common/lib/xmodule/xmodule/peer_grading_module.py @@ -68,7 +68,6 @@ class PeerGradingModule(XModule): system.set('location', location) self.system = system self.peer_gs = peer_grading_service() - log.debug(self.system) self.use_for_single_location = self.metadata.get('use_for_single_location', USE_FOR_SINGLE_LOCATION) if isinstance(self.use_for_single_location, basestring): @@ -108,7 +107,7 @@ class PeerGradingModule(XModule): Needs to be implemented by child modules. Handles AJAX events. @return: """ - + log.debug(get) handlers = { 'get_next_submission': self.get_next_submission, 'show_calibration_essay': self.show_calibration_essay, @@ -123,6 +122,8 @@ class PeerGradingModule(XModule): d = handlers[dispatch](get) + log.debug(d) + return json.dumps(d, cls=ComplexEncoder) def get_progress(self): @@ -149,14 +150,12 @@ class PeerGradingModule(XModule): 'error': if success is False, will have an error message with more info. 
""" - _check_post(request) required = set(['location']) - success, message = _check_required(request, required) + success, message = self._check_required(get, required) if not success: return _err_response(message) - grader_id = unique_id_for_user(request.user) - p = request.POST - location = p['location'] + grader_id = self.system.anonymous_student_id + location = get['location'] try: response = self.peer_gs.get_next_submission(location, grader_id) @@ -183,20 +182,20 @@ class PeerGradingModule(XModule): success: bool indicating whether the save was a success error: if there was an error in the submission, this is the error message """ - _check_post(request) + required = set(['location', 'submission_id', 'submission_key', 'score', 'feedback', 'rubric_scores[]', 'submission_flagged']) - success, message = _check_required(request, required) + success, message = self._check_required(get, required) if not success: return _err_response(message) - grader_id = unique_id_for_user(request.user) - p = request.POST - location = p['location'] - submission_id = p['submission_id'] - score = p['score'] - feedback = p['feedback'] - submission_key = p['submission_key'] - rubric_scores = p.getlist('rubric_scores[]') - submission_flagged = p['submission_flagged'] + grader_id = self.system.anonymous_student_id + + location = get['location'] + submission_id = get['submission_id'] + score = get['score'] + feedback = get['feedback'] + submission_key = get['submission_key'] + rubric_scores = get['rubric_scores'] + submission_flagged = get['submission_flagged'] try: response = self.peer_gs.save_grade(location, grader_id, submission_id, score, feedback, submission_key, rubric_scores, submission_flagged) @@ -227,14 +226,14 @@ class PeerGradingModule(XModule): total_calibrated_on_so_far - the number of calibration essays for this problem that this grader has graded """ - _check_post(request) + required = set(['location']) - success, message = _check_required(request, required) + success, 
message = self._check_required(get, required) if not success: return _err_response(message) - grader_id = unique_id_for_user(request.user) - p = request.POST - location = p['location'] + grader_id = self.system.anonymous_student_id + + location = get['location'] try: response = self.peer_gs.is_student_calibrated(location, grader_id) @@ -268,16 +267,15 @@ class PeerGradingModule(XModule): 'error': if success is False, will have an error message with more info. """ - _check_post(request) required = set(['location']) - success, message = _check_required(request, required) + success, message = self._check_required(get, required) if not success: return _err_response(message) - grader_id = unique_id_for_user(request.user) - p = request.POST - location = p['location'] + grader_id = self.system.anonymous_student_id + + location = get['location'] try: response = self.peer_gs.show_calibration_essay(location, grader_id) return HttpResponse(response, mimetype="application/json") @@ -311,20 +309,19 @@ class PeerGradingModule(XModule): actual_score: the score that the instructor gave to this calibration essay """ - _check_post(request) required = set(['location', 'submission_id', 'submission_key', 'score', 'feedback', 'rubric_scores[]']) - success, message = _check_required(request, required) + success, message = self._check_required(get, required) if not success: return _err_response(message) - grader_id = unique_id_for_user(request.user) - p = request.POST - location = p['location'] - calibration_essay_id = p['submission_id'] - submission_key = p['submission_key'] - score = p['score'] - feedback = p['feedback'] - rubric_scores = p.getlist('rubric_scores[]') + grader_id = self.system.anonymous_student_id + + location = get['location'] + calibration_essay_id = get['submission_id'] + submission_key = get['submission_key'] + score = get['score'] + feedback = get['feedback'] + rubric_scores = get['rubric_scores'] try: response = self.peer_gs.save_calibration_essay(location, 
grader_id, calibration_essay_id, diff --git a/common/lib/xmodule/xmodule/peer_grading_service.py b/common/lib/xmodule/xmodule/peer_grading_service.py index 172a981a96..a8e74dd3cc 100644 --- a/common/lib/xmodule/xmodule/peer_grading_service.py +++ b/common/lib/xmodule/xmodule/peer_grading_service.py @@ -48,6 +48,7 @@ class PeerGradingService(): 'rubric_scores': rubric_scores, 'rubric_scores_complete': True, 'submission_flagged' : submission_flagged} + log.debug(data) return self.post(self.save_grade_url, data) def is_student_calibrated(self, problem_location, grader_id): @@ -69,6 +70,7 @@ class PeerGradingService(): 'feedback': feedback, 'rubric_scores[]': rubric_scores, 'rubric_scores_complete': True} + log.debug(data) return self.post(self.save_calibration_essay_url, data) def get_problem_list(self, course_id, grader_id): diff --git a/lms/templates/peer_grading/peer_grading.html b/lms/templates/peer_grading/peer_grading.html index 99ef288e5f..1dd74d74e4 100644 --- a/lms/templates/peer_grading/peer_grading.html +++ b/lms/templates/peer_grading/peer_grading.html @@ -1,5 +1,5 @@
-
+
${error_text}

Peer Grading

Instructions

diff --git a/lms/templates/peer_grading/peer_grading_problem.html b/lms/templates/peer_grading/peer_grading_problem.html index 9646b861c1..af7c1400cb 100644 --- a/lms/templates/peer_grading/peer_grading_problem.html +++ b/lms/templates/peer_grading/peer_grading_problem.html @@ -1,5 +1,5 @@
-
+
From d1c55208c1fdfc636e9be8ae900540bd3347a163 Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Thu, 31 Jan 2013 20:08:55 -0500 Subject: [PATCH 040/126] Clean up response code --- .../xmodule/xmodule/peer_grading_module.py | 43 ++++++++++--------- .../xmodule/xmodule/peer_grading_service.py | 24 +++++++++-- 2 files changed, 42 insertions(+), 25 deletions(-) diff --git a/common/lib/xmodule/xmodule/peer_grading_module.py b/common/lib/xmodule/xmodule/peer_grading_module.py index be09751e29..cd60e2572c 100644 --- a/common/lib/xmodule/xmodule/peer_grading_module.py +++ b/common/lib/xmodule/xmodule/peer_grading_module.py @@ -12,7 +12,6 @@ import requests import sys from django.conf import settings -from django.http import HttpResponse, Http404 from combined_open_ended_rubric import CombinedOpenEndedRubric from lxml import etree @@ -81,8 +80,7 @@ class PeerGradingModule(XModule): """ Return a HttpResponse with a json dump with success=False, and the given error message. """ - return HttpResponse(json.dumps({'success': False, 'error': msg}), - mimetype="application/json") + return {'success': False, 'error': msg} def _check_required(self, get, required): actual = set(get.keys()) @@ -107,7 +105,7 @@ class PeerGradingModule(XModule): Needs to be implemented by child modules. Handles AJAX events. @return: """ - log.debug(get) + handlers = { 'get_next_submission': self.get_next_submission, 'show_calibration_essay': self.show_calibration_essay, @@ -123,7 +121,7 @@ class PeerGradingModule(XModule): d = handlers[dispatch](get) log.debug(d) - + return json.dumps(d, cls=ComplexEncoder) def get_progress(self): @@ -159,13 +157,12 @@ class PeerGradingModule(XModule): try: response = self.peer_gs.get_next_submission(location, grader_id) - return HttpResponse(response, - mimetype="application/json") + return response except GradingServiceError: log.exception("Error getting next submission. 
server url: {0} location: {1}, grader_id: {2}" .format(self.peer_gs.url, location, grader_id)) - return json.dumps({'success': False, - 'error': 'Could not connect to grading service'}) + return {'success': False, + 'error': 'Could not connect to grading service'} def save_grade(self, get): """ @@ -199,15 +196,17 @@ class PeerGradingModule(XModule): try: response = self.peer_gs.save_grade(location, grader_id, submission_id, score, feedback, submission_key, rubric_scores, submission_flagged) - return HttpResponse(response, mimetype="application/json") + return response except GradingServiceError: log.exception("""Error saving grade. server url: {0}, location: {1}, submission_id:{2}, submission_key: {3}, score: {4}""" .format(self.peer_gs.url, location, submission_id, submission_key, score) ) - return json.dumps({'success': False, - 'error': 'Could not connect to grading service'}) + return { + 'success': False, + 'error': 'Could not connect to grading service' + } def is_student_calibrated(self, get): """ @@ -237,12 +236,14 @@ class PeerGradingModule(XModule): try: response = self.peer_gs.is_student_calibrated(location, grader_id) - return HttpResponse(response, mimetype="application/json") + return response except GradingServiceError: log.exception("Error from grading service. server url: {0}, grader_id: {0}, location: {1}" .format(self.peer_gs.url, grader_id, location)) - return json.dumps({'success': False, - 'error': 'Could not connect to grading service'}) + return { + 'success': False, + 'error': 'Could not connect to grading service' + } def show_calibration_essay(self, get): """ @@ -278,18 +279,18 @@ class PeerGradingModule(XModule): location = get['location'] try: response = self.peer_gs.show_calibration_essay(location, grader_id) - return HttpResponse(response, mimetype="application/json") + return response except GradingServiceError: log.exception("Error from grading service. 
server url: {0}, location: {0}" .format(self.peer_gs.url, location)) - return json.dumps({'success': False, - 'error': 'Could not connect to grading service'}) + return {'success': False, + 'error': 'Could not connect to grading service'} # if we can't parse the rubric into HTML, except etree.XMLSyntaxError: log.exception("Cannot parse rubric string. Raw string: {0}" .format(rubric)) - return json.dumps({'success': False, - 'error': 'Error displaying submission'}) + return {'success': False, + 'error': 'Error displaying submission'} def save_calibration_essay(self, get): @@ -326,7 +327,7 @@ class PeerGradingModule(XModule): try: response = self.peer_gs.save_calibration_essay(location, grader_id, calibration_essay_id, submission_key, score, feedback, rubric_scores) - return HttpResponse(response, mimetype="application/json") + return response except GradingServiceError: log.exception("Error saving calibration grade, location: {0}, submission_id: {1}, submission_key: {2}, grader_id: {3}".format(location, submission_id, submission_key, grader_id)) return _err_response('Could not connect to grading service') diff --git a/common/lib/xmodule/xmodule/peer_grading_service.py b/common/lib/xmodule/xmodule/peer_grading_service.py index a8e74dd3cc..3328a2c3cc 100644 --- a/common/lib/xmodule/xmodule/peer_grading_service.py +++ b/common/lib/xmodule/xmodule/peer_grading_service.py @@ -36,7 +36,7 @@ class PeerGradingService(): def get_next_submission(self, problem_location, grader_id): response = self.get(self.get_next_submission_url, {'location': problem_location, 'grader_id': grader_id}) - return json.dumps(self._render_rubric(response)) + return self._render_rubric(response) def save_grade(self, location, grader_id, submission_id, score, feedback, submission_key, rubric_scores, submission_flagged): data = {'grader_id' : grader_id, @@ -58,7 +58,7 @@ class PeerGradingService(): def show_calibration_essay(self, problem_location, grader_id): params = {'problem_id' : 
problem_location, 'student_id': grader_id} response = self.get(self.show_calibration_essay_url, params) - return json.dumps(self._render_rubric(response)) + return self._render_rubric(response) def save_calibration_essay(self, problem_location, grader_id, calibration_essay_id, submission_key, score, feedback, rubric_scores): @@ -111,7 +111,13 @@ class PeerGradingService(): # reraise as promised GradingServiceError, but preserve stacktrace. raise GradingServiceError, str(err), sys.exc_info()[2] - return r.text + text = r.text + try: + text= json.loads(text) + except: + pass + + return text def get(self, url, params, allow_redirects=False): """ @@ -127,7 +133,13 @@ class PeerGradingService(): # reraise as promised GradingServiceError, but preserve stacktrace. raise GradingServiceError, str(err), sys.exc_info()[2] - return r.text + text = r.text + try: + text= json.loads(text) + except: + pass + + return text def _try_with_login(self, operation): @@ -163,6 +175,10 @@ class PeerGradingService(): """ try: response_json = json.loads(response) + except: + response_json = response + + try: if 'rubric' in response_json: rubric = response_json['rubric'] rubric_renderer = CombinedOpenEndedRubric(self.system, False) From 5ac6439cc015b826c6c968cc123f40e503984d5d Mon Sep 17 00:00:00 2001 From: Vik Paruchuri Date: Thu, 31 Jan 2013 20:22:35 -0500 Subject: [PATCH 041/126] Xmodule working...need to work on some issues (rubric scores not passing properly), and also fix notifications --- common/lib/xmodule/xmodule/peer_grading_module.py | 13 +++++++------ common/lib/xmodule/xmodule/peer_grading_service.py | 10 ++++------ 2 files changed, 11 insertions(+), 12 deletions(-) diff --git a/common/lib/xmodule/xmodule/peer_grading_module.py b/common/lib/xmodule/xmodule/peer_grading_module.py index cd60e2572c..c2df24dfff 100644 --- a/common/lib/xmodule/xmodule/peer_grading_module.py +++ b/common/lib/xmodule/xmodule/peer_grading_module.py @@ -66,7 +66,7 @@ class PeerGradingModule(XModule): #We 
need to set the location here so the child modules can use it system.set('location', location) self.system = system - self.peer_gs = peer_grading_service() + self.peer_gs = peer_grading_service(self.system) self.use_for_single_location = self.metadata.get('use_for_single_location', USE_FOR_SINGLE_LOCATION) if isinstance(self.use_for_single_location, basestring): @@ -106,6 +106,7 @@ class PeerGradingModule(XModule): @return: """ + log.debug(get) handlers = { 'get_next_submission': self.get_next_submission, 'show_calibration_essay': self.show_calibration_essay, @@ -120,8 +121,6 @@ class PeerGradingModule(XModule): d = handlers[dispatch](get) - log.debug(d) - return json.dumps(d, cls=ComplexEncoder) def get_progress(self): @@ -191,8 +190,10 @@ class PeerGradingModule(XModule): score = get['score'] feedback = get['feedback'] submission_key = get['submission_key'] - rubric_scores = get['rubric_scores'] + rubric_scores = get['rubric_scores[]'] submission_flagged = get['submission_flagged'] + log.debug(get) + log.debug(rubric_scores) try: response = self.peer_gs.save_grade(location, grader_id, submission_id, score, feedback, submission_key, rubric_scores, submission_flagged) @@ -322,7 +323,7 @@ class PeerGradingModule(XModule): submission_key = get['submission_key'] score = get['score'] feedback = get['feedback'] - rubric_scores = get['rubric_scores'] + rubric_scores = get['rubric_scores[]'] try: response = self.peer_gs.save_calibration_essay(location, grader_id, calibration_essay_id, @@ -343,7 +344,7 @@ class PeerGradingModule(XModule): problem_list = [] try: problem_list_json = self.peer_gs.get_problem_list(self.system.course_id, self.system.anonymous_student_id) - problem_list_dict = json.loads(problem_list_json) + problem_list_dict = problem_list_json success = problem_list_dict['success'] if 'error' in problem_list_dict: error_text = problem_list_dict['error'] diff --git a/common/lib/xmodule/xmodule/peer_grading_service.py 
b/common/lib/xmodule/xmodule/peer_grading_service.py index 3328a2c3cc..06fa7351cd 100644 --- a/common/lib/xmodule/xmodule/peer_grading_service.py +++ b/common/lib/xmodule/xmodule/peer_grading_service.py @@ -19,7 +19,7 @@ class PeerGradingService(): """ Interface with the grading controller for peer grading """ - def __init__(self, config): + def __init__(self, config, system): self.username = config['username'] self.password = config['password'] self.url = config['url'] @@ -32,6 +32,7 @@ class PeerGradingService(): self.save_calibration_essay_url = self.url + '/save_calibration_essay/' self.get_problem_list_url = self.url + '/get_problem_list/' self.get_notifications_url = self.url + '/get_notifications/' + self.system = system def get_next_submission(self, problem_location, grader_id): response = self.get(self.get_next_submission_url, @@ -48,7 +49,6 @@ class PeerGradingService(): 'rubric_scores': rubric_scores, 'rubric_scores_complete': True, 'submission_flagged' : submission_flagged} - log.debug(data) return self.post(self.save_grade_url, data) def is_student_calibrated(self, problem_location, grader_id): @@ -70,7 +70,6 @@ class PeerGradingService(): 'feedback': feedback, 'rubric_scores[]': rubric_scores, 'rubric_scores_complete': True} - log.debug(data) return self.post(self.save_calibration_essay_url, data) def get_problem_list(self, course_id, grader_id): @@ -123,7 +122,6 @@ class PeerGradingService(): """ Make a get request to the grading controller """ - log.debug(params) op = lambda: self.session.get(url, allow_redirects=allow_redirects, params=params) @@ -240,7 +238,7 @@ class MockPeerGradingService(object): ]}) _service = None -def peer_grading_service(): +def peer_grading_service(system): """ Return a peer grading service instance--if settings.MOCK_PEER_GRADING is True, returns a mock one, otherwise a real one. 
@@ -255,6 +253,6 @@ def peer_grading_service(): if settings.MOCK_PEER_GRADING: _service = MockPeerGradingService() else: - _service = PeerGradingService(settings.PEER_GRADING_INTERFACE) + _service = PeerGradingService(settings.PEER_GRADING_INTERFACE, system) return _service From 1595bd9b0eb82df1bc68b7813f642c809ab67844 Mon Sep 17 00:00:00 2001 From: Don Mitchell Date: Fri, 1 Feb 2013 17:20:58 -0500 Subject: [PATCH 042/126] Move dnd code from base.js to a js file only loaded by overview.html --- cms/djangoapps/contentstore/views.py | 1 - cms/static/js/base.js | 188 -------------------------- cms/static/js/views/overview.js | 191 +++++++++++++++++++++++++++ cms/templates/overview.html | 1 + 4 files changed, 192 insertions(+), 189 deletions(-) create mode 100644 cms/static/js/views/overview.js diff --git a/cms/djangoapps/contentstore/views.py b/cms/djangoapps/contentstore/views.py index 14f96e312a..f70164138d 100644 --- a/cms/djangoapps/contentstore/views.py +++ b/cms/djangoapps/contentstore/views.py @@ -261,7 +261,6 @@ def edit_unit(request, location): break lms_link = get_lms_link_for_item(item.location) - preview_lms_link = get_lms_link_for_item(item.location, preview=True) component_templates = defaultdict(list) diff --git a/cms/static/js/base.js b/cms/static/js/base.js index 41c1ee3cdb..7e55d2b8d8 100644 --- a/cms/static/js/base.js +++ b/cms/static/js/base.js @@ -80,64 +80,6 @@ $(document).ready(function() { $('.import .file-input').click(); }); - // making the unit list draggable. Note: sortable didn't work b/c it considered - // drop points which the user hovered over as destinations and proactively changed - // the dom; so, if the user subsequently dropped at an illegal spot, the reversion - // point was the last dom change. 
- $('.unit').draggable({ - axis: 'y', - handle: '.drag-handle', - zIndex: 999, - start: initiateHesitate, - drag: checkHoverState, - stop: removeHesitate, - revert: "invalid" - }); - - // Subsection reordering - $('.id-holder').draggable({ - axis: 'y', - handle: '.section-item .drag-handle', - zIndex: 999, - start: initiateHesitate, - drag: checkHoverState, - stop: removeHesitate, - revert: "invalid" - }); - - // Section reordering - $('.courseware-section').draggable({ - axis: 'y', - handle: 'header .drag-handle', - stack: '.courseware-section', - revert: "invalid" - }); - - - $('.sortable-unit-list').droppable({ - accept : '.unit', - greedy: true, - tolerance: "pointer", - hoverClass: "dropover", - drop: onUnitReordered - }); - $('.subsection-list > ol').droppable({ - // why don't we have a more useful class for subsections than id-holder? - accept : '.id-holder', // '.unit, .id-holder', - tolerance: "pointer", - hoverClass: "dropover", - drop: onSubsectionReordered, - greedy: true - }); - - // Section reordering - $('.courseware-overview').droppable({ - accept : '.courseware-section', - tolerance: "pointer", - drop: onSectionReordered, - greedy: true - }); - $('.new-course-button').bind('click', addNewCourse); // section name editing @@ -279,136 +221,6 @@ function removePolicyMetadata(e) { saveSubsection() } -CMS.HesitateEvent.toggleXpandHesitation = null; -function initiateHesitate(event, ui) { - CMS.HesitateEvent.toggleXpandHesitation = new CMS.HesitateEvent(expandSection, 'dragLeave', true); - $('.collapsed').on('dragEnter', CMS.HesitateEvent.toggleXpandHesitation, CMS.HesitateEvent.toggleXpandHesitation.trigger); - $('.collapsed').each(function() { - this.proportions = {width : this.offsetWidth, height : this.offsetHeight }; - // reset b/c these were holding values from aborts - this.isover = false; - }); -} -function checkHoverState(event, ui) { - // copied from jquery.ui.droppable.js $.ui.ddmanager.drag & other ui.intersect - var draggable = 
$(this).data("ui-draggable"), - x1 = (draggable.positionAbs || draggable.position.absolute).left + (draggable.helperProportions.width / 2), - y1 = (draggable.positionAbs || draggable.position.absolute).top + (draggable.helperProportions.height / 2); - $('.collapsed').each(function() { - // don't expand the thing being carried - if (ui.helper.is(this)) { - return; - } - - $.extend(this, {offset : $(this).offset()}); - - var droppable = this, - l = droppable.offset.left, - r = l + droppable.proportions.width, - t = droppable.offset.top, - b = t + droppable.proportions.height; - - if (l === r) { - // probably wrong values b/c invisible at the time of caching - droppable.proportions = { width : droppable.offsetWidth, height : droppable.offsetHeight }; - r = l + droppable.proportions.width; - b = t + droppable.proportions.height; - } - // equivalent to the intersects test - var intersects = (l < x1 && // Right Half - x1 < r && // Left Half - t < y1 && // Bottom Half - y1 < b ), // Top Half - - c = !intersects && this.isover ? "isout" : (intersects && !this.isover ? "isover" : null); - - if(!c) { - return; - } - - this[c] = true; - this[c === "isout" ? "isover" : "isout"] = false; - $(this).trigger(c === "isover" ? "dragEnter" : "dragLeave"); - }); -} -function removeHesitate(event, ui) { - $('.collapsed').off('dragEnter', CMS.HesitateEvent.toggleXpandHesitation.trigger); - CMS.HesitateEvent.toggleXpandHesitation = null; -} - -function expandSection(event) { - $(event.delegateTarget).removeClass('collapsed', 400); - // don't descend to icon's on children (which aren't under first child) only to this element's icon - $(event.delegateTarget).children().first().find('.expand-collapse-icon').removeClass('expand', 400).addClass('collapse'); -} - -function onUnitReordered(event, ui) { - // a unit's been dropped on this subsection, - // figure out where it came from and where it slots in. 
- _handleReorder(event, ui, 'subsection-id', 'li:.leaf'); -} - -function onSubsectionReordered(event, ui) { - // a subsection has been dropped on this section, - // figure out where it came from and where it slots in. - _handleReorder(event, ui, 'section-id', 'li:.branch'); -} - -function onSectionReordered(event, ui) { - // a section moved w/in the overall (cannot change course via this, so no parentage change possible, just order) - _handleReorder(event, ui, 'course-id', '.courseware-section'); -} - -function _handleReorder(event, ui, parentIdField, childrenSelector) { - // figure out where it came from and where it slots in. - var subsection_id = $(event.target).data(parentIdField); - var _els = $(event.target).children(childrenSelector); - var children = _els.map(function(idx, el) { return $(el).data('id'); }).get(); - // if new to this parent, figure out which parent to remove it from and do so - if (!_.contains(children, ui.draggable.data('id'))) { - var old_parent = ui.draggable.parent(); - var old_children = old_parent.children(childrenSelector).map(function(idx, el) { return $(el).data('id'); }).get(); - old_children = _.without(old_children, ui.draggable.data('id')); - $.ajax({ - url: "/save_item", - type: "POST", - dataType: "json", - contentType: "application/json", - data:JSON.stringify({ 'id' : old_parent.data(parentIdField), 'children' : old_children}) - }); - } - else { - // staying in same parent - // remove so that the replacement in the right place doesn't double it - children = _.without(children, ui.draggable.data('id')); - } - // add to this parent (figure out where) - for (var i = 0; i < _els.length; i++) { - if (!ui.draggable.is(_els[i]) && ui.offset.top < $(_els[i]).offset().top) { - // insert at i in children and _els - ui.draggable.insertBefore($(_els[i])); - // TODO figure out correct way to have it remove the style: top:n; setting (and similar line below) - ui.draggable.attr("style", "position:relative;"); - children.splice(i, 0, 
ui.draggable.data('id')); - break; - } - } - // see if it goes at end (the above loop didn't insert it) - if (!_.contains(children, ui.draggable.data('id'))) { - $(event.target).append(ui.draggable); - ui.draggable.attr("style", "position:relative;"); // STYLE hack too - children.push(ui.draggable.data('id')); - } - $.ajax({ - url: "/save_item", - type: "POST", - dataType: "json", - contentType: "application/json", - data:JSON.stringify({ 'id' : subsection_id, 'children' : children}) - }); - -} - function getEdxTimeFromDateTimeVals(date_val, time_val, format) { var edxTimeStr = null; diff --git a/cms/static/js/views/overview.js b/cms/static/js/views/overview.js new file mode 100644 index 0000000000..c007ef3efc --- /dev/null +++ b/cms/static/js/views/overview.js @@ -0,0 +1,191 @@ +$(document).ready(function() { + // making the unit list draggable. Note: sortable didn't work b/c it considered + // drop points which the user hovered over as destinations and proactively changed + // the dom; so, if the user subsequently dropped at an illegal spot, the reversion + // point was the last dom change. + $('.unit').draggable({ + axis: 'y', + handle: '.drag-handle', + zIndex: 999, + start: initiateHesitate, + drag: checkHoverState, + stop: removeHesitate, + revert: "invalid" + }); + + // Subsection reordering + $('.id-holder').draggable({ + axis: 'y', + handle: '.section-item .drag-handle', + zIndex: 999, + start: initiateHesitate, + drag: checkHoverState, + stop: removeHesitate, + revert: "invalid" + }); + + // Section reordering + $('.courseware-section').draggable({ + axis: 'y', + handle: 'header .drag-handle', + stack: '.courseware-section', + revert: "invalid" + }); + + + $('.sortable-unit-list').droppable({ + accept : '.unit', + greedy: true, + tolerance: "pointer", + hoverClass: "dropover", + drop: onUnitReordered + }); + $('.subsection-list > ol').droppable({ + // why don't we have a more useful class for subsections than id-holder? 
+ accept : '.id-holder', // '.unit, .id-holder', + tolerance: "pointer", + hoverClass: "dropover", + drop: onSubsectionReordered, + greedy: true + }); + + // Section reordering + $('.courseware-overview').droppable({ + accept : '.courseware-section', + tolerance: "pointer", + drop: onSectionReordered, + greedy: true + }); + +}); + + +CMS.HesitateEvent.toggleXpandHesitation = null; +function initiateHesitate(event, ui) { + CMS.HesitateEvent.toggleXpandHesitation = new CMS.HesitateEvent(expandSection, 'dragLeave', true); + $('.collapsed').on('dragEnter', CMS.HesitateEvent.toggleXpandHesitation, CMS.HesitateEvent.toggleXpandHesitation.trigger); + $('.collapsed').each(function() { + this.proportions = {width : this.offsetWidth, height : this.offsetHeight }; + // reset b/c these were holding values from aborts + this.isover = false; + }); +} +function checkHoverState(event, ui) { + // copied from jquery.ui.droppable.js $.ui.ddmanager.drag & other ui.intersect + var draggable = $(this).data("ui-draggable"), + x1 = (draggable.positionAbs || draggable.position.absolute).left + (draggable.helperProportions.width / 2), + y1 = (draggable.positionAbs || draggable.position.absolute).top + (draggable.helperProportions.height / 2); + $('.collapsed').each(function() { + // don't expand the thing being carried + if (ui.helper.is(this)) { + return; + } + + $.extend(this, {offset : $(this).offset()}); + + var droppable = this, + l = droppable.offset.left, + r = l + droppable.proportions.width, + t = droppable.offset.top, + b = t + droppable.proportions.height; + + if (l === r) { + // probably wrong values b/c invisible at the time of caching + droppable.proportions = { width : droppable.offsetWidth, height : droppable.offsetHeight }; + r = l + droppable.proportions.width; + b = t + droppable.proportions.height; + } + // equivalent to the intersects test + var intersects = (l < x1 && // Right Half + x1 < r && // Left Half + t < y1 && // Bottom Half + y1 < b ), // Top Half + + c = 
!intersects && this.isover ? "isout" : (intersects && !this.isover ? "isover" : null); + + if(!c) { + return; + } + + this[c] = true; + this[c === "isout" ? "isover" : "isout"] = false; + $(this).trigger(c === "isover" ? "dragEnter" : "dragLeave"); + }); +} +function removeHesitate(event, ui) { + $('.collapsed').off('dragEnter', CMS.HesitateEvent.toggleXpandHesitation.trigger); + CMS.HesitateEvent.toggleXpandHesitation = null; +} + +function expandSection(event) { + $(event.delegateTarget).removeClass('collapsed', 400); + // don't descend to icon's on children (which aren't under first child) only to this element's icon + $(event.delegateTarget).children().first().find('.expand-collapse-icon').removeClass('expand', 400).addClass('collapse'); +} + +function onUnitReordered(event, ui) { + // a unit's been dropped on this subsection, + // figure out where it came from and where it slots in. + _handleReorder(event, ui, 'subsection-id', 'li:.leaf'); +} + +function onSubsectionReordered(event, ui) { + // a subsection has been dropped on this section, + // figure out where it came from and where it slots in. + _handleReorder(event, ui, 'section-id', 'li:.branch'); +} + +function onSectionReordered(event, ui) { + // a section moved w/in the overall (cannot change course via this, so no parentage change possible, just order) + _handleReorder(event, ui, 'course-id', '.courseware-section'); +} + +function _handleReorder(event, ui, parentIdField, childrenSelector) { + // figure out where it came from and where it slots in. 
+ var subsection_id = $(event.target).data(parentIdField); + var _els = $(event.target).children(childrenSelector); + var children = _els.map(function(idx, el) { return $(el).data('id'); }).get(); + // if new to this parent, figure out which parent to remove it from and do so + if (!_.contains(children, ui.draggable.data('id'))) { + var old_parent = ui.draggable.parent(); + var old_children = old_parent.children(childrenSelector).map(function(idx, el) { return $(el).data('id'); }).get(); + old_children = _.without(old_children, ui.draggable.data('id')); + $.ajax({ + url: "/save_item", + type: "POST", + dataType: "json", + contentType: "application/json", + data:JSON.stringify({ 'id' : old_parent.data(parentIdField), 'children' : old_children}) + }); + } + else { + // staying in same parent + // remove so that the replacement in the right place doesn't double it + children = _.without(children, ui.draggable.data('id')); + } + // add to this parent (figure out where) + for (var i = 0; i < _els.length; i++) { + if (!ui.draggable.is(_els[i]) && ui.offset.top < $(_els[i]).offset().top) { + // insert at i in children and _els + ui.draggable.insertBefore($(_els[i])); + // TODO figure out correct way to have it remove the style: top:n; setting (and similar line below) + ui.draggable.attr("style", "position:relative;"); + children.splice(i, 0, ui.draggable.data('id')); + break; + } + } + // see if it goes at end (the above loop didn't insert it) + if (!_.contains(children, ui.draggable.data('id'))) { + $(event.target).append(ui.draggable); + ui.draggable.attr("style", "position:relative;"); // STYLE hack too + children.push(ui.draggable.data('id')); + } + $.ajax({ + url: "/save_item", + type: "POST", + dataType: "json", + contentType: "application/json", + data:JSON.stringify({ 'id' : subsection_id, 'children' : children}) + }); + +} diff --git a/cms/templates/overview.html b/cms/templates/overview.html index a20531200e..20ddcead01 100644 --- a/cms/templates/overview.html 
+++ b/cms/templates/overview.html @@ -18,6 +18,7 @@ +