diff --git a/cms/.coveragerc b/cms/.coveragerc index b7ae181e99..4f0dbebe79 100644 --- a/cms/.coveragerc +++ b/cms/.coveragerc @@ -2,7 +2,7 @@ [run] data_file = reports/cms/.coverage source = cms,common/djangoapps -omit = cms/envs/*, cms/manage.py +omit = cms/envs/*, cms/manage.py, common/djangoapps/terrain/*, common/djangoapps/*/migrations/* [report] ignore_errors = True diff --git a/cms/djangoapps/contentstore/features/common.py b/cms/djangoapps/contentstore/features/common.py index f868b598a8..925bb101f3 100644 --- a/cms/djangoapps/contentstore/features/common.py +++ b/cms/djangoapps/contentstore/features/common.py @@ -20,7 +20,8 @@ def i_visit_the_studio_homepage(step): # LETTUCE_SERVER_PORT = 8001 # in your settings.py file. world.browser.visit(django_url('/')) - assert world.browser.is_element_present_by_css('body.no-header', 10) + signin_css = 'a.action-signin' + assert world.browser.is_element_present_by_css(signin_css, 10) @step('I am logged into Studio$') @@ -113,7 +114,11 @@ def log_into_studio( create_studio_user(uname=uname, email=email, is_staff=is_staff) world.browser.cookies.delete() world.browser.visit(django_url('/')) - world.browser.is_element_present_by_css('body.no-header', 10) + signin_css = 'a.action-signin' + world.browser.is_element_present_by_css(signin_css, 10) + + # click the signin button + css_click(signin_css) login_form = world.browser.find_by_css('form#login_form') login_form.find_by_name('email').fill(email) @@ -127,16 +132,19 @@ def create_a_course(): css_click('a.new-course-button') fill_in_course_info() css_click('input.new-course-save') - assert_true(world.browser.is_element_present_by_css('a#courseware-tab', 5)) + course_title_css = 'span.course-title' + assert_true(world.browser.is_element_present_by_css(course_title_css, 5)) def add_section(name='My Section'): link_css = 'a.new-courseware-section-button' css_click(link_css) - name_css = '.new-section-name' - save_css = '.new-section-name-save' + name_css = 
'input.new-section-name' + save_css = 'input.new-section-name-save' css_fill(name_css, name) css_click(save_css) + span_css = 'span.section-name-span' + assert_true(world.browser.is_element_present_by_css(span_css, 5)) def add_subsection(name='Subsection One'): diff --git a/cms/djangoapps/contentstore/features/courses.py b/cms/djangoapps/contentstore/features/courses.py index d2d038a928..e394165f08 100644 --- a/cms/djangoapps/contentstore/features/courses.py +++ b/cms/djangoapps/contentstore/features/courses.py @@ -34,8 +34,8 @@ def i_click_the_course_link_in_my_courses(step): @step('the Courseware page has loaded in Studio$') def courseware_page_has_loaded_in_studio(step): - courseware_css = 'a#courseware-tab' - assert world.browser.is_element_present_by_css(courseware_css) + course_title_css = 'span.course-title' + assert world.browser.is_element_present_by_css(course_title_css) @step('I see the course listed in My Courses$') @@ -59,4 +59,4 @@ def i_am_on_tab(step, tab_name): @step('I see a link for adding a new section$') def i_see_new_section_link(step): link_css = 'a.new-courseware-section-button' - assert_css_with_text(link_css, 'New Section') + assert_css_with_text(link_css, '+ New Section') diff --git a/cms/djangoapps/contentstore/features/signup.feature b/cms/djangoapps/contentstore/features/signup.feature index 8a6f93d33b..03a1c9524a 100644 --- a/cms/djangoapps/contentstore/features/signup.feature +++ b/cms/djangoapps/contentstore/features/signup.feature @@ -5,8 +5,8 @@ Feature: Sign in Scenario: Sign up from the homepage Given I visit the Studio homepage - When I click the link with the text "Sign up" + When I click the link with the text "Sign Up" And I fill in the registration form - And I press the "Create My Account" button on the registration form + And I press the Create My Account button on the registration form Then I should see be on the studio home page - And I should see the message "please click on the activation link in your email." 
\ No newline at end of file + And I should see the message "please click on the activation link in your email." diff --git a/cms/djangoapps/contentstore/features/signup.py b/cms/djangoapps/contentstore/features/signup.py index e105b674f7..a786225ead 100644 --- a/cms/djangoapps/contentstore/features/signup.py +++ b/cms/djangoapps/contentstore/features/signup.py @@ -11,10 +11,11 @@ def i_fill_in_the_registration_form(step): register_form.find_by_name('terms_of_service').check() -@step('I press the "([^"]*)" button on the registration form$') -def i_press_the_button_on_the_registration_form(step, button): +@step('I press the Create My Account button on the registration form$') +def i_press_the_button_on_the_registration_form(step): register_form = world.browser.find_by_css('form#register_form') - register_form.find_by_value(button).click() + submit_css = 'button#submit' + register_form.find_by_css(submit_css).click() @step('I should see be on the studio home page$') diff --git a/cms/djangoapps/contentstore/tests/test_contentstore.py b/cms/djangoapps/contentstore/tests/test_contentstore.py index e3680018e3..944d4879c9 100644 --- a/cms/djangoapps/contentstore/tests/test_contentstore.py +++ b/cms/djangoapps/contentstore/tests/test_contentstore.py @@ -5,7 +5,7 @@ from django.test.utils import override_settings from django.conf import settings from django.core.urlresolvers import reverse from path import path -from tempfile import mkdtemp +from tempdir import mkdtemp_clean import json from fs.osfs import OSFS import copy @@ -194,7 +194,7 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase): import_from_xml(ms, 'common/test/data/', ['full']) location = CourseDescriptor.id_to_location('edX/full/6.002_Spring_2012') - root_dir = path(mkdtemp()) + root_dir = path(mkdtemp_clean()) print 'Exporting to tempdir = {0}'.format(root_dir) @@ -214,12 +214,21 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase): fs = OSFS(root_dir / 'test_export/policies/6.002_Spring_2012') 
self.assertTrue(fs.exists('grading_policy.json')) + course = ms.get_item(location) # compare what's on disk compared to what we have in our course with fs.open('grading_policy.json','r') as grading_policy: - on_disk = loads(grading_policy.read()) - course = ms.get_item(location) + on_disk = loads(grading_policy.read()) self.assertEqual(on_disk, course.definition['data']['grading_policy']) + #check for policy.json + self.assertTrue(fs.exists('policy.json')) + + # compare what's on disk to what we have in the course module + with fs.open('policy.json','r') as course_policy: + on_disk = loads(course_policy.read()) + self.assertIn('course/6.002_Spring_2012', on_disk) + self.assertEqual(on_disk['course/6.002_Spring_2012'], course.metadata) + # remove old course delete_course(ms, cs, location) @@ -255,6 +264,7 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase): self.assertContains(resp, '/c4x/edX/full/asset/handouts_schematic_tutorial.pdf') + class ContentStoreTest(ModuleStoreTestCase): """ Tests for the CMS ContentStore application. @@ -333,7 +343,7 @@ class ContentStoreTest(ModuleStoreTestCase): # Create a course so there is something to view resp = self.client.get(reverse('index')) self.assertContains(resp, - '

My Courses

', + '

My Courses

', status_code=200, html=True) @@ -369,7 +379,7 @@ class ContentStoreTest(ModuleStoreTestCase): resp = self.client.get(reverse('course_index', kwargs=data)) self.assertContains(resp, - 'Robot Super Course', + '
', status_code=200, html=True) @@ -392,11 +402,11 @@ class ContentStoreTest(ModuleStoreTestCase): def test_capa_module(self): """Test that a problem treats markdown specially.""" - CourseFactory.create(org='MITx', course='999', display_name='Robot Super Course') + course = CourseFactory.create(org='MITx', course='999', display_name='Robot Super Course') problem_data = { 'parent_location': 'i4x://MITx/999/course/Robot_Super_Course', - 'template': 'i4x://edx/templates/problem/Empty' + 'template': 'i4x://edx/templates/problem/Blank_Common_Problem' } resp = self.client.post(reverse('clone_item'), problem_data) @@ -413,6 +423,52 @@ class ContentStoreTest(ModuleStoreTestCase): self.assertNotIn('markdown', problem.editable_metadata_fields, "Markdown slipped into the editable metadata fields") + def test_metadata_inheritance(self): + import_from_xml(modulestore(), 'common/test/data/', ['full']) + + ms = modulestore('direct') + course = ms.get_item(Location(['i4x', 'edX', 'full', 'course', '6.002_Spring_2012', None])) + + verticals = ms.get_items(['i4x', 'edX', 'full', 'vertical', None, None]) + + # let's assert on the metadata_inheritance on an existing vertical + for vertical in verticals: + self.assertIn('xqa_key', vertical.metadata) + self.assertEqual(course.metadata['xqa_key'], vertical.metadata['xqa_key']) + + self.assertGreater(len(verticals), 0) + + new_component_location = Location('i4x', 'edX', 'full', 'html', 'new_component') + source_template_location = Location('i4x', 'edx', 'templates', 'html', 'Blank_HTML_Page') + + # crate a new module and add it as a child to a vertical + ms.clone_item(source_template_location, new_component_location) + parent = verticals[0] + ms.update_children(parent.location, parent.definition.get('children', []) + [new_component_location.url()]) + + # flush the cache + ms.get_cached_metadata_inheritance_tree(new_component_location, -1) + new_module = ms.get_item(new_component_location) + + # check for grace period definition which 
should be defined at the course level + self.assertIn('graceperiod', new_module.metadata) + + self.assertEqual(course.metadata['graceperiod'], new_module.metadata['graceperiod']) + + # + # now let's define an override at the leaf node level + # + new_module.metadata['graceperiod'] = '1 day' + ms.update_metadata(new_module.location, new_module.metadata) + + # flush the cache and refetch + ms.get_cached_metadata_inheritance_tree(new_component_location, -1) + new_module = ms.get_item(new_component_location) + + self.assertIn('graceperiod', new_module.metadata) + self.assertEqual('1 day', new_module.metadata['graceperiod']) + + class TemplateTestCase(ModuleStoreTestCase): def test_template_cleanup(self): @@ -420,7 +476,7 @@ class TemplateTestCase(ModuleStoreTestCase): # insert a bogus template in the store bogus_template_location = Location('i4x', 'edx', 'templates', 'html', 'bogus') - source_template_location = Location('i4x', 'edx', 'templates', 'html', 'Empty') + source_template_location = Location('i4x', 'edx', 'templates', 'html', 'Blank_HTML_Page') ms.clone_item(source_template_location, bogus_template_location) diff --git a/cms/djangoapps/contentstore/tests/test_course_settings.py b/cms/djangoapps/contentstore/tests/test_course_settings.py index 925b2431b9..86503d2136 100644 --- a/cms/djangoapps/contentstore/tests/test_course_settings.py +++ b/cms/djangoapps/contentstore/tests/test_course_settings.py @@ -143,10 +143,6 @@ class CourseDetailsViewTest(CourseTestCase): def test_update_and_fetch(self): details = CourseDetails.fetch(self.course_location) - resp = self.client.get(reverse('course_settings', kwargs={'org': self.course_location.org, 'course': self.course_location.course, - 'name': self.course_location.name})) - self.assertContains(resp, '
  • Course Details
  • ', status_code=200, html=True) - # resp s/b json from here on url = reverse('course_settings', kwargs={'org': self.course_location.org, 'course': self.course_location.course, 'name': self.course_location.name, 'section': 'details'}) diff --git a/cms/djangoapps/contentstore/tests/tests.py b/cms/djangoapps/contentstore/tests/tests.py index 166982e35f..c4a46459e2 100644 --- a/cms/djangoapps/contentstore/tests/tests.py +++ b/cms/djangoapps/contentstore/tests/tests.py @@ -4,7 +4,6 @@ from django.test.client import Client from django.conf import settings from django.core.urlresolvers import reverse from path import path -from tempfile import mkdtemp import json from fs.osfs import OSFS import copy diff --git a/cms/djangoapps/contentstore/views.py b/cms/djangoapps/contentstore/views.py index 87a2943773..6d5905afe7 100644 --- a/cms/djangoapps/contentstore/views.py +++ b/cms/djangoapps/contentstore/views.py @@ -59,6 +59,7 @@ from cms.djangoapps.models.settings.course_details import CourseDetails,\ from cms.djangoapps.models.settings.course_grading import CourseGradingModel from cms.djangoapps.contentstore.utils import get_modulestore from lxml import etree +from django.shortcuts import redirect # to install PIL on MacOSX: 'easy_install http://dist.repoze.org/PIL-1.1.6.tar.gz' @@ -81,6 +82,11 @@ def signup(request): csrf_token = csrf(request)['csrf_token'] return render_to_response('signup.html', {'csrf': csrf_token}) +def old_login_redirect(request): + ''' + Redirect to the active login url. 
+ ''' + return redirect('login', permanent=True) @ssl_login_shortcut @ensure_csrf_cookie @@ -94,6 +100,11 @@ def login_page(request): 'forgot_password_link': "//{base}/#forgot-password-modal".format(base=settings.LMS_BASE), }) +def howitworks(request): + if request.user.is_authenticated(): + return index(request) + else: + return render_to_response('howitworks.html', {}) # ==== Views for any logged-in user ================================== @@ -120,7 +131,8 @@ def index(request): reverse('course_index', args=[ course.location.org, course.location.course, - course.location.name])) + course.location.name]), + get_lms_link_for_item(course.location)) for course in courses], 'user': request.user, 'disable_course_creation': settings.MITX_FEATURES.get('DISABLE_COURSE_CREATION', False) and not request.user.is_staff @@ -161,6 +173,8 @@ def course_index(request, org, course, name): if not has_access(request.user, location): raise PermissionDenied() + lms_link = get_lms_link_for_item(location) + upload_asset_callback_url = reverse('upload_asset', kwargs={ 'org': org, 'course': course, @@ -173,6 +187,7 @@ def course_index(request, org, course, name): return render_to_response('overview.html', { 'active_tab': 'courseware', 'context_course': course, + 'lms_link': lms_link, 'sections': sections, 'course_graders': json.dumps(CourseGradingModel.fetch(course.location).graders), 'parent_location': course.location, @@ -273,7 +288,7 @@ def edit_unit(request, location): template.display_name, template.location.url(), 'markdown' in template.metadata, - template.location.name == 'Empty' + 'empty' in template.metadata )) components = [ @@ -730,8 +745,6 @@ def clone_item(request): #@login_required #@ensure_csrf_cookie - - def upload_asset(request, org, course, coursename): ''' cdodge: this method allows for POST uploading of files into the course asset library, which will @@ -796,8 +809,6 @@ def upload_asset(request, org, course, coursename): ''' This view will return all CMS users who are 
editors for the specified course ''' - - @login_required @ensure_csrf_cookie def manage_users(request, location): @@ -819,7 +830,7 @@ def manage_users(request, location): }) -def create_json_response(errmsg=None): +def create_json_response(errmsg = None): if errmsg is not None: resp = HttpResponse(json.dumps({'Status': 'Failed', 'ErrMsg': errmsg})) else: @@ -831,8 +842,6 @@ def create_json_response(errmsg=None): This POST-back view will add a user - specified by email - to the list of editors for the specified course ''' - - @expect_json @login_required @ensure_csrf_cookie @@ -865,8 +874,6 @@ def add_user(request, location): This POST-back view will remove a user - specified by email - from the list of editors for the specified course ''' - - @expect_json @login_required @ensure_csrf_cookie @@ -1124,8 +1131,31 @@ def get_course_settings(request, org, course, name): course_details = CourseDetails.fetch(location) return render_to_response('settings.html', { - 'active_tab': 'settings', 'context_course': course_module, + 'course_location' : location, + 'course_details' : json.dumps(course_details, cls=CourseSettingsEncoder) + }) + +@login_required +@ensure_csrf_cookie +def course_config_graders_page(request, org, course, name): + """ + Send models and views as well as html for editing the course settings to the client. 
+ + org, course, name: Attributes of the Location for the item to edit + """ + location = ['i4x', org, course, 'course', name] + + # check that logged in user has permissions to this item + if not has_access(request.user, location): + raise PermissionDenied() + + course_module = modulestore().get_item(location) + course_details = CourseGradingModel.fetch(location) + + return render_to_response('settings_graders.html', { + 'context_course': course_module, + 'course_location' : location, 'course_details': json.dumps(course_details, cls=CourseSettingsEncoder) }) diff --git a/cms/envs/common.py b/cms/envs/common.py index 30aac6ea01..50f237c374 100644 --- a/cms/envs/common.py +++ b/cms/envs/common.py @@ -20,7 +20,6 @@ Longer TODO: """ import sys -import tempfile import os.path import os import lms.envs.common @@ -59,7 +58,8 @@ sys.path.append(COMMON_ROOT / 'lib') ############################# WEB CONFIGURATION ############################# # This is where we stick our compiled template files. -MAKO_MODULE_DIR = tempfile.mkdtemp('mako') +from tempdir import mkdtemp_clean +MAKO_MODULE_DIR = mkdtemp_clean('mako') MAKO_TEMPLATES = {} MAKO_TEMPLATES['main'] = [ PROJECT_ROOT / 'templates', @@ -74,8 +74,8 @@ TEMPLATE_DIRS = MAKO_TEMPLATES['main'] MITX_ROOT_URL = '' -LOGIN_REDIRECT_URL = MITX_ROOT_URL + '/login' -LOGIN_URL = MITX_ROOT_URL + '/login' +LOGIN_REDIRECT_URL = MITX_ROOT_URL + '/signin' +LOGIN_URL = MITX_ROOT_URL + '/signin' TEMPLATE_CONTEXT_PROCESSORS = ( diff --git a/cms/static/client_templates/course_grade_policy.html b/cms/static/client_templates/course_grade_policy.html index c9a21280dd..db129614f6 100644 --- a/cms/static/client_templates/course_grade_policy.html +++ b/cms/static/client_templates/course_grade_policy.html @@ -1,69 +1,37 @@ -
  • -
    - +
  • +
    + + + e.g. Homework, Midterm Exams +
    -
    -
    - - e.g. Homework, Labs, Midterm Exams, Final Exam -
    -
    - - -
    - - -
    -
    - - e.g. HW, Midterm, Final -
    -
    -
    - -
    - - -
    -
    - - e.g. 25% -
    -
    -
    - -
    - - -
    -
    - - total exercises assigned -
    -
    -
    - -
    - - -
    -
    - - total exercises that won't be graded -
    -
    -
    - Delete +
    + + + e.g. HW, Midterm +
    + +
    + + + e.g. 25% +
    + +
    + + + total exercises assigned +
    + +
    + + + total exercises that won't be graded +
    + +
    + Delete +
  • diff --git a/cms/static/coffee/src/views/tabs.coffee b/cms/static/coffee/src/views/tabs.coffee index 5a826c1794..9fbe4e5789 100644 --- a/cms/static/coffee/src/views/tabs.coffee +++ b/cms/static/coffee/src/views/tabs.coffee @@ -1,6 +1,4 @@ class CMS.Views.TabsEdit extends Backbone.View - events: - 'click .new-tab': 'addNewTab' initialize: => @$('.component').each((idx, element) => @@ -13,6 +11,7 @@ class CMS.Views.TabsEdit extends Backbone.View ) ) + @options.mast.find('.new-tab').on('click', @addNewTab) @$('.components').sortable( handle: '.drag-handle' update: @tabMoved diff --git a/cms/static/img/hiw-feature1.png b/cms/static/img/hiw-feature1.png new file mode 100644 index 0000000000..3cfd48d066 Binary files /dev/null and b/cms/static/img/hiw-feature1.png differ diff --git a/cms/static/img/hiw-feature2.png b/cms/static/img/hiw-feature2.png new file mode 100644 index 0000000000..9442325dd5 Binary files /dev/null and b/cms/static/img/hiw-feature2.png differ diff --git a/cms/static/img/hiw-feature3.png b/cms/static/img/hiw-feature3.png new file mode 100644 index 0000000000..fa6b81ae89 Binary files /dev/null and b/cms/static/img/hiw-feature3.png differ diff --git a/cms/static/img/html-icon.png b/cms/static/img/html-icon.png index e739f2fc11..8f576178b2 100644 Binary files a/cms/static/img/html-icon.png and b/cms/static/img/html-icon.png differ diff --git a/cms/static/img/large-discussion-icon.png b/cms/static/img/large-discussion-icon.png index 2f0bfea98f..cebf332769 100644 Binary files a/cms/static/img/large-discussion-icon.png and b/cms/static/img/large-discussion-icon.png differ diff --git a/cms/static/img/large-freeform-icon.png b/cms/static/img/large-freeform-icon.png index b1d195a7ca..0d5e454f58 100644 Binary files a/cms/static/img/large-freeform-icon.png and b/cms/static/img/large-freeform-icon.png differ diff --git a/cms/static/img/large-problem-icon.png b/cms/static/img/large-problem-icon.png index b962d42b14..a30ab8eac8 100644 Binary files 
a/cms/static/img/large-problem-icon.png and b/cms/static/img/large-problem-icon.png differ diff --git a/cms/static/img/large-video-icon.png b/cms/static/img/large-video-icon.png index 392851324c..f1ab048b4c 100644 Binary files a/cms/static/img/large-video-icon.png and b/cms/static/img/large-video-icon.png differ diff --git a/cms/static/img/logo-edx-studio-white.png b/cms/static/img/logo-edx-studio-white.png new file mode 100644 index 0000000000..3e3ee63622 Binary files /dev/null and b/cms/static/img/logo-edx-studio-white.png differ diff --git a/cms/static/img/logo-edx-studio.png b/cms/static/img/logo-edx-studio.png new file mode 100644 index 0000000000..006194a195 Binary files /dev/null and b/cms/static/img/logo-edx-studio.png differ diff --git a/cms/static/img/pl-1x1-000.png b/cms/static/img/pl-1x1-000.png new file mode 100644 index 0000000000..b94b7a9746 Binary files /dev/null and b/cms/static/img/pl-1x1-000.png differ diff --git a/cms/static/img/pl-1x1-fff.png b/cms/static/img/pl-1x1-fff.png new file mode 100644 index 0000000000..7081c75d36 Binary files /dev/null and b/cms/static/img/pl-1x1-fff.png differ diff --git a/cms/static/img/preview-lms-staticpages.png b/cms/static/img/preview-lms-staticpages.png new file mode 100644 index 0000000000..05a62f7c7f Binary files /dev/null and b/cms/static/img/preview-lms-staticpages.png differ diff --git a/cms/static/img/thumb-hiw-feature1.png b/cms/static/img/thumb-hiw-feature1.png new file mode 100644 index 0000000000..b2dc0c00ee Binary files /dev/null and b/cms/static/img/thumb-hiw-feature1.png differ diff --git a/cms/static/img/thumb-hiw-feature2.png b/cms/static/img/thumb-hiw-feature2.png new file mode 100644 index 0000000000..e96bcad1aa Binary files /dev/null and b/cms/static/img/thumb-hiw-feature2.png differ diff --git a/cms/static/img/thumb-hiw-feature3.png b/cms/static/img/thumb-hiw-feature3.png new file mode 100644 index 0000000000..f694fca516 Binary files /dev/null and b/cms/static/img/thumb-hiw-feature3.png 
differ diff --git a/cms/static/js/base.js b/cms/static/js/base.js index 7e55d2b8d8..d8b32cb0e8 100644 --- a/cms/static/js/base.js +++ b/cms/static/js/base.js @@ -5,7 +5,7 @@ var $newComponentItem; var $changedInput; var $spinner; -$(document).ready(function() { +$(document).ready(function () { $body = $('body'); $modal = $('.history-modal'); $modalCover = $(' diff --git a/cms/templates/activation_complete.html b/cms/templates/activation_complete.html index 5d9437ccb3..1e195a632c 100644 --- a/cms/templates/activation_complete.html +++ b/cms/templates/activation_complete.html @@ -5,7 +5,7 @@

    Activation Complete!

    -

    Thanks for activating your account. Log in here.

    +

    Thanks for activating your account. Log in here.

    diff --git a/cms/templates/asset_index.html b/cms/templates/asset_index.html index 01766e2dac..5ace98df56 100644 --- a/cms/templates/asset_index.html +++ b/cms/templates/asset_index.html @@ -1,7 +1,7 @@ <%inherit file="base.html" /> <%! from django.core.urlresolvers import reverse %> -<%block name="bodyclass">assets -<%block name="title">Courseware Assets +<%block name="bodyclass">is-signedin course uploads +<%block name="title">Uploads & Files <%namespace name='static' file='static_content.html'/> @@ -33,12 +33,27 @@ +
    +
    +
    + Course Content +

    Files & Uploads

    +
    + + +
    +
    +
    diff --git a/cms/templates/base.html b/cms/templates/base.html index 84f10fc2d1..498897bd11 100644 --- a/cms/templates/base.html +++ b/cms/templates/base.html @@ -5,23 +5,29 @@ + + <%block name="title"></%block> | + % if context_course: + <% ctx_loc = context_course.location %> + ${context_course.display_name} | + % endif + edX Studio + + + + <%static:css group='base-style'/> - + - <%block name="title"></%block> - - - - <%block name="header_extras"> - <%include file="widgets/header.html" args="active_tab=active_tab"/> + <%include file="widgets/header.html" /> <%include file="courseware_vendor_js.html"/> @@ -47,9 +53,9 @@ <%block name="content"> + <%include file="widgets/footer.html" /> <%block name="jsextra"> - diff --git a/cms/templates/course_index.html b/cms/templates/course_index.html index e490ad7817..5c8772c1ed 100644 --- a/cms/templates/course_index.html +++ b/cms/templates/course_index.html @@ -1,5 +1,5 @@ <%inherit file="base.html" /> -<%block name="title">Course Manager + <%include file="widgets/header.html"/> <%block name="content"> diff --git a/cms/templates/course_info.html b/cms/templates/course_info.html index 83d829efa0..a68a0da76a 100644 --- a/cms/templates/course_info.html +++ b/cms/templates/course_info.html @@ -2,8 +2,9 @@ <%namespace name='static' file='static_content.html'/> -<%block name="title">Course Info -<%block name="bodyclass">course-info +<%block name="title">Updates +<%block name="bodyclass">is-signedin course course-info updates + <%block name="jsextra"> @@ -41,16 +42,38 @@ <%block name="content"> +
    +
    +
    + Course Content +

    Course Updates

    +
    + + +
    +
    + +
    +
    +
    +

    Course updates are announcements or notifications you want to share with your class. Other course authors have used them for important exam/date reminders, change in schedules, and to call out any important steps students need to be aware of.

    +
    +
    +
    +
    -

    Course Info

    diff --git a/cms/templates/edit-static-page.html b/cms/templates/edit-static-page.html index 02fe2308fa..f1b2374b46 100644 --- a/cms/templates/edit-static-page.html +++ b/cms/templates/edit-static-page.html @@ -1,7 +1,7 @@ <%inherit file="base.html" /> <%! from django.core.urlresolvers import reverse %> -<%block name="title">Edit Static Page -<%block name="bodyclass">edit-static-page +<%block name="title">Editing Static Page +<%block name="bodyclass">is-signedin course pages edit-static-page <%block name="content">
    diff --git a/cms/templates/edit-tabs.html b/cms/templates/edit-tabs.html index c6ffb14124..1a44de60c1 100644 --- a/cms/templates/edit-tabs.html +++ b/cms/templates/edit-tabs.html @@ -1,7 +1,7 @@ <%inherit file="base.html" /> <%! from django.core.urlresolvers import reverse %> -<%block name="title">Tabs -<%block name="bodyclass">static-pages +<%block name="title">Static Pages +<%block name="bodyclass">is-signedin course pages static-pages <%block name="jsextra"> <%block name="content"> +
    +
    +
    + Course Content +

    Static Pages

    +
    + + +
    +
    + +
    +
    + +
    +
    +
    -
    -

    Here you can add and manage additional pages for your course

    -

    These pages will be added to the primary navigation menu alongside Courseware, Course Info, Discussion, etc.

    -
    - -
      @@ -43,4 +67,17 @@
    + +
    +

    How Static Pages are Used in Your Course

    +
    + Preview of how Static Pages are used in your course +
    These pages will be presented in your course's main navigation alongside Courseware, Course Info, Discussion, etc.
    +
    + + + + close modal + +
    \ No newline at end of file diff --git a/cms/templates/edit_subsection.html b/cms/templates/edit_subsection.html index d81f577940..00780eab3b 100644 --- a/cms/templates/edit_subsection.html +++ b/cms/templates/edit_subsection.html @@ -7,8 +7,9 @@ %> <%! from django.core.urlresolvers import reverse %> -<%block name="bodyclass">subsection <%block name="title">CMS Subsection +<%block name="bodyclass">is-signedin course subsection + <%namespace name="units" file="widgets/units.html" /> <%namespace name='static' file='static_content.html'/> @@ -97,6 +98,7 @@
    +
    <%block name="jsextra"> diff --git a/cms/templates/export.html b/cms/templates/export.html index fcdd26458a..27045d82ce 100644 --- a/cms/templates/export.html +++ b/cms/templates/export.html @@ -2,10 +2,19 @@ <%namespace name='static' file='static_content.html'/> <%! from django.core.urlresolvers import reverse %> -<%block name="title">Export -<%block name="bodyclass">export +<%block name="title">Export Course +<%block name="bodyclass">is-signedin course tools export <%block name="content"> +
    +
    +
    + Tools +

    Course Export

    +
    +
    +
    +
    diff --git a/cms/templates/howitworks.html b/cms/templates/howitworks.html new file mode 100644 index 0000000000..1cf9b17710 --- /dev/null +++ b/cms/templates/howitworks.html @@ -0,0 +1,185 @@ +<%inherit file="base.html" /> +<%! from django.core.urlresolvers import reverse %> + +<%block name="title">Welcome +<%block name="bodyclass">not-signedin index howitworks + +<%block name="content"> + +
    +
    +
    +

    Welcome to

    +

    Studio helps manage your courses online, so you can focus on teaching them

    +
    +
    +
    + +
    +
    +
    +

    Studio's Many Features

    +
    + +
      +
    1. +
      + + Studio Helps You Keep Your Courses Organized +
      Studio Helps You Keep Your Courses Organized
      + + + +
      +
      + +
      +

      Keeping Your Course Organized

      +

      The backbone of your course is how it is organized. Studio offers an Outline editor, providing a simple hierarchy and easy drag and drop to help you and your students stay organized.

      + +
        +
      • +

        Simple Organization For Content

        +

        Studio uses a simple hierarchy of sections and subsections to organize your content.

        +
      • + +
      • +

        Change Your Mind Anytime

        +

        Draft your outline and build content anywhere. Simple drag and drop tools let your reorganize quickly.

        +
      • + +
      • +

        Go A Week Or A Semester At A Time

        +

        Build and release sections to your students incrementally. You don't have to have it all done at once.

        +
      • +
      +
      +
    2. + +
    3. +
      + + Learning is More than Just Lectures +
      Learning is More than Just Lectures
      + + + +
      +
      + +
      +

      Learning is More than Just Lectures

      +

      Studio lets you weave your content together in a way that reinforces learning — short video lectures interleaved with exercises and more. Insert videos and author a wide variety of exercise types with just a few clicks.

      + +
        +
      • +

        Create Learning Pathways

        +

        Help your students understand a small interactive piece at a time with multimedia, HTML, and exercises.

        +
      • + +
      • +

        Work Visually, Organize Quickly

        +

        Work visually and see exactly what your students will see. Reorganize all your content with drag and drop.

        +
      • + +
      • +

        A Broad Library of Problem Types

        +

        It's more than just multiple choice. Studio has nearly a dozen types of problems to challenge your learners.

        +
      • +
      +
      +
    4. + +
    5. +
      + + Studio Gives You Simple, Fast, and Incremental Publishing. With Friends. +
      Studio Gives You Simple, Fast, and Incremental Publishing. With Friends.
      + + + +
      +
      + +
      +

      Simple, Fast, and Incremental Publishing. With Friends.

      +

      Studio works like web applications you already know, yet understands how you build curriculum. Instant publishing to the web when you want it, incremental release when it makes sense. And with co-authors, you can have a whole team building a course, together.

      + +
        +
      • +

        Instant Changes

        +

        Caught a bug? No problem. When you want, your changes to live when you hit Save.

        +
      • + +
      • +

        Release-On Date Publishing

        +

        When you've finished a section, pick when you want it to go live and Studio takes care of the rest. Build your course incrementally.

        +
      • + +
      • +

        Work in Teams

        +

        Co-authors have full access to all the same authoring tools. Make your course better through a team effort.

        +
      • +
      +
      +
    6. +
    +
    +
    + +
    +
    +
    +

    Sign Up for Studio Today!

    +
    + + +
    +
    + +
    +

    Outlining Your Course

    +
    + +
    Simple two-level outline to organize your couse. Drag and drop, and see your course at a glance.
    +
    + + + + close modal + +
    + +
    +

    More than Just Lectures

    +
    + +
    Quickly create videos, text snippets, inline discussions, and a variety of problem types.
    +
    + + + + close modal + +
    + +
    +

    Publishing on Date

    +
    + +
    Simply set the date of a section or subsection, and Studio will publish it to your students for you.
    +
    + + + + close modal + +
    + \ No newline at end of file diff --git a/cms/templates/import.html b/cms/templates/import.html index e4f8019714..b0a9f04903 100644 --- a/cms/templates/import.html +++ b/cms/templates/import.html @@ -2,10 +2,19 @@ <%namespace name='static' file='static_content.html'/> <%! from django.core.urlresolvers import reverse %> -<%block name="title">Import -<%block name="bodyclass">import +<%block name="title">Import Course +<%block name="bodyclass">is-signedin course tools import <%block name="content"> +
    +
    +
    + Tools +

    Course Import

    +
    +
    +
    +
    diff --git a/cms/templates/index.html b/cms/templates/index.html index 45c4edc176..fdb46612a0 100644 --- a/cms/templates/index.html +++ b/cms/templates/index.html @@ -1,6 +1,7 @@ <%inherit file="base.html" /> -<%block name="bodyclass">index + <%block name="title">Courses +<%block name="bodyclass">is-signedin index dashboard <%block name="header_extras"> - - + \ No newline at end of file diff --git a/cms/templates/manage_users.html b/cms/templates/manage_users.html index 99ac279bfb..722e756203 100644 --- a/cms/templates/manage_users.html +++ b/cms/templates/manage_users.html @@ -1,17 +1,31 @@ <%inherit file="base.html" /> <%block name="title">Course Staff Manager -<%block name="bodyclass">users +<%block name="bodyclass">is-signedin course users settings team + <%block name="content"> +
    +
    +
    + Course Settings +

    Course Team

    +
    + + +
    +
    +
    -
    - %if allow_actions: - - New User - - %endif -

    The following list of users have been designated as course staff. This means that these users will have permissions to modify course content. You may add additional course staff below, if you are the course instructor. Please note that they must have already registered and verified their account.

    diff --git a/cms/templates/overview.html b/cms/templates/overview.html index 20ddcead01..91a1107726 100644 --- a/cms/templates/overview.html +++ b/cms/templates/overview.html @@ -6,7 +6,8 @@ from datetime import datetime %> <%! from django.core.urlresolvers import reverse %> -<%block name="title">CMS Courseware Overview +<%block name="title">Course Outline +<%block name="bodyclass">is-signedin course outline <%namespace name='static' file='static_content.html'/> <%namespace name="units" file="widgets/units.html" /> @@ -119,12 +120,32 @@
    +
    +
    +
    + Course Content +

    Course Outline

    +
    + + +
    +
    +
    -
    % for section in sections:
    diff --git a/cms/templates/settings.html b/cms/templates/settings.html index c96d5686fd..32d24b77e6 100644 --- a/cms/templates/settings.html +++ b/cms/templates/settings.html @@ -1,6 +1,6 @@ <%inherit file="base.html" /> -<%block name="bodyclass">settings -<%block name="title">Settings +<%block name="title">Schedule & Details +<%block name="bodyclass">is-signedin course schedule settings <%namespace name='static' file='static_content.html'/> <%! @@ -15,24 +15,24 @@ from contentstore import utils - - - - - + + + + + + + + + + + +<%block name="content"> + +
    +
    +

    Settings

    +
    +
    + +
    +

    Faculty

    + +
    +
    +

    Faculty Members

    + Individuals instructing and help with this course +
    + +
    +
    +
      +
    • +
      + +
      + +
      +
      + +
      + +
      + +
      +
      + +
      + + +
      + +
      + +
      + + A brief description of your education, experience, and expertise +
      +
      + + Delete Faculty Member +
    • + +
    • +
      + +
      + +
      +
      + +
      + +
      + +
      +
      + +
      + +
      +
      + + Upload Faculty Photo + + Max size: 30KB +
      +
      +
      + +
      + +
      +
      + + A brief description of your education, experience, and expertise +
      +
      +
      +
    • +
    + + + New Faculty Member + +
    +
    +
    + +
    + +
    +

    Problems

    + +
    +
    +

    General Settings

    + Course-wide settings for all problems +
    + +
    +

    Problem Randomization:

    + +
    +
    + + +
    + + randomize all problems +
    +
    + +
    + + +
    + + do not randomize problems +
    +
    + +
    + + +
    + + randomize problems per student +
    +
    +
    +
    + +
    +

    Show Answers:

    + +
    +
    + + +
    + + Answers will be shown after the number of attempts has been met +
    +
    + +
    + + +
    + + Answers will never be shown, regardless of attempts +
    +
    +
    +
    + +
    + + +
    +
    + + Students will this have this number of chances to answer a problem. To set infinite atttempts, use "0" +
    +
    +
    +
    + +
    +
    +

    [Assignment Type Name]

    +
    + +
    +

    Problem Randomization:

    + +
    +
    + + +
    + + randomize all problems +
    +
    + +
    + + +
    + + do not randomize problems +
    +
    + +
    + + +
    + + randomize problems per student +
    +
    +
    +
    + +
    +

    Show Answers:

    + +
    +
    + + +
    + + Answers will be shown after the number of attempts has been met +
    +
    + +
    + + +
    + + Answers will never be shown, regardless of attempts +
    +
    +
    +
    + +
    + + +
    +
    + + Students will this have this number of chances to answer a problem. To set infinite atttempts, use "0" +
    +
    +
    +
    +
    + +
    +

    Discussions

    + +
    +
    +

    General Settings

    + Course-wide settings for online discussion +
    + +
    +

    Anonymous Discussions:

    + +
    +
    + + +
    + + Students and faculty will be able to post anonymously +
    +
    + +
    + + +
    + + Posting anonymously is not allowed. Any previous anonymous posts will be reverted to non-anonymous +
    +
    +
    +
    + +
    +

    Anonymous Discussions:

    + +
    +
    + + +
    + + Students and faculty will be able to post anonymously +
    +
    + +
    + + +
    + + This option is disabled since there are previous discussions that are anonymous. +
    +
    +
    +
    + +
    +

    Discussion Categories

    + +
    + + + + New Discussion Category + +
    +
    +
    +
    +
    +
    +
    +
    +
    + diff --git a/cms/templates/settings_graders.html b/cms/templates/settings_graders.html new file mode 100644 index 0000000000..61cb59e995 --- /dev/null +++ b/cms/templates/settings_graders.html @@ -0,0 +1,151 @@ +<%inherit file="base.html" /> +<%block name="title">Grading +<%block name="bodyclass">is-signedin course grading settings + +<%namespace name='static' file='static_content.html'/> +<%! +from contentstore import utils +%> + +<%block name="jsextra"> + + + + + + + + + + + + + +<%block name="content"> +
    +
    +
    + Settings +

    Grading

    +
    +
    +
    + +
    +
    +
    +
    +
    +
    +

    Overall Grade Range

    + Your overall grading scale for student final grades +
    + +
      +
    1. +
      + +
      +
      +
        +
      1. 0
      2. +
      3. 10
      4. +
      5. 20
      6. +
      7. 30
      8. +
      9. 40
      10. +
      11. 50
      12. +
      13. 60
      14. +
      15. 70
      16. +
      17. 80
      18. +
      19. 90
      20. +
      21. 100
      22. +
      +
        +
      +
      +
      +
      +
    2. +
    +
    + +
    + +
    +
    +

    Grading Rules & Policies

    + Deadlines, requirements, and logistics around grading student work +
    + +
      +
    1. + + + Leeway on due dates +
    2. +
    +
    + +
    + +
    +
    +

    Assignment Types

    + Categories and labels for any exercises that are gradable +
    + +
      + +
    + + +
    +
    +
    + + +
    +
    + diff --git a/cms/templates/signup.html b/cms/templates/signup.html index 2c60b758e6..30c5c1cf2b 100644 --- a/cms/templates/signup.html +++ b/cms/templates/signup.html @@ -1,94 +1,141 @@ <%inherit file="base.html" /> <%! from django.core.urlresolvers import reverse %> -<%block name="title">Sign up -<%block name="bodyclass">no-header +<%block name="title">Sign Up +<%block name="bodyclass">not-signedin signup <%block name="content"> -
    +
    +
    +
    +

    Sign Up for edX Studio

    + +
    - +
    +

    I've never authored a course online before. Is there help?

    +

    Absolutely. We have created an online course, edX101, that describes some best practices: from filming video, creating exercises, to the basics of running an online course. Additionally, we're always here to help, just drop us a note.

    +
    + +
    +
    + - + ); + }); + })(this) + \ No newline at end of file diff --git a/cms/templates/unit.html b/cms/templates/unit.html index f3a779604e..c529f5863a 100644 --- a/cms/templates/unit.html +++ b/cms/templates/unit.html @@ -1,8 +1,9 @@ <%inherit file="base.html" /> <%! from django.core.urlresolvers import reverse %> <%namespace name="units" file="widgets/units.html" /> -<%block name="bodyclass">unit -<%block name="title">CMS Unit +<%block name="title">Individual Unit +<%block name="bodyclass">is-signedin course unit + <%block name="jsextra"> @@ -56,38 +65,66 @@
    % for type, templates in sorted(component_templates.items()):
    -

    Select ${type} component type:

    - - + % if type == "problem": +
    + + % endif +
    +
      + % for name, location, has_markdown, is_empty in templates: + % if has_markdown or type != "problem": + % if is_empty: +
    • + + ${name} + +
    • + + % else: +
    • + + ${name} + +
    • + % endif + % endif + + %endfor +
    +
    + % if type == "problem": +
    +
      + % for name, location, has_markdown, is_empty in templates: + % if not has_markdown: + % if is_empty: +
    • + + ${name} + +
    • + + % else: +
    • + + ${name} + + +
    • + % endif + % endif + % endfor +
    +
    +
    + % endif Cancel
    % endfor diff --git a/cms/templates/widgets/footer.html b/cms/templates/widgets/footer.html new file mode 100644 index 0000000000..0f265dfc2c --- /dev/null +++ b/cms/templates/widgets/footer.html @@ -0,0 +1,30 @@ +<%! from django.core.urlresolvers import reverse %> + + \ No newline at end of file diff --git a/cms/templates/widgets/header.html b/cms/templates/widgets/header.html index 5f41452339..7b516ececd 100644 --- a/cms/templates/widgets/header.html +++ b/cms/templates/widgets/header.html @@ -1,40 +1,117 @@ <%! from django.core.urlresolvers import reverse %> -<% active_tab_class = 'active-tab-' + active_tab if active_tab else '' %> -
    -
    -
    -
    - % if context_course: - <% ctx_loc = context_course.location %> - › - ${context_course.display_name} › - % endif -
    +
    + + +
    + % if user.is_authenticated(): + + % else: + + % endif +
    +
    +
    \ No newline at end of file diff --git a/cms/templates/widgets/problem-edit.html b/cms/templates/widgets/problem-edit.html index 4ff9d299ab..8ca07a7928 100644 --- a/cms/templates/widgets/problem-edit.html +++ b/cms/templates/widgets/problem-edit.html @@ -1,20 +1,20 @@ <%include file="metadata-edit.html" />
    - %if markdown != '' or data == '\n\n': + %if enable_markdown:
    • -
    • -
    • -
    • -
    • @@ -56,7 +56,7 @@
    -
    Check Multiple
    +
    Checkboxes
    @@ -67,7 +67,7 @@
    -
    String Response
    +
    Text Input
    @@ -76,7 +76,7 @@
    -
    Numerical Response
    +
    Numerical Input
    @@ -85,7 +85,7 @@
    -
    Option Response
    +
    Dropdown
    diff --git a/cms/urls.py b/cms/urls.py index ad4dd87d74..35b2707241 100644 --- a/cms/urls.py +++ b/cms/urls.py @@ -6,7 +6,8 @@ from django.conf.urls import patterns, include, url # admin.autodiscover() urlpatterns = ('', - url(r'^$', 'contentstore.views.index', name='index'), + url(r'^$', 'contentstore.views.howitworks', name='homepage'), + url(r'^listing', 'contentstore.views.index', name='index'), url(r'^edit/(?P.*?)$', 'contentstore.views.edit_unit', name='edit_unit'), url(r'^subsection/(?P.*?)$', 'contentstore.views.edit_subsection', name='edit_subsection'), url(r'^preview_component/(?P.*?)$', 'contentstore.views.preview_component', name='preview_component'), @@ -42,9 +43,10 @@ urlpatterns = ('', 'contentstore.views.remove_user', name='remove_user'), url(r'^(?P[^/]+)/(?P[^/]+)/info/(?P[^/]+)$', 'contentstore.views.course_info', name='course_info'), url(r'^(?P[^/]+)/(?P[^/]+)/course_info/updates/(?P.*)$', 'contentstore.views.course_info_updates', name='course_info'), - url(r'^(?P[^/]+)/(?P[^/]+)/settings/(?P[^/]+)$', 'contentstore.views.get_course_settings', name='course_settings'), - url(r'^(?P[^/]+)/(?P[^/]+)/settings/(?P[^/]+)/section/(?P
    [^/]+).*$', 'contentstore.views.course_settings_updates', name='course_settings'), - url(r'^(?P[^/]+)/(?P[^/]+)/grades/(?P[^/]+)/(?P.*)$', 'contentstore.views.course_grader_updates', name='course_settings'), + url(r'^(?P[^/]+)/(?P[^/]+)/settings-details/(?P[^/]+)$', 'contentstore.views.get_course_settings', name='course_settings'), + url(r'^(?P[^/]+)/(?P[^/]+)/settings-grading/(?P[^/]+)$', 'contentstore.views.course_config_graders_page', name='course_settings'), + url(r'^(?P[^/]+)/(?P[^/]+)/settings-details/(?P[^/]+)/section/(?P
    [^/]+).*$', 'contentstore.views.course_settings_updates', name='course_settings'), + url(r'^(?P[^/]+)/(?P[^/]+)/settings-grading/(?P[^/]+)/(?P.*)$', 'contentstore.views.course_grader_updates', name='course_settings'), url(r'^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/gradeas.*$', 'contentstore.views.assignment_type_update', name='assignment_type_update'), @@ -76,13 +78,15 @@ urlpatterns = ('', # User creation and updating views urlpatterns += ( + url(r'^howitworks$', 'contentstore.views.howitworks', name='howitworks'), url(r'^signup$', 'contentstore.views.signup', name='signup'), url(r'^create_account$', 'student.views.create_account'), url(r'^activate/(?P[^/]*)$', 'student.views.activate_account', name='activate'), # form page - url(r'^login$', 'contentstore.views.login_page', name='login'), + url(r'^login$', 'contentstore.views.old_login_redirect', name='old_login'), + url(r'^signin$', 'contentstore.views.login_page', name='login'), # ajax view that actually does the work url(r'^login_post$', 'student.views.login_user', name='login_post'), diff --git a/common/djangoapps/mitxmako/makoloader.py b/common/djangoapps/mitxmako/makoloader.py index 29184299b6..d623e8bcff 100644 --- a/common/djangoapps/mitxmako/makoloader.py +++ b/common/djangoapps/mitxmako/makoloader.py @@ -9,6 +9,7 @@ from django.template.loaders.app_directories import Loader as AppDirectoriesLoad from mitxmako.template import Template import mitxmako.middleware +import tempdir log = logging.getLogger(__name__) @@ -30,7 +31,7 @@ class MakoLoader(object): if module_directory is None: log.warning("For more caching of mako templates, set the MAKO_MODULE_DIR in settings!") - module_directory = tempfile.mkdtemp() + module_directory = tempdir.mkdtemp_clean() self.module_directory = module_directory diff --git a/common/djangoapps/mitxmako/middleware.py b/common/djangoapps/mitxmako/middleware.py index 64cb2e5415..3f66f8cc48 100644 --- a/common/djangoapps/mitxmako/middleware.py +++ 
b/common/djangoapps/mitxmako/middleware.py @@ -13,7 +13,7 @@ # limitations under the License. from mako.lookup import TemplateLookup -import tempfile +import tempdir from django.template import RequestContext from django.conf import settings @@ -29,7 +29,7 @@ class MakoMiddleware(object): module_directory = getattr(settings, 'MAKO_MODULE_DIR', None) if module_directory is None: - module_directory = tempfile.mkdtemp() + module_directory = tempdir.mkdtemp_clean() for location in template_locations: lookup[location] = TemplateLookup(directories=template_locations[location], diff --git a/common/djangoapps/student/management/commands/tests/test_pearson.py b/common/djangoapps/student/management/commands/tests/test_pearson.py index 12969405de..65d628fba0 100644 --- a/common/djangoapps/student/management/commands/tests/test_pearson.py +++ b/common/djangoapps/student/management/commands/tests/test_pearson.py @@ -7,6 +7,7 @@ import logging import os from tempfile import mkdtemp import cStringIO +import shutil import sys from django.test import TestCase @@ -143,23 +144,18 @@ class PearsonTestCase(TestCase): ''' Base class for tests running Pearson-related commands ''' - import_dir = mkdtemp(prefix="import") - export_dir = mkdtemp(prefix="export") def assertErrorContains(self, error_message, expected): self.assertTrue(error_message.find(expected) >= 0, 'error message "{}" did not contain "{}"'.format(error_message, expected)) + def setUp(self): + self.import_dir = mkdtemp(prefix="import") + self.addCleanup(shutil.rmtree, self.import_dir) + self.export_dir = mkdtemp(prefix="export") + self.addCleanup(shutil.rmtree, self.export_dir) + def tearDown(self): - def delete_temp_dir(dirname): - if os.path.exists(dirname): - for filename in os.listdir(dirname): - os.remove(os.path.join(dirname, filename)) - os.rmdir(dirname) - - # clean up after any test data was dumped to temp directory - delete_temp_dir(self.import_dir) - delete_temp_dir(self.export_dir) - + pass # and clean up the 
database: # TestCenterUser.objects.all().delete() # TestCenterRegistration.objects.all().delete() diff --git a/lms/djangoapps/terrain/__init__.py b/common/djangoapps/terrain/__init__.py similarity index 100% rename from lms/djangoapps/terrain/__init__.py rename to common/djangoapps/terrain/__init__.py diff --git a/lms/djangoapps/terrain/browser.py b/common/djangoapps/terrain/browser.py similarity index 87% rename from lms/djangoapps/terrain/browser.py rename to common/djangoapps/terrain/browser.py index e1925bde0b..8c2a8ba7a5 100644 --- a/lms/djangoapps/terrain/browser.py +++ b/common/djangoapps/terrain/browser.py @@ -11,8 +11,9 @@ from django.core.management import call_command @before.harvest def initial_setup(server): - # Launch firefox + # Launch the browser app (choose one of these below) world.browser = Browser('chrome') + # world.browser = Browser('firefox') @before.each_scenario diff --git a/lms/djangoapps/terrain/factories.py b/common/djangoapps/terrain/factories.py similarity index 100% rename from lms/djangoapps/terrain/factories.py rename to common/djangoapps/terrain/factories.py diff --git a/lms/djangoapps/terrain/steps.py b/common/djangoapps/terrain/steps.py similarity index 100% rename from lms/djangoapps/terrain/steps.py rename to common/djangoapps/terrain/steps.py diff --git a/common/lib/tempdir.py b/common/lib/tempdir.py new file mode 100644 index 0000000000..0acd92ba33 --- /dev/null +++ b/common/lib/tempdir.py @@ -0,0 +1,17 @@ +"""Make temporary directories nicely.""" + +import atexit +import os.path +import shutil +import tempfile + +def mkdtemp_clean(suffix="", prefix="tmp", dir=None): + """Just like mkdtemp, but the directory will be deleted when the process ends.""" + the_dir = tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=dir) + atexit.register(cleanup_tempdir, the_dir) + return the_dir + +def cleanup_tempdir(the_dir): + """Called on process exit to remove a temp directory.""" + if os.path.exists(the_dir): + shutil.rmtree(the_dir) diff 
--git a/common/lib/xmodule/xmodule/capa_module.py b/common/lib/xmodule/xmodule/capa_module.py index d806ec7913..4635cc6871 100644 --- a/common/lib/xmodule/xmodule/capa_module.py +++ b/common/lib/xmodule/xmodule/capa_module.py @@ -703,15 +703,15 @@ class CapaDescriptor(RawDescriptor): def get_context(self): _context = RawDescriptor.get_context(self) - _context.update({'markdown': self.metadata.get('markdown', '')}) + _context.update({'markdown': self.metadata.get('markdown', ''), + 'enable_markdown' : 'markdown' in self.metadata}) return _context @property def editable_metadata_fields(self): - """Remove metadata from the editable fields since it has its own editor""" - subset = super(CapaDescriptor, self).editable_metadata_fields - if 'markdown' in subset: - subset.remove('markdown') + """Remove any metadata from the editable fields which have their own editor or shouldn't be edited by user.""" + subset = [field for field in super(CapaDescriptor,self).editable_metadata_fields + if field not in ['markdown', 'empty']] return subset diff --git a/common/lib/xmodule/xmodule/combined_open_ended_module.py b/common/lib/xmodule/xmodule/combined_open_ended_module.py index 2da15a4086..ee69d925d0 100644 --- a/common/lib/xmodule/xmodule/combined_open_ended_module.py +++ b/common/lib/xmodule/xmodule/combined_open_ended_module.py @@ -1,25 +1,13 @@ -import copy -from fs.errors import ResourceNotFoundError -import itertools import json import logging from lxml import etree -from lxml.html import rewrite_links -from path import path -import os -import sys from pkg_resources import resource_string -from .capa_module import only_one, ComplexEncoder from .editing_module import EditingDescriptor -from .html_checker import check_html -from progress import Progress -from .stringify import stringify_children from .x_module import XModule from .xml_module import XmlDescriptor -from xmodule.modulestore import Location -from combined_open_ended_modulev1 import CombinedOpenEndedV1Module, 
CombinedOpenEndedV1Descriptor +from xmodule.open_ended_grading_classes.combined_open_ended_modulev1 import CombinedOpenEndedV1Module, CombinedOpenEndedV1Descriptor log = logging.getLogger("mitx.courseware") @@ -120,11 +108,13 @@ class CombinedOpenEndedModule(XModule): instance_state = {} self.version = self.metadata.get('version', DEFAULT_VERSION) + version_error_string = "Version of combined open ended module {0} is not correct. Going with version {1}" if not isinstance(self.version, basestring): try: self.version = str(self.version) except: - log.error("Version {0} is not correct. Going with version {1}".format(self.version, DEFAULT_VERSION)) + #This is a dev_facing_error + log.info(version_error_string.format(self.version, DEFAULT_VERSION)) self.version = DEFAULT_VERSION versions = [i[0] for i in VERSION_TUPLES] @@ -134,7 +124,8 @@ class CombinedOpenEndedModule(XModule): try: version_index = versions.index(self.version) except: - log.error("Version {0} is not correct. Going with version {1}".format(self.version, DEFAULT_VERSION)) + #This is a dev_facing_error + log.error(version_error_string.format(self.version, DEFAULT_VERSION)) self.version = DEFAULT_VERSION version_index = versions.index(self.version) @@ -217,4 +208,4 @@ class CombinedOpenEndedDescriptor(XmlDescriptor, EditingDescriptor): for child in ['task']: add_child(child) - return elt \ No newline at end of file + return elt diff --git a/common/lib/xmodule/xmodule/combined_open_ended_rubric.py b/common/lib/xmodule/xmodule/combined_open_ended_rubric.py deleted file mode 100644 index 689103a86a..0000000000 --- a/common/lib/xmodule/xmodule/combined_open_ended_rubric.py +++ /dev/null @@ -1,169 +0,0 @@ -import logging -from lxml import etree - -log = logging.getLogger(__name__) - - -class RubricParsingError(Exception): - def __init__(self, msg): - self.msg = msg - - -class CombinedOpenEndedRubric(object): - - def __init__ (self, system, view_only = False): - self.has_score = False - self.view_only = 
view_only - self.system = system - - def render_rubric(self, rubric_xml): - ''' - render_rubric: takes in an xml string and outputs the corresponding - html for that xml, given the type of rubric we're generating - Input: - rubric_xml: an string that has not been parsed into xml that - represents this particular rubric - Output: - html: the html that corresponds to the xml given - ''' - success = False - try: - rubric_categories = self.extract_categories(rubric_xml) - max_scores = map((lambda cat: cat['options'][-1]['points']), rubric_categories) - max_score = max(max_scores) - html = self.system.render_template('open_ended_rubric.html', - {'categories': rubric_categories, - 'has_score': self.has_score, - 'view_only': self.view_only, - 'max_score': max_score}) - success = True - except: - error_message = "[render_rubric] Could not parse the rubric with xml: {0}".format(rubric_xml) - log.error(error_message) - raise RubricParsingError(error_message) - return success, html - - def check_if_rubric_is_parseable(self, rubric_string, location, max_score_allowed, max_score): - success, rubric_feedback = self.render_rubric(rubric_string) - if not success: - error_message = "Could not parse rubric : {0} for location {1}".format(rubric_string, location.url()) - log.error(error_message) - raise RubricParsingError(error_message) - - rubric_categories = self.extract_categories(rubric_string) - total = 0 - for category in rubric_categories: - total = total + len(category['options']) - 1 - if len(category['options']) > (max_score_allowed + 1): - error_message = "Number of score points in rubric {0} higher than the max allowed, which is {1}".format( - len(category['options']), max_score_allowed) - log.error(error_message) - raise RubricParsingError(error_message) - - if total != max_score: - error_msg = "The max score {0} for problem {1} does not match the total number of points in the rubric {2}".format( - max_score, location, total) - log.error(error_msg) - raise 
RubricParsingError(error_msg) - - def extract_categories(self, element): - ''' - Contstruct a list of categories such that the structure looks like: - [ { category: "Category 1 Name", - options: [{text: "Option 1 Name", points: 0}, {text:"Option 2 Name", points: 5}] - }, - { category: "Category 2 Name", - options: [{text: "Option 1 Name", points: 0}, - {text: "Option 2 Name", points: 1}, - {text: "Option 3 Name", points: 2]}] - - ''' - if isinstance(element, basestring): - element = etree.fromstring(element) - categories = [] - for category in element: - if category.tag != 'category': - raise RubricParsingError("[extract_categories] Expected a tag: got {0} instead".format(category.tag)) - else: - categories.append(self.extract_category(category)) - return categories - - - def extract_category(self, category): - ''' - construct an individual category - {category: "Category 1 Name", - options: [{text: "Option 1 text", points: 1}, - {text: "Option 2 text", points: 2}]} - - all sorting and auto-point generation occurs in this function - ''' - descriptionxml = category[0] - optionsxml = category[1:] - scorexml = category[1] - score = None - if scorexml.tag == 'score': - score_text = scorexml.text - optionsxml = category[2:] - score = int(score_text) - self.has_score = True - # if we are missing the score tag and we are expecting one - elif self.has_score: - raise RubricParsingError("[extract_category] Category {0} is missing a score".format(descriptionxml.text)) - - - # parse description - if descriptionxml.tag != 'description': - raise RubricParsingError("[extract_category]: expected description tag, got {0} instead".format(descriptionxml.tag)) - - description = descriptionxml.text - - cur_points = 0 - options = [] - autonumbering = True - # parse options - for option in optionsxml: - if option.tag != 'option': - raise RubricParsingError("[extract_category]: expected option tag, got {0} instead".format(option.tag)) - else: - pointstr = option.get("points") - if 
pointstr: - autonumbering = False - # try to parse this into an int - try: - points = int(pointstr) - except ValueError: - raise RubricParsingError("[extract_category]: expected points to have int, got {0} instead".format(pointstr)) - elif autonumbering: - # use the generated one if we're in the right mode - points = cur_points - cur_points = cur_points + 1 - else: - raise Exception("[extract_category]: missing points attribute. Cannot continue to auto-create points values after a points value is explicitly defined.") - - selected = score == points - optiontext = option.text - options.append({'text': option.text, 'points': points, 'selected': selected}) - - # sort and check for duplicates - options = sorted(options, key=lambda option: option['points']) - CombinedOpenEndedRubric.validate_options(options) - - return {'description': description, 'options': options} - - - @staticmethod - def validate_options(options): - ''' - Validates a set of options. This can and should be extended to filter out other bad edge cases - ''' - if len(options) == 0: - raise RubricParsingError("[extract_category]: no options associated with this category") - if len(options) == 1: - return - prev = options[0]['points'] - for option in options[1:]: - if prev == option['points']: - raise RubricParsingError("[extract_category]: found duplicate point values between two different options") - else: - prev = option['points'] diff --git a/common/lib/xmodule/xmodule/css/combinedopenended/display.scss b/common/lib/xmodule/xmodule/css/combinedopenended/display.scss index 8d921f828b..20700ab092 100644 --- a/common/lib/xmodule/xmodule/css/combinedopenended/display.scss +++ b/common/lib/xmodule/xmodule/css/combinedopenended/display.scss @@ -24,14 +24,11 @@ section.combined-open-ended { @include clearfix; .status-container { - float:right; - width:40%; + padding-bottom: 5px; } .item-container { - float:left; - width: 53%; - padding-bottom: 50px; + padding-bottom: 10px; } .result-container @@ -46,14 
+43,26 @@ section.combined-open-ended { } } +section.legend-container { + .legenditem { + background-color : #d4d4d4; + font-size: .9em; + padding: 2px; + display: inline; + width: 20%; + } + margin-bottom: 5px; +} + section.combined-open-ended-status { .statusitem { - background-color: #FAFAFA; color: #2C2C2C; - font-family: monospace; - font-size: 1em; - padding: 10px; + background-color : #d4d4d4; + font-size: .9em; + padding: 2px; + display: inline; + width: 20%; .show-results { margin-top: .3em; text-align:right; @@ -61,12 +70,12 @@ section.combined-open-ended-status { .show-results-button { font: 1em monospace; } - } + } .statusitem-current { - background-color: #d4d4d4; + background-color: #B2B2B2; color: #222; - } + } span { &.unanswered { @@ -98,8 +107,29 @@ section.combined-open-ended-status { } } -div.result-container { +div.combined-rubric-container { + ul.rubric-list{ + list-style-type: none; + padding:0; + margin:0; + li { + &.rubric-list-item{ + margin-bottom: 2px; + padding: 0px; + } + } + } + span.rubric-category { + font-size: .9em; + } + padding-bottom: 5px; + padding-top: 10px; +} + +div.result-container { + padding-top: 10px; + padding-bottom: 5px; .evaluation { p { @@ -113,9 +143,8 @@ div.result-container { } .evaluation-response { - margin-bottom: 10px; + margin-bottom: 2px; header { - text-align: right; a { font-size: .85em; } @@ -198,20 +227,6 @@ div.result-container { } } - .result-correct { - background: url('../images/correct-icon.png') left 20px no-repeat; - .result-actual-output { - color: #090; - } - } - - .result-incorrect { - background: url('../images/incorrect-icon.png') left 20px no-repeat; - .result-actual-output { - color: #B00; - } - } - .markup-text{ margin: 5px; padding: 20px 0px 15px 50px; @@ -229,6 +244,16 @@ div.result-container { } } } + .rubric-result-container { + .rubric-result { + font-size: .9em; + padding: 2px; + display: inline-table; + } + padding: 2px; + margin: 0px; + display : inline; + } } @@ -404,7 +429,7 @@ 
section.open-ended-child { div.short-form-response { background: #F6F6F6; border: 1px solid #ddd; - margin-bottom: 20px; + margin-bottom: 0px; overflow-y: auto; height: 200px; @include clearfix; @@ -478,6 +503,18 @@ section.open-ended-child { margin-left: .75rem; } + ul.rubric-list{ + list-style-type: none; + padding:0; + margin:0; + li { + &.rubric-list-item{ + margin-bottom: 0px; + padding: 0px; + } + } + } + ol { list-style: decimal outside none; margin-bottom: lh(); @@ -503,9 +540,8 @@ section.open-ended-child { } li { - line-height: 1.4em; - margin-bottom: lh(.5); - + margin-bottom: 0px; + padding: 0px; &:last-child { margin-bottom: 0; } diff --git a/common/lib/xmodule/xmodule/css/html/display.scss b/common/lib/xmodule/xmodule/css/html/display.scss index 956923c6d0..93138ac5a9 100644 --- a/common/lib/xmodule/xmodule/css/html/display.scss +++ b/common/lib/xmodule/xmodule/css/html/display.scss @@ -49,10 +49,18 @@ p { em, i { font-style: italic; + + span { + font-style: italic; + } } strong, b { font-weight: bold; + + span { + font-weight: bold; + } } p + p, ul + p, ol + p { diff --git a/common/lib/xmodule/xmodule/html_module.py b/common/lib/xmodule/xmodule/html_module.py index af1ce0ad80..456ea3cf10 100644 --- a/common/lib/xmodule/xmodule/html_module.py +++ b/common/lib/xmodule/xmodule/html_module.py @@ -172,6 +172,13 @@ class HtmlDescriptor(XmlDescriptor, EditingDescriptor): elt.set("filename", relname) return elt + @property + def editable_metadata_fields(self): + """Remove any metadata from the editable fields which have their own editor or shouldn't be edited by user.""" + subset = [field for field in super(HtmlDescriptor,self).editable_metadata_fields + if field not in ['empty']] + return subset + class AboutDescriptor(HtmlDescriptor): """ diff --git a/common/lib/xmodule/xmodule/js/src/combinedopenended/display.coffee b/common/lib/xmodule/xmodule/js/src/combinedopenended/display.coffee index ae63171ed4..39c91d8c70 100644 --- 
a/common/lib/xmodule/xmodule/js/src/combinedopenended/display.coffee +++ b/common/lib/xmodule/xmodule/js/src/combinedopenended/display.coffee @@ -4,11 +4,11 @@ class @Rubric # finds the scores for each rubric category @get_score_list: () => # find the number of categories: - num_categories = $('table.rubric tr').length + num_categories = $('.rubric-category').length score_lst = [] # get the score for each one - for i in [0..(num_categories-2)] + for i in [0..(num_categories-1)] score = $("input[name='score-selection-#{i}']:checked").val() score_lst.push(score) @@ -23,9 +23,8 @@ class @Rubric @check_complete: () -> # check to see whether or not any categories have not been scored - num_categories = $('table.rubric tr').length - # -2 because we want to skip the header - for i in [0..(num_categories-2)] + num_categories = $('.rubric-category').length + for i in [0..(num_categories-1)] score = $("input[name='score-selection-#{i}']:checked").val() if score == undefined return false @@ -52,22 +51,30 @@ class @CombinedOpenEnded @reset_button.click @reset @next_problem_button = @$('.next-step-button') @next_problem_button.click @next_problem + @status_container = @$('.status-elements') @show_results_button=@$('.show-results-button') @show_results_button.click @show_results + @question_header = @$('.question-header') + @question_header.click @collapse_question + # valid states: 'initial', 'assessing', 'post_assessment', 'done' Collapsible.setCollapsibles(@el) @submit_evaluation_button = $('.submit-evaluation-button') @submit_evaluation_button.click @message_post @results_container = $('.result-container') + @combined_rubric_container = $('.combined-rubric-container') + + @legend_container= $('.legend-container') + @show_legend_current() # Where to put the rubric once we load it @el = $(element).find('section.open-ended-child') @errors_area = @$('.error') @answer_area = @$('textarea.answer') - + @prompt_container = @$('.prompt') @rubric_wrapper = @$('.rubric-wrapper') 
@hint_wrapper = @$('.hint-wrapper') @message_wrapper = @$('.message-wrapper') @@ -82,11 +89,22 @@ class @CombinedOpenEnded @can_upload_files = false @open_ended_child= @$('.open-ended-child') + @out_of_sync_message = 'The problem state got out of sync. Try reloading the page.' + + if @task_number>1 + @prompt_hide() + else if @task_number==1 and @child_state!='initial' + @prompt_hide() + @find_assessment_elements() @find_hint_elements() @rebind() + if @task_number>1 + @show_combined_rubric_current() + @show_results_current() + # locally scoped jquery. $: (selector) -> $(selector, @el) @@ -102,7 +120,7 @@ class @CombinedOpenEnded Collapsible.setCollapsibles(@results_container) show_results: (event) => - status_item = $(event.target).parent().parent() + status_item = $(event.target).parent() status_number = status_item.data('status-number') data = {'task_number' : status_number} $.postWithPrefix "#{@ajax_url}/get_results", data, (response) => @@ -115,6 +133,27 @@ class @CombinedOpenEnded else @gentle_alert response.error + show_combined_rubric_current: () => + data = {} + $.postWithPrefix "#{@ajax_url}/get_combined_rubric", data, (response) => + if response.success + @combined_rubric_container.after(response.html).remove() + @combined_rubric_container= $('div.combined_rubric_container') + + show_status_current: () => + data = {} + $.postWithPrefix "#{@ajax_url}/get_status", data, (response) => + if response.success + @status_container.after(response.html).remove() + @status_container= $('.status-elements') + + show_legend_current: () => + data = {} + $.postWithPrefix "#{@ajax_url}/get_legend", data, (response) => + if response.success + @legend_container.after(response.html).remove() + @legend_container= $('.legend-container') + message_post: (event)=> Logger.log 'message_post', @answers external_grader_message=$(event.target).parent().parent().parent() @@ -156,6 +195,11 @@ class @CombinedOpenEnded @next_problem_button.hide() @hide_file_upload() 
@hint_area.attr('disabled', false) + if @task_number>1 or @child_state!='initial' + @show_status_current() + + if @task_number==1 and @child_state=='assessing' + @prompt_hide() if @child_state == 'done' @rubric_wrapper.hide() if @child_type=="openended" @@ -251,13 +295,14 @@ class @CombinedOpenEnded $.ajaxWithPrefix("#{@ajax_url}/save_answer",settings) else - @errors_area.html('Problem state got out of sync. Try reloading the page.') + @errors_area.html(@out_of_sync_message) save_assessment: (event) => event.preventDefault() if @child_state == 'assessing' && Rubric.check_complete() checked_assessment = Rubric.get_total_score() - data = {'assessment' : checked_assessment} + score_list = Rubric.get_score_list() + data = {'assessment' : checked_assessment, 'score_list' : score_list} $.postWithPrefix "#{@ajax_url}/save_assessment", data, (response) => if response.success @child_state = response.state @@ -267,13 +312,12 @@ class @CombinedOpenEnded @find_hint_elements() else if @child_state == 'done' @rubric_wrapper.hide() - @message_wrapper.html(response.message_html) @rebind() else @errors_area.html(response.error) else - @errors_area.html('Problem state got out of sync. Try reloading the page.') + @errors_area.html(@out_of_sync_message) save_hint: (event) => event.preventDefault() @@ -288,7 +332,7 @@ class @CombinedOpenEnded else @errors_area.html(response.error) else - @errors_area.html('Problem state got out of sync. Try reloading the page.') + @errors_area.html(@out_of_sync_message) skip_post_assessment: => if @child_state == 'post_assessment' @@ -300,7 +344,7 @@ class @CombinedOpenEnded else @errors_area.html(response.error) else - @errors_area.html('Problem state got out of sync. Try reloading the page.') + @errors_area.html(@out_of_sync_message) reset: (event) => event.preventDefault() @@ -320,7 +364,7 @@ class @CombinedOpenEnded else @errors_area.html(response.error) else - @errors_area.html('Problem state got out of sync. 
Try reloading the page.') + @errors_area.html(@out_of_sync_message) next_problem: => if @child_state == 'done' @@ -343,7 +387,7 @@ class @CombinedOpenEnded else @errors_area.html(response.error) else - @errors_area.html('Problem state got out of sync. Try reloading the page.') + @errors_area.html(@out_of_sync_message) gentle_alert: (msg) => if @el.find('.open-ended-alert').length @@ -367,13 +411,13 @@ class @CombinedOpenEnded window.queuePollerID = window.setTimeout(@poll, 10000) setup_file_upload: => - if window.File and window.FileReader and window.FileList and window.Blob - if @accept_file_upload == "True" - @can_upload_files = true - @file_upload_area.html('') - @file_upload_area.show() - else - @gentle_alert 'File uploads are required for this question, but are not supported in this browser. Try the newest version of google chrome. Alternatively, if you have uploaded the image to the web, you can paste a link to it into the answer box.' + if @accept_file_upload == "True" + if window.File and window.FileReader and window.FileList and window.Blob + @can_upload_files = true + @file_upload_area.html('') + @file_upload_area.show() + else + @gentle_alert 'File uploads are required for this question, but are not supported in this browser. Try the newest version of google chrome. Alternatively, if you have uploaded the image to the web, you can paste a link to it into the answer box.' 
hide_file_upload: => if @accept_file_upload == "True" @@ -390,3 +434,26 @@ class @CombinedOpenEnded # wrap this so that it can be mocked reload: -> location.reload() + + collapse_question: () => + @prompt_container.slideToggle() + @prompt_container.toggleClass('open') + if @question_header.text() == "(Hide)" + new_text = "(Show)" + else + new_text = "(Hide)" + @question_header.text(new_text) + + prompt_show: () => + if @prompt_container.is(":hidden")==true + @prompt_container.slideToggle() + @prompt_container.toggleClass('open') + @question_header.text("(Hide)") + + prompt_hide: () => + if @prompt_container.is(":visible")==true + @prompt_container.slideToggle() + @prompt_container.toggleClass('open') + @question_header.text("(Show)") + + diff --git a/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading_problem.coffee b/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading_problem.coffee index deeb82900b..63c58e1766 100644 --- a/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading_problem.coffee +++ b/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading_problem.coffee @@ -175,17 +175,23 @@ class @PeerGradingProblem @prompt_container = $('.prompt-container') @rubric_container = $('.rubric-container') @flag_student_container = $('.flag-student-container') + @answer_unknown_container = $('.answer-unknown-container') @calibration_panel = $('.calibration-panel') @grading_panel = $('.grading-panel') @content_panel = $('.content-panel') @grading_message = $('.grading-message') @grading_message.hide() + @question_header = $('.question-header') + @question_header.click @collapse_question @grading_wrapper =$('.grading-wrapper') @calibration_feedback_panel = $('.calibration-feedback') @interstitial_page = $('.interstitial-page') @interstitial_page.hide() + @calibration_interstitial_page = $('.calibration-interstitial-page') + @calibration_interstitial_page.hide() + @error_container = $('.error-container') @submission_key_input = 
$("input[name='submission-key']") @@ -201,7 +207,10 @@ class @PeerGradingProblem @action_button = $('.action-button') @calibration_feedback_button = $('.calibration-feedback-button') @interstitial_page_button = $('.interstitial-page-button') + @calibration_interstitial_page_button = $('.calibration-interstitial-page-button') @flag_student_checkbox = $('.flag-checkbox') + @answer_unknown_checkbox = $('.answer-unknown-checkbox') + @collapse_question() Collapsible.setCollapsibles(@content_panel) @@ -210,12 +219,21 @@ class @PeerGradingProblem @calibration_feedback_button.click => @calibration_feedback_panel.hide() @grading_wrapper.show() + @gentle_alert "Calibration essay saved. Fetched the next essay." @is_calibrated_check() @interstitial_page_button.click => @interstitial_page.hide() @is_calibrated_check() + @calibration_interstitial_page_button.click => + @calibration_interstitial_page.hide() + @is_calibrated_check() + + @calibration_feedback_button.hide() + @calibration_feedback_panel.hide() + @error_container.hide() + @is_calibrated_check() @@ -233,6 +251,9 @@ class @PeerGradingProblem fetch_submission_essay: () => @backend.post('get_next_submission', {location: @location}, @render_submission) + gentle_alert: (msg) => + @grading_message.fadeIn() + @grading_message.html("

    " + msg + "

    ") construct_data: () -> data = @@ -243,6 +264,7 @@ class @PeerGradingProblem submission_key: @submission_key_input.val() feedback: @feedback_area.val() submission_flagged: @flag_student_checkbox.is(':checked') + answer_unknown: @answer_unknown_checkbox.is(':checked') return data @@ -273,6 +295,9 @@ class @PeerGradingProblem else if response.calibrated and @calibration == true @calibration = false @render_interstitial_page() + else if not response.calibrated and @calibration==null + @calibration=true + @render_calibration_interstitial_page() else @calibration = true @fetch_calibration_essay() @@ -296,7 +321,7 @@ class @PeerGradingProblem if response.success @is_calibrated_check() @grading_message.fadeIn() - @grading_message.html("

    Grade sent successfully.

    ") + @grading_message.html("

    Successfully saved your feedback. Fetched the next essay.

    ") else if response.error @render_error(response.error) @@ -308,6 +333,7 @@ class @PeerGradingProblem # check to see whether or not any categories have not been scored if Rubric.check_complete() # show button if we have scores for all categories + @grading_message.hide() @show_submit_button() @grade = Rubric.get_total_score() @@ -323,7 +349,7 @@ class @PeerGradingProblem if response.success # load in all the data - @submission_container.html("

    Training Essay

    ") + @submission_container.html("") @render_submission_data(response) # TODO: indicate that we're in calibration mode @calibration_panel.addClass('current-state') @@ -337,6 +363,9 @@ class @PeerGradingProblem @calibration_panel.find('.grading-text').hide() @grading_panel.find('.grading-text').hide() @flag_student_container.hide() + @answer_unknown_container.hide() + + @feedback_area.val("") @submit_button.unbind('click') @submit_button.click @submit_calibration_essay @@ -350,7 +379,7 @@ class @PeerGradingProblem render_submission: (response) => if response.success @submit_button.hide() - @submission_container.html("

    Submitted Essay

    ") + @submission_container.html("") @render_submission_data(response) @calibration_panel.removeClass('current-state') @@ -364,6 +393,8 @@ class @PeerGradingProblem @calibration_panel.find('.grading-text').show() @grading_panel.find('.grading-text').show() @flag_student_container.show() + @answer_unknown_container.show() + @feedback_area.val("") @submit_button.unbind('click') @submit_button.click @submit_grade @@ -408,18 +439,25 @@ class @PeerGradingProblem actual_score = parseInt(response.actual_score) if score == actual_score - calibration_wrapper.append("

    Congratulations! Your score matches the actual score!

    ") + calibration_wrapper.append("

    Your score matches the actual score!

    ") else - calibration_wrapper.append("

    Please try to understand the grading critera better to be more accurate next time.

    ") + calibration_wrapper.append("

    You may want to review the rubric again.

    ") # disable score selection and submission from the grading interface $("input[name='score-selection']").attr('disabled', true) @submit_button.hide() + @calibration_feedback_button.show() render_interstitial_page: () => @content_panel.hide() + @grading_message.hide() @interstitial_page.show() + render_calibration_interstitial_page: () => + @content_panel.hide() + @action_button.hide() + @calibration_interstitial_page.show() + render_error: (error_message) => @error_container.show() @calibration_feedback_panel.hide() @@ -433,3 +471,12 @@ class @PeerGradingProblem setup_score_selection: (max_score) => # And now hook up an event handler again $("input[class='score-selection']").change @graded_callback + + collapse_question: () => + @prompt_container.slideToggle() + @prompt_container.toggleClass('open') + if @question_header.text() == "(Hide)" + new_text = "(Show)" + else + new_text = "(Hide)" + @question_header.text(new_text) diff --git a/common/lib/xmodule/xmodule/js/src/videoalpha/display/html5_video.js b/common/lib/xmodule/xmodule/js/src/videoalpha/display/html5_video.js index acdc03932c..c3cc462ab8 100644 --- a/common/lib/xmodule/xmodule/js/src/videoalpha/display/html5_video.js +++ b/common/lib/xmodule/xmodule/js/src/videoalpha/display/html5_video.js @@ -221,6 +221,15 @@ this.HTML5Video = (function () { // and end playing at the specified end time. After it was paused, or when a seek operation happeded, // the starting time and ending time will reset to the beginning and the end of the video respectively. this.video.addEventListener('canplay', function () { + // Because firefox triggers 'canplay' event every time when 'currentTime' property + // changes, we must make sure that this block of code runs only once. Otherwise, + // this will be an endless loop ('currentTime' property is changed below). + // + // Chrome is immune to this behavior. 
+ if (_this.playerState !== HTML5Video.PlayerState.UNSTARTED) { + return; + } + _this.playerState = HTML5Video.PlayerState.PAUSED; if (_this.start > _this.video.duration) { diff --git a/common/lib/xmodule/xmodule/mako_module.py b/common/lib/xmodule/xmodule/mako_module.py index dab5d5e85b..da96bfa212 100644 --- a/common/lib/xmodule/xmodule/mako_module.py +++ b/common/lib/xmodule/xmodule/mako_module.py @@ -44,5 +44,6 @@ class MakoModuleDescriptor(XModuleDescriptor): # cdodge: encapsulate a means to expose "editable" metadata fields (i.e. not internal system metadata) @property def editable_metadata_fields(self): - subset = [name for name in self.metadata.keys() if name not in self.system_metadata_fields] + subset = [name for name in self.metadata.keys() if name not in self.system_metadata_fields and + name not in self._inherited_metadata] return subset diff --git a/common/lib/xmodule/xmodule/modulestore/mongo.py b/common/lib/xmodule/xmodule/modulestore/mongo.py index f4db62ac31..012efb0c27 100644 --- a/common/lib/xmodule/xmodule/modulestore/mongo.py +++ b/common/lib/xmodule/xmodule/modulestore/mongo.py @@ -1,11 +1,13 @@ import pymongo import sys import logging +import copy from bson.son import SON from fs.osfs import OSFS from itertools import repeat from path import path +from datetime import datetime, timedelta from importlib import import_module from xmodule.errortracker import null_error_tracker, exc_info_to_str @@ -27,9 +29,11 @@ class CachingDescriptorSystem(MakoDescriptorSystem): """ A system that has a cache of module json that it will use to load modules from, with a backup of calling to the underlying modulestore for more data + TODO (cdodge) when the 'split module store' work has been completed we can remove all + references to metadata_inheritance_tree """ def __init__(self, modulestore, module_data, default_class, resources_fs, - error_tracker, render_template): + error_tracker, render_template, metadata_inheritance_tree = None): """ modulestore: the 
module store that can be used to retrieve additional modules @@ -54,6 +58,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem): # cdodge: other Systems have a course_id attribute defined. To keep things consistent, let's # define an attribute here as well, even though it's None self.course_id = None + self.metadata_inheritance_tree = metadata_inheritance_tree def load_item(self, location): location = Location(location) @@ -61,11 +66,13 @@ class CachingDescriptorSystem(MakoDescriptorSystem): if json_data is None: return self.modulestore.get_item(location) else: - # TODO (vshnayder): metadata inheritance is somewhat broken because mongo, doesn't - # always load an entire course. We're punting on this until after launch, and then - # will build a proper course policy framework. + # load the module and apply the inherited metadata try: - return XModuleDescriptor.load_from_json(json_data, self, self.default_class) + module = XModuleDescriptor.load_from_json(json_data, self, self.default_class) + if self.metadata_inheritance_tree is not None: + metadata_to_inherit = self.metadata_inheritance_tree.get('parent_metadata', {}).get(location.url(),{}) + module.inherit_metadata(metadata_to_inherit) + return module except: return ErrorDescriptor.from_json( json_data, @@ -142,6 +149,82 @@ class MongoModuleStore(ModuleStoreBase): self.fs_root = path(fs_root) self.error_tracker = error_tracker self.render_template = render_template + self.metadata_inheritance_cache = {} + + def get_metadata_inheritance_tree(self, location): + ''' + TODO (cdodge) This method can be deleted when the 'split module store' work has been completed + ''' + + # get all collections in the course, this query should not return any leaf nodes + query = { '_id.org' : location.org, + '_id.course' : location.course, + '_id.revision' : None, + 'definition.children':{'$ne': []} + } + # we just want the Location, children, and metadata + record_filter = {'_id':1,'definition.children':1,'metadata':1} + + # call 
out to the DB + resultset = self.collection.find(query, record_filter) + + results_by_url = {} + root = None + + # now go through the results and order them by the location url + for result in resultset: + location = Location(result['_id']) + results_by_url[location.url()] = result + if location.category == 'course': + root = location.url() + + # now traverse the tree and compute down the inherited metadata + metadata_to_inherit = {} + def _compute_inherited_metadata(url): + my_metadata = results_by_url[url]['metadata'] + for key in my_metadata.keys(): + if key not in XModuleDescriptor.inheritable_metadata: + del my_metadata[key] + results_by_url[url]['metadata'] = my_metadata + + # go through all the children and recurse, but only if we have + # in the result set. Remember results will not contain leaf nodes + for child in results_by_url[url].get('definition',{}).get('children',[]): + if child in results_by_url: + new_child_metadata = copy.deepcopy(my_metadata) + new_child_metadata.update(results_by_url[child]['metadata']) + results_by_url[child]['metadata'] = new_child_metadata + metadata_to_inherit[child] = new_child_metadata + _compute_inherited_metadata(child) + else: + # this is likely a leaf node, so let's record what metadata we need to inherit + metadata_to_inherit[child] = my_metadata + + if root is not None: + _compute_inherited_metadata(root) + + cache = {'parent_metadata': metadata_to_inherit, + 'timestamp' : datetime.now()} + + return cache + + def get_cached_metadata_inheritance_tree(self, location, max_age_allowed): + ''' + TODO (cdodge) This method can be deleted when the 'split module store' work has been completed + ''' + cache_name = '{0}/{1}'.format(location.org, location.course) + cache = self.metadata_inheritance_cache.get(cache_name,{'parent_metadata': {}, + 'timestamp': datetime.now() - timedelta(hours=1)}) + age = (datetime.now() - cache['timestamp']) + + if age.seconds >= max_age_allowed: + logging.debug('loading entire inheritance tree 
for {0}'.format(cache_name)) + cache = self.get_metadata_inheritance_tree(location) + self.metadata_inheritance_cache[cache_name] = cache + + return cache + + def _clean_item_data(self, item): """ @@ -196,6 +279,8 @@ class MongoModuleStore(ModuleStoreBase): resource_fs = OSFS(root) + # TODO (cdodge): When the 'split module store' work has been completed, we should remove + # the 'metadata_inheritance_tree' parameter system = CachingDescriptorSystem( self, data_cache, @@ -203,6 +288,7 @@ class MongoModuleStore(ModuleStoreBase): resource_fs, self.error_tracker, self.render_template, + metadata_inheritance_tree = self.get_cached_metadata_inheritance_tree(Location(item['location']), 60) ) return system.load_item(item['location']) @@ -261,11 +347,11 @@ class MongoModuleStore(ModuleStoreBase): descendents of the queried modules for more efficient results later in the request. The depth is counted in the number of calls to get_children() to cache. None indicates to cache all descendents. - """ location = Location.ensure_fully_specified(location) item = self._find_one(location) - return self._load_items([item], depth)[0] + module = self._load_items([item], depth)[0] + return module def get_instance(self, course_id, location, depth=0): """ @@ -285,7 +371,8 @@ class MongoModuleStore(ModuleStoreBase): sort=[('revision', pymongo.ASCENDING)], ) - return self._load_items(list(items), depth) + modules = self._load_items(list(items), depth) + return modules def clone_item(self, source, location): """ @@ -313,7 +400,7 @@ class MongoModuleStore(ModuleStoreBase): raise DuplicateItemError(location) - def get_course_for_item(self, location): + def get_course_for_item(self, location, depth=0): ''' VS[compat] cdodge: for a given Xmodule, return the course that it belongs to @@ -327,7 +414,7 @@ class MongoModuleStore(ModuleStoreBase): # know the 'name' parameter in this context, so we have # to assume there's only one item in this query even though we are not specifying a name 
course_search_location = ['i4x', location.org, location.course, 'course', None] - courses = self.get_items(course_search_location) + courses = self.get_items(course_search_location, depth=depth) # make sure we found exactly one match on this above course search found_cnt = len(courses) diff --git a/common/lib/xmodule/xmodule/modulestore/xml_exporter.py b/common/lib/xmodule/xmodule/modulestore/xml_exporter.py index 509a2c7db9..55844116c6 100644 --- a/common/lib/xmodule/xmodule/modulestore/xml_exporter.py +++ b/common/lib/xmodule/xmodule/modulestore/xml_exporter.py @@ -31,8 +31,15 @@ def export_to_xml(modulestore, contentstore, course_location, root_dir, course_d # export the grading policy policies_dir = export_fs.makeopendir('policies') course_run_policy_dir = policies_dir.makeopendir(course.location.name) - with course_run_policy_dir.open('grading_policy.json', 'w') as grading_policy: - grading_policy.write(dumps(course.definition['data']['grading_policy'])) + if 'grading_policy' in course.definition['data']: + with course_run_policy_dir.open('grading_policy.json', 'w') as grading_policy: + grading_policy.write(dumps(course.definition['data']['grading_policy'])) + + # export all of the course metadata in policy.json + with course_run_policy_dir.open('policy.json', 'w') as course_policy: + policy = {} + policy = {'course/' + course.location.name: course.metadata} + course_policy.write(dumps(policy)) def export_extra_content(export_fs, modulestore, course_location, category_type, dirname, file_suffix=''): diff --git a/common/lib/xmodule/xmodule/open_ended_grading_classes/__init__.py b/common/lib/xmodule/xmodule/open_ended_grading_classes/__init__.py new file mode 100644 index 0000000000..9aa77fde52 --- /dev/null +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/__init__.py @@ -0,0 +1 @@ +__author__ = 'vik' diff --git a/common/lib/xmodule/xmodule/combined_open_ended_modulev1.py 
b/common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_modulev1.py similarity index 77% rename from common/lib/xmodule/xmodule/combined_open_ended_modulev1.py rename to common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_modulev1.py index 8bd7df86c1..cc0a957e66 100644 --- a/common/lib/xmodule/xmodule/combined_open_ended_modulev1.py +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_modulev1.py @@ -1,39 +1,25 @@ -import copy -from fs.errors import ResourceNotFoundError -import itertools import json import logging from lxml import etree from lxml.html import rewrite_links -from path import path -import os -import sys - -from pkg_resources import resource_string - -from .capa_module import only_one, ComplexEncoder -from .editing_module import EditingDescriptor -from .html_checker import check_html -from progress import Progress -from .stringify import stringify_children -from .x_module import XModule -from .xml_module import XmlDescriptor -from xmodule.modulestore import Location +from xmodule.timeinfo import TimeInfo +from xmodule.capa_module import only_one, ComplexEncoder +from xmodule.editing_module import EditingDescriptor +from xmodule.html_checker import check_html +from xmodule.progress import Progress +from xmodule.stringify import stringify_children +from xmodule.x_module import XModule +from xmodule.xml_module import XmlDescriptor import self_assessment_module import open_ended_module -from combined_open_ended_rubric import CombinedOpenEndedRubric, RubricParsingError -from .stringify import stringify_children -import dateutil -import dateutil.parser -import datetime -from timeparse import parse_timedelta +from combined_open_ended_rubric import CombinedOpenEndedRubric, GRADER_TYPE_IMAGE_DICT, HUMAN_GRADER_TYPE, LEGEND_LIST log = logging.getLogger("mitx.courseware") # Set the default number of max attempts. 
Should be 1 for production # Set higher for debugging/testing # attempts specified in xml definition overrides this. -MAX_ATTEMPTS = 10000 +MAX_ATTEMPTS = 1 # Set maximum available number of points. # Overriden by max_score specified in xml. @@ -55,9 +41,13 @@ TRUE_DICT = ["True", True, "TRUE", "true"] HUMAN_TASK_TYPE = { 'selfassessment' : "Self Assessment", - 'openended' : "External Grader", + 'openended' : "edX Assessment", } +#Default value that controls whether or not to skip basic spelling checks in the controller +#Metadata overrides this +SKIP_BASIC_CHECKS = False + class CombinedOpenEndedV1Module(): """ This is a module that encapsulates all open ended grading (self assessment, peer assessment, etc). @@ -72,7 +62,7 @@ class CombinedOpenEndedV1Module(): 'save_assessment' -- Saves the student assessment (or external grader assessment) 'save_post_assessment' -- saves a post assessment (hint, feedback on feedback, etc) ajax actions implemented by combined open ended module are: - 'reset' -- resets the whole combined open ended module and returns to the first child module + 'reset' -- resets the whole combined open ended module and returns to the first child moduleresource_string 'next_problem' -- moves to the next child module 'get_results' -- gets results from a given child module @@ -89,14 +79,6 @@ class CombinedOpenEndedV1Module(): INTERMEDIATE_DONE = 'intermediate_done' DONE = 'done' - js = {'coffee': [resource_string(__name__, 'js/src/combinedopenended/display.coffee'), - resource_string(__name__, 'js/src/collapsible.coffee'), - resource_string(__name__, 'js/src/javascript_loader.coffee'), - ]} - js_module_name = "CombinedOpenEnded" - - css = {'scss': [resource_string(__name__, 'css/combinedopenended/display.scss')]} - def __init__(self, system, location, definition, descriptor, instance_state=None, shared_state=None, metadata = None, static_data = None, **kwargs): @@ -164,36 +146,25 @@ class CombinedOpenEndedV1Module(): self.max_attempts = 
int(self.metadata.get('attempts', MAX_ATTEMPTS)) self.is_scored = self.metadata.get('is_graded', IS_SCORED) in TRUE_DICT self.accept_file_upload = self.metadata.get('accept_file_upload', ACCEPT_FILE_UPLOAD) in TRUE_DICT + self.skip_basic_checks = self.metadata.get('skip_spelling_checks', SKIP_BASIC_CHECKS) display_due_date_string = self.metadata.get('due', None) - if display_due_date_string is not None: - try: - self.display_due_date = dateutil.parser.parse(display_due_date_string) - except ValueError: - log.error("Could not parse due date {0} for location {1}".format(display_due_date_string, location)) - raise - else: - self.display_due_date = None - + grace_period_string = self.metadata.get('graceperiod', None) - if grace_period_string is not None and self.display_due_date: - try: - self.grace_period = parse_timedelta(grace_period_string) - self.close_date = self.display_due_date + self.grace_period - except: - log.error("Error parsing the grace period {0} for location {1}".format(grace_period_string, location)) - raise - else: - self.grace_period = None - self.close_date = self.display_due_date + try: + self.timeinfo = TimeInfo(display_due_date_string, grace_period_string) + except: + log.error("Error parsing due date information in location {0}".format(location)) + raise + self.display_due_date = self.timeinfo.display_due_date # Used for progress / grading. Currently get credit just for # completion (doesn't matter if you self-assessed correct/incorrect). 
self._max_score = int(self.metadata.get('max_score', MAX_SCORE)) - rubric_renderer = CombinedOpenEndedRubric(system, True) + self.rubric_renderer = CombinedOpenEndedRubric(system, True) rubric_string = stringify_children(definition['rubric']) - rubric_renderer.check_if_rubric_is_parseable(rubric_string, location, MAX_SCORE_ALLOWED, self._max_score) + self.rubric_renderer.check_if_rubric_is_parseable(rubric_string, location, MAX_SCORE_ALLOWED, self._max_score) #Static data is passed to the child modules to render self.static_data = { @@ -203,7 +174,9 @@ class CombinedOpenEndedV1Module(): 'rubric': definition['rubric'], 'display_name': self.display_name, 'accept_file_upload': self.accept_file_upload, - 'close_date' : self.close_date, + 'close_date' : self.timeinfo.close_date, + 's3_interface' : self.system.s3_interface, + 'skip_basic_checks' : self.skip_basic_checks, } self.task_xml = definition['task_xml'] @@ -354,9 +327,10 @@ class CombinedOpenEndedV1Module(): 'state': self.state, 'task_count': len(self.task_xml), 'task_number': self.current_task_number + 1, - 'status': self.get_status(), + 'status': self.get_status(False), 'display_name': self.display_name, 'accept_file_upload': self.accept_file_upload, + 'legend_list' : LEGEND_LIST, } return context @@ -431,6 +405,9 @@ class CombinedOpenEndedV1Module(): last_score = task.latest_score() last_post_assessment = task.latest_post_assessment(self.system) last_post_feedback = "" + feedback_dicts = [{}] + grader_ids = [0] + submission_ids = [0] if task_type == "openended": last_post_assessment = task.latest_post_assessment(self.system, short_feedback=False, join_feedback=False) if isinstance(last_post_assessment, list): @@ -441,6 +418,18 @@ class CombinedOpenEndedV1Module(): else: last_post_evaluation = task.format_feedback_with_evaluation(self.system, last_post_assessment) last_post_assessment = last_post_evaluation + rubric_data = task._parse_score_msg(task.history[-1].get('post_assessment', ""), self.system) + 
rubric_scores = rubric_data['rubric_scores'] + grader_types = rubric_data['grader_types'] + feedback_items = rubric_data['feedback_items'] + feedback_dicts = rubric_data['feedback_dicts'] + grader_ids = rubric_data['grader_ids'] + submission_ids = rubric_data['submission_ids'] + elif task_type== "selfassessment": + rubric_scores = last_post_assessment + grader_types = ['SA'] + feedback_items = [''] + last_post_assessment = "" last_correctness = task.is_last_response_correct() max_score = task.max_score() state = task.state @@ -453,6 +442,16 @@ class CombinedOpenEndedV1Module(): human_state = task.HUMAN_NAMES[state] else: human_state = state + if len(grader_types)>0: + grader_type = grader_types[0] + else: + grader_type = "IN" + + if grader_type in HUMAN_GRADER_TYPE: + human_grader_name = HUMAN_GRADER_TYPE[grader_type] + else: + human_grader_name = grader_type + last_response_dict = { 'response': last_response, 'score': last_score, @@ -465,8 +464,15 @@ class CombinedOpenEndedV1Module(): 'correct': last_correctness, 'min_score_to_attempt': min_score_to_attempt, 'max_score_to_attempt': max_score_to_attempt, + 'rubric_scores' : rubric_scores, + 'grader_types' : grader_types, + 'feedback_items' : feedback_items, + 'grader_type' : grader_type, + 'human_grader_type' : human_grader_name, + 'feedback_dicts' : feedback_dicts, + 'grader_ids' : grader_ids, + 'submission_ids' : submission_ids, } - return last_response_dict def update_task_states(self): @@ -502,19 +508,95 @@ class CombinedOpenEndedV1Module(): pass return return_html + def get_rubric(self, get): + """ + Gets the results of a given grader via ajax. + Input: AJAX get dictionary + Output: Dictionary to be rendered via ajax that contains the result html. 
+ """ + all_responses = [] + loop_up_to_task = self.current_task_number+1 + for i in xrange(0,loop_up_to_task): + all_responses.append(self.get_last_response(i)) + rubric_scores = [all_responses[i]['rubric_scores'] for i in xrange(0,len(all_responses)) if len(all_responses[i]['rubric_scores'])>0 and all_responses[i]['grader_types'][0] in HUMAN_GRADER_TYPE.keys()] + grader_types = [all_responses[i]['grader_types'] for i in xrange(0,len(all_responses)) if len(all_responses[i]['grader_types'])>0 and all_responses[i]['grader_types'][0] in HUMAN_GRADER_TYPE.keys()] + feedback_items = [all_responses[i]['feedback_items'] for i in xrange(0,len(all_responses)) if len(all_responses[i]['feedback_items'])>0 and all_responses[i]['grader_types'][0] in HUMAN_GRADER_TYPE.keys()] + rubric_html = self.rubric_renderer.render_combined_rubric(stringify_children(self.static_data['rubric']), rubric_scores, + grader_types, feedback_items) + + response_dict = all_responses[-1] + context = { + 'results': rubric_html, + 'task_name' : 'Scored Rubric', + 'class_name' : 'combined-rubric-container' + } + html = self.system.render_template('combined_open_ended_results.html', context) + return {'html': html, 'success': True} + + def get_legend(self, get): + """ + Gets the results of a given grader via ajax. + Input: AJAX get dictionary + Output: Dictionary to be rendered via ajax that contains the result html. + """ + context = { + 'legend_list' : LEGEND_LIST, + } + html = self.system.render_template('combined_open_ended_legend.html', context) + return {'html': html, 'success': True} + def get_results(self, get): """ Gets the results of a given grader via ajax. Input: AJAX get dictionary Output: Dictionary to be rendered via ajax that contains the result html. 
""" - task_number = int(get['task_number']) self.update_task_states() - response_dict = self.get_last_response(task_number) - context = {'results': response_dict['post_assessment'], 'task_number': task_number + 1} + loop_up_to_task = self.current_task_number+1 + all_responses =[] + for i in xrange(0,loop_up_to_task): + all_responses.append(self.get_last_response(i)) + context_list = [] + for ri in all_responses: + for i in xrange(0,len(ri['rubric_scores'])): + feedback = ri['feedback_dicts'][i].get('feedback','') + rubric_data = self.rubric_renderer.render_rubric(stringify_children(self.static_data['rubric']), ri['rubric_scores'][i]) + if rubric_data['success']: + rubric_html = rubric_data['html'] + else: + rubric_html = '' + context = { + 'rubric_html': rubric_html, + 'grader_type': ri['grader_type'], + 'feedback' : feedback, + 'grader_id' : ri['grader_ids'][i], + 'submission_id' : ri['submission_ids'][i], + } + context_list.append(context) + feedback_table = self.system.render_template('open_ended_result_table.html', { + 'context_list' : context_list, + 'grader_type_image_dict' : GRADER_TYPE_IMAGE_DICT, + 'human_grader_types' : HUMAN_GRADER_TYPE, + 'rows': 50, + 'cols': 50, + }) + context = { + 'results': feedback_table, + 'task_name' : "Feedback", + 'class_name' : "result-container", + } html = self.system.render_template('combined_open_ended_results.html', context) return {'html': html, 'success': True} + def get_status_ajax(self, get): + """ + Gets the results of a given grader via ajax. + Input: AJAX get dictionary + Output: Dictionary to be rendered via ajax that contains the result html. + """ + html = self.get_status(True) + return {'html': html, 'success': True} + def handle_ajax(self, dispatch, get): """ This is called by courseware.module_render, to handle an AJAX call. 
@@ -529,7 +611,10 @@ class CombinedOpenEndedV1Module(): handlers = { 'next_problem': self.next_problem, 'reset': self.reset, - 'get_results': self.get_results + 'get_results': self.get_results, + 'get_combined_rubric': self.get_rubric, + 'get_status' : self.get_status_ajax, + 'get_legend' : self.get_legend, } if dispatch not in handlers: @@ -561,7 +646,10 @@ class CombinedOpenEndedV1Module(): if self.attempts > self.max_attempts: return { 'success': False, - 'error': 'Too many attempts.' + #This is a student_facing_error + 'error': ('You have attempted this question {0} times. ' + 'You are only allowed to attempt it {1} times.').format( + self.attempts, self.max_attempts) } self.state = self.INITIAL self.allow_reset = False @@ -593,7 +681,7 @@ class CombinedOpenEndedV1Module(): return json.dumps(state) - def get_status(self): + def get_status(self, render_via_ajax): """ Gets the status panel to be displayed at the top right. Input: None @@ -604,7 +692,13 @@ class CombinedOpenEndedV1Module(): task_data = self.get_last_response(i) task_data.update({'task_number': i + 1}) status.append(task_data) - context = {'status_list': status} + + context = { + 'status_list': status, + 'grader_type_image_dict' : GRADER_TYPE_IMAGE_DICT, + 'legend_list' : LEGEND_LIST, + 'render_via_ajax' : render_via_ajax, + } status_html = self.system.render_template("combined_open_ended_status.html", context) return status_html @@ -679,9 +773,6 @@ class CombinedOpenEndedV1Descriptor(XmlDescriptor, EditingDescriptor): has_score = True template_dir_name = "combinedopenended" - js = {'coffee': [resource_string(__name__, 'js/src/html/edit.coffee')]} - js_module_name = "HTMLEditingDescriptor" - @classmethod def definition_from_xml(cls, xml_object, system): """ @@ -697,7 +788,8 @@ class CombinedOpenEndedV1Descriptor(XmlDescriptor, EditingDescriptor): expected_children = ['task', 'rubric', 'prompt'] for child in expected_children: if len(xml_object.xpath(child)) == 0: - raise ValueError("Combined Open 
Ended definition must include at least one '{0}' tag".format(child)) + #This is a staff_facing_error + raise ValueError("Combined Open Ended definition must include at least one '{0}' tag. Contact the learning sciences group for assistance.".format(child)) def parse_task(k): """Assumes that xml_object has child k""" diff --git a/common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_rubric.py b/common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_rubric.py new file mode 100644 index 0000000000..f756b2b853 --- /dev/null +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_rubric.py @@ -0,0 +1,317 @@ +import logging +from lxml import etree + +log = logging.getLogger(__name__) + +GRADER_TYPE_IMAGE_DICT = { + 'SA' : '/static/images/self_assessment_icon.png', + 'PE' : '/static/images/peer_grading_icon.png', + 'ML' : '/static/images/ml_grading_icon.png', + 'IN' : '/static/images/peer_grading_icon.png', + 'BC' : '/static/images/ml_grading_icon.png', + } + +HUMAN_GRADER_TYPE = { + 'SA' : 'Self-Assessment', + 'PE' : 'Peer-Assessment', + 'IN' : 'Instructor-Assessment', + 'ML' : 'AI-Assessment', + 'BC' : 'AI-Assessment', + } + +DO_NOT_DISPLAY = ['BC', 'IN'] + +LEGEND_LIST = [{'name' : HUMAN_GRADER_TYPE[k], 'image' : GRADER_TYPE_IMAGE_DICT[k]} for k in GRADER_TYPE_IMAGE_DICT.keys() if k not in DO_NOT_DISPLAY ] + +class RubricParsingError(Exception): + def __init__(self, msg): + self.msg = msg + + +class CombinedOpenEndedRubric(object): + + def __init__ (self, system, view_only = False): + self.has_score = False + self.view_only = view_only + self.system = system + + def render_rubric(self, rubric_xml, score_list = None): + ''' + render_rubric: takes in an xml string and outputs the corresponding + html for that xml, given the type of rubric we're generating + Input: + rubric_xml: an string that has not been parsed into xml that + represents this particular rubric + Output: + html: the html that corresponds to 
the xml given + ''' + success = False + try: + rubric_categories = self.extract_categories(rubric_xml) + if score_list and len(score_list)==len(rubric_categories): + for i in xrange(0,len(rubric_categories)): + category = rubric_categories[i] + for j in xrange(0,len(category['options'])): + if score_list[i]==j: + rubric_categories[i]['options'][j]['selected'] = True + rubric_scores = [cat['score'] for cat in rubric_categories] + max_scores = map((lambda cat: cat['options'][-1]['points']), rubric_categories) + max_score = max(max_scores) + rubric_template = 'open_ended_rubric.html' + if self.view_only: + rubric_template = 'open_ended_view_only_rubric.html' + html = self.system.render_template(rubric_template, + {'categories': rubric_categories, + 'has_score': self.has_score, + 'view_only': self.view_only, + 'max_score': max_score, + 'combined_rubric' : False + }) + success = True + except: + #This is a staff_facing_error + error_message = "[render_rubric] Could not parse the rubric with xml: {0}. Contact the learning sciences group for assistance.".format(rubric_xml) + log.exception(error_message) + raise RubricParsingError(error_message) + return {'success' : success, 'html' : html, 'rubric_scores' : rubric_scores} + + def check_if_rubric_is_parseable(self, rubric_string, location, max_score_allowed, max_score): + rubric_dict = self.render_rubric(rubric_string) + success = rubric_dict['success'] + rubric_feedback = rubric_dict['html'] + if not success: + #This is a staff_facing_error + error_message = "Could not parse rubric : {0} for location {1}. 
Contact the learning sciences group for assistance.".format(rubric_string, location.url()) + log.error(error_message) + raise RubricParsingError(error_message) + + rubric_categories = self.extract_categories(rubric_string) + total = 0 + for category in rubric_categories: + total = total + len(category['options']) - 1 + if len(category['options']) > (max_score_allowed + 1): + #This is a staff_facing_error + error_message = "Number of score points in rubric {0} higher than the max allowed, which is {1}. Contact the learning sciences group for assistance.".format( + len(category['options']), max_score_allowed) + log.error(error_message) + raise RubricParsingError(error_message) + + if total != max_score: + #This is a staff_facing_error + error_msg = "The max score {0} for problem {1} does not match the total number of points in the rubric {2}. Contact the learning sciences group for assistance.".format( + max_score, location, total) + log.error(error_msg) + raise RubricParsingError(error_msg) + + def extract_categories(self, element): + ''' + Contstruct a list of categories such that the structure looks like: + [ { category: "Category 1 Name", + options: [{text: "Option 1 Name", points: 0}, {text:"Option 2 Name", points: 5}] + }, + { category: "Category 2 Name", + options: [{text: "Option 1 Name", points: 0}, + {text: "Option 2 Name", points: 1}, + {text: "Option 3 Name", points: 2]}] + + ''' + if isinstance(element, basestring): + element = etree.fromstring(element) + categories = [] + for category in element: + if category.tag != 'category': + #This is a staff_facing_error + raise RubricParsingError("[extract_categories] Expected a tag: got {0} instead. 
Contact the learning sciences group for assistance.".format(category.tag)) + else: + categories.append(self.extract_category(category)) + return categories + + + def extract_category(self, category): + ''' + construct an individual category + {category: "Category 1 Name", + options: [{text: "Option 1 text", points: 1}, + {text: "Option 2 text", points: 2}]} + + all sorting and auto-point generation occurs in this function + ''' + descriptionxml = category[0] + optionsxml = category[1:] + scorexml = category[1] + score = None + if scorexml.tag == 'score': + score_text = scorexml.text + optionsxml = category[2:] + score = int(score_text) + self.has_score = True + # if we are missing the score tag and we are expecting one + elif self.has_score: + #This is a staff_facing_error + raise RubricParsingError("[extract_category] Category {0} is missing a score. Contact the learning sciences group for assistance.".format(descriptionxml.text)) + + + # parse description + if descriptionxml.tag != 'description': + #This is a staff_facing_error + raise RubricParsingError("[extract_category]: expected description tag, got {0} instead. Contact the learning sciences group for assistance.".format(descriptionxml.tag)) + + description = descriptionxml.text + + cur_points = 0 + options = [] + autonumbering = True + # parse options + for option in optionsxml: + if option.tag != 'option': + #This is a staff_facing_error + raise RubricParsingError("[extract_category]: expected option tag, got {0} instead. Contact the learning sciences group for assistance.".format(option.tag)) + else: + pointstr = option.get("points") + if pointstr: + autonumbering = False + # try to parse this into an int + try: + points = int(pointstr) + except ValueError: + #This is a staff_facing_error + raise RubricParsingError("[extract_category]: expected points to have int, got {0} instead. 
Contact the learning sciences group for assistance.".format(pointstr)) + elif autonumbering: + # use the generated one if we're in the right mode + points = cur_points + cur_points = cur_points + 1 + else: + raise Exception("[extract_category]: missing points attribute. Cannot continue to auto-create points values after a points value is explicitly defined.") + + selected = score == points + optiontext = option.text + options.append({'text': option.text, 'points': points, 'selected': selected}) + + # sort and check for duplicates + options = sorted(options, key=lambda option: option['points']) + CombinedOpenEndedRubric.validate_options(options) + + return {'description': description, 'options': options, 'score' : score} + + def render_combined_rubric(self,rubric_xml,scores,score_types,feedback_types): + success, score_tuples = CombinedOpenEndedRubric.reformat_scores_for_rendering(scores,score_types,feedback_types) + rubric_categories = self.extract_categories(rubric_xml) + max_scores = map((lambda cat: cat['options'][-1]['points']), rubric_categories) + max_score = max(max_scores) + for i in xrange(0,len(rubric_categories)): + category = rubric_categories[i] + for j in xrange(0,len(category['options'])): + rubric_categories[i]['options'][j]['grader_types'] = [] + for tuple in score_tuples: + if tuple[1] == i and tuple[2] ==j: + for grader_type in tuple[3]: + rubric_categories[i]['options'][j]['grader_types'].append(grader_type) + + html = self.system.render_template('open_ended_combined_rubric.html', + {'categories': rubric_categories, + 'has_score': True, + 'view_only': True, + 'max_score': max_score, + 'combined_rubric' : True, + 'grader_type_image_dict' : GRADER_TYPE_IMAGE_DICT, + 'human_grader_types' : HUMAN_GRADER_TYPE, + }) + return html + + + @staticmethod + def validate_options(options): + ''' + Validates a set of options. 
This can and should be extended to filter out other bad edge cases + ''' + if len(options) == 0: + #This is a staff_facing_error + raise RubricParsingError("[extract_category]: no options associated with this category. Contact the learning sciences group for assistance.") + if len(options) == 1: + return + prev = options[0]['points'] + for option in options[1:]: + if prev == option['points']: + #This is a staff_facing_error + raise RubricParsingError("[extract_category]: found duplicate point values between two different options. Contact the learning sciences group for assistance.") + else: + prev = option['points'] + + @staticmethod + def reformat_scores_for_rendering(scores, score_types, feedback_types): + """ + Takes in a list of rubric scores, the types of those scores, and feedback associated with them + Outputs a reformatted list of score tuples (count, rubric category, rubric score, [graders that gave this score], [feedback types]) + @param scores: + @param score_types: + @param feedback_types: + @return: + """ + success = False + if len(scores)==0: + #This is a dev_facing_error + log.error("Score length is 0 when trying to reformat rubric scores for rendering.") + return success, "" + + if len(scores) != len(score_types) or len(feedback_types) != len(scores): + #This is a dev_facing_error + log.error("Length mismatches when trying to reformat rubric scores for rendering. 
" + "Scores: {0}, Score Types: {1} Feedback Types: {2}".format(scores, score_types, feedback_types)) + return success, "" + + score_lists = [] + score_type_list = [] + feedback_type_list = [] + for i in xrange(0,len(scores)): + score_cont_list = scores[i] + for j in xrange(0,len(score_cont_list)): + score_list = score_cont_list[j] + score_lists.append(score_list) + score_type_list.append(score_types[i][j]) + feedback_type_list.append(feedback_types[i][j]) + + score_list_len = len(score_lists[0]) + for i in xrange(0,len(score_lists)): + score_list = score_lists[i] + if len(score_list)!=score_list_len: + return success, "" + + score_tuples = [] + for i in xrange(0,len(score_lists)): + for j in xrange(0,len(score_lists[i])): + tuple = [1,j,score_lists[i][j],[],[]] + score_tuples, tup_ind = CombinedOpenEndedRubric.check_for_tuple_matches(score_tuples,tuple) + score_tuples[tup_ind][0] += 1 + score_tuples[tup_ind][3].append(score_type_list[i]) + score_tuples[tup_ind][4].append(feedback_type_list[i]) + + success = True + return success, score_tuples + + @staticmethod + def check_for_tuple_matches(tuples, tuple): + """ + Checks to see if a tuple in a list of tuples is a match for tuple. + If not match, creates a new tuple matching tuple. 
+ @param tuples: list of tuples + @param tuple: tuples to match + @return: a new list of tuples, and the index of the tuple that matches tuple + """ + category = tuple[1] + score = tuple[2] + tup_ind = -1 + for t in xrange(0,len(tuples)): + if tuples[t][1] == category and tuples[t][2] == score: + tup_ind = t + break + + if tup_ind == -1: + tuples.append([0,category,score,[],[]]) + tup_ind = len(tuples)-1 + return tuples, tup_ind + + + + + + diff --git a/common/lib/xmodule/xmodule/grading_service_module.py b/common/lib/xmodule/xmodule/open_ended_grading_classes/grading_service_module.py similarity index 87% rename from common/lib/xmodule/xmodule/grading_service_module.py rename to common/lib/xmodule/xmodule/open_ended_grading_classes/grading_service_module.py index 10c6f16adb..8a4caa1291 100644 --- a/common/lib/xmodule/xmodule/grading_service_module.py +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/grading_service_module.py @@ -5,7 +5,7 @@ import requests from requests.exceptions import RequestException, ConnectionError, HTTPError import sys -from xmodule.combined_open_ended_rubric import CombinedOpenEndedRubric, RubricParsingError +from combined_open_ended_rubric import CombinedOpenEndedRubric from lxml import etree log = logging.getLogger(__name__) @@ -22,8 +22,6 @@ class GradingService(object): def __init__(self, config): self.username = config['username'] self.password = config['password'] - self.url = config['url'] - self.login_url = self.url + '/login/' self.session = requests.session() self.system = config['system'] @@ -53,6 +51,8 @@ class GradingService(object): r = self._try_with_login(op) except (RequestException, ConnectionError, HTTPError) as err: # reraise as promised GradingServiceError, but preserve stacktrace. + #This is a dev_facing_error + log.error("Problem posting data to the grading controller. 
URL: {0}, data: {1}".format(url, data)) raise GradingServiceError, str(err), sys.exc_info()[2] return r.text @@ -69,6 +69,8 @@ class GradingService(object): r = self._try_with_login(op) except (RequestException, ConnectionError, HTTPError) as err: # reraise as promised GradingServiceError, but preserve stacktrace. + #This is a dev_facing_error + log.error("Problem getting data from the grading controller. URL: {0}, params: {1}".format(url, params)) raise GradingServiceError, str(err), sys.exc_info()[2] return r.text @@ -114,16 +116,20 @@ class GradingService(object): if 'rubric' in response_json: rubric = response_json['rubric'] rubric_renderer = CombinedOpenEndedRubric(self.system, view_only) - success, rubric_html = rubric_renderer.render_rubric(rubric) + rubric_dict = rubric_renderer.render_rubric(rubric) + success = rubric_dict['success'] + rubric_html = rubric_dict['html'] response_json['rubric'] = rubric_html return response_json # if we can't parse the rubric into HTML, except etree.XMLSyntaxError, RubricParsingError: + #This is a dev_facing_error log.exception("Cannot parse rubric string. 
Raw string: {0}" .format(rubric)) return {'success': False, 'error': 'Error displaying submission'} except ValueError: + #This is a dev_facing_error log.exception("Error parsing response: {0}".format(response)) return {'success': False, 'error': "Error displaying submission"} diff --git a/common/lib/xmodule/xmodule/open_ended_image_submission.py b/common/lib/xmodule/xmodule/open_ended_grading_classes/open_ended_image_submission.py similarity index 92% rename from common/lib/xmodule/xmodule/open_ended_image_submission.py rename to common/lib/xmodule/xmodule/open_ended_grading_classes/open_ended_image_submission.py index 66500146ed..edae69854f 100644 --- a/common/lib/xmodule/xmodule/open_ended_image_submission.py +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/open_ended_image_submission.py @@ -13,11 +13,6 @@ from urlparse import urlparse import requests from boto.s3.connection import S3Connection from boto.s3.key import Key -#TODO: Settings import is needed now in order to specify the URL and keys for amazon s3 (to upload images). -#Eventually, the goal is to replace the global django settings import with settings specifically -#for this module. There is no easy way to do this now, so piggybacking on the django settings -#makes sense. -from django.conf import settings import pickle import logging import re @@ -221,7 +216,7 @@ def run_image_tests(image): return success -def upload_to_s3(file_to_upload, keyname): +def upload_to_s3(file_to_upload, keyname, s3_interface): ''' Upload file to S3 using provided keyname. 
@@ -237,8 +232,8 @@ def upload_to_s3(file_to_upload, keyname): #im.save(out_im, 'PNG') try: - conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY) - bucketname = str(settings.AWS_STORAGE_BUCKET_NAME) + conn = S3Connection(s3_interface['access_key'], s3_interface['secret_access_key']) + bucketname = str(s3_interface['storage_bucket_name']) bucket = conn.create_bucket(bucketname.lower()) k = Key(bucket) @@ -256,8 +251,9 @@ def upload_to_s3(file_to_upload, keyname): return True, public_url except: - error_message = "Could not connect to S3." - log.exception(error_message) + #This is a dev_facing_error + error_message = "Could not connect to S3 to upload peer grading image. Trying to utilize bucket: {0}".format(bucketname.lower()) + log.error(error_message) return False, error_message diff --git a/common/lib/xmodule/xmodule/open_ended_module.py b/common/lib/xmodule/xmodule/open_ended_grading_classes/open_ended_module.py similarity index 81% rename from common/lib/xmodule/xmodule/open_ended_module.py rename to common/lib/xmodule/xmodule/open_ended_grading_classes/open_ended_module.py index 0ad6a26995..0b546482f4 100644 --- a/common/lib/xmodule/xmodule/open_ended_module.py +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/open_ended_module.py @@ -5,28 +5,16 @@ hints, answers, and assessment judgment (currently only correct/incorrect). Parses xml definition file--see below for exact format. 
""" -import copy -from fs.errors import ResourceNotFoundError -import itertools import json import logging from lxml import etree -from lxml.html import rewrite_links -from path import path -import os -import sys -import hashlib import capa.xqueue_interface as xqueue_interface -from pkg_resources import resource_string - -from .capa_module import only_one, ComplexEncoder -from .editing_module import EditingDescriptor -from .html_checker import check_html -from progress import Progress -from .stringify import stringify_children -from .xml_module import XmlDescriptor -from xmodule.modulestore import Location +from xmodule.capa_module import ComplexEncoder +from xmodule.editing_module import EditingDescriptor +from xmodule.progress import Progress +from xmodule.stringify import stringify_children +from xmodule.xml_module import XmlDescriptor from capa.util import * import openendedchild @@ -71,12 +59,14 @@ class OpenEndedModule(openendedchild.OpenEndedChild): self.submission_id = None self.grader_id = None + error_message = "No {0} found in problem xml for open ended problem. Contact the learning sciences group for assistance." 
if oeparam is None: - raise ValueError("No oeparam found in problem xml.") + #This is a staff_facing_error + raise ValueError(error_message.format('oeparam')) if self.prompt is None: - raise ValueError("No prompt found in problem xml.") + raise ValueError(error_message.format('prompt')) if self.rubric is None: - raise ValueError("No rubric found in problem xml.") + raise ValueError(error_message.format('rubric')) self._parse(oeparam, self.prompt, self.rubric, system) @@ -85,6 +75,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild): self.send_to_grader(self.latest_answer(), system) self.created = False + def _parse(self, oeparam, prompt, rubric, system): ''' Parse OpenEndedResponse XML: @@ -110,19 +101,21 @@ class OpenEndedModule(openendedchild.OpenEndedChild): # __init__ adds it (easiest way to get problem location into # response types) except TypeError, ValueError: - log.exception("Grader payload %r is not a json object!", grader_payload) + #This is a dev_facing_error + log.exception("Grader payload from external open ended grading server is not a json object! 
Object: {0}".format(grader_payload)) self.initial_display = find_with_default(oeparam, 'initial_display', '') self.answer = find_with_default(oeparam, 'answer_display', 'No answer given.') parsed_grader_payload.update({ - 'location': system.location.url(), + 'location': self.location_string, 'course_id': system.course_id, 'prompt': prompt_string, 'rubric': rubric_string, 'initial_display': self.initial_display, 'answer': self.answer, - 'problem_id': self.display_name + 'problem_id': self.display_name, + 'skip_basic_checks': self.skip_basic_checks, }) updated_grader_payload = json.dumps(parsed_grader_payload) @@ -145,24 +138,27 @@ class OpenEndedModule(openendedchild.OpenEndedChild): """ event_info = dict() - event_info['problem_id'] = system.location.url() + event_info['problem_id'] = self.location_string event_info['student_id'] = system.anonymous_student_id event_info['survey_responses'] = get survey_responses = event_info['survey_responses'] for tag in ['feedback', 'submission_id', 'grader_id', 'score']: if tag not in survey_responses: - return {'success': False, 'msg': "Could not find needed tag {0}".format(tag)} + #This is a student_facing_error + return {'success': False, 'msg': "Could not find needed tag {0} in the survey responses. Please try submitting again.".format(tag)} try: submission_id = int(survey_responses['submission_id']) grader_id = int(survey_responses['grader_id']) feedback = str(survey_responses['feedback'].encode('ascii', 'ignore')) score = int(survey_responses['score']) except: + #This is a dev_facing_error error_message = ("Could not parse submission id, grader id, " "or feedback from message_post ajax call. Here is the message data: {0}".format( survey_responses)) log.exception(error_message) + #This is a student_facing_error return {'success': False, 'msg': "There was an error saving your feedback. 
Please contact course staff."} qinterface = system.xqueue['interface'] @@ -199,6 +195,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild): self.state = self.DONE + #This is a student_facing_message return {'success': success, 'msg': "Successfully submitted your feedback."} def send_to_grader(self, submission, system): @@ -306,6 +303,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild): 'grammar': 1, # needs to be after all the other feedback 'markup_text': 3} + do_not_render = ['topicality', 'prompt-overlap'] default_priority = 2 @@ -347,22 +345,31 @@ class OpenEndedModule(openendedchild.OpenEndedChild): for tag in ['success', 'feedback', 'submission_id', 'grader_id']: if tag not in response_items: - return format_feedback('errors', 'Error getting feedback') + #This is a student_facing_error + return format_feedback('errors', 'Error getting feedback from grader.') feedback_items = response_items['feedback'] try: feedback = json.loads(feedback_items) except (TypeError, ValueError): - log.exception("feedback_items have invalid json %r", feedback_items) - return format_feedback('errors', 'Could not parse feedback') + #This is a dev_facing_error + log.exception("feedback_items from external open ended grader have invalid json {0}".format(feedback_items)) + #This is a student_facing_error + return format_feedback('errors', 'Error getting feedback from grader.') if response_items['success']: if len(feedback) == 0: - return format_feedback('errors', 'No feedback available') + #This is a student_facing_error + return format_feedback('errors', 'No feedback available from grader.') + + for tag in do_not_render: + if tag in feedback: + feedback.pop(tag) feedback_lst = sorted(feedback.items(), key=get_priority) feedback_list_part1 = u"\n".join(format_feedback(k, v) for k, v in feedback_lst) else: + #This is a student_facing_error feedback_list_part1 = format_feedback('errors', response_items['feedback']) feedback_list_part2 = 
(u"\n".join([format_feedback_hidden(feedback_type, value) @@ -381,9 +388,13 @@ class OpenEndedModule(openendedchild.OpenEndedChild): rubric_feedback = "" feedback = self._convert_longform_feedback_to_html(response_items) + rubric_scores = [] if response_items['rubric_scores_complete'] == True: rubric_renderer = CombinedOpenEndedRubric(system, True) - success, rubric_feedback = rubric_renderer.render_rubric(response_items['rubric_xml']) + rubric_dict = rubric_renderer.render_rubric(response_items['rubric_xml']) + success = rubric_dict['success'] + rubric_feedback = rubric_dict['html'] + rubric_scores = rubric_dict['rubric_scores'] if not response_items['success']: return system.render_template("open_ended_error.html", @@ -396,7 +407,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild): 'rubric_feedback': rubric_feedback }) - return feedback_template + return feedback_template, rubric_scores def _parse_score_msg(self, score_msg, system, join_feedback=True): @@ -420,18 +431,30 @@ class OpenEndedModule(openendedchild.OpenEndedChild): correct: Correctness of submission (Boolean) score: Points to be assigned (numeric, can be float) """ - fail = {'valid': False, 'score': 0, 'feedback': ''} + fail = { + 'valid': False, + 'score': 0, + 'feedback': '', + 'rubric_scores' : [[0]], + 'grader_types' : [''], + 'feedback_items' : [''], + 'feedback_dicts' : [{}], + 'grader_ids' : [0], + 'submission_ids' : [0], + } try: score_result = json.loads(score_msg) except (TypeError, ValueError): - error_message = ("External grader message should be a JSON-serialized dict." + #This is a dev_facing_error + error_message = ("External open ended grader message should be a JSON-serialized dict." " Received score_msg = {0}".format(score_msg)) log.error(error_message) fail['feedback'] = error_message return fail if not isinstance(score_result, dict): - error_message = ("External grader message should be a JSON-serialized dict." 
+ #This is a dev_facing_error + error_message = ("External open ended grader message should be a JSON-serialized dict." " Received score_result = {0}".format(score_result)) log.error(error_message) fail['feedback'] = error_message @@ -439,7 +462,8 @@ class OpenEndedModule(openendedchild.OpenEndedChild): for tag in ['score', 'feedback', 'grader_type', 'success', 'grader_id', 'submission_id']: if tag not in score_result: - error_message = ("External grader message is missing required tag: {0}" + #This is a dev_facing_error + error_message = ("External open ended grader message is missing required tag: {0}" .format(tag)) log.error(error_message) fail['feedback'] = error_message @@ -447,6 +471,11 @@ class OpenEndedModule(openendedchild.OpenEndedChild): #This is to support peer grading if isinstance(score_result['score'], list): feedback_items = [] + rubric_scores = [] + grader_types = [] + feedback_dicts = [] + grader_ids = [] + submission_ids = [] for i in xrange(0, len(score_result['score'])): new_score_result = { 'score': score_result['score'][i], @@ -458,7 +487,17 @@ class OpenEndedModule(openendedchild.OpenEndedChild): 'rubric_scores_complete': score_result['rubric_scores_complete'][i], 'rubric_xml': score_result['rubric_xml'][i], } - feedback_items.append(self._format_feedback(new_score_result, system)) + feedback_template, rubric_score = self._format_feedback(new_score_result, system) + feedback_items.append(feedback_template) + rubric_scores.append(rubric_score) + grader_types.append(score_result['grader_type']) + try: + feedback_dict = json.loads(score_result['feedback'][i]) + except: + pass + feedback_dicts.append(feedback_dict) + grader_ids.append(score_result['grader_id'][i]) + submission_ids.append(score_result['submission_id']) if join_feedback: feedback = "".join(feedback_items) else: @@ -466,13 +505,33 @@ class OpenEndedModule(openendedchild.OpenEndedChild): score = int(median(score_result['score'])) else: #This is for instructor and ML grading - 
feedback = self._format_feedback(score_result, system) + feedback, rubric_score = self._format_feedback(score_result, system) score = score_result['score'] + rubric_scores = [rubric_score] + grader_types = [score_result['grader_type']] + feedback_items = [feedback] + try: + feedback_dict = json.loads(score_result['feedback']) + except: + pass + feedback_dicts = [feedback_dict] + grader_ids = [score_result['grader_id']] + submission_ids = [score_result['submission_id']] self.submission_id = score_result['submission_id'] self.grader_id = score_result['grader_id'] - return {'valid': True, 'score': score, 'feedback': feedback} + return { + 'valid': True, + 'score': score, + 'feedback': feedback, + 'rubric_scores' : rubric_scores, + 'grader_types' : grader_types, + 'feedback_items' : feedback_items, + 'feedback_dicts' : feedback_dicts, + 'grader_ids' : grader_ids, + 'submission_ids' : submission_ids, + } def latest_post_assessment(self, system, short_feedback=False, join_feedback=True): """ @@ -521,7 +580,10 @@ class OpenEndedModule(openendedchild.OpenEndedChild): } if dispatch not in handlers: - return 'Error' + #This is a dev_facing_error + log.error("Cannot find {0} in handlers in handle_ajax function for open_ended_module.py".format(dispatch)) + #This is a dev_facing_error + return json.dumps({'error': 'Error handling action. 
Please try again.', 'success' : False}) before = self.get_progress() d = handlers[dispatch](get, system) @@ -562,15 +624,21 @@ class OpenEndedModule(openendedchild.OpenEndedChild): success, get = self.append_image_to_student_answer(get) error_message = "" if success: - get['student_answer'] = OpenEndedModule.sanitize_html(get['student_answer']) - self.new_history_entry(get['student_answer']) - self.send_to_grader(get['student_answer'], system) - self.change_state(self.ASSESSING) + success, allowed_to_submit, error_message = self.check_if_student_can_submit() + if allowed_to_submit: + get['student_answer'] = OpenEndedModule.sanitize_html(get['student_answer']) + self.new_history_entry(get['student_answer']) + self.send_to_grader(get['student_answer'], system) + self.change_state(self.ASSESSING) + else: + #Error message already defined + success = False else: + #This is a student_facing_error error_message = "There was a problem saving the image in your submission. Please try a different image, or try pasting a link to an image into the answer box." return { - 'success': True, + 'success': success, 'error': error_message, 'student_response': get['student_answer'] } @@ -635,9 +703,6 @@ class OpenEndedDescriptor(XmlDescriptor, EditingDescriptor): has_score = True template_dir_name = "openended" - js = {'coffee': [resource_string(__name__, 'js/src/html/edit.coffee')]} - js_module_name = "HTMLEditingDescriptor" - @classmethod def definition_from_xml(cls, xml_object, system): """ @@ -650,7 +715,8 @@ class OpenEndedDescriptor(XmlDescriptor, EditingDescriptor): """ for child in ['openendedparam']: if len(xml_object.xpath(child)) != 1: - raise ValueError("Open Ended definition must include exactly one '{0}' tag".format(child)) + #This is a staff_facing_error + raise ValueError("Open Ended definition must include exactly one '{0}' tag. 
Contact the learning sciences group for assistance.".format(child)) def parse(k): """Assumes that xml_object has child k""" diff --git a/common/lib/xmodule/xmodule/openendedchild.py b/common/lib/xmodule/xmodule/open_ended_grading_classes/openendedchild.py similarity index 81% rename from common/lib/xmodule/xmodule/openendedchild.py rename to common/lib/xmodule/xmodule/open_ended_grading_classes/openendedchild.py index ba2de5c930..01ccf4c7ac 100644 --- a/common/lib/xmodule/xmodule/openendedchild.py +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/openendedchild.py @@ -13,17 +13,16 @@ import hashlib import capa.xqueue_interface as xqueue_interface import re -from pkg_resources import resource_string - -from .capa_module import only_one, ComplexEncoder -from .editing_module import EditingDescriptor -from .html_checker import check_html -from progress import Progress -from .stringify import stringify_children -from .xml_module import XmlDescriptor +from xmodule.capa_module import only_one, ComplexEncoder +import open_ended_image_submission +from xmodule.editing_module import EditingDescriptor +from xmodule.html_checker import check_html +from xmodule.progress import Progress +from xmodule.stringify import stringify_children +from xmodule.xml_module import XmlDescriptor from xmodule.modulestore import Location from capa.util import * -import open_ended_image_submission +from peer_grading_service import PeerGradingService from datetime import datetime @@ -68,10 +67,10 @@ class OpenEndedChild(object): #This is used to tell students where they are at in the module HUMAN_NAMES = { - 'initial': 'Started', - 'assessing': 'Being scored', - 'post_assessment': 'Scoring finished', - 'done': 'Problem complete', + 'initial': 'Not started', + 'assessing': 'In progress', + 'post_assessment': 'Done', + 'done': 'Done', } def __init__(self, system, location, definition, descriptor, static_data, @@ -100,10 +99,20 @@ class OpenEndedChild(object): self.display_name = 
static_data['display_name'] self.accept_file_upload = static_data['accept_file_upload'] self.close_date = static_data['close_date'] + self.s3_interface = static_data['s3_interface'] + self.skip_basic_checks = static_data['skip_basic_checks'] # Used for progress / grading. Currently get credit just for # completion (doesn't matter if you self-assessed correct/incorrect). self._max_score = static_data['max_score'] + self.peer_gs = PeerGradingService(system.open_ended_grading_interface, system) + + self.system = system + self.location_string = location + try: + self.location_string = self.location_string.url() + except: + pass self.setup_response(system, location, definition, descriptor) @@ -127,18 +136,18 @@ class OpenEndedChild(object): if self.closed(): return True, { 'success': False, - 'error': 'This problem is now closed.' + #This is a student_facing_error + 'error': 'The problem close date has passed, and this problem is now closed.' } elif self.attempts > self.max_attempts: return True, { 'success': False, - 'error': 'Too many attempts.' + #This is a student_facing_error + 'error': 'You have attempted this problem {0} times. You are allowed {1} attempts.'.format(self.attempts, self.max_attempts) } else: return False, {} - - def latest_answer(self): """Empty string if not available""" if not self.history: @@ -253,7 +262,8 @@ class OpenEndedChild(object): try: return Progress(self.get_score()['score'], self._max_score) except Exception as err: - log.exception("Got bad progress") + #This is a dev_facing_error + log.exception("Got bad progress from open ended child module. Max Score: {1}".format(self._max_score)) return None return None @@ -261,10 +271,12 @@ class OpenEndedChild(object): """ return dict out-of-sync error message, and also log. """ - log.warning("Assessment module state out sync. state: %r, get: %r. %s", + #This is a dev_facing_error + log.warning("Open ended child state out sync. state: %r, get: %r. 
%s", self.state, get, msg) + #This is a student_facing_error return {'success': False, - 'error': 'The problem state got out-of-sync'} + 'error': 'The problem state got out-of-sync. Please try reloading the page.'} def get_html(self): """ @@ -321,7 +333,7 @@ class OpenEndedChild(object): try: image_data.seek(0) - success, s3_public_url = open_ended_image_submission.upload_to_s3(image_data, image_key) + success, s3_public_url = open_ended_image_submission.upload_to_s3(image_data, image_key, self.s3_interface) except: log.exception("Could not upload image to S3.") @@ -410,3 +422,34 @@ class OpenEndedChild(object): success = True return success, string + + def check_if_student_can_submit(self): + location = self.location_string + + student_id = self.system.anonymous_student_id + success = False + allowed_to_submit = True + response = {} + #This is a student_facing_error + error_string = ("You need to peer grade {0} more in order to make another submission. " + "You have graded {1}, and {2} are required. You have made {3} successful peer grading submissions.") + try: + response = self.peer_gs.get_data_for_location(location, student_id) + count_graded = response['count_graded'] + count_required = response['count_required'] + student_sub_count = response['student_sub_count'] + success = True + except: + #This is a dev_facing_error + log.error("Could not contact external open ended graders for location {0} and student {1}".format(location,student_id)) + #This is a student_facing_error + error_message = "Could not contact the graders. Please notify course staff." 
+ return success, allowed_to_submit, error_message + if count_graded>=count_required: + return success, allowed_to_submit, "" + else: + allowed_to_submit = False + #This is a student_facing_error + error_message = error_string.format(count_required-count_graded, count_graded, count_required, student_sub_count) + return success, allowed_to_submit, error_message + diff --git a/common/lib/xmodule/xmodule/peer_grading_service.py b/common/lib/xmodule/xmodule/open_ended_grading_classes/peer_grading_service.py similarity index 80% rename from common/lib/xmodule/xmodule/peer_grading_service.py rename to common/lib/xmodule/xmodule/open_ended_grading_classes/peer_grading_service.py index 8c50b6ff0a..42c54f0463 100644 --- a/common/lib/xmodule/xmodule/peer_grading_service.py +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/peer_grading_service.py @@ -1,18 +1,7 @@ import json import logging -import requests -from requests.exceptions import RequestException, ConnectionError, HTTPError -import sys -#TODO: Settings import is needed now in order to specify the URL where to find the peer grading service. -#Eventually, the goal is to replace the global django settings import with settings specifically -#for this xmodule. There is no easy way to do this now, so piggybacking on the django settings -#makes sense. 
-from django.conf import settings - -from combined_open_ended_rubric import CombinedOpenEndedRubric, RubricParsingError -from lxml import etree -from grading_service_module import GradingService, GradingServiceError +from grading_service_module import GradingService log = logging.getLogger(__name__) @@ -28,6 +17,8 @@ class PeerGradingService(GradingService): def __init__(self, config, system): config['system'] = system super(PeerGradingService, self).__init__(config) + self.url = config['url'] + config['peer_grading'] + self.login_url = self.url + '/login/' self.get_next_submission_url = self.url + '/get_next_submission/' self.save_grade_url = self.url + '/save_grade/' self.is_student_calibrated_url = self.url + '/is_student_calibrated/' @@ -39,8 +30,8 @@ class PeerGradingService(GradingService): self.system = system def get_data_for_location(self, problem_location, student_id): - response = self.get(self.get_data_for_location_url, - {'location': problem_location, 'student_id': student_id}) + params = {'location': problem_location, 'student_id': student_id} + response = self.get(self.get_data_for_location_url, params) return self.try_to_decode(response) def get_next_submission(self, problem_location, grader_id): @@ -115,7 +106,7 @@ class MockPeerGradingService(object): 'max_score': 4}) def save_grade(self, location, grader_id, submission_id, - score, feedback, submission_key): + score, feedback, submission_key, rubric_scores, submission_flagged): return json.dumps({'success': True}) def is_student_calibrated(self, problem_location, grader_id): @@ -131,7 +122,8 @@ class MockPeerGradingService(object): 'max_score': 4}) def save_calibration_essay(self, problem_location, grader_id, - calibration_essay_id, submission_key, score, feedback): + calibration_essay_id, submission_key, score, + feedback, rubric_scores): return {'success': True, 'actual_score': 2} def get_problem_list(self, course_id, grader_id): @@ -142,25 +134,3 @@ class MockPeerGradingService(object): 
json.dumps({'location': 'i4x://MITx/3.091x/problem/open_ended_demo2', 'problem_name': "Problem 2", 'num_graded': 1, 'num_pending': 5}) ]}) - -_service = None - - -def peer_grading_service(system): - """ - Return a peer grading service instance--if settings.MOCK_PEER_GRADING is True, - returns a mock one, otherwise a real one. - - Caches the result, so changing the setting after the first call to this - function will have no effect. - """ - global _service - if _service is not None: - return _service - - if settings.MOCK_PEER_GRADING: - _service = MockPeerGradingService() - else: - _service = PeerGradingService(settings.PEER_GRADING_INTERFACE, system) - - return _service diff --git a/common/lib/xmodule/xmodule/self_assessment_module.py b/common/lib/xmodule/xmodule/open_ended_grading_classes/self_assessment_module.py similarity index 75% rename from common/lib/xmodule/xmodule/self_assessment_module.py rename to common/lib/xmodule/xmodule/open_ended_grading_classes/self_assessment_module.py index c8d1fe7a28..7ecb3c4d5e 100644 --- a/common/lib/xmodule/xmodule/self_assessment_module.py +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/self_assessment_module.py @@ -1,24 +1,12 @@ -import copy -from fs.errors import ResourceNotFoundError -import itertools import json import logging from lxml import etree -from lxml.html import rewrite_links -from path import path -import os -import sys -from pkg_resources import resource_string - -from .capa_module import only_one, ComplexEncoder -from .editing_module import EditingDescriptor -from .html_checker import check_html -from progress import Progress -from .stringify import stringify_children -from .x_module import XModule -from .xml_module import XmlDescriptor -from xmodule.modulestore import Location +from xmodule.capa_module import ComplexEncoder +from xmodule.editing_module import EditingDescriptor +from xmodule.progress import Progress +from xmodule.stringify import stringify_children +from xmodule.xml_module 
import XmlDescriptor import openendedchild from combined_open_ended_rubric import CombinedOpenEndedRubric @@ -53,8 +41,6 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild): @param descriptor: SelfAssessmentDescriptor @return: None """ - self.submit_message = definition['submitmessage'] - self.hint_prompt = definition['hintprompt'] self.prompt = stringify_children(self.prompt) self.rubric = stringify_children(self.rubric) @@ -76,8 +62,6 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild): 'previous_answer': previous_answer, 'ajax_url': system.ajax_url, 'initial_rubric': self.get_rubric_html(system), - 'initial_hint': "", - 'initial_message': self.get_message_html(), 'state': self.state, 'allow_reset': self._allow_reset(), 'child_type': 'selfassessment', @@ -106,9 +90,11 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild): } if dispatch not in handlers: - return 'Error' + #This is a dev_facing_error + log.error("Cannot find {0} in handlers in handle_ajax function for open_ended_module.py".format(dispatch)) + #This is a dev_facing_error + return json.dumps({'error': 'Error handling action. 
Please try again.', 'success' : False}) - log.debug(get) before = self.get_progress() d = handlers[dispatch](get, system) after = self.get_progress() @@ -126,7 +112,9 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild): return '' rubric_renderer = CombinedOpenEndedRubric(system, False) - success, rubric_html = rubric_renderer.render_rubric(self.rubric) + rubric_dict = rubric_renderer.render_rubric(self.rubric) + success = rubric_dict['success'] + rubric_html = rubric_dict['html'] # we'll render it context = {'rubric': rubric_html, @@ -138,7 +126,8 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild): elif self.state in (self.POST_ASSESSMENT, self.DONE): context['read_only'] = True else: - raise ValueError("Illegal state '%r'" % self.state) + #This is a dev_facing_error + raise ValueError("Self assessment module is in an illegal state '{0}'".format(self.state)) return system.render_template('self_assessment_rubric.html', context) @@ -156,27 +145,18 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild): else: hint = '' - context = {'hint_prompt': self.hint_prompt, - 'hint': hint} + context = {'hint': hint} if self.state == self.POST_ASSESSMENT: context['read_only'] = False elif self.state == self.DONE: context['read_only'] = True else: - raise ValueError("Illegal state '%r'" % self.state) + #This is a dev_facing_error + raise ValueError("Self Assessment module is in an illegal state '{0}'".format(self.state)) return system.render_template('self_assessment_hint.html', context) - def get_message_html(self): - """ - Return the appropriate version of the message view, based on state. - """ - if self.state != self.DONE: - return "" - - return """
    {0}
    """.format(self.submit_message) - def save_answer(self, get, system): """ @@ -202,10 +182,16 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild): # add new history element with answer and empty score and hint. success, get = self.append_image_to_student_answer(get) if success: - get['student_answer'] = SelfAssessmentModule.sanitize_html(get['student_answer']) - self.new_history_entry(get['student_answer']) - self.change_state(self.ASSESSING) + success, allowed_to_submit, error_message = self.check_if_student_can_submit() + if allowed_to_submit: + get['student_answer'] = SelfAssessmentModule.sanitize_html(get['student_answer']) + self.new_history_entry(get['student_answer']) + self.change_state(self.ASSESSING) + else: + #Error message already defined + success = False else: + #This is a student_facing_error error_message = "There was a problem saving the image in your submission. Please try a different image, or try pasting a link to an image into the answer box." return { @@ -235,15 +221,22 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild): try: score = int(get['assessment']) + score_list = get.getlist('score_list[]') + for i in xrange(0,len(score_list)): + score_list[i] = int(score_list[i]) except ValueError: - return {'success': False, 'error': "Non-integer score value"} + #This is a dev_facing_error + log.error("Non-integer score value passed to save_assessment ,or no score list present.") + #This is a student_facing_error + return {'success': False, 'error': "Error saving your score. 
Please notify course staff."} + #Record score as assessment and rubric scores as post assessment self.record_latest_score(score) + self.record_latest_post_assessment(json.dumps(score_list)) d = {'success': True, } self.change_state(self.DONE) - d['message_html'] = self.get_message_html() d['allow_reset'] = self._allow_reset() d['state'] = self.state @@ -251,6 +244,7 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild): def save_hint(self, get, system): ''' + Not used currently, as hints have been removed from the system. Save the hint. Returns a dict { 'success': bool, 'message_html': message_html, @@ -268,9 +262,19 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild): self.change_state(self.DONE) return {'success': True, - 'message_html': self.get_message_html(), + 'message_html': '', 'allow_reset': self._allow_reset()} + def latest_post_assessment(self, system): + latest_post_assessment = super(SelfAssessmentModule, self).latest_post_assessment(system) + try: + rubric_scores = json.loads(latest_post_assessment) + except: + #This is a dev_facing_error + log.error("Cannot parse rubric scores in self assessment module from {0}".format(latest_post_assessment)) + rubric_scores = [] + return [rubric_scores] + class SelfAssessmentDescriptor(XmlDescriptor, EditingDescriptor): """ @@ -284,10 +288,6 @@ class SelfAssessmentDescriptor(XmlDescriptor, EditingDescriptor): has_score = True template_dir_name = "selfassessment" - js = {'coffee': [resource_string(__name__, 'js/src/html/edit.coffee')]} - js_module_name = "HTMLEditingDescriptor" - css = {'scss': [resource_string(__name__, 'css/editor/edit.scss'), resource_string(__name__, 'css/html/edit.scss')]} - @classmethod def definition_from_xml(cls, xml_object, system): """ @@ -299,18 +299,17 @@ class SelfAssessmentDescriptor(XmlDescriptor, EditingDescriptor): 'hintprompt': 'some-html' } """ - expected_children = ['submitmessage', 'hintprompt'] + expected_children = [] for child in expected_children: if 
len(xml_object.xpath(child)) != 1: - raise ValueError("Self assessment definition must include exactly one '{0}' tag".format(child)) + #This is a staff_facing_error + raise ValueError("Self assessment definition must include exactly one '{0}' tag. Contact the learning sciences group for assistance.".format(child)) def parse(k): """Assumes that xml_object has child k""" return stringify_children(xml_object.xpath(k)[0]) - return {'submitmessage': parse('submitmessage'), - 'hintprompt': parse('hintprompt'), - } + return {} def definition_to_xml(self, resource_fs): '''Return an xml element representing this definition.''' @@ -321,7 +320,7 @@ class SelfAssessmentDescriptor(XmlDescriptor, EditingDescriptor): child_node = etree.fromstring(child_str) elt.append(child_node) - for child in ['submitmessage', 'hintprompt']: + for child in []: add_child(child) return elt diff --git a/common/lib/xmodule/xmodule/peer_grading_module.py b/common/lib/xmodule/xmodule/peer_grading_module.py index 20f71f3b3c..e262db5615 100644 --- a/common/lib/xmodule/xmodule/peer_grading_module.py +++ b/common/lib/xmodule/xmodule/peer_grading_module.py @@ -1,39 +1,20 @@ -""" -This module provides an interface on the grading-service backend -for peer grading - -Use peer_grading_service() to get the version specified -in settings.PEER_GRADING_INTERFACE - -""" import json import logging -import requests -import sys -from django.conf import settings - -from combined_open_ended_rubric import CombinedOpenEndedRubric from lxml import etree -import copy -import itertools -import json -import logging -from lxml.html import rewrite_links -import os - +from datetime import datetime from pkg_resources import resource_string -from .capa_module import only_one, ComplexEncoder +from .capa_module import ComplexEncoder from .editing_module import EditingDescriptor -from .html_checker import check_html -from progress import Progress from .stringify import stringify_children from .x_module import XModule from 
.xml_module import XmlDescriptor from xmodule.modulestore import Location +from xmodule.modulestore.django import modulestore +from timeinfo import TimeInfo -from peer_grading_service import peer_grading_service, GradingServiceError +from xmodule.open_ended_grading_classes.peer_grading_service import PeerGradingService, GradingServiceError log = logging.getLogger(__name__) @@ -43,6 +24,8 @@ TRUE_DICT = [True, "True", "true", "TRUE"] MAX_SCORE = 1 IS_GRADED = True +EXTERNAL_GRADER_NO_CONTACT_ERROR = "Failed to contact external graders. Please notify course staff." + class PeerGradingModule(XModule): _VERSION = 1 @@ -70,7 +53,8 @@ class PeerGradingModule(XModule): #We need to set the location here so the child modules can use it system.set('location', location) self.system = system - self.peer_gs = peer_grading_service(self.system) + self.peer_gs = PeerGradingService(self.system.open_ended_grading_interface, self.system) + self.use_for_single_location = self.metadata.get('use_for_single_location', USE_FOR_SINGLE_LOCATION) if isinstance(self.use_for_single_location, basestring): @@ -80,10 +64,28 @@ class PeerGradingModule(XModule): if isinstance(self.is_graded, basestring): self.is_graded = (self.is_graded in TRUE_DICT) + display_due_date_string = self.metadata.get('due', None) + grace_period_string = self.metadata.get('graceperiod', None) + + try: + self.timeinfo = TimeInfo(display_due_date_string, grace_period_string) + except: + log.error("Error parsing due date information in location {0}".format(location)) + raise + + self.display_due_date = self.timeinfo.display_due_date + self.link_to_location = self.metadata.get('link_to_location', USE_FOR_SINGLE_LOCATION) if self.use_for_single_location == True: - #This will raise an exception if the location is invalid - link_to_location_object = Location(self.link_to_location) + try: + self.linked_problem = modulestore().get_instance(self.system.course_id, self.link_to_location) + except: + log.error("Linked location {0} 
for peer grading module {1} does not exist".format( + self.link_to_location, self.location)) + raise + due_date = self.linked_problem.metadata.get('peer_grading_due', None) + if due_date: + self.metadata['due'] = due_date self.ajax_url = self.system.ajax_url if not self.ajax_url.endswith("/"): @@ -95,6 +97,15 @@ class PeerGradingModule(XModule): #This could result in an exception, but not wrapping in a try catch block so it moves up the stack self.max_grade = int(self.max_grade) + def closed(self): + return self._closed(self.timeinfo) + + def _closed(self, timeinfo): + if timeinfo.close_date is not None and datetime.utcnow() > timeinfo.close_date: + return True + return False + + def _err_response(self, msg): """ Return a HttpResponse with a json dump with success=False, and the given error message. @@ -114,6 +125,8 @@ class PeerGradingModule(XModule): Needs to be implemented by inheritors. Renders the HTML that students see. @return: """ + if self.closed(): + return self.peer_grading_closed() if not self.use_for_single_location: return self.peer_grading() else: @@ -134,7 +147,10 @@ class PeerGradingModule(XModule): } if dispatch not in handlers: - return 'Error' + #This is a dev_facing_error + log.error("Cannot find {0} in handlers in handle_ajax function for open_ended_module.py".format(dispatch)) + #This is a dev_facing_error + return json.dumps({'error': 'Error handling action. 
Please try again.', 'success' : False}) d = handlers[dispatch](get) @@ -142,7 +158,7 @@ class PeerGradingModule(XModule): def query_data_for_location(self): student_id = self.system.anonymous_student_id - location = self.system.location + location = self.link_to_location success = False response = {} @@ -152,6 +168,7 @@ class PeerGradingModule(XModule): count_required = response['count_required'] success = True except GradingServiceError: + #This is a dev_facing_error log.exception("Error getting location data from controller for location {0}, student {1}" .format(location, student_id)) @@ -171,12 +188,13 @@ class PeerGradingModule(XModule): success, response = self.query_data_for_location() if not success: log.exception("No instance data found and could not get data from controller for loc {0} student {1}".format( - self.system.location, self.system.anonymous_student_id + self.system.location.url(), self.system.anonymous_student_id )) return None count_graded = response['count_graded'] count_required = response['count_required'] if count_required > 0 and count_graded >= count_required: + #Ensures that once a student receives a final score for peer grading, that it does not change. self.student_data_for_location = response score_dict = { @@ -226,10 +244,12 @@ class PeerGradingModule(XModule): response = self.peer_gs.get_next_submission(location, grader_id) return response except GradingServiceError: + #This is a dev_facing_error log.exception("Error getting next submission. server url: {0} location: {1}, grader_id: {2}" .format(self.peer_gs.url, location, grader_id)) + #This is a student_facing_error return {'success': False, - 'error': 'Could not connect to grading service'} + 'error': EXTERNAL_GRADER_NO_CONTACT_ERROR} def save_grade(self, get): """ @@ -266,14 +286,16 @@ class PeerGradingModule(XModule): score, feedback, submission_key, rubric_scores, submission_flagged) return response except GradingServiceError: - log.exception("""Error saving grade. 
server url: {0}, location: {1}, submission_id:{2}, + #This is a dev_facing_error + log.exception("""Error saving grade to open ended grading service. server url: {0}, location: {1}, submission_id:{2}, submission_key: {3}, score: {4}""" .format(self.peer_gs.url, location, submission_id, submission_key, score) ) + #This is a student_facing_error return { 'success': False, - 'error': 'Could not connect to grading service' + 'error': EXTERNAL_GRADER_NO_CONTACT_ERROR } def is_student_calibrated(self, get): @@ -306,11 +328,13 @@ class PeerGradingModule(XModule): response = self.peer_gs.is_student_calibrated(location, grader_id) return response except GradingServiceError: - log.exception("Error from grading service. server url: {0}, grader_id: {0}, location: {1}" + #This is a dev_facing_error + log.exception("Error from open ended grading service. server url: {0}, grader_id: {0}, location: {1}" .format(self.peer_gs.url, grader_id, location)) + #This is a student_facing_error return { 'success': False, - 'error': 'Could not connect to grading service' + 'error': EXTERNAL_GRADER_NO_CONTACT_ERROR } def show_calibration_essay(self, get): @@ -349,16 +373,20 @@ class PeerGradingModule(XModule): response = self.peer_gs.show_calibration_essay(location, grader_id) return response except GradingServiceError: - log.exception("Error from grading service. server url: {0}, location: {0}" + #This is a dev_facing_error + log.exception("Error from open ended grading service. server url: {0}, location: {0}" .format(self.peer_gs.url, location)) + #This is a student_facing_error return {'success': False, - 'error': 'Could not connect to grading service'} + 'error': EXTERNAL_GRADER_NO_CONTACT_ERROR} # if we can't parse the rubric into HTML, except etree.XMLSyntaxError: + #This is a dev_facing_error log.exception("Cannot parse rubric string. 
Raw string: {0}" .format(rubric)) + #This is a student_facing_error return {'success': False, - 'error': 'Error displaying submission'} + 'error': 'Error displaying submission. Please notify course staff.'} def save_calibration_essay(self, get): @@ -397,8 +425,20 @@ class PeerGradingModule(XModule): submission_key, score, feedback, rubric_scores) return response except GradingServiceError: + #This is a dev_facing_error log.exception("Error saving calibration grade, location: {0}, submission_id: {1}, submission_key: {2}, grader_id: {3}".format(location, submission_id, submission_key, grader_id)) - return self._err_response('Could not connect to grading service') + #This is a student_facing_error + return self._err_response('There was an error saving your score. Please notify course staff.') + + def peer_grading_closed(self): + ''' + Show the Peer grading closed template + ''' + html = self.system.render_template('peer_grading/peer_grading_closed.html', { + 'use_for_single_location': self.use_for_single_location + }) + return html + def peer_grading(self, get=None): ''' @@ -419,13 +459,49 @@ class PeerGradingModule(XModule): problem_list = problem_list_dict['problem_list'] except GradingServiceError: - error_text = "Error occured while contacting the grading service" + #This is a student_facing_error + error_text = EXTERNAL_GRADER_NO_CONTACT_ERROR success = False # catch error if if the json loads fails except ValueError: - error_text = "Could not get problem list" + #This is a student_facing_error + error_text = "Could not get list of problems to peer grade. Please notify course staff." 
success = False + + def _find_corresponding_module_for_location(location): + ''' + find the peer grading module that links to the given location + ''' + try: + return modulestore().get_instance(self.system.course_id, location) + except: + # the linked problem doesn't exist + log.error("Problem {0} does not exist in this course".format(location)) + raise + + + for problem in problem_list: + problem_location = problem['location'] + descriptor = _find_corresponding_module_for_location(problem_location) + if descriptor: + problem['due'] = descriptor.metadata.get('peer_grading_due', None) + grace_period_string = descriptor.metadata.get('graceperiod', None) + try: + problem_timeinfo = TimeInfo(problem['due'], grace_period_string) + except: + log.error("Malformed due date or grace period string for location {0}".format(problem_location)) + raise + if self._closed(problem_timeinfo): + problem['closed'] = True + else: + problem['closed'] = False + else: + # if we can't find the due date, assume that it doesn't have one + problem['due'] = None + problem['closed'] = False + + ajax_url = self.ajax_url html = self.system.render_template('peer_grading/peer_grading.html', { 'course_id': self.system.course_id, @@ -447,6 +523,8 @@ class PeerGradingModule(XModule): if get == None or get.get('location') == None: if not self.use_for_single_location: #This is an error case, because it must be set to use a single location to be called without get parameters + #This is a dev_facing_error + log.error("Peer grading problem in peer_grading_module called with no get parameters, but use_for_single_location is False.") return {'html': "", 'success': False} problem_location = self.link_to_location @@ -511,7 +589,8 @@ class PeerGradingDescriptor(XmlDescriptor, EditingDescriptor): expected_children = [] for child in expected_children: if len(xml_object.xpath(child)) == 0: - raise ValueError("Peer grading definition must include at least one '{0}' tag".format(child)) + #This is a 
staff_facing_error + raise ValueError("Peer grading definition must include at least one '{0}' tag. Contact the learning sciences group for assistance.".format(child)) def parse_task(k): """Assumes that xml_object has child k""" diff --git a/common/lib/xmodule/xmodule/templates/html/empty.yaml b/common/lib/xmodule/xmodule/templates/html/empty.yaml index 1262ed37cf..b6d867d7d6 100644 --- a/common/lib/xmodule/xmodule/templates/html/empty.yaml +++ b/common/lib/xmodule/xmodule/templates/html/empty.yaml @@ -1,6 +1,7 @@ --- metadata: - display_name: Empty + display_name: Blank HTML Page + empty: True data: | diff --git a/common/lib/xmodule/xmodule/templates/problem/circuitschematic.yaml b/common/lib/xmodule/xmodule/templates/problem/circuitschematic.yaml index f56b17b1b9..a94b824cfb 100644 --- a/common/lib/xmodule/xmodule/templates/problem/circuitschematic.yaml +++ b/common/lib/xmodule/xmodule/templates/problem/circuitschematic.yaml @@ -1,6 +1,7 @@ + --- metadata: - display_name: Circuit Schematic + display_name: Circuit Schematic Builder rerandomize: never showanswer: always weight: "" diff --git a/common/lib/xmodule/xmodule/templates/problem/customgrader.yaml b/common/lib/xmodule/xmodule/templates/problem/customgrader.yaml index 6ada6f97f3..aadbe4075a 100644 --- a/common/lib/xmodule/xmodule/templates/problem/customgrader.yaml +++ b/common/lib/xmodule/xmodule/templates/problem/customgrader.yaml @@ -1,6 +1,6 @@ --- metadata: - display_name: Custom Grader + display_name: Custom Python-Evaluated Input rerandomize: never showanswer: always weight: "" @@ -8,7 +8,7 @@ metadata: data: |

    - A custom response problem accepts one or more lines of text input from the + A custom python-evaluated input problem accepts one or more lines of text input from the student, and evaluates the inputs for correctness based on evaluation using a python script embedded within the problem.

    diff --git a/common/lib/xmodule/xmodule/templates/problem/empty.yaml b/common/lib/xmodule/xmodule/templates/problem/empty.yaml index 346f49609c..39c9e7671c 100644 --- a/common/lib/xmodule/xmodule/templates/problem/empty.yaml +++ b/common/lib/xmodule/xmodule/templates/problem/empty.yaml @@ -1,10 +1,11 @@ --- metadata: - display_name: Empty + display_name: Blank Common Problem rerandomize: never showanswer: always markdown: "" weight: "" + empty: True attempts: "" data: | diff --git a/common/lib/xmodule/xmodule/templates/problem/emptyadvanced.yaml b/common/lib/xmodule/xmodule/templates/problem/emptyadvanced.yaml new file mode 100644 index 0000000000..bba7b3a8ac --- /dev/null +++ b/common/lib/xmodule/xmodule/templates/problem/emptyadvanced.yaml @@ -0,0 +1,13 @@ +--- +metadata: + display_name: Blank Advanced Problem + rerandomize: never + showanswer: always + weight: "" + attempts: "" + empty: True +data: | + + + +children: [] diff --git a/common/lib/xmodule/xmodule/templates/problem/forumularesponse.yaml b/common/lib/xmodule/xmodule/templates/problem/forumularesponse.yaml index 5b30a0497d..b4c53a107b 100644 --- a/common/lib/xmodule/xmodule/templates/problem/forumularesponse.yaml +++ b/common/lib/xmodule/xmodule/templates/problem/forumularesponse.yaml @@ -1,6 +1,6 @@ --- metadata: - display_name: Formula Response + display_name: Math Expression Input rerandomize: never showanswer: always weight: "" @@ -8,7 +8,7 @@ metadata: data: |

    - A formula response problem accepts a line of text representing a mathematical expression from the + A math expression input problem accepts a line of text representing a mathematical expression from the student, and evaluates the input for equivalence to a mathematical expression provided by the grader. Correctness is based on numerical sampling of the symbolic expressions.

    diff --git a/common/lib/xmodule/xmodule/templates/problem/imageresponse.yaml b/common/lib/xmodule/xmodule/templates/problem/imageresponse.yaml index 069c157852..3ef619d54b 100644 --- a/common/lib/xmodule/xmodule/templates/problem/imageresponse.yaml +++ b/common/lib/xmodule/xmodule/templates/problem/imageresponse.yaml @@ -1,6 +1,6 @@ --- metadata: - display_name: Image Response + display_name: Image Mapped Input rerandomize: never showanswer: always weight: "" @@ -8,7 +8,7 @@ metadata: data: |

    - An image response problem presents an image for the student. Input is + An image mapped input problem presents an image for the student. Input is given by the location of mouse clicks on the image. Correctness of input can be evaluated based on expected dimensions of a rectangle.

    diff --git a/common/lib/xmodule/xmodule/templates/problem/multiplechoice.yaml b/common/lib/xmodule/xmodule/templates/problem/multiplechoice.yaml index 9e61324ae1..3a35a35199 100644 --- a/common/lib/xmodule/xmodule/templates/problem/multiplechoice.yaml +++ b/common/lib/xmodule/xmodule/templates/problem/multiplechoice.yaml @@ -26,10 +26,6 @@ metadata: ( ) The vegetable peeler - ( ) Android - - ( ) The Beatles - [explanation] The release of the iPod allowed consumers to carry their entire music library with them in a @@ -51,8 +47,6 @@ data: | Napster The iPod The vegetable peeler - Android - The Beatles diff --git a/common/lib/xmodule/xmodule/templates/problem/numericalresponse.yaml b/common/lib/xmodule/xmodule/templates/problem/numericalresponse.yaml index e0a5776222..1dc46f5f51 100644 --- a/common/lib/xmodule/xmodule/templates/problem/numericalresponse.yaml +++ b/common/lib/xmodule/xmodule/templates/problem/numericalresponse.yaml @@ -1,12 +1,12 @@ --- metadata: - display_name: Numerical Response + display_name: Numerical Input rerandomize: never showanswer: always weight: "" attempts: "" markdown: - "A numerical response problem accepts a line of text input from the + "A numerical input problem accepts a line of text input from the student, and evaluates the input for correctness based on its numerical value. @@ -45,7 +45,7 @@ metadata: data: |

    - A numerical response problem accepts a line of text input from the + A numerical input problem accepts a line of text input from the student, and evaluates the input for correctness based on its numerical value.

    diff --git a/common/lib/xmodule/xmodule/templates/problem/optionresponse.yaml b/common/lib/xmodule/xmodule/templates/problem/optionresponse.yaml index 1a42a5a009..f523c7fdc5 100644 --- a/common/lib/xmodule/xmodule/templates/problem/optionresponse.yaml +++ b/common/lib/xmodule/xmodule/templates/problem/optionresponse.yaml @@ -1,12 +1,12 @@ --- metadata: - display_name: Option Response + display_name: Dropdown rerandomize: never showanswer: always weight: "" attempts: "" markdown: - "OptionResponse gives a limited set of options for students to respond with, and presents those options + "Dropdown problems give a limited set of options for students to respond with, and present those options in a format that encourages them to search for a specific answer rather than being immediately presented with options from which to recognize the correct answer. @@ -14,30 +14,30 @@ metadata: The answer options and the identification of the correct answer is defined in the optioninput tag. - Translation between Option Response and __________ is extremely straightforward: + Translation between Dropdown and __________ is extremely straightforward: - [[(Multiple Choice), String Response, Numerical Response, External Response, Image Response]] + [[(Multiple Choice), Text Input, Numerical Input, External Response, Image Response]] [explanation] Multiple Choice also allows students to select from a variety of pre-written responses, although the - format makes it easier for students to read very long response options. Optionresponse also differs + format makes it easier for students to read very long response options. Dropdowns also differ slightly because students are more likely to think of an answer and then search for it rather than relying purely on recognition to answer the question. [explanation] " data: | -

    OptionResponse gives a limited set of options for students to respond with, and presents those options +

    Dropdown problems give a limited set of options for students to respond with, and present those options in a format that encourages them to search for a specific answer rather than being immediately presented with options from which to recognize the correct answer.

    The answer options and the identification of the correct answer is defined in the optioninput tag.

    -

    Translation between Option Response and __________ is extremely straightforward: +

    Translation between Dropdown and __________ is extremely straightforward: - +

    diff --git a/common/lib/xmodule/xmodule/templates/problem/string_response.yaml b/common/lib/xmodule/xmodule/templates/problem/string_response.yaml index 1761ea8f67..c018d3f6cf 100644 --- a/common/lib/xmodule/xmodule/templates/problem/string_response.yaml +++ b/common/lib/xmodule/xmodule/templates/problem/string_response.yaml @@ -1,15 +1,15 @@ --- metadata: - display_name: String Response + display_name: Text Input rerandomize: never showanswer: always weight: "" attempts: "" # Note, the extra newlines are needed to make the yaml parser add blank lines instead of folding markdown: - "A string response problem accepts a line of text input from the + "A text input problem accepts a line of text from the student, and evaluates the input for correctness based on an expected - answer within each input box. + answer. The answer is correct if it matches every character of the expected answer. This can be a problem with @@ -30,9 +30,9 @@ data: |

    - A string response problem accepts a line of text input from the + A text input problem accepts a line of text from the student, and evaluates the input for correctness based on an expected - answer within each input box. + answer. The answer is correct if it matches every character of the expected answer. This can be a problem with international spelling, dates, or anything where the format of the answer is not clear.

    diff --git a/common/lib/xmodule/xmodule/tests/__init__.py b/common/lib/xmodule/xmodule/tests/__init__.py index 04e7ee19b1..9474717cb2 100644 --- a/common/lib/xmodule/xmodule/tests/__init__.py +++ b/common/lib/xmodule/xmodule/tests/__init__.py @@ -19,6 +19,15 @@ import xmodule from xmodule.x_module import ModuleSystem from mock import Mock +open_ended_grading_interface = { + 'url': 'http://sandbox-grader-001.m.edx.org/peer_grading', + 'username': 'incorrect_user', + 'password': 'incorrect_pass', + 'staff_grading' : 'staff_grading', + 'peer_grading' : 'peer_grading', + 'grading_controller' : 'grading_controller' + } + test_system = ModuleSystem( ajax_url='courses/course_id/modx/a_location', track_function=Mock(), @@ -31,7 +40,8 @@ test_system = ModuleSystem( debug=True, xqueue={'interface': None, 'callback_url': '/', 'default_queuename': 'testqueue', 'waittime': 10}, node_path=os.environ.get("NODE_PATH", "/usr/local/lib/node_modules"), - anonymous_student_id='student' + anonymous_student_id='student', + open_ended_grading_interface= open_ended_grading_interface ) diff --git a/common/lib/xmodule/xmodule/tests/test_combined_open_ended.py b/common/lib/xmodule/xmodule/tests/test_combined_open_ended.py index c2b27e4953..5f6496f823 100644 --- a/common/lib/xmodule/xmodule/tests/test_combined_open_ended.py +++ b/common/lib/xmodule/xmodule/tests/test_combined_open_ended.py @@ -2,9 +2,9 @@ import json from mock import Mock, MagicMock, ANY import unittest -from xmodule.openendedchild import OpenEndedChild -from xmodule.open_ended_module import OpenEndedModule -from xmodule.combined_open_ended_modulev1 import CombinedOpenEndedV1Module +from xmodule.open_ended_grading_classes.openendedchild import OpenEndedChild +from xmodule.open_ended_grading_classes.open_ended_module import OpenEndedModule +from xmodule.open_ended_grading_classes.combined_open_ended_modulev1 import CombinedOpenEndedV1Module from xmodule.modulestore import Location from lxml import etree @@ -12,6 +12,8 @@ 
import capa.xqueue_interface as xqueue_interface from datetime import datetime from . import test_system + +import test_util_open_ended """ Tests for the various pieces of the CombinedOpenEndedGrading system @@ -43,7 +45,10 @@ class OpenEndedChildTest(unittest.TestCase): 'max_score': max_score, 'display_name': 'Name', 'accept_file_upload': False, - 'close_date': None + 'close_date': None, + 's3_interface' : "", + 'open_ended_grading_interface' : {}, + 'skip_basic_checks' : False, } definition = Mock() descriptor = Mock() @@ -161,6 +166,9 @@ class OpenEndedModuleTest(unittest.TestCase): 'accept_file_upload': False, 'rewrite_content_links' : "", 'close_date': None, + 's3_interface' : test_util_open_ended.S3_INTERFACE, + 'open_ended_grading_interface' : test_util_open_ended.OPEN_ENDED_GRADING_INTERFACE, + 'skip_basic_checks' : False, } oeparam = etree.XML(''' @@ -293,6 +301,9 @@ class CombinedOpenEndedModuleTest(unittest.TestCase): 'accept_file_upload' : False, 'rewrite_content_links' : "", 'close_date' : "", + 's3_interface' : test_util_open_ended.S3_INTERFACE, + 'open_ended_grading_interface' : test_util_open_ended.OPEN_ENDED_GRADING_INTERFACE, + 'skip_basic_checks' : False, } oeparam = etree.XML(''' diff --git a/common/lib/xmodule/xmodule/tests/test_export.py b/common/lib/xmodule/xmodule/tests/test_export.py index da1b04bd94..e9fb89e9f6 100644 --- a/common/lib/xmodule/xmodule/tests/test_export.py +++ b/common/lib/xmodule/xmodule/tests/test_export.py @@ -4,7 +4,7 @@ from fs.osfs import OSFS from nose.tools import assert_equals, assert_true from path import path from tempfile import mkdtemp -from shutil import copytree +import shutil from xmodule.modulestore.xml import XMLModuleStore @@ -46,11 +46,11 @@ class RoundTripTestCase(unittest.TestCase): Thus we make sure that export and import work properly. 
''' def check_export_roundtrip(self, data_dir, course_dir): - root_dir = path(mkdtemp()) + root_dir = path(self.temp_dir) print "Copying test course to temp dir {0}".format(root_dir) data_dir = path(data_dir) - copytree(data_dir / course_dir, root_dir / course_dir) + shutil.copytree(data_dir / course_dir, root_dir / course_dir) print "Starting import" initial_import = XMLModuleStore(root_dir, course_dirs=[course_dir]) @@ -108,6 +108,8 @@ class RoundTripTestCase(unittest.TestCase): def setUp(self): self.maxDiff = None + self.temp_dir = mkdtemp() + self.addCleanup(shutil.rmtree, self.temp_dir) def test_toy_roundtrip(self): self.check_export_roundtrip(DATA_DIR, "toy") diff --git a/common/lib/xmodule/xmodule/tests/test_self_assessment.py b/common/lib/xmodule/xmodule/tests/test_self_assessment.py index 617b2b142a..b9c3076b7c 100644 --- a/common/lib/xmodule/xmodule/tests/test_self_assessment.py +++ b/common/lib/xmodule/xmodule/tests/test_self_assessment.py @@ -1,13 +1,14 @@ import json -from mock import Mock +from mock import Mock, MagicMock import unittest -from xmodule.self_assessment_module import SelfAssessmentModule +from xmodule.open_ended_grading_classes.self_assessment_module import SelfAssessmentModule from xmodule.modulestore import Location from lxml import etree from . 
import test_system +import test_util_open_ended class SelfAssessmentTest(unittest.TestCase): @@ -46,7 +47,10 @@ class SelfAssessmentTest(unittest.TestCase): 'max_score': 1, 'display_name': "Name", 'accept_file_upload': False, - 'close_date': None + 'close_date': None, + 's3_interface' : test_util_open_ended.S3_INTERFACE, + 'open_ended_grading_interface' : test_util_open_ended.OPEN_ENDED_GRADING_INTERFACE, + 'skip_basic_checks' : False, } self.module = SelfAssessmentModule(test_system, self.location, @@ -59,13 +63,29 @@ class SelfAssessmentTest(unittest.TestCase): self.assertTrue("This is sample prompt text" in html) def test_self_assessment_flow(self): + responses = {'assessment': '0', 'score_list[]': ['0', '0']} + def get_fake_item(name): + return responses[name] + + def get_data_for_location(self,location,student): + return { + 'count_graded' : 0, + 'count_required' : 0, + 'student_sub_count': 0, + } + + mock_query_dict = MagicMock() + mock_query_dict.__getitem__.side_effect = get_fake_item + mock_query_dict.getlist = get_fake_item + + self.module.peer_gs.get_data_for_location = get_data_for_location self.assertEqual(self.module.get_score()['score'], 0) self.module.save_answer({'student_answer': "I am an answer"}, test_system) self.assertEqual(self.module.state, self.module.ASSESSING) - self.module.save_assessment({'assessment': '0'}, test_system) + self.module.save_assessment(mock_query_dict, test_system) self.assertEqual(self.module.state, self.module.DONE) @@ -75,5 +95,6 @@ class SelfAssessmentTest(unittest.TestCase): # if we now assess as right, skip the REQUEST_HINT state self.module.save_answer({'student_answer': 'answer 4'}, test_system) - self.module.save_assessment({'assessment': '1'}, test_system) + responses['assessment'] = '1' + self.module.save_assessment(mock_query_dict, test_system) self.assertEqual(self.module.state, self.module.DONE) diff --git a/common/lib/xmodule/xmodule/tests/test_util_open_ended.py 
b/common/lib/xmodule/xmodule/tests/test_util_open_ended.py new file mode 100644 index 0000000000..8d1fcd30ce --- /dev/null +++ b/common/lib/xmodule/xmodule/tests/test_util_open_ended.py @@ -0,0 +1,14 @@ +OPEN_ENDED_GRADING_INTERFACE = { + 'url' : 'http://127.0.0.1:3033/', + 'username' : 'incorrect', + 'password' : 'incorrect', + 'staff_grading' : 'staff_grading', + 'peer_grading' : 'peer_grading', + 'grading_controller' : 'grading_controller' +} + +S3_INTERFACE = { + 'aws_access_key' : "", + 'aws_secret_key' : "", + "aws_bucket_name" : "", +} \ No newline at end of file diff --git a/common/lib/xmodule/xmodule/timeinfo.py b/common/lib/xmodule/xmodule/timeinfo.py new file mode 100644 index 0000000000..6c6a72e700 --- /dev/null +++ b/common/lib/xmodule/xmodule/timeinfo.py @@ -0,0 +1,39 @@ +import dateutil +import dateutil.parser +import datetime +from timeparse import parse_timedelta + +import logging +log = logging.getLogger(__name__) + +class TimeInfo(object): + """ + This is a simple object that calculates and stores datetime information for an XModule + based on the due date string and the grace period string + + So far it parses out three different pieces of time information: + self.display_due_date - the 'official' due date that gets displayed to students + self.grace_period - the length of the grace period + self.close_date - the real due date + + """ + def __init__(self, display_due_date_string, grace_period_string): + if display_due_date_string is not None: + try: + self.display_due_date = dateutil.parser.parse(display_due_date_string) + except ValueError: + log.error("Could not parse due date {0}".format(display_due_date_string)) + raise + else: + self.display_due_date = None + + if grace_period_string is not None and self.display_due_date: + try: + self.grace_period = parse_timedelta(grace_period_string) + self.close_date = self.display_due_date + self.grace_period + except Exception: + log.error("Error parsing the grace period {0}".format(grace_period_string)) + 
raise + else: + self.grace_period = None + self.close_date = self.display_due_date diff --git a/common/lib/xmodule/xmodule/x_module.py b/common/lib/xmodule/xmodule/x_module.py index 07b9d3653e..dccc96a7ca 100644 --- a/common/lib/xmodule/xmodule/x_module.py +++ b/common/lib/xmodule/xmodule/x_module.py @@ -411,7 +411,6 @@ class ResourceTemplates(object): return templates - class XModuleDescriptor(Plugin, HTMLSnippet, ResourceTemplates): """ An XModuleDescriptor is a specification for an element of a course. This @@ -585,11 +584,11 @@ class XModuleDescriptor(Plugin, HTMLSnippet, ResourceTemplates): def inherit_metadata(self, metadata): """ Updates this module with metadata inherited from a containing module. - Only metadata specified in self.inheritable_metadata will + Only metadata specified in inheritable_metadata will be inherited """ # Set all inheritable metadata from kwargs that are - # in self.inheritable_metadata and aren't already set in metadata + # in inheritable_metadata and aren't already set in metadata for attr in self.inheritable_metadata: if attr not in self.metadata and attr in metadata: self._inherited_metadata.add(attr) @@ -879,7 +878,9 @@ class ModuleSystem(object): xqueue=None, node_path="", anonymous_student_id='', - course_id=None): + course_id=None, + open_ended_grading_interface=None, + s3_interface=None): ''' Create a closure around the system environment. 
@@ -930,6 +931,8 @@ class ModuleSystem(object): self.anonymous_student_id = anonymous_student_id self.course_id = course_id self.user_is_staff = user is not None and user.is_staff + self.open_ended_grading_interface = open_ended_grading_interface + self.s3_interface = s3_interface def get(self, attr): ''' provide uniform access to attributes (like etree).''' diff --git a/common/static/images/grading_notification.png b/common/static/images/grading_notification.png new file mode 100644 index 0000000000..cd93857da9 Binary files /dev/null and b/common/static/images/grading_notification.png differ diff --git a/common/static/images/ml_grading_icon.png b/common/static/images/ml_grading_icon.png new file mode 100644 index 0000000000..283355814e Binary files /dev/null and b/common/static/images/ml_grading_icon.png differ diff --git a/common/static/images/peer_grading_icon.png b/common/static/images/peer_grading_icon.png new file mode 100644 index 0000000000..0ee7cf5f17 Binary files /dev/null and b/common/static/images/peer_grading_icon.png differ diff --git a/common/static/images/random_grading_icon.png b/common/static/images/random_grading_icon.png new file mode 100644 index 0000000000..d3737e61b0 Binary files /dev/null and b/common/static/images/random_grading_icon.png differ diff --git a/common/static/images/self_assessment_icon.png b/common/static/images/self_assessment_icon.png new file mode 100644 index 0000000000..c4b84e2ec8 Binary files /dev/null and b/common/static/images/self_assessment_icon.png differ diff --git a/common/static/sass/_mixins.scss b/common/static/sass/_mixins.scss index 58a92d1ee6..76d52ed930 100644 --- a/common/static/sass/_mixins.scss +++ b/common/static/sass/_mixins.scss @@ -1,19 +1,39 @@ +// font-sizing @function em($pxval, $base: 16) { @return #{$pxval / $base}em; } -// Line-height +@mixin font-size($sizeValue: 1.6){ + font-size: $sizeValue + px; + font-size: ($sizeValue/10) + rem; +} + +// line-height @function lh($amount: 1) { @return 
$body-line-height * $amount; } -@mixin hide-text(){ - text-indent: -9999px; +// image-replacement hidden text +@mixin text-hide() { + text-indent: 100%; + white-space: nowrap; overflow: hidden; - display: block; } -@mixin vertically-and-horizontally-centered ( $height, $width ) { +// hidden elems - screenreaders +@mixin text-sr() { + border: 0; + clip: rect(0 0 0 0); + height: 1px; + margin: -1px; + overflow: hidden; + padding: 0; + position: absolute; + width: 1px; +} + +// vertical and horizontal centering +@mixin vertically-and-horizontally-centered ($height, $width) { left: 50%; margin-left: -$width / 2; //margin-top: -$height / 2; @@ -22,3 +42,26 @@ position: absolute; top: 150px; } + +// sizing +@mixin size($width: $baseline, $height: $baseline) { + height: $height; + width: $width; +} + +@mixin square($size: $baseline) { + @include size($size); +} + +// placeholder styling +@mixin placeholder($color) { + :-moz-placeholder { + color: $color; + } + ::-webkit-input-placeholder { + color: $color; + } + :-ms-input-placeholder { + color: $color; + } +} \ No newline at end of file diff --git a/lms/.coveragerc b/lms/.coveragerc index 35aa7a3851..72b7b037ef 100644 --- a/lms/.coveragerc +++ b/lms/.coveragerc @@ -2,7 +2,7 @@ [run] data_file = reports/lms/.coverage source = lms,common/djangoapps -omit = lms/envs/* +omit = lms/envs/*, common/djangoapps/terrain/*, common/djangoapps/*/migrations/* [report] ignore_errors = True diff --git a/lms/djangoapps/portal/features/common.py b/lms/djangoapps/courseware/features/common.py similarity index 97% rename from lms/djangoapps/portal/features/common.py rename to lms/djangoapps/courseware/features/common.py index 8bfb548367..2e19696ad4 100644 --- a/lms/djangoapps/portal/features/common.py +++ b/lms/djangoapps/courseware/features/common.py @@ -1,5 +1,4 @@ -from lettuce import world, step # , before, after -from factories import * +from lettuce import world, step from django.core.management import call_command from nose.tools 
import assert_equals, assert_in from lettuce.django import django_url diff --git a/lms/djangoapps/courseware/features/courses.py b/lms/djangoapps/courseware/features/courses.py index 9b1316b00d..ba0bcd359b 100644 --- a/lms/djangoapps/courseware/features/courses.py +++ b/lms/djangoapps/courseware/features/courses.py @@ -9,7 +9,6 @@ logger = getLogger(__name__) ## support functions - def get_courses(): ''' Returns dict of lists of courses available, keyed by course.org (ie university). @@ -20,29 +19,6 @@ def get_courses(): courses = sorted(courses, key=lambda course: course.number) return courses -# def get_courseware(course_id): -# """ -# Given a course_id (string), return a courseware array of dictionaries for the -# top two levels of navigation. Example: - -# [ -# {'chapter_name': 'Overview', -# 'sections': ['Welcome', 'System Usage Sequence', 'Lab0: Using the tools', 'Circuit Sandbox'] -# }, -# {'chapter_name': 'Week 1', -# 'sections': ['Administrivia and Circuit Elements', 'Basic Circuit Analysis', 'Resistor Divider', 'Week 1 Tutorials'] -# }, -# {'chapter_name': 'Midterm Exam', -# 'sections': ['Midterm Exam'] -# } -# ] -# """ - -# course = get_course_by_id(course_id) -# chapters = course.get_children() -# courseware = [ {'chapter_name':c.display_name, 'sections':[s.display_name for s in c.get_children()]} for c in chapters] -# return courseware - def get_courseware_with_tabs(course_id): """ @@ -106,8 +82,8 @@ def get_courseware_with_tabs(course_id): course = get_course_by_id(course_id) chapters = [chapter for chapter in course.get_children() if chapter.metadata.get('hide_from_toc', 'false').lower() != 'true'] courseware = [{'chapter_name': c.display_name, - 'sections': [{'section_name': s.display_name, - 'clickable_tab_count': len(s.get_children()) if (type(s) == seq_module.SequenceDescriptor) else 0, + 'sections': [{'section_name': s.display_name, + 'clickable_tab_count': len(s.get_children()) if (type(s) == seq_module.SequenceDescriptor) else 0, 'tabs': 
[{'children_count': len(t.get_children()) if (type(t) == vertical_module.VerticalDescriptor) else 0, 'class': t.__class__.__name__} for t in s.get_children()]} diff --git a/lms/djangoapps/courseware/features/courseware.feature b/lms/djangoapps/courseware/features/courseware.feature index 21c7e84541..279e5732c9 100644 --- a/lms/djangoapps/courseware/features/courseware.feature +++ b/lms/djangoapps/courseware/features/courseware.feature @@ -9,10 +9,3 @@ Feature: View the Courseware Tab And I click on View Courseware When I click on the "Courseware" tab Then the "Courseware" tab is active - - # TODO: fix this one? Not sure whether you should get a 404. - # Scenario: I cannot get to the courseware tab when not logged in - # Given I am not logged in - # And I visit the homepage - # When I visit the courseware URL - # Then the login dialog is visible diff --git a/lms/djangoapps/courseware/features/courseware_common.py b/lms/djangoapps/courseware/features/courseware_common.py index 5ee21da906..96304e016f 100644 --- a/lms/djangoapps/courseware/features/courseware_common.py +++ b/lms/djangoapps/courseware/features/courseware_common.py @@ -4,13 +4,13 @@ from lettuce.django import django_url @step('I click on View Courseware') def i_click_on_view_courseware(step): - css = 'p.enter-course' + css = 'a.enter-course' world.browser.find_by_css(css).first.click() @step('I click on the "([^"]*)" tab$') def i_click_on_the_tab(step, tab): - world.browser.find_link_by_text(tab).first.click() + world.browser.find_link_by_partial_text(tab).first.click() world.save_the_html() diff --git a/lms/djangoapps/portal/features/homepage.feature b/lms/djangoapps/courseware/features/homepage.feature similarity index 100% rename from lms/djangoapps/portal/features/homepage.feature rename to lms/djangoapps/courseware/features/homepage.feature diff --git a/lms/djangoapps/portal/features/homepage.py b/lms/djangoapps/courseware/features/homepage.py similarity index 100% rename from 
lms/djangoapps/portal/features/homepage.py rename to lms/djangoapps/courseware/features/homepage.py diff --git a/lms/djangoapps/portal/features/login.feature b/lms/djangoapps/courseware/features/login.feature similarity index 100% rename from lms/djangoapps/portal/features/login.feature rename to lms/djangoapps/courseware/features/login.feature diff --git a/lms/djangoapps/portal/features/login.py b/lms/djangoapps/courseware/features/login.py similarity index 99% rename from lms/djangoapps/portal/features/login.py rename to lms/djangoapps/courseware/features/login.py index 094db078ca..ca7d710c61 100644 --- a/lms/djangoapps/portal/features/login.py +++ b/lms/djangoapps/courseware/features/login.py @@ -34,7 +34,6 @@ def click_the_dropdown(step): #### helper functions - def user_is_an_unactivated_user(uname): u = User.objects.get(username=uname) u.is_active = False diff --git a/lms/djangoapps/courseware/features/openended.feature b/lms/djangoapps/courseware/features/openended.feature index 3c7043ba54..cc9f6e1c5f 100644 --- a/lms/djangoapps/courseware/features/openended.feature +++ b/lms/djangoapps/courseware/features/openended.feature @@ -3,31 +3,35 @@ Feature: Open ended grading In order to complete the courseware questions I want the machine learning grading to be functional - Scenario: An answer that is too short is rejected - Given I navigate to an openended question - And I enter the answer "z" - When I press the "Check" button - And I wait for "8" seconds - And I see the grader status "Submitted for grading" - And I press the "Recheck for Feedback" button - Then I see the red X - And I see the grader score "0" + # Commenting these all out right now until we can + # make a reference implementation for a course with + # an open ended grading problem that is always available + # + # Scenario: An answer that is too short is rejected + # Given I navigate to an openended question + # And I enter the answer "z" + # When I press the "Check" button + # And I wait for "8" 
seconds + # And I see the grader status "Submitted for grading" + # And I press the "Recheck for Feedback" button + # Then I see the red X + # And I see the grader score "0" - Scenario: An answer with too many spelling errors is rejected - Given I navigate to an openended question - And I enter the answer "az" - When I press the "Check" button - And I wait for "8" seconds - And I see the grader status "Submitted for grading" - And I press the "Recheck for Feedback" button - Then I see the red X - And I see the grader score "0" - When I click the link for full output - Then I see the spelling grading message "More spelling errors than average." + # Scenario: An answer with too many spelling errors is rejected + # Given I navigate to an openended question + # And I enter the answer "az" + # When I press the "Check" button + # And I wait for "8" seconds + # And I see the grader status "Submitted for grading" + # And I press the "Recheck for Feedback" button + # Then I see the red X + # And I see the grader score "0" + # When I click the link for full output + # Then I see the spelling grading message "More spelling errors than average." - Scenario: An answer makes its way to the instructor dashboard - Given I navigate to an openended question as staff - When I submit the answer "I love Chemistry." - And I wait for "8" seconds - And I visit the staff grading page - Then my answer is queued for instructor grading \ No newline at end of file + # Scenario: An answer makes its way to the instructor dashboard + # Given I navigate to an openended question as staff + # When I submit the answer "I love Chemistry." 
+ # And I wait for "8" seconds + # And I visit the staff grading page + # Then my answer is queued for instructor grading diff --git a/lms/djangoapps/portal/features/registration.feature b/lms/djangoapps/courseware/features/registration.feature similarity index 94% rename from lms/djangoapps/portal/features/registration.feature rename to lms/djangoapps/courseware/features/registration.feature index d8a6796ee3..d9b588534b 100644 --- a/lms/djangoapps/portal/features/registration.feature +++ b/lms/djangoapps/courseware/features/registration.feature @@ -14,4 +14,4 @@ Feature: Register for a course And I visit the dashboard When I click the link with the text "Unregister" And I press the "Unregister" button in the Unenroll dialog - Then I should see "Looks like you haven't registered for any courses yet." somewhere in the page \ No newline at end of file + Then I should see "Looks like you haven't registered for any courses yet." somewhere in the page diff --git a/lms/djangoapps/portal/features/registration.py b/lms/djangoapps/courseware/features/registration.py similarity index 88% rename from lms/djangoapps/portal/features/registration.py rename to lms/djangoapps/courseware/features/registration.py index b2b4c4bd8d..f585136412 100644 --- a/lms/djangoapps/portal/features/registration.py +++ b/lms/djangoapps/courseware/features/registration.py @@ -4,7 +4,7 @@ from lettuce import world, step @step('I register for the course numbered "([^"]*)"$') def i_register_for_the_course(step, course): courses_section = world.browser.find_by_css('section.courses') - course_link_css = 'article[id*="%s"] a' % course + course_link_css = 'article[id*="%s"] > div' % course course_link = courses_section.find_by_css(course_link_css).first course_link.click() @@ -25,3 +25,4 @@ def i_should_see_that_course_in_my_dashboard(step, course): def i_press_the_button_in_the_unenroll_dialog(step, value): button_css = 'section#unenroll-modal input[value="%s"]' % value 
world.browser.find_by_css(button_css).click() + assert world.browser.is_element_present_by_css('section.container.dashboard') diff --git a/lms/djangoapps/portal/features/signup.feature b/lms/djangoapps/courseware/features/signup.feature similarity index 100% rename from lms/djangoapps/portal/features/signup.feature rename to lms/djangoapps/courseware/features/signup.feature diff --git a/lms/djangoapps/portal/features/signup.py b/lms/djangoapps/courseware/features/signup.py similarity index 100% rename from lms/djangoapps/portal/features/signup.py rename to lms/djangoapps/courseware/features/signup.py diff --git a/lms/djangoapps/courseware/features/smart-accordion.feature b/lms/djangoapps/courseware/features/smart-accordion.feature index 90d097144a..ccf1d45601 100644 --- a/lms/djangoapps/courseware/features/smart-accordion.feature +++ b/lms/djangoapps/courseware/features/smart-accordion.feature @@ -23,37 +23,41 @@ Feature: There are courses on the homepage As an acceptance test I want to count all the chapters, sections, and tabs for each course - Scenario: Navigate through course MITx/3.091x/2012_Fall - Given I am registered for course "MITx/3.091x/2012_Fall" - And I log in - Then I verify all the content of each course + # Commenting these all out for now because they don't always run, + # they have too many prerequesites, e.g. the course exists, and + # is within the start and end dates, etc. 
- Scenario: Navigate through course MITx/6.002x/2012_Fall - Given I am registered for course "MITx/6.002x/2012_Fall" - And I log in - Then I verify all the content of each course + # Scenario: Navigate through course MITx/3.091x/2012_Fall + # Given I am registered for course "MITx/3.091x/2012_Fall" + # And I log in + # Then I verify all the content of each course - Scenario: Navigate through course MITx/6.00x/2012_Fall - Given I am registered for course "MITx/6.00x/2012_Fall" - And I log in - Then I verify all the content of each course + # Scenario: Navigate through course MITx/6.002x/2012_Fall + # Given I am registered for course "MITx/6.002x/2012_Fall" + # And I log in + # Then I verify all the content of each course - Scenario: Navigate through course HarvardX/PH207x/2012_Fall - Given I am registered for course "HarvardX/PH207x/2012_Fall" - And I log in - Then I verify all the content of each course + # Scenario: Navigate through course MITx/6.00x/2012_Fall + # Given I am registered for course "MITx/6.00x/2012_Fall" + # And I log in + # Then I verify all the content of each course - Scenario: Navigate through course BerkeleyX/CS169.1x/2012_Fall - Given I am registered for course "BerkeleyX/CS169.1x/2012_Fall" - And I log in - Then I verify all the content of each course + # Scenario: Navigate through course HarvardX/PH207x/2012_Fall + # Given I am registered for course "HarvardX/PH207x/2012_Fall" + # And I log in + # Then I verify all the content of each course - Scenario: Navigate through course BerkeleyX/CS169.2x/2012_Fall - Given I am registered for course "BerkeleyX/CS169.2x/2012_Fall" - And I log in - Then I verify all the content of each course + # Scenario: Navigate through course BerkeleyX/CS169.1x/2012_Fall + # Given I am registered for course "BerkeleyX/CS169.1x/2012_Fall" + # And I log in + # Then I verify all the content of each course - Scenario: Navigate through course BerkeleyX/CS184.1x/2012_Fall - Given I am registered for course 
"BerkeleyX/CS184.1x/2012_Fall" - And I log in - Then I verify all the content of each course \ No newline at end of file + # Scenario: Navigate through course BerkeleyX/CS169.2x/2012_Fall + # Given I am registered for course "BerkeleyX/CS169.2x/2012_Fall" + # And I log in + # Then I verify all the content of each course + + # Scenario: Navigate through course BerkeleyX/CS184.1x/2012_Fall + # Given I am registered for course "BerkeleyX/CS184.1x/2012_Fall" + # And I log in + # Then I verify all the content of each course \ No newline at end of file diff --git a/lms/djangoapps/courseware/module_render.py b/lms/djangoapps/courseware/module_render.py index 599007bb83..7877c83bdc 100644 --- a/lms/djangoapps/courseware/module_render.py +++ b/lms/djangoapps/courseware/module_render.py @@ -226,6 +226,30 @@ def _get_module(user, request, descriptor, student_module_cache, course_id, 'waittime': settings.XQUEUE_WAITTIME_BETWEEN_REQUESTS } + def get_or_default(key, default): + getattr(settings, key, default) + + #This is a hacky way to pass settings to the combined open ended xmodule + #It needs an S3 interface to upload images to S3 + #It needs the open ended grading interface in order to get peer grading to be done + #TODO: refactor these settings into module-specific settings when possible. 
+ #this first checks to see if the descriptor is the correct one, and only sends settings if it is + is_descriptor_combined_open_ended = (descriptor.__class__.__name__ == 'CombinedOpenEndedDescriptor') + is_descriptor_peer_grading = (descriptor.__class__.__name__ == 'PeerGradingDescriptor') + open_ended_grading_interface = None + s3_interface = None + if is_descriptor_combined_open_ended or is_descriptor_peer_grading: + open_ended_grading_interface = settings.OPEN_ENDED_GRADING_INTERFACE + open_ended_grading_interface['mock_peer_grading'] = settings.MOCK_PEER_GRADING + open_ended_grading_interface['mock_staff_grading'] = settings.MOCK_STAFF_GRADING + if is_descriptor_combined_open_ended: + s3_interface = { + 'access_key' : get_or_default('AWS_ACCESS_KEY_ID',''), + 'secret_access_key' : get_or_default('AWS_SECRET_ACCESS_KEY',''), + 'storage_bucket_name' : get_or_default('AWS_STORAGE_BUCKET_NAME','') + } + + def inner_get_module(descriptor): """ Delegate to get_module. It does an access check, so may return None @@ -255,6 +279,8 @@ def _get_module(user, request, descriptor, student_module_cache, course_id, node_path=settings.NODE_PATH, anonymous_student_id=unique_id_for_user(user), course_id=course_id, + open_ended_grading_interface=open_ended_grading_interface, + s3_interface=s3_interface, ) # pass position specified in URL to module through ModuleSystem system.set('position', position) @@ -280,6 +306,7 @@ def _get_module(user, request, descriptor, student_module_cache, course_id, # Make an error module return err_descriptor.xmodule_constructor(system)(None, None) + system.set('user_is_staff', has_access(user, descriptor.location, 'staff', course_id)) _get_html = module.get_html if wrap_xmodule_display == True: diff --git a/lms/djangoapps/instructor/views.py b/lms/djangoapps/instructor/views.py index 2bb4f00724..75146b833f 100644 --- a/lms/djangoapps/instructor/views.py +++ b/lms/djangoapps/instructor/views.py @@ -413,7 +413,7 @@ def instructor_dashboard(request, 
course_id): smdat = StudentModule.objects.filter(course_id=course_id, module_state_key=module_state_key) smdat = smdat.order_by('student') - msg+="Found module to reset. " + msg += "Found %d records to dump " % len(smdat) except Exception as err: msg+="Couldn't find module with that urlname. " msg += "
    %s
    " % escape(err) diff --git a/lms/djangoapps/open_ended_grading/controller_query_service.py b/lms/djangoapps/open_ended_grading/controller_query_service.py index 83d5617bd2..1b124fc116 100644 --- a/lms/djangoapps/open_ended_grading/controller_query_service.py +++ b/lms/djangoapps/open_ended_grading/controller_query_service.py @@ -1,12 +1,6 @@ -import json import logging -import requests -from requests.exceptions import RequestException, ConnectionError, HTTPError -import sys -from xmodule.grading_service_module import GradingService, GradingServiceError +from xmodule.open_ended_grading_classes.grading_service_module import GradingService -from django.conf import settings -from django.http import HttpResponse, Http404 from xmodule.x_module import ModuleSystem from mitxmako.shortcuts import render_to_string @@ -20,6 +14,8 @@ class ControllerQueryService(GradingService): def __init__(self, config): config['system'] = ModuleSystem(None, None, None, render_to_string, None) super(ControllerQueryService, self).__init__(config) + self.url = config['url'] + config['grading_controller'] + self.login_url = self.url + '/login/' self.check_eta_url = self.url + '/get_submission_eta/' self.is_unique_url = self.url + '/is_name_unique/' self.combined_notifications_url = self.url + '/combined_notifications/' diff --git a/lms/djangoapps/open_ended_grading/open_ended_notifications.py b/lms/djangoapps/open_ended_grading/open_ended_notifications.py index f79013e396..c4054895d3 100644 --- a/lms/djangoapps/open_ended_grading/open_ended_notifications.py +++ b/lms/djangoapps/open_ended_grading/open_ended_notifications.py @@ -1,16 +1,14 @@ from django.conf import settings +from xmodule.open_ended_grading_classes import peer_grading_service from staff_grading_service import StaffGradingService from open_ended_grading.controller_query_service import ControllerQueryService -from xmodule import peer_grading_service import json from student.models import unique_id_for_user -import 
open_ended_util from courseware.models import StudentModule import logging from courseware.access import has_access from util.cache import cache import datetime -from xmodule import peer_grading_service from xmodule.x_module import ModuleSystem from mitxmako.shortcuts import render_to_string @@ -28,7 +26,7 @@ NOTIFICATION_TYPES = ( def staff_grading_notifications(course, user): - staff_gs = StaffGradingService(settings.STAFF_GRADING_INTERFACE) + staff_gs = StaffGradingService(settings.OPEN_ENDED_GRADING_INTERFACE) pending_grading = False img_path = "" course_id = course.id @@ -47,10 +45,11 @@ def staff_grading_notifications(course, user): except: #Non catastrophic error, so no real action notifications = {} - log.info("Problem with getting notifications from staff grading service.") + #This is a dev_facing_error + log.info("Problem with getting notifications from staff grading service for course {0} user {1}.".format(course_id, student_id)) if pending_grading: - img_path = "/static/images/slider-handle.png" + img_path = "/static/images/grading_notification.png" notification_dict = {'pending_grading': pending_grading, 'img_path': img_path, 'response': notifications} @@ -61,7 +60,7 @@ def staff_grading_notifications(course, user): def peer_grading_notifications(course, user): system = ModuleSystem(None, None, None, render_to_string, None) - peer_gs = peer_grading_service.PeerGradingService(settings.PEER_GRADING_INTERFACE, system) + peer_gs = peer_grading_service.PeerGradingService(settings.OPEN_ENDED_GRADING_INTERFACE, system) pending_grading = False img_path = "" course_id = course.id @@ -80,10 +79,11 @@ def peer_grading_notifications(course, user): except: #Non catastrophic error, so no real action notifications = {} - log.info("Problem with getting notifications from peer grading service.") + #This is a dev_facing_error + log.info("Problem with getting notifications from peer grading service for course {0} user {1}.".format(course_id, student_id)) if 
pending_grading: - img_path = "/static/images/slider-handle.png" + img_path = "/static/images/grading_notification.png" notification_dict = {'pending_grading': pending_grading, 'img_path': img_path, 'response': notifications} @@ -93,8 +93,7 @@ def peer_grading_notifications(course, user): def combined_notifications(course, user): - controller_url = open_ended_util.get_controller_url() - controller_qs = ControllerQueryService(controller_url) + controller_qs = ControllerQueryService(settings.OPEN_ENDED_GRADING_INTERFACE) student_id = unique_id_for_user(user) user_is_staff = has_access(user, course, 'staff') course_id = course.id @@ -126,10 +125,11 @@ def combined_notifications(course, user): except: #Non catastrophic error, so no real action notifications = {} - log.exception("Problem with getting notifications from controller query service.") + #This is a dev_facing_error + log.exception("Problem with getting notifications from controller query service for course {0} user {1}.".format(course_id, student_id)) if pending_grading: - img_path = "/static/images/slider-handle.png" + img_path = "/static/images/grading_notification.png" notification_dict = {'pending_grading': pending_grading, 'img_path': img_path, 'response': notifications} diff --git a/lms/djangoapps/open_ended_grading/open_ended_util.py b/lms/djangoapps/open_ended_grading/open_ended_util.py deleted file mode 100644 index 1aa0f1ba70..0000000000 --- a/lms/djangoapps/open_ended_grading/open_ended_util.py +++ /dev/null @@ -1,13 +0,0 @@ -from django.conf import settings -import logging - -log = logging.getLogger(__name__) - - -def get_controller_url(): - peer_grading_url = settings.PEER_GRADING_INTERFACE['url'] - split_url = peer_grading_url.split("/") - controller_url = "http://" + split_url[2] + "/grading_controller" - controller_settings = settings.PEER_GRADING_INTERFACE.copy() - controller_settings['url'] = controller_url - return controller_settings diff --git 
a/lms/djangoapps/open_ended_grading/staff_grading_service.py b/lms/djangoapps/open_ended_grading/staff_grading_service.py index dfadacb724..79b92dffba 100644 --- a/lms/djangoapps/open_ended_grading/staff_grading_service.py +++ b/lms/djangoapps/open_ended_grading/staff_grading_service.py @@ -4,10 +4,7 @@ This module provides views that proxy to the staff grading backend service. import json import logging -import requests -from requests.exceptions import RequestException, ConnectionError, HTTPError -import sys -from xmodule.grading_service_module import GradingService, GradingServiceError +from xmodule.open_ended_grading_classes.grading_service_module import GradingService, GradingServiceError from django.conf import settings from django.http import HttpResponse, Http404 @@ -21,6 +18,7 @@ from mitxmako.shortcuts import render_to_string log = logging.getLogger(__name__) +STAFF_ERROR_MESSAGE = 'Could not contact the external grading server. Please contact the development team. If you do not have a point of contact, you can contact Vik at vik@edx.org.' 
class MockStaffGradingService(object): """ @@ -53,7 +51,7 @@ class MockStaffGradingService(object): ]}) - def save_grade(self, course_id, grader_id, submission_id, score, feedback, skipped, rubric_scores): + def save_grade(self, course_id, grader_id, submission_id, score, feedback, skipped, rubric_scores, submission_flagged): return self.get_next(course_id, 'fake location', grader_id) @@ -64,6 +62,8 @@ class StaffGradingService(GradingService): def __init__(self, config): config['system'] = ModuleSystem(None, None, None, render_to_string, None) super(StaffGradingService, self).__init__(config) + self.url = config['url'] + config['staff_grading'] + self.login_url = self.url + '/login/' self.get_next_url = self.url + '/get_next_submission/' self.save_grade_url = self.url + '/save_grade/' self.get_problem_list_url = self.url + '/get_problem_list/' @@ -114,7 +114,7 @@ class StaffGradingService(GradingService): return json.dumps(self._render_rubric(response)) - def save_grade(self, course_id, grader_id, submission_id, score, feedback, skipped, rubric_scores): + def save_grade(self, course_id, grader_id, submission_id, score, feedback, skipped, rubric_scores, submission_flagged): """ Save a score and feedback for a submission. 
@@ -133,7 +133,8 @@ class StaffGradingService(GradingService): 'grader_id': grader_id, 'skipped': skipped, 'rubric_scores': rubric_scores, - 'rubric_scores_complete': True} + 'rubric_scores_complete': True, + 'submission_flagged': submission_flagged} return self.post(self.save_grade_url, data=data) @@ -163,7 +164,7 @@ def staff_grading_service(): if settings.MOCK_STAFF_GRADING: _service = MockStaffGradingService() else: - _service = StaffGradingService(settings.STAFF_GRADING_INTERFACE) + _service = StaffGradingService(settings.OPEN_ENDED_GRADING_INTERFACE) return _service @@ -254,10 +255,12 @@ def get_problem_list(request, course_id): return HttpResponse(response, mimetype="application/json") except GradingServiceError: - log.exception("Error from grading service. server url: {0}" + #This is a dev_facing_error + log.exception("Error from staff grading service in open ended grading. server url: {0}" .format(staff_grading_service().url)) + #This is a staff_facing_error return HttpResponse(json.dumps({'success': False, - 'error': 'Could not connect to grading service'})) + 'error': STAFF_ERROR_MESSAGE})) def _get_next(course_id, grader_id, location): @@ -267,10 +270,12 @@ def _get_next(course_id, grader_id, location): try: return staff_grading_service().get_next(course_id, location, grader_id) except GradingServiceError: - log.exception("Error from grading service. server url: {0}" + #This is a dev facing error + log.exception("Error from staff grading service in open ended grading. 
server url: {0}" .format(staff_grading_service().url)) + #This is a staff_facing_error return json.dumps({'success': False, - 'error': 'Could not connect to grading service'}) + 'error': STAFF_ERROR_MESSAGE}) @expect_json @@ -292,7 +297,7 @@ def save_grade(request, course_id): if request.method != 'POST': raise Http404 - required = set(['score', 'feedback', 'submission_id', 'location', 'rubric_scores[]']) + required = set(['score', 'feedback', 'submission_id', 'location','submission_flagged', 'rubric_scores[]']) actual = set(request.POST.keys()) missing = required - actual if len(missing) > 0: @@ -313,20 +318,26 @@ def save_grade(request, course_id): p['score'], p['feedback'], skipped, - p.getlist('rubric_scores[]')) + p.getlist('rubric_scores[]'), + p['submission_flagged']) except GradingServiceError: - log.exception("Error saving grade") - return _err_response('Could not connect to grading service') + #This is a dev_facing_error + log.exception("Error saving grade in the staff grading interface in open ended grading. Request: {0} Course ID: {1}".format(request, course_id)) + #This is a staff_facing_error + return _err_response(STAFF_ERROR_MESSAGE) try: result = json.loads(result_json) except ValueError: - log.exception("save_grade returned broken json: %s", result_json) - return _err_response('Grading service returned mal-formatted data.') + #This is a dev_facing_error + log.exception("save_grade returned broken json in the staff grading interface in open ended grading: {0}".format(result_json)) + #This is a staff_facing_error + return _err_response(STAFF_ERROR_MESSAGE) if not result.get('success', False): - log.warning('Got success=False from grading service. Response: %s', result_json) - return _err_response('Grading service failed') + #This is a dev_facing_error + log.warning('Got success=False from staff grading service in open ended grading. Response: {0}'.format(result_json)) + return _err_response(STAFF_ERROR_MESSAGE) # Ok, save_grade seemed to work. 
Get the next submission to grade. return HttpResponse(_get_next(course_id, grader_id, location), diff --git a/lms/djangoapps/open_ended_grading/tests.py b/lms/djangoapps/open_ended_grading/tests.py index ec2fe5ab38..d452883ebb 100644 --- a/lms/djangoapps/open_ended_grading/tests.py +++ b/lms/djangoapps/open_ended_grading/tests.py @@ -6,7 +6,8 @@ django-admin.py test --settings=lms.envs.test --pythonpath=. lms/djangoapps/open from django.test import TestCase from open_ended_grading import staff_grading_service -from xmodule import peer_grading_service, peer_grading_module +from xmodule.open_ended_grading_classes import peer_grading_service +from xmodule import peer_grading_module from django.core.urlresolvers import reverse from django.contrib.auth.models import Group @@ -15,7 +16,7 @@ import courseware.tests.tests as ct from xmodule.modulestore.django import modulestore import xmodule.modulestore.django from nose import SkipTest -from mock import patch, Mock +from mock import patch, Mock, MagicMock import json from xmodule.x_module import ModuleSystem from mitxmako.shortcuts import render_to_string @@ -25,6 +26,8 @@ log = logging.getLogger(__name__) from django.test.utils import override_settings from django.http import QueryDict +from xmodule.tests import test_util_open_ended + @override_settings(MODULESTORE=ct.TEST_DATA_XML_MODULESTORE) class TestStaffGradingService(ct.PageLoader): @@ -100,6 +103,7 @@ class TestStaffGradingService(ct.PageLoader): 'feedback': 'great!', 'submission_id': '123', 'location': self.location, + 'submission_flagged': "true", 'rubric_scores[]': ['1', '2']} r = self.check_for_post_code(200, url, data) @@ -143,9 +147,11 @@ class TestPeerGradingService(ct.PageLoader): location = "i4x://edX/toy/peergrading/init" self.mock_service = peer_grading_service.MockPeerGradingService() - self.system = ModuleSystem(location, None, None, render_to_string, None) + self.system = ModuleSystem(location, None, None, render_to_string, None, + s3_interface = 
test_util_open_ended.S3_INTERFACE, + open_ended_grading_interface=test_util_open_ended.OPEN_ENDED_GRADING_INTERFACE + ) self.descriptor = peer_grading_module.PeerGradingDescriptor(self.system) - self.peer_module = peer_grading_module.PeerGradingModule(self.system, location, "", self.descriptor) self.peer_module.peer_gs = self.mock_service self.logout() @@ -163,23 +169,35 @@ class TestPeerGradingService(ct.PageLoader): def test_get_next_submission_missing_location(self): data = {} - r = self.peer_module.get_next_submission(data) - d = r + d = self.peer_module.get_next_submission(data) self.assertFalse(d['success']) self.assertEqual(d['error'], "Missing required keys: location") def test_save_grade_success(self): - raise SkipTest() - data = 'rubric_scores[]=1|rubric_scores[]=2|location=' + self.location + '|submission_id=1|submission_key=fake key|score=2|feedback=feedback|submission_flagged=False' - qdict = QueryDict(data.replace("|", "&")) + data = { + 'rubric_scores[]': [0, 0], + 'location': self.location, + 'submission_id': 1, + 'submission_key': 'fake key', + 'score': 2, + 'feedback': 'feedback', + 'submission_flagged': 'false' + } + + qdict = MagicMock() + def fake_get_item(key): + return data[key] + qdict.__getitem__.side_effect = fake_get_item + qdict.getlist = fake_get_item + qdict.keys = data.keys + r = self.peer_module.save_grade(qdict) - d = r + d = json.loads(r) self.assertTrue(d['success']) def test_save_grade_missing_keys(self): data = {} - r = self.peer_module.save_grade(data) - d = r + d = self.peer_module.save_grade(data) self.assertFalse(d['success']) self.assertTrue(d['error'].find('Missing required keys:') > -1) @@ -192,8 +210,7 @@ class TestPeerGradingService(ct.PageLoader): def test_is_calibrated_failure(self): data = {} - r = self.peer_module.is_student_calibrated(data) - d = r + d = self.peer_module.is_student_calibrated(data) self.assertFalse(d['success']) self.assertFalse('calibrated' in d) @@ -213,25 +230,36 @@ class 
TestPeerGradingService(ct.PageLoader): def test_show_calibration_essay_missing_key(self): data = {} - r = self.peer_module.show_calibration_essay(data) - d = r + d = self.peer_module.show_calibration_essay(data) self.assertFalse(d['success']) self.assertEqual(d['error'], "Missing required keys: location") def test_save_calibration_essay_success(self): - raise SkipTest() - data = 'rubric_scores[]=1|rubric_scores[]=2|location=' + self.location + '|submission_id=1|submission_key=fake key|score=2|feedback=feedback|submission_flagged=False' - qdict = QueryDict(data.replace("|", "&")) - r = self.peer_module.save_calibration_essay(qdict) - d = r + data = { + 'rubric_scores[]': [0, 0], + 'location': self.location, + 'submission_id': 1, + 'submission_key': 'fake key', + 'score': 2, + 'feedback': 'feedback', + 'submission_flagged': 'false' + } + + qdict = MagicMock() + def fake_get_item(key): + return data[key] + qdict.__getitem__.side_effect = fake_get_item + qdict.getlist = fake_get_item + qdict.keys = data.keys + + d = self.peer_module.save_calibration_essay(qdict) self.assertTrue(d['success']) self.assertTrue('actual_score' in d) def test_save_calibration_essay_missing_keys(self): data = {} - r = self.peer_module.save_calibration_essay(data) - d = r + d = self.peer_module.save_calibration_essay(data) self.assertFalse(d['success']) self.assertTrue(d['error'].find('Missing required keys:') > -1) self.assertFalse('actual_score' in d) diff --git a/lms/djangoapps/open_ended_grading/views.py b/lms/djangoapps/open_ended_grading/views.py index f2e2a4513e..77c1cda6bc 100644 --- a/lms/djangoapps/open_ended_grading/views.py +++ b/lms/djangoapps/open_ended_grading/views.py @@ -2,7 +2,6 @@ import logging import urllib -import re from django.conf import settings from django.views.decorators.cache import cache_control @@ -13,12 +12,10 @@ from student.models import unique_id_for_user from courseware.courses import get_course_with_access from controller_query_service import 
ControllerQueryService -from xmodule.grading_service_module import GradingServiceError +from xmodule.open_ended_grading_classes.grading_service_module import GradingServiceError import json -from .staff_grading import StaffGrading from student.models import unique_id_for_user -import open_ended_util import open_ended_notifications from xmodule.modulestore.django import modulestore @@ -30,8 +27,7 @@ log = logging.getLogger(__name__) template_imports = {'urllib': urllib} -controller_url = open_ended_util.get_controller_url() -controller_qs = ControllerQueryService(controller_url) +controller_qs = ControllerQueryService(settings.OPEN_ENDED_GRADING_INTERFACE) """ Reverses the URL from the name and the course id, and then adds a trailing slash if @@ -64,6 +60,8 @@ ALERT_DICT = { 'Flagged Submissions': "Submissions have been flagged for review" } +STUDENT_ERROR_MESSAGE = "Error occured while contacting the grading service. Please notify course staff." +STAFF_ERROR_MESSAGE = "Error occured while contacting the grading service. Please notify the development team. If you do not have a point of contact, please email Vik at vik@edx.org" @cache_control(no_cache=True, no_store=True, must_revalidate=True) def staff_grading(request, course_id): @@ -100,7 +98,9 @@ def peer_grading(request, course_id): return HttpResponseRedirect(problem_url) except: + #This is a student_facing_error error_message = "Error with initializing peer grading. Centralized module does not exist. Please contact course staff." 
+ #This is a dev_facing_error log.exception(error_message + "Current course is: {0}".format(course_id)) return HttpResponse(error_message) @@ -136,30 +136,34 @@ def student_problem_list(request, course_id): problem_list = [] base_course_url = reverse('courses') - #try: - problem_list_json = controller_qs.get_grading_status_list(course_id, unique_id_for_user(request.user)) - problem_list_dict = json.loads(problem_list_json) - success = problem_list_dict['success'] - if 'error' in problem_list_dict: - error_text = problem_list_dict['error'] - problem_list = [] - else: - problem_list = problem_list_dict['problem_list'] + try: + problem_list_json = controller_qs.get_grading_status_list(course_id, unique_id_for_user(request.user)) + problem_list_dict = json.loads(problem_list_json) + success = problem_list_dict['success'] + if 'error' in problem_list_dict: + error_text = problem_list_dict['error'] + problem_list = [] + else: + problem_list = problem_list_dict['problem_list'] - for i in xrange(0, len(problem_list)): - problem_url_parts = search.path_to_location(modulestore(), course.id, problem_list[i]['location']) - problem_url = generate_problem_url(problem_url_parts, base_course_url) - problem_list[i].update({'actual_url': problem_url}) + for i in xrange(0, len(problem_list)): + problem_url_parts = search.path_to_location(modulestore(), course.id, problem_list[i]['location']) + problem_url = generate_problem_url(problem_url_parts, base_course_url) + problem_list[i].update({'actual_url': problem_url}) - """ except GradingServiceError: - error_text = "Error occured while contacting the grading service" + #This is a student_facing_error + error_text = STUDENT_ERROR_MESSAGE + #This is a dev facing error + log.error("Problem contacting open ended grading service.") success = False # catch error if if the json loads fails except ValueError: - error_text = "Could not get problem list" + #This is a student facing error + error_text = STUDENT_ERROR_MESSAGE + #This is a 
dev_facing_error + log.error("Problem with results from external grading service for open ended.") success = False - """ ajax_url = _reverse_with_slash('open_ended_problems', course_id) @@ -199,11 +203,17 @@ def flagged_problem_list(request, course_id): problem_list = problem_list_dict['flagged_submissions'] except GradingServiceError: - error_text = "Error occured while contacting the grading service" + #This is a staff_facing_error + error_text = STAFF_ERROR_MESSAGE + #This is a dev_facing_error + log.error("Could not get flagged problem list from external grading service for open ended.") success = False # catch error if if the json loads fails except ValueError: - error_text = "Could not get problem list" + #This is a staff_facing_error + error_text = STAFF_ERROR_MESSAGE + #This is a dev_facing_error + log.error("Could not parse problem list from external grading service response.") success = False ajax_url = _reverse_with_slash('open_ended_flagged_problems', course_id) @@ -287,7 +297,8 @@ def take_action_on_flags(request, course_id): required = ['submission_id', 'action_type', 'student_id'] for key in required: if key not in request.POST: - return HttpResponse(json.dumps({'success': False, 'error': 'Missing key {0}'.format(key)}), + #This is a staff_facing_error + return HttpResponse(json.dumps({'success': False, 'error': STAFF_ERROR_MESSAGE + 'Missing key {0} from submission. 
Please reload and try again.'.format(key)}), mimetype="application/json") p = request.POST @@ -301,5 +312,6 @@ def take_action_on_flags(request, course_id): response = controller_qs.take_action_on_flags(course_id, student_id, submission_id, action_type) return HttpResponse(response, mimetype="application/json") except GradingServiceError: - log.exception("Error saving calibration grade, submission_id: {0}, submission_key: {1}, grader_id: {2}".format(submission_id, submission_key, grader_id)) - return _err_response('Could not connect to grading service') + #This is a dev_facing_error + log.exception("Error taking action on flagged peer grading submissions, submission_id: {0}, action_type: {1}, grader_id: {2}".format(submission_id, action_type, grader_id)) + return _err_response(STAFF_ERROR_MESSAGE) diff --git a/lms/djangoapps/portal/README.md b/lms/djangoapps/portal/README.md deleted file mode 100644 index 09930ec8fb..0000000000 --- a/lms/djangoapps/portal/README.md +++ /dev/null @@ -1,15 +0,0 @@ -## acceptance_testing - -This fake django app is here to support acceptance testing using lettuce + -splinter (which wraps selenium). - -First you need to make sure that you've installed the requirements. -This includes lettuce, selenium, splinter, etc. -Do this with: -```pip install -r test-requirements.txt``` - -The settings.py environment file used is named acceptance.py. -It uses a test SQLite database defined as ../db/test-mitx.db. -You need to first start up the server separately, then run the lettuce scenarios. - -Full documentation can be found on the wiki at this link. 
diff --git a/lms/djangoapps/portal/__init__.py b/lms/djangoapps/portal/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/lms/djangoapps/portal/features/factories.py b/lms/djangoapps/portal/features/factories.py deleted file mode 100644 index 71781ea3d6..0000000000 --- a/lms/djangoapps/portal/features/factories.py +++ /dev/null @@ -1,37 +0,0 @@ -import factory -from student.models import User, UserProfile, Registration -from datetime import datetime -import uuid - - -class UserProfileFactory(factory.Factory): - FACTORY_FOR = UserProfile - - user = None - name = 'Jack Foo' - level_of_education = None - gender = 'm' - mailing_address = None - goals = 'World domination' - - -class RegistrationFactory(factory.Factory): - FACTORY_FOR = Registration - - user = None - activation_key = uuid.uuid4().hex - - -class UserFactory(factory.Factory): - FACTORY_FOR = User - - username = 'robot' - email = 'robot+test@edx.org' - password = 'test' - first_name = 'Robot' - last_name = 'Test' - is_staff = False - is_active = True - is_superuser = False - last_login = datetime(2012, 1, 1) - date_joined = datetime(2011, 1, 1) diff --git a/lms/envs/acceptance.py b/lms/envs/acceptance.py index 412815a402..b6941f4a70 100644 --- a/lms/envs/acceptance.py +++ b/lms/envs/acceptance.py @@ -38,4 +38,4 @@ MITX_FEATURES['STUB_VIDEO_FOR_TESTING'] = True # Include the lettuce app for acceptance testing, including the 'harvest' django-admin command INSTALLED_APPS += ('lettuce.django',) -LETTUCE_APPS = ('portal',) # dummy app covers the home page, login, registration, and course enrollment +LETTUCE_APPS = ('courseware',) diff --git a/lms/envs/aws.py b/lms/envs/aws.py index c4d3d8e772..9089bc92ed 100644 --- a/lms/envs/aws.py +++ b/lms/envs/aws.py @@ -100,10 +100,7 @@ XQUEUE_INTERFACE = AUTH_TOKENS['XQUEUE_INTERFACE'] MODULESTORE = AUTH_TOKENS.get('MODULESTORE', MODULESTORE) CONTENTSTORE = AUTH_TOKENS.get('CONTENTSTORE', CONTENTSTORE) -STAFF_GRADING_INTERFACE = 
AUTH_TOKENS.get('STAFF_GRADING_INTERFACE', - STAFF_GRADING_INTERFACE) -PEER_GRADING_INTERFACE = AUTH_TOKENS.get('PEER_GRADING_INTERFACE', - PEER_GRADING_INTERFACE) +OPEN_ENDED_GRADING_INTERFACE = AUTH_TOKENS.get('OPEN_ENDED_GRADING_INTERFACE', OPEN_ENDED_GRADING_INTERFACE) PEARSON_TEST_USER = "pearsontest" PEARSON_TEST_PASSWORD = AUTH_TOKENS.get("PEARSON_TEST_PASSWORD") diff --git a/lms/envs/common.py b/lms/envs/common.py index 684dfd8e88..9b0afeef3a 100644 --- a/lms/envs/common.py +++ b/lms/envs/common.py @@ -20,7 +20,6 @@ Longer TODO: """ import sys import os -import tempfile from xmodule.static_content import write_module_styles, write_module_js from path import path @@ -133,7 +132,8 @@ OPENID_PROVIDER_TRUSTED_ROOTS = ['cs50.net', '*.cs50.net'] ################################## MITXWEB ##################################### # This is where we stick our compiled template files. Most of the app uses Mako # templates -MAKO_MODULE_DIR = tempfile.mkdtemp('mako') +from tempdir import mkdtemp_clean +MAKO_MODULE_DIR = mkdtemp_clean('mako') MAKO_TEMPLATES = {} MAKO_TEMPLATES['main'] = [PROJECT_ROOT / 'templates', COMMON_ROOT / 'templates', @@ -310,37 +310,30 @@ WIKI_USE_BOOTSTRAP_SELECT_WIDGET = False WIKI_LINK_LIVE_LOOKUPS = False WIKI_LINK_DEFAULT_LEVEL = 2 -################################# Staff grading config ##################### - -#By setting up the default settings with an incorrect user name and password, -# will get an error when attempting to connect -STAFF_GRADING_INTERFACE = { - 'url': 'http://sandbox-grader-001.m.edx.org/staff_grading', - 'username': 'incorrect_user', - 'password': 'incorrect_pass', - } - -# Used for testing, debugging -MOCK_STAFF_GRADING = False - ################################# Pearson TestCenter config ################ PEARSONVUE_SIGNINPAGE_URL = "https://www1.pearsonvue.com/testtaker/signin/SignInPage/EDX" # TESTCENTER_ACCOMMODATION_REQUEST_EMAIL = "exam-help@edx.org" -################################# Peer grading config 
##################### +################################# open ended grading config ##################### #By setting up the default settings with an incorrect user name and password, # will get an error when attempting to connect -PEER_GRADING_INTERFACE = { +OPEN_ENDED_GRADING_INTERFACE = { 'url': 'http://sandbox-grader-001.m.edx.org/peer_grading', 'username': 'incorrect_user', 'password': 'incorrect_pass', + 'staff_grading' : 'staff_grading', + 'peer_grading' : 'peer_grading', + 'grading_controller' : 'grading_controller' } -# Used for testing, debugging +# Used for testing, debugging peer grading MOCK_PEER_GRADING = False +# Used for testing, debugging staff grading +MOCK_STAFF_GRADING = False + ################################# Jasmine ################################### JASMINE_TEST_DIRECTORY = PROJECT_ROOT + '/static/coffee' diff --git a/lms/envs/dev.py b/lms/envs/dev.py index 47bcee1b7e..6ecbbb0f85 100644 --- a/lms/envs/dev.py +++ b/lms/envs/dev.py @@ -131,21 +131,17 @@ if os.path.isdir(DATA_DIR): MITX_VERSION_STRING = os.popen('cd %s; git describe' % REPO_ROOT).read().strip() -################################# Staff grading config ##################### +################################# Open ended grading config ##################### -STAFF_GRADING_INTERFACE = { - 'url': 'http://127.0.0.1:3033/staff_grading', - 'username': 'lms', - 'password': 'abcd', - } +OPEN_ENDED_GRADING_INTERFACE = { + 'url' : 'http://127.0.0.1:3033/', + 'username' : 'lms', + 'password' : 'abcd', + 'staff_grading' : 'staff_grading', + 'peer_grading' : 'peer_grading', + 'grading_controller' : 'grading_controller' +} -################################# Peer grading config ##################### - -PEER_GRADING_INTERFACE = { - 'url': 'http://127.0.0.1:3033/peer_grading', - 'username': 'lms', - 'password': 'abcd', - } ################################ LMS Migration ################################# MITX_FEATURES['ENABLE_LMS_MIGRATION'] = True MITX_FEATURES['ACCESS_REQUIRE_STAFF_FOR_COURSE'] = 
False # require that user be in the staff_* group to be able to enroll diff --git a/lms/envs/test.py b/lms/envs/test.py index 6cad6416d0..c1863349fb 100644 --- a/lms/envs/test.py +++ b/lms/envs/test.py @@ -170,4 +170,4 @@ PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.SHA1PasswordHasher', 'django.contrib.auth.hashers.MD5PasswordHasher', # 'django.contrib.auth.hashers.CryptPasswordHasher', -) +) \ No newline at end of file diff --git a/lms/static/coffee/src/open_ended/open_ended.coffee b/lms/static/coffee/src/open_ended/open_ended.coffee index cc8bad5473..bfb0fa5931 100644 --- a/lms/static/coffee/src/open_ended/open_ended.coffee +++ b/lms/static/coffee/src/open_ended/open_ended.coffee @@ -41,7 +41,7 @@ class OpenEnded post: (cmd, data, callback) -> # if this post request fails, the error callback will catch it $.post(@ajax_url + cmd, data, callback) - .error => callback({success: false, error: "Error occured while performing this operation"}) + .error => callback({success: false, error: "Error occured while performing javascript ajax post."}) after_action_wrapper: (target, action_type) -> tr_parent = target.parent().parent() diff --git a/lms/static/coffee/src/staff_grading/staff_grading.coffee b/lms/static/coffee/src/staff_grading/staff_grading.coffee index 117388bab0..c4ccee4571 100644 --- a/lms/static/coffee/src/staff_grading/staff_grading.coffee +++ b/lms/static/coffee/src/staff_grading/staff_grading.coffee @@ -143,7 +143,7 @@ The standard chunk of Lorem Ipsum used since the 1500s is reproduced below for t else # TODO: replace with postWithPrefix when that's loaded $.post(@ajax_url + cmd, data, callback) - .error => callback({success: false, error: "Error occured while performing this operation"}) + .error => callback({success: false, error: "Error occured while performing javascript AJAX post."}) class @StaffGrading @@ -170,6 +170,7 @@ class @StaffGrading @feedback_area = $('.feedback-area') @score_selection_container = $('.score-selection-container') 
@grade_selection_container = $('.grade-selection-container') + @flag_submission_checkbox = $('.flag-checkbox') @submit_button = $('.submit-button') @action_button = $('.action-button') @@ -180,6 +181,10 @@ class @StaffGrading @ml_error_info_container = $('.ml-error-info-container') @breadcrumbs = $('.breadcrumbs') + + @question_header = $('.question-header') + @question_header.click @collapse_question + @collapse_question() # model state @state = state_no_data @@ -255,6 +260,7 @@ class @StaffGrading submission_id: @submission_id location: @location skipped: true + submission_flagged: false @backend.post('save_grade', data, @ajax_callback) get_problem_list: () -> @@ -268,6 +274,7 @@ class @StaffGrading feedback: @feedback_area.val() submission_id: @submission_id location: @location + submission_flagged: @flag_submission_checkbox.is(':checked') @backend.post('save_grade', data, @ajax_callback) @@ -325,6 +332,7 @@ class @StaffGrading @error_container.html(@error_msg) @message_container.toggle(@message != "") @error_container.toggle(@error_msg != "") + @flag_submission_checkbox.prop('checked', false) # only show the grading elements when we are not in list view or the state @@ -388,10 +396,10 @@ class @StaffGrading else if @state == state_grading @ml_error_info_container.html(@ml_error_info) - meta_list = $("
      ") - meta_list.append("
    • Available - #{@num_pending}
    • ") - meta_list.append("
    • Graded - #{@num_graded}
    • ") - meta_list.append("
    • Needed for ML - #{Math.max(@min_for_ml - @num_graded, 0)}
    • ") + meta_list = $("
      ") + meta_list.append("
      #{@num_pending} available |
      ") + meta_list.append("
      #{@num_graded} graded |
      ") + meta_list.append("
      #{Math.max(@min_for_ml - @num_graded, 0)} more needed to start ML

      ") @problem_meta_info.html(meta_list) @prompt_container.html(@prompt) @@ -428,7 +436,17 @@ class @StaffGrading @get_next_submission(@location) else @error('System got into invalid state for submission: ' + @state) - + + collapse_question: () => + @prompt_container.slideToggle() + @prompt_container.toggleClass('open') + if @question_header.text() == "(Hide)" + new_text = "(Show)" + else + new_text = "(Hide)" + @question_header.text(new_text) + + # for now, just create an instance and load it... mock_backend = false diff --git a/lms/static/images/university/anu/anu-cover.jpg b/lms/static/images/university/anu/anu-cover.jpg new file mode 100644 index 0000000000..591df9791d Binary files /dev/null and b/lms/static/images/university/anu/anu-cover.jpg differ diff --git a/lms/static/images/university/anu/anu.png b/lms/static/images/university/anu/anu.png new file mode 100644 index 0000000000..5caeb0b180 Binary files /dev/null and b/lms/static/images/university/anu/anu.png differ diff --git a/lms/static/images/university/delft/delft-cover.jpg b/lms/static/images/university/delft/delft-cover.jpg new file mode 100644 index 0000000000..73092d7596 Binary files /dev/null and b/lms/static/images/university/delft/delft-cover.jpg differ diff --git a/lms/static/images/university/delft/delft.png b/lms/static/images/university/delft/delft.png new file mode 100644 index 0000000000..03c566e91f Binary files /dev/null and b/lms/static/images/university/delft/delft.png differ diff --git a/lms/static/images/university/epfl/epfl-cover.jpg b/lms/static/images/university/epfl/epfl-cover.jpg new file mode 100644 index 0000000000..42b188c925 Binary files /dev/null and b/lms/static/images/university/epfl/epfl-cover.jpg differ diff --git a/lms/static/images/university/epfl/epfl.png b/lms/static/images/university/epfl/epfl.png new file mode 100644 index 0000000000..6725340a1a Binary files /dev/null and b/lms/static/images/university/epfl/epfl.png differ diff --git 
a/lms/static/images/university/mcgill/mcgill-cover.jpg b/lms/static/images/university/mcgill/mcgill-cover.jpg new file mode 100644 index 0000000000..4d213c8c92 Binary files /dev/null and b/lms/static/images/university/mcgill/mcgill-cover.jpg differ diff --git a/lms/static/images/university/mcgill/mcgill.png b/lms/static/images/university/mcgill/mcgill.png new file mode 100644 index 0000000000..5a0bcb56ad Binary files /dev/null and b/lms/static/images/university/mcgill/mcgill.png differ diff --git a/lms/static/images/university/rice/rice-cover.jpg b/lms/static/images/university/rice/rice-cover.jpg new file mode 100644 index 0000000000..2266a72c6d Binary files /dev/null and b/lms/static/images/university/rice/rice-cover.jpg differ diff --git a/lms/static/images/university/rice/rice.png b/lms/static/images/university/rice/rice.png new file mode 100644 index 0000000000..95865f2dba Binary files /dev/null and b/lms/static/images/university/rice/rice.png differ diff --git a/lms/static/images/university/toronto/toronto-cover.jpg b/lms/static/images/university/toronto/toronto-cover.jpg new file mode 100644 index 0000000000..0d3434659e Binary files /dev/null and b/lms/static/images/university/toronto/toronto-cover.jpg differ diff --git a/lms/static/images/university/toronto/toronto.png b/lms/static/images/university/toronto/toronto.png new file mode 100644 index 0000000000..86851b9468 Binary files /dev/null and b/lms/static/images/university/toronto/toronto.png differ diff --git a/lms/static/sass/course/_rubric.scss b/lms/static/sass/course/_rubric.scss index 5048d70253..294ac86d78 100644 --- a/lms/static/sass/course/_rubric.scss +++ b/lms/static/sass/course/_rubric.scss @@ -1,11 +1,11 @@ .rubric { - margin: 40px 0px; + margin: 0px 0px; + color: #3C3C3C; tr { - margin:10px 0px; + margin:0px 0px; height: 100%; } td { - padding: 20px 0px 25px 0px; height: 100%; border: 1px black solid; text-align: center; @@ -21,19 +21,13 @@ .rubric-label { position: relative; - padding: 0px 
15px 15px 15px; - width: 130px; - min-height: 50px; - min-width: 50px; font-size: .9em; - background-color: white; display: block; } .grade { position: absolute; bottom:0px; right:0px; - margin:10px; } .selected-grade, .selected-grade .rubric-label { @@ -42,11 +36,21 @@ } input[type=radio]:checked + .rubric-label { background: white; - color: $base-font-color; } + color: $base-font-color; + white-space:nowrap; + } + .wrappable { + white-space:normal; + } input[class='score-selection'] { position: relative; - margin-left: 10px; font-size: 16px; } + ul.rubric-list + { + list-style-type: none; + padding:0; + margin:0; + } } diff --git a/lms/static/sass/course/_staff_grading.scss b/lms/static/sass/course/_staff_grading.scss index 1aaca8f077..b387d753d1 100644 --- a/lms/static/sass/course/_staff_grading.scss +++ b/lms/static/sass/course/_staff_grading.scss @@ -2,19 +2,48 @@ div.staff-grading, div.peer-grading{ textarea.feedback-area { height: 75px; - margin: 20px; + margin: 0px; + } + + ul.rubric-list{ + list-style-type: none; + padding:0; + margin:0; + li { + &.rubric-list-item{ + margin-bottom: 0px; + padding: 0px; + } + } + } + + h1 { + margin : 0 0 0 10px; + } + + h2{ + a + { + text-size: .5em; + } } div { - margin: 10px; + margin: 0px; + &.submission-container{ + overflow-y: auto; + height: 150px; + background: #F6F6F6; + border: 1px solid #ddd; + @include clearfix; + } } label { - margin: 10px; - padding: 5px; - @include inline-block; + margin: 0px; + padding: 2px; min-width: 50px; - background-color: #CCC; + background-color: white; text-size: 1.5em; } @@ -36,11 +65,11 @@ div.peer-grading{ width:100%; th { - padding: 10px; + padding: 2px; } td { - padding:10px; + padding:2px; } td.problem-name { @@ -59,71 +88,61 @@ div.peer-grading{ .calibration-feedback-wrapper, .grading-container { - border: 1px solid gray; - padding: 15px; + padding: 2px; } .error-container { background-color: #FFCCCC; - padding: 15px; + padding: 2px; margin-left: 0px; } .submission-wrapper { 
h3 { - margin-bottom: 15px; + margin-bottom: 2px; } p { - margin-left:10px; + margin-left:2px; } - padding: 15px; + padding: 2px; + padding-bottom: 15px; } .meta-info-wrapper { background-color: #eee; - padding:15px; - h3 + padding:2px; + div { - font-size:1em; - } - ul - { - list-style-type: none; - font-size: .85em; - li - { - margin: 5px 0px; - } + display : inline; } } .message-container, .grading-message { background-color: $yellow; - padding: 10px; + padding: 2px; margin-left:0px; } .breadcrumbs { - margin-top:20px; + margin-top:2px; margin-left:0px; - margin-bottom:5px; + margin-bottom:2px; font-size: .8em; } .instructions-panel { - margin-right:20px; + margin-right:2px; > div { padding: 2px; - margin: 0px; + margin-bottom: 5px; background: #eee; - height: 10em; width:47.6%; h3 { @@ -161,8 +180,8 @@ div.peer-grading{ margin-left: 0px; header { - margin-top:20px; - margin-bottom:20px; + margin-top:2px; + margin-bottom:2px; font-size: 1.2em; } } @@ -175,5 +194,7 @@ div.peer-grading{ margin-top: 20px; } } - padding: 40px; + padding: 15px; + border: none; } + diff --git a/lms/static/sass/multicourse/_courses.scss b/lms/static/sass/multicourse/_courses.scss index 7e5bd4ff38..45ecfcd23f 100644 --- a/lms/static/sass/multicourse/_courses.scss +++ b/lms/static/sass/multicourse/_courses.scss @@ -5,6 +5,7 @@ header.search { background: rgb(240,240,240); background-size: cover; + background-position: center top !important; border-bottom: 1px solid rgb(100,100,100); @include box-shadow(inset 0 -1px 8px 0 rgba(0,0,0, 0.2), inset 0 1px 12px 0 rgba(0,0,0, 0.3)); height: 430px; diff --git a/lms/static/sass/multicourse/_home.scss b/lms/static/sass/multicourse/_home.scss index 669bd889b0..b5546aa470 100644 --- a/lms/static/sass/multicourse/_home.scss +++ b/lms/static/sass/multicourse/_home.scss @@ -1,6 +1,11 @@ .home { padding: 0px; + > .container { + @include box-sizing(border-box); + width: flex-grid(12); + } + > header { background: rgb(255,255,255); @include 
background-image(url('/static/images/homepage-bg.jpg')); @@ -175,9 +180,6 @@ } .university-partners { - @include background-image(linear-gradient(180deg, rgba(245,245,245, 0) 0%, - rgba(245,245,245, 1) 50%, - rgba(245,245,245, 0) 100%)); border-bottom: 1px solid rgb(210,210,210); margin-bottom: 0px; overflow: hidden; @@ -300,7 +302,6 @@ } img { - max-width: 190px; position: relative; @include transition(all, 0.25s, ease-in-out); vertical-align: middle; @@ -324,6 +325,44 @@ } } } + + &.university-partners2x6 { + @include box-sizing(border-box); + width: flex-grid(12, 12); + + .partners { + @include box-sizing(border-box); + @include clearfix(); + margin-left: 60px; + padding: 12px 0; + + .partner { + @include box-sizing(border-box); + width: flex-grid(2, 12); + display: block; + float: left; + padding: 0 12px; + + a { + + img { + width: 100%; + height: auto; + } + + .name > span { + font-size: 1.0em; + } + + &:hover { + .name { + bottom: 14px; + } + } + } + } + } + } } .more-info { diff --git a/lms/templates/combined_open_ended.html b/lms/templates/combined_open_ended.html index 4599feaa3b..700e171ace 100644 --- a/lms/templates/combined_open_ended.html +++ b/lms/templates/combined_open_ended.html @@ -1,24 +1,26 @@
      - -

      ${display_name}

      - ${status | n} + ${status|n}
      +

      ${display_name}

      -

      Problem

      +

      Prompt (Hide)

      % for item in items: -
      ${item['content'] | n}
      +
      ${item['content'] | n}
      % endfor
      -
      - +
      +
      +
      +
      +
      diff --git a/lms/templates/combined_open_ended_legend.html b/lms/templates/combined_open_ended_legend.html new file mode 100644 index 0000000000..e3e2494670 --- /dev/null +++ b/lms/templates/combined_open_ended_legend.html @@ -0,0 +1,13 @@ +
      +
      + Legend +
      + % for i in xrange(0,len(legend_list)): + <%legend_title=legend_list[i]['name'] %> + <%legend_image=legend_list[i]['image'] %> + +
      + ${legend_title}= +
      + % endfor +
      diff --git a/lms/templates/combined_open_ended_results.html b/lms/templates/combined_open_ended_results.html index 65732cdbaa..0a03737b8f 100644 --- a/lms/templates/combined_open_ended_results.html +++ b/lms/templates/combined_open_ended_results.html @@ -1,4 +1,4 @@ -
      -

      Results from Step ${task_number}

      +
      +

      ${task_name}

      ${results | n}
      diff --git a/lms/templates/combined_open_ended_status.html b/lms/templates/combined_open_ended_status.html index 1640ae3311..d13077737f 100644 --- a/lms/templates/combined_open_ended_status.html +++ b/lms/templates/combined_open_ended_status.html @@ -1,34 +1,23 @@ -%if status_list[0]['state'] != 'initial': -

      Status

      -%endif diff --git a/lms/templates/feed.rss b/lms/templates/feed.rss index 8048c86cbd..a5b40ca11c 100644 --- a/lms/templates/feed.rss +++ b/lms/templates/feed.rss @@ -6,7 +6,16 @@ ## EdX Blog - 2013-01-30T14:00:12-07:00 + 2013-02-20T14:00:12-07:00 + + tag:www.edx.org,2012:Post/13 + 2013-02-20T10:00:00-07:00 + 2013-02-20T10:00:00-07:00 + + edX Expands Internationally and Doubles its Institutional Membership with the Addition of Six New Schools + <img src="${static.url('images/press/releases/edx-logo_240x180.png')}" /> + <p></p> + tag:www.edx.org,2012:Post/13 2013-01-30T10:00:00-07:00 @@ -16,15 +25,6 @@ <img src="${static.url('images/press/releases/eric-lander_240x180.jpg')}" /> <p></p> - - tag:www.edx.org,2012:Post/12 - 2013-01-29T10:00:00-07:00 - 2013-01-29T10:00:00-07:00 - - City of Boston and edX partner to establish BostonX to improve educational access for residents - <img src="${static.url('images/press/releases/edx-logo_240x180.png')}" /> - <p></p> - tag:www.edx.org,2012:Post/11 2013-01-22T10:00:00-07:00 @@ -34,6 +34,15 @@ <img src="${static.url('images/press/releases/dr-lewin-316_240x180.jpg')}" /> <p></p> + + tag:www.edx.org,2012:Post/12 + 2013-01-29T10:00:00-07:00 + 2013-01-29T10:00:00-07:00 + + City of Boston and edX partner to establish BostonX to improve educational access for residents + <img src="${static.url('images/press/releases/edx-logo_240x180.png')}" /> + <p></p> + diff --git a/lms/templates/index.html b/lms/templates/index.html index d08ba09e61..3e6d22cc91 100644 --- a/lms/templates/index.html +++ b/lms/templates/index.html @@ -47,8 +47,8 @@

      Explore free courses from edX universities

      -
      -
        +
        +
        1. @@ -65,7 +65,7 @@
      -
    • +
    • @@ -73,11 +73,6 @@
    • - - -
      - -
      1. @@ -86,6 +81,27 @@
      +
    • + + +
      + McGillX +
      +
      +
    • +
    • + + +
      + ANUx +
      +
      +
    • + + +
      + +
      1. @@ -94,7 +110,7 @@
    -
  • +
  • @@ -102,6 +118,38 @@
  • +
  • + + +
    + TorontoX +
    +
    +
  • +
  • + + +
    + EPFLx +
    +
    +
  • +
  • + + +
    + DelftX +
    +
    +
  • +
  • + + +
    + RiceX +
    +
    +
  • diff --git a/lms/templates/instructor/staff_grading.html b/lms/templates/instructor/staff_grading.html index dcfece34b8..1c5f7364ad 100644 --- a/lms/templates/instructor/staff_grading.html +++ b/lms/templates/instructor/staff_grading.html @@ -42,15 +42,13 @@

    -

    Problem Information

    -

    Maching Learning Information

    -

    Question

    +

    Prompt (Hide)

    @@ -62,11 +60,10 @@
    -

    Grading

    -

    Student Submission

    +

    Student Response

    @@ -78,6 +75,9 @@

    Written Feedback

    +

    + Flag as inappropriate content for later review +

    diff --git a/lms/templates/open_ended.html b/lms/templates/open_ended.html index 5697a5ab8b..64defedda4 100644 --- a/lms/templates/open_ended.html +++ b/lms/templates/open_ended.html @@ -3,7 +3,7 @@
    ${prompt|n}
    -

    Answer

    +

    Response

    diff --git a/lms/templates/open_ended_combined_rubric.html b/lms/templates/open_ended_combined_rubric.html new file mode 100644 index 0000000000..61393cdc95 --- /dev/null +++ b/lms/templates/open_ended_combined_rubric.html @@ -0,0 +1,28 @@ +
    + % for i in range(len(categories)): + <% category = categories[i] %> + ${category['description']}
    +
      + % for j in range(len(category['options'])): + <% option = category['options'][j] %> +
    • +
      + %for grader_type in category['options'][j]['grader_types']: + % if grader_type in grader_type_image_dict: + <% grader_image = grader_type_image_dict[grader_type] %> + % if grader_type in human_grader_types: + <% human_title = human_grader_types[grader_type] %> + % else: + <% human_title = grader_type %> + % endif + + % endif + %endfor + ${option['points']} points : ${option['text']} +
      +
    • + % endfor +
    + % endfor +
    + diff --git a/lms/templates/open_ended_feedback.html b/lms/templates/open_ended_feedback.html index 7fffddb88f..e16aea0b53 100644 --- a/lms/templates/open_ended_feedback.html +++ b/lms/templates/open_ended_feedback.html @@ -1,17 +1,10 @@
    -
    Feedback
    -
    -
    -

    Score: ${score}

    - % if grader_type == "ML": -

    Check below for full feedback:

    - % endif -
    -
    -
    -
    - ${ feedback | n} -
    +
    ${rubric_feedback | n} + % if grader_type=="PE": +
    + ${ feedback | n} +
    + % endif
    diff --git a/lms/templates/open_ended_problems/combined_notifications.html b/lms/templates/open_ended_problems/combined_notifications.html index 9de6ef3273..deb66b6064 100644 --- a/lms/templates/open_ended_problems/combined_notifications.html +++ b/lms/templates/open_ended_problems/combined_notifications.html @@ -33,7 +33,7 @@
    ${notification['name']}
    %if notification['alert']: -

    ${notification['alert_message']}

    +

    ${notification['alert_message']}

    %endif

    ${notification['description']}

    diff --git a/lms/templates/open_ended_result_table.html b/lms/templates/open_ended_result_table.html new file mode 100644 index 0000000000..24bf7a76fe --- /dev/null +++ b/lms/templates/open_ended_result_table.html @@ -0,0 +1,58 @@ +% for co in context_list: + % if co['grader_type'] in grader_type_image_dict: + <%grader_type=co['grader_type']%> + <% grader_image = grader_type_image_dict[grader_type] %> + % if grader_type in human_grader_types: + <% human_title = human_grader_types[grader_type] %> + % else: + <% human_title = grader_type %> + % endif +
    +
    + +
    +
    + ${co['rubric_html']} +
    +
    + %if len(co['feedback'])>2: +
    +
    + See full feedback +
    + +
    + %endif +
    + %if grader_type!="SA": +
    + + +
    +
    + Respond to Feedback +
    +
    +

    How accurate do you find this feedback?

    +
    +
      +
    • +
    • +
    • +
    • +
    • +
    +
    +

    Additional comments:

    + + +
    +
    +
    + %endif +
    +
    + %endif +%endfor \ No newline at end of file diff --git a/lms/templates/open_ended_rubric.html b/lms/templates/open_ended_rubric.html index eb3fc564b4..2cbab3ab3b 100644 --- a/lms/templates/open_ended_rubric.html +++ b/lms/templates/open_ended_rubric.html @@ -1,44 +1,25 @@ -
    +

    Rubric

    - % if view_only and has_score: -

    This is the rubric that was used to grade your submission. The highlighted selection matches how the grader feels you performed in each category.

    - % elif view_only: -

    Use the below rubric to rate this submission.

    - % else:

    Select the criteria you feel best represents this submission in each category.

    - % endif - - - - % for i in range(max_score + 1): - - % endfor - +
    % for i in range(len(categories)): - <% category = categories[i] %> -
    - - % for j in range(len(category['options'])): + <% category = categories[i] %> + ${category['description']}
    +
      + % for j in range(len(category['options'])): <% option = category['options'][j] %> - %if option['selected']: -
    - % endfor - + %if option['selected']: +
  • + %else: +
  • + % endif + +
  • + % endfor + % endfor -
    - ${i} points -
    ${category['description']} - %else: - - % endif - % if view_only: - ## if this is the selected rubric block, show it highlighted -
    - ${option['text']} -
    - % else: - - - % endif -
    +
    diff --git a/lms/templates/open_ended_view_only_rubric.html b/lms/templates/open_ended_view_only_rubric.html new file mode 100644 index 0000000000..7cd9370c47 --- /dev/null +++ b/lms/templates/open_ended_view_only_rubric.html @@ -0,0 +1,12 @@ +
    + % for i in range(len(categories)): + <% category = categories[i] %> + % for j in range(len(category['options'])): + <% option = category['options'][j] %> + % if option['selected']: + ${category['description']} : ${option['points']} | + % endif + % endfor + % endfor +
    + diff --git a/lms/templates/peer_grading/peer_grading.html b/lms/templates/peer_grading/peer_grading.html index d309b4486c..0485b698b2 100644 --- a/lms/templates/peer_grading/peer_grading.html +++ b/lms/templates/peer_grading/peer_grading.html @@ -14,6 +14,7 @@ + @@ -22,7 +23,18 @@ %for problem in problem_list: +
    Problem NameDue date Graded Available Required
    - ${problem['problem_name']} + %if problem['closed']: + ${problem['problem_name']} + %else: + ${problem['problem_name']} + %endif + + % if problem['due']: + ${problem['due']} + % else: + No due date + % endif ${problem['num_graded']} diff --git a/lms/templates/peer_grading/peer_grading_closed.html b/lms/templates/peer_grading/peer_grading_closed.html new file mode 100644 index 0000000000..712ad8b380 --- /dev/null +++ b/lms/templates/peer_grading/peer_grading_closed.html @@ -0,0 +1,10 @@ +
    +

    Peer Grading

    +

    The due date has passed, and + % if use_for_single_location: + peer grading for this problem is closed at this time. + %else: + peer grading is closed at this time. + %endif +

    +
    diff --git a/lms/templates/peer_grading/peer_grading_problem.html b/lms/templates/peer_grading/peer_grading_problem.html index 5963691700..853fa750e8 100644 --- a/lms/templates/peer_grading/peer_grading_problem.html +++ b/lms/templates/peer_grading/peer_grading_problem.html @@ -3,30 +3,17 @@
    -

    Peer Grading

    Learning to Grade

    -
    -

    Before you can do any proper peer grading, you first need to understand how your own grading compares to that of the instrutor. Once your grades begin to match the instructor's, you will move on to grading your peers!

    -
    -
    -

    You have successfully managed to calibrate your answers to that of the instructors and have moved onto the next step in the peer grading process.

    -
    -

    Grading

    -
    -

    You cannot start grading until you have graded a sufficient number of training problems and have been able to demonstrate that your scores closely match that of the instructor.

    -
    -
    -

    Now that you have finished your training, you are now allowed to grade your peers. Please keep in mind that students are allowed to respond to the grades and feedback they receive.

    -
    +

    Peer Grading

    -

    Question

    +

    Prompt (Hide)

    @@ -34,11 +21,11 @@
    -
    +
    -

    Grading

    +

    Student Response

    @@ -57,6 +44,7 @@

    Flag this submission for review by course staff (use if the submission contains inappropriate content):

    +

    I do not know how to grade this question:

    @@ -70,7 +58,6 @@
    -

    How did I do?

    @@ -81,11 +68,20 @@
    -

    Congratulations!

    -

    You have now completed the calibration step. You are now ready to start grading.

    +

    Ready to grade!

    +

    You have finished learning to grade, which means that you are now ready to start grading.

    + +
    +

    Learning to grade

    +

    You have not yet finished learning to grade this problem.

    +

    You will now be shown a series of instructor-scored essays, and will be asked to score them yourself.

    +

    Once you can score the essays similarly to an instructor, you will be ready to grade your peers.

    + +
    +
    diff --git a/lms/templates/self_assessment_hint.html b/lms/templates/self_assessment_hint.html index 1adfc69e39..8c6eacba11 100644 --- a/lms/templates/self_assessment_hint.html +++ b/lms/templates/self_assessment_hint.html @@ -1,6 +1,6 @@
    - ${hint_prompt} + Please enter a hint below:
    diff --git a/lms/templates/self_assessment_prompt.html b/lms/templates/self_assessment_prompt.html index 364009b134..5347e23844 100644 --- a/lms/templates/self_assessment_prompt.html +++ b/lms/templates/self_assessment_prompt.html @@ -5,7 +5,7 @@ ${prompt}
    -

    Answer

    +

    Response

    @@ -14,9 +14,9 @@
    ${initial_rubric}
    -
    ${initial_hint}
    +
    -
    ${initial_message}
    +
    diff --git a/lms/templates/static_templates/press_releases/edx_expands_internationally.html b/lms/templates/static_templates/press_releases/edx_expands_internationally.html new file mode 100644 index 0000000000..0ee42dafa9 --- /dev/null +++ b/lms/templates/static_templates/press_releases/edx_expands_internationally.html @@ -0,0 +1,81 @@ +<%! from django.core.urlresolvers import reverse %> +<%inherit file="../../main.html" /> + +<%namespace name='static' file='../../static_content.html'/> + +<%block name="title">edX Expands Internationally and Doubles its Institutional Membership with the Addition of Six New Schools +
    + + +
    +
    +

    edX Expands Internationally and Doubles its Institutional Membership with the Addition of Six New Schools

    +
    +
    +

    edX welcomes The Australian National University, Delft University of Technology, École Polytechnique Fédérale de Lausanne, McGill University, Rice University and University of Toronto to its X University Consortium of the world’s leading higher education institutions

    + +

    CAMBRIDGE, MA – Feb. 20, 2013 – +EdX, the not-for-profit online learning enterprise founded by Harvard University and the Massachusetts Institute of Technology (MIT), announced today the international expansion of its X University Consortium with the addition of six new global higher education institutions. The Australian National University (ANU), Delft University of Technology in the Netherlands, École Polytechnique Fédérale de Lausanne (EPFL) in Switzerland, McGill University and the University of Toronto in Canada, and Rice University in the United States are joining the Consortium and will use the edX platform to deliver the next generation of online and blended courses. This international expansion enables edX to better achieve its mission of providing world-class courses to everyone, everywhere, and is the natural next step to continue serving the large international student body already using edX on a daily basis. +

    + +

    While MOOCs, or massive open online courses, have typically focused on offering a variety of online courses inexpensively or for free, edX's vision is much larger. EdX is building an open source educational platform and a network of the world's top universities to improve education both online and on campus while conducting research on how students learn. To date, edX has more than 700,000 individuals on its platform, who account for more than 900,000 course enrollments. The addition of these new higher education institutions stretching from North America to Europe to the Asia Pacific will double the number of X University Consortium members and add a rich variety of new courses to edX’s offerings: +

    + +
      +
    • The Australian National University, a celebrated place of intensive research, education and policy engagement, will provide a series of ANUx courses to the open source platform including Astrophysics taught by Nobel Laureate and Professor of Astrophysics Brian Schmidt and his colleague Dr. Paul Francis, and Engaging India, taught by Dr. McComas Taylor and Dr. Peter Friedlander.
    • + +
    • Delft University of Technology, the largest and oldest technological university in the Netherlands, will provide a series of DelftX courses under Creative Commons license, including Introduction to Aerospace Engineering by Professor Jacco Hoekstra, Solar Energy by Dr. Arno Smets, and Water Treatment Engineering by Professor Jules van Lier.
    • + +
    • École Polytechnique Fédérale de Lausanne, one of the most famous institutions of science and technology in Europe, will provide a series of EPFLx courses specially tailored to fit the edX format, originating from its five schools -- Engineering, Life Sciences, Informatics and Communication, Architecture and Basic Sciences.
    • + +
    • McGill University, one of Canada's best-known institutions of higher learning and one of the leading universities in the world, will provide a series of McGillX courses in areas ranging from science and the humanities to public policy issues.
    • + +
    • Rice University, in Houston, Texas, is consistently ranked among the nation's top 20 universities by U.S. News & World Report. Rice has highly respected schools of Architecture, Business, Continuing Studies, Engineering, Humanities, Music, Natural Sciences and Social Sciences and is home to the Baker Institute for Public Policy. Rice's Smalley Institute for Nanoscale Science and Technology was the world’s first nanotechnology center when it opened in 1991. Rice will initially provide four RiceX courses and investigate ways to integrate its learning analytics tools from OpenStax Tutor to enable students and instructors to track their progress in real time.
    • + +
    • University of Toronto, one of the most respected and influential institutions of higher education and advanced research in the world, will provide a series of TorontoX courses including Terrestrial Energy System by Professor Bryan Kanrey, Behavioral Economics by Professor Dilip Soman, The Logic of Business: Building Blocks for Organizational Design by Professor Mihnea Moldoveanu, and Bioinformatic Methods by Professor Nicholas Provart.
    • +
    + +

    “We have had an international student community from the very beginning, and bringing these leading universities, from North America and Europe and the Asia Pacific into the edX organization will help us meet the tremendous demand we are experiencing,” said Anant Agarwal, President of edX. “Each of these schools was carefully selected for the distinct expertise they bring to our growing family of edX institutions. We remain committed to growing edX to meet the needs of the world while maintaining a superior learning experience for all.”

    + +

    Courses offered by institutions on the edX platform provide the same rigor as on-campus classes but are designed to take advantage of the unique features and benefits of online learning environments, including game-like experiences, instant feedback and cutting-edge virtual laboratories. Through edX, the new X Universities will provide interactive education experiences for students around the world. All that is required of edX students is access to the Internet and a desire to learn. By breaking down the barriers of location and cost and enabling the global exchange of information and ideas, edX is changing the foundations of both teaching and learning.

    + +

    The new member institutions will join founding universities MIT and Harvard, as well as the University of California, Berkeley, the University of Texas System, Wellesley College and Georgetown University in the X University Consortium. ANUx, DelftX, EPFLx, McGillX, RiceX and TorontoX will offer courses on edX beginning in late 2013. All of the courses will be hosted on edX’s open source platform at www.edx.org. +

    + +

    About edX

    + +

    EdX is a not-for-profit enterprise of its founding partners Harvard University and the Massachusetts Institute of Technology focused on transforming online and on-campus learning through groundbreaking methodologies, game-like experiences and cutting-edge research. EdX provides inspirational and transformative knowledge to students of all ages, social status, and income who form worldwide communities of learners. EdX uses its open source technology to transcend physical and social borders. We’re focused on people, not profit. EdX is based in Cambridge, Massachusetts in the USA.

    + + +
    +

    Media Contact:

    +

    Dan O'Connell

    +

    oconnell@edx.org

    +

    (617) 480-6585

    +
    + + +
    +
    +
    diff --git a/lms/templates/static_templates/press_releases/template.html b/lms/templates/static_templates/press_releases/template.html index bf2ba9bc6f..52eebf49f5 100644 --- a/lms/templates/static_templates/press_releases/template.html +++ b/lms/templates/static_templates/press_releases/template.html @@ -33,10 +33,10 @@ Text

    -

    Contact:

    -

    Brad Baker, Weber Shandwick for edX

    -

    BBaker@webershandwick.com

    -

    (617) 520-7043

    +

    Media Contact:

    +

    Dan O'Connell

    +

    oconnell@edx.org

    +

    (617) 480-6585