diff --git a/common/lib/xmodule/xmodule/combined_open_ended_module.py b/common/lib/xmodule/xmodule/combined_open_ended_module.py index eaea908f3a..ee69d925d0 100644 --- a/common/lib/xmodule/xmodule/combined_open_ended_module.py +++ b/common/lib/xmodule/xmodule/combined_open_ended_module.py @@ -108,11 +108,13 @@ class CombinedOpenEndedModule(XModule): instance_state = {} self.version = self.metadata.get('version', DEFAULT_VERSION) + version_error_string = "Version of combined open ended module {0} is not correct. Going with version {1}" if not isinstance(self.version, basestring): try: self.version = str(self.version) except: - log.error("Version {0} is not correct. Going with version {1}".format(self.version, DEFAULT_VERSION)) + #This is a dev_facing_error + log.info(version_error_string.format(self.version, DEFAULT_VERSION)) self.version = DEFAULT_VERSION versions = [i[0] for i in VERSION_TUPLES] @@ -122,7 +124,8 @@ class CombinedOpenEndedModule(XModule): try: version_index = versions.index(self.version) except: - log.error("Version {0} is not correct. Going with version {1}".format(self.version, DEFAULT_VERSION)) + #This is a dev_facing_error + log.error(version_error_string.format(self.version, DEFAULT_VERSION)) self.version = DEFAULT_VERSION version_index = versions.index(self.version) diff --git a/common/lib/xmodule/xmodule/js/src/combinedopenended/display.coffee b/common/lib/xmodule/xmodule/js/src/combinedopenended/display.coffee index fd0391450b..39c91d8c70 100644 --- a/common/lib/xmodule/xmodule/js/src/combinedopenended/display.coffee +++ b/common/lib/xmodule/xmodule/js/src/combinedopenended/display.coffee @@ -89,6 +89,8 @@ class @CombinedOpenEnded @can_upload_files = false @open_ended_child= @$('.open-ended-child') + @out_of_sync_message = 'The problem state got out of sync. Try reloading the page.' 
+ if @task_number>1 @prompt_hide() else if @task_number==1 and @child_state!='initial' @@ -293,7 +295,7 @@ class @CombinedOpenEnded $.ajaxWithPrefix("#{@ajax_url}/save_answer",settings) else - @errors_area.html('Problem state got out of sync. Try reloading the page.') + @errors_area.html(@out_of_sync_message) save_assessment: (event) => event.preventDefault() @@ -315,7 +317,7 @@ class @CombinedOpenEnded else @errors_area.html(response.error) else - @errors_area.html('Problem state got out of sync. Try reloading the page.') + @errors_area.html(@out_of_sync_message) save_hint: (event) => event.preventDefault() @@ -330,7 +332,7 @@ class @CombinedOpenEnded else @errors_area.html(response.error) else - @errors_area.html('Problem state got out of sync. Try reloading the page.') + @errors_area.html(@out_of_sync_message) skip_post_assessment: => if @child_state == 'post_assessment' @@ -342,7 +344,7 @@ class @CombinedOpenEnded else @errors_area.html(response.error) else - @errors_area.html('Problem state got out of sync. Try reloading the page.') + @errors_area.html(@out_of_sync_message) reset: (event) => event.preventDefault() @@ -362,7 +364,7 @@ class @CombinedOpenEnded else @errors_area.html(response.error) else - @errors_area.html('Problem state got out of sync. Try reloading the page.') + @errors_area.html(@out_of_sync_message) next_problem: => if @child_state == 'done' @@ -385,7 +387,7 @@ class @CombinedOpenEnded else @errors_area.html(response.error) else - @errors_area.html('Problem state got out of sync. 
Try reloading the page.') + @errors_area.html(@out_of_sync_message) gentle_alert: (msg) => if @el.find('.open-ended-alert').length diff --git a/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading_problem.coffee b/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading_problem.coffee index 5770238649..63c58e1766 100644 --- a/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading_problem.coffee +++ b/common/lib/xmodule/xmodule/js/src/peergrading/peer_grading_problem.coffee @@ -175,6 +175,7 @@ class @PeerGradingProblem @prompt_container = $('.prompt-container') @rubric_container = $('.rubric-container') @flag_student_container = $('.flag-student-container') + @answer_unknown_container = $('.answer-unknown-container') @calibration_panel = $('.calibration-panel') @grading_panel = $('.grading-panel') @content_panel = $('.content-panel') @@ -208,6 +209,7 @@ class @PeerGradingProblem @interstitial_page_button = $('.interstitial-page-button') @calibration_interstitial_page_button = $('.calibration-interstitial-page-button') @flag_student_checkbox = $('.flag-checkbox') + @answer_unknown_checkbox = $('.answer-unknown-checkbox') @collapse_question() Collapsible.setCollapsibles(@content_panel) @@ -262,6 +264,7 @@ class @PeerGradingProblem submission_key: @submission_key_input.val() feedback: @feedback_area.val() submission_flagged: @flag_student_checkbox.is(':checked') + answer_unknown: @answer_unknown_checkbox.is(':checked') return data @@ -360,6 +363,8 @@ class @PeerGradingProblem @calibration_panel.find('.grading-text').hide() @grading_panel.find('.grading-text').hide() @flag_student_container.hide() + @answer_unknown_container.hide() + @feedback_area.val("") @submit_button.unbind('click') @@ -388,6 +393,7 @@ class @PeerGradingProblem @calibration_panel.find('.grading-text').show() @grading_panel.find('.grading-text').show() @flag_student_container.show() + @answer_unknown_container.show() @feedback_area.val("") @submit_button.unbind('click') diff --git 
a/common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_modulev1.py b/common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_modulev1.py index 0e3bd86581..cc0a957e66 100644 --- a/common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_modulev1.py +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_modulev1.py @@ -19,7 +19,7 @@ log = logging.getLogger("mitx.courseware") # Set the default number of max attempts. Should be 1 for production # Set higher for debugging/testing # attempts specified in xml definition overrides this. -MAX_ATTEMPTS = 10000 +MAX_ATTEMPTS = 1 # Set maximum available number of points. # Overriden by max_score specified in xml. @@ -149,6 +149,7 @@ class CombinedOpenEndedV1Module(): self.skip_basic_checks = self.metadata.get('skip_spelling_checks', SKIP_BASIC_CHECKS) display_due_date_string = self.metadata.get('due', None) + grace_period_string = self.metadata.get('graceperiod', None) try: self.timeinfo = TimeInfo(display_due_date_string, grace_period_string) @@ -645,7 +646,10 @@ class CombinedOpenEndedV1Module(): if self.attempts > self.max_attempts: return { 'success': False, - 'error': 'Too many attempts.' + #This is a student_facing_error + 'error': ('You have attempted this question {0} times. ' + 'You are only allowed to attempt it {1} times.').format( + self.attempts, self.max_attempts) } self.state = self.INITIAL self.allow_reset = False @@ -784,7 +788,8 @@ class CombinedOpenEndedV1Descriptor(XmlDescriptor, EditingDescriptor): expected_children = ['task', 'rubric', 'prompt'] for child in expected_children: if len(xml_object.xpath(child)) == 0: - raise ValueError("Combined Open Ended definition must include at least one '{0}' tag".format(child)) + #This is a staff_facing_error + raise ValueError("Combined Open Ended definition must include at least one '{0}' tag. 
Contact the learning sciences group for assistance.".format(child)) def parse_task(k): """Assumes that xml_object has child k""" @@ -809,4 +814,4 @@ class CombinedOpenEndedV1Descriptor(XmlDescriptor, EditingDescriptor): for child in ['task']: add_child(child) - return elt + return elt \ No newline at end of file diff --git a/common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_rubric.py b/common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_rubric.py index 7c00c5f029..f756b2b853 100644 --- a/common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_rubric.py +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_rubric.py @@ -4,7 +4,6 @@ from lxml import etree log = logging.getLogger(__name__) GRADER_TYPE_IMAGE_DICT = { - '8B' : '/static/images/random_grading_icon.png', 'SA' : '/static/images/self_assessment_icon.png', 'PE' : '/static/images/peer_grading_icon.png', 'ML' : '/static/images/ml_grading_icon.png', @@ -13,7 +12,6 @@ GRADER_TYPE_IMAGE_DICT = { } HUMAN_GRADER_TYPE = { - '8B' : 'Magic-8-Ball-Assessment', 'SA' : 'Self-Assessment', 'PE' : 'Peer-Assessment', 'IN' : 'Instructor-Assessment', @@ -71,8 +69,9 @@ class CombinedOpenEndedRubric(object): }) success = True except: - error_message = "[render_rubric] Could not parse the rubric with xml: {0}".format(rubric_xml) - log.error(error_message) + #This is a staff_facing_error + error_message = "[render_rubric] Could not parse the rubric with xml: {0}. 
Contact the learning sciences group for assistance.".format(rubric_xml) + log.exception(error_message) raise RubricParsingError(error_message) return {'success' : success, 'html' : html, 'rubric_scores' : rubric_scores} @@ -81,7 +80,8 @@ class CombinedOpenEndedRubric(object): success = rubric_dict['success'] rubric_feedback = rubric_dict['html'] if not success: - error_message = "Could not parse rubric : {0} for location {1}".format(rubric_string, location.url()) + #This is a staff_facing_error + error_message = "Could not parse rubric : {0} for location {1}. Contact the learning sciences group for assistance.".format(rubric_string, location.url()) log.error(error_message) raise RubricParsingError(error_message) @@ -90,13 +90,15 @@ class CombinedOpenEndedRubric(object): for category in rubric_categories: total = total + len(category['options']) - 1 if len(category['options']) > (max_score_allowed + 1): - error_message = "Number of score points in rubric {0} higher than the max allowed, which is {1}".format( + #This is a staff_facing_error + error_message = "Number of score points in rubric {0} higher than the max allowed, which is {1}. Contact the learning sciences group for assistance.".format( len(category['options']), max_score_allowed) log.error(error_message) raise RubricParsingError(error_message) if total != max_score: - error_msg = "The max score {0} for problem {1} does not match the total number of points in the rubric {2}".format( + #This is a staff_facing_error + error_msg = "The max score {0} for problem {1} does not match the total number of points in the rubric {2}. 
Contact the learning sciences group for assistance.".format( max_score, location, total) log.error(error_msg) raise RubricParsingError(error_msg) @@ -118,7 +120,8 @@ class CombinedOpenEndedRubric(object): categories = [] for category in element: if category.tag != 'category': - raise RubricParsingError("[extract_categories] Expected a tag: got {0} instead".format(category.tag)) + #This is a staff_facing_error + raise RubricParsingError("[extract_categories] Expected a tag: got {0} instead. Contact the learning sciences group for assistance.".format(category.tag)) else: categories.append(self.extract_category(category)) return categories @@ -144,12 +147,14 @@ class CombinedOpenEndedRubric(object): self.has_score = True # if we are missing the score tag and we are expecting one elif self.has_score: - raise RubricParsingError("[extract_category] Category {0} is missing a score".format(descriptionxml.text)) + #This is a staff_facing_error + raise RubricParsingError("[extract_category] Category {0} is missing a score. Contact the learning sciences group for assistance.".format(descriptionxml.text)) # parse description if descriptionxml.tag != 'description': - raise RubricParsingError("[extract_category]: expected description tag, got {0} instead".format(descriptionxml.tag)) + #This is a staff_facing_error + raise RubricParsingError("[extract_category]: expected description tag, got {0} instead. Contact the learning sciences group for assistance.".format(descriptionxml.tag)) description = descriptionxml.text @@ -159,7 +164,8 @@ class CombinedOpenEndedRubric(object): # parse options for option in optionsxml: if option.tag != 'option': - raise RubricParsingError("[extract_category]: expected option tag, got {0} instead".format(option.tag)) + #This is a staff_facing_error + raise RubricParsingError("[extract_category]: expected option tag, got {0} instead. 
Contact the learning sciences group for assistance.".format(option.tag)) else: pointstr = option.get("points") if pointstr: @@ -168,7 +174,8 @@ class CombinedOpenEndedRubric(object): try: points = int(pointstr) except ValueError: - raise RubricParsingError("[extract_category]: expected points to have int, got {0} instead".format(pointstr)) + #This is a staff_facing_error + raise RubricParsingError("[extract_category]: expected points to have int, got {0} instead. Contact the learning sciences group for assistance.".format(pointstr)) elif autonumbering: # use the generated one if we're in the right mode points = cur_points @@ -200,7 +207,6 @@ class CombinedOpenEndedRubric(object): for grader_type in tuple[3]: rubric_categories[i]['options'][j]['grader_types'].append(grader_type) - log.debug(rubric_categories) html = self.system.render_template('open_ended_combined_rubric.html', {'categories': rubric_categories, 'has_score': True, @@ -219,13 +225,15 @@ class CombinedOpenEndedRubric(object): Validates a set of options. This can and should be extended to filter out other bad edge cases ''' if len(options) == 0: - raise RubricParsingError("[extract_category]: no options associated with this category") + #This is a staff_facing_error + raise RubricParsingError("[extract_category]: no options associated with this category. Contact the learning sciences group for assistance.") if len(options) == 1: return prev = options[0]['points'] for option in options[1:]: if prev == option['points']: - raise RubricParsingError("[extract_category]: found duplicate point values between two different options") + #This is a staff_facing_error + raise RubricParsingError("[extract_category]: found duplicate point values between two different options. 
Contact the learning sciences group for assistance.") else: prev = option['points'] @@ -241,11 +249,14 @@ class CombinedOpenEndedRubric(object): """ success = False if len(scores)==0: - log.error("Score length is 0.") + #This is a dev_facing_error + log.error("Score length is 0 when trying to reformat rubric scores for rendering.") return success, "" if len(scores) != len(score_types) or len(feedback_types) != len(scores): - log.error("Length mismatches.") + #This is a dev_facing_error + log.error("Length mismatches when trying to reformat rubric scores for rendering. " + "Scores: {0}, Score Types: {1} Feedback Types: {2}".format(scores, score_types, feedback_types)) return success, "" score_lists = [] diff --git a/common/lib/xmodule/xmodule/open_ended_grading_classes/grading_service_module.py b/common/lib/xmodule/xmodule/open_ended_grading_classes/grading_service_module.py index 6bd7a6fd0e..8a4caa1291 100644 --- a/common/lib/xmodule/xmodule/open_ended_grading_classes/grading_service_module.py +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/grading_service_module.py @@ -51,6 +51,8 @@ class GradingService(object): r = self._try_with_login(op) except (RequestException, ConnectionError, HTTPError) as err: # reraise as promised GradingServiceError, but preserve stacktrace. + #This is a dev_facing_error + log.error("Problem posting data to the grading controller. URL: {0}, data: {1}".format(url, data)) raise GradingServiceError, str(err), sys.exc_info()[2] return r.text @@ -67,6 +69,8 @@ class GradingService(object): r = self._try_with_login(op) except (RequestException, ConnectionError, HTTPError) as err: # reraise as promised GradingServiceError, but preserve stacktrace. + #This is a dev_facing_error + log.error("Problem getting data from the grading controller. 
URL: {0}, params: {1}".format(url, params)) raise GradingServiceError, str(err), sys.exc_info()[2] return r.text @@ -119,11 +123,13 @@ class GradingService(object): return response_json # if we can't parse the rubric into HTML, except etree.XMLSyntaxError, RubricParsingError: + #This is a dev_facing_error log.exception("Cannot parse rubric string. Raw string: {0}" .format(rubric)) return {'success': False, 'error': 'Error displaying submission'} except ValueError: + #This is a dev_facing_error log.exception("Error parsing response: {0}".format(response)) return {'success': False, 'error': "Error displaying submission"} diff --git a/common/lib/xmodule/xmodule/open_ended_grading_classes/open_ended_image_submission.py b/common/lib/xmodule/xmodule/open_ended_grading_classes/open_ended_image_submission.py index 88921c1429..edae69854f 100644 --- a/common/lib/xmodule/xmodule/open_ended_grading_classes/open_ended_image_submission.py +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/open_ended_image_submission.py @@ -251,8 +251,9 @@ def upload_to_s3(file_to_upload, keyname, s3_interface): return True, public_url except: - error_message = "Could not connect to S3." - log.exception(error_message) + #This is a dev_facing_error + error_message = "Could not connect to S3 to upload peer grading image. Trying to utilize bucket: {0}".format(bucketname.lower()) + log.error(error_message) return False, error_message diff --git a/common/lib/xmodule/xmodule/open_ended_grading_classes/open_ended_module.py b/common/lib/xmodule/xmodule/open_ended_grading_classes/open_ended_module.py index 95c631c8fd..96d75b366c 100644 --- a/common/lib/xmodule/xmodule/open_ended_grading_classes/open_ended_module.py +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/open_ended_module.py @@ -59,12 +59,14 @@ class OpenEndedModule(openendedchild.OpenEndedChild): self.submission_id = None self.grader_id = None + error_message = "No {0} found in problem xml for open ended problem. 
Contact the learning sciences group for assistance." if oeparam is None: - raise ValueError("No oeparam found in problem xml.") + #This is a staff_facing_error + raise ValueError(error_message.format('oeparam')) if self.prompt is None: - raise ValueError("No prompt found in problem xml.") + raise ValueError(error_message.format('prompt')) if self.rubric is None: - raise ValueError("No rubric found in problem xml.") + raise ValueError(error_message.format('rubric')) self._parse(oeparam, self.prompt, self.rubric, system) @@ -73,6 +75,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild): self.send_to_grader(self.latest_answer(), system) self.created = False + def _parse(self, oeparam, prompt, rubric, system): ''' Parse OpenEndedResponse XML: @@ -98,7 +101,8 @@ class OpenEndedModule(openendedchild.OpenEndedChild): # __init__ adds it (easiest way to get problem location into # response types) except TypeError, ValueError: - log.exception("Grader payload %r is not a json object!", grader_payload) + #This is a dev_facing_error + log.exception("Grader payload from external open ended grading server is not a json object! Object: {0}".format(grader_payload)) self.initial_display = find_with_default(oeparam, 'initial_display', '') self.answer = find_with_default(oeparam, 'answer_display', 'No answer given.') @@ -141,17 +145,20 @@ class OpenEndedModule(openendedchild.OpenEndedChild): survey_responses = event_info['survey_responses'] for tag in ['feedback', 'submission_id', 'grader_id', 'score']: if tag not in survey_responses: - return {'success': False, 'msg': "Could not find needed tag {0}".format(tag)} + #This is a student_facing_error + return {'success': False, 'msg': "Could not find needed tag {0} in the survey responses. 
Please try submitting again.".format(tag)} try: submission_id = int(survey_responses['submission_id']) grader_id = int(survey_responses['grader_id']) feedback = str(survey_responses['feedback'].encode('ascii', 'ignore')) score = int(survey_responses['score']) except: + #This is a dev_facing_error error_message = ("Could not parse submission id, grader id, " "or feedback from message_post ajax call. Here is the message data: {0}".format( survey_responses)) log.exception(error_message) + #This is a student_facing_error return {'success': False, 'msg': "There was an error saving your feedback. Please contact course staff."} qinterface = system.xqueue['interface'] @@ -188,6 +195,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild): self.state = self.DONE + #This is a student_facing_message return {'success': success, 'msg': "Successfully submitted your feedback."} def send_to_grader(self, submission, system): @@ -337,18 +345,22 @@ class OpenEndedModule(openendedchild.OpenEndedChild): for tag in ['success', 'feedback', 'submission_id', 'grader_id']: if tag not in response_items: - return format_feedback('errors', 'Error getting feedback') + #This is a student_facing_error + return format_feedback('errors', 'Error getting feedback from grader.') feedback_items = response_items['feedback'] try: feedback = json.loads(feedback_items) except (TypeError, ValueError): - log.exception("feedback_items have invalid json %r", feedback_items) - return format_feedback('errors', 'Could not parse feedback') + #This is a dev_facing_error + log.exception("feedback_items from external open ended grader have invalid json {0}".format(feedback_items)) + #This is a student_facing_error + return format_feedback('errors', 'Error getting feedback from grader.') if response_items['success']: if len(feedback) == 0: - return format_feedback('errors', 'No feedback available') + #This is a student_facing_error + return format_feedback('errors', 'No feedback available from grader.') for tag in 
do_not_render: if tag in feedback: @@ -357,6 +369,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild): feedback_lst = sorted(feedback.items(), key=get_priority) feedback_list_part1 = u"\n".join(format_feedback(k, v) for k, v in feedback_lst) else: + #This is a student_facing_error feedback_list_part1 = format_feedback('errors', response_items['feedback']) feedback_list_part2 = (u"\n".join([format_feedback_hidden(feedback_type, value) @@ -432,14 +445,16 @@ class OpenEndedModule(openendedchild.OpenEndedChild): try: score_result = json.loads(score_msg) except (TypeError, ValueError): - error_message = ("External grader message should be a JSON-serialized dict." + #This is a dev_facing_error + error_message = ("External open ended grader message should be a JSON-serialized dict." " Received score_msg = {0}".format(score_msg)) log.error(error_message) fail['feedback'] = error_message return fail if not isinstance(score_result, dict): - error_message = ("External grader message should be a JSON-serialized dict." + #This is a dev_facing_error + error_message = ("External open ended grader message should be a JSON-serialized dict." 
" Received score_result = {0}".format(score_result)) log.error(error_message) fail['feedback'] = error_message @@ -447,7 +462,8 @@ class OpenEndedModule(openendedchild.OpenEndedChild): for tag in ['score', 'feedback', 'grader_type', 'success', 'grader_id', 'submission_id']: if tag not in score_result: - error_message = ("External grader message is missing required tag: {0}" + #This is a dev_facing_error + error_message = ("External open ended grader message is missing required tag: {0}" .format(tag)) log.error(error_message) fail['feedback'] = error_message @@ -564,7 +580,10 @@ class OpenEndedModule(openendedchild.OpenEndedChild): } if dispatch not in handlers: - return 'Error' + #This is a dev_facing_error + log.error("Cannot find {0} in handlers in handle_ajax function for open_ended_module.py".format(dispatch)) + #This is a dev_facing_error + return json.dumps({'error': 'Error handling action. Please try again.', 'success' : False}) before = self.get_progress() d = handlers[dispatch](get, system) @@ -605,15 +624,21 @@ class OpenEndedModule(openendedchild.OpenEndedChild): success, get = self.append_image_to_student_answer(get) error_message = "" if success: - get['student_answer'] = OpenEndedModule.sanitize_html(get['student_answer']) - self.new_history_entry(get['student_answer']) - self.send_to_grader(get['student_answer'], system) - self.change_state(self.ASSESSING) + success, allowed_to_submit, error_message = self.check_if_student_can_submit() + if allowed_to_submit: + get['student_answer'] = OpenEndedModule.sanitize_html(get['student_answer']) + self.new_history_entry(get['student_answer']) + self.send_to_grader(get['student_answer'], system) + self.change_state(self.ASSESSING) + else: + #Error message already defined + success = False else: + #This is a student_facing_error error_message = "There was a problem saving the image in your submission. Please try a different image, or try pasting a link to an image into the answer box." 
return { - 'success': True, + 'success': success, 'error': error_message, 'student_response': get['student_answer'] } @@ -690,7 +715,8 @@ class OpenEndedDescriptor(XmlDescriptor, EditingDescriptor): """ for child in ['openendedparam']: if len(xml_object.xpath(child)) != 1: - raise ValueError("Open Ended definition must include exactly one '{0}' tag".format(child)) + #This is a staff_facing_error + raise ValueError("Open Ended definition must include exactly one '{0}' tag. Contact the learning sciences group for assistance.".format(child)) def parse(k): """Assumes that xml_object has child k""" diff --git a/common/lib/xmodule/xmodule/open_ended_grading_classes/openendedchild.py b/common/lib/xmodule/xmodule/open_ended_grading_classes/openendedchild.py index 1700dcaa07..dd8fa2a54e 100644 --- a/common/lib/xmodule/xmodule/open_ended_grading_classes/openendedchild.py +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/openendedchild.py @@ -22,6 +22,7 @@ from xmodule.stringify import stringify_children from xmodule.xml_module import XmlDescriptor from xmodule.modulestore import Location from capa.util import * +from peer_grading_service import PeerGradingService from datetime import datetime @@ -104,7 +105,9 @@ class OpenEndedChild(object): # Used for progress / grading. Currently get credit just for # completion (doesn't matter if you self-assessed correct/incorrect). self._max_score = static_data['max_score'] + self.peer_gs = PeerGradingService(system.open_ended_grading_interface, system) + self.system = system self.setup_response(system, location, definition, descriptor) def setup_response(self, system, location, definition, descriptor): @@ -127,12 +130,14 @@ class OpenEndedChild(object): if self.closed(): return True, { 'success': False, - 'error': 'This problem is now closed.' + #This is a student_facing_error + 'error': 'The problem close date has passed, and this problem is now closed.' 
} elif self.attempts > self.max_attempts: return True, { 'success': False, - 'error': 'Too many attempts.' + #This is a student_facing_error + 'error': 'You have attempted this problem {0} times. You are allowed {1} attempts.'.format(self.attempts, self.max_attempts) } else: return False, {} @@ -251,7 +256,8 @@ class OpenEndedChild(object): try: return Progress(self.get_score()['score'], self._max_score) except Exception as err: - log.exception("Got bad progress") + #This is a dev_facing_error + log.exception("Got bad progress from open ended child module. Max Score: {1}".format(self._max_score)) return None return None @@ -259,10 +265,12 @@ class OpenEndedChild(object): """ return dict out-of-sync error message, and also log. """ - log.warning("Assessment module state out sync. state: %r, get: %r. %s", + #This is a dev_facing_error + log.warning("Open ended child state out sync. state: %r, get: %r. %s", self.state, get, msg) + #This is a student_facing_error return {'success': False, - 'error': 'The problem state got out-of-sync'} + 'error': 'The problem state got out-of-sync. Please try reloading the page.'} def get_html(self): """ @@ -408,3 +416,33 @@ class OpenEndedChild(object): success = True return success, string + + def check_if_student_can_submit(self): + location = self.system.location.url() + student_id = self.system.anonymous_student_id + success = False + allowed_to_submit = True + response = {} + #This is a student_facing_error + error_string = ("You need to peer grade {0} more in order to make another submission. " + "You have graded {1}, and {2} are required. 
You have made {3} successful peer grading submissions.") + try: + response = self.peer_gs.get_data_for_location(location, student_id) + count_graded = response['count_graded'] + count_required = response['count_required'] + student_sub_count = response['student_sub_count'] + success = True + except: + #This is a dev_facing_error + log.error("Could not contact external open ended graders for location {0} and student {1}".format(location,student_id)) + #This is a student_facing_error + error_message = "Could not contact the graders. Please notify course staff." + return success, allowed_to_submit, error_message + if count_graded>=count_required: + return success, allowed_to_submit, "" + else: + allowed_to_submit = False + #This is a student_facing_error + error_message = error_string.format(count_required-count_graded, count_graded, count_required, student_sub_count) + return success, allowed_to_submit, error_message + diff --git a/common/lib/xmodule/xmodule/open_ended_grading_classes/peer_grading_service.py b/common/lib/xmodule/xmodule/open_ended_grading_classes/peer_grading_service.py index be1ff5bef6..42c54f0463 100644 --- a/common/lib/xmodule/xmodule/open_ended_grading_classes/peer_grading_service.py +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/peer_grading_service.py @@ -30,8 +30,8 @@ class PeerGradingService(GradingService): self.system = system def get_data_for_location(self, problem_location, student_id): - response = self.get(self.get_data_for_location_url, - {'location': problem_location, 'student_id': student_id}) + params = {'location': problem_location, 'student_id': student_id} + response = self.get(self.get_data_for_location_url, params) return self.try_to_decode(response) def get_next_submission(self, problem_location, grader_id): diff --git a/common/lib/xmodule/xmodule/open_ended_grading_classes/self_assessment_module.py b/common/lib/xmodule/xmodule/open_ended_grading_classes/self_assessment_module.py index c608eeea06..7ecb3c4d5e 
100644 --- a/common/lib/xmodule/xmodule/open_ended_grading_classes/self_assessment_module.py +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/self_assessment_module.py @@ -90,7 +90,10 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild): } if dispatch not in handlers: - return 'Error' + #This is a dev_facing_error + log.error("Cannot find {0} in handlers in handle_ajax function for open_ended_module.py".format(dispatch)) + #This is a dev_facing_error + return json.dumps({'error': 'Error handling action. Please try again.', 'success' : False}) before = self.get_progress() d = handlers[dispatch](get, system) @@ -123,7 +126,8 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild): elif self.state in (self.POST_ASSESSMENT, self.DONE): context['read_only'] = True else: - raise ValueError("Illegal state '%r'" % self.state) + #This is a dev_facing_error + raise ValueError("Self assessment module is in an illegal state '{0}'".format(self.state)) return system.render_template('self_assessment_rubric.html', context) @@ -148,7 +152,8 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild): elif self.state == self.DONE: context['read_only'] = True else: - raise ValueError("Illegal state '%r'" % self.state) + #This is a dev_facing_error + raise ValueError("Self Assessment module is in an illegal state '{0}'".format(self.state)) return system.render_template('self_assessment_hint.html', context) @@ -177,10 +182,16 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild): # add new history element with answer and empty score and hint. 
success, get = self.append_image_to_student_answer(get) if success: - get['student_answer'] = SelfAssessmentModule.sanitize_html(get['student_answer']) - self.new_history_entry(get['student_answer']) - self.change_state(self.ASSESSING) + success, allowed_to_submit, error_message = self.check_if_student_can_submit() + if allowed_to_submit: + get['student_answer'] = SelfAssessmentModule.sanitize_html(get['student_answer']) + self.new_history_entry(get['student_answer']) + self.change_state(self.ASSESSING) + else: + #Error message already defined + success = False else: + #This is a student_facing_error error_message = "There was a problem saving the image in your submission. Please try a different image, or try pasting a link to an image into the answer box." return { @@ -214,7 +225,10 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild): for i in xrange(0,len(score_list)): score_list[i] = int(score_list[i]) except ValueError: - return {'success': False, 'error': "Non-integer score value, or no score list"} + #This is a dev_facing_error + log.error("Non-integer score value passed to save_assessment ,or no score list present.") + #This is a student_facing_error + return {'success': False, 'error': "Error saving your score. 
Please notify course staff."} #Record score as assessment and rubric scores as post assessment self.record_latest_score(score) @@ -256,6 +270,7 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild): try: rubric_scores = json.loads(latest_post_assessment) except: + #This is a dev_facing_error log.error("Cannot parse rubric scores in self assessment module from {0}".format(latest_post_assessment)) rubric_scores = [] return [rubric_scores] @@ -287,7 +302,8 @@ class SelfAssessmentDescriptor(XmlDescriptor, EditingDescriptor): expected_children = [] for child in expected_children: if len(xml_object.xpath(child)) != 1: - raise ValueError("Self assessment definition must include exactly one '{0}' tag".format(child)) + #This is a staff_facing_error + raise ValueError("Self assessment definition must include exactly one '{0}' tag. Contact the learning sciences group for assistance.".format(child)) def parse(k): """Assumes that xml_object has child k""" diff --git a/common/lib/xmodule/xmodule/peer_grading_module.py b/common/lib/xmodule/xmodule/peer_grading_module.py index 4dfe8e0dfa..e262db5615 100644 --- a/common/lib/xmodule/xmodule/peer_grading_module.py +++ b/common/lib/xmodule/xmodule/peer_grading_module.py @@ -24,6 +24,8 @@ TRUE_DICT = [True, "True", "true", "TRUE"] MAX_SCORE = 1 IS_GRADED = True +EXTERNAL_GRADER_NO_CONTACT_ERROR = "Failed to contact external graders. Please notify course staff." + class PeerGradingModule(XModule): _VERSION = 1 @@ -145,7 +147,10 @@ class PeerGradingModule(XModule): } if dispatch not in handlers: - return 'Error' + #This is a dev_facing_error + log.error("Cannot find {0} in handlers in handle_ajax function for open_ended_module.py".format(dispatch)) + #This is a dev_facing_error + return json.dumps({'error': 'Error handling action. 
Please try again.', 'success' : False}) d = handlers[dispatch](get) @@ -163,6 +168,7 @@ class PeerGradingModule(XModule): count_required = response['count_required'] success = True except GradingServiceError: + #This is a dev_facing_error log.exception("Error getting location data from controller for location {0}, student {1}" .format(location, student_id)) @@ -188,6 +194,7 @@ class PeerGradingModule(XModule): count_graded = response['count_graded'] count_required = response['count_required'] if count_required > 0 and count_graded >= count_required: + #Ensures that once a student receives a final score for peer grading, that it does not change. self.student_data_for_location = response score_dict = { @@ -237,10 +244,12 @@ class PeerGradingModule(XModule): response = self.peer_gs.get_next_submission(location, grader_id) return response except GradingServiceError: + #This is a dev_facing_error log.exception("Error getting next submission. server url: {0} location: {1}, grader_id: {2}" .format(self.peer_gs.url, location, grader_id)) + #This is a student_facing_error return {'success': False, - 'error': 'Could not connect to grading service'} + 'error': EXTERNAL_GRADER_NO_CONTACT_ERROR} def save_grade(self, get): """ @@ -277,14 +286,16 @@ class PeerGradingModule(XModule): score, feedback, submission_key, rubric_scores, submission_flagged) return response except GradingServiceError: - log.exception("""Error saving grade. server url: {0}, location: {1}, submission_id:{2}, + #This is a dev_facing_error + log.exception("""Error saving grade to open ended grading service. 
server url: {0}, location: {1}, submission_id:{2}, submission_key: {3}, score: {4}""" .format(self.peer_gs.url, location, submission_id, submission_key, score) ) + #This is a student_facing_error return { 'success': False, - 'error': 'Could not connect to grading service' + 'error': EXTERNAL_GRADER_NO_CONTACT_ERROR } def is_student_calibrated(self, get): @@ -317,11 +328,13 @@ class PeerGradingModule(XModule): response = self.peer_gs.is_student_calibrated(location, grader_id) return response except GradingServiceError: - log.exception("Error from grading service. server url: {0}, grader_id: {0}, location: {1}" + #This is a dev_facing_error + log.exception("Error from open ended grading service. server url: {0}, grader_id: {1}, location: {2}" .format(self.peer_gs.url, grader_id, location)) + #This is a student_facing_error return { 'success': False, - 'error': 'Could not connect to grading service' + 'error': EXTERNAL_GRADER_NO_CONTACT_ERROR } def show_calibration_essay(self, get): @@ -360,16 +373,20 @@ class PeerGradingModule(XModule): response = self.peer_gs.show_calibration_essay(location, grader_id) return response except GradingServiceError: - log.exception("Error from grading service. server url: {0}, location: {0}" + #This is a dev_facing_error + log.exception("Error from open ended grading service. server url: {0}, location: {1}" .format(self.peer_gs.url, location)) + #This is a student_facing_error return {'success': False, - 'error': 'Could not connect to grading service'} + 'error': EXTERNAL_GRADER_NO_CONTACT_ERROR} # if we can't parse the rubric into HTML, except etree.XMLSyntaxError: + #This is a dev_facing_error log.exception("Cannot parse rubric string. Raw string: {0}" .format(rubric)) + #This is a student_facing_error return {'success': False, - 'error': 'Error displaying submission'} + 'error': 'Error displaying submission. 
Please notify course staff.'} def save_calibration_essay(self, get): @@ -408,8 +425,10 @@ class PeerGradingModule(XModule): submission_key, score, feedback, rubric_scores) return response except GradingServiceError: + #This is a dev_facing_error log.exception("Error saving calibration grade, location: {0}, submission_id: {1}, submission_key: {2}, grader_id: {3}".format(location, submission_id, submission_key, grader_id)) - return self._err_response('Could not connect to grading service') + #This is a student_facing_error + return self._err_response('There was an error saving your score. Please notify course staff.') def peer_grading_closed(self): ''' @@ -440,11 +459,13 @@ class PeerGradingModule(XModule): problem_list = problem_list_dict['problem_list'] except GradingServiceError: - error_text = "Error occured while contacting the grading service" + #This is a student_facing_error + error_text = EXTERNAL_GRADER_NO_CONTACT_ERROR success = False # catch error if if the json loads fails except ValueError: - error_text = "Could not get problem list" + #This is a student_facing_error + error_text = "Could not get list of problems to peer grade. Please notify course staff." 
success = False @@ -502,6 +523,8 @@ class PeerGradingModule(XModule): if get == None or get.get('location') == None: if not self.use_for_single_location: #This is an error case, because it must be set to use a single location to be called without get parameters + #This is a dev_facing_error + log.error("Peer grading problem in peer_grading_module called with no get parameters, but use_for_single_location is False.") return {'html': "", 'success': False} problem_location = self.link_to_location @@ -566,7 +589,8 @@ class PeerGradingDescriptor(XmlDescriptor, EditingDescriptor): expected_children = [] for child in expected_children: if len(xml_object.xpath(child)) == 0: - raise ValueError("Peer grading definition must include at least one '{0}' tag".format(child)) + #This is a staff_facing_error + raise ValueError("Peer grading definition must include at least one '{0}' tag. Contact the learning sciences group for assistance.".format(child)) def parse_task(k): """Assumes that xml_object has child k""" diff --git a/common/lib/xmodule/xmodule/tests/__init__.py b/common/lib/xmodule/xmodule/tests/__init__.py index 04e7ee19b1..9474717cb2 100644 --- a/common/lib/xmodule/xmodule/tests/__init__.py +++ b/common/lib/xmodule/xmodule/tests/__init__.py @@ -19,6 +19,15 @@ import xmodule from xmodule.x_module import ModuleSystem from mock import Mock +open_ended_grading_interface = { + 'url': 'http://sandbox-grader-001.m.edx.org/peer_grading', + 'username': 'incorrect_user', + 'password': 'incorrect_pass', + 'staff_grading' : 'staff_grading', + 'peer_grading' : 'peer_grading', + 'grading_controller' : 'grading_controller' + } + test_system = ModuleSystem( ajax_url='courses/course_id/modx/a_location', track_function=Mock(), @@ -31,7 +40,8 @@ test_system = ModuleSystem( debug=True, xqueue={'interface': None, 'callback_url': '/', 'default_queuename': 'testqueue', 'waittime': 10}, node_path=os.environ.get("NODE_PATH", "/usr/local/lib/node_modules"), - anonymous_student_id='student' + 
anonymous_student_id='student', + open_ended_grading_interface= open_ended_grading_interface ) diff --git a/lms/djangoapps/open_ended_grading/open_ended_notifications.py b/lms/djangoapps/open_ended_grading/open_ended_notifications.py index ecec29fce3..c4054895d3 100644 --- a/lms/djangoapps/open_ended_grading/open_ended_notifications.py +++ b/lms/djangoapps/open_ended_grading/open_ended_notifications.py @@ -45,7 +45,8 @@ def staff_grading_notifications(course, user): except: #Non catastrophic error, so no real action notifications = {} - log.info("Problem with getting notifications from staff grading service.") + #This is a dev_facing_error + log.info("Problem with getting notifications from staff grading service for course {0} user {1}.".format(course_id, student_id)) if pending_grading: img_path = "/static/images/grading_notification.png" @@ -78,7 +79,8 @@ def peer_grading_notifications(course, user): except: #Non catastrophic error, so no real action notifications = {} - log.info("Problem with getting notifications from peer grading service.") + #This is a dev_facing_error + log.info("Problem with getting notifications from peer grading service for course {0} user {1}.".format(course_id, student_id)) if pending_grading: img_path = "/static/images/grading_notification.png" @@ -123,7 +125,8 @@ def combined_notifications(course, user): except: #Non catastrophic error, so no real action notifications = {} - log.exception("Problem with getting notifications from controller query service.") + #This is a dev_facing_error + log.exception("Problem with getting notifications from controller query service for course {0} user {1}.".format(course_id, student_id)) if pending_grading: img_path = "/static/images/grading_notification.png" diff --git a/lms/djangoapps/open_ended_grading/staff_grading_service.py b/lms/djangoapps/open_ended_grading/staff_grading_service.py index 0ead7aa364..79b92dffba 100644 --- a/lms/djangoapps/open_ended_grading/staff_grading_service.py +++ 
b/lms/djangoapps/open_ended_grading/staff_grading_service.py @@ -18,6 +18,7 @@ from mitxmako.shortcuts import render_to_string log = logging.getLogger(__name__) +STAFF_ERROR_MESSAGE = 'Could not contact the external grading server. Please contact the development team. If you do not have a point of contact, you can contact Vik at vik@edx.org.' class MockStaffGradingService(object): """ @@ -254,10 +255,12 @@ def get_problem_list(request, course_id): return HttpResponse(response, mimetype="application/json") except GradingServiceError: - log.exception("Error from grading service. server url: {0}" + #This is a dev_facing_error + log.exception("Error from staff grading service in open ended grading. server url: {0}" .format(staff_grading_service().url)) + #This is a staff_facing_error return HttpResponse(json.dumps({'success': False, - 'error': 'Could not connect to grading service'})) + 'error': STAFF_ERROR_MESSAGE})) def _get_next(course_id, grader_id, location): @@ -267,10 +270,12 @@ def _get_next(course_id, grader_id, location): try: return staff_grading_service().get_next(course_id, location, grader_id) except GradingServiceError: - log.exception("Error from grading service. server url: {0}" + #This is a dev facing error + log.exception("Error from staff grading service in open ended grading. server url: {0}" .format(staff_grading_service().url)) + #This is a staff_facing_error return json.dumps({'success': False, - 'error': 'Could not connect to grading service'}) + 'error': STAFF_ERROR_MESSAGE}) @expect_json @@ -316,18 +321,23 @@ def save_grade(request, course_id): p.getlist('rubric_scores[]'), p['submission_flagged']) except GradingServiceError: - log.exception("Error saving grade") - return _err_response('Could not connect to grading service') + #This is a dev_facing_error + log.exception("Error saving grade in the staff grading interface in open ended grading. 
Request: {0} Course ID: {1}".format(request, course_id)) + #This is a staff_facing_error + return _err_response(STAFF_ERROR_MESSAGE) try: result = json.loads(result_json) except ValueError: - log.exception("save_grade returned broken json: %s", result_json) - return _err_response('Grading service returned mal-formatted data.') + #This is a dev_facing_error + log.exception("save_grade returned broken json in the staff grading interface in open ended grading: {0}".format(result_json)) + #This is a staff_facing_error + return _err_response(STAFF_ERROR_MESSAGE) if not result.get('success', False): - log.warning('Got success=False from grading service. Response: %s', result_json) - return _err_response('Grading service failed') + #This is a dev_facing_error + log.warning('Got success=False from staff grading service in open ended grading. Response: {0}'.format(result_json)) + return _err_response(STAFF_ERROR_MESSAGE) # Ok, save_grade seemed to work. Get the next submission to grade. return HttpResponse(_get_next(course_id, grader_id, location), diff --git a/lms/djangoapps/open_ended_grading/views.py b/lms/djangoapps/open_ended_grading/views.py index 80f50b282d..77c1cda6bc 100644 --- a/lms/djangoapps/open_ended_grading/views.py +++ b/lms/djangoapps/open_ended_grading/views.py @@ -60,6 +60,8 @@ ALERT_DICT = { 'Flagged Submissions': "Submissions have been flagged for review" } +STUDENT_ERROR_MESSAGE = "Error occurred while contacting the grading service. Please notify course staff." +STAFF_ERROR_MESSAGE = "Error occurred while contacting the grading service. Please notify the development team. If you do not have a point of contact, please email Vik at vik@edx.org" @cache_control(no_cache=True, no_store=True, must_revalidate=True) def staff_grading(request, course_id): @@ -96,7 +98,9 @@ def peer_grading(request, course_id): return HttpResponseRedirect(problem_url) except: + #This is a student_facing_error error_message = "Error with initializing peer grading. 
Centralized module does not exist. Please contact course staff." + #This is a dev_facing_error log.exception(error_message + "Current course is: {0}".format(course_id)) return HttpResponse(error_message) @@ -132,30 +136,34 @@ def student_problem_list(request, course_id): problem_list = [] base_course_url = reverse('courses') - #try: - problem_list_json = controller_qs.get_grading_status_list(course_id, unique_id_for_user(request.user)) - problem_list_dict = json.loads(problem_list_json) - success = problem_list_dict['success'] - if 'error' in problem_list_dict: - error_text = problem_list_dict['error'] - problem_list = [] - else: - problem_list = problem_list_dict['problem_list'] + try: + problem_list_json = controller_qs.get_grading_status_list(course_id, unique_id_for_user(request.user)) + problem_list_dict = json.loads(problem_list_json) + success = problem_list_dict['success'] + if 'error' in problem_list_dict: + error_text = problem_list_dict['error'] + problem_list = [] + else: + problem_list = problem_list_dict['problem_list'] - for i in xrange(0, len(problem_list)): - problem_url_parts = search.path_to_location(modulestore(), course.id, problem_list[i]['location']) - problem_url = generate_problem_url(problem_url_parts, base_course_url) - problem_list[i].update({'actual_url': problem_url}) + for i in xrange(0, len(problem_list)): + problem_url_parts = search.path_to_location(modulestore(), course.id, problem_list[i]['location']) + problem_url = generate_problem_url(problem_url_parts, base_course_url) + problem_list[i].update({'actual_url': problem_url}) - """ except GradingServiceError: - error_text = "Error occured while contacting the grading service" + #This is a student_facing_error + error_text = STUDENT_ERROR_MESSAGE + #This is a dev facing error + log.error("Problem contacting open ended grading service.") success = False # catch error if if the json loads fails except ValueError: - error_text = "Could not get problem list" + #This is a student 
facing error + error_text = STUDENT_ERROR_MESSAGE + #This is a dev_facing_error + log.error("Problem with results from external grading service for open ended.") success = False - """ ajax_url = _reverse_with_slash('open_ended_problems', course_id) @@ -195,11 +203,17 @@ def flagged_problem_list(request, course_id): problem_list = problem_list_dict['flagged_submissions'] except GradingServiceError: - error_text = "Error occured while contacting the grading service" + #This is a staff_facing_error + error_text = STAFF_ERROR_MESSAGE + #This is a dev_facing_error + log.error("Could not get flagged problem list from external grading service for open ended.") success = False # catch error if if the json loads fails except ValueError: - error_text = "Could not get problem list" + #This is a staff_facing_error + error_text = STAFF_ERROR_MESSAGE + #This is a dev_facing_error + log.error("Could not parse problem list from external grading service response.") success = False ajax_url = _reverse_with_slash('open_ended_flagged_problems', course_id) @@ -283,7 +297,8 @@ def take_action_on_flags(request, course_id): required = ['submission_id', 'action_type', 'student_id'] for key in required: if key not in request.POST: - return HttpResponse(json.dumps({'success': False, 'error': 'Missing key {0}'.format(key)}), + #This is a staff_facing_error + return HttpResponse(json.dumps({'success': False, 'error': STAFF_ERROR_MESSAGE + 'Missing key {0} from submission. 
Please reload and try again.'.format(key)}), mimetype="application/json") p = request.POST @@ -297,5 +312,6 @@ def take_action_on_flags(request, course_id): response = controller_qs.take_action_on_flags(course_id, student_id, submission_id, action_type) return HttpResponse(response, mimetype="application/json") except GradingServiceError: - log.exception("Error saving calibration grade, submission_id: {0}, submission_key: {1}, grader_id: {2}".format(submission_id, submission_key, grader_id)) - return _err_response('Could not connect to grading service') + #This is a dev_facing_error + log.exception("Error taking action on flagged peer grading submissions, submission_id: {0}, action_type: {1}, grader_id: {2}".format(submission_id, action_type, grader_id)) + return _err_response(STAFF_ERROR_MESSAGE) diff --git a/lms/envs/test.py b/lms/envs/test.py index 6cad6416d0..c1863349fb 100644 --- a/lms/envs/test.py +++ b/lms/envs/test.py @@ -170,4 +170,4 @@ PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.SHA1PasswordHasher', 'django.contrib.auth.hashers.MD5PasswordHasher', # 'django.contrib.auth.hashers.CryptPasswordHasher', -) +) \ No newline at end of file diff --git a/lms/static/coffee/src/open_ended/open_ended.coffee b/lms/static/coffee/src/open_ended/open_ended.coffee index cc8bad5473..bfb0fa5931 100644 --- a/lms/static/coffee/src/open_ended/open_ended.coffee +++ b/lms/static/coffee/src/open_ended/open_ended.coffee @@ -41,7 +41,7 @@ class OpenEnded post: (cmd, data, callback) -> # if this post request fails, the error callback will catch it $.post(@ajax_url + cmd, data, callback) - .error => callback({success: false, error: "Error occured while performing this operation"}) + .error => callback({success: false, error: "Error occurred while performing javascript ajax post."}) after_action_wrapper: (target, action_type) -> tr_parent = target.parent().parent() diff --git a/lms/static/coffee/src/staff_grading/staff_grading.coffee 
b/lms/static/coffee/src/staff_grading/staff_grading.coffee index 8a1bf1adbb..c4ccee4571 100644 --- a/lms/static/coffee/src/staff_grading/staff_grading.coffee +++ b/lms/static/coffee/src/staff_grading/staff_grading.coffee @@ -143,7 +143,7 @@ The standard chunk of Lorem Ipsum used since the 1500s is reproduced below for t else # TODO: replace with postWithPrefix when that's loaded $.post(@ajax_url + cmd, data, callback) - .error => callback({success: false, error: "Error occured while performing this operation"}) + .error => callback({success: false, error: "Error occurred while performing javascript AJAX post."}) class @StaffGrading diff --git a/lms/templates/combined_open_ended_status.html b/lms/templates/combined_open_ended_status.html index fa84e7bab7..d13077737f 100644 --- a/lms/templates/combined_open_ended_status.html +++ b/lms/templates/combined_open_ended_status.html @@ -16,7 +16,7 @@ %else: ${status['human_task']} %endif - (${status['human_state']}) ${status['score']} / ${status['max_score']} + (${status['human_state']}) %endfor diff --git a/lms/templates/instructor/staff_grading.html b/lms/templates/instructor/staff_grading.html index bd9dde7117..1c5f7364ad 100644 --- a/lms/templates/instructor/staff_grading.html +++ b/lms/templates/instructor/staff_grading.html @@ -78,6 +78,7 @@

Flag as inappropriate content for later review

+
diff --git a/lms/templates/peer_grading/peer_grading_problem.html b/lms/templates/peer_grading/peer_grading_problem.html index 9468b594a2..853fa750e8 100644 --- a/lms/templates/peer_grading/peer_grading_problem.html +++ b/lms/templates/peer_grading/peer_grading_problem.html @@ -44,6 +44,7 @@

Flag this submission for review by course staff (use if the submission contains inappropriate content):

+

I do not know how to grade this question: