From 47c24d8e7e7f37a1c6b5eaddc19afef5baafd750 Mon Sep 17 00:00:00 2001 From: Diana Huang Date: Fri, 25 Jan 2013 13:33:35 -0500 Subject: [PATCH] Raise smarter exceptions when parsing rubrics. --- .../xmodule/combined_open_ended_module.py | 6 +-- .../xmodule/combined_open_ended_rubric.py | 49 ++++++++----------- .../lib/xmodule/xmodule/open_ended_module.py | 2 +- .../xmodule/xmodule/self_assessment_module.py | 2 +- .../open_ended_grading/grading_service.py | 17 ++++--- 5 files changed, 33 insertions(+), 43 deletions(-) diff --git a/common/lib/xmodule/xmodule/combined_open_ended_module.py b/common/lib/xmodule/xmodule/combined_open_ended_module.py index d8d1f6cfe1..b9236ad18f 100644 --- a/common/lib/xmodule/xmodule/combined_open_ended_module.py +++ b/common/lib/xmodule/xmodule/combined_open_ended_module.py @@ -141,11 +141,7 @@ class CombinedOpenEndedModule(XModule): self._max_score = int(self.metadata.get('max_score', MAX_SCORE)) rubric_renderer = CombinedOpenEndedRubric(system, True) - success, rubric_feedback = rubric_renderer.render_rubric(stringify_children(definition['rubric'])) - if not success: - error_message="Could not parse rubric : {0}".format(definition['rubric']) - log.exception(error_message) - raise Exception + rubric_feedback = rubric_renderer.render_rubric(stringify_children(definition['rubric'])) #Static data is passed to the child modules to render self.static_data = { 'max_score': self._max_score, diff --git a/common/lib/xmodule/xmodule/combined_open_ended_rubric.py b/common/lib/xmodule/xmodule/combined_open_ended_rubric.py index bf6836bacc..4380e32d5b 100644 --- a/common/lib/xmodule/xmodule/combined_open_ended_rubric.py +++ b/common/lib/xmodule/xmodule/combined_open_ended_rubric.py @@ -3,6 +3,9 @@ from lxml import etree log=logging.getLogger(__name__) +class RubricParsingError(Exception): + pass + class CombinedOpenEndedRubric(object): def __init__ (self, system, view_only = False): @@ -10,35 +13,25 @@ class CombinedOpenEndedRubric(object): 
self.view_only = view_only self.system = system - ''' - render_rubric: takes in an xml string and outputs the corresponding - html for that xml, given the type of rubric we're generating - Input: - rubric_xml: an string that has not been parsed into xml that - represents this particular rubric - Output: - html: the html that corresponds to the xml given - ''' def render_rubric(self, rubric_xml): - success = False + ''' + render_rubric: takes in an xml string and outputs the corresponding + html for that xml, given the type of rubric we're generating + Input: + rubric_xml: an string that has not been parsed into xml that + represents this particular rubric + Output: + html: the html that corresponds to the xml given + ''' try: rubric_categories = self.extract_categories(rubric_xml) html = self.system.render_template('open_ended_rubric.html', {'categories' : rubric_categories, 'has_score': self.has_score, 'view_only': self.view_only}) - success = True except: - log.exception("Could not parse the rubric.") - try: - html = etree.tostring(rubric_xml, pretty_print=True) - except: - log.exception("Rubric XML is a string, not an XML object : {0}".format(rubric_xml)) - if isinstance(rubric_xml, basestring): - html = rubric_xml - else: - html = "Invalid rubric. Please contact course staff." 
- return success, html + raise RubricParsingError("[render_rubric] Could not parse the rubric with xml: {0}".format(rubric_xml)) + return html def extract_categories(self, element): ''' @@ -57,7 +50,7 @@ class CombinedOpenEndedRubric(object): categories = [] for category in element: if category.tag != 'category': - raise Exception("[extract_categories] Expected a tag: got {0} instead".format(category.tag)) + raise RubricParsingError("[extract_categories] Expected a tag: got {0} instead".format(category.tag)) else: categories.append(self.extract_category(category)) return categories @@ -83,12 +76,12 @@ class CombinedOpenEndedRubric(object): self.has_score = True # if we are missing the score tag and we are expecting one elif self.has_score: - raise Exception("[extract_category] Category {0} is missing a score".format(descriptionxml.text)) + raise RubricParsingError("[extract_category] Category {0} is missing a score".format(descriptionxml.text)) # parse description if descriptionxml.tag != 'description': - raise Exception("[extract_category]: expected description tag, got {0} instead".format(descriptionxml.tag)) + raise RubricParsingError("[extract_category]: expected description tag, got {0} instead".format(descriptionxml.tag)) description = descriptionxml.text @@ -98,7 +91,7 @@ class CombinedOpenEndedRubric(object): # parse options for option in optionsxml: if option.tag != 'option': - raise Exception("[extract_category]: expected option tag, got {0} instead".format(option.tag)) + raise RubricParsingError("[extract_category]: expected option tag, got {0} instead".format(option.tag)) else: pointstr = option.get("points") if pointstr: @@ -107,7 +100,7 @@ class CombinedOpenEndedRubric(object): try: points = int(pointstr) except ValueError: - raise Exception("[extract_category]: expected points to have int, got {0} instead".format(pointstr)) + raise RubricParsingError("[extract_category]: expected points to have int, got {0} instead".format(pointstr)) elif 
autonumbering: # use the generated one if we're in the right mode points = cur_points @@ -132,12 +125,12 @@ class CombinedOpenEndedRubric(object): Validates a set of options. This can and should be extended to filter out other bad edge cases ''' if len(options) == 0: - raise Exception("[extract_category]: no options associated with this category") + raise RubricParsingError("[extract_category]: no options associated with this category") if len(options) == 1: return prev = options[0]['points'] for option in options[1:]: if prev == option['points']: - raise Exception("[extract_category]: found duplicate point values between two different options") + raise RubricParsingError("[extract_category]: found duplicate point values between two different options") else: prev = option['points'] diff --git a/common/lib/xmodule/xmodule/open_ended_module.py b/common/lib/xmodule/xmodule/open_ended_module.py index c674d330e1..3117d9566a 100644 --- a/common/lib/xmodule/xmodule/open_ended_module.py +++ b/common/lib/xmodule/xmodule/open_ended_module.py @@ -383,7 +383,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild): feedback = self._convert_longform_feedback_to_html(response_items) if response_items['rubric_scores_complete']==True: rubric_renderer = CombinedOpenEndedRubric(system, True) - success, rubric_feedback = rubric_renderer.render_rubric(response_items['rubric_xml']) + rubric_feedback = rubric_renderer.render_rubric(response_items['rubric_xml']) if not response_items['success']: return system.render_template("open_ended_error.html", diff --git a/common/lib/xmodule/xmodule/self_assessment_module.py b/common/lib/xmodule/xmodule/self_assessment_module.py index 73f580c0f5..fb1d306708 100644 --- a/common/lib/xmodule/xmodule/self_assessment_module.py +++ b/common/lib/xmodule/xmodule/self_assessment_module.py @@ -123,7 +123,7 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild): return '' rubric_renderer = CombinedOpenEndedRubric(system, True) - success, rubric_html = 
rubric_renderer.render_rubric(self.rubric) + rubric_html = rubric_renderer.render_rubric(self.rubric) # we'll render it context = {'rubric': rubric_html, diff --git a/lms/djangoapps/open_ended_grading/grading_service.py b/lms/djangoapps/open_ended_grading/grading_service.py index e8af5f09f6..9d4849d3ed 100644 --- a/lms/djangoapps/open_ended_grading/grading_service.py +++ b/lms/djangoapps/open_ended_grading/grading_service.py @@ -11,7 +11,7 @@ from django.http import HttpResponse, Http404 from courseware.access import has_access from util.json_request import expect_json from xmodule.course_module import CourseDescriptor -from xmodule.combined_open_ended_rubric import CombinedOpenEndedRubric +from xmodule.combined_open_ended_rubric import CombinedOpenEndedRubric, RubricParsingError from lxml import etree from mitxmako.shortcuts import render_to_string from xmodule.x_module import ModuleSystem @@ -113,19 +113,20 @@ class GradingService(object): if response_json.has_key('rubric'): rubric = response_json['rubric'] rubric_renderer = CombinedOpenEndedRubric(self.system, False) - success, rubric_html = rubric_renderer.render_rubric(rubric) - if not success: - error_message = "Could not render rubric: {0}".format(rubric) - log.exception(error_message) - return json.dumps({'success': False, - 'error': error_message}) + rubric_html = rubric_renderer.render_rubric(rubric) response_json['rubric'] = rubric_html return json.dumps(response_json) # if we can't parse the rubric into HTML, - except etree.XMLSyntaxError: + except (etree.XMLSyntaxError, RubricParsingError): log.exception("Cannot parse rubric string. Raw string: {0}" .format(rubric)) return json.dumps({'success': False, 'error': 'Error displaying submission'}) + except ValueError: + log.exception("Error parsing response: {0}".format(response)) + return json.dumps({'success': False, 'error': "Error displaying submission"}) + +