diff --git a/common/lib/xmodule/xmodule/capa_module.py b/common/lib/xmodule/xmodule/capa_module.py index 4cc8cb5cc8..a03c0f4160 100644 --- a/common/lib/xmodule/xmodule/capa_module.py +++ b/common/lib/xmodule/xmodule/capa_module.py @@ -852,14 +852,14 @@ class CapaModule(CapaFields, XModule): log.warning("Input error in capa_module:problem_rescore", exc_info=True) event_info['failure'] = 'input_error' self.system.track_function('problem_rescore_fail', event_info) - return {'success': "Error: {0}".format(inst.message)} + return {'success': u"Error: {0}".format(inst.message)} except Exception as err: event_info['failure'] = 'unexpected' self.system.track_function('problem_rescore_fail', event_info) if self.system.DEBUG: - msg = "Error checking problem: " + str(err) - msg += '\nTraceback:\n' + traceback.format_exc() + msg = u"Error checking problem: {0}".format(err.message) + msg += u'\nTraceback:\n' + traceback.format_exc() return {'success': msg} raise diff --git a/common/lib/xmodule/xmodule/tests/test_capa_module.py b/common/lib/xmodule/xmodule/tests/test_capa_module.py index e71abc811d..e738d8e031 100644 --- a/common/lib/xmodule/xmodule/tests/test_capa_module.py +++ b/common/lib/xmodule/xmodule/tests/test_capa_module.py @@ -658,11 +658,11 @@ class CapaModuleTest(unittest.TestCase): # Simulate answering a problem that raises the exception with patch('capa.capa_problem.LoncapaProblem.rescore_existing_answers') as mock_rescore: - mock_rescore.side_effect = exception_class('test error') + mock_rescore.side_effect = exception_class(u'test error \u03a9') result = module.rescore_problem() # Expect an AJAX alert message in 'success' - expected_msg = 'Error: test error' + expected_msg = u'Error: test error \u03a9' self.assertEqual(result['success'], expected_msg) # Expect that the number of attempts is NOT incremented diff --git a/lms/djangoapps/instructor_task/tasks_helper.py b/lms/djangoapps/instructor_task/tasks_helper.py index 7bdced17e3..5f730a7c73 100644 --- 
a/lms/djangoapps/instructor_task/tasks_helper.py +++ b/lms/djangoapps/instructor_task/tasks_helper.py @@ -331,15 +331,15 @@ def rescore_problem_module_state(module_descriptor, student_module, xmodule_inst result = instance.rescore_problem() if 'success' not in result: # don't consider these fatal, but false means that the individual call didn't complete: - TASK_LOG.warning("error processing rescore call for course {course}, problem {loc} and student {student}: " + TASK_LOG.warning(u"error processing rescore call for course {course}, problem {loc} and student {student}: " "unexpected response {msg}".format(msg=result, course=course_id, loc=module_state_key, student=student)) return False elif result['success'] not in ['correct', 'incorrect']: - TASK_LOG.warning("error processing rescore call for course {course}, problem {loc} and student {student}: " + TASK_LOG.warning(u"error processing rescore call for course {course}, problem {loc} and student {student}: " "{msg}".format(msg=result['success'], course=course_id, loc=module_state_key, student=student)) return False else: - TASK_LOG.debug("successfully processed rescore call for course {course}, problem {loc} and student {student}: " + TASK_LOG.debug(u"successfully processed rescore call for course {course}, problem {loc} and student {student}: " "{msg}".format(msg=result['success'], course=course_id, loc=module_state_key, student=student)) return True diff --git a/lms/djangoapps/instructor_task/tests/test_integration.py b/lms/djangoapps/instructor_task/tests/test_integration.py index 4574a4c4ab..3491db0547 100644 --- a/lms/djangoapps/instructor_task/tests/test_integration.py +++ b/lms/djangoapps/instructor_task/tests/test_integration.py @@ -27,6 +27,7 @@ from instructor_task.api import (submit_rescore_problem_for_all_students, submit_delete_problem_state_for_all_students) from instructor_task.models import InstructorTask from instructor_task.tests.test_base import InstructorTaskTestCase, TEST_COURSE_ORG, 
TEST_COURSE_NUMBER +from capa.responsetypes import StudentInputError log = logging.getLogger(__name__) @@ -34,7 +35,7 @@ log = logging.getLogger(__name__) class TestIntegrationTask(InstructorTaskTestCase): """ - Base class to provide general methods used for "integration" testing of particular tasks. + Base class to provide general methods used for "integration" testing of particular tasks. """ def submit_student_answer(self, username, problem_url_name, responses): @@ -199,6 +200,36 @@ class TestRescoringTask(TestIntegrationTask): status = self.get_task_status(instructor_task.task_id) self.assertEqual(status['message'], expected_message) + def test_rescoring_bad_unicode_input(self): + """Generate a real failure in rescoring a problem, with an answer including unicode""" + # At one point, the student answers that resulted in StudentInputErrors were being + # persisted (even though they were not counted as an attempt). That is not possible + # now, so it's harder to generate a test for how such input is handled. + problem_url_name = 'H1P1' + # set up an option problem -- doesn't matter really what problem it is, but we need + # it to have an answer. 
+ self.define_option_problem(problem_url_name) + self.submit_student_answer('u1', problem_url_name, ['Option 1', 'Option 1']) + + # return an input error as if it were a numerical response, with an embedded unicode character: + expected_message = u"Could not interpret '2/3\u03a9' as a number" + with patch('capa.capa_problem.LoncapaProblem.rescore_existing_answers') as mock_rescore: + mock_rescore.side_effect = StudentInputError(expected_message) + instructor_task = self.submit_rescore_all_student_answers('instructor', problem_url_name) + + # check instructor_task returned + instructor_task = InstructorTask.objects.get(id=instructor_task.id) + self.assertEqual(instructor_task.task_state, 'SUCCESS') + self.assertEqual(instructor_task.requester.username, 'instructor') + self.assertEqual(instructor_task.task_type, 'rescore_problem') + task_input = json.loads(instructor_task.task_input) + self.assertFalse('student' in task_input) + self.assertEqual(task_input['problem_url'], InstructorTaskTestCase.problem_location(problem_url_name)) + status = json.loads(instructor_task.task_output) + self.assertEqual(status['attempted'], 1) + self.assertEqual(status['updated'], 0) + self.assertEqual(status['total'], 1) + def test_rescoring_non_problem(self): """confirm that a non-problem will not submit""" problem_url_name = self.problem_section.location.url()