diff --git a/common/lib/xmodule/xmodule/combined_open_ended_module.py b/common/lib/xmodule/xmodule/combined_open_ended_module.py index 67ff206e89..f4074283fe 100644 --- a/common/lib/xmodule/xmodule/combined_open_ended_module.py +++ b/common/lib/xmodule/xmodule/combined_open_ended_module.py @@ -104,11 +104,14 @@ class CombinedOpenEndedModule(CombinedOpenEndedFields, XModule): icon_class = 'problem' - js = {'coffee': - [resource_string(__name__, 'js/src/combinedopenended/display.coffee'), - resource_string(__name__, 'js/src/collapsible.coffee'), - resource_string(__name__, 'js/src/javascript_loader.coffee'), - ]} + js = { + 'coffee': + [ + resource_string(__name__, 'js/src/combinedopenended/display.coffee'), + resource_string(__name__, 'js/src/collapsible.coffee'), + resource_string(__name__, 'js/src/javascript_loader.coffee'), + ] + } js_module_name = "CombinedOpenEnded" css = {'scss': [resource_string(__name__, 'css/combinedopenended/display.scss')]} diff --git a/common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_modulev1.py b/common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_modulev1.py index 6767851d3a..1404f52300 100644 --- a/common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_modulev1.py +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_modulev1.py @@ -294,9 +294,8 @@ class CombinedOpenEndedV1Module(): if self.current_task_number > 0: last_response_data = self.get_last_response(self.current_task_number - 1) current_response_data = self.get_current_attributes(self.current_task_number) - if (current_response_data['min_score_to_attempt'] > last_response_data['score'] - or current_response_data['max_score_to_attempt'] < last_response_data['score']): + or current_response_data['max_score_to_attempt'] < last_response_data['score']): self.state = self.DONE self.ready_to_reset = True @@ -662,9 +661,10 @@ class CombinedOpenEndedV1Module(): return { 'success': False, #This 
is a student_facing_error - 'error': ('You have attempted this question {0} times. ' - 'You are only allowed to attempt it {1} times.').format( - self.student_attempts, self.attempts) + 'error': ( + 'You have attempted this question {0} times. ' + 'You are only allowed to attempt it {1} times.' + ).format(self.student_attempts, self.attempts) } self.state = self.INITIAL self.ready_to_reset = False @@ -815,7 +815,6 @@ class CombinedOpenEndedV1Module(): 'error': 'The problem state got out-of-sync. Please try reloading the page.'} - class CombinedOpenEndedV1Descriptor(): """ Module for adding combined open ended questions @@ -861,7 +860,6 @@ class CombinedOpenEndedV1Descriptor(): return {'task_xml': parse_task('task'), 'prompt': parse('prompt'), 'rubric': parse('rubric')} - def definition_to_xml(self, resource_fs): '''Return an xml element representing this definition.''' elt = etree.Element('combinedopenended') diff --git a/common/lib/xmodule/xmodule/open_ended_grading_classes/grading_service_module.py b/common/lib/xmodule/xmodule/open_ended_grading_classes/grading_service_module.py index f3f6568b1e..b16f0618bb 100644 --- a/common/lib/xmodule/xmodule/open_ended_grading_classes/grading_service_module.py +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/grading_service_module.py @@ -76,7 +76,6 @@ class GradingService(object): return r.text - def _try_with_login(self, operation): """ Call operation(), which should return a requests response object. If @@ -87,7 +86,7 @@ class GradingService(object): """ response = operation() if (response.json - and response.json.get('success') == False + and response.json.get('success') is False and response.json.get('error') == 'login_required'): # apparrently we aren't logged in. Try to fix that. 
r = self._login() diff --git a/common/lib/xmodule/xmodule/open_ended_grading_classes/open_ended_module.py b/common/lib/xmodule/xmodule/open_ended_grading_classes/open_ended_module.py index 266d332a7f..7ba046b2ad 100644 --- a/common/lib/xmodule/xmodule/open_ended_grading_classes/open_ended_module.py +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/open_ended_module.py @@ -72,7 +72,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild): self._parse(oeparam, self.child_prompt, self.child_rubric, system) - if self.child_created == True and self.child_state == self.ASSESSING: + if self.child_created is True and self.child_state == self.ASSESSING: self.child_created = False self.send_to_grader(self.latest_answer(), system) self.child_created = False @@ -159,9 +159,11 @@ class OpenEndedModule(openendedchild.OpenEndedChild): score = int(survey_responses['score']) except: #This is a dev_facing_error - error_message = ("Could not parse submission id, grader id, " - "or feedback from message_post ajax call. Here is the message data: {0}".format( - survey_responses)) + error_message = ( + "Could not parse submission id, grader id, " + "or feedback from message_post ajax call. " + "Here is the message data: {0}".format(survey_responses) + ) log.exception(error_message) #This is a student_facing_error return {'success': False, 'msg': "There was an error saving your feedback. 
Please contact course staff."} @@ -179,8 +181,9 @@ class OpenEndedModule(openendedchild.OpenEndedChild): queue_name=self.message_queue_name ) - student_info = {'anonymous_student_id': anonymous_student_id, - 'submission_time': qtime, + student_info = { + 'anonymous_student_id': anonymous_student_id, + 'submission_time': qtime, } contents = { 'feedback': feedback, @@ -190,8 +193,10 @@ class OpenEndedModule(openendedchild.OpenEndedChild): 'student_info': json.dumps(student_info), } - (error, msg) = qinterface.send_to_queue(header=xheader, - body=json.dumps(contents)) + (error, msg) = qinterface.send_to_queue( + header=xheader, + body=json.dumps(contents) + ) #Convert error to a success value success = True @@ -224,15 +229,18 @@ class OpenEndedModule(openendedchild.OpenEndedChild): anonymous_student_id + str(len(self.child_history))) - xheader = xqueue_interface.make_xheader(lms_callback_url=system.xqueue['construct_callback'](), + xheader = xqueue_interface.make_xheader( + lms_callback_url=system.xqueue['construct_callback'](), lms_key=queuekey, - queue_name=self.queue_name) + queue_name=self.queue_name + ) contents = self.payload.copy() # Metadata related to the student submission revealed to the external grader - student_info = {'anonymous_student_id': anonymous_student_id, - 'submission_time': qtime, + student_info = { + 'anonymous_student_id': anonymous_student_id, + 'submission_time': qtime, } #Update contents with student response and student info @@ -243,12 +251,16 @@ class OpenEndedModule(openendedchild.OpenEndedChild): }) # Submit request. 
When successful, 'msg' is the prior length of the queue - qinterface.send_to_queue(header=xheader, - body=json.dumps(contents)) + qinterface.send_to_queue( + header=xheader, + body=json.dumps(contents) + ) # State associated with the queueing request - queuestate = {'key': queuekey, - 'time': qtime, } + queuestate = { + 'key': queuekey, + 'time': qtime, + } return True def _update_score(self, score_msg, queuekey, system): @@ -302,11 +314,13 @@ class OpenEndedModule(openendedchild.OpenEndedChild): # We want to display available feedback in a particular order. # This dictionary specifies which goes first--lower first. - priorities = {# These go at the start of the feedback - 'spelling': 0, - 'grammar': 1, - # needs to be after all the other feedback - 'markup_text': 3} + priorities = { + # These go at the start of the feedback + 'spelling': 0, + 'grammar': 1, + # needs to be after all the other feedback + 'markup_text': 3 + } do_not_render = ['topicality', 'prompt-overlap'] default_priority = 2 @@ -393,7 +407,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild): rubric_feedback = "" feedback = self._convert_longform_feedback_to_html(response_items) rubric_scores = [] - if response_items['rubric_scores_complete'] == True: + if response_items['rubric_scores_complete'] is True: rubric_renderer = CombinedOpenEndedRubric(system, True) rubric_dict = rubric_renderer.render_rubric(response_items['rubric_xml']) success = rubric_dict['success'] @@ -401,8 +415,10 @@ class OpenEndedModule(openendedchild.OpenEndedChild): rubric_scores = rubric_dict['rubric_scores'] if not response_items['success']: - return system.render_template("{0}/open_ended_error.html".format(self.TEMPLATE_DIR), - {'errors': feedback}) + return system.render_template( + "{0}/open_ended_error.html".format(self.TEMPLATE_DIR), + {'errors': feedback} + ) feedback_template = system.render_template("{0}/open_ended_feedback.html".format(self.TEMPLATE_DIR), { 'grader_type': response_items['grader_type'], @@ 
-545,8 +561,11 @@ class OpenEndedModule(openendedchild.OpenEndedChild): if not self.child_history: return "" - feedback_dict = self._parse_score_msg(self.child_history[-1].get('post_assessment', ""), system, - join_feedback=join_feedback) + feedback_dict = self._parse_score_msg( + self.child_history[-1].get('post_assessment', ""), + system, + join_feedback=join_feedback + ) if not short_feedback: return feedback_dict['feedback'] if feedback_dict['valid'] else '' if feedback_dict['valid']: @@ -734,8 +753,9 @@ class OpenEndedDescriptor(): """Assumes that xml_object has child k""" return xml_object.xpath(k)[0] - return {'oeparam': parse('openendedparam')} - + return { + 'oeparam': parse('openendedparam') + } def definition_to_xml(self, resource_fs): '''Return an xml element representing this definition.''' diff --git a/common/lib/xmodule/xmodule/open_ended_grading_classes/openendedchild.py b/common/lib/xmodule/xmodule/open_ended_grading_classes/openendedchild.py index 2d8d3805f1..7dc8d99451 100644 --- a/common/lib/xmodule/xmodule/open_ended_grading_classes/openendedchild.py +++ b/common/lib/xmodule/xmodule/open_ended_grading_classes/openendedchild.py @@ -101,8 +101,9 @@ class OpenEndedChild(object): # completion (doesn't matter if you self-assessed correct/incorrect). 
if system.open_ended_grading_interface: self.peer_gs = PeerGradingService(system.open_ended_grading_interface, system) - self.controller_qs = controller_query_service.ControllerQueryService(system.open_ended_grading_interface, - system) + self.controller_qs = controller_query_service.ControllerQueryService( + system.open_ended_grading_interface,system + ) else: self.peer_gs = MockPeerGradingService() self.controller_qs = None @@ -180,8 +181,8 @@ class OpenEndedChild(object): try: answer = autolink_html(answer) cleaner = Cleaner(style=True, links=True, add_nofollow=False, page_structure=True, safe_attrs_only=True, - host_whitelist=open_ended_image_submission.TRUSTED_IMAGE_DOMAINS, - whitelist_tags=set(['embed', 'iframe', 'a', 'img'])) + host_whitelist=open_ended_image_submission.TRUSTED_IMAGE_DOMAINS, + whitelist_tags=set(['embed', 'iframe', 'a', 'img'])) clean_html = cleaner.clean_html(answer) clean_html = re.sub(r'
</p>
$', '', re.sub(r'^
<p>
', '', clean_html)) except: @@ -282,7 +283,7 @@ class OpenEndedChild(object): """ #This is a dev_facing_error log.warning("Open ended child state out sync. state: %r, get: %r. %s", - self.child_state, get, msg) + self.child_state, get, msg) #This is a student_facing_error return {'success': False, 'error': 'The problem state got out-of-sync. Please try reloading the page.'} @@ -343,7 +344,7 @@ class OpenEndedChild(object): try: image_data.seek(0) success, s3_public_url = open_ended_image_submission.upload_to_s3(image_data, image_key, - self.s3_interface) + self.s3_interface) except: log.exception("Could not upload image to S3.") @@ -462,7 +463,7 @@ class OpenEndedChild(object): allowed_to_submit = False #This is a student_facing_error error_message = error_string.format(count_required - count_graded, count_graded, count_required, - student_sub_count) + student_sub_count) return success, allowed_to_submit, error_message def get_eta(self): diff --git a/common/lib/xmodule/xmodule/peer_grading_module.py b/common/lib/xmodule/xmodule/peer_grading_module.py index 1ad31922f5..eebfbe22e5 100644 --- a/common/lib/xmodule/xmodule/peer_grading_module.py +++ b/common/lib/xmodule/xmodule/peer_grading_module.py @@ -498,7 +498,6 @@ class PeerGradingModule(PeerGradingFields, XModule): log.error("Problem {0} does not exist in this course".format(location)) raise - for problem in problem_list: problem_location = problem['location'] descriptor = _find_corresponding_module_for_location(problem_location) diff --git a/common/lib/xmodule/xmodule/tests/test_combined_open_ended.py b/common/lib/xmodule/xmodule/tests/test_combined_open_ended.py index 48ea6e7911..d8f4fbbca1 100644 --- a/common/lib/xmodule/xmodule/tests/test_combined_open_ended.py +++ b/common/lib/xmodule/xmodule/tests/test_combined_open_ended.py @@ -18,7 +18,7 @@ import logging log = logging.getLogger(__name__) -from .import test_system +from . 
import test_system ORG = 'edX' COURSE = 'open_ended' # name of directory with course data @@ -70,8 +70,7 @@ class OpenEndedChildTest(unittest.TestCase): def setUp(self): self.test_system = test_system() self.openendedchild = OpenEndedChild(self.test_system, self.location, - self.definition, self.descriptor, self.static_data, self.metadata) - + self.definition, self.descriptor, self.static_data, self.metadata) def test_latest_answer_empty(self): answer = self.openendedchild.latest_answer() @@ -117,7 +116,7 @@ class OpenEndedChildTest(unittest.TestCase): post_assessment = "Post assessment" self.openendedchild.record_latest_post_assessment(post_assessment) self.assertEqual(post_assessment, - self.openendedchild.latest_post_assessment(self.test_system)) + self.openendedchild.latest_post_assessment(self.test_system)) def test_get_score(self): new_answer = "New Answer" @@ -144,12 +143,12 @@ class OpenEndedChildTest(unittest.TestCase): self.openendedchild.new_history_entry(new_answer) self.openendedchild.record_latest_score(self.static_data['max_score']) self.assertEqual(self.openendedchild.is_last_response_correct(), - 'correct') + 'correct') self.openendedchild.new_history_entry(new_answer) self.openendedchild.record_latest_score(0) self.assertEqual(self.openendedchild.is_last_response_correct(), - 'incorrect') + 'incorrect') class OpenEndedModuleTest(unittest.TestCase): @@ -207,7 +206,7 @@ class OpenEndedModuleTest(unittest.TestCase): 'default_queuename': 'testqueue', 'waittime': 1} self.openendedmodule = OpenEndedModule(self.test_system, self.location, - self.definition, self.descriptor, self.static_data, self.metadata) + self.definition, self.descriptor, self.static_data, self.metadata) def test_message_post(self): get = {'feedback': 'feedback text', @@ -372,21 +371,20 @@ class CombinedOpenEndedModuleTest(unittest.TestCase): descriptor = Mock(data=full_definition) test_system = test_system() combinedoe_container = CombinedOpenEndedModule(test_system, - location, - 
descriptor, - model_data={'data': full_definition, 'weight': '1'}) - + location, + descriptor, + model_data={'data': full_definition, 'weight': '1'}) def setUp(self): # TODO: this constructor call is definitely wrong, but neither branch # of the merge matches the module constructor. Someone (Vik?) should fix this. self.combinedoe = CombinedOpenEndedV1Module(self.test_system, - self.location, - self.definition, - self.descriptor, - static_data=self.static_data, - metadata=self.metadata, - instance_state=self.static_data) + self.location, + self.definition, + self.descriptor, + static_data=self.static_data, + metadata=self.metadata, + instance_state=self.static_data) def test_get_tag_name(self): name = self.combinedoe.get_tag_name("Tag") @@ -441,12 +439,12 @@ class CombinedOpenEndedModuleTest(unittest.TestCase): definition = {'prompt': etree.XML(self.prompt), 'rubric': etree.XML(self.rubric), 'task_xml': xml} descriptor = Mock(data=definition) combinedoe = CombinedOpenEndedV1Module(self.test_system, - self.location, - definition, - descriptor, - static_data=self.static_data, - metadata=self.metadata, - instance_state=self.static_data) + self.location, + definition, + descriptor, + static_data=self.static_data, + metadata=self.metadata, + instance_state=self.static_data) changed = combinedoe.update_task_states() self.assertFalse(changed) @@ -471,12 +469,12 @@ class CombinedOpenEndedModuleTest(unittest.TestCase): 'task_xml': [self.task_xml1, self.task_xml2]} descriptor = Mock(data=definition) combinedoe = CombinedOpenEndedV1Module(self.test_system, - self.location, - definition, - descriptor, - static_data=self.static_data, - metadata=self.metadata, - instance_state=instance_state) + self.location, + definition, + descriptor, + static_data=self.static_data, + metadata=self.metadata, + instance_state=instance_state) score_dict = combinedoe.get_score() self.assertEqual(score_dict['score'], 15.0) self.assertEqual(score_dict['total'], 15.0) diff --git 
a/common/lib/xmodule/xmodule/tests/test_util_open_ended.py b/common/lib/xmodule/xmodule/tests/test_util_open_ended.py index 3737586232..42d6410ebd 100644 --- a/common/lib/xmodule/xmodule/tests/test_util_open_ended.py +++ b/common/lib/xmodule/xmodule/tests/test_util_open_ended.py @@ -18,11 +18,11 @@ S3_INTERFACE = { "aws_bucket_name": "", } + class MockQueryDict(dict): """ Mock a query dict so that it can be used in test classes """ - def getlist(self, key, default=None): try: return super(MockQueryDict, self).__getitem__(key) @@ -51,4 +51,4 @@ class DummyModulestore(object): if not isinstance(location, Location): location = Location(location) descriptor = self.modulestore.get_instance(course.id, location, depth=None) - return descriptor.xmodule(self.test_system) \ No newline at end of file + return descriptor.xmodule(self.test_system)