diff --git a/common/lib/xmodule/xmodule/combined_open_ended_modulev1.py b/common/lib/xmodule/xmodule/combined_open_ended_modulev1.py
index 0e14accfeb..2683aa40b1 100644
--- a/common/lib/xmodule/xmodule/combined_open_ended_modulev1.py
+++ b/common/lib/xmodule/xmodule/combined_open_ended_modulev1.py
@@ -573,9 +573,13 @@ class CombinedOpenEndedV1Module():
         Input: AJAX get dictionary
         Output: Dictionary to be rendered via ajax that contains the result html.
         """
-        task_number = int(get['task_number'])
         self.update_task_states()
-        response_dict = self.get_last_response(task_number)
+        # Collect the latest response for every task up to and including the
+        # current one; render results from the most recent of them.
+        all_responses = []
+        loop_up_to_task = self.current_task_number + 1
+        for i in xrange(0, loop_up_to_task):
+            all_responses.append(self.get_last_response(i))
+        response_dict = all_responses[-1]
         context = {
             'results': response_dict['post_assessment'],
-            'task_number': task_number + 1,
+            'task_number': self.current_task_number + 1,
diff --git a/common/lib/xmodule/xmodule/open_ended_module.py b/common/lib/xmodule/xmodule/open_ended_module.py
index c9d12ef75c..4603fbae1d 100644
--- a/common/lib/xmodule/xmodule/open_ended_module.py
+++ b/common/lib/xmodule/xmodule/open_ended_module.py
@@ -429,7 +429,17 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
             correct: Correctness of submission (Boolean)
             score: Points to be assigned (numeric, can be float)
         """
-        fail = {'valid': False, 'score': 0, 'feedback': '', 'rubric_scores' : [[0]], 'grader_types' : [''], 'feedback_items' : ['']}
+        fail = {
+            'valid': False,
+            'score': 0,
+            'feedback': '',
+            'rubric_scores' : [[0]],
+            'grader_types' : [''],
+            'feedback_items' : [''],
+            'feedback_dicts' : [{}],
+            'grader_ids' : [0],
+            'submission_ids' : [0],
+        }
         try:
             score_result = json.loads(score_msg)
         except (TypeError, ValueError):
@@ -458,6 +468,9 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
             feedback_items = []
             rubric_scores = []
             grader_types = []
+            feedback_dicts = []
+            grader_ids = []
+            submission_ids = []
             for i in xrange(0, len(score_result['score'])):
                 new_score_result = {
                     'score': score_result['score'][i],
@@ -473,6 +486,9 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
                 feedback_items.append(feedback_template)
                 rubric_scores.append(rubric_score)
                 grader_types.append(score_result['grader_type'])
+                feedback_dicts.append(score_result['feedback'][i])
+                grader_ids.append(score_result['grader_id'][i])
+                submission_ids.append(score_result['submission_id'])
             if join_feedback:
                 feedback = "".join(feedback_items)
             else:
@@ -485,6 +501,9 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
             rubric_scores = [rubric_score]
             grader_types = [score_result['grader_type']]
             feedback_items = [feedback]
+            feedback_dicts = [score_result['feedback']]
+            grader_ids = [score_result['grader_id']]
+            submission_ids = [score_result['submission_id']]
 
         self.submission_id = score_result['submission_id']
         self.grader_id = score_result['grader_id']
@@ -495,7 +514,10 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
             'feedback': feedback,
             'rubric_scores' : rubric_scores,
             'grader_types' : grader_types,
-            'feedback_items' : feedback_items
+            'feedback_items' : feedback_items,
+            'feedback_dicts' : feedback_dicts,
+            'grader_ids' : grader_ids,
+            'submission_ids' : submission_ids,
         }
 
     def latest_post_assessment(self, system, short_feedback=False, join_feedback=True):
diff --git a/lms/templates/open_ended_feedback.html b/lms/templates/open_ended_feedback.html
index 2cd7faec70..e16aea0b53 100644
--- a/lms/templates/open_ended_feedback.html
+++ b/lms/templates/open_ended_feedback.html
@@ -1,5 +1,5 @@
-
+
 ${rubric_feedback | n}
 % if grader_type=="PE":
@@ -7,9 +7,4 @@
 % endif
-
-
-    ${ feedback | n}
-
-