diff --git a/common/lib/xmodule/xmodule/combined_open_ended_modulev1.py b/common/lib/xmodule/xmodule/combined_open_ended_modulev1.py index 9989fb8614..16df364ff6 100644 --- a/common/lib/xmodule/xmodule/combined_open_ended_modulev1.py +++ b/common/lib/xmodule/xmodule/combined_open_ended_modulev1.py @@ -191,9 +191,9 @@ class CombinedOpenEndedV1Module(): # completion (doesn't matter if you self-assessed correct/incorrect). self._max_score = int(self.metadata.get('max_score', MAX_SCORE)) - rubric_renderer = CombinedOpenEndedRubric(system, True) + self.rubric_renderer = CombinedOpenEndedRubric(system, True) rubric_string = stringify_children(definition['rubric']) - rubric_renderer.check_if_rubric_is_parseable(rubric_string, location, MAX_SCORE_ALLOWED, self._max_score) + self.rubric_renderer.check_if_rubric_is_parseable(rubric_string, location, MAX_SCORE_ALLOWED, self._max_score) #Static data is passed to the child modules to render self.static_data = { @@ -444,9 +444,11 @@ class CombinedOpenEndedV1Module(): rubric_data = task._parse_score_msg(task.history[-1].get('post_assessment', ""), self.system) rubric_scores = rubric_data['rubric_scores'] grader_types = rubric_data['grader_types'] + feedback_items = rubric_data['feedback_items'] elif task_type== "selfassessment": rubric_scores = last_post_assessment grader_types = ['SA'] + feedback_items = [] last_post_assessment = "" last_correctness = task.is_last_response_correct() max_score = task.max_score() @@ -473,6 +475,8 @@ class CombinedOpenEndedV1Module(): 'min_score_to_attempt': min_score_to_attempt, 'max_score_to_attempt': max_score_to_attempt, 'rubric_scores' : rubric_scores, + 'grader_types' : grader_types, + 'feedback_items' : feedback_items, } return last_response_dict @@ -509,7 +513,7 @@ class CombinedOpenEndedV1Module(): pass return return_html - def get_rubric_scores(self, get): + def get_results(self, get): """ Gets the results of a given grader via ajax. 
Input: AJAX get dictionary @@ -517,12 +521,21 @@ """ task_number = int(get['task_number']) self.update_task_states() - response_dict = self.get_last_response(task_number) - context = {'results': response_dict['post_assessment'], 'task_number': task_number + 1, 'task_name' : response_dict['human_task']} + all_responses = [] + for i in xrange(0,task_number+1): + all_responses.append(self.get_last_response(i)) + rubric_scores = [rd['rubric_scores'] for rd in all_responses] + grader_types = [rd['grader_types'] for rd in all_responses] + feedback_items = [rd['feedback_items'] for rd in all_responses] + + rubric_html = self.rubric_renderer.render_combined_rubric(self.static_data['rubric'], rubric_scores, + grader_types, feedback_items) + + context = {'results': rubric_html, 'task_number': task_number + 1, 'task_name' : all_responses[-1]['human_task']} html = self.system.render_template('combined_open_ended_results.html', context) return {'html': html, 'success': True} - def get_results(self, get): + def get_results_old(self, get): """ Gets the results of a given grader via ajax. 
Input: AJAX get dictionary diff --git a/common/lib/xmodule/xmodule/combined_open_ended_rubric.py b/common/lib/xmodule/xmodule/combined_open_ended_rubric.py index 25bac18679..6b10b66653 100644 --- a/common/lib/xmodule/xmodule/combined_open_ended_rubric.py +++ b/common/lib/xmodule/xmodule/combined_open_ended_rubric.py @@ -36,7 +36,9 @@ class CombinedOpenEndedRubric(object): {'categories': rubric_categories, 'has_score': self.has_score, 'view_only': self.view_only, - 'max_score': max_score}) + 'max_score': max_score, + 'combined_rubric' : False + }) success = True except: error_message = "[render_rubric] Could not parse the rubric with xml: {0}".format(rubric_xml) @@ -154,6 +156,29 @@ class CombinedOpenEndedRubric(object): return {'description': description, 'options': options, 'score' : score} + def render_combined_rubric(self,rubric_xml,scores,score_types,feedback_types): + success, score_tuples = CombinedOpenEndedRubric.reformat_scores_for_rendering(scores,score_types,feedback_types) + rubric_categories = self.extract_categories(rubric_xml) + max_scores = map((lambda cat: cat['options'][-1]['points']), rubric_categories) + max_score = max(max_scores) + for i in xrange(0,len(rubric_categories)): + category = rubric_categories[i] + rubric_categories[i]['grader_types'] = [] + for j in xrange(0,len(category['options'])): + for tuple in score_tuples: + if tuple[1] == i and tuple[2] ==j: + for grader_type in tuple[3]: + rubric_categories[i]['grader_types'].append(grader_type) + + html = self.system.render_template('open_ended_rubric.html', + {'categories': rubric_categories, + 'has_score': True, + 'view_only': True, + 'max_score': max_score, + 'combined_rubric' : True + }) + return html + @staticmethod def validate_options(options): @@ -175,10 +200,10 @@ class CombinedOpenEndedRubric(object): def reformat_scores_for_rendering(scores, score_types, feedback_types): success = False if len(scores)==0: - return success + return success, "" if len(scores) != len(score_types) or 
len(feedback_types) != len(scores): - return success + return success, "" score_lists = [] score_type_list = [] @@ -197,7 +222,7 @@ class CombinedOpenEndedRubric(object): score_type = score_type_list[i] feedback_type = feedback_type_list[i] if len(score_list)!=score_list_len or len(score_type)!=score_list_len or len(feedback_type)!=score_list_len: - return success + return success, "" score_tuples = [] for i in xrange(0,len(score_lists)): @@ -208,6 +233,9 @@ class CombinedOpenEndedRubric(object): score_tuples[tup_ind][3].append(score_type_list[i][j]) score_tuples[tup_ind][4].append(feedback_type_list[i][j]) + success = True + return success, score_tuples + @staticmethod def check_for_tuple_matches(tuples, tuple): category = tuple[1] diff --git a/lms/templates/open_ended_rubric.html b/lms/templates/open_ended_rubric.html index 2436cbb849..0dc4bf29f0 100644 --- a/lms/templates/open_ended_rubric.html +++ b/lms/templates/open_ended_rubric.html @@ -20,11 +20,18 @@
  • % endif % if view_only: - % if option['selected']: + % if option['selected'] and combined_rubric==False: ## if this is the selected rubric block, show it highlighted
    ${option['points']} points : ${option['text']}
    + % elif combined_rubric == True: +
    + %for grader_type in option['grader_types']: + ${grader_type} + %endfor + ${option['points']} points : ${option['text']} +
    % endif % else: