feat: add custom task kwargs
Add task kwarg handling for some instructor tasks and allow uploading reports to a custom directory
This commit is contained in:
@@ -345,31 +345,31 @@ def submit_calculate_problem_responses_csv(
|
||||
return submit_task(request, task_type, task_class, course_key, task_input, task_key)
|
||||
|
||||
|
||||
def submit_calculate_grades_csv(request, course_key):
|
||||
def submit_calculate_grades_csv(request, course_key, **task_kwargs):
|
||||
"""
|
||||
AlreadyRunningError is raised if the course's grades are already being updated.
|
||||
"""
|
||||
task_type = 'grade_course'
|
||||
task_class = calculate_grades_csv
|
||||
task_input = {}
|
||||
task_input = task_kwargs
|
||||
task_key = ""
|
||||
|
||||
return submit_task(request, task_type, task_class, course_key, task_input, task_key)
|
||||
|
||||
|
||||
def submit_problem_grade_report(request, course_key):
|
||||
def submit_problem_grade_report(request, course_key, **task_kwargs):
|
||||
"""
|
||||
Submits a task to generate a CSV grade report containing problem
|
||||
values.
|
||||
"""
|
||||
task_type = 'grade_problems'
|
||||
task_class = calculate_problem_grade_report
|
||||
task_input = {}
|
||||
task_input = task_kwargs
|
||||
task_key = ""
|
||||
return submit_task(request, task_type, task_class, course_key, task_input, task_key)
|
||||
|
||||
|
||||
def submit_calculate_students_features_csv(request, course_key, features):
|
||||
def submit_calculate_students_features_csv(request, course_key, features, **task_kwargs):
|
||||
"""
|
||||
Submits a task to generate a CSV containing student profile info.
|
||||
|
||||
@@ -377,7 +377,7 @@ def submit_calculate_students_features_csv(request, course_key, features):
|
||||
"""
|
||||
task_type = 'profile_info_csv'
|
||||
task_class = calculate_students_features_csv
|
||||
task_input = features
|
||||
task_input = dict(features=features, **task_kwargs)
|
||||
task_key = ""
|
||||
|
||||
return submit_task(request, task_type, task_class, course_key, task_input, task_key)
|
||||
|
||||
@@ -264,13 +264,13 @@ class DjangoStorageReportStore(ReportStore):
|
||||
getattr(settings, config_name).get('STORAGE_KWARGS'),
|
||||
)
|
||||
|
||||
def store(self, course_id, filename, buff):
|
||||
def store(self, course_id, filename, buff, parent_dir=''):
|
||||
"""
|
||||
Store the contents of `buff` in a directory determined by hashing
|
||||
`course_id`, and name the file `filename`. `buff` can be any file-like
|
||||
object, ready to be read from the beginning.
|
||||
"""
|
||||
path = self.path_to(course_id, filename)
|
||||
path = self.path_to(course_id, filename, parent_dir)
|
||||
# See https://github.com/boto/boto/issues/2868
|
||||
# Boto doesn't play nice with unicode in python3
|
||||
buff_contents = buff.read()
|
||||
@@ -282,7 +282,7 @@ class DjangoStorageReportStore(ReportStore):
|
||||
|
||||
self.storage.save(path, buff)
|
||||
|
||||
def store_rows(self, course_id, filename, rows):
|
||||
def store_rows(self, course_id, filename, rows, parent_dir=''):
|
||||
"""
|
||||
Given a course_id, filename, and rows (each row is an iterable of
|
||||
strings), write the rows to the storage backend in csv format.
|
||||
@@ -291,7 +291,7 @@ class DjangoStorageReportStore(ReportStore):
|
||||
csvwriter = csv.writer(output_buffer)
|
||||
csvwriter.writerows(self._get_utf8_encoded_rows(rows))
|
||||
output_buffer.seek(0)
|
||||
self.store(course_id, filename, output_buffer)
|
||||
self.store(course_id, filename, output_buffer, parent_dir)
|
||||
|
||||
def links_for(self, course_id):
|
||||
"""
|
||||
@@ -321,9 +321,10 @@ class DjangoStorageReportStore(ReportStore):
|
||||
for filename, full_path in files
|
||||
]
|
||||
|
||||
def path_to(self, course_id, filename=''):
|
||||
def path_to(self, course_id, filename='', parent_dir=''):
|
||||
"""
|
||||
Return the full path to a given file for a given course.
|
||||
"""
|
||||
hashed_course_id = hashlib.sha1(str(course_id).encode('utf-8')).hexdigest()
|
||||
return os.path.join(hashed_course_id, filename)
|
||||
directory = parent_dir if bool(parent_dir) else hashed_course_id
|
||||
return os.path.join(directory, filename)
|
||||
|
||||
@@ -65,7 +65,7 @@ def upload_students_csv(_xmodule_instance_args, _entry_id, course_id, task_input
|
||||
task_progress.update_task_state(extra_meta=current_step)
|
||||
|
||||
# compute the student features table and format it
|
||||
query_features = task_input
|
||||
query_features = task_input.get('features')
|
||||
student_data = enrolled_students_features(course_id, query_features)
|
||||
header, rows = format_dictlist(student_data, query_features)
|
||||
|
||||
@@ -78,6 +78,8 @@ def upload_students_csv(_xmodule_instance_args, _entry_id, course_id, task_input
|
||||
task_progress.update_task_state(extra_meta=current_step)
|
||||
|
||||
# Perform the upload
|
||||
upload_csv_to_report_store(rows, 'student_profile_info', course_id, start_date)
|
||||
upload_parent_dir = task_input.get('upload_parent_dir', '')
|
||||
upload_filename = task_input.get('filename', 'student_profile_info')
|
||||
upload_csv_to_report_store(rows, upload_filename, course_id, start_date, parent_dir=upload_parent_dir)
|
||||
|
||||
return task_progress.update_task_state(extra_meta=current_step)
|
||||
|
||||
@@ -184,9 +184,9 @@ class GradeReportBase:
|
||||
Creates and uploads a CSV for the given headers and rows.
|
||||
"""
|
||||
date = datetime.now(UTC)
|
||||
upload_csv_to_report_store(success_rows, context.file_name, context.course_id, date)
|
||||
upload_csv_to_report_store(success_rows, context.upload_filename, context.course_id, date)
|
||||
if len(error_rows) > 1:
|
||||
upload_csv_to_report_store(error_rows, context.file_name + '_err', context.course_id, date)
|
||||
upload_csv_to_report_store(error_rows, context.upload_filename + '_err', context.course_id, date)
|
||||
|
||||
def log_additional_info_for_testing(self, context, message):
|
||||
"""
|
||||
@@ -221,6 +221,8 @@ class _CourseGradeReportContext:
|
||||
self.course_id = course_id
|
||||
self.task_progress = TaskProgress(self.action_name, total=None, start_time=time())
|
||||
self.report_for_verified_only = course_grade_report_verified_only(self.course_id)
|
||||
self.upload_parent_dir = _task_input.get('upload_parent_dir', '')
|
||||
self.upload_filename = _task_input.get('filename', 'grade_report')
|
||||
|
||||
@lazy
|
||||
def course(self):
|
||||
@@ -314,7 +316,8 @@ class _ProblemGradeReportContext:
|
||||
self.course_id = course_id
|
||||
self.report_for_verified_only = problem_grade_report_verified_only(self.course_id)
|
||||
self.task_progress = TaskProgress(self.action_name, total=None, start_time=time())
|
||||
self.file_name = 'problem_grade_report'
|
||||
self.upload_filename = _task_input.get('filename', 'problem_grade_report')
|
||||
self.upload_dir = _task_input.get('upload_parent_dir', '')
|
||||
|
||||
@lazy
|
||||
def course(self):
|
||||
@@ -482,10 +485,21 @@ class CourseGradeReport:
|
||||
Creates and uploads a CSV for the given headers and rows.
|
||||
"""
|
||||
date = datetime.now(UTC)
|
||||
upload_csv_to_report_store([success_headers] + success_rows, 'grade_report', context.course_id, date)
|
||||
upload_csv_to_report_store(
|
||||
[success_headers] + success_rows,
|
||||
context.upload_filename,
|
||||
context.course_id,
|
||||
date,
|
||||
parent_dir=context.upload_parent_dir
|
||||
)
|
||||
if len(error_rows) > 0:
|
||||
error_rows = [error_headers] + error_rows
|
||||
upload_csv_to_report_store(error_rows, 'grade_report_err', context.course_id, date)
|
||||
upload_csv_to_report_store(
|
||||
[error_headers] + error_rows,
|
||||
'{}_err'.format(context.upload_filename),
|
||||
context.course_id,
|
||||
date,
|
||||
parent_dir=context.upload_parent_dir
|
||||
)
|
||||
|
||||
def _grades_header(self, context):
|
||||
"""
|
||||
|
||||
@@ -19,7 +19,7 @@ UPDATE_STATUS_FAILED = 'failed'
|
||||
UPDATE_STATUS_SKIPPED = 'skipped'
|
||||
|
||||
|
||||
def upload_csv_to_report_store(rows, csv_name, course_id, timestamp, config_name='GRADES_DOWNLOAD'):
|
||||
def upload_csv_to_report_store(rows, csv_name, course_id, timestamp, config_name='GRADES_DOWNLOAD', parent_dir=''):
|
||||
"""
|
||||
Upload data as a CSV using ReportStore.
|
||||
|
||||
@@ -32,6 +32,7 @@ def upload_csv_to_report_store(rows, csv_name, course_id, timestamp, config_name
|
||||
]
|
||||
csv_name: Name of the resulting CSV
|
||||
course_id: ID of the course
|
||||
parent_dir: Name of the directory where the CSV file will be stored
|
||||
|
||||
Returns:
|
||||
report_name: string - Name of the generated report
|
||||
@@ -43,7 +44,7 @@ def upload_csv_to_report_store(rows, csv_name, course_id, timestamp, config_name
|
||||
timestamp_str=timestamp.strftime("%Y-%m-%d-%H%M")
|
||||
)
|
||||
|
||||
report_store.store_rows(course_id, report_name, rows)
|
||||
report_store.store_rows(course_id, report_name, rows, parent_dir)
|
||||
tracker_emit(csv_name)
|
||||
return report_name
|
||||
|
||||
|
||||
@@ -634,7 +634,7 @@ class TestGradeReportConditionalContent(TestReportMixin, TestConditionalContent,
|
||||
self.submit_student_answer(self.student_b.username, problem_b_url, [OPTION_1, OPTION_2])
|
||||
|
||||
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
|
||||
result = CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
|
||||
result = CourseGradeReport.generate(None, None, self.course.id, {}, 'graded')
|
||||
self.verify_csv_task_success(result)
|
||||
self.verify_grades_in_csv(
|
||||
[
|
||||
@@ -667,7 +667,7 @@ class TestGradeReportConditionalContent(TestReportMixin, TestConditionalContent,
|
||||
self.submit_student_answer(self.student_a.username, problem_a_url, [OPTION_1, OPTION_1])
|
||||
|
||||
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
|
||||
result = CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
|
||||
result = CourseGradeReport.generate(None, None, self.course.id, {}, 'graded')
|
||||
self.verify_csv_task_success(result)
|
||||
self.verify_grades_in_csv(
|
||||
[
|
||||
|
||||
@@ -95,7 +95,7 @@ class InstructorGradeReportTestCase(TestReportMixin, InstructorTaskCourseTestCas
|
||||
Verify cell data in the grades CSV for a particular user.
|
||||
"""
|
||||
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
|
||||
result = CourseGradeReport.generate(None, None, course_id, None, 'graded')
|
||||
result = CourseGradeReport.generate(None, None, course_id, {}, 'graded')
|
||||
self.assertDictContainsSubset({'attempted': num_rows, 'succeeded': num_rows, 'failed': 0}, result)
|
||||
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
|
||||
report_csv_filename = report_store.links_for(course_id)[0][0]
|
||||
@@ -130,7 +130,7 @@ class TestInstructorGradeReport(InstructorGradeReportTestCase):
|
||||
self.current_task.update_state = Mock()
|
||||
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task') as mock_current_task:
|
||||
mock_current_task.return_value = self.current_task
|
||||
result = CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
|
||||
result = CourseGradeReport.generate(None, None, self.course.id, {}, 'graded')
|
||||
num_students = len(emails)
|
||||
self.assertDictContainsSubset({'attempted': num_students, 'succeeded': num_students, 'failed': 0}, result)
|
||||
|
||||
@@ -144,7 +144,7 @@ class TestInstructorGradeReport(InstructorGradeReportTestCase):
|
||||
mock_grades_iter.return_value = [
|
||||
(self.create_student('username', 'student@example.com'), None, TypeError('Cannot grade student'))
|
||||
]
|
||||
result = CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
|
||||
result = CourseGradeReport.generate(None, None, self.course.id, {}, 'graded')
|
||||
self.assertDictContainsSubset({'attempted': 1, 'succeeded': 0, 'failed': 1}, result)
|
||||
|
||||
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
|
||||
@@ -328,7 +328,7 @@ class TestInstructorGradeReport(InstructorGradeReportTestCase):
|
||||
None,
|
||||
)
|
||||
]
|
||||
result = CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
|
||||
result = CourseGradeReport.generate(None, None, self.course.id, {}, 'graded')
|
||||
self.assertDictContainsSubset({'attempted': 1, 'succeeded': 1, 'failed': 0}, result)
|
||||
|
||||
def test_certificate_eligibility(self):
|
||||
@@ -410,7 +410,7 @@ class TestInstructorGradeReport(InstructorGradeReportTestCase):
|
||||
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
|
||||
with check_mongo_calls(mongo_count):
|
||||
with self.assertNumQueries(expected_query_count):
|
||||
CourseGradeReport.generate(None, None, course.id, None, 'graded')
|
||||
CourseGradeReport.generate(None, None, course.id, {}, 'graded')
|
||||
|
||||
def test_inactive_enrollments(self):
|
||||
"""
|
||||
@@ -424,7 +424,7 @@ class TestInstructorGradeReport(InstructorGradeReportTestCase):
|
||||
|
||||
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task') as mock_current_task:
|
||||
mock_current_task.return_value = self.current_task
|
||||
result = CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
|
||||
result = CourseGradeReport.generate(None, None, self.course.id, {}, 'graded')
|
||||
|
||||
self._verify_cell_data_for_user('active-student', self.course.id, 'Enrollment Status', ENROLLED_IN_COURSE)
|
||||
self._verify_cell_data_for_user('inactive-student', self.course.id, 'Enrollment Status', NOT_ENROLLED_IN_COURSE)
|
||||
@@ -814,7 +814,7 @@ class TestProblemGradeReport(TestReportMixin, InstructorTaskModuleTestCase):
|
||||
Verify that we see no grade information for a course with no graded
|
||||
problems.
|
||||
"""
|
||||
result = ProblemGradeReport.generate(None, None, self.course.id, None, 'graded')
|
||||
result = ProblemGradeReport.generate(None, None, self.course.id, {}, 'graded')
|
||||
self.assertDictContainsSubset({'action_name': 'graded', 'attempted': 2, 'succeeded': 2, 'failed': 0}, result)
|
||||
self.verify_rows_in_csv([
|
||||
dict(list(zip(
|
||||
@@ -838,7 +838,7 @@ class TestProblemGradeReport(TestReportMixin, InstructorTaskModuleTestCase):
|
||||
self.define_option_problem('Problem1', parent=vertical)
|
||||
|
||||
self.submit_student_answer(self.student_1.username, 'Problem1', ['Option 1'])
|
||||
result = ProblemGradeReport.generate(None, None, self.course.id, None, 'graded')
|
||||
result = ProblemGradeReport.generate(None, None, self.course.id, {}, 'graded')
|
||||
self.assertDictContainsSubset({'action_name': 'graded', 'attempted': 2, 'succeeded': 2, 'failed': 0}, result)
|
||||
problem_name = 'Homework 1: Subsection - Problem1'
|
||||
header_row = self.csv_header_row + [problem_name + ' (Earned)', problem_name + ' (Possible)']
|
||||
@@ -882,7 +882,7 @@ class TestProblemGradeReport(TestReportMixin, InstructorTaskModuleTestCase):
|
||||
|
||||
self.submit_student_answer(self.student_1.username, 'Problem1', ['Option 1'])
|
||||
self.submit_student_answer(student_verified.username, 'Problem1', ['Option 1'])
|
||||
result = ProblemGradeReport.generate(None, None, self.course.id, None, 'graded')
|
||||
result = ProblemGradeReport.generate(None, None, self.course.id, {}, 'graded')
|
||||
self.assertDictContainsSubset(
|
||||
{'action_name': 'graded', 'attempted': 1, 'succeeded': 1, 'failed': 0}, result
|
||||
)
|
||||
@@ -902,7 +902,7 @@ class TestProblemGradeReport(TestReportMixin, InstructorTaskModuleTestCase):
|
||||
self.define_option_problem('Problem1', parent=vertical)
|
||||
|
||||
self.submit_student_answer(self.student_1.username, 'Problem1', ['Option 1'])
|
||||
result = ProblemGradeReport.generate(None, None, self.course.id, None, 'graded')
|
||||
result = ProblemGradeReport.generate(None, None, self.course.id, {}, 'graded')
|
||||
self.assertDictContainsSubset({'action_name': 'graded', 'attempted': 3, 'succeeded': 3, 'failed': 0}, result)
|
||||
problem_name = 'Homework 1: Subsection - Problem1'
|
||||
header_row = self.csv_header_row + [problem_name + ' (Earned)', problem_name + ' (Possible)']
|
||||
@@ -973,7 +973,7 @@ class TestProblemReportSplitTestContent(TestReportMixin, TestConditionalContent,
|
||||
self.submit_student_answer(self.student_b.username, self.problem_b_url, [self.OPTION_1, self.OPTION_2])
|
||||
|
||||
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
|
||||
result = ProblemGradeReport.generate(None, None, self.course.id, None, 'graded')
|
||||
result = ProblemGradeReport.generate(None, None, self.course.id, {}, 'graded')
|
||||
self.assertDictContainsSubset(
|
||||
{'action_name': 'graded', 'attempted': 2, 'succeeded': 2, 'failed': 0}, result
|
||||
)
|
||||
@@ -1068,7 +1068,7 @@ class TestProblemReportSplitTestContent(TestReportMixin, TestConditionalContent,
|
||||
header_row += [problem + ' (Earned)', problem + ' (Possible)']
|
||||
|
||||
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
|
||||
ProblemGradeReport.generate(None, None, self.course.id, None, 'graded')
|
||||
ProblemGradeReport.generate(None, None, self.course.id, {}, 'graded')
|
||||
assert self.get_csv_row_with_headers() == header_row
|
||||
|
||||
|
||||
@@ -1125,7 +1125,7 @@ class TestProblemReportCohortedContent(TestReportMixin, ContentGroupTestCase, In
|
||||
self.submit_student_answer(self.beta_user.username, 'Problem1', ['Option 1', 'Option 2'])
|
||||
|
||||
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
|
||||
result = ProblemGradeReport.generate(None, None, self.course.id, None, 'graded')
|
||||
result = ProblemGradeReport.generate(None, None, self.course.id, {}, 'graded')
|
||||
self.assertDictContainsSubset(
|
||||
{'action_name': 'graded', 'attempted': 5, 'succeeded': 5, 'failed': 0}, result
|
||||
)
|
||||
@@ -1282,6 +1282,36 @@ class TestStudentReport(TestReportMixin, InstructorTaskCourseTestCase):
|
||||
assert len(links) == 1
|
||||
self.assertDictContainsSubset({'attempted': 1, 'succeeded': 1, 'failed': 0}, result)
|
||||
|
||||
def test_custom_directory(self):
|
||||
self.create_student('student', 'student@example.com')
|
||||
directory_name = 'test_dir'
|
||||
task_input = {'features': [], 'upload_parent_dir': directory_name}
|
||||
patched_upload = patch('lms.djangoapps.instructor_task.tasks_helper.enrollments.upload_csv_to_report_store')
|
||||
|
||||
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
|
||||
with patched_upload as mock_upload_report:
|
||||
upload_students_csv(None, None, self.course.id, task_input, 'calculated')
|
||||
|
||||
mock_upload_report.assert_called_once_with(
|
||||
[[], []],
|
||||
'student_profile_info',
|
||||
self.course.id,
|
||||
ANY,
|
||||
parent_dir=directory_name
|
||||
)
|
||||
|
||||
def test_custom_filename(self):
|
||||
self.create_student('student', 'student@example.com')
|
||||
filename = "test_filename"
|
||||
task_input = {'features': [], 'filename': filename}
|
||||
patched_upload = patch('lms.djangoapps.instructor_task.tasks_helper.enrollments.upload_csv_to_report_store')
|
||||
|
||||
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
|
||||
with patched_upload as mock_upload_report:
|
||||
upload_students_csv(None, None, self.course.id, task_input, 'calculated')
|
||||
|
||||
mock_upload_report.assert_called_once_with([[], []], filename, self.course.id, ANY, parent_dir='')
|
||||
|
||||
@ddt.data(['student', 'student\xec'])
|
||||
def test_unicode_usernames(self, students):
|
||||
"""
|
||||
@@ -1325,11 +1355,13 @@ class TestTeamStudentReport(TestReportMixin, InstructorTaskCourseTestCase):
|
||||
""" Run the upload_students_csv task and verify that the correct team was added to the CSV. """
|
||||
current_task = Mock()
|
||||
current_task.update_state = Mock()
|
||||
task_input = [
|
||||
'id', 'username', 'name', 'email', 'language', 'location',
|
||||
'year_of_birth', 'gender', 'level_of_education', 'mailing_address',
|
||||
'goals', 'team'
|
||||
]
|
||||
task_input = {
|
||||
'features': [
|
||||
'id', 'username', 'name', 'email', 'language', 'location',
|
||||
'year_of_birth', 'gender', 'level_of_education', 'mailing_address',
|
||||
'goals', 'team'
|
||||
]
|
||||
}
|
||||
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task') as mock_current_task:
|
||||
mock_current_task.return_value = current_task
|
||||
result = upload_students_csv(None, None, self.course.id, task_input, 'calculated')
|
||||
@@ -1760,7 +1792,7 @@ class TestGradeReport(TestReportMixin, InstructorTaskModuleTestCase):
|
||||
|
||||
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'), \
|
||||
patch.dict(settings.FEATURES, {'PERSISTENT_GRADES_ENABLED_FOR_ALL_TESTS': persistent_grades_enabled}):
|
||||
result = CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
|
||||
result = CourseGradeReport.generate(None, None, self.course.id, {}, 'graded')
|
||||
self.assertDictContainsSubset(
|
||||
{'action_name': 'graded', 'attempted': 1, 'succeeded': 1, 'failed': 0},
|
||||
result,
|
||||
@@ -1782,6 +1814,27 @@ class TestGradeReport(TestReportMixin, InstructorTaskModuleTestCase):
|
||||
ignore_other_columns=True,
|
||||
)
|
||||
|
||||
def test_grade_report_custom_directory(self):
|
||||
self.submit_student_answer(self.student.username, 'Problem1', ['Option 1'])
|
||||
|
||||
directory_name = "test_dir"
|
||||
task_input = {
|
||||
"upload_parent_dir": directory_name
|
||||
}
|
||||
|
||||
patched_upload = patch('lms.djangoapps.instructor_task.tasks_helper.grades.upload_csv_to_report_store')
|
||||
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
|
||||
with patched_upload as mock_upload_report:
|
||||
CourseGradeReport.generate(None, None, self.course.id, task_input, 'graded')
|
||||
|
||||
mock_upload_report.assert_called_once_with(
|
||||
[ANY, ANY],
|
||||
'grade_report',
|
||||
self.course.id,
|
||||
ANY,
|
||||
parent_dir=directory_name
|
||||
)
|
||||
|
||||
def test_grade_report_with_overrides(self):
|
||||
course_data = CourseData(self.student, course=self.course)
|
||||
subsection_grade = CreateSubsectionGrade(self.unattempted_section, course_data.structure, {}, {})
|
||||
@@ -1798,7 +1851,7 @@ class TestGradeReport(TestReportMixin, InstructorTaskModuleTestCase):
|
||||
self.submit_student_answer(self.student.username, 'Problem1', ['Option 1'])
|
||||
|
||||
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
|
||||
result = CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
|
||||
result = CourseGradeReport.generate(None, None, self.course.id, {}, 'graded')
|
||||
self.assertDictContainsSubset(
|
||||
{'action_name': 'graded', 'attempted': 1, 'succeeded': 1, 'failed': 0},
|
||||
result,
|
||||
@@ -1842,7 +1895,7 @@ class TestGradeReport(TestReportMixin, InstructorTaskModuleTestCase):
|
||||
|
||||
self.submit_student_answer(student_1.username, 'Problem1', ['Option 1'])
|
||||
self.submit_student_answer(student_verified.username, 'Problem1', ['Option 1'])
|
||||
result = CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
|
||||
result = CourseGradeReport.generate(None, None, self.course.id, {}, 'graded')
|
||||
self.assertDictContainsSubset(
|
||||
{'action_name': 'graded', 'attempted': 1, 'succeeded': 1, 'failed': 0}, result
|
||||
)
|
||||
@@ -1854,7 +1907,7 @@ class TestGradeReport(TestReportMixin, InstructorTaskModuleTestCase):
|
||||
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'), \
|
||||
patch('lms.djangoapps.grades.course_data.get_course_blocks') as mock_course_blocks, \
|
||||
patch('lms.djangoapps.grades.subsection_grade.get_score') as mock_get_score:
|
||||
CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
|
||||
CourseGradeReport.generate(None, None, self.course.id, {}, 'graded')
|
||||
assert not mock_course_blocks.called
|
||||
assert not mock_get_score.called
|
||||
|
||||
@@ -1910,7 +1963,7 @@ class TestGradeReportEnrollmentAndCertificateInfo(TestReportMixin, InstructorTas
|
||||
Verify grade report data.
|
||||
"""
|
||||
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
|
||||
CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
|
||||
CourseGradeReport.generate(None, None, self.course.id, {}, 'graded')
|
||||
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
|
||||
report_csv_filename = report_store.links_for(self.course.id)[0][0]
|
||||
report_path = report_store.path_to(self.course.id, report_csv_filename)
|
||||
@@ -2605,7 +2658,7 @@ class TestInstructorOra2Report(SharedModuleStoreTestCase):
|
||||
filename = f'{course_id_string}_ORA_data_{timestamp_str}.csv'
|
||||
|
||||
assert return_val == UPDATE_STATUS_SUCCEEDED
|
||||
mock_store_rows.assert_called_once_with(self.course.id, filename, [test_header] + test_rows)
|
||||
mock_store_rows.assert_called_once_with(self.course.id, filename, [test_header] + test_rows, '')
|
||||
|
||||
|
||||
class TestInstructorOra2AttachmentsExport(SharedModuleStoreTestCase):
|
||||
@@ -2658,7 +2711,7 @@ class TestInstructorOra2AttachmentsExport(SharedModuleStoreTestCase):
|
||||
filename = f'{course_id_string}_ORA_summary_{timestamp_str}.csv'
|
||||
|
||||
self.assertEqual(return_val, UPDATE_STATUS_SUCCEEDED)
|
||||
mock_store_rows.assert_called_once_with(self.course.id, filename, [test_header] + test_rows)
|
||||
mock_store_rows.assert_called_once_with(self.course.id, filename, [test_header] + test_rows, '')
|
||||
|
||||
def test_export_fails_if_error_on_create_zip_step(self):
|
||||
with ExitStack() as stack:
|
||||
|
||||
Reference in New Issue
Block a user