feat: optionally emit course completion analytics when a learner enters the courseware (#36507)
This PR attempts to improve the ability to collect analytics about learners' progress in their courses. Currently, the only place we regularly calculate course progress is when a learner visits the "Progress" tab in the courseware. Now, _optionally_, when a learner visits the home page of their course, we will enqueue a Celery task that will calculate their progress and emit a tracking event. This event is gated by the COURSE_HOME_SEND_COURSE_PROGRESS_ANALYTICS_FOR_STUDENT waffle flag.
This commit is contained in:
@@ -3,17 +3,16 @@ Tests for Outline Tab API in the Course Home API
|
||||
"""
|
||||
|
||||
import itertools
|
||||
import json
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
|
||||
from unittest.mock import Mock, patch # lint-amnesty, pylint: disable=wrong-import-order
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import ddt # lint-amnesty, pylint: disable=wrong-import-order
|
||||
import json # lint-amnesty, pylint: disable=wrong-import-order
|
||||
import ddt
|
||||
from completion.models import BlockCompletion
|
||||
from django.conf import settings # lint-amnesty, pylint: disable=wrong-import-order
|
||||
from django.conf import settings
|
||||
from django.test import override_settings
|
||||
from django.urls import reverse # lint-amnesty, pylint: disable=wrong-import-order
|
||||
from edx_toggles.toggles.testutils import override_waffle_flag # lint-amnesty, pylint: disable=wrong-import-order
|
||||
from django.urls import reverse
|
||||
from edx_toggles.toggles.testutils import override_waffle_flag
|
||||
|
||||
from cms.djangoapps.contentstore.outlines import update_outline_from_modulestore
|
||||
from common.djangoapps.course_modes.models import CourseMode
|
||||
@@ -21,7 +20,9 @@ from common.djangoapps.course_modes.tests.factories import CourseModeFactory
|
||||
from common.djangoapps.student.models import CourseEnrollment
|
||||
from common.djangoapps.student.roles import CourseInstructorRole
|
||||
from common.djangoapps.student.tests.factories import UserFactory
|
||||
from lms.djangoapps.course_home_api.toggles import COURSE_HOME_SEND_COURSE_PROGRESS_ANALYTICS_FOR_STUDENT
|
||||
from lms.djangoapps.course_home_api.tests.utils import BaseCourseHomeTests
|
||||
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
|
||||
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
|
||||
from openedx.core.djangoapps.content.learning_sequences.api import replace_course_outline
|
||||
from openedx.core.djangoapps.content.learning_sequences.data import CourseOutlineData, CourseVisibility
|
||||
@@ -33,12 +34,15 @@ from openedx.features.course_experience import (
|
||||
COURSE_ENABLE_UNENROLLED_ACCESS_FLAG,
|
||||
ENABLE_COURSE_GOALS
|
||||
)
|
||||
from openedx.features.discounts.applicability import (
|
||||
DISCOUNT_APPLICABILITY_FLAG,
|
||||
FIRST_PURCHASE_DISCOUNT_OVERRIDE_FLAG
|
||||
from openedx.features.discounts.applicability import DISCOUNT_APPLICABILITY_FLAG, FIRST_PURCHASE_DISCOUNT_OVERRIDE_FLAG
|
||||
from xmodule.course_block import (
|
||||
COURSE_VISIBILITY_PUBLIC,
|
||||
COURSE_VISIBILITY_PUBLIC_OUTLINE
|
||||
)
|
||||
from xmodule.modulestore.tests.factories import (
|
||||
BlockFactory,
|
||||
CourseFactory
|
||||
)
|
||||
from xmodule.course_block import COURSE_VISIBILITY_PUBLIC, COURSE_VISIBILITY_PUBLIC_OUTLINE # lint-amnesty, pylint: disable=wrong-import-order
|
||||
from xmodule.modulestore.tests.factories import CourseFactory, BlockFactory # lint-amnesty, pylint: disable=wrong-import-order
|
||||
|
||||
|
||||
@ddt.ddt
|
||||
@@ -461,6 +465,25 @@ class OutlineTabTestViews(BaseCourseHomeTests):
|
||||
CourseEnrollment.enroll(UserFactory(), self.course.id) # grr, some rando took our spot!
|
||||
self.assert_can_enroll(False)
|
||||
|
||||
@override_waffle_flag(COURSE_HOME_SEND_COURSE_PROGRESS_ANALYTICS_FOR_STUDENT, active=True)
@patch("lms.djangoapps.course_home_api.outline.views.collect_progress_for_user_in_course.delay")
def test_course_progress_analytics_enabled(self, mock_task):
    """
    Ensures that the `collect_progress_for_user_in_course` task is enqueued, with the correct args, only
    if the feature is enabled.
    """
    self.client.get(self.url)
    mock_task.assert_called_once_with(str(self.course.id), self.user.id)
|
||||
|
||||
@override_waffle_flag(COURSE_HOME_SEND_COURSE_PROGRESS_ANALYTICS_FOR_STUDENT, active=False)
@patch("lms.djangoapps.course_home_api.outline.views.collect_progress_for_user_in_course.delay")
def test_course_progress_analytics_disabled(self, mock_task):
    """
    Ensures that the `collect_progress_for_user_in_course` task is not run if the feature is disabled.
    """
    self.client.get(self.url)
    mock_task.assert_not_called()
|
||||
|
||||
|
||||
@ddt.ddt
|
||||
class SidebarBlocksTestViews(BaseCourseHomeTests):
|
||||
|
||||
@@ -35,6 +35,8 @@ from lms.djangoapps.course_home_api.outline.serializers import (
|
||||
OutlineTabSerializer,
|
||||
)
|
||||
from lms.djangoapps.course_home_api.utils import get_course_or_403
|
||||
from lms.djangoapps.course_home_api.tasks import collect_progress_for_user_in_course
|
||||
from lms.djangoapps.course_home_api.toggles import send_course_progress_analytics_for_student_is_enabled
|
||||
from lms.djangoapps.courseware.access import has_access
|
||||
from lms.djangoapps.courseware.context_processor import user_timezone_locale_prefs
|
||||
from lms.djangoapps.courseware.courses import get_course_date_blocks, get_course_info_section
|
||||
@@ -366,6 +368,9 @@ class OutlineTabView(RetrieveAPIView):
|
||||
context['enrollment'] = enrollment
|
||||
serializer = self.get_serializer_class()(data, context=context)
|
||||
|
||||
if send_course_progress_analytics_for_student_is_enabled(course_key) and not user_is_masquerading:
|
||||
collect_progress_for_user_in_course.delay(course_key_string, request.user.id)
|
||||
|
||||
return Response(serializer.data)
|
||||
|
||||
def finalize_response(self, request, response, *args, **kwargs):
|
||||
|
||||
43
lms/djangoapps/course_home_api/progress/api.py
Normal file
43
lms/djangoapps/course_home_api/progress/api.py
Normal file
@@ -0,0 +1,43 @@
|
||||
"""
|
||||
Python APIs exposed for the progress tracking functionality of the course home API.
|
||||
"""
|
||||
|
||||
from django.contrib.auth import get_user_model
|
||||
from opaque_keys.edx.keys import CourseKey
|
||||
|
||||
from lms.djangoapps.courseware.courses import get_course_blocks_completion_summary
|
||||
|
||||
|
||||
User = get_user_model()
|
||||
|
||||
|
||||
def calculate_progress_for_learner_in_course(course_key: "CourseKey", user: "User") -> dict:
    """
    Calculate a given learner's progress in the specified course run.

    Arguments:
        course_key: the key of the course run to calculate progress for.
        user: the learner whose progress is being calculated.

    Returns:
        A dict containing the complete/incomplete/locked unit counts, the total unit
        count, and the corresponding fractional percentages rounded to two decimal
        places. Returns an empty dict when no completion summary is available or when
        the summary reports zero total units.
    """
    summary = get_course_blocks_completion_summary(course_key, user)
    if not summary:
        return {}

    complete_count = summary.get("complete_count", 0)
    locked_count = summary.get("locked_count", 0)
    incomplete_count = summary.get("incomplete_count", 0)

    # This completion calculation mirrors the logic used in the CompletionDonutChart component on the Learning MFE's
    # Progress tab. It's duplicated here to enable backend reporting on learner progress. Ideally, this logic should be
    # refactored in the future so that the calculation is handled solely on the backend, eliminating the need for it to
    # be done in the frontend.
    num_total_units = complete_count + incomplete_count + locked_count
    # Guard against ZeroDivisionError: a summary may be present but report no units at all.
    if not num_total_units:
        return {}
    complete_percentage = round(complete_count / num_total_units, 2)
    locked_percentage = round(locked_count / num_total_units, 2)
    # Derived from the other two (rather than rounded independently) so the three
    # percentages always sum to 1.0, matching the frontend's donut-chart math.
    incomplete_percentage = 1.00 - complete_percentage - locked_percentage

    return {
        "complete_count": complete_count,
        "locked_count": locked_count,
        "incomplete_count": incomplete_count,
        "total_count": num_total_units,
        "complete_percentage": complete_percentage,
        "locked_percentage": locked_percentage,
        "incomplete_percentage": incomplete_percentage
    }
|
||||
50
lms/djangoapps/course_home_api/progress/tests/test_api.py
Normal file
50
lms/djangoapps/course_home_api/progress/tests/test_api.py
Normal file
@@ -0,0 +1,50 @@
|
||||
"""
|
||||
Tests for the Python APIs exposed by the Progress API of the Course Home API app.
|
||||
"""
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.test import TestCase
|
||||
|
||||
from lms.djangoapps.course_home_api.progress.api import calculate_progress_for_learner_in_course
|
||||
|
||||
|
||||
class ProgressApiTests(TestCase):
    """
    Tests for the progress calculation functions.
    """
    @patch("lms.djangoapps.course_home_api.progress.api.get_course_blocks_completion_summary")
    def test_calculate_progress_for_learner_in_course(self, mock_get_summary):
        """
        A test to verify functionality of the function under test.
        """
        get_summary_return_val = {
            "complete_count": 5,
            "incomplete_count": 2,
            "locked_count": 1,
        }
        mock_get_summary.return_value = get_summary_return_val

        expected_data = {
            "complete_count": 5,
            "incomplete_count": 2,
            "locked_count": 1,
            "total_count": 8,
            "complete_percentage": 0.62,
            "locked_percentage": 0.12,
            "incomplete_percentage": 0.26,
        }

        results = calculate_progress_for_learner_in_course("some_course", "some_user")
        # Previously written as `assert mock.called_once_with(...)`, which always
        # passes because `called_once_with` is just an auto-created Mock attribute.
        mock_get_summary.assert_called_once_with("some_course", "some_user")
        assert results == expected_data

    @patch("lms.djangoapps.course_home_api.progress.api.get_course_blocks_completion_summary")
    def test_calculate_progress_for_learner_in_course_summary_empty(self, mock_get_summary):
        """
        A test to verify functionality of the function under test if a block summary is not received.
        """
        mock_get_summary.return_value = {}

        results = calculate_progress_for_learner_in_course("some_course", "some_user")
        assert not results
|
||||
50
lms/djangoapps/course_home_api/tasks.py
Normal file
50
lms/djangoapps/course_home_api/tasks.py
Normal file
@@ -0,0 +1,50 @@
|
||||
"""
|
||||
Celery tasks used by the `course_home_api` app.
|
||||
"""
|
||||
import logging
|
||||
|
||||
from celery import shared_task
|
||||
from django.contrib.auth import get_user_model
|
||||
from edx_django_utils.monitoring import set_code_owner_attribute
|
||||
from eventtracking import tracker
|
||||
from opaque_keys import InvalidKeyError
|
||||
from opaque_keys.edx.keys import CourseKey
|
||||
|
||||
from common.djangoapps.student.models_api import get_course_enrollment
|
||||
from lms.djangoapps.course_home_api.progress.api import calculate_progress_for_learner_in_course
|
||||
|
||||
User = get_user_model()
|
||||
COURSE_COMPLETION_FOR_USER_EVENT_NAME = "edx.bi.user.course-progress"
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@shared_task
@set_code_owner_attribute
def collect_progress_for_user_in_course(course_id: str, user_id: int) -> None:
    """
    Celery task that retrieves a learner's progress in a given course and emits it
    as a tracking event (`edx.bi.user.course-progress`).

    Arguments:
        course_id: serialized course run key identifying the course.
        user_id: primary key of the learner whose progress should be collected.
    """
    try:
        course_key = CourseKey.from_string(course_id)
    except InvalidKeyError:
        log.warning(f"Invalid course id {course_id}, aborting task.")
        return

    try:
        user = User.objects.get(id=user_id)
    except User.DoesNotExist:
        log.warning(f"Could not retrieve a user with id {user_id}, aborting task.")
        return

    progress = calculate_progress_for_learner_in_course(course_key, user)
    enrollment = get_course_enrollment(user, course_key)
    # add a few extra fields to the returned data to make the event payload a bit more usable
    progress["user_id"] = user.id
    progress["course_id"] = course_id
    # NOTE(review): get_course_enrollment can return None when the user has no
    # enrollment record for this course; previously this raised AttributeError.
    progress["enrollment_mode"] = enrollment.mode if enrollment else None

    tracker.emit(
        COURSE_COMPLETION_FOR_USER_EVENT_NAME,
        progress
    )
|
||||
88
lms/djangoapps/course_home_api/tests/test_tasks.py
Normal file
88
lms/djangoapps/course_home_api/tests/test_tasks.py
Normal file
@@ -0,0 +1,88 @@
|
||||
"""
|
||||
Tests for Celery tasks used by the `course_home_api` app.
|
||||
"""
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
from opaque_keys.edx.keys import CourseKey
|
||||
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
|
||||
|
||||
from common.djangoapps.student.tests.factories import CourseEnrollmentFactory, UserFactory
|
||||
from lms.djangoapps.course_home_api.tasks import (
|
||||
COURSE_COMPLETION_FOR_USER_EVENT_NAME,
|
||||
collect_progress_for_user_in_course
|
||||
)
|
||||
from openedx.core.djangoapps.catalog.tests.factories import CourseFactory, CourseRunFactory
|
||||
|
||||
|
||||
class CalculateCompletionTaskTests(ModuleStoreTestCase):
    """
    Tests for the `collect_progress_for_user_in_course` Celery task.
    """
    def setUp(self):
        super().setUp()
        self.user = UserFactory()
        self.course_run = CourseRunFactory()
        self.course_run_key_string = self.course_run['key']
        self.course = CourseFactory(key=self.course_run_key_string, course_runs=[self.course_run])
        self.enrollment = CourseEnrollmentFactory(
            user=self.user,
            course_id=self.course_run_key_string,
            mode="verified"
        )

    @patch("lms.djangoapps.course_home_api.tasks.calculate_progress_for_learner_in_course")
    @patch("lms.djangoapps.course_home_api.tasks.tracker.emit")
    def test_successful_event_emission(self, mock_tracker, mock_progress):
        """
        Test to ensure a tracker event is emitted by the task with the expected completion information.
        """
        mock_progress.return_value = {
            "complete_count": 5,
            "incomplete_count": 2,
            "locked_count": 1,
            "total_count": 8,
            "complete_percentage": 0.62,
            "locked_percentage": 0.12,
            "incomplete_percentage": 0.26,
        }

        expected_data = {
            "user_id": self.user.id,
            "course_id": self.course_run_key_string,
            "enrollment_mode": self.enrollment.mode,
            "complete_count": 5,
            "incomplete_count": 2,
            "locked_count": 1,
            "total_count": 8,
            "complete_percentage": 0.62,
            "locked_percentage": 0.12,
            "incomplete_percentage": 0.26,
        }

        collect_progress_for_user_in_course(self.course_run_key_string, self.user.id)
        mock_progress.assert_called_once_with(CourseKey.from_string(self.course_run_key_string), self.user)
        mock_tracker.assert_called_once_with(
            COURSE_COMPLETION_FOR_USER_EVENT_NAME,
            expected_data,
        )

    @patch("lms.djangoapps.course_home_api.tasks.calculate_progress_for_learner_in_course")
    @patch("lms.djangoapps.course_home_api.tasks.tracker.emit")
    def test_aborted_task_user_dne(self, mock_tracker, mock_progress):
        """
        Test to ensure the task is aborted if we cannot find the user for some reason.
        """
        collect_progress_for_user_in_course(self.course_run_key_string, 8675309)
        mock_progress.assert_not_called()
        mock_tracker.assert_not_called()

    @patch("lms.djangoapps.course_home_api.tasks.calculate_progress_for_learner_in_course")
    @patch("lms.djangoapps.course_home_api.tasks.tracker.emit")
    def test_aborted_task_bad_course_id(self, mock_tracker, mock_progress):
        """
        Test to ensure the task is aborted if the course key provided is no good.
        """
        collect_progress_for_user_in_course("nonsense", self.user.id)
        mock_progress.assert_not_called()
        mock_tracker.assert_not_called()
|
||||
@@ -36,6 +36,21 @@ COURSE_HOME_NEW_DISCUSSION_SIDEBAR_VIEW = CourseWaffleFlag(
|
||||
)
|
||||
|
||||
|
||||
# Waffle flag to enable emission of course progress analytics for students in their courses.
|
||||
#
|
||||
# .. toggle_name: course_home.send_course_progress_analytics_for_student
|
||||
# .. toggle_implementation: CourseWaffleFlag
|
||||
# .. toggle_default: False
|
||||
# .. toggle_description: This toggle controls whether the system will enqueue a Celery task responsible for emitting an
#    analytics event describing how much course content a learner has completed in a course.
|
||||
# .. toggle_use_cases: open_edx
|
||||
# .. toggle_creation_date: 2025-04-02
|
||||
# .. toggle_target_removal_date: None
|
||||
COURSE_HOME_SEND_COURSE_PROGRESS_ANALYTICS_FOR_STUDENT = CourseWaffleFlag(
|
||||
f'{WAFFLE_FLAG_NAMESPACE}.send_course_progress_analytics_for_student', __name__
|
||||
)
|
||||
|
||||
|
||||
def course_home_mfe_progress_tab_is_active(course_key):
|
||||
# Avoiding a circular dependency
|
||||
from .models import DisableProgressPageStackedConfig
|
||||
@@ -51,3 +66,10 @@ def new_discussion_sidebar_view_is_enabled(course_key):
|
||||
Returns True if the new discussion sidebar view is enabled for the given course.
|
||||
"""
|
||||
return COURSE_HOME_NEW_DISCUSSION_SIDEBAR_VIEW.is_enabled(course_key)
|
||||
|
||||
|
||||
def send_course_progress_analytics_for_student_is_enabled(course_key):
    """
    Check whether course completion analytics emission is enabled for a course.

    Returns True when the `send_course_progress_analytics_for_student` waffle
    flag is active for the given course key.
    """
    flag = COURSE_HOME_SEND_COURSE_PROGRESS_ANALYTICS_FOR_STUDENT
    return flag.is_enabled(course_key)
|
||||
|
||||
Reference in New Issue
Block a user