Merge branch 'master' into master

This commit is contained in:
Santhosh Kumar
2025-09-10 10:23:43 +05:30
committed by GitHub
68 changed files with 2694 additions and 708 deletions

View File

@@ -238,7 +238,6 @@
"cms/djangoapps/cms_user_tasks/",
"cms/djangoapps/course_creators/",
"cms/djangoapps/export_course_metadata/",
"cms/djangoapps/import_from_modulestore/",
"cms/djangoapps/maintenance/",
"cms/djangoapps/models/",
"cms/djangoapps/pipeline_js/",

View File

@@ -7,8 +7,13 @@ from opaque_keys.edx.keys import CourseKey
from user_tasks.conf import settings as user_tasks_settings
from user_tasks.models import UserTaskArtifact, UserTaskStatus
from cms.djangoapps.contentstore.tasks import CourseLinkCheckTask, LinkState, extract_content_URLs_from_course
from cms.djangoapps.contentstore.utils import create_course_info_usage_key
from cms.djangoapps.contentstore.tasks import (
CourseLinkCheckTask,
CourseLinkUpdateTask,
LinkState,
extract_content_URLs_from_course
)
from cms.djangoapps.contentstore.utils import create_course_info_usage_key, get_previous_run_course_key
from cms.djangoapps.contentstore.xblock_storage_handlers.view_handlers import get_xblock
from cms.djangoapps.contentstore.xblock_storage_handlers.xblock_helpers import usage_key_with_run
from openedx.core.lib.xblock_utils import get_course_update_items
@@ -118,7 +123,13 @@ def generate_broken_links_descriptor(json_content, request_user, course_key):
'url': 'url/to/block',
'brokenLinks': [],
'lockedLinks': [],
'previousRunLinks: []
'previousRunLinks: [
{
'originalLink': 'http://...',
'isUpdated': true,
'updatedLink': 'http://...'
}
]
},
...,
]
@@ -138,7 +149,13 @@ def generate_broken_links_descriptor(json_content, request_user, course_key):
'brokenLinks': [],
'lockedLinks': [],
'externalForbiddenLinks': [],
'previousRunLinks': []
'previousRunLinks': [
{
'originalLink': 'http://...',
'isUpdated': true,
'updatedLink': 'http://...'
}
]
},
...
{
@@ -147,7 +164,13 @@ def generate_broken_links_descriptor(json_content, request_user, course_key):
'brokenLinks': [],
'lockedLinks': [],
'externalForbiddenLinks': [],
'previousRunLinks': []
'previousRunLinks': [
{
'originalLink': 'http://...',
'isUpdated': true,
'updatedLink': 'http://...'
}
]
}
],
'custom_pages': [
@@ -157,7 +180,13 @@ def generate_broken_links_descriptor(json_content, request_user, course_key):
'brokenLinks': [],
'lockedLinks': [],
'externalForbiddenLinks': [],
'previousRunLinks': []
'previousRunLinks': [
{
'originalLink': 'http://...',
'isUpdated': true,
'updatedLink': 'http://...'
}
]
},
...
]
@@ -166,7 +195,7 @@ def generate_broken_links_descriptor(json_content, request_user, course_key):
return _generate_enhanced_links_descriptor(json_content, request_user, course_key)
def _update_node_tree_and_dictionary(block, link, link_state, node_tree, dictionary):
def _update_node_tree_and_dictionary(block, link, link_state, node_tree, dictionary, course_key=None):
"""
Inserts a block into the node tree and add its attributes to the dictionary.
@@ -215,7 +244,7 @@ def _update_node_tree_and_dictionary(block, link, link_state, node_tree, diction
# Traverse the path and build the tree structure
for xblock in path:
xblock_id = xblock.location.block_id
xblock_id = xblock.location
updated_dictionary.setdefault(
xblock_id,
{
@@ -240,7 +269,7 @@ def _update_node_tree_and_dictionary(block, link, link_state, node_tree, diction
elif link_state == LinkState.EXTERNAL_FORBIDDEN:
updated_dictionary[xblock_id].setdefault('external_forbidden_links', []).append(link)
elif link_state == LinkState.PREVIOUS_RUN:
updated_dictionary[xblock_id].setdefault('previous_run_links', []).append(link)
_add_previous_run_link(updated_dictionary, xblock_id, link, course_key)
else:
updated_dictionary[xblock_id].setdefault('broken_links', []).append(link)
@@ -325,11 +354,11 @@ def sort_course_sections(course_key, data):
revision=ModuleStoreEnum.RevisionOption.published_only
)
# Return unchanged data if course_blocks or required keys are missing
if not course_blocks or 'LinkCheckOutput' not in data or 'sections' not in data['LinkCheckOutput']:
return data # Return unchanged data if course_blocks or required keys are missing
sorted_section_ids = [section.location.block_id for section in course_blocks[0].get_children()]
return data
sorted_section_ids = [section.location for section in course_blocks[0].get_children()]
sections_map = {section['id']: section for section in data['LinkCheckOutput']['sections']}
data['LinkCheckOutput']['sections'] = [
sections_map[section_id]
@@ -340,7 +369,7 @@ def sort_course_sections(course_key, data):
return data
def _generate_links_descriptor_for_content(json_content, request_user):
def _generate_links_descriptor_for_content(json_content, request_user, course_key=None):
"""
Creates a content tree of all links in a course and their states
Returns a structure containing all broken links and locked links for a course.
@@ -363,6 +392,7 @@ def _generate_links_descriptor_for_content(json_content, request_user):
link_state=link_state,
node_tree=xblock_node_tree,
dictionary=xblock_dictionary,
course_key=course_key,
)
result = _create_dto_recursive(xblock_node_tree, xblock_dictionary)
@@ -386,7 +416,7 @@ def _generate_enhanced_links_descriptor(json_content, request_user, course_key):
for item in json_content:
block_id, link, *rest = item
if "course_info" in block_id and "updates" in block_id:
if isinstance(block_id, int):
course_updates_links.append(item)
elif "course_info" in block_id and "handouts" in block_id:
handouts_links.append(item)
@@ -396,22 +426,22 @@ def _generate_enhanced_links_descriptor(json_content, request_user, course_key):
content_links.append(item)
try:
main_content = _generate_links_descriptor_for_content(content_links, request_user)
main_content = _generate_links_descriptor_for_content(content_links, request_user, course_key)
except Exception: # pylint: disable=broad-exception-caught
main_content = {"sections": []}
course_updates_data = (
_generate_course_updates_structure(course, course_updates_links)
_generate_enhanced_content_structure(course, course_updates_links, "updates", course_key)
if course_updates_links and course else []
)
handouts_data = (
_generate_handouts_structure(course, handouts_links)
_generate_enhanced_content_structure(course, handouts_links, "handouts", course_key)
if handouts_links and course else []
)
custom_pages_data = (
_generate_custom_pages_structure(course, custom_pages_links)
_generate_enhanced_content_structure(course, custom_pages_links, "custom_pages", course_key)
if custom_pages_links and course else []
)
@@ -421,7 +451,7 @@ def _generate_enhanced_links_descriptor(json_content, request_user, course_key):
return result
def _generate_enhanced_content_structure(course, content_links, content_type):
def _generate_enhanced_content_structure(course, content_links, content_type, course_key=None):
"""
Unified function to generate structure for enhanced content (updates, handouts, custom pages).
@@ -429,24 +459,25 @@ def _generate_enhanced_content_structure(course, content_links, content_type):
course: Course object
content_links: List of link items for this content type
content_type: 'updates', 'handouts', or 'custom_pages'
course_key: Course key to check for link updates (optional)
Returns:
List of content items with categorized links
"""
result = []
try:
if content_type == "custom_pages":
result = _generate_custom_pages_content(course, content_links)
elif content_type == "updates":
result = _generate_course_updates_content(course, content_links)
elif content_type == "handouts":
result = _generate_handouts_content(course, content_links)
return result
except Exception as e: # pylint: disable=broad-exception-caught
return result
generators = {
"custom_pages": _generate_custom_pages_content,
"updates": _generate_course_updates_content,
"handouts": _generate_handouts_content,
}
generator = generators.get(content_type)
if generator:
return generator(course, content_links, course_key)
return []
def _generate_course_updates_content(course, updates_links):
def _generate_course_updates_content(course, updates_links, course_key=None):
"""Generate course updates content with categorized links."""
store = modulestore()
usage_key = create_course_info_usage_key(course, "updates")
@@ -460,23 +491,10 @@ def _generate_course_updates_content(course, updates_links):
if not update_items:
return course_updates
# Create link state mapping
link_state_map = {
item[1]: item[2] if len(item) >= 3 else LinkState.BROKEN
for item in updates_links if len(item) >= 2
}
for update in update_items:
if update.get("status") != "deleted":
update_content = update.get("content", "")
update_links = extract_content_URLs_from_course(update_content) if update_content else []
# Match links with their states
update_link_data = _create_empty_links_data()
for link in update_links:
link_state = link_state_map.get(link)
if link_state is not None:
_categorize_link_by_state(link, link_state, update_link_data)
update_link_data = _process_content_links(update_content, updates_links, course_key)
course_updates.append(
{
@@ -490,7 +508,7 @@ def _generate_course_updates_content(course, updates_links):
return course_updates
def _generate_handouts_content(course, handouts_links):
def _generate_handouts_content(course, handouts_links, course_key=None):
"""Generate handouts content with categorized links."""
store = modulestore()
usage_key = create_course_info_usage_key(course, "handouts")
@@ -504,15 +522,7 @@ def _generate_handouts_content(course, handouts_links):
):
return course_handouts
# Create link state mapping for handouts
link_state_map = {
item[1]: item[2] if len(item) >= 3 else LinkState.BROKEN
for item in handouts_links if len(item) >= 2
}
links_data = _create_empty_links_data()
for link, link_state in link_state_map.items():
_categorize_link_by_state(link, link_state, links_data)
links_data = _process_content_links(handouts_block.data, handouts_links, course_key)
course_handouts = [
{
@@ -525,7 +535,7 @@ def _generate_handouts_content(course, handouts_links):
return course_handouts
def _generate_custom_pages_content(course, custom_pages_links):
def _generate_custom_pages_content(course, custom_pages_links, course_key=None):
"""Generate custom pages content with categorized links."""
custom_pages = []
@@ -539,7 +549,7 @@ def _generate_custom_pages_content(course, custom_pages_links):
block_id, link = item[0], item[1]
link_state = item[2] if len(item) >= 3 else LinkState.BROKEN
links_by_page.setdefault(block_id, _create_empty_links_data())
_categorize_link_by_state(link, link_state, links_by_page[block_id])
_categorize_link_by_state(link, link_state, links_by_page[block_id], course_key)
# Process static tabs and add their pages
for tab in course.tabs:
@@ -555,24 +565,7 @@ def _generate_custom_pages_content(course, custom_pages_links):
return custom_pages
def _generate_course_updates_structure(course, updates_links):
"""Generate structure for course updates."""
return _generate_enhanced_content_structure(course, updates_links, "updates")
def _generate_handouts_structure(course, handouts_links):
"""Generate structure for course handouts."""
return _generate_enhanced_content_structure(course, handouts_links, "handouts")
def _generate_custom_pages_structure(course, custom_pages_links):
"""Generate structure for custom pages (static tabs)."""
return _generate_enhanced_content_structure(
course, custom_pages_links, "custom_pages"
)
def _categorize_link_by_state(link, link_state, links_data):
def _categorize_link_by_state(link, link_state, links_data, course_key=None):
"""
Helper function to categorize a link into the appropriate list based on its state.
@@ -580,6 +573,7 @@ def _categorize_link_by_state(link, link_state, links_data):
link (str): The URL link to categorize
link_state (str): The state of the link (broken, locked, external-forbidden, previous-run)
links_data (dict): Dictionary containing the categorized link lists
course_key: Course key to check for link updates (optional)
"""
state_to_key = {
LinkState.BROKEN: "brokenLinks",
@@ -590,7 +584,11 @@ def _categorize_link_by_state(link, link_state, links_data):
key = state_to_key.get(link_state)
if key:
links_data[key].append(link)
if key == "previousRunLinks":
data = _generate_link_update_info(link, course_key)
links_data[key].append(data)
else:
links_data[key].append(link)
def _create_empty_links_data():
@@ -606,3 +604,267 @@ def _create_empty_links_data():
"externalForbiddenLinks": [],
"previousRunLinks": [],
}
def get_course_link_update_data(request, course_id):
    """
    Retrieve and format the latest course link update status for a course.

    Args:
        request: The HTTP request (used for task-status filtering)
        course_id (str): The course id whose link-update task is queried

    Returns:
        dict: {"status": <task state or "uninitiated">, "results": <list>}
              where results come from the "LinkUpdateResults" artifact of a
              succeeded task, or [] otherwise.
    """
    results = []
    task_status = _latest_course_link_update_task_status(request, course_id)
    if task_status is None:
        # No link-update task has ever been started for this course.
        status = "uninitiated"
    else:
        status = task_status.state
        if task_status.state == UserTaskStatus.SUCCEEDED:
            try:
                artifact = UserTaskArtifact.objects.get(
                    status=task_status, name="LinkUpdateResults"
                )
                with artifact.file as file:
                    results = json.loads(file.read())
            except (UserTaskArtifact.DoesNotExist, ValueError):
                # If no artifact found or invalid JSON, just return empty results
                results = []

    return {"status": status, "results": results}
def _latest_course_link_update_task_status(request, course_id, view_func=None):
    """
    Return the most recent link-update task status for the given course key,
    after applying every configured status filter, or None if none exists.
    """
    task_name = CourseLinkUpdateTask.generate_name({"course_id": course_id})
    queryset = UserTaskStatus.objects.filter(name=task_name)
    # Each STATUS_FILTERS entry may narrow visibility (e.g. per-user access).
    for filter_class in STATUS_FILTERS:
        queryset = filter_class().filter_queryset(request, queryset, view_func)
    return queryset.order_by("-created").first()
def _get_link_update_status(original_url, course_key):
    """
    Check whether a given link has been updated based on the latest link update results.

    Args:
        original_url (str): The original URL to check
        course_key: The course key

    Returns:
        dict: Dictionary with 'originalLink', 'isUpdated', and 'updatedLink' keys
    """
    def _create_response(original_link, is_updated, updated_link=None):
        """Helper to create consistent response format."""
        return {
            "originalLink": original_link,
            "isUpdated": is_updated,
            "updatedLink": updated_link,
        }

    try:
        # Check if URL contains current course key (indicates it's been updated)
        current_course_str = str(course_key)
        if current_course_str in original_url:
            prev_run_key = get_previous_run_course_key(course_key)
            if prev_run_key:
                # Rebuild what the pre-update URL looked like so callers can
                # show the original alongside the updated link.
                reconstructed_original = original_url.replace(current_course_str, str(prev_run_key))
                return _create_response(reconstructed_original, True, original_url)
            return _create_response(original_url, True, original_url)

        update_results = _get_update_results(course_key)
        if not update_results:
            return _create_response(original_url, False, None)

        # Scan results in order; only successful updates count as evidence.
        for result in update_results:
            if not result.get("success", False):
                continue

            result_original = result.get("original_url", "")
            result_new = result.get("new_url", "")

            # Direct match with original URL
            if result_original == original_url:
                return _create_response(original_url, True, result_new)

            # Check if current URL is an updated URL
            if result_new == original_url:
                return _create_response(result_original, True, original_url)

            # Check if URLs match through reconstruction
            if _urls_match_through_reconstruction(original_url, result_new, course_key):
                return _create_response(original_url, True, result_new)

        return _create_response(original_url, False, None)

    except Exception:  # pylint: disable=broad-except
        # Best-effort: any failure is reported as "not updated" rather than raised.
        return _create_response(original_url, False, None)
def _get_update_results(course_key):
    """
    Fetch and parse the "LinkUpdateResults" artifact of the latest successful
    link-update task for the given course.

    Returns:
        list: Update results, or an empty list when there is no succeeded
        task, no artifact, or the artifact contains invalid JSON.
    """
    try:
        task_status = _latest_course_link_update_task_status(None, str(course_key))
        if not task_status or task_status.state != UserTaskStatus.SUCCEEDED:
            return []

        artifact = UserTaskArtifact.objects.get(
            status=task_status, name="LinkUpdateResults"
        )
        with artifact.file as file:
            return json.loads(file.read())
    # NOTE: json.JSONDecodeError subclasses ValueError, so ValueError covers it.
    except (UserTaskArtifact.DoesNotExist, ValueError):
        return []
def _is_previous_run_link(link, course_key):
    """
    Determine whether *link* appears to be a previous-run link.

    A link qualifies when it embeds the current or previous course key, or
    when it appears (as original or new URL) in a successful entry of the
    latest link-update results.

    Args:
        link: The URL to check
        course_key: The current course key

    Returns:
        bool: True if the link appears to be a previous run link
    """
    try:
        if str(course_key) in link:
            return True

        previous_key = get_previous_run_course_key(course_key)
        if previous_key is not None and str(previous_key) in link:
            return True

        # Fall back to the recorded update results: a successful entry that
        # mentions this URL (before or after update) also qualifies.
        return any(
            entry.get("success", False)
            and link in (entry.get("original_url", ""), entry.get("new_url", ""))
            for entry in _get_update_results(course_key)
        )
    except Exception:  # pylint: disable=broad-except
        return False
def _urls_match_through_reconstruction(original_url, new_url, course_key):
    """
    Decide whether *original_url* and *new_url* refer to the same resource
    across course runs.

    The check rewrites the current course key inside *new_url* back to the
    previous run's key and compares the result against *original_url*.

    Args:
        original_url (str): The original URL from broken links
        new_url (str): The new URL from update results
        course_key: The current course key

    Returns:
        bool: True if they match through reconstruction
    """
    try:
        previous_key = get_previous_run_course_key(course_key)
        if not previous_key:
            return False
        candidate = new_url.replace(str(course_key), str(previous_key))
        return candidate == original_url
    except Exception:  # pylint: disable=broad-except
        return False
def _process_content_links(content_text, all_links, course_key=None):
    """
    Extract links from *content_text* and categorize each by its known state.

    Args:
        content_text: The text content to extract links from
        all_links: List of tuples containing (id, url) or (id, url, state)
        course_key: Course key to check for link updates (optional)

    Returns:
        dict: Categorized link data
    """
    categorized = _create_empty_links_data()
    if not content_text:
        return categorized

    found_links = extract_content_URLs_from_course(content_text)
    if not found_links:
        return categorized

    # Map url -> state, defaulting to BROKEN when no state was recorded.
    state_by_url = {}
    for entry in all_links:
        if len(entry) >= 2:
            state_by_url[entry[1]] = entry[2] if len(entry) >= 3 else LinkState.BROKEN

    for url in found_links:
        state = state_by_url.get(url)
        if state is not None:
            _categorize_link_by_state(url, state, categorized, course_key)
        elif course_key and _is_previous_run_link(url, course_key):
            # Unlisted link that still looks like a previous-run link.
            _categorize_link_by_state(url, LinkState.PREVIOUS_RUN, categorized, course_key)

    return categorized
def _generate_link_update_info(link, course_key=None):
    """
    Create a previous run link data with appropriate update status.

    Args:
        link: The link URL
        course_key: Course key to check for updates (optional)

    Returns:
        dict: Previous run link data with originalLink, isUpdated, and updatedLink
    """
    if course_key:
        # _get_link_update_status always returns a dict with exactly the keys
        # we expose here, so it can be returned directly.
        return _get_link_update_status(link, course_key)
    return {
        'originalLink': link,
        'isUpdated': False,
        'updatedLink': None
    }
def _add_previous_run_link(dictionary, xblock_id, link, course_key):
    """
    Append a previous-run link entry (with its update status) to the xblock's
    'previous_run_links' list.

    Args:
        dictionary: The xblock dictionary to update
        xblock_id: The ID of the xblock
        link: The link URL
        course_key: Course key to check for updates (optional)
    """
    link_info = _generate_link_update_info(link, course_key)
    previous_links = dictionary[xblock_id].setdefault('previous_run_links', [])
    previous_links.append(link_info)

View File

@@ -61,10 +61,10 @@ class TestLinkCheckProvider(CourseTestCase):
when passed a block level xblock.
"""
expected_tree = {
'chapter_1': {
'sequential_1': {
'vertical_1': {
'block_1': {}
self.mock_section.location: {
self.mock_subsection.location: {
self.mock_unit.location: {
self.mock_block.location: {}
}
}
}
@@ -81,19 +81,19 @@ class TestLinkCheckProvider(CourseTestCase):
when passed a block level xblock.
"""
expected_dictionary = {
'chapter_1': {
self.mock_section.location: {
'display_name': 'Section Name',
'category': 'chapter'
},
'sequential_1': {
self.mock_subsection.location: {
'display_name': 'Subsection Name',
'category': 'sequential'
},
'vertical_1': {
self.mock_unit.location: {
'display_name': 'Unit Name',
'category': 'vertical'
},
'block_1': {
self.mock_block.location: {
'display_name': 'Block Name',
'category': 'html',
'url': f'/course/{self.course.id}/editor/html/{self.mock_block.location}',
@@ -274,11 +274,16 @@ class TestLinkCheckProvider(CourseTestCase):
def test_sorts_sections_correctly(self, mock_modulestore):
"""Test that the function correctly sorts sections based on published course structure."""
# Create mock location objects that will match the section IDs in data
mock_location2 = "section2"
mock_location3 = "section3"
mock_location1 = "section1"
mock_course_block = Mock()
mock_course_block.get_children.return_value = [
Mock(location=Mock(block_id="section2")),
Mock(location=Mock(block_id="section3")),
Mock(location=Mock(block_id="section1")),
Mock(location=mock_location2),
Mock(location=mock_location3),
Mock(location=mock_location1),
]
mock_modulestore_instance = Mock()
@@ -301,8 +306,7 @@ class TestLinkCheckProvider(CourseTestCase):
{"id": "section3", "name": "Bonus"},
{"id": "section1", "name": "Intro"},
]
assert result["LinkCheckOutput"]["sections"] == expected_sections
self.assertEqual(result["LinkCheckOutput"]["sections"], expected_sections)
def test_prev_run_link_detection(self):
"""Test the core logic of separating previous run links from regular links."""
@@ -366,46 +370,47 @@ class TestLinkCheckProvider(CourseTestCase):
def test_course_updates_and_custom_pages_structure(self):
"""Test that course_updates and custom_pages are properly structured in the response."""
course_key = self.course.id
# Test data that represents the broken links JSON structure
json_content = [
# Regular course content
[
"course-v1:Test+Course+2024+type@html+block@content1",
str(self.mock_block.location),
"http://content-link.com",
"broken",
LinkState.BROKEN,
],
[
"course-v1:Test+Course+2024+type@vertical+block@unit1",
str(self.mock_unit.location),
"http://unit-link.com",
"locked",
LinkState.LOCKED,
],
# Course updates
[
"course-v1:Test+Course+2024+type@course_info+block@updates",
f"{course_key}+type@course_info+block@updates",
"http://update1.com",
"broken",
LinkState.BROKEN,
],
[
"course-v1:Test+Course+2024+type@course_info+block@updates",
f"{course_key}+type@course_info+block@updates",
"http://update2.com",
"locked",
LinkState.LOCKED,
],
# Handouts (should be merged into course_updates)
[
"course-v1:Test+Course+2024+type@course_info+block@handouts",
f"{course_key}+type@course_info+block@handouts",
"http://handout.com",
"broken",
LinkState.BROKEN,
],
# Custom pages (static tabs)
[
"course-v1:Test+Course+2024+type@static_tab+block@page1",
f"{course_key}+type@static_tab+block@page1",
"http://page1.com",
"broken",
LinkState.BROKEN,
],
[
"course-v1:Test+Course+2024+type@static_tab+block@page2",
f"{course_key}+type@static_tab+block@page2",
"http://page2.com",
"external-forbidden",
LinkState.EXTERNAL_FORBIDDEN,
],
]
@@ -413,17 +418,42 @@ class TestLinkCheckProvider(CourseTestCase):
"cms.djangoapps.contentstore.core.course_optimizer_provider._generate_links_descriptor_for_content"
) as mock_content, mock.patch(
"cms.djangoapps.contentstore.core.course_optimizer_provider.modulestore"
) as mock_modulestore:
) as mock_modulestore, mock.patch(
"cms.djangoapps.contentstore.core.course_optimizer_provider.create_course_info_usage_key"
) as mock_create_usage_key, mock.patch(
"cms.djangoapps.contentstore.core.course_optimizer_provider.get_course_update_items"
) as mock_get_update_items, mock.patch(
"cms.djangoapps.contentstore.core.course_optimizer_provider.extract_content_URLs_from_course"
) as mock_extract_urls:
mock_content.return_value = {"sections": []}
mock_course = self.mock_course
mock_tab1 = StaticTab(name="Page1", url_slug="page1")
mock_tab2 = StaticTab(name="Page2", url_slug="page2")
mock_tab1 = StaticTab(name="Test Page 1", url_slug="page1")
mock_tab2 = StaticTab(name="Test Page 2", url_slug="page2")
mock_course.tabs = [mock_tab1, mock_tab2]
mock_course.id = CourseKey.from_string("course-v1:Test+Course+2024")
mock_course.id = course_key
mock_modulestore.return_value.get_course.return_value = mock_course
course_key = CourseKey.from_string("course-v1:Test+Course+2024")
mock_updates_usage_key = Mock()
mock_handouts_usage_key = Mock()
mock_create_usage_key.side_effect = lambda course, info_type: (
mock_updates_usage_key if info_type == "updates" else mock_handouts_usage_key
)
mock_updates_block = Mock()
mock_updates_block.data = "Check out <a href='http://update1.com'>this update</a>"
mock_handouts_block = Mock()
mock_handouts_block.data = "Download <a href='http://handout.com'>handout</a>"
mock_get_item_mapping = {
mock_updates_usage_key: mock_updates_block,
mock_handouts_usage_key: mock_handouts_block,
}
mock_modulestore.return_value.get_item.side_effect = (
lambda usage_key: mock_get_item_mapping.get(usage_key, Mock())
)
mock_get_update_items.return_value = [
{"id": "update1", "date": "2024-01-01", "content": "Update content 1", "status": "visible"},
{"id": "update2", "date": "2024-01-02", "content": "Update content 2", "status": "visible"}
]
mock_extract_urls.return_value = ["http://update1.com", "http://update2.com"]
result = generate_broken_links_descriptor(
json_content, self.user, course_key
)

View File

@@ -50,3 +50,62 @@ class LinkCheckSerializer(serializers.Serializer):
LinkCheckCreatedAt = serializers.DateTimeField(required=False)
LinkCheckOutput = LinkCheckOutputSerializer(required=False)
LinkCheckError = serializers.CharField(required=False)
class CourseRerunLinkDataSerializer(serializers.Serializer):
    """ Serializer for individual course rerun link data """
    # Full URL of the link to update; must be non-empty.
    url = serializers.CharField(required=True, allow_null=False, allow_blank=False)
    # Content category the link belongs to (e.g. course_content, course_updates, handouts).
    type = serializers.CharField(required=True, allow_null=False, allow_blank=False)
    # Identifier of the block/item containing the link.
    id = serializers.CharField(required=True, allow_null=False, allow_blank=False)
class CourseRerunLinkUpdateRequestSerializer(serializers.Serializer):
    """Serializer for course rerun link update request."""
    # "all" updates every detected rerun link; "single" updates only the
    # entries supplied in `data`.
    ACTION_CHOICES = ("all", "single")

    action = serializers.ChoiceField(choices=ACTION_CHOICES, required=True)
    data = CourseRerunLinkDataSerializer(many=True, required=False)

    def validate(self, attrs):
        """
        Validate that 'data' is provided when action is 'single'.
        """
        action = attrs.get("action")
        data = attrs.get("data")

        if action == "single" and not data:
            raise serializers.ValidationError(
                {"data": "This field is required when action is 'single'."}
            )

        return attrs
class CourseRerunLinkUpdateResultSerializer(serializers.Serializer):
    """ Serializer for individual course rerun link update result """
    # URL the link was rewritten to.
    new_url = serializers.CharField(required=True, allow_null=False, allow_blank=False)
    # URL before the update; optional since some results may not record it.
    original_url = serializers.CharField(required=False, allow_null=True, allow_blank=True)
    type = serializers.CharField(required=True, allow_null=False, allow_blank=True)
    id = serializers.CharField(required=True, allow_null=False, allow_blank=False)
    success = serializers.BooleanField(required=True)
    error_message = serializers.CharField(required=False, allow_null=True, allow_blank=True)

    def to_representation(self, instance):
        """
        Override to exclude error_message field when success is True or error_message is null/empty
        """
        data = super().to_representation(instance)
        # Only surface error_message on actual failures that carry a message.
        if data.get('success') is True or not data.get('error_message'):
            data.pop('error_message', None)
        return data
class CourseRerunLinkUpdateStatusSerializer(serializers.Serializer):
    """ Serializer for course rerun link update status """
    # Overall task state; "uninitiated" means no update task has run yet.
    status = serializers.ChoiceField(
        choices=['pending', 'in_progress', 'completed', 'failed', 'uninitiated'],
        required=True
    )
    # Per-link update results; present only once the task has produced output.
    results = CourseRerunLinkUpdateResultSerializer(many=True, required=False)

View File

@@ -0,0 +1,160 @@
"""
Unit tests for Course Rerun Link Update API
"""
import json
from unittest.mock import Mock, patch
from django.urls import reverse
from user_tasks.models import UserTaskStatus
from cms.djangoapps.contentstore.tests.utils import CourseTestCase
class TestCourseLinkUpdateAPI(CourseTestCase):
"""
Tests for the Course Rerun Link Update API endpoints
"""
def setUp(self):
super().setUp()
self.sample_links_data = [
{
"url": "http://localhost:18000/course/course-v1:edX+DemoX+Demo_Course_2023/course",
"type": "course_content",
"id": "block-v1:edX+DemoX+Demo_Course+type@html+block@intro",
},
{
"url": "http://localhost:18000/course/course-v1:edX+DemoX+Demo_Course_2023/progress",
"type": "course_updates",
"id": "1",
},
{
"url": "http://localhost:18000/course/course-v1:edX+DemoX+Demo_Course_2023/handouts",
"type": "handouts",
"id": "block-v1:edX+DemoX+Demo_Course+type@course_info+block@handouts",
},
]
self.enable_optimizer_patch = (
"cms.djangoapps.contentstore.rest_api.v0.views.course_optimizer."
"enable_course_optimizer_check_prev_run_links"
)
self.update_links_patch = (
"cms.djangoapps.contentstore.rest_api.v0.views.course_optimizer."
"update_course_rerun_links"
)
self.task_status_patch = (
"cms.djangoapps.contentstore.core.course_optimizer_provider."
"_latest_course_link_update_task_status"
)
self.user_task_artifact_patch = (
"cms.djangoapps.contentstore.core.course_optimizer_provider."
"UserTaskArtifact"
)
def make_post_request(self, course_id=None, data=None, **kwargs):
"""Helper method to make POST requests to the link update endpoint"""
url = self.get_update_url(course_id or self.course.id)
response = self.client.post(
url,
data=json.dumps(data) if data else None,
content_type="application/json",
)
return response
def get_update_url(self, course_key):
"""Get the update endpoint URL"""
return reverse(
"cms.djangoapps.contentstore:v0:rerun_link_update",
kwargs={"course_id": str(course_key)},
)
def get_status_url(self, course_key):
"""Get the status endpoint URL"""
return reverse(
"cms.djangoapps.contentstore:v0:rerun_link_update_status",
kwargs={"course_id": str(course_key)},
)
def test_post_update_all_links_success(self):
"""Test successful request to update all links"""
with patch(self.enable_optimizer_patch, return_value=True):
with patch(self.update_links_patch) as mock_task:
mock_task.delay.return_value = Mock()
data = {"action": "all"}
response = self.make_post_request(data=data)
self.assertEqual(response.status_code, 200)
self.assertIn("status", response.json())
mock_task.delay.assert_called_once()
def test_post_update_single_links_success(self):
"""Test successful request to update single links"""
with patch(self.enable_optimizer_patch, return_value=True):
with patch(self.update_links_patch) as mock_task:
mock_task.delay.return_value = Mock()
data = {
"action": "single",
"data": [
{
"url": "http://localhost:18000/course/course-v1:edX+DemoX+Demo_Course/course",
"type": "course_content",
"id": "block-v1:edX+DemoX+Demo_Course+type@html+block@abc123",
},
{
"url": "http://localhost:18000/course/course-v1:edX+DemoX+Demo_Course/progress",
"type": "course_updates",
"id": "1",
},
],
}
response = self.make_post_request(data=data)
self.assertEqual(response.status_code, 200)
self.assertIn("status", response.json())
mock_task.delay.assert_called_once()
def test_post_update_missing_action_returns_400(self):
"""Test that missing action parameter returns 400"""
with patch(
self.enable_optimizer_patch,
return_value=True,
):
data = {}
response = self.make_post_request(data=data)
self.assertEqual(response.status_code, 400)
self.assertIn("error", response.json())
self.assertIn("action", response.json()["error"])
def test_error_handling_workflow(self):
    """
    Test error handling in the complete workflow.

    Starts the update task successfully, then simulates a failed task
    status and verifies the status endpoint reports the failure with an
    empty results list.
    """
    with patch(
        self.enable_optimizer_patch,
        return_value=True,
    ):
        with patch(self.update_links_patch) as mock_task:
            # Step 1: Start task
            mock_task.delay.return_value = Mock()
            data = {"action": "all"}
            response = self.make_post_request(data=data)
            self.assertEqual(response.status_code, 200)

            # Step 2: Check failed status
            # NOTE(review): mock_artifact is never referenced — the patch
            # exists only to keep artifact lookups from hitting the DB.
            with patch(self.task_status_patch) as mock_status:
                with patch(self.user_task_artifact_patch) as mock_artifact:
                    mock_task_status = Mock()
                    mock_task_status.state = UserTaskStatus.FAILED
                    mock_status.return_value = mock_task_status

                    status_url = self.get_status_url(self.course.id)
                    status_response = self.client.get(status_url)

                    self.assertEqual(status_response.status_code, 200)
                    status_data = status_response.json()
                    # Failed tasks report the UserTaskStatus state string and
                    # no per-link results.
                    self.assertEqual(status_data["status"], "Failed")
                    self.assertEqual(status_data["results"], [])

View File

@@ -9,11 +9,13 @@ from .views import (
AdvancedCourseSettingsView,
APIHeartBeatView,
AuthoringGradingView,
CourseTabSettingsView,
CourseTabListView,
CourseTabReorderView,
LinkCheckView,
CourseTabSettingsView,
LinkCheckStatusView,
LinkCheckView,
RerunLinkUpdateStatusView,
RerunLinkUpdateView,
TranscriptView,
YoutubeTranscriptCheckView,
YoutubeTranscriptUploadView,
@@ -114,4 +116,13 @@ urlpatterns = [
fr'^link_check_status/{settings.COURSE_ID_PATTERN}$',
LinkCheckStatusView.as_view(), name='link_check_status'
),
re_path(
fr'^rerun_link_update/{settings.COURSE_ID_PATTERN}$',
RerunLinkUpdateView.as_view(), name='rerun_link_update'
),
re_path(
fr'^rerun_link_update_status/{settings.COURSE_ID_PATTERN}$',
RerunLinkUpdateStatusView.as_view(), name='rerun_link_update_status'
),
]

View File

@@ -4,6 +4,6 @@ Views for v0 contentstore API.
from .advanced_settings import AdvancedCourseSettingsView
from .api_heartbeat import APIHeartBeatView
from .authoring_grading import AuthoringGradingView
from .course_optimizer import LinkCheckView, LinkCheckStatusView
from .tabs import CourseTabSettingsView, CourseTabListView, CourseTabReorderView
from .course_optimizer import LinkCheckStatusView, LinkCheckView, RerunLinkUpdateStatusView, RerunLinkUpdateView
from .tabs import CourseTabListView, CourseTabReorderView, CourseTabSettingsView
from .transcripts import TranscriptView, YoutubeTranscriptCheckView, YoutubeTranscriptUploadView

View File

@@ -1,17 +1,33 @@
""" API Views for Course Optimizer. """
"""API Views for Course Optimizer."""
import edx_api_doc_tools as apidocs
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from rest_framework.views import APIView
from rest_framework import status
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from user_tasks.models import UserTaskStatus
from cms.djangoapps.contentstore.core.course_optimizer_provider import get_link_check_data, sort_course_sections
from cms.djangoapps.contentstore.rest_api.v0.serializers.course_optimizer import LinkCheckSerializer
from cms.djangoapps.contentstore.tasks import check_broken_links
from cms.djangoapps.contentstore.core.course_optimizer_provider import (
get_course_link_update_data,
get_link_check_data,
sort_course_sections,
)
from cms.djangoapps.contentstore.rest_api.v0.serializers.course_optimizer import (
CourseRerunLinkUpdateStatusSerializer,
LinkCheckSerializer,
CourseRerunLinkUpdateRequestSerializer,
)
from cms.djangoapps.contentstore.tasks import check_broken_links, update_course_rerun_links
from cms.djangoapps.contentstore.toggles import enable_course_optimizer_check_prev_run_links
from common.djangoapps.student.auth import has_course_author_access, has_studio_read_access
from common.djangoapps.util.json_request import JsonResponse
from openedx.core.lib.api.view_utils import DeveloperErrorViewMixin, verify_course_exists, view_auth_classes
from openedx.core.lib.api.view_utils import (
DeveloperErrorViewMixin,
verify_course_exists,
view_auth_classes,
)
@view_auth_classes(is_authenticated=True)
@@ -113,7 +129,14 @@ class LinkCheckStatusView(DeveloperErrorViewMixin, APIView):
"brokenLinks": [<string>, ...],
"lockedLinks": [<string>, ...],
"externalForbiddenLinks": [<string>, ...],
"previousRunLinks": [<string>, ...]
"previousRunLinks": [
{
"originalLink": <string>,
"isUpdated": <boolean>,
"updatedLink": <string>
},
...
]
},
{ <another block> },
],
@@ -134,7 +157,14 @@ class LinkCheckStatusView(DeveloperErrorViewMixin, APIView):
"brokenLinks": [<string>, ...],
"lockedLinks": [<string>, ...],
"externalForbiddenLinks": [<string>, ...],
"previousRunLinks": [<string>, ...]
"previousRunLinks": [
{
"originalLink": <string>,
"isUpdated": <boolean>,
"updatedLink": <string>
},
...
]
},
...,
{ <another course-updates> },
@@ -146,7 +176,14 @@ class LinkCheckStatusView(DeveloperErrorViewMixin, APIView):
"brokenLinks": [<string>, ...],
"lockedLinks": [<string>, ...],
"externalForbiddenLinks": [<string>, ...],
"previousRunLinks": [<string>, ...]
"previousRunLinks": [
{
"originalLink": <string>,
"isUpdated": <boolean>,
"updatedLink": <string>
},
...
]
}
],
"custom_pages": [
@@ -157,7 +194,14 @@ class LinkCheckStatusView(DeveloperErrorViewMixin, APIView):
"brokenLinks": [<string>, ...],
"lockedLinks": [<string>, ...],
"externalForbiddenLinks": [<string>, ...],
"previousRunLinks": [<string>, ...]
"previousRunLinks": [
{
"originalLink": <string>,
"isUpdated": <boolean>,
"updatedLink": <string>
},
...
]
},
...,
{ <another page> },
@@ -167,11 +211,212 @@ class LinkCheckStatusView(DeveloperErrorViewMixin, APIView):
"""
course_key = CourseKey.from_string(course_id)
if not has_course_author_access(request.user, course_key):
print('missing course author access')
self.permission_denied(request)
data = get_link_check_data(request, course_id)
data = sort_course_sections(course_key, data)
link_check_data = get_link_check_data(request, course_id)
sorted_sections = sort_course_sections(course_key, link_check_data)
serializer = LinkCheckSerializer(data)
serializer = LinkCheckSerializer(sorted_sections)
return Response(serializer.data)
@view_auth_classes(is_authenticated=True)
class RerunLinkUpdateView(DeveloperErrorViewMixin, APIView):
    """
    View for queueing a celery task to update course links to the latest re-run.

    POST validates the request inline (the ``action``/``data`` checks below)
    and enqueues ``update_course_rerun_links``; it does not perform any
    updates synchronously.
    """

    @apidocs.schema(
        parameters=[
            apidocs.string_parameter(
                "course_id", apidocs.ParameterLocation.PATH, description="Course ID"
            )
        ],
        body=CourseRerunLinkUpdateRequestSerializer,
        responses={
            200: "Celery task queued.",
            400: "Bad request - invalid action or missing data.",
            401: "The requester is not authenticated.",
            403: "The requester cannot access the specified course.",
            404: "The requested course does not exist.",
        },
    )
    @verify_course_exists()
    def post(self, request: Request, course_id: str):
        """
        Queue celery task to update course links to the latest re-run.

        **Example Request - Update All Links**

            POST /api/contentstore/v0/rerun_link_update/{course_id}

        ```json
        {
            "action": "all"
        }
        ```

        **Example Request - Update Single Links**

            POST /api/contentstore/v0/rerun_link_update/{course_id}

        ```json
        {
            "action": "single",
            "data": [
                {
                    "url": "http://localhost:18000/course/course-v1:edX+DemoX+Demo_Course/course",
                    "type": "course_updates",
                    "id": "block_id_123"
                }
            ]
        }
        ```

        **Response Values**

        ```json
        {
            "status": "pending"
        }
        ```
        """
        try:
            course_key = CourseKey.from_string(course_id)
        except (InvalidKeyError, IndexError):
            return JsonResponse(
                {"error": "Invalid course id, it does not exist"},
                status=status.HTTP_404_NOT_FOUND,
            )

        # Check course author permissions
        if not has_course_author_access(request.user, course_key):
            self.permission_denied(request)

        # Feature-gated per course by a waffle flag.
        if not enable_course_optimizer_check_prev_run_links(course_key):
            return JsonResponse(
                {
                    "error": "Course optimizer check for previous run links is not enabled."
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        # NOTE(review): validation is done by hand here rather than via
        # CourseRerunLinkUpdateRequestSerializer (used only for API docs).
        action = request.data.get("action")
        if not action or action not in ["all", "single"]:
            return JsonResponse(
                {"error": 'Invalid or missing action. Must be "all" or "single".'},
                status=status.HTTP_400_BAD_REQUEST,
            )

        if action == "single":
            data = request.data.get("data")
            if not data or not isinstance(data, list):
                return JsonResponse(
                    {
                        'data': "This field is required when action is 'single'."
                    },
                    status=status.HTTP_400_BAD_REQUEST,
                )

        # Enqueue the async update; the status endpoint reports progress.
        update_course_rerun_links.delay(
            request.user.id,
            course_id,
            action,
            request.data.get("data", []),
            request.LANGUAGE_CODE,
        )
        return JsonResponse({"status": UserTaskStatus.PENDING})
@view_auth_classes()
class RerunLinkUpdateStatusView(DeveloperErrorViewMixin, APIView):
    """
    View for checking the status of the course link update task and returning the results.
    """

    @apidocs.schema(
        parameters=[
            apidocs.string_parameter(
                "course_id", apidocs.ParameterLocation.PATH, description="Course ID"
            ),
        ],
        responses={
            200: "OK",
            401: "The requester is not authenticated.",
            403: "The requester cannot access the specified course.",
            404: "The requested course does not exist.",
        },
    )
    def get(self, request: Request, course_id: str):
        """
        **Use Case**

        GET handler to return the status of the course link update task from UserTaskStatus.
        If no task has been started for the course, return 'uninitiated'.
        If the task was successful, the updated links results are also returned.

        Possible statuses:
            'pending', 'in_progress', 'completed', 'failed', 'uninitiated'

        NOTE(review): actual responses surface UserTaskStatus state strings
        (e.g. 'Failed' — see the view tests), which differ in casing from
        the lowercase examples below; confirm which casing the serializer
        emits before relying on it.

        **Example Request**

            GET /api/contentstore/v0/rerun_link_update_status/{course_id}

        **Example Response - Task In Progress**

        ```json
        {
            "status": "pending"
        }
        ```

        **Example Response - Task Completed**

        ```json
        {
            "status": "completed",
            "results": [
                {
                    "id": "block_id_123",
                    "type": "course_updates",
                    "new_url": "http://localhost:18000/course/course-v1:edX+DemoX+2024_Q2/course",
                    "success": true
                },
                {
                    "id": "block_id_456",
                    "type": "course_updates",
                    "new_url": "http://localhost:18000/course/course-v1:edX+DemoX+2024_Q2/progress",
                    "success": true
                }
            ]
        }
        ```

        **Example Response - Task Failed**

        ```json
        {
            "status": "failed",
            "error": "Target course run not found or inaccessible"
        }
        ```
        """
        try:
            course_key = CourseKey.from_string(course_id)
        except (InvalidKeyError, IndexError):
            return JsonResponse(
                {"error": "Invalid course id, it does not exist"},
                status=status.HTTP_404_NOT_FOUND,
            )

        # Check course author permissions
        if not has_course_author_access(request.user, course_key):
            self.permission_denied(request)

        # Same feature gate as the POST endpoint that enqueues the task.
        if not enable_course_optimizer_check_prev_run_links(course_key):
            return JsonResponse(
                {
                    "error": "Course optimizer check for previous run links is not enabled."
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        data = get_course_link_update_data(request, course_id)
        serializer = CourseRerunLinkUpdateStatusSerializer(data)
        return Response(serializer.data)

View File

@@ -456,25 +456,23 @@ class DownstreamView(DeveloperErrorViewMixin, APIView):
"""
downstream = _load_accessible_block(request.user, usage_key_string, require_write_access=True)
affected_blocks: list[XBlock] = []
# Get the upstream ref before severing the link, so we can delete
# the corresponding ComponentLink or ContainerLink below.
upstream_ref = downstream.upstream
try:
# Try to get the upstream key before severing the link, so we can delete
# the corresponding ComponentLink or ContainerLink below.
try:
upstream_key = UpstreamLink.get_for_block(downstream).upstream_key
except NoUpstream:
# Even if we don't have an UpstreamLink, we still need to check
# if the block has the upstream key set, so we don't want to
# raise an exception here.
upstream_key = None
affected_blocks = sever_upstream_link(downstream)
# Remove the ComponentLink or ContainerLink, if it exists.
if upstream_key:
if isinstance(upstream_key, LibraryUsageLocatorV2):
if upstream_ref:
try:
ComponentLink.get_by_downstream_usage_key(downstream.usage_key).delete()
elif isinstance(upstream_key, LibraryContainerLocator):
ContainerLink.get_by_downstream_usage_key(downstream.usage_key).delete()
except ComponentLink.DoesNotExist:
try:
ContainerLink.get_by_downstream_usage_key(downstream.usage_key).delete()
except ContainerLink.DoesNotExist:
# If neither link exists, that's fine--we just wanted to clean up if possible.
pass
except NoUpstream:
logger.exception(
"Tried to DELETE upstream link of '%s', but it wasn't linked to anything in the first place. "

View File

@@ -385,7 +385,9 @@ def export_olx(self, user_id, course_key_string, language):
try:
self.status.set_state('Exporting')
set_custom_attribute("exporting_started", str(courselike_key))
tarball = create_export_tarball(courselike_block, courselike_key, {}, self.status)
set_custom_attribute("exporting_completed", str(courselike_key))
artifact = UserTaskArtifact(status=self.status, name='Output')
artifact.file.save(name=os.path.basename(tarball.name), content=File(tarball))
artifact.save()
@@ -412,10 +414,13 @@ def create_export_tarball(course_block, course_key, context, status=None):
if isinstance(course_key, LibraryLocator):
export_library_to_xml(modulestore(), contentstore(), course_key, root_dir, name)
else:
set_custom_attribute("exporting_course_to_xml_started", str(course_key))
export_course_to_xml(modulestore(), contentstore(), course_block.id, root_dir, name)
set_custom_attribute("exporting_course_to_xml_completed", str(course_key))
if status:
status.set_state('Compressing')
set_custom_attribute("compressing_started", str(course_key))
status.increment_completed_steps()
LOGGER.debug('tar file being generated at %s', export_file.name)
with tarfile.open(name=export_file.name, mode='w:gz') as tar_file:
@@ -456,6 +461,7 @@ def create_export_tarball(course_block, course_key, context, status=None):
if os.path.exists(root_dir / name):
shutil.rmtree(root_dir / name)
set_custom_attribute("compressing_completed", str(course_key))
return export_file
@@ -1261,7 +1267,7 @@ def _scan_course_for_links(course_key):
# and it doesn't contain user-facing links to scan.
if block.category == 'drag-and-drop-v2':
continue
block_id = str(block.usage_key)
block_id = str(block.location)
block_info = get_block_info(block)
block_data = block_info['data']
url_list = extract_content_URLs_from_course(block_data)
@@ -1342,7 +1348,7 @@ def _scan_course_updates_for_links(course):
course_updates.append(
{
"displayName": update.get("date", "Unknown"),
"block_id": str(usage_key),
"block_id": update.get("id", str(usage_key)),
"urls": url_list,
}
)
@@ -1753,3 +1759,533 @@ def handle_unlink_upstream_container(upstream_container_key_string: str) -> None
upstream_container_key=upstream_container_key,
):
make_copied_tags_editable(str(link.downstream_usage_key))
class CourseLinkUpdateTask(UserTask):  # pylint: disable=abstract-method
    """
    Base class for course link update tasks.

    Provides step accounting and a human-readable task name for
    django-user-tasks status reporting.
    """

    @staticmethod
    def calculate_total_steps(arguments_dict):
        """
        Get the number of in-progress steps in the link update process, as shown in the UI.

        For reference, these are:

        1. Scanning
        2. Updating
        """
        return 2

    @classmethod
    def generate_name(cls, arguments_dict):
        """
        Create a name for this particular task instance.

        Arguments:
            arguments_dict (dict): The arguments given to the task function

        Returns:
            str: The generated name
        """
        key = arguments_dict["course_id"]
        return f"Course link update of {key}"
@shared_task(base=CourseLinkUpdateTask, bind=True)
def update_course_rerun_links(
    self, user_id, course_id, action, data=None, language=None
):
    """
    Updates course links to point to the latest re-run.

    Celery entry point; the work is delegated to
    ``_update_course_rerun_links`` so the logic can be exercised without a
    worker.

    Args:
        user_id: ID of the user requesting the update
        course_id: String representation of the course key
        action: 'all' to update every stale link, or 'single' to update
            only the links listed in ``data``
        data: List of link dicts to update (used when action='single')
        language: Language code for translations
    """
    # Tag monitoring spans with the owning module.
    set_code_owner_attribute_from_module(__name__)
    return _update_course_rerun_links(
        self, user_id, course_id, action, data, language
    )
def _update_course_rerun_links(
    task_instance, user_id, course_id, action, data, language
):
    """
    Updates course links to point to the latest re-run.

    Runs in two UI-visible steps ("Scanning", "Updating"), records the
    per-link results as a JSON UserTaskArtifact, and rewrites the latest
    broken-links report so it reflects the updates.

    Args:
        task_instance: The Celery task instance
        user_id: ID of the user requesting the update
        course_id: String representation of the course key
        action: 'all' or 'single'
        data: List of specific links to update (when action='single')
        language: Language code for translations
    """
    user = _validate_user(task_instance, user_id, language)
    if not user:
        # _validate_user is responsible for failing the task status.
        return

    task_instance.status.set_state(UserTaskStatus.IN_PROGRESS)
    course_key = CourseKey.from_string(course_id)
    prev_run_course_key = get_previous_run_course_key(course_key)

    try:
        task_instance.status.set_state("Scanning")
        if action == "all":
            url_list = _scan_course_for_links(course_key)
            links_to_update = []
            # Filter only course-specific links that need updating
            for block_id, url in url_list:
                if _course_link_update_required(url, course_key, prev_run_course_key):
                    links_to_update.append(
                        {
                            "id": block_id,
                            "url": url,
                            "type": _determine_link_type(block_id),
                        }
                    )
        else:
            # Process only single link updates
            links_to_update = data or []
        task_instance.status.increment_completed_steps()

        task_instance.status.set_state("Updating")
        updated_links = []
        for link_data in links_to_update:
            try:
                new_url = _update_link_to_latest_rerun(
                    link_data, course_key, prev_run_course_key, user
                )
                updated_links.append(
                    {
                        "original_url": link_data.get("url", ""),
                        "new_url": new_url,
                        "type": link_data.get("type", "unknown"),
                        "id": link_data.get("id", ""),
                        "success": True,
                    }
                )
            except Exception as e:  # pylint: disable=broad-except
                # One bad link must not abort the rest of the batch; record
                # the failure in the results instead.
                LOGGER.error(
                    f'Failed to update link {link_data.get("url", "")}: {str(e)}'
                )
                updated_links.append(
                    {
                        "original_url": link_data.get("url", ""),
                        "new_url": link_data.get("url", ""),
                        "type": link_data.get("type", "unknown"),
                        "id": link_data.get("id", ""),
                        "success": False,
                        "error_message": str(e),
                    }
                )
        task_instance.status.increment_completed_steps()

        # Persist the per-link results as a downloadable artifact.
        file_name = f"{str(course_key)}_link_updates"
        results_file = NamedTemporaryFile(prefix=file_name + ".", suffix=".json")
        with open(results_file.name, "w") as file:
            json.dump(updated_links, file, indent=4)
        artifact = UserTaskArtifact(
            status=task_instance.status, name="LinkUpdateResults"
        )
        artifact.file.save(
            name=os.path.basename(results_file.name), content=File(results_file)
        )
        artifact.save()

        # Update the existing broken links file to reflect the updated links
        _update_broken_links_file_with_updated_links(course_key, updated_links)

        task_instance.status.succeed()
    except Exception as e:  # pylint: disable=broad-except
        LOGGER.exception(
            "Error updating links for course %s", course_key, exc_info=True
        )
        if task_instance.status.state != UserTaskStatus.FAILED:
            task_instance.status.fail({"raw_error_msg": str(e)})
def _course_link_update_required(url, course_key, prev_run_course_key):
    """
    Checks if a course link needs to be updated for a re-run.

    Args:
        url: The URL to check
        course_key: The current course key
        prev_run_course_key: Key of the previous course run, or None when
            the course has no previous run

    Returns:
        bool: True if the link references the previous run of the same
        org/course (and a different run) and therefore needs updating
    """
    # Guard prev_run_course_key too: courses without a previous run yield
    # None here, and the attribute accesses below would raise on it.
    if not url or not course_key or not prev_run_course_key:
        return False

    if not contains_previous_course_reference(url, prev_run_course_key):
        return False

    # Update only when it's the same org and course but a different run.
    return (
        prev_run_course_key.org == course_key.org
        and prev_run_course_key.course == course_key.course
        and prev_run_course_key.run != course_key.run
    )
def _determine_link_type(block_id):
"""
Determines the type of link based on block_id and URL.
Args:
block_id: The block ID containing the link
url: The URL
Returns:
str: The type of link ('course_updates', 'handouts', 'custom_pages', 'course_content')
"""
if not block_id:
return "course_content"
block_id_str = str(block_id)
if isinstance(block_id, int):
return "course_updates"
if "course_info" in block_id_str and "handouts" in block_id_str:
return "handouts"
if "static_tab" in block_id_str:
return "custom_pages"
return "course_content"
def _update_link_to_latest_rerun(link_data, course_key, prev_run_course_key, user):
    """
    Updates a single link to point to the latest course re-run.

    Returns the original URL unchanged when the link is empty, when there is
    no distinct previous run, or when the previous run belongs to a
    different org/course. Otherwise rewrites the block content in the
    modulestore and returns the new URL.

    Args:
        link_data: Dictionary containing link information ('url', 'id', 'type')
        course_key: The current course key
        prev_run_course_key: The previous course run key (may be None)
        user: The authenticated user making the request

    Returns:
        str: The updated URL
    """
    original_url = link_data.get("url", "")
    block_id = link_data.get("id", "")
    link_type = link_data.get("type", "course_content")

    if not original_url:
        return original_url

    # Tolerate a missing previous run: both fall back to None, which fails
    # the org/course match below and leaves the URL untouched.
    prev_run_course_org = prev_run_course_key.org if prev_run_course_key else None
    prev_run_course_course = (
        prev_run_course_key.course if prev_run_course_key else None
    )

    if prev_run_course_key == course_key:
        return original_url

    # Validate url based on previous-run org
    if (
        prev_run_course_org != course_key.org
        or prev_run_course_course != course_key.course
    ):
        return original_url

    # Swap the previous run's course id for the current one in the URL.
    new_url = original_url.replace(str(prev_run_course_key), str(course_key))

    # condition because we're showing handouts as updates
    if link_type == "course_updates" and "handouts" in str(block_id):
        link_type = "handouts"

    _update_block_content_with_new_url(
        block_id, original_url, new_url, link_type, course_key, user
    )

    return new_url
def _update_course_updates_link(block_id, old_url, new_url, course_key, user):
    """
    Updates course updates with the new URL.

    Args:
        block_id: The ID of the block containing the link. Either a
            usage-key string for the whole updates block (all entries are
            rewritten) or an individual update id (only that entry).
        old_url: The original URL to replace
        new_url: The new URL to use
        course_key: The current course key
        user: The authenticated user making the request
    """
    store = modulestore()
    course_updates = store.get_item(course_key.make_usage_key("course_info", "updates"))
    if not hasattr(course_updates, "items"):
        return

    # A usage-key style block_id targets every update entry; otherwise the
    # numeric block_id selects a single entry by id.
    targets_all_updates = "course_info" in str(block_id) and "updates" in str(block_id)

    changed = False
    for update in course_updates.items:
        if targets_all_updates:
            update_matches = True
        else:
            try:
                update_matches = update.get("id", None) == int(block_id)
            except (ValueError, TypeError):
                update_matches = False

        if update_matches and "content" in update:
            new_content = update["content"].replace(old_url, new_url)
            if new_content != update["content"]:
                update["content"] = new_content
                changed = True

    # Persist once after all entries are rewritten — the original code
    # re-saved the whole updates block for every matching entry.
    if changed:
        store.update_item(course_updates, user.id)
        LOGGER.info(
            f"Updated course updates with new URL: {old_url} -> {new_url}"
        )
def _update_handouts_link(block_id, old_url, new_url, course_key, user):
    """
    Replace old_url with new_url in the course handouts block.

    Args:
        block_id: The ID of the block containing the link (unused; the
            handouts block is addressed directly via the course key)
        old_url: The original URL to replace
        new_url: The new URL to use
        course_key: The current course key
        user: The authenticated user making the request
    """
    store = modulestore()
    handouts_key = course_key.make_usage_key("course_info", "handouts")
    handouts = store.get_item(handouts_key)

    needs_rewrite = hasattr(handouts, "data") and old_url in handouts.data
    if needs_rewrite:
        handouts.data = handouts.data.replace(old_url, new_url)
        store.update_item(handouts, user.id)
        LOGGER.info(f"Updated handouts with new URL: {old_url} -> {new_url}")
def _update_custom_pages_link(block_id, old_url, new_url, course_key, user):
    """
    Replace old_url with new_url in a custom page (static tab).

    Args:
        block_id: The ID of the block containing the link (usage key string)
        old_url: The original URL to replace
        new_url: The new URL to use
        course_key: The current course key
        user: The authenticated user making the request
    """
    store = modulestore()
    try:
        tab_key = UsageKey.from_string(block_id)
        static_tab = store.get_item(tab_key)
    except InvalidKeyError:
        LOGGER.warning(f"Invalid usage key for static tab: {block_id}")
        return

    if hasattr(static_tab, "data") and old_url in static_tab.data:
        static_tab.data = static_tab.data.replace(old_url, new_url)
        store.update_item(static_tab, user.id)
        LOGGER.info(
            f"Updated static tab {block_id} with new URL: {old_url} -> {new_url}"
        )
def _update_course_content_link(block_id, old_url, new_url, course_key, user):
    """
    Replace old_url with new_url in a course content block, then publish it.

    Args:
        block_id: The ID of the block containing the link (usage key string)
        old_url: The original URL to replace
        new_url: The new URL to use
        course_key: The current course key
        user: The authenticated user making the request
    """
    store = modulestore()
    try:
        content_key = UsageKey.from_string(block_id)
        block = store.get_item(content_key)
    except InvalidKeyError:
        LOGGER.warning(f"Invalid usage key for block: {block_id}")
        return

    if hasattr(block, "data") and old_url in block.data:
        block.data = block.data.replace(old_url, new_url)
        store.update_item(block, user.id)
        # Content blocks (unlike course_info blocks) must be re-published
        # for the change to reach the LMS.
        store.publish(block.location, user.id)
        LOGGER.info(
            f"Updated block {block_id} data with new URL: {old_url} -> {new_url}"
        )
def _update_block_content_with_new_url(block_id, old_url, new_url, link_type, course_key, user):
    """
    Updates the content of a block in the modulestore to replace old URL with new URL.

    Dispatches to the area-specific handler; any unrecognized link_type is
    treated as regular course content.

    Args:
        block_id: The ID of the block containing the link
        old_url: The original URL to replace
        new_url: The new URL to use
        link_type: The type of link ('course_content', 'course_updates', 'handouts', 'custom_pages')
        course_key: The current course key
        user: The authenticated user making the request
    """
    handlers = {
        "course_updates": _update_course_updates_link,
        "handouts": _update_handouts_link,
        "custom_pages": _update_custom_pages_link,
    }
    handler = handlers.get(link_type, _update_course_content_link)
    handler(block_id, old_url, new_url, course_key, user)
def _update_broken_links_file_with_updated_links(course_key, updated_links):
    """
    Updates the existing broken links file to reflect the status of updated links.

    This function finds the latest broken links task artifact for the course,
    rewrites each broken-link entry whose URL was successfully updated, and
    saves the result back over the artifact. Best-effort: any failure is
    logged and swallowed so the update task itself still succeeds.

    Args:
        course_key: The current course key
        updated_links: List of updated link results from the link update task
    """
    try:
        # Find the latest broken links task artifact for this course
        latest_artifact = UserTaskArtifact.objects.filter(
            name="BrokenLinks", status__name__contains=str(course_key)
        ).order_by("-created").first()
        if not latest_artifact or not latest_artifact.file:
            LOGGER.debug(f"No broken links file found for course {course_key}")
            return

        # Read the existing broken links file
        try:
            with latest_artifact.file.open("r") as file:
                existing_broken_links = json.load(file)
        except (json.JSONDecodeError, IOError) as e:
            LOGGER.error(
                f"Failed to read broken links file for course {course_key}: {e}"
            )
            return

        # Normalize the successful results; when a result lacks its
        # original_url, reconstruct it from the new URL.
        successful_results = []
        for result in updated_links:
            if not result.get("success"):
                continue
            original_url = result.get("original_url") or _get_original_url_from_updated_result(result, course_key)
            if not original_url:
                continue
            successful_results.append(
                {
                    "original_url": original_url,
                    "new_url": result.get("new_url"),
                    "type": result.get("type"),
                    "id": str(result.get("id")) if result.get("id") is not None else None,
                }
            )

        # Rewrite each [block_id, url, state] entry whose URL was updated
        # for the same block; malformed entries pass through unchanged.
        updated_broken_links = []
        for link in existing_broken_links:
            if len(link) >= 3:
                block_id, url, link_state = link[0], link[1], link[2]
                applied = False
                for res in successful_results:
                    if res["original_url"] != url:
                        continue
                    # NOTE(review): the exact-id conjunct makes the
                    # category check in _update_result_applies_to_block
                    # redundant here — confirm whether category-only
                    # matches were meant to apply too.
                    if _update_result_applies_to_block(res, block_id) and res.get('id') == str(block_id):
                        new_url = res["new_url"]
                        updated_broken_links.append([block_id, new_url, link_state])
                        applied = True
                        break
                if not applied:
                    updated_broken_links.append(link)
            else:
                updated_broken_links.append(link)

        # Create a new temporary file with updated data
        file_name = f"{course_key}_updated"
        updated_file = NamedTemporaryFile(prefix=file_name + ".", suffix=".json")
        with open(updated_file.name, "w") as file:
            json.dump(updated_broken_links, file, indent=4)

        # Update the existing artifact with the new file
        latest_artifact.file.save(
            name=os.path.basename(updated_file.name), content=File(updated_file)
        )
        latest_artifact.save()

        LOGGER.info(f"Successfully updated broken links file for course {course_key}")
    except Exception as e:  # pylint: disable=broad-except
        LOGGER.error(f"Failed to update broken links file for course {course_key}: {e}")
def _get_original_url_from_updated_result(update_result, course_key):
    """
    Reconstruct the original URL from an update result.

    Args:
        update_result: The update result containing new_url and other info
        course_key: The current course key

    Returns:
        str: The original URL before update, or None if it cannot be determined
    """
    try:
        current_run = str(course_key)
        new_url = update_result.get("new_url", "")
        if not new_url or current_run not in new_url:
            return None

        previous_key = get_previous_run_course_key(course_key)
        if not previous_key:
            return None

        # Reverse the rewrite: swap the current run's id back for the
        # previous run's id.
        return new_url.replace(current_run, str(previous_key))
    except Exception as e:  # pylint: disable=broad-except
        LOGGER.debug(
            f"Failed to reconstruct original URL from update result: {e}"
        )
        return None
def _update_result_applies_to_block(result_entry, block_id):
"""
Determine if a given update result applies to a specific broken-link block id.
The task update results contain a 'type' and an 'id' indicating where the
replacement was applied. A single URL may appear in multiple places (course
content, course_updates, handouts, custom pages). We should only apply the
replacement to broken-link entries that match the same target area.
"""
try:
result_type = (result_entry.get("type") or "course_content").lower()
result_id = result_entry.get("id")
block_id_str = str(block_id) if block_id is not None else ""
result_id_str = str(result_id) if result_id is not None else None
if result_id_str and block_id_str == result_id_str:
return True
is_course_info = "course_info" in block_id_str
is_updates_section = "updates" in block_id_str
is_handouts_section = "handouts" in block_id_str
is_static_tab = "static_tab" in block_id_str
block_category = (
"course_updates" if is_course_info and is_updates_section else
"handouts" if is_course_info and is_handouts_section else
"custom_pages" if is_static_tab else
"course_content"
)
return block_category == result_type
except Exception: # pylint: disable=broad-except
return False

View File

@@ -13,6 +13,7 @@ import ddt
from django.conf import settings
from django.test.client import Client
from django.test.utils import override_settings
from django.core.files.storage import storages
from xmodule.contentstore.django import contentstore
from xmodule.exceptions import NotFoundError
@@ -281,19 +282,24 @@ class ContentStoreImportTest(ModuleStoreTestCase):
@override_settings(
COURSE_IMPORT_EXPORT_STORAGE="cms.djangoapps.contentstore.storage.ImportExportS3Storage",
DEFAULT_FILE_STORAGE="django.core.files.storage.FileSystemStorage"
STORAGES={
'default': {
'BACKEND': "django.core.files.storage.FileSystemStorage"
}
}
)
def test_resolve_default_storage(self):
def test_default_storage(self):
""" Ensure the default storage is invoked, even if course export storage is configured """
storage = resolve_storage_backend(
storage_key="default",
legacy_setting_key="DEFAULT_FILE_STORAGE"
)
storage = storages["default"]
self.assertEqual(storage.__class__.__name__, "FileSystemStorage")
@override_settings(
COURSE_IMPORT_EXPORT_STORAGE="cms.djangoapps.contentstore.storage.ImportExportS3Storage",
DEFAULT_FILE_STORAGE="django.core.files.storage.FileSystemStorage",
STORAGES={
'default': {
'BACKEND': "django.core.files.storage.FileSystemStorage"
}
},
COURSE_IMPORT_EXPORT_BUCKET="bucket_name_test"
)
def test_resolve_happy_path_storage(self):

View File

@@ -669,7 +669,7 @@ def use_legacy_logged_out_home():
# after creating a course rerun.
# .. toggle_use_cases: temporary
# .. toggle_creation_date: 2025-07-21
# .. toggle_target_removal_date: None
# .. toggle_target_removal_date: 2026-02-25
ENABLE_COURSE_OPTIMIZER_CHECK_PREV_RUN_LINKS = CourseWaffleFlag(
f'{CONTENTSTORE_NAMESPACE}.enable_course_optimizer_check_prev_run_links',
__name__,

View File

@@ -12,6 +12,7 @@ from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, BlockFactory
from common.djangoapps.util.storage import resolve_storage_backend
from storages.backends.s3boto3 import S3Boto3Storage
from django.core.files.storage import storages
from .signals import export_course_metadata
from .toggles import EXPORT_COURSE_METADATA_FLAG
@@ -60,16 +61,24 @@ class TestExportCourseMetadata(SharedModuleStoreTestCase):
@override_settings(
COURSE_METADATA_EXPORT_STORAGE="cms.djangoapps.export_course_metadata.storage.CourseMetadataExportS3Storage",
DEFAULT_FILE_STORAGE="django.core.files.storage.FileSystemStorage"
STORAGES={
'default': {
'BACKEND': "django.core.files.storage.FileSystemStorage"
}
}
)
def test_resolve_default_storage(self):
""" Ensure the default storage is invoked, even if course export storage is configured """
storage = resolve_storage_backend(storage_key="default", legacy_setting_key="default")
storage = storages["default"]
self.assertEqual(storage.__class__.__name__, "FileSystemStorage")
@override_settings(
COURSE_METADATA_EXPORT_STORAGE="cms.djangoapps.export_course_metadata.storage.CourseMetadataExportS3Storage",
DEFAULT_FILE_STORAGE="django.core.files.storage.FileSystemStorage",
STORAGES={
"default": {
"BACKEND": "django.core.files.storage.FileSystemStorage"
}
},
COURSE_METADATA_EXPORT_BUCKET="bucket_name_test"
)
def test_resolve_happy_path_storage(self):

View File

@@ -1,13 +0,0 @@
"""
App for importing from the modulestore tools.
"""
from django.apps import AppConfig
class ImportFromModulestoreConfig(AppConfig):
"""
App for importing legacy content from the modulestore.
"""
name = 'cms.djangoapps.import_from_modulestore'

View File

@@ -1,54 +0,0 @@
"""
This module contains the data models for the import_from_modulestore app.
"""
from collections import namedtuple
from enum import Enum
from openedx.core.djangoapps.content_libraries import api as content_libraries_api
from django.db.models import TextChoices
from django.utils.translation import gettext_lazy as _
class ImportStatus(TextChoices):
"""
The status of this modulestore-to-learning-core import.
"""
NOT_STARTED = 'not_started', _('Waiting to stage content')
STAGING = 'staging', _('Staging content for import')
STAGING_FAILED = _('Failed to stage content')
STAGED = 'staged', _('Content is staged and ready for import')
IMPORTING = 'importing', _('Importing staged content')
IMPORTING_FAILED = 'importing_failed', _('Failed to import staged content')
IMPORTED = 'imported', _('Successfully imported content')
CANCELED = 'canceled', _('Canceled')
class CompositionLevel(Enum):
"""
Enumeration of composition levels for course content.
Defines the different levels of composition for course content,
including chapters, sequentials, verticals, and xblocks.
It also categorizes these levels into complicated and flat
levels for easier processing.
"""
CHAPTER = content_libraries_api.ContainerType.Section
SEQUENTIAL = content_libraries_api.ContainerType.Subsection
VERTICAL = content_libraries_api.ContainerType.Unit
COMPONENT = 'component'
OLX_COMPLEX_LEVELS = [
VERTICAL.olx_tag,
SEQUENTIAL.olx_tag,
CHAPTER.olx_tag,
]
@classmethod
def values(cls):
"""
Returns all levels of composition levels.
"""
return [composition_level.value for composition_level in cls]
PublishableVersionWithMapping = namedtuple('PublishableVersionWithMapping', ['publishable_version', 'mapping'])

View File

@@ -1,82 +0,0 @@
# Generated by Django 4.2.20 on 2025-04-21 16:23
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import model_utils.fields
import opaque_keys.edx.django.models
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
('content_staging', '0005_stagedcontent_version_num'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('oel_publishing', '0008_alter_draftchangelogrecord_options_and_more'),
]
operations = [
migrations.CreateModel(
name='Import',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, unique=True)),
('status', models.CharField(choices=[('not_started', 'Waiting to stage content'), ('staging', 'Staging content for import'), ('Failed to stage content', 'Staging Failed'), ('staged', 'Content is staged and ready for import'), ('importing', 'Importing staged content'), ('importing_failed', 'Failed to import staged content'), ('imported', 'Successfully imported content'), ('canceled', 'Canceled')], db_index=True, default='not_started', max_length=100)),
('source_key', opaque_keys.edx.django.models.LearningContextKeyField(db_index=True, help_text='The modulestore course', max_length=255)),
('target_change', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='oel_publishing.draftchangelog')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'Import from modulestore',
'verbose_name_plural': 'Imports from modulestore',
},
),
migrations.CreateModel(
name='PublishableEntityMapping',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('source_usage_key', opaque_keys.edx.django.models.UsageKeyField(help_text='Original usage key/ID of the thing that has been imported.', max_length=255)),
('target_entity', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='oel_publishing.publishableentity')),
('target_package', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='oel_publishing.learningpackage')),
],
options={
'unique_together': {('source_usage_key', 'target_package')},
},
),
migrations.CreateModel(
name='StagedContentForImport',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('source_usage_key', opaque_keys.edx.django.models.UsageKeyField(help_text='The original Usage key of the highest-level component that was saved in StagedContent.', max_length=255)),
('import_event', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='staged_content_for_import', to='import_from_modulestore.import')),
('staged_content', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='staged_content_for_import', to='content_staging.stagedcontent')),
],
options={
'unique_together': {('import_event', 'staged_content')},
},
),
migrations.CreateModel(
name='PublishableEntityImport',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('import_event', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='import_from_modulestore.import')),
('resulting_change', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, to='oel_publishing.draftchangelogrecord')),
('resulting_mapping', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='import_from_modulestore.publishableentitymapping')),
],
options={
'unique_together': {('import_event', 'resulting_mapping')},
},
),
]

View File

@@ -1,65 +0,0 @@
# Generated by Django 4.2.23 on 2025-08-19 16:21
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('import_from_modulestore', '0001_initial'),
]
operations = [
migrations.AlterUniqueTogether(
name='publishableentityimport',
unique_together=None,
),
migrations.RemoveField(
model_name='publishableentityimport',
name='import_event',
),
migrations.RemoveField(
model_name='publishableentityimport',
name='resulting_change',
),
migrations.RemoveField(
model_name='publishableentityimport',
name='resulting_mapping',
),
migrations.AlterUniqueTogether(
name='publishableentitymapping',
unique_together=None,
),
migrations.RemoveField(
model_name='publishableentitymapping',
name='target_entity',
),
migrations.RemoveField(
model_name='publishableentitymapping',
name='target_package',
),
migrations.AlterUniqueTogether(
name='stagedcontentforimport',
unique_together=None,
),
migrations.RemoveField(
model_name='stagedcontentforimport',
name='import_event',
),
migrations.RemoveField(
model_name='stagedcontentforimport',
name='staged_content',
),
migrations.DeleteModel(
name='Import',
),
migrations.DeleteModel(
name='PublishableEntityImport',
),
migrations.DeleteModel(
name='PublishableEntityMapping',
),
migrations.DeleteModel(
name='StagedContentForImport',
),
]

View File

@@ -1,3 +0,0 @@
"""
Models for the course to library import app (TO BE DELETED)
"""

View File

@@ -1157,7 +1157,6 @@ PIPELINE = {
'YUI_BINARY': 'yui-compressor',
}
STATICFILES_STORAGE = 'openedx.core.storage.ProductionStorage'
STATICFILES_STORAGE_KWARGS = {}
# List of finder classes that know how to find static files in various locations.
@@ -1497,7 +1496,6 @@ INSTALLED_APPS = [
'openedx.core.djangoapps.course_groups', # not used in cms (yet), but tests run
'cms.djangoapps.xblock_config.apps.XBlockConfig',
'cms.djangoapps.export_course_metadata.apps.ExportCourseMetadataConfig',
'cms.djangoapps.import_from_modulestore.apps.ImportFromModulestoreConfig',
# New (Learning-Core-based) XBlock runtime
'openedx.core.djangoapps.xblock.apps.StudioXBlockAppConfig',
@@ -2387,7 +2385,15 @@ BULK_EMAIL_DEFAULT_FROM_EMAIL = 'no-reply@example.com'
BULK_EMAIL_LOG_SENT_EMAILS = False
############### Settings for django file storage ##################
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
STORAGES = {
'default': {
'BACKEND': 'django.core.files.storage.FileSystemStorage'
},
'staticfiles': {
'BACKEND': 'openedx.core.storage.ProductionStorage'
}
}
###################### Grade Downloads ######################
# These keys are used for all of our asynchronous downloadable files, including

View File

@@ -9,7 +9,7 @@ from os.path import abspath, dirname, join
from .production import * # pylint: disable=wildcard-import, unused-wildcard-import
# Don't use S3 in devstack, fall back to filesystem
del DEFAULT_FILE_STORAGE
STORAGES['default']['BACKEND'] = 'django.core.files.storage.FileSystemStorage'
COURSE_IMPORT_EXPORT_STORAGE = 'django.core.files.storage.FileSystemStorage'
USER_TASKS_ARTIFACT_STORAGE = COURSE_IMPORT_EXPORT_STORAGE
@@ -56,7 +56,7 @@ FEATURES['ENABLE_VIDEO_UPLOAD_PIPELINE'] = True
# Skip packaging and optimization in development
PIPELINE['PIPELINE_ENABLED'] = False
STATICFILES_STORAGE = 'openedx.core.storage.DevelopmentStorage'
STORAGES['staticfiles']['BACKEND'] = 'openedx.core.storage.DevelopmentStorage'
# Revert to the default set of finders as we don't want the production pipeline
STATICFILES_FINDERS = [

View File

@@ -291,7 +291,11 @@ DATABASES:
USER: user
DATA_DIR: /edx/var/edxapp
DEFAULT_FEEDBACK_EMAIL: feedback@example.com
DEFAULT_FILE_STORAGE: storages.backends.s3boto3.S3Boto3Storage
STORAGES:
default:
BACKEND: storages.backends.s3boto3.S3Boto3Storage
staticfiles:
BACKEND: openedx.core.storage.ProductionStorage
DEFAULT_FROM_EMAIL: no-reply@registration.localhost
DEFAULT_HASHING_ALGORITHM: sha256
DEFAULT_JWT_ISSUER:

View File

@@ -82,10 +82,11 @@ with codecs.open(CONFIG_FILE, encoding='utf-8') as f:
'MKTG_URL_LINK_MAP',
'REST_FRAMEWORK',
'EVENT_BUS_PRODUCER_CONFIG',
'DEFAULT_FILE_STORAGE',
'STATICFILES_STORAGE',
]
})
#######################################################################################################################
#### LOAD THE EDX-PLATFORM GIT REVISION
####
@@ -150,11 +151,6 @@ if 'loc_cache' not in CACHES:
if 'staticfiles' in CACHES:
CACHES['staticfiles']['KEY_PREFIX'] = EDX_PLATFORM_REVISION
# In order to transition from local disk asset storage to S3 backed asset storage,
# we need to run asset collection twice, once for local disk and once for S3.
# Once we have migrated to service assets off S3, then we can convert this back to
# managed by the yaml file contents
STATICFILES_STORAGE = os.environ.get('STATICFILES_STORAGE', STATICFILES_STORAGE)
MKTG_URL_LINK_MAP.update(_YAML_TOKENS.get('MKTG_URL_LINK_MAP', {}))
@@ -194,21 +190,38 @@ AWS_BUCKET_ACL = AWS_DEFAULT_ACL
# The number of seconds that a generated URL is valid for.
AWS_QUERYSTRING_EXPIRE = 7 * 24 * 60 * 60 # 7 days
# Change to S3Boto3 if we haven't specified another default storage AND we have specified AWS creds.
if (not _YAML_TOKENS.get('DEFAULT_FILE_STORAGE')) and AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
_yaml_storages = _YAML_TOKENS.get('STORAGES', {})
_storages_default_backend_is_missing = not _yaml_storages.get('default', {}).get('BACKEND')
# For backward compatibility, if YAML provides legacy keys (DEFAULT_FILE_STORAGE, STATICFILES_STORAGE)
# and STORAGES doesn't explicitly define the corresponding backend, migrate the legacy value into STORAGES.
# If YAML doesn't provide legacy keys, no backend is defined in STORAGES['default'], and AWS creds are present,
# fall back to S3Boto3Storage.
#
# This ensures YAML-provided values take precedence over defaults from common.py,
# without overwriting user-defined STORAGES, and AWS creds are treated only as a fallback.
if _storages_default_backend_is_missing:
if 'DEFAULT_FILE_STORAGE' in _YAML_TOKENS:
STORAGES['default']['BACKEND'] = _YAML_TOKENS['DEFAULT_FILE_STORAGE']
elif AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
STORAGES['default']['BACKEND'] = 'storages.backends.s3boto3.S3Boto3Storage'
# Apply legacy STATICFILES_STORAGE if no backend is defined for "staticfiles"
if 'STATICFILES_STORAGE' in _YAML_TOKENS and not _yaml_storages.get('staticfiles', {}).get('BACKEND'):
STORAGES['staticfiles']['BACKEND'] = _YAML_TOKENS['STATICFILES_STORAGE']
if COURSE_IMPORT_EXPORT_BUCKET:
COURSE_IMPORT_EXPORT_STORAGE = 'cms.djangoapps.contentstore.storage.ImportExportS3Storage'
else:
COURSE_IMPORT_EXPORT_STORAGE = DEFAULT_FILE_STORAGE
COURSE_IMPORT_EXPORT_STORAGE = STORAGES['default']['BACKEND']
USER_TASKS_ARTIFACT_STORAGE = COURSE_IMPORT_EXPORT_STORAGE
if COURSE_METADATA_EXPORT_BUCKET:
COURSE_METADATA_EXPORT_STORAGE = 'cms.djangoapps.export_course_metadata.storage.CourseMetadataExportS3Storage'
else:
COURSE_METADATA_EXPORT_STORAGE = DEFAULT_FILE_STORAGE
COURSE_METADATA_EXPORT_STORAGE = STORAGES['default']['BACKEND']
# The normal database user does not have enough permissions to run migrations.
# Migrations are run with separate credentials, given as DB_MIGRATION_*

View File

@@ -29,7 +29,6 @@ from .common import *
from lms.envs.test import ( # pylint: disable=wrong-import-order, disable=unused-import
ACCOUNT_MICROFRONTEND_URL,
COMPREHENSIVE_THEME_DIRS, # unimport:skip
DEFAULT_FILE_STORAGE,
ECOMMERCE_API_URL,
ENABLE_COMPREHENSIVE_THEMING,
JWT_AUTH,
@@ -91,7 +90,7 @@ STATICFILES_DIRS += [
# If we don't add these settings, then Django templates that can't
# find pipelined assets will raise a ValueError.
# http://stackoverflow.com/questions/12816941/unit-testing-with-django-pipeline
STATICFILES_STORAGE = "pipeline.storage.NonPackagingPipelineStorage"
STORAGES['staticfiles']['BACKEND'] = "pipeline.storage.NonPackagingPipelineStorage"
STATIC_URL = "/static/"
# Update module store settings per defaults for tests

View File

@@ -37,9 +37,9 @@ def update_saml_provider_configs_on_configuration_change(sender, instance, creat
# Find all existing SAMLProviderConfig instances (current_set) that should be
# pointing to this slug but are pointing to an older version
existing_providers = SAMLProviderConfig.objects.current_set().filter(
site_id=instance.site_id,
saml_configuration__site_id=instance.site_id,
saml_configuration__slug=instance.slug
).exclude(saml_configuration_id=instance.id)
).exclude(saml_configuration_id=instance.id).exclude(saml_configuration_id__isnull=True)
updated_count = 0
for provider_config in existing_providers:

View File

@@ -6,7 +6,9 @@ import ddt
from unittest import mock
from unittest.mock import call
from django.test import TestCase, override_settings
from common.djangoapps.third_party_auth.tests.factories import SAMLConfigurationFactory
from django.contrib.sites.models import Site
from common.djangoapps.third_party_auth.tests.factories import SAMLConfigurationFactory, SAMLProviderConfigFactory
from common.djangoapps.third_party_auth.models import SAMLProviderConfig
@ddt.ddt
@@ -21,97 +23,181 @@ class TestSAMLConfigurationSignalHandlers(TestCase):
org_info_str='{"en-US": {"url": "http://test.com", "displayname": "Test", "name": "test"}}'
)
@ddt.data(
# Case 1: Tests behavior when SAML config signal handlers are disabled
# Verifies that basic attributes are set but no provider updates are attempted
{
'enabled': False,
'simulate_error': False,
'description': 'handlers disabled',
'expected_calls': [
call('saml_config_signal.enabled', False),
call('saml_config_signal.new_config_id', 'CONFIG_ID'),
call('saml_config_signal.slug', 'test-config'),
],
'expected_call_count': 3,
},
# Case 2: Tests behavior when SAML config signal handlers are enabled
# Verifies that attributes are set and provider updates are attempted successfully
{
'enabled': True,
'simulate_error': False,
'description': 'handlers enabled',
'expected_calls': [
call('saml_config_signal.enabled', True),
call('saml_config_signal.new_config_id', 'CONFIG_ID'),
call('saml_config_signal.slug', 'test-config'),
call('saml_config_signal.updated_count', 0),
],
'expected_call_count': 4,
},
# Case 3: Tests error handling when signal handlers are enabled but encounter an exception
# Verifies that error information is properly captured when provider updates fail
{
'enabled': True,
'simulate_error': True,
'description': 'handlers enabled with exception',
'expected_calls': [
call('saml_config_signal.enabled', True),
call('saml_config_signal.new_config_id', 'CONFIG_ID'),
call('saml_config_signal.slug', 'test-config'),
],
'expected_call_count': 4, # includes error_message call
'error_message': 'Test error',
},
)
@ddt.unpack
self.site1 = Site.objects.get_or_create(domain='test-site1.com', name='Site 1')[0]
self.site2 = Site.objects.get_or_create(domain='test-site2.com', name='Site 2')[0]
# Existing SAML config used by provider update tests
self.existing_saml_config = SAMLConfigurationFactory(
site=self.site1,
slug='slug',
entity_id='https://existing.example.com'
)
@mock.patch('common.djangoapps.third_party_auth.signals.handlers.set_custom_attribute')
def test_saml_config_signal_handlers(
self, mock_set_custom_attribute, enabled, simulate_error,
description, expected_calls, expected_call_count, error_message=None):
def test_saml_config_signal_handlers_disabled(self, mock_set_custom_attribute):
"""
Test SAML configuration signal handlers under different conditions.
Test behavior when SAML config signal handlers are disabled.
Verifies that basic attributes are set but no provider updates are attempted.
"""
with override_settings(ENABLE_SAML_CONFIG_SIGNAL_HANDLERS=enabled):
if simulate_error:
# Simulate an exception in the provider config update logic
with mock.patch(
'common.djangoapps.third_party_auth.models.SAMLProviderConfig.objects.current_set',
side_effect=Exception(error_message)
):
self.saml_config.entity_id = 'https://updated.example.com'
self.saml_config.save()
else:
with override_settings(ENABLE_SAML_CONFIG_SIGNAL_HANDLERS=False):
self.saml_config.entity_id = 'https://updated.example.com'
self.saml_config.save()
expected_calls = [
call('saml_config_signal.enabled', False),
call('saml_config_signal.new_config_id', self.saml_config.id),
call('saml_config_signal.slug', 'test-config'),
]
mock_set_custom_attribute.assert_has_calls(expected_calls, any_order=False)
assert mock_set_custom_attribute.call_count == 3
@mock.patch('common.djangoapps.third_party_auth.signals.handlers.set_custom_attribute')
def test_saml_config_signal_handlers_with_error(self, mock_set_custom_attribute):
"""
Test error handling when signal handlers encounter an exception.
Verifies that error information is properly captured when provider updates fail.
"""
error_message = "Test error"
with override_settings(ENABLE_SAML_CONFIG_SIGNAL_HANDLERS=True):
# Simulate an exception in the provider config update logic
with mock.patch(
'common.djangoapps.third_party_auth.models.SAMLProviderConfig.objects.current_set',
side_effect=Exception(error_message)
):
self.saml_config.entity_id = 'https://updated.example.com'
self.saml_config.save()
expected_calls_with_id = []
for call_obj in expected_calls:
args = list(call_obj[1])
if args[1] == 'CONFIG_ID':
args[1] = self.saml_config.id
expected_calls_with_id.append(call(args[0], args[1]))
expected_calls = [
call('saml_config_signal.enabled', True),
call('saml_config_signal.new_config_id', self.saml_config.id),
call('saml_config_signal.slug', 'test-config'),
]
# Verify expected calls were made
mock_set_custom_attribute.assert_has_calls(expected_calls_with_id, any_order=False)
mock_set_custom_attribute.assert_has_calls(expected_calls, any_order=False)
assert mock_set_custom_attribute.call_count == 4
# Verify total call count
assert mock_set_custom_attribute.call_count == expected_call_count, (
f"Expected {expected_call_count} calls for {description}, "
f"got {mock_set_custom_attribute.call_count}"
# Verify error message was logged
mock_set_custom_attribute.assert_any_call(
'saml_config_signal.error_message',
mock.ANY
)
error_calls = [
call for call in mock_set_custom_attribute.mock_calls
if call[1][0] == 'saml_config_signal.error_message'
]
assert error_message in error_calls[0][1][1], (
f"Expected '{error_message}' in error message, "
f"got: {error_calls[0][1][1]}"
)
# If error is expected, verify error message was logged
if error_message:
mock_set_custom_attribute.assert_any_call(
'saml_config_signal.error_message',
mock.ANY
)
error_calls = [
call for call in mock_set_custom_attribute.mock_calls
if call[1][0] == 'saml_config_signal.error_message'
]
assert error_message in error_calls[0][1][1], (
f"Expected '{error_message}' in error message for {description}, "
f"got: {error_calls[0][1][1]}"
)
def _get_current_provider(self, slug):
"""
Helper to get current version of provider by slug.
"""
return SAMLProviderConfig.objects.current_set().get(slug=slug)
def _get_site(self, site_id):
"""
Helper to get site by ID (1 = site1, 2 = site2).
"""
if site_id == 1:
return self.site1
elif site_id == 2:
return self.site2
else:
raise ValueError(f"Unexpected site_id: {site_id}.")
@ddt.data(
# Args: provider_site_id, provider_slug, signal_saml_site_id, signal_saml_slug, is_provider_updated
# All tests: provider's saml_configuration has site_id=1, slug='slug'
# Signal matches provider's saml config and should update
(1, 'slug', 1, 'slug', True), # Same site, same slug
(2, 'slug', 1, 'slug', True), # Cross-site provider, matching saml config
(1, 'provider-slug', 1, 'slug', True), # Different provider slug, matching saml config
# Signal does not match provider's saml config and should not update
(1, 'slug', 2, 'slug', False), # Different saml config site
(2, 'slug', 2, 'slug', False), # Different saml config site (cross-site)
(1, 'provider-slug', 1, 'provider-slug', False), # Different saml config slug
(2, 'provider-slug', 1, 'provider-slug', False), # Different saml config slug (cross-site)
)
@ddt.unpack
@mock.patch('common.djangoapps.third_party_auth.signals.handlers.set_custom_attribute')
@override_settings(ENABLE_SAML_CONFIG_SIGNAL_HANDLERS=True)
def test_saml_provider_config_updates(self, provider_site_id, provider_slug,
signal_saml_site_id, signal_saml_slug, is_provider_updated,
mock_set_custom_attribute):
"""
Test SAML provider config updates under different scenarios.
Tests that providers are updated only when the signal's SAML configuration
matches the provider's existing SAML configuration (by site and slug).
"""
provider_site = self._get_site(provider_site_id)
signal_saml_site = self._get_site(signal_saml_site_id)
provider = SAMLProviderConfigFactory(
slug=provider_slug,
site=provider_site,
saml_configuration=self.existing_saml_config
)
original_config_id = provider.saml_configuration_id
new_saml_config = SAMLConfigurationFactory(
site=signal_saml_site,
slug=signal_saml_slug,
entity_id='https://new.example.com'
)
current_provider = self._get_current_provider(provider_slug)
mock_set_custom_attribute.assert_any_call('saml_config_signal.enabled', True)
mock_set_custom_attribute.assert_any_call('saml_config_signal.new_config_id', new_saml_config.id)
mock_set_custom_attribute.assert_any_call('saml_config_signal.slug', signal_saml_slug)
if is_provider_updated:
mock_set_custom_attribute.assert_any_call('saml_config_signal.updated_count', 1)
self.assertEqual(current_provider.saml_configuration_id, new_saml_config.id,
"Provider should be updated when signal SAML config matches")
else:
mock_set_custom_attribute.assert_any_call('saml_config_signal.updated_count', 0)
self.assertEqual(current_provider.saml_configuration_id, original_config_id,
"Provider should NOT be updated when signal SAML config doesn't match")
@ddt.data(
# Args: provider_site_id, provider_slug, signal_saml_site_id, signal_saml_slug
# All tests: provider's saml config is None and should never be updated
(1, 'slug', 1, 'default'),
(1, 'default', 1, 'default'),
(2, 'slug', 1, 'default'),
)
@ddt.unpack
@override_settings(ENABLE_SAML_CONFIG_SIGNAL_HANDLERS=True)
def test_saml_provider_with_null_config_not_updated(self, provider_site_id, provider_slug,
signal_saml_site_id, signal_saml_slug):
"""
Test that providers with NULL SAML configuration are never updated by signal handler.
This is critical for fallback authentication scenarios where providers
intentionally have no SAML configuration.
"""
provider_site = self._get_site(provider_site_id)
signal_saml_site = self._get_site(signal_saml_site_id)
null_provider = SAMLProviderConfigFactory(
slug=provider_slug,
site=provider_site,
saml_configuration=None
)
new_saml_config = SAMLConfigurationFactory(
site=signal_saml_site,
slug=signal_saml_slug,
entity_id='https://new.example.com'
)
current_provider = self._get_current_provider(provider_slug)
self.assertIsNone(current_provider.saml_configuration_id,
"Provider with NULL SAML config should never be updated")

View File

@@ -78,7 +78,7 @@ def store_uploaded_file(
file_storage = DefaultStorage()
# If a file already exists with the supplied name, file_storage will make the filename unique.
stored_file_name = file_storage.save(stored_file_name, uploaded_file)
if is_private and settings.DEFAULT_FILE_STORAGE == 'storages.backends.s3boto3.S3Boto3Storage':
if is_private and settings.STORAGES['default']['BACKEND'] == 'storages.backends.s3boto3.S3Boto3Storage':
S3Boto3Storage().connection.meta.client.put_object_acl(
ACL='private',
Bucket=settings.AWS_STORAGE_BUCKET_NAME,

View File

@@ -38,9 +38,7 @@ def resolve_storage_backend(
storage_path = getattr(settings, legacy_setting_key, None)
storages_config = getattr(settings, 'STORAGES', {})
if options is None:
options = {}
options = options or {}
if storage_key in storages_config:
# Use case 1: STORAGES is defined
@@ -70,5 +68,5 @@ def resolve_storage_backend(
break
storage_path = storage_path.get(deep_setting_key)
StorageClass = import_string(storage_path or settings.DEFAULT_FILE_STORAGE)
StorageClass = import_string(storage_path or storages_config["default"]["BACKEND"])
return StorageClass(**options)

View File

@@ -4,6 +4,7 @@ Tests for the resolve_storage_backend function in common.djangoapps.util.storage
from django.test import TestCase
from django.test.utils import override_settings
from unittest.mock import patch, MagicMock
from common.djangoapps.util.storage import resolve_storage_backend
@@ -20,6 +21,7 @@ class ResolveStorageTest(TestCase):
BLOCK_STRUCTURES_SETTINGS="cms.djangoapps.contentstore.storage.ImportExportS3Storage"
)
def test_legacy_settings(self):
"""Test legacy string-based storage settings."""
storage = resolve_storage_backend(
storage_key="block_structures_settings",
legacy_setting_key="BLOCK_STRUCTURES_SETTINGS",
@@ -33,6 +35,7 @@ class ResolveStorageTest(TestCase):
}
)
def test_nested_legacy_settings(self):
"""Test legacy nested dictionary."""
storage = resolve_storage_backend(
storage_key="block_structures_settings",
legacy_setting_key="BLOCK_STRUCTURES_SETTINGS",
@@ -47,6 +50,7 @@ class ResolveStorageTest(TestCase):
}
)
def test_nested_legacy_settings_failed(self):
"""Test legacy nested dictionary settings with missing key falls back to default."""
storage = resolve_storage_backend(
storage_key="block_structures_settings",
legacy_setting_key="BLOCK_STRUCTURES_SETTINGS",
@@ -54,3 +58,75 @@ class ResolveStorageTest(TestCase):
options={}
)
assert storage.__class__.__name__ == DEFAULT_STORAGE_CLASS_NAME
@override_settings(
STORAGES={
"default": {
"BACKEND": "django.core.files.storage.FileSystemStorage",
"OPTIONS": {}
}
},
LEGACY_SETTING="cms.djangoapps.contentstore.storage.ImportExportS3Storage"
)
def test_missing_storage_key_fallback_to_legacy(self):
"""Test fallback to legacy settings when storage key not found in STORAGES."""
storage = resolve_storage_backend(
storage_key="nonexistent_storage",
legacy_setting_key="LEGACY_SETTING",
options={}
)
assert storage.__class__.__name__ == "ImportExportS3Storage"
def test_no_storages_no_legacy_setting(self):
"""Test fallback to default storage when neither STORAGES nor legacy setting exists."""
storage = resolve_storage_backend(
storage_key="nonexistent_storage",
legacy_setting_key="NONEXISTENT_LEGACY_SETTING",
options={}
)
assert storage.__class__.__name__ == DEFAULT_STORAGE_CLASS_NAME
@override_settings(
STORAGES={
"default": {
"BACKEND": "cms.djangoapps.contentstore.storage.ImportExportS3Storage",
"OPTIONS": {}
}
}
)
def test_fallback_to_custom_default_backend(self):
"""Test fallback uses custom default backend from STORAGES config."""
storage = resolve_storage_backend(
storage_key="nonexistent_storage",
legacy_setting_key="NONEXISTENT_LEGACY_SETTING",
options={}
)
assert storage.__class__.__name__ == "ImportExportS3Storage"
@override_settings(
STORAGES={
"default": {
"BACKEND": "django.core.files.storage.FileSystemStorage",
"OPTIONS": {}
},
"custom_storage_key": {
"BACKEND": "cms.djangoapps.contentstore.storage.ImportExportS3Storage",
"OPTIONS": {}
}
}
)
@patch('common.djangoapps.util.storage.storages')
def test_modern_storages_config(self, mock_storages):
"""Test modern Django STORAGES configuration that takes precedence."""
mock_storage_instance = MagicMock()
mock_storage_instance.__class__.__name__ = "ImportExportS3Storage"
mock_storages.__getitem__.return_value = mock_storage_instance
storage = resolve_storage_backend(
storage_key="custom_storage_key",
legacy_setting_key="SOME_LEGACY_SETTING",
options={}
)
mock_storages.__getitem__.assert_called_once_with("custom_storage_key")
assert storage.__class__.__name__ == "ImportExportS3Storage"

View File

@@ -11,47 +11,55 @@ from openedx.core.djangoapps.discussions.models import (
def filter_discussion_xblocks_from_response(response, course_key):
"""
Removes discussion xblocks if discussion provider is openedx
Removes discussion xblocks if discussion provider is openedx.
"""
configuration = DiscussionsConfiguration.get(context_key=course_key)
provider = configuration.provider_type
if provider == Provider.OPEN_EDX:
# Finding ids of discussion xblocks
if isinstance(response.data, ReturnList):
discussion_xblocks = [
value.get('id') for value in response.data if value.get('type') == 'discussion'
]
else:
discussion_xblocks = [
key for key, value in response.data.get('blocks', {}).items()
if value.get('type') == 'discussion'
]
# Filtering discussion xblocks keys from blocks
if isinstance(response.data, ReturnList):
filtered_blocks = {
value.get('id'): value
for value in response.data
if value.get('type') != 'discussion'
}
else:
filtered_blocks = {
key: value
for key, value in response.data.get('blocks', {}).items()
if value.get('type') != 'discussion'
}
# Removing reference of discussion xblocks from unit
# These references needs to be removed because they no longer exist
for _, block_data in filtered_blocks.items():
for key in ['descendants', 'children']:
descendants = block_data.get(key, [])
if descendants:
descendants = [
descendant for descendant in descendants
if descendant not in discussion_xblocks
]
block_data[key] = descendants
if isinstance(response.data, ReturnList):
response.data = filtered_blocks
else:
response.data['blocks'] = filtered_blocks
if provider != Provider.OPEN_EDX:
return response
is_list_response = isinstance(response.data, ReturnList)
# Find discussion xblock IDs
if is_list_response:
discussion_xblocks = [
block.get('id') for block in response.data
if block.get('type') == 'discussion'
]
else:
discussion_xblocks = [
key for key, value in response.data.get('blocks', {}).items()
if value.get('type') == 'discussion'
]
# Filter out discussion blocks
if is_list_response:
filtered_blocks = [
block for block in response.data
if block.get('type') != 'discussion'
]
else:
filtered_blocks = {
key: value for key, value in response.data.get('blocks', {}).items()
if value.get('type') != 'discussion'
}
# Remove references to discussion xblocks
# These references need to be removed because they no longer exist
blocks_iterable = filtered_blocks if is_list_response else filtered_blocks.values()
for block_data in blocks_iterable:
for key in ['descendants', 'children']:
if key in block_data:
block_data[key] = [
descendant for descendant in block_data[key]
if descendant not in discussion_xblocks
]
# Update response
if is_list_response:
response.data = ReturnList(filtered_blocks, serializer=None)
else:
response.data['blocks'] = filtered_blocks
return response

View File

@@ -330,12 +330,14 @@ class BlocksInCourseView(BlocksView):
if course_block.get('type') == 'course':
root = course_block['id']
else:
root = str(course_usage_key)
else:
root = response.data['root']
course_blocks = response.data['blocks']
if not root:
raise ValueError(f"Unable to find course block in {course_key_string}")
raise ValidationError(f"Unable to find course block in '{course_key_string}'")
recurse_mark_complete(root, course_blocks)
return response

View File

@@ -215,7 +215,6 @@ class ThreadViewSetPartialUpdateTest(
"anonymous_to_peers": False,
"closed": False,
"pinned": False,
"read": True,
"editing_user_id": str(self.user.id),
}
self.check_mock_called_with("update_thread", -1, **params)

View File

@@ -623,4 +623,4 @@ def uses_shib(course):
Returns a boolean indicating if Shibboleth authentication is set for this course.
"""
return course.enrollment_domain and course.enrollment_domain.startswith(settings.SHIBBOLETH_DOMAIN_PREFIX)
return bool(course.enrollment_domain and course.enrollment_domain.startswith(settings.SHIBBOLETH_DOMAIN_PREFIX))

View File

@@ -272,12 +272,12 @@ class DjangoStorageReportStore(ReportStore):
@classmethod
def from_config(cls, config_name):
"""
By default, the default file storage specified by the `DEFAULT_FILE_STORAGE`
By default, the default file storage specified by the `STORAGES['default']`
setting will be used. To configure the storage used, add a dict in
settings with the following fields::
STORAGE_CLASS : The import path of the storage class to use. If
not set, the DEFAULT_FILE_STORAGE setting will be used.
not set, the STORAGES['default']['BACKEND'] setting will be used.
STORAGE_KWARGS : An optional dict of kwargs to pass to the storage
constructor. This can be used to specify a
different S3 bucket or root path, for example.

View File

@@ -2182,7 +2182,6 @@ PIPELINE = {
'UGLIFYJS_BINARY': 'node_modules/.bin/uglifyjs',
}
STATICFILES_STORAGE = 'openedx.core.storage.ProductionStorage'
STATICFILES_STORAGE_KWARGS = {}
# List of finder classes that know how to find static files in various locations.
@@ -4609,7 +4608,14 @@ VIDEO_UPLOAD_PIPELINE = {
}
############### Settings for django file storage ##################
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
STORAGES = {
'default': {
'BACKEND': 'django.core.files.storage.FileSystemStorage'
},
'staticfiles': {
'BACKEND': 'openedx.core.storage.ProductionStorage'
}
}
### Proctoring configuration (redirct URLs and keys shared between systems) ####
PROCTORING_BACKENDS = {

View File

@@ -17,7 +17,7 @@ from openedx.core.djangoapps.plugins.constants import ProjectType, SettingsType
from .production import * # pylint: disable=wildcard-import, unused-wildcard-import
# Don't use S3 in devstack, fall back to filesystem
del DEFAULT_FILE_STORAGE
STORAGES['default']['BACKEND'] = 'django.core.files.storage.FileSystemStorage'
ORA2_FILEUPLOAD_BACKEND = 'django'
@@ -123,7 +123,7 @@ def should_show_debug_toolbar(request): # lint-amnesty, pylint: disable=missing
########################### PIPELINE #################################
PIPELINE['PIPELINE_ENABLED'] = False
STATICFILES_STORAGE = 'openedx.core.storage.DevelopmentStorage'
STORAGES['staticfiles']['BACKEND'] = 'openedx.core.storage.DevelopmentStorage'
# Revert to the default set of finders as we don't want the production pipeline
STATICFILES_FINDERS = [

View File

@@ -376,7 +376,11 @@ DATABASES:
DATA_DIR: /edx/var/edxapp
DEFAULT_COURSE_VISIBILITY_IN_CATALOG: both
DEFAULT_FEEDBACK_EMAIL: feedback@example.com
DEFAULT_FILE_STORAGE: storages.backends.s3boto3.S3Boto3Storage
STORAGES:
default:
BACKEND: storages.backends.s3boto3.S3Boto3Storage
staticfiles:
BACKEND: openedx.core.storage.ProductionStorage
DEFAULT_FROM_EMAIL: sandbox-notifications@example.com
DEFAULT_HASHING_ALGORITHM: sha256
DEFAULT_JWT_ISSUER:

View File

@@ -77,10 +77,11 @@ with codecs.open(CONFIG_FILE, encoding='utf-8') as f:
'MKTG_URL_LINK_MAP',
'REST_FRAMEWORK',
'EVENT_BUS_PRODUCER_CONFIG',
'DEFAULT_FILE_STORAGE',
'STATICFILES_STORAGE',
]
})
#######################################################################################################################
#### LOAD THE EDX-PLATFORM GIT REVISION
####
@@ -218,9 +219,26 @@ if AWS_SECRET_ACCESS_KEY == "":
AWS_DEFAULT_ACL = 'public-read'
AWS_BUCKET_ACL = AWS_DEFAULT_ACL
# Change to S3Boto3 if we haven't specified another default storage AND we have specified AWS creds.
if (not _YAML_TOKENS.get('DEFAULT_FILE_STORAGE')) and AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
_yaml_storages = _YAML_TOKENS.get('STORAGES', {})
_storages_default_backend_is_missing = not _yaml_storages.get('default', {}).get('BACKEND')
# For backward compatibility, if YAML provides legacy keys (DEFAULT_FILE_STORAGE, STATICFILES_STORAGE)
# and STORAGES doesn't explicitly define the corresponding backend, migrate the legacy value into STORAGES.
# If YAML doesn't provide legacy keys, no backend is defined in STORAGES['default'], and AWS creds are present,
# fall back to S3Boto3Storage.
#
# This ensures YAML-provided values take precedence over defaults from common.py,
# without overwriting user-defined STORAGES; AWS creds are treated only as a fallback.
if _storages_default_backend_is_missing:
if 'DEFAULT_FILE_STORAGE' in _YAML_TOKENS:
STORAGES['default']['BACKEND'] = _YAML_TOKENS['DEFAULT_FILE_STORAGE']
elif AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
STORAGES['default']['BACKEND'] = 'storages.backends.s3boto3.S3Boto3Storage'
# Apply legacy STATICFILES_STORAGE if no backend is defined for "staticfiles"
if 'STATICFILES_STORAGE' in _YAML_TOKENS and not _yaml_storages.get('staticfiles', {}).get('BACKEND'):
STORAGES['staticfiles']['BACKEND'] = _YAML_TOKENS['STATICFILES_STORAGE']
# The normal database user does not have enough permissions to run migrations.
# Migrations are run with separate credentials, given as DB_MIGRATION_*

View File

@@ -151,7 +151,7 @@ STATICFILES_DIRS += [
# If we don't add these settings, then Django templates that can't
# find pipelined assets will raise a ValueError.
# http://stackoverflow.com/questions/12816941/unit-testing-with-django-pipeline
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
STORAGES['staticfiles']['BACKEND'] = 'pipeline.storage.NonPackagingPipelineStorage'
# Don't use compression during tests
PIPELINE['JS_COMPRESSOR'] = None
@@ -298,7 +298,7 @@ ENTERPRISE_MARKETING_FOOTER_QUERY_PARAMS = OrderedDict([
])
############################ STATIC FILES #############################
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
STORAGES['default']['BACKEND'] = 'django.core.files.storage.FileSystemStorage'
MEDIA_ROOT = TEST_ROOT / "uploads"
MEDIA_URL = "/uploads/"
STATICFILES_DIRS.append(("uploads", MEDIA_ROOT))

View File

@@ -27,15 +27,24 @@ class CourseEnrollView extends Backbone.View {
}
render() {
let filledTemplate;
const context = this.model.toJSON();
if (this.$parentEl && this.enrollModel) {
context.collectionCourseStatus = this.collectionCourseStatus;
filledTemplate = this.tpl(context);
HtmlUtils.setHtml(this.$el, filledTemplate);
HtmlUtils.setHtml(this.$parentEl, HtmlUtils.HTML(this.$el));
}
this.postRender();
let filledTemplate;
const context = this.model.toJSON();
let hideEnrollmentDate = false;
if (context.upcoming_course_runs.length > 0) {
const currentDate = Date.now();
const upcomingEnrollmentDate = new Date(context.upcoming_course_runs[0]?.enrollment_open_date);
hideEnrollmentDate = currentDate > upcomingEnrollmentDate;
}
context.hide_enrollment_date = hideEnrollmentDate;
if (this.$parentEl && this.enrollModel) {
context.collectionCourseStatus = this.collectionCourseStatus;
filledTemplate = this.tpl(context);
HtmlUtils.setHtml(this.$el, filledTemplate);
HtmlUtils.setHtml(this.$parentEl, HtmlUtils.HTML(this.$el));
}
this.postRender();
}
postRender() {

View File

@@ -43,12 +43,14 @@
<div class="no-action-message">
<%- gettext('Coming Soon') %>
</div>
<div class="enrollment-opens">
<% if (!hide_enrollment_date) { %>
<div class="enrollment-opens">
<%- gettext('Enrollment Opens on') %>
<span class="enrollment-open-date">
<%- upcoming_course_runs[0].enrollment_open_date %>
</span>
</div>
</div>
<% } %>
<% } else { %>
<div class="no-action-message">
<%- gettext('Not Currently Available') %>

View File

@@ -9,7 +9,6 @@ files =
cms/lib/xblock/upstream_sync.py,
cms/lib/xblock/upstream_sync_container.py,
cms/djangoapps/contentstore/rest_api/v2/views/downstreams.py,
cms/djangoapps/import_from_modulestore,
openedx/core/djangoapps/content/learning_sequences,
# FIXME: need to solve type issues and add 'search' app here:
# openedx/core/djangoapps/content/search,

View File

@@ -73,10 +73,20 @@ class CourseProgramSerializer(serializers.Serializer): # lint-amnesty, pylint:
}
class PrerequisiteCourseSerializer(serializers.Serializer):
"""
Serializer for prerequisite course data with the serialized course key and display name.
"""
key = serializers.CharField()
display = serializers.CharField()
class CourseInfoSerializer(serializers.Serializer): # pylint: disable=abstract-method
"""
Serializer for Course objects providing minimal data about the course.
Compare this with CourseDetailSerializer.
For detailed information about what each field is for, see the docstring of the
CoursewareInformation class.
"""
access_expiration = serializers.DictField()
@@ -115,6 +125,40 @@ class CourseInfoSerializer(serializers.Serializer): # pylint: disable=abstract-
is_integrity_signature_enabled = serializers.BooleanField()
user_needs_integrity_signature = serializers.BooleanField()
learning_assistant_enabled = serializers.BooleanField()
show_courseware_link = serializers.BooleanField()
is_course_full = serializers.BooleanField()
can_enroll = serializers.BooleanField()
invitation_only = serializers.BooleanField()
is_shib_course = serializers.BooleanField()
allow_anonymous = serializers.BooleanField()
ecommerce_checkout = serializers.BooleanField()
single_paid_mode = serializers.DictField()
ecommerce_checkout_link = AbsoluteURLField(allow_null=True)
course_image_urls = serializers.ListField(
child=serializers.CharField(),
allow_empty=True,
default=list,
)
start_date_is_still_default = serializers.BooleanField()
advertised_start = serializers.CharField()
course_price = serializers.CharField()
pre_requisite_courses = serializers.ListField(
child=PrerequisiteCourseSerializer(),
allow_empty=True,
default=list,
)
about_sidebar_html = serializers.CharField(
allow_blank=True,
allow_null=True,
default=None,
)
display_number_with_default = serializers.CharField()
display_org_with_default = serializers.CharField()
overview = serializers.CharField(
allow_blank=True,
allow_null=True,
default=None,
)
def __init__(self, *args, **kwargs):
"""

View File

@@ -15,7 +15,7 @@ from django.test import override_settings
from django.test.client import RequestFactory
from edx_django_utils.cache import TieredCache
from edx_toggles.toggles.testutils import override_waffle_flag
from edx_toggles.toggles.testutils import override_waffle_flag, override_waffle_switch
from xmodule.data import CertificatesDisplayBehaviors
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
@@ -45,7 +45,7 @@ from common.djangoapps.student.roles import CourseInstructorRole
from common.djangoapps.student.tests.factories import CourseEnrollmentCelebrationFactory, UserFactory
from openedx.core.djangoapps.agreements.api import create_integrity_signature
from openedx.core.djangolib.testing.utils import skip_unless_lms
from openedx.features.course_experience.waffle import ENABLE_COURSE_ABOUT_SIDEBAR_HTML
User = get_user_model()
@@ -611,3 +611,257 @@ class CelebrationApiTestViews(BaseCoursewareTests, MasqueradeMixin):
# make sure they didn't change during masquerade attempt
assert celebration.celebrate_first_section
assert not celebration.celebrate_weekly_goal
@ddt.ddt
@skip_unless_lms  # If run in CMS, the tests fail as the courseware_api.views module contains imports from the LMS.
class CoursewareMetaTestViews(BaseCoursewareTests):
    """
    Unit tests exercising CoursewareMeta properties directly.
    """

    def setUp(self):
        super().setUp()
        self.course_enrollment = CourseEnrollment.enroll(self.user, self.course.id, 'audit')
        self.request = RequestFactory().get(self.url)

    def create_courseware_meta(self, user=None):
        """
        Build a CoursewareMeta for ``user`` (defaults to ``self.user``).
        """
        # Imported lazily: the views module pulls in LMS-only dependencies.
        from openedx.core.djangoapps.courseware_api.views import CoursewareMeta
        user = user or self.user
        self.request.user = user
        return CoursewareMeta(self.course.id, self.request, username=user.username)

    @ddt.data(True, False)
    def test_is_course_full_property(self, is_course_full):
        """
        ``is_course_full`` mirrors CourseEnrollment.objects.is_course_full().
        """
        patch_target = 'openedx.core.djangoapps.courseware_api.views.CourseEnrollment.objects.is_course_full'
        with mock.patch(patch_target) as mock_is_course_full:
            mock_is_course_full.return_value = is_course_full
            meta = self.create_courseware_meta()
            assert meta.is_course_full is is_course_full

    @ddt.data(True, False)
    def test_invitation_only_property(self, invitation_only):
        """
        ``invitation_only`` follows the COURSES_INVITE_ONLY setting.
        """
        with override_settings(COURSES_INVITE_ONLY=invitation_only):
            meta = self.create_courseware_meta()
            assert meta.invitation_only is invitation_only

    @ddt.data(True, False)
    @mock.patch(
        'openedx.core.djangoapps.courseware_api.views.get_course_about_section', new_callable=mock.PropertyMock
    )
    def test_about_sidebar_html_property(self, waffle_enabled, mock_get_course_about_section):
        """
        ``about_sidebar_html`` is populated only when the sidebar waffle switch is on.
        """
        mock_get_course_about_section.return_value = '<div>About Course</div>'
        with override_waffle_switch(ENABLE_COURSE_ABOUT_SIDEBAR_HTML, active=waffle_enabled):
            meta = self.create_courseware_meta()
            if waffle_enabled:
                assert meta.about_sidebar_html == '<div>About Course</div>'
            else:
                assert meta.about_sidebar_html is None
@ddt.ddt
@skip_unless_lms
class CoursewareMetaAPIResponseTestViews(BaseCoursewareTests):
    """
    Tests for the CoursewareMeta-derived fields in the API endpoint response.
    """

    def setUp(self):
        super().setUp()
        CourseEnrollment.enroll(self.user, self.course.id, 'audit')

    def _get_response_field(self, field_name):
        """Hit the courseware endpoint and return ``field_name``, asserting it is present."""
        response = self.client.get(self.url)
        assert response.status_code == 200
        assert field_name in response.data
        return response.data[field_name]

    def test_api_returns_show_courseware_link_field(self):
        """The response carries a boolean ``show_courseware_link``."""
        assert isinstance(self._get_response_field('show_courseware_link'), bool)

    def test_api_returns_is_course_full_field(self):
        """The response carries a boolean ``is_course_full``."""
        assert isinstance(self._get_response_field('is_course_full'), bool)

    def test_api_returns_can_enroll_field(self):
        """The response carries a boolean ``can_enroll``."""
        assert isinstance(self._get_response_field('can_enroll'), bool)

    def test_api_returns_invitation_only_field(self):
        """The response carries a boolean ``invitation_only``."""
        assert isinstance(self._get_response_field('invitation_only'), bool)

    def test_api_returns_is_shib_course_field(self):
        """The response carries a boolean ``is_shib_course``."""
        assert isinstance(self._get_response_field('is_shib_course'), bool)

    def test_api_returns_allow_anonymous_field(self):
        """The response carries a boolean ``allow_anonymous``."""
        assert isinstance(self._get_response_field('allow_anonymous'), bool)

    def test_api_returns_ecommerce_checkout_field(self):
        """The response carries a boolean ``ecommerce_checkout``."""
        assert isinstance(self._get_response_field('ecommerce_checkout'), bool)

    def test_api_returns_single_paid_mode_field(self):
        """The response carries a dict ``single_paid_mode``."""
        assert isinstance(self._get_response_field('single_paid_mode'), dict)

    def test_api_returns_ecommerce_checkout_link_field(self):
        """``ecommerce_checkout_link`` is a string URL or None."""
        checkout_link = self._get_response_field('ecommerce_checkout_link')
        assert isinstance(checkout_link, str) or checkout_link is None

    def test_api_returns_course_image_urls_field(self):
        """The response carries a list ``course_image_urls``."""
        assert isinstance(self._get_response_field('course_image_urls'), list)

    def test_api_returns_start_date_is_still_default_field(self):
        """The response carries a boolean ``start_date_is_still_default``."""
        assert isinstance(self._get_response_field('start_date_is_still_default'), bool)

    def test_api_returns_advertised_start_field(self):
        """``advertised_start`` is a string or None."""
        advertised_start = self._get_response_field('advertised_start')
        assert isinstance(advertised_start, str) or advertised_start is None

    def test_api_returns_course_price_field(self):
        """The response carries a string ``course_price``."""
        assert isinstance(self._get_response_field('course_price'), str)

    def test_api_returns_pre_requisite_courses_field(self):
        """The response carries a list ``pre_requisite_courses``."""
        assert isinstance(self._get_response_field('pre_requisite_courses'), list)

    @ddt.data(True, False)
    @mock.patch(
        'openedx.core.djangoapps.courseware_api.views.get_course_about_section', new_callable=mock.PropertyMock
    )
    def test_api_about_sidebar_html_with_waffle(self, waffle_enabled, mock_get_course_about_section):
        """``about_sidebar_html`` is populated only when the sidebar waffle switch is on."""
        with override_waffle_switch(ENABLE_COURSE_ABOUT_SIDEBAR_HTML, active=waffle_enabled):
            mock_get_course_about_section.return_value = '<div>About Course</div>'
            sidebar_html = self._get_response_field('about_sidebar_html')
            if waffle_enabled:
                assert sidebar_html == '<div>About Course</div>'
            else:
                assert sidebar_html is None
@ddt.ddt
@skip_unless_lms
class CoursewareMetaIntegrationTestViews(BaseCoursewareTests):
    """
    Integration tests for CoursewareMeta across user states and course configurations.
    """

    @ddt.data(
        ('audit', False),
        ('verified', True),
        ('honor', True),
        ('professional', True),
    )
    @ddt.unpack
    def test_enrollment_mode_affects_can_access_proctored_exams(self, enrollment_mode, expected_access):
        """
        Proctored-exam access in the API response depends on the enrollment mode.
        """
        CourseEnrollment.enroll(self.user, self.course.id, enrollment_mode)
        resp = self.client.get(self.url)
        assert resp.status_code == 200
        assert resp.data['can_access_proctored_exams'] == expected_access

    @mock.patch('openedx.core.djangoapps.courseware_api.views.check_public_access')
    def test_public_course_affects_allow_anonymous(self, mock_check_public_access):
        """
        A publicly visible course is reported with ``allow_anonymous`` set.
        """
        mock_check_public_access.return_value = ACCESS_GRANTED
        resp = self.client.get(self.url)
        assert resp.status_code == 200
        assert resp.data['allow_anonymous'] is True

View File

@@ -25,15 +25,22 @@ from xmodule.modulestore.exceptions import ItemNotFoundError, NoPathToItem
from xmodule.modulestore.search import path_to_location
from xmodule.x_module import PUBLIC_VIEW, STUDENT_VIEW
from common.djangoapps.course_modes.models import CourseMode
from common.djangoapps.course_modes.models import CourseMode, get_course_prices
from common.djangoapps.util.views import expose_header
from lms.djangoapps.edxnotes.helpers import is_feature_enabled
from lms.djangoapps.certificates.api import get_certificate_url
from lms.djangoapps.certificates.models import GeneratedCertificate
from lms.djangoapps.commerce.utils import EcommerceService
from lms.djangoapps.course_api.api import course_detail
from lms.djangoapps.course_goals.models import UserActivity
from lms.djangoapps.course_goals.api import get_course_goal
from lms.djangoapps.courseware.access import has_access
from lms.djangoapps.courseware.access_utils import check_public_access
from lms.djangoapps.courseware.courses import (
get_course_about_section,
get_course_with_access,
get_permission_for_course_about,
)
from lms.djangoapps.courseware.context_processor import user_timezone_locale_prefs
from lms.djangoapps.courseware.entrance_exams import course_has_entrance_exam, user_has_passed_entrance_exam
@@ -43,19 +50,24 @@ from lms.djangoapps.courseware.masquerade import (
is_masquerading_as_non_audit_enrollment,
)
from lms.djangoapps.courseware.models import LastSeenCoursewareTimezone
from lms.djangoapps.courseware.permissions import VIEW_COURSEWARE
from lms.djangoapps.courseware.block_render import get_block_by_usage_id
from lms.djangoapps.courseware.toggles import course_exit_page_is_active
from lms.djangoapps.courseware.toggles import course_exit_page_is_active, course_is_invitation_only
from lms.djangoapps.courseware.views.views import get_cert_data
from lms.djangoapps.gating.api import get_entrance_exam_score, get_entrance_exam_usage_key
from lms.djangoapps.grades.api import CourseGradeFactory
from lms.djangoapps.instructor.enrollment import uses_shib
from common.djangoapps.util.milestones_helpers import get_prerequisite_courses_display
from lms.djangoapps.verify_student.services import IDVerificationService
from openedx.core.djangoapps.agreements.api import get_integrity_signature
from openedx.core.djangoapps.courseware_api.utils import get_celebrations_dict
from openedx.core.djangoapps.enrollments.permissions import ENROLL_IN_COURSE
from openedx.core.djangoapps.programs.utils import ProgramProgressMeter
from openedx.core.lib.api.authentication import BearerAuthenticationAllowInactiveUser
from openedx.core.lib.api.view_utils import DeveloperErrorViewMixin
from openedx.core.lib.courses import get_course_by_id
from openedx.features.course_experience import ENABLE_COURSE_GOALS
from openedx.features.course_experience.waffle import ENABLE_COURSE_ABOUT_SIDEBAR_HTML
from openedx.features.content_type_gating.models import ContentTypeGatingConfig
from openedx.features.course_duration_limits.access import get_access_expiration_data
from openedx.features.discounts.utils import generate_offer_data
@@ -64,6 +76,10 @@ from common.djangoapps.student.models import (
CourseEnrollmentCelebration,
LinkedInAddToProfileConfiguration
)
from xmodule.course_block import (
COURSE_VISIBILITY_PUBLIC,
COURSE_VISIBILITY_PUBLIC_OUTLINE,
)
from .serializers import CourseInfoSerializer
@@ -75,13 +91,13 @@ class CoursewareMeta:
def __init__(self, course_key, request, username=''):
self.request = request
self.overview = course_detail(
self.course_overview = course_detail(
self.request,
username or self.request.user.username,
course_key,
)
original_user_is_staff = has_access(self.request.user, 'staff', self.overview).has_access
original_user_is_staff = has_access(self.request.user, 'staff', self.course_overview).has_access
self.original_user_is_global_staff = self.request.user.is_staff
self.course_key = course_key
self.course = get_course_by_id(self.course_key)
@@ -91,12 +107,13 @@ class CoursewareMeta:
staff_access=original_user_is_staff,
)
self.request.user = self.effective_user
self.overview.bind_course_for_student(self.request)
self.course_overview.bind_course_for_student(self.request)
self.enrollment_object = CourseEnrollment.get_enrollment(self.effective_user, self.course_key,
select_related=['celebration', 'user__celebration'])
self.ecomm_service = EcommerceService()
def __getattr__(self, name):
return getattr(self.overview, name)
return getattr(self.course_overview, name)
@property
def enrollment(self):
@@ -113,11 +130,11 @@ class CoursewareMeta:
@property
def access_expiration(self):
return get_access_expiration_data(self.effective_user, self.overview)
return get_access_expiration_data(self.effective_user, self.course_overview)
@property
def offer(self):
return generate_offer_data(self.effective_user, self.overview)
return generate_offer_data(self.effective_user, self.course_overview)
@property
def content_type_gating_enabled(self):
@@ -140,8 +157,8 @@ class CoursewareMeta:
Return whether edxnotes is enabled and visible.
"""
return {
'enabled': is_feature_enabled(self.overview, self.effective_user),
'visible': self.overview.edxnotes_visibility,
'enabled': is_feature_enabled(self.course_overview, self.effective_user),
'visible': self.course_overview.edxnotes_visibility,
}
@property
@@ -214,12 +231,12 @@ class CoursewareMeta:
"""
return {
'entrance_exam_current_score': get_entrance_exam_score(
self.course_grade, get_entrance_exam_usage_key(self.overview),
self.course_grade, get_entrance_exam_usage_key(self.course_overview),
),
'entrance_exam_enabled': course_has_entrance_exam(self.overview),
'entrance_exam_id': self.overview.entrance_exam_id,
'entrance_exam_minimum_score_pct': self.overview.entrance_exam_minimum_score_pct,
'entrance_exam_passed': user_has_passed_entrance_exam(self.effective_user, self.overview),
'entrance_exam_enabled': course_has_entrance_exam(self.course_overview),
'entrance_exam_id': self.course_overview.entrance_exam_id,
'entrance_exam_minimum_score_pct': self.course_overview.entrance_exam_minimum_score_pct,
'entrance_exam_passed': user_has_passed_entrance_exam(self.effective_user, self.course_overview),
}
@property
@@ -271,7 +288,7 @@ class CoursewareMeta:
get_certificate_url(course_id=self.course_key, uuid=user_certificate.verify_uuid)
)
return linkedin_config.add_to_profile_url(
self.overview.display_name, user_certificate.mode, cert_url, certificate=user_certificate,
self.course_overview.display_name, user_certificate.mode, cert_url, certificate=user_certificate,
)
@property
@@ -369,6 +386,146 @@ class CoursewareMeta:
"""
return getattr(settings, 'LEARNING_ASSISTANT_AVAILABLE', False)
@property
def show_courseware_link(self):
    """
    Whether the courseware link should be shown on the course details page.
    """
    with modulestore().bulk_operations(self.course_key):
        about_permission = get_permission_for_course_about()
        course = get_course_with_access(self.request.user, about_permission, self.course_key)
        # Shown when the user may view courseware, or globally via ENABLE_LMS_MIGRATION.
        return bool(
            self.request.user.has_perm(VIEW_COURSEWARE, course)
            or settings.FEATURES.get('ENABLE_LMS_MIGRATION')
        )
@property
def is_course_full(self):
    """
    Whether enrollment in the course has reached its capacity.
    """
    return CourseEnrollment.objects.is_course_full(self.course)
@property
def can_enroll(self):
    """
    Whether the requesting user has permission to enroll in the course.
    """
    requesting_user = self.request.user
    return bool(requesting_user.has_perm(ENROLL_IN_COURSE, self.course))
@property
def invitation_only(self):
    """
    Whether enrollment in the course is restricted to invited learners.
    """
    return course_is_invitation_only(self.course)
@property
def is_shib_course(self):
    """
    Whether the course uses Shibboleth-based enrollment.
    """
    return uses_shib(self.course)
@property
def allow_anonymous(self):
    """
    Whether the course content is publicly visible to anonymous users.
    """
    public_visibilities = [COURSE_VISIBILITY_PUBLIC, COURSE_VISIBILITY_PUBLIC_OUTLINE]
    return bool(check_public_access(self.course, public_visibilities))
@property
def ecommerce_checkout(self):
    """
    Whether ecommerce checkout is enabled for the requesting user.
    """
    return self.ecomm_service.is_enabled(self.request.user)
@property
def single_paid_mode(self):
    """
    Return a dict describing the course's single paid mode (sku, name,
    min_price, description), or an empty dict when none applies.
    """
    if not self.ecommerce_checkout:
        return {}
    modes = CourseMode.modes_for_course_dict(self.course_key)
    mode_values = list(modes.values())
    if len(mode_values) == 1 and mode_values[0].min_price:
        paid_mode = mode_values[0]
    else:
        # have professional ignore other modes for historical reasons
        paid_mode = modes.get(CourseMode.PROFESSIONAL)
    if not paid_mode:
        return {}
    return {
        "sku": paid_mode.sku,
        "name": paid_mode.name,
        "min_price": paid_mode.min_price,
        "description": paid_mode.description,
    }
@property
def ecommerce_checkout_link(self):
    """
    Return the ecommerce checkout URL for the course's single paid mode,
    or None when there is no purchasable mode.
    """
    # ``single_paid_mode`` is a plain dict (possibly empty); read it once so
    # the mode lookup is not recomputed, and use key access — the original
    # attribute access (``.sku``) raises AttributeError on a non-empty dict.
    paid_mode = self.single_paid_mode
    sku = paid_mode.get("sku") if paid_mode else None
    if sku:
        return self.ecomm_service.get_checkout_page_url(
            sku, course_run_keys=[self.course_key]
        )
    return None
@property
def course_image_urls(self):
    """
    Returns the course image URLs, as exposed by the course overview.
    """
    overview = self.course_overview
    return overview.image_urls
@property
def start_date_is_still_default(self):
    """
    Returns True when the course start date was never changed from the
    platform default.
    """
    overview = self.course_overview
    return overview.start_date_is_still_default
@property
def advertised_start(self):
    """
    Returns the advertised start date of the course, from the course overview.
    """
    overview = self.course_overview
    return overview.advertised_start
@property
def course_price(self):
    """
    Returns the course price, formatted with the currency symbol.
    """
    # get_course_prices returns (registration_price, formatted_price);
    # only the formatted value is needed here.
    _registration_price, formatted_price = get_course_prices(self.course)
    return formatted_price
@property
def pre_requisite_courses(self):
    """
    Returns display information for the course's prerequisite courses.
    """
    return get_prerequisite_courses_display(self.course)
@property
def about_sidebar_html(self):
    """
    Returns the "about_sidebar_html" course section when the sidebar
    feature is enabled, otherwise None.
    """
    if not ENABLE_COURSE_ABOUT_SIDEBAR_HTML.is_enabled():
        return None
    return get_course_about_section(self.request, self.course, "about_sidebar_html")
@property
def overview(self):
    """
    Returns the HTML content of the course's "overview" about section.
    """
    return get_course_about_section(self.request, self.course, "overview")
@method_decorator(transaction.non_atomic_requests, name='dispatch')
class CoursewareInformation(RetrieveAPIView):
@@ -458,9 +615,45 @@ class CoursewareInformation(RetrieveAPIView):
* certificate_data: data regarding the effective user's certificate for the given course
* verify_identity_url: URL for a learner to verify their identity. Only returned for learners enrolled in a
verified mode. Will update to reverify URL if necessary.
* verification_status: The verification status of the effective user in the course. Possible values:
* 'none': No verification has been created for the user
* 'expired': The verification has expired
* 'approved': The verification has been approved
* 'pending': The verification is pending
* 'must_reverify': The user must reverify their identity
* linkedin_add_to_profile_url: URL to add the effective user's certificate to a LinkedIn Profile.
* user_needs_integrity_signature: Whether the user needs to sign the integrity agreement for the course
* learning_assistant_enabled: Whether the Xpert Learning Assistant is enabled for the requesting user
* show_courseware_link: Whether the courseware link should be shown in the course details page
* is_course_full: Whether the course is full
* can_enroll: Whether the user can enroll in the course
* invitation_only: Whether the course is invitation only
* is_shib_course: Whether the course is a Shibboleth course
* allow_anonymous: Whether the course allows anonymous access
* ecommerce_checkout: Whether the course has an ecommerce checkout
* single_paid_mode: An object representing the single paid mode for the course, if it exists
* sku: (str) The SKU for the single paid mode
* name: (str) The name of the single paid mode
* min_price: (str) The minimum price for the single paid mode, formatted with the currency symbol
* description: (str) The description of the single paid mode
* ecommerce_checkout_link: The ecommerce checkout link for the course, if it exists
* course_image_urls: A list of course image URLs
* start_date_is_still_default: Whether the course start date is still the default value
* advertised_start: The advertised start date of the course
* course_price: The course price, formatted with the currency symbol
* pre_requisite_courses: A list of pre-requisite courses for the course
* about_sidebar_html: The HTML content for the course about section, if enabled
* display_number_with_default: The course number with the org name, if set
* display_org_with_default: The org name with the course number, if set
* content_type_gating_enabled: Whether the content type gating is enabled for the course
* show_calculator: Whether the calculator should be shown in the course details page
* can_access_proctored_exams: Whether the user is eligible to access proctored exams
* notes: An object containing note settings for the course
* enabled: Boolean indicating whether edxnotes feature is enabled for the course
* visible: Boolean indicating whether notes are visible in the course
* marketing_url: The marketing URL for the course
* overview: The overview HTML content for the course
* license: The license for the course
**Parameters:**

View File

@@ -287,7 +287,6 @@ class Model:
"close_reason_code": request_params.get("close_reason_code"),
"closing_user_id": request_params.get("closing_user_id"),
"endorsed": request_params.get("endorsed"),
"read": request_params.get("read"),
}
request_data = {k: v for k, v in request_data.items() if v is not None}
response = forum_api.update_thread(**request_data)

View File

@@ -0,0 +1,116 @@
"""
Management command for updating notification preferences with parameters
"""
import logging
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from openedx.core.djangoapps.notifications.models import NotificationPreference
from openedx.core.djangoapps.notifications.base_notification import (
COURSE_NOTIFICATION_APPS,
COURSE_NOTIFICATION_TYPES
)
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Management command to update boolean notification preferences.
This command updates channel (`web`, `push`, `email`)
in the NotificationPreference model for a given app and type.
Features:
- Requires `app` and `type`, validated against
COURSE_NOTIFICATION_APPS and COURSE_NOTIFICATION_TYPES.
- Allows updating a single channel to `true` or `false`.
- Supports optional `--user_ids` argument to limit updates
to specific users.
- Provides a `--dry-run` mode to preview changes without
committing to the database.
- Logs the number of affected records and affected user IDs.
Example usage:
python manage.py update_notification_preference discussion new_comment_on_response email false
python manage.py update_notification_preference discussion new_comment_on_response push false --user_ids 5 7 12
python manage.py update_notification_preference discussion new_comment_on_response web false --dry-run
"""
help = "Update boolean notification preferences for users at account level."
def add_arguments(self, parser):
parser.add_argument(
"app",
type=str,
choices=list(COURSE_NOTIFICATION_APPS.keys()),
help=f"App key (choices: {', '.join(COURSE_NOTIFICATION_APPS.keys())})",
)
parser.add_argument(
"type",
type=str,
choices=list(COURSE_NOTIFICATION_TYPES.keys()),
help=f"Type key (choices: {', '.join(COURSE_NOTIFICATION_TYPES.keys())})"
)
parser.add_argument(
"channel",
type=str,
choices=["web", "push", "email"],
help="channel to update"
)
parser.add_argument(
"value",
type=str,
choices=["true", "false"],
help="Boolean value (true/false)"
)
parser.add_argument(
"--user_ids",
nargs="+",
type=int,
help="Optional list of user IDs to update only",
)
parser.add_argument(
"--dry-run",
action="store_true",
help="Simulate update without saving changes",
)
def handle(self, *args, **options):
app = options["app"]
pref_type = options["type"]
channel = options["channel"]
value_str = options["value"].lower()
dry_run = options["dry_run"]
user_ids = options.get("user_ids")
if value_str in ["true"]:
new_value = True
elif value_str in ["false"]:
new_value = False
else:
raise CommandError("Value must be true/false")
queryset = NotificationPreference.objects.filter(app=app, type=pref_type)
if user_ids:
queryset = queryset.filter(user_id__in=user_ids)
queryset = queryset.exclude(**{channel: new_value}) # only ones that need updating
affected = queryset.count()
if not affected:
logger.info("No records to update.")
return
logger.info(
f"{affected} record(s) will be updated. "
)
if dry_run:
logger.info("Dry-run mode: no changes applied.")
return
with transaction.atomic():
updated = queryset.update(**{channel: new_value})
logger.info(f" Updated {updated} records.")

View File

@@ -100,7 +100,13 @@ class TestThemingViews(TestCase):
assert response.status_code == 302
assert response.url == "/static/images/logo.png"
@override_settings(STATICFILES_STORAGE="openedx.core.storage.DevelopmentStorage")
@override_settings(
STORAGES={
'staticfiles': {
'BACKEND': 'openedx.core.storage.DevelopmentStorage'
}
}
)
def test_asset_with_theme(self):
"""
Fetch theme asset when a theme is set.

View File

@@ -1231,7 +1231,6 @@ class TestAccountsAPI(FilteredQueryCountMixin, CacheIsolationTestCase, UserAPITe
)
def test_profile_backend_with_default_hardcoded_backend(self):
""" In case of empty storages scenario uses the hardcoded backend."""
del settings.DEFAULT_FILE_STORAGE
del settings.STORAGES
storage = get_profile_image_storage()
self.assertIsInstance(storage, FileSystemStorage)

View File

@@ -8,7 +8,6 @@ from urllib.parse import parse_qs, urlsplit, urlunsplit # pylint: disable=impor
import nh3
from django.conf import settings
from django.contrib.auth import logout
from django.shortcuts import redirect
from django.utils.http import urlencode
from django.views.generic import TemplateView
from oauth2_provider.models import Application
@@ -47,7 +46,13 @@ class LogoutView(TemplateView):
If a redirect_url is specified in the querystring for this request, and the value is a safe
url for redirect, the view will redirect to this page after rendering the template.
If it is not specified, we will use the default target url.
Redirect to tpa_logout_url if TPA_AUTOMATIC_LOGOUT_ENABLED is set to True and if
tpa_logout_url is configured.
"""
if getattr(settings, 'TPA_AUTOMATIC_LOGOUT_ENABLED', False) and self.tpa_logout_url:
return self.tpa_logout_url
target_url = self.request.GET.get('redirect_url') or self.request.GET.get('next')
# Some third party apps do not build URLs correctly and send next query param without URL-encoding, resulting
@@ -85,16 +90,6 @@ class LogoutView(TemplateView):
mark_user_change_as_expected(None)
# Redirect to tpa_logout_url if TPA_AUTOMATIC_LOGOUT_ENABLED is set to True and if
# tpa_logout_url is configured.
#
# NOTE: This step skips rendering logout.html, which is used to log the user out from the
# different IDAs. To ensure the user is logged out of all the IDAs be sure to redirect
# back to <LMS>/logout after logging out of the TPA.
if getattr(settings, 'TPA_AUTOMATIC_LOGOUT_ENABLED', False):
if self.tpa_logout_url:
return redirect(self.tpa_logout_url)
return response
def _build_logout_url(self, url):

View File

@@ -211,8 +211,10 @@ class LogoutTests(TestCase):
mock_idp_logout_url.return_value = idp_logout_url
self._authenticate_with_oauth(client)
response = self.client.get(reverse('logout'))
assert response.status_code == 302
assert response.url == idp_logout_url
expected = {
'target': idp_logout_url,
}
self.assertDictContainsSubset(expected, response.context_data)
@mock.patch('django.conf.settings.TPA_AUTOMATIC_LOGOUT_ENABLED', True)
def test_no_automatic_tpa_logout_without_logout_url(self):

View File

@@ -54,7 +54,7 @@ class ProductionMixin(
We use this version on production.
"""
def __init__(self, *args, **kwargs):
kwargs.update(settings.STATICFILES_STORAGE_KWARGS.get(settings.STATICFILES_STORAGE, {}))
kwargs.update(settings.STATICFILES_STORAGE_KWARGS.get(settings.STORAGES['staticfiles']['BACKEND'], {}))
super().__init__(*args, **kwargs) # lint-amnesty, pylint: disable=super-with-arguments
@@ -112,5 +112,5 @@ def get_storage(storage_class=None, **kwargs):
the storage implementation makes http requests when instantiated, for
example.
"""
storage_cls = import_string(storage_class or settings.DEFAULT_FILE_STORAGE)
storage_cls = import_string(storage_class or settings.STORAGES["default"]["BACKEND"])
return storage_cls(**kwargs)

View File

@@ -0,0 +1,54 @@
"""
Tests for the get_storage utility function.
"""
from django.test import TestCase, override_settings
from django.core.files.storage import FileSystemStorage
from openedx.core.storage import get_storage
class TestGetStorage(TestCase):
    """
    Unit tests for the ``get_storage`` utility.
    """

    def setUp(self):
        super().setUp()
        # Each test must start from an empty memoization cache.
        get_storage.cache_clear()

    def tearDown(self):
        get_storage.cache_clear()

    @override_settings(
        STORAGES={
            'default': {
                'BACKEND': 'django.core.files.storage.FileSystemStorage'
            }
        }
    )
    def test_get_storage_returns_default_storage_when_no_class_specified(self):
        """With no explicit class, the configured default backend is used."""
        self.assertIsInstance(get_storage(), FileSystemStorage)

    def test_get_storage_returns_custom_storage_when_class_specified(self):
        """An explicit dotted path selects that storage class."""
        instance = get_storage(storage_class='django.core.files.storage.FileSystemStorage')
        self.assertIsInstance(instance, FileSystemStorage)

    def test_get_storage_caching_behavior(self):
        """Identical arguments must yield the very same cached instance."""
        backend = 'django.core.files.storage.FileSystemStorage'
        first = get_storage(storage_class=backend, location='/test/path')
        second = get_storage(storage_class=backend, location='/test/path')
        self.assertIs(first, second)

    def test_get_storage_handles_invalid_storage_class(self):
        """A dotted path that cannot be imported raises ImportError."""
        with self.assertRaises(ImportError):
            get_storage(storage_class='nonexistent.storage.InvalidStorage')

View File

@@ -47,7 +47,7 @@ django-stubs<6
# The team that owns this package will manually bump this package rather than having it pulled in automatically.
# This is to allow them to better control its deployment and to do it in a process that works better
# for them.
edx-enterprise==6.3.0
edx-enterprise==6.3.2
# Date: 2023-07-26
# Our legacy Sass code is incompatible with anything except this ancient libsass version.
@@ -120,12 +120,6 @@ social-auth-app-django<=5.4.1
# Issue for unpinning: https://github.com/openedx/edx-platform/issues/35126
elasticsearch==7.9.1
# Date 2025-03-21
# social-auth-core>4.5.4 breaks tests with authorization on LinkedIn API
# Both of these constraints will be updated in a follow up PR under the following issue:
# https://github.com/openedx/edx-platform/issues/36425
social-auth-core==4.5.4
# Date 2025-05-09
# lxml and xmlsec need to be constrained because the latest version builds against a newer
# version of libxml2 than what we're running with. This leads to a version mismatch error

View File

@@ -4,7 +4,7 @@
#
# make upgrade
#
cffi==1.17.1
cffi==2.0.0
# via cryptography
chem==2.0.0
# via -r requirements/edx-sandbox/base.in
@@ -14,7 +14,7 @@ codejail-includes==2.0.0
# via -r requirements/edx-sandbox/base.in
contourpy==1.3.3
# via matplotlib
cryptography==45.0.6
cryptography==45.0.7
# via -r requirements/edx-sandbox/base.in
cycler==0.12.1
# via matplotlib
@@ -36,7 +36,7 @@ markupsafe==3.0.2
# via
# chem
# openedx-calc
matplotlib==3.10.5
matplotlib==3.10.6
# via -r requirements/edx-sandbox/base.in
mpmath==1.3.0
# via sympy
@@ -72,7 +72,7 @@ python-dateutil==2.9.0.post0
# via matplotlib
random2==1.0.2
# via -r requirements/edx-sandbox/base.in
regex==2025.7.34
regex==2025.9.1
# via nltk
scipy==1.16.1
# via

View File

@@ -68,14 +68,14 @@ bleach[css]==6.2.0
# xblock-poll
boto==2.49.0
# via -r requirements/edx/kernel.in
boto3==1.40.20
boto3==1.40.26
# via
# -r requirements/edx/kernel.in
# django-ses
# fs-s3fs
# ora2
# snowflake-connector-python
botocore==1.40.20
botocore==1.40.26
# via
# -r requirements/edx/kernel.in
# boto3
@@ -146,7 +146,7 @@ codejail-includes==2.0.0
# via -r requirements/edx/kernel.in
crowdsourcehinter-xblock==0.8
# via -r requirements/edx/bundled.in
cryptography==45.0.6
cryptography==45.0.7
# via
# -r requirements/edx/kernel.in
# django-fernet-fields-v2
@@ -167,7 +167,7 @@ defusedxml==0.7.1
# ora2
# python3-openid
# social-auth-core
django==4.2.23
django==4.2.24
# via
# -c requirements/edx/../constraints.txt
# -r requirements/edx/kernel.in
@@ -258,7 +258,7 @@ django-config-models==2.9.0
# edx-name-affirmation
# enterprise-integrated-channels
# lti-consumer-xblock
django-cors-headers==4.7.0
django-cors-headers==4.8.0
# via -r requirements/edx/kernel.in
django-countries==7.6.1
# via
@@ -316,7 +316,7 @@ django-mptt==0.18.0
# openedx-django-wiki
django-multi-email-field==0.8.0
# via edx-enterprise
django-mysql==4.17.0
django-mysql==4.18.0
# via -r requirements/edx/kernel.in
django-oauth-toolkit==1.7.1
# via
@@ -397,7 +397,7 @@ djangorestframework==3.16.1
# super-csv
djangorestframework-xml==2.0.0
# via edx-enterprise
dnspython==2.7.0
dnspython==2.8.0
# via pymongo
done-xblock==2.5.0
# via -r requirements/edx/bundled.in
@@ -474,7 +474,7 @@ edx-drf-extensions==10.6.0
# edxval
# enterprise-integrated-channels
# openedx-learning
edx-enterprise==6.3.0
edx-enterprise==6.3.2
# via
# -c requirements/edx/../constraints.txt
# -r requirements/edx/kernel.in
@@ -523,7 +523,7 @@ edx-rest-api-client==6.2.0
# edx-enterprise
# edx-proctoring
# enterprise-integrated-channels
edx-search==4.1.3
edx-search==4.3.0
# via
# -r requirements/edx/kernel.in
# openedx-forum
@@ -566,7 +566,7 @@ enmerkar==0.7.1
# via enmerkar-underscore
enmerkar-underscore==2.4.0
# via -r requirements/edx/kernel.in
enterprise-integrated-channels==0.1.14
enterprise-integrated-channels==0.1.15
# via -r requirements/edx/bundled.in
event-tracking==3.3.0
# via
@@ -618,7 +618,7 @@ google-cloud-core==2.4.3
# google-cloud-storage
google-cloud-firestore==2.21.0
# via firebase-admin
google-cloud-storage==3.3.0
google-cloud-storage==3.3.1
# via firebase-admin
google-crc32c==1.7.1
# via
@@ -702,7 +702,7 @@ jsonschema==4.25.1
# via
# drf-spectacular
# optimizely-sdk
jsonschema-specifications==2025.4.1
jsonschema-specifications==2025.9.1
# via jsonschema
jwcrypto==1.5.6
# via
@@ -747,7 +747,7 @@ mako==1.3.10
# lti-consumer-xblock
# xblock
# xblock-utils
markdown==3.8.2
markdown==3.9
# via
# -r requirements/edx/kernel.in
# openedx-django-wiki
@@ -770,7 +770,7 @@ mongoengine==0.29.1
# via -r requirements/edx/kernel.in
monotonic==1.6
# via analytics-python
more-itertools==10.7.0
more-itertools==10.8.0
# via cssutils
mpmath==1.3.0
# via sympy
@@ -843,7 +843,7 @@ openedx-filters==2.1.0
# -r requirements/edx/kernel.in
# lti-consumer-xblock
# ora2
openedx-forum==0.3.4
openedx-forum==0.3.6
# via -r requirements/edx/kernel.in
openedx-learning==0.27.1
# via
@@ -1039,7 +1039,7 @@ referencing==0.36.2
# via
# jsonschema
# jsonschema-specifications
regex==2025.7.34
regex==2025.9.1
# via nltk
requests==2.32.5
# via
@@ -1127,16 +1127,15 @@ slumber==0.7.1
# enterprise-integrated-channels
sniffio==1.3.1
# via anyio
snowflake-connector-python==3.17.2
snowflake-connector-python==3.17.3
# via edx-enterprise
social-auth-app-django==5.4.1
# via
# -c requirements/edx/../constraints.txt
# -r requirements/edx/kernel.in
# edx-auth-backends
social-auth-core==4.5.4
social-auth-core==4.7.0
# via
# -c requirements/edx/../constraints.txt
# -r requirements/edx/kernel.in
# edx-auth-backends
# social-auth-app-django

View File

@@ -6,7 +6,7 @@
#
chardet==5.2.0
# via diff-cover
coverage==7.10.5
coverage==7.10.6
# via -r requirements/edx/coverage.in
diff-cover==9.6.0
# via -r requirements/edx/coverage.in

View File

@@ -136,7 +136,7 @@ boto==2.49.0
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
boto3==1.40.20
boto3==1.40.26
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
@@ -144,7 +144,7 @@ boto3==1.40.20
# fs-s3fs
# ora2
# snowflake-connector-python
botocore==1.40.20
botocore==1.40.26
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
@@ -277,7 +277,7 @@ colorama==0.4.6
# via
# -r requirements/edx/testing.txt
# tox
coverage[toml]==7.10.5
coverage[toml]==7.10.6
# via
# -r requirements/edx/testing.txt
# pytest-cov
@@ -285,7 +285,7 @@ crowdsourcehinter-xblock==0.8
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
cryptography==45.0.6
cryptography==45.0.7
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
@@ -331,7 +331,7 @@ distlib==0.4.0
# via
# -r requirements/edx/testing.txt
# virtualenv
django==4.2.23
django==4.2.24
# via
# -c requirements/edx/../constraints.txt
# -r requirements/edx/doc.txt
@@ -440,7 +440,7 @@ django-config-models==2.9.0
# edx-name-affirmation
# enterprise-integrated-channels
# lti-consumer-xblock
django-cors-headers==4.7.0
django-cors-headers==4.8.0
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
@@ -520,7 +520,7 @@ django-multi-email-field==0.8.0
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
# edx-enterprise
django-mysql==4.17.0
django-mysql==4.18.0
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
@@ -636,7 +636,7 @@ djangorestframework-xml==2.0.0
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
# edx-enterprise
dnspython==2.7.0
dnspython==2.8.0
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
@@ -748,7 +748,7 @@ edx-drf-extensions==10.6.0
# edxval
# enterprise-integrated-channels
# openedx-learning
edx-enterprise==6.3.0
edx-enterprise==6.3.2
# via
# -c requirements/edx/../constraints.txt
# -r requirements/edx/doc.txt
@@ -817,7 +817,7 @@ edx-rest-api-client==6.2.0
# edx-enterprise
# edx-proctoring
# enterprise-integrated-channels
edx-search==4.1.3
edx-search==4.3.0
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
@@ -877,7 +877,7 @@ enmerkar-underscore==2.4.0
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
enterprise-integrated-channels==0.1.14
enterprise-integrated-channels==0.1.15
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
@@ -985,7 +985,7 @@ google-cloud-firestore==2.21.0
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
# firebase-admin
google-cloud-storage==3.3.0
google-cloud-storage==3.3.1
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
@@ -1007,7 +1007,7 @@ googleapis-common-protos==1.70.0
# -r requirements/edx/testing.txt
# google-api-core
# grpcio-status
grimp==3.9
grimp==3.11
# via
# -r requirements/edx/testing.txt
# import-linter
@@ -1138,7 +1138,6 @@ joblib==1.5.2
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
# grimp
# nltk
jsondiff==2.2.1
# via
@@ -1163,7 +1162,7 @@ jsonschema==4.25.1
# drf-spectacular
# optimizely-sdk
# sphinxcontrib-openapi
jsonschema-specifications==2025.4.1
jsonschema-specifications==2025.9.1
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
@@ -1237,7 +1236,7 @@ mako==1.3.10
# lti-consumer-xblock
# xblock
# xblock-utils
markdown==3.8.2
markdown==3.9
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
@@ -1282,7 +1281,7 @@ monotonic==1.6
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
# analytics-python
more-itertools==10.7.0
more-itertools==10.8.0
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
@@ -1398,7 +1397,7 @@ openedx-filters==2.1.0
# -r requirements/edx/testing.txt
# lti-consumer-xblock
# ora2
openedx-forum==0.3.4
openedx-forum==0.3.6
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
@@ -1706,7 +1705,7 @@ pytest==8.2.0
# pytest-xdist
pytest-attrib==0.1.3
# via -r requirements/edx/testing.txt
pytest-cov==6.2.1
pytest-cov==6.3.0
# via -r requirements/edx/testing.txt
pytest-django==4.11.1
# via -r requirements/edx/testing.txt
@@ -1813,7 +1812,7 @@ referencing==0.36.2
# -r requirements/edx/testing.txt
# jsonschema
# jsonschema-specifications
regex==2025.7.34
regex==2025.9.1
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
@@ -1955,7 +1954,7 @@ snowballstemmer==3.0.1
# via
# -r requirements/edx/doc.txt
# sphinx
snowflake-connector-python==3.17.2
snowflake-connector-python==3.17.3
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
@@ -1966,9 +1965,8 @@ social-auth-app-django==5.4.1
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
# edx-auth-backends
social-auth-core==4.5.4
social-auth-core==4.7.0
# via
# -c requirements/edx/../constraints.txt
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
# edx-auth-backends

View File

@@ -103,14 +103,14 @@ bleach[css]==6.2.0
# xblock-poll
boto==2.49.0
# via -r requirements/edx/base.txt
boto3==1.40.20
boto3==1.40.26
# via
# -r requirements/edx/base.txt
# django-ses
# fs-s3fs
# ora2
# snowflake-connector-python
botocore==1.40.20
botocore==1.40.26
# via
# -r requirements/edx/base.txt
# boto3
@@ -200,7 +200,7 @@ codejail-includes==2.0.0
# via -r requirements/edx/base.txt
crowdsourcehinter-xblock==0.8
# via -r requirements/edx/base.txt
cryptography==45.0.6
cryptography==45.0.7
# via
# -r requirements/edx/base.txt
# django-fernet-fields-v2
@@ -225,7 +225,7 @@ defusedxml==0.7.1
# ora2
# python3-openid
# social-auth-core
django==4.2.23
django==4.2.24
# via
# -c requirements/edx/../constraints.txt
# -r requirements/edx/base.txt
@@ -322,7 +322,7 @@ django-config-models==2.9.0
# edx-name-affirmation
# enterprise-integrated-channels
# lti-consumer-xblock
django-cors-headers==4.7.0
django-cors-headers==4.8.0
# via -r requirements/edx/base.txt
django-countries==7.6.1
# via
@@ -385,7 +385,7 @@ django-multi-email-field==0.8.0
# via
# -r requirements/edx/base.txt
# edx-enterprise
django-mysql==4.17.0
django-mysql==4.18.0
# via -r requirements/edx/base.txt
django-oauth-toolkit==1.7.1
# via
@@ -471,7 +471,7 @@ djangorestframework-xml==2.0.0
# via
# -r requirements/edx/base.txt
# edx-enterprise
dnspython==2.7.0
dnspython==2.8.0
# via
# -r requirements/edx/base.txt
# pymongo
@@ -558,7 +558,7 @@ edx-drf-extensions==10.6.0
# edxval
# enterprise-integrated-channels
# openedx-learning
edx-enterprise==6.3.0
edx-enterprise==6.3.2
# via
# -c requirements/edx/../constraints.txt
# -r requirements/edx/base.txt
@@ -608,7 +608,7 @@ edx-rest-api-client==6.2.0
# edx-enterprise
# edx-proctoring
# enterprise-integrated-channels
edx-search==4.1.3
edx-search==4.3.0
# via
# -r requirements/edx/base.txt
# openedx-forum
@@ -655,7 +655,7 @@ enmerkar==0.7.1
# enmerkar-underscore
enmerkar-underscore==2.4.0
# via -r requirements/edx/base.txt
enterprise-integrated-channels==0.1.14
enterprise-integrated-channels==0.1.15
# via -r requirements/edx/base.txt
event-tracking==3.3.0
# via
@@ -725,7 +725,7 @@ google-cloud-firestore==2.21.0
# via
# -r requirements/edx/base.txt
# firebase-admin
google-cloud-storage==3.3.0
google-cloud-storage==3.3.1
# via
# -r requirements/edx/base.txt
# firebase-admin
@@ -849,7 +849,7 @@ jsonschema==4.25.1
# drf-spectacular
# optimizely-sdk
# sphinxcontrib-openapi
jsonschema-specifications==2025.4.1
jsonschema-specifications==2025.9.1
# via
# -r requirements/edx/base.txt
# jsonschema
@@ -904,7 +904,7 @@ mako==1.3.10
# lti-consumer-xblock
# xblock
# xblock-utils
markdown==3.8.2
markdown==3.9
# via
# -r requirements/edx/base.txt
# openedx-django-wiki
@@ -934,7 +934,7 @@ monotonic==1.6
# via
# -r requirements/edx/base.txt
# analytics-python
more-itertools==10.7.0
more-itertools==10.8.0
# via
# -r requirements/edx/base.txt
# cssutils
@@ -1019,7 +1019,7 @@ openedx-filters==2.1.0
# -r requirements/edx/base.txt
# lti-consumer-xblock
# ora2
openedx-forum==0.3.4
openedx-forum==0.3.6
# via -r requirements/edx/base.txt
openedx-learning==0.27.1
# via
@@ -1268,7 +1268,7 @@ referencing==0.36.2
# -r requirements/edx/base.txt
# jsonschema
# jsonschema-specifications
regex==2025.7.34
regex==2025.9.1
# via
# -r requirements/edx/base.txt
# nltk
@@ -1380,7 +1380,7 @@ sniffio==1.3.1
# anyio
snowballstemmer==3.0.1
# via sphinx
snowflake-connector-python==3.17.2
snowflake-connector-python==3.17.3
# via
# -r requirements/edx/base.txt
# edx-enterprise
@@ -1389,9 +1389,8 @@ social-auth-app-django==5.4.1
# -c requirements/edx/../constraints.txt
# -r requirements/edx/base.txt
# edx-auth-backends
social-auth-core==4.5.4
social-auth-core==4.7.0
# via
# -c requirements/edx/../constraints.txt
# -r requirements/edx/base.txt
# edx-auth-backends
# social-auth-app-django

View File

@@ -49,7 +49,7 @@ importlib-metadata==7.1.0
# via opentelemetry-api
jsonschema==4.25.1
# via semgrep
jsonschema-specifications==2025.4.1
jsonschema-specifications==2025.9.1
# via jsonschema
markdown-it-py==4.0.0
# via rich
@@ -113,7 +113,7 @@ ruamel-yaml==0.18.15
# via semgrep
ruamel-yaml-clib==0.2.12
# via ruamel-yaml
semgrep==1.134.0
semgrep==1.135.0
# via -r requirements/edx/semgrep.in
tomli==2.0.2
# via semgrep

View File

@@ -100,14 +100,14 @@ bleach[css]==6.2.0
# xblock-poll
boto==2.49.0
# via -r requirements/edx/base.txt
boto3==1.40.20
boto3==1.40.26
# via
# -r requirements/edx/base.txt
# django-ses
# fs-s3fs
# ora2
# snowflake-connector-python
botocore==1.40.20
botocore==1.40.26
# via
# -r requirements/edx/base.txt
# boto3
@@ -210,13 +210,13 @@ codejail-includes==2.0.0
# via -r requirements/edx/base.txt
colorama==0.4.6
# via tox
coverage[toml]==7.10.5
coverage[toml]==7.10.6
# via
# -r requirements/edx/coverage.txt
# pytest-cov
crowdsourcehinter-xblock==0.8
# via -r requirements/edx/base.txt
cryptography==45.0.6
cryptography==45.0.7
# via
# -r requirements/edx/base.txt
# django-fernet-fields-v2
@@ -251,7 +251,7 @@ dill==0.4.0
# via pylint
distlib==0.4.0
# via virtualenv
django==4.2.23
django==4.2.24
# via
# -c requirements/edx/../constraints.txt
# -r requirements/edx/base.txt
@@ -348,7 +348,7 @@ django-config-models==2.9.0
# edx-name-affirmation
# enterprise-integrated-channels
# lti-consumer-xblock
django-cors-headers==4.7.0
django-cors-headers==4.8.0
# via -r requirements/edx/base.txt
django-countries==7.6.1
# via
@@ -411,7 +411,7 @@ django-multi-email-field==0.8.0
# via
# -r requirements/edx/base.txt
# edx-enterprise
django-mysql==4.17.0
django-mysql==4.18.0
# via -r requirements/edx/base.txt
django-oauth-toolkit==1.7.1
# via
@@ -497,7 +497,7 @@ djangorestframework-xml==2.0.0
# via
# -r requirements/edx/base.txt
# edx-enterprise
dnspython==2.7.0
dnspython==2.8.0
# via
# -r requirements/edx/base.txt
# pymongo
@@ -579,7 +579,7 @@ edx-drf-extensions==10.6.0
# edxval
# enterprise-integrated-channels
# openedx-learning
edx-enterprise==6.3.0
edx-enterprise==6.3.2
# via
# -c requirements/edx/../constraints.txt
# -r requirements/edx/base.txt
@@ -631,7 +631,7 @@ edx-rest-api-client==6.2.0
# edx-enterprise
# edx-proctoring
# enterprise-integrated-channels
edx-search==4.1.3
edx-search==4.3.0
# via
# -r requirements/edx/base.txt
# openedx-forum
@@ -678,7 +678,7 @@ enmerkar==0.7.1
# enmerkar-underscore
enmerkar-underscore==2.4.0
# via -r requirements/edx/base.txt
enterprise-integrated-channels==0.1.14
enterprise-integrated-channels==0.1.15
# via -r requirements/edx/base.txt
event-tracking==3.3.0
# via
@@ -756,7 +756,7 @@ google-cloud-firestore==2.21.0
# via
# -r requirements/edx/base.txt
# firebase-admin
google-cloud-storage==3.3.0
google-cloud-storage==3.3.1
# via
# -r requirements/edx/base.txt
# firebase-admin
@@ -774,7 +774,7 @@ googleapis-common-protos==1.70.0
# -r requirements/edx/base.txt
# google-api-core
# grpcio-status
grimp==3.9
grimp==3.11
# via import-linter
grpcio==1.74.0
# via
@@ -870,7 +870,6 @@ jmespath==1.0.1
joblib==1.5.2
# via
# -r requirements/edx/base.txt
# grimp
# nltk
jsondiff==2.2.1
# via
@@ -891,7 +890,7 @@ jsonschema==4.25.1
# -r requirements/edx/base.txt
# drf-spectacular
# optimizely-sdk
jsonschema-specifications==2025.4.1
jsonschema-specifications==2025.9.1
# via
# -r requirements/edx/base.txt
# jsonschema
@@ -947,7 +946,7 @@ mako==1.3.10
# lti-consumer-xblock
# xblock
# xblock-utils
markdown==3.8.2
markdown==3.9
# via
# -r requirements/edx/base.txt
# openedx-django-wiki
@@ -980,7 +979,7 @@ monotonic==1.6
# via
# -r requirements/edx/base.txt
# analytics-python
more-itertools==10.7.0
more-itertools==10.8.0
# via
# -r requirements/edx/base.txt
# cssutils
@@ -1065,7 +1064,7 @@ openedx-filters==2.1.0
# -r requirements/edx/base.txt
# lti-consumer-xblock
# ora2
openedx-forum==0.3.4
openedx-forum==0.3.6
# via -r requirements/edx/base.txt
openedx-learning==0.27.1
# via
@@ -1292,7 +1291,7 @@ pytest==8.2.0
# pytest-xdist
pytest-attrib==0.1.3
# via -r requirements/edx/testing.in
pytest-cov==6.2.1
pytest-cov==6.3.0
# via -r requirements/edx/testing.in
pytest-django==4.11.1
# via -r requirements/edx/testing.in
@@ -1378,7 +1377,7 @@ referencing==0.36.2
# -r requirements/edx/base.txt
# jsonschema
# jsonschema-specifications
regex==2025.7.34
regex==2025.9.1
# via
# -r requirements/edx/base.txt
# nltk
@@ -1487,7 +1486,7 @@ sniffio==1.3.1
# via
# -r requirements/edx/base.txt
# anyio
snowflake-connector-python==3.17.2
snowflake-connector-python==3.17.3
# via
# -r requirements/edx/base.txt
# edx-enterprise
@@ -1496,9 +1495,8 @@ social-auth-app-django==5.4.1
# -c requirements/edx/../constraints.txt
# -r requirements/edx/base.txt
# edx-auth-backends
social-auth-core==4.5.4
social-auth-core==4.7.0
# via
# -c requirements/edx/../constraints.txt
# -r requirements/edx/base.txt
# edx-auth-backends
# social-auth-app-django

View File

@@ -10,7 +10,7 @@ click==8.2.1
# click-log
click-log==0.4.0
# via -r scripts/structures_pruning/requirements/base.in
dnspython==2.7.0
dnspython==2.8.0
# via pymongo
edx-opaque-keys==3.0.0
# via -r scripts/structures_pruning/requirements/base.in

View File

@@ -12,7 +12,7 @@ click-log==0.4.0
# via -r scripts/structures_pruning/requirements/base.txt
ddt==1.7.2
# via -r scripts/structures_pruning/requirements/testing.in
dnspython==2.7.0
dnspython==2.8.0
# via
# -r scripts/structures_pruning/requirements/base.txt
# pymongo
@@ -30,7 +30,7 @@ pymongo==4.4.0
# via
# -r scripts/structures_pruning/requirements/base.txt
# edx-opaque-keys
pytest==8.4.1
pytest==8.4.2
# via -r scripts/structures_pruning/requirements/testing.in
stevedore==5.5.0
# via

View File

@@ -10,9 +10,9 @@ attrs==25.3.0
# via zeep
backoff==2.2.1
# via -r scripts/user_retirement/requirements/base.in
boto3==1.40.20
boto3==1.40.26
# via -r scripts/user_retirement/requirements/base.in
botocore==1.40.20
botocore==1.40.26
# via
# boto3
# s3transfer
@@ -20,7 +20,7 @@ cachetools==5.5.2
# via google-auth
certifi==2025.8.3
# via requests
cffi==1.17.1
cffi==2.0.0
# via
# cryptography
# pynacl
@@ -30,9 +30,9 @@ click==8.2.1
# via
# -r scripts/user_retirement/requirements/base.in
# edx-django-utils
cryptography==45.0.6
cryptography==45.0.7
# via pyjwt
django==4.2.23
django==4.2.24
# via
# -c scripts/user_retirement/requirements/../../../requirements/constraints.txt
# django-crum
@@ -48,7 +48,7 @@ edx-rest-api-client==6.2.0
# via -r scripts/user_retirement/requirements/base.in
google-api-core==2.25.1
# via google-api-python-client
google-api-python-client==2.179.0
google-api-python-client==2.181.0
# via -r scripts/user_retirement/requirements/base.in
google-auth==2.40.3
# via
@@ -59,7 +59,7 @@ google-auth-httplib2==0.2.0
# via google-api-python-client
googleapis-common-protos==1.70.0
# via google-api-core
httplib2==0.22.0
httplib2==0.30.0
# via
# google-api-python-client
# google-auth-httplib2
@@ -77,7 +77,7 @@ lxml==5.3.2
# via
# -c scripts/user_retirement/requirements/../../../requirements/constraints.txt
# zeep
more-itertools==10.7.0
more-itertools==10.8.0
# via simple-salesforce
platformdirs==4.4.0
# via zeep

View File

@@ -14,11 +14,11 @@ attrs==25.3.0
# zeep
backoff==2.2.1
# via -r scripts/user_retirement/requirements/base.txt
boto3==1.40.20
boto3==1.40.26
# via
# -r scripts/user_retirement/requirements/base.txt
# moto
botocore==1.40.20
botocore==1.40.26
# via
# -r scripts/user_retirement/requirements/base.txt
# boto3
@@ -32,7 +32,7 @@ certifi==2025.8.3
# via
# -r scripts/user_retirement/requirements/base.txt
# requests
cffi==1.17.1
cffi==2.0.0
# via
# -r scripts/user_retirement/requirements/base.txt
# cryptography
@@ -45,14 +45,14 @@ click==8.2.1
# via
# -r scripts/user_retirement/requirements/base.txt
# edx-django-utils
cryptography==45.0.6
cryptography==45.0.7
# via
# -r scripts/user_retirement/requirements/base.txt
# moto
# pyjwt
ddt==1.7.2
# via -r scripts/user_retirement/requirements/testing.in
django==4.2.23
django==4.2.24
# via
# -r scripts/user_retirement/requirements/base.txt
# django-crum
@@ -76,7 +76,7 @@ google-api-core==2.25.1
# via
# -r scripts/user_retirement/requirements/base.txt
# google-api-python-client
google-api-python-client==2.179.0
google-api-python-client==2.181.0
# via -r scripts/user_retirement/requirements/base.txt
google-auth==2.40.3
# via
@@ -92,7 +92,7 @@ googleapis-common-protos==1.70.0
# via
# -r scripts/user_retirement/requirements/base.txt
# google-api-core
httplib2==0.22.0
httplib2==0.30.0
# via
# -r scripts/user_retirement/requirements/base.txt
# google-api-python-client
@@ -126,11 +126,11 @@ markupsafe==3.0.2
# werkzeug
mock==5.2.0
# via -r scripts/user_retirement/requirements/testing.in
more-itertools==10.7.0
more-itertools==10.8.0
# via
# -r scripts/user_retirement/requirements/base.txt
# simple-salesforce
moto==5.1.11
moto==5.1.12
# via -r scripts/user_retirement/requirements/testing.in
packaging==25.0
# via pytest
@@ -182,7 +182,7 @@ pyparsing==3.2.3
# via
# -r scripts/user_retirement/requirements/base.txt
# httplib2
pytest==8.4.1
pytest==8.4.2
# via -r scripts/user_retirement/requirements/testing.in
python-dateutil==2.9.0.post0
# via
@@ -268,7 +268,7 @@ urllib3==2.5.0
# responses
werkzeug==3.1.3
# via moto
xmltodict==0.14.2
xmltodict==0.15.1
# via moto
zeep==4.3.1
# via

View File

@@ -9,6 +9,7 @@ from abc import abstractmethod
from json import dumps
import lxml.etree
from edx_django_utils.monitoring import set_custom_attribute
from fs.osfs import OSFS
from opaque_keys.edx.locator import CourseLocator, LibraryLocator
from xblock.fields import Reference, ReferenceList, ReferenceValueDict, Scope
@@ -207,6 +208,7 @@ class CourseExportManager(ExportManager):
def process_extra(self, root, courselike, root_courselike_dir, xml_centric_courselike_key, export_fs):
# Export the modulestore's asset metadata.
set_custom_attribute("export_asset_started", str(courselike))
asset_dir = root_courselike_dir + '/' + AssetMetadata.EXPORTED_ASSET_DIR + '/'
if not os.path.isdir(asset_dir):
os.makedirs(asset_dir)
@@ -220,6 +222,7 @@ class CourseExportManager(ExportManager):
lxml.etree.ElementTree(asset_root).write(asset_xml_file, encoding='utf-8')
# export the static assets
set_custom_attribute("export_static_assets_started", str(courselike))
policies_dir = export_fs.makedir('policies', recreate=True)
if self.contentstore:
self.contentstore.export_all_for_course(
@@ -248,24 +251,28 @@ class CourseExportManager(ExportManager):
course_image_file.write(course_image.data)
# export the static tabs
set_custom_attribute("export_tabs_started", str(courselike))
export_extra_content(
export_fs, self.modulestore, self.courselike_key, xml_centric_courselike_key,
'static_tab', 'tabs', '.html'
)
# export the custom tags
set_custom_attribute("export_custom_tags_started", str(courselike))
export_extra_content(
export_fs, self.modulestore, self.courselike_key, xml_centric_courselike_key,
'custom_tag_template', 'custom_tags'
)
# export the course updates
set_custom_attribute("export_course_updates_started", str(courselike))
export_extra_content(
export_fs, self.modulestore, self.courselike_key, xml_centric_courselike_key,
'course_info', 'info', '.html'
)
# export the 'about' data (e.g. overview, etc.)
set_custom_attribute("export_about_started", str(courselike))
export_extra_content(
export_fs, self.modulestore, self.courselike_key, xml_centric_courselike_key,
'about', 'about', '.html'
@@ -280,10 +287,12 @@ class CourseExportManager(ExportManager):
sort_keys=True, indent=4).encode('utf-8'))
# export all of the course metadata in policy.json
set_custom_attribute("export_policy_started", str(courselike))
with course_run_policy_dir.open('policy.json', 'wb') as course_policy:
policy = {'course/' + courselike.location.run: own_metadata(courselike)}
course_policy.write(dumps(policy, cls=EdxJSONEncoder, sort_keys=True, indent=4).encode('utf-8'))
set_custom_attribute("export_drafts_started", str(courselike))
_export_drafts(self.modulestore, self.courselike_key, export_fs, xml_centric_courselike_key)
courselike_key_str = str(self.courselike_key)