fix: Various fixes to modulestore_migrator (#37711)
For legacy library_content references in courses, this PR: - **Removes the spurious sync after updating a reference to a migrated library**, so that users don't need to "update" their content _after_ updating their reference, _unless_ there were real content edits that happened since they last synced. We do this by correctly associating a DraftChangeLogRecord with the ModulestoreBlockSource migration artifact, and then comparing that version information before offering a sync. (related issue: https://github.com/openedx/frontend-app-authoring/issues/2626). - **Prompts users to update a reference to a migrated library with higher priority than prompting them to sync legacy content updates for that reference**, so that users don't end up needing to accept legacy content updates in order to get to a point where they can update to V2 content. - **Ensures the library references in courses always follow the correct migration,** as defined by the data `forwarded` fields in the data model, which are populated based on the REST API spec and the stated product UI requirements. * For the migration itself, this PR: - **Allows non-admins to migrate libraries**, fixing: https://github.com/openedx/edx-platform/issues/37774 - **When triggered via the UI, ensures the migration uses nice title-based target slugs instead of ugly source-hash-based slugs.** We've had this as an option for a long time, but preserve_url_slugs defaulted to True instead of False in the REST API serializer, so we weren't taking advantage of it. - **Unifies logic between single-source and bulk migration**. These were implemented as two separate code paths, with drift in their implementations. In particular, the collection update-vs-create-new logic was completely different for single-source vs. bulk. - **When using the Skip or Update strategies for repeats, it consistently follows mappings established by the latest successful migration** rather than following mappings across arbitrary previous migrations. 
- **We log unexpected exceptions more often**, although there is so much more room for improvement here. - **Adds more validation to the REST API** so that client mistakes more often become 400s with validation messages rather than 500s. For developers, this PR: - Adds unit tests to the REST API - Ensures that all migration business logic now goes through a general-purpose Python API. - Ensures that the data model (specifically `forwarded`, and `change_log_record`) is now populated and respected. - Adds more type annotations.
This commit is contained in:
@@ -28,26 +28,11 @@ class LibraryViewSerializer(serializers.Serializer):
|
||||
org = serializers.CharField()
|
||||
number = serializers.CharField()
|
||||
can_edit = serializers.BooleanField()
|
||||
is_migrated = serializers.SerializerMethodField()
|
||||
migrated_to_title = serializers.CharField(
|
||||
source="migrations__target__title",
|
||||
required=False
|
||||
)
|
||||
migrated_to_key = serializers.CharField(
|
||||
source="migrations__target__key",
|
||||
required=False
|
||||
)
|
||||
migrated_to_collection_key = serializers.CharField(
|
||||
source="migrations__target_collection__key",
|
||||
required=False
|
||||
)
|
||||
migrated_to_collection_title = serializers.CharField(
|
||||
source="migrations__target_collection__title",
|
||||
required=False
|
||||
)
|
||||
|
||||
def get_is_migrated(self, obj):
|
||||
return "migrations__target__key" in obj
|
||||
is_migrated = serializers.BooleanField()
|
||||
migrated_to_title = serializers.CharField(required=False)
|
||||
migrated_to_key = serializers.CharField(required=False)
|
||||
migrated_to_collection_key = serializers.CharField(required=False)
|
||||
migrated_to_collection_title = serializers.CharField(required=False)
|
||||
|
||||
|
||||
class CourseHomeTabSerializer(serializers.Serializer):
|
||||
|
||||
@@ -236,7 +236,7 @@ class HomePageLibrariesView(APIView):
|
||||
"number": "CPSPR",
|
||||
"can_edit": true
|
||||
}
|
||||
], }
|
||||
],
|
||||
```
|
||||
"""
|
||||
|
||||
|
||||
@@ -18,7 +18,6 @@ from cms.djangoapps.contentstore.tests.test_libraries import LibraryTestCase
|
||||
from cms.djangoapps.contentstore.tests.utils import CourseTestCase
|
||||
from cms.djangoapps.modulestore_migrator import api as migrator_api
|
||||
from cms.djangoapps.modulestore_migrator.data import CompositionLevel, RepeatHandlingStrategy
|
||||
from cms.djangoapps.modulestore_migrator.tests.factories import ModulestoreSourceFactory
|
||||
from openedx.core.djangoapps.content.course_overviews.tests.factories import CourseOverviewFactory
|
||||
from openedx.core.djangoapps.content_libraries import api as lib_api
|
||||
|
||||
@@ -253,8 +252,9 @@ class HomePageLibrariesViewTest(LibraryTestCase):
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
# Create an additional legacy library
|
||||
# Create two additional legacy libraries
|
||||
self.lib_key_1 = self._create_library(library="lib1")
|
||||
self.lib_key_2 = self._create_library(library="lib2")
|
||||
self.organization = OrganizationFactory()
|
||||
|
||||
# Create a new v2 library
|
||||
@@ -269,7 +269,6 @@ class HomePageLibrariesViewTest(LibraryTestCase):
|
||||
library = lib_api.ContentLibrary.objects.get(slug=self.lib_key_v2.slug)
|
||||
learning_package = library.learning_package
|
||||
# Create a migration source for the legacy library
|
||||
self.source = ModulestoreSourceFactory(key=self.lib_key_1)
|
||||
self.url = reverse("cms.djangoapps.contentstore:v1:libraries")
|
||||
# Create a collection to migrate this library to
|
||||
collection_key = "test-collection"
|
||||
@@ -280,20 +279,32 @@ class HomePageLibrariesViewTest(LibraryTestCase):
|
||||
created_by=self.user.id,
|
||||
)
|
||||
|
||||
# Migrate self.lib_key_1 to self.lib_key_v2
|
||||
# Migrate both lib_key_1 and lib_key_2 to v2
|
||||
# Only make lib_key_1 a "forwarding" migration.
|
||||
migrator_api.start_migration_to_library(
|
||||
user=self.user,
|
||||
source_key=self.source.key,
|
||||
source_key=self.lib_key_1,
|
||||
target_library_key=self.lib_key_v2,
|
||||
target_collection_slug=collection_key,
|
||||
composition_level=CompositionLevel.Component.value,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip.value,
|
||||
composition_level=CompositionLevel.Component,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
forward_source_to_target=True,
|
||||
)
|
||||
migrator_api.start_migration_to_library(
|
||||
user=self.user,
|
||||
source_key=self.lib_key_2,
|
||||
target_library_key=self.lib_key_v2,
|
||||
target_collection_slug=collection_key,
|
||||
composition_level=CompositionLevel.Component,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
forward_source_to_target=False,
|
||||
)
|
||||
|
||||
def test_home_page_libraries_response(self):
|
||||
"""Check successful response content"""
|
||||
"""Check sucessful response content"""
|
||||
self.maxDiff = None
|
||||
response = self.client.get(self.url)
|
||||
|
||||
expected_response = {
|
||||
@@ -322,6 +333,17 @@ class HomePageLibrariesViewTest(LibraryTestCase):
|
||||
'migrated_to_collection_key': 'test-collection',
|
||||
'migrated_to_collection_title': 'Test Collection',
|
||||
},
|
||||
# Third library was migrated, but not with forwarding.
|
||||
# So, it appears just like the unmigrated library.
|
||||
{
|
||||
'display_name': 'Test Library',
|
||||
'library_key': 'library-v1:org+lib2',
|
||||
'url': '/library/library-v1:org+lib2',
|
||||
'org': 'org',
|
||||
'number': 'lib2',
|
||||
'can_edit': True,
|
||||
'is_migrated': False,
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
@@ -366,6 +388,15 @@ class HomePageLibrariesViewTest(LibraryTestCase):
|
||||
'can_edit': True,
|
||||
'is_migrated': False,
|
||||
},
|
||||
{
|
||||
'display_name': 'Test Library',
|
||||
'library_key': 'library-v1:org+lib2',
|
||||
'url': '/library/library-v1:org+lib2',
|
||||
'org': 'org',
|
||||
'number': 'lib2',
|
||||
'can_edit': True,
|
||||
'is_migrated': False,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
@@ -54,7 +54,8 @@ from cms.djangoapps.contentstore.toggles import (
|
||||
)
|
||||
from cms.djangoapps.models.settings.course_grading import CourseGradingModel
|
||||
from cms.djangoapps.models.settings.course_metadata import CourseMetadata
|
||||
from cms.djangoapps.modulestore_migrator.api import get_migration_info
|
||||
from cms.djangoapps.modulestore_migrator import api as migrator_api
|
||||
from cms.djangoapps.modulestore_migrator.data import ModulestoreMigration
|
||||
from common.djangoapps.course_action_state.managers import CourseActionStateItemNotFoundError
|
||||
from common.djangoapps.course_action_state.models import CourseRerunState, CourseRerunUIStateManager
|
||||
from common.djangoapps.course_modes.models import CourseMode
|
||||
@@ -1572,13 +1573,12 @@ def request_response_format_is_json(request, response_format):
|
||||
|
||||
def get_library_context(request, request_is_json=False):
|
||||
"""
|
||||
Utils is used to get context of course home library tab.
|
||||
It is used for both DRF and django views.
|
||||
Utils is used to get context of course home library tab. Returned in DRF view.
|
||||
"""
|
||||
from cms.djangoapps.contentstore.views.course import (
|
||||
_accessible_libraries_iter,
|
||||
_format_library_for_view,
|
||||
_get_course_creator_status,
|
||||
format_library_for_view,
|
||||
get_allowed_organizations,
|
||||
get_allowed_organizations_for_libraries,
|
||||
user_can_create_organizations,
|
||||
@@ -1590,21 +1590,25 @@ def get_library_context(request, request_is_json=False):
|
||||
user_can_create_library,
|
||||
)
|
||||
|
||||
is_migrated: bool | None # None means: do not filter on is_migrated
|
||||
if (is_migrated_param := request.GET.get('is_migrated')) is not None:
|
||||
is_migrated = BooleanField().to_internal_value(is_migrated_param)
|
||||
else:
|
||||
is_migrated = None
|
||||
libraries = list(_accessible_libraries_iter(request.user) if libraries_v1_enabled() else [])
|
||||
library_keys = [lib.location.library_key for lib in libraries]
|
||||
migration_info = get_migration_info(library_keys)
|
||||
is_migrated_filter = request.GET.get('is_migrated', None)
|
||||
migration_info: dict[LibraryLocator, ModulestoreMigration | None] = {
|
||||
lib.id: migrator_api.get_forwarding(lib.id)
|
||||
for lib in libraries
|
||||
}
|
||||
data = {
|
||||
'libraries': [
|
||||
_format_library_for_view(
|
||||
format_library_for_view(
|
||||
lib,
|
||||
request,
|
||||
migrated_to=migration_info.get(lib.location.library_key)
|
||||
migration=migration_info[lib.id],
|
||||
)
|
||||
for lib in libraries
|
||||
if is_migrated_filter is None or (
|
||||
BooleanField().to_internal_value(is_migrated_filter) == (lib.location.library_key in migration_info)
|
||||
)
|
||||
if is_migrated is None or is_migrated == bool(migration_info[lib.id])
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1713,8 +1717,7 @@ def get_course_context_v2(request):
|
||||
|
||||
def get_home_context(request, no_course=False):
|
||||
"""
|
||||
Utils is used to get context of course home.
|
||||
It is used for both DRF and django views.
|
||||
Utils is used to get context of course home. Returned by DRF view.
|
||||
"""
|
||||
|
||||
from cms.djangoapps.contentstore.views.course import (
|
||||
|
||||
@@ -7,7 +7,7 @@ import logging
|
||||
import random
|
||||
import re
|
||||
import string
|
||||
from typing import Dict, NamedTuple, Optional
|
||||
from typing import Dict
|
||||
|
||||
import django.utils
|
||||
from ccx_keys.locator import CCXLocator
|
||||
@@ -44,6 +44,7 @@ from cms.djangoapps.course_creators.models import CourseCreator
|
||||
from cms.djangoapps.models.settings.course_grading import CourseGradingModel
|
||||
from cms.djangoapps.models.settings.course_metadata import CourseMetadata
|
||||
from cms.djangoapps.models.settings.encoder import CourseSettingsEncoder
|
||||
from cms.djangoapps.modulestore_migrator.data import ModulestoreMigration
|
||||
from cms.djangoapps.contentstore.api.views.utils import get_bool_param
|
||||
from common.djangoapps.course_action_state.managers import CourseActionStateItemNotFoundError
|
||||
from common.djangoapps.course_action_state.models import CourseRerunState, CourseRerunUIStateManager
|
||||
@@ -670,11 +671,18 @@ def library_listing(request):
|
||||
)
|
||||
|
||||
|
||||
def _format_library_for_view(library, request, migrated_to: Optional[NamedTuple]):
|
||||
def format_library_for_view(library, request, migration: ModulestoreMigration | None):
|
||||
"""
|
||||
Return a dict of the data which the view requires for each library
|
||||
"""
|
||||
|
||||
migration_info = {}
|
||||
if migration:
|
||||
migration_info = {
|
||||
'migrated_to_key': migration.target_key,
|
||||
'migrated_to_title': migration.target_title,
|
||||
'migrated_to_collection_key': migration.target_collection_slug,
|
||||
'migrated_to_collection_title': migration.target_collection_title,
|
||||
}
|
||||
return {
|
||||
'display_name': library.display_name,
|
||||
'library_key': str(library.location.library_key),
|
||||
@@ -682,7 +690,8 @@ def _format_library_for_view(library, request, migrated_to: Optional[NamedTuple]
|
||||
'org': library.display_org_with_default,
|
||||
'number': library.display_number_with_default,
|
||||
'can_edit': has_studio_write_access(request.user, library.location.library_key),
|
||||
**(migrated_to._asdict() if migrated_to is not None else {}),
|
||||
'is_migrated': migration is not None,
|
||||
**migration_info,
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -147,8 +147,8 @@ class ModulestoreSourceAdmin(admin.ModelAdmin):
|
||||
source_key=source.key,
|
||||
target_library_key=target_library_key,
|
||||
target_collection_slug=target_collection_slug,
|
||||
composition_level=form.cleaned_data['composition_level'],
|
||||
repeat_handling_strategy=form.cleaned_data['repeat_handling_strategy'],
|
||||
composition_level=CompositionLevel(form.cleaned_data['composition_level']),
|
||||
repeat_handling_strategy=RepeatHandlingStrategy(form.cleaned_data['repeat_handling_strategy']),
|
||||
preserve_url_slugs=form.cleaned_data['preserve_url_slugs'],
|
||||
forward_source_to_target=form.cleaned_data['forward_to_target'],
|
||||
)
|
||||
|
||||
@@ -1,211 +0,0 @@
|
||||
"""
|
||||
API for migration from modulestore to learning core
|
||||
"""
|
||||
from uuid import UUID
|
||||
from collections import defaultdict
|
||||
from celery.result import AsyncResult
|
||||
from opaque_keys import InvalidKeyError
|
||||
from opaque_keys.edx.keys import CourseKey, LearningContextKey, UsageKey
|
||||
from opaque_keys.edx.locator import LibraryLocator, LibraryLocatorV2, LibraryUsageLocatorV2
|
||||
from openedx_learning.api.authoring import get_collection
|
||||
from openedx_learning.api.authoring_models import Component
|
||||
from user_tasks.models import UserTaskStatus
|
||||
|
||||
from openedx.core.djangoapps.content_libraries.api import get_library, library_component_usage_key
|
||||
from openedx.core.types.user import AuthUser
|
||||
|
||||
from . import tasks
|
||||
from .models import ModulestoreBlockMigration, ModulestoreSource
|
||||
|
||||
__all__ = (
|
||||
"start_migration_to_library",
|
||||
"start_bulk_migration_to_library",
|
||||
"is_successfully_migrated",
|
||||
"get_migration_info",
|
||||
"get_all_migrations_info",
|
||||
"get_target_block_usage_keys",
|
||||
)
|
||||
|
||||
|
||||
def start_migration_to_library(
    *,
    user: AuthUser,
    source_key: LearningContextKey,
    target_library_key: LibraryLocatorV2,
    target_collection_slug: str | None = None,
    composition_level: str,
    repeat_handling_strategy: str,
    preserve_url_slugs: bool,
    forward_source_to_target: bool,
) -> AsyncResult:
    """
    Kick off an async import of a course or legacy library into a V2 library
    (or into a collection within that V2 library).

    Returns the Celery AsyncResult for the enqueued migration task.
    """
    migration_source, _created = ModulestoreSource.objects.get_or_create(key=source_key)
    library = get_library(target_library_key)
    # get_library ensures that the library is connected to a learning package.
    package_id: int = library.learning_package_id  # type: ignore[assignment]
    # Resolve the optional collection slug to its primary key, when one was given.
    if target_collection_slug:
        collection_pk = get_collection(package_id, target_collection_slug).id
    else:
        collection_pk = None
    return tasks.migrate_from_modulestore.delay(
        user_id=user.id,
        source_pk=migration_source.id,
        target_library_key=str(target_library_key),
        target_collection_pk=collection_pk,
        composition_level=composition_level,
        repeat_handling_strategy=repeat_handling_strategy,
        preserve_url_slugs=preserve_url_slugs,
        forward_source_to_target=forward_source_to_target,
    )
|
||||
|
||||
|
||||
def start_bulk_migration_to_library(
    *,
    user: AuthUser,
    source_key_list: list[LearningContextKey],
    target_library_key: LibraryLocatorV2,
    target_collection_slug_list: list[str | None] | None = None,
    create_collections: bool = False,
    composition_level: str,
    repeat_handling_strategy: str,
    preserve_url_slugs: bool,
    forward_source_to_target: bool,
) -> AsyncResult:
    """
    Kick off an async import of several courses or legacy libraries into a V2 library
    (or into collections within that V2 library).

    Returns the Celery AsyncResult for the enqueued bulk migration task.
    """
    library = get_library(target_library_key)
    # get_library ensures that the library is connected to a learning package.
    package_id: int = library.learning_package_id  # type: ignore[assignment]

    # Ensure a ModulestoreSource row exists for every source context key.
    source_pks: list[int] = [
        ModulestoreSource.objects.get_or_create(key=key)[0].id
        for key in source_key_list
    ]

    # Translate each collection slug (or None) into a collection primary key (or None),
    # preserving positional correspondence with the sources.
    collection_pks: list[int | None] = []
    if target_collection_slug_list:
        for slug in target_collection_slug_list:
            if slug:
                collection_pks.append(get_collection(package_id, slug).id)
            else:
                collection_pks.append(None)

    return tasks.bulk_migrate_from_modulestore.delay(
        user_id=user.id,
        sources_pks=source_pks,
        target_library_key=str(target_library_key),
        target_collection_pks=collection_pks,
        create_collections=create_collections,
        composition_level=composition_level,
        repeat_handling_strategy=repeat_handling_strategy,
        preserve_url_slugs=preserve_url_slugs,
        forward_source_to_target=forward_source_to_target,
    )
|
||||
|
||||
|
||||
def is_successfully_migrated(
    source_key: CourseKey | LibraryLocator,
    source_version: str | None = None,
) -> bool:
    """
    Check if the source course/library has been migrated successfully.
    """
    source, _created = ModulestoreSource.objects.get_or_create(key=str(source_key))
    successful = source.migrations.filter(task_status__state=UserTaskStatus.SUCCEEDED)
    if source_version is not None:
        # Restrict to migrations of this exact source version, when requested.
        successful = successful.filter(source_version=source_version)
    return successful.exists()
|
||||
|
||||
|
||||
def get_migration_info(source_keys: list[CourseKey | LibraryLocator]) -> dict:
    """
    Check whether each source course/library has been migrated successfully
    and return the last matching target info row, keyed by source key.
    """
    rows = ModulestoreSource.objects.filter(
        migrations__task_status__state=UserTaskStatus.SUCCEEDED,
        migrations__is_failed=False,
        key__in=source_keys,
    ).values_list(
        'migrations__target__key',
        'migrations__target__title',
        'migrations__target_collection__key',
        'migrations__target_collection__title',
        'key',
        named=True,
    )
    # Later rows overwrite earlier ones, so each source key maps to the last row returned.
    return {row.key: row for row in rows}
|
||||
|
||||
|
||||
def get_all_migrations_info(source_keys: list[CourseKey | LibraryLocator]) -> dict:
    """
    Get target info for all successful migrations of the given source keys,
    grouped by source key.
    """
    rows = ModulestoreSource.objects.filter(
        migrations__task_status__state=UserTaskStatus.SUCCEEDED,
        migrations__is_failed=False,
        key__in=source_keys,
    ).values(
        'migrations__target__key',
        'migrations__target__title',
        'migrations__target_collection__key',
        'migrations__target_collection__title',
        'key',
    )
    grouped: defaultdict = defaultdict(list)
    for row in rows:
        grouped[row['key']].append(row)
    return dict(grouped)
|
||||
|
||||
|
||||
def get_target_block_usage_keys(source_key: CourseKey | LibraryLocator) -> dict[UsageKey, LibraryUsageLocatorV2 | None]:
    """
    For the given source_key, get a map from each legacy block key to its new
    location in the migrated V2 library (or None when no V2 key can be built).
    """
    block_migrations = ModulestoreBlockMigration.objects.filter(
        overall_migration__source__key=source_key
    ).select_related(
        'source', 'target__component__component_type', 'target__learning_package'
    )

    def _usage_key_for(package_key: str, component: Component) -> LibraryUsageLocatorV2 | None:
        # A learning package whose key is not a valid V2 library locator yields None.
        try:
            library_key = LibraryLocatorV2.from_string(package_key)
        except InvalidKeyError:
            return None
        return library_component_usage_key(library_key, component)

    return {
        migration.source.key: _usage_key_for(migration.target.learning_package.key, migration.target.component)
        for migration in block_migrations
        if migration.source.key is not None and migration.target is not None
    }
|
||||
|
||||
|
||||
def get_migration_blocks_info(
    target_key: str,
    source_key: str | None,
    target_collection_key: str | None,
    task_uuid: str | None,
    is_failed: bool | None,
):
    """
    Given the target key, and optional source key, target collection key, task_uuid,
    and is_failed flag, return a queryset of matching migration block records.
    """
    # Start from the required target filter, then layer on each optional criterion.
    filters: dict[str, str | UUID | bool] = {'overall_migration__target__key': target_key}
    for lookup, value in (
        ('overall_migration__source__key', source_key),
        ('overall_migration__target_collection__key', target_collection_key),
    ):
        if value:
            filters[lookup] = value
    if task_uuid:
        filters['overall_migration__task_status__uuid'] = UUID(task_uuid)
    if is_failed is not None:
        # Blocks that never received a target are the failed ones.
        filters['target__isnull'] = is_failed
    return ModulestoreBlockMigration.objects.filter(**filters)
|
||||
9
cms/djangoapps/modulestore_migrator/api/__init__.py
Normal file
9
cms/djangoapps/modulestore_migrator/api/__init__.py
Normal file
@@ -0,0 +1,9 @@
|
||||
|
||||
"""
|
||||
This is the public API for the modulestore_migrator.
|
||||
"""
|
||||
|
||||
# These wildcard imports are okay because these api modules declare __all__.
|
||||
# pylint: disable=wildcard-import
|
||||
from .read_api import *
|
||||
from .write_api import *
|
||||
244
cms/djangoapps/modulestore_migrator/api/read_api.py
Normal file
244
cms/djangoapps/modulestore_migrator/api/read_api.py
Normal file
@@ -0,0 +1,244 @@
|
||||
"""
|
||||
API for reading information about previous migrations
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
from uuid import UUID
|
||||
|
||||
from opaque_keys.edx.keys import UsageKey
|
||||
from opaque_keys.edx.locator import (
|
||||
LibraryLocatorV2, LibraryUsageLocatorV2, LibraryContainerLocator
|
||||
)
|
||||
from openedx_learning.api.authoring import get_draft_version
|
||||
from openedx_learning.api.authoring_models import (
|
||||
PublishableEntityVersion, PublishableEntity, DraftChangeLogRecord
|
||||
)
|
||||
|
||||
from openedx.core.djangoapps.content_libraries.api import (
|
||||
library_component_usage_key, library_container_locator
|
||||
)
|
||||
|
||||
from ..data import (
|
||||
SourceContextKey, ModulestoreMigration, ModulestoreBlockMigrationResult,
|
||||
ModulestoreBlockMigrationSuccess, ModulestoreBlockMigrationFailure
|
||||
)
|
||||
from .. import models
|
||||
|
||||
|
||||
__all__ = (
|
||||
'get_forwarding',
|
||||
'is_forwarded',
|
||||
'get_forwarding_for_blocks',
|
||||
'get_migrations',
|
||||
'get_migration_blocks',
|
||||
)
|
||||
|
||||
|
||||
def get_forwarding_for_blocks(source_keys: t.Iterable[UsageKey]) -> dict[UsageKey, ModulestoreBlockMigrationSuccess]:
    """
    Authoritatively determine how some Modulestore blocks have been migrated to Learning Core.

    Returns a mapping from source usage keys to block migration data objects. Each block
    migration object holds the target usage key and title. A source key absent from the
    mapping has not been authoritatively migrated.
    """
    key_strings = [str(source_key) for source_key in source_keys]
    block_sources = models.ModulestoreBlockSource.objects.filter(
        key__in=key_strings
    ).select_related(
        "forwarded__target__learning_package",
        # For building component key
        "forwarded__target__component__component_type",
        # For building container key
        "forwarded__target__container__section",
        "forwarded__target__container__subsection",
        "forwarded__target__container__unit",
        # For determining title and version
        "forwarded__change_log_record__new_version",
    )
    # Only sources whose forwarding migration actually produced a target count as migrated.
    return {
        block_source.key: _block_migration_success(
            source_key=block_source.key,
            target=block_source.forwarded.target,
            change_log_record=block_source.forwarded.change_log_record,
        )
        for block_source in block_sources
        if block_source.forwarded and block_source.forwarded.target
    }
|
||||
|
||||
|
||||
def is_forwarded(source_key: SourceContextKey) -> bool:
    """
    Has this course or legacy library been authoritatively migrated to Learning Core,
    such that references to the source course/library should be forwarded to the
    target library?
    """
    forwarding = get_forwarding(source_key)
    return forwarding is not None
|
||||
|
||||
|
||||
def get_forwarding(source_key: SourceContextKey) -> ModulestoreMigration | None:
    """
    Authoritatively determine how some Modulestore course or legacy library has been
    migrated to Learning Core.

    If no such successful migration exists, returns None.

    Note: This function may return None for a course or legacy lib that *has* been
    migrated 1+ times. That just means those migrations were non-forwarding. In user
    parlance: they have been "imported" but not truly "migrated".
    """
    try:
        source = models.ModulestoreSource.objects.select_related(
            # The following are used in _migration:
            "forwarded__source",
            "forwarded__target",
            "forwarded__task_status",
            "forwarded__target_collection",
        ).get(key=str(source_key))
    except models.ModulestoreSource.DoesNotExist:
        return None
    forwarded = source.forwarded
    # No forwarding migration, or a failed one, means nothing to forward to.
    if not forwarded or forwarded.is_failed:
        return None
    return _migration(forwarded)
|
||||
|
||||
|
||||
def get_migrations(
    source_key: SourceContextKey | None = None,
    *,
    target_key: LibraryLocatorV2 | None = None,
    target_collection_slug: str | None = None,
    task_uuid: UUID | None = None,
    is_failed: bool | None = None,
) -> t.Generator[ModulestoreMigration]:
    """
    Given some criteria, get all modulestore->LearningCore migrations.

    Returns an iterable, ordered from NEWEST to OLDEST.

    Please note: If you provide no filters, this will return an iterable across the whole
    ModulestoreMigration table. Please paginate thoughtfully if you do that.
    """
    queryset = models.ModulestoreMigration.objects.all().select_related(
        "source",
        "target",
        "target_collection",
        "task_status",
    )
    # Apply only the criteria the caller actually supplied.
    criteria: dict = {}
    if source_key:
        criteria["source__key"] = source_key
    if target_key:
        criteria["target__key"] = str(target_key)
    if target_collection_slug:
        criteria["target_collection__key"] = target_collection_slug
    if task_uuid:
        criteria["task_status__uuid"] = task_uuid
    if is_failed is not None:
        criteria["is_failed"] = is_failed
    queryset = queryset.filter(**criteria)
    # Primary key is a proxy for newness, so descending pk order is newest-first.
    return (_migration(row) for row in queryset.order_by("-id"))
|
||||
|
||||
|
||||
def get_migration_blocks(migration_pk: int) -> dict[UsageKey, ModulestoreBlockMigrationResult]:
    """
    Get details about the migrations of each individual block within a course/lib migration.
    """
    block_migrations = models.ModulestoreBlockMigration.objects.filter(
        overall_migration_id=migration_pk
    ).select_related(
        "source",
        "target__learning_package",
        # For building component key
        "target__component__component_type",
        # For building container key.
        # (Hard-coding these exact 3 container types here is not a good pattern, but it's what is needed
        # here in order to avoid additional SELECTs while determining the container type).
        "target__container__section",
        "target__container__subsection",
        "target__container__unit",
        # For determining title and version
        "change_log_record__new_version",
    )
    results: dict[UsageKey, ModulestoreBlockMigrationResult] = {}
    for block_migration in block_migrations:
        results[block_migration.source.key] = _block_migration_result(block_migration)
    return results
|
||||
|
||||
|
||||
def _migration(row: models.ModulestoreMigration) -> ModulestoreMigration:
    """
    Build a migration dataclass from the database row.
    """
    collection = row.target_collection
    return ModulestoreMigration(
        pk=row.id,
        source_key=row.source.key,
        target_key=LibraryLocatorV2.from_string(row.target.key),
        target_title=row.target.title,
        # The collection is optional; expose its slug/title only when present.
        target_collection_slug=(collection.key if collection else None),
        target_collection_title=(collection.title if collection else None),
        is_failed=row.is_failed,
        task_uuid=row.task_status.uuid,
    )
|
||||
|
||||
|
||||
def _block_migration_result(row: models.ModulestoreBlockMigration) -> ModulestoreBlockMigrationResult:
    """
    Build an instance of the migration result (success/failure) dataclass from a database row.
    """
    # A missing target indicates that this block could not be migrated.
    if not row.target:
        return ModulestoreBlockMigrationFailure(
            source_key=row.source.key,
            unsupported_reason=(row.unsupported_reason or ""),
        )
    return _block_migration_success(
        source_key=row.source.key,
        target=row.target,
        change_log_record=row.change_log_record,
    )
|
||||
|
||||
|
||||
def _block_migration_success(
|
||||
source_key: UsageKey,
|
||||
target: PublishableEntity,
|
||||
change_log_record: DraftChangeLogRecord | None,
|
||||
) -> ModulestoreBlockMigrationSuccess:
|
||||
"""
|
||||
Build an instance of the migration success dataclass
|
||||
"""
|
||||
target_library_key = LibraryLocatorV2.from_string(target.learning_package.key)
|
||||
target_key: LibraryUsageLocatorV2 | LibraryContainerLocator
|
||||
if hasattr(target, "component"):
|
||||
target_key = library_component_usage_key(target_library_key, target.component)
|
||||
elif hasattr(target, "container"):
|
||||
target_key = library_container_locator(target_library_key, target.container)
|
||||
else:
|
||||
raise ValueError(f"Entity is neither a container nor component: {target}")
|
||||
# We expect that any successful BlockMigration (that is, one where `target is not None`)
|
||||
# will also have a `change_log_record` with a non-None `new_version`. However, the data model
|
||||
# does not guarantee that `change_log_record` nor `change_log_record.new_version` are non-
|
||||
# None. So, just in case some bug in the modulestore_migrator or some manual modification of
|
||||
# the database leads us to a situation where `target` is set but `change_log_record.new_version`
|
||||
# is not, we have fallback behavior:
|
||||
# * For target_title, use the latest draft's title, which is good enough, because the
|
||||
# title is just there to help users.
|
||||
# * For target_version_num, just use None, because we don't want downstream code to make decisions
|
||||
# about syncing, etc based on incorrect version info.
|
||||
target_version: PublishableEntityVersion | None = (
|
||||
change_log_record.new_version if change_log_record else None
|
||||
)
|
||||
if target_version:
|
||||
target_title = target_version.title
|
||||
target_version_num = target_version.version_num
|
||||
else:
|
||||
latest_draft = get_draft_version(target)
|
||||
target_title = latest_draft.title if latest_draft else ""
|
||||
target_version_num = None
|
||||
return ModulestoreBlockMigrationSuccess(
|
||||
source_key=source_key,
|
||||
target_entity_pk=target.id,
|
||||
target_key=target_key,
|
||||
target_title=target_title,
|
||||
target_version_num=target_version_num,
|
||||
)
|
||||
94
cms/djangoapps/modulestore_migrator/api/write_api.py
Normal file
94
cms/djangoapps/modulestore_migrator/api/write_api.py
Normal file
@@ -0,0 +1,94 @@
|
||||
"""
|
||||
API for kicking off new migrations
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from celery.result import AsyncResult
|
||||
from opaque_keys.edx.locator import LibraryLocatorV2
|
||||
from openedx_learning.api.authoring import get_collection
|
||||
|
||||
from openedx.core.types.user import AuthUser
|
||||
from openedx.core.djangoapps.content_libraries.api import get_library
|
||||
|
||||
from ..data import SourceContextKey, CompositionLevel, RepeatHandlingStrategy
|
||||
from .. import tasks, models
|
||||
|
||||
|
||||
__all__ = (
|
||||
'start_migration_to_library',
|
||||
'start_bulk_migration_to_library'
|
||||
)
|
||||
|
||||
|
||||
def start_migration_to_library(
|
||||
*,
|
||||
user: AuthUser,
|
||||
source_key: SourceContextKey,
|
||||
target_library_key: LibraryLocatorV2,
|
||||
target_collection_slug: str | None = None,
|
||||
create_collection: bool = False,
|
||||
composition_level: CompositionLevel,
|
||||
repeat_handling_strategy: RepeatHandlingStrategy,
|
||||
preserve_url_slugs: bool,
|
||||
forward_source_to_target: bool | None
|
||||
) -> AsyncResult:
|
||||
"""
|
||||
Import a course or legacy library into a V2 library (or, a collection within a V2 library).
|
||||
"""
|
||||
return start_bulk_migration_to_library(
|
||||
user=user,
|
||||
source_key_list=[source_key],
|
||||
target_library_key=target_library_key,
|
||||
target_collection_slug_list=[target_collection_slug],
|
||||
create_collections=create_collection,
|
||||
composition_level=composition_level,
|
||||
repeat_handling_strategy=repeat_handling_strategy,
|
||||
preserve_url_slugs=preserve_url_slugs,
|
||||
forward_source_to_target=forward_source_to_target,
|
||||
)
|
||||
|
||||
|
||||
def start_bulk_migration_to_library(
|
||||
*,
|
||||
user: AuthUser,
|
||||
source_key_list: list[SourceContextKey],
|
||||
target_library_key: LibraryLocatorV2,
|
||||
target_collection_slug_list: list[str | None] | None = None,
|
||||
create_collections: bool = False,
|
||||
composition_level: CompositionLevel,
|
||||
repeat_handling_strategy: RepeatHandlingStrategy,
|
||||
preserve_url_slugs: bool,
|
||||
forward_source_to_target: bool | None,
|
||||
) -> AsyncResult:
|
||||
"""
|
||||
Import a list of courses or legacy libraries into a V2 library (or, a collections within a V2 library).
|
||||
"""
|
||||
target_library = get_library(target_library_key)
|
||||
# get_library ensures that the library is connected to a learning package.
|
||||
target_package_id: int = target_library.learning_package_id # type: ignore[assignment]
|
||||
|
||||
sources_pks: list[int] = []
|
||||
for source_key in source_key_list:
|
||||
source, _ = models.ModulestoreSource.objects.get_or_create(key=str(source_key))
|
||||
sources_pks.append(source.id)
|
||||
|
||||
target_collection_pks: list[int | None] = []
|
||||
if target_collection_slug_list:
|
||||
for target_collection_slug in target_collection_slug_list:
|
||||
if target_collection_slug:
|
||||
target_collection_id = get_collection(target_package_id, target_collection_slug).id
|
||||
target_collection_pks.append(target_collection_id)
|
||||
else:
|
||||
target_collection_pks.append(None)
|
||||
|
||||
return tasks.bulk_migrate_from_modulestore.delay(
|
||||
user_id=user.id,
|
||||
sources_pks=sources_pks,
|
||||
target_library_key=str(target_library_key),
|
||||
target_collection_pks=target_collection_pks,
|
||||
create_collections=create_collections,
|
||||
composition_level=composition_level.value,
|
||||
repeat_handling_strategy=repeat_handling_strategy.value,
|
||||
preserve_url_slugs=preserve_url_slugs,
|
||||
forward_source_to_target=forward_source_to_target,
|
||||
)
|
||||
@@ -1,9 +1,23 @@
|
||||
"""
|
||||
Value objects
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from uuid import UUID
|
||||
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from opaque_keys.edx.keys import UsageKey
|
||||
from opaque_keys.edx.locator import (
|
||||
CourseLocator,
|
||||
LibraryContainerLocator,
|
||||
LibraryLocator,
|
||||
LibraryLocatorV2,
|
||||
LibraryUsageLocatorV2,
|
||||
)
|
||||
|
||||
from openedx.core.djangoapps.content_libraries.api import ContainerType
|
||||
|
||||
@@ -70,3 +84,51 @@ class RepeatHandlingStrategy(Enum):
|
||||
Returns the default repeat handling strategy.
|
||||
"""
|
||||
return cls.Skip
|
||||
|
||||
|
||||
SourceContextKey: t.TypeAlias = CourseLocator | LibraryLocator
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class ModulestoreMigration:
|
||||
"""
|
||||
Metadata on a migration of a course or legacy library to a v2 library in learning core.
|
||||
"""
|
||||
pk: int
|
||||
source_key: SourceContextKey
|
||||
target_key: LibraryLocatorV2
|
||||
target_title: str
|
||||
target_collection_slug: str | None
|
||||
target_collection_title: str | None
|
||||
is_failed: bool
|
||||
task_uuid: UUID # the UserTask which executed this migration
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class ModulestoreBlockMigrationResult:
|
||||
"""
|
||||
Base class for a modulestore block that was part of an attempted migration to learning core.
|
||||
"""
|
||||
source_key: UsageKey
|
||||
is_failed: t.ClassVar[bool]
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class ModulestoreBlockMigrationSuccess(ModulestoreBlockMigrationResult):
|
||||
"""
|
||||
Info on a modulestore block which has been successfully migrated into an LC entity
|
||||
"""
|
||||
target_entity_pk: int
|
||||
target_key: LibraryUsageLocatorV2 | LibraryContainerLocator
|
||||
target_title: str
|
||||
target_version_num: int | None
|
||||
is_failed: t.ClassVar[bool] = False
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class ModulestoreBlockMigrationFailure(ModulestoreBlockMigrationResult):
|
||||
"""
|
||||
Info on a modulestore block which failed to be migrated into LC
|
||||
"""
|
||||
unsupported_reason: str
|
||||
is_failed: t.ClassVar[bool] = True
|
||||
|
||||
@@ -0,0 +1,30 @@
|
||||
# Generated by Django 5.2.9 on 2025-12-14 15:33
|
||||
|
||||
import django.db.models.deletion
|
||||
import opaque_keys.edx.django.models
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('modulestore_migrator', '0004_alter_modulestoreblockmigration_target_squashed_0005_modulestoreblockmigration_unsupported_reason'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='modulestoreblocksource',
|
||||
name='forwarded',
|
||||
field=models.OneToOneField(help_text='If set, the system will forward references of this block source over to the target of this block migration', null=True, on_delete=django.db.models.deletion.SET_NULL, to='modulestore_migrator.modulestoreblockmigration'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='modulestoreblocksource',
|
||||
name='key',
|
||||
field=opaque_keys.edx.django.models.UsageKeyField(help_text='Original usage key of the XBlock that has been imported.', max_length=255, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='modulestoresource',
|
||||
name='forwarded',
|
||||
field=models.OneToOneField(blank=True, help_text='If set, the system will forward references of this source over to the target of this migration', null=True, on_delete=django.db.models.deletion.SET_NULL, to='modulestore_migrator.modulestoremigration'),
|
||||
),
|
||||
]
|
||||
@@ -28,6 +28,23 @@ User = get_user_model()
|
||||
class ModulestoreSource(models.Model):
|
||||
"""
|
||||
A legacy learning context (course or library) which can be a source of a migration.
|
||||
|
||||
One source can be associated with multiple (successful or unsuccessful) ModulestoreMigrations.
|
||||
If a source has been migrated multiple times, then at most one of them can be considered the
|
||||
"official" or "authoritative" migration; this is indicated by setting the `forwarded` field to
|
||||
that ModulestoreMigration object.
|
||||
|
||||
Note that `forwarded` can be NULL even when 1+ migrations have happened for this source. This just
|
||||
means that none of them were authoritative. In other words, they were all "imports"/"copies" rather
|
||||
than true "migrations".
|
||||
|
||||
In practice, as of Ulmo:
|
||||
* The `forwarded` field is used to decide how to update legacy library_content references.
|
||||
* When using the Libraries Migration UI in Studio, `forwarded` is always set to the first
|
||||
successful ModulestoreMigration.
|
||||
* When using the REST API directly, the default is to use the same behavior as the UI, but
|
||||
clients can also explicitly specify the `forward_source_to_target` boolean param in order to
|
||||
control whether `forwarded` is set to any given migration.
|
||||
"""
|
||||
key = LearningContextKeyField(
|
||||
max_length=255,
|
||||
@@ -40,7 +57,6 @@ class ModulestoreSource(models.Model):
|
||||
blank=True,
|
||||
on_delete=models.SET_NULL,
|
||||
help_text=_('If set, the system will forward references of this source over to the target of this migration'),
|
||||
related_name="forwards",
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
@@ -163,6 +179,9 @@ class ModulestoreMigration(models.Model):
|
||||
class ModulestoreBlockSource(TimeStampedModel):
|
||||
"""
|
||||
A legacy block usage (in a course or library) which can be a source of a block migration.
|
||||
|
||||
The semantics of `forwarded` directly mirror those of `ModulestoreSource.forwarded`. Please see
|
||||
that class's docstring for details.
|
||||
"""
|
||||
overall_source = models.ForeignKey(
|
||||
ModulestoreSource,
|
||||
@@ -171,6 +190,7 @@ class ModulestoreBlockSource(TimeStampedModel):
|
||||
)
|
||||
key = UsageKeyField(
|
||||
max_length=255,
|
||||
unique=True,
|
||||
help_text=_('Original usage key of the XBlock that has been imported.'),
|
||||
)
|
||||
forwarded = models.OneToOneField(
|
||||
@@ -178,11 +198,10 @@ class ModulestoreBlockSource(TimeStampedModel):
|
||||
null=True,
|
||||
on_delete=models.SET_NULL,
|
||||
help_text=_(
|
||||
'If set, the system will forward references of this block source over to the target of this block migration'
|
||||
'If set, the system will forward references of this block source over to the '
|
||||
'target of this block migration'
|
||||
),
|
||||
related_name="forwards",
|
||||
)
|
||||
unique_together = [("overall_source", "key")]
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.__class__.__name__}('{self.key}')"
|
||||
|
||||
@@ -54,7 +54,7 @@ class ModulestoreMigrationSerializer(serializers.Serializer):
|
||||
preserve_url_slugs = serializers.BooleanField(
|
||||
help_text="If true, current slugs will be preserved.",
|
||||
required=False,
|
||||
default=True,
|
||||
default=False,
|
||||
)
|
||||
target_collection_slug = serializers.CharField(
|
||||
help_text="The target collection slug within the library to import into. Optional.",
|
||||
@@ -62,11 +62,20 @@ class ModulestoreMigrationSerializer(serializers.Serializer):
|
||||
allow_blank=True,
|
||||
default=None,
|
||||
)
|
||||
create_collection = serializers.BooleanField(
|
||||
help_text=(
|
||||
"If true and `target_collection_slug` is not set, "
|
||||
"create the collections in the library where the import will be made"
|
||||
),
|
||||
required=False,
|
||||
default=False,
|
||||
)
|
||||
target_collection = LibraryMigrationCollectionSerializer(required=False)
|
||||
forward_source_to_target = serializers.BooleanField(
|
||||
help_text="Forward references of this block source over to the target of this block migration.",
|
||||
required=False,
|
||||
default=False,
|
||||
allow_null=True,
|
||||
default=None, # Note: "None" means "unspecified"
|
||||
)
|
||||
is_failed = serializers.BooleanField(
|
||||
help_text="It is true if this migration is failed",
|
||||
@@ -195,15 +204,14 @@ class MigrationInfoSerializer(serializers.Serializer):
|
||||
Serializer for the migration info
|
||||
"""
|
||||
|
||||
source_key = serializers.CharField(source="key")
|
||||
target_key = serializers.CharField(source="migrations__target__key")
|
||||
target_title = serializers.CharField(source="migrations__target__title")
|
||||
source_key = serializers.CharField()
|
||||
target_key = serializers.CharField()
|
||||
target_title = serializers.CharField()
|
||||
target_collection_key = serializers.CharField(
|
||||
source="migrations__target_collection__key",
|
||||
source="target_collection_slug",
|
||||
allow_null=True
|
||||
)
|
||||
target_collection_title = serializers.CharField(
|
||||
source="migrations__target_collection__title",
|
||||
allow_null=True
|
||||
)
|
||||
|
||||
@@ -278,6 +286,6 @@ class BlockMigrationInfoSerializer(serializers.Serializer):
|
||||
"""
|
||||
Serializer for the block migration info.
|
||||
"""
|
||||
source_key = serializers.CharField(source="source__key")
|
||||
target_key = serializers.CharField(source="target__key")
|
||||
unsupported_reason = serializers.CharField()
|
||||
source_key = serializers.CharField()
|
||||
target_key = serializers.CharField(allow_null=True)
|
||||
unsupported_reason = serializers.CharField(allow_null=True)
|
||||
|
||||
@@ -2,18 +2,20 @@
|
||||
API v1 views.
|
||||
"""
|
||||
import logging
|
||||
from uuid import UUID
|
||||
|
||||
import edx_api_doc_tools as apidocs
|
||||
from edx_rest_framework_extensions.auth.jwt.authentication import JwtAuthentication
|
||||
from edx_rest_framework_extensions.auth.session.authentication import SessionAuthenticationAllowInactiveUser
|
||||
from opaque_keys import InvalidKeyError
|
||||
from opaque_keys.edx.keys import CourseKey
|
||||
from opaque_keys.edx.locator import LibraryLocatorV2
|
||||
from opaque_keys.edx.locator import LibraryLocatorV2, CourseLocator, LibraryLocator
|
||||
from rest_framework import status
|
||||
from rest_framework.exceptions import ParseError
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.exceptions import ParseError, PermissionDenied, ValidationError
|
||||
from rest_framework.fields import BooleanField
|
||||
from rest_framework.mixins import ListModelMixin
|
||||
from rest_framework.permissions import IsAdminUser, IsAuthenticated
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
@@ -21,18 +23,17 @@ from rest_framework.viewsets import GenericViewSet
|
||||
from user_tasks.models import UserTaskStatus
|
||||
from user_tasks.views import StatusViewSet
|
||||
|
||||
from cms.djangoapps.modulestore_migrator.api import (
|
||||
get_all_migrations_info,
|
||||
get_migration_blocks_info,
|
||||
start_bulk_migration_to_library,
|
||||
start_migration_to_library,
|
||||
)
|
||||
from common.djangoapps.student.auth import has_studio_write_access
|
||||
from cms.djangoapps.modulestore_migrator import api as migrator_api
|
||||
from common.djangoapps.student import auth
|
||||
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
|
||||
from openedx.core.djangoapps.content_libraries import api as lib_api
|
||||
from openedx.core.lib.api.authentication import BearerAuthenticationAllowInactiveUser
|
||||
|
||||
from ...models import ModulestoreMigration
|
||||
from ... import models
|
||||
from ...data import (
|
||||
SourceContextKey, ModulestoreMigration, ModulestoreBlockMigrationResult,
|
||||
CompositionLevel, RepeatHandlingStrategy,
|
||||
)
|
||||
from .serializers import (
|
||||
BlockMigrationInfoSerializer,
|
||||
BulkModulestoreMigrationSerializer,
|
||||
@@ -71,21 +72,11 @@ _error_responses = {
|
||||
See `POST /api/modulestore_migrator/v1/migrations` for details on its schema.
|
||||
""",
|
||||
)
|
||||
@apidocs.schema_for(
|
||||
"cancel",
|
||||
"""
|
||||
Cancel a particular migration or bulk-migration task.
|
||||
|
||||
The response is a migration task status object.
|
||||
See `POST /api/modulestore_migrator/v1/migrations` for details on its schema.
|
||||
""",
|
||||
)
|
||||
class MigrationViewSet(StatusViewSet):
|
||||
"""
|
||||
JSON HTTP API to create and check on ModuleStore-to-Learning-Core migration tasks.
|
||||
"""
|
||||
|
||||
permission_classes = (IsAdminUser,)
|
||||
authentication_classes = (
|
||||
BearerAuthenticationAllowInactiveUser,
|
||||
JwtAuthentication,
|
||||
@@ -97,14 +88,39 @@ class MigrationViewSet(StatusViewSet):
|
||||
# Instead, users can POST to /cancel to cancel running tasks.
|
||||
http_method_names = ["get", "post"]
|
||||
|
||||
lookup_field = "uuid"
|
||||
|
||||
def get_queryset(self):
|
||||
"""
|
||||
Override the default queryset to filter by the migration event and user.
|
||||
"""
|
||||
return StatusViewSet.queryset.filter(
|
||||
migrations__isnull=False, user=self.request.user
|
||||
migrations__isnull=False,
|
||||
# The filter for `user` here is essentially the auth strategy for the /list and /retreive
|
||||
# endpoints. Basically: you can view migrations if and only if you started them.
|
||||
# Future devs: If you ever refactor this view to remove the user filter, be sure to enforce
|
||||
# permissions some other way.
|
||||
user=self.request.user
|
||||
).distinct().order_by("-created")
|
||||
|
||||
@apidocs.schema()
|
||||
@action(detail=True, methods=['post'])
|
||||
def cancel(self, request, *args, **kwargs):
|
||||
"""
|
||||
Cancel a particular migration or bulk-migration task.
|
||||
|
||||
The response is a migration task status object.
|
||||
See `POST /api/modulestore_migrator/v1/migrations` for details on its schema.
|
||||
|
||||
This endpoint is currently reserved for site-wide administrators.
|
||||
"""
|
||||
# TODO: This should check some sort of "allowed to cancel/migrations" permission
|
||||
# rather than directly looking at the GlobalStaff role.
|
||||
# https://github.com/openedx/edx-platform/issues/37791
|
||||
if not request.user.is_staff:
|
||||
raise PermissionDenied("Only site administrators can cancel migration tasks.")
|
||||
return super().cancel(request, *args, **kwargs)
|
||||
|
||||
@apidocs.schema(
|
||||
body=ModulestoreMigrationSerializer,
|
||||
responses={
|
||||
@@ -114,28 +130,29 @@ class MigrationViewSet(StatusViewSet):
|
||||
)
|
||||
def create(self, request, *args, **kwargs):
|
||||
"""
|
||||
Transfer content from course or legacy library into a content library.
|
||||
Begin a transfer of content from course or legacy library into a content library.
|
||||
|
||||
This begins a migration task to copy content from a ModuleStore-based **source** context
|
||||
to Learning Core-based **target** context. The valid **source** contexts are:
|
||||
Required parameters:
|
||||
* A **source** key, which identifies the course or legacy library containing the items be migrated.
|
||||
* A **target** key, which identifies the content library to which items will be migrated.
|
||||
|
||||
* A course.
|
||||
* A legacy content library.
|
||||
|
||||
The valid **target** contexts are:
|
||||
|
||||
* A content library.
|
||||
* A collection within a content library.
|
||||
|
||||
Other options:
|
||||
Optional parameters:
|
||||
|
||||
* The **target_collection_slug**, which identifies an *existing* collection within the target that
|
||||
should hold the migrated items. If not specified, items will be added to the target library without
|
||||
any collection, unless:
|
||||
* If this source was previously migrated to a collection and the **repeat_handling_strategy** (described
|
||||
below) is not set to *fork*, then that same collection will be re-used.
|
||||
* If **create_collection** is specified as *true*, then the items will be added to a new collection, with
|
||||
a name and slug based on the source's title, but not conflicting with any existing collection.
|
||||
* The **composition_level** (*component*, *unit*, *subsection*, *section*) indicates the highest level of
|
||||
hierarchy to be transferred. Default is *component*. To maximally preserve the source structure,
|
||||
specify *section*.
|
||||
* The **repeat_handling_strategy** specifies how the system should handle source items which have
|
||||
previously been migrated to the target. Specify *skip* to prefer the existing target item, specify
|
||||
*update* to update the existing target item with the latest source content, or specify *fork* to create
|
||||
a new target item with the source content. Default is *skip*.
|
||||
previously been migrated to the target.
|
||||
* Specify *skip* to prefer the existing target item. This is the default.
|
||||
* Specify *update* to update the existing target item with the latest source content.
|
||||
* Specify *fork* to create a new target item with the source content.
|
||||
* Specify **preserve_url_slugs** as *true* in order to use the source-provided block IDs
|
||||
(a.k.a. "URL slugs", "url_names"). Otherwise, the system will use each source item's title
|
||||
to auto-generate an ID in the target context.
|
||||
@@ -150,12 +167,17 @@ class MigrationViewSet(StatusViewSet):
|
||||
content library.
|
||||
* **Example**: Specify *false* if you are copying course content into a content library, but do not
|
||||
want to persist a link between the source source content and destination library contenet.
|
||||
* **Defaults** to *false* if the source has already been mapped to a target by a successful migration,
|
||||
and defaults to *true* if not. In other words, by default, establish the mapping only if it wouldn't
|
||||
override an existing mapping.
|
||||
|
||||
Example request:
|
||||
```json
|
||||
{
|
||||
"source": "course-v1:MyOrganization+MyCourse+MyRun",
|
||||
"target": "lib:MyOrganization:MyUlmoLibrary",
|
||||
"target": "lib-collection:MyOrganization:MyUlmoLibrary",
|
||||
"target_collection_slug": "MyCollection",
|
||||
"create_collection": true,
|
||||
"composition_level": "unit",
|
||||
"repeat_handling_strategy": "update",
|
||||
"preserve_url_slugs": true
|
||||
@@ -203,25 +225,32 @@ class MigrationViewSet(StatusViewSet):
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
This API requires that the requester have author access on both the source and target.
|
||||
"""
|
||||
serializer_data = ModulestoreMigrationSerializer(data=request.data)
|
||||
serializer_data.is_valid(raise_exception=True)
|
||||
validated_data = serializer_data.validated_data
|
||||
|
||||
task = start_migration_to_library(
|
||||
if not auth.has_studio_write_access(request.user, validated_data['source']):
|
||||
raise PermissionDenied("Requester is not an author on the source.")
|
||||
lib_api.require_permission_for_library_key(
|
||||
validated_data['target'],
|
||||
request.user,
|
||||
lib_api.permissions.CAN_EDIT_THIS_CONTENT_LIBRARY,
|
||||
)
|
||||
task = migrator_api.start_migration_to_library(
|
||||
user=request.user,
|
||||
source_key=validated_data['source'],
|
||||
target_library_key=validated_data['target'],
|
||||
target_collection_slug=validated_data['target_collection_slug'],
|
||||
composition_level=validated_data['composition_level'],
|
||||
repeat_handling_strategy=validated_data['repeat_handling_strategy'],
|
||||
composition_level=CompositionLevel(validated_data['composition_level']),
|
||||
create_collection=validated_data['create_collection'],
|
||||
repeat_handling_strategy=RepeatHandlingStrategy(validated_data['repeat_handling_strategy']),
|
||||
preserve_url_slugs=validated_data['preserve_url_slugs'],
|
||||
forward_source_to_target=validated_data['forward_source_to_target'],
|
||||
)
|
||||
|
||||
task_status = UserTaskStatus.objects.get(task_id=task.id)
|
||||
serializer = self.get_serializer(task_status)
|
||||
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
|
||||
|
||||
@@ -230,7 +259,6 @@ class BulkMigrationViewSet(StatusViewSet):
|
||||
JSON HTTP API to bulk-create ModuleStore-to-Learning-Core migration tasks.
|
||||
"""
|
||||
|
||||
permission_classes = (IsAdminUser,)
|
||||
authentication_classes = (
|
||||
BearerAuthenticationAllowInactiveUser,
|
||||
JwtAuthentication,
|
||||
@@ -315,19 +343,30 @@ class BulkMigrationViewSet(StatusViewSet):
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
This API requires that the requester have author access on both the source and target.
|
||||
"""
|
||||
serializer_data = BulkModulestoreMigrationSerializer(data=request.data)
|
||||
serializer_data.is_valid(raise_exception=True)
|
||||
validated_data = serializer_data.validated_data
|
||||
|
||||
task = start_bulk_migration_to_library(
|
||||
for source_key in validated_data['sources']:
|
||||
if not auth.has_studio_write_access(request.user, source_key):
|
||||
raise PermissionDenied(
|
||||
f"Requester is not an author on the source: {source_key}. No migrations performed."
|
||||
)
|
||||
lib_api.require_permission_for_library_key(
|
||||
validated_data['target'],
|
||||
request.user,
|
||||
lib_api.permissions.CAN_EDIT_THIS_CONTENT_LIBRARY,
|
||||
)
|
||||
task = migrator_api.start_bulk_migration_to_library(
|
||||
user=request.user,
|
||||
source_key_list=validated_data['sources'],
|
||||
target_library_key=validated_data['target'],
|
||||
target_collection_slug_list=validated_data['target_collection_slug_list'],
|
||||
create_collections=validated_data['create_collections'],
|
||||
composition_level=validated_data['composition_level'],
|
||||
repeat_handling_strategy=validated_data['repeat_handling_strategy'],
|
||||
composition_level=CompositionLevel(validated_data['composition_level']),
|
||||
repeat_handling_strategy=RepeatHandlingStrategy(validated_data['repeat_handling_strategy']),
|
||||
preserve_url_slugs=validated_data['preserve_url_slugs'],
|
||||
forward_source_to_target=validated_data['forward_source_to_target'],
|
||||
)
|
||||
@@ -437,12 +476,15 @@ class MigrationInfoViewSet(APIView):
|
||||
for source_key in source_keys:
|
||||
try:
|
||||
key = CourseKey.from_string(source_key)
|
||||
if has_studio_write_access(request.user, key):
|
||||
if auth.has_studio_read_access(request.user, key):
|
||||
source_keys_validated.append(key)
|
||||
except InvalidKeyError:
|
||||
continue
|
||||
|
||||
data = get_all_migrations_info(source_keys_validated)
|
||||
data = {
|
||||
source_key: migrator_api.get_migrations(source_key, is_failed=False)
|
||||
for source_key in source_keys_validated
|
||||
}
|
||||
serializer = MigrationInfoResponseSerializer(data)
|
||||
return Response(serializer.data)
|
||||
|
||||
@@ -463,7 +505,7 @@ class LibraryCourseMigrationViewSet(GenericViewSet, ListModelMixin):
|
||||
|
||||
serializer_class = LibraryMigrationCourseSerializer
|
||||
pagination_class = None
|
||||
queryset = ModulestoreMigration.objects.all().select_related('target_collection', 'target', 'task_status')
|
||||
queryset = models.ModulestoreMigration.objects.all().select_related('target_collection', 'target', 'task_status')
|
||||
|
||||
def get_serializer_context(self):
|
||||
"""
|
||||
@@ -571,31 +613,58 @@ class BlockMigrationInfo(APIView):
|
||||
"""
|
||||
Handle the migration info `GET` request
|
||||
"""
|
||||
source_key = request.query_params.get("source_key")
|
||||
target_key = request.query_params.get("target_key")
|
||||
target_collection_key = request.query_params.get("target_collection_key")
|
||||
task_uuid = request.query_params.get("task_uuid")
|
||||
is_failed: str | bool | None = request.query_params.get("is_failed")
|
||||
if not target_key:
|
||||
target_key: LibraryLocatorV2 | None
|
||||
if target_key_param := request.query_params.get("target_key"):
|
||||
try:
|
||||
target_key = LibraryLocatorV2.from_string(target_key_param)
|
||||
except InvalidKeyError:
|
||||
return Response({"error": f"Bad target_key: {target_key_param}"}, status=400)
|
||||
else:
|
||||
return Response({"error": "Target key cannot be blank."}, status=400)
|
||||
try:
|
||||
target_key_parsed = LibraryLocatorV2.from_string(target_key)
|
||||
except InvalidKeyError as e:
|
||||
return Response({"error": str(e)}, status=400)
|
||||
target_collection_key = request.query_params.get("target_collection_key")
|
||||
source_key: SourceContextKey | None = None
|
||||
if source_key_param := request.query_params.get("source_key"):
|
||||
try:
|
||||
source_key = CourseLocator.from_string(source_key_param)
|
||||
except InvalidKeyError:
|
||||
try:
|
||||
source_key = LibraryLocator.from_string(source_key_param)
|
||||
except InvalidKeyError:
|
||||
return Response({"error": f"Bad source: {source_key_param}"}, status=400)
|
||||
task_uuid: UUID | None = None
|
||||
if task_uuid_param := request.query_params.get("task_uuid"):
|
||||
try:
|
||||
task_uuid = UUID(task_uuid_param)
|
||||
except ValueError:
|
||||
return Response({"error": f"Bad task_uuid: {task_uuid_param}"}, status=400)
|
||||
is_failed: bool | None = None # None means unspecified -- include both successful and failed.
|
||||
if (is_failed_param := request.query_params.get("is_failed")) is not None:
|
||||
try:
|
||||
is_failed = BooleanField().to_internal_value(is_failed_param)
|
||||
except ValidationError:
|
||||
return Response({"error": f"Bad is_failed value: {is_failed_param}"}, status=400)
|
||||
lib_api.require_permission_for_library_key(
|
||||
target_key_parsed,
|
||||
target_key,
|
||||
request.user,
|
||||
lib_api.permissions.CAN_VIEW_THIS_CONTENT_LIBRARY
|
||||
)
|
||||
if is_failed is not None:
|
||||
is_failed = BooleanField().to_internal_value(is_failed)
|
||||
|
||||
data = get_migration_blocks_info(
|
||||
target_key,
|
||||
source_key,
|
||||
target_collection_key,
|
||||
task_uuid,
|
||||
is_failed,
|
||||
).values('source__key', 'target__key', 'unsupported_reason')
|
||||
migrations: list[ModulestoreMigration] = list(
|
||||
migrator_api.get_migrations(
|
||||
source_key=source_key,
|
||||
target_key=target_key,
|
||||
target_collection_slug=target_collection_key,
|
||||
task_uuid=task_uuid,
|
||||
)
|
||||
)
|
||||
data: list[ModulestoreBlockMigrationResult] = [
|
||||
block_migration
|
||||
for migration in migrations
|
||||
for block_migration in migrator_api.get_migration_blocks(migration.pk).values()
|
||||
# Include the block iff...
|
||||
if is_failed in [
|
||||
None, # we're not filtering on success, or
|
||||
block_migration.is_failed, # we are filtering on success, and this matches.
|
||||
]
|
||||
]
|
||||
serializer = BlockMigrationInfoSerializer(data, many=True)
|
||||
return Response(serializer.data)
|
||||
|
||||
@@ -10,7 +10,6 @@ from dataclasses import dataclass
|
||||
from datetime import datetime, timezone
|
||||
from enum import Enum
|
||||
from gettext import ngettext
|
||||
from itertools import groupby
|
||||
|
||||
from celery import shared_task
|
||||
from celery.utils.log import get_task_logger
|
||||
@@ -22,8 +21,9 @@ from edx_django_utils.monitoring import set_code_owner_attribute_from_module
|
||||
from lxml import etree
|
||||
from lxml.etree import _ElementTree as XmlTree
|
||||
from opaque_keys import InvalidKeyError
|
||||
from opaque_keys.edx.keys import CourseKey, UsageKey
|
||||
from opaque_keys.edx.keys import UsageKey
|
||||
from opaque_keys.edx.locator import (
|
||||
BlockUsageLocator,
|
||||
CourseLocator,
|
||||
LibraryContainerLocator,
|
||||
LibraryLocator,
|
||||
@@ -51,9 +51,10 @@ from openedx.core.djangoapps.content_staging import api as staging_api
|
||||
from xmodule.modulestore import exceptions as modulestore_exceptions
|
||||
from xmodule.modulestore.django import modulestore
|
||||
|
||||
from . import models, data
|
||||
from .constants import CONTENT_STAGING_PURPOSE_TEMPLATE
|
||||
from .data import CompositionLevel, RepeatHandlingStrategy
|
||||
from .models import ModulestoreBlockMigration, ModulestoreBlockSource, ModulestoreMigration, ModulestoreSource
|
||||
from .data import CompositionLevel, RepeatHandlingStrategy, SourceContextKey
|
||||
from .api.read_api import get_migrations, get_migration_blocks
|
||||
|
||||
log = get_task_logger(__name__)
|
||||
|
||||
@@ -80,20 +81,6 @@ class MigrationStep(Enum):
|
||||
BULK_MIGRATION_PREFIX = 'Migrating legacy content'
|
||||
|
||||
|
||||
class _MigrationTask(UserTask):
|
||||
"""
|
||||
Base class for migrate_to_modulestore
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
def calculate_total_steps(arguments_dict): # pylint: disable=unused-argument
|
||||
"""
|
||||
Get number of in-progress steps in importing process, as shown in the UI.
|
||||
"""
|
||||
# We subtract the BULK_MIGRATION_PREFIX
|
||||
return len(list(MigrationStep)) - 1
|
||||
|
||||
|
||||
class _BulkMigrationTask(UserTask):
|
||||
"""
|
||||
Base class for bulk_migrate_from_modulestore
|
||||
@@ -127,12 +114,15 @@ class _MigrationContext:
|
||||
"""
|
||||
Context for the migration process.
|
||||
"""
|
||||
existing_source_to_target_keys: dict[ # Note: It's intended to be mutable to reflect changes during migration.
|
||||
UsageKey, list[PublishableEntity | None]
|
||||
]
|
||||
# Fields that get mutated as we migrate blocks
|
||||
used_component_keys: set[LibraryUsageLocatorV2]
|
||||
used_container_slugs: set[str]
|
||||
|
||||
# Fields that remain constant
|
||||
previous_block_migrations: dict[UsageKey, data.ModulestoreBlockMigrationResult]
|
||||
target_package_id: int
|
||||
target_library_key: LibraryLocatorV2
|
||||
source_context_key: CourseKey # Note: This includes legacy LibraryLocators, which are sneakily CourseKeys.
|
||||
source_context_key: SourceContextKey
|
||||
content_by_filename: dict[str, int]
|
||||
composition_level: CompositionLevel
|
||||
repeat_handling_strategy: RepeatHandlingStrategy
|
||||
@@ -140,43 +130,6 @@ class _MigrationContext:
|
||||
created_by: int
|
||||
created_at: datetime
|
||||
|
||||
def is_already_migrated(self, source_key: UsageKey) -> bool:
|
||||
return source_key in self.existing_source_to_target_keys
|
||||
|
||||
def get_existing_target(self, source_key: UsageKey) -> PublishableEntity | None:
|
||||
"""
|
||||
Get the target entity for a given source key.
|
||||
|
||||
If the source key is already migrated, return the FIRST target entity.
|
||||
If the source key is not found, raise a KeyError.
|
||||
"""
|
||||
if source_key not in self.existing_source_to_target_keys:
|
||||
raise KeyError(f"Source key {source_key} not found in existing source to target keys")
|
||||
|
||||
# NOTE: This is a list of PublishableEntities, but we always return the first one.
|
||||
return self.existing_source_to_target_keys[source_key][0]
|
||||
|
||||
def is_already_successfully_migrated(self, source_key: UsageKey) -> bool:
|
||||
"""
|
||||
Check whether a source has successfully been migrated. This means it exists and has at least one target.
|
||||
"""
|
||||
return self.is_already_migrated(source_key) and self.get_existing_target(source_key) is not None
|
||||
|
||||
def add_migration(self, source_key: UsageKey, target: PublishableEntity | None) -> None:
|
||||
"""Update the context with a new migration (keeps it current)"""
|
||||
if source_key not in self.existing_source_to_target_keys:
|
||||
self.existing_source_to_target_keys[source_key] = [target]
|
||||
else:
|
||||
self.existing_source_to_target_keys[source_key].append(target)
|
||||
|
||||
def get_existing_target_entity_keys(self, base_key: str) -> set[str]:
|
||||
return set(
|
||||
publishable_entity.key
|
||||
for publishable_entity_list in self.existing_source_to_target_keys.values()
|
||||
for publishable_entity in publishable_entity_list
|
||||
if publishable_entity and publishable_entity.key.startswith(base_key)
|
||||
)
|
||||
|
||||
@property
|
||||
def should_skip_strategy(self) -> bool:
|
||||
"""
|
||||
@@ -204,10 +157,11 @@ class _MigrationSourceData:
|
||||
"""
|
||||
Data related to a ModulestoreSource
|
||||
"""
|
||||
source: ModulestoreSource
|
||||
source_root_usage_key: UsageKey
|
||||
source: models.ModulestoreSource
|
||||
source_root_usage_key: BlockUsageLocator
|
||||
source_version: str | None
|
||||
migration: ModulestoreMigration
|
||||
migration: models.ModulestoreMigration
|
||||
previous_migration: data.ModulestoreMigration | None
|
||||
|
||||
|
||||
def _validate_input(
|
||||
@@ -216,6 +170,7 @@ def _validate_input(
|
||||
repeat_handling_strategy: str,
|
||||
preserve_url_slugs: bool,
|
||||
composition_level: str,
|
||||
target_library_key: LibraryLocatorV2,
|
||||
target_package: LearningPackage,
|
||||
target_collection: Collection | None,
|
||||
) -> _MigrationSourceData | None:
|
||||
@@ -223,7 +178,7 @@ def _validate_input(
|
||||
Validates and build the source data related to `source_pk`
|
||||
"""
|
||||
try:
|
||||
source = ModulestoreSource.objects.get(pk=source_pk)
|
||||
source = models.ModulestoreSource.objects.get(pk=source_pk)
|
||||
except (ObjectDoesNotExist) as exc:
|
||||
status.fail(str(exc))
|
||||
return None
|
||||
@@ -243,7 +198,17 @@ def _validate_input(
|
||||
)
|
||||
return None
|
||||
|
||||
migration = ModulestoreMigration.objects.create(
|
||||
# Find the latest successful migration that occurred, if any.
|
||||
# We're careful to do this before creating the new ModulestoreMigration object,
|
||||
# otherwise we would just end up grabbing that by one accident.
|
||||
# ( mypy gets confused by how use next(...) here )
|
||||
previous_migration = next( # type: ignore[call-overload]
|
||||
get_migrations(
|
||||
source.key, target_key=target_library_key, is_failed=False
|
||||
),
|
||||
None, # default
|
||||
)
|
||||
migration = models.ModulestoreMigration.objects.create(
|
||||
source=source,
|
||||
source_version=source_version,
|
||||
composition_level=composition_level,
|
||||
@@ -253,17 +218,17 @@ def _validate_input(
|
||||
target_collection=target_collection,
|
||||
task_status=status,
|
||||
)
|
||||
|
||||
return _MigrationSourceData(
|
||||
source=source,
|
||||
source_root_usage_key=source_root_usage_key,
|
||||
source_version=source_version,
|
||||
migration=migration,
|
||||
previous_migration=previous_migration,
|
||||
)
|
||||
|
||||
|
||||
def _cancel_old_tasks(
|
||||
source_list: list[ModulestoreSource],
|
||||
source_list: list[models.ModulestoreSource],
|
||||
status: UserTaskStatus,
|
||||
target_package: LearningPackage,
|
||||
migration_ids_to_exclude: list[int],
|
||||
@@ -272,7 +237,7 @@ def _cancel_old_tasks(
|
||||
Cancel all migration tasks related to the user and the source list
|
||||
"""
|
||||
# In order to prevent a user from accidentally starting a bunch of identical import tasks...
|
||||
migrations_to_cancel = ModulestoreMigration.objects.filter(
|
||||
migrations_to_cancel = models.ModulestoreMigration.objects.filter(
|
||||
# get all Migration tasks by this user with the same sources and target
|
||||
task_status__user=status.user,
|
||||
source__in=source_list,
|
||||
@@ -310,7 +275,7 @@ def _load_xblock(
|
||||
return xblock
|
||||
|
||||
|
||||
def _import_assets(migration: ModulestoreMigration) -> dict[str, int]:
|
||||
def _import_assets(migration: models.ModulestoreMigration) -> dict[str, int]:
|
||||
"""
|
||||
Import the assets of the staged content to the migration target
|
||||
"""
|
||||
@@ -341,7 +306,6 @@ def _import_assets(migration: ModulestoreMigration) -> dict[str, int]:
|
||||
|
||||
|
||||
def _import_structure(
|
||||
migration: ModulestoreMigration,
|
||||
source_data: _MigrationSourceData,
|
||||
target_library: libraries_api.ContentLibraryMetadata,
|
||||
content_by_filename: dict[str, int],
|
||||
@@ -376,21 +340,25 @@ def _import_structure(
|
||||
represents the mapping between the legacy root node and its newly created
|
||||
Learning Core equivalent.
|
||||
"""
|
||||
# "key" is locally unique across all PublishableEntities within
|
||||
# a given LearningPackage.
|
||||
# We use this mapping to ensure that we don't create duplicate
|
||||
# PublishableEntities during the migration process for a given LearningPackage.
|
||||
existing_source_to_target_keys: dict[UsageKey, list[PublishableEntity | None]] = {}
|
||||
modulestore_blocks = (
|
||||
ModulestoreBlockMigration.objects.filter(overall_migration__target=migration.target.id).order_by("source__key")
|
||||
)
|
||||
existing_source_to_target_keys = {
|
||||
source_key: list(block.target for block in group) for source_key, group in groupby(
|
||||
modulestore_blocks, key=lambda x: x.source.key)
|
||||
}
|
||||
|
||||
migration = source_data.migration
|
||||
migration_context = _MigrationContext(
|
||||
existing_source_to_target_keys=existing_source_to_target_keys,
|
||||
used_component_keys=set(
|
||||
LibraryUsageLocatorV2(target_library.key, block_type, block_id) # type: ignore[abstract]
|
||||
for block_type, block_id
|
||||
in authoring_api.get_components(migration.target.pk).values_list(
|
||||
"component_type__name", "local_key"
|
||||
)
|
||||
),
|
||||
used_container_slugs=set(
|
||||
authoring_api.get_containers(
|
||||
migration.target.pk
|
||||
).values_list("publishable_entity__key", flat=True)
|
||||
),
|
||||
previous_block_migrations=(
|
||||
get_migration_blocks(source_data.previous_migration.pk)
|
||||
if source_data.previous_migration
|
||||
else {}
|
||||
),
|
||||
target_package_id=migration.target.pk,
|
||||
target_library_key=target_library.key,
|
||||
source_context_key=source_data.source_root_usage_key.course_key,
|
||||
@@ -401,7 +369,6 @@ def _import_structure(
|
||||
created_by=status.user_id,
|
||||
created_at=datetime.now(timezone.utc),
|
||||
)
|
||||
|
||||
with authoring_api.bulk_draft_changes_for(migration.target.id) as change_log:
|
||||
root_migrated_node = _migrate_node(
|
||||
context=migration_context,
|
||||
@@ -411,11 +378,11 @@ def _import_structure(
|
||||
return change_log, root_migrated_node
|
||||
|
||||
|
||||
def _forwarding_content(source_data: _MigrationSourceData) -> None:
|
||||
def _forward_content(source_data: _MigrationSourceData) -> None:
|
||||
"""
|
||||
Forwarding legacy content to migrated content
|
||||
"""
|
||||
block_migrations = ModulestoreBlockMigration.objects.filter(overall_migration=source_data.migration)
|
||||
block_migrations = models.ModulestoreBlockMigration.objects.filter(overall_migration=source_data.migration)
|
||||
block_sources_to_block_migrations = {
|
||||
block_migration.source: block_migration for block_migration in block_migrations
|
||||
}
|
||||
@@ -427,7 +394,7 @@ def _forwarding_content(source_data: _MigrationSourceData) -> None:
|
||||
source_data.source.save()
|
||||
|
||||
|
||||
def _populate_collection(user_id: int, migration: ModulestoreMigration) -> None:
|
||||
def _populate_collection(user_id: int, migration: models.ModulestoreMigration) -> None:
|
||||
"""
|
||||
Assigning imported items to the specified collection in the migration
|
||||
"""
|
||||
@@ -435,7 +402,7 @@ def _populate_collection(user_id: int, migration: ModulestoreMigration) -> None:
|
||||
return
|
||||
|
||||
block_target_pks: list[int] = list(
|
||||
ModulestoreBlockMigration.objects.filter(
|
||||
models.ModulestoreBlockMigration.objects.filter(
|
||||
overall_migration=migration
|
||||
).values_list('target_id', flat=True)
|
||||
)
|
||||
@@ -485,186 +452,12 @@ def _set_migrations_to_fail(source_data_list: list[_MigrationSourceData]):
|
||||
for source_data in source_data_list:
|
||||
source_data.migration.is_failed = True
|
||||
|
||||
ModulestoreMigration.objects.bulk_update(
|
||||
models.ModulestoreMigration.objects.bulk_update(
|
||||
[x.migration for x in source_data_list],
|
||||
["is_failed"],
|
||||
)
|
||||
|
||||
|
||||
@shared_task(base=_MigrationTask, bind=True)
|
||||
# Note: The decorator @set_code_owner_attribute cannot be used here because the UserTaskMixin
|
||||
# does stack inspection and can't handle additional decorators.
|
||||
def migrate_from_modulestore(
|
||||
self: _MigrationTask,
|
||||
*,
|
||||
user_id: int,
|
||||
source_pk: int,
|
||||
target_library_key: str,
|
||||
target_collection_pk: int | None,
|
||||
repeat_handling_strategy: str,
|
||||
preserve_url_slugs: bool,
|
||||
composition_level: str,
|
||||
forward_source_to_target: bool,
|
||||
) -> None:
|
||||
"""
|
||||
Import a single course or legacy library from modulestore into a V2 legacy library.
|
||||
|
||||
This task performs the end-to-end migration for one legacy source (course or library),
|
||||
including staging, parsing OLX, importing assets and structure, and assigning the
|
||||
migrated content to the specified target library and collection.
|
||||
|
||||
A new `UserTaskStatus` entry is created for each invocation of this task, meaning
|
||||
that each migration runs independently with its own progress tracking and final
|
||||
success or failure state.
|
||||
|
||||
If the migration encounters an unrecoverable error at any step (for example, invalid
|
||||
OLX, missing assets, or database constraints), the task is marked as **failed** and
|
||||
the partial results are rolled back as necessary. The migration state can be queried
|
||||
through the REST API endpoint `/api/modulestore_migrator/v1/migrations/<uuid>/`.
|
||||
|
||||
Args:
|
||||
self (_MigrationTask):
|
||||
The Celery task instance that wraps the user task logic.
|
||||
user_id (int):
|
||||
The ID of the user initiating the migration.
|
||||
source_pk (int):
|
||||
Primary key of the modulestore source to migrate.
|
||||
target_library_key (str):
|
||||
Key of the target V2 library that will receive the imported content.
|
||||
target_collection_pk (int | None):
|
||||
Optional ID of a target collection to which imported content will be assigned.
|
||||
repeat_handling_strategy (str):
|
||||
Strategy for handling repeated imports (e.g., "skip", "update").
|
||||
preserve_url_slugs (bool):
|
||||
Whether to preserve original XBlock URL slugs during import.
|
||||
composition_level (str):
|
||||
The structural level to migrate (e.g., component, unit, or section).
|
||||
forward_source_to_target (bool):
|
||||
Whether to forward legacy content references to the migrated content after import.
|
||||
|
||||
See Also:
|
||||
- `bulk_migrate_from_modulestore`: Multi-source batch migration equivalent.
|
||||
- API docs: `/api/cms/v1/migrations/` for REST behavior and responses.
|
||||
"""
|
||||
|
||||
# pylint: disable=too-many-statements
|
||||
# This is a large function, but breaking it up futher would probably not
|
||||
# make it any easier to understand.
|
||||
|
||||
set_code_owner_attribute_from_module(__name__)
|
||||
status: UserTaskStatus = self.status
|
||||
|
||||
# Validating input
|
||||
status.set_state(MigrationStep.VALIDATING_INPUT.value)
|
||||
try:
|
||||
target_library = get_library(LibraryLocatorV2.from_string(target_library_key))
|
||||
if target_library.learning_package_id is None:
|
||||
raise ValueError("Target library has no associated learning package.")
|
||||
|
||||
target_package = LearningPackage.objects.get(pk=target_library.learning_package_id)
|
||||
target_collection = Collection.objects.get(pk=target_collection_pk) if target_collection_pk else None
|
||||
except (ObjectDoesNotExist, InvalidKeyError) as exc:
|
||||
status.fail(str(exc))
|
||||
return
|
||||
|
||||
source_data = _validate_input(
|
||||
status,
|
||||
source_pk,
|
||||
repeat_handling_strategy,
|
||||
preserve_url_slugs,
|
||||
composition_level,
|
||||
target_package,
|
||||
target_collection,
|
||||
)
|
||||
if source_data is None:
|
||||
# Fail
|
||||
return
|
||||
|
||||
migration = source_data.migration
|
||||
status.increment_completed_steps()
|
||||
|
||||
try:
|
||||
# Cancelling old tasks
|
||||
status.set_state(MigrationStep.CANCELLING_OLD.value)
|
||||
_cancel_old_tasks([source_data.source], status, target_package, [migration.id])
|
||||
status.increment_completed_steps()
|
||||
|
||||
# Loading `legacy_root`
|
||||
status.set_state(MigrationStep.LOADING)
|
||||
legacy_root = _load_xblock(status, source_data.source_root_usage_key)
|
||||
if legacy_root is None:
|
||||
# Fail
|
||||
_set_migrations_to_fail([source_data])
|
||||
return
|
||||
status.increment_completed_steps()
|
||||
|
||||
# Staging legacy block
|
||||
status.set_state(MigrationStep.STAGING.value)
|
||||
staged_content = staging_api.stage_xblock_temporarily(
|
||||
block=legacy_root,
|
||||
user_id=status.user.pk,
|
||||
purpose=CONTENT_STAGING_PURPOSE_TEMPLATE.format(source_key=source_pk),
|
||||
)
|
||||
migration.staged_content = staged_content
|
||||
status.increment_completed_steps()
|
||||
|
||||
# Parsing OLX
|
||||
status.set_state(MigrationStep.PARSING.value)
|
||||
parser = etree.XMLParser(strip_cdata=False)
|
||||
try:
|
||||
root_node = etree.fromstring(staged_content.olx, parser=parser)
|
||||
except etree.ParseError as exc:
|
||||
status.fail(f"Failed to parse source OLX (from staged content with id = {staged_content.id}): {exc}")
|
||||
_set_migrations_to_fail([source_data])
|
||||
return
|
||||
status.increment_completed_steps()
|
||||
|
||||
# Importing assets of the legacy block
|
||||
status.set_state(MigrationStep.IMPORTING_ASSETS.value)
|
||||
content_by_filename = _import_assets(migration)
|
||||
status.increment_completed_steps()
|
||||
|
||||
# Importing structure of the legacy block
|
||||
status.set_state(MigrationStep.IMPORTING_STRUCTURE.value)
|
||||
change_log, root_migrated_node = _import_structure(
|
||||
migration,
|
||||
source_data,
|
||||
target_library,
|
||||
content_by_filename,
|
||||
root_node,
|
||||
status,
|
||||
)
|
||||
migration.change_log = change_log
|
||||
status.increment_completed_steps()
|
||||
|
||||
status.set_state(MigrationStep.UNSTAGING.value)
|
||||
staged_content.delete()
|
||||
status.increment_completed_steps()
|
||||
|
||||
_create_migration_artifacts_incrementally(
|
||||
root_migrated_node=root_migrated_node,
|
||||
source=source_data.source,
|
||||
migration=migration,
|
||||
status=status,
|
||||
)
|
||||
status.increment_completed_steps()
|
||||
|
||||
# Forwarding legacy content to migrated content
|
||||
status.set_state(MigrationStep.FORWARDING.value)
|
||||
if forward_source_to_target:
|
||||
_forwarding_content(source_data)
|
||||
status.increment_completed_steps()
|
||||
|
||||
# Populating the collection
|
||||
status.set_state(MigrationStep.POPULATING_COLLECTION.value)
|
||||
if target_collection:
|
||||
_populate_collection(user_id, migration)
|
||||
status.increment_completed_steps()
|
||||
except Exception as exc: # pylint: disable=broad-exception-caught
|
||||
_set_migrations_to_fail([source_data])
|
||||
status.fail(str(exc))
|
||||
|
||||
|
||||
@shared_task(base=_BulkMigrationTask, bind=True)
|
||||
# Note: The decorator @set_code_owner_attribute cannot be used here because the UserTaskMixin
|
||||
# does stack inspection and can't handle additional decorators.
|
||||
@@ -679,19 +472,14 @@ def bulk_migrate_from_modulestore(
|
||||
repeat_handling_strategy: str,
|
||||
preserve_url_slugs: bool,
|
||||
composition_level: str,
|
||||
forward_source_to_target: bool,
|
||||
forward_source_to_target: bool | None,
|
||||
) -> None:
|
||||
"""
|
||||
Import multiple legacy courses or libraries into a single V2 library.
|
||||
|
||||
This task performs the same logical steps as `migrate_from_modulestore`, but allows
|
||||
batching several migrations together under **one single user task** (`UserTaskStatus`).
|
||||
|
||||
Unlike running `migrate_from_modulestore` in a loop (which would create multiple
|
||||
independent Celery tasks and separate statuses), the bulk migration maintains
|
||||
**one unified status record** that tracks progress across all included sources.
|
||||
This simplifies monitoring, since the client only needs to observe one task state.
|
||||
|
||||
The bulk migration maintains **one unified status record** that tracks progress across
|
||||
all included sources. This simplifies monitoring, since the client only needs to observe
|
||||
one task state.
|
||||
Each source item (course or library) still creates its own `ModulestoreMigration`
|
||||
database record, but all of them share the same parent task (`UserTaskStatus`).
|
||||
If any sub-migration fails (for example, due to invalid OLX or missing assets),
|
||||
@@ -716,8 +504,10 @@ def bulk_migrate_from_modulestore(
|
||||
Whether to preserve existing XBlock URL slugs during import.
|
||||
composition_level (str):
|
||||
Composition level at which content should be imported (e.g. course, section).
|
||||
forward_source_to_target (bool):
|
||||
forward_source_to_target (bool | None)
|
||||
Whether to forward legacy content to its migrated equivalent after import.
|
||||
If unspecified (None), then forward legacy content for a source if and only
|
||||
if it's that source's first migration.
|
||||
|
||||
See Also:
|
||||
- `migrate_from_modulestore`: Single-source migration equivalent.
|
||||
@@ -760,13 +550,13 @@ def bulk_migrate_from_modulestore(
|
||||
repeat_handling_strategy,
|
||||
preserve_url_slugs,
|
||||
composition_level,
|
||||
target_library_locator,
|
||||
target_package,
|
||||
target_collection_list[i] if target_collection_list else None,
|
||||
)
|
||||
if source_data is None:
|
||||
# Fail
|
||||
return
|
||||
|
||||
source_data_list.append(source_data)
|
||||
|
||||
status.increment_completed_steps()
|
||||
@@ -833,14 +623,14 @@ def bulk_migrate_from_modulestore(
|
||||
f"{MigrationStep.IMPORTING_STRUCTURE.value}"
|
||||
)
|
||||
change_log, root_migrated_node = _import_structure(
|
||||
source_data.migration,
|
||||
source_data,
|
||||
target_library,
|
||||
content_by_filename,
|
||||
root_node,
|
||||
status,
|
||||
source_data=source_data,
|
||||
target_library=target_library,
|
||||
content_by_filename=content_by_filename,
|
||||
root_node=root_node,
|
||||
status=status,
|
||||
)
|
||||
source_data.migration.change_log = change_log
|
||||
source_data.migration.save() # @@TODO keep or nah?
|
||||
status.increment_completed_steps()
|
||||
|
||||
status.set_state(
|
||||
@@ -857,7 +647,8 @@ def bulk_migrate_from_modulestore(
|
||||
source_pk=source_pk,
|
||||
)
|
||||
status.increment_completed_steps()
|
||||
except: # pylint: disable=bare-except
|
||||
except Exception as _exc: # pylint: disable=broad-exception-caught
|
||||
log.exception("Failed: {source_data.migration}")
|
||||
# Mark this library as failed, migration of other libraries can continue
|
||||
# If this case occurs and the migration ends without any further issues,
|
||||
# the bulk migration status is success,
|
||||
@@ -866,70 +657,54 @@ def bulk_migrate_from_modulestore(
|
||||
|
||||
# Forwarding legacy content to migrated content
|
||||
status.set_state(MigrationStep.FORWARDING.value)
|
||||
if forward_source_to_target:
|
||||
for source_data in source_data_list:
|
||||
if not source_data.migration.is_failed:
|
||||
_forwarding_content(source_data)
|
||||
for source_data in source_data_list:
|
||||
if forward_source_to_target is False:
|
||||
continue # Explicitly requested not to forward.
|
||||
if forward_source_to_target is None and source_data.source.forwarded:
|
||||
# Unspecified whether or not to forward.
|
||||
# So, forward iff there was no previous existing successful migration with forwarding.
|
||||
continue
|
||||
if source_data.migration.is_failed:
|
||||
# Don't forward failed migrations.
|
||||
continue
|
||||
_forward_content(source_data)
|
||||
status.increment_completed_steps()
|
||||
|
||||
# Populating collections
|
||||
status.set_state(MigrationStep.POPULATING_COLLECTION.value)
|
||||
|
||||
# Used to check if the source has a previous migration in a V2 library collection
|
||||
# It is placed here to avoid the circular import
|
||||
from .api import get_migration_info
|
||||
for i, source_data in enumerate(source_data_list):
|
||||
migration = source_data.migration
|
||||
if migration.is_failed:
|
||||
continue
|
||||
|
||||
title = legacy_root_list[i].display_name
|
||||
if migration.target_collection is None and not create_collections:
|
||||
continue
|
||||
if migration.target_collection is None:
|
||||
if not create_collections:
|
||||
continue
|
||||
|
||||
source_key = source_data.source.key
|
||||
|
||||
if migration.repeat_handling_strategy == RepeatHandlingStrategy.Fork.value:
|
||||
# Create a new collection when it is Fork
|
||||
migration.target_collection = _create_collection(target_library_locator, title)
|
||||
else:
|
||||
# It is Skip or Update
|
||||
# We need to verify if there is a previous migration with collection
|
||||
# TODO: This only fetches the latest migration, if different migrations have been done
|
||||
# on different V2 libraries, this could break the logic.
|
||||
previous_migration = get_migration_info([source_key])
|
||||
if (
|
||||
source_key in previous_migration
|
||||
and previous_migration[source_key].migrations__target_collection__key
|
||||
):
|
||||
# Has previous migration with collection
|
||||
try:
|
||||
# Get the previous collection
|
||||
previous_collection = authoring_api.get_collection(
|
||||
target_package.id,
|
||||
previous_migration[source_key].migrations__target_collection__key,
|
||||
)
|
||||
|
||||
migration.target_collection = previous_collection
|
||||
except Collection.DoesNotExist:
|
||||
# The collection no longer exists or is being migrated to a different library.
|
||||
# In that case, create a new collection independent of strategy
|
||||
migration.target_collection = _create_collection(target_library_locator, title)
|
||||
else:
|
||||
# Create collection and save in migration
|
||||
migration.target_collection = _create_collection(target_library_locator, title)
|
||||
|
||||
existing_collection_to_use: Collection | None = None
|
||||
# For Fork strategy: Create an new collection every time.
|
||||
# For Update and Skip strategies: Update an existing collection if possible.
|
||||
if migration.repeat_handling_strategy != RepeatHandlingStrategy.Fork.value:
|
||||
if source_data.previous_migration:
|
||||
if previous_collection_slug := source_data.previous_migration.target_collection_slug:
|
||||
try:
|
||||
existing_collection_to_use = authoring_api.get_collection(
|
||||
target_package.id, previous_collection_slug
|
||||
)
|
||||
except Collection.DoesNotExist:
|
||||
# Collection no longer exists.
|
||||
pass
|
||||
migration.target_collection = (
|
||||
existing_collection_to_use or
|
||||
_create_collection(library_key=target_library_locator, title=legacy_root_list[i].display_name)
|
||||
)
|
||||
_populate_collection(user_id, migration)
|
||||
|
||||
ModulestoreMigration.objects.bulk_update(
|
||||
models.ModulestoreMigration.objects.bulk_update(
|
||||
[x.migration for x in source_data_list],
|
||||
["target_collection", "is_failed"],
|
||||
)
|
||||
status.increment_completed_steps()
|
||||
except Exception as exc: # pylint: disable=broad-exception-caught
|
||||
# If there is an exception in this block, all migrations fail.
|
||||
_set_migrations_to_fail(source_data_list)
|
||||
log.exception("Modulestore migrations failed")
|
||||
status.fail(str(exc))
|
||||
|
||||
|
||||
@@ -1044,7 +819,6 @@ def _migrate_node(
|
||||
)
|
||||
).format(count=children_length)
|
||||
source_to_target = (source_key, target_entity_version, reason)
|
||||
context.add_migration(source_key, target_entity_version.entity if target_entity_version else None)
|
||||
else:
|
||||
log.warning(
|
||||
f"Cannot migrate node from {context.source_context_key} to {context.target_library_key} "
|
||||
@@ -1122,6 +896,7 @@ def _migrate_container(
|
||||
context.created_by,
|
||||
call_post_publish_events_sync=True,
|
||||
)
|
||||
context.used_container_slugs.add(container.container_key.container_id)
|
||||
return container_publishable_entity_version, None
|
||||
|
||||
|
||||
@@ -1193,13 +968,14 @@ def _migrate_component(
|
||||
)
|
||||
|
||||
# Publish the component
|
||||
libraries_api.publish_component_changes(
|
||||
libraries_api.library_component_usage_key(context.target_library_key, component),
|
||||
context.created_by,
|
||||
)
|
||||
libraries_api.publish_component_changes(target_key, context.created_by)
|
||||
context.used_component_keys.add(target_key)
|
||||
return component_version.publishable_entity_version, None
|
||||
|
||||
|
||||
_MAX_UNIQUE_SLUG_ATTEMPTS = 1000
|
||||
|
||||
|
||||
def _get_distinct_target_container_key(
|
||||
context: _MigrationContext,
|
||||
source_key: UsageKey,
|
||||
@@ -1207,41 +983,36 @@ def _get_distinct_target_container_key(
|
||||
title: str,
|
||||
) -> LibraryContainerLocator:
|
||||
"""
|
||||
Find a unique key for block_id by appending a unique identifier if necessary.
|
||||
|
||||
Args:
|
||||
context (_MigrationContext): The migration context.
|
||||
source_key (UsageKey): The source key.
|
||||
container_type (ContainerType): The container type.
|
||||
title (str): The title.
|
||||
|
||||
Returns:
|
||||
LibraryContainerLocator: The target container key.
|
||||
Figure out the appropriate target container for this structural block.
|
||||
"""
|
||||
# Check if we already processed this block and we are not forking. If we are forking, we will
|
||||
# want a new target key.
|
||||
if context.is_already_successfully_migrated(source_key) and not context.should_fork_strategy:
|
||||
existing_version = context.get_existing_target(source_key)
|
||||
# This is not possible, just to satisfy type checker.
|
||||
assert existing_version is not None
|
||||
|
||||
return LibraryContainerLocator(
|
||||
context.target_library_key,
|
||||
container_type.value,
|
||||
existing_version.key
|
||||
)
|
||||
# If we're not forking, then check if this block was part of our past migration.
|
||||
# (If we are forking, we will always want a new target key).
|
||||
if not context.should_fork_strategy:
|
||||
if previous_block_migration := context.previous_block_migrations.get(source_key):
|
||||
if isinstance(previous_block_migration, data.ModulestoreBlockMigrationSuccess):
|
||||
if isinstance(previous_block_migration.target_key, LibraryContainerLocator):
|
||||
return previous_block_migration.target_key
|
||||
# Generate new unique block ID
|
||||
base_slug = (
|
||||
source_key.block_id
|
||||
if context.preserve_url_slugs
|
||||
else (slugify(title) or source_key.block_id)
|
||||
)
|
||||
unique_slug = _find_unique_slug(context, base_slug)
|
||||
|
||||
return LibraryContainerLocator(
|
||||
context.target_library_key,
|
||||
container_type.value,
|
||||
unique_slug
|
||||
# Use base base slug if available
|
||||
if base_slug not in context.used_container_slugs:
|
||||
return LibraryContainerLocator(
|
||||
context.target_library_key, container_type.value, base_slug
|
||||
)
|
||||
# Try numbered variations until we find one that doesn't exist
|
||||
for i in range(1, _MAX_UNIQUE_SLUG_ATTEMPTS + 1):
|
||||
candidate_slug = f"{base_slug}_{i}"
|
||||
if candidate_slug not in context.used_container_slugs:
|
||||
return LibraryContainerLocator(
|
||||
context.target_library_key, container_type.value, candidate_slug
|
||||
)
|
||||
# It would be extremely unlikely for us to run out of attempts
|
||||
raise RuntimeError(
|
||||
f"Unable to find unique slug after {_MAX_UNIQUE_SLUG_ATTEMPTS} attempts for base: {base_slug}"
|
||||
)
|
||||
|
||||
|
||||
@@ -1252,99 +1023,43 @@ def _get_distinct_target_usage_key(
|
||||
title: str,
|
||||
) -> LibraryUsageLocatorV2:
|
||||
"""
|
||||
Find a unique key for block_id by appending a unique identifier if necessary.
|
||||
|
||||
Args:
|
||||
context: The migration context
|
||||
source_key: The original usage key from the source
|
||||
component_type: The component type string
|
||||
olx: The OLX content of the component
|
||||
|
||||
Returns:
|
||||
A unique LibraryUsageLocatorV2 for the target
|
||||
|
||||
Raises:
|
||||
ValueError: If source_key is invalid
|
||||
Figure out the appropriate target component for this block.
|
||||
"""
|
||||
# Check if we already processed this block and we are not forking. If we are forking, we will
|
||||
# want a new target key.
|
||||
if context.is_already_successfully_migrated(source_key) and not context.should_fork_strategy:
|
||||
log.debug(f"Block {source_key} already exists, reusing first existing target")
|
||||
existing_target = context.get_existing_target(source_key)
|
||||
# This is not possible, just to satisfy type checker.
|
||||
assert existing_target is not None
|
||||
block_id = existing_target.component.local_key
|
||||
|
||||
# mypy thinks LibraryUsageLocatorV2 is abstract. It's not.
|
||||
return LibraryUsageLocatorV2( # type: ignore[abstract]
|
||||
context.target_library_key,
|
||||
source_key.block_type,
|
||||
block_id
|
||||
)
|
||||
|
||||
# If we're not forking, then check if this block was part of our past migration.
|
||||
# (If we are forking, we will always want a new target key).
|
||||
if not context.should_fork_strategy:
|
||||
if previous_block_migration := context.previous_block_migrations.get(source_key):
|
||||
if isinstance(previous_block_migration, data.ModulestoreBlockMigrationSuccess):
|
||||
if isinstance(previous_block_migration.target_key, LibraryUsageLocatorV2):
|
||||
return previous_block_migration.target_key
|
||||
# Generate new unique block ID
|
||||
base_slug = (
|
||||
source_key.block_id
|
||||
if context.preserve_url_slugs
|
||||
else (slugify(title) or source_key.block_id)
|
||||
)
|
||||
unique_slug = _find_unique_slug(context, base_slug, component_type)
|
||||
|
||||
# mypy thinks LibraryUsageLocatorV2 is abstract. It's not.
|
||||
return LibraryUsageLocatorV2( # type: ignore[abstract]
|
||||
context.target_library_key,
|
||||
source_key.block_type,
|
||||
unique_slug
|
||||
# Use base base slug if available
|
||||
base_key = LibraryUsageLocatorV2( # type: ignore[abstract]
|
||||
context.target_library_key, component_type.name, base_slug
|
||||
)
|
||||
|
||||
|
||||
def _find_unique_slug(
|
||||
context: _MigrationContext,
|
||||
base_slug: str,
|
||||
component_type: ComponentType | None = None,
|
||||
max_attempts: int = 1000
|
||||
) -> str:
|
||||
"""
|
||||
Find a unique slug by appending incrementing numbers if necessary.
|
||||
Using batch querying to avoid multiple database roundtrips.
|
||||
|
||||
Args:
|
||||
component_type: The component type to check against
|
||||
base_slug: The base slug to make unique
|
||||
max_attempts: Maximum number of attempts to prevent infinite loops
|
||||
|
||||
Returns:
|
||||
A unique slug string
|
||||
|
||||
Raises:
|
||||
RuntimeError: If unable to find unique slug within max_attempts
|
||||
"""
|
||||
if not component_type:
|
||||
base_key = base_slug
|
||||
else:
|
||||
base_key = f"{component_type}:{base_slug}"
|
||||
|
||||
existing_publishable_entity_keys = context.get_existing_target_entity_keys(base_key)
|
||||
|
||||
# Check if base slug is available
|
||||
if base_key not in existing_publishable_entity_keys:
|
||||
return base_slug
|
||||
|
||||
if base_key not in context.used_component_keys:
|
||||
return base_key
|
||||
# Try numbered variations until we find one that doesn't exist
|
||||
for i in range(1, max_attempts + 1):
|
||||
for i in range(1, _MAX_UNIQUE_SLUG_ATTEMPTS + 1):
|
||||
candidate_slug = f"{base_slug}_{i}"
|
||||
candidate_key = f"{component_type}:{candidate_slug}" if component_type else candidate_slug
|
||||
|
||||
if candidate_key not in existing_publishable_entity_keys:
|
||||
return candidate_slug
|
||||
|
||||
raise RuntimeError(f"Unable to find unique slug after {max_attempts} attempts for base: {base_slug}")
|
||||
candidate_key = LibraryUsageLocatorV2( # type: ignore[abstract]
|
||||
context.target_library_key, component_type.name, candidate_slug
|
||||
)
|
||||
if candidate_key not in context.used_component_keys:
|
||||
return candidate_key
|
||||
# It would be extremely unlikely for us to run out of attempts
|
||||
raise RuntimeError(f"Unable to find unique slug after {_MAX_UNIQUE_SLUG_ATTEMPTS} attempts for base: {base_slug}")
|
||||
|
||||
|
||||
def _create_migration_artifacts_incrementally(
|
||||
root_migrated_node: _MigratedNode,
|
||||
source: ModulestoreSource,
|
||||
migration: ModulestoreMigration,
|
||||
source: models.ModulestoreSource,
|
||||
migration: models.ModulestoreMigration,
|
||||
status: UserTaskStatus,
|
||||
source_pk: int | None = None,
|
||||
) -> None:
|
||||
@@ -1355,18 +1070,34 @@ def _create_migration_artifacts_incrementally(
|
||||
total_nodes = len(nodes)
|
||||
processed = 0
|
||||
|
||||
for source_usage_key, target_version, reason in root_migrated_node.all_source_to_target_pairs():
|
||||
block_source, _ = ModulestoreBlockSource.objects.get_or_create(
|
||||
# Load a mapping from each modified entity's primary key
|
||||
# to the primary key of the changelog record that captures its modification.
|
||||
# This will not include any blocks whose migration failed to create a target entity.
|
||||
entity_pks_to_change_log_record_pks: dict[int, int] = dict(
|
||||
migration.change_log.records.values_list("entity_id", "id")
|
||||
) if migration.change_log else {}
|
||||
|
||||
for source_usage_key, target_version, unsupported_reason in root_migrated_node.all_source_to_target_pairs():
|
||||
block_source, _ = models.ModulestoreBlockSource.objects.get_or_create(
|
||||
overall_source=source,
|
||||
key=source_usage_key
|
||||
)
|
||||
# target_entity_pk should be None iff the block migration failed
|
||||
target_entity_pk: int | None = target_version.entity_id if target_version else None
|
||||
|
||||
ModulestoreBlockMigration.objects.create(
|
||||
overall_migration=migration,
|
||||
source=block_source,
|
||||
target_id=target_version.entity_id if target_version else None,
|
||||
unsupported_reason=reason,
|
||||
)
|
||||
change_log_record_pk = entity_pks_to_change_log_record_pks.get(target_entity_pk) if target_entity_pk else None
|
||||
# Only create a migration artifact for this source block if:
|
||||
# (a) we have a record of a change occuring, or
|
||||
# (b) it failed.
|
||||
# If neither a nor b are true, then this source block was skipped.
|
||||
if change_log_record_pk or unsupported_reason:
|
||||
models.ModulestoreBlockMigration.objects.create(
|
||||
overall_migration=migration,
|
||||
source=block_source,
|
||||
target_id=target_entity_pk,
|
||||
change_log_record_id=change_log_record_pk,
|
||||
unsupported_reason=unsupported_reason,
|
||||
)
|
||||
|
||||
processed += 1
|
||||
if processed % 10 == 0 or processed == total_nodes:
|
||||
|
||||
@@ -3,11 +3,10 @@ Test cases for the modulestore migrator API.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from opaque_keys.edx.locator import LibraryLocatorV2
|
||||
from opaque_keys.edx.locator import LibraryLocator, LibraryLocatorV2
|
||||
from openedx_learning.api import authoring as authoring_api
|
||||
from organizations.tests.factories import OrganizationFactory
|
||||
|
||||
from cms.djangoapps.contentstore.tests.test_libraries import LibraryTestCase
|
||||
from cms.djangoapps.modulestore_migrator import api
|
||||
from cms.djangoapps.modulestore_migrator.data import CompositionLevel, RepeatHandlingStrategy
|
||||
from cms.djangoapps.modulestore_migrator.models import ModulestoreMigration
|
||||
@@ -15,42 +14,50 @@ from cms.djangoapps.modulestore_migrator.tests.factories import ModulestoreSourc
|
||||
from common.djangoapps.student.tests.factories import UserFactory
|
||||
from openedx.core.djangoapps.content_libraries import api as lib_api
|
||||
|
||||
from xmodule.modulestore.tests.utils import ModuleStoreTestCase
|
||||
from xmodule.modulestore.tests.factories import BlockFactory, LibraryFactory
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestModulestoreMigratorAPI(LibraryTestCase):
|
||||
class TestModulestoreMigratorAPI(ModuleStoreTestCase):
|
||||
"""
|
||||
Test cases for the modulestore migrator API.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
|
||||
self.organization = OrganizationFactory()
|
||||
self.lib_key_v2 = LibraryLocatorV2.from_string(
|
||||
f"lib:{self.organization.short_name}:test-key"
|
||||
)
|
||||
self.lib_key_v2_2 = LibraryLocatorV2.from_string(
|
||||
f"lib:{self.organization.short_name}:test-key-2"
|
||||
)
|
||||
lib_api.create_library(
|
||||
org=self.organization,
|
||||
slug=self.lib_key_v2.slug,
|
||||
title="Test Library",
|
||||
)
|
||||
lib_api.create_library(
|
||||
org=self.organization,
|
||||
slug=self.lib_key_v2_2.slug,
|
||||
title="Test Library 2",
|
||||
)
|
||||
self.library_v2 = lib_api.ContentLibrary.objects.get(slug=self.lib_key_v2.slug)
|
||||
self.library_v2_2 = lib_api.ContentLibrary.objects.get(slug=self.lib_key_v2_2.slug)
|
||||
self.learning_package = self.library_v2.learning_package
|
||||
self.user = UserFactory(password=self.user_password, is_staff=True)
|
||||
self.organization = OrganizationFactory(name="My Org", short_name="myorg")
|
||||
self.lib_key_v1 = LibraryLocator.from_string("library-v1:myorg+old")
|
||||
LibraryFactory.create(org="myorg", library="old", display_name="Old Library", modulestore=self.store)
|
||||
self.lib_key_v2_1 = LibraryLocatorV2.from_string("lib:myorg:1")
|
||||
self.lib_key_v2_2 = LibraryLocatorV2.from_string("lib:myorg:2")
|
||||
lib_api.create_library(org=self.organization, slug="1", title="Test Library 1")
|
||||
lib_api.create_library(org=self.organization, slug="2", title="Test Library 2")
|
||||
self.library_v2_1 = lib_api.ContentLibrary.objects.get(slug="1")
|
||||
self.library_v2_2 = lib_api.ContentLibrary.objects.get(slug="2")
|
||||
self.learning_package = self.library_v2_1.learning_package
|
||||
self.learning_package_2 = self.library_v2_2.learning_package
|
||||
self.blocks = []
|
||||
for _ in range(3):
|
||||
self.blocks.append(self._add_simple_content_block().usage_key)
|
||||
self.source_unit_keys = [
|
||||
BlockFactory.create(
|
||||
display_name=f"Unit {c}",
|
||||
category="vertical",
|
||||
location=self.lib_key_v1.make_usage_key("vertical", c),
|
||||
parent_location=self.lib_key_v1.make_usage_key("library", "library"),
|
||||
user_id=self.user.id, publish_item=False,
|
||||
).usage_key for c in ["X", "Y", "Z"]
|
||||
]
|
||||
self.source_html_keys = [
|
||||
BlockFactory.create(
|
||||
display_name=f"HTML {c}",
|
||||
category="html",
|
||||
location=self.lib_key_v1.make_usage_key("html", c),
|
||||
parent_location=self.lib_key_v1.make_usage_key("vertical", c),
|
||||
user_id=self.user.id, publish_item=False,
|
||||
).usage_key for c in ["X", "Y", "Z"]
|
||||
]
|
||||
# We load this last so that it has an updated list of children.
|
||||
self.lib_v1 = self.store.get_library(self.lib_key_v1)
|
||||
|
||||
def test_start_migration_to_library(self):
|
||||
"""
|
||||
@@ -62,10 +69,10 @@ class TestModulestoreMigratorAPI(LibraryTestCase):
|
||||
api.start_migration_to_library(
|
||||
user=user,
|
||||
source_key=source.key,
|
||||
target_library_key=self.library_v2.library_key,
|
||||
target_library_key=self.library_v2_1.library_key,
|
||||
target_collection_slug=None,
|
||||
composition_level=CompositionLevel.Component.value,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip.value,
|
||||
composition_level=CompositionLevel.Component,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
forward_source_to_target=False,
|
||||
)
|
||||
@@ -91,10 +98,10 @@ class TestModulestoreMigratorAPI(LibraryTestCase):
|
||||
api.start_bulk_migration_to_library(
|
||||
user=user,
|
||||
source_key_list=[source.key, source_2.key],
|
||||
target_library_key=self.library_v2.library_key,
|
||||
target_library_key=self.library_v2_1.library_key,
|
||||
target_collection_slug_list=None,
|
||||
composition_level=CompositionLevel.Component.value,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip.value,
|
||||
composition_level=CompositionLevel.Component,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
forward_source_to_target=False,
|
||||
)
|
||||
@@ -138,10 +145,10 @@ class TestModulestoreMigratorAPI(LibraryTestCase):
|
||||
api.start_migration_to_library(
|
||||
user=user,
|
||||
source_key=source.key,
|
||||
target_library_key=self.library_v2.library_key,
|
||||
target_library_key=self.library_v2_1.library_key,
|
||||
target_collection_slug=collection_key,
|
||||
composition_level=CompositionLevel.Component.value,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip.value,
|
||||
composition_level=CompositionLevel.Component,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
forward_source_to_target=False,
|
||||
)
|
||||
@@ -167,9 +174,9 @@ class TestModulestoreMigratorAPI(LibraryTestCase):
|
||||
api.start_migration_to_library(
|
||||
user=user,
|
||||
source_key=source.key,
|
||||
target_library_key=self.library_v2.library_key,
|
||||
composition_level=CompositionLevel.Component.value,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip.value,
|
||||
target_library_key=self.library_v2_1.library_key,
|
||||
composition_level=CompositionLevel.Component,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
forward_source_to_target=False,
|
||||
)
|
||||
@@ -177,7 +184,7 @@ class TestModulestoreMigratorAPI(LibraryTestCase):
|
||||
modulestoremigration = ModulestoreMigration.objects.get()
|
||||
assert modulestoremigration.repeat_handling_strategy == RepeatHandlingStrategy.Skip.value
|
||||
|
||||
migrated_components = lib_api.get_library_components(self.library_v2.library_key)
|
||||
migrated_components = lib_api.get_library_components(self.library_v2_1.library_key)
|
||||
assert len(migrated_components) == 1
|
||||
|
||||
# Update the block, changing its name
|
||||
@@ -188,9 +195,9 @@ class TestModulestoreMigratorAPI(LibraryTestCase):
|
||||
api.start_migration_to_library(
|
||||
user=user,
|
||||
source_key=source.key,
|
||||
target_library_key=self.library_v2.library_key,
|
||||
composition_level=CompositionLevel.Component.value,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip.value,
|
||||
target_library_key=self.library_v2_1.library_key,
|
||||
composition_level=CompositionLevel.Component,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
forward_source_to_target=False,
|
||||
)
|
||||
@@ -199,11 +206,11 @@ class TestModulestoreMigratorAPI(LibraryTestCase):
|
||||
assert modulestoremigration is not None
|
||||
assert modulestoremigration.repeat_handling_strategy == RepeatHandlingStrategy.Skip.value
|
||||
|
||||
migrated_components_fork = lib_api.get_library_components(self.library_v2.library_key)
|
||||
migrated_components_fork = lib_api.get_library_components(self.library_v2_1.library_key)
|
||||
assert len(migrated_components_fork) == 1
|
||||
|
||||
component = lib_api.LibraryXBlockMetadata.from_component(
|
||||
self.library_v2.library_key, migrated_components_fork[0]
|
||||
self.library_v2_1.library_key, migrated_components_fork[0]
|
||||
)
|
||||
assert component.display_name == "Original Block"
|
||||
|
||||
@@ -225,9 +232,9 @@ class TestModulestoreMigratorAPI(LibraryTestCase):
|
||||
api.start_migration_to_library(
|
||||
user=user,
|
||||
source_key=source.key,
|
||||
target_library_key=self.library_v2.library_key,
|
||||
composition_level=CompositionLevel.Component.value,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip.value,
|
||||
target_library_key=self.library_v2_1.library_key,
|
||||
composition_level=CompositionLevel.Component,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
forward_source_to_target=False,
|
||||
)
|
||||
@@ -235,7 +242,7 @@ class TestModulestoreMigratorAPI(LibraryTestCase):
|
||||
modulestoremigration = ModulestoreMigration.objects.get()
|
||||
assert modulestoremigration.repeat_handling_strategy == RepeatHandlingStrategy.Skip.value
|
||||
|
||||
migrated_components = lib_api.get_library_components(self.library_v2.library_key)
|
||||
migrated_components = lib_api.get_library_components(self.library_v2_1.library_key)
|
||||
assert len(migrated_components) == 1
|
||||
|
||||
# Update the block, changing its name
|
||||
@@ -246,9 +253,9 @@ class TestModulestoreMigratorAPI(LibraryTestCase):
|
||||
api.start_migration_to_library(
|
||||
user=user,
|
||||
source_key=source.key,
|
||||
target_library_key=self.library_v2.library_key,
|
||||
composition_level=CompositionLevel.Component.value,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Update.value,
|
||||
target_library_key=self.library_v2_1.library_key,
|
||||
composition_level=CompositionLevel.Component,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Update,
|
||||
preserve_url_slugs=True,
|
||||
forward_source_to_target=False,
|
||||
)
|
||||
@@ -257,11 +264,11 @@ class TestModulestoreMigratorAPI(LibraryTestCase):
|
||||
assert modulestoremigration is not None
|
||||
assert modulestoremigration.repeat_handling_strategy == RepeatHandlingStrategy.Update.value
|
||||
|
||||
migrated_components_fork = lib_api.get_library_components(self.library_v2.library_key)
|
||||
migrated_components_fork = lib_api.get_library_components(self.library_v2_1.library_key)
|
||||
assert len(migrated_components_fork) == 1
|
||||
|
||||
component = lib_api.LibraryXBlockMetadata.from_component(
|
||||
self.library_v2.library_key, migrated_components_fork[0]
|
||||
self.library_v2_1.library_key, migrated_components_fork[0]
|
||||
)
|
||||
assert component.display_name == "Updated Block"
|
||||
|
||||
@@ -283,9 +290,9 @@ class TestModulestoreMigratorAPI(LibraryTestCase):
|
||||
api.start_migration_to_library(
|
||||
user=user,
|
||||
source_key=source.key,
|
||||
target_library_key=self.library_v2.library_key,
|
||||
composition_level=CompositionLevel.Component.value,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip.value,
|
||||
target_library_key=self.library_v2_1.library_key,
|
||||
composition_level=CompositionLevel.Component,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
forward_source_to_target=False,
|
||||
)
|
||||
@@ -293,7 +300,7 @@ class TestModulestoreMigratorAPI(LibraryTestCase):
|
||||
modulestoremigration = ModulestoreMigration.objects.get()
|
||||
assert modulestoremigration.repeat_handling_strategy == RepeatHandlingStrategy.Skip.value
|
||||
|
||||
migrated_components = lib_api.get_library_components(self.library_v2.library_key)
|
||||
migrated_components = lib_api.get_library_components(self.library_v2_1.library_key)
|
||||
assert len(migrated_components) == 1
|
||||
|
||||
# Update the block, changing its name
|
||||
@@ -304,9 +311,9 @@ class TestModulestoreMigratorAPI(LibraryTestCase):
|
||||
api.start_migration_to_library(
|
||||
user=user,
|
||||
source_key=source.key,
|
||||
target_library_key=self.library_v2.library_key,
|
||||
composition_level=CompositionLevel.Component.value,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Fork.value,
|
||||
target_library_key=self.library_v2_1.library_key,
|
||||
composition_level=CompositionLevel.Component,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Fork,
|
||||
preserve_url_slugs=True,
|
||||
forward_source_to_target=False,
|
||||
)
|
||||
@@ -315,16 +322,16 @@ class TestModulestoreMigratorAPI(LibraryTestCase):
|
||||
assert modulestoremigration is not None
|
||||
assert modulestoremigration.repeat_handling_strategy == RepeatHandlingStrategy.Fork.value
|
||||
|
||||
migrated_components_fork = lib_api.get_library_components(self.library_v2.library_key)
|
||||
migrated_components_fork = lib_api.get_library_components(self.library_v2_1.library_key)
|
||||
assert len(migrated_components_fork) == 2
|
||||
|
||||
first_component = lib_api.LibraryXBlockMetadata.from_component(
|
||||
self.library_v2.library_key, migrated_components_fork[0]
|
||||
self.library_v2_1.library_key, migrated_components_fork[0]
|
||||
)
|
||||
assert first_component.display_name == "Original Block"
|
||||
|
||||
second_component = lib_api.LibraryXBlockMetadata.from_component(
|
||||
self.library_v2.library_key, migrated_components_fork[1]
|
||||
self.library_v2_1.library_key, migrated_components_fork[1]
|
||||
)
|
||||
assert second_component.display_name == "Updated Block"
|
||||
|
||||
@@ -336,9 +343,9 @@ class TestModulestoreMigratorAPI(LibraryTestCase):
|
||||
api.start_migration_to_library(
|
||||
user=user,
|
||||
source_key=source.key,
|
||||
target_library_key=self.library_v2.library_key,
|
||||
composition_level=CompositionLevel.Component.value,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Fork.value,
|
||||
target_library_key=self.library_v2_1.library_key,
|
||||
composition_level=CompositionLevel.Component,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Fork,
|
||||
preserve_url_slugs=True,
|
||||
forward_source_to_target=False,
|
||||
)
|
||||
@@ -347,129 +354,215 @@ class TestModulestoreMigratorAPI(LibraryTestCase):
|
||||
assert modulestoremigration is not None
|
||||
assert modulestoremigration.repeat_handling_strategy == RepeatHandlingStrategy.Fork.value
|
||||
|
||||
migrated_components_fork = lib_api.get_library_components(self.library_v2.library_key)
|
||||
migrated_components_fork = lib_api.get_library_components(self.library_v2_1.library_key)
|
||||
assert len(migrated_components_fork) == 3
|
||||
|
||||
first_component = lib_api.LibraryXBlockMetadata.from_component(
|
||||
self.library_v2.library_key, migrated_components_fork[0]
|
||||
self.library_v2_1.library_key, migrated_components_fork[0]
|
||||
)
|
||||
assert first_component.display_name == "Original Block"
|
||||
|
||||
second_component = lib_api.LibraryXBlockMetadata.from_component(
|
||||
self.library_v2.library_key, migrated_components_fork[1]
|
||||
self.library_v2_1.library_key, migrated_components_fork[1]
|
||||
)
|
||||
assert second_component.display_name == "Updated Block"
|
||||
|
||||
third_component = lib_api.LibraryXBlockMetadata.from_component(
|
||||
self.library_v2.library_key, migrated_components_fork[2]
|
||||
self.library_v2_1.library_key, migrated_components_fork[2]
|
||||
)
|
||||
assert third_component.display_name == "Updated Block Again"
|
||||
|
||||
def test_get_migration_info(self):
|
||||
def test_migration_api_for_various_scenarios(self):
|
||||
"""
|
||||
Test that the API can retrieve migration info.
|
||||
Test that get_migrations, get_block_migrations, forward_context, and forward_block
|
||||
behave as expected throughout a convoluted series of intertwined migrations.
|
||||
|
||||
Also, ensure that each of the aforementioned api functions only performs 1 query each.
|
||||
"""
|
||||
# pylint: disable=too-many-statements
|
||||
user = UserFactory()
|
||||
|
||||
collection_key = "test-collection"
|
||||
all_source_usage_keys = {*self.source_html_keys, *self.source_unit_keys}
|
||||
all_source_usage_key_strs = {str(sk) for sk in all_source_usage_keys}
|
||||
|
||||
# In this test, we will be migrating self.lib_v1 a total of 6 times.
|
||||
# We will migrate it to each collection (A, B, and C) twice.
|
||||
|
||||
# Lib 1 has Collection A and Collection B
|
||||
# Lib 2 has Collection C
|
||||
authoring_api.create_collection(
|
||||
learning_package_id=self.learning_package.id,
|
||||
key=collection_key,
|
||||
title="Test Collection",
|
||||
key="test-collection-1a",
|
||||
title="Test Collection A in Lib 1",
|
||||
created_by=user.id,
|
||||
)
|
||||
|
||||
api.start_migration_to_library(
|
||||
user=user,
|
||||
source_key=self.lib_key,
|
||||
target_library_key=self.library_v2.library_key,
|
||||
target_collection_slug=collection_key,
|
||||
composition_level=CompositionLevel.Component.value,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip.value,
|
||||
preserve_url_slugs=True,
|
||||
forward_source_to_target=True,
|
||||
)
|
||||
with self.assertNumQueries(1):
|
||||
result = api.get_migration_info([self.lib_key])
|
||||
row = result.get(self.lib_key)
|
||||
assert row is not None
|
||||
assert row.migrations__target__key == str(self.lib_key_v2)
|
||||
assert row.migrations__target__title == "Test Library"
|
||||
assert row.migrations__target_collection__key == collection_key
|
||||
assert row.migrations__target_collection__title == "Test Collection"
|
||||
|
||||
def test_get_all_migrations_info(self):
|
||||
"""
|
||||
Test that the API can retrieve all migrations info for source keys.
|
||||
"""
|
||||
user = UserFactory()
|
||||
|
||||
collection_key = "test-collection"
|
||||
collection_key_2 = "test-collection"
|
||||
authoring_api.create_collection(
|
||||
learning_package_id=self.learning_package.id,
|
||||
key=collection_key,
|
||||
title="Test Collection",
|
||||
key="test-collection-1b",
|
||||
title="Test Collection B in Lib 1",
|
||||
created_by=user.id,
|
||||
)
|
||||
authoring_api.create_collection(
|
||||
learning_package_id=self.learning_package_2.id,
|
||||
key=collection_key_2,
|
||||
title="Test Collection 2",
|
||||
key="test-collection-2c",
|
||||
title="Test Collection C in Lib 2",
|
||||
created_by=user.id,
|
||||
)
|
||||
|
||||
# No migrations have happened.
|
||||
# Everything should return None / empty.
|
||||
assert not list(api.get_migrations(self.lib_key_v1))
|
||||
assert not api.get_forwarding(source_key=self.lib_key_v1)
|
||||
assert not api.get_forwarding_for_blocks(all_source_usage_keys)
|
||||
|
||||
# FOUR MIGRATIONS!
|
||||
# * Migrate to Lib1.CollA
|
||||
# * Migrate to Lib1.CollB using FORK strategy
|
||||
# * Migrate to Lib1.CollA using UPDATE strategy
|
||||
# * Migrate to Lib2.CollC
|
||||
# Note: None of these are forwarding migrations!
|
||||
api.start_migration_to_library(
|
||||
user=user,
|
||||
source_key=self.lib_key,
|
||||
target_library_key=self.library_v2.library_key,
|
||||
target_collection_slug=collection_key,
|
||||
composition_level=CompositionLevel.Component.value,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip.value,
|
||||
source_key=self.lib_key_v1,
|
||||
target_library_key=self.lib_key_v2_1,
|
||||
target_collection_slug="test-collection-1a",
|
||||
composition_level=CompositionLevel.Unit,
|
||||
# repeat_handling_strategy here is arbitrary, as there will be no repeats.
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
forward_source_to_target=True,
|
||||
forward_source_to_target=False,
|
||||
)
|
||||
api.start_migration_to_library(
|
||||
user=user,
|
||||
source_key=self.lib_key,
|
||||
target_library_key=self.library_v2_2.library_key,
|
||||
target_collection_slug=collection_key_2,
|
||||
composition_level=CompositionLevel.Component.value,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip.value,
|
||||
source_key=self.lib_key_v1,
|
||||
target_library_key=self.lib_key_v2_1,
|
||||
target_collection_slug="test-collection-1b",
|
||||
composition_level=CompositionLevel.Unit,
|
||||
# this will create a 2nd copy of every block
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Fork,
|
||||
preserve_url_slugs=True,
|
||||
forward_source_to_target=True,
|
||||
forward_source_to_target=False,
|
||||
)
|
||||
api.start_migration_to_library(
|
||||
user=user,
|
||||
source_key=self.lib_key_v1,
|
||||
target_library_key=self.lib_key_v2_1,
|
||||
target_collection_slug="test-collection-1a",
|
||||
composition_level=CompositionLevel.Unit,
|
||||
# this will update the 2nd copies, but put them in the same collection as the first copies
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Update,
|
||||
preserve_url_slugs=True,
|
||||
forward_source_to_target=False,
|
||||
)
|
||||
api.start_migration_to_library(
|
||||
user=user,
|
||||
source_key=self.lib_key_v1,
|
||||
target_library_key=self.lib_key_v2_2,
|
||||
target_collection_slug="test-collection-2c",
|
||||
composition_level=CompositionLevel.Unit,
|
||||
# repeat_handling_strategy here is arbitrary, as there will be no repeats.
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
forward_source_to_target=False,
|
||||
)
|
||||
# get_migrations returns in reverse chronological order
|
||||
with self.assertNumQueries(1):
|
||||
result = api.get_all_migrations_info([self.lib_key])
|
||||
row = result.get(self.lib_key)
|
||||
assert row is not None
|
||||
assert row[0].get('migrations__target__key') == str(self.lib_key_v2)
|
||||
assert row[0].get('migrations__target__title') == "Test Library"
|
||||
assert row[0].get('migrations__target_collection__key') == collection_key
|
||||
assert row[0].get('migrations__target_collection__title') == "Test Collection"
|
||||
|
||||
assert row[1].get('migrations__target__key') == str(self.lib_key_v2_2)
|
||||
assert row[1].get('migrations__target__title') == "Test Library 2"
|
||||
assert row[1].get('migrations__target_collection__key') == collection_key_2
|
||||
assert row[1].get('migrations__target_collection__title') == "Test Collection 2"
|
||||
|
||||
def test_get_target_block_usage_keys(self):
|
||||
"""
|
||||
Test that the API can get the list of target block usage keys for a given library.
|
||||
"""
|
||||
user = UserFactory()
|
||||
migration_2c_i, migration_1a_ii, migration_1b_i, migration_1a_i = api.get_migrations(self.lib_key_v1)
|
||||
assert not migration_1a_i.is_failed
|
||||
assert not migration_1b_i.is_failed
|
||||
assert not migration_1a_ii.is_failed
|
||||
assert not migration_2c_i.is_failed
|
||||
# Confirm that the metadata came back correctly.
|
||||
assert migration_1a_i.target_key == self.lib_key_v2_1
|
||||
assert migration_1a_i.target_title == "Test Library 1"
|
||||
assert migration_1a_i.target_collection_slug == "test-collection-1a"
|
||||
assert migration_1a_i.target_collection_title == "Test Collection A in Lib 1"
|
||||
assert migration_2c_i.target_key == self.lib_key_v2_2
|
||||
assert migration_2c_i.target_title == "Test Library 2"
|
||||
assert migration_2c_i.target_collection_slug == "test-collection-2c"
|
||||
assert migration_2c_i.target_collection_title == "Test Collection C in Lib 2"
|
||||
# Call get_migration_blocks on each of the four migrations. Convert the mapping
|
||||
# from UsageKey->BlockMigrationResult into str->str just so we can assert things more easily.
|
||||
with self.assertNumQueries(1):
|
||||
mappings_1a_i = {
|
||||
str(sk): str(bm.target_key) for sk, bm in api.get_migration_blocks(migration_1a_i.pk).items()
|
||||
}
|
||||
mappings_1b_i = {
|
||||
str(sk): str(bm.target_key) for sk, bm in api.get_migration_blocks(migration_1b_i.pk).items()
|
||||
}
|
||||
mappings_1a_ii = {
|
||||
str(sk): str(bm.target_key) for sk, bm in api.get_migration_blocks(migration_1a_ii.pk).items()
|
||||
}
|
||||
mappings_2c_i = {
|
||||
str(sk): str(bm.target_key) for sk, bm in api.get_migration_blocks(migration_2c_i.pk).items()
|
||||
}
|
||||
# Each migration should have migrated every source block.
|
||||
assert set(mappings_1a_i.keys()) == all_source_usage_key_strs
|
||||
assert set(mappings_1b_i.keys()) == all_source_usage_key_strs
|
||||
assert set(mappings_1a_ii.keys()) == all_source_usage_key_strs
|
||||
assert set(mappings_2c_i.keys()) == all_source_usage_key_strs
|
||||
# Because the migration to Lib1.CollB used FORK, we expect that there is nothing in
|
||||
# common between it and the prior migration to Lib1.CollA.
|
||||
assert not (set(mappings_1a_i.values()) & set(mappings_1b_i.values()))
|
||||
# Because the second migration to Lib1.CollA used UPDATE, we expect that it
|
||||
# will have all the same mappings as the prior migration to Lib1.CollB.
|
||||
# This is a little countertuitive, since the migrations targeted different collections,
|
||||
# but the rule that the migrator follows is "UPDATE uses the block from the most recent migration".
|
||||
assert mappings_1b_i == mappings_1a_ii
|
||||
# Since forward_source_to_target=False, we have had no authoritative migration yet.
|
||||
assert api.get_forwarding(self.lib_key_v1) is None
|
||||
assert not api.get_forwarding_for_blocks(all_source_usage_keys)
|
||||
|
||||
# ANOTHER MIGRATION!
|
||||
# * Migrate to Lib2.CollC using UPDATE strategy
|
||||
# Note: This *is* a forwarding migration
|
||||
api.start_migration_to_library(
|
||||
user=user,
|
||||
source_key=self.lib_key,
|
||||
target_library_key=self.library_v2.library_key,
|
||||
target_collection_slug=None,
|
||||
composition_level=CompositionLevel.Component.value,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip.value,
|
||||
source_key=self.lib_key_v1,
|
||||
target_library_key=self.lib_key_v2_2,
|
||||
target_collection_slug="test-collection-2c",
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Update,
|
||||
preserve_url_slugs=True,
|
||||
forward_source_to_target=True,
|
||||
)
|
||||
migration_2c_ii, _2c_i, _1a_ii, _1b_i, migration_1a_i_reloaded = api.get_migrations(self.lib_key_v1)
|
||||
assert migration_1a_i_reloaded.pk == migration_1a_i.pk
|
||||
assert not migration_2c_ii.is_failed
|
||||
# Our source lib should now forward to Lib2.
|
||||
with self.assertNumQueries(1):
|
||||
result = api.get_target_block_usage_keys(self.lib_key)
|
||||
for key in self.blocks:
|
||||
assert result.get(key) is not None
|
||||
forwarded = api.get_forwarding(self.lib_key_v1)
|
||||
assert forwarded.target_key == self.lib_key_v2_2
|
||||
assert forwarded.target_collection_slug == "test-collection-2c"
|
||||
assert forwarded.pk == migration_2c_ii.pk
|
||||
# Our source lib's blocks should now forward to ones in Lib2.
|
||||
with self.assertNumQueries(1):
|
||||
forwarded_blocks = api.get_forwarding_for_blocks(all_source_usage_keys)
|
||||
assert forwarded_blocks[self.source_html_keys[1]].target_key.context_key == self.lib_key_v2_2
|
||||
assert forwarded_blocks[self.source_unit_keys[1]].target_key.context_key == self.lib_key_v2_2
|
||||
|
||||
# FINAL MIGRATION!
|
||||
# * Migrate to Lib1.CollB using UPDATE strategy
|
||||
# Note: This *is* a forwarding migration, and should supplant the previous
|
||||
# migration for forwarding purposes.
|
||||
api.start_migration_to_library(
|
||||
user=user,
|
||||
source_key=self.lib_key_v1,
|
||||
target_library_key=self.lib_key_v2_1,
|
||||
target_collection_slug="test-collection-1b",
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Update,
|
||||
preserve_url_slugs=True,
|
||||
forward_source_to_target=True,
|
||||
)
|
||||
migration_1b_ii, _2c_ii, _2c_i, _1a_ii, _1b_i, _1a_i = api.get_migrations(self.lib_key_v1)
|
||||
assert not migration_1b_ii.is_failed
|
||||
# Our source lib should now forward to Lib1.
|
||||
forwarded = api.get_forwarding(self.lib_key_v1)
|
||||
assert forwarded.target_key == self.lib_key_v2_1
|
||||
assert forwarded.target_collection_slug == "test-collection-1b"
|
||||
assert forwarded.pk == migration_1b_ii.pk
|
||||
# Our source lib should now forward to Lib1.
|
||||
forwarded_blocks = api.get_forwarding_for_blocks(all_source_usage_keys)
|
||||
assert forwarded_blocks[self.source_html_keys[1]].target_key.context_key == self.lib_key_v2_1
|
||||
assert forwarded_blocks[self.source_unit_keys[1]].target_key.context_key == self.lib_key_v2_1
|
||||
|
||||
# ==== New file: cms/djangoapps/modulestore_migrator/tests/test_rest_api.py (1111 lines) ====
|
||||
"""
|
||||
Unit tests for the modulestore_migrator REST API v1 views.
|
||||
|
||||
These tests focus on validation, HTTP status codes, and serialization/deserialization.
|
||||
Business logic is mocked out.
|
||||
"""
|
||||
from unittest.mock import MagicMock, patch
|
||||
from uuid import uuid4
|
||||
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.test import TestCase
|
||||
from opaque_keys.edx.locator import (
|
||||
BlockUsageLocator, CourseLocator, LibraryLocatorV2, LibraryUsageLocatorV2
|
||||
)
|
||||
from organizations.tests.factories import OrganizationFactory
|
||||
from rest_framework import status
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
from rest_framework.test import APIRequestFactory, force_authenticate
|
||||
from user_tasks.models import UserTaskStatus
|
||||
|
||||
from cms.djangoapps.modulestore_migrator.data import (
|
||||
ModulestoreMigration,
|
||||
ModulestoreBlockMigrationSuccess,
|
||||
ModulestoreBlockMigrationFailure,
|
||||
)
|
||||
from cms.djangoapps.modulestore_migrator.models import (
|
||||
ModulestoreMigration as ModulestoreMigrationModel,
|
||||
ModulestoreSource,
|
||||
)
|
||||
from cms.djangoapps.modulestore_migrator.rest_api.v1.views import (
|
||||
BlockMigrationInfo,
|
||||
BulkMigrationViewSet,
|
||||
MigrationInfoViewSet,
|
||||
MigrationViewSet,
|
||||
)
|
||||
from openedx.core.djangoapps.content_libraries import api as lib_api
|
||||
|
||||
|
||||
User = get_user_model()
|
||||
|
||||
|
||||
class TestMigrationViewSetCreate(TestCase):
    """
    Test the MigrationViewSet.create() endpoint.

    Focus: validation, return codes, serialization/deserialization.
    Business logic (the migrator API itself) is mocked out.
    """

    def setUp(self):
        """Set up test fixtures."""
        self.factory = APIRequestFactory()
        self.view = MigrationViewSet.as_view({'post': 'create'})

        # Create test user
        self.user = User.objects.create_user(
            username='testuser',
            email='testuser@test.com',
            password='password'
        )

    def _post(self, request_data, authenticate=True):
        """POST ``request_data`` to the migrations endpoint and return the response."""
        request = self.factory.post(
            '/api/modulestore_migrator/v1/migrations/',
            data=request_data,
            format='json'
        )
        if authenticate:
            force_authenticate(request, user=self.user)
        return self.view(request)

    @staticmethod
    def _mock_pending_task_status():
        """Return a MagicMock shaped like a freshly-created UserTaskStatus row."""
        mock_task_status = MagicMock(autospec=True)
        mock_task_status.uuid = uuid4()
        mock_task_status.state = 'Pending'
        mock_task_status.state_text = 'Pending'
        mock_task_status.completed_steps = 0
        mock_task_status.total_steps = 10
        mock_task_status.attempts = 1
        mock_task_status.created = '2025-01-01T00:00:00Z'
        mock_task_status.modified = '2025-01-01T00:00:00Z'
        mock_task_status.artifacts = []
        mock_task_status.migrations.all.return_value = []
        return mock_task_status

    @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.migrator_api')
    @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.lib_api')
    @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.auth')
    @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.UserTaskStatus')
    def test_create_migration_success_with_minimal_data(
        self, mock_user_task_status, mock_auth, mock_lib_api, mock_migrator_api
    ):
        """
        Test successful migration creation with minimal required fields.

        Validates:
        - 201 status code is returned
        - Response contains expected serialized fields
        - Request data is properly deserialized
        - Permission checks are performed for both source and target
        """
        mock_auth.has_studio_write_access.return_value = True
        mock_lib_api.require_permission_for_library_key.return_value = None

        mock_task = MagicMock(autospec=True)
        mock_task.id = 'test-task-id'
        mock_migrator_api.start_migration_to_library.return_value = mock_task
        mock_user_task_status.objects.get.return_value = self._mock_pending_task_status()

        response = self._post({
            'source': 'course-v1:TestOrg+TestCourse+TestRun',
            'target': 'lib:TestOrg:TestLibrary',
        })

        assert response.status_code == status.HTTP_201_CREATED
        for field in ('uuid', 'state', 'state_text', 'completed_steps', 'total_steps', 'parameters'):
            assert field in response.data

        # Both the source (course) and target (library) permission checks must run.
        mock_auth.has_studio_write_access.assert_called_once()
        mock_lib_api.require_permission_for_library_key.assert_called_once()

        mock_migrator_api.start_migration_to_library.assert_called_once()
        call_kwargs = mock_migrator_api.start_migration_to_library.call_args[1]
        assert call_kwargs['user'] == self.user
        assert str(call_kwargs['source_key']) == 'course-v1:TestOrg+TestCourse+TestRun'
        assert str(call_kwargs['target_library_key']) == 'lib:TestOrg:TestLibrary'

    def test_create_migration_invalid_source_key(self):
        """
        Test that invalid source key returns 400 Bad Request.

        Validates:
        - 400 status code is returned
        - Error message mentions validation failure
        """
        response = self._post({
            'source': 'not-a-valid-key',
            'target': 'lib:TestOrg:TestLibrary',
        })
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert 'source' in response.data

    def test_create_migration_invalid_target_key(self):
        """
        Test that invalid target library key returns 400 Bad Request.

        Validates:
        - 400 status code is returned
        - Error message mentions target validation failure
        """
        response = self._post({
            'source': 'course-v1:TestOrg+TestCourse+TestRun',
            'target': 'not-a-valid-library-key',
        })
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert 'target' in response.data

    def test_create_migration_missing_required_fields(self):
        """
        Test that missing required fields returns 400 Bad Request.

        Validates:
        - 400 status code is returned when source is missing
        - 400 status code is returned when target is missing
        """
        response = self._post({'target': 'lib:TestOrg:TestLibrary'})
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert 'source' in response.data

        response = self._post({'source': 'course-v1:TestOrg+TestCourse+TestRun'})
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert 'target' in response.data

    def test_create_migration_unauthenticated_user(self):
        """
        Test that unauthenticated requests are rejected.

        Validates:
        - 401/403 status code for unauthenticated requests
        """
        response = self._post(
            {
                'source': 'course-v1:TestOrg+TestCourse+TestRun',
                'target': 'lib:TestOrg:TestLibrary',
            },
            authenticate=False,  # Note: no force_authenticate call
        )
        assert response.status_code in [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]

    @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.auth')
    def test_create_migration_without_source_author_access(self, mock_auth):
        """
        Test that users without author access to source cannot create migrations.

        Validates:
        - 403 Forbidden status code when user lacks author access to source
        """
        mock_auth.has_studio_write_access.return_value = False

        response = self._post({
            'source': 'course-v1:TestOrg+TestCourse+TestRun',
            'target': 'lib:TestOrg:TestLibrary',
        })
        assert response.status_code == status.HTTP_403_FORBIDDEN

    @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.lib_api')
    @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.auth')
    def test_create_migration_without_target_write_access(self, mock_auth, mock_lib_api):
        """
        Test that users without write access to target cannot create migrations.

        Validates:
        - 403 Forbidden status code when user lacks write access to target library
        """
        mock_auth.has_studio_write_access.return_value = True
        mock_lib_api.require_permission_for_library_key.side_effect = PermissionDenied(
            "User lacks permission to manage content in this library"
        )

        response = self._post({
            'source': 'course-v1:TestOrg+TestCourse+TestRun',
            'target': 'lib:TestOrg:TestLibrary',
        })
        assert response.status_code == status.HTTP_403_FORBIDDEN

    @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.migrator_api')
    @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.lib_api')
    @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.auth')
    @patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.UserTaskStatus')
    def test_create_migration_with_optional_fields(
        self, mock_user_task_status, mock_auth, mock_lib_api, mock_migrator_api
    ):
        """
        Test migration creation with all optional fields provided.

        Validates:
        - Optional fields are properly deserialized
        - Default values are not used when explicit values provided
        """
        mock_auth.has_studio_write_access.return_value = True
        mock_lib_api.require_permission_for_library_key.return_value = None

        mock_task = MagicMock(autospec=True)
        mock_task.id = 'test-task-id'
        mock_migrator_api.start_migration_to_library.return_value = mock_task
        mock_user_task_status.objects.get.return_value = self._mock_pending_task_status()

        response = self._post({
            'source': 'course-v1:TestOrg+TestCourse+TestRun',
            'target': 'lib:TestOrg:TestLibrary',
            'target_collection_slug': 'my-collection',
            'composition_level': 'unit',
            'repeat_handling_strategy': 'update',
            'preserve_url_slugs': False,
            'forward_source_to_target': True,
        })

        assert response.status_code == status.HTTP_201_CREATED

        mock_migrator_api.start_migration_to_library.assert_called_once()
        call_kwargs = mock_migrator_api.start_migration_to_library.call_args[1]
        assert call_kwargs['target_collection_slug'] == 'my-collection'
        # CompositionLevel and RepeatHandlingStrategy are enums
        assert call_kwargs['composition_level'].value == 'unit'
        assert call_kwargs['repeat_handling_strategy'].value == 'update'
        assert call_kwargs['preserve_url_slugs'] is False
        assert call_kwargs['forward_source_to_target'] is True

    def test_create_migration_invalid_composition_level(self):
        """
        Test that invalid composition_level returns 400 Bad Request.

        Validates:
        - 400 status code for invalid enum value
        """
        response = self._post({
            'source': 'course-v1:TestOrg+TestCourse+TestRun',
            'target': 'lib:TestOrg:TestLibrary',
            'composition_level': 'invalid_level',
        })
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert 'composition_level' in response.data

    def test_create_migration_invalid_repeat_handling_strategy(self):
        """
        Test that invalid repeat_handling_strategy returns 400 Bad Request.

        Validates:
        - 400 status code for invalid enum value
        """
        response = self._post({
            'source': 'course-v1:TestOrg+TestCourse+TestRun',
            'target': 'lib:TestOrg:TestLibrary',
            'repeat_handling_strategy': 'invalid_strategy',
        })
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert 'repeat_handling_strategy' in response.data
||||
|
||||
class TestMigrationViewSetList(TestCase):
    """
    Test the MigrationViewSet.list() endpoint.

    Focus: validation, return codes, serialization/deserialization.
    """

    def setUp(self):
        """Set up test fixtures."""
        self.factory = APIRequestFactory()
        self.view = MigrationViewSet.as_view({'get': 'list'})

        self.user = User.objects.create_user(
            username='testuser',
            email='testuser@test.com',
            password='password'
        )
        self.other_user = User.objects.create_user(
            username='otheruser',
            email='otheruser@test.com',
            password='password'
        )

    def test_list_migrations_success(self):
        """
        Test successful listing of migrations for the authenticated user.

        Validates:
        - 200 status code is returned
        - Response contains list of migrations
        - Only user's own migrations are returned (other users' migrations filtered out)
        """
        org = OrganizationFactory(short_name="TestOrg", name="Test Org")
        source_key = CourseLocator.from_string('course-v1:TestOrg+TestCourse+TestRun')
        source = ModulestoreSource.objects.create(key=str(source_key))
        target = lib_api.create_library(org=org, slug="TestLib", title="Test Target Lib")
        user_task_status = UserTaskStatus.objects.create(
            user=self.user,
            task_id='user-task-id',
            task_class='test.Task',
            name='User Migration',
            total_steps=10,
            completed_steps=10,
        )
        other_task_status = UserTaskStatus.objects.create(
            user=self.other_user,
            task_id='other-task-id',
            task_class='test.Task',
            name='Other Migration',
            total_steps=5,
            completed_steps=5,
        )
        # One migration owned by each user; only self.user's should be listed.
        ModulestoreMigrationModel.objects.create(
            task_status=user_task_status,
            source=source,
            target_id=target.learning_package_id,
        )
        ModulestoreMigrationModel.objects.create(
            task_status=other_task_status,
            source=source,
            target_id=target.learning_package_id,
        )

        request = self.factory.get('/api/modulestore_migrator/v1/migrations/')
        force_authenticate(request, user=self.user)
        response = self.view(request)

        assert response.status_code == status.HTTP_200_OK
        results = response.data['results']
        assert len(results) == 1
        assert results[0]['uuid'] == str(user_task_status.uuid)

    def test_list_migrations_unauthenticated(self):
        """
        Test that unauthenticated requests return 401 Unauthorized.

        Validates:
        - 401 status code for unauthenticated requests
        """
        request = self.factory.get('/api/modulestore_migrator/v1/migrations/')

        response = self.view(request)

        assert response.status_code == status.HTTP_401_UNAUTHORIZED
||||
|
||||
class TestMigrationViewSetRetrieve(TestCase):
    """
    Test the MigrationViewSet.retrieve() endpoint.

    Focus: validation, return codes, serialization/deserialization.
    """

    def setUp(self):
        """Set up test fixtures."""
        self.factory = APIRequestFactory()
        self.view = MigrationViewSet.as_view({'get': 'retrieve'})

        self.user = User.objects.create_user(
            username='testuser',
            email='testuser@test.com',
            password='password'
        )

    @staticmethod
    def _create_migration_for(user, task_id='user-task-id', name='User Migration'):
        """Create a ModulestoreMigration row (plus prerequisites) owned by ``user``."""
        org = OrganizationFactory(short_name="TestOrg", name="Test Org")
        source_key = CourseLocator.from_string('course-v1:TestOrg+TestCourse+TestRun')
        source = ModulestoreSource.objects.create(key=str(source_key))
        target = lib_api.create_library(org=org, slug="TestLib", title="Test Target Lib")
        task_status = UserTaskStatus.objects.create(
            user=user,
            task_id=task_id,
            task_class='test.Task',
            name=name,
            total_steps=10,
            completed_steps=10,
        )
        ModulestoreMigrationModel.objects.create(
            task_status=task_status,
            source=source,
            target_id=target.learning_package_id,
        )
        return task_status

    def test_retrieve_migration_success(self):
        """
        Test successful retrieval of a specific migration by UUID.

        Validates:
        - 200 status code is returned
        - Response contains migration details
        """
        user_task_status = self._create_migration_for(self.user)

        request = self.factory.get(f'/api/modulestore_migrator/v1/migrations/{user_task_status.uuid}/')
        force_authenticate(request, user=self.user)
        response = self.view(request, uuid=str(user_task_status.uuid))

        assert response.status_code == status.HTTP_200_OK
        assert response.data['uuid'] == str(user_task_status.uuid)
        assert 'parameters' in response.data

    def test_retrieve_migration_other_user(self):
        """
        Test that users cannot retrieve migrations created by other users.

        Validates:
        - 404 status code when attempting to retrieve another user's migration
        - Users are isolated to their own migrations
        """
        other_user = User.objects.create_user(
            username='otheruser',
            email='other@test.com',
            password='password'
        )
        other_task_status = self._create_migration_for(
            other_user, task_id='other-task-id', name='Other Migration'
        )

        request = self.factory.get(f'/api/modulestore_migrator/v1/migrations/{other_task_status.uuid}/')
        force_authenticate(request, user=self.user)
        response = self.view(request, uuid=str(other_task_status.uuid))

        assert response.status_code == status.HTTP_404_NOT_FOUND

    def test_retrieve_migration_unauthenticated(self):
        """
        Test that unauthenticated requests return 401 Unauthorized.

        Validates:
        - 401 status code for unauthenticated requests
        """
        task_uuid = uuid4()
        request = self.factory.get(f'/api/modulestore_migrator/v1/migrations/{task_uuid}/')

        response = self.view(request, uuid=str(task_uuid))

        assert response.status_code == status.HTTP_401_UNAUTHORIZED
||||
|
||||
class TestMigrationViewSetCancel(TestCase):
    """
    Test the MigrationViewSet.cancel() endpoint.

    Focus: validation, return codes, authorization.
    """

    def setUp(self):
        """Set up test fixtures."""
        self.factory = APIRequestFactory()
        self.view = MigrationViewSet.as_view({'post': 'cancel'})

        self.staff_user = User.objects.create_user(
            username='staffuser',
            email='staff@test.com',
            password='password',
            is_staff=True
        )
        self.regular_user = User.objects.create_user(
            username='regularuser',
            email='regular@test.com',
            password='password',
            is_staff=False
        )

    def _cancel(self, task_uuid, user=None):
        """POST to the cancel endpoint for ``task_uuid``, optionally authenticated as ``user``."""
        request = self.factory.post(
            f'/api/modulestore_migrator/v1/migrations/{task_uuid}/cancel/'
        )
        if user is not None:
            force_authenticate(request, user=user)
        return self.view(request, uuid=str(task_uuid))

    def test_cancel_migration_as_staff(self):
        """
        Test that staff users can cancel migrations.

        Validates:
        - Staff users can successfully cancel migrations
        - UserTaskStatus.cancel is called
        """
        org = OrganizationFactory(short_name="TestOrg", name="Test Org")
        source_key = CourseLocator.from_string('course-v1:TestOrg+TestCourse+TestRun')
        source = ModulestoreSource.objects.create(key=str(source_key))
        target = lib_api.create_library(org=org, slug="TestLib", title="Test Target Lib")
        user_task_status = UserTaskStatus.objects.create(
            user=self.staff_user,
            task_id='staff-task-id',
            task_class='test.Task',
            name='Staff Migration',
            total_steps=10,
            completed_steps=5,
        )
        ModulestoreMigrationModel.objects.create(
            task_status=user_task_status,
            source=source,
            target_id=target.learning_package_id,
        )

        with patch.object(UserTaskStatus, 'cancel') as mock_cancel:
            response = self._cancel(user_task_status.uuid, user=self.staff_user)

        assert response.status_code == status.HTTP_200_OK
        mock_cancel.assert_called_once()

    def test_cancel_migration_not_found(self):
        """
        Test that attempting to cancel a non-existent migration returns 404.

        Validates:
        - 404 status code when migration UUID does not exist
        """
        response = self._cancel(uuid4(), user=self.staff_user)

        assert response.status_code == status.HTTP_404_NOT_FOUND

    def test_cancel_migration_as_non_staff(self):
        """
        Test that non-staff users cannot cancel migrations.

        Validates:
        - 403 Forbidden status code for non-staff users
        """
        response = self._cancel(uuid4(), user=self.regular_user)

        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_cancel_migration_unauthenticated(self):
        """
        Test that unauthenticated users cannot cancel migrations.
        """
        response = self._cancel(uuid4())

        assert response.status_code == status.HTTP_401_UNAUTHORIZED
||||
|
||||
class TestBulkMigrationViewSetCreate(TestCase):
|
||||
"""
|
||||
Test the BulkMigrationViewSet.create() endpoint.
|
||||
|
||||
Focus: validation, return codes, serialization/deserialization.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
"""Set up test fixtures."""
|
||||
self.factory = APIRequestFactory()
|
||||
self.view = BulkMigrationViewSet.as_view({'post': 'create'})
|
||||
|
||||
self.user = User.objects.create_user(
|
||||
username='testuser',
|
||||
email='testuser@test.com',
|
||||
password='password'
|
||||
)
|
||||
|
||||
@patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.migrator_api')
|
||||
@patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.lib_api')
|
||||
@patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.auth')
|
||||
@patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.UserTaskStatus')
|
||||
def test_create_bulk_migration_success(
|
||||
self, mock_user_task_status, mock_auth, mock_lib_api, mock_migrator_api
|
||||
):
|
||||
"""
|
||||
Test successful bulk migration creation with multiple sources.
|
||||
|
||||
Validates:
|
||||
- 201 status code is returned
|
||||
- Response contains expected serialized fields
|
||||
- Multiple sources are properly deserialized
|
||||
"""
|
||||
mock_auth.has_studio_write_access.return_value = True
|
||||
mock_lib_api.require_permission_for_library_key.return_value = None
|
||||
|
||||
mock_task = MagicMock(autospec=True)
|
||||
mock_task.id = 'test-task-id'
|
||||
mock_migrator_api.start_bulk_migration_to_library.return_value = mock_task
|
||||
|
||||
mock_task_status = MagicMock(autospec=True)
|
||||
mock_task_status.uuid = uuid4()
|
||||
mock_task_status.state = 'Pending'
|
||||
mock_task_status.state_text = 'Pending'
|
||||
mock_task_status.completed_steps = 0
|
||||
mock_task_status.total_steps = 10
|
||||
mock_task_status.attempts = 1
|
||||
mock_task_status.created = '2025-01-01T00:00:00Z'
|
||||
mock_task_status.modified = '2025-01-01T00:00:00Z'
|
||||
mock_task_status.artifacts = []
|
||||
mock_task_status.migrations.all.return_value = []
|
||||
|
||||
mock_user_task_status.objects.get.return_value = mock_task_status
|
||||
|
||||
request_data = {
|
||||
'sources': [
|
||||
'course-v1:TestOrg+TestCourse1+Run1',
|
||||
'course-v1:TestOrg+TestCourse2+Run2'
|
||||
],
|
||||
'target': 'lib:TestOrg:TestLibrary',
|
||||
}
|
||||
request = self.factory.post(
|
||||
'/api/modulestore_migrator/v1/bulk_migration/',
|
||||
data=request_data,
|
||||
format='json'
|
||||
)
|
||||
force_authenticate(request, user=self.user)
|
||||
|
||||
response = self.view(request)
|
||||
|
||||
assert response.status_code == status.HTTP_201_CREATED
|
||||
assert 'uuid' in response.data
|
||||
assert 'parameters' in response.data
|
||||
|
||||
mock_migrator_api.start_bulk_migration_to_library.assert_called_once()
|
||||
call_kwargs = mock_migrator_api.start_bulk_migration_to_library.call_args[1]
|
||||
assert call_kwargs['source_key_list'] == [
|
||||
CourseLocator.from_string('course-v1:TestOrg+TestCourse1+Run1'),
|
||||
CourseLocator.from_string('course-v1:TestOrg+TestCourse2+Run2'),
|
||||
]
|
||||
assert call_kwargs['target_collection_slug_list'] is None
|
||||
assert call_kwargs['create_collections'] is False
|
||||
# CompositionLevel and RepeatHandlingStrategy are enums
|
||||
assert call_kwargs['composition_level'].value == 'component'
|
||||
assert call_kwargs['repeat_handling_strategy'].value == 'skip'
|
||||
assert call_kwargs['preserve_url_slugs'] is False
|
||||
assert call_kwargs['forward_source_to_target'] is None
|
||||
|
||||
def test_create_bulk_migration_invalid_source_key(self):
|
||||
"""
|
||||
Test that invalid source key in list returns 400 Bad Request.
|
||||
|
||||
Validates:
|
||||
- 400 status code when one or more sources are invalid
|
||||
"""
|
||||
request_data = {
|
||||
'sources': ['not-a-valid-key', 'course-v1:TestOrg+TestCourse+TestRun'],
|
||||
'target': 'lib:TestOrg:TestLibrary',
|
||||
}
|
||||
request = self.factory.post(
|
||||
'/api/modulestore_migrator/v1/bulk_migration/',
|
||||
data=request_data,
|
||||
format='json'
|
||||
)
|
||||
force_authenticate(request, user=self.user)
|
||||
|
||||
response = self.view(request)
|
||||
|
||||
assert response.status_code == status.HTTP_400_BAD_REQUEST
|
||||
assert 'sources' in response.data
|
||||
|
||||
def test_create_bulk_migration_missing_sources(self):
|
||||
"""
|
||||
Test that missing sources field returns 400 Bad Request.
|
||||
|
||||
Validates:
|
||||
- 400 status code when sources is missing
|
||||
"""
|
||||
request_data = {
|
||||
'target': 'lib:TestOrg:TestLibrary',
|
||||
}
|
||||
request = self.factory.post(
|
||||
'/api/modulestore_migrator/v1/bulk_migration/',
|
||||
data=request_data,
|
||||
format='json'
|
||||
)
|
||||
force_authenticate(request, user=self.user)
|
||||
response = self.view(request)
|
||||
|
||||
assert response.status_code == status.HTTP_400_BAD_REQUEST
|
||||
assert 'sources' in response.data
|
||||
|
||||
|
||||
class TestMigrationInfoViewSet(TestCase):
|
||||
"""
|
||||
Test the MigrationInfoViewSet.get() endpoint.
|
||||
|
||||
Focus: validation, return codes, serialization/deserialization.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
"""Set up test fixtures."""
|
||||
self.factory = APIRequestFactory()
|
||||
self.view = MigrationInfoViewSet.as_view()
|
||||
|
||||
self.user = User.objects.create_user(
|
||||
username='testuser',
|
||||
email='testuser@test.com',
|
||||
password='password'
|
||||
)
|
||||
|
||||
@patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.migrator_api')
|
||||
@patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.auth')
|
||||
def test_get_migration_info_success(self, mock_auth, mock_migrator_api):
|
||||
"""
|
||||
Test successful retrieval of migration info.
|
||||
|
||||
Validates:
|
||||
- 200 status code is returned
|
||||
- Response contains migration info for requested sources
|
||||
- Permission checks are performed for each source
|
||||
"""
|
||||
mock_auth.has_studio_read_access.return_value = True
|
||||
|
||||
source_key = CourseLocator.from_string('course-v1:TestOrg+TestCourse+TestRun')
|
||||
target_key = LibraryLocatorV2.from_string('lib:TestOrg:TestLibrary')
|
||||
|
||||
migration = ModulestoreMigration(
|
||||
pk=1,
|
||||
source_key=source_key,
|
||||
target_key=target_key,
|
||||
target_title='Test Library',
|
||||
target_collection_slug='test-collection',
|
||||
target_collection_title='Test Collection',
|
||||
is_failed=False,
|
||||
task_uuid=uuid4(),
|
||||
)
|
||||
mock_migrator_api.get_migrations.return_value = [migration]
|
||||
|
||||
request = self.factory.get(
|
||||
'/api/modulestore_migrator/v1/migration_info/',
|
||||
{'source_keys': ['course-v1:TestOrg+TestCourse+TestRun']}
|
||||
)
|
||||
force_authenticate(request, user=self.user)
|
||||
|
||||
response = self.view(request)
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
assert str(source_key) in response.data
|
||||
assert len(response.data[str(source_key)]) == 1
|
||||
assert response.data[str(source_key)][0]['source_key'] == str(source_key)
|
||||
assert response.data[str(source_key)][0]['target_key'] == str(target_key)
|
||||
assert response.data[str(source_key)][0]['target_title'] == 'Test Library'
|
||||
|
||||
def test_get_migration_info_missing_source_keys(self):
|
||||
"""
|
||||
Test that missing source_keys parameter returns 400 Bad Request.
|
||||
|
||||
Validates:
|
||||
- 400 status code when source_keys is not provided
|
||||
"""
|
||||
request = self.factory.get('/api/modulestore_migrator/v1/migration_info/')
|
||||
force_authenticate(request, user=self.user)
|
||||
|
||||
response = self.view(request)
|
||||
|
||||
assert response.status_code == status.HTTP_400_BAD_REQUEST
|
||||
assert 'source_keys' in response.data.get('detail', '').lower()
|
||||
|
||||
def test_get_migration_info_invalid_source_key(self):
|
||||
"""
|
||||
Test that invalid source keys are silently skipped.
|
||||
|
||||
Validates:
|
||||
- 200 status code (invalid keys are filtered out, not errored)
|
||||
- Invalid keys don't appear in response
|
||||
"""
|
||||
request = self.factory.get(
|
||||
'/api/modulestore_migrator/v1/migration_info/',
|
||||
{'source_keys': ['not-a-valid-key']}
|
||||
)
|
||||
force_authenticate(request, user=self.user)
|
||||
|
||||
response = self.view(request)
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
|
||||
@patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.auth')
|
||||
def test_get_migration_info_without_read_access(self, mock_auth):
|
||||
"""
|
||||
Test that sources without read access are silently filtered.
|
||||
|
||||
Validates:
|
||||
- 200 status code (unauthorized sources are filtered, not errored)
|
||||
"""
|
||||
mock_auth.has_studio_read_access.return_value = False
|
||||
|
||||
request = self.factory.get(
|
||||
'/api/modulestore_migrator/v1/migration_info/',
|
||||
{'source_keys': ['course-v1:TestOrg+TestCourse+TestRun']}
|
||||
)
|
||||
force_authenticate(request, user=self.user)
|
||||
|
||||
response = self.view(request)
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
|
||||
|
||||
class TestBlockMigrationInfo(TestCase):
|
||||
"""
|
||||
Test the BlockMigrationInfo.get() endpoint.
|
||||
|
||||
Focus: validation, return codes, serialization/deserialization.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
"""Set up test fixtures."""
|
||||
self.factory = APIRequestFactory()
|
||||
self.view = BlockMigrationInfo.as_view()
|
||||
|
||||
self.user = User.objects.create_user(
|
||||
username='testuser',
|
||||
email='testuser@test.com',
|
||||
password='password'
|
||||
)
|
||||
|
||||
@patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.migrator_api')
|
||||
@patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.lib_api')
|
||||
def test_get_block_migration_info_success(self, mock_lib_api, mock_migrator_api):
|
||||
"""
|
||||
Test successful retrieval of block migration info.
|
||||
|
||||
Validates:
|
||||
- 200 status code is returned
|
||||
- Response contains block migration information
|
||||
- Target library permission is checked
|
||||
"""
|
||||
mock_lib_api.require_permission_for_library_key.return_value = None
|
||||
|
||||
source_course_key = CourseLocator.from_string('course-v1:TestOrg+TestCourse+TestRun')
|
||||
target_library_key = LibraryLocatorV2.from_string('lib:TestOrg:TestLibrary')
|
||||
|
||||
migration = ModulestoreMigration(
|
||||
pk=1,
|
||||
source_key=source_course_key,
|
||||
target_key=target_library_key,
|
||||
target_title='Test Library',
|
||||
target_collection_slug='test-collection',
|
||||
target_collection_title='Test Collection',
|
||||
is_failed=False,
|
||||
task_uuid=uuid4(),
|
||||
)
|
||||
source_key_success = BlockUsageLocator.from_string(
|
||||
'block-v1:TestOrg+TestCourse+TestRun+type@problem+block@test_problem'
|
||||
)
|
||||
target_key_success = LibraryUsageLocatorV2.from_string(
|
||||
'lb:TestOrg:TestLibrary:problem:test_problem'
|
||||
)
|
||||
block_success = ModulestoreBlockMigrationSuccess(
|
||||
source_key=source_key_success,
|
||||
target_key=target_key_success,
|
||||
target_entity_pk=123,
|
||||
target_title='Test Problem',
|
||||
target_version_num=1,
|
||||
)
|
||||
source_key_failure = BlockUsageLocator.from_string(
|
||||
'block-v1:TestOrg+TestCourse+TestRun+type@bork+block@test_bork'
|
||||
)
|
||||
block_failure = ModulestoreBlockMigrationFailure(
|
||||
source_key=source_key_failure,
|
||||
unsupported_reason='bork blocks are not supported',
|
||||
)
|
||||
|
||||
mock_migrator_api.get_migrations.return_value = [migration]
|
||||
mock_migrator_api.get_migration_blocks.return_value = {
|
||||
source_key_success: block_success,
|
||||
source_key_failure: block_failure,
|
||||
}
|
||||
|
||||
request = self.factory.get(
|
||||
'/api/modulestore_migrator/v1/migration_blocks/',
|
||||
{'target_key': 'lib:TestOrg:TestLibrary'}
|
||||
)
|
||||
force_authenticate(request, user=self.user)
|
||||
|
||||
response = self.view(request)
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
assert len(response.data) == 2
|
||||
assert response.data[0]['source_key'] == str(block_success.source_key)
|
||||
assert response.data[0]['target_key'] == str(block_success.target_key)
|
||||
assert response.data[0]['unsupported_reason'] is None
|
||||
assert response.data[1]['source_key'] == str(block_failure.source_key)
|
||||
assert response.data[1]['target_key'] is None
|
||||
assert response.data[1]['unsupported_reason'] == 'bork blocks are not supported'
|
||||
mock_lib_api.require_permission_for_library_key.assert_called_once()
|
||||
|
||||
def test_get_block_migration_info_missing_target_key(self):
|
||||
"""
|
||||
Test that missing target_key parameter returns 400 Bad Request.
|
||||
|
||||
Validates:
|
||||
- 400 status code when target_key is not provided
|
||||
"""
|
||||
request = self.factory.get('/api/modulestore_migrator/v1/migration_blocks/')
|
||||
force_authenticate(request, user=self.user)
|
||||
|
||||
response = self.view(request)
|
||||
|
||||
assert response.status_code == status.HTTP_400_BAD_REQUEST
|
||||
assert 'target' in response.data.get('error', '').lower()
|
||||
|
||||
def test_get_block_migration_info_invalid_target_key(self):
|
||||
"""
|
||||
Test that invalid target_key returns 400 Bad Request.
|
||||
|
||||
Validates:
|
||||
- 400 status code when target_key format is invalid
|
||||
"""
|
||||
request = self.factory.get(
|
||||
'/api/modulestore_migrator/v1/migration_blocks/',
|
||||
{'target_key': 'not-a-valid-key'}
|
||||
)
|
||||
force_authenticate(request, user=self.user)
|
||||
|
||||
response = self.view(request)
|
||||
|
||||
assert response.status_code == status.HTTP_400_BAD_REQUEST
|
||||
|
||||
def test_get_block_migration_info_invalid_is_failed_value(self):
|
||||
"""
|
||||
Test that invalid is_failed parameter returns 400 Bad Request.
|
||||
|
||||
Validates:
|
||||
- 400 status code when is_failed value is not a valid boolean
|
||||
"""
|
||||
request = self.factory.get(
|
||||
'/api/modulestore_migrator/v1/migration_blocks/',
|
||||
{
|
||||
'target_key': 'lib:TestOrg:TestLibrary',
|
||||
'is_failed': 'not-a-boolean'
|
||||
}
|
||||
)
|
||||
force_authenticate(request, user=self.user)
|
||||
|
||||
response = self.view(request)
|
||||
|
||||
assert response.status_code == status.HTTP_400_BAD_REQUEST
|
||||
assert 'is_failed' in response.data.get('error', '').lower()
|
||||
|
||||
@patch('cms.djangoapps.modulestore_migrator.rest_api.v1.views.lib_api')
|
||||
def test_get_block_migration_info_without_library_access(self, mock_lib_api):
|
||||
"""
|
||||
Test that users without library view access get 403 Forbidden.
|
||||
|
||||
Validates:
|
||||
- 403 status code when user lacks view permission on target library
|
||||
"""
|
||||
mock_lib_api.require_permission_for_library_key.side_effect = PermissionDenied(
|
||||
"User lacks permission to view this library"
|
||||
)
|
||||
|
||||
request = self.factory.get(
|
||||
'/api/modulestore_migrator/v1/migration_blocks/',
|
||||
{'target_key': 'lib:TestOrg:TestLibrary'}
|
||||
)
|
||||
force_authenticate(request, user=self.user)
|
||||
|
||||
response = self.view(request)
|
||||
|
||||
assert response.status_code == status.HTTP_403_FORBIDDEN
|
||||
@@ -28,20 +28,20 @@ from cms.djangoapps.modulestore_migrator.tasks import (
|
||||
_migrate_node,
|
||||
_MigratedNode,
|
||||
_MigrationContext,
|
||||
_MigrationTask,
|
||||
bulk_migrate_from_modulestore,
|
||||
migrate_from_modulestore,
|
||||
)
|
||||
from common.djangoapps.student.tests.factories import UserFactory
|
||||
from openedx.core.djangoapps.content_libraries import api as lib_api
|
||||
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
|
||||
from xmodule.modulestore.tests.factories import CourseFactory, LibraryFactory
|
||||
from xmodule.modulestore.tests.factories import CourseFactory, LibraryFactory, BlockFactory
|
||||
|
||||
from .. import api as migrator_api
|
||||
|
||||
|
||||
@ddt.ddt
|
||||
class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
"""
|
||||
Test the migrate_from_modulestore task
|
||||
Test the bulk_migrate_from_modulestore task
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
@@ -101,6 +101,28 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
title="Test Collection 2",
|
||||
)
|
||||
|
||||
def _make_migration_context(self, **kwargs) -> _MigrationContext:
|
||||
"""
|
||||
Builds a _MigrationContext object with default values, overridable with kwargs
|
||||
"""
|
||||
return _MigrationContext(
|
||||
**{
|
||||
"used_component_keys": set(),
|
||||
"used_container_slugs": set(),
|
||||
"previous_block_migrations": {},
|
||||
"target_package_id": self.learning_package.id,
|
||||
"target_library_key": self.library.library_key,
|
||||
"source_context_key": self.course.id,
|
||||
"content_by_filename": {},
|
||||
"composition_level": CompositionLevel.Unit,
|
||||
"repeat_handling_strategy": RepeatHandlingStrategy.Skip,
|
||||
"preserve_url_slugs": True,
|
||||
"created_at": timezone.now(),
|
||||
"created_by": self.user.id,
|
||||
**kwargs,
|
||||
},
|
||||
)
|
||||
|
||||
def _get_task_status_fail_message(self, status):
|
||||
"""
|
||||
Helper method to get the failure message from a UserTaskStatus object.
|
||||
@@ -114,18 +136,7 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
Test _migrate_node ignores wiki tags
|
||||
"""
|
||||
wiki_node = etree.fromstring("<wiki />")
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
context = self._make_migration_context()
|
||||
|
||||
result = _migrate_node(
|
||||
context=context,
|
||||
@@ -144,19 +155,7 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
'<chapter url_name="chapter1" display_name="Chapter 1" />'
|
||||
"</course>"
|
||||
)
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
|
||||
context = self._make_migration_context()
|
||||
result = _migrate_node(
|
||||
context=context,
|
||||
source_node=course_node,
|
||||
@@ -176,18 +175,7 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
'<problem url_name="problem1" display_name="Problem 1" />'
|
||||
"</library>"
|
||||
)
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
context = self._make_migration_context()
|
||||
result = _migrate_node(
|
||||
context=context,
|
||||
source_node=library_node,
|
||||
@@ -216,19 +204,7 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
container_node = etree.fromstring(
|
||||
f'<{tag_name} url_name="test_{tag_name}" display_name="Test {tag_name.title()}" />'
|
||||
)
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=composition_level,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
|
||||
context = self._make_migration_context(composition_level=composition_level)
|
||||
result = _migrate_node(
|
||||
context=context,
|
||||
source_node=container_node,
|
||||
@@ -250,19 +226,7 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
node_without_url_name = etree.fromstring(
|
||||
'<problem display_name="No URL Name" />'
|
||||
)
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
|
||||
context = self._make_migration_context()
|
||||
result = _migrate_node(
|
||||
context=context,
|
||||
source_node=node_without_url_name,
|
||||
@@ -281,19 +245,7 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
<problem display_name="Test Problem2"><multiplechoiceresponse></multiplechoiceresponse></problem>
|
||||
</library_content>
|
||||
''')
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
|
||||
context = self._make_migration_context()
|
||||
result = _migrate_node(
|
||||
context=context,
|
||||
source_node=node_without_url_name,
|
||||
@@ -338,27 +290,6 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
self.assertEqual(pairs[1][0], key2)
|
||||
self.assertEqual(pairs[2][0], key3)
|
||||
|
||||
def test_migrate_from_modulestore_invalid_source(self):
|
||||
"""
|
||||
Test migrate_from_modulestore with invalid source
|
||||
"""
|
||||
task = migrate_from_modulestore.apply_async(
|
||||
kwargs={
|
||||
"user_id": self.user.id,
|
||||
"source_pk": 999999, # Non-existent source
|
||||
"target_library_key": str(self.lib_key),
|
||||
"target_collection_pk": self.collection.id,
|
||||
"repeat_handling_strategy": RepeatHandlingStrategy.Skip.value,
|
||||
"preserve_url_slugs": True,
|
||||
"composition_level": CompositionLevel.Unit.value,
|
||||
"forward_source_to_target": False,
|
||||
}
|
||||
)
|
||||
|
||||
status = UserTaskStatus.objects.get(task_id=task.id)
|
||||
self.assertEqual(status.state, UserTaskStatus.FAILED)
|
||||
self.assertEqual(self._get_task_status_fail_message(status), "ModulestoreSource matching query does not exist.")
|
||||
|
||||
def test_bulk_migrate_invalid_sources(self):
|
||||
"""
|
||||
Test bulk_migrate_from_modulestore with invalid source
|
||||
@@ -380,31 +311,6 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
self.assertEqual(status.state, UserTaskStatus.FAILED)
|
||||
self.assertEqual(self._get_task_status_fail_message(status), "ModulestoreSource matching query does not exist.")
|
||||
|
||||
def test_migrate_from_modulestore_invalid_collection(self):
|
||||
"""
|
||||
Test migrate_from_modulestore with invalid collection
|
||||
"""
|
||||
source = ModulestoreSource.objects.create(
|
||||
key=self.course.id,
|
||||
)
|
||||
|
||||
task = migrate_from_modulestore.apply_async(
|
||||
kwargs={
|
||||
"user_id": self.user.id,
|
||||
"source_pk": source.id,
|
||||
"target_library_key": str(self.lib_key),
|
||||
"target_collection_pk": 999999, # Non-existent collection
|
||||
"repeat_handling_strategy": RepeatHandlingStrategy.Skip.value,
|
||||
"preserve_url_slugs": True,
|
||||
"composition_level": CompositionLevel.Unit.value,
|
||||
"forward_source_to_target": False,
|
||||
}
|
||||
)
|
||||
|
||||
status = UserTaskStatus.objects.get(task_id=task.id)
|
||||
self.assertEqual(status.state, UserTaskStatus.FAILED)
|
||||
self.assertEqual(self._get_task_status_fail_message(status), "Collection matching query does not exist.")
|
||||
|
||||
def test_bulk_migrate_invalid_collection(self):
|
||||
"""
|
||||
Test bulk_migrate_from_modulestore with invalid collection
|
||||
@@ -430,14 +336,6 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
self.assertEqual(status.state, UserTaskStatus.FAILED)
|
||||
self.assertEqual(self._get_task_status_fail_message(status), "Collection matching query does not exist.")
|
||||
|
||||
def test_migration_task_calculate_total_steps(self):
|
||||
"""
|
||||
Test _MigrationTask.calculate_total_steps returns correct count
|
||||
"""
|
||||
total_steps = _MigrationTask.calculate_total_steps({})
|
||||
expected_steps = len(list(MigrationStep)) - 1
|
||||
self.assertEqual(total_steps, expected_steps)
|
||||
|
||||
def test_bulk_migration_task_calculate_total_steps(self):
|
||||
"""
|
||||
Test _BulkMigrationTask.calculate_total_steps returns correct count
|
||||
@@ -454,19 +352,7 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
"""
|
||||
source_key = self.course.id.make_usage_key("problem", "test_problem")
|
||||
olx = '<problem display_name="Test Problem"><multiplechoiceresponse></multiplechoiceresponse></problem>'
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
|
||||
context = self._make_migration_context()
|
||||
result, reason = _migrate_component(
|
||||
context=context,
|
||||
source_key=source_key,
|
||||
@@ -496,19 +382,7 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
<problem display_name="Test Problem2"><multiplechoiceresponse></multiplechoiceresponse></problem>
|
||||
</library_content>
|
||||
'''
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
|
||||
context = self._make_migration_context()
|
||||
result, reason = _migrate_component(
|
||||
context=context,
|
||||
source_key=source_key,
|
||||
@@ -538,18 +412,7 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
created=timezone.now(),
|
||||
)
|
||||
content_by_filename = {"test_image.png": test_content.id}
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename=content_by_filename,
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
context = self._make_migration_context(content_by_filename=content_by_filename)
|
||||
result, reason = _migrate_component(
|
||||
context=context,
|
||||
source_key=source_key,
|
||||
@@ -566,166 +429,204 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
self.assertIsNotNone(component_content)
|
||||
self.assertEqual(component_content.content_id, test_content.id)
|
||||
|
||||
def test_migrate_component_replace_existing_false(self):
|
||||
def test_migrate_skip_repeats(self):
|
||||
"""
|
||||
Test _migrate_component with replace_existing=False returns existing component
|
||||
Test that, when requested, the migration will Skip blocks that have previously been migrated
|
||||
|
||||
Tests with both a container and a component
|
||||
"""
|
||||
source_key = self.course.id.make_usage_key("problem", "existing_problem")
|
||||
olx = '<problem display_name="Test Problem"><multiplechoiceresponse></multiplechoiceresponse></problem>'
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
source = ModulestoreSource.objects.create(key=self.course.id)
|
||||
|
||||
# Create a legacy lib with 2 blocks and migrate it
|
||||
source_html = BlockFactory.create(
|
||||
category="html",
|
||||
display_name="Test HTML for Skip",
|
||||
parent_location=self.course.usage_key,
|
||||
user_id=self.user.id,
|
||||
publish_item=False
|
||||
)
|
||||
source_unit = BlockFactory.create(
|
||||
category="vertical",
|
||||
display_name="Test Unit for Skip",
|
||||
parent_location=self.course.usage_key,
|
||||
user_id=self.user.id,
|
||||
publish_item=False
|
||||
)
|
||||
bulk_migrate_from_modulestore.apply_async(
|
||||
kwargs={
|
||||
"user_id": self.user.id,
|
||||
"sources_pks": [source.id],
|
||||
"target_library_key": str(self.lib_key),
|
||||
"target_collection_pks": [],
|
||||
"repeat_handling_strategy": RepeatHandlingStrategy.Skip.value, # arbitrary
|
||||
"preserve_url_slugs": True,
|
||||
"composition_level": CompositionLevel.Unit.value,
|
||||
"forward_source_to_target": False,
|
||||
}
|
||||
)
|
||||
|
||||
first_result, first_reason = _migrate_component(
|
||||
context=context,
|
||||
source_key=source_key,
|
||||
olx=olx,
|
||||
title="test_problem"
|
||||
# Update both blocks, and add a new one. Then migrate again.
|
||||
source_html.display_name = "Test HTML for Skip - Source Updated"
|
||||
source_html.save()
|
||||
self.store.update_item(source_html, self.user.id)
|
||||
source_unit.display_name = "Test Unit for Skip - Source Updated"
|
||||
source_unit.save()
|
||||
self.store.update_item(source_unit, self.user.id)
|
||||
source_html_new = BlockFactory.create(
|
||||
category="html",
|
||||
display_name="Test HTML New",
|
||||
parent_location=self.course.usage_key,
|
||||
user_id=self.user.id,
|
||||
publish_item=False
|
||||
)
|
||||
bulk_migrate_from_modulestore.apply_async(
|
||||
kwargs={
|
||||
"user_id": self.user.id,
|
||||
"sources_pks": [source.id],
|
||||
"target_library_key": str(self.lib_key),
|
||||
"target_collection_pks": [],
|
||||
"repeat_handling_strategy": RepeatHandlingStrategy.Skip.value, # <-- important
|
||||
"preserve_url_slugs": True,
|
||||
"composition_level": CompositionLevel.Unit.value,
|
||||
"forward_source_to_target": False,
|
||||
}
|
||||
)
|
||||
|
||||
context.existing_source_to_target_keys[source_key] = [first_result.entity]
|
||||
# The first migration's info includes the initial two blocks.
|
||||
migration_1, migration_0 = list(migrator_api.get_migrations(source_key=source.key))
|
||||
mappings_0 = migrator_api.get_migration_blocks(migration_0.pk)
|
||||
assert set(mappings_0) == {source_html.usage_key, source_unit.usage_key}
|
||||
assert mappings_0[source_html.usage_key].target_title == "Test HTML for Skip"
|
||||
assert mappings_0[source_unit.usage_key].target_title == "Test Unit for Skip"
|
||||
|
||||
second_result, second_reason = _migrate_component(
|
||||
context=context,
|
||||
source_key=source_key,
|
||||
olx='<problem display_name="Updated Problem"><multiplechoiceresponse></multiplechoiceresponse></problem>',
|
||||
title="updated_problem"
|
||||
)
|
||||
self.assertIsNone(first_reason)
|
||||
self.assertIsNone(second_reason)
|
||||
|
||||
self.assertEqual(first_result.entity_id, second_result.entity_id)
|
||||
self.assertEqual(first_result.version_num, second_result.version_num)
|
||||
# The next migration's info includes the newly-added block,
|
||||
# but not the edited blocks, because we chose Skip.
|
||||
mappings_1 = migrator_api.get_migration_blocks(migration_1.pk)
|
||||
assert set(mappings_1) == {source_html_new.usage_key}
|
||||
assert mappings_1[source_html_new.usage_key].target_title == "Test HTML New"
|
||||
|
||||
def test_migrate_component_same_title(self):
|
||||
"""
|
||||
Test _migrate_component for two components with the same title
|
||||
Test a migration with two components of the same title, when updating.
|
||||
|
||||
Using preserve_url_slugs=False to create a new component with
|
||||
a different URL slug based on the component's Title.
|
||||
We expect that both blocks will be migrated to target components with usage keys
|
||||
based on the shared title, but disambiguated by a _1 suffix.
|
||||
"""
|
||||
source_key_1 = self.course.id.make_usage_key("problem", "existing_problem_1")
|
||||
source_key_2 = self.course.id.make_usage_key("problem", "existing_problem_2")
|
||||
olx = '<problem display_name="Test Problem"><multiplechoiceresponse></multiplechoiceresponse></problem>'
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=False,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
source = ModulestoreSource.objects.create(key=self.course.id)
|
||||
source_key_1 = self.course.id.make_usage_key("html", "existing_html_1")
|
||||
source_key_2 = self.course.id.make_usage_key("html", "existing_html_2")
|
||||
BlockFactory.create(
|
||||
category="html",
|
||||
display_name="Test HTML Same Title",
|
||||
location=source_key_1,
|
||||
parent_location=self.course.usage_key,
|
||||
user_id=self.user.id,
|
||||
publish_item=False
|
||||
)
|
||||
|
||||
first_result, first_reason = _migrate_component(
|
||||
context=context,
|
||||
source_key=source_key_1,
|
||||
olx=olx,
|
||||
title="test_problem"
|
||||
BlockFactory.create(
|
||||
category="html",
|
||||
display_name="Test HTML Same Title",
|
||||
location=source_key_2,
|
||||
parent_location=self.course.usage_key,
|
||||
user_id=self.user.id,
|
||||
publish_item=False
|
||||
)
|
||||
|
||||
context.existing_source_to_target_keys[source_key_1] = [first_result.entity]
|
||||
|
||||
second_result, second_reason = _migrate_component(
|
||||
context=context,
|
||||
source_key=source_key_2,
|
||||
olx=olx,
|
||||
title="test_problem"
|
||||
bulk_migrate_from_modulestore.apply_async(
|
||||
kwargs={
|
||||
"user_id": self.user.id,
|
||||
"sources_pks": [source.id],
|
||||
"target_library_key": str(self.lib_key),
|
||||
"target_collection_pks": [],
|
||||
"repeat_handling_strategy": RepeatHandlingStrategy.Skip.value,
|
||||
"preserve_url_slugs": False,
|
||||
"composition_level": CompositionLevel.Unit.value,
|
||||
"forward_source_to_target": False,
|
||||
}
|
||||
)
|
||||
self.assertIsNone(first_reason)
|
||||
self.assertIsNone(second_reason)
|
||||
migrations = list(migrator_api.get_migrations(source_key=source.key))
|
||||
assert len(migrations) == 1
|
||||
mappings = migrator_api.get_migration_blocks(migrations[0].pk)
|
||||
assert (html_migration_1 := mappings.get(source_key_1))
|
||||
assert (block_migration_2 := mappings.get(source_key_2))
|
||||
assert html_migration_1.target_title == "Test HTML Same Title"
|
||||
assert block_migration_2.target_title == "Test HTML Same Title"
|
||||
assert str(html_migration_1.target_key) == "lb:testorg:test-key:html:test-html-same-title"
|
||||
assert str(block_migration_2.target_key) == "lb:testorg:test-key:html:test-html-same-title_1"
|
||||
|
||||
self.assertNotEqual(first_result.entity_id, second_result.entity_id)
|
||||
self.assertNotEqual(first_result.entity.key, second_result.entity.key)
|
||||
|
||||
def test_migrate_component_replace_existing_true(self):
|
||||
def test_migrate_update_repeats(self):
|
||||
"""
|
||||
Test _migrate_component with replace_existing=True creates new version
|
||||
Test that, when requested, the migration will update blocks that have previously been migrated
|
||||
|
||||
Tests with both a container and a component
|
||||
"""
|
||||
source_key = self.course.id.make_usage_key("problem", "replaceable_problem")
|
||||
original_olx = '<problem display_name="Original"><multiplechoiceresponse></multiplechoiceresponse></problem>'
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Update,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
source = ModulestoreSource.objects.create(key=self.course.id)
|
||||
source_html = BlockFactory.create(
|
||||
category="html",
|
||||
display_name="Test HTML for Update",
|
||||
parent_location=self.course.usage_key,
|
||||
user_id=self.user.id,
|
||||
publish_item=False
|
||||
)
|
||||
|
||||
first_result, first_reason = _migrate_component(
|
||||
context=context,
|
||||
source_key=source_key,
|
||||
olx=original_olx,
|
||||
title="original"
|
||||
source_unit = BlockFactory.create(
|
||||
category="vertical",
|
||||
display_name="Test Unit for Update",
|
||||
parent_location=self.course.usage_key,
|
||||
user_id=self.user.id,
|
||||
publish_item=False
|
||||
)
|
||||
|
||||
context.existing_source_to_target_keys[source_key] = [first_result.entity]
|
||||
|
||||
updated_olx = '<problem display_name="Updated"><multiplechoiceresponse></multiplechoiceresponse></problem>'
|
||||
second_result, second_reason = _migrate_component(
|
||||
context=context,
|
||||
source_key=source_key,
|
||||
olx=updated_olx,
|
||||
title="updated"
|
||||
bulk_migrate_from_modulestore.apply_async(
|
||||
kwargs={
|
||||
"user_id": self.user.id,
|
||||
"sources_pks": [source.id],
|
||||
"target_library_key": str(self.lib_key),
|
||||
"target_collection_pks": [],
|
||||
# (the value of repeat_handling_strategy here doesn't matter for this test)
|
||||
"repeat_handling_strategy": RepeatHandlingStrategy.Skip.value,
|
||||
"preserve_url_slugs": True,
|
||||
"composition_level": CompositionLevel.Unit.value,
|
||||
"forward_source_to_target": False,
|
||||
}
|
||||
)
|
||||
self.assertIsNone(first_reason)
|
||||
self.assertIsNone(second_reason)
|
||||
source_html.display_name = "Test HTML for Update - Source Updated"
|
||||
source_html.save()
|
||||
self.store.update_item(source_html, self.user.id)
|
||||
source_unit.display_name = "Test Unit for Update - Source Updated"
|
||||
source_unit.save()
|
||||
self.store.update_item(source_unit, self.user.id)
|
||||
bulk_migrate_from_modulestore.apply_async(
|
||||
kwargs={
|
||||
"user_id": self.user.id,
|
||||
"sources_pks": [source.id],
|
||||
"target_library_key": str(self.lib_key),
|
||||
"target_collection_pks": [],
|
||||
"repeat_handling_strategy": RepeatHandlingStrategy.Update.value,
|
||||
"preserve_url_slugs": True,
|
||||
"composition_level": CompositionLevel.Unit.value,
|
||||
"forward_source_to_target": False,
|
||||
}
|
||||
)
|
||||
migration_1, migration_0 = list(migrator_api.get_migrations(source_key=source.key))
|
||||
mappings_0 = migrator_api.get_migration_blocks(migration_0.pk)
|
||||
mappings_1 = migrator_api.get_migration_blocks(migration_1.pk)
|
||||
assert (html_migration_0 := mappings_0.get(source_html.usage_key))
|
||||
assert (unit_migration_0 := mappings_0.get(source_unit.usage_key))
|
||||
assert (html_migration_1 := mappings_1.get(source_html.usage_key))
|
||||
assert (unit_migration_1 := mappings_1.get(source_unit.usage_key))
|
||||
|
||||
self.assertEqual(first_result.entity_id, second_result.entity_id)
|
||||
self.assertNotEqual(first_result.version_num, second_result.version_num)
|
||||
# The targets of both migrations are the same
|
||||
assert str(html_migration_0.target_key) == "lb:testorg:test-key:html:Test_HTML_for_Update"
|
||||
assert str(html_migration_1.target_key) == "lb:testorg:test-key:html:Test_HTML_for_Update"
|
||||
assert html_migration_0.target_entity_pk == html_migration_1.target_entity_pk
|
||||
assert str(unit_migration_0.target_key) == "lct:testorg:test-key:unit:Test_Unit_for_Update"
|
||||
assert unit_migration_0.target_entity_pk == unit_migration_1.target_entity_pk
|
||||
|
||||
def test_migrate_component_different_block_types(self):
|
||||
"""
|
||||
Test _migrate_component with different block types
|
||||
"""
|
||||
block_types = ["problem", "html", "video", "discussion"]
|
||||
|
||||
for block_type in block_types:
|
||||
source_key = self.course.id.make_usage_key(block_type, f"test_{block_type}")
|
||||
olx = f'<{block_type} display_name="Test {block_type.title()}"></{block_type}>'
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
|
||||
result, reason = _migrate_component(
|
||||
context=context,
|
||||
source_key=source_key,
|
||||
olx=olx,
|
||||
title="test"
|
||||
)
|
||||
|
||||
self.assertIsNotNone(result, f"Failed to migrate {block_type}")
|
||||
self.assertIsNone(reason)
|
||||
|
||||
self.assertEqual(
|
||||
block_type, result.componentversion.component.component_type.name
|
||||
)
|
||||
# And because we specified Update, the targets were updated on the 2nd migration
|
||||
assert html_migration_0.target_title == "Test HTML for Update"
|
||||
assert unit_migration_0.target_title == "Test Unit for Update"
|
||||
assert html_migration_1.target_title == "Test HTML for Update - Source Updated"
|
||||
assert unit_migration_1.target_title == "Test Unit for Update - Source Updated"
|
||||
assert html_migration_0.target_version_num == html_migration_1.target_version_num - 1
|
||||
assert unit_migration_0.target_version_num == unit_migration_1.target_version_num - 1
|
||||
|
||||
def test_migrate_component_content_filename_not_in_olx(self):
|
||||
"""
|
||||
@@ -754,19 +655,10 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
"referenced.png": referenced_content.id,
|
||||
"unreferenced.png": unreferenced_content.id,
|
||||
}
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
context = self._make_migration_context(
|
||||
content_by_filename=content_by_filename,
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
|
||||
result, reason = _migrate_component(
|
||||
context=context,
|
||||
source_key=source_key,
|
||||
@@ -798,19 +690,7 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
library_key = LibraryLocator(org="TestOrg", library="TestLibrary")
|
||||
source_key = library_key.make_usage_key("problem", "library_problem")
|
||||
olx = '<problem display_name="Library Problem"><multiplechoiceresponse></multiplechoiceresponse></problem>'
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
|
||||
context = self._make_migration_context()
|
||||
result, reason = _migrate_component(
|
||||
context=context,
|
||||
source_key=source_key,
|
||||
@@ -825,58 +705,6 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
"problem", result.componentversion.component.component_type.name
|
||||
)
|
||||
|
||||
def test_migrate_component_duplicate_content_integrity_error(self):
|
||||
"""
|
||||
Test _migrate_component handles IntegrityError when content already exists
|
||||
"""
|
||||
source_key = self.course.id.make_usage_key(
|
||||
"problem", "test_problem_duplicate_content"
|
||||
)
|
||||
olx = '<problem display_name="Test Problem"><p>See image: duplicate.png</p></problem>'
|
||||
|
||||
media_type = authoring_api.get_or_create_media_type("image/png")
|
||||
test_content = authoring_api.get_or_create_file_content(
|
||||
self.learning_package.id,
|
||||
media_type.id,
|
||||
data=b"test_image_data",
|
||||
created=timezone.now(),
|
||||
)
|
||||
content_by_filename = {"duplicate.png": test_content.id}
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename=content_by_filename,
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Update,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
|
||||
first_result, first_reason = _migrate_component(
|
||||
context=context,
|
||||
source_key=source_key,
|
||||
olx=olx,
|
||||
title="test_problem"
|
||||
)
|
||||
self.assertIsNone(first_reason)
|
||||
|
||||
context.existing_source_to_target_keys[source_key] = [first_result.entity]
|
||||
|
||||
second_result, second_reason = _migrate_component(
|
||||
context=context,
|
||||
source_key=source_key,
|
||||
olx=olx,
|
||||
title="test_problem"
|
||||
)
|
||||
self.assertIsNone(second_reason)
|
||||
|
||||
self.assertIsNotNone(first_result)
|
||||
self.assertIsNotNone(second_result)
|
||||
self.assertEqual(first_result.entity_id, second_result.entity_id)
|
||||
|
||||
def test_migrate_container_creates_new_container(self):
|
||||
"""
|
||||
Test _migrate_container creates a new container when none exists
|
||||
@@ -919,18 +747,7 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
child_version_1.publishable_entity_version,
|
||||
child_version_2.publishable_entity_version,
|
||||
]
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
context = self._make_migration_context(repeat_handling_strategy=RepeatHandlingStrategy.Skip)
|
||||
|
||||
result, reason = _migrate_container(
|
||||
context=context,
|
||||
@@ -962,18 +779,7 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
(lib_api.ContainerType.Subsection, "sequential"),
|
||||
(lib_api.ContainerType.Section, "chapter"),
|
||||
]
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
context = self._make_migration_context(repeat_handling_strategy=RepeatHandlingStrategy.Skip)
|
||||
|
||||
for container_type, block_type in container_types:
|
||||
with self.subTest(container_type=container_type, block_type=block_type):
|
||||
@@ -997,154 +803,52 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
# The container is published
|
||||
self.assertFalse(authoring_api.contains_unpublished_changes(container_version.container.pk))
|
||||
|
||||
def test_migrate_container_replace_existing_false(self):
|
||||
"""
|
||||
Test _migrate_container returns existing container when replace_existing=False
|
||||
"""
|
||||
source_key = self.course.id.make_usage_key("vertical", "existing_vertical")
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
|
||||
first_result, _ = _migrate_container(
|
||||
context=context,
|
||||
source_key=source_key,
|
||||
container_type=lib_api.ContainerType.Unit,
|
||||
title="Original Title",
|
||||
children=[],
|
||||
)
|
||||
|
||||
context.existing_source_to_target_keys[source_key] = [first_result.entity]
|
||||
|
||||
second_result, _ = _migrate_container(
|
||||
context=context,
|
||||
source_key=source_key,
|
||||
container_type=lib_api.ContainerType.Unit,
|
||||
title="Updated Title",
|
||||
children=[],
|
||||
)
|
||||
|
||||
self.assertEqual(first_result.entity_id, second_result.entity_id)
|
||||
self.assertEqual(first_result.version_num, second_result.version_num)
|
||||
|
||||
container_version = second_result.containerversion
|
||||
self.assertEqual(container_version.title, "Original Title")
|
||||
|
||||
def test_migrate_container_same_title(self):
|
||||
"""
|
||||
Test _migrate_container for two containers with the same title
|
||||
Test a migration with two containers of the same title and preserve_url_slugs=False
|
||||
|
||||
Using preserve_url_slugs=False to create a new Unit with
|
||||
a different URL slug based on the container's Title.
|
||||
We expect that both units will be migrated to target units with container keys
|
||||
based on the shared title, but disambiguated by a _1 suffix.
|
||||
"""
|
||||
source_key_1 = self.course.id.make_usage_key("vertical", "human_readable_vertical_1")
|
||||
source_key_2 = self.course.id.make_usage_key("vertical", "human_readable_vertical_2")
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=False,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
source = ModulestoreSource.objects.create(key=self.course.id)
|
||||
source_key_1 = self.course.id.make_usage_key("vertical", "existing_unit_1")
|
||||
source_key_2 = self.course.id.make_usage_key("vertical", "existing_unit_2")
|
||||
BlockFactory.create(
|
||||
category="vertical",
|
||||
display_name="Test Unit Same Title",
|
||||
location=source_key_1,
|
||||
parent_location=self.course.usage_key,
|
||||
user_id=self.user.id,
|
||||
publish_item=False
|
||||
)
|
||||
|
||||
first_result, _ = _migrate_container(
|
||||
context=context,
|
||||
source_key=source_key_1,
|
||||
container_type=lib_api.ContainerType.Unit,
|
||||
title="Original Human Readable Title",
|
||||
children=[],
|
||||
BlockFactory.create(
|
||||
category="html",
|
||||
display_name="Test Unit Same Title",
|
||||
location=source_key_2,
|
||||
parent_location=self.course.usage_key,
|
||||
user_id=self.user.id,
|
||||
publish_item=False
|
||||
)
|
||||
|
||||
context.existing_source_to_target_keys[source_key_1] = [first_result.entity]
|
||||
|
||||
second_result, _ = _migrate_container(
|
||||
context=context,
|
||||
source_key=source_key_2,
|
||||
container_type=lib_api.ContainerType.Unit,
|
||||
title="Original Human Readable Title",
|
||||
children=[],
|
||||
bulk_migrate_from_modulestore.apply_async(
|
||||
kwargs={
|
||||
"user_id": self.user.id,
|
||||
"sources_pks": [source.id],
|
||||
"target_library_key": str(self.lib_key),
|
||||
"target_collection_pks": [],
|
||||
"repeat_handling_strategy": RepeatHandlingStrategy.Skip.value,
|
||||
"preserve_url_slugs": False,
|
||||
"composition_level": CompositionLevel.Unit.value,
|
||||
"forward_source_to_target": False,
|
||||
}
|
||||
)
|
||||
|
||||
self.assertNotEqual(first_result.entity_id, second_result.entity_id)
|
||||
self.assertNotEqual(first_result.entity.key, second_result.entity.key)
|
||||
# Make sure the current logic from tasts::_find_unique_slug is used
|
||||
self.assertEqual(second_result.entity.key, first_result.entity.key + "_1")
|
||||
|
||||
container_version = second_result.containerversion
|
||||
self.assertEqual(container_version.title, "Original Human Readable Title")
|
||||
|
||||
def test_migrate_container_replace_existing_true(self):
|
||||
"""
|
||||
Test _migrate_container creates new version when replace_existing=True
|
||||
"""
|
||||
source_key = self.course.id.make_usage_key("vertical", "replaceable_vertical")
|
||||
|
||||
child_component = authoring_api.create_component(
|
||||
self.learning_package.id,
|
||||
component_type=authoring_api.get_or_create_component_type(
|
||||
"xblock.v1", "problem"
|
||||
),
|
||||
local_key="child_problem",
|
||||
created=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
child_version = authoring_api.create_next_component_version(
|
||||
child_component.pk,
|
||||
content_to_replace={},
|
||||
created=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Update,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
|
||||
first_result, _ = _migrate_container(
|
||||
context=context,
|
||||
source_key=source_key,
|
||||
container_type=lib_api.ContainerType.Unit,
|
||||
title="Original Title",
|
||||
children=[],
|
||||
)
|
||||
|
||||
context.existing_source_to_target_keys[source_key] = [first_result.entity]
|
||||
|
||||
second_result, _ = _migrate_container(
|
||||
context=context,
|
||||
source_key=source_key,
|
||||
container_type=lib_api.ContainerType.Unit,
|
||||
title="Updated Title",
|
||||
children=[child_version.publishable_entity_version],
|
||||
)
|
||||
|
||||
self.assertEqual(first_result.entity_id, second_result.entity_id)
|
||||
self.assertNotEqual(first_result.version_num, second_result.version_num)
|
||||
|
||||
container_version = second_result.containerversion
|
||||
self.assertEqual(container_version.title, "Updated Title")
|
||||
self.assertEqual(container_version.entity_list.entitylistrow_set.count(), 1)
|
||||
(migration,) = list(migrator_api.get_migrations(source_key=source.key))
|
||||
mappings = migrator_api.get_migration_blocks(migration.pk)
|
||||
assert (html_migration_1 := mappings.get(source_key_1))
|
||||
assert (block_migration_2 := mappings.get(source_key_2))
|
||||
assert html_migration_1.target_title == "Test Unit Same Title"
|
||||
assert block_migration_2.target_title == "Test Unit Same Title"
|
||||
assert str(html_migration_1.target_key) == "lct:testorg:test-key:unit:test-unit-same-title"
|
||||
assert str(block_migration_2.target_key) == "lct:testorg:test-key:unit:test-unit-same-title_1"
|
||||
|
||||
def test_migrate_container_with_library_source_key(self):
|
||||
"""
|
||||
@@ -1152,18 +856,7 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
"""
|
||||
library_key = LibraryLocator(org="TestOrg", library="TestLibrary")
|
||||
source_key = library_key.make_usage_key("vertical", "library_vertical")
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
context = self._make_migration_context(repeat_handling_strategy=RepeatHandlingStrategy.Skip)
|
||||
|
||||
result, _ = _migrate_container(
|
||||
context=context,
|
||||
@@ -1183,19 +876,7 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
Test _migrate_container handles empty children list
|
||||
"""
|
||||
source_key = self.course.id.make_usage_key("vertical", "empty_vertical")
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
|
||||
context = self._make_migration_context(repeat_handling_strategy=RepeatHandlingStrategy.Skip)
|
||||
result, reason = _migrate_container(
|
||||
context=context,
|
||||
source_key=source_key,
|
||||
@@ -1215,18 +896,7 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
Test _migrate_container preserves the order of children
|
||||
"""
|
||||
source_key = self.course.id.make_usage_key("vertical", "ordered_vertical")
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
context = self._make_migration_context(repeat_handling_strategy=RepeatHandlingStrategy.Skip)
|
||||
children = []
|
||||
for i in range(3):
|
||||
child_component = authoring_api.create_component(
|
||||
@@ -1322,19 +992,7 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
html_version.publishable_entity_version,
|
||||
video_version.publishable_entity_version,
|
||||
]
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
|
||||
context = self._make_migration_context(repeat_handling_strategy=RepeatHandlingStrategy.Skip)
|
||||
result, _ = _migrate_container(
|
||||
context=context,
|
||||
source_key=source_key,
|
||||
@@ -1356,76 +1014,6 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
expected_entity_ids = {child.entity_id for child in children}
|
||||
self.assertEqual(child_entity_ids, expected_entity_ids)
|
||||
|
||||
def test_migrate_container_generates_correct_target_key(self):
|
||||
"""
|
||||
Test _migrate_container generates correct target key from source key
|
||||
"""
|
||||
course_source_key = self.course.id.make_usage_key("vertical", "test_vertical")
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
content_by_filename={},
|
||||
composition_level=CompositionLevel.Unit,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
created_at=timezone.now(),
|
||||
created_by=self.user.id,
|
||||
)
|
||||
|
||||
course_result, _ = _migrate_container(
|
||||
context=context,
|
||||
source_key=course_source_key,
|
||||
container_type=lib_api.ContainerType.Unit,
|
||||
title="Course Vertical",
|
||||
children=[],
|
||||
)
|
||||
context.add_migration(course_source_key, course_result.entity)
|
||||
|
||||
library_key = LibraryLocator(org="TestOrg", library="TestLibrary")
|
||||
library_source_key = library_key.make_usage_key("vertical", "test_vertical")
|
||||
|
||||
library_result, _ = _migrate_container(
|
||||
context=context,
|
||||
source_key=library_source_key,
|
||||
container_type=lib_api.ContainerType.Unit,
|
||||
title="Library Vertical",
|
||||
children=[],
|
||||
)
|
||||
|
||||
self.assertIsNotNone(course_result)
|
||||
self.assertIsNotNone(library_result)
|
||||
self.assertNotEqual(course_result.entity_id, library_result.entity_id)
|
||||
|
||||
def test_migrate_from_modulestore_success_course(self):
|
||||
"""
|
||||
Test successful migration from course to library
|
||||
"""
|
||||
source = ModulestoreSource.objects.create(key=self.course.id)
|
||||
|
||||
task = migrate_from_modulestore.apply_async(
|
||||
kwargs={
|
||||
"user_id": self.user.id,
|
||||
"source_pk": source.id,
|
||||
"target_library_key": str(self.lib_key),
|
||||
"target_collection_pk": self.collection.id,
|
||||
"repeat_handling_strategy": RepeatHandlingStrategy.Skip.value,
|
||||
"preserve_url_slugs": True,
|
||||
"composition_level": CompositionLevel.Unit.value,
|
||||
"forward_source_to_target": False,
|
||||
}
|
||||
)
|
||||
|
||||
status = UserTaskStatus.objects.get(task_id=task.id)
|
||||
self.assertEqual(status.state, UserTaskStatus.SUCCEEDED)
|
||||
|
||||
migration = ModulestoreMigration.objects.get(
|
||||
source=source, target=self.learning_package
|
||||
)
|
||||
self.assertEqual(migration.composition_level, CompositionLevel.Unit.value)
|
||||
self.assertEqual(migration.repeat_handling_strategy, RepeatHandlingStrategy.Skip.value)
|
||||
|
||||
def test_bulk_migrate_success_courses(self):
|
||||
"""
|
||||
Test successful bulk migration from courses to library
|
||||
@@ -1467,12 +1055,12 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
"""
|
||||
source = ModulestoreSource.objects.create(key=self.legacy_library.location.library_key)
|
||||
|
||||
task = migrate_from_modulestore.apply_async(
|
||||
task = bulk_migrate_from_modulestore.apply_async(
|
||||
kwargs={
|
||||
"user_id": self.user.id,
|
||||
"source_pk": source.id,
|
||||
"sources_pks": [source.id],
|
||||
"target_library_key": str(self.lib_key),
|
||||
"target_collection_pk": self.collection.id,
|
||||
"target_collection_pks": [self.collection.id],
|
||||
"repeat_handling_strategy": RepeatHandlingStrategy.Skip.value,
|
||||
"preserve_url_slugs": True,
|
||||
"composition_level": CompositionLevel.Unit.value,
|
||||
@@ -1618,7 +1206,6 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
migrations = ModulestoreMigration.objects.filter(
|
||||
source=source, target=self.learning_package
|
||||
)
|
||||
|
||||
for migration in migrations:
|
||||
self.assertEqual(migration.composition_level, CompositionLevel.Unit.value)
|
||||
self.assertEqual(migration.repeat_handling_strategy, repeat_handling_strategy.value)
|
||||
@@ -1758,64 +1345,6 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
self.assertEqual(migrations[1].target_collection.title, f"{self.legacy_library.display_name}_1")
|
||||
self.assertNotEqual(migrations[1].target_collection.id, previous_collection.id)
|
||||
|
||||
def test_migrate_from_modulestore_library_validation_failure(self):
|
||||
"""
|
||||
Test migration from legacy library fails when modulestore content doesn't exist
|
||||
"""
|
||||
library_key = LibraryLocator(org="TestOrg", library="TestLibrary")
|
||||
|
||||
source = ModulestoreSource.objects.create(key=library_key)
|
||||
|
||||
task = migrate_from_modulestore.apply_async(
|
||||
kwargs={
|
||||
"user_id": self.user.id,
|
||||
"source_pk": source.id,
|
||||
"target_library_key": str(self.lib_key),
|
||||
"target_collection_pk": None,
|
||||
"repeat_handling_strategy": RepeatHandlingStrategy.Update.value,
|
||||
"preserve_url_slugs": True,
|
||||
"composition_level": CompositionLevel.Section.value,
|
||||
"forward_source_to_target": True,
|
||||
}
|
||||
)
|
||||
|
||||
status = UserTaskStatus.objects.get(task_id=task.id)
|
||||
|
||||
# Should fail at loading step since we don't have real modulestore content
|
||||
self.assertEqual(status.state, UserTaskStatus.FAILED)
|
||||
self.assertEqual(
|
||||
self._get_task_status_fail_message(status),
|
||||
"Failed to load source item 'lib-block-v1:TestOrg+TestLibrary+type@library+block@library' "
|
||||
"from ModuleStore: library-v1:TestOrg+TestLibrary+branch@library"
|
||||
)
|
||||
|
||||
def test_migrate_from_modulestore_invalid_source_key_type(self):
|
||||
"""
|
||||
Test migration with invalid source key type
|
||||
"""
|
||||
invalid_key = LibraryLocatorV2.from_string("lib:testorg:invalid")
|
||||
source = ModulestoreSource.objects.create(key=invalid_key)
|
||||
|
||||
task = migrate_from_modulestore.apply_async(
|
||||
kwargs={
|
||||
"user_id": self.user.id,
|
||||
"source_pk": source.id,
|
||||
"target_library_key": str(self.lib_key),
|
||||
"target_collection_pk": self.collection.id,
|
||||
"repeat_handling_strategy": RepeatHandlingStrategy.Skip.value,
|
||||
"preserve_url_slugs": True,
|
||||
"composition_level": CompositionLevel.Unit.value,
|
||||
"forward_source_to_target": False,
|
||||
}
|
||||
)
|
||||
|
||||
status = UserTaskStatus.objects.get(task_id=task.id)
|
||||
self.assertEqual(status.state, UserTaskStatus.FAILED)
|
||||
self.assertEqual(
|
||||
self._get_task_status_fail_message(status),
|
||||
f"Not a valid source context key: {invalid_key}. Source key must reference a course or a legacy library."
|
||||
)
|
||||
|
||||
def test_bulk_migrate_invalid_source_key_type(self):
|
||||
"""
|
||||
Test bulk migration with invalid source key type
|
||||
@@ -1850,7 +1379,9 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
source_key = self.course.id.make_usage_key("fake_block", "test_fake_block")
|
||||
olx = '<fake_block display_name="Test fake_block"></fake_block>'
|
||||
context = _MigrationContext(
|
||||
existing_source_to_target_keys={},
|
||||
used_component_keys=set(),
|
||||
used_container_slugs=set(),
|
||||
previous_block_migrations={},
|
||||
target_package_id=self.learning_package.id,
|
||||
target_library_key=self.library.library_key,
|
||||
source_context_key=self.course.id,
|
||||
@@ -1872,36 +1403,6 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
self.assertIsNone(result)
|
||||
self.assertEqual(reason, "Invalid block type: fake_block")
|
||||
|
||||
def test_migrate_from_modulestore_nonexistent_modulestore_item(self):
|
||||
"""
|
||||
Test migration when modulestore item doesn't exist
|
||||
"""
|
||||
nonexistent_course_key = CourseKey.from_string(
|
||||
"course-v1:NonExistent+Course+Run"
|
||||
)
|
||||
source = ModulestoreSource.objects.create(key=nonexistent_course_key)
|
||||
|
||||
task = migrate_from_modulestore.apply_async(
|
||||
kwargs={
|
||||
"user_id": self.user.id,
|
||||
"source_pk": source.id,
|
||||
"target_library_key": str(self.lib_key),
|
||||
"target_collection_pk": self.collection.id,
|
||||
"repeat_handling_strategy": RepeatHandlingStrategy.Skip.value,
|
||||
"preserve_url_slugs": True,
|
||||
"composition_level": CompositionLevel.Unit.value,
|
||||
"forward_source_to_target": False,
|
||||
}
|
||||
)
|
||||
|
||||
status = UserTaskStatus.objects.get(task_id=task.id)
|
||||
self.assertEqual(status.state, UserTaskStatus.FAILED)
|
||||
self.assertEqual(
|
||||
self._get_task_status_fail_message(status),
|
||||
"Failed to load source item 'block-v1:NonExistent+Course+Run+type@course+block@course' "
|
||||
"from ModuleStore: course-v1:NonExistent+Course+Run+branch@draft-branch"
|
||||
)
|
||||
|
||||
def test_bulk_migrate_nonexistent_modulestore_item(self):
|
||||
"""
|
||||
Test bulk migration when modulestore item doesn't exist
|
||||
@@ -1932,16 +1433,47 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
"from ModuleStore: course-v1:NonExistent+Course+Run+branch@draft-branch"
|
||||
)
|
||||
|
||||
def test_migrate_from_modulestore_task_status_progression(self):
|
||||
def test_bulk_migrate_nonexistent_library(self):
|
||||
"""
|
||||
Test migration from legacy library fails when modulestore content doesn't exist
|
||||
"""
|
||||
library_key = LibraryLocator(org="TestOrg", library="TestLibrary")
|
||||
|
||||
source = ModulestoreSource.objects.create(key=library_key)
|
||||
|
||||
task = bulk_migrate_from_modulestore.apply_async(
|
||||
kwargs={
|
||||
"user_id": self.user.id,
|
||||
"sources_pks": [source.id],
|
||||
"target_library_key": str(self.lib_key),
|
||||
"target_collection_pks": [None],
|
||||
"repeat_handling_strategy": RepeatHandlingStrategy.Update.value,
|
||||
"preserve_url_slugs": True,
|
||||
"composition_level": CompositionLevel.Section.value,
|
||||
"forward_source_to_target": True,
|
||||
}
|
||||
)
|
||||
|
||||
status = UserTaskStatus.objects.get(task_id=task.id)
|
||||
|
||||
# Should fail at loading step since we don't have real modulestore content
|
||||
self.assertEqual(status.state, UserTaskStatus.FAILED)
|
||||
self.assertEqual(
|
||||
self._get_task_status_fail_message(status),
|
||||
"Failed to load source item 'lib-block-v1:TestOrg+TestLibrary+type@library+block@library' "
|
||||
"from ModuleStore: library-v1:TestOrg+TestLibrary+branch@library"
|
||||
)
|
||||
|
||||
def test_bulk_migrate_from_modulestore_task_status_progression(self):
|
||||
"""Test that task status progresses through expected steps"""
|
||||
source = ModulestoreSource.objects.create(key=self.course.id)
|
||||
|
||||
task = migrate_from_modulestore.apply_async(
|
||||
task = bulk_migrate_from_modulestore.apply_async(
|
||||
kwargs={
|
||||
"user_id": self.user.id,
|
||||
"source_pk": source.id,
|
||||
"sources_pks": [source.id],
|
||||
"target_library_key": str(self.lib_key),
|
||||
"target_collection_pk": self.collection.id,
|
||||
"target_collection_pks": [self.collection.id],
|
||||
"repeat_handling_strategy": RepeatHandlingStrategy.Skip.value,
|
||||
"preserve_url_slugs": True,
|
||||
"composition_level": CompositionLevel.Unit.value,
|
||||
@@ -1959,48 +1491,6 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
)
|
||||
self.assertEqual(migration.task_status, status)
|
||||
|
||||
def test_migrate_from_modulestore_multiple_users_no_interference(self):
|
||||
"""
|
||||
Test that migrations by different users don't interfere with each other
|
||||
"""
|
||||
source = ModulestoreSource.objects.create(key=self.course.id)
|
||||
other_user = UserFactory()
|
||||
|
||||
task1 = migrate_from_modulestore.apply_async(
|
||||
kwargs={
|
||||
"user_id": self.user.id,
|
||||
"source_pk": source.id,
|
||||
"target_library_key": str(self.lib_key),
|
||||
"target_collection_pk": self.collection.id,
|
||||
"repeat_handling_strategy": RepeatHandlingStrategy.Skip.value,
|
||||
"preserve_url_slugs": True,
|
||||
"composition_level": CompositionLevel.Unit.value,
|
||||
"forward_source_to_target": False,
|
||||
}
|
||||
)
|
||||
|
||||
task2 = migrate_from_modulestore.apply_async(
|
||||
kwargs={
|
||||
"user_id": other_user.id,
|
||||
"source_pk": source.id,
|
||||
"target_library_key": str(self.lib_key),
|
||||
"target_collection_pk": self.collection.id,
|
||||
"repeat_handling_strategy": RepeatHandlingStrategy.Skip.value,
|
||||
"preserve_url_slugs": True,
|
||||
"composition_level": CompositionLevel.Unit.value,
|
||||
"forward_source_to_target": False,
|
||||
}
|
||||
)
|
||||
|
||||
status1 = UserTaskStatus.objects.get(task_id=task1.id)
|
||||
status2 = UserTaskStatus.objects.get(task_id=task2.id)
|
||||
|
||||
self.assertEqual(status1.user, self.user)
|
||||
self.assertEqual(status2.user, other_user)
|
||||
|
||||
# The first task should not be cancelled since it's from a different user
|
||||
self.assertNotEqual(status1.state, UserTaskStatus.CANCELED)
|
||||
|
||||
def test_bulk_migrate_multiple_users_no_interference(self):
|
||||
"""
|
||||
Test that migrations by different users don't interfere with each other
|
||||
@@ -2043,35 +1533,6 @@ class TestMigrateFromModulestore(ModuleStoreTestCase):
|
||||
# The first task should not be cancelled since it's from a different user
|
||||
self.assertNotEqual(status1.state, UserTaskStatus.CANCELED)
|
||||
|
||||
@patch("cms.djangoapps.modulestore_migrator.tasks._import_assets")
|
||||
def test_migrate_fails_on_import(self, mock_import_assets):
|
||||
"""
|
||||
Test failed migration from legacy library to V2 library
|
||||
"""
|
||||
mock_import_assets.side_effect = Exception("Simulated import error")
|
||||
source = ModulestoreSource.objects.create(key=self.legacy_library.location.library_key)
|
||||
|
||||
task = migrate_from_modulestore.apply_async(
|
||||
kwargs={
|
||||
"user_id": self.user.id,
|
||||
"source_pk": source.id,
|
||||
"target_library_key": str(self.lib_key),
|
||||
"target_collection_pk": self.collection.id,
|
||||
"repeat_handling_strategy": RepeatHandlingStrategy.Skip.value,
|
||||
"preserve_url_slugs": True,
|
||||
"composition_level": CompositionLevel.Unit.value,
|
||||
"forward_source_to_target": False,
|
||||
}
|
||||
)
|
||||
|
||||
status = UserTaskStatus.objects.get(task_id=task.id)
|
||||
self.assertEqual(status.state, UserTaskStatus.FAILED)
|
||||
|
||||
migration = ModulestoreMigration.objects.get(
|
||||
source=source, target=self.learning_package
|
||||
)
|
||||
self.assertTrue(migration.is_failed)
|
||||
|
||||
@patch("cms.djangoapps.modulestore_migrator.tasks._import_assets")
|
||||
def test_bulk_migrate_fails_on_import(self, mock_import_assets):
|
||||
"""
|
||||
|
||||
@@ -175,6 +175,7 @@ ignore_imports =
|
||||
name = Do not depend on non-public API of isolated apps.
|
||||
type = isolated_apps
|
||||
isolated_apps =
|
||||
cms.djangoapps.modulestore_migrator
|
||||
openedx.core.djangoapps.agreements
|
||||
openedx.core.djangoapps.bookmarks
|
||||
openedx.core.djangoapps.content_libraries
|
||||
|
||||
@@ -12,11 +12,12 @@ from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import typing as t
|
||||
from gettext import gettext, ngettext
|
||||
|
||||
import nh3
|
||||
from django.core.exceptions import ObjectDoesNotExist, PermissionDenied
|
||||
from opaque_keys.edx.locator import LibraryLocator
|
||||
from opaque_keys.edx.locator import LibraryLocator, LibraryLocatorV2
|
||||
from web_fragments.fragment import Fragment
|
||||
from webob import Response
|
||||
from xblock.core import XBlock
|
||||
@@ -29,6 +30,10 @@ from xmodule.modulestore.exceptions import ItemNotFoundError
|
||||
from xmodule.validation import StudioValidation, StudioValidationMessage
|
||||
from xmodule.x_module import XModuleToXBlockMixin
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from xmodule.library_tools import LegacyLibraryToolsService
|
||||
|
||||
|
||||
_ = lambda text: text
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -118,20 +123,20 @@ class LegacyLibraryContentBlock(ItemBankMixin, XModuleToXBlockMixin, XBlock):
|
||||
return LibraryLocator.from_string(self.source_library_id)
|
||||
|
||||
@property
|
||||
def is_source_lib_migrated_to_v2(self):
|
||||
def is_source_lib_migrated_to_v2(self) -> bool:
|
||||
"""
|
||||
Determines whether the source library has been migrated to v2.
|
||||
"""
|
||||
from cms.djangoapps.modulestore_migrator.api import is_successfully_migrated
|
||||
from cms.djangoapps.modulestore_migrator.api import is_forwarded
|
||||
|
||||
return (
|
||||
self.source_library_id
|
||||
and self.source_library_version
|
||||
and is_successfully_migrated(self.source_library_key, source_version=self.source_library_version)
|
||||
and is_forwarded(self.source_library_key)
|
||||
)
|
||||
|
||||
@property
|
||||
def is_ready_to_migrated_to_v2(self):
|
||||
def is_ready_to_migrated_to_v2(self) -> bool:
|
||||
"""
|
||||
Returns whether the block can be migrated to v2.
|
||||
"""
|
||||
@@ -198,7 +203,7 @@ class LegacyLibraryContentBlock(ItemBankMixin, XModuleToXBlockMixin, XBlock):
|
||||
])
|
||||
return non_editable_fields
|
||||
|
||||
def get_tools(self, to_read_library_content: bool = False) -> 'LegacyLibraryToolsService':
|
||||
def get_tools(self, to_read_library_content: bool = False) -> LegacyLibraryToolsService:
|
||||
"""
|
||||
Grab the library tools service and confirm that it'll work for us. Else, raise LibraryToolsUnavailable.
|
||||
"""
|
||||
@@ -315,16 +320,20 @@ class LegacyLibraryContentBlock(ItemBankMixin, XModuleToXBlockMixin, XBlock):
|
||||
Update the upstream and upstream version fields of all children to point to library v2 version of the legacy
|
||||
library blocks. This essentially converts this legacy block to new ItemBankBlock.
|
||||
"""
|
||||
from cms.djangoapps.modulestore_migrator.api import get_target_block_usage_keys
|
||||
blocks = get_target_block_usage_keys(self.source_library_key)
|
||||
from cms.djangoapps.modulestore_migrator import api as migrator_api
|
||||
store = modulestore()
|
||||
with store.bulk_operations(self.course_id):
|
||||
for child in self.get_children():
|
||||
source_key, _ = self.runtime.modulestore.get_block_original_usage(child.usage_key)
|
||||
child.upstream = str(blocks.get(source_key, ""))
|
||||
# Since after migration, the component in library is in draft state, we want to make sure that sync icon
|
||||
# appears when it is published
|
||||
child.upstream_version = 0
|
||||
children = self.get_children()
|
||||
child_migrations = migrator_api.get_forwarding_for_blocks([child.usage_key for child in children])
|
||||
for child in children:
|
||||
old_upstream_key, _ = self.runtime.modulestore.get_block_original_usage(child.usage_key)
|
||||
upstream_migration = child_migrations.get(old_upstream_key)
|
||||
if upstream_migration and isinstance(upstream_migration.target_key, LibraryLocatorV2):
|
||||
child.upstream = str(upstream_migration.target_key)
|
||||
if upstream_migration.target_version_num:
|
||||
child.upstream_version = upstream_migration.target_version_num
|
||||
else:
|
||||
child.upstream = ""
|
||||
# Use `modulestore()` instead of `self.runtime.modulestore` to make sure that the XBLOCK_UPDATED signal
|
||||
# is triggered
|
||||
store.update_item(child, None)
|
||||
|
||||
@@ -762,8 +762,8 @@ class TestMigratedLibraryContentRender(LegacyLibraryContentTest):
|
||||
source_key=self.library.location.library_key,
|
||||
target_library_key=self.library_v2.library_key,
|
||||
target_collection_slug=None,
|
||||
composition_level=CompositionLevel.Component.value,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip.value,
|
||||
composition_level=CompositionLevel.Component,
|
||||
repeat_handling_strategy=RepeatHandlingStrategy.Skip,
|
||||
preserve_url_slugs=True,
|
||||
forward_source_to_target=True,
|
||||
)
|
||||
@@ -791,48 +791,3 @@ class TestMigratedLibraryContentRender(LegacyLibraryContentTest):
|
||||
assert '<li>html 2</li>' in rendered.content
|
||||
assert '<li>html 3</li>' in rendered.content
|
||||
assert '<li>html 4</li>' in rendered.content
|
||||
|
||||
def test_xml_export_import_cycle(self):
|
||||
"""
|
||||
Test the export-import cycle.
|
||||
"""
|
||||
# Render block to migrate it first
|
||||
self.lc_block.render(AUTHOR_VIEW, {})
|
||||
# Set the virtual FS to export the olx to.
|
||||
export_fs = MemoryFS()
|
||||
self.lc_block.runtime.export_fs = export_fs # pylint: disable=protected-access
|
||||
|
||||
# Export the olx.
|
||||
node = etree.Element("unknown_root")
|
||||
self.lc_block.add_xml_to_node(node)
|
||||
|
||||
# Read back the olx.
|
||||
file_path = f'{self.lc_block.scope_ids.usage_id.block_type}/{self.lc_block.scope_ids.usage_id.block_id}.xml'
|
||||
with export_fs.open(file_path) as f:
|
||||
exported_olx = f.read()
|
||||
|
||||
expected_olx_export = (
|
||||
f'<library_content display_name="{self.lc_block.display_name}" is_migrated_to_v2="true"'
|
||||
f' max_count="{self.lc_block.max_count}" source_library_id="{self.lc_block.source_library_id}" '
|
||||
f'source_library_version="{self.lc_block.source_library_version}">\n'
|
||||
f' <html url_name="{self.lc_block.children[0].block_id}"/>\n'
|
||||
f' <html url_name="{self.lc_block.children[1].block_id}"/>\n'
|
||||
f' <html url_name="{self.lc_block.children[2].block_id}"/>\n'
|
||||
f' <html url_name="{self.lc_block.children[3].block_id}"/>\n'
|
||||
'</library_content>\n'
|
||||
)
|
||||
# And compare.
|
||||
assert exported_olx == expected_olx_export
|
||||
|
||||
# Now import it.
|
||||
runtime = DummyModuleStoreRuntime(load_error_blocks=True, course_id=self.lc_block.location.course_key)
|
||||
runtime.resources_fs = export_fs
|
||||
olx_element = etree.fromstring(exported_olx)
|
||||
imported_lc_block = LegacyLibraryContentBlock.parse_xml(olx_element, runtime, None)
|
||||
|
||||
self._verify_xblock_properties(imported_lc_block)
|
||||
# Verify migration info in the child
|
||||
assert imported_lc_block.is_migrated_to_v2
|
||||
for child in imported_lc_block.get_children():
|
||||
assert child.xml_attributes.get('upstream') is not None
|
||||
assert str(child.xml_attributes.get('upstream_version')) == '0'
|
||||
|
||||
Reference in New Issue
Block a user