fix: Bugs with "Publish All Changes" in Library [FC-0083] (#36640)

* fix: "[created] received a naive datetime"
* fix: leaky "isolation" of events was causing test failures
* fix: make lib events more specific, emit them async, handle hierarchy correctly
* chore: bump openedx-events to 10.2.0 for new library PUBLISHED events
This commit is contained in:
Braden MacDonald
2025-05-08 11:57:07 -07:00
committed by GitHub
parent 4ff7c3936f
commit 2e91a05112
23 changed files with 1043 additions and 827 deletions

View File

@@ -804,6 +804,12 @@ class TestDuplicateItem(ItemTest, DuplicateHelper, OpenEdxEventsTestMixin):
super().setUpClass()
cls.start_events_isolation()
@classmethod
def tearDownClass(cls):
    """ Don't let our event isolation affect other test cases """
    super().tearDownClass()
    cls.enable_all_events()  # Re-enable events other than the ENABLED_OPENEDX_EVENTS subset we isolated.
def setUp(self):
"""Creates the test course structure and a few components to 'duplicate'."""
super().setUp()

View File

@@ -653,29 +653,6 @@ def _delete_index_doc(doc_id) -> None:
_wait_for_meili_tasks(tasks)
def delete_all_draft_docs_for_library(library_key: LibraryLocatorV2) -> None:
"""
Deletes draft documents for the given XBlocks from the search index
"""
current_rebuild_index_name = _get_running_rebuild_index_name()
client = _get_meilisearch_client()
# Delete all documents where last_published is null i.e. never published before.
delete_filter = [
f'{Fields.context_key}="{library_key}"',
# This field should only be NULL or have a value, but we're also checking IS EMPTY just in case.
# Inner arrays are connected by an OR
[f'{Fields.last_published} IS EMPTY', f'{Fields.last_published} IS NULL'],
]
tasks = []
if current_rebuild_index_name:
# If there is a rebuild in progress, the documents will also be deleted from the new index.
tasks.append(client.index(current_rebuild_index_name).delete_documents(filter=delete_filter))
tasks.append(client.index(STUDIO_INDEX_NAME).delete_documents(filter=delete_filter))
_wait_for_meili_tasks(tasks)
def upsert_library_block_index_doc(usage_key: UsageKey) -> None:
"""
Creates or updates the document for the given Library Block in the search index

View File

@@ -23,12 +23,14 @@ from openedx_events.content_authoring.signals import (
LIBRARY_BLOCK_CREATED,
LIBRARY_BLOCK_DELETED,
LIBRARY_BLOCK_UPDATED,
LIBRARY_BLOCK_PUBLISHED,
LIBRARY_COLLECTION_CREATED,
LIBRARY_COLLECTION_DELETED,
LIBRARY_COLLECTION_UPDATED,
LIBRARY_CONTAINER_CREATED,
LIBRARY_CONTAINER_DELETED,
LIBRARY_CONTAINER_UPDATED,
LIBRARY_CONTAINER_PUBLISHED,
XBLOCK_CREATED,
XBLOCK_DELETED,
XBLOCK_UPDATED,
@@ -37,6 +39,7 @@ from openedx_events.content_authoring.signals import (
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from openedx.core.djangoapps.content.search.models import SearchAccess
from openedx.core.djangoapps.content_libraries import api as lib_api
from .api import (
only_if_meilisearch_enabled,
@@ -136,6 +139,32 @@ def library_block_updated_handler(**kwargs) -> None:
upsert_library_block_index_doc.apply(args=[str(library_block_data.usage_key)])
@receiver(LIBRARY_BLOCK_PUBLISHED)
@only_if_meilisearch_enabled
def library_block_published_handler(**kwargs) -> None:
    """
    Update the index for the content library block when its published version
    has changed.
    """
    library_block_data = kwargs.get("library_block", None)
    if not library_block_data or not isinstance(library_block_data, LibraryBlockData):  # pragma: no cover
        log.error("Received null or incorrect data for event")
        return
    # The PUBLISHED event is sent for any change to the published version including deletes, so check if it exists:
    try:
        lib_api.get_library_block(library_block_data.usage_key)
    except lib_api.ContentLibraryBlockNotFound:
        log.info(f"Observed published deletion of library block {str(library_block_data.usage_key)}.")
        # The document should already have been deleted from the search index
        # via the DELETED handler, so there's nothing to do now.
        return
    # Update content library index synchronously to make sure that search index is updated before
    # the frontend invalidates/refetches results. This is only a single document update so is very fast.
    upsert_library_block_index_doc.apply(args=[str(library_block_data.usage_key)])
@receiver(LIBRARY_BLOCK_DELETED)
@only_if_meilisearch_enabled
def library_block_deleted(**kwargs) -> None:
@@ -162,14 +191,14 @@ def content_library_updated_handler(**kwargs) -> None:
if not content_library_data or not isinstance(content_library_data, ContentLibraryData): # pragma: no cover
log.error("Received null or incorrect data for event")
return
library_key = content_library_data.library_key
# Update content library index synchronously to make sure that search index is updated before
# the frontend invalidates/refetches index.
# Currently, this is only required to make sure that removed/discarded components are removed
# from the search index and displayed to user properly. If it becomes a performance bottleneck
# for other update operations other than discard, we can update CONTENT_LIBRARY_UPDATED event
# to include a parameter which can help us decide if the task needs to run sync or async.
update_content_library_index_docs.apply(args=[str(content_library_data.library_key)])
# For now we assume the library has been renamed. Few other things will trigger this event.
# Update ALL items in the library, because their breadcrumbs will be outdated.
# TODO: just patch the "breadcrumbs" field? It's the same on every one.
# TODO: check if the library display_name has actually changed before updating all items?
update_content_library_index_docs.apply(args=[str(library_key)])
@receiver(LIBRARY_COLLECTION_CREATED)
@@ -248,17 +277,34 @@ def library_container_updated_handler(**kwargs) -> None:
log.error("Received null or incorrect data for event")
return
if library_container.background:
update_library_container_index_doc.delay(
str(library_container.container_key),
)
else:
# Update container index synchronously to make sure that search index is updated before
# the frontend invalidates/refetches index.
# See content_library_updated_handler for more details.
update_library_container_index_doc.apply(args=[
str(library_container.container_key),
])
update_library_container_index_doc.apply(args=[
str(library_container.container_key),
])
@receiver(LIBRARY_CONTAINER_PUBLISHED)
@only_if_meilisearch_enabled
def library_container_published_handler(**kwargs) -> None:
    """
    Update the index for the content library container when its published
    version has changed.
    """
    library_container = kwargs.get("library_container", None)
    if not library_container or not isinstance(library_container, LibraryContainerData):  # pragma: no cover
        log.error("Received null or incorrect data for event")
        return
    # The PUBLISHED event is sent for any change to the published version including deletes, so check if it exists:
    try:
        lib_api.get_container(library_container.container_key)
    except lib_api.ContentLibraryContainerNotFound:
        log.info(f"Observed published deletion of container {str(library_container.container_key)}.")
        # The document should already have been deleted from the search index
        # via the DELETED handler, so there's nothing to do now.
        return
    # Update the container's index document synchronously (single document, so this is fast):
    update_library_container_index_doc.apply(args=[
        str(library_container.container_key),
    ])
@receiver(LIBRARY_CONTAINER_DELETED)
@@ -275,3 +321,6 @@ def library_container_deleted(**kwargs) -> None:
# Update content library index synchronously to make sure that search index is updated before
# the frontend invalidates/refetches results. This is only a single document update so is very fast.
delete_library_container_index_doc.apply(args=[str(library_container.container_key)])
# TODO: post-Teak, move all the celery tasks directly inline into these handlers? Because now the
# events are emitted in an [async] worker, so it doesn't matter if the handlers are synchronous.
# See https://github.com/openedx/edx-platform/pull/36640 discussion.

View File

@@ -86,9 +86,6 @@ def update_content_library_index_docs(library_key_str: str) -> None:
log.info("Updating content index documents for library with id: %s", library_key)
api.upsert_content_library_index_docs(library_key)
# Delete all documents in this library that were not published by above function
# as this task is also triggered on discard event.
api.delete_all_draft_docs_for_library(library_key)
@shared_task(base=LoggedTask, autoretry_for=(MeilisearchError, ConnectionError))

View File

@@ -734,21 +734,6 @@ class TestSearchApi(ModuleStoreTestCase):
[self.doc_problem1, self.doc_problem2]
)
@override_settings(MEILISEARCH_ENABLED=True)
def test_delete_all_drafts(self, mock_meilisearch):
"""
Test deleting all draft documents from the index.
"""
api.delete_all_draft_docs_for_library(self.library.key)
delete_filter = [
f'context_key="{self.library.key}"',
['last_published IS EMPTY', 'last_published IS NULL'],
]
mock_meilisearch.return_value.index.return_value.delete_documents.assert_called_once_with(
filter=delete_filter
)
@override_settings(MEILISEARCH_ENABLED=True)
def test_index_tags_in_collections(self, mock_meilisearch):
# Tag collection

View File

@@ -63,10 +63,9 @@ from .containers import (
ContainerMetadata,
ContainerType,
)
from .libraries import (
library_collection_locator,
PublishableItem,
)
from .collections import library_collection_locator
from .libraries import PublishableItem
from .. import tasks
# This content_libraries API is sometimes imported in the LMS (should we prevent that?), but the content_staging app
# cannot be. For now we only need this one type import at module scope, so only import it during type checks.
@@ -836,24 +835,13 @@ def publish_component_changes(usage_key: LibraryUsageLocatorV2, user: UserType):
# The core publishing API is based on draft objects, so find the draft that corresponds to this component:
drafts_to_publish = authoring_api.get_all_drafts(learning_package.id).filter(entity__key=component.key)
# Publish the component and update anything that needs to be updated (e.g. search index):
authoring_api.publish_from_drafts(learning_package.id, draft_qset=drafts_to_publish, published_by=user.id)
LIBRARY_BLOCK_UPDATED.send_event(
library_block=LibraryBlockData(
library_key=usage_key.lib_key,
usage_key=usage_key,
)
publish_log = authoring_api.publish_from_drafts(
learning_package.id, draft_qset=drafts_to_publish, published_by=user.id,
)
# For each container, trigger LIBRARY_CONTAINER_UPDATED signal and set background=True to trigger
# container indexing asynchronously.
affected_containers = get_containers_contains_component(usage_key)
for container in affected_containers:
LIBRARY_CONTAINER_UPDATED.send_event(
library_container=LibraryContainerData(
container_key=container.container_key,
background=True,
)
)
# Since this is a single component, it should be safe to process synchronously and in-process:
tasks.send_events_after_publish(publish_log.pk, str(library_key))
# IF this is found to be a performance issue, we could instead make it async where necessary:
# tasks.wait_for_post_publish_events(publish_log, library_key=library_key)
def _component_exists(usage_key: UsageKeyV2) -> bool:

View File

@@ -4,7 +4,7 @@ API for containers (Sections, Subsections, Units) in Content Libraries
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime
from datetime import datetime, timezone
from enum import Enum
import logging
from uuid import uuid4
@@ -14,13 +14,11 @@ from opaque_keys.edx.keys import UsageKeyV2
from opaque_keys.edx.locator import LibraryContainerLocator, LibraryLocatorV2, LibraryUsageLocatorV2
from openedx_events.content_authoring.data import (
ContentObjectChangedData,
LibraryBlockData,
LibraryCollectionData,
LibraryContainerData,
)
from openedx_events.content_authoring.signals import (
CONTENT_OBJECT_ASSOCIATIONS_CHANGED,
LIBRARY_BLOCK_UPDATED,
LIBRARY_COLLECTION_UPDATED,
LIBRARY_CONTAINER_CREATED,
LIBRARY_CONTAINER_DELETED,
@@ -34,8 +32,9 @@ from openedx.core.djangoapps.xblock.api import get_component_from_usage_key
from ..models import ContentLibrary
from .exceptions import ContentLibraryContainerNotFound
from .libraries import PublishableItem, library_component_usage_key
from .libraries import PublishableItem
from .block_metadata import LibraryXBlockMetadata
from .. import tasks
# The public API is only the following symbols:
__all__ = [
@@ -250,7 +249,7 @@ def create_container(
content_library.learning_package_id,
key=slug,
title=title,
created=created or datetime.now(),
created=created or datetime.now(tz=timezone.utc),
created_by=user_id,
)
case _:
@@ -280,7 +279,7 @@ def update_container(
unit_version = authoring_api.create_next_unit_version(
container.unit,
title=display_name,
created=datetime.now(),
created=datetime.now(tz=timezone.utc),
created_by=user_id,
)
@@ -427,7 +426,7 @@ def update_container_children(
new_version = authoring_api.create_next_unit_version(
container.unit,
components=components, # type: ignore[arg-type]
created=datetime.now(),
created=datetime.now(tz=timezone.utc),
created_by=user_id,
entities_action=entities_action,
)
@@ -478,21 +477,6 @@ def publish_container_changes(container_key: LibraryContainerLocator, user_id: i
draft_qset=drafts_to_publish,
published_by=user_id,
)
# Update anything that needs to be updated (e.g. search index):
for record in publish_log.records.select_related("entity", "entity__container", "entity__component").all():
if hasattr(record.entity, "component"):
# This is a child component like an XBLock in a Unit that was published:
usage_key = library_component_usage_key(library_key, record.entity.component)
LIBRARY_BLOCK_UPDATED.send_event(
library_block=LibraryBlockData(library_key=library_key, usage_key=usage_key)
)
elif hasattr(record.entity, "container"):
# This is a child container like a Unit, or is the same "container" we published above.
LIBRARY_CONTAINER_UPDATED.send_event(
library_container=LibraryContainerData(container_key=container_key)
)
else:
log.warning(
f"PublishableEntity {record.entity.pk} / {record.entity.key} was modified during publish operation "
"but is of unknown type."
)
# Update the search index (and anything else) for the affected container + blocks
# This is mostly synchronous but may complete some work asynchronously if there are a lot of changes.
tasks.wait_for_post_publish_events(publish_log, library_key)

View File

@@ -55,15 +55,11 @@ from django.utils.translation import gettext as _
from opaque_keys.edx.locator import LibraryLocatorV2, LibraryUsageLocatorV2
from openedx_events.content_authoring.data import (
ContentLibraryData,
LibraryCollectionData,
ContentObjectChangedData,
)
from openedx_events.content_authoring.signals import (
CONTENT_LIBRARY_CREATED,
CONTENT_LIBRARY_DELETED,
CONTENT_LIBRARY_UPDATED,
LIBRARY_COLLECTION_UPDATED,
CONTENT_OBJECT_ASSOCIATIONS_CHANGED,
)
from openedx_learning.api import authoring as authoring_api
from openedx_learning.api.authoring_models import Component
@@ -75,7 +71,7 @@ from openedx.core.types import User as UserType
from .. import permissions
from ..constants import ALL_RIGHTS_RESERVED
from ..models import ContentLibrary, ContentLibraryPermission
from .collections import library_collection_locator
from .. import tasks
from .exceptions import (
LibraryAlreadyExists,
LibraryPermissionIntegrityError,
@@ -666,14 +662,15 @@ def publish_changes(library_key: LibraryLocatorV2, user_id: int | None = None):
"""
learning_package = ContentLibrary.objects.get_by_key(library_key).learning_package
assert learning_package is not None # shouldn't happen but it's technically possible.
authoring_api.publish_all_drafts(learning_package.id, published_by=user_id)
publish_log = authoring_api.publish_all_drafts(learning_package.id, published_by=user_id)
CONTENT_LIBRARY_UPDATED.send_event(
content_library=ContentLibraryData(
library_key=library_key,
update_blocks=True
)
)
# Update the search index (and anything else) for the affected blocks
# This is mostly synchronous but may complete some work asynchronously if there are a lot of changes.
tasks.wait_for_post_publish_events(publish_log, library_key)
# Unlike revert_changes below, we do not have to re-index collections,
# because publishing changes does not affect the component counts, and
# collections themselves don't have draft/published/unpublished status.
def revert_changes(library_key: LibraryLocatorV2, user_id: int | None = None) -> None:
@@ -683,46 +680,8 @@ def revert_changes(library_key: LibraryLocatorV2, user_id: int | None = None) ->
"""
learning_package = ContentLibrary.objects.get_by_key(library_key).learning_package
assert learning_package is not None # shouldn't happen but it's technically possible.
authoring_api.reset_drafts_to_published(learning_package.id, reset_by=user_id)
with authoring_api.bulk_draft_changes_for(learning_package.id) as draft_change_log:
authoring_api.reset_drafts_to_published(learning_package.id, reset_by=user_id)
CONTENT_LIBRARY_UPDATED.send_event(
content_library=ContentLibraryData(
library_key=library_key,
update_blocks=True
)
)
# For each collection, trigger LIBRARY_COLLECTION_UPDATED signal and set background=True to trigger
# collection indexing asynchronously.
#
# This is to update component counts in all library collections,
# because there may be components that have been discarded in the revert.
for collection in authoring_api.get_collections(learning_package.id):
LIBRARY_COLLECTION_UPDATED.send_event(
library_collection=LibraryCollectionData(
collection_key=library_collection_locator(
library_key=library_key,
collection_key=collection.key,
),
background=True,
)
)
# Reindex components that are in collections
#
# Use case: When a component that was within a collection has been deleted
# and the changes are reverted, the component should appear in the
# collection again.
components_in_collections = authoring_api.get_components(
learning_package.id, draft=True, namespace='xblock.v1',
).filter(publishable_entity__collections__isnull=False)
for component in components_in_collections:
usage_key = library_component_usage_key(library_key, component)
CONTENT_OBJECT_ASSOCIATIONS_CHANGED.send_event(
content_object=ContentObjectChangedData(
object_id=str(usage_key),
changes=["collections"],
),
)
# Call the event handlers as needed.
tasks.wait_for_post_revert_events(draft_change_log, library_key)

View File

@@ -23,11 +23,34 @@ from celery_utils.logged_task import LoggedTask
from celery.utils.log import get_task_logger
from edx_django_utils.monitoring import set_code_owner_attribute, set_code_owner_attribute_from_module
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locator import (
BlockUsageLocator,
LibraryCollectionLocator,
LibraryContainerLocator,
LibraryLocatorV2,
)
from openedx_learning.api import authoring as authoring_api
from openedx_learning.api.authoring_models import DraftChangeLog, PublishLog
from openedx_events.content_authoring.data import (
LibraryBlockData,
LibraryCollectionData,
LibraryContainerData,
)
from openedx_events.content_authoring.signals import (
LIBRARY_BLOCK_CREATED,
LIBRARY_BLOCK_DELETED,
LIBRARY_BLOCK_UPDATED,
LIBRARY_BLOCK_PUBLISHED,
LIBRARY_COLLECTION_UPDATED,
LIBRARY_CONTAINER_CREATED,
LIBRARY_CONTAINER_DELETED,
LIBRARY_CONTAINER_UPDATED,
LIBRARY_CONTAINER_PUBLISHED,
)
from user_tasks.tasks import UserTask, UserTaskStatus
from xblock.fields import Scope
from opaque_keys.edx.locator import BlockUsageLocator
from openedx.core.lib import ensure_cms
from xmodule.capa_block import ProblemBlock
from xmodule.library_content_block import ANY_CAPA_TYPE_VALUE, LegacyLibraryContentBlock
@@ -39,10 +62,197 @@ from xmodule.modulestore.mixed import MixedModuleStore
from . import api
from .models import ContentLibraryBlockImportTask
logger = logging.getLogger(__name__)
log = logging.getLogger(__name__)
TASK_LOGGER = get_task_logger(__name__)
@shared_task(base=LoggedTask)
@set_code_owner_attribute
def send_events_after_publish(publish_log_pk: int, library_key_str: str) -> None:
    """
    Send events to trigger actions like updating the search index, after we've
    published some items in a library.

    We use the PublishLog record so we can detect exactly what was changed,
    including any auto-published changes like child items in containers.

    This happens in a celery task so that it can be run asynchronously if
    needed, because the "publish all changes" action can potentially publish
    hundreds or even thousands of components/containers at once, and synchronous
    event handlers like updating the search index may take a while to complete
    in that case.
    """
    publish_log = PublishLog.objects.get(pk=publish_log_pk)
    library_key = LibraryLocatorV2.from_string(library_key_str)
    affected_entities = publish_log.records.select_related("entity", "entity__container", "entity__component").all()
    affected_containers: set[LibraryContainerLocator] = set()
    # Update anything that needs to be updated (e.g. search index):
    for record in affected_entities:
        if hasattr(record.entity, "component"):
            usage_key = api.library_component_usage_key(library_key, record.entity.component)
            # Note that this item may be newly created, updated, or even deleted - but all we care about for this event
            # is that the published version is now different. Only for draft changes do we send differentiated events.
            LIBRARY_BLOCK_PUBLISHED.send_event(
                library_block=LibraryBlockData(library_key=library_key, usage_key=usage_key)
            )
            # Publishing a container will auto-publish its children, but publishing a single component or all changes
            # in the library will NOT usually include any parent containers. But we do need to notify listeners that the
            # parent container(s) have changed, e.g. so the search index can update the "has_unpublished_changes"
            for parent_container in api.get_containers_contains_component(usage_key):
                affected_containers.add(parent_container.container_key)
                # TODO: should this be a CONTAINER_CHILD_PUBLISHED event instead of CONTAINER_PUBLISHED ?
        elif hasattr(record.entity, "container"):
            container_key = api.library_container_locator(library_key, record.entity.container)
            affected_containers.add(container_key)
        else:
            log.warning(
                f"PublishableEntity {record.entity.pk} / {record.entity.key} was modified during publish operation "
                "but is of unknown type."
            )
    # Send one PUBLISHED event per affected container (de-duplicated via the set above):
    for container_key in affected_containers:
        LIBRARY_CONTAINER_PUBLISHED.send_event(
            library_container=LibraryContainerData(container_key=container_key)
        )
def wait_for_post_publish_events(publish_log: PublishLog, library_key: LibraryLocatorV2):
    """
    After publishing some changes, trigger the required event handlers (e.g.
    update the search index). Try to wait for that to complete before returning,
    up to some reasonable timeout, and then finish anything remaining
    asynchronously.
    """
    # Update the search index (and anything else) for the affected blocks
    result = send_events_after_publish.apply_async(args=(publish_log.pk, str(library_key)))
    # Try waiting a bit for those post-publish events to be handled:
    # NOTE(review): result.get() raises celery.exceptions.TimeoutError — confirm the builtin
    # TimeoutError caught here actually matches it on the celery version in use.
    try:
        result.get(timeout=15)
    except TimeoutError:
        pass
        # This is fine! The search index is still being updated, and/or other
        # event handlers are still following up on the results, but the publish
        # already *did* succeed, and the events will continue to be processed in
        # the background by the celery worker until everything is updated.
@shared_task(base=LoggedTask)
@set_code_owner_attribute
def send_events_after_revert(draft_change_log_id: int, library_key_str: str) -> None:
    """
    Send events to trigger actions like updating the search index, after we've
    reverted some unpublished changes in a library.

    See notes on the analogous function above, send_events_after_publish.
    """
    try:
        draft_change_log = DraftChangeLog.objects.get(id=draft_change_log_id)
    except DraftChangeLog.DoesNotExist:
        # When a revert operation is a no-op, Learning Core deletes the empty
        # DraftChangeLog, so we'll assume that's what happened here.
        log.info(f"Library revert in {library_key_str} did not result in any changes.")
        return
    library_key = LibraryLocatorV2.from_string(library_key_str)
    affected_entities = draft_change_log.records.select_related(
        "entity", "entity__container", "entity__component",
    ).all()
    created_container_keys: set[LibraryContainerLocator] = set()
    updated_container_keys: set[LibraryContainerLocator] = set()
    deleted_container_keys: set[LibraryContainerLocator] = set()
    affected_collection_keys: set[LibraryCollectionLocator] = set()
    # Update anything that needs to be updated (e.g. search index):
    for record in affected_entities:
        # This will be true if the entity was [soft] deleted, but we're now reverting that deletion:
        is_undeleted = (record.old_version is None and record.new_version is not None)
        # This will be true if the entity was created and we're now deleting it by reverting that creation:
        is_deleted = (record.old_version is not None and record.new_version is None)
        if hasattr(record.entity, "component"):
            usage_key = api.library_component_usage_key(library_key, record.entity.component)
            event = LIBRARY_BLOCK_UPDATED
            if is_deleted:
                event = LIBRARY_BLOCK_DELETED
            elif is_undeleted:
                event = LIBRARY_BLOCK_CREATED
            event.send_event(library_block=LibraryBlockData(library_key=library_key, usage_key=usage_key))
            # If any containers contain this component, their child list / component count may need to be updated
            # e.g. if this was a newly created component in the container and is now deleted, or this was deleted and
            # is now restored.
            for parent_container in api.get_containers_contains_component(usage_key):
                updated_container_keys.add(parent_container.container_key)
            # TODO: do we also need to send CONTENT_OBJECT_ASSOCIATIONS_CHANGED for this component, or is
            # LIBRARY_BLOCK_UPDATED sufficient?
        elif hasattr(record.entity, "container"):
            container_key = api.library_container_locator(library_key, record.entity.container)
            if is_deleted:
                deleted_container_keys.add(container_key)
            elif is_undeleted:
                created_container_keys.add(container_key)
            else:
                updated_container_keys.add(container_key)
        else:
            # Fixed: this is a revert operation, not a publish (message was copy-pasted
            # from send_events_after_publish).
            log.warning(
                f"PublishableEntity {record.entity.pk} / {record.entity.key} was modified during a revert "
                "operation but is of unknown type."
            )
        # If any collections contain this entity, their item count may need to be updated, e.g. if this was a
        # newly created component in the collection and is now deleted, or this was deleted and is now re-added.
        for parent_collection in authoring_api.get_entity_collections(
            record.entity.learning_package_id, record.entity.key,
        ):
            collection_key = api.library_collection_locator(
                library_key=library_key,
                collection_key=parent_collection.key,
            )
            affected_collection_keys.add(collection_key)
    for container_key in deleted_container_keys:
        LIBRARY_CONTAINER_DELETED.send_event(
            library_container=LibraryContainerData(container_key=container_key)
        )
        # Don't bother sending CREATED/UPDATED events for these containers that are now deleted.
        # (A deleted container can also appear in updated_container_keys when it was the parent
        # of a reverted component, so it must be discarded from both sets.)
        created_container_keys.discard(container_key)
        updated_container_keys.discard(container_key)
    for container_key in created_container_keys:
        LIBRARY_CONTAINER_CREATED.send_event(
            library_container=LibraryContainerData(container_key=container_key)
        )
    for container_key in updated_container_keys:
        LIBRARY_CONTAINER_UPDATED.send_event(
            library_container=LibraryContainerData(container_key=container_key)
        )
    for collection_key in affected_collection_keys:
        LIBRARY_COLLECTION_UPDATED.send_event(
            library_collection=LibraryCollectionData(collection_key=collection_key)
        )
def wait_for_post_revert_events(draft_change_log: DraftChangeLog, library_key: LibraryLocatorV2):
    """
    After discarding all changes in a library, trigger the required event
    handlers (e.g. update the search index). Try to wait for that to complete
    before returning, up to some reasonable timeout, and then finish anything
    remaining asynchronously.
    """
    # Update the search index (and anything else) for the affected blocks
    result = send_events_after_revert.apply_async(args=(draft_change_log.pk, str(library_key)))
    # Try waiting a bit for those post-revert events to be handled:
    try:
        result.get(timeout=15)
    except TimeoutError:
        pass
        # This is fine! The search index is still being updated, and/or other
        # event handlers are still following up on the results, but the revert
        # already *did* succeed, and the events will continue to be processed in
        # the background by the celery worker until everything is updated.
@shared_task(base=LoggedTask)
@set_code_owner_attribute
def import_blocks_from_course(import_task_id, course_key_str, use_course_key_as_block_id_suffix=True):
@@ -57,9 +267,9 @@ def import_blocks_from_course(import_task_id, course_key_str, use_course_key_as_
def on_progress(block_key, block_num, block_count, exception=None):
if exception:
logger.exception('Import block failed: %s', block_key)
log.exception('Import block failed: %s', block_key)
else:
logger.info('Import block succesful: %s', block_key)
log.info('Import block succesful: %s', block_key)
import_task.save_progress(block_num / block_count)
edx_client = api.EdxModulestoreImportClient(
@@ -121,6 +331,9 @@ def sync_from_library(
) -> None:
"""
Celery task to update the children of the library_content block at `dest_block_id`.
FIXME: this is related to legacy modulestore libraries and shouldn't be part of the
openedx.core.djangoapps.content_libraries app, which is the app for v2 libraries.
"""
set_code_owner_attribute_from_module(__name__)
store = modulestore()
@@ -143,6 +356,9 @@ def duplicate_children(
) -> None:
"""
Celery task to duplicate the children from `source_block_id` to `dest_block_id`.
FIXME: this is related to legacy modulestore libraries and shouldn't be part of the
openedx.core.djangoapps.content_libraries app, which is the app for v2 libraries.
"""
set_code_owner_attribute_from_module(__name__)
store = modulestore()
@@ -180,6 +396,9 @@ def _sync_children(
Implementation helper for `sync_from_library` and `duplicate_children` Celery tasks.
Can update children with a specific library `library_version`, or latest (`library_version=None`).
FIXME: this is related to legacy modulestore libraries and shouldn't be part of the
openedx.core.djangoapps.content_libraries app, which is the app for v2 libraries.
"""
source_blocks = []
library_key = dest_block.source_library_key.for_branch(
@@ -220,6 +439,9 @@ def _copy_overrides(
) -> None:
"""
Copy any overrides the user has made on children of `source` over to the children of `dest_block`, recursively.
FIXME: this is related to legacy modulestore libraries and shouldn't be part of the
openedx.core.djangoapps.content_libraries app, which is the app for v2 libraries.
"""
for field in source_block.fields.values():
if field.scope == Scope.settings and field.is_set_on(source_block):

View File

@@ -8,6 +8,8 @@ from urllib.parse import urlencode
from organizations.models import Organization
from rest_framework.test import APITransactionTestCase, APIClient
from opaque_keys.edx.keys import ContainerKey, UsageKey
from opaque_keys.edx.locator import LibraryLocatorV2, LibraryCollectionLocator
from common.djangoapps.student.tests.factories import UserFactory
from common.djangoapps.util.json_request import JsonResponse as SpecialJsonResponse
@@ -25,6 +27,7 @@ URL_LIB_LINKS = URL_LIB_DETAIL + 'links/' # Get the list of links in this libra
URL_LIB_COMMIT = URL_LIB_DETAIL + 'commit/' # Commit (POST) or revert (DELETE) all pending changes to this library
URL_LIB_BLOCKS = URL_LIB_DETAIL + 'blocks/' # Get the list of XBlocks in this library, or add a new one
URL_LIB_CONTAINERS = URL_LIB_DETAIL + 'containers/' # Create a new container in this library
URL_LIB_COLLECTIONS = URL_LIB_DETAIL + 'collections/' # Create a new collection in this library
URL_LIB_TEAM = URL_LIB_DETAIL + 'team/' # Get the list of users/groups authorized to use this library
URL_LIB_TEAM_USER = URL_LIB_TEAM + 'user/{username}/' # Add/edit/remove a user's permission to use this library
URL_LIB_TEAM_GROUP = URL_LIB_TEAM + 'group/{group_name}/' # Add/edit/remove a group's permission to use this library
@@ -39,6 +42,8 @@ URL_LIB_CONTAINER_COMPONENTS = URL_LIB_CONTAINER + 'children/' # Get, add or de
URL_LIB_CONTAINER_RESTORE = URL_LIB_CONTAINER + 'restore/' # Restore a deleted container
URL_LIB_CONTAINER_COLLECTIONS = URL_LIB_CONTAINER + 'collections/' # Handle associated collections
URL_LIB_CONTAINER_PUBLISH = URL_LIB_CONTAINER + 'publish/' # Publish changes to the specified container + children
URL_LIB_COLLECTION = URL_LIB_COLLECTIONS + '{collection_key}/' # Get a collection in this library
URL_LIB_COLLECTION_ITEMS = URL_LIB_COLLECTION + 'items/' # Get a collection in this library
URL_LIB_LTI_PREFIX = URL_PREFIX + 'lti/1.3/'
URL_LIB_LTI_JWKS = URL_LIB_LTI_PREFIX + 'pub/jwks/'
@@ -70,11 +75,6 @@ class ContentLibrariesRestApiTest(APITransactionTestCase):
entire response has some specific shape. That way, things like adding
new fields to an API response, which are backwards compatible, won't
break any tests, but backwards-incompatible API changes will.
WARNING: every test should have a unique library slug, because even though
the django/mysql database gets reset for each test case, the lookup between
library slug and bundle UUID does not because it's assumed to be immutable
and cached forever.
"""
def setUp(self):
@@ -379,24 +379,24 @@ class ContentLibrariesRestApiTest(APITransactionTestCase):
data["slug"] = slug
return self._api('post', URL_LIB_CONTAINERS.format(lib_key=lib_key), data, expect_response)
def _get_container(self, container_key: str, expect_response=200):
def _get_container(self, container_key: ContainerKey | str, expect_response=200):
""" Get a container (unit etc.) """
return self._api('get', URL_LIB_CONTAINER.format(container_key=container_key), None, expect_response)
def _update_container(self, container_key: str, display_name: str, expect_response=200):
def _update_container(self, container_key: ContainerKey | str, display_name: str, expect_response=200):
""" Update a container (unit etc.) """
data = {"display_name": display_name}
return self._api('patch', URL_LIB_CONTAINER.format(container_key=container_key), data, expect_response)
def _delete_container(self, container_key: str, expect_response=204):
def _delete_container(self, container_key: ContainerKey | str, expect_response=204):
""" Delete a container (unit etc.) """
return self._api('delete', URL_LIB_CONTAINER.format(container_key=container_key), None, expect_response)
def _restore_container(self, container_key: str, expect_response=204):
def _restore_container(self, container_key: ContainerKey | str, expect_response=204):
""" Restore a deleted a container (unit etc.) """
return self._api('post', URL_LIB_CONTAINER_RESTORE.format(container_key=container_key), None, expect_response)
def _get_container_components(self, container_key: str, expect_response=200):
def _get_container_components(self, container_key: ContainerKey | str, expect_response=200):
""" Get container components"""
return self._api(
'get',
@@ -407,7 +407,7 @@ class ContentLibrariesRestApiTest(APITransactionTestCase):
def _add_container_components(
self,
container_key: str,
container_key: ContainerKey | str,
children_ids: list[str],
expect_response=200,
):
@@ -421,7 +421,7 @@ class ContentLibrariesRestApiTest(APITransactionTestCase):
def _remove_container_components(
self,
container_key: str,
container_key: ContainerKey | str,
children_ids: list[str],
expect_response=200,
):
@@ -435,7 +435,7 @@ class ContentLibrariesRestApiTest(APITransactionTestCase):
def _patch_container_components(
self,
container_key: str,
container_key: ContainerKey | str,
children_ids: list[str],
expect_response=200,
):
@@ -449,7 +449,7 @@ class ContentLibrariesRestApiTest(APITransactionTestCase):
def _patch_container_collections(
self,
container_key: str,
container_key: ContainerKey | str,
collection_keys: list[str],
expect_response=200,
):
@@ -461,6 +461,52 @@ class ContentLibrariesRestApiTest(APITransactionTestCase):
expect_response
)
def _publish_container(self, container_key, expect_response=200):
def _publish_container(self, container_key: ContainerKey | str, expect_response=200):
""" Publish all changes in the specified container + children """
return self._api('post', URL_LIB_CONTAINER_PUBLISH.format(container_key=container_key), None, expect_response)
def _create_collection(
self,
lib_key: LibraryLocatorV2 | str,
title: str,
description: str = "",
expect_response=200,
):
""" Create a new collection in this library """
data = {"title": title, "description": description}
return self._api('post', URL_LIB_COLLECTIONS.format(lib_key=lib_key), data, expect_response)
def _soft_delete_collection(self, collection_key: LibraryCollectionLocator, expect_response=204):
""" Soft delete (disable) a collection """
url = URL_LIB_COLLECTION.format(lib_key=collection_key.lib_key, collection_key=collection_key.collection_id)
return self._api('delete', url, {}, expect_response)
def _update_collection(
self,
collection_key: LibraryCollectionLocator,
title: str | None = None,
description: str | None = None,
expect_response=200,
):
""" Update a collection's title/description """
data = {}
if title is not None:
data["title"] = title
if description is not None:
data["description"] = description
url = URL_LIB_COLLECTION.format(lib_key=collection_key.lib_key, collection_key=collection_key.collection_id)
return self._api('patch', url, data, expect_response)
def _add_items_to_collection(
self,
collection_key: LibraryCollectionLocator,
item_keys: list[str | UsageKey | ContainerKey],
expect_response=200,
):
""" Add components/containers to a collection """
data = {"usage_keys": [str(k) for k in item_keys]}
url = URL_LIB_COLLECTION_ITEMS.format(
lib_key=collection_key.lib_key,
collection_key=collection_key.collection_id,
)
return self._api('patch', url, data, expect_response)

View File

@@ -25,7 +25,6 @@ from openedx_events.content_authoring.signals import (
LIBRARY_COLLECTION_UPDATED,
LIBRARY_CONTAINER_UPDATED,
)
from openedx_events.tests.utils import OpenEdxEventsTestMixin
from openedx_learning.api import authoring as authoring_api
from .. import api
@@ -259,30 +258,12 @@ class EdxApiImportClientTest(TestCase):
mock_publish_changes.assert_not_called()
class ContentLibraryCollectionsTest(ContentLibrariesRestApiTest, OpenEdxEventsTestMixin):
class ContentLibraryCollectionsTest(ContentLibrariesRestApiTest):
"""
Tests for Content Library API collections methods.
Same guidelines as ContentLibrariesTestCase.
"""
ENABLED_OPENEDX_EVENTS = [
CONTENT_OBJECT_ASSOCIATIONS_CHANGED.event_type,
LIBRARY_COLLECTION_CREATED.event_type,
LIBRARY_COLLECTION_DELETED.event_type,
LIBRARY_COLLECTION_UPDATED.event_type,
]
@classmethod
def setUpClass(cls):
"""
Set up class method for the Test class.
TODO: It's unclear why we need to call start_events_isolation ourselves rather than relying on
OpenEdxEventsTestMixin.setUpClass to handle it. It fails it we don't, and many other test cases do it,
so we're following a pattern here. But that pattern doesn't really make sense.
"""
super().setUpClass()
cls.start_events_isolation()
def setUp(self):
super().setUp()
@@ -555,45 +536,28 @@ class ContentLibraryCollectionsTest(ContentLibrariesRestApiTest, OpenEdxEventsTe
assert len(authoring_api.get_collection(self.lib2.learning_package_id, self.col2.key).entities.all()) == 1
assert len(authoring_api.get_collection(self.lib2.learning_package_id, self.col3.key).entities.all()) == 1
self.assertDictContainsSubset(
{
"signal": CONTENT_OBJECT_ASSOCIATIONS_CHANGED,
"sender": None,
"content_object": ContentObjectChangedData(
object_id=self.lib2_problem_block["id"],
changes=["collections"],
),
},
event_receiver.call_args_list[0].kwargs,
)
self.assertDictContainsSubset(
{
"signal": LIBRARY_COLLECTION_UPDATED,
"sender": None,
"library_collection": LibraryCollectionData(
collection_key=api.library_collection_locator(
self.lib2.library_key,
collection_key=self.col2.key,
),
background=True,
),
},
collection_update_event_receiver.call_args_list[0].kwargs,
)
self.assertDictContainsSubset(
{
"signal": LIBRARY_COLLECTION_UPDATED,
"sender": None,
"library_collection": LibraryCollectionData(
collection_key=api.library_collection_locator(
self.lib2.library_key,
collection_key=self.col3.key,
),
background=True,
),
},
collection_update_event_receiver.call_args_list[1].kwargs,
)
assert {
"signal": CONTENT_OBJECT_ASSOCIATIONS_CHANGED,
"sender": None,
"content_object": ContentObjectChangedData(
object_id=self.lib2_problem_block["id"],
changes=["collections"],
),
}.items() <= event_receiver.call_args_list[0].kwargs.items()
assert len(collection_update_event_receiver.call_args_list) == 2
collection_update_events = [call.kwargs for call in collection_update_event_receiver.call_args_list]
assert all(event["signal"] == LIBRARY_COLLECTION_UPDATED for event in collection_update_events)
assert {event["library_collection"] for event in collection_update_events} == {
LibraryCollectionData(
collection_key=api.library_collection_locator(self.lib2.library_key, collection_key=self.col2.key),
background=True,
),
LibraryCollectionData(
collection_key=api.library_collection_locator(self.lib2.library_key, collection_key=self.col3.key),
background=True,
)
}
def test_delete_library_block(self):
api.update_library_collection_items(
@@ -690,72 +654,46 @@ class ContentLibraryCollectionsTest(ContentLibrariesRestApiTest, OpenEdxEventsTe
)
def test_add_component_and_revert(self):
# Add component and publish
api.update_library_collection_items(
self.lib1.library_key,
self.col1.key,
opaque_keys=[
UsageKey.from_string(self.lib1_problem_block["id"]),
],
)
# Publish changes
api.publish_changes(self.lib1.library_key)
# Add component and revert
# Create a new component that will only exist as a draft
new_problem_block = self._add_block_to_library(
self.lib1.library_key, "problem", "problemNEW",
)
# Add component. Note: collections are not part of the draft/publish cycle so this is not a draft change.
api.update_library_collection_items(
self.lib1.library_key,
self.col1.key,
opaque_keys=[
UsageKey.from_string(self.lib1_html_block["id"]),
UsageKey.from_string(new_problem_block["id"]),
],
)
event_receiver = mock.Mock()
CONTENT_OBJECT_ASSOCIATIONS_CHANGED.connect(event_receiver)
collection_update_event_receiver = mock.Mock()
LIBRARY_COLLECTION_UPDATED.connect(collection_update_event_receiver)
api.revert_changes(self.lib1.library_key)
assert collection_update_event_receiver.call_count == 1
assert event_receiver.call_count == 2
self.assertDictContainsSubset(
{
"signal": LIBRARY_COLLECTION_UPDATED,
"sender": None,
"library_collection": LibraryCollectionData(
collection_key=api.library_collection_locator(
self.lib1.library_key,
collection_key=self.col1.key,
),
background=True,
assert {
"signal": LIBRARY_COLLECTION_UPDATED,
"sender": None,
"library_collection": LibraryCollectionData(
collection_key=api.library_collection_locator(
self.lib1.library_key,
collection_key=self.col1.key,
),
},
collection_update_event_receiver.call_args_list[0].kwargs,
)
self.assertDictContainsSubset(
{
"signal": CONTENT_OBJECT_ASSOCIATIONS_CHANGED,
"sender": None,
"content_object": ContentObjectChangedData(
object_id=str(self.lib1_problem_block["id"]),
changes=["collections"],
),
},
event_receiver.call_args_list[0].kwargs,
)
self.assertDictContainsSubset(
{
"signal": CONTENT_OBJECT_ASSOCIATIONS_CHANGED,
"sender": None,
"content_object": ContentObjectChangedData(
object_id=str(self.lib1_html_block["id"]),
changes=["collections"],
),
},
event_receiver.call_args_list[1].kwargs,
)
),
}.items() <= collection_update_event_receiver.call_args_list[0].kwargs.items()
def test_delete_component_and_revert(self):
"""
When a component is deleted and then the delete is reverted, signals
will be emitted to update any containing collections.
"""
# Add components and publish
api.update_library_collection_items(
self.lib1.library_key,
@@ -770,72 +708,28 @@ class ContentLibraryCollectionsTest(ContentLibrariesRestApiTest, OpenEdxEventsTe
# Delete component and revert
api.delete_library_block(UsageKey.from_string(self.lib1_problem_block["id"]))
event_receiver = mock.Mock()
CONTENT_OBJECT_ASSOCIATIONS_CHANGED.connect(event_receiver)
collection_update_event_receiver = mock.Mock()
LIBRARY_COLLECTION_UPDATED.connect(collection_update_event_receiver)
api.revert_changes(self.lib1.library_key)
assert collection_update_event_receiver.call_count == 1
assert event_receiver.call_count == 2
self.assertDictContainsSubset(
{
"signal": LIBRARY_COLLECTION_UPDATED,
"sender": None,
"library_collection": LibraryCollectionData(
collection_key=api.library_collection_locator(
self.lib1.library_key,
collection_key=self.col1.key,
),
background=True,
assert {
"signal": LIBRARY_COLLECTION_UPDATED,
"sender": None,
"library_collection": LibraryCollectionData(
collection_key=api.library_collection_locator(
self.lib1.library_key,
collection_key=self.col1.key,
),
},
collection_update_event_receiver.call_args_list[0].kwargs,
)
self.assertDictContainsSubset(
{
"signal": CONTENT_OBJECT_ASSOCIATIONS_CHANGED,
"sender": None,
"content_object": ContentObjectChangedData(
object_id=str(self.lib1_problem_block["id"]),
changes=["collections"],
),
},
event_receiver.call_args_list[0].kwargs,
)
self.assertDictContainsSubset(
{
"signal": CONTENT_OBJECT_ASSOCIATIONS_CHANGED,
"sender": None,
"content_object": ContentObjectChangedData(
object_id=str(self.lib1_html_block["id"]),
changes=["collections"],
),
},
event_receiver.call_args_list[1].kwargs,
)
),
}.items() <= collection_update_event_receiver.call_args_list[0].kwargs.items()
class ContentLibraryContainersTest(ContentLibrariesRestApiTest, OpenEdxEventsTestMixin):
class ContentLibraryContainersTest(ContentLibrariesRestApiTest):
"""
Tests for Content Library API containers methods.
"""
ENABLED_OPENEDX_EVENTS = [
LIBRARY_CONTAINER_UPDATED.event_type,
]
@classmethod
def setUpClass(cls):
"""
Set up class method for the Test class.
TODO: It's unclear why we need to call start_events_isolation ourselves rather than relying on
OpenEdxEventsTestMixin.setUpClass to handle it. It fails it we don't, and many other test cases do it,
so we're following a pattern here. But that pattern doesn't really make sense.
"""
super().setUpClass()
cls.start_events_isolation()
def setUp(self):
super().setUp()
@@ -944,3 +838,29 @@ class ContentLibraryContainersTest(ContentLibrariesRestApiTest, OpenEdxEventsTes
self._set_library_block_fields(self.html_block_usage_key, {"data": block_olx, "metadata": {}})
self._validate_calls_of_html_block(container_update_event_receiver)
def test_delete_component_and_revert(self):
"""
When a component is deleted and then the delete is reverted, signals
will be emitted to update any containing containers.
"""
# Add components and publish
api.update_container_children(self.unit1.container_key, [
UsageKey.from_string(self.problem_block["id"]),
], user_id=None)
api.publish_changes(self.lib1.library_key)
# Delete component and revert
api.delete_library_block(UsageKey.from_string(self.problem_block["id"]))
container_event_receiver = mock.Mock()
LIBRARY_CONTAINER_UPDATED.connect(container_event_receiver)
api.revert_changes(self.lib1.library_key)
assert container_event_receiver.call_count == 1
assert {
"signal": LIBRARY_CONTAINER_UPDATED,
"sender": None,
"library_container": LibraryContainerData(container_key=self.unit1.container_key),
}.items() <= container_event_receiver.call_args_list[0].kwargs.items()

View File

@@ -2,20 +2,11 @@
Tests for Learning-Core-based Content Libraries
"""
from datetime import datetime, timezone
from unittest import mock
import ddt
from freezegun import freeze_time
from opaque_keys.edx.locator import LibraryContainerLocator, LibraryLocatorV2, LibraryUsageLocatorV2
from openedx_events.content_authoring.data import LibraryContainerData
from openedx_events.content_authoring.signals import (
LIBRARY_BLOCK_UPDATED,
LIBRARY_CONTAINER_CREATED,
LIBRARY_CONTAINER_DELETED,
LIBRARY_CONTAINER_UPDATED,
)
from openedx_events.tests.utils import OpenEdxEventsTestMixin
from opaque_keys.edx.locator import LibraryLocatorV2
from common.djangoapps.student.tests.factories import UserFactory
from openedx.core.djangoapps.content_libraries import api
@@ -25,7 +16,7 @@ from openedx.core.djangolib.testing.utils import skip_unless_cms
@skip_unless_cms
@ddt.ddt
class ContainersTestCase(OpenEdxEventsTestMixin, ContentLibrariesRestApiTest):
class ContainersTestCase(ContentLibrariesRestApiTest):
"""
Tests for containers (Sections, Subsections, Units) in Content Libraries.
@@ -43,12 +34,6 @@ class ContainersTestCase(OpenEdxEventsTestMixin, ContentLibrariesRestApiTest):
new fields to an API response, which are backwards compatible, won't
break any tests, but backwards-incompatible API changes will.
"""
ENABLED_OPENEDX_EVENTS = [
LIBRARY_BLOCK_UPDATED.event_type,
LIBRARY_CONTAINER_CREATED.event_type,
LIBRARY_CONTAINER_DELETED.event_type,
LIBRARY_CONTAINER_UPDATED.event_type,
]
def test_unit_crud(self):
"""
@@ -57,15 +42,6 @@ class ContainersTestCase(OpenEdxEventsTestMixin, ContentLibrariesRestApiTest):
lib = self._create_library(slug="containers", title="Container Test Library", description="Units and more")
lib_key = LibraryLocatorV2.from_string(lib["id"])
create_receiver = mock.Mock()
LIBRARY_CONTAINER_CREATED.connect(create_receiver)
update_receiver = mock.Mock()
LIBRARY_CONTAINER_UPDATED.connect(update_receiver)
delete_receiver = mock.Mock()
LIBRARY_CONTAINER_DELETED.connect(delete_receiver)
# Create a unit:
create_date = datetime(2024, 9, 8, 7, 6, 5, tzinfo=timezone.utc)
with freeze_time(create_date):
@@ -85,20 +61,6 @@ class ContainersTestCase(OpenEdxEventsTestMixin, ContentLibrariesRestApiTest):
}
self.assertDictContainsEntries(container_data, expected_data)
assert create_receiver.call_count == 1
container_key = LibraryContainerLocator.from_string(
"lct:CL-TEST:containers:unit:u1",
)
self.assertDictContainsSubset(
{
"signal": LIBRARY_CONTAINER_CREATED,
"sender": None,
"library_container": LibraryContainerData(
container_key,
),
},
create_receiver.call_args_list[0].kwargs,
)
# Fetch the unit:
unit_as_read = self._get_container(container_data["id"])
@@ -113,18 +75,6 @@ class ContainersTestCase(OpenEdxEventsTestMixin, ContentLibrariesRestApiTest):
expected_data['display_name'] = 'Unit ABC'
self.assertDictContainsEntries(container_data, expected_data)
assert update_receiver.call_count == 1
self.assertDictContainsSubset(
{
"signal": LIBRARY_CONTAINER_UPDATED,
"sender": None,
"library_container": LibraryContainerData(
container_key,
),
},
update_receiver.call_args_list[0].kwargs,
)
# Re-fetch the unit
unit_as_re_read = self._get_container(container_data["id"])
# make sure it contains the same data when we read it back:
@@ -133,17 +83,6 @@ class ContainersTestCase(OpenEdxEventsTestMixin, ContentLibrariesRestApiTest):
# Delete the unit
self._delete_container(container_data["id"])
self._get_container(container_data["id"], expect_response=404)
assert delete_receiver.call_count == 1
self.assertDictContainsSubset(
{
"signal": LIBRARY_CONTAINER_DELETED,
"sender": None,
"library_container": LibraryContainerData(
container_key,
),
},
delete_receiver.call_args_list[0].kwargs,
)
def test_unit_permissions(self):
"""
@@ -186,8 +125,6 @@ class ContainersTestCase(OpenEdxEventsTestMixin, ContentLibrariesRestApiTest):
"""
Test that we can add and get unit children components
"""
update_receiver = mock.Mock()
LIBRARY_CONTAINER_UPDATED.connect(update_receiver)
lib = self._create_library(slug="containers", title="Container Test Library", description="Units and more")
lib_key = LibraryLocatorV2.from_string(lib["id"])
@@ -212,18 +149,6 @@ class ContainersTestCase(OpenEdxEventsTestMixin, ContentLibrariesRestApiTest):
container_data["id"],
children_ids=[problem_block_2["id"], html_block_2["id"]]
)
self.assertDictContainsSubset(
{
"signal": LIBRARY_CONTAINER_UPDATED,
"sender": None,
"library_container": LibraryContainerData(
container_key=LibraryContainerLocator.from_string(
container_data["id"],
),
),
},
update_receiver.call_args_list[0].kwargs,
)
data = self._get_container_components(container_data["id"])
# Verify total number of components to be 2 + 2 = 4
assert len(data) == 4
@@ -236,8 +161,6 @@ class ContainersTestCase(OpenEdxEventsTestMixin, ContentLibrariesRestApiTest):
"""
Test that we can remove unit children components
"""
update_receiver = mock.Mock()
LIBRARY_CONTAINER_UPDATED.connect(update_receiver)
lib = self._create_library(slug="containers", title="Container Test Library", description="Units and more")
lib_key = LibraryLocatorV2.from_string(lib["id"])
@@ -262,25 +185,11 @@ class ContainersTestCase(OpenEdxEventsTestMixin, ContentLibrariesRestApiTest):
assert len(data) == 2
assert data[0]['id'] == html_block['id']
assert data[1]['id'] == html_block_2['id']
self.assertDictContainsSubset(
{
"signal": LIBRARY_CONTAINER_UPDATED,
"sender": None,
"library_container": LibraryContainerData(
container_key=LibraryContainerLocator.from_string(
container_data["id"],
),
),
},
update_receiver.call_args_list[0].kwargs,
)
def test_unit_replace_children(self):
"""
Test that we can completely replace/reorder unit children components.
"""
update_receiver = mock.Mock()
LIBRARY_CONTAINER_UPDATED.connect(update_receiver)
lib = self._create_library(slug="containers", title="Container Test Library", description="Units and more")
lib_key = LibraryLocatorV2.from_string(lib["id"])
@@ -324,18 +233,6 @@ class ContainersTestCase(OpenEdxEventsTestMixin, ContentLibrariesRestApiTest):
assert len(data) == 2
assert data[0]['id'] == new_problem_block['id']
assert data[1]['id'] == new_html_block['id']
self.assertDictContainsSubset(
{
"signal": LIBRARY_CONTAINER_UPDATED,
"sender": None,
"library_container": LibraryContainerData(
container_key=LibraryContainerLocator.from_string(
container_data["id"],
),
),
},
update_receiver.call_args_list[0].kwargs,
)
def test_restore_unit(self):
"""
@@ -352,9 +249,6 @@ class ContainersTestCase(OpenEdxEventsTestMixin, ContentLibrariesRestApiTest):
# Delete the unit
self._delete_container(container_data["id"])
create_receiver = mock.Mock()
LIBRARY_CONTAINER_CREATED.connect(create_receiver)
# Restore container
self._restore_container(container_data["id"])
new_container_data = self._get_container(container_data["id"])
@@ -372,20 +266,6 @@ class ContainersTestCase(OpenEdxEventsTestMixin, ContentLibrariesRestApiTest):
'collections': [],
}
self.assertDictContainsEntries(new_container_data, expected_data)
assert create_receiver.call_count == 1
self.assertDictContainsSubset(
{
"signal": LIBRARY_CONTAINER_CREATED,
"sender": None,
"library_container": LibraryContainerData(
container_key=LibraryContainerLocator.from_string("lct:CL-TEST:containers:unit:u1"),
),
},
create_receiver.call_args_list[0].kwargs,
)
def test_container_collections(self):
# Create a library
lib = self._create_library(slug="containers", title="Container Test Library", description="Units and more")
@@ -444,12 +324,6 @@ class ContainersTestCase(OpenEdxEventsTestMixin, ContentLibrariesRestApiTest):
c2_before = self._get_container(container2["id"])
assert c2_before["has_unpublished_changes"]
# Set up event receivers after the initial mock data setup is complete:
updated_container_receiver = mock.Mock()
updated_block_receiver = mock.Mock()
LIBRARY_CONTAINER_UPDATED.connect(updated_container_receiver)
LIBRARY_BLOCK_UPDATED.connect(updated_block_receiver)
# Now publish only Container 1
self._publish_container(container1["id"])
@@ -476,27 +350,3 @@ class ContainersTestCase(OpenEdxEventsTestMixin, ContentLibrariesRestApiTest):
assert c2_components_after[1]["id"] == html_block2["id"]
assert c2_components_after[1]["has_unpublished_changes"] # unaffected
assert c2_components_after[1]["published_by"] is None
# Make sure that the right events were sent out.
# First, there should be one container updated event:
assert len(updated_container_receiver.call_args_list) == 1
self.assertDictContainsSubset(
{
"signal": LIBRARY_CONTAINER_UPDATED,
"library_container": LibraryContainerData(
container_key=LibraryContainerLocator.from_string(container1["id"]),
),
},
updated_container_receiver.call_args_list[0].kwargs,
)
# Second, two XBlock updated events:
assert len(updated_block_receiver.call_args_list) == 2
updated_block_ids = set(
call.kwargs["library_block"].usage_key for call in updated_block_receiver.call_args_list
)
assert updated_block_ids == {
LibraryUsageLocatorV2.from_string(problem_block["id"]),
LibraryUsageLocatorV2.from_string(html_block["id"]),
}
assert all(call.kwargs["signal"] == LIBRARY_BLOCK_UPDATED for call in updated_block_receiver.call_args_list)

View File

@@ -3,7 +3,7 @@ Tests for Learning-Core-based Content Libraries
"""
from datetime import datetime, timezone
from unittest import skip
from unittest.mock import Mock, patch
from unittest.mock import patch
import ddt
from django.contrib.auth.models import Group
@@ -11,16 +11,6 @@ from django.test import override_settings
from django.test.client import Client
from freezegun import freeze_time
from opaque_keys.edx.locator import LibraryLocatorV2, LibraryUsageLocatorV2
from openedx_events.content_authoring.data import ContentLibraryData, LibraryBlockData
from openedx_events.content_authoring.signals import (
CONTENT_LIBRARY_CREATED,
CONTENT_LIBRARY_DELETED,
CONTENT_LIBRARY_UPDATED,
LIBRARY_BLOCK_CREATED,
LIBRARY_BLOCK_DELETED,
LIBRARY_BLOCK_UPDATED
)
from openedx_events.tests.utils import OpenEdxEventsTestMixin
from organizations.models import Organization
from rest_framework.test import APITestCase
@@ -31,7 +21,7 @@ from openedx.core.djangoapps.content_libraries.tests.base import (
URL_BLOCK_METADATA_URL,
URL_BLOCK_RENDER_VIEW,
URL_BLOCK_XBLOCK_HANDLER,
ContentLibrariesRestApiTest
ContentLibrariesRestApiTest,
)
from openedx.core.djangoapps.xblock import api as xblock_api
from openedx.core.djangolib.testing.utils import skip_unless_cms
@@ -39,7 +29,7 @@ from openedx.core.djangolib.testing.utils import skip_unless_cms
@skip_unless_cms
@ddt.ddt
class ContentLibrariesTestCase(ContentLibrariesRestApiTest, OpenEdxEventsTestMixin):
class ContentLibrariesTestCase(ContentLibrariesRestApiTest):
"""
General tests for Learning-Core-based Content Libraries
@@ -62,26 +52,6 @@ class ContentLibrariesTestCase(ContentLibrariesRestApiTest, OpenEdxEventsTestMix
library slug and bundle UUID does not because it's assumed to be immutable
and cached forever.
"""
ENABLED_OPENEDX_EVENTS = [
CONTENT_LIBRARY_CREATED.event_type,
CONTENT_LIBRARY_DELETED.event_type,
CONTENT_LIBRARY_UPDATED.event_type,
LIBRARY_BLOCK_CREATED.event_type,
LIBRARY_BLOCK_DELETED.event_type,
LIBRARY_BLOCK_UPDATED.event_type,
]
@classmethod
def setUpClass(cls):
"""
Set up class method for the Test class.
TODO: It's unclear why we need to call start_events_isolation ourselves rather than relying on
OpenEdxEventsTestMixin.setUpClass to handle it. It fails it we don't, and many other test cases do it,
so we're following a pattern here. But that pattern doesn't really make sense.
"""
super().setUpClass()
cls.start_events_isolation()
def test_library_crud(self):
"""
@@ -792,294 +762,6 @@ class ContentLibrariesTestCase(ContentLibrariesRestApiTest, OpenEdxEventsTestMix
# Second block should throw error
self._add_block_to_library(lib_id, "problem", "problem1", expect_response=400)
def test_content_library_create_event(self):
"""
Check that CONTENT_LIBRARY_CREATED event is sent when a content library is created.
"""
event_receiver = Mock()
CONTENT_LIBRARY_CREATED.connect(event_receiver)
lib = self._create_library(
slug="test_lib_event_create",
title="Event Test Library",
description="Testing event in library"
)
library_key = LibraryLocatorV2.from_string(lib['id'])
event_receiver.assert_called_once()
self.assertDictContainsSubset(
{
"signal": CONTENT_LIBRARY_CREATED,
"sender": None,
"content_library": ContentLibraryData(
library_key=library_key,
update_blocks=False,
),
},
event_receiver.call_args.kwargs
)
def test_content_library_update_event(self):
"""
Check that CONTENT_LIBRARY_UPDATED event is sent when a content library is updated.
"""
event_receiver = Mock()
CONTENT_LIBRARY_UPDATED.connect(event_receiver)
lib = self._create_library(
slug="test_lib_event_update",
title="Event Test Library",
description="Testing event in library"
)
lib2 = self._update_library(lib["id"], title="New Title")
library_key = LibraryLocatorV2.from_string(lib2['id'])
event_receiver.assert_called_once()
self.assertDictContainsSubset(
{
"signal": CONTENT_LIBRARY_UPDATED,
"sender": None,
"content_library": ContentLibraryData(
library_key=library_key,
update_blocks=False,
),
},
event_receiver.call_args.kwargs
)
def test_content_library_delete_event(self):
"""
Check that CONTENT_LIBRARY_DELETED event is sent when a content library is deleted.
"""
event_receiver = Mock()
CONTENT_LIBRARY_DELETED.connect(event_receiver)
lib = self._create_library(
slug="test_lib_event_delete",
title="Event Test Library",
description="Testing event in library"
)
library_key = LibraryLocatorV2.from_string(lib['id'])
self._delete_library(lib["id"])
event_receiver.assert_called_once()
self.assertDictContainsSubset(
{
"signal": CONTENT_LIBRARY_DELETED,
"sender": None,
"content_library": ContentLibraryData(
library_key=library_key,
update_blocks=False,
),
},
event_receiver.call_args.kwargs
)
def test_library_block_create_event(self):
"""
Check that LIBRARY_BLOCK_CREATED event is sent when a library block is created.
"""
event_receiver = Mock()
LIBRARY_BLOCK_CREATED.connect(event_receiver)
lib = self._create_library(
slug="test_lib_block_event_create",
title="Event Test Library",
description="Testing event in library"
)
lib_id = lib["id"]
self._add_block_to_library(lib_id, "problem", "problem1")
library_key = LibraryLocatorV2.from_string(lib_id)
usage_key = LibraryUsageLocatorV2(
lib_key=library_key,
block_type="problem",
usage_id="problem1"
)
event_receiver.assert_called_once()
self.assertDictContainsSubset(
{
"signal": LIBRARY_BLOCK_CREATED,
"sender": None,
"library_block": LibraryBlockData(
library_key=library_key,
usage_key=usage_key
),
},
event_receiver.call_args.kwargs
)
def test_library_block_olx_update_event(self):
"""
Check that LIBRARY_BLOCK_CREATED event is sent when the OLX source is updated.
"""
event_receiver = Mock()
LIBRARY_BLOCK_UPDATED.connect(event_receiver)
lib = self._create_library(
slug="test_lib_block_event_olx_update",
title="Event Test Library",
description="Testing event in library"
)
lib_id = lib["id"]
library_key = LibraryLocatorV2.from_string(lib_id)
block = self._add_block_to_library(lib_id, "problem", "problem1")
block_id = block["id"]
usage_key = LibraryUsageLocatorV2(
lib_key=library_key,
block_type="problem",
usage_id="problem1"
)
new_olx = """
<problem display_name="New Multi Choice Question" max_attempts="5">
<multiplechoiceresponse>
<p>This is a normal capa problem with unicode 🔥. It has "maximum attempts" set to **5**.</p>
<label>Learning Core is designed to store.</label>
<choicegroup type="MultipleChoice">
<choice correct="false">XBlock metadata only</choice>
<choice correct="true">XBlock data/metadata and associated static asset files</choice>
<choice correct="false">Static asset files for XBlocks and courseware</choice>
<choice correct="false">XModule metadata only</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""".strip()
self._set_library_block_olx(block_id, new_olx)
event_receiver.assert_called_once()
self.assertDictContainsSubset(
{
"signal": LIBRARY_BLOCK_UPDATED,
"sender": None,
"library_block": LibraryBlockData(
library_key=library_key,
usage_key=usage_key
),
},
event_receiver.call_args.kwargs
)
def test_library_block_add_asset_update_event(self):
    """
    Check that LIBRARY_BLOCK_UPDATED event is sent when a static asset is
    uploaded associated with the XBlock.

    (Docstring corrected: this asserts the UPDATED event, not LIBRARY_BLOCK_CREATED.)
    """
    event_receiver = Mock()
    LIBRARY_BLOCK_UPDATED.connect(event_receiver)
    lib = self._create_library(
        slug="test_lib_block_event_add_asset_update",
        title="Event Test Library",
        description="Testing event in library"
    )
    lib_id = lib["id"]
    library_key = LibraryLocatorV2.from_string(lib_id)
    block = self._add_block_to_library(lib_id, "html", "h1")
    block_id = block["id"]
    # Uploading an asset counts as an update to the owning block:
    self._set_library_block_asset(block_id, "static/test.txt", b"data")
    usage_key = LibraryUsageLocatorV2(
        lib_key=library_key,
        block_type="html",
        usage_id="h1"
    )
    event_receiver.assert_called_once()
    self.assertDictContainsSubset(
        {
            "signal": LIBRARY_BLOCK_UPDATED,
            "sender": None,
            "library_block": LibraryBlockData(
                library_key=library_key,
                usage_key=usage_key
            ),
        },
        event_receiver.call_args.kwargs
    )
def test_library_block_del_asset_update_event(self):
    """
    Check that LIBRARY_BLOCK_UPDATED event is sent when a static asset is
    removed from XBlock.

    (Docstring corrected: this asserts the UPDATED event, not LIBRARY_BLOCK_CREATED.)
    """
    event_receiver = Mock()
    LIBRARY_BLOCK_UPDATED.connect(event_receiver)
    lib = self._create_library(
        slug="test_lib_block_event_del_asset_update",
        title="Event Test Library",
        description="Testing event in library"
    )
    lib_id = lib["id"]
    library_key = LibraryLocatorV2.from_string(lib_id)
    block = self._add_block_to_library(lib_id, "html", "h321")
    block_id = block["id"]
    self._set_library_block_asset(block_id, "static/test.txt", b"data")
    # Fix: delete the same path that was uploaded above ("test.txt", not the
    # "text.txt" typo), so the assertion is exercised by a real asset deletion
    # rather than a no-op delete of a nonexistent file.
    self._delete_library_block_asset(block_id, 'static/test.txt')
    usage_key = LibraryUsageLocatorV2(
        lib_key=library_key,
        block_type="html",
        usage_id="h321"
    )
    # Both the upload and the delete emit UPDATED; check the latest call's payload.
    event_receiver.assert_called()
    self.assertDictContainsSubset(
        {
            "signal": LIBRARY_BLOCK_UPDATED,
            "sender": None,
            "library_block": LibraryBlockData(
                library_key=library_key,
                usage_key=usage_key
            ),
        },
        event_receiver.call_args.kwargs
    )
def test_library_block_delete_event(self):
    """
    Check that LIBRARY_BLOCK_DELETED event is sent when a library block is deleted.

    (Docstring corrected: the deletion under test is of a block, not of the
    content library itself.)
    """
    event_receiver = Mock()
    LIBRARY_BLOCK_DELETED.connect(event_receiver)
    lib = self._create_library(
        slug="test_lib_block_event_delete",
        title="Event Test Library",
        description="Testing event in library"
    )
    lib_id = lib["id"]
    library_key = LibraryLocatorV2.from_string(lib_id)
    block = self._add_block_to_library(lib_id, "problem", "problem1")
    block_id = block['id']
    usage_key = LibraryUsageLocatorV2(
        lib_key=library_key,
        block_type="problem",
        usage_id="problem1"
    )
    self._delete_library_block(block_id)
    event_receiver.assert_called()
    self.assertDictContainsSubset(
        {
            "signal": LIBRARY_BLOCK_DELETED,
            "sender": None,
            "library_block": LibraryBlockData(
                library_key=library_key,
                usage_key=usage_key
            ),
        },
        event_receiver.call_args.kwargs
    )
def test_library_paste_xblock(self):
"""
Check the a new block is created in the library after pasting from clipboard.

View File

@@ -3,8 +3,6 @@ Tests for Imports from Courses to Learning-Core-based Content Libraries
"""
import ddt
from opaque_keys.edx.locator import LibraryContainerLocator
from openedx_events.content_authoring import signals
from openedx_events.tests.utils import OpenEdxEventsTestMixin
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import ToyCourseFactory
@@ -15,15 +13,10 @@ from openedx.core.djangolib.testing.utils import skip_unless_cms
@skip_unless_cms
@ddt.ddt
class CourseToLibraryTestCase(OpenEdxEventsTestMixin, ContentLibrariesRestApiTest, ModuleStoreTestCase):
class CourseToLibraryTestCase(ContentLibrariesRestApiTest, ModuleStoreTestCase):
"""
Tests that involve copying content from courses to libraries.
"""
ENABLED_OPENEDX_EVENTS = [
signals.LIBRARY_CONTAINER_CREATED.event_type,
signals.LIBRARY_CONTAINER_DELETED.event_type,
signals.LIBRARY_CONTAINER_UPDATED.event_type,
]
def test_library_paste_unit_from_course(self):
"""

View File

@@ -8,7 +8,6 @@ import re
import ddt
from django.core.exceptions import ValidationError
from django.test.utils import override_settings
from openedx_events.tests.utils import OpenEdxEventsTestMixin
import pytest
from xblock.core import XBlock
@@ -22,7 +21,7 @@ from .fields_test_block import FieldsTestBlock
@skip_unless_cms
@ddt.ddt
@override_settings(CORS_ORIGIN_WHITELIST=[]) # For some reason, this setting isn't defined in our test environment?
class LibrariesEmbedViewTestCase(ContentLibrariesRestApiTest, OpenEdxEventsTestMixin):
class LibrariesEmbedViewTestCase(ContentLibrariesRestApiTest):
"""
Tests for embed_view and interacting with draft/published/past versions of
Learning-Core-based XBlocks (in Content Libraries).

View File

@@ -0,0 +1,548 @@
"""
Tests for Learning-Core-based Content Libraries
"""
from opaque_keys.edx.locator import (
LibraryCollectionLocator,
LibraryContainerLocator,
LibraryLocatorV2,
LibraryUsageLocatorV2,
)
from openedx_events.content_authoring.signals import (
ContentLibraryData,
LibraryBlockData,
LibraryCollectionData,
LibraryContainerData,
CONTENT_LIBRARY_CREATED,
CONTENT_LIBRARY_DELETED,
CONTENT_LIBRARY_UPDATED,
LIBRARY_BLOCK_CREATED,
LIBRARY_BLOCK_DELETED,
LIBRARY_BLOCK_UPDATED,
LIBRARY_BLOCK_PUBLISHED,
LIBRARY_COLLECTION_CREATED,
LIBRARY_COLLECTION_DELETED,
LIBRARY_COLLECTION_UPDATED,
LIBRARY_CONTAINER_CREATED,
LIBRARY_CONTAINER_DELETED,
LIBRARY_CONTAINER_UPDATED,
LIBRARY_CONTAINER_PUBLISHED,
)
from openedx.core.djangoapps.content_libraries.tests.base import ContentLibrariesRestApiTest
from openedx.core.djangolib.testing.utils import skip_unless_cms
@skip_unless_cms
class ContentLibrariesEventsTestCase(ContentLibrariesRestApiTest):
    """
    Event tests for Learning-Core-based Content Libraries

    These tests use the REST API, which in turn relies on the Python API.
    """
    # Note: we assume all events are already enabled, as they should be. We do
    # NOT use OpenEdxEventsTestMixin, because it disables any events that you
    # don't explicitly enable and does so in a way that interferes with other
    # test cases, causing flakiness and failures in *other* test modules.

    # Every content-authoring signal that setUp() subscribes a recorder to:
    ALL_EVENTS = [
        CONTENT_LIBRARY_CREATED,
        CONTENT_LIBRARY_DELETED,
        CONTENT_LIBRARY_UPDATED,
        LIBRARY_BLOCK_CREATED,
        LIBRARY_BLOCK_DELETED,
        LIBRARY_BLOCK_UPDATED,
        LIBRARY_BLOCK_PUBLISHED,
        LIBRARY_COLLECTION_CREATED,
        LIBRARY_COLLECTION_DELETED,
        LIBRARY_COLLECTION_UPDATED,
        LIBRARY_CONTAINER_CREATED,
        LIBRARY_CONTAINER_DELETED,
        LIBRARY_CONTAINER_UPDATED,
        LIBRARY_CONTAINER_PUBLISHED,
    ]
def setUp(self) -> None:
super().setUp()
# Create some useful data:
self.lib1 = self._create_library(
slug="test_lib_1",
title="Library 1",
description="First Library for testing",
)
self.lib1_key = LibraryLocatorV2.from_string(self.lib1['id'])
# From now on, every time an event is emitted, add it to this set:
self.new_events: list[dict] = []
def event_receiver(**kwargs) -> None:
self.new_events.append(kwargs)
for e in self.ALL_EVENTS:
e.connect(event_receiver)
def disconnect_all() -> None:
for e in self.ALL_EVENTS:
e.disconnect(event_receiver)
self.addCleanup(disconnect_all)
def clear_events(self) -> None:
""" Clear the log of events that we've seen so far. """
self.new_events.clear()
    def expect_new_events(self, *expected_events: dict) -> None:
        """
        Assert that the specified events have been emitted since the last call to
        this function.

        Each ``expected`` dict only needs to be a subset of an actual event's
        kwargs to match. Raises AssertionError if any expected event is missing
        or if any unexpected event was emitted; on success, clears the log.
        """
        # We assume the events may not be in order. Assuming a specific order can lead to flaky tests.
        for expected in expected_events:
            found = False
            for i, actual in enumerate(self.new_events):
                if expected.items() <= actual.items():
                    # Remove the matched event so duplicates must match separately.
                    self.new_events.pop(i)
                    found = True
                    break
            if not found:
                raise AssertionError(f"Event {expected} not found among actual events: {self.new_events}")
        if len(self.new_events) > 0:
            raise AssertionError(f"Events were emitted but not expected: {self.new_events}")
        self.clear_events()
############################## Libraries ##################################
def test_content_library_crud_events(self) -> None:
"""
Check that CONTENT_LIBRARY_CREATED event is sent when a content library is created, updated, and deleted
"""
# Setup: none
# Action - create a library
new_lib = self._create_library(
slug="new_lib",
title="New Testing Library",
description="New Library for testing",
)
lib_key = LibraryLocatorV2.from_string(new_lib['id'])
# Expect a CREATED event:
self.expect_new_events({
"signal": CONTENT_LIBRARY_CREATED,
"content_library": ContentLibraryData(library_key=lib_key),
})
# Action - change the library name:
self._update_library(lib_key=str(lib_key), title="New title")
# Expect an UPDATED event:
self.expect_new_events({
"signal": CONTENT_LIBRARY_UPDATED,
"content_library": ContentLibraryData(library_key=lib_key),
})
# Action - delete the library:
self._delete_library(str(lib_key))
# Expect a DELETED event:
self.expect_new_events({
"signal": CONTENT_LIBRARY_DELETED,
"content_library": ContentLibraryData(library_key=lib_key),
})
# Should deleting a library send out _DELETED events for all the items in the library too?
############################## Components (XBlocks) ##################################
def test_library_block_create_event(self) -> None:
"""
Check that LIBRARY_BLOCK_CREATED event is sent when a library block is created.
"""
add_result = self._add_block_to_library(self.lib1_key, "problem", "problem1")
usage_key = LibraryUsageLocatorV2.from_string(add_result["id"])
self.expect_new_events({
"signal": LIBRARY_BLOCK_CREATED,
"library_block": LibraryBlockData(self.lib1_key, usage_key),
})
    def test_library_block_update_and_publish_events(self) -> None:
        """
        Check that appropriate events are emitted when an existing block is updated.

        Covers three kinds of update (OLX change, asset upload, asset delete),
        each of which should emit exactly one UPDATED event, plus publishing,
        which should emit a PUBLISHED event.
        """
        # This block should be ignored:
        self._add_block_to_library(self.lib1_key, "problem", "problem1")
        # This block will be used in the tests:
        add_result = self._add_block_to_library(self.lib1_key, "problem", "problem2")
        usage_key = LibraryUsageLocatorV2.from_string(add_result["id"])
        # Clear events from creating the blocks:
        self.clear_events()
        # Now update the block's OLX:
        new_olx = """
<problem display_name="New Multi Choice Question" max_attempts="5">
<multiplechoiceresponse>...</multiplechoiceresponse>
</problem>
""".strip()
        self._set_library_block_olx(usage_key, new_olx)
        self.expect_new_events({
            "signal": LIBRARY_BLOCK_UPDATED,
            "library_block": LibraryBlockData(self.lib1_key, usage_key),
        })
        # Now add a static asset file to the block:
        self._set_library_block_asset(usage_key, "static/test.txt", b"data")
        self.expect_new_events({
            "signal": LIBRARY_BLOCK_UPDATED,
            "library_block": LibraryBlockData(self.lib1_key, usage_key),
        })
        # Then delete the static asset:
        # NOTE(review): this deletes 'static/text.txt' but 'static/test.txt' was
        # uploaded — confirm whether the UPDATED event should fire for a
        # nonexistent path, or whether this is a typo for "test.txt".
        self._delete_library_block_asset(usage_key, 'static/text.txt')
        self.expect_new_events({
            "signal": LIBRARY_BLOCK_UPDATED,
            "library_block": LibraryBlockData(self.lib1_key, usage_key),
        })
        # Then publish the block:
        self._publish_library_block(usage_key)
        self.expect_new_events({
            "signal": LIBRARY_BLOCK_PUBLISHED,
            "library_block": LibraryBlockData(self.lib1_key, usage_key),
        })
def test_revert_delete(self) -> None:
"""
Test that when a block is deleted and then the delete is reverted, a
_CREATED event is sent.
"""
# This block should be ignored:
self._add_block_to_library(self.lib1_key, "problem", "problem1")
# This block will be used in the tests:
add_result = self._add_block_to_library(self.lib1_key, "problem", "problem2")
usage_key = LibraryUsageLocatorV2.from_string(add_result["id"])
# Publish changes
self._commit_library_changes(self.lib1_key)
# Clear events from creating the blocks:
self.clear_events()
# Delete the block:
self._delete_library_block(usage_key)
# That should emit a _DELETED event:
self.expect_new_events({
"signal": LIBRARY_BLOCK_DELETED,
"library_block": LibraryBlockData(self.lib1_key, usage_key),
})
# Revert the change:
self._revert_library_changes(self.lib1_key)
# That should result in a _CREATED event:
self.expect_new_events({
"signal": LIBRARY_BLOCK_CREATED,
"library_block": LibraryBlockData(self.lib1_key, usage_key),
})
def test_revert_create(self) -> None:
"""
Test that when a block is created and then the changes are reverted, a
_DELETED event is sent.
"""
# Publish any changes from setUp()
self._commit_library_changes(self.lib1_key)
# Clear events:
self.clear_events()
# Create the block:
add_result = self._add_block_to_library(self.lib1_key, "problem", "problem2")
usage_key = LibraryUsageLocatorV2.from_string(add_result["id"])
# That should result in a _CREATED event:
self.expect_new_events({
"signal": LIBRARY_BLOCK_CREATED,
"library_block": LibraryBlockData(self.lib1_key, usage_key),
})
# Revert the change:
self._revert_library_changes(self.lib1_key)
# That should result in a _DELETED event:
self.expect_new_events({
"signal": LIBRARY_BLOCK_DELETED,
"library_block": LibraryBlockData(self.lib1_key, usage_key),
})
############################## Containers ##################################
def test_unit_crud(self) -> None:
"""
Test Create, Read, Update, and Delete of a Unit
"""
# Create a unit:
container_data = self._create_container(self.lib1_key, "unit", slug="u1", display_name="Test Unit")
container_key = LibraryContainerLocator.from_string(container_data["id"])
self.expect_new_events({
"signal": LIBRARY_CONTAINER_CREATED,
"library_container": LibraryContainerData(container_key),
})
# Update the unit:
self._update_container(container_key, display_name="Unit ABC")
self.expect_new_events({
"signal": LIBRARY_CONTAINER_UPDATED,
"library_container": LibraryContainerData(container_key),
})
# Delete the unit
self._delete_container(container_key)
self._get_container(container_key, expect_response=404)
self.expect_new_events({
"signal": LIBRARY_CONTAINER_DELETED,
"library_container": LibraryContainerData(container_key),
})
    def test_publish_all_lib_changes(self) -> None:
        """
        Test the events that get emitted when we publish all changes in the library

        Only entities with unpublished changes should get PUBLISHED events;
        content already published earlier must not re-emit.
        """
        # Create two containers and add some components
        # -> container 1: problem_block, html_block
        # -> container 2: html_block, html_block2
        container1 = self._create_container(self.lib1_key, "unit", display_name="Alpha Unit", slug=None)
        container2 = self._create_container(self.lib1_key, "unit", display_name="Bravo Unit", slug=None)
        problem_block = self._add_block_to_library(self.lib1_key, "problem", "Problem1", can_stand_alone=False)
        html_block = self._add_block_to_library(self.lib1_key, "html", "Html1", can_stand_alone=False)
        html_block2 = self._add_block_to_library(self.lib1_key, "html", "Html2", can_stand_alone=False)
        self._add_container_components(container1["id"], children_ids=[problem_block["id"], html_block["id"]])
        self._add_container_components(container2["id"], children_ids=[html_block["id"], html_block2["id"]])
        # Now publish only Container 2 (which will auto-publish both HTML blocks since they're children)
        self._publish_container(container2["id"])
        # Container 2 is published, container 1 and its contents is unpublished:
        assert self._get_container(container2["id"])["has_unpublished_changes"] is False
        assert self._get_container(container1["id"])["has_unpublished_changes"]
        assert self._get_library_block(problem_block["id"])["has_unpublished_changes"]
        assert self._get_library_block(html_block["id"])["has_unpublished_changes"] is False  # in containers 1+2
        # clear event log up to this point
        self.clear_events()
        # Now publish ALL remaining changes in the library:
        self._commit_library_changes(self.lib1_key)
        # Container 1 is now published:
        assert self._get_container(container1["id"])["has_unpublished_changes"] is False
        # And publish events were emitted:
        self.expect_new_events(
            {  # An event for container 1 being published:
                "signal": LIBRARY_CONTAINER_PUBLISHED,
                "library_container": LibraryContainerData(
                    container_key=LibraryContainerLocator.from_string(container1["id"]),
                ),
            },
            {  # An event for the problem block in container 1:
                "signal": LIBRARY_BLOCK_PUBLISHED,
                "library_block": LibraryBlockData(
                    self.lib1_key, LibraryUsageLocatorV2.from_string(problem_block["id"]),
                ),
            },
            # The HTML block in container 1 is not part of this publish event group, because it was
            # already published when we published container 2
        )
    def test_publish_child_block(self) -> None:
        """
        Test the events that get emitted when we publish changes to a child of a container

        Publishing a modified child should emit PUBLISHED for the child and also
        for its (otherwise unchanged) parent container.
        """
        # Create a container and a block
        container1 = self._create_container(self.lib1_key, "unit", display_name="Alpha Unit", slug=None)
        problem_block = self._add_block_to_library(self.lib1_key, "problem", "Problem1", can_stand_alone=False)
        self._add_container_components(container1["id"], children_ids=[problem_block["id"]])
        # Publish all changes
        self._commit_library_changes(self.lib1_key)
        assert self._get_container(container1["id"])["has_unpublished_changes"] is False
        # Change only the block, not the container:
        self._set_library_block_olx(problem_block["id"], "<problem>UPDATED</problem>")
        # Since we modified the block, the container now contains changes (technically it is unchanged and its
        # version is the same, but it *contains* unpublished changes)
        assert self._get_library_block(problem_block["id"])["has_unpublished_changes"]
        assert self._get_container(container1["id"])["has_unpublished_changes"]
        # clear event log up to this point
        self.clear_events()
        # Now publish ALL remaining changes in the library - should only affect the problem block
        self._commit_library_changes(self.lib1_key)
        # The container no longer contains unpublished changes:
        assert self._get_container(container1["id"])["has_unpublished_changes"] is False
        # And publish events were emitted:
        self.expect_new_events(
            {  # An event for container 1 being affected indirectly by the child being published:
                # TODO: should this be a CONTAINER_CHILD_PUBLISHED event?
                "signal": LIBRARY_CONTAINER_PUBLISHED,
                "library_container": LibraryContainerData(
                    container_key=LibraryContainerLocator.from_string(container1["id"]),
                ),
            },
            {  # An event for the problem block:
                "signal": LIBRARY_BLOCK_PUBLISHED,
                "library_block": LibraryBlockData(
                    self.lib1_key, LibraryUsageLocatorV2.from_string(problem_block["id"]),
                ),
            },
        )
    def test_publish_container(self) -> None:
        """
        Test the events that get emitted when we publish the changes to a specific container

        Publishing one container publishes its children, and notifies any other
        container that shares those children.
        """
        # Create two containers and add some components
        container1 = self._create_container(self.lib1_key, "unit", display_name="Alpha Unit", slug=None)
        container2 = self._create_container(self.lib1_key, "unit", display_name="Bravo Unit", slug=None)
        problem_block = self._add_block_to_library(self.lib1_key, "problem", "Problem1", can_stand_alone=False)
        html_block = self._add_block_to_library(self.lib1_key, "html", "Html1", can_stand_alone=False)
        html_block2 = self._add_block_to_library(self.lib1_key, "html", "Html2", can_stand_alone=False)
        self._add_container_components(container1["id"], children_ids=[problem_block["id"], html_block["id"]])
        self._add_container_components(container2["id"], children_ids=[html_block["id"], html_block2["id"]])
        # At first everything is unpublished:
        c1_before = self._get_container(container1["id"])
        assert c1_before["has_unpublished_changes"]
        c2_before = self._get_container(container2["id"])
        assert c2_before["has_unpublished_changes"]
        # clear event log after the initial mock data setup is complete:
        self.clear_events()
        # Now publish only Container 1
        self._publish_container(container1["id"])
        # Now it is published:
        c1_after = self._get_container(container1["id"])
        assert c1_after["has_unpublished_changes"] is False
        # And publish events were emitted:
        self.expect_new_events(
            {  # An event for container 1 being published:
                "signal": LIBRARY_CONTAINER_PUBLISHED,
                "library_container": LibraryContainerData(
                    container_key=LibraryContainerLocator.from_string(container1["id"]),
                ),
            },
            {  # An event for the problem block in container 1:
                "signal": LIBRARY_BLOCK_PUBLISHED,
                "library_block": LibraryBlockData(
                    self.lib1_key, LibraryUsageLocatorV2.from_string(problem_block["id"]),
                ),
            },
            {  # An event for the html block in container 1 (and container 2):
                "signal": LIBRARY_BLOCK_PUBLISHED,
                "library_block": LibraryBlockData(
                    self.lib1_key, LibraryUsageLocatorV2.from_string(html_block["id"]),
                ),
            },
            {  # Not 100% sure we want this, but a PUBLISHED event is emitted for container 2
                # because one of its children's published versions has changed, so whether or
                # not it contains unpublished changes may have changed and the search index
                # may need to be updated. It is not actually published though.
                # TODO: should this be a CONTAINER_CHILD_PUBLISHED event?
                "signal": LIBRARY_CONTAINER_PUBLISHED,
                "library_container": LibraryContainerData(
                    container_key=LibraryContainerLocator.from_string(container2["id"]),
                ),
            },
        )
        # note that container 2 is still unpublished
        c2_after = self._get_container(container2["id"])
        assert c2_after["has_unpublished_changes"]
def test_restore_unit(self) -> None:
"""
Test restoring a deleted unit via the "restore" API.
"""
# Create a unit:
container_data = self._create_container(self.lib1_key, "unit", slug="u1", display_name="Test Unit")
container_key = LibraryContainerLocator.from_string(container_data["id"])
self.expect_new_events({
"signal": LIBRARY_CONTAINER_CREATED,
"library_container": LibraryContainerData(container_key),
})
# Delete the unit
self._delete_container(container_data["id"])
self.expect_new_events({
"signal": LIBRARY_CONTAINER_DELETED,
"library_container": LibraryContainerData(container_key),
})
# Restore the unit
self._restore_container(container_data["id"])
self.expect_new_events({
"signal": LIBRARY_CONTAINER_CREATED,
"library_container": LibraryContainerData(container_key),
})
    def test_restore_unit_via_revert(self) -> None:
        """
        Test restoring a deleted unit by reverting changes.

        The unit must be published before deletion so that reverting the
        library's draft changes brings it back, emitting _CREATED.
        """
        # Publish the existing setup and clear events
        self._commit_library_changes(self.lib1_key)
        self.clear_events()
        # Create a unit:
        container_data = self._create_container(self.lib1_key, "unit", slug="u1", display_name="Test Unit")
        container_key = LibraryContainerLocator.from_string(container_data["id"])
        self.expect_new_events({
            "signal": LIBRARY_CONTAINER_CREATED,
            "library_container": LibraryContainerData(container_key),
        })
        # Publish changes
        self._publish_container(container_key)
        self.expect_new_events({
            "signal": LIBRARY_CONTAINER_PUBLISHED,
            "library_container": LibraryContainerData(container_key),
        })
        # Delete the unit
        self._delete_container(container_data["id"])
        self.expect_new_events({
            "signal": LIBRARY_CONTAINER_DELETED,
            "library_container": LibraryContainerData(container_key),
        })
        # Revert changes, which will re-create the unit:
        self._revert_library_changes(self.lib1_key)
        self.expect_new_events({
            "signal": LIBRARY_CONTAINER_CREATED,
            "library_container": LibraryContainerData(container_key),
        })
############################## Collections ##################################
    def test_collection_crud(self) -> None:
        """ Test basic create, update, and delete events for collections """
        collection = self._create_collection(self.lib1_key, "Test Collection")
        # To fix? The response from _create_collection should have the opaque key as the "id" field, not an integer.
        collection_key = LibraryCollectionLocator(lib_key=self.lib1_key, collection_id=collection["key"])
        self.expect_new_events({
            "signal": LIBRARY_COLLECTION_CREATED,
            "library_collection": LibraryCollectionData(collection_key),
        })
        # Update the collection:
        self._update_collection(collection_key, description="Updated description")
        self.expect_new_events({
            "signal": LIBRARY_COLLECTION_UPDATED,
            "library_collection": LibraryCollectionData(collection_key),
        })
        # Soft delete the collection. NOTE: at the moment, it's only possible to "soft delete" collections via
        # the REST API, which sends an UPDATED event because the collection is now "disabled" but not deleted.
        self._soft_delete_collection(collection_key)
        self.expect_new_events({
            "signal": LIBRARY_COLLECTION_UPDATED,  # UPDATED not DELETED. If we do a hard delete, it should be DELETED.
            "library_collection": LibraryCollectionData(collection_key),
        })

    # TODO: move more of the event-related collection tests from test_api.py to here, and convert them to use REST APIs

View File

@@ -2,7 +2,6 @@
Tests that several XBlock APIs support versioning
"""
from django.test.utils import override_settings
from openedx_events.tests.utils import OpenEdxEventsTestMixin
from xblock.core import XBlock
from openedx.core.djangoapps.content_libraries.tests.base import (
@@ -14,7 +13,7 @@ from .fields_test_block import FieldsTestBlock
@skip_unless_cms
@override_settings(CORS_ORIGIN_WHITELIST=[]) # For some reason, this setting isn't defined in our test environment?
class VersionedXBlockApisTestCase(ContentLibrariesRestApiTest, OpenEdxEventsTestMixin):
class VersionedXBlockApisTestCase(ContentLibrariesRestApiTest):
"""
Tests for three APIs implemented by djangoapps.xblock, and used by content
libraries. These tests focus on versioning.

View File

@@ -44,6 +44,12 @@ class TestCohortSignals(TestCase, OpenEdxEventsTestMixin):
super().setUpClass()
cls.start_events_isolation()
@classmethod
def tearDownClass(cls):
""" Don't let our event isolation affect other test cases """
super().tearDownClass()
cls.enable_all_events() # Re-enable events other than the ENABLED_OPENEDX_EVENTS subset we isolated.
def setUp(self):
super().setUp()
self.course_key = CourseLocator("dummy", "dummy", "dummy")

View File

@@ -46,6 +46,12 @@ class CohortEventTest(SharedModuleStoreTestCase, OpenEdxEventsTestMixin):
super().setUpClass()
cls.start_events_isolation()
@classmethod
def tearDownClass(cls):
""" Don't let our event isolation affect other test cases """
super().tearDownClass()
cls.enable_all_events() # Re-enable events other than the ENABLED_OPENEDX_EVENTS subset we isolated.
def setUp(self): # pylint: disable=arguments-differ
super().setUp()
self.course = CourseOverviewFactory()

View File

@@ -801,7 +801,7 @@ openedx-django-require==2.1.0
# via -r requirements/edx/kernel.in
openedx-django-wiki==2.1.0
# via -r requirements/edx/kernel.in
openedx-events==10.1.0
openedx-events==10.2.0
# via
# -r requirements/edx/kernel.in
# edx-enterprise

View File

@@ -1359,7 +1359,7 @@ openedx-django-wiki==2.1.0
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
openedx-events==10.1.0
openedx-events==10.2.0
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt

View File

@@ -972,7 +972,7 @@ openedx-django-require==2.1.0
# via -r requirements/edx/base.txt
openedx-django-wiki==2.1.0
# via -r requirements/edx/base.txt
openedx-events==10.1.0
openedx-events==10.2.0
# via
# -r requirements/edx/base.txt
# edx-enterprise

View File

@@ -1031,7 +1031,7 @@ openedx-django-require==2.1.0
# via -r requirements/edx/base.txt
openedx-django-wiki==2.1.0
# via -r requirements/edx/base.txt
openedx-events==10.1.0
openedx-events==10.2.0
# via
# -r requirements/edx/base.txt
# edx-enterprise