Merge branch 'master' of github.com:openedx/edx-platform into hamzawaleed01/upgrade-edx-enterprise-7d22784

This commit is contained in:
hamzawaleed01
2024-09-16 16:03:18 +05:00
35 changed files with 727 additions and 85 deletions

View File

@@ -6,7 +6,12 @@
<base target="_blank">
<meta charset="UTF-8">
<!-- gettext & XBlock JS i18n code -->
<script type="text/javascript" src="{{ lms_root_url }}/static/js/i18n/en/djangojs.js"></script>
{% if is_development %}
<!-- in development, the djangojs file isn't available so use fallback -->
<script type="text/javascript" src="{{ lms_root_url }}/static/js/src/gettext_fallback.js"></script>
{% else %}
<script type="text/javascript" src="{{ lms_root_url }}/static/js/i18n/en/djangojs.js"></script>
{% endif %}
<!-- Most XBlocks require jQuery: -->
<script src="https://code.jquery.com/jquery-2.2.4.min.js"></script>
<!-- The Video XBlock requires "ajaxWithPrefix" -->

View File

@@ -126,9 +126,13 @@ class ChooseModeView(View):
if ecommerce_service.is_enabled(request.user):
professional_mode = modes.get(CourseMode.NO_ID_PROFESSIONAL_MODE) or modes.get(CourseMode.PROFESSIONAL)
if purchase_workflow == "single" and professional_mode.sku:
redirect_url = ecommerce_service.get_checkout_page_url(professional_mode.sku)
redirect_url = ecommerce_service.get_checkout_page_url(
professional_mode.sku, course_run_keys=[course_id]
)
if purchase_workflow == "bulk" and professional_mode.bulk_sku:
redirect_url = ecommerce_service.get_checkout_page_url(professional_mode.bulk_sku)
redirect_url = ecommerce_service.get_checkout_page_url(
professional_mode.bulk_sku, course_run_keys=[course_id]
)
return redirect(redirect_url)
course = modulestore().get_course(course_key)

View File

@@ -244,10 +244,6 @@ Content Authoring Events
- org.openedx.content_authoring.library_block.deleted.v1
- 2023-07-20
* - `CONTENT_OBJECT_TAGS_CHANGED <https://github.com/openedx/openedx-events/blob/c0eb4ba1a3d7d066d58e5c87920b8ccb0645f769/openedx_events/content_authoring/signals.py#L207>`_
- org.openedx.content_authoring.content.object.tags.changed.v1
- 2024-03-31
* - `LIBRARY_COLLECTION_CREATED <https://github.com/openedx/openedx-events/blob/main/openedx_events/content_authoring/signals.py#L219>`_
- org.openedx.content_authoring.content_library.collection.created.v1
- 2024-08-23
@@ -259,3 +255,7 @@ Content Authoring Events
* - `LIBRARY_COLLECTION_DELETED <https://github.com/openedx/openedx-events/blob/main/openedx_events/content_authoring/signals.py#L241>`_
- org.openedx.content_authoring.content_library.collection.deleted.v1
- 2024-08-23
* - `CONTENT_OBJECT_ASSOCIATIONS_CHANGED <https://github.com/openedx/openedx-events/blob/eb17e03f075b272ad8a29e8435d6a514f8884131/openedx_events/content_authoring/signals.py#L205-L214>`_
- org.openedx.content_authoring.content.object.associations.changed.v1
- 2024-09-06

View File

@@ -55,5 +55,6 @@ def ace_email_sent_handler(sender, **kwargs):
'channel': channel,
'course_id': course_id,
'user_id': user_id,
'user_email': message.recipient.email_address,
}
)

View File

@@ -533,7 +533,7 @@ def _send_course_email(entry_id, email_id, to_list, global_email_context, subtas
email_context['email'] = email
email_context['name'] = profile_name
email_context['user_id'] = user_id
email_context['course_id'] = course_email.course_id
email_context['course_id'] = str(course_email.course_id)
email_context['unsubscribe_link'] = get_unsubscribed_link(current_recipient['username'],
str(course_email.course_id))

View File

@@ -64,6 +64,7 @@ def opt_out_email_updates(request, token, course_id):
event_name = 'edx.bulk_email.opt_out'
event_data = {
"username": user.username,
"user_email": user.email,
"user_id": user.id,
"course_id": course_id,
}

View File

@@ -27,6 +27,7 @@ from .waffle import ( # lint-amnesty, pylint: disable=invalid-django-waffle-imp
should_redirect_to_commerce_coordinator_checkout,
should_redirect_to_commerce_coordinator_refunds,
)
from edx_django_utils.plugins import pluggable_override
log = logging.getLogger(__name__)
@@ -56,6 +57,7 @@ class EcommerceService:
""" Retrieve Ecommerce service public url root. """
return configuration_helpers.get_value('ECOMMERCE_PUBLIC_URL_ROOT', settings.ECOMMERCE_PUBLIC_URL_ROOT)
@pluggable_override('OVERRIDE_GET_ABSOLUTE_ECOMMERCE_URL')
def get_absolute_ecommerce_url(self, ecommerce_page_url):
""" Return the absolute URL to the ecommerce page.
@@ -110,7 +112,6 @@ class EcommerceService:
def get_add_to_basket_url(self):
""" Return the URL for the payment page based on the waffle switch.
Example:
http://localhost/enabled_service_api_path
"""
@@ -118,12 +119,14 @@ class EcommerceService:
return urljoin(settings.COMMERCE_COORDINATOR_URL_ROOT, settings.COORDINATOR_CHECKOUT_REDIRECT_PATH)
return self.payment_page_url()
@pluggable_override('OVERRIDE_GET_CHECKOUT_PAGE_URL')
def get_checkout_page_url(self, *skus, **kwargs):
""" Construct the URL to the ecommerce checkout page and include products.
Args:
skus (list): List of SKUs associated with products to be added to basket
program_uuid (string): The UUID of the program, if applicable
course_run_keys (list): The course run keys of the products to be added to basket.
Returns:
Absolute path to the ecommerce checkout page showing basket that contains specified products.
@@ -153,10 +156,12 @@ class EcommerceService:
"""
Returns the URL for the user to upgrade, or None if not applicable.
"""
course_run_key = str(course_key)
verified_mode = CourseMode.verified_mode_for_course(course_key)
if verified_mode:
if self.is_enabled(user):
return self.get_checkout_page_url(verified_mode.sku)
return self.get_checkout_page_url(verified_mode.sku, course_run_keys=[course_run_key])
else:
return reverse('dashboard')
return None

View File

@@ -825,9 +825,13 @@ def course_about(request, course_id): # pylint: disable=too-many-statements
single_paid_mode = modes.get(CourseMode.PROFESSIONAL)
if single_paid_mode and single_paid_mode.sku:
ecommerce_checkout_link = ecomm_service.get_checkout_page_url(single_paid_mode.sku)
ecommerce_checkout_link = ecomm_service.get_checkout_page_url(
single_paid_mode.sku, course_run_keys=[course_id]
)
if single_paid_mode and single_paid_mode.bulk_sku:
ecommerce_bulk_checkout_link = ecomm_service.get_checkout_page_url(single_paid_mode.bulk_sku)
ecommerce_bulk_checkout_link = ecomm_service.get_checkout_page_url(
single_paid_mode.bulk_sku, course_run_keys=[course_id]
)
registration_price, course_price = get_course_prices(course) # lint-amnesty, pylint: disable=unused-variable

View File

@@ -503,7 +503,8 @@ class PayAndVerifyView(View):
if ecommerce_service.is_enabled(user):
url = ecommerce_service.get_checkout_page_url(
sku,
catalog=self.request.GET.get('catalog')
catalog=self.request.GET.get('catalog'),
course_run_keys=[str(course_key)]
)
# Redirect if necessary, otherwise implicitly return None

View File

@@ -0,0 +1 @@
../../../cms/templates/content_libraries/xblock_iframe.html

View File

@@ -34,6 +34,7 @@ from .documents import (
searchable_doc_for_course_block,
searchable_doc_for_collection,
searchable_doc_for_library_block,
searchable_doc_collections,
searchable_doc_tags,
)
@@ -322,6 +323,9 @@ def rebuild_index(status_cb: Callable[[str], None] | None = None) -> None:
Fields.tags + "." + Fields.tags_level1,
Fields.tags + "." + Fields.tags_level2,
Fields.tags + "." + Fields.tags_level3,
Fields.collections,
Fields.collections + "." + Fields.collections_display_name,
Fields.collections + "." + Fields.collections_key,
Fields.type,
Fields.access_id,
Fields.last_published,
@@ -333,8 +337,9 @@ def rebuild_index(status_cb: Callable[[str], None] | None = None) -> None:
Fields.display_name,
Fields.block_id,
Fields.content,
Fields.tags,
Fields.description,
Fields.tags,
Fields.collections,
# If we don't list the following sub-fields _explicitly_, they're only sometimes searchable - that is, they
# are searchable only if at least one document in the index has a value. If we didn't list them here and,
# say, there were no tags.level3 tags in the index, the client would get an error if trying to search for
@@ -344,6 +349,8 @@ def rebuild_index(status_cb: Callable[[str], None] | None = None) -> None:
Fields.tags + "." + Fields.tags_level1,
Fields.tags + "." + Fields.tags_level2,
Fields.tags + "." + Fields.tags_level3,
Fields.collections + "." + Fields.collections_display_name,
Fields.collections + "." + Fields.collections_key,
])
# Mark which attributes can be used for sorting search results:
client.index(temp_index_name).update_sortable_attributes([
@@ -375,6 +382,7 @@ def rebuild_index(status_cb: Callable[[str], None] | None = None) -> None:
doc = {}
doc.update(searchable_doc_for_library_block(metadata))
doc.update(searchable_doc_tags(metadata.usage_key))
doc.update(searchable_doc_collections(metadata.usage_key))
docs.append(doc)
except Exception as err: # pylint: disable=broad-except
status_cb(f"Error indexing library component {component}: {err}")
@@ -549,6 +557,22 @@ def upsert_library_block_index_doc(usage_key: UsageKey) -> None:
_update_index_docs(docs)
def upsert_library_collection_index_doc(library_key: LibraryLocatorV2, collection_key: str) -> None:
"""
Creates or updates the document for the given Library Collection in the search index
"""
content_library = lib_api.ContentLibrary.objects.get_by_key(library_key)
collection = authoring_api.get_collection(
learning_package_id=content_library.learning_package_id,
collection_key=collection_key,
)
docs = [
searchable_doc_for_collection(collection)
]
_update_index_docs(docs)
def upsert_content_library_index_docs(library_key: LibraryLocatorV2) -> None:
"""
Creates or updates the documents for the given Content Library in the search index
@@ -571,6 +595,15 @@ def upsert_block_tags_index_docs(usage_key: UsageKey):
_update_index_docs([doc])
def upsert_block_collections_index_docs(usage_key: UsageKey):
"""
Updates the collections data in documents for the given Course/Library block
"""
doc = {Fields.id: meili_id_from_opaque_key(usage_key)}
doc.update(searchable_doc_collections(usage_key))
_update_index_docs([doc])
def _get_user_orgs(request: Request) -> list[str]:
"""
Get the org.short_names for the organizations that the requesting user has OrgStaffRole or OrgInstructorRole.

View File

@@ -7,7 +7,9 @@ import logging
from hashlib import blake2b
from django.utils.text import slugify
from django.core.exceptions import ObjectDoesNotExist
from opaque_keys.edx.keys import LearningContextKey, UsageKey
from openedx_learning.api import authoring as authoring_api
from openedx.core.djangoapps.content.search.models import SearchAccess
from openedx.core.djangoapps.content_libraries import api as lib_api
@@ -26,7 +28,11 @@ class Fields:
id = "id"
usage_key = "usage_key"
type = "type" # DocType.course_block or DocType.library_block (see below)
block_id = "block_id" # The block_id part of the usage key. Sometimes human-readable, sometimes a random hex ID
# The block_id part of the usage key for course or library blocks.
# If it's a collection, the collection.key is stored here.
# Sometimes human-readable, sometimes a random hex ID
# Is only unique within the given context_key.
block_id = "block_id"
display_name = "display_name"
description = "description"
modified = "modified"
@@ -52,11 +58,21 @@ class Fields:
tags_level1 = "level1"
tags_level2 = "level2"
tags_level3 = "level3"
# Collections (dictionary) that this object belongs to.
# Similarly to tags above, we collect the collection.titles and collection.keys into hierarchical facets.
collections = "collections"
collections_display_name = "display_name"
collections_key = "key"
# The "content" field is a dictionary of arbitrary data, depending on the block_type.
# It comes from each XBlock's index_dictionary() method (if present) plus some processing.
# Text (html) blocks have an "html_content" key in here, capa has "capa_content" and "problem_types", and so on.
content = "content"
# Collections use this field to communicate how many entities/components they contain.
# Structural XBlocks may use this one day to indicate how many child blocks they contain.
num_children = "num_children"
# Note: new fields or values can be added at any time, but if they need to be indexed for filtering or keyword
# search, the index configuration will need to be changed, which is only done as part of the 'reindex_studio'
# command (changing those settings on an large active index is not recommended).
@@ -223,6 +239,51 @@ def _tags_for_content_object(object_id: UsageKey | LearningContextKey) -> dict:
return {Fields.tags: result}
def _collections_for_content_object(object_id: UsageKey | LearningContextKey) -> dict:
"""
Given an XBlock, course, library, etc., get the collections for its index doc.
e.g. for something in Collections "COL_A" and "COL_B", this would return:
{
"collections": {
"display_name": ["Collection A", "Collection B"],
"key": ["COL_A", "COL_B"],
}
}
If the object is in no collections, returns:
{
"collections": {},
}
"""
# Gather the collections associated with this object
collections = None
try:
component = lib_api.get_component_from_usage_key(object_id)
collections = authoring_api.get_entity_collections(
component.learning_package_id,
component.key,
)
except ObjectDoesNotExist:
log.warning(f"No component found for {object_id}")
if not collections:
return {Fields.collections: {}}
result = {
Fields.collections: {
Fields.collections_display_name: [],
Fields.collections_key: [],
}
}
for collection in collections:
result[Fields.collections][Fields.collections_display_name].append(collection.title)
result[Fields.collections][Fields.collections_key].append(collection.key)
return result
def searchable_doc_for_library_block(xblock_metadata: lib_api.LibraryXBlockMetadata) -> dict:
"""
Generate a dictionary document suitable for ingestion into a search engine
@@ -265,6 +326,19 @@ def searchable_doc_tags(usage_key: UsageKey) -> dict:
return doc
def searchable_doc_collections(usage_key: UsageKey) -> dict:
"""
Generate a dictionary document suitable for ingestion into a search engine
like Meilisearch or Elasticsearch, with the collections data for the given content object.
"""
doc = {
Fields.id: meili_id_from_opaque_key(usage_key),
}
doc.update(_collections_for_content_object(usage_key))
return doc
def searchable_doc_for_course_block(block) -> dict:
"""
Generate a dictionary document suitable for ingestion into a search engine
@@ -289,6 +363,7 @@ def searchable_doc_for_collection(collection) -> dict:
"""
doc = {
Fields.id: collection.id,
Fields.block_id: collection.key,
Fields.type: DocType.collection,
Fields.display_name: collection.title,
Fields.description: collection.description,
@@ -298,6 +373,7 @@ def searchable_doc_for_collection(collection) -> dict:
# If related contentlibrary is found, it will override this value below.
# Mostly contentlibrary.library_key == learning_package.key
Fields.context_key: collection.learning_package.key,
Fields.num_children: collection.entities.count(),
}
# Just in case learning_package is not related to a library
try:

View File

@@ -6,30 +6,40 @@ import logging
from django.db.models.signals import post_delete
from django.dispatch import receiver
from openedx_events.content_authoring.data import ContentLibraryData, ContentObjectData, LibraryBlockData, XBlockData
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import UsageKey
from openedx_events.content_authoring.data import (
ContentLibraryData,
ContentObjectChangedData,
LibraryBlockData,
LibraryCollectionData,
XBlockData,
)
from openedx_events.content_authoring.signals import (
CONTENT_LIBRARY_DELETED,
CONTENT_LIBRARY_UPDATED,
LIBRARY_BLOCK_CREATED,
LIBRARY_BLOCK_DELETED,
LIBRARY_BLOCK_UPDATED,
LIBRARY_COLLECTION_CREATED,
LIBRARY_COLLECTION_UPDATED,
XBLOCK_CREATED,
XBLOCK_DELETED,
XBLOCK_UPDATED,
CONTENT_OBJECT_TAGS_CHANGED,
CONTENT_OBJECT_ASSOCIATIONS_CHANGED,
)
from openedx.core.djangoapps.content_tagging.utils import get_content_key_from_string
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from openedx.core.djangoapps.content.search.models import SearchAccess
from .api import only_if_meilisearch_enabled, upsert_block_tags_index_docs
from .api import only_if_meilisearch_enabled, upsert_block_collections_index_docs, upsert_block_tags_index_docs
from .tasks import (
delete_library_block_index_doc,
delete_xblock_index_doc,
update_content_library_index_docs,
update_library_collection_index_doc,
upsert_library_block_index_doc,
upsert_xblock_index_doc
upsert_xblock_index_doc,
)
log = logging.getLogger(__name__)
@@ -145,22 +155,48 @@ def content_library_updated_handler(**kwargs) -> None:
update_content_library_index_docs.apply(args=[str(content_library_data.library_key)])
@receiver(CONTENT_OBJECT_TAGS_CHANGED)
@receiver(LIBRARY_COLLECTION_CREATED)
@receiver(LIBRARY_COLLECTION_UPDATED)
@only_if_meilisearch_enabled
def content_object_tags_changed_handler(**kwargs) -> None:
def library_collection_updated_handler(**kwargs) -> None:
"""
Update the tags data in the index for the Content Object
Create or update the index for the content library collection
"""
content_object_tags = kwargs.get("content_object", None)
if not content_object_tags or not isinstance(content_object_tags, ContentObjectData):
library_collection = kwargs.get("library_collection", None)
if not library_collection or not isinstance(library_collection, LibraryCollectionData): # pragma: no cover
log.error("Received null or incorrect data for event")
return
# Update collection index synchronously to make sure that search index is updated before
# the frontend invalidates/refetches index.
# See content_library_updated_handler for more details.
update_library_collection_index_doc.apply(args=[
str(library_collection.library_key),
library_collection.collection_key,
])
@receiver(CONTENT_OBJECT_ASSOCIATIONS_CHANGED)
@only_if_meilisearch_enabled
def content_object_associations_changed_handler(**kwargs) -> None:
"""
Update the collections/tags data in the index for the Content Object
"""
content_object = kwargs.get("content_object", None)
if not content_object or not isinstance(content_object, ContentObjectChangedData):
log.error("Received null or incorrect data for event")
return
try:
# Check if valid if course or library block
get_content_key_from_string(content_object_tags.object_id)
except ValueError:
usage_key = UsageKey.from_string(str(content_object.object_id))
except InvalidKeyError:
log.error("Received invalid content object id")
return
upsert_block_tags_index_docs(content_object_tags.object_id)
# This event's changes may contain both "tags" and "collections", but this will happen rarely, if ever.
# So we allow a potential double "upsert" here.
if not content_object.changes or "tags" in content_object.changes:
upsert_block_tags_index_docs(usage_key)
if not content_object.changes or "collections" in content_object.changes:
upsert_block_collections_index_docs(usage_key)

View File

@@ -84,3 +84,16 @@ def update_content_library_index_docs(library_key_str: str) -> None:
# Delete all documents in this library that were not published by above function
# as this task is also triggered on discard event.
api.delete_all_draft_docs_for_library(library_key)
@shared_task(base=LoggedTask, autoretry_for=(MeilisearchError, ConnectionError))
@set_code_owner_attribute
def update_library_collection_index_doc(library_key_str: str, collection_key: str) -> None:
"""
Celery task to update the content index documents for a library collection
"""
library_key = LibraryLocatorV2.from_string(library_key_str)
log.info("Updating content index documents for collection %s in library%s", collection_key, library_key)
api.upsert_library_collection_index_doc(library_key, collection_key)

View File

@@ -186,16 +186,18 @@ class TestSearchApi(ModuleStoreTestCase):
description="my collection description"
)
self.collection_dict = {
'id': self.collection.id,
'type': 'collection',
'display_name': 'my_collection',
'description': 'my collection description',
'context_key': 'lib:org1:lib',
'org': 'org1',
'created': created_date.timestamp(),
'modified': created_date.timestamp(),
"id": self.collection.id,
"block_id": self.collection.key,
"type": "collection",
"display_name": "my_collection",
"description": "my collection description",
"num_children": 0,
"context_key": "lib:org1:lib",
"org": "org1",
"created": created_date.timestamp(),
"modified": created_date.timestamp(),
"access_id": lib_access.id,
'breadcrumbs': [{'display_name': 'Library'}]
"breadcrumbs": [{"display_name": "Library"}],
}
@override_settings(MEILISEARCH_ENABLED=False)
@@ -215,10 +217,13 @@ class TestSearchApi(ModuleStoreTestCase):
doc_vertical["tags"] = {}
doc_problem1 = copy.deepcopy(self.doc_problem1)
doc_problem1["tags"] = {}
doc_problem1["collections"] = {}
doc_problem2 = copy.deepcopy(self.doc_problem2)
doc_problem2["tags"] = {}
doc_problem2["collections"] = {}
api.rebuild_index()
assert mock_meilisearch.return_value.index.return_value.add_documents.call_count == 3
mock_meilisearch.return_value.index.return_value.add_documents.assert_has_calls(
[
call([doc_sequential, doc_vertical]),
@@ -254,6 +259,7 @@ class TestSearchApi(ModuleStoreTestCase):
doc_vertical["tags"] = {}
doc_problem2 = copy.deepcopy(self.doc_problem2)
doc_problem2["tags"] = {}
doc_problem2["collections"] = {}
orig_from_component = library_api.LibraryXBlockMetadata.from_component
@@ -346,6 +352,7 @@ class TestSearchApi(ModuleStoreTestCase):
}
}
assert mock_meilisearch.return_value.index.return_value.update_documents.call_count == 2
mock_meilisearch.return_value.index.return_value.update_documents.assert_has_calls(
[
call([doc_sequential_with_tags1]),
@@ -400,6 +407,7 @@ class TestSearchApi(ModuleStoreTestCase):
}
}
assert mock_meilisearch.return_value.index.return_value.update_documents.call_count == 2
mock_meilisearch.return_value.index.return_value.update_documents.assert_has_calls(
[
call([doc_problem_with_tags1]),
@@ -408,6 +416,130 @@ class TestSearchApi(ModuleStoreTestCase):
any_order=True,
)
@override_settings(MEILISEARCH_ENABLED=True)
def test_index_library_block_and_collections(self, mock_meilisearch):
"""
Test indexing an Library Block and the Collections it's in.
"""
# Create collections (these internally call `upsert_library_collection_index_doc`)
created_date = datetime(2023, 5, 6, 7, 8, 9, tzinfo=timezone.utc)
with freeze_time(created_date):
collection1 = library_api.create_library_collection(
self.library.key,
collection_key="COL1",
title="Collection 1",
created_by=None,
description="First Collection",
)
collection2 = library_api.create_library_collection(
self.library.key,
collection_key="COL2",
title="Collection 2",
created_by=None,
description="Second Collection",
)
# Add Problem1 to both Collections (these internally call `upsert_block_collections_index_docs` and
# `upsert_library_collection_index_doc`)
# (adding in reverse order to test sorting of collection tag)
updated_date = datetime(2023, 6, 7, 8, 9, 10, tzinfo=timezone.utc)
with freeze_time(updated_date):
for collection in (collection2, collection1):
library_api.update_library_collection_components(
self.library.key,
collection_key=collection.key,
usage_keys=[
self.problem1.usage_key,
],
)
# Build expected docs at each stage
lib_access, _ = SearchAccess.objects.get_or_create(context_key=self.library.key)
doc_collection1_created = {
"id": collection1.id,
"block_id": collection1.key,
"type": "collection",
"display_name": "Collection 1",
"description": "First Collection",
"num_children": 0,
"context_key": "lib:org1:lib",
"org": "org1",
"created": created_date.timestamp(),
"modified": created_date.timestamp(),
"access_id": lib_access.id,
"breadcrumbs": [{"display_name": "Library"}],
}
doc_collection2_created = {
"id": collection2.id,
"block_id": collection2.key,
"type": "collection",
"display_name": "Collection 2",
"description": "Second Collection",
"num_children": 0,
"context_key": "lib:org1:lib",
"org": "org1",
"created": created_date.timestamp(),
"modified": created_date.timestamp(),
"access_id": lib_access.id,
"breadcrumbs": [{"display_name": "Library"}],
}
doc_collection2_updated = {
"id": collection2.id,
"block_id": collection2.key,
"type": "collection",
"display_name": "Collection 2",
"description": "Second Collection",
"num_children": 1,
"context_key": "lib:org1:lib",
"org": "org1",
"created": created_date.timestamp(),
"modified": updated_date.timestamp(),
"access_id": lib_access.id,
"breadcrumbs": [{"display_name": "Library"}],
}
doc_collection1_updated = {
"id": collection1.id,
"block_id": collection1.key,
"type": "collection",
"display_name": "Collection 1",
"description": "First Collection",
"num_children": 1,
"context_key": "lib:org1:lib",
"org": "org1",
"created": created_date.timestamp(),
"modified": updated_date.timestamp(),
"access_id": lib_access.id,
"breadcrumbs": [{"display_name": "Library"}],
}
doc_problem_with_collection1 = {
"id": self.doc_problem1["id"],
"collections": {
"display_name": ["Collection 2"],
"key": ["COL2"],
},
}
doc_problem_with_collection2 = {
"id": self.doc_problem1["id"],
"collections": {
"display_name": ["Collection 1", "Collection 2"],
"key": ["COL1", "COL2"],
},
}
assert mock_meilisearch.return_value.index.return_value.update_documents.call_count == 6
mock_meilisearch.return_value.index.return_value.update_documents.assert_has_calls(
[
call([doc_collection1_created]),
call([doc_collection2_created]),
call([doc_collection2_updated]),
call([doc_collection1_updated]),
call([doc_problem_with_collection1]),
call([doc_problem_with_collection2]),
],
any_order=True,
)
@override_settings(MEILISEARCH_ENABLED=True)
def test_delete_index_library_block(self, mock_meilisearch):
"""

View File

@@ -8,6 +8,7 @@ from freezegun import freeze_time
from openedx_learning.api import authoring as authoring_api
from openedx.core.djangoapps.content_tagging import api as tagging_api
from openedx.core.djangoapps.content_libraries import api as library_api
from openedx.core.djangolib.testing.utils import skip_unless_cms
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
@@ -15,12 +16,19 @@ from xmodule.modulestore.tests.factories import BlockFactory, ToyCourseFactory
try:
# This import errors in the lms because content.search is not an installed app there.
from ..documents import searchable_doc_for_course_block, searchable_doc_tags, searchable_doc_for_collection
from ..documents import (
searchable_doc_for_course_block,
searchable_doc_tags,
searchable_doc_collections,
searchable_doc_for_collection,
searchable_doc_for_library_block,
)
from ..models import SearchAccess
except RuntimeError:
searchable_doc_for_course_block = lambda x: x
searchable_doc_tags = lambda x: x
searchable_doc_for_collection = lambda x: x
searchable_doc_for_library_block = lambda x: x
SearchAccess = {}
@@ -38,12 +46,13 @@ class StudioDocumentsTest(SharedModuleStoreTestCase):
def setUpClass(cls):
super().setUpClass()
cls.store = modulestore()
cls.org = Organization.objects.create(name="edX", short_name="edX")
cls.toy_course = ToyCourseFactory.create() # See xmodule/modulestore/tests/sample_courses.py
cls.toy_course_key = cls.toy_course.id
# Get references to some blocks in the toy course
cls.html_block_key = cls.toy_course_key.make_usage_key("html", "toyjumpto")
# Create a problem in library
# Create a problem in course
cls.problem_block = BlockFactory.create(
category="problem",
parent_location=cls.toy_course_key.make_usage_key("vertical", "vertical_test"),
@@ -51,8 +60,38 @@ class StudioDocumentsTest(SharedModuleStoreTestCase):
data="<problem>What is a test?<multiplechoiceresponse></multiplechoiceresponse></problem>",
)
# Create a library and collection with a block
created_date = datetime(2023, 4, 5, 6, 7, 8, tzinfo=timezone.utc)
with freeze_time(created_date):
cls.library = library_api.create_library(
org=cls.org,
slug="2012_Fall",
title="some content_library",
description="some description",
)
cls.collection = library_api.create_library_collection(
cls.library.key,
collection_key="TOY_COLLECTION",
title="Toy Collection",
created_by=None,
description="my toy collection description"
)
cls.library_block = library_api.create_library_block(
cls.library.key,
"html",
"text2",
)
# Add the problem block to the collection
library_api.update_library_collection_components(
cls.library.key,
collection_key="TOY_COLLECTION",
usage_keys=[
cls.library_block.usage_key,
]
)
# Create a couple taxonomies and some tags
cls.org = Organization.objects.create(name="edX", short_name="edX")
cls.difficulty_tags = tagging_api.create_taxonomy(name="Difficulty", orgs=[cls.org], allow_multiple=False)
tagging_api.add_tag_to_taxonomy(cls.difficulty_tags, tag="Easy")
tagging_api.add_tag_to_taxonomy(cls.difficulty_tags, tag="Normal")
@@ -69,6 +108,7 @@ class StudioDocumentsTest(SharedModuleStoreTestCase):
tagging_api.tag_object(str(cls.problem_block.usage_key), cls.difficulty_tags, tags=["Easy"])
tagging_api.tag_object(str(cls.html_block_key), cls.subject_tags, tags=["Chinese", "Jump Links"])
tagging_api.tag_object(str(cls.html_block_key), cls.difficulty_tags, tags=["Normal"])
tagging_api.tag_object(str(cls.library_block.usage_key), cls.difficulty_tags, tags=["Normal"])
@property
def toy_course_access_id(self):
@@ -80,6 +120,16 @@ class StudioDocumentsTest(SharedModuleStoreTestCase):
"""
return SearchAccess.objects.get(context_key=self.toy_course_key).id
@property
def library_access_id(self):
"""
Returns the SearchAccess.id created for the library.
This SearchAccess object is created when documents are added to the search index, so this method must be called
after this step, or risk a DoesNotExist error.
"""
return SearchAccess.objects.get(context_key=self.library.key).id
def test_problem_block(self):
"""
Test how a problem block gets represented in the search index
@@ -205,6 +255,62 @@ class StudioDocumentsTest(SharedModuleStoreTestCase):
# This video has no tags.
}
def test_html_library_block(self):
"""
Test how a library block gets represented in the search index
"""
doc = {}
doc.update(searchable_doc_for_library_block(self.library_block))
doc.update(searchable_doc_tags(self.library_block.usage_key))
doc.update(searchable_doc_collections(self.library_block.usage_key))
assert doc == {
"id": "lbedx2012_fallhtmltext2-4bb47d67",
"type": "library_block",
"block_type": "html",
"usage_key": "lb:edX:2012_Fall:html:text2",
"block_id": "text2",
"context_key": "lib:edX:2012_Fall",
"org": "edX",
"access_id": self.library_access_id,
"display_name": "Text",
"breadcrumbs": [
{
"display_name": "some content_library",
},
],
"last_published": None,
"created": 1680674828.0,
"modified": 1680674828.0,
"content": {
"html_content": "",
},
"collections": {
"key": ["TOY_COLLECTION"],
"display_name": ["Toy Collection"],
},
"tags": {
"taxonomy": ["Difficulty"],
"level0": ["Difficulty > Normal"],
},
}
def test_collection_with_library(self):
doc = searchable_doc_for_collection(self.collection)
assert doc == {
"id": self.collection.id,
"block_id": self.collection.key,
"type": "collection",
"org": "edX",
"display_name": "Toy Collection",
"description": "my toy collection description",
"num_children": 1,
"context_key": "lib:edX:2012_Fall",
"access_id": self.library_access_id,
"breadcrumbs": [{"display_name": "some content_library"}],
"created": 1680674828.0,
"modified": 1680674828.0,
}
def test_collection_with_no_library(self):
created_date = datetime(2023, 4, 5, 6, 7, 8, tzinfo=timezone.utc)
with freeze_time(created_date):
@@ -223,9 +329,11 @@ class StudioDocumentsTest(SharedModuleStoreTestCase):
doc = searchable_doc_for_collection(collection)
assert doc == {
"id": collection.id,
"block_id": collection.key,
"type": "collection",
"display_name": "my_collection",
"description": "my collection description",
"num_children": 0,
"context_key": learning_package.key,
"access_id": self.toy_course_access_id,
"breadcrumbs": [{"display_name": "some learning_package"}],

View File

@@ -78,12 +78,12 @@ from opaque_keys.edx.locator import (
from opaque_keys import InvalidKeyError
from openedx_events.content_authoring.data import (
ContentLibraryData,
ContentObjectData,
ContentObjectChangedData,
LibraryBlockData,
LibraryCollectionData,
)
from openedx_events.content_authoring.signals import (
CONTENT_OBJECT_TAGS_CHANGED,
CONTENT_OBJECT_ASSOCIATIONS_CHANGED,
CONTENT_LIBRARY_CREATED,
CONTENT_LIBRARY_DELETED,
CONTENT_LIBRARY_UPDATED,
@@ -1235,10 +1235,13 @@ def update_library_collection_components(
)
)
# Emit a CONTENT_OBJECT_TAGS_CHANGED event for each of the objects added/removed
# Emit a CONTENT_OBJECT_ASSOCIATIONS_CHANGED event for each of the objects added/removed
for usage_key in usage_keys:
CONTENT_OBJECT_TAGS_CHANGED.send_event(
content_object=ContentObjectData(object_id=usage_key),
CONTENT_OBJECT_ASSOCIATIONS_CHANGED.send_event(
content_object=ContentObjectChangedData(
object_id=str(usage_key),
changes=["collections"],
),
)
return collection

View File

@@ -14,11 +14,11 @@ from opaque_keys.edx.keys import (
)
from opaque_keys.edx.locator import LibraryLocatorV2
from openedx_events.content_authoring.data import (
ContentObjectData,
ContentObjectChangedData,
LibraryCollectionData,
)
from openedx_events.content_authoring.signals import (
CONTENT_OBJECT_TAGS_CHANGED,
CONTENT_OBJECT_ASSOCIATIONS_CHANGED,
LIBRARY_COLLECTION_CREATED,
LIBRARY_COLLECTION_UPDATED,
)
@@ -262,7 +262,7 @@ class ContentLibraryCollectionsTest(ContentLibrariesRestApiTest, OpenEdxEventsTe
Same guidelines as ContentLibrariesTestCase.
"""
ENABLED_OPENEDX_EVENTS = [
CONTENT_OBJECT_TAGS_CHANGED.event_type,
CONTENT_OBJECT_ASSOCIATIONS_CHANGED.event_type,
LIBRARY_COLLECTION_CREATED.event_type,
LIBRARY_COLLECTION_UPDATED.event_type,
]
@@ -411,10 +411,10 @@ class ContentLibraryCollectionsTest(ContentLibrariesRestApiTest, OpenEdxEventsTe
def test_update_library_collection_components_event(self):
"""
Check that a CONTENT_OBJECT_TAGS_CHANGED event is raised for each added/removed component.
Check that a CONTENT_OBJECT_ASSOCIATIONS_CHANGED event is raised for each added/removed component.
"""
event_receiver = mock.Mock()
CONTENT_OBJECT_TAGS_CHANGED.connect(event_receiver)
CONTENT_OBJECT_ASSOCIATIONS_CHANGED.connect(event_receiver)
LIBRARY_COLLECTION_UPDATED.connect(event_receiver)
api.update_library_collection_components(
@@ -440,20 +440,22 @@ class ContentLibraryCollectionsTest(ContentLibrariesRestApiTest, OpenEdxEventsTe
)
self.assertDictContainsSubset(
{
"signal": CONTENT_OBJECT_TAGS_CHANGED,
"signal": CONTENT_OBJECT_ASSOCIATIONS_CHANGED,
"sender": None,
"content_object": ContentObjectData(
object_id=UsageKey.from_string(self.lib1_problem_block["id"]),
"content_object": ContentObjectChangedData(
object_id=self.lib1_problem_block["id"],
changes=["collections"],
),
},
event_receiver.call_args_list[1].kwargs,
)
self.assertDictContainsSubset(
{
"signal": CONTENT_OBJECT_TAGS_CHANGED,
"signal": CONTENT_OBJECT_ASSOCIATIONS_CHANGED,
"sender": None,
"content_object": ContentObjectData(
object_id=UsageKey.from_string(self.lib1_html_block["id"]),
"content_object": ContentObjectChangedData(
object_id=self.lib1_html_block["id"],
changes=["collections"],
),
},
event_receiver.call_args_list[2].kwargs,

View File

@@ -12,14 +12,17 @@ from opaque_keys.edx.keys import UsageKey
import openedx_tagging.core.tagging.api as oel_tagging
from django.db.models import Exists, OuterRef, Q, QuerySet
from django.utils.timezone import now
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.keys import CourseKey, LibraryCollectionKey
from opaque_keys.edx.locator import LibraryLocatorV2
from openedx_tagging.core.tagging.models import ObjectTag, Taxonomy
from openedx_tagging.core.tagging.models.utils import TAGS_CSV_SEPARATOR
from organizations.models import Organization
from .helpers.objecttag_export_helpers import build_object_tree_with_objecttags, iterate_with_level
from openedx_events.content_authoring.data import ContentObjectData
from openedx_events.content_authoring.signals import CONTENT_OBJECT_TAGS_CHANGED
from openedx_events.content_authoring.data import ContentObjectData, ContentObjectChangedData
from openedx_events.content_authoring.signals import (
CONTENT_OBJECT_ASSOCIATIONS_CHANGED,
CONTENT_OBJECT_TAGS_CHANGED,
)
from .models import TaxonomyOrg
from .types import ContentKey, TagValuesByObjectIdDict, TagValuesByTaxonomyIdDict, TaxonomyDict
@@ -227,7 +230,7 @@ def generate_csv_rows(object_id, buffer) -> Iterator[str]:
"""
content_key = get_content_key_from_string(object_id)
if isinstance(content_key, UsageKey):
if isinstance(content_key, (UsageKey, LibraryCollectionKey)):
raise ValueError("The object_id must be a CourseKey or a LibraryLocatorV2.")
all_object_tags, taxonomies = get_all_object_tags(content_key)
@@ -301,6 +304,16 @@ def set_exported_object_tags(
create_invalid=True,
taxonomy_export_id=str(taxonomy_export_id),
)
CONTENT_OBJECT_ASSOCIATIONS_CHANGED.send_event(
time=now(),
content_object=ContentObjectChangedData(
object_id=content_key_str,
changes=["tags"],
)
)
# Emit a (deprecated) CONTENT_OBJECT_TAGS_CHANGED event too
CONTENT_OBJECT_TAGS_CHANGED.send_event(
time=now(),
content_object=ContentObjectData(object_id=content_key_str)
@@ -378,7 +391,7 @@ def tag_object(
Replaces the existing ObjectTag entries for the given taxonomy + object_id
with the given list of tags, if the taxonomy can be used by the given object_id.
This is a wrapper around oel_tagging.tag_object that adds emitting the `CONTENT_OBJECT_TAGS_CHANGED` event
This is a wrapper around oel_tagging.tag_object that adds emitting the `CONTENT_OBJECT_ASSOCIATIONS_CHANGED` event
when tagging an object.
tags: A list of the values of the tags from this taxonomy to apply.
@@ -399,6 +412,15 @@ def tag_object(
taxonomy=taxonomy,
tags=tags,
)
CONTENT_OBJECT_ASSOCIATIONS_CHANGED.send_event(
time=now(),
content_object=ContentObjectChangedData(
object_id=object_id,
changes=["tags"],
)
)
# Emit a (deprecated) CONTENT_OBJECT_TAGS_CHANGED event too
CONTENT_OBJECT_TAGS_CHANGED.send_event(
time=now(),
content_object=ContentObjectData(object_id=object_id)

View File

@@ -13,7 +13,7 @@ from urllib.parse import parse_qs, urlparse
import ddt
from django.contrib.auth import get_user_model
from django.core.files.uploadedfile import SimpleUploadedFile
from opaque_keys.edx.locator import BlockUsageLocator, CourseLocator
from opaque_keys.edx.locator import BlockUsageLocator, CourseLocator, LibraryCollectionLocator
from openedx_tagging.core.tagging.models import Tag, Taxonomy
from openedx_tagging.core.tagging.models.system_defined import SystemDefinedTaxonomy
from openedx_tagging.core.tagging.rest_api.v1.serializers import TaxonomySerializer
@@ -111,6 +111,9 @@ class TestTaxonomyObjectsMixin:
)
self.libraryA = str(self.content_libraryA.key)
def _setUp_collection(self):
self.collection_key = str(LibraryCollectionLocator(self.content_libraryA.key, 'test-collection'))
def _setUp_users(self):
"""
Create users for testing
@@ -284,6 +287,7 @@ class TestTaxonomyObjectsMixin:
self._setUp_library()
self._setUp_users()
self._setUp_taxonomies()
self._setUp_collection()
# Clear the rules cache in between test runs to keep query counts consistent.
rules_cache.clear()
@@ -1653,6 +1657,87 @@ class TestObjectTagViewSet(TestObjectTagMixin, APITestCase):
response = self._call_put_request(self.libraryA, taxonomy.pk, ["invalid"])
assert response.status_code == status.HTTP_400_BAD_REQUEST
@ddt.data(
# staffA and staff are staff in collection and can tag using enabled taxonomies
("user", "tA1", ["Tag 1"], status.HTTP_403_FORBIDDEN),
("staffA", "tA1", ["Tag 1"], status.HTTP_200_OK),
("staff", "tA1", ["Tag 1"], status.HTTP_200_OK),
("user", "tA1", [], status.HTTP_403_FORBIDDEN),
("staffA", "tA1", [], status.HTTP_200_OK),
("staff", "tA1", [], status.HTTP_200_OK),
("user", "multiple_taxonomy", ["Tag 1", "Tag 2"], status.HTTP_403_FORBIDDEN),
("staffA", "multiple_taxonomy", ["Tag 1", "Tag 2"], status.HTTP_200_OK),
("staff", "multiple_taxonomy", ["Tag 1", "Tag 2"], status.HTTP_200_OK),
("user", "open_taxonomy", ["tag1"], status.HTTP_403_FORBIDDEN),
("staffA", "open_taxonomy", ["tag1"], status.HTTP_200_OK),
("staff", "open_taxonomy", ["tag1"], status.HTTP_200_OK),
)
@ddt.unpack
def test_tag_collection(self, user_attr, taxonomy_attr, tag_values, expected_status):
"""
Tests that only staff and org level users can tag collections
"""
user = getattr(self, user_attr)
self.client.force_authenticate(user=user)
taxonomy = getattr(self, taxonomy_attr)
response = self._call_put_request(self.collection_key, taxonomy.pk, tag_values)
assert response.status_code == expected_status
if status.is_success(expected_status):
tags_by_taxonomy = response.data[str(self.collection_key)]["taxonomies"]
if tag_values:
response_taxonomy = tags_by_taxonomy[0]
assert response_taxonomy["name"] == taxonomy.name
response_tags = response_taxonomy["tags"]
assert [t["value"] for t in response_tags] == tag_values
else:
assert tags_by_taxonomy == [] # No tags are set from any taxonomy
# Check that re-fetching the tags returns what we set
url = OBJECT_TAG_UPDATE_URL.format(object_id=self.collection_key)
new_response = self.client.get(url, format="json")
assert status.is_success(new_response.status_code)
assert new_response.data == response.data
@ddt.data(
"staffA",
"staff",
)
def test_tag_collection_disabled_taxonomy(self, user_attr):
"""
Nobody can use disabled taxonomies to tag objects
"""
user = getattr(self, user_attr)
self.client.force_authenticate(user=user)
disabled_taxonomy = self.tA2
assert disabled_taxonomy.enabled is False
response = self._call_put_request(self.collection_key, disabled_taxonomy.pk, ["Tag 1"])
assert response.status_code == status.HTTP_403_FORBIDDEN
@ddt.data(
("staffA", "tA1"),
("staff", "tA1"),
("staffA", "multiple_taxonomy"),
("staff", "multiple_taxonomy"),
)
@ddt.unpack
def test_tag_collection_invalid(self, user_attr, taxonomy_attr):
"""
Tests that nobody can add invalid tags to a collection using a closed taxonomy
"""
user = getattr(self, user_attr)
self.client.force_authenticate(user=user)
taxonomy = getattr(self, taxonomy_attr)
response = self._call_put_request(self.collection_key, taxonomy.pk, ["invalid"])
assert response.status_code == status.HTTP_400_BAD_REQUEST
@ddt.data(
("superuser", status.HTTP_200_OK),
("staff", status.HTTP_403_FORBIDDEN),
@@ -1768,10 +1853,14 @@ class TestObjectTagViewSet(TestObjectTagMixin, APITestCase):
@ddt.data(
('staff', 'courseA', 8),
('staff', 'libraryA', 8),
('staff', 'collection_key', 8),
("content_creatorA", 'courseA', 11, False),
("content_creatorA", 'libraryA', 11, False),
("content_creatorA", 'collection_key', 11, False),
("library_staffA", 'libraryA', 11, False), # Library users can only view objecttags, not change them?
("library_staffA", 'collection_key', 11, False),
("library_userA", 'libraryA', 11, False),
("library_userA", 'collection_key', 11, False),
("instructorA", 'courseA', 11),
("course_instructorA", 'courseA', 11),
("course_staffA", 'courseA', 11),

View File

@@ -13,8 +13,11 @@ from rest_framework.exceptions import PermissionDenied, ValidationError
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from openedx_events.content_authoring.data import ContentObjectData
from openedx_events.content_authoring.signals import CONTENT_OBJECT_TAGS_CHANGED
from openedx_events.content_authoring.data import ContentObjectData, ContentObjectChangedData
from openedx_events.content_authoring.signals import (
CONTENT_OBJECT_ASSOCIATIONS_CHANGED,
CONTENT_OBJECT_TAGS_CHANGED,
)
from ...auth import has_view_object_tags_access
from ...api import (
@@ -149,14 +152,24 @@ class ObjectTagOrgView(ObjectTagView):
def update(self, request, *args, **kwargs) -> Response:
"""
Extend the update method to fire CONTENT_OBJECT_TAGS_CHANGED event
Extend the update method to fire CONTENT_OBJECT_ASSOCIATIONS_CHANGED event
"""
response = super().update(request, *args, **kwargs)
if response.status_code == 200:
object_id = kwargs.get('object_id')
CONTENT_OBJECT_ASSOCIATIONS_CHANGED.send_event(
content_object=ContentObjectChangedData(
object_id=object_id,
changes=["tags"],
)
)
# Emit a (deprecated) CONTENT_OBJECT_TAGS_CHANGED event too
CONTENT_OBJECT_TAGS_CHANGED.send_event(
content_object=ContentObjectData(object_id=object_id)
)
return response

View File

@@ -5,7 +5,7 @@ import tempfile
import ddt
from django.test.testcases import TestCase
from fs.osfs import OSFS
from opaque_keys.edx.keys import CourseKey, UsageKey
from opaque_keys.edx.keys import CourseKey, UsageKey, LibraryCollectionKey
from opaque_keys.edx.locator import LibraryLocatorV2
from openedx_tagging.core.tagging.models import ObjectTag
from organizations.models import Organization
@@ -380,6 +380,23 @@ class TestAPIObjectTags(TestGetAllObjectTagsMixin, TestCase):
with self.assertNumQueries(31): # TODO why so high?
self._test_copy_object_tags(src_key, dst_key, expected_tags)
def test_tag_collection(self):
collection_key = LibraryCollectionKey.from_string("lib-collection:orgA:libX:1")
api.tag_object(
object_id=str(collection_key),
taxonomy=self.taxonomy_3,
tags=["Tag 3.1"],
)
with self.assertNumQueries(1):
object_tags, taxonomies = api.get_all_object_tags(collection_key)
assert object_tags == {'lib-collection:orgA:libX:1': {3: ['Tag 3.1']}}
assert taxonomies == {
self.taxonomy_3.id: self.taxonomy_3,
}
class TestExportImportTags(TaggedCourseMixin):
"""

View File

@@ -5,11 +5,11 @@ from __future__ import annotations
from typing import Dict, List, Union
from opaque_keys.edx.keys import CourseKey, UsageKey
from opaque_keys.edx.keys import CourseKey, UsageKey, LibraryCollectionKey
from opaque_keys.edx.locator import LibraryLocatorV2
from openedx_tagging.core.tagging.models import Taxonomy
ContentKey = Union[LibraryLocatorV2, CourseKey, UsageKey]
ContentKey = Union[LibraryLocatorV2, CourseKey, UsageKey, LibraryCollectionKey]
ContextKey = Union[LibraryLocatorV2, CourseKey]
TagValuesByTaxonomyIdDict = Dict[int, List[str]]

View File

@@ -5,7 +5,7 @@ from __future__ import annotations
from edx_django_utils.cache import RequestCache
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey, UsageKey
from opaque_keys.edx.keys import CourseKey, UsageKey, LibraryCollectionKey
from opaque_keys.edx.locator import LibraryLocatorV2
from openedx_tagging.core.tagging.models import Taxonomy
from organizations.models import Organization
@@ -26,8 +26,14 @@ def get_content_key_from_string(key_str: str) -> ContentKey:
except InvalidKeyError:
try:
return UsageKey.from_string(key_str)
except InvalidKeyError as usage_key_error:
raise ValueError("object_id must be a CourseKey, LibraryLocatorV2 or a UsageKey") from usage_key_error
except InvalidKeyError:
try:
return LibraryCollectionKey.from_string(key_str)
except InvalidKeyError as usage_key_error:
raise ValueError(
"object_id must be one of the following "
"keys: CourseKey, LibraryLocatorV2, UsageKey or LibCollectionKey"
) from usage_key_error
def get_context_key_from_key(content_key: ContentKey) -> ContextKey:
@@ -38,6 +44,10 @@ def get_context_key_from_key(content_key: ContentKey) -> ContextKey:
if isinstance(content_key, (CourseKey, LibraryLocatorV2)):
return content_key
# If the content key is a LibraryCollectionKey, return the LibraryLocatorV2
if isinstance(content_key, LibraryCollectionKey):
return content_key.library_key
# If the content key is a UsageKey, return the context key
context_key = content_key.context_key

View File

@@ -631,7 +631,9 @@ class ProgramDataExtender:
ecommerce = EcommerceService()
sku = getattr(required_mode, "sku", None)
if ecommerce.is_enabled(self.user) and sku:
run_mode["upgrade_url"] = ecommerce.get_checkout_page_url(required_mode.sku)
run_mode["upgrade_url"] = ecommerce.get_checkout_page_url(
required_mode.sku, course_run_keys=[self.course_run_key]
)
else:
run_mode["upgrade_url"] = None
else:

View File

@@ -29,4 +29,9 @@ urlpatterns = [
),
])),
])),
path('xblocks/v2/<str:usage_key_str>/', include([
# render one of this XBlock's views (e.g. student_view) for embedding in an iframe
# NOTE: this endpoint is **unstable** and subject to changes after Sumac
re_path(r'^embed/(?P<view_name>[\w\-]+)/$', views.embed_block_view),
])),
]

View File

@@ -1,12 +1,16 @@
"""
Views that implement a RESTful API for interacting with XBlocks.
"""
import itertools
import json
from common.djangoapps.util.json_request import JsonResponse
from corsheaders.signals import check_request_enabled
from django.conf import settings
from django.contrib.auth import get_user_model
from django.db.transaction import atomic
from django.http import Http404
from django.shortcuts import render
from django.utils.translation import gettext as _
from django.views.decorators.clickjacking import xframe_options_exempt
from django.views.decorators.csrf import csrf_exempt
@@ -21,6 +25,7 @@ from xblock.fields import Scope
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import UsageKey
import openedx.core.djangoapps.site_configuration.helpers as configuration_helpers
from openedx.core.djangoapps.xblock.learning_context.manager import get_learning_context_impl
from openedx.core.lib.api.view_utils import view_auth_classes
from ..api import (
@@ -87,6 +92,47 @@ def render_block_view(request, usage_key_str, view_name):
return Response(response_data)
@api_view(['GET'])
@view_auth_classes(is_authenticated=False)
@permission_classes((permissions.AllowAny, )) # Permissions are handled at a lower level, by the learning context
@xframe_options_exempt
def embed_block_view(request, usage_key_str, view_name):
"""
Render the given XBlock in an <iframe>
Unstable - may change after Sumac
"""
try:
usage_key = UsageKey.from_string(usage_key_str)
except InvalidKeyError as e:
raise NotFound(invalid_not_found_fmt.format(usage_key=usage_key_str)) from e
try:
block = load_block(usage_key, request.user)
except NoSuchUsage as exc:
raise NotFound(f"{usage_key} not found") from exc
fragment = _render_block_view(block, view_name, request.user)
handler_urls = {
str(key): _get_handler_url(key, 'handler_name', request.user)
for key in itertools.chain([block.scope_ids.usage_id], getattr(block, 'children', []))
}
lms_root_url = configuration_helpers.get_value('LMS_ROOT_URL', settings.LMS_ROOT_URL)
context = {
'fragment': fragment,
'handler_urls_json': json.dumps(handler_urls),
'lms_root_url': lms_root_url,
'is_development': settings.DEBUG,
}
response = render(request, 'xblock_v2/xblock_iframe.html', context, content_type='text/html')
# Only allow this iframe be embedded if the parent is in the CORS_ORIGIN_WHITELIST
cors_origin_whitelist = configuration_helpers.get_value('CORS_ORIGIN_WHITELIST', settings.CORS_ORIGIN_WHITELIST)
response["Content-Security-Policy"] = f"frame-ancestors 'self' {' '.join(cors_origin_whitelist)};"
return response
@api_view(['GET'])
@view_auth_classes(is_authenticated=False)
def get_handler_url(request, usage_key_str, handler_name):
@@ -185,8 +231,8 @@ class BlockFieldsView(APIView):
View to get/edit the field values of an XBlock as JSON (in the v2 runtime)
This class mimics the functionality of xblock_handler in block.py (for v1 xblocks), but for v2 xblocks.
However, it only implements the exact subset of functionality needed to support the v2 editors (from
the frontend-lib-content-components project). As such, it only supports GET and POST, and only the
However, it only implements the exact subset of functionality needed to support the v2 editors (in
the Course Authoring MFE). As such, it only supports GET and POST, and only the
POSTing of data/metadata fields.
"""

View File

@@ -91,7 +91,7 @@ class ContentTypeGatingPartition(UserPartition):
if expiration_datetime and expiration_datetime < datetime.datetime.now(pytz.UTC):
ecommerce_checkout_link = None
else:
ecommerce_checkout_link = self._get_checkout_link(user, verified_mode.sku)
ecommerce_checkout_link = self._get_checkout_link(user, verified_mode.sku, str(course_key))
request = crum.get_current_request()
@@ -118,11 +118,11 @@ class ContentTypeGatingPartition(UserPartition):
else:
return _("Graded assessments are available to Verified Track learners. Upgrade to Unlock.")
def _get_checkout_link(self, user, sku):
def _get_checkout_link(self, user, sku, course_run_key):
ecomm_service = EcommerceService()
ecommerce_checkout = ecomm_service.is_enabled(user)
if ecommerce_checkout and sku:
return ecomm_service.get_checkout_page_url(sku) or ''
return ecomm_service.get_checkout_page_url(sku, course_run_keys=[course_run_key]) or ''
def _get_course_key_from_course_block(self, block):
"""

View File

@@ -8,6 +8,7 @@ import pytz
from django.conf import settings
from django.utils.translation import get_language
from django.utils.translation import gettext as _
from edx_django_utils.plugins import pluggable_override
from common.djangoapps.course_modes.models import format_course_price, get_course_prices
from lms.djangoapps.experiments.models import ExperimentData
@@ -73,6 +74,7 @@ def _get_discount_prices(user, course, assume_discount=False):
return format_course_price(base_price), None, None
@pluggable_override("OVERRIDE_GENERATE_OFFER_DATA")
def generate_offer_data(user, course):
"""
Create a dictionary of information about the current discount offer.

View File

@@ -142,3 +142,7 @@ django-storages<1.14.4
# We are pinning this until after all the smaller migrations get handled and then we can migrate this all at once.
# Ticket to unpin: https://github.com/edx/edx-arch-experiments/issues/760
social-auth-app-django<=5.4.1
# Temporary pin as to prevent a new version of edx-name-affirmation from being merged before we modify it to work
# properly along with work in this PR: https://github.com/openedx/edx-platform/pull/35468
edx-name-affirmation==2.4.0

View File

@@ -483,8 +483,10 @@ edx-i18n-tools==1.5.0
edx-milestones==0.6.0
# via -r requirements/edx/kernel.in
edx-name-affirmation==2.4.0
# via -r requirements/edx/kernel.in
edx-opaque-keys[django]==2.10.0
# via
# -c requirements/edx/../constraints.txt
# -r requirements/edx/kernel.in
edx-opaque-keys[django]==2.11.0
# via
# -r requirements/edx/kernel.in
# -r requirements/edx/paver.txt

View File

@@ -768,9 +768,10 @@ edx-milestones==0.6.0
# -r requirements/edx/testing.txt
edx-name-affirmation==2.4.0
# via
# -c requirements/edx/../constraints.txt
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt
edx-opaque-keys[django]==2.10.0
edx-opaque-keys[django]==2.11.0
# via
# -r requirements/edx/doc.txt
# -r requirements/edx/testing.txt

View File

@@ -563,8 +563,10 @@ edx-i18n-tools==1.5.0
edx-milestones==0.6.0
# via -r requirements/edx/base.txt
edx-name-affirmation==2.4.0
# via -r requirements/edx/base.txt
edx-opaque-keys[django]==2.10.0
# via
# -c requirements/edx/../constraints.txt
# -r requirements/edx/base.txt
edx-opaque-keys[django]==2.11.0
# via
# -r requirements/edx/base.txt
# edx-bulk-grades

View File

@@ -12,7 +12,7 @@ charset-normalizer==2.0.12
# requests
dnspython==2.6.1
# via pymongo
edx-opaque-keys==2.10.0
edx-opaque-keys==2.11.0
# via -r requirements/edx/paver.in
idna==3.7
# via requests

View File

@@ -589,8 +589,10 @@ edx-lint==5.3.7
edx-milestones==0.6.0
# via -r requirements/edx/base.txt
edx-name-affirmation==2.4.0
# via -r requirements/edx/base.txt
edx-opaque-keys[django]==2.10.0
# via
# -c requirements/edx/../constraints.txt
# -r requirements/edx/base.txt
edx-opaque-keys[django]==2.11.0
# via
# -r requirements/edx/base.txt
# edx-bulk-grades