Merge branch 'master' into dependabot/github_actions/codecov/codecov-action-5

This commit is contained in:
Usama Sadiq
2025-07-25 12:03:06 +05:00
committed by GitHub
1059 changed files with 59027 additions and 45774 deletions

View File

@@ -142,8 +142,6 @@ workflow.AssessmentWorkflowStep:
# Via edx-celeryutils
celery_utils.ChordData:
".. no_pii:": "No PII"
celery_utils.FailedTask:
".. no_pii:": "No PII"
# Via completion XBlock
completion.BlockCompletion:

View File

@@ -22,10 +22,10 @@ omit =
lms/envs/*
lms/djangoapps/*/migrations/*
lms/djangoapps/*/features/*
common/djangoapps/terrain/*
common/djangoapps/*/migrations/*
openedx/core/djangoapps/*/migrations/*
openedx/core/djangoapps/debug/*
openedx/envs/*
openedx/features/*/migrations/*
concurrency=multiprocessing

View File

@@ -21,10 +21,10 @@ omit =
lms/envs/*
lms/djangoapps/*/migrations/*
lms/djangoapps/*/features/*
common/djangoapps/terrain/*
common/djangoapps/*/migrations/*
openedx/core/djangoapps/*/migrations/*
openedx/core/djangoapps/debug/*
openedx/envs/*
openedx/features/*/migrations/*
concurrency=multiprocessing

View File

@@ -20,6 +20,9 @@ cms/envs/private.py
### Python artifacts
**/*.pyc
**/__pycache__
.venv
venv
### Editor and IDE artifacts
**/*~
@@ -148,5 +151,3 @@ openedx/core/djangoapps/django_comment_common/comment_client/python
# Locally generated PII reports
**/pii_report
/Dockerfile

View File

@@ -64,7 +64,7 @@
# SERIOUSLY.
#
# ------------------------------
# Generated by edx-lint version: 5.3.0
# Generated by edx-lint version: 5.6.0
# ------------------------------
[*]
end_of_line = lf
@@ -97,4 +97,4 @@ max_line_length = 72
[*.rst]
max_line_length = 79
# eecef7d3f7f334de2348fe1b4b0b48d605f7dcab
# 3eb1e01bd9ba6cdf1e5d0a493581c4ea14404b67

View File

@@ -1,70 +0,0 @@
# Vendor files and generated test artifacts
**/vendor
test_root/staticfiles
# Vendor files living outside the /vendor/ dir
*.min.js
*-min.js
*.nocache.js
**/bootstrap*.js
**/jquery*.js
**/d3*.js
# Translations files
**/static/js/i18n
# Gitignored xmodule stuff
common/static/xmodule
# Symlinks into xmodule/js
cms/static/xmodule_js
lms/static/xmodule_js
# Mako templates that generate .js files
cms/djangoapps/pipeline_js/templates
# These are es2015 spec files that used to be in an ignored path.
# Now they live with the rest of the code, but we want to ignore them
# until the surrounding code is es2015 and we have a chance to clean them.
# We need to ignore them here, because es2015 will cause a parse error
# even if we add an eslint-disable line to the file.
cms/static/js/spec/models/course_spec.js
cms/static/js/spec/models/metadata_spec.js
cms/static/js/spec/models/section_spec.js
cms/static/js/spec/models/settings_course_grader_spec.js
cms/static/js/spec/models/settings_grading_spec.js
cms/static/js/spec/models/textbook_spec.js
cms/static/js/spec/models/upload_spec.js
cms/static/js/spec/views/assets_squire_spec.js
cms/static/js/spec/views/course_info_spec.js
cms/static/js/spec/views/metadata_edit_spec.js
cms/static/js/spec/views/textbook_spec.js
cms/static/js/spec/views/upload_spec.js
xmodule/capa/tests/test_files/js/test_problem_display.js
xmodule/capa/tests/test_files/js/test_problem_generator.js
xmodule/capa/tests/test_files/js/test_problem_grader.js
xmodule/capa/tests/test_files/js/xproblem.js
lms/static/js/spec/calculator_spec.js
lms/static/js/spec/courseware_spec.js
lms/static/js/spec/feedback_form_spec.js
lms/static/js/spec/helper.js
lms/static/js/spec/histogram_spec.js
lms/static/js/spec/modules/tab_spec.js
lms/static/js/spec/requirejs_spec.js
xmodule/js/spec/annotatable/display_spec.js
xmodule/js/spec/capa/display_spec.js
xmodule/js/spec/html/edit_spec.js
xmodule/js/spec/problem/edit_spec_hint.js
xmodule/js/spec/problem/edit_spec.js
xmodule/js/spec/tabs/edit.js
xmodule/js/public/js
xmodule/assets/*/public/js
!**/.eslintrc.js

View File

@@ -1,77 +0,0 @@
{
"extends": "@edx/eslint-config",
"globals": { // Try to avoid adding any new globals.
// Old compatibility things and hacks
"edx": true,
"XBlock": true,
// added by Django i18n tools
"gettext": true,
"ngettext": true,
// added by jasmine-jquery
"loadFixtures": true,
"appendLoadFixtures": true,
"readFixtures": true,
"setFixtures": true,
"appendSetFixtures": true,
"spyOnEvent": true,
// used by our requirejs implementation
"RequireJS": true,
// enable jquery
"$": true
},
"rules": {
"func-names": "off",
"indent": ["error", 4],
"react/jsx-indent": ["error", 4],
"react/jsx-indent-props": ["error", 4],
"new-cap": "off",
"no-else-return": "off",
"no-shadow": "error",
"object-curly-spacing": ["error", "never"],
"one-var": "off",
"one-var-declaration-per-line": ["error", "initializations"],
"space-before-function-paren": ["error", "never"],
"strict": "off",
// Temporary Rules (Will be removed one-by-one to minimize file changes)
"block-scoped-var": "off",
"camelcase": "off",
"comma-dangle": "off",
"consistent-return": "off",
"eqeqeq": "off",
"function-call-argument-newline": "off",
"function-paren-newline": "off",
"import/extensions": "off",
"import/no-amd": "off",
"import/no-dynamic-require": "off",
"import/no-unresolved": "off",
"max-len": "off",
"no-console": "off",
"no-lonely-if": "off",
"no-param-reassign": "off",
"no-proto": "off",
"no-prototype-builtins": "off",
"no-redeclare": "off",
"no-restricted-globals": "off",
"no-restricted-syntax": "off",
"no-throw-literal": "off",
"no-undef": "off",
"no-underscore-dangle": "off",
"no-unused-vars": "off",
"no-use-before-define": "off",
"no-useless-escape": "off",
"no-var": "off",
"object-shorthand": "off",
"prefer-arrow-callback": "off",
"prefer-destructuring": "off",
"prefer-rest-params": "off",
"prefer-template": "off",
"radix": "off",
"react/prop-types": "off",
"vars-on-top": "off"
}
}

13
.github/CODEOWNERS vendored
View File

@@ -1,9 +1,5 @@
# This does not cover all the code in edx-platform but it's a good start.
# Ensure that the team responsible for upgrades sees any PRs that would
# add GitHub-hosted dependencies to that platform.
requirements/edx/github.in @openedx/2u-arbi-bom
# Core
common/djangoapps/student/
common/djangoapps/student/models/__init__.py @openedx/2u-tnl
@@ -22,7 +18,7 @@ openedx/core/djangoapps/enrollments/ @openedx/2U-
openedx/core/djangoapps/heartbeat/
openedx/core/djangoapps/oauth_dispatch
openedx/core/djangoapps/user_api/ @openedx/2U-aperture
openedx/core/djangoapps/user_authn/ @openedx/2U-vanguards
openedx/core/djangoapps/user_authn/ @openedx/2U-infinity
openedx/core/djangoapps/verified_track_content/ @openedx/2u-infinity
openedx/features/course_experience/
xmodule/
@@ -58,3 +54,10 @@ lms/templates/dashboard.html @openedx/ax
# Ensure minimal.yml stays minimal, this could be a team in the future
# but it's just me for now, others can sign up if they care as well.
lms/envs/minimal.yml @feanil
# Ensure that un-necessary changes don't happen to the settings files as we're cleaning them up.
lms/envs/production.py @feanil @kdmccormick
cms/envs/production.py @feanil @kdmccormick
# Ensure that this file is only used when strictly necessary
requirements/edx/github.in @feanil @kdmccormick

View File

@@ -1,39 +1,45 @@
// This file is written in "JSON5" (https://json5.org/) so that we can use comments.
{
"extends": [
"config:base",
"schedule:weekly",
":automergeLinters",
":automergeMinor",
":automergeTesters",
":enableVulnerabilityAlerts",
":semanticCommits",
":updateNotScheduled"
extends: [
'config:recommended',
'schedule:weekly',
':automergeLinters',
':automergeMinor',
':automergeTesters',
':enableVulnerabilityAlerts',
':semanticCommits',
':updateNotScheduled',
],
"packageRules": [
packageRules: [
{
"matchDepTypes": [
"devDependencies"
matchDepTypes: [
'devDependencies',
],
"matchUpdateTypes": [
"lockFileMaintenance",
"minor",
"patch",
"pin"
matchUpdateTypes: [
'lockFileMaintenance',
'minor',
'patch',
'pin',
],
"automerge": true
automerge: true,
},
{
"matchPackagePatterns": ["@edx", "@openedx"],
"matchUpdateTypes": ["minor", "patch"],
"automerge": true
}
matchUpdateTypes: [
'minor',
'patch',
],
automerge: true,
matchPackageNames: [
'/@edx/',
'/@openedx/',
],
},
],
ignoreDeps: [
'karma-spec-reporter',
],
timezone: 'America/New_York',
prConcurrentLimit: 3,
enabledManagers: [
'npm',
],
// When adding an ignoreDep, please include a reason and a public link that we can use to follow up and ensure
// that the ignoreDep is removed.
// This can be done as a comment within the ignoreDeps list.
"ignoreDeps": [],
"timezone": "America/New_York",
"prConcurrentLimit": 3,
"enabledManagers": ["npm"]
}

View File

@@ -17,4 +17,3 @@ on:
jobs:
add_remove_labels:
uses: openedx/.github/.github/workflows/add-remove-label-on-comment.yml@master

View File

@@ -14,27 +14,23 @@ jobs:
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Install repo-tools
run: pip install edx-repo-tools[find_dependencies]
- name: Install setuptool
run: pip install setuptools
run: pip install setuptools
- name: Run Python script
run: |
find_python_dependencies \
--req-file requirements/edx/base.txt \
--req-file requirements/edx/testing.txt \
--ignore https://github.com/edx/codejail-includes \
--ignore https://github.com/edx/braze-client \
--ignore https://github.com/edx/edx-name-affirmation \
--ignore https://github.com/mitodl/edx-sga \
--ignore https://github.com/edx/token-utils \
--ignore https://github.com/open-craft/xblock-poll

View File

@@ -1,43 +0,0 @@
name: Push Docker Images
on:
push:
branches:
- master
jobs:
# Push image to GitHub Packages.
# See also https://docs.docker.com/docker-hub/builds/
push:
runs-on: ubuntu-latest
if: github.event_name == 'push'
strategy:
matrix:
variant:
- "lms_dev"
- "cms_dev"
- "cms"
- "lms"
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Build and push lms/cms base docker images
env:
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
run: make docker_tag_build_push_${{matrix.variant}}

View File

@@ -13,7 +13,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest]
node-version: [18, 20]
node-version: [20]
python-version:
- "3.11"
@@ -26,9 +26,10 @@ jobs:
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}
cache: 'npm'
- name: Setup npm
run: npm i -g npm@10.5.x
run: npm i -g npm@10.7.x
- name: Install Firefox 123.0
run: |
@@ -63,14 +64,12 @@ jobs:
run: |
make base-requirements
- uses: c-hive/gha-npm-cache@v1
- name: Install npm
run: npm ci
- name: Run JS Tests
env:
TEST_SUITE: js-unit
SCRIPT_TO_RUN: ./scripts/generic-ci-tests.sh
run: |
npm install -g jest
xvfb-run --auto-servernum ./scripts/all-tests.sh
npm run test
- name: Save Job Artifacts
uses: actions/upload-artifact@v4

View File

@@ -1,43 +0,0 @@
name: Push CI Runner Docker Image
on:
workflow_dispatch:
schedule:
- cron: "0 1 * * 3"
jobs:
push:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
# This has to happen after checkout in order for gh to work.
- name: "Cancel scheduled job on forks"
if: github.repository != 'openedx/edx-platform' && github.event_name == 'schedule'
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
run: |
gh run cancel "${{ github.run_id }}"
gh run watch "${{ github.run_id }}"
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.TOOLS_EDX_ECR_USER_AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.TOOLS_EDX_ECR_USER_AWS_SECRET_ACCESS_KEY }}
aws-region: us-east-1
- name: Log in to ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2
- name: Build, tag, and push image to Amazon ECR
env:
ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
ECR_REPOSITORY: actions-runner
IMAGE_TAG: latest
run: |
docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG -f scripts/ci-runner.Dockerfile .
docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG

View File

@@ -16,13 +16,13 @@ jobs:
- module-name: lms-1
path: "lms/djangoapps/badges/ lms/djangoapps/branding/ lms/djangoapps/bulk_email/ lms/djangoapps/bulk_enroll/ lms/djangoapps/bulk_user_retirement/ lms/djangoapps/ccx/ lms/djangoapps/certificates/ lms/djangoapps/commerce/ lms/djangoapps/course_api/ lms/djangoapps/course_blocks/ lms/djangoapps/course_home_api/ lms/djangoapps/course_wiki/ lms/djangoapps/coursewarehistoryextended/ lms/djangoapps/debug/ lms/djangoapps/courseware/ lms/djangoapps/course_goals/ lms/djangoapps/rss_proxy/"
- module-name: lms-2
path: "lms/djangoapps/gating/ lms/djangoapps/grades/ lms/djangoapps/instructor/ lms/djangoapps/instructor_analytics/ lms/djangoapps/discussion/ lms/djangoapps/edxnotes/ lms/djangoapps/email_marketing/ lms/djangoapps/experiments/ lms/djangoapps/instructor_task/ lms/djangoapps/learner_dashboard/ lms/djangoapps/learner_home/ lms/djangoapps/lms_initialization/ lms/djangoapps/lms_xblock/ lms/djangoapps/lti_provider/ lms/djangoapps/mailing/ lms/djangoapps/mobile_api/ lms/djangoapps/monitoring/ lms/djangoapps/ora_staff_grader/ lms/djangoapps/program_enrollments/ lms/djangoapps/rss_proxy lms/djangoapps/static_template_view/ lms/djangoapps/staticbook/ lms/djangoapps/support/ lms/djangoapps/survey/ lms/djangoapps/teams/ lms/djangoapps/tests/ lms/djangoapps/user_tours/ lms/djangoapps/verify_student/ lms/djangoapps/mfe_config_api/ lms/envs/ lms/lib/ lms/tests.py"
path: "lms/djangoapps/gating/ lms/djangoapps/grades/ lms/djangoapps/instructor/ lms/djangoapps/instructor_analytics/ lms/djangoapps/discussion/ lms/djangoapps/edxnotes/ lms/djangoapps/experiments/ lms/djangoapps/instructor_task/ lms/djangoapps/learner_dashboard/ lms/djangoapps/learner_home/ lms/djangoapps/lms_initialization/ lms/djangoapps/lms_xblock/ lms/djangoapps/lti_provider/ lms/djangoapps/mailing/ lms/djangoapps/mobile_api/ lms/djangoapps/monitoring/ lms/djangoapps/ora_staff_grader/ lms/djangoapps/program_enrollments/ lms/djangoapps/rss_proxy lms/djangoapps/static_template_view/ lms/djangoapps/staticbook/ lms/djangoapps/support/ lms/djangoapps/survey/ lms/djangoapps/teams/ lms/djangoapps/tests/ lms/djangoapps/user_tours/ lms/djangoapps/verify_student/ lms/djangoapps/mfe_config_api/ lms/envs/ lms/lib/ lms/tests.py"
- module-name: openedx-1
path: "openedx/core/types/ openedx/core/djangoapps/ace_common/ openedx/core/djangoapps/agreements/ openedx/core/djangoapps/api_admin/ openedx/core/djangoapps/auth_exchange/ openedx/core/djangoapps/bookmarks/ openedx/core/djangoapps/cache_toolbox/ openedx/core/djangoapps/catalog/ openedx/core/djangoapps/ccxcon/ openedx/core/djangoapps/commerce/ openedx/core/djangoapps/common_initialization/ openedx/core/djangoapps/common_views/ openedx/core/djangoapps/config_model_utils/ openedx/core/djangoapps/content/ openedx/core/djangoapps/content_libraries/ openedx/core/djangoapps/content_staging/ openedx/core/djangoapps/contentserver/ openedx/core/djangoapps/cookie_metadata/ openedx/core/djangoapps/cors_csrf/ openedx/core/djangoapps/course_apps/ openedx/core/djangoapps/course_date_signals/ openedx/core/djangoapps/course_groups/ openedx/core/djangoapps/courseware_api/ openedx/core/djangoapps/crawlers/ openedx/core/djangoapps/credentials/ openedx/core/djangoapps/credit/ openedx/core/djangoapps/dark_lang/ openedx/core/djangoapps/debug/ openedx/core/djangoapps/discussions/ openedx/core/djangoapps/django_comment_common/ openedx/core/djangoapps/embargo/ openedx/core/djangoapps/enrollments/ openedx/core/djangoapps/external_user_ids/ openedx/core/djangoapps/zendesk_proxy/ openedx/core/djangolib/ openedx/core/lib/ openedx/core/tests/ openedx/core/djangoapps/course_live/"
path: "openedx/core/types/ openedx/core/djangoapps/ace_common/ openedx/core/djangoapps/agreements/ openedx/core/djangoapps/api_admin/ openedx/core/djangoapps/auth_exchange/ openedx/core/djangoapps/bookmarks/ openedx/core/djangoapps/cache_toolbox/ openedx/core/djangoapps/catalog/ openedx/core/djangoapps/ccxcon/ openedx/core/djangoapps/commerce/ openedx/core/djangoapps/common_initialization/ openedx/core/djangoapps/common_views/ openedx/core/djangoapps/config_model_utils/ openedx/core/djangoapps/content/ openedx/core/djangoapps/content_libraries/ openedx/core/djangoapps/content_staging/ openedx/core/djangoapps/contentserver/ openedx/core/djangoapps/cookie_metadata/ openedx/core/djangoapps/cors_csrf/ openedx/core/djangoapps/course_apps/ openedx/core/djangoapps/course_date_signals/ openedx/core/djangoapps/course_groups/ openedx/core/djangoapps/courseware_api/ openedx/core/djangoapps/crawlers/ openedx/core/djangoapps/credentials/ openedx/core/djangoapps/credit/ openedx/core/djangoapps/dark_lang/ openedx/core/djangoapps/debug/ openedx/core/djangoapps/discussions/ openedx/core/djangoapps/django_comment_common/ openedx/core/djangoapps/embargo/ openedx/core/djangoapps/enrollments/ openedx/core/djangoapps/external_user_ids/ openedx/core/djangoapps/zendesk_proxy/ openedx/core/djangolib/ openedx/core/lib/ openedx/core/djangoapps/course_live/"
- module-name: openedx-2
path: "openedx/core/djangoapps/geoinfo/ openedx/core/djangoapps/header_control/ openedx/core/djangoapps/heartbeat/ openedx/core/djangoapps/lang_pref/ openedx/core/djangoapps/models/ openedx/core/djangoapps/monkey_patch/ openedx/core/djangoapps/oauth_dispatch/ openedx/core/djangoapps/olx_rest_api/ openedx/core/djangoapps/password_policy/ openedx/core/djangoapps/plugin_api/ openedx/core/djangoapps/plugins/ openedx/core/djangoapps/profile_images/ openedx/core/djangoapps/programs/ openedx/core/djangoapps/safe_sessions/ openedx/core/djangoapps/schedules/ openedx/core/djangoapps/service_status/ openedx/core/djangoapps/session_inactivity_timeout/ openedx/core/djangoapps/signals/ openedx/core/djangoapps/site_configuration/ openedx/core/djangoapps/system_wide_roles/ openedx/core/djangoapps/theming/ openedx/core/djangoapps/user_api/ openedx/core/djangoapps/user_authn/ openedx/core/djangoapps/util/ openedx/core/djangoapps/verified_track_content/ openedx/core/djangoapps/video_config/ openedx/core/djangoapps/video_pipeline/ openedx/core/djangoapps/waffle_utils/ openedx/core/djangoapps/xblock/ openedx/core/djangoapps/xmodule_django/ openedx/core/tests/ openedx/features/ openedx/testing/ openedx/tests/ openedx/core/djangoapps/notifications/ openedx/core/djangoapps/staticfiles/ openedx/core/djangoapps/content_tagging/"
path: "openedx/core/djangoapps/geoinfo/ openedx/core/djangoapps/header_control/ openedx/core/djangoapps/heartbeat/ openedx/core/djangoapps/lang_pref/ openedx/core/djangoapps/models/ openedx/core/djangoapps/monkey_patch/ openedx/core/djangoapps/oauth_dispatch/ openedx/core/djangoapps/olx_rest_api/ openedx/core/djangoapps/password_policy/ openedx/core/djangoapps/plugin_api/ openedx/core/djangoapps/plugins/ openedx/core/djangoapps/profile_images/ openedx/core/djangoapps/programs/ openedx/core/djangoapps/safe_sessions/ openedx/core/djangoapps/schedules/ openedx/core/djangoapps/service_status/ openedx/core/djangoapps/session_inactivity_timeout/ openedx/core/djangoapps/signals/ openedx/core/djangoapps/site_configuration/ openedx/core/djangoapps/system_wide_roles/ openedx/core/djangoapps/theming/ openedx/core/djangoapps/user_api/ openedx/core/djangoapps/user_authn/ openedx/core/djangoapps/util/ openedx/core/djangoapps/verified_track_content/ openedx/core/djangoapps/video_config/ openedx/core/djangoapps/video_pipeline/ openedx/core/djangoapps/waffle_utils/ openedx/core/djangoapps/xblock/ openedx/core/djangoapps/xmodule_django/ openedx/core/tests/ openedx/features/ openedx/testing/ openedx/tests/ openedx/envs/ openedx/core/djangoapps/notifications/ openedx/core/djangoapps/staticfiles/ openedx/core/djangoapps/content_tagging/"
- module-name: common
path: "common pavelib"
path: "common"
- module-name: cms
path: "cms"
- module-name: xmodule

View File

@@ -61,14 +61,26 @@ jobs:
run: |
make test-requirements
- name: Install npm
env:
PIP_SRC: ${{ runner.temp }}
run: npm ci
- name: Install python packages
env:
PIP_SRC: ${{ runner.temp }}
run: |
pip install -e .
- name: Run Quality Tests
env:
TEST_SUITE: quality
SCRIPT_TO_RUN: ./scripts/generic-ci-tests.sh
PIP_SRC: ${{ runner.temp }}
TARGET_BRANCH: ${{ github.base_ref }}
run: |
./scripts/all-tests.sh
make pycodestyle
make xsslint
make pii_check
make check_keywords
- name: Save Job Artifacts
if: always()

View File

@@ -15,8 +15,8 @@ jobs:
os: [ubuntu-24.04]
python-version:
- "3.11"
node-version: [18, 20]
npm-version: [10.5.x]
node-version: [20]
npm-version: [10.7.x]
mongo-version:
- "7.0"

View File

@@ -34,7 +34,6 @@
"paths": [
"lms/djangoapps/discussion/",
"lms/djangoapps/edxnotes/",
"lms/djangoapps/email_marketing/",
"lms/djangoapps/experiments/"
]
},
@@ -239,6 +238,7 @@
"cms/djangoapps/cms_user_tasks/",
"cms/djangoapps/course_creators/",
"cms/djangoapps/export_course_metadata/",
"cms/djangoapps/import_from_modulestore/",
"cms/djangoapps/maintenance/",
"cms/djangoapps/models/",
"cms/djangoapps/pipeline_js/",
@@ -256,15 +256,13 @@
"common-with-lms": {
"settings": "lms.envs.test",
"paths": [
"common/djangoapps/",
"pavelib/"
"common/djangoapps/"
]
},
"common-with-cms": {
"settings": "cms.envs.test",
"paths": [
"common/djangoapps/",
"pavelib/"
"common/djangoapps/"
]
},
"xmodule-with-lms": {

View File

@@ -71,29 +71,15 @@ jobs:
- name: install system requirements
run: |
sudo apt-get update && sudo apt-get install libmysqlclient-dev libxmlsec1-dev lynx openssl
sudo apt-get update && sudo apt-get install libmysqlclient-dev libxmlsec1-dev lynx
# This is needed until the ENABLE_BLAKE2B_HASHING can be removed and we
# can stop using MD4 by default.
- name: enable md4 hashing in libssl
run: |
cat <<EOF | sudo tee /etc/ssl/openssl.cnf
# Use this in order to automatically load providers.
openssl_conf = openssl_init
[openssl_init]
providers = provider_sect
[provider_sect]
default = default_sect
legacy = legacy_sect
[default_sect]
activate = 1
[legacy_sect]
activate = 1
EOF
# We pull this image a lot, and Dockerhub will rate limit us if we pull too often.
# This is an attempt to cache the image for better performance and to work around that.
# It will cache all pulled images, so if we add new images to this we'll need to update the key.
- name: Cache Docker images
uses: ScribeMD/docker-cache@0.5.0
with:
key: docker-${{ runner.os }}-mongo-${{ matrix.mongo-version }}
- name: Start MongoDB
uses: supercharge/mongodb-github-action@1.11.0
@@ -180,7 +166,7 @@ jobs:
shell: bash
run: |
echo "root_cms_unit_tests_count=$(pytest --disable-warnings --collect-only --ds=cms.envs.test cms/ -q | head -n -2 | wc -l)" >> $GITHUB_ENV
echo "root_lms_unit_tests_count=$(pytest --disable-warnings --collect-only --ds=lms.envs.test lms/ openedx/ common/djangoapps/ xmodule/ pavelib/ -q | head -n -2 | wc -l)" >> $GITHUB_ENV
echo "root_lms_unit_tests_count=$(pytest --disable-warnings --collect-only --ds=lms.envs.test lms/ openedx/ common/djangoapps/ xmodule/ -q | head -n -2 | wc -l)" >> $GITHUB_ENV
- name: get GHA unit test paths
shell: bash
@@ -219,7 +205,6 @@ jobs:
to add any missing apps and match the count. for more details please take a look at scripts/gha-shards-readme.md"
exit 1
# This job aggregates test results. It's the required check for branch protection.
# https://github.com/marketplace/actions/alls-green#why
# https://github.com/orgs/community/discussions/33579

2
.nvmrc
View File

@@ -1 +1 @@
18
20

View File

@@ -1,7 +1,7 @@
source_path: ./
report_path: pii_report
safelist_path: .annotation_safe_list.yml
coverage_target: 94.5
coverage_target: 85.3
# See OEP-30 for more information on these values and what they mean:
# https://open-edx-proposals.readthedocs.io/en/latest/oep-0030-arch-pii-markup-and-auditing.html#docstring-annotations
annotations:

View File

@@ -3,7 +3,7 @@ version: 2
build:
os: "ubuntu-22.04"
tools:
python: "3.12"
python: "3.11"
sphinx:
configuration: docs/conf.py

View File

@@ -1,5 +0,0 @@
xmodule/css
common/static/sass/bourbon
common/static/xmodule/modules/css
common/test/test-theme
lms/static/sass/vendor

View File

@@ -1,8 +1,8 @@
We don't maintain a detailed changelog. For details of changes, please see
either the `edX Release Notes`_ or the `GitHub commit history`_.
either the `Open edX Release Notes`_ or the `GitHub commit history`_.
.. _edX Release Notes: https://edx.readthedocs.io/projects/open-edx-release-notes/en/latest/
.. _Open edX Release Notes: https://docs.openedx.org/en/latest/community/release_notes/index.html
.. _GitHub commit history: https://github.com/openedx/edx-platform/commits/master

View File

@@ -1,200 +0,0 @@
FROM ubuntu:focal as minimal-system
# Warning: This file is experimental.
#
# Short-term goals:
# * Be a suitable replacement for the `edxops/edxapp` image in devstack (in progress).
# * Take advantage of Docker caching layers: aim to put commands in order of
# increasing cache-busting frequency.
# * Related to ^, use no Ansible or Paver.
# Long-term goal:
# * Be a suitable base for production LMS and CMS images (THIS IS NOT YET THE CASE!).
ARG DEBIAN_FRONTEND=noninteractive
ARG SERVICE_VARIANT
ARG SERVICE_PORT
# Env vars: paver
# We intentionally don't use paver in this Dockerfile, but Devstack may invoke paver commands
# during provisioning. Enabling NO_PREREQ_INSTALL tells paver not to re-install Python
# requirements for every paver command, potentially saving a lot of developer time.
ARG NO_PREREQ_INSTALL='1'
# Env vars: locale
ENV LANG='en_US.UTF-8'
ENV LANGUAGE='en_US:en'
ENV LC_ALL='en_US.UTF-8'
# Env vars: configuration
ENV CONFIG_ROOT='/edx/etc'
ENV LMS_CFG="$CONFIG_ROOT/lms.yml"
ENV CMS_CFG="$CONFIG_ROOT/cms.yml"
# Env vars: path
ENV VIRTUAL_ENV="/edx/app/edxapp/venvs/edxapp"
ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"
ENV PATH="/edx/app/edxapp/edx-platform/node_modules/.bin:${PATH}"
ENV PATH="/edx/app/edxapp/edx-platform/bin:${PATH}"
ENV PATH="/edx/app/edxapp/nodeenv/bin:${PATH}"
WORKDIR /edx/app/edxapp/edx-platform
# Create user before assigning any directory ownership to it.
RUN useradd -m --shell /bin/false app
# Use debconf to set locales to be generated when the locales apt package is installed later.
RUN echo "locales locales/default_environment_locale select en_US.UTF-8" | debconf-set-selections
RUN echo "locales locales/locales_to_be_generated multiselect en_US.UTF-8 UTF-8" | debconf-set-selections
# Setting up ppa deadsnakes to get python 3.11
RUN apt-get update && \
apt-get install -y software-properties-common && \
apt-add-repository -y ppa:deadsnakes/ppa
# Install requirements that are absolutely necessary
RUN apt-get update && \
apt-get -y dist-upgrade && \
apt-get -y install --no-install-recommends \
python3-pip \
python3.11 \
# python3-dev: required for building mysqlclient python package
python3.11-dev \
python3.11-venv \
libpython3.11 \
libpython3.11-stdlib \
libmysqlclient21 \
# libmysqlclient-dev: required for building mysqlclient python package
libmysqlclient-dev \
pkg-config \
libssl1.1 \
libxmlsec1-openssl \
# lynx: Required by https://github.com/openedx/edx-platform/blob/b489a4ecb122/openedx/core/lib/html_to_text.py#L16
lynx \
ntp \
git \
build-essential \
gettext \
gfortran \
graphviz \
locales \
swig \
&& \
apt-get clean all && \
rm -rf /var/lib/apt/*
RUN mkdir -p /edx/var/edxapp
RUN mkdir -p /edx/etc
RUN chown app:app /edx/var/edxapp
# The builder-production stage is a temporary stage that installs required packages and builds the python virtualenv,
# installs nodejs and node_modules.
# The built artifacts from this stage are then copied to the base stage.
FROM minimal-system as builder-production
RUN apt-get update && \
apt-get -y install --no-install-recommends \
curl \
libssl-dev \
libffi-dev \
libfreetype6-dev \
libgeos-dev \
libgraphviz-dev \
libjpeg8-dev \
liblapack-dev \
libpng-dev \
libsqlite3-dev \
libxml2-dev \
libxmlsec1-dev \
libxslt1-dev
# Setup python virtual environment
# It is already 'activated' because $VIRTUAL_ENV/bin was put on $PATH
RUN python3.11 -m venv "${VIRTUAL_ENV}"
# Install python requirements
# Requires copying over requirements files, but not entire repository
COPY requirements requirements
RUN pip install -r requirements/pip.txt
RUN pip install -r requirements/edx/base.txt
# Install node and npm
RUN nodeenv /edx/app/edxapp/nodeenv --node=18.19.0 --prebuilt
RUN npm install -g npm@10.5.x
# This script is used by an npm post-install hook.
# We copy it into the image now so that it will be available when we run `npm install` in the next step.
# The script itself will copy certain modules into some uber-legacy parts of edx-platform which still use RequireJS.
COPY scripts/copy-node-modules.sh scripts/copy-node-modules.sh
# Install node modules
COPY package.json package.json
COPY package-lock.json package-lock.json
RUN npm set progress=false && npm ci
# The builder-development stage is a temporary stage that installs python modules required for development purposes
# The built artifacts from this stage are then copied to the development stage.
FROM builder-production as builder-development
RUN pip install -r requirements/edx/development.txt
# base stage
FROM minimal-system as base
# Copy python virtual environment, nodejs and node_modules
COPY --from=builder-production /edx/app/edxapp/venvs/edxapp /edx/app/edxapp/venvs/edxapp
COPY --from=builder-production /edx/app/edxapp/nodeenv /edx/app/edxapp/nodeenv
COPY --from=builder-production /edx/app/edxapp/edx-platform/node_modules /edx/app/edxapp/edx-platform/node_modules
# Copy over remaining parts of repository (including all code)
COPY . .
# Install Python requirements again in order to capture local projects
RUN pip install -e .
# Setting edx-platform directory as safe for git commands
RUN git config --global --add safe.directory /edx/app/edxapp/edx-platform
# Production target
FROM base as production
USER app
ENV EDX_PLATFORM_SETTINGS='docker-production'
ENV SERVICE_VARIANT="${SERVICE_VARIANT}"
ENV SERVICE_PORT="${SERVICE_PORT}"
ENV DJANGO_SETTINGS_MODULE="${SERVICE_VARIANT}.envs.$EDX_PLATFORM_SETTINGS"
EXPOSE ${SERVICE_PORT}
CMD gunicorn \
-c /edx/app/edxapp/edx-platform/${SERVICE_VARIANT}/docker_${SERVICE_VARIANT}_gunicorn.py \
--name ${SERVICE_VARIANT} \
--bind=0.0.0.0:${SERVICE_PORT} \
--max-requests=1000 \
--access-logfile \
- ${SERVICE_VARIANT}.wsgi:application
# Development target
FROM base as development
RUN apt-get update && \
apt-get -y install --no-install-recommends \
# wget is used in Makefile for common_constraints.txt
wget \
&& \
apt-get clean all && \
rm -rf /var/lib/apt/*
COPY --from=builder-development /edx/app/edxapp/venvs/edxapp /edx/app/edxapp/venvs/edxapp
RUN ln -s "$(pwd)/lms/envs/devstack-experimental.yml" "$LMS_CFG"
RUN ln -s "$(pwd)/cms/envs/devstack-experimental.yml" "$CMS_CFG"
# Temporary compatibility hack while devstack is supporting both the old `edxops/edxapp` image and this image.
# * Add in a dummy ../edxapp_env file
# * devstack sets /edx/etc/studio.yml as CMS_CFG.
RUN ln -s "$(pwd)/cms/envs/devstack-experimental.yml" "/edx/etc/studio.yml"
RUN touch ../edxapp_env
ENV EDX_PLATFORM_SETTINGS='devstack_docker'
ENV SERVICE_VARIANT="${SERVICE_VARIANT}"
EXPOSE ${SERVICE_PORT}
CMD ./manage.py ${SERVICE_VARIANT} runserver 0.0.0.0:${SERVICE_PORT}

View File

@@ -1,8 +1,7 @@
# Do things in edx-platform
.PHONY: base-requirements check-types clean \
compile-requirements detect_changed_source_translations dev-requirements \
docker_auth docker_build docker_tag_build_push_lms docker_tag_build_push_lms_dev \
docker_tag_build_push_cms docker_tag_build_push_cms_dev docs extract_translations \
docs extract_translations \
guides help lint-imports local-requirements migrate migrate-lms migrate-cms \
pre-requirements pull pull_xblock_translations pull_translations push_translations \
requirements shell swagger \
@@ -67,9 +66,6 @@ pull_translations: clean_translations ## pull translations via atlas
detect_changed_source_translations: ## check if translation files are up-to-date
i18n_tool changed
pull: ## update the Docker image used by "make shell"
docker pull edxops/edxapp:latest
pre-requirements: ## install Python requirements for running pip-tools
pip install -r requirements/pip.txt
pip install -r requirements/pip-tools.txt
@@ -94,17 +90,9 @@ test-requirements: pre-requirements
requirements: dev-requirements ## install development environment requirements
shell: ## launch a bash shell in a Docker container with all edx-platform dependencies installed
docker run -it -e "NO_PYTHON_UNINSTALL=1" -e "PIP_INDEX_URL=https://pypi.python.org/simple" -e TERM \
-v `pwd`:/edx/app/edxapp/edx-platform:cached \
-v edxapp_lms_assets:/edx/var/edxapp/staticfiles/ \
-v edxapp_node_modules:/edx/app/edxapp/edx-platform/node_modules \
edxops/edxapp:latest /edx/app/edxapp/devstack.sh open
# Order is very important in this list: files must appear after everything they include!
REQ_FILES = \
requirements/edx/coverage \
requirements/edx/paver \
requirements/edx-sandbox/base \
requirements/edx/base \
requirements/edx/doc \
@@ -164,27 +152,6 @@ upgrade-package: ## update just one package to the latest usable release
check-types: ## run static type-checking tests
mypy
docker_auth:
echo "$$DOCKERHUB_PASSWORD" | docker login -u "$$DOCKERHUB_USERNAME" --password-stdin
docker_build: docker_auth
DOCKER_BUILDKIT=1 docker build . --build-arg SERVICE_VARIANT=lms --build-arg SERVICE_PORT=8000 --target development -t openedx/lms-dev
DOCKER_BUILDKIT=1 docker build . --build-arg SERVICE_VARIANT=lms --build-arg SERVICE_PORT=8000 --target production -t openedx/lms
DOCKER_BUILDKIT=1 docker build . --build-arg SERVICE_VARIANT=cms --build-arg SERVICE_PORT=8010 --target development -t openedx/cms-dev
DOCKER_BUILDKIT=1 docker build . --build-arg SERVICE_VARIANT=cms --build-arg SERVICE_PORT=8010 --target production -t openedx/cms
docker_tag_build_push_lms: docker_auth
docker buildx build -t openedx/lms:latest -t openedx/lms:${GITHUB_SHA} --platform linux/amd64,linux/arm64 --build-arg SERVICE_VARIANT=lms --build-arg SERVICE_PORT=8000 --target production --push .
docker_tag_build_push_lms_dev: docker_auth
docker buildx build -t openedx/lms-dev:latest -t openedx/lms-dev:${GITHUB_SHA} --platform linux/amd64,linux/arm64 --build-arg SERVICE_VARIANT=lms --build-arg SERVICE_PORT=8000 --target development --push .
docker_tag_build_push_cms: docker_auth
docker buildx build -t openedx/cms:latest -t openedx/cms:${GITHUB_SHA} --platform linux/amd64,linux/arm64 --build-arg SERVICE_VARIANT=cms --build-arg SERVICE_PORT=8010 --target production --push .
docker_tag_build_push_cms_dev: docker_auth
docker buildx build -t openedx/cms-dev:latest -t openedx/cms-dev:${GITHUB_SHA} --platform linux/amd64,linux/arm64 --build-arg SERVICE_VARIANT=cms --build-arg SERVICE_PORT=8010 --target development --push .
lint-imports:
lint-imports
@@ -204,3 +171,37 @@ migrate: migrate-lms migrate-cms
# Part of https://github.com/openedx/wg-developer-experience/issues/136
ubuntu-requirements: ## Install ubuntu 22.04 system packages needed for `pip install` to work on ubuntu.
sudo apt install libmysqlclient-dev libxmlsec1-dev
xsslint: ## check xss for quality issues
python scripts/xsslint/xss_linter.py \
--rule-totals \
--config=scripts.xsslint_config \
--thresholds=scripts/xsslint_thresholds.json
pycodestyle: ## check python files for quality issues
pycodestyle .
## Re-enable --lint flag when this issue https://github.com/openedx/edx-platform/issues/35775 is resolved
pii_check: ## check django models for pii annotations
DJANGO_SETTINGS_MODULE=cms.envs.test \
code_annotations django_find_annotations \
--config_file .pii_annotations.yml \
--app_name cms \
--coverage \
--lint
DJANGO_SETTINGS_MODULE=lms.envs.test \
code_annotations django_find_annotations \
--config_file .pii_annotations.yml \
--app_name lms \
--coverage \
--lint
check_keywords: ## check django models for reserved keywords
DJANGO_SETTINGS_MODULE=cms.envs.test \
python manage.py cms check_reserved_keywords \
--override_file db_keyword_overrides.yml
DJANGO_SETTINGS_MODULE=lms.envs.test \
python manage.py lms check_reserved_keywords \
--override_file db_keyword_overrides.yml

View File

@@ -12,11 +12,11 @@ Open edX Platform
Purpose
*******
The `Open edX Platform <https://openedx.org>`_ is a service-oriented platform for authoring and
delivering online learning at any scale. The platform is written in
The `Open edX Platform <https://openedx.org>`_ enables the authoring and
delivery of online learning at any scale. The platform is written in
Python and JavaScript and makes extensive use of the Django
framework. At the highest level, the platform is composed of a
monolith, some independently deployable applications (IDAs), and
modular monolith, some independently deployable applications (IDAs), and
micro-frontends (MFEs) based on the ReactJS.
This repository hosts the monolith at the center of the Open edX
@@ -71,15 +71,15 @@ System Dependencies
-------------------
OS:
* Ubuntu 20.04
* Ubuntu 22.04
* Ubuntu 24.04
Interpreters/Tools:
* Python 3.11
* Node 18
* Node: See the ``.nvmrc`` file in this repository.
Services:
@@ -103,10 +103,19 @@ Language Packages:
* Backend application:
- ``pip install -r requirements/edx/base.txt`` (production)
- ``pip install -r requirements/edx/dev.txt`` (development)
- ``pip install -r requirements/edx/development.txt`` (development)
Some Python packages have system dependencies. For example, installing these packages on Debian or Ubuntu will require first running ``sudo apt install python3-dev default-libmysqlclient-dev build-essential pkg-config`` to satisfy the requirements of the ``mysqlclient`` Python package.
Codejail Setup
--------------
As a part of the baremetal setup, you will need to configure your system to
work properly with codejail. See the `codejail installation steps`_ for more
details.
.. _codejail installation steps: https://github.com/openedx/codejail?tab=readme-ov-file#installation
Build Steps
-----------

View File

@@ -1,7 +1,7 @@
CMS
===
This directory contains code relating to the Open edX Content Management System ("CMS"). It allows learning content to be created, edited, versioned, and eventually published to the `Open edX Learning Mangement System <../lms>`_ ("LMS"). The main user-facing application that CMS powers is the `Open edX Studio <https://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/en/latest/getting_started/CA_get_started_Studio.html#>`_
This directory contains code relating to the Open edX Content Management System ("CMS"). It allows learning content to be created, edited, versioned, and eventually published to the `Open edX Learning Management System <../lms>`_ ("LMS"). The main user-facing application that CMS powers is the `Open edX Studio <https://docs.openedx.org/en/latest/educators/concepts/open_edx_platform/what_is_studio.html>`_
See also
--------

View File

@@ -6,10 +6,7 @@ pytest from looking for the conftest.py module in the parent directory when
only running cms tests.
"""
import importlib
import logging
import os
import pytest
@@ -29,13 +26,6 @@ def pytest_configure(config):
else:
logging.info("pytest did not register json_report correctly")
if config.getoption('help'):
return
settings_module = os.environ.get('DJANGO_SETTINGS_MODULE')
startup_module = 'cms.startup' if settings_module.startswith('cms') else 'lms.startup'
startup = importlib.import_module(startup_module)
startup.run()
@pytest.fixture(autouse=True, scope='function')
def _django_clear_site_cache():

View File

@@ -286,7 +286,7 @@ class CourseRunViewSetTests(ModuleStoreTestCase):
data['team'] = [{'user': 'invalid-username'}]
response = self.client.post(self.list_url, data, format='json')
self.assertEqual(response.status_code, 400)
self.assertDictContainsSubset({'team': ['Course team user does not exist']}, response.data)
self.assertEqual(response.data.get('team'), ['Course team user does not exist'])
def test_images_upload(self):
# http://www.django-rest-framework.org/api-guide/parsers/#fileuploadparser

View File

@@ -23,6 +23,7 @@ class CourseRunViewSet(viewsets.GenericViewSet): # lint-amnesty, pylint: disabl
lookup_value_regex = settings.COURSE_KEY_REGEX
permission_classes = (permissions.IsAdminUser,)
serializer_class = CourseRunSerializer
queryset = []
def get_object(self):
lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field

View File

@@ -86,9 +86,16 @@ def user_task_stopped_handler(sender, **kwargs): # pylint: disable=unused-argum
reverse('usertaskstatus-detail', args=[status.uuid])
)
# check if this is a course optimizer task
is_course_optimizer_task = False
course_optimizer_artifact = UserTaskArtifact.objects.filter(status=status, name="BrokenLinks").first()
if course_optimizer_artifact:
is_course_optimizer_task = True
user_email = status.user.email
olx_validation_text = get_olx_validation_from_artifact()
task_args = [task_name, str(status.state_text), user_email, detail_url, olx_validation_text]
task_args = [task_name, str(status.state_text), user_email, detail_url,
olx_validation_text, is_course_optimizer_task]
try:
send_task_complete_email.delay(*task_args)
except Exception: # pylint: disable=broad-except

View File

@@ -21,17 +21,18 @@ TASK_COMPLETE_EMAIL_TIMEOUT = 60
@shared_task(bind=True)
@set_code_owner_attribute
def send_task_complete_email(self, task_name, task_state_text, dest_addr, detail_url, olx_validation_text=None):
def send_task_complete_email(self, task_name, task_state_text, dest_addr, detail_url,
olx_validation_text=None, is_course_optimizer_task=False):
"""
Sending an email to the users when an async task completes.
"""
retries = self.request.retries
context = {
'task_name': task_name,
'task_status': task_state_text,
'detail_url': detail_url,
'olx_validation_errors': {},
'is_course_optimizer_task': is_course_optimizer_task,
}
if olx_validation_text:
try:

View File

@@ -13,13 +13,15 @@ from edx_django_utils.admin.mixins import ReadOnlyAdminMixin
from cms.djangoapps.contentstore.models import (
BackfillCourseTabsConfig,
CleanStaleCertificateAvailabilityDatesConfig,
VideoUploadConfig
ComponentLink,
ContainerLink,
LearningContextLinksStatus,
VideoUploadConfig,
)
from cms.djangoapps.contentstore.outlines_regenerate import CourseOutlineRegenerate
from openedx.core.djangoapps.content.learning_sequences.api import key_supports_outlines
from .tasks import update_outline_from_modulestore_task, update_all_outlines_from_modulestore_task
from .tasks import update_all_outlines_from_modulestore_task, update_outline_from_modulestore_task
log = logging.getLogger(__name__)
@@ -86,6 +88,110 @@ class CleanStaleCertificateAvailabilityDatesConfigAdmin(ConfigurationModelAdmin)
pass
@admin.register(ComponentLink)
class ComponentLinkAdmin(admin.ModelAdmin):
"""
ComponentLink admin.
"""
fields = (
"uuid",
"upstream_block",
"upstream_usage_key",
"upstream_context_key",
"downstream_usage_key",
"downstream_context_key",
"version_synced",
"version_declined",
"created",
"updated",
)
readonly_fields = fields
list_display = [
"upstream_block",
"upstream_usage_key",
"downstream_usage_key",
"version_synced",
"updated",
]
search_fields = [
"upstream_usage_key",
"upstream_context_key",
"downstream_usage_key",
"downstream_context_key",
]
def has_add_permission(self, request):
return False
def has_change_permission(self, request, obj=None):
return False
@admin.register(ContainerLink)
class ContainerLinkAdmin(admin.ModelAdmin):
"""
ContainerLink admin.
"""
fields = (
"uuid",
"upstream_container",
"upstream_container_key",
"upstream_context_key",
"downstream_usage_key",
"downstream_context_key",
"version_synced",
"version_declined",
"created",
"updated",
)
readonly_fields = fields
list_display = [
"upstream_container",
"upstream_container_key",
"downstream_usage_key",
"version_synced",
"updated",
]
search_fields = [
"upstream_container_key",
"upstream_context_key",
"downstream_usage_key",
"downstream_context_key",
]
def has_add_permission(self, request):
return False
def has_change_permission(self, request, obj=None):
return False
@admin.register(LearningContextLinksStatus)
class LearningContextLinksStatusAdmin(admin.ModelAdmin):
"""
LearningContextLinksStatus admin.
"""
fields = (
"context_key",
"status",
"created",
"updated",
)
readonly_fields = ("created", "updated")
list_display = (
"context_key",
"status",
"created",
"updated",
)
def has_add_permission(self, request):
return False
def has_change_permission(self, request, obj=None):
return False
admin.site.register(BackfillCourseTabsConfig, ConfigurationModelAdmin)
admin.site.register(VideoUploadConfig, ConfigurationModelAdmin)
admin.site.register(CourseOutlineRegenerate, CourseOutlineRegenerateAdmin)

View File

@@ -103,7 +103,7 @@ class CourseValidationViewTest(SharedModuleStoreTestCase, APITestCase):
'has_update': True,
},
'certificates': {
'is_enabled': True,
'is_enabled': False,
'is_activated': False,
'has_certificate': False,
},

View File

@@ -106,7 +106,7 @@ class CourseImportView(CourseImportExportViewMixin, GenericAPIView):
# TODO: ARCH-91
# This view is excluded from Swagger doc generation because it
# does not specify a serializer class.
exclude_from_schema = True
swagger_schema = None
@course_author_access_required
def post(self, request, course_key):

View File

@@ -3,7 +3,7 @@ import logging
import time
import numpy as np
from edxval.api import get_videos_for_course
from edxval.api import get_course_videos_qset
from rest_framework.generics import GenericAPIView
from rest_framework.response import Response
from scipy import stats
@@ -77,6 +77,11 @@ class CourseQualityView(DeveloperErrorViewMixin, GenericAPIView):
* mode
"""
# TODO: ARCH-91
# This view is excluded from Swagger doc generation because it
# does not specify a serializer class.
swagger_schema = None
@course_author_access_required
def get(self, request, course_key):
"""
@@ -180,13 +185,11 @@ class CourseQualityView(DeveloperErrorViewMixin, GenericAPIView):
def _videos_quality(self, course): # lint-amnesty, pylint: disable=missing-function-docstring
video_blocks_in_course = modulestore().get_items(course.id, qualifiers={'category': 'video'})
videos, __ = get_videos_for_course(course.id)
videos_in_val = list(videos)
video_durations = [video['duration'] for video in videos_in_val]
video_durations = [cv.video.duration for cv in get_course_videos_qset(course.id)]
return dict(
total_number=len(video_blocks_in_course),
num_mobile_encoded=len(videos_in_val),
num_mobile_encoded=len(video_durations),
num_with_val_id=len([v for v in video_blocks_in_course if v.edx_video_id]),
durations=self._stats_dict(video_durations),
)

View File

@@ -65,6 +65,11 @@ class CourseValidationView(DeveloperErrorViewMixin, GenericAPIView):
* has_proctoring_escalation_email - whether the course has a proctoring escalation email
"""
# TODO: ARCH-91
# This view is excluded from Swagger doc generation because it
# does not specify a serializer class.
swagger_schema = None
@course_author_access_required
def get(self, request, course_key):
"""
@@ -212,7 +217,7 @@ class CourseValidationView(DeveloperErrorViewMixin, GenericAPIView):
def _certificates_validation(self, course):
is_activated, certificates = CertificateManager.is_activated(course)
certificates_enabled = certificates is not None
certificates_enabled = CertificateManager.is_enabled(course)
return dict(
is_activated=is_activated,
has_certificate=certificates_enabled and len(certificates) > 0,

View File

@@ -25,7 +25,8 @@ from common.djangoapps.util.date_utils import get_default_time_display
from common.djangoapps.util.json_request import JsonResponse
from openedx.core.djangoapps.contentserver.caching import del_cached_content
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx_filters.course_authoring.filters import LMSPageURLRequested
from openedx.core.djangoapps.user_api.models import UserPreference
from openedx_filters.content_authoring.filters import LMSPageURLRequested
from xmodule.contentstore.content import StaticContent # lint-amnesty, pylint: disable=wrong-import-order
from xmodule.contentstore.django import contentstore # lint-amnesty, pylint: disable=wrong-import-order
from xmodule.exceptions import NotFoundError # lint-amnesty, pylint: disable=wrong-import-order
@@ -194,7 +195,9 @@ def _assets_json(request, course_key):
'''
request_options = _parse_request_to_dictionary(request)
filter_parameters = {}
filter_parameters = {
'user_language': UserPreference.get_value(request.user, 'pref-lang') or 'en',
}
if request_options['requested_asset_type']:
filters_are_invalid_error = _get_error_if_invalid_parameters(request_options['requested_asset_type'])
@@ -717,7 +720,7 @@ def get_asset_json(display_name, content_type, date, location, thumbnail_locatio
asset_url = StaticContent.serialize_asset_key_with_slash(location)
## .. filter_implemented_name: LMSPageURLRequested
## .. filter_type: org.openedx.course_authoring.lms.page.url.requested.v1
## .. filter_type: org.openedx.content_authoring.lms.page.url.requested.v1
lms_root, _ = LMSPageURLRequested.run_filter(
url=configuration_helpers.get_value('LMS_ROOT_URL', settings.LMS_ROOT_URL),
org=location.org,

View File

@@ -0,0 +1,319 @@
"""
Logic for handling actions in Studio related to Course Optimizer.
"""
import json
from user_tasks.conf import settings as user_tasks_settings
from user_tasks.models import UserTaskArtifact, UserTaskStatus
from cms.djangoapps.contentstore.tasks import CourseLinkCheckTask, LinkState
from cms.djangoapps.contentstore.xblock_storage_handlers.view_handlers import get_xblock
from cms.djangoapps.contentstore.xblock_storage_handlers.xblock_helpers import usage_key_with_run
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
# Restricts status in the REST API to only those which the requesting user has permission to view.
# These can be overwritten in django settings.
# By default, these should be the UserTaskStatus statuses:
# 'Pending', 'In Progress', 'Succeeded', 'Failed', 'Canceled', 'Retrying'
STATUS_FILTERS = user_tasks_settings.USER_TASKS_STATUS_FILTERS
def get_link_check_data(request, course_id):
    """
    Build the response payload for a link-check GET request.

    Looks up the most recent link-check task for ``course_id`` and returns a
    dict with the task state plus, depending on that state, the creation
    time, the broken-links DTO, or any recorded error.
    """
    latest_status = _latest_task_status(request, course_id)
    created_at = None
    broken_links_dto = None
    error = None
    if latest_status is None:
        # No task recorded yet; fall back to any state stashed in the session.
        try:
            state = request.session['link_check_status'][course_id]
        except KeyError:
            state = 'Uninitiated'
    else:
        state = latest_status.state
        created_at = latest_status.created
        if state == UserTaskStatus.SUCCEEDED:
            # A successful run stores its results as a JSON artifact.
            artifact = UserTaskArtifact.objects.get(status=latest_status, name='BrokenLinks')
            with artifact.file as artifact_file:
                parsed_links = json.loads(artifact_file.read())
            broken_links_dto = generate_broken_links_descriptor(parsed_links, request.user)
        elif state in (UserTaskStatus.FAILED, UserTaskStatus.CANCELED):
            error_artifacts = UserTaskArtifact.objects.filter(status=latest_status, name='Error')
            if error_artifacts:
                error = error_artifacts[0].text
                try:
                    error = json.loads(error)
                except ValueError:
                    # Wasn't JSON; keep the raw text.
                    pass

    data = {
        'LinkCheckStatus': state,
        **({'LinkCheckCreatedAt': created_at} if created_at else {}),
        **({'LinkCheckOutput': broken_links_dto} if broken_links_dto else {}),
        **({'LinkCheckError': error} if error else {})
    }
    return data
def _latest_task_status(request, course_key_string, view_func=None):
    """
    Return the newest link-check ``UserTaskStatus`` for the given course key,
    or ``None`` when no status visible to the requesting user exists.
    """
    task_name = CourseLinkCheckTask.generate_name({'course_key_string': course_key_string})
    statuses = UserTaskStatus.objects.filter(name=task_name)
    # Narrow the queryset to statuses this user is permitted to view.
    for filter_class in STATUS_FILTERS:
        statuses = filter_class().filter_queryset(request, statuses, view_func)
    return statuses.order_by('-created').first()
def generate_broken_links_descriptor(json_content, request_user):
    """
    Build the frontend Data Transfer Object for a list of broken links.

    ``json_content`` is a list of ``[block_id, link]`` or
    ``[block_id, link, link_state]`` entries, where ``link_state`` is
    ``locked`` when a studio link returned 403 and ``external-forbidden``
    when a non-studio link returned 403.

    The result nests each offending block under its unit, subsection and
    section::

        {
            'sections': [
                {'id': ..., 'displayName': ..., 'subsections': [
                    {'id': ..., 'displayName': ..., 'units': [
                        {'id': ..., 'displayName': ..., 'blocks': [
                            {'id': ..., 'displayName': ..., 'url': ...,
                             'brokenLinks': [...], 'lockedLinks': [...]},
                            ...,
                        ]},
                        ...,
                    ]},
                    ...,
                ]},
                ...,
            ]
        }
    """
    node_tree = {}        # nested dict mirroring the course hierarchy
    block_attributes = {}  # per-xblock display attributes keyed by block id
    for entry in json_content:
        block_id, link, *extra = entry
        link_state = extra[0] if extra else ''
        block = get_xblock(usage_key_with_run(block_id), request_user)
        node_tree, block_attributes = _update_node_tree_and_dictionary(
            block=block,
            link=link,
            link_state=link_state,
            node_tree=node_tree,
            dictionary=block_attributes
        )

    return _create_dto_recursive(node_tree, block_attributes)
def _update_node_tree_and_dictionary(block, link, link_state, node_tree, dictionary):
    """
    Record ``block`` (and the ``link`` found in it) in the hierarchy tree and
    the attribute dictionary, returning both updated structures.

    ``node_tree`` maps section ids -> subsection ids -> unit ids -> block ids
    as nested dicts.  ``dictionary`` maps each xblock id to its display
    attributes plus, for leaf blocks, the editor URL and the categorised
    link lists (``broken_links`` / ``locked_links`` /
    ``external_forbidden_links``).
    """
    tree, attrs = node_tree, dictionary
    cursor = tree
    leaf_id = ''

    # Walk from the section down to the block itself, creating any missing
    # tree nodes and recording display attributes along the way.
    for ancestor in _get_node_path(block):
        leaf_id = ancestor.location.block_id
        attrs.setdefault(
            leaf_id,
            {
                'display_name': ancestor.display_name,
                'category': getattr(ancestor, 'category', ''),
            }
        )
        # Descend, creating the child node if it does not exist yet.
        cursor = cursor.setdefault(leaf_id, {})

    # Block-level details for the last xblock in the path: the editor URL
    # and the link itself, bucketed by its state.
    attrs[leaf_id].setdefault(
        'url',
        f'/course/{block.course_id}/editor/{block.category}/{block.location}'
    )
    # ``link_state is True`` is kept for backward compatibility: older
    # payloads used a boolean ``is_locked`` instead of ``link_state``.
    if link_state is True or link_state == LinkState.LOCKED:
        attrs[leaf_id].setdefault('locked_links', []).append(link)
    elif link_state == LinkState.EXTERNAL_FORBIDDEN:
        attrs[leaf_id].setdefault('external_forbidden_links', []).append(link)
    else:
        attrs[leaf_id].setdefault('broken_links', []).append(link)

    return tree, attrs
def _get_node_path(block):
"""
Retrieves the path from the course root node to a specific block, excluding the root.
** Example Path structure **
[chapter_node, sequential_node, vertical_node, html_node]
"""
path = []
current_node = block
while current_node.get_parent():
path.append(current_node)
current_node = current_node.get_parent()
return list(reversed(path))
CATEGORY_TO_LEVEL_MAP = {
"chapter": "sections",
"sequential": "subsections",
"vertical": "units"
}
def _create_dto_recursive(xblock_node, xblock_dictionary, parent_id=None):
"""
Recursively build the Data Transfer Object by using
the structure from the node tree and data from the dictionary.
"""
# Exit condition when there are no more child nodes (at block level)
if not xblock_node:
return None
level = None
xblock_children = []
for xblock_id, node in xblock_node.items():
child_blocks = _create_dto_recursive(node, xblock_dictionary, parent_id=xblock_id)
xblock_data = xblock_dictionary.get(xblock_id, {})
xblock_entry = {
'id': xblock_id,
'displayName': xblock_data.get('display_name', ''),
}
if child_blocks is None: # Leaf node
level = 'blocks'
xblock_entry.update({
'url': xblock_data.get('url', ''),
'brokenLinks': xblock_data.get('broken_links', []),
'lockedLinks': xblock_data.get('locked_links', []),
'externalForbiddenLinks': xblock_data.get('external_forbidden_links', [])
})
else: # Non-leaf node
category = xblock_data.get('category', None)
# If parent and child has same IDs and level is 'sections', change it to 'subsections'
# And if parent and child has same IDs and level is 'subsections', change it to 'units'
if xblock_id == parent_id:
if category == "chapter":
category = "sequential"
elif category == "sequential":
category = "vertical"
level = CATEGORY_TO_LEVEL_MAP.get(category, None)
xblock_entry.update(child_blocks)
xblock_children.append(xblock_entry)
return {level: xblock_children} if level else None
def sort_course_sections(course_key, data):
    """
    Reorder ``data['LinkCheckOutput']['sections']`` to match the published
    course outline.

    Sections that no longer appear in the published outline are dropped.
    The input is returned unchanged when the course cannot be found or the
    expected keys are missing.
    """
    published_courses = modulestore().get_items(
        course_key,
        qualifiers={'category': 'course'},
        revision=ModuleStoreEnum.RevisionOption.published_only
    )
    if not published_courses or 'LinkCheckOutput' not in data or 'sections' not in data['LinkCheckOutput']:
        # Nothing to sort; hand the caller's data back untouched.
        return data

    ordered_ids = [child.location.block_id for child in published_courses[0].get_children()]
    sections_by_id = {section['id']: section for section in data['LinkCheckOutput']['sections']}
    data['LinkCheckOutput']['sections'] = [
        sections_by_id[section_id]
        for section_id in ordered_ids
        if section_id in sections_by_id
    ]
    return data

View File

@@ -0,0 +1,297 @@
"""
Tests for course optimizer
"""
from unittest import mock
from unittest.mock import Mock
from cms.djangoapps.contentstore.tests.utils import CourseTestCase
from cms.djangoapps.contentstore.core.course_optimizer_provider import (
_update_node_tree_and_dictionary,
_create_dto_recursive,
sort_course_sections
)
from cms.djangoapps.contentstore.tasks import LinkState
class TestLinkCheckProvider(CourseTestCase):
"""
Tests for functions that generate a json structure of locked and broken links
to send to the frontend.
"""
    def setUp(self):
        """Set up a mocked course hierarchy: course > section > subsection > unit > block."""
        super().setUp()
        # Root of the mocked hierarchy; its parent is None (set below) so
        # _get_node_path() excludes it from the returned path.
        self.mock_course = Mock()
        self.mock_section = Mock(
            location=Mock(block_id='chapter_1'),
            display_name='Section Name',
            category='chapter'
        )
        self.mock_subsection = Mock(
            location=Mock(block_id='sequential_1'),
            display_name='Subsection Name',
            category='sequential'
        )
        self.mock_unit = Mock(
            location=Mock(block_id='vertical_1'),
            display_name='Unit Name',
            category='vertical'
        )
        # Leaf block; course_id comes from the real test course so the
        # generated editor URL matches expectations.
        self.mock_block = Mock(
            location=Mock(block_id='block_1'),
            display_name='Block Name',
            course_id=self.course.id,
            category='html'
        )
        # Wire up the parent chain that _get_node_path() walks.
        self.mock_course.get_parent.return_value = None
        self.mock_section.get_parent.return_value = self.mock_course
        self.mock_subsection.get_parent.return_value = self.mock_section
        self.mock_unit.get_parent.return_value = self.mock_subsection
        self.mock_block.get_parent.return_value = self.mock_unit
def test_update_node_tree_and_dictionary_returns_node_tree(self):
"""
Verify _update_node_tree_and_dictionary creates a node tree structure
when passed a block level xblock.
"""
expected_tree = {
'chapter_1': {
'sequential_1': {
'vertical_1': {
'block_1': {}
}
}
}
}
result_tree, result_dictionary = _update_node_tree_and_dictionary(
self.mock_block, 'example_link', LinkState.LOCKED, {}, {}
)
self.assertEqual(expected_tree, result_tree)
def test_update_node_tree_and_dictionary_returns_dictionary(self):
"""
Verify _update_node_tree_and_dictionary creates a dictionary of parent xblock entries
when passed a block level xblock.
"""
expected_dictionary = {
'chapter_1': {
'display_name': 'Section Name',
'category': 'chapter'
},
'sequential_1': {
'display_name': 'Subsection Name',
'category': 'sequential'
},
'vertical_1': {
'display_name': 'Unit Name',
'category': 'vertical'
},
'block_1': {
'display_name': 'Block Name',
'category': 'html',
'url': f'/course/{self.course.id}/editor/html/{self.mock_block.location}',
'locked_links': ['example_link']
}
}
result_tree, result_dictionary = _update_node_tree_and_dictionary(
self.mock_block, 'example_link', LinkState.LOCKED, {}, {}
)
self.assertEqual(expected_dictionary, result_dictionary)
def test_create_dto_recursive_returns_for_empty_node(self):
"""
Test _create_dto_recursive behavior at the end of recursion.
Function should return None when given empty node tree and empty dictionary.
"""
expected = _create_dto_recursive({}, {})
self.assertEqual(None, expected)
def test_create_dto_recursive_returns_for_leaf_node(self):
"""
Test _create_dto_recursive behavior at the step before the end of recursion.
When evaluating a leaf node in the node tree, the function should return broken links
and locked links data from the leaf node.
"""
expected_result = {
'blocks': [
{
'id': 'block_1',
'displayName': 'Block Name',
'url': '/block/1',
'brokenLinks': ['broken_link_1', 'broken_link_2'],
'lockedLinks': ['locked_link'],
'externalForbiddenLinks': ['forbidden_link_1'],
}
]
}
mock_node_tree = {
'block_1': {}
}
mock_dictionary = {
'chapter_1': {
'display_name': 'Section Name',
'category': 'chapter'
},
'sequential_1': {
'display_name': 'Subsection Name',
'category': 'sequential'
},
'vertical_1': {
'display_name': 'Unit Name',
'category': 'vertical'
},
'block_1': {
'display_name': 'Block Name',
'url': '/block/1',
'broken_links': ['broken_link_1', 'broken_link_2'],
'locked_links': ['locked_link'],
'external_forbidden_links': ['forbidden_link_1'],
}
}
expected = _create_dto_recursive(mock_node_tree, mock_dictionary)
self.assertEqual(expected_result, expected)
def test_create_dto_recursive_returns_for_full_tree(self):
"""
Test _create_dto_recursive behavior when recursing many times.
When evaluating a fully mocked node tree and dictionary, the function should return
a full json DTO prepared for frontend.
"""
expected_result = {
'sections': [
{
'id': 'chapter_1',
'displayName': 'Section Name',
'subsections': [
{
'id': 'sequential_1',
'displayName': 'Subsection Name',
'units': [
{
'id': 'vertical_1',
'displayName': 'Unit Name',
'blocks': [
{
'id': 'block_1',
'displayName': 'Block Name',
'url': '/block/1',
'brokenLinks': ['broken_link_1', 'broken_link_2'],
'lockedLinks': ['locked_link'],
'externalForbiddenLinks': ['forbidden_link_1'],
}
]
}
]
}
]
}
]
}
mock_node_tree = {
'chapter_1': {
'sequential_1': {
'vertical_1': {
'block_1': {}
}
}
}
}
mock_dictionary = {
'chapter_1': {
'display_name': 'Section Name',
'category': 'chapter'
},
'sequential_1': {
'display_name': 'Subsection Name',
'category': 'sequential'
},
'vertical_1': {
'display_name': 'Unit Name',
'category': 'vertical'
},
'block_1': {
'display_name': 'Block Name',
'url': '/block/1',
'broken_links': ['broken_link_1', 'broken_link_2'],
'locked_links': ['locked_link'],
'external_forbidden_links': ['forbidden_link_1'],
}
}
expected = _create_dto_recursive(mock_node_tree, mock_dictionary)
self.assertEqual(expected_result, expected)
@mock.patch('cms.djangoapps.contentstore.core.course_optimizer_provider.modulestore', autospec=True)
def test_returns_unchanged_data_if_no_course_blocks(self, mock_modulestore):
"""Test that the function returns unchanged data if no course blocks exist."""
mock_modulestore_instance = Mock()
mock_modulestore.return_value = mock_modulestore_instance
mock_modulestore_instance.get_items.return_value = []
data = {}
result = sort_course_sections("course-v1:Test+Course", data)
assert result == data # Should return the original data
@mock.patch('cms.djangoapps.contentstore.core.course_optimizer_provider.modulestore', autospec=True)
def test_returns_unchanged_data_if_linkcheckoutput_missing(self, mock_modulestore):
"""Test that the function returns unchanged data if 'LinkCheckOutput' is missing."""
mock_modulestore_instance = Mock()
mock_modulestore.return_value = mock_modulestore_instance
data = {'LinkCheckStatus': 'Uninitiated'} # No 'LinkCheckOutput'
mock_modulestore_instance.get_items.return_value = data
result = sort_course_sections("course-v1:Test+Course", data)
assert result == data
    @mock.patch('cms.djangoapps.contentstore.core.course_optimizer_provider.modulestore', autospec=True)
    def test_returns_unchanged_data_if_sections_missing(self, mock_modulestore):
        """Test that the function returns unchanged data if 'sections' is missing."""
        mock_modulestore_instance = Mock()
        mock_modulestore.return_value = mock_modulestore_instance
        data = {'LinkCheckStatus': 'Success', 'LinkCheckOutput': {}}  # 'LinkCheckOutput' present but has no 'sections'
        mock_modulestore_instance.get_items.return_value = data
        result = sort_course_sections("course-v1:Test+Course", data)
        assert result == data
@mock.patch('cms.djangoapps.contentstore.core.course_optimizer_provider.modulestore', autospec=True)
def test_sorts_sections_correctly(self, mock_modulestore):
"""Test that the function correctly sorts sections based on published course structure."""
mock_course_block = Mock()
mock_course_block.get_children.return_value = [
Mock(location=Mock(block_id="section2")),
Mock(location=Mock(block_id="section3")),
Mock(location=Mock(block_id="section1")),
]
mock_modulestore_instance = Mock()
mock_modulestore.return_value = mock_modulestore_instance
mock_modulestore_instance.get_items.return_value = [mock_course_block]
data = {
"LinkCheckOutput": {
"sections": [
{"id": "section1", "name": "Intro"},
{"id": "section2", "name": "Advanced"},
{"id": "section3", "name": "Bonus"}, # Not in course structure
]
}
}
result = sort_course_sections("course-v1:Test+Course", data)
expected_sections = [
{"id": "section2", "name": "Advanced"},
{"id": "section3", "name": "Bonus"},
{"id": "section1", "name": "Intro"},
]
assert result["LinkCheckOutput"]["sections"] == expected_sections

View File

@@ -14,7 +14,7 @@ from search.search_engine_base import SearchEngine
from cms.djangoapps.contentstore.course_group_config import GroupConfiguration
from common.djangoapps.course_modes.models import CourseMode
from openedx.core.lib.courses import course_image_url
from openedx.core.lib.courses import course_image_url, course_organization_image_url
from xmodule.annotator_mixin import html_to_text # lint-amnesty, pylint: disable=wrong-import-order
from xmodule.library_tools import normalize_key_for_search # lint-amnesty, pylint: disable=wrong-import-order
from xmodule.modulestore import ModuleStoreEnum # lint-amnesty, pylint: disable=wrong-import-order
@@ -612,6 +612,7 @@ class CourseAboutSearchIndexer(CoursewareSearchIndexer):
'course': course_id,
'content': {},
'image_url': course_image_url(course),
'org_image_url': course_organization_image_url(course),
}
# load data for all of the 'about' blocks for this course into a dictionary

View File

@@ -0,0 +1,64 @@
==============================================
How to test View Auth for course-related views
==============================================
What to test
------------
Each view that exposes an internal API endpoint - like those in the rest_api folder - must
be tested for the following.
- Only authenticated users can access the endpoint.
- Only users with the correct permissions (authorization) can access the endpoint.
- All data and params that are part of the request are properly validated.
How to test
-----------
The `AuthorizeStaffTestCase` class provides a set of tests that can be used to test the authorization
of a view. If you inherit from this class, these tests will be automatically run. For details,
please look at the source code of the `AuthorizeStaffTestCase` class.
A lot of these tests can be easily implemented by inheriting from the `AuthorizeStaffTestCase`.
This parent class assumes that the view is for a specific course and that only users who have access
to the course can access the view. (They are either staff or instructors for the course, or global admin).
Here is an example of how to test a view that requires a user to be authenticated and have access to a course.
.. code-block:: python
from cms.djangoapps.contentstore.tests.test_utils import AuthorizeStaffTestCase
from django.test import TestCase
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from django.urls import reverse
class TestMyGetView(AuthorizeStaffTestCase, ModuleStoreTestCase, TestCase):
def make_request(self, course_id=None, data=None):
url = self.get_url(self.course.id)
response = self.client.get(url, data)
return response
def get_url(self, course_key):
url = reverse(
'cms.djangoapps.contentstore:v0:my_get_view',
kwargs={'course_id': self.course.id}
)
return url
As you can see, you need to inherit from `AuthorizeStaffTestCase` and `ModuleStoreTestCase`, and then either
`TestCase` or `APITestCase` depending on the type of view you are testing. For cookie-based
authentication, `TestCase` is sufficient; for OAuth2 use `APITestCase`.
The only two methods you need to implement are `make_request` and `get_url`. The `make_request` method
should make the request to the view and return the response. The `get_url` method should return the URL
for the view you are testing.
Overwriting Tests
-----------------
If you need different behavior you can overwrite the tests from the parent class.
For example, if students should have access to the view, simply implement the
`test_student` method in your test class.
Adding other tests
------------------
If you want to test other things in the view - let's say validation -
it's easy to just add another `test_...` function to your test class
and you can use the `make_request` method to make the request.

View File

@@ -71,7 +71,16 @@ def register_exams(course_key):
timed_exam.is_onboarding_exam
)
due_date = timed_exam.due.isoformat() if timed_exam.due else (course.end.isoformat() if course.end else None)
# Exams in courses not using an LTI based proctoring provider should use the original definition of due_date
# from contentstore/proctoring.py. These exams are powered by the edx-proctoring plugin and not the edx-exams
# microservice.
if course.proctoring_provider == 'lti_external':
due_date = (
timed_exam.due.isoformat() if timed_exam.due
else (course.end.isoformat() if course.end else None)
)
else:
due_date = timed_exam.due if not course.self_paced else None
exams_list.append({
'course_id': str(course_key),

View File

@@ -7,8 +7,10 @@ import pathlib
import urllib
from lxml import etree
from mimetypes import guess_type
import re
from attrs import frozen, Factory
from django.core.files.base import ContentFile
from django.conf import settings
from django.contrib.auth import get_user_model
from django.utils.translation import gettext as _
@@ -22,12 +24,19 @@ from xmodule.contentstore.django import contentstore
from xmodule.exceptions import NotFoundError
from xmodule.modulestore.django import modulestore
from xmodule.xml_block import XmlMixin
from xmodule.video_block.transcripts_utils import Transcript, build_components_import_path
from edxval.api import (
create_external_video,
create_or_update_video_transcript,
)
from cms.djangoapps.models.settings.course_grading import CourseGradingModel
from cms.lib.xblock.upstream_sync import UpstreamLink, UpstreamLinkException, fetch_customizable_fields
from cms.lib.xblock.upstream_sync import UpstreamLink, UpstreamLinkException
from cms.lib.xblock.upstream_sync_block import fetch_customizable_fields_from_block
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
import openedx.core.djangoapps.content_staging.api as content_staging_api
import openedx.core.djangoapps.content_tagging.api as content_tagging_api
from openedx.core.djangoapps.content_staging.data import LIBRARY_SYNC_PURPOSE
from .utils import reverse_course_url, reverse_library_url, reverse_usage_url
@@ -74,6 +83,22 @@ def is_unit(xblock, parent_xblock=None):
return False
def is_library_content(xblock):
"""
Returns true if the specified xblock is library content.
"""
return xblock.category == 'library_content'
def get_parent_if_split_test(xblock):
"""
Returns the parent of the specified xblock if it is a split test, otherwise returns None.
"""
parent_xblock = get_parent_xblock(xblock)
if parent_xblock and parent_xblock.category == 'split_test':
return parent_xblock
def xblock_has_own_studio_page(xblock, parent_xblock=None):
"""
Returns true if the specified xblock has an associated Studio page. Most xblocks do
@@ -261,6 +286,66 @@ class StaticFileNotices:
error_files: list[str] = Factory(list)
def _insert_static_files_into_downstream_xblock(
downstream_xblock: XBlock, staged_content_id: int, request
) -> StaticFileNotices:
"""
Gets static files from staged content, and inserts them into the downstream XBlock.
"""
static_files = content_staging_api.get_staged_content_static_files(staged_content_id)
notices, substitutions = _import_files_into_course(
course_key=downstream_xblock.context_key,
staged_content_id=staged_content_id,
static_files=static_files,
usage_key=downstream_xblock.usage_key,
)
# FIXME: This code shouldn't have any special cases for specific block types like video
# in the future.
if downstream_xblock.usage_key.block_type == 'video':
_import_transcripts(
downstream_xblock,
staged_content_id=staged_content_id,
static_files=static_files,
)
# Rewrite the OLX's static asset references to point to the new
# locations for those assets. See _import_files_into_course for more
# info on why this is necessary.
store = modulestore()
if hasattr(downstream_xblock, "data") and substitutions:
data_with_substitutions = downstream_xblock.data
for old_static_ref, new_static_ref in substitutions.items():
data_with_substitutions = _replace_strings(
data_with_substitutions,
old_static_ref,
new_static_ref,
)
downstream_xblock.data = data_with_substitutions
if store is not None:
store.update_item(downstream_xblock, request.user.id)
return notices
def _replace_strings(obj: dict | list | str, old_str: str, new_str: str):
"""
Replace any instances of the given `old_str` string with `new_str` in any strings found in the given object.
Returns the updated object.
"""
if isinstance(obj, dict):
for key, value in obj.items():
obj[key] = _replace_strings(value, old_str, new_str)
elif isinstance(obj, list):
for index, item in enumerate(obj):
obj[index] = _replace_strings(item, old_str, new_str)
elif isinstance(obj, str):
return obj.replace(old_str, new_str)
return obj
def import_staged_content_from_user_clipboard(parent_key: UsageKey, request) -> tuple[XBlock | None, StaticFileNotices]:
"""
Import a block (along with its children and any required static assets) from
@@ -274,8 +359,6 @@ def import_staged_content_from_user_clipboard(parent_key: UsageKey, request) ->
"""
from cms.djangoapps.contentstore.views.preview import _load_preview_block
if not content_staging_api:
raise RuntimeError("The required content_staging app is not installed")
user_clipboard = content_staging_api.get_user_clipboard(request.user.id)
if not user_clipboard:
# Clipboard is empty or expired/error/loading
@@ -298,31 +381,56 @@ def import_staged_content_from_user_clipboard(parent_key: UsageKey, request) ->
tags=user_clipboard.content.tags,
)
# Now handle static files that need to go into Files & Uploads.
static_files = content_staging_api.get_staged_content_static_files(user_clipboard.content.id)
notices, substitutions = _import_files_into_course(
course_key=parent_key.context_key,
staged_content_id=user_clipboard.content.id,
static_files=static_files,
usage_key=new_xblock.scope_ids.usage_id,
)
# Rewrite the OLX's static asset references to point to the new
# locations for those assets. See _import_files_into_course for more
# info on why this is necessary.
if hasattr(new_xblock, 'data') and substitutions:
data_with_substitutions = new_xblock.data
for old_static_ref, new_static_ref in substitutions.items():
data_with_substitutions = data_with_substitutions.replace(
old_static_ref,
new_static_ref,
)
new_xblock.data = data_with_substitutions
usage_key = new_xblock.usage_key
if usage_key.block_type == 'video':
# The edx_video_id must always be new so as not
# to interfere with the data of the copied block
new_xblock.edx_video_id = create_external_video(display_name='external video')
store.update_item(new_xblock, request.user.id)
notices = _insert_static_files_into_downstream_xblock(new_xblock, user_clipboard.content.id, request)
return new_xblock, notices
def import_static_assets_for_library_sync(downstream_xblock: XBlock, lib_block: XBlock, request) -> StaticFileNotices:
"""
Import the static assets from the library xblock to the downstream xblock
through staged content. Also updates the OLX references to point to the new
locations of those assets in the downstream course.
Does not deal with permissions or REST stuff - do that before calling this.
Returns a summary of changes made to static files in the destination
course.
"""
if not lib_block.runtime.get_block_assets(lib_block, fetch_asset_data=False):
return StaticFileNotices()
staged_content = content_staging_api.stage_xblock_temporarily(lib_block, request.user.id, LIBRARY_SYNC_PURPOSE)
if not staged_content:
# expired/error/loading
return StaticFileNotices()
store = modulestore()
try:
with store.bulk_operations(downstream_xblock.context_key):
# FIXME: This code shouldn't have any special cases for specific block types like video
# in the future.
if downstream_xblock.usage_key.block_type == 'video' and not downstream_xblock.edx_video_id:
# If the `downstream_xblock` is a new created block, we need to create
# a new `edx_video_id` to import the transcripts.
downstream_xblock.edx_video_id = create_external_video(display_name='external video')
store.update_item(downstream_xblock, request.user.id)
# Now handle static files that need to go into Files & Uploads.
# If the required files already exist, nothing will happen besides updating the olx.
notices = _insert_static_files_into_downstream_xblock(downstream_xblock, staged_content.id, request)
finally:
staged_content.delete()
return notices
def _fetch_and_set_upstream_link(
copied_from_block: str,
copied_from_version_num: int,
@@ -330,7 +438,7 @@ def _fetch_and_set_upstream_link(
user: User
):
"""
Fetch and set upstream link for the given xblock. This function handles following cases:
Fetch and set upstream link for the given xblock which is being pasted. This function handles following cases:
* the xblock is copied from a v2 library; the library block is set as upstream.
* the xblock is copied from a course; no upstream is set, only copied_from_block is set.
* the xblock is copied from a course where the source block was imported from a library; the original libary block
@@ -339,7 +447,7 @@ def _fetch_and_set_upstream_link(
# Try to link the pasted block (downstream) to the copied block (upstream).
temp_xblock.upstream = copied_from_block
try:
UpstreamLink.get_for_block(temp_xblock)
upstream_link = UpstreamLink.get_for_block(temp_xblock)
except UpstreamLinkException:
# Usually this will fail. For example, if the copied block is a modulestore course block, it can't be an
# upstream. That's fine! Instead, we store a reference to where this block was copied from, in the
@@ -370,7 +478,8 @@ def _fetch_and_set_upstream_link(
# later wants to restore it, it will restore to the value that the field had when the block was pasted. Of
# course, if the author later syncs updates from a *future* published upstream version, then that will fetch
# new values from the published upstream content.
fetch_customizable_fields(upstream=temp_xblock, downstream=temp_xblock, user=user)
if isinstance(upstream_link.upstream_key, UsageKey): # only if upstream is a block, not a container
fetch_customizable_fields_from_block(downstream=temp_xblock, user=user, upstream=temp_xblock)
def _import_xml_node_to_parent(
@@ -447,16 +556,20 @@ def _import_xml_node_to_parent(
temp_xblock = xblock_class.parse_xml(node_without_children, runtime, keys)
child_nodes = list(node)
if issubclass(xblock_class, XmlMixin) and "x-is-pointer-node" in getattr(temp_xblock, "data", ""):
# Undo the "pointer node" hack if needed (e.g. for capa problems)
temp_xblock.data = re.sub(r'([^>]+) x-is-pointer-node="no"', r'\1', temp_xblock.data, count=1)
# Restore the original id_generator
runtime.id_generator = original_id_generator
if xblock_class.has_children and temp_xblock.children:
raise NotImplementedError("We don't yet support pasting XBlocks with children")
temp_xblock.parent = parent_key
if copied_from_block:
_fetch_and_set_upstream_link(copied_from_block, copied_from_version_num, temp_xblock, user)
# Save the XBlock into modulestore. We need to save the block and its parent for this to work:
new_xblock = store.update_item(temp_xblock, user.id, allow_not_found=True)
new_xblock.parent = parent_key
parent_xblock.children.append(new_xblock.location)
store.update_item(parent_xblock, user.id)
@@ -543,6 +656,9 @@ def _import_files_into_course(
if result is True:
new_files.append(file_data_obj.filename)
substitutions.update(substitution_for_file)
elif substitution_for_file:
# substitutions need to be made because OLX references to these files need to be updated
substitutions.update(substitution_for_file)
elif result is None:
pass # This file already exists; no action needed.
else:
@@ -578,8 +694,8 @@ def _import_file_into_course(
# we're not going to attempt to change.
if clipboard_file_path.startswith('static/'):
# If it's in this form, it came from a library and assumes component-local assets
file_path = clipboard_file_path.lstrip('static/')
import_path = f"components/{usage_key.block_type}/{usage_key.block_id}/{file_path}"
file_path = clipboard_file_path.removeprefix('static/')
import_path = build_components_import_path(usage_key, file_path)
filename = pathlib.Path(file_path).name
new_key = course_key.make_asset_key("asset", import_path.replace("/", "_"))
else:
@@ -613,13 +729,57 @@ def _import_file_into_course(
contentstore().save(content)
return True, {clipboard_file_path: f"static/{import_path}"}
elif current_file.content_digest == file_data_obj.md5_hash:
# The file already exists and matches exactly, so no action is needed
return None, {}
# The file already exists and matches exactly, so no action is needed except substitutions
return None, {clipboard_file_path: f"static/{import_path}"}
else:
# There is a conflict with some other file that has the same name.
return False, {}
def _import_transcripts(
block: XBlock,
staged_content_id: int,
static_files: list[content_staging_api.StagedContentFileData],
):
"""
Adds transcripts to VAL using the new edx_video_id.
"""
for file_data_obj in static_files:
clipboard_file_path = file_data_obj.filename
data = content_staging_api.get_staged_content_static_file_data(
staged_content_id,
clipboard_file_path
)
if data is None:
raise NotFoundError(file_data_obj.source_key)
if clipboard_file_path.startswith('static/'):
# If it's in this form, it came from a library and assumes component-local assets
file_path = clipboard_file_path.removeprefix('static/')
else:
# Otherwise it came from a course...
file_path = clipboard_file_path
filename = pathlib.Path(file_path).name
language_code = next((k for k, v in block.transcripts.items() if v == filename), None)
if language_code:
sjson_subs = Transcript.convert(
content=data,
input_format=Transcript.SRT,
output_format=Transcript.SJSON
).encode()
create_or_update_video_transcript(
video_id=block.edx_video_id,
language_code=language_code,
metadata={
'file_format': Transcript.SJSON,
'language_code': language_code
},
file_data=ContentFile(sjson_subs),
)
def is_item_in_course_tree(item):
"""
Check that the item is in the course tree.
@@ -653,3 +813,26 @@ def _get_usage_key_from_node(node, parent_id: str) -> UsageKey | None:
)
return usage_key
def concat_static_file_notices(notices: list[StaticFileNotices]) -> StaticFileNotices:
"""Combines multiple static file notices into a single object
Args:
notices: list of StaticFileNotices
Returns:
Single StaticFileNotices
"""
new_files = []
conflicting_files = []
error_files = []
for notice in notices:
new_files.extend(notice.new_files)
conflicting_files.extend(notice.conflicting_files)
error_files.extend(notice.error_files)
return StaticFileNotices(
new_files=list(set(new_files)),
conflicting_files=list(set(conflicting_files)),
error_files=list(set(error_files)),
)

View File

@@ -71,6 +71,5 @@ class Command(BaseCommand):
if error_keys:
msg = 'The following courses encountered errors and were not updated:\n'
for error_key in error_keys:
msg += f' - {error_key}\n'
msg += '\n'.join(f' - {error_key}' for error_key in error_keys)
logger.info(msg)

View File

@@ -51,16 +51,15 @@ class Command(BaseCommand):
tarball = tasks.create_export_tarball(library, library_key, {}, None)
except Exception as e:
raise CommandError(f'Failed to export "{library_key}" with "{e}"') # lint-amnesty, pylint: disable=raise-missing-from
else:
with tarball:
# Save generated archive with keyed filename
prefix, suffix, n = str(library_key).replace(':', '+'), '.tar.gz', 0
while os.path.exists(prefix + suffix):
n += 1
prefix = '{}_{}'.format(prefix.rsplit('_', 1)[0], n) if n > 1 else f'{prefix}_1'
filename = prefix + suffix
target = os.path.join(dest_path, filename)
tarball.file.seek(0)
with open(target, 'wb') as f:
shutil.copyfileobj(tarball.file, f)
print(f'Library "{library.location.library_key}" exported to "{target}"')
with tarball:
# Save generated archive with keyed filename
prefix, suffix, n = str(library_key).replace(':', '+'), '.tar.gz', 0
while os.path.exists(prefix + suffix):
n += 1
prefix = '{}_{}'.format(prefix.rsplit('_', 1)[0], n) if n > 1 else f'{prefix}_1'
filename = prefix + suffix
target = os.path.join(dest_path, filename)
tarball.file.seek(0)
with open(target, 'wb') as f:
shutil.copyfileobj(tarball.file, f)
print(f'Library "{library.location.library_key}" exported to "{target}"')

View File

@@ -19,7 +19,7 @@ import os
import re
import shutil
import tarfile
from tempfile import mkdtemp, mktemp
from tempfile import mkdtemp, mkstemp
from textwrap import dedent
from django.core.management.base import BaseCommand, CommandError
@@ -55,7 +55,9 @@ class Command(BaseCommand):
pipe_results = False
if filename is None:
filename = mktemp()
fd, filename = mkstemp()
os.close(fd)
os.unlink(filename)
pipe_results = True
export_course_to_tarfile(course_key, filename)

View File

@@ -0,0 +1,94 @@
"""
Management command to recreate upstream-downstream links in ComponentLink for course(s).
This command can be run for all the courses or for given list of courses.
"""
from __future__ import annotations
import logging
from datetime import datetime, timezone
from django.core.management.base import BaseCommand, CommandError
from django.utils.translation import gettext as _
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from ...tasks import create_or_update_upstream_links
log = logging.getLogger(__name__)
class Command(BaseCommand):
    """
    Recreate upstream links for course(s) in ComponentLink and ContainerLink tables.

    Examples:

    # Recreate upstream links for two courses.
    $ ./manage.py cms recreate_upstream_links --course course-v1:edX+DemoX.1+2014 \
    --course course-v1:edX+DemoX.2+2015
    # Force recreate upstream links for one or more courses including processed ones.
    $ ./manage.py cms recreate_upstream_links --course course-v1:edX+DemoX.1+2014 \
    --course course-v1:edX+DemoX.2+2015 --force
    # Recreate upstream links for all courses.
    $ ./manage.py cms recreate_upstream_links --all
    # Force recreate links for all courses including completely processed ones.
    $ ./manage.py cms recreate_upstream_links --all --force
    # Delete all links and force recreate links for all courses
    $ ./manage.py cms recreate_upstream_links --all --force --replace
    """

    def add_arguments(self, parser):
        parser.add_argument(
            '--course',
            metavar=_('COURSE_KEY'),
            action='append',
            help=_('Recreate links for xblocks under given course keys. For eg. course-v1:edX+DemoX.1+2014'),
            default=[],
        )
        parser.add_argument(
            '--all',
            action='store_true',
            help=_(
                'Recreate links for xblocks under all courses. NOTE: this can take long time depending'
                ' on number of course and xblocks'
            ),
        )
        parser.add_argument(
            '--force',
            action='store_true',
            help=_('Recreate links even for completely processed courses.'),
        )
        parser.add_argument(
            '--replace',
            action='store_true',
            help=_('Delete all and create links for given course(s).'),
        )

    def handle(self, *args, **options):
        """
        Validate the options and enqueue one link-recreation task per course.

        Raises:
            CommandError: if neither, or both, of --course and --all are provided.
        """
        courses = options['course']
        should_process_all = options['all']
        force = options['force']
        replace = options['replace']
        # Single timestamp shared by all tasks so every link recreated in this
        # run carries the same `created` value.
        time_now = datetime.now(tz=timezone.utc)
        if not courses and not should_process_all:
            raise CommandError('Either --course or --all argument should be provided.')
        if should_process_all and courses:
            raise CommandError('Only one of --course or --all argument should be provided.')
        if should_process_all:
            courses = CourseOverview.get_all_course_keys()
        for course in courses:
            # Fixed typo in the log message: "dowstream" -> "downstream".
            log.info(f"Start processing upstream->downstream links in course: {course}")
            try:
                # Keys from CourseOverview are already valid; this guards the
                # user-supplied --course values before queueing work.
                CourseKey.from_string(str(course))
            except InvalidKeyError:
                log.error(f"Invalid course key: {course}, skipping..")
                continue
            create_or_update_upstream_links.delay(str(course), force=force, replace=replace, created=time_now)

View File

@@ -0,0 +1,93 @@
# Generated by Django 4.2.18 on 2025-02-05 05:33
import uuid
import django.db.models.deletion
import opaque_keys.edx.django.models
import openedx_learning.lib.fields
import openedx_learning.lib.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('oel_publishing', '0002_alter_learningpackage_key_and_more'),
('contentstore', '0008_cleanstalecertificateavailabilitydatesconfig'),
]
operations = [
migrations.CreateModel(
name='LearningContextLinksStatus',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
(
'context_key',
opaque_keys.edx.django.models.CourseKeyField(
help_text='Linking status for course context key', max_length=255, unique=True
),
),
(
'status',
models.CharField(
choices=[
('pending', 'Pending'),
('processing', 'Processing'),
('failed', 'Failed'),
('completed', 'Completed'),
],
help_text='Status of links in given learning context/course.',
max_length=20,
),
),
('created', models.DateTimeField(validators=[openedx_learning.lib.validators.validate_utc_datetime])),
('updated', models.DateTimeField(validators=[openedx_learning.lib.validators.validate_utc_datetime])),
],
options={
'verbose_name': 'Learning Context Links status',
'verbose_name_plural': 'Learning Context Links status',
},
),
migrations.CreateModel(
name='PublishableEntityLink',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, unique=True, verbose_name='UUID')),
(
'upstream_usage_key',
opaque_keys.edx.django.models.UsageKeyField(
help_text='Upstream block usage key, this value cannot be null and useful to track upstream library blocks that do not exist yet',
max_length=255,
),
),
(
'upstream_context_key',
openedx_learning.lib.fields.MultiCollationCharField(
db_collations={'mysql': 'utf8mb4_bin', 'sqlite': 'BINARY'},
db_index=True,
help_text='Upstream context key i.e., learning_package/library key',
max_length=500,
),
),
('downstream_usage_key', opaque_keys.edx.django.models.UsageKeyField(max_length=255, unique=True)),
('downstream_context_key', opaque_keys.edx.django.models.CourseKeyField(db_index=True, max_length=255)),
('version_synced', models.IntegerField()),
('version_declined', models.IntegerField(blank=True, null=True)),
('created', models.DateTimeField(validators=[openedx_learning.lib.validators.validate_utc_datetime])),
('updated', models.DateTimeField(validators=[openedx_learning.lib.validators.validate_utc_datetime])),
(
'upstream_block',
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name='links',
to='oel_publishing.publishableentity',
),
),
],
options={
'verbose_name': 'Publishable Entity Link',
'verbose_name_plural': 'Publishable Entity Links',
},
),
]

View File

@@ -0,0 +1,59 @@
# Generated by Django 4.2.20 on 2025-04-22 15:08
import uuid
import django.db.models.deletion
import opaque_keys.edx.django.models
import openedx_learning.lib.fields
import openedx_learning.lib.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('oel_publishing', '0003_containers'),
('oel_components', '0003_remove_componentversioncontent_learner_downloadable'),
('contentstore', '0009_learningcontextlinksstatus_publishableentitylink'),
]
operations = [
migrations.RenameModel(
old_name='PublishableEntityLink',
new_name='ComponentLink',
),
migrations.AlterModelOptions(
name='componentlink',
options={'verbose_name': 'Component Link', 'verbose_name_plural': 'Component Links'},
),
migrations.AlterField(
model_name='componentlink',
name='upstream_block',
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name='links',
to='oel_components.component',
),
),
migrations.CreateModel(
name='ContainerLink',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('uuid', models.UUIDField(default=uuid.uuid4, editable=False, unique=True, verbose_name='UUID')),
('upstream_context_key', openedx_learning.lib.fields.MultiCollationCharField(db_collations={'mysql': 'utf8mb4_bin', 'sqlite': 'BINARY'}, db_index=True, help_text='Upstream context key i.e., learning_package/library key', max_length=500)),
('downstream_usage_key', opaque_keys.edx.django.models.UsageKeyField(max_length=255, unique=True)),
('downstream_context_key', opaque_keys.edx.django.models.CourseKeyField(db_index=True, max_length=255)),
('version_synced', models.IntegerField()),
('version_declined', models.IntegerField(blank=True, null=True)),
('created', models.DateTimeField(validators=[openedx_learning.lib.validators.validate_utc_datetime])),
('updated', models.DateTimeField(validators=[openedx_learning.lib.validators.validate_utc_datetime])),
('upstream_container_key', opaque_keys.edx.django.models.ContainerKeyField(help_text='Upstream block key (e.g. lct:...), this value cannot be null and is useful to track upstream library blocks that do not exist yet or were deleted.', max_length=255)),
('upstream_container', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='links', to='oel_publishing.container')),
],
options={
'abstract': False,
'verbose_name': 'Container Link',
'verbose_name_plural': 'Container Links',
},
),
]

View File

@@ -0,0 +1,25 @@
from django.db import migrations
from cms.djangoapps.contentstore.toggles import (
ENABLE_REACT_MARKDOWN_EDITOR
)
def create_flag(apps, schema_editor):
    """Ensure the React Markdown editor waffle flag exists, enabled for everyone."""
    flag_model = apps.get_model('waffle', 'Flag')
    defaults = {'everyone': True}
    flag_model.objects.get_or_create(name=ENABLE_REACT_MARKDOWN_EDITOR.name, defaults=defaults)
class Migration(migrations.Migration):
    """Data migration: create the ENABLE_REACT_MARKDOWN_EDITOR waffle flag via create_flag."""
    dependencies = [
        ('contentstore', '0010_container_link_models'),
        ('waffle', '0001_initial'),
    ]
    operations = [
        # Do not remove the flags for rollback. We don't want to lose originals if
        # they already existed, and it won't hurt if they are created.
        migrations.RunPython(create_flag, reverse_code=migrations.RunPython.noop),
    ]

View File

@@ -3,8 +3,25 @@ Models for contentstore
"""
from datetime import datetime, timezone
from config_models.models import ConfigurationModel
from django.db import models
from django.db.models import Count, F, Q, QuerySet, Max
from django.db.models.fields import IntegerField, TextField
from django.db.models.functions import Coalesce
from django.db.models.lookups import GreaterThan
from django.utils.translation import gettext_lazy as _
from opaque_keys.edx.django.models import CourseKeyField, ContainerKeyField, UsageKeyField
from opaque_keys.edx.keys import CourseKey, UsageKey
from opaque_keys.edx.locator import LibraryContainerLocator
from openedx_learning.api.authoring import get_published_version
from openedx_learning.api.authoring_models import Component, Container
from openedx_learning.lib.fields import (
immutable_uuid_field,
key_field,
manual_date_time_field,
)
class VideoUploadConfig(ConfigurationModel):
@@ -63,3 +80,403 @@ class CleanStaleCertificateAvailabilityDatesConfig(ConfigurationModel):
"`clean_stale_certificate_available_dates` management command.' See the management command for options."
)
)
class EntityLinkBase(models.Model):
    """
    Abstract base class that defines fields and functions for storing link between two publishable entities
    or links between publishable entity and a course xblock.

    Concrete subclasses (ComponentLink, ContainerLink) add the FK to the actual
    upstream entity plus its key field.
    """
    uuid = immutable_uuid_field()
    # Search by library/upstream context key
    upstream_context_key = key_field(
        help_text=_("Upstream context key i.e., learning_package/library key"),
        db_index=True,
    )
    # A downstream entity can only link to single upstream entity
    # whereas an entity can be upstream for multiple downstream entities.
    downstream_usage_key = UsageKeyField(max_length=255, unique=True)
    # Search by course/downstream key
    downstream_context_key = CourseKeyField(max_length=255, db_index=True)
    # Upstream version number last synced into the downstream block.
    version_synced = models.IntegerField()
    # Upstream version number the author explicitly declined to sync; null when
    # nothing has been declined.
    version_declined = models.IntegerField(null=True, blank=True)
    # Timestamps are set manually by update_or_create(), not auto_now -- so that
    # ``updated`` only moves when a tracked field actually changes.
    created = manual_date_time_field()
    updated = manual_date_time_field()
    class Meta:
        abstract = True
class ComponentLink(EntityLinkBase):
    """
    This represents link between any two publishable entities or link between publishable entity and a course
    XBlock. It helps in tracking relationship between XBlocks imported from libraries and used in different courses.
    """
    # FK is nullable so the link survives deletion of the upstream component;
    # ``upstream_usage_key`` below keeps the identity in that case.
    upstream_block = models.ForeignKey(
        Component,
        on_delete=models.SET_NULL,
        related_name="links",
        null=True,
        blank=True,
    )
    upstream_usage_key = UsageKeyField(
        max_length=255,
        help_text=_(
            "Upstream block usage key, this value cannot be null"
            " and useful to track upstream library blocks that do not exist yet"
        )
    )
    class Meta:
        verbose_name = _("Component Link")
        verbose_name_plural = _("Component Links")
    def __str__(self):
        # Human-readable "upstream -> downstream" form for admin/debugging.
        return f"ComponentLink<{self.upstream_usage_key}->{self.downstream_usage_key}>"
    @property
    def upstream_version_num(self) -> int | None:
        """
        Returns upstream block version number if available.

        NOTE(review): assumes ``upstream_block`` is not None; raises
        AttributeError for links whose upstream component was deleted.
        """
        published_version = get_published_version(self.upstream_block.publishable_entity.id)
        return published_version.version_num if published_version else None
    @property
    def upstream_context_title(self) -> str:
        """
        Returns upstream context title.
        """
        # NOTE(review): same caveat as upstream_version_num re: null upstream_block.
        return self.upstream_block.publishable_entity.learning_package.title
    @classmethod
    def filter_links(
        cls,
        **link_filter,
    ) -> QuerySet["EntityLinkBase"]:
        """
        Get all links along with sync flag, upstream context title and version, with optional filtering.

        Accepts any ORM filter kwargs, plus the special ``ready_to_sync`` bool
        which filters on the annotation computed below.
        """
        # Pop the pseudo-filter first: it must be applied *after* the annotation exists.
        ready_to_sync = link_filter.pop('ready_to_sync', None)
        result = cls.objects.filter(**link_filter).select_related(
            "upstream_block__publishable_entity__published__version",
            "upstream_block__publishable_entity__learning_package",
            "upstream_block__publishable_entity__published__publish_log_record__publish_log",
        ).annotate(
            # A link is "ready to sync" when the published upstream version is
            # strictly newer than both the last-synced and last-declined
            # versions (missing values are treated as 0 via Coalesce).
            ready_to_sync=(
                GreaterThan(
                    Coalesce("upstream_block__publishable_entity__published__version__version_num", 0),
                    Coalesce("version_synced", 0)
                ) & GreaterThan(
                    Coalesce("upstream_block__publishable_entity__published__version__version_num", 0),
                    Coalesce("version_declined", 0)
                )
            )
        )
        if ready_to_sync is not None:
            result = result.filter(ready_to_sync=ready_to_sync)
        return result
    @classmethod
    def summarize_by_downstream_context(cls, downstream_context_key: CourseKey) -> QuerySet:
        """
        Returns a summary of links by upstream context for given downstream_context_key.
        Example:
        [
            {
                "upstream_context_title": "CS problems 3",
                "upstream_context_key": "lib:OpenedX:CSPROB3",
                "ready_to_sync_count": 11,
                "total_count": 14,
                "last_published_at": "2025-05-02T20:20:44.989042Z"
            },
            {
                "upstream_context_title": "CS problems 2",
                "upstream_context_key": "lib:OpenedX:CSPROB2",
                "ready_to_sync_count": 15,
                "total_count": 24,
                "last_published_at": "2025-05-03T21:20:44.989042Z"
            },
        ]
        """
        # values() before annotate() makes this a GROUP BY on
        # (upstream_context_key, upstream_context_title).
        result = cls.filter_links(downstream_context_key=downstream_context_key).values(
            "upstream_context_key",
            upstream_context_title=F("upstream_block__publishable_entity__learning_package__title"),
        ).annotate(
            # Second positional arg of Count is ``filter=`` -- counts only rows
            # where the ready_to_sync annotation (from filter_links) is True.
            ready_to_sync_count=Count("id", Q(ready_to_sync=True)),
            total_count=Count("id"),
            last_published_at=Max(
                "upstream_block__publishable_entity__published__publish_log_record__publish_log__published_at"
            )
        )
        return result
    @classmethod
    def update_or_create(
        cls,
        upstream_block: Component | None,
        /,
        upstream_usage_key: UsageKey,
        upstream_context_key: str,
        downstream_usage_key: UsageKey,
        downstream_context_key: CourseKey,
        version_synced: int,
        version_declined: int | None = None,
        created: datetime | None = None,
    ) -> "ComponentLink":
        """
        Update or create entity link. This will only update `updated` field if something has changed.

        Implemented with a manual get/compare/save rather than
        ``objects.update_or_create`` so that ``updated`` is not touched when
        nothing changed.
        """
        if not created:
            created = datetime.now(tz=timezone.utc)
        new_values = {
            'upstream_usage_key': upstream_usage_key,
            'upstream_context_key': upstream_context_key,
            'downstream_usage_key': downstream_usage_key,
            'downstream_context_key': downstream_context_key,
            'version_synced': version_synced,
            'version_declined': version_declined,
        }
        # Only overwrite the FK when a live upstream component was supplied;
        # otherwise keep whatever (possibly null) value the row already has.
        if upstream_block:
            new_values['upstream_block'] = upstream_block
        try:
            link = cls.objects.get(downstream_usage_key=downstream_usage_key)
            has_changes = False
            for key, new_value in new_values.items():
                prev_value = getattr(link, key)
                if prev_value != new_value:
                    has_changes = True
                    setattr(link, key, new_value)
            if has_changes:
                link.updated = created
                link.save()
        except cls.DoesNotExist:
            link = cls(**new_values)
            link.created = created
            link.updated = created
            link.save()
        return link
class ContainerLink(EntityLinkBase):
    """
    This represents link between any two publishable entities or link between publishable entity and a course
    xblock. It helps in tracking relationship between xblocks imported from libraries and used in different courses.

    Container analogue of ComponentLink: tracks units/sections imported from
    library containers rather than individual components.
    """
    # FK is nullable so the link survives deletion of the upstream container;
    # ``upstream_container_key`` below keeps the identity in that case.
    upstream_container = models.ForeignKey(
        Container,
        on_delete=models.SET_NULL,
        related_name="links",
        null=True,
        blank=True,
    )
    upstream_container_key = ContainerKeyField(
        max_length=255,
        help_text=_(
            "Upstream block key (e.g. lct:...), this value cannot be null "
            "and is useful to track upstream library blocks that do not exist yet "
            "or were deleted."
        )
    )
    class Meta:
        verbose_name = _("Container Link")
        verbose_name_plural = _("Container Links")
    def __str__(self):
        # Human-readable "upstream -> downstream" form for admin/debugging.
        return f"ContainerLink<{self.upstream_container_key}->{self.downstream_usage_key}>"
    @property
    def upstream_version_num(self) -> int | None:
        """
        Returns upstream container version number if available.

        NOTE(review): assumes ``upstream_container`` is not None; raises
        AttributeError for links whose upstream container was deleted.
        """
        published_version = get_published_version(self.upstream_container.publishable_entity.id)
        return published_version.version_num if published_version else None
    @property
    def upstream_context_title(self) -> str:
        """
        Returns upstream context title.
        """
        # NOTE(review): same caveat as upstream_version_num re: null upstream_container.
        return self.upstream_container.publishable_entity.learning_package.title
    @classmethod
    def filter_links(
        cls,
        **link_filter,
    ) -> QuerySet["EntityLinkBase"]:
        """
        Get all links along with sync flag, upstream context title and version, with optional filtering.

        Accepts any ORM filter kwargs, plus the special ``ready_to_sync`` bool
        which filters on the annotation computed below.
        """
        # Pop the pseudo-filter first: it must be applied *after* the annotation exists.
        ready_to_sync = link_filter.pop('ready_to_sync', None)
        result = cls.objects.filter(**link_filter).select_related(
            "upstream_container__publishable_entity__published__version",
            "upstream_container__publishable_entity__learning_package",
            "upstream_container__publishable_entity__published__publish_log_record__publish_log",
        ).annotate(
            # Ready to sync when the published upstream version is strictly newer
            # than both the synced and declined versions (nulls coalesced to 0).
            ready_to_sync=(
                GreaterThan(
                    Coalesce("upstream_container__publishable_entity__published__version__version_num", 0),
                    Coalesce("version_synced", 0)
                ) & GreaterThan(
                    Coalesce("upstream_container__publishable_entity__published__version__version_num", 0),
                    Coalesce("version_declined", 0)
                )
            )
        )
        if ready_to_sync is not None:
            result = result.filter(ready_to_sync=ready_to_sync)
        return result
    @classmethod
    def summarize_by_downstream_context(cls, downstream_context_key: CourseKey) -> QuerySet:
        """
        Returns a summary of links by upstream context for given downstream_context_key.
        Example:
        [
            {
                "upstream_context_title": "CS problems 3",
                "upstream_context_key": "lib:OpenedX:CSPROB3",
                "ready_to_sync_count": 11,
                "total_count": 14,
                "last_published_at": "2025-05-02T20:20:44.989042Z"
            },
            {
                "upstream_context_title": "CS problems 2",
                "upstream_context_key": "lib:OpenedX:CSPROB2",
                "ready_to_sync_count": 15,
                "total_count": 24,
                "last_published_at": "2025-05-03T21:20:44.989042Z"
            },
        ]
        """
        # values() before annotate() makes this a GROUP BY on
        # (upstream_context_key, upstream_context_title).
        result = cls.filter_links(downstream_context_key=downstream_context_key).values(
            "upstream_context_key",
            upstream_context_title=F("upstream_container__publishable_entity__learning_package__title"),
        ).annotate(
            # Second positional arg of Count is ``filter=`` -- counts only rows
            # where the ready_to_sync annotation (from filter_links) is True.
            ready_to_sync_count=Count("id", Q(ready_to_sync=True)),
            total_count=Count('id'),
            last_published_at=Max(
                "upstream_container__publishable_entity__published__publish_log_record__publish_log__published_at"
            )
        )
        return result
    @classmethod
    def update_or_create(
        cls,
        upstream_container_id: int | None,
        /,
        upstream_container_key: LibraryContainerLocator,
        upstream_context_key: str,
        downstream_usage_key: UsageKey,
        downstream_context_key: CourseKey,
        version_synced: int,
        version_declined: int | None = None,
        created: datetime | None = None,
    ) -> "ContainerLink":
        """
        Update or create entity link. This will only update `updated` field if something has changed.

        Implemented with a manual get/compare/save rather than
        ``objects.update_or_create`` so that ``updated`` is not touched when
        nothing changed.
        """
        if not created:
            created = datetime.now(tz=timezone.utc)
        new_values = {
            'upstream_container_key': upstream_container_key,
            'upstream_context_key': upstream_context_key,
            'downstream_usage_key': downstream_usage_key,
            'downstream_context_key': downstream_context_key,
            'version_synced': version_synced,
            'version_declined': version_declined,
        }
        # Only overwrite the FK when a live upstream container id was supplied;
        # otherwise keep whatever (possibly null) value the row already has.
        if upstream_container_id:
            new_values['upstream_container_id'] = upstream_container_id
        try:
            link = cls.objects.get(downstream_usage_key=downstream_usage_key)
            has_changes = False
            for key, new_value in new_values.items():
                prev_value = getattr(link, key)
                if prev_value != new_value:
                    has_changes = True
                    setattr(link, key, new_value)
            if has_changes:
                link.updated = created
                link.save()
        except cls.DoesNotExist:
            link = cls(**new_values)
            link.created = created
            link.updated = created
            link.save()
        return link
class LearningContextLinksStatusChoices(models.TextChoices):
    """
    Enumerates the states that a LearningContextLinksStatus can be in.
    """
    # Links have not been computed yet for this context.
    PENDING = "pending", _("Pending")
    # Link computation is currently in progress.
    PROCESSING = "processing", _("Processing")
    # The last link-computation attempt failed.
    FAILED = "failed", _("Failed")
    # Links are up to date for this context.
    COMPLETED = "completed", _("Completed")
class LearningContextLinksStatus(models.Model):
    """
    This table stores current processing status of upstream-downstream links in ComponentLink table for a
    course or a learning context.

    There is at most one row per context; ``status`` moves through the states
    declared in ``LearningContextLinksStatusChoices``.
    """
    context_key = CourseKeyField(
        max_length=255,
        # Single entry for a learning context or course
        unique=True,
        help_text=_("Linking status for course context key"),
    )
    status = models.CharField(
        max_length=20,
        choices=LearningContextLinksStatusChoices.choices,
        help_text=_("Status of links in given learning context/course."),
    )
    # Timestamps are set manually (see get_or_create / update_status), not auto_now.
    created = manual_date_time_field()
    updated = manual_date_time_field()

    class Meta:
        verbose_name = _("Learning Context Links status")
        verbose_name_plural = _("Learning Context Links status")

    def __str__(self):
        return f"{self.status}|{self.context_key}"

    @classmethod
    def get_or_create(cls, context_key: str, created: datetime | None = None) -> "LearningContextLinksStatus":
        """
        Get or create course link status row from LearningContextLinksStatus table for given course key.

        Newly created rows start in the PENDING state; existing rows are
        returned unchanged.

        Args:
            context_key: Learning context or Course key
            created: Optional timestamp for new rows; defaults to now (UTC).

        Returns:
            LearningContextLinksStatus object
        """
        if not created:
            created = datetime.now(tz=timezone.utc)
        # NOTE: do not unpack into ``_`` here -- that name is the module-level
        # gettext_lazy alias, and shadowing it invites subtle i18n bugs.
        status, _created_flag = cls.objects.get_or_create(
            context_key=context_key,
            defaults={
                'status': LearningContextLinksStatusChoices.PENDING,
                'created': created,
                'updated': created,
            },
        )
        return status

    def update_status(
        self,
        status: LearningContextLinksStatusChoices,
        updated: datetime | None = None
    ) -> None:
        """
        Updates entity links processing status of given learning context.

        Sets ``updated`` to the supplied timestamp, or now (UTC), and saves.
        """
        self.status = status
        self.updated = updated or datetime.now(tz=timezone.utc)
        self.save()

View File

@@ -4,6 +4,7 @@ Serializers for v0 contentstore API.
from .advanced_settings import AdvancedSettingsFieldSerializer, CourseAdvancedSettingsSerializer
from .assets import AssetSerializer
from .authoring_grading import CourseGradingModelSerializer
from .course_optimizer import LinkCheckSerializer
from .tabs import CourseTabSerializer, CourseTabUpdateSerializer, TabIDLocatorSerializer
from .transcripts import TranscriptSerializer, YoutubeTranscriptCheckSerializer, YoutubeTranscriptUploadSerializer
from .xblock import XblockSerializer

View File

@@ -14,7 +14,13 @@ class GradersSerializer(serializers.Serializer):
weight = serializers.IntegerField()
id = serializers.IntegerField()
class Meta:
ref_name = "authoring_grading.Graders.v0"
class CourseGradingModelSerializer(serializers.Serializer):
    """ Serializer for course grading model data """
    # Full list of grader configurations; may be null or empty for courses
    # without a customized grading policy.
    graders = GradersSerializer(many=True, allow_null=True, allow_empty=True)
    class Meta:
        # Explicit ref_name avoids Swagger schema-name collisions across API versions.
        ref_name = "authoring_grading.CourseGrading.v0"

View File

@@ -0,0 +1,49 @@
"""
API Serializers for Course Optimizer
"""
from rest_framework import serializers
class LinkCheckBlockSerializer(serializers.Serializer):
    """ Serializer for broken links block model data """
    # Field names are camelCase to match the frontend payload shape.
    id = serializers.CharField(required=True, allow_null=False, allow_blank=False)
    displayName = serializers.CharField(required=True, allow_null=False, allow_blank=True)
    url = serializers.CharField(required=True, allow_null=False, allow_blank=False)
    brokenLinks = serializers.ListField(required=False)
    lockedLinks = serializers.ListField(required=False)
    externalForbiddenLinks = serializers.ListField(required=False)
class LinkCheckUnitSerializer(serializers.Serializer):
    """ Serializer for broken links unit model data: a unit and its blocks. """
    id = serializers.CharField(required=True, allow_null=False, allow_blank=False)
    displayName = serializers.CharField(required=True, allow_null=False, allow_blank=True)
    blocks = LinkCheckBlockSerializer(many=True)
class LinkCheckSubsectionSerializer(serializers.Serializer):
    """ Serializer for broken links subsection model data: a subsection and its units. """
    id = serializers.CharField(required=True, allow_null=False, allow_blank=False)
    displayName = serializers.CharField(required=True, allow_null=False, allow_blank=True)
    units = LinkCheckUnitSerializer(many=True)
class LinkCheckSectionSerializer(serializers.Serializer):
    """ Serializer for broken links section model data: a section and its subsections. """
    id = serializers.CharField(required=True, allow_null=False, allow_blank=False)
    displayName = serializers.CharField(required=True, allow_null=False, allow_blank=True)
    subsections = LinkCheckSubsectionSerializer(many=True)
class LinkCheckOutputSerializer(serializers.Serializer):
    """ Serializer for broken links output model data: the full course section tree. """
    sections = LinkCheckSectionSerializer(many=True)
class LinkCheckSerializer(serializers.Serializer):
    """ Serializer for broken links """
    # Task status string (e.g. from UserTaskStatus); always present.
    LinkCheckStatus = serializers.CharField(required=True)
    # The remaining fields are only present once/if the link-check task produced them.
    LinkCheckCreatedAt = serializers.DateTimeField(required=False)
    LinkCheckOutput = LinkCheckOutputSerializer(required=False)
    LinkCheckError = serializers.CharField(required=False)

View File

@@ -6,14 +6,11 @@ import json
import ddt
from django.test import override_settings
from django.urls import reverse
from edx_toggles.toggles.testutils import override_waffle_flag
from milestones.tests.utils import MilestonesTestCaseMixin
from cms.djangoapps.contentstore.tests.utils import CourseTestCase
from cms.djangoapps.contentstore.toggles import ENABLE_NEW_STUDIO_ADVANCED_SETTINGS_PAGE
@override_waffle_flag(ENABLE_NEW_STUDIO_ADVANCED_SETTINGS_PAGE, active=True)
@ddt.ddt
class CourseAdvanceSettingViewTest(CourseTestCase, MilestonesTestCaseMixin):
"""

View File

@@ -60,13 +60,8 @@ class AssetsViewTestCase(AuthorizeStaffTestCase):
}
),
)
@patch(
f"cms.djangoapps.contentstore.rest_api.{VERSION}.views.xblock.toggles.use_studio_content_api",
return_value=True,
)
def make_request(
self,
mock_use_studio_content_api,
mock_handle_assets,
run_assertions=None,
course_id=None,
@@ -125,13 +120,6 @@ class AssetsViewGetTest(AssetsViewTestCase, ModuleStoreTestCase, APITestCase):
def send_request(self, url, data):
return self.client.get(url)
def test_api_behind_feature_flag(self):
# should return 404 if the feature flag is not enabled
url = self.get_url()
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_assets_handler_called_with_correct_arguments(self):
self.client.login(
username=self.course_instructor.username, password=self.password
@@ -182,13 +170,6 @@ class AssetsViewPostTest(AssetsViewTestCase, ModuleStoreTestCase, APITestCase):
def send_request(self, url, data):
return self.client.post(url, data=data, format="multipart")
def test_api_behind_feature_flag(self):
# should return 404 if the feature flag is not enabled
url = self.get_url()
response = self.client.post(url)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_assets_handler_called_with_correct_arguments(self):
self.client.login(
username=self.course_instructor.username, password=self.password
@@ -232,13 +213,6 @@ class AssetsViewPutTest(AssetsViewTestCase, ModuleStoreTestCase, APITestCase):
def send_request(self, url, data):
return self.client.put(url, data=data, format="json")
def test_api_behind_feature_flag(self):
# should return 404 if the feature flag is not enabled
url = self.get_url()
response = self.client.put(url)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_assets_handler_called_with_correct_arguments(self):
self.client.login(
username=self.course_instructor.username, password=self.password
@@ -277,13 +251,6 @@ class AssetsViewDeleteTest(AssetsViewTestCase, ModuleStoreTestCase, APITestCase)
def send_request(self, url, data):
return self.client.delete(url)
def test_api_behind_feature_flag(self):
# should return 404 if the feature flag is not enabled
url = self.get_url()
response = self.client.delete(url)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_assets_handler_called_with_correct_arguments(self):
self.client.login(
username=self.course_instructor.username, password=self.password

View File

@@ -0,0 +1,79 @@
"""
Unit tests for course optimizer
"""
from django.test import TestCase
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from django.urls import reverse
from cms.djangoapps.contentstore.tests.test_utils import AuthorizeStaffTestCase
class TestGetLinkCheckStatus(AuthorizeStaffTestCase, ModuleStoreTestCase, TestCase):
    '''
    Authentication and Authorization Tests for CourseOptimizer.
    For concrete tests that are run, check `AuthorizeStaffTestCase`.
    '''
    def make_request(self, course_id=None, data=None, **kwargs):
        # NOTE(review): ``course_id`` and ``**kwargs`` are accepted but ignored --
        # the URL is always built from ``self.course.id``. See notes on the tests below.
        url = self.get_url(self.course.id)
        response = self.client.get(url, data)
        return response
    def get_url(self, course_key):
        # NOTE(review): the ``course_key`` parameter is ignored; reverse() uses
        # ``self.course.id`` directly -- confirm this is intentional.
        url = reverse(
            'cms.djangoapps.contentstore:v0:link_check_status',
            kwargs={'course_id': self.course.id}
        )
        return url
    def test_produces_4xx_when_invalid_course_id(self):
        '''
        Test course_id validation
        '''
        # NOTE(review): because make_request() ignores ``course_id``, this request
        # actually hits the valid ``self.course.id`` URL; the 4xx presumably comes
        # from the unauthenticated client, not from course-id validation -- verify.
        response = self.make_request(course_id='invalid_course_id')
        self.assertIn(response.status_code, range(400, 500))
    def test_produces_4xx_when_additional_kwargs(self):
        '''
        Test additional kwargs validation
        '''
        # NOTE(review): the extra kwarg is swallowed by make_request()'s **kwargs;
        # verify this test exercises what its name claims.
        response = self.make_request(course_id=self.course.id, malicious_kwarg='malicious_kwarg')
        self.assertIn(response.status_code, range(400, 500))
class TestPostLinkCheck(AuthorizeStaffTestCase, ModuleStoreTestCase, TestCase):
    '''
    Authentication and Authorization Tests for CourseOptimizer.
    For concrete tests that are run, check `AuthorizeStaffTestCase`.
    '''
    def make_request(self, course_id=None, data=None, **kwargs):
        # NOTE(review): ``course_id`` and ``**kwargs`` are accepted but ignored --
        # the URL is always built from ``self.course.id``. See notes on the tests below.
        url = self.get_url(self.course.id)
        response = self.client.post(url, data)
        return response
    def get_url(self, course_key):
        # NOTE(review): the ``course_key`` parameter is ignored; reverse() uses
        # ``self.course.id`` directly -- confirm this is intentional.
        url = reverse(
            'cms.djangoapps.contentstore:v0:link_check',
            kwargs={'course_id': self.course.id}
        )
        return url
    def test_produces_4xx_when_invalid_course_id(self):
        '''
        Test course_id validation
        '''
        # NOTE(review): because make_request() ignores ``course_id``, this request
        # actually hits the valid ``self.course.id`` URL; the 4xx presumably comes
        # from the unauthenticated client, not from course-id validation -- verify.
        response = self.make_request(course_id='invalid_course_id')
        self.assertIn(response.status_code, range(400, 500))
    def test_produces_4xx_when_additional_kwargs(self):
        '''
        Test additional kwargs validation
        '''
        # NOTE(review): the extra kwarg is swallowed by make_request()'s **kwargs;
        # verify this test exercises what its name claims.
        response = self.make_request(course_id=self.course.id, malicious_kwarg='malicious_kwarg')
        self.assertIn(response.status_code, range(400, 500))
    def test_produces_4xx_when_unexpected_data(self):
        '''
        Test validation when request contains unexpected data
        '''
        response = self.make_request(course_id=self.course.id, data={'unexpected_data': 'unexpected_data'})
        self.assertIn(response.status_code, range(400, 500))

View File

@@ -8,15 +8,12 @@ from urllib.parse import urlencode
import ddt
from django.urls import reverse
from edx_toggles.toggles.testutils import override_waffle_flag
from xmodule.modulestore.tests.factories import BlockFactory
from xmodule.tabs import CourseTabList
from cms.djangoapps.contentstore.tests.utils import CourseTestCase
from cms.djangoapps.contentstore.toggles import ENABLE_NEW_STUDIO_CUSTOM_PAGES
@override_waffle_flag(ENABLE_NEW_STUDIO_CUSTOM_PAGES, active=True)
@ddt.ddt
class TabsAPITests(CourseTestCase):
"""

View File

@@ -55,13 +55,8 @@ class XBlockViewTestCase(AuthorizeStaffTestCase):
}
),
)
@patch(
f"cms.djangoapps.contentstore.rest_api.{VERSION}.views.xblock.toggles.use_studio_content_api",
return_value=True,
)
def make_request(
self,
mock_use_studio_content_api,
mock_handle_xblock,
run_assertions=None,
course_id=None,
@@ -111,13 +106,6 @@ class XBlockViewGetTest(XBlockViewTestCase, ModuleStoreTestCase, APITestCase):
def send_request(self, url, data):
return self.client.get(url)
def test_api_behind_feature_flag(self):
# should return 404 if the feature flag is not enabled
url = self.get_url()
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_xblock_handler_called_with_correct_arguments(self):
self.client.login(
username=self.course_instructor.username, password=self.password
@@ -167,13 +155,6 @@ class XBlockViewPostTest(XBlockViewTestCase, ModuleStoreTestCase, APITestCase):
def send_request(self, url, data):
return self.client.post(url, data=data, format="json")
def test_api_behind_feature_flag(self):
# should return 404 if the feature flag is not enabled
url = self.get_url()
response = self.client.post(url)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_xblock_handler_called_with_correct_arguments(self):
self.client.login(
username=self.course_instructor.username, password=self.password
@@ -218,13 +199,6 @@ class XBlockViewPutTest(XBlockViewTestCase, ModuleStoreTestCase, APITestCase):
def send_request(self, url, data):
return self.client.put(url, data=data, format="json")
def test_api_behind_feature_flag(self):
# should return 404 if the feature flag is not enabled
url = self.get_url()
response = self.client.put(url)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_xblock_handler_called_with_correct_arguments(self):
self.client.login(
username=self.course_instructor.username, password=self.password
@@ -269,13 +243,6 @@ class XBlockViewPatchTest(XBlockViewTestCase, ModuleStoreTestCase, APITestCase):
def send_request(self, url, data):
return self.client.patch(url, data=data, format="json")
def test_api_behind_feature_flag(self):
# should return 404 if the feature flag is not enabled
url = self.get_url()
response = self.client.patch(url)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_xblock_handler_called_with_correct_arguments(self):
self.client.login(
username=self.course_instructor.username, password=self.password
@@ -310,13 +277,6 @@ class XBlockViewDeleteTest(XBlockViewTestCase, ModuleStoreTestCase, APITestCase)
def send_request(self, url, data):
return self.client.delete(url)
def test_api_behind_feature_flag(self):
# should return 404 if the feature flag is not enabled
url = self.get_url()
response = self.client.delete(url)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_xblock_handler_called_with_correct_arguments(self):
self.client.login(
username=self.course_instructor.username, password=self.password

View File

@@ -7,14 +7,16 @@ from openedx.core.constants import COURSE_ID_PATTERN
from .views import (
AdvancedCourseSettingsView,
APIHeartBeatView,
AuthoringGradingView,
CourseTabSettingsView,
CourseTabListView,
CourseTabReorderView,
LinkCheckView,
LinkCheckStatusView,
TranscriptView,
YoutubeTranscriptCheckView,
YoutubeTranscriptUploadView,
APIHeartBeatView
)
from .views import assets
from .views import authoring_videos
@@ -63,7 +65,7 @@ urlpatterns = [
authoring_videos.VideoEncodingsDownloadView.as_view(), name='cms_api_videos_encodings'
),
re_path(
fr'grading/{settings.COURSE_ID_PATTERN}',
fr'grading/{settings.COURSE_ID_PATTERN}$',
AuthoringGradingView.as_view(), name='cms_api_update_grading'
),
path(
@@ -102,4 +104,14 @@ urlpatterns = [
fr'^youtube_transcripts/{settings.COURSE_ID_PATTERN}/upload?$',
YoutubeTranscriptUploadView.as_view(), name='cms_api_youtube_transcripts_upload'
),
# Course Optimizer
re_path(
fr'^link_check/{settings.COURSE_ID_PATTERN}$',
LinkCheckView.as_view(), name='link_check'
),
re_path(
fr'^link_check_status/{settings.COURSE_ID_PATTERN}$',
LinkCheckStatusView.as_view(), name='link_check_status'
),
]

View File

@@ -2,7 +2,8 @@
Views for v0 contentstore API.
"""
from .advanced_settings import AdvancedCourseSettingsView
from .api_heartbeat import APIHeartBeatView
from .authoring_grading import AuthoringGradingView
from .course_optimizer import LinkCheckView, LinkCheckStatusView
from .tabs import CourseTabSettingsView, CourseTabListView, CourseTabReorderView
from .transcripts import TranscriptView, YoutubeTranscriptCheckView, YoutubeTranscriptUploadView
from .api_heartbeat import APIHeartBeatView

View File

@@ -5,7 +5,6 @@ from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework import status
from openedx.core.lib.api.view_utils import DeveloperErrorViewMixin, view_auth_classes
import cms.djangoapps.contentstore.toggles as toggles
class APIHeartBeatView(DeveloperErrorViewMixin, APIView):
@@ -43,6 +42,4 @@ class APIHeartBeatView(DeveloperErrorViewMixin, APIView):
}
```
"""
if toggles.use_studio_content_api():
return Response({'status': 'heartbeat successful'}, status=status.HTTP_200_OK)
return Response(status=status.HTTP_403_FORBIDDEN)
return Response({'status': 'heartbeat successful'}, status=status.HTTP_200_OK)

View File

@@ -4,7 +4,6 @@ Public rest API endpoints for the CMS API Assets.
import logging
from rest_framework.generics import CreateAPIView, RetrieveAPIView, UpdateAPIView, DestroyAPIView
from django.views.decorators.csrf import csrf_exempt
from django.http import Http404
from openedx.core.lib.api.view_utils import DeveloperErrorViewMixin, view_auth_classes
from common.djangoapps.util.json_request import expect_json_in_class_view
@@ -12,7 +11,6 @@ from common.djangoapps.util.json_request import expect_json_in_class_view
from cms.djangoapps.contentstore.api import course_author_access_required
from cms.djangoapps.contentstore.asset_storage_handlers import handle_assets
import cms.djangoapps.contentstore.toggles as contentstore_toggles
from ..serializers.assets import AssetSerializer
from .utils import validate_request_with_serializer
@@ -20,7 +18,6 @@ from rest_framework.parsers import (MultiPartParser, FormParser, JSONParser)
from openedx.core.lib.api.parsers import TypedFileUploadParser
log = logging.getLogger(__name__)
toggles = contentstore_toggles
@view_auth_classes()
@@ -33,17 +30,6 @@ class AssetsCreateRetrieveView(DeveloperErrorViewMixin, CreateAPIView, RetrieveA
serializer_class = AssetSerializer
parser_classes = (JSONParser, MultiPartParser, FormParser, TypedFileUploadParser)
def dispatch(self, request, *args, **kwargs):
# TODO: probably want to refactor this to a decorator.
"""
The dispatch method of a View class handles HTTP requests in general
and calls other methods to handle specific HTTP methods.
We use this to raise a 404 if the content api is disabled.
"""
if not toggles.use_studio_content_api():
raise Http404
return super().dispatch(request, *args, **kwargs)
@csrf_exempt
@course_author_access_required
@validate_request_with_serializer
@@ -66,17 +52,6 @@ class AssetsUpdateDestroyView(DeveloperErrorViewMixin, UpdateAPIView, DestroyAPI
serializer_class = AssetSerializer
parser_classes = (JSONParser, MultiPartParser, FormParser, TypedFileUploadParser)
def dispatch(self, request, *args, **kwargs):
# TODO: probably want to refactor this to a decorator.
"""
The dispatch method of a View class handles HTTP requests in general
and calls other methods to handle specific HTTP methods.
We use this to raise a 404 if the content api is disabled.
"""
if not toggles.use_studio_content_api():
raise Http404
return super().dispatch(request, *args, **kwargs)
@course_author_access_required
@expect_json_in_class_view
@validate_request_with_serializer

View File

@@ -9,7 +9,6 @@ from rest_framework.generics import (
)
from rest_framework.parsers import (MultiPartParser, FormParser)
from django.views.decorators.csrf import csrf_exempt
from django.http import Http404
from openedx.core.lib.api.view_utils import DeveloperErrorViewMixin, view_auth_classes
from openedx.core.lib.api.parsers import TypedFileUploadParser
@@ -27,12 +26,10 @@ from cms.djangoapps.contentstore.rest_api.v1.serializers import (
VideoUploadSerializer,
VideoImageSerializer,
)
import cms.djangoapps.contentstore.toggles as contentstore_toggles
from .utils import validate_request_with_serializer
log = logging.getLogger(__name__)
toggles = contentstore_toggles
@view_auth_classes()
@@ -44,17 +41,6 @@ class VideosUploadsView(DeveloperErrorViewMixin, RetrieveAPIView, DestroyAPIView
"""
serializer_class = VideoUploadSerializer
def dispatch(self, request, *args, **kwargs):
# TODO: probably want to refactor this to a decorator.
"""
The dispatch method of a View class handles HTTP requests in general
and calls other methods to handle specific HTTP methods.
We use this to raise a 404 if the content api is disabled.
"""
if not toggles.use_studio_content_api():
raise Http404
return super().dispatch(request, *args, **kwargs)
@course_author_access_required
def retrieve(self, request, course_key, edx_video_id=None): # pylint: disable=arguments-differ
return handle_videos(request, course_key.html_id(), edx_video_id)
@@ -73,17 +59,6 @@ class VideosCreateUploadView(DeveloperErrorViewMixin, CreateAPIView):
"""
serializer_class = VideoUploadSerializer
def dispatch(self, request, *args, **kwargs):
# TODO: probably want to refactor this to a decorator.
"""
The dispatch method of a View class handles HTTP requests in general
and calls other methods to handle specific HTTP methods.
We use this to raise a 404 if the content api is disabled.
"""
if not toggles.use_studio_content_api():
raise Http404
return super().dispatch(request, *args, **kwargs)
@csrf_exempt
@course_author_access_required
@expect_json_in_class_view
@@ -102,17 +77,6 @@ class VideoImagesView(DeveloperErrorViewMixin, CreateAPIView):
serializer_class = VideoImageSerializer
parser_classes = (MultiPartParser, FormParser, TypedFileUploadParser)
def dispatch(self, request, *args, **kwargs):
# TODO: probably want to refactor this to a decorator.
"""
The dispatch method of a View class handles HTTP requests in general
and calls other methods to handle specific HTTP methods.
We use this to raise a 404 if the content api is disabled.
"""
if not toggles.use_studio_content_api():
raise Http404
return super().dispatch(request, *args, **kwargs)
@csrf_exempt
@course_author_access_required
@expect_json_in_class_view
@@ -128,16 +92,10 @@ class VideoEncodingsDownloadView(DeveloperErrorViewMixin, RetrieveAPIView):
course_key: required argument, needed to authorize course authors and identify relevant videos.
"""
def dispatch(self, request, *args, **kwargs):
# TODO: probably want to refactor this to a decorator.
"""
The dispatch method of a View class handles HTTP requests in general
and calls other methods to handle specific HTTP methods.
We use this to raise a 404 if the content api is disabled.
"""
if not toggles.use_studio_content_api():
raise Http404
return super().dispatch(request, *args, **kwargs)
# TODO: ARCH-91
# This view is excluded from Swagger doc generation because it
# does not specify a serializer class.
swagger_schema = None
@csrf_exempt
@course_author_access_required
@@ -151,16 +109,10 @@ class VideoFeaturesView(DeveloperErrorViewMixin, RetrieveAPIView):
public rest API endpoint providing a list of enabled video features.
"""
def dispatch(self, request, *args, **kwargs):
# TODO: probably want to refactor this to a decorator.
"""
The dispatch method of a View class handles HTTP requests in general
and calls other methods to handle specific HTTP methods.
We use this to raise a 404 if the content api is disabled.
"""
if not toggles.use_studio_content_api():
raise Http404
return super().dispatch(request, *args, **kwargs)
# TODO: ARCH-91
# This view is excluded from Swagger doc generation because it
# does not specify a serializer class.
swagger_schema = None
@csrf_exempt
def retrieve(self, request): # pylint: disable=arguments-differ

View File

@@ -0,0 +1,145 @@
""" API Views for Course Optimizer. """
import edx_api_doc_tools as apidocs
from opaque_keys.edx.keys import CourseKey
from rest_framework.views import APIView
from rest_framework.request import Request
from rest_framework.response import Response
from user_tasks.models import UserTaskStatus
from cms.djangoapps.contentstore.core.course_optimizer_provider import get_link_check_data, sort_course_sections
from cms.djangoapps.contentstore.rest_api.v0.serializers.course_optimizer import LinkCheckSerializer
from cms.djangoapps.contentstore.tasks import check_broken_links
from common.djangoapps.student.auth import has_course_author_access, has_studio_read_access
from common.djangoapps.util.json_request import JsonResponse
from openedx.core.lib.api.view_utils import DeveloperErrorViewMixin, verify_course_exists, view_auth_classes
@view_auth_classes(is_authenticated=True)
class LinkCheckView(DeveloperErrorViewMixin, APIView):
    """
    Queues a celery task that scans a course for broken links.
    """
    @apidocs.schema(
        parameters=[
            apidocs.string_parameter("course_id", apidocs.ParameterLocation.PATH, description="Course ID"),
        ],
        responses={
            200: "Celery task queued.",
            401: "The requester is not authenticated.",
            403: "The requester cannot access the specified course.",
            404: "The requested course does not exist.",
        },
    )
    @verify_course_exists()
    def post(self, request: Request, course_id: str):
        """
        Queue a celery task to scan a course for broken links.

        **Example Request**

            POST /api/contentstore/v0/link_check/{course_id}

        **Response Values**
        ```json
        {
            "LinkCheckStatus": "Pending"
        }
        ```
        """
        user = request.user
        course_key = CourseKey.from_string(course_id)
        # Authors without Studio read access to the course may not trigger a scan.
        if not has_studio_read_access(user, course_key):
            self.permission_denied(request)

        check_broken_links.delay(user.id, course_id, request.LANGUAGE_CODE)
        return JsonResponse({'LinkCheckStatus': UserTaskStatus.PENDING})
@view_auth_classes()
class LinkCheckStatusView(DeveloperErrorViewMixin, APIView):
    """
    View for checking the status of the celery task and returning the results.
    """
    @apidocs.schema(
        parameters=[
            apidocs.string_parameter("course_id", apidocs.ParameterLocation.PATH, description="Course ID"),
        ],
        responses={
            200: "OK",
            401: "The requester is not authenticated.",
            403: "The requester cannot access the specified course.",
            404: "The requested course does not exist.",
        },
    )
    def get(self, request: Request, course_id: str):
        """
        GET handler to return the status of the link_check task from UserTaskStatus.
        If no task has been started for the course, return 'Uninitiated'.
        If link_check task was successful, an output result is also returned.

        For reference, the following status are in UserTaskStatus:
            'Pending', 'In Progress' (sent to frontend as 'In-Progress'),
            'Succeeded', 'Failed', 'Canceled', 'Retrying'
        This function adds a status for when status from UserTaskStatus is None:
            'Uninitiated'

        **Example Request**

            GET /api/contentstore/v0/link_check_status/{course_id}

        **Example Response**
        ```json
        {
            "LinkCheckStatus": "Succeeded",
            "LinkCheckCreatedAt": "2025-02-05T14:32:01.294587Z",
            "LinkCheckOutput": {
                sections: [
                    {
                        id: <string>,
                        displayName: <string>,
                        subsections: [
                            {
                                id: <string>,
                                displayName: <string>,
                                units: [
                                    {
                                        id: <string>,
                                        displayName: <string>,
                                        blocks: [
                                            {
                                                id: <string>,
                                                url: <string>,
                                                brokenLinks: [
                                                    <string>,
                                                    <string>,
                                                    <string>,
                                                    ...,
                                                ],
                                                lockedLinks: [
                                                    <string>,
                                                    <string>,
                                                    <string>,
                                                    ...,
                                                ],
                                            },
                                            { <another block> },
                                        ],
                                    },
                                    { <another unit> },
                                ],
                            },
                            { <another subsection> },
                        ],
                    },
                    { <another section> },
                ],
            },
        }
        ```
        """
        course_key = CourseKey.from_string(course_id)
        # Status (and possibly scan output) is only visible to course authors.
        if not has_course_author_access(request.user, course_key):
            self.permission_denied(request)

        data = get_link_check_data(request, course_id)
        data = sort_course_sections(course_key, data)

        serializer = LinkCheckSerializer(data)
        return Response(serializer.data)

View File

@@ -8,7 +8,6 @@ from rest_framework.generics import (
DestroyAPIView
)
from django.views.decorators.csrf import csrf_exempt
from django.http import Http404
from openedx.core.lib.api.view_utils import DeveloperErrorViewMixin, view_auth_classes
from common.djangoapps.util.json_request import expect_json_in_class_view
@@ -20,7 +19,6 @@ from cms.djangoapps.contentstore.transcript_storage_handlers import (
delete_video_transcript_or_404,
handle_transcript_download,
)
import cms.djangoapps.contentstore.toggles as contentstore_toggles
from ..serializers import TranscriptSerializer, YoutubeTranscriptCheckSerializer, YoutubeTranscriptUploadSerializer
from rest_framework.parsers import (MultiPartParser, FormParser)
from openedx.core.lib.api.parsers import TypedFileUploadParser
@@ -28,7 +26,6 @@ from openedx.core.lib.api.parsers import TypedFileUploadParser
from cms.djangoapps.contentstore.rest_api.v0.views.utils import validate_request_with_serializer
log = logging.getLogger(__name__)
toggles = contentstore_toggles
@view_auth_classes()
@@ -42,11 +39,6 @@ class TranscriptView(DeveloperErrorViewMixin, CreateAPIView, RetrieveAPIView, De
serializer_class = TranscriptSerializer
parser_classes = (MultiPartParser, FormParser, TypedFileUploadParser)
def dispatch(self, request, *args, **kwargs):
if not toggles.use_studio_content_api():
raise Http404
return super().dispatch(request, *args, **kwargs)
@csrf_exempt
@course_author_access_required
@expect_json_in_class_view
@@ -81,11 +73,6 @@ class YoutubeTranscriptCheckView(DeveloperErrorViewMixin, RetrieveAPIView):
serializer_class = YoutubeTranscriptCheckSerializer
parser_classes = (MultiPartParser, FormParser, TypedFileUploadParser)
def dispatch(self, request, *args, **kwargs):
if not toggles.use_studio_content_api():
raise Http404
return super().dispatch(request, *args, **kwargs)
@course_author_access_required
def retrieve(self, request, course_key_string): # pylint: disable=arguments-differ
"""
@@ -104,11 +91,6 @@ class YoutubeTranscriptUploadView(DeveloperErrorViewMixin, RetrieveAPIView):
serializer_class = YoutubeTranscriptUploadSerializer
parser_classes = (MultiPartParser, FormParser, TypedFileUploadParser)
def dispatch(self, request, *args, **kwargs):
if not toggles.use_studio_content_api():
raise Http404
return super().dispatch(request, *args, **kwargs)
@course_author_access_required
def retrieve(self, request, course_key_string): # pylint: disable=arguments-differ
"""

View File

@@ -4,21 +4,18 @@ Public rest API endpoints for the CMS API.
import logging
from rest_framework.generics import RetrieveUpdateDestroyAPIView, CreateAPIView
from django.views.decorators.csrf import csrf_exempt
from django.http import Http404
from openedx.core.lib.api.view_utils import DeveloperErrorViewMixin, view_auth_classes
from common.djangoapps.util.json_request import expect_json_in_class_view
from cms.djangoapps.contentstore.api import course_author_access_required
from cms.djangoapps.contentstore.xblock_storage_handlers import view_handlers
import cms.djangoapps.contentstore.toggles as contentstore_toggles
from ..serializers import XblockSerializer
from .utils import validate_request_with_serializer
log = logging.getLogger(__name__)
toggles = contentstore_toggles
handle_xblock = view_handlers.handle_xblock
@@ -32,17 +29,6 @@ class XblockView(DeveloperErrorViewMixin, RetrieveUpdateDestroyAPIView):
"""
serializer_class = XblockSerializer
def dispatch(self, request, *args, **kwargs):
# TODO: probably want to refactor this to a decorator.
"""
The dispatch method of a View class handles HTTP requests in general
and calls other methods to handle specific HTTP methods.
We use this to raise a 404 if the content api is disabled.
"""
if not toggles.use_studio_content_api():
raise Http404
return super().dispatch(request, *args, **kwargs)
# pylint: disable=arguments-differ
@course_author_access_required
@expect_json_in_class_view
@@ -77,17 +63,6 @@ class XblockCreateView(DeveloperErrorViewMixin, CreateAPIView):
"""
serializer_class = XblockSerializer
def dispatch(self, request, *args, **kwargs):
# TODO: probably want to refactor this to a decorator.
"""
The dispatch method of a View class handles HTTP requests in general
and calls other methods to handle specific HTTP methods.
We use this to raise a 404 if the content api is disabled.
"""
if not toggles.use_studio_content_api():
raise Http404
return super().dispatch(request, *args, **kwargs)
# pylint: disable=arguments-differ
@csrf_exempt
@course_author_access_required

View File

@@ -32,3 +32,4 @@ class CourseIndexSerializer(serializers.Serializer):
rerun_notification_id = serializers.IntegerField()
advance_settings_url = serializers.CharField()
is_custom_relative_dates_active = serializers.BooleanField()
created_on = serializers.DateTimeField()

View File

@@ -27,6 +27,9 @@ class CourseWaffleFlagsSerializer(serializers.Serializer):
use_new_certificates_page = serializers.SerializerMethodField()
use_new_textbooks_page = serializers.SerializerMethodField()
use_new_group_configurations_page = serializers.SerializerMethodField()
enable_course_optimizer = serializers.SerializerMethodField()
use_react_markdown_editor = serializers.SerializerMethodField()
use_video_gallery_flow = serializers.SerializerMethodField()
def get_course_key(self):
"""
@@ -144,3 +147,23 @@ class CourseWaffleFlagsSerializer(serializers.Serializer):
"""
course_key = self.get_course_key()
return toggles.use_new_group_configurations_page(course_key)
def get_enable_course_optimizer(self, obj):
"""
Method to get the enable_course_optimizer waffle flag
"""
course_key = self.get_course_key()
return toggles.enable_course_optimizer(course_key)
def get_use_react_markdown_editor(self, obj):
"""
Method to get the use_react_markdown_editor waffle flag
"""
course_key = self.get_course_key()
return toggles.use_react_markdown_editor(course_key)
def get_use_video_gallery_flow(self, obj):
"""
Method to get the use_video_gallery_flow waffle flag
"""
return toggles.use_video_gallery_flow()

View File

@@ -50,6 +50,10 @@ class StudioHomeSerializer(serializers.Serializer):
child=serializers.CharField(),
allow_empty=True
)
allowed_organizations_for_libraries = serializers.ListSerializer(
child=serializers.CharField(),
allow_empty=True
)
archived_courses = CourseCommonSerializer(required=False, many=True)
can_access_advanced_settings = serializers.BooleanField()
can_create_organizations = serializers.BooleanField()
@@ -62,10 +66,10 @@ class StudioHomeSerializer(serializers.Serializer):
libraries_v2_enabled = serializers.BooleanField()
taxonomies_enabled = serializers.BooleanField()
taxonomy_list_mfe_url = serializers.CharField()
optimization_enabled = serializers.BooleanField()
request_course_creator_url = serializers.CharField()
rerun_creator_status = serializers.BooleanField()
show_new_library_button = serializers.BooleanField()
show_new_library_v2_button = serializers.BooleanField()
split_studio_home = serializers.BooleanField()
studio_name = serializers.CharField()
studio_short_name = serializers.CharField()

View File

@@ -61,7 +61,9 @@ class CourseWaffleFlagsView(APIView):
"use_new_course_team_page": true,
"use_new_certificates_page": true,
"use_new_textbooks_page": true,
"use_new_group_configurations_page": true
"use_new_group_configurations_page": true,
"use_react_markdown_editor": true,
"use_video_gallery_flow": true
}
```
"""

View File

@@ -5,6 +5,7 @@ from django.conf import settings
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from organizations import api as org_api
from openedx.core.lib.api.view_utils import view_auth_classes
from ....utils import get_home_context, get_course_context, get_library_context
@@ -51,6 +52,7 @@ class HomePageView(APIView):
"allow_to_create_new_org": true,
"allow_unicode_course_id": false,
"allowed_organizations": [],
"allowed_organizations_for_libraries": [],
"archived_courses": [],
"can_access_advanced_settings": true,
"can_create_organizations": true,
@@ -62,10 +64,10 @@ class HomePageView(APIView):
"libraries_v1_enabled": true,
"libraries_v2_enabled": true,
"library_authoring_mfe_url": "//localhost:3001/course/course-v1:edX+P315+2T2023",
"optimization_enabled": true,
"request_course_creator_url": "/request_course_creator",
"rerun_creator_status": true,
"show_new_library_button": true,
"show_new_library_v2_button": true,
"split_studio_home": false,
"studio_name": "Studio",
"studio_short_name": "Studio",
@@ -79,7 +81,12 @@ class HomePageView(APIView):
home_context = get_home_context(request, True)
home_context.update({
'allow_to_create_new_org': settings.FEATURES.get('ENABLE_CREATOR_GROUP', True) and request.user.is_staff,
# 'allow_to_create_new_org' is actually about auto-creating organizations
# (e.g. when creating a course or library), so we add an additional test.
'allow_to_create_new_org': (
home_context['can_create_organizations'] and
org_api.is_autocreate_enabled()
),
'studio_name': settings.STUDIO_NAME,
'studio_short_name': settings.STUDIO_SHORT_NAME,
'studio_request_email': settings.FEATURES.get('STUDIO_REQUEST_EMAIL', ''),

View File

@@ -22,6 +22,7 @@ class CourseIndexViewTest(CourseTestCase, PermissionAccessMixin):
"""
Tests for CourseIndexView.
"""
maxDiff = None # Show the entire dictionary in the diff
def setUp(self):
super().setUp()
@@ -74,7 +75,10 @@ class CourseIndexViewTest(CourseTestCase, PermissionAccessMixin):
},
"language_code": "en",
"lms_link": get_lms_link_for_item(self.course.location),
"mfe_proctored_exam_settings_url": "",
"mfe_proctored_exam_settings_url": (
f"http://course-authoring-mfe/course/{self.course.id}"
"/pages-and-resources/proctoring/settings"
),
"notification_dismiss_url": None,
"proctoring_errors": [],
"reindex_link": f"/course/{self.course.id}/search_reindex",
@@ -86,6 +90,7 @@ class CourseIndexViewTest(CourseTestCase, PermissionAccessMixin):
'discussion_configuration_url': f'{get_pages_and_resources_url(self.course.id)}/discussion/settings',
},
"advance_settings_url": f"/settings/advanced/{self.course.id}",
'created_on': None,
}
self.assertEqual(response.status_code, status.HTTP_200_OK)
@@ -121,7 +126,10 @@ class CourseIndexViewTest(CourseTestCase, PermissionAccessMixin):
},
"language_code": "en",
"lms_link": get_lms_link_for_item(self.course.location),
"mfe_proctored_exam_settings_url": "",
"mfe_proctored_exam_settings_url": (
f"http://course-authoring-mfe/course/{self.course.id}"
"/pages-and-resources/proctoring/settings"
),
"notification_dismiss_url": None,
"proctoring_errors": [],
"reindex_link": f"/course/{self.course.id}/search_reindex",
@@ -133,6 +141,7 @@ class CourseIndexViewTest(CourseTestCase, PermissionAccessMixin):
'discussion_configuration_url': f'{get_pages_and_resources_url(self.course.id)}/discussion/settings',
},
"advance_settings_url": f"/settings/advanced/{self.course.id}",
'created_on': None,
}
self.assertEqual(response.status_code, status.HTTP_200_OK)
@@ -151,6 +160,6 @@ class CourseIndexViewTest(CourseTestCase, PermissionAccessMixin):
"""
Test to check number of queries made to mysql and mongo
"""
with self.assertNumQueries(32, table_ignorelist=WAFFLE_TABLES):
with self.assertNumQueries(34, table_ignorelist=WAFFLE_TABLES):
with check_mongo_calls(3):
self.client.get(self.url)

View File

@@ -1,112 +1,62 @@
"""
Unit tests for the course waffle flags view
"""
from django.contrib.auth import get_user_model
from django.urls import reverse
from rest_framework import status
from cms.djangoapps.contentstore import toggles
from cms.djangoapps.contentstore.tests.utils import CourseTestCase
from openedx.core.djangoapps.waffle_utils.models import WaffleFlagCourseOverrideModel
User = get_user_model()
class CourseWaffleFlagsViewTest(CourseTestCase):
"""
Tests for the CourseWaffleFlagsView endpoint, which returns waffle flag states
Basic test for the CourseWaffleFlagsView endpoint, which returns waffle flag states
for a specific course or globally if no course ID is provided.
"""
maxDiff = None # Show the whole dictionary in the diff
course_waffle_flags = [
"use_new_custom_pages",
"use_new_schedule_details_page",
"use_new_advanced_settings_page",
"use_new_grading_page",
"use_new_updates_page",
"use_new_import_page",
"use_new_export_page",
"use_new_files_uploads_page",
"use_new_video_uploads_page",
"use_new_course_outline_page",
"use_new_unit_page",
"use_new_course_team_page",
"use_new_certificates_page",
"use_new_textbooks_page",
"use_new_group_configurations_page",
]
defaults = {
'enable_course_optimizer': False,
'use_new_advanced_settings_page': True,
'use_new_certificates_page': True,
'use_new_course_outline_page': True,
'use_new_course_team_page': True,
'use_new_custom_pages': True,
'use_new_export_page': True,
'use_new_files_uploads_page': True,
'use_new_grading_page': True,
'use_new_group_configurations_page': True,
'use_new_home_page': True,
'use_new_import_page': True,
'use_new_schedule_details_page': True,
'use_new_textbooks_page': True,
'use_new_unit_page': True,
'use_new_updates_page': True,
'use_new_video_uploads_page': False,
'use_react_markdown_editor': False,
'use_video_gallery_flow': False,
}
def setUp(self):
"""
Set up test data and state before each test method.
This method initializes the endpoint URL and creates a set of waffle flags
for the test course, setting each flag's value to `True`.
"""
super().setUp()
self.url = reverse("cms.djangoapps.contentstore:v1:course_waffle_flags")
self.create_waffle_flags(self.course_waffle_flags)
WaffleFlagCourseOverrideModel.objects.create(
waffle_flag=toggles.ENABLE_COURSE_OPTIMIZER.name,
course_id=self.course.id,
enabled=True,
)
def create_waffle_flags(self, flags, enabled=True):
"""
Helper method to create waffle flag entries in the database for the test course.
def test_global_defaults(self):
url = reverse("cms.djangoapps.contentstore:v1:course_waffle_flags")
response = self.client.get(url)
assert response.data == self.defaults
Args:
flags (list): A list of flag names to set up.
enabled (bool): The value to set for each flag's enabled state.
"""
for flag in flags:
WaffleFlagCourseOverrideModel.objects.create(
waffle_flag=f"contentstore.new_studio_mfe.{flag}",
course_id=self.course.id,
enabled=enabled,
)
def expected_response(self, enabled=False):
"""
Generate an expected response dictionary based on the enabled flag.
Args:
enabled (bool): State to assign to each waffle flag in the response.
Returns:
dict: A dictionary with each flag set to the value of `enabled`.
"""
return {flag: enabled for flag in self.course_waffle_flags}
def test_get_course_waffle_flags_with_course_id(self):
"""
Test that waffle flags for a specific course are correctly returned when
a valid course ID is provided.
Expected Behavior:
- The response should return HTTP 200 status.
- Each flag returned should be `True` as set up in the `setUp` method.
"""
course_url = reverse(
def test_course_override(self):
url = reverse(
"cms.djangoapps.contentstore:v1:course_waffle_flags",
kwargs={"course_id": self.course.id},
)
expected_response = self.expected_response(enabled=True)
expected_response["use_new_home_page"] = False
response = self.client.get(course_url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertDictEqual(expected_response, response.data)
def test_get_course_waffle_flags_without_course_id(self):
"""
Test that the default waffle flag states are returned when no course ID is provided.
Expected Behavior:
- The response should return HTTP 200 status.
- Each flag returned should default to `False`, representing the global
default state for each flag.
"""
expected_response = self.expected_response(enabled=False)
expected_response["use_new_home_page"] = False
response = self.client.get(self.url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertDictEqual(expected_response, response.data)
response = self.client.get(url)
assert response.data == {
**self.defaults,
"enable_course_optimizer": True,
}

View File

@@ -2,26 +2,17 @@
Unit tests for home page view.
"""
import ddt
import pytz
from collections import OrderedDict
from datetime import datetime, timedelta
from django.conf import settings
from django.test import override_settings
from django.urls import reverse
from edx_toggles.toggles.testutils import (
override_waffle_switch,
)
from rest_framework import status
from cms.djangoapps.contentstore.tests.utils import CourseTestCase
from cms.djangoapps.contentstore.tests.test_libraries import LibraryTestCase
from cms.djangoapps.contentstore.views.course import ENABLE_GLOBAL_STAFF_OPTIMIZATION
from openedx.core.djangoapps.content.course_overviews.tests.factories import CourseOverviewFactory
from xmodule.modulestore.tests.factories import CourseFactory
FEATURES_WITH_HOME_PAGE_COURSE_V2_API = settings.FEATURES.copy()
FEATURES_WITH_HOME_PAGE_COURSE_V2_API['ENABLE_HOME_PAGE_COURSE_API_V2'] = True
FEATURES_WITHOUT_HOME_PAGE_COURSE_V2_API = settings.FEATURES.copy()
FEATURES_WITHOUT_HOME_PAGE_COURSE_V2_API['ENABLE_HOME_PAGE_COURSE_API_V2'] = False
@ddt.ddt
@@ -35,9 +26,10 @@ class HomePageViewTest(CourseTestCase):
self.url = reverse("cms.djangoapps.contentstore:v1:home")
self.expected_response = {
"allow_course_reruns": True,
"allow_to_create_new_org": False,
"allow_to_create_new_org": True,
"allow_unicode_course_id": False,
"allowed_organizations": [],
"allowed_organizations_for_libraries": [],
"archived_courses": [],
"can_access_advanced_settings": True,
"can_create_organizations": True,
@@ -50,10 +42,10 @@ class HomePageViewTest(CourseTestCase):
"libraries_v2_enabled": False,
"taxonomies_enabled": True,
"taxonomy_list_mfe_url": 'http://course-authoring-mfe/taxonomies',
"optimization_enabled": False,
"request_course_creator_url": "/request_course_creator",
"rerun_creator_status": True,
"show_new_library_button": True,
"show_new_library_v2_button": True,
"split_studio_home": False,
"studio_name": settings.STUDIO_NAME,
"studio_short_name": settings.STUDIO_SHORT_NAME,
@@ -81,6 +73,17 @@ class HomePageViewTest(CourseTestCase):
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertDictEqual(expected_response, response.data)
@override_settings(ORGANIZATIONS_AUTOCREATE=False)
def test_home_page_studio_with_org_autocreate_disabled(self):
"""Check response content when Organization autocreate is disabled"""
response = self.client.get(self.url)
expected_response = self.expected_response
expected_response["allow_to_create_new_org"] = False
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertDictEqual(expected_response, response.data)
def test_taxonomy_list_link(self):
response = self.client.get(self.url)
self.assertTrue(response.data['taxonomies_enabled'])
@@ -90,7 +93,6 @@ class HomePageViewTest(CourseTestCase):
)
@override_settings(FEATURES=FEATURES_WITHOUT_HOME_PAGE_COURSE_V2_API)
@ddt.ddt
class HomePageCoursesViewTest(CourseTestCase):
"""
@@ -100,12 +102,13 @@ class HomePageCoursesViewTest(CourseTestCase):
def setUp(self):
super().setUp()
self.url = reverse("cms.djangoapps.contentstore:v1:courses")
CourseOverviewFactory.create(
self.course_overview = CourseOverviewFactory.create(
id=self.course.id,
org=self.course.org,
display_name=self.course.display_name,
display_number_with_default=self.course.number,
)
self.non_staff_client, _ = self.create_non_staff_authed_user_client()
def test_home_page_response(self):
"""Check successful response content"""
@@ -155,31 +158,83 @@ class HomePageCoursesViewTest(CourseTestCase):
"in_process_course_actions": [],
}
with override_settings(FEATURES=FEATURES_WITH_HOME_PAGE_COURSE_V2_API):
response = self.client.get(self.url)
response = self.client.get(self.url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertDictEqual(expected_response, response.data)
@override_waffle_switch(ENABLE_GLOBAL_STAFF_OPTIMIZATION, True)
def test_org_query_if_passed(self):
"""Test home page when org filter passed as a query param"""
foo_course = self.store.make_course_key('foo-org', 'bar-number', 'baz-run')
test_course = CourseFactory.create(
org=foo_course.org,
number=foo_course.course,
run=foo_course.run
@ddt.data(
("active_only", "true", 2, 0),
("archived_only", "true", 0, 1),
("search", "sample", 1, 0),
("search", "demo", 0, 1),
("order", "org", 2, 1),
("order", "display_name", 2, 1),
("order", "number", 2, 1),
("order", "run", 2, 1)
)
@ddt.unpack
def test_filter_and_ordering_courses(
self,
filter_key,
filter_value,
expected_active_length,
expected_archived_length
):
"""Test home page with org filter and ordering for a staff user.
The test creates an active/archived course, and then filters/orders them using the query parameters.
"""
archived_course_key = self.store.make_course_key("demo-org", "demo-number", "demo-run")
CourseOverviewFactory.create(
display_name="Course (Demo)",
id=archived_course_key,
org=archived_course_key.org,
end=(datetime.now() - timedelta(days=365)).replace(tzinfo=pytz.UTC),
)
CourseOverviewFactory.create(id=test_course.id, org='foo-org')
response = self.client.get(self.url, {"org": "foo-org"})
self.assertEqual(len(response.data['courses']), 1)
active_course_key = self.store.make_course_key("sample-org", "sample-number", "sample-run")
CourseOverviewFactory.create(
display_name="Course (Sample)",
id=active_course_key,
org=active_course_key.org,
)
response = self.client.get(self.url, {filter_key: filter_value})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data["archived_courses"]), expected_archived_length)
self.assertEqual(len(response.data["courses"]), expected_active_length)
@ddt.data(
("active_only", "true"),
("archived_only", "true"),
("search", "sample"),
("order", "org"),
)
@ddt.unpack
def test_filter_and_ordering_no_courses_staff(self, filter_key, filter_value):
"""Test home page with org filter and ordering when there are no courses for a staff user."""
self.course_overview.delete()
response = self.client.get(self.url, {filter_key: filter_value})
self.assertEqual(len(response.data["courses"]), 0)
self.assertEqual(response.status_code, status.HTTP_200_OK)
@override_waffle_switch(ENABLE_GLOBAL_STAFF_OPTIMIZATION, True)
def test_org_query_if_empty(self):
"""Test home page with an empty org query param"""
response = self.client.get(self.url)
self.assertEqual(len(response.data['courses']), 0)
@ddt.data(
("active_only", "true"),
("archived_only", "true"),
("search", "sample"),
("order", "org"),
)
@ddt.unpack
def test_home_page_response_no_courses_non_staff(self, filter_key, filter_value):
"""Test home page with org filter and ordering when there are no courses for a non-staff user."""
self.course_overview.delete()
response = self.non_staff_client.get(self.url, {filter_key: filter_value})
self.assertEqual(len(response.data["courses"]), 0)
self.assertEqual(response.status_code, status.HTTP_200_OK)

View File

@@ -226,7 +226,7 @@ class ContainerVerticalViewTest(BaseXBlockContainer):
"version_synced": 5,
"version_available": None,
"version_declined": None,
"error_message": "Linked library item was not found in the system",
"error_message": "Linked upstream library block was not found in the system",
"ready_to_sync": False,
},
"user_partition_info": expected_user_partition_info,
@@ -236,7 +236,8 @@ class ContainerVerticalViewTest(BaseXBlockContainer):
},
]
self.maxDiff = None
self.assertEqual(response.data["children"], expected_response)
# Using json() shows meaningful diff in case of error
self.assertEqual(response.json()["children"], expected_response)
def test_not_valid_usage_key_string(self):
"""

View File

@@ -283,7 +283,7 @@ class VerticalContainerView(APIView, ContainerHandlerMixin):
child_info = modulestore().get_item(child)
user_partition_info = get_visibility_partition_info(child_info, course=course)
user_partitions = get_user_partition_info(child_info, course=course)
upstream_link = UpstreamLink.try_get_for_block(child_info)
upstream_link = UpstreamLink.try_get_for_block(child_info, log_error=False)
validation_messages = get_xblock_validation_messages(child_info)
render_error = get_xblock_render_error(request, child_info)

View File

@@ -1,3 +1,17 @@
"""Module for v2 serializers."""
from cms.djangoapps.contentstore.rest_api.v2.serializers.downstreams import (
ComponentLinksSerializer,
ContainerLinksSerializer,
PublishableEntityLinksSummarySerializer,
PublishableEntityLinkSerializer
)
from cms.djangoapps.contentstore.rest_api.v2.serializers.home import CourseHomeTabSerializerV2
__all__ = [
'CourseHomeTabSerializerV2',
'ComponentLinksSerializer',
'PublishableEntityLinkSerializer',
'ContainerLinksSerializer',
'PublishableEntityLinksSummarySerializer',
]

View File

@@ -0,0 +1,68 @@
"""
Serializers for upstream -> downstream entity links.
"""
from rest_framework import serializers
from cms.djangoapps.contentstore.models import ComponentLink, ContainerLink
class ComponentLinksSerializer(serializers.ModelSerializer):
    """
    Serializer for publishable component entity links.

    Serializes a ``ComponentLink`` model instance; the model's
    ``upstream_version_num`` field is exposed as ``upstream_version``.
    """
    # Title of the upstream context (read-only for API clients).
    upstream_context_title = serializers.CharField(read_only=True)
    # Renamed view of the model's ``upstream_version_num`` field.
    upstream_version = serializers.IntegerField(read_only=True, source="upstream_version_num")
    ready_to_sync = serializers.BooleanField()

    class Meta:
        model = ComponentLink
        # ``upstream_block`` and ``uuid`` are internal and not exposed.
        exclude = ['upstream_block', 'uuid']
class PublishableEntityLinksSummarySerializer(serializers.Serializer):
    """
    Serializer for summary for publishable entity links.

    All fields are read-only aggregates supplied by the caller (e.g. counts of
    links grouped by upstream context).
    """
    upstream_context_title = serializers.CharField(read_only=True)
    upstream_context_key = serializers.CharField(read_only=True)
    # Number of links in this context that have upstream changes to pull.
    ready_to_sync_count = serializers.IntegerField(read_only=True)
    total_count = serializers.IntegerField(read_only=True)
    last_published_at = serializers.DateTimeField(read_only=True)
class ContainerLinksSerializer(serializers.ModelSerializer):
    """
    Serializer for publishable container entity links.

    Serializes a ``ContainerLink`` model instance; the model's
    ``upstream_version_num`` field is exposed as ``upstream_version``.
    """
    # Title of the upstream context (read-only for API clients).
    upstream_context_title = serializers.CharField(read_only=True)
    # Renamed view of the model's ``upstream_version_num`` field.
    upstream_version = serializers.IntegerField(read_only=True, source="upstream_version_num")
    ready_to_sync = serializers.BooleanField()

    class Meta:
        model = ContainerLink
        # ``upstream_container`` and ``uuid`` are internal and not exposed.
        exclude = ['upstream_container', 'uuid']
class PublishableEntityLinkSerializer(serializers.Serializer):
    """
    Serializer for publishable component or container entity links.

    Dispatches to ``ComponentLinksSerializer`` or ``ContainerLinksSerializer``
    based on the concrete instance type, and normalizes the type-specific key
    field (``upstream_usage_key`` / ``upstream_container_key``) into a common
    ``upstream_key`` field alongside an ``upstream_type`` discriminator.
    """
    upstream_key = serializers.CharField(read_only=True)
    upstream_type = serializers.ChoiceField(read_only=True, choices=['component', 'container'])

    def to_representation(self, instance):
        """
        Serialize ``instance`` with the serializer matching its concrete type.

        Raises:
            TypeError: if ``instance`` is neither a ``ComponentLink`` nor a
                ``ContainerLink``.
        """
        if isinstance(instance, ComponentLink):
            data = ComponentLinksSerializer(instance).data
            # pop() renames the component-specific key field to the common name.
            data['upstream_key'] = data.pop('upstream_usage_key')
            data['upstream_type'] = 'component'
        elif isinstance(instance, ContainerLink):
            data = ContainerLinksSerializer(instance).data
            # pop() renames the container-specific key field to the common name.
            data['upstream_key'] = data.pop('upstream_container_key')
            data['upstream_type'] = 'container'
        else:
            # TypeError is more precise than a bare Exception and remains
            # backward-compatible with callers catching Exception.
            raise TypeError(f"Unexpected link type: {type(instance).__name__}")
        return data

View File

@@ -3,7 +3,8 @@
from django.conf import settings
from django.urls import path, re_path
from cms.djangoapps.contentstore.rest_api.v2.views import home, downstreams
from cms.djangoapps.contentstore.rest_api.v2.views import downstreams, home
app_name = "v2"
urlpatterns = [
@@ -12,17 +13,34 @@ urlpatterns = [
home.HomePageCoursesViewV2.as_view(),
name="courses",
),
# TODO: Potential future path.
# re_path(
# fr'^downstreams/$',
# downstreams.DownstreamsListView.as_view(),
# name="downstreams_list",
# ),
# TODO: Rename this to `downstreams/` after full deprecate `DownstreamComponentsListView`
re_path(
r'^downstreams-all/$',
downstreams.DownstreamListView.as_view(),
name="downstreams_list_all",
),
# [DEPRECATED], use `downstreams-all/` instead.
re_path(
r'^downstreams/$',
downstreams.DownstreamComponentsListView.as_view(),
name="downstreams_list",
),
# [DEPRECATED], use `downstreams-all/` instead.
re_path(
r'^downstream-containers/$',
downstreams.DownstreamContainerListView.as_view(),
name="container_downstreams_list",
),
re_path(
fr'^downstreams/{settings.USAGE_KEY_PATTERN}$',
downstreams.DownstreamView.as_view(),
name="downstream"
),
re_path(
f'^downstreams/{settings.COURSE_KEY_PATTERN}/summary$',
downstreams.DownstreamSummaryView.as_view(),
name='upstream-summary-list'
),
re_path(
fr'^downstreams/{settings.USAGE_KEY_PATTERN}/sync$',
downstreams.SyncFromUpstreamView.as_view(),

View File

@@ -40,11 +40,34 @@ https://github.com/openedx/edx-platform/issues/35653):
400: Downstream block is not linked to upstream content.
404: Downstream block not found or user lacks permission to edit it.
# NOT YET IMPLEMENTED -- Will be needed for full Libraries Relaunch in ~Teak.
/api/contentstore/v2/upstream/{usage_key_string}/downstream-links
GET: List all downstream blocks linked to a library block.
200: A list of downstream usage_keys linked to the library block.
/api/contentstore/v2/downstreams
/api/contentstore/v2/downstreams?course_id=course-v1:A+B+C&ready_to_sync=true
GET: List downstream blocks that can be synced, filterable by course or sync-readiness.
200: A paginated list of applicable & accessible downstream blocks. Entries are UpstreamLinks.
200: A paginated list of applicable & accessible downstream blocks. Entries are ComponentLinks.
/api/contentstore/v2/downstreams/<course_key>/summary
GET: List summary of links by course key
200: A list of summary of links by course key
Example:
[
{
"upstream_context_title": "CS problems 3",
"upstream_context_key": "lib:OpenedX:CSPROB3",
"ready_to_sync_count": 11,
"total_count": 14
},
{
"upstream_context_title": "CS problems 2",
"upstream_context_key": "lib:OpenedX:CSPROB2",
"ready_to_sync_count": 15,
"total_count": 24
},
]
UpstreamLink response schema:
{
@@ -56,29 +79,53 @@ UpstreamLink response schema:
"ready_to_sync": Boolean
}
"""
import logging
import logging
import warnings
from attrs import asdict as attrs_asdict
from django.db.models import QuerySet
from django.contrib.auth.models import User # pylint: disable=imported-auth-user
from edx_rest_framework_extensions.paginators import DefaultPagination
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import UsageKey
from rest_framework.exceptions import NotFound, ValidationError
from opaque_keys.edx.keys import CourseKey, UsageKey
from opaque_keys.edx.locator import LibraryUsageLocatorV2, LibraryContainerLocator, LibraryLocatorV2
from rest_framework.exceptions import NotFound, ValidationError, PermissionDenied
from rest_framework.fields import BooleanField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from itertools import chain
from xblock.core import XBlock
from cms.lib.xblock.upstream_sync import (
UpstreamLink, UpstreamLinkException, NoUpstream, BadUpstream, BadDownstream,
fetch_customizable_fields, sync_from_upstream, decline_sync, sever_upstream_link
from cms.djangoapps.contentstore.models import ComponentLink, ContainerLink, EntityLinkBase
from cms.djangoapps.contentstore.rest_api.v2.serializers import (
PublishableEntityLinkSerializer,
ComponentLinksSerializer,
ContainerLinksSerializer,
PublishableEntityLinksSummarySerializer,
)
from common.djangoapps.student.auth import has_studio_write_access, has_studio_read_access
from cms.djangoapps.contentstore.xblock_storage_handlers.view_handlers import sync_library_content
from cms.lib.xblock.upstream_sync import (
BadDownstream,
BadUpstream,
NoUpstream,
UpstreamLink,
UpstreamLinkException,
decline_sync,
sever_upstream_link,
)
from cms.lib.xblock.upstream_sync_block import fetch_customizable_fields_from_block
from cms.lib.xblock.upstream_sync_container import fetch_customizable_fields_from_container
from common.djangoapps.student.auth import has_studio_read_access, has_studio_write_access
from openedx.core.lib.api.view_utils import (
DeveloperErrorViewMixin,
view_auth_classes,
)
from openedx.core.djangoapps.content_libraries import api as lib_api
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.video_block.transcripts_utils import clear_transcripts
logger = logging.getLogger(__name__)
@@ -93,24 +140,212 @@ class _AuthenticatedRequest(Request):
user: User
# TODO: Potential future view.
# @view_auth_classes(is_authenticated=True)
# class DownstreamListView(DeveloperErrorViewMixin, APIView):
# """
# List all blocks which are linked to upstream content, with optional filtering.
# """
# def get(self, request: _AuthenticatedRequest) -> Response:
# """
# Handle the request.
# """
# course_key_string = request.GET['course_id']
# syncable = request.GET['ready_to_sync']
# ...
class DownstreamListPaginator(DefaultPagination):
    """
    Custom paginator for downstream entity links.

    Supports a pagination opt-out: when the request carries a `no_page` query
    parameter, the full result set is returned unpaginated.
    """
    page_size = 100
    max_page_size = 1000

    def paginate_queryset(self, queryset, request, view=None):
        """Return the raw queryset when `no_page` is requested, else paginate."""
        if 'no_page' in request.query_params:
            return queryset
        return super().paginate_queryset(queryset, request, view)

    def get_paginated_response(self, data, *args, **kwargs):
        """
        Build the (possibly unpaginated) response.

        Callers pass the request as the first extra positional argument (or as
        ``request=``) so the `no_page` opt-out can be honored here as well.
        """
        # Robustness fix: the original unconditionally read `args[0]` and
        # raised IndexError when no request was passed; fall back to normal
        # pagination in that case.
        request = args[0] if args else kwargs.get('request')
        if request is not None and 'no_page' in request.query_params:
            return Response(data)
        response = super().get_paginated_response(data)
        # Replace next/previous *links* with page *numbers*, which are easier
        # for MFE clients to consume.
        response.data.update({
            'next_page_num': self.page.next_page_number() if self.page.has_next() else None,
            'previous_page_num': self.page.previous_page_number() if self.page.has_previous() else None,
        })
        return response
@view_auth_classes()
class DownstreamListView(DeveloperErrorViewMixin, APIView):
    """
    [ 🛑 UNSTABLE ]
    List all items (components and containers) which are linked to an upstream context, with optional filtering.
    """
    def get(self, request: _AuthenticatedRequest):
        """
        Fetches publishable entity links for given course key.

        Query parameters (all optional):
            course_id: restrict to links whose downstream is in this course.
            ready_to_sync: 'true'/'false' — filter by sync-readiness.
            upstream_key: a library component usage key or container key;
                restricts results to links from that single upstream item.
            item_type: 'components', 'containers', or 'all' (default).

        Raises:
            PermissionDenied: no course/upstream filter given by a
                non-superuser, or the user lacks read access to the course.
            ValidationError: malformed course or upstream key.
        """
        course_key_string = request.GET.get('course_id')
        ready_to_sync = request.GET.get('ready_to_sync')
        upstream_key = request.GET.get('upstream_key')
        item_type = request.GET.get('item_type')
        link_filter: dict[str, CourseKey | UsageKey | LibraryContainerLocator | bool] = {}
        paginator = DownstreamListPaginator()
        if course_key_string is None and upstream_key is None and not request.user.is_superuser:
            # This case without course or upstream filter means that the user need permissions to
            # multiple courses/libraries, so raise `PermissionDenied` if the user is not superuser.
            raise PermissionDenied
        if course_key_string:
            try:
                course_key = CourseKey.from_string(course_key_string)
                link_filter["downstream_context_key"] = course_key
            except InvalidKeyError as exc:
                raise ValidationError(detail=f"Malformed course key: {course_key_string}") from exc
            # Course-scoped listing requires at least read access to the course.
            if not has_studio_read_access(request.user, course_key):
                raise PermissionDenied
        if ready_to_sync is not None:
            # BooleanField().to_internal_value parses 'true'/'false' style values.
            link_filter["ready_to_sync"] = BooleanField().to_internal_value(ready_to_sync)
        if upstream_key:
            # `upstream_key` may be either a component usage key or a container
            # key: try the component form first, then fall back to container.
            try:
                upstream_usage_key = UsageKey.from_string(upstream_key)
                link_filter["upstream_usage_key"] = upstream_usage_key
                # Verify that the user has permission to view the library that contains
                # the upstream component
                lib_api.require_permission_for_library_key(
                    LibraryLocatorV2.from_string(str(upstream_usage_key.context_key)),
                    request.user,
                    permission=lib_api.permissions.CAN_VIEW_THIS_CONTENT_LIBRARY,
                )
                # At this point we just need to bring components
                item_type = 'components'
            except InvalidKeyError:
                try:
                    upstream_container_key = LibraryContainerLocator.from_string(upstream_key)
                    link_filter["upstream_container_key"] = upstream_container_key
                    # Verify that the user has permission to view the library that contains
                    # the upstream container
                    lib_api.require_permission_for_library_key(
                        upstream_container_key.lib_key,
                        request.user,
                        permission=lib_api.permissions.CAN_VIEW_THIS_CONTENT_LIBRARY,
                    )
                    # At this point we just need to bring containers
                    item_type = 'containers'
                except InvalidKeyError as exc:
                    raise ValidationError(detail=f"Malformed key: {upstream_key}") from exc
        links: list[EntityLinkBase] | QuerySet[EntityLinkBase] = []
        if item_type is None or item_type == 'all':
            # Combined listing: chain both querysets into one list so they can
            # be paginated together.
            links = list(chain(
                ComponentLink.filter_links(**link_filter),
                ContainerLink.filter_links(**link_filter)
            ))
        elif item_type == 'components':
            links = ComponentLink.filter_links(**link_filter)
        elif item_type == 'containers':
            links = ContainerLink.filter_links(**link_filter)
        paginated_links = paginator.paginate_queryset(links, self.request, view=self)
        serializer = PublishableEntityLinkSerializer(paginated_links, many=True)
        return paginator.get_paginated_response(serializer.data, self.request)
@view_auth_classes()
class DownstreamComponentsListView(DeveloperErrorViewMixin, APIView):
    """
    [DEPRECATED], use DownstreamListView instead.

    List all components which are linked to an upstream context, with optional
    filtering by course, sync-readiness, and upstream usage key.
    """
    def get(self, request: _AuthenticatedRequest):
        """
        [DEPRECATED], use DownstreamListView.get instead, with `item_type='components'`

        Fetches publishable component entity links for the given course key.
        """
        warnings.warn(
            '`downstreams/` API is deprecated. Please use `downstreams-all/?item_type=components` instead.',
            DeprecationWarning, stacklevel=3,
        )
        paginator = DownstreamListPaginator()
        filters: dict[str, CourseKey | UsageKey | bool] = {}

        # Optional course filter.
        requested_course = request.GET.get('course_id')
        if requested_course:
            try:
                filters["downstream_context_key"] = CourseKey.from_string(requested_course)
            except InvalidKeyError as exc:
                raise ValidationError(detail=f"Malformed course key: {requested_course}") from exc

        # Optional sync-readiness filter ('true'/'false' style values).
        requested_sync_flag = request.GET.get('ready_to_sync')
        if requested_sync_flag is not None:
            filters["ready_to_sync"] = BooleanField().to_internal_value(requested_sync_flag)

        # Optional upstream component filter.
        requested_upstream = request.GET.get('upstream_usage_key')
        if requested_upstream:
            try:
                filters["upstream_usage_key"] = UsageKey.from_string(requested_upstream)
            except InvalidKeyError as exc:
                raise ValidationError(detail=f"Malformed usage key: {requested_upstream}") from exc

        matching_links = ComponentLink.filter_links(**filters)
        page = paginator.paginate_queryset(matching_links, self.request, view=self)
        serialized = ComponentLinksSerializer(page, many=True)
        return paginator.get_paginated_response(serialized.data, self.request)
@view_auth_classes()
class DownstreamSummaryView(DeveloperErrorViewMixin, APIView):
    """
    [ 🛑 UNSTABLE ]
    Serves course->library publishable entity links summary
    """
    def get(self, request: _AuthenticatedRequest, course_key_string: str):
        """
        Fetches publishable entity links summary for given course key

        Example:
        [
            {
                "upstream_context_title": "CS problems 3",
                "upstream_context_key": "lib:OpenedX:CSPROB3",
                "ready_to_sync_count": 11,
                "total_count": 14
                "last_published_at": "2025-05-02T20:20:44.989042Z"
            },
            {
                "upstream_context_title": "CS problems 2",
                "upstream_context_key": "lib:OpenedX:CSPROB2",
                "ready_to_sync_count": 15,
                "total_count": 24,
                "last_published_at": "2025-05-03T21:20:44.989042Z"
            },
        ]

        Raises:
            ValidationError: if `course_key_string` is not a valid course key.
        """
        try:
            course_key = CourseKey.from_string(course_key_string)
        except InvalidKeyError as exc:
            raise ValidationError(detail=f"Malformed course key: {course_key_string}") from exc
        component_links = ComponentLink.summarize_by_downstream_context(downstream_context_key=course_key)
        container_links = ContainerLink.summarize_by_downstream_context(downstream_context_key=course_key)
        merged = {}

        def process_list(lst):
            """
            Merge one summary list into `merged`, keyed by upstream context key.
            """
            for item in lst:
                key = item["upstream_context_key"]
                if key not in merged:
                    # First sighting of this library: copy so later mutation
                    # doesn't alter the source list's dicts.
                    merged[key] = item.copy()
                    continue
                entry = merged[key]
                entry["ready_to_sync_count"] += item["ready_to_sync_count"]
                entry["total_count"] += item["total_count"]
                # Keep the most recent publish timestamp. Robustness fix: the
                # original compared with `>` directly, which raises TypeError
                # when either side is None (e.g. a never-published upstream).
                current = entry["last_published_at"]
                candidate = item["last_published_at"]
                if current is None or (candidate is not None and candidate > current):
                    entry["last_published_at"] = candidate

        # Merge `component_links` and `container_links` by adding the values of
        # `ready_to_sync_count` and `total_count` of each library.
        process_list(component_links)
        process_list(container_links)
        links = list(merged.values())
        serializer = PublishableEntityLinksSummarySerializer(links, many=True)
        return Response(serializer.data)
@view_auth_classes(is_authenticated=True)
class DownstreamView(DeveloperErrorViewMixin, APIView):
"""
[ 🛑 UNSTABLE ]
Inspect or manage an XBlock's link to upstream content.
"""
def get(self, request: _AuthenticatedRequest, usage_key_string: str) -> Response:
@@ -138,12 +373,21 @@ class DownstreamView(DeveloperErrorViewMixin, APIView):
raise ValidationError({"sync": "must be 'true' or 'false'"})
try:
if sync_param == "true" or sync_param is True:
sync_from_upstream(downstream=downstream, user=request.user)
sync_library_content(
downstream=downstream,
request=request,
store=modulestore()
)
else:
# Even if we're not syncing (i.e., updating the downstream's values with the upstream's), we still need
# to fetch the upstream's customizable values and store them as hidden fields on the downstream. This
# ensures that downstream authors can restore defaults based on the upstream.
fetch_customizable_fields(downstream=downstream, user=request.user)
link = UpstreamLink.get_for_block(downstream)
if isinstance(link.upstream_key, LibraryUsageLocatorV2):
fetch_customizable_fields_from_block(downstream=downstream, user=request.user)
else:
assert isinstance(link.upstream_key, LibraryContainerLocator)
fetch_customizable_fields_from_container(downstream=downstream)
except BadDownstream as exc:
logger.exception(
"'%s' is an invalid downstream; refusing to set its upstream to '%s'",
@@ -172,7 +416,7 @@ class DownstreamView(DeveloperErrorViewMixin, APIView):
downstream = _load_accessible_block(request.user, usage_key_string, require_write_access=True)
try:
sever_upstream_link(downstream)
except NoUpstream as exc:
except NoUpstream:
logger.exception(
"Tried to DELETE upstream link of '%s', but it wasn't linked to anything in the first place. "
"Will do nothing. ",
@@ -186,6 +430,7 @@ class DownstreamView(DeveloperErrorViewMixin, APIView):
@view_auth_classes(is_authenticated=True)
class SyncFromUpstreamView(DeveloperErrorViewMixin, APIView):
"""
[ 🛑 UNSTABLE ]
Accept or decline an opportunity to sync a downstream block from its upstream content.
"""
@@ -195,7 +440,14 @@ class SyncFromUpstreamView(DeveloperErrorViewMixin, APIView):
"""
downstream = _load_accessible_block(request.user, usage_key_string, require_write_access=True)
try:
sync_from_upstream(downstream, request.user)
if downstream.usage_key.block_type == "video":
# Delete all transcripts so we can copy new ones from upstream
clear_transcripts(downstream)
static_file_notices = sync_library_content(
downstream=downstream,
request=request,
store=modulestore()
)
except UpstreamLinkException as exc:
logger.exception(
"Could not sync from upstream '%s' to downstream '%s'",
@@ -203,10 +455,11 @@ class SyncFromUpstreamView(DeveloperErrorViewMixin, APIView):
usage_key_string,
)
raise ValidationError(detail=str(exc)) from exc
modulestore().update_item(downstream, request.user.id)
# Note: We call `get_for_block` (rather than `try_get_for_block`) because if anything is wrong with the
# upstream at this point, then that is completely unexpected, so it's appropriate to let the 500 happen.
return Response(UpstreamLink.get_for_block(downstream).to_json())
response = UpstreamLink.get_for_block(downstream).to_json()
response["static_file_notices"] = attrs_asdict(static_file_notices)
return Response(response)
def delete(self, request: _AuthenticatedRequest, usage_key_string: str) -> Response:
"""
@@ -230,6 +483,47 @@ class SyncFromUpstreamView(DeveloperErrorViewMixin, APIView):
return Response(status=204)
@view_auth_classes()
class DownstreamContainerListView(DeveloperErrorViewMixin, APIView):
    """
    [DEPRECATED], use DownstreamListView instead.
    List all container blocks which are linked to an upstream context, with optional filtering.
    """
    def get(self, request: _AuthenticatedRequest):
        """
        [DEPRECATED], use DownstreamListView.get instead, with `item_type='containers'`
        Fetches publishable container entity links for given course key

        Raises:
            ValidationError: malformed course key or container key.
        """
        # Message fix: this is the container endpoint, so point callers at the
        # correct route and `item_type=containers` (the original wrongly said
        # '`downstreams/`' and '`item_type=components`').
        warnings.warn(
            '`downstream-containers/` API is deprecated. '
            'Please use `downstreams-all/?item_type=containers` instead.',
            DeprecationWarning, stacklevel=3,
        )
        course_key_string = request.GET.get('course_id')
        ready_to_sync = request.GET.get('ready_to_sync')
        upstream_container_key = request.GET.get('upstream_container_key')
        link_filter: dict[str, CourseKey | LibraryContainerLocator | bool] = {}
        paginator = DownstreamListPaginator()
        if course_key_string:
            try:
                link_filter["downstream_context_key"] = CourseKey.from_string(course_key_string)
            except InvalidKeyError as exc:
                raise ValidationError(detail=f"Malformed course key: {course_key_string}") from exc
        if ready_to_sync is not None:
            # BooleanField().to_internal_value parses 'true'/'false' style values.
            link_filter["ready_to_sync"] = BooleanField().to_internal_value(ready_to_sync)
        if upstream_container_key:
            try:
                link_filter["upstream_container_key"] = LibraryContainerLocator.from_string(upstream_container_key)
            except InvalidKeyError as exc:
                # Message fix: the filtered value is a container key, not a usage key.
                raise ValidationError(detail=f"Malformed container key: {upstream_container_key}") from exc
        links = ContainerLink.filter_links(**link_filter)
        paginated_links = paginator.paginate_queryset(links, self.request, view=self)
        serializer = ContainerLinksSerializer(paginated_links, many=True)
        return paginator.get_paginated_response(serializer.data, self.request)
def _load_accessible_block(user: User, usage_key_string: str, *, require_write_access: bool) -> XBlock:
"""
Given a logged in-user and a serialized usage key of an upstream-linked XBlock, load it from the ModuleStore,

View File

@@ -1,8 +1,7 @@
"""HomePageCoursesViewV2 APIView for getting content available to the logged in user."""
import edx_api_doc_tools as apidocs
from collections import OrderedDict
from django.conf import settings
from django.http import HttpResponseNotFound
from rest_framework.response import Response
from rest_framework.request import Request
from rest_framework.views import APIView
@@ -126,13 +125,7 @@ class HomePageCoursesViewV2(APIView):
"in_process_course_actions": [],
}
```
if the `ENABLE_HOME_PAGE_COURSE_API_V2` feature flag is not enabled, an HTTP 404 "Not Found" response
is returned.
"""
if not settings.FEATURES.get('ENABLE_HOME_PAGE_COURSE_API_V2', False):
return HttpResponseNotFound()
courses, in_process_course_actions = get_course_context_v2(request)
paginator = HomePageCoursesPaginator()
courses_page = paginator.paginate_queryset(

View File

@@ -0,0 +1,462 @@
"""
Unit and integration tests to ensure that syncing content from libraries to
courses is working.
"""
from typing import Any
from xml.etree import ElementTree
import ddt
from opaque_keys.edx.keys import UsageKey
from openedx.core.djangoapps.content_libraries.tests import ContentLibrariesRestApiTest
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import BlockFactory, CourseFactory
@ddt.ddt
class CourseToLibraryTestCase(ContentLibrariesRestApiTest, ModuleStoreTestCase):
"""
Tests that involve syncing content from libraries to courses.
"""
maxDiff = None # Necessary for debugging OLX differences
def setUp(self):
    """
    Build the shared fixture: a published source library (two standalone
    problems, one non-standalone html block, and a unit containing all three)
    plus an empty destination course with a section/subsection/unit skeleton.
    """
    super().setUp()
    # self.user is set up by ContentLibrariesRestApiTest
    # The source library (contains the upstreams):
    self.library = self._create_library(slug="testlib", title="Upstream Library")
    lib_id = self.library["id"]  # the library ID as a string
    self.upstream_problem1 = self._add_block_to_library(lib_id, "problem", "prob1", can_stand_alone=True)
    self._set_library_block_olx(
        self.upstream_problem1["id"],
        '<problem display_name="Problem 1 Display Name" weight="1" markdown="MD 1">multiple choice...</problem>'
    )
    self.upstream_problem2 = self._add_block_to_library(lib_id, "problem", "prob2", can_stand_alone=True)
    self._set_library_block_olx(
        self.upstream_problem2["id"],
        '<problem display_name="Problem 2 Display Name" max_attempts="22">multi select...</problem>'
    )
    # html1 cannot stand alone: it is only usable via its containing unit.
    self.upstream_html1 = self._add_block_to_library(lib_id, "html", "html1", can_stand_alone=False)
    self._set_library_block_olx(
        self.upstream_html1["id"],
        '<html display_name="Text Content">This is the HTML.</html>'
    )
    # A unit container holding all three blocks, in this order.
    self.upstream_unit = self._create_container(lib_id, "unit", slug="u1", display_name="Unit 1 Title")
    self._add_container_children(self.upstream_unit["id"], [
        self.upstream_html1["id"],
        self.upstream_problem1["id"],
        self.upstream_problem2["id"],
    ])
    self._commit_library_changes(lib_id)  # publish everything
    # The destination course:
    self.course = CourseFactory.create()
    self.course_section = BlockFactory.create(category='chapter', parent=self.course)
    self.course_subsection = BlockFactory.create(category='sequential', parent=self.course_section)
    self.course_unit = BlockFactory.create(category='vertical', parent=self.course_subsection)
def _get_sync_status(self, usage_key: str):
    """Fetch the upstream-link status of a downstream block via the downstreams API."""
    return self._api('get', f"/api/contentstore/v2/downstreams/{usage_key}", {}, expect_response=200)
def _sync_downstream(self, usage_key: str):
    """Sync a downstream block from its upstream via the downstreams sync API."""
    return self._api('post', f"/api/contentstore/v2/downstreams/{usage_key}/sync", {}, expect_response=200)
def _get_course_block_olx(self, usage_key: str):
    """Return the OLX string of the given course block via the OLX-export API."""
    data = self._api('get', f'/api/olx-export/v1/xblock/{usage_key}/', {}, expect_response=200)
    # The export payload maps block IDs to OLX; the root block is the one requested.
    return data["blocks"][data["root_block_id"]]["olx"]
# def _get_course_block_fields(self, usage_key: str):
# return self._api('get', f'/xblock/{usage_key}', {}, expect_response=200)
def _get_course_block_children(self, usage_key: str) -> list[str]:
    """Get the IDs of the child XBlocks of the given XBlock, as strings."""
    # TODO: is there really no REST API to get the children of an XBlock in Studio?
    # Maybe this one: /api/contentstore/v1/container/vertical/{usage_key_string}/children
    block = modulestore().get_item(UsageKey.from_string(usage_key), depth=0)
    return list(map(str, block.children))
def _create_block_from_upstream(
    self,
    block_category: str,
    parent_usage_key: str,
    upstream_key: str,
    expect_response: int = 200,
):
    """
    Call the CMS API for inserting an XBlock that's cloned from a library
    item. i.e. copy a *published* library block into a course, and create an
    upstream link.

    Args:
        block_category: XBlock category to create in the course (e.g.
            "problem", or "vertical" when cloning a library unit).
        parent_usage_key: serialized usage key of the course parent block.
        upstream_key: library block or container key to clone from.
        expect_response: expected HTTP status (default 200).
    """
    return self._api('post', "/xblock/", {
        "category": block_category,
        "parent_locator": parent_usage_key,
        # Passing `library_content_key` is what triggers clone-from-upstream.
        "library_content_key": upstream_key,
    }, expect_response=expect_response)
# Annotation fix: the default is None, so the parameter type is Optional.
def _update_course_block_fields(self, usage_key: str, fields: dict[str, Any] | None = None):
    """Update metadata fields of an XBlock via the /xblock/ API."""
    return self._api('patch', f"/xblock/{usage_key}", {
        "metadata": fields,
    }, expect_response=200)
# Annotation fix: this is an assertion helper; it returns None (was `-> bool`).
def assertXmlEqual(self, xml_str_a: str, xml_str_b: str) -> None:
    """Assert that the given XML strings are equal, ignoring attribute order and some whitespace variations."""
    # canonicalize() normalizes attribute order; strip_text drops
    # insignificant whitespace between elements.
    self.assertEqual(
        ElementTree.canonicalize(xml_str_a, strip_text=True),
        ElementTree.canonicalize(xml_str_b, strip_text=True),
    )
# OLX attributes that will appear on capa problems when saved/exported. Excludes "markdown"
standard_capa_attributes = """
markdown_edited="false"
matlab_api_key="null"
name="null"
rerandomize="never"
source_code="null"
tags="[]"
use_latex_compiler="false"
"""
####################################################################################################################
def test_problem_sync(self):
    """
    Test that we can sync a problem from a library into a course.

    Covers: create-from-upstream, downstream + upstream edits, and the
    resulting merge semantics after a sync (content fields follow upstream,
    customized fields survive).
    """
    # 1⃣ First, create the problem in the course, using the upstream problem as a template:
    downstream_problem1 = self._create_block_from_upstream(
        block_category="problem",
        parent_usage_key=str(self.course_subsection.usage_key),
        upstream_key=self.upstream_problem1["id"],
    )
    status = self._get_sync_status(downstream_problem1["locator"])
    self.assertDictContainsEntries(status, {
        'upstream_ref': self.upstream_problem1["id"],  # e.g. 'lb:CL-TEST:testlib:problem:prob1'
        'version_available': 2,
        'version_synced': 2,
        'version_declined': None,
        'ready_to_sync': False,
        'error_message': None,
        # 'upstream_link': 'http://course-authoring-mfe/library/lib:CL-TEST:testlib/components?usageKey=...'
    })
    assert status["upstream_link"].startswith("http://course-authoring-mfe/library/")
    assert status["upstream_link"].endswith(f"/components?usageKey={self.upstream_problem1['id']}")
    # Check the OLX of the downstream block. Notice that:
    # (1) fields display_name and markdown, as well as the 'data' (content/body of the <problem>) are synced.
    # (2) per UpstreamSyncMixin.get_customizable_fields(), some fields like weight and max_attempts are
    #     DROPPED entirely from the upstream version when creating the downstream:
    self.assertXmlEqual(self._get_course_block_olx(downstream_problem1["locator"]), f"""
        <problem
            display_name="Problem 1 Display Name"
            markdown="MD 1"
            upstream="{self.upstream_problem1['id']}"
            upstream_display_name="Problem 1 Display Name"
            upstream_version="2"
            {self.standard_capa_attributes}
        >multiple choice...</problem>
    """)
    # 2⃣ Now, lets modify the upstream problem AND the downstream problem:
    self._update_course_block_fields(downstream_problem1["locator"], {
        "display_name": "Custom Display Name",
        "max_attempts": 3,
        "markdown": "blow me away, scotty!",  # This change will be lost
    })
    self._set_library_block_olx(
        self.upstream_problem1["id"],
        '<problem display_name="Problem 1 NEW name" markdown="updated">multiple choice v2...</problem>'
    )
    self._publish_library_block(self.upstream_problem1["id"])
    # Here's how the downstream OLX looks now, before we sync:
    self.assertXmlEqual(self._get_course_block_olx(downstream_problem1["locator"]), f"""
        <problem
            display_name="Custom Display Name"
            markdown="blow me away, scotty!"
            max_attempts="3"
            upstream="{self.upstream_problem1['id']}"
            upstream_display_name="Problem 1 Display Name"
            upstream_version="2"
            {self.standard_capa_attributes}
        >multiple choice...</problem>
    """)
    status = self._get_sync_status(downstream_problem1["locator"])
    self.assertDictContainsEntries(status, {
        'upstream_ref': self.upstream_problem1["id"],  # e.g. 'lb:CL-TEST:testlib:problem:prob1'
        'version_available': 3,  # <--- updated
        'version_synced': 2,
        'version_declined': None,
        'ready_to_sync': True,  # <--- updated
        'error_message': None,
    })
    # 3⃣ Now, sync and check the resulting OLX of the downstream
    self._sync_downstream(downstream_problem1["locator"])
    # Here's how the downstream OLX looks now, after we synced it.
    # Notice:
    # (1) content like "markdown" and the body XML content are synced
    # (2) the "display_name" is left alone (customized downstream), but
    # (3) "upstream_display_name" is updated.
    # (4) The customized "max_attempts" is also still present.
    self.assertXmlEqual(self._get_course_block_olx(downstream_problem1["locator"]), f"""
        <problem
            display_name="Custom Display Name"
            markdown="updated"
            max_attempts="3"
            upstream="{self.upstream_problem1['id']}"
            upstream_display_name="Problem 1 NEW name"
            upstream_version="3"
            {self.standard_capa_attributes}
        >multiple choice v2...</problem>
    """)
def test_unit_sync(self):
    """
    Test that we can sync a unit from the library into the course.

    Walks the full lifecycle of a library-backed unit ("vertical"):
      1. create the downstream vertical from the upstream unit container,
      2. modify an upstream child problem,
      3. sync and verify the child's content is pulled down,
      4. add and delete child components upstream,
      5. sync and verify the add/delete propagates,
      6. reorder children upstream,
      7. sync and verify the new ordering propagates.
    """
    # 1⃣ Create a "vertical" block in the course based on a "unit" container:
    downstream_unit = self._create_block_from_upstream(
        # The API consumer needs to specify "vertical" here, even though upstream is "unit".
        # In the future we could create a nicer REST API endpoint for this that's not part of
        # the messy '/xblock/' API and which auto-detects the types based on the upstream_key.
        block_category="vertical",
        parent_usage_key=str(self.course_subsection.usage_key),
        upstream_key=self.upstream_unit["id"],
    )
    # A freshly created downstream starts fully synced with the latest published version.
    status = self._get_sync_status(downstream_unit["locator"])
    self.assertDictContainsEntries(status, {
        'upstream_ref': self.upstream_unit["id"],  # e.g. 'lct:CL-TEST:testlib:unit:u1'
        'version_available': 2,
        'version_synced': 2,
        'version_declined': None,
        'ready_to_sync': False,
        'error_message': None,
        # 'upstream_link': 'http://course-authoring-mfe/library/lib:CL-TEST:testlib/units/...'
    })
    assert status["upstream_link"].startswith("http://course-authoring-mfe/library/")
    assert status["upstream_link"].endswith(f"/units/{self.upstream_unit['id']}")
    # Check that the downstream container matches our expectations.
    # Note that:
    # (1) Every XBlock has an "upstream" field
    # (2) some "downstream only" fields like weight and max_attempts are omitted.
    self.assertXmlEqual(self._get_course_block_olx(downstream_unit["locator"]), f"""
        <vertical
            display_name="Unit 1 Title"
            upstream_display_name="Unit 1 Title"
            upstream="{self.upstream_unit['id']}"
            upstream_version="2"
        >
            <html
                display_name="Text Content"
                upstream_display_name="Text Content"
                editor="visual"
                upstream="{self.upstream_html1['id']}"
                upstream_version="2"
            >This is the HTML.</html>
            <problem
                display_name="Problem 1 Display Name"
                upstream_display_name="Problem 1 Display Name"
                markdown="MD 1"
                {self.standard_capa_attributes}
                upstream="{self.upstream_problem1['id']}"
                upstream_version="2"
            >multiple choice...</problem>
            <problem
                display_name="Problem 2 Display Name"
                upstream_display_name="Problem 2 Display Name"
                markdown="null"
                {self.standard_capa_attributes}
                upstream="{self.upstream_problem2['id']}"
                upstream_version="2"
            >multi select...</problem>
        </vertical>
    """)
    # 2⃣ Now, let's modify the upstream problem 1:
    self._set_library_block_olx(
        self.upstream_problem1["id"],
        '<problem display_name="Problem 1 NEW name" markdown="updated">multiple choice v2...</problem>'
    )
    self._publish_container(self.upstream_unit["id"])
    status = self._get_sync_status(downstream_unit["locator"])
    self.assertDictContainsEntries(status, {
        'upstream_ref': self.upstream_unit["id"],  # e.g. 'lct:CL-TEST:testlib:unit:u1'
        'version_available': 2,  # <--- not updated since we didn't directly modify the unit
        'version_synced': 2,
        'version_declined': None,
        # FIXME: ready_to_sync should be true, since a child block needs syncing.
        # This may need to be fixed post-Teak, as syncing the children directly is still possible.
        'ready_to_sync': False,
        'error_message': None,
    })
    # Check the upstream/downstream status of [one of] the children
    downstream_problem1 = self._get_course_block_children(downstream_unit["locator"])[1]
    assert "type@problem" in downstream_problem1
    self.assertDictContainsEntries(self._get_sync_status(downstream_problem1), {
        'upstream_ref': self.upstream_problem1["id"],
        'version_available': 3,  # <--- updated since we modified the problem
        'version_synced': 2,
        'version_declined': None,
        'ready_to_sync': True,  # <--- updated
        'error_message': None,
    })
    # 3⃣ Now, sync and check the resulting OLX of the downstream
    self._sync_downstream(downstream_unit["locator"])
    self.assertXmlEqual(self._get_course_block_olx(downstream_unit["locator"]), f"""
        <vertical
            display_name="Unit 1 Title"
            upstream_display_name="Unit 1 Title"
            upstream="{self.upstream_unit['id']}"
            upstream_version="2"
        >
            <html
                display_name="Text Content"
                upstream_display_name="Text Content"
                editor="visual"
                upstream="{self.upstream_html1['id']}"
                upstream_version="2"
            >This is the HTML.</html>
            <!-- 🟢 the problem below has been updated: -->
            <problem
                display_name="Problem 1 NEW name"
                upstream_display_name="Problem 1 NEW name"
                markdown="updated"
                {self.standard_capa_attributes}
                upstream="{self.upstream_problem1['id']}"
                upstream_version="3"
            >multiple choice v2...</problem>
            <problem
                display_name="Problem 2 Display Name"
                upstream_display_name="Problem 2 Display Name"
                markdown="null"
                {self.standard_capa_attributes}
                upstream="{self.upstream_problem2['id']}"
                upstream_version="2"
            >multi select...</problem>
        </vertical>
    """)
    # 4⃣ Now, add and delete a component
    upstream_problem3 = self._add_block_to_library(
        self.library["id"],
        "problem",
        "prob3",
        can_stand_alone=True
    )
    self._set_library_block_olx(
        upstream_problem3["id"],
        '<problem display_name="Problem 3 Display Name" max_attempts="22">single select...</problem>'
    )
    self._add_container_children(self.upstream_unit["id"], [upstream_problem3["id"]])
    self._remove_container_components(self.upstream_unit["id"], [self.upstream_problem2["id"]])
    self._commit_library_changes(self.library["id"])  # publish everything
    status = self._get_sync_status(downstream_unit["locator"])
    self.assertDictContainsEntries(status, {
        'upstream_ref': self.upstream_unit["id"],  # e.g. 'lct:CL-TEST:testlib:unit:u1'
        'version_available': 4,  # <--- updated twice, delete and add component
        'version_synced': 2,
        'version_declined': None,
        'ready_to_sync': True,
        'error_message': None,
    })
    # 5⃣ Now, sync and check the resulting OLX of the downstream after add/delete
    self._sync_downstream(downstream_unit["locator"])
    self.assertXmlEqual(self._get_course_block_olx(downstream_unit["locator"]), f"""
        <vertical
            display_name="Unit 1 Title"
            upstream_display_name="Unit 1 Title"
            upstream="{self.upstream_unit['id']}"
            upstream_version="4"
        >
            <html
                display_name="Text Content"
                upstream_display_name="Text Content"
                editor="visual"
                upstream="{self.upstream_html1['id']}"
                upstream_version="2"
            >This is the HTML.</html>
            <problem
                display_name="Problem 1 NEW name"
                upstream_display_name="Problem 1 NEW name"
                markdown="updated"
                {self.standard_capa_attributes}
                upstream="{self.upstream_problem1['id']}"
                upstream_version="3"
            >multiple choice v2...</problem>
            <!-- 🟢 the problem 2 has been deleted: -->
            <!-- 🟢 the problem 3 has been added: -->
            <problem
                display_name="Problem 3 Display Name"
                upstream_display_name="Problem 3 Display Name"
                markdown="null"
                {self.standard_capa_attributes}
                upstream="{upstream_problem3['id']}"
                upstream_version="2"
            >single select...</problem>
        </vertical>
    """)
    # 6⃣ Now, reorder components
    self._patch_container_components(self.upstream_unit["id"], [
        upstream_problem3["id"],
        self.upstream_problem1["id"],
        self.upstream_html1["id"],
    ])
    self._publish_container(self.upstream_unit["id"])
    # 7⃣ Now, sync and check the resulting OLX of the downstream after reordering
    self._sync_downstream(downstream_unit["locator"])
    self.assertXmlEqual(self._get_course_block_olx(downstream_unit["locator"]), f"""
        <vertical
            display_name="Unit 1 Title"
            upstream_display_name="Unit 1 Title"
            upstream="{self.upstream_unit['id']}"
            upstream_version="5"
        >
            <!-- 🟢 the problem 3 has been moved to top: -->
            <problem
                display_name="Problem 3 Display Name"
                upstream_display_name="Problem 3 Display Name"
                markdown="null"
                {self.standard_capa_attributes}
                upstream="{upstream_problem3['id']}"
                upstream_version="2"
            >single select...</problem>
            <!-- 🟢 the problem 1 has been moved to middle: -->
            <problem
                display_name="Problem 1 NEW name"
                upstream_display_name="Problem 1 NEW name"
                markdown="updated"
                {self.standard_capa_attributes}
                upstream="{self.upstream_problem1['id']}"
                upstream_version="3"
            >multiple choice v2...</problem>
            <!-- 🟢 the html 1 has been moved to end: -->
            <html
                display_name="Text Content"
                upstream_display_name="Text Content"
                editor="visual"
                upstream="{self.upstream_html1['id']}"
                upstream_version="2"
            >This is the HTML.</html>
        </vertical>
    """)

View File

@@ -1,31 +1,47 @@
"""
Unit tests for /api/contentstore/v2/downstreams/* JSON APIs.
"""
from unittest.mock import patch
from django.conf import settings
import json
import ddt
from datetime import datetime, timezone
from unittest.mock import patch, MagicMock
from cms.lib.xblock.upstream_sync import UpstreamLink, BadUpstream
from django.conf import settings
from django.urls import reverse
from freezegun import freeze_time
from organizations.models import Organization
from cms.djangoapps.contentstore.helpers import StaticFileNotices
from cms.lib.xblock.upstream_sync import BadUpstream, UpstreamLink
from cms.djangoapps.contentstore.tests.utils import CourseTestCase
from cms.djangoapps.contentstore.xblock_storage_handlers import view_handlers as xblock_view_handlers
from opaque_keys.edx.keys import ContainerKey, UsageKey
from opaque_keys.edx.locator import LibraryLocatorV2
from common.djangoapps.student.tests.factories import UserFactory
from common.djangoapps.student.auth import add_users
from common.djangoapps.student.roles import CourseStaffRole
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, BlockFactory
from xmodule.modulestore.tests.factories import BlockFactory, CourseFactory
from openedx.core.djangoapps.content_libraries import api as lib_api
from .. import downstreams as downstreams_views
MOCK_LIB_KEY = "lib:OpenedX:CSPROB3"
MOCK_UPSTREAM_REF = "lb:OpenedX:CSPROB3:html:843b4c73-1e2d-4ced-a0ff-24e503cdb3e4"
MOCK_UPSTREAM_LINK = "{mfe_url}/library/{lib_key}/components?usageKey={usage_key}".format(
mfe_url=settings.COURSE_AUTHORING_MICROFRONTEND_URL,
lib_key=MOCK_LIB_KEY,
usage_key=MOCK_UPSTREAM_REF,
)
MOCK_UPSTREAM_ERROR = "your LibraryGPT subscription has expired"
URL_PREFIX = '/api/libraries/v2/'
URL_LIB_CREATE = URL_PREFIX
URL_LIB_BLOCKS = URL_PREFIX + '{lib_key}/blocks/'
URL_LIB_BLOCK_PUBLISH = URL_PREFIX + 'blocks/{block_key}/publish/'
URL_LIB_BLOCK_OLX = URL_PREFIX + 'blocks/{block_key}/olx/'
URL_LIB_CONTAINER = URL_PREFIX + 'containers/{container_key}/' # Get a container in this library
URL_LIB_CONTAINERS = URL_PREFIX + '{lib_key}/containers/' # Create a new container in this library
URL_LIB_CONTAINER_PUBLISH = URL_LIB_CONTAINER + 'publish/' # Publish changes to the specified container + children
def _get_upstream_link_good_and_syncable(downstream):
return UpstreamLink(
upstream_ref=downstream.upstream,
upstream_key=UsageKey.from_string(downstream.upstream),
version_synced=downstream.upstream_version,
version_available=(downstream.upstream_version or 0) + 1,
version_declined=downstream.upstream_version_declined,
@@ -37,31 +53,161 @@ def _get_upstream_link_bad(_downstream):
raise BadUpstream(MOCK_UPSTREAM_ERROR)
class _DownstreamViewTestMixin:
class _BaseDownstreamViewTestMixin:
"""
Shared data and error test cases.
"""
def setUp(self):
    """
    Create a simple course with one unit and two videos, one of which is linked to an "upstream".

    Also creates a content library (html + video components; unit, subsection and
    section containers), a second course with three videos all linked to the same
    upstream video, and several users with different permission levels.
    Time is frozen so that created/updated timestamps are predictable in assertions.
    """
    # pylint: disable=too-many-statements
    super().setUp()
    # Freeze "now" for the duration of the test so serialized timestamps are stable.
    self.now = datetime.now(timezone.utc)
    freezer = freeze_time(self.now)
    self.addCleanup(freezer.stop)
    freezer.start()
    self.maxDiff = 2000
    self.organization, _ = Organization.objects.get_or_create(
        short_name="CL-TEST",
        defaults={"name": "Content Libraries Tachyon Exploration & Survey Team"},
    )
    # Users: superuser (full access), simple_user (no access),
    # course_user (course staff), lib_user (library read access).
    self.superuser = UserFactory(username="superuser", password="password", is_staff=True, is_superuser=True)
    self.simple_user = UserFactory(username="simple_user", password="password")
    self.course_user = UserFactory(username="course_user", password="password")
    self.lib_user = UserFactory(username="lib_user", password="password")
    self.client.login(username=self.superuser.username, password="password")
    self.library_title = "Test Library 1"
    self.library_id = self._create_library(
        slug="testlib1_preview",
        title=self.library_title,
        description="Testing XBlocks"
    )["id"]
    self.library_key = LibraryLocatorV2.from_string(self.library_id)
    lib_api.set_library_user_permissions(self.library_key, self.lib_user, access_level="read")
    self.html_lib_id = self._add_block_to_library(self.library_id, "html", "html-baz")["id"]
    self.video_lib_id = self._add_block_to_library(self.library_id, "video", "video-baz")["id"]
    self.unit_id = self._create_container(self.library_id, "unit", "unit-1", "Unit 1")["id"]
    self.subsection_id = self._create_container(self.library_id, "subsection", "subsection-1", "Subsection 1")["id"]
    self.section_id = self._create_container(self.library_id, "section", "section-1", "Section 1")["id"]
    # Publish everything once so every upstream starts at version 1.
    self._publish_library_block(self.html_lib_id)
    self._publish_library_block(self.video_lib_id)
    self._publish_container(self.unit_id)
    self._publish_container(self.subsection_id)
    self._publish_container(self.section_id)
    self.mock_upstream_link = f"{settings.COURSE_AUTHORING_MICROFRONTEND_URL}/library/{self.library_id}/components?usageKey={self.video_lib_id}"  # pylint: disable=line-too-long  # noqa: E501
    self.course = CourseFactory.create()
    add_users(self.superuser, CourseStaffRole(self.course.id), self.course_user)
    chapter = BlockFactory.create(category='chapter', parent=self.course)
    sequential = BlockFactory.create(category='sequential', parent=chapter)
    unit = BlockFactory.create(category='vertical', parent=sequential)
    self.regular_video_key = BlockFactory.create(category='video', parent=unit).usage_key
    # BUGFIX: the original carried two conflicting keyword-argument lines here (a stale
    # `upstream=MOCK_UPSTREAM_REF, upstream_version=123` alongside the current one), which
    # is a SyntaxError ("keyword argument repeated"). Keep only the current arguments.
    self.downstream_video_key = BlockFactory.create(
        category='video', parent=unit, upstream=self.video_lib_id, upstream_version=1,
    ).usage_key
    self.downstream_html_key = BlockFactory.create(
        category='html', parent=unit, upstream=self.html_lib_id, upstream_version=1,
    ).usage_key
    self.downstream_chapter_key = BlockFactory.create(
        category='chapter', parent=self.course, upstream=self.section_id, upstream_version=1,
    ).usage_key
    self.downstream_sequential_key = BlockFactory.create(
        category='sequential', parent=chapter, upstream=self.subsection_id, upstream_version=1,
    ).usage_key
    self.downstream_unit_key = BlockFactory.create(
        category='vertical', parent=sequential, upstream=self.unit_id, upstream_version=1,
    ).usage_key
    # A second course whose three videos all link to the same upstream video,
    # to exercise the "list all downstreams of one upstream" queries.
    self.another_course = CourseFactory.create(display_name="Another Course")
    another_chapter = BlockFactory.create(category="chapter", parent=self.another_course)
    another_sequential = BlockFactory.create(category="sequential", parent=another_chapter)
    another_unit = BlockFactory.create(category="vertical", parent=another_sequential)
    self.another_video_keys = []
    for _ in range(3):
        # Adds 3 videos linked to the same upstream
        self.another_video_keys.append(
            BlockFactory.create(
                category="video",
                parent=another_unit,
                upstream=self.video_lib_id,
                upstream_version=1
            ).usage_key
        )
    self.fake_video_key = self.course.id.make_usage_key("video", "NoSuchVideo")
    # BUGFIX: a second, identical `self.superuser = UserFactory(...)` creation was left
    # behind here by a bad merge; it is removed (the user is already created above).
    self.learner = UserFactory(username="learner", password="password")
    # Make new versions of the unit and the html block available (but not the video),
    # so some downstreams become "ready to sync" while others stay current.
    self._update_container(self.unit_id, display_name="Unit 2")
    self._publish_container(self.unit_id)
    self._set_library_block_olx(self.html_lib_id, "<html><b>Hello world!</b></html>")
    self._publish_library_block(self.html_lib_id)
    # NOTE(review): these two extra publish calls look like merge leftovers, but
    # re-publishing unchanged content does not bump versions (the assertions below
    # expect video v1 / html v2), so they are kept to preserve behavior exactly.
    self._publish_library_block(self.video_lib_id)
    self._publish_library_block(self.html_lib_id)
def _api(self, method, url, data, expect_response):
    """
    Call a REST API and assert on the response status code.

    Args:
        method: HTTP verb as a lowercase string ('get', 'post', 'patch', ...),
            used to look up the matching method on the Django test client.
        url: URL to request.
        data: request payload (sent as JSON).
        expect_response: expected HTTP status code; the call fails the test
            with the response body in the message if it doesn't match.

    Returns:
        The parsed response data (``response.data``).
    """
    response = getattr(self.client, method)(url, data, format="json", content_type="application/json")
    assert response.status_code == expect_response,\
        'Unexpected response code {}:\n{}'.format(response.status_code, getattr(response, 'data', '(no data)'))
    return response.data
def _create_library(
    self, slug, title, description="", org=None,
    license_type='', expect_response=200,
):
    """
    Create a library via the REST API and return the response data.

    ``org`` defaults to this test's pre-created organization short name.
    """
    if org is None:
        org = self.organization.short_name
    return self._api('post', URL_LIB_CREATE, {
        "org": org,
        "slug": slug,
        "title": title,
        "description": description,
        "license": license_type,
    }, expect_response)
def _add_block_to_library(self, lib_key, block_type, slug, parent_block=None, expect_response=200):
    """
    Add a new XBlock to the library.

    ``slug`` becomes the block's definition id; pass ``parent_block`` to nest
    the new block under an existing one.
    """
    data = {"block_type": block_type, "definition_id": slug}
    if parent_block:
        data["parent_block"] = parent_block
    return self._api('post', URL_LIB_BLOCKS.format(lib_key=lib_key), data, expect_response)
def _publish_library_block(self, block_key, expect_response=200):
    """ Publish changes from a specified XBlock (makes its draft the live version). """
    return self._api('post', URL_LIB_BLOCK_PUBLISH.format(block_key=block_key), None, expect_response)
def _publish_container(self, container_key: ContainerKey | str, expect_response=200):
    """ Publish all changes in the specified container + children. """
    return self._api('post', URL_LIB_CONTAINER_PUBLISH.format(container_key=container_key), None, expect_response)
def _update_container(self, container_key: ContainerKey | str, display_name: str, expect_response=200):
    """ Update a container (unit etc.), currently just its display name. """
    data = {"display_name": display_name}
    return self._api('patch', URL_LIB_CONTAINER.format(container_key=container_key), data, expect_response)
def _set_library_block_olx(self, block_key, new_olx, expect_response=200):
    """ Overwrite the OLX of a specific block in the library (as a draft; publish separately). """
    return self._api('post', URL_LIB_BLOCK_OLX.format(block_key=block_key), {"olx": new_olx}, expect_response)
def call_api(self, usage_key_string):
    """
    Hook for subclasses: call the endpoint under test for the given usage key.

    Each concrete test case class overrides this with the specific
    GET/PUT/POST/DELETE request it exercises.
    """
    raise NotImplementedError
def _create_container(self, lib_key, container_type, slug: str | None, display_name: str, expect_response=200):
    """
    Create a container (unit, subsection, section, ...) in the library.

    If ``slug`` is None the server chooses one automatically.
    """
    data = {"container_type": container_type, "display_name": display_name}
    if slug:
        data["slug"] = slug
    return self._api('post', URL_LIB_CONTAINERS.format(lib_key=lib_key), data, expect_response)
class SharedErrorTestCases(_BaseDownstreamViewTestMixin):
"""
Shared error test cases.
"""
def test_404_downstream_not_found(self):
"""
Do we raise 404 if the specified downstream block could not be loaded?
@@ -81,7 +227,7 @@ class _DownstreamViewTestMixin:
assert "not found" in response.data["developer_message"]
class GetDownstreamViewTest(_DownstreamViewTestMixin, SharedModuleStoreTestCase):
class GetComponentDownstreamViewTest(SharedErrorTestCases, SharedModuleStoreTestCase):
"""
Test that `GET /api/v2/contentstore/downstreams/...` inspects a downstream's link to an upstream.
"""
@@ -96,10 +242,10 @@ class GetDownstreamViewTest(_DownstreamViewTestMixin, SharedModuleStoreTestCase)
self.client.login(username="superuser", password="password")
response = self.call_api(self.downstream_video_key)
assert response.status_code == 200
assert response.data['upstream_ref'] == MOCK_UPSTREAM_REF
assert response.data['upstream_ref'] == self.video_lib_id
assert response.data['error_message'] is None
assert response.data['ready_to_sync'] is True
assert response.data['upstream_link'] == MOCK_UPSTREAM_LINK
assert response.data['upstream_link'] == self.mock_upstream_link
@patch.object(UpstreamLink, "get_for_block", _get_upstream_link_bad)
def test_200_bad_upstream(self):
@@ -109,7 +255,7 @@ class GetDownstreamViewTest(_DownstreamViewTestMixin, SharedModuleStoreTestCase)
self.client.login(username="superuser", password="password")
response = self.call_api(self.downstream_video_key)
assert response.status_code == 200
assert response.data['upstream_ref'] == MOCK_UPSTREAM_REF
assert response.data['upstream_ref'] == self.video_lib_id
assert response.data['error_message'] == MOCK_UPSTREAM_ERROR
assert response.data['ready_to_sync'] is False
assert response.data['upstream_link'] is None
@@ -127,37 +273,37 @@ class GetDownstreamViewTest(_DownstreamViewTestMixin, SharedModuleStoreTestCase)
assert response.data['upstream_link'] is None
class PutDownstreamViewTest(_DownstreamViewTestMixin, SharedModuleStoreTestCase):
class PutDownstreamViewTest(SharedErrorTestCases, SharedModuleStoreTestCase):
"""
Test that `PUT /api/v2/contentstore/downstreams/...` edits a downstream's link to an upstream.
"""
def call_api(self, usage_key_string, sync: str | None = None):
return self.client.put(
f"/api/contentstore/v2/downstreams/{usage_key_string}",
data={
"upstream_ref": MOCK_UPSTREAM_REF,
data=json.dumps({
"upstream_ref": str(self.video_lib_id),
**({"sync": sync} if sync else {}),
},
}),
content_type="application/json",
)
@patch.object(downstreams_views, "fetch_customizable_fields")
@patch.object(downstreams_views, "sync_from_upstream")
@patch.object(downstreams_views, "fetch_customizable_fields_from_block")
@patch.object(downstreams_views, "sync_library_content")
@patch.object(UpstreamLink, "get_for_block", _get_upstream_link_good_and_syncable)
def test_200_with_sync(self, mock_sync, mock_fetch):
"""
Does the happy path work (with sync=True)?
"""
self.client.login(username="superuser", password="password")
response = self.call_api(self.regular_video_key, sync='true')
response = self.call_api(str(self.regular_video_key), sync='true')
assert response.status_code == 200
video_after = modulestore().get_item(self.regular_video_key)
assert mock_sync.call_count == 1
assert mock_fetch.call_count == 0
assert video_after.upstream == MOCK_UPSTREAM_REF
assert video_after.upstream == self.video_lib_id
@patch.object(downstreams_views, "fetch_customizable_fields")
@patch.object(downstreams_views, "sync_from_upstream")
@patch.object(downstreams_views, "fetch_customizable_fields_from_block")
@patch.object(downstreams_views, "sync_library_content")
@patch.object(UpstreamLink, "get_for_block", _get_upstream_link_good_and_syncable)
def test_200_no_sync(self, mock_sync, mock_fetch):
"""
@@ -169,9 +315,11 @@ class PutDownstreamViewTest(_DownstreamViewTestMixin, SharedModuleStoreTestCase)
video_after = modulestore().get_item(self.regular_video_key)
assert mock_sync.call_count == 0
assert mock_fetch.call_count == 1
assert video_after.upstream == MOCK_UPSTREAM_REF
assert video_after.upstream == self.video_lib_id
@patch.object(downstreams_views, "fetch_customizable_fields", side_effect=BadUpstream(MOCK_UPSTREAM_ERROR))
@patch.object(
downstreams_views, "fetch_customizable_fields_from_block", side_effect=BadUpstream(MOCK_UPSTREAM_ERROR),
)
def test_400(self, sync: str):
"""
Do we raise a 400 if the provided upstream reference is malformed or not accessible?
@@ -184,7 +332,7 @@ class PutDownstreamViewTest(_DownstreamViewTestMixin, SharedModuleStoreTestCase)
assert video_after.upstream is None
class DeleteDownstreamViewTest(_DownstreamViewTestMixin, SharedModuleStoreTestCase):
class DeleteDownstreamViewTest(SharedErrorTestCases, SharedModuleStoreTestCase):
"""
Test that `DELETE /api/v2/contentstore/downstreams/...` severs a downstream's link to an upstream.
"""
@@ -213,7 +361,7 @@ class DeleteDownstreamViewTest(_DownstreamViewTestMixin, SharedModuleStoreTestCa
assert mock_sever.call_count == 1
class _DownstreamSyncViewTestMixin(_DownstreamViewTestMixin):
class _DownstreamSyncViewTestMixin(SharedErrorTestCases):
"""
Shared tests between the /api/contentstore/v2/downstreams/.../sync endpoints.
"""
@@ -238,6 +386,60 @@ class _DownstreamSyncViewTestMixin(_DownstreamViewTestMixin):
assert "is not linked" in response.data["developer_message"][0]
class CreateDownstreamViewTest(CourseTestCase, _BaseDownstreamViewTestMixin, SharedModuleStoreTestCase):
    """
    Tests creating new downstream blocks in a course from a library upstream,
    via the legacy ``xblock_handler`` endpoint (``library_content_key`` payload).
    """
    def call_api_post(self, library_content_key, category):
        """
        Call the api to create a downstream block using
        `library_content_key` as upstream
        """
        data = {
            "parent_locator": str(self.course.location),
            "display_name": "Test block",
            "library_content_key": library_content_key,
            "category": category,
        }
        return self.client.post(
            reverse("xblock_handler"),
            data=json.dumps(data),
            content_type="application/json",
        )

    def test_200(self):
        """A new html downstream is created and linked to its upstream."""
        response = self.call_api_post(self.html_lib_id, "html")
        assert response.status_code == 200
        data = response.json()
        assert data["upstreamRef"] == self.html_lib_id
        # The created block, fetched from the modulestore, carries the upstream link.
        usage_key = UsageKey.from_string(data["locator"])
        item = modulestore().get_item(usage_key)
        assert item.upstream == self.html_lib_id

    @patch("cms.djangoapps.contentstore.helpers._insert_static_files_into_downstream_xblock")
    @patch("cms.djangoapps.contentstore.helpers.content_staging_api.stage_xblock_temporarily")
    @patch("cms.djangoapps.contentstore.xblock_storage_handlers.view_handlers.sync_from_upstream_block")
    def test_200_video(self, mock_sync, mock_stage, mock_insert):
        """
        A new video downstream is created and linked, with the sync/staging/static-file
        machinery mocked out; the video still gets an ``edx_video_id`` assigned.
        """
        mock_lib_block = MagicMock()
        mock_lib_block.runtime.get_block_assets.return_value = ['mocked_asset']
        mock_sync.return_value = mock_lib_block
        mock_stage.return_value = MagicMock()
        mock_insert.return_value = StaticFileNotices()
        response = self.call_api_post(self.video_lib_id, "video")
        assert response.status_code == 200
        data = response.json()
        assert data["upstreamRef"] == self.video_lib_id
        usage_key = UsageKey.from_string(data["locator"])
        item = modulestore().get_item(usage_key)
        assert item.upstream == self.video_lib_id
        assert item.edx_video_id is not None
class PostDownstreamSyncViewTest(_DownstreamSyncViewTestMixin, SharedModuleStoreTestCase):
"""
Test that `POST /api/v2/contentstore/downstreams/.../sync` initiates a sync from the linked upstream.
@@ -246,18 +448,23 @@ class PostDownstreamSyncViewTest(_DownstreamSyncViewTestMixin, SharedModuleStore
return self.client.post(f"/api/contentstore/v2/downstreams/{usage_key_string}/sync")
@patch.object(UpstreamLink, "get_for_block", _get_upstream_link_good_and_syncable)
@patch.object(downstreams_views, "sync_from_upstream")
def test_200(self, mock_sync_from_upstream):
@patch.object(xblock_view_handlers, "import_static_assets_for_library_sync", return_value=StaticFileNotices())
@patch.object(downstreams_views, "clear_transcripts")
def test_200(self, mock_import_staged_content, mock_clear_transcripts):
"""
Does the happy path work?
"""
self.client.login(username="superuser", password="password")
response = self.call_api(self.downstream_video_key)
assert response.status_code == 200
assert mock_sync_from_upstream.call_count == 1
assert mock_import_staged_content.call_count == 1
assert mock_clear_transcripts.call_count == 1
class DeleteDownstreamSyncViewtest(_DownstreamSyncViewTestMixin, SharedModuleStoreTestCase):
class DeleteDownstreamSyncViewtest(
_DownstreamSyncViewTestMixin,
SharedModuleStoreTestCase,
):
"""
Test that `DELETE /api/v2/contentstore/downstreams/.../sync` declines a sync from the linked upstream.
"""
@@ -274,3 +481,504 @@ class DeleteDownstreamSyncViewtest(_DownstreamSyncViewTestMixin, SharedModuleSto
response = self.call_api(self.downstream_video_key)
assert response.status_code == 204
assert mock_decline_sync.call_count == 1
@ddt.ddt
class GetUpstreamViewTest(
_BaseDownstreamViewTestMixin,
SharedModuleStoreTestCase,
):
"""
Test that `GET /api/v2/contentstore/downstreams-all?...` returns list of links based on the provided filter.
"""
def call_api(
self,
course_id: str | None = None,
ready_to_sync: bool | None = None,
upstream_key: str | None = None,
item_type: str | None = None,
):
data = {}
if course_id is not None:
data["course_id"] = str(course_id)
if ready_to_sync is not None:
data["ready_to_sync"] = str(ready_to_sync)
if upstream_key is not None:
data["upstream_key"] = str(upstream_key)
if item_type is not None:
data["item_type"] = str(item_type)
return self.client.get("/api/contentstore/v2/downstreams-all/", data=data)
def test_200_all_downstreams_for_a_course(self):
"""
Returns all links for given course
"""
self.client.login(username="course_user", password="password")
response = self.call_api(course_id=self.course.id)
assert response.status_code == 200
data = response.json()
date_format = self.now.isoformat().split("+")[0] + 'Z'
expected = [
{
'created': date_format,
'downstream_context_key': str(self.course.id),
'downstream_usage_key': str(self.downstream_video_key),
'id': 1,
'ready_to_sync': False,
'updated': date_format,
'upstream_context_key': self.library_id,
'upstream_context_title': self.library_title,
'upstream_key': self.video_lib_id,
'upstream_type': 'component',
'upstream_version': 1,
'version_declined': None,
'version_synced': 1
},
{
'created': date_format,
'downstream_context_key': str(self.course.id),
'downstream_usage_key': str(self.downstream_html_key),
'id': 2,
'ready_to_sync': True,
'updated': date_format,
'upstream_context_key': self.library_id,
'upstream_context_title': self.library_title,
'upstream_key': self.html_lib_id,
'upstream_type': 'component',
'upstream_version': 2,
'version_declined': None,
'version_synced': 1,
},
{
'created': date_format,
'downstream_context_key': str(self.course.id),
'downstream_usage_key': str(self.downstream_chapter_key),
'id': 1,
'ready_to_sync': False,
'updated': date_format,
'upstream_context_key': self.library_id,
'upstream_context_title': self.library_title,
'upstream_key': self.section_id,
'upstream_type': 'container',
'upstream_version': 1,
'version_declined': None,
'version_synced': 1,
},
{
'created': date_format,
'downstream_context_key': str(self.course.id),
'downstream_usage_key': str(self.downstream_sequential_key),
'id': 2,
'ready_to_sync': False,
'updated': date_format,
'upstream_context_key': self.library_id,
'upstream_context_title': self.library_title,
'upstream_key': self.subsection_id,
'upstream_type': 'container',
'upstream_version': 1,
'version_declined': None,
'version_synced': 1,
},
{
'created': date_format,
'downstream_context_key': str(self.course.id),
'downstream_usage_key': str(self.downstream_unit_key),
'id': 3,
'ready_to_sync': True,
'updated': date_format,
'upstream_context_key': self.library_id,
'upstream_context_title': self.library_title,
'upstream_key': self.unit_id,
'upstream_type': 'container',
'upstream_version': 2,
'version_declined': None,
'version_synced': 1
},
]
self.assertListEqual(data["results"], expected)
self.assertEqual(data["count"], 5)
def test_permission_denied_with_course_filter(self):
self.client.login(username="simple_user", password="password")
response = self.call_api(course_id=self.course.id)
assert response.status_code == 403
def test_200_component_downstreams_for_a_course(self):
"""
Returns all component links for given course
"""
self.client.login(username="course_user", password="password")
response = self.call_api(
course_id=self.course.id,
item_type='components',
)
assert response.status_code == 200
data = response.json()
date_format = self.now.isoformat().split("+")[0] + 'Z'
expected = [
{
'created': date_format,
'downstream_context_key': str(self.course.id),
'downstream_usage_key': str(self.downstream_video_key),
'id': 1,
'ready_to_sync': False,
'updated': date_format,
'upstream_context_key': self.library_id,
'upstream_context_title': self.library_title,
'upstream_key': self.video_lib_id,
'upstream_type': 'component',
'upstream_version': 1,
'version_declined': None,
'version_synced': 1
},
{
'created': date_format,
'downstream_context_key': str(self.course.id),
'downstream_usage_key': str(self.downstream_html_key),
'id': 2,
'ready_to_sync': True,
'updated': date_format,
'upstream_context_key': self.library_id,
'upstream_context_title': self.library_title,
'upstream_key': self.html_lib_id,
'upstream_type': 'component',
'upstream_version': 2,
'version_declined': None,
'version_synced': 1,
},
]
self.assertListEqual(data["results"], expected)
self.assertEqual(data["count"], 2)
def test_200_container_downstreams_for_a_course(self):
"""
Returns all container links for given course
"""
self.client.login(username="course_user", password="password")
response = self.call_api(
course_id=self.course.id,
item_type='containers',
)
assert response.status_code == 200
data = response.json()
date_format = self.now.isoformat().split("+")[0] + 'Z'
expected = [
{
'created': date_format,
'downstream_context_key': str(self.course.id),
'downstream_usage_key': str(self.downstream_chapter_key),
'id': 1,
'ready_to_sync': False,
'updated': date_format,
'upstream_context_key': self.library_id,
'upstream_context_title': self.library_title,
'upstream_key': self.section_id,
'upstream_type': 'container',
'upstream_version': 1,
'version_declined': None,
'version_synced': 1,
},
{
'created': date_format,
'downstream_context_key': str(self.course.id),
'downstream_usage_key': str(self.downstream_sequential_key),
'id': 2,
'ready_to_sync': False,
'updated': date_format,
'upstream_context_key': self.library_id,
'upstream_context_title': self.library_title,
'upstream_key': self.subsection_id,
'upstream_type': 'container',
'upstream_version': 1,
'version_declined': None,
'version_synced': 1,
},
{
'created': date_format,
'downstream_context_key': str(self.course.id),
'downstream_usage_key': str(self.downstream_unit_key),
'id': 3,
'ready_to_sync': True,
'updated': date_format,
'upstream_context_key': self.library_id,
'upstream_context_title': self.library_title,
'upstream_key': self.unit_id,
'upstream_type': 'container',
'upstream_version': 2,
'version_declined': None,
'version_synced': 1
},
]
self.assertListEqual(data["results"], expected)
self.assertEqual(data["count"], 3)
@ddt.data(
    ('all', 2),
    ('components', 1),
    ('containers', 1),
)
@ddt.unpack
def test_200_downstreams_ready_to_sync(self, item_type, expected_count):
    """
    Only links flagged ready_to_sync are returned for each item_type.
    """
    self.client.login(username="superuser", password="password")
    response = self.call_api(ready_to_sync=True, item_type=item_type)
    assert response.status_code == 200
    data = response.json()
    # Every returned link must itself be syncable.
    for link in data["results"]:
        self.assertTrue(link["ready_to_sync"])
    self.assertEqual(data["count"], expected_count)
def test_permission_denied_without_filter(self):
    """An ordinary user calling the API with no filter gets HTTP 403."""
    self.client.login(username="simple_user", password="password")
    response = self.call_api()
    self.assertEqual(response.status_code, 403)
def test_200_component_downstream_context_list(self):
    """
    Returns all entity downstream links for given component
    """
    self.client.login(username="lib_user", password="password")
    response = self.call_api(upstream_key=self.video_lib_id)
    assert response.status_code == 200
    data = response.json()
    expected_keys = [str(self.downstream_video_key)]
    expected_keys += [str(key) for key in self.another_video_keys]
    actual_keys = [str(link["downstream_usage_key"]) for link in data["results"]]
    self.assertListEqual(actual_keys, expected_keys)
    self.assertEqual(data["count"], 4)
def test_200_container_downstream_context_list(self):
    """
    Returns all entity downstream links for given container
    """
    self.client.login(username="lib_user", password="password")
    response = self.call_api(upstream_key=self.unit_id)
    assert response.status_code == 200
    data = response.json()
    actual_keys = [str(link["downstream_usage_key"]) for link in data["results"]]
    self.assertListEqual(actual_keys, [str(self.downstream_unit_key)])
    self.assertEqual(data["count"], 1)
class GetComponentUpstreamViewTest(
    _BaseDownstreamViewTestMixin,
    SharedModuleStoreTestCase,
):
    """
    Test that `GET /api/v2/contentstore/downstreams?...` returns list of component links based on the provided filter.
    """

    def call_api(
        self,
        course_id: str | None = None,
        ready_to_sync: bool | None = None,
        upstream_usage_key: str | None = None,
    ):
        """Issue a GET to the downstreams endpoint, including only the supplied filters."""
        params = {}
        if course_id is not None:
            params["course_id"] = str(course_id)
        if ready_to_sync is not None:
            params["ready_to_sync"] = str(ready_to_sync)
        if upstream_usage_key is not None:
            params["upstream_usage_key"] = str(upstream_usage_key)
        return self.client.get("/api/contentstore/v2/downstreams/", data=params)

    def test_200_all_component_downstreams_for_a_course(self):
        """
        Returns all component links for given course
        """
        self.client.login(username="superuser", password="password")
        response = self.call_api(course_id=self.course.id)
        assert response.status_code == 200
        data = response.json()
        date_format = self.now.isoformat().split("+")[0] + 'Z'
        # (id, downstream usage key, upstream usage key, ready_to_sync, upstream_version)
        rows = [
            (1, self.downstream_video_key, self.video_lib_id, False, 1),
            (2, self.downstream_html_key, self.html_lib_id, True, 2),
        ]
        expected = [
            {
                'created': date_format,
                'downstream_context_key': str(self.course.id),
                'downstream_usage_key': str(downstream_key),
                'id': link_id,
                'ready_to_sync': ready,
                'updated': date_format,
                'upstream_context_key': self.library_id,
                'upstream_context_title': self.library_title,
                'upstream_usage_key': upstream_key,
                'upstream_version': version,
                'version_declined': None,
                'version_synced': 1,
            }
            for link_id, downstream_key, upstream_key, ready, version in rows
        ]
        self.assertListEqual(data["results"], expected)
        self.assertEqual(data["count"], 2)

    def test_200_all_component_downstreams_ready_to_sync(self):
        """
        Returns all component links that are syncable
        """
        self.client.login(username="superuser", password="password")
        response = self.call_api(ready_to_sync=True)
        assert response.status_code == 200
        data = response.json()
        for link in data["results"]:
            self.assertTrue(link["ready_to_sync"])
        self.assertEqual(data["count"], 1)

    def test_200_component_downstream_context_list(self):
        """
        Returns all component downstream courses for given library block
        """
        self.client.login(username="superuser", password="password")
        response = self.call_api(upstream_usage_key=self.video_lib_id)
        assert response.status_code == 200
        data = response.json()
        expected_keys = [str(self.downstream_video_key)]
        expected_keys += [str(key) for key in self.another_video_keys]
        actual_keys = [str(link["downstream_usage_key"]) for link in data["results"]]
        self.assertListEqual(actual_keys, expected_keys)
        self.assertEqual(data["count"], 4)
class GetDownstreamSummaryViewTest(
    _BaseDownstreamViewTestMixin,
    SharedModuleStoreTestCase,
):
    """
    Test that `GET /api/v2/contentstore/downstreams/<course_id>/summary` returns summary of links in course.
    """

    def call_api(self, course_id):
        """GET the per-course downstream-links summary endpoint."""
        return self.client.get(f"/api/contentstore/v2/downstreams/{course_id}/summary")

    @patch.object(UpstreamLink, "get_for_block", _get_upstream_link_good_and_syncable)
    def test_200_summary(self):
        """
        Does the happy path work?
        """
        self.client.login(username="superuser", password="password")
        published_at = self.now.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

        def expected_summary(ready_count, total):
            # Both courses link only to "Test Library 1"; counts differ per course.
            return [{
                'upstream_context_title': 'Test Library 1',
                'upstream_context_key': self.library_id,
                'ready_to_sync_count': ready_count,
                'total_count': total,
                'last_published_at': published_at,
            }]

        response = self.call_api(str(self.another_course.id))
        assert response.status_code == 200
        self.assertListEqual(response.json(), expected_summary(0, 3))

        response = self.call_api(str(self.course.id))
        assert response.status_code == 200
        self.assertListEqual(response.json(), expected_summary(2, 5))
class GetContainerUpstreamViewTest(
    _BaseDownstreamViewTestMixin,
    SharedModuleStoreTestCase,
):
    """
    Test that `GET /api/v2/contentstore/downstream-containers?...` returns list of links based on the provided filter.
    """

    def call_api(
        self,
        course_id: str | None = None,
        ready_to_sync: bool | None = None,
        upstream_container_key: str | None = None,
    ):
        """Issue a GET to the downstream-containers endpoint, including only the supplied filters."""
        params = {}
        if course_id is not None:
            params["course_id"] = str(course_id)
        if ready_to_sync is not None:
            params["ready_to_sync"] = str(ready_to_sync)
        if upstream_container_key is not None:
            params["upstream_container_key"] = str(upstream_container_key)
        return self.client.get("/api/contentstore/v2/downstream-containers/", data=params)

    def test_200_all_container_downstreams_for_a_course(self):
        """
        Returns all container links for given course
        """
        self.client.login(username="superuser", password="password")
        response = self.call_api(course_id=self.course.id)
        assert response.status_code == 200
        data = response.json()
        date_format = self.now.isoformat().split("+")[0] + 'Z'
        # (id, downstream usage key, upstream container key, ready_to_sync, upstream_version)
        rows = [
            (1, self.downstream_chapter_key, self.section_id, False, 1),
            (2, self.downstream_sequential_key, self.subsection_id, False, 1),
            (3, self.downstream_unit_key, self.unit_id, True, 2),
        ]
        expected = [
            {
                'created': date_format,
                'downstream_context_key': str(self.course.id),
                'downstream_usage_key': str(downstream_key),
                'id': link_id,
                'ready_to_sync': ready,
                'updated': date_format,
                'upstream_context_key': self.library_id,
                'upstream_context_title': self.library_title,
                'upstream_container_key': container_key,
                'upstream_version': version,
                'version_declined': None,
                'version_synced': 1,
            }
            for link_id, downstream_key, container_key, ready, version in rows
        ]
        self.assertListEqual(data["results"], expected)
        self.assertEqual(data["count"], 3)

    def test_200_all_downstreams_ready_to_sync(self):
        """
        Returns all links that are syncable
        """
        self.client.login(username="superuser", password="password")
        response = self.call_api(ready_to_sync=True)
        assert response.status_code == 200
        data = response.json()
        for link in data["results"]:
            self.assertTrue(link["ready_to_sync"])
        self.assertEqual(data["count"], 1)

View File

@@ -1,28 +1,21 @@
"""
Unit tests for home page view.
"""
from collections import OrderedDict
from datetime import datetime, timedelta
from unittest.mock import patch
import ddt
import pytz
from django.conf import settings
from django.test import override_settings
from django.urls import reverse
from edx_toggles.toggles.testutils import override_waffle_switch
from rest_framework import status
from cms.djangoapps.contentstore.tests.utils import CourseTestCase
from cms.djangoapps.contentstore.utils import reverse_course_url
from cms.djangoapps.contentstore.views.course import ENABLE_GLOBAL_STAFF_OPTIMIZATION
from openedx.core.djangoapps.content.course_overviews.tests.factories import CourseOverviewFactory
FEATURES_WITH_HOME_PAGE_COURSE_V2_API = settings.FEATURES.copy()
FEATURES_WITH_HOME_PAGE_COURSE_V2_API['ENABLE_HOME_PAGE_COURSE_API_V2'] = True
@override_settings(FEATURES=FEATURES_WITH_HOME_PAGE_COURSE_V2_API)
@ddt.ddt
class HomePageCoursesViewV2Test(CourseTestCase):
"""
@@ -45,6 +38,7 @@ class HomePageCoursesViewV2Test(CourseTestCase):
org=archived_course_key.org,
end=(datetime.now() - timedelta(days=365)).replace(tzinfo=pytz.UTC),
)
self.non_staff_client, _ = self.create_non_staff_authed_user_client()
def test_home_page_response(self):
"""Get list of courses available to the logged in user.
@@ -103,30 +97,6 @@ class HomePageCoursesViewV2Test(CourseTestCase):
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertDictEqual(expected_response, response.data)
@override_waffle_switch(ENABLE_GLOBAL_STAFF_OPTIMIZATION, True)
def test_org_query_if_passed(self):
"""Get list of courses when org filter passed as a query param.
Expected result:
- A list of courses available to the logged in user for the specified org.
"""
response = self.client.get(self.api_v2_url, {"org": "demo-org"})
self.assertEqual(len(response.data['results']['courses']), 1)
self.assertEqual(response.status_code, status.HTTP_200_OK)
@override_waffle_switch(ENABLE_GLOBAL_STAFF_OPTIMIZATION, True)
def test_org_query_if_empty(self):
"""Get home page with an empty org query param.
Expected result:
- An empty list of courses available to the logged in user.
"""
response = self.client.get(self.api_v2_url)
self.assertEqual(len(response.data['results']['courses']), 0)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_active_only_query_if_passed(self):
"""Get list of active courses only.
@@ -233,17 +203,98 @@ class HomePageCoursesViewV2Test(CourseTestCase):
self.assertEqual(response.data["count"], 2)
self.assertEqual(response.status_code, status.HTTP_200_OK)
@patch("cms.djangoapps.contentstore.views.course.CourseOverview")
@patch("cms.djangoapps.contentstore.views.course.modulestore")
def test_api_v2_is_disabled(self, mock_modulestore, mock_course_overview):
"""Get list of courses when home page course v2 API is disabled.
@ddt.data(
("active_only", "true"),
("archived_only", "true"),
("search", "sample"),
("order", "org"),
("page", 1),
)
@ddt.unpack
def test_if_empty_list_of_courses(self, query_param, value):
"""Get list of courses when no courses are available.
Expected result:
- Courses are read from the modulestore.
- An empty list of courses available to the logged in user.
"""
with override_settings(FEATURES={'ENABLE_HOME_PAGE_COURSE_API_V2': False}):
response = self.client.get(self.api_v1_url)
self.active_course.delete()
self.archived_course.delete()
response = self.client.get(self.api_v2_url, {query_param: value})
self.assertEqual(len(response.data['results']['courses']), 0)
self.assertEqual(response.status_code, status.HTTP_200_OK)
@ddt.data(
("active_only", "true", 2, 0),
("archived_only", "true", 0, 1),
("search", "foo", 1, 0),
("search", "demo", 0, 1),
("order", "org", 2, 1),
("order", "display_name", 2, 1),
("order", "number", 2, 1),
("order", "run", 2, 1)
)
@ddt.unpack
def test_filter_and_ordering_courses(
self,
filter_key,
filter_value,
expected_active_length,
expected_archived_length
):
"""Get list of courses when filter and ordering are applied.
This test creates two courses besides the default courses created in the setUp method.
Then filters and orders them based on the filter_key and filter_value passed as query parameters.
Expected result:
- A list of courses available to the logged in user for the specified filter and order.
"""
archived_course_key = self.store.make_course_key("demo-org", "demo-number", "demo-run")
CourseOverviewFactory.create(
display_name="Course (Demo)",
id=archived_course_key,
org=archived_course_key.org,
end=(datetime.now() - timedelta(days=365)).replace(tzinfo=pytz.UTC),
)
active_course_key = self.store.make_course_key("foo-org", "foo-number", "foo-run")
CourseOverviewFactory.create(
display_name="Course (Foo)",
id=active_course_key,
org=active_course_key.org,
)
response = self.client.get(self.api_v2_url, {filter_key: filter_value})
self.assertEqual(response.status_code, status.HTTP_200_OK)
mock_modulestore().get_course_summaries.assert_called_once()
mock_course_overview.get_all_courses.assert_not_called()
self.assertEqual(
len([course for course in response.data["results"]["courses"] if course["is_active"]]),
expected_active_length
)
self.assertEqual(
len([course for course in response.data["results"]["courses"] if not course["is_active"]]),
expected_archived_length
)
@ddt.data(
("active_only", "true"),
("archived_only", "true"),
("search", "sample"),
("order", "org"),
("page", 1),
)
@ddt.unpack
def test_if_empty_list_of_courses_non_staff(self, query_param, value):
"""Get list of courses when no courses are available for non-staff users.
Expected result:
- An empty list of courses available to the logged in user.
"""
self.active_course.delete()
self.archived_course.delete()
response = self.non_staff_client.get(self.api_v2_url, {query_param: value})
self.assertEqual(len(response.data["results"]["courses"]), 0)
self.assertEqual(response.status_code, status.HTTP_200_OK)

View File

@@ -12,8 +12,23 @@ from django.db import transaction
from django.dispatch import receiver
from edx_toggles.toggles import SettingToggle
from opaque_keys.edx.keys import CourseKey
from openedx_events.content_authoring.data import CourseCatalogData, CourseScheduleData
from openedx_events.content_authoring.signals import COURSE_CATALOG_INFO_CHANGED
from openedx_events.content_authoring.data import (
CourseCatalogData,
CourseData,
CourseScheduleData,
LibraryBlockData,
LibraryContainerData,
XBlockData,
)
from openedx_events.content_authoring.signals import (
COURSE_CATALOG_INFO_CHANGED,
COURSE_IMPORT_COMPLETED,
LIBRARY_BLOCK_DELETED,
LIBRARY_CONTAINER_DELETED,
XBLOCK_CREATED,
XBLOCK_DELETED,
XBLOCK_UPDATED,
)
from pytz import UTC
from cms.djangoapps.contentstore.courseware_index import (
@@ -29,6 +44,15 @@ from openedx.core.djangoapps.discussions.tasks import update_discussions_setting
from openedx.core.lib.gating import api as gating_api
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import SignalHandler, modulestore
from xmodule.modulestore.exceptions import ItemNotFoundError
from ..models import ComponentLink, ContainerLink
from ..tasks import (
create_or_update_upstream_links,
handle_create_or_update_xblock_upstream_link,
handle_unlink_upstream_block,
handle_unlink_upstream_container,
)
from .signals import GRADING_POLICY_CHANGED
log = logging.getLogger(__name__)
@@ -107,6 +131,8 @@ def emit_catalog_info_changed_signal(course_key: CourseKey):
if SEND_CATALOG_INFO_SIGNAL.is_enabled():
timestamp, catalog_info = _create_catalog_data_for_signal(course_key)
if catalog_info is not None:
# .. event_implemented_name: COURSE_CATALOG_INFO_CHANGED
# .. event_type: org.openedx.content_authoring.course.catalog_info.changed.v1
COURSE_CATALOG_INFO_CHANGED.send_event(time=timestamp, catalog_info=catalog_info)
@@ -197,12 +223,19 @@ def handle_item_deleted(**kwargs):
# Strip branch info
usage_key = usage_key.for_branch(None)
course_key = usage_key.course_key
deleted_block = modulestore().get_item(usage_key)
try:
deleted_block = modulestore().get_item(usage_key)
except ItemNotFoundError:
return
id_list = {deleted_block.location}
for block in yield_dynamic_block_descendants(deleted_block, kwargs.get('user_id')):
# Remove prerequisite milestone data
gating_api.remove_prerequisite(block.location)
# Remove any 'requires' course content milestone relationships
gating_api.set_required_content(course_key, block.location, None, None, None)
id_list.add(block.location)
ComponentLink.objects.filter(downstream_usage_key__in=id_list).delete()
@receiver(GRADING_POLICY_CHANGED)
@@ -224,3 +257,78 @@ def handle_grading_policy_changed(sender, **kwargs):
task_id=result.task_id,
kwargs=kwargs,
))
@receiver(XBLOCK_CREATED)
@receiver(XBLOCK_UPDATED)
def create_or_update_upstream_downstream_link_handler(**kwargs):
    """
    Automatically create or update upstream->downstream link in database.
    """
    xblock_info = kwargs.get("xblock_info")
    if not (xblock_info and isinstance(xblock_info, XBlockData)):
        log.error("Received null or incorrect data for event")
        return
    # Defer the link bookkeeping to a celery task; only the key is serialized.
    handle_create_or_update_xblock_upstream_link.delay(str(xblock_info.usage_key))
@receiver(XBLOCK_DELETED)
def delete_upstream_downstream_link_handler(**kwargs):
    """
    Delete upstream->downstream link from database on xblock delete.
    """
    xblock_info = kwargs.get("xblock_info")
    if not (xblock_info and isinstance(xblock_info, XBlockData)):
        log.error("Received null or incorrect data for event")
        return
    # Both component- and container-level links may point at the deleted block.
    for link_model in (ComponentLink, ContainerLink):
        link_model.objects.filter(
            downstream_usage_key=xblock_info.usage_key
        ).delete()
@receiver(COURSE_IMPORT_COMPLETED)
def handle_new_course_import(**kwargs):
    """
    Automatically create upstream->downstream links for course in database on new import.
    """
    course_data = kwargs.get("course")
    if not (course_data and isinstance(course_data, CourseData)):
        log.error("Received null or incorrect data for event")
        return
    # Force a rebuild and replace any stale links left over from before the import.
    create_or_update_upstream_links.delay(
        str(course_data.course_key),
        force=True,
        replace=True,
    )
@receiver(LIBRARY_BLOCK_DELETED)
def unlink_upstream_block_handler(**kwargs):
    """
    Handle unlinking the upstream (library) block from any downstream (course) blocks.
    """
    library_block = kwargs.get("library_block")
    if not (library_block and isinstance(library_block, LibraryBlockData)):
        log.error("Received null or incorrect data for event")
        return
    handle_unlink_upstream_block.delay(str(library_block.usage_key))
@receiver(LIBRARY_CONTAINER_DELETED)
def unlink_upstream_container_handler(**kwargs):
    """
    Handle unlinking the upstream (library) container from any downstream (course) blocks.
    """
    library_container = kwargs.get("library_container")
    if not (library_container and isinstance(library_container, LibraryContainerData)):  # pragma: no cover
        log.error("Received null or incorrect data for event")
        return
    handle_unlink_upstream_container.delay(str(library_container.container_key))

View File

@@ -4,7 +4,7 @@ Storage backend for course import and export.
from django.conf import settings
from django.core.files.storage import get_storage_class
from common.djangoapps.util.storage import resolve_storage_backend
from storages.backends.s3boto3 import S3Boto3Storage
from storages.utils import setting
@@ -19,4 +19,7 @@ class ImportExportS3Storage(S3Boto3Storage): # pylint: disable=abstract-method
super().__init__(bucket_name=bucket, custom_domain=None, querystring_auth=True)
# pylint: disable=invalid-name
course_import_export_storage = get_storage_class(settings.COURSE_IMPORT_EXPORT_STORAGE)()
course_import_export_storage = resolve_storage_backend(
storage_key="course_import_export",
legacy_setting_key="COURSE_IMPORT_EXPORT_STORAGE"
)

View File

@@ -2,16 +2,19 @@
This file contains celery tasks for contentstore views
"""
import asyncio
import base64
import json
import os
import re
import shutil
import tarfile
from datetime import datetime
from datetime import datetime, timezone
from importlib.metadata import entry_points
from tempfile import NamedTemporaryFile, mkdtemp
import aiohttp
import olxcleaner
import pkg_resources
from ccx_keys.locator import CCXLocator
from celery import shared_task
from celery.utils.log import get_task_logger
@@ -25,15 +28,16 @@ from edx_django_utils.monitoring import (
set_code_owner_attribute,
set_code_owner_attribute_from_module,
set_custom_attribute,
set_custom_attributes_for_course_key
set_custom_attributes_for_course_key,
)
from olxcleaner.exceptions import ErrorLevel
from olxcleaner.reporting import report_error_summary, report_errors
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locator import LibraryLocator
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey, UsageKey
from opaque_keys.edx.locator import LibraryLocator, LibraryContainerLocator
from organizations.api import add_organization_course, ensure_organization
from organizations.exceptions import InvalidOrganizationException
from organizations.models import Organization, OrganizationCourse
from organizations.models import Organization
from path import Path as path
from pytz import UTC
from user_tasks.models import UserTaskArtifact, UserTaskStatus
@@ -43,28 +47,33 @@ import cms.djangoapps.contentstore.errors as UserErrors
from cms.djangoapps.contentstore.courseware_index import (
CoursewareSearchIndexer,
LibrarySearchIndexer,
SearchIndexingError
SearchIndexingError,
)
from cms.djangoapps.contentstore.storage import course_import_export_storage
from cms.djangoapps.contentstore.utils import (
IMPORTABLE_FILE_TYPES,
create_or_update_xblock_upstream_link,
delete_course,
initialize_permissions,
reverse_usage_url,
translation_language,
delete_course
)
from cms.djangoapps.contentstore.xblock_storage_handlers.view_handlers import get_block_info
from cms.djangoapps.models.settings.course_metadata import CourseMetadata
from common.djangoapps.course_action_state.models import CourseRerunState
from common.djangoapps.static_replace import replace_static_urls
from common.djangoapps.student.auth import has_course_author_access
from common.djangoapps.student.roles import CourseInstructorRole, CourseStaffRole, LibraryUserRole
from common.djangoapps.util.monitoring import monitor_import_failure
from openedx.core.djangoapps.content.learning_sequences.api import key_supports_outlines
from openedx.core.djangoapps.content_libraries import api as v2contentlib_api
from openedx.core.djangoapps.content_tagging.api import make_copied_tags_editable
from openedx.core.djangoapps.course_apps.toggles import exams_ida_enabled
from openedx.core.djangoapps.discussions.config.waffle import ENABLE_NEW_STRUCTURE_DISCUSSIONS
from openedx.core.djangoapps.discussions.models import DiscussionsConfiguration, Provider
from openedx.core.djangoapps.discussions.tasks import update_unit_discussion_state_from_discussion_blocks
from openedx.core.djangoapps.embargo.models import CountryAccessRule, RestrictedCourse
from openedx.core.lib import ensure_cms
from openedx.core.lib.extract_archive import safe_extractall
from xmodule.contentstore.django import contentstore
from xmodule.course_block import CourseFields
@@ -74,6 +83,8 @@ from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import DuplicateCourseError, InvalidProctoringProvider, ItemNotFoundError
from xmodule.modulestore.xml_exporter import export_course_to_xml, export_library_to_xml
from xmodule.modulestore.xml_importer import CourseImportException, import_course_from_xml, import_library_from_xml
from .models import ContainerLink, LearningContextLinksStatus, LearningContextLinksStatusChoices, ComponentLink
from .outlines import update_outline_from_modulestore
from .outlines_regenerate import CourseOutlineRegenerate
from .toggles import bypass_olx_failure_enabled
@@ -85,8 +96,26 @@ LOGGER = get_task_logger(__name__)
FILE_READ_CHUNK = 1024 # bytes
FULL_COURSE_REINDEX_THRESHOLD = 1
ALL_ALLOWED_XBLOCKS = frozenset(
[entry_point.name for entry_point in pkg_resources.iter_entry_points("xblock.v1")]
[entry_point.name for entry_point in entry_points(group="xblock.v1")]
)
# Headers sent with every link-check request. They mimic a desktop browser —
# presumably so servers that reject non-browser clients don't report false
# broken links; TODO confirm that is the intent.
DEFAULT_HEADERS = {
    "User-Agent": (
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
        "AppleWebKit/537.36 (KHTML, like Gecko) "
        "Chrome/115.0.0.0 Safari/537.36"
    ),
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
    "Connection": "keep-alive",
}
class LinkState:
    """
    Enumeration of the states a checked link can be reported in.
    """
    # The URL could not be fetched successfully.
    BROKEN = 'broken'
    # The asset exists but is locked.
    LOCKED = 'locked'
    # An external server refused the request.
    EXTERNAL_FORBIDDEN = 'external-forbidden'
def clone_instance(instance, field_values):
@@ -143,12 +172,6 @@ def rerun_course(source_course_key_string, destination_course_key_string, user_i
# call edxval to attach videos to the rerun
copy_course_videos(source_course_key, destination_course_key)
# Copy OrganizationCourse
organization_course = OrganizationCourse.objects.filter(course_id=source_course_key_string).first()
if organization_course:
clone_instance(organization_course, {'course_id': destination_course_key_string})
# Copy RestrictedCourse
restricted_course = RestrictedCourse.objects.filter(course_key=source_course_key).first()
@@ -158,7 +181,7 @@ def rerun_course(source_course_key_string, destination_course_key_string, user_i
for country_access_rule in country_access_rules:
clone_instance(country_access_rule, {'restricted_course': new_restricted_course})
org_data = ensure_organization(source_course_key.org)
org_data = ensure_organization(destination_course_key.org)
add_organization_course(org_data, destination_course_key)
return "succeeded"
@@ -453,12 +476,12 @@ def sync_discussion_settings(course_key, user):
if (
ENABLE_NEW_STRUCTURE_DISCUSSIONS.is_enabled()
and not course.discussions_settings['provider_type'] == Provider.OPEN_EDX
and not course.discussions_settings.get('provider_type', None) == Provider.OPEN_EDX
and not course.discussions_settings.get('provider', None) == Provider.OPEN_EDX
):
LOGGER.info(f"New structure is enabled, also updating {course_key} to use new provider")
course.discussions_settings['enable_graded_units'] = False
course.discussions_settings['unit_level_visibility'] = True
course.discussions_settings['provider'] = Provider.OPEN_EDX
course.discussions_settings['provider_type'] = Provider.OPEN_EDX
modulestore().update_item(course, user.id)
@@ -890,7 +913,7 @@ def copy_v1_user_roles_into_v2_library(v2_library_key, v1_library_key):
def _create_copy_content_task(v2_library_key, v1_library_key):
"""
spin up a celery task to import the V1 Library's content into the V2 library.
This utalizes the fact that course and v1 library content is stored almost identically.
This utilizes the fact that course and v1 library content is stored almost identically.
"""
return v2contentlib_api.import_blocks_create_task(
v2_library_key, v1_library_key,
@@ -1066,3 +1089,444 @@ def undo_all_library_source_blocks_ids_for_course(course_key_string, v1_to_v2_li
store.update_item(draft_library_source_block, None)
# return success
return
class CourseLinkCheckTask(UserTask):  # pylint: disable=abstract-method
    """
    Base class for course link check tasks.
    """

    @staticmethod
    def calculate_total_steps(arguments_dict):
        """
        Get the number of in-progress steps in the link check process, as shown in the UI.

        There is currently a single step:
        1. Scanning
        """
        return 1

    @classmethod
    def generate_name(cls, arguments_dict):
        """
        Create a name for this particular task instance.

        Arguments:
            arguments_dict (dict): The arguments given to the task function

        Returns:
            str: The generated name
        """
        course_key = arguments_dict['course_key_string']
        return f'Broken link check of {course_key}'
# -------------- Course optimizer functions ------------------
@shared_task(base=CourseLinkCheckTask, bind=True)
# Note: The decorator @set_code_owner_attribute cannot be used here because the UserTaskMixin
# does stack inspection and can't handle additional decorators.
def check_broken_links(self, user_id, course_key_string, language):
    """
    Celery entry point: check a course for broken links and store the results in a file.

    Arguments:
        user_id: pk of the user who requested the check
        course_key_string (str): serialized course key to scan
        language: language code used for translated error messages
    """
    set_code_owner_attribute_from_module(__name__)
    return _check_broken_links(self, user_id, course_key_string, language)
def _check_broken_links(task_instance, user_id, course_key_string, language):
    """
    Checks for broken links in a course and store the results in a file.

    Scans every published vertical's blocks for URLs, validates them in async
    batches, retries ambiguous failures, and attaches the resulting JSON file
    to the user task as a 'BrokenLinks' artifact.
    """
    # NOTE(review): the returned user is never used; _validate_user appears to be
    # called only for its fail-the-task side effect — confirm.
    user = _validate_user(task_instance, user_id, language)
    task_instance.status.set_state(UserTaskStatus.IN_PROGRESS)
    course_key = CourseKey.from_string(course_key_string)
    url_list = _scan_course_for_links(course_key)
    # Validate in batches of 100 so we don't open unbounded concurrent connections.
    validated_url_list = asyncio.run(_validate_urls_access_in_batches(url_list, course_key, batch_size=100))
    broken_or_locked_urls, retry_list = _filter_by_status(validated_url_list)
    if retry_list:
        retry_results = _retry_validation(retry_list, course_key, retry_count=3)
        broken_or_locked_urls.extend(retry_results)
    try:
        task_instance.status.increment_completed_steps()
        file_name = str(course_key)
        broken_links_file = NamedTemporaryFile(prefix=file_name + '.', suffix='.json')
        LOGGER.debug(f'[Link Check] json file being generated at {broken_links_file.name}')
        with open(broken_links_file.name, 'w') as file:
            json.dump(broken_or_locked_urls, file, indent=4)
        # NOTE(review): the results are written twice — json.dump above and
        # _write_broken_links_to_file below; presumably one of these is
        # redundant or overwrites the other — confirm.
        _write_broken_links_to_file(broken_or_locked_urls, broken_links_file)
        artifact = UserTaskArtifact(status=task_instance.status, name='BrokenLinks')
        _save_broken_links_file(artifact, broken_links_file)
    # catch all exceptions so we can record useful error messages
    except Exception as e:  # pylint: disable=broad-except
        LOGGER.exception('Error checking links for course %s', course_key, exc_info=True)
        if task_instance.status.state != UserTaskStatus.FAILED:
            task_instance.status.fail({'raw_error_msg': str(e)})
def _validate_user(task, user_id, language):
    """
    Return the user with the given id, or fail the task if no such user exists.
    """
    try:
        return User.objects.get(pk=user_id)
    except User.DoesNotExist:
        # Mark the task failed with a translated "unknown user" message.
        with translation_language(language):
            task.status.fail(UserErrors.UNKNOWN_USER_ID.format(user_id))
        return None
def _scan_course_for_links(course_key):
    """
    Scans a course for links found in the data contents of blocks.

    Returns:
        list: block id and URL pairs

    Example return:
        [
            [block_id1, url1],
            [block_id2, url2],
            ...
        ]
    """
    verticals = modulestore().get_items(
        course_key,
        qualifiers={'category': 'vertical'},
        revision=ModuleStoreEnum.RevisionOption.published_only
    )
    urls_to_validate = []
    for vertical in verticals:
        for block in vertical.get_children():
            # Excluding 'drag-and-drop-v2' as it contains data of object type instead of string,
            # causing errors, and it doesn't contain user-facing links to scan.
            if block.category == 'drag-and-drop-v2':
                continue
            block_id = str(block.usage_key)
            block_data = get_block_info(block)['data']
            urls_to_validate.extend([block_id, url] for url in _get_urls(block_data))
    return urls_to_validate
def _get_urls(content):
"""
Finds and returns a list of URLs in the given content.
Includes strings following 'href=' and 'src='.
Excludes strings that are only '#' or start with 'data:'.
Arguments:
content (str): entire content of a block
Returns:
list: urls
"""
regex = r'\s+(?:href|src)=["\'](?!#|data:)([^"\']*)["\']'
url_list = re.findall(regex, content)
return url_list
async def _validate_urls_access_in_batches(url_list, course_key, batch_size=100):
    """
    Returns the statuses of a list of URL requests, validated in batches.

    Arguments:
        url_list (list): block id and URL pairs

    Returns:
        list: dictionaries containing URL, associated block id, and request status
    """
    responses = []
    total = len(url_list)
    for start in range(0, total, batch_size):
        chunk = url_list[start:start + batch_size]
        responses.extend(await _validate_batch(chunk, course_key))
        LOGGER.debug(f'[Link Check] request batch {start // batch_size + 1} of {total // batch_size + 1}')
    return responses
async def _validate_batch(batch, course_key):
    """Validate a batch of URLs concurrently and return their results."""
    async with aiohttp.ClientSession(headers=DEFAULT_HEADERS) as session:
        pending = [
            _validate_url_access(session, url_data, course_key)
            for url_data in batch
        ]
        return await asyncio.gather(*pending)
async def _validate_url_access(session, url_data, course_key):
    """
    Validates a URL.

    Arguments:
        session: an open aiohttp client session
        url_data (list): block id and URL pair
        course_key (str): locator id for a course

    Returns:
        dict: URL, associated block id, and request status

    Example return:
        {
            'block_id': block_id1,
            'url': url1,
            'status': status
        }
    """
    block_id, raw_url = url_data
    cleaned_url = raw_url.strip()  # Trim leading/trailing whitespace
    result = {'block_id': block_id, 'url': cleaned_url}
    target_url = _convert_to_standard_url(cleaned_url, course_key)
    try:
        async with session.get(target_url, timeout=5) as response:
            result['status'] = response.status
    except Exception as exc:  # lint-amnesty, pylint: disable=broad-except
        # A connection failure (timeout, DNS error, etc.) is recorded as status None.
        result['status'] = None
        LOGGER.debug(f'[Link Check] Request error when validating {cleaned_url}: {str(exc)}')
    return result
def _convert_to_standard_url(url, course_key):
    """
    Returns standard URLs when given studio URLs. Otherwise returns the URL as is.

    Example URLs:
        /assets/courseware/v1/506da5d6f866e8f0be44c5df8b6e6b2a/...
        ...asset-v1:edX+DemoX+Demo_Course+type@asset+block/getting-started_x250.png
        /static/getting-started_x250.png
        /container/block-v1:edX+DemoX+Demo_Course+type@vertical+block@2152d4a4aadc4cb0af5256394a3d1fc7
        /jump_to_id/2152d4a4aadc4cb0af5256394a3d1fc7
    """
    # Fully-qualified URLs are returned untouched.
    if not _is_studio_url_without_base(url):
        return url
    if url.startswith('/static/'):
        # Resolve the /static/ shortcut into the course's real asset path.
        processed_url = replace_static_urls(f'\"{url}\"', course_id=course_key)[1:-1]
        return 'https://' + settings.CMS_BASE + processed_url
    if url.startswith('/jump_to_id/'):
        return f'https://{settings.LMS_BASE}/courses/{course_key}{url}'
    if url.startswith('/'):
        return 'https://' + settings.CMS_BASE + url
    # Bare usage keys are treated as container links.
    return 'https://' + settings.CMS_BASE + '/container/' + url
def _is_studio_url(url):
    """Returns True if url is a studio url (relative, or absolute on the CMS base)."""
    return _is_studio_url_without_base(url) or _is_studio_url_with_base(url)
def _is_studio_url_with_base(url):
    """Returns True if url is a studio url with cms base."""
    cms_roots = ('http://' + settings.CMS_BASE, 'https://' + settings.CMS_BASE)
    return url.startswith(cms_roots)
def _is_studio_url_without_base(url):
"""Returns True if url is a studio url without cms base."""
return not url.startswith('http://') and not url.startswith('https://')
def _filter_by_status(results):
    """
    Filter results by status.

    Statuses:
        200: OK. No need to do more.
        403 on a studio link: Forbidden. Record as locked link.
        403/500/None on an external link: Record as external-forbidden link.
        None on a studio link: Connection error. Queue for retry
            (callers retry up to 3 times).
        Other: Failure. Record as broken link.

    Arguments:
        results (list): URL, associated block id, and request status

    Returns:
        filtered_results (list): list of block id, URL and link state
        retry_list (list): block id and url pairs

    Example return:
        [
            [block_id1, filtered_results_url1, link_state],
            ...
        ],
        [
            [block_id1, retry_url1],
            ...
        ]
    """
    filtered_results = []
    retry_list = []
    for result in results:
        status, block_id, url = result['status'], result['block_id'], result['url']
        if status is None and _is_studio_url(url):
            # Connection error against our own studio host: treated as transient
            # and queued for retry rather than reported immediately.
            retry_list.append([block_id, url])
        elif status == 200:
            continue
        elif status == 403 and _is_studio_url(url):
            filtered_results.append([block_id, url, LinkState.LOCKED])
        elif status in [403, 500, None] and not _is_studio_url(url):
            # 403/500/connection failures on external links are recorded as
            # EXTERNAL_FORBIDDEN and are not retried.
            filtered_results.append([block_id, url, LinkState.EXTERNAL_FORBIDDEN])
        else:
            filtered_results.append([block_id, url, LinkState.BROKEN])
    return filtered_results, retry_list
def _retry_validation(url_list, course_key, retry_count=3):
    """
    Retry validation for URLs that failed due to connection error.

    Returns:
        list: URLs that could not be validated due to being locked or due to
        persistent connection problems
    """
    confirmed_failures = []
    remaining = url_list
    for attempt in range(1, retry_count + 1):
        if remaining:
            LOGGER.debug(f'[Link Check] retry attempt #{attempt}')
            remaining = _retry_validation_and_filter_results(course_key, confirmed_failures, remaining)
    # Anything still unresolved after all attempts is reported as-is.
    confirmed_failures.extend(remaining)
    return confirmed_failures
def _retry_validation_and_filter_results(course_key, results, retry_list):
    """
    Validates URLs and then filters them by status.

    Resolved results are appended to ``results`` in place.

    Arguments:
        retry_list: list of urls to retry

    Returns:
        list: URLs that did not pass validation and should be retried
    """
    statuses = asyncio.run(
        _validate_urls_access_in_batches(retry_list, course_key, batch_size=100)
    )
    resolved, still_failing = _filter_by_status(statuses)
    results.extend(resolved)
    return still_failing
def _save_broken_links_file(artifact, file_to_save):
    """
    Attach ``file_to_save`` to the given task artifact and persist it.

    Arguments:
        artifact: model instance with a ``file`` FileField
            (presumably a UserTaskArtifact — confirm with callers)
        file_to_save: an open file object whose contents are stored on the artifact

    Returns:
        bool: always True
    """
    artifact.file.save(name=os.path.basename(file_to_save.name), content=File(file_to_save))
    artifact.save()
    return True
def _write_broken_links_to_file(broken_or_locked_urls, broken_links_file):
with open(broken_links_file.name, 'w') as file:
json.dump(broken_or_locked_urls, file, indent=4)
@shared_task
@set_code_owner_attribute
def handle_create_or_update_xblock_upstream_link(usage_key):
    """
    Create or update upstream link for a single xblock.
    """
    ensure_cms("handle_create_or_update_xblock_upstream_link may only be executed in a CMS context")
    try:
        block = modulestore().get_item(UsageKey.from_string(usage_key))
    except (ItemNotFoundError, InvalidKeyError):
        LOGGER.exception(f'Could not find item for given usage_key: {usage_key}')
        return
    # Only blocks that are actually linked to an upstream need syncing.
    if block.upstream and block.upstream_version:
        create_or_update_xblock_upstream_link(block, block.course_id)
@shared_task
@set_code_owner_attribute
def create_or_update_upstream_links(
    course_key_str: str,
    force: bool = False,
    replace: bool = False,
    created: datetime | None = None,
):
    """
    A Celery task to create or update upstream downstream links in database from course xblock content.

    Arguments:
        course_key_str: course to process.
        force: process even if the course's link status is already COMPLETED
            or currently PROCESSING.
        replace: delete all existing component/container links for the course
            before rebuilding them.
        created: timestamp recorded on the status row; defaults to now (UTC).
    """
    ensure_cms("create_or_update_upstream_links may only be executed in a CMS context")
    if not created:
        created = datetime.now(timezone.utc)
    course_status = LearningContextLinksStatus.get_or_create(course_key_str, created)
    # Skip work that is already done or in flight, unless explicitly forced.
    if course_status.status in [
        LearningContextLinksStatusChoices.COMPLETED,
        LearningContextLinksStatusChoices.PROCESSING
    ] and not force:
        return
    store = modulestore()
    course_key = CourseKey.from_string(course_key_str)
    course_status.update_status(
        LearningContextLinksStatusChoices.PROCESSING,
        updated=created,
    )
    if replace:
        # Wipe existing links so the rebuild below starts from a clean slate.
        ComponentLink.objects.filter(downstream_context_key=course_key).delete()
        ContainerLink.objects.filter(downstream_context_key=course_key).delete()
    try:
        # Only xblocks that have a non-null "upstream" setting are relevant.
        xblocks = store.get_items(course_key, settings={"upstream": lambda x: x is not None})
    except ItemNotFoundError:
        LOGGER.exception(f'Could not find items for given course: {course_key}')
        course_status.update_status(LearningContextLinksStatusChoices.FAILED)
        return
    for xblock in xblocks:
        create_or_update_xblock_upstream_link(xblock, course_key, created)
    course_status.update_status(LearningContextLinksStatusChoices.COMPLETED)
@shared_task
@set_code_owner_attribute
def handle_unlink_upstream_block(upstream_usage_key_string: str) -> None:
    """
    Handle updates needed to downstream blocks when the upstream link is severed.
    """
    ensure_cms("handle_unlink_upstream_block may only be executed in a CMS context")
    try:
        upstream_usage_key = UsageKey.from_string(upstream_usage_key_string)
    except InvalidKeyError:
        LOGGER.exception(f'Invalid upstream usage_key: {upstream_usage_key_string}')
        return
    # Tags copied from the upstream become editable once the link is gone.
    downstream_links = ComponentLink.objects.filter(upstream_usage_key=upstream_usage_key)
    for link in downstream_links:
        make_copied_tags_editable(str(link.downstream_usage_key))
@shared_task
@set_code_owner_attribute
def handle_unlink_upstream_container(upstream_container_key_string: str) -> None:
    """
    Handle updates needed to downstream blocks when the upstream link is severed.
    """
    ensure_cms("handle_unlink_upstream_container may only be executed in a CMS context")
    try:
        upstream_container_key = LibraryContainerLocator.from_string(upstream_container_key_string)
    except InvalidKeyError:
        LOGGER.exception(f'Invalid upstream container_key: {upstream_container_key_string}')
        return
    # Tags copied from the upstream become editable once the link is gone.
    downstream_links = ContainerLink.objects.filter(upstream_container_key=upstream_container_key)
    for link in downstream_links:
        make_copied_tags_editable(str(link.downstream_usage_key))

View File

@@ -0,0 +1,117 @@
"""
Test the enable/disable discussions for all units API endpoint.
"""
import json
from django.urls import reverse
from opaque_keys.edx.keys import CourseKey
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, BlockFactory
from cms.djangoapps.contentstore.tests.utils import AjaxEnabledTestClient
from common.djangoapps.student.tests.factories import UserFactory
class BulkEnableDisableDiscussionsTestCase(ModuleStoreTestCase):
    """
    Test the enable/disable discussions for all units API endpoint.
    """

    def setUp(self):
        super().setUp()
        self.user = UserFactory(is_staff=True, is_superuser=True)
        self.user.set_password(self.user_password)
        self.user.save()
        self.course_key = CourseKey.from_string("course-v1:edx+TestX+2025")
        self.url = reverse('bulk_enable_disable_discussions', args=[str(self.course_key)])
        self.client = AjaxEnabledTestClient()
        self.client.login(username=self.user.username, password=self.user_password)
        # Create a test course with one section/sequence and two units,
        # both starting with discussions enabled.
        self.course = CourseFactory.create(
            org=self.course_key.org,
            course=self.course_key.course,
            run=self.course_key.run,
            default_store=ModuleStoreEnum.Type.split,
            display_name="EnableDisableDiscussionsTestCase Course",
        )
        with self.store.bulk_operations(self.course_key):
            section = BlockFactory.create(
                parent=self.course,
                category='chapter',
                display_name="Generated Section",
            )
            sequence = BlockFactory.create(
                parent=section,
                category='sequential',
                display_name="Generated Sequence",
            )
            BlockFactory.create(
                parent=sequence,
                category='vertical',
                display_name="Unit in Section1",
                discussion_enabled=True,
            )
            BlockFactory.create(
                parent=sequence,
                category='vertical',
                display_name="Unit in Section2",
                discussion_enabled=True,
            )

    def test_disable_discussions_for_all_units(self):
        """
        Test that the API successfully disables discussions for all units.
        """
        self.enable_disable_discussions_for_all_units(False)

    def test_enable_discussions_for_all_units(self):
        """
        Test that the API successfully enables discussions for all units.
        """
        self.enable_disable_discussions_for_all_units(True)

    def enable_disable_discussions_for_all_units(self, is_enabled):
        """
        Assert that the API sets discussion_enabled on every unit to ``is_enabled``.
        """
        data = {
            "discussion_enabled": is_enabled
        }
        response = self.client.put(self.url, data=json.dumps(data), content_type='application/json')
        self.assertEqual(response.status_code, 200)
        response_data = response.json()
        # Units are created with discussions enabled, so enabling is a no-op
        # (0 updates) while disabling republishes both units.
        self.assertEqual(response_data['units_updated_and_republished'], 0 if is_enabled else 2)
        # Check that all verticals now have discussion_enabled set to the expected value
        with self.store.bulk_operations(self.course_key):
            verticals = self.store.get_items(self.course_key, qualifiers={'block_type': 'vertical'})
            for vertical in verticals:
                self.assertEqual(vertical.discussion_enabled, is_enabled)

    def test_permission_denied_for_non_staff(self):
        """
        Test that non-staff users are denied access to the API.
        """
        # Create a non-staff user
        non_staff_user = UserFactory(is_staff=False, is_superuser=False)
        non_staff_user.set_password(self.user_password)
        non_staff_user.save()
        # Use a fresh client so the staff session from setUp is not reused.
        non_staff_client = AjaxEnabledTestClient()
        non_staff_client.login(username=non_staff_user.username, password=self.user_password)
        response = non_staff_client.put(self.url, content_type='application/json')
        self.assertEqual(response.status_code, 403)

    def test_badrequest_for_empty_request_body(self):
        """
        Test that the API returns a 400 for an empty request body.
        """
        response = self.client.put(self.url, data=json.dumps({}), content_type='application/json')
        self.assertEqual(response.status_code, 400)

View File

@@ -1,5 +1,9 @@
# lint-amnesty, pylint: disable=missing-module-docstring
# TODO: Rewrite several of these assertions so that they check the output of the REST or Python
# APIs rather than parsing HTML from the deprecated legacy frontend pages. In particular, any
# test case using override_waffle_flag(toggles.LEGACY_STUDIO_*, True) will need to be fixed.
# Part of https://github.com/openedx/edx-platform/issues/36275.
import copy
import re
@@ -17,7 +21,7 @@ from django.conf import settings
from django.contrib.auth.models import User # lint-amnesty, pylint: disable=imported-auth-user
from django.test import TestCase
from django.test.utils import override_settings
from edx_toggles.toggles.testutils import override_waffle_switch
from edx_toggles.toggles.testutils import override_waffle_switch, override_waffle_flag
from edxval.api import create_video, get_videos_for_course
from fs.osfs import OSFS
from lxml import etree
@@ -43,6 +47,7 @@ from xmodule.modulestore.xml_importer import import_course_from_xml, perform_xli
from xmodule.seq_block import SequenceBlock
from xmodule.video_block import VideoBlock
from cms.djangoapps.contentstore import toggles
from cms.djangoapps.contentstore.config import waffle
from cms.djangoapps.contentstore.tests.utils import AjaxEnabledTestClient, CourseTestCase, get_url, parse_json
from cms.djangoapps.contentstore.utils import (
@@ -587,6 +592,7 @@ class MiscCourseTests(ContentStoreTestCase):
for expected in expected_types:
self.assertContains(resp, expected)
@override_waffle_flag(toggles.LEGACY_STUDIO_UNIT_EDITOR, True)
@ddt.data("<script>alert(1)</script>", "alert('hi')", "</script><script>alert(1)</script>")
def test_container_handler_xss_prevent(self, malicious_code):
"""
@@ -596,6 +602,7 @@ class MiscCourseTests(ContentStoreTestCase):
# Test that malicious code does not appear in html
self.assertNotContains(resp, malicious_code)
@override_waffle_flag(toggles.LEGACY_STUDIO_UNIT_EDITOR, True)
def test_advanced_components_in_edit_unit(self):
# This could be made better, but for now let's just assert that we see the advanced modules mentioned in the
# page response HTML
@@ -697,9 +704,11 @@ class MiscCourseTests(ContentStoreTestCase):
# Remove tempdir
shutil.rmtree(root_dir)
@override_waffle_flag(toggles.LEGACY_STUDIO_UNIT_EDITOR, True)
def test_advanced_components_require_two_clicks(self):
self.check_components_on_page(['word_cloud'], ['Word cloud'])
@override_waffle_flag(toggles.LEGACY_STUDIO_UNIT_EDITOR, True)
def test_edit_unit(self):
"""Verifies rendering the editor in all the verticals in the given test course"""
self._check_verticals([self.vert_loc])
@@ -1379,6 +1388,7 @@ class ContentStoreTest(ContentStoreTestCase):
resp = self.client.ajax_post('/course/', self.course_data)
self.assertEqual(resp.status_code, 403)
@override_waffle_flag(toggles.LEGACY_STUDIO_HOME, True)
def test_course_index_view_with_no_courses(self):
"""Test viewing the index page with no courses"""
resp = self.client.get_html('/home/')
@@ -1400,6 +1410,7 @@ class ContentStoreTest(ContentStoreTestCase):
item = BlockFactory.create(parent_location=course.location)
self.assertIsInstance(item, SequenceBlock)
@override_waffle_flag(toggles.LEGACY_STUDIO_COURSE_OUTLINE, True)
def test_course_overview_view_with_course(self):
"""Test viewing the course overview page with an existing course"""
course = CourseFactory.create()
@@ -1499,7 +1510,8 @@ class ContentStoreTest(ContentStoreTestCase):
)
course_key = course_items[0].id
resp = self._show_course_overview(course_key)
with override_waffle_flag(toggles.LEGACY_STUDIO_COURSE_OUTLINE, True):
resp = self._show_course_overview(course_key)
# course_handler raise 404 for old mongo course
if course_key.deprecated:
@@ -1510,20 +1522,31 @@ class ContentStoreTest(ContentStoreTestCase):
self.assertContains(resp, 'Chapter 2')
# go to various pages
test_get_html('import_handler')
test_get_html('export_handler')
test_get_html('course_team_handler')
test_get_html('course_info_handler')
test_get_html('assets_handler')
test_get_html('tabs_handler')
test_get_html('settings_handler')
test_get_html('grading_handler')
test_get_html('advanced_settings_handler')
test_get_html('textbooks_list_handler')
with override_waffle_flag(toggles.LEGACY_STUDIO_IMPORT, True):
test_get_html('import_handler')
with override_waffle_flag(toggles.LEGACY_STUDIO_EXPORT, True):
test_get_html('export_handler')
with override_waffle_flag(toggles.LEGACY_STUDIO_COURSE_TEAM, True):
test_get_html('course_team_handler')
with override_waffle_flag(toggles.LEGACY_STUDIO_UPDATES, True):
test_get_html('course_info_handler')
with override_waffle_flag(toggles.LEGACY_STUDIO_FILES_UPLOADS, True):
test_get_html('assets_handler')
with override_waffle_flag(toggles.LEGACY_STUDIO_CUSTOM_PAGES, True):
test_get_html('tabs_handler')
with override_waffle_flag(toggles.LEGACY_STUDIO_SCHEDULE_DETAILS, True):
test_get_html('settings_handler')
with override_waffle_flag(toggles.LEGACY_STUDIO_GRADING, True):
test_get_html('grading_handler')
with override_waffle_flag(toggles.LEGACY_STUDIO_ADVANCED_SETTINGS, True):
test_get_html('advanced_settings_handler')
with override_waffle_flag(toggles.LEGACY_STUDIO_TEXTBOOKS, True):
test_get_html('textbooks_list_handler')
# go look at the Edit page
unit_key = course_key.make_usage_key('vertical', 'test_vertical')
resp = self.client.get_html(get_url('container_handler', unit_key))
with override_waffle_flag(toggles.LEGACY_STUDIO_UNIT_EDITOR, True):
resp = self.client.get_html(get_url('container_handler', unit_key))
self.assertEqual(resp.status_code, 200)
def delete_item(category, name):
@@ -1856,20 +1879,23 @@ class RerunCourseTest(ContentStoreTestCase):
"""
Asserts that the given course key is NOT in the unsucceeded course action section of the html.
"""
course_listing = lxml.html.fromstring(self.client.get_html('/home/').content)
with override_waffle_flag(toggles.LEGACY_STUDIO_HOME, True):
course_listing = lxml.html.fromstring(self.client.get_html('/home/').content)
self.assertEqual(len(self.get_unsucceeded_course_action_elements(course_listing, course_key)), 0)
def assertInUnsucceededCourseActions(self, course_key):
"""
Asserts that the given course key is in the unsucceeded course action section of the html.
"""
course_listing = lxml.html.fromstring(self.client.get_html('/home/').content)
with override_waffle_flag(toggles.LEGACY_STUDIO_HOME, True):
course_listing = lxml.html.fromstring(self.client.get_html('/home/').content)
self.assertEqual(len(self.get_unsucceeded_course_action_elements(course_listing, course_key)), 1)
def verify_rerun_course(self, source_course_key, destination_course_key, destination_display_name):
"""
Verify the contents of the course rerun action
"""
rerun_state = CourseRerunState.objects.find_first(course_key=destination_course_key)
expected_states = {
'state': CourseRerunUIStateManager.State.SUCCEEDED,
@@ -2138,9 +2164,13 @@ class EntryPageTestCase(TestCase):
resp = self.client.get_html(page)
self.assertEqual(resp.status_code, status_code)
def test_how_it_works(self):
@override_waffle_flag(toggles.LEGACY_STUDIO_LOGGED_OUT_HOME, True)
def test_how_it_works_legacy(self):
self._test_page("/howitworks")
def test_how_it_works_redirect_to_signin(self):
self._test_page("/howitworks", 302)
def test_signup(self):
# deprecated signup url redirects to LMS register.
self._test_page("/signup", 301)

View File

@@ -3,16 +3,17 @@ Unit tests for getting the list of courses for a user through iterating all cour
by reversing group name formats.
"""
import random
from unittest.mock import Mock, patch
import ddt
from ccx_keys.locator import CCXLocator
from django.conf import settings
from django.test import RequestFactory, override_settings
from django.test import RequestFactory
from edx_toggles.toggles.testutils import override_waffle_flag
from opaque_keys.edx.locations import CourseLocator
from cms.djangoapps.contentstore import toggles
from cms.djangoapps.contentstore.tests.utils import AjaxEnabledTestClient
from cms.djangoapps.contentstore.utils import delete_course
from cms.djangoapps.contentstore.views.course import (
@@ -35,17 +36,12 @@ from common.djangoapps.student.tests.factories import UserFactory
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from openedx.core.djangoapps.content.course_overviews.tests.factories import CourseOverviewFactory
from openedx.core.djangoapps.waffle_utils.testutils import WAFFLE_TABLES
from xmodule.course_block import CourseSummary # lint-amnesty, pylint: disable=wrong-import-order
from xmodule.modulestore import ModuleStoreEnum # lint-amnesty, pylint: disable=wrong-import-order
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase # lint-amnesty, pylint: disable=wrong-import-order
from xmodule.modulestore.tests.factories import CourseFactory, check_mongo_calls # lint-amnesty, pylint: disable=wrong-import-order
from xmodule.modulestore.tests.factories import CourseFactory # lint-amnesty, pylint: disable=wrong-import-order
TOTAL_COURSES_COUNT = 10
USER_COURSES_COUNT = 1
FEATURES_WITH_HOME_PAGE_COURSE_V2_API = settings.FEATURES.copy()
FEATURES_WITH_HOME_PAGE_COURSE_V2_API['ENABLE_HOME_PAGE_COURSE_API_V2'] = True
FEATURES_WITHOUT_HOME_PAGE_COURSE_V2_API = settings.FEATURES.copy()
FEATURES_WITHOUT_HOME_PAGE_COURSE_V2_API['ENABLE_HOME_PAGE_COURSE_API_V2'] = False
@ddt.ddt
@@ -93,6 +89,7 @@ class TestCourseListing(ModuleStoreTestCase):
self.client.logout()
ModuleStoreTestCase.tearDown(self) # pylint: disable=non-parent-method-called
@override_waffle_flag(toggles.LEGACY_STUDIO_HOME, True)
def test_empty_course_listing(self):
"""
Test on empty course listing, studio name is properly displayed
@@ -185,19 +182,11 @@ class TestCourseListing(ModuleStoreTestCase):
courses_list_by_staff, __ = get_courses_accessible_to_user(self.request)
self.assertEqual(len(list(courses_list_by_staff)), TOTAL_COURSES_COUNT)
self.assertTrue(all(isinstance(course, CourseOverview) for course in courses_list_by_staff))
with override_settings(FEATURES=FEATURES_WITH_HOME_PAGE_COURSE_V2_API):
# Verify fetched accessible courses list is a list of CourseOverview instances when home page course v2
# api is enabled.
self.assertTrue(all(isinstance(course, CourseOverview) for course in courses_list_by_staff))
with override_settings(FEATURES=FEATURES_WITHOUT_HOME_PAGE_COURSE_V2_API):
# Verify fetched accessible courses list is a list of CourseSummery instances
self.assertTrue(all(isinstance(course, CourseSummary) for course in courses_list_by_staff))
# Now count the db queries for staff
with check_mongo_calls(2):
list(_accessible_courses_summary_iter(self.request))
# Now count the db queries for staff
with self.assertNumQueries(2):
list(_accessible_courses_summary_iter(self.request))
def test_get_course_list_with_invalid_course_location(self):
"""
@@ -212,21 +201,10 @@ class TestCourseListing(ModuleStoreTestCase):
courses_list = list(courses_iter)
self.assertEqual(len(courses_list), 1)
with override_settings(FEATURES=FEATURES_WITH_HOME_PAGE_COURSE_V2_API):
# Verify fetched accessible courses list is a list of CourseOverview instances when home page course v2
# api is enabled.
courses_summary_iter, __ = _accessible_courses_summary_iter(self.request)
courses_summary_list = list(courses_summary_iter)
self.assertTrue(all(isinstance(course, CourseOverview) for course in courses_summary_list))
self.assertEqual(len(courses_summary_list), 1)
with override_settings(FEATURES=FEATURES_WITHOUT_HOME_PAGE_COURSE_V2_API):
# Verify fetched accessible courses list is a list of CourseSummery instances and only one course
# is returned
courses_summary_iter, __ = _accessible_courses_summary_iter(self.request)
courses_summary_list = list(courses_summary_iter)
self.assertTrue(all(isinstance(course, CourseSummary) for course in courses_summary_list))
self.assertEqual(len(courses_summary_list), 1)
courses_summary_iter, __ = _accessible_courses_summary_iter(self.request)
courses_summary_list = list(courses_summary_iter)
self.assertTrue(all(isinstance(course, CourseOverview) for course in courses_summary_list))
self.assertEqual(len(courses_summary_list), 1)
# get courses by reversing group name formats
courses_list_by_groups, __ = _accessible_courses_list_from_groups(self.request)

View File

@@ -1,7 +1,15 @@
"""
Tests for Studio Course Settings.
"""
# TODO: Remove each `override_waffle_flag(toggles.LEGACY_STUDIO_*)` by Ulmo. For each occurrence:
# * If the test case is just testing the legacy frontend, and we've got the underlying
# functionality tested elsewhere, then just delete the whole test case.
# * Otherwise (i.e., the test is using the legacy UI test a unique backend behavior), then
# rewrite the test to make assertions about the output of the Python API (preferred) or
# REST API (if necessary) so that we can delete the legacy UI without sacrificing the test
# coverage.
# Part of https://github.com/openedx/edx-platform/issues/36275.
"""
import copy
import datetime
@@ -20,6 +28,7 @@ from milestones.models import MilestoneRelationshipType
from milestones.tests.utils import MilestonesTestCaseMixin
from pytz import UTC
from cms.djangoapps.contentstore import toggles
from cms.djangoapps.contentstore.utils import reverse_course_url, reverse_usage_url
from cms.djangoapps.models.settings.course_grading import (
GRADING_POLICY_CHANGED_EVENT_TYPE,
@@ -115,6 +124,7 @@ class CourseAdvanceSettingViewTest(CourseTestCase, MilestonesTestCaseMixin):
CourseStaffRole(self.course.id).add_users(self.nonstaff)
@override_settings(FEATURES={'DISABLE_MOBILE_COURSE_AVAILABLE': True})
@override_waffle_flag(toggles.LEGACY_STUDIO_ADVANCED_SETTINGS, True)
def test_mobile_field_available(self):
"""
@@ -137,6 +147,7 @@ class CourseAdvanceSettingViewTest(CourseTestCase, MilestonesTestCaseMixin):
(False, True, True)
)
@ddt.unpack
@override_waffle_flag(toggles.LEGACY_STUDIO_ADVANCED_SETTINGS, True)
def test_discussion_fields_available(self, is_pages_and_resources_enabled,
is_legacy_discussion_setting_enabled, fields_visible):
"""
@@ -152,6 +163,16 @@ class CourseAdvanceSettingViewTest(CourseTestCase, MilestonesTestCaseMixin):
self.assertEqual('discussion_topics' in response, fields_visible)
@ddt.data(False, True)
@override_waffle_flag(toggles.LEGACY_STUDIO_ADVANCED_SETTINGS, True)
@override_waffle_flag(toggles.LEGACY_STUDIO_IMPORT, True)
@override_waffle_flag(toggles.LEGACY_STUDIO_EXPORT, True)
@override_waffle_flag(toggles.LEGACY_STUDIO_COURSE_TEAM, True)
@override_waffle_flag(toggles.LEGACY_STUDIO_UPDATES, True)
@override_waffle_flag(toggles.LEGACY_STUDIO_FILES_UPLOADS, True)
@override_waffle_flag(toggles.LEGACY_STUDIO_CUSTOM_PAGES, True)
@override_waffle_flag(toggles.LEGACY_STUDIO_SCHEDULE_DETAILS, True)
@override_waffle_flag(toggles.LEGACY_STUDIO_GRADING, True)
@override_waffle_flag(toggles.LEGACY_STUDIO_TEXTBOOKS, True)
def test_disable_advanced_settings_feature(self, disable_advanced_settings):
"""
If this feature is enabled, only Django Staff/Superuser should be able to access the "Advanced Settings" page.
@@ -292,6 +313,7 @@ class CourseDetailsViewTest(CourseTestCase, MilestonesTestCaseMixin):
(True, True),
)
@ddt.unpack
@override_waffle_flag(toggles.LEGACY_STUDIO_SCHEDULE_DETAILS, True)
def test_upgrade_deadline(self, has_verified_mode, has_expiration_date):
if has_verified_mode:
deadline = None
@@ -310,6 +332,7 @@ class CourseDetailsViewTest(CourseTestCase, MilestonesTestCaseMixin):
self.assertEqual(b"Upgrade Deadline Date" in response.content, has_expiration_date and has_verified_mode)
@mock.patch.dict("django.conf.settings.FEATURES", {'ENABLE_PREREQUISITE_COURSES': True})
@override_waffle_flag(toggles.LEGACY_STUDIO_SCHEDULE_DETAILS, True)
def test_pre_requisite_course_list_present(self):
settings_details_url = get_url(self.course.id)
response = self.client.get_html(settings_details_url)
@@ -370,6 +393,7 @@ class CourseDetailsViewTest(CourseTestCase, MilestonesTestCaseMixin):
(False, True, False),
(True, True, True),
)
@override_waffle_flag(toggles.LEGACY_STUDIO_SCHEDULE_DETAILS, True)
def test_visibility_of_entrance_exam_section(self, feature_flags):
"""
Tests entrance exam section is available if ENTRANCE_EXAMS feature is enabled no matter any other
@@ -386,6 +410,7 @@ class CourseDetailsViewTest(CourseTestCase, MilestonesTestCaseMixin):
b'<h3 id="heading-entrance-exam">' in resp.content
)
@override_waffle_flag(toggles.LEGACY_STUDIO_SCHEDULE_DETAILS, True)
def test_marketing_site_fetch(self):
settings_details_url = get_url(self.course.id)
@@ -593,6 +618,7 @@ class CourseDetailsViewTest(CourseTestCase, MilestonesTestCaseMixin):
assert milestones_helpers.any_unfulfilled_milestones(self.course.id, self.user.id), \
'The entrance exam should be required.'
@override_waffle_flag(toggles.LEGACY_STUDIO_SCHEDULE_DETAILS, True)
def test_editable_short_description_fetch(self):
settings_details_url = get_url(self.course.id)
@@ -631,6 +657,7 @@ class CourseDetailsViewTest(CourseTestCase, MilestonesTestCaseMixin):
self.assertEqual(response.status_code, 200)
self.assertEqual(course_details.overview, '<p>&nbsp;</p>')
@override_waffle_flag(toggles.LEGACY_STUDIO_SCHEDULE_DETAILS, True)
def test_regular_site_fetch(self):
settings_details_url = get_url(self.course.id)
@@ -1504,7 +1531,6 @@ class CourseMetadataEditingTest(CourseTestCase):
'test_proctoring_provider': {},
'proctortrack': {}
},
FEATURES={'ENABLE_EXAM_SETTINGS_HTML_VIEW': True},
)
def test_validate_update_requires_escalation_email_for_proctortrack(self, include_blank_email):
json_data = {
@@ -1552,7 +1578,6 @@ class CourseMetadataEditingTest(CourseTestCase):
'DEFAULT': 'proctortrack',
'proctortrack': {}
},
FEATURES={'ENABLE_EXAM_SETTINGS_HTML_VIEW': True},
)
def test_validate_update_cannot_unset_escalation_email_when_proctortrack_is_provider(self):
course = CourseFactory.create()
@@ -1982,6 +2007,7 @@ id=\"course-enrollment-end-time\" value=\"\" placeholder=\"HH:MM\" autocomplete=
self.assertNotContains(response, element)
@mock.patch.dict("django.conf.settings.FEATURES", {'ENABLE_PUBLISHER': False})
@override_waffle_flag(toggles.LEGACY_STUDIO_SCHEDULE_DETAILS, True)
def test_course_details_with_disabled_setting_global_staff(self):
"""
Test that user enrollment end date is editable in response.
@@ -1992,6 +2018,7 @@ id=\"course-enrollment-end-time\" value=\"\" placeholder=\"HH:MM\" autocomplete=
self._verify_editable(self._get_course_details_response(True))
@mock.patch.dict("django.conf.settings.FEATURES", {'ENABLE_PUBLISHER': False})
@override_waffle_flag(toggles.LEGACY_STUDIO_SCHEDULE_DETAILS, True)
def test_course_details_with_disabled_setting_non_global_staff(self):
"""
Test that user enrollment end date is editable in response.
@@ -2002,6 +2029,7 @@ id=\"course-enrollment-end-time\" value=\"\" placeholder=\"HH:MM\" autocomplete=
self._verify_editable(self._get_course_details_response(False))
@mock.patch.dict("django.conf.settings.FEATURES", {'ENABLE_PUBLISHER': True})
@override_waffle_flag(toggles.LEGACY_STUDIO_SCHEDULE_DETAILS, True)
def test_course_details_with_enabled_setting_global_staff(self):
"""
Test that user enrollment end date is editable in response.
@@ -2013,6 +2041,7 @@ id=\"course-enrollment-end-time\" value=\"\" placeholder=\"HH:MM\" autocomplete=
@mock.patch.dict("django.conf.settings.FEATURES", {'ENABLE_PUBLISHER': True})
@override_settings(PLATFORM_NAME='edX')
@override_waffle_flag(toggles.LEGACY_STUDIO_SCHEDULE_DETAILS, True)
def test_course_details_with_enabled_setting_non_global_staff(self):
"""
Test that user enrollment end date is not editable in response.

View File

@@ -1,13 +1,15 @@
"""
Test the exams service integration into Studio
"""
import itertools
from datetime import datetime, timedelta, timezone
from unittest.mock import patch, Mock
import ddt
from django.conf import settings
from edx_toggles.toggles.testutils import override_waffle_flag
from pytz import UTC
from freezegun import freeze_time
from pytz import utc
from cms.djangoapps.contentstore.signals.handlers import listen_for_course_publish
from openedx.core.djangoapps.course_apps.toggles import EXAMS_IDA
@@ -17,6 +19,7 @@ from xmodule.modulestore.tests.factories import CourseFactory, BlockFactory
@ddt.ddt
@override_waffle_flag(EXAMS_IDA, active=True)
@patch.dict('django.conf.settings.FEATURES', {'ENABLE_PROCTORED_EXAMS': True})
@patch.dict('django.conf.settings.FEATURES', {'ENABLE_SPECIAL_EXAMS': True})
@patch('cms.djangoapps.contentstore.exams._patch_course_exams')
@patch('cms.djangoapps.contentstore.signals.handlers.transaction.on_commit',
@@ -51,27 +54,46 @@ class TestExamService(ModuleStoreTestCase):
display_name='Homework 1',
graded=True,
is_time_limited=False,
due=datetime.now(UTC) + timedelta(minutes=60),
due=datetime.now(utc) + timedelta(minutes=60),
)
def _get_exams_url(self, course_id):
return f'{settings.EXAMS_SERVICE_URL}/exams/course_id/{course_id}/'
@ddt.data(
(False, False, False, 'timed'),
(True, False, False, 'proctored'),
(True, True, False, 'practice'),
(True, True, True, 'onboarding'),
)
def _get_exam_due_date(self, course, sequential):
"""
Return the expected exam due date for the exam, based on the selected course proctoring provider and the
exam due date or the course end date.
Arguments:
* course: the course that the exam subsection is in; may have a course.end attribute
* sequential: the exam subsection; may have a sequential.due attribute
"""
if course.proctoring_provider == 'lti_external':
return sequential.due.isoformat() if sequential.due else (course.end.isoformat() if course.end else None)
elif course.self_paced:
return None
else:
return sequential.due
@ddt.data(*(tuple(base) + (extra,) for base, extra in itertools.product(
[
(False, False, False, 'timed'),
(True, False, False, 'proctored'),
(True, True, False, 'practice'),
(True, True, True, 'onboarding'),
],
('null', 'lti_external')
)))
@ddt.unpack
@freeze_time('2024-01-01')
def test_publishing_exam(self, is_proctored_exam, is_practice_exam,
is_onboarding_exam, expected_type, mock_patch_course_exams):
is_onboarding_exam, expected_type, proctoring_provider, mock_patch_course_exams):
"""
When a course is published it will register all exams sections with the exams service
"""
default_time_limit_minutes = 10
due_date = datetime.now(UTC) + timedelta(minutes=default_time_limit_minutes + 1)
due_date = datetime.now(utc) + timedelta(minutes=default_time_limit_minutes + 1)
sequence = BlockFactory.create(
parent=self.chapter,
category='sequential',
@@ -86,17 +108,22 @@ class TestExamService(ModuleStoreTestCase):
is_onboarding_exam=is_onboarding_exam,
)
self.course.proctoring_provider = proctoring_provider
self.course = self.update_course(self.course, 1)
expected_due_date = self._get_exam_due_date(self.course, sequence)
expected_exams = [{
'course_id': self.course_key,
'content_id': str(sequence.location),
'exam_name': sequence.display_name,
'time_limit_mins': sequence.default_time_limit_minutes,
'due_date': due_date.isoformat(),
'due_date': expected_due_date,
'exam_type': expected_type,
'is_active': True,
'hide_after_due': True,
# backend is only required for edx-proctoring support edx-exams will maintain LTI backends
'backend': 'null',
'backend': proctoring_provider,
}]
listen_for_course_publish(self, self.course.id)
mock_patch_course_exams.assert_called_once_with(expected_exams, self.course_key)
@@ -147,23 +174,31 @@ class TestExamService(ModuleStoreTestCase):
listen_for_course_publish(self, self.course.id)
mock_patch_course_exams.assert_not_called()
# MODIFY DUE DATE HERE
@ddt.data(
(True, datetime(2035, 1, 1, 0, 0, tzinfo=timezone.utc)),
(False, datetime(2035, 1, 1, 0, 0, tzinfo=timezone.utc)),
(True, None),
(False, None),
*itertools.product(
(True, False),
(datetime(2035, 1, 1, 0, 0, tzinfo=timezone.utc), None),
('null', 'lti_external'),
)
)
@ddt.unpack
def test_no_due_dates(self, is_self_paced, course_end_date, mock_patch_course_exams):
def test_no_due_dates(self, is_self_paced, course_end_date, proctoring_provider, mock_patch_course_exams):
"""
Test that the coures end date is registered as the due date when the subsection does not have a due date for
both self-paced and instructor-paced exams.
Test that the the correct due date is registered for the exam when the subsection does not have a due date,
depending on the proctoring provider.
* lti_external
* The course end date is registered as the due date when the subsection does not have a due date for both
self-paced and instructor-paced exams.
* not lti_external
* None is registered as the due date when the subsection does not have a due date for both
self-paced and instructor-paced exams.
"""
self.course.self_paced = is_self_paced
self.course.end = course_end_date
self.course.proctoring_provider = proctoring_provider
self.course = self.update_course(self.course, 1)
BlockFactory.create(
sequence = BlockFactory.create(
parent=self.chapter,
category='sequential',
display_name='Test Proctored Exam',
@@ -179,20 +214,38 @@ class TestExamService(ModuleStoreTestCase):
listen_for_course_publish(self, self.course.id)
called_exams, called_course = mock_patch_course_exams.call_args[0]
assert called_exams[0]['due_date'] == (course_end_date.isoformat() if course_end_date else None)
@ddt.data(True, False)
def test_subsection_due_date_prioritized(self, is_self_paced, mock_patch_course_exams):
expected_due_date = self._get_exam_due_date(self.course, sequence)
assert called_exams[0]['due_date'] == expected_due_date
@ddt.data(*itertools.product((True, False), ('lti_external', 'null')))
@ddt.unpack
@freeze_time('2024-01-01')
def test_subsection_due_date_prioritized(self, is_self_paced, proctoring_provider, mock_patch_course_exams):
"""
Test that the subsection due date is registered as the due date when both the subsection has a due date and the
course has an end date for both self-paced and instructor-paced exams.
Test that the the correct due date is registered for the exam when the subsection has a due date, depending on
the proctoring provider.
* lti_external
* The subsection due date is registered as the due date when both the subsection has a due date and the
course has an end date for both self-paced and instructor-paced exams
* not lti_external
* None is registered as the due date when both the subsection has a due date and the course has an end date
for self-paced exams.
* The subsection due date is registered as the due date when both the subsection has a due date and the
course has an end date for instructor-paced exams.
"""
self.course.self_paced = is_self_paced
self.course.end = datetime(2035, 1, 1, 0, 0)
self.course.proctoring_provider = proctoring_provider
self.course = self.update_course(self.course, 1)
sequential_due_date = datetime.now(UTC) + timedelta(minutes=60)
BlockFactory.create(
sequential_due_date = datetime.now(utc) + timedelta(minutes=60)
sequence = BlockFactory.create(
parent=self.chapter,
category='sequential',
display_name='Test Proctored Exam',
@@ -208,4 +261,7 @@ class TestExamService(ModuleStoreTestCase):
listen_for_course_publish(self, self.course.id)
called_exams, called_course = mock_patch_course_exams.call_args[0]
assert called_exams[0]['due_date'] == sequential_due_date.isoformat()
expected_due_date = self._get_exam_due_date(self.course, sequence)
assert called_exams[0]['due_date'] == expected_due_date

View File

@@ -48,7 +48,7 @@ class LMSPageURLRequestedFiltersTest(ModuleStoreTestCase):
@override_settings(
OPEN_EDX_FILTERS_CONFIG={
"org.openedx.course_authoring.lms.page.url.requested.v1": {
"org.openedx.content_authoring.lms.page.url.requested.v1": {
"pipeline": [
"common.djangoapps.util.tests.test_filters.TestPageURLRequestedPipelineStep",
],

View File

@@ -5,6 +5,8 @@ import gettext
from unittest import mock, skip
from django.utils import translation
from edx_toggles.toggles.testutils import override_waffle_flag
from django.utils.translation import get_language
from xblock.core import XBlock
from xmodule.modulestore.django import XBlockI18nService
@@ -12,6 +14,7 @@ from xmodule.modulestore.tests.django_utils import TEST_DATA_SPLIT_MODULESTORE,
from xmodule.modulestore.tests.factories import CourseFactory, BlockFactory
from xmodule.tests.test_export import PureXBlock
from cms.djangoapps.contentstore import toggles
from cms.djangoapps.contentstore.tests.utils import AjaxEnabledTestClient
from cms.djangoapps.contentstore.views.preview import _prepare_runtime_for_preview
from common.djangoapps.student.tests.factories import UserFactory
@@ -202,6 +205,7 @@ class InternationalizationTest(ModuleStoreTestCase):
'display_name': 'Robot Super Course',
}
@override_waffle_flag(toggles.LEGACY_STUDIO_HOME, True)
def test_course_plain_english(self):
"""Test viewing the index page with no courses"""
self.client = AjaxEnabledTestClient() # lint-amnesty, pylint: disable=attribute-defined-outside-init
@@ -213,6 +217,7 @@ class InternationalizationTest(ModuleStoreTestCase):
status_code=200,
html=True)
@override_waffle_flag(toggles.LEGACY_STUDIO_HOME, True)
def test_course_explicit_english(self):
"""Test viewing the index page with no courses"""
self.client = AjaxEnabledTestClient() # lint-amnesty, pylint: disable=attribute-defined-outside-init

View File

@@ -21,6 +21,9 @@ from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.xml_importer import import_course_from_xml
from common.djangoapps.util.storage import resolve_storage_backend
from storages.backends.s3boto3 import S3Boto3Storage
TEST_DATA_CONTENTSTORE = copy.deepcopy(settings.CONTENTSTORE)
TEST_DATA_CONTENTSTORE['DOC_STORE_CONFIG']['db'] = 'test_xcontent_%s' % uuid4().hex
@@ -146,6 +149,7 @@ class ContentStoreImportTest(ModuleStoreTestCase):
import_course_from_xml(
module_store, self.user.id, TEST_DATA_DIR, ['toy'],
static_content_store=content_store, do_import_static=False,
do_import_python_lib=False, # python_lib.zip is special-cased -- exclude it too
create_if_not_present=True, verbose=True
)
@@ -153,7 +157,7 @@ class ContentStoreImportTest(ModuleStoreTestCase):
# make sure we have NO assets in our contentstore
all_assets, count = content_store.get_all_content_for_course(course.id)
self.assertEqual(len(all_assets), 0)
self.assertEqual(all_assets, [])
self.assertEqual(count, 0)
def test_no_static_link_rewrites_on_import(self):
@@ -274,3 +278,81 @@ class ContentStoreImportTest(ModuleStoreTestCase):
video = module_store.get_item(vertical.children[1])
self.assertEqual(video.display_name, 'default')
@override_settings(
COURSE_IMPORT_EXPORT_STORAGE="cms.djangoapps.contentstore.storage.ImportExportS3Storage",
DEFAULT_FILE_STORAGE="django.core.files.storage.FileSystemStorage"
)
def test_resolve_default_storage(self):
""" Ensure the default storage is invoked, even if course export storage is configured """
storage = resolve_storage_backend(
storage_key="default",
legacy_setting_key="DEFAULT_FILE_STORAGE"
)
self.assertEqual(storage.__class__.__name__, "FileSystemStorage")
@override_settings(
COURSE_IMPORT_EXPORT_STORAGE="cms.djangoapps.contentstore.storage.ImportExportS3Storage",
DEFAULT_FILE_STORAGE="django.core.files.storage.FileSystemStorage",
COURSE_IMPORT_EXPORT_BUCKET="bucket_name_test"
)
def test_resolve_happy_path_storage(self):
""" Make sure that the correct course export storage is being used """
storage = resolve_storage_backend(
storage_key="course_import_export",
legacy_setting_key="COURSE_IMPORT_EXPORT_STORAGE"
)
self.assertEqual(storage.__class__.__name__, "ImportExportS3Storage")
self.assertEqual(storage.bucket_name, "bucket_name_test")
@override_settings()
def test_resolve_storage_with_no_config(self):
""" If no storage setup is defined, we get FileSystemStorage by default """
del settings.DEFAULT_FILE_STORAGE
del settings.COURSE_IMPORT_EXPORT_STORAGE
del settings.COURSE_IMPORT_EXPORT_BUCKET
storage = resolve_storage_backend(
storage_key="course_import_export",
legacy_setting_key="COURSE_IMPORT_EXPORT_STORAGE"
)
self.assertEqual(storage.__class__.__name__, "FileSystemStorage")
@override_settings(
COURSE_IMPORT_EXPORT_STORAGE=None,
COURSE_IMPORT_EXPORT_BUCKET="bucket_name_test",
STORAGES={
'course_import_export': {
'BACKEND': 'cms.djangoapps.contentstore.storage.ImportExportS3Storage',
'OPTIONS': {}
}
}
)
def test_resolve_storage_using_django5_settings(self):
""" Simulating a Django 4 environment using Django 5 Storages configuration """
storage = resolve_storage_backend(
storage_key="course_import_export",
legacy_setting_key="COURSE_IMPORT_EXPORT_STORAGE"
)
self.assertEqual(storage.__class__.__name__, "ImportExportS3Storage")
self.assertEqual(storage.bucket_name, "bucket_name_test")
@override_settings(
STORAGES={
'course_import_export': {
'BACKEND': 'storages.backends.s3boto3.S3Boto3Storage',
'OPTIONS': {
'bucket_name': 'bucket_name_test'
}
}
}
)
def test_resolve_storage_using_django5_settings_with_options(self):
""" Ensure we call the storage class with the correct parameters and Django 5 setup """
del settings.COURSE_IMPORT_EXPORT_STORAGE
del settings.COURSE_IMPORT_EXPORT_BUCKET
storage = resolve_storage_backend(
storage_key="course_import_export",
legacy_setting_key="COURSE_IMPORT_EXPORT_STORAGE"
)
self.assertEqual(storage.__class__.__name__, S3Boto3Storage.__name__)
self.assertEqual(storage.bucket_name, "bucket_name_test")

Some files were not shown because too many files have changed in this diff Show More