Merge pull request #20354 from edx/bom/adr-python-api
Initial top-level ADRs
This commit is contained in:
@@ -10,7 +10,7 @@ from pytz import UTC
|
||||
|
||||
from contentstore.courseware_index import CoursewareSearchIndexer, LibrarySearchIndexer
|
||||
from contentstore.proctoring import register_special_exams
|
||||
from lms.djangoapps.grades.tasks import compute_all_grades_for_course
|
||||
from lms.djangoapps.grades.api import task_compute_all_grades_for_course
|
||||
from openedx.core.djangoapps.credit.signals import on_course_publish
|
||||
from openedx.core.lib.gating import api as gating_api
|
||||
from track.event_transaction_utils import get_event_transaction_id, get_event_transaction_type
|
||||
@@ -118,9 +118,9 @@ def handle_grading_policy_changed(sender, **kwargs):
|
||||
'event_transaction_id': unicode(get_event_transaction_id()),
|
||||
'event_transaction_type': unicode(get_event_transaction_type()),
|
||||
}
|
||||
result = compute_all_grades_for_course.apply_async(kwargs=kwargs, countdown=GRADING_POLICY_COUNTDOWN_SECONDS)
|
||||
result = task_compute_all_grades_for_course.apply_async(kwargs=kwargs, countdown=GRADING_POLICY_COUNTDOWN_SECONDS)
|
||||
log.info(u"Grades: Created {task_name}[{task_id}] with arguments {kwargs}".format(
|
||||
task_name=compute_all_grades_for_course.name,
|
||||
task_name=task_compute_all_grades_for_course.name,
|
||||
task_id=result.task_id,
|
||||
kwargs=kwargs,
|
||||
))
|
||||
|
||||
@@ -25,7 +25,7 @@ class LockedTest(ModuleStoreTestCase):
|
||||
|
||||
@patch('cms.djangoapps.contentstore.signals.handlers.cache.add')
|
||||
@patch('cms.djangoapps.contentstore.signals.handlers.cache.delete')
|
||||
@patch('cms.djangoapps.contentstore.signals.handlers.compute_all_grades_for_course.apply_async')
|
||||
@patch('cms.djangoapps.contentstore.signals.handlers.task_compute_all_grades_for_course.apply_async')
|
||||
@ddt.data(True, False)
|
||||
def test_locked(self, lock_available, compute_grades_async_mock, delete_mock, add_mock):
|
||||
add_mock.return_value = lock_available
|
||||
|
||||
@@ -28,7 +28,7 @@ from lms.djangoapps.certificates.models import (
|
||||
CertificateStatuses,
|
||||
certificate_status_for_student
|
||||
)
|
||||
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
|
||||
from lms.djangoapps.grades.api import CourseGradeFactory
|
||||
from lms.djangoapps.verify_student.models import VerificationDeadline
|
||||
from lms.djangoapps.verify_student.services import IDVerificationService
|
||||
from lms.djangoapps.verify_student.utils import is_verification_expiring_soon, verification_for_datetime
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
See `index.rst <index.rst>`_ for details.
|
||||
@@ -1,57 +0,0 @@
|
||||
#######################################
|
||||
edx-platform Development Best Practices
|
||||
#######################################
|
||||
|
||||
There are many general best practices documented for `Open edX Development in
|
||||
Confluence`_. The following best-practices are specific to edx-platform.
|
||||
|
||||
Course Access in LMS
|
||||
********************
|
||||
|
||||
The following technologies can be used to access course-related data:
|
||||
|
||||
* `Course Overviews`_: Provides performant access to course metadata.
|
||||
|
||||
* `Course Blocks`_: Provide performant access to the blocks in a course,
|
||||
including filtering and access control capabilities.
|
||||
|
||||
* `Modulestore`_ - Contains all course related data, including course metadata,
|
||||
course blocks, and student module data. Course Overviews and Course Blocks are
|
||||
cached performant versions of a subset of this source data.
|
||||
|
||||
When coding in the LMS, it is generally preferred to use `Course Overviews`_ and
|
||||
`Course Blocks`_, due to the following benefits:
|
||||
|
||||
1. Cached versions of course data that are better optimized for Learners.
|
||||
|
||||
2. A start of the separation of LMS and Studio data to move us closer to the
|
||||
ultimate ability to separate the two.
|
||||
|
||||
**Note**: At this time, these preferred methods are for coding in the LMS, but
|
||||
outside of the courseware. Inside the courseware, there is more work to be
|
||||
done to take advantage of these techniques.
|
||||
|
||||
Prefer using `Course Overviews`_ where possible when you just need course
|
||||
metadata, rather than loading the full course. For example, this could be done
|
||||
by calling ``get_course_overview_with_access()`` in place of
|
||||
``get_course_with_access``. If the course overview doesn't contain the data you
|
||||
need, you should consider whether it makes sense to expand what is available via
|
||||
the course overview. See an `example use of course overviews`_ in the course
|
||||
outline feature.
|
||||
|
||||
Prefer using `Course Blocks`_ over loading a full course directly from the
|
||||
`modulestore`_. The following is an `example of using course blocks`_ in the
|
||||
course outline feature.
|
||||
|
||||
If you need to load student user data to combine with the data you retrieve from
|
||||
the `Course Blocks`_, you can load the student module data from the modulestore
|
||||
without loading the full course. Here is an `example loading the student module
|
||||
data`_ in the course outline feature.
|
||||
|
||||
.. _Open edX Development in Confluence: https://openedx.atlassian.net/wiki/spaces/OpenDev/overview
|
||||
.. _Course Overviews: https://github.com/edx/edx-platform/blob/master/openedx/core/djangoapps/content/course_overviews/__init__.py
|
||||
.. _example use of course overviews: https://github.com/edx/edx-platform/blob/f81c21902eb0e8d026612b052557142ce1527153/openedx/features/course_experience/views/course_outline.py#L26
|
||||
.. _Course Blocks: https://openedx.atlassian.net/wiki/display/EDUCATOR/Course+Blocks
|
||||
.. _modulestore: https://edx.readthedocs.io/projects/edx-developer-guide/en/latest/modulestores/index.html
|
||||
.. _example of using course blocks: https://github.com/edx/edx-platform/blob/f81c21902eb0e8d026612b052557142ce1527153/openedx/features/course_experience/utils.py#L65-L72
|
||||
.. _example loading the student module data: https://github.com/edx/edx-platform/blob/f81c21902eb0e8d026612b052557142ce1527153/openedx/features/course_experience/utils.py#L49
|
||||
73
docs/decisions/0001-courses-in-lms.rst
Normal file
73
docs/decisions/0001-courses-in-lms.rst
Normal file
@@ -0,0 +1,73 @@
|
||||
Courses in LMS
|
||||
--------------
|
||||
|
||||
Status
|
||||
======
|
||||
|
||||
Accepted
|
||||
|
||||
Context
|
||||
=======
|
||||
|
||||
**Note:** Within the context of the LMS, "Course" means "Course Runs", as defined in the `edX DDD Ubiquitous Language`_.
|
||||
|
||||
In the LMS, the following technologies can be used to access course content and metadata:
|
||||
|
||||
* `Course Overviews`_: Provides performant access to course metadata.
|
||||
|
||||
* `Course Blocks`_: Provides performant access to the blocks in a course, including filtering and access control capabilities.
|
||||
|
||||
* `Modulestore`_ - Contains all course related data, including course metadata, course blocks, and student module data. `Course Overviews`_ and `Course Blocks`_ are performant read-optimized versions of subsets of data in the Modulestore.
|
||||
|
||||
.. _edX DDD Ubiquitous Language: https://openedx.atlassian.net/wiki/spaces/AC/pages/188032048/edX+DDD+Ubiquitous+Language
|
||||
.. _Course Overviews: https://github.com/edx/edx-platform/blob/master/openedx/core/djangoapps/content/course_overviews/__init__.py
|
||||
.. _Course Blocks: https://openedx.atlassian.net/wiki/display/EDUCATOR/Course+Blocks
|
||||
.. _Modulestore: https://edx.readthedocs.io/projects/edx-developer-guide/en/latest/modulestores/index.html
|
||||
|
||||
Decisions
|
||||
=========
|
||||
|
||||
When coding in the LMS, prefer to use `Course Overviews`_ and `Course Blocks`_, rather than the `Modulestore`_, for the following reasons:
|
||||
|
||||
1. `Course Overviews`_ and `Course Blocks`_ are optimized for read-access for Learners.
|
||||
|
||||
2. We eventually want to separate the LMS from Studio. Studio can have its own read-write storage layer (currently `Modulestore`_).
|
||||
|
||||
Course Overviews
|
||||
~~~~~~~~~~~~~~~~
|
||||
|
||||
Use `Course Overviews`_ when you just need course metadata and not the course context. For example, call ``get_course_overview_with_access()`` in place of ``get_course_with_access``. If `Course Overviews`_ doesn't contain the data you need, expand its model or create a new joint table (if the newly desired data is conceptually different from what's in `Course Overviews`_).
|
||||
|
||||
**Example:** See `example use of course overviews`_ in the course outline feature.
|
||||
|
||||
Course Blocks
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
Use `Course Blocks`_ instead of loading a full course directly from the `modulestore`_.
|
||||
|
||||
**Example:** See `example of using course blocks`_ in the course outline feature.
|
||||
|
||||
User's Course State
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
If you need to combine user data with `Course Blocks`_ data, load the user's data directly from the Courseware Student Module instead of loading the course from the `Modulestore`_.
|
||||
|
||||
**Example**: See `example loading the student module data`_ in the course outline feature.
|
||||
|
||||
.. _example use of course overviews: https://github.com/edx/edx-platform/blob/f81c21902eb0e8d026612b052557142ce1527153/openedx/features/course_experience/views/course_outline.py#L26
|
||||
.. _example of using course blocks: https://github.com/edx/edx-platform/blob/f81c21902eb0e8d026612b052557142ce1527153/openedx/features/course_experience/utils.py#L65-L72
|
||||
.. _example loading the student module data: https://github.com/edx/edx-platform/blob/f81c21902eb0e8d026612b052557142ce1527153/openedx/features/course_experience/utils.py#L49
|
||||
|
||||
Tech Debt
|
||||
=========
|
||||
|
||||
At this time, `LMS courseware rendering`_ still uses the `Modulestore`_ instead of `Course Blocks`_. This is technical debt that needs to be addressed, so we can have a fuller separation between the storage systems.
|
||||
|
||||
.. _LMS courseware rendering: https://github.com/edx/edx-platform/blob/67008cec68806b77631e8c40ede98ace8a83ce4f/lms/djangoapps/courseware/module_render.py#L291
|
||||
|
||||
Consequences
|
||||
============
|
||||
|
||||
* LMS-specific course content storage will evolve separately from Studio's storage, allowing for decoupled optimizations.
|
||||
|
||||
* LMS views will perform better when using storage that is read-optimized.
|
||||
44
docs/decisions/0002-inter-app-apis.rst
Normal file
44
docs/decisions/0002-inter-app-apis.rst
Normal file
@@ -0,0 +1,44 @@
|
||||
Inter-app APIs
|
||||
--------------
|
||||
|
||||
Status
|
||||
======
|
||||
|
||||
Accepted
|
||||
|
||||
Context
|
||||
=======
|
||||
|
||||
The edx-platform Django project is a conglomeration of different LMS and Studio features written in the structure of Django apps. Over the years, the boundaries between features have become muddled for various reasons. We now find apps intruding into the Python innards of other apps, making the intrusive apps tightly dependent on the inner behaviors of other apps.
|
||||
|
||||
Due to this lack of clearly delimited separation of concerns, the monolith has become hard to maneuver, comprehend, and modify.
|
||||
|
||||
Decisions
|
||||
=========
|
||||
|
||||
#. Each Django app should clearly define a set of Python APIs that it exposes to other Django apps, which run within the LMS/Studio process.
|
||||
|
||||
#. Each app's Python APIs should be defined/exported in a Python module called "api.py", located in the top-most directory of the app.
|
||||
|
||||
#. The app's Python APIs should be well-named, self-consistent, and relevant to its own "domain" (without exposing technical and implementation details).
|
||||
|
||||
#. An app's Django models and other internal data structures should not be exposed via its Python APIs.
|
||||
|
||||
#. Ideally, tests should use only Python APIs declared in other apps' "api.py" files. However, if an app's API is needed only for testing (and not needed as part of the app's domain API), then test-relevant Python APIs should be defined/exported in an intentional Python module called "api_for_tests.py".
|
||||
|
||||
Examples
|
||||
~~~~~~~~
|
||||
|
||||
As a reference example, see the Python APIs exposed by the grades app in the `grades/api.py module`_.
|
||||
|
||||
.. _`grades/api.py module`: https://github.com/edx/edx-platform/blob/master/lms/djangoapps/grades/api.py
|
||||
|
||||
|
||||
Consequences
|
||||
============
|
||||
|
||||
#. Explicitly defining Python APIs will prevent the proliferation of unintended entanglement between apps in the monolith.
|
||||
|
||||
#. When developers invest time in carefully considering Python APIs, they will need to consider good SOLID design practices for abstractions and encapsulation, leading to cleaner and more comprehensible code.
|
||||
|
||||
#. Python clients outside of an app will interface only through declared APIs in either "api.py" or "api_for_tests.py". The provider app therefore has better control and oversight to support its intentional APIs.
|
||||
@@ -4,9 +4,8 @@ Guides
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
best_practices
|
||||
testing
|
||||
javascript
|
||||
styling
|
||||
bootstrap
|
||||
static_assets
|
||||
testing/testing
|
||||
frontend/javascript
|
||||
frontend/styling
|
||||
frontend/bootstrap
|
||||
frontend/static_assets
|
||||
|
||||
@@ -25,7 +25,7 @@ locations.
|
||||
:maxdepth: 2
|
||||
|
||||
guides
|
||||
docstrings
|
||||
docstrings/docstrings
|
||||
|
||||
|
||||
Change History
|
||||
|
||||
|
Before Width: | Height: | Size: 17 KiB After Width: | Height: | Size: 17 KiB |
@@ -2,8 +2,12 @@
|
||||
Testing
|
||||
#######
|
||||
|
||||
.. contents::
|
||||
:local:
|
||||
:depth: 3
|
||||
|
||||
Overview
|
||||
--------
|
||||
========
|
||||
|
||||
We maintain three kinds of tests: unit tests, integration tests, and
|
||||
acceptance tests.
|
||||
@@ -22,6 +26,9 @@ The pyramid above shows the relative number of unit tests, integration
|
||||
tests, and acceptance tests. Most of our tests are unit tests or
|
||||
integration tests.
|
||||
|
||||
Test Types
|
||||
----------
|
||||
|
||||
Unit Tests
|
||||
~~~~~~~~~~
|
||||
|
||||
@@ -64,7 +71,8 @@ Integration Tests
|
||||
UI Acceptance Tests
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
- Use these to test that major program features are working correctly.
|
||||
- There should be very few UI acceptance tests since they are generally slow and
|
||||
flaky. Use these to test only bare minimum happy paths for necessary features.
|
||||
|
||||
- We use `Bok Choy`_ to write end-user acceptance tests directly in Python,
|
||||
using the framework to maximize reliability and maintainability.
|
||||
@@ -72,12 +80,6 @@ UI Acceptance Tests
|
||||
.. _Bok Choy: https://bok-choy.readthedocs.org/en/latest/tutorial.html
|
||||
|
||||
|
||||
Internationalization
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
- Any new text that is added should be internationalized and translated.
|
||||
|
||||
|
||||
Test Locations
|
||||
--------------
|
||||
|
||||
@@ -100,23 +102,6 @@ Test Locations
|
||||
- Bok Choy Accessibility Tests: located under ``common/test/acceptance/tests`` and tagged with ``@attr("a11y")``
|
||||
- Bok Choy PageObjects: located under ``common/test/acceptance/pages``
|
||||
|
||||
Factories
|
||||
---------
|
||||
|
||||
Many tests delegate set-up to a "factory" class. For example, there are
|
||||
factories for creating courses, problems, and users. This encapsulates
|
||||
set-up logic from tests.
|
||||
|
||||
Factories are often implemented using `FactoryBoy`_.
|
||||
|
||||
In general, factories should be located close to the code they use. For
|
||||
example, the factory for creating problem XML definitions is located in
|
||||
``common/lib/capa/capa/tests/response_xml_factory.py`` because the
|
||||
``capa`` package handles problem XML.
|
||||
|
||||
.. _FactoryBoy: https://readthedocs.org/projects/factoryboy/
|
||||
|
||||
|
||||
Running Tests
|
||||
=============
|
||||
|
||||
@@ -132,27 +117,6 @@ Note -
|
||||
|
||||
paver -h
|
||||
|
||||
Connecting to Browser
|
||||
---------------------
|
||||
|
||||
If you want to see the browser being automated for JavaScript or bok-choy tests,
|
||||
you can connect to the container running it via VNC.
|
||||
|
||||
+------------------------+----------------------+
|
||||
| Browser | VNC connection |
|
||||
+========================+======================+
|
||||
| Firefox (Default) | vnc://0.0.0.0:25900 |
|
||||
+------------------------+----------------------+
|
||||
| Chrome (via Selenium) | vnc://0.0.0.0:15900 |
|
||||
+------------------------+----------------------+
|
||||
|
||||
On macOS, enter the VNC connection string in Safari to connect via VNC. The VNC
|
||||
passwords for both browsers are randomly generated and logged at container
|
||||
startup, and can be found by running ``make vnc-passwords``.
|
||||
|
||||
Most tests are run in Firefox by default. To use Chrome for tests that normally
|
||||
use Firefox instead, prefix the test command with
|
||||
``SELENIUM_BROWSER=chrome SELENIUM_HOST=edx.devstack.chrome``.
|
||||
|
||||
Running Python Unit tests
|
||||
-------------------------
|
||||
@@ -202,7 +166,7 @@ To run a single django test class use this command::
|
||||
paver test_system -t lms/djangoapps/courseware/tests/tests.py::ActivateLoginTest
|
||||
|
||||
Running a Single Test
|
||||
---------------------
|
||||
~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
When developing tests, it is often helpful to be able to really just run
|
||||
one single test without the overhead of PIP installs, UX builds, etc. In
|
||||
@@ -288,7 +252,7 @@ This is an example of how to run a single test and get stdout shown immediately,
|
||||
pytest cms/djangoapps/contentstore/tests/test_import.py -s
|
||||
|
||||
How to output coverage locally
|
||||
------------------------------
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
These are examples of how to run a single test and get coverage::
|
||||
|
||||
@@ -372,7 +336,7 @@ If you run into flakiness, check (and feel free to contribute to) this
|
||||
`confluence document <https://openedx.atlassian.net/wiki/spaces/TE/pages/884998163/Debugging+test+failures+with+pytest-xdist>`__ for help.
|
||||
|
||||
Running Javascript Unit Tests
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
-----------------------------
|
||||
|
||||
We use Jasmine to run JavaScript unit tests. To run all the JavaScript
|
||||
tests::
|
||||
@@ -412,7 +376,7 @@ These paver commands call through to Karma. For more
|
||||
info, see `karma-runner.github.io <https://karma-runner.github.io/>`__.
|
||||
|
||||
Running Bok Choy Acceptance Tests
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
---------------------------------
|
||||
|
||||
We use `Bok Choy`_ for acceptance testing. Bok Choy is a UI-level acceptance
|
||||
test framework for writing robust `Selenium`_ tests in `Python`_. Bok Choy
|
||||
@@ -583,145 +547,8 @@ You must run BOTH `--testsonly` and `--fasttest`.
|
||||
Control-C. *Warning*: Only hit Control-C one time so the pytest framework can
|
||||
properly clean up.
|
||||
|
||||
Running Tests on Paver Scripts
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
To run tests on the scripts that power the various Paver commands, use the following command::
|
||||
|
||||
pytest pavelib
|
||||
|
||||
|
||||
Testing internationalization with dummy translations
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Any text you add to the platform should be internationalized. To generate
|
||||
translations for your new strings, run the following command::
|
||||
|
||||
paver i18n_dummy
|
||||
|
||||
This command generates dummy translations for each dummy language in the
|
||||
platform and puts the dummy strings in the appropriate language files.
|
||||
You can then preview the dummy languages on your local machine and also in
|
||||
your sandbox, if and when you create one.
|
||||
|
||||
The dummy language files that are generated during this process can be
|
||||
found in the following locations::
|
||||
|
||||
conf/locale/{LANG_CODE}
|
||||
|
||||
There are a few JavaScript files that are generated from this process. You
|
||||
can find those in the following locations::
|
||||
|
||||
lms/static/js/i18n/{LANG_CODE}
|
||||
cms/static/js/i18n/{LANG_CODE}
|
||||
|
||||
Do not commit the ``.po``, ``.mo``, ``.js`` files that are generated
|
||||
in the above locations during the dummy translation process!
|
||||
|
||||
Viewing Test Coverage
|
||||
---------------------
|
||||
|
||||
We currently collect test coverage information for Python
|
||||
unit/integration tests.
|
||||
|
||||
To view test coverage:
|
||||
|
||||
1. Run the test suite with this command::
|
||||
|
||||
paver test
|
||||
|
||||
2. Generate reports with this command::
|
||||
|
||||
paver coverage
|
||||
|
||||
3. Reports are located in the ``reports`` folder. The command generates
|
||||
HTML and XML (Cobertura format) reports.
|
||||
|
||||
Python Code Style Quality
|
||||
-------------------------
|
||||
|
||||
To view Python code style quality (including PEP 8 and pylint violations) run this command::
|
||||
|
||||
paver run_quality
|
||||
|
||||
More specific options are below.
|
||||
|
||||
- These commands run a particular quality report::
|
||||
|
||||
paver run_pep8
|
||||
paver run_pylint
|
||||
|
||||
- This command runs a report, and sets it to fail if it exceeds a given number
|
||||
of violations::
|
||||
|
||||
paver run_pep8 --limit=800
|
||||
|
||||
- The ``run_quality`` uses the underlying diff-quality tool (which is packaged
|
||||
with `diff-cover`_). With that, the command can be set to fail if a certain
|
||||
diff threshold is not met. For example, to cause the process to fail if
|
||||
quality expectations are less than 100% when compared to master (or in other
|
||||
words, if style quality is worse than what is already on master)::
|
||||
|
||||
paver run_quality --percentage=100
|
||||
|
||||
- Note that 'fixme' violations are not counted with run\_quality. To
|
||||
see all 'TODO' lines, use this command::
|
||||
|
||||
paver find_fixme --system=lms
|
||||
|
||||
``system`` is an optional argument here. It defaults to
|
||||
``cms,lms,common``.
|
||||
|
||||
.. _diff-cover: https://github.com/Bachmann1234/diff-cover
|
||||
|
||||
|
||||
JavaScript Code Style Quality
|
||||
-----------------------------
|
||||
|
||||
To view JavaScript code style quality run this command::
|
||||
|
||||
paver run_eslint
|
||||
|
||||
- This command also comes with a ``--limit`` switch, this is an example of that switch::
|
||||
|
||||
paver run_eslint --limit=50000
|
||||
|
||||
|
||||
|
||||
Code Complexity Tools
|
||||
---------------------
|
||||
|
||||
Two tools are available for evaluating complexity of edx-platform code:
|
||||
|
||||
- `radon <https://radon.readthedocs.org/en/latest/>`__ for Python code
|
||||
complexity. To obtain complexity, run::
|
||||
|
||||
paver run_complexity
|
||||
|
||||
- `plato <https://github.com/es-analysis/plato>`__ for JavaScript code
|
||||
complexity. Several options are available on the command line; see
|
||||
documentation. Below, the following command will produce an HTML report in a
|
||||
subdirectory called "jscomplexity"::
|
||||
|
||||
plato -q -x common/static/js/vendor/ -t common -e .eslintrc.json -r -d jscomplexity common/static/js/
|
||||
|
||||
|
||||
|
||||
Testing using queue servers
|
||||
---------------------------
|
||||
|
||||
When testing problems that use a queue server on AWS (e.g.
|
||||
sandbox-xqueue.edx.org), you'll need to run your server on your public
|
||||
IP, like so::
|
||||
|
||||
./manage.py lms runserver 0.0.0.0:8000
|
||||
|
||||
When you connect to the LMS, you need to use the public ip. Use
|
||||
``ifconfig`` to figure out the number, and connect e.g. to
|
||||
``http://18.3.4.5:8000/``
|
||||
|
||||
Acceptance Test Techniques
|
||||
--------------------------
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
1. **Element existence on the page**: Do not use splinter's built-in browser
|
||||
methods directly for determining if elements exist. Use the
|
||||
@@ -766,3 +593,177 @@ Acceptance Test Techniques
|
||||
and "LMS" when they follow this convention: name your feature in the
|
||||
.feature file CMS or LMS with a single period and then no other
|
||||
periods in the name. The name can contain spaces. E.g. "CMS.Sign Up"
|
||||
|
||||
|
||||
Testing internationalization with dummy translations
|
||||
----------------------------------------------------
|
||||
|
||||
Any text you add to the platform should be internationalized. To generate translations for your new strings, run the following command::
|
||||
|
||||
paver i18n_dummy
|
||||
|
||||
This command generates dummy translations for each dummy language in the
|
||||
platform and puts the dummy strings in the appropriate language files.
|
||||
You can then preview the dummy languages on your local machine and also in your sandbox, if and when you create one.
|
||||
|
||||
The dummy language files that are generated during this process can be
|
||||
found in the following locations::
|
||||
|
||||
conf/locale/{LANG_CODE}
|
||||
|
||||
There are a few JavaScript files that are generated from this process. You can find those in the following locations::
|
||||
|
||||
lms/static/js/i18n/{LANG_CODE}
|
||||
cms/static/js/i18n/{LANG_CODE}
|
||||
|
||||
Do not commit the ``.po``, ``.mo``, ``.js`` files that are generated
|
||||
in the above locations during the dummy translation process!
|
||||
|
||||
Test Coverage and Quality
|
||||
-------------------------
|
||||
|
||||
Viewing Test Coverage
|
||||
~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
We currently collect test coverage information for Python
|
||||
unit/integration tests.
|
||||
|
||||
To view test coverage:
|
||||
|
||||
1. Run the test suite with this command::
|
||||
|
||||
paver test
|
||||
|
||||
2. Generate reports with this command::
|
||||
|
||||
paver coverage
|
||||
|
||||
3. Reports are located in the ``reports`` folder. The command generates
|
||||
HTML and XML (Cobertura format) reports.
|
||||
|
||||
Python Code Style Quality
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
To view Python code style quality (including PEP 8 and pylint violations) run this command::
|
||||
|
||||
paver run_quality
|
||||
|
||||
More specific options are below.
|
||||
|
||||
- These commands run a particular quality report::
|
||||
|
||||
paver run_pep8
|
||||
paver run_pylint
|
||||
|
||||
- This command runs a report, and sets it to fail if it exceeds a given number
|
||||
of violations::
|
||||
|
||||
paver run_pep8 --limit=800
|
||||
|
||||
- The ``run_quality`` uses the underlying diff-quality tool (which is packaged
|
||||
with `diff-cover`_). With that, the command can be set to fail if a certain
|
||||
diff threshold is not met. For example, to cause the process to fail if
|
||||
quality expectations are less than 100% when compared to master (or in other
|
||||
words, if style quality is worse than what is already on master)::
|
||||
|
||||
paver run_quality --percentage=100
|
||||
|
||||
- Note that 'fixme' violations are not counted with run\_quality. To
|
||||
see all 'TODO' lines, use this command::
|
||||
|
||||
paver find_fixme --system=lms
|
||||
|
||||
``system`` is an optional argument here. It defaults to
|
||||
``cms,lms,common``.
|
||||
|
||||
.. _diff-cover: https://github.com/Bachmann1234/diff-cover
|
||||
|
||||
|
||||
JavaScript Code Style Quality
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
To view JavaScript code style quality run this command::
|
||||
|
||||
paver run_eslint
|
||||
|
||||
- This command also comes with a ``--limit`` switch, this is an example of that switch::
|
||||
|
||||
paver run_eslint --limit=50000
|
||||
|
||||
|
||||
Code Complexity Tools
|
||||
=====================
|
||||
|
||||
Two tools are available for evaluating complexity of edx-platform code:
|
||||
|
||||
- `radon <https://radon.readthedocs.org/en/latest/>`__ for Python code
|
||||
complexity. To obtain complexity, run::
|
||||
|
||||
paver run_complexity
|
||||
|
||||
- `plato <https://github.com/es-analysis/plato>`__ for JavaScript code
|
||||
complexity. Several options are available on the command line; see
|
||||
documentation. Below, the following command will produce an HTML report in a
|
||||
subdirectory called "jscomplexity"::
|
||||
|
||||
plato -q -x common/static/js/vendor/ -t common -e .eslintrc.json -r -d jscomplexity common/static/js/
|
||||
|
||||
Other Testing Tips
|
||||
==================
|
||||
|
||||
Connecting to Browser
|
||||
---------------------
|
||||
|
||||
If you want to see the browser being automated for JavaScript or bok-choy tests,
|
||||
you can connect to the container running it via VNC.
|
||||
|
||||
+------------------------+----------------------+
|
||||
| Browser | VNC connection |
|
||||
+========================+======================+
|
||||
| Firefox (Default) | vnc://0.0.0.0:25900 |
|
||||
+------------------------+----------------------+
|
||||
| Chrome (via Selenium) | vnc://0.0.0.0:15900 |
|
||||
+------------------------+----------------------+
|
||||
|
||||
On macOS, enter the VNC connection string in Safari to connect via VNC. The VNC
|
||||
passwords for both browsers are randomly generated and logged at container
|
||||
startup, and can be found by running ``make vnc-passwords``.
|
||||
|
||||
Most tests are run in Firefox by default. To use Chrome for tests that normally
|
||||
use Firefox instead, prefix the test command with
|
||||
``SELENIUM_BROWSER=chrome SELENIUM_HOST=edx.devstack.chrome``
|
||||
|
||||
Factories
|
||||
---------
|
||||
|
||||
Many tests delegate set-up to a "factory" class. For example, there are
|
||||
factories for creating courses, problems, and users. This encapsulates
|
||||
set-up logic from tests.
|
||||
|
||||
Factories are often implemented using `FactoryBoy`_.
|
||||
|
||||
In general, factories should be located close to the code they use. For
|
||||
example, the factory for creating problem XML definitions is located in
|
||||
``common/lib/capa/capa/tests/response_xml_factory.py`` because the
|
||||
``capa`` package handles problem XML.
|
||||
|
||||
.. _FactoryBoy: https://readthedocs.org/projects/factoryboy/
|
||||
|
||||
Running Tests on Paver Scripts
|
||||
------------------------------
|
||||
|
||||
To run tests on the scripts that power the various Paver commands, use the following command::
|
||||
|
||||
pytest pavelib
|
||||
|
||||
Testing using queue servers
|
||||
---------------------------
|
||||
|
||||
When testing problems that use a queue server on AWS (e.g.
|
||||
sandbox-xqueue.edx.org), you'll need to run your server on your public IP, like so::
|
||||
|
||||
./manage.py lms runserver 0.0.0.0:8000
|
||||
|
||||
When you connect to the LMS, you need to use the public ip. Use
|
||||
``ifconfig`` to figure out the number, and connect e.g. to
|
||||
``http://18.3.4.5:8000/``
|
||||
@@ -32,7 +32,7 @@ from lms.djangoapps.ccx.tests.utils import CcxTestCase, flatten
|
||||
from lms.djangoapps.ccx.utils import ccx_course, is_email
|
||||
from lms.djangoapps.ccx.views import get_date
|
||||
from lms.djangoapps.discussion.django_comment_client.utils import has_forum_access
|
||||
from lms.djangoapps.grades.tasks import compute_all_grades_for_course
|
||||
from lms.djangoapps.grades.api import task_compute_all_grades_for_course
|
||||
from lms.djangoapps.instructor.access import allow_access, list_with_level
|
||||
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
|
||||
from openedx.core.djangoapps.django_comment_common.models import FORUM_ROLE_ADMINISTRATOR
|
||||
@@ -109,7 +109,7 @@ def setup_students_and_grades(context):
|
||||
module_state_key=problem.location
|
||||
)
|
||||
|
||||
compute_all_grades_for_course.apply_async(kwargs={'course_key': unicode(context.course.id)})
|
||||
task_compute_all_grades_for_course.apply_async(kwargs={'course_key': unicode(context.course.id)})
|
||||
|
||||
|
||||
def unhide(unit):
|
||||
|
||||
@@ -46,7 +46,7 @@ from lms.djangoapps.ccx.utils import (
|
||||
parse_date,
|
||||
)
|
||||
from lms.djangoapps.courseware.field_overrides import disable_overrides
|
||||
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
|
||||
from lms.djangoapps.grades.api import CourseGradeFactory
|
||||
from lms.djangoapps.instructor.enrollment import enroll_email, get_email_params
|
||||
from lms.djangoapps.instructor.views.gradebook_api import get_grade_book_page
|
||||
from openedx.core.djangoapps.django_comment_common.models import FORUM_ROLE_ADMINISTRATOR, assign_role
|
||||
|
||||
@@ -127,6 +127,25 @@ def get_certificate_for_user(username, course_key):
|
||||
return format_certificate_for_user(username, cert)
|
||||
|
||||
|
||||
def get_recently_modified_certificates(course_keys=None, start_date=None, end_date=None):
|
||||
"""
|
||||
Returns a QuerySet of GeneratedCertificate objects filtered by the input
|
||||
parameters and ordered by modified_date.
|
||||
"""
|
||||
cert_filter_args = {}
|
||||
|
||||
if course_keys:
|
||||
cert_filter_args['course_id__in'] = course_keys
|
||||
|
||||
if start_date:
|
||||
cert_filter_args['modified_date__gte'] = start_date
|
||||
|
||||
if end_date:
|
||||
cert_filter_args['modified_date__lte'] = end_date
|
||||
|
||||
return GeneratedCertificate.objects.filter(**cert_filter_args).order_by('modified_date') # pylint: disable=no-member
|
||||
|
||||
|
||||
def generate_user_certificates(student, course_key, course=None, insecure=False, generation_mode='batch',
|
||||
forced_grade=None):
|
||||
"""
|
||||
|
||||
@@ -6,8 +6,8 @@ import logging
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from lms.djangoapps.certificates.models import GeneratedCertificate
|
||||
from courseware import courses
|
||||
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
|
||||
from lms.djangoapps.courseware import courses
|
||||
from lms.djangoapps.grades.api import CourseGradeFactory
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@ from lxml.etree import ParserError, XMLSyntaxError
|
||||
from requests.auth import HTTPBasicAuth
|
||||
|
||||
from capa.xqueue_interface import XQueueInterface, make_hashkey, make_xheader
|
||||
from course_modes.models import CourseMode
|
||||
from lms.djangoapps.certificates.models import CertificateStatuses as status
|
||||
from lms.djangoapps.certificates.models import (
|
||||
CertificateWhitelist,
|
||||
@@ -19,8 +20,7 @@ from lms.djangoapps.certificates.models import (
|
||||
GeneratedCertificate,
|
||||
certificate_status_for_student
|
||||
)
|
||||
from course_modes.models import CourseMode
|
||||
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
|
||||
from lms.djangoapps.grades.api import CourseGradeFactory
|
||||
from lms.djangoapps.verify_student.services import IDVerificationService
|
||||
from student.models import CourseEnrollment, UserProfile
|
||||
from xmodule.modulestore.django import modulestore
|
||||
|
||||
@@ -13,7 +13,7 @@ from lms.djangoapps.certificates.models import (
|
||||
CertificateStatuses
|
||||
)
|
||||
from lms.djangoapps.certificates.tasks import generate_certificate
|
||||
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
|
||||
from lms.djangoapps.grades.api import CourseGradeFactory
|
||||
from lms.djangoapps.verify_student.services import IDVerificationService
|
||||
from openedx.core.djangoapps.certificates.api import auto_certificate_generation_enabled
|
||||
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
|
||||
@@ -102,6 +102,7 @@ def _listen_for_id_verification_status_changed(sender, user, **kwargs): # pylin
|
||||
return
|
||||
|
||||
user_enrollments = CourseEnrollment.enrollments_for_user(user=user)
|
||||
|
||||
grade_factory = CourseGradeFactory()
|
||||
expected_verification_status = IDVerificationService.user_status(user)
|
||||
expected_verification_status = expected_verification_status['status']
|
||||
|
||||
@@ -53,11 +53,10 @@ from courseware.model_data import DjangoKeyValueStore, FieldDataCache
|
||||
from edxmako.shortcuts import render_to_string
|
||||
from eventtracking import tracker
|
||||
from lms.djangoapps.courseware.field_overrides import OverrideFieldData
|
||||
from lms.djangoapps.grades.signals.signals import SCORE_PUBLISHED
|
||||
from lms.djangoapps.grades.api import signals as grades_signals, GradesUtilService
|
||||
from lms.djangoapps.lms_xblock.field_data import LmsFieldData
|
||||
from lms.djangoapps.lms_xblock.models import XBlockAsidesConfig
|
||||
from lms.djangoapps.lms_xblock.runtime import LmsModuleSystem
|
||||
from lms.djangoapps.grades.util_services import GradesUtilService
|
||||
from lms.djangoapps.verify_student.services import XBlockVerificationService
|
||||
from openedx.core.djangoapps.bookmarks.services import BookmarksService
|
||||
from openedx.core.djangoapps.crawlers.models import CrawlersConfig
|
||||
@@ -572,7 +571,7 @@ def get_module_system_for_user(
|
||||
"""
|
||||
Submit a grade for the block.
|
||||
"""
|
||||
SCORE_PUBLISHED.send(
|
||||
grades_signals.SCORE_PUBLISHED.send(
|
||||
sender=None,
|
||||
block=block,
|
||||
user=user,
|
||||
|
||||
@@ -28,8 +28,7 @@ from capa.tests.response_xml_factory import (
|
||||
from course_modes.models import CourseMode
|
||||
from courseware.models import BaseStudentModuleHistory, StudentModule
|
||||
from courseware.tests.helpers import LoginEnrollmentTestCase
|
||||
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
|
||||
from lms.djangoapps.grades.tasks import compute_all_grades_for_course
|
||||
from lms.djangoapps.grades.api import CourseGradeFactory, task_compute_all_grades_for_course
|
||||
from openedx.core.djangoapps.credit.api import get_credit_requirement_status, set_credit_requirements
|
||||
from openedx.core.djangoapps.credit.models import CreditCourse, CreditProvider
|
||||
from openedx.core.djangoapps.user_api.tests.factories import UserCourseTagFactory
|
||||
@@ -406,7 +405,7 @@ class TestCourseGrader(TestSubmittingProblems):
|
||||
]
|
||||
}
|
||||
self.add_grading_policy(grading_policy)
|
||||
compute_all_grades_for_course.apply_async(kwargs={'course_key': unicode(self.course.id)})
|
||||
task_compute_all_grades_for_course.apply_async(kwargs={'course_key': unicode(self.course.id)})
|
||||
|
||||
def dropping_setup(self):
|
||||
"""
|
||||
|
||||
@@ -29,7 +29,7 @@ from lms.djangoapps.courseware.courses import allow_public_access
|
||||
from lms.djangoapps.courseware.exceptions import CourseAccessRedirect
|
||||
from lms.djangoapps.experiments.utils import get_experiment_user_metadata_context
|
||||
from lms.djangoapps.gating.api import get_entrance_exam_score_ratio, get_entrance_exam_usage_key
|
||||
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
|
||||
from lms.djangoapps.grades.api import CourseGradeFactory
|
||||
from openedx.core.djangoapps.crawlers.models import CrawlersConfig
|
||||
from openedx.core.djangoapps.lang_pref import LANGUAGE_KEY
|
||||
from openedx.core.djangoapps.user_api.preferences.api import get_user_preference
|
||||
|
||||
@@ -68,7 +68,7 @@ from lms.djangoapps.commerce.utils import EcommerceService
|
||||
from lms.djangoapps.courseware.courses import allow_public_access
|
||||
from lms.djangoapps.courseware.exceptions import CourseAccessRedirect, Redirect
|
||||
from lms.djangoapps.experiments.utils import get_experiment_user_metadata_context
|
||||
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
|
||||
from lms.djangoapps.grades.api import CourseGradeFactory
|
||||
from lms.djangoapps.instructor.enrollment import uses_shib
|
||||
from lms.djangoapps.instructor.views.api import require_global_staff
|
||||
from lms.djangoapps.verify_student.services import IDVerificationService
|
||||
|
||||
@@ -7,11 +7,11 @@ from django.dispatch import receiver
|
||||
from completion.models import BlockCompletion
|
||||
from gating import api as gating_api
|
||||
from gating.tasks import task_evaluate_subsection_completion_milestones
|
||||
from lms.djangoapps.grades.signals.signals import SUBSECTION_SCORE_CHANGED
|
||||
from lms.djangoapps.grades.api import signals as grades_signals
|
||||
from openedx.core.djangoapps.signals.signals import COURSE_GRADE_CHANGED
|
||||
|
||||
|
||||
@receiver(SUBSECTION_SCORE_CHANGED)
|
||||
@receiver(grades_signals.SUBSECTION_SCORE_CHANGED)
|
||||
def evaluate_subsection_gated_milestones(**kwargs):
|
||||
"""
|
||||
Receives the SUBSECTION_SCORE_CHANGED signal and triggers the
|
||||
|
||||
@@ -9,7 +9,7 @@ from milestones import api as milestones_api
|
||||
from milestones.tests.utils import MilestonesTestCaseMixin
|
||||
|
||||
from lms.djangoapps.courseware.access import has_access
|
||||
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
|
||||
from lms.djangoapps.grades.api import CourseGradeFactory
|
||||
from lms.djangoapps.grades.tests.utils import answer_problem
|
||||
from openedx.core.djangolib.testing.utils import get_mock_request
|
||||
from openedx.core.lib.gating import api as gating_api
|
||||
|
||||
24
lms/djangoapps/grades/api.py
Normal file
24
lms/djangoapps/grades/api.py
Normal file
@@ -0,0 +1,24 @@
|
||||
# pylint: disable=unused-import,wildcard-import
|
||||
"""
|
||||
Python APIs exposed by the grades app to other in-process apps.
|
||||
"""
|
||||
|
||||
# Public Grades Factories
|
||||
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
|
||||
from lms.djangoapps.grades.subsection_grade_factory import SubsectionGradeFactory
|
||||
|
||||
# Public Grades Functions
|
||||
from lms.djangoapps.grades.models_api import *
|
||||
from lms.djangoapps.grades.tasks import compute_all_grades_for_course as task_compute_all_grades_for_course
|
||||
|
||||
# Public Grades Modules
|
||||
from lms.djangoapps.grades import events, constants, context
|
||||
from lms.djangoapps.grades.signals import signals
|
||||
from lms.djangoapps.grades.util_services import GradesUtilService
|
||||
|
||||
# TODO exposing functionality from Grades handlers seems fishy.
|
||||
from lms.djangoapps.grades.signals.handlers import disconnect_submissions_signal_receiver
|
||||
|
||||
# Grades APIs that should NOT belong within the Grades subsystem
|
||||
# TODO move Gradebook to be an external feature outside of core Grades
|
||||
from lms.djangoapps.grades.config.waffle import is_writable_gradebook_enabled
|
||||
@@ -21,7 +21,7 @@ class GradesConfig(AppConfig):
|
||||
ProjectType.LMS: {
|
||||
PluginURLs.NAMESPACE: u'grades_api',
|
||||
PluginURLs.REGEX: u'^api/grades/',
|
||||
PluginURLs.RELATIVE_PATH: u'api.urls',
|
||||
PluginURLs.RELATIVE_PATH: u'rest_api.urls',
|
||||
}
|
||||
},
|
||||
PluginSettings.CONFIG: {
|
||||
|
||||
@@ -48,3 +48,10 @@ def waffle_flags():
|
||||
flag_undefined_default=True,
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
def is_writable_gradebook_enabled(course_key):
|
||||
"""
|
||||
Returns whether the writable gradebook app is enabled for the given course.
|
||||
"""
|
||||
return waffle_flags()[WRITABLE_GRADEBOOK].is_enabled(course_key)
|
||||
|
||||
@@ -10,3 +10,8 @@ class ScoreDatabaseTableEnum(object):
|
||||
courseware_student_module = 'csm'
|
||||
submissions = 'submissions'
|
||||
overrides = 'overrides'
|
||||
|
||||
|
||||
class GradeOverrideFeatureEnum(object):
|
||||
proctoring = 'PROCTORING'
|
||||
gradebook = 'GRADEBOOK'
|
||||
|
||||
@@ -17,16 +17,6 @@ def grading_context_for_course(course):
|
||||
return grading_context(course, course_structure)
|
||||
|
||||
|
||||
def visible_to_staff_only(subsection):
|
||||
"""
|
||||
Returns True if the given subsection is visible to staff only else False
|
||||
"""
|
||||
try:
|
||||
return subsection.transformer_data['visibility'].fields['merged_visible_to_staff_only']
|
||||
except KeyError:
|
||||
return False
|
||||
|
||||
|
||||
def graded_subsections_for_course(course_structure):
|
||||
"""
|
||||
Given a course block structure, yields the subsections of the course that are graded
|
||||
@@ -37,7 +27,7 @@ def graded_subsections_for_course(course_structure):
|
||||
for chapter_key in course_structure.get_children(course_structure.root_block_usage_key):
|
||||
for subsection_key in course_structure.get_children(chapter_key):
|
||||
subsection = course_structure[subsection_key]
|
||||
if not visible_to_staff_only(subsection) and subsection.graded:
|
||||
if not _visible_to_staff_only(subsection) and subsection.graded:
|
||||
yield subsection
|
||||
|
||||
|
||||
@@ -93,3 +83,13 @@ def grading_context(course, course_structure):
|
||||
'count_all_graded_blocks': count_all_graded_blocks,
|
||||
'subsection_type_graders': CourseGrade.get_subsection_type_graders(course)
|
||||
}
|
||||
|
||||
|
||||
def _visible_to_staff_only(subsection):
|
||||
"""
|
||||
Returns True if the given subsection is visible to staff only else False
|
||||
"""
|
||||
try:
|
||||
return subsection.transformer_data['visibility'].fields['merged_visible_to_staff_only']
|
||||
except KeyError:
|
||||
return False
|
||||
|
||||
@@ -13,7 +13,9 @@ from openedx.core.djangoapps.signals.signals import (COURSE_GRADE_CHANGED,
|
||||
from .config import assume_zero_if_absent, should_persist_grades
|
||||
from .course_data import CourseData
|
||||
from .course_grade import CourseGrade, ZeroCourseGrade
|
||||
from .models import PersistentCourseGrade, prefetch
|
||||
from .models import PersistentCourseGrade
|
||||
from .models_api import prefetch_grade_overrides_and_visible_blocks
|
||||
|
||||
|
||||
log = getLogger(__name__)
|
||||
|
||||
@@ -170,7 +172,7 @@ class CourseGradeFactory(object):
|
||||
"""
|
||||
should_persist = should_persist_grades(course_data.course_key)
|
||||
if should_persist and force_update_subsections:
|
||||
prefetch(user, course_data.course_key)
|
||||
prefetch_grade_overrides_and_visible_blocks(user, course_data.course_key)
|
||||
|
||||
course_grade = CourseGrade(
|
||||
user,
|
||||
|
||||
@@ -23,7 +23,7 @@ from opaque_keys.edx.django.models import CourseKeyField, UsageKeyField
|
||||
from opaque_keys.edx.keys import CourseKey, UsageKey
|
||||
|
||||
from coursewarehistoryextended.fields import UnsignedBigIntAutoField, UnsignedBigIntOneToOneField
|
||||
from lms.djangoapps.grades import events
|
||||
from lms.djangoapps.grades import events, constants
|
||||
from openedx.core.lib.cache_utils import get_cache
|
||||
|
||||
|
||||
@@ -743,11 +743,9 @@ class PersistentSubsectionGradeOverrideHistory(models.Model):
|
||||
|
||||
.. no_pii:
|
||||
"""
|
||||
PROCTORING = 'PROCTORING'
|
||||
GRADEBOOK = 'GRADEBOOK'
|
||||
OVERRIDE_FEATURES = (
|
||||
(PROCTORING, 'proctoring'),
|
||||
(GRADEBOOK, 'gradebook'),
|
||||
(constants.GradeOverrideFeatureEnum.proctoring, 'proctoring'),
|
||||
(constants.GradeOverrideFeatureEnum.gradebook, 'gradebook'),
|
||||
)
|
||||
|
||||
CREATE_OR_UPDATE = 'CREATEORUPDATE'
|
||||
@@ -764,7 +762,7 @@ class PersistentSubsectionGradeOverrideHistory(models.Model):
|
||||
feature = models.CharField(
|
||||
max_length=32,
|
||||
choices=OVERRIDE_FEATURES,
|
||||
default=PROCTORING
|
||||
default=constants.GradeOverrideFeatureEnum.proctoring,
|
||||
)
|
||||
action = models.CharField(
|
||||
max_length=32,
|
||||
@@ -793,8 +791,3 @@ class PersistentSubsectionGradeOverrideHistory(models.Model):
|
||||
@classmethod
|
||||
def get_override_history(cls, override_id):
|
||||
return cls.objects.filter(override_id=override_id)
|
||||
|
||||
|
||||
def prefetch(user, course_key):
|
||||
PersistentSubsectionGradeOverride.prefetch(user.id, course_key)
|
||||
VisibleBlocks.bulk_read(user.id, course_key)
|
||||
|
||||
48
lms/djangoapps/grades/models_api.py
Normal file
48
lms/djangoapps/grades/models_api.py
Normal file
@@ -0,0 +1,48 @@
|
||||
"""
|
||||
Provides Python APIs exposed from Grades models.
|
||||
"""
|
||||
from lms.djangoapps.grades.models import (
|
||||
PersistentCourseGrade as _PersistentCourseGrade,
|
||||
PersistentSubsectionGrade as _PersistentSubsectionGrade,
|
||||
PersistentSubsectionGradeOverride as _PersistentSubsectionGradeOverride,
|
||||
VisibleBlocks as _VisibleBlocks,
|
||||
)
|
||||
|
||||
|
||||
def prefetch_grade_overrides_and_visible_blocks(user, course_key):
|
||||
_PersistentSubsectionGradeOverride.prefetch(user.id, course_key)
|
||||
_VisibleBlocks.bulk_read(user.id, course_key)
|
||||
|
||||
|
||||
def prefetch_course_grades(course_key, users):
|
||||
_PersistentCourseGrade.prefetch(course_key, users)
|
||||
|
||||
|
||||
def prefetch_course_and_subsection_grades(course_key, users):
|
||||
_PersistentCourseGrade.prefetch(course_key, users)
|
||||
_PersistentSubsectionGrade.prefetch(course_key, users)
|
||||
|
||||
|
||||
def clear_prefetched_course_grades(course_key):
|
||||
_PersistentCourseGrade.clear_prefetched_data(course_key)
|
||||
_PersistentSubsectionGrade.clear_prefetched_data(course_key)
|
||||
|
||||
|
||||
def clear_prefetched_course_and_subsection_grades(course_key):
|
||||
_PersistentCourseGrade.clear_prefetched_data(course_key)
|
||||
|
||||
|
||||
def get_recently_modified_grades(course_keys, start_date, end_date):
|
||||
"""
|
||||
Returns a QuerySet of PersistentCourseGrade objects filtered by the input
|
||||
parameters and ordered by modified date.
|
||||
"""
|
||||
grade_filter_args = {}
|
||||
if course_keys:
|
||||
grade_filter_args['course_id__in'] = course_keys
|
||||
if start_date:
|
||||
grade_filter_args['modified__gte'] = start_date
|
||||
if end_date:
|
||||
grade_filter_args['modified__lte'] = end_date
|
||||
|
||||
return _PersistentCourseGrade.objects.filter(**grade_filter_args).order_by('modified')
|
||||
@@ -8,5 +8,5 @@ from django.conf.urls import include, url
|
||||
app_name = 'lms.djangoapps.grades'
|
||||
|
||||
urlpatterns = [
|
||||
url(r'^v1/', include('grades.api.v1.urls', namespace='v1'))
|
||||
url(r'^v1/', include('grades.rest_api.v1.urls', namespace='v1'))
|
||||
]
|
||||
@@ -15,24 +15,32 @@ from six import text_type
|
||||
from util.date_utils import to_timestamp
|
||||
|
||||
from courseware.courses import get_course_by_id
|
||||
from lms.djangoapps.grades.api.serializers import StudentGradebookEntrySerializer, SubsectionGradeResponseSerializer
|
||||
from lms.djangoapps.grades.api.v1.utils import (
|
||||
USER_MODEL,
|
||||
CourseEnrollmentPagination,
|
||||
GradeViewMixin,
|
||||
from lms.djangoapps.grades.api import (
|
||||
CourseGradeFactory,
|
||||
is_writable_gradebook_enabled,
|
||||
prefetch_course_and_subsection_grades,
|
||||
clear_prefetched_course_and_subsection_grades,
|
||||
constants as grades_constants,
|
||||
context as grades_context,
|
||||
events as grades_events,
|
||||
)
|
||||
from lms.djangoapps.grades.config.waffle import WRITABLE_GRADEBOOK, waffle_flags
|
||||
from lms.djangoapps.grades.constants import ScoreDatabaseTableEnum
|
||||
from lms.djangoapps.grades.context import graded_subsections_for_course
|
||||
from lms.djangoapps.grades.course_data import CourseData
|
||||
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
|
||||
from lms.djangoapps.grades.events import SUBSECTION_GRADE_CALCULATED, subsection_grade_calculated
|
||||
# TODO these imports break abstraction of the core Grades layer. This code needs
|
||||
# to be refactored so Gradebook views only access public Grades APIs.
|
||||
from lms.djangoapps.grades.models import (
|
||||
PersistentCourseGrade,
|
||||
PersistentSubsectionGrade,
|
||||
PersistentSubsectionGradeOverride,
|
||||
PersistentSubsectionGradeOverrideHistory,
|
||||
)
|
||||
from lms.djangoapps.grades.rest_api.serializers import (
|
||||
StudentGradebookEntrySerializer,
|
||||
SubsectionGradeResponseSerializer,
|
||||
)
|
||||
from lms.djangoapps.grades.rest_api.v1.utils import (
|
||||
USER_MODEL,
|
||||
CourseEnrollmentPagination,
|
||||
GradeViewMixin,
|
||||
)
|
||||
from lms.djangoapps.grades.subsection_grade import CreateSubsectionGrade
|
||||
from lms.djangoapps.grades.tasks import recalculate_subsection_grade_v3
|
||||
from lms.djangoapps.grades.grade_utils import are_grades_frozen
|
||||
@@ -70,16 +78,14 @@ def bulk_gradebook_view_context(course_key, users):
|
||||
list of users, also, fetch all the score relavant data,
|
||||
storing the result in a RequestCache and deleting grades on context exit.
|
||||
"""
|
||||
PersistentSubsectionGrade.prefetch(course_key, users)
|
||||
PersistentCourseGrade.prefetch(course_key, users)
|
||||
prefetch_course_and_subsection_grades(course_key, users)
|
||||
CourseEnrollment.bulk_fetch_enrollment_states(users, course_key)
|
||||
cohorts.bulk_cache_cohorts(course_key, users)
|
||||
BulkRoleCache.prefetch(users)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
PersistentSubsectionGrade.clear_prefetched_data(course_key)
|
||||
PersistentCourseGrade.clear_prefetched_data(course_key)
|
||||
clear_prefetched_course_and_subsection_grades(course_key)
|
||||
|
||||
|
||||
def verify_writable_gradebook_enabled(view_func):
|
||||
@@ -95,7 +101,7 @@ def verify_writable_gradebook_enabled(view_func):
|
||||
Wraps the given view function.
|
||||
"""
|
||||
course_key = get_course_key(request, kwargs.get('course_id'))
|
||||
if not waffle_flags()[WRITABLE_GRADEBOOK].is_enabled(course_key):
|
||||
if not is_writable_gradebook_enabled(course_key):
|
||||
raise self.api_error(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
developer_message='The writable gradebook feature is not enabled for this course.',
|
||||
@@ -504,7 +510,7 @@ class GradebookView(GradeViewMixin, PaginatedAPIView):
|
||||
# over users to determine their subsection grades. We purposely avoid fetching
|
||||
# the user-specific course structure for each user, because that is very expensive.
|
||||
course_data = CourseData(user=None, course=course)
|
||||
graded_subsections = list(graded_subsections_for_course(course_data.collected_structure))
|
||||
graded_subsections = list(grades_context.graded_subsections_for_course(course_data.collected_structure))
|
||||
|
||||
if request.GET.get('username'):
|
||||
with self._get_user_or_raise(request, course_key) as grade_user:
|
||||
@@ -718,11 +724,11 @@ class GradebookBulkUpdateView(GradeViewMixin, PaginatedAPIView):
|
||||
override = PersistentSubsectionGradeOverride.update_or_create_override(
|
||||
requesting_user=request_user,
|
||||
subsection_grade_model=subsection_grade_model,
|
||||
feature=PersistentSubsectionGradeOverrideHistory.GRADEBOOK,
|
||||
feature=grades_constants.GradeOverrideFeatureEnum.gradebook,
|
||||
**override_data
|
||||
)
|
||||
|
||||
set_event_transaction_type(SUBSECTION_GRADE_CALCULATED)
|
||||
set_event_transaction_type(grades_events.SUBSECTION_GRADE_CALCULATED)
|
||||
create_new_event_transaction_id()
|
||||
|
||||
recalculate_subsection_grade_v3.apply(
|
||||
@@ -736,12 +742,12 @@ class GradebookBulkUpdateView(GradeViewMixin, PaginatedAPIView):
|
||||
score_deleted=False,
|
||||
event_transaction_id=unicode(get_event_transaction_id()),
|
||||
event_transaction_type=unicode(get_event_transaction_type()),
|
||||
score_db_table=ScoreDatabaseTableEnum.overrides,
|
||||
score_db_table=grades_constants.ScoreDatabaseTableEnum.overrides,
|
||||
force_update_subsections=True,
|
||||
)
|
||||
)
|
||||
# Emit events to let our tracking system to know we updated subsection grade
|
||||
subsection_grade_calculated(subsection_grade_model)
|
||||
grades_events.subsection_grade_calculated(subsection_grade_model)
|
||||
return override
|
||||
|
||||
@staticmethod
|
||||
@@ -19,9 +19,8 @@ from six import text_type
|
||||
|
||||
from course_modes.models import CourseMode
|
||||
from lms.djangoapps.courseware.tests.factories import InstructorFactory, StaffFactory
|
||||
from lms.djangoapps.grades.api.v1.tests.mixins import GradeViewTestMixin
|
||||
from lms.djangoapps.grades.api.v1.views import CourseEnrollmentPagination
|
||||
from lms.djangoapps.grades.config.waffle import WRITABLE_GRADEBOOK, waffle_flags
|
||||
from lms.djangoapps.grades.constants import GradeOverrideFeatureEnum
|
||||
from lms.djangoapps.grades.course_data import CourseData
|
||||
from lms.djangoapps.grades.course_grade import CourseGrade
|
||||
from lms.djangoapps.grades.models import (
|
||||
@@ -31,6 +30,8 @@ from lms.djangoapps.grades.models import (
|
||||
PersistentSubsectionGradeOverride,
|
||||
PersistentSubsectionGradeOverrideHistory,
|
||||
)
|
||||
from lms.djangoapps.grades.rest_api.v1.tests.mixins import GradeViewTestMixin
|
||||
from lms.djangoapps.grades.rest_api.v1.views import CourseEnrollmentPagination
|
||||
from lms.djangoapps.certificates.models import (
|
||||
GeneratedCertificate,
|
||||
CertificateStatuses,
|
||||
@@ -221,7 +222,7 @@ class CourseGradingViewTest(SharedModuleStoreTestCase, APITestCase):
|
||||
self.assertEqual(expected_data, resp.data)
|
||||
|
||||
def test_course_grade_frozen(self):
|
||||
with patch('lms.djangoapps.grades.api.v1.gradebook_views.are_grades_frozen') as mock_frozen_grades:
|
||||
with patch('lms.djangoapps.grades.rest_api.v1.gradebook_views.are_grades_frozen') as mock_frozen_grades:
|
||||
mock_frozen_grades.return_value = True
|
||||
self.client.login(username=self.staff.username, password=self.password)
|
||||
resp = self.client.get(self.get_url(self.course_key))
|
||||
@@ -907,7 +908,7 @@ class GradebookBulkUpdateViewTest(GradebookViewTestBase):
|
||||
"""
|
||||
Should receive a 403 when grades have been frozen for a course.
|
||||
"""
|
||||
with patch('lms.djangoapps.grades.api.v1.gradebook_views.are_grades_frozen', return_value=True):
|
||||
with patch('lms.djangoapps.grades.rest_api.v1.gradebook_views.are_grades_frozen', return_value=True):
|
||||
with override_waffle_flag(self.waffle_flag, active=True):
|
||||
getattr(self, login_method)()
|
||||
post_data = [
|
||||
@@ -1171,7 +1172,7 @@ class GradebookBulkUpdateViewTest(GradebookViewTestBase):
|
||||
for audit_item in update_records:
|
||||
self.assertEqual(audit_item.user, request_user)
|
||||
self.assertIsNotNone(audit_item.created)
|
||||
self.assertEqual(audit_item.feature, PersistentSubsectionGradeOverrideHistory.GRADEBOOK)
|
||||
self.assertEqual(audit_item.feature, GradeOverrideFeatureEnum.gradebook)
|
||||
self.assertEqual(audit_item.action, PersistentSubsectionGradeOverrideHistory.CREATE_OR_UPDATE)
|
||||
|
||||
def test_update_failing_grade(self):
|
||||
@@ -1348,7 +1349,7 @@ class SubsectionGradeViewTest(GradebookViewTestBase):
|
||||
subsection_grade_model=self.grade,
|
||||
earned_all_override=0.0,
|
||||
earned_graded_override=0.0,
|
||||
feature=PersistentSubsectionGradeOverrideHistory.GRADEBOOK,
|
||||
feature=GradeOverrideFeatureEnum.gradebook,
|
||||
)
|
||||
|
||||
resp = self.client.get(
|
||||
@@ -1419,7 +1420,7 @@ class SubsectionGradeViewTest(GradebookViewTestBase):
|
||||
subsection_grade_model=self.grade,
|
||||
earned_all_override=0.0,
|
||||
earned_graded_override=0.0,
|
||||
feature=PersistentSubsectionGradeOverrideHistory.GRADEBOOK,
|
||||
feature=GradeOverrideFeatureEnum.gradebook,
|
||||
)
|
||||
|
||||
resp = self.client.get(
|
||||
@@ -11,8 +11,8 @@ from opaque_keys import InvalidKeyError
|
||||
from rest_framework import status
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from lms.djangoapps.grades.api.v1.tests.mixins import GradeViewTestMixin
|
||||
from lms.djangoapps.grades.api.v1.views import CourseGradesView
|
||||
from lms.djangoapps.grades.rest_api.v1.tests.mixins import GradeViewTestMixin
|
||||
from lms.djangoapps.grades.rest_api.v1.views import CourseGradesView
|
||||
from openedx.core.djangoapps.user_authn.tests.utils import AuthAndScopesTestMixin
|
||||
from student.tests.factories import UserFactory
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
from django.conf import settings
|
||||
from django.conf.urls import url
|
||||
|
||||
from lms.djangoapps.grades.api.v1 import gradebook_views, views
|
||||
from lms.djangoapps.grades.rest_api.v1 import gradebook_views, views
|
||||
|
||||
|
||||
app_name = 'lms.djangoapps.grades'
|
||||
@@ -8,8 +8,6 @@ from rest_framework import status
|
||||
from rest_framework.exceptions import AuthenticationFailed
|
||||
from rest_framework.pagination import CursorPagination
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
from six import text_type
|
||||
|
||||
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
|
||||
from openedx.core.lib.api.view_utils import DeveloperErrorViewMixin
|
||||
@@ -10,13 +10,12 @@ from edx_rest_framework_extensions import permissions
|
||||
from edx_rest_framework_extensions.auth.jwt.authentication import JwtAuthentication
|
||||
from edx_rest_framework_extensions.auth.session.authentication import SessionAuthenticationAllowInactiveUser
|
||||
from lms.djangoapps.courseware.access import has_access
|
||||
from lms.djangoapps.grades.api.serializers import GradingPolicySerializer
|
||||
from lms.djangoapps.grades.api.v1.utils import (
|
||||
from lms.djangoapps.grades.rest_api.serializers import GradingPolicySerializer
|
||||
from lms.djangoapps.grades.rest_api.v1.utils import (
|
||||
CourseEnrollmentPagination,
|
||||
GradeViewMixin,
|
||||
)
|
||||
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
|
||||
from lms.djangoapps.grades.models import PersistentCourseGrade
|
||||
from lms.djangoapps.grades.api import CourseGradeFactory, prefetch_course_grades, clear_prefetched_course_grades
|
||||
from opaque_keys import InvalidKeyError
|
||||
from openedx.core.lib.api.authentication import OAuth2AuthenticationAllowInactiveUser
|
||||
from openedx.core.lib.api.view_utils import PaginatedAPIView, get_course_key, verify_course_exists
|
||||
@@ -32,11 +31,11 @@ def bulk_course_grade_context(course_key, users):
|
||||
within a context, storing in a RequestCache and deleting
|
||||
on context exit.
|
||||
"""
|
||||
PersistentCourseGrade.prefetch(course_key, users)
|
||||
prefetch_course_grades(course_key, users)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
PersistentCourseGrade.clear_prefetched_data(course_key)
|
||||
clear_prefetched_course_grades(course_key)
|
||||
|
||||
|
||||
class CourseGradesView(GradeViewMixin, PaginatedAPIView):
|
||||
@@ -7,7 +7,6 @@ from django.contrib.auth import get_user_model
|
||||
import pytz
|
||||
from six import text_type
|
||||
|
||||
from lms.djangoapps.courseware.courses import get_course
|
||||
from lms.djangoapps.grades.course_data import CourseData
|
||||
from lms.djangoapps.grades.subsection_grade import CreateSubsectionGrade
|
||||
from lms.djangoapps.utils import _get_key
|
||||
@@ -15,7 +14,7 @@ from opaque_keys.edx.keys import CourseKey, UsageKey
|
||||
from track.event_transaction_utils import create_new_event_transaction_id, set_event_transaction_type
|
||||
|
||||
from .config.waffle import waffle_flags, REJECTED_EXAM_OVERRIDES_GRADE
|
||||
from .constants import ScoreDatabaseTableEnum
|
||||
from .constants import ScoreDatabaseTableEnum, GradeOverrideFeatureEnum
|
||||
from .events import SUBSECTION_OVERRIDE_EVENT_TYPE
|
||||
from .models import (
|
||||
PersistentSubsectionGrade,
|
||||
@@ -89,7 +88,7 @@ class GradesService(object):
|
||||
override = PersistentSubsectionGradeOverride.update_or_create_override(
|
||||
requesting_user=None,
|
||||
subsection_grade_model=grade,
|
||||
feature=PersistentSubsectionGradeOverrideHistory.PROCTORING,
|
||||
feature=GradeOverrideFeatureEnum.proctoring,
|
||||
earned_all_override=earned_all,
|
||||
earned_graded_override=earned_graded,
|
||||
)
|
||||
@@ -130,7 +129,7 @@ class GradesService(object):
|
||||
if override is not None:
|
||||
_ = PersistentSubsectionGradeOverrideHistory.objects.create(
|
||||
override_id=override.id,
|
||||
feature=PersistentSubsectionGradeOverrideHistory.PROCTORING,
|
||||
feature=GradeOverrideFeatureEnum.proctoring,
|
||||
action=PersistentSubsectionGradeOverrideHistory.DELETE
|
||||
)
|
||||
override.delete()
|
||||
@@ -163,6 +162,7 @@ class GradesService(object):
|
||||
Given a user_id, course_key, and subsection usage_key,
|
||||
creates a new ``PersistentSubsectionGrade``.
|
||||
"""
|
||||
from lms.djangoapps.courseware.courses import get_course
|
||||
course = get_course(course_key, depth=None)
|
||||
subsection = course.get_child(usage_key)
|
||||
if not subsection:
|
||||
|
||||
@@ -16,6 +16,7 @@ from freezegun import freeze_time
|
||||
from mock import patch
|
||||
from opaque_keys.edx.locator import BlockUsageLocator, CourseLocator
|
||||
|
||||
from lms.djangoapps.grades.constants import GradeOverrideFeatureEnum
|
||||
from lms.djangoapps.grades.models import (
|
||||
BLOCK_RECORD_LIST_VERSION,
|
||||
BlockRecord,
|
||||
@@ -309,7 +310,7 @@ class PersistentSubsectionGradeTest(GradesModelTestCase):
|
||||
subsection_grade_model=grade,
|
||||
earned_all_override=0.0,
|
||||
earned_graded_override=0.0,
|
||||
feature=PersistentSubsectionGradeOverrideHistory.GRADEBOOK,
|
||||
feature=GradeOverrideFeatureEnum.gradebook,
|
||||
)
|
||||
|
||||
grade = PersistentSubsectionGrade.update_or_create_grade(**self.params)
|
||||
|
||||
@@ -5,6 +5,7 @@ from datetime import datetime
|
||||
import ddt
|
||||
import pytz
|
||||
from freezegun import freeze_time
|
||||
from lms.djangoapps.grades.constants import GradeOverrideFeatureEnum
|
||||
from lms.djangoapps.grades.models import (
|
||||
PersistentSubsectionGrade,
|
||||
PersistentSubsectionGradeOverride,
|
||||
@@ -145,7 +146,7 @@ class GradesServiceTests(ModuleStoreTestCase):
|
||||
def _verify_override_history(self, override_history, history_action):
|
||||
self.assertIsNone(override_history.user)
|
||||
self.assertIsNotNone(override_history.created)
|
||||
self.assertEqual(override_history.feature, PersistentSubsectionGradeOverrideHistory.PROCTORING)
|
||||
self.assertEqual(override_history.feature, GradeOverrideFeatureEnum.proctoring)
|
||||
self.assertEqual(override_history.action, history_action)
|
||||
|
||||
@ddt.data(
|
||||
|
||||
@@ -9,10 +9,10 @@ from courseware.tests.test_submitting_problems import ProblemSubmissionTestMixin
|
||||
from lms.djangoapps.grades.config.tests.utils import persistent_grades_feature_flags
|
||||
from student.tests.factories import UserFactory
|
||||
|
||||
from ..constants import GradeOverrideFeatureEnum
|
||||
from ..models import (
|
||||
PersistentSubsectionGrade,
|
||||
PersistentSubsectionGradeOverride,
|
||||
PersistentSubsectionGradeOverrideHistory,
|
||||
)
|
||||
from ..subsection_grade_factory import ZeroSubsectionGrade
|
||||
from .base import GradeTestBase
|
||||
@@ -143,7 +143,7 @@ class TestSubsectionGradeFactory(ProblemSubmissionTestMixin, GradeTestBase):
|
||||
earned_graded_override=earned_graded_override,
|
||||
earned_all_override=earned_graded_override,
|
||||
possible_graded_override=possible_graded_override,
|
||||
feature=PersistentSubsectionGradeOverrideHistory.GRADEBOOK,
|
||||
feature=GradeOverrideFeatureEnum.gradebook,
|
||||
)
|
||||
|
||||
# Now, even if the problem scores interface gives us a 2/3,
|
||||
|
||||
@@ -22,10 +22,12 @@ from six import text_type
|
||||
from course_modes.models import CourseMode
|
||||
from courseware.models import StudentModule
|
||||
from eventtracking import tracker
|
||||
from lms.djangoapps.grades.constants import ScoreDatabaseTableEnum
|
||||
from lms.djangoapps.grades.events import STATE_DELETED_EVENT_TYPE
|
||||
from lms.djangoapps.grades.signals.handlers import disconnect_submissions_signal_receiver
|
||||
from lms.djangoapps.grades.signals.signals import PROBLEM_RAW_SCORE_CHANGED
|
||||
from lms.djangoapps.grades.api import (
|
||||
constants as grades_constants,
|
||||
events as grades_events,
|
||||
signals as grades_signals,
|
||||
disconnect_submissions_signal_receiver,
|
||||
)
|
||||
from lms.djangoapps.instructor.message_types import (
|
||||
AccountCreationAndEnrollment,
|
||||
AddBetaTester,
|
||||
@@ -298,16 +300,16 @@ def reset_student_attempts(course_id, student, module_state_key, requesting_user
|
||||
if delete_module:
|
||||
module_to_reset.delete()
|
||||
create_new_event_transaction_id()
|
||||
set_event_transaction_type(STATE_DELETED_EVENT_TYPE)
|
||||
set_event_transaction_type(grades_events.STATE_DELETED_EVENT_TYPE)
|
||||
tracker.emit(
|
||||
unicode(STATE_DELETED_EVENT_TYPE),
|
||||
unicode(grades_events.STATE_DELETED_EVENT_TYPE),
|
||||
{
|
||||
'user_id': unicode(student.id),
|
||||
'course_id': unicode(course_id),
|
||||
'problem_id': unicode(module_state_key),
|
||||
'instructor_id': unicode(requesting_user.id),
|
||||
'event_transaction_id': unicode(get_event_transaction_id()),
|
||||
'event_transaction_type': unicode(STATE_DELETED_EVENT_TYPE),
|
||||
'event_transaction_type': unicode(grades_events.STATE_DELETED_EVENT_TYPE),
|
||||
}
|
||||
)
|
||||
if not submission_cleared:
|
||||
@@ -351,7 +353,7 @@ def _fire_score_changed_for_block(
|
||||
if block and block.has_score:
|
||||
max_score = block.max_score()
|
||||
if max_score is not None:
|
||||
PROBLEM_RAW_SCORE_CHANGED.send(
|
||||
grades_signals.PROBLEM_RAW_SCORE_CHANGED.send(
|
||||
sender=None,
|
||||
raw_earned=0,
|
||||
raw_possible=max_score,
|
||||
@@ -362,7 +364,7 @@ def _fire_score_changed_for_block(
|
||||
score_deleted=True,
|
||||
only_if_higher=False,
|
||||
modified=datetime.now().replace(tzinfo=pytz.UTC),
|
||||
score_db_table=ScoreDatabaseTableEnum.courseware_student_module,
|
||||
score_db_table=grades_constants.ScoreDatabaseTableEnum.courseware_student_module,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ from six import text_type
|
||||
|
||||
from capa.tests.response_xml_factory import StringResponseXMLFactory
|
||||
from courseware.tests.factories import StudentModuleFactory
|
||||
from lms.djangoapps.grades.tasks import compute_all_grades_for_course
|
||||
from lms.djangoapps.grades.api import task_compute_all_grades_for_course
|
||||
from student.tests.factories import AdminFactory, CourseEnrollmentFactory, UserFactory
|
||||
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
|
||||
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
|
||||
@@ -73,7 +73,7 @@ class TestGradebook(SharedModuleStoreTestCase):
|
||||
course_id=self.course.id,
|
||||
module_state_key=item.location
|
||||
)
|
||||
compute_all_grades_for_course.apply_async(kwargs={'course_key': text_type(self.course.id)})
|
||||
task_compute_all_grades_for_course.apply_async(kwargs={'course_key': text_type(self.course.id)})
|
||||
|
||||
self.response = self.client.get(reverse(
|
||||
'spoc_gradebook',
|
||||
|
||||
@@ -12,7 +12,7 @@ from opaque_keys.edx.keys import CourseKey
|
||||
|
||||
from courseware.courses import get_course_with_access
|
||||
from edxmako.shortcuts import render_to_response
|
||||
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
|
||||
from lms.djangoapps.grades.api import CourseGradeFactory
|
||||
from lms.djangoapps.instructor.views.api import require_level
|
||||
from xmodule.modulestore.django import modulestore
|
||||
|
||||
|
||||
@@ -43,7 +43,7 @@ from lms.djangoapps.certificates.models import (
|
||||
GeneratedCertificate
|
||||
)
|
||||
from lms.djangoapps.courseware.module_render import get_module_by_usage_id
|
||||
from lms.djangoapps.grades.config.waffle import WRITABLE_GRADEBOOK, waffle_flags
|
||||
from lms.djangoapps.grades.api import is_writable_gradebook_enabled
|
||||
from openedx.core.djangoapps.course_groups.cohorts import DEFAULT_COHORT_NAME, get_course_cohorts, is_course_cohorted
|
||||
from openedx.core.djangoapps.django_comment_common.models import FORUM_ROLE_ADMINISTRATOR, CourseDiscussionSettings
|
||||
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
|
||||
@@ -594,7 +594,7 @@ def _section_student_admin(course, access):
|
||||
kwargs={'course_id': unicode(course_key)}),
|
||||
'spoc_gradebook_url': reverse('spoc_gradebook', kwargs={'course_id': unicode(course_key)}),
|
||||
}
|
||||
if waffle_flags()[WRITABLE_GRADEBOOK].is_enabled(course_key) and settings.WRITABLE_GRADEBOOK_URL:
|
||||
if is_writable_gradebook_enabled(course_key) and settings.WRITABLE_GRADEBOOK_URL:
|
||||
section_data['writable_gradebook_url'] = urljoin(settings.WRITABLE_GRADEBOOK_URL, '/' + text_type(course_key))
|
||||
return section_data
|
||||
|
||||
|
||||
@@ -16,10 +16,10 @@ from edx_proctoring.api import get_exam_violation_report
|
||||
from opaque_keys.edx.keys import UsageKey
|
||||
from six import text_type
|
||||
|
||||
from courseware.models import StudentModule
|
||||
import xmodule.graders as xmgraders
|
||||
from lms.djangoapps.certificates.models import CertificateStatuses, GeneratedCertificate
|
||||
from courseware.models import StudentModule
|
||||
from lms.djangoapps.grades.context import grading_context_for_course
|
||||
from lms.djangoapps.grades.api import context as grades_context
|
||||
from lms.djangoapps.verify_student.services import IDVerificationService
|
||||
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
|
||||
from openedx.core.djangolib.markup import HTML, Text
|
||||
@@ -525,7 +525,7 @@ def dump_grading_context(course):
|
||||
msg += hbar
|
||||
msg += u"Listing grading context for course %s\n" % text_type(course.id)
|
||||
|
||||
gcontext = grading_context_for_course(course)
|
||||
gcontext = grades_context.grading_context_for_course(course)
|
||||
msg += "graded sections:\n"
|
||||
|
||||
msg += '%s\n' % gcontext['all_graded_subsections_by_type'].keys()
|
||||
|
||||
@@ -21,9 +21,11 @@ from courseware.user_state_client import DjangoXBlockUserStateClient
|
||||
from instructor_analytics.basic import list_problem_responses
|
||||
from instructor_analytics.csvs import format_dictlist
|
||||
from lms.djangoapps.certificates.models import CertificateWhitelist, GeneratedCertificate, certificate_info_for_user
|
||||
from lms.djangoapps.grades.context import grading_context, grading_context_for_course
|
||||
from lms.djangoapps.grades.models import PersistentCourseGrade, PersistentSubsectionGrade
|
||||
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
|
||||
from lms.djangoapps.grades.api import (
|
||||
CourseGradeFactory,
|
||||
context as grades_context,
|
||||
prefetch_course_and_subsection_grades,
|
||||
)
|
||||
from lms.djangoapps.teams.models import CourseTeamMembership
|
||||
from lms.djangoapps.verify_student.services import IDVerificationService
|
||||
from openedx.core.djangoapps.content.block_structure.api import get_course_in_cache
|
||||
@@ -114,7 +116,7 @@ class _CourseGradeReportContext(object):
|
||||
Returns an OrderedDict that maps an assignment type to a dict of
|
||||
subsection-headers and average-header.
|
||||
"""
|
||||
grading_cxt = grading_context(self.course, self.course_structure)
|
||||
grading_cxt = grades_context.grading_context(self.course, self.course_structure)
|
||||
graded_assignments_map = OrderedDict()
|
||||
for assignment_type_name, subsection_infos in grading_cxt['all_graded_subsections_by_type'].iteritems():
|
||||
graded_subsections_map = OrderedDict()
|
||||
@@ -189,8 +191,7 @@ class _CourseGradeBulkContext(object):
|
||||
self.enrollments = _EnrollmentBulkContext(context, users)
|
||||
bulk_cache_cohorts(context.course_id, users)
|
||||
BulkRoleCache.prefetch(users)
|
||||
PersistentCourseGrade.prefetch(context.course_id, users)
|
||||
PersistentSubsectionGrade.prefetch(context.course_id, users)
|
||||
prefetch_course_and_subsection_grades(context.course_id, users)
|
||||
BulkCourseTags.prefetch(context.course_id, users)
|
||||
|
||||
|
||||
@@ -579,7 +580,7 @@ class ProblemGradeReport(object):
|
||||
headers in the final report.
|
||||
"""
|
||||
scorable_blocks_map = OrderedDict()
|
||||
grading_context = grading_context_for_course(course)
|
||||
grading_context = grades_context.grading_context_for_course(course)
|
||||
for assignment_type_name, subsection_infos in grading_context['all_graded_subsections_by_type'].iteritems():
|
||||
for subsection_index, subsection_info in enumerate(subsection_infos, start=1):
|
||||
for scorable_block in subsection_info['scored_descendants']:
|
||||
|
||||
@@ -13,7 +13,7 @@ from courseware.courses import get_course_by_id, get_problems_in_section
|
||||
from courseware.model_data import DjangoKeyValueStore, FieldDataCache
|
||||
from courseware.models import StudentModule
|
||||
from courseware.module_render import get_module_for_descriptor_internal
|
||||
from lms.djangoapps.grades.events import GRADES_OVERRIDE_EVENT_TYPE, GRADES_RESCORE_EVENT_TYPE
|
||||
from lms.djangoapps.grades.api import events as grades_events
|
||||
from student.models import get_user_by_username_or_email
|
||||
from track.event_transaction_utils import create_new_event_transaction_id, set_event_transaction_type
|
||||
from track.views import task_track
|
||||
@@ -162,7 +162,7 @@ def rescore_problem_module_state(xmodule_instance_args, module_descriptor, stude
|
||||
# calls that create events. We retrieve and store the id here because
|
||||
# the request cache will be erased during downstream calls.
|
||||
create_new_event_transaction_id()
|
||||
set_event_transaction_type(GRADES_RESCORE_EVENT_TYPE)
|
||||
set_event_transaction_type(grades_events.GRADES_RESCORE_EVENT_TYPE)
|
||||
|
||||
# specific events from CAPA are not propagated up the stack. Do we want this?
|
||||
try:
|
||||
@@ -246,7 +246,7 @@ def override_score_module_state(xmodule_instance_args, module_descriptor, studen
|
||||
# calls that create events. We retrieve and store the id here because
|
||||
# the request cache will be erased during downstream calls.
|
||||
create_new_event_transaction_id()
|
||||
set_event_transaction_type(GRADES_OVERRIDE_EVENT_TYPE)
|
||||
set_event_transaction_type(grades_events.GRADES_OVERRIDE_EVENT_TYPE)
|
||||
|
||||
problem_weight = instance.weight if instance.weight is not None else 1
|
||||
if problem_weight == 0:
|
||||
|
||||
@@ -20,7 +20,7 @@ from six import text_type
|
||||
from capa.responsetypes import StudentInputError
|
||||
from capa.tests.response_xml_factory import CodeResponseXMLFactory, CustomResponseXMLFactory
|
||||
from courseware.model_data import StudentModule
|
||||
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
|
||||
from lms.djangoapps.grades.api import CourseGradeFactory
|
||||
from lms.djangoapps.instructor_task.api import (
|
||||
submit_delete_problem_state_for_all_students,
|
||||
submit_rescore_problem_for_all_students,
|
||||
|
||||
@@ -8,7 +8,7 @@ from django.conf import settings
|
||||
from django.dispatch import receiver
|
||||
|
||||
import lti_provider.outcomes as outcomes
|
||||
from lms.djangoapps.grades.signals.signals import PROBLEM_WEIGHTED_SCORE_CHANGED
|
||||
from lms.djangoapps.grades.api import signals as grades_signals
|
||||
from lti_provider.views import parse_course_and_usage_keys
|
||||
from xmodule.modulestore.django import modulestore
|
||||
from .tasks import send_composite_outcome, send_leaf_outcome
|
||||
@@ -35,7 +35,7 @@ def increment_assignment_versions(course_key, usage_key, user_id):
|
||||
return assignments
|
||||
|
||||
|
||||
@receiver(PROBLEM_WEIGHTED_SCORE_CHANGED)
|
||||
@receiver(grades_signals.PROBLEM_WEIGHTED_SCORE_CHANGED)
|
||||
def score_changed_handler(sender, **kwargs): # pylint: disable=unused-argument
|
||||
"""
|
||||
Consume signals that indicate score changes. See the definition of
|
||||
|
||||
@@ -9,7 +9,7 @@ from opaque_keys.edx.keys import CourseKey
|
||||
|
||||
import lti_provider.outcomes as outcomes
|
||||
from lms import CELERY_APP
|
||||
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
|
||||
from lms.djangoapps.grades.api import CourseGradeFactory
|
||||
from lti_provider.models import GradedAssignment
|
||||
from xmodule.modulestore.django import modulestore
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
<%!
|
||||
from course_modes.models import CourseMode
|
||||
from lms.djangoapps.certificates.models import CertificateStatuses
|
||||
from lms.djangoapps.grades.models import PersistentSubsectionGradeOverrideHistory
|
||||
from lms.djangoapps.grades.api import constants as grades_constants
|
||||
from django.utils.translation import ugettext as _
|
||||
from openedx.core.djangolib.markup import HTML, Text
|
||||
from django.urls import reverse
|
||||
@@ -198,7 +198,7 @@ username = get_enterprise_learner_generic_name(request) or student.username
|
||||
<p class="override-notice">
|
||||
%if section.override is not None:
|
||||
<%last_override_history = section.override.get_history().order_by('created').last()%>
|
||||
%if (not last_override_history or last_override_history.feature == PersistentSubsectionGradeOverrideHistory.PROCTORING) and section.format == "Exam" and earned == 0:
|
||||
%if (not last_override_history or last_override_history.feature == grades_constants.GradeOverrideFeatureEnum.proctoring) and section.format == "Exam" and earned == 0:
|
||||
${_("Suspicious activity detected during proctored exam review. Exam score 0.")}
|
||||
%else:
|
||||
${_("Section grade has been overridden.")}
|
||||
|
||||
@@ -22,8 +22,8 @@ from opaque_keys import InvalidKeyError
|
||||
from opaque_keys.edx.keys import CourseKey
|
||||
from pytz import UTC
|
||||
|
||||
from lms.djangoapps.certificates.models import GeneratedCertificate
|
||||
from lms.djangoapps.grades.models import PersistentCourseGrade
|
||||
from lms.djangoapps.certificates.api import get_recently_modified_certificates
|
||||
from lms.djangoapps.grades.api import get_recently_modified_grades
|
||||
from openedx.core.djangoapps.credentials.models import NotifyCredentialsConfig
|
||||
from openedx.core.djangoapps.credentials.signals import handle_cert_change, send_grade_if_interesting
|
||||
from openedx.core.djangoapps.programs.signals import handle_course_cert_changed
|
||||
@@ -169,32 +169,18 @@ class Command(BaseCommand):
|
||||
options['delay']
|
||||
)
|
||||
|
||||
cert_filter_args = {}
|
||||
grade_filter_args = {}
|
||||
|
||||
try:
|
||||
site_config = SiteConfiguration.objects.get(site__domain=options['site']) if options['site'] else None
|
||||
except SiteConfiguration.DoesNotExist:
|
||||
log.error(u'No site configuration found for site %s', options['site'])
|
||||
if options['courses']:
|
||||
course_keys = self.get_course_keys(options['courses'])
|
||||
cert_filter_args['course_id__in'] = course_keys
|
||||
grade_filter_args['course_id__in'] = course_keys
|
||||
|
||||
if options['start_date']:
|
||||
cert_filter_args['modified_date__gte'] = options['start_date']
|
||||
grade_filter_args['modified__gte'] = options['start_date']
|
||||
|
||||
if options['end_date']:
|
||||
cert_filter_args['modified_date__lte'] = options['end_date']
|
||||
grade_filter_args['modified__lte'] = options['end_date']
|
||||
|
||||
if not cert_filter_args:
|
||||
course_keys = self.get_course_keys(options['courses'])
|
||||
if not (course_keys or options['start_date'] or options['end_date']):
|
||||
raise CommandError('You must specify a filter (e.g. --courses= or --start-date)')
|
||||
|
||||
# pylint: disable=no-member
|
||||
certs = GeneratedCertificate.objects.filter(**cert_filter_args).order_by('modified_date')
|
||||
grades = PersistentCourseGrade.objects.filter(**grade_filter_args).order_by('modified')
|
||||
certs = get_recently_modified_certificates(course_keys, options['start_date'], options['end_date'])
|
||||
grades = get_recently_modified_grades(course_keys, options['start_date'], options['end_date'])
|
||||
|
||||
if options['dry_run']:
|
||||
self.print_dry_run(certs, grades)
|
||||
@@ -254,7 +240,7 @@ class Command(BaseCommand):
|
||||
verbose=verbose
|
||||
)
|
||||
|
||||
def get_course_keys(self, courses):
|
||||
def get_course_keys(self, courses=None):
|
||||
"""
|
||||
Return a list of CourseKeys that we will emit signals to.
|
||||
|
||||
@@ -265,7 +251,10 @@ class Command(BaseCommand):
|
||||
it is a fatal error and will cause us to exit the entire process.
|
||||
"""
|
||||
# Use specific courses if specified, but fall back to all courses.
|
||||
if not courses:
|
||||
courses = []
|
||||
course_keys = []
|
||||
|
||||
log.info(u"%d courses specified: %s", len(courses), ", ".join(courses))
|
||||
for course_id in courses:
|
||||
try:
|
||||
|
||||
@@ -6,7 +6,7 @@ from logging import getLogger
|
||||
from course_modes.models import CourseMode
|
||||
from django.contrib.sites.models import Site
|
||||
from lms.djangoapps.certificates.models import CertificateStatuses, GeneratedCertificate
|
||||
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
|
||||
from lms.djangoapps.grades.api import CourseGradeFactory
|
||||
from openedx.core.djangoapps.catalog.utils import get_programs
|
||||
from openedx.core.djangoapps.credentials.models import CredentialsApiConfig
|
||||
from openedx.core.djangoapps.site_configuration import helpers
|
||||
|
||||
@@ -24,7 +24,7 @@ from lms.djangoapps.certificates import api as certificate_api
|
||||
from lms.djangoapps.certificates.models import GeneratedCertificate
|
||||
from lms.djangoapps.commerce.utils import EcommerceService
|
||||
from lms.djangoapps.courseware.access import has_access
|
||||
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
|
||||
from lms.djangoapps.grades.api import CourseGradeFactory
|
||||
from openedx.core.djangoapps.catalog.utils import get_programs, get_fulfillable_course_runs_for_entitlement
|
||||
from openedx.core.djangoapps.certificates.api import available_date_for_certificate
|
||||
from openedx.core.djangoapps.commerce.utils import ecommerce_api_client
|
||||
|
||||
@@ -12,7 +12,7 @@ from django.utils.translation import ugettext as _
|
||||
from completion.models import BlockCompletion
|
||||
from lms.djangoapps.courseware.access import _has_access_to_course
|
||||
from lms.djangoapps.course_blocks.api import get_course_blocks
|
||||
from lms.djangoapps.grades.subsection_grade_factory import SubsectionGradeFactory
|
||||
from lms.djangoapps.grades.api import SubsectionGradeFactory
|
||||
from milestones import api as milestones_api
|
||||
from opaque_keys.edx.keys import UsageKey
|
||||
from openedx.core.lib.gating.exceptions import GatingValidationError
|
||||
|
||||
@@ -10,10 +10,10 @@ from mock import patch, Mock
|
||||
from ddt import ddt, data, unpack
|
||||
from django.conf import settings
|
||||
from lms.djangoapps.gating import api as lms_gating_api
|
||||
from lms.djangoapps.grades.constants import GradeOverrideFeatureEnum
|
||||
from lms.djangoapps.grades.models import (
|
||||
PersistentSubsectionGrade,
|
||||
PersistentSubsectionGradeOverride,
|
||||
PersistentSubsectionGradeOverrideHistory,
|
||||
)
|
||||
from lms.djangoapps.grades.tests.base import GradeTestBase
|
||||
from lms.djangoapps.grades.tests.utils import mock_get_score
|
||||
@@ -392,7 +392,7 @@ class TestGatingGradesIntegration(GradeTestBase):
|
||||
earned_graded_override=0,
|
||||
earned_all_override=0,
|
||||
possible_graded_override=3,
|
||||
feature=PersistentSubsectionGradeOverrideHistory.GRADEBOOK,
|
||||
feature=GradeOverrideFeatureEnum.gradebook,
|
||||
)
|
||||
|
||||
# it's important that we stay in the mock_get_score() context here,
|
||||
|
||||
@@ -15,7 +15,7 @@ import six
|
||||
from xblock.completable import XBlockCompletionMode
|
||||
from xblock.core import XBlock
|
||||
|
||||
from lms.djangoapps.grades.signals.signals import PROBLEM_WEIGHTED_SCORE_CHANGED
|
||||
from lms.djangoapps.grades.api import signals as grades_signals
|
||||
from openedx.core.djangolib.testing.utils import skip_unless_lms
|
||||
|
||||
|
||||
@@ -120,7 +120,7 @@ class ScorableCompletionHandlerTestCase(CompletionSetUpMixin, TestCase):
|
||||
|
||||
def test_signal_calls_handler(self):
|
||||
with patch('completion.handlers.scorable_block_completion') as mock_handler:
|
||||
PROBLEM_WEIGHTED_SCORE_CHANGED.send_robust(
|
||||
grades_signals.PROBLEM_WEIGHTED_SCORE_CHANGED.send_robust(
|
||||
sender=self,
|
||||
user_id=self.user.id,
|
||||
course_id=six.text_type(self.course_key),
|
||||
|
||||
@@ -73,6 +73,7 @@ INSTALLED_APPS = (
|
||||
'courseware',
|
||||
'student',
|
||||
'openedx.core.djangoapps.site_configuration',
|
||||
'lms.djangoapps.grades.apps.GradesConfig',
|
||||
'lms.djangoapps.certificates.apps.CertificatesConfig',
|
||||
'openedx.core.djangoapps.user_api',
|
||||
'course_modes.apps.CourseModesConfig',
|
||||
@@ -101,6 +102,11 @@ MEDIA_ROOT = tempfile.mkdtemp()
|
||||
MICROSITE_BACKEND = 'microsite_configuration.backends.filebased.FilebasedMicrositeBackend'
|
||||
MICROSITE_TEMPLATE_BACKEND = 'microsite_configuration.backends.filebased.FilebasedMicrositeTemplateBackend'
|
||||
|
||||
RECALCULATE_GRADES_ROUTING_KEY = 'edx.core.default'
|
||||
POLICY_CHANGE_GRADES_ROUTING_KEY = 'edx.core.default'
|
||||
POLICY_CHANGE_TASK_RATE_LIMIT = '300/h'
|
||||
|
||||
|
||||
SECRET_KEY = 'insecure-secret-key'
|
||||
SITE_ID = 1
|
||||
|
||||
|
||||
Reference in New Issue
Block a user