feat: Read course indexes from MySQL, not MongoDB (#29184)

Description
This is a follow up to #29058 and #29413. This is the next step in moving part of the modulestore data (the course indexes / "active versions" table) from MongoDB to MySQL.

There are four steps planned in moving course index data to MySQL:

Step 1: create the tables in MySQL, start writing to MySQL + MongoDB  done
Step 2: migrate all remaining courses to MySQL  done
Step 3: switch reads from MongoDB to MySQL (this PR)
Step 4 (much later, once we know this is working well): stop writing to MongoDB altogether.
Supporting information
OpenCraft Jira ticket: MNG-2557

Status
 Testing with a large Open edX instance is in progress.

Testing instructions
Try making changes in Studio and verify that they work fine.

Deadline
None
This commit is contained in:
Braden MacDonald
2022-03-09 07:21:09 -08:00
committed by GitHub
parent 835285d494
commit dcb7ef8821
34 changed files with 145 additions and 169 deletions

View File

@@ -163,8 +163,8 @@ class TestCourseListing(ModuleStoreTestCase):
self.assertEqual(len(list(courses_iter)), 0)
@ddt.data(
(ModuleStoreEnum.Type.split, 3),
(ModuleStoreEnum.Type.mongo, 2)
(ModuleStoreEnum.Type.split, 2),
(ModuleStoreEnum.Type.mongo, 1)
)
@ddt.unpack
def test_staff_course_listing(self, default_store, mongo_calls):

View File

@@ -403,13 +403,13 @@ class TestCourseIndexArchived(CourseTestCase):
@ddt.data(
# Staff user has course staff access
(True, 'staff', None, 1, 19),
(False, 'staff', None, 1, 19),
(True, 'staff', None, 0, 20),
(False, 'staff', None, 0, 20),
# Base user has global staff access
(True, 'user', ORG, 3, 18),
(False, 'user', ORG, 3, 18),
(True, 'user', None, 3, 18),
(False, 'user', None, 3, 18),
(True, 'user', ORG, 1, 20),
(False, 'user', ORG, 1, 20),
(True, 'user', None, 1, 20),
(False, 'user', None, 1, 20),
)
@ddt.unpack
def test_separate_archived_courses(self, separate_archived_courses, username, org, mongo_queries, sql_queries):

View File

@@ -2560,7 +2560,7 @@ class TestXBlockInfo(ItemTest):
self.validate_course_xblock_info(json_response, course_outline=True)
@ddt.data(
(ModuleStoreEnum.Type.split, 4, 4),
(ModuleStoreEnum.Type.split, 3, 3),
(ModuleStoreEnum.Type.mongo, 5, 7),
)
@ddt.unpack

View File

@@ -12,6 +12,7 @@ import zlib
from contextlib import contextmanager
from time import time
from ccx_keys.locator import CCXLocator
from django.core.cache import caches, InvalidCacheBackendError
from django.db.transaction import TransactionManagementError
import pymongo
@@ -471,8 +472,7 @@ class MongoPersistenceBackend:
if not last_update_already_set:
course_index['last_update'] = datetime.datetime.now(pytz.utc)
# Update the course index:
result = self.course_index.replace_one(query, course_index, upsert=False,)
return result.modified_count == 1
self.course_index.replace_one(query, course_index, upsert=False,)
def delete_course_index(self, course_key):
"""
@@ -588,11 +588,7 @@ class DjangoFlexPersistenceBackend(MongoPersistenceBackend):
"""
Get the course_index from the persistence mechanism whose id is the given key
"""
#######################
# TEMP: as we migrate, we are currently reading from MongoDB only, but writing to both MySQL + MongoDB
return super().get_course_index(key, ignore_case=ignore_case)
#######################
if key.version_guid and not key.org: # pylint: disable=unreachable
if key.version_guid and not key.org:
# I don't think it was intentional, but with the MongoPersistenceBackend, using a key with only a version
# guid and no org/course/run value would not raise an error, but would always return None. So we need to be
# compatible with that.
@@ -611,6 +607,17 @@ class DjangoFlexPersistenceBackend(MongoPersistenceBackend):
try:
return SplitModulestoreCourseIndex.objects.get(**query).as_v1_schema()
except SplitModulestoreCourseIndex.DoesNotExist:
# The mongo implementation does not retrieve by string key; it retrieves by (org, course, run) tuple.
# As a result, it will handle read requests for a CCX key like
# ccx-v1:org.0+course_0+Run_0+branch@published-branch+ccx@1
# identically to the corresponding course key. This seems to be an oversight though, not an intentional
# feature, as the CCXModulestoreWrapper is supposed to "hide" CCX keys from the underlying modulestore.
# Anyhow, for compatibility we need to do the same:
if isinstance(key, CCXLocator):
log.warning(
f"A CCX key leaked through to the underlying modulestore, bypassing CCXModulestoreWrapper: {key}"
)
return self.get_course_index(key.to_course_locator(), ignore_case)
return None
def find_matching_course_indexes( # pylint: disable=arguments-differ
@@ -632,10 +639,6 @@ class DjangoFlexPersistenceBackend(MongoPersistenceBackend):
org_target: If specified, this is an ORG filter so that only course_indexes are
returned for the specified ORG
"""
#######################
# TEMP: as we migrate, we are currently reading from MongoDB only, but writing to both MySQL + MongoDB
force_mongo = True
#######################
if force_mongo:
# For data migration purposes, this argument will read from MongoDB instead of MySQL
return super().find_matching_course_indexes(
@@ -688,31 +691,17 @@ class DjangoFlexPersistenceBackend(MongoPersistenceBackend):
RequestCache(namespace="course_index_cache").clear()
course_index['last_update'] = datetime.datetime.now(pytz.utc)
# Find the SplitModulestoreCourseIndex entry that we'll be updating:
try:
index_obj = SplitModulestoreCourseIndex.objects.get(objectid=course_index["_id"])
except SplitModulestoreCourseIndex.DoesNotExist:
#######################
# TEMP: Maybe the data migration hasn't (completely) run yet?
data = SplitModulestoreCourseIndex.fields_from_v1_schema(course_index)
if super().get_course_index(data["course_id"]) is None:
raise # This course doesn't exist in MySQL or in MongoDB
# This course record exists in MongoDB but not yet in MySQL
index_obj = SplitModulestoreCourseIndex(**data)
if from_index:
index_obj.last_update = from_index["last_update"] # Make sure this won't get marked as a collision
#######################
index_obj = SplitModulestoreCourseIndex.objects.get(objectid=course_index["_id"])
# Check for collisions:
# Except this collision logic doesn't work when using both MySQL and MongoDB together, one for writes and one
# for reads, so we're temporarily deferring to Mongo's collision logic.
# if from_index and index_obj.last_update != from_index["last_update"]:
# # "last_update not only tells us when this course was last updated but also helps prevent collisions"
# log.warning(
# "Collision in Split Mongo when applying course index. This can happen in dev if django debug toolbar "
# "is enabled, as it slows down parallel queries. \nNew index was: %s\nFrom index was: %s",
# course_index, from_index,
# )
# return # Collision; skip this update
if from_index and index_obj.last_update != from_index["last_update"]:
# "last_update not only tells us when this course was last updated but also helps prevent collisions"
log.warning(
"Collision in Split Mongo when applying course index. This can happen in dev if django debug toolbar "
"is enabled, as it slows down parallel queries. New index was: %s",
course_index,
)
return # Collision; skip this update
# Apply updates to the index entry. While doing so, track which branch versions were changed (if any).
changed_branches = []
@@ -735,13 +724,10 @@ class DjangoFlexPersistenceBackend(MongoPersistenceBackend):
# which branch(es) were changed, not anything more useful than that.
index_obj._change_reason = f'Updated {" and ".join(changed_branches)} branch' # pylint: disable=protected-access
# Save the course index entry and create a historical record:
index_obj.save()
# TEMP: Also write to MongoDB, so we can switch back to using it if this new MySQL version doesn't work well:
mongo_updated = super().update_course_index(
course_index, from_index, course_context, last_update_already_set=True
)
if mongo_updated:
# Save the course index entry and create a historical record:
index_obj.save()
super().update_course_index(course_index, from_index, course_context, last_update_already_set=True)
def delete_course_index(self, course_key):
"""

View File

@@ -37,7 +37,7 @@ class TestLibraries(MixedSplitTestCase):
-> INSERT into SplitModulestoreCourseIndex to save the new library
-> INSERT a historical record of the SplitModulestoreCourseIndex
"""
with check_mongo_calls(2, 3), self.assertNumQueries(3):
with check_mongo_calls(0, 3), self.assertNumQueries(5):
LibraryFactory.create(modulestore=self.store)
def test_duplicate_library(self):

View File

@@ -368,7 +368,7 @@ class TestMixedModuleStore(CommonMixedModuleStoreSetup):
# fake: one w/ wildcard version
# split: has one lookup for the course and then one for the course items
# but the active_versions check is done in MySQL
@ddt.data((ModuleStoreEnum.Type.mongo, [1, 1], 0), (ModuleStoreEnum.Type.split, [2, 1], 0))
@ddt.data((ModuleStoreEnum.Type.mongo, [1, 1], 0), (ModuleStoreEnum.Type.split, [1, 1], 0))
@ddt.unpack
def test_has_item(self, default_ms, max_find, max_send):
self.initdb(default_ms)
@@ -391,17 +391,17 @@ class TestMixedModuleStore(CommonMixedModuleStoreSetup):
# split:
# problem: active_versions, structure
# non-existent problem: ditto
@ddt.data((ModuleStoreEnum.Type.mongo, 0, [3, 2], 0), (ModuleStoreEnum.Type.split, 0, [2, 1], 0))
@ddt.data((ModuleStoreEnum.Type.mongo, [0, 0], [3, 2], 0), (ModuleStoreEnum.Type.split, [1, 0], [1, 1], 0))
@ddt.unpack
def test_get_item(self, default_ms, num_mysql, max_find, max_send):
self.initdb(default_ms)
self._create_block_hierarchy()
with check_mongo_calls(max_find.pop(0), max_send), self.assertNumQueries(num_mysql):
with check_mongo_calls(max_find.pop(0), max_send), self.assertNumQueries(num_mysql.pop(0)):
assert self.store.get_item(self.problem_x1a_1) is not None # lint-amnesty, pylint: disable=no-member
# try negative cases
with check_mongo_calls(max_find.pop(0), max_send), self.assertNumQueries(num_mysql):
with check_mongo_calls(max_find.pop(0), max_send), self.assertNumQueries(num_mysql.pop(0)):
with pytest.raises(ItemNotFoundError):
self.store.get_item(self.fake_location)
@@ -414,7 +414,7 @@ class TestMixedModuleStore(CommonMixedModuleStoreSetup):
# Split:
# mysql: fetch course's active version from SplitModulestoreCourseIndex, spurious refetch x2
# find: get structure
@ddt.data((ModuleStoreEnum.Type.mongo, 0, 14, 0), (ModuleStoreEnum.Type.split, 0, 3, 0))
@ddt.data((ModuleStoreEnum.Type.mongo, 0, 14, 0), (ModuleStoreEnum.Type.split, 2, 1, 0))
@ddt.unpack
def test_get_items(self, default_ms, num_mysql, max_find, max_send):
self.initdb(default_ms)
@@ -917,7 +917,7 @@ class TestMixedModuleStore(CommonMixedModuleStoreSetup):
# mysql: SplitModulestoreCourseIndex - select 2x (by course_id, by objectid), update, update historical record
# Find: active_versions, 2 structures (published & draft), definition (unnecessary)
# Sends: updated draft and published structures and active_versions
@ddt.data((ModuleStoreEnum.Type.mongo, 0, 7, 2), (ModuleStoreEnum.Type.split, 3, 3, 3))
@ddt.data((ModuleStoreEnum.Type.mongo, 0, 7, 2), (ModuleStoreEnum.Type.split, 4, 2, 3))
@ddt.unpack
def test_delete_item(self, default_ms, num_mysql, max_find, max_send):
"""
@@ -946,7 +946,7 @@ class TestMixedModuleStore(CommonMixedModuleStoreSetup):
# mysql: SplitModulestoreCourseIndex - select 2x (by course_id, by objectid), update, update historical record
# find: draft and published structures, definition (unnecessary)
# sends: update published (why?), draft, and active_versions
@ddt.data((ModuleStoreEnum.Type.mongo, 0, 9, 2), (ModuleStoreEnum.Type.split, 3, 4, 3))
@ddt.data((ModuleStoreEnum.Type.mongo, 0, 9, 2), (ModuleStoreEnum.Type.split, 4, 3, 3))
@ddt.unpack
def test_delete_private_vertical(self, default_ms, num_mysql, max_find, max_send):
"""
@@ -1000,7 +1000,7 @@ class TestMixedModuleStore(CommonMixedModuleStoreSetup):
# mysql: SplitModulestoreCourseIndex - select 2x (by course_id, by objectid), update, update historical record
# find: structure (cached)
# send: update structure and active_versions
@ddt.data((ModuleStoreEnum.Type.mongo, 0, 4, 1), (ModuleStoreEnum.Type.split, 3, 2, 2))
@ddt.data((ModuleStoreEnum.Type.mongo, 0, 4, 1), (ModuleStoreEnum.Type.split, 4, 1, 2))
@ddt.unpack
def test_delete_draft_vertical(self, default_ms, num_mysql, max_find, max_send):
"""
@@ -1043,7 +1043,7 @@ class TestMixedModuleStore(CommonMixedModuleStoreSetup):
# executed twice, possibly unnecessarily)
# find: 2 reads of structure, definition (s/b lazy; so, unnecessary),
# plus 1 wildcard find in draft mongo which has none
@ddt.data((ModuleStoreEnum.Type.mongo, 0, 3, 0), (ModuleStoreEnum.Type.split, 0, 5, 0))
@ddt.data((ModuleStoreEnum.Type.mongo, 1, 2, 0), (ModuleStoreEnum.Type.split, 2, 3, 0))
@ddt.unpack
def test_get_courses(self, default_ms, num_mysql, max_find, max_send):
self.initdb(default_ms)
@@ -1083,7 +1083,7 @@ class TestMixedModuleStore(CommonMixedModuleStoreSetup):
# draft is 2: find out which ms owns course, get item
# split: active_versions (mysql), structure, definition (to load course wiki string)
@ddt.data((ModuleStoreEnum.Type.mongo, 0, 2, 0), (ModuleStoreEnum.Type.split, 0, 3, 0))
@ddt.data((ModuleStoreEnum.Type.mongo, 0, 2, 0), (ModuleStoreEnum.Type.split, 1, 2, 0))
@ddt.unpack
def test_get_course(self, default_ms, num_mysql, max_find, max_send):
"""
@@ -1120,7 +1120,7 @@ class TestMixedModuleStore(CommonMixedModuleStoreSetup):
# still only 2)
# Draft: get_parent
# Split: active_versions, structure
@ddt.data((ModuleStoreEnum.Type.mongo, 0, 1, 0), (ModuleStoreEnum.Type.split, 0, 2, 0))
@ddt.data((ModuleStoreEnum.Type.mongo, 0, 1, 0), (ModuleStoreEnum.Type.split, 1, 1, 0))
@ddt.unpack
def test_get_parent_locations(self, default_ms, num_mysql, max_find, max_send):
"""
@@ -1638,7 +1638,7 @@ class TestMixedModuleStore(CommonMixedModuleStoreSetup):
# 8-9. get vertical, compute inheritance
# 10-11. get other vertical_x1b (why?) and compute inheritance
# Split: loading structure from mongo (also loads active version from MySQL, not tracked here)
@ddt.data((ModuleStoreEnum.Type.mongo, 0, [12, 3], 0), (ModuleStoreEnum.Type.split, 0, [3, 1], 0))
@ddt.data((ModuleStoreEnum.Type.mongo, [0, 0], [12, 3], 0), (ModuleStoreEnum.Type.split, [1, 0], [2, 1], 0))
@ddt.unpack
def test_path_to_location(self, default_ms, num_mysql, num_finds, num_sends):
"""
@@ -1659,7 +1659,7 @@ class TestMixedModuleStore(CommonMixedModuleStoreSetup):
for location, expected in should_work:
# each iteration has different find count, pop this iter's find count
with check_mongo_calls(num_finds.pop(0), num_sends), self.assertNumQueries(num_mysql):
with check_mongo_calls(num_finds.pop(0), num_sends), self.assertNumQueries(num_mysql.pop(0)):
path = path_to_location(self.store, location)
assert path == expected
@@ -1878,7 +1878,7 @@ class TestMixedModuleStore(CommonMixedModuleStoreSetup):
# Draft: get all items which can be or should have parents
# Split: active_versions (mysql), structure (mongo)
@ddt.data((ModuleStoreEnum.Type.mongo, 0, 1, 0), (ModuleStoreEnum.Type.split, 0, 2, 0))
@ddt.data((ModuleStoreEnum.Type.mongo, 0, 1, 0), (ModuleStoreEnum.Type.split, 1, 1, 0))
@ddt.unpack
def test_get_orphans(self, default_ms, num_mysql, max_find, max_send):
"""
@@ -2015,7 +2015,7 @@ class TestMixedModuleStore(CommonMixedModuleStoreSetup):
# Draft: wildcard search of draft (find) and split (mysql)
# Split: wildcard search of draft (find) and split (mysql)
@ddt.data((ModuleStoreEnum.Type.mongo, 0, 2, 0), (ModuleStoreEnum.Type.split, 0, 2, 0))
@ddt.data((ModuleStoreEnum.Type.mongo, 1, 1, 0), (ModuleStoreEnum.Type.split, 1, 1, 0))
@ddt.unpack
def test_get_courses_for_wiki(self, default_ms, num_mysql, max_find, max_send):
"""
@@ -2046,7 +2046,7 @@ class TestMixedModuleStore(CommonMixedModuleStoreSetup):
# Sends:
# 1. insert structure
# 2. write index entry
@ddt.data((ModuleStoreEnum.Type.mongo, 0, 2, 6), (ModuleStoreEnum.Type.split, 3, 3, 2))
@ddt.data((ModuleStoreEnum.Type.mongo, 0, 2, 6), (ModuleStoreEnum.Type.split, 4, 2, 2))
@ddt.unpack
def test_unpublish(self, default_ms, num_mysql, max_find, max_send):
"""
@@ -2084,7 +2084,7 @@ class TestMixedModuleStore(CommonMixedModuleStoreSetup):
# Draft: specific query for revision None
# Split: active_versions from MySQL, structure from mongo
@ddt.data((ModuleStoreEnum.Type.mongo, 0, 1, 0), (ModuleStoreEnum.Type.split, 0, 2, 0))
@ddt.data((ModuleStoreEnum.Type.mongo, 0, 1, 0), (ModuleStoreEnum.Type.split, 1, 1, 0))
@ddt.unpack
def test_has_published_version(self, default_ms, mysql_queries, max_find, max_send):
"""
@@ -3785,7 +3785,7 @@ class TestAsidesWithMixedModuleStore(CommonMixedModuleStoreSetup):
assert asides2[0].field11 == 'aside1_default_value1'
assert asides2[0].field12 == 'aside1_default_value2'
@ddt.data((ModuleStoreEnum.Type.mongo, 1, 0), (ModuleStoreEnum.Type.split, 2, 0))
@ddt.data((ModuleStoreEnum.Type.mongo, 1, 0), (ModuleStoreEnum.Type.split, 1, 0))
@XBlockAside.register_temp_plugin(AsideFoo, 'test_aside1')
@patch('xmodule.modulestore.split_mongo.caching_descriptor_system.CachingDescriptorSystem.applicable_aside_types',
lambda self, block: ['test_aside1'])

View File

@@ -154,14 +154,14 @@ class CountMongoCallsCourseTraversal(TestCase):
(MIXED_OLD_MONGO_MODULESTORE_BUILDER, 0, True, False, 359),
# The line below shows the way this traversal *should* be done
# (if you'll eventually access all the fields and load all the definitions anyway).
(MIXED_SPLIT_MODULESTORE_BUILDER, None, False, True, 3),
(MIXED_SPLIT_MODULESTORE_BUILDER, None, True, True, 38),
(MIXED_SPLIT_MODULESTORE_BUILDER, 0, False, True, 38),
(MIXED_SPLIT_MODULESTORE_BUILDER, 0, True, True, 38),
(MIXED_SPLIT_MODULESTORE_BUILDER, None, False, False, 3),
(MIXED_SPLIT_MODULESTORE_BUILDER, None, True, False, 3),
(MIXED_SPLIT_MODULESTORE_BUILDER, 0, False, False, 3),
(MIXED_SPLIT_MODULESTORE_BUILDER, 0, True, False, 3),
(MIXED_SPLIT_MODULESTORE_BUILDER, None, False, True, 2),
(MIXED_SPLIT_MODULESTORE_BUILDER, None, True, True, 37),
(MIXED_SPLIT_MODULESTORE_BUILDER, 0, False, True, 37),
(MIXED_SPLIT_MODULESTORE_BUILDER, 0, True, True, 37),
(MIXED_SPLIT_MODULESTORE_BUILDER, None, False, False, 2),
(MIXED_SPLIT_MODULESTORE_BUILDER, None, True, False, 2),
(MIXED_SPLIT_MODULESTORE_BUILDER, 0, False, False, 2),
(MIXED_SPLIT_MODULESTORE_BUILDER, 0, True, False, 2),
)
@ddt.unpack
def test_number_mongo_calls(self, store_builder, depth, lazy, access_all_block_fields, num_mongo_calls):
@@ -178,7 +178,7 @@ class CountMongoCallsCourseTraversal(TestCase):
@ddt.data(
(MIXED_OLD_MONGO_MODULESTORE_BUILDER, 176),
(MIXED_SPLIT_MODULESTORE_BUILDER, 4),
(MIXED_SPLIT_MODULESTORE_BUILDER, 3),
)
@ddt.unpack
def test_lazy_when_course_previously_cached(self, store_builder, num_mongo_calls):

View File

@@ -234,22 +234,22 @@ class TestFieldOverrideSplitPerformance(FieldOverridePerformanceTestCase):
__test__ = True
# TODO: decrease query count as part of REVO-28
QUERY_COUNT = 31
QUERY_COUNT = 32
TEST_DATA = {
('no_overrides', 1, True, False): (QUERY_COUNT, 3),
('no_overrides', 2, True, False): (QUERY_COUNT, 3),
('no_overrides', 3, True, False): (QUERY_COUNT, 3),
('ccx', 1, True, False): (QUERY_COUNT, 3),
('ccx', 2, True, False): (QUERY_COUNT, 3),
('ccx', 3, True, False): (QUERY_COUNT, 3),
('ccx', 1, True, True): (QUERY_COUNT + 2, 3),
('ccx', 2, True, True): (QUERY_COUNT + 2, 3),
('ccx', 3, True, True): (QUERY_COUNT + 2, 3),
('no_overrides', 1, False, False): (QUERY_COUNT, 3),
('no_overrides', 2, False, False): (QUERY_COUNT, 3),
('no_overrides', 3, False, False): (QUERY_COUNT, 3),
('ccx', 1, False, False): (QUERY_COUNT, 3),
('ccx', 2, False, False): (QUERY_COUNT, 3),
('ccx', 3, False, False): (QUERY_COUNT, 3),
('no_overrides', 1, True, False): (QUERY_COUNT, 2),
('no_overrides', 2, True, False): (QUERY_COUNT, 2),
('no_overrides', 3, True, False): (QUERY_COUNT, 2),
('ccx', 1, True, False): (QUERY_COUNT, 2),
('ccx', 2, True, False): (QUERY_COUNT, 2),
('ccx', 3, True, False): (QUERY_COUNT, 2),
('ccx', 1, True, True): (QUERY_COUNT + 2, 2),
('ccx', 2, True, True): (QUERY_COUNT + 2, 2),
('ccx', 3, True, True): (QUERY_COUNT + 2, 2),
('no_overrides', 1, False, False): (QUERY_COUNT, 2),
('no_overrides', 2, False, False): (QUERY_COUNT, 2),
('no_overrides', 3, False, False): (QUERY_COUNT, 2),
('ccx', 1, False, False): (QUERY_COUNT, 2),
('ccx', 2, False, False): (QUERY_COUNT, 2),
('ccx', 3, False, False): (QUERY_COUNT, 2),
}

View File

@@ -43,7 +43,7 @@ class TestCCX(ModuleStoreTestCase):
def test_ccx_course_caching(self):
"""verify that caching the propery works to limit queries"""
with check_mongo_calls(3):
with check_mongo_calls(2):
# these statements are used entirely to demonstrate the
# instance-level caching of these values on CCX objects. The
# check_mongo_calls context is the point here.
@@ -69,7 +69,7 @@ class TestCCX(ModuleStoreTestCase):
"""verify that caching the start property works to limit queries"""
now = datetime.now(utc)
self.set_ccx_override('start', now)
with check_mongo_calls(3):
with check_mongo_calls(2):
# these statements are used entirely to demonstrate the
# instance-level caching of these values on CCX objects. The
# check_mongo_calls context is the point here.
@@ -94,7 +94,7 @@ class TestCCX(ModuleStoreTestCase):
"""verify that caching the due property works to limit queries"""
expected = datetime.now(utc)
self.set_ccx_override('due', expected)
with check_mongo_calls(3):
with check_mongo_calls(2):
# these statements are used entirely to demonstrate the
# instance-level caching of these values on CCX objects. The
# check_mongo_calls context is the point here.

View File

@@ -177,7 +177,7 @@ class MilestonesTransformerTestCase(CourseStructureTestCase, MilestonesTestCaseM
self.user,
)
with self.assertNumQueries(6):
with self.assertNumQueries(7):
self.get_blocks_and_check_against_expected(self.user, self.ALL_BLOCKS_EXCEPT_SPECIAL)
def test_staff_access(self):

View File

@@ -410,8 +410,8 @@ class SelfPacedCourseInfoTestCase(LoginEnrollmentTestCase, SharedModuleStoreTest
def test_num_queries_instructor_paced(self):
# TODO: decrease query count as part of REVO-28
self.fetch_course_info_with_queries(self.instructor_paced_course, 41, 3)
self.fetch_course_info_with_queries(self.instructor_paced_course, 42, 2)
def test_num_queries_self_paced(self):
# TODO: decrease query count as part of REVO-28
self.fetch_course_info_with_queries(self.self_paced_course, 41, 3)
self.fetch_course_info_with_queries(self.self_paced_course, 42, 2)

View File

@@ -89,7 +89,7 @@ class CoursesTest(ModuleStoreTestCase):
assert not error.value.access_response.has_access
@ddt.data(
(GET_COURSE_WITH_ACCESS, 3),
(GET_COURSE_WITH_ACCESS, 2),
(GET_COURSE_OVERVIEW_WITH_ACCESS, 0),
)
@ddt.unpack

View File

@@ -431,13 +431,14 @@ class TestXBlockQueryLoad(SharedModuleStoreTestCase):
discussion_target='Target Discussion',
))
# 5 queries are required to do first discussion xblock render:
# 7 queries are required to do first discussion xblock render:
# * split_modulestore_django_splitmodulestorecourseindex x2
# * waffle_utils_wafflecourseoverridemodel
# * waffle_utils_waffleorgoverridemodel
# * waffle_flag
# * django_comment_client_role
# * lms_xblock_xblockasidesconfig
num_queries = 5
num_queries = 7
for discussion in discussions:
discussion_xblock = get_module_for_descriptor_internal(
user=user,

View File

@@ -2258,16 +2258,12 @@ class LMSXBlockServiceBindingTest(SharedModuleStoreTestCase):
Tests that the LMS Module System (XBlock Runtime) provides an expected set of services.
"""
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.course = CourseFactory.create()
def setUp(self):
"""
Set up the user and other fields that will be used to instantiate the runtime.
"""
super().setUp()
self.course = CourseFactory.create()
self.user = UserFactory()
self.student_data = Mock()
self.track_function = Mock()
@@ -2346,15 +2342,10 @@ class TestFilteredChildren(SharedModuleStoreTestCase):
Tests that verify access to XBlock/XModule children work correctly
even when those children are filtered by the runtime when loaded.
"""
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.course = CourseFactory.create()
# pylint: disable=attribute-defined-outside-init
def setUp(self):
super().setUp()
self.course = CourseFactory.create()
self.users = {number: UserFactory() for number in USER_NUMBERS}
self._old_has_access = render.has_access

View File

@@ -412,8 +412,8 @@ class IndexQueryTestCase(ModuleStoreTestCase):
self.client.login(username=self.user.username, password=TEST_PASSWORD)
CourseEnrollment.enroll(self.user, course.id)
with self.assertNumQueries(205, table_blacklist=QUERY_COUNT_TABLE_BLACKLIST):
with check_mongo_calls(4):
with self.assertNumQueries(206, table_blacklist=QUERY_COUNT_TABLE_BLACKLIST):
with check_mongo_calls(3):
url = reverse(
'courseware_section',
kwargs={
@@ -1584,8 +1584,8 @@ class ProgressPageTests(ProgressPageBaseTests):
self.assertContains(resp, "Download Your Certificate")
@ddt.data(
(True, 52),
(False, 52),
(True, 53),
(False, 53),
)
@ddt.unpack
def test_progress_queries_paced_courses(self, self_paced, query_count):
@@ -1593,13 +1593,13 @@ class ProgressPageTests(ProgressPageBaseTests):
# TODO: decrease query count as part of REVO-28
ContentTypeGatingConfig.objects.create(enabled=True, enabled_as_of=datetime(2018, 1, 1))
self.setup_course(self_paced=self_paced)
with self.assertNumQueries(query_count, table_blacklist=QUERY_COUNT_TABLE_BLACKLIST), check_mongo_calls(3):
with self.assertNumQueries(query_count, table_blacklist=QUERY_COUNT_TABLE_BLACKLIST), check_mongo_calls(2):
self._get_progress_page()
@patch.dict(settings.FEATURES, {'ASSUME_ZERO_GRADE_IF_ABSENT_FOR_ALL_TESTS': False})
@ddt.data(
(False, 60, 42),
(True, 52, 36)
(False, 61, 43),
(True, 53, 37)
)
@ddt.unpack
def test_progress_queries(self, enable_waffle, initial, subsequent):
@@ -1608,14 +1608,14 @@ class ProgressPageTests(ProgressPageBaseTests):
with override_waffle_switch(grades_waffle_switch(ASSUME_ZERO_GRADE_IF_ABSENT), active=enable_waffle):
with self.assertNumQueries(
initial, table_blacklist=QUERY_COUNT_TABLE_BLACKLIST
), check_mongo_calls(3):
), check_mongo_calls(2):
self._get_progress_page()
# subsequent accesses to the progress page require fewer queries.
for _ in range(2):
with self.assertNumQueries(
subsequent, table_blacklist=QUERY_COUNT_TABLE_BLACKLIST
), check_mongo_calls(3):
), check_mongo_calls(2):
self._get_progress_page()
@ddt.data(

View File

@@ -161,8 +161,8 @@ class RenderXBlockTestMixin(MasqueradeMixin, metaclass=ABCMeta):
return response
@ddt.data(
('vertical_block', 5),
('html_block', 5),
('vertical_block', 4),
('html_block', 4),
)
@ddt.unpack
def test_courseware_html(self, block_name, mongo_calls):
@@ -193,12 +193,11 @@ class RenderXBlockTestMixin(MasqueradeMixin, metaclass=ABCMeta):
self.setup_user(admin=True, enroll=True, login=True)
# The 5 mongoDB calls include calls for
# (1) course_index - bulk_operation call
# (2) structure - get_course_with_access
# (3) definition - get_course_with_access
# (4) definition - HTML block
# (5) definition - edx_notes decorator (original_get_html)
with check_mongo_calls(5):
# (1) structure - get_course_with_access
# (2) definition - get_course_with_access
# (3) definition - HTML block
# (4) definition - edx_notes decorator (original_get_html)
with check_mongo_calls(4):
self.verify_response()
def test_success_unenrolled_staff(self):

View File

@@ -403,7 +403,7 @@ class ViewsQueryCountTestCase(
return inner
@ddt.data(
(ModuleStoreEnum.Type.split, 3, 11, 39),
(ModuleStoreEnum.Type.split, 3, 8, 42),
)
@ddt.unpack
@count_queries
@@ -411,7 +411,7 @@ class ViewsQueryCountTestCase(
self.create_thread_helper(mock_request)
@ddt.data(
(ModuleStoreEnum.Type.split, 3, 9, 35),
(ModuleStoreEnum.Type.split, 3, 6, 38),
)
@ddt.unpack
@count_queries

View File

@@ -780,10 +780,10 @@ class CourseTopicsViewTest(DiscussionAPIViewTestMixin, CommentsServiceMockMixin,
)
@ddt.data(
(2, ModuleStoreEnum.Type.split, 3, {"Test Topic 1": {"id": "test_topic_1"}}),
(2, ModuleStoreEnum.Type.split, 3,
(2, ModuleStoreEnum.Type.split, 2, {"Test Topic 1": {"id": "test_topic_1"}}),
(2, ModuleStoreEnum.Type.split, 2,
{"Test Topic 1": {"id": "test_topic_1"}, "Test Topic 2": {"id": "test_topic_2"}}),
(10, ModuleStoreEnum.Type.split, 3, {"Test Topic 1": {"id": "test_topic_1"}}),
(10, ModuleStoreEnum.Type.split, 2, {"Test Topic 1": {"id": "test_topic_1"}}),
)
@ddt.unpack
def test_bulk_response(self, modules_count, module_store, mongo_calls, topics):

View File

@@ -316,7 +316,7 @@ class TestGradeIteration(SharedModuleStoreTestCase):
else mock_course_grade.return_value
for student in self.students
]
with self.assertNumQueries(8):
with self.assertNumQueries(9):
all_course_grades, all_errors = self._course_grades_and_errors_for(self.course, self.students)
assert {student: str(all_errors[student]) for student in all_errors} == {
student3: 'Error for student3.',

View File

@@ -165,7 +165,6 @@ class RecalculateSubsectionGradeTest(HasCourseWithProblemsMixin, ModuleStoreTest
(ModuleStoreEnum.Type.mongo, 1, 42, False),
(ModuleStoreEnum.Type.split, 2, 42, True),
(ModuleStoreEnum.Type.split, 2, 42, False),
)
@ddt.unpack
def test_query_counts(self, default_store, num_mongo_calls, num_sql_calls, create_multiple_subsections):

View File

@@ -429,7 +429,7 @@ class MultiProblemModulestoreAccessTestCase(CourseStructureTestCase, SharedModul
self.client.login(username=self.student.username, password=password)
@ddt.data(
(ModuleStoreEnum.Type.split, 3),
(ModuleStoreEnum.Type.split, 2),
(ModuleStoreEnum.Type.mongo, 2),
)
@ddt.unpack

View File

@@ -687,6 +687,6 @@ class TestInstructorDashboardPerformance(ModuleStoreTestCase, LoginEnrollmentTes
# check MongoDB calls count
url = reverse('spoc_gradebook', kwargs={'course_id': self.course.id})
with check_mongo_calls(7):
with check_mongo_calls(6):
response = self.client.get(url)
assert response.status_code == 200

View File

@@ -371,7 +371,7 @@ class TestInstructorGradeReport(InstructorGradeReportTestCase):
@ddt.data(
(ModuleStoreEnum.Type.mongo, 4, 47),
(ModuleStoreEnum.Type.split, 3, 47),
(ModuleStoreEnum.Type.split, 2, 48),
)
@ddt.unpack
def test_query_counts(self, store_type, mongo_count, expected_query_count):

View File

@@ -75,7 +75,7 @@ class TopicSerializerTestCase(SerializerTestCase):
Verifies that the `TopicSerializer` correctly displays a topic with a
team count of 0, and that it takes a known number of SQL queries.
"""
with self.assertNumQueries(1):
with self.assertNumQueries(3): # 2 split modulestore MySQL queries, 1 for Teams
serializer = TopicSerializer(
self.course.teamsets[0].cleaned_data,
context={'course_id': self.course.id},
@@ -91,7 +91,7 @@ class TopicSerializerTestCase(SerializerTestCase):
CourseTeamFactory.create(
course_id=self.course.id, topic_id=self.course.teamsets[0].teamset_id
)
with self.assertNumQueries(1):
with self.assertNumQueries(3): # 2 split modulestore MySQL queries, 1 for Teams
serializer = TopicSerializer(
self.course.teamsets[0].cleaned_data,
context={'course_id': self.course.id},
@@ -110,7 +110,7 @@ class TopicSerializerTestCase(SerializerTestCase):
)
CourseTeamFactory.create(course_id=self.course.id, topic_id=duplicate_topic['id'])
CourseTeamFactory.create(course_id=second_course.id, topic_id=duplicate_topic['id'])
with self.assertNumQueries(1):
with self.assertNumQueries(3): # 2 split modulestore MySQL queries, 1 for Teams
serializer = TopicSerializer(
self.course.teamsets[0].cleaned_data,
context={'course_id': self.course.id},
@@ -251,7 +251,7 @@ class BulkTeamCountTopicSerializerTestCase(BaseTopicSerializerTestCase):
request = RequestFactory().get('/api/team/v0/topics')
request.user = self.user
with self.assertNumQueries(num_queries):
with self.assertNumQueries(num_queries + 2): # num_queries on teams tables, plus 2 split modulestore queries
serializer = self.serializer(
topics,
context={

View File

@@ -118,7 +118,7 @@ class BookmarksAPITests(BookmarkApiEventTestMixin, BookmarksTestsBase):
"""
assert len(api.get_bookmarks(user=self.user, course_key=self.course.id)) == 4
with self.assertNumQueries(9):
with self.assertNumQueries(10):
bookmark_data = api.create_bookmark(user=self.user, usage_key=self.vertical_2.location)
self.assert_bookmark_event_emitted(
@@ -139,7 +139,7 @@ class BookmarksAPITests(BookmarkApiEventTestMixin, BookmarksTestsBase):
"""
assert len(api.get_bookmarks(user=self.user, course_key=self.course.id)) == 4
with self.assertNumQueries(9):
with self.assertNumQueries(10):
bookmark_data = api.create_bookmark(user=self.user, usage_key=self.vertical_2.location)
self.assert_bookmark_event_emitted(

View File

@@ -269,11 +269,11 @@ class BookmarkModelTests(BookmarksTestsBase):
}
@ddt.data(
('course', [], 3),
('chapter_1', [], 2),
('sequential_1', ['chapter_1'], 2),
('vertical_1', ['chapter_1', 'sequential_1'], 2),
('html_1', ['chapter_1', 'sequential_2', 'vertical_2'], 2),
('course', [], 2),
('chapter_1', [], 1),
('sequential_1', ['chapter_1'], 1),
('vertical_1', ['chapter_1', 'sequential_1'], 1),
('html_1', ['chapter_1', 'sequential_2', 'vertical_2'], 1),
)
@ddt.unpack
def test_path_and_queries_on_create(self, block_to_bookmark, ancestors_attrs, expected_mongo_calls):

View File

@@ -69,7 +69,7 @@ class BookmarksServiceTests(BookmarksTestsBase):
assert not self.bookmark_service\
.set_bookmarked(usage_key=UsageKey.from_string('i4x://ed/ed/ed/interactive'))
with self.assertNumQueries(9):
with self.assertNumQueries(10):
assert self.bookmark_service.set_bookmarked(usage_key=self.vertical_2.location)
def test_unset_bookmarked(self):

View File

@@ -101,10 +101,10 @@ class XBlockCacheTaskTests(BookmarksTestsBase):
}
@ddt.data(
(2, 2, 3),
(4, 2, 3),
(2, 3, 3),
(2, 4, 3),
(2, 2, 2),
(4, 2, 2),
(2, 3, 2),
(2, 4, 2),
)
@ddt.unpack
def test_calculate_course_xblocks_data_queries(self, children_per_block, depth, expected_mongo_calls):
@@ -137,8 +137,8 @@ class XBlockCacheTaskTests(BookmarksTestsBase):
assert path_item['usage_key'] == expected_cache_data[usage_key][path_index][path_item_index]
@ddt.data(
('course', 36),
('other_course', 34)
('course', 37),
('other_course', 35)
)
@ddt.unpack
def test_update_xblocks_cache(self, course_attr, expected_sql_queries):

View File

@@ -663,7 +663,7 @@ class CreditRequirementApiTests(CreditApiTestBase):
assert not api.is_user_eligible_for_credit(user.username, self.course_key)
# Satisfy the other requirement
with self.assertNumQueries(20):
with self.assertNumQueries(22):
api.set_credit_requirement_status(
user,
self.course_key,

View File

@@ -522,8 +522,8 @@ class EnrollmentTest(EnrollmentTestMixin, ModuleStoreTestCase, APITestCase, Ente
self.assert_enrollment_status(
course_id='entirely/fake/course',
expected_status=status.HTTP_400_BAD_REQUEST,
min_mongo_calls=3,
max_mongo_calls=4
min_mongo_calls=2,
max_mongo_calls=3
)
def test_get_enrollment_details_bad_course(self):

View File

@@ -249,7 +249,7 @@ class TestCourseNextSectionUpdateResolver(SchedulesResolverTestMixin, ModuleStor
def test_schedule_context(self):
resolver = self.create_resolver()
# using this to make sure the select_related stays intact
with self.assertNumQueries(38):
with self.assertNumQueries(41):
sc = resolver.get_schedules()
schedules = list(sc)

View File

@@ -205,8 +205,8 @@ class TestCourseHomePage(CourseHomePageTestCase): # lint-amnesty, pylint: disab
# Fetch the view and verify the query counts
# TODO: decrease query count as part of REVO-28
with self.assertNumQueries(65, table_blacklist=QUERY_COUNT_TABLE_BLACKLIST):
with check_mongo_calls(4):
with self.assertNumQueries(66, table_blacklist=QUERY_COUNT_TABLE_BLACKLIST):
with check_mongo_calls(3):
url = course_home_url(self.course)
self.client.get(url)

View File

@@ -49,7 +49,7 @@ class TestCourseUpdatesPage(BaseCourseUpdatesTestCase):
# Fetch the view and verify that the query counts haven't changed
# TODO: decrease query count as part of REVO-28
with self.assertNumQueries(51, table_blacklist=QUERY_COUNT_TABLE_BLACKLIST):
with check_mongo_calls(4):
with self.assertNumQueries(52, table_blacklist=QUERY_COUNT_TABLE_BLACKLIST):
with check_mongo_calls(3):
url = course_updates_url(self.course)
self.client.get(url)

View File

@@ -63,7 +63,7 @@ class SubmitCompletionTestCase(CompletionSetUpMixin, TestCase):
def test_changed_value(self):
with self.assertNumQueries(SELECT + UPDATE + 2 * SAVEPOINT + 2 * OTHER):
# OTHER = user exists, completion exists, 2x look up course in splitmodulestorecourseindex
# OTHER = user exists, completion exists
completion, isnew = models.BlockCompletion.objects.submit_completion(
user=self.user,
block_key=self.block_key,
@@ -88,7 +88,7 @@ class SubmitCompletionTestCase(CompletionSetUpMixin, TestCase):
def test_new_user(self):
newuser = UserFactory()
with self.assertNumQueries(SELECT + UPDATE + 4 * SAVEPOINT + 0 * OTHER):
with self.assertNumQueries(SELECT + UPDATE + 4 * SAVEPOINT):
_, isnew = models.BlockCompletion.objects.submit_completion(
user=newuser,
block_key=self.block_key,
@@ -99,7 +99,7 @@ class SubmitCompletionTestCase(CompletionSetUpMixin, TestCase):
def test_new_block(self):
newblock = UsageKey.from_string('block-v1:edx+test+run+type@video+block@puppers')
with self.assertNumQueries(SELECT + UPDATE + 4 * SAVEPOINT + 0 * OTHER):
with self.assertNumQueries(SELECT + UPDATE + 4 * SAVEPOINT):
_, isnew = models.BlockCompletion.objects.submit_completion(
user=self.user,
block_key=newblock,