chore: add log statement >=1MB data (#32821)

This commit is contained in:
Muhammad Umar Khan
2023-07-24 20:58:46 +05:00
committed by GitHub
parent 35ff87f68d
commit 25afbb194e
2 changed files with 42 additions and 1 deletions

View File

@@ -242,8 +242,13 @@ class CourseStructureCache:
# 1 = Fastest (slightly larger results)
compressed_pickled_data = zlib.compress(pickled_data, 1)
tagger.measure('compressed_size', len(compressed_pickled_data))
data_size = len(compressed_pickled_data)
tagger.measure('compressed_size', data_size)
total_bytes_in_one_mb = 1024 * 1024
# only print logs when data size is greater than or equal to 1MB
if data_size >= total_bytes_in_one_mb:
log.info('Data to be cached is: {:.2f} MB'.format(data_size / total_bytes_in_one_mb))
# Structures are immutable, so we set a timeout of "never"
self.cache.set(key, compressed_pickled_data, None)

View File

@@ -16,6 +16,7 @@ import ddt
from ccx_keys.locator import CCXBlockUsageLocator
from django.core.cache import InvalidCacheBackendError, caches
from opaque_keys.edx.locator import BlockUsageLocator, CourseKey, CourseLocator, LocalId
from testfixtures import LogCapture
from xblock.fields import Reference, ReferenceList, ReferenceValueDict
from openedx.core.djangolib.testing.utils import CacheIsolationMixin
@@ -34,6 +35,7 @@ from xmodule.modulestore.exceptions import (
)
from xmodule.modulestore.inheritance import InheritanceMixin
from xmodule.modulestore.split_mongo import BlockKey
from xmodule.modulestore.split_mongo.mongo_connection import CourseStructureCache
from xmodule.modulestore.split_mongo.split import SplitMongoModuleStore
from xmodule.modulestore.tests.factories import check_mongo_calls
from xmodule.modulestore.tests.mongo_connection import MONGO_HOST, MONGO_PORT_NUM
@@ -840,6 +842,40 @@ class TestCourseStructureCache(CacheIsolationMixin, SplitModuleTest):
# now make sure that you get the same structure
assert cached_structure == not_cached_structure
@patch('xmodule.modulestore.split_mongo.mongo_connection.get_cache')
def test_course_structure_cache_with_data_chunk_greater_than_one_mb(self, mock_get_cache):
    """
    Verify that an INFO log line is emitted when the compressed payload
    written to the course structure cache is at least 1MB.
    """
    # Point the structure cache at the real default cache backend.
    mock_get_cache.return_value = caches['default']
    structure_cache = CourseStructureCache()

    # A highly compressible 300,000,000-byte blob; after zlib compression it
    # still lands above the 1MB logging threshold (at 1.25 MB).
    payload = b'\x00' * 300000000
    logger_name = 'xmodule.modulestore.split_mongo.mongo_connection'
    expected_message = 'Data to be cached is: 1.25 MB'

    with LogCapture(logger_name) as capture:
        structure_cache.set('my_data_chunk', payload)

    record = capture.records[0]
    self.assertEqual(record.name, logger_name)
    self.assertEqual(record.msg, expected_message)
    self.assertEqual(record.levelname, 'INFO')
@patch('xmodule.modulestore.split_mongo.mongo_connection.get_cache')
def test_course_structure_cache_with_data_chunk_lesser_than_one_mb(self, mock_get_cache):
    """
    Verify that no log line is emitted when the payload written to the
    course structure cache compresses to under 1MB.
    """
    # Point the structure cache at the real default cache backend.
    mock_get_cache.return_value = caches['default']
    structure_cache = CourseStructureCache()

    # 30,000 bytes — well below the 1MB logging threshold once compressed.
    payload = b'\x00' * 30000
    logger_name = 'xmodule.modulestore.split_mongo.mongo_connection'

    with LogCapture(logger_name) as capture:
        structure_cache.set('my_data_chunk', payload)

    # Nothing should have been logged for a sub-1MB chunk.
    self.assertEqual(len(capture.records), 0)
def _get_structure(self, course):
"""
Helper function to get a structure from a course.