Add error handling in course structure cache.

When going between Python 2 and Python 3.5 we can get pickled course
structures that are incompatible no matter what we do, due to the bug
linked in the comment.  In this case, handle the error and delete the
corrupt data from the cache.

Making this fairly generic because if we have any bad data in the cache
we don't want it to blow up the whole process.  Just delete the bad data
and try again.
This commit is contained in:
Feanil Patel
2019-12-05 14:37:18 -05:00
parent 009b96d4db
commit 684f254a77
2 changed files with 28 additions and 15 deletions

View File

@@ -226,24 +226,30 @@ class CourseStructureCache(object):
return None
with TIMER.timer("CourseStructureCache.get", course_context) as tagger:
compressed_pickled_data = self.cache.get(key)
tagger.tag(from_cache=str(compressed_pickled_data is not None).lower())
try:
compressed_pickled_data = self.cache.get(key)
tagger.tag(from_cache=str(compressed_pickled_data is not None).lower())
if compressed_pickled_data is None:
# Always log cache misses, because they are unexpected
tagger.sample_rate = 1
if compressed_pickled_data is None:
# Always log cache misses, because they are unexpected
tagger.sample_rate = 1
return None
tagger.measure('compressed_size', len(compressed_pickled_data))
pickled_data = zlib.decompress(compressed_pickled_data)
tagger.measure('uncompressed_size', len(pickled_data))
if six.PY2:
return pickle.loads(pickled_data)
else:
return pickle.loads(pickled_data, encoding='latin-1')
except Exception:
# The cached data is corrupt in some way, get rid of it.
log.warning("CourseStructureCache: Bad data in cache for %s", course_context)
self.cache.delete(key)
return None
tagger.measure('compressed_size', len(compressed_pickled_data))
pickled_data = zlib.decompress(compressed_pickled_data)
tagger.measure('uncompressed_size', len(pickled_data))
if six.PY2:
return pickle.loads(pickled_data)
else:
return pickle.loads(pickled_data, encoding='latin-1')
def set(self, key, structure, course_context=None):
"""Given a structure, will pickle, compress, and write to cache."""
if self.cache is None:

View File

@@ -977,6 +977,13 @@ class TestCourseStructureCache(SplitModuleTest):
# now make sure that you get the same structure
self.assertEqual(cached_structure, not_cached_structure)
# If data is corrupted, get it from mongo again.
cache_key = self.new_course.id.version_guid
self.cache.set(cache_key, b"bad_data")
with check_mongo_calls(1):
not_cached_structure = self._get_structure(self.new_course)
@patch('xmodule.modulestore.split_mongo.mongo_connection.get_cache')
def test_course_structure_cache_no_cache_configured(self, mock_get_cache):
mock_get_cache.side_effect = InvalidCacheBackendError