From 684f254a7764f5bdc8de122232483a75c81ab7ac Mon Sep 17 00:00:00 2001
From: Feanil Patel
Date: Thu, 5 Dec 2019 14:37:18 -0500
Subject: [PATCH] Add error handling in course structure cache.

When going between python 2 and python 3.5 we can get pickled course
structures that are incompatible no matter what we do, due to the bug
linked in the comment.

In this case, handle the error and delete the corrupt data from the cache.
Making this fairly generic because if we have any bad data in cache we
don't want it to blow up the whole process. Just delete the bad data and
try again.
---
 .../split_mongo/mongo_connection.py           | 36 +++++++++++--------
 .../tests/test_split_modulestore.py           |  7 ++++
 2 files changed, 28 insertions(+), 15 deletions(-)

diff --git a/common/lib/xmodule/xmodule/modulestore/split_mongo/mongo_connection.py b/common/lib/xmodule/xmodule/modulestore/split_mongo/mongo_connection.py
index 928566e179..cdfd854d09 100644
--- a/common/lib/xmodule/xmodule/modulestore/split_mongo/mongo_connection.py
+++ b/common/lib/xmodule/xmodule/modulestore/split_mongo/mongo_connection.py
@@ -226,24 +226,30 @@ class CourseStructureCache(object):
             return None
 
         with TIMER.timer("CourseStructureCache.get", course_context) as tagger:
-            compressed_pickled_data = self.cache.get(key)
-            tagger.tag(from_cache=str(compressed_pickled_data is not None).lower())
+            try:
+                compressed_pickled_data = self.cache.get(key)
+                tagger.tag(from_cache=str(compressed_pickled_data is not None).lower())
 
-            if compressed_pickled_data is None:
-                # Always log cache misses, because they are unexpected
-                tagger.sample_rate = 1
+                if compressed_pickled_data is None:
+                    # Always log cache misses, because they are unexpected
+                    tagger.sample_rate = 1
+                    return None
+
+                tagger.measure('compressed_size', len(compressed_pickled_data))
+
+                pickled_data = zlib.decompress(compressed_pickled_data)
+                tagger.measure('uncompressed_size', len(pickled_data))
+
+                if six.PY2:
+                    return pickle.loads(pickled_data)
+                else:
+                    return pickle.loads(pickled_data, encoding='latin-1')
+            except Exception:
+                # The cached data is corrupt in some way, get rid of it.
+                log.warning("CourseStructureCache: Bad data in cache for %s", course_context)
+                self.cache.delete(key)
                 return None
 
-            tagger.measure('compressed_size', len(compressed_pickled_data))
-
-            pickled_data = zlib.decompress(compressed_pickled_data)
-            tagger.measure('uncompressed_size', len(pickled_data))
-
-            if six.PY2:
-                return pickle.loads(pickled_data)
-            else:
-                return pickle.loads(pickled_data, encoding='latin-1')
-
     def set(self, key, structure, course_context=None):
         """Given a structure, will pickle, compress, and write to cache."""
         if self.cache is None:
diff --git a/common/lib/xmodule/xmodule/modulestore/tests/test_split_modulestore.py b/common/lib/xmodule/xmodule/modulestore/tests/test_split_modulestore.py
index 6b72e27e81..d8de53452d 100644
--- a/common/lib/xmodule/xmodule/modulestore/tests/test_split_modulestore.py
+++ b/common/lib/xmodule/xmodule/modulestore/tests/test_split_modulestore.py
@@ -977,6 +977,13 @@ class TestCourseStructureCache(SplitModuleTest):
         # now make sure that you get the same structure
         self.assertEqual(cached_structure, not_cached_structure)
 
+        # If data is corrupted, get it from mongo again.
+        cache_key = self.new_course.id.version_guid
+        self.cache.set(cache_key, b"bad_data")
+        with check_mongo_calls(1):
+            not_cached_structure = self._get_structure(self.new_course)
+
+
     @patch('xmodule.modulestore.split_mongo.mongo_connection.get_cache')
     def test_course_structure_cache_no_cache_configured(self, mock_get_cache):
        mock_get_cache.side_effect = InvalidCacheBackendError