diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 73cfd9147c..f665a21518 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -17,6 +17,11 @@ Common: Switch from mitx.db to edx.db for sqlite databases. This will effectivel reset state for local instances of the code, unless you manually rename your mitx.db file to edx.db. +Common: significant performance improvement for authorization checks and location translations. + Ensure all auth checks, check all possible permutations of the auth key (Instructor dashboard + now shows when it should for all courses in lms). + Made queries for Studio dashboard 2 orders of magnitude faster (and fewer). + Blades: Video Transcripts: Fix clear and download buttons. BLD-438. Common: Switch over from MITX_FEATURES to just FEATURES. To override items in diff --git a/cms/djangoapps/auth/authz.py b/cms/djangoapps/auth/authz.py index a5d00e14c1..19c00f832a 100644 --- a/cms/djangoapps/auth/authz.py +++ b/cms/djangoapps/auth/authz.py @@ -63,7 +63,7 @@ def get_all_course_role_groupnames(location, role, use_filter=True): # filter to the ones which exist default = groupnames[0] if use_filter: - groupnames = [group for group in groupnames if Group.objects.filter(name=group).exists()] + groupnames = [group.name for group in Group.objects.filter(name__in=groupnames)] return groupnames, default @@ -203,12 +203,8 @@ def remove_user_from_course_group(caller, user, location, role): # see if the user is actually in that role, if not then we don't have to do anything groupnames, _ = get_all_course_role_groupnames(location, role) - for groupname in groupnames: - groups = user.groups.filter(name=groupname) - if groups: - # will only be one with that name - user.groups.remove(groups[0]) - user.save() + user.groups.remove(*user.groups.filter(name__in=groupnames)) + user.save() def remove_user_from_creator_group(caller, user): @@ -243,7 +239,7 @@ def is_user_in_course_group_role(user, location, role, check_staff=True): if check_staff and user.is_staff: return 
True groupnames, _ = get_all_course_role_groupnames(location, role) - return any(user.groups.filter(name=groupname).exists() for groupname in groupnames) + return user.groups.filter(name__in=groupnames).exists() return False @@ -266,7 +262,7 @@ def is_user_in_creator_group(user): # Feature flag for using the creator group setting. Will be removed once the feature is complete. if settings.FEATURES.get('ENABLE_CREATOR_GROUP', False): - return user.groups.filter(name=COURSE_CREATOR_GROUP_NAME).count() > 0 + return user.groups.filter(name=COURSE_CREATOR_GROUP_NAME).exists() return True diff --git a/cms/djangoapps/contentstore/tests/test_contentstore.py b/cms/djangoapps/contentstore/tests/test_contentstore.py index 563c05affb..a13742d404 100644 --- a/cms/djangoapps/contentstore/tests/test_contentstore.py +++ b/cms/djangoapps/contentstore/tests/test_contentstore.py @@ -7,7 +7,6 @@ from textwrap import dedent from django.test.utils import override_settings from django.conf import settings -from django.core.urlresolvers import reverse from path import path from tempdir import mkdtemp_clean from fs.osfs import OSFS @@ -427,7 +426,7 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase): course = module_store.get_item(course_location) num_tabs = len(course.tabs) - last_tab = course.tabs[num_tabs - 1] + last_tab = course.tabs[-1] url_slug = last_tab['url_slug'] delete_url = self._get_tab_locator(course, last_tab).url_reverse('xblock') @@ -446,7 +445,7 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase): def _get_tab_locator(self, course, tab): """ Returns the locator for a given tab. 
""" - tab_location = 'i4x://MITx/999/static_tab/{0}'.format(tab['url_slug']) + tab_location = 'i4x://edX/999/static_tab/{0}'.format(tab['url_slug']) return loc_mapper().translate_location( course.location.course_id, Location(tab_location), False, True ) diff --git a/cms/djangoapps/contentstore/views/course.py b/cms/djangoapps/contentstore/views/course.py index 32538a80c7..261370b7ff 100644 --- a/cms/djangoapps/contentstore/views/course.py +++ b/cms/djangoapps/contentstore/views/course.py @@ -128,7 +128,8 @@ def course_listing(request): """ List all courses available to the logged in user """ - courses = modulestore('direct').get_items(Location('i4x', None, None, 'course', None)) + # there's an index on category which will be used if none of its antecedents are set + courses = modulestore('direct').get_items(Location(None, None, None, 'course', None)) # filter out courses that we don't have access too def course_filter(course): diff --git a/cms/envs/dev.py b/cms/envs/dev.py index 74561cee63..0ca4266f40 100644 --- a/cms/envs/dev.py +++ b/cms/envs/dev.py @@ -129,7 +129,12 @@ CACHES = { 'LOCATION': '/var/tmp/mongo_metadata_inheritance', 'TIMEOUT': 300, 'KEY_FUNCTION': 'util.memcache.safe_key', - } + }, + 'loc_cache': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + 'LOCATION': 'edx_location_mem_cache', + }, + } # Make the keyedcache startup warnings go away diff --git a/cms/envs/test.py b/cms/envs/test.py index a35824ed52..e541b0d390 100644 --- a/cms/envs/test.py +++ b/cms/envs/test.py @@ -138,7 +138,12 @@ CACHES = { 'LOCATION': '/var/tmp/mongo_metadata_inheritance', 'TIMEOUT': 300, 'KEY_FUNCTION': 'util.memcache.safe_key', - } + }, + 'loc_cache': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + 'LOCATION': 'edx_location_mem_cache', + }, + } # hide ratelimit warnings while running tests diff --git a/common/lib/xmodule/xmodule/modulestore/django.py b/common/lib/xmodule/xmodule/modulestore/django.py index 42372ab64f..ef4b9df40a 100644 
--- a/common/lib/xmodule/xmodule/modulestore/django.py +++ b/common/lib/xmodule/xmodule/modulestore/django.py @@ -129,8 +129,12 @@ def loc_mapper(): global _loc_singleton # pylint: disable=W0212 if _loc_singleton is None: + try: + loc_cache = get_cache('loc_cache') + except InvalidCacheBackendError: + loc_cache = get_cache('default') # instantiate - _loc_singleton = LocMapperStore(**settings.DOC_STORE_CONFIG) + _loc_singleton = LocMapperStore(loc_cache, **settings.DOC_STORE_CONFIG) # inject into split mongo modulestore if 'split' in _MODULESTORES: _MODULESTORES['split'].loc_mapper = _loc_singleton diff --git a/common/lib/xmodule/xmodule/modulestore/loc_mapper_store.py b/common/lib/xmodule/xmodule/modulestore/loc_mapper_store.py index c7355c2f29..31e56da140 100644 --- a/common/lib/xmodule/xmodule/modulestore/loc_mapper_store.py +++ b/common/lib/xmodule/xmodule/modulestore/loc_mapper_store.py @@ -4,10 +4,10 @@ Method for converting among our differing Location/Locator whatever reprs from random import randint import re import pymongo +import bson.son from xmodule.modulestore.exceptions import InvalidLocationError, ItemNotFoundError, DuplicateItemError from xmodule.modulestore.locator import BlockUsageLocator -from xmodule.modulestore.mongo import draft from xmodule.modulestore import Location import urllib @@ -28,7 +28,7 @@ class LocMapperStore(object): ''' def __init__( - self, host, db, collection, port=27017, user=None, password=None, + self, cache, host, db, collection, port=27017, user=None, password=None, **kwargs ): ''' @@ -48,6 +48,7 @@ class LocMapperStore(object): self.location_map = self.db[collection + '.location_map'] self.location_map.write_concern = {'w': 1} + self.cache = cache # location_map functions def create_map_entry(self, course_location, course_id=None, draft_branch='draft', prod_branch='published', @@ -91,9 +92,10 @@ class LocMapperStore(object): else: course_id = "{0.org}.{0.course}".format(course_location) # very like _interpret_location_id 
but w/o the _id - location_id = {'org': course_location.org, 'course': course_location.course} - if course_location.category == 'course': - location_id['name'] = course_location.name + location_id = self._construct_location_son( + course_location.org, course_location.course, + course_location.name if course_location.category == 'course' else None + ) self.location_map.insert({ '_id': location_id, @@ -127,26 +129,33 @@ class LocMapperStore(object): of locations including course. """ location_id = self._interpret_location_course_id(old_style_course_id, location) + if old_style_course_id is None: + old_style_course_id = self._generate_location_course_id(location_id) - maps = self.location_map.find(location_id).sort('_id.name', pymongo.ASCENDING) - if maps.count() == 0: + cached_value = self._get_locator_from_cache(old_style_course_id, location, published) + if cached_value: + return cached_value + + + maps = self.location_map.find(location_id) + maps = list(maps) + if len(maps) == 0: if add_entry_if_missing: # create a new map - course_location = location.replace(category='course', name=location_id['_id.name']) + course_location = location.replace(category='course', name=location_id['_id']['name']) self.create_map_entry(course_location) entry = self.location_map.find_one(location_id) else: raise ItemNotFoundError() - elif maps.count() > 1: - # if more than one, prefer the one w/o a name if that exists. 
Otherwise, choose the first (alphabetically) + elif len(maps) == 1: entry = maps[0] else: + # find entry w/o name, if any; otherwise, pick arbitrary entry = maps[0] - - if published: - branch = entry['prod_branch'] - else: - branch = entry['draft_branch'] + for item in maps: + if 'name' not in item['_id']: + entry = item + break usage_id = entry['block_map'].get(self._encode_for_mongo(location.name)) if usage_id is None: @@ -165,7 +174,18 @@ class LocMapperStore(object): else: raise InvalidLocationError() - return BlockUsageLocator(course_id=entry['course_id'], branch=branch, usage_id=usage_id) + published_usage = BlockUsageLocator( + course_id=entry['course_id'], branch=entry['prod_branch'], usage_id=usage_id) + draft_usage = BlockUsageLocator( + course_id=entry['course_id'], branch=entry['draft_branch'], usage_id=usage_id) + if published: + result = published_usage + else: + result = draft_usage + + self._cache_location_map_entry(old_style_course_id, location, published_usage, draft_usage) + return result + def translate_locator_to_location(self, locator, get_course=False): """ @@ -185,161 +205,51 @@ class LocMapperStore(object): :param locator: a BlockUsageLocator """ + if get_course: + cached_value = self._get_course_location_from_cache(locator.course_id) + else: + cached_value = self._get_location_from_cache(locator) + if cached_value: + return cached_value + # This does not require that the course exist in any modulestore # only that it has a mapping entry. 
maps = self.location_map.find({'course_id': locator.course_id}) # look for one which maps to this block usage_id if maps.count() == 0: return None + result = None for candidate in maps: + old_course_id = self._generate_location_course_id(candidate['_id']) for old_name, cat_to_usage in candidate['block_map'].iteritems(): for category, usage_id in cat_to_usage.iteritems(): - if get_course: - if category == 'course': - return Location( - 'i4x', - candidate['_id']['org'], - candidate['_id']['course'], - 'course', - self._decode_from_mongo(old_name), - None) - elif usage_id == locator.usage_id: - # Always return revision=None because the - # old draft module store wraps locations as draft before - # trying to access things. - return Location( - 'i4x', - candidate['_id']['org'], - candidate['_id']['course'], - category, - self._decode_from_mongo(old_name), - None) + # cache all entries and then figure out if we have the one we want + # Always return revision=None because the + # old draft module store wraps locations as draft before + # trying to access things. 
+ location = Location( + 'i4x', + candidate['_id']['org'], + candidate['_id']['course'], + category, + self._decode_from_mongo(old_name), + None) + published_locator = BlockUsageLocator( + candidate['course_id'], branch=candidate['prod_branch'], usage_id=usage_id + ) + draft_locator = BlockUsageLocator( + candidate['course_id'], branch=candidate['draft_branch'], usage_id=usage_id + ) + self._cache_location_map_entry(old_course_id, location, published_locator, draft_locator) + + if get_course and category == 'course': + result = location + elif not get_course and usage_id == locator.usage_id: + result = location + if result is not None: + return result return None - def add_block_location_translator(self, location, old_course_id=None, usage_id=None): - """ - Similar to translate_location which adds an entry if none is found, but this cannot create a new - course mapping entry, only a block within such a mapping entry. If it finds no existing - course maps, it raises ItemNotFoundError. - - In the case that there are more than one mapping record for the course identified by location, this - method adds the mapping to all matching records! (translate_location only adds to one) - - It allows the caller to specify - the new-style usage_id for the target rather than having the translate concoct its own. - If the provided usage_id already exists in one of the found maps for the org/course, this function - raises DuplicateItemError unless the old item id == the new one. - - If the caller does not provide a usage_id and there exists an entry in one of the course variants, - it will use that entry. If more than one variant uses conflicting entries, it will raise DuplicateItemError. - - Returns the usage_id used in the mapping - - :param location: a fully specified Location - :param old_course_id: the old-style org/course or org/course/run string (optional) - :param usage_id: the desired new block_id. 
If left as None, this will generate one as per translate_location - """ - location_id = self._interpret_location_course_id(old_course_id, location) - - maps = self.location_map.find(location_id) - if maps.count() == 0: - raise ItemNotFoundError() - - # turn maps from cursor to list - map_list = list(maps) - encoded_location_name = self._encode_for_mongo(location.name) - # check whether there's already a usage_id for this location (and it agrees w/ any passed in or found) - for map_entry in map_list: - if (encoded_location_name in map_entry['block_map'] and - location.category in map_entry['block_map'][encoded_location_name]): - if usage_id is None: - usage_id = map_entry['block_map'][encoded_location_name][location.category] - elif usage_id != map_entry['block_map'][encoded_location_name][location.category]: - raise DuplicateItemError(usage_id, self, 'location_map') - - computed_usage_id = usage_id - - # update the maps (and generate a usage_id if it's not been set yet) - for map_entry in map_list: - if computed_usage_id is None: - computed_usage_id = self._add_to_block_map(location, location_id, map_entry['block_map']) - elif (encoded_location_name not in map_entry['block_map'] or - location.category not in map_entry['block_map'][encoded_location_name]): - alt_usage_id = self._verify_uniqueness(computed_usage_id, map_entry['block_map']) - if alt_usage_id != computed_usage_id: - if usage_id is not None: - raise DuplicateItemError(usage_id, self, 'location_map') - else: - # revise already set ones and add to remaining ones - computed_usage_id = self.update_block_location_translator( - location, - alt_usage_id, - old_course_id, - True - ) - - map_entry['block_map'].setdefault(encoded_location_name, {})[location.category] = computed_usage_id - self.location_map.update({'_id': map_entry['_id']}, {'$set': {'block_map': map_entry['block_map']}}) - - return computed_usage_id - - def update_block_location_translator(self, location, usage_id, old_course_id=None, 
autogenerated_usage_id=False): - """ - Update all existing maps from location's block to the new usage_id. Used for changing the usage_id, - thus the usage_id is required. - - Returns the usage_id. (which is primarily useful in the case of autogenerated_usage_id) - - :param location: a fully specified Location - :param usage_id: the desired new block_id. - :param old_course_id: the old-style org/course or org/course/run string (optional) - :param autogenerated_usage_id: a flag used mostly for internal calls to indicate that this usage_id - was autogenerated and thus can be overridden if it's not unique. If you set this flag, the stored - usage_id may not be the one you submitted. - """ - location_id = self._interpret_location_course_id(old_course_id, location) - - maps = self.location_map.find(location_id) - encoded_location_name = self._encode_for_mongo(location.name) - for map_entry in maps: - # handle noop of renaming to same name - if (encoded_location_name in map_entry['block_map'] and - map_entry['block_map'][encoded_location_name].get(location.category) == usage_id): - continue - alt_usage_id = self._verify_uniqueness(usage_id, map_entry['block_map']) - if alt_usage_id != usage_id: - if autogenerated_usage_id: - # revise already set ones and add to remaining ones - usage_id = self.update_block_location_translator(location, alt_usage_id, old_course_id, True) - return usage_id - else: - raise DuplicateItemError(usage_id, self, 'location_map') - - if location.category in map_entry['block_map'].setdefault(encoded_location_name, {}): - map_entry['block_map'][encoded_location_name][location.category] = usage_id - self.location_map.update({'_id': map_entry['_id']}, {'$set': {'block_map': map_entry['block_map']}}) - - return usage_id - - def delete_block_location_translator(self, location, old_course_id=None): - """ - Remove all existing maps from location's block. 
- - :param location: a fully specified Location - :param old_course_id: the old-style org/course or org/course/run string (optional) - """ - location_id = self._interpret_location_course_id(old_course_id, location) - - maps = self.location_map.find(location_id) - encoded_location_name = self._encode_for_mongo(location.name) - for map_entry in maps: - if location.category in map_entry['block_map'].setdefault(encoded_location_name, {}): - if len(map_entry['block_map'][encoded_location_name]) == 1: - del map_entry['block_map'][encoded_location_name] - else: - del map_entry['block_map'][encoded_location_name][location.category] - self.location_map.update({'_id': map_entry['_id']}, {'$set': {'block_map': map_entry['block_map']}}) - def _add_to_block_map(self, location, location_id, block_map): '''add the given location to the block_map and persist it''' if self._block_id_is_guid(location.name): @@ -357,7 +267,7 @@ class LocMapperStore(object): def _interpret_location_course_id(self, course_id, location): """ - Take the old style course id (org/course/run) and return a dict for querying the mapping table. + Take the old style course id (org/course/run) and return a dict w/ a SON for querying the mapping table. If the course_id is empty, it uses location, but this may result in an inadequate id. 
:param course_id: old style 'org/course/run' id from Location.course_id where Location.category = 'course' @@ -367,12 +277,35 @@ class LocMapperStore(object): if course_id: # re doesn't allow ?P<_id.org> and ilk matched = re.match(r'([^/]+)/([^/]+)/([^/]+)', course_id) - return dict(zip(['_id.org', '_id.course', '_id.name'], matched.groups())) + return {'_id': self._construct_location_son(*matched.groups())} - location_id = {'_id.org': location.org, '_id.course': location.course} if location.category == 'course': - location_id['_id.name'] = location.name - return location_id + return {'_id': self._construct_location_son(location.org, location.course, location.name)} + else: + return bson.son.SON([('_id.org', location.org), ('_id.course', location.course)]) + + def _generate_location_course_id(self, entry_id): + """ + Generate a Location course_id for the given entry's id + """ + # strip id envelope if any + entry_id = entry_id.get('_id', entry_id) + if entry_id.get('name', False): + return '{0[org]}/{0[course]}/{0[name]}'.format(entry_id) + elif entry_id.get('_id.org', False): + # the odd format one + return '{0[_id.org]}/{0[_id.course]}'.format(entry_id) + else: + return '{0[org]}/{0[course]}'.format(entry_id) + + def _construct_location_son(self, org, course, name=None): + """ + Construct the SON needed to repr the location for either a query or an insertion + """ + if name: + return bson.son.SON([('org', org), ('course', course), ('name', name)]) + else: + return bson.son.SON([('org', org), ('course', course)]) def _block_id_is_guid(self, name): """ @@ -413,3 +346,41 @@ class LocMapperStore(object): """ return urllib.unquote(fieldname) + def _get_locator_from_cache(self, old_course_id, location, published): + """ + See if the location x published pair is in the cache. If so, return the mapped locator. 
+ """ + entry = self.cache.get('{}+{}'.format(old_course_id, location.url())) + if entry is not None: + if published: + return entry[0] + else: + return entry[1] + return None + + def _get_location_from_cache(self, locator): + """ + See if the locator is in the cache. If so, return the mapped location. + """ + return self.cache.get(unicode(locator)) + + def _get_course_location_from_cache(self, locator_course_id): + """ + See if the course_id is in the cache. If so, return the mapped location to the + course root. + """ + return self.cache.get('courseId+{}'.format(locator_course_id)) + + def _cache_location_map_entry(self, old_course_id, location, published_usage, draft_usage): + """ + Cache the mapping from location to the draft and published Locators in entry. + Also caches the inverse. If the location is category=='course', it caches it for + the get_course query + """ + setmany = {} + if location.category == 'course': + setmany['courseId+{}'.format(published_usage.course_id)] = location + setmany[unicode(published_usage)] = location + setmany[unicode(draft_usage)] = location + setmany['{}+{}'.format(old_course_id, location.url())] = (published_usage, draft_usage) + self.cache.set_many(setmany) diff --git a/common/lib/xmodule/xmodule/modulestore/tests/test_location_mapper.py b/common/lib/xmodule/xmodule/modulestore/tests/test_location_mapper.py index 087a1951a9..18745bd856 100644 --- a/common/lib/xmodule/xmodule/modulestore/tests/test_location_mapper.py +++ b/common/lib/xmodule/xmodule/modulestore/tests/test_location_mapper.py @@ -7,8 +7,9 @@ import unittest import uuid from xmodule.modulestore import Location from xmodule.modulestore.locator import BlockUsageLocator -from xmodule.modulestore.exceptions import ItemNotFoundError, DuplicateItemError +from xmodule.modulestore.exceptions import ItemNotFoundError from xmodule.modulestore.loc_mapper_store import LocMapperStore +from mock import Mock class TestLocationMapper(unittest.TestCase): @@ -23,8 +24,10 @@ class 
TestLocationMapper(unittest.TestCase): 'collection': 'modulestore{0}'.format(uuid.uuid4().hex[:5]), } + cache_standin = TrivialCache() + self.instrumented_cache = Mock(spec=cache_standin, wraps=cache_standin) # pylint: disable=W0142 - TestLocationMapper.loc_store = LocMapperStore(**modulestore_options) + TestLocationMapper.loc_store = LocMapperStore(self.instrumented_cache, **modulestore_options) def tearDown(self): dbref = TestLocationMapper.loc_store.db @@ -36,8 +39,9 @@ class TestLocationMapper(unittest.TestCase): org = 'foo_org' course = 'bar_course' loc_mapper().create_map_entry(Location('i4x', org, course, 'course', 'baz_run')) + # pylint: disable=protected-access entry = loc_mapper().location_map.find_one({ - '_id': {'org': org, 'course': course, 'name': 'baz_run'} + '_id': loc_mapper()._construct_location_son(org, course, 'baz_run') }) self.assertIsNotNone(entry, "Didn't find entry") self.assertEqual(entry['course_id'], '{}.{}.baz_run'.format(org, course)) @@ -48,8 +52,9 @@ class TestLocationMapper(unittest.TestCase): # ensure create_entry does the right thing when not given a course (creates org/course # rather than org/course/run course_id) loc_mapper().create_map_entry(Location('i4x', org, course, 'vertical', 'baz_vert')) + # find the one which has no name entry = loc_mapper().location_map.find_one({ - '_id': {'org': org, 'course': course} + '_id' : loc_mapper()._construct_location_son(org, course, None) }) self.assertIsNotNone(entry, "Didn't find entry") self.assertEqual(entry['course_id'], '{}.{}'.format(org, course)) @@ -63,15 +68,28 @@ class TestLocationMapper(unittest.TestCase): 'wip', 'live', block_map) - entry = loc_mapper().location_map.find_one({ - '_id': {'org': org, 'course': course} - }) + entry = loc_mapper().location_map.find_one({'_id.org': org, '_id.course': course}) self.assertIsNotNone(entry, "Didn't find entry") self.assertEqual(entry['course_id'], 'foo_org.geek_dept.quux_course.baz_run') self.assertEqual(entry['draft_branch'], 'wip') 
self.assertEqual(entry['prod_branch'], 'live') self.assertEqual(entry['block_map'], block_map) + + def translate_n_check(self, location, old_style_course_id, new_style_course_id, usage_id, branch, add_entry=False): + """ + Request translation, check course_id, usage_id, and branch + """ + prob_locator = loc_mapper().translate_location( + old_style_course_id, + location, + published= (branch=='published'), + add_entry_if_missing=add_entry + ) + self.assertEqual(prob_locator.course_id, new_style_course_id) + self.assertEqual(prob_locator.usage_id, usage_id) + self.assertEqual(prob_locator.branch, branch) + def test_translate_location_read_only(self): """ Test the variants of translate_location which don't create entries, just decode @@ -88,83 +106,67 @@ class TestLocationMapper(unittest.TestCase): ) new_style_course_id = '{}.geek_dept.{}.baz_run'.format(org, course) - block_map = {'abc123': {'problem': 'problem2'}} + block_map = { + 'abc123': {'problem': 'problem2'}, + 'def456': {'problem': 'problem4'}, + 'ghi789': {'problem': 'problem7'}, + } loc_mapper().create_map_entry( Location('i4x', org, course, 'course', 'baz_run'), new_style_course_id, block_map=block_map ) + test_problem_locn = Location('i4x', org, course, 'problem', 'abc123') # only one course matches - prob_locator = loc_mapper().translate_location( - old_style_course_id, - Location('i4x', org, course, 'problem', 'abc123'), - add_entry_if_missing=False - ) - self.assertEqual(prob_locator.course_id, new_style_course_id) - self.assertEqual(prob_locator.branch, 'published') - self.assertEqual(prob_locator.usage_id, 'problem2') - # look for w/ only the Location (works b/c there's only one possible course match) - prob_locator = loc_mapper().translate_location( - None, - Location('i4x', org, course, 'problem', 'abc123'), - add_entry_if_missing=False - ) - self.assertEqual(prob_locator.course_id, new_style_course_id) + self.translate_n_check(test_problem_locn, old_style_course_id, new_style_course_id, 
'problem2', 'published') + # look for w/ only the Location (works b/c there's only one possible course match). Will force + # cache as default translation for this problemid + self.translate_n_check(test_problem_locn, None, new_style_course_id, 'problem2', 'published') # look for non-existent problem with self.assertRaises(ItemNotFoundError): - prob_locator = loc_mapper().translate_location( + loc_mapper().translate_location( None, Location('i4x', org, course, 'problem', '1def23'), add_entry_if_missing=False ) - # add a distractor course - block_map = {'abc123': {'problem': 'problem3'}} + # add a distractor course (note that abc123 has a different translation in this one) + distractor_block_map = { + 'abc123': {'problem': 'problem3'}, + 'def456': {'problem': 'problem4'}, + 'ghi789': {'problem': 'problem7'}, + } + test_delta_new_id = '{}.geek_dept.{}.{}'.format(org, course, 'delta_run') + test_delta_old_id = '{}/{}/{}'.format(org, course, 'delta_run') loc_mapper().create_map_entry( Location('i4x', org, course, 'course', 'delta_run'), - '{}.geek_dept.{}.{}'.format(org, course, 'delta_run'), - block_map=block_map + test_delta_new_id, + block_map=distractor_block_map ) - prob_locator = loc_mapper().translate_location( - old_style_course_id, - Location('i4x', org, course, 'problem', 'abc123'), - add_entry_if_missing=False - ) - self.assertEqual(prob_locator.course_id, new_style_course_id) - self.assertEqual(prob_locator.usage_id, 'problem2') - # look for w/ only the Location (not unique; so, just verify it returns something) + # test that old translation still works + self.translate_n_check(test_problem_locn, old_style_course_id, new_style_course_id, 'problem2', 'published') + # and new returns new id + self.translate_n_check(test_problem_locn, test_delta_old_id, test_delta_new_id, 'problem3', 'published') + # look for default translation of uncached Location (not unique; so, just verify it returns something) prob_locator = loc_mapper().translate_location( None, - 
Location('i4x', org, course, 'problem', 'abc123'), + Location('i4x', org, course, 'problem', 'def456'), add_entry_if_missing=False ) self.assertIsNotNone(prob_locator, "couldn't find ambiguous location") - # add a default course pointing to the delta_run + # make delta_run default course: anything not cached using None as old_course_id will use this loc_mapper().create_map_entry( Location('i4x', org, course, 'problem', '789abc123efg456'), - '{}.geek_dept.{}.{}'.format(org, course, 'delta_run'), + test_delta_new_id, block_map=block_map ) - # now the ambiguous query should return delta - prob_locator = loc_mapper().translate_location( - None, - Location('i4x', org, course, 'problem', 'abc123'), - add_entry_if_missing=False - ) - self.assertEqual(prob_locator.course_id, '{}.geek_dept.{}.{}'.format(org, course, 'delta_run')) - self.assertEqual(prob_locator.usage_id, 'problem3') + # now an uncached ambiguous query should return delta + test_unused_locn = Location('i4x', org, course, 'problem', 'ghi789') + self.translate_n_check(test_unused_locn, None, test_delta_new_id, 'problem7', 'published') # get the draft one (I'm sorry this is getting long) - prob_locator = loc_mapper().translate_location( - None, - Location('i4x', org, course, 'problem', 'abc123'), - published=False, - add_entry_if_missing=False - ) - self.assertEqual(prob_locator.course_id, '{}.geek_dept.{}.{}'.format(org, course, 'delta_run')) - self.assertEqual(prob_locator.usage_id, 'problem3') - self.assertEqual(prob_locator.branch, 'draft') + self.translate_n_check(test_unused_locn, None, test_delta_new_id, 'problem7', 'draft') def test_translate_location_dwim(self): """ @@ -176,48 +178,39 @@ class TestLocationMapper(unittest.TestCase): old_style_course_id = '{}/{}/{}'.format(org, course, 'baz_run') problem_name = 'abc123abc123abc123abc123abc123f9' location = Location('i4x', org, course, 'problem', problem_name) - prob_locator = loc_mapper().translate_location( - old_style_course_id, - location, - 
add_entry_if_missing=True - ) new_style_course_id = '{}.{}.{}'.format(org, course, 'baz_run') - self.assertEqual(prob_locator.course_id, new_style_course_id) - self.assertEqual(prob_locator.branch, 'published') - self.assertEqual(prob_locator.usage_id, 'problemabc') - # look for w/ only the Location (works b/c there's only one possible course match) - prob_locator = loc_mapper().translate_location( - None, - location, - add_entry_if_missing=True - ) - self.assertEqual(prob_locator.course_id, new_style_course_id) + self.translate_n_check(location, old_style_course_id, new_style_course_id, 'problemabc', 'published', True) + # look for w/ only the Location (works b/c there's only one possible course match): causes cache + self.translate_n_check(location, None, new_style_course_id, 'problemabc', 'published', True) + # create an entry w/o a guid name other_location = Location('i4x', org, course, 'chapter', 'intro') - locator = loc_mapper().translate_location( - old_style_course_id, - other_location, - add_entry_if_missing=True - ) - self.assertEqual(locator.usage_id, 'intro') + self.translate_n_check(other_location, old_style_course_id, new_style_course_id, 'intro', 'published', True) # add a distractor course + delta_new_course_id = '{}.geek_dept.{}.{}'.format(org, course, 'delta_run') + delta_course_locn = Location('i4x', org, course, 'course', 'delta_run') loc_mapper().create_map_entry( - Location('i4x', org, course, 'course', 'delta_run'), - '{}.geek_dept.{}.{}'.format(org, course, 'delta_run'), + delta_course_locn, + delta_new_course_id, block_map={problem_name: {'problem': 'problem3'}} ) - prob_locator = loc_mapper().translate_location( - old_style_course_id, - location, - add_entry_if_missing=True + self.translate_n_check(location, old_style_course_id, new_style_course_id, 'problemabc', 'published', True) + + # add a new one to both courses (ensure name doesn't have same beginning) + new_prob_name = uuid.uuid4().hex + while new_prob_name.startswith('abc'): + 
new_prob_name = uuid.uuid4().hex + new_prob_locn = location.replace(name=new_prob_name) + new_usage_id = 'problem{}'.format(new_prob_name[:3]) + self.translate_n_check(new_prob_locn, old_style_course_id, new_style_course_id, new_usage_id, 'published', True) + self.translate_n_check( + new_prob_locn, delta_course_locn.course_id, delta_new_course_id, new_usage_id, 'published', True ) - self.assertEqual(prob_locator.course_id, new_style_course_id) - self.assertEqual(prob_locator.usage_id, 'problemabc') - # look for w/ only the Location (not unique; so, just verify it returns something) + # look for w/ only the Location: causes caching and not unique; so, can't check which course prob_locator = loc_mapper().translate_location( None, - location, + new_prob_locn, add_entry_if_missing=True ) self.assertIsNotNone(prob_locator, "couldn't find ambiguous location") @@ -225,17 +218,20 @@ class TestLocationMapper(unittest.TestCase): # add a default course pointing to the delta_run loc_mapper().create_map_entry( Location('i4x', org, course, 'problem', '789abc123efg456'), - '{}.geek_dept.{}.{}'.format(org, course, 'delta_run'), + delta_new_course_id, block_map={problem_name: {'problem': 'problem3'}} ) # now the ambiguous query should return delta - prob_locator = loc_mapper().translate_location( - None, - location, - add_entry_if_missing=False + again_prob_name = uuid.uuid4().hex + while again_prob_name.startswith('abc') or again_prob_name.startswith(new_prob_name[:3]): + again_prob_name = uuid.uuid4().hex + again_prob_locn = location.replace(name=again_prob_name) + again_usage_id = 'problem{}'.format(again_prob_name[:3]) + self.translate_n_check(again_prob_locn, old_style_course_id, new_style_course_id, again_usage_id, 'published', True) + self.translate_n_check( + again_prob_locn, delta_course_locn.course_id, delta_new_course_id, again_usage_id, 'published', True ) - self.assertEqual(prob_locator.course_id, '{}.geek_dept.{}.{}'.format(org, course, 'delta_run')) - 
self.assertEqual(prob_locator.usage_id, 'problem3') + self.translate_n_check(again_prob_locn, None, delta_new_course_id, again_usage_id, 'published', True) def test_translate_locator(self): """ @@ -352,185 +348,6 @@ class TestLocationMapper(unittest.TestCase): reverted_location = loc_mapper().translate_locator_to_location(prob_locator) self.assertEqual(location, reverted_location) - def test_add_block(self): - """ - Test add_block_location_translator(location, old_course_id=None, usage_id=None) - """ - # call w/ no matching courses - org = 'foo_org' - course = 'bar_course' - old_style_course_id = '{}/{}/{}'.format(org, course, 'baz_run') - problem_name = 'abc123abc123abc123abc123abc123f9' - location = Location('i4x', org, course, 'problem', problem_name) - with self.assertRaises(ItemNotFoundError): - loc_mapper().add_block_location_translator(location) - with self.assertRaises(ItemNotFoundError): - loc_mapper().add_block_location_translator(location, old_style_course_id) - - # w/ one matching course - new_style_course_id = '{}.{}.{}'.format(org, course, 'baz_run') - loc_mapper().create_map_entry( - Location('i4x', org, course, 'course', 'baz_run'), - new_style_course_id, - ) - new_usage_id = loc_mapper().add_block_location_translator(location) - self.assertEqual(new_usage_id, 'problemabc') - # look it up - translated_loc = loc_mapper().translate_location(old_style_course_id, location, add_entry_if_missing=False) - self.assertEqual(translated_loc.course_id, new_style_course_id) - self.assertEqual(translated_loc.usage_id, new_usage_id) - - # w/ one distractor which has one entry already - new_style_course_id = '{}.geek_dept.{}.{}'.format(org, course, 'delta_run') - loc_mapper().create_map_entry( - Location('i4x', org, course, 'course', 'delta_run'), - new_style_course_id, - block_map={'48f23a10395384929234': {'chapter': 'chapter48f'}} - ) - # try adding the one added before - new_usage_id2 = loc_mapper().add_block_location_translator(location) - 
self.assertEqual(new_usage_id, new_usage_id2) - # it should be in the distractor now - new_location = loc_mapper().translate_locator_to_location( - BlockUsageLocator(course_id=new_style_course_id, usage_id=new_usage_id2, branch='published') - ) - self.assertEqual(new_location, location) - # add one close to the existing chapter (cause name collision) - location = Location('i4x', org, course, 'chapter', '48f23a103953849292341234567890ab') - new_usage_id = loc_mapper().add_block_location_translator(location) - self.assertRegexpMatches(new_usage_id, r'^chapter48f\d') - # retrievable from both courses - new_location = loc_mapper().translate_locator_to_location( - BlockUsageLocator(course_id=new_style_course_id, usage_id=new_usage_id, branch='published') - ) - self.assertEqual(new_location, location) - new_location = loc_mapper().translate_locator_to_location( - BlockUsageLocator( - course_id='{}.{}.{}'.format(org, course, 'baz_run'), - usage_id=new_usage_id, - branch='published' - ) - ) - self.assertEqual(new_location, location) - - # provoke duplicate item errors - location = location.replace(name='44f23a103953849292341234567890ab') - with self.assertRaises(DuplicateItemError): - loc_mapper().add_block_location_translator(location, usage_id=new_usage_id) - new_usage_id = loc_mapper().add_block_location_translator(location, old_course_id=old_style_course_id) - other_course_old_style = '{}/{}/{}'.format(org, course, 'delta_run') - new_usage_id2 = loc_mapper().add_block_location_translator( - location, - old_course_id=other_course_old_style, - usage_id='{}b'.format(new_usage_id) - ) - with self.assertRaises(DuplicateItemError): - loc_mapper().add_block_location_translator(location) - - def test_update_block(self): - """ - test update_block_location_translator(location, usage_id, old_course_id=None) - """ - org = 'foo_org' - course = 'bar_course' - new_style_course_id = '{}.geek_dept.{}.baz_run'.format(org, course) - loc_mapper().create_map_entry( - Location('i4x', org, 
course, 'course', 'baz_run'), - new_style_course_id, - block_map={ - 'abc123': {'problem': 'problem2'}, - '48f23a10395384929234': {'chapter': 'chapter48f'}, - '1': {'chapter': 'chapter1', 'problem': 'problem1'}, - } - ) - new_style_course_id2 = '{}.geek_dept.{}.delta_run'.format(org, course) - loc_mapper().create_map_entry( - Location('i4x', org, course, 'course', 'delta_run'), - new_style_course_id2, - block_map={ - 'abc123': {'problem': 'problem3'}, - '48f23a10395384929234': {'chapter': 'chapter48b'}, - '1': {'chapter': 'chapter2', 'problem': 'problem2'}, - } - ) - location = Location('i4x', org, course, 'problem', '1') - # change in all courses to same value - loc_mapper().update_block_location_translator(location, 'problem1') - trans_loc = loc_mapper().translate_locator_to_location( - BlockUsageLocator(course_id=new_style_course_id, usage_id='problem1', branch='published') - ) - self.assertEqual(trans_loc, location) - trans_loc = loc_mapper().translate_locator_to_location( - BlockUsageLocator(course_id=new_style_course_id2, usage_id='problem1', branch='published') - ) - self.assertEqual(trans_loc, location) - # try to change to overwrite used usage_id - location = Location('i4x', org, course, 'chapter', '48f23a10395384929234') - with self.assertRaises(DuplicateItemError): - loc_mapper().update_block_location_translator(location, 'chapter2') - # just change the one course - loc_mapper().update_block_location_translator(location, 'chapter2', '{}/{}/{}'.format(org, course, 'baz_run')) - trans_loc = loc_mapper().translate_locator_to_location( - BlockUsageLocator(course_id=new_style_course_id, usage_id='chapter2', branch='published') - ) - self.assertEqual(trans_loc.name, '48f23a10395384929234') - # but this still points to the old - trans_loc = loc_mapper().translate_locator_to_location( - BlockUsageLocator(course_id=new_style_course_id2, usage_id='chapter2', branch='published') - ) - self.assertEqual(trans_loc.name, '1') - - def test_delete_block(self): - """ - 
test delete_block_location_translator(location, old_course_id=None) - """ - org = 'foo_org' - course = 'bar_course' - new_style_course_id = '{}.geek_dept.{}.baz_run'.format(org, course) - loc_mapper().create_map_entry( - Location('i4x', org, course, 'course', 'baz_run'), - new_style_course_id, - block_map={ - 'abc123': {'problem': 'problem2'}, - '48f23a10395384929234': {'chapter': 'chapter48f'}, - '1': {'chapter': 'chapter1', 'problem': 'problem1'}, - } - ) - new_style_course_id2 = '{}.geek_dept.{}.delta_run'.format(org, course) - loc_mapper().create_map_entry( - Location('i4x', org, course, 'course', 'delta_run'), - new_style_course_id2, - block_map={ - 'abc123': {'problem': 'problem3'}, - '48f23a10395384929234': {'chapter': 'chapter48b'}, - '1': {'chapter': 'chapter2', 'problem': 'problem2'}, - } - ) - location = Location('i4x', org, course, 'problem', '1') - # delete from all courses - loc_mapper().delete_block_location_translator(location) - self.assertIsNone(loc_mapper().translate_locator_to_location( - BlockUsageLocator(course_id=new_style_course_id, usage_id='problem1', branch='published') - )) - self.assertIsNone(loc_mapper().translate_locator_to_location( - BlockUsageLocator(course_id=new_style_course_id2, usage_id='problem2', branch='published') - )) - # delete from one course - location = location.replace(name='abc123') - loc_mapper().delete_block_location_translator(location, '{}/{}/{}'.format(org, course, 'baz_run')) - with self.assertRaises(ItemNotFoundError): - loc_mapper().translate_location( - '{}/{}/{}'.format(org, course, 'baz_run'), - location, - add_entry_if_missing=False - ) - locator = loc_mapper().translate_location( - '{}/{}/{}'.format(org, course, 'delta_run'), - location, - add_entry_if_missing=False - ) - self.assertEqual(locator.usage_id, 'problem3') - #================================== # functions to mock existing services @@ -545,3 +362,23 @@ def render_to_template_mock(*_args): """ Mocks the mako render_to_template w/ a noop """ + + 
+class TrivialCache(object): + """ + A trivial cache impl + """ + def __init__(self): + self.cache = {} + + def get(self, key): + """ + Mock the .get + """ + return self.cache.get(key) + + def set_many(self, entries): + """ + mock set_many + """ + self.cache.update(entries) diff --git a/common/lib/xmodule/xmodule/modulestore/tests/test_split_migrator.py b/common/lib/xmodule/xmodule/modulestore/tests/test_split_migrator.py index 7ddab1e376..c444aa32e1 100644 --- a/common/lib/xmodule/xmodule/modulestore/tests/test_split_migrator.py +++ b/common/lib/xmodule/xmodule/modulestore/tests/test_split_migrator.py @@ -43,7 +43,10 @@ class TestMigration(unittest.TestCase): def setUp(self): super(TestMigration, self).setUp() - self.loc_mapper = LocMapperStore(**self.db_config) + noop_cache = mock.Mock(spec=['get', 'set_many']) + noop_cache.configure_mock(**{'get.return_value': None}) + # pylint: disable=W0142 + self.loc_mapper = LocMapperStore(noop_cache, **self.db_config) self.old_mongo = MongoModuleStore(self.db_config, **self.modulestore_options) self.draft_mongo = DraftModuleStore(self.db_config, **self.modulestore_options) self.split_mongo = SplitMongoModuleStore( diff --git a/lms/djangoapps/courseware/roles.py b/lms/djangoapps/courseware/roles.py index 110bb9f362..60853e8090 100644 --- a/lms/djangoapps/courseware/roles.py +++ b/lms/djangoapps/courseware/roles.py @@ -4,7 +4,6 @@ adding users, removing users, and listing members """ from abc import ABCMeta, abstractmethod -from functools import partial from django.contrib.auth.models import User, Group diff --git a/lms/envs/dev.py b/lms/envs/dev.py index bfff68157e..30053d3da3 100644 --- a/lms/envs/dev.py +++ b/lms/envs/dev.py @@ -85,7 +85,11 @@ CACHES = { 'LOCATION': '/var/tmp/mongo_metadata_inheritance', 'TIMEOUT': 300, 'KEY_FUNCTION': 'util.memcache.safe_key', - } + }, + 'loc_cache': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + 'LOCATION': 'edx_location_mem_cache', + }, } diff --git a/lms/envs/test.py 
b/lms/envs/test.py index 6fe96d760b..2dbfca0bd6 100644 --- a/lms/envs/test.py +++ b/lms/envs/test.py @@ -165,7 +165,12 @@ CACHES = { 'LOCATION': '/var/tmp/mongo_metadata_inheritance', 'TIMEOUT': 300, 'KEY_FUNCTION': 'util.memcache.safe_key', - } + }, + 'loc_cache': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + 'LOCATION': 'edx_location_mem_cache', + }, + } # Dummy secret key for dev diff --git a/mongo_indexes.md b/mongo_indexes.md new file mode 100644 index 0000000000..f42aed9a58 --- /dev/null +++ b/mongo_indexes.md @@ -0,0 +1,21 @@ +These are the indexes each mongo db should have in order to perform well. +Each section states the collection name and then the indexes. To create an index, +you'll typically either use the mongohq type web interface or a standard terminal console. +If a terminal, this assumes you've logged in and gotten to the mongo prompt +``` +mongo mydatabasename +``` +
+If using the terminal, to add an index to a collection, you'll need to prefix ```ensureIndex``` with +``` +db.collection_name +``` +as in ```db.location_map.ensureIndex({'course_id': 1}, {background: true})``` +
+location_map: +============= +
+``` +ensureIndex({'_id.org': 1, '_id.course': 1}) +ensureIndex({'course_id': 1}) +```