loc_mapper memoize all translations
STUD-1048
This commit is contained in:
@@ -7,7 +7,6 @@ from textwrap import dedent
|
||||
|
||||
from django.test.utils import override_settings
|
||||
from django.conf import settings
|
||||
from django.core.urlresolvers import reverse
|
||||
from path import path
|
||||
from tempdir import mkdtemp_clean
|
||||
from fs.osfs import OSFS
|
||||
@@ -427,7 +426,7 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase):
|
||||
|
||||
course = module_store.get_item(course_location)
|
||||
num_tabs = len(course.tabs)
|
||||
last_tab = course.tabs[num_tabs - 1]
|
||||
last_tab = course.tabs[-1]
|
||||
url_slug = last_tab['url_slug']
|
||||
delete_url = self._get_tab_locator(course, last_tab).url_reverse('xblock')
|
||||
|
||||
@@ -446,7 +445,7 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase):
|
||||
|
||||
def _get_tab_locator(self, course, tab):
|
||||
""" Returns the locator for a given tab. """
|
||||
tab_location = 'i4x://MITx/999/static_tab/{0}'.format(tab['url_slug'])
|
||||
tab_location = 'i4x://edX/999/static_tab/{0}'.format(tab['url_slug'])
|
||||
return loc_mapper().translate_location(
|
||||
course.location.course_id, Location(tab_location), False, True
|
||||
)
|
||||
|
||||
@@ -130,7 +130,7 @@ def loc_mapper():
|
||||
# pylint: disable=W0212
|
||||
if _loc_singleton is None:
|
||||
# instantiate
|
||||
_loc_singleton = LocMapperStore(**settings.DOC_STORE_CONFIG)
|
||||
_loc_singleton = LocMapperStore(get_cache('default'), **settings.DOC_STORE_CONFIG)
|
||||
# inject into split mongo modulestore
|
||||
if 'split' in _MODULESTORES:
|
||||
_MODULESTORES['split'].loc_mapper = _loc_singleton
|
||||
|
||||
@@ -28,7 +28,7 @@ class LocMapperStore(object):
|
||||
'''
|
||||
|
||||
def __init__(
|
||||
self, host, db, collection, port=27017, user=None, password=None,
|
||||
self, cache, host, db, collection, port=27017, user=None, password=None,
|
||||
**kwargs
|
||||
):
|
||||
'''
|
||||
@@ -48,6 +48,7 @@ class LocMapperStore(object):
|
||||
|
||||
self.location_map = self.db[collection + '.location_map']
|
||||
self.location_map.write_concern = {'w': 1}
|
||||
self.cache = cache
|
||||
|
||||
# location_map functions
|
||||
def create_map_entry(self, course_location, course_id=None, draft_branch='draft', prod_branch='published',
|
||||
@@ -128,6 +129,13 @@ class LocMapperStore(object):
|
||||
of locations including course.
|
||||
"""
|
||||
location_id = self._interpret_location_course_id(old_style_course_id, location)
|
||||
if old_style_course_id is None:
|
||||
old_style_course_id = self._generate_location_course_id(location_id)
|
||||
|
||||
cached_value = self._get_locator_from_cache(old_style_course_id, location, published)
|
||||
if cached_value:
|
||||
return cached_value
|
||||
|
||||
|
||||
maps = self.location_map.find(location_id)
|
||||
maps = list(maps)
|
||||
@@ -149,11 +157,6 @@ class LocMapperStore(object):
|
||||
entry = item
|
||||
break
|
||||
|
||||
if published:
|
||||
branch = entry['prod_branch']
|
||||
else:
|
||||
branch = entry['draft_branch']
|
||||
|
||||
usage_id = entry['block_map'].get(self._encode_for_mongo(location.name))
|
||||
if usage_id is None:
|
||||
if add_entry_if_missing:
|
||||
@@ -171,7 +174,18 @@ class LocMapperStore(object):
|
||||
else:
|
||||
raise InvalidLocationError()
|
||||
|
||||
return BlockUsageLocator(course_id=entry['course_id'], branch=branch, usage_id=usage_id)
|
||||
published_usage = BlockUsageLocator(
|
||||
course_id=entry['course_id'], branch=entry['prod_branch'], usage_id=usage_id)
|
||||
draft_usage = BlockUsageLocator(
|
||||
course_id=entry['course_id'], branch=entry['draft_branch'], usage_id=usage_id)
|
||||
if published:
|
||||
result = published_usage
|
||||
else:
|
||||
result = draft_usage
|
||||
|
||||
self._cache_location_map_entry(old_style_course_id, location, published_usage, draft_usage)
|
||||
return result
|
||||
|
||||
|
||||
def translate_locator_to_location(self, locator, get_course=False):
|
||||
"""
|
||||
@@ -191,170 +205,51 @@ class LocMapperStore(object):
|
||||
|
||||
:param locator: a BlockUsageLocator
|
||||
"""
|
||||
if get_course:
|
||||
cached_value = self._get_course_location_from_cache(locator.course_id)
|
||||
else:
|
||||
cached_value = self._get_location_from_cache(locator)
|
||||
if cached_value:
|
||||
return cached_value
|
||||
|
||||
# This does not require that the course exist in any modulestore
|
||||
# only that it has a mapping entry.
|
||||
maps = self.location_map.find({'course_id': locator.course_id})
|
||||
# look for one which maps to this block usage_id
|
||||
if maps.count() == 0:
|
||||
return None
|
||||
result = None
|
||||
for candidate in maps:
|
||||
old_course_id = self._generate_location_course_id(candidate['_id'])
|
||||
for old_name, cat_to_usage in candidate['block_map'].iteritems():
|
||||
for category, usage_id in cat_to_usage.iteritems():
|
||||
if get_course:
|
||||
if category == 'course':
|
||||
return Location(
|
||||
'i4x',
|
||||
candidate['_id']['org'],
|
||||
candidate['_id']['course'],
|
||||
'course',
|
||||
self._decode_from_mongo(old_name),
|
||||
None)
|
||||
elif usage_id == locator.usage_id:
|
||||
# Always return revision=None because the
|
||||
# old draft module store wraps locations as draft before
|
||||
# trying to access things.
|
||||
return Location(
|
||||
'i4x',
|
||||
candidate['_id']['org'],
|
||||
candidate['_id']['course'],
|
||||
category,
|
||||
self._decode_from_mongo(old_name),
|
||||
None)
|
||||
# cache all entries and then figure out if we have the one we want
|
||||
# Always return revision=None because the
|
||||
# old draft module store wraps locations as draft before
|
||||
# trying to access things.
|
||||
location = Location(
|
||||
'i4x',
|
||||
candidate['_id']['org'],
|
||||
candidate['_id']['course'],
|
||||
category,
|
||||
self._decode_from_mongo(old_name),
|
||||
None)
|
||||
published_locator = BlockUsageLocator(
|
||||
candidate['course_id'], branch=candidate['prod_branch'], usage_id=usage_id
|
||||
)
|
||||
draft_locator = BlockUsageLocator(
|
||||
candidate['course_id'], branch=candidate['draft_branch'], usage_id=usage_id
|
||||
)
|
||||
self._cache_location_map_entry(old_course_id, location, published_locator, draft_locator)
|
||||
|
||||
if get_course and category == 'course':
|
||||
result = location
|
||||
elif not get_course and usage_id == locator.usage_id:
|
||||
result = location
|
||||
if result is not None:
|
||||
return result
|
||||
return None
|
||||
|
||||
def add_block_location_translator(self, location, old_course_id=None, usage_id=None):
|
||||
"""
|
||||
Similar to translate_location which adds an entry if none is found, but this cannot create a new
|
||||
course mapping entry, only a block within such a mapping entry. If it finds no existing
|
||||
course maps, it raises ItemNotFoundError.
|
||||
|
||||
In the case that there are more than one mapping record for the course identified by location, this
|
||||
method adds the mapping to all matching records! (translate_location only adds to one)
|
||||
|
||||
It allows the caller to specify
|
||||
the new-style usage_id for the target rather than having the translate concoct its own.
|
||||
If the provided usage_id already exists in one of the found maps for the org/course, this function
|
||||
raises DuplicateItemError unless the old item id == the new one.
|
||||
|
||||
If the caller does not provide a usage_id and there exists an entry in one of the course variants,
|
||||
it will use that entry. If more than one variant uses conflicting entries, it will raise DuplicateItemError.
|
||||
|
||||
Returns the usage_id used in the mapping
|
||||
|
||||
:param location: a fully specified Location
|
||||
:param old_course_id: the old-style org/course or org/course/run string (optional)
|
||||
:param usage_id: the desired new block_id. If left as None, this will generate one as per translate_location
|
||||
"""
|
||||
location_id = self._interpret_location_course_id(old_course_id, location)
|
||||
|
||||
maps = self.location_map.find(location_id)
|
||||
# turn maps from cursor to list
|
||||
map_list = list(maps)
|
||||
if len(map_list) == 0:
|
||||
raise ItemNotFoundError()
|
||||
|
||||
encoded_location_name = self._encode_for_mongo(location.name)
|
||||
# check whether there's already a usage_id for this location (and it agrees w/ any passed in or found)
|
||||
for map_entry in map_list:
|
||||
if (encoded_location_name in map_entry['block_map'] and
|
||||
location.category in map_entry['block_map'][encoded_location_name]):
|
||||
if usage_id is None:
|
||||
usage_id = map_entry['block_map'][encoded_location_name][location.category]
|
||||
elif usage_id != map_entry['block_map'][encoded_location_name][location.category]:
|
||||
raise DuplicateItemError(usage_id, self, 'location_map')
|
||||
|
||||
computed_usage_id = usage_id
|
||||
|
||||
# update the maps (and generate a usage_id if it's not been set yet)
|
||||
for map_entry in map_list:
|
||||
if computed_usage_id is None:
|
||||
computed_usage_id = self._add_to_block_map(location, location_id, map_entry['block_map'])
|
||||
elif (encoded_location_name not in map_entry['block_map'] or
|
||||
location.category not in map_entry['block_map'][encoded_location_name]):
|
||||
alt_usage_id = self._verify_uniqueness(computed_usage_id, map_entry['block_map'])
|
||||
if alt_usage_id != computed_usage_id:
|
||||
if usage_id is not None:
|
||||
raise DuplicateItemError(usage_id, self, 'location_map')
|
||||
else:
|
||||
# revise already set ones and add to remaining ones
|
||||
computed_usage_id = self.update_block_location_translator(
|
||||
location,
|
||||
alt_usage_id,
|
||||
old_course_id,
|
||||
True
|
||||
)
|
||||
|
||||
map_entry['block_map'].setdefault(encoded_location_name, {})[location.category] = computed_usage_id
|
||||
self.location_map.update(
|
||||
{'_id': self._construct_location_son(**map_entry['_id'])},
|
||||
{'$set': {'block_map': map_entry['block_map']}}
|
||||
)
|
||||
|
||||
return computed_usage_id
|
||||
|
||||
def update_block_location_translator(self, location, usage_id, old_course_id=None, autogenerated_usage_id=False):
|
||||
"""
|
||||
Update all existing maps from location's block to the new usage_id. Used for changing the usage_id,
|
||||
thus the usage_id is required.
|
||||
|
||||
Returns the usage_id. (which is primarily useful in the case of autogenerated_usage_id)
|
||||
|
||||
:param location: a fully specified Location
|
||||
:param usage_id: the desired new block_id.
|
||||
:param old_course_id: the old-style org/course or org/course/run string (optional)
|
||||
:param autogenerated_usage_id: a flag used mostly for internal calls to indicate that this usage_id
|
||||
was autogenerated and thus can be overridden if it's not unique. If you set this flag, the stored
|
||||
usage_id may not be the one you submitted.
|
||||
"""
|
||||
location_id = self._interpret_location_course_id(old_course_id, location)
|
||||
|
||||
maps = self.location_map.find(location_id)
|
||||
encoded_location_name = self._encode_for_mongo(location.name)
|
||||
for map_entry in maps:
|
||||
# handle noop of renaming to same name
|
||||
if (encoded_location_name in map_entry['block_map'] and
|
||||
map_entry['block_map'][encoded_location_name].get(location.category) == usage_id):
|
||||
continue
|
||||
alt_usage_id = self._verify_uniqueness(usage_id, map_entry['block_map'])
|
||||
if alt_usage_id != usage_id:
|
||||
if autogenerated_usage_id:
|
||||
# revise already set ones and add to remaining ones
|
||||
usage_id = self.update_block_location_translator(location, alt_usage_id, old_course_id, True)
|
||||
return usage_id
|
||||
else:
|
||||
raise DuplicateItemError(usage_id, self, 'location_map')
|
||||
|
||||
if location.category in map_entry['block_map'].setdefault(encoded_location_name, {}):
|
||||
map_entry['block_map'][encoded_location_name][location.category] = usage_id
|
||||
self.location_map.update(
|
||||
{'_id': self._construct_location_son(**map_entry['_id'])},
|
||||
{'$set': {'block_map': map_entry['block_map']}}
|
||||
)
|
||||
|
||||
return usage_id
|
||||
|
||||
def delete_block_location_translator(self, location, old_course_id=None):
|
||||
"""
|
||||
Remove all existing maps from location's block.
|
||||
|
||||
:param location: a fully specified Location
|
||||
:param old_course_id: the old-style org/course or org/course/run string (optional)
|
||||
"""
|
||||
location_id = self._interpret_location_course_id(old_course_id, location)
|
||||
|
||||
maps = self.location_map.find(location_id)
|
||||
encoded_location_name = self._encode_for_mongo(location.name)
|
||||
for map_entry in maps:
|
||||
if location.category in map_entry['block_map'].setdefault(encoded_location_name, {}):
|
||||
if len(map_entry['block_map'][encoded_location_name]) == 1:
|
||||
del map_entry['block_map'][encoded_location_name]
|
||||
else:
|
||||
del map_entry['block_map'][encoded_location_name][location.category]
|
||||
self.location_map.update(
|
||||
{'_id': self._construct_location_son(**map_entry['_id'])},
|
||||
{'$set': {'block_map': map_entry['block_map']}}
|
||||
)
|
||||
|
||||
def _add_to_block_map(self, location, location_id, block_map):
|
||||
'''add the given location to the block_map and persist it'''
|
||||
if self._block_id_is_guid(location.name):
|
||||
@@ -389,6 +284,20 @@ class LocMapperStore(object):
|
||||
else:
|
||||
return bson.son.SON([('_id.org', location.org), ('_id.course', location.course)])
|
||||
|
||||
def _generate_location_course_id(self, entry_id):
|
||||
"""
|
||||
Generate a Location course_id for the given entry's id
|
||||
"""
|
||||
# strip id envelope if any
|
||||
entry_id = entry_id.get('_id', entry_id)
|
||||
if entry_id.get('name', False):
|
||||
return '{0[org]}/{0[course]}/{0[name]}'.format(entry_id)
|
||||
elif entry_id.get('_id.org', False):
|
||||
# the odd format one
|
||||
return '{0[_id.org]}/{0[_id.course]}'.format(entry_id)
|
||||
else:
|
||||
return '{0[org]}/{0[course]}'.format(entry_id)
|
||||
|
||||
def _construct_location_son(self, org, course, name=None):
|
||||
"""
|
||||
Construct the SON needed to repr the location for either a query or an insertion
|
||||
@@ -437,3 +346,41 @@ class LocMapperStore(object):
|
||||
"""
|
||||
return urllib.unquote(fieldname)
|
||||
|
||||
def _get_locator_from_cache(self, old_course_id, location, published):
|
||||
"""
|
||||
See if the location x published pair is in the cache. If so, return the mapped locator.
|
||||
"""
|
||||
entry = self.cache.get('{}+{}'.format(old_course_id, location.url()))
|
||||
if entry is not None:
|
||||
if published:
|
||||
return entry[0]
|
||||
else:
|
||||
return entry[1]
|
||||
return None
|
||||
|
||||
def _get_location_from_cache(self, locator):
|
||||
"""
|
||||
See if the locator is in the cache. If so, return the mapped location.
|
||||
"""
|
||||
return self.cache.get(unicode(locator))
|
||||
|
||||
def _get_course_location_from_cache(self, locator_course_id):
|
||||
"""
|
||||
See if the course_id is in the cache. If so, return the mapped location to the
|
||||
course root.
|
||||
"""
|
||||
return self.cache.get('courseId+{}'.format(locator_course_id))
|
||||
|
||||
def _cache_location_map_entry(self, old_course_id, location, published_usage, draft_usage):
|
||||
"""
|
||||
Cache the mapping from location to the draft and published Locators in entry.
|
||||
Also caches the inverse. If the location is category=='course', it caches it for
|
||||
the get_course query
|
||||
"""
|
||||
setmany = {}
|
||||
if location.category == 'course':
|
||||
setmany['courseId+{}'.format(published_usage.course_id)] = location
|
||||
setmany[unicode(published_usage)] = location
|
||||
setmany[unicode(draft_usage)] = location
|
||||
setmany['{}+{}'.format(old_course_id, location.url())] = (published_usage, draft_usage)
|
||||
self.cache.set_many(setmany)
|
||||
|
||||
@@ -7,8 +7,9 @@ import unittest
|
||||
import uuid
|
||||
from xmodule.modulestore import Location
|
||||
from xmodule.modulestore.locator import BlockUsageLocator
|
||||
from xmodule.modulestore.exceptions import ItemNotFoundError, DuplicateItemError
|
||||
from xmodule.modulestore.exceptions import ItemNotFoundError
|
||||
from xmodule.modulestore.loc_mapper_store import LocMapperStore
|
||||
from mock import Mock
|
||||
|
||||
|
||||
class TestLocationMapper(unittest.TestCase):
|
||||
@@ -23,8 +24,10 @@ class TestLocationMapper(unittest.TestCase):
|
||||
'collection': 'modulestore{0}'.format(uuid.uuid4().hex[:5]),
|
||||
}
|
||||
|
||||
cache_standin = TrivialCache()
|
||||
self.instrumented_cache = Mock(spec=cache_standin, wraps=cache_standin)
|
||||
# pylint: disable=W0142
|
||||
TestLocationMapper.loc_store = LocMapperStore(**modulestore_options)
|
||||
TestLocationMapper.loc_store = LocMapperStore(self.instrumented_cache, **modulestore_options)
|
||||
|
||||
def tearDown(self):
|
||||
dbref = TestLocationMapper.loc_store.db
|
||||
@@ -72,6 +75,21 @@ class TestLocationMapper(unittest.TestCase):
|
||||
self.assertEqual(entry['prod_branch'], 'live')
|
||||
self.assertEqual(entry['block_map'], block_map)
|
||||
|
||||
|
||||
def translate_n_check(self, location, old_style_course_id, new_style_course_id, usage_id, branch, add_entry=False):
|
||||
"""
|
||||
Request translation, check course_id, usage_id, and branch
|
||||
"""
|
||||
prob_locator = loc_mapper().translate_location(
|
||||
old_style_course_id,
|
||||
location,
|
||||
published= (branch=='published'),
|
||||
add_entry_if_missing=add_entry
|
||||
)
|
||||
self.assertEqual(prob_locator.course_id, new_style_course_id)
|
||||
self.assertEqual(prob_locator.usage_id, usage_id)
|
||||
self.assertEqual(prob_locator.branch, branch)
|
||||
|
||||
def test_translate_location_read_only(self):
|
||||
"""
|
||||
Test the variants of translate_location which don't create entries, just decode
|
||||
@@ -88,83 +106,67 @@ class TestLocationMapper(unittest.TestCase):
|
||||
)
|
||||
|
||||
new_style_course_id = '{}.geek_dept.{}.baz_run'.format(org, course)
|
||||
block_map = {'abc123': {'problem': 'problem2'}}
|
||||
block_map = {
|
||||
'abc123': {'problem': 'problem2'},
|
||||
'def456': {'problem': 'problem4'},
|
||||
'ghi789': {'problem': 'problem7'},
|
||||
}
|
||||
loc_mapper().create_map_entry(
|
||||
Location('i4x', org, course, 'course', 'baz_run'),
|
||||
new_style_course_id,
|
||||
block_map=block_map
|
||||
)
|
||||
test_problem_locn = Location('i4x', org, course, 'problem', 'abc123')
|
||||
# only one course matches
|
||||
prob_locator = loc_mapper().translate_location(
|
||||
old_style_course_id,
|
||||
Location('i4x', org, course, 'problem', 'abc123'),
|
||||
add_entry_if_missing=False
|
||||
)
|
||||
self.assertEqual(prob_locator.course_id, new_style_course_id)
|
||||
self.assertEqual(prob_locator.branch, 'published')
|
||||
self.assertEqual(prob_locator.usage_id, 'problem2')
|
||||
# look for w/ only the Location (works b/c there's only one possible course match)
|
||||
prob_locator = loc_mapper().translate_location(
|
||||
None,
|
||||
Location('i4x', org, course, 'problem', 'abc123'),
|
||||
add_entry_if_missing=False
|
||||
)
|
||||
self.assertEqual(prob_locator.course_id, new_style_course_id)
|
||||
self.translate_n_check(test_problem_locn, old_style_course_id, new_style_course_id, 'problem2', 'published')
|
||||
# look for w/ only the Location (works b/c there's only one possible course match). Will force
|
||||
# cache as default translation for this problemid
|
||||
self.translate_n_check(test_problem_locn, None, new_style_course_id, 'problem2', 'published')
|
||||
# look for non-existent problem
|
||||
with self.assertRaises(ItemNotFoundError):
|
||||
prob_locator = loc_mapper().translate_location(
|
||||
loc_mapper().translate_location(
|
||||
None,
|
||||
Location('i4x', org, course, 'problem', '1def23'),
|
||||
add_entry_if_missing=False
|
||||
)
|
||||
|
||||
# add a distractor course
|
||||
block_map = {'abc123': {'problem': 'problem3'}}
|
||||
# add a distractor course (note that abc123 has a different translation in this one)
|
||||
distractor_block_map = {
|
||||
'abc123': {'problem': 'problem3'},
|
||||
'def456': {'problem': 'problem4'},
|
||||
'ghi789': {'problem': 'problem7'},
|
||||
}
|
||||
test_delta_new_id = '{}.geek_dept.{}.{}'.format(org, course, 'delta_run')
|
||||
test_delta_old_id = '{}/{}/{}'.format(org, course, 'delta_run')
|
||||
loc_mapper().create_map_entry(
|
||||
Location('i4x', org, course, 'course', 'delta_run'),
|
||||
'{}.geek_dept.{}.{}'.format(org, course, 'delta_run'),
|
||||
block_map=block_map
|
||||
test_delta_new_id,
|
||||
block_map=distractor_block_map
|
||||
)
|
||||
prob_locator = loc_mapper().translate_location(
|
||||
old_style_course_id,
|
||||
Location('i4x', org, course, 'problem', 'abc123'),
|
||||
add_entry_if_missing=False
|
||||
)
|
||||
self.assertEqual(prob_locator.course_id, new_style_course_id)
|
||||
self.assertEqual(prob_locator.usage_id, 'problem2')
|
||||
# look for w/ only the Location (not unique; so, just verify it returns something)
|
||||
# test that old translation still works
|
||||
self.translate_n_check(test_problem_locn, old_style_course_id, new_style_course_id, 'problem2', 'published')
|
||||
# and new returns new id
|
||||
self.translate_n_check(test_problem_locn, test_delta_old_id, test_delta_new_id, 'problem3', 'published')
|
||||
# look for default translation of uncached Location (not unique; so, just verify it returns something)
|
||||
prob_locator = loc_mapper().translate_location(
|
||||
None,
|
||||
Location('i4x', org, course, 'problem', 'abc123'),
|
||||
Location('i4x', org, course, 'problem', 'def456'),
|
||||
add_entry_if_missing=False
|
||||
)
|
||||
self.assertIsNotNone(prob_locator, "couldn't find ambiguous location")
|
||||
|
||||
# add a default course pointing to the delta_run
|
||||
# make delta_run default course: anything not cached using None as old_course_id will use this
|
||||
loc_mapper().create_map_entry(
|
||||
Location('i4x', org, course, 'problem', '789abc123efg456'),
|
||||
'{}.geek_dept.{}.{}'.format(org, course, 'delta_run'),
|
||||
test_delta_new_id,
|
||||
block_map=block_map
|
||||
)
|
||||
# now the ambiguous query should return delta
|
||||
prob_locator = loc_mapper().translate_location(
|
||||
None,
|
||||
Location('i4x', org, course, 'problem', 'abc123'),
|
||||
add_entry_if_missing=False
|
||||
)
|
||||
self.assertEqual(prob_locator.course_id, '{}.geek_dept.{}.{}'.format(org, course, 'delta_run'))
|
||||
self.assertEqual(prob_locator.usage_id, 'problem3')
|
||||
# now an uncached ambiguous query should return delta
|
||||
test_unused_locn = Location('i4x', org, course, 'problem', 'ghi789')
|
||||
self.translate_n_check(test_unused_locn, None, test_delta_new_id, 'problem7', 'published')
|
||||
|
||||
# get the draft one (I'm sorry this is getting long)
|
||||
prob_locator = loc_mapper().translate_location(
|
||||
None,
|
||||
Location('i4x', org, course, 'problem', 'abc123'),
|
||||
published=False,
|
||||
add_entry_if_missing=False
|
||||
)
|
||||
self.assertEqual(prob_locator.course_id, '{}.geek_dept.{}.{}'.format(org, course, 'delta_run'))
|
||||
self.assertEqual(prob_locator.usage_id, 'problem3')
|
||||
self.assertEqual(prob_locator.branch, 'draft')
|
||||
self.translate_n_check(test_unused_locn, None, test_delta_new_id, 'problem7', 'draft')
|
||||
|
||||
def test_translate_location_dwim(self):
|
||||
"""
|
||||
@@ -176,48 +178,39 @@ class TestLocationMapper(unittest.TestCase):
|
||||
old_style_course_id = '{}/{}/{}'.format(org, course, 'baz_run')
|
||||
problem_name = 'abc123abc123abc123abc123abc123f9'
|
||||
location = Location('i4x', org, course, 'problem', problem_name)
|
||||
prob_locator = loc_mapper().translate_location(
|
||||
old_style_course_id,
|
||||
location,
|
||||
add_entry_if_missing=True
|
||||
)
|
||||
new_style_course_id = '{}.{}.{}'.format(org, course, 'baz_run')
|
||||
self.assertEqual(prob_locator.course_id, new_style_course_id)
|
||||
self.assertEqual(prob_locator.branch, 'published')
|
||||
self.assertEqual(prob_locator.usage_id, 'problemabc')
|
||||
# look for w/ only the Location (works b/c there's only one possible course match)
|
||||
prob_locator = loc_mapper().translate_location(
|
||||
None,
|
||||
location,
|
||||
add_entry_if_missing=True
|
||||
)
|
||||
self.assertEqual(prob_locator.course_id, new_style_course_id)
|
||||
self.translate_n_check(location, old_style_course_id, new_style_course_id, 'problemabc', 'published', True)
|
||||
# look for w/ only the Location (works b/c there's only one possible course match): causes cache
|
||||
self.translate_n_check(location, None, new_style_course_id, 'problemabc', 'published', True)
|
||||
|
||||
# create an entry w/o a guid name
|
||||
other_location = Location('i4x', org, course, 'chapter', 'intro')
|
||||
locator = loc_mapper().translate_location(
|
||||
old_style_course_id,
|
||||
other_location,
|
||||
add_entry_if_missing=True
|
||||
)
|
||||
self.assertEqual(locator.usage_id, 'intro')
|
||||
self.translate_n_check(other_location, old_style_course_id, new_style_course_id, 'intro', 'published', True)
|
||||
|
||||
# add a distractor course
|
||||
delta_new_course_id = '{}.geek_dept.{}.{}'.format(org, course, 'delta_run')
|
||||
delta_course_locn = Location('i4x', org, course, 'course', 'delta_run')
|
||||
loc_mapper().create_map_entry(
|
||||
Location('i4x', org, course, 'course', 'delta_run'),
|
||||
'{}.geek_dept.{}.{}'.format(org, course, 'delta_run'),
|
||||
delta_course_locn,
|
||||
delta_new_course_id,
|
||||
block_map={problem_name: {'problem': 'problem3'}}
|
||||
)
|
||||
prob_locator = loc_mapper().translate_location(
|
||||
old_style_course_id,
|
||||
location,
|
||||
add_entry_if_missing=True
|
||||
self.translate_n_check(location, old_style_course_id, new_style_course_id, 'problemabc', 'published', True)
|
||||
|
||||
# add a new one to both courses (ensure name doesn't have same beginning)
|
||||
new_prob_name = uuid.uuid4().hex
|
||||
while new_prob_name.startswith('abc'):
|
||||
new_prob_name = uuid.uuid4().hex
|
||||
new_prob_locn = location.replace(name=new_prob_name)
|
||||
new_usage_id = 'problem{}'.format(new_prob_name[:3])
|
||||
self.translate_n_check(new_prob_locn, old_style_course_id, new_style_course_id, new_usage_id, 'published', True)
|
||||
self.translate_n_check(
|
||||
new_prob_locn, delta_course_locn.course_id, delta_new_course_id, new_usage_id, 'published', True
|
||||
)
|
||||
self.assertEqual(prob_locator.course_id, new_style_course_id)
|
||||
self.assertEqual(prob_locator.usage_id, 'problemabc')
|
||||
# look for w/ only the Location (not unique; so, just verify it returns something)
|
||||
# look for w/ only the Location: causes caching and not unique; so, can't check which course
|
||||
prob_locator = loc_mapper().translate_location(
|
||||
None,
|
||||
location,
|
||||
new_prob_locn,
|
||||
add_entry_if_missing=True
|
||||
)
|
||||
self.assertIsNotNone(prob_locator, "couldn't find ambiguous location")
|
||||
@@ -225,17 +218,20 @@ class TestLocationMapper(unittest.TestCase):
|
||||
# add a default course pointing to the delta_run
|
||||
loc_mapper().create_map_entry(
|
||||
Location('i4x', org, course, 'problem', '789abc123efg456'),
|
||||
'{}.geek_dept.{}.{}'.format(org, course, 'delta_run'),
|
||||
delta_new_course_id,
|
||||
block_map={problem_name: {'problem': 'problem3'}}
|
||||
)
|
||||
# now the ambiguous query should return delta
|
||||
prob_locator = loc_mapper().translate_location(
|
||||
None,
|
||||
location,
|
||||
add_entry_if_missing=False
|
||||
again_prob_name = uuid.uuid4().hex
|
||||
while again_prob_name.startswith('abc') or again_prob_name.startswith(new_prob_name[:3]):
|
||||
again_prob_name = uuid.uuid4().hex
|
||||
again_prob_locn = location.replace(name=again_prob_name)
|
||||
again_usage_id = 'problem{}'.format(again_prob_name[:3])
|
||||
self.translate_n_check(again_prob_locn, old_style_course_id, new_style_course_id, again_usage_id, 'published', True)
|
||||
self.translate_n_check(
|
||||
again_prob_locn, delta_course_locn.course_id, delta_new_course_id, again_usage_id, 'published', True
|
||||
)
|
||||
self.assertEqual(prob_locator.course_id, '{}.geek_dept.{}.{}'.format(org, course, 'delta_run'))
|
||||
self.assertEqual(prob_locator.usage_id, 'problem3')
|
||||
self.translate_n_check(again_prob_locn, None, delta_new_course_id, again_usage_id, 'published', True)
|
||||
|
||||
def test_translate_locator(self):
|
||||
"""
|
||||
@@ -352,185 +348,6 @@ class TestLocationMapper(unittest.TestCase):
|
||||
reverted_location = loc_mapper().translate_locator_to_location(prob_locator)
|
||||
self.assertEqual(location, reverted_location)
|
||||
|
||||
def test_add_block(self):
|
||||
"""
|
||||
Test add_block_location_translator(location, old_course_id=None, usage_id=None)
|
||||
"""
|
||||
# call w/ no matching courses
|
||||
org = 'foo_org'
|
||||
course = 'bar_course'
|
||||
old_style_course_id = '{}/{}/{}'.format(org, course, 'baz_run')
|
||||
problem_name = 'abc123abc123abc123abc123abc123f9'
|
||||
location = Location('i4x', org, course, 'problem', problem_name)
|
||||
with self.assertRaises(ItemNotFoundError):
|
||||
loc_mapper().add_block_location_translator(location)
|
||||
with self.assertRaises(ItemNotFoundError):
|
||||
loc_mapper().add_block_location_translator(location, old_style_course_id)
|
||||
|
||||
# w/ one matching course
|
||||
new_style_course_id = '{}.{}.{}'.format(org, course, 'baz_run')
|
||||
loc_mapper().create_map_entry(
|
||||
Location('i4x', org, course, 'course', 'baz_run'),
|
||||
new_style_course_id,
|
||||
)
|
||||
new_usage_id = loc_mapper().add_block_location_translator(location)
|
||||
self.assertEqual(new_usage_id, 'problemabc')
|
||||
# look it up
|
||||
translated_loc = loc_mapper().translate_location(old_style_course_id, location, add_entry_if_missing=False)
|
||||
self.assertEqual(translated_loc.course_id, new_style_course_id)
|
||||
self.assertEqual(translated_loc.usage_id, new_usage_id)
|
||||
|
||||
# w/ one distractor which has one entry already
|
||||
new_style_course_id = '{}.geek_dept.{}.{}'.format(org, course, 'delta_run')
|
||||
loc_mapper().create_map_entry(
|
||||
Location('i4x', org, course, 'course', 'delta_run'),
|
||||
new_style_course_id,
|
||||
block_map={'48f23a10395384929234': {'chapter': 'chapter48f'}}
|
||||
)
|
||||
# try adding the one added before
|
||||
new_usage_id2 = loc_mapper().add_block_location_translator(location)
|
||||
self.assertEqual(new_usage_id, new_usage_id2)
|
||||
# it should be in the distractor now
|
||||
new_location = loc_mapper().translate_locator_to_location(
|
||||
BlockUsageLocator(course_id=new_style_course_id, usage_id=new_usage_id2, branch='published')
|
||||
)
|
||||
self.assertEqual(new_location, location)
|
||||
# add one close to the existing chapter (cause name collision)
|
||||
location = Location('i4x', org, course, 'chapter', '48f23a103953849292341234567890ab')
|
||||
new_usage_id = loc_mapper().add_block_location_translator(location)
|
||||
self.assertRegexpMatches(new_usage_id, r'^chapter48f\d')
|
||||
# retrievable from both courses
|
||||
new_location = loc_mapper().translate_locator_to_location(
|
||||
BlockUsageLocator(course_id=new_style_course_id, usage_id=new_usage_id, branch='published')
|
||||
)
|
||||
self.assertEqual(new_location, location)
|
||||
new_location = loc_mapper().translate_locator_to_location(
|
||||
BlockUsageLocator(
|
||||
course_id='{}.{}.{}'.format(org, course, 'baz_run'),
|
||||
usage_id=new_usage_id,
|
||||
branch='published'
|
||||
)
|
||||
)
|
||||
self.assertEqual(new_location, location)
|
||||
|
||||
# provoke duplicate item errors
|
||||
location = location.replace(name='44f23a103953849292341234567890ab')
|
||||
with self.assertRaises(DuplicateItemError):
|
||||
loc_mapper().add_block_location_translator(location, usage_id=new_usage_id)
|
||||
new_usage_id = loc_mapper().add_block_location_translator(location, old_course_id=old_style_course_id)
|
||||
other_course_old_style = '{}/{}/{}'.format(org, course, 'delta_run')
|
||||
new_usage_id2 = loc_mapper().add_block_location_translator(
|
||||
location,
|
||||
old_course_id=other_course_old_style,
|
||||
usage_id='{}b'.format(new_usage_id)
|
||||
)
|
||||
with self.assertRaises(DuplicateItemError):
|
||||
loc_mapper().add_block_location_translator(location)
|
||||
|
||||
def test_update_block(self):
    """
    test update_block_location_translator(location, usage_id, old_course_id=None)

    Sets up two courses (baz_run and delta_run) sharing the same org/course
    and overlapping old-style block names, then verifies that an update can
    target all courses at once or be scoped to a single course.
    """
    org = 'foo_org'
    course = 'bar_course'
    new_style_course_id = '{}.geek_dept.{}.baz_run'.format(org, course)
    # map entry for the first course: old block name -> {category: new usage_id}
    loc_mapper().create_map_entry(
        Location('i4x', org, course, 'course', 'baz_run'),
        new_style_course_id,
        block_map={
            'abc123': {'problem': 'problem2'},
            '48f23a10395384929234': {'chapter': 'chapter48f'},
            '1': {'chapter': 'chapter1', 'problem': 'problem1'},
        }
    )
    new_style_course_id2 = '{}.geek_dept.{}.delta_run'.format(org, course)
    # second course reuses the same old block names but maps them to
    # different usage_ids, so per-course scoping can be observed below
    loc_mapper().create_map_entry(
        Location('i4x', org, course, 'course', 'delta_run'),
        new_style_course_id2,
        block_map={
            'abc123': {'problem': 'problem3'},
            '48f23a10395384929234': {'chapter': 'chapter48b'},
            '1': {'chapter': 'chapter2', 'problem': 'problem2'},
        }
    )
    location = Location('i4x', org, course, 'problem', '1')
    # change in all courses to same value
    loc_mapper().update_block_location_translator(location, 'problem1')
    # both courses should now translate 'problem1' back to this location
    trans_loc = loc_mapper().translate_locator_to_location(
        BlockUsageLocator(course_id=new_style_course_id, usage_id='problem1', branch='published')
    )
    self.assertEqual(trans_loc, location)
    trans_loc = loc_mapper().translate_locator_to_location(
        BlockUsageLocator(course_id=new_style_course_id2, usage_id='problem1', branch='published')
    )
    self.assertEqual(trans_loc, location)
    # try to change to overwrite used usage_id
    # 'chapter2' is already taken in delta_run, so an unscoped update must fail
    location = Location('i4x', org, course, 'chapter', '48f23a10395384929234')
    with self.assertRaises(DuplicateItemError):
        loc_mapper().update_block_location_translator(location, 'chapter2')
    # just change the one course
    loc_mapper().update_block_location_translator(location, 'chapter2', '{}/{}/{}'.format(org, course, 'baz_run'))
    trans_loc = loc_mapper().translate_locator_to_location(
        BlockUsageLocator(course_id=new_style_course_id, usage_id='chapter2', branch='published')
    )
    self.assertEqual(trans_loc.name, '48f23a10395384929234')
    # but this still points to the old
    # (delta_run was not updated, so its 'chapter2' still maps to block '1')
    trans_loc = loc_mapper().translate_locator_to_location(
        BlockUsageLocator(course_id=new_style_course_id2, usage_id='chapter2', branch='published')
    )
    self.assertEqual(trans_loc.name, '1')
|
||||
|
||||
def test_delete_block(self):
    """
    test delete_block_location_translator(location, old_course_id=None)

    Sets up two courses with overlapping old-style block names, then checks
    that a delete can remove a block's translation from every course or from
    just one course while leaving the other course's mapping intact.
    """
    org = 'foo_org'
    course = 'bar_course'
    new_style_course_id = '{}.geek_dept.{}.baz_run'.format(org, course)
    # map entry for the first course: old block name -> {category: new usage_id}
    loc_mapper().create_map_entry(
        Location('i4x', org, course, 'course', 'baz_run'),
        new_style_course_id,
        block_map={
            'abc123': {'problem': 'problem2'},
            '48f23a10395384929234': {'chapter': 'chapter48f'},
            '1': {'chapter': 'chapter1', 'problem': 'problem1'},
        }
    )
    new_style_course_id2 = '{}.geek_dept.{}.delta_run'.format(org, course)
    # second course maps the same old names to different usage_ids
    loc_mapper().create_map_entry(
        Location('i4x', org, course, 'course', 'delta_run'),
        new_style_course_id2,
        block_map={
            'abc123': {'problem': 'problem3'},
            '48f23a10395384929234': {'chapter': 'chapter48b'},
            '1': {'chapter': 'chapter2', 'problem': 'problem2'},
        }
    )
    location = Location('i4x', org, course, 'problem', '1')
    # delete from all courses
    loc_mapper().delete_block_location_translator(location)
    # neither course's usage_id for old block '1' should resolve any more
    self.assertIsNone(loc_mapper().translate_locator_to_location(
        BlockUsageLocator(course_id=new_style_course_id, usage_id='problem1', branch='published')
    ))
    self.assertIsNone(loc_mapper().translate_locator_to_location(
        BlockUsageLocator(course_id=new_style_course_id2, usage_id='problem2', branch='published')
    ))
    # delete from one course
    location = location.replace(name='abc123')
    loc_mapper().delete_block_location_translator(location, '{}/{}/{}'.format(org, course, 'baz_run'))
    # baz_run no longer knows the block ...
    with self.assertRaises(ItemNotFoundError):
        loc_mapper().translate_location(
            '{}/{}/{}'.format(org, course, 'baz_run'),
            location,
            add_entry_if_missing=False
        )
    # ... but delta_run's mapping survives the scoped delete
    locator = loc_mapper().translate_location(
        '{}/{}/{}'.format(org, course, 'delta_run'),
        location,
        add_entry_if_missing=False
    )
    self.assertEqual(locator.usage_id, 'problem3')
|
||||
|
||||
|
||||
#==================================
|
||||
# functions to mock existing services
|
||||
@@ -545,3 +362,23 @@ def render_to_template_mock(*_args):
|
||||
"""
|
||||
Mocks the mako render_to_template with a no-op
|
||||
"""
|
||||
|
||||
|
||||
class TrivialCache(object):
    """
    A trivial dict-backed cache implementation.

    Exposes the subset of the Django low-level cache API that
    LocMapperStore uses (``get`` and ``set_many``), so it can stand in for
    ``get_cache('default')`` in tests.
    """
    def __init__(self):
        # plain dict backing store: key -> cached value
        self.cache = {}

    def get(self, key, default=None):
        """
        Mock the .get

        Returns the cached value for ``key``, or ``default`` when absent
        (mirrors ``django.core.cache``'s ``cache.get(key, default=None)``).
        """
        return self.cache.get(key, default)

    def set_many(self, entries):
        """
        mock set_many

        Merge the ``entries`` mapping (key -> value) into the cache.
        """
        self.cache.update(entries)
|
||||
|
||||
@@ -43,7 +43,10 @@ class TestMigration(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(TestMigration, self).setUp()
|
||||
self.loc_mapper = LocMapperStore(**self.db_config)
|
||||
noop_cache = mock.Mock(spec=['get', 'set_many'])
|
||||
noop_cache.configure_mock(**{'get.return_value': None})
|
||||
# pylint: disable=W0142
|
||||
self.loc_mapper = LocMapperStore(noop_cache, **self.db_config)
|
||||
self.old_mongo = MongoModuleStore(self.db_config, **self.modulestore_options)
|
||||
self.draft_mongo = DraftModuleStore(self.db_config, **self.modulestore_options)
|
||||
self.split_mongo = SplitMongoModuleStore(
|
||||
|
||||
@@ -16,6 +16,6 @@ location_map:
|
||||
=============
|
||||
|
||||
```
ensureIndex({'_id.org': 1, '_id.course': 1})
ensureIndex({'course_id': 1})
```
|
||||
|
||||
Reference in New Issue
Block a user