refactor: rename descriptor -> block within xmodule/modulestore

Co-authored-by: Agrendalath <piotr@surowiec.it>
This commit is contained in:
Pooja Kulkarni
2023-01-05 14:26:32 -05:00
committed by Agrendalath
parent fb5f8474b0
commit 537cfe4e0f
8 changed files with 125 additions and 125 deletions

View File

@@ -922,7 +922,7 @@ class ModuleStoreRead(ModuleStoreAssetBase, metaclass=ABCMeta):
def get_course(self, course_id, depth=0, **kwargs):
'''
Look for a specific course by its id (:class:`CourseKey`).
Returns the course descriptor, or None if not found.
Returns the course block, or None if not found.
'''
pass # lint-amnesty, pylint: disable=unnecessary-pass
@@ -1286,8 +1286,8 @@ class ModuleStoreWriteBase(ModuleStoreReadBase, ModuleStoreWrite):
# clone a default 'about' overview block as well
about_location = self.make_course_key(org, course, run).make_usage_key('about', 'overview')
about_descriptor = XBlock.load_class('about')
overview_template = about_descriptor.get_template('overview.yaml')
about_block = XBlock.load_class('about')
overview_template = about_block.get_template('overview.yaml')
self.create_item(
user_id,
about_location.course_key,

View File

@@ -272,27 +272,27 @@ class InheritanceMixin(XBlockMixin):
return self.is_past_due()
def compute_inherited_metadata(descriptor):
"""Given a descriptor, traverse all of its descendants and do metadata
def compute_inherited_metadata(block):
"""Given a block, traverse all of its descendants and do metadata
inheritance. Should be called on a CourseBlock after importing a
course.
NOTE: This means that there is no such thing as lazy loading at the
moment--this accesses all the children."""
if descriptor.has_children:
parent_metadata = descriptor.xblock_kvs.inherited_settings.copy()
# add any of descriptor's explicitly set fields to the inheriting list
if block.has_children:
parent_metadata = block.xblock_kvs.inherited_settings.copy()
# add any of block's explicitly set fields to the inheriting list
for field in InheritanceMixin.fields.values(): # lint-amnesty, pylint: disable=no-member
if field.is_set_on(descriptor):
if field.is_set_on(block):
# inherited_settings values are json repr
parent_metadata[field.name] = field.read_json(descriptor)
parent_metadata[field.name] = field.read_json(block)
for child in descriptor.get_children():
for child in block.get_children():
inherit_metadata(child, parent_metadata)
compute_inherited_metadata(child)
def inherit_metadata(descriptor, inherited_data):
def inherit_metadata(block, inherited_data):
"""
Updates this block with metadata inherited from a containing block.
Only metadata specified in self.inheritable_metadata will
@@ -302,7 +302,7 @@ def inherit_metadata(descriptor, inherited_data):
they should inherit
"""
try:
descriptor.xblock_kvs.inherited_settings = inherited_data
block.xblock_kvs.inherited_settings = inherited_data
except AttributeError: # the kvs doesn't have inherited_settings probably b/c it's an error block
pass

View File

@@ -781,7 +781,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
def _get_cache(self, course_version_guid):
"""
Find the descriptor cache for this course if it exists
Find the block cache for this course if it exists
:param course_version_guid:
"""
if self.request_cache is None:
@@ -956,7 +956,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
@autoretry_read()
def get_courses(self, branch, **kwargs): # lint-amnesty, pylint: disable=arguments-differ
"""
Returns a list of course descriptors matching any given qualifiers.
Returns a list of course blocks matching any given qualifiers.
qualifiers should be a dict of keywords matching the db fields or any
legal query for mongo to use against the active_versions collection.
@@ -1098,7 +1098,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
def get_course(self, course_id, depth=0, **kwargs):
"""
Gets the course descriptor for the course identified by the locator
Gets the course block for the course identified by the locator
"""
if not isinstance(course_id, CourseLocator) or course_id.deprecated:
# The supplied CourseKey is of the wrong type, so it can't possibly be stored in this modulestore.
@@ -1423,7 +1423,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
index = self.get_course_index(course_key)
return index
# TODO figure out a way to make this info accessible from the course descriptor
# TODO figure out a way to make this info accessible from the course block
def get_course_history_info(self, course_key):
"""
Because xblocks doesn't give a means to separate the course structure's meta information from
@@ -1490,7 +1490,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
def create_definition_from_data(self, course_key, new_def_data, category, user_id):
"""
Pull the definition fields out of descriptor and save to the db as a new definition
Pull the definition fields out of block and save to the db as a new definition
w/o a predecessor and return the new id.
:param user_id: request.user object
@@ -1529,7 +1529,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
return True
# if this looks in cache rather than fresh fetches, then it will probably not detect
# actual change b/c the descriptor and cache probably point to the same objects
# actual change b/c the block and cache probably point to the same objects
old_definition = self.get_definition(course_key, definition_locator.definition_id)
if old_definition is None:
raise ItemNotFoundError(definition_locator)
@@ -1579,7 +1579,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
def create_item(self, user_id, course_key, block_type, block_id=None, definition_locator=None, fields=None, # lint-amnesty, pylint: disable=arguments-differ
asides=None, force=False, **kwargs):
"""
Add a descriptor to persistence as an element
Add a block to persistence as an element
of the course. Return the resulting post saved version with populated locators.
:param course_key: If it has a version_guid and a course org + course + run + branch, this
@@ -1937,26 +1937,26 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
locator = LibraryLocator(org=org, library=library, branch=kwargs["master_branch"])
return self._create_courselike(locator, user_id, **kwargs)
def update_item(self, descriptor, user_id, allow_not_found=False, force=False, **kwargs): # lint-amnesty, pylint: disable=arguments-differ
def update_item(self, block, user_id, allow_not_found=False, force=False, **kwargs): # lint-amnesty, pylint: disable=arguments-differ
"""
Save the descriptor's fields. it doesn't descend the course dag to save the children.
Return the new descriptor (updated location).
Save the block's fields. It doesn't descend the course DAG to save the children.
Return the new block (updated location).
raises ItemNotFoundError if the location does not exist.
Creates a new course version. If the descriptor's location has a org and course and run, it moves the course head # lint-amnesty, pylint: disable=line-too-long
pointer. If the version_guid of the descriptor points to a non-head version and there's been an intervening
Creates a new course version. If the block's location has an org and course and run, it moves the course head # lint-amnesty, pylint: disable=line-too-long
pointer. If the version_guid of the block points to a non-head version and there's been an intervening
change to this item, it raises a VersionConflictError unless force is True. In the force case, it forks
the course but leaves the head pointer where it is (this change will not be in the course head).
The implementation tries to detect which, if any changes, actually need to be saved and thus won't version
the definition, structure, nor course if they didn't change.
"""
partitioned_fields = self.partition_xblock_fields_by_scope(descriptor)
partitioned_fields = self.partition_xblock_fields_by_scope(block)
return self._update_item_from_fields(
user_id, descriptor.location.course_key, BlockKey.from_usage_key(descriptor.location),
partitioned_fields, descriptor.definition_locator, allow_not_found, force, **kwargs
) or descriptor
user_id, block.location.course_key, BlockKey.from_usage_key(block.location),
partitioned_fields, block.definition_locator, allow_not_found, force, **kwargs
) or block
def _update_item_from_fields(self, user_id, course_key, block_key, partitioned_fields, # pylint: disable=too-many-statements
definition_locator, allow_not_found, force, asides=None, **kwargs):
@@ -2519,8 +2519,8 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
raises ItemNotFoundError if the location does not exist.
raises ValueError if usage_locator points to the structure root
Creates a new course version. If the descriptor's location has a org, a course, and a run, it moves the course head # lint-amnesty, pylint: disable=line-too-long
pointer. If the version_guid of the descriptor points to a non-head version and there's been an intervening
Creates a new course version. If the block's location has an org, a course, and a run, it moves the course head # lint-amnesty, pylint: disable=line-too-long
pointer. If the version_guid of the block points to a non-head version and there's been an intervening
change to this item, it raises a VersionConflictError unless force is True. In the force case, it forks
the course but leaves the head pointer where it is (this change will not be in the course head).
"""

View File

@@ -140,15 +140,15 @@ class DraftVersioningModuleStore(SplitMongoModuleStore, ModuleStoreDraftAndPubli
keys_to_check.extend(children)
return new_keys
def update_item(self, descriptor, user_id, allow_not_found=False, force=False, asides=None, **kwargs): # lint-amnesty, pylint: disable=arguments-differ
old_descriptor_locn = descriptor.location
descriptor.location = self._map_revision_to_branch(old_descriptor_locn)
emit_signals = descriptor.location.branch == ModuleStoreEnum.BranchName.published \
or descriptor.location.block_type in DIRECT_ONLY_CATEGORIES
def update_item(self, block, user_id, allow_not_found=False, force=False, asides=None, **kwargs): # lint-amnesty, pylint: disable=arguments-differ
old_block_locn = block.location
block.location = self._map_revision_to_branch(old_block_locn)
emit_signals = block.location.branch == ModuleStoreEnum.BranchName.published \
or block.location.block_type in DIRECT_ONLY_CATEGORIES
with self.bulk_operations(descriptor.location.course_key, emit_signals=emit_signals):
with self.bulk_operations(block.location.course_key, emit_signals=emit_signals):
item = super().update_item(
descriptor,
block,
user_id,
allow_not_found=allow_not_found,
force=force,
@@ -156,7 +156,7 @@ class DraftVersioningModuleStore(SplitMongoModuleStore, ModuleStoreDraftAndPubli
**kwargs
)
self._auto_publish_no_children(item.location, item.location.block_type, user_id, **kwargs)
descriptor.location = old_descriptor_locn
block.location = old_block_locn
return item
def create_item(self, user_id, course_key, block_type, block_id=None, # pylint: disable=W0221

View File

@@ -514,7 +514,7 @@ class SplitModuleTest(unittest.TestCase):
def findByIdInResult(self, collection, _id): # pylint: disable=invalid-name
"""
Result is a collection of descriptors. Find the one whose block id
Result is a collection of blocks. Find the one whose block id
matches the _id.
"""
for element in collection:
@@ -576,7 +576,7 @@ class SplitModuleCourseTests(SplitModuleTest):
assert course.display_name == 'The Ancient Greek Hero', 'wrong display name'
assert course.advertised_start == 'Fall 2013', 'advertised_start'
assert len(course.children) == 4, 'children'
# check dates and graders--forces loading of descriptor
# check dates and graders--forces loading of block
assert course.edited_by == TEST_ASSISTANT_USER_ID
self.assertDictEqual(course.grade_cutoffs, {"Pass": 0.45})
@@ -662,7 +662,7 @@ class SplitModuleCourseTests(SplitModuleTest):
assert course.advertised_start is None
assert len(course.children) == 0
assert course.definition_locator.definition_id != head_course.definition_locator.definition_id
# check dates and graders--forces loading of descriptor
# check dates and graders--forces loading of block
assert course.edited_by == TEST_ASSISTANT_USER_ID
self.assertDictEqual(course.grade_cutoffs, {"Pass": 0.55})
@@ -676,7 +676,7 @@ class SplitModuleCourseTests(SplitModuleTest):
assert course.display_name == 'The Ancient Greek Hero'
assert course.advertised_start == 'Fall 2013'
assert len(course.children) == 4
# check dates and graders--forces loading of descriptor
# check dates and graders--forces loading of block
assert course.edited_by == TEST_ASSISTANT_USER_ID
self.assertDictEqual(course.grade_cutoffs, {"Pass": 0.45})
@@ -928,7 +928,7 @@ class SplitModuleItemTests(SplitModuleTest):
assert block.display_name == 'The Ancient Greek Hero'
assert block.advertised_start == 'Fall 2013'
assert len(block.children) == 4
# check dates and graders--forces loading of descriptor
# check dates and graders--forces loading of block
assert block.edited_by == TEST_ASSISTANT_USER_ID
self.assertDictEqual(
block.grade_cutoffs, {"Pass": 0.45},
@@ -1061,7 +1061,7 @@ class SplitModuleItemTests(SplitModuleTest):
def test_get_children(self):
"""
Test the existing get_children method on xdescriptors
Test the existing get_children method on xblocks
"""
locator = BlockUsageLocator(
CourseLocator(org='testx', course='GreekHero', run="run", branch=BRANCH_NAME_DRAFT), 'course', 'head12345'
@@ -1080,7 +1080,7 @@ class SplitModuleItemTests(SplitModuleTest):
def version_agnostic(children):
"""
children: list of descriptors
children: list of blocks
Returns the `children` list with each member version-agnostic
"""
return [child.version_agnostic() for child in children]

View File

@@ -160,7 +160,7 @@ class ImportSystem(XMLParsingSystem, MakoDescriptorSystem): # lint-amnesty, pyl
try:
xml_data = etree.fromstring(xml)
make_name_unique(xml_data)
descriptor = self.xblock_from_node(
block = self.xblock_from_node(
xml_data,
None, # parent_id
id_manager,
@@ -170,7 +170,7 @@ class ImportSystem(XMLParsingSystem, MakoDescriptorSystem): # lint-amnesty, pyl
raise
# Didn't load properly. Fall back on loading as an error
# descriptor. This should never error due to formatting.
# block. This should never error due to formatting.
msg = "Error loading from xml. %s"
log.warning(
@@ -186,34 +186,34 @@ class ImportSystem(XMLParsingSystem, MakoDescriptorSystem): # lint-amnesty, pyl
self.error_tracker(msg)
err_msg = msg + "\n" + exc_info_to_str(sys.exc_info())
descriptor = ErrorBlock.from_xml(
block = ErrorBlock.from_xml(
xml,
self,
id_manager,
err_msg
)
descriptor.data_dir = course_dir
block.data_dir = course_dir
if descriptor.scope_ids.usage_id in xmlstore.modules[course_id]:
if block.scope_ids.usage_id in xmlstore.modules[course_id]:
# keep the parent pointer if any but allow everything else to overwrite
other_copy = xmlstore.modules[course_id][descriptor.scope_ids.usage_id]
descriptor.parent = other_copy.parent
if descriptor != other_copy:
log.warning("%s has more than one definition", descriptor.scope_ids.usage_id)
xmlstore.modules[course_id][descriptor.scope_ids.usage_id] = descriptor
other_copy = xmlstore.modules[course_id][block.scope_ids.usage_id]
block.parent = other_copy.parent
if block != other_copy:
log.warning("%s has more than one definition", block.scope_ids.usage_id)
xmlstore.modules[course_id][block.scope_ids.usage_id] = block
if descriptor.has_children:
for child in descriptor.get_children():
if block.has_children:
for child in block.get_children():
# parent is alphabetically least
if child.parent is None or child.parent > descriptor.scope_ids.usage_id:
child.parent = descriptor.location
if child.parent is None or child.parent > block.scope_ids.usage_id:
child.parent = block.location
child.save()
# After setting up the descriptor, save any changes that we have
# made to attributes on the descriptor to the underlying KeyValueStore.
descriptor.save()
return descriptor
# After setting up the block, save any changes that we have
# made to attributes on the block to the underlying KeyValueStore.
block.save()
return block
render_template = lambda template, context: ''
@@ -313,7 +313,7 @@ class XMLModuleStore(ModuleStoreReadBase):
Args:
data_dir (str): path to data directory containing the course directories
default_class (str): dot-separated string defining the default descriptor
default_class (str): dot-separated string defining the default block
class to use if none is specified in entry_points
source_dirs or course_ids (list of str): If specified, the list of source_dirs or course_ids to load.
@@ -376,9 +376,9 @@ class XMLModuleStore(ModuleStoreReadBase):
# So, make a tracker to track load-time errors, then put in the right
# place after the course loads and we have its location
errorlog = make_error_tracker()
course_descriptor = None
course_block = None
try:
course_descriptor = self.load_course(course_dir, course_ids, errorlog.tracker, target_course_id)
course_block = self.load_course(course_dir, course_ids, errorlog.tracker, target_course_id)
except Exception as exc: # pylint: disable=broad-except
msg = f'Course import {target_course_id}: ERROR: Failed to load courselike "{course_dir}": {str(exc)}'
log.exception(msg)
@@ -387,15 +387,15 @@ class XMLModuleStore(ModuleStoreReadBase):
monitor_import_failure(target_course_id, 'Updating', exception=exc)
raise exc
finally:
if course_descriptor is None:
if course_block is None:
pass
elif isinstance(course_descriptor, ErrorBlock):
elif isinstance(course_block, ErrorBlock):
# Didn't load course. Instead, save the errors elsewhere.
self.errored_courses[course_dir] = errorlog
else:
self.courses[course_dir] = course_descriptor
course_descriptor.parent = None
course_id = self.id_from_descriptor(course_descriptor)
self.courses[course_dir] = course_block
course_block.parent = None
course_id = self.id_from_block(course_block)
self._course_errors[course_id] = errorlog
def __str__(self):
@@ -407,11 +407,11 @@ class XMLModuleStore(ModuleStoreReadBase):
)
@staticmethod
def id_from_descriptor(descriptor):
def id_from_block(block):
"""
Grab the course ID from the descriptor
Grab the course ID from the block
"""
return descriptor.id
return block.id
def load_policy(self, policy_path, tracker):
"""
@@ -527,30 +527,30 @@ class XMLModuleStore(ModuleStoreReadBase):
services=services,
target_course_id=target_course_id,
)
course_descriptor = system.process_xml(etree.tostring(course_data, encoding='unicode'))
course_block = system.process_xml(etree.tostring(course_data, encoding='unicode'))
# If we fail to load the course, then skip the rest of the loading steps
if isinstance(course_descriptor, ErrorBlock):
return course_descriptor
if isinstance(course_block, ErrorBlock):
return course_block
self.content_importers(system, course_descriptor, course_dir, url_name)
self.content_importers(system, course_block, course_dir, url_name)
log.info(f'Course import {target_course_id}: Done with courselike import from {course_dir}')
return course_descriptor
return course_block
def content_importers(self, system, course_descriptor, course_dir, url_name):
def content_importers(self, system, course_block, course_dir, url_name):
"""
Load all extra non-course content, and calculate metadata inheritance.
"""
# NOTE: The descriptors end up loading somewhat bottom up, which
# NOTE: The blocks end up loading somewhat bottom up, which
# breaks metadata inheritance via get_children(). Instead
# (actually, in addition to, for now), we do a final inheritance pass
# after we have the course descriptor.
compute_inherited_metadata(course_descriptor)
# after we have the course block.
compute_inherited_metadata(course_block)
# now import all pieces of course_info which is expected to be stored
# in <content_dir>/info or <content_dir>/info/<url_name>
self.load_extra_content(
system, course_descriptor, 'course_info',
system, course_block, 'course_info',
self.data_dir / course_dir / 'info',
course_dir, url_name
)
@@ -558,19 +558,19 @@ class XMLModuleStore(ModuleStoreReadBase):
# now import all static tabs which are expected to be stored in
# in <content_dir>/tabs or <content_dir>/tabs/<url_name>
self.load_extra_content(
system, course_descriptor, 'static_tab',
system, course_block, 'static_tab',
self.data_dir / course_dir / 'tabs',
course_dir, url_name
)
self.load_extra_content(
system, course_descriptor, 'custom_tag_template',
system, course_block, 'custom_tag_template',
self.data_dir / course_dir / 'custom_tags',
course_dir, url_name
)
self.load_extra_content(
system, course_descriptor, 'about',
system, course_block, 'about',
self.data_dir / course_dir / 'about',
course_dir, url_name
)
@@ -587,14 +587,14 @@ class XMLModuleStore(ModuleStoreReadBase):
# always used, preventing duplicate keys.
return CourseKey.from_string('/'.join([org, course, url_name]))
def load_extra_content(self, system, course_descriptor, category, base_dir, course_dir, url_name): # lint-amnesty, pylint: disable=missing-function-docstring
self._load_extra_content(system, course_descriptor, category, base_dir, course_dir)
def load_extra_content(self, system, course_block, category, base_dir, course_dir, url_name): # lint-amnesty, pylint: disable=missing-function-docstring
self._load_extra_content(system, course_block, category, base_dir, course_dir)
# then look in a override folder based on the course run
if os.path.isdir(base_dir / url_name):
self._load_extra_content(system, course_descriptor, category, base_dir / url_name, course_dir)
self._load_extra_content(system, course_block, category, base_dir / url_name, course_dir)
def _import_field_content(self, course_descriptor, category, file_path):
def _import_field_content(self, course_block, category, file_path):
"""
Import field data content for field other than 'data' or 'metadata' form json file and
return field data content as dictionary
@@ -606,7 +606,7 @@ class XMLModuleStore(ModuleStoreReadBase):
dirname, field, file_suffix = file_path.split('/')[-1].split('.')
if file_suffix == 'json' and field not in DEFAULT_CONTENT_FIELDS:
slug = os.path.splitext(os.path.basename(dirname))[0]
location = course_descriptor.scope_ids.usage_id.replace(category=category, name=slug)
location = course_block.scope_ids.usage_id.replace(category=category, name=slug)
with open(file_path) as field_content_file:
field_data = json.load(field_content_file)
data_content = {field: field_data}
@@ -618,7 +618,7 @@ class XMLModuleStore(ModuleStoreReadBase):
return slug, location, data_content
def _load_extra_content(self, system, course_descriptor, category, content_path, course_dir):
def _load_extra_content(self, system, course_block, category, content_path, course_dir):
"""
Import fields data content from files
"""
@@ -633,7 +633,7 @@ class XMLModuleStore(ModuleStoreReadBase):
try:
if filepath.find('.json') != -1:
# json file with json data content
slug, loc, data_content = self._import_field_content(course_descriptor, category, filepath)
slug, loc, data_content = self._import_field_content(course_block, category, filepath)
if data_content is None:
continue
else:
@@ -649,7 +649,7 @@ class XMLModuleStore(ModuleStoreReadBase):
data_content['category'] = category
else:
slug = os.path.splitext(os.path.basename(filepath))[0]
loc = course_descriptor.scope_ids.usage_id.replace(category=category, name=slug)
loc = course_block.scope_ids.usage_id.replace(category=category, name=slug)
# html file with html data content
html = f.read()
try:
@@ -663,7 +663,7 @@ class XMLModuleStore(ModuleStoreReadBase):
if block is None:
block = system.construct_xblock(
category,
# We're loading a descriptor, so student_id is meaningless
# We're loading a block, so student_id is meaningless
# We also don't have separate notions of definition and usage ids yet,
# so we use the location for both
ScopeIds(None, category, loc, loc),
@@ -673,14 +673,14 @@ class XMLModuleStore(ModuleStoreReadBase):
# Hack because we need to pull in the 'display_name' for static tabs (because we need to edit them) # lint-amnesty, pylint: disable=line-too-long
# from the course policy
if category == "static_tab":
tab = CourseTabList.get_tab_by_slug(tab_list=course_descriptor.tabs, url_slug=slug)
tab = CourseTabList.get_tab_by_slug(tab_list=course_block.tabs, url_slug=slug)
if tab:
block.display_name = tab.name
block.course_staff_only = tab.course_staff_only
block.data_dir = course_dir
block.save()
self.modules[course_descriptor.id][block.scope_ids.usage_id] = block
self.modules[course_block.id][block.scope_ids.usage_id] = block
except Exception as exc: # pylint: disable=broad-except
logging.exception("Failed to load %s. Skipping... \
Exception: %s", filepath, str(exc))
@@ -712,7 +712,7 @@ class XMLModuleStore(ModuleStoreReadBase):
def get_items(self, course_id, settings=None, content=None, revision=None, qualifiers=None, **kwargs): # lint-amnesty, pylint: disable=arguments-differ
"""
Returns:
list of XModuleDescriptor instances for the matching items within the course with
list of XBlock instances for the matching items within the course with
the given course_id
NOTE: don't use this to look for courses
@@ -737,7 +737,7 @@ class XMLModuleStore(ModuleStoreReadBase):
if revision == ModuleStoreEnum.RevisionOption.draft_only:
return []
items = []
blocks = []
qualifiers = qualifiers.copy() if qualifiers else {} # copy the qualifiers (destructively manipulated here)
category = qualifiers.pop('category', None)
@@ -760,9 +760,9 @@ class XMLModuleStore(ModuleStoreReadBase):
for block_loc, block in self.modules[course_id].items():
if _block_matches_all(block_loc, block):
items.append(block)
blocks.append(block)
return items
return blocks
def make_course_key(self, org, course, run):
"""
@@ -782,7 +782,7 @@ class XMLModuleStore(ModuleStoreReadBase):
def get_courses(self, **kwargs):
"""
Returns a list of course descriptors. If there were errors on loading,
Returns a list of course blocks. If there were errors on loading,
some of these may be ErrorBlock instead.
"""
return list(self.courses.values())
@@ -900,26 +900,26 @@ class LibraryXMLModuleStore(XMLModuleStore):
return LibraryLocator(org=org, library=library)
@staticmethod
def patch_descriptor_kvs(library_descriptor):
def patch_block_kvs(library_block):
"""
Metadata inheritance can be done purely through XBlocks, but in the import phase
a root block with an InheritanceKeyValueStore is assumed to be at the top of the hierarchy.
This should change in the future, but as XBlocks don't have this KVS, we have to patch it
here manually.
"""
init_dict = {key: getattr(library_descriptor, key) for key in library_descriptor.fields.keys()}
init_dict = {key: getattr(library_block, key) for key in library_block.fields.keys()}
# if set, invalidate '_unwrapped_field_data' so it will be reset
# the next time it will be called
lazy.invalidate(library_descriptor, '_unwrapped_field_data')
lazy.invalidate(library_block, '_unwrapped_field_data')
# pylint: disable=protected-access
library_descriptor._field_data = inheriting_field_data(InheritanceKeyValueStore(init_dict))
library_block._field_data = inheriting_field_data(InheritanceKeyValueStore(init_dict))
def content_importers(self, system, course_descriptor, course_dir, url_name):
def content_importers(self, system, course_block, course_dir, url_name):
"""
Handle Metadata inheritance for Libraries.
"""
self.patch_descriptor_kvs(course_descriptor)
compute_inherited_metadata(course_descriptor)
self.patch_block_kvs(course_block)
compute_inherited_metadata(course_block)
def get_library(self, library_id, depth=0, **kwargs): # pylint: disable=unused-argument
"""
@@ -932,11 +932,11 @@ class LibraryXMLModuleStore(XMLModuleStore):
return None
@staticmethod
def id_from_descriptor(descriptor):
def id_from_block(block):
"""
Get the Library Key from the Library descriptor.
Get the Library Key from the Library block.
"""
return descriptor.location.library_key
return block.location.library_key
def get_orphans(self, course_key, **kwargs):
"""

View File

@@ -111,7 +111,7 @@ class ExportManager:
`modulestore`: A `ModuleStore` object that is the source of the blocks to export
`contentstore`: A `ContentStore` object that is the source of the content to export, can be None
`courselike_key`: The Locator of the Descriptor to export
`courselike_key`: The Locator of the block to export
`root_dir`: The directory to write the exported xml to
`target_dir`: The name of the directory inside `root_dir` to write the content to
"""

View File

@@ -457,7 +457,7 @@ class ImportManager:
@abstractmethod
def get_courselike(self, courselike_key, runtime, dest_id):
"""
Given a key, a runtime, and an intended destination key, get the descriptor for the courselike
Given a key, a runtime, and an intended destination key, get the block for the courselike
we'll be importing into.
"""
raise NotImplementedError
@@ -739,7 +739,7 @@ class LibraryImportManager(ImportManager):
def get_courselike(self, courselike_key, runtime, dest_id):
"""
Get the descriptor of the library from the XML import modulestore.
Get the block of the library from the XML import modulestore.
"""
source_library = self.xml_module_store.get_library(courselike_key)
library, library_data_path = self.import_courselike(
@@ -974,7 +974,7 @@ def _import_course_draft(
# in the list of children since they would have been
# filtered out from the non-draft store export.
if parent_url is not None and index is not None:
course_key = descriptor.location.course_key
course_key = block.location.course_key
parent_location = UsageKey.from_string(parent_url).map_into_course(course_key)
# IMPORTANT: Be sure to update the parent in the NEW namespace
@@ -1018,7 +1018,7 @@ def _import_course_draft(
# Therefore only process verticals at the unit level, assuming that any other
# verticals must be descendants.
if 'index_in_children_list' in xml:
descriptor = system.process_xml(xml)
block = system.process_xml(xml)
# HACK: since we are doing partial imports of drafts
# the vertical doesn't have the 'url-name' set in the
@@ -1026,14 +1026,14 @@ def _import_course_draft(
# aka sequential), so we have to replace the location.name
# with the XML filename that is part of the pack
filename, __ = os.path.splitext(filename)
descriptor.location = descriptor.location.replace(name=filename)
block.location = block.location.replace(name=filename)
index = index_in_children_list(descriptor)
parent_url = get_parent_url(descriptor, xml)
draft_url = str(descriptor.location)
index = index_in_children_list(block)
parent_url = get_parent_url(block, xml)
draft_url = str(block.location)
draft = draft_node_constructor(
block=descriptor, url=draft_url, parent_url=parent_url, index=index
block=block, url=draft_url, parent_url=parent_url, index=index
)
drafts.append(draft)
@@ -1047,7 +1047,7 @@ def _import_course_draft(
try:
_import_block(draft.module)
except Exception: # pylint: disable=broad-except
logging.exception(f'Course import {source_course_id}: while importing draft descriptor {draft.module}')
logging.exception(f'Course import {source_course_id}: while importing draft block {draft.module}')
def allowed_metadata_by_category(category):