def test_get_draft_subtree_roots(self, node_arguments_list, expected_roots_urls):
    """
    Verify that get_draft_subtree_roots returns exactly the nodes whose
    urls are listed in `expected_roots_urls`.
    """
    # Build one draft node per (url, parent_url) argument pair.
    module_nodes = [
        draft_node_constructor(Mock(), args[0], args[1])
        for args in node_arguments_list
    ]
    actual_root_urls = {node.url for node in get_draft_subtree_roots(module_nodes)}
    # check that we return the expected urls
    self.assertEqual(actual_root_urls, set(expected_roots_urls))
def export_to_xml(modulestore, contentstore, course_key, root_dir, course_dir):
    """
    Export all modules from `modulestore` and content from `contentstore` as xml to `root_dir`.

    `modulestore`: A `ModuleStore` object that is the source of the modules to export
    `contentstore`: A `ContentStore` object that is the source of the content to export, can be None
    `course_key`: The `CourseKey` of the `CourseModuleDescriptor` to export
    `root_dir`: The directory to write the exported xml to
    `course_dir`: The name of the directory inside `root_dir` to write the course content to
    """
    with modulestore.bulk_operations(course_key):

        course = modulestore.get_course(course_key, depth=None)  # None means infinite
        fsm = OSFS(root_dir)
        export_fs = course.runtime.export_fs = fsm.makeopendir(course_dir)

        root = lxml.etree.Element('unknown')

        # export only the published content
        with modulestore.branch_setting(ModuleStoreEnum.Branch.published_only, course_key):
            # change all of the references inside the course to use the xml expected key type w/o version & branch
            xml_centric_course_key = CourseLocator(course_key.org, course_key.course, course_key.run, deprecated=True)
            adapt_references(course, xml_centric_course_key, export_fs)

            course.add_xml_to_node(root)

        with export_fs.open('course.xml', 'w') as course_xml:
            lxml.etree.ElementTree(root).write(course_xml)

        # export the static assets
        policies_dir = export_fs.makeopendir('policies')
        if contentstore:
            contentstore.export_all_for_course(
                course_key,
                root_dir + '/' + course_dir + '/static/',
                root_dir + '/' + course_dir + '/policies/assets.json',
            )

            # If we are using the default course image, export it to the
            # legacy location to support backwards compatibility.
            if course.course_image == course.fields['course_image'].default:
                try:
                    course_image = contentstore.find(
                        StaticContent.compute_location(course.id, course.course_image),
                    )
                except NotFoundError:
                    # Default image is not present in the contentstore; nothing to export.
                    pass
                else:
                    output_dir = root_dir + '/' + course_dir + '/static/images/'
                    if not os.path.isdir(output_dir):
                        os.makedirs(output_dir)
                    with OSFS(output_dir).open('course_image.jpg', 'wb') as course_image_file:
                        course_image_file.write(course_image.data)

        # export the static tabs
        export_extra_content(export_fs, modulestore, course_key, xml_centric_course_key, 'static_tab', 'tabs', '.html')

        # export the custom tags
        export_extra_content(export_fs, modulestore, course_key, xml_centric_course_key, 'custom_tag_template', 'custom_tags')

        # export the course updates
        export_extra_content(export_fs, modulestore, course_key, xml_centric_course_key, 'course_info', 'info', '.html')

        # export the 'about' data (e.g. overview, etc.)
        export_extra_content(export_fs, modulestore, course_key, xml_centric_course_key, 'about', 'about', '.html')

        # export the grading policy
        course_run_policy_dir = policies_dir.makeopendir(course.location.name)
        with course_run_policy_dir.open('grading_policy.json', 'w') as grading_policy:
            grading_policy.write(
                dumps(course.grading_policy, cls=EdxJSONEncoder, sort_keys=True, indent=4))

        # export all of the course metadata in policy.json
        with course_run_policy_dir.open('policy.json', 'w') as course_policy:
            policy = {'course/' + course.location.name: own_metadata(course)}
            course_policy.write(
                dumps(policy, cls=EdxJSONEncoder, sort_keys=True, indent=4))

        #### DRAFTS ####
        # xml backed courses don't support drafts!
        if course.runtime.modulestore.get_modulestore_type() != ModuleStoreEnum.Type.xml:
            # NOTE: we need to explicitly implement the logic for setting the vertical's parent
            # and index here since the XML modulestore cannot load draft modules
            with modulestore.branch_setting(ModuleStoreEnum.Branch.draft_preferred, course_key):
                draft_modules = modulestore.get_items(
                    course_key,
                    qualifiers={'category': {'$nin': DIRECT_ONLY_CATEGORIES}},
                    revision=ModuleStoreEnum.RevisionOption.draft_only
                )

                if draft_modules:
                    draft_course_dir = export_fs.makeopendir(DRAFT_DIR)

                    # accumulate tuples of draft_modules and their parents in
                    # this list:
                    draft_node_list = []

                    for draft_module in draft_modules:
                        parent_loc = modulestore.get_parent_location(
                            draft_module.location,
                            revision=ModuleStoreEnum.RevisionOption.draft_preferred
                        )

                        # if module has no parent, set its parent_url to `None`
                        parent_url = None
                        if parent_loc is not None:
                            parent_url = parent_loc.to_deprecated_string()

                        draft_node = draft_node_constructor(
                            draft_module,
                            location=draft_module.location,
                            url=draft_module.location.to_deprecated_string(),
                            parent_location=parent_loc,
                            parent_url=parent_url,
                        )

                        draft_node_list.append(draft_node)

                    for draft_node in get_draft_subtree_roots(draft_node_list):
                        # only export the roots of the draft subtrees
                        # since export_from_xml (called by `add_xml_to_node`)
                        # exports a whole tree

                        # ensure module has "xml_attributes" attr
                        if not hasattr(draft_node.module, 'xml_attributes'):
                            draft_node.module.xml_attributes = {}

                        # Don't try to export orphaned items
                        # and their descendents
                        if draft_node.parent_location is None:
                            continue

                        # Use lazy %-style logging args (consistent with _export_drafts)
                        # instead of eagerly formatting the message.
                        logging.debug('parent_loc = %s', draft_node.parent_location)

                        draft_node.module.xml_attributes['parent_url'] = draft_node.parent_url
                        parent = modulestore.get_item(draft_node.parent_location)
                        index = parent.children.index(draft_node.module.location)
                        draft_node.module.xml_attributes['index_in_children_list'] = str(index)

                        draft_node.module.runtime.export_fs = draft_course_dir
                        adapt_references(draft_node.module, xml_centric_course_key, draft_course_dir)
                        node = lxml.etree.Element('unknown')
                        draft_node.module.add_xml_to_node(node)
def _import_course_draft(
        xml_module_store,
        store,
        user_id,
        course_data_path,
        source_course_id,
        target_course_id,
        mongo_runtime
):
    '''
    This will import all the content inside of the 'drafts' folder, if it exists.

    NOTE: This is not a full course import, basically in our current
    application only verticals (and downwards) can be in draft.
    Therefore, we need to use slightly different call points into
    the import process_xml as we can't simply call XMLModuleStore() constructor
    (like we do for importing public content).
    '''
    draft_dir = course_data_path + "/drafts"
    if not os.path.exists(draft_dir):
        # No drafts folder in the export; nothing to import.
        return

    # create a new 'System' object which will manage the importing
    errorlog = make_error_tracker()

    # The course_dir as passed to ImportSystem is expected to just be relative, not
    # the complete path including data_dir. ImportSystem will concatenate the two together.
    data_dir = xml_module_store.data_dir
    # Whether or not data_dir ends with a "/" differs in production vs. test.
    if not data_dir.endswith("/"):
        data_dir += "/"
    # Strip the absolute prefix, leaving the relative <course_name>/drafts path.
    draft_course_dir = draft_dir.replace(data_dir, '', 1)
    system = ImportSystem(
        xmlstore=xml_module_store,
        course_id=source_course_id,
        course_dir=draft_course_dir,
        error_tracker=errorlog.tracker,
        parent_tracker=ParentTracker(),
        load_error_modules=False,
        mixins=xml_module_store.xblock_mixins,
        field_data=KvsFieldData(kvs=DictKeyValueStore()),
    )

    def _import_module(module):
        # Recursively import `module` and its children into `store` as drafts.
        # IMPORTANT: Be sure to update the module location in the NEW namespace
        module_location = module.location.map_into_course(target_course_id)
        # Update the module's location to DRAFT revision
        # We need to call this method (instead of updating the location directly)
        # to ensure that pure XBlock field data is updated correctly.
        _update_module_location(module, module_location.replace(revision=MongoRevisionKey.draft))

        parent_url = get_parent_url(module)
        index = index_in_children_list(module)

        # make sure our parent has us in its list of children
        # this is to make sure private only modules show up
        # in the list of children since they would have been
        # filtered out from the non-draft store export.
        if parent_url is not None and index is not None:
            # NOTE(review): `descriptor` is not defined in this function — it resolves
            # to the closure variable bound by the os.walk loop below at call time.
            # Verify this is intentional (module.location.course_key may be meant).
            course_key = descriptor.location.course_key
            parent_location = course_key.make_usage_key_from_deprecated_string(parent_url)

            # IMPORTANT: Be sure to update the parent in the NEW namespace
            parent_location = parent_location.map_into_course(target_course_id)

            parent = store.get_item(parent_location, depth=0)

            non_draft_location = module.location.map_into_course(target_course_id)
            if not any(child.block_id == module.location.block_id for child in parent.children):
                parent.children.insert(index, non_draft_location)
                store.update_item(parent, user_id)

        _import_module_and_update_references(
            module, store, user_id,
            source_course_id, target_course_id,
            runtime=mongo_runtime,
        )
        for child in module.get_children():
            _import_module(child)

    # now walk the /vertical directory where each file in there
    # will be a draft copy of the Vertical

    # First it is necessary to order the draft items by their desired index in the child list
    # (order os.walk returns them in is not guaranteed).
    drafts = []
    for dirname, _dirnames, filenames in os.walk(draft_dir):
        for filename in filenames:
            module_path = os.path.join(dirname, filename)
            with open(module_path, 'r') as f:
                try:
                    # note, on local dev it seems like OSX will put
                    # some extra files in the directory with "quarantine"
                    # information. These files are binary files and will
                    # throw exceptions when we try to parse the file
                    # as an XML string. Let's make sure we're
                    # dealing with a string before ingesting
                    data = f.read()

                    try:
                        xml = data.decode('utf-8')
                    except UnicodeDecodeError, err:
                        # seems like on OSX localdev, the OS is making
                        # quarantine files in the unzip directory
                        # when importing courses so if we blindly try to
                        # enumerate through the directory, we'll try
                        # to process a bunch of binary quarantine files
                        # (which are prefixed with a '._' character which
                        # will dump a bunch of exceptions to the output,
                        # although they are harmless.
                        #
                        # Reading online docs there doesn't seem to be
                        # a good means to detect a 'hidden' file that works
                        # well across all OS environments. So for now, I'm using
                        # OSX's utilization of a leading '.' in the filename
                        # to indicate a system hidden file.
                        #
                        # Better yet would be a way to figure out if this is
                        # a binary file, but I haven't found a good way
                        # to do this yet.
                        if filename.startswith('._'):
                            continue
                        # Not a 'hidden file', then re-raise exception
                        raise err

                    # process_xml call below recursively processes all descendants. If
                    # we call this on all verticals in a course with verticals nested below
                    # the unit level, we try to import the same content twice, causing naming conflicts.
                    # Therefore only process verticals at the unit level, assuming that any other
                    # verticals must be descendants.
                    if 'index_in_children_list' in xml:
                        descriptor = system.process_xml(xml)

                        # HACK: since we are doing partial imports of drafts
                        # the vertical doesn't have the 'url-name' set in the
                        # attributes (they are normally in the parent object,
                        # aka sequential), so we have to replace the location.name
                        # with the XML filename that is part of the pack
                        filename, __ = os.path.splitext(filename)
                        descriptor.location = descriptor.location.replace(name=filename)

                        index = index_in_children_list(descriptor)
                        parent_url = get_parent_url(descriptor, xml)
                        draft_url = descriptor.location.to_deprecated_string()

                        draft = draft_node_constructor(
                            module=descriptor, url=draft_url, parent_url=parent_url, index=index
                        )
                        drafts.append(draft)

                except Exception:  # pylint: disable=broad-except
                    logging.exception('Error while parsing course xml.')
    # NOTE(review): `drafts` is accumulated here but never sorted/imported within
    # this function as shown (cf. the newer _import_course_draft variant, which
    # sorts by index and calls _import_module on the subtree roots) — confirm
    # whether the tail of this function is missing from this chunk.
def _import_course_draft(
        xml_module_store,
        store,
        user_id,
        course_data_path,
        source_course_id,
        target_id,
        mongo_runtime
):
    """
    This method will import all the content inside of the 'drafts' folder, if content exists.

    NOTE: This is not a full course import! In our current application, only verticals
    (and blocks beneath) can be in draft. Therefore, different call points into the import
    process_xml are used as the XMLModuleStore() constructor cannot simply be called
    (as is done for importing public content).
    """
    draft_dir = course_data_path + "/drafts"
    if not os.path.exists(draft_dir):
        # No drafts folder in the export; nothing to import.
        return

    # create a new 'System' object which will manage the importing
    errorlog = make_error_tracker()

    # The course_dir as passed to ImportSystem is expected to just be relative, not
    # the complete path including data_dir. ImportSystem will concatenate the two together.
    data_dir = xml_module_store.data_dir
    # Whether or not data_dir ends with a "/" differs in production vs. test.
    if not data_dir.endswith("/"):
        data_dir += "/"
    # Remove absolute path, leaving relative <course_name>/drafts.
    draft_course_dir = draft_dir.replace(data_dir, '', 1)
    system = ImportSystem(
        xmlstore=xml_module_store,
        course_id=source_course_id,
        course_dir=draft_course_dir,
        error_tracker=errorlog.tracker,
        load_error_modules=False,
        mixins=xml_module_store.xblock_mixins,
        field_data=KvsFieldData(kvs=DictKeyValueStore()),
        target_course_id=target_id,
    )

    def _import_module(module):
        # Recursively import `module` and its children into `store` as drafts.
        # IMPORTANT: Be sure to update the module location in the NEW namespace
        module_location = module.location.map_into_course(target_id)
        # Update the module's location to DRAFT revision
        # We need to call this method (instead of updating the location directly)
        # to ensure that pure XBlock field data is updated correctly.
        _update_module_location(module, module_location.replace(revision=MongoRevisionKey.draft))

        parent_url = get_parent_url(module)
        index = index_in_children_list(module)

        # make sure our parent has us in its list of children
        # this is to make sure private only modules show up
        # in the list of children since they would have been
        # filtered out from the non-draft store export.
        if parent_url is not None and index is not None:
            # NOTE(review): `descriptor` is not defined in this function — it resolves
            # to the closure variable bound by the os.walk loop below at call time.
            # Verify this is intentional (module.location.course_key may be meant).
            course_key = descriptor.location.course_key
            parent_location = course_key.make_usage_key_from_deprecated_string(parent_url)

            # IMPORTANT: Be sure to update the parent in the NEW namespace
            parent_location = parent_location.map_into_course(target_id)

            parent = store.get_item(parent_location, depth=0)

            non_draft_location = module.location.map_into_course(target_id)
            if not any(child.block_id == module.location.block_id for child in parent.children):
                parent.children.insert(index, non_draft_location)
                store.update_item(parent, user_id)

        _update_and_import_module(
            module, store, user_id,
            source_course_id, target_id,
            runtime=mongo_runtime,
        )
        for child in module.get_children():
            _import_module(child)

    # Now walk the /drafts directory.
    # Each file in the directory will be a draft copy of the vertical.

    # First it is necessary to order the draft items by their desired index in the child list,
    # since the order in which os.walk() returns the files is not guaranteed.
    drafts = []
    for rootdir, __, filenames in os.walk(draft_dir):
        for filename in filenames:
            if filename.startswith('._'):
                # Skip any OSX quarantine files, prefixed with a '._'.
                continue
            module_path = os.path.join(rootdir, filename)
            with open(module_path, 'r') as f:
                try:
                    xml = f.read().decode('utf-8')

                    # The process_xml() call below recursively processes all descendants. If
                    # we call this on all verticals in a course with verticals nested below
                    # the unit level, we try to import the same content twice, causing naming conflicts.
                    # Therefore only process verticals at the unit level, assuming that any other
                    # verticals must be descendants.
                    if 'index_in_children_list' in xml:
                        descriptor = system.process_xml(xml)

                        # HACK: since we are doing partial imports of drafts
                        # the vertical doesn't have the 'url-name' set in the
                        # attributes (they are normally in the parent object,
                        # aka sequential), so we have to replace the location.name
                        # with the XML filename that is part of the pack
                        filename, __ = os.path.splitext(filename)
                        descriptor.location = descriptor.location.replace(name=filename)

                        index = index_in_children_list(descriptor)
                        parent_url = get_parent_url(descriptor, xml)
                        draft_url = unicode(descriptor.location)

                        draft = draft_node_constructor(
                            module=descriptor, url=draft_url, parent_url=parent_url, index=index
                        )
                        drafts.append(draft)

                except Exception:  # pylint: disable=broad-except
                    logging.exception('Error while parsing course drafts xml.')

    # Sort drafts by `index_in_children_list` attribute.
    drafts.sort(key=lambda x: x.index)

    # Import only subtree roots; _import_module recurses into children itself.
    for draft in get_draft_subtree_roots(drafts):
        try:
            _import_module(draft.module)
        except Exception:  # pylint: disable=broad-except
            logging.exception('while importing draft descriptor %s', draft.module)
def _export_drafts(modulestore, course_key, export_fs, xml_centric_course_key):
    """
    Exports course drafts.

    Writes the draft subtree roots (only modules with unpublished changes)
    into the DRAFT_DIR of `export_fs`, recording each root's parent url and
    child index in its xml_attributes so the import side can re-attach it.
    """
    # NOTE: we need to explicitly implement the logic for setting the vertical's parent
    # and index here since the XML modulestore cannot load draft modules
    with modulestore.branch_setting(ModuleStoreEnum.Branch.draft_preferred, course_key):
        draft_modules = modulestore.get_items(
            course_key,
            qualifiers={'category': {'$nin': DIRECT_ONLY_CATEGORIES}},
            revision=ModuleStoreEnum.RevisionOption.draft_only
        )

        # Check to see if the returned draft modules have changes w.r.t. the published module.
        # Only modules with changes will be exported into the /drafts directory.
        draft_modules = [module for module in draft_modules if modulestore.has_changes(module)]

        if draft_modules:
            draft_course_dir = export_fs.makeopendir(DRAFT_DIR)

            # accumulate tuples of draft_modules and their parents in
            # this list:
            draft_node_list = []

            for draft_module in draft_modules:
                parent_loc = modulestore.get_parent_location(
                    draft_module.location,
                    revision=ModuleStoreEnum.RevisionOption.draft_preferred)

                # if module has no parent, set its parent_url to `None`
                parent_url = None
                if parent_loc is not None:
                    parent_url = parent_loc.to_deprecated_string()

                draft_node = draft_node_constructor(
                    draft_module,
                    location=draft_module.location,
                    url=draft_module.location.to_deprecated_string(),
                    parent_location=parent_loc,
                    parent_url=parent_url,
                )

                draft_node_list.append(draft_node)

            for draft_node in get_draft_subtree_roots(draft_node_list):
                # only export the roots of the draft subtrees
                # since export_from_xml (called by `add_xml_to_node`)
                # exports a whole tree

                # ensure module has "xml_attributes" attr
                if not hasattr(draft_node.module, 'xml_attributes'):
                    draft_node.module.xml_attributes = {}

                # Don't try to export orphaned items
                # and their descendents
                if draft_node.parent_location is None:
                    continue

                logging.debug('parent_loc = %s', draft_node.parent_location)

                draft_node.module.xml_attributes['parent_url'] = draft_node.parent_url
                parent = modulestore.get_item(draft_node.parent_location)
                index = parent.children.index(draft_node.module.location)
                draft_node.module.xml_attributes['index_in_children_list'] = str(index)

                draft_node.module.runtime.export_fs = draft_course_dir
                adapt_references(draft_node.module, xml_centric_course_key, draft_course_dir)
                node = lxml.etree.Element('unknown')
                draft_node.module.add_xml_to_node(node)
def _export_drafts(modulestore, course_key, export_fs, xml_centric_course_key):
    """
    Exports course drafts.

    Writes the draft subtree roots (only modules with unpublished changes)
    into the DRAFT_DIR of `export_fs`, recording each root's parent url and
    child index in its xml_attributes so the import side can re-attach it.
    """
    # NOTE: we need to explicitly implement the logic for setting the vertical's parent
    # and index here since the XML modulestore cannot load draft modules
    with modulestore.branch_setting(ModuleStoreEnum.Branch.draft_preferred, course_key):
        draft_modules = modulestore.get_items(
            course_key,
            qualifiers={"category": {"$nin": DIRECT_ONLY_CATEGORIES}},
            revision=ModuleStoreEnum.RevisionOption.draft_only,
        )

        # Check to see if the returned draft modules have changes w.r.t. the published module.
        # Only modules with changes will be exported into the /drafts directory.
        draft_modules = [module for module in draft_modules if modulestore.has_changes(module)]

        if draft_modules:
            draft_course_dir = export_fs.makeopendir(DRAFT_DIR)

            # accumulate tuples of draft_modules and their parents in
            # this list:
            draft_node_list = []

            for draft_module in draft_modules:
                parent_loc = modulestore.get_parent_location(
                    draft_module.location,
                    revision=ModuleStoreEnum.RevisionOption.draft_preferred
                )

                # if module has no parent, set its parent_url to `None`
                parent_url = None
                if parent_loc is not None:
                    parent_url = parent_loc.to_deprecated_string()

                draft_node = draft_node_constructor(
                    draft_module,
                    location=draft_module.location,
                    url=draft_module.location.to_deprecated_string(),
                    parent_location=parent_loc,
                    parent_url=parent_url,
                )

                draft_node_list.append(draft_node)

            for draft_node in get_draft_subtree_roots(draft_node_list):
                # only export the roots of the draft subtrees
                # since export_from_xml (called by `add_xml_to_node`)
                # exports a whole tree

                # ensure module has "xml_attributes" attr
                if not hasattr(draft_node.module, "xml_attributes"):
                    draft_node.module.xml_attributes = {}

                # Don't try to export orphaned items
                # and their descendents
                if draft_node.parent_location is None:
                    continue

                logging.debug("parent_loc = %s", draft_node.parent_location)

                draft_node.module.xml_attributes["parent_url"] = draft_node.parent_url
                parent = modulestore.get_item(draft_node.parent_location)
                index = parent.children.index(draft_node.module.location)
                draft_node.module.xml_attributes["index_in_children_list"] = str(index)

                draft_node.module.runtime.export_fs = draft_course_dir
                adapt_references(draft_node.module, xml_centric_course_key, draft_course_dir)
                node = lxml.etree.Element("unknown")
                draft_node.module.add_xml_to_node(node)
def export_to_xml(modulestore, contentstore, course_key, root_dir, course_dir):
    """
    Export all modules from `modulestore` and content from `contentstore` as xml to `root_dir`.

    `modulestore`: A `ModuleStore` object that is the source of the modules to export
    `contentstore`: A `ContentStore` object that is the source of the content to export, can be None
    `course_key`: The `CourseKey` of the `CourseModuleDescriptor` to export
    `root_dir`: The directory to write the exported xml to
    `course_dir`: The name of the directory inside `root_dir` to write the course content to
    """
    with modulestore.bulk_operations(course_key):

        course = modulestore.get_course(course_key, depth=None)  # None means infinite
        fsm = OSFS(root_dir)
        export_fs = course.runtime.export_fs = fsm.makeopendir(course_dir)

        root = lxml.etree.Element('unknown')

        # export only the published content
        with modulestore.branch_setting(ModuleStoreEnum.Branch.published_only, course_key):
            # change all of the references inside the course to use the xml expected key type w/o version & branch
            xml_centric_course_key = CourseLocator(course_key.org, course_key.course, course_key.run, deprecated=True)
            adapt_references(course, xml_centric_course_key, export_fs)

            course.add_xml_to_node(root)

        with export_fs.open('course.xml', 'w') as course_xml:
            lxml.etree.ElementTree(root).write(course_xml)

        # export the static assets
        policies_dir = export_fs.makeopendir('policies')
        if contentstore:
            contentstore.export_all_for_course(
                course_key,
                root_dir + '/' + course_dir + '/static/',
                root_dir + '/' + course_dir + '/policies/assets.json',
            )

            # If we are using the default course image, export it to the
            # legacy location to support backwards compatibility.
            if course.course_image == course.fields['course_image'].default:
                try:
                    course_image = contentstore.find(
                        StaticContent.compute_location(
                            course.id,
                            course.course_image
                        ),
                    )
                except NotFoundError:
                    # Default image is not present in the contentstore; nothing to export.
                    pass
                else:
                    output_dir = root_dir + '/' + course_dir + '/static/images/'
                    if not os.path.isdir(output_dir):
                        os.makedirs(output_dir)
                    with OSFS(output_dir).open('course_image.jpg', 'wb') as course_image_file:
                        course_image_file.write(course_image.data)

        # export the static tabs
        export_extra_content(export_fs, modulestore, course_key, xml_centric_course_key, 'static_tab', 'tabs', '.html')

        # export the custom tags
        export_extra_content(export_fs, modulestore, course_key, xml_centric_course_key, 'custom_tag_template', 'custom_tags')

        # export the course updates
        export_extra_content(export_fs, modulestore, course_key, xml_centric_course_key, 'course_info', 'info', '.html')

        # export the 'about' data (e.g. overview, etc.)
        export_extra_content(export_fs, modulestore, course_key, xml_centric_course_key, 'about', 'about', '.html')

        # export the grading policy
        course_run_policy_dir = policies_dir.makeopendir(course.location.name)
        with course_run_policy_dir.open('grading_policy.json', 'w') as grading_policy:
            grading_policy.write(dumps(course.grading_policy, cls=EdxJSONEncoder, sort_keys=True, indent=4))

        # export all of the course metadata in policy.json
        with course_run_policy_dir.open('policy.json', 'w') as course_policy:
            policy = {'course/' + course.location.name: own_metadata(course)}
            course_policy.write(dumps(policy, cls=EdxJSONEncoder, sort_keys=True, indent=4))

        #### DRAFTS ####
        # xml backed courses don't support drafts!
        if course.runtime.modulestore.get_modulestore_type() != ModuleStoreEnum.Type.xml:
            # NOTE: we need to explicitly implement the logic for setting the vertical's parent
            # and index here since the XML modulestore cannot load draft modules
            with modulestore.branch_setting(ModuleStoreEnum.Branch.draft_preferred, course_key):
                draft_modules = modulestore.get_items(
                    course_key,
                    qualifiers={'category': {'$nin': DIRECT_ONLY_CATEGORIES}},
                    revision=ModuleStoreEnum.RevisionOption.draft_only
                )

                if draft_modules:
                    draft_course_dir = export_fs.makeopendir(DRAFT_DIR)

                    # accumulate tuples of draft_modules and their parents in
                    # this list:
                    draft_node_list = []

                    for draft_module in draft_modules:
                        parent_loc = modulestore.get_parent_location(
                            draft_module.location,
                            revision=ModuleStoreEnum.RevisionOption.draft_preferred
                        )

                        # if module has no parent, set its parent_url to `None`
                        parent_url = None
                        if parent_loc is not None:
                            parent_url = parent_loc.to_deprecated_string()

                        draft_node = draft_node_constructor(
                            draft_module,
                            location=draft_module.location,
                            url=draft_module.location.to_deprecated_string(),
                            parent_location=parent_loc,
                            parent_url=parent_url,
                        )

                        draft_node_list.append(draft_node)

                    for draft_node in get_draft_subtree_roots(draft_node_list):
                        # only export the roots of the draft subtrees
                        # since export_from_xml (called by `add_xml_to_node`)
                        # exports a whole tree

                        # ensure module has "xml_attributes" attr
                        if not hasattr(draft_node.module, 'xml_attributes'):
                            draft_node.module.xml_attributes = {}

                        # Don't try to export orphaned items
                        # and their descendents
                        if draft_node.parent_location is None:
                            continue

                        # Use lazy %-style logging args (consistent with _export_drafts)
                        # instead of eagerly formatting the message.
                        logging.debug('parent_loc = %s', draft_node.parent_location)

                        draft_node.module.xml_attributes['parent_url'] = draft_node.parent_url
                        parent = modulestore.get_item(draft_node.parent_location)
                        index = parent.children.index(draft_node.module.location)
                        draft_node.module.xml_attributes['index_in_children_list'] = str(index)

                        draft_node.module.runtime.export_fs = draft_course_dir
                        adapt_references(draft_node.module, xml_centric_course_key, draft_course_dir)
                        node = lxml.etree.Element('unknown')
                        draft_node.module.add_xml_to_node(node)
class TestUtils(unittest.TestCase):
    """
    Tests for store_utilities

    ASCII trees for ONLY_ROOTS and SOME_TREES:

    ONLY_ROOTS:
    1) vertical (not draft) | url1
    2) sequential (not draft) | url2

    SOME_TREES:
    1)
            sequential_1 (not draft)
                  |
              vertical_1
               /      \
              /        \
        child_1      child_2

    2)
        great_grandparent_vertical (not draft)
                  |
         grandparent_vertical
                  |
              vertical_2
               /      \
              /        \
        child_3      child_4
    """
    shard = 2

    ONLY_ROOTS = [
        draft_node_constructor(Mock(), 'url1', 'vertical'),
        draft_node_constructor(Mock(), 'url2', 'sequential'),
    ]
    ONLY_ROOTS_URLS = ['url1', 'url2']

    SOME_TREES = [
        draft_node_constructor(Mock(), 'child_1', 'vertical_1'),
        draft_node_constructor(Mock(), 'child_2', 'vertical_1'),
        draft_node_constructor(Mock(), 'vertical_1', 'sequential_1'),
        draft_node_constructor(Mock(), 'child_3', 'vertical_2'),
        draft_node_constructor(Mock(), 'child_4', 'vertical_2'),
        draft_node_constructor(Mock(), 'vertical_2', 'grandparent_vertical'),
        draft_node_constructor(Mock(), 'grandparent_vertical', 'great_grandparent_vertical'),
    ]
    SOME_TREES_ROOTS_URLS = ['vertical_1', 'grandparent_vertical']

    @ddt.data(
        (ONLY_ROOTS, ONLY_ROOTS_URLS),
        (SOME_TREES, SOME_TREES_ROOTS_URLS),
    )
    @ddt.unpack
    def test_get_draft_subtree_roots(self, module_nodes, expected_roots_urls):
        """Verify get_draft_subtree_roots yields exactly the expected root urls."""
        actual_root_urls = {node.url for node in get_draft_subtree_roots(module_nodes)}
        self.assertEqual(actual_root_urls, set(expected_roots_urls))