def test_name_collision(self):
    """
    Test dwim translation when the old name was not unique
    """
    org = "myorg"
    course = "another_course"
    name = "running_again"
    course_loc = Location('i4x', org, course, 'course', name)
    course_locator = loc_mapper().translate_location(
        None, course_loc, add_entry_if_missing=True
    )
    self.assertEqual(
        course_loc,
        loc_mapper().translate_locator_to_location(course_locator)
    )
    # a chapter that reuses the course's name must get its own mapping
    chapter_loc = course_loc.replace(category='chapter')
    chapter_locator = loc_mapper().translate_location(
        None, chapter_loc, add_entry_if_missing=True
    )
    # the course's mapping must survive the collision...
    self.assertEqual(
        course_loc,
        loc_mapper().translate_locator_to_location(course_locator)
    )
    # ...and the chapter must round-trip to itself
    self.assertEqual(
        chapter_loc,
        loc_mapper().translate_locator_to_location(chapter_locator)
    )
    # and a non-existent one w/o add
    problem_loc = course_loc.replace(category='problem')
    with self.assertRaises(ItemNotFoundError):
        loc_mapper().translate_location(
            None, problem_loc, add_entry_if_missing=False
        )
def _query_children_for_cache_children(self, items):
    """
    Fetch the given child items, replacing each published version with
    its draft version whenever a draft exists.
    """
    # first round-trip: published (non-draft) versions from the parent store
    published_docs = super(DraftModuleStore, self)._query_children_for_cache_children(items)
    children_by_loc = {Location(doc["_id"]): doc for doc in published_docs}
    # second round-trip: all draft versions of the requested items
    draft_ids = [namedtuple_to_son(as_draft(Location(item))) for item in items]
    draft_docs = list(self.collection.find({'_id': {'$in': draft_ids}}))
    # The semantics of the DraftStore is to always return the draft, if
    # available, so overwrite each published doc with its draft counterpart.
    for draft_doc in draft_docs:
        published_loc = Location(draft_doc["_id"]).replace(revision=None)
        # only replace entries that actually exist in the published set
        if published_loc in children_by_loc:
            children_by_loc[published_loc] = draft_doc
    # the dict existed only for lookups; hand back a plain list of docs
    return children_by_loc.values()
def _construct(cls, system, contents, error_msg, location):
    """
    Build an error XBlock wrapping `contents`, first coercing `location`
    into a Location with a usable name.
    """
    if isinstance(location, dict) and 'course' in location:
        location = Location(location)
    if isinstance(location, Location) and location.name is None:
        # Pick a unique url_name -- the sha1 hash of the contents.
        # NOTE: We could try to pull out the url_name of the errored descriptor,
        # but url_names aren't guaranteed to be unique between descriptor types,
        # and ErrorDescriptor can wrap any type. When the wrapped module is fixed,
        # it will be written out with the original url_name.
        location = location.replace(
            category='error',
            name=hashlib.sha1(contents.encode('utf8')).hexdigest()
        )
    # real metadata stays in the content, but add a display name
    error_fields = {
        'error_msg': str(error_msg),
        'contents': contents,
        'display_name': 'Error: ' + location.url(),
        'location': location,
        'category': 'error'
    }
    # The error module doesn't use scoped data, and thus doesn't need
    # real scope keys
    scope_ids = ScopeIds('error', None, location, location)
    return system.construct_xblock_from_class(cls, DictFieldData(error_fields), scope_ids)
def _construct(cls, system, contents, error_msg, location):
    """
    Build an error descriptor instance wrapping `contents`, first
    coercing `location` into a Location with a usable name.
    """
    if isinstance(location, dict) and 'course' in location:
        location = Location(location)
    if isinstance(location, Location) and location.name is None:
        # Pick a unique url_name -- the sha1 hash of the contents.
        # NOTE: We could try to pull out the url_name of the errored descriptor,
        # but url_names aren't guaranteed to be unique between descriptor types,
        # and ErrorDescriptor can wrap any type. When the wrapped module is fixed,
        # it will be written out with the original url_name.
        unique_name = hashlib.sha1(contents.encode('utf8')).hexdigest()
        location = location.replace(category='error', name=unique_name)
    # real metadata stays in the content, but add a display name
    error_data = {
        'error_msg': str(error_msg),
        'contents': contents,
        'display_name': 'Error: ' + location.url(),
        'location': location,
        'category': 'error'
    }
    return cls(system, error_data)
def create_item(request):
    """
    Create a new xmodule of the POSTed category under the POSTed parent,
    optionally seeded from a boilerplate template, and attach it to the
    parent's children list (unless the category is detached).
    """
    parent_location = Location(request.POST["parent_location"])
    category = request.POST["category"]
    display_name = request.POST.get("display_name")

    if not has_access(request.user, parent_location):
        raise PermissionDenied()

    parent = get_modulestore(category).get_item(parent_location)
    # new item gets a random, unique url_name under the parent's course
    dest_location = parent_location.replace(category=category, name=uuid4().hex)

    # get the metadata, display_name, and definition from the request
    metadata = {}
    data = None
    boilerplate_id = request.POST.get("boilerplate")
    if boilerplate_id is not None:
        component_class = XModuleDescriptor.load_class(category)
        if component_class is not None:
            boilerplate = component_class.get_template(boilerplate_id)
            if boilerplate is not None:
                metadata = boilerplate.get("metadata", {})
                data = boilerplate.get("data")
    if display_name is not None:
        metadata["display_name"] = display_name

    get_modulestore(category).create_and_save_xmodule(
        dest_location,
        definition_data=data,
        metadata=metadata,
        system=parent.system
    )

    # detached categories are not listed among the parent's children
    if category not in DETACHED_CATEGORIES:
        get_modulestore(parent.location).update_children(
            parent_location, parent.children + [dest_location.url()]
        )

    return JsonResponse({"id": dest_location.url()})
def _construct(cls, system, contents, error_msg, location):
    """
    Build an error XBlock wrapping `contents` at `location`, supplying a
    default message when none was given.
    """
    location = Location(location)
    if error_msg is None:
        # this string is not marked for translation because we don't have
        # access to the user context, and this will only be seen by staff
        error_msg = 'Error not available'
    if location.category == 'error':
        # Pick a unique url_name -- the sha1 hash of the contents.
        # NOTE: We could try to pull out the url_name of the errored descriptor,
        # but url_names aren't guaranteed to be unique between descriptor types,
        # and ErrorDescriptor can wrap any type. When the wrapped module is fixed,
        # it will be written out with the original url_name.
        unique_name = hashlib.sha1(contents.encode('utf8')).hexdigest()
        location = location.replace(name=unique_name)
    # real metadata stays in the content, but add a display name
    error_fields = DictFieldData({
        'error_msg': str(error_msg),
        'contents': contents,
        'location': location,
        'category': 'error'
    })
    # The error module doesn't use scoped data, and thus doesn't need
    # real scope keys
    return system.construct_xblock_from_class(
        cls,
        ScopeIds('error', None, location, location),
        error_fields,
    )
def _construct(cls, system, contents, error_msg, location):
    """
    Create an error descriptor for `contents` at `location`.
    """
    if isinstance(location, dict) and 'course' in location:
        location = Location(location)
    if isinstance(location, Location) and location.name is None:
        # Pick a unique url_name -- the sha1 hash of the contents.
        # NOTE: We could try to pull out the url_name of the errored descriptor,
        # but url_names aren't guaranteed to be unique between descriptor types,
        # and ErrorDescriptor can wrap any type. When the wrapped module is fixed,
        # it will be written out with the original url_name.
        location = location.replace(
            category='error',
            name=hashlib.sha1(contents.encode('utf8')).hexdigest()
        )
    # real metadata stays in the content, but add a display name
    return cls(
        system,
        {
            'error_msg': str(error_msg),
            'contents': contents,
            'display_name': 'Error: ' + location.url(),
            'location': location,
            'category': 'error'
        },
    )
def _construct(cls, system, contents, error_msg, location):
    """
    Create an error XBlock for `contents` at `location`.
    """
    location = Location(location)
    if location.category == "error":
        # Pick a unique url_name -- the sha1 hash of the contents.
        # NOTE: We could try to pull out the url_name of the errored descriptor,
        # but url_names aren't guaranteed to be unique between descriptor types,
        # and ErrorDescriptor can wrap any type. When the wrapped module is fixed,
        # it will be written out with the original url_name.
        unique_name = hashlib.sha1(contents.encode("utf8")).hexdigest()
        location = location.replace(name=unique_name)
    # real metadata stays in the content, but add a display name
    error_fields = {
        "error_msg": str(error_msg),
        "contents": contents,
        "display_name": "Error: " + location.url(),
        "location": location,
        "category": "error",
    }
    # The error module doesn't use scoped data, and thus doesn't need
    # real scope keys
    return system.construct_xblock_from_class(
        cls,
        ScopeIds("error", None, location, location),
        DictFieldData(error_fields),
    )
def _import_module(module):
    """
    Recursively import `module` and its children into the draft store,
    wiring private-only verticals back into their parent sequential.
    """
    module.location = module.location.replace(revision='draft')
    # make sure our parent has us in its list of children
    # this is to make sure private only verticals show up
    # in the list of children since they would have been
    # filtered out from the non-draft store export
    if module.location.category == 'vertical':
        published_location = module.location.replace(revision=None)
        parent_url = module.xml_attributes['parent_sequential_url']
        child_index = int(module.xml_attributes['index_in_children_list'])
        # IMPORTANT: Be sure to update the sequential
        # in the NEW namespace
        parent_location = Location(parent_url).replace(
            org=target_location_namespace.org,
            course=target_location_namespace.course
        )
        sequential = store.get_item(parent_location, depth=0)
        if published_location.url() not in sequential.children:
            sequential.children.insert(child_index, published_location.url())
            store.update_item(sequential, '**replace_user**')
    import_module(
        module,
        draft_store,
        course_data_path,
        static_content_store,
        source_location_namespace,
        target_location_namespace,
        allow_not_found=True
    )
    for child in module.get_children():
        _import_module(child)
def _construct(cls, system, contents, error_msg, location):
    """
    Create an error XBlock wrapping `contents`, ensuring the location has
    a usable name first.
    """
    if isinstance(location, dict) and 'course' in location:
        location = Location(location)
    if isinstance(location, Location) and location.name is None:
        # Pick a unique url_name -- the sha1 hash of the contents.
        # NOTE: We could try to pull out the url_name of the errored descriptor,
        # but url_names aren't guaranteed to be unique between descriptor types,
        # and ErrorDescriptor can wrap any type. When the wrapped module is fixed,
        # it will be written out with the original url_name.
        digest = hashlib.sha1(contents.encode('utf8')).hexdigest()
        location = location.replace(category='error', name=digest)
    # real metadata stays in the content, but add a display name
    data = {
        'error_msg': str(error_msg),
        'contents': contents,
        'display_name': 'Error: ' + location.url(),
        'location': location,
        'category': 'error'
    }
    # The error module doesn't use scoped data, and thus doesn't need
    # real scope keys
    return system.construct_xblock_from_class(
        cls,
        DictFieldData(data),
        ScopeIds('error', None, location, location)
    )
def test_post_course_update(self):
    """
    Test that a user can successfully post on course updates and handouts of a course
    whose location in not in loc_mapper
    """
    # create a course via the view handler
    course_location = Location(['i4x', 'Org_1', 'Course_1', 'course', 'Run_1'])
    course_locator = loc_mapper().translate_location(
        course_location.course_id, course_location, False, True
    )
    self.client.ajax_post(
        course_locator.url_reverse('course'),
        {
            'org': course_location.org,
            'number': course_location.course,
            'display_name': 'test course',
            'run': course_location.name,
        }
    )
    # build an updates locator by hand (old-style course_id with '/' -> '.')
    branch = u'draft'
    version = None
    block = u'updates'
    updates_locator = BlockUsageLocator(
        package_id=course_location.course_id.replace('/', '.'),
        branch=branch,
        version_guid=version,
        block_id=block
    )
    content = u"Sample update"
    payload = {'content': content, 'date': 'January 8, 2013'}
    course_update_url = updates_locator.url_reverse('course_info_update')
    resp = self.client.ajax_post(course_update_url, payload)
    # check that response status is 200 not 400
    self.assertEqual(resp.status_code, 200)
    payload = json.loads(resp.content)
    self.assertHTMLEqual(payload['content'], content)
    # now test that calling translate_location returns a locator whose block_id is 'updates'
    updates_location = course_location.replace(category='course_info', name=block)
    updates_locator = loc_mapper().translate_location(course_location.course_id, updates_location)
    self.assertTrue(isinstance(updates_locator, BlockUsageLocator))
    self.assertEqual(updates_locator.block_id, block)
    # check posting on handouts
    block = u'handouts'
    handouts_locator = BlockUsageLocator(
        package_id=updates_locator.package_id,
        branch=updates_locator.branch,
        version_guid=version,
        block_id=block
    )
    course_handouts_url = handouts_locator.url_reverse('xblock')
    content = u"Sample handout"
    payload = {"data": content}
    resp = self.client.ajax_post(course_handouts_url, payload)
    # check that response status is 200 not 500
    self.assertEqual(resp.status_code, 200)
    payload = json.loads(resp.content)
    self.assertHTMLEqual(payload['data'], content)
def create_new_course(request):
    """
    Create a new course
    """
    if not is_user_in_creator_group(request.user):
        raise PermissionDenied()

    org = request.POST.get("org")
    number = request.POST.get("number")
    display_name = request.POST.get("display_name")

    try:
        dest_location = Location("i4x", org, number, "course", Location.clean(display_name))
    except InvalidLocationError as error:
        return JsonResponse(
            {"ErrMsg": "Unable to create course '{name}'.\n\n{err}".format(name=display_name, err=error.message)}
        )

    # see if the course already exists
    existing_course = None
    try:
        existing_course = modulestore("direct").get_item(dest_location)
    except ItemNotFoundError:
        pass
    if existing_course is not None:
        return JsonResponse({"ErrMsg": "There is already a course defined with this name."})

    # also reject a duplicate org/number pair, regardless of run name
    course_search_location = ["i4x", dest_location.org, dest_location.course, "course", None]
    courses = modulestore().get_items(course_search_location)
    if len(courses) > 0:
        return JsonResponse(
            {"ErrMsg": "There is already a course defined with the same organization and course number."}
        )

    # instantiate the CourseDescriptor and then persist it
    # note: no system to pass
    metadata = {} if display_name is None else {"display_name": display_name}
    modulestore("direct").create_and_save_xmodule(dest_location, metadata=metadata)
    new_course = modulestore("direct").get_item(dest_location)

    # clone a default 'about' overview module as well
    about_location = dest_location.replace(category="about", name="overview")
    overview_template = AboutDescriptor.get_template("overview.yaml")
    modulestore("direct").create_and_save_xmodule(
        about_location,
        system=new_course.system,
        definition_data=overview_template.get("data")
    )

    initialize_course_tabs(new_course)
    create_all_course_groups(request.user, new_course.location)
    # seed the forums
    seed_permissions_roles(new_course.location.course_id)
    return JsonResponse({"id": new_course.location.url()})
def test_name_collision(self):
    """
    Test dwim translation when the old name was not unique
    """
    org = "myorg"
    course = "another_course"
    name = "running_again"
    course_location = Location('i4x', org, course, 'course', name)
    course_xlate = loc_mapper().translate_location(None, course_location, add_entry_if_missing=True)
    self.assertEqual(course_location, loc_mapper().translate_locator_to_location(course_xlate))
    # a block of a different category that reuses the course's name
    eponymous_block = course_location.replace(category='chapter')
    chapter_xlate = loc_mapper().translate_location(None, eponymous_block, add_entry_if_missing=True)
    # the course's mapping must survive the collision...
    self.assertEqual(course_location, loc_mapper().translate_locator_to_location(course_xlate))
    # ...and the chapter must round-trip to itself
    self.assertEqual(eponymous_block, loc_mapper().translate_locator_to_location(chapter_xlate))
    # and a non-existent one w/o add
    eponymous_block = course_location.replace(category='problem')
    with self.assertRaises(ItemNotFoundError):
        chapter_xlate = loc_mapper().translate_location(None, eponymous_block, add_entry_if_missing=False)
def load_item(self, location):
    """
    Return an XModule instance for the specified location.

    Serves from the in-memory cache when possible; on a cache miss, loads
    from the modulestore and refreshes the cache. Any failure while
    constructing the block is wrapped in an ErrorDescriptor.
    """
    location = Location(location)
    json_data = self.module_data.get(location)
    if json_data is None:
        module = self.modulestore.get_item(location)
        if module is not None:
            # update our own cache after going to the DB to get cache miss
            self.module_data.update(module.runtime.module_data)
        return module
    else:
        # load the module and apply the inherited metadata
        try:
            category = json_data['location']['category']
            class_ = XModuleDescriptor.load_class(category, self.default_class)
            definition = json_data.get('definition', {})
            metadata = json_data.get('metadata', {})
            # rename legacy metadata keys to their current names
            for old_name, new_name in getattr(class_, 'metadata_translations', {}).items():
                if old_name in metadata:
                    metadata[new_name] = metadata[old_name]
                    del metadata[old_name]
            kvs = MongoKeyValueStore(
                definition.get('data', {}),
                definition.get('children', []),
                metadata,
            )
            field_data = DbModel(kvs)
            scope_ids = ScopeIds(None, category, location, location)
            module = self.construct_xblock_from_class(class_, field_data, scope_ids)
            if self.cached_metadata is not None:
                # parent container pointers don't differentiate between draft and non-draft
                # so when we do the lookup, we should do so with a non-draft location
                non_draft_loc = location.replace(revision=None)
                # Convert the serialized fields values in self.cached_metadata
                # to python values
                metadata_to_inherit = self.cached_metadata.get(non_draft_loc.url(), {})
                inherit_metadata(module, metadata_to_inherit)
            # decache any computed pending field settings
            module.save()
            return module
        except Exception:
            # narrowed from a bare `except:` so that SystemExit and
            # KeyboardInterrupt are no longer swallowed into an ErrorDescriptor
            log.warning("Failed to load descriptor", exc_info=True)
            return ErrorDescriptor.from_json(
                json_data,
                self,
                json_data['location'],
                error_msg=exc_info_to_str(sys.exc_info())
            )
def load_item(self, location):
    """
    Return an XModule instance for the specified location.

    Serves from the in-memory cache when possible; on a cache miss, loads
    from the modulestore and refreshes the cache. Any failure while
    constructing the block is wrapped in an ErrorDescriptor.
    """
    location = Location(location)
    json_data = self.module_data.get(location)
    if json_data is None:
        module = self.modulestore.get_item(location)
        if module is not None:
            # update our own cache after going to the DB to get cache miss
            self.module_data.update(module.runtime.module_data)
        return module
    else:
        # load the module and apply the inherited metadata
        try:
            category = json_data['location']['category']
            class_ = XModuleDescriptor.load_class(
                category,
                self.default_class
            )
            definition = json_data.get('definition', {})
            metadata = json_data.get('metadata', {})
            # rename legacy metadata keys to their current names
            for old_name, new_name in getattr(class_, 'metadata_translations', {}).items():
                if old_name in metadata:
                    metadata[new_name] = metadata[old_name]
                    del metadata[old_name]
            kvs = MongoKeyValueStore(
                definition.get('data', {}),
                definition.get('children', []),
                metadata,
            )
            field_data = DbModel(kvs)
            scope_ids = ScopeIds(None, category, location, location)
            module = self.construct_xblock_from_class(class_, field_data, scope_ids)
            if self.cached_metadata is not None:
                # parent container pointers don't differentiate between draft and non-draft
                # so when we do the lookup, we should do so with a non-draft location
                non_draft_loc = location.replace(revision=None)
                # Convert the serialized fields values in self.cached_metadata
                # to python values
                metadata_to_inherit = self.cached_metadata.get(non_draft_loc.url(), {})
                inherit_metadata(module, metadata_to_inherit)
            # decache any computed pending field settings
            module.save()
            return module
        except Exception:
            # narrowed from a bare `except:` so that SystemExit and
            # KeyboardInterrupt are no longer swallowed into an ErrorDescriptor
            log.warning("Failed to load descriptor", exc_info=True)
            return ErrorDescriptor.from_json(
                json_data,
                self,
                json_data['location'],
                error_msg=exc_info_to_str(sys.exc_info())
            )
def load_item(self, location):
    """
    Return an XModule instance for the specified location.

    Serves from the in-memory cache when possible; on a cache miss, loads
    from the modulestore and refreshes the cache. Any failure while
    constructing the module is wrapped in an ErrorDescriptor.
    """
    location = Location(location)
    json_data = self.module_data.get(location)
    if json_data is None:
        module = self.modulestore.get_item(location)
        if module is not None:
            # update our own cache after going to the DB to get cache miss
            self.module_data.update(module.system.module_data)
        return module
    else:
        # load the module and apply the inherited metadata
        try:
            category = json_data['location']['category']
            class_ = XModuleDescriptor.load_class(
                category,
                self.default_class
            )
            definition = json_data.get('definition', {})
            metadata = json_data.get('metadata', {})
            # rename legacy metadata keys to their current names
            for old_name, new_name in class_.metadata_translations.items():
                if old_name in metadata:
                    metadata[new_name] = metadata[old_name]
                    del metadata[old_name]
            kvs = MongoKeyValueStore(
                definition.get('data', {}),
                definition.get('children', []),
                metadata,
                location,
                category
            )
            model_data = DbModel(kvs, class_, None, MongoUsage(self.course_id, location))
            model_data['category'] = category
            model_data['location'] = location
            module = class_(self, model_data)
            if self.cached_metadata is not None:
                # parent container pointers don't differentiate between draft and non-draft
                # so when we do the lookup, we should do so with a non-draft location
                non_draft_loc = location.replace(revision=None)
                metadata_to_inherit = self.cached_metadata.get(non_draft_loc.url(), {})
                inherit_metadata(module, metadata_to_inherit)
            return module
        except Exception:
            # narrowed from a bare `except:` so that SystemExit and
            # KeyboardInterrupt are no longer swallowed into an ErrorDescriptor
            log.warning("Failed to load descriptor", exc_info=True)
            return ErrorDescriptor.from_json(
                json_data,
                self,
                json_data['location'],
                error_msg=exc_info_to_str(sys.exc_info())
            )
def fetch(cls, course_location):
    """
    Fetch the course details for the given course from persistence and return a CourseDetails model.
    """
    if not isinstance(course_location, Location):
        course_location = Location(course_location)
    course = cls(course_location)

    descriptor = get_modulestore(course_location).get_item(course_location)
    course.start_date = descriptor.start
    course.end_date = descriptor.end
    course.enrollment_start = descriptor.enrollment_start
    course.enrollment_end = descriptor.enrollment_end
    course.course_image_name = descriptor.course_image
    course.course_image_asset_path = course_image_url(descriptor)

    # each of these attributes lives in its own 'about' block whose url_name
    # matches the attribute; blocks that don't exist are simply skipped
    for attr in ('syllabus', 'overview', 'tags', 'effort'):
        about_loc = course_location.replace(category='about', name=attr)
        try:
            setattr(course, attr, get_modulestore(about_loc).get_item(about_loc).data)
        except ItemNotFoundError:
            pass

    # the intro video needs its raw data parsed before assignment
    video_loc = course_location.replace(category='about', name='video')
    try:
        raw_video = get_modulestore(video_loc).get_item(video_loc).data
        course.intro_video = CourseDetails.parse_video_tag(raw_video)
    except ItemNotFoundError:
        pass

    return course
def test_delete_block(self):
    """
    test delete_block_location_translator(location, old_course_id=None)
    """
    org = 'foo_org'
    course = 'bar_course'
    # two courses sharing some old-style block names
    new_style_course_id = '{}.geek_dept.{}.baz_run'.format(org, course)
    loc_mapper().create_map_entry(
        Location('i4x', org, course, 'course', 'baz_run'),
        new_style_course_id,
        block_map={
            'abc123': {'problem': 'problem2'},
            '48f23a10395384929234': {'chapter': 'chapter48f'},
            '1': {'chapter': 'chapter1', 'problem': 'problem1'},
        }
    )
    new_style_course_id2 = '{}.geek_dept.{}.delta_run'.format(org, course)
    loc_mapper().create_map_entry(
        Location('i4x', org, course, 'course', 'delta_run'),
        new_style_course_id2,
        block_map={
            'abc123': {'problem': 'problem3'},
            '48f23a10395384929234': {'chapter': 'chapter48b'},
            '1': {'chapter': 'chapter2', 'problem': 'problem2'},
        }
    )
    location = Location('i4x', org, course, 'problem', '1')
    # delete from all courses
    loc_mapper().delete_block_location_translator(location)
    self.assertIsNone(loc_mapper().translate_locator_to_location(
        BlockUsageLocator(course_id=new_style_course_id, usage_id='problem1')
    ))
    self.assertIsNone(loc_mapper().translate_locator_to_location(
        BlockUsageLocator(course_id=new_style_course_id2, usage_id='problem2')
    ))
    # delete from one course
    location = location.replace(name='abc123')
    loc_mapper().delete_block_location_translator(location, '{}/{}/{}'.format(org, course, 'baz_run'))
    # baz_run no longer has the mapping...
    with self.assertRaises(ItemNotFoundError):
        loc_mapper().translate_location(
            '{}/{}/{}'.format(org, course, 'baz_run'), location, add_entry_if_missing=False
        )
    # ...but delta_run still does
    locator = loc_mapper().translate_location(
        '{}/{}/{}'.format(org, course, 'delta_run'), location, add_entry_if_missing=False
    )
    self.assertEqual(locator.usage_id, 'problem3')
def test_delete_block(self):
    """
    test delete_block_location_translator(location, old_course_id=None)
    """
    org = 'foo_org'
    course = 'bar_course'
    # two courses sharing some old-style block names
    new_style_course_id = '{}.geek_dept.{}.baz_run'.format(org, course)
    loc_mapper().create_map_entry(
        Location('i4x', org, course, 'course', 'baz_run'),
        new_style_course_id,
        block_map={
            'abc123': {'problem': 'problem2'},
            '48f23a10395384929234': {'chapter': 'chapter48f'},
            '1': {'chapter': 'chapter1', 'problem': 'problem1'},
        }
    )
    new_style_course_id2 = '{}.geek_dept.{}.delta_run'.format(org, course)
    loc_mapper().create_map_entry(
        Location('i4x', org, course, 'course', 'delta_run'),
        new_style_course_id2,
        block_map={
            'abc123': {'problem': 'problem3'},
            '48f23a10395384929234': {'chapter': 'chapter48b'},
            '1': {'chapter': 'chapter2', 'problem': 'problem2'},
        }
    )
    location = Location('i4x', org, course, 'problem', '1')
    # delete from all courses
    loc_mapper().delete_block_location_translator(location)
    self.assertIsNone(loc_mapper().translate_locator_to_location(
        BlockUsageLocator(course_id=new_style_course_id, usage_id='problem1', branch='published')
    ))
    self.assertIsNone(loc_mapper().translate_locator_to_location(
        BlockUsageLocator(course_id=new_style_course_id2, usage_id='problem2', branch='published')
    ))
    # delete from one course
    location = location.replace(name='abc123')
    loc_mapper().delete_block_location_translator(location, '{}/{}/{}'.format(org, course, 'baz_run'))
    # baz_run no longer has the mapping...
    with self.assertRaises(ItemNotFoundError):
        loc_mapper().translate_location(
            '{}/{}/{}'.format(org, course, 'baz_run'), location, add_entry_if_missing=False
        )
    # ...but delta_run still does
    locator = loc_mapper().translate_location(
        '{}/{}/{}'.format(org, course, 'delta_run'), location, add_entry_if_missing=False
    )
    self.assertEqual(locator.usage_id, 'problem3')
def load_item(self, location):
    """
    Return an XModule instance for the specified location.

    Serves from the in-memory cache when possible; on a cache miss, loads
    from the modulestore and refreshes the cache. Any failure while
    constructing the module is wrapped in an ErrorDescriptor.
    """
    location = Location(location)
    json_data = self.module_data.get(location)
    if json_data is None:
        module = self.modulestore.get_item(location)
        if module is not None:
            # update our own cache after going to the DB to get cache miss
            self.module_data.update(module.system.module_data)
        return module
    else:
        # load the module and apply the inherited metadata
        try:
            category = json_data['location']['category']
            class_ = XModuleDescriptor.load_class(category, self.default_class)
            definition = json_data.get('definition', {})
            metadata = json_data.get('metadata', {})
            # rename legacy metadata keys to their current names
            for old_name, new_name in class_.metadata_translations.items():
                if old_name in metadata:
                    metadata[new_name] = metadata[old_name]
                    del metadata[old_name]
            kvs = MongoKeyValueStore(
                definition.get('data', {}),
                definition.get('children', []),
                metadata,
                location,
                category
            )
            model_data = DbModel(kvs, class_, None, MongoUsage(self.course_id, location))
            model_data['category'] = category
            model_data['location'] = location
            module = class_(self, model_data)
            if self.cached_metadata is not None:
                # parent container pointers don't differentiate between draft and non-draft
                # so when we do the lookup, we should do so with a non-draft location
                non_draft_loc = location.replace(revision=None)
                metadata_to_inherit = self.cached_metadata.get(non_draft_loc.url(), {})
                inherit_metadata(module, metadata_to_inherit)
            return module
        except Exception:
            # narrowed from a bare `except:` so that SystemExit and
            # KeyboardInterrupt are no longer swallowed into an ErrorDescriptor
            log.warning("Failed to load descriptor", exc_info=True)
            return ErrorDescriptor.from_json(
                json_data,
                self,
                json_data['location'],
                error_msg=exc_info_to_str(sys.exc_info())
            )
def _import_module(module):
    """
    Recursively import `module` and its children into the draft store,
    wiring private-only verticals back into their parent sequential.
    """
    # Update the module's location to "draft" revision
    # We need to call this method (instead of updating the location directly)
    # to ensure that pure XBlock field data is updated correctly.
    _update_module_location(module, module.location.replace(revision='draft'))
    # make sure our parent has us in its list of children
    # this is to make sure private only verticals show up
    # in the list of children since they would have been
    # filtered out from the non-draft store export
    if module.location.category == 'vertical':
        non_draft_location = module.location.replace(revision=None)
        sequential_url = module.xml_attributes['parent_sequential_url']
        index = int(module.xml_attributes['index_in_children_list'])
        seq_location = Location(sequential_url)
        # IMPORTANT: Be sure to update the sequential
        # in the NEW namespace
        seq_location = seq_location.replace(
            org=target_location_namespace.org,
            course=target_location_namespace.course
        )
        sequential = store.get_item(seq_location, depth=0)
        if non_draft_location.url() not in sequential.children:
            # re-insert at the child's original position and persist the parent
            sequential.children.insert(index, non_draft_location.url())
            store.update_item(sequential, '**replace_user**')
    import_module(
        module,
        draft_store,
        course_data_path,
        static_content_store,
        source_location_namespace,
        target_location_namespace,
        allow_not_found=True
    )
    for child in module.get_children():
        _import_module(child)
def convert_ref(reference):
    """
    Convert a reference to the new namespace, but only if the original
    namespace matched the original course. Otherwise, returns the input
    value.
    """
    ref = Location(reference)
    in_original_namespace = (
        original_location.tag == ref.tag
        and original_location.org == ref.org
        and original_location.course == ref.course
    )
    # references from other courses pass through untouched
    if not in_original_namespace:
        return reference
    return ref.replace(
        tag=target_location_namespace.tag,
        org=target_location_namespace.org,
        course=target_location_namespace.course
    ).url()
def fetch(cls, course_location):
    """
    Fetch the course details for the given course from persistence and return a CourseDetails model.
    """
    if not isinstance(course_location, Location):
        course_location = Location(course_location)
    course = cls(course_location)

    descriptor = get_modulestore(course_location).get_item(course_location)
    course.start_date = descriptor.start
    course.end_date = descriptor.end
    course.enrollment_start = descriptor.enrollment_start
    course.enrollment_end = descriptor.enrollment_end
    course.course_image_name = descriptor.course_image
    course.course_image_asset_path = course_image_url(descriptor)

    # each of these attributes lives in its own 'about' block whose url_name
    # matches the attribute; missing blocks are simply skipped
    temploc = course_location.replace(category='about', name='syllabus')
    try:
        course.syllabus = get_modulestore(temploc).get_item(temploc).data
    except ItemNotFoundError:
        pass

    temploc = temploc.replace(name='overview')
    try:
        course.overview = get_modulestore(temploc).get_item(temploc).data
    except ItemNotFoundError:
        pass

    temploc = temploc.replace(name='effort')
    try:
        course.effort = get_modulestore(temploc).get_item(temploc).data
    except ItemNotFoundError:
        pass

    # the intro video needs its raw data parsed before assignment
    temploc = temploc.replace(name='video')
    try:
        raw_video = get_modulestore(temploc).get_item(temploc).data
        course.intro_video = CourseDetails.parse_video_tag(raw_video)
    except ItemNotFoundError:
        pass

    return course
def convert_ref(reference):
    """
    Convert a reference to the new namespace, but only
    if the original namespace matched the original course.
    Otherwise, returns the input value.
    """
    new_ref = reference
    ref = Location(reference)
    # a reference belongs to the original course when tag, org, and course all match
    in_original_namespace = (original_location.tag == ref.tag and
                             original_location.org == ref.org and
                             original_location.course == ref.course)
    if in_original_namespace:
        # rewrite the matching parts into the target namespace
        new_ref = ref.replace(
            tag=target_location_namespace.tag,
            org=target_location_namespace.org,
            course=target_location_namespace.course
        ).url()
    return new_ref
def create_item(request):
    """View for create items."""
    parent_location = Location(request.json['parent_location'])
    category = request.json['category']
    display_name = request.json.get('display_name')

    if not has_access(request.user, parent_location):
        raise PermissionDenied()

    parent = get_modulestore(category).get_item(parent_location)
    # new item gets a random, unique url_name under the parent's course
    dest_location = parent_location.replace(category=category, name=uuid4().hex)

    # get the metadata, display_name, and definition from the request
    metadata = {}
    data = None
    boilerplate_id = request.json.get('boilerplate')
    if boilerplate_id is not None:
        component_class = XModuleDescriptor.load_class(category)
        if component_class is not None:
            boilerplate = component_class.get_template(boilerplate_id)
            if boilerplate is not None:
                metadata = boilerplate.get('metadata', {})
                data = boilerplate.get('data')
    if display_name is not None:
        metadata['display_name'] = display_name

    get_modulestore(category).create_and_save_xmodule(
        dest_location,
        definition_data=data,
        metadata=metadata,
        system=parent.system,
    )

    # detached categories are not listed among the parent's children
    if category not in DETACHED_CATEGORIES:
        get_modulestore(parent.location).update_children(
            parent_location, parent.children + [dest_location.url()]
        )

    course_id = get_course_for_item(parent_location).location.course_id
    locator = loc_mapper().translate_location(course_id, dest_location, False, True)
    return JsonResponse({'id': dest_location.url(), "update_url": locator.url_reverse("xblock")})
def create_item(request):
    """View for create items."""
    payload = request.json
    parent_loc = Location(payload['parent_location'])
    block_type = payload['category']
    display_name = payload.get('display_name')

    if not has_access(request.user, parent_loc):
        raise PermissionDenied()

    store = get_modulestore(block_type)
    parent = store.get_item(parent_loc)
    dest_loc = parent_loc.replace(category=block_type, name=uuid4().hex)

    # Pull metadata, display_name, and definition data from the requested
    # boilerplate template, if one was named.
    metadata = {}
    data = None
    template_id = payload.get('boilerplate')
    if template_id is not None:
        clz = XModuleDescriptor.load_class(block_type)
        if clz is not None:
            template = clz.get_template(template_id)
            if template is not None:
                metadata = template.get('metadata', {})
                data = template.get('data')
    if display_name is not None:
        metadata['display_name'] = display_name

    store.create_and_save_xmodule(
        dest_loc,
        definition_data=data,
        metadata=metadata,
        system=parent.system,
    )

    # Detached blocks are not listed among the parent's children.
    if block_type not in DETACHED_CATEGORIES:
        get_modulestore(parent.location).update_children(
            parent_loc, parent.children + [dest_loc.url()])

    return JsonResponse({'id': dest_loc.url()})
def create_asset_entries(self, cstore, number):
    """
    Create the fake entries
    """
    asset_filter = Location(
        XASSET_LOCATION_TAG,
        category='asset',
        course=self.course.location.course,
        org=self.course.location.org
    )
    entry = {
        'displayname': 'foo.jpg',
        'chunkSize': 262144,
        'length': 0,
        'uploadDate': datetime(2012, 1, 2, 0, 0),
        'contentType': 'image/jpeg',
    }
    for idx in range(number):
        entry['displayname'] = '{:03x}.jpeg'.format(idx)
        # The same dict is reused, so the hour offsets accumulate
        # (0, +1h, +3h, ...); timestamps are strictly increasing either way.
        entry['uploadDate'] += timedelta(hours=idx)
        entry['_id'] = asset_filter.replace(name=entry['displayname']).dict()
        cstore.fs_files.insert(entry)
    return asset_filter.dict()
def test_translate_location_dwim(self):
    """
    Test the location translation mechanisms which try to do-what-i-mean by creating new entries for
    never seen queries.
    """
    org = 'foo_org'
    course = 'bar_course'
    run = 'baz_run'
    problem_name = 'abc123abc123abc123abc123abc123f9'
    location = Location(org, course, run, 'problem', problem_name)
    new_offering = '{}.{}'.format(course, run)
    # Never-seen location: translation should auto-create the map entry.
    self.translate_n_check(location, org, new_offering, 'problemabc', 'published', True)
    # create an entry w/o a guid name
    other_location = Location(org, course, run, 'chapter', 'intro')
    self.translate_n_check(other_location, org, new_offering, 'intro', 'published', True)
    # add a distractor course
    delta_new_org = '{}.geek_dept'.format(org)
    run = 'delta_run'
    delta_new_offering = '{}.{}'.format(course, run)
    delta_course_locn = SlashSeparatedCourseKey(org, course, run)
    loc_mapper().create_map_entry(
        delta_course_locn,
        delta_new_org, delta_new_offering,
        block_map={problem_name: {'problem': 'problem3'}}
    )
    # Original course's translation must be unaffected by the distractor.
    self.translate_n_check(location, org, new_offering, 'problemabc', 'published', True)
    # add a new one to both courses (ensure name doesn't have same beginning)
    new_prob_name = uuid.uuid4().hex
    while new_prob_name.startswith('abc'):
        new_prob_name = uuid.uuid4().hex
    new_prob_locn = location.replace(name=new_prob_name)
    new_usage_id = 'problem{}'.format(new_prob_name[:3])
    self.translate_n_check(new_prob_locn, org, new_offering, new_usage_id, 'published', True)
    # Same block name in the distractor run should map into the distractor course.
    new_prob_locn = new_prob_locn.replace(run=run)
    self.translate_n_check(
        new_prob_locn, delta_new_org, delta_new_offering, new_usage_id, 'published', True
    )
def create_asset_entries(self, cstore, number):
    """
    Create the fake entries
    """
    asset_filter = Location(
        XASSET_LOCATION_TAG,
        category='asset',
        course=self.course.location.course,
        org=self.course.location.org
    )
    # purge existing entries (a bit brutal but hopefully tests are independent enuf to not trip on this)
    cstore.fs_files.remove(location_to_query(asset_filter))
    entry = {
        'displayname': 'foo.jpg',
        'chunkSize': 262144,
        'length': 0,
        'uploadDate': datetime(2012, 1, 2, 0, 0),
        'contentType': 'image/jpeg',
    }
    for idx in range(number):
        entry['displayname'] = '{:03x}.jpeg'.format(idx)
        # Reusing one dict means the hour offsets accumulate; dates still
        # increase monotonically, which is all the tests need.
        entry['uploadDate'] += timedelta(hours=idx)
        entry['_id'] = asset_filter.replace(name=entry['displayname']).dict()
        cstore.fs_files.insert(entry)
    return asset_filter.dict()
def create_asset_entries(self, cstore, number):
    """
    Create `number` fake asset entries in `cstore`'s fs_files collection and
    return the course-level filter dict for them.
    """
    course_filter = Location(
        XASSET_LOCATION_TAG,
        category="asset",
        course=self.course.location.course,
        org=self.course.location.org
    )
    # purge existing entries (a bit brutal but hopefully tests are independent enuf to not trip on this)
    # BUGFIX: course_filter.dict() is an exact-match spec that includes the
    # unset fields (e.g. name=None), so it matched nothing; use the proper
    # wildcard query helper as the sibling implementation does.
    cstore.fs_files.remove(location_to_query(course_filter))
    base_entry = {
        "displayname": "foo.jpg",
        "chunkSize": 262144,
        "length": 0,
        "uploadDate": datetime(2012, 1, 2, 0, 0),
        "contentType": "image/jpeg",
    }
    for i in range(number):
        base_entry["displayname"] = "{:03x}.jpeg".format(i)
        # Offsets accumulate because the same dict is reused; the dates only
        # need to be strictly increasing.
        base_entry["uploadDate"] += timedelta(hours=i)
        base_entry["_id"] = course_filter.replace(name=base_entry["displayname"]).dict()
        cstore.fs_files.insert(base_entry)
    return course_filter.dict()
def _construct(cls, system, contents, error_msg, location):
    """Build an ErrorDescriptor instance wrapping `contents` at `location`."""
    # Coerce a raw id dict into a Location object.
    if isinstance(location, dict) and "course" in location:
        location = Location(location)
    if isinstance(location, Location) and location.name is None:
        # Pick a unique url_name -- the sha1 hash of the contents.
        # NOTE: We could try to pull out the url_name of the errored descriptor,
        # but url_names aren't guaranteed to be unique between descriptor types,
        # and ErrorDescriptor can wrap any type. When the wrapped module is fixed,
        # it will be written out with the original url_name.
        unique_name = hashlib.sha1(contents.encode("utf8")).hexdigest()
        location = location.replace(category="error", name=unique_name)
    # real metadata stays in the content, but add a display name
    model_data = dict(
        error_msg=str(error_msg),
        contents=contents,
        display_name="Error: " + location.url(),
        location=location,
        category="error",
    )
    return cls(system, model_data)
def test_cms_imported_course_walkthrough(self):
    """
    Import and walk through some common URL endpoints. This just verifies non-500 and no other
    correct behavior, so it is not a deep test
    """
    import_from_xml(modulestore("direct"), "common/test/data/", ["simple"])
    loc = Location(["i4x", "edX", "simple", "course", "2012_Fall", None])
    resp = self.client.get(reverse("course_index", kwargs={"org": loc.org, "course": loc.course, "name": loc.name}))
    self.assertEqual(200, resp.status_code)
    self.assertContains(resp, "Chapter 2")
    # go to various pages
    # import page
    resp = self.client.get(
        reverse("import_course", kwargs={"org": loc.org, "course": loc.course, "name": loc.name})
    )
    self.assertEqual(200, resp.status_code)
    # export page
    resp = self.client.get(
        reverse("export_course", kwargs={"org": loc.org, "course": loc.course, "name": loc.name})
    )
    self.assertEqual(200, resp.status_code)
    # manage users
    resp = self.client.get(reverse("manage_users", kwargs={"location": loc.url()}))
    self.assertEqual(200, resp.status_code)
    # course info
    resp = self.client.get(reverse("course_info", kwargs={"org": loc.org, "course": loc.course, "name": loc.name}))
    self.assertEqual(200, resp.status_code)
    # settings_details
    resp = self.client.get(
        reverse("settings_details", kwargs={"org": loc.org, "course": loc.course, "name": loc.name})
    )
    self.assertEqual(200, resp.status_code)
    # settings_grading (comment fixed: was mislabeled "settings_details")
    resp = self.client.get(
        reverse("settings_grading", kwargs={"org": loc.org, "course": loc.course, "name": loc.name})
    )
    self.assertEqual(200, resp.status_code)
    # static_pages
    resp = self.client.get(
        reverse("static_pages", kwargs={"org": loc.org, "course": loc.course, "coursename": loc.name})
    )
    self.assertEqual(200, resp.status_code)
    # asset_index (comment fixed: was mislabeled "static_pages")
    resp = self.client.get(reverse("asset_index", kwargs={"org": loc.org, "course": loc.course, "name": loc.name}))
    self.assertEqual(200, resp.status_code)
    # go look at a subsection page
    subsection_location = loc.replace(category="sequential", name="test_sequence")
    resp = self.client.get(reverse("edit_subsection", kwargs={"location": subsection_location.url()}))
    self.assertEqual(200, resp.status_code)
    # go look at the Edit page
    unit_location = loc.replace(category="vertical", name="test_vertical")
    resp = self.client.get(reverse("edit_unit", kwargs={"location": unit_location.url()}))
    self.assertEqual(200, resp.status_code)
    # delete a component
    del_loc = loc.replace(category="html", name="test_html")
    resp = self.client.post(reverse("delete_item"), json.dumps({"id": del_loc.url()}), "application/json")
    self.assertEqual(200, resp.status_code)
    # delete a unit
    del_loc = loc.replace(category="vertical", name="test_vertical")
    resp = self.client.post(reverse("delete_item"), json.dumps({"id": del_loc.url()}), "application/json")
    self.assertEqual(200, resp.status_code)
    # delete a subsection (comment fixed: was duplicated "delete a unit")
    del_loc = loc.replace(category="sequential", name="test_sequence")
    resp = self.client.post(reverse("delete_item"), json.dumps({"id": del_loc.url()}), "application/json")
    self.assertEqual(200, resp.status_code)
    # delete a chapter
    del_loc = loc.replace(category="chapter", name="chapter_2")
    resp = self.client.post(reverse("delete_item"), json.dumps({"id": del_loc.url()}), "application/json")
    self.assertEqual(200, resp.status_code)
def test_translate_location_read_only(self):
    """
    Test the variants of translate_location which don't create entries, just decode
    """
    # lookup before there are any maps
    org = 'foo_org'
    course = 'bar_course'
    old_style_course_id = '{}/{}/{}'.format(org, course, 'baz_run')
    with self.assertRaises(ItemNotFoundError):
        _ = loc_mapper().translate_location(
            old_style_course_id,
            Location('i4x', org, course, 'problem', 'abc123'),
            add_entry_if_missing=False
        )
    new_style_package_id = '{}.geek_dept.{}.baz_run'.format(org, course)
    block_map = {
        'abc123': {'problem': 'problem2', 'vertical': 'vertical2'},
        'def456': {'problem': 'problem4'},
        'ghi789': {'problem': 'problem7'},
    }
    loc_mapper().create_map_entry(
        Location('i4x', org, course, 'course', 'baz_run'),
        new_style_package_id,
        block_map=block_map
    )
    test_problem_locn = Location('i4x', org, course, 'problem', 'abc123')
    # only one course matches
    # look for w/ only the Location (works b/c there's only one possible course match). Will force
    # cache as default translation for this problemid
    self.translate_n_check(test_problem_locn, None, new_style_package_id, 'problem2', 'published')
    # look for non-existent problem
    with self.assertRaises(ItemNotFoundError):
        loc_mapper().translate_location(
            None,
            Location('i4x', org, course, 'problem', '1def23'),
            add_entry_if_missing=False
        )
    # A category-less location is structurally invalid for translation.
    test_no_cat_locn = test_problem_locn.replace(category=None)
    with self.assertRaises(InvalidLocationError):
        loc_mapper().translate_location(old_style_course_id, test_no_cat_locn, False, False)
    test_no_cat_locn = test_no_cat_locn.replace(name='def456')
    # only one course matches
    self.translate_n_check(test_no_cat_locn, old_style_course_id, new_style_package_id, 'problem4', 'published')
    # add a distractor course (note that abc123 has a different translation in this one)
    distractor_block_map = {
        'abc123': {'problem': 'problem3'},
        'def456': {'problem': 'problem4'},
        'ghi789': {'problem': 'problem7'},
    }
    test_delta_new_id = '{}.geek_dept.{}.{}'.format(org, course, 'delta_run')
    test_delta_old_id = '{}/{}/{}'.format(org, course, 'delta_run')
    loc_mapper().create_map_entry(
        Location('i4x', org, course, 'course', 'delta_run'),
        test_delta_new_id,
        block_map=distractor_block_map
    )
    # test that old translation still works
    self.translate_n_check(test_problem_locn, old_style_course_id, new_style_package_id, 'problem2', 'published')
    # and new returns new id
    self.translate_n_check(test_problem_locn, test_delta_old_id, test_delta_new_id, 'problem3', 'published')
    # look for default translation of uncached Location (not unique; so, just verify it returns something)
    prob_locator = loc_mapper().translate_location(
        None,
        Location('i4x', org, course, 'problem', 'def456'),
        add_entry_if_missing=False
    )
    self.assertIsNotNone(prob_locator, "couldn't find ambiguous location")
    # make delta_run default course: anything not cached using None as old_course_id will use this
    loc_mapper().create_map_entry(
        Location('i4x', org, course, 'problem', '789abc123efg456'),
        test_delta_new_id,
        block_map=block_map
    )
    # now an uncached ambiguous query should return delta
    test_unused_locn = Location('i4x', org, course, 'problem', 'ghi789')
    self.translate_n_check(test_unused_locn, None, test_delta_new_id, 'problem7', 'published')
    # get the draft one (I'm sorry this is getting long)
    self.translate_n_check(test_unused_locn, None, test_delta_new_id, 'problem7', 'draft')
def test_add_block(self): """ Test add_block_location_translator(location, old_course_id=None, usage_id=None) """ # call w/ no matching courses org = 'foo_org' course = 'bar_course' old_style_course_id = '{}/{}/{}'.format(org, course, 'baz_run') problem_name = 'abc123abc123abc123abc123abc123f9' location = Location('i4x', org, course, 'problem', problem_name) with self.assertRaises(ItemNotFoundError): loc_mapper().add_block_location_translator(location) with self.assertRaises(ItemNotFoundError): loc_mapper().add_block_location_translator(location, old_style_course_id) # w/ one matching course new_style_course_id = '{}.{}.{}'.format(org, course, 'baz_run') loc_mapper().create_map_entry( Location('i4x', org, course, 'course', 'baz_run'), new_style_course_id, ) new_usage_id = loc_mapper().add_block_location_translator(location) self.assertEqual(new_usage_id, 'problemabc') # look it up translated_loc = loc_mapper().translate_location(old_style_course_id, location, add_entry_if_missing=False) self.assertEqual(translated_loc.course_id, new_style_course_id) self.assertEqual(translated_loc.usage_id, new_usage_id) # w/ one distractor which has one entry already new_style_course_id = '{}.geek_dept.{}.{}'.format(org, course, 'delta_run') loc_mapper().create_map_entry( Location('i4x', org, course, 'course', 'delta_run'), new_style_course_id, block_map={'48f23a10395384929234': {'chapter': 'chapter48f'}} ) # try adding the one added before new_usage_id2 = loc_mapper().add_block_location_translator(location) self.assertEqual(new_usage_id, new_usage_id2) # it should be in the distractor now new_location = loc_mapper().translate_locator_to_location( BlockUsageLocator(course_id=new_style_course_id, usage_id=new_usage_id2) ) self.assertEqual(new_location, location) # add one close to the existing chapter (cause name collision) location = Location('i4x', org, course, 'chapter', '48f23a103953849292341234567890ab') new_usage_id = loc_mapper().add_block_location_translator(location) 
self.assertRegexpMatches(new_usage_id, r'^chapter48f\d') # retrievable from both courses new_location = loc_mapper().translate_locator_to_location( BlockUsageLocator(course_id=new_style_course_id, usage_id=new_usage_id) ) self.assertEqual(new_location, location) new_location = loc_mapper().translate_locator_to_location( BlockUsageLocator(course_id='{}.{}.{}'.format(org, course, 'baz_run'), usage_id=new_usage_id) ) self.assertEqual(new_location, location) # provoke duplicate item errors location = location.replace(name='44f23a103953849292341234567890ab') with self.assertRaises(DuplicateItemError): loc_mapper().add_block_location_translator(location, usage_id=new_usage_id) new_usage_id = loc_mapper().add_block_location_translator(location, old_course_id=old_style_course_id) other_course_old_style = '{}/{}/{}'.format(org, course, 'delta_run') new_usage_id2 = loc_mapper().add_block_location_translator( location, old_course_id=other_course_old_style, usage_id='{}b'.format(new_usage_id) ) with self.assertRaises(DuplicateItemError): loc_mapper().add_block_location_translator(location)
def create_new_course(request):
    """
    Create a new course.

    Returns the URL for the course overview page.
    Raises PermissionDenied if the user may not create courses; returns an
    error JsonResponse for invalid input or name collisions.
    """
    if not auth.has_access(request.user, CourseCreatorRole()):
        raise PermissionDenied()

    org = request.json.get('org')
    number = request.json.get('number')
    display_name = request.json.get('display_name')
    run = request.json.get('run')

    # allow/disable unicode characters in course_id according to settings
    if not settings.FEATURES.get('ALLOW_UNICODE_COURSE_ID'):
        if _has_non_ascii_characters(org) or _has_non_ascii_characters(number) or _has_non_ascii_characters(run):
            return JsonResponse(
                {'error': _('Special characters not allowed in organization, course number, and course run.')},
                status=400
            )

    try:
        dest_location = Location(u'i4x', org, number, u'course', run)
    except InvalidLocationError as error:
        return JsonResponse({
            "ErrMsg": _("Unable to create course '{name}'.\n\n{err}").format(
                name=display_name, err=error.message)
        })

    # see if the course already exists
    existing_course = None
    try:
        existing_course = modulestore('direct').get_item(dest_location)
    except ItemNotFoundError:
        pass
    if existing_course is not None:
        return JsonResponse({
            'ErrMsg': _('There is already a course defined with the same '
                        'organization, course number, and course run. Please '
                        'change either organization or course number to be '
                        'unique.'),
            'OrgErrMsg': _('Please change either the organization or '
                           'course number so that it is unique.'),
            'CourseErrMsg': _('Please change either the organization or '
                              'course number so that it is unique.'),
        })

    # dhm: this query breaks the abstraction, but I'll fix it when I do my suspended refactoring of this
    # file for new locators. get_items should accept a query rather than requiring it be a legal location
    course_search_location = bson.son.SON({
        '_id.tag': 'i4x',
        # cannot pass regex to Location constructor; thus this hack
        # pylint: disable=E1101
        '_id.org': re.compile(u'^{}$'.format(dest_location.org), re.IGNORECASE | re.UNICODE),
        # pylint: disable=E1101
        '_id.course': re.compile(u'^{}$'.format(dest_location.course), re.IGNORECASE | re.UNICODE),
        '_id.category': 'course',
    })
    # BUGFIX: fields=('_id') was just the string '_id' (no trailing comma), which
    # pymongo iterates character-by-character as field names '_', 'i', 'd'.
    # Pass a real list so only _id is projected.
    courses = modulestore().collection.find(course_search_location, fields=['_id'])
    if courses.count() > 0:
        return JsonResponse({
            'ErrMsg': _('There is already a course defined with the same '
                        'organization and course number. Please '
                        'change at least one field to be unique.'),
            'OrgErrMsg': _('Please change either the organization or '
                           'course number so that it is unique.'),
            'CourseErrMsg': _('Please change either the organization or '
                              'course number so that it is unique.'),
        })

    # instantiate the CourseDescriptor and then persist it
    # note: no system to pass
    if display_name is None:
        metadata = {}
    else:
        metadata = {'display_name': display_name}

    # Set a unique wiki_slug for newly created courses. To maintain active wiki_slugs for existing xml courses this
    # cannot be changed in CourseDescriptor.
    wiki_slug = "{0}.{1}.{2}".format(dest_location.org, dest_location.course, dest_location.name)
    definition_data = {'wiki_slug': wiki_slug}

    modulestore('direct').create_and_save_xmodule(
        dest_location,
        definition_data=definition_data,
        metadata=metadata
    )
    new_course = modulestore('direct').get_item(dest_location)

    # clone a default 'about' overview module as well
    dest_about_location = dest_location.replace(category='about', name='overview')
    overview_template = AboutDescriptor.get_template('overview.yaml')
    modulestore('direct').create_and_save_xmodule(
        dest_about_location,
        system=new_course.system,
        definition_data=overview_template.get('data')
    )

    initialize_course_tabs(new_course, request.user)

    new_location = loc_mapper().translate_location(new_course.location.course_id, new_course.location, False, True)
    # can't use auth.add_users here b/c it requires request.user to already have Instructor perms in this course
    # however, we can assume that b/c this user had authority to create the course, the user can add themselves
    CourseInstructorRole(new_location).add_users(request.user)
    auth.add_users(request.user, CourseStaffRole(new_location), request.user)

    # seed the forums
    seed_permissions_roles(new_course.location.course_id)

    # auto-enroll the course creator in the course so that "View Live" will
    # work.
    CourseEnrollment.enroll(request.user, new_course.location.course_id)
    _users_assign_default_role(new_course.location)

    return JsonResponse({'url': new_location.url_reverse("course/", "")})
def create_new_course(request):
    """
    Create a new course

    Reads org/number/display_name/run from POST, creates the course and a
    default 'about' overview module, seeds groups/forums, and enrolls the
    creator. Returns the new course's old-style id.
    """
    if not is_user_in_creator_group(request.user):
        raise PermissionDenied()
    org = request.POST.get('org')
    number = request.POST.get('number')
    display_name = request.POST.get('display_name')
    run = request.POST.get('run')
    try:
        dest_location = Location('i4x', org, number, 'course', run)
    except InvalidLocationError as error:
        return JsonResponse({
            "ErrMsg": _("Unable to create course '{name}'.\n\n{err}").format(
                name=display_name, err=error.message)})
    # see if the course already exists
    existing_course = None
    try:
        existing_course = modulestore('direct').get_item(dest_location)
    except ItemNotFoundError:
        pass
    if existing_course is not None:
        return JsonResponse({
            'ErrMsg': _('There is already a course defined with the same '
                        'organization, course number, and course run. Please '
                        'change either organization or course number to be '
                        'unique.'),
            'OrgErrMsg': _('Please change either the organization or '
                           'course number so that it is unique.'),
            'CourseErrMsg': _('Please change either the organization or '
                              'course number so that it is unique.'),
        })
    # Wildcard (None name) search: any run of the same org/number collides.
    course_search_location = ['i4x', dest_location.org, dest_location.course,
                              'course', None]
    courses = modulestore().get_items(course_search_location)
    if len(courses) > 0:
        return JsonResponse({
            'ErrMsg': _('There is already a course defined with the same '
                        'organization and course number. Please '
                        'change at least one field to be unique.'),
            'OrgErrMsg': _('Please change either the organization or '
                           'course number so that it is unique.'),
            'CourseErrMsg': _('Please change either the organization or '
                              'course number so that it is unique.'),
        })
    # instantiate the CourseDescriptor and then persist it
    # note: no system to pass
    if display_name is None:
        metadata = {}
    else:
        metadata = {'display_name': display_name}
    modulestore('direct').create_and_save_xmodule(
        dest_location,
        metadata=metadata
    )
    new_course = modulestore('direct').get_item(dest_location)
    # clone a default 'about' overview module as well
    dest_about_location = dest_location.replace(
        category='about',
        name='overview'
    )
    overview_template = AboutDescriptor.get_template('overview.yaml')
    modulestore('direct').create_and_save_xmodule(
        dest_about_location,
        system=new_course.system,
        definition_data=overview_template.get('data')
    )
    initialize_course_tabs(new_course)
    create_all_course_groups(request.user, new_course.location)
    # seed the forums
    seed_permissions_roles(new_course.location.course_id)
    # auto-enroll the course creator in the course so that "View Live" will
    # work.
    CourseEnrollment.enroll(request.user, new_course.location.course_id)
    return JsonResponse({'id': new_course.location.url()})
def compute_metadata_inheritance_tree(self, location):
    '''
    Compute the inherited metadata for every descendant of the course at
    `location` and return it as {location_url: metadata_dict}.

    TODO (cdodge) This method can be deleted when the 'split module store' work has been completed
    '''
    # get all collections in the course, this query should not return any leaf nodes
    # note this is a bit ugly as when we add new categories of containers, we have to add it here
    query = {
        '_id.org': location.org,
        '_id.course': location.course,
        '_id.category': {'$in': ['course', 'chapter', 'sequential', 'vertical', 'videosequence',
                                 'wrapper', 'problemset', 'conditional', 'randomize']}
    }
    # we just want the Location, children, and inheritable metadata
    record_filter = {'_id': 1, 'definition.children': 1}

    # just get the inheritable metadata since that is all we need for the computation
    # this minimizes both data pushed over the wire
    for field_name in InheritanceMixin.fields:
        record_filter['metadata.{0}'.format(field_name)] = 1

    # call out to the DB
    resultset = self.collection.find(query, record_filter)

    results_by_url = {}
    root = None

    # now go through the results and order them by the location url
    for result in resultset:
        location = Location(result['_id'])
        # We need to collate between draft and non-draft
        # i.e. draft verticals will have draft children but will have non-draft parents currently
        location = location.replace(revision=None)
        location_url = location.url()
        if location_url in results_by_url:
            # BUGFIX: fold the previously-seen children into `result` itself,
            # not into the dict entry we are about to overwrite below --
            # otherwise the merge was silently discarded.
            existing_children = results_by_url[location_url].get('definition', {}).get('children', [])
            additional_children = result.get('definition', {}).get('children', [])
            result.setdefault('definition', {})['children'] = existing_children + additional_children
        results_by_url[location_url] = result
        if location.category == 'course':
            root = location_url

    # now traverse the tree and compute down the inherited metadata
    metadata_to_inherit = {}

    def _compute_inherited_metadata(url):
        """
        Helper method for computing inherited metadata for a specific location url
        """
        # check for presence of metadata key. Note that a given module may not yet be fully formed.
        # example: update_item -> update_children -> update_metadata sequence on new item create
        # if we get called here without update_metadata called first then 'metadata' hasn't been set
        # as we're not fully transactional at the DB layer. Same comment applies to below key name
        # check
        my_metadata = results_by_url[url].get('metadata', {})
        # go through all the children and recurse, but only if we have
        # in the result set. Remember results will not contain leaf nodes
        for child in results_by_url[url].get('definition', {}).get('children', []):
            if child in results_by_url:
                new_child_metadata = copy.deepcopy(my_metadata)
                new_child_metadata.update(results_by_url[child].get('metadata', {}))
                results_by_url[child]['metadata'] = new_child_metadata
                metadata_to_inherit[child] = new_child_metadata
                _compute_inherited_metadata(child)
            else:
                # this is likely a leaf node, so let's record what metadata we need to inherit
                metadata_to_inherit[child] = my_metadata

    if root is not None:
        _compute_inherited_metadata(root)

    return metadata_to_inherit
def compute_metadata_inheritance_tree(self, location):
    '''
    Compute the inherited metadata for every descendant of the course at
    `location` and return it as {location_url: metadata_dict}.

    TODO (cdodge) This method can be deleted when the 'split module store' work has been completed
    '''
    # get all collections in the course, this query should not return any leaf nodes
    # note this is a bit ugly as when we add new categories of containers, we have to add it here
    query = {
        '_id.org': location.org,
        '_id.course': location.course,
        '_id.category': {'$in': ['course', 'chapter', 'sequential', 'vertical', 'videosequence',
                                 'wrapper', 'problemset', 'conditional', 'randomize']}
    }
    # we just want the Location, children, and inheritable metadata
    record_filter = {'_id': 1, 'definition.children': 1}

    # just get the inheritable metadata since that is all we need for the computation
    # this minimizes both data pushed over the wire
    for attr in INHERITABLE_METADATA:
        record_filter['metadata.{0}'.format(attr)] = 1

    # call out to the DB
    resultset = self.collection.find(query, record_filter)

    results_by_url = {}
    root = None

    # now go through the results and order them by the location url
    for result in resultset:
        location = Location(result['_id'])
        # We need to collate between draft and non-draft
        # i.e. draft verticals can have children which are not in non-draft versions
        location = location.replace(revision=None)
        location_url = location.url()
        if location_url in results_by_url:
            # BUGFIX: merge the previously-seen children into `result` itself,
            # not into the dict entry we are about to overwrite below --
            # otherwise the merged child list was silently discarded.
            existing_children = results_by_url[location_url].get('definition', {}).get('children', [])
            additional_children = result.get('definition', {}).get('children', [])
            total_children = existing_children + additional_children
            if 'definition' not in result:
                result['definition'] = {}
            result['definition']['children'] = total_children
        results_by_url[location_url] = result
        if location.category == 'course':
            root = location_url

    # now traverse the tree and compute down the inherited metadata
    metadata_to_inherit = {}

    def _compute_inherited_metadata(url):
        """
        Helper method for computing inherited metadata for a specific location url
        """
        # check for presence of metadata key. Note that a given module may not yet be fully formed.
        # example: update_item -> update_children -> update_metadata sequence on new item create
        # if we get called here without update_metadata called first then 'metadata' hasn't been set
        # as we're not fully transactional at the DB layer. Same comment applies to below key name
        # check
        my_metadata = results_by_url[url].get('metadata', {})
        # go through all the children and recurse, but only if we have
        # in the result set. Remember results will not contain leaf nodes
        for child in results_by_url[url].get('definition', {}).get('children', []):
            if child in results_by_url:
                new_child_metadata = copy.deepcopy(my_metadata)
                new_child_metadata.update(results_by_url[child].get('metadata', {}))
                results_by_url[child]['metadata'] = new_child_metadata
                metadata_to_inherit[child] = new_child_metadata
                _compute_inherited_metadata(child)
            else:
                # this is likely a leaf node, so let's record what metadata we need to inherit
                metadata_to_inherit[child] = my_metadata

    if root is not None:
        _compute_inherited_metadata(root)

    return metadata_to_inherit
def test_add_block(self):
    """
    Test add_block_location_translator(location, old_course_id=None, usage_id=None)
    """
    # call w/ no matching courses
    org = 'foo_org'
    course = 'bar_course'
    old_style_course_id = '{}/{}/{}'.format(org, course, 'baz_run')
    problem_name = 'abc123abc123abc123abc123abc123f9'
    location = Location('i4x', org, course, 'problem', problem_name)
    with self.assertRaises(ItemNotFoundError):
        loc_mapper().add_block_location_translator(location)
    with self.assertRaises(ItemNotFoundError):
        loc_mapper().add_block_location_translator(location, old_style_course_id)
    # w/ one matching course
    new_style_course_id = '{}.{}.{}'.format(org, course, 'baz_run')
    loc_mapper().create_map_entry(
        Location('i4x', org, course, 'course', 'baz_run'),
        new_style_course_id,
    )
    new_usage_id = loc_mapper().add_block_location_translator(location)
    self.assertEqual(new_usage_id, 'problemabc')
    # look it up
    translated_loc = loc_mapper().translate_location(
        old_style_course_id, location, add_entry_if_missing=False)
    self.assertEqual(translated_loc.course_id, new_style_course_id)
    self.assertEqual(translated_loc.usage_id, new_usage_id)
    # w/ one distractor which has one entry already
    new_style_course_id = '{}.geek_dept.{}.{}'.format(org, course, 'delta_run')
    loc_mapper().create_map_entry(
        Location('i4x', org, course, 'course', 'delta_run'),
        new_style_course_id,
        block_map={'48f23a10395384929234': {'chapter': 'chapter48f'}})
    # try adding the one added before
    new_usage_id2 = loc_mapper().add_block_location_translator(location)
    self.assertEqual(new_usage_id, new_usage_id2)
    # it should be in the distractor now
    new_location = loc_mapper().translate_locator_to_location(
        BlockUsageLocator(course_id=new_style_course_id,
                          usage_id=new_usage_id2,
                          branch='published'))
    self.assertEqual(new_location, location)
    # add one close to the existing chapter (cause name collision)
    location = Location('i4x', org, course, 'chapter', '48f23a103953849292341234567890ab')
    new_usage_id = loc_mapper().add_block_location_translator(location)
    # Collision resolution appends a digit to the colliding usage_id.
    self.assertRegexpMatches(new_usage_id, r'^chapter48f\d')
    # retrievable from both courses
    new_location = loc_mapper().translate_locator_to_location(
        BlockUsageLocator(course_id=new_style_course_id,
                          usage_id=new_usage_id,
                          branch='published'))
    self.assertEqual(new_location, location)
    new_location = loc_mapper().translate_locator_to_location(
        BlockUsageLocator(course_id='{}.{}.{}'.format(org, course, 'baz_run'),
                          usage_id=new_usage_id,
                          branch='published'))
    self.assertEqual(new_location, location)
    # provoke duplicate item errors
    location = location.replace(name='44f23a103953849292341234567890ab')
    with self.assertRaises(DuplicateItemError):
        loc_mapper().add_block_location_translator(location, usage_id=new_usage_id)
    new_usage_id = loc_mapper().add_block_location_translator(
        location, old_course_id=old_style_course_id)
    other_course_old_style = '{}/{}/{}'.format(org, course, 'delta_run')
    new_usage_id2 = loc_mapper().add_block_location_translator(
        location,
        old_course_id=other_course_old_style,
        usage_id='{}b'.format(new_usage_id))
    # With no old_course_id given, the ambiguous add should be rejected.
    with self.assertRaises(DuplicateItemError):
        loc_mapper().add_block_location_translator(location)
def test_translate_location_dwim(self):
    """
    Test the location translation mechanisms which try to do-what-i-mean by
    creating new entries for never seen queries.

    Relies on self.translate_n_check (defined elsewhere in this class) to
    translate a location and assert the resulting package/usage ids.
    """
    org = 'foo_org'
    course = 'bar_course'
    old_style_course_id = '{}/{}/{}'.format(org, course, 'baz_run')
    problem_name = 'abc123abc123abc123abc123abc123f9'
    location = Location('i4x', org, course, 'problem', problem_name)
    new_style_package_id = '{}.{}.{}'.format(org, course, 'baz_run')
    # first translation auto-creates the course map entry (dwim behavior)
    self.translate_n_check(location, old_style_course_id, new_style_package_id,
                           'problemabc', 'published', True)
    # look for w/ only the Location (works b/c there's only one possible course match): causes cache
    self.translate_n_check(location, None, new_style_package_id,
                           'problemabc', 'published', True)
    # create an entry w/o a guid name
    other_location = Location('i4x', org, course, 'chapter', 'intro')
    # non-guid names are used verbatim as the usage_id
    self.translate_n_check(other_location, old_style_course_id, new_style_package_id,
                           'intro', 'published', True)
    # add a distractor course
    delta_new_package_id = '{}.geek_dept.{}.{}'.format(org, course, 'delta_run')
    delta_course_locn = Location('i4x', org, course, 'course', 'delta_run')
    loc_mapper().create_map_entry(
        delta_course_locn,
        delta_new_package_id,
        block_map={problem_name: {'problem': 'problem3'}})
    self.translate_n_check(location, old_style_course_id, new_style_package_id,
                           'problemabc', 'published', True)
    # add a new one to both courses (ensure name doesn't have same beginning)
    new_prob_name = uuid.uuid4().hex
    while new_prob_name.startswith('abc'):
        new_prob_name = uuid.uuid4().hex
    new_prob_locn = location.replace(name=new_prob_name)
    new_usage_id = 'problem{}'.format(new_prob_name[:3])
    self.translate_n_check(new_prob_locn, old_style_course_id, new_style_package_id,
                           new_usage_id, 'published', True)
    self.translate_n_check(new_prob_locn, delta_course_locn.course_id, delta_new_package_id,
                           new_usage_id, 'published', True)
    # look for w/ only the Location: causes caching and not unique; so, can't check which course
    prob_locator = loc_mapper().translate_location(
        None, new_prob_locn, add_entry_if_missing=True)
    self.assertIsNotNone(prob_locator, "couldn't find ambiguous location")
    # add a default course pointing to the delta_run
    loc_mapper().create_map_entry(
        Location('i4x', org, course, 'problem', '789abc123efg456'),
        delta_new_package_id,
        block_map={problem_name: {'problem': 'problem3'}})
    # now the ambiguous query should return delta
    again_prob_name = uuid.uuid4().hex
    # avoid both the original 'abc...' prefix and the previous random prefix
    # so the generated usage_ids can't collide
    while again_prob_name.startswith('abc') or again_prob_name.startswith(new_prob_name[:3]):
        again_prob_name = uuid.uuid4().hex
    again_prob_locn = location.replace(name=again_prob_name)
    again_usage_id = 'problem{}'.format(again_prob_name[:3])
    self.translate_n_check(again_prob_locn, old_style_course_id, new_style_package_id,
                           again_usage_id, 'published', True)
    self.translate_n_check(again_prob_locn, delta_course_locn.course_id, delta_new_package_id,
                           again_usage_id, 'published', True)
    self.translate_n_check(again_prob_locn, None, delta_new_package_id,
                           again_usage_id, 'published', True)
def create_source_course(self):
    """
    A course testing all of the conversion mechanisms:
    * some inheritable settings
    * sequences w/ draft and live intermixed children to ensure all get to the
      draft but only the live ones get to published. Some are only draft, some
      are both, some are only live.
    * about, static_tab, and conditional documents

    Builds the course in self.old_mongo (published) and self.draft_mongo (draft),
    recording the course root in self.course_location.
    """
    location = Location('i4x', 'test_org', 'test_course', 'course', 'runid')
    self.course_location = location
    date_proxy = Date()
    metadata = {
        'start': date_proxy.to_json(datetime.datetime(2000, 3, 13, 4)),
        'display_name': 'Migration test course',
    }
    data = {'wiki_slug': 'test_course_slug'}
    course_root = self._create_and_get_item(self.old_mongo, location, data, metadata)
    runtime = course_root.runtime
    # chapters
    location = location.replace(category='chapter', name=uuid.uuid4().hex)
    chapter1 = self._create_and_get_item(
        self.old_mongo, location, {}, {'display_name': 'Chapter 1'}, runtime)
    course_root.children.append(chapter1.location.url())
    location = location.replace(category='chapter', name=uuid.uuid4().hex)
    chapter2 = self._create_and_get_item(
        self.old_mongo, location, {}, {'display_name': 'Chapter 2'}, runtime)
    course_root.children.append(chapter2.location.url())
    # persist the course's new children list
    self.old_mongo.update_item(course_root, '**replace_user**')
    # vertical in live only
    location = location.replace(category='vertical', name=uuid.uuid4().hex)
    live_vert = self._create_and_get_item(
        self.old_mongo, location, {}, {'display_name': 'Live vertical'}, runtime)
    chapter1.children.append(live_vert.location.url())
    self.create_random_units(self.old_mongo, live_vert)
    # vertical in both live and draft
    location = location.replace(category='vertical', name=uuid.uuid4().hex)
    both_vert = self._create_and_get_item(
        self.old_mongo, location, {}, {'display_name': 'Both vertical'}, runtime)
    # same location, different display_name in draft to distinguish the copies
    draft_both = self._create_and_get_item(
        self.draft_mongo, location, {}, {'display_name': 'Both vertical renamed'}, runtime)
    chapter1.children.append(both_vert.location.url())
    self.create_random_units(self.old_mongo, both_vert, self.draft_mongo, draft_both)
    # vertical in draft only (x2)
    location = location.replace(category='vertical', name=uuid.uuid4().hex)
    draft_vert = self._create_and_get_item(
        self.draft_mongo, location, {}, {'display_name': 'Draft vertical'}, runtime)
    chapter1.children.append(draft_vert.location.url())
    self.create_random_units(self.draft_mongo, draft_vert)
    location = location.replace(category='vertical', name=uuid.uuid4().hex)
    draft_vert = self._create_and_get_item(
        self.draft_mongo, location, {}, {'display_name': 'Draft vertical2'}, runtime)
    chapter1.children.append(draft_vert.location.url())
    self.create_random_units(self.draft_mongo, draft_vert)
    # and finally one in live only (so published has to skip 2)
    location = location.replace(category='vertical', name=uuid.uuid4().hex)
    live_vert = self._create_and_get_item(
        self.old_mongo, location, {}, {'display_name': 'Live vertical end'}, runtime)
    chapter1.children.append(live_vert.location.url())
    self.create_random_units(self.old_mongo, live_vert)
    # update the chapter
    self.old_mongo.update_item(chapter1, '**replace_user**')
    # now the other one w/ the conditional
    # first create some show children
    indirect1 = self._create_and_get_item(
        self.old_mongo,
        location.replace(category='discussion', name=uuid.uuid4().hex),
        "", {'display_name': 'conditional show 1'}, runtime)
    indirect2 = self._create_and_get_item(
        self.old_mongo,
        location.replace(category='html', name=uuid.uuid4().hex),
        "", {'display_name': 'conditional show 2'}, runtime)
    location = location.replace(category='conditional', name=uuid.uuid4().hex)
    metadata = {
        'xml_attributes': {
            'sources': [live_vert.location.url(), ],
            'completed': True,
        },
    }
    data = {
        'show_tag_list': [indirect1.location.url(), indirect2.location.url()]
    }
    conditional = self._create_and_get_item(self.old_mongo, location, data, metadata, runtime)
    conditional.children = [indirect1.location.url(), indirect2.location.url()]
    # add direct children
    self.create_random_units(self.old_mongo, conditional)
    chapter2.children.append(conditional.location.url())
    self.old_mongo.update_item(chapter2, '**replace_user**')
    # and the ancillary docs (not children)
    location = location.replace(category='static_tab', name=uuid.uuid4().hex)
    # the below automatically adds the tab to the course
    _tab = self._create_and_get_item(
        self.old_mongo, location, "", {'display_name': 'Tab uno'}, runtime)
    location = location.replace(category='about', name='overview')
    _overview = self._create_and_get_item(
        self.old_mongo, location, "<p>test</p>", {}, runtime)
    location = location.replace(category='course_info', name='updates')
    _overview = self._create_and_get_item(
        self.old_mongo, location,
        "<ol><li><h2>Sep 22</h2><p>test</p></li></ol>", {}, runtime)
class RolesTestCase(TestCase):
    """
    Tests of student.roles
    """

    def setUp(self):
        # one course location shared by all role fixtures
        self.course = Location('i4x://edX/toy/course/2012_Fall')
        self.anonymous_user = AnonymousUserFactory()
        self.student = UserFactory()
        self.global_staff = UserFactory(is_staff=True)
        self.course_staff = StaffFactory(course=self.course)
        self.course_instructor = InstructorFactory(course=self.course)

    def test_global_staff(self):
        # course-level roles must not grant global staff
        self.assertFalse(GlobalStaff().has_user(self.student))
        self.assertFalse(GlobalStaff().has_user(self.course_staff))
        self.assertFalse(GlobalStaff().has_user(self.course_instructor))
        self.assertTrue(GlobalStaff().has_user(self.global_staff))

    def test_group_name_case_insensitive(self):
        """
        Role membership lookups must ignore case in both the location
        and the stored group name.
        """
        uppercase_loc = "i4x://ORG/COURSE/course/NAME"
        lowercase_loc = uppercase_loc.lower()
        lowercase_group = "role_org/course/name"
        uppercase_group = lowercase_group.upper()
        lowercase_user = UserFactory(groups=lowercase_group)
        uppercase_user = UserFactory(groups=uppercase_group)
        # every case combination of location x group must match
        self.assertTrue(CourseRole("role", lowercase_loc).has_user(lowercase_user))
        self.assertTrue(CourseRole("role", uppercase_loc).has_user(lowercase_user))
        self.assertTrue(CourseRole("role", lowercase_loc).has_user(uppercase_user))
        self.assertTrue(CourseRole("role", uppercase_loc).has_user(uppercase_user))

    def test_course_role(self):
        """
        Test that giving a user a course role enables access appropriately
        """
        course_locator = loc_mapper().translate_location(
            self.course.course_id, self.course, add_entry_if_missing=True)
        self.assertFalse(
            CourseStaffRole(course_locator).has_user(self.student),
            "Student has premature access to {}".format(unicode(course_locator)))
        self.assertFalse(
            CourseStaffRole(self.course).has_user(self.student),
            "Student has premature access to {}".format(self.course.url()))
        # grant via the locator form; both locator and old-style Location
        # checks should then succeed
        CourseStaffRole(course_locator).add_users(self.student)
        self.assertTrue(
            CourseStaffRole(course_locator).has_user(self.student),
            "Student doesn't have access to {}".format(unicode(course_locator)))
        self.assertTrue(
            CourseStaffRole(self.course).has_user(self.student),
            "Student doesn't have access to {}".format(unicode(self.course.url())))
        # now try accessing something internal to the course
        vertical_locator = BlockUsageLocator(
            package_id=course_locator.package_id,
            branch='published',
            block_id='madeup')
        vertical_location = self.course.replace(category='vertical', name='madeuptoo')
        # course-level staff role should cover any block within the course
        self.assertTrue(
            CourseStaffRole(vertical_locator).has_user(self.student),
            "Student doesn't have access to {}".format(unicode(vertical_locator)))
        self.assertTrue(
            CourseStaffRole(vertical_location,
                            course_context=self.course.course_id).has_user(self.student),
            "Student doesn't have access to {}".format(unicode(vertical_location.url())))
def compute_metadata_inheritance_tree(self, location):
    '''
    Compute the inherited metadata for every block reachable from the course
    rooted at `location`'s org/course.

    Queries all container-category documents (leaf nodes never appear in the
    result), collates draft and published records for the same block, then
    walks the tree from the course root pushing each node's inheritable
    metadata down onto its children.

    Returns a dict mapping child location url -> inherited metadata dict.

    TODO (cdodge) This method can be deleted when the 'split module store' work has been completed
    '''
    # get all collections in the course, this query should not return any leaf nodes
    # note this is a bit ugly as when we add new categories of containers, we have to add it here
    query = {
        '_id.org': location.org,
        '_id.course': location.course,
        '_id.category': {
            '$in': [
                'course', 'chapter', 'sequential', 'vertical', 'videosequence',
                'wrapper', 'problemset', 'conditional', 'randomize'
            ]
        }
    }
    # we just want the Location, children, and inheritable metadata
    record_filter = {'_id': 1, 'definition.children': 1}
    # just get the inheritable metadata since that is all we need for the computation
    # this minimizes both data pushed over the wire
    for field_name in InheritanceMixin.fields:
        record_filter['metadata.{0}'.format(field_name)] = 1
    # call out to the DB
    resultset = self.collection.find(query, record_filter)
    results_by_url = {}
    root = None
    # now go through the results and order them by the location url
    for result in resultset:
        result_location = Location(result['_id'])
        # We need to collate between draft and non-draft
        # i.e. draft verticals will have draft children but will have non-draft parents currently
        result_location = result_location.replace(revision=None)
        location_url = result_location.url()
        if location_url in results_by_url:
            # merge the children lists of the two revisions of this block.
            # BUGFIX: write the merged list into `result` (the record we are
            # about to store) rather than into the record being replaced;
            # otherwise the merge is silently discarded by the assignment below.
            existing_children = results_by_url[location_url].get('definition', {}).get('children', [])
            additional_children = result.get('definition', {}).get('children', [])
            result.setdefault('definition', {})['children'] = existing_children + additional_children
        results_by_url[location_url] = result
        if result_location.category == 'course':
            root = location_url
    # now traverse the tree and compute down the inherited metadata
    metadata_to_inherit = {}

    def _compute_inherited_metadata(url):
        """
        Helper method for computing inherited metadata for a specific location url
        """
        my_metadata = results_by_url[url].get('metadata', {})
        # go through all the children and recurse, but only if we have
        # in the result set. Remember results will not contain leaf nodes
        for child in results_by_url[url].get('definition', {}).get('children', []):
            if child in results_by_url:
                # child's own metadata overrides what it inherits
                new_child_metadata = copy.deepcopy(my_metadata)
                new_child_metadata.update(results_by_url[child].get('metadata', {}))
                results_by_url[child]['metadata'] = new_child_metadata
                metadata_to_inherit[child] = new_child_metadata
                _compute_inherited_metadata(child)
            else:
                # this is likely a leaf node, so let's record what metadata we need to inherit
                metadata_to_inherit[child] = my_metadata

    if root is not None:
        _compute_inherited_metadata(root)
    return metadata_to_inherit
class TestPublish(unittest.TestCase):
    """
    Test the publish code (primary causing orphans)
    """
    # Snippet of what would be in the django settings envs file
    db_config = {
        'host': 'localhost',
        'db': 'test_xmodule',
    }

    modulestore_options = {
        'default_class': 'xmodule.raw_module.RawDescriptor',
        'fs_root': '',
        'render_template': mock.Mock(return_value=""),
        'xblock_mixins': (InheritanceMixin,)
    }

    def setUp(self):
        # randomized collection name so concurrent/repeated runs don't collide
        self.db_config['collection'] = 'modulestore{0}'.format(uuid.uuid4().hex[:5])
        self.old_mongo = MongoModuleStore(self.db_config, **self.modulestore_options)
        self.draft_mongo = DraftMongoModuleStore(self.db_config, **self.modulestore_options)
        self.addCleanup(self.tear_down_mongo)
        self.course_location = None

    def tear_down_mongo(self):
        """Drop the per-test collection and close the mongo connection."""
        # old_mongo doesn't give a db attr, but all of the dbs are the same and draft and pub use same collection
        dbref = self.old_mongo.collection.database
        dbref.drop_collection(self.old_mongo.collection)
        dbref.connection.close()

    def _create_item(self, category, name, data, metadata, parent_category, parent_name, runtime):
        """
        Create the item in either draft or direct based on category and attach to its parent.
        """
        location = self.course_location.replace(category=category, name=name)
        # DIRECT_ONLY categories (course, chapter, ...) bypass the draft store
        if category in DIRECT_ONLY_CATEGORIES:
            mongo = self.old_mongo
        else:
            mongo = self.draft_mongo
        mongo.create_and_save_xmodule(location, data, metadata, runtime)
        # NOTE: the previous revision built an unused `fields` dict here; removed as dead code.
        if parent_name:
            # add child to parent in mongo
            parent_location = self.course_location.replace(category=parent_category, name=parent_name)
            parent = self.draft_mongo.get_item(parent_location)
            parent.children.append(location.url())
            if parent_category in DIRECT_ONLY_CATEGORIES:
                mongo = self.old_mongo
            else:
                mongo = self.draft_mongo
            mongo.update_children(parent_location, parent.children)

    def _create_course(self):
        """
        Create the course, publish all verticals * some detached items
        """
        date_proxy = Date()
        metadata = {
            'start': date_proxy.to_json(datetime.datetime(2000, 3, 13, 4)),
            'display_name': 'Migration test course',
        }
        data = {
            'wiki_slug': 'test_course_slug'
        }
        self.course_location = Location('i4x', 'test_org', 'test_course', 'course', 'runid')
        self.old_mongo.create_and_save_xmodule(self.course_location, data, metadata)
        runtime = self.draft_mongo.get_item(self.course_location).runtime
        self._create_item('chapter', 'Chapter1', {}, {'display_name': 'Chapter 1'}, 'course', 'runid', runtime)
        self._create_item('chapter', 'Chapter2', {}, {'display_name': 'Chapter 2'}, 'course', 'runid', runtime)
        self._create_item('vertical', 'Vert1', {}, {'display_name': 'Vertical 1'}, 'chapter', 'Chapter1', runtime)
        self._create_item('vertical', 'Vert2', {}, {'display_name': 'Vertical 2'}, 'chapter', 'Chapter1', runtime)
        self._create_item('html', 'Html1', "<p>Goodbye</p>", {'display_name': 'Parented Html'}, 'vertical', 'Vert1', runtime)
        self._create_item(
            'discussion', 'Discussion1',
            "discussion discussion_category=\"Lecture 1\" discussion_id=\"a08bfd89b2aa40fa81f2c650a9332846\" discussion_target=\"Lecture 1\"/>\n",
            {
                "discussion_category": "Lecture 1",
                "discussion_target": "Lecture 1",
                "display_name": "Lecture 1 Discussion",
                "discussion_id": "a08bfd89b2aa40fa81f2c650a9332846"
            },
            'vertical', 'Vert1', runtime
        )
        self._create_item('html', 'Html2', "<p>Hellow</p>", {'display_name': 'Hollow Html'}, 'vertical', 'Vert1', runtime)
        self._create_item(
            'discussion', 'Discussion2',
            "discussion discussion_category=\"Lecture 2\" discussion_id=\"b08bfd89b2aa40fa81f2c650a9332846\" discussion_target=\"Lecture 2\"/>\n",
            {
                "discussion_category": "Lecture 2",
                "discussion_target": "Lecture 2",
                "display_name": "Lecture 2 Discussion",
                "discussion_id": "b08bfd89b2aa40fa81f2c650a9332846"
            },
            'vertical', 'Vert2', runtime
        )
        # detached (non-child) documents
        self._create_item('static_tab', 'staticuno', "<p>tab</p>", {'display_name': 'Tab uno'}, None, None, runtime)
        self._create_item('about', 'overview', "<p>overview</p>", {}, None, None, runtime)
        self._create_item('course_info', 'updates', "<ol><li><h2>Sep 22</h2><p>test</p></li></ol>", {}, None, None, runtime)

    def _xmodule_recurse(self, item, action):
        """
        Applies action depth-first down tree and to item last.
        A copy of cms.djangoapps.contentstore.views.helpers._xmodule_recurse to reproduce its use and behavior
        outside of django.
        """
        for child in item.get_children():
            self._xmodule_recurse(child, action)
        action(item)

    def test_publish_draft_delete(self):
        """
        To reproduce a bug (STUD-811) publish a vertical, convert to draft, delete a child, move a child, publish.
        See if deleted and moved children still is connected or exists in db (bug was disconnected but existed)
        """
        self._create_course()
        userid = random.getrandbits(32)
        location = self.course_location.replace(category='vertical', name='Vert1')
        item = self.draft_mongo.get_item(location, 2)
        self._xmodule_recurse(
            item,
            lambda i: self.draft_mongo.publish(i.location, userid)
        )
        # verify status
        item = self.draft_mongo.get_item(location, 0)
        self.assertFalse(getattr(item, 'is_draft', False), "Item was published. Draft should not exist")
        # however, children are still draft, but I'm not sure that's by design
        # convert back to draft
        self.draft_mongo.convert_to_draft(location)
        # both draft and published should exist
        draft_vert = self.draft_mongo.get_item(location, 0)
        self.assertTrue(getattr(draft_vert, 'is_draft', False), "Item was converted to draft but doesn't say so")
        item = self.old_mongo.get_item(location, 0)
        self.assertFalse(getattr(item, 'is_draft', False), "Published item doesn't say so")
        # delete the discussion (which oddly is not in draft mode)
        location = self.course_location.replace(category='discussion', name='Discussion1')
        self.draft_mongo.delete_item(location)
        # remove pointer from draft vertical (verify presence first to ensure process is valid)
        self.assertIn(location.url(), draft_vert.children)
        draft_vert.children.remove(location.url())
        # move the other child
        other_child_loc = self.course_location.replace(category='html', name='Html2')
        draft_vert.children.remove(other_child_loc.url())
        other_vert = self.draft_mongo.get_item(
            self.course_location.replace(category='vertical', name='Vert2'), 0)
        other_vert.children.append(other_child_loc.url())
        self.draft_mongo.update_children(draft_vert.location, draft_vert.children)
        self.draft_mongo.update_children(other_vert.location, other_vert.children)
        # publish
        self._xmodule_recurse(
            draft_vert,
            lambda i: self.draft_mongo.publish(i.location, userid)
        )
        item = self.old_mongo.get_item(draft_vert.location, 0)
        # deleted child must be gone from both the children list and the db
        self.assertNotIn(location.url(), item.children)
        with self.assertRaises(ItemNotFoundError):
            self.draft_mongo.get_item(location)
        # moved child must be out of Vert1 but still present in the store
        self.assertNotIn(other_child_loc.url(), item.children)
        self.assertTrue(self.draft_mongo.has_item(None, other_child_loc), "Oops, lost moved item")
class TestOrphan(unittest.TestCase):
    """
    Test the orphan finding code
    """
    # Snippet of what would be in the django settings envs file
    db_config = {
        'host': 'localhost',
        'db': 'test_xmodule',
    }

    modulestore_options = {
        'default_class': 'xmodule.raw_module.RawDescriptor',
        'fs_root': '',
        'render_template': mock.Mock(return_value=""),
        'xblock_mixins': (InheritanceMixin,)
    }

    # new-style package id mirrored in both stores
    split_package_id = 'test_org.test_course.runid'

    def setUp(self):
        # randomized collection name so concurrent/repeated runs don't collide
        self.db_config['collection'] = 'modulestore{0}'.format(uuid.uuid4().hex[:5])
        self.userid = random.getrandbits(32)
        super(TestOrphan, self).setUp()
        self.split_mongo = SplitMongoModuleStore(
            self.db_config,
            **self.modulestore_options
        )
        self.addCleanup(self.tear_down_split)
        self.old_mongo = MongoModuleStore(self.db_config, **self.modulestore_options)
        self.addCleanup(self.tear_down_mongo)
        self.course_location = None
        self._create_course()

    def tear_down_split(self):
        """
        Remove the test collections, close the db connection
        """
        split_db = self.split_mongo.db
        split_db.drop_collection(split_db.course_index)
        split_db.drop_collection(split_db.structures)
        split_db.drop_collection(split_db.definitions)
        split_db.connection.close()

    def tear_down_mongo(self):
        """
        Remove the test collections, close the db connection
        """
        split_db = self.split_mongo.db
        # old_mongo doesn't give a db attr, but all of the dbs are the same
        split_db.drop_collection(self.old_mongo.collection)

    def _create_item(self, category, name, data, metadata, parent_category, parent_name, runtime):
        """
        Create the item of the given category and block id in split and old mongo, add it to the optional parent.
        The parent category is only needed because old mongo requires it for the id.
        """
        location = Location('i4x', 'test_org', 'test_course', category, name)
        self.old_mongo.create_and_save_xmodule(location, data, metadata, runtime)
        # split takes a single fields dict instead of separate data/metadata
        if isinstance(data, basestring):
            fields = {'data': data}
        else:
            fields = data.copy()
        fields.update(metadata)
        if parent_name:
            # add child to parent in mongo
            parent_location = Location('i4x', 'test_org', 'test_course', parent_category, parent_name)
            parent = self.old_mongo.get_item(parent_location)
            parent.children.append(location.url())
            self.old_mongo.update_item(parent, self.userid)
            # create pointer for split
            course_or_parent_locator = BlockUsageLocator(
                package_id=self.split_package_id,
                branch='draft',
                block_id=parent_name
            )
        else:
            # no parent: create the block directly under the course (an orphan
            # or a detached doc, depending on the category)
            course_or_parent_locator = CourseLocator(
                package_id='test_org.test_course.runid',
                branch='draft',
            )
        self.split_mongo.create_item(
            course_or_parent_locator, category, self.userid, block_id=name, fields=fields)

    def _create_course(self):
        """
        * some detached items
        * some attached children
        * some orphans
        """
        date_proxy = Date()
        metadata = {
            'start': date_proxy.to_json(datetime.datetime(2000, 3, 13, 4)),
            'display_name': 'Migration test course',
        }
        data = {
            'wiki_slug': 'test_course_slug'
        }
        fields = metadata.copy()
        fields.update(data)
        # split requires the course to be created separately from creating items
        self.split_mongo.create_course(
            self.split_package_id, 'test_org', self.userid, fields=fields, root_block_id='runid'
        )
        self.course_location = Location('i4x', 'test_org', 'test_course', 'course', 'runid')
        self.old_mongo.create_and_save_xmodule(self.course_location, data, metadata)
        runtime = self.old_mongo.get_item(self.course_location).runtime
        self._create_item('chapter', 'Chapter1', {}, {'display_name': 'Chapter 1'}, 'course', 'runid', runtime)
        self._create_item('chapter', 'Chapter2', {}, {'display_name': 'Chapter 2'}, 'course', 'runid', runtime)
        self._create_item('chapter', 'OrphanChapter', {}, {'display_name': 'Orphan Chapter'}, None, None, runtime)
        self._create_item('vertical', 'Vert1', {}, {'display_name': 'Vertical 1'}, 'chapter', 'Chapter1', runtime)
        self._create_item('vertical', 'OrphanVert', {}, {'display_name': 'Orphan Vertical'}, None, None, runtime)
        self._create_item('html', 'Html1', "<p>Goodbye</p>", {'display_name': 'Parented Html'}, 'vertical', 'Vert1', runtime)
        self._create_item('html', 'OrphanHtml', "<p>Hello</p>", {'display_name': 'Orphan html'}, None, None, runtime)
        # detached categories are parentless by design and must NOT count as orphans
        self._create_item('static_tab', 'staticuno', "<p>tab</p>", {'display_name': 'Tab uno'}, None, None, runtime)
        self._create_item('about', 'overview', "<p>overview</p>", {}, None, None, runtime)
        self._create_item('course_info', 'updates', "<ol><li><h2>Sep 22</h2><p>test</p></li></ol>", {}, None, None, runtime)

    def test_mongo_orphan(self):
        """
        Test that old mongo finds the orphans
        """
        orphans = self.old_mongo.get_orphans(self.course_location, None)
        self.assertEqual(len(orphans), 3, "Wrong # {}".format(orphans))
        location = self.course_location.replace(category='chapter', name='OrphanChapter')
        self.assertIn(location.url(), orphans)
        location = self.course_location.replace(category='vertical', name='OrphanVert')
        self.assertIn(location.url(), orphans)
        location = self.course_location.replace(category='html', name='OrphanHtml')
        self.assertIn(location.url(), orphans)

    def test_split_orphan(self):
        """
        Test that split mongo finds the orphans
        """
        orphans = self.split_mongo.get_orphans(self.split_package_id, 'draft')
        self.assertEqual(len(orphans), 3, "Wrong # {}".format(orphans))
        location = BlockUsageLocator(
            package_id=self.split_package_id, branch='draft', block_id='OrphanChapter')
        self.assertIn(location, orphans)
        location = BlockUsageLocator(
            package_id=self.split_package_id, branch='draft', block_id='OrphanVert')
        self.assertIn(location, orphans)
        location = BlockUsageLocator(
            package_id=self.split_package_id, branch='draft', block_id='OrphanHtml')
        self.assertIn(location, orphans)
def update_from_json(cls, jsondict):
    """
    Decode the json into CourseDetails and save any changed attrs to the db.

    `jsondict` must contain 'course_location' plus the editable detail fields
    (dates, syllabus, overview, effort, intro_video). Date fields absent from
    `jsondict` are set to None on the descriptor, matching prior behavior.

    Returns a freshly fetched CourseDetails for the course.
    """
    # TODO make it an error for this to be undefined & for it to not be retrievable from modulestore
    course_location = Location(jsondict['course_location'])
    # Will probably want to cache the inflight courses because every blur generates an update
    descriptor = get_modulestore(course_location).get_item(course_location)
    dirty = False
    # In the descriptor's setter, the date is converted to JSON using Date's to_json method.
    # Calling to_json on something that is already JSON doesn't work. Since reaching directly
    # into the model is nasty, convert the JSON Date to a Python date, which is what the
    # setter expects as input.
    date = Date()
    # (descriptor attribute, json key) pairs handled identically; the loop
    # replaces four copy-pasted if/else blocks with one data-driven pass.
    date_fields = (
        ('start', 'start_date'),
        ('end', 'end_date'),
        ('enrollment_start', 'enrollment_start'),
        ('enrollment_end', 'enrollment_end'),
    )
    for attr_name, json_key in date_fields:
        if json_key in jsondict:
            converted = date.from_json(jsondict[json_key])
        else:
            converted = None
        if converted != getattr(descriptor, attr_name):
            dirty = True
            setattr(descriptor, attr_name, converted)
    if dirty:
        get_modulestore(course_location).update_metadata(
            course_location, own_metadata(descriptor))
    # NOTE: below auto writes to the db w/o verifying that any of the fields actually changed
    # to make faster, could compare against db or could have client send over a list of which fields changed.
    temploc = Location(course_location).replace(category='about', name='syllabus')
    update_item(temploc, jsondict['syllabus'])
    temploc = temploc.replace(name='overview')
    update_item(temploc, jsondict['overview'])
    temploc = temploc.replace(name='effort')
    update_item(temploc, jsondict['effort'])
    temploc = temploc.replace(name='video')
    recomposed_video_tag = CourseDetails.recompose_video_tag(jsondict['intro_video'])
    update_item(temploc, recomposed_video_tag)
    # Could just generate and return a course obj w/o doing any db reads, but I put the reads in as a means to confirm
    # it persisted correctly
    return CourseDetails.fetch(course_location)
def update_from_json(cls, course_locator, jsondict):
    """
    Decode the json into CourseDetails and save any changed attrs to the db.

    `course_locator` identifies the course in the new (locator) scheme; it is
    translated back to an old-style location for the modulestore calls.
    Date fields absent from `jsondict` are set to None on the descriptor,
    matching prior behavior. Also updates the course image if
    'course_image_name' changed.

    Returns a freshly fetched CourseDetails for the locator.
    """
    course_old_location = loc_mapper().translate_locator_to_location(course_locator)
    descriptor = get_modulestore(course_old_location).get_item(course_old_location)
    dirty = False
    # In the descriptor's setter, the date is converted to JSON using Date's to_json method.
    # Calling to_json on something that is already JSON doesn't work. Since reaching directly
    # into the model is nasty, convert the JSON Date to a Python date, which is what the
    # setter expects as input.
    date = Date()
    # (descriptor attribute, json key) pairs handled identically; the loop
    # replaces four copy-pasted if/else blocks with one data-driven pass.
    date_fields = (
        ('start', 'start_date'),
        ('end', 'end_date'),
        ('enrollment_start', 'enrollment_start'),
        ('enrollment_end', 'enrollment_end'),
    )
    for attr_name, json_key in date_fields:
        if json_key in jsondict:
            converted = date.from_json(jsondict[json_key])
        else:
            converted = None
        if converted != getattr(descriptor, attr_name):
            dirty = True
            setattr(descriptor, attr_name, converted)
    if 'course_image_name' in jsondict and jsondict['course_image_name'] != descriptor.course_image:
        descriptor.course_image = jsondict['course_image_name']
        dirty = True
    if dirty:
        # Save the data that we've just changed to the underlying
        # MongoKeyValueStore before we update the mongo datastore.
        descriptor.save()
        get_modulestore(course_old_location).update_metadata(
            course_old_location, own_metadata(descriptor))
    # NOTE: below auto writes to the db w/o verifying that any of the fields actually changed
    # to make faster, could compare against db or could have client send over a list of which fields changed.
    temploc = Location(course_old_location).replace(category='about', name='syllabus')
    update_item(temploc, jsondict['syllabus'])
    temploc = temploc.replace(name='overview')
    update_item(temploc, jsondict['overview'])
    temploc = temploc.replace(name='effort')
    update_item(temploc, jsondict['effort'])
    temploc = temploc.replace(name='video')
    recomposed_video_tag = CourseDetails.recompose_video_tag(jsondict['intro_video'])
    update_item(temploc, recomposed_video_tag)
    # Could just return jsondict w/o doing any db reads, but I put the reads in as a means to confirm
    # it persisted correctly
    return CourseDetails.fetch(course_locator)
def test_cms_imported_course_walkthrough(self):
    """
    Import and walk through some common URL endpoints. This just
    verifies non-500 and no other correct behavior, so it is not a
    deep test
    """
    import_from_xml(modulestore('direct'), 'common/test/data/', ['simple'])
    loc = Location(['i4x', 'edX', 'simple', 'course', '2012_Fall', None])

    def get_and_check(view_name, **view_kwargs):
        """GET the named view, assert a 200, and hand back the response."""
        response = self.client.get(reverse(view_name, kwargs=view_kwargs))
        self.assertEqual(200, response.status_code)
        return response

    def delete_and_check(target):
        """POST a delete_item request for the given location, asserting a 200."""
        response = self.client.post(
            reverse('delete_item'),
            json.dumps({'id': target.url()}),
            "application/json"
        )
        self.assertEqual(200, response.status_code)

    course_kwargs = {'org': loc.org, 'course': loc.course, 'name': loc.name}

    resp = get_and_check('course_index', **course_kwargs)
    self.assertContains(resp, 'Chapter 2')

    # go to various pages: import page, export page
    for view_name in ('import_course', 'export_course'):
        get_and_check(view_name, **course_kwargs)

    # manage users (takes a raw location rather than org/course/name)
    get_and_check('manage_users', location=loc.url())

    # course info and settings pages
    for view_name in ('course_info', 'settings_details', 'settings_grading'):
        get_and_check(view_name, **course_kwargs)

    # static_pages uses 'coursename' as its kwarg instead of 'name'
    get_and_check('static_pages', org=loc.org, course=loc.course, coursename=loc.name)

    get_and_check('asset_index', **course_kwargs)

    # go look at a subsection page
    subsection_location = loc.replace(category='sequential', name='test_sequence')
    get_and_check('edit_subsection', location=subsection_location.url())

    # go look at the Edit page
    unit_location = loc.replace(category='vertical', name='test_vertical')
    get_and_check('edit_unit', location=unit_location.url())

    # delete a component, then a unit, a subsection, and a chapter
    delete_and_check(loc.replace(category='html', name='test_html'))
    delete_and_check(loc.replace(category='vertical', name='test_vertical'))
    delete_and_check(loc.replace(category='sequential', name='test_sequence'))
    delete_and_check(loc.replace(category='chapter', name='chapter_2'))
def create_new_course(request):
    """
    Create a new course
    """
    if not is_user_in_creator_group(request.user):
        raise PermissionDenied()

    org = request.POST.get('org')
    number = request.POST.get('number')
    display_name = request.POST.get('display_name')
    run = request.POST.get('run')

    try:
        course_location = Location('i4x', org, number, 'course', run)
    except InvalidLocationError as error:
        return JsonResponse({
            "ErrMsg": _("Unable to create course '{name}'.\n\n{err}").format(
                name=display_name, err=error.message)})

    def conflict_response(message):
        """Build the standard org/number-conflict JSON payload."""
        field_msg = _('Please change either the organization or '
                      'course number so that it is unique.')
        return JsonResponse({
            'ErrMsg': message,
            'OrgErrMsg': field_msg,
            'CourseErrMsg': field_msg,
        })

    # see if the course already exists (exact org/number/run match)
    store = modulestore('direct')
    try:
        store.get_item(course_location)
        course_exists = True
    except ItemNotFoundError:
        course_exists = False

    if course_exists:
        return conflict_response(
            _('There is already a course defined with the same '
              'organization, course number, and course run. Please '
              'change either organization or course number to be '
              'unique.')
        )

    # any run of the same org/number pair is also a conflict
    matching_courses = modulestore().get_items(
        ['i4x', course_location.org, course_location.course, 'course', None]
    )
    if len(matching_courses) > 0:
        return conflict_response(
            _('There is already a course defined with the same '
              'organization and course number. Please '
              'change at least one field to be unique.')
        )

    # instantiate the CourseDescriptor and then persist it
    # note: no system to pass
    metadata = {} if display_name is None else {'display_name': display_name}
    store.create_and_save_xmodule(course_location, metadata=metadata)
    new_course = store.get_item(course_location)

    # clone a default 'about' overview module as well
    about_location = course_location.replace(category='about', name='overview')
    overview_template = AboutDescriptor.get_template('overview.yaml')
    store.create_and_save_xmodule(
        about_location,
        system=new_course.system,
        definition_data=overview_template.get('data')
    )

    initialize_course_tabs(new_course)
    create_all_course_groups(request.user, new_course.location)

    # seed the forums
    seed_permissions_roles(new_course.location.course_id)

    # auto-enroll the course creator in the course so that "View Live" will
    # work.
    CourseEnrollment.enroll(request.user, new_course.location.course_id)

    return JsonResponse({'id': new_course.location.url()})
def create_new_course(request):
    """
    Create a new course.

    Reads org/number/display_name/run from the JSON request body, refuses to
    create a duplicate (exact run match, or any case-insensitive org/number
    match), persists the course plus a default 'about' overview module, and
    grants the creator instructor/staff roles.

    Returns the URL for the course overview page.
    """
    if not auth.has_access(request.user, CourseCreatorRole()):
        raise PermissionDenied()
    org = request.json.get('org')
    number = request.json.get('number')
    display_name = request.json.get('display_name')
    run = request.json.get('run')
    try:
        dest_location = Location(u'i4x', org, number, u'course', run)
    except InvalidLocationError as error:
        return JsonResponse({
            "ErrMsg": _("Unable to create course '{name}'.\n\n{err}").format(
                name=display_name, err=error.message)})

    # see if the course already exists
    existing_course = None
    try:
        existing_course = modulestore('direct').get_item(dest_location)
    except ItemNotFoundError:
        pass
    if existing_course is not None:
        return JsonResponse({
            'ErrMsg': _(
                'There is already a course defined with the same '
                'organization, course number, and course run. Please '
                'change either organization or course number to be '
                'unique.'
            ),
            'OrgErrMsg': _(
                'Please change either the organization or '
                'course number so that it is unique.'
            ),
            'CourseErrMsg': _(
                'Please change either the organization or '
                'course number so that it is unique.'
            ),
        })

    # dhm: this query breaks the abstraction, but I'll fix it when I do my suspended refactoring of this
    # file for new locators. get_items should accept a query rather than requiring it be a legal location
    course_search_location = bson.son.SON({
        '_id.tag': 'i4x',
        # cannot pass regex to Location constructor; thus this hack
        # pylint: disable=E1101
        '_id.org': re.compile(u'^{}$'.format(dest_location.org), re.IGNORECASE | re.UNICODE),
        # pylint: disable=E1101
        '_id.course': re.compile(u'^{}$'.format(dest_location.course), re.IGNORECASE | re.UNICODE),
        '_id.category': 'course',
    })
    # BUG FIX: fields must be a sequence of field names. The previous value
    # ('_id') is just the string '_id', which pymongo iterates character by
    # character, projecting bogus fields '_', 'i', 'd' instead of the id.
    courses = modulestore().collection.find(course_search_location, fields=['_id'])
    if courses.count() > 0:
        return JsonResponse({
            'ErrMsg': _(
                'There is already a course defined with the same '
                'organization and course number. Please '
                'change at least one field to be unique.'),
            'OrgErrMsg': _(
                'Please change either the organization or '
                'course number so that it is unique.'),
            'CourseErrMsg': _(
                'Please change either the organization or '
                'course number so that it is unique.'),
        })

    # instantiate the CourseDescriptor and then persist it
    # note: no system to pass
    if display_name is None:
        metadata = {}
    else:
        metadata = {'display_name': display_name}
    modulestore('direct').create_and_save_xmodule(
        dest_location,
        metadata=metadata
    )
    new_course = modulestore('direct').get_item(dest_location)

    # clone a default 'about' overview module as well
    dest_about_location = dest_location.replace(
        category='about',
        name='overview'
    )
    overview_template = AboutDescriptor.get_template('overview.yaml')
    modulestore('direct').create_and_save_xmodule(
        dest_about_location,
        system=new_course.system,
        definition_data=overview_template.get('data')
    )

    initialize_course_tabs(new_course, request.user)

    new_location = loc_mapper().translate_location(new_course.location.course_id, new_course.location, False, True)
    # can't use auth.add_users here b/c it requires request.user to already have Instructor perms in this course
    # however, we can assume that b/c this user had authority to create the course, the user can add themselves
    CourseInstructorRole(new_location).add_users(request.user)
    auth.add_users(request.user, CourseStaffRole(new_location), request.user)

    # seed the forums
    seed_permissions_roles(new_course.location.course_id)

    # auto-enroll the course creator in the course so that "View Live" will
    # work.
    CourseEnrollment.enroll(request.user, new_course.location.course_id)

    return JsonResponse({'url': new_location.url_reverse("course/", "")})
class TestOrphan(unittest.TestCase):
    """
    Test the orphan finding code

    Builds an identical small course in both the old Mongo modulestore and
    the split Mongo modulestore — some attached children, some deliberately
    detached items, and some orphans — then checks that each store's
    get_orphans reports exactly the orphans.
    """
    # Snippet of what would be in the django settings envs file
    db_config = {
        'host': 'localhost',
        'db': 'test_xmodule',
    }

    # Shared kwargs for constructing both modulestores under test.
    modulestore_options = {
        'default_class': 'xmodule.raw_module.RawDescriptor',
        'fs_root': '',
        'render_template': mock.Mock(return_value=""),
        'xblock_mixins': (InheritanceMixin, )
    }

    # Package id used for the course in the split store.
    split_package_id = 'test_org.test_course.runid'

    def setUp(self):
        # Randomize the collection name so repeated runs don't collide on
        # leftover data in the shared test db.
        self.db_config['collection'] = 'modulestore{0}'.format(
            uuid.uuid4().hex[:5])
        self.userid = random.getrandbits(32)
        super(TestOrphan, self).setUp()
        self.split_mongo = SplitMongoModuleStore(self.db_config, **self.modulestore_options)
        self.addCleanup(self.tear_down_split)
        self.old_mongo = MongoModuleStore(self.db_config, **self.modulestore_options)
        self.addCleanup(self.tear_down_mongo)
        self.course_location = None
        self._create_course()

    def tear_down_split(self):
        """
        Remove the test collections, close the db connection
        """
        split_db = self.split_mongo.db
        split_db.drop_collection(split_db.course_index)
        split_db.drop_collection(split_db.structures)
        split_db.drop_collection(split_db.definitions)
        split_db.connection.close()

    def tear_down_mongo(self):
        """
        Remove the test collections, close the db connection
        """
        split_db = self.split_mongo.db
        # old_mongo doesn't give a db attr, but all of the dbs are the same
        split_db.drop_collection(self.old_mongo.collection)

    def _create_item(self, category, name, data, metadata, parent_category, parent_name, runtime):
        """
        Create the item of the given category and block id in split and old mongo, add it to the optional parent.
        The parent category is only needed because old mongo requires it for the id.

        Passing parent_name=None creates the item detached (a potential orphan).
        """
        location = Location('i4x', 'test_org', 'test_course', category, name)
        self.old_mongo.create_and_save_xmodule(location, data, metadata, runtime)
        # split takes a single fields dict; fold string data under 'data',
        # otherwise merge the data dict with the metadata.
        if isinstance(data, basestring):
            fields = {'data': data}
        else:
            fields = data.copy()
        fields.update(metadata)
        if parent_name:
            # add child to parent in mongo
            parent_location = Location('i4x', 'test_org', 'test_course', parent_category, parent_name)
            parent = self.old_mongo.get_item(parent_location)
            parent.children.append(location.url())
            self.old_mongo.update_item(parent, self.userid)
            # create pointer for split
            course_or_parent_locator = BlockUsageLocator(
                package_id=self.split_package_id,
                branch='draft',
                block_id=parent_name
            )
        else:
            course_or_parent_locator = CourseLocator(
                package_id='test_org.test_course.runid',
                branch='draft',
            )
        self.split_mongo.create_item(course_or_parent_locator, category, self.userid, block_id=name, fields=fields)

    def _create_course(self):
        """
        * some detached items
        * some attached children
        * some orphans
        """
        date_proxy = Date()
        metadata = {
            'start': date_proxy.to_json(datetime.datetime(2000, 3, 13, 4)),
            'display_name': 'Migration test course',
        }
        data = {'wiki_slug': 'test_course_slug'}
        fields = metadata.copy()
        fields.update(data)
        # split requires the course to be created separately from creating items
        self.split_mongo.create_course(self.split_package_id, 'test_org', self.userid, fields=fields, root_block_id='runid')
        self.course_location = Location('i4x', 'test_org', 'test_course', 'course', 'runid')
        self.old_mongo.create_and_save_xmodule(self.course_location, data, metadata)
        runtime = self.old_mongo.get_item(self.course_location).runtime
        # attached chapters plus one orphan chapter
        self._create_item('chapter', 'Chapter1', {}, {'display_name': 'Chapter 1'}, 'course', 'runid', runtime)
        self._create_item('chapter', 'Chapter2', {}, {'display_name': 'Chapter 2'}, 'course', 'runid', runtime)
        self._create_item('chapter', 'OrphanChapter', {}, {'display_name': 'Orphan Chapter'}, None, None, runtime)
        # attached vertical plus one orphan vertical
        self._create_item('vertical', 'Vert1', {}, {'display_name': 'Vertical 1'}, 'chapter', 'Chapter1', runtime)
        self._create_item('vertical', 'OrphanVert', {}, {'display_name': 'Orphan Vertical'}, None, None, runtime)
        # attached html plus one orphan html
        self._create_item('html', 'Html1', "<p>Goodbye</p>", {'display_name': 'Parented Html'}, 'vertical', 'Vert1', runtime)
        self._create_item('html', 'OrphanHtml', "<p>Hello</p>", {'display_name': 'Orphan html'}, None, None, runtime)
        # detached categories (tabs/about/updates) are unparented by design
        # and must NOT be counted as orphans
        self._create_item('static_tab', 'staticuno', "<p>tab</p>", {'display_name': 'Tab uno'}, None, None, runtime)
        self._create_item('about', 'overview', "<p>overview</p>", {}, None, None, runtime)
        self._create_item('course_info', 'updates', "<ol><li><h2>Sep 22</h2><p>test</p></li></ol>", {}, None, None, runtime)

    def test_mongo_orphan(self):
        """
        Test that old mongo finds the orphans
        """
        orphans = self.old_mongo.get_orphans(self.course_location, None)
        # exactly the three Orphan* items; detached categories are excluded
        self.assertEqual(len(orphans), 3, "Wrong # {}".format(orphans))
        location = self.course_location.replace(category='chapter', name='OrphanChapter')
        self.assertIn(location.url(), orphans)
        location = self.course_location.replace(category='vertical', name='OrphanVert')
        self.assertIn(location.url(), orphans)
        location = self.course_location.replace(category='html', name='OrphanHtml')
        self.assertIn(location.url(), orphans)

    def test_split_orphan(self):
        """
        Test that split mongo finds the orphans
        """
        orphans = self.split_mongo.get_orphans(self.split_package_id, 'draft')
        # exactly the three Orphan* items; detached categories are excluded
        self.assertEqual(len(orphans), 3, "Wrong # {}".format(orphans))
        location = BlockUsageLocator(package_id=self.split_package_id, branch='draft', block_id='OrphanChapter')
        self.assertIn(location, orphans)
        location = BlockUsageLocator(package_id=self.split_package_id, branch='draft', block_id='OrphanVert')
        self.assertIn(location, orphans)
        location = BlockUsageLocator(package_id=self.split_package_id, branch='draft', block_id='OrphanHtml')
        self.assertIn(location, orphans)
def compute_metadata_inheritance_tree(self, location):
    '''
    TODO (cdodge) This method can be deleted when the 'split module store' work has been completed

    Query every container block in the course identified by `location`,
    collate draft/non-draft records, and push each block's inheritable
    metadata down to its children, returning {child_url: inherited_metadata}.
    '''
    # get all collections in the course, this query should not return any leaf nodes
    # note this is a bit ugly as when we add new categories of containers, we have to add it here
    block_types_with_children = set(
        name for name, class_ in XBlock.load_classes() if getattr(class_, 'has_children', False)
    )
    query = {
        '_id.org': location.org,
        '_id.course': location.course,
        '_id.category': {'$in': list(block_types_with_children)}
    }
    # we just want the Location, children, and inheritable metadata
    record_filter = {'_id': 1, 'definition.children': 1}

    # just get the inheritable metadata since that is all we need for the computation
    # this minimizes both data pushed over the wire
    for field_name in InheritanceMixin.fields:
        record_filter['metadata.{0}'.format(field_name)] = 1

    # call out to the DB
    resultset = self.collection.find(query, record_filter)

    results_by_url = {}
    root = None

    # now go through the results and order them by the location url
    for result in resultset:
        # use a distinct name so we don't shadow the `location` argument
        result_loc = Location(result['_id'])
        # We need to collate between draft and non-draft
        # i.e. draft verticals will have draft children but will have non-draft parents currently
        result_loc = result_loc.replace(revision=None)
        result_url = result_loc.url()
        if result_url in results_by_url:
            # BUG FIX: merge the previously-seen children into the record we
            # are about to store. The old code wrote the merged list into the
            # existing record and then overwrote that record with `result`,
            # silently discarding the first sighting's children.
            existing_children = results_by_url[result_url].get('definition', {}).get('children', [])
            additional_children = result.get('definition', {}).get('children', [])
            total_children = existing_children + additional_children
            result.setdefault('definition', {})['children'] = total_children
        results_by_url[result_url] = result
        if result_loc.category == 'course':
            root = result_url

    # now traverse the tree and compute down the inherited metadata
    metadata_to_inherit = {}

    def _compute_inherited_metadata(url):
        """
        Helper method for computing inherited metadata for a specific location url
        """
        my_metadata = results_by_url[url].get('metadata', {})
        # go through all the children and recurse, but only if we have
        # in the result set. Remember results will not contain leaf nodes
        for child in results_by_url[url].get('definition', {}).get('children', []):
            if child in results_by_url:
                new_child_metadata = copy.deepcopy(my_metadata)
                new_child_metadata.update(results_by_url[child].get('metadata', {}))
                results_by_url[child]['metadata'] = new_child_metadata
                metadata_to_inherit[child] = new_child_metadata
                _compute_inherited_metadata(child)
            else:
                # this is likely a leaf node, so let's record what metadata we need to inherit
                metadata_to_inherit[child] = my_metadata

    if root is not None:
        _compute_inherited_metadata(root)

    return metadata_to_inherit
def create_new_course(request):
    """
    Create a new course.

    Reads org/number/display_name/run from the JSON request body, refuses to
    create a duplicate (exact run match, or any case-insensitive org/number
    match), persists the course plus a default 'about' overview module, and
    sets up course groups, forum roles, and the creator's enrollment.

    Returns the URL for the course overview page.
    """
    if not is_user_in_creator_group(request.user):
        raise PermissionDenied()
    org = request.json.get('org')
    number = request.json.get('number')
    display_name = request.json.get('display_name')
    run = request.json.get('run')
    try:
        dest_location = Location('i4x', org, number, 'course', run)
    except InvalidLocationError as error:
        return JsonResponse({
            "ErrMsg": _("Unable to create course '{name}'.\n\n{err}").format(
                name=display_name, err=error.message)})

    # see if the course already exists
    existing_course = None
    try:
        existing_course = modulestore('direct').get_item(dest_location)
    except ItemNotFoundError:
        pass
    if existing_course is not None:
        return JsonResponse({
            'ErrMsg': _(
                'There is already a course defined with the same '
                'organization, course number, and course run. Please '
                'change either organization or course number to be '
                'unique.'
            ),
            'OrgErrMsg': _(
                'Please change either the organization or '
                'course number so that it is unique.'
            ),
            'CourseErrMsg': _(
                'Please change either the organization or '
                'course number so that it is unique.'
            ),
        })

    # dhm: this query breaks the abstraction, but I'll fix it when I do my suspended refactoring of this
    # file for new locators. get_items should accept a query rather than requiring it be a legal location
    course_search_location = bson.son.SON({
        '_id.tag': 'i4x',
        # cannot pass regex to Location constructor; thus this hack
        '_id.org': re.compile('^{}$'.format(dest_location.org), re.IGNORECASE),
        '_id.course': re.compile('^{}$'.format(dest_location.course), re.IGNORECASE),
        '_id.category': 'course',
    })
    # BUG FIX: fields must be a sequence of field names. The previous value
    # ('_id') is just the string '_id', which pymongo iterates character by
    # character, projecting bogus fields '_', 'i', 'd' instead of the id.
    courses = modulestore().collection.find(course_search_location, fields=['_id'])
    if courses.count() > 0:
        return JsonResponse({
            'ErrMsg': _(
                'There is already a course defined with the same '
                'organization and course number. Please '
                'change at least one field to be unique.'),
            'OrgErrMsg': _(
                'Please change either the organization or '
                'course number so that it is unique.'),
            'CourseErrMsg': _(
                'Please change either the organization or '
                'course number so that it is unique.'),
        })

    # instantiate the CourseDescriptor and then persist it
    # note: no system to pass
    if display_name is None:
        metadata = {}
    else:
        metadata = {'display_name': display_name}
    modulestore('direct').create_and_save_xmodule(
        dest_location,
        metadata=metadata
    )
    new_course = modulestore('direct').get_item(dest_location)

    # clone a default 'about' overview module as well
    dest_about_location = dest_location.replace(
        category='about',
        name='overview'
    )
    overview_template = AboutDescriptor.get_template('overview.yaml')
    modulestore('direct').create_and_save_xmodule(
        dest_about_location,
        system=new_course.system,
        definition_data=overview_template.get('data')
    )

    initialize_course_tabs(new_course)

    new_location = loc_mapper().translate_location(new_course.location.course_id, new_course.location, False, True)
    create_all_course_groups(request.user, new_location)

    # seed the forums
    seed_permissions_roles(new_course.location.course_id)

    # auto-enroll the course creator in the course so that "View Live" will
    # work.
    CourseEnrollment.enroll(request.user, new_course.location.course_id)

    return JsonResponse({'url': new_location.url_reverse("course/", "")})