def _query_children_for_cache_children(self, course_key, items):
    """
    Fetch the raw documents for the child ids in ``items``, overlaying draft
    versions on top of the published ones when the current branch setting
    prefers drafts (the DraftStore contract: return the draft if one exists).

    Arguments:
        course_key: the course whose run is used to build locators from SON ids
        items: iterable of usage-key strings naming the children to load
    """
    # first get non-draft in a round-trip
    to_process_non_drafts = super(DraftModuleStore, self)._query_children_for_cache_children(course_key, items)

    # Index the published docs by locator so drafts can replace them in place.
    to_process_dict = {}
    for non_draft in to_process_non_drafts:
        to_process_dict[BlockUsageLocator._from_deprecated_son(non_draft["_id"], course_key.run)] = non_draft

    if self.get_branch_setting() == ModuleStoreEnum.Branch.draft_preferred:
        # now query all draft content in another round-trip
        query = []
        for item in items:
            item_usage_key = UsageKey.from_string(item).map_into_course(course_key)
            # DIRECT_ONLY categories never have drafts, so skip them.
            if item_usage_key.block_type not in DIRECT_ONLY_CATEGORIES:
                query.append(as_draft(item_usage_key).to_deprecated_son())
        if query:
            query = {'_id': {'$in': query}}
            to_process_drafts = list(self.collection.find(query))

            # now we have to go through all drafts and replace the non-draft
            # with the draft. This is because the semantics of the DraftStore is to
            # always return the draft - if available
            for draft in to_process_drafts:
                draft_loc = BlockUsageLocator._from_deprecated_son(draft["_id"], course_key.run)
                draft_as_non_draft_loc = as_published(draft_loc)

                # does non-draft exist in the collection
                # if so, replace it
                if draft_as_non_draft_loc in to_process_dict:
                    to_process_dict[draft_as_non_draft_loc] = draft

    # convert the dict - which is used for look ups - back into a list
    queried_children = to_process_dict.values()

    return queried_children
def test_map_into_course_location(self, deprecated_source, deprecated_dest):
    """
    Mapping a block locator into another course keeps the block type and id
    while adopting the destination course (and its deprecation flag).
    """
    source_course = CourseLocator('org', 'course', 'run', deprecated=deprecated_source)
    dest_course = CourseLocator('edX', 'toy', '2012_Fall', deprecated=deprecated_dest)
    source_block = BlockUsageLocator(source_course, 'cat', 'name:more_name', deprecated=deprecated_source)

    mapped = source_block.map_into_course(dest_course)

    self.assertEqual(
        BlockUsageLocator(dest_course, 'cat', 'name:more_name', deprecated=deprecated_dest),
        mapped,
    )
def test_map_into_course_location(self):
    """
    map_into_course should rebase a deprecated block locator onto the target course.
    """
    original_course = CourseKey.from_string('org/course/run')
    new_course = CourseKey.from_string('edX/toy/2012_Fall')
    loc = BlockUsageLocator(original_course, 'cat', 'name:more_name', deprecated=True)
    # assertEqual, not the deprecated assertEquals alias (removed in Python 3.12).
    self.assertEqual(
        BlockUsageLocator(new_course, 'cat', 'name:more_name', deprecated=True),
        loc.map_into_course(new_course)
    )
def test_block_generations(self):
    """
    Test get_block_generations
    """
    # Build a minimal course tree: course -> chapter -> vertical -> problem.
    test_course = persistent_factories.PersistentCourseFactory.create(
        offering="history.hist101", org="edu.harvard",
        display_name="history test course", user_id="testbot"
    )
    chapter = persistent_factories.ItemFactory.create(
        display_name="chapter 1", parent_location=test_course.location, user_id="testbot"
    )
    sub = persistent_factories.ItemFactory.create(
        display_name="subsection 1", parent_location=chapter.location, user_id="testbot",
        category="vertical"
    )
    first_problem = persistent_factories.ItemFactory.create(
        display_name="problem 1", parent_location=sub.location, user_id="testbot",
        category="problem",
        data="<problem></problem>",
    )
    first_problem.max_attempts = 3
    first_problem.save()  # decache the above into the kvs

    # Updating creates a new version chained to the previous one.
    updated_problem = self.split_store.update_item(first_problem, "**replace_user**")
    self.assertIsNotNone(updated_problem.previous_version)
    self.assertEqual(updated_problem.previous_version, first_problem.update_version)
    self.assertNotEqual(updated_problem.update_version, first_problem.update_version)

    updated_loc = self.split_store.delete_item(updated_problem.location, "**replace_user**", "testbot")

    # Re-add a problem re-using the deleted problem's block_id's parent slot.
    second_problem = persistent_factories.ItemFactory.create(
        display_name="problem 2",
        parent_location=BlockUsageLocator.make_relative(
            updated_loc, block_type="problem", block_id=sub.location.block_id
        ),
        user_id="testbot", category="problem",
        data="<problem></problem>",
    )

    # course root only updated 2x
    version_history = self.split_store.get_block_generations(test_course.location)
    self.assertEqual(version_history.locator.version_guid, test_course.location.version_guid)
    self.assertEqual(len(version_history.children), 1)
    self.assertEqual(version_history.children[0].children, [])
    self.assertEqual(version_history.children[0].locator.version_guid, chapter.location.version_guid)

    # sub changed on add, add problem, delete problem, add problem in strict linear seq
    version_history = self.split_store.get_block_generations(sub.location)
    self.assertEqual(len(version_history.children), 1)
    self.assertEqual(len(version_history.children[0].children), 1)
    self.assertEqual(len(version_history.children[0].children[0].children), 1)
    self.assertEqual(len(version_history.children[0].children[0].children[0].children), 0)

    # first and second problem may show as same usage_id; so, need to ensure their histories are right
    version_history = self.split_store.get_block_generations(updated_problem.location)
    self.assertEqual(version_history.locator.version_guid, first_problem.location.version_guid)
    self.assertEqual(len(version_history.children), 1)  # updated max_attempts
    self.assertEqual(len(version_history.children[0].children), 0)

    version_history = self.split_store.get_block_generations(second_problem.location)
    self.assertNotEqual(version_history.locator.version_guid, first_problem.location.version_guid)
def get_block_id_from_string(block_id_string):
    """
    Parse ``block_id_string`` into a BlockUsageLocator.

    Returns None for an empty/falsy input; returns the raw string unchanged
    when it is not a valid usage key (workbench-style ids).
    """
    if not block_id_string:
        return None
    try:
        parsed_key = BlockUsageLocator.from_string(block_id_string)
    except InvalidKeyError:
        # workbench support: its ids are not parseable keys, so hand back the raw string
        return block_id_string
    return parsed_key
def convert_item(item, to_be_deleted): """ Convert the subtree """ # collect the children's ids for future processing next_tier = [] for child in item.get('definition', {}).get('children', []): child_loc = BlockUsageLocator.from_string(child) next_tier.append(child_loc.to_deprecated_son()) # insert a new DRAFT version of the item item['_id']['revision'] = MongoRevisionKey.draft # ensure keys are in fixed and right order before inserting item['_id'] = self._id_dict_to_son(item['_id']) bulk_record = self._get_bulk_ops_record(location.course_key) bulk_record.dirty = True try: self.collection.insert(item) except pymongo.errors.DuplicateKeyError: # prevent re-creation of DRAFT versions, unless explicitly requested to ignore if not ignore_if_draft: raise DuplicateItemError(item['_id'], self, 'collection') # delete the old PUBLISHED version if requested if delete_published: item['_id']['revision'] = MongoRevisionKey.published to_be_deleted.append(item['_id']) return next_tier
def delete_draft_only(root_location):
    """
    Helper function that calls delete on the specified location if a draft version of the item exists.
    If no draft exists, this function recursively calls itself on the children of the item.

    NOTE: this is a closure — ``self`` and ``user_id`` come from the enclosing scope.
    """
    query = root_location.to_deprecated_son(prefix='_id.')
    # Drop the revision so both draft and published versions match the query.
    del query['_id.revision']
    versions_found = self.collection.find(
        query, {'_id': True, 'definition.children': True}, sort=[SORT_REVISION_FAVOR_DRAFT]
    )
    # If 2 versions versions exist, we can assume one is a published version. Go ahead and do the delete
    # of the draft version.
    if versions_found.count() > 1:
        # Moving a child from published parent creates a draft of the parent and moved child.
        published_version = [
            version
            for version in versions_found
            if version.get('_id').get('revision') != MongoRevisionKey.draft
        ]
        if len(published_version) > 0:
            # This change makes sure that parents are updated too i.e. an item will have only one parent.
            self.update_parent_if_moved(root_location, published_version[0], delete_draft_only, user_id)
        self._delete_subtree(root_location, [as_draft], draft_only=True)
    elif versions_found.count() == 1:
        # Since this method cannot be called on something in DIRECT_ONLY_CATEGORIES and we call
        # delete_subtree as soon as we find an item with a draft version, if there is only 1 version
        # it must be published (since adding a child to a published item creates a draft of the parent).
        item = versions_found[0]
        assert item.get('_id').get('revision') != MongoRevisionKey.draft
        for child in item.get('definition', {}).get('children', []):
            child_loc = BlockUsageLocator.from_string(child)
            delete_draft_only(child_loc)
def test_conditional_module_with_empty_sources_list(self):
    """
    If a ConditionalDescriptor is initialized with an empty sources_list, we assert that
    the sources_list is set via generating UsageKeys from the values in xml_attributes['sources']
    """
    stub_system = Mock()
    stub_location = BlockUsageLocator(
        CourseLocator("edX", "conditional_test", "test_run"),
        "conditional",
        "SampleConditional",
    )
    stub_scope_ids = ScopeIds(None, None, stub_location, stub_location)
    stub_field_data = DictFieldData({
        'data': '<conditional/>',
        'xml_attributes': {'sources': 'i4x://HarvardX/ER22x/poll_question/T15_poll'},
        'children': None,
    })

    conditional = ConditionalDescriptor(stub_system, stub_field_data, stub_scope_ids)

    # Matching what is in ConditionalDescriptor.__init__: the parsed source key
    # has its run replaced by the block's own course run.
    run_of_block = conditional.location.course_key.run
    expected_key = BlockUsageLocator.from_string(
        conditional.xml_attributes['sources']
    ).replace(run=run_of_block)
    self.assertEqual(conditional.sources_list[0], expected_key)
def _create(cls, target_class, **kwargs): """ Create and return a new course. For performance reasons, we do not emit signals during this process, but if you need signals to run, you can pass `emit_signals=True` to this method. """ # All class attributes (from this class and base classes) are # passed in via **kwargs. However, some of those aren't actual field values, # so pop those off for use separately org = kwargs.pop('org', None) # because the factory provides a default 'number' arg, prefer the non-defaulted 'course' arg if any number = kwargs.pop('course', kwargs.pop('number', None)) store = kwargs.pop('modulestore') name = kwargs.get('name', kwargs.get('run', BlockUsageLocator.clean(kwargs.get('display_name')))) run = kwargs.pop('run', name) user_id = kwargs.pop('user_id', ModuleStoreEnum.UserID.test) emit_signals = kwargs.pop('emit_signals', False) # Pass the metadata just as field=value pairs kwargs.update(kwargs.pop('metadata', {})) default_store_override = kwargs.pop('default_store', None) with store.branch_setting(ModuleStoreEnum.Branch.draft_preferred): course_key = store.make_course_key(org, number, run) with store.bulk_operations(course_key, emit_signals=emit_signals): if default_store_override is not None: with store.default_store(default_store_override): new_course = store.create_course(org, number, run, user_id, fields=kwargs) else: new_course = store.create_course(org, number, run, user_id, fields=kwargs) last_course.loc = new_course.location return new_course
def _get_raw_parent_locations(self, location, key_revision):
    """
    Get the parents but don't unset the revision in their locations.

    Intended for internal use but not restricted.

    Args:
        location (UsageKey): assumes the location's revision is None; so, uses revision keyword solely
        key_revision:
            MongoRevisionKey.draft - return only the draft parent
            MongoRevisionKey.published - return only the published parent
            ModuleStoreEnum.RevisionOption.all - return both draft and published parents
    """
    _verify_revision_is_published(location)

    # create a query to find all items in the course that have the given location listed as a child
    query = self._course_key_to_son(location.course_key)
    query['definition.children'] = text_type(location)

    # find all the items that satisfy the query
    parents = self.collection.find(query, {'_id': True}, sort=[SORT_REVISION_FAVOR_DRAFT])

    # return only the parent(s) that satisfy the request
    return [
        BlockUsageLocator._from_deprecated_son(parent['_id'], location.course_key.run)
        for parent in parents
        if (
            # return all versions of the parent if revision is ModuleStoreEnum.RevisionOption.all
            key_revision == ModuleStoreEnum.RevisionOption.all or
            # return this parent if it's direct-only, regardless of which revision is requested
            parent['_id']['category'] in DIRECT_ONLY_CATEGORIES or
            # return this parent only if its revision matches the requested one
            parent['_id']['revision'] == key_revision
        )
    ]
def get_students_problem_grades(request, csv=False):
    """
    Get a list of students and grades for a particular problem.
    If 'csv' is False, returns a dict of student's name: username: grade: percent.

    If 'csv' is True, returns a header array, and an array of arrays in the format:
    student names, usernames, grades, percents for CSV download.
    """
    module_state_key = BlockUsageLocator.from_string(request.GET.get('module_id'))
    # NOTE: the 'csv' parameter is immediately shadowed by the query param here;
    # the effective switch is request.GET['csv'], not the function argument.
    csv = request.GET.get('csv')

    # Query for "problem grades" students
    students = models.StudentModule.objects.select_related('student').filter(
        module_state_key=module_state_key,
        module_type__exact='problem',
        grade__isnull=False,
    ).values('student__username', 'student__profile__name', 'grade', 'max_grade').order_by('student__profile__name')

    results = []
    if not csv:
        # Restrict screen list length
        # Adding 1 so can tell if list is larger than MAX_SCREEN_LIST_LENGTH
        # without doing another select.
        for student in students[0:MAX_SCREEN_LIST_LENGTH + 1]:
            student_dict = {
                'name': student['student__profile__name'],
                'username': student['student__username'],
                'grade': student['grade'],
            }

            # Guard against division by zero for ungraded/zero-max problems.
            student_dict['percent'] = 0
            if student['max_grade'] > 0:
                student_dict['percent'] = round(student['grade'] * 100 / student['max_grade'])
            results.append(student_dict)

        max_exceeded = False
        if len(results) > MAX_SCREEN_LIST_LENGTH:
            # Remove the last item so list length is exactly MAX_SCREEN_LIST_LENGTH
            del results[-1]
            max_exceeded = True

        response_payload = {
            'results': results,
            'max_exceeded': max_exceeded,
        }
        return JsonResponse(response_payload)
    else:
        tooltip = request.GET.get('tooltip')
        # Everything before the final " - " in the tooltip is used as the filename.
        filename = sanitize_filename(tooltip[:tooltip.rfind(' - ')])

        header = [_("Name"), _("Username"), _("Grade"), _("Percent")]
        for student in students:
            percent = 0
            if student['max_grade'] > 0:
                percent = round(student['grade'] * 100 / student['max_grade'])
            results.append([student['student__profile__name'], student['student__username'], student['grade'], percent])
        response = create_csv_response(filename, header, results)
        return response
def test_block_generations(self):
    """
    Test get_block_generations
    """
    # Build a minimal course tree: course -> chapter -> vertical -> problem.
    test_course = persistent_factories.PersistentCourseFactory.create(
        course='history', run='hist101', org='edu.harvard',
        display_name='history test course', user_id='testbot'
    )
    chapter = persistent_factories.ItemFactory.create(
        display_name='chapter 1', parent_location=test_course.location, user_id='testbot'
    )
    sub = persistent_factories.ItemFactory.create(
        display_name='subsection 1', parent_location=chapter.location, user_id='testbot',
        category='vertical'
    )
    first_problem = persistent_factories.ItemFactory.create(
        display_name='problem 1', parent_location=sub.location, user_id='testbot',
        category='problem', data="<problem></problem>"
    )
    first_problem.max_attempts = 3
    first_problem.save()  # decache the above into the kvs

    # Updating creates a new version chained to the previous one.
    updated_problem = self.split_store.update_item(first_problem, 'testbot')
    self.assertIsNotNone(updated_problem.previous_version)
    self.assertEqual(updated_problem.previous_version, first_problem.update_version)
    self.assertNotEqual(updated_problem.update_version, first_problem.update_version)

    self.split_store.delete_item(updated_problem.location, 'testbot')

    # Re-add a problem re-using the deleted problem's parent vertical slot.
    second_problem = persistent_factories.ItemFactory.create(
        display_name='problem 2',
        parent_location=BlockUsageLocator.make_relative(
            test_course.location.version_agnostic(), block_type='problem', block_id=sub.location.block_id
        ),
        user_id='testbot', category='problem',
        data="<problem></problem>"
    )

    # course root only updated 2x
    version_history = self.split_store.get_block_generations(test_course.location)
    # create course causes 2 versions for the time being; skip the first.
    version_history = version_history.children[0]
    self.assertEqual(version_history.locator.version_guid, test_course.location.version_guid)
    self.assertEqual(len(version_history.children), 1)
    self.assertEqual(version_history.children[0].children, [])
    self.assertEqual(version_history.children[0].locator.version_guid, chapter.location.version_guid)

    # sub changed on add, add problem, delete problem, add problem in strict linear seq
    version_history = self.split_store.get_block_generations(sub.location)
    self.assertEqual(len(version_history.children), 1)
    self.assertEqual(len(version_history.children[0].children), 1)
    self.assertEqual(len(version_history.children[0].children[0].children), 1)
    self.assertEqual(len(version_history.children[0].children[0].children[0].children), 0)

    # first and second problem may show as same usage_id; so, need to ensure their histories are right
    version_history = self.split_store.get_block_generations(updated_problem.location)
    self.assertEqual(version_history.locator.version_guid, first_problem.location.version_guid)
    self.assertEqual(len(version_history.children), 1)  # updated max_attempts
    self.assertEqual(len(version_history.children[0].children), 0)

    version_history = self.split_store.get_block_generations(second_problem.location)
    self.assertNotEqual(version_history.locator.version_guid, first_problem.location.version_guid)
def test_relative(self):
    """
    Test making a relative usage locator.
    """
    org = 'mit.eecs'
    offering = '1'
    branch = 'foo'
    course_locator = CourseLocator(org=org, offering=offering, branch=branch)

    # Relative to a course locator.
    first_block_id = 'problem:with-colon~2'
    derived = BlockUsageLocator.make_relative(course_locator, 'problem', first_block_id)
    self.check_block_locn_fields(
        derived, org=org, offering=offering, branch=branch, block=first_block_id
    )

    # Relative to another block locator: the course context carries over.
    second_block_id = 'completely_different'
    derived = BlockUsageLocator.make_relative(derived, 'problem', second_block_id)
    self.check_block_locn_fields(
        derived, org=org, offering=offering, branch=branch, block=second_block_id
    )
def test_relative(self):
    """
    Test making a relative usage locator.
    """
    org = 'mit.eecs'
    course = 'ponypower'
    run = "2014_T2"
    branch = 'foo'
    course_locator = CourseLocator(org=org, course=course, run=run, branch=branch)

    # Relative to a course locator.
    first_block_id = 'problem:with-colon~2'
    derived = BlockUsageLocator.make_relative(course_locator, 'problem', first_block_id)
    self.check_block_locn_fields(
        derived, org=org, course=course, run=run, branch=branch, block=first_block_id
    )

    # Relative to another block locator: the course context carries over.
    second_block_id = 'completely_different'
    derived = BlockUsageLocator.make_relative(derived, 'problem', second_block_id)
    self.check_block_locn_fields(
        derived, org=org, course=course, run=run, branch=branch, block=second_block_id
    )
def _get_parent_content_id(html_content_id):
    """
    Gets parent block content id.

    Falls back to returning ``html_content_id`` unchanged when the key is
    invalid or the block cannot be found in the modulestore.
    """
    try:
        html_usage_id = BlockUsageLocator.from_string(html_content_id)
        html_module = modulestore().get_item(html_usage_id)
        return unicode(html_module.parent)
    except (InvalidKeyError, ItemNotFoundError) as exception:
        # something has gone wrong - the best we can do is to return original content id
        # logging.warning(), not the deprecated warn() alias
        log.warning("Error getting parent content_id for html module: %s", exception.message)
        return html_content_id
def from_json(cls, blockrecord_json):
    """
    Return a BlockRecordSet from a json list.
    """
    def _to_record(entry):
        # Rehydrate one serialized block dict into a BlockRecord.
        return BlockRecord(
            locator=BlockUsageLocator.from_string(entry["locator"]),
            weight=entry["weight"],
            max_score=entry["max_score"],
        )

    return cls(_to_record(entry) for entry in json.loads(blockrecord_json))
def get_students_opened_subsection(request, csv=False):
    """
    Get a list of students that opened a particular subsection.
    If 'csv' is False, returns a dict of student's name: username.

    If 'csv' is True, returns a header array, and an array of arrays in the format:
    student names, usernames for CSV download.
    """
    module_state_key = BlockUsageLocator.from_string(request.GET.get('module_id'))
    # NOTE: the 'csv' parameter is immediately shadowed by the query param here;
    # the effective switch is request.GET['csv'], not the function argument.
    csv = request.GET.get('csv')

    # Query for "opened a subsection" students
    students = models.StudentModule.objects.select_related('student').filter(
        module_state_key__exact=module_state_key,
        module_type__exact='sequential',
    ).values('student__username', 'student__profile__name').order_by('student__profile__name')

    results = []
    if not csv:
        # Restrict screen list length
        # Adding 1 so can tell if list is larger than MAX_SCREEN_LIST_LENGTH
        # without doing another select.
        for student in students[0:MAX_SCREEN_LIST_LENGTH + 1]:
            results.append({
                'name': student['student__profile__name'],
                'username': student['student__username'],
            })

        max_exceeded = False
        if len(results) > MAX_SCREEN_LIST_LENGTH:
            # Remove the last item so list length is exactly MAX_SCREEN_LIST_LENGTH
            del results[-1]
            max_exceeded = True
        response_payload = {
            'results': results,
            'max_exceeded': max_exceeded,
        }
        return JsonResponse(response_payload)
    else:
        tooltip = request.GET.get('tooltip')

        # Subsection name is everything after 3rd space in tooltip
        filename = sanitize_filename(' '.join(tooltip.split(' ')[3:]))

        header = [_("Name"), _("Username")]
        for student in students:
            results.append([student['student__profile__name'], student['student__username']])

        response = create_csv_response(filename, header, results)
        return response
def problems_with_psychometric_data(course_id):
    '''
    Return dict of {problems (location urls): count} for which psychometric data is available.
    Does this for a given course_id.
    '''
    pmd_rows = PsychometricData.objects.using(db).filter(studentmodule__course_id=course_id)
    distinct_urls = [
        row['studentmodule__module_state_key']
        for row in pmd_rows.values('studentmodule__module_state_key').distinct()
    ]
    # One count query per distinct problem location.
    return {
        url: pmd_rows.filter(
            studentmodule__module_state_key=BlockUsageLocator.from_string(url)
        ).count()
        for url in distinct_urls
    }
def __init__(self, *args, **kwargs):
    """
    Create an instance of the conditional module.
    """
    super(ConditionalDescriptor, self).__init__(*args, **kwargs)

    # Convert sources xml_attribute to a ReferenceList field type so Location/Locator
    # substitution can be done.
    if not self.sources_list:
        if 'sources' in self.xml_attributes and isinstance(self.xml_attributes['sources'], basestring):
            self.sources_list = [
                # TODO: it is not clear why we are replacing the run here (which actually is a no-op
                # for old-style course locators. However, this is the implementation of
                # CourseLocator.make_usage_key_from_deprecated_string, which was previously
                # being called in this location.
                BlockUsageLocator.from_string(item).replace(run=self.location.course_key.run)
                for item in ConditionalDescriptor.parse_sources(self.xml_attributes)
            ]
def update_parent_if_moved(self, original_parent_location, published_version, delete_draft_only, user_id):
    """
    Update parent of an item if it has moved.

    Arguments:
        original_parent_location (BlockUsageLocator)  : Original parent block locator.
        published_version (dict)   : Published version of the block.
        delete_draft_only (function)    : A callback function to delete draft children if it was moved.
        user_id (int)   : User id
    """
    for child_location in published_version.get('definition', {}).get('children', []):
        item_location = UsageKey.from_string(child_location).map_into_course(original_parent_location.course_key)
        try:
            source_item = self.get_item(item_location)
        except ItemNotFoundError:
            log.error('Unable to find the item %s', unicode(item_location))
            # Bail out entirely on the first missing child.
            return

        # The child claims a different parent, so it was moved: repoint it and
        # let the callback clean up the now-orphaned draft.
        if source_item.parent and source_item.parent.block_id != original_parent_location.block_id:
            if self.update_item_parent(item_location, original_parent_location, source_item.parent, user_id):
                delete_draft_only(BlockUsageLocator.from_string(child_location))
def read_unit(course_dir, conn, course_key):
    """
    Read unit OLX files from the export's "video" directory and insert each as
    an openedx_content_item row, linking it into the course.

    NOTE(review): the directory name "video" and type "xblock/video" look
    copy-pasted from the video reader — confirm whether this should read from
    a "vertical" directory instead.
    """
    units_dir = course_dir / "video"
    xml_file_paths = sorted(units_dir.glob("*.xml"))

    cursor = conn.cursor()
    cursor.execute("BEGIN")
    for file_path in xml_file_paths:
        # BUGFIX: the original assigned ``getroot().attrib.get('display_name')``
        # (a string or None) to ``vertical_el`` and then called ``.attrib`` on
        # it, which would raise AttributeError. Parse the root element once and
        # take the title from its attributes.
        root_el = ET.parse(file_path).getroot()
        title = root_el.attrib.get('display_name')
        for child_el in root_el:
            # Placeholder: reference-only children ({'url_name'} attrs) are not
            # processed yet.
            if child_el.attrib.keys() == {'url_name'}:
                pass

        with open(file_path, 'rb') as olx_file:
            olx_content = olx_file.read()

        cursor.execute(
            """
            insert into openedx_content_item (natural_key, type, title, definition)
            values (?, ?, ?, ?);
            """,
            (
                file_path.stem,
                "xblock/video",
                title,
                olx_content,
            ),
        )
        insert_course_item(
            cursor,
            # assumes an APSW-style connection — sqlite3 would use cursor.lastrowid; TODO confirm
            conn.last_insert_rowid(),
            BlockUsageLocator(course_key, 'video', file_path.stem),
        )
    cursor.execute("COMMIT")
def zip_student_submissions(course_id, block_id, locator_unicode, username):
    """
    Task to download all submissions as zip file

    Args:
        course_id (unicode): edx course id
        block_id (unicode): edx block id
        locator_unicode (unicode): Unicode representing a BlockUsageLocator for the sga module
        username (unicode): user name of the staff user requesting the zip file
    """
    sga_locator = BlockUsageLocator.from_string(locator_unicode)
    target_path = get_zip_file_path(username, course_id, block_id, sga_locator)
    log.info("Creating zip file for course: %s at path: %s", sga_locator, target_path)

    # Replace any stale archive from a previous run before regenerating.
    if default_storage.exists(target_path):
        log.info("Deleting already-existing zip file at path: %s", target_path)
        default_storage.delete(target_path)

    _compress_student_submissions(target_path, block_id, course_id, sga_locator)
def setup_course(self):
    """
    Set up a mock course plus the expected email_params payload used by the
    enrollment-email assertions.
    """
    # Set up a mock course
    course_id_string = 'course-v1:org+course+run1'
    ck = CourseKey.from_string(course_id_string)
    bul = BlockUsageLocator(ck, u'course', u'course')
    course = Mock()
    course.id = ck
    course.system = Mock()
    course.scope_ids = Mock()
    # BUGFIX: was ``course.scope_id.user_id`` — a typo that silently
    # auto-created a new Mock attribute while scope_ids.user_id stayed unset.
    course.scope_ids.user_id = None
    course.scope_ids.block_type = u'course'
    course.scope_ids.def_id = bul
    course.scope_ids.usage_id = bul
    course.location = bul
    course.display_name = u'Course - Run 1'

    self.course_id_string = course_id_string
    self.ck = ck
    self.course = course

    email_params = {
        'registration_url': u'https://localhost:8000/register',  # noqa: E501
        'course_about_url': u'https://localhost:8000/courses/course-v1:org+course+run1/about',  # noqa: E501
        'site_name': 'localhost:8000',
        'course': self.course,
        'is_shib_course': None,
        'display_name': u'Course - Run 1',
        'auto_enroll': True,
        'course_url': u'https://localhost:8000/courses/course-v1:org+course+run1/'
    }  # noqa: E501
    self.email_params = email_params
def read_html(course_dir, conn, course_key):
    """
    Import every html block from the export's "html" directory: one
    openedx_content_item row per block, linked into the course.
    """
    html_dir = course_dir / "html"
    cursor = conn.cursor()
    cursor.execute("BEGIN")
    for xml_path in sorted(html_dir.glob("*.xml")):
        # Display name may be absent; stored as NULL in that case.
        title = ET.parse(xml_path).getroot().attrib.get('display_name')
        with open(xml_path.with_suffix('.html'), 'rb') as html_file:
            html_content = html_file.read()
        cursor.execute(
            """
            insert into openedx_content_item (natural_key, type, title, definition)
            values (?, ?, ?, ?);
            """,
            (xml_path.stem, "xblock/html", title, html_content),
        )
        insert_course_item(
            cursor,
            conn.last_insert_rowid(),
            BlockUsageLocator(course_key, 'html', xml_path.stem),
        )
    cursor.execute("COMMIT")
def delete_draft_only(root_location):
    """
    Helper function that calls delete on the specified location if a draft version of the item exists.
    If no draft exists, this function recursively calls itself on the children of the item.

    NOTE: this is a closure — ``self`` and ``user_id`` come from the enclosing scope.
    """
    query = root_location.to_deprecated_son(prefix='_id.')
    # Drop the revision so both draft and published versions match the query.
    del query['_id.revision']
    versions_found = self.collection.find(
        query, {
            '_id': True,
            'definition.children': True
        },
        sort=[SORT_REVISION_FAVOR_DRAFT])
    # Materialize once; the cursor is iterated multiple times below.
    versions_found = list(versions_found)
    # If 2 versions versions exist, we can assume one is a published version. Go ahead and do the delete
    # of the draft version.
    if len(versions_found) > 1:
        # Moving a child from published parent creates a draft of the parent and moved child.
        published_version = [
            version for version in versions_found
            if version.get('_id').get('revision') != MongoRevisionKey.draft
        ]
        if len(published_version) > 0:
            # This change makes sure that parents are updated too i.e. an item will have only one parent.
            self.update_parent_if_moved(root_location, published_version[0], delete_draft_only, user_id)
        self._delete_subtree(root_location, [as_draft], draft_only=True)
    elif len(versions_found) == 1:
        # Since this method cannot be called on something in DIRECT_ONLY_CATEGORIES and we call
        # delete_subtree as soon as we find an item with a draft version, if there is only 1 version
        # it must be published (since adding a child to a published item creates a draft of the parent).
        item = versions_found[0]
        assert item.get('_id').get('revision') != MongoRevisionKey.draft
        for child in item.get('definition', {}).get('children', []):
            child_loc = BlockUsageLocator.from_string(child)
            delete_draft_only(child_loc)
def setUp(self):
    """
    Build a TabsEditingDescriptor with three editing tabs and a stubbed
    render_template for the tests in this case.
    """
    super(TabsEditingDescriptorTestCase, self).setUp()
    system = get_test_descriptor_system()
    # Stub out template rendering so tests don't need real templates.
    system.render_template = Mock(
        return_value="<div>Test Template HTML</div>")
    self.tabs = [{
        'name': "Test_css",
        'template': "tabs/codemirror-edit.html",
        'current': True,
        'css': {
            'scss': [
                resource_string(
                    __name__,
                    '../../test_files/test_tabseditingdescriptor.scss')
            ],
            'css': [
                resource_string(
                    __name__,
                    '../../test_files/test_tabseditingdescriptor.css')
            ]
        }
    }, {
        'name': "Subtitles",
        'template': "video/subtitles.html",
    }, {
        'name': "Settings",
        'template': "tabs/video-metadata-edit-tab.html"
    }]

    # NOTE: this mutates the class attribute, so it leaks across tests unless reset.
    TabsEditingDescriptor.tabs = self.tabs
    self.descriptor = system.construct_xblock_from_class(
        TabsEditingDescriptor,
        scope_ids=ScopeIds(
            None, None, None,
            BlockUsageLocator(
                CourseLocator('org', 'course', 'run', branch='revision'),
                'category', 'name')),
        field_data=DictFieldData({}),
    )
def setUp(self):
    """
    Create the fixture graph used by outcome tests: a user, an LTI consumer,
    an outcome service, and a graded assignment tying them together, plus a
    patched send_score_update.
    """
    super(BaseOutcomeTest, self).setUp()
    self.course_key = CourseLocator(
        org='some_org',
        course='some_course',
        run='some_run'
    )
    self.usage_key = BlockUsageLocator(
        course_key=self.course_key,
        block_type='problem',
        block_id='block_id'
    )
    self.user = UserFactory.create()
    self.consumer = LtiConsumer(
        consumer_name='Lti Consumer Name',
        consumer_key='consumer_key',
        consumer_secret='consumer_secret',
        instance_guid='tool_instance_guid'
    )
    self.consumer.save()
    outcome = OutcomeService(
        lis_outcome_service_url='http://example.com/service_url',
        lti_consumer=self.consumer
    )
    outcome.save()
    self.assignment = GradedAssignment(
        user=self.user,
        course_key=self.course_key,
        usage_key=self.usage_key,
        outcome_service=outcome,
        lis_result_sourcedid='sourcedid',
        version_number=1,
    )
    self.assignment.save()

    # Patch the score-update call so tests can assert on it without network I/O.
    self.send_score_update_mock = self.setup_patch(
        'lti_provider.outcomes.send_score_update', None
    )
def read_video(course_dir, conn, course_key):
    """
    Import every video block from the export's "video" directory: one
    openedx_content_item row per block, linked into the course.

    NOTE(review): the original docstring talked about problems and retry
    policies — it appears copy-pasted from the problem reader; this function
    only reads video OLX.
    """
    blocks_dir = course_dir / "video"
    xml_file_paths = [
        file_path
        for file_path in sorted(blocks_dir.glob("*.xml"))
    ]

    cursor = conn.cursor()
    cursor.execute("BEGIN")
    for file_path in xml_file_paths:
        # print(file_path)
        # Display name may be absent; stored as NULL in that case.
        title = ET.parse(file_path).getroot().attrib.get('display_name')
        with open(file_path, 'rb') as olx_file:
            olx_content = olx_file.read()
        cursor.execute(
            """
            insert into openedx_content_item (natural_key, type, title, definition)
            values (?, ?, ?, ?);
            """,
            (
                # f"video+block@{file_path.stem}",
                file_path.stem,
                "xblock/video",
                title,
                olx_content))
        insert_course_item(
            cursor,
            # assumes an APSW-style connection — sqlite3 uses cursor.lastrowid; TODO confirm
            conn.last_insert_rowid(),
            BlockUsageLocator(course_key, 'video', file_path.stem),
        )
    cursor.execute("COMMIT")
def test_remap_namespace_native_xblock(self):
    """
    Importing an XBlock into a different course should rebase its location
    while preserving explicitly-set content and settings field values.
    """
    # Set the XBlock's location
    self.xblock.location = BlockUsageLocator(CourseLocator("org", "import", "run"), "category", "stubxblock")

    # Explicitly set the content and settings fields
    self.xblock.test_content_field = "Explicitly set"
    self.xblock.test_settings_field = "Explicitly set"
    self.xblock.save()

    # Move to different runtime w/ different course id
    target_location_namespace = CourseKey.from_string("org/course/run")
    new_version = _update_and_import_module(
        self.xblock,
        modulestore(),
        999,
        self.xblock.location.course_key,
        target_location_namespace,
        do_import_static=False
    )

    # Check the XBlock's location
    self.assertEqual(new_version.location.course_key, target_location_namespace)

    # Check the values of the fields.
    # The content and settings fields should be preserved
    self.assertEqual(new_version.test_content_field, 'Explicitly set')
    self.assertEqual(new_version.test_settings_field, 'Explicitly set')

    # Expect that these fields are marked explicitly set
    self.assertIn(
        'test_content_field',
        new_version.get_explicitly_set_fields_by_scope(scope=Scope.content)
    )
    self.assertIn(
        'test_settings_field',
        new_version.get_explicitly_set_fields_by_scope(scope=Scope.settings)
    )
def test_conditional_module_parse_sources(self):
    """parse_sources() splits the legacy semicolon-separated 'sources' string."""
    runtime = Mock()
    location = BlockUsageLocator(
        CourseLocator("edX", "conditional_test", "test_run"),
        "conditional",
        "SampleConditional",
    )
    scope_ids = ScopeIds(None, None, location, location)
    field_data = DictFieldData({
        'data': '<conditional/>',
        'xml_attributes': {
            'sources': 'i4x://HarvardX/ER22x/poll_question/T15_poll;i4x://HarvardX/ER22x/poll_question/T16_poll'  # lint-amnesty, pylint: disable=line-too-long
        },
        'children': None,
    })
    block = ConditionalBlock(runtime, field_data, scope_ids)

    expected = [
        'i4x://HarvardX/ER22x/poll_question/T15_poll',
        'i4x://HarvardX/ER22x/poll_question/T16_poll',
    ]
    assert block.parse_sources(block.xml_attributes) == expected
def test_update_locations_native_xblock(self):
    """
    Update locations updates location and keeps values and "is_set_on" status
    """
    # Start the block off in the "import" course.
    self.xblock.location = BlockUsageLocator(
        CourseLocator("org", "import", "run"), "category", "stubxblock"
    )

    # Explicitly assign content, settings and children fields, then persist.
    self.xblock.test_content_field = 'Explicitly set'
    self.xblock.test_settings_field = 'Explicitly set'
    self.xblock.test_mutable_content_field = [1, 2, 3]
    self.xblock.test_mutable_settings_field = ["a", "s", "d"]
    self.xblock.children = self.fake_children_locations  # pylint:disable=attribute-defined-outside-init
    self.xblock.save()

    # Move the block to the draft revision of the same location (mutates in place).
    target_location = self.xblock.location.replace(revision='draft')
    _update_module_location(self.xblock, target_location)
    updated = self.xblock  # _update_module_location updates in-place

    # The location was rewritten.
    self.assertEqual(updated.location, target_location)

    # Every field value survived the move.
    expected_values = {
        'test_content_field': 'Explicitly set',
        'test_settings_field': 'Explicitly set',
        'test_mutable_content_field': [1, 2, 3],
        'test_mutable_settings_field': ["a", "s", "d"],
        'children': self.fake_children_locations,
    }
    for field_name, expected in expected_values.items():
        self.assertEqual(getattr(updated, field_name), expected)

    # Fields stay marked "explicitly set" in each scope...
    self._check_explicitly_set(updated, Scope.content, self.CONTENT_FIELDS, should_be_set=True)
    self._check_explicitly_set(updated, Scope.settings, self.SETTINGS_FIELDS, should_be_set=True)
    self._check_explicitly_set(updated, Scope.children, self.CHILDREN_FIELDS, should_be_set=True)

    # ...and each still reports is_set_on for the updated block.
    for field in self.CONTENT_FIELDS + self.SETTINGS_FIELDS + self.CHILDREN_FIELDS:
        self.assertTrue(updated.fields[field].is_set_on(updated))
def get_course_required_block_ids(self, required_block_ids):
    """
    Filters the required_block_ids list, and returns only the required block
    ids that belong to the same course key.

    If self.matching_blocks_by_type is set, the result is the intersection of
    required_block_ids with self.matching_blocks_by_type. If
    required_block_ids is not provided, it returns just the
    self.matching_blocks_by_type list.

    Args:
        required_block_ids: List of the block location ids.
    Returns:
        required_course_block_ids: List containing only the BlockUsageLocator items.
    """
    blocks_by_type = self.matching_blocks_by_type
    if not required_block_ids:
        return blocks_by_type

    filtered_locators = []
    for raw_block_id in required_block_ids:
        try:
            locator = BlockUsageLocator.from_string(raw_block_id)
        except InvalidKeyError:
            # Malformed keys are silently dropped.
            continue
        if not blocks_by_type:
            # No type filter configured: keep anything from this course.
            if locator.course_key == self.course_key:
                filtered_locators.append(locator)
        elif locator in blocks_by_type:
            # Type filter configured: keep only blocks it contains.
            filtered_locators.append(locator)
    return filtered_locators
def _calculate_score_for_modules(user_id, course, modules):
    """
    Calculates the cumulative score (percent) of the given modules.

    Args:
        user_id: id of the user whose scores are fetched.
        course: course object (course.id is used as the course key).
        modules: iterable of XBlock modules to average over.

    Returns:
        float in [0, 1]: the mean of per-module score fractions, or 0 when no
        module contributed a score.
    """
    # Materialize the (possibly lazy) iterable once, since it is walked twice.
    modules = list(modules)

    # removing branch and version from exam modules locator
    # otherwise student module would not return scores since module usage keys would not match
    locations = [
        BlockUsageLocator(
            course_key=course.id,
            block_type=module.location.block_type,
            block_id=module.location.block_id,
        )
        if isinstance(module.location, BlockUsageLocator) and module.location.version
        else module.location
        for module in modules
    ]

    scores_client = ScoresClient(course.id, user_id)
    scores_client.fetch_scores(locations)

    # Iterate over all of the exam modules to get score percentage of user for each of them
    module_percentages = []
    # Container-like categories never carry a score of their own.
    ignore_categories = ['course', 'chapter', 'sequential', 'vertical', 'randomize', 'library_content']
    for index, module in enumerate(modules):
        if module.category not in ignore_categories and (module.graded or module.has_score):
            module_score = scores_client.get(locations[index])
            if module_score:
                correct = module_score.correct or 0
                # `or 1` guards against division by zero for total == 0/None.
                total = module_score.total or 1
                # float() guards against integer truncation under Python 2,
                # matching the float() guard on the final division below.
                module_percentages.append(float(correct) / total)

    return sum(module_percentages) / float(len(module_percentages)) if module_percentages else 0
def recurse_mark_complete(course_block_completions, latest_completion, block):
    """
    Helper function to walk course tree dict,
    marking blocks as 'complete' and 'last_complete'

    If all blocks are complete, mark parent block complete
    mark parent blocks of 'last_complete' as 'last_complete'

    :param course_block_completions: dict[course_completion_object] = completion_value
    :param latest_completion: course_completion_object
    :param block: course_outline_root_block block object or child block

    :return:
        block: course_outline_root_block block object or child block
    """
    block_key = BlockUsageLocator.from_string(block['id'])

    # A truthy completion value means the learner finished this block.
    if course_block_completions.get(block_key):
        block['complete'] = True
        if block_key == latest_completion.block_key:
            block['resume_block'] = True

    children = block.get('children')
    if children:
        for child in children:
            recurse_mark_complete(
                course_block_completions,
                latest_completion,
                block=child,
            )
            # Bubble the resume marker up to every ancestor of the resume
            # block. Using .get() avoids a KeyError when a child was never
            # marked (the original assumed the key is pre-initialized by the
            # caller — not guaranteed from here).
            if child.get('resume_block') is True:
                block['resume_block'] = True

        # A parent is complete exactly when every child is complete;
        # all() replaces the original count-and-compare comprehension and is
        # likewise tolerant of children that were never marked.
        if all(child.get('complete') for child in children):
            block['complete'] = True
def test_conditional_module_parse_sources(self):
    """parse_sources() splits the legacy semicolon-separated 'sources' string."""
    runtime = Mock()
    location = BlockUsageLocator(
        CourseLocator("edX", "conditional_test", "test_run"),
        "conditional",
        "SampleConditional",
    )
    field_data = DictFieldData({
        'data': '<conditional/>',
        'xml_attributes': {
            'sources': 'i4x://HarvardX/ER22x/poll_question/T15_poll;i4x://HarvardX/ER22x/poll_question/T16_poll'
        },
        'children': None,
    })
    descriptor = ConditionalDescriptor(
        runtime,
        field_data,
        ScopeIds(None, None, location, location),
    )

    expected = [
        'i4x://HarvardX/ER22x/poll_question/T15_poll',
        'i4x://HarvardX/ER22x/poll_question/T16_poll',
    ]
    self.assertEqual(descriptor.parse_sources(descriptor.xml_attributes), expected)
def setUp(self):
    """Build the keys, mocks and database rows an outcome-send test needs."""
    super(SendOutcomeTest, self).setUp()

    # Keys identifying the graded problem being reported on.
    self.course_key = CourseLocator(org='some_org', course='some_course', run='some_run')
    self.usage_key = BlockUsageLocator(
        course_key=self.course_key,
        block_type='problem',
        block_id='block_id',
    )
    self.user = UserFactory.create()
    self.points_possible = 10
    self.points_earned = 3

    # Stub out the three outcome-service collaborators.
    self.generate_xml_mock = self.setup_patch(
        'lti_provider.outcomes.generate_replace_result_xml',
        'replace result XML',
    )
    self.replace_result_mock = self.setup_patch(
        'lti_provider.outcomes.sign_and_send_replace_result',
        'replace result response',
    )
    self.check_result_mock = self.setup_patch(
        'lti_provider.outcomes.check_replace_result_response',
        True,
    )

    # Persist a consumer, its outcome service, and the graded assignment row.
    lti_consumer = LtiConsumer(
        consumer_name='Lti Consumer Name',
        consumer_key='consumer_key',
        consumer_secret='consumer_secret',
        instance_guid='tool_instance_guid',
    )
    lti_consumer.save()
    outcome_service = OutcomeService(
        lis_outcome_service_url='http://example.com/service_url',
        lti_consumer=lti_consumer,
    )
    outcome_service.save()
    self.assignment = GradedAssignment(
        user=self.user,
        course_key=self.course_key,
        usage_key=self.usage_key,
        outcome_service=outcome_service,
        lis_result_sourcedid='sourcedid',
    )
    self.assignment.save()
def test_superclass_make_relative(self):
    """make_relative() on a library key must produce a LibraryUsageLocator."""
    library_key = LibraryLocator(org="TestX", library="problem-bank-15")
    usage = BlockUsageLocator.make_relative(library_key, "block_type", "block_id")
    self.assertIsInstance(usage, LibraryUsageLocator)
def async_migrate_transcript_subtask(self, *args, **kwargs):  # pylint: disable=unused-argument
    """
    Migrates a transcript of a given video in a course as a new celery task.

    Positional args (in order): video_location (str usage key), revision
    (modulestore revision option), language_code, force_update.
    Keyword args: command_run (run identifier used in every log line),
    commit (False => dry run, nothing is written).

    Returns 'Success' or 'Failure'; re-raises unexpected exceptions so
    celery can record them.
    """
    success, failure = 'Success', 'Failure'
    video_location, revision, language_code, force_update = args
    command_run = kwargs['command_run']

    store = modulestore()
    video = store.get_item(usage_key=BlockUsageLocator.from_string(video_location), revision=revision)
    edx_video_id = clean_video_id(video.edx_video_id)

    if not kwargs['commit']:
        # Dry run: report what would be migrated and stop before any writes.
        LOGGER.info(
            ('[%s] [run=%s] [video-transcript-will-be-migrated] '
             '[revision=%s] [video=%s] [edx_video_id=%s] [language_code=%s]'),
            MIGRATION_LOGS_PREFIX, command_run, revision, video_location, edx_video_id, language_code
        )
        return success

    LOGGER.info(
        ('[%s] [run=%s] [transcripts-migration-process-started-for-video-transcript] [revision=%s] '
         '[video=%s] [edx_video_id=%s] [language_code=%s]'),
        MIGRATION_LOGS_PREFIX, command_run, revision, video_location, edx_video_id, language_code
    )

    try:
        # Pull the existing transcript out of the contentstore in SJSON form.
        transcripts_info = video.get_transcripts_info()
        transcript_content, _, _ = get_transcript_from_contentstore(
            video=video,
            language=language_code,
            output_format=Transcript.SJSON,
            transcripts_info=transcripts_info,
        )

        # If the video has no usable VAL record, mint an external one and
        # write the new id back onto the video block in the modulestore.
        is_video_valid = edx_video_id and is_video_available(edx_video_id)
        if not is_video_valid:
            edx_video_id = create_external_video('external-video')
            video.edx_video_id = edx_video_id

            # determine branch published/draft
            branch_setting = (
                ModuleStoreEnum.Branch.published_only
                if revision == ModuleStoreEnum.RevisionOption.published_only
                else ModuleStoreEnum.Branch.draft_preferred
            )
            with store.branch_setting(branch_setting):
                store.update_item(video, ModuleStoreEnum.UserID.mgmt_command)

            LOGGER.info(
                '[%s] [run=%s] [generated-edx-video-id] [revision=%s] [video=%s] [edx_video_id=%s] [language_code=%s]',
                MIGRATION_LOGS_PREFIX, command_run, revision, video_location, edx_video_id, language_code
            )

        # Persist the transcript content against the (possibly new) video id.
        save_transcript_to_storage(
            command_run=command_run,
            edx_video_id=edx_video_id,
            language_code=language_code,
            transcript_content=transcript_content,
            file_format=Transcript.SJSON,
            force_update=force_update,
        )
    except (NotFoundError, TranscriptsGenerationException, ValCannotCreateError):
        # Known, expected failure modes: log and report failure to the caller.
        LOGGER.exception(
            ('[%s] [run=%s] [video-transcript-migration-failed-with-known-exc] [revision=%s] [video=%s] '
             '[edx_video_id=%s] [language_code=%s]'),
            MIGRATION_LOGS_PREFIX, command_run, revision, video_location, edx_video_id, language_code
        )
        return failure
    except Exception:
        # Anything unexpected: log with traceback and re-raise for celery.
        LOGGER.exception(
            ('[%s] [run=%s] [video-transcript-migration-failed-with-unknown-exc] [revision=%s] '
             '[video=%s] [edx_video_id=%s] [language_code=%s]'),
            MIGRATION_LOGS_PREFIX, command_run, revision, video_location, edx_video_id, language_code
        )
        raise

    LOGGER.info(
        ('[%s] [run=%s] [video-transcript-migration-succeeded-for-a-video] [revision=%s] '
         '[video=%s] [edx_video_id=%s] [language_code=%s]'),
        MIGRATION_LOGS_PREFIX, command_run, revision, video_location, edx_video_id, language_code
    )
    return success
def create(system, source_is_error_module=False, source_visible_to_staff_only=False):
    """
    return a dict of modules: the conditional with a single source and a single child.
    Keys are 'cond_module', 'source_module', and 'child_module'.

    if the source_is_error_module flag is set, create a real ErrorModule for the source.
    """
    descriptor_system = get_test_descriptor_system()

    # construct source descriptor and module:
    source_location = BlockUsageLocator(CourseLocator("edX", "conditional_test", "test_run", deprecated=True),
                                        "problem", "SampleProblem", deprecated=True)
    if source_is_error_module:
        # Make an error descriptor and module
        source_descriptor = NonStaffErrorDescriptor.from_xml(
            'some random xml data',
            system,
            id_generator=CourseLocationManager(source_location.course_key),
            error_msg='random error message'
        )
    else:
        source_descriptor = Mock(name='source_descriptor')
        source_descriptor.location = source_location

    # These attributes are set for both the real and the mocked source.
    source_descriptor.visible_to_staff_only = source_visible_to_staff_only
    source_descriptor.runtime = descriptor_system
    # Delegate rendering to the descriptor system so fragments flow through it.
    source_descriptor.render = lambda view, context=None: descriptor_system.render(source_descriptor, view, context)

    # construct other descriptors:
    child_descriptor = Mock(name='child_descriptor')
    child_descriptor.visible_to_staff_only = False
    # The child's student view returns the sentinel content the tests look for.
    child_descriptor._xmodule.student_view.return_value = Fragment(content=u'<p>This is a secret</p>')
    child_descriptor.student_view = child_descriptor._xmodule.student_view
    child_descriptor.displayable_items.return_value = [child_descriptor]
    child_descriptor.runtime = descriptor_system
    child_descriptor.xmodule_runtime = get_test_system()
    child_descriptor.render = lambda view, context=None: descriptor_system.render(child_descriptor, view, context)
    child_descriptor.location = source_location.replace(category='html', name='child')

    def visible_to_nonstaff_users(desc):
        """
        Returns if the object is visible to nonstaff users.
        """
        return not desc.visible_to_staff_only

    def load_item(usage_id, for_parent=None):  # pylint: disable=unused-argument
        """Test-only implementation of load_item that simply returns static xblocks."""
        return {
            child_descriptor.location: child_descriptor,
            source_location: source_descriptor
        }.get(usage_id)

    descriptor_system.load_item = load_item

    system.descriptor_runtime = descriptor_system

    # construct conditional module:
    cond_location = BlockUsageLocator(CourseLocator("edX", "conditional_test", "test_run", deprecated=True),
                                      "conditional", "SampleConditional", deprecated=True)
    field_data = DictFieldData({
        'data': '<conditional/>',
        'conditional_attr': 'attempted',
        'conditional_value': 'true',
        'xml_attributes': {'attempted': 'true'},
        'children': [child_descriptor.location],
    })

    cond_descriptor = ConditionalDescriptor(
        descriptor_system,
        field_data,
        ScopeIds(None, None, cond_location, cond_location)
    )
    cond_descriptor.xmodule_runtime = system
    # get_module hides staff-only blocks from non-staff (returns None).
    system.get_module = lambda desc: desc if visible_to_nonstaff_users(desc) else None
    cond_descriptor.get_required_module_descriptors = Mock(return_value=[source_descriptor])
    cond_descriptor.required_modules = [
        system.get_module(descriptor)
        for descriptor in cond_descriptor.get_required_module_descriptors()
    ]

    # return dict:
    return {'cond_module': cond_descriptor,
            'source_module': source_descriptor,
            'child_module': child_descriptor}
def test_clean(self, pair):
    """clean() maps the raw value (pair[0]) to the expected cleaned form (pair[1])."""
    raw_value, expected = pair
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(BlockUsageLocator.clean(raw_value), expected)
    'oauth_timestamp': u'OAuth Timestamp',
    'oauth_nonce': u'OAuth Nonce',
    'user_id': u'LTI_User',
}

# Parameters the LTI spec allows but does not require on a launch.
LTI_OPTIONAL_PARAMS = {
    'lis_result_sourcedid': u'result sourcedid',
    'lis_outcome_service_url': u'outcome service URL',
    'tool_consumer_instance_guid': u'consumer instance guid'
}

# Fixed course/usage keys shared by the launch tests.
COURSE_KEY = CourseLocator(org='some_org', course='some_course', run='some_run')
USAGE_KEY = BlockUsageLocator(course_key=COURSE_KEY, block_type='problem', block_id='block_id')

COURSE_PARAMS = {
    'course_key': COURSE_KEY,
    'usage_key': USAGE_KEY
}

# NOTE(review): dict(a.items() + b.items()) only works on Python 2, where
# items() returns a list; on Python 3 this raises TypeError.
ALL_PARAMS = dict(LTI_DEFAULT_PARAMS.items() + COURSE_PARAMS.items())


def build_launch_request(authenticated=True):
    """
    Helper method to create a new request object for the LTI launch.
    """
    request = RequestFactory().post('/')
    request.user = UserFactory.create()
    # Replace is_authenticated wholesale so both attribute- and call-style
    # access in the code under test sees the desired value.
    request.user.is_authenticated = MagicMock(return_value=authenticated)
    request.session = {}
def load_course(self, course_dir, course_ids, tracker, target_course_id=None):
    """
    Load a course into this module store
    course_path: Course directory name

    returns a CourseBlock for the course
    """
    log.info(
        f'Course import {target_course_id}: Starting courselike import from {course_dir}'
    )
    with open(self.data_dir / course_dir / self.parent_xml) as course_file:

        course_data = etree.parse(course_file, parser=edx_xml_parser).getroot()

        # Fall back to the default org when the XML omits it.
        org = course_data.get('org')
        if org is None:
            msg = ("No 'org' attribute set for courselike in {dir}. "
                   "Using default 'edx'".format(dir=course_dir))
            log.warning(msg)
            tracker(msg)
            org = 'edx'

        # Parent XML should be something like 'library.xml' or 'course.xml'
        courselike_label = self.parent_xml.split('.', maxsplit=1)[0]

        # The course attribute defaults to the directory name when missing.
        course = course_data.get(courselike_label)
        if course is None:
            msg = (
                "No '{courselike_label}' attribute set for course in {dir}."
                " Using default '{default}'".format(
                    courselike_label=courselike_label,
                    dir=course_dir,
                    default=course_dir
                )
            )
            log.warning(msg)
            tracker(msg)
            course = course_dir

        url_name = course_data.get('url_name', course_data.get('slug'))

        if url_name:
            # Policies live in a per-url_name directory; load them if present.
            policy_dir = self.data_dir / course_dir / 'policies' / url_name
            policy_path = policy_dir / 'policy.json'

            policy = self.load_policy(policy_path, tracker)

            # VS[compat]: remove once courses use the policy dirs.
            if policy == {}:
                old_policy_path = self.data_dir / course_dir / 'policies' / f'{url_name}.json'
                policy = self.load_policy(old_policy_path, tracker)
        else:
            policy = {}
            # VS[compat] : 'name' is deprecated, but support it for now...
            if course_data.get('name'):
                url_name = BlockUsageLocator.clean(course_data.get('name'))
                tracker("'name' is deprecated for module xml. Please use "
                        "display_name and url_name.")
            else:
                url_name = None

        course_id = self.get_id(org, course, url_name)

        # Caller can restrict import to a whitelist of course ids.
        if course_ids is not None and course_id not in course_ids:
            return None

        def get_policy(usage_id):
            """
            Return the policy dictionary to be applied to the specified XBlock usage
            """
            return policy.get(policy_key(usage_id), {})

        # Only pass along the runtime services that are actually configured.
        services = {}
        if self.i18n_service:
            services['i18n'] = self.i18n_service

        if self.fs_service:
            services['fs'] = self.fs_service

        if self.user_service:
            services['user'] = self.user_service

        system = ImportSystem(
            xmlstore=self,
            course_id=course_id,
            course_dir=course_dir,
            error_tracker=tracker,
            load_error_modules=self.load_error_modules,
            get_policy=get_policy,
            mixins=self.xblock_mixins,
            default_class=self.default_class,
            select=self.xblock_select,
            field_data=self.field_data,
            services=services,
            target_course_id=target_course_id,
        )
        course_descriptor = system.process_xml(etree.tostring(course_data, encoding='unicode'))
        # If we fail to load the course, then skip the rest of the loading steps
        if isinstance(course_descriptor, ErrorBlock):
            return course_descriptor

        self.content_importers(system, course_descriptor, course_dir, url_name)

        log.info(
            f'Course import {target_course_id}: Done with courselike import from {course_dir}'
        )

        return course_descriptor
def test_clean_for_url_name(self, pair):
    """clean_for_url_name() maps the raw value (pair[0]) to the expected form (pair[1])."""
    raw_value, expected = pair
    self.assertEqual(BlockUsageLocator.clean_for_url_name(raw_value), expected)
def test_possibly_scored(self):
    """Every known scorable block type must be reported as possibly scored."""
    course_key = CourseLocator(u'org', u'course', u'run')
    for scorable_type in self.possibly_scored_block_types:
        key = BlockUsageLocator(course_key, scorable_type, 'mock_block_id')
        self.assertTrue(scores.possibly_scored(key))
def xblock_from_json(self, class_, course_key, block_id, json_data, inherited_settings,
                     course_entry_override=None, **kwargs):
    """
    Build an XBlock of ``class_`` from its split-mongo ``json_data``,
    caching/looking up inherited settings on the course entry, and falling
    back to an ErrorDescriptor when construction fails.
    """
    if course_entry_override is None:
        course_entry_override = self.course_entry
    else:
        # most recent retrieval is most likely the right one for next caller (see comment above fn)
        self.course_entry['branch'] = course_entry_override['branch']
        self.course_entry['org'] = course_entry_override['org']
        self.course_entry['course'] = course_entry_override['course']
        self.course_entry['run'] = course_entry_override['run']
    definition_id = json_data.get('definition')
    block_type = json_data['category']

    if block_id is not None:
        if inherited_settings is None:
            # see if there's a value in course_entry
            if (block_type, block_id) in self.course_entry['inherited_settings']:
                inherited_settings = self.course_entry['inherited_settings'][(block_type, block_id)]
        elif (block_type, block_id) not in self.course_entry['inherited_settings']:
            # Cache the provided settings for later lookups of this block.
            self.course_entry['inherited_settings'][(block_type, block_id)] = inherited_settings

    # Definition fields are loaded lazily unless already materialized.
    if definition_id is not None and not json_data.get('definition_loaded', False):
        definition_loader = DefinitionLazyLoader(
            self.modulestore, block_type, definition_id,
            lambda fields: self.modulestore.convert_references_to_keys(
                course_key, self.load_block_type(block_type),
                fields, self.course_entry['structure']['blocks'],
            )
        )
    else:
        definition_loader = None

    # If no definition id is provide, generate an in-memory id
    if definition_id is None:
        definition_id = LocalId()

    # If no usage id is provided, generate an in-memory id
    if block_id is None:
        block_id = LocalId()

    block_locator = BlockUsageLocator(
        course_key,
        block_type=block_type,
        block_id=block_id,
    )

    converted_fields = self.modulestore.convert_references_to_keys(
        block_locator.course_key, class_, json_data.get('fields', {}),
        self.course_entry['structure']['blocks'],
    )
    kvs = SplitMongoKVS(definition_loader, converted_fields, inherited_settings, **kwargs)
    field_data = KvsFieldData(kvs)

    try:
        module = self.construct_xblock_from_class(
            class_,
            ScopeIds(None, block_type, definition_id, block_locator),
            field_data,
        )
    except Exception:
        # Any construction failure becomes an ErrorDescriptor at an 'error'
        # locator in the same structure version.
        log.warning("Failed to load descriptor", exc_info=True)
        return ErrorDescriptor.from_json(
            json_data,
            self,
            BlockUsageLocator(
                CourseLocator(version_guid=course_entry_override['structure']['_id']),
                block_type='error',
                block_id=block_id
            ),
            error_msg=exc_info_to_str(sys.exc_info())
        )

    # Copy edit metadata from the stored json onto the block.
    edit_info = json_data.get('edit_info', {})
    module._edited_by = edit_info.get('edited_by')
    module._edited_on = edit_info.get('edited_on')
    module.previous_version = edit_info.get('previous_version')
    module.update_version = edit_info.get('update_version')
    module.source_version = edit_info.get('source_version', None)
    module.definition_locator = DefinitionLocator(block_type, definition_id)

    # decache any pending field settings
    module.save()

    # If this is an in-memory block, store it in this system
    if isinstance(block_locator.block_id, LocalId):
        self.local_modules[block_locator] = module

    return module
def xblock_from_json(self, class_, block_id, json_data, course_entry_override=None, **kwargs):
    """
    Build an XBlock of ``class_`` from its split-mongo ``json_data``,
    resolving the definition through the modulestore and falling back to an
    ErrorDescriptor when construction fails.
    """
    if course_entry_override is None:
        course_entry_override = self.course_entry
    else:
        # most recent retrieval is most likely the right one for next caller (see comment above fn)
        self.course_entry['branch'] = course_entry_override['branch']
        self.course_entry['org'] = course_entry_override['org']
        self.course_entry['course'] = course_entry_override['course']
        self.course_entry['run'] = course_entry_override['run']

    # most likely a lazy loader or the id directly
    definition = json_data.get('definition', {})
    definition_id = self.modulestore.definition_locator(definition)

    # If no usage id is provided, generate an in-memory id
    if block_id is None:
        block_id = LocalId()

    block_locator = BlockUsageLocator(
        CourseLocator(
            version_guid=course_entry_override['structure']['_id'],
            org=course_entry_override.get('org'),
            course=course_entry_override.get('course'),
            run=course_entry_override.get('run'),
            branch=course_entry_override.get('branch'),
        ),
        block_type=json_data.get('category'),
        block_id=block_id,
    )

    converted_fields = self.modulestore.convert_references_to_keys(
        block_locator.course_key, class_, json_data.get('fields', {}),
        self.course_entry['structure']['blocks'],
    )
    kvs = SplitMongoKVS(
        definition,
        converted_fields,
        json_data.get('_inherited_settings'),
        **kwargs
    )
    field_data = KvsFieldData(kvs)

    try:
        module = self.construct_xblock_from_class(
            class_,
            ScopeIds(None, json_data.get('category'), definition_id, block_locator),
            field_data,
        )
    except Exception:
        # Any construction failure becomes an ErrorDescriptor at an 'error'
        # locator in the same structure version.
        log.warning("Failed to load descriptor", exc_info=True)
        return ErrorDescriptor.from_json(
            json_data,
            self,
            BlockUsageLocator(
                CourseLocator(version_guid=course_entry_override['structure']['_id']),
                block_type='error',
                block_id=block_id
            ),
            error_msg=exc_info_to_str(sys.exc_info())
        )

    # Copy edit metadata from the stored json onto the block.
    edit_info = json_data.get('edit_info', {})
    module.edited_by = edit_info.get('edited_by')
    module.edited_on = edit_info.get('edited_on')
    module.subtree_edited_by = None  # TODO - addressed with LMS-11183
    module.subtree_edited_on = None  # TODO - addressed with LMS-11183
    module.published_by = None  # TODO - addressed with LMS-11184
    module.published_date = None  # TODO - addressed with LMS-11184
    module.previous_version = edit_info.get('previous_version')
    module.update_version = edit_info.get('update_version')
    module.source_version = edit_info.get('source_version', None)
    module.definition_locator = definition_id

    # decache any pending field settings
    module.save()

    # If this is an in-memory block, store it in this system
    if isinstance(block_locator.block_id, LocalId):
        self.local_modules[block_locator] = module

    return module
def create(system, source_is_error_module=False, source_visible_to_staff_only=False):
    """
    return a dict of modules: the conditional with a single source and a single child.
    Keys are 'cond_module', 'source_module', and 'child_module'.

    if the source_is_error_module flag is set, create a real ErrorModule for the source.
    """
    descriptor_system = get_test_descriptor_system()

    # construct source descriptor and module:
    source_location = BlockUsageLocator(CourseLocator("edX", "conditional_test", "test_run", deprecated=True),
                                        "problem", "SampleProblem", deprecated=True)
    if source_is_error_module:
        # Make an error descriptor and module
        source_descriptor = NonStaffErrorDescriptor.from_xml(
            'some random xml data',
            system,
            id_generator=CourseLocationManager(source_location.course_key),
            error_msg='random error message')
    else:
        source_descriptor = Mock(name='source_descriptor')
        source_descriptor.location = source_location

    # These attributes are set for both the real and the mocked source.
    source_descriptor.visible_to_staff_only = source_visible_to_staff_only
    source_descriptor.runtime = descriptor_system
    # Delegate rendering to the descriptor system so fragments flow through it.
    source_descriptor.render = lambda view, context=None: descriptor_system.render(
        source_descriptor, view, context)

    # construct other descriptors:
    child_descriptor = Mock(name='child_descriptor')
    child_descriptor.visible_to_staff_only = False
    # The child's student view returns the sentinel content the tests look for.
    child_descriptor._xmodule.student_view.return_value = Fragment(
        content=u'<p>This is a secret</p>')
    child_descriptor.student_view = child_descriptor._xmodule.student_view
    child_descriptor.displayable_items.return_value = [child_descriptor]
    child_descriptor.runtime = descriptor_system
    child_descriptor.xmodule_runtime = get_test_system()
    child_descriptor.render = lambda view, context=None: descriptor_system.render(
        child_descriptor, view, context)
    child_descriptor.location = source_location.replace(category='html', name='child')

    def visible_to_nonstaff_users(desc):
        """
        Returns if the object is visible to nonstaff users.
        """
        return not desc.visible_to_staff_only

    def load_item(usage_id, for_parent=None):  # pylint: disable=unused-argument
        """Test-only implementation of load_item that simply returns static xblocks."""
        return {
            child_descriptor.location: child_descriptor,
            source_location: source_descriptor
        }.get(usage_id)

    descriptor_system.load_item = load_item

    system.descriptor_runtime = descriptor_system

    # construct conditional module:
    cond_location = BlockUsageLocator(CourseLocator("edX", "conditional_test", "test_run", deprecated=True),
                                      "conditional", "SampleConditional", deprecated=True)
    field_data = DictFieldData({
        'data': '<conditional/>',
        'conditional_attr': 'attempted',
        'conditional_value': 'true',
        'xml_attributes': {
            'attempted': 'true'
        },
        'children': [child_descriptor.location],
    })

    cond_descriptor = ConditionalDescriptor(
        descriptor_system,
        field_data,
        ScopeIds(None, None, cond_location, cond_location))
    cond_descriptor.xmodule_runtime = system
    # get_module hides staff-only blocks from non-staff (returns None).
    system.get_module = lambda desc: desc if visible_to_nonstaff_users(
        desc) else None
    cond_descriptor.get_required_module_descriptors = Mock(
        return_value=[source_descriptor])
    cond_descriptor.required_modules = [
        system.get_module(descriptor)
        for descriptor in cond_descriptor.get_required_module_descriptors()
    ]

    # return dict:
    return {
        'cond_module': cond_descriptor,
        'source_module': source_descriptor,
        'child_module': child_descriptor
    }
def async_migrate_transcript_subtask(self, *args, **kwargs):  # pylint: disable=unused-argument
    """
    Migrates a transcript of a given video in a course as a new celery task.

    Positional args (in order): video_location (str usage key), revision
    (modulestore revision option), language_code, force_update.
    Keyword args: command_run (run identifier used in every log line),
    commit (False => dry run, nothing is written).

    Returns 'Success' or 'Failure'; re-raises unexpected exceptions so
    celery can record them.
    """
    success, failure = 'Success', 'Failure'
    video_location, revision, language_code, force_update = args
    command_run = kwargs['command_run']

    store = modulestore()
    video = store.get_item(
        usage_key=BlockUsageLocator.from_string(video_location), revision=revision)
    edx_video_id = clean_video_id(video.edx_video_id)

    if not kwargs['commit']:
        # Dry run: report what would be migrated and stop before any writes.
        LOGGER.info(
            ('[%s] [run=%s] [video-transcript-will-be-migrated] '
             '[revision=%s] [video=%s] [edx_video_id=%s] [language_code=%s]'),
            MIGRATION_LOGS_PREFIX, command_run, revision, video_location,
            edx_video_id, language_code)
        return success

    LOGGER.info((
        '[%s] [run=%s] [transcripts-migration-process-started-for-video-transcript] [revision=%s] '
        '[video=%s] [edx_video_id=%s] [language_code=%s]'),
        MIGRATION_LOGS_PREFIX, command_run, revision, video_location,
        edx_video_id, language_code)

    try:
        # Pull the existing transcript out of the contentstore in SJSON form.
        transcripts_info = video.get_transcripts_info()
        transcript_content, _, _ = get_transcript_from_contentstore(
            video=video,
            language=language_code,
            output_format=Transcript.SJSON,
            transcripts_info=transcripts_info,
        )

        # If the video has no usable VAL record, mint an external one and
        # write the new id back onto the video block in the modulestore.
        is_video_valid = edx_video_id and is_video_available(edx_video_id)
        if not is_video_valid:
            edx_video_id = create_external_video('external-video')
            video.edx_video_id = edx_video_id

            # determine branch published/draft
            branch_setting = (ModuleStoreEnum.Branch.published_only
                              if revision == ModuleStoreEnum.RevisionOption.published_only
                              else ModuleStoreEnum.Branch.draft_preferred)
            with store.branch_setting(branch_setting):
                store.update_item(video, ModuleStoreEnum.UserID.mgmt_command)

            LOGGER.info(
                '[%s] [run=%s] [generated-edx-video-id] [revision=%s] [video=%s] [edx_video_id=%s] [language_code=%s]',
                MIGRATION_LOGS_PREFIX, command_run, revision, video_location,
                edx_video_id, language_code)

        # Persist the transcript content against the (possibly new) video id.
        save_transcript_to_storage(
            command_run=command_run,
            edx_video_id=edx_video_id,
            language_code=language_code,
            transcript_content=transcript_content,
            file_format=Transcript.SJSON,
            force_update=force_update,
        )
    except (NotFoundError, TranscriptsGenerationException, ValCannotCreateError):
        # Known, expected failure modes: log and report failure to the caller.
        LOGGER.exception((
            '[%s] [run=%s] [video-transcript-migration-failed-with-known-exc] [revision=%s] [video=%s] '
            '[edx_video_id=%s] [language_code=%s]'),
            MIGRATION_LOGS_PREFIX, command_run, revision, video_location,
            edx_video_id, language_code)
        return failure
    except Exception:
        # Anything unexpected: log with traceback and re-raise for celery.
        LOGGER.exception((
            '[%s] [run=%s] [video-transcript-migration-failed-with-unknown-exc] [revision=%s] '
            '[video=%s] [edx_video_id=%s] [language_code=%s]'),
            MIGRATION_LOGS_PREFIX, command_run, revision, video_location,
            edx_video_id, language_code)
        raise

    LOGGER.info((
        '[%s] [run=%s] [video-transcript-migration-succeeded-for-a-video] [revision=%s] '
        '[video=%s] [edx_video_id=%s] [language_code=%s]'),
        MIGRATION_LOGS_PREFIX, command_run, revision, video_location,
        edx_video_id, language_code)
    return success
def test_conditional_module(self):
    """Make sure that conditional module works"""
    print "Starting import"
    course = self.get_course('conditional_and_poll')
    print "Course: ", course
    print "id: ", course.id

    def inner_get_module(descriptor):
        # Accept either a descriptor or a locator; resolve locators first.
        if isinstance(descriptor, BlockUsageLocator):
            location = descriptor
            descriptor = self.modulestore.get_item(location, depth=None)
        # Attach a test runtime so the module can render and resolve its
        # children back through this same function.
        descriptor.xmodule_runtime = get_test_system()
        descriptor.xmodule_runtime.descriptor_runtime = descriptor._runtime  # pylint: disable=protected-access
        descriptor.xmodule_runtime.get_module = inner_get_module
        return descriptor

    # edx - HarvardX
    # cond_test - ER22x
    location = BlockUsageLocator(
        CourseLocator("HarvardX", "ER22x", "2013_Spring", deprecated=True),
        "conditional", "condone", deprecated=True
    )

    def replace_urls(text, staticfiles_prefix=None, replace_prefix='/static/', course_namespace=None):
        # No-op static URL rewriting for the test renderer.
        return text
    self.test_system.replace_urls = replace_urls
    self.test_system.get_module = inner_get_module

    module = inner_get_module(location)
    print "module: ", module
    print "module children: ", module.get_children()
    print "module display items (children): ", module.get_display_items()

    html = module.render(STUDENT_VIEW).content
    print "html type: ", type(html)
    print "html: ", html
    html_expect = module.xmodule_runtime.render_template(
        'conditional_ajax.html',
        {
            # Test ajax url is just usage-id / handler_name
            'ajax_url': '{}/xmodule_handler'.format(text_type(location)),
            'element_id': u'i4x-HarvardX-ER22x-conditional-condone',
            'depends': u'i4x-HarvardX-ER22x-problem-choiceprob'
        })
    self.assertEqual(html, html_expect)

    gdi = module.get_display_items()
    print "gdi=", gdi

    ajax = json.loads(module.handle_ajax('', ''))
    module.save()
    print "ajax: ", ajax
    fragments = ajax['fragments']
    # Condition not yet met: the gated content must stay hidden.
    self.assertFalse(any(['This is a secret' in item['content'] for item in fragments]))

    # Now change state of the capa problem to make it completed
    inner_module = inner_get_module(location.replace(category="problem", name='choiceprob'))
    inner_module.attempts = 1
    # Save our modifications to the underlying KeyValueStore so they can be persisted
    inner_module.save()

    ajax = json.loads(module.handle_ajax('', ''))
    module.save()
    print "post-attempt ajax: ", ajax
    fragments = ajax['fragments']
    # Condition now satisfied: the gated content should be revealed.
    self.assertTrue(any(['This is a secret' in item['content'] for item in fragments]))
import edx.analytics.tasks.util.opaque_key_util as opaque_key_util VALID_COURSE_KEY = CourseLocator(org='org', course='course_id', run='course_run') VALID_COURSE_ID = unicode(VALID_COURSE_KEY) VALID_LEGACY_COURSE_ID = "org/course_id/course_run" INVALID_LEGACY_COURSE_ID = "org:course_id:course_run" INVALID_NONASCII_LEGACY_COURSE_ID = u"org/course\ufffd_id/course_run" VALID_NONASCII_LEGACY_COURSE_ID = u"org/cours\u00e9_id/course_run" VALID_CCX_COURSE_ID = unicode( CCXLocator(org='org', course='course_id', run='course_run', ccx='13')) COURSE_ID_WITH_COLONS = unicode( CourseLocator(org='org', course='course:id', run='course:run')) VALID_BLOCK_ID = BlockUsageLocator(course_key=VALID_COURSE_KEY, block_type='video', block_id='Welcome') @ddt class CourseIdTest(TestCase): """ Verify that course_id filtering works correctly. """ @data( VALID_COURSE_ID, VALID_LEGACY_COURSE_ID, VALID_NONASCII_LEGACY_COURSE_ID, VALID_CCX_COURSE_ID, ) def test_valid_course_id(self, course_id):
def test_clean_for_html(self, pair):
    """Each raw input in ``pair`` must sanitize to its expected HTML-safe form."""
    raw, expected = pair
    cleaned = BlockUsageLocator.clean_for_html(raw)
    self.assertEqual(cleaned, expected)
class TestAsideKeys(TestCase):
    """Test of Aside keys."""

    # Round-trip every (key class, wrapped usage key, aside type) combination
    # through serialization and back.
    @ddt.data(*itertools.product([
        AsideUsageKeyV1,
        AsideUsageKeyV2,
    ], [
        Location.from_string('i4x://org/course/cat/name'),
        BlockUsageLocator(CourseLocator('org', 'course', 'run'), 'block_type', 'block_id'),
    ], ['aside', 'aside_b']))
    @ddt.unpack
    def test_usage_round_trip_deserialized(self, key_class, usage_key, aside_type):
        """Serializing an aside usage key and parsing it back yields an equal key."""
        key = key_class(usage_key, aside_type)
        serialized = text_type(key)
        deserialized = AsideUsageKey.from_string(serialized)
        self.assertEqual(key, deserialized)
        self.assertEqual(usage_key, key.usage_key, usage_key)
        self.assertEqual(usage_key, deserialized.usage_key)
        self.assertEqual(aside_type, key.aside_type)
        self.assertEqual(aside_type, deserialized.aside_type)

    # Known-good serialized forms (note the '$'-escaped v2 examples) must parse
    # and re-serialize to exactly the same string.
    @ddt.data(
        'aside-usage-v1:i4x://org/course/cat/name::aside',
        'aside-usage-v1:block-v1:org+course+cat+type@block_type+block@name::aside',
        'aside-usage-v2:lib-block-v1$:$:+-+branch@-+version@000000000000000000000000+type@-+block@-::0',
        'aside-usage-v2:i4x$://-/-/-/$:$:-::0',
        'aside-usage-v2:i4x$://-/-/-/$:$:$:-::0',
        'aside-usage-v2:i4x$://-/-/$:$:$:$:$:/-::0',
    )
    def test_usage_round_trip_serialized(self, aside_key):
        """Parsing a serialized aside usage key and re-serializing is lossless."""
        deserialized = AsideUsageKey.from_string(aside_key)
        serialized = text_type(deserialized)
        self.assertEqual(aside_key, serialized)

    @ddt.data(*itertools.product([
        AsideDefinitionKeyV1,
        AsideDefinitionKeyV2,
    ], [
        DefinitionLocator('block_type', 'abcd1234abcd1234abcd1234'),
    ], ['aside', 'aside_b']))
    @ddt.unpack
    def test_definition_round_trip_deserialized(self, key_class, definition_key, aside_type):
        """Serializing an aside definition key and parsing it back yields an equal key."""
        key = key_class(definition_key, aside_type)
        serialized = text_type(key)
        deserialized = AsideDefinitionKey.from_string(serialized)
        self.assertEqual(key, deserialized)
        self.assertEqual(definition_key, key.definition_key, definition_key)
        self.assertEqual(definition_key, deserialized.definition_key)
        self.assertEqual(aside_type, key.aside_type)
        self.assertEqual(aside_type, deserialized.aside_type)

    @ddt.data(
        'aside-def-v1:def-v1:abcd1234abcd1234abcd1234+type@block_type::aside',
        'aside-def-v2:def-v1$:abcd1234abcd1234abcd1234+type@block_type::aside')
    def test_definition_round_trip_serialized(self, aside_key):
        """Parsing a serialized aside definition key and re-serializing is lossless."""
        deserialized = AsideDefinitionKey.from_string(aside_key)
        serialized = text_type(deserialized)
        self.assertEqual(aside_key, serialized)

    # Each (attribute, new value) pair below should be settable via .replace()
    # on an aside usage key.
    @ddt.data(*itertools.product(
        [
            AsideUsageKeyV1,
            AsideUsageKeyV2,
        ],
        [
            ('aside_type', 'bside'),
            ('usage_key', BlockUsageLocator(CourseLocator('borg', 'horse', 'gun'), 'lock_type', 'lock_id')),
            ('block_id', 'lock_id'),
            ('block_type', 'lock_type'),
            # BlockUsageLocator can't `replace` a definition_key, so skip for now
            # ('definition_key', DefinitionLocator('block_type', 'abcd1234abcd1234abcd1234')),
            ('course_key', CourseLocator('borg', 'horse', 'gun')),
        ]))
    @ddt.unpack
    def test_usage_key_replace(self, key_class, attr_value):
        """``replace`` on an aside usage key updates the requested attribute."""
        attr, value = attr_value
        key = key_class(
            BlockUsageLocator(CourseLocator('org', 'course', 'run'), 'block_type', 'block_id'),
            'aside')
        new_key = key.replace(**{attr: value})
        self.assertEqual(getattr(new_key, attr), value)

    @ddt.data(*itertools.product([
        AsideDefinitionKeyV1,
        AsideDefinitionKeyV2,
    ], [
        ('aside_type', 'bside'),
        ('definition_key', DefinitionLocator('block_type', 'abcd1234abcd1234abcd1234')),
        ('block_type', 'lock_type'),
    ]))
    @ddt.unpack
    def test_definition_key_replace(self, key_class, attr_value):
        """``replace`` on an aside definition key updates the requested attribute."""
        attr, value = attr_value
        key = key_class(
            DefinitionLocator('block_type', 'abcd1234abcd1234abcd1234'),
            'aside')
        new_key = key.replace(**{attr: value})
        self.assertEqual(getattr(new_key, attr), value)
def test_html_id(self):
    """A non-deprecated locator's html_id is simply its block_id, unchanged."""
    locator = BlockUsageLocator(
        CourseLocator('org', 'course', 'run'),
        block_type='cat',
        block_id='name:more_name',
    )
    self.assertEqual(locator.html_id(), "name:more_name")
def get_usage_id(self, block_type, block_id):
    """
    Build a usage key for ``block_type``/``block_id`` inside the dummy course.
    """
    course_key = self.dummy_course_key
    return BlockUsageLocator(course_key, block_type=block_type, block_id=block_id)
def test_deprecated_html_id(self):
    """A deprecated locator's html_id uses the dash-joined i4x form (':' becomes '_')."""
    deprecated_course = CourseLocator(
        'org', 'course', 'run', version_guid='rev', deprecated=True
    )
    locator = BlockUsageLocator(
        deprecated_course,
        block_type='cat',
        block_id='name:more_name',
        deprecated=True,
    )
    self.assertEqual(locator.html_id(), "i4x-org-course-cat-name_more_name-rev")
class XBlockCacheModelTest(ModuleStoreTestCase):
    """ Test the XBlockCache model. """
    # Shared fixture keys.  Paths are stored as [usage_key_string, display_name]
    # pairs, matching the serialized `_paths` format asserted below.
    COURSE_KEY = CourseLocator(org='test', course='test', run='test')
    CHAPTER1_USAGE_KEY = BlockUsageLocator(COURSE_KEY, block_type='chapter', block_id='chapter1')
    SECTION1_USAGE_KEY = BlockUsageLocator(COURSE_KEY, block_type='section', block_id='section1')
    # NOTE(review): SECTION2 reuses block_id 'section1' (and VERTICAL1 uses
    # block_id 'sequential1') — looks like a copy-paste slip; confirm whether
    # these ids are intentional before changing them, since the assertions
    # below are self-consistent either way.
    SECTION2_USAGE_KEY = BlockUsageLocator(COURSE_KEY, block_type='section', block_id='section1')
    VERTICAL1_USAGE_KEY = BlockUsageLocator(COURSE_KEY, block_type='vertical', block_id='sequential1')
    PATH1 = [
        [str(CHAPTER1_USAGE_KEY), 'Chapter 1'],
        [str(SECTION1_USAGE_KEY), 'Section 1'],
    ]
    PATH2 = [
        [str(CHAPTER1_USAGE_KEY), 'Chapter 1'],
        [str(SECTION2_USAGE_KEY), 'Section 2'],
    ]

    def assert_xblock_cache_data(self, xblock_cache, data):
        """ Assert that the XBlockCache object values match. """
        assert xblock_cache.usage_key == data['usage_key']
        # course_key is derived from the usage key, not stored independently.
        assert xblock_cache.course_key == data['usage_key'].course_key
        assert xblock_cache.display_name == data['display_name']
        assert xblock_cache._paths == data['_paths']  # pylint: disable=protected-access
        # `paths` is the parsed view of the raw `_paths` data.
        assert xblock_cache.paths == [parse_path_data(path) for path in data['_paths']]

    # Each datum is a pair of scenarios; each scenario is
    # [create_data, additional_data_to_expect], where the latter holds the
    # values XBlockCache.create() is expected to fill in or preserve.
    @ddt.data(
        (
            [
                {'usage_key': VERTICAL1_USAGE_KEY, },
                {'display_name': '', '_paths': [], },
            ],
            [
                {'usage_key': VERTICAL1_USAGE_KEY, 'display_name': 'Vertical 5', '_paths': [PATH2]},
                {'_paths': []},
            ],
        ),
        (
            [
                {'usage_key': VERTICAL1_USAGE_KEY, 'display_name': 'Vertical 4', '_paths': [PATH1]},
                {},
            ],
            [
                {'usage_key': VERTICAL1_USAGE_KEY, 'display_name': 'Vertical 5', '_paths': [PATH2]},
                {'_paths': [PATH1]},
            ],
        ),
    )
    def test_create(self, data):
        """
        Test XBlockCache.create() constructs and updates objects correctly.
        """
        for create_data, additional_data_to_expect in data:
            xblock_cache = XBlockCache.create(create_data)
            create_data.update(additional_data_to_expect)
            self.assert_xblock_cache_data(xblock_cache, create_data)

    @ddt.data(
        ([], [PATH1]),
        ([PATH1, PATH2], [PATH1]),
        ([PATH1], []),
    )
    @ddt.unpack
    def test_paths(self, original_paths, updated_paths):
        """Assigning `paths` persists the raw `_paths` form to the database on save."""
        xblock_cache = XBlockCache.create({
            'usage_key': self.VERTICAL1_USAGE_KEY,
            'display_name': 'The end.',
            '_paths': original_paths,
        })
        assert xblock_cache.paths == [parse_path_data(path) for path in original_paths]

        xblock_cache.paths = [parse_path_data(path) for path in updated_paths]
        xblock_cache.save()

        # Reload from the database to verify the round trip.
        xblock_cache = XBlockCache.objects.get(id=xblock_cache.id)
        assert xblock_cache._paths == updated_paths  # pylint: disable=protected-access
        assert xblock_cache.paths == [parse_path_data(path) for path in updated_paths]
def chart_update(request):
    """
    AJAX endpoint returning the JSON payload for one learning-analytics chart.

    GET parameters: 'user_id' (falls back to the requesting user when empty),
    'chart' (an integer matched against VISUALIZATIONS_ID to pick the
    visualization), 'course_id', and — for the per-video charts — 'video'
    (a serialized usage locator).

    Returns an HttpResponse with the selected chart's JSON, or the default
    {'success': False} payload when the request isn't a GET or the chart id
    is unrecognized.
    """
    results = {'success' : False}
    chart_info_json = dumps(results)
    if request.method == u'GET':
        GET = request.GET
        user_id = GET[u'user_id']
        # An empty user_id means "the logged-in user".
        user_id = request.user if user_id == "" else user_id
        chart = int(GET[u'chart'])
        course_key = get_course_key(GET[u'course_id'])
        if chart == VISUALIZATIONS_ID['LA_chapter_time']:
            # Time spent per chapter.
            cs, st = get_DB_course_spent_time(course_key, student_id=user_id)
            student_spent_time = chapter_time_to_js(cs, st)
            chart_info_json = dumps(student_spent_time)
        elif chart == VISUALIZATIONS_ID['LA_course_accesses']:
            # Access counts per course section.
            cs, sa = get_DB_course_section_accesses(course_key, student_id=user_id)
            student_course_accesses = course_accesses_to_js(cs, sa)
            chart_info_json = dumps(student_course_accesses)
        elif chart == VISUALIZATIONS_ID['LA_student_grades']:
            students_grades = get_DB_student_grades(course_key, student_id=user_id)
            chart_info_json = dumps(students_grades)
        elif chart == VISUALIZATIONS_ID['LA_time_schedule']:
            student_time_schedule = get_DB_time_schedule(course_key, student_id=user_id)
            chart_info_json = dumps(student_time_schedule)
        elif chart == VISUALIZATIONS_ID['LA_vid_prob_prog']:
            student_prob_vid_progress = get_DB_course_video_problem_progress(course_key, student_id=user_id)
            chart_info_json = dumps(student_prob_vid_progress)
        elif chart == VISUALIZATIONS_ID['LA_video_progress']:
            # Video progress visualization. Video percentage seen total and non-overlapped.
            course = get_course_with_access(user_id, action='load', course_key=course_key, depth=None, check_if_enrolled=False)
            video_descriptors = videos_problems_in(course)[0]
            video_durations = get_info_videos_descriptors(video_descriptors)[2]
            video_names, avg_video_time, video_percentages = get_module_consumption(user_id, course_key, 'video', 'video_progress')
            if avg_video_time != []:
                # Convert average watch times to integer percentages of each
                # video's duration.
                all_video_time_percent = map(truediv, avg_video_time, video_durations)
                all_video_time_percent = [int(round(x*100,0)) for x in all_video_time_percent]
            else:
                all_video_time_percent = avg_video_time
            column_headers = ['Video', 'Different video time', 'Total video time']
            chart_info_json = ready_for_arraytodatatable(column_headers, video_names, video_percentages, all_video_time_percent)
        elif chart == VISUALIZATIONS_ID['LA_video_time']:
            # Time spent on every video resource
            video_names, all_video_time = get_module_consumption(user_id, course_key, 'video', 'total_time_vid_prob')[0:2]
            column_headers = ['Video', 'Time watched']
            chart_info_json = ready_for_arraytodatatable(column_headers, video_names, all_video_time)
        elif chart == VISUALIZATIONS_ID['LA_problem_time']:
            # Time spent on every problem resource
            problem_names, time_x_problem = get_module_consumption(user_id, course_key, 'problem', 'total_time_vid_prob')[0:2]
            column_headers = ['Problem', 'Time on problem']
            chart_info_json = ready_for_arraytodatatable(column_headers, problem_names, time_x_problem)
        elif chart == VISUALIZATIONS_ID['LA_repetition_video_interval']:
            # Repetitions per video intervals
            video_name = GET[u'video']
            # Convert the serialized locator to a deprecated Location id
            # (uses private locator APIs — predates the public converters).
            video_id = BlockUsageLocator._from_string(video_name)
            video_id = Location.from_deprecated_string(video_id._to_deprecated_string())
            chart_info_json = get_user_video_intervals(user_id, video_id)
        elif chart == VISUALIZATIONS_ID['LA_daily_time']:
            # Daily time spent on video and/or problem resources
            video_days, video_daily_time = get_daily_consumption(user_id, course_key, 'video')
            problem_days, problem_daily_time = get_daily_consumption(user_id, course_key, 'problem')
            chart_info_json = join_video_problem_time(video_days, video_daily_time, problem_days, problem_daily_time)
        elif chart == VISUALIZATIONS_ID['LA_video_events']:
            # Video events dispersion within video length
            video_name = GET[u'video']
            video_id = BlockUsageLocator._from_string(video_name)
            video_id = Location.from_deprecated_string(video_id._to_deprecated_string())
            chart_info_json = get_video_events_info(user_id, video_id)

    return HttpResponse(chart_info_json, mimetype='application/json')
def make_course_usage_key(self, course_key):
    """
    Build the usage key identifying the course block itself.

    The returned :class:`~opaque_keys.edx.keys.UsageKey` lives in the supplied
    ``course_key`` and uses the course's run as its block id, which is a valid
    usage key for this modulestore.
    """
    block_id = course_key.run
    return BlockUsageLocator(course_key, 'course', block_id)
def test_conditional_module(self):
    """Make sure that conditional module works"""
    print("Starting import")
    course = self.get_course('conditional_and_poll')
    print("Course: ", course)
    print("id: ", course.id)

    def inner_get_module(descriptor):
        # Accept either a descriptor or a locator; resolve locators first.
        if isinstance(descriptor, BlockUsageLocator):
            location = descriptor
            descriptor = self.modulestore.get_item(location, depth=None)
        # Attach a test runtime so the module can render and resolve its
        # children back through this same function.
        descriptor.xmodule_runtime = get_test_system()
        descriptor.xmodule_runtime.descriptor_runtime = descriptor._runtime  # pylint: disable=protected-access
        descriptor.xmodule_runtime.get_module = inner_get_module
        return descriptor

    # edx - HarvardX
    # cond_test - ER22x
    location = BlockUsageLocator(
        CourseLocator("HarvardX", "ER22x", "2013_Spring", deprecated=True),
        "conditional", "condone", deprecated=True
    )

    def replace_urls(text, staticfiles_prefix=None, replace_prefix='/static/', course_namespace=None):
        # No-op static URL rewriting for the test renderer.
        return text
    self.test_system.replace_urls = replace_urls
    self.test_system.get_module = inner_get_module

    module = inner_get_module(location)
    print("module: ", module)
    print("module children: ", module.get_children())
    print("module display items (children): ", module.get_display_items())

    html = module.render(STUDENT_VIEW).content
    print("html type: ", type(html))
    print("html: ", html)
    html_expect = module.xmodule_runtime.render_template(
        'conditional_ajax.html',
        {
            # Test ajax url is just usage-id / handler_name
            'ajax_url': '{}/xmodule_handler'.format(text_type(location)),
            'element_id': u'i4x-HarvardX-ER22x-conditional-condone',
            'depends': u'i4x-HarvardX-ER22x-problem-choiceprob'
        }
    )
    self.assertEqual(html, html_expect)

    gdi = module.get_display_items()
    print("gdi=", gdi)

    ajax = json.loads(module.handle_ajax('', ''))
    module.save()
    print("ajax: ", ajax)
    fragments = ajax['fragments']
    # Condition not yet met: the gated content must stay hidden.
    self.assertFalse(any(['This is a secret' in item['content'] for item in fragments]))

    # Now change state of the capa problem to make it completed
    inner_module = inner_get_module(location.replace(category="problem", name='choiceprob'))
    inner_module.attempts = 1
    # Save our modifications to the underlying KeyValueStore so they can be persisted
    inner_module.save()

    ajax = json.loads(module.handle_ajax('', ''))
    module.save()
    print("post-attempt ajax: ", ajax)
    fragments = ajax['fragments']
    # Condition now satisfied: the gated content should be revealed.
    self.assertTrue(any(['This is a secret' in item['content'] for item in fragments]))