def _query_children_for_cache_children(self, course_key, items):
    """
    Fetch the raw item documents for ``items`` (deprecated location strings),
    substituting the DRAFT version of each item for its PUBLISHED version when
    the current branch setting prefers drafts.

    Returns a list of raw mongo documents (one per requested child).
    """
    # first get non-draft in a round-trip
    to_process_non_drafts = super(DraftModuleStore, self)._query_children_for_cache_children(course_key, items)
    # index the published docs by Location so drafts can replace them in place
    to_process_dict = {}
    for non_draft in to_process_non_drafts:
        to_process_dict[Location._from_deprecated_son(non_draft["_id"], course_key.run)] = non_draft
    if self.branch_setting_func() == ModuleStoreEnum.Branch.draft_preferred:
        # now query all draft content in another round-trip
        query = []
        for item in items:
            item_usage_key = course_key.make_usage_key_from_deprecated_string(item)
            # DIRECT_ONLY categories never have drafts, so skip them
            if item_usage_key.category not in DIRECT_ONLY_CATEGORIES:
                query.append(as_draft(item_usage_key).to_deprecated_son())
        if query:
            query = {'_id': {'$in': query}}
            to_process_drafts = list(self.collection.find(query))
            # now we have to go through all drafts and replace the non-draft
            # with the draft. This is because the semantics of the DraftStore is to
            # always return the draft - if available
            for draft in to_process_drafts:
                draft_loc = Location._from_deprecated_son(draft["_id"], course_key.run)
                draft_as_non_draft_loc = as_published(draft_loc)
                # does non-draft exist in the collection
                # if so, replace it
                if draft_as_non_draft_loc in to_process_dict:
                    to_process_dict[draft_as_non_draft_loc] = draft
    # convert the dict - which is used for look ups - back into a list
    queried_children = to_process_dict.values()
    return queried_children
def test_jumpto_id_invalid_location(self):
    """A jump_to_id request for a block that does not exist returns a 404."""
    missing_location = Location("edX", "toy", "NoSuchPlace", None, None, None)
    url = "/courses/{course}/jump_to_id/{block}".format(
        course=self.course_key.to_deprecated_string(),
        block=missing_location.to_deprecated_string(),
    )
    self.assertEqual(self.client.get(url).status_code, 404)
def _query_children_for_cache_children(self, course_key, items):
    """
    Fetch the raw item documents for ``items`` (deprecated location strings),
    always preferring the DRAFT version of a child over its PUBLISHED version.

    Returns a list of raw mongo documents (one per requested child).
    """
    # first get non-draft in a round-trip
    to_process_non_drafts = super(DraftModuleStore, self)._query_children_for_cache_children(course_key, items)
    # index the published docs by Location so drafts can replace them in place
    to_process_dict = {}
    for non_draft in to_process_non_drafts:
        to_process_dict[Location._from_deprecated_son(non_draft["_id"], course_key.run)] = non_draft
    # now query all draft content in another round-trip
    query = {
        '_id': {'$in': [
            as_draft(course_key.make_usage_key_from_deprecated_string(item)).to_deprecated_son()
            for item in items
        ]}
    }
    to_process_drafts = list(self.collection.find(query))
    # now we have to go through all drafts and replace the non-draft
    # with the draft. This is because the semantics of the DraftStore is to
    # always return the draft - if available
    for draft in to_process_drafts:
        draft_loc = Location._from_deprecated_son(draft["_id"], course_key.run)
        # strip the revision to find the matching published entry
        draft_as_non_draft_loc = draft_loc.replace(revision=None)
        # does non-draft exist in the collection
        # if so, replace it
        if draft_as_non_draft_loc in to_process_dict:
            to_process_dict[draft_as_non_draft_loc] = draft
    # convert the dict - which is used for look ups - back into a list
    queried_children = to_process_dict.values()
    return queried_children
def create(system, source_is_error_module=False):
    """
    return a dict of modules: the conditional with a single source and a single child.
    Keys are 'cond_module', 'source_module', and 'child_module'.

    if the source_is_error_module flag is set, create a real ErrorModule for the source.
    """
    descriptor_system = get_test_descriptor_system()

    # construct source descriptor and module:
    source_location = Location("edX", "conditional_test", "test_run", "problem", "SampleProblem", None)
    if source_is_error_module:
        # Make an error descriptor and module
        source_descriptor = NonStaffErrorDescriptor.from_xml(
            "some random xml data",
            system,
            id_generator=CourseLocationManager(source_location.course_key),
            error_msg="random error message",
        )
    else:
        source_descriptor = Mock(name="source_descriptor")
        source_descriptor.location = source_location
    # runtime/render are needed on both the mock and the error descriptor
    source_descriptor.runtime = descriptor_system
    source_descriptor.render = lambda view, context=None: descriptor_system.render(source_descriptor, view, context)

    # construct other descriptors:
    child_descriptor = Mock(name="child_descriptor")
    child_descriptor._xmodule.student_view.return_value.content = u"<p>This is a secret</p>"
    child_descriptor.student_view = child_descriptor._xmodule.student_view
    child_descriptor.displayable_items.return_value = [child_descriptor]
    child_descriptor.runtime = descriptor_system
    child_descriptor.xmodule_runtime = get_test_system()
    child_descriptor.render = lambda view, context=None: descriptor_system.render(child_descriptor, view, context)
    child_descriptor.location = source_location.replace(category="html", name="child")

    # let the descriptor system resolve the two known locations
    descriptor_system.load_item = {
        child_descriptor.location: child_descriptor,
        source_location: source_descriptor,
    }.get

    system.descriptor_runtime = descriptor_system

    # construct conditional module:
    cond_location = Location("edX", "conditional_test", "test_run", "conditional", "SampleConditional", None)
    field_data = DictFieldData(
        {"data": "<conditional/>", "xml_attributes": {"attempted": "true"}, "children": [child_descriptor.location]}
    )
    cond_descriptor = ConditionalDescriptor(
        descriptor_system, field_data, ScopeIds(None, None, cond_location, cond_location)
    )
    cond_descriptor.xmodule_runtime = system
    system.get_module = lambda desc: desc
    cond_descriptor.get_required_module_descriptors = Mock(return_value=[source_descriptor])

    # return dict:
    return {"cond_module": cond_descriptor, "source_module": source_descriptor, "child_module": child_descriptor}
def get_parent_location(self, location, revision=ModuleStoreEnum.RevisionOption.published_only, **kwargs):
    '''
    Find the location that is the parent of this location in this course.

    Returns: version agnostic location (revision always None) as per the rest of mongo.

    Args:
        revision:
            ModuleStoreEnum.RevisionOption.published_only
                - return only the PUBLISHED parent if it exists, else returns None
            ModuleStoreEnum.RevisionOption.draft_preferred
                - return either the DRAFT or PUBLISHED parent, preferring DRAFT, if parent(s) exists,
                  else returns None
    '''
    # callers must pass a revision-less location; revision is selected via the keyword
    assert location.revision is None
    assert revision == ModuleStoreEnum.RevisionOption.published_only \
        or revision == ModuleStoreEnum.RevisionOption.draft_preferred

    # create a query with tag, org, course, and the children field set to the given location
    query = self._course_key_to_son(location.course_key)
    query['definition.children'] = location.to_deprecated_string()

    # if only looking for the PUBLISHED parent, set the revision in the query to None
    if revision == ModuleStoreEnum.RevisionOption.published_only:
        query['_id.revision'] = MongoRevisionKey.published

    # query the collection, sorting by DRAFT first
    parents = self.collection.find(query, {'_id': True}, sort=[SORT_REVISION_FAVOR_DRAFT])

    if parents.count() == 0:
        # no parents were found
        return None

    if revision == ModuleStoreEnum.RevisionOption.published_only:
        if parents.count() > 1:
            # should never have multiple PUBLISHED parents
            raise ReferentialIntegrityError(
                u"{} parents claim {}".format(parents.count(), location)
            )
        else:
            # return the single PUBLISHED parent
            return Location._from_deprecated_son(parents[0]['_id'], location.course_key.run)
    else:
        # there could be 2 different parents if
        #   (1) the draft item was moved or
        #   (2) the parent itself has 2 versions: DRAFT and PUBLISHED
        # since we sorted by SORT_REVISION_FAVOR_DRAFT, the 0'th parent is the one we want
        found_id = parents[0]['_id']
        # don't disclose revision outside modulestore
        return as_published(Location._from_deprecated_son(found_id, location.course_key.run))
def delete_draft_only(root_location):
    """
    Helper function that calls delete on the specified location if a draft version of the item exists.
    If no draft exists, this function recursively calls itself on the children of the item.
    """
    # NOTE: `self`, `user_id`, `as_draft`, etc. are free variables captured
    # from the enclosing method scope.
    query = root_location.to_deprecated_son(prefix='_id.')
    # match both revisions (draft and published) of this block
    del query['_id.revision']
    versions_found = self.collection.find(
        query, {'_id': True, 'definition.children': True}, sort=[SORT_REVISION_FAVOR_DRAFT]
    )
    # If 2 versions versions exist, we can assume one is a published version. Go ahead and do the delete
    # of the draft version.
    if versions_found.count() > 1:
        # Moving a child from published parent creates a draft of the parent and moved child.
        published_version = [
            version
            for version in versions_found
            if version.get('_id').get('revision') != MongoRevisionKey.draft
        ]
        if len(published_version) > 0:
            # This change makes sure that parents are updated too i.e. an item will have only one parent.
            self.update_parent_if_moved(root_location, published_version[0], delete_draft_only, user_id)
        self._delete_subtree(root_location, [as_draft], draft_only=True)
    elif versions_found.count() == 1:
        # Since this method cannot be called on something in DIRECT_ONLY_CATEGORIES and we call
        # delete_subtree as soon as we find an item with a draft version, if there is only 1 version
        # it must be published (since adding a child to a published item creates a draft of the parent).
        item = versions_found[0]
        assert item.get('_id').get('revision') != MongoRevisionKey.draft
        for child in item.get('definition', {}).get('children', []):
            child_loc = Location.from_deprecated_string(child)
            delete_draft_only(child_loc)
def convert_item(item, to_be_deleted): """ Convert the subtree """ # collect the children's ids for future processing next_tier = [] for child in item.get('definition', {}).get('children', []): child_loc = Location.from_deprecated_string(child) next_tier.append(child_loc.to_deprecated_son()) # insert a new DRAFT version of the item item['_id']['revision'] = MongoRevisionKey.draft # ensure keys are in fixed and right order before inserting item['_id'] = self._id_dict_to_son(item['_id']) try: self.collection.insert(item) except pymongo.errors.DuplicateKeyError: # prevent re-creation of DRAFT versions, unless explicitly requested to ignore if not ignore_if_draft: raise DuplicateItemError(item['_id'], self, 'collection') # delete the old PUBLISHED version if requested if delete_published: item['_id']['revision'] = MongoRevisionKey.published to_be_deleted.append(item['_id']) return next_tier
def _create(cls, target_class, **kwargs):
    """
    Create a course in the modulestore under the draft-preferred branch.

    Factory attributes arrive via ``**kwargs``; the bookkeeping entries
    (org, course/number, modulestore, user_id) are popped off first, and
    every remaining kwarg is applied to the new course as an attribute.
    """
    org = kwargs.pop('org', None)
    # consume 'number' unconditionally, then let an explicit 'course' kwarg win
    number_fallback = kwargs.pop('number', None)
    number = kwargs.pop('course', number_fallback)
    store = kwargs.pop('modulestore')
    name = kwargs.get('name', kwargs.get('run', Location.clean(kwargs.get('display_name'))))
    run = kwargs.get('run', name)
    user_id = kwargs.pop('user_id', ModuleStoreEnum.UserID.test)

    course_location = Location(org, number, run, 'course', name)

    with store.branch_setting(ModuleStoreEnum.Branch.draft_preferred):
        # write the skeleton course document to the mongo datastore
        course = store.create_xmodule(course_location, metadata=kwargs.get('metadata', None))

        # every remaining kwarg becomes an attribute on the course
        for attr, value in kwargs.iteritems():
            setattr(course, attr, value)
        course.save()

        # persist the attribute values we just assigned
        store.update_item(course, user_id)

        return course
def _get_raw_parent_locations(self, location, key_revision):
    """
    Get the parents but don't unset the revision in their locations.

    Intended for internal use but not restricted.

    Args:
        location (UsageKey): assumes the location's revision is None; so, uses revision keyword solely
        key_revision:
            MongoRevisionKey.draft - return only the draft parent
            MongoRevisionKey.published - return only the published parent
            ModuleStoreEnum.RevisionOption.all - return both draft and published parents
    """
    _verify_revision_is_published(location)

    # find every item in the course that lists this location as a child,
    # drafts sorting ahead of published versions
    query = self._course_key_to_son(location.course_key)
    query['definition.children'] = location.to_deprecated_string()
    parents = self.collection.find(query, {'_id': True}, sort=[SORT_REVISION_FAVOR_DRAFT])

    matching = []
    for parent in parents:
        parent_id = parent['_id']
        wanted = (
            # all versions of the parent were requested
            key_revision == ModuleStoreEnum.RevisionOption.all
            # direct-only parents match regardless of the requested revision
            or parent_id['category'] in DIRECT_ONLY_CATEGORIES
            # otherwise the parent's revision must match the requested one
            or parent_id['revision'] == key_revision
        )
        if wanted:
            matching.append(Location._from_deprecated_son(parent_id, location.course_key.run))
    return matching
def _load_item(self, course_key, item, data_cache, apply_cached_metadata=True):
    """
    Load an XModuleDescriptor from item, using the children stored in data_cache
    """
    location = Location._from_deprecated_son(item['location'], course_key.run)
    # NOTE(review): `item` is subscripted like a dict above, so this getattr()
    # always yields the default — presumably an intentional fallback; confirm.
    data_dir = getattr(item, 'data_dir', location.course)
    root = self.fs_root / data_dir
    # create directory if it doesn't exist
    root.makedirs_p()
    resource_fs = OSFS(root)

    cached_metadata = self._get_cached_metadata_inheritance_tree(course_key) if apply_cached_metadata else {}
    services = {"i18n": self.i18n_service} if self.i18n_service else {}

    descriptor_system = CachingDescriptorSystem(
        modulestore=self,
        course_key=course_key,
        module_data=data_cache,
        default_class=self.default_class,
        resources_fs=resource_fs,
        error_tracker=self.error_tracker,
        render_template=self.render_template,
        cached_metadata=cached_metadata,
        mixins=self.xblock_mixins,
        select=self.xblock_select,
        services=services,
    )
    return descriptor_system.load_item(location)
def _create(cls, target_class, **kwargs): """ Create and return a new course. For performance reasons, we do not emit signals during this process, but if you need signals to run, you can pass `emit_signals=True` to this method. """ # All class attributes (from this class and base classes) are # passed in via **kwargs. However, some of those aren't actual field values, # so pop those off for use separately org = kwargs.pop('org', None) # because the factory provides a default 'number' arg, prefer the non-defaulted 'course' arg if any number = kwargs.pop('course', kwargs.pop('number', None)) store = kwargs.pop('modulestore') name = kwargs.get('name', kwargs.get('run', Location.clean(kwargs.get('display_name')))) run = kwargs.pop('run', name) user_id = kwargs.pop('user_id', ModuleStoreEnum.UserID.test) emit_signals = kwargs.pop('emit_signals', False) # Pass the metadata just as field=value pairs kwargs.update(kwargs.pop('metadata', {})) default_store_override = kwargs.pop('default_store', None) with store.branch_setting(ModuleStoreEnum.Branch.draft_preferred): course_key = store.make_course_key(org, number, run) with store.bulk_operations(course_key, emit_signals=emit_signals): if default_store_override is not None: with store.default_store(default_store_override): new_course = store.create_course(org, number, run, user_id, fields=kwargs) else: new_course = store.create_course(org, number, run, user_id, fields=kwargs) last_course.loc = new_course.location return new_course
def _create(cls, target_class, **kwargs):
    """
    Create a course via the modulestore under the draft-preferred branch.

    Factory attributes arrive in ``**kwargs``; the bookkeeping entries
    (org, course/number, modulestore, user_id, default_store) are popped off
    and everything left over is handed to ``create_course`` as field values.
    """
    org = kwargs.pop('org', None)
    # always consume 'number'; an explicit 'course' kwarg wins over it
    number_fallback = kwargs.pop('number', None)
    number = kwargs.pop('course', number_fallback)
    store = kwargs.pop('modulestore')
    name = kwargs.get('name', kwargs.get('run', Location.clean(kwargs.get('display_name'))))
    run = kwargs.get('run', name)
    user_id = kwargs.pop('user_id', ModuleStoreEnum.UserID.test)

    # metadata entries are flattened into plain field=value pairs
    kwargs.update(kwargs.pop('metadata', {}))
    store_override = kwargs.pop('default_store', None)

    with store.branch_setting(ModuleStoreEnum.Branch.draft_preferred):
        if store_override is None:
            course = store.create_course(org, number, run, user_id, fields=kwargs)
        else:
            with store.default_store(store_override):
                course = store.create_course(org, number, run, user_id, fields=kwargs)

        # record the created course's location for later lookups
        last_course.loc = course.location
        return course
def get_students_problem_grades(request, csv=False):
    """
    Get a list of students and grades for a particular problem.
    If 'csv' is False, returns a dict of student's name: username: grade: percent.

    If 'csv' is True, returns a header array, and an array of arrays in the format:
    student names, usernames, grades, percents for CSV download.
    """
    module_state_key = Location.from_deprecated_string(request.GET.get('module_id'))
    # NOTE(review): the `csv` parameter is immediately shadowed by the
    # query-string value; the keyword argument is effectively unused.
    csv = request.GET.get('csv')

    # Query for "problem grades" students
    students = models.StudentModule.objects.select_related('student').filter(
        module_state_key=module_state_key,
        module_type__exact='problem',
        grade__isnull=False,
    ).values('student__username', 'student__profile__name', 'grade', 'max_grade').order_by('student__profile__name')

    results = []
    if not csv:
        # Restrict screen list length
        # Adding 1 so can tell if list is larger than MAX_SCREEN_LIST_LENGTH
        # without doing another select.
        for student in students[0:MAX_SCREEN_LIST_LENGTH + 1]:
            student_dict = {
                'name': student['student__profile__name'],
                'username': student['student__username'],
                'grade': student['grade'],
            }

            student_dict['percent'] = 0
            if student['max_grade'] > 0:
                student_dict['percent'] = round(student['grade'] * 100 / student['max_grade'])
            results.append(student_dict)

        max_exceeded = False
        if len(results) > MAX_SCREEN_LIST_LENGTH:
            # Remove the last item so list length is exactly MAX_SCREEN_LIST_LENGTH
            del results[-1]
            max_exceeded = True

        response_payload = {
            'results': results,
            'max_exceeded': max_exceeded,
        }
        return JsonResponse(response_payload)
    else:
        tooltip = request.GET.get('tooltip')
        # the filename is the tooltip text up to its last " - " separator
        filename = sanitize_filename(tooltip[:tooltip.rfind(' - ')])

        header = [_("Name").encode('utf-8'), _("Username").encode('utf-8'), _("Grade").encode('utf-8'), _("Percent").encode('utf-8')]
        for student in students:
            percent = 0
            if student['max_grade'] > 0:
                percent = round(student['grade'] * 100 / student['max_grade'])
            results.append([student['student__profile__name'], student['student__username'], student['grade'], percent])

        response = create_csv_response(filename, header, results)
        return response
def _get_course_child(request, user, course_key, content_id):
    """
    Return a (descriptor, usage_key, module) triple for ``content_id``, with
    ``None`` entries wherever the id cannot be parsed or the item is missing.
    """
    def _parse_content_id(raw_id):
        # Try the modern usage-key format first, then the deprecated one.
        try:
            return UsageKey.from_string(raw_id)
        except InvalidKeyError:
            pass
        try:
            return Location.from_deprecated_string(raw_id)
        except (InvalidKeyError, InvalidLocationError):
            return None

    descriptor = None
    module = None
    usage_key = _parse_content_id(content_id)
    if usage_key:
        descriptor = modulestore().get_item(usage_key)
        if descriptor:
            cache = FieldDataCache([descriptor], course_key, user)
            module = module_render.get_module(user, request, usage_key, cache, course_key)
    return descriptor, usage_key, module
def _cache_children(self, course_key, items, depth=0):
    """
    Returns a dictionary mapping Location -> item data, populated with json data
    for all descendents of items up to the specified depth.
    (0 = no descendents, 1 = children, 2 = grandchildren, etc)
    If depth is None, will load all the children.
    This will make a number of queries that is linear in the depth.
    """
    data = {}
    to_process = list(items)
    # BUG FIX: the original condition `to_process and depth is None or depth >= 0`
    # parsed as `(to_process and depth is None) or depth >= 0`, so with a positive
    # depth the loop kept spinning uselessly after the work list emptied. The
    # parenthesized form below exits as soon as there is nothing left to process.
    while to_process and (depth is None or depth >= 0):
        children = []
        for item in to_process:
            self._clean_item_data(item)
            children.extend(item.get('definition', {}).get('children', []))
            data[Location._from_deprecated_son(item['location'], course_key.run)] = item

        if depth == 0:
            break

        # Load all children by id. See
        # http://www.mongodb.org/display/DOCS/Advanced+Queries#AdvancedQueries-%24or
        # for or-query syntax
        to_process = []
        if children:
            to_process = self._query_children_for_cache_children(course_key, children)

        # If depth is None, then we just recurse until we hit all the descendents
        if depth is not None:
            depth -= 1

    return data
def _create(cls, target_class, **kwargs):
    """
    Build a course from factory attributes and persist it to the modulestore.

    The bookkeeping kwargs ("org", "course"/"number", "modulestore") are popped
    off; every remaining kwarg is set as an attribute on the created course.
    """
    org = kwargs.pop("org", None)
    # 'number' is always consumed; an explicit 'course' kwarg takes precedence
    number_fallback = kwargs.pop("number", None)
    number = kwargs.pop("course", number_fallback)
    store = kwargs.pop("modulestore")
    name = kwargs.get("name", kwargs.get("run", Location.clean(kwargs.get("display_name"))))
    run = kwargs.get("run", name)

    course_location = Location(org, number, run, "course", name)

    # write the initial course document to the mongo datastore
    course = store.create_xmodule(course_location, metadata=kwargs.get("metadata", None))

    # the remaining kwargs become attributes on the course
    for attr, value in kwargs.iteritems():
        setattr(course, attr, value)
    course.save()

    # persist the attribute assignments
    store.update_item(course)

    return course
def problem_location(problem_url_name):
    """
    Create an internal location for a test problem.
    """
    # Plain names become usage keys in the test course; fully-qualified
    # "i4x:" strings are parsed as deprecated locations.
    if "i4x:" not in problem_url_name:
        return TEST_COURSE_KEY.make_usage_key('problem', problem_url_name)
    return Location.from_deprecated_string(problem_url_name)
def test_name_collision(self):
    """
    Test dwim translation when the old name was not unique
    """
    org = "myorg"
    course = "another_course"
    name = "running_again"
    # the course and (later) a chapter/problem all share the same name
    course_loc = Location(org, course, name, "course", name)
    course_locator = loc_mapper().translate_location(course_loc, add_entry_if_missing=True)
    self.assertEqual(course_loc, loc_mapper().translate_locator_to_location(course_locator))

    chapter_loc = course_loc.replace(category="chapter")
    chapter_locator = loc_mapper().translate_location(chapter_loc, add_entry_if_missing=True)
    # the earlier course mapping must survive the chapter's insertion
    self.assertEqual(course_loc, loc_mapper().translate_locator_to_location(course_locator))
    self.assertEqual(chapter_loc, loc_mapper().translate_locator_to_location(chapter_locator))

    # and a non-existent one w/o add
    problem_loc = course_loc.replace(category="problem")
    with self.assertRaises(ItemNotFoundError):
        loc_mapper().translate_location(problem_loc, add_entry_if_missing=False)
def get_courses_for_wiki(self, wiki_slug):
    """
    Return the list of courses which use this wiki_slug
    :param wiki_slug: the course wiki root slug
    :return: list of course locations
    """
    query = {'_id.category': 'course', 'definition.data.wiki_slug': wiki_slug}
    locations = []
    for course in self.collection.find(query):
        course_son = course['_id']
        # the course's run == its name. It's the only xblock for which that's necessarily true.
        locations.append(Location._from_deprecated_son(course_son, course_son['name']))
    return locations
def get_course_child_key(content_id):
    """Parse ``content_id`` into a usage key, or return None if unparseable."""
    # Prefer the modern usage-key format; fall back to the deprecated one.
    try:
        return UsageKey.from_string(content_id)
    except InvalidKeyError:
        pass
    try:
        return Location.from_deprecated_string(content_id)
    except (InvalidLocationError, InvalidKeyError):
        return None
def to_python(self, location):
    """
    Deserialize to a UsageKey instance: for now it's a location missing the run
    """
    assert isinstance(location, (NoneType, basestring, UsageKey))
    if location == '':
        # the empty string encodes "no key"
        return None
    if not isinstance(location, basestring):
        # already a UsageKey (or None) - hand it back untouched
        return location
    cleaned = super(UsageKeyField, self).to_python(location)
    return Location.from_string(cleaned)
def get_students_opened_subsection(request, csv=False):
    """
    Get a list of students that opened a particular subsection.
    If 'csv' is False, returns a dict of student's name: username.

    If 'csv' is True, returns a header array, and an array of arrays in the format:
    student names, usernames for CSV download.
    """
    module_state_key = Location.from_deprecated_string(request.GET.get('module_id'))
    course_id = request.GET.get('course_id')
    # NOTE(review): the `csv` parameter is immediately shadowed by the
    # query-string value; the keyword argument is effectively unused.
    csv = request.GET.get('csv')
    course_key = locator.CourseLocator.from_string(course_id)
    non_student_list = get_non_student_list(course_key)

    # Query for "opened a subsection" students
    students = models.StudentModule.objects.select_related('student').filter(
        module_state_key__exact=module_state_key,
        module_type__exact='sequential',
    ).exclude(student_id__in=non_student_list).values('student__id', 'student__username', 'student__profile__name').order_by('student__profile__name')

    results = []
    if not csv:
        # Restrict screen list length
        # Adding 1 so can tell if list is larger than MAX_SCREEN_LIST_LENGTH
        # without doing another select.
        for student in students[0:MAX_SCREEN_LIST_LENGTH + 1]:
            results.append({
                'name': student['student__profile__name'],
                'username': student['student__username'],
            })

        max_exceeded = False
        if len(results) > MAX_SCREEN_LIST_LENGTH:
            # Remove the last item so list length is exactly MAX_SCREEN_LIST_LENGTH
            del results[-1]
            max_exceeded = True

        response_payload = {
            'results': results,
            'max_exceeded': max_exceeded,
        }
        return JsonResponse(response_payload)
    else:
        tooltip = request.GET.get('tooltip')
        # Subsection name is everything after 3rd space in tooltip
        filename = sanitize_filename(' '.join(tooltip.split(' ')[3:]))

        header = [_("Name").encode('utf-8'), _("Username").encode('utf-8')]
        for student in students:
            results.append([student['student__profile__name'], student['student__username']])

        response = create_csv_response(filename, header, results)
        return response
def test_translate_location_dwim(self):
    """
    Test the location translation mechanisms which try to do-what-i-mean by
    creating new entries for never seen queries.
    """
    published = ModuleStoreEnum.BranchName.published
    org = "foo_org"
    course = "bar_course"
    run = "baz_run"
    guid_name = "abc123abc123abc123abc123abc123f9"
    prob_location = Location(org, course, run, "problem", guid_name)
    offering = "{}.{}".format(course, run)
    self.translate_n_check(prob_location, org, offering, "problemabc", published, True)

    # create an entry w/o a guid name
    intro_location = Location(org, course, run, "chapter", "intro")
    self.translate_n_check(intro_location, org, offering, "intro", published, True)

    # add a distractor course
    dept_org = "{}.geek_dept".format(org)
    run = "delta_run"
    dept_offering = "{}.{}".format(course, run)
    loc_mapper().create_map_entry(
        SlashSeparatedCourseKey(org, course, run),
        dept_org,
        dept_offering,
        block_map={guid_name: {"problem": "problem3"}}
    )
    self.translate_n_check(prob_location, org, offering, "problemabc", published, True)

    # add a new one to both courses (ensure name doesn't have same beginning)
    unique_name = uuid.uuid4().hex
    while unique_name.startswith("abc"):
        unique_name = uuid.uuid4().hex
    moved_location = prob_location.replace(name=unique_name)
    expected_usage_id = "problem{}".format(unique_name[:3])
    self.translate_n_check(moved_location, org, offering, expected_usage_id, published, True)
    moved_location = moved_location.replace(run=run)
    self.translate_n_check(moved_location, dept_org, dept_offering, expected_usage_id, published, True)
def test_translate_location_dwim(self):
    """
    Test the location translation mechanisms which try to do-what-i-mean by
    creating new entries for never seen queries.
    """
    org = 'foo_org'
    course = 'bar_course'
    run = 'baz_run'
    guid_name = 'abc123abc123abc123abc123abc123f9'
    prob_locn = Location(org, course, run, 'problem', guid_name)
    offering = '{}.{}'.format(course, run)
    self.translate_n_check(prob_locn, org, offering, 'problemabc', BRANCH_NAME_PUBLISHED, True)

    # create an entry w/o a guid name
    intro_locn = Location(org, course, run, 'chapter', 'intro')
    self.translate_n_check(intro_locn, org, offering, 'intro', BRANCH_NAME_PUBLISHED, True)

    # add a distractor course
    dept_org = '{}.geek_dept'.format(org)
    run = 'delta_run'
    dept_offering = '{}.{}'.format(course, run)
    loc_mapper().create_map_entry(
        SlashSeparatedCourseKey(org, course, run),
        dept_org,
        dept_offering,
        block_map={guid_name: {'problem': 'problem3'}}
    )
    self.translate_n_check(prob_locn, org, offering, 'problemabc', BRANCH_NAME_PUBLISHED, True)

    # add a new one to both courses (ensure name doesn't have same beginning)
    fresh_name = uuid.uuid4().hex
    while fresh_name.startswith('abc'):
        fresh_name = uuid.uuid4().hex
    fresh_locn = prob_locn.replace(name=fresh_name)
    fresh_usage_id = 'problem{}'.format(fresh_name[:3])
    self.translate_n_check(fresh_locn, org, offering, fresh_usage_id, BRANCH_NAME_PUBLISHED, True)
    fresh_locn = fresh_locn.replace(run=run)
    self.translate_n_check(
        fresh_locn, dept_org, dept_offering, fresh_usage_id, BRANCH_NAME_PUBLISHED, True
    )
def test_clean(self):
    """Exercise the deprecated Location name-cleaning helpers."""
    # six deprecated calls below, hence count=6
    with self.assertDeprecationWarning(count=6):
        with self.assertRaises(InvalidKeyError):
            Location._check_location_part('abc123', re.compile(r'\d'))
        # offending characters are replaced with underscores
        self.assertEqual('abc_', Location._clean('abc123', re.compile(r'\d')))
        self.assertEqual('a._%-', Location.clean('a.*:%-'))
        self.assertEqual('a.__%-', Location.clean_keeping_underscores('a.*:%-'))
        self.assertEqual('a._:%-', Location.clean_for_url_name('a.*:%-'))
        self.assertEqual('a_-', Location.clean_for_html('a.*:%-'))
def to_python(self, value):
    """Convert a stored string into a usage key, passing sentinels through."""
    # the Empty sentinel and None come back unchanged
    if value is None or value is self.Empty:
        return value

    assert isinstance(value, (basestring, UsageKey))
    if value == '':
        # blank string means "no key"
        return None
    if not isinstance(value, basestring):
        # already a UsageKey
        return value
    return Location.from_deprecated_string(value)
def assign_textbook_id(textbook, used_ids=()):
    """
    Return an ID that can be assigned to a textbook
    and doesn't match the used_ids
    """
    tid = Location.clean(textbook["tab_title"])
    # BUG FIX: guard against an empty cleaned title, which previously raised
    # IndexError on tid[0]. IDs must start with a digit; prepend one if needed.
    if not tid or not tid[0].isdigit():
        # stick a random digit in front
        tid = random.choice(string.digits) + tid
    while tid in used_ids:
        # add a random ASCII character to the end
        tid = tid + random.choice(string.ascii_lowercase)
    return tid
def get_orphans(self, course_key):
    """
    Return an array of all of the locations (deprecated string format) for orphans in the course.
    """
    course_key = self.fill_in_run(course_key)
    detached_categories = [name for name, __ in XBlock.load_tagged_classes("detached")]
    query = self._course_key_to_son(course_key)
    query["_id.category"] = {"$nin": detached_categories}
    all_items = self.collection.find(query)
    all_reachable = set()
    item_locs = set()
    for item in all_items:
        if item["_id"]["category"] != "course":
            # It would be nice to change this method to return UsageKeys instead of the deprecated string.
            item_locs.add(
                as_published(Location._from_deprecated_son(item["_id"], course_key.run)).to_deprecated_string()
            )
        all_reachable = all_reachable.union(item.get("definition", {}).get("children", []))
    # BUG FIX: the function previously fell off the end without returning
    # anything. Orphans are the candidate items that no item lists as a child.
    item_locs -= all_reachable
    return list(item_locs)
def get_orphans(self, course_key):
    """
    Return an array all of the locations (deprecated string format) for orphans in the course.
    """
    detached_categories = [name for name, __ in XBlock.load_tagged_classes("detached")]
    query = self._course_key_to_son(course_key)
    query['_id.category'] = {'$nin': detached_categories}

    candidate_locs = set()
    reachable = set()
    for item in self.collection.find(query):
        item_son = item['_id']
        if item_son['category'] != 'course':
            # It would be nice to change this method to return UsageKeys instead of the deprecated string.
            loc = Location._from_deprecated_son(item_son, course_key.run).replace(revision=None)
            candidate_locs.add(loc.to_deprecated_string())
        reachable.update(item.get('definition', {}).get('children', []))

    # an orphan is a candidate that no item in the course lists as a child
    return list(candidate_locs - reachable)
def update_parent_if_moved(self, original_parent_location, published_version, delete_draft_only, user_id):
    """
    Update parent of an item if it has moved.

    Arguments:
        original_parent_location (BlockUsageLocator) : Original parent block locator.
        published_version (dict) : Published version of the block.
        delete_draft_only (function) : A callback function to delete draft children if it was moved.
        user_id (int) : User id
    """
    for child_location in published_version.get('definition', {}).get('children', []):
        item_location = original_parent_location.course_key.make_usage_key_from_deprecated_string(child_location)
        try:
            source_item = self.get_item(item_location)
        except ItemNotFoundError:
            # bail out entirely if any child cannot be found
            log.error('Unable to find the item %s', unicode(item_location))
            return

        # the child's current parent differs from the published one: it was moved
        if source_item.parent and source_item.parent.block_id != original_parent_location.block_id:
            if self.update_item_parent(item_location, original_parent_location, source_item.parent, user_id):
                # remove the now-stale draft copy of the moved child
                delete_draft_only(Location.from_deprecated_string(child_location))
def get_orphans(self, course_key):
    """
    Collect every block location (deprecated string form) that is not
    referenced as a child by any other block in the course — the orphans.
    """
    detached_categories = [
        tag_name for tag_name, __ in XBlock.load_tagged_classes("detached")
    ]
    query = self._course_key_to_son(course_key)
    query['_id.category'] = {'$nin': detached_categories}
    referenced_children = set()
    all_block_locs = set()
    for doc in self.collection.find(query):
        # Every block's children are "referenced", including the course root's.
        referenced_children.update(doc.get('definition', {}).get('children', []))
        if doc['_id']['category'] == 'course':
            continue
        # It would be nice to change this method to return UsageKeys instead of the deprecated string.
        location = Location._from_deprecated_son(doc['_id'], course_key.run)
        all_block_locs.add(location.replace(revision=None).to_deprecated_string())
    return list(all_block_locs - referenced_children)
def make_one(self, display_name=None, **kw):
    """Build and register a Staff Graded Assignment XBlock for tests."""
    block = StaffGradedAssignmentXBlock(self.runtime, DictFieldData(kw), self.scope_ids)
    block.location = Location('foo', 'bar', 'baz', 'category', 'name', 'revision')
    block.xmodule_runtime = self.runtime
    block.course_id = self.course_id
    block.category = 'problem'
    block.start = datetime.datetime(2010, 5, 12, 2, 42, tzinfo=pytz.utc)
    if display_name:
        block.display_name = display_name
    # Register the block with the modulestore so lookups by location work.
    loc = block.location
    modulestore().create_item(self.staff.username, loc.course_key, loc.block_type, loc.block_id)
    return block
def delete_draft_only(root_location):
    """
    Helper function that calls delete on the specified location if a draft version of the item exists.
    If no draft exists, this function recursively calls itself on the children of the item.
    """
    # Drop the revision component from the _id query so the find matches
    # both the draft and the published revision of this location.
    query = root_location.to_deprecated_son(prefix='_id.')
    del query['_id.revision']
    versions_found = self.collection.find(
        query, {'_id': True, 'definition.children': True}, sort=[SORT_REVISION_FAVOR_DRAFT]
    )
    # If 2 versions versions exist, we can assume one is a published version. Go ahead and do the delete
    # of the draft version.
    if versions_found.count() > 1:
        self._delete_subtree(root_location, [as_draft])
    elif versions_found.count() == 1:
        # Since this method cannot be called on something in DIRECT_ONLY_CATEGORIES and we call
        # delete_subtree as soon as we find an item with a draft version, if there is only 1 version
        # it must be published (since adding a child to a published item creates a draft of the parent).
        item = versions_found[0]
        assert item.get('_id').get('revision') != MongoRevisionKey.draft
        # No draft at this level: recurse into the published item's children.
        for child in item.get('definition', {}).get('children', []):
            child_loc = Location.from_deprecated_string(child)
            delete_draft_only(child_loc)
def test_conditional_module_parse_sources(self):
    """parse_sources should split the semicolon-delimited 'sources' attribute."""
    location = Location("edX", "conditional_test", "test_run", "conditional", "SampleConditional", None)
    scope_ids = ScopeIds(None, None, location, location)
    sources = 'i4x://HarvardX/ER22x/poll_question/T15_poll;i4x://HarvardX/ER22x/poll_question/T16_poll'
    field_data = DictFieldData({
        'data': '<conditional/>',
        'xml_attributes': {'sources': sources},
        'children': None,
    })
    descriptor = ConditionalDescriptor(Mock(), field_data, scope_ids)
    expected = [
        'i4x://HarvardX/ER22x/poll_question/T15_poll',
        'i4x://HarvardX/ER22x/poll_question/T16_poll',
    ]
    self.assertEqual(descriptor.parse_sources(descriptor.xml_attributes), expected)
def setUp(self):
    """Import the open-ended test course and seed one student per workflow state."""
    self.user = UserFactory()
    store = modulestore()
    course_items = import_from_xml(store, self.user.id, TEST_DATA_DIR, ['open_ended'])  # pylint: disable=maybe-no-member
    self.course = course_items[0]
    self.course_id = self.course.id
    self.problem_location = Location("edX", "open_ended", "2012_Fall", "combinedopenended", "SampleQuestion")
    self.self_assessment_task_number = 0
    self.open_ended_task_number = 1

    self.student_on_initial = UserFactory()
    self.student_on_accessing = UserFactory()
    self.student_on_post_assessment = UserFactory()

    # One StudentModule per student, each frozen at a different workflow state.
    for student, state in [
        (self.student_on_initial, STATE_INITIAL),
        (self.student_on_accessing, STATE_ACCESSING),
        (self.student_on_post_assessment, STATE_POST_ASSESSMENT),
    ]:
        StudentModuleFactory.create(
            course_id=self.course_id,
            module_state_key=self.problem_location,
            student=student,
            grade=0,
            max_grade=1,
            state=state,
        )
def setUp(self):
    """Build a course key / problem location and one student per workflow state."""
    self.course_id = SlashSeparatedCourseKey("edX", "open_ended", "2012_Fall")
    self.problem_location = Location("edX", "open_ended", "2012_Fall", "combinedopenended", "SampleQuestion")
    self.task_number = 1
    self.invalid_task_number = 3

    self.student_on_initial = UserFactory()
    self.student_on_accessing = UserFactory()
    self.student_on_post_assessment = UserFactory()
    self.students = [
        self.student_on_initial,
        self.student_on_accessing,
        self.student_on_post_assessment,
    ]

    # One StudentModule per student, each frozen at a different workflow state.
    states = [STATE_INITIAL, STATE_ACCESSING, STATE_POST_ASSESSMENT]
    for student, state in zip(self.students, states):
        StudentModuleFactory.create(
            course_id=self.course_id,
            module_state_key=self.problem_location,
            student=student,
            grade=0,
            max_grade=1,
            state=state,
        )
def test_update_locations_native_xblock(self): """ Update locations updates location and keeps values and "is_set_on" status """ # Set the XBlock's location self.xblock.location = Location("org", "import", "run", "category", "stubxblock") # Explicitly set the content, settings and children fields self.xblock.test_content_field = 'Explicitly set' self.xblock.test_settings_field = 'Explicitly set' self.xblock.test_mutable_content_field = [1, 2, 3] self.xblock.test_mutable_settings_field = ["a", "s", "d"] self.xblock.children = self.fake_children_locations # pylint:disable=attribute-defined-outside-init self.xblock.save() # Update location target_location = self.xblock.location.replace(revision='draft') _update_module_location(self.xblock, target_location) new_version = self.xblock # _update_module_location updates in-place # Check the XBlock's location self.assertEqual(new_version.location, target_location) # Check the values of the fields. # The content, settings and children fields should be preserved self.assertEqual(new_version.test_content_field, 'Explicitly set') self.assertEqual(new_version.test_settings_field, 'Explicitly set') self.assertEqual(new_version.test_mutable_content_field, [1, 2, 3]) self.assertEqual(new_version.test_mutable_settings_field, ["a", "s", "d"]) self.assertEqual(new_version.children, self.fake_children_locations) # Expect that these fields are marked explicitly set self._check_explicitly_set(new_version, Scope.content, self.CONTENT_FIELDS, should_be_set=True) self._check_explicitly_set(new_version, Scope.settings, self.SETTINGS_FIELDS, should_be_set=True) self._check_explicitly_set(new_version, Scope.children, self.CHILDREN_FIELDS, should_be_set=True) # Expect these fields pass "is_set_on" test for field in self.CONTENT_FIELDS + self.SETTINGS_FIELDS + self.CHILDREN_FIELDS: self.assertTrue(new_version.fields[field].is_set_on(new_version))
def test_update_published_info(self):
    """
    Publishing a component should stamp it with a publish timestamp and the
    publishing user's id.
    """
    location = Location('edX', 'toy', '2012_Fall', 'html', 'test_html')
    creator_id = 123
    publisher_id = 456

    # Make a dummy component to publish.
    self.draft_store.create_item(
        creator_id,
        location.course_key,
        location.block_type,
        block_id=location.block_id
    )

    # Capture a timestamp from just before publishing, then publish.
    before_publish = datetime.now(UTC)
    self.draft_store.publish(location, publisher_id)
    component = self.draft_store.get_item(location)

    # Publication time must not precede our captured timestamp, and the
    # publisher must be recorded.
    self.assertLessEqual(before_publish, component.published_date)
    self.assertEqual(component.published_by, publisher_id)
def _create(cls, target_class, **kwargs):
    """
    Factory hook: create a course in the supplied modulestore.

    Non-field arguments (org, course/number, modulestore, user_id, run/name,
    default_store) are popped off first; everything remaining — plus the
    contents of a 'metadata' dict, if given — is passed to create_course
    as course field values.
    """
    # All class attributes (from this class and base classes) are
    # passed in via **kwargs. However, some of those aren't actual field values,
    # so pop those off for use separately
    org = kwargs.pop('org', None)
    # because the factory provides a default 'number' arg, prefer the non-defaulted 'course' arg if any
    number = kwargs.pop('course', kwargs.pop('number', None))
    store = kwargs.pop('modulestore')
    name = kwargs.get(
        'name', kwargs.get('run', Location.clean(kwargs.get('display_name'))))
    run = kwargs.get('run', name)
    user_id = kwargs.pop('user_id', ModuleStoreEnum.UserID.test)

    # Pass the metadata just as field=value pairs
    kwargs.update(kwargs.pop('metadata', {}))
    default_store_override = kwargs.pop('default_store', None)

    with store.branch_setting(ModuleStoreEnum.Branch.draft_preferred):
        if default_store_override is not None:
            # Temporarily switch the mixed store's default backend for this creation.
            with store.default_store(default_store_override):
                new_course = store.create_course(org, number, run, user_id, fields=kwargs)
        else:
            new_course = store.create_course(org, number, run, user_id, fields=kwargs)

        # Record the last created course's location for tests that need it.
        last_course.loc = new_course.location
        return new_course
def create(system, source_is_error_module=False, source_visible_to_staff_only=False):
    """
    return a dict of modules: the conditional with a single source and a single child.
    Keys are 'cond_module', 'source_module', and 'child_module'.

    if the source_is_error_module flag is set, create a real ErrorModule for the source.
    """
    descriptor_system = get_test_descriptor_system()

    # construct source descriptor and module:
    source_location = Location("edX", "conditional_test", "test_run", "problem", "SampleProblem", None)
    if source_is_error_module:
        # Make an error descriptor and module
        source_descriptor = NonStaffErrorDescriptor.from_xml(
            'some random xml data',
            system,
            id_generator=CourseLocationManager(source_location.course_key),
            error_msg='random error message'
        )
    else:
        source_descriptor = Mock(name='source_descriptor')
        source_descriptor.location = source_location

    source_descriptor.visible_to_staff_only = source_visible_to_staff_only
    source_descriptor.runtime = descriptor_system
    source_descriptor.render = lambda view, context=None: descriptor_system.render(source_descriptor, view, context)

    # construct other descriptors:
    child_descriptor = Mock(name='child_descriptor')
    child_descriptor.visible_to_staff_only = False
    # The child's rendered content; tests assert on this "secret" text.
    child_descriptor._xmodule.student_view.return_value.content = u'<p>This is a secret</p>'
    child_descriptor.student_view = child_descriptor._xmodule.student_view
    child_descriptor.displayable_items.return_value = [child_descriptor]
    child_descriptor.runtime = descriptor_system
    child_descriptor.xmodule_runtime = get_test_system()
    child_descriptor.render = lambda view, context=None: descriptor_system.render(child_descriptor, view, context)
    child_descriptor.location = source_location.replace(category='html', name='child')

    def visible_to_nonstaff_users(desc):
        """
        Returns if the object is visible to nonstaff users.
        """
        return not desc.visible_to_staff_only

    def load_item(usage_id, for_parent=None):  # pylint: disable=unused-argument
        """Test-only implementation of load_item that simply returns static xblocks."""
        return {
            child_descriptor.location: child_descriptor,
            source_location: source_descriptor
        }.get(usage_id)

    descriptor_system.load_item = load_item

    system.descriptor_runtime = descriptor_system

    # construct conditional module:
    cond_location = Location("edX", "conditional_test", "test_run", "conditional", "SampleConditional", None)
    field_data = DictFieldData({
        'data': '<conditional/>',
        'conditional_attr': 'attempted',
        'conditional_value': 'true',
        'xml_attributes': {'attempted': 'true'},
        'children': [child_descriptor.location],
    })

    cond_descriptor = ConditionalDescriptor(
        descriptor_system,
        field_data,
        ScopeIds(None, None, cond_location, cond_location)
    )
    cond_descriptor.xmodule_runtime = system
    # get_module hides modules invisible to non-staff users (returns None).
    system.get_module = lambda desc: desc if visible_to_nonstaff_users(desc) else None
    cond_descriptor.get_required_module_descriptors = Mock(return_value=[source_descriptor])
    cond_descriptor.required_modules = [
        system.get_module(descriptor)
        for descriptor in cond_descriptor.get_required_module_descriptors()
    ]

    # return dict:
    return {
        'cond_module': cond_descriptor,
        'source_module': source_descriptor,
        'child_module': child_descriptor
    }
def load_course(self, course_dir, course_ids, tracker):
    """
    Load a course into this module store.

    Arguments:
        course_dir: Course directory name under self.data_dir.
        course_ids: Optional collection of course ids to restrict loading to;
            if the parsed id is not in it, returns None.
        tracker: Callable used to record import warnings/errors.

    Returns a CourseDescriptor for the course (or an ErrorDescriptor if
    the course XML fails to load).
    """
    log.debug('========> Starting course import from {0}'.format(course_dir))

    with open(self.data_dir / course_dir / "course.xml") as course_file:

        # VS[compat]
        # TODO (cpennington): Remove this once all fall 2012 courses have
        # been imported into the cms from xml
        course_file = StringIO(clean_out_mako_templating(course_file.read()))

        course_data = etree.parse(course_file, parser=edx_xml_parser).getroot()

        org = course_data.get('org')
        if org is None:
            msg = ("No 'org' attribute set for course in {dir}. "
                   "Using default 'edx'".format(dir=course_dir))
            log.warning(msg)
            tracker(msg)
            org = 'edx'

        course = course_data.get('course')
        if course is None:
            msg = ("No 'course' attribute set for course in {dir}."
                   " Using default '{default}'".format(dir=course_dir,
                                                       default=course_dir
                                                       )
                   )
            log.warning(msg)
            tracker(msg)
            course = course_dir

        url_name = course_data.get('url_name', course_data.get('slug'))
        policy_dir = None
        if url_name:
            policy_dir = self.data_dir / course_dir / 'policies' / url_name
            policy_path = policy_dir / 'policy.json'

            policy = self.load_policy(policy_path, tracker)

            # VS[compat]: remove once courses use the policy dirs.
            if policy == {}:
                old_policy_path = self.data_dir / course_dir / 'policies' / '{0}.json'.format(url_name)
                policy = self.load_policy(old_policy_path, tracker)
        else:
            policy = {}

        # VS[compat] : 'name' is deprecated, but support it for now...
        if course_data.get('name'):
            url_name = Location.clean(course_data.get('name'))
            tracker("'name' is deprecated for module xml. Please use "
                    "display_name and url_name.")
        elif not url_name:
            # BUGFIX: previously this was a bare `else:`, which raised even
            # when a valid 'url_name' (or 'slug') attribute was present —
            # contradicting both the policy-loading branch above and this
            # error message. Only raise when neither 'name' nor 'url_name'
            # yielded a value.
            raise ValueError("Can't load a course without a 'url_name' "
                             "(or 'name') set. Set url_name.")

        course_id = SlashSeparatedCourseKey(org, course, url_name)

        # Skip courses the caller did not ask for.
        if course_ids is not None and course_id not in course_ids:
            return None

        def get_policy(usage_id):
            """
            Return the policy dictionary to be applied to the specified XBlock usage
            """
            return policy.get(policy_key(usage_id), {})

        services = {}
        if self.i18n_service:
            services['i18n'] = self.i18n_service
        if self.fs_service:
            services['fs'] = self.fs_service

        system = ImportSystem(
            xmlstore=self,
            course_id=course_id,
            course_dir=course_dir,
            error_tracker=tracker,
            parent_tracker=self.parent_trackers[course_id],
            load_error_modules=self.load_error_modules,
            get_policy=get_policy,
            mixins=self.xblock_mixins,
            default_class=self.default_class,
            select=self.xblock_select,
            field_data=self.field_data,
            services=services,
        )

        course_descriptor = system.process_xml(etree.tostring(course_data, encoding='unicode'))
        # If we fail to load the course, then skip the rest of the loading steps
        if isinstance(course_descriptor, ErrorDescriptor):
            return course_descriptor

        # NOTE: The descriptors end up loading somewhat bottom up, which
        # breaks metadata inheritance via get_children(). Instead
        # (actually, in addition to, for now), we do a final inheritance pass
        # after we have the course descriptor.
        compute_inherited_metadata(course_descriptor)

        # now import all pieces of course_info which is expected to be stored
        # in <content_dir>/info or <content_dir>/info/<url_name>
        self.load_extra_content(system, course_descriptor, 'course_info',
                                self.data_dir / course_dir / 'info', course_dir, url_name)

        # now import all static tabs which are expected to be stored in
        # in <content_dir>/tabs or <content_dir>/tabs/<url_name>
        self.load_extra_content(system, course_descriptor, 'static_tab',
                                self.data_dir / course_dir / 'tabs', course_dir, url_name)

        self.load_extra_content(system, course_descriptor, 'custom_tag_template',
                                self.data_dir / course_dir / 'custom_tags', course_dir, url_name)
        self.load_extra_content(system, course_descriptor, 'about',
                                self.data_dir / course_dir / 'about', course_dir, url_name)

        log.debug('========> Done with course import from {0}'.format(course_dir))
        return course_descriptor
def create(system, source_is_error_module=False):
    """
    return a dict of modules: the conditional with a single source and a single child.
    Keys are 'cond_module', 'source_module', and 'child_module'.

    if the source_is_error_module flag is set, create a real ErrorModule for the source.
    """
    descriptor_system = get_test_descriptor_system()

    # construct source descriptor and module:
    source_location = Location("edX", "conditional_test", "test_run", "problem", "SampleProblem", None)
    if source_is_error_module:
        # Make an error descriptor and module
        source_descriptor = NonStaffErrorDescriptor.from_xml(
            'some random xml data',
            system,
            id_generator=CourseLocationManager(source_location.course_key),
            error_msg='random error message'
        )
    else:
        source_descriptor = Mock(name='source_descriptor')
        source_descriptor.location = source_location

    source_descriptor.runtime = descriptor_system
    source_descriptor.render = lambda view, context=None: descriptor_system.render(source_descriptor, view, context)

    # construct other descriptors:
    child_descriptor = Mock(name='child_descriptor')
    # The child's rendered content; tests assert on this "secret" text.
    child_descriptor._xmodule.student_view.return_value.content = u'<p>This is a secret</p>'
    child_descriptor.student_view = child_descriptor._xmodule.student_view
    child_descriptor.displayable_items.return_value = [child_descriptor]
    child_descriptor.runtime = descriptor_system
    child_descriptor.xmodule_runtime = get_test_system()
    child_descriptor.render = lambda view, context=None: descriptor_system.render(child_descriptor, view, context)
    child_descriptor.location = source_location.replace(category='html', name='child')

    # Static lookup table standing in for the runtime's load_item.
    descriptor_system.load_item = {
        child_descriptor.location: child_descriptor,
        source_location: source_descriptor
    }.get

    system.descriptor_runtime = descriptor_system

    # construct conditional module:
    cond_location = Location("edX", "conditional_test", "test_run", "conditional", "SampleConditional", None)
    field_data = DictFieldData({
        'data': '<conditional/>',
        'xml_attributes': {'attempted': 'true'},
        'children': [child_descriptor.location],
    })

    cond_descriptor = ConditionalDescriptor(
        descriptor_system,
        field_data,
        ScopeIds(None, None, cond_location, cond_location)
    )
    cond_descriptor.xmodule_runtime = system
    system.get_module = lambda desc: desc
    cond_descriptor.get_required_module_descriptors = Mock(return_value=[source_descriptor])

    # return dict:
    return {
        'cond_module': cond_descriptor,
        'source_module': source_descriptor,
        'child_module': child_descriptor
    }
class SelfAssessmentTest(unittest.TestCase):
    """Tests for SelfAssessmentModule state transitions, rendering, and close dates."""

    # NOTE(review): the internal whitespace of this rubric string was lost in
    # extraction; the XML structure (including the doubled <rubric> wrapper,
    # which appears intentional) is preserved.
    rubric = '''<rubric><rubric>
        <category>
        <description>Response Quality</description>
        <option>The response is not a satisfactory answer to the question. It either fails to address the question or does so in a limited way, with no evidence of higher-order thinking.</option>
        </category>
        </rubric></rubric>'''

    prompt = etree.XML("<prompt>This is sample prompt text.</prompt>")
    definition = {
        'rubric': rubric,
        'prompt': prompt,
        'submitmessage': 'Shall we submit now?',
        'hintprompt': 'Consider this...',
    }

    location = Location("edX", "sa_test", "run", "selfassessment", "SampleQuestion", None)
    descriptor = Mock()

    def setUp(self):
        # Static configuration handed to every SelfAssessmentModule under test.
        self.static_data = {
            'max_attempts': 10,
            'rubric': etree.XML(self.rubric),
            'prompt': self.prompt,
            'max_score': 1,
            'display_name': "Name",
            'accept_file_upload': False,
            'close_date': None,
            's3_interface': test_util_open_ended.S3_INTERFACE,
            'open_ended_grading_interface': test_util_open_ended.OPEN_ENDED_GRADING_INTERFACE,
            'skip_basic_checks': False,
            'control': {
                'required_peer_grading': 1,
                'peer_grader_count': 1,
                'min_to_calibrate': 3,
                'max_to_calibrate': 6,
                'peer_grade_finished_submissions_when_none_pending': False,
            }
        }

        system = get_test_system()
        usage_key = system.course_id.make_usage_key('combinedopenended', 'test_loc')
        scope_ids = ScopeIds(1, 'combinedopenended', usage_key, usage_key)
        system.xmodule_instance = Mock(scope_ids=scope_ids)
        self.module = SelfAssessmentModule(system, self.location, self.definition, self.descriptor, self.static_data)

    def test_get_html(self):
        """The rendered HTML contains the prompt text."""
        html = self.module.get_html(self.module.system)
        self.assertTrue("This is sample prompt text" in html)

    def test_self_assessment_flow(self):
        """Walk the full INITIAL -> ASSESSING -> DONE -> reset flow."""
        responses = {'assessment': '0', 'score_list[]': ['0', '0']}

        def get_fake_item(name):
            # Stand-in for query-dict lookups; serves canned responses.
            return responses[name]

        def get_data_for_location(self, location, student):
            # Stub for the peer-grading service's per-location counters.
            return {
                'count_graded': 0,
                'count_required': 0,
                'student_sub_count': 0,
            }

        mock_query_dict = MagicMock()
        mock_query_dict.__getitem__.side_effect = get_fake_item
        mock_query_dict.getall = get_fake_item

        self.module.peer_gs.get_data_for_location = get_data_for_location

        self.assertEqual(self.module.get_score()['score'], 0)

        self.module.save_answer({'student_answer': "I am an answer"}, self.module.system)
        self.assertEqual(self.module.child_state, self.module.ASSESSING)

        self.module.save_assessment(mock_query_dict, self.module.system)
        self.assertEqual(self.module.child_state, self.module.DONE)

        d = self.module.reset({})
        self.assertTrue(d['success'])
        self.assertEqual(self.module.child_state, self.module.INITIAL)

        # if we now assess as right, skip the REQUEST_HINT state
        self.module.save_answer({'student_answer': 'answer 4'}, self.module.system)
        responses['assessment'] = '1'
        self.module.save_assessment(mock_query_dict, self.module.system)
        self.assertEqual(self.module.child_state, self.module.DONE)

    def test_self_assessment_display(self):
        """
        Test storing an answer with the self assessment module.
        """
        # Create a module with no state yet. Important that this start off as a blank slate.
        test_module = SelfAssessmentModule(get_test_system(), self.location, self.definition, self.descriptor, self.static_data)

        saved_response = "Saved response."
        submitted_response = "Submitted response."

        # Initially, there will be no stored answer.
        self.assertEqual(test_module.stored_answer, None)
        # And the initial answer to display will be an empty string.
        self.assertEqual(test_module.get_display_answer(), "")

        # Now, store an answer in the module.
        test_module.handle_ajax("store_answer", {'student_answer': saved_response}, get_test_system())
        # The stored answer should now equal our response.
        self.assertEqual(test_module.stored_answer, saved_response)
        self.assertEqual(test_module.get_display_answer(), saved_response)

        # Submit a student response to the question.
        test_module.handle_ajax("save_answer", {"student_answer": submitted_response}, get_test_system())
        # Submitting an answer should clear the stored answer.
        self.assertEqual(test_module.stored_answer, None)
        # Confirm that the answer is stored properly.
        self.assertEqual(test_module.latest_answer(), submitted_response)

        # Mock saving an assessment.
        assessment_dict = MultiDict({'assessment': 0, 'score_list[]': 0})
        data = test_module.handle_ajax("save_assessment", assessment_dict, get_test_system())
        self.assertTrue(json.loads(data)['success'])

        # Reset the module so the student can try again.
        test_module.reset(get_test_system())

        # Confirm that the right response is loaded.
        self.assertEqual(test_module.get_display_answer(), submitted_response)

    def test_save_assessment_after_closing(self):
        """
        Test storing assessment when close date is passed.
        """
        responses = {'assessment': '0', 'score_list[]': ['0', '0']}
        self.module.save_answer({'student_answer': "I am an answer"}, self.module.system)
        self.assertEqual(self.module.child_state, self.module.ASSESSING)

        # Set close date to current datetime.
        self.module.close_date = datetime.now(UTC)

        # Save assessment when close date is passed: it must NOT move to DONE.
        self.module.save_assessment(responses, self.module.system)
        self.assertNotEqual(self.module.child_state, self.module.DONE)
def setUp(self):
    """Create the Location of the sample video problem used by these tests."""
    loc_fields = ("edX", "course", "run", "video", "SampleProblem1", None)
    self.location = Location(*loc_fields)
def load_course(self, course_dir, course_ids, tracker, target_course_id=None):
    """
    Load a course into this module store
    course_path: Course directory name

    returns a CourseDescriptor for the course
    """
    log.debug('========> Starting courselike import from %s', course_dir)
    with open(self.data_dir / course_dir / self.parent_xml) as course_file:

        # VS[compat]
        # TODO (cpennington): Remove this once all fall 2012 courses have
        # been imported into the cms from xml
        course_file = StringIO(clean_out_mako_templating(course_file.read()))

        course_data = etree.parse(course_file, parser=edx_xml_parser).getroot()

        org = course_data.get('org')
        if org is None:
            msg = ("No 'org' attribute set for courselike in {dir}. "
                   "Using default 'edx'".format(dir=course_dir))
            log.warning(msg)
            tracker(msg)
            org = 'edx'

        # Parent XML should be something like 'library.xml' or 'course.xml'
        courselike_label = self.parent_xml.split('.')[0]

        course = course_data.get(courselike_label)
        if course is None:
            msg = (
                "No '{courselike_label}' attribute set for course in {dir}."
                " Using default '{default}'".format(
                    courselike_label=courselike_label,
                    dir=course_dir,
                    default=course_dir
                )
            )
            log.warning(msg)
            tracker(msg)
            course = course_dir

        url_name = course_data.get('url_name', course_data.get('slug'))

        if url_name:
            policy_dir = self.data_dir / course_dir / 'policies' / url_name
            policy_path = policy_dir / 'policy.json'

            policy = self.load_policy(policy_path, tracker)

            # VS[compat]: remove once courses use the policy dirs.
            if policy == {}:
                dog_stats_api.increment(
                    DEPRECATION_VSCOMPAT_EVENT,
                    tags=(
                        "location:xml_load_course_policy_dir",
                        u"course:{}".format(course),
                    )
                )
                old_policy_path = self.data_dir / course_dir / 'policies' / '{0}.json'.format(url_name)
                policy = self.load_policy(old_policy_path, tracker)
        else:
            policy = {}

        # VS[compat] : 'name' is deprecated, but support it for now...
        if course_data.get('name'):
            dog_stats_api.increment(
                DEPRECATION_VSCOMPAT_EVENT,
                tags=(
                    "location:xml_load_course_course_data_name",
                    u"course:{}".format(course_data.get('course')),
                    u"org:{}".format(course_data.get('org')),
                    u"name:{}".format(course_data.get('name')),
                )
            )
            url_name = Location.clean(course_data.get('name'))
            tracker("'name' is deprecated for module xml. Please use "
                    "display_name and url_name.")
        else:
            # NOTE(review): this discards any url_name read from the
            # 'url_name'/'slug' attribute above when 'name' is absent —
            # confirm get_id is meant to receive None in that case.
            url_name = None

        course_id = self.get_id(org, course, url_name)

        # Skip courselikes the caller did not ask for.
        if course_ids is not None and course_id not in course_ids:
            return None

        def get_policy(usage_id):
            """
            Return the policy dictionary to be applied to the specified XBlock usage
            """
            return policy.get(policy_key(usage_id), {})

        services = {}
        if self.i18n_service:
            services['i18n'] = self.i18n_service
        if self.fs_service:
            services['fs'] = self.fs_service
        if self.user_service:
            services['user'] = self.user_service

        system = ImportSystem(
            xmlstore=self,
            course_id=course_id,
            course_dir=course_dir,
            error_tracker=tracker,
            load_error_modules=self.load_error_modules,
            get_policy=get_policy,
            mixins=self.xblock_mixins,
            default_class=self.default_class,
            select=self.xblock_select,
            field_data=self.field_data,
            services=services,
            target_course_id=target_course_id,
        )
        course_descriptor = system.process_xml(etree.tostring(course_data, encoding='unicode'))
        # If we fail to load the course, then skip the rest of the loading steps
        if isinstance(course_descriptor, ErrorDescriptor):
            return course_descriptor

        self.content_importers(system, course_descriptor, course_dir, url_name)

        log.debug('========> Done with courselike import from %s', course_dir)
        return course_descriptor
class TestAsideKeys(TestCase):
    """Test of Aside keys."""

    @ddt.data(
        (Location.from_string('i4x://org/course/cat/name'), 'aside'),
        (BlockUsageLocator(CourseLocator('org', 'course', 'run'), 'block_type', 'block_id'), 'aside'),
    )
    @ddt.unpack
    def test_usage_round_trip_deserialized(self, usage_key, aside_type):
        """An aside usage key survives serialize -> deserialize unchanged."""
        key = AsideUsageKeyV1(usage_key, aside_type)
        serialized = unicode(key)
        deserialized = AsideUsageKey.from_string(serialized)
        self.assertEquals(key, deserialized)
        self.assertEquals(usage_key, key.usage_key, usage_key)
        self.assertEquals(usage_key, deserialized.usage_key)
        self.assertEquals(aside_type, key.aside_type)
        self.assertEquals(aside_type, deserialized.aside_type)

    @ddt.data(
        'aside-usage-v1:i4x://org/course/cat/name::aside',
        'aside-usage-v1:block-v1:org+course+cat+type@block_type+block@name::aside',
    )
    def test_usage_round_trip_serialized(self, aside_key):
        """A serialized aside usage key survives deserialize -> serialize unchanged."""
        deserialized = AsideUsageKey.from_string(aside_key)
        serialized = unicode(deserialized)
        self.assertEquals(aside_key, serialized)

    @ddt.data(
        (DefinitionLocator('block_type', 'abcd1234abcd1234abcd1234'), 'aside'),
    )
    @ddt.unpack
    def test_definition_round_trip_deserialized(self, definition_key, aside_type):
        """An aside definition key survives serialize -> deserialize unchanged."""
        key = AsideDefinitionKeyV1(definition_key, aside_type)
        serialized = unicode(key)
        deserialized = AsideDefinitionKey.from_string(serialized)
        self.assertEquals(key, deserialized)
        self.assertEquals(definition_key, key.definition_key, definition_key)
        self.assertEquals(definition_key, deserialized.definition_key)
        self.assertEquals(aside_type, key.aside_type)
        self.assertEquals(aside_type, deserialized.aside_type)

    @ddt.data(
        'aside-def-v1:def-v1:abcd1234abcd1234abcd1234+type@block_type::aside'
    )
    def test_definition_round_trip_serialized(self, aside_key):
        """A serialized aside definition key survives deserialize -> serialize unchanged."""
        deserialized = AsideDefinitionKey.from_string(aside_key)
        serialized = unicode(deserialized)
        self.assertEquals(aside_key, serialized)

    @ddt.data(
        ('aside_type', 'bside'),
        ('usage_key', BlockUsageLocator(CourseLocator('borg', 'horse', 'gun'), 'lock_type', 'lock_id')),
        ('block_id', 'lock_id'),
        ('block_type', 'lock_type'),
        # BlockUsageLocator can't `replace` a definition_key, so skip for now
        # ('definition_key', DefinitionLocator('block_type', 'abcd1234abcd1234abcd1234')),
        ('course_key', CourseLocator('borg', 'horse', 'gun')),
    )
    @ddt.unpack
    def test_usage_key_replace(self, attr, value):
        """replace() on an aside usage key updates the named attribute."""
        key = AsideUsageKeyV1(
            BlockUsageLocator(CourseLocator('org', 'course', 'run'), 'block_type', 'block_id'),
            'aside'
        )
        new_key = key.replace(**{attr: value})
        self.assertEquals(getattr(new_key, attr), value)

    @ddt.data(
        ('aside_type', 'bside'),
        ('definition_key', DefinitionLocator('block_type', 'abcd1234abcd1234abcd1234')),
        ('block_type', 'lock_type'),
    )
    @ddt.unpack
    def test_definition_key_replace(self, attr, value):
        """replace() on an aside definition key updates the named attribute."""
        key = AsideDefinitionKeyV1(
            DefinitionLocator('block_type', 'abcd1234abcd1234abcd1234'),
            'aside'
        )
        new_key = key.replace(**{attr: value})
        self.assertEquals(getattr(new_key, attr), value)
def get_students_problem_grades(request, csv=False):
    """
    Get a list of students and grades for a particular problem.
    If 'csv' is False, returns a dict of student's name: username: grade: percent.

    If 'csv' is True, returns a header array, and an array of arrays in the
    format: student names, usernames, grades, percents for CSV download.
    """
    module_state_key = Location.from_string(request.GET.get('module_id'))
    # NOTE(review): the `csv` parameter is immediately overwritten by the
    # query-string value, so only the GET parameter actually controls the
    # output mode -- confirm the function argument is intentionally unused.
    csv = request.GET.get('csv')

    # Query for "problem grades" students
    students = models.StudentModule.objects.select_related('student').filter(
        module_state_key=module_state_key,
        module_type__exact='problem',
        grade__isnull=False,
    ).values('student__username', 'student__profile__name', 'grade', 'max_grade').order_by('student__profile__name')

    results = []

    if not csv:
        # Restrict screen list length
        # Adding 1 so can tell if list is larger than MAX_SCREEN_LIST_LENGTH
        # without doing another select.
        for student in students[0:MAX_SCREEN_LIST_LENGTH + 1]:
            student_dict = {
                'name': student['student__profile__name'],
                'username': student['student__username'],
                'grade': student['grade'],
            }

            # Guard against division by zero for ungradable entries.
            student_dict['percent'] = 0
            if student['max_grade'] > 0:
                student_dict['percent'] = round(student['grade'] * 100 / student['max_grade'])
            results.append(student_dict)

        max_exceeded = False
        if len(results) > MAX_SCREEN_LIST_LENGTH:
            # Remove the last item so list length is exactly MAX_SCREEN_LIST_LENGTH
            del results[-1]
            max_exceeded = True

        response_payload = {
            'results': results,
            'max_exceeded': max_exceeded,
        }
        return JsonResponse(response_payload)
    else:
        # CSV path: derive the download filename from the tooltip text,
        # dropping everything after the last " - " separator.
        tooltip = request.GET.get('tooltip')
        filename = sanitize_filename(tooltip[:tooltip.rfind(' - ')])

        header = [_("Name"), _("Username"), _("Grade"), _("Percent")]

        # No screen-length cap for CSV: export every matching student.
        for student in students:
            percent = 0
            if student['max_grade'] > 0:
                percent = round(student['grade'] * 100 / student['max_grade'])
            results.append([
                student['student__profile__name'],
                student['student__username'],
                student['grade'],
                percent
            ])

        response = create_csv_response(filename, header, results)
        return response
def test_contentstore_attrs(self):
    """
    Test getting, setting, and defaulting the locked attr and arbitrary attrs.
    """
    location = Location('edX', 'toy', '2012_Fall', 'course', '2012_Fall')
    course_content, __ = self.content_store.get_all_content_for_course(location.course_key)
    assert_true(len(course_content) > 0)

    # A type filter ('Images') should return a strict subset of all content.
    filter_params = _build_requested_filter('Images')
    filtered_course_content, __ = self.content_store.get_all_content_for_course(
        location.course_key, filter_params=filter_params)
    assert_true(len(filtered_course_content) < len(course_content))

    # a bit overkill, could just do for content[0]
    for content in course_content:
        # Each asset starts unlocked; flip the flag and verify both the
        # single-attr and bulk-attrs accessors observe the change.
        assert not content.get('locked', False)
        asset_key = AssetLocation._from_deprecated_son(content.get('content_son', content['_id']), location.run)
        assert not self.content_store.get_attr(asset_key, 'locked', False)
        attrs = self.content_store.get_attrs(asset_key)
        assert_in('uploadDate', attrs)
        assert not attrs.get('locked', False)
        self.content_store.set_attr(asset_key, 'locked', True)
        assert self.content_store.get_attr(asset_key, 'locked', False)
        attrs = self.content_store.get_attrs(asset_key)
        assert_in('locked', attrs)
        assert attrs['locked'] is True
        # Arbitrary (non-reserved) attrs can be set and read back.
        self.content_store.set_attrs(asset_key, {'miscel': 99})
        assert_equals(self.content_store.get_attr(asset_key, 'miscel'), 99)

    # Reserved attrs such as md5 must reject writes with AttributeError,
    # whether set singly or in a bulk dict.
    asset_key = AssetLocation._from_deprecated_son(
        course_content[0].get('content_son', course_content[0]['_id']),
        location.run
    )
    assert_raises(
        AttributeError, self.content_store.set_attr, asset_key,
        'md5', 'ff1532598830e3feac91c2449eaa60d6'
    )
    assert_raises(
        AttributeError, self.content_store.set_attrs, asset_key,
        {'foo': 9, 'md5': 'ff1532598830e3feac91c2449eaa60d6'}
    )
    # Nonexistent assets must raise NotFoundError for every accessor.
    assert_raises(
        NotFoundError, self.content_store.get_attr,
        Location('bogus', 'bogus', 'bogus', 'asset', 'bogus'),
        'displayname'
    )
    assert_raises(
        NotFoundError, self.content_store.set_attr,
        Location('bogus', 'bogus', 'bogus', 'asset', 'bogus'),
        'displayname', 'hello'
    )
    assert_raises(
        NotFoundError, self.content_store.get_attrs,
        Location('bogus', 'bogus', 'bogus', 'asset', 'bogus')
    )
    assert_raises(
        NotFoundError, self.content_store.set_attrs,
        Location('bogus', 'bogus', 'bogus', 'asset', 'bogus'),
        {'displayname': 'hello'}
    )
    assert_raises(
        NotFoundError, self.content_store.set_attrs,
        Location('bogus', 'bogus', 'bogus', 'asset', None),
        {'displayname': 'hello'}
    )
def _compute_metadata_inheritance_tree(self, course_id):
    '''
    Compute the inherited metadata for every block reachable from the course
    root and return it as a dict mapping deprecated location url -> metadata
    dict.

    TODO (cdodge) This method can be deleted when the 'split module store'
    work has been completed
    '''
    # get all collections in the course, this query should not return any leaf nodes
    # note this is a bit ugly as when we add new categories of containers, we have to add it here
    block_types_with_children = set(
        name for name, class_ in XBlock.load_classes() if getattr(class_, 'has_children', False)
    )
    query = SON([
        ('_id.tag', 'i4x'),
        ('_id.org', course_id.org),
        ('_id.course', course_id.course),
        ('_id.category', {'$in': list(block_types_with_children)})
    ])
    # we just want the Location, children, and inheritable metadata
    record_filter = {'_id': 1, 'definition.children': 1}

    # just get the inheritable metadata since that is all we need for the computation
    # this minimizes both data pushed over the wire
    for field_name in InheritanceMixin.fields:
        record_filter['metadata.{0}'.format(field_name)] = 1

    # call out to the DB
    resultset = self.collection.find(query, record_filter)

    # it's ok to keep these as deprecated strings b/c the overall cache is indexed by course_key and this
    # is a dictionary relative to that course
    results_by_url = {}
    root = None

    # now go through the results and order them by the location url
    for result in resultset:
        # manually pick it apart b/c the db has tag and we want revision = None regardless
        location = Location._from_deprecated_son(result['_id'], course_id.run).replace(revision=None)

        location_url = location.to_deprecated_string()
        if location_url in results_by_url:
            # found either draft or live to complement the other revision.
            # BUGFIX: merge the combined children into the NEW record before it
            # replaces the old one; the previous code wrote the merge into the
            # old record and then overwrote it with `result`, silently
            # discarding the first-seen revision's children.
            existing_children = results_by_url[location_url].get('definition', {}).get('children', [])
            additional_children = result.get('definition', {}).get('children', [])
            total_children = existing_children + additional_children
            result.setdefault('definition', {})['children'] = total_children
        results_by_url[location_url] = result
        if location.category == 'course':
            root = location_url

    # now traverse the tree and compute down the inherited metadata
    metadata_to_inherit = {}

    def _compute_inherited_metadata(url):
        """
        Helper method for computing inherited metadata for a specific location url
        """
        my_metadata = results_by_url[url].get('metadata', {})

        # go through all the children and recurse, but only if we have
        # in the result set. Remember results will not contain leaf nodes
        for child in results_by_url[url].get('definition', {}).get('children', []):
            if child in results_by_url:
                # child overrides win over inherited values
                new_child_metadata = copy.deepcopy(my_metadata)
                new_child_metadata.update(results_by_url[child].get('metadata', {}))
                results_by_url[child]['metadata'] = new_child_metadata
                metadata_to_inherit[child] = new_child_metadata
                _compute_inherited_metadata(child)
            else:
                # this is likely a leaf node, so let's record what metadata we need to inherit
                metadata_to_inherit[child] = my_metadata

    if root is not None:
        _compute_inherited_metadata(root)

    return metadata_to_inherit
def test_deprecated_init(self): with self.assertDeprecationWarning(): loc = Location("foo", "bar", "baz", "cat", "name") self.assertTrue(isinstance(loc, BlockUsageLocator)) self.assertTrue(loc.deprecated)
def chart_update(request):
    """
    AJAX endpoint that dispatches on the requested chart id and returns the
    corresponding learning-analytics visualization data as a JSON response.
    Non-GET requests fall through and return {'success': False}.
    """
    results = {'success': False}
    chart_info_json = dumps(results)
    if request.method == u'GET':
        GET = request.GET
        user_id = GET[u'user_id']
        # Empty user_id means "the requesting user".
        user_id = request.user if user_id == "" else user_id
        chart = int(GET[u'chart'])
        course_key = get_course_key(GET[u'course_id'])

        if chart == VISUALIZATIONS_ID['LA_chapter_time']:
            # Time spent per chapter.
            cs, st = get_DB_course_spent_time(course_key, student_id=user_id)
            student_spent_time = chapter_time_to_js(cs, st)
            chart_info_json = dumps(student_spent_time)
        elif chart == VISUALIZATIONS_ID['LA_course_accesses']:
            # Access counts per course section.
            cs, sa = get_DB_course_section_accesses(course_key, student_id=user_id)
            student_course_accesses = course_accesses_to_js(cs, sa)
            chart_info_json = dumps(student_course_accesses)
        elif chart == VISUALIZATIONS_ID['LA_student_grades']:
            students_grades = get_DB_student_grades(course_key, student_id=user_id)
            chart_info_json = dumps(students_grades)
        elif chart == VISUALIZATIONS_ID['LA_time_schedule']:
            student_time_schedule = get_DB_time_schedule(course_key, student_id=user_id)
            chart_info_json = dumps(student_time_schedule)
        elif chart == VISUALIZATIONS_ID['LA_vid_prob_prog']:
            student_prob_vid_progress = get_DB_course_video_problem_progress(
                course_key, student_id=user_id)
            chart_info_json = dumps(student_prob_vid_progress)
        elif chart == VISUALIZATIONS_ID['LA_video_progress']:
            # Video progress visualization. Video percentage seen total and non-overlapped.
            course = get_course_with_access(user_id, action='load', course_key=course_key, depth=None, check_if_enrolled=False)
            video_descriptors = videos_problems_in(course)[0]
            video_durations = get_info_videos_descriptors(video_descriptors)[2]
            video_names, avg_video_time, video_percentages = get_module_consumption(
                user_id, course_key, 'video', 'video_progress')
            if avg_video_time != []:
                # Convert average watch time into an integer percent of each
                # video's duration (element-wise division).
                all_video_time_percent = map(truediv, avg_video_time, video_durations)
                all_video_time_percent = [
                    int(round(x * 100, 0)) for x in all_video_time_percent
                ]
            else:
                all_video_time_percent = avg_video_time
            column_headers = [
                'Video', 'Different video time', 'Total video time'
            ]
            chart_info_json = ready_for_arraytodatatable(
                column_headers, video_names, video_percentages, all_video_time_percent)
        elif chart == VISUALIZATIONS_ID['LA_video_time']:
            # Time spent on every video resource
            video_names, all_video_time = get_module_consumption(
                user_id, course_key, 'video', 'total_time_vid_prob')[0:2]
            column_headers = ['Video', 'Time watched']
            chart_info_json = ready_for_arraytodatatable(
                column_headers, video_names, all_video_time)
        elif chart == VISUALIZATIONS_ID['LA_problem_time']:
            # Time spent on every problem resource
            problem_names, time_x_problem = get_module_consumption(
                user_id, course_key, 'problem', 'total_time_vid_prob')[0:2]
            column_headers = ['Problem', 'Time on problem']
            chart_info_json = ready_for_arraytodatatable(
                column_headers, problem_names, time_x_problem)
        elif chart == VISUALIZATIONS_ID['LA_repetition_video_interval']:
            # Repetitions per video intervals
            video_name = GET[u'video']
            # Convert the new-style locator string back into a deprecated Location.
            video_id = BlockUsageLocator._from_string(video_name)
            video_id = Location.from_deprecated_string(
                video_id._to_deprecated_string())
            chart_info_json = get_user_video_intervals(user_id, video_id)
        elif chart == VISUALIZATIONS_ID['LA_daily_time']:
            # Daily time spent on video and/or problem resources
            video_days, video_daily_time = get_daily_consumption(
                user_id, course_key, 'video')
            problem_days, problem_daily_time = get_daily_consumption(
                user_id, course_key, 'problem')
            chart_info_json = join_video_problem_time(video_days,
                                                     video_daily_time,
                                                     problem_days,
                                                     problem_daily_time)
        elif chart == VISUALIZATIONS_ID['LA_video_events']:
            # Video events dispersion within video length
            video_name = GET[u'video']
            video_id = BlockUsageLocator._from_string(video_name)
            video_id = Location.from_deprecated_string(
                video_id._to_deprecated_string())
            chart_info_json = get_video_events_info(user_id, video_id)

    # NOTE: `mimetype` is the pre-Django-1.7 spelling of `content_type`.
    return HttpResponse(chart_info_json, mimetype='application/json')
def test_encode_location(self): loc = Location('org', 'course', 'run', 'category', 'name', None) self.assertEqual(loc.to_deprecated_string(), self.encoder.default(loc)) loc = Location('org', 'course', 'run', 'category', 'name', 'version') self.assertEqual(loc.to_deprecated_string(), self.encoder.default(loc))
class TestAsideKeys(TestCase):
    """Test of Aside keys."""

    @ddt.data(*itertools.product([
        AsideUsageKeyV1,
        AsideUsageKeyV2,
    ], [
        Location.from_string('i4x://org/course/cat/name'),
        BlockUsageLocator(CourseLocator('org', 'course', 'run'), 'block_type', 'block_id'),
    ], ['aside', 'aside_b']))
    @ddt.unpack
    def test_usage_round_trip_deserialized(self, key_class, usage_key, aside_type):
        # Wrap, serialize, parse back: every component must survive the round
        # trip for both aside-key versions.
        key = key_class(usage_key, aside_type)
        serialized = text_type(key)
        deserialized = AsideUsageKey.from_string(serialized)
        self.assertEqual(key, deserialized)
        self.assertEqual(usage_key, key.usage_key, usage_key)
        self.assertEqual(usage_key, deserialized.usage_key)
        self.assertEqual(aside_type, key.aside_type)
        self.assertEqual(aside_type, deserialized.aside_type)

    @ddt.data(
        'aside-usage-v1:i4x://org/course/cat/name::aside',
        'aside-usage-v1:block-v1:org+course+cat+type@block_type+block@name::aside',
        'aside-usage-v2:lib-block-v1$:$:+-+branch@-+version@000000000000000000000000+type@-+block@-::0',
        'aside-usage-v2:i4x$://-/-/-/$:$:-::0',
        'aside-usage-v2:i4x$://-/-/-/$:$:$:-::0',
        'aside-usage-v2:i4x$://-/-/$:$:$:$:$:/-::0',
    )
    def test_usage_round_trip_serialized(self, aside_key):
        # Parse a canonical serialized form (including the '$'-escaped v2
        # variants) and re-serialize: the string must come back unchanged.
        deserialized = AsideUsageKey.from_string(aside_key)
        serialized = text_type(deserialized)
        self.assertEqual(aside_key, serialized)

    @ddt.data(*itertools.product([
        AsideDefinitionKeyV1,
        AsideDefinitionKeyV2,
    ], [
        DefinitionLocator('block_type', 'abcd1234abcd1234abcd1234'),
    ], ['aside', 'aside_b']))
    @ddt.unpack
    def test_definition_round_trip_deserialized(self, key_class, definition_key, aside_type):
        # Same round-trip check as for usage keys, but for definition keys.
        key = key_class(definition_key, aside_type)
        serialized = text_type(key)
        deserialized = AsideDefinitionKey.from_string(serialized)
        self.assertEqual(key, deserialized)
        self.assertEqual(definition_key, key.definition_key, definition_key)
        self.assertEqual(definition_key, deserialized.definition_key)
        self.assertEqual(aside_type, key.aside_type)
        self.assertEqual(aside_type, deserialized.aside_type)

    @ddt.data(
        'aside-def-v1:def-v1:abcd1234abcd1234abcd1234+type@block_type::aside',
        'aside-def-v2:def-v1$:abcd1234abcd1234abcd1234+type@block_type::aside')
    def test_definition_round_trip_serialized(self, aside_key):
        # Serialized definition-aside forms (v1 and escaped v2) must
        # round-trip unchanged.
        deserialized = AsideDefinitionKey.from_string(aside_key)
        serialized = text_type(deserialized)
        self.assertEqual(aside_key, serialized)

    @ddt.data(*itertools.product(
        [
            AsideUsageKeyV1,
            AsideUsageKeyV2,
        ],
        [
            ('aside_type', 'bside'),
            ('usage_key', BlockUsageLocator(CourseLocator('borg', 'horse', 'gun'), 'lock_type', 'lock_id')),
            ('block_id', 'lock_id'),
            ('block_type', 'lock_type'),
            # BlockUsageLocator can't `replace` a definition_key, so skip for now
            # ('definition_key', DefinitionLocator('block_type', 'abcd1234abcd1234abcd1234')),
            ('course_key', CourseLocator('borg', 'horse', 'gun')),
        ]))
    @ddt.unpack
    def test_usage_key_replace(self, key_class, attr_value):
        # `replace` should produce a new key with just the named attribute swapped.
        attr, value = attr_value
        key = key_class(
            BlockUsageLocator(CourseLocator('org', 'course', 'run'), 'block_type', 'block_id'),
            'aside')
        new_key = key.replace(**{attr: value})
        self.assertEqual(getattr(new_key, attr), value)

    @ddt.data(*itertools.product([
        AsideDefinitionKeyV1,
        AsideDefinitionKeyV2,
    ], [
        ('aside_type', 'bside'),
        ('definition_key', DefinitionLocator('block_type', 'abcd1234abcd1234abcd1234')),
        ('block_type', 'lock_type'),
    ]))
    @ddt.unpack
    def test_definition_key_replace(self, key_class, attr_value):
        # Same `replace` contract for definition-aside keys.
        attr, value = attr_value
        key = key_class(
            DefinitionLocator('block_type', 'abcd1234abcd1234abcd1234'),
            'aside')
        new_key = key.replace(**{attr: value})
        self.assertEqual(getattr(new_key, attr), value)
def test_encode_location(self): loc = Location('org', 'course', 'run', 'category', 'name', None) self.assertEqual(text_type(loc), self.encoder.default(loc)) loc = Location('org', 'course', 'run', 'category', 'name', 'version') self.assertEqual(text_type(loc), self.encoder.default(loc))
def test_translate_locator(self):
    """
    tests translate_locator_to_location(BlockUsageLocator)
    """
    # lookup for non-existent course
    org = 'foo_org'
    course = 'bar_course'
    run = 'baz_run'
    new_style_org = '{}.geek_dept'.format(org)
    new_style_offering = '{}.{}'.format(course, run)
    prob_course_key = CourseLocator(
        org=new_style_org, offering=new_style_offering,
        branch='published',
    )
    prob_locator = BlockUsageLocator(
        prob_course_key,
        block_type='problem',
        block_id='problem2',
    )
    # With an empty map table, translation must return None rather than raise.
    prob_location = loc_mapper().translate_locator_to_location(prob_locator)
    self.assertIsNone(prob_location, 'found entry in empty map table')

    # Seed the map table with one course's block mapping.
    loc_mapper().create_map_entry(
        SlashSeparatedCourseKey(org, course, run),
        new_style_org, new_style_offering,
        block_map={
            'abc123': {'problem': 'problem2'},
            '48f23a10395384929234': {'chapter': 'chapter48f'},
            'baz_run': {'course': 'root'},
        }
    )
    # only one course matches
    prob_location = loc_mapper().translate_locator_to_location(prob_locator)
    # default branch
    self.assertEqual(prob_location, Location(org, course, run, 'problem', 'abc123', None))
    # test get_course keyword
    prob_location = loc_mapper().translate_locator_to_location(prob_locator, get_course=True)
    self.assertEqual(prob_location, SlashSeparatedCourseKey(org, course, run))
    # explicit branch
    prob_locator = prob_locator.for_branch('draft')
    prob_location = loc_mapper().translate_locator_to_location(prob_locator)
    # Even though the problem was set as draft, we always return revision=None to work
    # with old mongo/draft modulestores.
    self.assertEqual(prob_location, Location(org, course, run, 'problem', 'abc123', None))
    prob_locator = BlockUsageLocator(
        prob_course_key.for_branch('production'),
        block_type='problem', block_id='problem2'
    )
    prob_location = loc_mapper().translate_locator_to_location(prob_locator)
    self.assertEqual(prob_location, Location(org, course, run, 'problem', 'abc123', None))
    # same for chapter except chapter cannot be draft in old system
    chap_locator = BlockUsageLocator(
        prob_course_key.for_branch('production'),
        block_type='chapter', block_id='chapter48f',
    )
    chap_location = loc_mapper().translate_locator_to_location(chap_locator)
    self.assertEqual(chap_location, Location(org, course, run, 'chapter', '48f23a10395384929234'))
    # explicit branch
    chap_locator = chap_locator.for_branch('draft')
    chap_location = loc_mapper().translate_locator_to_location(chap_locator)
    self.assertEqual(chap_location, Location(org, course, run, 'chapter', '48f23a10395384929234'))
    chap_locator = BlockUsageLocator(
        prob_course_key.for_branch('production'), block_type='chapter', block_id='chapter48f'
    )
    chap_location = loc_mapper().translate_locator_to_location(chap_locator)
    self.assertEqual(chap_location, Location(org, course, run, 'chapter', '48f23a10395384929234'))

    # look for non-existent problem
    prob_locator2 = BlockUsageLocator(
        prob_course_key.for_branch('draft'),
        block_type='problem', block_id='problem3'
    )
    prob_location = loc_mapper().translate_locator_to_location(prob_locator2)
    self.assertIsNone(prob_location, 'Found non-existent problem')

    # add a distractor course
    delta_run = 'delta_run'
    new_style_offering = '{}.{}'.format(course, delta_run)
    loc_mapper().create_map_entry(
        SlashSeparatedCourseKey(org, course, delta_run),
        new_style_org, new_style_offering,
        block_map={'abc123': {'problem': 'problem3'}}
    )
    # The original course's translation must be unaffected by the distractor.
    prob_location = loc_mapper().translate_locator_to_location(prob_locator)
    self.assertEqual(prob_location, Location(org, course, run, 'problem', 'abc123', None))
def test_conditional_module(self):
    """Make sure that conditional module works"""
    print "Starting import"
    course = self.get_course('conditional_and_poll')
    print "Course: ", course
    print "id: ", course.id

    def inner_get_module(descriptor):
        """
        Resolve a Location to its descriptor (if needed) and attach a test
        runtime whose get_module recurses back into this helper.
        """
        if isinstance(descriptor, Location):
            location = descriptor
            descriptor = self.modulestore.get_item(location, depth=None)
        descriptor.xmodule_runtime = get_test_system()
        descriptor.xmodule_runtime.descriptor_runtime = descriptor._runtime  # pylint: disable=protected-access
        descriptor.xmodule_runtime.get_module = inner_get_module
        return descriptor

    # edx - HarvardX
    # cond_test - ER22x
    location = Location("HarvardX", "ER22x", "2013_Spring", "conditional", "condone")

    def replace_urls(text, staticfiles_prefix=None, replace_prefix='/static/', course_namespace=None):
        # No-op URL rewriting so the rendered HTML can be compared verbatim below.
        return text
    self.test_system.replace_urls = replace_urls
    self.test_system.get_module = inner_get_module

    module = inner_get_module(location)
    print "module: ", module
    print "module children: ", module.get_children()
    print "module display items (children): ", module.get_display_items()

    # Rendered student view must match the template output exactly.
    html = module.render(STUDENT_VIEW).content
    print "html type: ", type(html)
    print "html: ", html
    html_expect = module.xmodule_runtime.render_template(
        'conditional_ajax.html',
        {
            # Test ajax url is just usage-id / handler_name
            'ajax_url': '{}/xmodule_handler'.format(location.to_deprecated_string()),
            'element_id': u'i4x-HarvardX-ER22x-conditional-condone',
            'depends': u'i4x-HarvardX-ER22x-problem-choiceprob'
        })
    self.assertEqual(html, html_expect)

    gdi = module.get_display_items()
    print "gdi=", gdi

    # Condition not yet met: the gated content must remain hidden.
    ajax = json.loads(module.handle_ajax('', ''))
    module.save()
    print "ajax: ", ajax
    html = ajax['html']
    self.assertFalse(any(['This is a secret' in item for item in html]))

    # Now change state of the capa problem to make it completed
    inner_module = inner_get_module(location.replace(category="problem", name='choiceprob'))
    inner_module.attempts = 1
    # Save our modifications to the underlying KeyValueStore so they can be persisted
    inner_module.save()

    # Condition now satisfied: the gated content must be revealed.
    ajax = json.loads(module.handle_ajax('', ''))
    module.save()
    print "post-attempt ajax: ", ajax
    html = ajax['html']
    self.assertTrue(any(['This is a secret' in item for item in html]))
def test_jumpto_id_invalid_location(self): location = Location('edX', 'toy', 'NoSuchPlace', None, None, None) jumpto_url = '{0}/{1}/jump_to_id/{2}'.format('/courses', self.course_key.to_deprecated_string(), location.to_deprecated_string()) response = self.client.get(jumpto_url) self.assertEqual(response.status_code, 404)
def location(self): return Location('org', 'course', 'run', 'category', self.url_name, None)
def test_translate_location_read_only(self):
    """
    Test the variants of translate_location which don't create entries, just decode
    """
    # lookup before there are any maps
    org = 'foo_org'
    course = 'bar_course'
    run = 'baz_run'
    slash_course_key = SlashSeparatedCourseKey(org, course, run)
    with self.assertRaises(ItemNotFoundError):
        _ = loc_mapper().translate_location(
            Location(org, course, run, 'problem', 'abc123'),
            add_entry_if_missing=False
        )

    new_style_org = '{}.geek_dept'.format(org)
    # NOTE(review): the leading '.' differs from the '{}.{}' offering pattern
    # used by sibling tests -- confirm it is intentional.
    new_style_offering = '.{}.{}'.format(course, run)
    block_map = {
        'abc123': {'problem': 'problem2', 'vertical': 'vertical2'},
        'def456': {'problem': 'problem4'},
        'ghi789': {'problem': 'problem7'},
    }
    loc_mapper().create_map_entry(
        slash_course_key,
        new_style_org, new_style_offering,
        block_map=block_map
    )
    test_problem_locn = Location(org, course, run, 'problem', 'abc123')

    self.translate_n_check(test_problem_locn, new_style_org, new_style_offering, 'problem2', 'published')
    # look for non-existent problem
    with self.assertRaises(ItemNotFoundError):
        loc_mapper().translate_location(
            Location(org, course, run, 'problem', '1def23'),
            add_entry_if_missing=False
        )
    # A location with no category must be rejected outright.
    test_no_cat_locn = test_problem_locn.replace(category=None)
    with self.assertRaises(InvalidLocationError):
        loc_mapper().translate_location(
            slash_course_key.make_usage_key(None, 'abc123'), test_no_cat_locn, False, False
        )
    test_no_cat_locn = test_no_cat_locn.replace(name='def456')

    self.translate_n_check(
        test_no_cat_locn, new_style_org, new_style_offering, 'problem4', 'published'
    )

    # add a distractor course (note that abc123 has a different translation in this one)
    distractor_block_map = {
        'abc123': {'problem': 'problem3'},
        'def456': {'problem': 'problem4'},
        'ghi789': {'problem': 'problem7'},
    }
    run = 'delta_run'
    test_delta_new_org = '{}.geek_dept'.format(org)
    test_delta_new_offering = '{}.{}'.format(course, run)
    loc_mapper().create_map_entry(
        SlashSeparatedCourseKey(org, course, run),
        test_delta_new_org, test_delta_new_offering,
        block_map=distractor_block_map
    )
    # test that old translation still works
    self.translate_n_check(
        test_problem_locn, new_style_org, new_style_offering, 'problem2', 'published'
    )
    # and new returns new id
    self.translate_n_check(
        test_problem_locn.replace(run=run), test_delta_new_org, test_delta_new_offering, 'problem3', 'published'
    )