Example #1
0
def update_checklist(request, org, course, name, checklist_index=None):
    """
    RESTful CRUD operations on course checklists. The payload is a JSON representation of
    the modified checklist. For PUT or POST requests, the index of the
    checklist being modified must be included; the returned payload will
    be just that one checklist. For GET requests, the returned payload
    is a JSON representation of the list of all checklists.

    org, course, name: Attributes of the Location for the item to edit
    """
    location = get_location_and_verify_access(request, org, course, name)
    modulestore = get_modulestore(location)
    course_module = modulestore.get_item(location)

    real_method = get_request_method(request)
    if real_method == 'POST' or real_method == 'PUT':
        if checklist_index is not None and 0 <= int(checklist_index) < len(course_module.checklists):
            index = int(checklist_index)
            course_module.checklists[index] = json.loads(request.body)
            checklists, modified = expand_checklist_action_urls(course_module)
            modulestore.update_metadata(location, own_metadata(course_module))
            return HttpResponse(json.dumps(checklists[index]), mimetype="application/json")
        else:
            return HttpResponseBadRequest(
                "Could not save checklist state because the checklist index was out of range or unspecified.",
                content_type="text/plain")
    elif request.method == 'GET':
        # In the JavaScript view initialize method, we do a fetch to get all
        # the checklists.
        checklists, modified = expand_checklist_action_urls(course_module)
        if modified:
            modulestore.update_metadata(location, own_metadata(course_module))
        return HttpResponse(json.dumps(checklists), mimetype="application/json")
    else:
        return HttpResponseBadRequest("Unsupported request.", content_type="text/plain")
Example #2
0
def update_checklist(request, org, course, name, checklist_index=None):
    """
    RESTful CRUD operations on course checklists. The payload is a JSON representation of
    the modified checklist. For PUT or POST requests, the index of the
    checklist being modified must be included; the returned payload will
    be just that one checklist. For GET requests, the returned payload
    is a JSON representation of the list of all checklists.

    org, course, name: Attributes of the Location for the item to edit
    """
    location = get_location_and_verify_access(request, org, course, name)
    modulestore = get_modulestore(location)
    course_module = modulestore.get_item(location)

    if request.method in ("POST", "PUT"):
        if checklist_index is not None and 0 <= int(checklist_index) < len(course_module.checklists):
            index = int(checklist_index)
            course_module.checklists[index] = json.loads(request.body)
            # seeming noop which triggers kvs to record that the metadata is not default
            course_module.checklists = course_module.checklists
            checklists, _ = expand_checklist_action_urls(course_module)
            course_module.save()
            modulestore.update_metadata(location, own_metadata(course_module))
            return JsonResponse(checklists[index])
        else:
            return HttpResponseBadRequest(
                "Could not save checklist state because the checklist index was out of range or unspecified.",
                content_type="text/plain")
    elif request.method == 'GET':
        # In the JavaScript view initialize method, we do a fetch to get all the checklists.
        checklists, modified = expand_checklist_action_urls(course_module)
        if modified:
            course_module.save()
            modulestore.update_metadata(location, own_metadata(course_module))
        return JsonResponse(checklists)
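The two versions of update_checklist above differ only in how they persist the edited checklist (the newer one saves the module and returns a JsonResponse). Below is a minimal, hypothetical sketch of how a caller might exercise the POST branch through Django's test client; the URL value and helper name are illustrative, not the project's actual route names:

import json

def post_checklist(client, update_url, checklist):
    """
    Sketch: send one modified checklist and return the server's JSON copy.

    `client` is a django.test.Client; `update_url` is assumed to already point at
    update_checklist with a checklist_index in it (e.g. ".../checklists/update/0").
    """
    response = client.post(
        update_url,
        data=json.dumps(checklist),
        content_type="application/json",
    )
    assert response.status_code == 200
    return json.loads(response.content)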
Example #3
0
def textbooks_detail_handler(request, tid, tag=None, package_id=None, branch=None, version_guid=None, block=None):
    """
    JSON API endpoint for manipulating a textbook via its internal ID.
    Used by the Backbone application.

    GET
        json: return JSON representation of textbook
    POST or PUT
        json: update textbook based on provided information
    DELETE
        json: remove textbook
    """
    __, course = _get_locator_and_course(
        package_id, branch, version_guid, block, request.user
    )
    store = get_modulestore(course.location)
    matching_id = [tb for tb in course.pdf_textbooks
                   if str(tb.get("id")) == str(tid)]
    if matching_id:
        textbook = matching_id[0]
    else:
        textbook = None

    if request.method == 'GET':
        if not textbook:
            return JsonResponse(status=404)
        return JsonResponse(textbook)
    elif request.method in ('POST', 'PUT'):
        # can be either; sometimes Django rewrites one to the other
        try:
            new_textbook = validate_textbook_json(request.body)
        except TextbookValidationError as err:
            return JsonResponse({"error": err.message}, status=400)
        new_textbook["id"] = tid
        if textbook:
            i = course.pdf_textbooks.index(textbook)
            new_textbooks = course.pdf_textbooks[0:i]
            new_textbooks.append(new_textbook)
            new_textbooks.extend(course.pdf_textbooks[i + 1:])
            course.pdf_textbooks = new_textbooks
        else:
            course.pdf_textbooks.append(new_textbook)
        store.update_metadata(
            course.location,
            own_metadata(course)
        )
        return JsonResponse(new_textbook, status=201)
    elif request.method == 'DELETE':
        if not textbook:
            return JsonResponse(status=404)
        i = course.pdf_textbooks.index(textbook)
        new_textbooks = course.pdf_textbooks[0:i]
        new_textbooks.extend(course.pdf_textbooks[i + 1:])
        course.pdf_textbooks = new_textbooks
        store.update_metadata(
            course.location,
            own_metadata(course)
        )
        return JsonResponse()
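For orientation, this is the kind of JSON body a POST or PUT to this handler carries. The field names mirror the pdf_textbooks entries used elsewhere in these examples; the values are invented, and the handler overwrites "id" with the tid from the URL:

# Illustrative only: a textbook entry of the shape validate_textbook_json is expected to accept here.
SAMPLE_TEXTBOOK = {
    "tab_title": "Economics",
    "chapters": [
        {"title": "Chapter 1", "url": "/static/ch1.pdf"},
        {"title": "Chapter 2", "url": "/static/ch2.pdf"},
    ],
}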
Example #4
0
def get_d3_sequential_open_distrib(course_id):
    """
    Returns how many students opened each sequential/subsection, per section, with the data already
    in the format the d3 function expects.

    `course_id` the course ID for the course of interest

    Returns a list in section order; each entry is a dict with:
      'display_name' - display name for the section
      'data' - data for the d3_stacked_bar_graph function of how many students opened each sequential/subsection
    """
    sequential_open_distrib = get_sequential_open_distrib(course_id)

    d3_data = []

    # Retrieve course object down to subsection
    course = modulestore().get_instance(course_id, CourseDescriptor.id_to_location(course_id), depth=2)

    # Iterate through sections, subsections
    for section in course.get_children():
        curr_section = {}
        curr_section['display_name'] = own_metadata(section).get('display_name', '')
        data = []
        c_subsection = 0

        # Construct data for each subsection to be sent to d3
        for subsection in section.get_children():
            c_subsection += 1
            subsection_name = own_metadata(subsection).get('display_name', '')

            num_students = 0
            if subsection.location.url() in sequential_open_distrib:
                num_students = sequential_open_distrib[subsection.location.url()]

            stack_data = []

            # Tooltip parameters for subsection in open_distribution view
            tooltip = {
                'type': 'subsection',
                'num_students': num_students,
                'subsection_num': c_subsection,
                'subsection_name': subsection_name
            }

            stack_data.append({
                'color': 0,
                'value': num_students,
                'tooltip': tooltip,
                'module_url': subsection.location.url(),
            })
            subsection = {
                'xValue': "SS {0}".format(c_subsection),
                'stackData': stack_data,
            }
            data.append(subsection)

        curr_section['data'] = data
        d3_data.append(curr_section)

    return d3_data
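A hand-written sample of the structure this helper returns, for one section with a single subsection; every value is illustrative:

SAMPLE_D3_DATA = [
    {
        'display_name': 'Week 1',
        'data': [
            {
                'xValue': 'SS 1',
                'stackData': [
                    {
                        'color': 0,
                        'value': 42,  # num_students who opened the subsection
                        'module_url': 'i4x://Org/Course/sequential/intro',
                        'tooltip': {
                            'type': 'subsection',
                            'num_students': 42,
                            'subsection_num': 1,
                            'subsection_name': 'Introduction',
                        },
                    },
                ],
            },
        ],
    },
]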
Example #5
0
    def assertCoursesEqual(self, course1_id, course2_id):
        """
        Verifies that the content of the two given courses is equal
        """
        course1_items = self.store.get_items(course1_id)
        course2_items = self.store.get_items(course2_id)
        self.assertGreater(len(course1_items), 0)  # ensure it found content instead of [] == []
        self.assertEqual(len(course1_items), len(course2_items))

        for course1_item in course1_items:
            course2_item_location = course1_item.location.map_into_course(course2_id)
            if course1_item.location.category == 'course':
                # mongo uses the run as the name, split uses 'course'
                store = self.store._get_modulestore_for_courseid(course2_id)  # pylint: disable=protected-access
                new_name = 'course' if isinstance(store, SplitMongoModuleStore) else course2_item_location.run
                course2_item_location = course2_item_location.replace(name=new_name)
            course2_item = self.store.get_item(course2_item_location)

            try:
                # compare published state
                self.assertEqual(
                    self.store.compute_publish_state(course1_item),
                    self.store.compute_publish_state(course2_item)
                )
            except AssertionError:
                # old mongo calls things draft if draft exists even if it's != published; so, do more work
                self.assertEqual(
                    self.compute_real_state(course1_item),
                    self.compute_real_state(course2_item)
                )

            # compare data
            self.assertEqual(hasattr(course1_item, 'data'), hasattr(course2_item, 'data'))
            if hasattr(course1_item, 'data'):
                self.assertEqual(course1_item.data, course2_item.data)

            # compare meta-data
            self.assertEqual(own_metadata(course1_item), own_metadata(course2_item))

            # compare children
            self.assertEqual(course1_item.has_children, course2_item.has_children)
            if course1_item.has_children:
                expected_children = []
                for course1_item_child in course1_item.children:
                    expected_children.append(
                        course1_item_child.map_into_course(course2_id)
                    )
                # also process course2_children just in case they have version guids
                course2_children = [child.version_agnostic() for child in course2_item.children]
                self.assertEqual(expected_children, course2_children)

        # compare assets
        content_store = self.store.contentstore
        course1_assets, count_course1_assets = content_store.get_all_content_for_course(course1_id)
        _, count_course2_assets = content_store.get_all_content_for_course(course2_id)
        self.assertEqual(count_course1_assets, count_course2_assets)
        for asset in course1_assets:
            asset_son = asset.get('content_son', asset['_id'])
            self.assertAssetsEqual(asset_son, course1_id, course2_id)
Example #6
0
    def assertCoursesEqual(self, course1_id, course2_id):
        """
        Verifies that the content of the two given courses is equal
        """
        course1_items = self.store.get_items(course1_id)
        course2_items = self.store.get_items(course2_id)
        self.assertGreater(len(course1_items), 0)  # ensure it found content instead of [] == []
        if len(course1_items) != len(course2_items):
            course1_block_ids = set([item.location.block_id for item in course1_items])
            course2_block_ids = set([item.location.block_id for item in course2_items])
            raise AssertionError(
                u"Course1 extra blocks: {}; course2 extra blocks: {}".format(
                    course1_block_ids - course2_block_ids, course2_block_ids - course1_block_ids
                )
            )

        for course1_item in course1_items:
            course1_item_loc = course1_item.location
            course2_item_loc = course2_id.make_usage_key(course1_item_loc.block_type, course1_item_loc.block_id)
            if course1_item_loc.block_type == 'course':
                # mongo uses the run as the name, split uses 'course'
                store = self.store._get_modulestore_for_courselike(course2_id)  # pylint: disable=protected-access
                new_name = 'course' if isinstance(store, SplitMongoModuleStore) else course2_item_loc.run
                course2_item_loc = course2_item_loc.replace(name=new_name)
            course2_item = self.store.get_item(course2_item_loc)

            # compare published state
            self.assertEqual(
                self.store.has_published_version(course1_item),
                self.store.has_published_version(course2_item)
            )

            # compare data
            self.assertEqual(hasattr(course1_item, 'data'), hasattr(course2_item, 'data'))
            if hasattr(course1_item, 'data'):
                self.assertEqual(course1_item.data, course2_item.data)

            # compare meta-data
            self.assertEqual(own_metadata(course1_item), own_metadata(course2_item))

            # compare children
            self.assertEqual(course1_item.has_children, course2_item.has_children)
            if course1_item.has_children:
                expected_children = []
                for course1_item_child in course1_item.children:
                    expected_children.append(
                        course2_id.make_usage_key(course1_item_child.block_type, course1_item_child.block_id)
                    )
                self.assertEqual(expected_children, course2_item.children)

        # compare assets
        content_store = self.store.contentstore
        course1_assets, count_course1_assets = content_store.get_all_content_for_course(course1_id)
        _, count_course2_assets = content_store.get_all_content_for_course(course2_id)
        self.assertEqual(count_course1_assets, count_course2_assets)
        for asset in course1_assets:
            asset_son = asset.get('content_son', asset['_id'])
            self.assertAssetsEqual(asset_son, course1_id, course2_id)
Example #7
0
def textbook_by_id(request, org, course, name, tid):
    """
    JSON API endpoint for manipulating a textbook via its internal ID.
    Used by the Backbone application.
    """
    location = get_location_and_verify_access(request, org, course, name)
    store = get_modulestore(location)
    course_module = store.get_item(location, depth=3)
    matching_id = [tb for tb in course_module.pdf_textbooks
                   if str(tb.get("id")) == str(tid)]
    if matching_id:
        textbook = matching_id[0]
    else:
        textbook = None

    if request.method == 'GET':
        if not textbook:
            return JsonResponse(status=404)
        return JsonResponse(textbook)
    elif request.method in ('POST', 'PUT'):
        # can be either; sometimes Django rewrites one to the other
        try:
            new_textbook = validate_textbook_json(request.body)
        except TextbookValidationError as err:
            return JsonResponse({"error": err.message}, status=400)
        new_textbook["id"] = tid
        if textbook:
            i = course_module.pdf_textbooks.index(textbook)
            new_textbooks = course_module.pdf_textbooks[0:i]
            new_textbooks.append(new_textbook)
            new_textbooks.extend(course_module.pdf_textbooks[i + 1:])
            course_module.pdf_textbooks = new_textbooks
        else:
            course_module.pdf_textbooks.append(new_textbook)
        # Save the data that we've just changed to the underlying
        # MongoKeyValueStore before we update the mongo datastore.
        course_module.save()
        store.update_metadata(
            course_module.location,
            own_metadata(course_module)
        )
        return JsonResponse(new_textbook, status=201)
    elif request.method == 'DELETE':
        if not textbook:
            return JsonResponse(status=404)
        i = course_module.pdf_textbooks.index(textbook)
        new_textbooks = course_module.pdf_textbooks[0:i]
        new_textbooks.extend(course_module.pdf_textbooks[i + 1:])
        course_module.pdf_textbooks = new_textbooks
        course_module.save()
        store.update_metadata(
            course_module.location,
            own_metadata(course_module)
        )
        return JsonResponse()
Example #8
0
    def assertCoursesEqual(self, course1_id, course2_id):
        """
        Verifies that the content of the two given courses is equal
        """
        course1_items = self.store.get_items(course1_id)
        course2_items = self.store.get_items(course2_id)
        self.assertGreater(len(course1_items), 0)  # ensure it found content instead of [] == []
        self.assertEqual(len(course1_items), len(course2_items))

        for course1_item in course1_items:
            course2_item_location = course1_item.location.map_into_course(course2_id)
            if course1_item.location.category == 'course':
                course2_item_location = course2_item_location.replace(name=course2_item_location.run)
            course2_item = self.store.get_item(course2_item_location)

            # compare published state
            self.assertEqual(
                self.store.compute_publish_state(course1_item),
                self.store.compute_publish_state(course2_item)
            )

            # compare data
            self.assertEqual(hasattr(course1_item, 'data'), hasattr(course2_item, 'data'))
            if hasattr(course1_item, 'data'):
                self.assertEqual(course1_item.data, course2_item.data)

            # compare meta-data
            self.assertEqual(own_metadata(course1_item), own_metadata(course2_item))

            # compare children
            self.assertEqual(course1_item.has_children, course2_item.has_children)
            if course1_item.has_children:
                expected_children = []
                for course1_item_child in course1_item.children:
                    expected_children.append(
                        course1_item_child.map_into_course(course2_id)
                    )
                self.assertEqual(expected_children, course2_item.children)

        # compare assets
        content_store = contentstore()
        course1_assets, count_course1_assets = content_store.get_all_content_for_course(course1_id)
        _, count_course2_assets = content_store.get_all_content_for_course(course2_id)
        self.assertEqual(count_course1_assets, count_course2_assets)
        for asset in course1_assets:
            asset_id = asset.get('content_son', asset['_id'])
            asset_key = StaticContent.compute_location(course1_id, asset_id['name'])
            self.assertAssetsEqual(asset_key, course1_id, course2_id)
Example #9
0
    def test_view_index_xhr_content(self):
        "Check that the response maps to the content of the modulestore"
        content = [
            {
                "tab_title": "my textbook",
                "url": "/abc.pdf",
                "id": "992"
            }, {
                "tab_title": "pineapple",
                "id": "0pineapple",
                "chapters": [
                    {
                        "title": "The Fruit",
                        "url": "/a/b/fruit.pdf",
                    }, {
                        "title": "The Legend",
                        "url": "/b/c/legend.pdf",
                    }
                ]
            }
        ]
        self.course.pdf_textbooks = content
        store = get_modulestore(self.course.location)
        store.update_metadata(self.course.location, own_metadata(self.course))

        resp = self.client.get(
            self.url,
            HTTP_ACCEPT="application/json",
            HTTP_X_REQUESTED_WITH='XMLHttpRequest'
        )
        self.assert2XX(resp.status_code)
        obj = json.loads(resp.content)
        self.assertEqual(content, obj)
Example #10
0
def set_discussion_visibility(request, course_id, comment_id, discussion_visibility):
    import logging
    log = logging.getLogger("tracking")
    log.debug("comment_id: %s", comment_id)
    log.debug("discussion_visibility: %s", discussion_visibility)
    # The comment id doubles as the location of the discussion module being edited.
    item_location = comment_id

    store = get_modulestore(Location(item_location))
    metadata = {'discussion_visibility': discussion_visibility}
    # the postback is not the complete metadata, as there's system metadata which is
    # not presented to the end-user for editing. So let's fetch the original and
    # 'apply' the submitted metadata, so we don't end up deleting system metadata
    existing_item = modulestore().get_item(item_location)
    # update existing metadata with submitted metadata (which can be partial)
    # IMPORTANT NOTE: if the client passed 'null' (None) for a piece of metadata that means 'remove it'. If
    # the intent is to make it None, use the nullout field
    for metadata_key, value in metadata.items():
        field = existing_item.fields[metadata_key]

        if value is None:
            field.delete_from(existing_item)
        else:
            value = field.from_json(value)
            field.write_to(existing_item, value)
    # Save the data that we've just changed to the underlying
    # MongoKeyValueStore before we update the mongo datastore.
    existing_item.save()
    # commit to datastore
    store.update_metadata(item_location, own_metadata(existing_item))
    return "true"
Example #11
0
def _clone_modules(modulestore, modules, dest_location):
    for module in modules:
        original_loc = Location(module.location)

        if original_loc.category != 'course':
            module.location = module.location._replace(
                tag=dest_location.tag, org=dest_location.org, course=dest_location.course)
        else:
            # on the course module we also have to update the module name
            module.location = module.location._replace(
                tag=dest_location.tag, org=dest_location.org, course=dest_location.course, name=dest_location.name)

        print "Cloning module {0} to {1}....".format(original_loc, module.location)

        # NOTE: usage of the the internal module.xblock_kvs._data does not include any 'default' values for the fields
        modulestore.update_item(module.location, module.xblock_kvs._data)

        # repoint children
        if module.has_children:
            new_children = []
            for child_loc_url in module.children:
                child_loc = Location(child_loc_url)
                child_loc = child_loc._replace(
                    tag=dest_location.tag,
                    org=dest_location.org,
                    course=dest_location.course
                )
                new_children.append(child_loc.url())

            modulestore.update_children(module.location, new_children)

        # save metadata
        modulestore.update_metadata(module.location, own_metadata(module))
Example #12
0
    def update_metadata(self, location, metadata):
        """
        Set the metadata for the item specified by the location to
        metadata

        location: Something that can be passed to Location
        metadata: A nested dictionary of module metadata
        """
        # VS[compat] cdodge: This is a hack because static_tabs also have references from the course module, so
        # if we add one then we need to also add it to the policy information (i.e. metadata)
        # we should remove this once we can break this reference from the course to static tabs
        loc = Location(location)
        if loc.category == 'static_tab':
            course = self._get_course_for_item(loc)
            existing_tabs = course.tabs or []
            for tab in existing_tabs:
                if tab.get('url_slug') == loc.name:
                    tab['name'] = metadata.get('display_name', tab.get('name'))
                    break
            course.tabs = existing_tabs
            # Save the updates to the course to the MongoKeyValueStore
            course.save()
            self.update_metadata(course.location, own_metadata(course))

        self._update_single_item(location, {'metadata': metadata})
        # recompute (and update) the metadata inheritance tree which is cached
        self.refresh_cached_metadata_inheritance_tree(loc)
        self.fire_updated_modulestore_signal(get_course_id_no_run(Location(location)), Location(location))
Example #13
0
def _get_module_info(usage_loc, rewrite_static_links=True):
    """
    metadata, data, id representation of a leaf module fetcher.
    :param usage_loc: A BlockUsageLocator
    """
    old_location = loc_mapper().translate_locator_to_location(usage_loc)
    store = get_modulestore(old_location)
    try:
        module = store.get_item(old_location)
    except ItemNotFoundError:
        if old_location.category in CREATE_IF_NOT_FOUND:
            # Create a new one for certain categories only. Used for course info handouts.
            store.create_and_save_xmodule(old_location)
            module = store.get_item(old_location)
        else:
            raise

    data = getattr(module, 'data', '')
    if rewrite_static_links:
        # we pass a partially bogus course_id as we don't have the RUN information passed yet
        # through the CMS. Also, the contentstore is not RUN-aware at this point in time.
        data = replace_static_urls(
            data,
            None,
            course_id=module.location.org + '/' + module.location.course + '/BOGUS_RUN_REPLACE_WHEN_AVAILABLE'
        )

    # Note that children aren't being returned until we have a use case.
    return {
        'id': unicode(usage_loc),
        'data': data,
        'metadata': own_metadata(module)
    }
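The dictionary returned above, sketched with invented values (the real 'id' is whatever unicode(usage_loc) yields for the locator in use):

# Illustrative return value from _get_module_info; all values are made up.
SAMPLE_MODULE_INFO = {
    'id': 'example-block-usage-locator',          # unicode(usage_loc)
    'data': '<p>Welcome to the course.</p>',      # module data, static links possibly rewritten
    'metadata': {'display_name': 'Welcome'},      # own_metadata(module)
}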
Example #14
0
    def update_item(self, xblock, user_id=None, allow_not_found=False, force=False):
        """
        Update the persisted version of xblock to reflect its current values.

        xblock: which xblock to persist
        user_id: who made the change (ignored for now by this modulestore)
        allow_not_found: whether to create a new object if one didn't already exist or give an error
        force: force is meaningless for this modulestore
        """
        try:
            definition_data = self._convert_reference_fields_to_strings(xblock, xblock.get_explicitly_set_fields_by_scope())
            payload = {
                'definition.data': definition_data,
                'metadata': self._convert_reference_fields_to_strings(xblock, own_metadata(xblock)),
            }
            if xblock.has_children:
                children = self._convert_reference_fields_to_strings(xblock, {'children': xblock.children})
                payload.update({'definition.children': children['children']})
            self._update_single_item(xblock.scope_ids.usage_id, payload)
            # for static tabs, their containing course also records their display name
            if xblock.scope_ids.block_type == 'static_tab':
                course = self._get_course_for_item(xblock.scope_ids.usage_id)
                # find the course's reference to this tab and update the name.
                static_tab = CourseTabList.get_tab_by_slug(course.tabs, xblock.scope_ids.usage_id.name)
                # only update if changed
                if static_tab and static_tab['name'] != xblock.display_name:
                    static_tab['name'] = xblock.display_name
                    self.update_item(course, user_id)

            # recompute (and update) the metadata inheritance tree which is cached
            self.refresh_cached_metadata_inheritance_tree(xblock.scope_ids.usage_id.course_key, xblock.runtime)
            # fire signal that we've written to DB
        except ItemNotFoundError:
            if not allow_not_found:
                raise
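A minimal caller sketch for this method, assuming `store` is an instance of this modulestore and `block` was fetched from it; the field change and helper name are illustrative:

def rename_block(store, block, new_name, user_id):
    """Sketch: change a settings-scoped field and persist it via update_item."""
    block.display_name = new_name
    store.update_item(block, user_id)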
Example #15
0
def _get_module_info(xblock, rewrite_static_links=True, include_ancestor_info=False, include_publishing_info=False):
    """
    metadata, data, id representation of a leaf module fetcher.
    :param xblock: the xblock whose info to return
    """
    with modulestore().bulk_operations(xblock.location.course_key):
        data = getattr(xblock, 'data', '')
        if rewrite_static_links:
            data = replace_static_urls(
                data,
                None,
                course_id=xblock.location.course_key
            )

        # Pre-cache has_changes for the entire course because we'll need it for the ancestor info,
        # except for library blocks, which don't [yet] use draft/publish
        if not isinstance(xblock.location, LibraryUsageLocator):
            modulestore().has_changes(modulestore().get_course(xblock.location.course_key, depth=None))

        # Note that children aren't being returned until we have a use case.
        xblock_info = create_xblock_info(
            xblock, data=data, metadata=own_metadata(xblock), include_ancestor_info=include_ancestor_info
        )
        if include_publishing_info:
            add_container_page_publishing_info(xblock, xblock_info)
        return xblock_info
Example #16
0
def create_textbook(request, org, course, name):
    """
    JSON API endpoint for creating a textbook. Used by the Backbone application.
    """
    location = get_location_and_verify_access(request, org, course, name)
    store = get_modulestore(location)
    course_module = store.get_item(location, depth=0)

    try:
        textbook = validate_textbook_json(request.body)
    except TextbookValidationError as err:
        return JsonResponse({"error": err.message}, status=400)
    if not textbook.get("id"):
        tids = set(t["id"] for t in course_module.pdf_textbooks if "id" in t)
        textbook["id"] = assign_textbook_id(textbook, tids)
    existing = course_module.pdf_textbooks
    existing.append(textbook)
    course_module.pdf_textbooks = existing
    if not any(tab['type'] == 'pdf_textbooks' for tab in course_module.tabs):
        tabs = course_module.tabs
        tabs.append({"type": "pdf_textbooks"})
        course_module.tabs = tabs
    # Save the data that we've just changed to the underlying
    # MongoKeyValueStore before we update the mongo datastore.
    course_module.save()
    store.update_metadata(course_module.location, own_metadata(course_module))
    resp = JsonResponse(textbook, status=201)
    resp["Location"] = reverse("textbook_by_id", kwargs={
        'org': org,
        'course': course,
        'name': name,
        'tid': textbook["id"],
    })
    return resp
Example #17
0
    def export_to_xml(self, resource_fs):
        """
        Returns an xml string representing this module, and all modules
        underneath it.  May also write required resources out to resource_fs

        Assumes that modules have single parentage (that no module appears twice
        in the same course), and that it is thus safe to nest modules as xml
        children as appropriate.

        The returned XML should be able to be parsed back into an identical
        XModuleDescriptor using the from_xml method with the same system, org,
        and course

        resource_fs is a pyfilesystem object (from the fs package)
        """

        # Get the definition
        xml_object = self.definition_to_xml(resource_fs)
        self.__class__.clean_metadata_from_xml(xml_object)

        # Set the tag so we get the file path right
        xml_object.tag = self.category

        # Add the non-inherited metadata
        for attr in sorted(own_metadata(self)):
            # don't want e.g. data_dir
            if attr not in self.metadata_to_strip and attr not in self.metadata_to_export_to_policy:
                val = serialize_field(self._field_data.get(self, attr))
                try:
                    xml_object.set(attr, val)
                except Exception as err:
                    logging.exception(
                        'Failed to serialize metadata attribute {0} with value {1}. '
                        'This could mean data loss!!!  Exception: {2}'.format(attr, val, err)
                    )
Example #18
0
def get_checklists(request, org, course, name):
    """
    Send models, views, and html for displaying the course checklists.

    org, course, name: Attributes of the Location for the item to edit
    """
    location = get_location_and_verify_access(request, org, course, name)

    modulestore = get_modulestore(location)
    course_module = modulestore.get_item(location)

    # If course was created before checklists were introduced, copy them over from the template.
    copied = False
    if not course_module.checklists:
        course_module.checklists = CourseDescriptor.checklists.default
        copied = True

    checklists, modified = expand_checklist_action_urls(course_module)
    if copied or modified:
        course_module.save()
        modulestore.update_metadata(location, own_metadata(course_module))
    return render_to_response('checklists.html',
                              {
                                  'context_course': course_module,
                                  'checklists': checklists
                              })
Example #19
0
    def publish(self, location, published_by_id):
        """
        Save a current draft to the underlying modulestore
        """
        try:
            original_published = super(DraftModuleStore, self).get_item(location)
        except ItemNotFoundError:
            original_published = None

        draft = self.get_item(location)

        draft.published_date = datetime.now(UTC)
        draft.published_by = published_by_id
        super(DraftModuleStore, self).update_item(location, draft.get_explicitly_set_fields_by_scope(Scope.content))
        if draft.has_children:
            if original_published is not None:
                # see if children were deleted. 2 reasons for children lists to differ:
                #   1) child deleted
                #   2) child moved
                for child in original_published.children:
                    if child not in draft.children:
                        rents = [Location(mom) for mom in self.get_parent_locations(child, None)]
                        if (len(rents) == 1 and rents[0] == Location(location)):  # the 1 is this original_published
                            self.delete_item(child, True)
            super(DraftModuleStore, self).update_children(location, draft.children)
        super(DraftModuleStore, self).update_metadata(location, own_metadata(draft))
        self.delete_item(location)
Example #20
0
def primitive_insert(course, num, tab_type, name):
    "Inserts a new tab at the given number (0 based)."
    validate_args(num, tab_type)
    new_tab = {u'type': unicode(tab_type), u'name': unicode(name)}
    tabs = course.tabs
    tabs.insert(num, new_tab)
    modulestore('direct').update_metadata(course.location, own_metadata(course))
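A usage sketch for primitive_insert, assuming `course` is a loaded course module and that validate_args accepts the given position and tab type; both values are made up:

def add_syllabus_tab(course):
    """Sketch: insert a hypothetical 'Syllabus' static tab as the fourth tab (0-based index 3)."""
    primitive_insert(course, 3, 'static_tab', 'Syllabus')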
Example #21
0
def save_module(item):
    """
    Proceed with additional save operations.
    """
    item.save()
    store = get_modulestore(Location(item.id))
    store.update_metadata(item.id, own_metadata(item))
Example #22
0
def clone_item(request):
    parent_location = Location(request.POST['parent_location'])
    template = Location(request.POST['template'])

    display_name = request.POST.get('display_name')

    if not has_access(request.user, parent_location):
        raise PermissionDenied()

    parent = get_modulestore(template).get_item(parent_location)
    dest_location = parent_location._replace(
        category=template.category, name=uuid4().hex)

    new_item = get_modulestore(template).clone_item(template, dest_location)

    # replace the display name with an optional parameter passed in from the
    # caller
    if display_name is not None:
        new_item.display_name = display_name

    get_modulestore(template).update_metadata(
        new_item.location.url(), own_metadata(new_item))

    if new_item.location.category not in DETACHED_CATEGORIES:
        get_modulestore(parent.location).update_children(
            parent_location, parent.children + [new_item.location.url()])

    return HttpResponse(json.dumps({'id': dest_location.url()}))
Example #23
0
    def check_components_on_page(self, component_types, expected_types):
        """
        Ensure that the right types end up on the page.

        component_types is the list of advanced components.

        expected_types is the list of elements that should appear on the page.

        expected_types and component_types should be similar, but not
        exactly the same -- for example, 'videoalpha' in
        component_types should cause 'Video Alpha' to be present.
        """
        store = modulestore('direct')
        import_from_xml(store, 'common/test/data/', ['simple'])

        course = store.get_item(Location(['i4x', 'edX', 'simple',
                                          'course', '2012_Fall', None]), depth=None)

        course.advanced_modules = component_types

        store.update_metadata(course.location, own_metadata(course))

        # just pick one vertical
        descriptor = store.get_items(Location('i4x', 'edX', 'simple', 'vertical', None, None))[0]

        resp = self.client.get(reverse('edit_unit', kwargs={'location': descriptor.location.url()}))
        self.assertEqual(resp.status_code, 200)

        for expected in expected_types:
            self.assertIn(expected, resp.content)
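A hypothetical companion test showing how the helper above would be invoked, mirroring the videoalpha mapping described in its docstring:

    def test_video_alpha_shows_up(self):
        """Sketch: enabling the 'videoalpha' advanced component should render 'Video Alpha'."""
        self.check_components_on_page(['videoalpha'], ['Video Alpha'])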
Example #24
0
    def editor_saved(self, user, old_metadata, old_content):
        """
        Used to update video values during the `self`:save method from CMS.

        old_metadata: dict of `self` fields with scope=settings which were explicitly set by the user.
        old_content: same as `old_metadata` but for scope=content.

        Due to the nature of the code flow in item.py::_save_item, by the time this method is called
        the fields of this `self` instance have already been updated, but not yet saved.
        To obtain the values that were changed by user input, compare own_metadata(self) with old_metadata.

        The video player has two tabs, and because of how the tabs are synced, metadata from the Basic tab
        is always sent the first time the video player is edited and saved, for example:
        {'youtube_id_1_0': u'3_yD_cEKoCk', 'display_name': u'Video', 'sub': u'3_yD_cEKoCk', 'html5_sources': []},
        which is why these fields will always be present in old_metadata after the first save. This should be fixed.
        On subsequent save requests html5_sources are always sent too, regardless of whether the user changed them.
        That means html5_sources is always in the list of changed fields (the `metadata` param in save_item).
        This should be fixed too.
        """
        metadata_was_changed_by_user = old_metadata != own_metadata(self)
        if metadata_was_changed_by_user:
            manage_video_subtitles_save(
                self,
                user,
                old_metadata if old_metadata else None,
                generate_translation=True
            )
Example #25
0
    def delete_item(self, location, delete_all_versions=False):
        """
        Delete an item from this modulestore

        location: Something that can be passed to Location
        delete_all_versions: is here because the DraftMongoModuleStore needs it and we need to keep the interface the same. It is unused.
        """
        # VS[compat] cdodge: This is a hack because static_tabs also have references from the course module, so
        # if we add one then we need to also add it to the policy information (i.e. metadata)
        # we should remove this once we can break this reference from the course to static tabs
        if location.category == 'static_tab':
            item = self.get_item(location)
            course = self._get_course_for_item(item.location)
            existing_tabs = course.tabs or []
            course.tabs = [tab for tab in existing_tabs if tab.get('url_slug') != location.name]
            # Save the updates to the course to the MongoKeyValueStore
            course.save()
            self.update_metadata(course.location, own_metadata(course))

        # Must include this to keep the django debug toolbar (which defines the deprecated "safe=False")
        # from overriding our default value set in the init method.
        self.collection.remove({'_id': Location(location).dict()}, safe=self.collection.safe)
        # recompute (and update) the metadata inheritance tree which is cached
        self.refresh_cached_metadata_inheritance_tree(Location(location))
        self.fire_updated_modulestore_signal(get_course_id_no_run(Location(location)), Location(location))
Example #26
0
    def test_get_checklists(self):
        """ Tests the get checklists method. """
        checklists_url = reverse("checklists", kwargs={
            'org': self.course.location.org,
            'course': self.course.location.course,
            'name': self.course.location.name,
        })
        response = self.client.get(checklists_url)
        self.assertContains(response, "Getting Started With Studio")
        # Verify expansion of action URL happened.
        self.assertContains(response, '/mitX/333/team/Checklists_Course')
        # Verify persisted checklist does NOT have expanded URL.
        checklist_0 = self.get_persisted_checklists()[0]
        self.assertEqual('ManageUsers', get_action_url(checklist_0, 0))
        payload = response.content

        # Now delete the checklists from the course and verify they get repopulated (for courses
        # created before checklists were introduced).
        self.course.checklists = None
        # Save the changed `checklists` to the underlying KeyValueStore before updating the modulestore
        self.course.save()
        modulestore = get_modulestore(self.course.location)
        modulestore.update_metadata(self.course.location, own_metadata(self.course))
        self.assertEqual(self.get_persisted_checklists(), None)
        response = self.client.get(checklists_url)
        self.assertEqual(payload, response.content)
Example #27
0
    def test_advanced_components_in_edit_unit(self):
        store = modulestore('direct')
        import_from_xml(store, 'common/test/data/', ['simple'])

        course = store.get_item(Location(['i4x', 'edX', 'simple',
                                          'course', '2012_Fall', None]), depth=None)

        course.advanced_modules = ADVANCED_COMPONENT_TYPES

        store.update_metadata(course.location, own_metadata(course))

        # just pick one vertical
        descriptor = store.get_items(Location(
            'i4x', 'edX', 'simple', 'vertical', None, None))[0]

        resp = self.client.get(reverse('edit_unit', kwargs={
                               'location': descriptor.location.url()}))
        self.assertEqual(resp.status_code, 200)

        # This could be made better, but for now let's just assert that we see the advanced modules mentioned in the page
        # response HTML
        self.assertIn('Video Alpha', resp.content)
        self.assertIn('Word cloud', resp.content)
        self.assertIn('Annotation', resp.content)
        self.assertIn('Open Ended Response', resp.content)
        self.assertIn('Peer Grading Interface', resp.content)
Example #28
0
    def setUp(self):
        "Set some useful content and URLs for tests"
        super(TextbookDetailTestCase, self).setUp()
        self.textbook1 = {
            "tab_title": "Economics",
            "id": 1,
            "chapters": {
                "title": "Chapter 1",
                "url": "/a/b/c/ch1.pdf",
            }
        }
        self.url1 = self.course_locator.url_reverse("textbooks", "1")
        self.textbook2 = {
            "tab_title": "Algebra",
            "id": 2,
            "chapters": {
                "title": "Chapter 11",
                "url": "/a/b/ch11.pdf",
            }
        }
        self.url2 = self.course_locator.url_reverse("textbooks", "2")
        self.course.pdf_textbooks = [self.textbook1, self.textbook2]
        # Save the data that we've just changed to the underlying
        # MongoKeyValueStore before we update the mongo datastore.
        self.course.save()
        self.store = get_modulestore(self.course.location)
        self.store.update_metadata(self.course.location, own_metadata(self.course))
        self.url_nonexist = self.course_locator.url_reverse("textbooks", "20")
Example #29
0
def initialize_course_tabs(course):
    """
    Set up the default tabs.
    I've added this because when we add static tabs, the LMS expects either None for the tabs list or
    at least a list populated with the minimal set of tabs.
    @TODO: I don't like the fact that the presentation tier is aware of these data-related constraints; let's find a
    better place for this. Also, rather than a simple list of dictionaries, a nice class model would be helpful here.
    """

    # This logic is repeated in xmodule/modulestore/tests/factories.py
    # so if you change anything here, you need to also change it there.
    course.tabs = [
        # Translators: "Courseware" is the title of the page where you access a course's videos and problems.
        {"type": "courseware", "name": _("Courseware")},
        # Translators: "Course Info" is the name of the course's information and updates page
        {"type": "course_info", "name": _("Course Info")},
        # Translators: "Discussion" is the title of the course forum page
        {"type": "discussion", "name": _("Discussion")},
        # Translators: "Wiki" is the title of the course's wiki page
        {"type": "wiki", "name": _("Wiki")},
        # Translators: "Progress" is the title of the student's grade information page
        {"type": "progress", "name": _("Progress")},
    ]

    modulestore('direct').update_metadata(course.location.url(), own_metadata(course))
Example #30
0
def _get_module_info(usage_key, user, rewrite_static_links=True):
    """
    metadata, data, id representation of a leaf module fetcher.
    :param usage_key: A UsageKey
    """
    store = modulestore()
    try:
        module = store.get_item(usage_key)
    except ItemNotFoundError:
        if usage_key.category in CREATE_IF_NOT_FOUND:
            # Create a new one for certain categories only. Used for course info handouts.
            module = store.create_and_save_xmodule(usage_key, user.id)
        else:
            raise

    data = getattr(module, 'data', '')
    if rewrite_static_links:
        data = replace_static_urls(
            data,
            None,
            course_id=module.location.course_key
        )

    # Note that children aren't being returned until we have a use case.
    return {
        'id': unicode(module.location),
        'data': data,
        'metadata': own_metadata(module)
    }
Example #31
0
    def test_export_course(self):
        module_store = modulestore('direct')
        draft_store = modulestore('draft')
        content_store = contentstore()

        import_from_xml(module_store, 'common/test/data/', ['full'])
        location = CourseDescriptor.id_to_location(
            'edX/full/6.002_Spring_2012')

        # get a vertical (and components in it) to put into 'draft'
        vertical = module_store.get_item(Location(
            ['i4x', 'edX', 'full', 'vertical', 'vertical_66', None]),
                                         depth=1)

        draft_store.clone_item(vertical.location, vertical.location)

        # We had a bug where orphaned draft nodes caused export to fail. This is here to cover that case.
        draft_store.clone_item(
            vertical.location,
            Location(
                ['i4x', 'edX', 'full', 'vertical', 'no_references', 'draft']))

        for child in vertical.get_children():
            draft_store.clone_item(child.location, child.location)

        root_dir = path(mkdtemp_clean())

        # now create a private vertical
        private_vertical = draft_store.clone_item(
            vertical.location,
            Location(
                ['i4x', 'edX', 'full', 'vertical', 'a_private_vertical',
                 None]))

        # add private to list of children
        sequential = module_store.get_item(
            Location([
                'i4x', 'edX', 'full', 'sequential',
                'Administrivia_and_Circuit_Elements', None
            ]))
        private_location_no_draft = private_vertical.location.replace(
            revision=None)
        module_store.update_children(
            sequential.location,
            sequential.children + [private_location_no_draft.url()])

        # read back the sequential, to make sure we have a pointer to the private vertical we just added
        sequential = module_store.get_item(
            Location([
                'i4x', 'edX', 'full', 'sequential',
                'Administrivia_and_Circuit_Elements', None
            ]))

        self.assertIn(private_location_no_draft.url(), sequential.children)

        print 'Exporting to tempdir = {0}'.format(root_dir)

        # export out to a tempdir
        export_to_xml(module_store,
                      content_store,
                      location,
                      root_dir,
                      'test_export',
                      draft_modulestore=draft_store)

        # check for static tabs
        self.verify_content_existence(module_store, root_dir, location, 'tabs',
                                      'static_tab', '.html')

        # check for course info
        self.verify_content_existence(module_store, root_dir, location, 'info',
                                      'course_info', '.html')

        # check for custom_tags
        self.verify_content_existence(module_store, root_dir, location,
                                      'custom_tags', 'custom_tag_template')

        # check for about content
        self.verify_content_existence(module_store, root_dir, location,
                                      'about', 'about', '.html')

        # check for grading_policy.json
        filesystem = OSFS(root_dir / 'test_export/policies/6.002_Spring_2012')
        self.assertTrue(filesystem.exists('grading_policy.json'))

        course = module_store.get_item(location)
        # compare what's on disk compared to what we have in our course
        with filesystem.open('grading_policy.json', 'r') as grading_policy:
            on_disk = loads(grading_policy.read())
            self.assertEqual(on_disk, course.grading_policy)

        # check for policy.json
        self.assertTrue(filesystem.exists('policy.json'))

        # compare what's on disk to what we have in the course module
        with filesystem.open('policy.json', 'r') as course_policy:
            on_disk = loads(course_policy.read())
            self.assertIn('course/6.002_Spring_2012', on_disk)
            self.assertEqual(on_disk['course/6.002_Spring_2012'],
                             own_metadata(course))

        # remove old course
        delete_course(module_store, content_store, location)

        # reimport
        import_from_xml(module_store,
                        root_dir, ['test_export'],
                        draft_store=draft_store)

        items = module_store.get_items(
            Location(['i4x', 'edX', 'full', 'vertical', None]))
        self.assertGreater(len(items), 0)
        for descriptor in items:
            # don't try to look at private verticals. Right now we're running
            # the service in non-draft-aware mode.
            if getattr(descriptor, 'is_draft', False):
                print "Checking {0}....".format(descriptor.location.url())
                resp = self.client.get(
                    reverse('edit_unit',
                            kwargs={'location': descriptor.location.url()}))
                self.assertEqual(resp.status_code, 200)

        # verify that we have the content in the draft store as well
        vertical = draft_store.get_item(Location(
            ['i4x', 'edX', 'full', 'vertical', 'vertical_66', None]),
                                        depth=1)

        self.assertTrue(getattr(vertical, 'is_draft', False))
        for child in vertical.get_children():
            self.assertTrue(getattr(child, 'is_draft', False))

        # make sure that we don't have a sequential that is in draft mode
        sequential = draft_store.get_item(
            Location([
                'i4x', 'edX', 'full', 'sequential',
                'Administrivia_and_Circuit_Elements', None
            ]))

        self.assertFalse(getattr(sequential, 'is_draft', False))

        # verify that we have the private vertical
        test_private_vertical = draft_store.get_item(
            Location(['i4x', 'edX', 'full', 'vertical', 'vertical_66', None]))

        self.assertTrue(getattr(test_private_vertical, 'is_draft', False))

        # make sure the textbook survived the export/import
        course = module_store.get_item(
            Location(
                ['i4x', 'edX', 'full', 'course', '6.002_Spring_2012', None]))

        self.assertGreater(len(course.textbooks), 0)

        shutil.rmtree(root_dir)
Example #32
0
    def test_draft_metadata(self):
        '''
        This verifies a bug we had where inherited metadata was getting written to the
        module as 'own-metadata' when publishing. Also verifies the metadata inheritance is
        properly computed
        '''
        store = modulestore('direct')
        draft_store = modulestore('draft')
        import_from_xml(store, 'common/test/data/', ['simple'])

        course = draft_store.get_item(Location(
            ['i4x', 'edX', 'simple', 'course', '2012_Fall', None]),
                                      depth=None)
        html_module = draft_store.get_item(
            ['i4x', 'edX', 'simple', 'html', 'test_html', None])

        self.assertEqual(html_module.lms.graceperiod, course.lms.graceperiod)
        self.assertNotIn('graceperiod', own_metadata(html_module))

        draft_store.clone_item(html_module.location, html_module.location)

        # refetch to check metadata
        html_module = draft_store.get_item(
            ['i4x', 'edX', 'simple', 'html', 'test_html', None])

        self.assertEqual(html_module.lms.graceperiod, course.lms.graceperiod)
        self.assertNotIn('graceperiod', own_metadata(html_module))

        # publish module
        draft_store.publish(html_module.location, 0)

        # refetch to check metadata
        html_module = draft_store.get_item(
            ['i4x', 'edX', 'simple', 'html', 'test_html', None])

        self.assertEqual(html_module.lms.graceperiod, course.lms.graceperiod)
        self.assertNotIn('graceperiod', own_metadata(html_module))

        # put back in draft and change metadata and see if it's now marked as 'own_metadata'
        draft_store.clone_item(html_module.location, html_module.location)
        html_module = draft_store.get_item(
            ['i4x', 'edX', 'simple', 'html', 'test_html', None])

        new_graceperiod = timedelta(**{'hours': 1})

        self.assertNotIn('graceperiod', own_metadata(html_module))
        html_module.lms.graceperiod = new_graceperiod
        self.assertIn('graceperiod', own_metadata(html_module))
        self.assertEqual(html_module.lms.graceperiod, new_graceperiod)

        draft_store.update_metadata(html_module.location,
                                    own_metadata(html_module))

        # read back to make sure it reads as 'own-metadata'
        html_module = draft_store.get_item(
            ['i4x', 'edX', 'simple', 'html', 'test_html', None])

        self.assertIn('graceperiod', own_metadata(html_module))
        self.assertEqual(html_module.lms.graceperiod, new_graceperiod)

        # republish
        draft_store.publish(html_module.location, 0)

        # and re-read and verify 'own-metadata'
        draft_store.clone_item(html_module.location, html_module.location)
        html_module = draft_store.get_item(
            ['i4x', 'edX', 'simple', 'html', 'test_html', None])

        self.assertIn('graceperiod', own_metadata(html_module))
        self.assertEqual(html_module.lms.graceperiod, new_graceperiod)
Example #33
0
    def export_to_xml(self, resource_fs):
        """
        Returns an xml string representing this module, and all modules
        underneath it.  May also write required resources out to resource_fs

        Assumes that modules have single parentage (that no module appears twice
        in the same course), and that it is thus safe to nest modules as xml
        children as appropriate.

        The returned XML should be able to be parsed back into an identical
        XModuleDescriptor using the from_xml method with the same system, org,
        and course

        resource_fs is a pyfilesystem object (from the fs package)
        """

        # Set up runtime.export_fs so that it's available through future
        # uses of the pure xblock add_xml_to_node api
        self.runtime.export_fs = resource_fs

        # Get the definition
        xml_object = self.definition_to_xml(resource_fs)
        self.clean_metadata_from_xml(xml_object)

        # Set the tag so we get the file path right
        xml_object.tag = self.category

        # Add the non-inherited metadata
        for attr in sorted(own_metadata(self)):
            # don't want e.g. data_dir
            if attr not in self.metadata_to_strip and attr not in self.metadata_to_export_to_policy:
                val = serialize_field(self._field_data.get(self, attr))
                try:
                    xml_object.set(attr, val)
                except Exception:
                    logging.exception(
                        u'Failed to serialize metadata attribute %s with value %s in module %s. This could mean data loss!!!',
                        attr, val, self.url_name
                    )

        for key, value in self.xml_attributes.items():
            if key not in self.metadata_to_strip:
                xml_object.set(key, serialize_field(value))

        if self.export_to_file():
            # Write the definition to a file
            url_path = name_to_pathname(self.url_name)
            filepath = self._format_filepath(self.category, url_path)
            resource_fs.makedir(os.path.dirname(filepath), recursive=True, allow_recreate=True)
            with resource_fs.open(filepath, 'w') as fileobj:
                fileobj.write(etree.tostring(xml_object, pretty_print=True, encoding='utf-8'))

            # And return just a pointer with the category and filename.
            record_object = etree.Element(self.category)
        else:
            record_object = xml_object

        record_object.set('url_name', self.url_name)

        # Special case for course pointers:
        if self.category == 'course':
            # add org and course attributes on the pointer tag
            record_object.set('org', self.location.org)
            record_object.set('course', self.location.course)

        return etree.tostring(record_object, pretty_print=True, encoding='utf-8')
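
# A hedged usage sketch for the method above: hand it a pyfilesystem object and capture the
# returned XML string. The wrapper function and export directory are illustrative.
from fs.osfs import OSFS

def export_descriptor(descriptor, export_dir):
    """Illustrative wrapper: export one descriptor via the export_to_xml method above."""
    resource_fs = OSFS(export_dir)  # any pyfilesystem object will do
    return descriptor.export_to_xml(resource_fs)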
Example #34
0
def _save_xblock(user,
                 xblock,
                 data=None,
                 children_strings=None,
                 metadata=None,
                 nullout=None,
                 grader_type=None,
                 publish=None):
    """
    Saves the xblock with its fields. Has special processing for grader_type, publish, nullout, and Nones in
    metadata. nullout means to truly set the field to None, whereas Nones in metadata mean to unset the field
    (so it reverts to its default).
    """
    store = modulestore()
    # Perform all xblock changes within a (single-versioned) transaction
    with store.bulk_operations(xblock.location.course_key):

        # Don't allow updating an xblock and discarding changes in a single operation (unsupported by UI).
        if publish == "discard_changes":
            store.revert_to_published(xblock.location, user.id)
            # Returning the same sort of result that we do for other save operations. In the future,
            # we may want to return the full XBlockInfo.
            return JsonResponse({'id': unicode(xblock.location)})

        old_metadata = own_metadata(xblock)
        old_content = xblock.get_explicitly_set_fields_by_scope(Scope.content)

        if data:
            # TODO Allow any scope.content fields not just "data" (exactly like the get below this)
            xblock.data = data
        else:
            data = old_content['data'] if 'data' in old_content else None

        if children_strings is not None:
            children = []
            for child_string in children_strings:
                children.append(usage_key_with_run(child_string))

            # if new children have been added, remove them from their old parents
            new_children = set(children) - set(xblock.children)
            for new_child in new_children:
                old_parent_location = store.get_parent_location(new_child)
                if old_parent_location:
                    old_parent = store.get_item(old_parent_location)
                    old_parent.children.remove(new_child)
                    old_parent = _update_with_callback(old_parent, user)
                else:
                    # the Studio UI currently doesn't present orphaned children, so assume this is an error
                    return JsonResponse(
                        {
                            "error":
                            "Invalid data, possibly caused by concurrent authors."
                        }, 400)

            # make sure there are no old children that became orphans
            # In a single-author (no-conflict) scenario, all children in the persisted list on the server should be
            # present in the updated list.  If there are any children that have been dropped as part of this update,
            # then that would be an error.
            #
            # We can be even more restrictive in a multi-author (conflict), by returning an error whenever
            # len(old_children) > 0. However, that conflict can still be "merged" if the dropped child had been
            # re-parented. Hence, the check for the parent in the any statement below.
            #
            # Note that this multi-author conflict error should not occur in modulestores (such as Split) that support
            # atomic write transactions.  In Split, if there was another author who moved one of the "old_children"
            # into another parent, then that child would have been deleted from this parent on the server. However,
            # this error could occur in modulestores (such as Draft) that do not support atomic write transactions
            old_children = set(xblock.children) - set(children)
            if any(
                    store.get_parent_location(old_child) == xblock.location
                    for old_child in old_children):
                # since children are moved as part of a single transaction, orphans should not be created
                return JsonResponse(
                    {
                        "error":
                        "Invalid data, possibly caused by concurrent authors."
                    }, 400)

            # set the children on the xblock
            xblock.children = children

        # also commit any metadata which might have been passed along
        if nullout is not None or metadata is not None:
            # the postback is not the complete metadata, as there's system metadata which is
            # not presented to the end-user for editing. So let's use the original (existing_item) and
            # 'apply' the submitted metadata, so we don't end up deleting system metadata.
            if nullout is not None:
                for metadata_key in nullout:
                    setattr(xblock, metadata_key, None)

            # update existing metadata with submitted metadata (which can be partial)
            # IMPORTANT NOTE: if the client passed 'null' (None) for a piece of metadata that means 'remove it'. If
            # the intent is to make it None, use the nullout field
            if metadata is not None:
                for metadata_key, value in metadata.items():
                    field = xblock.fields[metadata_key]

                    if value is None:
                        field.delete_from(xblock)
                    else:
                        try:
                            value = field.from_json(value)
                        except ValueError as verr:
                            reason = _("Invalid data")
                            if verr.message:
                                reason = _("Invalid data ({details})").format(
                                    details=verr.message)
                            return JsonResponse({"error": reason}, 400)
                        field.write_to(xblock, value)

        # update the xblock and call any xblock callbacks
        xblock = _update_with_callback(xblock, user, old_metadata, old_content)

        # for static tabs, their containing course also records their display name
        if xblock.location.category == 'static_tab':
            course = store.get_course(xblock.location.course_key)
            # find the course's reference to this tab and update the name.
            static_tab = CourseTabList.get_tab_by_slug(course.tabs,
                                                       xblock.location.name)
            # only update if changed
            if static_tab and static_tab['name'] != xblock.display_name:
                static_tab['name'] = xblock.display_name
                store.update_item(course, user.id)

        result = {
            'id': unicode(xblock.location),
            'data': data,
            'metadata': own_metadata(xblock)
        }

        if grader_type is not None:
            result.update(
                CourseGradingModel.update_section_grader_type(
                    xblock, grader_type, user))

        # If publish is set to 'republish' and this item is not in direct only categories and has previously been published,
        # then this item should be republished. This is used by staff locking to ensure that changing the draft
        # value of the staff lock will also update the published version, but only at the unit level.
        if publish == 'republish' and xblock.category not in DIRECT_ONLY_CATEGORIES:
            if modulestore().has_published_version(xblock):
                publish = 'make_public'

        # Make public after updating the xblock, in case the caller asked for both an update and a publish.
        # Used by Bok Choy tests and by republishing of staff locks.
        if publish == 'make_public':
            modulestore().publish(xblock.location, user.id)

        # Note that children aren't being returned until we have a use case.
        return JsonResponse(result, encoder=EdxJSONEncoder)
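
# To make the docstring's nullout-versus-None distinction concrete, a hedged call sketch.
# The field names are illustrative, and `user` and `xblock` are assumed to come from the calling view.
def revert_due_and_null_graceperiod(user, xblock):
    """Illustrative call: None in metadata reverts a field, nullout stores an explicit None."""
    return _save_xblock(
        user,
        xblock,
        metadata={'due': None},   # None here means "unset 'due' so it reverts to its default"
        nullout=['graceperiod'],  # nullout means "truly set the field to None"
    )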
Example #35
0
    def test_shib_login_enrollment(self):
        """
            A functionality test that a student with an existing shib login
            can auto-enroll in a class with GET or POST params.  Also tests the redirect
            functionality of the 'next' GET/POST param.
        """
        student = UserFactory.create()
        extauth = ExternalAuthMap(
            external_id='*****@*****.**',
            external_email='',
            external_domain='shib:https://idp.stanford.edu/',
            external_credentials="",
            internal_password="******",
            user=student)
        student.set_password("password")
        student.save()
        extauth.save()

        course = CourseFactory.create(org='Stanford',
                                      number='123',
                                      display_name='Shib Only')
        course.enrollment_domain = 'shib:https://idp.stanford.edu/'
        metadata = own_metadata(course)
        metadata['enrollment_domain'] = course.enrollment_domain
        self.store.update_metadata(course.location.url(), metadata)

        # use django test client for sessions and url processing
        # no enrollment before trying
        self.assertFalse(CourseEnrollment.is_enrolled(student, course.id))
        self.client.logout()
        request_kwargs = {
            'path': '/shib-login/',
            'data': {
                'enrollment_action': 'enroll',
                'course_id': course.id,
                'next': '/testredirect'
            },
            'follow': False,
            'REMOTE_USER': '******',
            'Shib-Identity-Provider': 'https://idp.stanford.edu/'
        }
        response = self.client.get(**request_kwargs)
        # successful login is a redirect to "/"
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response['location'],
                         'http://testserver/testredirect')
        # now there is enrollment
        self.assertTrue(CourseEnrollment.is_enrolled(student, course.id))

        # Clean up and try again with POST (doesn't happen with real production shib, doing this for test coverage)
        self.client.logout()
        CourseEnrollment.unenroll(student, course.id)
        self.assertFalse(CourseEnrollment.is_enrolled(student, course.id))

        response = self.client.post(**request_kwargs)
        # successful login is a redirect to "/"
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response['location'],
                         'http://testserver/testredirect')
        # now there is enrollment
        self.assertTrue(CourseEnrollment.is_enrolled(student, course.id))

    def test_enrollment_limit_by_domain(self):
        """
            Tests that the enrollment_domain setting properly limits enrollment to those who have
            the proper external auth.
        """

        # create 2 courses, one with limited enrollment and one without
        shib_course = CourseFactory.create(org='Stanford', number='123', display_name='Shib Only')
        shib_course.enrollment_domain = 'shib:https://idp.stanford.edu/'
        metadata = own_metadata(shib_course)
        metadata['enrollment_domain'] = shib_course.enrollment_domain
        self.store.update_metadata(shib_course.location.url(), metadata)

        open_enroll_course = CourseFactory.create(org='MITx', number='999', display_name='Robot Super Course')
        open_enroll_course.enrollment_domain = ''
        metadata = own_metadata(open_enroll_course)
        metadata['enrollment_domain'] = open_enroll_course.enrollment_domain
        self.store.update_metadata(open_enroll_course.location.url(), metadata)

        # create 3 kinds of students, external_auth matching shib_course, external_auth not matching, no external auth
        shib_student = UserFactory.create()
        shib_student.save()
        extauth = ExternalAuthMap(external_id='*****@*****.**',
                                  external_email='',
                                  external_domain='shib:https://idp.stanford.edu/',
                                  external_credentials="",
                                  user=shib_student)
        extauth.save()

        other_ext_student = UserFactory.create()
        other_ext_student.username = "******"
        other_ext_student.email = "*****@*****.**"
        other_ext_student.save()
        extauth = ExternalAuthMap(external_id='*****@*****.**',
                                  external_email='',
                                  external_domain='shib:https://other.edu/',
                                  external_credentials="",
                                  user=other_ext_student)
        extauth.save()

        int_student = UserFactory.create()
        int_student.username = "******"
        int_student.email = "*****@*****.**"
        int_student.save()

        # Test both kinds of course, limited and not
        for course in [shib_course, open_enroll_course]:
            for student in [shib_student, other_ext_student, int_student]:
                request = self.request_factory.post('/change_enrollment')
                request.POST.update({'enrollment_action': 'enroll',
                                     'course_id': course.id})
                request.user = student
                response = change_enrollment(request)
                # if the course is not limited, or the student has the correct shib extauth, enrollment should be allowed
                if course is open_enroll_course or student is shib_student:
                    self.assertEqual(response.status_code, 200)
                    self.assertEqual(CourseEnrollment.objects.filter(user=student, course_id=course.id).count(), 1)
                    #clean up
                    CourseEnrollment.objects.filter(user=student, course_id=course.id).delete()
                else:
                    self.assertEqual(response.status_code, 400)
                    self.assertEqual(CourseEnrollment.objects.filter(user=student, course_id=course.id).count(), 0)

    def test_course_specificLoginAndReg(self):
        """
        Tests that the correct course specific login and registration urls work for shib
        """
        course = CourseFactory.create(org='MITx', number='999', display_name='Robot Super Course')

        # Test for cases where course is found
        for domain in ["", "shib:https://idp.stanford.edu/"]:
            #set domains
            course.enrollment_domain = domain
            metadata = own_metadata(course)
            metadata['enrollment_domain'] = domain
            self.store.update_metadata(course.location.url(), metadata)

            #setting location to test that GET params get passed through
            login_request = self.request_factory.get('/course_specific_login/MITx/999/Robot_Super_Course' +
                                                     '?course_id=MITx/999/Robot_Super_Course' +
                                                     '&enrollment_action=enroll')
            reg_request = self.request_factory.get('/course_specific_register/MITx/999/Robot_Super_Course' +
                                                   '?course_id=MITx/999/Robot_Super_Course' +
                                                   '&enrollment_action=enroll')

            login_response = course_specific_login(login_request, 'MITx/999/Robot_Super_Course')
            reg_response = course_specific_register(reg_request, 'MITx/999/Robot_Super_Course')

            if "shib" in domain:
                self.assertIsInstance(login_response, HttpResponseRedirect)
                self.assertEqual(login_response['Location'],
                                 reverse('shib-login') +
                                 '?course_id=MITx/999/Robot_Super_Course' +
                                 '&enrollment_action=enroll')
                self.assertIsInstance(reg_response, HttpResponseRedirect)
                self.assertEqual(reg_response['Location'],
                                 reverse('shib-login') +
                                 '?course_id=MITx/999/Robot_Super_Course' +
                                 '&enrollment_action=enroll')
            else:
                self.assertIsInstance(login_response, HttpResponseRedirect)
                self.assertEqual(login_response['Location'],
                                 reverse('signin_user') +
                                 '?course_id=MITx/999/Robot_Super_Course' +
                                 '&enrollment_action=enroll')
                self.assertIsInstance(reg_response, HttpResponseRedirect)
                self.assertEqual(reg_response['Location'],
                                 reverse('register_user') +
                                 '?course_id=MITx/999/Robot_Super_Course' +
                                 '&enrollment_action=enroll')

            # Now test for non-existent course
            #setting location to test that GET params get passed through
            login_request = self.request_factory.get('/course_specific_login/DNE/DNE/DNE' +
                                                     '?course_id=DNE/DNE/DNE' +
                                                     '&enrollment_action=enroll')
            reg_request = self.request_factory.get('/course_specific_register/DNE/DNE/DNE' +
                                                   '?course_id=DNE/DNE/DNE' +
                                                   '&enrollment_action=enroll')

            login_response = course_specific_login(login_request, 'DNE/DNE/DNE')
            reg_response = course_specific_register(reg_request, 'DNE/DNE/DNE')

            self.assertIsInstance(login_response, HttpResponseRedirect)
            self.assertEqual(login_response['Location'],
                             reverse('signin_user') +
                             '?course_id=DNE/DNE/DNE' +
                             '&enrollment_action=enroll')
            self.assertIsInstance(reg_response, HttpResponseRedirect)
            self.assertEqual(reg_response['Location'],
                             reverse('register_user') +
                             '?course_id=DNE/DNE/DNE' +
                             '&enrollment_action=enroll')
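
# The tests above repeat one idiom for changing a single course field: mutate the attribute,
# fold it into own_metadata, and persist. A minimal sketch of that idiom, assuming the same
# old-style update_metadata API; the helper name is illustrative.
def set_enrollment_domain(store, course, domain):
    """Illustrative helper mirroring the repeated enrollment_domain updates above."""
    course.enrollment_domain = domain
    metadata = own_metadata(course)
    metadata['enrollment_domain'] = domain
    store.update_metadata(course.location.url(), metadata)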
Example #38
0
def get_d3_problem_grade_distrib(course_id):
    """
    Returns problem grade distribution information for each section, data already in format for d3 function.

    `course_id`: the course ID for the course of interest

    Returns an array of dicts in the order of the sections. Each dict has:
      'display_name' - display name for the section
      'data' - data for the d3_stacked_bar_graph function of the grade distribution for that problem
    """

    prob_grade_distrib = get_problem_grade_distribution(course_id)
    d3_data = []

    # Retrieve course object down to problems
    course = modulestore().get_instance(
        course_id, CourseDescriptor.id_to_location(course_id), depth=4)

    # Iterate through sections, subsections, units, problems
    for section in course.get_children():
        curr_section = {}
        curr_section['display_name'] = own_metadata(section).get(
            'display_name', '')
        data = []
        c_subsection = 0
        for subsection in section.get_children():
            c_subsection += 1
            c_unit = 0
            for unit in subsection.get_children():
                c_unit += 1
                c_problem = 0
                for child in unit.get_children():

                    # Student data is at the problem level
                    if child.location.category == 'problem':
                        c_problem += 1
                        stack_data = []

                        # Construct label to display for this problem
                        label = "P{0}.{1}.{2}".format(c_subsection, c_unit,
                                                      c_problem)

                        # Only problems in prob_grade_distrib have had a student submission.
                        if child.location.url() in prob_grade_distrib:

                            # Get max_grade, grade_distribution for this problem
                            problem_info = prob_grade_distrib[
                                child.location.url()]

                            # Get problem_name for tooltip
                            problem_name = own_metadata(child).get(
                                'display_name', '')

                            # Compute percent of this grade over max_grade
                            max_grade = float(problem_info['max_grade'])
                            for (grade,
                                 count_grade) in problem_info['grade_distrib']:
                                percent = 0.0
                                if max_grade > 0:
                                    percent = (grade * 100.0) / max_grade

                                # Construct tooltip for problem in grade distribution view
                                tooltip = _(
                                    "{label} {problem_name} - {count_grade} {students} ({percent:.0f}%: {grade:.0f}/{max_grade:.0f} {questions})"
                                ).format(
                                    label=label,
                                    problem_name=problem_name,
                                    count_grade=count_grade,
                                    students=_("students"),
                                    percent=percent,
                                    grade=grade,
                                    max_grade=max_grade,
                                    questions=_("questions"),
                                )

                                # Construct data to be sent to d3
                                stack_data.append({
                                    'color': percent,
                                    'value': count_grade,
                                    'tooltip': tooltip,
                                })

                        problem = {
                            'xValue': label,
                            'stackData': stack_data,
                        }
                        data.append(problem)
        curr_section['data'] = data

        d3_data.append(curr_section)

    return d3_data
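
# For reference, a hedged illustration of the structure this function returns (all values invented).
EXAMPLE_PROBLEM_GRADE_D3_DATA = [
    {
        'display_name': 'Week 1',
        'data': [
            {
                'xValue': 'P1.1.1',
                'stackData': [
                    {'color': 50.0, 'value': 12, 'tooltip': 'P1.1.1 Gradient Problem - 12 students (50%: 1/2 questions)'},
                ],
            },
        ],
    },
]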
Example #39
0
def _save_item(request,
               usage_loc,
               item_location,
               data=None,
               children=None,
               metadata=None,
               nullout=None,
               grader_type=None,
               publish=None):
    """
    Saves the xblock with its fields. Has special processing for grader_type, publish, nullout, and Nones in
    metadata. nullout means to truly set the field to None, whereas Nones in metadata mean to unset the field
    (so it reverts to its default).

    The item_location is still the old-style location whereas usage_loc is a BlockUsageLocator
    """
    store = get_modulestore(item_location)

    try:
        existing_item = store.get_item(item_location)
    except ItemNotFoundError:
        if item_location.category in CREATE_IF_NOT_FOUND:
            # New module at this location, for pages that are not pre-created.
            # Used for course info handouts.
            store.create_and_save_xmodule(item_location)
            existing_item = store.get_item(item_location)
        else:
            raise
    except InvalidLocationError:
        log.error("Can't find item by location.")
        return JsonResponse(
            {"error": "Can't find item by location: " + str(item_location)},
            404)

    if publish:
        if publish == 'make_private':
            _xmodule_recurse(existing_item,
                             lambda i: modulestore().unpublish(i.location))
        elif publish == 'create_draft':
            # This clones the existing item location to a draft location (the draft is
            # implicit, because modulestore is a Draft modulestore)
            modulestore().convert_to_draft(item_location)

    if data:
        store.update_item(item_location, data)
    else:
        data = existing_item.get_explicitly_set_fields_by_scope(Scope.content)

    if children is not None:
        children_ids = [
            loc_mapper().translate_locator_to_location(
                BlockUsageLocator(child_locator)).url()
            for child_locator in children
        ]
        store.update_children(item_location, children_ids)

    # cdodge: also commit any metadata which might have been passed along
    if nullout is not None or metadata is not None:
        # the postback is not the complete metadata, as there's system metadata which is
        # not presented to the end-user for editing. So let's use the original (existing_item) and
        # 'apply' the submitted metadata, so we don't end up deleting system metadata.
        if nullout is not None:
            for metadata_key in nullout:
                setattr(existing_item, metadata_key, None)

        # update existing metadata with submitted metadata (which can be partial)
        # IMPORTANT NOTE: if the client passed 'null' (None) for a piece of metadata that means 'remove it'. If
        # the intent is to make it None, use the nullout field
        if metadata is not None:
            for metadata_key, value in metadata.items():
                field = existing_item.fields[metadata_key]

                if value is None:
                    field.delete_from(existing_item)
                else:
                    try:
                        value = field.from_json(value)
                    except ValueError:
                        return JsonResponse({"error": "Invalid data"}, 400)
                    field.write_to(existing_item, value)

        # Save the data that we've just changed to the underlying
        # MongoKeyValueStore before we update the mongo datastore.
        existing_item.save()
        # commit to datastore
        store.update_metadata(item_location, own_metadata(existing_item))

        if existing_item.category == 'video':
            manage_video_subtitles_save(existing_item, existing_item)

    result = {
        'id': unicode(usage_loc),
        'data': data,
        'metadata': own_metadata(existing_item)
    }

    if grader_type is not None:
        result.update(
            CourseGradingModel.update_section_grader_type(
                existing_item, grader_type))

    # Make public after updating the xblock, in case the caller asked
    # for both an update and a publish.
    if publish and publish == 'make_public':
        _xmodule_recurse(
            existing_item,
            lambda i: modulestore().publish(i.location, request.user.id))

    # Note that children aren't being returned until we have a use case.
    return JsonResponse(result)
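
# The partial-metadata merge in the middle of _save_item is the piece most worth isolating;
# a standalone sketch of just that step. The helper name is illustrative.
def apply_partial_metadata(xblock, metadata):
    """Illustrative extraction of the merge loop above: None deletes, anything else is written."""
    for key, value in metadata.items():
        field = xblock.fields[key]
        if value is None:
            field.delete_from(xblock)  # revert to the default / inherited value
        else:
            field.write_to(xblock, field.from_json(value))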
Example #40
0
    def dump_module(self,
                    module,
                    destination=None,
                    inherited=False,
                    defaults=False):
        """
        Add the module and all its children to the destination dictionary as a
        flat structure.
        """

        destination = destination if destination else {}

        items = own_metadata(module)

        # HACK: add discussion ids to list of items to export (AN-6696)
        if isinstance(module,
                      DiscussionXBlock) and 'discussion_id' not in items:
            items['discussion_id'] = module.discussion_id

        filtered_metadata = {
            k: v
            for k, v in six.iteritems(items) if k not in FILTER_LIST
        }

        destination[six.text_type(module.location)] = {
            'category': module.location.block_type,
            'children': [six.text_type(child) for child in getattr(module, 'children', [])],
            'metadata': filtered_metadata,
        }

        if inherited:
            # When calculating inherited metadata, don't include existing
            # locally-defined metadata
            inherited_metadata_filter_list = list(filtered_metadata.keys())
            inherited_metadata_filter_list.extend(INHERITED_FILTER_LIST)

            def is_inherited(field):
                if field.name in inherited_metadata_filter_list:
                    return False
                elif field.scope != Scope.settings:
                    return False
                elif defaults:
                    return True
                else:
                    return field.values != field.default

            inherited_metadata = {
                field.name: field.read_json(module)
                for field in module.fields.values() if is_inherited(field)
            }
            destination[six.text_type(
                module.location)]['inherited_metadata'] = inherited_metadata

        for child in module.get_children():
            self.dump_module(child, destination, inherited, defaults)

        return destination
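
# A hedged usage sketch for dump_module above, assuming it lives on a management-command-style
# class with access to a modulestore; the driver function is illustrative.
import json

def dump_course_to_json(command, store, course_key):
    """Illustrative driver: flatten the whole course tree and serialize it."""
    course = store.get_course(course_key, depth=None)
    flat = command.dump_module(course, inherited=True, defaults=False)
    return json.dumps(flat, indent=2, default=str)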
Example #41
0
def textbooks_list_handler(request,
                           tag=None,
                           package_id=None,
                           branch=None,
                           version_guid=None,
                           block=None):
    """
    A RESTful handler for textbook collections.

    GET
        html: return textbook list page (Backbone application)
        json: return JSON representation of all textbooks in this course
    POST
        json: create a new textbook for this course
    PUT
        json: overwrite all textbooks in the course with the given list
    """
    locator, course = _get_locator_and_course(package_id, branch, version_guid,
                                              block, request.user)
    store = get_modulestore(course.location)

    if not "application/json" in request.META.get('HTTP_ACCEPT', 'text/html'):
        # return HTML page
        upload_asset_url = locator.url_reverse('assets/', '')
        textbook_url = locator.url_reverse('/textbooks')
        return render_to_response(
            'textbooks.html', {
                'context_course': course,
                'textbooks': course.pdf_textbooks,
                'upload_asset_url': upload_asset_url,
                'textbook_url': textbook_url,
            })

    # from here on down, we know the client has requested JSON
    if request.method == 'GET':
        return JsonResponse(course.pdf_textbooks)
    elif request.method == 'PUT':
        try:
            textbooks = validate_textbooks_json(request.body)
        except TextbookValidationError as err:
            return JsonResponse({"error": err.message}, status=400)

        tids = set(t["id"] for t in textbooks if "id" in t)
        for textbook in textbooks:
            if not "id" in textbook:
                tid = assign_textbook_id(textbook, tids)
                textbook["id"] = tid
                tids.add(tid)

        if not any(tab['type'] == 'pdf_textbooks' for tab in course.tabs):
            course.tabs.append({"type": "pdf_textbooks"})
        course.pdf_textbooks = textbooks
        store.update_metadata(course.location, own_metadata(course))
        return JsonResponse(course.pdf_textbooks)
    elif request.method == 'POST':
        # create a new textbook for the course
        try:
            textbook = validate_textbook_json(request.body)
        except TextbookValidationError as err:
            return JsonResponse({"error": err.message}, status=400)
        if not textbook.get("id"):
            tids = set(t["id"] for t in course.pdf_textbooks if "id" in t)
            textbook["id"] = assign_textbook_id(textbook, tids)
        existing = course.pdf_textbooks
        existing.append(textbook)
        course.pdf_textbooks = existing
        if not any(tab['type'] == 'pdf_textbooks' for tab in course.tabs):
            tabs = course.tabs
            tabs.append({"type": "pdf_textbooks"})
            course.tabs = tabs
        store.update_metadata(course.location, own_metadata(course))
        resp = JsonResponse(textbook, status=201)
        resp["Location"] = locator.url_reverse('textbooks', textbook["id"])
        return resp
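
# A hedged sketch of the JSON list a PUT to this handler expects; the exact per-textbook fields
# are an assumption based on how course.pdf_textbooks is used, and ids are assigned server-side
# when missing.
EXAMPLE_TEXTBOOKS_PUT_PAYLOAD = [
    {
        "tab_title": "Course Textbook",  # assumed field name
        "chapters": [{"title": "Chapter 1", "url": "/static/book_ch1.pdf"}],  # assumed structure
    },
    {"id": "2", "tab_title": "Lab Manual", "chapters": []},
]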

def get_d3_sequential_open_distrib(course_id):
    """
    Returns how many students opened a sequential/subsection for each section, data already in format for d3 function.

    `course_id`: the course ID for the course of interest

    Returns an array in the order of the sections and each dict has:
      'display_name' - display name for the section
      'data' - data for the d3_stacked_bar_graph function of how many students opened each sequential/subsection
    """
    sequential_open_distrib = get_sequential_open_distrib(course_id)

    d3_data = []

    # Retrieve course object down to subsection
    course = modulestore().get_course(course_id, depth=2)

    # Iterate through sections, subsections
    for section in course.get_children():
        curr_section = {}
        curr_section['display_name'] = own_metadata(section).get(
            'display_name', '')
        data = []
        c_subsection = 0

        # Construct data for each subsection to be sent to d3
        for subsection in section.get_children():
            c_subsection += 1
            subsection_name = own_metadata(subsection).get('display_name', '')

            num_students = 0
            if subsection.location in sequential_open_distrib:
                num_students = sequential_open_distrib[subsection.location]

            stack_data = []

            # Tooltip parameters for subsection in open_distribution view
            tooltip = {
                'type': 'subsection',
                'num_students': num_students,
                'subsection_num': c_subsection,
                'subsection_name': subsection_name
            }

            stack_data.append({
                'color': 0,
                'value': num_students,
                'tooltip': tooltip,
                'module_url': subsection.location.to_deprecated_string(),
            })
            subsection = {
                'xValue': "SS {0}".format(c_subsection),
                'stackData': stack_data,
            }
            data.append(subsection)

        curr_section['data'] = data
        d3_data.append(curr_section)

    return d3_data
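
# As with the problem-grade variant, a hedged illustration of one section entry in the
# returned d3_data (values invented).
EXAMPLE_SECTION_ENTRY = {
    'display_name': 'Week 1',
    'data': [{
        'xValue': 'SS 1',
        'stackData': [{
            'color': 0,
            'value': 42,  # students who opened the subsection
            'tooltip': {'type': 'subsection', 'num_students': 42,
                        'subsection_num': 1, 'subsection_name': 'Homework 1'},
            'module_url': 'i4x://Org/Course/sequential/Homework_1',  # illustrative
        }],
    }],
}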

    def process_extra(self, root, courselike, root_courselike_dir, xml_centric_courselike_key, export_fs):
        # Export the modulestore's asset metadata.
        asset_dir = root_courselike_dir + '/' + AssetMetadata.EXPORTED_ASSET_DIR + '/'
        if not os.path.isdir(asset_dir):
            os.makedirs(asset_dir)
        asset_root = lxml.etree.Element(AssetMetadata.ALL_ASSETS_XML_TAG)
        course_assets = self.modulestore.get_all_asset_metadata(self.courselike_key, None)
        for asset_md in course_assets:
            # All asset types are exported using the "asset" tag - but their asset type is specified in each asset key.
            asset = lxml.etree.SubElement(asset_root, AssetMetadata.ASSET_XML_TAG)
            asset_md.to_xml(asset)
        with OSFS(asset_dir).open(AssetMetadata.EXPORTED_ASSET_FILENAME, 'wb') as asset_xml_file:
            lxml.etree.ElementTree(asset_root).write(asset_xml_file, encoding='utf-8')

        # export the static assets
        policies_dir = export_fs.makedir('policies', recreate=True)
        if self.contentstore:
            self.contentstore.export_all_for_course(
                self.courselike_key,
                root_courselike_dir + '/static/',
                root_courselike_dir + '/policies/assets.json',
            )

            # If we are using the default course image, export it to the
            # legacy location to support backwards compatibility.
            if courselike.course_image == courselike.fields['course_image'].default:
                try:
                    course_image = self.contentstore.find(
                        StaticContent.compute_location(
                            courselike.id,
                            courselike.course_image
                        ),
                    )
                except NotFoundError:
                    pass
                else:
                    output_dir = root_courselike_dir + '/static/images/'
                    if not os.path.isdir(output_dir):
                        os.makedirs(output_dir)
                    with OSFS(output_dir).open(u'course_image.jpg', 'wb') as course_image_file:
                        course_image_file.write(course_image.data)

        # export the static tabs
        export_extra_content(
            export_fs, self.modulestore, self.courselike_key, xml_centric_courselike_key,
            'static_tab', 'tabs', '.html'
        )

        # export the custom tags
        export_extra_content(
            export_fs, self.modulestore, self.courselike_key, xml_centric_courselike_key,
            'custom_tag_template', 'custom_tags'
        )

        # export the course updates
        export_extra_content(
            export_fs, self.modulestore, self.courselike_key, xml_centric_courselike_key,
            'course_info', 'info', '.html'
        )

        # export the 'about' data (e.g. overview, etc.)
        export_extra_content(
            export_fs, self.modulestore, self.courselike_key, xml_centric_courselike_key,
            'about', 'about', '.html'
        )

        course_policy_dir_name = courselike.location.run
        course_run_policy_dir = policies_dir.makedir(course_policy_dir_name, recreate=True)

        # export the grading policy
        with course_run_policy_dir.open(u'grading_policy.json', 'wb') as grading_policy:
            grading_policy.write(dumps(courselike.grading_policy, cls=EdxJSONEncoder,
                                       sort_keys=True, indent=4).encode('utf-8'))

        # export all of the course metadata in policy.json
        with course_run_policy_dir.open(u'policy.json', 'wb') as course_policy:
            policy = {'course/' + courselike.location.run: own_metadata(courselike)}
            course_policy.write(dumps(policy, cls=EdxJSONEncoder, sort_keys=True, indent=4).encode('utf-8'))

        _export_drafts(self.modulestore, self.courselike_key, export_fs, xml_centric_courselike_key)
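
# For orientation, a hedged sketch of what the policy.json written above contains: the
# courselike's own (non-inherited) metadata keyed by 'course/<run>' (field values invented).
EXAMPLE_POLICY_JSON = {
    "course/2014_T1": {
        "display_name": "Demo Course",
        "start": "2014-01-01T00:00:00Z",
        "advanced_modules": ["word_cloud"],
    }
}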
Example #44
0
    def update_from_json(cls, jsondict):
        """
        Decode the json into CourseDetails and save any changed attrs to the db
        """
        # TODO make it an error for this to be undefined & for it to not be
        # retrievable from modulestore
        course_location = jsondict['course_location']
        # Will probably want to cache the inflight courses because every blur
        # generates an update
        descriptor = get_modulestore(course_location).get_item(course_location)

        dirty = False

        # In the descriptor's setter, the date is converted to JSON using Date's to_json method.
        # Calling to_json on something that is already JSON doesn't work. Since reaching directly
        # into the model is nasty, convert the JSON Date to a Python date, which is what the
        # setter expects as input.
        date = Date()

        if 'start_date' in jsondict:
            converted = date.from_json(jsondict['start_date'])
        else:
            converted = None
        if converted != descriptor.start:
            dirty = True
            descriptor.start = converted

        if 'end_date' in jsondict:
            converted = date.from_json(jsondict['end_date'])
        else:
            converted = None

        if converted != descriptor.end:
            dirty = True
            descriptor.end = converted

        if 'enrollment_start' in jsondict:
            converted = date.from_json(jsondict['enrollment_start'])
        else:
            converted = None

        if converted != descriptor.enrollment_start:
            dirty = True
            descriptor.enrollment_start = converted

        if 'enrollment_end' in jsondict:
            converted = date.from_json(jsondict['enrollment_end'])
        else:
            converted = None

        if converted != descriptor.enrollment_end:
            dirty = True
            descriptor.enrollment_end = converted

        if dirty:
            get_modulestore(course_location).update_metadata(
                course_location, own_metadata(descriptor))

        # NOTE: below auto writes to the db w/o verifying that any of the fields actually changed
        # to make faster, could compare against db or could have client send
        # over a list of which fields changed.
        temploc = Location(course_location)._replace(category='about',
                                                     name='syllabus')
        update_item(temploc, jsondict['syllabus'])

        temploc = temploc._replace(name='overview')
        update_item(temploc, jsondict['overview'])

        temploc = temploc._replace(name='effort')
        update_item(temploc, jsondict['effort'])

        temploc = temploc._replace(name='video')
        recomposed_video_tag = CourseDetails.recompose_video_tag(
            jsondict['intro_video'])
        update_item(temploc, recomposed_video_tag)

        # Could just generate and return a course obj w/o doing any db reads, but I put the reads in as a means to confirm
        # it persisted correctly
        return CourseDetails.fetch(course_location)
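
# A hedged sketch of the jsondict this classmethod consumes; the keys come from the code
# above, while the date-string format and location value are assumptions.
EXAMPLE_COURSE_DETAILS_JSON = {
    'course_location': 'i4x://MITx/999/course/Robot_Super_Course',  # illustrative
    'start_date': '2013-09-05T12:00:00Z',
    'end_date': None,
    'enrollment_start': None,
    'enrollment_end': None,
    'syllabus': '<p>Syllabus HTML</p>',
    'overview': '<p>About this course</p>',
    'effort': '8 hours/week',
    'intro_video': None,
}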
Example #45
0
def export_to_xml(modulestore, contentstore, course_key, root_dir, course_dir):
    """
    Export all modules from `modulestore` and content from `contentstore` as xml to `root_dir`.

    `modulestore`: A `ModuleStore` object that is the source of the modules to export
    `contentstore`: A `ContentStore` object that is the source of the content to export, can be None
    `course_key`: The `CourseKey` of the `CourseModuleDescriptor` to export
    `root_dir`: The directory to write the exported xml to
    `course_dir`: The name of the directory inside `root_dir` to write the course content to
    """

    course = modulestore.get_course(course_key, depth=None)  # None means infinite
    fsm = OSFS(root_dir)
    export_fs = course.runtime.export_fs = fsm.makeopendir(course_dir)

    root = lxml.etree.Element('unknown')

    # export only the published content
    with modulestore.branch_setting(ModuleStoreEnum.Branch.published_only, course_key):
        # change all of the references inside the course to use the xml expected key type w/o version & branch
        xml_centric_course_key = CourseLocator(course_key.org, course_key.course, course_key.run, deprecated=True)
        adapt_references(course, xml_centric_course_key, export_fs)

        course.add_xml_to_node(root)

    with export_fs.open('course.xml', 'w') as course_xml:
        lxml.etree.ElementTree(root).write(course_xml)

    # export the static assets
    policies_dir = export_fs.makeopendir('policies')
    if contentstore:
        contentstore.export_all_for_course(
            course_key,
            root_dir + '/' + course_dir + '/static/',
            root_dir + '/' + course_dir + '/policies/assets.json',
        )

        # If we are using the default course image, export it to the
        # legacy location to support backwards compatibility.
        if course.course_image == course.fields['course_image'].default:
            try:
                course_image = contentstore.find(
                    StaticContent.compute_location(
                        course.id,
                        course.course_image
                    ),
                )
            except NotFoundError:
                pass
            else:
                output_dir = root_dir + '/' + course_dir + '/static/images/'
                if not os.path.isdir(output_dir):
                    os.makedirs(output_dir)
                with OSFS(output_dir).open('course_image.jpg', 'wb') as course_image_file:
                    course_image_file.write(course_image.data)

    # export the static tabs
    export_extra_content(export_fs, modulestore, xml_centric_course_key, 'static_tab', 'tabs', '.html')

    # export the custom tags
    export_extra_content(export_fs, modulestore, xml_centric_course_key, 'custom_tag_template', 'custom_tags')

    # export the course updates
    export_extra_content(export_fs, modulestore, xml_centric_course_key, 'course_info', 'info', '.html')

    # export the 'about' data (e.g. overview, etc.)
    export_extra_content(export_fs, modulestore, xml_centric_course_key, 'about', 'about', '.html')

    # export the grading policy
    course_run_policy_dir = policies_dir.makeopendir(course.location.name)
    with course_run_policy_dir.open('grading_policy.json', 'w') as grading_policy:
        grading_policy.write(dumps(course.grading_policy, cls=EdxJSONEncoder))

    # export all of the course metadata in policy.json
    with course_run_policy_dir.open('policy.json', 'w') as course_policy:
        policy = {'course/' + course.location.name: own_metadata(course)}
        course_policy.write(dumps(policy, cls=EdxJSONEncoder))

    # NOTE: this code assumes that verticals are the top most draftable container
    # should we change the application, then this assumption will no longer be valid
    # NOTE: we need to explicitly implement the logic for setting the vertical's parent
    # and index here since the XML modulestore cannot load draft modules
    draft_verticals = modulestore.get_items(
        course_key,
        qualifiers={'category': 'vertical'},
        revision=ModuleStoreEnum.RevisionOption.draft_only
    )
    if len(draft_verticals) > 0:
        draft_course_dir = export_fs.makeopendir(DRAFT_DIR)
        for draft_vertical in draft_verticals:
            parent_loc = modulestore.get_parent_location(
                draft_vertical.location,
                revision=ModuleStoreEnum.RevisionOption.draft_preferred
            )
            # Don't try to export orphaned items.
            if parent_loc is not None:
                logging.debug('parent_loc = {0}'.format(parent_loc))
                if parent_loc.category in DIRECT_ONLY_CATEGORIES:
                    draft_vertical.xml_attributes['parent_sequential_url'] = parent_loc.to_deprecated_string()
                    sequential = modulestore.get_item(parent_loc)
                    index = sequential.children.index(draft_vertical.location)
                    draft_vertical.xml_attributes['index_in_children_list'] = str(index)
                draft_vertical.runtime.export_fs = draft_course_dir
                adapt_references(draft_vertical, xml_centric_course_key, draft_course_dir)
                node = lxml.etree.Element('unknown')
                draft_vertical.add_xml_to_node(node)
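
# A hedged call sketch for the exporter above; the paths and driver function are illustrative.
def export_course(store, content_store, course_key):
    """Illustrative driver mirroring how Studio invokes export_to_xml above."""
    export_to_xml(store, content_store, course_key,
                  root_dir='/tmp/exports', course_dir='my_course')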
Example #46
0
    def add_xml_to_node(self, node):
        """
        For exporting, set data on `node` from ourselves.
        """
        # Get the definition
        xml_object = self.definition_to_xml(self.runtime.export_fs)

        # If xml_object is None, we don't know how to serialize this node, but
        # we shouldn't crash out the whole export for it.
        if xml_object is None:
            return

        for aside in self.runtime.get_asides(self):
            if aside.needs_serialization():
                aside_node = etree.Element("unknown_root",
                                           nsmap=XML_NAMESPACES)
                aside.add_xml_to_node(aside_node)
                xml_object.append(aside_node)

        not_to_clean_fields = self.metadata_to_not_to_clean.get(
            self.category, ())
        self.clean_metadata_from_xml(xml_object,
                                     excluded_fields=not_to_clean_fields)

        # Set the tag on both nodes so we get the file path right.
        xml_object.tag = self.category
        node.tag = self.category

        # Add the non-inherited metadata
        for attr in sorted(own_metadata(self)):
            # don't want e.g. data_dir
            if (attr not in self.metadata_to_strip
                    and attr not in self.metadata_to_export_to_policy
                    and attr not in not_to_clean_fields):
                val = serialize_field(self._field_data.get(self, attr))
                try:
                    xml_object.set(attr, val)
                except Exception:  # lint-amnesty, pylint: disable=broad-except
                    logging.exception(
                        u'Failed to serialize metadata attribute %s with value %s in module %s. This could mean data loss!!!',  # lint-amnesty, pylint: disable=line-too-long
                        attr,
                        val,
                        self.url_name)

        for key, value in self.xml_attributes.items():
            if key not in self.metadata_to_strip:
                xml_object.set(key, serialize_field(value))

        if self.export_to_file():
            # Write the definition to a file
            url_path = name_to_pathname(self.url_name)
            # if folder is course then create file with name {course_run}.xml
            filepath = self._format_filepath(
                self.category,
                self.location.run if self.category == 'course' else url_path,
            )
            self.runtime.export_fs.makedirs(os.path.dirname(filepath),
                                            recreate=True)
            with self.runtime.export_fs.open(filepath, 'wb') as fileobj:
                ElementTree(xml_object).write(fileobj,
                                              pretty_print=True,
                                              encoding='utf-8')
        else:
            # Write all attributes from xml_object onto node
            node.clear()
            node.tag = xml_object.tag
            node.text = xml_object.text
            node.tail = xml_object.tail
            node.attrib.update(xml_object.attrib)
            node.extend(xml_object)

        node.set('url_name', self.url_name)

        # Special case for course pointers:
        if self.category == 'course':
            # add org and course attributes on the pointer tag
            node.set('org', self.location.org)
            node.set('course', self.location.course)
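
# A minimal sketch of driving the serializer above, mirroring how the exporters elsewhere in
# this list call it; the wrapper is illustrative and assumes etree is lxml.etree as used above.
def serialize_block(descriptor):
    """Illustrative: serialize one block to pretty-printed XML bytes via add_xml_to_node."""
    node = etree.Element('unknown')
    descriptor.add_xml_to_node(node)
    return etree.tostring(node, pretty_print=True, encoding='utf-8')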
Example #47
0
def export_to_xml(modulestore,
                  contentstore,
                  course_location,
                  root_dir,
                  course_dir,
                  draft_modulestore=None):
    """
    Export all modules from `modulestore` and content from `contentstore` as xml to `root_dir`.

    `modulestore`: A `ModuleStore` object that is the source of the modules to export
    `contentstore`: A `ContentStore` object that is the source of the content to export, can be None
    `course_location`: The `Location` of the `CourseModuleDescriptor` to export
    `root_dir`: The directory to write the exported xml to
    `course_dir`: The name of the directory inside `root_dir` to write the course content to
    `draft_modulestore`: An optional `DraftModuleStore` that contains draft content, which will be exported
        alongside the public content in the course.
    """

    course_id = course_location.course_id
    course = modulestore.get_course(course_id)

    fs = OSFS(root_dir)
    export_fs = course.runtime.export_fs = fs.makeopendir(course_dir)

    root = lxml.etree.Element('unknown')
    course.add_xml_to_node(root)

    with export_fs.open('course.xml', 'w') as course_xml:
        lxml.etree.ElementTree(root).write(course_xml)

    # export the static assets
    policies_dir = export_fs.makeopendir('policies')
    if contentstore:
        contentstore.export_all_for_course(
            course_location,
            root_dir + '/' + course_dir + '/static/',
            root_dir + '/' + course_dir + '/policies/assets.json',
        )

        # If we are using the default course image, export it to the
        # legacy location to support backwards compatibility.
        if course.course_image == course.fields['course_image'].default:
            try:
                course_image = contentstore.find(
                    StaticContent.compute_location(course.location.org,
                                                   course.location.course,
                                                   course.course_image), )
            except NotFoundError:
                pass
            else:
                output_dir = root_dir + '/' + course_dir + '/static/images/'
                if not os.path.isdir(output_dir):
                    os.makedirs(output_dir)
                with OSFS(output_dir).open('course_image.jpg',
                                           'wb') as course_image_file:
                    course_image_file.write(course_image.data)

    # export the static tabs
    export_extra_content(export_fs, modulestore, course_id, course_location,
                         'static_tab', 'tabs', '.html')

    # export the custom tags
    export_extra_content(export_fs, modulestore, course_id, course_location,
                         'custom_tag_template', 'custom_tags')

    # export the course updates
    export_extra_content(export_fs, modulestore, course_id, course_location,
                         'course_info', 'info', '.html')

    # export the 'about' data (e.g. overview, etc.)
    export_extra_content(export_fs, modulestore, course_id, course_location,
                         'about', 'about', '.html')

    # export the grading policy
    course_run_policy_dir = policies_dir.makeopendir(course.location.name)
    with course_run_policy_dir.open('grading_policy.json',
                                    'w') as grading_policy:
        grading_policy.write(dumps(course.grading_policy, cls=EdxJSONEncoder))

    # export all of the course metadata in policy.json
    with course_run_policy_dir.open('policy.json', 'w') as course_policy:
        policy = {'course/' + course.location.name: own_metadata(course)}
        course_policy.write(dumps(policy, cls=EdxJSONEncoder))

    # export draft content
    # NOTE: this code assumes that verticals are the top most draftable container
    # should we change the application, then this assumption will no longer
    # be valid
    if draft_modulestore is not None:
        draft_verticals = draft_modulestore.get_items([
            None, course_location.org, course_location.course, 'vertical',
            None, 'draft'
        ])
        if len(draft_verticals) > 0:
            draft_course_dir = export_fs.makeopendir(DRAFT_DIR)
            for draft_vertical in draft_verticals:
                parent_locs = draft_modulestore.get_parent_locations(
                    draft_vertical.location, course.location.course_id)
                # Don't try to export orphaned items.
                if len(parent_locs) > 0:
                    logging.debug('parent_locs = {0}'.format(parent_locs))
                    draft_vertical.xml_attributes[
                        'parent_sequential_url'] = Location(
                            parent_locs[0]).url()
                    sequential = modulestore.get_item(Location(parent_locs[0]))
                    index = sequential.children.index(
                        draft_vertical.location.url())
                    draft_vertical.xml_attributes[
                        'index_in_children_list'] = str(index)
                    draft_vertical.runtime.export_fs = draft_course_dir
                    node = lxml.etree.Element('unknown')
                    draft_vertical.add_xml_to_node(node)
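To make the draft-export bookkeeping above concrete, here is a minimal, dependency-free sketch of how `parent_sequential_url` and `index_in_children_list` end up on a draft vertical; the locations and dict stand-ins are invented for illustration.

# Illustrative stand-ins for a draft vertical and its parent sequential.
sequential = {
    'url': 'i4x://edX/Demo/sequential/week1',
    'children': [
        'i4x://edX/Demo/vertical/intro',
        'i4x://edX/Demo/vertical/lab',
    ],
}
draft_vertical = {
    'url': 'i4x://edX/Demo/vertical/lab',
    'xml_attributes': {},
}

# Record where the draft belongs so the import can re-attach it in the right place.
draft_vertical['xml_attributes']['parent_sequential_url'] = sequential['url']
draft_vertical['xml_attributes']['index_in_children_list'] = str(
    sequential['children'].index(draft_vertical['url'])
)

print(draft_vertical['xml_attributes'])
# -> {'parent_sequential_url': 'i4x://edX/Demo/sequential/week1', 'index_in_children_list': '1'}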
Example #48
0
def checklists_handler(request, tag=None, course_id=None, branch=None, version_guid=None, block=None, checklist_index=None):
    """
    The restful handler for checklists.

    GET
        html: return html page for all checklists
        json: return json representing all checklists. checklist_index is not supported for GET at this time.
    POST or PUT
        json: updates the checked state for items within a particular checklist. checklist_index is required.
    """
    location = BlockUsageLocator(course_id=course_id, branch=branch, version_guid=version_guid, usage_id=block)
    if not has_access(request.user, location):
        raise PermissionDenied()

    old_location = loc_mapper().translate_locator_to_location(location)

    modulestore = get_modulestore(old_location)
    course_module = modulestore.get_item(old_location)

    json_request = 'application/json' in request.META.get('HTTP_ACCEPT', 'application/json')
    if request.method == 'GET':
        # If course was created before checklists were introduced, copy them over
        # from the template.
        if not course_module.checklists:
            course_module.checklists = CourseDescriptor.checklists.default
            course_module.save()
            modulestore.update_metadata(old_location, own_metadata(course_module))

        expanded_checklists = expand_all_action_urls(course_module)
        if json_request:
            return JsonResponse(expanded_checklists)
        else:
            handler_url = location.url_reverse('checklists/', '')
            return render_to_response('checklists.html',
                                      {
                                          'handler_url': handler_url,
                                          # context_course is used by analytics
                                          'context_course': course_module,
                                          'checklists': expanded_checklists
                                      })
    elif json_request:
        # Can now assume POST or PUT because GET handled above.
        if checklist_index is not None and 0 <= int(checklist_index) < len(course_module.checklists):
            index = int(checklist_index)
            persisted_checklist = course_module.checklists[index]
            modified_checklist = json.loads(request.body)
            # Only thing the user can modify is the "checked" state.
            # We don't want to persist what comes back from the client because it will
            # include the expanded action URLs (which are non-portable).
            for item_index, item in enumerate(modified_checklist.get('items')):
                persisted_checklist['items'][item_index]['is_checked'] = item['is_checked']
            # seeming noop which triggers kvs to record that the metadata is
            # not default
            course_module.checklists = course_module.checklists
            course_module.save()
            modulestore.update_metadata(old_location, own_metadata(course_module))
            expanded_checklist = expand_checklist_action_url(course_module, persisted_checklist)
            return JsonResponse(expanded_checklist)
        else:
            return HttpResponseBadRequest(
                ("Could not save checklist state because the checklist index "
                 "was out of range or unspecified."),
                content_type="text/plain"
            )
    else:
        return HttpResponseNotFound()
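The only client-controlled field the handler persists is `is_checked`. A rough, standalone sketch of that merge step, with made-up checklist data (the real code works on the course module's `checklists` field):

import copy
import json


def merge_checked_state(persisted_checklist, request_body):
    """Copy only the 'is_checked' flags from the client payload, ignoring
    everything else (such as the expanded, non-portable action URLs)."""
    modified = json.loads(request_body)
    merged = copy.deepcopy(persisted_checklist)
    for index, item in enumerate(modified.get('items', [])):
        merged['items'][index]['is_checked'] = item['is_checked']
    return merged


persisted = {'short_description': 'Getting Started',
             'items': [{'action_text': 'Add a section', 'is_checked': False}]}
body = json.dumps({'items': [{'action_text': 'IGNORED', 'is_checked': True}]})
print(merge_checked_state(persisted, body)['items'][0]['is_checked'])  # True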
Example #49
0
def get_d3_section_grade_distrib(course_id, section):
    """
    Returns the grade distribution for the problems in the `section` section in a format for the d3 code.

    `course_id` a string that is the course's ID.

    `section` an int that is a zero-based index into the course's list of sections.

    Navigates to the specified section to find all the problems associated with that section and then finds the grade
    distribution for those problems. Finally returns an object formatted the way d3_stacked_bar_graph.js expects its
    data object to be.

    If this is requested multiple times quickly for the same course, it is better to call
    get_d3_problem_grade_distrib and pick out the sections of interest.

    Returns an array of dicts with the following keys (taken from d3_stacked_bar_graph.js's documentation)
      'xValue' - Corresponding value for the x-axis
      'stackData' - Array of objects with key, value pairs that represent a bar:
        'color' - Defines what "color" the bar will map to
        'value' - Maps to the height of the bar, along the y-axis
        'tooltip' - (Optional) Text to display on mouse hover
    """

    # Retrieve course object down to problems
    course = modulestore().get_course(course_id, depth=4)

    problem_set = []
    problem_info = {}
    c_subsection = 0
    for subsection in course.get_children()[section].get_children():
        c_subsection += 1
        c_unit = 0
        for unit in subsection.get_children():
            c_unit += 1
            c_problem = 0
            for child in unit.get_children():
                if child.location.category == 'problem':
                    c_problem += 1
                    problem_set.append(child.location)
                    problem_info[child.location] = {
                        'id': text_type(child.location),
                        'x_value': "P{0}.{1}.{2}".format(c_subsection, c_unit, c_problem),
                        'display_name': own_metadata(child).get('display_name', ''),
                    }

    # Retrieve grade distribution for these problems
    grade_distrib = get_problem_set_grade_distrib(course_id, problem_set)

    d3_data = []

    # Construct data for each problem to be sent to d3
    for problem in problem_set:
        stack_data = []

        if problem in grade_distrib:  # Some problems have no data because students have not tried them yet.
            max_grade = float(grade_distrib[problem]['max_grade'])
            for (grade, count_grade) in grade_distrib[problem]['grade_distrib']:
                percent = 0.0
                if max_grade > 0:
                    percent = round((grade * 100.0) / max_grade, 1)

                # Construct tooltip for problem in grade distribution view
                tooltip = {
                    'type': 'problem',
                    'problem_info_x': problem_info[problem]['x_value'],
                    'count_grade': count_grade,
                    'percent': percent,
                    'problem_info_n': problem_info[problem]['display_name'],
                    'grade': grade,
                    'max_grade': max_grade,
                }

                stack_data.append({
                    'color': percent,
                    'value': count_grade,
                    'tooltip': tooltip,
                })

        d3_data.append({
            'xValue': problem_info[problem]['x_value'],
            'stackData': stack_data,
        })

    return d3_data
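A single entry of the returned structure can be built without a modulestore; the grade distribution below is invented sample data:

# Invented sample distribution for one problem: (grade, number of students).
grade_distrib = {'max_grade': 10.0, 'grade_distrib': [(0, 4), (5, 7), (10, 12)]}

stack_data = []
max_grade = float(grade_distrib['max_grade'])
for grade, count_grade in grade_distrib['grade_distrib']:
    percent = round((grade * 100.0) / max_grade, 1) if max_grade > 0 else 0.0
    stack_data.append({
        'color': percent,      # the bar's color is keyed off the percent score
        'value': count_grade,  # the bar's height is the number of students
        'tooltip': {'grade': grade, 'max_grade': max_grade, 'percent': percent},
    })

d3_entry = {'xValue': 'P1.2.3', 'stackData': stack_data}
print(d3_entry['stackData'][1])  # {'color': 50.0, 'value': 7, 'tooltip': {...}}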
Example #50
0
def export_to_xml(modulestore,
                  contentstore,
                  course_location,
                  root_dir,
                  course_dir,
                  draft_modulestore=None):
    """
    Export all modules from `modulestore` and content from `contentstore` as xml to `root_dir`.

    `modulestore`: A `ModuleStore` object that is the source of the modules to export
    `contentstore`: A `ContentStore` object that is the source of the content to export
    `course_location`: The `Location` of the `CourseModuleDescriptor` to export
    `root_dir`: The directory to write the exported xml to
    `course_dir`: The name of the directory inside `root_dir` to write the course content to
    `draft_modulestore`: An optional `DraftModuleStore` that contains draft content, which will be exported
        alongside the public content in the course.
    """

    course = modulestore.get_item(course_location)

    fs = OSFS(root_dir)
    export_fs = fs.makeopendir(course_dir)

    xml = course.export_to_xml(export_fs)
    with export_fs.open('course.xml', 'w') as course_xml:
        course_xml.write(xml)

    # export the static assets
    contentstore.export_all_for_course(
        course_location, root_dir + '/' + course_dir + '/static/')

    # export the static tabs
    export_extra_content(export_fs, modulestore, course_location, 'static_tab',
                         'tabs', '.html')

    # export the custom tags
    export_extra_content(export_fs, modulestore, course_location,
                         'custom_tag_template', 'custom_tags')

    # export the course updates
    export_extra_content(export_fs, modulestore, course_location,
                         'course_info', 'info', '.html')

    # export the 'about' data (e.g. overview, etc.)
    export_extra_content(export_fs, modulestore, course_location, 'about',
                         'about', '.html')

    # export the grading policy
    policies_dir = export_fs.makeopendir('policies')
    course_run_policy_dir = policies_dir.makeopendir(course.location.name)
    with course_run_policy_dir.open('grading_policy.json',
                                    'w') as grading_policy:
        grading_policy.write(dumps(course.grading_policy, cls=EdxJSONEncoder))

    # export all of the course metadata in policy.json
    with course_run_policy_dir.open('policy.json', 'w') as course_policy:
        policy = {'course/' + course.location.name: own_metadata(course)}
        course_policy.write(dumps(policy, cls=EdxJSONEncoder))

    # export draft content
    # NOTE: this code assumes that verticals are the topmost draftable
    # container; should we change the application, this assumption will no
    # longer be valid.
    if draft_modulestore is not None:
        draft_verticals = draft_modulestore.get_items([
            None, course_location.org, course_location.course, 'vertical',
            None, 'draft'
        ])
        if len(draft_verticals) > 0:
            draft_course_dir = export_fs.makeopendir('drafts')
            for draft_vertical in draft_verticals:
                parent_locs = draft_modulestore.get_parent_locations(
                    draft_vertical.location, course.location.course_id)
                # Don't try to export orphaned items.
                if len(parent_locs) > 0:
                    logging.debug('parent_locs = {0}'.format(parent_locs))
                    draft_vertical.xml_attributes[
                        'parent_sequential_url'] = Location(
                            parent_locs[0]).url()
                    sequential = modulestore.get_item(Location(parent_locs[0]))
                    index = sequential.children.index(
                        draft_vertical.location.url())
                    draft_vertical.xml_attributes[
                        'index_in_children_list'] = str(index)
                    draft_vertical.export_to_xml(draft_course_dir)
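The policy files written by this export reduce to plain JSON keyed by the course run. A small standalone sketch of that layout, with placeholder paths and metadata (the real code writes through the export filesystem and EdxJSONEncoder):

import json
import os

root_dir, course_dir, run = '/tmp/export', 'demo_course', '2014_Spring'
policies_dir = os.path.join(root_dir, course_dir, 'policies', run)
if not os.path.isdir(policies_dir):
    os.makedirs(policies_dir)

grading_policy = {'GRADER': [], 'GRADE_CUTOFFS': {'Pass': 0.5}}
course_metadata = {'display_name': 'Demo Course', 'start': '2014-01-01T00:00'}

# grading_policy.json holds just the grading configuration ...
with open(os.path.join(policies_dir, 'grading_policy.json'), 'w') as policy_file:
    json.dump(grading_policy, policy_file, sort_keys=True, indent=4)

# ... while policy.json holds the rest of the course metadata, keyed by run.
with open(os.path.join(policies_dir, 'policy.json'), 'w') as policy_file:
    json.dump({'course/' + run: course_metadata}, policy_file, sort_keys=True, indent=4)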
Example #51
0
def save_item(request):
    """
    Will carry a json payload with these possible fields
    :id (required): the id
    :data (optional): the new value for the data
    :metadata (optional): new values for the metadata fields.
        Any whose values are None will be deleted, not set to None! Absent ones will be left alone
    :nullout (optional): which metadata fields to set to None
    """
    # The nullout is a bit of a temporary copout until we can make module_edit.coffee and the metadata editors a
    # little smarter and able to pass something more akin to {unset: [field, field]}

    try:
        item_location = request.POST['id']
    except KeyError:
        import inspect

        log.exception(
            '''Request missing required attribute 'id'.
                Request info:
                %s
                Caller:
                Function %s in file %s
            ''',
            request.META,
            inspect.currentframe().f_back.f_code.co_name,
            inspect.currentframe().f_back.f_code.co_filename
        )
        return HttpResponseBadRequest()

    # check permissions for this user within this course
    if not has_access(request.user, item_location):
        raise PermissionDenied()

    store = get_modulestore(Location(item_location))

    if request.POST.get('data') is not None:
        data = request.POST['data']
        store.update_item(item_location, data)

    # cdodge: note calling request.POST.get('children') will return None if children is an empty array
    # so it led to a bug whereby the last component to be deleted in the UI was not actually
    # deleting the children object from the children collection
    if 'children' in request.POST and request.POST['children'] is not None:
        children = request.POST['children']
        store.update_children(item_location, children)

    # cdodge: also commit any metadata which might have been passed along
    if request.POST.get('nullout') is not None or request.POST.get('metadata') is not None:
        # the postback is not the complete metadata, as there's system metadata which is
        # not presented to the end-user for editing. So let's fetch the original and
        # 'apply' the submitted metadata, so we don't end up deleting system metadata
        existing_item = modulestore().get_item(item_location)
        for metadata_key in request.POST.get('nullout', []):
            setattr(existing_item, metadata_key, None)

        # update existing metadata with submitted metadata (which can be partial)
        # IMPORTANT NOTE: if the client passed 'null' (None) for a piece of metadata that means 'remove it'. If
        # the intent is to make it None, use the nullout field
        for metadata_key, value in request.POST.get('metadata', {}).items():
            field = existing_item.fields[metadata_key]

            if value is None:
                field.delete_from(existing_item)
            else:
                value = field.from_json(value)
                field.write_to(existing_item, value)
        # Save the data that we've just changed to the underlying
        # MongoKeyValueStore before we update the mongo datastore.
        existing_item.save()
        # commit to datastore
        store.update_metadata(item_location, own_metadata(existing_item))

    return JsonResponse()
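The metadata merge is the subtle part: a field posted as null means "delete it so it reverts to default", while a field listed in nullout is truly stored as None. A toy version over plain dicts, with no XBlock fields involved:

def apply_metadata(existing, metadata=None, nullout=None):
    """existing: dict of explicitly-set metadata. Returns the merged dict."""
    merged = dict(existing)
    for key in (nullout or []):
        merged[key] = None                # truly store None
    for key, value in (metadata or {}).items():
        if value is None:
            merged.pop(key, None)         # revert to default by deleting
        else:
            merged[key] = value
    return merged


existing = {'display_name': 'Week 1', 'due': '2014-03-01', 'graded': True}
print(apply_metadata(existing,
                     metadata={'due': None, 'display_name': 'Week One'},
                     nullout=['graded']))
# -> {'display_name': 'Week One', 'graded': None}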
Example #52
0
def tabs_handler(request,
                 tag=None,
                 package_id=None,
                 branch=None,
                 version_guid=None,
                 block=None):
    """
    The restful handler for static tabs.

    GET
        html: return page for editing static tabs
        json: not supported
    PUT or POST
        json: update the tab order. It is expected that the request body contains a JSON-encoded dict with entry "tabs".
        The value for "tabs" is an array of tab locators, indicating the desired order of the tabs.

    Creating a tab, deleting a tab, or changing its contents is not supported through this method.
    Instead use the general xblock URL (see item.xblock_handler).
    """
    locator = BlockUsageLocator(package_id=package_id,
                                branch=branch,
                                version_guid=version_guid,
                                block_id=block)
    if not has_access(request.user, locator):
        raise PermissionDenied()

    old_location = loc_mapper().translate_locator_to_location(locator)
    store = get_modulestore(old_location)
    course_item = store.get_item(old_location)

    if 'application/json' in request.META.get('HTTP_ACCEPT',
                                              'application/json'):
        if request.method == 'GET':
            raise NotImplementedError('coming soon')
        else:
            if 'tabs' in request.json:

                def get_location_for_tab(tab):
                    """  Returns the location (old-style) for a tab. """
                    return loc_mapper().translate_locator_to_location(
                        BlockUsageLocator(tab))

                tabs = request.json['tabs']

                # get list of existing static tabs in course
                # make sure they are the same length (i.e. the number of tabs passed in equals the number
                # that we know about), otherwise we will inadvertently drop some!
                existing_static_tabs = [
                    t for t in course_item.tabs if t['type'] == 'static_tab'
                ]
                if len(existing_static_tabs) != len(tabs):
                    return JsonResponse(
                        {"error": "number of tabs must be {}".format(len(existing_static_tabs))},
                        status=400)

                # load all reference tabs, return BadRequest if we can't find any of them
                tab_items = []
                for tab in tabs:
                    item = modulestore('direct').get_item(
                        get_location_for_tab(tab))
                    if item is None:
                        return JsonResponse(
                            {"error": "no tab found for location {}".format(tab)},
                            status=400)

                    tab_items.append(item)

                # now just go through the existing course_tabs and re-order the static tabs
                reordered_tabs = []
                static_tab_idx = 0
                for tab in course_item.tabs:
                    if tab['type'] == 'static_tab':
                        reordered_tabs.append({
                            'type': 'static_tab',
                            'name': tab_items[static_tab_idx].display_name,
                            'url_slug': tab_items[static_tab_idx].location.name,
                        })
                        static_tab_idx += 1
                    else:
                        reordered_tabs.append(tab)

                # OK, re-assemble the static tabs in the new order
                course_item.tabs = reordered_tabs
                modulestore('direct').update_metadata(
                    course_item.location, own_metadata(course_item))
                return JsonResponse()
            else:
                raise NotImplementedError(
                    'Creating or changing tab content is not supported.')
    elif request.method == 'GET':  # assume html
        # check whether the tabs have been left uninitialized (e.g. to support courses created before tab support existed in studio)
        if course_item.tabs is None or len(course_item.tabs) == 0:
            initialize_course_tabs(course_item)

        # first get all static tabs from the tabs list
        # we do this because this is also the order in which items are displayed in the LMS
        static_tabs_refs = [
            t for t in course_item.tabs if t['type'] == 'static_tab'
        ]

        static_tabs = []
        for static_tab_ref in static_tabs_refs:
            static_tab_loc = old_location.replace(
                category='static_tab', name=static_tab_ref['url_slug'])
            static_tabs.append(modulestore('direct').get_item(static_tab_loc))

        components = [
            loc_mapper().translate_location(course_item.location.course_id,
                                            static_tab.location, False, True)
            for static_tab in static_tabs
        ]

        return render_to_response(
            'edit-tabs.html', {
                'context_course': course_item,
                'components': components,
                'course_locator': locator
            })
    else:
        return HttpResponseNotFound()
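Reordering only the static tabs while leaving the built-in tabs in place is a simple list walk. A standalone sketch, where the tab dicts are simplified stand-ins for the course's real tab entries:

def reorder_static_tabs(course_tabs, ordered_static_tabs):
    """Replace the static_tab entries in course_tabs with ordered_static_tabs,
    preserving the positions of every other tab type."""
    reordered, static_idx = [], 0
    for tab in course_tabs:
        if tab['type'] == 'static_tab':
            reordered.append(ordered_static_tabs[static_idx])
            static_idx += 1
        else:
            reordered.append(tab)
    return reordered


course_tabs = [
    {'type': 'courseware'},
    {'type': 'static_tab', 'name': 'Syllabus', 'url_slug': 'syllabus'},
    {'type': 'discussion'},
    {'type': 'static_tab', 'name': 'FAQ', 'url_slug': 'faq'},
]
new_order = [course_tabs[3], course_tabs[1]]  # swap the two static tabs
print([t.get('name', t['type']) for t in reorder_static_tabs(course_tabs, new_order)])
# -> ['courseware', 'FAQ', 'discussion', 'Syllabus']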
Example #53
0
    def editor_saved(self, user, old_metadata, old_content):
        """
        Used to update video values during the `self`:save method from CMS.
        old_metadata: dict of `self` fields with scope=settings that were explicitly set by the user.
        old_content: same as `old_metadata`, but for scope=content.
        Due to the code flow in item.py::_save_item, by the time this function is called the
        fields of the `self` instance have already been updated, but not yet saved.
        To obtain the values that were changed by user input,
        compare own_metadata(self) and old_metadata.
        The video player has two tabs, and because of the way the tabs are synced,
        metadata from the Basic tab is always sent the first time the video player is edited and saved, for example:
        {'youtube_id_1_0': u'3_yD_cEKoCk', 'display_name': u'Video', 'sub': u'3_yD_cEKoCk', 'html5_sources': []},
        which is why these fields are always present in old_metadata after the first save. This should be fixed.
        On subsequent save requests html5_sources are always sent too, regardless of whether the user changed them.
        That means html5_sources is always in the list of changed fields (the `metadata` param in save_item).
        This should be fixed too.
        """
        metadata_was_changed_by_user = old_metadata != own_metadata(self)

        if edxval_api and int(self.duration) > 0:
            video_url = ""
            if len(self.html5_sources):
                video_url = self.html5_sources[0]
            elif self.youtube_id_1_0:
                video_url = "https://www.youtube.com/watch?v=" + str(self.youtube_id_1_0)

            if video_url:
                if not self.edx_video_id:
                    now = datetime.datetime.now()
                    hash_object = hashlib.sha256(str(now))
                    hex_dig = hash_object.hexdigest()
                    self.edx_video_id = hex_dig
                    self.save()

                payload = {
                    "url": video_url,
                    "edx_video_id": self.edx_video_id,
                    "duration": self.duration,
                    "status": "live",
                    "encoded_videos": [{
                        "url": video_url,
                        "file_size": 1,
                        "bitrate": 1,
                        "profile": "mobile_high"
                    }]
                }

                # TODO: once the edxval app is updated, replace this try/except
                # with a plain edxval_api.create_video(payload) call.
                try:
                    edxval_api.get_video_info(self.edx_video_id)
                except Exception:
                    edxval_api.create_video(payload)

        # There is an edge case where old_metadata and own_metadata are the same and we are importing a transcript
        # from youtube: there is then a syncing issue where the html5 subs do not sync with the youtube sub. We can
        # improve the sync by checking whether a transcript is present for the video; if the transcript for any
        # html5_id is missing, trigger manage_video_subtitles_save to create the missing transcript for that html5_id.
        if not metadata_was_changed_by_user and self.sub and hasattr(self, 'html5_sources'):
            html5_ids = get_html5_ids(self.html5_sources)
            for subs_id in html5_ids:
                try:
                    Transcript.asset(self.location, subs_id)
                except NotFoundError:
                    # If a transcript does not exist for a particular html5_id then there is no need to check the
                    # other html5_ids, because we have to create a new transcript for the missing html5_id by turning
                    # on the metadata_was_changed_by_user flag.
                    metadata_was_changed_by_user = True
                    break

        if metadata_was_changed_by_user:
            self.edx_video_id = self.edx_video_id and self.edx_video_id.strip()

            # We want to override `youtube_id_1_0` with val youtube profile in the first place when someone adds/edits
            # an `edx_video_id` or its underlying YT val profile. Without this, override will only happen when a user
            # saves the video second time. This is because of the syncing of basic and advanced video settings which
            # also syncs val youtube id from basic tab's `Video Url` to advanced tab's `Youtube ID`.
            if self.edx_video_id and edxval_api:
                val_youtube_id = edxval_api.get_url_for_profile(self.edx_video_id, 'youtube')
                if val_youtube_id and self.youtube_id_1_0 != val_youtube_id:
                    self.youtube_id_1_0 = val_youtube_id

            manage_video_subtitles_save(
                self,
                user,
                old_metadata if old_metadata else None,
                generate_translation=True
            )
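The fallback `edx_video_id` above is just a SHA-256 hex digest of the current timestamp. A standalone equivalent, shown only as a sketch of the idea (the `.encode()` call is needed on Python 3; the snippet above is Python 2):

import datetime
import hashlib


def generate_edx_video_id():
    """Derive a pseudo-unique video id from the current time, as the
    editor_saved hook does when no edx_video_id has been set yet."""
    now = datetime.datetime.now()
    return hashlib.sha256(str(now).encode('utf-8')).hexdigest()


print(generate_edx_video_id())  # e.g. '9f3c0a...' (64 hex characters)

In newer code a uuid4 would be the more conventional choice for an opaque identifier; the hash-of-timestamp approach is shown only because it mirrors the snippet above.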
Example #54
0
    def assertCoursesEqual(self, course1_id, course2_id):
        """
        Verifies the content of the two given courses are equal
        """
        course1_items = self.store.get_items(course1_id)
        course2_items = self.store.get_items(course2_id)
        self.assertGreater(len(course1_items), 0)  # ensure it found content instead of [] == []
        self.assertEqual(len(course1_items), len(course2_items))

        for course1_item in course1_items:
            course2_item_location = course1_item.location.map_into_course(course2_id)
            if course1_item.location.category == 'course':
                # mongo uses the run as the name, split uses 'course'
                store = self.store._get_modulestore_for_courseid(course2_id)  # pylint: disable=protected-access
                new_name = 'course' if isinstance(store, SplitMongoModuleStore) else course2_item_location.run
                course2_item_location = course2_item_location.replace(name=new_name)
            course2_item = self.store.get_item(course2_item_location)

            try:
                # compare published state
                self.assertEqual(
                    self.store.compute_publish_state(course1_item),
                    self.store.compute_publish_state(course2_item)
                )
            except AssertionError:
                c1_state = self.compute_real_state(course1_item)
                c2_state = self.compute_real_state(course2_item)
                self.assertEqual(
                    c1_state,
                    c2_state,
                    "Publish states not equal: course item {} in state {} != course item {} in state {}".format(
                        course1_item.location, c1_state, course2_item.location, c2_state
                    )
                )

            # compare data
            self.assertEqual(hasattr(course1_item, 'data'), hasattr(course2_item, 'data'))
            if hasattr(course1_item, 'data'):
                self.assertEqual(course1_item.data, course2_item.data)

            # compare meta-data
            self.assertEqual(own_metadata(course1_item), own_metadata(course2_item))

            # compare children
            self.assertEqual(course1_item.has_children, course2_item.has_children)
            if course1_item.has_children:
                expected_children = []
                for course1_item_child in course1_item.children:
                    expected_children.append(
                        course1_item_child.map_into_course(course2_id)
                    )
                # also process course2_children just in case they have version guids
                course2_children = [child.version_agnostic() for child in course2_item.children]
                self.assertEqual(expected_children, course2_children)

        # compare assets
        content_store = self.store.contentstore
        course1_assets, count_course1_assets = content_store.get_all_content_for_course(course1_id)
        _, count_course2_assets = content_store.get_all_content_for_course(course2_id)
        self.assertEqual(count_course1_assets, count_course2_assets)
        for asset in course1_assets:
            asset_son = asset.get('content_son', asset['_id'])
            self.assertAssetsEqual(asset_son, course1_id, course2_id)
Example #55
0
def _save_item(request, usage_key, data=None, children=None, metadata=None, nullout=None,
               grader_type=None, publish=None):
    """
    Saves xblock w/ its fields. Has special processing for grader_type, publish, nullout, and Nones in metadata.
    nullout means to truly set the field to None whereas nones in metadata mean to unset them (so they revert
    to default).
    """
    store = get_modulestore(usage_key)

    try:
        existing_item = store.get_item(usage_key)
    except ItemNotFoundError:
        if usage_key.category in CREATE_IF_NOT_FOUND:
            # New module at this location, for pages that are not pre-created.
            # Used for course info handouts.
            store.create_and_save_xmodule(usage_key)
            existing_item = store.get_item(usage_key)
        else:
            raise
    except InvalidLocationError:
        log.error("Can't find item by location.")
        return JsonResponse({"error": "Can't find item by location: " + unicode(usage_key)}, 404)

    old_metadata = own_metadata(existing_item)

    if publish:
        if publish == 'make_private':
            _xmodule_recurse(
                existing_item,
                lambda i: modulestore().unpublish(i.location),
                ignore_exception=ItemNotFoundError
            )
        elif publish == 'create_draft':
            # This recursively clones the existing item location to a draft location (the draft is
            # implicit, because modulestore is a Draft modulestore)
            _xmodule_recurse(
                existing_item,
                lambda i: modulestore().convert_to_draft(i.location),
                ignore_exception=DuplicateItemError
            )

    if data:
        # TODO Allow any scope.content fields not just "data" (exactly like the get below this)
        existing_item.data = data
    else:
        data = existing_item.get_explicitly_set_fields_by_scope(Scope.content)

    if children is not None:
        children_usage_keys = [
            UsageKey.from_string(child)
            for child
            in children
        ]
        existing_item.children = children_usage_keys

    # also commit any metadata which might have been passed along
    if nullout is not None or metadata is not None:
        # the postback is not the complete metadata, as there's system metadata which is
        # not presented to the end-user for editing. So let's use the original (existing_item) and
        # 'apply' the submitted metadata, so we don't end up deleting system metadata.
        if nullout is not None:
            for metadata_key in nullout:
                setattr(existing_item, metadata_key, None)

        # update existing metadata with submitted metadata (which can be partial)
        # IMPORTANT NOTE: if the client passed 'null' (None) for a piece of metadata that means 'remove it'. If
        # the intent is to make it None, use the nullout field
        if metadata is not None:
            for metadata_key, value in metadata.items():
                field = existing_item.fields[metadata_key]

                if value is None:
                    field.delete_from(existing_item)
                else:
                    try:
                        value = field.from_json(value)
                    except ValueError:
                        return JsonResponse({"error": "Invalid data"}, 400)
                    field.write_to(existing_item, value)

        if existing_item.category == 'video':
            manage_video_subtitles_save(existing_item, request.user, old_metadata, generate_translation=True)

    # commit to datastore
    store.update_item(existing_item, request.user.id)

    result = {
        'id': unicode(usage_key),
        'data': data,
        'metadata': own_metadata(existing_item)
    }

    if grader_type is not None:
        result.update(CourseGradingModel.update_section_grader_type(existing_item, grader_type, request.user))

    # Make public after updating the xblock, in case the caller asked
    # for both an update and a publish.
    if publish and publish == 'make_public':
        def _publish(block):
            # This is super gross, but prevents us from publishing something that
            # we shouldn't. Ideally, all modulestores would have a consistent
            # interface for publishing. However, as of now, only the DraftMongoModulestore
            # does, so we have to check for the attribute explicitly.
            store = get_modulestore(block.location)
            store.publish(block.location, request.user.id)

        _xmodule_recurse(
            existing_item,
            _publish
        )

    # Note that children aren't being returned until we have a use case.
    return JsonResponse(result)
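The make_private and create_draft branches rely on recursing over a block and all of its descendants while tolerating a specific exception. `_xmodule_recurse` itself lives in the CMS code; the sketch below only approximates the pattern with a toy node class:

class Node(object):
    """Toy stand-in for an XModule with children."""
    def __init__(self, name, children=None):
        self.name = name
        self.children = children or []


def recurse(block, action, ignore_exception=None):
    """Apply `action` to every descendant of `block` and then to `block`
    itself; exceptions of type `ignore_exception` are swallowed so one
    missing child does not abort the whole walk."""
    for child in block.children:
        recurse(child, action, ignore_exception)
    try:
        action(block)
    except Exception as exc:
        if ignore_exception is None or not isinstance(exc, ignore_exception):
            raise


tree = Node('vertical', [Node('html'), Node('problem')])
published = []
recurse(tree, lambda block: published.append(block.name))
print(published)  # ['html', 'problem', 'vertical']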
Example #56
0
    def assertCoursesEqual(self, course1_id, course2_id):
        """
        Verifies the content of the two given courses are equal
        """
        course1_items = self.store.get_items(course1_id)
        course2_items = self.store.get_items(course2_id)
        self.assertGreater(len(course1_items), 0)  # ensure it found content instead of [] == []
        if len(course1_items) != len(course2_items):
            course1_block_ids = {item.location.block_id for item in course1_items}
            course2_block_ids = {item.location.block_id for item in course2_items}
            raise AssertionError(
                "Course1 extra blocks: {}; course2 extra blocks: {}".format(
                    course1_block_ids - course2_block_ids,
                    course2_block_ids - course1_block_ids))

        for course1_item in course1_items:
            course1_item_loc = course1_item.location
            course2_item_loc = course2_id.make_usage_key(
                course1_item_loc.block_type, course1_item_loc.block_id)
            if course1_item_loc.block_type == 'course':
                # mongo uses the run as the name, split uses 'course'
                store = self.store._get_modulestore_for_courselike(course2_id)  # pylint: disable=protected-access
                new_name = 'course' if isinstance(
                    store, SplitMongoModuleStore) else course2_item_loc.run
                course2_item_loc = course2_item_loc.replace(name=new_name)
            course2_item = self.store.get_item(course2_item_loc)

            # compare published state
            self.assertEqual(self.store.has_published_version(course1_item),
                             self.store.has_published_version(course2_item))

            # compare data
            self.assertEqual(hasattr(course1_item, 'data'),
                             hasattr(course2_item, 'data'))
            if hasattr(course1_item, 'data'):
                self.assertEqual(course1_item.data, course2_item.data)

            # compare meta-data
            course1_metadata = own_metadata(course1_item)
            course2_metadata = own_metadata(course2_item)
            # Omit edx_video_id as it can be different in case of external video imports.
            course1_metadata.pop('edx_video_id', None)
            course2_metadata.pop('edx_video_id', None)
            self.assertEqual(course1_metadata, course2_metadata)

            # compare children
            self.assertEqual(course1_item.has_children,
                             course2_item.has_children)
            if course1_item.has_children:
                expected_children = []
                for course1_item_child in course1_item.children:
                    expected_children.append(
                        course2_id.make_usage_key(
                            course1_item_child.block_type,
                            course1_item_child.block_id))
                self.assertEqual(expected_children, course2_item.children)

        # compare assets
        content_store = self.store.contentstore
        course1_assets, count_course1_assets = content_store.get_all_content_for_course(
            course1_id)
        _, count_course2_assets = content_store.get_all_content_for_course(
            course2_id)
        self.assertEqual(count_course1_assets, count_course2_assets)
        for asset in course1_assets:
            asset_son = asset.get('content_son', asset['_id'])
            self.assertAssetsEqual(asset_son, course1_id, course2_id)
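The block-count check above produces a useful failure message by diffing the two sets of block ids. The same idea in isolation, with made-up ids:

course1_block_ids = {'course', 'chapter1', 'seq1', 'vertical1', 'problem1'}
course2_block_ids = {'course', 'chapter1', 'seq1', 'vertical1'}

if course1_block_ids != course2_block_ids:
    message = "Course1 extra blocks: {}; course2 extra blocks: {}".format(
        course1_block_ids - course2_block_ids,
        course2_block_ids - course1_block_ids)
    print(message)  # Course1 extra blocks: {'problem1'}; course2 extra blocks: set()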
Example #57
0
def export_to_xml(modulestore, contentstore, course_key, root_dir, course_dir):
    """
    Export all modules from `modulestore` and content from `contentstore` as xml to `root_dir`.

    `modulestore`: A `ModuleStore` object that is the source of the modules to export
    `contentstore`: A `ContentStore` object that is the source of the content to export, can be None
    `course_key`: The `CourseKey` of the `CourseModuleDescriptor` to export
    `root_dir`: The directory to write the exported xml to
    `course_dir`: The name of the directory inside `root_dir` to write the course content to
    """

    with modulestore.bulk_operations(course_key):

        course = modulestore.get_course(course_key,
                                        depth=None)  # None means infinite
        fsm = OSFS(root_dir)
        export_fs = course.runtime.export_fs = fsm.makeopendir(course_dir)
        root_course_dir = root_dir + '/' + course_dir

        root = lxml.etree.Element('unknown')

        # export only the published content
        with modulestore.branch_setting(ModuleStoreEnum.Branch.published_only,
                                        course_key):
            # change all of the references inside the course to use the xml expected key type w/o version & branch
            xml_centric_course_key = CourseLocator(course_key.org,
                                                   course_key.course,
                                                   course_key.run,
                                                   deprecated=True)
            adapt_references(course, xml_centric_course_key, export_fs)

            course.add_xml_to_node(root)

        with export_fs.open('course.xml', 'w') as course_xml:
            lxml.etree.ElementTree(root).write(course_xml)

        # Export the modulestore's asset metadata.
        asset_dir = root_course_dir + '/' + AssetMetadata.EXPORTED_ASSET_DIR + '/'
        if not os.path.isdir(asset_dir):
            os.makedirs(asset_dir)
        asset_root = lxml.etree.Element(AssetMetadata.ALL_ASSETS_XML_TAG)
        course_assets = modulestore.get_all_asset_metadata(course_key, None)
        for asset_md in course_assets:
            # All asset types are exported using the "asset" tag - but their asset type is specified in each asset key.
            asset = lxml.etree.SubElement(asset_root,
                                          AssetMetadata.ASSET_XML_TAG)
            asset_md.to_xml(asset)
        with OSFS(asset_dir).open(AssetMetadata.EXPORTED_ASSET_FILENAME,
                                  'w') as asset_xml_file:
            lxml.etree.ElementTree(asset_root).write(asset_xml_file)

        # export the static assets
        policies_dir = export_fs.makeopendir('policies')
        if contentstore:
            contentstore.export_all_for_course(
                course_key,
                root_course_dir + '/static/',
                root_course_dir + '/policies/assets.json',
            )

            # If we are using the default course image, export it to the
            # legacy location to support backwards compatibility.
            if course.course_image == course.fields['course_image'].default:
                try:
                    course_image = contentstore.find(
                        StaticContent.compute_location(course.id,
                                                       course.course_image), )
                except NotFoundError:
                    pass
                else:
                    output_dir = root_course_dir + '/static/images/'
                    if not os.path.isdir(output_dir):
                        os.makedirs(output_dir)
                    with OSFS(output_dir).open('course_image.jpg',
                                               'wb') as course_image_file:
                        course_image_file.write(course_image.data)

        # export the static tabs
        export_extra_content(export_fs, modulestore, course_key,
                             xml_centric_course_key, 'static_tab', 'tabs',
                             '.html')

        # export the custom tags
        export_extra_content(export_fs, modulestore, course_key,
                             xml_centric_course_key, 'custom_tag_template',
                             'custom_tags')

        # export the course updates
        export_extra_content(export_fs, modulestore, course_key,
                             xml_centric_course_key, 'course_info', 'info',
                             '.html')

        # export the 'about' data (e.g. overview, etc.)
        export_extra_content(export_fs, modulestore, course_key,
                             xml_centric_course_key, 'about', 'about', '.html')

        # export the grading policy
        course_run_policy_dir = policies_dir.makeopendir(course.location.name)
        with course_run_policy_dir.open('grading_policy.json',
                                        'w') as grading_policy:
            grading_policy.write(
                dumps(course.grading_policy,
                      cls=EdxJSONEncoder,
                      sort_keys=True,
                      indent=4))

        # export all of the course metadata in policy.json
        with course_run_policy_dir.open('policy.json', 'w') as course_policy:
            policy = {'course/' + course.location.name: own_metadata(course)}
            course_policy.write(
                dumps(policy, cls=EdxJSONEncoder, sort_keys=True, indent=4))

        #### DRAFTS ####
        # xml backed courses don't support drafts!
        if course.runtime.modulestore.get_modulestore_type() != ModuleStoreEnum.Type.xml:
            # NOTE: we need to explicitly implement the logic for setting the vertical's parent
            # and index here since the XML modulestore cannot load draft modules
            with modulestore.branch_setting(
                    ModuleStoreEnum.Branch.draft_preferred, course_key):
                draft_modules = modulestore.get_items(
                    course_key,
                    qualifiers={'category': {
                        '$nin': DIRECT_ONLY_CATEGORIES
                    }},
                    revision=ModuleStoreEnum.RevisionOption.draft_only)

                if draft_modules:
                    draft_course_dir = export_fs.makeopendir(DRAFT_DIR)

                    # accumulate tuples of draft_modules and their parents in
                    # this list:
                    draft_node_list = []

                    for draft_module in draft_modules:
                        parent_loc = modulestore.get_parent_location(
                            draft_module.location,
                            revision=ModuleStoreEnum.RevisionOption.draft_preferred)

                        # if module has no parent, set its parent_url to `None`
                        parent_url = None
                        if parent_loc is not None:
                            parent_url = parent_loc.to_deprecated_string()

                        draft_node = draft_node_constructor(
                            draft_module,
                            location=draft_module.location,
                            url=draft_module.location.to_deprecated_string(),
                            parent_location=parent_loc,
                            parent_url=parent_url,
                        )

                        draft_node_list.append(draft_node)

                    for draft_node in get_draft_subtree_roots(draft_node_list):
                        # only export the roots of the draft subtrees
                        # since export_from_xml (called by `add_xml_to_node`)
                        # exports a whole tree

                        # ensure module has "xml_attributes" attr
                        if not hasattr(draft_node.module, 'xml_attributes'):
                            draft_node.module.xml_attributes = {}

                        # Don't try to export orphaned items
                        # and their descendants.
                        if draft_node.parent_location is None:
                            continue

                        logging.debug('parent_loc = {0}'.format(
                            draft_node.parent_location))

                        draft_node.module.xml_attributes[
                            'parent_url'] = draft_node.parent_url
                        parent = modulestore.get_item(
                            draft_node.parent_location)
                        index = parent.children.index(
                            draft_node.module.location)
                        draft_node.module.xml_attributes[
                            'index_in_children_list'] = str(index)

                        draft_node.module.runtime.export_fs = draft_course_dir
                        adapt_references(draft_node.module,
                                         xml_centric_course_key,
                                         draft_course_dir)
                        node = lxml.etree.Element('unknown')

                        draft_node.module.add_xml_to_node(node)
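Only the roots of the draft subtrees are exported, since exporting a root writes out its whole tree. A rough standalone version of that root-finding step (the real `get_draft_subtree_roots` lives in the modulestore code; the node tuples here are simplified):

from collections import namedtuple

DraftNode = namedtuple('DraftNode', ['url', 'parent_url'])


def draft_subtree_roots(draft_nodes):
    """Yield the nodes whose parent is not itself a draft node; exporting
    those roots covers every draft descendant exactly once."""
    draft_urls = {node.url for node in draft_nodes}
    for node in draft_nodes:
        if node.parent_url not in draft_urls:
            yield node


nodes = [
    DraftNode('vertical/a', 'sequential/1'),  # parent is published -> a root
    DraftNode('html/b', 'vertical/a'),        # parent is a draft   -> skipped
    DraftNode('vertical/c', 'sequential/2'),  # another root
]
print([node.url for node in draft_subtree_roots(nodes)])  # ['vertical/a', 'vertical/c']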
Example #58
0
    def add_xml_to_node(self, node):
        """
        For exporting, set data on `node` from ourselves.
        """
        # Get the definition
        xml_object = self.definition_to_xml(self.runtime.export_fs)
        for aside in self.runtime.get_asides(self):
            if aside.needs_serialization():
                aside_node = etree.Element("unknown_root",
                                           nsmap=XML_NAMESPACES)
                aside.add_xml_to_node(aside_node)
                xml_object.append(aside_node)

        self.clean_metadata_from_xml(xml_object)

        # Set the tag on both nodes so we get the file path right.
        xml_object.tag = self.category
        node.tag = self.category

        # Add the non-inherited metadata
        for attr in sorted(own_metadata(self)):
            # don't want e.g. data_dir
            if attr not in self.metadata_to_strip and attr not in self.metadata_to_export_to_policy:
                val = serialize_field(self._field_data.get(self, attr))
                try:
                    xml_object.set(attr, val)
                except Exception:
                    logging.exception(
                        u'Failed to serialize metadata attribute %s with value %s in module %s. This could mean data loss!!!',
                        attr, val, self.url_name)

        for key, value in self.xml_attributes.items():
            if key not in self.metadata_to_strip:
                xml_object.set(key, serialize_field(value))

        if self.export_to_file():
            # Write the definition to a file
            url_path = name_to_pathname(self.url_name)
            filepath = self._format_filepath(self.category, url_path)
            self.runtime.export_fs.makedir(os.path.dirname(filepath),
                                           recursive=True,
                                           allow_recreate=True)
            with self.runtime.export_fs.open(filepath, 'w') as fileobj:
                ElementTree(xml_object).write(fileobj,
                                              pretty_print=True,
                                              encoding='utf-8')
        else:
            # Write all attributes from xml_object onto node
            node.clear()
            node.tag = xml_object.tag
            node.text = xml_object.text
            node.tail = xml_object.tail
            node.attrib.update(xml_object.attrib)
            node.extend(xml_object)

        node.set('url_name', self.url_name)

        # Special case for course pointers:
        if self.category == 'course':
            # add org and course attributes on the pointer tag
            node.set('org', self.location.org)
            node.set('course', self.location.course)
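Writing the non-inherited metadata as XML attributes boils down to a loop over `own_metadata` with a couple of skip lists. A simplified sketch using the standard-library ElementTree, where the metadata values are already strings (the real code runs them through `serialize_field`):

import xml.etree.ElementTree as ET


def metadata_to_attributes(category, metadata, strip=(), to_policy=()):
    """Build an element for `category` whose attributes are the explicitly
    set metadata, minus anything stripped or exported to policy.json."""
    node = ET.Element(category)
    for attr in sorted(metadata):
        if attr not in strip and attr not in to_policy:
            node.set(attr, metadata[attr])
    return node


node = metadata_to_attributes(
    'problem',
    {'display_name': 'Checkpoint', 'data_dir': '/tmp', 'weight': '2.0'},
    strip=('data_dir',),
)
print(ET.tostring(node))  # b'<problem display_name="Checkpoint" weight="2.0" />'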
Example #59
0
def get_d3_problem_grade_distrib(course_id):
    """
    Returns problem grade distribution information for each section, data already in format for d3 function.

    `course_id` the ID of the course of interest

    Returns an array of dicts in the order of the sections. Each dict has:
      'display_name' - display name for the section
      'data' - data for the d3_stacked_bar_graph function of the grade distribution for that problem
    """

    prob_grade_distrib, total_student_count = get_problem_grade_distribution(course_id)
    d3_data = []

    # Retrieve course object down to problems
    course = modulestore().get_course(course_id, depth=4)

    # Iterate through sections, subsections, units, problems
    for section in course.get_children():
        curr_section = {}
        curr_section['display_name'] = own_metadata(section).get('display_name', '')
        data = []
        c_subsection = 0
        for subsection in section.get_children():
            c_subsection += 1
            c_unit = 0
            for unit in subsection.get_children():
                c_unit += 1
                c_problem = 0
                for child in unit.get_children():

                    # Student data is at the problem level
                    if child.location.category == 'problem':
                        c_problem += 1
                        stack_data = []

                        # Construct label to display for this problem
                        label = "P{0}.{1}.{2}".format(c_subsection, c_unit, c_problem)

                        # Only problems in prob_grade_distrib have had a student submission.
                        if child.location in prob_grade_distrib:

                            # Get max_grade, grade_distribution for this problem
                            problem_info = prob_grade_distrib[child.location]

                            # Get problem_name for tooltip
                            problem_name = own_metadata(child).get('display_name', '')

                            # Compute percent of this grade over max_grade
                            max_grade = float(problem_info['max_grade'])
                            for (grade, count_grade) in problem_info['grade_distrib']:
                                percent = 0.0
                                if max_grade > 0:
                                    percent = round((grade * 100.0) / max_grade, 1)

                                # Compute percent of students with this grade
                                student_count_percent = 0
                                if total_student_count.get(child.location, 0) > 0:
                                    student_count_percent = count_grade * 100 / total_student_count[child.location]

                                # Tooltip parameters for problem in grade distribution view
                                tooltip = {
                                    'type': 'problem',
                                    'label': label,
                                    'problem_name': problem_name,
                                    'count_grade': count_grade,
                                    'percent': percent,
                                    'grade': grade,
                                    'max_grade': max_grade,
                                    'student_count_percent': student_count_percent,
                                }

                                # Construct data to be sent to d3
                                stack_data.append({
                                    'color': percent,
                                    'value': count_grade,
                                    'tooltip': tooltip,
                                    'module_url': text_type(child.location),
                                })

                        problem = {
                            'xValue': label,
                            'stackData': stack_data,
                        }
                        data.append(problem)
        curr_section['data'] = data

        d3_data.append(curr_section)

    return d3_data
Example #60
0
def _save_item(user,
               usage_key,
               data=None,
               children=None,
               metadata=None,
               nullout=None,
               grader_type=None,
               publish=None):
    """
    Saves xblock w/ its fields. Has special processing for grader_type, publish, nullout, and Nones in metadata.
    nullout means to truly set the field to None whereas nones in metadata mean to unset them (so they revert
    to default).
    """
    store = modulestore()

    try:
        existing_item = store.get_item(usage_key)
    except ItemNotFoundError:
        if usage_key.category in CREATE_IF_NOT_FOUND:
            # New module at this location, for pages that are not pre-created.
            # Used for course info handouts.
            existing_item = store.create_item(user.id, usage_key.course_key,
                                              usage_key.block_type,
                                              usage_key.block_id)
        else:
            raise
    except InvalidLocationError:
        log.error("Can't find item by location.")
        return JsonResponse(
            {"error": "Can't find item by location: " + unicode(usage_key)},
            404)

    old_metadata = own_metadata(existing_item)
    old_content = existing_item.get_explicitly_set_fields_by_scope(
        Scope.content)

    if publish:
        if publish == 'make_private':
            try:
                store.unpublish(existing_item.location, user.id)
            except ItemNotFoundError:
                pass
        elif publish == 'create_draft':
            try:
                store.convert_to_draft(existing_item.location, user.id)
            except DuplicateItemError:
                pass

    if data:
        # TODO Allow any scope.content fields not just "data" (exactly like the get below this)
        existing_item.data = data
    else:
        data = old_content['data'] if 'data' in old_content else None

    if children is not None:
        children_usage_keys = []
        for child in children:
            child_usage_key = UsageKey.from_string(child)
            child_usage_key = child_usage_key.replace(
                course_key=modulestore().fill_in_run(child_usage_key.course_key))
            children_usage_keys.append(child_usage_key)
        existing_item.children = children_usage_keys

    # also commit any metadata which might have been passed along
    if nullout is not None or metadata is not None:
        # the postback is not the complete metadata, as there's system metadata which is
        # not presented to the end-user for editing. So let's use the original (existing_item) and
        # 'apply' the submitted metadata, so we don't end up deleting system metadata.
        if nullout is not None:
            for metadata_key in nullout:
                setattr(existing_item, metadata_key, None)

        # update existing metadata with submitted metadata (which can be partial)
        # IMPORTANT NOTE: if the client passed 'null' (None) for a piece of metadata that means 'remove it'. If
        # the intent is to make it None, use the nullout field
        if metadata is not None:
            for metadata_key, value in metadata.items():
                field = existing_item.fields[metadata_key]

                if value is None:
                    field.delete_from(existing_item)
                else:
                    try:
                        value = field.from_json(value)
                    except ValueError:
                        return JsonResponse({"error": "Invalid data"}, 400)
                    field.write_to(existing_item, value)

    if callable(getattr(existing_item, "editor_saved", None)):
        existing_item.editor_saved(user, old_metadata, old_content)

    # commit to datastore
    store.update_item(existing_item, user.id)

    # for static tabs, their containing course also records their display name
    if usage_key.category == 'static_tab':
        course = store.get_course(usage_key.course_key)
        # find the course's reference to this tab and update the name.
        static_tab = CourseTabList.get_tab_by_slug(course.tabs, usage_key.name)
        # only update if changed
        if static_tab and static_tab['name'] != existing_item.display_name:
            static_tab['name'] = existing_item.display_name
            store.update_item(course, user.id)

    result = {
        'id': unicode(usage_key),
        'data': data,
        'metadata': own_metadata(existing_item)
    }

    if grader_type is not None:
        result.update(
            CourseGradingModel.update_section_grader_type(
                existing_item, grader_type, user))

    # Make public after updating the xblock, in case the caller asked
    # for both an update and a publish.
    if publish and publish == 'make_public':
        modulestore().publish(existing_item.location, user.id)

    # Note that children aren't being returned until we have a use case.
    return JsonResponse(result)
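The static-tab special case just keeps the course's tab list in sync with the tab's `display_name`. Stripped of the modulestore calls it is a one-pass update over a list of tab dicts; the slug lookup below is a stand-in for `CourseTabList.get_tab_by_slug`:

def sync_static_tab_name(course_tabs, url_slug, new_display_name):
    """Return True if a static tab with `url_slug` was found and its recorded
    name actually changed, mirroring the 'only update if changed' check."""
    for tab in course_tabs:
        if tab.get('type') == 'static_tab' and tab.get('url_slug') == url_slug:
            if tab['name'] != new_display_name:
                tab['name'] = new_display_name
                return True
            return False
    return False


tabs = [{'type': 'courseware'},
        {'type': 'static_tab', 'name': 'Syllabus', 'url_slug': 'syllabus'}]
print(sync_static_tab_name(tabs, 'syllabus', 'Course Syllabus'))  # True
print(tabs[1]['name'])                                            # Course Syllabus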