Example #1
    def add(self, block_structure):
        """
        Store a compressed and pickled serialization of the given
        block structure into the given cache.

        The key in the cache is 'root.key.<root_block_usage_key>'.
        The data stored in the cache includes the structure's
        block relations, transformer data, and block data.

        Arguments:
            block_structure (BlockStructure) - The block structure
                that is to be serialized to the given cache.
        """
        data_to_cache = (
            block_structure._block_relations,
            block_structure._transformer_data,
            block_structure._block_data_map
        )
        zp_data_to_cache = zpickle(data_to_cache)
        self._cache.set(
            self._encode_root_cache_key(block_structure.root_block_usage_key),
            zp_data_to_cache
        )
        logger.debug(
            "Wrote BlockStructure %s to cache, size: %s",
            block_structure.root_block_usage_key,
            len(zp_data_to_cache),
        )
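The zpickle helper used above is defined outside this excerpt. A minimal sketch of what such a compress-and-pickle pair could look like, assuming it simply combines pickle with zlib (the project's actual helpers may differ):

import pickle
import zlib


def zpickle(data):
    """Pickle and zlib-compress the given data (illustrative sketch)."""
    return zlib.compress(pickle.dumps(data, pickle.HIGHEST_PROTOCOL))


def zunpickle(zdata):
    """Reverse zpickle: decompress and unpickle the given bytes."""
    return pickle.loads(zlib.decompress(zdata))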
Example #2
    def add(self, block_structure):
        """
        Store a compressed and pickled serialization of the given
        block structure into the given cache.

        The key in the cache is 'root.key.<root_block_usage_key>'.
        The data stored in the cache includes the structure's
        block relations, transformer data, and block data.

        Arguments:
            block_structure (BlockStructure) - The block structure
                that is to be serialized to the given cache.
        """
        data_to_cache = (
            block_structure._block_relations,
            block_structure._transformer_data,
            block_structure._block_data_map,
        )
        zp_data_to_cache = zpickle(data_to_cache)

        # Set the timeout value for the cache to 1 day as a fail-safe
        # in case the signal to invalidate the cache doesn't come through.
        timeout_in_seconds = 60 * 60 * 24
        self._cache.set(
            self._encode_root_cache_key(block_structure.root_block_usage_key),
            zp_data_to_cache,
            timeout=timeout_in_seconds,
        )

        logger.info(
            "Wrote BlockStructure %s to cache, size: %s",
            block_structure.root_block_usage_key,
            len(zp_data_to_cache),
        )
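Every variant builds the cache key with self._encode_root_cache_key, which is not shown. Going only by the docstring's 'root.key.<root_block_usage_key>' convention, that helper could be as simple as the following sketch (the body is an assumption, not the project's actual code):

    def _encode_root_cache_key(self, root_block_usage_key):
        """
        Illustrative sketch: build the cache key described in the
        docstring, 'root.key.<root_block_usage_key>'.
        """
        return "root.key." + str(root_block_usage_key)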
Example #3
    def add(self, block_structure):
        """
        Store a compressed and pickled serialization of the given
        block structure into the given cache.

        The key in the cache is 'root.key.<root_block_usage_key>'.
        The data stored in the cache includes the structure's
        block relations, transformer data, and block data.

        Arguments:
            block_structure (BlockStructure) - The block structure
                that is to be serialized to the given cache.
        """
        data_to_cache = (
            block_structure._block_relations,
            block_structure._transformer_data,
            block_structure._block_data_map,
        )
        zp_data_to_cache = zpickle(data_to_cache)

        # Set the timeout value for the cache to None. This caches the
        # value forever. The expectation is that the caller will delete
        # the cached value once it is outdated.
        self._cache.set(
            self._encode_root_cache_key(block_structure.root_block_usage_key),
            zp_data_to_cache,
            timeout=None,
        )

        logger.info(
            "Wrote BlockStructure %s to cache, size: %s",
            block_structure.root_block_usage_key,
            len(zp_data_to_cache),
        )
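Because this variant caches with timeout=None and leaves invalidation to the caller, the read path has to handle both a hit and a miss. A hypothetical retrieval counterpart (the method name and bare-tuple return value are assumptions):

    def get_cached(self, root_block_usage_key):
        """
        Hypothetical counterpart to add(): return the cached
        (block_relations, transformer_data, block_data_map) tuple,
        or None if nothing is cached for the given root key.
        """
        zp_data = self._cache.get(
            self._encode_root_cache_key(root_block_usage_key)
        )
        if zp_data is None:
            return None
        return zunpickle(zp_data)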
Example #4
    def add(self, block_structure):
        """
        Store a compressed and pickled serialization of the given
        block structure into the given cache.

        The key in the cache is 'root.key.<root_block_usage_key>'.
        The data stored in the cache includes the structure's
        block relations, transformer data, and block data.

        Arguments:
            block_structure (BlockStructure) - The block structure
                that is to be serialized to the given cache.
        """
        data_to_cache = (
            block_structure._block_relations,
            block_structure._transformer_data,
            block_structure._block_data_map,
        )
        zp_data_to_cache = zpickle(data_to_cache)

        # Set the timeout value for the cache to None. This caches the
        # value forever. The expectation is that the caller will delete
        # the cached value once it is outdated.
        self._cache.set(
            self._encode_root_cache_key(block_structure.root_block_usage_key),
            zp_data_to_cache,
            timeout=None,
        )

        logger.info(
            "Wrote BlockStructure %s to cache, size: %s",
            block_structure.root_block_usage_key,
            len(zp_data_to_cache),
        )
Example #5
def get_edx_api_data(api_config, resource, api, resource_id=None, querystring=None, cache_key=None, many=True,
                     traverse_pagination=True):
    """GET data from an edX REST API.

    DRY utility for handling caching and pagination.

    Arguments:
        api_config (ConfigurationModel): The configuration model governing interaction with the API.
        resource (str): Name of the API resource being requested.
        api (APIClient): API client to use for requesting data.

    Keyword Arguments:
        resource_id (int or str): Identifies a specific resource to be retrieved.
        querystring (dict): Optional query string parameters.
        cache_key (str): Where to cache retrieved data. The cache will be ignored if this is omitted
            (neither inspected nor updated).
        many (bool): Whether the resource requested is a collection of objects, or a single object.
            If false, an empty dict will be returned in cases of failure rather than the default empty list.
        traverse_pagination (bool): Whether to traverse pagination or return the paginated response.

    Returns:
        Data returned by the API. When hitting a list endpoint, extracts "results" (list of dict)
        returned by DRF-powered APIs.
    """
    no_data = [] if many else {}

    if not api_config.enabled:
        log.warning('%s configuration is disabled.', api_config.API_NAME)
        return no_data

    if cache_key:
        cache_key = '{}.{}'.format(cache_key, resource_id) if resource_id is not None else cache_key
        cache_key += '.zpickled'

        cached = cache.get(cache_key)
        if cached:
            return zunpickle(cached)

    try:
        endpoint = getattr(api, resource)
        querystring = querystring if querystring else {}
        response = endpoint(resource_id).get(**querystring)

        if resource_id is not None:
            results = response
        elif traverse_pagination:
            results = _traverse_pagination(response, endpoint, querystring, no_data)
        else:
            results = response
    except:  # pylint: disable=bare-except
        log.exception('Failed to retrieve data from the %s API.', api_config.API_NAME)
        return no_data

    if cache_key:
        zdata = zpickle(results)
        cache.set(cache_key, zdata, api_config.cache_ttl)

    return results
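The _traverse_pagination helper referenced above is outside this excerpt. A sketch of how it could walk a DRF-paginated response through the slumber-style endpoint, assuming the usual 'results'/'next' payload keys (not necessarily the project's exact implementation):

def _traverse_pagination(response, endpoint, querystring, no_data):
    """
    Illustrative sketch: concatenate 'results' across pages by
    re-requesting the endpoint with an incrementing 'page' parameter
    while the response reports a 'next' page.
    """
    results = response.get('results', no_data)

    page = 1
    next_page = response.get('next')
    while next_page:
        page += 1
        querystring['page'] = page
        response = endpoint.get(**querystring)
        results += response.get('results', no_data)
        next_page = response.get('next')

    return results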
Example #6
    def _serialize(self, block_structure):
        """
        Serializes the data for the given block_structure.
        """
        data_to_cache = (
            block_structure._block_relations,
            block_structure.transformer_data,
            block_structure._block_data_map,
        )
        return zpickle(data_to_cache)
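A matching read path would reverse this step by unpickling the payload and unpacking the same three components. A hypothetical minimal counterpart (the real store presumably rebuilds a full block structure object rather than returning a bare tuple):

    def _deserialize(self, serialized_data):
        """
        Hypothetical counterpart to _serialize: recover the three
        cached components from the zpickled payload.
        """
        block_relations, transformer_data, block_data_map = zunpickle(serialized_data)
        return block_relations, transformer_data, block_data_map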
Example #7
    def _serialize(self, block_structure):
        """
        Serializes the data for the given block_structure.
        """
        data_to_cache = (
            block_structure._block_relations,
            block_structure.transformer_data,
            block_structure._block_data_map,
        )
        return zpickle(data_to_cache)
Example #8
def get_api_data(api_config,
                 resource,
                 api_client,
                 base_api_url,
                 resource_id=None,
                 querystring=None,
                 cache_key=None,
                 many=True,
                 traverse_pagination=True,
                 fields=None,
                 long_term_cache=False):
    """
    GET data from an edX REST API endpoint using the API client.

    DRY utility for handling caching and pagination.

    Arguments:
        api_config (ConfigurationModel): The configuration model governing interaction with the API.
        resource (str): Name of the API resource being requested.
        api_client (requests.Session): API client (either raw requests.Session or OAuthAPIClient) to use for
            requesting data.
        base_api_url (str): base API url, used to construct the full API URL across with resource and
            resource_id (if any).

    Keyword Arguments:
        resource_id (int or str): Identifies a specific resource to be retrieved.
        querystring (dict): Optional query string parameters.
        cache_key (str): Where to cache retrieved data. The cache will be ignored if this is omitted
            (neither inspected nor updated).
        many (bool): Whether the resource requested is a collection of objects, or a single object.
            If false, an empty dict will be returned in cases of failure rather than the default empty list.
        traverse_pagination (bool): Whether to traverse pagination or return the paginated response.
        fields (list): If given, the returned data is filtered to these fields via get_fields.
        long_term_cache (bool): Whether to use the long term cache ttl or the standard cache ttl.

    Returns:
        Data returned by the API. When hitting a list endpoint, extracts "results" (list of dict)
        returned by DRF-powered APIs.
    """
    no_data = [] if many else {}

    if not api_config.enabled:
        log.warning('%s configuration is disabled.', api_config.API_NAME)
        return no_data

    if cache_key:
        cache_key = f'{cache_key}.{resource_id}' if resource_id is not None else cache_key
        cache_key += '.zpickled'

        cached = cache.get(cache_key)
        if cached:
            try:
                cached_response = zunpickle(cached)
            except Exception:  # pylint: disable=broad-except
                # Data is corrupt in some way.
                log.warning("Data for cache is corrupt for cache key %s",
                            cache_key)
                cache.delete(cache_key)
            else:
                if fields:
                    cached_response = get_fields(fields, cached_response)

                return cached_response

    try:
        querystring = querystring if querystring else {}
        api_url = urljoin(
            f"{base_api_url}/",
            f"{resource}/{str(resource_id) + '/' if resource_id is not None else ''}"
        )
        response = api_client.get(api_url, params=querystring)
        response.raise_for_status()
        response = response.json()

        if resource_id is None and traverse_pagination:
            results = _traverse_pagination(response, api_client, api_url,
                                           querystring, no_data)
        else:
            results = response

    except:  # pylint: disable=bare-except
        log.exception('Failed to retrieve data from the %s API.',
                      api_config.API_NAME)
        return no_data

    if cache_key:
        zdata = zpickle(results)
        cache_ttl = api_config.cache_ttl
        if long_term_cache:
            cache_ttl = api_config.long_term_cache_ttl
        cache.set(cache_key, zdata, cache_ttl)

    if fields:
        results = get_fields(fields, results)

    return results
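Here _traverse_pagination receives the raw client and URL instead of a slumber endpoint. Under the same 'results'/'next' assumptions as before, a sketch of that variant could re-request the URL with an incrementing page parameter (again an illustration, not a confirmed implementation):

def _traverse_pagination(response, api_client, api_url, querystring, no_data):
    """
    Illustrative sketch: concatenate 'results' across pages of a
    DRF-paginated endpoint using a requests-style client.
    """
    results = response.get('results', no_data)

    page = 1
    next_page = response.get('next')
    while next_page:
        page += 1
        querystring['page'] = page
        response = api_client.get(api_url, params=querystring)
        response.raise_for_status()
        response = response.json()
        results += response.get('results', no_data)
        next_page = response.get('next')

    return results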
Example #9
def get_edx_api_data(api_config,
                     resource,
                     api,
                     resource_id=None,
                     querystring=None,
                     cache_key=None,
                     many=True,
                     traverse_pagination=True):
    """GET data from an edX REST API.

    DRY utility for handling caching and pagination.

    Arguments:
        api_config (ConfigurationModel): The configuration model governing interaction with the API.
        resource (str): Name of the API resource being requested.
        api (APIClient): API client to use for requesting data.

    Keyword Arguments:
        resource_id (int or str): Identifies a specific resource to be retrieved.
        querystring (dict): Optional query string parameters.
        cache_key (str): Where to cache retrieved data. The cache will be ignored if this is omitted
            (neither inspected nor updated).
        many (bool): Whether the resource requested is a collection of objects, or a single object.
            If false, an empty dict will be returned in cases of failure rather than the default empty list.
        traverse_pagination (bool): Whether to traverse pagination or return the paginated response.

    Returns:
        Data returned by the API. When hitting a list endpoint, extracts "results" (list of dict)
        returned by DRF-powered APIs.
    """
    no_data = [] if many else {}

    if not api_config.enabled:
        log.warning('%s configuration is disabled.', api_config.API_NAME)
        return no_data

    if cache_key:
        cache_key = '{}.{}'.format(
            cache_key, resource_id) if resource_id is not None else cache_key
        cache_key += '.zpickled'

        cached = cache.get(cache_key)
        if cached:
            return zunpickle(cached)

    try:
        endpoint = getattr(api, resource)
        querystring = querystring if querystring else {}
        response = endpoint(resource_id).get(**querystring)

        if resource_id is not None:
            results = response
        elif traverse_pagination:
            results = _traverse_pagination(response, endpoint, querystring,
                                           no_data)
        else:
            results = response
    except:  # pylint: disable=bare-except
        log.exception('Failed to retrieve data from the %s API.',
                      api_config.API_NAME)
        return no_data

    if cache_key:
        zdata = zpickle(results)
        cache.set(cache_key, zdata, api_config.cache_ttl)

    return results
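For illustration, a call to this slumber-style version might look like the sketch below; the client class, URL, token, and stand-in config object are assumptions rather than details taken from the excerpt:

from types import SimpleNamespace

from edx_rest_api_client.client import EdxRestApiClient

# Stand-in for a ConfigurationModel row; real code would use something
# like SomeApiConfig.current() instead.
catalog_api_config = SimpleNamespace(API_NAME='catalog', enabled=True, cache_ttl=300)

api = EdxRestApiClient('https://discovery.example.com/api/v1/', jwt='replace-with-a-real-JWT')

# Fetch (and cache for cache_ttl seconds) all course runs, traversing pagination.
course_runs = get_edx_api_data(
    catalog_api_config,
    'course_runs',                  # resolved via getattr(api, 'course_runs')
    api,
    querystring={'marketable': 1},
    cache_key='catalog.course_runs',
)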