Example #1
    def reset_cache(self, cache_duration=None):
        """Remove any cached singles or albums charts

        Because the UK Top40 charts only change once per week, :py:class:`Top40` will cache the results of singles and
        albums. This means that during the execution of a program, repeated calls to retrieve singles and albums chart
        information will only actually call the remote API once. If, for whatever reason, you need to ensure that an
        attempt to access singles or albums information actually results in a call to the remote API, then calling the
        :py:meth:`Top40.reset_cache` method will do this by clearing down any existing cached chart information.

        If a persistent cache is in place, the results will also be cached across Python runtime executions.

        Params:
            cache_duration (:py:class:`int`): If ``None``, we will uninstall the requests cache and the next
                read from the API will cause a remote call to be executed. Otherwise, it specifies the number of
                seconds before the persistent cache will expire.
        """

        if cache_duration is None:
            # We are disabling the existing persistent_cache
            requests_cache.uninstall_cache()
        else:
            # We are setting a persistent cache so insert the duration into our cache config
            self.cache_config['expire_after'] = cache_duration

            # and then install the cache with this configuration
            requests_cache.install_cache(**self.cache_config)

        # Remember the new duration
        self.cache_duration = cache_duration

        # Reset the in-memory caches to force a read from the remote site
        self._albums_chart = None
        self._singles_chart = None
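A short usage sketch of the method above (hypothetical driver code; assumes a Top40 client whose albums/singles properties are backed by the in-memory caches shown here):

top40 = Top40()

albums = top40.albums                    # first access calls the remote API
albums_again = top40.albums              # served from the in-memory cache

top40.reset_cache(cache_duration=3600)   # install a persistent requests cache that expires after an hour
top40.reset_cache()                      # uninstall the requests cache; the next read hits the API again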
Example #2
    def test_hybrid_dois(self, test_data):

        (doi, fulltext_url, license, color) = test_data

        # because cookies break the cache pickling
        # for doi_start in ["10.1109", "10.1161", "10.1093", "10.1007", "10.1039"]:
        #     if doi.startswith(doi_start):
        requests_cache.uninstall_cache()

        my_pub = pub.lookup_product_by_doi(doi)
        my_pub.refresh()

        logger.info(u"\n\nwas looking for {}, got {}".format(fulltext_url, my_pub.fulltext_url))
        logger.info(u"https://api.unpaywall.org/v2/{}?email=me".format(doi))
        logger.info(u"doi: https://doi.org/{}".format(doi))
        logger.info(u"license: {}".format(my_pub.license))
        logger.info(u"oa_color: {}".format(my_pub.oa_color))
        logger.info(u"evidence: {}".format(my_pub.evidence))
        if my_pub.error:
            logger.info(my_pub.error)

        assert_equals(my_pub.error, "")
        assert_equals(my_pub.fulltext_url, fulltext_url)
        # assert_equals(my_pub.license, license)
        assert_equals(my_pub.error, "")
Example #3
def compare_to(ctx, cfr_title, cfr_part, api_base, path):
    """Compare local JSON to a remote server. This is useful for verifying
    changes to the parser.

    API_BASE is the uri of the root of the API. Use what would be the last
    parameter in the `write_to` command.

    PATH parameters will filter the files we're trying to compare. For
    example, if we only want to see the difference between trees, one of the
    PATH parameters should be "regulation".
    """
    if not api_base.endswith("/"):
        api_base += "/"

    tmppath = tempfile.mkdtemp()
    ctx.invoke(write_to, cfr_title=cfr_title, cfr_part=cfr_part,
               output=tmppath)

    # @todo: ugly to uninstall the cache after installing it in eregs.py.
    # Remove the globalness
    requests_cache.uninstall_cache()

    for file_name in files_to_compare(tmppath, path or ['']):
        local_name = os.path.join(tmppath, file_name)
        remote_name = api_base + file_name.replace(os.path.sep, "/")
        compare(local_name, remote_name)
    shutil.rmtree(tmppath)
Example #4
    def request(self, method, url, params=None, headers=None, to_json=True, data=None, **kwargs):
        """ Make request to TC API. """

        url, params, headers, data = self.prepare(url, params, headers, data)

        if self.options['cache']:
            rc.install_cache(self.options['cache'])

        elif type(self).cache_installed:
            rc.uninstall_cache()

        type(self).cache_installed = bool(self.options['cache'])

        try:
            response = rs.api.request(
                method, url, params=params, headers=headers, data=data, **kwargs)
            logger.debug(response.content)
            response.raise_for_status()
            if to_json:
                response = response.json()

        except (ValueError, rs.HTTPError):
            if locals().get('response') is not None:
                message = "%s: %s" % (response.status_code, response.content)
                raise TCException(message)
            raise

        return response
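The cache handling above only touches the global requests_cache state when the `cache` option changes, using a class attribute to remember whether a cache is currently installed. A minimal standalone sketch of that pattern (hypothetical `Client` class, not part of the original code):

import requests_cache


class Client:
    # Class-level flag: remembers whether this client last installed the global cache.
    cache_installed = False

    def __init__(self, cache=None):
        # `cache` is the requests_cache name to install, or None to disable caching.
        self.options = {'cache': cache}

    def request(self, url):
        if self.options['cache']:
            requests_cache.install_cache(self.options['cache'])
        elif type(self).cache_installed:
            # Caching was switched off since the last call: remove the global cache.
            requests_cache.uninstall_cache()
        type(self).cache_installed = bool(self.options['cache'])
        # ... the actual HTTP call would go here ...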
Example #5
    def test_chorus_dois(self, test_data):

        doi = test_data

        # because cookies break the cache pickling
        # for doi_start in ["10.1109", "10.1161", "10.1093", "10.1007", "10.1039"]:
        #     if doi.startswith(doi_start):
        requests_cache.uninstall_cache()

        my_pub = pub.lookup_product_by_doi(doi)
        if not my_pub:
            logger.info(u"doi {} not in db, skipping".format(doi))
            return
        my_pub.refresh()

        logger.info(u"https://api.unpaywall.org/v2/{}?email=me".format(doi))
        logger.info(u"doi: https://doi.org/{}".format(doi))
        logger.info(u"license: {}".format(my_pub.best_license))
        logger.info(u"evidence: {}".format(my_pub.best_evidence))
        logger.info(u"host: {}".format(my_pub.best_host))
        if my_pub.error:
            logger.info(my_pub.error)

        assert_equals(my_pub.error, "")
        assert_is_not_none(my_pub.fulltext_url)
Example #6
def after_scenario(context, scenario):
    requests_cache.uninstall_cache()
    _clean_up_marathon_apps(context)
    _clean_up_chronos_jobs(context)
    _clean_up_mesos_cli_config(context)
    _clean_up_soa_dir(context)
    _clean_up_etc_paasta(context)
    _clean_up_zookeeper_autoscaling(context)
Example #7
def full_tests():
    import nose

    mymods = ["", "tests"]  # nose essentially ignores the first arg to argv.
    mymods.extend(get_stevedore_module_names("eregs_ns.parser.test_suite"))

    requests_cache.uninstall_cache()

    nose.run(argv=mymods)
Example #8
 def test_install_uninstall(self):
     for _ in range(2):
         requests_cache.install_cache(name=CACHE_NAME, backend=CACHE_BACKEND)
         self.assertTrue(isinstance(requests.Session(), CachedSession))
         self.assertTrue(isinstance(requests.sessions.Session(), CachedSession))
         self.assertTrue(isinstance(requests.session(), CachedSession))
         requests_cache.uninstall_cache()
         self.assertFalse(isinstance(requests.Session(), CachedSession))
         self.assertFalse(isinstance(requests.sessions.Session(), CachedSession))
         self.assertFalse(isinstance(requests.session(), CachedSession))
Example #9
 def ctx(self, **options):
     """ Redefine context. """
     _opts = dict(self.options)
     try:
         self.options.update(options)
         yield self
     finally:
         self.options = _opts
         if not self.options['cache'] and type(self).cache_installed:
             rc.uninstall_cache()
Example #10
def full_tests():
    import pytest

    mymods = ["tests"]
    mymods.extend(get_stevedore_module_names("eregs_ns.parser.test_suite"))

    requests_cache.uninstall_cache()

    errno = pytest.main(["--pyargs"] + mymods)
    sys.exit(errno)
Example #11
def main():
    """Attempt to set up a list of marathon service instances given.
    Exits 1 if any service.instance deployment failed.
    This is done in the following order:

    - Load the marathon configuration
    - Connect to marathon
    - Do the following for each service.instance:
        - Load the service instance's configuration
        - Create the complete marathon job configuration
        - Deploy/bounce the service
        - Emit an event about the deployment to sensu"""

    args = parse_args()
    soa_dir = args.soa_dir
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.WARNING)

    # Setting up transparent cache for http API calls
    requests_cache.install_cache("setup_marathon_jobs", backend="memory")

    marathon_config = get_main_marathon_config()
    client = marathon_tools.get_marathon_client(marathon_config.get_url(), marathon_config.get_username(),
                                                marathon_config.get_password())
    marathon_apps = marathon_tools.get_all_marathon_apps(client, embed_failures=True)

    num_failed_deployments = 0
    for service_instance in args.service_instance_list:
        try:
            service, instance, _, __ = decompose_job_id(service_instance)
        except InvalidJobNameError:
            log.error("Invalid service instance specified. Format is service%sinstance." % SPACER)
            num_failed_deployments = num_failed_deployments + 1
        else:
            if deploy_marathon_service(service, instance, client, soa_dir, marathon_config, marathon_apps):
                num_failed_deployments = num_failed_deployments + 1

    requests_cache.uninstall_cache()

    log.debug("%d out of %d service.instances failed to deploy." %
              (num_failed_deployments, len(args.service_instance_list)))

    sys.exit(1 if num_failed_deployments else 0)
Example #12
def compare_to(api_base, paths, prompt):
    """Compare local JSON to a remote server. This is useful for verifying
    changes to the parser.

    API_BASE is the uri of the root of the API. Use what would be the last
    parameter in the `write_to` command.

    PATH parameters indicate specific files or directories to use when
    comparing. For example, use `/some/path/to/regulation/555` to compare all
    versions of 555. Glob syntax works if your shell supports it"""
    if not api_base.endswith("/"):
        api_base += "/"

    # @todo: ugly to uninstall the cache after installing it in eregs.py.
    # Remove the globalness
    requests_cache.uninstall_cache()

    pairs = local_and_remote_generator(api_base, paths)
    return any([compare(local, remote, prompt) for local, remote in pairs])
Example #13
def main():
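    # (Assumes requests_cache.install_cache() was called earlier in the program;
    # without an installed cache the responses below would not actually be cached.)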
    # The real request will only be made once; afterward, the cached response is used
    for i in range(5):
        response = requests.get('http://httpbin.org/get')

    # This is more obvious when calling a slow endpoint
    for i in range(5):
        response = requests.get('http://httpbin.org/delay/2')

    # Caching can be disabled if we want to get a fresh page and not cache it
    with requests_cache.disabled():
        print(requests.get('http://httpbin.org/ip').text)

    # Get some debugging info about the cache
    print(requests_cache.get_cache())
    print('Cached URLS:', requests_cache.get_cache().urls)

    # Uninstall to remove caching from all requests functions
    requests_cache.uninstall_cache()
Example #14
 def scan_documents(self, cached=False):
     if cached:
         requests_cache.install_cache()
     r = requests.get(URL_BASE + '/files/')
     if r.status_code != 200:
         raise Exception("Directory read failed")
     root = html.fromstring(r.content)
     links = root.xpath("//a")
     info_list = []
     for link_el in links:
         link = link_el.attrib['href']
         if not link.startswith('/files'):
             continue
         policymaker_id = link_el.text.split('_')[-1].strip()
         dir_list = self.scan_dir(link, policymaker_id)
         info_list = info_list + dir_list
     self.doc_list = info_list
     if cached:
         requests_cache.uninstall_cache()
     return info_list
Example #16
def generate_csl_items(args, citation_df):
    """
    Generate CSL (citeproc) items for standard_citations in citation_df.
    Writes references.json to disk and logs warnings for potential problems.
    """
    # Read manual references (overrides) in JSON CSL
    manual_refs = read_manual_references(args.manual_references_path)

    requests_cache.install_cache(args.requests_cache_path, include_get_headers=True)
    cache = requests_cache.get_cache()
    if args.clear_requests_cache:
        logging.info('Clearing requests-cache')
        requests_cache.clear()
    logging.info(f'requests-cache starting with {len(cache.responses)} cached responses')

    csl_items = list()
    failures = list()
    for citation in citation_df.standard_citation.unique():
        if citation in manual_refs:
            csl_items.append(manual_refs[citation])
            continue
        try:
            citeproc = citation_to_citeproc(citation)
            csl_items.append(citeproc)
        except Exception as error:
            logging.exception(f'Citeproc retrieval failure for {citation}')
            failures.append(citation)

    logging.info(f'requests-cache finished with {len(cache.responses)} cached responses')
    requests_cache.uninstall_cache()

    if failures:
        message = 'Citeproc retrieval failed for:\n{}'.format(
            '\n'.join(failures))
        logging.error(message)

    # Write JSON CSL bibliography for Pandoc.
    with args.references_path.open('w') as write_file:
        json.dump(csl_items, write_file, indent=2, ensure_ascii=False)
        write_file.write('\n')
    return csl_items
Example #17
def disable_cache():
    """
    If the requests_cache package is available, uninstall the existing
    installed cache. Returns True if disable happened.
    """

    global _CACHE_INSTALLED

    if not _CACHE_INSTALLED:
        return False

    try:
        from requests_cache import uninstall_cache
        uninstall_cache()

    except ImportError:
        return False

    else:
        _CACHE_INSTALLED = False
        return True
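For symmetry, a hypothetical enable_cache counterpart, sketched under the same module-level _CACHE_INSTALLED flag (not part of the original source):

def enable_cache(*args, **kwargs):
    """
    If the requests_cache package is available, install a cache with the given
    arguments. Returns True if the install happened.
    """

    global _CACHE_INSTALLED

    if _CACHE_INSTALLED:
        return False

    try:
        from requests_cache import install_cache
        install_cache(*args, **kwargs)

    except ImportError:
        return False

    else:
        _CACHE_INSTALLED = True
        return True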
Example #18
    def request(self, method, url, params=None, headers=None, **kwargs):
        """ Make request to Github API. """

        loglevel = self.options.get('loglevel', 'info')
        logger.setLevel(loglevel.upper())
        rs_logger.setLevel(loglevel.upper())

        if self.options['cache']:
            rc.install_cache(self.options['cache'])

        elif type(self).cache_installed:
            rc.uninstall_cache()

        type(self).cache_installed = bool(self.options['cache'])

        if self.options['mock'] and url in self.options['mock']:
            return self.__load_mock(self.options['mock'][url])

        url = 'https://%s/%s' % (self.options['domain'], url.strip('/'))

        _params = self.params
        if params is not None:
            _params.update(params)

        _headers = self.headers
        if headers is not None:
            _headers.update(headers)

        try:
            response = rs.api.request(
                method, url, params=_params, headers=_headers, **kwargs)
            logger.debug(response.content)
            response.raise_for_status()
            response = response.json()

        except (rs.HTTPError, ValueError):
            message = "%s: %s" % (response.status_code, response.content)
            raise OctocatException(message)

        return response
Example #19
def test_policy(rulefiles):
    requests_cache.uninstall_cache()
    defense_api = CbDefenseAPI(profile="test")


    default_policies = [policy for policy in defense_api.select(Policy) if policy.name == "default"]
    new_policy = defense_api.create(Policy)
    new_policy.policy = default_policies[0].policy
    new_policy.name = "cbapi-python-test-%d" % time.time()
    new_policy.priorityLevel = "LOW"
    new_policy.description = "Test policy"
    new_policy.version = 2
    new_policy.save()

    for t in rulefiles:
        try:
            test_rule(new_policy, t)
            print("Added rule %s" % t)
        except Exception as e:
            print("Exception adding rule %s: %s" % (t, e))

    new_policy.delete()
Example #20
def _import_depfinder():
    import os
    import yaml

    # https://github.com/ericdill/depfinder/pull/64/
    if not hasattr(yaml, "CSafeLoader"):
        yaml.CSafeLoader = yaml.SafeLoader

    if os.getenv("CI"):
        import depfinder
    else:
        if "depfinder" not in sys.modules:
            import requests_cache

            dir = Path(__file__).parent
            requests_cache.install_cache(str(options.cache / "requests_cache"))
            dir.mkdir(parents=True, exist_ok=True)
            import depfinder

            requests_cache.uninstall_cache()
        else:
            import depfinder

    return depfinder
Example #21
from contextlib import contextmanager


@contextmanager
def cache_enabled(*args, **kwargs):
    requests_cache.install_cache(*args, **kwargs)
    try:
        yield
    finally:
        requests_cache.uninstall_cache()
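A brief usage sketch of the context manager above (hypothetical cache name and URL; requests made inside the block go through a temporary cache that is uninstalled on exit):

import requests

with cache_enabled('demo_cache', backend='memory'):
    requests.get('https://httpbin.org/get')   # cached while the block is active
requests.get('https://httpbin.org/get')       # cache uninstalled; plain request again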
Example #22
 def stop_caching(self):
     ''' Stop caching.'''
     if self._cached and caching_avail:
         requests_cache.uninstall_cache()
         self._cached = False
     return
Example #23
def cache_requests():
    testdir = os.path.dirname(os.path.abspath(__file__))
    location = os.path.join(testdir, 'data/requests_cache')
    requests_cache.install_cache(cache_name=location)
    yield
    requests_cache.uninstall_cache()
Example #24
 def setUp(self):
     self.s = CachedSession(CACHE_NAME, backend=CACHE_BACKEND, fast_save=FAST_SAVE)
     self.s.cache.clear()
     requests_cache.uninstall_cache()
Example #25
def caching(
        cache=False,
        name=None,
        backend="sqlite",
        expire_after=86400,
        allowable_codes=(200, ),
        allowable_methods=("GET", ),
):
    """
    pygbif caching management

    :param cache: [bool] if ``True``, all HTTP requests are cached; if ``False`` (default),
        no HTTP requests are cached.
    :param name: [str] the cache name. When backend=sqlite, this is the path for the
        sqlite file (ignored if sqlite is not used). If not set, the file is put in your
        temporary directory, and is therefore cleaned up/deleted after closing your
        Python session
    :param backend: [str] the backend, one of:

     - ``sqlite`` sqlite database (default)
     - ``memory`` not persistent, stores all data in Python dict in memory
     - ``mongodb`` (experimental) MongoDB database (pymongo < 3.0 required)
     - ``redis`` stores all data on a redis data store (redis required)

    :param expire_after: [int] timedelta or number of seconds after which the cache expires,
        or None to ignore expiration. default: 86400 seconds (24 hrs)
    :param allowable_codes: [tuple] limit caching to responses with these status codes
        (default: 200)
    :param allowable_methods: [tuple] cache only requests using these methods
        (default: 'GET')
    
    :return: sets options to be used by pygbif, and returns the options you selected
        as a dict

    Note: setting cache=False will turn off caching, but the backend data still
    persists. Thus, you can turn caching back on without losing your cache.
    This also means that if you want to delete your cache, you have to do it yourself.

    Note: on loading pygbif, we clean up expired responses

    Usage::

        import pygbif
        
        # caching is off by default
        from pygbif import occurrences
        %time z=occurrences.search(taxonKey = 3329049)
        %time w=occurrences.search(taxonKey = 3329049)

        # turn caching on
        pygbif.caching(True)
    
        %time z=occurrences.search(taxonKey = 3329049)
        %time w=occurrences.search(taxonKey = 3329049)

        # set a different backend
        pygbif.caching(cache=True, backend="redis")
        %time z=occurrences.search(taxonKey = 3329049)
        %time w=occurrences.search(taxonKey = 3329049)

        # set a different backend
        pygbif.caching(cache=True, backend="mongodb")
        %time z=occurrences.search(taxonKey = 3329049)
        %time w=occurrences.search(taxonKey = 3329049)
        
        # set path to a sqlite file
        pygbif.caching(name = "some/path/my_file")
    """
    default_name = "pygbif_requests_cache"
    if not cache:
        requests_cache.uninstall_cache()
        CACHE_NAME = None
    else:
        if name is None and backend == "sqlite":
            CACHE_NAME = os.path.join(tempfile.gettempdir(), default_name)
        else:
            CACHE_NAME = default_name

        requests_cache.install_cache(cache_name=CACHE_NAME,
                                     backend=backend,
                                     expire_after=expire_after)
        remove_expired_responses()

    cache_settings = {
        "cache": cache,
        "name": CACHE_NAME,
        "backend": backend,
        "expire_after": expire_after,
        "allowable_codes": allowable_codes,
        "allowable_methods": allowable_methods,
    }
    return cache_settings
Example #26
    #Specify parameters for api request
    params = {
        'key': 'shkEPTAKIwt6G92CdZAUEuw8U9pWbwfZ',
    }

    #Use computer memory for api caching
    requests_cache.install_cache(backend='memory')

    #Read input excel file and create a list of urls
    read_df = pd.read_excel(sys.argv[1], engine='openpyxl')

    urlList = [(
        f'https://api.tomtom.com/search/2/reverseGeocode/{read_df.loc[x,["Latitude"]].values[0]},'
        f'{read_df.loc[x,["Longitude"]].values[0]}.JSON')
               for x in range(len(read_df.index))]

    #Use multithreading to execute the requests in parallel.
    with concurrent.futures.ThreadPoolExecutor(max_workers=3) as executor:
        results = executor.map(get_address, urlList)

    data = [f'{val["streetNumber"]} {val["streetName"]}, {val["municipality"]} {val["postalCode"]}'\
            for val in results]

    #Print addresses
    for address in data:
        sys.stdout.write(address + '\n')

    #Remove cache
    requests_cache.uninstall_cache()
Example #27
 def use_cache(self, use):
     if use:
         requests_cache.install_cache('kosapy_cache', expire_after=24*60*60)
     else:
         requests_cache.uninstall_cache()
Example #28
def generate_csl_items(
    citekeys: list,
    manual_refs: dict = {},
    requests_cache_path: Optional[str] = None,
    clear_requests_cache: Optional[bool] = False,
) -> list:
    """
    Generate CSL (citeproc) items for the given list of standard_citekeys.

    Parameters:

    - citekeys: list of standard_citekeys
    - manual_refs: mapping from standard_citekey to csl_item for manual references
    - requests_cache_path: path for the requests cache database.
      Passed as cache_name to `requests_cache.install_cache`.
      requests_cache may append an extension to this path, so it is not always the exact
      path to the cache. If None, do not use requests_cache.
    - clear_requests_cache: If True, clear the requests cache before generating citekey metadata.
    """
    # Deduplicate citations
    citekeys = list(dict.fromkeys(citekeys))

    # Install cache
    if requests_cache_path is not None:
        requests  # require `import requests` in case this is essential for monkey patching by requests_cache.
        requests_cache.install_cache(requests_cache_path,
                                     include_get_headers=True)
        cache = requests_cache.get_cache()
        if clear_requests_cache:
            logging.info("Clearing requests-cache")
            requests_cache.clear()
        logging.info(
            f"requests-cache starting with {len(cache.responses)} cached responses"
        )

    csl_items = list()
    failures = list()
    for standard_citekey in citekeys:
        if standard_citekey in manual_refs:
            csl_items.append(manual_refs[standard_citekey])
            continue
        elif standard_citekey.startswith("raw:"):
            logging.error(
                f"CSL JSON Data with a standard_citekey of {standard_citekey!r} not found in manual-references.json. "
                "Metadata must be provided for raw citekeys.")
            failures.append(standard_citekey)
        try:
            csl_item = citekey_to_csl_item(standard_citekey)
            csl_items.append(csl_item)
        except Exception:
            logging.exception(
                f"Citeproc retrieval failure for {standard_citekey!r}")
            failures.append(standard_citekey)

    # Uninstall cache
    if requests_cache_path is not None:
        logging.info(
            f"requests-cache finished with {len(cache.responses)} cached responses"
        )
        requests_cache.uninstall_cache()

    if failures:
        message = "CSL JSON Data retrieval failed for the following standardized citation keys:\n{}".format(
            "\n".join(failures))
        logging.error(message)

    return csl_items
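A brief usage sketch of the function above (hypothetical standard citekeys and cache path):

csl_items = generate_csl_items(
    citekeys=["doi:10.7554/elife.32822", "pubmed:29424689"],  # hypothetical citekeys
    requests_cache_path="ci/cache/requests-cache",            # hypothetical cache location
    clear_requests_cache=False,
)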
Example #29
 def setUp(self):
     requests_cache.uninstall_cache()
     self.tls_adapter = CbAPISessionAdapter(force_tls_1_2=True)
     self.session = requests.Session()
     self.session.mount("https://", self.tls_adapter)
Example #30
 def use_cache(self, use):
     if use:
         requests_cache.install_cache('kosapy_cache',
                                      expire_after=24 * 60 * 60)
     else:
         requests_cache.uninstall_cache()
Example #31
 def _stop_caching(self):
     '''Stop caching.'''
     if self._cached and caching_avail:
         requests_cache.uninstall_cache()
         self._cached = False
     return
Example #32
 def setUp(self):
     requests_cache.install_cache(name=CACHE_NAME, backend=CACHE_BACKEND)
     requests.Session().cache.clear()
     requests_cache.uninstall_cache()
Example #33
 def setUp(self):
     self.s = CachedSession(CACHE_NAME,
                            backend=CACHE_BACKEND,
                            fast_save=FAST_SAVE)
     self.s.cache.clear()
     requests_cache.uninstall_cache()
Example #35
 def tearDown(self):
     requests_cache.uninstall_cache()
Example #36
 def __exit__(self, type, value, traceback):
     requests_cache.uninstall_cache()
Example #37
def test_remove_expired_responses(remove_expired_responses):
    requests_cache.install_cache(expire_after=360)
    requests_cache.remove_expired_responses()
    assert remove_expired_responses.called is True
    requests_cache.uninstall_cache()
Example #38
 def close(self):
     """uninstall cache"""
     logging.info(
         f"requests-cache finished with {len(self.cache.responses)} cached responses"
     )
     requests_cache.uninstall_cache()
Example #39
 def disable_cache(self) -> None:
     requests_cache.disable_cache()
     requests_cache.uninstall_cache()
     self.cache_enabled = False