def compare_to(ctx, cfr_title, cfr_part, api_base, path):
    """Compare local JSON to a remote server. This is useful for verifying
    changes to the parser.

    API_BASE is the uri of the root of the API. Use what would be the last
    parameter in the `write_to` command.

    PATH parameters will filter the files we're trying to compare. For
    example, if we only want to see the difference between trees, one of the
    PATH parameters should be "regulation".
    """
    if not api_base.endswith("/"):
        api_base += "/"

    tmppath = tempfile.mkdtemp()
    ctx.invoke(write_to, cfr_title=cfr_title, cfr_part=cfr_part,
               output=tmppath)

    # @todo: ugly to uninstall the cache after installing it in eregs.py.
    # Remove the globalness
    requests_cache.uninstall_cache()

    for file_name in files_to_compare(tmppath, path or ['']):
        local_name = os.path.join(tmppath, file_name)
        remote_name = api_base + file_name.replace(os.path.sep, "/")
        compare(local_name, remote_name)
    shutil.rmtree(tmppath)

def request(self, method, url, params=None, headers=None, to_json=True,
            data=None, **kwargs):
    """ Make request to TC API. """
    url, params, headers, data = self.prepare(url, params, headers, data)

    if self.options['cache']:
        rc.install_cache(self.options['cache'])
    elif type(self).cache_installed:
        rc.uninstall_cache()
    type(self).cache_installed = bool(self.options['cache'])

    try:
        response = rs.api.request(
            method, url, params=params, headers=headers, data=data, **kwargs)
        logger.debug(response.content)
        response.raise_for_status()
        if to_json:
            response = response.json()
    except (ValueError, rs.HTTPError):
        if locals().get('response') is not None:
            message = "%s: %s" % (response.status_code, response.content)
            raise TCException(message)
        raise

    return response

def reset_cache(self, cache_duration=None):
    """Remove any cached singles or albums charts.

    Because the UK Top40 charts only change once per week, :py:class:`Top40`
    will cache the results of singles and albums. This means that during the
    execution of a program, repeated calls to retrieve singles and albums
    chart information will only actually call the remote API once.

    If, for whatever reason, you need to ensure that an attempt to access
    single or album information actually results in a call to the remote API,
    then calling the :py:meth:`Top40.reset_cache` method will do this, by
    clearing down any existing cached chart information.

    If a persistent cache is in place, then the results will also be cached
    across Python runtime executions.

    Params:
        cache_duration (:py:class:`int`): If ``None``, we will uninstall the
            requests cache and the next read from the API will cause a remote
            call to be executed. Otherwise it specifies the number of seconds
            before the persistent cache will expire.
    """
    if cache_duration is None:
        # We are disabling the existing persistent cache
        requests_cache.uninstall_cache()
    else:
        # We are setting a persistent cache, so insert the duration into our
        # cache config
        self.cache_config['expire_after'] = cache_duration
        # and then install the cache with this configuration
        requests_cache.install_cache(**self.cache_config)

    # Remember the new duration
    self.cache_duration = cache_duration

    # Reset the in-memory caches to force a read from the remote site
    self._albums_chart = None
    self._singles_chart = None

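# A minimal, self-contained sketch of the expire_after behaviour that
# reset_cache() above configures through self.cache_config. The cache name,
# backend, and URL are illustrative assumptions, not part of the original
# class.
import requests
import requests_cache

# Roughly what reset_cache(cache_duration=7 * 24 * 60 * 60) sets up:
# cached entries expire after a week.
requests_cache.install_cache("top40_demo", backend="memory",
                             expire_after=7 * 24 * 60 * 60)
requests.get("https://httpbin.org/get")

# Roughly what reset_cache(None) does: the cache is uninstalled and the
# next read goes to the remote API again.
requests_cache.uninstall_cache()
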
def test_hybrid_dois(self, test_data):
    (doi, fulltext_url, license, color) = test_data

    # because cookies breaks the cache pickling
    # for doi_start in ["10.1109", "10.1161", "10.1093", "10.1007", "10.1039"]:
    #     if doi.startswith(doi_start):
    requests_cache.uninstall_cache()

    my_pub = pub.lookup_product_by_doi(doi)
    my_pub.refresh()

    logger.info(u"\n\nwas looking for {}, got {}".format(fulltext_url, my_pub.fulltext_url))
    logger.info(u"https://api.unpaywall.org/v2/{}?email=me".format(doi))
    logger.info(u"doi: https://doi.org/{}".format(doi))
    logger.info(u"license: {}".format(my_pub.license))
    logger.info(u"oa_color: {}".format(my_pub.oa_color))
    logger.info(u"evidence: {}".format(my_pub.evidence))
    if my_pub.error:
        logger.info(my_pub.error)

    assert_equals(my_pub.error, "")
    assert_equals(my_pub.fulltext_url, fulltext_url)
    # assert_equals(my_pub.license, license)
    assert_equals(my_pub.error, "")

def test_chorus_dois(self, test_data):
    doi = test_data

    # because cookies breaks the cache pickling
    # for doi_start in ["10.1109", "10.1161", "10.1093", "10.1007", "10.1039"]:
    #     if doi.startswith(doi_start):
    requests_cache.uninstall_cache()

    my_pub = pub.lookup_product_by_doi(doi)
    if not my_pub:
        logger.info(u"doi {} not in db, skipping".format(doi))
        return
    my_pub.refresh()

    logger.info(u"https://api.unpaywall.org/v2/{}?email=me".format(doi))
    logger.info(u"doi: https://doi.org/{}".format(doi))
    logger.info(u"license: {}".format(my_pub.best_license))
    logger.info(u"evidence: {}".format(my_pub.best_evidence))
    logger.info(u"host: {}".format(my_pub.best_host))
    if my_pub.error:
        logger.info(my_pub.error)

    assert_equals(my_pub.error, "")
    assert_is_not_none(my_pub.fulltext_url)

def after_scenario(context, scenario):
    requests_cache.uninstall_cache()
    _clean_up_marathon_apps(context)
    _clean_up_chronos_jobs(context)
    _clean_up_mesos_cli_config(context)
    _clean_up_soa_dir(context)
    _clean_up_etc_paasta(context)
    _clean_up_zookeeper_autoscaling(context)

def full_tests():
    import nose
    mymods = ["", "tests"]   # nose essentially ignores the first arg to argv.
    mymods.extend(get_stevedore_module_names("eregs_ns.parser.test_suite"))
    requests_cache.uninstall_cache()
    nose.run(argv=mymods)

def test_install_uninstall(self):
    for _ in range(2):
        requests_cache.install_cache(name=CACHE_NAME, backend=CACHE_BACKEND)
        self.assertTrue(isinstance(requests.Session(), CachedSession))
        self.assertTrue(isinstance(requests.sessions.Session(), CachedSession))
        self.assertTrue(isinstance(requests.session(), CachedSession))
        requests_cache.uninstall_cache()
        self.assertFalse(isinstance(requests.Session(), CachedSession))
        self.assertFalse(isinstance(requests.sessions.Session(), CachedSession))
        self.assertFalse(isinstance(requests.session(), CachedSession))

def full_tests():
    import pytest
    mymods = ["tests"]
    mymods.extend(get_stevedore_module_names("eregs_ns.parser.test_suite"))
    requests_cache.uninstall_cache()
    errno = pytest.main(["--pyargs"] + mymods)
    sys.exit(errno)

@contextmanager  # decorator assumed from the original module; the yield-based body requires it
def ctx(self, **options):
    """ Redefine context. """
    _opts = dict(self.options)
    try:
        self.options.update(options)
        yield self
    finally:
        self.options = _opts
        if not self.options['cache'] and type(self).cache_installed:
            rc.uninstall_cache()

def main():
    """Attempt to set up a list of marathon service instances given.
    Exits 1 if any service.instance deployment failed.
    This is done in the following order:
    - Load the marathon configuration
    - Connect to marathon
    - Do the following for each service.instance:
      - Load the service instance's configuration
      - Create the complete marathon job configuration
      - Deploy/bounce the service
      - Emit an event about the deployment to sensu"""
    args = parse_args()
    soa_dir = args.soa_dir
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.WARNING)

    # Setting up transparent cache for http API calls
    requests_cache.install_cache("setup_marathon_jobs", backend="memory")

    marathon_config = get_main_marathon_config()
    client = marathon_tools.get_marathon_client(marathon_config.get_url(),
                                                marathon_config.get_username(),
                                                marathon_config.get_password())
    marathon_apps = marathon_tools.get_all_marathon_apps(client, embed_failures=True)

    num_failed_deployments = 0
    for service_instance in args.service_instance_list:
        try:
            service, instance, _, __ = decompose_job_id(service_instance)
        except InvalidJobNameError:
            log.error("Invalid service instance specified. Format is service%sinstance." % SPACER)
            num_failed_deployments = num_failed_deployments + 1
        else:
            if deploy_marathon_service(service, instance, client, soa_dir,
                                       marathon_config, marathon_apps):
                num_failed_deployments = num_failed_deployments + 1

    requests_cache.uninstall_cache()

    log.debug("%d out of %d service.instances failed to deploy." %
              (num_failed_deployments, len(args.service_instance_list)))

    sys.exit(1 if num_failed_deployments else 0)

def compare_to(api_base, paths, prompt):
    """Compare local JSON to a remote server. This is useful for verifying
    changes to the parser.

    API_BASE is the uri of the root of the API. Use what would be the last
    parameter in the `write_to` command.

    PATH parameters indicate specific files or directories to use when
    comparing. For example, use `/some/path/to/regulation/555` to compare all
    versions of 555. Glob syntax works if your shell supports it.
    """
    if not api_base.endswith("/"):
        api_base += "/"

    # @todo: ugly to uninstall the cache after installing it in eregs.py.
    # Remove the globalness
    requests_cache.uninstall_cache()

    pairs = local_and_remote_generator(api_base, paths)
    return any([compare(local, remote, prompt) for local, remote in pairs])

def scan_documents(self, cached=False):
    if cached:
        requests_cache.install_cache()

    r = requests.get(URL_BASE + '/files/')
    if r.status_code != 200:
        raise Exception("Directory read failed")
    root = html.fromstring(r.content)
    links = root.xpath("//a")

    info_list = []
    for link_el in links:
        link = link_el.attrib['href']
        if not link.startswith('/files'):
            continue
        policymaker_id = link_el.text.split('_')[-1].strip()
        dir_list = self.scan_dir(link, policymaker_id)
        info_list = info_list + dir_list
    self.doc_list = info_list

    if cached:
        requests_cache.uninstall_cache()

    return info_list

def disable_cache():
    """
    If the requests_cache package is available, uninstall the existing
    installed cache.

    Returns True if disable happened.
    """
    global _CACHE_INSTALLED

    if not _CACHE_INSTALLED:
        return False

    try:
        from requests_cache import uninstall_cache
        uninstall_cache()
    except ImportError:
        return False
    else:
        _CACHE_INSTALLED = False

    return True

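# A hypothetical enable_cache() counterpart to disable_cache() above, sketched
# around the same module-level _CACHE_INSTALLED flag and optional-import
# pattern. The function name, parameters, and defaults are assumptions, not
# part of the original module.
_CACHE_INSTALLED = False


def enable_cache(name="web_cache", backend="sqlite", expire_after=None):
    """
    If the requests_cache package is available, install a cache with the
    given settings.

    Returns True if enable happened.
    """
    global _CACHE_INSTALLED

    if _CACHE_INSTALLED:
        return False

    try:
        from requests_cache import install_cache
        install_cache(name, backend=backend, expire_after=expire_after)
    except ImportError:
        return False
    else:
        _CACHE_INSTALLED = True

    return True
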
def request(self, method, url, params=None, headers=None, **kwargs):
    """ Make request to Github API. """
    loglevel = self.options.get('loglevel', 'info')
    logger.setLevel(loglevel.upper())
    rs_logger.setLevel(loglevel.upper())

    if self.options['cache']:
        rc.install_cache(self.options['cache'])
    elif type(self).cache_installed:
        rc.uninstall_cache()
    type(self).cache_installed = bool(self.options['cache'])

    if self.options['mock'] and url in self.options['mock']:
        return self.__load_mock(self.options['mock'][url])

    url = 'https://%s/%s' % (self.options['domain'], url.strip('/'))

    _params = self.params
    if params is not None:
        _params.update(params)

    _headers = self.headers
    if headers is not None:
        _headers.update(headers)

    try:
        response = rs.api.request(
            method, url, params=_params, headers=_headers, **kwargs)
        logger.debug(response.content)
        response.raise_for_status()
        response = response.json()
    except (rs.HTTPError, ValueError):
        message = "%s: %s" % (response.status_code, response.content)
        raise OctocatException(message)

    return response

def test_policy(rulefiles):
    requests_cache.uninstall_cache()
    defense_api = CbDefenseAPI(profile="test")

    default_policies = [policy for policy in defense_api.select(Policy)
                        if policy.name == "default"]

    new_policy = defense_api.create(Policy)
    new_policy.policy = default_policies[0].policy
    new_policy.name = "cbapi-python-test-%d" % time.time()
    new_policy.priorityLevel = "LOW"
    new_policy.description = "Test policy"
    new_policy.version = 2
    new_policy.save()

    for t in rulefiles:
        try:
            test_rule(new_policy, t)
            print("Added rule %s" % t)
        except Exception as e:
            print("Exception adding rule %s: %s" % (t, e))

    new_policy.delete()

def setUp(self):
    requests_cache.install_cache(name=CACHE_NAME, backend=CACHE_BACKEND)
    requests.Session().cache.clear()
    requests_cache.uninstall_cache()

def stop_caching(self):
    '''Stop caching.'''
    if self._cached and caching_avail:
        requests_cache.uninstall_cache()
        self._cached = False
    return

def use_cache(self, use):
    if use:
        requests_cache.install_cache('kosapy_cache', expire_after=24*60*60)
    else:
        requests_cache.uninstall_cache()

@contextmanager  # decorator assumed from the original module; the yield-based body requires it
def cache_enabled(*args, **kwargs):
    requests_cache.install_cache(*args, **kwargs)
    try:
        yield
    finally:
        requests_cache.uninstall_cache()

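# Hedged usage sketch for cache_enabled() above; the cache name "demo_cache"
# and the URL are illustrative assumptions.
import requests

with cache_enabled("demo_cache", backend="memory"):
    requests.get("https://httpbin.org/get")   # requests inside the block go through the cache
requests.get("https://httpbin.org/get")       # cache is uninstalled once the block exits
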
def setUp(self):
    self.s = CachedSession(CACHE_NAME, backend=CACHE_BACKEND, fast_save=FAST_SAVE)
    self.s.cache.clear()
    requests_cache.uninstall_cache()