def wrapper(*a, **kw):
    """Memoizing wrapper around the decorated method.

    Looks up the call's key in a file-backed cache; on a valid hit the
    stored return value is reused, otherwise the wrapped ``method`` is
    invoked and its result stored together with today's date.
    """
    cache = FileCache(self.cache_store, serialize=True, flag='cs')
    try:
        key = self.get_key(*a, **kw)
        if not self.cache_updateable(cache, key):
            # Miss (or stale entry): compute a fresh value and record
            # the date so expiry validation can reason about age.
            retval = method(*a, **kw)
            cache[key] = {
                'retval': retval,
                'datetime': datetime.today().date()
            }
        else:
            retval = cache[key].get('retval')
    finally:
        # Fix: always release the file cache, even when get_key/method
        # raises — the original leaked the open cache on any exception.
        cache.close()
    return retval
def __main__():
    """Entry point: create the shared conky file cache next to this
    script, run every widget module's ``__main__`` in order, then
    close the cache.
    """
    conky_path = os.path.dirname(os.path.realpath(__file__))
    # Platform-safe join instead of string concatenation.
    cache_path = os.path.join(conky_path, '.cache')
    # Create cache shared by the widget modules below.
    cache = FileCache('conky_cache', flag='cs', app_cache_dir=cache_path)
    cache.create()
    try:
        system.__main__()
        cpu.__main__()
        ram.__main__()
        temps.__main__()
        disks.__main__()
        network.__main__()
        containers.__main__()
    finally:
        # Fix: close the cache even if one of the widgets raises, so
        # the backing store isn't left open on a partial run.
        cache.close()
def test_cache_updateable(self):
    """cache_updateable is False for an absent key and True once a
    fresh entry (value + today's date) has been written.
    """
    encode_key = lambda a, b: "{},{}".format(a, b)
    c = Cache(encode_key, 'prefix', validate_expiry=lambda *a: True)
    key = c.get_key(1, 2)
    cache = FileCache(c.cache_store, serialize=True, flag='cs')
    # Ensure the key is absent before the first assertion.  Fix: only a
    # missing key (KeyError) is expected here — the original bare
    # `except:` would also swallow KeyboardInterrupt/SystemExit.
    try:
        del cache[key]
    except KeyError:
        pass
    assert c.cache_updateable(cache, key) is False
    cache[key] = {'retval': 'test', 'datetime': datetime.today().date()}
    assert c.cache_updateable(cache, key) is True
    cache.close()
def main(argv=None):
    """
    Logic:
    * Generate the unique compile command database.
    * Get worktree branches and changes urls in compile command database
    """
    # NOTE(review): this function rebinds the module globals `logger`
    # and `cache` as a side effect — callers elsewhere rely on them.
    global logger, cache
    # Getting environment variables
    ccdb_worktree_env = os.environ.get('CCDB_WORKTREE')
    ccdb_worktree_apply_env = os.environ.get('CCDB_WORKTREE_APPLICATION')
    # Create a custom logger
    # NOTE(review): a new StreamHandler is added on every call; repeated
    # invocations would duplicate log lines — presumably main() runs once.
    logger = logging.getLogger(__name__)
    # - Create handlers
    c_handler = logging.StreamHandler()
    # - Create formatters and add it to handlers
    c_format = '[%(asctime)s][ccdb][%(levelname)s] %(message)s'
    c_format = logging.Formatter(c_format)
    c_handler.setFormatter(c_format)
    # - Add handlers to the logger
    logger.addHandler(c_handler)
    # Parse arguments
    parse_arguments(args=argv)
    # Generate unique compile command database
    logger.debug('Generating compile command database')
    list_project_dirs = generate_compile_command()
    # Nothing to do when no project directories were produced.
    # NOTE(review): exit(0) — sys.exit would be the conventional choice.
    if not list_project_dirs:
        exit(0)
    # Worktree handling is opt-in via CCDB_WORKTREE.
    if ccdb_worktree_env is not None:
        if ccdb_worktree_apply_env:
            # Explicit application requested via environment variable.
            apply_worktree_env_using_envvar(ccdb_worktree_apply_env)
        else:
            # Load cache
            # NOTE(review): the cache is only opened/closed on this
            # branch — confirm apply_worktree_env reads the global cache.
            cache = FileCache('ccdb')
            logger.debug(
                'Applying worktree configuration to compile command database')
            apply_worktree_env(list_project_dirs)
            cache.close()
def test_decorator(self):
    """A warm cache hit returns the stored value instead of
    re-computing it through the decorated function.
    """
    key_fn = lambda a, b, *aa, **kw: "{},{}".format(a, b)
    expected = int(time.time())
    memo = Cache(key_fn, 'prefix', validate_expiry=lambda *a: a[0])

    # Best-effort cleanup: drop any entry left over from an earlier run
    # so the first call below is guaranteed to be a miss.
    try:
        store = FileCache(memo.cache_store, serialize=True, flag='cs')
        del store[memo.get_key(True, 1)]
        store.close()
    except Exception:
        pass

    @memo.cache()
    def _cache(a, b, t):
        return t if t else int(time.time())

    # First call is a miss: computes and stores `expected`.
    cache_ret = _cache(True, 1, expected)
    assert cache_ret == expected
    # Second call must be served from the cache — were it recomputed,
    # t=None would yield a fresh timestamp, not `expected`.
    cache_ret = _cache(True, 1, None)
    assert cache_ret == expected
'sort': '-ym:pv:pageviews', 'date2': last_date } else: query = { 'oauth_token': access_token, 'id': cid, 'date1': first_date, 'metrics': 'ym:pv:pageviews', 'dimensions': 'ym:pv:URLPathFull,ym:pv:title', 'sort': '-ym:pv:pageviews', 'filters': 'ym:pv:URL=~\'/' + MY_SLUG + '\'', 'date2': last_date } sr = requests.get('https://api-metrica.yandex.com/stat/v1/data', params=query, headers=YANDEX_REQUEST) content = sr.text.encode("UTF-8").decode("UTF-8").strip() print(sr.json()) index += 1 else: print('todo: sentry here') exit() exit() mycache.close()
def list(cls, refresh=False):
    """list finds all devices responding to an SSDP search for
    WANIPConnection:1 and WANIPConnection:2.

    Results are cached; a cache entry younger than 5 minutes is
    returned directly unless ``refresh`` is True.
    """
    # Open the file cache of objects.
    cache = FileCache("upnp", "cs")
    # Fix: use .get() — the original indexed cache['lastUpdate'] and
    # raised KeyError on a fresh or partially written cache.
    lastUpdate = cache.get('lastUpdate')
    cached_routers = cache.get('routers')
    if lastUpdate and cached_routers:
        timeDelta = time.time() - lastUpdate
        # Cache is recently refreshed in the last 5 minutes.
        if timeDelta < 300 and not refresh:
            # Fix: close the cache on the fast path (it leaked before).
            cache.close()
            return cached_routers

    print("Searching for routers. This can take a few seconds!")

    # Create a UDP socket for the multicast M-SEARCH exchange.
    sock = socket.socket(family=socket.AF_INET, type=socket.SOCK_DGRAM,
                         proto=socket.IPPROTO_UDP)
    try:
        sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
        sock.setblocking(False)

        # Create the WANIPConnection:1 and WANIPConnection:2 requests.
        headers = {
            'HOST': "{}:{}".format(SSDP.multicast_host, SSDP.multicast_port),
            'MAN': '"ssdp:discover"',
            'MX': str(SSDP.response_time_secs),
            'USER-AGENT': 'UPnP/x App/x Python/x'
        }
        wan_ip1_sent = False
        wan_ip1 = SSDP._create_msearch_request(
            'urn:schemas-upnp-org:service:WANIPConnection:1', headers=headers)
        wan_ip2_sent = False
        wan_ip2 = SSDP._create_msearch_request(
            'urn:schemas-upnp-org:service:WANIPConnection:2', headers=headers)

        inputs = [sock]
        outputs = [sock]
        routers = []
        time_end = time.time() + SSDP.response_time_secs
        while time.time() < time_end:
            _timeout = 1
            readable, writable, _ = select.select(inputs, outputs, inputs,
                                                  _timeout)
            for _sock in readable:
                msg, sender = _sock.recvfrom(SSDP.buffer_size)
                response = SSDPResponse.parse(msg.decode())
                router = Router.parse_ssdp_response(response, sender)
                if router:
                    routers.append(router)
            for _sock in writable:
                if not wan_ip1_sent:
                    wan_ip1.sendto(_sock, (SSDP.multicast_host,
                                           SSDP.multicast_port))
                    # Extend the deadline so responses can arrive.
                    time_end = time.time() + SSDP.response_time_secs
                    wan_ip1_sent = True
                if not wan_ip2_sent:
                    wan_ip2.sendto(_sock, (SSDP.multicast_host,
                                           SSDP.multicast_port))
                    time_end = time.time() + SSDP.response_time_secs
                    wan_ip2_sent = True
    finally:
        # Fix: the socket was never closed in the original (fd leak).
        sock.close()

    # Enrich each discovered router with its service description.
    for r in routers:
        (serial_number, control_url, uuid) = \
            SSDP._get_router_service_description(r.url)
        r.serial_number = serial_number
        r.control_url = control_url
        r.uuid = uuid

    # Update cache with the fresh results.
    cache['lastUpdate'] = time.time()
    cache['routers'] = routers
    cache.close()
    return routers