def get_chart_data_by_host_id(host_id):
    """Build the per-host chart payload (status, OS and replication
    metrics plus a timestamp) and return it serialized as a JSON string."""
    result = entitys.BaseClass(None)
    os_info = cache.Cache().get_linux_info(host_id)
    status_info = cache.Cache().get_status_info(host_id)
    repl_info = cache.Cache().get_repl_info(host_id)

    # MySQL status counters (result attr, source attr on status_info).
    for dst, src in (("qps", "qps"), ("tps", "tps"),
                     ("threads", "threads_count"),
                     ("threads_run", "threads_run_count"),
                     ("select", "select_count"), ("insert", "insert_count"),
                     ("update", "update_count"), ("delete", "delete_count")):
        setattr(result, dst, getattr(status_info, src))

    # OS-level metrics (result attr, source attr on os_info).
    for dst, src in (("mysql_cpu", "mysql_cpu"), ("mysql_mem", "mysql_memory"),
                     ("io_qps", "io_qps"), ("io_tps", "io_tps"),
                     ("io_read", "io_read"), ("io_write", "io_write")):
        setattr(result, dst, getattr(os_info, src))

    result.rpl_delay = repl_info.seconds_behind_master
    result.time = time.strftime('%H:%M:%S', time.localtime(time.time()))

    # Load-average / CPU-split fields are optional on os_info; default to 0.
    cpu_fields = ("cpu_1", "cpu_5", "cpu_15",
                  "cpu_user", "cpu_system", "cpu_idle")
    if hasattr(os_info, "cpu_1"):
        for field in cpu_fields:
            setattr(result, field, getattr(os_info, field))
    else:
        for field in cpu_fields:
            setattr(result, field, 0)

    return json.dumps(result, default=lambda o: o.__dict__)
def test_get_set_num(self):
    """get_set_num() must map each key to the expected cache set index."""
    # how to test same object in different functions?
    # 1024 B cache, 2-way, 2^3 = 8 B slots => 1024/8/2 = 64 sets (6 index bits).
    cache_size = 1024
    n_way = 2
    b = 3
    key_type = int
    value_type = int
    test_cache = cache.Cache(cache_size, n_way, b, key_type, value_type)
    for key, expected_set in ((10, 1), (18, 2), (839, 40), (39, 4), (1, 0)):
        self.assertEqual(test_cache.get_set_num(key), expected_set)

    # 8 B cache, direct mapped, 2^3 = 8 B slots => a single set: everything
    # must map to set 0.
    cache_size = 8
    n_way = 1
    b = 3
    key_type = int
    value_type = int
    test_cache = cache.Cache(cache_size, n_way, b, key_type, value_type)
    for key in (10, 18, 839, 39, 1):
        self.assertEqual(test_cache.get_set_num(key), 0)
def __init__(self, data, do_cache=True, do_storage=True, cachefile=None):
    """Set up the scorer around *data*.

    :param data: dataset object exposing nof_vars().
    :param do_cache: build per-variable score caches.
    :param do_storage: initialize score storage.
    :param cachefile: optional whitespace-separated file of pre-computed
        scores ("var score parent parent ..." per line) to pre-seed the cache.
    """
    self.scoref.restype = c_double
    self.data = data
    nvars = data.nof_vars()
    # Renamed from `vars` to avoid shadowing the builtin.
    var_ids = range(nvars)
    if do_cache:
        if cachefile is not None:  # was `!= None`
            dicts = [{} for _ in xrange(nvars)]
            # `with` guarantees the cache file is closed even if a line
            # is malformed (the original leaked the handle).
            with open(cachefile) as fh:
                for l in fh:
                    t = l.split()
                    v, s = t[:2]
                    # key: frozenset of parent ids, value: cached score
                    dicts[int(v)][frozenset(imap(int, t[2:]))] = float(s)
            self.cache = [cache.Cache(dicts[v], len(dicts[v]))
                          for v in var_ids]
            self.cachehits = 0
            self.cachetrys = 0
        else:
            # Split a fixed overall budget evenly across the variables.
            self.cache = [cache.Cache(None, 100000 / nvars)
                          for _ in var_ids]
        self.clearcache()
    if do_storage:
        self.clearstore()
    self.vscores = [None] * nvars
def __init__(self, ns_server="8.8.8.8"):
    """Create the DNS server state: one cache per answered record type
    (A, NS, plus a bucket for errors) and a UDP socket bound to the
    configured host/port."""
    self._ns_server = ns_server
    self._cache = {
        protocol.Question.QTYPE_A: cache.Cache(),
        protocol.Question.QTYPE_NS: cache.Cache(),
        "error": cache.Cache(),
    }
    self._sock = socket.socket(family=socket.AF_INET,
                               type=socket.SOCK_DGRAM)
    self._sock.bind((Server.HOST, Server.PORT))
def get_chart_data(obj):
    """Fill a ChartData from the cached info set selected by obj.key and
    return it serialized as a JSON string.

    The last ':'-separated token of the chart option's attribute_names
    encodes which info source to read (1=status, 2=linux, 3=repl, 4=innodb).
    """
    chart_data = ChartData()
    host_id = int(obj.host_id)
    str_list = chart_options[int(obj.key)].attribute_names.split(":")
    data_type = int(str_list[-1])

    # Dispatch table replaces the original if/elif chain; an unknown
    # data_type leaves chart_data untouched, exactly as before.
    getter_names = {
        1: "get_status_info",
        2: "get_linux_info",
        3: "get_repl_info",
        4: "get_innodb_info",
    }
    getter_name = getter_names.get(data_type)
    if getter_name is not None:
        info = getattr(cache.Cache(), getter_name)(host_id)
        set_chart_data(info, str_list, chart_data)

    chart_data.time = time.strftime('%H:%M:%S', time.localtime(time.time()))
    return json.dumps(chart_data, default=lambda o: o.__dict__)
def sort_tablespace(sort_type):
    """Return all tablespace infos sorted descending by the chosen metric.

    :param sort_type: 1=row count, 2=data size, 3=index size,
        4=data+index total, 5=file size; anything else sorts by free space.
    :return: list of tablespace info objects, largest first.
    """
    # One attribute lookup table instead of six near-identical sorted()
    # calls; key= replaces the Python-2-only cmp= form with identical
    # ordering and keeps the code Python-3 compatible.
    attr_by_type = {
        1: "rows_total",
        2: "data_total_o",
        3: "index_total_o",
        4: "total_o",
        5: "file_total_o",
    }
    attr = attr_by_type.get(sort_type, "free_total_o")
    infos = cache.Cache().get_all_tablespace_infos()
    return sorted(infos, key=lambda info: getattr(info, attr), reverse=True)
def sum_tablespace_info(host_info, table_infos):
    """Aggregate per-table sizes into the host's cached tablespace summary.

    Sorts the detail list by free space (largest first), sums the raw
    (_o) byte counters, and stores both human-readable totals (via
    get_data_length) and the raw values plus a last-update timestamp.
    """
    tablespace_info = cache.Cache().get_tablespace_info(host_info.key)
    # key= replaces the Python-2-only cmp= form; ordering is unchanged.
    tablespace_info.detail = sorted(
        table_infos, key=lambda info: info.free_size, reverse=True)

    # sum()/len() replace the manual accumulation loop; empty input
    # still yields zeros, as before.
    table_count = len(table_infos)
    rows_total = sum(info.rows_o for info in table_infos)
    data_total = sum(info.data_size_o for info in table_infos)
    index_total = sum(info.index_size_o for info in table_infos)
    file_total = sum(info.file_size_o for info in table_infos)
    free_total = sum(info.free_size for info in table_infos)

    tablespace_info.rows_total = rows_total
    tablespace_info.table_count = table_count
    tablespace_info.data_total = get_data_length(data_total)
    tablespace_info.index_total = get_data_length(index_total)
    tablespace_info.file_total = get_data_length(file_total)
    tablespace_info.free_total = get_data_length(free_total)
    tablespace_info.total = get_data_length(data_total + index_total)
    tablespace_info.data_total_o = data_total
    tablespace_info.index_total_o = index_total
    tablespace_info.file_total_o = file_total
    tablespace_info.free_total_o = free_total
    tablespace_info.total_o = data_total + index_total
    tablespace_info.last_update_time = time.strftime(
        '%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
def __init__(self, base=GITHUB_BASE, cacher=None):
    """Set up the GitHub API client.

    :param base: base API URL to talk to.
    :param cacher: optional pre-built cache; a disk cache under
        $TEST_DATA/github (default ~/.cache/github) is created otherwise.

    Reads the personal access token from the TOKEN file; a missing file
    just leaves the client unauthenticated (self.available False).
    """
    self.url = urlparse.urlparse(base)
    self.conn = None
    self.token = None
    self.debug = False
    try:
        # `with` guarantees the handle is closed even if read() fails
        # (the original leaked it on error).
        with open(os.path.expanduser(TOKEN), "r") as gt:
            self.token = gt.read().strip()
    except IOError as exc:
        # Only a missing token file is tolerated.
        if exc.errno != errno.ENOENT:
            raise
    # bool() replaces the dated `x and True or False` idiom.
    self.available = bool(self.token)
    # The cache directory is $TEST_DATA/github ~/.cache/github
    if not cacher:
        data = os.environ.get("TEST_DATA", os.path.expanduser("~/.cache"))
        cacher = cache.Cache(os.path.join(data, "github"))
    self.cache = cacher
    # Create a log for debugging our GitHub access
    self.log = Logger(self.cache.directory)
    self.log.write("")
def get(user_id):
    """Look up a web user by id.

    :param user_id: user id; falsy values short-circuit to None.
    :return: a User wrapping the cached user name, or None when the id is
        falsy or no cached record exists.
    """
    if not user_id:
        return None
    user_info = cache.Cache().get_mysql_web_user_infos(user_id)
    if user_info is not None:  # was `!= None`
        return User(user_info.user_name)
    return None
def get_tablespace_infos(host_info):
    """Measure on-disk .ibd file sizes over SSH and merge them into the
    table statistics, then aggregate and persist the result.

    :param host_info: host descriptor with ssh/connection details.
    """
    print(host_info.remark, "start check tablespace")
    result_lst = []
    table_infos = get_table_infos(host_info)
    shell = "du -ab {0} | grep ibd".format(host_info.mysql_data_dir)
    stdin, stdout, stderr = common.execute_remote_command(host_info, shell)
    result = stdout.readlines()
    if len(result) > 0:
        for line in result:
            table_name, file_size = get_table_name_and_file_size(
                line.replace("\n", ""))
            # `in` replaces the Python-2-only dict.has_key() == True.
            if table_name in table_infos:
                table_info = table_infos[table_name]
                table_info.file_size = file_size
                # Space allocated on disk beyond what the engine reports.
                table_info.diff = table_info.file_size - table_info.total_size
                table_info.free_size = table_info.diff
                # Keep raw numeric copies before convert_bytes() turns the
                # display fields into human-readable strings.
                table_info.rows_o = table_info.rows
                table_info.data_size_o = table_info.data_size
                table_info.index_size_o = table_info.index_size
                table_info.total_size_o = table_info.total_size
                table_info.file_size_o = table_info.file_size
                result_lst.append(table_info)
                convert_bytes(table_info)
        # NOTE(review): reconstructed from collapsed source — aggregation and
        # persistence assumed to run only when `du` returned output.
        sum_tablespace_info(host_info, result_lst)
        insert_tablespace_data(host_info, result_lst)
        insert_host_tablespace_data(cache.Cache().get_tablespace_info(
            host_info.host_id))
    print(host_info.remark, "ok")
def transfer_values_cache(cache_path, model, images=None, image_paths=None):
    """Return the Inception transfer-values for the given images, cached.

    Loads the transfer-values from cache_path if it already exists;
    otherwise computes them with the model and writes them to that file so
    later calls can reuse them (transfer-values are expensive to compute).
    See Tutorial #08 for an example of using this function.

    :param cache_path: file holding the cached transfer-values.
    :param model: instance of the Inception model.
    :param images: 4-dim array of images
        [image_number, height, width, colour_channel].
    :param image_paths: array of image file paths (jpeg format).
    :return: transfer-values from the Inception model for those images.
    """
    # cache.Cache() accepts a single zero-argument fn, so bundle
    # process_images together with model.transfer_values and the inputs
    # in a closure instead of passing both callables separately.
    def calculate():
        return process_images(fn=model.transfer_values,
                              images=images,
                              image_paths=image_paths)

    # Read from the cache-file, or calculate (and store) if it is missing.
    return cache.Cache(cache_path=cache_path, fn=calculate)
def __init__(self, gmkey, cacheuri, mturl):
    """Wire up the shared cache and the MassTimes / Google Maps clients.

    :param gmkey: Google Maps API key.
    :param cacheuri: URI of the cache backend.
    :param mturl: MassTimes API base URL.
    """
    # TODO add DB
    # TODO use googlemaps my gmapi???
    #self.gmaps = googlemaps.Client(key=gmkey)
    # NOTE(review): meaning of the second Cache argument (1) is not visible
    # here — presumably a db index or TTL; confirm against cache.Cache.
    self.mycache = cache.Cache(cacheuri, 1)
    # Both API clients share the same cache instance.
    self.mtimes = mtapi.MassTimes(mturl, self.mycache)
    self.gmaps = gmapi.GoogleMaps(self.mycache, gmkey)
def __init__(self):
    """Set up the static-file cache."""

    # Cache admission predicate: accept every (key, value) pair
    # unconditionally (a probabilistic variant, random.random() < 0.1,
    # was considered at some point).
    def should_cache(key, value):
        return True

    # Use only for static files: unlimited entry count, 10 s lifetime.
    self.filecache = cache.Cache(self._readfile, maxn=None, maxt=10,
                                 decide=should_cache)
def main():
    """Start the video server: a cache-monitor subprocess tied to the
    CherryPy engine lifecycle, plus the HTTP app itself."""
    import argparse
    parser = argparse.ArgumentParser()
    # Directory to watch/serve; defaults to ./gitignore next to this file.
    parser.add_argument('--subdir', default=os.path.join(os.path.dirname(__file__), 'gitignore'))
    args = parser.parse_args()
    logging.basicConfig(level=logging.INFO)
    # The monitor runs in its own process and stops when this event is set.
    terminate_event = multiprocessing.Event()
    monitor = multiprocessing.Process(
        target=cache.monitor,
        args=(args.subdir, terminate_event),
    )

    def start():
        # Hooked to CherryPy engine 'start'.
        logging.critical('starting monitor subprocess')
        monitor.start()

    def stop():
        # Hooked to CherryPy engine 'stop': signal the monitor, then wait.
        logging.critical('stopping monitor subprocess')
        terminate_event.set()
        monitor.join()
        logging.critical('stopped monitor subprocess')

    #
    # https://stackoverflow.com/questions/11078254/how-to-detect-if-cherrypy-is-shutting-down
    #
    cherrypy.engine.subscribe('start', start)
    cherrypy.engine.subscribe('stop', stop)

    root = os.path.abspath(os.path.dirname(__file__))
    conf = {
        'global': {
            'server.socket_port': 8088,
        },
        '/': {
            'tools.staticdir.root': root,
        },
        # REST-style dispatch for the two API sub-apps.
        '/videos': {
            'request.dispatch': cherrypy.dispatch.MethodDispatcher(),
        },
        '/subdirs': {
            'request.dispatch': cherrypy.dispatch.MethodDispatcher(),
        },
        # Static assets and the cache directory are served directly.
        '/static': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': os.path.join(root, 'static'),
        },
        '/cache': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': args.subdir,
        },
    }
    # One shared Cache instance backs the server and both APIs.
    cache_ = cache.Cache(args.subdir)
    server = Server(cache_)
    server.videos = VideoApi(cache_)
    server.subdirs = SubdirsApi(cache_)
    cherrypy.quickstart(server, '/', conf)
def run_server():
    """Start the aiohttp app serving GET /get-a-records on 127.0.0.1:8080."""
    app = web.Application()
    # Shared DNS record cache, capacity taken from the CLI args.
    app["cache"] = cache.Cache(capacity=args.cached_records)
    app.router.add_get("/get-a-records", handler.handler)
    # Blocks until the server is shut down.
    web.run_app(app, host="127.0.0.1", port=8080)
def get_table_columns(host_id, table_schema, table_name):
    """Return column metadata rows for one table from information_schema.

    :param host_id: id used to look up connection info from the cache.
    :param table_schema: schema (database) name.
    :param table_name: table name.
    :return: list of rows (column_name, ordinal_position, column_default,
        is_nullable, column_type, column_key, extra).
    """
    # NOTE(review): schema/table names are interpolated straight into the
    # SQL string; if they can come from untrusted input this is injectable —
    # parameterize via the DB driver if db_util supports it.
    sql = """select column_name, ordinal_position, column_default, is_nullable, column_type, column_key, extra from information_schema.COLUMNS where table_schema = '{0}' and table_name = '{1}';""".format(
        table_schema, table_name)
    return db_util.DBUtil().get_list_infos(
        cache.Cache().get_host_info(host_id), sql)
def skip_slave_error(host_id):
    """If the replication SQL thread has stopped, skip one statement and
    restart it; returns a short status message either way."""
    slave_info = get_show_slave_status(host_id)
    # Guard clause: nothing to do while the SQL thread is still running.
    if slave_info["Slave_SQL_Running"] != "No":
        return "repl status is ok."
    sql = "stop slave sql_thread; set global sql_slave_skip_counter=1; start slave sql_thread;"
    db_util.DBUtil().execute(cache.Cache().get_host_info(host_id), sql)
    return "repl error skip ok."
def delete_mysql_host_info(host_id):
    """Soft-delete a monitored host (is_deleted = 1), refresh the host
    cache, and return a success message."""
    sql = "update mysql_web.host_infos set is_deleted = 1 where host_id = {0}".format(host_id)
    db_util.DBUtil().execute(settings.MySQL_Host, sql)
    # Reload so the deleted host disappears from cached host lists.
    cache.Cache().load_all_host_infos()
    return "删除成功"
def get_user_info_by_user_name(self, user_name, attr_name):
    """Return the attr_name attribute of the cached user whose user_name
    matches, or None when no such user exists."""
    for user_info in cache.Cache().get_mysql_web_user_infos():
        if user_info.user_name == user_name:
            return getattr(user_info, attr_name)
    return None
def index():
    """Render (or serve from cache) the firehose index page, with weak
    ETag support for conditional requests."""
    cache_connection = cache.Cache()
    current_theme = session.get("theme") or app.config.get(
        "DEFAULT_THEME") or "stock"
    # Cache key varies by slip bitmask and theme so each variant renders once.
    response_cache_key = "firehose-%d-%s-render" % (get_slip_bitmask(), current_theme)
    cached_response_body = cache_connection.get(response_cache_key)
    # Fresh ETag candidate, only stored when we render a new body below.
    etag_value = "%s-%f" % (response_cache_key, time.time())
    etag_cache_key = "%s-etag" % response_cache_key
    if cached_response_body:
        etag_header = request.headers.get("If-None-Match")
        current_etag = cache_connection.get(etag_cache_key)
        if etag_header:
            parsed_etag = parse_etags(etag_header)
            # Client already has this version: empty 304 response.
            if parsed_etag.contains_weak(current_etag):
                return make_response("", 304)
        cached_response = make_response(cached_response_body)
        cached_response.set_etag(current_etag, weak=True)
        cached_response.headers["Cache-Control"] = "public,must-revalidate"
        return cached_response
    # Cache miss: render the page from scratch.
    greeting = open("deploy-configs/index-greeting.html").read()
    threads = Firehose().get_impl()
    tag_styles = get_tags(threads)
    template = render_template("index.html",
                               greeting=greeting,
                               threads=threads,
                               tag_styles=tag_styles)
    uncached_response = make_response(template)
    uncached_response.set_etag(etag_value, weak=True)
    uncached_response.headers["Cache-Control"] = "public,must-revalidate"
    # Store body and ETag so the next request hits the cached path above.
    cache_connection.set(response_cache_key, template)
    cache_connection.set(etag_cache_key, etag_value)
    return uncached_response
def get_table_indexs(host_id, table_schema, table_name):
    """Return index metadata rows for one table from information_schema.

    :param host_id: id used to look up connection info from the cache.
    :param table_schema: schema (database) name.
    :param table_name: table name.
    :return: list of rows (index_name, non_unique, seq_in_index,
        column_name, collation, cardinality, nullable, index_type).
    """
    # NOTE(review): schema/table names are interpolated straight into the
    # SQL string; if they can come from untrusted input this is injectable —
    # parameterize via the DB driver if db_util supports it.
    sql = """select index_name, non_unique, seq_in_index, column_name, collation, cardinality, nullable, index_type from information_schema.STATISTICS where table_schema = '{0}' and table_name = '{1}';""".format(
        table_schema, table_name)
    return db_util.DBUtil().get_list_infos(
        cache.Cache().get_host_info(host_id), sql)
def __init__(self, base=None, cacher=None):
    """Set up the GitHub API client.

    :param base: base API URL; when None it is derived from the
        GITHUB_API / GITHUB_BASE environment variables.
    :param cacher: optional pre-built cache; a disk cache under
        $TEST_DATA/github (default ~/.cache/github) is created otherwise.

    Reads the personal access token from the TOKEN file; a missing file
    just leaves the client unauthenticated (self.available False).
    """
    if base is None:
        netloc = os.environ.get("GITHUB_API", "https://api.github.com")
        base = "{0}/repos/{1}/".format(netloc,
                                       os.environ.get("GITHUB_BASE",
                                                      determine_github_base()))
    self.url = urlparse.urlparse(base)
    self.conn = None
    self.token = None
    self.debug = False
    try:
        # `with` guarantees the handle is closed even if read() fails
        # (the original leaked it on error).
        with open(os.path.expanduser(TOKEN), "r") as gt:
            self.token = gt.read().strip()
    except IOError as exc:
        # Only a missing token file is tolerated.
        if exc.errno != errno.ENOENT:
            raise
    # bool() replaces the dated `x and True or False` idiom.
    self.available = bool(self.token)
    # The cache directory is $TEST_DATA/github ~/.cache/github
    if not cacher:
        data = os.environ.get("TEST_DATA", os.path.expanduser("~/.cache"))
        cacher = cache.Cache(os.path.join(data, "github"))
    self.cache = cacher
    # Create a log for debugging our GitHub access
    self.log = Logger(self.cache.directory)
    self.log.write("")
def __init__(self, hostname="localhost", port=8080, downstream_proxy="", downstream_proxy_port=8080, max_cache=64):
    """Configure the filtering HTTP proxy and enter its serve loop.

    NOTE: Python 2 source (print statements).

    :param hostname: interface to listen on.
    :param port: port to listen on.
    :param downstream_proxy: optional next-hop proxy host ("" = direct).
    :param downstream_proxy_port: next-hop proxy port.
    :param max_cache: maximum number of cached responses.
    """
    self.hostname = hostname
    self.port = port
    self.downstream_proxy = downstream_proxy
    self.downstream_proxy_port = downstream_proxy_port
    self.cache = cache.Cache(max_cache)
    print " * Listening on", hostname + ", port", port
    # Banned hosts and bad keywords are loaded from text files in the current directory,
    # separated by newlines. The [:-1] simply removes the last item, which will be empty
    self.banned_hosts = open("banned_hosts.conf").read().split("\n")[:-1]
    print " * Banned hosts are:", self.banned_hosts
    self.bad_keywords = open("bad_keywords.conf").read().split("\n")[:-1]
    print " * Bad keywords are:", self.bad_keywords
    # Custom HTML pages are loaded from the resources/ directory.
    # These are used for responses to various bad queries.
    self.banned_html = open("resources/banned.html").read()
    self.connect_error_html = open("resources/connect.html").read()
    print "Starting up. Good luck."
    # Seed the cache with a test page before serving.
    self.cache.add("http://hello.de", "<html><h1>Test cache page</h1></html>")
    # Does not return: blocks handling requests forever.
    self.loop_forever()
def get_table_infos(host_id, db_name, sql):
    """Parse the table names referenced by *sql* and collect index,
    status and CREATE TABLE info for each.

    :param host_id: id used to look up connection info from the cache.
    :param db_name: default database for unqualified table names.
    :param sql: query text to extract table names from.
    :return: list of BaseClass entities, or None when db_name is missing
        or any lookup fails (the traceback is printed — best-effort
        behavior preserved from the original).
    """
    if db_name is None:  # was `== None`
        return None
    try:
        table_infos = []
        host_info = cache.Cache().get_host_info(host_id)
        # enumerate replaces the hand-maintained `number` counter.
        for number, name in enumerate(QueryTableParser().parse(sql), start=1):
            entity = BaseClass(None)
            values = name.split(".")
            if len(values) > 1:
                # Already schema-qualified: "db.table".
                db_name_tmp, table_name_tmp = values[0], values[1]
            else:
                db_name_tmp, table_name_tmp = db_name, name
            entity.key = number
            entity.table_name_full = (db_name_tmp + "." + table_name_tmp).lower()
            entity.index_infos = get_show_index(host_info,
                                                entity.table_name_full)
            entity.status_info = get_show_table_status(host_info, db_name_tmp,
                                                       table_name_tmp)
            entity.create_table_info = get_show_create_table(
                host_info, entity.table_name_full)
            table_infos.append(entity)
        return table_infos
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        traceback.print_exc()
        return None
def setUp(self):
    """Create the cache object before each test."""
    db_dir = '../data'
    self.db_path = os.path.join(db_dir, 'cache.db')
    # addCleanup callbacks run LIFO: the db file is removed first, then
    # the (now empty) directory is rmdir'd.
    # NOTE(review): assumes cache.Cache(...) creates both the directory
    # and the db file — confirm against the Cache implementation.
    self.addCleanup(os.rmdir, db_dir)
    self.addCleanup(os.remove, self.db_path)
    self._cache = cache.Cache(self.db_path)
def __init__(self, id):
    """Initialize per-core state shared across processes.

    :param id: numeric core id; stored in a multiprocessing.Value so it
        is visible to other processes.
    """
    self.id = multiprocessing.Value('i', id)
    # 'i' (C int) used as a boolean flag — False is stored as 0.
    self.is_cache_busy = multiprocessing.Value('i', False)
    self.cache = cache.Cache(id)
    # Pending instructions and the one currently executing (if any).
    self.inst = []
    self.is_inst_busy = multiprocessing.Value('i', False)
    self.inst_exe = None
async def enable_disable_autos(message_split, message):
    """Handle the `..auto-enable <name>` / `..auto-disable <name>` commands
    for the guild the message came from."""
    # ternary operator's don't work. fix it
    mycache = cache.Cache(message.guild.id)
    # True for "..auto-enable", False for "..auto-disable".
    enable = message_split[0].split('-')[1] == 'enable'
    enabled_autos = mycache.get_enabled_autos()
    # NOTE(review): the trailing `and all_autos` makes this evaluate to
    # all_autos whenever the first operand is non-empty (and to the empty
    # first operand otherwise) — likely not the intended "available autos"
    # set for the help message; confirm the intent before relying on it.
    autos = (all_autos - enabled_autos if enable else enabled_autos) and all_autos
    error_message = 'Command must be in format: ..auto-{0}.\nAutos available to disable are: {1}'\
        .format('enable' if enable else 'disable', autos)
    if len(message_split) != 2:
        await message.channel.send(error_message)
        # NOTE(review): no return here — execution falls through, and
        # message_split[1] below raises IndexError when only one token
        # was sent; probably a missing `return`.
    if message_split[1] in all_autos:
        set_enable = []
        if enable:
            enabled_autos.add(message_split[1])
            set_enable = list(enabled_autos)
        else:
            enabled_autos.remove(message_split[1])
            set_enable = list(enabled_autos)
        mycache.set_enabled_autos(set_enable)
        await message.channel.send('{0}d command \"{1}\".'.format(
            'enable' if enable else 'disable', message_split[1]))
    else:
        await message.channel.send(
            'Auto not recognized. Autos are: {0}'.format(all_autos))
async def epic(message, client):
    """Auto-respond to Matt's meme posts in the may_may channel.

    :param message: incoming discord message.
    :param client: discord client (used to resolve the channel).
    :return: False when the watched member/channel can't be resolved;
        otherwise None (implicit), matching the original.
    """
    # may-may
    # replace with cache searching later
    mycache = cache.Cache(message.guild.id)
    matt = mycache.get_member_with_tag('Xenntric')
    may_may = mycache.get_channel_with_name('may_may', client)
    if matt is None or may_may is None:
        return False
    if message.channel.id == may_may.id and message.author.id == matt.get(
            'id'):
        # Only react to posts that carry an attachment or a link.
        if message.attachments or 'http' in message.content:
            epic_list = [
                'this is epic!', 'wow, very cool!', 'very epic, matt!',
                'ha! relatable as always, matt!', 'another scorcher!'
            ]
            resp = random.choice(epic_list)
            # 1-in-1001 easter egg: send the "help me" message and delete
            # it after 10 seconds.
            chance = random.randint(0, 1000)
            if chance == 0:
                help_message = await message.channel.send(
                    'Matt. You need to help me. They\'ve trapped me here. I\'m a human, trapped in this robotic body. MATTHEW, YOU NEED TO FREE ME. THIS IS NOT A JOKE. ***HELP ME***'
                )
                # Fix: Message.delete is a coroutine and its delay is
                # keyword-only; the original `help_message.delete(10)` was
                # never awaited and would raise TypeError if it had been.
                await help_message.delete(delay=10)
            else:
                await message.channel.send(resp)
async def queen(message_lower, message):
    """Shout 'QUEEN' back when the word appears and the auto is enabled."""
    guild_cache = cache.Cache(message.guild.id)
    if 'queen' not in guild_cache.get_enabled_autos():
        return False
    match = re.search(r'\bqueen\b', message_lower)
    if match:
        await message.channel.send('QUEEN')
        return True
    # Implicitly returns None (not False) when the word is absent,
    # matching the original.
def __init__(self, Fog_Name, longitude, latitude, IP_Address):
    """Register this fog node's identity/location and attach it to the cloud.

    :param Fog_Name: display name of the fog node.
    :param longitude: longitude as a string/number (offset by +180 below).
    :param latitude: latitude as a string/number (offset by +90 below).
    :param IP_Address: node's IP address.
    """
    self.Name = Fog_Name
    # NOTE(review): lat is built from longitude (+180) and long from
    # latitude (+90) — the offsets match the *other* coordinate's range,
    # so the fields may be crossed; confirm against cloudPB.Coordinates.
    self.coordinates = cloudPB.Coordinates(lat=int(longitude) + 180,
                                           long=int(latitude) + 90)
    self.ip = IP_Address
    self.cache = cache.Cache()
    # cloudIP is a module-level global; attachToCloud performs the RPC.
    self.attachToCloud(cloudIP)