Example #1
def memcached(keys=False, stats=False, clear=False):
    """List memcached stored keys and server stats"""
    if 'CACHE_MEMCACHED_SERVERS' in app.config:
        servers = app.config['CACHE_MEMCACHED_SERVERS']
        pp = pprint.PrettyPrinter(indent=4)

        for server in servers:
            host, port = server.split(':')
            mem = MemcachedStats(host, port)

            print '=' * 80
            print 'SERVER: %s:%s' % (host, port)

            if keys:
                print 'KEYS:'
                pp.pprint(mem.keys())

            if stats:
                print 'STATS:'
                pp.pprint(mem.stats())

        # clear keys
        if clear:
            cache = memcache.Client(servers, debug=0)
            if cache.flush_all():
                print 'Memcached data flushed'
            else:
                print 'Could not flush memcached'
    else:
        print 'There are no memcached servers in the config file'
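A minimal invocation sketch for the command above, assuming a Flask app whose config lists one local memcached server (the server address is illustrative):

# hypothetical setup; assumes a memcached daemon on 127.0.0.1:11211
app.config['CACHE_MEMCACHED_SERVERS'] = ['127.0.0.1:11211']

memcached(stats=True)             # print per-server stats
memcached(keys=True, clear=True)  # list keys, then flush everything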
Example #2
def persist_hits():
    if is_cached_hitcount_enabled() and using_memcache():

        backend, location, params = parse_backend_conf(CACHED_HITCOUNT_CACHE)
        host, port = location.split(':')
        hitcount_cache = get_hitcount_cache()
        lock = hitcount_cache.get(CACHED_HITCOUNT_LOCK_KEY)
        if lock is None or lock != 1:
            try:
                #acquire a lock so no updates will occur while we are persisting the hits to the DB
                hitcount_cache.set(CACHED_HITCOUNT_LOCK_KEY, 1, CACHED_HITCOUNT_CACHE_TIMEOUT)
                mem = MemcachedStats(host, port)
                keys = mem.keys()

                #track content types so the DB doesn't have to be queried for every key
                content_types = {}
                for cache_key in keys:
                    if "hitcount__" in cache_key and CACHED_HITCOUNT_IP_CACHE not in cache_key:
                        #the raw key is key_prefix, version and key separated by ':' - all we need is the key
                        cache_key = cache_key.split(':')[-1]
                        count = hitcount_cache.get(cache_key)
                        if count:  #only update the hit count if the cached count is not None or zero
                            hitcount, ctype_pk, object_pk = cache_key.split('__')
                            if ctype_pk in content_types:
                                content_type = content_types[ctype_pk]
                            else:
                                content_type = ContentType.objects.get(id=ctype_pk)
                                content_types[ctype_pk] = content_type

                            with transaction_atomic():
                                #save a new hit or increment the hits on an existing hit
                                hit, created = Hit.objects.select_for_update().get_or_create(
                                    added=datetime.utcnow().date(),
                                    object_pk=object_pk,
                                    content_type=content_type)
                                if created:
                                    hit.hits = long(count)
                                else:
                                    hit.hits = hit.hits + long(count)
                                hit.save()

                        #reset the hitcount for this object to 0 - even if it was previously None
                        hitcount_cache.set(cache_key, 0, CACHED_HITCOUNT_CACHE_TIMEOUT)
            except Exception, ex:
                logger.error('Unable to persist hits')
                logger.error(ex)
                raise ex
            finally:
                #release the lock so cached hit counting can resume
                hitcount_cache.set(CACHED_HITCOUNT_LOCK_KEY, 0, CACHED_HITCOUNT_CACHE_TIMEOUT)
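The key parsing above relies on Django's cache-key layout; a short illustration with made-up values (the prefix and pks are hypothetical):

# Django composes raw keys as '<key_prefix>:<version>:<key>'; the hit-counter
# key itself is 'hitcount__<content_type_pk>__<object_pk>'.
raw_key = 'myprefix:1:hitcount__12__345'
cache_key = raw_key.split(':')[-1]                   # -> 'hitcount__12__345'
label, ctype_pk, object_pk = cache_key.split('__')   # -> 'hitcount', '12', '345'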
Example #3
    def get_relevant_keys(self):

        cache_stats = MemcachedStats("127.0.0.1", 8001)
        all_keys = cache_stats.keys()

        relevant_keys = [
            key.replace(":1:", "") for key in all_keys
            if key.split("|")[0].replace(":1:", "") == str(self.pk)
        ]

        return relevant_keys
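A worked illustration of the filter above, assuming Django's ':1:' version segment and '|'-delimited keys (all values hypothetical):

all_keys = [':1:42|profile', ':1:42|avatar', ':1:7|profile']
pk = 42
relevant = [key.replace(':1:', '') for key in all_keys
            if key.split('|')[0].replace(':1:', '') == str(pk)]
# -> ['42|profile', '42|avatar']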
Example #4
def cache_stats():
    config_name = request.args.get('name')
    cache_config = config.get(config_name)
    if cache_config is None:
        return Response('{}', 404, content_type='application/json')

    if cache_config['type'] == 'redis':
        r = redis.StrictRedis(host=cache_config['host'], port=cache_config['port'], db=0)
        return Response(json_to_html(r.info()), 200, content_type='application/json')
    elif cache_config['type'] == 'memcached':
        cache = MemcachedStats(cache_config['host'], cache_config['port'])
        return Response(json_to_html(cache.stats()), 200, content_type='application/json')
    else:
        #unknown cache type
        return Response('{}', 400, content_type='application/json')
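A sketch of the shape the config mapping above is assumed to have (the names and addresses are hypothetical):

config = {
    'sessions': {'type': 'redis',     'host': '127.0.0.1', 'port': 6379},
    'pages':    {'type': 'memcached', 'host': '127.0.0.1', 'port': 11211},
}
# GET /cache_stats?name=pages would then render the memcached stats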
Example #5
    def reset_cache_layer(self, layer_key_name):

        cache = caches[self.cache_name]
        location = self._get_location()
        location = location[0] if isinstance(location, list) else location
        location = location.split(':')
        mem = MemcachedStats(location[0], location[1])
        keys = mem.keys()

        for key in keys:
            if key.startswith('{}/{}/{}'.format(self.memcache_key_prefix, '0',
                                                layer_key_name)):
                cache.delete(key)
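For illustration, the prefix matched above for a hypothetical memcache_key_prefix of 'tiles' and a layer named 'roads' (the tile path is made up):

prefix = '{}/{}/{}'.format('tiles', '0', 'roads')   # -> 'tiles/0/roads'
assert 'tiles/0/roads/12/345/678.png'.startswith(prefix)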
Example #6
def get_data(host='127.0.0.1', port='11211'):
    import time
    import pylibmc
    from memcached_stats import MemcachedStats

    m = MemcachedStats(host, port)
    #connect the data client to the same server the stats client reads from
    shared = pylibmc.Client(['%s:%s' % (host, port)], binary=True)
    shared.behaviors = {"tcp_nodelay": True, "ketama": True}

    #build a small auto-refreshing HTML dump of everything in the cache
    str_data = '<head>\n<meta http-equiv="refresh" content="20">\n</head>\n'
    str_data = str_data + '<b>Data of internal memcache server:</b></br>\n'
    str_data = str_data + str(time.strftime("%x %X %Z", time.localtime())) + '</br>\n'
    dict_data = shared.get_multi(m.keys())
    for k, v in sorted(dict_data.items()):
        str_data = str_data + str(k) + " : " + str(v) + " </br>\n"
    str_data = str_data + "-" * 30 + " </br>\n"
    return str_data
Example #8
def get_nip_records_from_cache(nip_pk, nip_name):

    mem = MemcachedStats()

    nip_uid = "%d_%s" % (nip_pk, nip_name)

    relevant_keys = [key for key in mem.keys() if nip_uid in key]

    data = {}

    for key in relevant_keys:
        #the third '_'-separated segment of the key is its timestamp
        time = key.split("_")[2]
        data[key] = json.loads(cache.get(key))

    return data
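A sketch of the key layout the function above assumes (values hypothetical):

nip_key = '7_sensor_1700000000'   # '<pk>_<name>_<time>'
nip_key.split('_')[2]             # -> '1700000000'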
Example #9
class AnguisMemcached(AnguisBase):
    def __init__(self, host='localhost', port=11211, *args, **kwargs):
        self.client = mc.Client((host, port))
        self.client_stats = MemcachedStats(host, port)
        super(AnguisMemcached, self).__init__()

    def __del__(self):
        super(AnguisMemcached, self).__del__()

    def __getitem__(self, key):
        return self.unserialize(self.client.get(key))

    def __setitem__(self, key, obj):
        self.client.set(key, self.serialize(obj))

    def __delitem__(self, key):
        self.client.delete(key)

    def __iter__(self):
        return iter(self.client_stats.keys())

    def __len__(self):
        return len(self.client_stats.keys())
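A minimal usage sketch for the class above, assuming a memcached server on localhost:11211 and that AnguisBase supplies serialize()/unserialize():

store = AnguisMemcached()
store['greeting'] = {'text': 'hello'}   # serialized, then set
roundtrip = store['greeting']           # get, then unserialized
total_keys = len(store)                 # key count via MemcachedStats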
Example #10
def stats():
    logging.error("Sample error message")
    mem = MemcachedStats()
    stats_all = mem.stats()
    #hit rate = hits / (hits + misses); fetch the stats once instead of per field
    hits = float(stats_all['get_hits'])
    misses = float(stats_all['get_misses'])
    stats_get_hits_rate = hits / (hits + misses) * 100
    stats_memcached_mem_used = stats_all['bytes']
    #percentage of an assumed 64 MB memory limit
    var_temp = (float(stats_memcached_mem_used) / 64000000.00) * 100.00
    stats_memcached_mem_used_percentage = format(var_temp, '.6f')

    return render_template(
        "app.html.j2",
        placeholder_stats_all=stats_all,
        placeholder_stats_get_hits_rate=stats_get_hits_rate,
        placeholder_stats_memcached_mem_used=stats_memcached_mem_used,
        placeholder_stats_memcached_mem_used_percentage=stats_memcached_mem_used_percentage)
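memcached reports its configured memory limit as limit_maxbytes, so the percentage could be derived from the live stats instead of the hard-coded 64 MB; a sketch, assuming the stat is present:

stats_all = mem.stats()
used = float(stats_all['bytes'])
limit = float(stats_all.get('limit_maxbytes', 64000000))
used_percentage = used / limit * 100.0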
Example #11
import time

from memcached_stats import MemcachedStats
#Terminal is assumed here to be blessings.Terminal
from blessings import Terminal

# Configuration
interval = 3


def percent_change_in_interval(a0, a1, b0, b1):
    a_delta = a1 - a0
    b_delta = b1 - b0
    try:
        return float(a_delta) / (a_delta + b_delta)
    except ZeroDivisionError:
        return '-'


print "Interval is", interval, "seconds. Ctrl-c to quit."
t  = Terminal()
mc = MemcachedStats()
time_0   = mc.stats()
hits_0   = int(time_0['get_hits'])
misses_0 = int(time_0['get_misses'])

time_a = time_0
while True:
    try:
        time.sleep(interval)
        print t.clear()
        time_b = mc.stats()

        hits_a   = int(time_a['get_hits'])
        hits_b   = int(time_b['get_hits'])
        misses_a = int(time_a['get_misses'])
        misses_b = int(time_b['get_misses'])

        #share of lookups in this interval that were hits
        print 'Hit rate this interval:', percent_change_in_interval(hits_a, hits_b, misses_a, misses_b)
        time_a = time_b
    except KeyboardInterrupt:
        break
Example #12
def main():
    """ main """
    # parse options
    usage = 'Usage: %prog -h HOSTNAME [-p PORT] [[-r REGEX] [-r REGEX] ...] [-l NUM_OF_KEYS] [-v]'
    parser = OptionParser(usage, add_help_option=False, version="%prog v0.5")
    parser.add_option('-?', '--help', action='help')
    parser.add_option('-h', '--host', dest='hostname', metavar='HOSTNAME')
    parser.add_option('-p', '--port', dest='port', metavar='PORT', help='Default = 11211')
    parser.add_option('-l', '--limit', dest='limit', type='long', metavar='NUM_OF_KEYS',
                      help='Limit the number of matching keys to examine. Specify 0 for no limit. Default = 100')
    parser.add_option('-r', '--regex', action='append', dest='patterns', metavar='REGEX', 
                      help='Add one or more regex pattern(s) for filtering keys. Default = .*')
    parser.add_option('-v', '--verbose', action='store_true',  dest='verbose',
                      help='Be chatty.')
    (options, args) = parser.parse_args()

    # do some options housekeeping
    if not options.hostname:
        parser.print_help()
        parser.usage = None
        parser.error('HOSTNAME is required!')
    if options.port is None:
        options.port = 11211
    if options.limit is None:
        options.limit = 100
    if not options.patterns:
        options.patterns = [ur'.*']
    if options.verbose:
        print 'Loaded patterns: %s' % options.patterns

    # some initial values
    sizes = []
    rmb = re.compile(ur' b$')

    # compile our regex patterns
    for i, pattern in enumerate(options.patterns):
        options.patterns[i] = re.compile(pattern)

    if options.verbose:
        print 'Connecting to %s:%s' % (options.hostname, options.port)
    # connect to the given memcached instance
    mem = MemcachedStats(options.hostname, options.port)

    # get the total number of slabs
    total_slabs = len(mem.slab_ids())

    if options.verbose:
        print 'Collecting data for ALL keys...'
    # pull all the key details regardless of options.limit
    # because 'stats cachedump <slabid> <limit>' is on a per-slab basis
    details = mem.key_details(limit=0)
    
    if options.verbose:
        if options.limit == 0:
            print 'Looking for ALL keys matching the supplied regex patterns...'
        else:
            print 'Looking for at most %d keys matching the supplied regex patterns...' % options.limit
    for key in details:
        # tuple format: ('key_name', '\d+ b', '\d+ s')
        name, size, time = key
        for p in options.patterns:
            if p.search(name) is not None:
                # we found a match
                # remove the 'b' from the size in bytes and append it to the list
                sizes.append(float(rmb.sub('', size)))
                if options.verbose:
                    print key
        # stop looking if we've hit our limit on how many keys to examine
        if options.limit != 0 and len(sizes) == options.limit:
            break

    matches = len(sizes)
    total_size = sum(sizes)
    if options.verbose: print
    print 'Total number of slabs: %d' % total_slabs
    print 'Total number of matched objects in the cache: %d' % (matches)
    print 'Total size of all matched objects in the cache (in bytes): %d' % (total_size)
    if matches > 0:
        print 'Size statistics for the matched results (in bytes):'
        print '  Smallest:\t\t%d' % (min(sizes))
        print '  Largest:\t\t%d' % (max(sizes))
        print '  Average:\t\t%.2f' % (total_size/matches)
        print '  Median:\t\t%.2f' % (valueatpercentile(sizes, 50))
        print '  90th percentile:\t%.2f' % (valueatpercentile(sizes, 90))
        print '  95th percentile:\t%.2f' % (valueatpercentile(sizes, 95))
        print '  99th percentile:\t%.2f' % (valueatpercentile(sizes, 99))
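A hypothetical invocation of the script above (the file name is illustrative):

python mc_key_sizes.py -h cache01.example.com -p 11211 -r '^session' -l 0 -v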
Example #13
from memcached_stats import MemcachedStats

import httplib
import json
import time

def report_to_elasticsearch(host, port, data):
  server = httplib.HTTPConnection(host, port)
  headers = {'Content-type': 'application/json'}
  server.request('POST', '/datastore-memcache/appscale', json.dumps(data), headers)
  response = server.getresponse()
  print 'Operation completed with status:', response.status

if __name__ == '__main__':
  mem = MemcachedStats()
  result = { 'timestamp' : int(time.time() * 1000) }
  data = mem.stats()
  for k,v in data.items():
    try:
      result[k] = int(v)
      continue
    except ValueError:
      pass
    try:
      result[k] = float(v)
    except ValueError:
      result[k] = v
  report_to_elasticsearch('128.111.179.159', 9200, result)
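The document POSTed above is flat JSON: each stats() value coerced to int or float where possible, plus a millisecond timestamp. Roughly (values illustrative):

# {'timestamp': 1700000000000, 'get_hits': 1234, 'rusage_user': 0.56,
#  'version': '1.4.25', ...}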
Example #14
from memcached_stats import MemcachedStats

mem = MemcachedStats()

keys = mem.keys()

print keys

import memcached2
memcache = memcached2.Memcache(('memcached://localhost/',))

import pprint
pprint.pprint(memcache.get_multi(keys))