def redis_store(user):
    """Read up to 10000 rows from prety_large_table and issue a JSON.DEL
    for the per-user RedisJSON key of each row ('KEY_<user>_<rownum>').

    :param user: user identifier embedded in each redis key
    :return: an empty list (kept for interface compatibility with callers
             expecting a JSON-serializable result)
    """
    # BUG FIX: the original did `conn = mysql.connect` (no call), binding the
    # function object itself, so the following conn.cursor() raised.
    conn = mysql.connect()
    try:
        cursor = conn.cursor()
        cursor.execute('select '
                       ' field02, '
                       ' CONVERT(field03, char(200)) as field03, '
                       ' CONVERT(field04, char(200)) as field04, '
                       ' CONVERT(field05, char(200)) as field05, '
                       ' CONVERT(field06, char(200)) as field06, '
                       ' field07, '
                       ' field08, '
                       ' field09, '
                       ' field10 '
                       ' from '
                       ' prety_large_table'
                       ' limit 10000;')
        rv = cursor.fetchall()
    finally:
        # Close even when the query fails — the original leaked the
        # connection on error. (An unused `row_headers` local was dropped.)
        conn.close()
    json_data = []
    # Key index follows row position; only the count matters, not row content.
    for row, _result in enumerate(rv):
        key = 'KEY_' + user + '_' + str(row)
        redis.execute_command('JSON.DEL', key)
    return json_data
def createhotel():
    """Create a hotel: autocomplete fragments, geo-index entry, and a
    per-hotel property list; echo the hotel back with HTTP 201."""
    payload = request.json
    if not payload or 'displayname' not in payload or 'id' not in payload:
        abort(400)
    hotel = {
        'id': payload['id'],
        'displayname': payload['displayname'],
        'acname': payload['acname'],
        'image': payload.get('image', ''),
        'latitude': payload.get('latitude', 0),
        'longitude': payload.get('longitude', 0),
        'thirdpartyrating': payload.get('thirdpartyrating', 0),
    }
    hotelname = hotel['acname']
    # Every proper prefix of the autocomplete name feeds the sorted set.
    for end in range(1, len(hotelname)):
        redis.zadd('hotelfragments', 0, hotelname[:end])
    # Full name carries an id marker so a completed entry can be resolved.
    redis.zadd('hotelfragments', 0, hotelname + '%H-' + str(hotel['id']) + '%')
    hotelkey = 'H-' + str(hotel['id'])
    redis.execute_command('geoadd', 'hotels', '%f' % hotel['longitude'], '%f' % hotel['latitude'], hotelkey)
    # Rebuild the property list from scratch, one RPUSH per field, in the
    # fixed order readers index by.
    redis.delete(hotelkey)
    for field in ('id', 'displayname', 'acname', 'image', 'latitude', 'longitude', 'thirdpartyrating'):
        redis.rpush(hotelkey, hotel[field])
    return jsonify({'hotel': hotel}), 201
def _insert_agg_data(self, redis, key, agg_type): agg_key = '%s_agg_%s_10' % (key, agg_type) assert redis.execute_command('TS.CREATE', key) assert redis.execute_command('TS.CREATE', agg_key) assert redis.execute_command('TS.CREATERULE', key, agg_key, "AGGREGATION", agg_type, 10) values = (31, 41, 59, 26, 53, 58, 97, 93, 23, 84) for i in range(10, 50): assert redis.execute_command('TS.ADD', key, i, i // 10 * 100 + values[i % 10]) return agg_key
def want_some_rest():
    """Serve base_query() results, cache-aside via RedisJSON key 'test'."""
    cached = redis.execute_command('JSON.GET', 'test')
    if cached is not None:
        # Cache hit: deserialize the stored document and return it.
        print('cached')
        return jsonify(json.loads(cached))
    # Cache miss: compute, store at the JSON root, then return.
    query = base_query()
    redis.execute_command('JSON.SET', 'test', '.', json.dumps(query))
    print('stored')
    return jsonify(query)
def create_compacted_key(redis, i, source, agg, bucket):
    """Recreate the *agg*/*bucket* compaction series for *source* and wire
    the downsampling rule from source to it."""
    dest = '%s_%s_%s' % (source, agg, bucket)
    # Drop any stale series so the create below starts clean.
    redis.delete(dest)
    create_args = [
        'ts.create', dest,
        'RETENTION', 0,
        'CHUNK_SIZE', 360,
        'LABELS', 'index', i, "aggregation", agg, "bucket", bucket,
    ]
    redis.execute_command(*create_args)
    redis.execute_command('ts.createrule', source, dest, 'AGGREGATION', agg, bucket)
def _insert_data(redis, key, start_ts, samples_count, value): """ insert data to key, starting from start_ts, with 1 sec interval between them :param redis: redis connection :param key: name of time_series :param start_ts: beginning of time series :param samples_count: number of samples :param value: could be a list of samples_count values, or one value. if a list, insert the values in their order, if not, insert the single value for all the timestamps """ for i in range(samples_count): value_to_insert = value[i] if type(value) == list else value assert redis.execute_command('TS.ADD', key, start_ts + i, value_to_insert)
def hotelsearchbydistance(latitude, longitude):
    """Return hotels within ?radius= km of (latitude, longitude),
    nearest first, each with its distance in km (3 decimals)."""
    radius = int(request.args.get('radius'))
    matches = redis.execute_command('georadius', 'hotels', '%f' % longitude, '%f' % latitude, '%d' % radius, 'km', 'WITHDIST', 'ASC')
    hotels = []
    # Each match is (hotelkey, distance); the key points at the property
    # list written at creation time, read back positionally.
    for match in matches:
        props = redis.lrange(match[0], 0, -1)
        hotels.append({
            'id': props[0],
            'displayname': props[1],
            'acname': props[2],
            'image': props[3],
            'latitude': props[4],
            'longitude': props[5],
            'distance': round(float(match[1]), 3),
            'thirdpartyrating': int(props[6]),
        })
    return jsonify({'hotelsearch': hotels})
# RedisBloom bloom-filter benchmark: reserve a filter, bulk-add q items
# through a pipeline, verify there are no false negatives, then probe
# values never added to measure the false-positive rate.
# NOTE: Python 2 script (print statements).
import requests
import redis
import time
redis = redis.Redis(host='localhost', port=6379, db=0)
redis_pipe = redis.pipeline()
redis.flushall()  # start from an empty DB so BF.RESERVE cannot collide
start_time = time.time()
error_rate = 0.001
q = 10000
# NOTE(review): reserved capacity is q / 10 (1000) while q (10000) items are
# added below — presumably deliberate, to exercise sub-filter scaling; confirm.
redis.execute_command('BF.RESERVE bloom_filter ', error_rate, q / 10)
for x in range(q):
    redis_pipe.execute_command('bf.add bloom_filter ' + str(x))
redis_pipe.execute()
# Every added item must report present — a Bloom filter has no false negatives.
for x in range(q):
    redis_pipe.execute_command('bf.exists bloom_filter ' + str(x))
responses = redis_pipe.execute()
for response in responses:
    assert (response == 1)
print 'Test for False Positives'
# Probe 100*q values that were never added; any hit here is a false positive.
for x in range(q, q * 101):
    redis_pipe.execute_command('bf.exists bloom_filter ' + str(x))
responses = redis_pipe.execute()
false_positive = 0.0
def _insert_data(self, redis, key, start_ts, samples_count, value): for i in range(samples_count): assert redis.execute_command('TS.ADD', key, start_ts + i, 5)
def _get_ts_info(self, redis, key): info = redis.execute_command('TS.INFO', key) return dict([(info[i], info[i+1]) for i in range(0, len(info), 2)])
# Benchmark: time pipelined ts.add of 1M sequential samples into a fresh
# series, then read the whole range back.
# NOTE: Python 2 script (print statements).
import requests
import redis
import time
redis = redis.Redis(host='localhost', port=6379, db=0)
redis_pipe = redis.pipeline()
redis.flushall()
print "start benchmark"
start_time = time.time()
num_sample = 1024 * 1024
redis_pipe.execute_command('ts.create test')
# Queue every ts.add in the pipeline; nothing is sent until execute().
for i in range(num_sample):
    redis_pipe.execute_command('ts.add test', i, i)
redis_pipe.execute()
total_time = time.time() - start_time
# Full-range read; the result is unused (timing/server-side effect only).
redis.execute_command('ts.range test 0 -1')
print total_time
# Benchmark: pipelined ts.add of ~1M in-order samples (timestamp step 2)
# into an uncompressed series, flushing the pipeline roughly every 999 ids.
# NOTE: Python 2 script (print statements).
import redis
import time
redis = redis.Redis(host='localhost', port=6379, db=0)
redis.execute_command('flushall')
redis_pipe = redis.pipeline()
print "start benchmark"
print "no out-of-order"
start_time = time.time()
name = "rts"
start = 1000000
num_sample = 1024 * 1024 + 1
redis.execute_command('ts.create', name, 'uncompressed')
for i in range(start, start + num_sample, 2):
    redis_pipe.execute_command('ts.add', name, i, i)
    # Periodic flush keeps the pipeline's memory bounded.
    if i % 999 == 0:
        redis_pipe.execute()
# Flush whatever is still queued after the loop.
redis_pipe.execute()
print time.time() - start_time
def create_compacted_key(redis, i, source, agg, bucket):
    # Build and register the compaction series "<source>_<agg>_<bucket>" for
    # *source*, then create the downsampling rule that feeds it.
    dest = '%s_%s_%s' % (source, agg, bucket)
    # Drop any previous series with the same name before re-creating it.
    redis.delete(dest)
    # NOTE(review): the positional arguments here (retention 0, chunk size
    # 360, 'k=v' label strings) and the createrule order (source, agg,
    # bucket, dest) match an old RedisTimeSeries API. The current API uses
    # keyword forms: TS.CREATE ... RETENTION .. CHUNK_SIZE .. LABELS k v ...
    # and TS.CREATERULE src dest AGGREGATION agg bucket (cf. the other
    # create_compacted_key in this file). Confirm the targeted module
    # version before changing.
    redis.execute_command('ts.create', dest, 0, 360, 'index=%s' % i, "aggregation=%s" % agg, "bucket=%s" % bucket)
    redis.execute_command('ts.createrule', source, agg, bucket, dest)
# Benchmark: pipelined in-order ts.add (odd timestamps, step 2) into an
# uncompressed series; prints timing and TS.INFO, then flushes and starts
# the "out-of-order" phase (continues beyond this chunk).
# NOTE: Python 2 script (print statements).
import redis
import time
redis = redis.Redis(host='localhost', port=6379, db=0)
redis.execute_command('flushall')
redis_pipe = redis.pipeline()
print "start benchmark"
print "no out-of-order"
start_time = time.time()
name = "rts"
start = 1000001
num_sample = 1024 * 1024 + 1
redis.execute_command('ts.create', name, 'uncompressed')
for i in range(start, start + num_sample, 2):
    redis_pipe.execute_command('ts.add', name, i, i)
    # Periodic flush keeps the pipeline's memory bounded.
    if i % 999 == 0:
        #print 'execute'
        redis_pipe.execute()
# Flush the remaining queued adds.
redis_pipe.execute()
print time.time() - start_time
print redis.execute_command('ts.info', name)
redis.execute_command('flushall')
print "out-of-order"
# Stream-consumer test setup: redis connection, work queues, a bounded
# executor, and a helper that renders the current exception's traceback.
import sys
import time
import traceback
from random import randint
from queue import Queue, Empty

import redis

from executor import BoundedExecutor, threaded

redis = redis.Redis(host='localhost')
print(redis.execute_command('INFO')['redis_version'])
stream_name = 'mystream'
control_channel = Queue()
values_channel = Queue()
start_time = time.time()
et = 0.0
et_max = 60.0
executor = BoundedExecutor(2, 5)


def format_stacktrace():
    """Return the current call stack plus the active exception, formatted
    like a standard traceback.

    BUG FIX: the original used sys.exc_info() without importing sys,
    raising NameError on first use; `import sys` added above.
    """
    parts = ["Traceback (most recent call last):\n"]
    # Call stack up to (but excluding) this helper and its caller frame.
    parts.extend(traceback.format_stack(limit=25)[:-2])
    # Active exception, minus the redundant "Traceback..." header line.
    parts.extend(traceback.format_exception(*sys.exc_info())[1:])
    return "".join(parts)
import requests import redis import time redis = redis.Redis(host='localhost', port=6379, db=0) redis_pipe = redis.pipeline() redis.flushall() start_time = time.time() q = 16000000 redis.execute_command('CF.RESERVE cf ' + str(q / 64) + ' MAXITERATIONS 50') print 'add' batches = 100 for i in range(batches): for x in xrange(q / batches): redis_pipe.execute_command('cf.add cf ' + str(x + q / batches * i)) redis_pipe.execute() print("--- %s seconds to add ---" % (time.time() - start_time)) start_time = time.time() ''' print 'check' for x in xrange(q): redis_pipe.execute_command('cf.exists cf', str(x)) responses = redis_pipe.execute() for response in responses: pass
pass real_results = redis.zrevrange('bm_text', 0, 49) print("--- %s seconds ---" % (time.time() - start_time)) print('Memory used %s' % redis.memory_usage('bm_text')) print('This is an accurate list for comparison') print(redis.zcount('bm_text', '-inf', '+inf')) # test Top-K print("K Width(*k) Depth Memory Accuracy Time") k_list = [10, 50, 100, 1000] for k in k_list: real_results = redis.zrevrange('bm_text', 0, k - 1) for width in [4, 8]: for depth in [3, 7, 10]: redis.execute_command('DEL', 'bm_topk') start_time = time.time() create_topk(redis, k, width, depth) for line in page.iter_lines(): if line is not '' and line is not ' ': a = line.split() redis_pipe.execute_command('topk.add', 'bm_topk', *a) responses = redis_pipe.execute() for response in responses: pass leaderboard = redis.execute_command('topk.list', 'bm_topk') print( str(k) + " " + str(width) + " " + str(depth) + " " +