def testStrAppendShouldSucceed(self):
    """Test JSONStrAppend: appending returns the new string length."""
    root = Path.rootPath()
    rj.jsonset('str', root, 'foo')
    # Append with an explicit path, then with the default (root) path
    self.assertEqual(6, rj.jsonstrappend('str', 'bar', root))
    self.assertEqual(9, rj.jsonstrappend('str', 'baz'))
    self.assertEqual('foobarbaz', rj.jsonget('str', root))
def testNumIncrByShouldSucceed(self):
    """Test JSONNumIncrBy with integer, fractional and negative deltas."""
    root = Path.rootPath()
    rj.jsonset('num', root, 1)
    # (expected result, increment) pairs applied in sequence
    for expected, delta in ((2, 1), (2.5, 0.5), (1.25, -1.25)):
        self.assertEqual(expected, rj.jsonnumincrby('num', root, delta))
def testNumMultByShouldSucceed(self):
    # NOTE(review): original docstring said "JSONNumIncrBy" (copy/paste);
    # this test exercises JSON.NUMMULTBY.
    "Test JSONNumMultBy"
    rj.jsonset('num', Path.rootPath(), 1)
    # 1 * 2 = 2, then 2 * 2.5 = 5, then 5 * 0.5 = 2.5
    self.assertEqual(2, rj.jsonnummultby('num', Path.rootPath(), 2))
    self.assertEqual(5, rj.jsonnummultby('num', Path.rootPath(), 2.5))
    self.assertEqual(2.5, rj.jsonnummultby('num', Path.rootPath(), 0.5))
def testStrLenShouldSucceed(self):
    """Test JSONStrLen before and after an append."""
    root = Path.rootPath()
    rj.jsonset('str', root, 'foo')
    self.assertEqual(3, rj.jsonstrlen('str', root))
    rj.jsonstrappend('str', 'bar', root)
    self.assertEqual(6, rj.jsonstrlen('str', root))
def redis_test():
    """Demo of ReJSON basics: set/get, delete, array ops, and pipelining."""
    rj = Client(host='localhost', port=6379)

    # Store a document under the key `obj`
    document = {
        'answer': 42,
        'arr': [None, True, 3.14],
        'truth': {
            'coord': 'out there'
        },
    }
    rj.jsonset('obj', Path.rootPath(), document)

    # Read a nested value back
    print('Is there anybody... {}?'.format(
        rj.jsonget('obj', Path('.truth.coord'))))

    # Delete something (or perhaps nothing), append something and pop it
    rj.jsondel('obj', Path('.arr[0]'))
    rj.jsonarrappend('obj', Path('.arr'), 'something')
    print('{} popped!'.format(rj.jsonarrpop('obj', Path('.arr'))))

    # Replace a scalar value in place
    rj.jsonset('obj', Path('.answer'), 2.17)

    # The client pipelines just like regular redis-py
    pipe = rj.pipeline()
    pipe.set('foo', 'bar')
    pipe.jsonset('baz', Path.rootPath(), 'qaz')
    pipe.execute()
def purge_cache():
    """Empty the tweet cache and reset the cached-id queue list."""
    # Reset the most recent list of tweets
    rj_queue.jsonset('tweet_ids', Path.rootPath(), [])
    # Drop every document currently held in the cache client
    for cache_key in rj_cache.keys():
        rj_cache.jsondel(cache_key, Path.rootPath())
    print("Cache cleared")
    return 'Cache cleared..', status.HTTP_200_OK
def testArrAppendShouldSucceed(self):
    """Test JSONArrAppend: single, multiple, and star-unpacked values."""
    root = Path.rootPath()
    rj.jsonset('arr', root, [1])
    self.assertEqual(2, rj.jsonarrappend('arr', root, 2))
    self.assertEqual(4, rj.jsonarrappend('arr', root, 3, 4))
    self.assertEqual(7, rj.jsonarrappend('arr', root, *[5, 6, 7]))
def testMGetShouldSucceed(self):
    """Test JSONMGet across two keys."""
    root = Path.rootPath()
    rj.jsonset('1', root, 1)
    rj.jsonset('2', root, 2)
    fetched = rj.jsonmget(root, '1', '2')
    self.assertListEqual([1, 2], fetched)
def checkifObjectExist():
    """Seed the 'user' and 'tweet' documents in Redis when absent.

    Writes the module-level ``profileRequest``/``tweetRequest`` templates
    whenever the key is missing entirely or lacks the expected
    sub-document.
    """
    # `is None` replaces `== None`: identity is the correct idiom for the
    # None check (and is immune to objects overriding __eq__).
    if rj.jsonget('user', Path.rootPath()) is None or rj.jsonget(
            'user', Path('.profileRequest')) is None:
        rj.jsonset('user', Path.rootPath(), profileRequest)
    if rj.jsonget('tweet', Path.rootPath()) is None or rj.jsonget(
            'tweet', Path('.tweetRequest')) is None:
        rj.jsonset('tweet', Path.rootPath(), tweetRequest)
def testArrPopShouldSucceed(self):
    """Test JSONArrPop with explicit, negative, default, and zero indices."""
    root = Path.rootPath()
    rj.jsonset('arr', root, [0, 1, 2, 3, 4])
    self.assertEqual(4, rj.jsonarrpop('arr', root, 4))
    self.assertEqual(3, rj.jsonarrpop('arr', root, -1))
    # Default index pops the last remaining element
    self.assertEqual(2, rj.jsonarrpop('arr', root))
    self.assertEqual(0, rj.jsonarrpop('arr', root, 0))
    self.assertListEqual([1], rj.jsonget('arr'))
def on_message(message: IncomingMessage):
    """Consume one queue message and upsert/insert its payload into Redis.

    Expected message body (JSON):
        {'key': <resource-group>, 'path_param': <SHA1 sub-key>, 'data': <packet>}

    The message is only ack()ed after a successful Redis write; on JSON or
    Redis errors the function returns 'failed' without acking, so the
    broker may redeliver.
    """
    try:
        data_dict = json.loads(message.body.decode())
        # check if the packet={'key':[rg]<String>,'path_param':[_SHA1_attr/d]<String>,'data':[adapter packet]<JSON>} from the queue is empty
        if data_dict is None or not bool(data_dict):
            print('> data_dict is empty. data_dict---- ' + str(data_dict))
            return 'failed'
        else:
            # print('> redis_key: ' + data_dict['key'] + '\nredis_path_param: ' + data_dict['path_param'] + '\nadapter_data_packet: ' + data_dict['data'])
            # this needs to be tested
            # NOTE(review): exp_val is computed but never used below; it
            # also doubles as a key/shape validation (missing keys raise
            # and fall through to the JSONDecodeError handler? — no, a
            # KeyError here would propagate uncaught. TODO confirm intent.
            exp_val = data_dict['data']['id'] + '_' + data_dict['data'][
                'observationDateTime']
    except json.decoder.JSONDecodeError as json_error:
        print('> JsonDecodeError!!!!' + str(json_error))
        return 'failed'
    try:
        # check if redis already has existing data?
        chck_if_rg_exists = redis_client.jsonget(data_dict['key'],
                                                 Path.rootPath())
        # (nil) - never seen packet -> None, Insert
        # {}, !exists
        # data, upsert
        # redis already has previous data
        if chck_if_rg_exists is not None:
            print('> ' + str(data_dict['key']) + 'exists.')
            print('> Upserting ' + str(data_dict['data']) + ' at .' +
                  data_dict['path_param'])
            print('> Upsertion still in progress...')
            # Write the packet at .<key>.<path_param> inside the existing doc
            redis_client.jsonset(
                data_dict['key'],
                '.' + data_dict['key'] + '.' + data_dict['path_param'],
                data_dict['data'])
            print('> Upsertion successful!')
            message.ack()
        # First time the ingestor receives a packet belonging to RG
        else:
            print('> RG=' + data_dict['key'] +
                  ' is not present in Redis. Inserting RG with {} at root.')
            # create first entry in the redis server
            # origin = {rg: {SHA : {}}}
            origin = {data_dict['key']: {data_dict['path_param']: {}}}
            redis_client.jsonset(data_dict['key'], Path.rootPath(), origin)
            print('> Insertion still in progress...')
            # insert data now
            # JSON.GET resource-group-key/redis-key .path_param (SHA1...)
            # JSON.GET resource-group .resource-group.SHA1.... {adapter_data}
            redis_client.jsonset(
                data_dict['key'], '.' + data_dict['key'] + '.'
                + data_dict['path_param'], data_dict['data'])
            print('> Insertion successful!')
            message.ack()
    except redis.exceptions.ResponseError as r_error:
        print('> Response Error from Redis!!!! ' + str(r_error))
        return 'failed'
def testToggleShouldSucceed(self):
    """Test JSONToggle on a boolean, then on an invalid (numeric) value."""
    root = Path.rootPath()
    rj.jsonset('bool', root, False)
    self.assertTrue(rj.jsontoggle('bool', root))
    self.assertFalse(rj.jsontoggle('bool', root))
    # Toggling a non-boolean value must raise a server-side error
    rj.jsonset('num', root, 1)
    with self.assertRaises(redis.exceptions.ResponseError):
        rj.jsontoggle('num', root)
def get_all_tweets():
    """Return every cached tweet as a JSON list, or a 500 on failure."""
    try:
        cached_ids = rj_queue.jsonget('tweet_ids', Path.rootPath())
        # One cache lookup per cached id, in queue order
        data = [rj_cache.jsonget(tid, Path.rootPath()) for tid in cached_ids]
        print(data)
        return dumps(data), status.HTTP_200_OK
    except Exception as e:
        print(e)
        return "Error", status.HTTP_500_INTERNAL_SERVER_ERROR
def testObjKeysShouldSucceed(self):
    """Test JSONObjKeys: returned keys match the stored object's keys."""
    obj = {'foo': 'bar', 'baz': 'qaz'}
    rj.jsonset('obj', Path.rootPath(), obj)
    keys = rj.jsonobjkeys('obj', Path.rootPath())
    # sorted() over a manual list-build + .sort(); iterating the dict
    # directly also drops the six.iterkeys dependency (works on py2/py3).
    self.assertListEqual(sorted(obj), sorted(keys))
def append_to_path(existing, addition):
    """
    Append a key to an existing subpath

    :param existing: a subpath string
    :param addition: a new key in the subpath
    :return: a path string
    :rtype: str
    """
    # Special-case the root path so we don't produce a double separator
    # (rootPath() already ends in the path delimiter — presumably '.';
    # verify against the rejson client if this ever changes).
    if existing == Path.rootPath():
        return Path.rootPath() + addition
    return "{}.{}".format(existing, addition)
def send_to_db():
    """Drain the backend tweet queue into MongoDB (best effort).

    Reads the 'tweets' list from Redis; when non-empty, clears it and
    bulk-inserts the drained tweets into the 'tweets' collection.
    Failures are swallowed so the polling loop keeps running.
    """
    print("Listening for tasks in the backend queue ..")
    try:
        tweets = rj_queue_backend.jsonget('tweets', Path.rootPath())
        if len(tweets) > 0:
            rj_queue_backend.jsonset('tweets', Path.rootPath(), [])
            # putting the tweets_cache to database
            db['tweets'].insert_many(tweets)
            print("Found a List of tweets.. Sending it to database..")
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; the drain stays best-effort for everything else
        # (e.g. jsonget returning None makes len() raise TypeError).
        print('no items to send to db..')
def testArrInsertShouldSucceed(self):
    """Test JSONArrInsert: splice three values into the middle."""
    root = Path.rootPath()
    rj.jsonset('arr', root, [0, 4])
    new_length = rj.jsonarrinsert('arr', root, 1, *[1, 2, 3])
    self.assertEqual(5, new_length)
    self.assertListEqual([0, 1, 2, 3, 4], rj.jsonget('arr'))
def init_info_db():
    """Load the 'info' document, seeding the initial schema if missing.

    :return: the existing or newly-created info document
    """
    info = rj.jsonget("info", ".")
    # Guard clause (no redundant parens, no else-after-return): an
    # existing document is returned as-is.
    if info:
        print(info)
        return info
    # First run: create the top-level documents the app expects
    info = {"uniswap": {"index": 0}}
    rj.jsonset('info', Path.rootPath(), info)
    rj.jsonset('pairs', Path.rootPath(), {})
    rj.jsonset('tokens', Path.rootPath(), {})
    rj.jsonset('paths', Path.rootPath(), {})
    return info
def list_set(self, key, idx, value): """ Sets the list element at index to value arguments: key -- list access key idx -- a zero based index where the set should be performed value -- a JSON-serializable value to be inserted """ # TODO the operation pair should be atomic to avoid possible race conditions self.redis.jsonarrpop(key, Path.rootPath(), idx) return self.redis.jsonarrinsert(key, Path.rootPath(), idx, value)
def applyRecordLevelUpdates():
    """Replay record-level CRUD entries from Redis in chronological order.

    Scans keys beginning with 'r', buckets them by CRUD flag
    ('i' insert / 'u' update / 'x' delete), then applies all inserts,
    then all updates, then all deletes — each bucket sorted by the
    timestamp embedded in the rebuilt key string.
    """
    # Buckets keyed by "timestamp-prefix-crud-suffix" so that sorted()
    # yields chronological order within each CRUD class.
    keys_i = dict()
    keys_u = dict()
    keys_x = dict()
    # import pdb; pdb.set_trace();
    # A: get all 'r'/'i' keys (fresh_keys_downloaded list) and sort by timestamp
    search_pattern = "r"
    for s in search_pattern:
        for i, key in enumerate(rj.keys(s + '*')):
            # get semantics from key
            prefix, crud, timestamp, suffix = prefix_crud_timestamp_suffix(key)
            print("prefix, crud, timestamp, suffix: ", prefix, crud,
                  timestamp, suffix)
            if crud == 'i':
                # print("key, prefix, crud, timestamp, suffix: ", key, prefix, crud, timestamp, suffix)
                # import pdb; pdb.set_trace();
                # I add timestamp first to process chronologically
                keys_i[timestamp + '-' + prefix + '-' + 'i' + '-' + suffix] = key
            elif crud == 'u':
                # print("key, prefix, crud, timestamp, suffix: ", key, prefix, crud, timestamp, suffix)
                # import pdb; pdb.set_trace();
                # I add timestamp first to process chronologically
                keys_u[timestamp + '-' + prefix + '-' + 'u' + '-' + suffix] = key
            elif crud == 'x':
                # print("key, prefix, crud, timestamp, suffix: ", key, prefix, crud, timestamp, suffix)
                # import pdb; pdb.set_trace();
                # I add timestamp first to process chronologically
                keys_x[timestamp + '-' + prefix + '-' + 'x' + '-' + suffix] = key
    # B. apply inserts one by one to current records
    # Note prefix starts with 'r', so get rid of that
    # import pdb; pdb.set_trace()
    for k in sorted(keys_i.items()):
        print("**** :", k)
        # k is a (bucket_key, redis_key) tuple: parse the bucket key back
        # into its parts, fetch the record, and apply the insert.
        timestamp, prefix, crud, suffix = timestamp_prefix_crud_suffix(k[0])
        record = rj.jsonget(k[1], Path.rootPath())
        apply(prefix[1:], 'i', suffix, record)
    # C. apply updates one by one to current records
    for k in sorted(keys_u.items()):
        timestamp, prefix, crud, suffix = timestamp_prefix_crud_suffix(k[0])
        record = rj.jsonget(k[1], Path.rootPath())
        apply(prefix[1:], 'u', suffix, record)
    # D. apply deletes one by one to current records
    for k in sorted(keys_x.items()):
        timestamp, prefix, crud, suffix = timestamp_prefix_crud_suffix(k[0])
        record = rj.jsonget(k[1], Path.rootPath())
        apply(prefix[1:], 'x', suffix, record)
def update_cache(tweet_id=None):
    """Refresh the tweet cache from MongoDB.

    With ``tweet_id``: refresh (or evict) just that cached tweet, also
    removing its id from the queue's 'tweet_ids' list on eviction.
    Without: refresh every currently-cached tweet, evicting any that no
    longer exist in the database.
    """
    # know what tweets are cached from tweet_ids in the q
    cache = rj_queue.jsonget('tweet_ids', Path.rootPath())
    if tweet_id:
        isPresentinCache = rj_cache.jsonget(tweet_id, Path.rootPath())
        if isPresentinCache:  # if present, update.. else leave it
            tweet = remove_mongo_id(list(db['tweets'].find({'id': tweet_id})))
            if len(tweet) > 0:
                rj_cache.jsonset(tweet_id, Path.rootPath(), tweet[0])
            else:  # gone from the DB: evict from cache and id list
                print('Deleting...')
                rj_cache.jsondel(tweet_id, Path.rootPath())
                all_tweets = rj_queue.jsonget('tweet_ids', Path.rootPath())
                rj_queue.jsonarrpop('tweet_ids', Path.rootPath(),
                                    all_tweets.index(tweet_id))
    else:
        for item in cache:  # to update many tweets
            db_item = remove_mongo_id(list(db['tweets'].find({'id': item})))
            if len(db_item) > 0:
                rj_cache.jsonset(item, Path.rootPath(), db_item[0])
            else:
                print('deleting..')
                # BUG FIX: delete the current item — `tweet_id` is None
                # in this branch, so the original deleted nothing useful.
                rj_cache.jsondel(item, Path.rootPath())
    print('cache updated')
def update_local_copy(self, channel, message): """ Update the local copy of the data stored under this channel name in redis. Args: channel: the name of the channel that was published. message: message published on that channel Returns: None """ #logger.info("SILENT_SUBSCRIBER @{} : channel={} message={}".format(self.prefix, channel, message)) try: message = message.decode("utf-8") except Exception as e: return if message != "Publish": return if channel == self.prefix: self.local_copy = self.read_from_redis(Path.rootPath()) return path = channel[len(self.prefix):] redis_value = self.read_from_redis(path) #logger.debug("SILENT_SUBSCRIBER @{} : Read from Redis: {}".format(self.prefix, redis_value)) insert_into_dictionary(self.local_copy, path_to_key_sequence(path), redis_value)
def send_to_redis(self, set_path, set_value): """ Publisher equivalent of Writer ``send_to_redis`` This is an equivalent function to ``Writer``'s ``send_to_redis`` method but also publishes a message indicating what channel has been updated. Args: set_path (str): path underneath JSON key to set set_value: value to set Returns: None """ #logger.info("PUBLISH {} {} = {}".format(self.top_key_name, set_path, type(set_value))) #logger.debug("PUBLISH {} {} Metadata Update?: {}".format(self.top_key_name, set_path, self.do_metadata_update)) self.__process_metadata(set_path, set_value) #logger.debug("PUBLISH {} {} Metadata: {}".format(self.top_key_name, set_path, self.metadata)) self.__publish_non_serializables(set_path, set_value) self.__publish_serializables(set_path, set_value) # Addition to Writer class if set_path == Path.rootPath(): set_path = "" channel_name = "__pubspace@0__:" + self.top_key_name + set_path self.pipeline.publish(channel_name, self.message) # Resume Writer Class self.pipeline.execute()
def store_cache(query, filter, unwind, sort, limit, context, cache_key):
    """Run the context-appropriate DNS aggregation and cache each result.

    :param query: match/geo query for the first pipeline stage
    :param filter: ``$project`` specification (shadows builtin; kept for
        caller compatibility)
    :param unwind: field spec for the ``$unwind`` stage ('unwind' context)
    :param sort: ``$sort`` specification
    :param limit: maximum number of documents
    :param context: one of 'text', 'spatial', 'unwind' (anything else
        behaves like 'text')
    :param cache_key: Redis set that collects the cached doc ids
    """
    # All contexts share the same tail stages; only the head differs,
    # so build the pipeline once instead of four near-identical copies.
    if context == 'spatial':
        # $geoNear must be the first stage of an aggregation pipeline
        stages = [{'$geoNear': query}]
    else:
        stages = [{'$match': query}]
        if context == 'unwind':
            stages.append({'$unwind': unwind})
    stages += [{
        '$limit': limit
    }, {
        '$addFields': extra_fields(context)
    }, {
        '$project': filter
    }, {
        '$sort': sort
    }]
    docs = mongo.db.dns.aggregate(stages)
    for doc in docs:
        uid = hash(uuid.uuid4())
        expire = 3600 * 24  # one day
        # json round-trip normalizes BSON types (ObjectId, datetime, ...)
        cache.jsonset(uid, Path.rootPath(),
                      json.loads(json.dumps(doc, default=json_util.default)))
        cache.sadd(cache_key, uid)
        cache.expire(cache_key, expire)
        cache.expire(uid, expire)
def testJSONSetGetDelShouldSucceed(self):
    """Test basic JSONSet/Get/Del round trip."""
    root = Path.rootPath()
    self.assertTrue(rj.jsonset('foo', root, 'bar'))
    self.assertEqual('bar', rj.jsonget('foo'))
    self.assertEqual(1, rj.jsondel('foo'))
    # The key must be gone entirely after deleting its root
    self.assertFalse(rj.exists('foo'))
def post(self):
    """Create a book record from the request body and store it in Redis.

    :return: confirmation message plus "<id>:<parsed args>" payload
    """
    # Generate a random six digit hex id for the book entry
    book_id = binascii.b2a_hex(os.urandom(6))
    # All fields are mandatory: declare them in one pass instead of
    # seven copy-pasted add_argument calls.
    parser = reqparse.RequestParser()
    for field in ('book_title', 'author', 'release_year', 'genre', 'read',
                  'page_nos', 'rating'):
        parser.add_argument(field, required=True)
    # Parse the arguments into an object
    args = parser.parse_args()
    # Write the object to redis
    rj.jsonset(book_id, Path.rootPath(), args)
    response_data = "{0}:{1}".format(book_id, args)
    return {'message': 'Book record added', 'data': response_data}
def post(self, request):
    """Create a Task, mirror it into ReJSON, and notify the assignee.

    Side effects: one (or two) ORM writes, an optional notification, and
    three ReJSON commands against a localhost Redis.
    """
    validated_data = request.serializer.validated_data
    # NOTE(review): datetime.now() is timezone-naive — confirm the project
    # doesn't expect timezone-aware timestamps here.
    task = Task.objects.create(title=validated_data['title'],
                               description=validated_data['description'],
                               status=Task.CREATED,
                               user_created=request.user,
                               date_create_task=datetime.now())
    if validated_data['user_assigned']:
        task.user_assigned = validated_data['user_assigned']
        task.save()
    # Only notify when the task is assigned to someone other than its creator
    if validated_data['user_assigned'] and validated_data[
            'user_assigned'] != request.user:
        AddNotificationTaskStatus(task.user_assigned, task, "created")
    # add in ReJSON database
    rj = Client(
        host='localhost',
        port=6379,
    )
    rj.jsonset('task:' + str(task.id), Path.rootPath(),
               TaskSerializer(task).data)
    rj.execute_command('JSON.NUMINCRBY acc .total 1')
    # NOTE(review): maxId is overwritten unconditionally — presumably ids
    # are monotonically increasing; verify, otherwise maxId can go down.
    rj.execute_command('JSON.SET acc .maxId ' + str(task.id))
    return Response(status=201)
def key_sequence_to_path(sequence: List[str]):
    """
    Convert a sequence of key accesses into a path string representing a
    path below the top level key in redis

    :param sequence: list of strings representing key accesses
    :return: a subpath string
    """
    # Join the keys first, then anchor the result under the root path
    joined_keys = ".".join(sequence)
    return Path.rootPath() + joined_keys
def wrapper(*args, **kwargs):
    """Resolve the request's bearer token into ``current_user``.

    Calls the wrapped view ``f`` with ``current_user`` set when the JWT
    is valid and its hash is still present in Redis; otherwise calls it
    with ``current_user=None`` (expired/malformed tokens degrade to an
    anonymous request rather than failing).
    """
    auth_header = request.headers.get('Authorization')
    current_user = None
    if auth_header:
        try:
            # "Bearer <token>" — IndexError below means no token part
            access_token = auth_header.split(' ')[1]
            try:
                token = jwt.decode(access_token,
                                   current_app.config['SECRET_KEY'])
                current_user = User.find_by_username(token['uid'])
                # Token must still be registered in Redis (logout revokes it
                # — presumably; confirm against the login/logout handlers)
                user_token = rj.jsonget(token['hash'], Path.rootPath())
                if not user_token:
                    current_user = None
            except jwt.ExpiredSignatureError as e:
                return f(*args, **kwargs, current_user=None)
                # raise e
            except (jwt.DecodeError, jwt.InvalidTokenError) as e:
                return f(*args, **kwargs, current_user=None)
                # raise e
            except:
                # NOTE(review): bare except — any other failure aborts 401
                rest_api.abort(401, TOKEN['unknown'])
        except IndexError:
            # Header present but not in "Bearer <token>" form
            return f(*args, **kwargs, current_user=None)
            # raise jwt.InvalidTokenError
    else:
        return f(*args, **kwargs, current_user=None)
    return f(*args, **kwargs, current_user=current_user)
def testJSONSetGetDelNonAsciiShouldSucceed(self):
    """Test non-ascii JSONSet/Get/Del."""
    root = Path.rootPath()
    value = 'hyvää-élève'
    self.assertTrue(rj.jsonset('notascii', root, value))
    # The default GET escapes non-ASCII, so the raw value won't match...
    self.assertNotEqual(value, rj.jsonget('notascii'))
    # ...while no_escape returns it verbatim
    self.assertEqual(value, rj.jsonget('notascii', no_escape=True))
    self.assertEqual(1, rj.jsondel('notascii'))
    self.assertFalse(rj.exists('notascii'))