def testNumIncrByShouldSucceed(self):
    "Test JSONNumIncrBy"
    # Start at 1 and apply successive increments, checking the running value.
    rj.jsonset('num', Path.rootPath(), 1)
    for delta, expected in ((1, 2), (0.5, 2.5), (-1.25, 1.25)):
        self.assertEqual(expected,
                         rj.jsonnumincrby('num', Path.rootPath(), delta))
def testStrAppendShouldSucceed(self):
    "Test JSONStrAppend"
    root = Path.rootPath()
    rj.jsonset('str', root, 'foo')
    # Appending returns the new string length; path defaults to root when omitted.
    self.assertEqual(6, rj.jsonstrappend('str', 'bar', root))
    self.assertEqual(9, rj.jsonstrappend('str', 'baz'))
    self.assertEqual('foobarbaz', rj.jsonget('str', root))
def testNumMultByShouldSucceed(self):
    # Fixed copy-pasted docstring: this exercises JSONNumMultBy, not NumIncrBy.
    "Test JSONNumMultBy"
    rj.jsonset('num', Path.rootPath(), 1)
    # Each multiply returns the updated stored value.
    self.assertEqual(2, rj.jsonnummultby('num', Path.rootPath(), 2))
    self.assertEqual(5, rj.jsonnummultby('num', Path.rootPath(), 2.5))
    self.assertEqual(2.5, rj.jsonnummultby('num', Path.rootPath(), 0.5))
def testStrLenShouldSucceed(self):
    "Test JSONStrLen"
    root = Path.rootPath()
    # Length reflects the stored string, before and after an append.
    rj.jsonset('str', root, 'foo')
    self.assertEqual(3, rj.jsonstrlen('str', root))
    rj.jsonstrappend('str', 'bar', root)
    self.assertEqual(6, rj.jsonstrlen('str', root))
def purge_cache():
    """Empty the tweet-id queue and delete every cached JSON document."""
    root = Path.rootPath()
    # Reset the most recent list of tweets.
    rj_queue.jsonset('tweet_ids', root, [])
    # Drop each cached document at its root path.
    for cache_key in rj_cache.keys():
        rj_cache.jsondel(cache_key, root)
    print("Cache cleared")
    return 'Cache cleared..', status.HTTP_200_OK
def testArrAppendShouldSucceed(self):
    "Test JSONSArrAppend"
    root = Path.rootPath()
    rj.jsonset('arr', root, [1])
    # Each append returns the array's new length; varargs and unpacking both work.
    self.assertEqual(2, rj.jsonarrappend('arr', root, 2))
    self.assertEqual(4, rj.jsonarrappend('arr', root, 3, 4))
    self.assertEqual(7, rj.jsonarrappend('arr', root, *[5, 6, 7]))
def testMGetShouldSucceed(self):
    "Test JSONMGet"
    root = Path.rootPath()
    rj.jsonset('1', root, 1)
    rj.jsonset('2', root, 2)
    # MGET returns the documents for both keys in request order.
    self.assertListEqual([1, 2], rj.jsonmget(root, '1', '2'))
def testArrPopShouldSucceed(self):
    "Test JSONSArrPop"
    root = Path.rootPath()
    rj.jsonset('arr', root, [0, 1, 2, 3, 4])
    # Pop by explicit index, negative index, default (last element), then index 0.
    for expected, index_args in ((4, (4,)), (3, (-1,)), (2, ()), (0, (0,))):
        self.assertEqual(expected, rj.jsonarrpop('arr', root, *index_args))
    self.assertListEqual([1], rj.jsonget('arr'))
def on_message(message: IncomingMessage):
    """Consume one adapter packet from the queue and upsert it into RedisJSON.

    The packet body is expected to be JSON of the form
    {'key': <resource-group>, 'path_param': <SHA1 attr path>, 'data': <adapter packet>}.
    Returns 'failed' on decode/Redis errors; acks the message only on success.
    NOTE(review): the message is never ack'd/nack'd on the failure paths —
    confirm the broker redelivers in that case.
    """
    try:
        data_dict = json.loads(message.body.decode())
        # check if the packet={'key':[rg]<String>,'path_param':[_SHA1_attr/d]<String>,'data':[adapter packet]<JSON>} from the queue is empty
        if data_dict is None or not bool(data_dict):
            print('> data_dict is empty. data_dict---- ' + str(data_dict))
            return 'failed'
        else:
            # print('> redis_key: ' + data_dict['key'] + '\nredis_path_param: ' + data_dict['path_param'] + '\nadapter_data_packet: ' + data_dict['data'])
            # this needs to be tested
            # NOTE(review): exp_val is computed but never used below — it only
            # validates that 'id' and 'observationDateTime' exist (KeyError
            # here is NOT caught; only JSONDecodeError is). Confirm intent.
            exp_val = data_dict['data']['id'] + '_' + data_dict['data'][
                'observationDateTime']
    except json.decoder.JSONDecodeError as json_error:
        print('> JsonDecodeError!!!!' + str(json_error))
        return 'failed'
    try:
        # check if redis already has existing data?
        chck_if_rg_exists = redis_client.jsonget(data_dict['key'],
                                                 Path.rootPath())
        # (nil) - never seen packet -> None, Insert
        # {}, !exists
        # data, upsert
        # redis already has previous data
        if chck_if_rg_exists is not None:
            print('> ' + str(data_dict['key']) + 'exists.')
            print('> Upserting ' + str(data_dict['data']) + ' at .' +
                  data_dict['path_param'])
            print('> Upsertion still in progress...')
            # Write the packet at .<key>.<path_param> inside the existing doc.
            redis_client.jsonset(
                data_dict['key'],
                '.' + data_dict['key'] + '.' + data_dict['path_param'],
                data_dict['data'])
            print('> Upsertion successful!')
            message.ack()
        # First time the ingestor receives a packet belonging to RG
        else:
            print('> RG=' + data_dict['key'] +
                  ' is not present in Redis. Inserting RG with {} at root.')
            # create first entry in the redis server
            # origin = {rg: {SHA : {}}}
            origin = {data_dict['key']: {data_dict['path_param']: {}}}
            redis_client.jsonset(data_dict['key'], Path.rootPath(), origin)
            print('> Insertion still in progress...')
            # insert data now
            # JSON.GET resource-group-key/redis-key .path_param (SHA1...)
            # JSON.GET resource-group .resource-group.SHA1.... {adapter_data}
            redis_client.jsonset(
                data_dict['key'],
                '.' + data_dict['key'] + '.' + data_dict['path_param'],
                data_dict['data'])
            print('> Insertion successful!')
            message.ack()
    except redis.exceptions.ResponseError as r_error:
        print('> Response Error from Redis!!!! ' + str(r_error))
        return 'failed'
def profileService():
    """Drain queued profile requests from Redis and dispatch each one."""
    # get contents — ensure the backing JSON object exists before reading it.
    checkifObjectExist()
    queue_path = Path('.profileRequest')
    for request_obj in rj.jsonget('user', queue_path):
        print(request_obj)
        # Remove the entry being processed from the queued array.
        rj.jsonarrpop('user', queue_path)
        response = profileAction(request_obj['requestPath'],
                                 request_obj['method'],
                                 request_obj['headers'],
                                 request_obj['file'],
                                 request_obj['form'])
        print(response.text.encode('utf8'))
def testToggleShouldSucceed(self):
    "Test JSONToggle"
    root = Path.rootPath()
    rj.jsonset('bool', root, False)
    # Toggling flips the stored boolean and returns the new value.
    self.assertTrue(rj.jsontoggle('bool', root))
    self.assertFalse(rj.jsontoggle('bool', root))
    # check non-boolean value
    rj.jsonset('num', root, 1)
    with self.assertRaises(redis.exceptions.ResponseError):
        rj.jsontoggle('num', root)
def get_all_tweets():
    """Return every cached tweet listed in the id queue as a JSON payload."""
    try:
        root = Path.rootPath()
        # Resolve each queued tweet id to its cached JSON document.
        data = [rj_cache.jsonget(tweet_id, root)
                for tweet_id in rj_queue.jsonget('tweet_ids', root)]
        print(data)
        return dumps(data), status.HTTP_200_OK
    except Exception as e:
        print(e)
        return "Error", status.HTTP_500_INTERNAL_SERVER_ERROR
def testObjKeysShouldSucceed(self):
    "Test JSONSObjKeys"
    obj = {'foo': 'bar', 'baz': 'qaz'}
    rj.jsonset('obj', Path.rootPath(), obj)
    keys = rj.jsonobjkeys('obj', Path.rootPath())
    keys.sort()
    # Iterating a dict yields its keys directly — no need for six.iterkeys
    # plus a manual copy-and-sort.
    exp = sorted(obj)
    self.assertListEqual(exp, keys)
def submitModule(self, repo_id, **kwargs):
    """Register a module-submission request and queue it for processing.

    :param repo_id: repository identifier (case-insensitive; lowered here)
    :param kwargs: extra fields persisted on the Submission record
    :return: dict with 'id' and 'status' ('failed'/'active'/'queued'), plus a
             human-readable 'message' on every non-queued path
    """
    logger.info('Module submitted to hub {}'.format(repo_id))
    repo_id = repo_id.lower()
    # NOTE(review): ts is never used below — confirm before removing.
    ts = datetime.utcnow()
    res = {'id': repo_id, 'status': 'failed'}
    # Global kill switch stored on the hub document.
    if not self.dconn.jsonget(self._hubkey, Path('submit_enabled')):
        res['message'] = 'Module submission is currently disabled'
        return res
    # Check if the module is already listed
    m = RedisModule(self.dconn, self.sconn, self.autocomplete, repo_id)
    if m.exists:
        # TODO: return in search results
        res['message'] = 'Module already listed in the hub'
        return res
    # Check if there's an active submission, or if the failure was too recent
    submission = Submission(self.dconn, repo_id)
    if submission.exists:
        # NOTE(review): local `status` shadows any module-level `status` import.
        status = submission.status
        if status != 'failed':
            res['status'] = 'active'
            res['message'] = 'Active submission found for module'
            return res
        else:
            # TODO: handle failed submissions
            res['message'] = 'Module already submitted to the hub and had failed, please reset manually for now'
            return res
    # Store the new submission
    submission.save(**kwargs)
    # Record the submission in the catalog
    # TODO: find a good use for that, e.g. 5 last submissions
    self.dconn.jsonarrappend(self._hubkey, Path('.submissions'), {
        'id': submission.get_id(),
        'created': submission.created,
    })
    # Add a job to process the submission
    q = Queue(connection=self.qconn)
    job = q.enqueue(callProcessSubmission, submission.get_id())
    if job is None:
        res['message'] = 'Submission job could not be created'
        # TODO: design retry path
        logger.error(
            'Could not create submission processing job for {}'.format(
                submission.get_id()))
    else:
        res['status'] = 'queued'
        submission.status = res['status']
        submission.job = job.id
    return res
def send_to_db():
    """Flush queued tweets from Redis into the Mongo 'tweets' collection."""
    print("Listening for tasks in the backend queue ..")
    try:
        tweets = rj_queue_backend.jsonget('tweets', Path.rootPath())
        if len(tweets) > 0:
            # Clear the queue before persisting the batch we just read.
            rj_queue_backend.jsonset('tweets', Path.rootPath(), [])
            # putting the tweets_cache to database
            db['tweets'].insert_many(tweets)
            print("Found a List of tweets.. Sending it to database..")
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt still
        # propagate; a missing/empty queue document lands here.
        print('no items to send to db..')
def testArrInsertShouldSucceed(self):
    "Test JSONSArrInsert"
    root = Path.rootPath()
    rj.jsonset('arr', root, [0, 4])
    # Inserting three elements at index 1 returns the array's new length.
    self.assertEqual(5, rj.jsonarrinsert('arr', root, 1, *[1, 2, 3]))
    self.assertListEqual([0, 1, 2, 3, 4], rj.jsonget('arr'))
def append_to_path(existing, addition):
    """Extend a JSON subpath with one more key.

    :param existing: a subpath string
    :param addition: a new key to append to the subpath
    :return: the combined path string
    :rtype: str
    """
    root = Path.rootPath()
    # The root path already ends with the separator, so plain concatenation works.
    if existing == root:
        return root + addition
    return "{}.{}".format(existing, addition)
def hash_get(self, key, field):
    """Gets a value from a hash table stored under the passed key

    arguments:
    key -- data access key
    field -- hash table entry key
    """
    field_path = Path(f'["{field}"]')
    # A missing entry has no JSON type; report it as absent.
    if self.redis.jsontype(key, field_path) is None:
        return None
    return self.redis.jsonget(key, field_path, no_escape=True)
def applyRecordLevelUpdates():
    """Replay downloaded record-level deltas against the current records.

    Scans Redis for delta keys (prefix starting with 'r'), buckets them by
    CRUD kind (insert/update/delete) under a timestamp-first composite key so
    sorting the bucket yields chronological order, then applies each bucket.
    NOTE(review): `apply` here is a project helper, not a builtin — confirm.
    """
    keys_i = dict()  # insert deltas, keyed timestamp-first for chronological sort
    keys_u = dict()  # update deltas
    keys_x = dict()  # delete deltas
    # import pdb; pdb.set_trace();
    # A: get all 'r'/'i' keys (fresh_keys_downloaded list) and sort by timestamp
    search_pattern = "r"
    # Iterating the string visits each prefix character (currently just 'r').
    for s in search_pattern:
        for i, key in enumerate(rj.keys(s + '*')):
            # get semantics from key
            prefix, crud, timestamp, suffix = prefix_crud_timestamp_suffix(key)
            print("prefix, crud, timestamp, suffix: ", prefix, crud, timestamp, suffix)
            if crud == 'i':
                # print("key, prefix, crud, timestamp, suffix: ", key, prefix, crud, timestamp, suffix)
                # import pdb; pdb.set_trace();
                # I add timestamp first to process chronologically
                keys_i[timestamp + '-' + prefix + '-' + 'i' + '-' + suffix] = key
            elif crud == 'u':
                # print("key, prefix, crud, timestamp, suffix: ", key, prefix, crud, timestamp, suffix)
                # import pdb; pdb.set_trace();
                # I add timestamp first to process chronologically
                keys_u[timestamp + '-' + prefix + '-' + 'u' + '-' + suffix] = key
            elif crud == 'x':
                # print("key, prefix, crud, timestamp, suffix: ", key, prefix, crud, timestamp, suffix)
                # import pdb; pdb.set_trace();
                # I add timestamp first to process chronologically
                keys_x[timestamp + '-' + prefix + '-' + 'x' + '-' + suffix] = key
    # B. apply inserts one by one to current records
    # Note prefix starts with 'r', so get rid of that
    # import pdb; pdb.set_trace()
    for k in sorted(keys_i.items()):
        print("**** :", k)
        # k is (composite_key, redis_key); decompose the composite key again.
        timestamp, prefix, crud, suffix = timestamp_prefix_crud_suffix(k[0])
        record = rj.jsonget(k[1], Path.rootPath())
        apply(prefix[1:], 'i', suffix, record)
    # C. apply updates one by one to current records
    for k in sorted(keys_u.items()):
        timestamp, prefix, crud, suffix = timestamp_prefix_crud_suffix(k[0])
        record = rj.jsonget(k[1], Path.rootPath())
        apply(prefix[1:], 'u', suffix, record)
    # D. apply deletes one by one to current records
    for k in sorted(keys_x.items()):
        timestamp, prefix, crud, suffix = timestamp_prefix_crud_suffix(k[0])
        record = rj.jsonget(k[1], Path.rootPath())
        apply(prefix[1:], 'x', suffix, record)
def init_info_db():
    """Return the stored 'info' document, creating the initial schema if absent.

    On first run, seeds the 'info', 'pairs', 'tokens' and 'paths' documents.

    :return: the 'info' document (existing or newly created)
    """
    root = Path.rootPath()
    # Consistency fix: use the same Path helper everywhere (was a mix of the
    # literal "." and Path.rootPath(), which resolve to the same root path).
    info = rj.jsonget("info", root)
    if info:
        print(info)
        return info
    info = {"uniswap": {"index": 0}}
    rj.jsonset('info', root, info)
    rj.jsonset('pairs', root, {})
    rj.jsonset('tokens', root, {})
    rj.jsonset('paths', root, {})
    return info
def list_set(self, key, idx, value):
    """Sets the list element at index to value

    arguments:
    key -- list access key
    idx -- a zero based index where the set should be performed
    value -- a JSON-serializable value to be inserted
    """
    # TODO the operation pair should be atomic to avoid possible race conditions
    root = Path.rootPath()
    # Replace-by-index: drop the old element, then insert the new one in place.
    self.redis.jsonarrpop(key, root, idx)
    return self.redis.jsonarrinsert(key, root, idx, value)
def append_rules_in_redis(self, json_key, path, json_object, namespace):
    """Store a rules object at `path`, creating key/namespace containers first.

    :param json_key: Redis key holding the rules document
    :param path: JSON path at which to set json_object
    :param json_object: the rules payload to store
    :param namespace: namespace whose sub-object must exist before the set
    :return: result of the final jsonset call
    """
    ## assumption to have empty rules array in the DB
    if self.get_values_by_key(json_key, '.'):
        try:
            self.get_values_by_key(json_key, '.' + str(namespace))
        except Exception:
            # Namespace missing — create an empty container for it.
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # still propagate.
            self.client.jsonset(json_key, Path('.' + str(namespace)), {})
        return self.client.jsonset(json_key, Path(path), json_object)
    # Key does not exist yet: create the document, then the namespace container.
    self.set_key_value(json_key, {})
    self.client.jsonset(json_key, Path('.' + str(namespace)), {})
    return self.client.jsonset(json_key, Path(path), json_object)
def get_answers(self, rj: RedisClient, clear: bool = True) -> List[Answer]:
    """
    Get all answers the frontend has received.

    Atomically reads and resets the per-algorithm answer list via a pipeline.

    :param rj: RedisJSON client
    :param clear: must be True; the answer list is cleared after reading
    :raises NotImplementedError: if clear is False
    :return: the answers received since the last call (empty if key absent)
    """
    if not clear:
        raise NotImplementedError
    key = f"alg-{self.ident}-answers"
    # Perf fix: `key in rj.keys()` fetched every key in the database just to
    # test membership; EXISTS is a single O(1) round trip.
    if rj.exists(key):
        pipe = rj.pipeline()
        pipe.jsonget(key, Path("."))
        pipe.jsonset(key, Path("."), [])
        answers, success = pipe.execute()
        return answers
    return []
def test_get_field(self):
    """Fetching a nested field by JSON path returns just that value."""
    pool = RedisPool(urls=("localhost", 6379))
    json = pool.json()
    obj = {
        'answer': 42,
        'arr': [None, True, 3.14],
        'truth': {
            'coord': 'out there'
        },
    }
    json.jsonset('obj', Path.rootPath(), obj)
    fetched = json.jsonget('obj', Path('.truth.coord'))
    self.assertEqual(fetched, 'out there')
def update_cache(tweet_id=None):
    """Refresh cached tweets from Mongo.

    With a tweet_id, refresh (or evict) just that tweet; with no argument,
    refresh every tweet currently listed in the id queue.

    :param tweet_id: optional id of a single tweet to refresh/evict
    """
    root = Path.rootPath()
    # know what tweets are cached from tweet_ids in the q
    cache = rj_queue.jsonget('tweet_ids', root)
    if tweet_id:
        isPresentinCache = rj_cache.jsonget(tweet_id, root)
        if isPresentinCache:  # if present, update.. else leave it
            tweet = remove_mongo_id(list(db['tweets'].find({'id': tweet_id})))
            if len(tweet) > 0:
                rj_cache.jsonset(tweet_id, root, tweet[0])
            else:
                # Gone from Mongo: evict from the cache and from the id queue.
                print('Deleting...')
                rj_cache.jsondel(tweet_id, root)
                all_tweets = rj_queue.jsonget('tweet_ids', root)
                rj_queue.jsonarrpop('tweet_ids', root,
                                    all_tweets.index(tweet_id))
    else:
        for item in cache:  # to update many tweets
            db_item = remove_mongo_id(list(db['tweets'].find({'id': item})))
            if len(db_item) > 0:
                rj_cache.jsonset(item, root, db_item[0])
            else:
                print('deleting..')
                # BUG FIX: previously deleted `tweet_id`, which is None in this
                # branch — delete the stale `item` actually being processed.
                rj_cache.jsondel(item, root)
    print('cache updated')
def main():
    """Connect to RedisJSON, store a sample document and read back a nested field."""
    rj = Client(host='localhost', port=6379, decode_responses=True)
    obj = {
        'answer': 42,
        'arr': [None, True, 3.14],
        'truth': {
            'coord': 'out there'
        },
    }
    rj.jsonset('obj', Path.rootPath(), obj)
    # Get something
    # Fix: `print '...'` was Python 2 statement syntax — a SyntaxError on
    # Python 3; converted to the print() function.
    print('Is there anybody... {}?'.format(
        rj.jsonget('obj', Path('.truth.coord'))))
def tweetService():
    """Drain queued tweet requests and dispatch each to the matching action."""
    # get contents — ensure the backing JSON object exists before reading it.
    checkifObjectExist()
    queue_path = Path('.tweetRequest')
    for request_obj in rj.jsonget('tweet', queue_path):
        print(request_obj)
        # Remove the entry being processed from the queued array.
        rj.jsonarrpop('tweet', queue_path)
        if request_obj['service'] == 'comment':
            res = commentAction(request_obj['requestPath'],
                                request_obj['method'],
                                request_obj['headers'],
                                request_obj['body'])
        else:
            # Optional fields default to None when absent from the request.
            form = None if 'form' not in request_obj else request_obj['form']
            file = None if 'file' not in request_obj else request_obj['file']
            res = tweetAction(request_obj['requestPath'],
                              request_obj['method'],
                              request_obj['headers'], file, form)
        print(res.text.encode('utf8'))
def testJSONSetGetDelShouldSucceed(self):
    "Test basic JSONSet/Get/Del"
    # Set reports success, get round-trips the value, del removes the key.
    self.assertTrue(rj.jsonset('foo', Path.rootPath(), 'bar'))
    self.assertEqual('bar', rj.jsonget('foo'))
    self.assertEqual(1, rj.jsondel('foo'))
    self.assertFalse(rj.exists('foo'))
def addModule(self, mod): logger.info('Adding module to hub {}'.format(mod['name'])) # Store the module object as a document m = RedisModule(self.dconn, self.sconn, self.autocomplete, mod['name']) m.save(mod) # Add a reference to it in the master catalog self.dconn.jsonset( self._hubkey, Path('.modules["{}"]'.format(m.get_id())), { 'id': m.get_id(), 'key': m.get_key(), 'created': str(_toepoch(self._ts)), }) # Schedule a job to refresh repository statistics, starting from now and every hour s = Scheduler(connection=self.qconn) job = s.schedule( scheduled_time=datetime(1970, 1, 1), func=callRedisModuleUpateStats, args=[m.get_id()], interval=60 * 60, # every hour repeat=None, # indefinitely ttl=0, result_ttl=0) return m
def wrapper(*args, **kwargs):
    """Resolve the request's JWT into current_user and invoke the wrapped view.

    Falls back to current_user=None on any missing/expired/invalid token; only
    an unexpected validation failure aborts with 401.
    """
    auth_header = request.headers.get('Authorization')
    current_user = None
    if auth_header:
        try:
            # Expected header shape: "Bearer <token>".
            access_token = auth_header.split(' ')[1]
            try:
                token = jwt.decode(access_token,
                                   current_app.config['SECRET_KEY'])
                current_user = User.find_by_username(token['uid'])
                # Token must still be registered in Redis, else treat as anonymous.
                user_token = rj.jsonget(token['hash'], Path.rootPath())
                if not user_token:
                    current_user = None
            except jwt.ExpiredSignatureError as e:
                return f(*args, **kwargs, current_user=None)
                # raise e
            except (jwt.DecodeError, jwt.InvalidTokenError) as e:
                return f(*args, **kwargs, current_user=None)
                # raise e
            except Exception:
                # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
                # are not swallowed by the 401 abort.
                rest_api.abort(401, TOKEN['unknown'])
        except IndexError:
            # Malformed Authorization header (no token part).
            return f(*args, **kwargs, current_user=None)
            # raise jwt.InvalidTokenError
    else:
        return f(*args, **kwargs, current_user=None)
    return f(*args, **kwargs, current_user=current_user)