Example #1
    def testCustomEncoderDecoderShouldSucceed(self):
        "Test a custom encoder and decoder"

        class CustomClass(object):
            key = ''
            val = ''

            def __init__(self, k='', v=''):
                self.key = k
                self.val = v

        class TestEncoder(json.JSONEncoder):
            def default(self, obj):
                if isinstance(obj, CustomClass):
                    return 'CustomClass:{}:{}'.format(obj.key, obj.val)
                return json.JSONEncoder.default(self, obj)

        class TestDecoder(json.JSONDecoder):
            def decode(self, obj):
                d = json.JSONDecoder.decode(self, obj)
                if isinstance(d, six.string_types) and \
                        d.startswith('CustomClass:'):
                    s = d.split(':')
                    return CustomClass(k=s[1], v=s[2])
                return d

        rj = Client(encoder=TestEncoder(),
                    decoder=TestDecoder(),
                    port=port,
                    decode_responses=True)
        rj.flushdb()

        # Check a regular string
        self.assertTrue(rj.jsonset('foo', Path.rootPath(), 'bar'))
        self.assertEqual('string', rj.jsontype('foo', Path.rootPath()))
        self.assertEqual('bar', rj.jsonget('foo', Path.rootPath()))

        # Check the custom encoder
        self.assertTrue(
            rj.jsonset('cus', Path.rootPath(), CustomClass('foo', 'bar')))
        # Check the custom decoder
        obj = rj.jsonget('cus', Path.rootPath())
        self.assertIsNotNone(obj)
        self.assertEqual(CustomClass, obj.__class__)
        self.assertEqual('foo', obj.key)
        self.assertEqual('bar', obj.val)

        # Test resetting the decoder to the default after the client has been created
        rj.setDecoder(json.JSONDecoder())
        obj = rj.jsonget('cus', Path.rootPath())
        self.assertIsNotNone(obj)
        self.assertNotEqual(CustomClass, obj.__class__)

        # Test setting the decoder after the client has been created
        rj.setDecoder(TestDecoder())
        obj = rj.jsonget('cus', Path.rootPath())
        self.assertIsNotNone(obj)
        self.assertEqual(CustomClass, obj.__class__)
        self.assertEqual('foo', obj.key)
        self.assertEqual('bar', obj.val)
Example #2
    def post(self, request):
        validated_data = request.serializer.validated_data

        task = Task.objects.create(title=validated_data['title'],
                                   description=validated_data['description'],
                                   status=Task.CREATED,
                                   user_created=request.user,
                                   date_create_task=datetime.now())
        if validated_data['user_assigned']:
            task.user_assigned = validated_data['user_assigned']

        task.save()

        if validated_data['user_assigned'] and validated_data[
                'user_assigned'] != request.user:
            AddNotificationTaskStatus(task.user_assigned, task, "created")

        # add to the ReJSON database
        rj = Client(
            host='localhost',
            port=6379,
        )
        rj.jsonset('task:' + str(task.id), Path.rootPath(),
                   TaskSerializer(task).data)
        rj.execute_command('JSON.NUMINCRBY', 'acc', '.total', 1)
        rj.execute_command('JSON.SET', 'acc', '.maxId', str(task.id))

        return Response(status=201)
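The two raw execute_command calls above can also be issued through the client's JSON helper methods (the same jsonnumincrby/jsonset API used elsewhere in these examples), which handle argument encoding. A minimal sketch of that alternative, reusing the rj client, Path, and task from the method above:

# Equivalent of JSON.NUMINCRBY acc .total 1
rj.jsonnumincrby('acc', Path('.total'), 1)
# Equivalent of JSON.SET acc .maxId <task id>
rj.jsonset('acc', Path('.maxId'), task.id)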
Example #3
async def receive(attr_dict):
    redis_path_param_default = '_d'
    uri = "ws://tasks.logstash:3232"
    async with websockets.connect(uri, ping_interval=None) as websocket:
        data = await websocket.recv()
        print(f"> {data}")

        # Parse this message and extract the resource-group
        res_data = json.loads(data)
        res_id = res_data['id'].replace('/', '__')
        print('> res_id-- ' + res_id)
        rg = res_id.split('__')[3]
        print('> rg-- ' + rg)
        rejson_key = rg
        print('> attr_dict-- ' + str(attr_dict))

        # Check if the resource-group is present in the HashMap and look up its attribute value
        if rg in attr_dict.keys():
            print('> true')
            attribute = attr_dict[rg]
            redis_path_param = res_id + '_' + res_data[attribute]
        else:
            redis_path_param = res_id + redis_path_param_default

        # use the rejson module to insert the data into Redis db
        rj = Client(host='redis-server', port=6379, decode_responses=True)
        print('> redis_path_param-- ' + redis_path_param)
        redis_path_param = redis_path_param.replace('.', '$')
        redis_path_param = redis_path_param.replace('-', '_')
        #rj.jsonset(rejson_key,Path.rootPath(),json.dumps({}))
        rj.jsonset(rejson_key, Path('.' + redis_path_param), res_data)
Example #4
def redis_test():
    rj = Client(host='localhost', port=6379)

    # Set the key `obj` to some object
    obj = {
        'answer': 42,
        'arr': [None, True, 3.14],
        'truth': {
            'coord': 'out there'
        }
    }
    rj.jsonset('obj', Path.rootPath(), obj)

    # Get something
    print ('Is there anybody... {}?'.format(
        rj.jsonget('obj', Path('.truth.coord'))
    ))

    # Delete something (or perhaps nothing), append something and pop it
    rj.jsondel('obj', Path('.arr[0]'))
    rj.jsonarrappend('obj', Path('.arr'), 'something')
    print ('{} popped!'.format(rj.jsonarrpop('obj', Path('.arr'))))

    # Update something else
    rj.jsonset('obj', Path('.answer'), 2.17)

    # And use just like the regular redis-py client
    jp = rj.pipeline()
    jp.set('foo', 'bar')
    jp.jsonset('baz', Path.rootPath(), 'qaz')
    jp.execute()
Example #5
def main():
    rj = Client(host='localhost', port=6379, decode_responses=True)
    obj = {
       'answer': 42,
       'arr': [None, True, 3.14],
       'truth': {
           'coord': 'out there'
       }
    }
    rj.jsonset('obj', Path.rootPath(), obj)
Example #6
class Connector():
    
    def __init__(self):
        self._rj = Client(host='localhost', port=6379, decode_responses=True)

    def saveLog(self, logJson, logId):
        print("saveLog")
        self._rj.jsonset(logId, Path.rootPath(), logJson)

    def getLog(self, logId):
        return self._rj.jsonget(logId, Path.rootPath())
Example #7
def main():
    rj = Client(host='localhost', port=6379, decode_responses=True)
    obj = {
        'answer': 42,
        'arr': [None, True, 3.14],
        'truth': {
            'coord': 'out there'
        }
    }

    rj.jsonset('obj', Path.rootPath(), obj)

    # Get something
    print('Is there anybody... {}?'.format(
        rj.jsonget('obj', Path('.truth.coord'))))
Example #8
class Redis(object):
    def __init__(self):
        self.cfg = read_cfg("redis")
        self.rj = Client(host=self.cfg.get("ip"),
                         port=self.cfg.get("port"),
                         decoder=RedisJsonDecoder(),
                         decode_responses=True)
        self.logger = logger.myLogger("Redis")

    def create_key(self, infos, is_uuid=False):
        if is_uuid:
            key = (infos.get("cms") + '_' + infos.get("version") + '_'
                   + self.generate_values(infos, "Plugins") + '_'
                   + self.generate_values(infos, "Themes"))
        else:
            key = infos.get("cms") + '_' + infos.get("version")

        return key[:-1] if key.endswith('_') else key

    def generate_values(self, infos, place):
        key = ''
        data = infos.get(place)
        for keyy in data.keys():
            key = key + keyy + ':' + data.get(keyy) + '_'
        return key[:-1]

    def update_redis_just_cms(self, infos, exploits):
        key = self.create_key(infos)
        obj = {"data": infos, "exploits": exploits if exploits else {}}
        self.rj.jsonset(key, Path.rootPath(), obj)
        self.logger.info(f"Inserted {key} just cms...")

    def update_redis_full(self, infos, exploits):
        key = self.create_key(infos, True)
        obj = {"data": infos, "exploits": exploits if exploits else {}}
        self.rj.jsonset(key, Path.rootPath(), obj)
        self.logger.info(f"Inserted full {key}...")

    def get_redis_just_cms(self, infos):

        key = self.create_key(infos)
        self.logger.info(f"Getting just cms {key}...")
        return self.rj.jsonget(key, Path(self.cfg.get("path")))

    def get_redis_full(self, infos):
        key = self.create_key(infos, True)
        self.logger.info(f"Getting full cms {key}...")

        return self.rj.jsonget(key, Path(self.cfg.get("path")))

    def get_rj(self):
        return self.rj
Example #9
def save_simgrids_setup_data_rejson(id, setupJson, rejson_host, rejson_key,
                                    rejson_db):
    saveJson = {**get_default_json(), **setupJson}
    rj = Client(host=rejson_host, port=rejson_key, db=rejson_db)
    redis_key = rj.incr('simgrids_key')
    saveJson["id"] = redis_key
    json_key = "simgrids_key_{0}".format(redis_key)
    status = rj.jsonset(json_key, Path.rootPath(), saveJson)
    return redis_key, saveJson
Example #10
    def get(self, request):
        rj = Client(
            host='localhost',
            port=6379,
        )

        deleteRedisJSON(rj)

        tasks = Task.objects.all()
        for task in tasks:
            rj.jsonset('task:' + str(task.id), Path.rootPath(),
                       TaskSerializer(task).data)

        accountant = dict()
        accountant.update({"total": tasks.count()})
        accountant.update({"maxId": tasks.last().id})
        rj.jsonset('acc', Path.rootPath(), accountant)

        return Response(status=200)
Example #11
    def testUsageExampleShouldSucceed(self):
        "Test the usage example"

        # Create a new rejson-py client
        rj = Client(host='localhost', port=port, decode_responses=True)

        # Set the key `obj` to some object
        obj = {
            'answer': 42,
            'arr': [None, True, 3.14],
            'truth': {
                'coord': 'out there'
            }
        }
        rj.jsonset('obj', Path.rootPath(), obj)

        # Get something
        rv = rj.jsonget('obj', Path('.truth.coord'))
        self.assertEqual(obj['truth']['coord'], rv)

        # Delete something (or perhaps nothing), append something and pop it
        value = "something"
        rj.jsondel('obj', Path('.arr[0]'))
        rj.jsonarrappend('obj', Path('.arr'), value)
        rv = rj.jsonarrpop('obj', Path('.arr'))
        self.assertEqual(value, rv)

        # Update something else
        value = 2.17
        rj.jsonset('obj', Path('.answer'), value)
        rv = rj.jsonget('obj', Path('.answer'))
        self.assertEqual(value, rv)

        # And use just like the regular redis-py client
        jp = rj.pipeline()
        jp.set('foo', 'bar')
        jp.jsonset('baz', Path.rootPath(), 'qaz')
        jp.execute()
        rv1 = rj.get('foo')
        self.assertEqual('bar', rv1)
        rv2 = rj.jsonget('baz')
        self.assertEqual('qaz', rv2)
Example #12
def webhook_save():
    if request.method == 'POST':
        timestamp_human = datetime.datetime.now()
        timestamp = int(time.time())
        nowDatetime = timestamp_human.strftime('%Y-%m-%d(%H:%M:%S)')
        req_data = request.get_json()
        alertname = req_data['commonLabels']['alertname']
        severity = ''
        receiver = req_data['receiver']
        key_name = str(timestamp) + "_" + alertname + "_" + receiver
        try:
            # conn = redis.Redis(host=REDIS_SERVER, port=6379, db=0, password=REDIS_PWD)
            conn = Client(host=REDIS_SERVER,
                          port=6379,
                          db=0,
                          password=REDIS_PWD)
            conn.ping()
            print('Redis connected %s' % REDIS_SERVER)
        except Exception as e:
            print('Error:', e)
            exit('Failed to connect')

        conn = Client(host=REDIS_SERVER, port=6379)
        conn.jsonset(key_name, Path.rootPath(), req_data)
        data = json.dumps(conn.jsonget(key_name))
        print(data)
        # Redis : SCAN 0 match 1527911[1-9][1-9]*  (see the scan_iter sketch after this function)

    else:
        abort(400)

    if not conn.exists(key_name):
        print("Error: %s doesn't exist" % key_name)

    return jsonify({'status': 'success'}), 200
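The "SCAN 0 match ..." note above hints at listing stored alerts by timestamp prefix. A rough sketch of that idea using redis-py's scan_iter, reusing the Client and REDIS_SERVER names from the snippet above; the prefix argument is illustrative:

def list_alerts(prefix):
    conn = Client(host=REDIS_SERVER, port=6379, decode_responses=True)
    # Scan the keyspace for alert keys starting with the given timestamp prefix
    return {key: conn.jsonget(key) for key in conn.scan_iter(match=prefix + '*')}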
Example #13
class Database:
    """Database handler"""
    def __init__(self, database_name, port=6379):
        self.database = Client(host=database_name,
                               port=port,
                               decode_responses=True)

    def flush(self):
        """Flushing a certain table of the database"""
        return self.database.flushdb()

    def import_data(self, keyname, data):
        """Load data into the database"""
        return self.database.jsonset(keyname, Path.rootPath(), data)

    def ping(self):
        """Tests the connection"""
        return self.database.ping()
Example #14
class ReJson:
    """Facade for ReJson"""
    def __init__(self, host: str, port: Union[str, int]) -> None:
        """Instantiate a connection to ReJson.

        :param host: The hostname/ip of the Redis instance.
        :type host: str
        :param port: The port of the Redis instance.
        :type port: int
        """
        self._client = Client(host=host, port=port, decode_responses=True)

    def keys(self) -> Json:
        """Get all keys"""
        return self._client.keys()

    def post(self, key: str, obj: Json) -> None:
        """Post a new Json object to the store.

        :param key: The key to store the Json at.
        :type key: str
        :param obj: What to store.
        :type obj: Json
        """
        self._client.jsonset(key, Path.rootPath(), obj)

    def get(self, key: str) -> Json:
        """[summary]

        :param key: The key that the Json object was stored at.
        :type key: str
        :return: The Json stored at `key`.
        :rtype: Json
        """
        return self._client.jsonget(key, Path.rootPath())

    def update(self, key: str, path: str, value: Json) -> None:
        """[summary]

        :param key: The key that the Json object was stored at.
        :type key: str
        :param path: A period-separated string of keys to traverse the Json.
        :type path: str
        :param value: The new value.
        :type value: Json
        """
        self._client.jsonset(key, Path(f".{path}"), value)

    def append(self, key: str, path: str, *values: Json) -> None:
        """Append to some array within a Json obejct.

        :param key: The key that the Json object was stored at.
        :type key: str
        :param path: A period-separated string of keys to traverse the Json.
        :type path: str
        """
        self._client.jsonarrappend(key, Path(f".{path}"), *values)

    def pop(self, key: str, path: str) -> Json:
        """Pop from from array within a Json object.

        :param key: The key that the Json object was stored at.
        :type key: str
        :param path: A period-separated string of keys to traverse the Json.
        :type path: str
        :return: The Json value popped from the array.
        :rtype: Json
        """
        return self._client.jsonarrpop(key, f".{path}")

    def remove(self, key: str, path: str, value: Json) -> None:
        """Remove something from some array within a Json object.
        
        :param key: The key that the Json object was stored at.
        :type key: str
        :param path: A period-separated string of keys to traverse the Json.
        :type path: str
        :param value: The value to remove from the array.
        :type value: Json
        """
        index = self._client.jsonarrindex(key, f".{path}", value)
        self._client.jsondel(key, f"{path}[{index}]")
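A brief usage sketch for the ReJson facade above; the key name and values are illustrative, not from the source:

store = ReJson(host='localhost', port=6379)
store.post('user:1', {'name': 'Ada', 'tags': ['admin']})  # JSON.SET at the root
store.update('user:1', 'name', 'Ada Lovelace')            # JSON.SET at .name
store.append('user:1', 'tags', 'founder')                 # JSON.ARRAPPEND to .tags
print(store.get('user:1'))                                # JSON.GET the whole object
store.remove('user:1', 'tags', 'admin')                   # ARRINDEX lookup, then DEL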
Example #15
class Hub(object):
    dconn = None  # document store connection
    sconn = None  # search index connection
    qconn = None  # queue connection
    gh = None
    autocomplete = None
    repo = None
    _ts = None
    _hubkey = 'hub:catalog'
    _ixname = 'ix'
    _acname = 'ac'

    def __init__(self,
                 ghlogin_or_token=None,
                 docs_url=None,
                 search_url=None,
                 queue_url=None,
                 repo=None):
        timestamp = datetime.utcnow()
        logger.info('Initializing temporary hub {}'.format(timestamp))

        if ghlogin_or_token:
            self.gh = Github(ghlogin_or_token)
        elif 'GITHUB_TOKEN' in os.environ:
            self.gh = Github(os.environ['GITHUB_TOKEN'])
        else:
            logger.info('Env var GITHUB_TOKEN not found')

        if docs_url:
            pass
        elif 'DOCS_REDIS_URL' in os.environ:
            docs_url = os.environ['DOCS_REDIS_URL']
        else:
            logger.critical('No Redis for document storage... bye bye.')
            raise RuntimeError('No Redis for document storage... bye bye.')
        self.dconn = ReJSONClient().from_url(docs_url)

        if search_url:
            pass
        elif 'SEARCH_REDIS_URL' in os.environ:
            search_url = os.environ['SEARCH_REDIS_URL']
        else:
            search_url = docs_url
        conn = Redis(connection_pool=ConnectionPool().from_url(search_url))
        self.sconn = RediSearchClient(self._ixname, conn=conn)
        self.autocomplete = AutoCompleter(self._acname, conn=conn)

        if queue_url:
            pass
        elif 'QUEUE_REDIS_URL' in os.environ:
            queue_url = os.environ['QUEUE_REDIS_URL']
        else:
            queue_url = docs_url
        self.qconn = StrictRedis.from_url(queue_url)

        if repo:
            pass
        elif 'REDISMODULES_REPO' in os.environ:
            repo = os.environ['REDISMODULES_REPO']
        else:
            logger.critical('No REDISMODULES_REPO... bye bye.')
            raise RuntimeError('No REDISMODULES_REPO... bye bye.')
        self.repo = repo

        # Check if hub exists
        if self.dconn.exists(self._hubkey):
            self._ts = datetime.fromtimestamp(
                float(self.dconn.jsonget(self._hubkey, Path('.created'))))
            logger.info('Latching to hub {}'.format(self._ts))
        else:
            self._ts = timestamp
            logger.info('Creating hub {}'.format(self._ts))
            self.createHub()
            self.addModulesRepo(self.repo)

    def get_repo_url(self):
        return 'https://github.com/{}'.format(self.repo)

    def createHub(self):
        logger.info('Creating the hub in the database {}'.format(self._ts))
        # Store the master modules catalog as an object
        self.dconn.jsonset(
            self._hubkey, Path.rootPath(), {
                'created': str(_toepoch(self._ts)),
                'modules': {},
                'submissions': [],
                'submit_enabled': False
            })

        # Create a RediSearch index for the modules
        # TODO: catch errors
        self.sconn.create_index(
            (TextField('name', sortable=True), TextField('description'),
             NumericField('stargazers_count', sortable=True),
             NumericField('forks_count', sortable=True),
             NumericField('last_modified', sortable=True)),
            stopwords=stopwords)

    def deleteHub(self):
        # TODO
        pass

    def addModule(self, mod):
        logger.info('Adding module to hub {}'.format(mod['name']))
        # Store the module object as a document
        m = RedisModule(self.dconn, self.sconn, self.autocomplete, mod['name'])
        m.save(mod)

        # Add a reference to it in the master catalog
        self.dconn.jsonset(
            self._hubkey, Path('.modules["{}"]'.format(m.get_id())), {
                'id': m.get_id(),
                'key': m.get_key(),
                'created': str(_toepoch(self._ts)),
            })

        # Schedule a job to refresh repository statistics, starting from now and every hour
        s = Scheduler(connection=self.qconn)
        job = s.schedule(
            scheduled_time=datetime(1970, 1, 1),
            func=callRedisModuleUpateStats,
            args=[m.get_id()],
            interval=60 * 60,  # every hour
            repeat=None,  # indefinitely
            ttl=0,
            result_ttl=0)
        return m

    """
    Adds modules to the hub from a local directory
    TODO: deprecate asap
    """

    def addModulesPath(self, path):
        logger.info('Loading modules from local path {}'.format(path))
        # Iterate module JSON files
        for filename in os.listdir(path):
            if filename.endswith(".json"):
                with open('{}/{}'.format(path, filename)) as fp:
                    mod = json.load(fp)

                m = self.addModule(mod)

    """
    Adds modules to the hub from a GitHub repository
    """

    def addModulesRepo(self, name, path='/modules/'):
        # TODO: check for success
        q = Queue(connection=self.qconn)
        q.enqueue(callLoadModulesFromRepo, name, path)

    def loadModulesFromRepo(self, name, path):
        logger.info('Loading modules from Github {} {}'.format(name, path))
        # TODO: error handling, sometimes not all contents are imported?
        repo = self.gh.get_repo(name)
        files = repo.get_dir_contents(path)
        for f in files:
            mod = json.loads(f.decoded_content)
            m = self.addModule(mod)

    """
    Submits a module to the hub
    """

    def submitModule(self, repo_id, **kwargs):
        logger.info('Module submitted to hub {}'.format(repo_id))
        repo_id = repo_id.lower()
        ts = datetime.utcnow()
        res = {'id': repo_id, 'status': 'failed'}

        if not self.dconn.jsonget(self._hubkey, Path('submit_enabled')):
            res['message'] = 'Module submission is currently disabled'
            return res

        # Check if the module is already listed
        m = RedisModule(self.dconn, self.sconn, self.autocomplete, repo_id)
        if m.exists:
            # TODO: return in search results
            res['message'] = 'Module already listed in the hub'
            return res

        # Check if there's an active submission, or if the failure was too recent
        submission = Submission(self.dconn, repo_id)
        if submission.exists:
            status = submission.status
            if status != 'failed':
                res['status'] = 'active'
                res['message'] = 'Active submission found for module'
                return res
            else:
                # TODO: handle failed submissions
                res['message'] = 'Module already submitted to the hub and had failed, please reset manually for now'
                return res

        # Store the new submission
        submission.save(**kwargs)

        # Record the submission in the catalog
        # TODO: find a good use for that, e.g. 5 last submissions
        self.dconn.jsonarrappend(self._hubkey, Path('.submissions'), {
            'id': submission.get_id(),
            'created': submission.created,
        })

        # Add a job to process the submission
        q = Queue(connection=self.qconn)
        job = q.enqueue(callProcessSubmission, submission.get_id())
        if job is None:
            res['message'] = 'Submission job could not be created'
            # TODO: design retry path
            logger.error(
                'Could not create submission processing job for {}'.format(
                    submission.get_id()))
        else:
            res['status'] = 'queued'
            submission.status = res['status']
            submission.job = job.id

        return res

    def viewSubmissionStatus(self, repo_id):
        submission = Submission(self.dconn, repo_id)
        if submission.exists:
            res = {
                'id': submission.get_id(),
                'status': submission.status,
                'message': submission.message,
            }
            if 'finished' == res['status']:
                res['pull_number'] = submission.pull_number
                res['pull_url'] = submission.pull_url
            return res

    def processSubmission(self, repo_id):
        logger.info('Processing submission for {}'.format(repo_id))
        submission = Submission(self.dconn, repo_id)
        if submission.exists:
            return submission.process(self.gh, self.repo)

    def viewModules(self, query=None, sort=None):
        if not query:
            # Use a purely negative query to get all modules
            query = '-etaoinshrdlu'
        q = Query(query).no_content().paging(0, 1000)
        if sort:
            if sort == 'relevance':
                pass
            elif sort == 'update':
                q.sort_by('last_modified')
            elif sort == 'stars':
                q.sort_by('stargazers_count', asc=False)
            elif sort == 'forks':
                q.sort_by('forks_count', asc=False)
            elif sort == 'name':
                q.sort_by('name')

        results = self.sconn.search(q)
        mods = []
        fetch_duration = 0
        # TODO: this should be pipelined
        for doc in results.docs:
            m = RedisModule(self.dconn, self.sconn, self.autocomplete, doc.id)
            res, duration = _durationms(m.to_dict)
            mods.append(res)
            fetch_duration += duration

        return {
            'results': results.total,
            'search_duration': '{:.3f}'.format(results.duration),
            'fetch_duration': '{:.3f}'.format(fetch_duration),
            'total_duration':
            '{:.3f}'.format(fetch_duration + results.duration),
            'modules': mods,
        }

    def viewSearchSuggestions(self, prefix):
        suggestions = self.autocomplete.get_suggestions(prefix)
        return [s.string for s in suggestions]
Example #16
class JobsAPI(Resource):
    def __init__(self):
        self.redis = Client(host='127.0.0.1', port=6379, decode_responses=True)

    def get(self, **kwargs):
        if kwargs.get('job_id'):
            job_id = kwargs.get('job_id')
            if self.redis.exists(job_id):
                parser = reqparse.RequestParser()

                if request.url_rule.rule == '/jobs/<string:job_id>/next':
                    parser.add_argument('expired_duration',
                                        type=int,
                                        default=300)
                    args = parser.parse_args(strict=True)
                    if self.redis.jsonget(job_id, Path('.items')):
                        ttl = args.get('expired_duration')
                        items = self.redis.jsonget(job_id, Path('.items'))
                        for item in items:
                            if not self.redis.exists(f'hold_{item}'):
                                self.redis.execute_command(
                                    'SET', f'hold_{item}', job_id)
                                self.redis.execute_command(
                                    'EXPIRE', f'hold_{item}', ttl)
                                return output_json(
                                    {
                                        'status': 'ok',
                                        'job_id': job_id,
                                        'ttl': ttl,
                                        'index': items.index(item),
                                        'item': item
                                    }, 200)
                    return output_json(
                        {
                            'status': 'error',
                            'job_id': job_id,
                            'description': 'Items list is empty.'
                        }, 400)

                if request.url_rule.rule == '/jobs/<string:job_id>/items':
                    parser.add_argument('active',
                                        default='true',
                                        choices=('true', 'false'))
                    args = parser.parse_args(strict=True)
                    items = self.redis.jsonget(job_id, Path('.items'))
                    done_items = self.redis.jsonget(job_id, Path('.done'))
                    if args.get('active') == 'true':
                        active_items = []
                        for item in items:
                            if not self.redis.exists(f'hold_{item}') and \
                                    items.index(item) not in done_items:
                                active_items.append(item)
                        return output_json(
                            {
                                'status': 'ok',
                                'job_id': job_id,
                                'items': active_items
                            }, 200)
                    return output_json(
                        {
                            'status': 'ok',
                            'job_id': job_id,
                            'items': items + done_items
                        }, 200)
            else:
                return output_json(
                    {
                        'status': 'error',
                        'job_id': job_id,
                        'description': 'The job is not in the queue.'
                    }, 400)

        return output_json(
            {
                'status': 'ok',
                'jobs': [i for i in self.redis.keys() if i[:5] != 'hold_']
            }, 200)

    def post(self, **kwargs):
        if request.url_rule.rule == '/jobs/<string:job_id>/items/<int:item_index>/done':
            job_id = kwargs.get('job_id')
            item_index = kwargs.get('item_index')
            done_item = self.redis.jsonget(job_id, Path('.items'))[item_index]
            if item_index in self.redis.jsonget(job_id, Path('.done')):
                return output_json(
                    {
                        'status': 'error',
                        'description': 'The item already was marked as done.',
                        'job_id': job_id,
                        'index': item_index,
                        'item': done_item
                    }, 400)
            self.redis.delete(f'hold_{done_item}')
            self.redis.jsonarrappend(job_id, Path('.done'), item_index)
            return output_json(
                {
                    'status': 'ok',
                    'description': 'The item is marked as done.',
                    'job_id': job_id,
                    'index': item_index,
                    'item': done_item
                }, 200)

        if request.url_rule.rule == '/jobs/<string:job_id>/items/<int:item_index>/error':
            job_id = kwargs.get('job_id')
            item_index = kwargs.get('item_index')
            error_item = self.redis.jsonget(job_id, Path('.items'))[item_index]
            if item_index in self.redis.jsonget(job_id, Path('.error')):
                return output_json(
                    {
                        'status': 'error',
                        'description': 'The item already was marked as error.',
                        'job_id': job_id,
                        'index': item_index,
                        'item': error_item
                    }, 400)
            self.redis.delete(f'hold_{error_item}')
            self.redis.jsonarrappend(job_id, Path('.error'), item_index)
            return output_json(
                {
                    'status': 'ok',
                    'description': 'The item is marked as error.',
                    'job_id': job_id,
                    'index': item_index,
                    'item': error_item
                }, 200)

        if isinstance(request.json, list) and request.json:
            job_id = str(uuid.uuid4())

            data = {'items': request.json, 'done': [], 'error': []}

            if self.redis.jsonset(job_id, Path.rootPath(), data):
                return output_json(
                    {
                        'status': 'ok',
                        'description': 'Job is added to queue.',
                        'job_id': job_id
                    }, 201)
        else:
            return output_json(
                {
                    'status': 'error',
                    'description': 'Wrong request!'
                }, 400)

    def delete(self, job_id):
        if self.redis.exists(job_id):
            self.redis.delete(job_id)
            return output_json(
                {
                    'status': 'ok',
                    'description': 'Job is deleted.'
                }, 200)
        else:
            return output_json(
                {
                    'status': 'error',
                    'description': 'The job is not in the queue.'
                }, 400)
Example #17
import argparse
import json
from rejson import Client, Path

rj = Client(host='localhost', password='******')

# parse out the JSON file from the command line arguments
parser = argparse.ArgumentParser()
parser.add_argument('--json',
                    nargs=1,
                    help="JSON file",
                    type=argparse.FileType('r'))
arguments = parser.parse_args()

# write JSON with REJSON
rj.jsonset('some_json', Path.rootPath(), json.loads(arguments.json[0].read()))

# get a single (string) value from the JSON
value_of_myjson = rj.jsonget('some_json', Path('myjson'))
print('myjson in some_json: {}'.format(value_of_myjson))

# create an array
rj.jsonset('some_json', Path('secondary.spanishNumbers'), ["uno"])

# Append some values to the Array
rj.jsonarrappend('some_json', Path('secondary.spanishNumbers'), "tres",
                 "cuatro")

# insert a value to the Array
rj.jsonarrinsert('some_json', Path('secondary.spanishNumbers'), '1', 'dos')
Example #18
import json
from rejson import Client, Path

rj = Client(host='localhost', port=6379, decode_responses=True)
""" obj = {
    'answer': 42,
    'arr': [None, True, 3.14],
    'truth': {
        'coord': 'out there'
    }
}
rj.jsonset('obj1', Path.rootPath(), obj) """

# temp = rj.jsonget('89b2c519c47ffff', Path('.'))
temp = rj.jsonmget(Path('.'), '89b2c519c47ffff', '89b2c519c48ffff',
                   '89b2c519c49ffff')
print(json.dumps(temp))
Example #19
# connect to queue
rj_queue = Client(host='localhost', port=1111, decode_responses=True)
#Connect to cache
rj_cache = Client(host='localhost', port=1112, decode_responses=True)
#Connect to queue backend
rj_queue_mongodb = Client(host='localhost', port=1113, decode_responses=True)

# Check whether the array of tweets is already present in the queue backend; if not, initialize it to []
arr_queue_mongodb = rj_queue_mongodb.jsonget('tweets', Path.rootPath())
if arr_queue_mongodb is not None:
    print(
        "Array of tweets is already present in queue_backend with length: {}".
        format(len(arr_queue_mongodb)))
else:
    print("initializing an empty array tweets in the rj_queue_backend")
    rj_queue_mongodb.jsonset('tweets', Path.rootPath(), [])

cache_limit = 5


# Polls data from the queue, applies business logic, and sends it to the cache and the backend queue
@tl.job(interval=timedelta(seconds=2))
def send_to_cache_and_backend_queue():

    current_cache_size = len(rj_cache.keys())
    current_q_size = len(rj_queue.jsonget('tweets', Path.rootPath()))

    print('current cache size is ' + str(current_cache_size))
    print('current q size is ' + str(current_q_size))

    if current_q_size > 0:
Example #20
    def cache(self, obj):
        rj = Client(host='redis', port=6379, decode_responses=True)
        rj.jsonset(self.name, Path.rootPath(), obj)
Example #21
import json

rj = Client(host='localhost', port=6379, decode_responses=True)

# Set the key `obj` to some object
obj = {
    'answer': 42,
    'arr': [None, True, 3.14],
    'truth': {
        'coord': 'out there'
    }
}

jsondata = json.dumps(obj)

rj.jsonset('obj', Path('A2AA'), obj)

# Get something
temp = rj.jsonget('obj', Path('A2AA.truth.coord'))

print (f'Is there anybody... {temp}?')

# Delete something (or perhaps nothing), append something and pop it
rj.jsondel('obj', Path('.arr[0]'))
rj.jsonarrappend('obj', Path('.arr'), 'something')
popped = rj.jsonarrpop('obj', Path('.arr'))
print(f'{popped} popped!')

# Update something else
rj.jsonset('obj', Path('.answer'), 2.17)
Example #22
class StoreAndQuery():
    ''' read a json and store it into redis and then query '''
    def __init__(self):
        self.tests = ['ru.4', '5k4', 'u;4', 'ji3', '5', '2l4',
                      'xk7']  # no brackets needed here
        # the redis server
        self.rj = Client(host='localhost', port=6379, decode_responses=True)
        # phonetic table, radical to han characters
        self.fn = 'phone.json'
        self.objname = 'obj'
        self.data = None
        self.check_and_store()

    def check_and_store(self):
        ''' if no data here, read json and store it '''
        try:
            res = self.test_one_query('284', show=False)
            if res is None:
                print('[INFO] no data stored? read and store it')
                self.data = read_jsonfile(self.fn)
                self.rj.jsonset(self.objname, Path.rootPath(), self.data)
                print(f'[INFO] read {self.fn} and store as {self.objname}')
        except redis.exceptions.ConnectionError as e:
            print('[ERROR] cannot connect to redis server:\n', e)
            print('\nNeed start the service of redis/rejson first.')
            sys.exit(1)

    @staticmethod
    def transcode(ans: List) -> List:
        ''' Transcode: rejson appears to split a Han character (3-byte UTF-8)
            into 3 separate Latin-1 code points instead of decoding it as UTF-8,
            so this recombines them.

            For example, "中" in UTF-8 is "e4 b8 ad"; queried out of Redis it
            comes back as "\u00e4 \u00b8 \u00ad".
        '''
        u8ans = []
        for m in ans:
            ansi = m.encode('ISO8859-1')  # m is str, ansi is bytes
            s = ansi.decode('UTF-8')  # s is str in correct unicode char
            u8ans.append(s)
        return u8ans

    @staticmethod
    def show_result(res: List) -> None:
        ''' show only 10 results '''
        break_flag = False
        for i, s in enumerate(res):
            if i > 9:
                print('...', end=' ')
                break_flag = True
                break
            print(s, end=' ')
        if break_flag:
            print(res[-1])
        else:
            print()

    def test_one_query(self, key, show=True) -> List:
        ''' test one query
        127.0.0.1:6379> json.get  obj  noescape "zul4"
        "[\"\xe8\xa6\x85\",\"\xf0\xa1\xa0\x8d\"]"
        '''
        qkey = '["' + key + '"]'  # like ["su;6"]
        try:
            r = self.rj.jsonget(self.objname, Path(qkey))
            if r:
                if show:
                    print(key)
                res = self.transcode(r)
                if len(res) and show:
                    self.show_result(res)
                return res
            else:
                return None
        except redis.exceptions.ResponseError as e:
            print(e)

    def test_query(self) -> None:
        ''' test redis json '''
        for v in tests.vals:
            self.test_one_query(v)

    def test_json(self) -> None:
        ''' test json obj '''
        for v in tests.vals:
            try:
                print(self.data[v])
            except KeyError as e:
                print('KeyError: ', e)

    def action(self) -> None:
        ''' action '''
        self.test_query()
Example #23
from pymongo import MongoClient
from bson.json_util import dumps

# start timeloop
tl = Timeloop()

# connect to queue
rj = Client(host='redis', port=6379, decode_responses=True)

#connect to database
client = MongoClient('mongo', 27017)
db = client.mydb

app = Flask(__name__) # initialize the flask app

rj.jsonset('tasks', Path.rootPath(), []) #initialize an empty array in the queue

@app.route('/add_task',methods=['POST'])
def send_to_queue():
    print("Sending task to queue...")
    data=request.get_json()
    rj.jsonarrinsert('tasks', Path.rootPath(), 0,data)
    return jsonify(data)

@app.route('/get_tasks')
def get_all_tasks():
    data=list(db['to-do'].find())
    return dumps(data)

@app.route('/update_task',methods=['POST'])
def update_task():
Example #24
from rejson import Client, Path

rj = Client(host='localhost', port='6379', decode_responses=True)
obj = {
    'answer': 42,
    'arr': [None, True, 3.14],
    'truth': {
        'coord': 'out there'
    }
}
rj.jsonset('obj', Path.rootPath(), obj)
print(f"Is there anybody... {rj.jsonget('obj', Path('.truth.coord'))}")
Example #25
from rejson import Client, Path

rj = Client(host='localhost', port=6379, decode_responses=True)
print("connect successful")
base_directory = "../data/"
cnt = 0
for (dirpath, dirnames, filenames) in os.walk(base_directory):
    print("dirpath=" + dirpath)
    for file in filenames:
        # print("file=" + file)
        if ("json" in file):
            shortname = file.replace(".json", "")
            print("shortname is" + shortname)
            openname = dirpath + "/" + file
            print("openname is " + openname)
            data = json.loads(open(openname, "r").readline())
            print("data is ")
            print(data['data'])
            print("members is")
            print(data['data']['members'])
            for member in data['data']['members']:
                print("the id is", member['id'])
                memberIdInt = member['id']
                memberId = str(memberIdInt)
                memberIdFloat = float(memberIdInt)
                keyname = "member:" + memberId
                idxKeyName = "memberIndex:" + memberId
                zkeyname = "identifier:"
                #  can't use jsonset command because index not available but otherwise, this worked
                rj.jsonset(keyname, Path.rootPath(), member)
Example #26
class RedisDriver(DatabaseDriver):

    _host = "localhost"
    _port = 6379
    _client = None

    def __init__(self, host: str = "localhost", port: int = 6379):
        self._host = host
        self._port = port
        self._client = Client(host=host,
                              port=port,
                              decode_responses=True,
                              encoder=JSONSchemaObject.JSONSchemaEncoder())

    def find_by_ref(self, ref: str):

        return self._client.jsonget(ref)

    def find_id_by(self, idx: str, value: str, version: str):

        result = []
        for member in self._client.smembers("{}:{}".format(idx, value)):

            if version == "all":
                result.append(member)
                continue

            # we split the index to check against the version
            idxs = str(member).split(":")

            # the _version is the second token of idxs
            if idxs[1] == version:
                result.append(member)

        return result

    def save(self, obj_list: list, indexed_attrs: list):

        # First cycle is just to verify if we do not have any
        # index integrity violation
        for obj in indexed_attrs:

            # We store neither _id nor _version
            if obj[1] == "_id" or obj[1] == "_version":
                continue

            if obj[2] is None or obj[2] == "":
                raise ValueError("Indexed value {} must not be empty".format(
                    obj[1]))

            # the indexed is composed by schema path:indexes:attr_name
            indexed_key = store_name = "{}:indexes:{}:{}".format(
                obj[0], obj[1], obj[2])

            # if we already have this key, grab any member and make
            # sure it belongs to the same id
            for member in self._client.smembers(indexed_key):
                # we only need to use one element since the _id MUST be equal
                idxs = str(member).split(":")

                # the _id is the first token of idxs; check if we received the same
                # id, if not this is an index violation
                if not str(obj[3]).startswith(idxs[0]):
                    raise ValueError(
                        "{}:{} not unique, another object already have that value"
                        .format(obj[1], obj[2]))

                # we just need one iteration
                break

        # this cycle we just store the indexes
        for obj in indexed_attrs:

            if obj[2] is None or obj[2] == "" or obj[1] == "_id" or obj[
                    1] == "_version":
                continue

            # Set the store name and store data
            store_name = "{}:indexes:{}:{}".format(obj[0], obj[1], obj[2])
            store_data = obj[3]
            self._client.sadd(store_name, store_data)

        # We now store the actual objects, and return the added ids
        ids = []
        for obj in obj_list:

            # Set the store name and store data
            store_name = "{}:{}".format(obj[0], obj[1])
            store_data = obj[2]
            self._client.jsonset(store_name, Path.rootPath(), store_data)
            ids.append(obj[1])

        return ids
Example #27
        if msg.error():
            # Error or event
            if msg.error().code() == KafkaError._PARTITION_EOF:
                # End of partition event
                sys.stderr.write('%% %s [%d] reached end at offset %d\n' %
                                 (msg.topic(), msg.partition(), msg.offset()))
            else:
                # Error
                raise KafkaException(msg.error())
        else:
            # great success
            sys.stderr.write(
                '%% %s [%d] at offset %d with key %s:\n' %
                (msg.topic(), msg.partition(), msg.offset(), str(msg.key())))
            bytes_reader = io.BytesIO(msg.value())
            decoder = BinaryDecoder(bytes_reader)
            reader = DatumReader(schema)
            msg_dict = (reader.read(decoder))
            print(json.dumps(msg_dict))
            redis_client.set(msg_dict['name'], str(json.dumps(msg_dict)))
            rejson_client.jsonset(msg_dict['name'] + '_json', Path.rootPath(),
                                  msg_dict)

except KeyboardInterrupt:
    sys.stderr.write('%% Aborted by user\n')

finally:
    # Close down consumer to commit final offsets.
    c.close()
# end kafka consumer
Example #28
class RejsonDb(KeyValueStorage):
    def __init__(self, conf):
        """
        arguments:
        conf -- a dictionary containing 'settings' module compatible configuration of the plug-in
        """
        self._host = conf['host']
        self._port = int(conf['port'])
        self._db = int(conf['id'])
        self.redis = Client(host=self._host,
                            port=self._port,
                            db=self._db,
                            decode_responses=True)
        self._scan_chunk_size = 50

        try:
            self.redis.jsonget('-')
        except ResponseError as e:
            if 'unknown command' in str(e):
                logging.fatal(
                    "Rejson DB Plug-in requires Redis with RedisJSON module enabled"
                )
            else:
                raise e

    def rename(self, key, new_key):
        return self.redis.rename(key, new_key)

    def list_get(self, key, from_idx=0, to_idx=-1):
        """
        Returns a stored list. If there is a non-list value stored with the passed key
        then TypeError is raised.

        arguments:
        key -- data access key
        from_idx -- optional start index
        to_idx -- optional (default is -1) end index (including, i.e. unlike Python);
        negative values are supported (-1 = last, -2 = penultimate,...)
        """
        data = self.get(key, [])
        if isinstance(data, list):
            if to_idx == -1:
                return data[from_idx:]
            return data[from_idx:to_idx + 1]
        raise TypeError('Object is not a list')

    def list_append(self, key, value):
        """
        Add a value at the end of a list

        arguments:
        key -- data access key
        value -- value to be pushed
        """
        if not self.exists(key):
            self.set(key, [])
        self.redis.jsonarrappend(key, Path.rootPath(), value)

    def list_pop(self, key):
        """
        Removes and returns the first element of the list stored at key.

        arguments:
        key -- list access key
        """
        return self.redis.jsonarrpop(key, Path.rootPath(), 0)

    def list_len(self, key):
        """
        Returns length of a list. If there is a non-list value stored with the passed key
        then TypeError is raised.

        arguments:
        key -- data access key
        """
        if not self.exists(key):
            return 0
        return self.redis.jsonarrlen(key)

    def list_set(self, key, idx, value):
        """
        Sets the list element at index to value

        arguments:
        key -- list access key
        idx -- a zero based index where the set should be performed
        value -- a JSON-serializable value to be inserted
        """
        # TODO the operation pair should be atomic to avoid possible race conditions
        # (see the pipelined sketch after this class)
        self.redis.jsonarrpop(key, Path.rootPath(), idx)
        return self.redis.jsonarrinsert(key, Path.rootPath(), idx, value)

    def list_trim(self, key, keep_left, keep_right):
        """
        Trims the list from the beginning to keep_left - 1 and from keep_right to the end.
        The function does not return anything.

        arguments:
        key -- data access key
        keep_left -- the first value to be kept
        keep_right -- the last value to be kept
        """
        self.redis.jsonarrtrim(key, Path.rootPath(), keep_left, keep_right)

    def hash_get(self, key, field):
        """
        Gets a value from a hash table stored under the passed key

        arguments:
        key -- data access key
        field -- hash table entry key
        """
        if self.redis.jsontype(key, Path(f'["{field}"]')) is None:
            return None
        return self.redis.jsonget(key, Path(f'["{field}"]'), no_escape=True)

    def hash_set(self, key, field, value):
        """
        Puts a value into a hash table stored under the passed key

        arguments:
        key -- data access key
        field -- hash table entry key
        value -- a value to be stored
        """
        if not self.exists(key):
            self.set(key, {})
        self.redis.jsonset(key, Path(f'["{field}"]'), value)

    def hash_del(self, key, field):
        """
        Removes a field from a hash item

        arguments:
        key -- hash item access key
        field -- the field to be deleted
        """
        self.redis.jsondel(key, Path(f'["{field}"]'))

    def hash_get_all(self, key):
        """
        Returns a complete hash object (= Python dict) stored under the passed
        key. If the provided key is not present then an empty dict is returned.

        arguments:
        key -- data access key
        """
        return self.get(key)

    def get(self, key, default=None):
        """
        Gets a value stored with passed key and returns its JSON decoded form.

        arguments:
        key -- data access key
        default -- a value to be returned in case there is no such key
        """
        data = self.redis.jsonget(key, Path.rootPath(), no_escape=True)
        if data is None:
            return default
        return data

    def set(self, key, data):
        """
        Saves 'data' with 'key'.

        arguments:
        key -- an access key
        data -- a dictionary containing data to be saved
        """
        self.redis.jsonset(key, Path.rootPath(), data)

    def set_ttl(self, key, ttl):
        """
        Set auto expiration timeout in seconds.

        arguments:
        key -- data access key
        ttl -- number of seconds to wait before the value is removed
        (please note that update actions reset the timer to zero)
        """
        self.redis.expire(key, ttl)

    def get_ttl(self, key):
        return self.redis.ttl(key)

    def clear_ttl(self, key):
        self.redis.persist(key)

    def remove(self, key):
        """
        Removes a value specified by a key

        arguments:
        key -- key of the data to be removed
        """
        self.redis.jsondel(key)

    def exists(self, key):
        """
        Tests whether there is a value with the specified key

        arguments:
        key -- the key to be tested

        returns:
        boolean value
        """
        return self.redis.exists(key)

    def setnx(self, key, value):
        """
        An atomic operation "set if not exists".

        returns:
        1 if the key was set
        0 if the key was not set
        """
        return self.redis.jsonset(key, Path.rootPath(), value, nx=True)

    def getset(self, key, value):
        """
        An atomic operation which obtains current key first and then
        sets a new value under that key

        returns:
        previous key if any or None
        """
        data = self.get(key)
        self.set(key, value)
        return data

    def incr(self, key, amount=1):
        """
        Increments the value of 'key' by 'amount'.  If no key exists,
        the value will be initialized as 'amount'
        """
        if not self.exists(key):
            self.set(key, 0)
        return self.redis.jsonnumincrby(key, Path.rootPath(), amount)

    def hash_set_map(self, key, mapping):
        """
        Set key to value within hash 'name' for each corresponding
        key and value from the 'mapping' dict.
        Before setting, the values are json-serialized
        """
        return self.set(key, mapping)
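On the atomicity TODO in list_set above: one possible approach is to issue the pop and the insert through a single transactional pipeline, which this client already supports for JSON commands (as shown in Examples #4 and #11). A sketch under that assumption, reusing the rejson Path from the snippet:

def list_set_atomic(redis_client, key, idx, value):
    # Queue both JSON array operations and run them in one MULTI/EXEC block;
    # redis-py pipelines are transactional by default.
    pipe = redis_client.pipeline()
    pipe.jsonarrpop(key, Path.rootPath(), idx)
    pipe.jsonarrinsert(key, Path.rootPath(), idx, value)
    return pipe.execute()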
Example #29
class RedisJson:
    def __init__(self):
        self._rjson = Client(host='localhost',
                             port=6379,
                             decode_responses=True)
        self._root_path = Path.rootPath()

    '''
        Insert JSON into db

        Structure of JSON to insert:
        {
            'lat'  : 80.844,
            'long' : -43.139,
            'resources' : {
                'mask' : 450,
                'vaccine' : 56,
                'oxygen' : 800,
                ...
            },
            'updated' : <unix time ms>
        }
    '''

    def insert(self, key, data):
        self._rjson.jsonset(key, self._root_path, data)

    '''
        Return list of all JSON objects stored in db

        TODO: added this for now, but loading everything in memory doesn't seem
        like a great idea, maybe RedisSearch will help with this. Or maybe make
        this return a generator which can be iterated through
    '''

    def get(self):
        results = []
        for key in self._rjson.scan_iter():
            results.append(self._rjson.jsonget(key, self._root_path))

        return results

    '''
        Update field of a JSON object in db

        Syntax for `path` argument:
        E.g. we have {
            'key1' : value1,
            'key2' : {
                'key3' : value2
            }
        }

        To update value2, `path` should be ".key2.key3"
    '''

    def update(self, key, path, new_value):
        self._rjson.jsonset(key, path, new_value)

    '''
        Delete a JSON value from the db
    '''

    def delete(self, key):
        self._rjson.jsondel(key, self._root_path)
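A short usage sketch for the RedisJson wrapper above, following the documented structure; the key and the timestamp value are illustrative:

db = RedisJson()
db.insert('site:1', {
    'lat': 80.844,
    'long': -43.139,
    'resources': {'mask': 450, 'vaccine': 56, 'oxygen': 800},
    'updated': 1620000000000,  # illustrative unix time in ms
})
db.update('site:1', '.resources.mask', 449)  # path syntax as documented above
print(db.get())                              # all stored documents
db.delete('site:1')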
Example #30
from rejson import Client, Path
import json

try:
    rj = Client(host='localhost', port=6379, decode_responses=True)
    with open('jsonorg-example.json') as data_file:
        data = json.load(data_file)
    print(str(type(data)))
    ret = rj.jsonset('jsonorg_example', Path.rootPath(), data)
    print(str(ret))
except Exception as e:
    print("Exception: {}".format(e))