Example #1
    def testCustomEncoderDecoderShouldSucceed(self):
        "Test a custom encoder and decoder"

        class CustomClass(object):
            key = ''
            val = ''

            def __init__(self, k='', v=''):
                self.key = k
                self.val = v

        class TestEncoder(json.JSONEncoder):
            def default(self, obj):
                if isinstance(obj, CustomClass):
                    return 'CustomClass:{}:{}'.format(obj.key, obj.val)
                return json.JSONEncoder.encode(self, obj)

        class TestDecoder(json.JSONDecoder):
            def decode(self, obj):
                d = json.JSONDecoder.decode(self, obj)
                if isinstance(d, six.string_types) and \
                        d.startswith('CustomClass:'):
                    s = d.split(':')
                    return CustomClass(k=s[1], v=s[2])
                return d

        rj = Client(encoder=TestEncoder(),
                    decoder=TestDecoder(),
                    port=port,
                    decode_responses=True)
        rj.flushdb()

        # Check a regular string
        self.assertTrue(rj.jsonset('foo', Path.rootPath(), 'bar'))
        self.assertEqual('string', rj.jsontype('foo', Path.rootPath()))
        self.assertEqual('bar', rj.jsonget('foo', Path.rootPath()))

        # Check the custom encoder
        self.assertTrue(
            rj.jsonset('cus', Path.rootPath(), CustomClass('foo', 'bar')))
        # Check the custom decoder
        obj = rj.jsonget('cus', Path.rootPath())
        self.assertIsNotNone(obj)
        self.assertEqual(CustomClass, obj.__class__)
        self.assertEqual('foo', obj.key)
        self.assertEqual('bar', obj.val)

        # Test resetting the decoder after the client has been created
        rj.setDecoder(json.JSONDecoder())
        obj = rj.jsonget('cus', Path.rootPath())
        self.assertIsNotNone(obj)
        self.assertNotEqual(CustomClass, obj.__class__)

        # Test setting the decoder after the client has been created
        rj.setDecoder(TestDecoder())
        obj = rj.jsonget('cus', Path.rootPath())
        self.assertIsNotNone(obj)
        self.assertEqual(CustomClass, obj.__class__)
        self.assertEqual('foo', obj.key)
        self.assertEqual('bar', obj.val)
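
As an aside, the same encoder/decoder hooks work outside a test harness. Below is a minimal sketch, assuming a local RedisJSON server on localhost:6379; the DateEncoder class and the 'today' key are illustrative and not part of rejson-py.

import datetime
import json

from rejson import Client, Path

class DateEncoder(json.JSONEncoder):
    # Illustrative encoder: store dates as ISO-8601 strings.
    def default(self, obj):
        if isinstance(obj, datetime.date):
            return obj.isoformat()
        return json.JSONEncoder.default(self, obj)

rj = Client(host='localhost', port=6379,
            encoder=DateEncoder(), decode_responses=True)
rj.jsonset('today', Path.rootPath(), datetime.date(2020, 11, 3))
print(rj.jsonget('today', Path.rootPath()))  # '2020-11-03'
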
Example #2
class Redis(object):
    def __init__(self):
        self.cfg = read_cfg("redis")
        self.rj = Client(host=self.cfg.get("ip"),
                         port=self.cfg.get("port"),
                         decoder=RedisJsonDecoder(),
                         decode_responses=True)
        self.logger = logger.myLogger("Redis")

    def create_key(self, infos, is_uuid=False):
        if is_uuid:
            key = (infos.get("cms") + '_' + infos.get("version") + '_' +
                   self.generate_values(infos, "Plugins") + '_' +
                   self.generate_values(infos, "Themes"))
        else:
            key = infos.get("cms") + '_' + infos.get("version")

        return key[:-1] if key.endswith('_') else key

    def generate_values(self, infos, place):
        key = ''
        data = infos.get(place)
        for keyy in data.keys():
            key = key + keyy + ':' + data.get(keyy) + '_'
        return key[:-1]

    def update_redis_just_cms(self, infos, exploits):
        key = self.create_key(infos)
        obj = {"data": infos, "exploits": exploits if exploits else {}}
        self.rj.jsonset(key, Path.rootPath(), obj)
        self.logger.info(f"Inserted {key} just cms...")

    def update_redis_full(self, infos, exploits):
        key = self.create_key(infos, True)
        obj = {"data": infos, "exploits": exploits if exploits else {}}
        self.rj.jsonset(key, Path.rootPath(), obj)
        self.logger.info(f"Inserted full {key}...")

    def get_redis_just_cms(self, infos):

        key = self.create_key(infos)
        self.logger.info(f"Getting just cms {key}...")
        return self.rj.jsonget(key, Path(self.cfg.get("path")))

    def get_redis_full(self, infos):
        key = self.create_key(infos, True)
        self.logger.info(f"Getting full cms {key}...")

        return self.rj.jsonget(key, Path(self.cfg.get("path")))

    def get_rj(self):
        return self.rj
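
For reference, a quick illustration of the key format create_key() produces, using a made-up infos dict (the values below are assumptions; no Redis connection is involved):

infos = {
    "cms": "wordpress",
    "version": "5.4",
    "Plugins": {"akismet": "4.1"},
    "Themes": {"astra": "2.3"},
}
# create_key(infos)        -> 'wordpress_5.4'
# create_key(infos, True)  -> 'wordpress_5.4_akismet:4.1_astra:2.3'
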
Example #3
def redis_test():
    rj = Client(host='localhost', port=6379)

    # Set the key `obj` to some object
    obj = {
        'answer': 42,
        'arr': [None, True, 3.14],
        'truth': {
            'coord': 'out there'
        }
    }
    rj.jsonset('obj', Path.rootPath(), obj)

    # Get something
    print ('Is there anybody... {}?'.format(
        rj.jsonget('obj', Path('.truth.coord'))
    ))

    # Delete something (or perhaps nothing), append something and pop it
    rj.jsondel('obj', Path('.arr[0]'))
    rj.jsonarrappend('obj', Path('.arr'), 'something')
    print ('{} popped!'.format(rj.jsonarrpop('obj', Path('.arr'))))

    # Update something else
    rj.jsonset('obj', Path('.answer'), 2.17)

    # And use just like the regular redis-py client
    jp = rj.pipeline()
    jp.set('foo', 'bar')
    jp.jsonset('baz', Path.rootPath(), 'qaz')
    jp.execute()
Example #4
def webhook_view(timestamp=None,
                 alert_type=None,
                 reciever=None,
                 key_name=None):
    if request.method == 'GET':
        conn = Client(host=REDIS_SERVER, port=6379, db=0, password=REDIS_PWD)
        params = timestamp + '_' + alert_type + '_' + reciever
        data = json.dumps(conn.jsonget(params))
        return render_template('info.html', data=data)
    def find_by_state(self):
        try:
            rj = Client(
                host='redis',
                port=6379,
                decode_responses=True)
            return rj.jsonget(self.name)
        except:
            return None
Example #6
    def testUsageExampleShouldSucceed(self):
        "Test the usage example"

        # Create a new rejson-py client
        rj = Client(host='localhost', port=port, decode_responses=True)

        # Set the key `obj` to some object
        obj = {
            'answer': 42,
            'arr': [None, True, 3.14],
            'truth': {
                'coord': 'out there'
            }
        }
        rj.jsonset('obj', Path.rootPath(), obj)

        # Get something
        rv = rj.jsonget('obj', Path('.truth.coord'))
        self.assertEqual(obj['truth']['coord'], rv)

        # Delete something (or perhaps nothing), append something and pop it
        value = "something"
        rj.jsondel('obj', Path('.arr[0]'))
        rj.jsonarrappend('obj', Path('.arr'), value)
        rv = rj.jsonarrpop('obj', Path('.arr'))
        self.assertEqual(value, rv)

        # Update something else
        value = 2.17
        rj.jsonset('obj', Path('.answer'), value)
        rv = rj.jsonget('obj', Path('.answer'))
        self.assertEqual(value, rv)

        # And use just like the regular redis-py client
        jp = rj.pipeline()
        jp.set('foo', 'bar')
        jp.jsonset('baz', Path.rootPath(), 'qaz')
        jp.execute()
        rv1 = rj.get('foo')
        self.assertEqual('bar', rv1)
        rv2 = rj.jsonget('baz')
        self.assertEqual('qaz', rv2)
Example #7
class Connector():
    
    def __init__(self):
        self._rj = Client(host='localhost', port=6379, decode_responses=True)

    def saveLog(self, logJson, logId):
        print("saveLog")
        self._rj.jsonset(logId, Path.rootPath(), logJson)

    def getLog(self, logId):
        return self._rj.jsonget(logId, Path.rootPath())
class RedisClient:
    def __init__(self):
        logger.debug(os.environ.get("REDIS_HOST"))
        self.client = Client(host=os.environ.get("REDIS_HOST", "localhost"),
                             decode_responses=True)

    def getAnyShape(self, index):
        try:
            shape = self.client.jsonget(index)
            return shape
        except Exception as e:
            logger.warning('Fail to get the {} shape from Redis {}'.format(
                index, e))
def main():
    rj = Client(host='localhost', port=6379, decode_responses=True)
    obj = {
        'answer': 42,
        'arr': [None, True, 3.14],
        'truth': {
            'coord': 'out there'
        }
    }

    rj.jsonset('obj', Path.rootPath(), obj)

    # Get something
    print('Is there anybody... {}?'.format(
        rj.jsonget('obj', Path('.truth.coord'))))
Example #10
def webhook_save():
    if request.method == 'POST':
        timestamp_human = datetime.datetime.now()
        timestamp = int(time.time())
        nowDatetime = timestamp_human.strftime('%Y-%m-%d(%H:%M:%S)')
        req_data = request.get_json()
        alertname = req_data['commonLabels']['alertname']
        severity = ''
        receiver = req_data['receiver']
        key_name = str(timestamp) + "_" + alertname + "_" + receiver
        try:
            # conn = redis.Redis(host=REDIS_SERVER, port=6379, db=0, password=REDIS_PWD)
            conn = Client(host=REDIS_SERVER,
                          port=6379,
                          db=0,
                          password=REDIS_PWD)
            conn.ping()
            print('Redis connected %s' % REDIS_SERVER)
        except Exception as e:
            print('Error:', e)
            exit('Failed to connect')

        conn = Client(host=REDIS_SERVER, port=6379)
        conn.jsonset(key_name, Path.rootPath(), req_data)
        data = json.dumps(conn.jsonget(key_name))
        print(data)
        # Redis : SCAN 0 match 1527911[1-9][1-9]*

    else:
        abort(400)

    if not conn.exists(key_name):
        print("Error: %s doesn't exist" % key_name)

    return jsonify({'status': 'success'}), 200
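
The SCAN hint in the comment above can be written with redis-py's scan_iter, which the rejson Client inherits. A small sketch, assuming a local server and the same key naming scheme; the '1527911*' prefix is only an illustrative timestamp prefix:

from rejson import Client

conn = Client(host='localhost', port=6379, decode_responses=True)
for key in conn.scan_iter('1527911*'):
    print(key, conn.jsonget(key))
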
Example #11
def retrieve_playthrough_json(
        gamename,
        redis=None,  # redis server: expected to be a RedisJSON client connection
        ptid=playthrough_id(),  # default playthrough, can optionally specify
        redisbasekey=REDIS_FTWC_PLAYTHROUGHS,
        randseed=DEFAULT_PTHRU_SEED):

    if redis is None:
        _rj = Client(host='localhost', port=6379, decode_responses=True)
    else:
        assert isinstance(redis, Client)
        _rj = redis
    jsonobj = _rj.jsonget(f'{redisbasekey}:{gamename}', Path('.' + ptid))
    if redis is None:
        _rj.close()
    step_array = [
    ]  # convert json dict data (with redundant keys) to an array for convenience
    for i, step_key in enumerate(list(jsonobj.keys())):
        assert step_key == format_stepkey(
            i)  # format_stepkey() from twutils.playthroughs
        step_array.append(jsonobj[step_key])
    assert len(step_array) == len(jsonobj.keys())
    return step_array
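
A runnable sketch of the layout retrieve_playthrough_json() expects, assuming a local server; the key prefix, the playthrough id ('eatmeal') and the step-key format are illustrative guesses, since REDIS_FTWC_PLAYTHROUGHS and format_stepkey() are defined elsewhere:

from rejson import Client, Path

_rj = Client(host='localhost', port=6379, decode_responses=True)
_rj.jsonset('FTWC_PLAYTHROUGHS:tw-cooking-1', Path.rootPath(), {
    'eatmeal': {
        'step_00': {'cmd': 'look'},
        'step_01': {'cmd': 'open fridge'},
    },
})
print(_rj.jsonget('FTWC_PLAYTHROUGHS:tw-cooking-1', Path('.eatmeal')))
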
Example #12
    def main(stdscr, running):

        stdscr.clear()
        curses.curs_set(0)
        begin_x = 0
        begin_y = 0
        height = 8
        width = 40
        curses.noecho()
        i = 0
        curses.start_color()
        curses.use_default_colors()
        stdscr.nodelay(1)
        ticksaway = 0
        quantity = 1
        oldScreenSize = None
        # pos = client.positions()
        oldTrades = []
        y = 0
        tpos = 0
        trcounter = []
        trid = 10
        # account = client.account()
        rj = Client(host='localhost', port=6379, decode_responses=True)

        def timedinput(stdscr):

            inputwin = stdscr.derwin(height, width, 10, begin_x)

            tbox = Textbox(inputwin)

            tbox.edit()
            k = tbox.gather()
            wsdata.inputstr = k.rstrip()
            inputwin.erase()

            inputcheck(wsdata.inputstr, activeInst, client)

            return

        for i in range(0, curses.COLORS):

            curses.init_pair(i + 1, i, -1)

        while (running):

            if (i >= 1000):
                pos = None
                sendWs(
                    msg('get_open_orders_by_instrument',
                        {"instrument_name": activeInst}, f"op-{trid}"))
                trid += 1
                sendWs(
                    msg('get_position', {"instrument_name": activeInst},
                        f"po-{trid}"))
                trid += 1
                sendWs(
                    msg("get_account_summary", {"currency": "BTC"},
                        f"acc-{trid}"))
                trid += 1

                i = 0
            i += 1
            screensize = stdscr.getmaxyx()
            if (oldScreenSize != screensize):
                stdscr.erase()
            key = stdscr.getch()
            poswinWidth = screensize[1] - screensize[1] // 4
            tradewinWidth = screensize[1] - screensize[1] // 4 - 40
            priswin = stdscr.derwin(height, width, begin_y, begin_x)

            ordwin = stdscr.derwin(screensize[0], screensize[1] // 4, 0,
                                   screensize[1] - screensize[1] // 4)
            tradewin = stdscr.derwin(screensize[0] - 10, tradewinWidth, 0, 40)
            posWin = stdscr.derwin(10, poswinWidth, screensize[0] - 10, 0)
            posWin.box()

            hh = 1
            y = 0
            # wsAlive = t.isAlive()

            # set order price and contracts
            ticksaway = uInput(key, inputs['closerToMarket'], ticksaway, -1)
            ticksaway = uInput(key, inputs['furtherFromMarket'], ticksaway, 1)
            quantity = uInput(key, inputs['lessCorntracts'], quantity, -1)
            quantity = uInput(key, inputs['moreCorntracts'], quantity, 1)

            # if(key == ord(inputs['vimMode'])):
            #     t2 = threading.Thread(target=timedinput, args=[stdscr])
            #     t2.start()

            if (key == ord(inputs['cancelAll'])):
                client.cancelall('all')
                ordwin.erase()

            if (ticksaway == 0):
                post = False
            else:
                post = True
            ticksaway = ikkenull(ticksaway, 0)
            quantity = ikkenull(quantity, 1)

            if (key == ord(inputs['buy'])):
                bb = rj.jsonget('obj',
                                Path('.ticker[-1]["data"]["best_bid_price"]'))
                sendWs(buySell('market', bb - ticksaway, 'buy'))
            if (key == ord(inputs['sell'])):
                ba = rj.jsonget('obj',
                                Path('.ticker[+1]["data"]["best_ask_price"]'))
                sendWs(buySell('market', ba - ticksaway, 'sell'))

            try:
                ordwin.erase()
                ordwin.box()
                ordwin.addstr(1, 2, 'orders')
                ords = rj.jsonget('obj', Path('.orders'))

                for it in ords['result']:
                    hh += 1
                    ordwin.addstr(
                        0 + hh, 2,
                        str(' {direction} : {amount} : {price}'.format(**it)))
            except:
                ordwin.erase()
                ordwin.box()
                ordwin.addstr(2, 10, str(threading.active_count()))

            try:
                priswin.erase()
                priswin.box()
                # priswin.addstr(1 , 1, activeInst)
                # priswin.addstr(1 , 20, str(threading.active_count()))
                # priswin.addstr(6 ,10 , str(wsdata.inputstr))
                priswin.addstr(
                    4, 1,
                    str(
                        rj.jsonget('obj',
                                   Path('.ticker[-1]["data"]["last_price"]'))),
                    curses.color_pair(3))

            except:
                priswin.addstr(1, 20, 'Nih')

            posWin.addstr(1, 2, 'positions')
            try:
                acc = rj.jsonget('obj', Path('.acc'))
                posWin.addstr(
                    1, poswinWidth // 2,
                    "eq: {equity} af: {available_funds}".format(
                        **acc['result']))

            except:
                posWin.addstr(1, poswinWidth // 2, 'Ohnononono')

            try:
                pos = rj.jsonget('obj', Path('.pos'))
                # for po in pos:
                y += 1
                # posWin.addstr(2 + y, 2, "{instrument} : {direction} : {size} : ".format(**po))
                if (pos['result']['total_profit_loss'] > 0):
                    posWin.addstr(2 + y, 10, "{size}".format(**pos['result']))
                    posWin.addstr(
                        2 + y, 30,
                        "{total_profit_loss}".format(**pos['result']),
                        curses.color_pair(3))
                else:
                    posWin.addstr(
                        2 + y, 30,
                        "{total_profit_loss}".format(**pos['result']),
                        curses.color_pair(2))
            except:
                posWin.addstr(2, 2, 'No open positions')

            try:

                tradewin.erase()
                tradewin.box()
                tr = rj.jsonget('obj', Path('.trades'))
                for trade in tr[::-1]:
                    tradewin.addstr(
                        1 + tpos, 2,
                        "{trade_id}:{amount} : {price} : {direction} {tick_direction}"
                        .format(**trade))
                    tpos += 1
                    if (tpos > screensize[0] - 13):
                        break
                tpos = 0
            except:
                tradewin.erase()
                tradewin.box()
                tradewin.addstr(2, 2, 'Nono Trada')

            priswin.addstr(
                3, 1,
                str(ticksaway) + ' : ' + str(quantity) + ' contracts')
            oldScreenSize = stdscr.getmaxyx()
            sleep(0.01)
            stdscr.refresh()
            if key == 120:
                running = False
Example #13
class Hub(object):
    dconn = None  # document store connection
    sconn = None  # search index connection
    qconn = None  # queue connection
    gh = None
    autocomplete = None
    repo = None
    _ts = None
    _hubkey = 'hub:catalog'
    _ixname = 'ix'
    _acname = 'ac'

    def __init__(self,
                 ghlogin_or_token=None,
                 docs_url=None,
                 search_url=None,
                 queue_url=None,
                 repo=None):
        timestamp = datetime.utcnow()
        logger.info('Initializing temporary hub {}'.format(timestamp))

        if ghlogin_or_token:
            self.gh = Github(ghlogin_or_token)
        elif 'GITHUB_TOKEN' in os.environ:
            self.gh = Github(os.environ['GITHUB_TOKEN'])
        else:
            logger.info("Env var 'GITHUB_TOKEN' not found")

        if docs_url:
            pass
        elif 'DOCS_REDIS_URL' in os.environ:
            docs_url = os.environ['DOCS_REDIS_URL']
        else:
            logger.critical('No Redis for document storage... bye bye.')
            raise RuntimeError('No Redis for document storage... bye bye.')
        self.dconn = ReJSONClient().from_url(docs_url)

        if search_url:
            pass
        elif 'SEARCH_REDIS_URL' in os.environ:
            search_url = os.environ['SEARCH_REDIS_URL']
        else:
            search_url = docs_url
        conn = Redis(connection_pool=ConnectionPool().from_url(search_url))
        self.sconn = RediSearchClient(self._ixname, conn=conn)
        self.autocomplete = AutoCompleter(self._acname, conn=conn)

        if queue_url:
            pass
        elif 'QUEUE_REDIS_URL' in os.environ:
            queue_url = os.environ['QUEUE_REDIS_URL']
        else:
            queue_url = docs_url
        self.qconn = StrictRedis.from_url(queue_url)

        if repo:
            pass
        elif 'REDISMODULES_REPO' in os.environ:
            repo = os.environ['REDISMODULES_REPO']
        else:
            logger.critical('No REDISMODULES_REPO... bye bye.')
            raise RuntimeError('No REDISMODULES_REPO... bye bye.')
        self.repo = repo

        # Check if hub exists
        if self.dconn.exists(self._hubkey):
            self._ts = datetime.fromtimestamp(
                float(self.dconn.jsonget(self._hubkey, Path('.created'))))
            logger.info('Latching to hub {}'.format(self._ts))
        else:
            self._ts = timestamp
            logger.info('Creating hub {}'.format(self._ts))
            self.createHub()
            self.addModulesRepo(self.repo)

    def get_repo_url(self):
        return 'https://github.com/{}'.format(self.repo)

    def createHub(self):
        logger.info('Creating the hub in the database {}'.format(self._ts))
        # Store the master modules catalog as an object
        self.dconn.jsonset(
            self._hubkey, Path.rootPath(), {
                'created': str(_toepoch(self._ts)),
                'modules': {},
                'submissions': [],
                'submit_enabled': False
            })

        # Create a RediSearch index for the modules
        # TODO: catch errors
        self.sconn.create_index(
            (TextField('name', sortable=True), TextField('description'),
             NumericField('stargazers_count', sortable=True),
             NumericField('forks_count', sortable=True),
             NumericField('last_modified', sortable=True)),
            stopwords=stopwords)

    def deleteHub(self):
        # TODO
        pass

    def addModule(self, mod):
        logger.info('Adding module to hub {}'.format(mod['name']))
        # Store the module object as a document
        m = RedisModule(self.dconn, self.sconn, self.autocomplete, mod['name'])
        m.save(mod)

        # Add a reference to it in the master catalog
        self.dconn.jsonset(
            self._hubkey, Path('.modules["{}"]'.format(m.get_id())), {
                'id': m.get_id(),
                'key': m.get_key(),
                'created': str(_toepoch(self._ts)),
            })

        # Schedule a job to refresh repository statistics, starting from now and every hour
        s = Scheduler(connection=self.qconn)
        job = s.schedule(
            scheduled_time=datetime(1970, 1, 1),
            func=callRedisModuleUpateStats,
            args=[m.get_id()],
            interval=60 * 60,  # every hour
            repeat=None,  # indefinitely
            ttl=0,
            result_ttl=0)
        return m

    """
    Adds modules to the hub from a local directory
    TODO: deprecate asap
    """

    def addModulesPath(self, path):
        logger.info('Loading modules from local path {}'.format(path))
        # Iterate module JSON files
        for filename in os.listdir(path):
            if filename.endswith(".json"):
                with open('{}/{}'.format(path, filename)) as fp:
                    mod = json.load(fp)

                m = self.addModule(mod)

    """
    Adds a module to the hub from a GitHub repository
    """

    def addModulesRepo(self, name, path='/modules/'):
        # TODO: check for success
        q = Queue(connection=self.qconn)
        q.enqueue(callLoadModulesFromRepo, name, path)

    def loadModulesFromRepo(self, name, path):
        logger.info('Loading modules from Github {} {}'.format(name, path))
        # TODO: error handling, sometimes not all contents are imported?
        repo = self.gh.get_repo(name)
        files = repo.get_dir_contents(path)
        for f in files:
            mod = json.loads(f.decoded_content)
            m = self.addModule(mod)

    """
    Submits a module to the hub
    """

    def submitModule(self, repo_id, **kwargs):
        logger.info('Module submitted to hub {}'.format(repo_id))
        repo_id = repo_id.lower()
        ts = datetime.utcnow()
        res = {'id': repo_id, 'status': 'failed'}

        if not self.dconn.jsonget(self._hubkey, Path('submit_enabled')):
            res['message'] = 'Module submission is currently disabled'
            return res

        # Check if the module is already listed
        m = RedisModule(self.dconn, self.sconn, self.autocomplete, repo_id)
        if m.exists:
            # TODO: return in search results
            res['message'] = 'Module already listed in the hub'
            return res

        # Check if there's an active submission, or if the failure was too recent
        submission = Submission(self.dconn, repo_id)
        if submission.exists:
            status = submission.status
            if status != 'failed':
                res['status'] = 'active'
                res['message'] = 'Active submission found for module'
                return res
            else:
                # TODO: handle failed submissions
                res['message'] = 'Module already submitted to the hub and had failed, please reset manually for now'
                return res

        # Store the new submission
        submission.save(**kwargs)

        # Record the submission in the catalog
        # TODO: find a good use for that, e.g. 5 last submissions
        self.dconn.jsonarrappend(self._hubkey, Path('.submissions'), {
            'id': submission.get_id(),
            'created': submission.created,
        })

        # Add a job to process the submission
        q = Queue(connection=self.qconn)
        job = q.enqueue(callProcessSubmission, submission.get_id())
        if job is None:
            res['message'] = 'Submission job could not be created'
            # TODO: design retry path
            logger.error(
                'Could not create submission processing job for {}'.format(
                    submission.get_id()))
        else:
            res['status'] = 'queued'
            submission.status = res['status']
            submission.job = job.id

        return res

    def viewSubmissionStatus(self, repo_id):
        submission = Submission(self.dconn, repo_id)
        if submission.exists:
            res = {
                'id': submission.get_id(),
                'status': submission.status,
                'message': submission.message,
            }
            if 'finished' == res['status']:
                res['pull_number'] = submission.pull_number
                res['pull_url'] = submission.pull_url
            return res

    def processSubmission(self, repo_id):
        logger.info('Processing submission for {}'.format(repo_id))
        submission = Submission(self.dconn, repo_id)
        if submission.exists:
            return submission.process(self.gh, self.repo)

    def viewModules(self, query=None, sort=None):
        if not query:
            # Use a purely negative query to get all modules
            query = '-etaoinshrdlu'
        q = Query(query).no_content().paging(0, 1000)
        if sort:
            if sort == 'relevance':
                pass
            elif sort == 'update':
                q.sort_by('last_modified')
            elif sort == 'stars':
                q.sort_by('stargazers_count', asc=False)
            elif sort == 'forks':
                q.sort_by('forks_count', asc=False)
            elif sort == 'name':
                q.sort_by('name')

        results = self.sconn.search(q)
        mods = []
        fetch_duration = 0
        # TODO: this should be pipelined
        for doc in results.docs:
            m = RedisModule(self.dconn, self.sconn, self.autocomplete, doc.id)
            res, duration = _durationms(m.to_dict)
            mods.append(res)
            fetch_duration += duration

        return {
            'results': results.total,
            'search_duration': '{:.3f}'.format(results.duration),
            'fetch_duration': '{:.3f}'.format(fetch_duration),
            'total_duration':
            '{:.3f}'.format(fetch_duration + results.duration),
            'modules': mods,
        }

    def viewSearchSuggestions(self, prefix):
        suggestions = self.autocomplete.get_suggestions(prefix)
        return [s.string for s in suggestions]
Example #14
from datetime import datetime
from pprint import pprint
from rejson import Client, Path

rj = Client(host='localhost', port=6379, decode_responses=True)

# Get single key:value pair
pprint(rj.jsonget("redis_club_urls:item73", Path.rootPath()))

# Convert timestamp from iso to datetime
timestamp = rj.jsonget("redis_club_urls:item73",
                       Path.rootPath())['last_modified']
timestamp = datetime.fromisoformat(timestamp)

# Get all keys
print(rj.keys())

# Get all values matching pattern from keys
for key in rj.scan_iter("redis_club_urls:item:*"):
    print(rj.jsonget(key, Path('.club_page')))

# Add all values matching pattern to new key
for key in rj.scan_iter("redis_club_urls:item:*"):
    val = rj.jsonget(key, Path('.club_page'))
    rj.sadd('all_urls', val)
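
Where many keys expose the same path, the per-key jsonget loop above can usually be collapsed into a single round trip. A sketch using rejson's jsonmget (JSON.MGET), under the same assumptions as the snippet above:

keys = list(rj.scan_iter("redis_club_urls:item:*"))
if keys:
    print(rj.jsonmget(Path('.club_page'), *keys))
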
Example #15
cache_limit = 5

#connect to db
client = MongoClient('localhost', 27017)
db = client.mydb

#flask settings
app = Flask(__name__)  # initialize the flask app
cors = CORS(app)
app.config['CORS_HEADERS'] = 'Content-Type'
app.config["JWT_SECRET_KEY"] = "vikramflaskjwttoken"
app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False
JWTManager(app)

# Check whether the array of JSON tweets is already present in the queue; if not, set it to []
arr = rj_queue.jsonget('tweets', Path.rootPath())
if arr is not None:
    print("array is already present in queue with length: {}".format(len(arr)))
    print(arr)
else:
    print("initializing an empty array in rj_queue")
    rj_queue.jsonset('tweets', Path.rootPath(), [])

# Keep the list of most recent tweets in the queue under the key 'tweet_ids'
arr_tweet_ids = rj_queue.jsonget('tweet_ids', Path.rootPath())
if arr_tweet_ids is not None:
    print("Array of tweets is already present in the cache with length: {}".
          format(len(arr_tweet_ids)))
else:
    print("initializing an empty array tweet_ids in the rj_queue")
    rj_queue.jsonset('tweet_ids', Path.rootPath(), [])
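
The existence checks above could also be expressed with jsonset's nx flag (set only if the key does not already exist), as used in a later example. A brief sketch against the same rj_queue client:

rj_queue.jsonset('tweets', Path.rootPath(), [], nx=True)
rj_queue.jsonset('tweet_ids', Path.rootPath(), [], nx=True)
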
Example #16
import firmware
import json
from common import *
from rejson import Client, Path
rj = Client(host='localhost', port=6379, decode_responses=True)

print("Current timestamp", get_current_timestamp())
all_pairs = rj.jsonget("pairs").values()
all_pair_address = rj.jsonget("pairs").keys()
print(all_pairs)
analysis = firmware.Analysis(all_pairs)
analysis.scan_cycles()
from rejson import Client, Path
from datetime import timedelta
from timeloop import Timeloop

# start timeloop
tl = Timeloop()
# connect to queue
rj_queue = Client(host='localhost', port=1111, decode_responses=True)
#Connect to cache
rj_cache = Client(host='localhost', port=1112, decode_responses=True)
#Connect to queue backend
rj_queue_mongodb = Client(host='localhost', port=1113, decode_responses=True)

# Check whether the array of JSON tweets is already present in the queue backend; if not, set it to []
arr_queue_mongodb = rj_queue_mongodb.jsonget('tweets', Path.rootPath())
if arr_queue_mongodb is not None:
    print(
        "Array of tweets is already present in queue_backend with length: {}".
        format(len(arr_queue_mongodb)))
else:
    print("initializing an empty array tweets in the rj_queue_backend")
    rj_queue_mongodb.jsonset('tweets', Path.rootPath(), [])

cache_limit = 5


# Polls data from the queue, applies business logic, and sends it to the cache and the backend queue
@tl.job(interval=timedelta(seconds=2))
def send_to_cache_and_backend_queue():

    current_cache_size = len(rj_cache.keys())
Example #18
# Set the key `obj` to some object
obj = {
    'answer': 42,
    'arr': [None, True, 3.14],
    'truth': {
        'coord': 'out there'
    }
}

jsondata = json.dumps(obj)

rj.jsonset('obj', Path('A2AA'), obj)

# Get something
temp = rj.jsonget('obj', Path('A2AA.truth.coord'))

print (f'Is there anybody... {temp}?')

# Delete something (or perhaps nothing), append something and pop it
rj.jsondel('obj', Path('.arr[0]'))
rj.jsonarrappend('obj', Path('.arr'), 'something')
popped = rj.jsonarrpop('obj', Path('.arr'))
print(f'{popped} popped!')

# Update something else
rj.jsonset('obj', Path('.answer'), 2.17)

# And use just like the regular redis-py client
jp = rj.pipeline()
jp.set('foo', 'bar')
Example #19
class StoreAndQuery():
    ''' read a json and store it into redis and then query '''
    def __init__(self):
        self.tests = ['ru.4', '5k4', 'u;4', 'ji3', '5', '2l4',
                      'xk7']  # no need bracket here
        # the redis server
        self.rj = Client(host='localhost', port=6379, decode_responses=True)
        # phonetic table, radical to han characters
        self.fn = 'phone.json'
        self.objname = 'obj'
        self.data = None
        self.check_and_store()

    def check_and_store(self):
        ''' if no data here, read json and store it '''
        try:
            res = self.test_one_query('284', show=False)
            if res is None:
                print('[INFO] no data stored? read and store it')
                self.data = read_jsonfile(self.fn)
                self.rj.jsonset(self.objname, Path.rootPath(), self.data)
                print(f'[INFO] read {self.fn} and store as {self.objname}')
        except redis.exceptions.ConnectionError as e:
            print('[ERROR] cannot connect to redis server:\n', e)
            print('\nYou need to start the redis/rejson service first.')
            sys.exit(1)

    @staticmethod
    def transcode(ans: List) -> List:
        ''' Transcode: rejson returns each han character (3-byte UTF-8)
            split into three separate unicode chars (there is no real UTF-8
            decode, the bytes are just mapped one-to-one), so combine them
            back here.

            For example, "中" in UTF-8 is "e4 b8 ad"; queried out of Redis it
            comes back as "\u00e4 \u00b8 \u00ad".
        '''
        u8ans = []
        for m in ans:
            ansi = m.encode('ISO8859-1')  # m is str, ansi is bytes
            s = ansi.decode('UTF-8')  # s is str in correct unicode char
            u8ans.append(s)
        return u8ans

    @staticmethod
    def show_result(res: List) -> None:
        ''' show only 10 results '''
        break_flag = False
        for i, s in enumerate(res):
            if i > 9:
                print('...', end=' ')
                break_flag = True
                break
            print(s, end=' ')
        if break_flag:
            print(res[-1])
        else:
            print()

    def test_one_query(self, key, show=True) -> List:
        ''' test one query
        127.0.0.1:6379> json.get  obj  noescape "zul4"
        "[\"\xe8\xa6\x85\",\"\xf0\xa1\xa0\x8d\"]"
        '''
        qkey = '["' + key + '"]'  # like ["su;6"]
        try:
            r = self.rj.jsonget(self.objname, Path(qkey))
            if r:
                if show:
                    print(key)
                res = self.transcode(r)
                if len(res) and show:
                    self.show_result(res)
                return res
            else:
                return None
        except redis.exceptions.ResponseError as e:
            print(e)

    def test_query(self) -> None:
        ''' test redis json '''
        for v in tests.vals:
            self.test_one_query(v)

    def test_json(self) -> None:
        ''' test json obj '''
        for v in tests.vals:
            try:
                print(self.data[v])
            except KeyError as e:
                print('KeyError: ', e)

    def action(self) -> None:
        ''' action '''
        self.test_query()
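
A standalone illustration of the transcode() workaround above: a 3-byte UTF-8 character that comes back as three Latin-1 code points can be repaired by re-encoding as ISO8859-1 and decoding as UTF-8 (no Redis needed for this demo):

mangled = '中'.encode('UTF-8').decode('ISO8859-1')   # three separate chars: '\xe4', '\xb8', '\xad'
print(mangled.encode('ISO8859-1').decode('UTF-8'))   # 中
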
Example #20
class RejsonDb(KeyValueStorage):
    def __init__(self, conf):
        """
        arguments:
        conf -- a dictionary containing 'settings' module compatible configuration of the plug-in
        """
        self._host = conf['host']
        self._port = int(conf['port'])
        self._db = int(conf['id'])
        self.redis = Client(host=self._host,
                            port=self._port,
                            db=self._db,
                            decode_responses=True)
        self._scan_chunk_size = 50

        try:
            self.redis.jsonget('-')
        except ResponseError as e:
            if 'unknown command' in str(e):
                logging.fatal(
                    "Rejson DB Plug-in requires Redis with RedisJSON module enabled"
                )
            else:
                raise e

    def rename(self, key, new_key):
        return self.redis.rename(key, new_key)

    def list_get(self, key, from_idx=0, to_idx=-1):
        """
        Returns a stored list. If there is a non-list value stored with the passed key
        then TypeError is raised.

        arguments:
        key -- data access key
        from_idx -- optional start index
        to_idx -- optional (default is -1) end index (including, i.e. unlike Python);
        negative values are supported (-1 = last, -2 = penultimate,...)
        """
        data = self.get(key, [])
        if isinstance(data, list):
            if to_idx == -1:
                return data[from_idx:]
            return data[from_idx:to_idx + 1]
        raise TypeError('Object is not a list')

    def list_append(self, key, value):
        """
        Add a value at the end of a list

        arguments:
        key -- data access key
        value -- value to be pushed
        """
        if not self.exists(key):
            self.set(key, [])
        self.redis.jsonarrappend(key, Path.rootPath(), value)

    def list_pop(self, key):
        """
        Removes and returns the first element of the list stored at key.

        arguments:
        key -- list access key
        """
        return self.redis.jsonarrpop(key)

    def list_len(self, key):
        """
        Returns length of a list. If there is a non-list value stored with the passed key
        then TypeError is raised.

        arguments:
        key -- data access key
        """
        if not self.exists(key):
            return 0
        return self.redis.jsonarrlen(key)

    def list_set(self, key, idx, value):
        """
        Sets the list element at index to value

        arguments:
        key -- list access key
        idx -- a zero based index where the set should be performed
        value -- a JSON-serializable value to be inserted
        """
        # TODO the operation pair should be atomic to avoid possible race conditions
        self.redis.jsonarrpop(key, Path.rootPath(), idx)
        return self.redis.jsonarrinsert(key, Path.rootPath(), idx, value)

    def list_trim(self, key, keep_left, keep_right):
        """
        Trims the list from the beginning to keep_left - 1 and from keep_right to the end.
        The function does not return anything.

        arguments:
        key -- data access key
        keep_left -- the first value to be kept
        keep_right -- the last value to be kept
        """
        self.redis.jsonarrtrim(key, Path.rootPath(), keep_left, keep_right)

    def hash_get(self, key, field):
        """
        Gets a value from a hash table stored under the passed key

        arguments:
        key -- data access key
        field -- hash table entry key
        """
        if self.redis.jsontype(key, Path(f'["{field}"]')) is None:
            return None
        return self.redis.jsonget(key, Path(f'["{field}"]'), no_escape=True)

    def hash_set(self, key, field, value):
        """
        Puts a value into a hash table stored under the passed key

        arguments:
        key -- data access key
        field -- hash table entry key
        value -- a value to be stored
        """
        if not self.exists(key):
            self.set(key, {})
        self.redis.jsonset(key, Path(f'["{field}"]'), value)

    def hash_del(self, key, field):
        """
        Removes a field from a hash item

        arguments:
        key -- hash item access key
        field -- the field to be deleted
        """
        self.redis.jsondel(key, Path(f'["{field}"]'))

    def hash_get_all(self, key):
        """
        Returns a complete hash object (= Python dict) stored under the passed
        key. If the provided key is not present then an empty dict is returned.

        arguments:
        key -- data access key
        """
        return self.get(key)

    def get(self, key, default=None):
        """
        Gets a value stored with passed key and returns its JSON decoded form.

        arguments:
        key -- data access key
        default -- a value to be returned in case there is no such key
        """
        data = self.redis.jsonget(key, Path.rootPath(), no_escape=True)
        if data is None:
            return default
        return data

    def set(self, key, data):
        """
        Saves 'data' with 'key'.

        arguments:
        key -- an access key
        data -- a dictionary containing data to be saved
        """
        self.redis.jsonset(key, Path.rootPath(), data)

    def set_ttl(self, key, ttl):
        """
        Set auto expiration timeout in seconds.

        arguments:
        key -- data access key
        ttl -- number of seconds to wait before the value is removed
        (please note that update actions reset the timer to zero)
        """
        self.redis.expire(key, ttl)

    def get_ttl(self, key):
        return self.redis.ttl(key)

    def clear_ttl(self, key):
        self.redis.persist(key)

    def remove(self, key):
        """
        Removes a value specified by a key

        arguments:
        key -- key of the data to be removed
        """
        self.redis.jsondel(key)

    def exists(self, key):
        """
        Tests whether there is a value with the specified key

        arguments:
        key -- the key to be tested

        returns:
        boolean value
        """
        return self.redis.exists(key)

    def setnx(self, key, value):
        """
        An atomic operation "set if not exists".

        returns:
        1 if the key was set
        0 if the key was not set
        """
        return self.redis.jsonset(key, Path.rootPath(), value, nx=True)

    def getset(self, key, value):
        """
        An atomic operation which obtains current key first and then
        sets a new value under that key

        returns:
        previous key if any or None
        """
        data = self.get(key)
        self.set(key, value)
        return data

    def incr(self, key, amount=1):
        """
        Increments the value of 'key' by 'amount'.  If no key exists,
        the value will be initialized as 'amount'
        """
        if not self.exists(key):
            self.set(key, 0)
        return self.redis.jsonnumincrby(key, Path.rootPath(), amount)

    def hash_set_map(self, key, mapping):
        """
        Set key to value within hash 'name' for each corresponding
        key and value from the 'mapping' dict.
        Before setting, the values are json-serialized
        """
        return self.set(key, mapping)
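
A brief usage sketch of the RejsonDb adapter above; the conf values and keys are assumptions for a local RedisJSON instance, and KeyValueStorage is assumed to add no extra constructor requirements:

db = RejsonDb({'host': 'localhost', 'port': 6379, 'id': 0})
db.set('session:1', {'user': 'ada'})
db.hash_set('session:1', 'lang', 'en')
print(db.hash_get('session:1', 'lang'))   # 'en'
db.list_append('queue:1', 'job-1')
print(db.list_len('queue:1'))             # 1
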
class ReJson:
    """Facade for ReJson"""
    def __init__(self, host: str, port: Union[str, int]) -> None:
        """Instantiate a connection to ReJson.

        :param host: The hostname/ip of the Redis instance.
        :type host: str
        :param port: The port of the Redis instance.
        :type port: int
        """
        self._client = Client(host=host, port=port, decode_responses=True)

    def keys(self) -> Json:
        """Get all keys"""
        return self._client.keys()

    def post(self, key: str, obj: Json) -> None:
        """Post a new Json object to the store.

        :param key: The key to store the Json at.
        :type key: str
        :param obj: What to store.
        :type obj: Json
        """
        self._client.jsonset(key, Path.rootPath(), obj)

    def get(self, key: str) -> Json:
        """Get the Json object stored at `key`.

        :param key: The key that the Json object was stored at.
        :type key: str
        :return: The Json stored at `key`.
        :rtype: Json
        """
        return self._client.jsonget(key, Path.rootPath())

    def update(self, key: str, path: str, value: Json) -> None:
        """Update a value at a path within the Json object stored at `key`.

        :param key: The key that the Json object was stored at.
        :type key: str
        :param path: A period separated string of keys to traverse the Json.
        :type path: str
        :param value: The new value.
        :type value: Json
        """
        self._client.jsonset(key, Path(f".{path}"), value)

    def append(self, key: str, path: str, *values: Json) -> None:
        """Append to some array within a Json object.

        :param key: The key that the Json object was stored at.
        :type key: str
        :param path: A period separated string of keys to traverse the Json.
        :type path: str
        """
        self._client.jsonarrappend(key, Path(f".{path}"), *values)

    def pop(self, key: str, path: str) -> Json:
        """Pop an element from an array within a Json object.

        :param key: The key that the Json object was stored at.
        :type key: str
        :param path: A period separated string of keys to traverse the Json.
        :type path: str
        :return: The Json value popped from the array.
        :rtype: Json
        """
        return self._client.jsonarrpop(key, f".{path}")

    def remove(self, key: str, path: str, value: Json) -> None:
        """Remove something from some array within a Json object.
        
        :param key: The key that the Json object was stored at.
        :type key: str
        :param path: A period separated string of keys to traverse the Json.
        :type path: str
        :param value: The value to remove from the array.
        :type value: Json
        """
        index = self._client.jsonarrindex(key, f".{path}", value)
        self._client.jsondel(key, f"{path}[{index}]")
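
A short usage sketch of the ReJson facade above; the host/port, key and field names are assumptions:

store = ReJson(host='localhost', port=6379)
store.post('user:1', {'name': 'Ada', 'tags': []})
store.update('user:1', 'name', 'Ada Lovelace')
store.append('user:1', 'tags', 'admin')
print(store.get('user:1'))   # e.g. {'name': 'Ada Lovelace', 'tags': ['admin']}
store.remove('user:1', 'tags', 'admin')
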
rj = Client(host='localhost', password='******')

# parse out the JSON file from the command line arguments
parser = argparse.ArgumentParser()
parser.add_argument('--json',
                    nargs=1,
                    help="JSON file",
                    type=argparse.FileType('r'))
arguments = parser.parse_args()

# write JSON with REJSON
rj.jsonset('some_json', Path.rootPath(), json.loads(arguments.json[0].read()))

# get a single (string) value from the JSON
value_of_myjson = rj.jsonget('some_json', Path('myjson'))
print('myjson in some_json: {}'.format(value_of_myjson))

# create an array
rj.jsonset('some_json', Path('secondary.spanishNumbers'), ["uno"])

# Append some values to the Array
rj.jsonarrappend('some_json', Path('secondary.spanishNumbers'), "tres",
                 "cuatro")

# insert a value to the Array
rj.jsonarrinsert('some_json', Path('secondary.spanishNumbers'), '1', 'dos')

# get the array back
value_of_spanishNumbers = rj.jsonget('some_json',
                                     Path('secondary.spanishNumbers'))
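
For reference, after the operations above the array should read back with 'dos' inserted between the initial 'uno' and the appended 'tres'/'cuatro':

print(value_of_spanishNumbers)   # expected: ['uno', 'dos', 'tres', 'cuatro']
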
Example #23
class RedisDriver(DatabaseDriver):

    _host = "localhost"
    _port = 6379
    _client = None

    def __init__(self, host: str = "localhost", port: int = 6379):
        self._host = host
        self._port = port
        self._client = Client(host=host,
                              port=port,
                              decode_responses=True,
                              encoder=JSONSchemaObject.JSONSchemaEncoder())

    def find_by_ref(self, ref: str):

        return self._client.jsonget(ref)

    def find_id_by(self, idx: str, value: str, version: str):

        result = []
        for member in self._client.smembers("{}:{}".format(idx, value)):

            if version == "all":
                result.append(member)
                continue

            # we split the index to check against the version
            idxs = str(member).split(":")

            # the _version is the second token of idxs
            if idxs[1] == version:
                result.append(member)

        return result

    def save(self, obj_list: list, indexed_attrs: list):

        # The first pass just verifies that there is no
        # index integrity violation
        for obj in indexed_attrs:

            # We store neither _id nor _version
            if obj[1] == "_id" or obj[1] == "_version":
                continue

            if obj[2] is None or obj[2] == "":
                raise ValueError("Indexed value {} must not be empty".format(
                    obj[1]))

            # the indexed is composed by schema path:indexes:attr_name
            indexed_key = store_name = "{}:indexes:{}:{}".format(
                obj[0], obj[1], obj[2])

            # we already have this key; get any value and make
            # sure it belongs to the same id
            for member in self._client.smembers(indexed_key):
                # we only need to use one element since the _id MUST be equal
                idxs = str(member).split(":")

                # the _id is the first token of idxs; check if we received the same
                # id, and if not this is an index violation
                if not str(obj[3]).startswith(idxs[0]):
                    raise ValueError(
                        "{}:{} not unique, another object already have that value"
                        .format(obj[1], obj[2]))

                # we just need one iteration
                break

        # this cycle we just store the indexes
        for obj in indexed_attrs:

            if obj[2] is None or obj[2] == "" or obj[1] == "_id" or obj[
                    1] == "_version":
                continue

            # Set the store name and store data
            store_name = "{}:indexes:{}:{}".format(obj[0], obj[1], obj[2])
            store_data = obj[3]
            self._client.sadd(store_name, store_data)

        # We now store the actual objects, and return the added ids
        ids = []
        for obj in obj_list:

            # Set the store name and store data
            store_name = "{}:{}".format(obj[0], obj[1])
            store_data = obj[2]
            self._client.jsonset(store_name, Path.rootPath(), store_data)
            ids.append(obj[1])

        return ids
class Csv2Redis:
    def __init__(self, filename: str):
        self.rj = Client(host=os.environ.get("REDIS_HOST", "localhost"),
                         decode_responses=True)
        self.filename = filename
        # Hardcoded data we need for 2020 election candidates
        self.candidate_ids = {
            "David Cohen": "3h2g45h3j",
            'Jacob "Jake" Tonkel': "089wegvb7",
            "Dev Davis": "456hjkl2l",
            "Lan Diep": "cf90g8cii",
        }
        self.referendums = {
            "df7g8y6d8": {
                "id":
                "df7g8y6d8",
                "electionDate":
                "2020-11-03",
                "name":
                "Measure G",
                "description":
                "Charter Amendment regarding Independent Police Auditor, Planning Commission, Redistricting",
                "ballotLanguage":
                "Shall the City Charter be amended to: expand the Independent Police Auditor’s oversight, including review of officer-involved shootings and use of force incidents causing death or great bodily injury, review of department-initiated investigations against officers, and other technical amendments; increase the Planning Commission to 11 members with Council appointing one member from each Council District and one “at-large” member; and allow the Council to establish timelines for redistricting when Census results are late?",
            },
            "35j6kh45m": {
                "id":
                "35j6kh45m",
                "electionDate":
                "2020-11-03",
                "name":
                "Measure H",
                "description":
                "Cardroom Tax",
                "ballotLanguage":
                "To fund general San Jose services, including fire protection, disaster preparedness, 911 emergency response, street repair, youth programs, addressing homelessness, and supporting vulnerable residents, shall an ordinance be adopted increasing the cardroom tax rate from 15% to 16.5%, applying the tax to third party providers at these rates: up to $25,000,000 at 5%; $25,000,001 to $30,000,000 at 7.5%; and over $30,000,000 at 10%, increasing card tables by 30, generating approximately $15,000,000 annually, until repealed?",
            },
        }
        self.extra_candidate_data = {
            "David Cohen": {
                "seat": "Councilmember District 4",
                "ballotDesignation":
                "Governing Board Member, Berryessa Union School District",
                "website": "www.electdavidcohen.com",
                "twitter": "electdavidcohen",
                "votersEdge":
                "http://votersedge.org/ca/en/election/2020-11-03/santa-clara-county/city-council-city-of-san-jose-district-4/David-Cohen",
                "profilePhoto": "",
            },
            "Lan Diep": {
                "seat": "Councilmember District 4",
                "ballotDesignation": "City Councilmember",
                "website": "www.lanforsanjose.com",
                "twitter": "ltdiep",
                "votersEdge":
                "http://votersedge.org/ca/en/election/2020-11-03/santa-clara-county/city-council-city-of-san-jose-district-4/Lan-Diep",
                "profilePhoto": "",
            },
            'Jacob "Jake" Tonkel': {
                "seat": "Councilmember District 6",
                "ballotDesignation": "Senior Biomedical Engineer",
                "website": "www.jake4d6.com",
                "twitter": "jake4d6",
                "votersEdge":
                "http://votersedge.org/ca/en/election/2020-11-03/santa-clara-county/city-council-city-of-san-jose-district-6/Jake-Tonkel",
                "profilePhoto": "",
            },
            "Dev Davis": {
                "seat": "Councilmember District 6",
                "ballotDesignation": "Councilwoman/Mother",
                "website": "www.devdavis.com",
                "twitter": "devdavisca",
                "votersEdge":
                'http://votersedge.org/ca/en/election/2020-11-03/santa-clara-county/city-council-city-of-san-jose-district-6/Devora-"Dev"-Davis',
                "profilePhoto": "",
            },
        }

    def read_data_sheet(self):
        # Read the clean csv file from the Google sheet. This won't work on the raw aggregated csv from the scraper.
        # Skip every other line. If the clean csv changes to every line, we need to update this as well.
        if not os.path.exists(self.filename):
            logger.warning(
                "{} does not exist. Please double check the file path.".format(
                    self.filename))
        if not os.path.isfile(self.filename):
            logger.warning(
                "Only process csv file. Please double check {} is a file.".
                format(self.filename))
        filetype, _ = mimetypes.guess_type(self.filename)
        if "csv" in filetype:
            self.data = pd.read_csv(
                self.filename,
                skiprows=lambda x: x % 2 == 1,
                sep=",",
                quotechar='"',
                encoding="iso-8859-1",
            )
        elif "spreadsheet" in filetype:
            logger.info("Reading plain text csv is faster and is encouraged.")
            self.data = pd.read_excel(
                self.filename,
                skiprows=lambda x: x % 2 == 1,
            )
        else:
            logger.warning("Only read csv and spreadsheet file for now.")
            return
        if self.data.shape[0] < 100:
            logger.info("{} only has {} rows.".format(self.filename,
                                                      self.data.shape[0]))
        # Add candidate ID column. candidate ID is <Ballot Item>;<CandidateControlledName>;<Election Date>
        self.data["Ballot Item"] = self.data["Ballot Item"].str.replace(
            "-", " ")
        self.data["ID"] = (
            self.data["Ballot Item"].str.replace(" ", "_") + ";" +
            self.data["CandidateControlledName"].str.replace(" ", "_") + ";" +
            self.data["Election Date"].map(str))
        # Round Amount to decimal 2
        self.data["Amount"] = (self.data["Amount"].map(str).str.replace(
            ",", "").replace("$",
                             "").replace("'",
                                         "").astype(float).round(decimals=2))
        self.metadata = str(
            datetime.fromtimestamp(os.path.getmtime(self.filename)))

    def get_ids(self, ids) -> list:
        """
        :param ids: list of strings in format <Ballot Item>;<CandidateControlledName>;<Election Date>
        :return: list of unique candidate ids from the candidate_ids dict
        """
        ret = []
        for id in ids:
            cand_name = id.split(";")[1].replace("_", " ")
            # If there are independent contributions, they won't be associated with a candidate.
            if cand_name in self.candidate_ids:
                ret.append(self.candidate_ids[cand_name])
        return ret

    def set_path_in_redis(self, path_name, data_shape):
        """
        :param path_name: str representing the name of our new path
        :param data_shape: dict/json of data we're inserting
        """
        with self.rj.pipeline() as pipe:
            pipe.jsonset(path_name, Path.rootPath(), data_shape)
            pipe.execute()
        logger.debug("The new shape set in redis is {}".format(
            self.rj.jsonget(path_name)))

    def set_referendums_shape_in_redis(self) -> None:
        """
        Set the referendums key in redis with the appropriate data (currently hardcoded)
        """
        try:
            self.set_path_in_redis(
                "referendums",
                {"Referendums": list(self.referendums.values())})
        except Exception as e:
            logger.debug(e)

    def set_metadata_shape_in_redis(self) -> None:
        """
        Set metadata key in redis with date last processed
        """
        try:
            self.set_path_in_redis("metadata",
                                   {"DateProcessed": self.metadata})
        except Exception as e:
            logger.debug(e)

    def setElectionShapeInRedis(self) -> bool:
        """
        Populate election shape into redis
        """
        data = self.data
        electionShape = {"Elections": {}}
        dataAmount = data[[
            "Ballot Item",
            "CandidateControlledName",
            "Election Date",
            "Entity_City",
            "Entity_ST",
            "Amount",
            "Rec_Type",
            "ID",
        ]]
        # There are 4 record types: RCPT, EXPN, LOAN, and S497.
        elections = {}
        for ed in dataAmount["Election Date"].unique():
            dataPerElectionDate = dataAmount[dataAmount["Election Date"] == ed]
            totalContributions = dataPerElectionDate[
                dataPerElectionDate["Rec_Type"] == "RCPT"]["Amount"].sum(
                ).round(decimals=2) + dataPerElectionDate[
                    dataPerElectionDate["Rec_Type"] ==
                    "LOAN"]["Amount"].sum().round(decimals=2)

            officeElections = []
            # Hardcoded from frontend data
            referendums = list(self.referendums.keys())
            for bi in dataPerElectionDate["Ballot Item"].unique():
                dataPerElectionDateAndBallotItem = dataPerElectionDate[
                    dataPerElectionDate["Ballot Item"] == bi]
                totalContributionsPerBallotItem = (
                    dataPerElectionDateAndBallotItem[
                        dataPerElectionDateAndBallotItem["Rec_Type"] == "RCPT"]
                    ["Amount"].sum().round(
                        decimals=2)) + dataPerElectionDateAndBallotItem[
                            dataPerElectionDateAndBallotItem["Rec_Type"] ==
                            "LOAN"]["Amount"].sum().round(decimals=2)
                if not "measure" in bi:
                    officeElections.append({
                        "Title":
                        bi,
                        "CandidateIDs":
                        self.get_ids(dataPerElectionDateAndBallotItem["ID"].
                                     unique().tolist()),
                        "TotalContributions":
                        totalContributionsPerBallotItem,
                    })
                else:
                    referendums.append({
                        "Title":
                        bi,
                        "Description":
                        bi,
                        "TotalContributions":
                        totalContributionsPerBallotItem,
                    })
            elections = {
                "Title": "{} Election Cycle".format(ed.split("/")[2]),
                "Date": ed,
                "TotalContributions": totalContributions,
                "FundingByGeo": self.getFundingByGeo(dataPerElectionDate),
                "OfficeElections": officeElections,
                "Referendums": referendums,
            }
            electionShape["Elections"][ed] = elections
            electionShape["Metadata"] = self.metadata
        self.set_path_in_redis("elections", electionShape)
        return True

    def setCandidateShapeInRedis(self, electionDate="11/3/2020") -> bool:
        """
        Populate candidate shape into redis
        Redis data spec
        Candidates: [{
            ID: "councilmember-district-6;dev-davis;11-3-2020",
            Name: "Dev Davis",
            TotalRCPT: 300,
            TotalLOAN: 100,
            TotalEXPN: 100,
            FundingByType: {
                IND: 300,
                COM: 100
            },
            FundingByGeo: {
                CA: 300,
                NonSJ: 200,
                SJ: 100
                NonCA: 0
            }
            ExpenditureByType: {
        # TODO: We could have populated candidates for all election date but right now the spec only asks for the current year.
        """
        # TODO: Understand how TotalFunding should be calculated; for now it is the sum of TotalRCPT and TotalLOAN below.
        data = self.data
        candidateShape = {"Candidates": []}
        dataAmount = data[[
            "Ballot Item",
            "CandidateControlledName",
            "Election Date",
            "Amount",
            "Rec_Type",
            "Entity_Cd",
            "Entity_Nam L",
            "Entity_Nam F",
            "Entity_City",
            "Entity_ST",
            "Expn_Code",
            "ID",
        ]]
        candidateIDs = pd.unique(dataAmount["ID"])

        for cid in candidateIDs:
            candidate = dict()
            name = cid.split(";")[1].replace("_", " ")
            # Skip rows for contributions to "independent" committees, which are not tied to a candidate.
            if name not in self.candidate_ids:
                continue
            candidate["ID"] = self.candidate_ids[name]
            candidate["Name"] = name
            candidate.update(self.extra_candidate_data[name])
            dataPerCandidate = dataAmount[
                (dataAmount["CandidateControlledName"] == name)
                & (dataAmount["Election Date"] == electionDate)]

            # Get transaction by type
            totalByRecType = (dataPerCandidate.groupby(
                ["Rec_Type"])[["Amount"]].sum().round(decimals=2).to_dict())
            if "RCPT" in totalByRecType["Amount"]:
                candidate["TotalRCPT"] = totalByRecType["Amount"]["RCPT"]
            if "EXPN" in totalByRecType["Amount"]:
                candidate["TotalEXPN"] = totalByRecType["Amount"]["EXPN"]
            if "LOAN" in totalByRecType["Amount"]:
                candidate["TotalLOAN"] = totalByRecType["Amount"]["LOAN"]
            if "S497" in totalByRecType["Amount"]:
                candidate["TotalS497"] = totalByRecType["Amount"]["S497"]
            candidate["TotalFunding"] = candidate["TotalRCPT"] + candidate[
                "TotalLOAN"]

            # Get funding by committee type
            recpDataPerCandidate = dataPerCandidate[
                dataPerCandidate["Rec_Type"].isin(["RCPT", "LOAN"])]
            totalByComType = (recpDataPerCandidate.groupby(
                ["Entity_Cd"])[["Amount"]].sum().round(decimals=2).to_dict())
            candidate["FundingByType"] = totalByComType["Amount"]

            # Get funding by geo
            candidate["FundingByGeo"] = self.getFundingByGeo(
                recpDataPerCandidate)

            # Get expenditure by type
            expnDataPerCandidate = dataPerCandidate[
                dataPerCandidate["Rec_Type"] == "EXPN"]
            totalByExpnType = (expnDataPerCandidate.groupby(
                ["Expn_Code"])[["Amount"]].sum().round(decimals=2).to_dict())
            candidate["ExpenditureByType"] = totalByExpnType["Amount"]

            # Get Committees
            totalByCommittees = (recpDataPerCandidate[
                recpDataPerCandidate["Entity_Cd"] == "COM"].groupby([
                    "Entity_Nam L"
                ])[["Amount"]].sum().round(decimals=2).to_dict())
            totalByCommitteesList = [{
                "Name":
                c,
                "TotalFunding":
                totalByCommittees["Amount"][c]
            } for c in totalByCommittees["Amount"]]
            candidate["Committees"] = totalByCommitteesList

            candidateShape["Candidates"].append(candidate)
            candidateShape["Metadata"] = self.metadata
            logger.debug(candidateShape)
        self.set_path_in_redis("candidates", candidateShape)
        return True

    def getFundingByGeo(self, data):
        """
        Get total funding by GEO.

        :param data: A filtered Pandas table with Entity_City and Entity_ST columns populated.
        :return: Geo funding of the shape:
            {
                CA: 300,
                NonSJ: 200,
                SJ: 100,
                NonCA: 0
            }
        """
        totalByGeoSJ = (
            data[data["Entity_City"] == "San Jose"]["Amount"].sum().round(
                decimals=2))
        totalByGeoNonSJ = (
            data[data["Entity_City"] != "San Jose"]["Amount"].sum().round(
                decimals=2))

        totalByGeoCA = data[data["Entity_ST"] == "CA"]["Amount"].sum().round(
            decimals=2)
        totalByGeoNonCA = (
            data[data["Entity_ST"] != "CA"]["Amount"].sum().round(decimals=2))
        return {
            "SJ": totalByGeoSJ,
            "NonSJ": totalByGeoNonSJ,
            "CA": totalByGeoCA,
            "NonCA": totalByGeoNonCA,
        }
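
# A minimal read-back sketch (assumed usage; host/port are placeholders): the
# methods above store the "elections", "candidates", "referendums" and
# "metadata" shapes at the JSON root, so they can be fetched back with plain
# jsonget calls against the same keys.
from rejson import Client, Path

rj = Client(host="localhost", port=6379, decode_responses=True)
elections = rj.jsonget("elections", Path.rootPath())
candidates = rj.jsonget("candidates", Path.rootPath())
metadata = rj.jsonget("metadata", Path.rootPath())
print(metadata["DateProcessed"], len(candidates["Candidates"]),
      list(elections["Elections"].keys()))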
Example #25
0
# Assumed imports for this excerpt.
import uuid

from flask import request
from flask_restful import Resource, reqparse
from flask_restful.representations.json import output_json
from rejson import Client, Path


class JobsAPI(Resource):
    def __init__(self):
        self.redis = Client(host='127.0.0.1', port=6379, decode_responses=True)

    def get(self, **kwargs):
        if kwargs.get('job_id'):
            job_id = kwargs.get('job_id')
            if self.redis.exists(job_id):
                parser = reqparse.RequestParser()

                if request.url_rule.rule == '/jobs/<string:job_id>/next':
                    parser.add_argument('expired_duration',
                                        type=int,
                                        default=300)
                    args = parser.parse_args(strict=True)
                    if self.redis.jsonget(job_id, Path('.items')):
                        ttl = args.get('expired_duration')
                        items = self.redis.jsonget(job_id, Path('.items'))
                        for item in items:
                            if not self.redis.exists(f'hold_{item}'):
                                self.redis.execute_command(
                                    'SET', f'hold_{item}', job_id)
                                self.redis.execute_command(
                                    'EXPIRE', f'hold_{item}', ttl)
                                return output_json(
                                    {
                                        'status': 'ok',
                                        'job_id': job_id,
                                        'ttl': ttl,
                                        'index': items.index(item),
                                        'item': item
                                    }, 200)
                    return output_json(
                        {
                            'status': 'error',
                            'job_id': job_id,
                            'description': 'No available items in the job.'
                        }, 400)

                if request.url_rule.rule == '/jobs/<string:job_id>/items':
                    parser.add_argument('active',
                                        default='true',
                                        choices=('true', 'false'))
                    args = parser.parse_args(strict=True)
                    items = self.redis.jsonget(job_id, Path('.items'))
                    done_items = self.redis.jsonget(job_id, Path('.done'))
                    if args.get('active') == 'true':
                        active_items = []
                        for item in items:
                            if not self.redis.exists(f'hold_{item}') and \
                                    items.index(item) not in done_items:
                                active_items.append(item)
                        return output_json(
                            {
                                'status': 'ok',
                                'job_id': job_id,
                                'items': active_items
                            }, 200)
                    return output_json(
                        {
                            'status': 'ok',
                            'job_id': job_id,
                            'items': items + done_items
                        }, 200)
            else:
                return output_json(
                    {
                        'status': 'error',
                        'job_id': job_id,
                        'description': 'The job is not in the queue.'
                    }, 400)

        return output_json(
            {
                'status': 'ok',
                'jobs': [i for i in self.redis.keys() if i[:5] != 'hold_']
            }, 200)

    def post(self, **kwargs):
        if request.url_rule.rule == '/jobs/<string:job_id>/items/<int:item_index>/done':
            job_id = kwargs.get('job_id')
            item_index = kwargs.get('item_index')
            done_item = self.redis.jsonget(job_id, Path('.items'))[item_index]
            if item_index in self.redis.jsonget(job_id, Path('.done')):
                return output_json(
                    {
                        'status': 'error',
                        'description': 'The item already was marked as done.',
                        'job_id': job_id,
                        'index': item_index,
                        'item': done_item
                    }, 400)
            self.redis.delete(f'hold_{done_item}')
            self.redis.jsonarrappend(job_id, Path('.done'), item_index)
            return output_json(
                {
                    'status': 'ok',
                    'description': 'The item is marked as done.',
                    'job_id': job_id,
                    'index': item_index,
                    'item': done_item
                }, 200)

        if request.url_rule.rule == '/jobs/<string:job_id>/items/<int:item_index>/error':
            job_id = kwargs.get('job_id')
            item_index = kwargs.get('item_index')
            error_item = self.redis.jsonget(job_id, Path('.items'))[item_index]
            if item_index in self.redis.jsonget(job_id, Path('.error')):
                return output_json(
                    {
                        'status': 'error',
                        'description': 'The item already was marked as error.',
                        'job_id': job_id,
                        'index': item_index,
                        'item': error_item
                    }, 400)
            self.redis.delete(f'hold_{error_item}')
            self.redis.jsonarrappend(job_id, Path('.error'), item_index)
            return output_json(
                {
                    'status': 'ok',
                    'description': 'The item is marked as error.',
                    'job_id': job_id,
                    'index': item_index,
                    'item': error_item
                }, 200)

        if isinstance(request.json, list) and request.json:
            job_id = str(uuid.uuid4())

            data = {'items': request.json, 'done': [], 'error': []}

            if self.redis.jsonset(job_id, Path.rootPath(), data):
                return output_json(
                    {
                        'status': 'ok',
                        'description': 'Job is added to queue.',
                        'job_id': job_id
                    }, 201)
        else:
            return output_json(
                {
                    'status': 'error',
                    'description': 'Wrong request!'
                }, 400)

    def delete(self, job_id):
        if self.redis.exists(job_id):
            self.redis.delete(job_id)
            return output_json(
                {
                    'status': 'ok',
                    'description': 'Job is deleted.'
                }, 200)
        else:
            return output_json(
                {
                    'status': 'error',
                    'description': 'The job is not in the queue.'
                }, 400)
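
# A minimal wiring sketch (an assumption, since the snippet above does not show
# its app setup): the request.url_rule checks in JobsAPI only match if the
# resource is registered under routes like these with Flask-RESTful.
from flask import Flask
from flask_restful import Api

app = Flask(__name__)
api = Api(app)
api.add_resource(
    JobsAPI,
    '/jobs',
    '/jobs/<string:job_id>',
    '/jobs/<string:job_id>/next',
    '/jobs/<string:job_id>/items',
    '/jobs/<string:job_id>/items/<int:item_index>/done',
    '/jobs/<string:job_id>/items/<int:item_index>/error',
)

if __name__ == '__main__':
    app.run(debug=True)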
Example #26
0
from rejson import Client, Path
try:
    rj = Client(host='localhost', port=6379, decode_responses=True)
    ret = rj.jsonget('jsonorg_example', Path('.glossary'))
    print(ret)
except Exception as e:
    print("Exception: {}".format(e))
Example #27
0
class RedisJson:
    def __init__(self):
        self._rjson = Client(host='localhost',
                             port=6379,
                             decode_responses=True)
        self._root_path = Path.rootPath()

    def insert(self, key, data):
        """
        Insert JSON into db

        Structure of JSON to insert:
        {
            'lat'  : 80.844,
            'long' : -43.139,
            'resources' : {
                'mask' : 450,
                'vaccine' : 56,
                'oxygen' : 800,
                ...
            },
            'updated' : <unix time ms>
        }
        """
        self._rjson.jsonset(key, self._root_path, data)

    def get(self):
        """
        Return list of all JSON objects stored in db

        TODO: added this for now, but loading everything into memory doesn't seem
        like a great idea; maybe RediSearch will help with this, or maybe make
        this return a generator which can be iterated through
        """
        results = []
        for key in self._rjson.scan_iter():
            results.append(self._rjson.jsonget(key, self._root_path))

        return results

    def update(self, key, path, new_value):
        """
        Update a field of a JSON object in db

        Syntax for `path` argument:
        E.g. we have {
            'key1' : value1,
            'key2' : {
                'key3' : value2
            }
        }

        To update value2, `path` should be ".key2.key3"
        """
        self._rjson.jsonset(key, path, new_value)

    def delete(self, key):
        """
        Delete a JSON value from the db
        """
        self._rjson.jsondel(key, self._root_path)
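
# A usage sketch for the wrapper above (the key name and values are assumptions):
# insert a record shaped like the docstring example, update one nested field
# with the ".key.subkey" path syntax, read everything back, then delete it.
import time

store = RedisJson()
store.insert('site:1', {
    'lat': 80.844,
    'long': -43.139,
    'resources': {'mask': 450, 'vaccine': 56, 'oxygen': 800},
    'updated': int(time.time() * 1000),
})
store.update('site:1', '.resources.mask', 425)
print(store.get())       # assumes the db holds only JSON documents
store.delete('site:1')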