Example #1
# Imports required by this snippet (REDIS_SERVER and REDIS_PWD are
# configuration constants defined elsewhere in the project).
import datetime
import json
import time

from flask import abort, jsonify, request
from rejson import Client, Path


def webhook_save():
    if request.method == 'POST':
        timestamp_human = datetime.datetime.now()
        timestamp = int(time.time())
        nowDatetime = timestamp_human.strftime('%Y-%m-%d(%H:%M:%S)')
        req_data = request.get_json()
        alertname = req_data['commonLabels']['alertname']
        severity = ''
        receiver = req_data['receiver']
        key_name = str(timestamp) + "_" + alertname + "_" + receiver
        try:
            # conn = redis.Redis(host=REDIS_SERVER, port=6379, db=0, password=REDIS_PWD)
            conn = Client(host=REDIS_SERVER,
                          port=6379,
                          db=0,
                          password=REDIS_PWD)
            conn.ping()
            print('Redis connected %s' % REDIS_SERVER)
        except Exception as e:
            print('Error:', e)
            exit('Failed to connect')

        # Reuse the authenticated connection established above
        conn.jsonset(key_name, Path.rootPath(), req_data)
        data = json.dumps(conn.jsonget(key_name))
        print(data)
        # Redis : SCAN 0 match 1527911[1-9][1-9]*

    else:
        abort(400)

    if not conn.exists(key_name):
        print("Error: %s doesn't exist" % key_name)

    return jsonify({'status': 'success'}), 200
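
Example #1 stores each incoming webhook payload as a single JSON document. A minimal, self-contained sketch of the same jsonset/jsonget round-trip, assuming a local Redis instance with the RedisJSON module loaded (the key name and payload below are made up for illustration):

from rejson import Client, Path

rj = Client(host='localhost', port=6379, decode_responses=True)

# Store the whole document at the root path, then read it back
payload = {'receiver': 'team-ops', 'commonLabels': {'alertname': 'HighCPU'}}
rj.jsonset('alert:example', Path.rootPath(), payload)

print(rj.jsonget('alert:example'))                                  # full document
print(rj.jsonget('alert:example', Path('.commonLabels.alertname')))  # single sub-path
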
Example #2
class JobsAPI(Resource):
    def __init__(self):
        self.redis = Client(host='127.0.0.1', port=6379, decode_responses=True)

    def get(self, **kwargs):
        if kwargs.get('job_id'):
            job_id = kwargs.get('job_id')
            if self.redis.exists(job_id):
                parser = reqparse.RequestParser()

                if request.url_rule.rule == '/jobs/<string:job_id>/next':
                    parser.add_argument('expired_duration',
                                        type=int,
                                        default=300)
                    args = parser.parse_args(strict=True)
                    if self.redis.jsonget(job_id, Path('.items')):
                        ttl = args.get('expired_duration')
                        items = self.redis.jsonget(job_id, Path('.items'))
                        for item in items:
                            if not self.redis.exists(f'hold_{item}'):
                                self.redis.execute_command(
                                    'SET', f'hold_{item}', job_id)
                                self.redis.execute_command(
                                    'EXPIRE', f'hold_{item}', ttl)
                                return output_json(
                                    {
                                        'status': 'ok',
                                        'job_id': job_id,
                                        'ttl': ttl,
                                        'index': items.index(item),
                                        'item': item
                                    }, 200)
                    return output_json(
                        {
                            'status': 'error',
                            'job_id': job_id,
                            'description': 'Items list is empty.'
                        }, 400)

                if request.url_rule.rule == '/jobs/<string:job_id>/items':
                    parser.add_argument('active',
                                        default='true',
                                        choices=('true', 'false'))
                    args = parser.parse_args(strict=True)
                    items = self.redis.jsonget(job_id, Path('.items'))
                    done_items = self.redis.jsonget(job_id, Path('.done'))
                    if args.get('active') == 'true':
                        active_items = []
                        for item in items:
                            if not self.redis.exists(f'hold_{item}') and \
                                    items.index(item) not in done_items:
                                active_items.append(item)
                        return output_json(
                            {
                                'status': 'ok',
                                'job_id': job_id,
                                'items': active_items
                            }, 200)
                    return output_json(
                        {
                            'status': 'ok',
                            'job_id': job_id,
                            'items': items + done_items
                        }, 200)
            else:
                return output_json(
                    {
                        'status': 'error',
                        'job_id': job_id,
                        'description': 'The job is not in the queue.'
                    }, 400)

        return output_json(
            {
                'status': 'ok',
                'jobs': [i for i in self.redis.keys() if i[:5] != 'hold_']
            }, 200)

    def post(self, **kwargs):
        if request.url_rule.rule == '/jobs/<string:job_id>/items/<int:item_index>/done':
            job_id = kwargs.get('job_id')
            item_index = kwargs.get('item_index')
            done_item = self.redis.jsonget(job_id, Path('.items'))[item_index]
            if item_index in self.redis.jsonget(job_id, Path('.done')):
                return output_json(
                    {
                        'status': 'error',
                        'description': 'The item already was marked as done.',
                        'job_id': job_id,
                        'index': item_index,
                        'item': done_item
                    }, 400)
            self.redis.delete(f'hold_{done_item}')
            self.redis.jsonarrappend(job_id, Path('.done'), item_index)
            return output_json(
                {
                    'status': 'ok',
                    'description': 'The item is marked as done.',
                    'job_id': job_id,
                    'index': item_index,
                    'item': done_item
                }, 200)

        if request.url_rule.rule == '/jobs/<string:job_id>/items/<int:item_index>/error':
            job_id = kwargs.get('job_id')
            item_index = kwargs.get('item_index')
            error_item = self.redis.jsonget(job_id, Path('.items'))[item_index]
            if item_index in self.redis.jsonget(job_id, Path('.error')):
                return output_json(
                    {
                        'status': 'error',
                        'description': 'The item already was marked as error.',
                        'job_id': job_id,
                        'index': item_index,
                        'item': error_item
                    }, 400)
            self.redis.delete(f'hold_{error_item}')
            self.redis.jsonarrappend(job_id, Path('.error'), item_index)
            return output_json(
                {
                    'status': 'ok',
                    'description': 'The item is marked as error.',
                    'job_id': job_id,
                    'index': item_index,
                    'item': error_item
                }, 200)

        if isinstance(request.json, list) and request.json:
            job_id = str(uuid.uuid4())

            data = {'items': request.json, 'done': [], 'error': []}

            if self.redis.jsonset(job_id, Path.rootPath(), data):
                return output_json(
                    {
                        'status': 'ok',
                        'description': 'Job is added to queue.',
                        'job_id': job_id
                    }, 201)
        else:
            return output_json(
                {
                    'status': 'error',
                    'description': 'Wrong request!'
                }, 400)

    def delete(self, job_id):
        if self.redis.exists(job_id):
            self.redis.delete(job_id)
            return output_json(
                {
                    'status': 'ok',
                    'description': 'Job is deleted.'
                }, 200)
        else:
            return output_json(
                {
                    'status': 'error',
                    'description': 'The job is not in the queue.'
                }, 400)
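
A minimal sketch of the Redis operations this job API performs underneath, assuming a local Redis with RedisJSON loaded (the job id, item values, and hold key are illustrative; the API itself generates ids with uuid4()):

from rejson import Client, Path

rj = Client(host='127.0.0.1', port=6379, decode_responses=True)

# Create a job document with pending items and empty done/error lists
job_id = 'job-1234'
rj.jsonset(job_id, Path.rootPath(), {'items': ['a', 'b'], 'done': [], 'error': []})

# Hold an item for 300 seconds (same effect as the SET/EXPIRE pair above),
# then record index 0 as done
rj.set('hold_a', job_id, ex=300)
rj.jsonarrappend(job_id, Path('.done'), 0)
print(rj.jsonget(job_id, Path('.done')))  # [0]
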
Example #3
class Hub(object):
    dconn = None  # document store connection
    sconn = None  # search index connection
    qconn = None  # queue connection
    gh = None
    autocomplete = None
    repo = None
    _ts = None
    _hubkey = 'hub:catalog'
    _ixname = 'ix'
    _acname = 'ac'

    def __init__(self,
                 ghlogin_or_token=None,
                 docs_url=None,
                 search_url=None,
                 queue_url=None,
                 repo=None):
        timestamp = datetime.utcnow()
        logger.info('Initializing temporary hub {}'.format(timestamp))

        if ghlogin_or_token:
            self.gh = Github(ghlogin_or_token)
        elif 'GITHUB_TOKEN' in os.environ:
            self.gh = Github(os.environ['GITHUB_TOKEN'])
        else:
            logger.info("Env var 'GITHUB_TOKEN' not found")

        if docs_url:
            pass
        elif 'DOCS_REDIS_URL' in os.environ:
            docs_url = os.environ['DOCS_REDIS_URL']
        else:
            logger.critical('No Redis for document storage... bye bye.')
            raise RuntimeError('No Redis for document storage... bye bye.')
        self.dconn = ReJSONClient().from_url(docs_url)

        if search_url:
            pass
        elif 'SEARCH_REDIS_URL' in os.environ:
            search_url = os.environ['SEARCH_REDIS_URL']
        else:
            search_url = docs_url
        conn = Redis(connection_pool=ConnectionPool().from_url(search_url))
        self.sconn = RediSearchClient(self._ixname, conn=conn)
        self.autocomplete = AutoCompleter(self._acname, conn=conn)

        if queue_url:
            pass
        elif 'QUEUE_REDIS_URL' in os.environ:
            queue_url = os.environ['QUEUE_REDIS_URL']
        else:
            queue_url = docs_url
        self.qconn = StrictRedis.from_url(queue_url)

        if repo:
            pass
        elif 'REDISMODULES_REPO' in os.environ:
            repo = os.environ['REDISMODULES_REPO']
        else:
            logger.critical('No REDISMODULES_REPO... bye bye.')
            raise RuntimeError('No REDISMODULES_REPO... bye bye.')
        self.repo = repo

        # Check if hub exists
        if self.dconn.exists(self._hubkey):
            self._ts = datetime.fromtimestamp(
                float(self.dconn.jsonget(self._hubkey, Path('.created'))))
            logger.info('Latching to hub {}'.format(self._ts))
        else:
            self._ts = timestamp
            logger.info('Creating hub {}'.format(self._ts))
            self.createHub()
            self.addModulesRepo(self.repo)

    def get_repo_url(self):
        return 'https://github.com/{}'.format(self.repo)

    def createHub(self):
        logger.info('Creating the hub in the database {}'.format(self._ts))
        # Store the master modules catalog as an object
        self.dconn.jsonset(
            self._hubkey, Path.rootPath(), {
                'created': str(_toepoch(self._ts)),
                'modules': {},
                'submissions': [],
                'submit_enabled': False
            })

        # Create a RediSearch index for the modules
        # TODO: catch errors
        self.sconn.create_index(
            (TextField('name', sortable=True), TextField('description'),
             NumericField('stargazers_count', sortable=True),
             NumericField('forks_count', sortable=True),
             NumericField('last_modified', sortable=True)),
            stopwords=stopwords)

    def deleteHub(self):
        # TODO
        pass

    def addModule(self, mod):
        logger.info('Adding module to hub {}'.format(mod['name']))
        # Store the module object as a document
        m = RedisModule(self.dconn, self.sconn, self.autocomplete, mod['name'])
        m.save(mod)

        # Add a reference to it in the master catalog
        self.dconn.jsonset(
            self._hubkey, Path('.modules["{}"]'.format(m.get_id())), {
                'id': m.get_id(),
                'key': m.get_key(),
                'created': str(_toepoch(self._ts)),
            })

        # Schedule a job to refresh repository statistics, starting from now and every hour
        s = Scheduler(connection=self.qconn)
        job = s.schedule(
            scheduled_time=datetime(1970, 1, 1),
            func=callRedisModuleUpateStats,
            args=[m.get_id()],
            interval=60 * 60,  # every hour
            repeat=None,  # indefinitely
            ttl=0,
            result_ttl=0)
        return m

    """
    Adds modules to the hub from a local directory
    TODO: deprecate asap
    """

    def addModulesPath(self, path):
        logger.info('Loading modules from local path {}'.format(path))
        # Iterate module JSON files
        for filename in os.listdir(path):
            if filename.endswith(".json"):
                with open('{}/{}'.format(path, filename)) as fp:
                    mod = json.load(fp)

                m = self.addModule(mod['name'], mod)

    """
    Adds a modules to the hub from a github repository
    """

    def addModulesRepo(self, name, path='/modules/'):
        # TODO: check for success
        q = Queue(connection=self.qconn)
        q.enqueue(callLoadModulesFromRepo, name, path)

    def loadModulesFromRepo(self, name, path):
        logger.info('Loading modules from Github {} {}'.format(name, path))
        # TODO: error handling, sometimes not all contents are imported?
        repo = self.gh.get_repo(name)
        files = repo.get_dir_contents(path)
        for f in files:
            mod = json.loads(f.decoded_content)
            m = self.addModule(mod)

    """
    Submits a module to the hub
    """

    def submitModule(self, repo_id, **kwargs):
        logger.info('Module submitted to hub {}'.format(repo_id))
        repo_id = repo_id.lower()
        ts = datetime.utcnow()
        res = {'id': repo_id, 'status': 'failed'}

        if not self.dconn.jsonget(self._hubkey, Path('submit_enabled')):
            res['message'] = 'Module submission is currently disabled'
            return res

        # Check if the module is already listed
        m = RedisModule(self.dconn, self.sconn, self.autocomplete, repo_id)
        if m.exists:
            # TODO: return in search results
            res['message'] = 'Module already listed in the hub'
            return res

        # Check if there's an active submission, or if the failure was too recent
        submission = Submission(self.dconn, repo_id)
        if submission.exists:
            status = submission.status
            if status != 'failed':
                res['status'] = 'active'
                res['message'] = 'Active submission found for module'
                return res
            else:
                # TODO: handle failed submissions
                res['message'] = 'Module already submitted to the hub and had failed, please reset manually for now'
                return res

        # Store the new submission
        submission.save(**kwargs)

        # Record the submission in the catalog
        # TODO: find a good use for that, e.g. 5 last submissions
        self.dconn.jsonarrappend(self._hubkey, Path('.submissions'), {
            'id': submission.get_id(),
            'created': submission.created,
        })

        # Add a job to process the submission
        q = Queue(connection=self.qconn)
        job = q.enqueue(callProcessSubmission, submission.get_id())
        if job is None:
            res['message'] = 'Submission job could not be created'
            # TODO: design retry path
            logger.error(
                'Could not create submission processing job for {}'.format(
                    submission.get_id()))
        else:
            res['status'] = 'queued'
            submission.status = res['status']
            submission.job = job.id

        return res

    def viewSubmissionStatus(self, repo_id):
        submission = Submission(self.dconn, repo_id)
        if submission.exists:
            res = {
                'id': submission.get_id(),
                'status': submission.status,
                'message': submission.message,
            }
            if 'finished' == res['status']:
                res['pull_number'] = submission.pull_number
                res['pull_url'] = submission.pull_url
            return res

    def processSubmission(self, repo_id):
        logger.info('Processing submission for {}'.format(repo_id))
        submission = Submission(self.dconn, repo_id)
        if submission.exists:
            return submission.process(self.gh, self.repo)

    def viewModules(self, query=None, sort=None):
        if not query:
            # Use a purely negative query to get all modules
            query = '-etaoinshrdlu'
        q = Query(query).no_content().paging(0, 1000)
        if sort:
            if sort == 'relevance':
                pass
            elif sort == 'update':
                q.sort_by('last_modified')
            elif sort == 'stars':
                q.sort_by('stargazers_count', asc=False)
            elif sort == 'forks':
                q.sort_by('forks_count', asc=False)
            elif sort == 'name':
                q.sort_by('name')

        results = self.sconn.search(q)
        mods = []
        fetch_duration = 0
        # TODO: this should be pipelined
        for doc in results.docs:
            m = RedisModule(self.dconn, self.sconn, self.autocomplete, doc.id)
            res, duration = _durationms(m.to_dict)
            mods.append(res)
            fetch_duration += duration

        return {
            'results': results.total,
            'search_duration': '{:.3f}'.format(results.duration),
            'fetch_duration': '{:.3f}'.format(fetch_duration),
            'total_duration':
            '{:.3f}'.format(fetch_duration + results.duration),
            'modules': mods,
        }

    def viewSearchSuggestions(self, prefix):
        suggestions = self.autocomplete.get_suggestions(prefix)
        return [s.string for s in suggestions]
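
A minimal sketch of the single-document catalog pattern used by Hub: the whole catalog lives under one RedisJSON key, and nested paths address individual modules and the submissions array (key name and data are illustrative; assumes a local Redis with RedisJSON loaded):

from rejson import Client, Path

rj = Client(host='localhost', port=6379, decode_responses=True)

hubkey = 'hub:catalog'
rj.jsonset(hubkey, Path.rootPath(),
           {'created': '0', 'modules': {}, 'submissions': [], 'submit_enabled': True})

# Register a module under a nested path and record a submission in the array
rj.jsonset(hubkey, Path('.modules["redisjson"]'), {'id': 'redisjson'})
rj.jsonarrappend(hubkey, Path('.submissions'), {'id': 'redisjson', 'created': '0'})

print(rj.jsonget(hubkey, Path('.modules["redisjson"].id')))  # 'redisjson'
print(rj.jsonget(hubkey, Path('.submit_enabled')))           # True
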
Example #4
class RejsonDb(KeyValueStorage):
    def __init__(self, conf):
        """
        arguments:
        conf -- a dictionary containing 'settings' module compatible configuration of the plug-in
        """
        self._host = conf['host']
        self._port = int(conf['port'])
        self._db = int(conf['id'])
        self.redis = Client(host=self._host,
                            port=self._port,
                            db=self._db,
                            decode_responses=True)
        self._scan_chunk_size = 50

        try:
            self.redis.jsonget('-')
        except ResponseError as e:
            if 'unknown command' in str(e):
                logging.fatal(
                    "Rejson DB Plug-in requires Redis with RedisJSON module enabled"
                )
            else:
                raise e

    def rename(self, key, new_key):
        return self.redis.rename(key, new_key)

    def list_get(self, key, from_idx=0, to_idx=-1):
        """
        Returns a stored list. If there is a non-list value stored with the passed key
        then TypeError is raised.

        arguments:
        key -- data access key
        from_idx -- optional start index
        to_idx -- optional (default is -1) end index (including, i.e. unlike Python);
        negative values are supported (-1 = last, -2 = penultimate,...)
        """
        data = self.get(key, [])
        if isinstance(data, list):
            if to_idx == -1:
                return data[from_idx:]
            return data[from_idx:to_idx + 1]
        raise TypeError('Object is not a list')

    def list_append(self, key, value):
        """
        Add a value at the end of a list

        arguments:
        key -- data access key
        value -- value to be pushed
        """
        if not self.exists(key):
            self.set(key, [])
        self.redis.jsonarrappend(key, Path.rootPath(), value)

    def list_pop(self, key):
        """
        Removes and returns the first element of the list stored at key.

        arguments:
        key -- list access key
        """
        # pop index 0 so the first element is removed, as the docstring states
        return self.redis.jsonarrpop(key, Path.rootPath(), 0)

    def list_len(self, key):
        """
        Returns length of a list. If there is a non-list value stored with the passed key
        then TypeError is raised.

        arguments:
        key -- data access key
        """
        if not self.exists(key):
            return 0
        return self.redis.jsonarrlen(key)

    def list_set(self, key, idx, value):
        """
        Sets the list element at index to value

        arguments:
        key -- list access key
        idx -- a zero based index where the set should be performed
        value -- a JSON-serializable value to be inserted
        """
        # TODO the operation pair should be atomic to avoid possible race conditions
        self.redis.jsonarrpop(key, Path.rootPath(), idx)
        return self.redis.jsonarrinsert(key, Path.rootPath(), idx, value)

    def list_trim(self, key, keep_left, keep_right):
        """
        Trims the list from the beginning to keep_left - 1 and from keep_right to the end.
        The function does not return anything.

        arguments:
        key -- data access key
        keep_left -- the first value to be kept
        keep_right -- the last value to be kept
        """
        self.redis.jsonarrtrim(key, Path.rootPath(), keep_left, keep_right)

    def hash_get(self, key, field):
        """
        Gets a value from a hash table stored under the passed key

        arguments:
        key -- data access key
        field -- hash table entry key
        """
        if self.redis.jsontype(key, Path(f'["{field}"]')) is None:
            return None
        return self.redis.jsonget(key, Path(f'["{field}"]'), no_escape=True)

    def hash_set(self, key, field, value):
        """
        Puts a value into a hash table stored under the passed key

        arguments:
        key -- data access key
        field -- hash table entry key
        value -- a value to be stored
        """
        if not self.exists(key):
            self.set(key, {})
        self.redis.jsonset(key, Path(f'["{field}"]'), value)

    def hash_del(self, key, field):
        """
        Removes a field from a hash item

        arguments:
        key -- hash item access key
        field -- the field to be deleted
        """
        self.redis.jsondel(key, Path(f'["{field}"]'))

    def hash_get_all(self, key):
        """
        Returns a complete hash object (= Python dict) stored under the passed
        key. If the provided key is not present then an empty dict is returned.

        arguments:
        key -- data access key
        """
        return self.get(key, {})

    def get(self, key, default=None):
        """
        Gets a value stored with passed key and returns its JSON decoded form.

        arguments:
        key -- data access key
        default -- a value to be returned in case there is no such key
        """
        data = self.redis.jsonget(key, Path.rootPath(), no_escape=True)
        if data is None:
            return default
        return data

    def set(self, key, data):
        """
        Saves 'data' with 'key'.

        arguments:
        key -- an access key
        data -- a dictionary containing data to be saved
        """
        self.redis.jsonset(key, Path.rootPath(), data)

    def set_ttl(self, key, ttl):
        """
        Set auto expiration timeout in seconds.

        arguments:
        key -- data access key
        ttl -- number of seconds to wait before the value is removed
        (please note that update actions reset the timer to zero)
        """
        self.redis.expire(key, ttl)

    def get_ttl(self, key):
        return self.redis.ttl(key)

    def clear_ttl(self, key):
        self.redis.persist(key)

    def remove(self, key):
        """
        Removes a value specified by a key

        arguments:
        key -- key of the data to be removed
        """
        self.redis.jsondel(key)

    def exists(self, key):
        """
        Tests whether there is a value with the specified key

        arguments:
        key -- the key to be tested

        returns:
        boolean value
        """
        return self.redis.exists(key)

    def setnx(self, key, value):
        """
        An atomic operation "set if not exists".

        returns:
        1 if the key was set
        0 if the key was not set
        """
        return self.redis.jsonset(key, Path.rootPath(), value, nx=True)

    def getset(self, key, value):
        """
        An atomic operation which obtains current key first and then
        sets a new value under that key

        returns:
        previous key if any or None
        """
        data = self.get(key)
        self.set(key, value)
        return data

    def incr(self, key, amount=1):
        """
        Increments the value of 'key' by 'amount'.  If no key exists,
        the value will be initialized as 'amount'
        """
        if not self.exists(key):
            self.set(key, 0)
        return self.redis.jsonnumincrby(key, Path.rootPath(), amount)

    def hash_set_map(self, key, mapping):
        """
        Set key to value within hash 'name' for each corresponding
        key and value from the 'mapping' dict.
        Before setting, the values are json-serialized
        """
        return self.set(key, mapping)
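
The hash_* and incr() methods above map dictionary fields and counters onto RedisJSON paths. A minimal, self-contained sketch of the same path-level calls, assuming a local Redis with RedisJSON loaded (key names and values are illustrative):

from rejson import Client, Path

rj = Client(host='localhost', port=6379, db=0, decode_responses=True)

# Store a dict and address individual fields the way RejsonDb.hash_* does
rj.jsonset('session:1', Path.rootPath(), {})
rj.jsonset('session:1', Path('["user"]'), 'anonymous')
print(rj.jsonget('session:1', Path('["user"]')))   # 'anonymous'
rj.jsondel('session:1', Path('["user"]'))

# A numeric counter kept as a JSON document, as in RejsonDb.incr()
rj.jsonset('counter:1', Path.rootPath(), 0)
print(rj.jsonnumincrby('counter:1', Path.rootPath(), 5))  # 5
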