Example 1
def update_new_nodes():
    """
    periodic task for update new node with data from existing nodes.
    Task check if we are added new redis service url and copy all data from
    existing nodes
    """
    conn = StrictRedis.from_url(settings.REDIS_URL)
    raw_nodes_list = conn.get('nodes_list')
    nodes_list = raw_nodes_list.decode() if raw_nodes_list else None

    # no list stored yet: all configured nodes are new
    if not nodes_list:
        conn.set('nodes_list', settings.REDIS_NODES_URL)
    # a new node appears to have been added
    elif nodes_list != settings.REDIS_NODES_URL:
        new_nodes = filter(
            lambda x: x not in nodes_list.split(','),
            settings.REDIS_NODES_URL.split(',')
        )

        for node_url in new_nodes:
            for redis_url in settings.REDIS_NODES_URL.split(','):
                try:
                    conn = StrictRedis.from_url(redis_url)
                    existing_data = Dict(key='data', redis=conn)

                    conn = StrictRedis.from_url(node_url)
                    new_data = Dict(key='data', redis=conn)
                    new_data.update(existing_data.copy())
                except ConnectionError:
                    pass

        conn.set('nodes_list', settings.REDIS_NODES_URL)
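
For reference, the node-to-node copy above boils down to reading one Dict into memory and writing it into another. A minimal sketch, assuming two local Redis instances (the URLs are placeholders, not part of the original code):

from redis import StrictRedis
from redis_collections import Dict

source = Dict(key='data', redis=StrictRedis.from_url('redis://localhost:6379/0'))
target = Dict(key='data', redis=StrictRedis.from_url('redis://localhost:6380/0'))

# dict(source) materialises the hash locally; update() writes every
# item into the hash backing the target Dict
target.update(dict(source))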
Example 2
    def __init__(self, redis, prefix):
        super(RedisDictRefsContainer, self).__init__(
            Dict(redis=redis, pickler=cPickle,
                 key='{0:s}:refs'.format(prefix)))
        self._peeled = Dict(redis=redis,
                            pickler=cPickle,
                            key='{0:s}:peeled'.format(prefix))
Example 3
def redis_dict(redis_instance, redis_list_key=None):
    """
    Return a Dict, reattaching to an existing one whose key was
    previously stored under redis_list_key.
    """
    if redis_list_key and redis_instance.exists(redis_list_key):
        return Dict(redis=redis_instance,
                    key=redis_instance.get(redis_list_key).decode())
    else:
        redis_list_instance = Dict(redis=redis_instance)
        if redis_list_key:
            redis_instance.set(redis_list_key, redis_list_instance.key)
        return redis_list_instance
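
A possible usage of this factory; 'app:data_key' is a hypothetical pointer key that survives restarts, so a later call reattaches to the same data:

from redis import StrictRedis

conn = StrictRedis()
d1 = redis_dict(conn, redis_list_key='app:data_key')
d1['answer'] = 42

# a second call finds the stored key and reattaches to the same Dict
d2 = redis_dict(conn, redis_list_key='app:data_key')
assert d2['answer'] == 42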
Example 4
async def get_history(request):
    """
    view for get data history by name
    """
    name = request.rel_url.query.get('name')
    settings = Settings()

    if not name:
        raise web.HTTPBadRequest()

    for redis_url in settings.REDIS_NODES_URL.split(','):
        try:
            conn = StrictRedis.from_url(redis_url)
            value = Dict(key='data', redis=conn)[name]
            # sort by timestamp, newest first
            return web.json_response(
                sorted(value, key=itemgetter('timestamp'), reverse=True))

        except (ConnectionError, KeyError):
            pass

    return web.json_response({
        'success': False,
        'reason': 'It seems all nodes are down'
    })
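
The node loop above is simple client-side failover: try each node in order and use the first one that answers. The same idea as a small reusable helper (the name is hypothetical, not part of the original code):

def first_reachable_data_dict(node_urls):
    """Return the 'data' Dict from the first node that answers."""
    for url in node_urls:
        try:
            conn = StrictRedis.from_url(url)
            conn.ping()  # fail fast if the node is down
            return Dict(key='data', redis=conn)
        except ConnectionError:
            continue
    return None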
Example 5
    def __init__(self, redis, prefix):
        BaseRepo.__init__(self, RedisObjectStore(redis, prefix),
                          RedisDictRefsContainer(redis, prefix))
        self._named_files = Dict(redis=redis,
                                 pickler=cPickle,
                                 key='{0:s}:named_files'.format(prefix))
        self._config = ConfigFile()
        self.bare = True
Example 6
    def __init__(self, app, base_url, redis, slack_client):
        """ Init. """
        if not all(REQUIRED_KEYS):
            self._logger.critical('%s must be set.', REQUIRED_KEYS)
            sys.exit(1)

        self._base_url = base_url
        self.cache = CTFdCache(solves=List(key='ctfd_submission_db',
                                           redis=redis),
                               teams=Dict(key='ctfd_teams', redis=redis),
                               users=Dict(key='ctfd_users', redis=redis))
        # assumed: self._api is a requests.Session for the CTFd API
        self._api = requests.Session()
        self._api.headers.update({
            'Authorization': f'Token {CTFD_TOKEN}',
            'Content-type': 'application/json'
        })
        self._slack = slack_client

        self.bind_route(app)
Example 7
def remove_data_nodes(self, redis_url: str, name: str):
    """
    task for remove current node data by name
    """
    try:
        conn = StrictRedis.from_url(redis_url)
        data = Dict(key='data', redis=conn)
        del data[name]
    except ConnectionError:
        raise self.retry(countdown=5)
Example 8
async def _get_response_to_command(manager: asterisk.manager.Manager,
                                   command_to_manager: str, key: str,
                                   loop: asyncio.AbstractEventLoop):
    """
    Get a response from Asterisk, store it in Redis, then reschedule
    itself so the value keeps being refreshed.
    """
    await asyncio.sleep(1)
    manager, response = _do_command_safety(manager, command_to_manager)
    monitor = Dict(key=settings.MONITORING_KEY)
    monitor[key] = response.data
    asyncio.ensure_future(_get_response_to_command(manager, command_to_manager,
                                                   key, loop),
                          loop=loop)
Example 9
def process_mfc_counters(counters=None, data=None):
    decimal.getcontext().prec = 6

    if counters is None:
        data = r.blpop(config.get('constants', 'REDIS_PARSER_QUEUE_KEY'))
        counters = json.loads(data[1])

    if counters['data'] is None:
        LOG.critical("Device: %s, %s IP: %s" %
                     (counters['device_id'], counters['name'], counters['ip']))
        LOG.critical(
            "MFC response doesn't have any counter data. skipping sample: %s" %
            (counters['sample_id']))
    else:
        gl_bytes = Counter(counters['data']['glbl']['bytes'])
        # MFC cache-hit ratio (CHR)
        tot_bytes = sum(gl_bytes.values())
        tot_cache = counters['data']['glbl']['bytes']['ram'] + counters[
            'data']['glbl']['bytes']['disk']
        # Handle Zero condition. Cumulative sum could be 0
        if tot_bytes == 0:
            counters['data']['chr'] = 0
        else:
            counters['data']['chr'] = float(
                (decimal.Decimal(tot_cache) / decimal.Decimal(tot_bytes)) *
                100)

        # Calculate current throughput
        mfcs_cur_thrpt = Dict(key=config.get('constants',
                                             'REDIS_MFC_CUR_THRPT_KEY'),
                              redis=r)
        try:
            counters['data']['cur_thrpt'] = gl_bytes - mfcs_cur_thrpt[
                counters['device_id']]
            counters['data']['cur_thrpt']['total'] = sum(
                counters['data']['cur_thrpt'].values())
            counters['data']['cur_thrpt']['cache'] = counters['data']['cur_thrpt']['ram'] + \
                                                     counters['data']['cur_thrpt']['disk']
            mfcs_cur_thrpt[counters['device_id']] = gl_bytes
        except KeyError:
            LOG.debug("current throughput hashmap - Initial update for " +
                      str(counters['device_id']))
            counters['data']['cur_thrpt'] = mfcs_cur_thrpt[
                counters['device_id']] = gl_bytes
            counters['data']['cur_thrpt']['total'] = counters['data'][
                'cur_thrpt']['cache'] = 0

        r.rpush(config.get('constants', 'REDIS_MFC_STORE_QUEUE_KEY'),
                json.dumps(counters))

    return counters
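
The throughput delta above relies on collections.Counter subtraction, which keeps only positive counts, so a counter that reset or wrapped simply drops out of the delta. A quick illustration with made-up byte counts:

from collections import Counter

previous = Counter({'ram': 100, 'disk': 50})
current = Counter({'ram': 180, 'disk': 70})

# only positive differences survive the subtraction
delta = current - previous  # Counter({'ram': 80, 'disk': 20})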
Example 10
    def __init__(self, controller):
        ApiObject.__init__(self,
                           controller,
                           oid='',
                           name='',
                           desc='',
                           active='')
        try:
            self._prefix = task_scheduler.conf.CELERY_REDIS_SCHEDULER_KEY_PREFIX
            self._redis = self.controller.redis_scheduler.conn
            self._pickler = pickle
            self.objid = '*'

            # create or get dictionary from redis
            self.redis_entries = Dict(key=self._prefix, redis=self._redis)
        except Exception:
            # fail soft if the scheduler Redis is unavailable
            pass
Example 11
def update_data_nodes(
        self, redis_url: str, name: str, timestamp: int, author: str
):
    """
    task for update current node data
    """
    try:
        conn = StrictRedis.from_url(redis_url)
        data = Dict(key='data', redis=conn)
        if name in data:
            data[name] = data[name] + [
                {'timestamp': timestamp, 'author': author}
            ]
        else:
            data[name] = [{'timestamp': timestamp, 'author': author}]
    except ConnectionError:
        raise self.retry(countdown=5)
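
Note the reassignment instead of an in-place append: without writeback, a redis_collections Dict returns a copy of the stored value, so mutating the copy is never written back. A minimal illustration, assuming a local Redis:

from redis import StrictRedis
from redis_collections import Dict

data = Dict(key='data', redis=StrictRedis())
data['x'] = [1]

data['x'].append(2)          # mutates a copy; nothing is persisted
print(data['x'])             # [1]

data['x'] = data['x'] + [2]  # reassignment writes the value back
print(data['x'])             # [1, 2]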
Example 12
async def dump(request):
    """
    view for for render all dump data
    """
    settings = Settings()

    for redis_url in settings.REDIS_NODES_URL.split(','):
        try:
            conn = StrictRedis.from_url(redis_url)
            redis_data = Dict(key='data', redis=conn)

            return web.json_response(
                {name: value
                 for name, value in redis_data.items()})

        except ConnectionError:
            pass

    return web.json_response({
        'success': False,
        'reason': 'It seems all nodes are down'
    })
Example 13
    if args.host:
        host = args.host

    if args.port:
        port = args.port

    slave = '{} {}'.format(host, port)
    print(slave)
    #rc = redislite.StrictRedis(serverconfig={'slaveof': slave})
    rc = redislite.StrictRedis(host=host, port=port)
    lst = List(redis=rc, key='speed')

    print "Keys in redis: ", rc.keys()
    print "No of items in redis['speed']: ", len(lst)
    print "Weekly speed\n", processor.average_speed_weekly(lst)

    settings = Dict(redis=rc, key='settings')
    if settings.get('last_test'):
        print('Last test: ', settings['last_test'])
    else:
        print('No last run')

    settings['last_test'] = datetime.now()

    print "Last hour aggr: ", settings.get('last_hour') if settings.get(
        'last_hour') else 'not yet'
    print "Last day aggr: ", settings.get('last_hour') if settings.get(
        'last_day') else 'not yet'
    print lst[-2:]
Example 14
    def get_cached_token(self):
        if self.username:
            token_info = Dict(key=self.redis_key, redis=get_redis()).get(self.username)
            if token_info and self.is_token_expired(token_info):
                token_info = self.refresh_access_token(token_info['refresh_token'])
            return token_info
Example 15
    def _save_token_info(self, token_info):
        if self.username:
            Dict(key=self.redis_key, redis=get_redis())[self.username] = token_info
Example 16
from fastapi import FastAPI
from starlette.responses import RedirectResponse, HTMLResponse

import hashlib
from enum import Enum
import time

import web_utils
from web_utils import BrowserType

from redis_collections import Dict
from redis import StrictRedis

conn = StrictRedis()
page_dict = Dict(redis=conn, writeback=True, key='SPYDERWEB__PAGE_DICT')


class FilterType(str, Enum):
    filter_param = "param"
    filter_regex = "regex"


app = FastAPI(title='SpyderWeb', version='0.3.0')


@app.get("/")
async def read_root():
    response = RedirectResponse(url='/docs')
    return response
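
writeback=True, as used for page_dict above, keeps a local cache of retrieved values so that in-place mutation works, but changes only reach Redis when sync() is called, much like the standard shelve module's writeback. A minimal sketch reusing the conn from above (the key name is made up):

d = Dict(redis=conn, key='writeback_demo', writeback=True)
d['pages'] = []

d['pages'].append('https://example.com')  # mutates the cached value

d.sync()  # flushes cached values back to Redis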

Example 17
#redis.connection.socket = gevent.socket
r = redis.Redis(host=config.get('redis', 'db_host'),
                port=int(config.get('redis', 'db_port')),
                db=int(config.get('redis', 'db')))
r_keys = {
    'dev_list': config.get('constants', 'REDIS_DEV_LIST_KEY'),
    'sync_dev_list': config.get('constants', 'REDIS_SYNC_DEV_LIST_KEY'),
    'mfc_uuid': config.get('constants', 'REDIS_MFC_UUID_HASH_KEY'),
    'cur_thrpt': config.get('constants', 'REDIS_MFC_CUR_THRPT_KEY'),
    'unsync_dev_list': config.get('constants', 'REDIS_UNSYNC_DEV_LIST_KEY'),
    'new_found_dev_list': config.get('constants',
                                     'REDIS_NEW_FOUND_DEV_LIST_KEY')
}
mfa_dev_list = List(key=r_keys['dev_list'], redis=r)
sync_dev_list = List(key=r_keys['sync_dev_list'], redis=r)
mfc_uuid = Dict(key=r_keys['mfc_uuid'], redis=r)
"""Cluster Request Tasks"""


@celery.task(rate_limit=2)
def request_config_mfc(ip, data=None):
    if data is None:
        data = """<mfc-request><header><type>GET</type></header>
        <data>running-config mfc-cluster mfc</data></mfc-request>"""
    mfc_con = MfcHttpConnection(ip)
    resp = mfc_con.send_request(data)
    r.rpush(config.get('constants', 'REDIS_CONFIG_XML_QUEUE_KEY'),
            json.dumps([ip, resp]))
    return resp

Example 18
SLACK_SIGNING_SECRET = os.getenv('SLACK_SIGNING_SECRET')

app = Flask('slackmirror')
app.logger.setLevel(os.getenv('LOG_LEVEL', 'DEBUG'))

if not DEBUG:
    json_logging.init_flask(enable_json=True)
    json_logging.init_request_instrument(app)

slack_client = WebClient(os.getenv('SLACK_BOT_TOKEN'))
slack_events_adapter = SlackEventAdapter(SLACK_SIGNING_SECRET, "/slack/events", server=app)
CORS(app)
logging.getLogger('flask_cors').level = logging.DEBUG

r = redis.Redis.from_url(REDIS_URL)
cache = Dict(key='cache', redis=r)
messages = Deque(key='messages', redis=r, maxlen=MAX_MESSAGES)

def replace_slack_tags(t):
    t = re.sub(r'<@([a-zA-Z0-9]+)>', replace_user_id_with_name, t)
    t = re.sub(r':([a-zA-Z0-9_-]+)(::[a-zA-Z0-9_-])?:', replace_coloncode_to_emoji, t)
    t = re.sub(r'<(https?://.+?)\|([^>]+?)>', r'<a href="\1" target="_blank">\2</a>', t)
    t = re.sub(r'<(https?://.+?)>', r'<a href="\1" target="_blank">\1</a>', t)
    t = re.sub(r'<#[a-zA-Z0-9]+\|([a-zA-Z0-9æøåÅÆØäöÄÖ\-_]+)>', r"#\1", t)
    t = re.sub(r'\n{3,}', "\n\n", t)

    return t

def replace_user_id_with_name(m):
    return '@'+id_to_obj('user', m.group(1))['name']
Example 19
# -*- coding: utf-8 -*-
"""
Created on Sun Mar  8 12:36:40 2020

@author: Emmet
"""
from flask import Flask, render_template
import redis

import datetime
from redis_collections import Dict


app = Flask(__name__, template_folder='templat')
redis_host = "localhost"
redis_port = 5001
redis_password = ""
collections = Dict(redis=redis.StrictRedis(host=redis_host, port=redis_port,
                                           password=redis_password))

# Main page
@app.route('/')
def hello_world():
    return refresh_page()

def refresh_page():
    # keep only entries recorded within the last minute
    cutoff = datetime.datetime.utcnow() - datetime.timedelta(minutes=1)
    result = [doc for doc in collections.values() if doc['date'] > cutoff]
    sumOfSentiment = 0
    count = 0
    for res in result:
Example 20
    def __init__(self, redis, prefix):
        super(RedisObjectStore, self).__init__()
        self._data = Dict(redis=redis,
                          pickler=cPickle,
                          key='{0:s}:data'.format(prefix))
Example 21
def get_2fa_otp_dict() -> Dict:
    """
    Function return dict from redis with otp data
    """
    return Dict(key='2fa_otp', redis=get_redis_connection())
Example 22
def get_2fa_recovery_code_dict() -> Dict:
    """
    Function return dict from redis with recovery code data
    """
    return Dict(key='2fa_recovery_code', redis=get_redis_connection())
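
A possible way to use these helpers (the user id and code below are made up); popping the entry makes each OTP single-use:

otp_store = get_2fa_otp_dict()
otp_store['user:42'] = '914233'

def verify_otp(user_id, submitted):
    # pop() removes the code so it cannot be replayed
    return otp_store.pop(user_id, None) == submitted

assert verify_otp('user:42', '914233')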
Example 23
        'id':
        fields.String(required=True,
                      description='The feature flag switch identity'),
        'description':
        fields.String(description='More details about the feature flag'),
        'state':
        fields.String(description='State of feature flag'),
        'active':
        fields.Boolean(readonly=True, description='Active or not')
    })

# REDIS
redis_host = os.environ.get('REDIS_HOST', '10.0.0.19')
redis_port = int(os.environ.get('REDIS_PORT', 6379))
redis_client = redis.Redis(host=redis_host, port=redis_port)
redis_dict = Dict(redis=redis_client, key='woven')

# GUTTER
manager = get_gutter_client(storage=redis_dict, autocreate=True)


def prepare_to_return(switch_p):
    cloud_logging.info('[Switch] --> {}'.format(switch_p))
    _active = manager.active(switch_p.name)
    return {
        'id': switch_p.name,
        'description': switch_p.description,
        'state': switch_p.state_string,
        'active': _active
    }
Example 24
    def create_dict(self, *args, **kwargs):
        kwargs['redis'] = self.redis
        return Dict(*args, **kwargs)
Example 25
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from redis_collections import Dict
import redislite

# database stored in a file (simplest way)
# TODO: change engine type if needed
db_uri = "sqlite:///database.sql"
engine = create_engine(db_uri)

Base = declarative_base()
Session = sessionmaker(bind=engine)

cache_uri = 'storage.rdb'
redis_connection = redislite.StrictRedis(cache_uri)
Cache = Dict(redis=redis_connection, key='storage')
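
Because redislite persists to the on-disk 'storage.rdb', entries in Cache survive process restarts; a minimal sketch of using it as a memo cache (the function name is hypothetical):

def cached_fetch(key, compute):
    """Return the cached value for key, computing and storing it once."""
    if key not in Cache:
        Cache[key] = compute()
    return Cache[key]

value = cached_fetch('expensive', lambda: sum(range(10 ** 6)))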