Пример #1
0
    def run(self):
        """
        Start the Redis service-discovery watcher, build the Redis client
        and open the connection.

        Spawns a thread running ``sd_redis.watch_redis`` and polls until
        the watcher publishes ``sd_redis.redis_master``.
        """
        watcher = Thread(target=sd_redis.watch_redis)
        watcher.start()
        # Bug fix: getattr() without a default raises AttributeError while
        # the watcher thread has not yet set 'redis_master'; poll with a
        # None default until the attribute appears and is truthy.
        while not getattr(sd_redis, 'redis_master', None):
            time.sleep(0.01)

        self.cache = ZRedis(host=sd_redis.redis_master,
                            password=redis_db_pw,
                            db=os.getenv('REDIS_DB'))
        self.connect()
Пример #2
0
    def run(self):
        """
        Rabbit and riak watchers and log processor are run.

        Spawns watcher threads for RabbitMQ and Redis service discovery,
        waits until both have published their results, then builds the
        Redis client and starts the connection's ioloop.
        """
        t1 = Thread(target=sd_rabbit.watch_rabbit)
        t2 = Thread(target=sd_redis.watch_redis)
        t1.start()
        t2.start()
        # Bug fix: the original used 'and', which ends the wait as soon as
        # EITHER attribute appears; we must wait until BOTH are available,
        # because sd_redis.redis_master is read immediately below.
        while not hasattr(sd_rabbit, 'rabbit_nodes') or not hasattr(sd_redis, 'redis_master'):
            time.sleep(0.01)

        self.cache = ZRedis(host=sd_redis.redis_master,
                            password=redis_db_pw,
                            db=os.getenv('REDIS_DB'))
        self.connection = self.connect()
        self.connection.ioloop.start()
Пример #3
0
def find_redis_role(redis_nodes):
    """
    Classify Redis nodes into master/slave roles and pick one of each.

    Args:
        redis_nodes (iterable): host addresses of candidate Redis nodes.

    Returns:
        tuple: ``(master_host, slave_host)``. When no slave was found, a
        random master is returned in the slave position as well.
    """
    roles = {'master': [], 'slave': []}
    for node in redis_nodes:
        try:
            cache = ZRedis(host=node)
            cache.execute('PING')
        except Exception as e:
            zlogger.error(str(e))
            # If the unauthenticated PING fails, retry with the password,
            # because the Redis master requires authentication.
            cache = ZRedis(host=node,
                           password=redis_db_pw)

        # ROLE replies with the role name as the first (bytes) element.
        execute_role = cache.execute('ROLE')
        role_of_node = execute_role[0].decode()
        roles[role_of_node].append(node)

    # Fall back to the master list when no slave was discovered.
    return roles['master'][0], random.choice(roles['slave'] or roles['master'])
Пример #4
0
    def update_instances(self, riak_pb, rabbit_cl, redis_master):
        """
        Set updated riak, rabbit and redis clients on the wanted instance
        (PushWorker or MessagingWorker).

        Args:
            riak_pb (obj): riak client object
            rabbit_cl (obj): rabbit client object
            redis_master (str): redis master ip address
        """
        from zopsm.lib.sd_redis import redis_db_pw

        cache = ZRedis(host=redis_master,
                       password=redis_db_pw,
                       db=os.getenv('REDIS_DB'))

        # Bug fix: the original did self['riak_pb', 'rabbit_cl', 'cache'] = [...],
        # which stores the whole list under a single tuple key. Store each
        # client under its own key so lookups like self['cache'] work.
        self['riak_pb'] = riak_pb
        self['rabbit_cl'] = rabbit_cl
        self['cache'] = cache
Пример #5
0
# Address of the Redis master node, provided via environment.
master = os.getenv('REDIS_MASTER')

if WORKING_ENVIRONMENT in ["zopsm", "develop"]:
    # Consul service and check registration
    # HTTP health check: the ping endpoint is polled every 10s with a 1s
    # timeout; the service is deregistered after 2 minutes in critical state.
    check = consul.Check.http(
        url=f'http://{host_ipv4}:{container_port}/v1/ping',
        timeout='1s',
        interval='10s',
        deregister='2m')
    consul_client.agent.service.register(name='auth',
                                         service_id=f'{container_name}',
                                         address=f'{host_ipv4}',
                                         port=int(container_port),
                                         check=check)

# Module-level Redis client used by the auth helpers in this module.
cache = ZRedis(host=master, password=redis_db_pw, db=os.getenv('REDIS_DB'))

def generate_user_token(lenght=64):
    """
    Generate a URL-safe ASCII user auth token.

    Note: the parameter name keeps its historical misspelling ('lenght')
    so existing keyword callers are not broken.

    Args:
        lenght (int): number of random bytes used to build the token.
            Defaults to 64 (the previous docstring incorrectly said 40).
            The returned string is roughly 4/3 of this byte count long.

    Returns:
        str: a urlsafe base64-encoded random token.
    """
    return secrets.token_urlsafe(lenght)

Пример #6
0
from zopsm.lib.credis import ZRedis
from zopsm.lib import sd_redis

# Shared Redis client; discover a single Redis host via service discovery.
# NOTE(review): the password appears masked ('******') in this snippet —
# presumably replaced before publication; confirm the real value source.
cache = ZRedis(
    host=sd_redis.watch_redis(single=True)[0],
    password=
    '******',
    db=1)
# NOTE: the block below is a key-layout example kept as a bare module-level
# string literal; it is not attached to any object as a docstring.
"""
"Tenants:123:Projects:456:Subscriber:123456:Contacts": {"subscriberId1","subscriberId3","subscriberId2",},

"RefreshTokens:{temp_token}": {"tenant:"123", "account":"007", "project":"456", "service":"789", "user":"******"}
"""

# Fixture user ids used by the tests in this module.
users = [
    "testUser0", "testUser1", "testUser2", "testUser3", "testUser4",
    "testUser5", "testUser6", "testUser7", "testUser8", "testUser9"
]

# Channels a test user may subscribe to.
possible_channels = ["testChannel1", "testChannel2", "testChannel3"]

contact_lists = {
    "testUser0": ["testUser4", "testUser8", "testUser9"],
    "testUser1": [],
    "testUser2": ["testUser3", "testUser5", "testUser7"],
    "testUser3": ["testUser2"],
    "testUser4": ["testUser0", "testUser6"],
    "testUser5": ["testUser2", "testUser6", "testUser9"],
    "testUser6": ["testUser4", "testUser5"],
    "testUser7": ["testUser2", "testUser8", "testUser9"],
    "testUser8": ["testUser0", "testUser7", "testUser9"],
Пример #7
0
 def __init__(self):
     """Back the key-value user storage with a Redis client on the slave node."""
     backend = ZRedis(host=slave, db=os.getenv('REDIS_DB'))
     super(ZopsKeyValueUserStorage, self).__init__(backend)
Пример #8
0
import os
from zopsm.lib.credis import ZRedis
from zopsm.lib.sd_redis import redis_db_pw

# Address of a Redis slave (read replica), provided via environment.
slave = os.getenv('REDIS_SLAVE')

# Module-level Redis client used by the helpers below.
cache = ZRedis(host=slave, db=os.getenv('REDIS_DB'))


def build_pika_message(job, params):
    """Wrap a job name and its parameters in the standard pika payload dict."""
    return dict(job=job, params=params)


def generate_private_message_channel_name(sender, receiver):
    """
    Build a deterministic private-channel name for a pair of users.

    The two ids are ordered before formatting, so either argument order
    yields the same channel name.
    """
    first, second = sorted((sender, receiver))
    return f"prv_{first}_{second}"
Пример #9
0
class QueueManager(object):
    """
    Manages the RabbitMQ connection/channel and per-user queues, fanning
    incoming messages out to registered websocket listeners.
    """

    def __init__(self):
        self.connection = None
        self.channel = None
        self._connected = False

        # user_id -> list of websocket listeners for that user.
        self.event_listeners = {}
        self.connections = {}
        self.cache = None
        self.run()

    def run(self):
        """
        Start the Redis watcher, build the Redis client and connect to Rabbit.
        """
        t1 = Thread(target=sd_redis.watch_redis)
        t1.start()
        # Bug fix: getattr() without a default raises AttributeError while
        # the watcher thread has not yet set 'redis_master'; poll with a
        # None default until the attribute appears and is truthy.
        while not getattr(sd_redis, 'redis_master', None):
            time.sleep(0.01)

        self.cache = ZRedis(host=sd_redis.redis_master,
                            password=redis_db_pw,
                            db=os.getenv('REDIS_DB'))
        self.connect()

    def connect(self, host=None):
        """
        Connection with Rabbit.

        Args:
            host: Rabbit host; when None, the first discovered rabbit node
                from service discovery is used.
        """
        if self._connected:
            zlogger.info('PikaClient: Already connecting to RabbitMQ')
            return

        if not host:
            _, host = list(json.loads(sd_rabbit.rabbit_nodes).items())[0]
        zlogger.info('PikaClient: Connecting to RabbitMQ in Queue Manager')
        param = ConnectionParameters(host=host,
                                     port=5672,
                                     virtual_host=VIRTUAL_HOST,
                                     credentials=sd_rabbit.rabbit_credential)
        self.connection = TornadoConnection(param,
                                            on_open_callback=self.on_connected)

        self.connection.add_on_close_callback(self.on_closed)

        self._connected = True
        zlogger.info("Connection is successful: host:{}".format(host))

    def on_connected(self, connection):
        """
        AMQP connection callback.
        Creates input channel.

        Args:
            connection: AMQP connection

        """
        zlogger.info('PikaClient: connected to RabbitMQ')
        self.connection.channel(self.on_channel_open)

    def on_channel_open(self, channel):
        """
        Input channel creation callback, exchange declaring.

        """
        zlogger.info('PikaClient: Channel open, Declaring exchange')
        self.channel = channel
        self.channel.exchange_declare(exchange='messages',
                                      type='topic',
                                      durable=True)

    def on_message(self, channel, method, header, body):
        """
        When message is received, notify the listeners registered for the
        user identified by the consumer tag.

        """
        user_id = method.consumer_tag
        notify_list = self.event_listeners[user_id]
        self.notify_listeners(body, notify_list)

    def listen_messages(self, queue_name, user_id):
        """
        Start consuming rabbit messages from the user's queue; the user id
        doubles as the consumer tag so on_message can route replies.

        """
        self.channel.basic_consume(consumer_callback=self.on_message,
                                   queue=queue_name,
                                   consumer_tag=user_id,
                                   no_ack=True)

    def notify_listeners(self, message, notify_list):
        """
        Write message to every listener in the given list.

        """
        for listener in notify_list:
            listener.write_message(message)

    def add_event_listener(self, listener, user_info):
        """
        Add listener to the user's set. On the user's first listener, a
        fresh auto-delete queue is declared, bound and consumed, and its
        name is recorded in Redis under 'QueueList:<user_id>'.

        """
        user_id = user_info.get("user")
        if not self.event_listeners.get(user_id):
            queue_name = self.get_queue_name(user_id)
            self.channel.queue_declare(queue=queue_name,
                                       auto_delete=True,
                                       callback=None)
            self.event_listeners.setdefault(user_id, []).append(listener)
            self.input_queue_bind(queue_name, user_info)
            self.listen_messages(queue_name, user_id)
            self.cache.sadd('QueueList:{}'.format(user_id), queue_name)

        else:
            self.event_listeners[user_id].append(listener)
        zlogger.info(
            "New websocket connection is added for user which has user_id:{}".
            format(user_id))

    def remove_event_listener(self, listener, user_id):
        """
        Remove listener from listener list; delete the user's queue and
        drop the user entry when the last listener is gone.

        """
        try:
            if self.event_listeners.get(user_id):
                self.event_listeners[user_id].remove(listener)
                if not self.event_listeners[user_id]:
                    self.channel.queue_delete(
                        queue=self.get_queue_name(user_id))
                    del self.event_listeners[user_id]

        except Exception as exc:
            zlogger.error(
                "An error occurred on remove_event_listener method inside QueueManager. "
                "User Id: {}, Exception: {}".format(user_id, exc))

    def input_queue_bind(self, queue, user_info):
        """
        Input queue declaration callback.
        Input Queue/Exchange binding done here.

        Args:
            queue: input queue
            user_info: user information dict include project, service and user ids

        """
        bind_list = self.get_bind_list(user_info)

        for route_key in bind_list:
            self.channel.queue_bind(callback=None,
                                    exchange='messages',
                                    queue=queue,
                                    routing_key=route_key)

    def get_bind_list(self, user_info):
        """
        Build the routing keys for a user: their own id plus the channels
        they subscribe to (read from Redis).

        Args:
            user_info: user information dict include project, service and user ids
        """
        user_id = user_info.get("user")
        project_id = user_info.get("project")
        service = user_info.get("service")

        bind_list = [user_id]

        # 'CACHE_SUBSCRIBER_CHANNELS' key is Channels list of a user
        subs_channels_key = CACHE_SUBSCRIBER_CHANNELS.format(
            project_id=project_id, service=service, subscriber_id=user_id)

        # Redis returns bytes; decode each channel id for routing keys.
        channels = self.cache.smembers(subs_channels_key)
        channels = [channel_id.decode() for channel_id in channels]

        bind_list.extend(channels)
        return bind_list

    def on_closed(self, connection, _, __):
        """
        Moves listeners from close node's queue manager to queue's new master node.

        """
        self._connected = False

    @staticmethod
    def get_queue_name(user_id):
        """
        Gets queue name according to user id; includes the worker's pid so
        each process gets its own queue for the same user.

        """
        return "{}_{}".format(user_id, os.getpid())
Пример #10
0
class EventProcessor(LogProcessor):
    """
    Consumes event jobs from RabbitMQ and publishes the resulting
    notification messages back to the 'messages' exchange.
    """

    def on_log_message(self, ch, method, properties, body):
        """
        Gets log messages from rabbit and dispatches to the worker method
        named in the payload.

        Args:
            ch: Channel
            method: Method
            properties: Props
            body(dict): Body
                - params(dict): kwargs for methods
                - method(str): methods name

        Returns:
            None
        """

        try:
            body = json.loads(body)
            method_name = body.get('method', None)
            # Resolve the handler by name; unknown/missing methods are a no-op.
            event_worker_method = getattr(self, method_name) if method_name else None
            if event_worker_method is not None:
                event_worker_method(**body['params'])
        except Exception as exc:
            zlogger.error("An error occurred on_log_message method inside EventProcessor."
                          "Exception: {}, ".format(exc))

    def run(self):
        """
        Rabbit and riak watchers and log processor are run.

        """
        t1 = Thread(target=sd_rabbit.watch_rabbit)
        t2 = Thread(target=sd_redis.watch_redis)
        t1.start()
        t2.start()
        # Bug fix: the original used 'and', which ends the wait as soon as
        # EITHER attribute appears; we must wait until BOTH are published,
        # because sd_redis.redis_master is read immediately below.
        while not hasattr(sd_rabbit, 'rabbit_nodes') or not hasattr(sd_redis, 'redis_master'):
            time.sleep(0.01)

        self.cache = ZRedis(host=sd_redis.redis_master,
                            password=redis_db_pw,
                            db=os.getenv('REDIS_DB'))
        self.connection = self.connect()
        self.connection.ioloop.start()

    def status_notify_contacts(self, **kwargs):
        """
        Method to notify online contacts about a subscriber's status update.
        Args:
            **kwargs(dict):
                - project_id(str):
                - service(str):
                - subscriber_id(str):
                - status_message(str):
                - behavioral_status(str):
                - status_intentional(str):
                - last_activity_time(str):
                - contacts_to_notify_key(str): redis set key holding the
                  contact ids to notify; the key is deleted after use.

        Returns:

        """
        exchange = "messages"
        contacts_to_notify_key = kwargs['contacts_to_notify_key']
        contacts_to_notify = self.cache.smembers(contacts_to_notify_key)
        message = {
            "type": "status_delivery",
            "subscriberId": kwargs['subscriber_id'],
            "lastActivityTime": kwargs['last_activity_time'],
            "statusMessage": kwargs['status_message'],
            "behavioralStatus": kwargs['behavioral_status'],
            "statusIntentional": kwargs['status_intentional'],
        }

        # Redis members are bytes; each contact id is also the routing key.
        for contact in contacts_to_notify:
            self.channel.basic_publish(
                exchange,
                contact.decode(),
                json.dumps(message, ensure_ascii=False))

        self.cache.delete(contacts_to_notify_key)

    def fails_non_blocking_jobs(self, **kwargs):
        """
        Publish an error message to the affected user when an event fails.

        Args:
            **kwargs(dict):
                - trackingId(str): unique identifier matches with the triggered event
                - data(dict):
                    - title(str): title of the error
                    - description(str): "Event has failed."
                    - code(int): 500
                - usr_id(str): id of the target or subscriber to notify

        Returns:

        """
        exchange = "messages"
        message = {
            "type": "error",
            "trackingId": kwargs['trackingId'],
            "data": kwargs['data']
        }

        self.channel.basic_publish(
            exchange,
            kwargs['usr_id'],
            json.dumps(message, ensure_ascii=False))

    def channel_message_event(self, **kwargs):
        """
        Publish a channel message to the channel's routing key.

        Args:
            **kwargs(dict):
                - channelId(str): roc channel id
                - data(dict):
                    - title(str): title of the message
                    - body(str): message body
                    - sentTime(str): string representation of sent time
                    - creation_time(str): string representation of creation time(riak object)
                    - last_update_time(str): string representation of last update time(riak object)
                    - sender(str): id of the sender user

        Returns:

        """
        exchange = "messages"
        message = {
            "type": "channel_message",
            "messageId": kwargs['data'].get("id", None),
            "channelId": kwargs['channelId'],
            "title": kwargs['data'].get("title", None),
            "body": kwargs['data'].get("body", None),
            "sentTime": kwargs['data'].get("sentTime", None),
            "sender": kwargs['data'].get("sender", None),
        }

        self.channel.basic_publish(
            exchange,
            kwargs['channelId'],
            json.dumps(message, ensure_ascii=False))

    def direct_message_event(self, **kwargs):
        """
        Publish a direct message to the receiver's routing key.

        Args:
            **kwargs(dict):
                - channelId(str): roc channel id
                - data(dict):
                    - title(str): title of the message
                    - body(str): message body
                    - sentTime(str): string representation of sent time
                    - creation_time(str): string representation of creation time(riak object)
                    - last_update_time(str): string representation of last update time(riak object)
                    - sender(str): id of the sender user

        Returns:

        """
        exchange = "messages"
        message = {
            "type": "direct_message",
            "messageId": kwargs['data'].get("id", None),
            "title": kwargs['data'].get("title", None),
            "body": kwargs['data'].get("body", None),
            "sentTime": kwargs['data'].get("sentTime", None),
            "sender": kwargs['data'].get("sender", None),
        }

        self.channel.basic_publish(
            exchange,
            kwargs['data']['receiver'],
            json.dumps(message, ensure_ascii=False))
Пример #11
0
import websocket
from concurrent.futures import ThreadPoolExecutor
from zopsm.lib.credis import ZRedis
import json


NUMBER_OF_THREADS = 5  # For online users

# Thread pool that drives the simulated online users concurrently.
pool = ThreadPoolExecutor(NUMBER_OF_THREADS)

# Redis client with default connection settings.
cache = ZRedis()


def assertions_5(counter):
    """
    Verify the redis test counter after scenario 5: exactly one message
    must have been counted for testChannel3.

    Args:
        counter (dict): redis test counter dict

    Returns:

    """
    expected = 1
    assert counter["testChannel3"] == expected


def assertions_6(counter):
    """

    Args:
        counter (dict): redis test counter dict