Example 1
def message_forwarder(messaging_ports):
    cmd = os.path.join(os.getenv('PANDIR'), 'panoptes-utils', 'scripts',
                       'run_messaging_hub.py')
    args = [cmd]
    # Note that the other programs using these port pairs consider
    # them to be pub and sub, in that order, but the forwarder sees things
    # in reverse: it subscribes to the port that others publish to,
    # and it publishes to the port that others subscribe to.
    for _, (sub, pub) in messaging_ports.items():
        args.append('--pair')
        args.append(str(sub))
        args.append(str(pub))

    get_root_logger().info('message_forwarder fixture starting: {}', args)
    proc = subprocess.Popen(args,
                            stdout=subprocess.DEVNULL,
                            stderr=subprocess.DEVNULL)
    # It takes a while for the forwarder to start, so allow for that.
    # TODO(jamessynge): Come up with a way to speed up these fixtures.
    time.sleep(3)
    yield messaging_ports
    proc.terminate()
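
For illustration, a minimal sketch of what the messaging_ports fixture value might look like and the command line the fixture would build from it; the dict keys and port numbers here are hypothetical, not taken from the real test configuration:

# Hypothetical fixture value: each entry maps a name to a (sub, pub) pair as
# seen by the forwarder, i.e. it subscribes on the first port and publishes
# on the second.
messaging_ports = {
    'cmd_ports': (43001, 43000),
    'msg_ports': (43011, 43010),
}
# The fixture above would then run roughly:
#   run_messaging_hub.py --pair 43001 43000 --pair 43011 43010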
Example 2
def can_connect_to_mongo():
    global _can_connect_to_mongo
    if _can_connect_to_mongo is None:
        logger = get_root_logger()
        try:
            PanDB(db_type='mongo',
                  db_name='panoptes_testing',
                  logger=logger,
                  connect=True)
            _can_connect_to_mongo = True
        except Exception:
            _can_connect_to_mongo = False
        logger.info('can_connect_to_mongo = {}', _can_connect_to_mongo)
    return _can_connect_to_mongo
Example 3
    def __init__(self,
                 host='localhost',
                 port=3040,
                 connect=True,
                 *args,
                 **kwargs):
        self.logger = get_root_logger()

        self._host = host
        self._port = port

        self._socket = None

        self._is_connected = False
        if connect:
            self.connect()
Example 4
class SocialTwitter(object):
    """Social Messaging sink to output to Twitter."""

    logger = get_root_logger()

    def __init__(self, **kwargs):
        consumer_key = kwargs.get('consumer_key', '')
        if consumer_key == '':
            raise ValueError('consumer_key parameter is not defined.')
        consumer_secret = kwargs.get('consumer_secret', '')
        if consumer_secret == '':
            raise ValueError('consumer_secret parameter is not defined.')
        access_token = kwargs.get('access_token', '')
        if access_token == '':
            raise ValueError('access_token parameter is not defined.')
        access_token_secret = kwargs.get('access_token_secret', '')
        if access_token_secret == '':
            raise ValueError('access_token_secret parameter is not defined.')

        # Output timestamp should always be True by default, otherwise Twitter will reject duplicate statuses.
        self.output_timestamp = kwargs.get("output_timestamp", True)

        # Create a new twitter api object
        try:
            auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
            auth.set_access_token(access_token, access_token_secret)

            self.api = tweepy.API(auth)
        except tweepy.TweepError:
            msg = 'Error authenticating with Twitter. Please check your Twitter configuration.'
            self.logger.warning(msg)
            raise ValueError(msg)

    def send_message(self, msg, timestamp):
        try:
            # update_status returns a tweepy Status instance, but we
            # drop it on the floor because we don't have anything we
            # can do with it.
            if self.output_timestamp:
                self.api.update_status('{} - {}'.format(msg, timestamp))
            else:
                self.api.update_status(msg)
        except tweepy.TweepError:
            self.logger.debug(
                'Error tweeting message. Please check your Twitter configuration.'
            )
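
A minimal usage sketch for SocialTwitter, assuming placeholder credentials and a plain string timestamp; the real values would come from the PANOPTES social-messaging configuration:

# All credential values below are placeholders for illustration only.
social_config = {
    'consumer_key': '<consumer-key>',
    'consumer_secret': '<consumer-secret>',
    'access_token': '<access-token>',
    'access_token_secret': '<access-token-secret>',
    'output_timestamp': True,
}

twitter_sink = SocialTwitter(**social_config)
# The timestamp is shown as a plain string here; the caller normally supplies it.
twitter_sink.send_message('PANOPTES unit is observing', '2018-01-01 12:00:00')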
Example 5
def pytest_runtest_logreport(report):
    """Adds the failure info that pytest prints to stdout into the log."""
    if report.skipped or report.outcome != 'failed':
        return
    try:
        logger = get_root_logger()
        logger.critical('')
        logger.critical('  TEST {} FAILED during {}\n\n{}\n', report.nodeid,
                        report.when, report.longreprtext)
        cnt = 15
        if report.capstdout:
            logger.critical('{}Captured stdout during {}{}\n{}\n', '= ' * cnt,
                            report.when, ' =' * cnt, report.capstdout)
        if report.capstderr:
            logger.critical('{}Captured stderr during {}{}\n{}\n', '* ' * cnt,
                            report.when, ' *' * cnt, report.capstderr)
    except Exception:
        pass
Example 6
def pytest_runtest_logfinish(nodeid, location):
    """Signal the complete finish of running a single test item.

    This hook will be called after pytest_runtest_setup(),
    pytest_runtest_call() and pytest_runtest_teardown() hooks.

    Args:
        nodeid (str): Full id of the item.
        location: A triple of (filename, linenum, testname).
    """
    try:
        logger = get_root_logger()
        logger.critical('')
        logger.critical('       END TEST {}', nodeid)
        logger.critical('')
        logger.critical('##########' * 8)
    except Exception:
        pass
Example 7
    def __init__(self, bucket_name, project_id='panoptes-survey'):
        """Create an object that can interact easily with storage buckets.

        Note:
            This assumes that you have authenticated to the Google Cloud network
            using your provided auth_key.

            See the README:

            https://github.com/panoptes/POCS/tree/develop/pocs/utils/google

        Args:
            bucket_name (str): Name of bucket to use.
            project_id (str, optional): Project id hosting the bucket. Default 'panoptes-survey'
        Raises:
            error.GoogleCloudError: Error raised if valid connection cannot be formed for
                given project, bucket, and authorization.
        """
        self.logger = get_root_logger()
        super(PanStorage, self).__init__()

        try:
            self.unit_id = load_config()['pan_id']
        except KeyError:
            raise error.GoogleCloudError("Missing pan_id in config "
                                         "Cannot connect to Google services.")

        assert re.match(r'PAN\d\d\d', self.unit_id) is not None

        self.project_id = project_id
        self.bucket_name = bucket_name

        try:
            self.client = storage.Client(project=self.project_id)
            self.bucket = self.client.get_bucket(bucket_name)
        except exceptions.Forbidden:
            raise error.GoogleCloudError(
                "Storage bucket does not exist or no permissions. "
                "Ensure that the auth_key has valid permissions to the bucket. "
                "or that you have executed 'gcloud auth'")

        self.logger.info("Connected to storage bucket {}", self.bucket_name)
Example 8
class SocialSlack(object):

    """Social Messaging sink to output to Slack."""

    logger = get_root_logger()

    def __init__(self, **kwargs):
        self.web_hook = kwargs.get('webhook_url', '')
        if self.web_hook == '':
            raise ValueError('webhook_url parameter is not defined.')
        else:
            self.output_timestamp = kwargs.get('output_timestamp', False)

    def send_message(self, msg, timestamp):
        try:
            if self.output_timestamp:
                post_msg = '{} - {}'.format(msg, timestamp)
            else:
                post_msg = msg

            # We ignore the response body and headers of a successful post.
            requests.post(self.web_hook, json={'text': post_msg})
        except Exception as e:
            self.logger.debug('Error posting to slack: {}'.format(e))
Example 9
class PanMessaging(object):
    """Provides messaging services within a PANOPTES robotic telescope.

    Supports broadcasting messages from publishers (e.g. a POCS or
    ArduinoIO class instance) to subscribers (also typically class
    instances). The publishers and subscribers may be in the same
    process, or in separate processes. The messages all go through
    a message forwarder; this is a process which listens for messages
    from all publishers on one TCP port and forwards each message to
    all subscribers that are connected to a second TCP port.

    Do not create PanMessaging instances directly. Publishers should
    call PanMessaging.create_publisher to create an instance of
    PanMessaging, on which they can then call send_message.
    Subscribers should call PanMessaging.create_subscriber to create
    an instance of PanMessaging, on which they can then call
    receive_message.

    Messages are sent to topics, names that provide a high-level
    partitioning of messages. A topic name may not include whitespace.
    Among the currently used topic names are:

      * PANCHAT (sent from POCS.say)
      * PAWS-CMD (sent from PAWS websockets.py)
      * POCS (sent by class POCS)
      * POCS-CMD (sent by class POCS)
      * STATUS (sent by class POCS)
      * weather (from peas/sensors.py)
      * environment (from peas/sensors.py)
      * telemetry:commands (in ArduinoIO... new)
      * camera:commands (in ArduinoIO... new)

    And some other topics are used in tests:

      * Test-Topic (test_messaging.py)
      * RUNNING (test_pocs.py)
      * POCS-CMD (test_pocs.py)

    The method receive_message will return messages from all topics;
    the caller must check the returned topic name to determine if
    the message value is of interest.

    Note: PAWS doesn't use PanMessaging, which will likely result in
    problems as we evolve PanMessaging and the set of topics.
    TODO: Figure out how to share PanMessaging with PAWS.

    The value of a message being sent may be a string (in which case it
    is wrapped in a dict(message=<value>, timestamp=<now>)) or a dict,
    in which case it will be "scrubbed", i.e. the dict entries will be
    modified as necessary so that the dict can be serialized using
    json.dumps.

    TODO Pick an encoding of strings (e.g. UTF-8) so that non-ASCII
    strings may be sent and received without corruption of the data
    or exceptions being thrown.

    ZeroMQ is used to provide the underlying pub-sub support. ZeroMQ
    supports only a very basic message format: an array of bytes.
    PanMessaging converts the provided message topic and value into
    a byte array of this format:
        <topic-name><space><serialized-value>
    """
    logger = get_root_logger()

    # Topic names must start with a letter and contain only letters,
    # digits, and the characters '-', '_', '.' and ':'.
    topic_name_re = re.compile('[a-zA-Z][-a-zA-Z0-9_.:]*')

    def __init__(self, **kwargs):
        """Do not call this directly."""
        # Create a new context
        self.context = zmq.Context()
        self.socket = None

    @classmethod
    def create_forwarder(cls, sub_port, pub_port, ready_fn=None, done_fn=None):
        subscriber, publisher = PanMessaging.create_forwarder_sockets(
            sub_port, pub_port)
        PanMessaging.run_forwarder(subscriber,
                                   publisher,
                                   ready_fn=ready_fn,
                                   done_fn=done_fn)

    @classmethod
    def create_forwarder_sockets(cls, sub_port, pub_port):
        cls.logger.info('Creating forwarder sockets for {} -> {}', sub_port,
                        pub_port)
        subscriber = PanMessaging.create_subscriber(sub_port,
                                                    bind=True,
                                                    connect=False)
        publisher = PanMessaging.create_publisher(pub_port,
                                                  bind=True,
                                                  connect=False)
        return subscriber, publisher

    @classmethod
    def run_forwarder(cls, subscriber, publisher, ready_fn=None, done_fn=None):
        publisher.logger.info('run_forwarder')
        try:
            if ready_fn:
                ready_fn()
            publisher.logger.info('run_forwarder calling zmq.device')
            zmq.device(zmq.FORWARDER, subscriber.socket, publisher.socket)
        except KeyboardInterrupt:
            pass
        except Exception as e:
            publisher.logger.warning(e)
            publisher.logger.warning("bringing down zmq device")
        finally:
            publisher.logger.info(
                'run_forwarder closing publisher and subscriber')
            publisher.close()
            subscriber.close()
            if done_fn:
                done_fn()

    @classmethod
    def create_publisher(cls,
                         port,
                         bind=False,
                         host='localhost',
                         connect=True):
        """ Create a publisher

        Args:
            port (int): The port (on localhost) to bind to.

        Returns:
            A ZMQ PUB socket
        """
        obj = cls()

        obj.logger.debug(
            "Creating publisher. Binding to port {} ".format(port))

        socket = obj.context.socket(zmq.PUB)

        if bind:
            socket.bind(f'tcp://*:{port}')
        elif connect:
            socket.connect(f'tcp://{host}:{port}')

        obj.socket = socket

        return obj

    @classmethod
    def create_subscriber(cls,
                          port,
                          topic='',
                          host='localhost',
                          bind=False,
                          connect=True):
        """ Create a listener

        Args:
            port (int):         The port (on localhost) to bind to.
            topic (str):      Which topic or topic prefix to subscribe to.

        """
        obj = cls()
        obj.logger.debug("Creating subscriber. Port: {} \tTopic: {}".format(
            port, topic))

        socket = obj.context.socket(zmq.SUB)

        if bind:
            try:
                socket.bind(f'tcp://*:{port}')
            except zmq.error.ZMQError:
                obj.logger.debug('Problem binding port {}'.format(port))
        elif connect:
            socket.connect(f'tcp://{host}:{port}')

        socket.setsockopt_string(zmq.SUBSCRIBE, topic)

        obj.socket = socket

        return obj

    def send_message(self, topic, message):
        """ Responsible for actually sending message across a topic

        Args:
            topic(str):   Name of topic to send on. The name must
                match topic_name_re.
            message:   Message to be sent (a string or a dict).
        """
        if not isinstance(topic, str):
            raise ValueError('Topic name must be a string')
        elif not self.topic_name_re.fullmatch(topic):
            raise ValueError('Topic name ("{}") is not valid'.format(topic))

        if isinstance(message, str):
            message = {
                'message': message,
                'timestamp': current_time(pretty=True),
            }
        elif isinstance(message, dict):
            message = self.scrub_message(message)
        else:
            raise ValueError('Message value must be a string or dict')

        msg_object = dumps(message, skipkeys=True)

        full_message = '{} {}'.format(topic, msg_object)

        if topic == 'PANCHAT':
            self.logger.info("{} {}".format(topic, message['message']))

        # Send the message
        self.socket.send_string(full_message, flags=zmq.NOBLOCK)

    def receive_message(self, blocking=True, flags=0, timeout_ms=0):
        """Receive a message

        Receives a message for the current subscriber. Blocks by default, pass
        `flags=zmq.NOBLOCK` for non-blocking.

        Args:
            blocking (bool, optional): If True, blocks until a message is
                received or timeout_ms has elapsed (if timeout_ms > 0).
            flags (int, optional): Any valid recv flag, e.g. zmq.NOBLOCK.
            timeout_ms (int, optional): Time in milliseconds to wait for
                a message to arrive. Only applies if blocking is True.

        Returns:
            tuple(str, dict): Tuple containing the topic and a dict
        """
        topic = None
        msg_obj = None
        if not blocking:
            flags = flags | zmq.NOBLOCK
        elif timeout_ms > 0:
            # Wait until a message is available or the timeout expires.
            # TODO(jamessynge): Remove flags=..., confirm that works with
            # the default flags value of zmq.POLLIN.
            self.socket.poll(timeout=timeout_ms,
                             flags=(zmq.POLLIN | zmq.POLLOUT))
            # Don't block at this point, because we will have waited as long
            # as necessary.
            flags = flags | zmq.NOBLOCK
        try:
            message = self.socket.recv_string(flags=flags)
        except Exception:
            pass
        else:
            topic, msg = message.split(' ', maxsplit=1)
            try:
                msg_obj = loads(msg)
            except Exception:
                msg_obj = from_yaml(msg)

        return topic, msg_obj

    def close(self):
        """Close the socket """
        self.socket.close()
        self.context.term()

    def scrub_message(self, message):
        result = {}

        for k, v in message.items():
            if isinstance(v, dict):
                v = self.scrub_message(v)

            if isinstance(v, u.Quantity):
                v = v.value

            if isinstance(v, datetime.datetime):
                v = v.isoformat()

            if isinstance(v, ObjectId):
                v = str(v)

            if isinstance(v, Time):
                v = str(v.isot).split('.')[0].replace('T', ' ')

            # Hmmmm. What is going on here? We need some documentation.
            if k.endswith('_time'):
                v = str(v).split(' ')[-1]

            if isinstance(v, float):
                v = round(v, 3)

            result[k] = v

        return result
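
A minimal end-to-end sketch of the pub/sub flow described in the class docstring. The port numbers are hypothetical; in a real setup the forwarder device runs in a separate process (see the message_forwarder fixture in Example 1 and the run_messaging_hub.py excerpt in Example 11), and a short delay is usually allowed for the sockets to connect:

# Hypothetical ports: publishers connect to 6500, subscribers to 6501.
pub_side_port, sub_side_port = 6500, 6501

# The forwarder subscribes on the port publishers send to and re-publishes on
# the port subscribers listen to. run_forwarder() blocks, so it is normally
# started in its own process; here only its sockets are created.
fwd_sub, fwd_pub = PanMessaging.create_forwarder_sockets(pub_side_port, sub_side_port)

# Clients connect to the two sides of the forwarder.
publisher = PanMessaging.create_publisher(pub_side_port)
subscriber = PanMessaging.create_subscriber(sub_side_port, topic='STATUS')

# On the wire each message becomes '<topic-name><space><serialized-value>'.
publisher.send_message('STATUS', {'state': 'observing'})

# With the forwarder device running (and after the slow-joiner delay that
# newly connected ZeroMQ subscribers need), this returns the topic name and
# the message dict; on timeout it returns (None, None).
topic, msg = subscriber.receive_message(blocking=True, timeout_ms=1000)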
Example 10
def db(db_type):
    return PanDB(db_type=db_type,
                 db_name='panoptes_testing',
                 logger=get_root_logger(),
                 connect=True)
Example 11
            arg_error(
                'Port pair {} -> {} invalid. Ports must be distinct.'.format(
                    sub, pub))
        validate_unique_port(pub)
        sub_and_pub_pairs.append((sub, pub))

    if args.from_config and args.config_host:
        try:
            config = get_config(host=args.config_host)
        except Exception as e:
            arg_error(
                f'Invalid config. Is the config server running? Error: {e!r}')

        add_pair(config['messaging']['cmd_port'])
        add_pair(config['messaging']['msg_port'])

    if args.pairs:
        for sub, pub in args.pairs:
            add_pair(sub, pub)

    if args.ports:
        for sub in args.ports:
            add_pair(sub)

    if not sub_and_pub_pairs:
        arg_error('Found no port pairs to forward between.')

    the_root_logger = get_root_logger()

    run_forwarders(sub_and_pub_pairs)