Example #1
    def __init__(self,
                 back_plane_ip_address=BANYAN_IP,
                 process_name=None,
                 com_port='None',
                 baud_rate=115200,
                 log=False,
                 quiet=False,
                 loop_time="0.1"):
        """
        This is constructor for the Monitor class
        :param back_plane_ip_address: IP address of the currently running backplane
        :param subscriber_port: subscriber port number - matches that of backplane
        :param publisher_port: publisher port number - matches that of backplane
        """

        # initialize the base class
        super().__init__(back_plane_ip_address,
                         process_name=process_name,
                         numpy=True)

        m.patch()  # apply the msgpack_numpy monkey patch

        # Set the subscriber topic that will be used to listen for messages to be relayed to Unity
        self.set_subscriber_topic('send_to_unity')

        self.receive_loop()
Example #2
    def __init__(self):
        self.onl = Online()
        self.vis = Metric_Visualizer()
        self.serv = ExperienceServer(self.ob_callback, deserialize_obs(), 4)
        self.exp_path = self.get_exp_path()

        m.patch()  # apply the msgpack_numpy monkey patch
Example #3
def _compress_msgpack(data, uncompress=None):
    ''' Compress/Uncompress msgpack data '''

    # import the package
    try:
        import msgpack
        import msgpack_numpy as m
    except ImportError as e:
        compress_with = 'json'
        raise BrainWarning(
            'Must have Python packages msgpack and msgpack_numpy '
            'installed to use msgpack compression.  Defaulting to json')
    else:
        m.patch()

    # do the compression
    try:
        if uncompress:
            comp_data = msgpack.unpackb(data, raw=False)
        else:
            comp_data = msgpack.packb(data, use_bin_type=True)
    except Exception as e:
        raise BrainError('Cannot (un)compress msgpack data. {0}'.format(e))
    else:
        return comp_data
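For reference, a minimal round trip under the patch looks like this (a sketch assuming only numpy, msgpack, and msgpack_numpy are installed):

import numpy as np
import msgpack
import msgpack_numpy as m

m.patch()  # after this, packb/unpackb transparently handle numpy types

arr = np.arange(6, dtype=np.float32).reshape(2, 3)
packed = msgpack.packb(arr, use_bin_type=True)
restored = msgpack.unpackb(packed, raw=False)
assert np.array_equal(arr, restored)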
Example #4
    def __init__(self, connect_to='tcp://127.0.0.1:5555'):
        """
        sublist:list of rostopics you want sent over
        """
        #important zmq initialization stuff
        self.zmq_context = zmq.SerializingContext()
        self.zmq_socket = self.zmq_context.socket(zmq.REQ)
        self.zmq_socket.connect(connect_to)

        #convenient lambdas to use later on
        self.rostojson = lambda x: json_message_converter.convert_ros_message_to_json(
            x)
        self.jsontoros = lambda topic, x: json_message_converter.convert_json_to_ros_message(
            topic, x)

        #Syncs to each frame
        self.lidar_sub = rospy.Subscriber("/scan", LaserScan,
                                          self.lidar_callback)
        self.steer_sub = rospy.Subscriber(
            "/vesc/high_level/ackermann_cmd_mux/output", AckermannDriveStamped,
            self.steer_callback)
        self.cam_sub = rospy.Subscriber("/usb_cam/image_raw", Image,
                                        self.cam_callback)

        # Sends are hooked to the camera callback, so keep the latest of each observation
        self.latest_obs = {}
        m.patch()
        self.bridge = CvBridge()
Example #5
 def __init__(self, max_msg_len=8192):
     """
     Args:
         max_msg_len (int): The maximum number of bytes to read from the socket.
     """
     self.max_msg_len = max_msg_len
     # Make all msgpack methods use the numpy-aware de/encoders.
     mnp.patch()
Example #6
def load():
    global is_loaded
    if not is_loaded:
        # Patch numpy types into msgpack
        msgpack_numpy.patch()

        logbook.StreamHandler(sys.stdout, level=logbook.DEBUG).push_application()
        logbook.compat.redirect_logging()
        is_loaded = True
Example #7
 def __init__(self):
     self.onl = Online()
     self.vis = Metric_Visualizer()
     self.trainer = Trainer(sess_type="online")
     self.serv = ExperienceServer(self.ob_callback, deserialize_obs(), 4)
     self.exp_path = self.trainer.get_exp_path()
     self.modelpath = self.trainer.get_model_path()
     self.modelname = self.trainer.get_model_name()
     m.patch()  # apply the msgpack_numpy monkey patch
Example #8
def create_compressed_msgpack(data, outfilepath):
    import zstandard as zstd
    import msgpack
    import msgpack_numpy
    msgpack_numpy.patch()

    compressor = zstd.ZstdCompressor(level=22)
    with open(outfilepath, 'wb') as f:
        print('writing', outfilepath)
        f.write(compressor.compress(msgpack.packb(data, use_bin_type=True)))
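The snippet above only writes; a hypothetical inverse, load_compressed_msgpack, might look like this (a sketch assuming the file was produced by the function above, with the same zstandard and msgpack-numpy setup):

def load_compressed_msgpack(infilepath):
    import zstandard as zstd
    import msgpack
    import msgpack_numpy
    msgpack_numpy.patch()

    # a one-shot decompress works because ZstdCompressor.compress()
    # embeds the content size in the frame header
    decompressor = zstd.ZstdDecompressor()
    with open(infilepath, 'rb') as f:
        return msgpack.unpackb(decompressor.decompress(f.read()), raw=False)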
Example #9
 def __init__(self,
              recv_callback,
              deser_func,
              deser_length,
              open_port='tcp://*:5555'):
     """Opens a zmq.ROUTER to recv batches of 'experiences' from F110 & process them"""
     self.zmq_context = zmq.Context()
     self.zmq_socket = self.zmq_context.socket(zmq.ROUTER)
     self.zmq_socket.bind(open_port)
     self.recv_callback = recv_callback
     self.deser_func = deser_func
     self.deser_length = deser_length
     m.patch()  # apply the msgpack_numpy monkey patch
     threading.Thread.__init__(self)
Example #10
    def __init__(self, back_plane_ip_address=BANYAN_IP,
                 process_name=None, com_port='None', baud_rate=115200, log=False, quiet=False, loop_time="0.1"):
        """
        This is constructor for the Monitor class
        :param back_plane_ip_address: IP address of the currently running backplane
        :param subscriber_port: subscriber port number - matches that of backplane
        :param publisher_port: publisher port number - matches that of backplane
        """

        # initialize the base class
        super().__init__(back_plane_ip_address,  process_name=process_name, numpy=True)


        m.patch()  # apply the msgpack_numpy monkey patch

        self.receive_loop()
Example #11
def load_pickle(path):
    try:
        import msgpack
        import msgpack_numpy as m
        m.patch()
        return msgpack.unpack(open(path, 'rb'))  # msgpack data must be read in binary mode
    except:
        try:
            return pickle.load(open(path, 'rb'),
                               fix_imports=True,
                               encoding="latin1")
        except TypeError:
            try:
                return pickle.load(open(path, 'r'))
            except ValueError:
                return cPickle.loads(path)
Example #12
    def __init__(self, env_fns, engine):
        super(SubProcEnvManager, self).__init__(env_fns, engine)
        self.waiting = False
        self.closed = False
        self.processes = []

        self._zmq_context = zmq.Context()
        self._zmq_ports = []
        self._zmq_sockets = []

        # make a temporary env to read off the spaces and preprocessors
        dummy = env_fns[0]()
        self._observation_space = dummy.observation_space
        self._action_space = dummy.action_space
        self._cpu_preprocessor = dummy.cpu_preprocessor
        self._gpu_preprocessor = dummy.gpu_preprocessor
        dummy.close()

        # Allows msgpack to work with NumPy
        m.patch()

        # iterate envs to get torch shared memory through pipe then close it
        shared_memories = []

        for w_ind in range(self.nb_env):
            pipe, w_pipe = mp.Pipe()
            socket, port = zmq_robust_bind_socket(self._zmq_context)

            process = mp.Process(
                target=worker,
                args=(w_pipe, pipe, port, CloudpickleWrapper(env_fns[w_ind])),
            )
            process.daemon = True
            process.start()
            self.processes.append(process)

            self._zmq_sockets.append(socket)

            pipe.send(("get_shared_memory", None))
            shared_memories.append(pipe.recv())

            # switch to zmq socket and close pipes
            pipe.send(("switch_zmq", None))
            pipe.close()
            w_pipe.close()

        self.shared_memories = listd_to_dlist(shared_memories)
Example #13
def load():
    global is_loaded
    if not is_loaded:
        # Patch numpy types into msgpack
        msgpack_numpy.patch()

        # Redirect flask logger to logbook
        werkzeug_logger = logging.getLogger('werkzeug')
        del werkzeug_logger.handlers[:]
        werkzeug_logger.addHandler(RedirectLoggingHandler())

        # Override the built-in werkzeug logging function in order to change the log line format.
        from werkzeug.serving import WSGIRequestHandler
        WSGIRequestHandler.log = lambda self, type, message, *args: getattr(
            werkzeug_logger, 'debug')('%s %s' %
                                      (self.address_string(), message % args))

        # Register loggers
        for handler in log_handlers:
            handler.push_application()
        is_loaded = True
Example #14
def save_pickle(path, obj):
    import msgpack
    import msgpack_numpy as m
    m.patch()
    return msgpack.pack(obj, open(path, 'wb'))  # msgpack data must be written in binary mode
Example #15
 def __init__(self):
     m.patch()  # the constructor only installs the msgpack_numpy patch
Example #16
 def setUp(self):
      patch()
Example #17
Copyright (C) 2012-2013 iSolver Software Solutions
Distributed under the terms of the GNU General Public License (GPL version 3 or any later version).

.. moduleauthor:: Sol Simpson <*****@*****.**> + contributors, please see credits section of documentation.
.. fileauthor:: Sol Simpson <*****@*****.**>
"""
from __future__ import division

from builtins import range
from builtins import object
from psychopy.iohub import Computer
import msgpack
try:
    import msgpack_numpy as m
    m.patch()
except Exception:
    pass
import struct
from weakref import proxy
from psychopy.iohub.util import NumPyRingBuffer as RingBuffer
from psychopy.iohub import print2err, printExceptionDetailsToStdErr
getTime=Computer.getTime

MAX_PACKET_SIZE=64*1024

from gevent import sleep, Greenlet

class SocketConnection(object):
    def __init__(self,local_host=None,local_port=None,remote_host=None,remote_port=None,rcvBufferLength=1492, broadcast=False, blocking=0, timeout=0):
        self._local_port= local_port
Example #18
import logging
import sys
import time
import zlib
from hashlib import sha3_224
import socket

from typing import Callable

logger = logging.getLogger(__name__)

from confluent_kafka import Producer, Consumer, KafkaError
import msgpack
import msgpack_numpy

msgpack_numpy.patch()  # add numpy array support for msgpack

from kafka_rpc.aes import AESEncryption
from kafka_rpc.topic_manage import KafkaControl


class KRPCServer:
    def __init__(self, host: str, port: int, handle, topic_name: str, server_name: str = None,
                 num_partitions: int = 64, replication_factor: int = 1,
                 max_polling_timeout: float = 0.001, **kwargs):
        """
        Init Kafka RPCServer.

        Multiple KRPCServer instances can be instantiated to balance the load.
        If any server goes down, another KRPCServer will automatically take its place.
Example #19
    def __init__(self,
                 back_plane_ip_address=None,
                 subscriber_port='43125',
                 publisher_port='43124',
                 process_name='None',
                 numpy=False,
                 external_message_processor=None,
                 receive_loop_idle_addition=None,
                 connect_time=0.3,
                 subscriber_list=None,
                 event_loop=None):
        """
        The __init__ method sets up all the ZeroMQ "plumbing"

        :param back_plane_ip_address: banyan_base back_planeIP Address -
                                      if not specified, it will be set to the
                                      local computer.

        :param subscriber_port: banyan_base back plane subscriber port.
               This must match that of the banyan_base backplane

        :param publisher_port: banyan_base back plane publisher port.
                               This must match that of the banyan_base backplane.

        :param process_name: Component identifier in banner at component startup.

        :param numpy: Set true if you wish to include numpy matrices in your messages.

        :param external_message_processor: external method to process messages

        :param receive_loop_idle_addition: an external method called in the idle section
                                           of the receive loop

        :param connect_time: a short delay to allow the component to connect to the Backplane
        """

        # call to super allows this class to be used in multiple inheritance scenarios when needed
        super(BanyanBaseAIO, self).__init__()

        self.backplane_exists = False

        self.back_plane_ip_address = None
        self.numpy = numpy
        self.external_message_processor = external_message_processor
        self.receive_loop_idle_addition = receive_loop_idle_addition
        self.connect_time = connect_time
        self.subscriber_list = subscriber_list
        self.my_context = None
        self.subscriber = None
        self.publisher = None
        self.the_task = None

        if event_loop:
            self.event_loop = event_loop
        else:
            # fix for "not implemented" bugs in Python 3.8
            if sys.platform == 'win32':
                asyncio.set_event_loop_policy(
                    asyncio.WindowsSelectorEventLoopPolicy())
            self.event_loop = asyncio.get_event_loop()

        # if using numpy apply the msgpack_numpy monkey patch
        if numpy:
            m.patch()

        # If no back plane address was specified, determine the IP address of the local machine
        if back_plane_ip_address:
            self.back_plane_ip_address = back_plane_ip_address
        else:
            # check for a running backplane
            for pid in psutil.pids():
                p = psutil.Process(pid)
                try:
                    p_command = p.cmdline()
                # ignore these psutil exceptions
                except (psutil.AccessDenied, psutil.ZombieProcess):
                    continue
                try:
                    if any('backplane' in s for s in p_command):
                        self.backplane_exists = True
                    else:
                        continue
                except UnicodeDecodeError:
                    continue

            if not self.backplane_exists:
                raise RuntimeError(
                    'Backplane is not running - please start it.')
            # determine this computer's IP address
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            # use the google dns
            try:
                s.connect(('8.8.8.8', 1))
                self.back_plane_ip_address = s.getsockname()[0]
            except:
                self.back_plane_ip_address = '127.0.0.1'
            finally:
                s.close()

        self.subscriber_port = subscriber_port
        self.publisher_port = publisher_port

        print('\n************************************************************')
        print(process_name + ' using Back Plane IP address: ' +
              self.back_plane_ip_address)
        print('Subscriber Port = ' + self.subscriber_port)
        print('Publisher  Port = ' + self.publisher_port)
        print('************************************************************')
Example #20
    def __init__(self,
                 back_plane_csv_file=None,
                 process_name='None',
                 loop_time=.1,
                 numpy=False,
                 connect_time=0.3):
        """
        The __init__ method sets up all the ZeroMQ "plumbing"

        :param back_plane_csv_file: full path to .csv file with backplane descriptors

        :param process_name: identifier for your component printed at startup on the console

        :param loop_time: receive loop sleep time

        :param numpy: Set true if you wish to include numpy matrices in your messages

        :param connect_time: a short delay to allow the component to connect to the Backplane

        :return:
        """

        # socket type - used for calls to find_socket
        self.SUB_SOCK = 0
        self.PUB_SOCK = 1

        if back_plane_csv_file is None:
            raise ValueError(
                'You must specify a valid .csv backplane descriptor file')

        # file specified, make sure it exists
        if not os.path.isfile(back_plane_csv_file):
            raise ValueError("Can't find backplane configuration file")

        if process_name == 'None':
            print('Warning: No Process Name Was Specified')

        self.numpy = numpy

        self.connect_time = connect_time

        # if using numpy apply the msgpack_numpy monkey patch
        if numpy:
            m.patch()

        self.loop_time = loop_time

        # get a zeromq context
        self.context = zmq.Context()

        # a list of dictionaries describing connections to the back planes
        self.backplane_table = []

        print("\nUsing Backplane Descriptor File: ", back_plane_csv_file)

        with open(back_plane_csv_file) as csvfile:
            reader = csv.DictReader(csvfile)
            print(
                '\n************************************************************\n'
            )
            for row in reader:
                # make sure backplane name is unique
                if any(d['backplane_name'] == row['backplane_name']
                       for d in self.backplane_table):
                    raise RuntimeError(
                        'Duplicate Back Plane Name - check your .csv file')

                print(process_name + ' using ' + row['backplane_name'] +
                      ' Back plane at IP Address: ' + row['ip_address'])

                # setup a publisher and subscriber for each backplane
                subscriber = None
                if row['subscriber_port']:
                    subscriber = self.context.socket(zmq.SUB)
                    connect_string = "tcp://" + row['ip_address'] + ':' + row[
                        'subscriber_port']
                    subscriber.connect(connect_string)

                publisher = None
                if row['publisher_port']:
                    publisher = self.context.socket(zmq.PUB)
                    connect_string = "tcp://" + row['ip_address'] + ':' + row[
                        'publisher_port']
                    publisher.connect(connect_string)

                # get topics and subscribe to them
                # test that topic string has a leading and trailing []
                if row['subscriber_port']:
                    print('    Subscriber Port = ' + row['subscriber_port'])
                    topic_list = row['subscriber_topic']
                    if '[' not in topic_list:
                        raise RuntimeError(
                            'Topic field must begin with "[" and end with "]" '
                        )
                    if ']' not in topic_list:
                        raise RuntimeError(
                            'Topic field must begin with "[" and end with "]" '
                        )

                    # make sure that the topic string does not contain a space character
                    if ' ' in topic_list:
                        raise RuntimeError(
                            'Topics may not contain a space character')

                    topic_list = topic_list[1:-1].split(',')

                    # subscribe to topics in list
                    for t in topic_list:
                        if sys.version_info[0] < 3:
                            t = t.encode()
                        print('        Subscribed to topic: ' + t)
                        self.set_subscriber_topic(t, subscriber)
                else:
                    print('    Subscriber Port = None Specified')

                if row['publisher_port']:
                    print('    Publisher  Port = ' + row['publisher_port'])
                else:
                    print('    Publisher  Port = None Specified')

                # update backplane table with new entry
                self.backplane_table.append({
                    'backplane_name':
                    row['backplane_name'],
                    'subscriber':
                    subscriber,
                    'publisher':
                    publisher
                })

            # wait for the last Backplane TCP connection
            time.sleep(self.connect_time)

            print()
            print('Loop Time = ' + str(loop_time) + ' seconds\n')
            print(
                '************************************************************')
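The loop above implies a descriptor file with one row per backplane; a hypothetical example (the column names follow the row[...] keys used in the code, and the topic list is bracketed with no spaces):

backplane_name,ip_address,subscriber_port,publisher_port,subscriber_topic
main_bp,192.168.2.100,43125,43124,"[temperature,humidity]"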
Example #21
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with QAMpy.  If not, see <http://www.gnu.org/licenses/>.
#
# Copyright 2018 Jochen Schröder, Mikael Mazur

import zmq
import numpy as np
import msgpack
import msgpack_numpy as msgp_npy
from qampy.core.phaserecovery import blindphasesearch

# careful we cannot use the unpatched msgpack because it messes up dictionary keys to bytes
msgp_npy.patch()


def pack_array(A):
    return msgpack.packb(A)


def unpack_array(A):
    return msgpack.unpackb(A)


def send_array(socket, A, flags=0, copy=True, track=False):
    socket.send(pack_array(A), flags=flags, copy=copy, track=track)


def recv_array(socket, flags=0, copy=True, track=False):
    # inverse of send_array: receive the packed bytes and unpack them
    msg = socket.recv(flags=flags, copy=copy, track=track)
    return unpack_array(msg)
Example #22
import sys
from datetime import datetime
from datetime import date   
import pdb
import colorama
colorama.init(strip=False)

import msgpack
import msgpack_numpy


import simplejson
import XTSMobjectify
import DataBomb
import InfiniteFileStream
msgpack_numpy.patch()  # this patch changes the behavior of "msgpack" itself:
# specifically, it changes how "encoding='utf-8'" behaves when unpacking
import XTSM_Server_Objects
import file_locations


import numpy
from enthought.traits.api import HasTraits
from enthought.traits.api import Int as TraitedInt
from enthought.traits.api import Str as TraitedStr

import collections
DEBUG = True
      
NUM_RETAINED_XTSM=10
Example #23
import os
import shutil
import string
import sqlite3
import pandas as pd
from contextlib import closing
from typing import MutableMapping

import msgpack
import msgpack_numpy

from cloudexp.exp import DEFAULT_DATA_FILE_NAME

from mom.logged_object import LoggedObject

msgpack_numpy.patch()


class ExpData(LoggedObject):
    """
    parses an experiment file into an sql table,
    or set path to the correct database if this is a previously parsed experiment
    takes file path as a parameter
    """
    valid_chars = f"-_.() {string.ascii_letters}{string.digits}"

    def __init__(self, output_path, export_path="exports", log_name=None):
        LoggedObject.__init__(self, log_name)
        if not output_path:
            raise Exception("No data output path")
Example #24
import os,sys,time
from worker import WorkerService
import numpy as np
import zlib
import zmq

from larcv import larcv

import msgpack
import msgpack_numpy as m
m.patch()

os.environ["GLOG_minloglevel"] = "1"

from workermessages import decode_larcv1_metamsg
from larcvdataset import LArCVDataset

class LArCV2ThreadIOWorker( WorkerService ):
    """ This worker simply receives data and replies with dummy string. prints shape of array. """

    def __init__( self,configfile,fillername,identity,ipaddress,port=0,batchsize=None,verbosity=0):
        super( LArCV2ThreadIOWorker, self ).__init__(identity,ipaddress)
        self.configfile = configfile
        self.fillername = fillername
        self.batchsize = batchsize
        self.larcvloader = LArCVDataset(self.configfile,fillername)
        self.products = {}
        self.compression_level = 4
        self.print_msg_size = False
        self.num_reads = 0
        if self.batchsize is not None:
Example #25
        for l in locs:
            labels.append('{:.2f}%'.format(l * 100.))
        plt.xticks(locs, labels)
        locs, _ = plt.yticks()
        labels = []
        for l in locs:
            labels.append('{:.2f}%'.format(l * 100.))
        plt.yticks(locs, labels)
        plt.ylabel('Makespan increase')
        plt.xlabel('Flowtime increase')
        plt.tight_layout()
    plt.legend()


if __name__ == '__main__':
    msgpack_numpy.patch()  # enable transparent numpy support in msgpack

    parser = argparse.ArgumentParser(
        description='Launches a battery of experiments in parallel')
    parser.add_argument('--input',
                        action='store',
                        default=None,
                        help='Where the results are stored.')
    args = parser.parse_args()

    # Problem results.
    statistics = collections.defaultdict(
        lambda: collections.defaultdict(lambda: collections.defaultdict(dict)))

    all_results = run.read_results(args.input)
    for raw_args, results in all_results.items():
Example #26
import numpy as np
import pandas as pd
import sqlite3 as sq3
import msgpack as msg
import msgpack_numpy as mn
import prep
import os
import glob

mn.patch()

def main():
	data_file = '../data/spectra/swift_uvspec/swift_uv_log.txt'
	files = glob.glob(r"..\data\spectra\swift_uvspec\*.flm")
	con = sq3.connect('../data/kaepora_v1.db')
	with open(data_file) as data:
		data_dict = {}
		for line in data.readlines()[1:]:
			data_dict[line.split()[4]] = line.split()[0:4]
		for spec_file in files:
			with open(spec_file) as spec:
				spectrum = np.loadtxt(spec)
				source = 'swift_uv'
				print spec_file	
				sn_data = data_dict[spec_file.split('\\')[4]]
				if sn_data[0][0:2].lower() == 'sn':
					sn_name = sn_data[0][2:]
				else:
					sn_name = sn_data[0]
				print sn_name
				redshift = float(sn_data[1])
Example #27
from datetime import datetime
import random
from itertools import repeat
from collections import OrderedDict

import numpy as np
import torch
import psutil
import msgpack
import humanize
import msgpack_numpy as msgpack_np
from PIL import Image

import utils.datastructures as datastructures

msgpack_np.patch()


def set_seeds(seed):
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)


def memory_summary():
    vmem = psutil.virtual_memory()
    msg = (
        f">>> Currently using {vmem.percent}% of system memory "
        f"{humanize.naturalsize(vmem.used)}/{humanize.naturalsize(vmem.available)}"
    )
    print(msg)
Example #28
    def __init__(self, back_plane_ip_address=None, subscriber_port='43125',
                 publisher_port='43124', process_name='None', loop_time=.1, numpy=False):
        """
        The __init__ method sets up all the ZeroMQ "plumbing"

        :param back_plane_ip_address: banyan_base back_planeIP Address -
                                      if not specified, it will be set to the
                                      local computer.

        :param subscriber_port: banyan_base back plane subscriber port.
               This must match that of the banyan_base backplane

        :param publisher_port: banyan_base back plane publisher port. This must match that of
                               the banyan_base backplane.

        :param process_name: Component identifier in banner at component startup.

        :param loop_time: Receive loop sleep time.

        :param numpy: Set true if you wish to include numpy matrices in your messages.
        """

        # call to super allows this class to be used in multiple inheritance scenarios when needed
        super(BanyanBase, self).__init__()

        self.backplane_exists = False

        self.back_plane_ip_address = None
        self.numpy = numpy

        # if using numpy apply the msgpack_numpy monkey patch
        if numpy:
            m.patch()

        # If no back plane address was specified, determine the IP address of the local machine
        if back_plane_ip_address:
            self.back_plane_ip_address = back_plane_ip_address
        else:
            # check for a running backplane
            for pid in psutil.pids():
                p = psutil.Process(pid)
                p_command = p.cmdline()
                if any('backplane' in s for s in p_command):
                    self.backplane_exists = True
                else:
                    continue

            if not self.backplane_exists:
                raise RuntimeError('Backplane is not running - please start it.')
            # determine this computer's IP address
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            # use the google dns
            s.connect(('8.8.8.8', 0))
            self.back_plane_ip_address = s.getsockname()[0]

        self.subscriber_port = subscriber_port
        self.publisher_port = publisher_port

        self.loop_time = loop_time

        print()

        print('\n************************************************************')
        print(process_name + ' using Back Plane IP address: ' + self.back_plane_ip_address)
        print('Subscriber Port = ' + self.subscriber_port)
        print('Publisher  Port = ' + self.publisher_port)
        print('Loop Time = ' + str(loop_time) + ' seconds')
        print('************************************************************')

        # establish the zeromq sub and pub sockets and connect to the backplane
        self.context = zmq.Context()
        self.subscriber = self.context.socket(zmq.SUB)
        connect_string = "tcp://" + self.back_plane_ip_address + ':' + self.subscriber_port
        self.subscriber.connect(connect_string)

        self.publisher = self.context.socket(zmq.PUB)
        connect_string = "tcp://" + self.back_plane_ip_address + ':' + self.publisher_port
        self.publisher.connect(connect_string)
Example #29
    def __init__(self,
                 back_plane_ip_address=None,
                 subscriber_port='43125',
                 publisher_port='43124',
                 process_name='None',
                 loop_time=.1,
                 numpy=False,
                 external_message_processor=None,
                 receive_loop_idle_addition=None,
                 connect_time=0.3):
        """
        The __init__ method sets up all the ZeroMQ "plumbing"

        :param back_plane_ip_address: banyan_base back_planeIP Address -
                                      if not specified, it will be set to the
                                      local computer.

        :param subscriber_port: banyan_base back plane subscriber port.
               This must match that of the banyan_base backplane

        :param publisher_port: banyan_base back plane publisher port.
                               This must match that of the banyan_base backplane.

        :param process_name: Component identifier in banner at component startup.

        :param loop_time: Receive loop sleep time.

        :param numpy: Set true if you wish to include numpy matrices in your messages.

        :param external_message_processor: external method to process messages

        :param receive_loop_idle_addition: an external method called in the idle section
                                           of the receive loop

        :param connect_time: a short delay to allow the component to connect to the Backplane
        """

        # call to super allows this class to be used in multiple inheritance scenarios when needed
        super(BanyanBase, self).__init__()

        self.backplane_exists = False

        self.back_plane_ip_address = None
        self.numpy = numpy
        self.external_message_processor = external_message_processor
        self.receive_loop_idle_addition = receive_loop_idle_addition
        self.connect_time = connect_time

        # if using numpy apply the msgpack_numpy monkey patch
        if numpy:
            m.patch()

        # If no back plane address was specified, determine the IP address of the local machine
        if back_plane_ip_address:
            self.back_plane_ip_address = back_plane_ip_address
        else:
            # check for a running backplane
            for pid in psutil.pids():
                p = psutil.Process(pid)
                try:
                    p_command = p.cmdline()
                except psutil.AccessDenied:
                    # occurs in Windows - ignore
                    continue
                try:
                    if any('backplane' in s for s in p_command):
                        self.backplane_exists = True
                    else:
                        continue
                except UnicodeDecodeError:
                    continue

            if not self.backplane_exists:
                raise RuntimeError(
                    'Backplane is not running - please start it.')
            # determine this computer's IP address
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            # use the google dns
            try:
                s.connect(('8.8.8.8', 1))
                self.back_plane_ip_address = s.getsockname()[0]
            except:
                self.back_plane_ip_address = '127.0.0.1'
            finally:
                s.close()

        self.subscriber_port = subscriber_port
        self.publisher_port = publisher_port

        self.loop_time = loop_time

        print('\n************************************************************')
        print(process_name + ' using Back Plane IP address: ' +
              self.back_plane_ip_address)
        print('Subscriber Port = ' + self.subscriber_port)
        print('Publisher  Port = ' + self.publisher_port)
        print('Loop Time = ' + str(loop_time) + ' seconds')
        print('************************************************************')

        # establish the zeromq sub and pub sockets and connect to the backplane
        self.context = zmq.Context()
        self.subscriber = self.context.socket(zmq.SUB)
        connect_string = "tcp://" + self.back_plane_ip_address + ':' + self.subscriber_port
        self.subscriber.connect(connect_string)

        self.publisher = self.context.socket(zmq.PUB)
        connect_string = "tcp://" + self.back_plane_ip_address + ':' + self.publisher_port
        self.publisher.connect(connect_string)

        # Allow enough time for the TCP connection to the Backplane complete.
        time.sleep(self.connect_time)
Example #30
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: serialize.py
# Author: Yuxin Wu <*****@*****.**>

import msgpack
import msgpack_numpy
msgpack_numpy.patch()

__all__ = ['loads', 'dumps']


def dumps(obj):
    """
    Serialize an object.

    Returns:
        str
    """
    return msgpack.dumps(obj, use_bin_type=True)


def loads(buf):
    """
    Args:
        buf (str): serialized object.
    """
    return msgpack.loads(buf)
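Because the patch is applied at import time, these helpers round-trip numpy arrays as well as plain containers; a short usage sketch (assuming numpy is available):

import numpy as np

payload = dumps({'weights': np.eye(2)})  # -> bytes
restored = loads(payload)
# note: depending on the msgpack version, dict keys may come back as bytes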
Example #31
import asyncio
import ue_asyncio
import server_utils as util
from unreal_engine.classes import Engine2LearnSettings, GameplayStatics, InputSettings
from unreal_engine.structs import Key
from unreal_engine.enums import EInputEvent

import msgpack
import msgpack_numpy as mnp

import re
import pydevd
import sys

# make msgpack use the numpy-specific de/encoders
mnp.patch()

sys.path.append(
    "c:/program files/pycharm 2017.2.2/debug-eggs/"
)  # always need to add this to the sys.path (location of PyCharm debug eggs)

# cleanup previous tasks
for task in asyncio.Task.all_tasks():
    task.cancel()


def seed(message):
    """
    Sets the random seed of the Game to some int value.
    """
    if "value" not in message:
Example #32
import argparse
from functools import partial
import pprint

import msgpack
import msgpack_numpy as mpn

import bluesky_kafka
import databroker.assets.handlers
import event_model
import ophyd.sim

# mpn.patch() is recommended by msgpack-numpy
# as the way to patch msgpack for numpy
mpn.patch()


class ExampleWorker(event_model.SingleRunDocumentRouter):

    def start(self, start_doc):
        print(f"start: {start_doc}")

    def descriptor(self, descriptor_doc):
        print(f"descriptor: {descriptor_doc}")

    def event(self, event_doc):
        print(f"event: {event_doc}")

    def event_page(self, event_page_doc):
        print(f"event_page: {event_page_doc}")
Example #33
hardware - it allows the XTSM server to attach incoming data 'databombs' to
a list,  filestream them raw to disk, unpack their contents, notify listeners
of their arrival, and create copies and links to the data for other elements

This is managed through two primary classes and their subclasses: 

    DataBombCatcher
        FileStream
        DataBomb
    DataListenerManager
        DataListener
        
@author: Nate
"""
import msgpack, msgpack_numpy, StringIO, sys, time, struct, uuid, io, datetime, os, pdb
msgpack_numpy.patch()  # this patch changes the behavior of "msgpack" itself:
# specifically, it changes how "encoding='utf-8'" behaves when unpacking
from xml.sax import saxutils
import xstatus_ready
import file_locations
import InfiniteFileStream
import simplejson

"""
raw_buffer_folders should contain an entry for the raw data destination folder
keyed by the MAC address of the host computer.  to add an entry for a new
computer, find the MAC address using import uuid / print uuid.getnode()
"""

class DataBomb(xstatus_ready.xstatus_ready):
    """
Example #34
#!/usr/bin/env python -i 
# Provide a function to return the SNs index for a SQL query.
#
import numpy as np
import sqlite3 as sq3
import msgpack as msg
import msgpack_numpy as mn

mn.patch()

class supernova(object):
     """Attributes can be added"""

def selectsn(sndb,sqlstr):

     con = sq3.connect(sndb)
     cur = con.cursor()
     
     cur.execute(sqlstr)

     SN_Array = []
     names = []
     for row in cur:
          SN = supernova()
          SN.filename = row[0]
          SN.name = row[1]
          SN.redshift = row[2]
          SN.minwave = row[3]
#           spectra = msg.unpackb(row[5])
#           SN.spectrum = spectra
          SN_Array.append(SN)