Example No. 1
def process_get_response():
    glog.info('get_response is called from ' + str(request.environ['REMOTE_ADDR']) + ":" + str(request.environ['REMOTE_PORT']))
    query = urlparse(request.url).query
    if not query:
        # An empty query string may indicate a probe rather than a legitimate client:
        return "hello world"
    query_components = urllib.parse.parse_qs(query)
    # https://stackoverflow.com/questions/8928730/processing-http-get-input-parameter-on-server-side-in-python
    if not check_key(query_components):
        return "no key", HTTP_ERROR_CODE_NOKEY
    decode_content = msg_np.unpackb(request.data)
    session_id = decode_content['session_id']
    vec_in = decode_content['vec_in']
    glog.info('predict vec_in shape ' + str(vec_in.shape))
    vec_out = model_wrapper.predict(vec_in)
    ret_data = {
        "session_id" : session_id,
        "vec_out" : vec_out
    }
    response = app.response_class(
        response=msg_np.packb(ret_data),
        status=200,
        mimetype='application/msgpack_numpy'
    )
    return response
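A minimal client-side sketch of how this endpoint might be called. The host, port, route, and key value are assumptions; only the payload keys session_id/vec_in, the returned vec_out, and the msgpack_numpy encoding are taken from the handler above:

import numpy as np
import requests
import msgpack_numpy as msg_np

# Hypothetical URL and api_key; the actual route and valid key are not shown above.
url = 'http://localhost:5000/get_response?api_key=YOUR_KEY'
payload = msg_np.packb({'session_id': 42, 'vec_in': np.zeros((1, 10))})
resp = requests.post(url, data=payload)
ret = msg_np.unpackb(resp.content)
print(ret['session_id'], ret['vec_out'].shape)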
Example No. 2
    def _load_next(self):
        if len(self._preload) == 0:
            if len(self.load_ordering) == 0:
                raise StopIteration

            new_preload = []
            lengths = []
            with lmdb.open(
                    self.lmdb_features_dir,
                    map_size=int(self.lmdb_map_size),
                    readonly=True,
                    lock=False,
            ) as lmdb_env, lmdb_env.begin(buffers=True) as txn:
                for _ in range(self.preload_size):
                    if len(self.load_ordering) == 0:
                        break

                    # Pop each key exactly once; a stray second pop (e.g. in a
                    # leftover debug print) would silently drop every other entry.
                    next_key = self.load_ordering.pop()
                    new_preload.append(
                        msgpack_numpy.unpackb(txn.get(str(next_key).encode()),
                                              raw=False))

                    lengths.append(len(new_preload[-1][0]))

            sort_priority = list(range(len(lengths)))
            random.shuffle(sort_priority)

            sorted_ordering = list(range(len(lengths)))
            sorted_ordering.sort(key=lambda k: (lengths[k], sort_priority[k]))

            for idx in _block_shuffle(sorted_ordering, self.batch_size):
                self._preload.append(new_preload[idx])

        return self._preload.pop()
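_block_shuffle is not shown in this snippet; a plausible sketch consistent with how it is called here (the length-sorted indices are grouped into batch-sized blocks and the block order is shuffled, so each batch contains items of similar length):

import random

def _block_shuffle(ordering, block_size):
    # Split the sorted indices into contiguous blocks of block_size,
    # shuffle the order of the blocks, then flatten back into one list.
    blocks = [ordering[i:i + block_size]
              for i in range(0, len(ordering), block_size)]
    random.shuffle(blocks)
    return [idx for block in blocks for idx in block]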
Example No. 3
    def _sync(self):
        """
        Send output data and receive input data.

        Notes
        -----
        Assumes that the attributes used for input and output already
        exist.

        Each message is a tuple containing a module ID and data; for
        outbound messages, the ID is that of the destination module;
        for inbound messages, the ID is that of the source module.
        Data is serialized before being sent and unserialized when
        received.
        """

        if self.net in ['none', 'ctrl']:
            self.logger.info('not synchronizing with network')
        else:
            self.logger.info('synchronizing with network')

            # Send outbound data:
            if self.net in ['out', 'full']:

                # Send all data in outbound buffer:
                send_ids = list(self._out_ids)
                for out_id, data in self._out_data:
                    self.sock_data.send(msgpack.packb((out_id, data)))
                    send_ids.remove(out_id)
                    self.logger.info('sent to   %s: %s' % (out_id, str(data)))

                # Send data tuples containing None to those modules for which no
                # actual data was generated to satisfy the barrier condition:
                for out_id in send_ids:
                    self.sock_data.send(msgpack.packb((out_id, None)))
                    self.logger.info('sent to   %s: %s' % (out_id, None))

                # All output IDs should be sent data by this point:
                self.logger.info('sent data to all output IDs')

            # Receive inbound data:
            if self.net in ['in', 'full']:

                # Wait until inbound data is received from all source modules:
                while not all(self._in_data.values()):
                    # Use poller to avoid blocking:
                    if is_poll_in(self.sock_data, self.data_poller):
                        in_id, data = msgpack.unpackb(self.sock_data.recv())
                        self.logger.info('recv from %s: %s ' %
                                         (in_id, str(data)))

                        # Ignore incoming data containing None:
                        if data is not None:
                            self._in_data[in_id].append(data)

                    # Stop the synchronization if a quit message has been received:
                    if not self.running:
                        self.logger.info('run loop stopped - stopping sync')
                        break
                self.logger.info('recv data from all input IDs')
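is_poll_in is defined elsewhere; a minimal sketch of the helper this code appears to rely on (a non-blocking readiness check against a zmq poller; the timeout value is an assumption):

import zmq

def is_poll_in(sock, poller, timeout=100):
    # Poll for up to `timeout` ms and report whether `sock` has an
    # inbound message ready to be received.
    socks = dict(poller.poll(timeout))
    return sock in socks and socks[sock] == zmq.POLLIN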
Example No. 4
    def _sync(self):
        """
        Send output data and receive input data.
            
        Notes
        -----
        Assumes that the attributes used for input and output already
        exist.

        Each message is a tuple containing a module ID and data; for
        outbound messages, the ID is that of the destination module.
        for inbound messages, the ID is that of the source module.
        Data is serialized before being sent and unserialized when
        received.

        """
        
        if self.net in ['none', 'ctrl']:
            self.logger.info('not synchronizing with network')
        else:
            self.logger.info('synchronizing with network')

            # Send outbound data:
            if self.net in ['out', 'full']:

                # Send all data in outbound buffer:
                # Copy the ID list; removing entries from self.out_ids
                # directly would mutate the module's attribute:
                send_ids = list(self.out_ids)
                for out_id, data in self._out_data:
                    self.sock_data.send(msgpack.packb((out_id, data)))
                    send_ids.remove(out_id)
                    self.logger.info('sent to   %s: %s' % (out_id, str(data)))
                
                # Send data tuples containing None to those modules for which no
                # actual data was generated to satisfy the barrier condition:
                for out_id in send_ids:
                    self.sock_data.send(msgpack.packb((out_id, None)))
                    self.logger.info('sent to   %s: %s' % (out_id, None))

                # All output IDs should be sent data by this point:
                self.logger.info('sent data to all output IDs')

            # Receive inbound data:
            if self.net in ['in', 'full']:
                # Wait until inbound data is received from all source modules:
                while not all(self._in_data.values()):
                    # Use poller to avoid blocking:
                    if is_poll_in(self.sock_data, self.data_poller):
                        in_id, data = msgpack.unpackb(self.sock_data.recv())
                        self.logger.info('recv from %s: %s ' % (in_id, str(data)))

                        # Ignore incoming data containing None:
                        if data is not None:
                            self._in_data[in_id].append(data)

                    # Stop the synchronization if a quit message has been received:
                    if not self.running:
                        self.logger.info('run loop stopped - stopping sync')
                        break
                self.logger.info('recv data from all input IDs')
Example No. 5
def on_response(ch, method, props, body):
    # Check whether this reply corresponds to our request:
    if corr_id != props.correlation_id:
        raise Exception('unexpected correlation id')

    # Unpack the msgpack-encoded body directly; stringifying the raw
    # bytes first would corrupt the binary payload.
    global response
    response = m.unpackb(body, object_hook=m.decode)
    print(response)
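corr_id, response, and m are assumed to come from an enclosing RabbitMQ RPC client; a sketch of what that surrounding setup might look like with pika (everything here except on_response itself is an assumption):

import uuid
import pika
import msgpack_numpy as m

connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
channel = connection.channel()

# Exclusive, auto-named reply queue for this client:
result = channel.queue_declare(queue='', exclusive=True)
callback_queue = result.method.queue

corr_id = str(uuid.uuid4())
response = None
channel.basic_consume(queue=callback_queue,
                      on_message_callback=on_response,
                      auto_ack=True)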
Example No. 6
def msgpack_loader(mp_path):
    """Msgpack provides a faster serialisation routine than pickle, so is preferable
    for loading and deserialising large feature sets from disk."""
    tic = time.time()
    with open(mp_path, "rb") as f:
        buffer = f.read()
        print(f"[I/O: {time.time() - tic:.1f}s]", end=" ")
        tic = time.time()
        # NB: the payload must be decoded as 'latin' rather than 'utf-8'
        # here (the `encoding` kwarg requires msgpack < 1.0):
        data = msgpack_np.unpackb(buffer,
                                  object_hook=msgpack_np.decode,
                                  encoding="latin")
        print(f"[deserialisation: {time.time() - tic:.1f}s]", end=" ")
    return data
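A matching saver sketch, mirroring the loader's timing output. It assumes the same msgpack_np alias and a msgpack version compatible with the encoding used above; the function name is ours:

def msgpack_saver(data, mp_path):
    """Counterpart sketch to msgpack_loader: serialise `data` to disk."""
    tic = time.time()
    with open(mp_path, "wb") as f:
        f.write(msgpack_np.packb(data, default=msgpack_np.encode))
    print(f"[serialisation: {time.time() - tic:.1f}s]", end=" ")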
Example No. 7
    def __getitem__(self, idx):
        if self.lmdb_env is None:
            self.lmdb_env = lmdb.open(str(self.lmdb_file), map_size=1 << 36,
                                      readonly=True, lock=False)

        shape_name, _ = self.shapes[idx]
        with self.lmdb_env.begin(buffers=True) as txn:
            pts = msgpack_numpy.unpackb(txn.get(str(idx).encode()), raw=False)

        pt_idxs = np.arange(0, self.n_points)
        np.random.shuffle(pt_idxs)

        pts = pts[pt_idxs, :]
        pts, normals = pts[:, :3], pts[:, 3:]
        pts = self.normalize_pointclouds(pts)

        return pts, normals, self.classes[shape_name]
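A sketch of how such an LMDB file might be produced, under the assumptions the reader above implies (keys are str(index), values are msgpack_numpy-packed (N, 6) arrays of xyz + normals); the writer function itself is ours:

import lmdb
import msgpack_numpy

def write_point_lmdb(lmdb_file, point_clouds, map_size=1 << 36):
    # Store each (N, 6) array under its str(index) key, matching the
    # txn.get(str(idx).encode()) lookups in __getitem__ above.
    with lmdb.open(str(lmdb_file), map_size=map_size) as env:
        with env.begin(write=True) as txn:
            for i, pts in enumerate(point_clouds):
                txn.put(str(i).encode(), msgpack_numpy.packb(pts))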
Example No. 8
    def _data_handler(self, msg):
        """
        Data port handler.

        Notes
        -----
        Assumes that each message contains a source module ID
        (provided by zmq) and a serialized tuple; the tuple contains
        the destination module ID and the data to be transmitted.
        """

        if len(msg) != 2:
            self.logger.info('skipping malformed message: %s' % str(msg))
        else:

            # When a message arrives, increase the corresponding received_count
            in_id = msg[0]
            out_id, data = msgpack.unpackb(msg[1])
            self.logger.info('recv from %s: %s' % (in_id, data))

            # Increase the appropriate count in recv_counts by 1
            self._recv_counts[(in_id, out_id)] += 1
            self._data_to_route.append((in_id, out_id, data))

            # When data with source/destination IDs corresponding to
            # every entry in the routing table has been received up to
            # the current time step, deliver the data in the buffer:
            if all(self._recv_counts.values()):
                self.logger.info('recv from all modules')
                for in_id, out_id, data in self._data_to_route:
                    self.logger.info('sent to   %s: %s' % (out_id, data))

                    # Route to the destination ID and send the source ID
                    # along with the data:
                    self.sock_data.send_multipart(
                        [out_id, msgpack.packb((in_id, data))])

                # Reset the incoming data buffer
                self._data_to_route = []

                # Decrease all values in recv_counts to indicate that an
                # execution time step has been successfully completed:
                for k in self._recv_counts:
                    self._recv_counts[k] -= 1
                self.logger.info('----------------------')
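The handler assumes _recv_counts is keyed by every (in_id, out_id) pair in the routing table; a hedged sketch of how that state might be initialized (the routing_pairs argument and method name are assumptions):

    def _init_routing_state(self, routing_pairs):
        # routing_pairs: iterable of (in_id, out_id) connections taken
        # from the routing table.
        self._recv_counts = {pair: 0 for pair in routing_pairs}
        self._data_to_route = []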
Example No. 9
    def _data_handler(self, msg):
        """
        Data port handler.

        Notes
        -----
        Assumes that each message contains a source module ID
        (provided by zmq) and a serialized tuple; the tuple contains
        the destination module ID and the data to be transmitted.
        """

        if len(msg) != 2:
            self.logger.info('skipping malformed message: %s' % str(msg))
        else:

            # When a message arrives, increase the corresponding received_count
            in_id = msg[0]
            out_id, data = msgpack.unpackb(msg[1])
            self.logger.info('recv from %s: %s' % (in_id, data))
            # Increase the appropriate count in recv_counts by 1
            self.recv_counts[(in_id, out_id)] += 1
            self.data_to_route.append((in_id, out_id, data))
            # When data with source/destination IDs corresponding to
            # every entry in the routing table has been received up to
            # the current time step, deliver the data in the buffer:
            if all(self.recv_counts.values()):
                self.logger.info('recv from all modules')
                for in_id, out_id, data in self.data_to_route:
                    self.logger.info('sent to   %s: %s' % (out_id, data))

                    # Route to the destination ID and send the source ID
                    # along with the data:
                    self.sock_data.send_multipart([out_id,
                                                   msgpack.packb((in_id, data))])

                # Reset the incoming data buffer
                self.data_to_route = []
                # Decrease all values in recv_counts to indicate that an
                # execution time step has been successfully completed:
                for k in self.recv_counts:
                    self.recv_counts[k] -= 1
                self.logger.info('----------------------')
Example No. 10
    def __getitem__(self, idx):
        if self._lmdb_env is None:
            self._lmdb_env = lmdb.open(
                self._lmdb_file, map_size=1 << 36, readonly=True, lock=False
            )

        with self._lmdb_env.begin(buffers=True) as txn:
            ele = msgpack_numpy.unpackb(txn.get(str(idx).encode()), raw=False)

        point_set = ele["pc"]

        pt_idxs = np.arange(0, self.num_points)
        np.random.shuffle(pt_idxs)

        point_set = point_set[pt_idxs, :]
        point_set[:, 0:3] = pc_normalize(point_set[:, 0:3])

        if self.transforms is not None:
            point_set = self.transforms(point_set)

        return point_set, ele["lbl"]
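pc_normalize is not shown; the conventional definition used with ModelNet-style loaders centers the cloud and scales it into the unit sphere — a sketch under that assumption:

import numpy as np

def pc_normalize(pc):
    # Translate the centroid to the origin, then divide by the radius
    # of the farthest point so all points fall inside the unit sphere.
    centroid = np.mean(pc, axis=0)
    pc = pc - centroid
    max_dist = np.max(np.sqrt(np.sum(pc ** 2, axis=1)))
    return pc / max_dist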
Example No. 11
    def _data_handler(self, msg):
        """
        Data port handler.

        Notes
        -----
        Assumes that each message contains a source module ID
        (provided by zmq) and a serialized tuple; the tuple contains
        the destination module ID and the data to be transmitted.
        """

        if len(msg) != 2:
            self.log_info('skipping malformed message: %s' % str(msg))
        else:

            # Queue arriving messages:
            in_id = msg[0]
            out_id, data = msgpack.unpackb(msg[1])
            self.log_info('recv from %s: %s' % (in_id, data))
            self._recv_queues[(in_id, out_id)].appendleft(data)

            # When data with source/destination IDs corresponding to
            # every entry in the routing table has been received up to
            # the current time step (i.e., all queues for each 
            # source/destination pair contain something), deliver the data:
            if all(self._recv_queues.values()):
                self.log_info('recv from all modules')
                for (in_id, out_id), queue in self._recv_queues.items():
                    data = queue.pop()
                    self.log_info('sent to   %s: %s' % (out_id, data))

                    # Route to the destination ID and send the source ID
                    # along with the data:
                    self.sock_data.send_multipart([out_id,
                                                   msgpack.packb((in_id, data))])

                self.log_info('----------------------')
Example No. 12
def numpy_from_Redis(redis_client, key) -> np.ndarray:
    """Retrieve a NumPy array stored under Redis key `key`."""
    # Fetch the packed bytes once, then deserialize with msgpack_numpy:
    packed_arr = redis_client.get(key)
    return m.unpackb(packed_arr)
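The write-side counterpart — a sketch (the function name is ours; it assumes the same msgpack_numpy alias m):

def numpy_to_Redis(redis_client, array, key):
    """Store NumPy array `array` under Redis key `key`."""
    redis_client.set(key, m.packb(array))
    return key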
Example No. 13
HTTP_ERROR_CODE_BIGAMY = 360
HTTP_ERROR_CODE_NOKEY = 401
HTTP_ERROR_CODE_NO_ACTION = 405
# flask
app = Flask(__name__)
# set flask log level
logging.getLogger('werkzeug').setLevel(logging.ERROR)
logging.getLogger("requests").setLevel(logging.WARNING)

# Quick round-trip sanity check of msgpack_numpy packing:
tmp = {
    'a' : 'abc',
    'vec' : np.zeros((3, 3))
}
content = msg_np.packb(tmp)
content2 = msg_np.unpackb(content)
glog.info(content2['a'])
glog.info(str(content2['vec']))
############
import lstm_model
model_wrapper = lstm_model.LstmModel()
last_model_file = '1_final.h5'
glog.info('loading ' + last_model_file)
model_wrapper.load_model_from_file(file=last_model_file)

def get_timestamp_for_dingding():
    return time.strftime("[%m/%d-%H:%M:%S]", time.localtime())

def check_key(query_components):
    # The caller treats a falsy return as a missing/invalid key:
    if "api_key" not in query_components:
        glog.info("no api_key")
        return False
Example No. 14
    def _sync(self):
        """
        Send output data and receive input data.

        Notes
        -----
        Assumes that the attributes used for input and output already
        exist.

        Each message is a tuple containing a module ID and data; for
        outbound messages, the ID is that of the destination module;
        for inbound messages, the ID is that of the source module.
        Data is serialized before being sent and unserialized when
        received.
        """

        if self.net in ['none', 'ctrl']:
            self.log_info('not synchronizing with network')
        else:
            self.log_info('synchronizing with network')

            # Send outbound data:
            start = time.time()
            self._put_out_data()
            if self.net in ['out', 'full']:

                # Send all data in outbound buffer:
                send_ids = list(self._out_ids)
                for out_id, data in self._out_data:
                    self.sock_data.send(msgpack.packb((out_id, data)))
                    send_ids.remove(out_id)
                    if not self.time_sync:
                        self.log_info('sent to   %s: %s' % (out_id, str(data)))

                # Send data tuples containing None to those modules for which no
                # actual data was generated to satisfy the barrier condition:
                for out_id in send_ids:
                    self.sock_data.send(msgpack.packb((out_id, None)))
                    if not self.time_sync:
                        self.log_info('sent to   %s: %s' % (out_id, None))

                # All output IDs should be sent data by this point:
                if not self.time_sync:
                    self.log_info('sent data to all output IDs')

            # Receive inbound data:
            if self.net in ['in', 'full']:

                # Wait until inbound data is received from all source modules:
                recv_ids = set(self._in_ids)
                nbytes = 0
                while recv_ids:

                    # Poll to avoid blocking:
                    if self.sock_data.poll(POLL_TIMEOUT):
                        data_packed = self.sock_data.recv()
                        in_id, data = msgpack.unpackb(data_packed)
                        if not self.time_sync:
                            self.log_info('recv from %s: %s' % (in_id, str(data)))

                        # Ignore incoming data containing None:
                        if data is not None:
                            self._in_data[in_id].append(data)

                            # Record number of bytes of transmitted serialized data:
                            nbytes += len(data_packed)

                        # Remove source module ID from set of IDs from which to
                        # expect data:
                        recv_ids.discard(in_id)

                    # Stop the synchronization if a quit message has been received:
                    if not self.running:
                        if not self.time_sync:
                            self.log_info('run loop stopped - stopping sync')
                        break

                if not self.time_sync:
                    self.log_info('recv data from all input IDs')
            self._get_in_data()

            # Transmit synchronization time:
            stop = time.time()
            if self.time_sync:
                self.log_info('sent timing data to master')
                self.sock_time.send(msgpack.packb((self.id, self.steps, 'sync',
                                                   (start, stop, nbytes))))
Example No. 15
import numpy as np
from scipy.stats.mstats import mode
import msgpack_numpy as msgnp
from sklearn.cluster import KMeans
from sklearn.metrics import silhouette_score

from connectDB import connectDB
db = connectDB()
apps = db.read_table('applications')

with open('devAppBase_september', 'rb') as handle:
    dev_AppBase = msgnp.unpackb(handle.read(), object_hook=msgnp.decode)

with open('apps', 'rb') as handle:
    app_names = msgnp.unpackb(handle.read(), object_hook=msgnp.decode)

# k = 10

# app_clusters = {}
# for device_id, data in dev_AppBase.items():
# 	X, app_ids = data
# 	model = KMeans(n_clusters = k)
# 	labels = model.fit_predict(X)
# 	app_clusters[device_id] = (labels, app_ids)


def cluster(dev_AppBase, app_names, device_id):
	import numpy as np
	from scipy.stats.mstats import mode
Example No. 16
    def run(self):
        self._init_net()
        sock_time = self.zmq_ctx.socket(zmq.ROUTER)
        sock_time.bind('tcp://*:%s' % self.port_time)
        sync_router(sock_time, self.ids)
        self.log_info('time port initialized')
        self.running = True
        counter = 0
        total_sync_time = 0.0
        total_sync_nbytes = 0.0
        received_data = {}
        self.start_time = 0.0
        self.stop_time = 0.0
        self.average_throughput = 0.0
        self.average_step_sync_time = 0.0
        while True:
            if sock_time.poll(10):

                # Receive timing data:
                id, data = sock_time.recv_multipart()
                id, steps, time_type, data = msgpack.unpackb(data)
                self.log_info('time data: %s, %s, %s, %s' % (id, steps,
                                                             time_type, str(data)))

                # The time_type may be 'start' (emulation run loop start time), 
                # 'stop' (emulation loop stop time), or 'sync' (emulation sync
                # time data):
                if time_type == 'start':
                    self.start_time = data
                elif time_type == 'stop':
                    self.stop_time = data
                elif time_type == 'sync':
                    start, stop, nbytes = data

                    # Collect timing data for each execution step:
                    if steps not in received_data:
                        received_data[steps] = {}                    
                    received_data[steps][id] = (start, stop, nbytes)

                    # After adding the latest timing data for a specific step, check
                    # whether data from all modules has arrived for that step:
                    if set(received_data[steps].keys()) == self.ids:

                        # The duration of an execution step is taken to be the
                        # longest of the received intervals:
                        step_sync_time = max([(d[1]-d[0]) for d in received_data[steps].values()])

                        # Obtain the total number of bytes received by all of the
                        # modules during the execution step:
                        step_nbytes = sum([d[2] for d in received_data[steps].values()])

                        total_sync_time += step_sync_time
                        total_sync_nbytes += step_nbytes

                        self.average_throughput = (self.average_throughput*counter+\
                                                   step_nbytes/step_sync_time)/(counter+1)
                        self.average_step_sync_time = (self.average_step_sync_time*counter+\
                                                       step_sync_time)/(counter+1)

                        # Clear the data for the processed execution step so
                        # that the received_data dict doesn't consume unnecessary memory:
                        del received_data[steps]

                        counter += 1
                    
            if not self.running:
                self.log_info('stopping run loop')
                break
        self.log_info('done')

        if total_sync_time > 0.0:
            self.total_throughput = total_sync_nbytes/total_sync_time
        else:
            self.total_throughput = 0.0
        self.log_info('avg step sync time/avg per-step throughput'
                      '/total transm throughput/run loop duration: '
                      '%s, %s, %s, %s' %
                      (self.average_step_sync_time, self.average_throughput,
                       self.total_throughput, self.stop_time - self.start_time))
        self.queue.put((self.average_step_sync_time, self.average_throughput,
                        self.total_throughput, self.stop_time-self.start_time))
Example No. 17
import msgpack
import msgpack_numpy as m
import numpy as np
import json
m.patch()

from miles_index.redis import r

known_face_encodings = m.unpackb(r.get('known_face_encodings'))
profiles = json.loads(r.get('profiles'))
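A sketch of how these cached values might have been written (the producer is not shown; the names mirror the reads above):

# Face encodings via msgpack_numpy, profiles as plain JSON:
r.set('known_face_encodings', m.packb(known_face_encodings))
r.set('profiles', json.dumps(profiles))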