Example No. 1
import argparse
import pickle


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-a', '--address', default='localhost', help='rabbitmq host')
    parser.add_argument('--timeout', type=int, default=10, help='queue timeout')
    parser.add_argument('-i', '--in_queue', help='input queue')
    parser.add_argument('-o', '--out_queue', help='output queue')
    args = parser.parse_args()

    # Open both queues up front; each context manager yields a (connection, channel) pair.
    with get_queue(args.address, args.in_queue) as (in_conn, in_channel), \
         get_queue(args.address, args.out_queue) as (out_conn, out_channel):
        def callback(ch, method, properties, body):
            data = pickle.loads(body)
            if data['type'] == 'data':
                fr_in = data['frames']
                print('received frame', fr_in[0]['I3EventHeader'].event_id,
                      fr_in[0]['I3EventHeader'].sub_event_stream)
                fr_out = process_frames(fr_in)
                data2 = pickle.dumps({
                    'type': 'data',
                    'frames': fr_out,
                })
            else:
                raise Exception('bad type')
            # Republish the processed frames on the output queue (default exchange).
            out_channel.basic_publish(exchange='',
                                      routing_key=args.out_queue,
                                      body=data2)
        consumer(in_conn, in_channel, args.in_queue, callback, timeout=args.timeout)

    print('done!')
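
get_queue, process_frames, and consumer are helpers from the surrounding project and are not shown. Since the with-statement unpacks a (connection, channel) pair, get_queue is presumably a context manager; a minimal sketch with pika, offered only as an assumption about its shape:

from contextlib import contextmanager

import pika

@contextmanager
def get_queue(address, queue_name):
    # Hypothetical helper: connect to RabbitMQ, declare the queue, and
    # yield a (connection, channel) pair, closing the connection on exit.
    connection = pika.BlockingConnection(pika.ConnectionParameters(host=address))
    try:
        channel = connection.channel()
        channel.queue_declare(queue=queue_name)
        yield connection, channel
    finally:
        connection.close()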
Example No. 2
def main():
    parser = make_parser()
    args = parser.parse_args()

    if not valid_args(args):
        parser.print_help()
    elif args.consumer:
        consumer(args)
    elif args.producer:
        producer(args)
    else:
        parser.print_help()
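
make_parser, valid_args, consumer, and producer all come from the surrounding module and are not shown. Based on the attributes the dispatch reads, one plausible shape for the parser and the validity check (flag names are guesses):

import argparse

def make_parser():
    # Hypothetical CLI: run in exactly one of two modes.
    parser = argparse.ArgumentParser()
    mode = parser.add_mutually_exclusive_group()
    mode.add_argument('--consumer', action='store_true', help='run as a consumer')
    mode.add_argument('--producer', action='store_true', help='run as a producer')
    return parser

def valid_args(args):
    # Assumed check: exactly one of the two modes must be selected.
    return args.consumer != args.producer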
Example No. 3
def run():
    from kombu import Connection, Exchange, Queue
    from consumer import consumer
    with Connection(config['mq_connection']) as conn:
        ex = Exchange(config['query_exchange'])
        queue = Queue(config['query_queue'], exchange=ex,
                      routing_key=config['query_routing_key'])
        worker = consumer(conn, queue, ex)
        worker.run()
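
The consumer class imported here is project-local and not shown. kombu's documented pattern for a worker with a run() loop is a ConsumerMixin subclass, so one plausible reconstruction is:

from kombu.mixins import ConsumerMixin

class consumer(ConsumerMixin):
    # Hypothetical reconstruction: drain `queue` until stopped.
    # run() is inherited from ConsumerMixin.

    def __init__(self, connection, queue, exchange):
        self.connection = connection  # ConsumerMixin reads this attribute
        self.queue = queue
        self.exchange = exchange

    def get_consumers(self, Consumer, channel):
        return [Consumer(queues=[self.queue], callbacks=[self.on_message])]

    def on_message(self, body, message):
        print('received:', body)
        message.ack()  # acknowledge so the broker can discard the message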
Example No. 4
import logging


def main():
    parser = make_parser()
    args = parser.parse_args()

    # setup logging
    numeric_log_level = getattr(logging, args.log_level.upper(), None)
    if not isinstance(numeric_log_level, int):
        raise ValueError('invalid log level: %s' % args.log_level)
    logging.basicConfig(level=numeric_log_level)

    clean_kafka_configs(args)
    if not valid_args(args):
        parser.print_help()
    elif args.consumer:
        consumer(args)
    elif args.producer:
        producer(args)
    else:
        parser.print_help()
Example No. 5
import secrets


def main():
    """
        Monitor a website and upload analytics to a database.
    """
    message_value = consumer()  # pull one message off the queue
    logger.info(message_value)
    message_value = _byte_to_dict(message_value)  # the original called this with no argument
    message_value["id"] = secrets.randbelow(15)   # assign a random id in [0, 15)
    write_to_db(message_value)

    # `statuscode` and `body` were undefined in the original; assuming a 200 with the record.
    return create_http_response(200, message_value)
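
_byte_to_dict is not shown anywhere in the snippet; assuming the consumed payload is UTF-8 encoded JSON, a plausible one-liner would be:

import json

def _byte_to_dict(raw_bytes):
    # Assumption: the broker delivers UTF-8 encoded JSON objects.
    return json.loads(raw_bytes.decode('utf-8'))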
Example No. 6
    def __init__(self, name, parent=None):
        UVMComponent.__init__(self, name, parent)
        UVMPhase.m_phase_trace = True  # enable phase tracing for debug output
        self.p1 = producer("producer1", self)
        self.p2 = producer("producer2", self)
        self.f = UVMTLMFIFO("fifo", self)
        self.c = consumer("consumer", self)

        # Create connections between components: p1 drives the consumer
        # directly, p2 pushes into the FIFO, and the consumer's get port
        # pulls from the FIFO.
        self.p1.out.connect(self.c.input)
        self.p2.out.connect(self.f.blocking_put_export)
        self.c.out.connect(self.f.get_export)
        self.error = False
Example No. 7
    def __init__(self, name, parent=None):
        super().__init__(name, parent)
        self.prods = []
        self.prods2 = []
        self.cons = []
        self.fifos = []
        self.n = 128

        # Instantiate n independent producer/producer2/consumer/FIFO sets...
        for i in range(self.n):
            self.prods.append(producer("producer" + str(i), self))
            self.prods2.append(producer("producer2_" + str(i), self))
            self.cons.append(consumer("consumer" + str(i), self))
            self.fifos.append(UVMTLMFIFO("fifo" + str(i), self))
        # ...then wire each set the same way as the single-instance example
        # above: prods[i] drives cons[i] directly, prods2[i] pushes into
        # fifos[i], and cons[i] pulls from fifos[i].
        for i in range(self.n):
            self.prods[i].out.connect(self.cons[i].input)
            self.prods2[i].out.connect(self.fifos[i].blocking_put_export)
            self.cons[i].out.connect(self.fifos[i].get_export)
Example No. 8
of using it with my neural network.
Now all I have to do is
save values to the class and then we can
get rolling.
'''
'''
sharing dict
http://stackoverflow.com/questions/6832554/python-multiprocessing-how-do-i-share-a-dict-among-multiple-processes

'''

import multiprocessing
import pickle
from multiprocessing import Process

if __name__ == '__main__':

    toClassifiers = multiprocessing.Queue()
    toRaspberryPieQueue = multiprocessing.Queue()
    accountPickle = "./account.pickle"
    # Start with an empty account store on disk.
    with open(accountPickle, "wb") as f:
        pickle.dump({}, f)
    server = myServer(accountPickle)  # renamed: the original shadowed the myServer class

    process_server = Process(target=server.runServer, args=())
    #process_producer = producer(toClassifiers, toRaspberryPieQueue)
    process_consumer = consumer(toRaspberryPieQueue, toClassifiers,
                                accountPickle)

    process_server.start()
    #process_producer.start()
    process_consumer.start()

    #process_producer.join()
    #process_consumer.join()
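
The header comment links a Stack Overflow thread on sharing a dict among processes; the standard approach there is multiprocessing.Manager. A minimal, self-contained sketch (all names hypothetical):

import multiprocessing

def record(shared, key, value):
    # Writes through the manager proxy are visible to every process.
    shared[key] = value

if __name__ == '__main__':
    with multiprocessing.Manager() as manager:
        shared = manager.dict()  # dict proxy shared across processes
        p = multiprocessing.Process(target=record, args=(shared, 'count', 1))
        p.start()
        p.join()
        print(dict(shared))  # {'count': 1}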
Example No. 9
def consume():
    """
    call consumer.py
    """
    cons = consumer()
    return jsonify({"message": cons})
Example No. 10
        # Tuples of IPs and the unique consumers attached to them. The
        # original comprehension leaked its loop variable and did not run.
        print([(y.IP.iloc[0], set(y.Consumer.tolist())) for y in IP_Map])

        # Build a consumer record for this id: its set of IPs, the browser
        # token, and a zero-initialised weight for each related consumer
        # not yet seen.
        related = set(consumer_list).difference({consumer_id}).difference(seen)
        return consumer.consumer(consumer_id,
                                 set(x.IP.iloc[0] for x in IP_Map),
                                 consumer_key.alt_Browser.iloc[0].split('-')[1],
                                 dict.fromkeys((recursive_add_consumers(x) for x in related), 0))


def read_csv():
    # The original body was only this commented-out line; DataFrame.from_csv
    # is deprecated in pandas in favour of pd.read_csv.
    # data = pd.read_csv("sample.csv", header=0, index_col=None)
    pass
Example No. 11
    rc, res = yield db_execute(STORE_TASK, task['task_uuid'],
                               task['task_priority'], task['created_at'],
                               task['dna'],
                               task['company_id'], str(task['site_asset_ids']))
    if rc:
        if task['query_scope']:
            for i in task['query_scope']:
                try:
                    # `meta` was undefined in the original; the scope item `i`
                    # is the only value in reach, so it is used for the lookup.
                    r, re = yield db_query(CHECK_SCOPE, i)
                    if r:
                        # `task_id` and `meta_uuid` are assumed to be defined
                        # in the enclosing scope (not shown in this fragment).
                        rc, res = yield db_execute(STORE_SCOPE, task_id, meta_uuid)
                    else:
                        logger.info('query_scope is not in vddbMetaContent table, '
                                    'task_uuid: %s, query_scope: %s',
                                    task['task_uuid'], i)
                except Exception:
                    logger.error('failed to store scope, task_uuid: %s, query_scope: %s',
                                 task['task_uuid'], i)



from kombu import Connection, Exchange, Queue
from consumer import consumer

with Connection(config['result_connection']) as conn:
    ex = Exchange(config['result_exchange'])
    queue = Queue('result_queue', exchange=ex,
                  routing_key=config['result_routing_key'])
    worker = consumer(conn, queue, ex)
    worker.run()
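
For completeness, the matching publish side under the same assumptions (same config dict; the payload is hypothetical), using kombu's connection-bound Producer:

from kombu import Connection, Exchange, Queue

# Publish one test message for the worker above to pick up.
with Connection(config['result_connection']) as conn:
    ex = Exchange(config['result_exchange'])
    queue = Queue('result_queue', exchange=ex,
                  routing_key=config['result_routing_key'])
    producer = conn.Producer()
    producer.publish({'status': 'ok'},  # hypothetical payload
                     exchange=ex,
                     routing_key=config['result_routing_key'],
                     declare=[queue])   # ensure the queue exists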