Пример #1
0
    def front_end_thread(self):
        """Serve client queries one at a time, in arrival order.

        Binds a REP socket on EXTERNAL_PORT; each incoming QueryObject is
        wrapped in an invoke message, pushed to the execution pipeline via
        ``self.sender``, and the digested reply from ``self.receiver`` is
        returned to the client as the compacted result payload.
        """
        # Request/reply socket for client queries (one reply per request).
        self.server_socket = self.context.socket(zmq.REP)
        self.server_socket.bind("tcp://*:%s" % EXTERNAL_PORT)

        # Monotonically increasing invoke id, one per query.
        invoke_id = START_QID

        while True:
            # Block until a client sends a QueryObject instance.
            incoming = self.server_socket.recv_pyobj()
            logging.debug("receive query : {}".format(incoming))

            # Wrap the query in an invoke message and hand it to the executor.
            outgoing = ECHOMessage(ECHOMessage.MSG_TYPE_INVOKE, invoke_id,
                                   "localhost", incoming)
            self.sender.send(outgoing.serialize())

            # Wait for the digested result and decode it.
            reply = ECHOMessage.deserialize(self.receiver.recv())

            # Only the compacted result data goes back to the client.
            self.server_socket.send(reply.data)

            invoke_id += 1
Пример #2
0
    def front_end_thread(self):
        """Receive client queries and execute them sequentially.

        Binds a REP socket on EXTERNAL_PORT; for each incoming QueryObject,
        wraps it in an invoke message, forwards it through ``self.sender``,
        waits for the digested reply on ``self.receiver``, and sends only
        the compacted result data back to the client.
        """
        # Query receiving socket (request/reply: exactly one reply per request)
        self.server_socket = self.context.socket(zmq.REP)
        self.server_socket.bind("tcp://*:%s" % EXTERNAL_PORT)

        # Initiate the query's invoke_id (incremented once per query)
        i = START_QID

        while 1:
            # Receive a user query (QueryObject instance)
            query = self.server_socket.recv_pyobj()
            logging.debug("receive query : {}".format(query))

            # Create an invoke message wrapping the query
            msg = ECHOMessage(ECHOMessage.MSG_TYPE_INVOKE, i, "localhost",
                              query)

            # Send a query for an execution, then block for the digested reply
            self.sender.send(msg.serialize())
            digested_message = self.receiver.recv()
            msg = ECHOMessage.deserialize(digested_message)

            # Return only compacted results to client
            self.server_socket.send(msg.data)

            # Increase invoke_id
            i += 1
Пример #3
0
    def run(self):
        """Thread body: deserialize incoming messages and dispatch them forever."""
        # Bind hot-loop attributes to locals to avoid repeated lookups.
        handle = self.dispatch
        recv = self.receiver.recv

        while True:
            # Block for the next digested message from a peer node,
            # decode it, and route it to the matching handler.
            handle(ECHOMessage.deserialize(recv()))
Пример #4
0
    def receive_thread(self):
        """Collect result messages and log each query's elapsed time.

        For every result received, logs the wall-clock seconds between the
        time the query with the same id was sent (recorded in
        ``self.time_of_queries``) and now.
        """
        # Hoist attribute lookups out of the loop.
        recv = self.receiver.recv
        sent_at = self.time_of_queries

        while True:
            # Decode the next result and log its end-to-end latency.
            reply = ECHOMessage.deserialize(recv())
            logging.info(time.time() - sent_at[reply.get_id()])
Пример #5
0
    def run(self):
        """Thread entry point: deserialize and dispatch messages forever.

        Overrides ``threading.Thread.run``; never returns.
        """

        # Assign function addresses locally so the hot loop avoids
        # repeated attribute lookups.
        dispatch = self.dispatch
        receiver = self.receiver

        while 1:
            # Receive a digested (serialized) message from other nodes,
            # then decode it and route it to the handler.
            digested_message = receiver.recv()

            dispatch(ECHOMessage.deserialize(digested_message))
Пример #6
0
    def receive_thread(self):
        """Collect result messages from the system and log per-query latency.

        For each result, logs the elapsed wall-clock time since the query
        with the same id was sent (send times live in
        ``self.time_of_queries``).
        """
        # Assign variables locally to avoid attribute lookups in the loop
        receiver = self.receiver
        time_of_queries = self.time_of_queries

        while 1:
            results = receiver.recv()
            msg = ECHOMessage.deserialize(results)

            # Log elapsed seconds between send and receive for this query id
            logging.info(time.time() - time_of_queries[msg.get_id()])
Пример #7
0
def main():

    keywords = set()

    ####################
    # getting keywords #
    ####################
    for server_name in server_names:
        connection = pymongo.MongoClient(server_name, 27017)
        collection = connection["sensor"]["objects"]
        objects_cursor = collection.find({}, {
            '_id': 0,
            'content': 0,
            'last-updated': 0
        })
        for obj in objects_cursor:
            for keyword in _flatten_to_list(obj):
                if isinstance(keyword,unicode) and len(keyword)<200 and\
                   not ' ' in keyword and not '/' in keyword and\
                   not '@' in keyword and not 'mac-address' in keyword and\
                   not '[' in keyword and not 'image-id' in keyword and\
                   not 'command-line' in keyword and not 'memory-util' in keyword and\
                   not 'count' in keyword and not 'link_' in keyword and\
                   not 'group' in keyword and not 'sum' in keyword and\
                   not 'max' in keyword and not 'min' in keyword and\
                   not 'project' in keyword and not '_fake' in keyword:
                    #term.count(':')<=1 and term.count('-') <=2 and \
                    keywords.add(keyword)
        print "keywords length of %s = %d" % (server_name, len(keywords))

    #######################################
    # check the number of returned result #
    #######################################
    qid = 999999
    tailered_keywords = set()

    context = zmq.Context()
    receiver = context.socket(zmq.PULL)
    receiver.bind("tcp://*:{}".format(INTERFACE_PORT))
    sender = context.socket(zmq.PUSH)
    sender.connect("tcp://localhost:{}".format(DISPATCHER_PORT))
    for keyword in keywords:
        #        print keyword
        # create query object
        match_query, project_query, aggr_query, group_query, pass_one_query, link_attributes, pass_two_query, flags = nsInterface(
            "count(object-name) " + keyword, True)

        if not match_query:
            print "unable to parse %s" % keyword
            continue

        if 'aggregation' in flags:
            query = QueryObject(match_query,
                                project_query,
                                aggr_query,
                                group_query,
                                parameters={
                                    'isRank': False,
                                    "isApprox": False
                                })
        else:
            print 'parsing query error'

        # create echo msg and send
        msg = ECHOMessage(ECHOMessage.MSG_TYPE_INVOKE, qid, "localhost", query)
        sender.send(msg.serialize())
        qid += 1
        # receive results
        results = receiver.recv()
        msg = ECHOMessage.deserialize(results)
        n_object_match = msg.get_data()[0]['object-name-count']

        if n_object_match > MIN_DF_THRESHOLD and n_object_match < DF_THRESHOLD:
            tailered_keywords.add(keyword)


#        print len(tailered_keywords)
    print "tailered keywords length = " + str(len(tailered_keywords))

    ###################
    # add combination #
    ###################

    temp_pool = set()
    for i in xrange(MIN_COMBINATION, MAX_COMBINATION):
        for j in xrange(0, 400):
            temp_pool.add(' | '.join(random.sample(tailered_keywords, i)))

    print "total keywords length = " + str(
        len(tailered_keywords) + len(temp_pool))
    ###################
    # write to a file #
    ###################

    print 'writing...'
    with open(FILE, 'w') as f:
        if MIN_COMBINATION == 1:
            f.write('\n'.join(tailered_keywords))
        f.write('\n'.join(temp_pool))
    print 'DONE'
Пример #8
0
    def run(self):
        """Run the load generator.

        Sets up the ZeroMQ push/pull pair, starts the result-collecting
        thread, then sends randomly chosen queries with Poisson-distributed
        inter-arrival times at ``self.args.rate`` queries/sec for
        ``self.args.time`` seconds, and finally calls ``self.finishing()``.
        """
        # One stats log file per configured arrival rate.
        logging.basicConfig(
                    filename=str(path[0]) + '/../stats_' +
                             str(self.args.rate) + '.log',
                    level=logging.DEBUG, format='%(message)s')
        logging.info("=========execution start with poisson arrival rate \
                     {0} query per sec==========".format(self.args.rate))

        # Setup connection: PULL for results, PUSH toward the dispatcher.
        context = zmq.Context()
        self.receiver = context.socket(zmq.PULL)
        self.receiver.bind("tcp://*:{}".format(INTERFACE_PORT))
        self.sender = context.socket(zmq.PUSH)
        self.sender.connect("tcp://localhost:{}".format(DISPATCHER_PORT))

        # Start receiving thread (daemon: it dies with the main thread)
        t = threading.Thread(target=self.receive_thread)
        t.daemon = True
        t.start()

        # Load queries, (time consuming)
        print "loading queries..."
        queries = self.read_file_to_query_list(self.args.queryListFile)
        queries_pool_size = len(queries) - 1
        print "loading queries... completed"

        # Assign variables locally to keep attribute lookups out of the loop
        start_time = time.time()
        duration = self.args.time
        rate = self.args.rate
        sender = self.sender
        time_of_queries = self.time_of_queries

        print "=======start load generator======"

        while (time.time() - start_time) < duration:
            t0 = time.time()

            # Randomly pick a query (uniform over the loaded pool)
            query = queries[random.randint(0, queries_pool_size)]

            # Generate the invoke message
            msg = ECHOMessage(ECHOMessage.MSG_TYPE_INVOKE,
                              self.qid, "localhost", query)

            # Record timestamp so receive_thread can compute latency by qid
            time_of_queries[self.qid] = time.time()

            # Send a message
            sender.send(msg.serialize())
            self.qid += 1

            # Poisson process wait time (exponential inter-arrival gap)
            nextTime = -log(1.0 - random.random()) / rate

            try:
                time.sleep((t0 + nextTime) - time.time())
            except IOError:
                # In case we have negative value in sleep parameter
                # (i.e. we are already behind schedule)
                pass

        self.finishing()
Пример #9
0
def main():
  
    keywords = set()
    
    ####################
    # getting keywords #
    ####################
    for server_name in server_names:
        connection = pymongo.MongoClient(server_name,27017)
        collection = connection["sensor"]["objects"]
        objects_cursor = collection.find({},{'_id':0,'content':0,'last-updated':0})
        for obj in objects_cursor:
            for keyword in _flatten_to_list(obj):
                if isinstance(keyword,unicode) and len(keyword)<200 and\
                   not ' ' in keyword and not '/' in keyword and\
                   not '@' in keyword and not 'mac-address' in keyword and\
                   not '[' in keyword and not 'image-id' in keyword and\
                   not 'command-line' in keyword and not 'memory-util' in keyword and\
                   not 'count' in keyword and not 'link_' in keyword and\
                   not 'group' in keyword and not 'sum' in keyword and\
                   not 'max' in keyword and not 'min' in keyword and\
                   not 'project' in keyword and not '_fake' in keyword:
                    #term.count(':')<=1 and term.count('-') <=2 and \
                    keywords.add(keyword)
        print "keywords length of %s = %d" %(server_name,len(keywords))
        
    #######################################
    # check the number of returned result #
    #######################################
    qid = 999999
    tailered_keywords = set()
    
    context = zmq.Context()
    receiver = context.socket(zmq.PULL)
    receiver.bind("tcp://*:{}".format(INTERFACE_PORT))
    sender = context.socket(zmq.PUSH)
    sender.connect ("tcp://localhost:{}".format(DISPATCHER_PORT))
    for keyword in keywords:
#        print keyword
        # create query object
        match_query,project_query,aggr_query,group_query,pass_one_query,link_attributes,pass_two_query,flags = nsInterface("count(object-name) "+keyword,True)
        
        if not match_query:
            print "unable to parse %s" % keyword
            continue
        
        if 'aggregation' in flags:
            query = QueryObject(match_query,project_query,aggr_query,group_query,parameters={'isRank':False,"isApprox":False})
        else:
            print 'parsing query error'
        
        # create echo msg and send
        msg = ECHOMessage(ECHOMessage.MSG_TYPE_INVOKE,qid,"localhost",query)
        sender.send(msg.serialize())          
        qid += 1
        # receive results
        results = receiver.recv()
        msg = ECHOMessage.deserialize(results)
        n_object_match = msg.get_data()[0]['object-name-count']
        
        if n_object_match > MIN_DF_THRESHOLD and n_object_match < DF_THRESHOLD:
            tailered_keywords.add(keyword)
#        print len(tailered_keywords)    
    print "tailered keywords length = "+str(len(tailered_keywords))
    
    ###################
    # add combination #
    ###################

    temp_pool = set()
    for i in xrange(MIN_COMBINATION,MAX_COMBINATION):
        for j in xrange(0,400):
            temp_pool.add(' | '.join(random.sample(tailered_keywords,i)))
            
    print "total keywords length = "+str(len(tailered_keywords)+len(temp_pool))    
    ###################
    # write to a file #
    ###################
    
    print 'writing...'    
    with open(FILE,'w') as f:
        if MIN_COMBINATION == 1:
            f.write('\n'.join(tailered_keywords))
        f.write('\n'.join(temp_pool))
    print 'DONE'
Пример #10
0
    def run(self):
        logging.basicConfig(filename=str(path[0]) + '/../stats_' +
                            str(self.args.rate) + '.log',
                            level=logging.DEBUG,
                            format='%(message)s')
        logging.info("=========execution start with poisson arrival rate \
                     {0} query per sec==========".format(self.args.rate))

        # Setup connection
        context = zmq.Context()
        self.receiver = context.socket(zmq.PULL)
        self.receiver.bind("tcp://*:{}".format(INTERFACE_PORT))
        self.sender = context.socket(zmq.PUSH)
        self.sender.connect("tcp://localhost:{}".format(DISPATCHER_PORT))

        # Start receiving thread
        t = threading.Thread(target=self.receive_thread)
        t.daemon = True
        t.start()

        # Load queries, (time consuming)
        print "loading queries..."
        queries = self.read_file_to_query_list(self.args.queryListFile)
        queries_pool_size = len(queries) - 1
        print "loading queries... completed"

        # Assign variable locally
        start_time = time.time()
        duration = self.args.time
        rate = self.args.rate
        sender = self.sender
        time_of_queries = self.time_of_queries

        print "=======start load generator======"

        while (time.time() - start_time) < duration:
            t0 = time.time()

            # Randomly pick a query
            query = queries[random.randint(0, queries_pool_size)]

            # Generate message
            msg = ECHOMessage(ECHOMessage.MSG_TYPE_INVOKE, self.qid,
                              "localhost", query)

            # Record timestamp
            time_of_queries[self.qid] = time.time()

            # Send a message
            sender.send(msg.serialize())
            self.qid += 1

            # Poisson process wait time
            nextTime = -log(1.0 - random.random()) / rate

            try:
                time.sleep((t0 + nextTime) - time.time())
            except IOError:
                # In case we have negative value in sleep parameter
                pass

        self.finishing()