Example No. 1
    def printresults():
        logger.info('{} {} {} {}'.format(dataStartTime.strftime("%Y-%m-%d %H:%M:%S"), dataEndTime.strftime("%Y-%m-%d %H:%M:%S"), runTime, queryPerSec))
        line = list()
        querypermin = queryPerSec * 60
        endtime = datetime.now(timezone('UTC')) + timedelta(minutes=runtime)
        popularitylist = list()
        while True:
            tm.sleep(5)
            time = datetime.now(timezone('UTC'))
            logger.info("Time: {}".format(time.strftime("%Y-%m-%d %H:%M:%S")))
            if time >= endtime:
                break

            # Queries are generated once per minute, both to amortize the overhead of query generation and because the segment granularity is one minute.
            newquerylist = list()
            if filename != "":
                newquerylist = QueryGenerator.generateQueriesFromFile(dataStartTime, dataEndTime, querypermin, timeAccessGenerator, periodAccessGenerator, filename)
            elif isbatch == True:
                newquerylist = QueryGenerator.generateQueries(dataStartTime, dataEndTime, querypermin, timeAccessGenerator, periodAccessGenerator, popularitylist)
            else:
                newquerylist = QueryGenerator.generateQueries(dataStartTime, time, querypermin, timeAccessGenerator, periodAccessGenerator, popularitylist)
            
            
            for query in newquerylist:
                print(query.interval)
                try:
                    line.append(applyOperation(query, config, logger))
                except Exception as inst:
                    logger.info(type(inst))     # the exception instance
                    logger.info(inst.args)      # arguments stored in .args
                    logger.info(inst)           # __str__ allows args to be printed directly
                    x, y = inst.args
                    logger.info('x = %s', x)
                    logger.info('y = %s', y)
        

            nextminute = time + timedelta(minutes=1)
            timediff = (nextminute - datetime.now(timezone('UTC'))).total_seconds()
            if timediff > 0:
                tm.sleep(timediff)
            #    yield gen.sleep(timediff)

        wait_iterator = gen.WaitIterator(*line)
        while not wait_iterator.done():
            try:
                result = yield wait_iterator.next()
            except Exception as e:
                logger.info("Error {} from {}".format(e, wait_iterator.current_future))
            else:
                logger.info("Result {} received from {} at {}".format(
                    result, wait_iterator.current_future,
                    wait_iterator.current_index))
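Each example appends the Future returned by applyOperation to line and later drains those Futures with gen.WaitIterator. The helper itself is not shown on this page; the sketch below only illustrates what such a Tornado coroutine could look like, assuming a broker that accepts JSON queries over HTTP. The config.brokerurl attribute and the query.toJson() serializer are hypothetical names, not the project's actual API.

from datetime import datetime

from tornado import gen
from tornado.httpclient import AsyncHTTPClient, HTTPError


@gen.coroutine
def applyOperation(query, config, logger):
    # Hypothetical sketch: POST one generated query to the broker and
    # return the elapsed time. Attribute names on query and config are assumptions.
    client = AsyncHTTPClient()
    body = query.toJson()          # assumed serializer on the query object
    url = config.brokerurl         # assumed config attribute
    started = datetime.utcnow()
    try:
        response = yield client.fetch(url, method="POST", body=body,
                                      headers={"Content-Type": "application/json"})
        elapsed = (datetime.utcnow() - started).total_seconds()
        logger.info("query answered in %.3fs (HTTP %d)", elapsed, response.code)
        raise gen.Return(elapsed)
    except HTTPError as e:
        elapsed = (datetime.utcnow() - started).total_seconds()
        logger.error("query failed after %.3fs: %s", elapsed, e)
        raise gen.Return(elapsed)

Because a @gen.coroutine function returns a Future as soon as it is called, line.append(applyOperation(...)) only queues the request; the gen.WaitIterator loop at the end of each example then yields every result (or exception) as it completes.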
Example No. 2
 def printresults():
     logger.info('{} {} {} {}'.format(start.strftime("%Y-%m-%d %H:%M:%S"),
                                      end.strftime("%Y-%m-%d %H:%M:%S"),
                                      runtime, queryPerSec))
     line = list()
     querypermin = queryPerSec * 60
     endtime = datetime.now(timezone('UTC')) + timedelta(minutes=runtime)
     popularitylist = list()
     newquerylist = list()
     if filename != "":
         newquerylist = QueryGenerator.generateQueriesFromFile(
             start, end, querypermin * runtime, timeAccessGenerator,
             periodAccessGenerator, filename)
     elif isbatch == True:
         newquerylist = QueryGenerator.generateQueries(
             start, end, querypermin * runtime, timeAccessGenerator,
             periodAccessGenerator, popularitylist)
Example No. 3
    def printresults():
        logger.log(
            STATS, '{} {} {} {} {}'.format(start.strftime("%Y-%m-%d %H:%M:%S"),
                                           end.strftime("%Y-%m-%d %H:%M:%S"),
                                           runtime, queryPerSec, queryratio))

        querypermin = queryPerSec * 60
        endtime = datetime.now(timezone('UTC')) + timedelta(minutes=runtime)
        line = list()
        popularitylist = list()
        newquerylist = list()

        if filename != "":
            newquerylist = QueryGenerator.generateQueriesFromFile(
                start, end, querypermin * runtime, timeAccessGenerator,
                periodAccessGenerator, querytype, queryratio, filename)
        elif isbatch == True:
            newquerylist = QueryGenerator.generateQueries(
                start, end, querypermin * runtime, timeAccessGenerator,
                periodAccessGenerator, popularitylist, querytype, queryratio,
                logger)
        else:
            #logger.info("Run.py start queryendtime "+str(start)+", "+str(endtime))
            queryStartInterval = start
            queryEndInterval = start + timedelta(minutes=1)
            for i in range(0, runtime):
                logger.info("Start generating queries for interval " +
                            str(queryStartInterval) + " - " +
                            str(queryEndInterval))
                newquerylist.extend(
                    QueryGenerator.generateQueries(
                        queryStartInterval, queryEndInterval, querypermin,
                        timeAccessGenerator, periodAccessGenerator,
                        popularitylist, querytype, queryratio, logger))
                queryEndInterval = queryEndInterval + timedelta(minutes=1)

            logger.info("Finished generating queries. num queries generated " +
                        str(len(newquerylist)))

        if filename != "" or isbatch == True:
            count = 0
            time = datetime.now(timezone('UTC'))
            logger.info("Time: {}".format(time.strftime("%Y-%m-%d %H:%M:%S")))
            nextminute = time + timedelta(minutes=1)
            for query in newquerylist:
                try:
                    line.append(
                        applyOperation(query, config, brokernameurl, logger))
                except Exception as inst:
                    logger.error(type(inst))  # the exception instance
                    logger.error(inst.args)  # arguments stored in .args
                    logger.error(
                        inst)  # __str__ allows args to be printed directly
                    x, y = inst.args
                    logger.error('x = %s', x)
                    logger.error('y = %s', y)

                count = count + 1
                if count >= querypermin:
                    timediff = (nextminute -
                                datetime.now(timezone('UTC'))).total_seconds()
                    if timediff > 0:
                        yield gen.sleep(timediff)
                    count = 0
                    time = datetime.now(timezone('UTC'))
                    logger.info("Time: {}".format(
                        time.strftime("%Y-%m-%d %H:%M:%S")))
                    nextminute = time + timedelta(minutes=1)
        else:
            # frequency of queries per millisecond
            queryPerMilliSecond = float(queryPerSec) / 1000
            # number of samples spaced by 1 millisecond
            numSamples = runtime * 60 * 1000
            numQueries, querySchedule = genPoissonQuerySchedule(
                queryPerMilliSecond, numSamples)
            logger.info("Poisson numQueries = " + str(numQueries))

            queryScheduleIdx = 0
            count = 0
            while count < len(newquerylist):
                sample = querySchedule[queryScheduleIdx]
                #logger.info("Poisson sample is "+str(sample[0])+", "+str(sample[1]))
                if (sample[0] == 0):
                    #logger.info("Sleeping for "+str(sample[1]))
                    yield gen.sleep(
                        float(sample[1]) /
                        1000)  # divide by 1000 to convert it into seconds
                else:
                    for i in range(0, sample[0]):
                        try:
                            line.append(
                                applyOperation(newquerylist[count], config,
                                               brokernameurl, logger))
                            #applyOperation(newquerylist[count], config, brokernameurl, logger)
                            newquerylist[count].setTxTime(datetime.now())
                            #logger.info("Running query "+str(sample[0]))
                        except Exception as inst:
                            logger.error(type(inst))  # the exception instance
                            logger.error(
                                inst.args)  # arguments stored in .args
                            logger.error(
                                inst
                            )  # __str__ allows args to be printed directly
                        count = count + 1
                        if count >= len(newquerylist):
                            break
                queryScheduleIdx = queryScheduleIdx + 1

        wait_iterator = gen.WaitIterator(*line)
        while not wait_iterator.done():
            try:
                result = yield wait_iterator.next()
            except Exception as e:
                logger.error("Error {} from {}".format(
                    e, wait_iterator.current_future))
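The Poisson branch above depends on genPoissonQuerySchedule(queryPerMilliSecond, numSamples), which is not shown on this page. From the way its result is consumed (an entry whose first field is 0 means "sleep for that many milliseconds", otherwise the first field is the number of queries to fire), a plausible sketch is the following; the run-length encoding of idle gaps is an assumption, not the project's actual implementation.

import numpy as np


def genPoissonQuerySchedule(queryPerMilliSecond, numSamples):
    # Hypothetical sketch: draw a Poisson arrival count for every millisecond
    # and compress runs of empty milliseconds into a single (0, gap_ms) entry.
    arrivals = np.random.poisson(queryPerMilliSecond, numSamples)
    schedule = []
    idle = 0
    for count in arrivals:
        if count == 0:
            idle += 1                      # extend the current idle gap
            continue
        if idle:
            schedule.append((0, idle))     # (0, milliseconds to sleep)
            idle = 0
        schedule.append((int(count), 1))   # fire `count` queries in this millisecond
    if idle:
        schedule.append((0, idle))
    numQueries = int(arrivals.sum())
    return numQueries, schedule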
Example No. 4
def threadoperation(start, time, numqueries, timeAccessGenerator,
                    minqueryperiod, maxqueryperiod, periodAccessGenerator,
                    config, logger, x, values):

    successfulquerytime = 0
    successfulquerycount = 0
    failedquerytime = 0
    failedquerycount = 0
    totalquerytime = 0
    totalquerycount = 0
    endtime = datetime.now() + dt.timedelta(minutes=runtime)
    currentSegRank = []
    #oldest_timestamp = start.total_seconds()
    break_flag = 0
    while True:
        break_flag = break_flag + 1
        if datetime.now() >= endtime:
            break

        time = datetime.now(timezone('UTC'))
        #newquerylist = QueryGenerator.generateQueries(start, time, numqueries, timeAccessGenerator, minqueryperiod, maxqueryperiod, periodAccessGenerator);
        if (len(currentSegRank) == 0):
            y = time - start
            z = y.total_seconds()
            x = dt.timedelta(seconds=z)
            distance = x.total_seconds()
            for i in range(0, int(round(distance))):
                timepoint = start + dt.timedelta(0, i)
                currentSegRank.append(timepoint)
        else:
            new_interval = (time - start).total_seconds()
            print(time)
            print(start)
            print(new_interval)
            for i in range(0, int(round(new_interval))):
                # draw a Zipf-distributed rank and insert the new timepoint there
                samples = randZipf(1 + len(currentSegRank), 1.2, 1)
                timepoint = start + dt.timedelta(0, i)
                print("add to rank: index")
                print(samples[0])
                print("timepoint")
                print(timepoint)
                print("list seg size %d " % len(currentSegRank))
                currentSegRank.insert(samples[0], timepoint)
                print(currentSegRank)

        print(filename)

        if (filename != ""):
            #break_flag = 1
            newquerylist = QueryGenerator.generateQueriesFromFile(
                start, time, timeAccessGenerator, minqueryperiod,
                maxqueryperiod, periodAccessGenerator, filename)
        #elif(accessdistribution == "dynamiczip"):
        #newquerylist = QueryGenerator.generateQueries(start, time, numqueries, timeAccessGenerator, minqueryperiod, maxqueryperiod, periodAccessGenerator, currentSegRank)
        else:
            #print "3"
            newquerylist = QueryGenerator.generateQueries(
                start, time, numqueries, timeAccessGenerator, minqueryperiod,
                maxqueryperiod, periodAccessGenerator, currentSegRank)
        line = applyOperation(newquerylist[0], config, logger)

        print(line[0:10])
        if (line[0][0:10] == "Successful"):
            print(line[1].count)
            successfulquerytime += float(line[0][12:])
            successfulquerycount += 1
            totalquerytime += float(line[0][12:])
            totalquerycount += 1
        elif (line[0][0:6] == "Failed"):
            failedquerytime += float(line[0][8:])
            failedquerycount += 1
            totalquerytime += float(line[0][8:])
            totalquerycount += 1

    datastructure = [
        successfulquerytime, successfulquerycount, failedquerytime,
        failedquerycount, totalquerytime, totalquerycount
    ]
    values.put(datastructure)
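Example 4 picks the insertion rank for each new timepoint with randZipf(1 + len(currentSegRank), 1.2, 1), another helper that is not shown here. A common way to draw Zipf-distributed indices is inverse-CDF sampling over the first n ranks; the sketch below assumes that interface (indices in [0, n) with skew exponent alpha) and is not the project's own implementation.

import numpy as np


def randZipf(n, alpha, numSamples):
    # Hypothetical sketch: return numSamples indices in [0, n) whose
    # frequencies follow a Zipf law with exponent alpha.
    ranks = np.arange(1, n + 1, dtype=float)
    weights = ranks ** (-float(alpha))
    cdf = np.cumsum(weights)
    cdf /= cdf[-1]
    draws = np.random.random_sample(numSamples)
    # searchsorted maps each uniform draw to the first rank whose CDF covers it
    return np.searchsorted(cdf, draws)

With this interface, randZipf(...)[0] is always a valid position for list.insert, and new timepoints land near the front of currentSegRank far more often than at the tail.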