def generateSysMessagesZipFile(emailAddress, dumpFileNamePrefix, sensorId,
                               sessionId):
    dumpFileName = sessionId + "/" + dumpFileNamePrefix + ".txt"
    zipFileName = sessionId + "/" + dumpFileNamePrefix + ".zip"
    dirname = util.getPath(STATIC_GENERATED_FILE_LOCATION + sessionId)
    if not os.path.exists(dirname):
        os.makedirs(dirname)
    dumpFilePath = util.getPath(STATIC_GENERATED_FILE_LOCATION) + dumpFileName
    zipFilePath = util.getPath(STATIC_GENERATED_FILE_LOCATION) + zipFileName
    if os.path.exists(dumpFilePath):
        os.remove(dumpFilePath)
    if os.path.exists(zipFilePath):
        os.remove(zipFilePath)
    systemMessages = DbCollections.getSystemMessages().find(
        {SENSOR_ID: sensorId})
    if systemMessages is None:
        util.debugPrint("generateZipFileForDownload: No system info found")
        return
    dumpFile = open(dumpFilePath, "a")
    zipFile = zipfile.ZipFile(zipFilePath, mode="w")
    try:
        for systemMessage in systemMessages:
            data = msgutils.getCalData(systemMessage)
            del systemMessage["_id"]
            if CAL in systemMessage and DATA_KEY in systemMessage[CAL]:
                del systemMessage[CAL][DATA_KEY]
            systemMessage[DATA_TYPE] = ASCII
            systemMessageString = json.dumps(systemMessage,
                                             sort_keys=False,
                                             indent=4) + "\n"
            length = len(systemMessageString)
            dumpFile.write(str(length))
            dumpFile.write("\n")
            dumpFile.write(systemMessageString)
            if data is not None:
                dataString = str(data)
                dumpFile.write(dataString)
                dumpFile.write("\n")
        dumpFile.close()
        zipFile.write(dumpFilePath,
                      arcname=dumpFileNamePrefix + ".txt",
                      compress_type=zipfile.ZIP_DEFLATED)
        zipFile.close()
        session = SessionLock.getSession(sessionId)
        if session is None:
            os.remove(dumpFilePath)
            os.remove(zipFilePath)
            return
        url = Config.getGeneratedDataPath() + "/" + zipFileName
        watchForFileAndSendMail(emailAddress, url, zipFileName)
    except:
        print "Unexpected error:", sys.exc_info()[0]
        print sys.exc_info()
        traceback.print_exc()
    finally:
        dumpFile.close()
        if os.path.exists(dumpFilePath):
            os.remove(dumpFilePath)
        zipFile.close()
def watchForFileAndSendMail(emailAddress, url, uri):
    """
    Watch for the dump file to appear and send an email to the user
    after it has appeared.
    """
    for i in range(0, 100):
        filePath = util.getPath(STATIC_GENERATED_FILE_LOCATION + uri)
        if os.path.exists(filePath) and os.stat(filePath).st_size != 0:
            message = "This is an automatically generated message.\n"\
                      + "The requested data has been generated.\n"\
                      + "Please retrieve your data from the following URL: \n"\
                      + url \
                      + "\nYou must retrieve this file within 24 hours."
            util.debugPrint(message)
            SendMail.sendMail(message, emailAddress,
                              "Your Data Download Request")
            return
        else:
            util.debugPrint("Polling for file " + filePath)
            time.sleep(10)

    message = "This is an automatically generated message.\n"\
              + "Tragically, the requested data could not be generated.\n"\
              + "Sorry to have dashed your hopes into the ground.\n"
    SendMail.sendMail(message, emailAddress, "Your Data Download Request")
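# A minimal reader sketch for the length-prefixed dump format written above
# (not part of the original; the record layout is inferred from the writer:
# one line carrying the JSON byte count, then exactly that many bytes of
# JSON; any appended cal-data line is not handled here):
import json

def readDumpedSystemMessages(dumpFilePath):
    messages = []
    with open(dumpFilePath, "r") as dumpFile:
        lengthLine = dumpFile.readline()
        while lengthLine:
            length = int(lengthLine)
            messages.append(json.loads(dumpFile.read(length)))
            lengthLine = dumpFile.readline()
    return messages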
Example #3
def generateSpectrumForSweptFrequency(msg, sessionId, minFreq, maxFreq):
    try:
        chWidth = Config.getScreenConfig()[CHART_WIDTH]
        chHeight = Config.getScreenConfig()[CHART_HEIGHT]

        spectrumData = msgutils.trimSpectrumToSubBand(msg, minFreq, maxFreq)
        noiseFloorData = msgutils.trimNoiseFloorToSubBand(
            msg, minFreq, maxFreq)
        nSteps = len(spectrumData)
        freqDelta = float(maxFreq - minFreq) / float(1E6) / nSteps
        freqArray = [
            float(minFreq) / float(1E6) + i * freqDelta
            for i in range(0, nSteps)
        ]
        plt.figure(figsize=(chWidth, chHeight))
        plt1 = plt.scatter(freqArray,
                           spectrumData,
                           color='red',
                           label="Signal Power")
        plt2 = plt.scatter(freqArray,
                           noiseFloorData,
                           color='black',
                           label="Noise Floor")
        plt.legend(handles=[plt1, plt2])
        xlabel = "Freq (MHz)"
        plt.xlabel(xlabel)
        ylabel = "Power (dBm)"
        plt.ylabel(ylabel)
        locationMessage = DbCollections.getLocationMessages().find_one(
            {"_id": ObjectId(msg["locationMessageId"])})
        t = msg["t"]
        tz = locationMessage[TIME_ZONE_KEY]
        title = "Spectrum at " + timezone.formatTimeStampLong(t, tz)
        plt.title(title)
        spectrumFile = sessionId + "/" + msg[SENSOR_ID] + "." + str(
            msg['t']) + "." + str(minFreq) + "." + str(
                maxFreq) + ".spectrum.png"
        spectrumFilePath = util.getPath(
            STATIC_GENERATED_FILE_LOCATION) + spectrumFile
        plt.savefig(spectrumFilePath, pad_inches=0, dpi=100)
        plt.clf()
        plt.close()
        # plt.close("all")
        urlPrefix = Config.getGeneratedDataPath()
        retval = {
            "status": "OK",
            "spectrum": urlPrefix + "/" + spectrumFile,
            "freqArray": freqArray,
            "spectrumData": spectrumData.tolist(),
            "noiseFloorData": noiseFloorData.tolist(),
            "title": title,
            "xlabel": xlabel,
            "ylabel": ylabel
        }
        return retval
    except:
        print "Unexpected error:", sys.exc_info()[0]
        print sys.exc_info()
        traceback.print_exc()
        raise
def generateOccupancyForFFTPower(msg, fileNamePrefix):
    chWidth = Config.getScreenConfig()[CHART_WIDTH]
    chHeight = Config.getScreenConfig()[CHART_HEIGHT]

    measurementDuration = DataMessage.getMeasurementDuration(msg)
    nM = DataMessage.getNumberOfMeasurements(msg)
    n = DataMessage.getNumberOfFrequencyBins(msg)
    cutoff = DataMessage.getThreshold(msg)
    # miliSecondsPerMeasurement = float(measurementDuration * 1000) / float(nM)
    spectrogramData = msgutils.getData(msg)
    # Generate the occupancy stats for the acquisition.
    occupancyCount = [0 for i in range(0, nM)]
    for i in range(0, nM):
        occupancyCount[i] = float(
            len(filter(lambda x: x >= cutoff,
                       spectrogramData[i, :]))) / float(n) * 100
    timeArray = [i for i in range(0, nM)]
    minOccupancy = np.min(occupancyCount)
    maxOccupancy = np.max(occupancyCount)
    plt.figure(figsize=(chWidth, chHeight))
    plt.axis([0, measurementDuration * 1000, minOccupancy, maxOccupancy])
    plt.xlim([0, measurementDuration])
    plt.plot(timeArray, occupancyCount, "g.")
    plt.xlabel("Time (s) since start of acquisition")
    plt.ylabel("Band Occupancy (%)")
    plt.title("Band Occupancy; Cutoff: " + str(cutoff))
    occupancyFilePath = util.getPath(
        STATIC_GENERATED_FILE_LOCATION) + fileNamePrefix + '.occupancy.png'
    plt.savefig(occupancyFilePath)
    plt.clf()
    plt.close()
    # plt.close('all')
    return fileNamePrefix + ".occupancy.png"
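# For reference, the per-measurement loop above can be collapsed into one
# vectorized comparison; a minimal sketch, assuming spectrogramData is an
# nM x n numpy array as in the original:
import numpy as np

def occupancyPercent(spectrogramData, cutoff):
    # fraction of bins at or above the cutoff in each row, as a percentage
    return (spectrogramData >= cutoff).mean(axis=1) * 100.0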
Example #5
def generateSpectrumForFFTPower(msg, milisecOffset, sessionId):
    chWidth = Config.getScreenConfig()[CHART_WIDTH]
    chHeight = Config.getScreenConfig()[CHART_HEIGHT]

    startTime = msg["t"]
    nM = int(msg["nM"])
    n = int(msg["mPar"]["n"])
    measurementDuration = int(msg["mPar"]["td"])
    miliSecondsPerMeasurement = float(
        measurementDuration * MILISECONDS_PER_SECOND) / float(nM)
    powerVal = np.array(msgutils.getData(msg))
    spectrogramData = np.transpose(powerVal.reshape(nM, n))
    col = int(milisecOffset / miliSecondsPerMeasurement)
    util.debugPrint("Col = " + str(col))
    spectrumData = spectrogramData[:, col]
    maxFreq = msg["mPar"]["fStop"]
    minFreq = msg["mPar"]["fStart"]
    nSteps = len(spectrumData)
    freqDelta = float(maxFreq - minFreq) / float(1E6) / nSteps
    freqArray = [
        float(minFreq) / float(1E6) + i * freqDelta for i in range(0, nSteps)
    ]
    plt.figure(figsize=(chWidth, chHeight))
    plt.scatter(freqArray, spectrumData, color='red', label='Signal Power')
    # TODO -- fix this when the sensor is calibrated.
    wnI = msg[NOISE_FLOOR]
    noiseFloorData = [wnI for i in range(0, len(spectrumData))]
    plt.scatter(freqArray, noiseFloorData, color='black', label="Noise Floor")
    xlabel = "Freq (MHz)"
    ylabel = "Power (dBm)"
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    locationMessage = DbCollections.getLocationMessages().find_one(
        {"_id": ObjectId(msg["locationMessageId"])})
    t = msg["t"] + milisecOffset / float(MILISECONDS_PER_SECOND)
    tz = locationMessage[TIME_ZONE_KEY]
    title = "Spectrum at " + timezone.formatTimeStampLong(t, tz)
    plt.title(title)
    spectrumFile = sessionId + "/" + msg[SENSOR_ID] + "." + str(
        startTime) + "." + str(milisecOffset) + ".spectrum.png"
    spectrumFilePath = util.getPath(
        STATIC_GENERATED_FILE_LOCATION) + spectrumFile
    plt.savefig(spectrumFilePath, pad_inches=0, dpi=100)
    plt.clf()
    plt.close()
    # plt.close("all")
    retval = {
        "status": "OK",
        "spectrum": Config.getGeneratedDataPath() + "/" + spectrumFile,
        "freqArray": freqArray,
        "spectrumData": spectrumData.tolist(),
        "noiseFloorData": noiseFloorData,
        "title": title,
        "xlabel": xlabel,
        "ylabel": ylabel
    }
    return retval
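# The column selection above is plain index arithmetic over the reshaped
# acquisition; a minimal sketch of the same mapping (names hypothetical):
def measurementColumn(milisecOffset, measurementDurationSec, nM):
    # each of the nM measurements covers an equal slice of the acquisition
    miliSecondsPerMeasurement = float(measurementDurationSec * 1000) / float(nM)
    return int(milisecOffset / miliSecondsPerMeasurement)

# e.g. a 10 s acquisition with nM = 1000 gives 10 ms per column, so a
# milisecOffset of 250 selects column 25.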
Example #6
File: actRec.py Project: vonalan/HAR
def reload(stipfile, actions, dbname):
    path1 = util.getPath(dbname + ".cpkl")
    if not os.path.exists(path1):
        print str(datetime.datetime.now())[:19] + " loading datasets ... "
        flags, inputSet, targetSet = dataLoader3.loadDataSet(
            stipfile, actions, path1)
    else:
        util.exPath([path1])
        flags, inputSet, targetSet = dataLoader3.cpklload(path1)

    return flags, inputSet, targetSet
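# The actRec.py examples here all share a load-or-compute caching pattern
# around cpkl files; a minimal generic sketch (names hypothetical, assuming
# save/load callables such as dataLoader3.cpklsave/cpklload):
import os

def loadOrCompute(path, compute, save, load):
    if not os.path.exists(path):
        result = compute()
        save(result, path)  # cache for the next run
        return result
    return load(path)  # reuse the cached result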
Example #7
File: actRec.py Project: vonalan/HAR
def cluster1(r, KList, actions, stipfile, dbname):
    path4 = util.getPath(dbname + "_ycobjs_r" + str(r) + ".cpkl")
    '''bitterness of a frequently changing API'''
    '''*********************************************************'''
    splitlist, xtrain, xtest, xtrainM, xtestM, ytrainM, ytestM, xrandMX = split1(
        r, actions, stipfile, dbname)

    if not os.path.exists(path4):
        print str(datetime.datetime.now()
                  )[:19] + " counting datasets ..., round: " + str(r)
        ctrain, ctest = splitter.count(xtrain, xtest)
        ycobjs = [ctrain, ctest, ytrainM, ytestM]
        dataLoader3.cpklsave(ycobjs, path4)
    else:
        util.exPath([path4])
    '''************************************************************************************************'''
    ctrain, ctest, ytrainM, ytestM = dataLoader3.cpklload(path4)
    if numpy.shape(xtrainM)[0] > 100000:
        '''idx = idxrandom(numpy.shape(xtrainM)[0], 10000)'''
        idx = splitter.idxrandom(numpy.shape(xtrainM)[0], 100000)
        xrandMX2 = xtrainM[idx, :]
    else:
        xrandMX2 = xtrainM

    print str(
        datetime.datetime.now())[:19] + " The shape of xrandMX2 is : " + str(
            numpy.shape(xrandMX2))
    '''************************************************************************************************'''

    for k in KList:
        path3 = util.getPath(dbname + "_bow" + "_r" + str(r) + "_k" + str(k) +
                             ".cpkl")

        if not os.path.exists(path3):
            cluster.kMeans(r, k, ctrain, ctest, xtrainM, xtestM, xrandMX2,
                           path3)
        else:
            util.exPath([path3])
    '''bitterness of a frequently changing API'''
def getLogs(sessionId):
    """

    Bunlde the logs on the server and return a URL to it.

    URL Path:
        sessionId the session Id of the login in session.

    """
    try:
        zipFileName = sessionId + "/logs.zip"
        dirname = util.getPath(STATIC_GENERATED_FILE_LOCATION + sessionId)
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        zipFilePath = util.getPath(
            STATIC_GENERATED_FILE_LOCATION) + zipFileName
        if os.path.exists(zipFilePath):
            os.remove(zipFilePath)
        zipFile = zipfile.ZipFile(zipFilePath, mode="w")
        for f in [
                "/var/log/admin.log", "/var/log/monitoring.log",
                "/var/log/federation.log", "/var/log/streaming.log",
                "/var/log/occupancy.log", "/var/log/flask/federation.log",
                "/var/log/servicecontrol.log",
                "/var/log/flask/spectrumbrowser.log",
                "/var/log/flask/spectrumdb.log", "/var/log/nginx/access.log",
                "/var/log/nginx/error.log"
        ]:
            if os.path.exists(f):
                zipFile.write(f, compress_type=zipfile.ZIP_DEFLATED)
        zipFile.close()
        url = Config.getGeneratedDataPath() + "/" + zipFileName
        return jsonify({"status": "OK", "url": url})
    except:
        print "Unexpected error:", sys.exc_info()[0]
        print sys.exc_info()
        traceback.print_exc()
        util.logStackTrace(sys.exc_info())
        raise
def setDefaults():
    global mc
    debugFlagDefaults = {
        "MSOD_DISABLE_AUTH": disableAuthentication,
        "MSOD_RELAXED_PASSWORDS": debugRelaxedPasswords,
        "MSOD_GENERATE_TEST_CASE": generateTestCase,
        "MSOD_DISABLE_SESSION_ID_CHECK": disableSessionIdCheck,
        "MSOD_DEBUG_LOGGING": debug
    }
    dirname = util.getPath(STATIC_GENERATED_FILE_LOCATION + "unit-tests")
    if not os.path.exists(dirname):
        os.makedirs(dirname)
    mc.set("MSOD_DEBUG_FLAGS", debugFlagDefaults)
Example #10
def scanGeneratedDirs():
    """
        Scan generated directories and remove any if they are over 2 days old.
        """
    dname = util.getPath(STATIC_GENERATED_FILE_LOCATION)
    subdirs = os.listdir(dname)
    for dirname in subdirs:
        fname = os.path.join(dname, dirname)
        if os.path.isdir(fname) and dirname.startswith("user"):
            mtime = os.path.getmtime(fname)
            current_time = time.time()
            if current_time - mtime > 2 * SECONDS_PER_DAY:
                shutil.rmtree(fname)
Example #11
File: pdf.py Project: ouracademy/demos
def addImages(origin, vb, sign, output):
    image_rectangle = fitz.Rect(500, 40, 550, 100)
    file_handle = fitz.open(origin)
    num_pages = file_handle.pageCount
    for page_number in range(0, num_pages):
        page = file_handle[page_number]
        page.insertImage(image_rectangle, filename=vb)
        if page_number == num_pages - 1:
            x, y, x1, y1 = page.searchFor("Lima, 04 de enero de 2021")[0]
            rectangle = fitz.Rect(shape_sign(x + 30, y + 30))
            page.insertImage(rectangle, filename=sign)
    output_file = 'output-' + output
    file_handle.save(getPath(output_file))
    return output_file
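# A minimal usage sketch for addImages (file names hypothetical; assumes the
# legacy camelCase PyMuPDF API used above, i.e. pageCount/insertImage/searchFor):
#
#     outputName = addImages("contract.pdf", "vb-logo.png", "firma.png",
#                            "contract.pdf")
#
# This stamps the logo on every page, places the signature next to the dated
# line on the last page, and saves the result via getPath("output-contract.pdf").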
Example #12
File: actRec.py Project: vonalan/HAR
def cluster2(r, KList, actions, stipfile, dbname):
    path4 = util.getPath(dbname + "_ycobjs_r" + str(r) + ".cpkl")
    '''bitterness of a frequently changing API'''
    '''*********************************************************'''
    splitlist, ctrain, ctest, ctrainM, ctestM, xtrainM, xtestM, ytrainM, ytestM, xrandMX = split2(
        r, actions, stipfile, dbname)

    if not os.path.exists(path4):
        ycobjs = [ctrainM, ctestM, ytrainM, ytestM]
        dataLoader3.cpklsave(ycobjs, path4)
    else:
        util.exPath([path4])

    for k in KList:
        path3 = util.getPath(dbname + "_bow" + "_r" + str(r) + "_k" + str(k) +
                             ".cpkl")

        if not os.path.exists(path3):
            cluster.kMeans(r, k, ctrain, ctest, xtrainM, xtestM, xrandMX,
                           path3)
        else:
            util.exPath([path3])
    '''bitterness of a frequently changing API'''
Example #13
File: actRec.py Project: vonalan/HAR
def split2(r, actions, stipfile, dbname):
    path2 = util.getPath(dbname + "_origin_r" + str(r) + ".cpkl")

    if os.path.exists(path2):
        print str(datetime.datetime.now()
                  )[:19] + " File: " + path2 + " already exists! "
        splitlist, ctrain, ctest, ctrainM, ctestM, xtrainM, xtestM, ytrainM, ytestM, xrandMX = dataLoader3.cpklload(
            path2)
    else:
        print str(datetime.datetime.now()
                  )[:19] + " splitting datasets ..., round: " + str(r)
        flags, inputSet, targetSet = reload(stipfile, actions, dbname)
        '''*********************************************************'''
        splitlist, ctrain, ctest, ctrainM, ctestM, xtrainM, xtestM, ytrainM, ytestM, xrandMX = splitter.splitDataSet(
            actions, flags, inputSet, targetSet, path2)

    return splitlist, ctrain, ctest, ctrainM, ctestM, xtrainM, xtestM, ytrainM, ytestM, xrandMX
def checkForDumpAvailability(uri):
    """
    Check if the dump file (relative to static/generated) is available yet.
    """
    dumpFilePath = util.getPath(STATIC_GENERATED_FILE_LOCATION) + uri

    if not os.path.exists(dumpFilePath):
        return False
    elif os.stat(dumpFilePath).st_size == 0:
        return False
    else:
        size = os.stat(dumpFilePath).st_size
        for i in range(1, 10):
            time.sleep(1)
            newSize = os.stat(dumpFilePath).st_size
            if newSize != size:
                return False
        return True
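# The size-stability check above generalizes into a small polling helper;
# a minimal sketch (name and defaults hypothetical):
import os
import time

def waitForStableFile(path, checks=10, interval=1):
    """Return True once the file exists, is non-empty, and stops growing."""
    if not os.path.exists(path) or os.stat(path).st_size == 0:
        return False
    size = os.stat(path).st_size
    for _ in range(checks):
        time.sleep(interval)
        if os.stat(path).st_size != size:
            return False
    return True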
Example #15
def bc_learn(bool_evaluate, robot, policy_func, dataset, optim_batch_size=64,
             max_iters=5*1e3, adam_epsilon=1e-5, optim_stepsize=3e-4,
             ckpt_dir=None, log_dir=None, task_name=None, verbose=False):

    val_per_iter = int(max_iters/10)
    pi = policy_func("pi", robot.observation_space, robot.action_space)  # Construct network for new policy
    saver = tf.train.Saver()

    if bool_evaluate:
        saver.restore(tf.get_default_session(), U_.getPath() + '/model/bc.ckpt')
        return pi

    # placeholder
    ob = U.get_placeholder_cached(name="ob")
    ac = pi.pdtype.sample_placeholder([None])
    stochastic = U.get_placeholder_cached(name="stochastic")
    loss = tf.reduce_mean(tf.square(ac-pi.ac))
    var_list = pi.get_trainable_variables()
    adam = MpiAdam(var_list, epsilon=adam_epsilon)
    lossandgrad = U.function([ob, ac, stochastic], [loss]+[U.flatgrad(loss, var_list)])

    U.initialize()
    adam.sync()
    print("Pretraining with Behavior Cloning...")
    for iter_so_far in tqdm(range(int(max_iters))):
        ob_expert, ac_expert = dataset.get_next_batch(optim_batch_size, 'train')
        train_loss, g = lossandgrad(ob_expert, ac_expert, True)
        adam.update(g, optim_stepsize)
        if verbose and iter_so_far % val_per_iter == 0:
            ob_expert, ac_expert = dataset.get_next_batch(-1, 'val')
            val_loss, _ = lossandgrad(ob_expert, ac_expert, True)
            print("Training loss: {}, Validation loss: {}".format(train_loss, val_loss))
            saver.save(tf.get_default_session(), 'model/bc.ckpt')

    return pi
Example #16
def argsparser():
    parser = argparse.ArgumentParser("Tensorflow Implementation of End-effector Learning from Demonstration")
    parser.add_argument('--seed', help='RNG seed', type=int, default=0)
    parser.add_argument('--task', type=str, choices=['train', 'evaluate'], default='train')
    parser.add_argument('--expert_path', type=str, default=U_.getDataPath() + '/obs_acs.npz')
    parser.add_argument('--checkpoint_dir', help='the directory to save model', default=U_.getPath() + '/checkpoint')
    parser.add_argument('--log_dir', help='the directory to save log file', default=U_.getPath() + '/log')
    # Network Configuration (Using MLP Policy)
    parser.add_argument('--policy_hidden_size', type=int, default=100)
    parser.add_argument('--adversary_hidden_size', type=int, default=100)
    # Optimization Configuration
    parser.add_argument('--g_step', help='number of steps to train policy in each epoch', type=int, default=3)
    parser.add_argument('--d_step', help='number of steps to train discriminator in each epoch', type=int, default=1)
    # Algorithms Configuration
    parser.add_argument('--max_kl', type=float, default=0.01)
    parser.add_argument('--policy_entcoeff', help='entropy coefficient of policy', type=float, default=0)
    parser.add_argument('--adversary_entcoeff', help='entropy coefficient of discriminator', type=float, default=1e-3)
    # Training Configuration
    parser.add_argument('--save_per_iter', help='save model every xx iterations', type=int, default=100)
    parser.add_argument('--num_timesteps', help='number of timesteps per episode', type=int, default=5e6)
    # for evaluation
    boolean_flag(parser, 'evaluate', default=False, help='use stochastic/deterministic policy to evaluate')
    boolean_flag(parser, 'stochastic_policy', default=False, help='use stochastic/deterministic policy to evaluate')
    return parser.parse_args()
Example #17
def getOccupancies(sensorId, sys2detect, minFreq, maxFreq, startTime, seconds,
                   sessionId):
    freqRange = msgutils.freqRange(sys2detect, minFreq, maxFreq)
    dataMessages = DbCollections.getDataMessages(sensorId)
    if dataMessages is None:
        return {STATUS: "NOK", STATUS_MESSAGE: "No Data Found"}
    endTime = startTime + seconds
    query = {
        SENSOR_ID: sensorId,
        FREQ_RANGE: freqRange,
        "$and": [{
            TIME: {
                "$gte": startTime
            }
        }, {
            TIME: {
                "$lte": endTime
            }
        }]
    }
    # print query
    cur = dataMessages.find(query)
    if cur is None or cur.count() == 0:
        return {STATUS: "NOK", STATUS_MESSAGE: "No Data Found"}
    occupancyFileName = sessionId + "/" + sensorId + ":" + freqRange + ".occupancy." + str(
        startTime) + "-" + str(seconds) + ".txt"
    if not os.path.exists(
            util.getPath(STATIC_GENERATED_FILE_LOCATION) + sessionId):
        os.mkdir(util.getPath(STATIC_GENERATED_FILE_LOCATION) + sessionId)
    occupancyFileUrl = Config.getGeneratedDataPath() + "/" + occupancyFileName
    occupancyFilePath = util.getPath(
        STATIC_GENERATED_FILE_LOCATION) + occupancyFileName
    occupancyFile = open(occupancyFilePath, "w")
    timeFileName = sessionId + "/" + sensorId + ":" + freqRange + ".occupancy.time." + str(
        startTime) + "-" + str(seconds) + ".txt"
    if not os.path.exists(
            util.getPath(STATIC_GENERATED_FILE_LOCATION) + sessionId):
        os.mkdir(util.getPath(STATIC_GENERATED_FILE_LOCATION) + sessionId)
    timeFileUrl = Config.getGeneratedDataPath() + "/" + timeFileName
    timeFilePath = util.getPath(STATIC_GENERATED_FILE_LOCATION) + timeFileName
    timeFile = open(timeFilePath, "w")
    tm = None
    timeSinceStart = 0
    try:
        for dataMessage in cur:
            del dataMessage["_id"]
            # print dumps(dataMessage,indent = 4)
            nM = DataMessage.getNumberOfMeasurements(dataMessage)
            td = DataMessage.getMeasurementDuration(dataMessage)
            tm = DataMessage.getTimePerMeasurement(dataMessage)
            occupancyStartTime = dataMessage[TIME]
            occupancyEndTime = occupancyStartTime + nM * tm
            occupancyData = msgutils.getOccupancyData(dataMessage)
            secondsPerEntry = float(td) / float(nM)

            if startTime <= occupancyStartTime and endTime >= occupancyEndTime:
                sindex = 0
                findex = nM
            elif startTime > occupancyStartTime and endTime < occupancyEndTime:
                sindex = int(
                    (startTime - occupancyStartTime) / secondsPerEntry)
                findex = int(nM -
                             (occupancyEndTime - endTime) / secondsPerEntry)
            elif startTime >= occupancyStartTime:
                # print "Case 3 ", startTime, occupancyStartTime
                sindex = int(
                    (startTime - occupancyStartTime) / secondsPerEntry)
                findex = nM
            elif endTime <= occupancyEndTime:
                sindex = 0
                findex = int(nM -
                             (occupancyEndTime - endTime) / secondsPerEntry)
            timeSinceStart = timeSinceStart + sindex * tm
            print "sindex/findex", sindex, findex
            for i in range(sindex, findex):
                occupancy = str(int(occupancyData[i]))
                occupancyFile.write(occupancy + "\n")
            for i in range(sindex, findex):
                timeFile.write(str(timeSinceStart) + "\n")
                timeSinceStart = timeSinceStart + tm
        occupancyFile.close()
        timeFile.close()
        return {
            STATUS: "OK",
            OCCUPANCY_FILE_URL: occupancyFileUrl,
            TIME_FILE_URL: timeFileUrl
        }

    except:
        print "Unexpected error:", sys.exc_info()[0]
        print sys.exc_info()
        traceback.print_exc()
        util.logStackTrace(sys.exc_info())
    finally:
        timeFile.close()
        occupancyFile.close()
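# The four sindex/findex branches above amount to intersecting the query
# window with the acquisition window at per-entry resolution; a roughly
# equivalent minimal sketch (names hypothetical):
def entryWindow(startTime, endTime, occupancyStartTime, occupancyEndTime,
                nM, secondsPerEntry):
    sindex = max(0, int((startTime - occupancyStartTime) / secondsPerEntry))
    findex = min(nM, nM - int((occupancyEndTime - endTime) / secondsPerEntry))
    return sindex, findex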
Example #18
def loadGwtSymbolMap():
    symbolMapDir = util.getPath(
        "static/WEB-INF/deploy/spectrumbrowser/symbolMaps/")
    load_symbol_map(symbolMapDir)
    symbolMapDir = util.getPath("static/WEB-INF/deploy/admin/symbolMaps/")
    load_symbol_map(symbolMapDir)
def generateSingleDaySpectrogramAndOccupancyForSweptFrequency(
        sensorId, lat, lon, alt, sessionId, startTime, sys2detect, fstart,
        fstop, subBandMinFreq, subBandMaxFreq, cutoff):
    """
    Generate single day spectrogram and occupancy for SweptFrequency

    Parameters:

    - msg: the data message
    - sessionId: login session id.
    - startTime: absolute start time.
    - sys2detect: the system to detect.
    - fstart: start frequency.
    - fstop: stop frequency
    - subBandMinFreq: min freq of subband.
    - subBandMaxFreq: max freq of subband.
    - cutoff: occupancy threshold.

    """
    try:
        chWidth = Config.getScreenConfig()[CHART_WIDTH]
        chHeight = Config.getScreenConfig()[CHART_HEIGHT]

        locationMessage = DbCollections.getLocationMessages().find_one({
            SENSOR_ID:
            sensorId,
            LAT:
            lat,
            LON:
            lon,
            ALT:
            alt
        })
        if locationMessage is None:
            return {STATUS: NOK, ERROR_MESSAGE: "Location message not found"}

        tz = locationMessage[TIME_ZONE_KEY]
        startTimeUtc = timezone.getDayBoundaryTimeStampFromUtcTimeStamp(
            startTime, tz)
        startMsg = DbCollections.\
            getDataMessages(sensorId).find_one(
                {TIME:{"$gte":startTimeUtc},
                 LOCATION_MESSAGE_ID:str(locationMessage["_id"]),
                 FREQ_RANGE:msgutils.freqRange(sys2detect, fstart, fstop)})
        if startMsg is None:
            util.debugPrint("Not found")
            return {STATUS: NOK, ERROR_MESSAGE: "Data Not Found"}
        if DataMessage.getTime(startMsg) - startTimeUtc > SECONDS_PER_DAY:
            util.debugPrint("Not found - outside day boundary: " +
                            str(startMsg['t'] - startTimeUtc))
            return {
                STATUS: NOK,
                ERROR_MESSAGE: "Not found - outside day boundary."
            }

        msg = startMsg
        sensorId = msg[SENSOR_ID]
        powerValues = msgutils.trimSpectrumToSubBand(msg, subBandMinFreq,
                                                     subBandMaxFreq)
        vectorLength = len(powerValues)
        if cutoff is None:
            cutoff = DataMessage.getThreshold(msg)
        else:
            cutoff = int(cutoff)
        spectrogramFile = sessionId + "/" + sensorId + "." + str(
            startTimeUtc) + "." + str(cutoff) + "." + str(
                subBandMinFreq) + "." + str(subBandMaxFreq)
        spectrogramFilePath = util.getPath(
            STATIC_GENERATED_FILE_LOCATION) + spectrogramFile
        powerVal = np.array(
            [cutoff for i in range(0, MINUTES_PER_DAY * vectorLength)])
        spectrogramData = powerVal.reshape(vectorLength, MINUTES_PER_DAY)
        # artificial power value when sensor is off.
        sensorOffPower = np.transpose(
            np.array([2000 for i in range(0, vectorLength)]))

        prevMessage = msgutils.getPrevAcquisition(msg)

        if prevMessage is None:
            util.debugPrint("prevMessage not found")
            prevMessage = msg
            prevAcquisition = sensorOffPower
        else:
            prevAcquisitionTime = timezone.getDayBoundaryTimeStampFromUtcTimeStamp(
                prevMessage['t'], tz)
            util.debugPrint("prevMessage[t] " + str(prevMessage['t']) +
                            " msg[t] " + str(msg['t']) + " prevDayBoundary " +
                            str(prevAcquisitionTime))
            prevAcquisition = np.transpose(
                np.array(
                    msgutils.trimSpectrumToSubBand(prevMessage, subBandMinFreq,
                                                   subBandMaxFreq)))
        occupancy = []
        timeArray = []
        maxpower = -1000
        minpower = 1000
        # Note that we are starting with the first message.
        count = 1
        while True:
            acquisition = msgutils.trimSpectrumToSubBand(
                msg, subBandMinFreq, subBandMaxFreq)
            occupancyCount = float(
                len(filter(lambda x: x >= cutoff, acquisition)))
            occupancyVal = occupancyCount / float(len(acquisition))
            occupancy.append(occupancyVal)
            minpower = np.minimum(minpower, msgutils.getMinPower(msg))
            maxpower = np.maximum(maxpower, msgutils.getMaxPower(msg))
            if prevMessage['t1'] != msg['t1']:
                # GAP detected so fill it with sensorOff
                sindex = get_index(DataMessage.getTime(prevMessage),
                                   startTimeUtc)
                if get_index(DataMessage.getTime(prevMessage),
                             startTimeUtc) < 0:
                    sindex = 0
                for i in range(
                        sindex,
                        get_index(DataMessage.getTime(msg), startTimeUtc)):
                    spectrogramData[:, i] = sensorOffPower
            elif DataMessage.getTime(prevMessage) > startTimeUtc:
                # Prev message is the same tstart and prevMessage is in the range of interest.
                # Sensor was not turned off.
                # fill forward using the prev acquisition.
                for i in range(
                        get_index(DataMessage.getTime(prevMessage),
                                  startTimeUtc),
                        get_index(msg["t"], startTimeUtc)):
                    spectrogramData[:, i] = prevAcquisition
            else:
                # forward fill from prev acquisition to the start time
                # with the previous power value
                for i in range(
                        0, get_index(DataMessage.getTime(msg), startTimeUtc)):
                    spectrogramData[:, i] = prevAcquisition
            colIndex = get_index(DataMessage.getTime(msg), startTimeUtc)
            spectrogramData[:, colIndex] = acquisition
            timeArray.append(
                float(DataMessage.getTime(msg) - startTimeUtc) / float(3600))
            prevMessage = msg
            prevAcquisition = acquisition
            msg = msgutils.getNextAcquisition(msg)
            if msg is None:
                lastMessage = prevMessage
                for i in range(
                        get_index(DataMessage.getTime(prevMessage),
                                  startTimeUtc), MINUTES_PER_DAY):
                    spectrogramData[:, i] = sensorOffPower
                break
            elif DataMessage.getTime(msg) - startTimeUtc >= SECONDS_PER_DAY:
                if msg['t1'] == prevMessage['t1']:
                    for i in range(
                            get_index(DataMessage.getTime(prevMessage),
                                      startTimeUtc), MINUTES_PER_DAY):
                        spectrogramData[:, i] = prevAcquisition
                else:
                    for i in range(
                            get_index(DataMessage.getTime(prevMessage),
                                      startTimeUtc), MINUTES_PER_DAY):
                        spectrogramData[:, i] = sensorOffPower

                lastMessage = prevMessage
                break
            count = count + 1

        # generate the spectrogram as an image.
        if not os.path.exists(spectrogramFilePath + ".png"):
            fig = plt.figure(figsize=(chWidth, chHeight))
            frame1 = plt.gca()
            frame1.axes.get_xaxis().set_visible(False)
            frame1.axes.get_yaxis().set_visible(False)
            cmap = plt.cm.spectral
            cmap.set_under(UNDER_CUTOFF_COLOR)
            cmap.set_over(OVER_CUTOFF_COLOR)
            dirname = util.getPath(STATIC_GENERATED_FILE_LOCATION) + sessionId
            if maxpower < cutoff:
                maxpower = cutoff
                minpower = cutoff
            if not os.path.exists(dirname):
                os.makedirs(dirname)
            fig = plt.imshow(spectrogramData,
                             interpolation='none',
                             origin='lower',
                             aspect='auto',
                             vmin=cutoff,
                             vmax=maxpower,
                             cmap=cmap)
            util.debugPrint("Generated fig")
            plt.savefig(spectrogramFilePath + '.png',
                        bbox_inches='tight',
                        pad_inches=0,
                        dpi=100)
            plt.clf()
            plt.close()
        else:
            util.debugPrint("File exists - not generating image")

        util.debugPrint("FileName: " + spectrogramFilePath + ".png")

        util.debugPrint("Reading " + spectrogramFilePath + ".png")
        # get the size of the generated png.
        reader = png.Reader(filename=spectrogramFilePath + ".png")
        (width, height, pixels, metadata) = reader.read()

        util.debugPrint("width = " + str(width) + " height = " + str(height))

        # generate the colorbar as a separate image.
        if not os.path.exists(spectrogramFilePath + ".cbar.png"):
            norm = mpl.colors.Normalize(vmin=cutoff, vmax=maxpower)
            fig = plt.figure(figsize=(chWidth * 0.3, chHeight * 1.2))
            ax1 = fig.add_axes([0.0, 0, 0.1, 1])
            mpl.colorbar.ColorbarBase(ax1,
                                      cmap=cmap,
                                      norm=norm,
                                      orientation='vertical')
            plt.savefig(spectrogramFilePath + '.cbar.png',
                        bbox_inches='tight',
                        pad_inches=0,
                        dpi=50)
            plt.clf()
            plt.close()
        else:
            util.debugPrint(spectrogramFilePath + ".cbar.png" +
                            " exists -- not generating")

        localTime, tzName = timezone.getLocalTime(startTimeUtc, tz)

        # step back for 24 hours.
        prevAcquisitionTime = msgutils.getPrevDayBoundary(startMsg)
        nextAcquisitionTime = msgutils.getNextDayBoundary(lastMessage)
        meanOccupancy = np.mean(occupancy)
        maxOccupancy = np.max(occupancy)
        minOccupancy = np.min(occupancy)
        medianOccupancy = np.median(occupancy)

        result = {
            "spectrogram":
            Config.getGeneratedDataPath() + "/" + spectrogramFile + ".png",
            "cbar": Config.getGeneratedDataPath() + "/" + spectrogramFile +
            ".cbar.png",
            "maxPower": maxpower,
            "maxOccupancy": maxOccupancy,
            "minOccupancy": minOccupancy,
            "meanOccupancy": meanOccupancy,
            "medianOccupancy": medianOccupancy,
            "cutoff": cutoff,
            "aquisitionCount": count,
            "minPower": minpower,
            "tStartTimeUtc": startTimeUtc,
            "timeDelta": HOURS_PER_DAY,
            "prevAcquisition": prevAcquisitionTime,
            "nextAcquisition": nextAcquisitionTime,
            "formattedDate": timezone.formatTimeStampLong(startTimeUtc, tz),
            "image_width": float(width),
            "image_height": float(height)
        }

        result["timeArray"] = timeArray
        result["occupancyArray"] = occupancy
        if "ENBW" in lastMessage["mPar"]:
            enbw = lastMessage["mPar"]["ENBW"]
            result["ENBW"] = enbw

        if "RBW" in lastMessage["mPar"]:
            rbw = lastMessage["mPar"]["RBW"]
            result["RBW"] = rbw
        result[STATUS] = OK
        util.debugPrint(result)
        return result
    except:
        print "Unexpected error:", sys.exc_info()[0]
        print sys.exc_info()
        traceback.print_exc()
        util.logStackTrace(sys.exc_info())
        raise
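# Note: the spectrogram above is laid out on a fixed grid of MINUTES_PER_DAY
# columns; get_index is assumed to map an absolute timestamp to its minute
# column relative to the day start, roughly (a sketch, not the original
# helper):
#
#     def get_index(timeStamp, startTimeUtc):
#         return int((timeStamp - startTimeUtc) / 60)
#
# Gaps between acquisitions are then filled column by column with either the
# previous acquisition (sensor on) or the artificial sensorOffPower vector.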
def generateSingleAcquisitionSpectrogramAndOccupancyForFFTPower(
        sensorId, sessionId, threshold, startTime, minFreq, maxFreq, leftBound,
        rightBound):
    util.debugPrint(
        "generateSingleAcquisitionSpectrogramAndOccupancyForFFTPower " +
        " sensorId = " + sensorId + " leftBound = " + str(leftBound) +
        " rightBound = " + str(rightBound))
    dataMessages = DbCollections.getDataMessages(sensorId)
    chWidth = Config.getScreenConfig()[CHART_WIDTH]
    chHeight = Config.getScreenConfig()[CHART_HEIGHT]

    if dataMessages is None:
        return {STATUS: NOK, ERROR_MESSAGE: "Data message collection found "}
    msg = dataMessages.find_one({SENSOR_ID: sensorId, "t": startTime})
    if msg is None:
        return {
            STATUS: NOK,
            ERROR_MESSAGE: "No data message found at " + str(int(startTime))
        }
    if threshold is None:
        cutoff = DataMessage.getThreshold(msg)
    else:
        cutoff = int(threshold)
    startTime = DataMessage.getTime(msg)
    fs = gridfs.GridFS(DbCollections.getSpectrumDb(), msg[SENSOR_ID] + "_data")
    sensorId = msg[SENSOR_ID]
    messageBytes = fs.get(ObjectId(msg[DATA_KEY])).read()
    util.debugPrint("Read " + str(len(messageBytes)))
    spectrogramFile = sessionId + "/" + sensorId + "." + str(
        startTime) + "." + str(leftBound) + "." + str(rightBound) + "." + str(
            cutoff)
    spectrogramFilePath = util.getPath(
        STATIC_GENERATED_FILE_LOCATION) + spectrogramFile
    if leftBound < 0 or rightBound < 0:
        util.debugPrint("Bounds to exlude must be >= 0")
        return {STATUS: NOK, ERROR_MESSAGE: "Invalid bounds specified"}
    measurementDuration = DataMessage.getMeasurementDuration(msg)
    miliSecondsPerMeasurement = float(measurementDuration * 1000) / float(
        DataMessage.getNumberOfMeasurements(msg))
    leftColumnsToExclude = int(leftBound / miliSecondsPerMeasurement)
    rightColumnsToExclude = int(rightBound / miliSecondsPerMeasurement)
    if leftColumnsToExclude + rightColumnsToExclude >= DataMessage.getNumberOfMeasurements(
            msg):
        util.debugPrint("leftColumnToExclude " + str(leftColumnsToExclude) +
                        " rightColumnsToExclude " + str(rightColumnsToExclude))
        return {STATUS: NOK, ERROR_MESSAGE: "Invalid bounds"}
    util.debugPrint("LeftColumns to exclude " + str(leftColumnsToExclude) +
                    " right columns to exclude " + str(rightColumnsToExclude))

    noiseFloor = DataMessage.getNoiseFloor(msg)
    nM = DataMessage.getNumberOfMeasurements(
        msg) - leftColumnsToExclude - rightColumnsToExclude
    n = DataMessage.getNumberOfFrequencyBins(msg)
    locationMessage = msgutils.getLocationMessage(msg)
    lengthToRead = n * DataMessage.getNumberOfMeasurements(msg)
    # Read the power values
    power = msgutils.getData(msg)
    powerVal = np.array(power[n * leftColumnsToExclude:lengthToRead -
                              n * rightColumnsToExclude])
    minTime = float(
        leftColumnsToExclude * miliSecondsPerMeasurement) / float(1000)
    spectrogramData = powerVal.reshape(nM, n)
    maxpower = msgutils.getMaxPower(msg)
    if maxpower < cutoff:
        maxpower = cutoff
    # generate the spectrogram as an image.
    if (not os.path.exists(spectrogramFilePath + ".png")) or\
       DebugFlags.getDisableSessionIdCheckFlag():
        dirname = util.getPath(STATIC_GENERATED_FILE_LOCATION) + sessionId
        if not os.path.exists(dirname):
            os.makedirs(
                util.getPath(STATIC_GENERATED_FILE_LOCATION) + sessionId)
        fig = plt.figure(figsize=(chWidth, chHeight))  # aspect ratio
        frame1 = plt.gca()
        frame1.axes.get_xaxis().set_visible(False)
        frame1.axes.get_yaxis().set_visible(False)
        cmap = plt.cm.spectral
        cmap.set_under(UNDER_CUTOFF_COLOR)
        fig = plt.imshow(np.transpose(spectrogramData),
                         interpolation='none',
                         origin='lower',
                         aspect="auto",
                         vmin=cutoff,
                         vmax=maxpower,
                         cmap=cmap)
        util.debugPrint("Generated fig " + spectrogramFilePath + ".png")
        plt.savefig(spectrogramFilePath + '.png',
                    bbox_inches='tight',
                    pad_inches=0,
                    dpi=100)
        plt.clf()
        plt.close()
    else:
        util.debugPrint("File exists -- not regenerating")

    # generate the occupancy data for the measurement.
    occupancyCount = [0 for i in range(0, nM)]
    for i in range(0, nM):
        occupancyCount[i] = float(
            len(filter(lambda x: x >= cutoff,
                       spectrogramData[i, :]))) / float(n)
    timeArray = [
        int((i + leftColumnsToExclude) * miliSecondsPerMeasurement)
        for i in range(0, nM)
    ]

    # get the size of the generated png.
    reader = png.Reader(filename=spectrogramFilePath + ".png")
    (width, height, pixels, metadata) = reader.read()

    if (not os.path.exists(spectrogramFilePath + ".cbar.png")) or \
       DebugFlags.getDisableSessionIdCheckFlag():
        # generate the colorbar as a separate image.
        norm = mpl.colors.Normalize(vmin=cutoff, vmax=maxpower)
        fig = plt.figure(figsize=(chWidth * 0.2,
                                  chHeight * 1.22))  # aspect ratio
        ax1 = fig.add_axes([0.0, 0, 0.1, 1])
        mpl.colorbar.ColorbarBase(ax1,
                                  cmap=cmap,
                                  norm=norm,
                                  orientation='vertical')
        plt.savefig(spectrogramFilePath + '.cbar.png',
                    bbox_inches='tight',
                    pad_inches=0,
                    dpi=50)
        plt.clf()
        plt.close()

    nextAcquisition = msgutils.getNextAcquisition(msg)
    prevAcquisition = msgutils.getPrevAcquisition(msg)

    if nextAcquisition is not None:
        nextAcquisitionTime = DataMessage.getTime(nextAcquisition)
    else:
        nextAcquisitionTime = DataMessage.getTime(msg)

    if prevAcquisition is not None:
        prevAcquisitionTime = DataMessage.getTime(prevAcquisition)
    else:
        prevAcquisitionTime = DataMessage.getTime(msg)

    tz = locationMessage[TIME_ZONE_KEY]

    timeDelta = DataMessage.getMeasurementDuration(
        msg) - float(leftBound) / float(1000) - float(rightBound) / float(1000)

    meanOccupancy = np.mean(occupancyCount)
    maxOccupancy = np.max(occupancyCount)
    minOccupancy = np.min(occupancyCount)
    medianOccupancy = np.median(occupancyCount)

    result = {
        "spectrogram":
        Config.getGeneratedDataPath() + "/" + spectrogramFile + ".png",
        "cbar":
        Config.getGeneratedDataPath() + "/" + spectrogramFile + ".cbar.png",
        "maxPower": maxpower,
        "cutoff": cutoff,
        "noiseFloor": noiseFloor,
        "minPower": msgutils.getMinPower(msg),
        "maxFreq": DataMessage.getFmax(msg),
        "minFreq": DataMessage.getFmin(msg),
        "minTime": minTime,
        "timeDelta": timeDelta,
        "measurementsPerAcquisition": DataMessage.getNumberOfMeasurements(msg),
        "binsPerMeasurement": DataMessage.getNumberOfFrequencyBins(msg),
        "measurementCount": nM,
        "maxOccupancy": maxOccupancy,
        "minOccupancy": minOccupancy,
        "meanOccupancy": meanOccupancy,
        "medianOccupancy": medianOccupancy,
        "currentAcquisition": DataMessage.getTime(msg),
        "prevAcquisition": prevAcquisitionTime,
        "nextAcquisition": nextAcquisitionTime,
        "formattedDate": timezone.formatTimeStampLong(DataMessage.getTime(msg),
                                                      tz),
        "image_width": float(width),
        "image_height": float(height)
    }
    # Now put in the occupancy data
    result[STATUS] = OK
    util.debugPrint(
        "generateSingleAcquisitionSpectrogramAndOccupancyForFFTPower:returning (abbreviated): "
        + str(result))
    result["timeArray"] = timeArray
    result["occupancyArray"] = occupancyCount

    return result
Example #21
def generatePowerVsTimeForSweptFrequency(sensorId, startTime, freqHz,
                                         sessionId):
    """
    Generate a power vs. time plot for swept-frequency readings.
    The plot is generated for a period of one day.
    """
    chWidth = Config.getScreenConfig()[CHART_WIDTH]
    chHeight = Config.getScreenConfig()[CHART_HEIGHT]

    dataMessages = DbCollections.getDataMessages(sensorId)
    if dataMessages is None:
        return {
            STATUS: NOK,
            ERROR_MESSAGE: "Data Message Collection not found"
        }
    msg = dataMessages.find_one({
        SENSOR_ID: sensorId,
        "t": {
            "$gt": int(startTime)
        }
    })
    (maxFreq, minFreq) = msgutils.getMaxMinFreq(msg)
    locationMessage = msgutils.getLocationMessage(msg)
    timeZone = locationMessage[TIME_ZONE_KEY]
    if freqHz > maxFreq:
        freqHz = maxFreq
    if freqHz < minFreq:
        freqHz = minFreq
    n = int(msg["mPar"]["n"])
    freqIndex = min(
        n - 1,
        int(float(freqHz - minFreq) / float(maxFreq - minFreq) * float(n)))
    powerArray = []
    timeArray = []
    startTime = timezone.getDayBoundaryTimeStampFromUtcTimeStamp(
        msg['t'], timeZone)
    while True:
        data = msgutils.getData(msg)
        powerArray.append(data[freqIndex])
        timeArray.append(float(msg['t'] - startTime) / float(3600))
        nextMsg = msgutils.getNextAcquisition(msg)
        if nextMsg is None:
            break
        elif nextMsg['t'] - startTime > SECONDS_PER_DAY:
            break
        else:
            msg = nextMsg

    plt.figure(figsize=(chWidth, chHeight))
    plt.xlim([0, 23])
    freqMHz = float(freqHz) / 1E6
    title = "Power vs. Time at " + str(freqMHz) + " MHz"
    plt.title(title)
    xlabel = "Time (H) from start of day"
    plt.xlabel(xlabel)
    ylabel = "Signal Power (dBm)"
    plt.ylabel(ylabel)
    plt.scatter(timeArray, powerArray)
    spectrumFile = sessionId + "/" + msg[SENSOR_ID] + "." + str(
        startTime) + "." + str(freqMHz) + ".power.png"
    spectrumFilePath = util.getPath(
        STATIC_GENERATED_FILE_LOCATION) + spectrumFile
    plt.savefig(spectrumFilePath, pad_inches=0, dpi=100)
    plt.clf()
    plt.close()
    retval = {
        STATUS: OK,
        "powervstime": Config.getGeneratedDataPath() + "/" + spectrumFile,
        "timeArray": timeArray,
        "powerValues": powerArray,
        "title": title,
        "xlabel": xlabel,
        "ylabel": ylabel
    }
    return retval
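# The bin lookup above maps a frequency linearly onto the n sweep readings,
# clamped to the last bin; a minimal sketch with a worked example:
def freqToIndex(freqHz, minFreq, maxFreq, n):
    return min(n - 1,
               int(float(freqHz - minFreq) / float(maxFreq - minFreq) * float(n)))

# e.g. minFreq = 700e6, maxFreq = 800e6, n = 100: 750 MHz maps to index 50.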
Example #22
def generatePowerVsTimeForFFTPower(sensorId, startTime, leftBound, rightBound,
                                   freqHz, sessionId):
    """
    Generate a power vs. time plot for FFTPower readings.
    The plot is generated for one acquisition.
    """
    chWidth = Config.getScreenConfig()[CHART_WIDTH]
    chHeight = Config.getScreenConfig()[CHART_HEIGHT]
    msg = DbCollections.getDataMessages(sensorId).find_one({
        SENSOR_ID: sensorId,
        "t": int(startTime)
    })
    if msg is None:
        errorMessage = "Message not found"
        util.debugPrint(errorMessage)
        return {STATUS: NOK, ERROR_MESSAGE: errorMessage}
    n = int(msg["mPar"]["n"])
    measurementDuration = msg["mPar"]["td"]
    miliSecondsPerMeasurement = float(
        measurementDuration * MILISECONDS_PER_SECOND) / float(msg["nM"])
    leftColumnsToExclude = int(leftBound / miliSecondsPerMeasurement)
    rightColumnsToExclude = int(rightBound / miliSecondsPerMeasurement)
    if leftColumnsToExclude + rightColumnsToExclude >= msg["nM"]:
        util.debugPrint("leftColumnToExclude " + str(leftColumnsToExclude) +
                        " rightColumnsToExclude " + str(rightColumnsToExclude))
        return None
    nM = int(msg["nM"]) - leftColumnsToExclude - rightColumnsToExclude
    power = np.array(msgutils.getData(msg))
    lengthToRead = int(n * msg["nM"])
    powerVal = power[n * leftColumnsToExclude:lengthToRead -
                     n * rightColumnsToExclude]
    spectrogramData = np.transpose(powerVal.reshape(nM, n))
    maxFreq = msg["mPar"]["fStop"]
    minFreq = msg["mPar"]["fStart"]
    freqDeltaPerIndex = float(maxFreq - minFreq) / float(n)
    row = int((freqHz - minFreq) / freqDeltaPerIndex)
    util.debugPrint("row = " + str(row))
    if row < 0:
        util.debugPrint("WARNING: row < 0")
        row = 0
    powerValues = spectrogramData[row, :]
    timeArray = [
        float((leftColumnsToExclude + i) * miliSecondsPerMeasurement) /
        float(MILISECONDS_PER_SECOND) for i in range(0, nM)
    ]
    plt.figure(figsize=(chWidth, chHeight))
    plt.xlim([
        float(leftBound) / float(MILISECONDS_PER_SECOND),
        float(measurementDuration * MILISECONDS_PER_SECOND - rightBound) /
        float(MILISECONDS_PER_SECOND)
    ])
    plt.scatter(timeArray, powerValues)

    freqMHz = float(freqHz) / 1E6
    title = "Power vs. Time at " + str(freqMHz) + " MHz"
    plt.title(title)
    spectrumFile = sessionId + "/" + msg[SENSOR_ID] + "." + str(startTime) + "." + str(leftBound) + "." + str(rightBound) + \
        "." + str(freqMHz) + ".power.png"
    spectrumFilePath = util.getPath(
        STATIC_GENERATED_FILE_LOCATION) + spectrumFile
    xlabel = "Time (s) from start of acquistion"
    ylabel = "Signal Power (dBm)"
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    plt.savefig(spectrumFilePath, pad_inches=0, dpi=100)
    plt.clf()
    plt.close()
    retval = {
        "powervstime": Config.getGeneratedDataPath() + "/" + spectrumFile,
        "powerValues": powerValues.tolist(),
        "timeArray": timeArray,
        "title": title,
        "xlabel": xlabel,
        "ylabel": ylabel
    }
    retval[STATUS] = OK
    return retval
Example #23
def getUnitTestFile():
    dirname = util.getPath(STATIC_GENERATED_FILE_LOCATION + "unit-tests")
    return dirname + "/unit-test.json"
Example #24
File: logger.py Project: csdn727/tuangou
import logging
from logging import CRITICAL, DEBUG, ERROR, FATAL, INFO, NOTSET, WARN, WARNING
from util import getPath

log_level = INFO
log_path = getPath()


def singleton(cls):
    instances = {}

    def getinstance():
        if cls not in instances:
            instances[cls] = cls()
        return instances[cls]
    return getinstance


@singleton
class Logger(object):

    def __init__(self):

        logger = logging.getLogger()
        FORMAT = "[%(asctime)s][%(filename)s:%(lineno)d][%(funcName)s][%(levelname)s] %(message)s"
        hdlr = logging.FileHandler(log_path + 'tuangou.log')
        logging.basicConfig(format=FORMAT)
        formatter = logging.Formatter(FORMAT)
        hdlr.setFormatter(formatter)
        logger.addHandler(hdlr)
        logger.setLevel(log_level)
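# A minimal usage sketch for the singleton decorator above: every call to
# Logger() returns the same cached instance, so the file handler is attached
# only once:
#
#     Logger()                     # first call builds and configures the logger
#     assert Logger() is Logger()  # later calls reuse the cached instance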
Example #25
def learn(env,
          policy_func,
          reward_giver,
          expert_dataset,
          rank,
          g_step,
          d_step,
          entcoeff,
          save_per_iter,
          timesteps_per_batch,
          ckpt_dir,
          log_dir,
          task_name,
          gamma,
          lam,
          max_kl,
          cg_iters,
          cg_damping=1e-2,
          vf_stepsize=3e-4,
          d_stepsize=3e-4,
          vf_iters=3,
          max_timesteps=0,
          max_episodes=0,
          max_iters=0,
          callback=None):

    nworkers = MPI.COMM_WORLD.Get_size()
    rank = MPI.COMM_WORLD.Get_rank()
    np.set_printoptions(precision=3)
    # Setup losses and stuff
    # ----------------------------------------
    ob_space = env.observation_space
    ac_space = env.action_space
    pi = policy_func("pi", ob_space, ac_space)
    saver = tf.train.Saver(
        var_list=tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='pi'))
    saver.restore(tf.get_default_session(), U_.getPath() + '/model/bc.ckpt')

    oldpi = policy_func("oldpi", ob_space, ac_space)
    atarg = tf.placeholder(
        dtype=tf.float32,
        shape=[None])  # Target advantage function (if applicable)
    ret = tf.placeholder(dtype=tf.float32, shape=[None])  # Empirical return

    ob = U.get_placeholder_cached(name="ob")
    ac = pi.pdtype.sample_placeholder([None])

    kloldnew = oldpi.pd.kl(pi.pd)
    ent = pi.pd.entropy()
    meankl = tf.reduce_mean(kloldnew)
    meanent = tf.reduce_mean(ent)
    entbonus = entcoeff * meanent

    vferr = tf.reduce_mean(tf.square(pi.vpred - ret))

    ratio = tf.exp(pi.pd.logp(ac) -
                   oldpi.pd.logp(ac))  # advantage * pnew / pold
    surrgain = tf.reduce_mean(ratio * atarg)

    optimgain = surrgain + entbonus
    losses = [optimgain, meankl, entbonus, surrgain, meanent]
    loss_names = ["optimgain", "meankl", "entloss", "surrgain", "entropy"]

    dist = meankl

    all_var_list = pi.get_trainable_variables()
    var_list = [
        v for v in all_var_list
        if v.name.startswith("pi/pol") or v.name.startswith("pi/logstd")
    ]
    vf_var_list = [v for v in all_var_list if v.name.startswith("pi/vff")]
    assert len(var_list) == len(vf_var_list) + 1
    d_adam = MpiAdam(reward_giver.get_trainable_variables())
    vfadam = MpiAdam(vf_var_list)

    get_flat = U.GetFlat(var_list)
    set_from_flat = U.SetFromFlat(var_list)
    klgrads = tf.gradients(dist, var_list)
    flat_tangent = tf.placeholder(dtype=tf.float32,
                                  shape=[None],
                                  name="flat_tan")
    shapes = [var.get_shape().as_list() for var in var_list]
    start = 0
    tangents = []
    for shape in shapes:
        sz = U.intprod(shape)
        tangents.append(tf.reshape(flat_tangent[start:start + sz], shape))
        start += sz
    gvp = tf.add_n([
        tf.reduce_sum(g * tangent)
        for (g, tangent) in zipsame(klgrads, tangents)
    ])  # pylint: disable=E1111
    fvp = U.flatgrad(gvp, var_list)

    assign_old_eq_new = U.function(
        [], [],
        updates=[
            tf.assign(oldv, newv)
            for (oldv,
                 newv) in zipsame(oldpi.get_variables(), pi.get_variables())
        ])
    compute_losses = U.function([ob, ac, atarg], losses)
    compute_lossandgrad = U.function([ob, ac, atarg], losses +
                                     [U.flatgrad(optimgain, var_list)])
    compute_fvp = U.function([flat_tangent, ob, ac, atarg], fvp)
    compute_vflossandgrad = U.function([ob, ret],
                                       U.flatgrad(vferr, vf_var_list))

    @contextmanager
    def timed(msg):
        if rank == 0:
            print(colorize(msg, color='magenta'))
            tstart = time.time()
            yield
            print(
                colorize("done in %.3f seconds" % (time.time() - tstart),
                         color='magenta'))
        else:
            yield

    def allmean(x):
        assert isinstance(x, np.ndarray)
        out = np.empty_like(x)
        MPI.COMM_WORLD.Allreduce(x, out, op=MPI.SUM)
        out /= nworkers
        return out

    U.initialize()
    th_init = get_flat()
    MPI.COMM_WORLD.Bcast(th_init, root=0)
    set_from_flat(th_init)
    d_adam.sync()
    vfadam.sync()
    if rank == 0:
        print("Init param sum", th_init.sum())

    # Prepare for rollouts
    # ----------------------------------------
    seg_gen = traj_segment_generator(pi,
                                     env,
                                     reward_giver,
                                     timesteps_per_batch,
                                     stochastic=True)

    episodes_so_far = 0
    timesteps_so_far = 0
    iters_so_far = 0
    tstart = time.time()
    lenbuffer = deque(maxlen=40)  # rolling buffer for episode lengths
    rewbuffer = deque(maxlen=40)  # rolling buffer for episode rewards
    true_rewbuffer = deque(maxlen=40)

    assert sum([max_iters > 0, max_timesteps > 0, max_episodes > 0]) == 1

    g_loss_stats = stats(loss_names)
    d_loss_stats = stats(reward_giver.loss_name)
    ep_stats = stats(["True_rewards", "Rewards", "Episode_length"])
    # (Pretrained behavior-cloning weights, if any, were restored above.)

    while True:
        if callback: callback(locals(), globals())
        if max_timesteps and timesteps_so_far >= max_timesteps:
            break
        elif max_episodes and episodes_so_far >= max_episodes:
            break
        elif max_iters and iters_so_far >= max_iters:
            break

        # Save model
        if rank == 0 and iters_so_far % save_per_iter == 0 and ckpt_dir is not None:
            fname = os.path.join(ckpt_dir, task_name)
            print('Saving model to', fname)
            try:
                os.makedirs(os.path.dirname(fname))
            except OSError:
                # folder already exists
                pass
            saver = tf.train.Saver()
            saver.save(tf.get_default_session(), fname)

        print("********** Iteration %i ************" % iters_so_far)

        def fisher_vector_product(p):
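            # Damped Fisher-vector product averaged across MPI workers; the
            # cg_damping term keeps the conjugate-gradient solve well-conditioned.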
            return allmean(compute_fvp(p, *fvpargs)) + cg_damping * p

        # ------------------ Update G ------------------
        print("Optimizing Policy...")
        for _ in range(g_step):
            with timed("sampling"):
                seg = next(seg_gen)  # built-in next() works on both Python 2 and 3 generators
            add_vtarg_and_adv(seg, gamma, lam)
            # ob, ac, atarg, ret, td1ret = map(np.concatenate, (obs, acs, atargs, rets, td1rets))
            ob, ac, atarg, tdlamret = seg["ob"], seg["ac"], seg["adv"], seg[
                "tdlamret"]
            vpredbefore = seg[
                "vpred"]  # predicted value function before update
            atarg = (atarg - atarg.mean()) / atarg.std(
            )  # standardized advantage function estimate

            if hasattr(pi, "ob_rms"):
                pi.ob_rms.update(ob)  # update running mean/std for policy

            args = seg["ob"], seg["ac"], atarg
            fvpargs = [arr[::5] for arr in args]

            assign_old_eq_new()  # set old parameter values to new parameter values
            with timed("computegrad"):
                tmp_result = compute_lossandgrad(seg["ob"], seg["ac"], atarg)
                lossbefore = tmp_result[:-1]
                g = tmp_result[-1]
            lossbefore = allmean(np.array(lossbefore))
            g = allmean(g)
            if np.allclose(g, 0):
                print("Got zero gradient. not updating")
            else:
                with timed("cg"):
                    stepdir = cg(fisher_vector_product,
                                 g,
                                 cg_iters=cg_iters,
                                 verbose=rank == 0)
                assert np.isfinite(stepdir).all()
                shs = .5 * stepdir.dot(fisher_vector_product(stepdir))
                lm = np.sqrt(shs / max_kl)
                # print("lagrange multiplier:", lm, "gnorm:", np.linalg.norm(g))
                fullstep = stepdir / lm
                expectedimprove = g.dot(fullstep)
                surrbefore = lossbefore[0]
                stepsize = 1.0
                thbefore = get_flat()
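                # Backtracking line search: halve the step until the surrogate
                # improves without breaking the KL trust-region constraint.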
                for _ in range(10):
                    thnew = thbefore + fullstep * stepsize
                    set_from_flat(thnew)
                    meanlosses = allmean(
                        np.array(compute_losses(seg["ob"], seg["ac"], atarg)))
                    surr = meanlosses[0]
                    kl = meanlosses[1]
                    improve = surr - surrbefore
                    print("Expected: %.3f Actual: %.3f" %
                          (expectedimprove, improve))
                    if not np.isfinite(meanlosses).all():
                        print("Got non-finite value of losses -- bad!")
                    elif kl > max_kl * 1.5:
                        print("violated KL constraint. shrinking step.")
                    elif improve < 0:
                        print("surrogate didn't improve. shrinking step.")
                    else:
                        print("Stepsize OK!")
                        break
                    stepsize *= .5
                else:
                    print("couldn't compute a good step")
                    set_from_flat(thbefore)
                if nworkers > 1 and iters_so_far % 20 == 0:
                    paramsums = MPI.COMM_WORLD.allgather(
                        (thnew.sum(),
                         vfadam.getflat().sum()))  # list of tuples
                    assert all(
                        np.allclose(ps, paramsums[0]) for ps in paramsums[1:])
            with timed("vf"):
                for _ in range(vf_iters):
                    for (mbob, mbret) in dataset.iterbatches(
                        (seg["ob"], seg["tdlamret"]),
                            include_final_partial_batch=False,
                            batch_size=128):
                        if hasattr(pi, "ob_rms"):
                            pi.ob_rms.update(
                                mbob)  # update running mean/std for policy
                        g = allmean(compute_vflossandgrad(mbob, mbret))
                        vfadam.update(g, vf_stepsize)

        print("ev_tdlam_before", explained_variance(vpredbefore, tdlamret))

        # ------------------ Update D ------------------
        print("Optimizing Discriminator...")
        print(fmt_row(13, reward_giver.loss_name))
        ob_expert, ac_expert = expert_dataset.get_next_batch(len(ob))
        batch_size = len(ob) // d_step
        d_losses = []  # list of tuples, each giving the losses for one minibatch
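        # Pair each policy minibatch with an equally sized expert batch and take
        # one MpiAdam step on the discriminator per pair.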
        for ob_batch, ac_batch in tqdm(
                dataset.iterbatches((ob, ac),
                                    include_final_partial_batch=False,
                                    batch_size=batch_size)):
            ob_expert, ac_expert = expert_dataset.get_next_batch(len(ob_batch))
            # update running mean/std for reward_giver
            if hasattr(reward_giver, "obs_rms"):
                reward_giver.obs_rms.update(
                    np.concatenate((ob_batch, ob_expert), 0))
            tmp_result = reward_giver.lossandgrad(ob_batch, ac_batch,
                                                  ob_expert, ac_expert)
            newlosses = tmp_result[:-1]
            g = tmp_result[-1]
            d_adam.update(allmean(g), d_stepsize)
            d_losses.append(newlosses)
        print(fmt_row(13, np.mean(d_losses, axis=0)))

        timesteps_so_far += len(seg['ob'])
        iters_so_far += 1

        print("EpisodesSoFar", episodes_so_far)
        print("TimestepsSoFar", timesteps_so_far)
        print("TimeElapsed", time.time() - tstart)
Example #26
# TODO: function to be refactored into util.py
def driverUpdate():
    browserVersion = browser.capabilities['browserVersion'].rsplit('.', 1)[
        0]  # newer webdrivers use 'browserVersion' as key instead of 'version'
    driverVersion = browser.capabilities['chrome'][
        'chromedriverVersion'].rsplit('.', 1)[0]
    print("Current browser version is: " + browserVersion +
          "\nCurrent driver version is: " + driverVersion)

    if browserVersion != driverVersion:
        # TODO: write function in util.py to download appropriate driver
        os.remove('chromedriver.exe')
        util.getDriver(browserVersion)


util.getPath()  # presumably ensures the chromedriver location is on the search path
# TODO: add exception handling for the case where no pre-existing web driver is found
browser = webdriver.Chrome()
driverUpdate()
browser.get(
    "https://www.beartracks.ualberta.ca/psp/uahebprd/EMPLOYEE/HRMS/c/ZSS_STUDENT_CENTER.ZSS_WATCH_LIST.GBL?FolderPath=PORTAL_ROOT_OBJECT.ZSS_ACADEMICS.ZSS_AC_PLAN.ZSS_WATCH_LIST_GBL_1&IsFolder=false&IgnoreParamTempl=FolderPath%2cIsFolder"
)

username = browser.find_element_by_id('username')
username.send_keys('')  # enter username here

password = browser.find_element_by_id('user_pass')
password.send_keys('')  # enter password here

password.submit()
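
The TODO in driverUpdate asks for a util.getDriver that downloads a matching driver. A
rough sketch, assuming the pre-Chrome-115 chromedriver.storage.googleapis.com release
endpoints are still serving your browser's major version (the helper name follows the
TODO; it does not exist in util.py yet):

import io
import zipfile
import urllib.request

def getDriver(browserVersion):
    """Fetch the chromedriver build matching browserVersion (Windows zip, pre-115 API)."""
    major = browserVersion.split('.')[0]
    latest = urllib.request.urlopen(
        "https://chromedriver.storage.googleapis.com/LATEST_RELEASE_" + major
    ).read().decode().strip()
    archive_bytes = urllib.request.urlopen(
        "https://chromedriver.storage.googleapis.com/" + latest + "/chromedriver_win32.zip"
    ).read()
    # Unpack chromedriver.exe into the working directory, where webdriver.Chrome() finds it.
    zipfile.ZipFile(io.BytesIO(archive_bytes)).extract("chromedriver.exe")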
def generateZipFile(sensorId, startTime, days, sys2detect, minFreq, maxFreq,
                    dumpFileNamePrefix, sessionId):
    util.debugPrint("generateZipFile: " + sensorId + "/" + str(days) + "/" +
                    str(minFreq) + "/" + str(maxFreq) + "/" + sessionId)
    dumpFileName = sessionId + "/" + dumpFileNamePrefix + ".txt"
    zipFileName = sessionId + "/" + dumpFileNamePrefix + ".zip"
    dirname = util.getPath(STATIC_GENERATED_FILE_LOCATION + sessionId)
    if not os.path.exists(dirname):
        os.makedirs(dirname)
    dumpFilePath = util.getPath(STATIC_GENERATED_FILE_LOCATION) + dumpFileName
    zipFilePath = util.getPath(STATIC_GENERATED_FILE_LOCATION) + zipFileName
    if os.path.exists(dumpFilePath):
        os.remove(dumpFilePath)
    if os.path.exists(zipFilePath):
        os.remove(zipFilePath)
    endTime = int(startTime) + int(days) * SECONDS_PER_DAY
    freqRange = msgutils.freqRange(sys2detect, int(minFreq), int(maxFreq))
    # Both time bounds must live under a single "t" key; writing "t" twice in a
    # dict literal silently keeps only the last entry.
    query = {SENSOR_ID: sensorId,
             "t": {"$gte": int(startTime), "$lte": int(endTime)},
             FREQ_RANGE: freqRange}
    firstMessage = DbCollections.getDataMessages(sensorId).find_one(query)

    if firstMessage is None:
        util.debugPrint("No data found")
        return

    locationMessage = msgutils.getLocationMessage(firstMessage)

    if locationMessage is None:
        util.debugPrint("generateZipFileForDownload: No location info found")
        return

    systemMessage = DbCollections.getSystemMessages().find_one({SENSOR_ID: sensorId})
    if systemMessage is None:
        util.debugPrint("generateZipFileForDownload: No system info found")
        return

    dumpFile = open(dumpFilePath, "a")
    zipFile = zipfile.ZipFile(zipFilePath, mode="w")
    try:
        # Write out the system message.
        data = msgutils.getCalData(systemMessage)
        systemMessage[DATA_TYPE] = ASCII
        if CAL in systemMessage and DATA_KEY in systemMessage[CAL]:
            del systemMessage[CAL][DATA_KEY]
        del systemMessage["_id"]
        systemMessageString = json.dumps(systemMessage,
                                         sort_keys=False,
                                         indent=4)
        length = len(systemMessageString)
        dumpFile.write(str(length))
        dumpFile.write("\n")
        dumpFile.write(systemMessageString)
        if data is not None:
            dataString = str(data)
            dumpFile.write(dataString)

        # Write out the location message.
        del locationMessage["_id"]
        locationMessageString = json.dumps(locationMessage,
                                           sort_keys=False,
                                           indent=4)
        locationMessageLength = len(locationMessageString)
        dumpFile.write(str(locationMessageLength))
        dumpFile.write("\n")
        dumpFile.write(locationMessageString)

        # Write out the data messages one at a time
        c = DbCollections.getDataMessages(sensorId).find(query)
        for dataMessage in c:
            data = msgutils.getData(dataMessage)
            # delete fields we don't want to export
            del dataMessage["_id"]
            del dataMessage["locationMessageId"]
            del dataMessage[DATA_KEY]
            del dataMessage["cutoff"]
            dataMessage["Compression"] = "None"
            dataMessageString = json.dumps(dataMessage,
                                           sort_keys=False,
                                           indent=4)
            length = len(dataMessageString)
            dumpFile.write(str(length))
            dumpFile.write("\n")
            dumpFile.write(dataMessageString)
            if dataMessage[DATA_TYPE] == ASCII:
                dumpFile.write(str(data))
            elif dataMessage[DATA_TYPE] == BINARY_INT8:
                for dataByte in data:
                    dumpFile.write(struct.pack('b', dataByte))
            elif dataMessage[DATA_TYPE] == BINARY_INT16:
                for dataWord in data:
                    # 'h' packs a 16-bit int; 'i' would have written 32 bits per word
                    dumpFile.write(struct.pack('h', dataWord))
            elif dataMessage[DATA_TYPE] == BINARY_FLOAT32:
                for dataWord in data:
                    dumpFile.write(struct.pack('f', dataWord))
        zipFile.write(dumpFilePath,
                      arcname=dumpFileNamePrefix + ".txt",
                      compress_type=zipfile.ZIP_DEFLATED)
        zipFile.close()
    except:
        print "Unexpected error:", sys.exc_info()[0]
        print sys.exc_info()
        traceback.print_exc()
        util.logStackTrace(sys.exc_info())
    finally:
        dumpFile.close()
        os.remove(dumpFilePath)
        zipFile.close()
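
For reference, each record in the dump written above is a decimal byte count, a newline,
then that many bytes of pretty-printed JSON, followed immediately by the raw data payload
(not length-prefixed; its size must be derived from fields inside the JSON header). A
minimal sketch of reading one header back, with payload decoding left out since it is
sensor-specific:

import json

def read_json_header(f):
    """Read one length-prefixed JSON header from a dump file; returns None at EOF."""
    length_line = f.readline()
    if not length_line:
        return None
    length = int(length_line.strip())
    return json.loads(f.read(length))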
Example #28
File: run.py  Project: ouracademy/demos
from flask import request, send_file  # imports the snippet relies on


def getPdf():
    data = request.args
    output_file = getPath(data.get('name'))  # getPath is the project's own helper
    return send_file(output_file)
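
One caveat with this handler: the name query parameter flows straight into a file path,
so a crafted value like ../../etc/passwd could escape the intended directory. A hardened
sketch, assuming Flask and werkzeug are available; PDF_DIR and the error responses are
illustrative, not taken from the ouracademy/demos project:

import os
from flask import abort, request, send_file
from werkzeug.utils import secure_filename  # strips path separators and unsafe characters

PDF_DIR = "/var/pdfs"  # hypothetical storage directory

def getPdfSafely():
    name = secure_filename(request.args.get("name", ""))
    if not name:
        abort(400)  # no usable filename supplied
    output_file = os.path.join(PDF_DIR, name)
    if not os.path.isfile(output_file):
        abort(404)
    return send_file(output_file)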