Example #1
def make_exchange_body(name, ex, C, SU, TU, nu):
    """Build table rows of last prices for one exchange.

    name: display name, ex: exchange object, C: currencies to tabulate,
    SU: supported units, TU: target units, nu: normalisation unit.
    """
    rows = []
    for u in ["KRW", "BTC", "ETH", "USDT"]:
        if u not in SU or not isinstance(ex, get_type_u(u)):
            continue
        # Raw last prices quoted in unit u.
        rows.append(["%s (%s)" % (name, u)] + util.make_row(
            C, get_u_last(ex, u), currencyOpt=(u in ("KRW", "USDT"))))
        # Prices normalised from u into nu when u is also a target unit.
        if u != nu and u in TU:
            NP = make_NP(ex=ex, tu=u, nu=nu)
            rows.append(["%s (%s->%s)" % (name, u, nu)] + util.make_row(
                C, NP, roundOpt=8, currencyOpt=(nu in ("KRW", "USDT"))))
    return rows
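The helpers used above (util.make_row, get_type_u, get_u_last, make_NP) belong to the surrounding project and are not shown here. The call below is only a hedged sketch; the exchange object and every argument value are made up for illustration.

# Illustrative only: `upbit` stands in for a project-specific exchange object.
rows = make_exchange_body(
    name="Upbit",
    ex=upbit,
    C=["BTC", "ETH", "XRP"],   # currencies rendered as columns
    SU=["KRW", "USDT"],        # units this exchange supports
    TU=["KRW"],                # units eligible for normalisation
    nu="KRW",                  # unit every price is normalised into
)
for row in rows:
    print(row)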
Example #2
def make_NRPR_body(EX, RP, C, TU, nu, SA):
    """Build rows comparing normalised prices across exchange pairs.

    EX: exchanges, RP: NRPR pairs, C: currencies, TU: target units,
    nu: norm unit, SA: slack alarm.
    """
    rows = []
    for tu in TU:
        for rp in RP:
            # Skip comparing an exchange against itself in the same unit.
            if rp[0] == rp[1] and tu == nu:
                continue
            NP_0 = make_NP(ex=rp[0], tu=tu, nu=nu)
            NP_1 = make_NP(ex=rp[1], tu=nu, nu=nu)
            if NP_0 is None or NP_1 is None:
                continue
            if not isinstance(rp[0], get_type_u(tu)):
                continue
            # Split the price ratio into premium (+) and discount (-) rows.
            diff = util.get_diff_last(NP_0, NP_1)
            rows.append(
                ["%s(%s)/%s(%s) (+)" % (rp[0].symbol, tu, rp[1].symbol, nu)] +
                util.make_row(C, {k: v for k, v in diff.items() if v > 100}, 4))
            rows.append(
                ["%s(%s)/%s(%s) (-)" % (rp[0].symbol, tu, rp[1].symbol, nu)] +
                util.make_row(C, {k: v for k, v in diff.items() if v <= 100}, 4))
    return rows
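As above, make_NP, util.get_diff_last and util.make_row come from the surrounding project. The call below is a hedged sketch, assuming exchange objects expose a .symbol attribute as the code implies; all values are hypothetical.

# Illustrative only: `upbit` and `binance` stand in for project-specific exchange objects.
rows = make_NRPR_body(
    EX=[upbit, binance],
    RP=[(upbit, binance)],   # exchange pairs to compare
    C=["BTC", "ETH"],        # currencies rendered as columns
    TU=["KRW", "USDT"],      # target units to normalise from
    nu="KRW",                # unit prices are normalised into
    SA=None,                 # slack alarm handle (not used in this excerpt)
)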
Example #3
    def make_ts(cursor):
        # Yield one model-ready input row per database record.
        for row in cursor:
            yield make_row(row, location, ds, si)
def run_model(coll, data, location, si, ds, count):
    """Run an HTM model over `data` and insert likely anomalies into `coll`."""
    name = 'measured_flow'
    MODEL_PARAMS = model_params3
    # MODEL_PARAMS['modelParams']['sensorParams']['encoders'][name] = input_encoder
    shifter = InferenceShifter()
    # classifier_encoder = {k: v for k, v in input_encoder.items()}
    # classifier_encoder['classifierOnly'] = True
    # classifier_encoder['name'] = '_classifierInput'
    # MODEL_PARAMS['modelParams']['sensorParams']['encoders']['_classifierInput'] = classifier_encoder
    model = ModelFactory.create(MODEL_PARAMS)
    model.enableInference({'predictedField': 'measured_flow'})
    readings_per_week = 288 * 7  # default: 288 five-minute readings per day, 7 days
    if ds == 'sm':
        # Approximate the readings per week from one stored week of data.
        sdate = datetime(2012, 3, 1)
        readings_per_week = coll.database['scats_sm_small'].find({
            "site_no": location['site_no'],
            "strategic_input": int(si),
            "datetime": {
                "$gte": sdate,
                "$lte": sdate + timedelta(days=7)
            }
        }).count()
    print("Readings per week for {}: {}".format(ds, readings_per_week))
    anomaly_likelihood_helper = anomaly_likelihood.AnomalyLikelihood(
        historicWindowSize=readings_per_week)

    # output = nupic_anomaly_output.NuPICPlotOutput(location['site_no'])
    prog = tqdm(total=count, desc="HTM")
    for row in data:
        to_process = make_row(row, location, ds, si)

        result = model.run(to_process)
        # Align the prediction with the row it was made for.
        result = shifter.shift(result)
        raw_anomaly_score = result.inferences['anomalyScore']
        likelihood = anomaly_likelihood_helper.anomalyProbability(
            to_process['measured_flow'], raw_anomaly_score,
            to_process['datetime'])
        pred = result.inferences["multiStepBestPredictions"][1]
        # output.write(to_process['datetime'], to_process['measured_flow'], pred, raw_anomaly_score)
        # print("observed:", last, "predicted:", pred)
        # last = to_process['measured_flow']
        # print("raw anomaly:", raw_anomaly_score, "likelihood:", likelihood)
        if likelihood >= threshold:  # `threshold` is defined elsewhere in the module
            try:
                doc = {
                    'site_no': location['site_no'],
                    'strategic_input': si,
                    'algorithm': 'HTM',
                    'datetime': row['datetime'],
                    'ds': ds,
                    'other': {
                        'likelihood': float(likelihood),
                        'score': float(raw_anomaly_score)
                    }
                }
                # print(doc)
                coll.insert_one(doc)
            except Exception as e:
                print(e)
        prog.update()
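This excerpt leans on NuPIC plus a few imports that are not shown, and on project-level names (model_params3, make_row, threshold). Below is a hedged sketch of the missing pieces and a call, assuming NuPIC 1.x module paths, a pymongo version old enough to support Cursor.count() (as the original code does), and entirely hypothetical database and site values.

from datetime import datetime, timedelta

from pymongo import MongoClient
from tqdm import tqdm
from nupic.frameworks.opf.model_factory import ModelFactory   # NuPIC 1.x path
from nupic.data.inference_shifter import InferenceShifter
from nupic.algorithms import anomaly_likelihood

threshold = 0.9999  # assumed module-level anomaly-likelihood cutoff

client = MongoClient()
coll = client["scats"]["anomalies"]                  # hypothetical collection
location = {"site_no": 100}                          # hypothetical site document
source = coll.database["scats_sm_small"].find({"site_no": location["site_no"]})
run_model(coll, source, location, si=1, ds="sm", count=source.count())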