Example no. 1
def get_report():
    if not session.get('logged_in', False):
        return redirect(url_for('user_pages.login'))
    # Create a temporary report file and send it as a download
    generate_report(session)
    try:
        return send_from_directory(os.path.join(os.getcwd(), 'tmp'),
                                   'report.csv',
                                   as_attachment=True)
    except Exception as e:
        print(e)
        return redirect(url_for('index'))
Example no. 2
def report():
    results = utils.load_data()
    trained_data = train(results)
    utils.generate_report(trained_data,
                          original_data=results,
                          title='Stochastic Gradient Descent of FFT',
                          experiment_id="5",
                          experiment_type="regression",
                          description=__doc__,
                          train=train,
                          processing_method="FFT",
                          learning_alg="Stochastic Gradient Descent",
                          num_iterations=10)
Example no. 3
def report():
    results = utils.load_data()
    trained_data = train(results)
    utils.generate_report(trained_data,
                          original_data=results,
                          title='Ridge Regression',
                          experiment_id="10",
                          experiment_type="regression",
                          description=__doc__,
                          train=train,
                          processing_method="None",
                          learning_alg="Ridge Regression",
                          num_iterations=10)
Example no. 4
def report():
    results = utils.load_data()
    trained_data = train(results)
    utils.generate_report(
        trained_data,
        original_data=results,
        title='Ordinary Least Squares Linear Regression of FFT',
        experiment_id="2",
        description=__doc__,
        train=train,
        processing_method="FFT",
        learning_alg="Ordinary Least Squares",
        num_iterations=10)
Example no. 5
def test_generate_report(_MockBinarizer):
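    # The mocked binarizer returns a fixed one-hot matrix for each of its two transform calls.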
    _MockBinarizer.transform.side_effect = [[[0, 0, 0, 1, 0], [1, 1, 0, 0, 0]],
                                            [[0, 0, 0, 1, 0], [0, 0, 0, 0, 1]]]
    input_data = HDFS_RESULT_EXAMPLE.copy()
    input_data['MDEC_DK'] = input_data['MDEC_DK'].apply(
        lambda x: ast.literal_eval(x))
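    # Build the report from the example data and check that it starts like a
    # precision/recall-style classification report.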
    output = generate_report(input_data, ORACLE_RESULT_EXAMPLE, _MockBinarizer)
    assert output.strip().startswith('precision')
Example no. 6
def get_report():
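    # Generate the report in ./tmp and stream it back; fall back to the config page on failure.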
    file_name = generate_report()
    try:
        return send_from_directory(os.path.join(os.getcwd(), 'tmp'),
                                   '{}.txt'.format(file_name))
    except Exception as e:
        print(e)
        return redirect(url_for('config'))
Example no. 7
def report():
    tested_data = json.loads(request.data)
    url = generate_report(tested_data)

    ip = str(tested_data['camInfo']['ip'])
    port = str(tested_data['camInfo']['port'])

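    # Look up the device by IP and port; register it if this is the first report for it.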
    device = Device.query.filter(Device.ip == ip, Device.port == port).first()
    if device is None:
        device = Device(ip=ip,
                        port=port,
                        type='device',
                        name='%s:%s' % (ip, port))
        db.session.add(device)

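    # Persist the test results together with the generated report URL.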
    dbreport = TestResults(device=device,
                           user=g.user,
                           url=url,
                           rawText=json.dumps(tested_data))
    db.session.add(dbreport)
    db.session.commit()

    return jsonify(response=url)
Example no. 8
def report():
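    # Build a report from the posted test data and return its URL as JSON.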
    tested_data = json.loads(request.data)
    url = generate_report(tested_data)
    return jsonify(response=url)
Example no. 9
#            input_data = input_data.reshape(*input_data.shape)
            # Filter the raw window, assign it to a cluster, and pick the matching forecaster.
            input_data_filtered = sc.filter_dataset(pd.DataFrame(input_data, columns=sku.columns)).values
            input_data_cluster, _, _ = sc.predict_class(input_data_filtered.reshape(int(default_section['n_steps']), -1), plot_cluster=False)
            forecaster = forecasters[input_data_cluster]
            # Forecast the last 12 steps, rescale back to the original units, and pad to the window offset.
            pred = forecaster.predict(input_data.reshape(-1, *input_data.shape))[:, -12:, :].reshape(1, -1)
            rescaled_pred = sp.inverse_transform_seq(pred.ravel(), key)
            rescaled_pred_padded = np.r_[[np.nan for _ in range(window_offset)], rescaled_pred]
            partial_results.append(rescaled_pred_padded.ravel())
            if window_offset == 0:
                # Plot actual vs. forecast for the unshifted window and keep the PNG bytes for the report.
                figure = plt.figure(figsize=(8, 6))
                plt.plot(list(range(312, 324)), rescaled_true.ravel(), 'o-', label='original')
                plt.plot(list(range(312, 324)), rescaled_pred.ravel(), 'o-', label='forecast')
                plt.grid()
                plt.xlabel('week')
                plt.ylabel(default_section['forecast_column'])
                plt.title(f'Forecast for `{key}`', loc='right')
                plt.legend()
                imgdata = BytesIO()
                figure.savefig(imgdata, format='png')
                sku_image_data_dict[key] = imgdata
        partial_results.append(rescaled_true.ravel())
        sku_partial_forecasts_dict[key] = partial_results


# %% ReportGenerator evaluation
from utils import generate_report
generate_report(sku_partial_forecasts_dict, sku_image_data_dict)
Example no. 10
def main():
    # FETCH & PARSE DATA
    SUNSET_TIMES = []
    np.set_printoptions(suppress=True)

    # DATA_DA = pandas DataFrame representation of the data
    DATA_DA = pd.read_csv(args.infile,
                          #  skiprows=1,
                          usecols=range(2, 14),
                          converters=dict(((x, utils.timestampToSeconds)
                                           for x in range(0, 14)))
                          )

    # Transform DA from rows/cols to cols/rows
    DATA_DA = DATA_DA.T

    # flatten 2d array to all be in the same array
    DATA_FLAT = np.concatenate(tuple(
        (x[~np.isnan(x)] for x in DATA_DA.to_numpy())
    ))
    XVALS = np.arange(1, 366)

    data = {}

    print("Beginning calculations... hold on! (%d generations, %d predictions)" %
          (args.tries, args.tries*100))

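    # Benchmark each model family: utils.generate_report records error and timing
    # statistics for the wrapped fit over args.tries runs.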
    POLYOPT_DATA = utils.generate_report("Opt poly", DATA_FLAT, XVALS, utils.wrapper(
        gen_opt_poly_model,
        XVALS,
        DATA_FLAT,
        maxDegree=50,
    ), times=args.tries)

    POLYFIX_DEGREE = 15
    POLYFIX_DATA = utils.generate_report("Fixed poly (δ%d)" % POLYFIX_DEGREE, DATA_FLAT, XVALS, utils.wrapper(
        gen_poly_model,
        XVALS,
        DATA_FLAT,
        degree=POLYFIX_DEGREE
    ), times=args.tries)

    TRIG_DATA_1 = utils.generate_report("Trig1", DATA_FLAT, XVALS, utils.wrapper(
        gen_trig_model,
        XVALS,
        DATA_FLAT
    ), times=args.tries)

    TRIG_DATA_2 = utils.generate_report("Trig2", DATA_FLAT, XVALS, utils.wrapper(
        gen_trig_model,
        XVALS[:180],
        DATA_FLAT[:180]
    ), times=args.tries)
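    # Trig2 was fitted on the first 180 days only, so recompute its predictions
    # over the full year for plotting and comparison.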
    TRIG_DATA_2["model"]["predictions"] = TRIG_DATA_2["model"]["model"](XVALS)

    COMBINED_DATA = utils.generate_report("Combined", DATA_FLAT, XVALS, utils.wrapper(
        gen_combined_model,
        XVALS,
        DATA_FLAT,
        maxDegree=50,
    ), times=args.tries)

    data["Opt poly (δ%d)" % POLYOPT_DATA["model"]["degree"]] = POLYOPT_DATA
    data["Fix poly (δ%d)" % POLYFIX_DATA["model"]["degree"]] = POLYFIX_DATA
    data["Cos1"] = TRIG_DATA_1
    data["Cos2"] = TRIG_DATA_2
    data["Combined (δ%d)" % COMBINED_DATA["model"]["poly"]
         ["degree"]] = COMBINED_DATA

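    # One table row per metric: a right-aligned label column plus a centred cell
    # per model, each wrapped in a pair of colour-code placeholders.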
    TABLE_ROW = "%s{:>22}%s | " + " | ".join(("%s{:^14}%s",)*len(data.keys()))
    TRANSLATION_DICT = {
        "differencesquares": ("Sum of difference^2", int),
        "incorrectvalues": ("# of incorrect values", len),
        "generation_time": ("Generation time (ms)", lambda x: "{:.4f}".format(x*1000)),
        "prediction_time": ("Prediction time (ms)", lambda x: "{:.4f}".format(x*1000))
    }

    print("Calculations complete; final data:\n\n")

    print(TABLE_ROW.format("", *(k for k in data.keys())) %
          ((utils.ACCENT_BR, utils.RESET)*(len(data.keys())+1)))
    for key, translation in TRANSLATION_DICT.items():
        values = [value[key] for value in data.values()]

        # Colour the metric label; the value cells get empty colour codes.
        formatted = TABLE_ROW.format(
            translation[0], *map(translation[1], values)
        ) % (utils.ACCENT_BR, utils.RESET, *("",)*(2*len(data.keys())))

        print(formatted)

    # graph results

    COLOURS = ["firebrick", "royalblue",
               "darkorchid", "darkslategrey", "forestgreen"]

    for i, (key, value) in enumerate(data.items()):
        plt.plot(XVALS, value["model"]["predictions"],
                 color=COLOURS[i], label=key)

    plt.plot(XVALS, DATA_FLAT, "o", color="black",
             label="Actual data", ms=0.7)

    # inspiration from https://stackoverflow.com/questions/26646362/numpy-array-is-not-json-serializable

    class SafeEncoder(json.JSONEncoder):
        """JSON encoder for our data so that everything goes swellingly.
        Don't worry about this.
        """

        def default(self, obj):
            if isinstance(obj, np.ndarray):
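                # Wrap the array text in markers so the surrounding quotes can be
                # stripped after dumping, leaving a plain JSON list.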
                return "%^" + str(obj.tolist()) + "%^"
            elif callable(obj):  # if is function
                return repr(obj)
            return json.JSONEncoder.default(self, obj)

    if args.outfile:
        with open(args.outfile, "w") as f:
            # recursively delete certain keys i.e. predictions

            def recursivelyRemove(obj, blacklist=[]):
                if isinstance(obj, dict):
                    for key in list(obj.keys()):
                        if key in blacklist:
                            del obj[key]
                        else:
                            obj[key] = recursivelyRemove(
                                obj[key], blacklist=blacklist)
                return obj

            outputData = recursivelyRemove(data, blacklist=[
                "predictions",
                "trig",
                "poly"
            ])

            outputJSON = json.dumps(outputData, cls=SafeEncoder,
                                    indent=4, sort_keys=True, ensure_ascii=False)

            outputJSON = outputJSON.replace("\"%^", "").replace("%^\"", "")
            f.write(outputJSON)

        print("\n"*2 + Fore.YELLOW + "  Equations and data written to `%s`." %
              args.outfile)

    if args.graph:
        plt.legend()
        plt.show()