Example No. 1
def report1(request):
    keys = []
    values = []
    DataProcessor.getInstance().loadAndProcess(keys, values, report_type=1)
    image_base64 = createBarChart(
        keys, values, 'App Count',
        'Google Play App Store Count By Category > 400')
    return render(request, 'analyzer/main.html', {
        'name': "Jon",
        'date': datetime.now(),
        'image_base64': image_base64,
    })
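createBarChart and createPieChart are not shown in these examples; they are expected to return the rendered chart as a base64-encoded PNG that the template can embed. Below is a minimal sketch of such a helper, assuming matplotlib, treating the third and fourth arguments as the axis label and chart title, and assuming the 'rotation' entry in configs rotates the x-tick labels, mirroring the calls in the examples.

# Hypothetical sketch of the chart helper these views rely on (not the project's actual code).
import base64
import io

import matplotlib
matplotlib.use('Agg')  # render off-screen; no display is available in a web process
import matplotlib.pyplot as plt


def createBarChart(keys, values, label, title, configs=None):
    configs = configs or {}
    fig, ax = plt.subplots(figsize=(10, 6))
    ax.bar(keys, values)
    ax.set_ylabel(label)
    ax.set_title(title)
    # The views pass configs={'rotation': 90}; assume it rotates the x-tick labels.
    ax.tick_params(axis='x', labelrotation=configs.get('rotation', 0))
    buf = io.BytesIO()
    fig.savefig(buf, format='png', bbox_inches='tight')
    plt.close(fig)
    return base64.b64encode(buf.getvalue()).decode('utf-8')

The returned string can then be embedded in the template with a data URI, e.g. an img tag whose src is "data:image/png;base64,{{ image_base64 }}".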
Example No. 2
def case2(request):
    keys = []
    values = []
    DataProcessor.getInstance().loadAndProcess(keys, values, report_type=5)
    image_base64 = createPieChart(keys, values, 'India trade import 2010-2018')
    keys.clear()
    values.clear()
    DataProcessor.getInstance().loadAndProcess(keys, values, report_type=5)
    config = {'rotation': 90}
    image_base64_1 = createBarChart(keys,
                                    values,
                                    'Total(millions $USD)',
                                    'India trade import 2010-2018',
                                    configs=config)
    keys.clear()
    values.clear()
    DataProcessor.getInstance().loadAndProcess(keys, values, report_type=6)
    image_base64_2 = createPieChart(keys, values,
                                    'India trade export 2010-2018')
    keys.clear()
    values.clear()
    DataProcessor.getInstance().loadAndProcess(keys, values, report_type=6)
    config = {'rotation': 90}
    image_base64_3 = createBarChart(keys,
                                    values,
                                    'Total(millions $USD)',
                                    'India trade export 2010-2018',
                                    configs=config)
    return render(
        request, 'analyzer/case2.html', {
            'report5a': image_base64,
            'report5b': image_base64_1,
            'report6a': image_base64_2,
            'report6b': image_base64_3,
        })
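DataProcessor is used as a singleton whose loadAndProcess fills the caller's keys and values lists in place for a given report_type. A minimal sketch of that pattern, with placeholder data standing in for the real datasets:

# Hypothetical sketch of the singleton access pattern (the real loadAndProcess would
# read and aggregate the dataset selected by report_type).
class DataProcessor:
    _instance = None

    @classmethod
    def getInstance(cls):
        if cls._instance is None:
            cls._instance = cls()
        return cls._instance

    def loadAndProcess(self, keys, values, report_type):
        sample = {'Category A': 120, 'Category B': 80}  # placeholder aggregation
        keys.extend(sample.keys())
        values.extend(sample.values())

Because the lists are mutated in place, the views clear them with keys.clear() and values.clear() before reusing them for the next report.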
Example No. 3
def case3(request):
    keys = []
    values = []
    DataProcessor.getInstance().loadAndProcess(keys, values, report_type=4)
    image_base64 = createPieChart(keys, values, 'Oakland Crime Rate 2011-2016')
    keys.clear()
    values.clear()
    DataProcessor.getInstance().loadAndProcess(keys, values, report_type=4)
    config = {'rotation': 90}
    image_base64_1 = createBarChart(keys,
                                    values,
                                    'Count',
                                    'Oakland Crime Rate 2011-2016',
                                    configs=config)
    return render(request, 'analyzer/case3.html', {
        'report4a': image_base64,
        'report4b': image_base64_1,
    })
Example No. 4
def submit(request):
    data = {}
    if request.method == 'POST':
        keys = []
        values = []
        DataProcessor.getInstance().loadAndProcess(keys, values, report_type=7)
        image_base64 = createBarChart(keys, values, 'Company',
                                      'Average Employee Rating')
        data = {
            "title": request.POST.get("title", "defaultTitle"),
            "description": request.POST.get("description",
                                            "defaultDescription"),
            "news": request.POST.get("news", "defaultNews"),
            "dataSet": request.POST.get("dataSet", "defaultDataset"),
            "bar": request.POST.get("bar", "defaultBar"),
            "pie": request.POST.get("pie", "defaultPie"),
            "report1": image_base64
        }
    return render(request, 'analyzer/new.html', data)
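The submit view only builds its context on POST, falling back to the listed defaults for any missing form field. With Django settings configured, it can be exercised outside a browser using Django's RequestFactory; the form values here are illustrative:

# Driving the submit view with Django's test RequestFactory.
from django.test import RequestFactory

factory = RequestFactory()
request = factory.post('/submit/', {
    'title': 'Company ratings',
    'description': 'Average employee rating per company',
})
response = submit(request)  # renders 'analyzer/new.html' with the chart in the context
print(response.status_code)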
Example No. 5
def case1(request):
    keys = []
    values = []
    DataProcessor.getInstance().loadAndProcess(keys, values, report_type=1)
    image_base64 = createBarChart(
        keys, values, 'App Count',
        'Google Play App Store Count By Category > 400')
    keys.clear()
    values.clear()
    DataProcessor.getInstance().loadAndProcess(keys, values, report_type=2)
    config = {'rotation': 90}
    image_base64_1 = createBarChart(
        keys,
        values,
        'App Count',
        'Google Play App Store Count By Category < 400',
        configs=config)
    return render(request, 'analyzer/case1.html', {
        'report1': image_base64,
        'report2': image_base64_1
    })
Example No. 6
def report6(request):
    keys = []
    values = []
    DataProcessor.getInstance().loadAndProcess(keys, values, report_type=6)
    image_base64 = createPieChart(keys, values, 'India trade export 2010-2018')
    keys.clear()
    values.clear()
    DataProcessor.getInstance().loadAndProcess(keys, values, report_type=6)
    config = {'rotation': 90}
    image_base64_1 = createBarChart(keys,
                                    values,
                                    'Total(millions $USD)',
                                    'India trade export 2010-2018',
                                    configs=config)
    return render(
        request, 'analyzer/main1.html', {
            'name': "Jon",
            'date': datetime.now(),
            'image_base64': image_base64,
            'image_base64_1': image_base64_1,
        })
Example No. 7
def report4(request):
    keys = []
    values = []
    DataProcessor.getInstance().loadAndProcess(keys, values, report_type=4)
    image_base64 = createPieChart(keys, values, 'Oakland Crime Rate 2011-2016')
    keys.clear()
    values.clear()
    DataProcessor.getInstance().loadAndProcess(keys, values, report_type=4)
    config = {'rotation': 90}
    image_base64_1 = createBarChart(keys,
                                    values,
                                    'Count',
                                    'Oakland Crime Rate 2011-2016',
                                    configs=config)
    return render(
        request, 'analyzer/main1.html', {
            'name': "Jon",
            'date': datetime.now(),
            'image_base64': image_base64,
            'image_base64_1': image_base64_1,
        })
Example No. 8
def report3(request):
    keys = []
    values = []
    DataProcessor.getInstance().loadAndProcess(keys, values, report_type=1)
    image_base64 = createBarChart(
        keys, values, 'App Count',
        'Google Play App Store Count By Category > 400')
    keys.clear()
    values.clear()
    DataProcessor.getInstance().loadAndProcess(keys, values, report_type=2)
    config = {'rotation': 90}
    image_base64_1 = createBarChart(
        keys,
        values,
        'App Count',
        'Google Play App Store Count By Category < 400',
        configs=config)
    return render(
        request, 'analyzer/main1.html', {
            'name': "Jon",
            'date': datetime.now(),
            'image_base64': image_base64,
            'image_base64_1': image_base64_1,
        })
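The report and case views above repeat the same load-then-chart sequence with different report types and chart functions. A small helper could remove that duplication; this is a sketch under the same assumptions as above, not the project's code:

# Hypothetical helper collapsing the repeated load-then-chart sequence.
def build_report(report_type, chart_fn, *chart_args, **chart_kwargs):
    keys, values = [], []
    DataProcessor.getInstance().loadAndProcess(keys, values, report_type=report_type)
    return chart_fn(keys, values, *chart_args, **chart_kwargs)


# Example: the two charts in report4 would become
# pie = build_report(4, createPieChart, 'Oakland Crime Rate 2011-2016')
# bar = build_report(4, createBarChart, 'Count', 'Oakland Crime Rate 2011-2016',
#                    configs={'rotation': 90})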
Example No. 9
def main():
    # Optional interrupt-handling fix, left disabled in the original code.
    #FixInterruptHandling().fix()

    if (Options.PrepareFeatures):
        dataPrepared = DataPrepare.run(Options.OutputFeaturesPath,
                                       Options.InputFeaturesPath,
                                       Options.TrainDataStartDate,
                                       Options.FuturePredictionStartDate,
                                       Options.FuturePredictionEndDate())

    # Load the output (target) features for the training and future periods.
    output_data_dict_train, output_data_dict_future = DataReader.get_output_features()

    # Load the input features for the training and future periods.
    input_data_dict_train, input_data_dict_future = DataReader.get_input_features()

    # Number of input feature series; used later as the per-step feature length.
    Options.InputFeatureSize = len(input_data_dict_train.values())

    # Plotting helper used throughout the run.
    plotter = Plotter()
    #plotter.plot_date_price(date_values, price_values, Options.RegressionAssetName)

    wave_no = 0
    input1 = list(input_data_dict_train.values())[wave_no]
    #plotter.plot_xy(date_values, input1, list(input_data_dict.keys())[wave_no], 'Dates', 'Power')

    # Construct PDS

    input_pds = pd.DataFrame.from_dict(input_data_dict_train)
    real_close_pds = pd.DataFrame(
        data=output_data_dict_train[Options.RealClosePricesKey])
    output_pds = pd.DataFrame(
        data=output_data_dict_train[Options.OutputValuesKey])
    date_values = output_data_dict_train[Options.OutputDateValuesKey]

    future_input_pds = pd.DataFrame.from_dict(input_data_dict_future)
    future_output_pds, future_real_close_pds, future_date_values = DataReader.get_future_output_features(
        output_data_dict_future)

    # End Construct PDS

    # Data Pre Processor

    pre_processor = DataPreProcessor()
    #output_pds = pre_processor.to_wavelet(output_pds)
    #output_pds = pre_processor.filter_by_savgol(output_pds, 51, 3)

    processor = DataProcessor(input_pds, output_pds, date_values,
                              Options.TrainDataSize, Options.TestDataSize)

    # Lazy-load the neural network library so it is only imported when a model is built.
    from core.nnmodel import NNModel

    ### ONE BY ONE START
    if (Options.DataWindow == Options.DataWindowType.OneByOne):

        x_train_o, y_train_o, date_train = processor.get_one_by_one_data(
            'train',
            seq_len=1,
            multiply_y_vector=Options.MultiplyDataByCustomFactor,
            normalise=Options.NormaliseData)
        x_val_o, y_val_o, date_val = processor.get_one_by_one_data(
            'val',
            seq_len=1,
            multiply_y_vector=Options.MultiplyDataByCustomFactor,
            normalise=Options.NormaliseData)
        model = NNModel()

        model.build_one_by_one_model()
        callbacks = model.get_callbacks(Options.KerasOneByOneEpochs)
        model.fit_one_by_one(x_train_o, y_train_o, x_val_o, y_val_o,
                             Options.KerasOneByOneBatchSize,
                             Options.KerasOneByOneEpochs, callbacks)
        y_train_o_pred = model.predict(x_train_o, 'x_train_o')
        model.save_pred_to_csv(date_train, y_train_o_pred,
                               Options.PredictionSaveDirs(),
                               Options.GetPredictionSaveFileName('train'))

        plotter.plot_different_scale(y_train_o_pred,
                                     y_train_o,
                                     date_train,
                                     y_label1="Train Prediction",
                                     y_label2="Train")

        y_val_pred = model.predict(x_val_o, 'x_val_o')
        model.save_pred_to_csv(date_val, y_val_pred,
                               Options.PredictionSaveDirs(),
                               Options.GetPredictionSaveFileName('val'))
        plotter.plot_different_scale(y_val_pred,
                                     y_val_o,
                                     date_val,
                                     y_label1="Validation Prediction",
                                     y_label2="Validation")

        processor2 = DataProcessor(input_pds, real_close_pds, date_values,
                                   Options.TrainDataSize,
                                   Options.TestDataSize)  #DELETE THIS
        temp, y_test_real, date_real = processor2.get_one_by_one_data(
            'test',
            seq_len=1,
            multiply_y_vector=Options.MultiplyDataByCustomFactor,
            normalise=Options.NormaliseData)  #DELETE THIS

        x_test_o, y_test_o, date_test = processor.get_one_by_one_data(
            'test',
            seq_len=1,
            multiply_y_vector=Options.MultiplyDataByCustomFactor,
            normalise=Options.NormaliseData)
        y_test_o_pred = model.predict(x_test_o, 'x_test_o')
        model.save_pred_to_csv(date_test, y_test_o_pred,
                               Options.PredictionSaveDirs(),
                               Options.GetPredictionSaveFileName('test'))

        plotter.plot_different_scale(y_test_o_pred,
                                     y_test_real,
                                     y_label1="Test Prediction",
                                     y_label2="Real")
        plotter.plot_different_scale(y_test_o_pred,
                                     y_test_o,
                                     date_real,
                                     y_label1="Test Prediction",
                                     y_label2="Real RPO")

        future_pred = model.predict(future_input_pds, 'future_input_pds')
        plotter.plot_different_scale(
            future_pred,
            future_real_close_pds,
            np.array([]),
            y_label1="Future Prediction",
            y_label2="Future Real")  #np.array(future_date_values) throws ex
        model.save_pred_to_csv(np.array(future_date_values), future_pred,
                               Options.PredictionSaveDirs(),
                               Options.GetPredictionSaveFileName('future'))

    ### ONE BY ONE END

    if (Options.DataWindow == Options.DataWindowType.OneByOneTelosSearch):
        from core.nntelossearch import NNTelosSearch

        telosSearch = NNTelosSearch()
        x_train_o, y_train_o = processor.get_train_data_OBO(
            seq_len=1, normalise=Options.NormaliseData)
        telosSearch.minimize(x_train_o, y_train_o, [], [])

    #TELOS END

    if (Options.DataWindow == Options.DataWindowType.WindowBatch):

        #x_train, y_train = processor.get_train_data(seq_len=Options.WindowSequenceLength, normalise = Options.NormaliseData)
        #y_train = y_train * Options.MultiplyDataByCustomFactor
        #x_test, y_test = processor.get_test_data(seq_len = Options.WindowSequenceLength, normalise = Options.NormaliseData)
        #y_test = y_test * Options.MultiplyDataByCustomFactor

        model = NNModel()
        model.build_windowed_batch_model()
        callbacks = model.get_callbacks(Options.KerasWindowedEpochs)

        x_train, y_train, date_train = processor.get_window_train_data(
            arr_type='train',
            seq_len=Options.WindowSequenceLength,
            step=Options.WindowShiftStep,
            feature_len=Options.InputFeatureSize,
            multiply_y_vector=1)

        x_val, y_val, date_val = processor.get_window_train_data(
            arr_type='val',
            seq_len=Options.WindowSequenceLength,
            step=Options.WindowShiftStep,
            feature_len=Options.InputFeatureSize,
            multiply_y_vector=1)

        x_test, y_test, date_test = processor.get_window_train_data(
            arr_type='test',
            seq_len=Options.WindowSequenceLength,
            step=Options.WindowShiftStep,
            feature_len=Options.InputFeatureSize,
            multiply_y_vector=1)

        if (Options.FlattenWindowVector):
            x_train = np.asarray(x_train, dtype=np.float32).reshape(
                -1, Options.WindowSequenceLength * Options.InputFeatureSize)
            x_val = np.asarray(x_val, dtype=np.float32).reshape(
                -1, Options.WindowSequenceLength * Options.InputFeatureSize)
            x_test = np.asarray(x_test, dtype=np.float32).reshape(
                -1, Options.WindowSequenceLength * Options.InputFeatureSize)

        model.fit_one_by_one(x_train, y_train, x_val, y_val,
                             Options.KerasWindowedBatchSize,
                             Options.KerasWindowedEpochs, callbacks)

        if (Options.FlattenWindowVector):
            plotter.plot_different_scale(model.predict(x_train, 'x_train'),
                                         y_train,
                                         date_train,
                                         y_label1="Train Prediction",
                                         y_label2="Train")
            plotter.plot_different_scale(model.predict(x_val, 'x_val'),
                                         y_val,
                                         date_val,
                                         y_label1="Validation Prediction",
                                         y_label2="Validation")
            plotter.plot_different_scale(model.predict(x_test, 'x_test'),
                                         y_test,
                                         date_test,
                                         y_label1="Test Prediction",
                                         y_label2="Test")
        else:
            train_pred_point_by_point = model.predict_point_by_point(x_train)
            plotter.plot_different_scale(train_pred_point_by_point,
                                         y_train,
                                         date_train,
                                         y_label1="Train Prediction",
                                         y_label2="Train")

            test_pred_point_by_point = model.predict_point_by_point(x_test)
            plotter.plot_different_scale(test_pred_point_by_point,
                                         y_test,
                                         date_test,
                                         y_label1="Test Prediction",
                                         y_label2="Real")

    #WINDOW_END

    print("THE_END")
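In the WindowBatch branch, get_window_train_data returns samples shaped (n_windows, seq_len, n_features), and the FlattenWindowVector option reshapes them to two dimensions for a non-recurrent model. Below is a standalone sketch of that windowing and flattening step with NumPy; the function name and shapes are illustrative, not the project's API.

# Illustrative sliding-window construction and the flatten step used when
# Options.FlattenWindowVector is enabled.
import numpy as np


def make_windows(features, targets, seq_len, step=1):
    """Slice a (n_samples, n_features) array into overlapping windows."""
    x, y = [], []
    for start in range(0, len(features) - seq_len + 1, step):
        x.append(features[start:start + seq_len])
        y.append(targets[start + seq_len - 1])  # target at the end of each window
    return np.asarray(x, dtype=np.float32), np.asarray(y, dtype=np.float32)


features = np.random.rand(100, 4)  # 100 time steps, 4 input features
targets = np.random.rand(100)
x, y = make_windows(features, targets, seq_len=10, step=1)
print(x.shape)                     # (91, 10, 4)

# Flattening for a dense model, as in the FlattenWindowVector branch.
x_flat = x.reshape(-1, 10 * 4)     # (91, 40)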