Example #1
def execute_custom_sql():
    # Read the raw SQL statement from the JSON request body
    payload = request.json
    sql_text = payload['query']

    # Run the statement without bind parameters and return the formatted rows
    records = execute_query(sql_text, None)
    result = format_data(records)
    return result, 201
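Most of the examples on this page pass query results through a project-local format_data helper that is not shown here. Purely as an assumption about its shape (suggested by Example #5, where the first row holds column names), a minimal sketch might look like this:

import json

def format_data(records):
    # Hypothetical sketch only: treat records[0] as the column names and the
    # remaining entries as rows, and return them as a JSON string of
    # {column: value} dictionaries. The real helper is not shown on this page.
    if not records:
        return json.dumps([])
    columns = records[0]
    rows = records[1:]
    return json.dumps([dict(zip(columns, row)) for row in rows])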
Example #2
def query_execution(query_id):
    # Collect query-string parameter names and values from the request
    parameter_names = list(request.args)
    parameter_values = [request.args[name] for name in parameter_names]

    app.logger.debug('Query parameters for {}'.format(query_id))
    app.logger.debug(parameter_names)
    app.logger.debug(parameter_values)

    # Holds query results
    records = list()

    # Pass None instead of an empty list when no bind parameters were supplied
    parameters = parameter_values if parameter_values else None

    if query_id in qry:
        query = qry[query_id]['query']
        records = execute_query(query, parameters)
    else:
        records.append('Exception: ' + query_id +
                       ' not found in list of configured queries')

    # Format query results as JSON.
    result = format_data(records)
    return result
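query_execution looks up query_id in a qry mapping that is configured elsewhere in the project. As an assumption about its shape only, a minimal configuration might look like the sketch below (the query name and SQL are made up for illustration):

# Hypothetical configuration; the real queries live elsewhere in the project.
qry = {
    'employees-by-dept': {
        'query': 'SELECT ename, job FROM emp WHERE deptno = :1',
    },
}

# A request such as GET /query_execution/employees-by-dept?deptno=10 would then
# arrive with request.args == {'deptno': '10'}, and the handler would run the
# configured SQL with ['10'] as its bind parameters.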
Example #3
def main():
    # import data
    if DEBUG:
        data_train = pd.read_csv('../data/training-train.csv')
        # smaller debug set: pd.read_csv('../data/training-small-train.csv')
        data_validation = pd.read_csv('../data/training-validate.csv')
        # smaller debug set: pd.read_csv('../data/training-small-validate.csv')
    else:
        data_train = pd.read_csv('../data/training.csv')
        data_validation = pd.read_csv('../data/testData.csv')

    data_train['train_flag'] = True
    data_validation['train_flag'] = False
    data = pd.concat((data_train, data_validation))

    # keep missing flags for both training and validation
    ytr_missing = np.array(
        data_train.loc[:, 'COVAR_y1_MISSING':'COVAR_y3_MISSING'])
    yvl_missing = np.array(
        data_validation.loc[:, 'COVAR_y1_MISSING':'COVAR_y3_MISSING'])

    # remove temporary data
    del data_train
    del data_validation

    # basic formatting
    Xtr, ytr, Xvl, yvl = utils.format_data(data,
                                           preprocessing=USE_PREPROCESSING)
    del data

    # preprocess data
    if USE_PREPROCESSING:
        use_pca = False  # apply PCA (True) or standard normalization (False)
        Xtr, Xvl = utils.preprocess(Xtr, Xvl, use_pca)

    # create RNN instance
    n_features = len(Xtr[0])
    n_outputs = len(ytr[0])
    nn_solver = RNN(n_features=n_features,
                    n_outputs=n_outputs,
                    n_neurons=hidden_size,
                    param_update_scheme=param_update_scheme,
                    learning_rate=learning_rate,
                    activation_rule=activation_rule,
                    use_batch_step=USE_BATCH_TRAINING,
                    batch_step_size=batch_step_size,
                    relu_neg_slope=relu_neg_slope,
                    use_dropout_regularization=use_dropout_regularization,
                    dropout_threshold=dropout_threshold,
                    reg_strenght=reg_strenght,
                    use_regularization=use_regularization,
                    sgd_shuffle=sgd_shuffle)

    if not PREDICT_ONLY:
        trainAndTest(nn_solver, Xtr, ytr, ytr_missing, Xvl, yvl, yvl_missing)
    else:
        predictByModel(nn_solver, Xvl,
                       '../models/DeepNN/model_2016-08-03T15_39_15.mat')
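In this example format_data belongs to a local utils module and does the reverse of the pd.concat above: it splits the combined frame back into training and validation arrays. A minimal sketch, assuming targets named y1..y3 (matching the COVAR_y*_MISSING flags) and the train_flag column added in main(); both are assumptions, and the real helper is not shown:

def format_data(data, preprocessing=False):
    # Hypothetical sketch of utils.format_data. The preprocessing flag is
    # consumed by the real helper and ignored here.
    target_cols = ['y1', 'y2', 'y3']          # assumed target column names
    drop_cols = target_cols + ['train_flag']

    train = data[data['train_flag']]
    validation = data[~data['train_flag']]

    Xtr = train.drop(columns=drop_cols).to_numpy()
    ytr = train[target_cols].to_numpy()
    Xvl = validation.drop(columns=drop_cols).to_numpy()
    yvl = validation[target_cols].to_numpy()
    return Xtr, ytr, Xvl, yvl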
Example #4
    def datas_dict(self):
        # Populate self.datas with the formatted financial indicators,
        # then compute the derived ratios.
        self.datas['YEAR'] = [
            date.strftime("%Y")
            for date in self.resultat_datas.keys().tolist()
        ]
        self.datas['EBITDA'] = utl.format_data(self.ebitda.values.tolist())
        self.datas['Bénéfice Net'] = utl.format_data(
            self.benefice_net.values.tolist())
        self.datas['Revenus Total'] = utl.format_data(
            self.revenue_total.values.tolist())
        self.datas["Actifs Total"] = utl.format_data(
            self.actifs_total.values.tolist())
        self.datas["Chiffre d'affaires"] = utl.format_data(
            self.chiffre_affaire.values.tolist())
        self.datas["Trésorie"] = utl.format_data(
            self.cash_flow.values.tolist())
        self.datas["Capitaux Propre"] = utl.format_data(
            self.total_capitaux_propre.values.tolist())
        self.datas['Score'] = [self.total_score()]

        self.bna_years()
        self.per_years()
        self.debt_ratio()
        self.bvps_ratio()
        self.capitalisation_ratio()
        self.dividendes_ratio()
        self.roe_roa_ratio(roa=False)
        self.roe_roa_ratio(roa=True)
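In this example (and in Example #9 below) format_data comes from a utl module and is applied to plain lists of financial figures rather than to query results, so it presumably just normalizes numbers for display. Purely as an assumption about its shape:

def format_data(values):
    # Hypothetical sketch: round each numeric figure to two decimals for
    # display and pass non-numeric entries through unchanged. The real
    # utl.format_data is not shown on this page.
    return [round(v, 2) if isinstance(v, (int, float)) else v for v in values]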
Example #5
def report_sql_monitor(sql_id):
    query = ("SELECT DBMS_SQLTUNE.REPORT_SQL_MONITOR('" + sql_id +
             "') FROM DUAL")

    records = execute_query(query, '')

    # The report comes back as a LOB object; convert it to a string first.
    result = convert_lob_object_to_string(records[1][0])

    records = [['STATUS'], [result]]
    result = format_data(records)
    return result
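The DBMS_SQLTUNE report is returned by the driver as a LOB. A minimal sketch of a convert_lob_object_to_string helper, assuming a cx_Oracle-style CLOB whose read() method returns the full contents (the real helper is not shown):

def convert_lob_object_to_string(lob):
    # Hypothetical helper: cx_Oracle CLOB objects expose read(), which
    # returns the whole report as a Python string.
    if lob is None:
        return ''
    return lob.read()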
Example #6
    def send_api(self, data: dict):
        '''Send the request defined by the given API entry from the interface file.'''
        raw = yaml.safe_dump(data)

        # Substitute ${name} placeholders with the stored parameter values
        for k, v in self.params.items():
            raw = raw.replace(f"${{{k}}}", repr(v))
        data = yaml.safe_load(raw)

        url = data.get("url")
        method = data.get("method")
        params = data.get("params")
        jsons = data.get("json")

        raw = json.dumps(data, indent=2, ensure_ascii=False)
        logger.info("Loaded request data:\n{raw}".format(raw=raw))

        r = requests.request(url=url, method=method, params=params, json=jsons)
        return format_data(r)
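The ${name} substitution above works by dumping the API definition back to YAML, string-replacing each placeholder, and reloading it. A small standalone illustration of that round trip (the API entry and parameter values here are made up):

import yaml

api_entry = {
    "url": "https://example.com/login",   # made-up endpoint for illustration
    "method": "post",
    "json": {"user": "${username}", "password": "${password}"},
}
params = {"username": "alice", "password": "secret"}

raw = yaml.safe_dump(api_entry)
for k, v in params.items():
    raw = raw.replace(f"${{{k}}}", repr(v))

print(yaml.safe_load(raw)["json"])   # both placeholders now hold the real values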
Example #7
def get_list_of_data_dictionary_tables():
    query = qry['data-dictionary-views']
    records = execute_query(query, [])
    result = format_data(records)
    return result
Example #8
def get_table(table):
    # Return up to 999 rows from the given table; note that the table name
    # is interpolated directly into the SQL text.
    records = execute_query('SELECT * FROM ' + table + ' WHERE ROWNUM < 1000',
                            [])
    result = format_data(records)
    return result
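Because Oracle cannot bind identifiers, the table name above is concatenated straight into the statement. One way to harden such an endpoint, sketched here with a made-up allowlist (not part of the original code), is to validate the name before building the SQL:

# Hypothetical allowlist; a real application would derive this from its own
# configuration or from the data dictionary.
ALLOWED_TABLES = {'EMP', 'DEPT', 'SALGRADE'}

def get_table_safe(table):
    # Reject anything that is not a known table before touching the database.
    if table.upper() not in ALLOWED_TABLES:
        return format_data(['Exception: ' + table + ' is not an allowed table'])
    records = execute_query('SELECT * FROM ' + table + ' WHERE ROWNUM < 1000', [])
    return format_data(records)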
Example #9
    def capitalisation_ratio(self):
        # Market capitalization for each preceding period: price * number of shares
        capitalisation = [i * self.actions for i in self.price_dates]
        self.datas['Capitalisation'] = utl.format_data(capitalisation)
        self.data_analyse['Capitalisation'] = capitalisation