Example #1
def train():
    """
    Train the SRCNN model. Requires ./model/train.h5, so run
    prepare_data.py before executing this training module.
    """
    # `pd` here is the prepare_data module (see docstring), not pandas
    pd.main()
    srcnn_model = model()
    data, label = pd.read_training_data("./model/train.h5")
    # srcnn_model.load_weights("m_model_adam.h5")
    srcnn_model.fit(data, label, batch_size=128, epochs=30)
    srcnn_model.save_weights("./model/srcnn_model.h5")
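The model() factory is not included in this listing. Below is a minimal sketch of what it might look like, assuming the standard three-layer SRCNN built with tf.keras; the filter counts, kernel sizes, patch shape, and optimizer settings are assumptions, not taken from the source.

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D
from tensorflow.keras.optimizers import Adam

def model():
    # classic SRCNN: patch extraction, non-linear mapping, reconstruction
    srcnn = Sequential()
    srcnn.add(Conv2D(128, kernel_size=9, activation="relu",
                     padding="valid", input_shape=(33, 33, 1)))
    srcnn.add(Conv2D(64, kernel_size=3, activation="relu", padding="same"))
    srcnn.add(Conv2D(1, kernel_size=5, activation="linear", padding="valid"))
    srcnn.compile(optimizer=Adam(learning_rate=3e-4), loss="mse")
    return srcnn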
Example #2
def main(input_filepath, output_filepath):
    """Runs data processing scripts to turn raw data from (../raw) into
    cleaned data ready to be analyzed (saved in ../processed).
    """
    logger = logging.getLogger(__name__)
    logger.info("making final data set from raw data")

    machine_name = "PerschmannHermleC32USpindle"
    raw_dir_path = utils.get_raw_dir_path(project_dir)
    interim_dir_path = utils.get_interim_dir_path(project_dir)
    processed_dir_path = utils.get_processed_dir_path(project_dir)

    # load relevant data collectors after selection
    collector_load, collector_speed, metadata = prepare_data.main(
        project_dir, machine_name
    )
    X_load = utils.to_same_length_time_series_dataset(collector_load)
    X_speed = utils.to_same_length_time_series_dataset(collector_speed)

    # get labels
    labels = prepare_labels.create_labels_from_metadata(metadata)
    """TO DO: transform labels into categories, then one hot encode"""
    
    # compute autocorrelation
    X_load_autocorr = process_dataset.to_autocorrelation_dataset(X_load)
    X_speed_autocorr = process_dataset.to_autocorrelation_dataset(X_speed)
    
    print("LENGTH LABEL VECTOR {}".format(len(labels)))
    print("project_dir: {}".format(project_dir))
    print("FINISHED")

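process_dataset.to_autocorrelation_dataset is not shown in this listing. The sketch below shows one way such a helper could be written with NumPy, assuming each series is 1-D and the result is a normalized autocorrelation per series; the body is an assumption, not the project's actual implementation.

import numpy as np

def to_autocorrelation_dataset(X):
    # replace each equal-length series with its normalized autocorrelation over lags 0..n-1
    out = []
    for series in X:
        s = np.asarray(series, dtype=float).ravel()
        s = s - s.mean()
        acf = np.correlate(s, s, mode="full")[s.size - 1:]
        if acf[0] != 0:
            acf = acf / acf[0]
        out.append(acf)
    return np.stack(out)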
Example #3
def main():
    # prepare the train/test splits, build the logistic-regression inputs,
    # then evaluate with cross-validation
    train_X, train_y, test_X, test_y = prepare_data.main()
    train, train_X, train_y, test_X, test_y = prepare_logistic.main(
        train_X, train_y, test_X, test_y
    )
    aucs = x_validation.main(train)
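x_validation.main is not part of this listing. A minimal sketch in the same spirit is shown below, assuming scikit-learn, that train is a pandas DataFrame, and that it carries a binary label column; the column name and fold count are assumptions.

from sklearn.linear_model import LogisticRegression
from sklearn.metrics import roc_auc_score
from sklearn.model_selection import StratifiedKFold

def main(train, label_col="label", n_splits=5):
    # k-fold cross-validation returning one ROC AUC per fold
    X = train.drop(columns=[label_col]).values
    y = train[label_col].values
    aucs = []
    folds = StratifiedKFold(n_splits=n_splits, shuffle=True, random_state=0)
    for train_idx, test_idx in folds.split(X, y):
        clf = LogisticRegression(max_iter=1000).fit(X[train_idx], y[train_idx])
        scores = clf.predict_proba(X[test_idx])[:, 1]
        aucs.append(roc_auc_score(y[test_idx], scores))
    return aucs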
Example #4
def main():
    prepare_data.main()
    nn_classifier.main()
Example #5
def write_records(records, table_name):
    # each record is expected to be a pre-rendered "(value, value, ...)" string
    insert_header = f'''
        insert into {table_name}
        values
        '''
    # SQL Server accepts at most 1000 rows per VALUES clause, so insert in batches
    for batch in chunker(records, 1000):
        insert_values = ','.join(batch)
        insert_statement = insert_header + insert_values
        mssql_cursor.execute(insert_statement)
        mssql_conn.commit()

def main():
    print('DATA LOADING')
    
    empty_import_tables()
    read_import_interviews()
    read_import_funnel()
    read_import_fc()
    read_import_open()

    print('Data loading complete', end='\n\n')

if __name__ == '__main__':
    import initialize
    import prepare_data
    initialize.main()
    prepare_data.main()

    from timeit import timeit
    # report the wall-clock time of one full loading run
    print(timeit(main, number=1))
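chunker is not defined in this listing; a common implementation compatible with how it is called above would look like the sketch below (the name matches the call site, but the body is an assumption).

def chunker(seq, size):
    # yield successive slices of at most `size` items from a list-like sequence
    for start in range(0, len(seq), size):
        yield seq[start:start + size]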
Example #6
def main():
    prepare_data.main()
    nn_classifier.main()