Example #1
    def _load_data(self, config):
        global_init(config['metadata_location'])
        df_train, df_test, index_to_label = load_training_data_to_dataframe()

        # Fixes issue with interaction between Ray actors and TF as described here:
        # https://github.com/ray-project/ray/blob/master/python/ray/tune/examples/tf_mnist_example.py
        from tensorflow.keras.preprocessing.image import ImageDataGenerator
        idg_train = ImageDataGenerator()
        flow_args = dict(
            x_col='filename',
            y_col='class_id_as_string',
            batch_size=config['batchsize'],
            class_mode='categorical',
            shuffle=True,
        )
        self.train_gen = idg_train.flow_from_dataframe(
            df_train,
            # save_to_dir='./',  # For debugging...
            **flow_args)
        idg_test = ImageDataGenerator()
        # Note: flow_args is reused verbatim, so the test generator is
        # also shuffled; pass shuffle=False here if order matters.
        self.test_gen = idg_test.flow_from_dataframe(df_test, **flow_args)

        self.index_to_label = index_to_label
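For context on the comment above: importing TensorFlow at module level in the driver can initialize TF state before Ray spawns its worker processes, so the import is deferred into the actor method. A minimal standalone sketch of that pattern, assuming Ray's actor API (the Trainer class and its load method are hypothetical, not part of quick_redraw):

import ray

ray.init()

@ray.remote
class Trainer:
    def load(self, batch_size):
        # Import TF inside the actor method so TensorFlow initializes
        # in the actor's own worker process, not in the driver.
        from tensorflow.keras.preprocessing.image import ImageDataGenerator
        self.idg = ImageDataGenerator()
        self.batch_size = batch_size
        return True

trainer = Trainer.remote()
ray.get(trainer.load.remote(32))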
Example #2

# Guard: this scratch script targets an outdated schema, so it refuses to run.
raise NotImplementedError("Need to refactor.  Uses old schema")

import os

from quick_redraw.data.image_record import ImageRecord
from quick_redraw.data.db_session import global_init, create_session
from quick_redraw.data.training_data import TrainingData
from quick_redraw.data.training_data_record import TrainingDataRecord  # old-schema model used below

db_file = './training_data_db_inserts.sqlite'

print("DELETING OLD TEMP DB")
os.remove(db_file)

global_init(db_file)

image = ImageRecord(label='cat', file_raw='raw.png', file_normalized='norm.png')

tdrs = [
    TrainingDataRecord(),
    TrainingDataRecord(),
    TrainingDataRecord(),
]

tdrs[0].image = image

index_to_label = ['cat', 'dog']
label_to_index = {'cat': 0, 'dog': 1}

td = TrainingData(index_to_label=index_to_label, label_to_index=label_to_index)
td.train.extend(tdrs)
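As written, the script builds td but never commits it; persisting it would follow the same session pattern that appears in the next example (assuming create_session returns a standard SQLAlchemy session):

# Persist the TrainingData record built above.
s = create_session()
s.add(td)
s.commit()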
Example #3

import argparse

from quick_redraw.data.db_session import global_init, create_session
from quick_redraw.data.training_data import TrainingData


def create_training_data_from_image_db(test_size: float) -> None:
    # Elided in this example: query the normalized images from the
    # metadata DB, split them into training_images and testing_images
    # (with test_size as the test fraction), and build td = TrainingData(...).
    td.training_images.extend(training_images)
    td.testing_images.extend(testing_images)

    s = create_session()
    s.add(td)
    s.commit()


def parse_arguments() -> argparse.Namespace:
    parser = argparse.ArgumentParser(
        description=
        "Collect normalized images and split/store them in train and test "
        "groups")
    parser.add_argument('db_location',
                        type=str,
                        action="store",
                        help="Path to the database")
    parser.add_argument('test_size',
                        type=float,
                        action="store",
                        help="Fractional size of the test database")
    return parser.parse_args()


if __name__ == '__main__':
    args = parse_arguments()

    global_init(args.db_location)

    create_training_data_from_image_db(test_size=args.test_size)
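For reference, the same flow can be driven without the CLI, e.g. from a test; the script name and database path below are hypothetical:

# Programmatic equivalent of: python create_training_data.py ./metadata.sqlite 0.2
global_init('./metadata.sqlite')
create_training_data_from_image_db(test_size=0.2)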
Example #4
def db_init():
    # global_init builds tables and populates session factory
    global_init('', echo=True)
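The comment above summarizes the pattern: global_init wires up the engine and a module-level session factory once, and later calls are no-ops. A minimal sketch of that pattern, assuming SQLAlchemy underneath (this is an illustration, not the quick_redraw source):

import sqlalchemy as sa
import sqlalchemy.orm as orm

SqlAlchemyBase = orm.declarative_base()  # models inherit from this
_factory = None


def global_init(db_file: str, echo: bool = False) -> None:
    global _factory
    if _factory:
        return  # already initialized; later calls are no-ops
    # An empty db_file, as in the call above, yields "sqlite:///",
    # which SQLAlchemy treats as an in-memory database.
    engine = sa.create_engine("sqlite:///" + db_file.strip(), echo=echo)
    SqlAlchemyBase.metadata.create_all(engine)  # build any missing tables
    _factory = orm.sessionmaker(bind=engine)


def create_session():
    return _factory()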
Example #5
def init_db(metadata_location, echo):
    global_init(metadata_location, echo=echo)