def write_data_sequence(self):
    """Copy every detections object from ``self.data_sequence`` into a new
    DataSequence file at ``self.options.output_path``, tagging it with the
    originating IDL file path."""

    metadata = {"original_idl_file": self.options.input_path}
    writer = DataSequence(self.options.output_path, Detections, metadata)

    for item in self.data_sequence:
        writer.write(item)

    # dropping the last reference closes the underlying file
    del writer

    print("Created output file", self.options.output_path)
    return
 def write_data_sequence(self):
     """Dump ``self.data_sequence`` into a DataSequence file at
     ``self.options.output_path``, recording the source IDL path as an
     attribute."""

     meta = {"original_idl_file": self.options.input_path}
     out_seq = DataSequence(self.options.output_path, Detections, meta)
     for det in self.data_sequence:
         out_seq.write(det)
     # deleting the writer releases (and thereby closes) the file handle
     del out_seq
     print("Created output file", self.options.output_path)
     return
    def read_and_check(self):
        """Reopen ``self.test_filename`` and verify that the attributes and
        the three stored records round-trip intact."""

        # the file must exist and be non-empty before we try to parse it
        self.assertTrue(os.path.exists(self.test_filename))
        self.assertTrue(os.path.getsize(self.test_filename) > 0)

        reader = DataSequence(self.test_filename, TestData)

        # pull everything back in the same order the writer stored it
        stored_attributes = reader.get_attributes()
        first = reader.read()
        second = reader.read()
        third = reader.read()

        self.assertEqual(stored_attributes, self.attributes)
        self.assertEqual(first, self.data1)
        self.assertEqual(second, self.data2)
        self.assertEqual(third, self.data3)
        return
    def read_and_check(self):
        """Check that the sequence file on disk round-trips the test data."""

        self.assertTrue(os.path.exists(self.test_filename))
        self.assertTrue(os.path.getsize(self.test_filename) > 0)

        seq = DataSequence(self.test_filename, TestData)

        # read everything back before comparing, in write order
        attrs = seq.get_attributes()
        records = [seq.read() for _ in range(3)]

        self.assertEqual(attrs, self.attributes)
        self.assertEqual(records[0], self.data1)
        self.assertEqual(records[1], self.data2)
        self.assertEqual(records[2], self.data3)
        return
# Example no. 5 (0 votes) -- scraper residue, kept as a comment
def train(config):
    """Train ``config.MODEL`` on the train/validation tables in *config*.

    Builds a DataSequence for each split (augmentation only on the training
    split), wires up early stopping, checkpointing and CSV logging, prints
    the model summary and runs the fit.

    All settings are read from *config*: TRAIN_FP, VAL_FP, DATA_COL,
    TARGET_COL, NUM_CLASSES, BATCH_SIZE, EARLYSTOP_PATIENCE,
    CHECKPOINT_DIR, SAVE_WEIGHTS_ONLY, SAVE_BEST_ONLY, TRAIN_LOG_FP,
    MODEL and EPOCHS.

    Returns the Keras ``History`` object from the fit call.  (Bug fix: the
    original assigned ``history`` but never returned it; returning it is
    backward compatible with callers that ignore the return value.)
    """
    # augment the training data only; validation must stay deterministic
    train_seq = DataSequence.from_table(config.TRAIN_FP,
                                        config.DATA_COL,
                                        config.TARGET_COL,
                                        config.NUM_CLASSES,
                                        config.BATCH_SIZE,
                                        augment=True)
    val_seq = DataSequence.from_table(config.VAL_FP,
                                      config.DATA_COL,
                                      config.TARGET_COL,
                                      config.NUM_CLASSES,
                                      config.BATCH_SIZE,
                                      augment=False)

    # reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=Config.reduce_lr_factor, patience=Config.reduce_lr_patience, cooldown=0, mode='min', verbose=1, min_lr=1e-8)
    early_stop = EarlyStopping(monitor='val_loss',
                               patience=config.EARLYSTOP_PATIENCE,
                               verbose=1,
                               mode='min')
    # checkpoint filename embeds epoch number and val_loss at save time
    ckpt_fp = os.path.join(config.CHECKPOINT_DIR,
                           "model-{epoch:02d}-{val_loss:.2f}.hdf5")
    model_checkpoint = ModelCheckpoint(
        filepath=ckpt_fp,
        monitor='val_loss',
        save_weights_only=config.SAVE_WEIGHTS_ONLY,
        save_best_only=config.SAVE_BEST_ONLY,
        verbose=1,
        mode='min')
    csv_logger = CSVLogger(config.TRAIN_LOG_FP)
    callbacks = [early_stop, model_checkpoint, csv_logger]

    config.MODEL.summary()

    # NOTE(review): fit_generator is deprecated in TF2-era Keras in favor of
    # Model.fit; kept as-is since the Keras version in use is unknown here.
    history = config.MODEL.fit_generator(generator=train_seq,
                                         validation_data=val_seq,
                                         steps_per_epoch=len(train_seq),
                                         validation_steps=len(val_seq),
                                         epochs=config.EPOCHS,
                                         verbose=1,
                                         callbacks=callbacks,
                                         shuffle=True
                                         # workers=2
                                         )
    # bug fix: `history` was computed but silently discarded
    return history
def open_data_sequence(data_filepath):
    """Return a generator that yields each Detections record stored in the
    DataSequence file at *data_filepath* until the sequence is exhausted.

    Raises
    ------
    AssertionError
        If *data_filepath* does not exist (note: stripped under ``python -O``).
    """
    assert os.path.exists(data_filepath)

    the_data_sequence = DataSequence(data_filepath, Detections)

    def data_sequence_reader(data_sequence):
        # read() returning None marks the end of the sequence
        while True:
            data = data_sequence.read()
            if data is None:
                # bug fix (PEP 479): `raise StopIteration` inside a generator
                # is converted to RuntimeError on Python 3.7+; a plain return
                # is the correct way to end iteration.
                return
            else:
                yield data

    return data_sequence_reader(the_data_sequence)
    def test_read_write_sequence(self):
        """
        Test data sequence creation and reading
        """

        # start from a clean slate: drop any leftover file from a prior run
        if os.path.exists(self.test_filename):
            os.remove(self.test_filename)

        writer = DataSequence(self.test_filename, TestData, self.attributes)

        # store the three records in a fixed, known order
        for record in (self.data1, self.data2, self.data3):
            writer.write(record)

        writer.flush()

        self.read_and_check()
        return
    def test_read_write_sequence(self):
        """Write three records plus attributes, then verify the round trip."""

        # remove any stale output left behind by an earlier run
        if os.path.exists(self.test_filename):
            os.remove(self.test_filename)

        out = DataSequence(self.test_filename, TestData, self.attributes)
        out.write(self.data1)
        out.write(self.data2)
        out.write(self.data3)

        # make sure everything hits the disk before re-reading
        out.flush()

        self.read_and_check()
        return