Example #1
def makeNewAreaDataset():
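    # load raw data for locations the model has not trained on, then
    # build a test set from every vulnerable pixel in those locations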
    new_data = rawdata.RawData.load(locNames='untrain',
                                    special_layers='all',
                                    new_data='not_none')
    newDataSet = dataset.Dataset(new_data, dataset.Dataset.vulnerablePixels)
    pointLst = newDataSet.toListTest(newDataSet.points)
    # ptList = masterDataSet.sample(sampleEvenly=False)
    # pointLst = random.sample(pointLst, SAMPLE_SIZE)
    test = dataset.Dataset(new_data, pointLst)
    return test
Example #2
def openDatasets():
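    # load everything, sample the vulnerable pixels, and partition them;
    # the ratios look like cumulative cut points (~60% train, 10% validate, 30% test)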
    data = rawdata.load()
    masterDataSet = dataset.Dataset(data, dataset.Dataset.vulnerablePixels)
    ptList = masterDataSet.sample(sampleEvenly=False)
    # masterDataSet.points = dataset.Dataset.toDict(ptList)
    trainPts, validatePts, testPts = util.partition(ptList, ratios=[.6, .7])
    train = dataset.Dataset(data, trainPts)
    validate = dataset.Dataset(data, validatePts)
    test = dataset.Dataset(data, testPts)
    return train, validate, test
Example #3
def openDatasets():
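    # same flow as above, but with an explicit sample size drawn
    # from the master set before partitioning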
    data = rawdata.RawData.load(locNames='all', special_layers='all')
    masterDataSet = dataset.Dataset(
        data, dataset.Dataset.vulnerablePixels
    )  # loops over vulnerablePixels for each location; should grab the full vegetation image
    sample_size = 100
    print("SAMPLE SIZE: ", sample_size)
    ptList = masterDataSet.sample(goalNumber=sample_size,
                                  sampleEvenly=False)
    trainPts, validatePts, testPts = util.partition(ptList,
                                                    ratios=[.7, .8])  # alternative split: .85, .99
    train = dataset.Dataset(data, trainPts)
    validate = dataset.Dataset(data, validatePts)
    test = dataset.Dataset(data, testPts)

    return train, validate, test
Example #4
def makeSmallDatasets(pass_arr):
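    # arguments arrive packed in a single list, presumably so this
    # helper can be mapped over by a worker pool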
    data, set_type, type_str = pass_arr
    return {type_str: dataset.Dataset(data, set_type)}
Example #5
def get_data(batch_size, data_name, data_root='./my_ai/'):
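    # wrap dataset.Dataset in a PyTorch DataLoader; images and labels
    # get matching ToTensor + padding transforms so they stay aligned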
    data_loader = data.DataLoader(
        dataset.Dataset(
            path=data_root,
            transform_data=transforms.Compose([
                # transforms.RandomHorizontalFlip(),
                # channel_change(),

                color_change(),
                ToTensor(),
                tensor_pad(28)
            ]),
            transform_labels=transforms.Compose([
                # transforms.RandomHorizontalFlip(),

                ToTensor(),
                tensor_pad(28)
            ]),
            data_name=data_name
        ),
        batch_size=batch_size,
        shuffle=True,
        num_workers=1
    )
    return data_loader
Example #6
def test():
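    # smoke test: construct a Dataset and print each day it contains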
    ds = dataset.Dataset()
    # ds2 = ds.copy()
    # ds.save2()
    for i in ds.getDays():
        print(i)
    print(ds)
Example #7
def setup_data_generators(
        metadata,
        folder_images,
        field_class="dx",
        test_size=0.1,
        validation_size=0.2,
        aux_data=False,
        augment=True,
        batch_size=50,
        balancing=True,
        seed=None) -> Tuple[dataset.Dataset, dataset.Dataset, dataset.Dataset]:
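    # two-stage stratified split: carve off the test set first, then
    # split the remainder into train and validation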
    if seed is None:
        seed = np.random.randint(0, 1e7)
    train_data, test_data, _, _ = model_selection.train_test_split(
        metadata,
        metadata[field_class],
        test_size=test_size,
        stratify=metadata[field_class],
        random_state=seed)
    train_data, validate_data, _, _ = model_selection.train_test_split(
        train_data,
        train_data[field_class],
        test_size=validation_size,
        stratify=train_data[field_class],
        random_state=seed)

    # set up image generators
    get_dataset = (lambda data, aux_data, class_order=None, augment=True:
                   dataset.Dataset(folder_images,
                                   data,
                                   target_size=(300, 225),
                                   augmentation=augment,
                                   aux_data=aux_data,
                                   batch_size=batch_size,
                                   class_order=class_order))

    # sync class order with training generator
    train_gen = get_dataset(train_data, aux_data, augment=augment)
    test_gen = get_dataset(test_data,
                           aux_data,
                           class_order=train_gen.unique_classes,
                           augment=False)
    validate_gen = get_dataset(validate_data,
                               aux_data,
                               class_order=train_gen.unique_classes)

    # balance datasets
    if balancing:
        train_gen.balance(mode="upsampling", aggressiveness=0.7)
        test_gen.balance(mode="upsampling", aggressiveness=0.7)
        validate_gen.balance(mode="upsampling", aggressiveness=0.7)

    return train_gen, test_gen, validate_gen
Example #8
    def predict(self):
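        # collect the names of the burns the user checked in the list view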
        selectedBurns = []
        mod = self.burnList.model()
        for index in range(mod.rowCount()):
            i = mod.item(index)
            # print(i.checkState())
            if i.checkState() == QtCore.Qt.Checked:
                selectedBurns.append(i.text())
        print('opening the data for the burns:', selectedBurns)
        data = rawdata.RawData.load(burnNames=selectedBurns, dates='all')
        ds = dataset.Dataset(data, dataset.Dataset.vulnerablePixels)

        from lib import model
        modelFileName = self.modelLineEdit.text()
        print('loading model', modelFileName)
        trainedModel = model.load(modelFileName)
        print(trainedModel)