Esempio n. 1
0
 def get_files(self):
     """Return the names of all files recorded in the file map.

     Reads the JSON file map and collects the ``JSON_FILE_NAME`` value
     of every entry under ``JSON_FILES``.
     """
     with data.open_data_file(utils.FILE_MAP_FILE_NAME, utils.READ_MODE) as file_map:
         file_data = json.load(file_map)
         # List comprehension instead of a manual append loop (PERF401).
         return [entry[JSON_FILE_NAME] for entry in file_data[JSON_FILES]]
Esempio n. 2
0
 def update_cloud_map(self):
     """Persist the current ``cloud_init`` flag into the cloud map file."""
     with data.open_data_file(utils.CLOUD_MAP_FILE_NAME, utils.READ_WRITE_MODE) as cloud_map:
         contents = json.load(cloud_map)
         contents[JSON_INIT] = self.cloud_init
         # Rewrite in place: rewind, dump, then drop any leftover bytes.
         cloud_map.seek(0)
         json.dump(contents, cloud_map, indent=2)
         cloud_map.truncate()
Esempio n. 3
0
 def get_leaf_id(self, data_id):
     """Return the leaf id recorded for *data_id*, or None if absent."""
     with data.open_data_file(utils.POSITION_MAP_FILE_NAME, utils.READ_MODE) as position_map:
         records = json.load(position_map)
         # next() with a default mirrors the original linear scan,
         # including the implicit None when no entry matches.
         return next(
             (rec[JSON_LEAF_ID] for rec in records if rec[JSON_DATA_ID] == data_id),
             None,
         )
Esempio n. 4
0
 def add_data(self, data_id):
     """Append a new (negative leaf id, data id) record to the position map."""
     with data.open_data_file(utils.POSITION_MAP_FILE_NAME, utils.READ_WRITE_MODE) as position_map:
         records = json.load(position_map)
         new_entry = {
             JSON_LEAF_ID: -config.get_random_leaf_id(),
             JSON_DATA_ID: data_id,
         }
         records.append(new_entry)
         # Overwrite the file with the extended list.
         position_map.seek(0)
         json.dump(records, position_map, indent=2, sort_keys=True)
         position_map.truncate()
Esempio n. 5
0
 def save_to_file(self):
     """Serialize the salt and wrapped keys to the key map file as JSON."""
     with data.open_data_file(utils.KEY_MAP_FILE_NAME, utils.WRITE_MODE) as key_map:
         payload = {
             JSON_SALT: utils.byte_to_str(self.salt),
             JSON_WAES_KEY: utils.byte_to_str(self.waes_key),
             JSON_WMAC_KEY: utils.byte_to_str(self.wmac_key),
         }
         json.dump(payload, key_map, indent=2, sort_keys=True)
Esempio n. 6
0
 def save_to_file(self):
     """Serialize the salt and wrapped keys to the key map file as JSON."""
     with data.open_data_file(utils.KEY_MAP_FILE_NAME, utils.WRITE_MODE) as key_map:
         payload = {
             JSON_SALT: utils.byte_to_str(self.salt),
             JSON_WAES_KEY: utils.byte_to_str(self.waes_key),
             JSON_WMAC_KEY: utils.byte_to_str(self.wmac_key),
         }
         json.dump(payload, key_map, indent=2, sort_keys=True)
Esempio n. 7
0
 def get_file_len(self, filename):
     """Return the stored size of *filename*, or None if it is not mapped."""
     with data.open_data_file(utils.FILE_MAP_FILE_NAME, utils.READ_MODE) as file_map:
         file_data = json.load(file_map)
         # Same linear scan as before, expressed as next() with a default.
         return next(
             (f[JSON_FILE_SIZE] for f in file_data[JSON_FILES] if f[JSON_FILE_NAME] == filename),
             None,
         )
Esempio n. 8
0
 def data_id_exist(self, data_id):
     """Return True if *data_id* appears in the position map."""
     with data.open_data_file(utils.POSITION_MAP_FILE_NAME, utils.READ_MODE) as position_map:
         position_data = json.load(position_map)
         # any() replaces the manual scan-and-flag loop.
         return any(entry[JSON_DATA_ID] == data_id for entry in position_data)
Esempio n. 9
0
File: cloud.py Progetto: sidd36/ORAM
 def update_cloud_map(self):
     """Persist the current ``cloud_init`` flag into the cloud map file."""
     with data.open_data_file(utils.CLOUD_MAP_FILE_NAME,
                              utils.READ_WRITE_MODE) as cloud_map:
         contents = json.load(cloud_map)
         contents[JSON_INIT] = self.cloud_init
         # Rewrite in place: rewind, dump, then drop any leftover bytes.
         cloud_map.seek(0)
         json.dump(contents, cloud_map, indent=2)
         cloud_map.truncate()
Esempio n. 10
0
 def load_cloud_map(self):
     """Load (token, init flag) from the cloud map file.

     Raises:
         ErrorInCloudMap: if the file is not valid JSON or a key is missing.
     """
     with data.open_data_file(utils.CLOUD_MAP_FILE_NAME, utils.READ_MODE) as cloud_map:
         try:
             cloud_data = json.load(cloud_map)
             return cloud_data[JSON_TOKEN], cloud_data[JSON_INIT]
         except (ValueError, KeyError) as err:
             logger.warning('Error in cloud map.')
             # Chain the cause so the original JSON/key error stays in the traceback.
             raise ErrorInCloudMap('Error in cloud map.') from err
Esempio n. 11
0
 def get_files(self):
     """Return the names of all files recorded in the file map.

     Reads the JSON file map and collects the ``JSON_FILE_NAME`` value
     of every entry under ``JSON_FILES``.
     """
     with data.open_data_file(utils.FILE_MAP_FILE_NAME,
                              utils.READ_MODE) as file_map:
         file_data = json.load(file_map)
         # List comprehension instead of a manual append loop (PERF401).
         return [entry[JSON_FILE_NAME] for entry in file_data[JSON_FILES]]
Esempio n. 12
0
 def data_id_exist(self, data_id):
     """Return True if *data_id* appears in the position map."""
     with data.open_data_file(utils.POSITION_MAP_FILE_NAME,
                              utils.READ_MODE) as position_map:
         position_data = json.load(position_map)
         # any() replaces the manual scan-and-flag loop.
         return any(entry[JSON_DATA_ID] == data_id for entry in position_data)
Esempio n. 13
0
File: cloud.py Progetto: sidd36/ORAM
 def load_cloud_map(self):
     """Load (token, init flag) from the cloud map file.

     Raises:
         ErrorInCloudMap: if the file is not valid JSON or a key is missing.
     """
     with data.open_data_file(utils.CLOUD_MAP_FILE_NAME,
                              utils.READ_MODE) as cloud_map:
         try:
             cloud_data = json.load(cloud_map)
             return cloud_data[JSON_TOKEN], cloud_data[JSON_INIT]
         except (ValueError, KeyError) as err:
             logger.warning('Error in cloud map.')
             # Chain the cause so the original JSON/key error stays in the traceback.
             raise ErrorInCloudMap('Error in cloud map.') from err
Esempio n. 14
0
File: cloud.py Progetto: sidd36/ORAM
 def create_cloud_map(self):
     """Create a fresh cloud map holding a placeholder token and init=False."""
     with data.open_data_file(utils.CLOUD_MAP_FILE_NAME,
                              utils.WRITE_MODE) as cloud_map:
         initial_state = {JSON_TOKEN: TOKEN_PLACEHOLDER, JSON_INIT: False}
         json.dump(initial_state, cloud_map, indent=2)
Esempio n. 15
0
 def __init__(self):
     """Create an empty file map on first use; leave an existing one alone."""
     if data.file_exists(utils.FILE_MAP_FILE_NAME):
         return
     with data.open_data_file(utils.FILE_MAP_FILE_NAME,
                              utils.WRITE_MODE) as file_map:
         # Empty tuple serialises to an empty JSON array of files.
         empty_map = {JSON_FILES: (), JSON_ID_COUNTER: 0}
         json.dump(empty_map, file_map, indent=2)
Esempio n. 16
0
 def choose_new_leaf_id(self, data_id):
     """Assign a fresh negative leaf id to *data_id* and rewrite the map."""
     with data.open_data_file(utils.POSITION_MAP_FILE_NAME, utils.READ_WRITE_MODE) as position_map:
         records = json.load(position_map)
         for record in records:
             if record[JSON_DATA_ID] != data_id:
                 continue
             record[JSON_LEAF_ID] = -config.get_random_leaf_id()
             break
         # Persist the (possibly) updated map in place.
         position_map.seek(0)
         json.dump(records, position_map, indent=2, sort_keys=True)
         position_map.truncate()
Esempio n. 17
0
 def delete_file(self, filename):
     """Remove the first file-map entry matching *filename*, then persist."""
     with data.open_data_file(utils.FILE_MAP_FILE_NAME, utils.READ_WRITE_MODE) as file_map:
         file_data = json.load(file_map)
         files = file_data[JSON_FILES]
         # Delete by index; breaking immediately keeps iteration safe.
         for index, record in enumerate(files):
             if record[JSON_FILE_NAME] == filename:
                 del files[index]
                 break
         file_map.seek(0)
         json.dump(file_data, file_map, indent=2, sort_keys=True)
         file_map.truncate()
Esempio n. 18
0
 def add_data(self, data_id):
     """Append a new (negative leaf id, data id) record to the position map."""
     with data.open_data_file(utils.POSITION_MAP_FILE_NAME,
                              utils.READ_WRITE_MODE) as position_map:
         records = json.load(position_map)
         new_entry = {
             JSON_LEAF_ID: -config.get_random_leaf_id(),
             JSON_DATA_ID: data_id,
         }
         records.append(new_entry)
         # Overwrite the file with the extended list.
         position_map.seek(0)
         json.dump(records, position_map, indent=2, sort_keys=True)
         position_map.truncate()
Esempio n. 19
0
 def choose_new_leaf_id(self, data_id):
     """Assign a fresh negative leaf id to *data_id* and rewrite the map."""
     with data.open_data_file(utils.POSITION_MAP_FILE_NAME,
                              utils.READ_WRITE_MODE) as position_map:
         records = json.load(position_map)
         for record in records:
             if record[JSON_DATA_ID] != data_id:
                 continue
             record[JSON_LEAF_ID] = -config.get_random_leaf_id()
             break
         # Persist the (possibly) updated map in place.
         position_map.seek(0)
         json.dump(records, position_map, indent=2, sort_keys=True)
         position_map.truncate()
Esempio n. 20
0
 def add_file(self, file_name, file_size, data_items, data_id_counter):
     """Record a new file entry and refresh the stored data-id counter."""
     with data.open_data_file(utils.FILE_MAP_FILE_NAME, utils.READ_WRITE_MODE) as file_map:
         file_data = json.load(file_map)
         new_entry = {
             JSON_FILE_NAME: file_name,
             JSON_FILE_SIZE: file_size,
             JSON_DATA_ITEMS: data_items,
         }
         file_data[JSON_FILES].append(new_entry)
         file_data[JSON_ID_COUNTER] = data_id_counter
         # Rewrite the map in place.
         file_map.seek(0)
         json.dump(file_data, file_map, indent=2, sort_keys=True)
         file_map.truncate()
Esempio n. 21
0
 def load_from_file(cls):
     """Build a KeyFile from the key map file.

     Raises:
         ErrorInKeyMap: if the file is empty, invalid JSON, or lacks a key.
     """
     with data.open_data_file(utils.KEY_MAP_FILE_NAME, utils.READ_MODE) as key_map:
         try:
             json_key_map = json.load(key_map)
             salt = utils.str_to_byte(json_key_map[JSON_SALT])
             waes_key = utils.str_to_byte(json_key_map[JSON_WAES_KEY])
             wmac_key = utils.str_to_byte(json_key_map[JSON_WMAC_KEY])
             return KeyFile(salt, waes_key, wmac_key)
         except (ValueError, KeyError) as err:
             # Chain the cause so the original JSON/key error stays visible.
             raise ErrorInKeyMap('key.map might be empty or the data is not a valid JSON format.') from err
Esempio n. 22
0
def main(overwrite=False):
    """Prepare the HDF5 data file, build or reload the model, and train.

    Args:
        overwrite: when True, regenerate the data file and start a new
            model even if previous outputs exist on disk.
    """
    # convert input images into an hdf5 file (only if missing or overwriting)
    if overwrite or not os.path.exists(config["data_file"]):
        training_files, subject_ids = fetch_training_data_files(
            return_subject_ids=True)

        write_data_to_file(training_files,
                           config["data_file"],
                           image_shape=config["image_shape"],
                           subject_ids=subject_ids)
    # Handle stays open for the generators below; closed at the end.
    data_file_opened = open_data_file(config["data_file"])

    if not overwrite and os.path.exists(config["model_file"]):
        # Resume from the previously saved model.
        model = load_old_model(config["model_file"])
    else:
        # instantiate new model
        model = is_model(input_shape=config["input_shape"],
                         n_labels=config["n_labels"],
                         initial_learning_rate=config["initial_learning_rate"],
                         n_base_filters=config["n_base_filters"])

    # get training and testing generators
    train_generator, validation_generator, n_train_steps, n_validation_steps = get_training_and_validation_generators(
        data_file_opened,
        batch_size=config["batch_size"],
        data_split=config["validation_split"],
        overwrite=overwrite,
        validation_keys_file=config["validation_file"],
        training_keys_file=config["training_file"],
        n_labels=config["n_labels"],
        labels=config["labels"],
        patch_shape=config["patch_shape"],
        validation_batch_size=config["validation_batch_size"],
        validation_patch_overlap=config["validation_patch_overlap"],
        training_patch_start_offset=config["training_patch_start_offset"],
        permute=config["permute"],
        augment=config["augment"],
        skip_blank=config["skip_blank"],
        augment_flip=config["flip"],
        augment_distortion_factor=config["distort"])

    # run training
    train_model(model=model,
                model_file=config["model_file"],
                training_generator=train_generator,
                validation_generator=validation_generator,
                steps_per_epoch=n_train_steps,
                validation_steps=n_validation_steps,
                initial_learning_rate=config["initial_learning_rate"],
                learning_rate_drop=config["learning_rate_drop"],
                learning_rate_patience=config["patience"],
                early_stopping_patience=config["early_stop"],
                n_epochs=config["n_epochs"])
    data_file_opened.close()
Esempio n. 23
0
 def load_from_file(cls):
     """Build a KeyFile from the key map file.

     Raises:
         ErrorInKeyMap: if the file is empty, invalid JSON, or lacks a key.
     """
     with data.open_data_file(utils.KEY_MAP_FILE_NAME, utils.READ_MODE) as key_map:
         try:
             json_key_map = json.load(key_map)
             salt = utils.str_to_byte(json_key_map[JSON_SALT])
             waes_key = utils.str_to_byte(json_key_map[JSON_WAES_KEY])
             wmac_key = utils.str_to_byte(json_key_map[JSON_WMAC_KEY])
             return KeyFile(salt, waes_key, wmac_key)
         except (ValueError, KeyError) as err:
             # Chain the cause so the original JSON/key error stays visible.
             raise ErrorInKeyMap('key.map might be empty or the data is not a valid JSON format.') from err
Esempio n. 24
0
 def delete_file(self, filename):
     """Remove the first file-map entry matching *filename*, then persist."""
     with data.open_data_file(utils.FILE_MAP_FILE_NAME,
                              utils.READ_WRITE_MODE) as file_map:
         file_data = json.load(file_map)
         files = file_data[JSON_FILES]
         # Delete by index; breaking immediately keeps iteration safe.
         for index, record in enumerate(files):
             if record[JSON_FILE_NAME] == filename:
                 del files[index]
                 break
         file_map.seek(0)
         json.dump(file_data, file_map, indent=2, sort_keys=True)
         file_map.truncate()
Esempio n. 25
0
 def get_leaf_ids(self, data_ids):
     """Return (data_id, leaf_id) pairs for each id in *data_ids*.

     Pairs come back in position-map order.  Assumes data ids are unique
     within the position map (matching the original's behavior).
     """
     remaining = set(data_ids)  # O(1) membership instead of O(n) list scans
     leaf_ids = []
     with data.open_data_file(utils.POSITION_MAP_FILE_NAME, utils.READ_MODE) as position_map:
         position_data = json.load(position_map)
         for entry in position_data:
             if entry[JSON_DATA_ID] in remaining:
                 leaf_ids.append((entry[JSON_DATA_ID], entry[JSON_LEAF_ID]))
                 remaining.discard(entry[JSON_DATA_ID])
                 if not remaining:
                     # stop iterating when all leaf ids are found
                     break
         return leaf_ids
Esempio n. 26
0
 def update_leaf_id(self, data_id, is_in_cloud):
     """Adjust the sign of the leaf id stored for *data_id*.

     When *is_in_cloud* is true the leaf id is forced positive via abs();
     otherwise its sign is flipped — presumably negative marks data held
     locally (TODO confirm against callers).
     """
     with data.open_data_file(utils.POSITION_MAP_FILE_NAME, utils.READ_WRITE_MODE) as position_map:
         records = json.load(position_map)
         for record in records:
             if record[JSON_DATA_ID] != data_id:
                 continue
             leaf = record[JSON_LEAF_ID]
             record[JSON_LEAF_ID] = abs(leaf) if is_in_cloud else -leaf
             break
         position_map.seek(0)
         json.dump(records, position_map, indent=2, sort_keys=True)
         position_map.truncate()
Esempio n. 27
0
 def delete_data_ids(self, data_ids):
     """Delete every position-map entry whose data id is in *data_ids*."""
     remaining = set(data_ids)  # O(1) membership instead of O(n) list scans
     with data.open_data_file(utils.POSITION_MAP_FILE_NAME, utils.READ_WRITE_MODE) as position_map:
         position_data = json.load(position_map)
         # Iterate a snapshot so removing from the live list is safe.
         for entry in list(position_data):
             if entry[JSON_DATA_ID] in remaining:
                 position_data.remove(entry)
                 remaining.discard(entry[JSON_DATA_ID])
                 if not remaining:
                     # stop iterating when all data ids are deleted
                     break
         position_map.seek(0)
         json.dump(position_data, position_map, indent=2, sort_keys=True)
         position_map.truncate()
Esempio n. 28
0
 def update_leaf_id(self, data_id, is_in_cloud):
     """Adjust the sign of the leaf id stored for *data_id*.

     When *is_in_cloud* is true the leaf id is forced positive via abs();
     otherwise its sign is flipped — presumably negative marks data held
     locally (TODO confirm against callers).
     """
     with data.open_data_file(utils.POSITION_MAP_FILE_NAME,
                              utils.READ_WRITE_MODE) as position_map:
         records = json.load(position_map)
         for record in records:
             if record[JSON_DATA_ID] != data_id:
                 continue
             leaf = record[JSON_LEAF_ID]
             record[JSON_LEAF_ID] = abs(leaf) if is_in_cloud else -leaf
             break
         position_map.seek(0)
         json.dump(records, position_map, indent=2, sort_keys=True)
         position_map.truncate()
Esempio n. 29
0
 def get_leaf_ids(self, data_ids):
     """Return (data_id, leaf_id) pairs for each id in *data_ids*.

     Pairs come back in position-map order.  Assumes data ids are unique
     within the position map (matching the original's behavior).
     """
     remaining = set(data_ids)  # O(1) membership instead of O(n) list scans
     leaf_ids = []
     with data.open_data_file(utils.POSITION_MAP_FILE_NAME,
                              utils.READ_MODE) as position_map:
         position_data = json.load(position_map)
         for entry in position_data:
             if entry[JSON_DATA_ID] in remaining:
                 leaf_ids.append((entry[JSON_DATA_ID], entry[JSON_LEAF_ID]))
                 remaining.discard(entry[JSON_DATA_ID])
                 if not remaining:
                     # stop iterating when all leaf ids are found
                     break
         return leaf_ids
Esempio n. 30
0
 def add_file(self, file_name, file_size, data_items, data_id_counter):
     """Record a new file entry and refresh the stored data-id counter."""
     with data.open_data_file(utils.FILE_MAP_FILE_NAME,
                              utils.READ_WRITE_MODE) as file_map:
         file_data = json.load(file_map)
         new_entry = {
             JSON_FILE_NAME: file_name,
             JSON_FILE_SIZE: file_size,
             JSON_DATA_ITEMS: data_items,
         }
         file_data[JSON_FILES].append(new_entry)
         file_data[JSON_ID_COUNTER] = data_id_counter
         # Rewrite the map in place.
         file_map.seek(0)
         json.dump(file_data, file_map, indent=2, sort_keys=True)
         file_map.truncate()
Esempio n. 31
0
 def delete_data_ids(self, data_ids):
     """Delete every position-map entry whose data id is in *data_ids*."""
     remaining = set(data_ids)  # O(1) membership instead of O(n) list scans
     with data.open_data_file(utils.POSITION_MAP_FILE_NAME,
                              utils.READ_WRITE_MODE) as position_map:
         position_data = json.load(position_map)
         # Iterate a snapshot so removing from the live list is safe.
         for entry in list(position_data):
             if entry[JSON_DATA_ID] in remaining:
                 position_data.remove(entry)
                 remaining.discard(entry[JSON_DATA_ID])
                 if not remaining:
                     # stop iterating when all data ids are deleted
                     break
         position_map.seek(0)
         json.dump(position_data, position_map, indent=2, sort_keys=True)
         position_map.truncate()
def main(overwrite=False):
    """Open the prepared HDF5 data file, build/reload the model, and train.

    Args:
        overwrite: when False and a saved model exists, its weights are
            loaded into the freshly built model before training.

    NOTE(review): relies on module-level globals ``config`` and ``args``
    (``args.opt``) — confirm both are defined before this runs.
    """
    # convert input images into an hdf5 file
    data_file_opened = open_data_file(config["data_file"])

    model = model_3d_1(input_shape=config["input_shape"],
                       initial_learning_rate=config["initial_learning_rate"],
                       opt=args.opt
                       )
    if not overwrite and os.path.exists(config["model_file"]):
        print('load model !!')
        # Loads saved weights into the already-instantiated model.
        load_old_model(config["model_file"], model)


    # get training and testing generators
    train_generator, validation_generator, n_train_steps, n_validation_steps = get_training_and_validation_generators(
        data_file_opened,
        batch_size=config["batch_size"],
        data_split=config["validation_split"],
        overwrite=overwrite,
        validation_keys_file=config["validation_file"],
        training_keys_file=config["training_file"],
        patch_shape=config["patch_shape"],
        validation_batch_size=config["validation_batch_size"],
        validation_patch_overlap=config["validation_patch_overlap"],
        training_patch_start_offset=config["training_patch_start_offset"],
        )

    # run training
    train_model(model=model,
                model_file=config["model_file"],
                training_generator=train_generator,
                validation_generator=validation_generator,
                steps_per_epoch=n_train_steps,
                validation_steps=n_validation_steps,
                initial_learning_rate=config["initial_learning_rate"],
                learning_rate_drop=config["learning_rate_drop"],
                learning_rate_patience=config["patience"],
                early_stopping_patience=config["early_stop"],
                n_epochs=config["n_epochs"])
    data_file_opened.close()
Esempio n. 33
0
 def __init__(self):
     """Create an empty position map on first use; keep an existing one."""
     if data.file_exists(utils.POSITION_MAP_FILE_NAME):
         return
     with data.open_data_file(utils.POSITION_MAP_FILE_NAME,
                              utils.WRITE_MODE) as position_map:
         # An empty tuple serialises to an empty JSON array.
         json.dump((), position_map, indent=2)
Esempio n. 34
0
def test_open_GOLD_2():
    """Smoke test: the GOLD_2.csv data file opens without raising."""
    handle = data.open_data_file("GOLD_2.csv")
    with handle:
        pass
Esempio n. 35
0
def test_open_MTGOXUSD():
    """Smoke test: the MTGOXUSD.csv data file opens without raising."""
    handle = data.open_data_file("MTGOXUSD.csv")
    with handle:
        pass
Esempio n. 36
0
 def get_id_counter(self):
     """Return the current data-id counter stored in the file map."""
     with data.open_data_file(utils.FILE_MAP_FILE_NAME,
                              utils.READ_MODE) as file_map:
         return json.load(file_map)[JSON_ID_COUNTER]
Esempio n. 37
0
 def get_leaf_id(self, data_id):
     """Return the leaf id recorded for *data_id*, or None if absent."""
     with data.open_data_file(utils.POSITION_MAP_FILE_NAME, utils.READ_MODE) as position_map:
         records = json.load(position_map)
         # next() with a default mirrors the original linear scan,
         # including the implicit None when no entry matches.
         return next(
             (rec[JSON_LEAF_ID] for rec in records if rec[JSON_DATA_ID] == data_id),
             None,
         )
Esempio n. 38
0
 def __init__(self):
     """Create an empty position map on first use; keep an existing one."""
     if data.file_exists(utils.POSITION_MAP_FILE_NAME):
         return
     with data.open_data_file(utils.POSITION_MAP_FILE_NAME, utils.WRITE_MODE) as position_map:
         # An empty tuple serialises to an empty JSON array.
         json.dump((), position_map, indent=2)
Esempio n. 39
0
 def count_data_ids(self):
     """Return how many entries the position map currently holds."""
     with data.open_data_file(utils.POSITION_MAP_FILE_NAME,
                              utils.READ_MODE) as position_map:
         return len(json.load(position_map))
Esempio n. 40
0
 def __init__(self):
     """Create an empty file map on first use; leave an existing one alone."""
     if data.file_exists(utils.FILE_MAP_FILE_NAME):
         return
     with data.open_data_file(utils.FILE_MAP_FILE_NAME, utils.WRITE_MODE) as file_map:
         # Empty tuple serialises to an empty JSON array of files.
         empty_map = {JSON_FILES: (), JSON_ID_COUNTER: 0}
         json.dump(empty_map, file_map, indent=2)
Esempio n. 41
0
 def get_id_counter(self):
     """Return the current data-id counter stored in the file map."""
     with data.open_data_file(utils.FILE_MAP_FILE_NAME, utils.READ_MODE) as file_map:
         return json.load(file_map)[JSON_ID_COUNTER]
Esempio n. 42
0
 def get_file_len(self, filename):
     """Return the stored size of *filename*, or None if it is not mapped."""
     with data.open_data_file(utils.FILE_MAP_FILE_NAME, utils.READ_MODE) as file_map:
         file_data = json.load(file_map)
         # Same linear scan as before, expressed as next() with a default.
         return next(
             (f[JSON_FILE_SIZE] for f in file_data[JSON_FILES] if f[JSON_FILE_NAME] == filename),
             None,
         )
Esempio n. 43
0
def test_open_data_file():
    """open_data_file must delegate to open() with the data/ path prefix."""
    sentinel = object()
    with mock.patch("builtins.open", return_value=sentinel) as mock_open:
        result = data.open_data_file("file name")
        assert result is sentinel
        mock_open.assert_called_with("data/file name", mode="r")
Esempio n. 44
0
def test_open_NASDAQ_AAPL():
    """Smoke test: the NASDAQ_AAPL.csv data file opens without raising."""
    handle = data.open_data_file("NASDAQ_AAPL.csv")
    with handle:
        pass
Esempio n. 45
0
 def count_data_ids(self):
     """Return how many entries the position map currently holds."""
     with data.open_data_file(utils.POSITION_MAP_FILE_NAME, utils.READ_MODE) as position_map:
         return len(json.load(position_map))
Esempio n. 46
0
config[
    "permute"] = True  # data shape must be a cube. Augments the data by permuting in various directions
config["distort"] = None  # switch to None if you want no distortion
config["augment"] = config["flip"] or config["distort"]
config[
    "skip_blank"] = False  # if True, then patches without any target will be skipped

config["data_file"] = os.path.abspath("brats_data_isensee_2018.h5")
config["model_file"] = os.path.abspath("isensee_2018_model.h5")
# NOTE(review): key "weigths_file" is misspelled ("weights") — confirm all
# readers use this exact key before correcting it.
config["weigths_file"] = os.path.abspath("isensee_2018_weights.h5")
config["training_file"] = os.path.abspath("isensee_training_ids.pkl")
config["validation_file"] = os.path.abspath("isensee_validation_ids.pkl")
config[
    "overwrite"] = False  # If True, will overwrite previously written files. If False, will use previously written files.

data_file_opened = open_data_file(config["data_file"])

# Create training and validation generators.
# NOTE(review): this call appears truncated in this excerpt (no closing
# parenthesis visible) — verify against the original script.
train_generator, validation_generator, n_train_steps, n_validation_steps = get_training_and_validation_generators(
    data_file_opened,
    batch_size=config["batch_size"],
    data_split=config["validation_split"],
    overwrite=config["overwrite"],
    validation_keys_file=config["validation_file"],
    training_keys_file=config["training_file"],
    n_labels=config["n_labels"],
    labels=config["labels"],
    validation_batch_size=config["validation_batch_size"],
    permute=config["permute"],
    augment=config["augment"],
    skip_blank=config["skip_blank"],
Esempio n. 47
0
 def create_cloud_map(self):
     """Create a fresh cloud map holding a placeholder token and init=False."""
     with data.open_data_file(utils.CLOUD_MAP_FILE_NAME, utils.WRITE_MODE) as cloud_map:
         initial_state = {JSON_TOKEN: TOKEN_PLACEHOLDER, JSON_INIT: False}
         json.dump(initial_state, cloud_map, indent=2)