Example no. 1
def create_sI_run_filelist(glob_file='/path/to/core50_root_dir/*/*/*',
                           dest_bp='/insert/your/path/sI_inc/',
                           dest_cum_bp='/insert/your/path/sI_cum/',
                           all_sess=range(11),
                           all_objs=range(50),
                           cumulative=True,
                           train_sess=None,
                           test_sess=None,
                           batch_order=None):
    """ Create the per-batch filelists for the sI scenario and,
        optionally, their cumulative versions.

    Args:
        glob_file (str): glob pattern matching every image of the dataset.
        dest_bp (str): destination dir for the incremental filelists.
        dest_cum_bp (str): destination dir for the cumulative filelists.
        all_sess (iterable): ids of all the sessions (currently unused,
            kept for signature compatibility with the other scenarios).
        all_objs (iterable): ids of all the objects to include.
        cumulative (bool): if True, also write the cumulative filelists.
        train_sess (list): training session ids; defaults to the standard
            CORe50 split [0, 1, 3, 4, 5, 7, 8, 10].
        test_sess (list): test session ids; defaults to [2, 6, 9].
        batch_order (list): permutation of range(len(train_sess)) giving
            the order in which the training sessions become batches.
    """
    # Avoid mutable default arguments: materialize the defaults here so
    # accidental mutation can never leak across calls.
    if train_sess is None:
        train_sess = [0, 1, 3, 4, 5, 7, 8, 10]
    if test_sess is None:
        test_sess = [2, 6, 9]
    if batch_order is None:
        batch_order = list(range(len(train_sess)))

    # Reorder the training sessions according to the requested batch
    # order. (The previous `[-1] * 8` pre-fill could silently leave -1
    # session ids when batch_order was shorter than 8.)
    train_sess = [train_sess[batch_idx] for batch_idx in batch_order]

    # Loading all the file lists divided by session
    filelist_all_sess = load_filelist_per_sess(glob_file)

    # One training batch filelist per (reordered) session
    for i, sess in enumerate(train_sess):
        create_filelist(dest_bp + "train_batch_" + str(i).zfill(2),
                        filelist_all_sess, [sess], all_objs)

    # Creating test filelist
    create_filelist(dest_bp + "test", filelist_all_sess, test_sess, all_objs)

    # Cumulative version: batch i contains batches 0..i concatenated.
    if cumulative:
        all_lines = []
        for batch_id in range(len(train_sess)):
            with open(
                    dest_bp + 'train_batch_' + str(batch_id).zfill(2) +
                    '_filelist.txt', 'r') as f:
                all_lines += f.readlines()
            with open(
                    dest_cum_bp + 'train_batch_' + str(batch_id).zfill(2) +
                    '_filelist.txt', 'w') as f:
                f.writelines(all_lines)
        shutil.copy(dest_bp + "test_filelist.txt", dest_cum_bp)
Example no. 2
def create_sII_run_filelist(
    glob_file='/path/to/core50_root_dir/*/*/*',
    dest_bp='/insert/your/path/sII_inc/',
    dest_cum_bp='/insert/your/path/sII_cum/',
    all_sess=range(11),
    all_objs=range(50),
    change_labels=True,
    cumulative=True,
    train_sess=None,
    test_sess=None,
    batch_order=None):
    """ Create the per-batch filelists for the sII scenario and,
        optionally, their cumulative versions.

    Args:
        glob_file (str): glob pattern matching every image of the dataset.
        dest_bp (str): destination dir for the incremental filelists.
        dest_cum_bp (str): destination dir for the cumulative filelists.
        all_sess (iterable): ids of all the sessions (currently unused:
            the batch contents are computed analytically below).
        all_objs (iterable): ids of all the objects (currently unused,
            kept for signature compatibility).
        change_labels (bool): if True, remap labels so that classes get
            contiguous ids in order of first appearance.
        cumulative (bool): if True, also write the cumulative filelists.
        train_sess (list): training session ids; defaults to
            [0, 1, 3, 4, 5, 7, 8, 10].
        test_sess (list): test session ids; defaults to [2, 6, 9].
        batch_order (list): permutation of range(9) giving the order of
            the batches (entry 0 is ignored: batch 0 always comes first).
    """
    # Avoid mutable default arguments: materialize the defaults here.
    if train_sess is None:
        train_sess = [0, 1, 3, 4, 5, 7, 8, 10]
    if test_sess is None:
        test_sess = [2, 6, 9]
    if batch_order is None:
        batch_order = list(range(9))

    # Loading the filelists divided by sessions
    filelist_all_sess = load_filelist_per_sess(glob_file)

    # Independent inner lists (`[[]] * 9` would alias one list 9 times).
    objs = [[] for _ in range(9)]
    objs_test = []

    # Here the creation of the batches (which class to choose for
    # each of them) is **independent** of the external seed. This means
    # that the units are static throughout the runs while only their
    # order can change. This is the same as for the NI scenario where the
    # content of the batches is fixed.

    # First batch: the first object (id multiple of 5) of each category.
    objs[0] = [i * 5 for i in range(10)]
    objs_test += objs[0][:]

    # Incremental batches: batch b adds object ceil(b / 2) of the last
    # five categories (even b) or of the first five (odd b).
    # math.ceil replaces round(): Python 3 rounds half-to-even
    # (round(0.5) == 0), which would break every odd batch. The sibling
    # Python 3 version of this function already uses math.ceil.
    for batch_id in range(1, 9):
        if batch_id % 2 == 0:
            objs[batch_id] = [i * 5 + int(math.ceil(batch_id / 2.0))
                              for i in range(5, 10)]
        else:
            objs[batch_id] = [i * 5 + int(math.ceil(batch_id / 2.0))
                              for i in range(5)]

    # The first batch stays the same regardless of the order of the
    # incremental batches.
    app = [[] for _ in range(9)]
    app[0] = objs[0][:]
    for i, batch_idx in enumerate(batch_order[1:]):
        app[i + 1] = objs[batch_idx][:]
        objs_test += objs[batch_idx][:]
    objs = app

    # Since the freezeweights strategy needs the encountered classes to
    # be in adjacent neurons the labels are changed for this task (i.e.
    # o1 does not necessarily get label 0).
    # label_map must exist even when change_labels is False, otherwise
    # the create_filelist() calls below raise NameError.
    # (assumes create_filelist treats label_map=None as "no remapping",
    # consistent with its no-label_map calls elsewhere — TODO confirm)
    label_map = None
    if change_labels:
        label_map = {label: i for i, label in enumerate(objs_test)}
        print("Label map:", label_map)

    print("obj train:", len(objs), sorted(objs))

    for batch_id in range(9):
        create_filelist(dest_bp + "train_batch_" + str(batch_id).zfill(2),
                        filelist_all_sess, train_sess, objs[batch_id],
                        label_map=label_map)

    print("obj test:", len(objs_test), sorted(objs_test))

    create_filelist(dest_bp + "test", filelist_all_sess,
                    test_sess, objs_test, label_map=label_map)

    # Cumulative version: batch i contains batches 0..i concatenated.
    if cumulative:
        all_lines = []
        for batch_id in range(len(batch_order)):
            with open(dest_bp + 'train_batch_' +
                              str(batch_id).zfill(2) + '_filelist.txt',
                      'r') as f:
                all_lines += f.readlines()
            with open(dest_cum_bp + 'train_batch_' +
                              str(batch_id).zfill(2) + '_filelist.txt',
                      'w') as f:
                f.writelines(all_lines)
        shutil.copy(dest_bp + "test_filelist.txt", dest_cum_bp)
Example no. 3
def create_sIII_run_filelist(
    glob_file='data/core50_128x128/*/*/*',
    dest_bp='/insert/your/path/sIII_inc/',
    dest_cum_bp='/insert/your/path/sIII_cum/',
    all_sess=range(11),
    all_objs=range(50),
    cumulative=True,
    batch_order=None):
    """ Create the per-batch filelists for the sIII scenario and,
        optionally, their cumulative versions.

    Args:
        glob_file (str): glob pattern matching every image of the dataset.
        dest_bp (str): destination dir for the incremental filelists.
        dest_cum_bp (str): destination dir for the cumulative filelists.
        all_sess (iterable): ids of all the sessions (currently unused:
            the train/test split is hard-coded below).
        all_objs (iterable): ids of all the objects to include.
        cumulative (bool): if True, also write the cumulative filelists.
        batch_order (list): permutation of range(79) giving the order of
            the incremental units (entry 0 is ignored: the first batch is
            always the same; unit ids are 1-based).
    """
    # Avoid a mutable default argument for batch_order.
    if batch_order is None:
        batch_order = list(range(79))

    # Here the creation of the units (which obj in which sess)
    # is **independent** of the external seed. This means that the units
    # are static throughout the runs while only their order can change.
    # This is the same as for the NI and NC scenarios where the batches
    # are fixed. The caller's random state is restored in the `finally`
    # below even if an intermediate step fails.
    rnd_state = np.random.get_state()
    np.random.seed(0)
    try:
        filelist_all_sess = load_filelist_per_sess(glob_file)
        train_sess = [0, 1, 3, 4, 5, 7, 8, 10]
        test_sess = [2, 6, 9]

        # First batch: the first object (id multiple of 5) of each of
        # the 10 categories. The remaining objects are shuffled and split
        # into 8 groups of 5. (`obj_id` avoids shadowing builtin `id`.)
        first_ten_objs = [i * 5 for i in range(10)]
        objs_after_first_b = [obj_id for obj_id in all_objs
                              if obj_id not in first_ten_objs]

        np.random.shuffle(objs_after_first_b)
        objs_per_batch = list(np.reshape(objs_after_first_b, (8, 5)))

        # One unit per (training session, object group) pair.
        units = [(sess, objs_id)
                 for sess in train_sess
                 for objs_id in objs_per_batch]

        # The first 10 classes are re-seen in the later sessions, split
        # in two groups of 5.
        for sess in train_sess[1:]:
            units.append((sess, first_ten_objs[:5]))
            units.append((sess, first_ten_objs[5:]))

        # Shuffling units
        np.random.shuffle(units)

        print("Number of incremental units: ", len(units))
        print("----- Unit details (sess, objs) ------")
        for unit in units:
            print(unit)

        # Creating first batch: session 0, first ten objects.
        create_filelist(dest_bp + "train_batch_00", filelist_all_sess, [0],
                        first_ten_objs)

        # Creating test: all objects from the test sessions.
        create_filelist(dest_bp + "test", filelist_all_sess, test_sess,
                        all_objs)

        # Reordering incremental units based on batch order. Unit ids in
        # batch_order are 1-based (0 is the fixed first batch). The old
        # `[[]] * 78` pre-fill could silently leave empty placeholder
        # units when batch_order was short.
        units = [units[unit_id - 1] for unit_id in batch_order[1:]]

        # Creating the incremental batches, one per unit.
        for batch_id, (sess, unit_objs) in enumerate(units):
            create_filelist(dest_bp + "train_batch_" +
                            str(batch_id + 1).zfill(2),
                            filelist_all_sess, [sess], unit_objs)

        # Cumulative version: batch i contains batches 0..i concatenated.
        if cumulative:
            all_lines = []
            for batch_id in range(len(units) + 1):
                with open(dest_bp + 'train_batch_' +
                          str(batch_id).zfill(2) + '_filelist.txt',
                          'r') as f:
                    all_lines += f.readlines()
                with open(dest_cum_bp + 'train_batch_' +
                          str(batch_id).zfill(2) + '_filelist.txt',
                          'w') as f:
                    f.writelines(all_lines)
            shutil.copy(dest_bp + "test_filelist.txt", dest_cum_bp)
    finally:
        # Resetting previous rnd state
        np.random.set_state(rnd_state)
Example no. 4
def create_sII_run_filelist(glob_file='data/core50_128x128/*/*/*',
                            dest_bp='data/sII_inc/',
                            dest_cum_bp='data/sII_cum/',
                            cumulative=False,
                            train_sess=[0, 1, 3, 4, 5, 7, 8, 10],
                            test_sess=[2, 6, 9],
                            batch_order=[x for x in range(9)]):
    """ Given some parameters, it creates the batches filelist and
        eventually the cumulative ones. """

    # Load the file lists, grouped by session.
    filelist_all_sess = load_filelist_per_sess(glob_file)

    # The batch contents are computed analytically and therefore do not
    # depend on the external seed: the units are static throughout the
    # runs, only their order can change (same as the NI scenario).

    # First batch: the first object of each of the 10 categories.
    base_objs = [cat * 5 for cat in range(10)]

    # Incremental batches: batch b adds object ceil(b / 2) of the last
    # five categories (even b) or of the first five (odd b).
    per_batch = [base_objs]
    for b in range(1, 9):
        cats = range(5, 10) if b % 2 == 0 else range(5)
        offset = int(math.ceil(b / 2.0))
        per_batch.append([cat * 5 + offset for cat in cats])

    # Reorder the incremental batches; the first batch always stays
    # first, whatever batch_order says.
    objs = [per_batch[0][:]]
    objs += [per_batch[b][:] for b in batch_order[1:]]

    # The test set covers every class, in order of appearance.
    objs_test = [obj for batch in objs for obj in batch]

    print("obj train:", len(objs), sorted(objs))

    # One training filelist per batch.
    for batch_id, batch_objs in enumerate(objs):
        create_filelist(dest_bp + "train_batch_" + str(batch_id).zfill(2),
                        filelist_all_sess, train_sess, batch_objs)

    print("obj test:", len(objs_test), sorted(objs_test))

    create_filelist(dest_bp + "test", filelist_all_sess, test_sess, objs_test)

    # Cumulative version: batch i is the concatenation of batches 0..i.
    if cumulative:
        seen_lines = []
        for batch_id in range(len(batch_order)):
            fname = 'train_batch_' + str(batch_id).zfill(2) + '_filelist.txt'
            with open(dest_bp + fname, 'r') as src:
                seen_lines += src.readlines()
            with open(dest_cum_bp + fname, 'w') as dst:
                dst.writelines(seen_lines)
        shutil.copy(dest_bp + "test_filelist.txt", dest_cum_bp)