# Steps per epoch for the full set and for the 70/30 train/validation split;
# cast to int because Keras expects integer step counts.
STEPS_PER_EPOCH = int(np.ceil(len(train_paths) / BATCH_SIZE))
STEPS_PER_EPOCH_TRAIN = int(np.ceil(len(train_paths) / BATCH_SIZE * 0.7))
STEPS_PER_EPOCH_VAL = int(np.ceil(len(train_paths) / BATCH_SIZE * 0.3))
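
# With a repeating input pipeline these counts bound each epoch. They would
# later be passed to model.fit together with the train_set/val_set built below,
# e.g. (illustrative only; EPOCHS is a placeholder, not defined in this script):
#   model.fit(train_set, epochs=EPOCHS,
#             steps_per_epoch=STEPS_PER_EPOCH_TRAIN,
#             validation_data=val_set,
#             validation_steps=STEPS_PER_EPOCH_VAL)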

# Unique class names per label type (np.unique already returns a sorted array).
CLASS_NAMES_MAKE = np.unique(make)
CLASS_NAMES_MODEL = np.unique(model)
CLASS_NAMES_YEAR = np.unique(year)
CLASS_NAMES_CAR = np.unique(car)

os.chdir(dataset_dir)

# Let tf.data choose prefetch/parallelism buffer sizes dynamically.
AUTOTUNE = tf.data.experimental.AUTOTUNE

# Dataset of image file paths, consumed by data_prep.prepare_ds below.
path_list = tf.data.Dataset.from_tensor_slices(train_paths)

labeled_ds = data_prep.prepare_ds(path_list, CLASS_NAMES_MAKE,
                                  CLASS_NAMES_MODEL, CLASS_NAMES_YEAR,
                                  IMG_WIDTH, one_output='make')
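
# For reference, a path -> (image, one-hot make label) mapping of the kind
# prepare_ds is assumed to apply typically looks like the sketch below
# (standard tf.data image-loading pattern). _example_process_path is only an
# illustration and is not used by the pipeline above.
def _example_process_path(file_path):
    parts = tf.strings.split(file_path, os.sep)
    label = parts[-2] == CLASS_NAMES_MAKE                # boolean one-hot over make names
    img = tf.io.read_file(file_path)
    img = tf.image.decode_jpeg(img, channels=3)
    img = tf.image.convert_image_dtype(img, tf.float32)  # scale to [0, 1]
    img = tf.image.resize(img, [IMG_WIDTH, IMG_WIDTH])
    return img, label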

# 70/30 split of the labelled dataset into training and validation subsets.
train_size = math.ceil(0.7 * len(train_paths))
train_ds = labeled_ds.take(train_size)
val_ds = labeled_ds.skip(train_size)

train_set = data_prep.prepare_for_training(train_ds, batch_size=BATCH_SIZE)
val_set = data_prep.prepare_for_training(val_ds, batch_size=BATCH_SIZE)
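
# prepare_for_training is assumed to follow the usual tf.data performance recipe
# (cache -> shuffle -> repeat -> batch -> prefetch). A minimal sketch of such a
# helper, for reference only; the project's actual version lives in data_prep:
def _example_prepare_for_training(ds, batch_size, shuffle_buffer_size=1000):
    ds = ds.cache()                         # keep decoded images around after the first pass
    ds = ds.shuffle(buffer_size=shuffle_buffer_size)
    ds = ds.repeat()                        # loop indefinitely; epochs are bounded by steps_per_epoch
    ds = ds.batch(batch_size)
    ds = ds.prefetch(buffer_size=AUTOTUNE)  # overlap preprocessing with training
    return ds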

mirrored_strategy = tf.distribute.MirroredStrategy()
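# Variables created inside mirrored_strategy.scope() (as in params_search below)
# are replicated across all visible GPUs for synchronous data-parallel training.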

def params_search(trial):
    # Objective for the hyperparameter search; `trial` supplies suggested values.
    with mirrored_strategy.scope():

        neuron_1 = 457
        kernel_1 = 7