# Ops for (re)initializing the two dataset iterators. Running `training_init_op`
# in a session points `train_iterator` at the training tf.data pipeline;
# `validation_init_op` does the same for validation. These are typically run
# once at the start of every epoch/eval pass.
training_init_op = train_iterator.make_initializer(tr_data.data)
validation_init_op = valid_iterator.make_initializer(val_data.data)

# TF placeholders for graph input and output.
# x: batch of images — fixed shape (batch_size, 227, 227, 3), i.e. AlexNet's
#    expected 227x227 RGB input. NOTE(review): fixing batch_size in the shape
#    means partial final batches cannot be fed — confirm the input pipeline
#    always yields full batches.
x = tf.placeholder(tf.float32, [batch_size, 227, 227, 3])
# y: one-hot (or soft) label batch of shape (batch_size, num_classes).
y = tf.placeholder(tf.float32, [batch_size, num_classes])
# keep_prob: scalar dropout keep probability (1.0 at eval, <1.0 while training).
keep_prob = tf.placeholder(tf.float32)

# Build the AlexNet graph on top of the input placeholder. Dropout is
# controlled by the `keep_prob` placeholder; the final layer is sized to
# `num_classes`.
model = AlexNet(x, keep_prob, num_classes)

# Op(s) that load pretrained weights into the non-retrained layers.
# NOTE(review): presumably returns assign ops to be run once in a session
# after variable initialization — confirm against the AlexNet class.
load_op = model.load_initial_weights_ops()

# Alias the model's final fully-connected layer (pre-softmax scores) as the
# logits tensor used by the loss below.
logits = model.fc8

# Loss: softmax cross-entropy between the model's logits and the label
# placeholder, averaged over the batch.
with tf.name_scope("cross_ent"):
    per_example_xent = tf.nn.softmax_cross_entropy_with_logits(logits=logits,
                                                               labels=y)
    loss = tf.reduce_mean(per_example_xent)

# Collect trainable weight variables, keyed by their enclosing layer scope.
# Fixes: the original built `var_list` with a pointless identity comprehension
# (use list() for a plain copy) and recomputed `var.name.split('/')` on every
# access inside the loop.
# NOTE(review): `weights` must be a dict defined earlier in the file — confirm.
# NOTE(review): assumes every trainable variable name has the form
# "<layer>/<param>:0" (at least one '/'); names without a '/' would raise
# IndexError, same as in the original code.
var_list = list(tf.trainable_variables())
for var in var_list:
    scope_name, param_name = var.name.split('/')[0], var.name.split('/')[1]
    # Only keep the kernel/'weights' tensors (skips biases etc.).
    if 'weights' in param_name:
        weights[scope_name] = var

# Train op
with tf.name_scope("train"):