    # initialize the encoder optimizer (optim and SummaryWriter are assumed
    # imported earlier, e.g. torch.optim and torch.utils.tensorboard)
    opt_enc = optim.Adam(enc.parameters(), lr=LEARNING_RATE, betas=(0.0, 0.9))
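    # betas=(0.0, 0.9) matches the Adam settings commonly used in WGAN-GP setups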
    padding_epoch = len(str(enc_epochs))
    padding_i = len(str(len(loader)))
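    # field widths so the epoch/batch counters stay aligned in the printout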
    writer_enc = SummaryWriter(f"logs_kernel_3/enc")

    for epoch in range(enc_epochs):
        # Target labels not needed! <3 unsupervised
        for batch_idx, (real, _) in enumerate(loader):
            real = real.to(device)
            cur_batch_size = real.shape[0]
            opt_enc.zero_grad()
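            # encode the image and reconstruct it through the generator;
            # only the encoder is updated (gen is not part of opt_enc)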
            z = enc(real)
            fake = gen(z)

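            # intermediate critic activations for the feature-matching term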
            real_features = critic.forward_features(real)
            fake_features = critic.forward_features(fake)

            # izif loss (f-AnoGAN): pixel reconstruction error plus a
            # kappa-weighted critic-feature residual
            loss_imgs = criterion(fake, real)
            loss_features = criterion(fake_features, real_features)
            enc_loss = loss_imgs + kappa * loss_features

            enc_loss.backward()
            opt_enc.step()
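        # log the last batch's encoder loss once per epoch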
        writer_enc.add_scalar('enc_loss', enc_loss.item(), epoch)
        print(f"[Epoch {epoch:{padding_epoch}}/{enc_epochs}] "
              f"[Batch {batch_idx:{padding_i}}/{len(loader)}] "