def main(params=None):
    """Build train/test DataGenerators for the Polyvore dataset.

    Parameters
    ----------
    params : dict, optional
        Overrides for the generator settings (e.g. 'batch_size',
        'n_classes', 'shuffle').  Missing keys fall back to the
        defaults built from ``Config`` and the dataset.
        NOTE(review): the original code rebound ``params`` to a local
        dict, silently discarding the caller's argument; overrides are
        now honoured.

    Returns
    -------
    tuple
        ``(train_generator, test_generator)``.  The original built both
        generators and dropped them (implicit ``return None``); they are
        now returned so callers can actually use them.
    """
    data = polyvore_dataset()
    transforms = data.get_data_transforms()
    X_train, X_test, y_train, y_test, n_classes = data.create_dataset()

    # Pack each split with its matching transform pipeline.
    train_set = (X_train, y_train, transforms['train'])
    test_set = (X_test, y_test, transforms['test'])
    dataset_size = {'train': len(y_train), 'test': len(y_test)}

    # Defaults first, then caller-supplied overrides on top (bug fix:
    # previously the parameter was shadowed and ignored entirely).
    generator_params = {
        'batch_size': Config['batch_size'],
        'n_classes': n_classes,
        'shuffle': True,
    }
    if params:
        generator_params.update(params)

    train_generator = DataGenerator(train_set, dataset_size, generator_params)
    test_generator = DataGenerator(test_set, dataset_size, generator_params)
    return train_generator, test_generator
from tensorflow.keras.layers import MaxPooling2D, Dense, Dropout, Input, Conv2D, Flatten from data import polyvore_dataset, DataGenerator, PredictDataGenerator from tensorflow.keras.utils import plot_model from tensorflow.keras.models import Model import matplotlib.pyplot as plt from utils import Config import tensorflow as tf import numpy as np from tensorflow.keras import regularizers from sklearn.preprocessing import LabelEncoder if __name__ == '__main__': # data generators dataset = polyvore_dataset() transforms = dataset.get_data_transforms() X_train, X_test, y_train, y_test, n_classes, le_dictionary = dataset.create_dataset( ) if Config['debug']: train_set = (X_train[:100], y_train[:100], transforms['train']) test_set = (X_test[:100], y_test[:100], transforms['test']) dataset_size = {'train': 100, 'test': 100} else: train_set = (X_train, y_train, transforms['train']) test_set = (X_test, y_test, transforms['test']) dataset_size = {'train': len(y_train), 'test': len(y_test)} params = { 'batch_size': Config['batch_size'], 'n_classes': n_classes, 'shuffle': True