def create_model(self, brand=None):
    """Build and compile the CNN shoe classifier.

    When ``brand`` is None the softmax output spans every label in
    ``self.labels``; otherwise it spans only the shoe models mapped to
    that brand in ``self.brand_shoes_map``.
    """
    layers = tf.keras.layers
    dropout_rate = 0.2

    # Size the output layer first: all labels, or just this brand's models.
    if brand is None:
        num_labels = len(self.labels)
    else:
        num_labels = len(self.brand_shoes_map[detailing.label_brand(brand)])

    model = tf.keras.models.Sequential([
        layers.Conv2D(64, (4, 4), activation='relu',
                      input_shape=(self.imageSize[0], self.imageSize[1], 1)),
        layers.MaxPooling2D((3, 3)),
        layers.Conv2D(128, (3, 3), activation='relu'),
        layers.MaxPooling2D((2, 2)),
        layers.Conv2D(128, (3, 3), activation='relu'),
        layers.Flatten(),
        layers.Dense(128, activation='relu'),
        layers.Dropout(dropout_rate),
        layers.Dense(num_labels, activation='softmax'),
    ])

    model.summary()
    model.compile(optimizer='adam',
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])
    return model
def create_testing_data(self):
    """Collect test samples for every label, shuffle them, and save to disk.

    Walks ``self.test_data_dir + query`` for each label, converts each file
    via ``create_np_info``, then shuffles and persists the accumulated
    samples to ./npy_data/test_whole_data.npy.
    """
    for query in self.labels:
        brand = detailing.label_brand(query)
        path = self.test_data_dir + query
        for filename in listdir(path):
            info = self.create_np_info(filename, path, brand)
            # Fix: previously None results were appended verbatim,
            # poisoning the saved array. Skip them, matching the guard
            # used by the other create_testing_data variant in this project.
            if info is not None:
                self.test_data.append(info)
    np.random.shuffle(self.test_data)
    np.save("./npy_data/test_whole_data.npy", self.test_data)
def create_testing_data(self):
    """Gather per-shoe test samples, shuffle them, and persist to .npy."""
    for shoe_name in self.shoe_names:
        label = brand_detailing.label_brand(shoe_name)
        directory = self.test_data_dir + shoe_name
        for image_file in listdir(directory):
            sample = self.create_np_info(image_file, directory, label)
            # create_np_info may yield None (e.g. an unusable file); drop those.
            if sample is None:
                continue
            self.test_data.append(sample)
    np.random.shuffle(self.test_data)
    np.save("./npy_data/test_whole_data.npy", self.test_data)
def run_all_networks(self):
    """Train and evaluate one network per known brand, printing a summary.

    Filters the shared train/test sets down to each brand's samples,
    delegates training to ``self.run_network``, and prints per-brand
    loss/accuracy at the end. Network states are written under
    ./network_states.
    """
    # copy to avoid unwanted mutations in original data-set
    all_train_data = self.train_data.copy()
    all_test_data = self.test_data.copy()
    brand_results = {}
    # create folder for storing all brand network states
    network_paths = "./network_states"
    try:
        os.mkdir(network_paths)
    except OSError:
        # NOTE(review): this swallows *every* OSError (permission denied,
        # bad path, ...), not just "directory already exists", and the bare
        # print() emits an empty line. Consider
        # os.makedirs(network_paths, exist_ok=True) instead.
        print()
    # get all data under one brand, execute network for that data
    for brand in detailing.known_brands:
        # NOTE(review): likely a debugging leftover — this restricts the
        # run to the "nike" brand only, so "run_all_networks" currently
        # trains exactly one network. Remove to process every brand.
        if brand != "nike":
            continue
        if detailing.label_brand(brand) not in self.brand_shoes_map.keys():
            continue
        train_data = []
        test_data = []
        print(brand)
        # Samples appear to carry a brand index at position 2
        # (used to look up the brand name in detailing.known_brands).
        for i in range(len(all_train_data)):
            train = all_train_data[i]
            if detailing.known_brands[train[2]] == brand:
                train_data.append(train)
        for i in range(len(all_test_data)):
            test = all_test_data[i]
            if detailing.known_brands[test[2]] == brand:
                test_data.append(test)
        np.random.shuffle(train_data)
        np.random.shuffle(test_data)
        test_loss, test_acc = self.run_network(brand, train_data, test_data, network_paths)
        brand_results.update({brand: [test_loss, test_acc]})
    for brand in brand_results.keys():
        print("BRAND: " + brand)
        print("Test loss: " + str(brand_results[brand][0]))
        print("Test accuracy: " + str(brand_results[brand][1]) + "\n")
def create_brand_shoes_map(labels):
    """Group shoe-model labels by brand and pickle the mapping.

    Labels whose brand cannot be identified (``label_brand`` returns -1)
    are reported and skipped. The resulting ``{brand: [models]}`` dict is
    printed, pickled to ``brand_shoe_map_path``, and returned.
    """
    brand_shoes_map = {}
    for model in labels:
        model_brand = detailing.label_brand(model)
        if model_brand == -1:
            print("ERROR: " + model + " (NO KNOWN BRAND IDENTIFIER)")
            continue
        # setdefault replaces the original get/append/update dance; the
        # trailing update() was redundant anyway, since the fetched list
        # was mutated in place and already stored in the dict.
        brand_shoes_map.setdefault(model_brand, []).append(model)
    print(brand_shoes_map)
    with open(brand_shoe_map_path, "wb") as map_file:
        pickle.dump(brand_shoes_map, map_file)
    return brand_shoes_map