def create_training_dataset(self, event):
    """Create a training dataset from the options selected in the GUI.

    Reads the shuffle index, training-set index, user-feedback choice and
    network/augmenter selections from the widgets, then dispatches to the
    matching deeplabcut routine: multi-animal (optionally pre-cropping the
    labeled images), plain single-animal, or model comparison.
    """
    num_shuffles = self.shuffle.GetValue()
    config_file = auxiliaryfunctions.read_config(self.config)
    trainindex = self.trainingindex.GetValue()
    # The radio box stores the string "Yes"/"No"; convert it to a bool.
    userfeedback = self.userfeedback.GetStringSelection() == "Yes"
    if config_file.get("multianimalproject", False):
        if self.cropandlabel.GetStringSelection() == "Yes":
            # crop_widgets rows are (label, text-ctrl) pairs; pull the numbers.
            n_crops, height, width = [
                int(text.GetValue()) for _, text in self.crop_widgets
            ]
            deeplabcut.cropimagesandlabels(
                self.config, n_crops, (height, width), userfeedback
            )
        # (Removed dead `random = False` — it was assigned and never used.)
        deeplabcut.create_multianimaltraining_dataset(
            self.config,
            num_shuffles,
            Shuffles=[self.shuffle.GetValue()],
            net_type=self.net_choice.GetValue(),
        )
    else:
        # Query the comparison choice once instead of twice.
        comparison = self.model_comparison_choice.GetStringSelection()
        if comparison == "No":
            deeplabcut.create_training_dataset(
                self.config,
                num_shuffles,
                Shuffles=[self.shuffle.GetValue()],
                userfeedback=userfeedback,
                net_type=self.net_choice.GetValue(),
                augmenter_type=self.aug_choice.GetValue(),
            )
        elif comparison == "Yes":
            deeplabcut.create_training_model_comparison(
                self.config,
                trainindex=trainindex,
                num_shuffles=num_shuffles,
                userfeedback=userfeedback,
                net_types=self.net_type,
                augmenter_types=self.aug_type,
            )
def create_training_dataset(self, event):
    """Build a training dataset according to the GUI's current selections.

    Dispatches to the multi-animal routine for multi-animal projects,
    otherwise creates either a plain training dataset or a model-comparison
    set of datasets, depending on the comparison radio choice.
    """
    num_shuffles = self.shuffle.GetValue()
    config_file = auxiliaryfunctions.read_config(self.config)
    trainindex = self.trainingindex.GetValue()
    # "Yes"/"No" radio string -> bool.
    userfeedback = self.userfeedback.GetStringSelection() == "Yes"

    # Multi-animal projects have a dedicated creation routine; handle and exit.
    if config_file.get("multianimalproject", False):
        deeplabcut.create_multianimaltraining_dataset(
            self.config,
            num_shuffles,
            Shuffles=[self.shuffle.GetValue()],
            net_type=self.net_choice.GetValue(),
        )
        return

    if self.model_comparison_choice.GetStringSelection() == "No":
        deeplabcut.create_training_dataset(
            self.config,
            num_shuffles,
            Shuffles=[self.shuffle.GetValue()],
            userfeedback=userfeedback,
            net_type=self.net_choice.GetValue(),
            augmenter_type=self.aug_choice.GetValue(),
        )
    if self.model_comparison_choice.GetStringSelection() == "Yes":
        deeplabcut.create_training_model_comparison(
            self.config,
            trainindex=trainindex,
            num_shuffles=num_shuffles,
            userfeedback=userfeedback,
            net_types=self.net_type,
            augmenter_types=self.aug_type,
        )
# NOTE(review): the leading ")" closes a call that begins above this chunk.
)
# Build the synthetic label table and save it in both formats DeepLabCut
# reads (CSV alongside an HDF5 copy).
df = pd.DataFrame(fake_data, index=index, columns=columns)
output_path = os.path.join(image_folder, f"CollectedData_{SCORER}.csv")
df.to_csv(output_path)
df.to_hdf(
    output_path.replace("csv", "h5"), "df_with_missing", format="table", mode="w"
)
print("Artificial data created.")
print("Checking labels...")
deeplabcut.check_labels(config_path, draw_skeleton=False)
print("Labels checked.")
print("Creating train dataset...")
deeplabcut.create_multianimaltraining_dataset(
    config_path, net_type=NET, crop_size=(200, 200)
)
print("Train dataset created.")
# Check the training image paths are correctly stored as arrays of strings
trainingsetfolder = auxiliaryfunctions.GetTrainingSetFolder(cfg)
# presumably 0.8 = train fraction and 1 = shuffle index — TODO confirm
# against the dataset created above.
datafile, _ = auxiliaryfunctions.GetDataandMetaDataFilenames(
    trainingsetfolder,
    0.8,
    1,
    cfg,
)
# The data file is pickled, not MATLAB: swap the .mat suffix for .pickle.
datafile = datafile.split(".mat")[0] + ".pickle"
with open(os.path.join(cfg["project_path"], datafile), "rb") as f:
    pickledata = pickle.load(f)
num_images = len(pickledata)
# NOTE(review): the leading ")" closes a call that begins above this chunk.
)
# Build the synthetic label table and save it in both formats DeepLabCut
# reads (CSV alongside an HDF5 copy).
df = pd.DataFrame(fake_data, index=index, columns=columns)
output_path = os.path.join(image_folder, f"CollectedData_{SCORER}.csv")
df.to_csv(output_path)
df.to_hdf(
    output_path.replace("csv", "h5"), "df_with_missing", format="table", mode="w"
)
print("Artificial data created.")
print("Checking labels...")
deeplabcut.check_labels(config_path, draw_skeleton=False)
print("Labels checked.")
print("Creating train dataset...")
deeplabcut.create_multianimaltraining_dataset(
    config_path, net_type=NET, crop_size=(200, 200)
)
print("Train dataset created.")
# Check the training image paths are correctly stored as arrays of strings
trainingsetfolder = auxiliaryfunctions.GetTrainingSetFolder(cfg)
# presumably 0.8 = train fraction and 1 = shuffle index — TODO confirm
# against the dataset created above.
datafile, _ = auxiliaryfunctions.GetDataandMetaDataFilenames(
    trainingsetfolder,
    0.8,
    1,
    cfg,
)
# The data file is pickled, not MATLAB: swap the .mat suffix for .pickle.
datafile = datafile.split(".mat")[0] + ".pickle"
with open(os.path.join(cfg["project_path"], datafile), "rb") as f:
    pickledata = pickle.load(f)
num_images = len(pickledata)
# Every image's "joints" entry must have exactly 3 elements.
assert all(len(pickledata[i]["joints"]) == 3 for i in range(num_images))
print("Editing pose config...")
# Save the synthetic labels in both formats DeepLabCut reads (CSV + HDF5).
df.to_csv(output_path)
df.to_hdf(
    output_path.replace("csv", "h5"), "df_with_missing", format="table", mode="w"
)
print("Artificial data created.")
print("Cropping and exchanging")
# Crop the labeled images without prompting the user for confirmation.
deeplabcut.cropimagesandlabels(config_path, userfeedback=False)
print("Checking labels...")
deeplabcut.check_labels(config_path, draw_skeleton=False)
print("Labels checked.")
print("Creating train dataset...")
deeplabcut.create_multianimaltraining_dataset(config_path, net_type=NET)
print("Train dataset created.")
print("Editing pose config...")
# presumably TRAIN_SIZE = train fraction and 1 = shuffle index — TODO confirm.
model_folder = auxiliaryfunctions.GetModelFolder(
    TRAIN_SIZE, 1, cfg, cfg["project_path"]
)
pose_config_path = os.path.join(model_folder, "train/pose_cfg.yaml")
# Training overrides: reduced scale, batch of 1, and save/display intervals
# tied to N_ITER so the run stays short.
edits = {
    "global_scale": 0.5,
    "batch_size": 1,
    "save_iters": N_ITER,
    "display_iters": N_ITER // 2,
    # "multi_step": [[0.001, N_ITER]],
}
deeplabcut.auxiliaryfunctions.edit_config(pose_config_path, edits)
print("Pose config edited.")