def save_results_as_csv(self):
    FileOperations.save_file_with_fallback(
        save_method_or_function=self._write_results_to_csv_file,
        filename=FileOperations.apply_extension_to_filename(
            original_filename=self.save_file_name,
            file_extension='.csv'),
        fallback_filename=FileOperations.apply_extension_to_filename(
            original_filename=DefaultFilenames.SAVE_FALLBACK,
            file_extension='.csv'))
def save(self, filename=DefaultFilenames.MODEL_FILENAME):
    filename_with_extension = FileOperations.apply_extension_to_filename(
        original_filename=filename,
        file_extension=self.file_extension
    )
    FileOperations.save_file_with_fallback(
        save_method_or_function=self._dump_model_to_file_with_pickle,
        filename=filename_with_extension,
        fallback_filename=FileOperations.apply_extension_to_filename(
            original_filename=DefaultFilenames.MODEL_FALLBACK,
            file_extension=self.file_extension
        )
    )
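# --- Sketch (not from the project): how the two FileOperations helpers used in
# the save methods above could behave. Assumptions: apply_extension_to_filename
# appends the extension only when it is missing, and save_file_with_fallback
# retries the write under the fallback name if the first attempt raises an
# OSError. The real implementations may differ.
class FileOperationsSketch:

    @staticmethod
    def apply_extension_to_filename(original_filename, file_extension):
        # Leave the name untouched if it already carries the extension.
        if original_filename.endswith(file_extension):
            return original_filename
        return original_filename + file_extension

    @staticmethod
    def save_file_with_fallback(save_method_or_function, filename,
                                fallback_filename):
        # Try the requested filename first; retry with the fallback on
        # OS-level failures (invalid name, missing permissions, ...).
        try:
            save_method_or_function(filename)
        except OSError:
            save_method_or_function(fallback_filename)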
def save_results_boxplot(self, results_data_category, filename):
    PlotOperations.initialize_figure()
    self._add_default_boxplot_graph_settings()
    results_data = self._determine_results_data(results_data_category)
    labels = self._determine_plot_labels(results_data_category)
    filename_with_extension = FileOperations.apply_extension_to_filename(
        original_filename=filename,
        file_extension='.png')
    self._queue_boxplots(results_data, labels)
    FileOperations.save_file_with_fallback(
        save_method_or_function=PlotOperations.save_plots,
        filename=filename_with_extension,
        fallback_filename=FileOperations.apply_extension_to_filename(
            original_filename=DefaultFilenames.PLOT_FALLBACK,
            file_extension='.png'))
def convert(self, file_name):
    temp_dir = './temp_Dir'
    file_operations = FileOperations()
    file_operations.unzip_csar(file_name, temp_dir)
    entry_file_name = file_operations.read_tosca_meta_file(temp_dir)
    node_list, node_keys = self.get_nodes(entry_file_name, temp_dir)
    yaml, content = file_operations.read_file(Path(temp_dir, entry_file_name))
    updated_content = self.operation_connectTo(content, node_keys, node_list)
    updated_content = self.operaton_encryption(updated_content, node_keys)
    file_operations.write_file(yaml, updated_content,
                               Path(temp_dir, entry_file_name))
    modified = file_operations.zip_csar(file_name, temp_dir)
    return modified
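# --- Hypothetical usage of convert(): the tests further below exercise the same
# flow on 'DPP_Testing.csar' via ModifyConnectionType, so that class and file
# name are assumed here (imports omitted because the module path is not shown);
# the return value is whatever zip_csar() produced.
converter = ModifyConnectionType()
modified_csar = converter.convert('DPP_Testing.csar')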
def save_plots(filename):
    if PlotOperations.figure is None:
        PlotOperations.initialize_figure()
    filename_with_extension = FileOperations.apply_extension_to_filename(
        original_filename=filename,
        file_extension='.png'
    )
    PlotOperations._plot_queued_plots()
    plt.savefig(filename_with_extension, bbox_inches='tight')
    plt.close(PlotOperations.figure)
    PlotOperations.figure = None
    PlotOperations.clear_queued_plots()
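# --- Usage sketch: save_plots() expects plots to already be queued on
# PlotOperations (e.g. by the boxplot helper above). The filename below is a
# placeholder; '.png' is appended if missing, assuming apply_extension_to_filename
# behaves as its name suggests.
PlotOperations.save_plots('evaluation_boxplots')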
def __init__(self, evaluation_number, model_number):
    self.evaluation_number = evaluation_number
    self.model_number = model_number
    self.results = None
    self.results_filename = FileOperations.apply_extension_to_filename(
        original_filename=DefaultFilenames.evaluation_results_filename(
            evaluation_number=evaluation_number
        ),
        file_extension='.csv'
    )
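# --- Hypothetical construction (the class that owns this __init__ is not shown,
# so 'EvaluationResults' is purely a placeholder name):
#
#     evaluation = EvaluationResults(evaluation_number=1, model_number=2)
#     evaluation.results_filename  # DefaultFilenames.evaluation_results_filename(...) + '.csv'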
# Save the captured image and its bounding-box label file to the data folder.
datafolder = foldername + "data/"
cwd = os.getcwd()
newpath = cwd + "/" + datafolder
if not os.path.exists(newpath):
    os.makedirs(newpath)
imagefile = datafolder + filename + '.png'
textfile = open(datafolder + filename + '.txt', 'a')
file_operations.save_to_folder(textfile, imagefile, bounding_box, final)

# todo: get classes through GUI
classes = []
# interface = CLI()
file_operations = FileOperations()
motor = MotorControl()
camera = ImageCapture(RPI_CAMERA)
image_processor = ImageProcessing(camera.capture())
delay = 1 / 1000.0
# images = input("How many images do you want per category (5 categories)?")
images = 10000
STEPS_FOR_FULL_CIRCLE = 12360
steps = int(STEPS_FOR_FULL_CIRCLE / images)
classes = ["Banana"]  # , "Rolle"]
only_snippets = False
only_train_images = True

## Section for the configuration
import argparse
import sys

from file_operations import FileOperations

parser = argparse.ArgumentParser(
    description="Trim whitespace from all lines in the input file")
parser.add_argument("file_path", help="Full path to input file")

if __name__ == "__main__":
    # Show the full usage message when no path is given; sys.argv always
    # contains at least the script name, so the check must be against 2.
    if len(sys.argv) < 2:
        parser.print_help()
        sys.exit(-1)
    args = parser.parse_args()
    file_operations = FileOperations(args.file_path)
    file_operations.execute()
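# --- Sketch only: what FileOperations.execute() could do for this script,
# inferred from the argparse description above ("trim whitespace from all
# lines"); the project's real implementation may differ.
class WhitespaceTrimmerSketch:

    def __init__(self, file_path):
        self.file_path = file_path

    def execute(self):
        # Strip leading/trailing whitespace from every line and write the
        # result back to the same file.
        with open(self.file_path) as infile:
            trimmed_lines = [line.strip() for line in infile]
        with open(self.file_path, 'w') as outfile:
            outfile.write('\n'.join(trimmed_lines) + '\n')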
class FileParser(unittest.TestCase):

    def setUp(self):
        self.data_pipeline_parser = ModifyConnectionType()
        self.file_operations = FileOperations()
        self.data_encryption = DataEncryption()
        self.verify_connection_type = ValidateConnectionType()
        self.temp_dir = './temp_Dir'

    def test_encryption_decryption_password_mismatch(
            self, file_name='DPP_Testing.csar'):
        self.file_operations.unzip_csar(file_name, self.temp_dir)
        entry_file_name = self.file_operations.read_tosca_meta_file(
            self.temp_dir)
        node_list, node_keys = self.data_pipeline_parser.get_nodes(
            entry_file_name, self.temp_dir)
        yaml, content = self.file_operations.read_file(
            Path(self.temp_dir, entry_file_name))
        encrypt_decrypt_nodes = self.data_encryption.get_encrypt_decrypt_nodes(
            content, node_keys)
        shutil.rmtree(self.temp_dir)
        self.assertTrue(len(encrypt_decrypt_nodes) > 0)

    def test_duplicate_connections(self, file_name='DPP_Testing.csar'):
        self.file_operations.unzip_csar(file_name, self.temp_dir)
        entry_file_name = self.file_operations.read_tosca_meta_file(
            self.temp_dir)
        node_list, node_keys = self.data_pipeline_parser.get_nodes(
            entry_file_name, self.temp_dir)
        yaml, content = self.file_operations.read_file(
            Path(self.temp_dir, entry_file_name))
        self.verify_connection_type.get_host_connection_nodes(
            content, node_list)
        self.verify_connection_type.get_nodelist_to_edit(node_list)
        updated_content, duplicate_node_relationships = \
            self.verify_connection_type.delete_duplicate_connection_node(
                content)
        shutil.rmtree(self.temp_dir)
        self.assertTrue(len(duplicate_node_relationships) > 0)

    def test_wrong_connections(self, file_name='DPP_Testing.csar'):
        self.file_operations.unzip_csar(file_name, self.temp_dir)
        entry_file_name = self.file_operations.read_tosca_meta_file(
            self.temp_dir)
        node_list, node_keys = self.data_pipeline_parser.get_nodes(
            entry_file_name, self.temp_dir)
        yaml, content = self.file_operations.read_file(
            Path(self.temp_dir, entry_file_name))
        self.verify_connection_type.get_host_connection_nodes(
            content, node_list)
        self.verify_connection_type.get_nodelist_to_edit(node_list)
        updated_content, duplicate_node_relationships = \
            self.verify_connection_type.delete_duplicate_connection_node(
                content)
        updated_content = self.verify_connection_type.make_changes_remote_nodes(
            updated_content)
        updated_content = self.verify_connection_type.make_changes_local_nodes(
            updated_content)
        shutil.rmtree(self.temp_dir)
        self.assertTrue(
            (len(self.verify_connection_type.remote_nodes_to_change)
             + len(self.verify_connection_type.local_nodes_to_change)) > 0)
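# --- Standard unittest entry point for running this test module directly;
# assumes unittest, shutil, and Path are imported at the top of the file and
# that 'DPP_Testing.csar' is available in the working directory.
if __name__ == '__main__':
    unittest.main()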