def main():
    """Build entry point: validate config, resolve boards/sketches, build all."""
    # Configuration checks have to be done before anything else.
    check_config()
    if args.clean:
        deleteFolder(root_output_dir)
    load_core_config()
    if args.list != "sketch":
        find_board()
    if args.list == "board":
        for fqbn in board_fqbn:
            print(fqbn)
        print(f"{len(board_fqbn)} board(s) available")
        quit()
    manage_inos()
    if args.list == "sketch":
        for name in sketch_list:
            print(name)
        print(f"{len(sketch_list)} sketches found")
        quit()
    if core_config:
        parse_core_config()
    createFolder(build_output_dir)
    createFolder(output_dir)
    build_all()
    deleteFolder(build_output_dir)
    # Exit status mirrors the number of failed builds (0 == success).
    sys.exit(nb_build_failed)
def checkConfig():
    """Load the JSON path configuration and resolve all destination paths
    relative to the local repository path.

    Creates a default config file (and the local repo folder) when the
    configuration file does not exist yet.
    """
    global repo_local_path
    global hal_dest_path
    global cmsis_dest_path
    global system_dest_path
    global md_HAL_path
    global md_CMSIS_path
    global stm32_def

    config_file_path = script_path / path_config_filename
    if config_file_path.is_file():
        try:
            # "with" guarantees the handle is closed even if parsing fails
            # (the old code leaked the handle on error).
            with open(config_file_path, "r") as config_file:
                path_config = json.load(config_file)
            # Common path
            repo_local_path = Path(path_config["REPO_LOCAL_PATH"])
            hal_dest_path = repo_local_path / repo_core_name / hal_dest_path
            md_HAL_path = hal_dest_path / md_HAL_path
            cmsis_dest_path = repo_local_path / repo_core_name / cmsis_dest_path
            system_dest_path = repo_local_path / repo_core_name / system_dest_path
            md_CMSIS_path = cmsis_dest_path / md_CMSIS_path
            stm32_def = (
                repo_local_path
                / repo_core_name
                / "cores"
                / "arduino"
                / "stm32"
                / stm32_def
            )
        except IOError:
            # Bug fix: the previous message referenced the file object,
            # which is unbound when open() itself raised (NameError).
            print(f"Failed to open {config_file_path}!")
    else:
        create_config(config_file_path)
    createFolder(repo_local_path)
def knnCall(exp_config):
    """
    Evaluate the representation using knn and compute knn-mse on a set of images.
    :param exp_config: (dict)
    """
    folder_path = '{}/NearestNeighbors/'.format(exp_config['log-folder'])
    createFolder(folder_path, "NearestNeighbors folder already exist")
    printGreen("\nEvaluating the state representation with KNN")

    cmd_args = ['--seed', str(exp_config['knn-seed']),
                '--n-samples', str(exp_config['knn-samples'])]
    # Optional boolean flags, forwarded only when enabled in the config.
    for flag in ('ground-truth', 'multi-view', 'relative-pos'):
        if exp_config.get(flag, False):
            cmd_args.append('--{}'.format(flag))
    # Valued options that are always forwarded.
    for opt in ('log-folder', 'n-neighbors', 'n-to-plot'):
        cmd_args.extend(['--{}'.format(opt), str(exp_config[opt])])

    ok = subprocess.call(['python', '-m', 'evaluation.knn_images'] + cmd_args)
    printConfigOnError(ok, exp_config, "knnCall")
def createSystemFiles(serie):
    """Create the system files for one STM32 serie: the generated HAL conf
    wrapper header, the CMSIS ``system_stm32*.c`` template and the default
    HAL configuration header.

    :param serie: (str) serie name, e.g. ``"F4"``
    """
    print(f"Creating system files for {serie}...")
    system_serie = system_dest_path / f"STM32{serie}xx"
    createFolder(system_serie)

    # Generate stm32yyxx_hal_conf_file.h
    stm32_hal_conf_file = system_serie / stm32yyxx_hal_conf_file.replace(
        "yy", serie.lower())
    # "with" closes the file even if the template rendering raises.
    with open(stm32_hal_conf_file, "w", newline="\n") as out_file:
        out_file.write(stm32yyxx_hal_conf_file_template.render(serie=serie))

    # Copy system_stm32*.c file from CMSIS device template
    system_stm32_path = cmsis_dest_path / f"STM32{serie}xx" / "Source" / "Templates"
    filelist = sorted(system_stm32_path.glob("system_stm32*.c"))
    file_number = len(filelist)
    if file_number:
        if file_number == 1:
            file_number = 0
        else:
            menu_list = "Several system stm32 files exist:\n"
            for index, fp in enumerate(filelist):
                menu_list += f"{index}. {fp.name}\n"
            menu_list += "Your choice: "
            file_number = -1
            # Loop until the user enters a valid index. Bug fix: the old
            # loop accepted negative indices and crashed (ValueError) on
            # non-numeric input.
            while not 0 <= file_number < len(filelist):
                try:
                    file_number = int(input(menu_list))
                except ValueError:
                    file_number = -1
        copyFile(filelist[file_number], system_serie)
    else:
        print("No system files found!")

    # Copy stm32yyxx_hal_conf_default.h file
    hal_conf_base = f"stm32{serie.lower()}xx_hal_conf"
    hal_serie_path = hal_dest_path / f"STM32{serie}xx_HAL_Driver"
    hal_conf_file = hal_serie_path / "Inc" / f"{hal_conf_base}_template.h"
    hal_conf_default = system_serie / f"{hal_conf_base}_default.h"
    copyFile(hal_conf_file, hal_conf_default)
def createGroundTruthFolder(exp_config):
    """
    Create folder and save exp_config in order to compute knn-mse
    :param exp_config: (dict)
    :return: (dict)
    """
    folder = "logs/{}/baselines/ground_truth/".format(exp_config['data-folder'])
    createFolder(folder, "")
    # Record where the ground-truth baseline logs live and flag the config.
    exp_config.update({'log-folder': folder, 'ground-truth': True})
    saveConfig(exp_config)
    return exp_config
def main():
    """Connect to MySQL and dump DDL (tables, procedures, functions) and
    INSERT statements into the configured output folder, timing the run."""
    db = MySQLdb.connect(host=credentials.get("host"),
                         user=credentials.get("user"),
                         passwd=credentials.get("passwd"),
                         db=credentials.get("db"))
    try:
        cursor = db.cursor()
        # Create output directory
        utils.createFolder(filename.get("folderout"))
        initialDate = datetime.datetime.now()
        genDDLTables(cursor)
        genDDLProcedures(cursor)
        genDDLFunctions(cursor)
        genStatementsInserts(cursor)
        # Bug fix: the Python 2 print statement is a SyntaxError on
        # Python 3; print() emits the same space-separated output.
        print("[FINISH]", (datetime.datetime.now() - initialDate), "total time.")
    finally:
        # Bug fix: the connection was previously never closed.
        db.close()
def saveFile(frame_origin, frame, left, top, right, bottom):
    """Save the original, annotated and cropped frames into a dated output
    folder, then hand the three file paths to the LPR service."""
    folder_name = utils.getDateStr()
    file_name = utils.getDateTime()
    out_folder = os.path.join(folder_out, folder_name)
    # Create the dated output folder if it does not exist yet.
    utils.createFolder(out_folder)

    origin_file = os.path.join(out_folder, "{}.jpeg".format(file_name))
    out_file = os.path.join(out_folder, "{}_detect.jpeg".format(file_name))
    crop_file = os.path.join(out_folder, "{}_crop.jpeg".format(file_name))

    cv.imwrite(origin_file, frame_origin)
    cv.imwrite(out_file, frame)
    # Crop the detection region from the untouched original frame.
    cv.imwrite(crop_file, frame_origin[top:bottom, left:right])

    print("Saved: " + origin_file)
    lpr_ai4thai(origin_file, out_file, crop_file)
def getLogFolderName(exp_config):
    """
    Create experiment name using experiment config and current time.
    It also try to create the experiment folder.
    It returns both full path to the log folder and experiment_name
    :param exp_config: (dict)
    :return: (str, str)
    """
    date = datetime.datetime.now().strftime("%y-%m-%d_%Hh%M_%S")
    model_str = "_{}_".format(exp_config['model-type'])
    srl_str = "ST_DIM{}".format(exp_config['state-dim'])
    losses = exp_config["losses"]
    # Join a list of losses into one tag. Bug fix: the old identity test
    # `losses is not str()` was True for any non-empty string, so
    # "_".join() would split such a string into its characters.
    if not isinstance(losses, str):
        losses = "_".join(losses)

    experiment_name = "{}{}{}_{}".format(date, model_str, srl_str, losses)
    printBlue("\nExperiment: {}\n".format(experiment_name))
    log_folder = "logs/{}/{}".format(exp_config['data-folder'], experiment_name)
    createFolder(log_folder, "Experiment folder already exist")
    return log_folder, experiment_name
def create_output_log_tree():
    """Initialise the build log file and create the per-board output folders."""
    # Write the log header in a single buffered call.
    with open(log_file, "w") as log:
        header = (
            f"{build_separator}\nStarts "
            + time.strftime("%A %d %B %Y %H:%M:%S ")
            + f"\nLog will be available here:\n{output_dir.resolve()}\n"
            + f"{build_separator}\n"
        )
        log.write(header)
    # One output and one build folder per board.
    for board in board_fqbn:
        createFolder(output_dir / board / bin_dir)
        createFolder(output_dir / board)
        createFolder(build_output_dir / board)
parser.add_argument('--training-set-size', type=int, default=-1,
                    help='Limit size of the training set (default: -1)')
parser.add_argument('--state-dim', type=int, default=3, help='State dimension')

# Python 2/3 compatible input()
input = getInputBuiltin()
args = parser.parse_args()
DISPLAY_PLOTS = not args.no_display_plots
plot_script.INTERACTIVE_PLOT = DISPLAY_PLOTS
args.data_folder = parseDataFolder(args.data_folder)
args.method = "pca"

log_folder = "logs/{}/baselines/{}".format(args.data_folder, getModelName(args))
createFolder(log_folder, "{} folder already exist".format(args.method))
folder_path = '{}/NearestNeighbors/'.format(log_folder)
createFolder(folder_path, "NearestNeighbors folder already exist")
saveExpConfig(args, log_folder)
print('Log folder: {}'.format(log_folder))

print('Loading data ... ')
rewards = np.load("data/{}/preprocessed_data.npz".format(
    args.data_folder))['rewards']
images_path = np.load("data/{}/ground_truth.npz".format(
    args.data_folder))['images_path']

# Optionally truncate the training set.
if args.training_set_size > 0:
    limit = args.training_set_size
    images_path = images_path[:limit]
# Else, there is only one dimension
n_actions = 1

# Try to convert old python 2 format (bytes paths need decoding)
try:
    images_path = np.array(
        [path.decode("utf-8") for path in ground_truth['images_path']])
except AttributeError:
    images_path = ground_truth['images_path']

# Building the experiment config file
exp_config = buildConfig(args)

if args.log_folder == "":
    # Automatically create dated log folder for configs
    createFolder("logs/{}".format(exp_config['data-folder']),
                 "Dataset log folder already exist")
    # Check that the dataset is already preprocessed
    log_folder, experiment_name = getLogFolderName(exp_config)
    args.log_folder = log_folder
else:
    experiment_name = "{}_{}".format(args.model_type, losses)

exp_config['log-folder'] = args.log_folder
exp_config['experiment-name'] = experiment_name
exp_config['n_actions'] = n_actions
exp_config['continuous_action'] = args.continuous_action
exp_config['multi-view'] = args.multi_view
if "dae" in losses:
    exp_config['occlusion-percentage'] = args.occlusion_percentage
print('Log folder: {}'.format(args.log_folder))
# -*- coding: utf-8 -*-
import utils
import os
from shutil import copyfile

# %% train test split
tree_dir = 'dataset/pix2code/dsl'
tree_files = [f for f in os.listdir(tree_dir)
              if os.path.isfile(os.path.join(tree_dir, f))]
img_dir = 'dataset/pix2code/png'
img_files = [f for f in os.listdir(img_dir)
             if os.path.isfile(os.path.join(img_dir, f))]

train_test_split_rate = 0.8


def _copy_split(files, src_dir, dest_name):
    """Copy *files* from *src_dir* into bin/<dest_name>, creating the folder."""
    out_dir = os.path.join('bin', dest_name)
    utils.createFolder(out_dir)
    for fname in files:
        copyfile(os.path.join(src_dir, fname), os.path.join(out_dir, fname))


# Use the declared split rate instead of the previously hard-coded 0.8.
n_tree_train = int(len(tree_files) * train_test_split_rate)
n_img_train = int(len(img_files) * train_test_split_rate)

_copy_split(tree_files[:n_tree_train], tree_dir, 'tree_train')
_copy_split(tree_files[n_tree_train:], tree_dir, 'tree_eval')
_copy_split(img_files[:n_img_train], img_dir, 'img_train')
# Bug fix: the image eval split was previously sliced with len(tree_files),
# which drops or duplicates images whenever the two folders differ in size.
_copy_split(img_files[n_img_train:], img_dir, 'img_eval')
parser.add_argument('--log-folder', type=str, default='',
                    help='Override the default log-folder')
args = parser.parse_args()
args.cuda = not args.no_cuda and th.cuda.is_available()
DISPLAY_PLOTS = not args.no_display_plots
plot_script.INTERACTIVE_PLOT = DISPLAY_PLOTS
N_EPOCHS = args.epochs
BATCH_SIZE = args.batch_size
args.data_folder = parseDataFolder(args.data_folder)

# Derive a default log folder from the model name when none was given.
log_folder = args.log_folder
if log_folder == '':
    name = getModelName(args)
    log_folder = "logs/{}/baselines/{}".format(args.data_folder, name)

createFolder(log_folder, "supervised folder already exist")
folder_path = '{}/NearestNeighbors/'.format(log_folder)
createFolder(folder_path, "NearestNeighbors folder already exist")
print('Log folder: {}'.format(log_folder))

print('Loading data ... ')
training_data, ground_truth, true_states, _ = loadData(args.data_folder)
rewards, episode_starts = training_data['rewards'], training_data['episode_starts']
images_path = ground_truth['images_path']
state_dim = true_states.shape[1]

if args.training_set_size > 0:
    limit = args.training_set_size
def _record_serie(periph_dict, peripheral, lower, legacy):
    """Register a (serie, legacy) entry for *peripheral* in *periph_dict*.

    A Legacy variant replaces the entry just recorded for the same serie;
    the key is popped and re-inserted to mirror the original dict ordering.
    """
    if peripheral in periph_dict:
        if legacy:
            # Change legacy value if exists
            current_list = periph_dict.pop(peripheral)
            if current_list[-1][0] == lower:
                current_list.pop()
            current_list.append((lower, legacy))
            periph_dict[peripheral] = current_list
        else:
            periph_dict[peripheral].append((lower, legacy))
    else:
        periph_dict[peripheral] = [(lower, legacy)]


def wrap(arg_core, arg_cmsis, log):
    """Generate the stm32yyxx HAL/LL wrapper sources and headers, the CMSIS
    startup/system files and the CMSIS DSP source wrappers.

    :param arg_core: core path argument forwarded to checkConfig()
    :param arg_cmsis: CMSIS path argument forwarded to checkConfig()
    :param log: (bool) print progress messages when True
    :return: 0 on completion
    """
    global stm32_series
    # check config have to be done first
    checkConfig(arg_core, arg_cmsis)
    stm32_series = genSTM32List(HALDrivers_path, "")

    # Remove old files
    deleteFolder(HALoutSrc_path)
    createFolder(HALoutSrc_path)
    deleteFolder(LLoutSrc_path)
    createFolder(LLoutSrc_path)
    deleteFolder(LLoutInc_path)
    createFolder(LLoutInc_path)
    if CMSIS_Startupfile.is_file():
        CMSIS_Startupfile.unlink()

    all_ll_h_list = []
    # key: peripheral, value: list of (serie, legacy) tuples
    ll_h_dict = {}
    ll_c_dict = {}
    hal_c_dict = {}

    # Search all files for each series
    for serie in stm32_series:
        src = HALDrivers_path / f"STM32{serie}xx_HAL_Driver" / "Src"
        inc = HALDrivers_path / f"STM32{serie}xx_HAL_Driver" / "Inc"
        if src.exists():
            if log:
                print(f"Generating for {serie}...")
            lower = serie.lower()
            # Search stm32yyxx_[hal|ll]*.c files
            for fp in src.glob(f"**/stm32{lower}xx_*.c"):
                legacy = fp.parent.name == "Legacy"
                fn = fp.name
                found = peripheral_c_regex.match(fn)
                if "_template" in fn:
                    continue
                peripheral = found.group(1) if found else "hal"
                if "_ll_" in fn:
                    _record_serie(ll_c_dict, peripheral, lower, legacy)
                else:
                    _record_serie(hal_c_dict, peripheral, lower, legacy)
            # Search stm32yyxx_ll_*.h files
            for fp in inc.glob(f"stm32{lower}xx_ll_*.h"):
                fn = fp.name
                found = peripheral_h_regex.match(fn)
                if not found:
                    continue
                peripheral = found.group(1)
                # Amend all LL header list
                all_ll_h_list.append(fn.replace(lower, "yy"))
                if peripheral in ll_h_dict:
                    ll_h_dict[peripheral].append(lower)
                else:
                    ll_h_dict[peripheral] = [lower]

    # Generate stm32yyxx_hal_*.c files
    for key, value in hal_c_dict.items():
        if key == "hal":
            filepath = HALoutSrc_path / c_file.replace("zz", "hal").replace(
                "_ppp", "")
        else:
            filepath = HALoutSrc_path / c_file.replace("zz", "hal").replace(
                "ppp", key)
        with open(filepath, "w", newline="\n") as out_file:
            out_file.write(
                c_file_template.render(periph=key, type="hal", serieslist=value))
    # Generate stm32yyxx_ll_*.c files
    for key, value in ll_c_dict.items():
        filepath = LLoutSrc_path / c_file.replace("zz", "ll").replace(
            "ppp", key)
        with open(filepath, "w", newline="\n") as out_file:
            out_file.write(
                c_file_template.render(periph=key, type="ll", serieslist=value))
    # Generate stm32yyxx_ll_*.h files
    for key, value in ll_h_dict.items():
        filepath = LLoutInc_path / ll_h_file.replace("ppp", key)
        with open(filepath, "w", newline="\n") as out_file:
            out_file.write(ll_h_file_template.render(periph=key, serieslist=value))
    if log:
        print("done")

    # Filter all LL header file
    all_ll_h_list = sorted(set(all_ll_h_list))
    # Generate the all LL header file
    with open(LLoutInc_path / all_ll_h_file, "w", newline="\n") as all_ll_file:
        all_ll_file.write(
            all_ll_header_file_template.render(ll_header_list=all_ll_h_list))

    # CMSIS startup files
    printCMSISStartup(log)
    # system stm32 files
    printSystemSTM32(log)

    # CMSIS DSP C source files
    if not CMSIS_path.is_dir():
        print(f"Could not find {CMSIS_path}")
        print("CMSIS DSP generation skipped.")
    else:
        # Delete all subfolders
        deleteFolder(CMSIS_DSP_outSrc_path / "*")
        dirlist = []
        for path_object in CMSIS_DSPSrc_path.glob("**/*"):
            if path_object.is_file() and path_object.name.endswith(".c"):
                dirlist.append(path_object.parent.name)
        dirlist = sorted(set(dirlist))
        for dn in dirlist:
            fdn = CMSIS_DSP_outSrc_path / dn
            if not fdn.is_dir():
                createFolder(fdn)
                with open(fdn / f"{dn}.c", "w", newline="\n") as out_file:
                    # Bug fix: the rendered template was previously written
                    # to the already-closed all_ll_file handle, leaving the
                    # generated DSP .c file empty.
                    out_file.write(dsp_file_template.render(dsp_path=dn))
    return 0