def _storeArticle(article):
    """Store a crawled article as JSON under BASE_PATH and log its link.

    :param article: Dict with at least a 'link' key (the article URL);
        the storage path under BASE_PATH mirrors the URL's topic path.
    :returns: True on success.
    """
    # Derive the storage path from the article's URL topic segments.
    path = re_sub('http://www.spiegel.de/', '', article['link']).split('/')
    filename = path.pop(-1)
    storePath = os_path_join(BASE_PATH, os_path_join(*path))
    # Create the topic directories when missing.
    if not os_path_exists(storePath):
        os_makedirs(storePath)
    # Write the article as JSON to its file.
    with open(os_path_join(storePath, filename), 'w') as o:
        json.dump(article, o)
    # Append the article link to the log.  Mode 'a' creates the file when
    # absent, so the old isfile()/'w'-fallback dance was redundant; the
    # 'with' block also guarantees the handle is closed.
    with open(os_path_join(BASE_PATH, 'article_log'), 'a') as log:
        log.write(article['link'] + '\n')
    return True
def __init__(self, scenario_uri, time_between_set):
    """
    Creator

    :type scenario_uri: string
    :param scenario_uri: location of the scenario. It can be a local file
        or an URI
    :param time_between_set: NOTE(review): currently ignored -- the fixed
        time between sets is always DEFAULT_TIME_BETWEEN_SET below.
        Confirm whether this parameter should be honoured.
    """
    self._scenario_uri = scenario_uri
    # NOTE(review): time_between_set is never read in this constructor.
    self._fixed_time_between_set = DEFAULT_TIME_BETWEEN_SET
    self._scenario_values = []
    self._num_of_aps = 0
    self._ap_list = None
    self._attenuators_insts = []
    self._line_number = 1  # starts by 1 because in RSSI file scenario starts from line 1
    self._save_line = []
    self._save_file = []
    # The log file lives next to this module under log_files/, stamped
    # with the current date and time.
    abs_path = os_path.abspath(os_path.dirname(__file__))
    log_path = os_path.join(abs_path, "log_files")
    self._abs_log_name = os_path.join(
        log_path,
        'HOST_UC_NAV3_' + str(datetime.now().strftime('%Y_%m_%d___%H_%M_%S')) + '.txt')
    # If logs folder doesn't exist, create it.
    if not os_path.exists(log_path):
        os_makedirs(log_path)
    self._abs_script_hostapd = os_path.join(abs_path, "Scripts_HOSTAP", 'edit_hostapd.sh')
    self._log = LogicDataDebug(None, self._abs_log_name)
def __init__(self, dataset_dir: str):
    """Initialise the handler and ensure the decoded-CSV directory exists."""
    super().__init__(dataset_dir)
    decoded_dir = os_path_join(self.original_format_dir, 'decoded')
    os_makedirs(decoded_dir, exist_ok=True)
    self.mask_handler = AirBus_Mask_Handler()
def session_file_start(self, duration_str):
    """Roll the per-day stats paths when the date changed, then start the
    temporary session event log with a "Session started" row.

    :param duration_str: duration text recorded in the CSV row.
    """
    today = datetime.now().strftime("%Y-%m-%d")
    if self.date_today != today:
        # Date rolled over: refresh the cached month/day sub-paths.
        self.subdir_month = datetime.now().strftime("%Y-%m")
        self.date_today = today
        self.subdir = path.join('.' + path.sep + 'stats', self.subdir_month)
        self.file_path_stats_today = path.join(
            '.' + path.sep + 'stats', self.subdir_month,
            self.date_today + '.csv')
        try:
            # exist_ok avoids the old exists()/makedirs() race.
            os_makedirs(self.subdir, exist_ok=True)
        except OSError:
            # Best effort, as before: failure is reported, not fatal.
            print("Can't create stats folder ")
    # Fresh session log ('w' truncates any previous session's events).
    logfile = path.join('.' + path.sep + 'stats', 'last_session_events.csv')
    with open(logfile, 'w', newline='') as myFile:
        writer = csv.writer(myFile, delimiter=',')
        writer.writerow([
            datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
            duration_str,
            "Session started"
        ])
def __to_save_img_seq(self, video_capture: VideoCapture, video_name: str):
    """Dump every frame of *video_capture* as JPEGs under
    ``im_seqs_dir/<video_name>/``, named ``000001_<video_name>.jpg`` etc.

    :raises Exception: if *video_name* is None (the target directory
        cannot be derived without it).
    """
    if video_name is None:
        # Fail fast: the original performed this check inside the read
        # loop, after video_name had already been passed to splitext, so
        # it could never actually fire.
        raise Exception(
            'VideoHandlerError: if video_to_image_sequence receives a "save_sequences=True" then "video_name" must also receive a value. '
        )
    # Strip any extension so the directory name is clean.
    video_name = os_path_splitext(video_name)[0]
    image_sequence_dir = os_path_join(self.im_seqs_dir, video_name)
    # Hoisted out of the loop: one idempotent creation is enough.
    os_makedirs(image_sequence_dir, exist_ok=True)
    frame_count = 0
    while video_capture.isOpened():
        ret, frame = video_capture.read()
        if not ret:
            break
        frame_name = '{0:06}'.format(frame_count + 1) + '_' + video_name + '.jpg'
        # Save image
        cv2_imwrite(os_path_join(image_sequence_dir, frame_name), frame)
        # Allow early abort with 'q' (mirrors OpenCV display loops).
        if cv2_waitKey(1) & 0xFF == ord('q'):
            break
        frame_count += 1
    # frame_count frames were written; the original printed i - 1, which
    # under-reported by one.
    print('Total frames of sequence read:', frame_count)
def entrypoint(args=None):
    """CLI entry point: parse *args*, ensure the output directory exists
    and run the analysis.

    :param args: argument list; defaults to ``sys.argv[1:]`` resolved at
        call time (the old ``args=sys_argv[1:]`` default was frozen at
        import time).
    """
    if args is None:
        args = sys_argv[1:]
    parser = build_parser()
    params = parser.parse_args(args)
    newpath = os_path.join(params.outdir)
    # exist_ok avoids the old exists()/makedirs() race.
    os_makedirs(newpath, exist_ok=True)
    # Select how the reaction is specified on the command line.
    if params.smarts:
        rxnInput = ['-smarts', params.rxn]
    elif params.smartsfile:
        rxnInput = ['-smartsfile', params.rxn]
    else:
        rxnInput = ['-rxn', params.rxn]
    pc = preLoad(params.datadir)
    analyse(rxnInput, params.tar, params.datadir, params.outdir + '/',
            params.outfile, params.d, params.host, NoMSA=params.NoMSA, pc=pc)
def test_config_load(tmp_path):
    """Validate that config_load merges YAML files across nested dirs."""
    from encommon.config import config_load
    from encommon.readwrite import writefile
    from os import makedirs as os_makedirs
    from os import path as os_path
    from yaml import dump as yaml_dump

    # Build the directory layout the loader will walk.
    os_makedirs(os_path.join(tmp_path, "folder"))

    # The merged structure expected back from config_load.
    expect = {"base": {"k": "v"}, "folder": {"file": {"subset": {"k": "v"}}}}

    # Seed the YAML files in the temporary tree.
    for parts, payload in (
            (("base.yml",), {"k": "v"}),
            (("folder", "file.subset.yml"), {"k": "v"})):
        writefile(os_path.join(tmp_path, *parts), yaml_dump(payload), truncate=True)

    # Parse the configuration and assert the merged values surfaced.
    config = config_load(tmp_path)
    assert config["base"]["k"] == "v"
    assert config["folder"]["file"]["subset"]["k"] == "v"
def _make_dir(filename):
    """Create the parent directory of *filename* when one is named and
    does not exist yet."""
    parent = os_path.dirname(filename)
    if parent == '' or os_path.exists(parent):
        return
    try:
        os_makedirs(parent)
    except OSError as exc:
        # Another process may create the directory between the check and
        # the makedirs call; only genuine failures are re-raised.
        if exc.errno != errno_EEXIST:
            raise
def makedir(self):
    """Create the data directory, save file and log file when missing."""
    directory = self.dir_path
    if not os_path_exists(directory):
        os_makedirs(directory)
        log_info("Made directory at: {0}".format(directory))
    if not os_path_exists(self.file_path):
        # No save file on disk yet -- start from the default structure.
        self.save_file = self._default_save_file()
    make_log_file(self.log_path, mode="w")
def makedir(self):
    """Ensure the directory, data file and log file all exist."""
    missing_dir = not os_path_exists(self.dir_path)
    if missing_dir:
        os_makedirs(self.dir_path)
        log_info("Made directory at: {0}".format(self.dir_path))
    missing_file = not os_path_exists(self.file_path)
    if missing_file:
        # Fall back to the default save structure when no file exists.
        self.save_file = self._default_save_file()
    make_log_file(self.log_path, mode="w")
def make(path):
    """Create directory *path* (and any missing parents).

    :returns: True when the directory was created, False on any OSError
        (e.g. it already exists or permissions are missing).
    """
    try:
        os_makedirs(path)
    except OSError:
        # Narrowed from a bare except: os.makedirs reports all failures
        # via OSError subclasses, and a bare except also swallowed
        # KeyboardInterrupt/SystemExit.
        return False
    return True
def __init__(self, tmpbase: str):
    """Create the standard working sub-directories under *tmpbase* and
    record each as an attribute of the same name."""
    base = pathlib.Path(tmpbase)
    names = (
        'repodir', 'repodir_s', 'repodir_updater',
        'repodir_updater_chk', 'reexecdir', 'updaterdir',
    )
    for name in names:
        target = base / name
        setattr(self, name, target)
        os_makedirs(target)
def processFilesContent(self):
    """
    This function processes all the input files and returns a checkboxes
    list to show to the user.
    @returns list of matched Metabolites (checkbox data objects)
    """
    # Make sure the per-job temporal cache directory exists.
    if not os_path.exists(self.getTemporalDir()):
        os_makedirs(self.getTemporalDir())
    omicSummary = None
    logging.info("CREATING THE TEMPORAL CACHE FOR JOB " + self.getJobID() + "...")
    KeggInformationManager().createTranslationCache(self.getJobID())
    try:
        logging.info("PROCESSING GENE BASED FILES...")
        for inputOmic in self.geneBasedInputOmics:
            [omicName, omicSummary] = self.parseGeneBasedFiles(inputOmic)
            logging.info(" * PROCESSED " + omicName + "...")
            inputOmic["omicSummary"] = omicSummary
        logging.info("PROCESSING GENE BASED FILES...DONE")

        logging.info("PROCESSING COMPOUND BASED FILES...")
        checkBoxesData = []
        for inputOmic in self.compoundBasedInputOmics:
            [omicName, checkBoxesData, omicSummary] = self.parseCompoundBasedFile(inputOmic, checkBoxesData)
            logging.info(" * PROCESSED " + omicName + "...")
            inputOmic["omicSummary"] = omicSummary
        # Remove repetitions and order alphabetically by title.
        checkBoxesData = unifyAndSort(checkBoxesData, lambda checkBoxData: checkBoxData.getTitle().lower())
        logging.info("PROCESSING COMPOUND BASED FILES...DONE")

        # Compress the matching results into the output directory and
        # clean the temporal files.
        # TODO: MOVE THIS CODE TO JOBINFORMATIONMANAGER
        logging.info("COMPRESSING RESULTS...")
        fileName = "mapping_results_" + self.getJobID()
        logging.info("OUTPUT FILES IS " + self.getOutputDir() + fileName)
        logging.info("TEMPORAL DIR IS " + self.getTemporalDir() + "/")
        self.compressDirectory(self.getOutputDir() + fileName, "zip", self.getTemporalDir() + "/")
        logging.info("COMPRESSING RESULTS...DONE")

        # Save the metabolites matching data to allow recovering the job.
        self.foundCompounds = checkBoxesData
        return checkBoxesData
    except Exception:
        # Bare raise preserves the original traceback; the old
        # 'raise ex' reset it and lost the failure location.
        raise
    finally:
        logging.info("REMOVING THE TEMPORAL CACHE FOR JOB " + self.getJobID() + "...")
        KeggInformationManager().clearTranslationCache(self.getJobID())
def make_dir(dir_path: str) -> None:
    """
    Make the directories recursively.

    :param dir_path: The path to that directory.
    :return: None.  A FileExistsError (a non-directory already occupies
        *dir_path*) is logged rather than raised.  (The old docstring
        claimed a True/False return, but the function never returned one.)
    """
    try:
        os_makedirs(dir_path, exist_ok=True)
    except FileExistsError:
        # Reachable despite exist_ok=True when dir_path exists but is a
        # regular file, not a directory.
        Logger.log([make_dir, f"{dir_path} exists. "])
def create_parents_dirs(file_path):
    '''Create all parents directories from provided file path
    (mkdir -p $file_path).  Failures are logged, not raised.'''
    try:
        parentdirpath = os_path.dirname(file_path)
        if parentdirpath:
            # exist_ok makes this race-free (two workers may create the
            # same parent concurrently); mode matches the old behaviour.
            # A bare filename (empty dirname) is now a silent no-op
            # instead of a logged makedirs('') failure.
            os_makedirs(parentdirpath, 0o775, exist_ok=True)
    except Exception as e:
        print_log(
            LOG.ERROR,
            "Can't create parents directories of {}. {}".format(
                file_path, str(e)))
def _loosedb_raw_object_write(loosedb, presumedhex: shahex, objloose: bytes):
    """Write the raw loose object *objloose* into *loosedb* under the hash
    *presumedhex*, creating any missing directories.

    NOTE(review): assumes *objloose* is already in the loose-object wire
    format the database expects -- confirm with callers.
    """
    # assert not loosedb.has_object(_hex2bin(presumedhex))
    # Resolve the on-disk path for this object inside the database.
    objpath = loosedb.db_path(loosedb.object_path(presumedhex))
    # assert not os_path_exists(objpath)
    os_makedirs(os_path_dirname(objpath), exist_ok=True)
    with _file_open_mkdirp(objpath) as f:
        f.write(objloose)
    # FIXME:
    #loosedb.update_cache(force=True)
    # Sanity check: the object must now be visible to the database.
    assert loosedb.has_object(_hex2bin(presumedhex))
def folder(directory):
    """
    Input: Directory to make folder
    Output: Folder created if it doesn't already exist.
            Returns 0 on failure, None otherwise.
    """
    try:
        if not os_path.exists(directory):
            os_makedirs(directory)
    except OSError:
        # Parenthesized print of a single argument works on both
        # Python 2 and 3 (the original bare print statement was
        # Python 2 only).
        print("Error Creating " + directory)
        return 0
def __init__(self, config_dct):
    """Set up an ICJudge: read the IC lag from config, prepare an empty
    result frame and create a per-test report directory.

    :param config_dct: configuration mapping; must contain "ic_lag".
    """
    super(ICJudge, self).__init__(config_dct)
    self._ic_lag = self._config_dct["ic_lag"]
    # Accumulator for per-ticker prediction/score rows.
    self._tdf = pd_DataFrame(
        data=None,
        columns=["ticker", "y_predict", "y_test", "date", "score"])
    # Each instantiation gets its own numbered report sub-directory.
    ICJudge.TEST_COUNT += 1
    self._this_path = p_join(ICJudge.REPORT_PATH, str(ICJudge.TEST_COUNT))
    if not os_exists(self._this_path):
        os_makedirs(self._this_path)
def work(self):
    """Rebuild the profiles directory and generate one profile per student."""
    # Start from a clean profiles directory.
    if os_path_exists(self.profilesDir):
        shutil_rmtree(self.profilesDir)
    os_makedirs(self.profilesDir)
    # Gather student info plus the attendance data pulled from DingTalk.
    students = self.getStuInfo(self.infoPath)
    dates, records = self.getDatasFromDing(self.excelsDir, students)
    self.generateEveryoneProfile(self.profilesDir, students, dates, records)
    self._finished.emit()
def ensure_directory(path):
    """Create *path* (and any parents) unless it is already a directory.

    :raises BlueShiftPathException: when creation fails for any reason
        other than the path already existing.
    """
    if os_path.isdir(path):
        return
    try:
        os_makedirs(path)
        return
    except OSError as e:
        if e.errno == errno.EEXIST:
            # Lost a creation race (or something already exists there);
            # treated as success, matching the original behaviour.
            return
        msg = f"directory {path} not found and failed to create."
        # Chain the OSError so the root cause survives in tracebacks
        # (the original raise dropped it).
        raise BlueShiftPathException(msg=msg) from e
def move_files_to_folder(*args, **kwargs):
    """Maintain rolling .blend backups: move backup files from the working
    directory into the backup folder, renumbering and pruning them to the
    user's configured save_version count."""
    # Maximum backups allowed by user preferences.
    BACKUP_COUNT = bpy.context.user_preferences.filepaths.save_version
    # Only act when the save-backups option is enabled (non-zero count).
    if BACKUP_COUNT:
        PATH = bpy.data.filepath                          # Full path
        FILE = bpy.path.display_name_from_filepath(PATH)  # File name
        CWD = os_path_dirname(PATH)                       # Current Working Directory
        CBD = os_path_join(CWD, BACKUP_FOLDER_NAME)       # Current Backup Directory
        REXT = r"{}\.blend(\d+)$".format(FILE)            # Regex to catch backups
        EXT = "{}.blend{}"                                # Extension placeholder
        OLD = EXT.format(FILE, BACKUP_COUNT)              # Oldest backup name
        # Create backup directory if not exists
        try:
            os_makedirs(CBD)
        except OSError as e:
            if e.errno != EEXIST:
                # FrameSummary objects are not strings, so they must be
                # stringified before joining -- the original join raised
                # TypeError and masked the real error.
                raise OSError("\n".join(map(str, traceback_extract_stack()))) from None
        # Get all files in the current directory, move them to the backup
        # folder if they are backup files, and maintain the backup
        # folder's instances.
        for filename in reversed(sorted(os_listdir(CWD))):
            try:
                index = int(re_findall(REXT, filename)[-1])
                # increase_index_and_move returns the full path of a file
                # whose index exceeds the allowed count (to be deleted);
                # otherwise os_remove raises FileNotFoundError.
                os_remove(
                    increase_index_and_move(
                        src_folder=CWD,
                        dst_folder=CBD,
                        file=FILE,
                        extension=EXT,
                        src_index=index,
                        dst_index=index,
                        max_index=BACKUP_COUNT,
                    )
                )
            except (IndexError, FileNotFoundError):
                # Not a backup file, or nothing to prune.
                pass
        # If everything went fine, print out information
        if PRINT_INFO:
            print(INFO_TEXT.format(CWD, CBD))
def save_obj(obj, name):
    """
    Pickle *obj* to the file ``name + '.pkl'``.
    :param obj: object to save
    :param name: path of the pickle file, without the .pkl extension.
    :return: -
    """
    # Create any missing directories on the path first.
    os_makedirs(os_path_dirname(name), exist_ok=True)
    target = name + '.pkl'
    with open(target, 'wb') as handle:
        pickle.dump(obj, handle, pickle.HIGHEST_PROTOCOL)
def transferListRcvd(self, res, filelist):
    """FTP listing callback: queue every plain file from *filelist* for
    download into a local sub-directory named after the remote one.

    :param res: result flag from the deferred chain (unused here).
    :param filelist: listing whose .files entries are mappings with
        'filename', 'size' and 'filetype' keys -- presumably ls-style,
        where filetype "-" means regular file; verify against caller.
    """
    remoteDirectory, _, _ = self.getRemoteFile()
    localDirectory = self["local"].getCurrentDirectory()
    remoteDir = os_path.split(remoteDirectory)
    remoteBasenameDir = os_path.basename(remoteDir[0])
    # Mirror the remote directory name below the current local directory.
    targetDirectory = localDirectory + remoteBasenameDir + "/"
    if not os_path.exists(targetDirectory):
        os_makedirs(targetDirectory)
    localDirectory = targetDirectory
    # Queue entries: (download?, remote path, local path, size); only
    # regular files ("-") are queued.
    self.queue = [(True, remoteDirectory + file["filename"], localDirectory + file["filename"], file["size"]) for file in filelist.files if file["filetype"] == "-"]
    self.nextQueue()
def __save_image_sequence(self, img_seq, video_name):
    """Write the frames of *img_seq* as JPEGs into
    ``im_seqs_dir/<video_name>/``, named ``000001_<video_name>.jpg`` etc.
    """
    # Step 1 - Create folder if it does not exist
    image_sequence_dir = os_path_join(self.im_seqs_dir, video_name)
    os_makedirs(image_sequence_dir, exist_ok=True)
    # Step 2 - Save every frame with a zero-padded, 1-based index.
    for i, img in enumerate(img_seq, start=1):
        frame_name = '{0:06}'.format(i) + '_' + video_name + '.jpg'
        # Save image
        cv2_imwrite(os_path_join(image_sequence_dir, frame_name), img)
    # NOTE(review): a dangling triple-quote followed this method in the
    # original source (likely the start of commented-out code).  It was a
    # syntax hazard and has been removed.
def atualizar_arquivos(self, versao):
    """Overwrite the currently running version's files with the files
    downloaded for version *versao*.

    :returns: [True, ""] on success, [False, <error message>] on failure.
    """
    destino_upgrade = os_path.join(self.dest_download, 'safira-ide-{}/'.format(versao))
    lista_arquivos = self.listar_arquivos(destino_upgrade, versao)

    for arquivo in lista_arquivos:
        arquivo = arquivo.strip('/')
        # Regex capturing everything up to the last path separator,
        # i.e. the parent directory ("../").
        regx2 = r'(.{1,})(\/|\\).*$'
        destino_1 = re_search(regx2, str(self.dest_download)).group(1)
        destino_final = os_path.join(destino_1, arquivo)
        local_arquivo_enviar = os_path.join(destino_upgrade, arquivo)
        ultimo_diretorio_destino = re_search(regx2, destino_final).group(1)

        # Create the destination directory tree when missing.
        if not os_path.exists(ultimo_diretorio_destino):
            print('[cria] ', ultimo_diretorio_destino)
            os_makedirs(ultimo_diretorio_destino)
        else:
            print('[exis] ', ultimo_diretorio_destino)

        try:
            print('[de ] ', local_arquivo_enviar)
            print('[para] ', destino_final)
            # Try to copy the file to its destination.
            shutil_copy(local_arquivo_enviar, destino_final)
        except Exception as erro:
            # str(erro): the original concatenated the exception object
            # itself, which raised TypeError and masked the real failure.
            return [
                False,
                "Erro ao copiar arquivo: " + str(erro) + 'Arquivo' +
                local_arquivo_enviar + 'destino' + destino_final
            ]
    return [True, ""]
def main():
    """Generate every configured output file and return their paths."""
    args = parse_args()
    with open(args.config) as f:
        configs = json_load(f)
    filenames = []
    for index, config in enumerate(configs):
        # Re-seed per config so each generated file is reproducible.
        random_seed(args.seed)
        fn_name = config.pop('_fn')
        os_makedirs(args.path, exist_ok=True)
        filename = path_join(args.path, '{:03}.{}.txt'.format(index, fn_name))
        with open(filename, 'w') as f:
            REGISTERED_FUNCTIONS[fn_name](f, **config)
        filenames.append(filename)
        LOG.debug('# %s', filename)
    return {'outputs': filenames}
def test_findfiles(tmp_path):
    """Validate findfiles matches via both regex and glob patterns."""
    from encommon.readwrite import findfiles
    from encommon.readwrite import writefile
    from os import makedirs as os_makedirs
    from os import path as os_path

    seed = "String which will be used for seeding the findfiles function"
    expect = ["test.txt", "folder/test.txt"]

    # Lay out a small tree with matching and non-matching files.
    os_makedirs(os_path.join(tmp_path, "folder", "another"), exist_ok=True)
    for parts in (("test.txt",), ("test.yml",),
                  ("folder", "test.txt"), ("folder", "test.yml")):
        writefile(os_path.join(tmp_path, *parts), seed)

    # Both regex and glob forms should find exactly the .txt files.
    assert sorted(findfiles(tmp_path, r"\S+\/test.txt$")) == sorted(expect)
    assert sorted(findfiles(tmp_path, "*.txt")) == sorted(expect)
def make_project_dir(self):
    """Create the project directory together with its 'Train' and 'Test'
    sub-directories, returning the project directory path."""
    root = self.make_project_dir_url()
    os_makedirs(root, exist_ok=True)
    for sub in ('Train', 'Test'):
        os_makedirs(os_path_join(root, sub), exist_ok=True)
    return root
def fazer_backup_versao(self):
    """Copy the current version's files into the backup directory.

    :returns: [True, ""] on success, [False, <error message>] on failure.
    """
    lista_arquivos = self.listar_arquivos2('.')

    for arquivo_origem in lista_arquivos:
        arquivo_origem = arquivo_origem.strip('/')
        # Regex capturing everything up to the last path separator,
        # i.e. the parent directory ("../").
        regx2 = r'(.{1,})(\/|\\).*$'
        destino_final_file = self.dest_backup
        destino_final_arquivo = os_path.join(destino_final_file, arquivo_origem)
        ultimo_diretorio_destino = re_search(
            regx2, destino_final_arquivo).group(1)

        # Create the destination directory tree when missing.
        if not os_path.exists(ultimo_diretorio_destino):
            print('[cria] ', ultimo_diretorio_destino)
            os_makedirs(ultimo_diretorio_destino)
        else:
            print('[exis] ', ultimo_diretorio_destino)

        try:
            print('[de ] ', arquivo_origem)
            print('[para] ', destino_final_arquivo)
            # Try to copy the file to its destination.
            shutil_copy(arquivo_origem, destino_final_arquivo)
        except Exception as erro:
            # str(erro): the original concatenated the exception object
            # itself, which raised TypeError and masked the real failure.
            return [
                False,
                "Erro ao copiar arquivo: " + str(erro) + 'Arquivo' +
                arquivo_origem + 'destino' + destino_final_arquivo
            ]
    return [True, ""]
def initializeDirectories(self):
    """
    This function initializes the directories for the current Job
    instance.
    @returns the Job instance
    """
    for target in (self.getTemporalDir(), self.getInputDir(), self.getOutputDir()):
        if not os_path.exists(target):
            os_makedirs(target)
    return self
def _build(sources, outfolder, gentoc, toc, external_css):
    """Render each page in *sources* to an HTML file in *outfolder*, then
    emit the stylesheet (the user's *external_css* or the built-in STYLE).

    SECTIONS counts how many optional sections were actually present; a
    page with zero sections only gets a warning, no output file.
    """
    for pagename, source in sources.items():
        # If there is no valid source
        if source is None:
            continue
        # Clear soup and add an empty, new body
        SOUP.body.decompose()
        new(SOUP.html, 'body')
        # Set title
        SOUP.html.head.title.string = pagename
        # Get essential values
        filename, depends = toc[pagename]
        # Constants
        SECTIONS = 7
        # Build basic structure
        column1 = new(SOUP.body, 'div', id='column1')
        column2 = new(SOUP.body, 'div', id='column2')
        generic = new(column1, 'div', id='generic')
        sidebar = new(column1, 'div', id='sidebar')
        content = new(column2, 'div', id='content')
        # OPTIONAL: custom header section
        try:
            _html_format(generic, source['HEAD'], outfolder)
            new(generic, 'br')
        except KeyError:
            SECTIONS -= 1
        # OPTIONAL: custom menu in sidebar
        try:
            _html_format(sidebar, source['MENU'], outfolder)
            new(sidebar, 'br')
        except KeyError:
            SECTIONS -= 1
        # OPTIONAL: custom abstract and introduction
        try:
            _html_format(content, source['INFO'], outfolder)
            new(content, 'br')
        except KeyError:
            SECTIONS -= 1
        # OPTIONAL: index
        if gentoc:
            sidebar_type = new(sidebar, 'div')
            new(sidebar_type, 'p', class_='label', string='Modules:')
            new(sidebar, 'br')
            _indx_format(sidebar, pagename, toc)
            new(sidebar, 'br')
        # TODO: Implement a Schema validator for better user-feedback
        # TODO: add FOOT key
        # TODO: add EXEC to cdoc to add "interactive" python snippets to code
        # EXEC: |
        #   with open('VERSION') as file:
        #       # Insert to USER:About
        #       DOC[USER][0].insert(0, {'name': 'Version', 'info': file.read()})
        # OPTIONAL: text and code
        try:
            blocks = source['TEXT']
            for block in blocks:
                # Get the first element of the list as the section name
                try:
                    section = string_capwords(block[0])
                except IndexError:
                    continue
                sidebar_text = new(sidebar, 'div')
                new(sidebar_text, 'p', class_='label', string='{}:'.format(section))
                new(sidebar_text, 'br')
                content_text = new(content, 'div')
                new(content_text, 'h2', class_='title', string=section)
                for user in block[1:]:
                    _text_format(sidebar_text, content_text, user)
            new(sidebar, 'br')
        except KeyError:
            SECTIONS -= 1
        # OPTIONAL: user defined
        try:
            userdefs = source['USER']
            for userdef in userdefs:
                # Get the first element of the list as the section name
                try:
                    section = string_capwords(userdef[0])
                except IndexError:
                    continue
                sidebar_user = new(sidebar, 'div')
                new(sidebar_user, 'p', class_='label', string='{}:'.format(section))
                new(sidebar_user, 'br')
                content_user = new(content, 'div')
                new(content_user, 'h2', class_='title', string=section)
                for user in userdef[1:]:
                    _user_format(sidebar_user, content_user, user)
            new(sidebar, 'br')
        except KeyError:
            SECTIONS -= 1
        # OPTIONAL: type definitions
        try:
            types = source['TYPE']
            sidebar_type = new(sidebar, 'div')
            new(sidebar_type, 'p', class_='label', string='Types:')
            new(sidebar_type, 'br')
            content_type = new(content, 'div')
            new(content_type, 'h2', class_='title', string='Types')
            for type in types:
                _type_format(sidebar_type, content_type, type)
            new(sidebar, 'br')
        except KeyError:
            SECTIONS -= 1
        # OPTIONAL: function definitions
        try:
            funcs = source['FUNC']
            sidebar_func = new(sidebar, 'div')
            new(sidebar_func, 'p', class_='label', string='Functions:')
            new(sidebar_func, 'br')
            content_func = new(content, 'div')
            new(content_func, 'h2', class_='title', string='Functions')
            for func in funcs:
                _func_format(sidebar_func, content_func, func)
        except KeyError:
            SECTIONS -= 1
        # Create HTML file only when at least one section had content.
        if SECTIONS:
            output = os_path_join(outfolder, filename)
            with open(output, 'w', encoding='utf-8') as file:
                file.write(SOUP.decode(formatter='html'))
            print('CDOC: {!r} processed'.format(output))
            continue
        print('CDOC: !!! WARNING !!! '
              'in {!r} no data provided'.format(pagename))
    # Create folder if not exists to css
    stylepath = os_path_join(outfolder, 'css')
    try:
        os_makedirs(stylepath)
    except OSError as e:
        if not (e.errno == errno_EEXIST and os_path_isdir(stylepath)):
            raise
    # Create CSS path
    stylesheet = os_path_join(stylepath, 'cdoc.css')
    # If using the user created custom CSS
    if external_css:
        copyfile(external_css, stylesheet)
    # If using the default CSS
    else:
        with open(stylesheet, 'w', encoding='utf-8') as file:
            file.write(STYLE)
    print('CDOC: {!r} processed'.format(stylesheet))
def _create_dirs(self):
    """Ensure the storage and temporary directories exist (mode 0o755)."""
    for directory in (self._storage_directory, self._temp_directory):
        if not os_path.isdir(directory):
            os_makedirs(directory, 0o755)
def makedir(self):
    """Create the data directory if it is missing and log the creation."""
    target = self.folder.dir_path
    if os_path_exists(target):
        return
    os_makedirs(target)
    log_info("Made directory at: {0}".format(target))
def mkdirs(dir):
    """Create *dir* and any missing parent directories.

    Thin wrapper over os.makedirs: returns None and raises OSError if the
    directory already exists.  (NOTE(review): the parameter name shadows
    the builtin dir().)
    """
    return os_makedirs(dir)
def makeSurePathExists(path):
    """Create *path* (and parents), ignoring the error when it already
    exists; any other OSError is re-raised."""
    try:
        os_makedirs(path)
    except OSError as err:
        if err.errno != errno.EEXIST:
            raise
opt.basic_output_on.dprint("<>"*50) opt.basic_output_on.dprint("<>"*50) opt.basic_output_on.dprint("\n\n") error_list.append((svgfile, "Completed Successfully.")) return ############################################################################### # Check if output (sub)directories exist, create subdirectories if they don't # exist mes = ("\n\nThe output_directory given in options does not exist. To fix this " "change output_directory in options, or create the folder:\n" "%s" % opt.output_directory) assert os_path.exists(opt.output_directory), mes if not os_path.exists(opt.pickle_dir): # debug folder os_makedirs(opt.pickle_dir) if not os_path.exists(opt.output_directory_debug): # pickle folder os_makedirs(opt.output_directory_debug) ############################################################################### ###Batch run all SVG filed in input directory ################################# ############################################################################### error_list = [] if os_path.isdir(opt.input_directory): svgfiles = listdir(opt.input_directory) else: svgfiles = [opt.input_directory] opt.input_directory = os_path.join(opt.input_directory, os_path.pardir) for svgfile in svgfiles:
# # now go create directory structure, always checking if paths already exist print "###" print("creating ASKI directory structure with '" + str(n_iter_steps) + "' iteration steps in main directory '" + main_path + "'") print "###" print "" # print "###" # # create main path, if it does not exist if os_path.exists(main_path): print "# NOTE, main directory '" + main_path + "' already exists." else: print "mkdir '" + main_path + "'" os_makedirs(main_path) # # create empty model parameter correlation file if it does not exist, otherwise leave it untouched open(os_path.join(main_path, param.sval('PARAMETER_CORRELATION_FILE')), 'a').close() # # create all iteration step paths, if they do not exist for i in range(n_iter_steps): iter_path = os_path.join( main_path, param.sval('ITERATION_STEP_PATH') + '{0:03d}'.format(i + 1)) # if os_path.exists(iter_path): print "# NOTE, iteration step path '" + iter_path + "' already exists. Continuing, doing nothing." else: create_iter_dir(iter_path, i + 1)
def createDir(dirPath: str) -> None:
    """Create *dirPath* with any missing parents; raises FileExistsError
    when the directory is already present."""
    os_makedirs(dirPath)
def makedirs(the_path):
    """Create *the_path* (and any parents) when nothing exists there yet.

    The exists() pre-check keeps the original semantics (a non-directory
    at the path is silently left alone); exist_ok removes the
    check-then-create race for the directory case.
    """
    if not path.exists(the_path):
        os_makedirs(the_path, exist_ok=True)