def main(period: str = typer.Option("2103", prompt="YYMM"),
         management_folder: Path = typer.Option(
             '/Users/z/Dropbox (MBJ)/mbj/management',
             prompt="Dropbox Management path")) -> None:
    """Write the per-user/place hours summary file for one salary period.

    period: four digits, YYMM (e.g. "2103" = March 2021).
    management_folder: root of the Dropbox management tree.

    Creates <root>/<yyyy>/salary/<yyyy>-<mm>/_share/<period> summary places.txt
    and then hands off to print_summary.
    """
    # Validate with a real exception instead of `assert` (asserts are stripped
    # under `python -O`, and `assert int(period)` raised ValueError on
    # non-digits and wrongly rejected "0000").
    if len(period) != 4 or not period.isdigit():
        raise typer.BadParameter("Period should be of the form yymm")
    yy = period[:2]
    yyyy = f"20{yy}"
    mm = period[2:4]
    path = management_folder / yyyy / 'salary' / f'{yyyy}-{mm}' / '_share'
    console.print(path)
    path.mkdir(parents=True, exist_ok=True)
    with open(path / f'{period} summary places.txt', "w") as f:
        f.write('Name\tWhere\tDays\n')
        # get_hours_user_place yields (name, place, days) rows grouped by name.
        for name, place, days in get_hours_user_place(year=int(yyyy), month=int(mm)):
            f.write(f'{name}\t{place}\t{days}\n')
    print_summary(period, path, UIDS)
def test_searches_recursively(self):
    """find_root must locate the single deep folder holding all required files."""
    sequence_num = 3
    base_root = Path(self.temp_folder)
    true_sequence = 3, 0, 2
    decoy_sequence = 2, 1, 1
    true_path = ''
    # Build a 5x4x3 tree; exactly one leaf gets the full required-file set,
    # one decoy gets a partial set, every other leaf gets a junk file.
    for a in range(5):
        for b in range(4):
            for c in range(3):
                leaf = (base_root / "folder_{0}".format(a)
                        / "folder_{0}".format(b) / "folder_{0}".format(c))
                leaf.mkdir(parents=True, exist_ok=True)
                if (a, b, c) == true_sequence:
                    true_path = leaf
                    self.make_required_files(true_path, sequence_num)
                elif (a, b, c) == decoy_sequence:
                    # Partial file set: must NOT be returned by the search.
                    self.make_required_files(leaf, 2)
                else:
                    (leaf / 'decoy.txt').touch()
    # Search that structure for the one folder that has all we need
    found = kitti_loader.find_root(base_root, sequence_num)
    self.assertEqual(true_path, found)
    # Clean up after ourselves
    shutil.rmtree(base_root)
def test_searches_recursively(self):
    """find_files must return the folder (and its files) from a deep tree."""
    base_root = Path(self.temp_folder)
    true_sequence = 3, 0, 2
    true_path = ''
    # Build a 5x4x3 tree; one leaf gets the required files, the rest decoys.
    for a in range(5):
        for b in range(4):
            for c in range(3):
                leaf = (base_root / "folder_{0}".format(a)
                        / "folder_{0}".format(b) / "folder_{0}".format(c))
                leaf.mkdir(parents=True, exist_ok=True)
                if (a, b, c) == true_sequence:
                    true_path = leaf
                    for filename in self.required_files:
                        (leaf / filename).touch()
                else:
                    (leaf / 'decoy.txt').touch()
    # Search that structure for the one folder that has all we need
    found = tum_loader.find_files(base_root)
    expected = (
        true_path,
        true_path / 'rgb.txt',
        true_path / 'depth.txt',
        true_path / 'groundtruth.txt',
    )
    self.assertEqual(expected, found)
    # Clean up after ourselves
    shutil.rmtree(base_root)
def mkdir_helper(p: Union[str, Path], num_retries: int = 1) -> None:
    """
    Wrapper around `os.mkdir` that will work correctly even if the directory
    already exists.

    Retries up to `num_retries` times; on total failure the last exception
    is re-raised after logging.
    """
    path = Path(p) if isinstance(p, str) else p
    last_error = None
    for attempt in range(num_retries):
        try:
            path.mkdir(parents=True, exist_ok=True)
            return
        except Exception as e:  # broad on purpose: any failure is retried
            last_error = e
            log_and_print(
                f"failed to make directory {p} (attempt {attempt}): {str(e)}")
    if num_retries > 1:
        log_and_print(
            f"failed to make directory {p} after {num_retries} attempts, failing"
        )
    if last_error:
        raise last_error
def check_paths_exist(app_configs=None, **kwargs):
    """
    Check if the required file and folders exist, and try to create the
    possible ones.
    """
    errors = []

    def created_the_absent(file, error_id):
        # Record (as a Debug message) that a missing path was auto-created.
        errors.append(
            Debug(
                msg='"{}" could not be found but successfully created'.format(
                    file),
                id='apps.sour_and_perky.D{}'.format(str(error_id).zfill(3)),
            ))

    required = [
        settings.MEDIA_ROOT,
        settings.STATIC_ROOT,
        os.path.join(settings.MEDIA_ROOT, 'avatars'),
    ]
    for index, raw in enumerate(required):
        candidate = pathlib.Path(raw)
        if candidate.exists():
            continue
        with contextlib.suppress(FileExistsError):
            candidate.mkdir(parents=True)
            created_the_absent(candidate, index)
    return errors
def page_handler(url, title, page=0):
    """Download every gallery image for `title`, walking listing pages from `page`.

    Bug fixes vs. the original:
    - the per-image BeautifulSoup result was assigned to `page`, clobbering
      the listing-page counter after the first image;
    - the timeout check and `page += 1` lived in a `while True ... else:`
      clause, which can never execute, so the loop refetched page 0 forever.
    """
    path = PATH / 'Games' / title
    # parents+exist_ok replaces the old bare try/except-pass around mkdir().
    path.mkdir(parents=True, exist_ok=True)
    while True:
        page_source = requests.get(f'{url}?p={page}').content
        html = bs4.BeautifulSoup(page_source, 'lxml')
        for image in html.findAll(class_='gdtm'):
            href = image.find(href=True).get('href')
            image_html = bs4.BeautifulSoup(requests.get(href).content, 'lxml')
            # NOTE(review): the original passed the whole Tag to requests.get
            # and called .split on a Tag; pulling the attribute values looks
            # like the intent — confirm against the site's markup.
            src = image_html.find(src=True).get('src')
            name_tag = html.find(title=True)
            name = path / name_tag.get('title').split(': ')[-1]
            name.write_bytes(requests.get(src).content)
            time.sleep(60)
        # Throttle handling + pagination now actually run after each page.
        if timeout := html.find(
                text=re.compile('Your IP address has been temporarily.+')):
            print(timeout)
            timeout = timeout.split('.')[-1].split()
            time.sleep((int(timeout[4]) * 60) + int(timeout[7]))
        page += 1
def __init__(self, dirloc=None, time_period=7):
    '''
    This script sets up the API key and directories to store data
    dirloc should be a string like: '/home/pablo/Desktop/'
    '''
    # NOTE(review): the default dirloc=None would make the concatenation
    # below raise TypeError — callers apparently always pass a string.
    self.time_period = time_period
    self.dirlocation = dirloc + '/NASDAQ_{}'.format(
        datetime.date(datetime.now()))  # YYYY_MM_DD suffix for today's run
    # exist_ok makes the old exists()/chdir() branching unnecessary.
    pathlib.Path(self.dirlocation).mkdir(parents=True, exist_ok=True)
    os.chdir(self.dirlocation)
    make_dirs = [
        './STOCKS_DAILY', './SMA_DAILY', './ANALYSIS',
        './DAILY_CHARTS_SPECTRAL', './DAILY_SMA_COMPARE_CHARTS',
        'DAILY_EXPLOSIVE_REGRESSION_CHARTS'
    ]
    for sub in make_dirs:
        # makedirs+exist_ok replaces the racy `if exists: pass else: mkdir`.
        os.makedirs(sub, exist_ok=True)
    self.api_key = ''
def make_file_tree(spec, path):
    """Materialise `spec` under `path` (py.path-style object).

    A string spec becomes a file with that content; a dict becomes a
    directory whose entries are built recursively. Other types are ignored.
    """
    if isinstance(spec, dict):
        path.mkdir()
        for name in spec:
            make_file_tree(spec[name], path.join(name))
    elif isinstance(spec, str):
        path.write(spec)
def test_save_with_directory_fails(self, tmp_path):
    """save() must raise IsADirectoryError when the mapped path is a directory."""
    target = tmp_path / "somedir"
    doc = FileMappedDocument(target)
    # Turn the target into a directory only after the document is bound to it.
    target.mkdir()
    with pytest.raises(IsADirectoryError):
        doc.save()
def _get_cache_dir() -> Path:
    """Return ficdl's per-user cache directory, creating it if needed.

    NOTE(review): the Windows branch ends in `.../cache` while the XDG
    branch does not — confirm the asymmetry is intentional.
    """
    if os.name == 'nt':
        cache_dir = Path(os.path.expandvars('%LocalAppData%/jcotton42/ficdl/cache'))
    else:
        cache_dir = xdg_cache_home().joinpath('jcotton42/ficdl')
    cache_dir.mkdir(mode=0o770, parents=True, exist_ok=True)
    return cache_dir
def matlabUpdate(params):
    """Merge an 'updating' CSV chunk into its 'original' CSV and write the result.

    params (positional list):
      [0] mode, [1] item identifier, [2] SD as a fraction (e.g. 0.25 -> "25"),
      [3] day window (MA), [4] itemType (may embed "bond"/"GER" and
      "upper"/"lower"), [5] optional epsilon/tolerance (treated as 0 when
      absent or unparsable).

    Reads updatePath/<filenames>, concatenates it onto originalPath/<filenames>
    when that exists (keeping the newest row per date), and writes the merged
    frame to newPath/<filenames>, creating newPath as needed.
    """
    mode = params[0]
    item = params[1]
    SDint = params[2]
    day = params[3]
    itemType = params[4]
    try:
        epsilon = float(params[5])
    except:  # NOTE(review): bare except also hides IndexError when params[5] is absent
        epsilon = 0
    MA = str(day)
    SD = str(int(SDint*100))  # fraction -> percent string, e.g. 0.25 -> "25"
    # Strip instrument prefixes; what remains should be "upper"/"lower" or "".
    itemmode = itemType.replace("bond","").replace("GER","")
    if itemmode not in ["upper","lower"]:
        itemmode = ""
    else:
        itemmode = itemmode
    #MApath = "day" + MA + "/"
    if not epsilon == 0:
        # Tolerance variant: paths and filename carry a tor{epsilon} segment.
        filenames = "cir_{}_bounded_tor{}_day{}_SD{}_{}.csv".format(mode,epsilon,MA,SD,item)
        originalPath = "{}/original/tor{}/temp/SD{}/day{}/{}/".format(itemType,epsilon,SD,MA,mode)
        updatePath = "{}/updating/tor{}/temp/new/SD{}/day{}/{}/".format(itemType,epsilon,SD,MA,mode)
        newPath = "{}/updating/tor{}/temp/SD{}/day{}/{}/".format(itemType,epsilon,SD,MA,mode)
    else:
        if not itemmode == "":
            filenames = "cir_{mode}_bounded_{itemmode}_day{MA}_SD{SD}_{item}.csv".format(mode = mode,itemmode = itemmode,MA = MA,SD = SD,item = item)
        else:
            filenames = "cir_{mode}_bounded_day{MA}_SD{SD}_{item}.csv".format(mode = mode,MA = MA,SD = SD,item = item)
        originalPath = "{}/original/temp/SD{}/day{}/{}/".format(itemType,SD,MA,mode)
        updatePath = "{}/updating/temp/new/SD{}/day{}/{}/".format(itemType,SD,MA,mode)
        newPath = "{}/updating/temp/SD{}/day{}/{}/".format(itemType,SD,MA,mode)
    #writer = pd.ExcelWriter(mode + "_" + MA + "days_SD" + SD +"_index.xlsx",date_format = 'yyyy/mm/dd',datetime_format='yyyy/mm/dd')
    path = Path(newPath)
    path.mkdir(parents=True, exist_ok=True)
    #index = names[loc+1:-4]
    #print(names[loc+1:-4])
    #pathname = "updating/result/SD{}/{}/day{}/".format(SD,mode,MA)
    #path = Path(pathname)
    #path.mkdir(parents=True, exist_ok=True)
    # Incremental chunk; 'Date' column becomes the index (day-first dates).
    update = pd.read_csv(updatePath + filenames, parse_dates=['Date'] , dayfirst=True, index_col=0 , na_values=["null"])
    if os.path.isfile(originalPath + filenames):
        raw = pd.read_csv(originalPath + filenames, parse_dates=['Date'] , dayfirst=True, index_col=0 , na_values=["null"])
        # Append then de-duplicate on the date index, preferring the new rows.
        new = pd.concat([raw,update])
        new = new[~new.index.duplicated(keep='last')]
    else:
        new = update
    new.to_csv(newPath + filenames, sep=",", index=True)
    print(newPath + filenames)
def create_one_sample_t_test(name, maps, output_dir, smoothing_fwhm=6.0):
    """Fit a one-sample t-test over `maps` and save the group z-map.

    name: label used in the output filename.
    maps: list of first-level contrast images accepted by SecondLevelModel.fit.
    output_dir: directory for "<name>_group_zmap.nii.gz" (created if missing).
    smoothing_fwhm: smoothing passed to the second-level model.
    """
    import os  # local import: module level only imports os.path as `op`
    # Bug fix: the original called `op.mkdir`, but os.path has no mkdir
    # (AttributeError on a missing directory). os.makedirs also creates
    # missing parents and tolerates an existing directory.
    os.makedirs(output_dir, exist_ok=True)
    model = SecondLevelModel(smoothing_fwhm=smoothing_fwhm)
    # Intercept-only design: a one-sample test against zero.
    design_matrix = pd.DataFrame([1] * len(maps), columns=['intercept'])
    model = model.fit(maps, design_matrix=design_matrix)
    z_map = model.compute_contrast(output_type='z_score')
    nib.save(z_map, op.join(output_dir, "{}_group_zmap.nii.gz".format(name)))
def _gen_file(self, o, f, fold, mode='w'):
    """Serialise `o` as pretty-printed, key-sorted JSON into file `f` under `fold`.

    fold: a pathlib.Path directory (created, with parents, when missing).
    mode: open mode for the output file (default overwrite).
    """
    target_dir = fold  # pathlib.Path expected from the caller
    if not target_dir.exists():
        target_dir.mkdir(parents=True, exist_ok=True)
    with (target_dir / f).open(mode=mode) as fd:
        json.dump(o, fd, indent=2, sort_keys=True)
    print(f"生成 {f} 成功")
def make_sure_path_exists(path):
    """Create `path` (and any missing parents), tolerating it already existing."""
    try:
        from pathlib import Path
        Path(path).mkdir(parents=True, exist_ok=True)
    except OSError as exception:
        # Only "already exists" is acceptable; everything else propagates.
        if exception.errno != errno.EEXIST:
            raise
def _create_path(self, path):
    """Ensure `path` exists as a directory and contains an (empty) ignore marker."""
    target = Path(os.path.abspath(path))
    if not target.exists():
        target.mkdir(parents=True, exist_ok=True)
    marker = target / IGNORE_MARKER
    # lexists: do not recreate the marker even if it is a dangling symlink.
    if not os.path.lexists(str(marker)):
        marker.touch()
def make_cache_dir(pathname):
    """Create the parent directory of `pathname` if it does not already exist.

    Returns True (kept for backward compatibility with existing callers).
    """
    # exist_ok=True (Python 3.5+) replaces the old try/except FileExistsError
    # dance and is race-free. Note: unlike the old code, a *file* sitting at
    # the directory path now raises instead of being silently ignored.
    Path(os.path.dirname(pathname)).mkdir(parents=True, exist_ok=True)
    return True
def inner(*args, **kwargs):
    # type: (*Any, **Any) -> Any
    """Disk-cache wrapper around `func` (closure created by the enclosing decorator).

    Free variables expected from the enclosing scope: `path`, `protocol`,
    `ignoreargs`, `ignore_first_arg`, `file_ext`, `keyfunc`, `func`,
    `_duration`, `generator`, `consume`, `return_cached`.

    Computes a cache file path (per-call-arguments hash unless `ignoreargs`),
    returns the cached result when the file is younger than `_duration`,
    otherwise calls `func`, writes the result to the cache, and returns it.
    """
    # {ppv} in the configured path is expanded to the pickle protocol version.
    strpath = fspath(path).format(ppv=protocol)
    if not ignoreargs:
        if ignore_first_arg:
            # Typically drops `self` so the hash ignores the bound instance.
            args = args[1:]
        _file_ext = file_ext or ".p"
        if keyfunc is None:
            hashstr = key_to_hash(args_to_key(args, kwargs), protocol=protocol)
        else:
            hashstr = keyfunc(args, kwargs)
        fullpath = os.path.join(strpath, hashstr + _file_ext)
    else:
        # One fixed cache file for all calls; per-call options are meaningless.
        if keyfunc or file_ext:
            raise ValueError("`keyfunc` or `file_ext` can only be specified if ignoreargs is False")
        if args or kwargs:
            logger.warning("cache file decorator for %s called with arguments", func.__name__)
        fullpath = strpath
    # Cache entry is stale when older than `_duration`; missing counts as stale.
    try:
        invalid = now() - mdatetime(fullpath) > _duration
    except FileNotFoundError:
        invalid = True
    if invalid:
        cached = False
        if not ignoreargs:
            path.mkdir(parents=True, exist_ok=True)
        if generator and not consume:
            # Stream the iterable straight to disk without materialising it.
            it = func(*args, **kwargs)
            logger.info("Writing iterable to cache: %s", fullpath)
            result: Any = write_iter(it, fullpath, protocol=protocol, safe=True)
        else:
            with context("Result calculated in {delta} seconds"):
                if generator and consume:
                    result = list(func(*args, **kwargs))
                else:
                    result = func(*args, **kwargs)
            logger.info("Writing result to cache: %s", fullpath)
            write_pickle(result, fullpath, protocol=protocol, safe=True)
    else:
        cached = True
        if generator and not consume:
            logger.info("Loading iterable from cache: %s", fullpath)
            result = read_iter(fullpath)
        else:
            with context(f"Result loaded from {fullpath} in {{delta}} seconds"):
                result = read_pickle(fullpath)
    if return_cached:
        return cached, result
    else:
        return result
def _archive_lesson(filename):
    """Move a processed lesson file into the input folder's archive subfolder.

    Files whose path already contains the archive folder are left alone.
    """
    if ARCHIVE_FOLDER_NAME + sep in filename:
        return
    input_dir = root_folder + sep + INPUT_FOLDER_NAME
    archive_dir = pathlib.Path(input_dir + sep + ARCHIVE_FOLDER_NAME)
    archive_dir.mkdir(exist_ok=True)
    rename(
        input_dir + sep + filename,
        input_dir + sep + ARCHIVE_FOLDER_NAME + sep + filename)
def render(self, report: Report, output_dir='./report'):
    """Render `report` to a timestamped HTML page (plus a Files/ directory)
    under output_dir/<report name>/<run timestamp>/ and open it in a browser.

    Each report item is dispatched to its type-specific make_* helper; an
    item description, when present, is appended after the item.
    """
    # Get a unique name for this report
    report_name = report.get_name()
    run_string = time.strftime('%Y%m%d_%H%M%S')
    # Create a sequence of sub-paths to make unique reports by name and time run.
    path = Path('.') / output_dir / report_name / run_string
    files_path = path / 'Files'
    # exist_ok makes both mkdir calls race-safe / re-run-safe.
    path.mkdir(parents=True, exist_ok=True)
    assert path.exists(), f"Couldn't create directory {path}"
    files_path.mkdir(parents=True, exist_ok=True)
    # Bug fix: this message previously interpolated `path`, not `files_path`.
    assert files_path.exists(), f"Couldn't create directory {files_path}"
    plots = []
    for unique_id, item in report.get_uniquely_keyed_items():
        plots.append(self.make_div(f'<h2>{item.name}</h2><br>'))
        # Most specific item types must be checked before their bases
        # (e.g. ReportLineGraphItem before ReportGraphItem).
        if isinstance(item, ReportLineGraphItem):
            plots.append(self.make_plot_from_line_graph_item(item))
        elif isinstance(item, ReportTableExplorerItem):
            plots.append(self.make_table_explorer_item(item))
        elif isinstance(item, ReportHeatmapItem):
            plots.append(self.make_heatmap_item(item))
        elif isinstance(item, ReportInteractiveTableItem):
            plots.append(self.make_interactive_table_item(item))
        elif isinstance(item, ReportInteractivePlotItem):
            plots.append(self.make_interactive_plot_item(item))
        elif isinstance(item, ReportTableItem):
            plots.append(self.make_table_item(item))
        elif isinstance(item, ReportImageItem):
            plots.append(self.make_image_item(item, unique_id, files_path))
        elif isinstance(item, ReportFileItem):
            plots.append(self.make_file_item(
                item, unique_id, files_path))  # TODO: Not the best unique ID
        elif isinstance(item, ReportGraphItem):
            plots.append(
                self.make_plot_from_graph_item(item, unique_id, path))
        # Add a description for every item.
        if item.description and len(item.description) > 0:
            plots.append(
                self.make_div(
                    f'Description:<br>{item.description}<hr width=100%>'))
    output_file(path / "report.html",
                title=f'{report.get_name()} (Local Report Render)')
    show(column(*plots))  # open a browser
def html_path(self, package_path):
    """Create `<package_path>/html` populated with dummy fixture files; return it."""
    html_dir = package_path / 'html'
    html_dir.mkdir(parents=True)
    names = ['test1.html', 'test2.html', 'README', 'unrelatedhtml']
    for name in names:
        (html_dir / name).touch()
    return html_dir
def doMKDir1(self, newname):
    """Create directory `newname` inside the source pane's current directory,
    then refresh the file listing.

    Creation failures (already exists, permissions, ...) are deliberately
    ignored — but only OS-level errors: the original bare `except:` also
    swallowed KeyboardInterrupt/SystemExit.
    """
    if newname:
        newdir = self.SOURCELIST.getCurrentDirectory() + newname
        try:
            os.mkdir(newdir)
        except OSError:
            # Best effort: keep the original silent-failure behaviour.
            pass
    self.doRefresh()
def main(self, raw_args: list[str], bin_name: str) -> None:
    """CLI entry point: parse arguments, set up project state and HTTP/Weblate
    clients, ensure working directories exist, then run the chosen subcommand.
    """
    self.arg_parser: ArgumentParser = self.build_arg_parser(bin_name)
    try:
        self.cli_args: ArgumentNamespace = self.arg_parser.parse_args(
            raw_args)
    except (ArgumentError, ArgumentParserExit) as err:
        # Attach the parser so the caller can render usage/help on failure.
        setattr(err, "parser", self.arg_parser)
        raise err
    # Use real tqdm only when installed AND requested; otherwise the no-op
    # fallback keeps the progress-bar API uniform.
    self.tqdm: type[_tqdm_fallback] = _tqdm_fallback
    if tqdm is not None and self.cli_args.progress_bars:
        self.tqdm = cast("type[_tqdm_fallback]", tqdm)
    with self.wrap_print_for_tqdm():
        start_time = time.perf_counter()
        self.project: Project = Project(self.cli_args.project)
        # Make sure every directory the project writes into exists up front.
        for path in [
            self.project.work_dir,
            self.project.download_dir,
            self.project.components_dir,
            self.project.dist_archives_dir,
        ]:
            path.mkdir(exist_ok=True, parents=True)
        # Network knobs all fall back to the client's own defaults (None).
        self.http_client: HTTPClient = HTTPClient(
            network_timeout=self.project.get_conf("project",
                                                  "network_timeout",
                                                  int,
                                                  fallback=None),
            network_max_retries=self.project.get_conf(
                "project", "network_max_retries", int, fallback=None),
            network_retry_wait=self.project.get_conf("project",
                                                     "network_retry_wait",
                                                     int,
                                                     fallback=None),
        )
        self.weblate_client: WeblateClient = WeblateClient(
            http_client=self.http_client,
            root_url=self.project.get_conf("weblate", "root_url"),
            auth_token=self.project.get_conf("weblate",
                                             "auth_token",
                                             fallback=None),
            project_name=self.project.get_conf("weblate", "project"),
        )
        # Dispatch to the subcommand bound during argument parsing.
        self.cli_args.command_fn()
        elapsed_time = time.perf_counter() - start_time
        print("Done in {:.2f}s".format(elapsed_time))
def main():
    """Fetch the newest Crit Role fan-art gallery and mirror it to Google Photos."""
    gallery_names = get_fanart_gallery_names(
        'https://critrole.com/category/fan-art/')
    source_gallery_name = gallery_names[0]
    google_photos_album_title = 'CR Fan Art Gallery'
    gallery_slug = slugify(source_gallery_name)
    out_dir = Path('.') / 'out' / gallery_slug
    out_dir.mkdir(parents=True, exist_ok=True)
    fetch(out_dir, 'https://critrole.com/' + gallery_slug + '/')
    photos = GooglePhotos()
    photos.upload_and_register_photos(out_dir, google_photos_album_title)
def __call__(self, ctx) -> str:
    """Resolve self._path (relative paths against ctx._pwd), ensure it exists
    as a directory, and return its absolute string form.
    """
    from pathlib import Path
    resolved = Path(self._path)
    if not resolved.is_absolute():
        resolved = Path(ctx._pwd) / resolved
    resolved = resolved.absolute()
    if resolved.exists():
        if not resolved.is_dir():
            raise ConfigurationError("'%s' is not a directory" % self._path)
    else:
        resolved.mkdir(parents=True, exist_ok=True)
    return str(resolved)
def test_access_denied_during_cleanup(tmp_path, monkeypatch):
    """Ensure that deleting a numbered dir does not fail because of OSErrors (#4262)."""
    numbered_dir = tmp_path / "temp-1"
    numbered_dir.mkdir()

    def raise_access_denied(*args):
        raise OSError("access denied")

    # Every rename attempt now fails; cleanup must still not propagate.
    monkeypatch.setattr(Path, "rename", raise_access_denied)
    lock_path = get_lock_path(numbered_dir)
    maybe_delete_a_numbered_dir(numbered_dir)
    assert not lock_path.is_file()
def createCheckpointPath(self, GLOVE, CNN_LAYER, POOLING_LAYER, GRU_LAYER,
                         BiLSTM_Layer, LSTM_Layer, DENSE_LAYER):
    """Return the checkpoint directory for this model configuration,
    creating it (with parents) when missing."""
    folder_name = Logging(self.args).createModelName(
        GLOVE=GLOVE, CNN_LAYER=CNN_LAYER, POOLING_LAYER=POOLING_LAYER,
        GRU_LAYER=GRU_LAYER, BiLSTM_Layer=BiLSTM_Layer,
        LSTM_Layer=LSTM_Layer, DENSE_LAYER=DENSE_LAYER)
    checkpoint_dir = Path(Paths.MODEL_CHECKPOINTS, folder_name)
    checkpoint_dir.mkdir(parents=True, exist_ok=True)
    return str(checkpoint_dir.resolve())
def createModelPath(self, GLOVE, CNN_LAYER, POOLING_LAYER, GRU_LAYER,
                    BiLSTM_Layer, LSTM_Layer, DENSE_LAYER):
    """Return the full path of the "<model name>.tf" file for this model
    configuration, creating its directory (with parents) when missing."""
    folder_name = Logging(self.args).createModelName(
        GLOVE=GLOVE, CNN_LAYER=CNN_LAYER, POOLING_LAYER=POOLING_LAYER,
        GRU_LAYER=GRU_LAYER, BiLSTM_Layer=BiLSTM_Layer,
        LSTM_Layer=LSTM_Layer, DENSE_LAYER=DENSE_LAYER)
    model_dir = Path(Paths.MODEL, folder_name)
    model_dir.mkdir(parents=True, exist_ok=True)
    model_file = Path(model_dir, "{}.tf".format(folder_name))
    return str(model_file.resolve())
async def run(self) -> None:
    """Create the node's directory, first removing any file/symlink in the way."""
    path = self.node.path
    if os.path.lexists(str(path)):
        # A non-directory entry occupies the target name; remove it.
        path.unlink()
    self.logger.debug(
        "create directory <ITALIC>%(path)s<UPRIGHT>%(parents)s",
        dict(
            path=self.node.relative_path,
            parents=' with parents' if self.parents else ''))
    path.mkdir(mode=0o755, parents=self.parents)  # rwxr-xr-x
    self.node.modified = True
    self.node.updated = True
def organize_trainG(gen, out_dir):
    """Copy the generated training files in `gen` into per-key folders under
    `out_dir` (the key '/' maps to folder name 'slash').

    Bug fixes vs. the original:
    - `osp.mkdir` does not exist (os.path has no mkdir) -> os.makedirs;
    - the `out_dir` parameter was unused (paths were built from a global
      `act`) — it is now the base directory, as the signature implies;
    - `shutil.copy` replaces `os.system('cp ...')` (portable, no shell
      quoting issues with odd filenames).
    """
    import os
    import shutil
    for key in gen:
        print('-------------')
        print(key)
        name = 'slash' if key == '/' else key
        out = os.path.join(out_dir, name)
        os.makedirs(out, exist_ok=True)
        for src in gen[key]:
            shutil.copy(src, out)
def create(root: Path, structure: Mapping[str, Any]):
    """Recursively materialise `structure` under `root`.

    dict values become subdirectories, str values text files, bytes values
    binary files; anything else raises ValueError.
    """
    for name, node in structure.items():
        target = root / name
        if isinstance(node, dict):
            target.mkdir()
            create(target, node)
        elif isinstance(node, str):
            target.write_text(node)
        elif isinstance(node, bytes):
            target.write_bytes(node)
        else:
            raise ValueError(f"unexpected type: {type(node)}")
def organize_trainO(org, out_dir):
    """Copy the original training files in `org` into per-key folders under
    `out_dir` (the key '/' maps to folder name 'slash').

    Bug fixes vs. the original:
    - `osp.mkdir` does not exist (os.path has no mkdir) -> os.makedirs;
    - the `out_dir` parameter was unused (paths were built from a global
      `act`) — it is now the base directory, as the signature implies;
    - `shutil.copy` replaces `os.system('cp ...')` (portable, no shell
      quoting issues with odd filenames).
    """
    import os
    import shutil
    for key in org:
        print('-------------')
        print(key)
        name = 'slash' if key == '/' else key
        out = os.path.join(out_dir, name)
        os.makedirs(out, exist_ok=True)
        for src in org[key]:
            shutil.copy(src, out)
def check_dir(path: Path, exc=False, create=False):
    """
    Checks that the directory pointed by path exists.
    Creates the folder if not existing (create=True); raises
    FileDontExistError when exc=True and the path was missing, and
    FileNotDirectoryError when the path exists but is not a directory.
    """
    log.info(_("Checking path: {}").format(str(path)))
    if (not path.exists()):
        if create:
            log.info(_("→ Creating directory {}").format(str(path)))
            path.mkdir()
        if exc:
            # NOTE(review): with create=True AND exc=True the directory is
            # created and this still raises — confirm that is intended
            # (an `elif` may have been meant).
            raise FileDontExistError(path)
    elif not path.is_dir():
        raise FileNotDirectoryError(path)
def load_notes(path=config.DATA_PATH):
    """
    Scans the given path and returns a list of notes which is sorted
    by the modification time. Any directory and the tagfile is ignored.
    The path argument has to be an instance of pathlib.Path.

    When the directory does not exist, an error is reported, a directory
    is created, and the process exits with status 1.
    """
    if path.exists():
        # Only plain files count as notes; skip ignored extensions and the tagfile.
        data = [Note(f) for f in path.iterdir()
                if f.is_file() and (f.suffix not in config.IGNORE_EXTENSIONS
                                    and f != Note.tagfile)]
        return sorted(data, key=lambda n: n.age)
    else:
        error('The directory {} does not exist!'.format(path))
        click.echo('Creating new directory {}.'.format(path))
        # NOTE(review): the message above names the missing data path, but the
        # directory created below is config.TRASH_PATH — confirm this
        # rebinding is intentional.
        path = config.TRASH_PATH
        path.mkdir(parents=True)
        exit(1)
def create_depot(self):
    """Configure DepotManager for this instance's file-storage backend."""
    config = {
        'depot.backend': self.depot_backend
    }
    if self.depot_backend.endswith('LocalFileStorage'):
        storage = self.bound_storage_path
        if not storage.exists():
            storage.mkdir()
        config['depot.storage_path'] = str(storage)
    elif not self.depot_backend.endswith('MemoryFileStorage'):
        # implementing non-local file systems is going to be more
        # involved, because we do not generate external urls yet
        raise NotImplementedError()
    DepotManager.configure(self.bound_depot_id, config)
def _create_run_systemd_directory(self) -> None:
    """Create /run/systemd/ if it doesn't already exist.

    'systemctl --user daemon-reload' checks for disk free space by calling
    statvfs(3) on /run/systemd [1]; if the directory is missing the reload
    fails. daemon-reload is right to expect /run/systemd (systemd requires
    sd_booted(3) to be true), but we force sd_booted(3) to return true, so
    the directory might not exist. Creating it here makes the check pass.

    [1] https://github.com/systemd/systemd/blob/v239/src/core/dbus-manager.c#L1277
    """
    path = pathlib.Path("/run/systemd")
    try:
        # No parents, no exist_ok: /run must already exist, and a racing
        # creator simply lands us in the handler below.
        path.mkdir(mode=0o755, parents=False, exist_ok=False)
    except OSError:
        logging.warning(
            f"Failed to create {path}; ignoring error, but systemd might "
            f"fail to reload",
            exc_info=True,
        )
def authorized_key_set(path):
    """Parse `.ssh/authorized_keys` under `path` into a set of public keys.

    Uses the py.path API; the `.ssh` directory is created first if missing.
    """
    dotssh = path.join('.ssh')
    if not dotssh.isdir():
        dotssh = path.mkdir('.ssh')
    keys = set()
    with dotssh.join('authorized_keys').open() as fh:
        for line in fh:
            keys.add(parse_openssh_pubkey(line.strip()))
    return keys
def ensure_dir(outdir):
    """Create directory `outdir` (with any missing parents) if it does not exist."""
    # exist_ok removes the check-then-create race of the original
    # `if not path.exists(): path.mkdir(parents=True)` pattern.
    pathlib.Path(outdir).mkdir(parents=True, exist_ok=True)