def watch(self):
    """Watch the matching process"""
    logger.debug("Watching process for %s (%s, taken %d)", self.path, self.uri, self.count)
    path = Path(self.uri)
    lockpath = path.with_suffix(".lock")
    pidpath = path.with_suffix(".pid")

    # Watch for the job
    def run():
        logger.debug("Locking job lock path %s", lockpath)
        with fasteners.InterProcessLock(lockpath):
            if not pidpath.is_file():
                logger.debug("Job already finished (no PID file)")
            else:
                s = ""
                while s == "":
                    s = pidpath.read_text()
                logger.info("Loading job watcher from definition")
                from experimaestro.connectors import Process
                # FIXME: not always localhost...
                from experimaestro.connectors.local import LocalConnector
                connector = LocalConnector.instance()
                process = Process.fromDefinition(connector, json.loads(s))
                process.wait()
            self.delete()

    threading.Thread(target=run).start()
def _compile_file(path, component, locale):
    with io.open(str(path), 'r') as po:
        catalog = read_po(po, locale=locale, domain=component)

    logger.info(
        "Compiling component [%s] locale [%s] (%d messages)...",
        component, locale, len(catalog))

    with io.open(str(path.with_suffix('.mo')), 'wb') as mo:
        write_mo(mo, catalog)

    with io.open(str(path.with_suffix('.jed')), 'w') as jed:
        write_jed(jed, catalog)
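# A hedged usage sketch for _compile_file above, assuming Babel's read_po/write_mo
# and the local write_jed helper are in scope; the catalog path, component name,
# and locale below are hypothetical examples.
_compile_file(Path("locale/de/comp_web.po"), "comp_web", "de")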
def populate_db(tar_file=None):
    '''Populates the datacube database from a compressed SQL dump.

    Args:
        tar_file (str): The path to a tar file to load.

    Raises:
        OSError: If the tar file is not found.
    '''
    if not tar_file:
        from urllib import request
        url = 'https://raw.githubusercontent.com/ceos-seo/odc-colab/master/database/db_dump.tar.xz'
        print('No database file supplied. Downloading default index.')
        resp = request.urlopen(url)
        if resp.code < 300:
            tar_file = f'./{url.split("/")[-1]}'
            with open(tar_file, 'wb') as _file:
                _file.write(resp.read())
    import tarfile
    from pathlib import Path
    path = Path(tar_file)
    if path.exists():
        with tarfile.open(path.name, 'r') as tar:
            tar.extractall()
        for suffix in path.suffixes:
            path = path.with_suffix('')
        _shell_cmd(["psql", "-f", f"{path}", "-d", "datacube"])
    else:
        raise OSError('Tar file does not exist.')
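# A minimal usage sketch for populate_db above, assuming a local PostgreSQL
# "datacube" database is reachable and the module's _shell_cmd helper is available.
populate_db()                    # download and load the default index dump
populate_db('./db_dump.tar.xz')  # or load an already-downloaded dump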
def flatten_wax_image_dir(drive_id: str, local_dir, service):
    """
    Find all images in the given dir, and download them.

    Some guesswork to allow nested google drive folders.
    """
    files = get_files(drive_id, service, recursively=True)
    num_yielded = 0
    for file in files:
        num_yielded += 1
        if file['mimeType'] == 'image/jpeg':
            path = file['path']
            raw_name = path.with_suffix("").name
            # If it's just two digits, assume it's a subelement in
            # something else.
            if re.search(r"^[0-9]{1,2}$", raw_name):
                dest = local_dir / path.parent.name / path.name
            else:
                dest = local_dir / path.name
            if local_is_outdated(file, dest):
                download_file(file['id'], dest, service)
    if num_yielded == 0:
        logging.warning(
            f"No files found for google drive ID {drive_id}: "
            "this can occur if the Google user for your credentials "
            "is not allowed to access it.")
def get_save_file_path(
    history_dir: _gp.Ref[_gp.GlobalPrefs],
    parent: qw.QWidget,
    title: str,
    default_name: str,
    filters: List[str],
    default_suffix: str,
) -> Optional[Path]:
    name = _get_hist_name(
        qw.QFileDialog.getSaveFileName,
        history_dir,
        parent,
        title,
        default_name,
        filters,
    )
    if name:
        assert name.file_name is not None
        path = Path(name.file_name)
        if name.sel_filter == filters[0] and path.suffix == "":
            path = path.with_suffix(default_suffix)
        return path
    else:
        return None
def __call__(self, parser, namespace, value, option_string=None):
    path = Path(value)
    if not path.exists():
        path = namespace.io / path
        path = path.with_suffix('.json')
        if not path.exists():
            raise ValueError('{} does not exist'.format(value))
    setattr(namespace, self.dest, value)
def __call__(self, parser, namespace, value, option_string=None):
    path = Path(value)
    if not path.exists():
        path = get_config('IO_ROOT') / value
        path = path.with_suffix('.enaml')
        if not path.exists():
            raise ValueError('{} does not exist'.format(value))
    setattr(namespace, self.dest, path)
def external_execution(ctx, playbook_configuration_id,
                       playbook_configuration_version, path):
    """Create bundle for external execution.

    This command creates a tarball which has everything required for
    external execution of the plugin: the command line for execution
    with Ansible, the contents of the plugin, and the generated dynamic
    inventory.

    Please pay attention to the following:

    \b
      - This execution won't be added to Decapod and will be done
        without any Decapod interaction
      - You should have Ansible 2.3 or newer installed
      - Please be sure that ceph-ansible is present in the Ansible
        roles path.

    http://docs.ansible.com/ansible/intro_configuration.html#roles-path
    https://github.com/ceph/ceph-ansible
    """
    playbook_configuration_id = str(playbook_configuration_id)
    subdir_path = "{0}-{1}".format(
        playbook_configuration_id, playbook_configuration_version)
    if path is None:
        path = subdir_path
    path = pathlib.Path(path).absolute()

    playbook_config = \
        playbook_configuration.PlaybookConfigurationModel.find_version(
            playbook_configuration_id, playbook_configuration_version)
    if not playbook_config:
        ctx.fail("Cannot find such playbook config")

    plugin = get_plugin(playbook_config.playbook_id)

    working_dir = tempfile.TemporaryDirectory(prefix="exec")
    ctx.call_on_close(working_dir.cleanup)
    working_dir = pathlib.Path(working_dir.name)

    tmpdir = working_dir.joinpath(subdir_path).absolute()
    tmpdir.mkdir()
    tmpdir.joinpath("fetch_directory").mkdir()

    copy_decapod_common_playbooks(tmpdir)
    copy_ceph_ansible(tmpdir)
    copy_private_ssh_key(tmpdir)
    copy_ansible_config(tmpdir)
    copy_plugin_contents(tmpdir, plugin)
    copy_monitor_keyring(tmpdir, playbook_config)
    copy_decapod_data(tmpdir, playbook_config)
    dump_inventory(tmpdir, playbook_config)
    compose_commandline(tmpdir, playbook_config)

    shutil.make_archive(path.as_posix(), "gztar", working_dir.as_posix())
    click.echo(path.with_suffix(".tar.gz").as_posix())
def unzip_file(path):
    if not isinstance(path, pathlib.Path):
        path = pathlib.Path(path)
    with zipfile.ZipFile(path, "r") as zip_file:
        for file in tqdm.tqdm(zip_file.namelist(), desc="Unzipping"):
            zip_file.extract(file, path.parent)
    return path.with_suffix("")
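# Usage sketch for unzip_file above: the archive is extracted next to itself
# and the ".zip"-stripped path is returned ("data/images.zip" is a
# hypothetical example path).
extracted = unzip_file("data/images.zip")  # returns Path("data/images")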
def get_configuration(scribus_doc):
    """Return the project configuration merged with the default one."""
    configuration = CONFIGURATION_DEFAULT
    path = Path(scribus_doc)
    config_filename = path.with_suffix('.conf.json')
    if os.path.isfile(config_filename):
        if config_filename:
            with open(config_filename, 'r') as json_file:
                json_data = json.load(json_file)
                merge_configuration(configuration, json_data)
    return configuration
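# Usage sketch for get_configuration above: for a document "booklet.sla" the
# optional project overrides are read from "booklet.conf.json" when present
# (both filenames are hypothetical examples).
config = get_configuration("booklet.sla")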
def npz_path(path):
    path = pathlib.Path(path)
    p = path
    if not p.exists():
        p = path.with_name(path.name + '.npz')
    if not p.exists():
        p = path.with_suffix('.npz')
    if not p.exists():
        raise FileNotFoundError(path)
    return p
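# Usage sketch for npz_path above: it tries the path as given, then with
# ".npz" appended, then with the existing suffix replaced by ".npz"
# ("results/embeddings" is a hypothetical path).
weights_file = npz_path("results/embeddings")  # e.g. results/embeddings.npz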
def process_input(subjects_files, mask):
    """Process input to obtain data suitable for SRM"""
    mask_suffix = "_" + mask + ".nii.gz"
    srm_input = []
    for subject_files in subjects_files:
        srm_input_subject = []
        for path in subject_files:
            if path.name.endswith(mask_suffix):
                continue
            img = nib.load(str(path))
            mask_path = \
                str(path.with_suffix("").with_suffix("")) + mask_suffix
            mask = nib.load(str(mask_path))
            srm_input_subject.append(nilearn.masking.apply_mask(img, mask))
        srm_input.append(
            stats.zscore(np.concatenate(srm_input_subject), axis=0, ddof=1).T)
    return srm_input
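# Usage sketch for process_input above, assuming each run "<name>.nii.gz" has
# a companion mask "<name>_brain_mask.nii.gz" next to it (all paths and the
# mask label are hypothetical).
from pathlib import Path

subjects_files = [
    [Path("sub-01/run-1.nii.gz"), Path("sub-01/run-2.nii.gz")],
    [Path("sub-02/run-1.nii.gz"), Path("sub-02/run-2.nii.gz")],
]
srm_input = process_input(subjects_files, "brain_mask")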
def compress_folder(path: Union[str, Path], compression: int = 9, jobs: int = 1):
    path = make_path(path)
    folder = path.name
    archive = path.with_suffix(".tar.gz").name

    exe_pv = shutil.which("pv")
    exe_tar = shutil.which("tar")
    exe_gzip = shutil.which("gzip")
    exe_pigz = shutil.which("pigz")

    with cwd.WorkingDir(path.parent):
        if exe_pigz:
            if exe_pv:
                with open(archive, "wb") as fptr:
                    size = get_folder_size(folder)
                    process_tar = subprocess.Popen(
                        [exe_tar, "cf", "-", folder],
                        stdout=subprocess.PIPE,
                    )
                    process_pv = subprocess.Popen(
                        [exe_pv, "-s", str(size)],
                        stdin=process_tar.stdout,
                        stdout=subprocess.PIPE,
                    )
                    process_pigz = subprocess.Popen(
                        [exe_pigz, "-" + str(compression), "-p", str(jobs)],
                        stdin=process_pv.stdout,
                        stdout=fptr,
                    )
                    process_tar.wait()
                    process_pv.wait()
                    process_pigz.wait()
            else:
                LOGGER.warning("cannot find pv, no progress will be displayed")
                with open(archive, "wb") as fptr:
                    process_tar = subprocess.Popen(
                        [exe_tar, "cf", "-", folder],
                        stdout=subprocess.PIPE,
                    )
                    process_pigz = subprocess.Popen(
                        [exe_pigz, "-" + str(compression), "-p", str(jobs)],
                        stdin=process_tar.stdout,
                        stdout=fptr,
                    )
                    process_tar.wait()
                    process_pigz.wait()
        elif exe_gzip:
            if jobs > 1:
                LOGGER.warning(
                    "gzip does not support parallel compression, using one thread only",
                )
            if exe_pv:
                with open(archive, "wb") as fptr:
                    size = get_folder_size(folder)
                    process_tar = subprocess.Popen(
                        [exe_tar, "cf", "-", folder],
                        stdout=subprocess.PIPE,
                    )
                    process_pv = subprocess.Popen(
                        [exe_pv, "-s", str(size)],
                        stdin=process_tar.stdout,
                        stdout=subprocess.PIPE,
                    )
                    process_gzip = subprocess.Popen(
                        [exe_gzip, "-" + str(compression)],
                        stdin=process_pv.stdout,
                        stdout=fptr,
                    )
                    process_tar.wait()
                    process_pv.wait()
                    process_gzip.wait()
            else:
                LOGGER.warning("cannot find pv, no progress will be displayed")
                with open(archive, "wb") as fptr:
                    process_tar = subprocess.Popen(
                        [exe_tar, "cf", "-", folder],
                        stdout=subprocess.PIPE,
                    )
                    process_gzip = subprocess.Popen(
                        [exe_gzip, "-" + str(compression)],
                        stdin=process_tar.stdout,
                        stdout=fptr,
                    )
                    process_tar.wait()
                    process_gzip.wait()
        else:
            raise RuntimeError("Cannot find either pigz or gzip")
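# Usage sketch for compress_folder above: writes "<folder>.tar.gz" next to
# the folder, preferring pigz with the requested thread count and falling
# back to single-threaded gzip ("/data/run_001" is a hypothetical path).
compress_folder("/data/run_001", compression=6, jobs=4)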
def article_path(self, article: Article) -> Path:
    if article.is_index():
        assert article.node.parent
        return self.index_path(article.node.parent)
    path = self.node_path(article.node.parent) / article.slug
    return self.fs.join(path.with_suffix(".html"))