def initDB(drop=False):
    """Initialize the stockflow SQLite database.

    Creates the ``~/.stockflow`` data directory (falling back to the
    user's home on failure), binds the elixir metadata to the database
    file, and creates the tables when missing.

    Parameters:
        drop: when True, drop all existing tables and recreate them.
    """
    from elixir import metadata, setup_all, drop_all, create_all
    from genericpath import exists
    from os import makedirs
    from posixpath import expanduser, join

    DB_NAME = "stockflow.sqlite"
    log = logging.getLogger(__name__)
    log.info("Inicializando o Core")
    dbpath = expanduser("~/.stockflow/")
    if not exists(dbpath):
        try:
            makedirs(dbpath)
        except OSError:
            # BUG FIX: the original message used a backslash line
            # continuation *inside* the string literal, embedding a run
            # of indentation spaces into the logged text.
            log.warning("Nao foi possivel criar os diretorios, "
                        "usando o home do usuário.")
            dbpath = expanduser("~")
    # BUG FIX: "".join(("sqlite:///", dbpath, DB_NAME)) produced a path
    # with no separator (".../homestockflow.sqlite") when the fallback
    # home directory (no trailing slash) was used; join() is safe for
    # both the trailing-slash and bare-directory cases.
    metadata.bind = "sqlite:///" + join(dbpath, DB_NAME)
    metadata.bind.echo = False
    setup_all()
    if drop:
        drop_all()
    if not exists(join(dbpath, DB_NAME)) or drop:
        log.debug("Criando tabelas...")
        create_all()
def test_expanduser_home_envvar(self):
    """expanduser("~") reflects $HOME and normalizes trailing slashes."""
    with support.EnvironmentVarGuard() as env:
        env['HOME'] = '/home/victor'
        self.assertEqual(posixpath.expanduser("~"), "/home/victor")

        # A trailing slash on $HOME must not leak into the result.
        env['HOME'] = '/home/victor/'
        self.assertEqual(posixpath.expanduser("~"), "/home/victor")

        # Degenerate HOME values all collapse to the root directory.
        for degenerate in ('/', '', '//', '///'):
            env['HOME'] = degenerate
            self.assertEqual(posixpath.expanduser("~"), "/")
            self.assertEqual(posixpath.expanduser("~/"), "/")
            self.assertEqual(posixpath.expanduser("~/foo"), "/foo")
def test_expanduser_pwd(self):
    """expanduser falls back to the pwd database when $HOME is unset."""
    pwd = support.import_module('pwd')

    self.assertIsInstance(posixpath.expanduser("~/"), str)
    # Skip the slash-join equivalence when home is the root directory,
    # where "~" and "~/" both expand to "/".
    if posixpath.expanduser("~") != '/':
        self.assertEqual(posixpath.expanduser("~") + "/",
                         posixpath.expanduser("~/"))
    self.assertIsInstance(posixpath.expanduser("~root/"), str)
    self.assertIsInstance(posixpath.expanduser("~foo/"), str)

    with support.EnvironmentVarGuard() as env:
        # With HOME removed, the password database is consulted.
        del env['HOME']
        pw_home = pwd.getpwuid(os.getuid()).pw_dir
        # $HOME can end with a trailing /, so strip it (see #17809)
        pw_home = pw_home.rstrip("/") or '/'
        self.assertEqual(posixpath.expanduser("~"), pw_home)

        # bpo-10496: when neither $HOME nor the password database can
        # resolve the user, the path must come back unchanged.
        def boom(*args):
            raise KeyError
        with support.swap_attr(pwd, 'getpwuid', boom), \
             support.swap_attr(pwd, 'getpwnam', boom):
            for path in ('~', '~/.local', '~vstinner/'):
                self.assertEqual(posixpath.expanduser(path), path)
def test_expanduser_home_envvar(self):
    """$HOME drives expanduser("~"); trailing slashes are stripped."""
    with support.EnvironmentVarGuard() as env:
        env['HOME'] = '/home/victor'
        self.assertEqual(posixpath.expanduser("~"), "/home/victor")

        # expanduser() strips trailing slash
        env['HOME'] = '/home/victor/'
        self.assertEqual(posixpath.expanduser("~"), "/home/victor")

        for degenerate in ('/', '', '//', '///'):
            with self.subTest(home=degenerate):
                env['HOME'] = degenerate
                self.assertEqual(posixpath.expanduser("~"), "/")
                self.assertEqual(posixpath.expanduser("~/"), "/")
                self.assertEqual(posixpath.expanduser("~/foo"), "/foo")
def __setitem__(self, key, value):
    """Store a config value; the "includes" key is handled specially by
    parsing each referenced config file (or directory of files) into
    self._parser instead of storing the raw value.  (Python 2 code.)"""
    if key == "includes":
        # A list value keeps only its first element — presumably
        # upstream only ever passes a single-element list; confirm.
        if isinstance(value, list):
            value = value[0]
        for path in split(value):
            # Resolve %(...)s references against the DEFAULT section,
            # then ~ in the resulting path.
            path = self._parser._interpolate("DEFAULT", None, path, self)
            path = posixpath.expanduser(path)
            if not posixpath.exists(path):
                raise Exception, "No such configuration file: %s" % path
            if posixpath.isdir(path):
                logging.info("Parsing config filenames from directory: %s",
                    path)

                # Read every regular file found under the directory.
                def walk_func(arg, directory, names):
                    for name in names:
                        path = posixpath.join(directory, name)
                        if not posixpath.isdir(path):
                            arg._parser.read(path)

                posixpath.walk(path, walk_func, self)
            else:
                logging.info("Parsing config filename: %s", path)
                self._parser.read(path)
    # Environment has precedence over configuration
    elif not key.startswith("CHECKBOX") or key.upper() not in os.environ:
        super(IncludeDict, self).__setitem__(key, value)
def _check_and_prepare_dir(self):
    """
    Checks if input data is ok.
    Creates or overwrites dataset folder.
    Returns True if the dataset needs to be created, as opposed to read.
    """
    fs, path, mode = self._fs, self._path, self._mode
    if path.startswith("s3://"):
        # Compare the locally stored Activeloop user id against the one
        # embedded in the bucket path before touching remote data.
        with open(posixpath.expanduser("~/.activeloop/store"), "rb") as f:
            stored_username = json.load(f)["_id"]
        current_username = path.split("/")[-2]
        if stored_username != current_username:
            try:
                fs.listdir(path)
            except Exception:
                # BUG FIX: was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit.
                raise WrongUsernameException(stored_username)
    exist_meta = fs.exists(posixpath.join(path, "meta.json"))
    if exist_meta:
        # A meta.json marks an existing Hub dataset: overwrite in "w"
        # mode, otherwise read it.
        if "w" in mode:
            fs.rm(path, recursive=True)
            fs.makedirs(path)
            return True
        return False
    # No meta.json: reading is impossible, and a non-empty directory
    # would be clobbered, so guard both cases.
    if "r" in mode:
        raise HubDatasetNotFoundException(path)
    if not fs.exists(path):
        fs.makedirs(path)
    elif get_file_count(fs, path) > 0:
        if "w" in mode:
            raise NotHubDatasetToOverwriteException()
        raise NotHubDatasetToAppendException()
    return True
def __getitem__(self, slice_):
    """| Get an item to be computed without iterating on the whole dataset.
    | Creates a dataset view, then a temporary dataset to apply the transform.

    Parameters:
    ----------
    slice_: slice
        Gets a slice or slices from dataset
    """
    # Normalize the argument to a list: a lone scalar, slice, or string
    # is wrapped so it can be processed uniformly below.
    if not isinstance(slice_, abc.Iterable) or isinstance(slice_, str):
        slice_ = [slice_]
    slice_ = list(slice_)
    subpath, slice_list = slice_split(slice_)
    if len(slice_list) == 0:
        slice_list = [slice(None, None, None)]
    # The first slice component selects samples: derive count and offset
    # against the dataset's first dimension.
    num, ofs = slice_extract_info(slice_list[0], self.shape[0])
    ds_view = DatasetView(
        dataset=self._ds,
        num_samples=num,
        offset=ofs,
        # An integer index squeezes the sample dimension away.
        squeeze_dim=isinstance(slice_list[0], int),
    )
    # Materialize the transform of the view into a scratch dataset under
    # the user's Activeloop cache directory.
    path = posixpath.expanduser("~/.activeloop/tmparray")
    new_ds = self.store(path, length=num, ds=ds_view, progressbar=False)

    index = 1 if len(slice_) > 1 else 0
    # Get all shape dimension since we already sliced
    slice_[index] = (slice(None, None, None)
                     if not isinstance(slice_list[0], int) else 0)
    return new_ds[slice_]
def __init__(self, path=None, expanduser=False):
    """Wrap a filesystem path value.

    Defaults to the current working directory when *path* is None;
    expands a leading ``~`` only when *expanduser* is true.
    (Python 2 code: ``posix.getcwdu`` returns a unicode cwd.)
    """
    super(Path, self).__init__()
    path = posix.getcwdu() if path is None else path
    self.value = posixpath.expanduser(path) if expanduser else path
    self.create_methods()
    # Parent object is looked up in the interpreter runtime registry.
    self.parent = runtime.find("Object")
def __call__(cls, *args): if args: value = posixpath.expandvars( posixpath.expanduser( posixpath.join(*args))) else: value = str() return value
def FileNameReturnKey(self, event):
    """Handle <Return> in the filename entry.

    Absolutizes a relative name against the current directory shown in
    the dialog, then flashes and presses the OK button.
    """
    from posixpath import isabs, expanduser, join
    # BUG FIX: `from string import strip` no longer exists in Python 3;
    # the str method does the same thing.
    # if its a relative path then include the cwd in the name
    name = self.fileNameEntry.get().strip()
    if not isabs(expanduser(name)):
        self.fileNameEntry.delete(0, 'end')
        self.fileNameEntry.insert(0, join(self.cwd_print(), name))
    self.okButton.flash()
    self.OkPressed()
def get_globals(script):
    """Execute *script* and return the module-level namespace it built.

    The argument is ~-expanded; when it does not name an existing file
    it is resolved through get_path() (the executable search path).

    Raises:
        Exception: if the script cannot be located.
    """
    path = posixpath.expanduser(script)
    if not posixpath.exists(path):
        path = get_path(script)
        if not path:
            raise Exception("Script not found in PATH: %s" % script)
    globals = {}
    # BUG FIX: the original `open(path).read()` leaked the file object
    # until garbage collection; a context manager closes it promptly.
    with open(path) as f:
        source = f.read()
    exec(compile(source, path, 'exec'), globals)
    return globals
def get_globals(script):
    # Python 2 variant: execute *script* (a ~-expandable path, or a name
    # resolved via get_path) with execfile and return its global
    # namespace.  Raises when the script cannot be located.
    path = posixpath.expanduser(script)
    if not posixpath.exists(path):
        path = get_path(script)
        if not path:
            raise Exception, "Script not found in PATH: %s" % script
    globals = {}
    execfile(path, globals)
    return globals
def __call__(cls, *args, **kwargs): if args: value = posixpath.expandvars( posixpath.expanduser( posixpath.join(*args))) else: value = str() if value and 'obj' in kwargs or 'object' in kwargs: value = pathlib.Path(value) return value
def __init__(self, root, path=None, **kwargs):
    """Static-files resource: serve from *path*, else the configured
    htdocs_dir (if set and non-blank), else the packaged default
    frontend.  (Python 2 code — note the print statements.)"""
    self.root = root
    htdocs_dir = self.root.config.get("htdocs_dir")
    if htdocs_dir:
        # NOTE(review): this branch is only reached for truthy values,
        # so the strip()=="" check only catches all-whitespace strings.
        if htdocs_dir.strip() == "":
            htdocs_dir = None
        else:
            htdocs_dir = posixpath.expanduser(htdocs_dir)
    # NOTE(review): leftover debug output — consider removing.
    print htdocs_dir
    self.path = (path or htdocs_dir or
        pkg_resources.resource_filename("scrapyd", "frontend/site"))
    # NOTE(review): leftover debug output — consider removing.
    print self.path
    resource.Resource.__init__(self, **kwargs)
def _get_s3_artifact_cmd_and_envs(artifact_repo): # pylint: disable=unused-argument aws_path = posixpath.expanduser("~/.aws") volumes = [] if posixpath.exists(aws_path): volumes = ["-v", "%s:%s" % (str(aws_path), "/.aws")] envs = { "AWS_SECRET_ACCESS_KEY": os.environ.get("AWS_SECRET_ACCESS_KEY"), "AWS_ACCESS_KEY_ID": os.environ.get("AWS_ACCESS_KEY_ID"), "MLFLOW_S3_ENDPOINT_URL": os.environ.get("MLFLOW_S3_ENDPOINT_URL") } envs = dict((k, v) for k, v in envs.items() if v is not None) return volumes, envs
def test_expanduser(self):
    """expanduser leaves plain names alone and expands ~ forms."""
    expand = posixpath.expanduser
    self.assertEqual(expand("foo"), "foo")
    try:
        import pwd
    except ImportError:
        pass
    else:
        for tilde_path in ("~/", "~root/", "~foo/"):
            self.assertIsInstance(expand(tilde_path), basestring)
        # if home directory == root directory, this test makes no sense
        if expand("~") != '/':
            self.assertEqual(expand("~") + "/", expand("~/"))
        with test_support.EnvironmentVarGuard() as env:
            env['HOME'] = '/'
            self.assertEqual(expand("~"), "/")
            self.assertEqual(expand("~/foo"), "/foo")
def test_expanduser(self):
    """expanduser passes plain paths through and expands ~ variants."""
    expand = posixpath.expanduser
    self.assertEqual(expand("foo"), "foo")
    try:
        import pwd
    except ImportError:
        pass
    else:
        for tilde_path in ("~/", "~root/", "~foo/"):
            self.assertIsInstance(expand(tilde_path), basestring)
        # Comparing "~" + "/" against "~/" is meaningless when home
        # is the root directory.
        if expand("~") != "/":
            self.assertEqual(expand("~") + "/", expand("~/"))
        with test_support.EnvironmentVarGuard() as env:
            env["HOME"] = "/"
            self.assertEqual(expand("~"), "/")
def save(self, filepath=None):
    """Persist the in-memory map via the backend.

    Saves to *filepath*, or to the filename given at construction when
    None; a pre-existing file is kept as a ``.old`` backup, and missing
    parent directories are created.
    """
    if filepath is None:
        filepath = self.filename
        if filepath is None:
            return  # nothing configured to save to
    filepath = posixpath.expanduser(filepath)
    # Keep the previous version around as a backup.
    if posixpath.isfile(filepath):
        os.rename(filepath, filepath + ".old")
    parent = posixpath.dirname(filepath)
    if parent and not posixpath.isdir(parent):
        os.makedirs(parent)
    self._backend.save(filepath, self._hardmap)
def test_expanduser(self):
    """expanduser: identity on plain names, ~ expansion, bad-arg type."""
    expand = posixpath.expanduser
    self.assertEqual(expand("foo"), "foo")
    try:
        import pwd
    except ImportError:
        pass
    else:
        for tilde_path in ("~/", "~root/", "~foo/"):
            self.assert_(isinstance(expand(tilde_path), basestring))
        # if home directory == root directory, this test makes no sense
        if expand("~") != "/":
            self.assertEqual(expand("~") + "/", expand("~/"))
        self.assertRaises(TypeError, posixpath.expanduser)
def _get_s3_artifact_cmd_and_envs(artifact_repo): # pylint: disable=unused-argument if platform.system() == "Windows": win_user_dir = os.environ["USERPROFILE"] aws_path = os.path.join(win_user_dir, ".aws") else: aws_path = posixpath.expanduser("~/.aws") volumes = [] if posixpath.exists(aws_path): volumes = ["-v", "%s:%s" % (str(aws_path), "/.aws")] envs = { "AWS_SECRET_ACCESS_KEY": os.environ.get("AWS_SECRET_ACCESS_KEY"), "AWS_ACCESS_KEY_ID": os.environ.get("AWS_ACCESS_KEY_ID"), "MLFLOW_S3_ENDPOINT_URL": os.environ.get("MLFLOW_S3_ENDPOINT_URL") } envs = dict((k, v) for k, v in envs.items() if v is not None) return volumes, envs
def create_application(self, args=sys.argv):
    """Build the checkbox application.

    Prepares the data directory, merges CHECKBOX_OPTIONS from the
    environment into the argument list, loads the configuration file
    given as the single positional argument, and hands the resulting
    Config to the application factory.  Exits the process on a missing
    config argument or when --version was requested.
    """
    # NOTE(review): args defaults to sys.argv and args[:0] below mutates
    # it in place, so the caller's list (sys.argv itself by default) is
    # modified — confirm this is intended.
    # Create data directory
    data_directory = get_variable("CHECKBOX_DATA", ".")
    safe_make_directory(data_directory)

    # Prepend environment options
    string_options = get_variable("CHECKBOX_OPTIONS", "")
    args[:0] = split(string_options)
    (options, args) = self.parse_options(args)

    # Replace shorthands
    for shorthand in "blacklist", "blacklist_file", "whitelist", "whitelist_file":
        key = ".*/jobs_info/%s" % shorthand
        value = getattr(options, shorthand)
        if value:
            options.config.append("=".join([key, value]))

    # Set logging early
    set_logging(options.log_level, options.log)

    # Config setup
    if len(args) != 2:
        sys.stderr.write(_("Missing configuration file as argument.\n"))
        sys.exit(1)

    config = Config()
    config_filename = posixpath.expanduser(args[1])
    config.read_filename(config_filename)
    config.read_configs(options.config)

    # Record the client name (config basename without .ini) and version
    # in the client_info plugin section, creating it if needed.
    section_name = "checkbox/plugins/client_info"
    section = config.get_section(section_name)
    if not section:
        section = config.add_section(section_name)
    section.set("name", posixpath.basename(args[1]) \
        .replace(".ini", ""))
    section.set("version", config.get_defaults().version)

    # Check options
    if options.version:
        print(config.get_defaults().version)
        sys.exit(0)

    return self.application_factory(config)
def terminfo( term=None, binary=False, encoding=None ) :
    """ Returns a TermInfo structure for the terminal specified.

    The `binary` parameter controls whether the resulting object has its
    capabilities represented as latin1-encoded `bytes` objects or as `str`
    objects.  The former is strictly more correct, but can be a bit painful
    to use.  A few terminfo entries representing more obscure hardware can
    cause utf encoding errors in the `binary=False` mode, but modern stuff
    is generally fine.

    The `encoding` parameter is used for handling of string parameters in
    processing of format-string capabilities.
    """
    key = (term,binary,encoding)
    info = _cache.get( key )
    if info is None :
        if term is None :
            term = os.environ[ 'TERM' ]
        if encoding is None :
            encoding = _default_encoding
        path = None
        # NOTE(review): `entry` appears unused below — confirm before
        # removing.
        entry = _null_entry
        if term != 'null' :
            # Terminfo databases shard entries by the first letter of
            # the terminal name; on macOS the hex code of that letter
            # is used instead.
            suffix = term[0]
            if sys.platform == 'darwin' :
                suffix = '%02X' % ord( suffix )
            suffix += '/' + term
            # Prefer the per-user database, fall back to the system one.
            path = posixpath.expanduser( '~/.terminfo/' + suffix )
            if not os.path.exists( path ) :
                path = '/usr/share/terminfo/' + suffix
        # NOTE(review): for term == 'null', path stays None here —
        # presumably make_terminfo_from_path handles None; confirm.
        info = make_terminfo_from_path( path, binary, encoding )
        # Cache under both the resolved key and the caller's original
        # (possibly None-bearing) key so later lookups hit either way.
        _cache[ (term,binary,encoding) ] = info
        _cache[ key ] = info
    return info
def test_expanduser(self):
    """Smoke-test expanduser on plain and ~-prefixed paths."""
    self.assertEqual(posixpath.expanduser("foo"), "foo")
    try:
        import pwd
    except ImportError:
        pass
    else:
        home = posixpath.expanduser("~")
        self.assert_(isinstance(posixpath.expanduser("~/"), basestring))
        # if home directory == root directory, this test makes no sense
        if home != '/':
            self.assertEqual(home + "/", posixpath.expanduser("~/"))
        self.assert_(isinstance(posixpath.expanduser("~root/"), basestring))
        self.assert_(isinstance(posixpath.expanduser("~foo/"), basestring))
        self.assertRaises(TypeError, posixpath.expanduser)
def __init__(self, home=None):
    """Create the application state directory (default ``~/.<NAME>``)
    and the field-mapping table used for tag synchronisation."""
    self.home = home if home else expanduser('~/.%s' % NAME)
    if not path.exists(self.home):
        mkdir(self.home)
    # (display name, source attribute, target tag, merge policy)
    set_if_empty = Field.SET_IF_EMPTY
    ignore = Field.IGNORE
    self.fields = [
        Field('Album', 'album', 'album', set_if_empty),
        Field('Artist', 'artist.name', 'artist', ignore),
        Field('Label', 'label.name', 'grouping', set_if_empty),
        Field('Long Description', 'notes', 'long description', set_if_empty),
        Field('Genre', 'genres[0]', 'genre', set_if_empty),
        Field('Released', 'released', 'released', set_if_empty),
        Field('Title', 'title', 'name', ignore),
        Field('Track Number', 'track number', 'track number', set_if_empty),
        Field('Year', 'year', 'year', set_if_empty),
    ]
def test_expanduser(self):
    """expanduser basics plus the HOME='/' special case."""
    expand = posixpath.expanduser
    self.assertEqual(expand("foo"), "foo")
    try:
        import pwd
    except ImportError:
        pass
    else:
        for tilde in ("~/", "~root/", "~foo/"):
            self.assert_(isinstance(expand(tilde), basestring))
        # if home directory == root directory, this test makes no sense
        if expand("~") != '/':
            self.assertEqual(expand("~") + "/", expand("~/"))
        with test_support.EnvironmentVarGuard() as env:
            env.set('HOME', '/')
            self.assertEqual(expand("~"), "/")
        self.assertRaises(TypeError, posixpath.expanduser)
def load(self, filepath):
    """Load the persisted map from *filepath*.

    Falls back to the ``.old`` backup when the primary file fails to
    parse.  An empty file is treated as "nothing persisted yet".

    Raises:
        PersistError: if the file is missing, or both the file and its
            backup fail to parse.
    """
    filepath = posixpath.expanduser(filepath)
    if not posixpath.isfile(filepath):
        raise PersistError("File not found: %s" % filepath)
    if posixpath.getsize(filepath) == 0:
        return
    try:
        self._hardmap = self._backend.load(filepath)
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit.
        filepathold = filepath + ".old"
        if (posixpath.isfile(filepathold)
                and posixpath.getsize(filepathold) > 0):
            # warning("Broken configuration file at %s" % filepath)
            # warning("Trying backup at %s" % filepathold)
            try:
                self._hardmap = self._backend.load(filepathold)
            except Exception:
                raise PersistError("Broken configuration file at %s"
                                   % filepathold)
        else:
            raise PersistError("Broken configuration file at %s"
                               % filepath)
+ "\n") # Adds the bash_shortcuts shortcuts: bash_shortcuts += ("alias " + line[0] + "=\"cd " + line[1] + " && ls -a\"" + "\n") # qutebrowser shortcuts: qute_shortcuts += ("config.bind(';" + line[0] + "', 'set downloads.location.directory " + line[1] + " ;; hint links download')" + "\n") # nautilus bookmarks nautilus_shortcuts += ("file://" + posixpath.expanduser(line[1]) + "\n") # Goes through the config file file and adds the shortcuts to both # bash_shortcuts and ranger. with open(home + ".scripts/configs") as conf: for line in csv.reader(conf, dialect="excel-tab"): # fishshortcuts+=("alias "+line[0]+"=\"vim "+line[1]+"\"\n") # fishshortcuts+=("abbr --add "+line[0]+" \"vim "+line[1]+"\"\n") bash_shortcuts += ("alias " + line[0] + "=\"vim " + line[1] + "\"" + "\n") ranger_shortcuts += ("map " + line[0] + " shell vim " + line[1] + "\n")
def test_expanduser(self):
    """expanduser on str and bytes, degenerate $HOME values, and the
    password-database fallback."""
    self.assertEqual(posixpath.expanduser('foo'), 'foo')
    self.assertEqual(posixpath.expanduser(b'foo'), b'foo')

    with support.EnvironmentVarGuard() as env:
        for home in ('/', '', '//', '///'):
            with self.subTest(home=home):
                env['HOME'] = home
                self.assertEqual(posixpath.expanduser('~'), '/')
                self.assertEqual(posixpath.expanduser('~/'), '/')
                self.assertEqual(posixpath.expanduser('~/foo'), '/foo')

    try:
        import pwd
    except ImportError:
        pass
    else:
        for text, data in (('~/', b'~/'), ('~root/', b'~root/'),
                           ('~foo/', b'~foo/')):
            self.assertIsInstance(posixpath.expanduser(text), str)
            self.assertIsInstance(posixpath.expanduser(data), bytes)
        # Meaningless when home is the root directory.
        if posixpath.expanduser('~') != '/':
            self.assertEqual(posixpath.expanduser('~') + '/',
                             posixpath.expanduser('~/'))
            self.assertEqual(posixpath.expanduser(b'~') + b'/',
                             posixpath.expanduser(b'~/'))
        with support.EnvironmentVarGuard() as env:
            # With no $HOME, fall back to the password database.
            del env['HOME']
            pw_home = pwd.getpwuid(os.getuid()).pw_dir
            # $HOME can end with a trailing /, so strip it (see #17809)
            pw_home = pw_home.rstrip('/') or '/'
            self.assertEqual(posixpath.expanduser('~'), pw_home)
def init(self, receiver, context, m, path=None, expanduser=False):
    """Initialise *receiver* with a path value (defaulting to the
    current working directory), optionally ~-expanded; returns it."""
    if path is None:
        resolved = posix.getcwdu()
    else:
        resolved = str(path)
    if bool(expanduser):
        resolved = posixpath.expanduser(resolved)
    receiver.value = resolved
    return receiver
def nsidc_icesat2_sync_s3(aws_access_key_id, aws_secret_access_key,
    aws_region_name, s3_bucket_name, s3_bucket_path, PRODUCTS, RELEASE,
    VERSIONS, GRANULES, TRACKS, YEARS=None, SUBDIRECTORY=None, CYCLES=None,
    REGION=None, AUXILIARY=False, INDEX=None, FLATTEN=False, TIMEOUT=None,
    RETRY=1, PROCESSES=0, CLOBBER=False):
    """Sync ICESat-2 granules from the NSIDC HTTPS server to an AWS S3
    bucket.

    The remote file list is built either from an INDEX file of granule
    names or by crawling the product/subdirectory listings filtered by
    track, cycle, granule, release and version.  Files are then copied
    serially, or with a multiprocessing pool when PROCESSES > 0.
    """
    #-- get aws session object
    session = boto3.Session(aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        region_name=aws_region_name)
    #-- get s3 object and bucket object
    s3 = session.resource('s3')
    bucket = s3.Bucket(s3_bucket_name)
    #-- logging to standard output
    logging.basicConfig(level=logging.INFO)
    #-- compile HTML parser for lxml
    parser = lxml.etree.HTMLParser()
    #-- remote https server for ICESat-2 Data
    HOST = 'https://n5eil01u.ecs.nsidc.org'
    #-- regular expression operator for finding files of a particular granule
    #-- find ICESat-2 HDF5 files in the subdirectory for product and release
    if TRACKS:
        regex_track = r'|'.join(['{0:04d}'.format(T) for T in TRACKS])
    else:
        regex_track = r'\d{4}'
    if CYCLES:
        regex_cycle = r'|'.join(['{0:02d}'.format(C) for C in CYCLES])
    else:
        regex_cycle = r'\d{2}'
    regex_granule = r'|'.join(['{0:02d}'.format(G) for G in GRANULES])
    regex_version = r'|'.join(['{0:02d}'.format(V) for V in VERSIONS])
    regex_suffix = r'(.*?)' if AUXILIARY else r'(h5|nc)'
    # Filename patterns: the default covers most products; ATL11 and
    # ATL14/ATL15 use different naming conventions.
    default_pattern = (
        r'{0}(-\d{{2}})?_(\d{{4}})(\d{{2}})(\d{{2}})(\d{{2}})'
        r'(\d{{2}})(\d{{2}})_({1})({2})({3})_({4})_({5})(.*?).{6}$')
    ATL11_pattern = r'({0})_({1})({2})_(\d{{2}})(\d{{2}})_({3})_({4})(.*?).{5}$'
    ATL1415_pattern = r'({0})_({1})_(\d{{2}})(\d{{2}})_({3})_({4})(.*?).{5}$'
    #-- regular expression operator for finding subdirectories
    if SUBDIRECTORY:
        #-- Sync particular subdirectories for product
        R2 = re.compile(r'(' + '|'.join(SUBDIRECTORY) + ')', re.VERBOSE)
    elif YEARS:
        #-- Sync particular years for product
        regex_pattern = '|'.join('{0:d}'.format(y) for y in YEARS)
        R2 = re.compile(r'({0}).(\d+).(\d+)'.format(regex_pattern), re.VERBOSE)
    else:
        #-- Sync all available subdirectories for product
        R2 = re.compile(r'(\d+).(\d+).(\d+)', re.VERBOSE)
    #-- build list of remote files, remote modification times and AWS S3 files
    remote_files = []
    remote_mtimes = []
    s3_files = []
    #-- build lists of files or use existing index file
    if INDEX:
        #-- read the index file, split at lines and remove all commented lines
        with open(os.path.expanduser(INDEX), 'r') as f:
            files = [i for i in f.read().splitlines()
                if re.match(r'^(?!\#)', i)]
        #-- regular expression operator for extracting information from files
        rx = re.compile(
            r'(ATL\d{2})(-\d{2})?_(\d{4})(\d{2})(\d{2})(\d{2})'
            r'(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$')
        #-- for each line in the index
        for f in files:
            #-- extract parameters from ICESat-2 ATLAS HDF5 file
            PRD, HEM, YY, MM, DD, HH, MN, SS, TRK, CYC, GRN, RL, VRS, AUX = rx.findall(
                f).pop()
            #-- get directories from remote directory
            product_directory = '{0}.{1}'.format(PRD, RL)
            sd = '{0}.{1}.{2}'.format(YY, MM, DD)
            PATH = [HOST, 'ATLAS', product_directory, sd]
            remote_dir = posixpath.join(HOST, 'ATLAS', product_directory, sd)
            #-- AWS S3 directory for product and subdirectory
            if FLATTEN:
                s3_path = posixpath.expanduser(s3_bucket_path)
            else:
                s3_path = posixpath.join(s3_bucket_path, product_directory, sd)
            #-- find ICESat-2 data file to get last modified time
            #-- find matching files (for granule, release, version, track)
            names, lastmod, error = icesat2_toolkit.utilities.nsidc_list(
                PATH, build=False, timeout=TIMEOUT, parser=parser,
                pattern=f.strip())
            #-- print if file was not found
            if not names:
                logging.critical(error)
                continue
            #-- add to lists
            for colname, remote_mtime in zip(names, lastmod):
                #-- remote and AWS S3 versions of the file
                remote_files.append(posixpath.join(remote_dir, colname))
                s3_files.append(posixpath.join(s3_path, colname))
                remote_mtimes.append(remote_mtime)
    else:
        #-- for each ICESat-2 product listed
        for p in PRODUCTS:
            logging.info('PRODUCT={0}'.format(p))
            #-- get directories from remote directory
            product_directory = '{0}.{1}'.format(p, RELEASE)
            PATH = [HOST, 'ATLAS', product_directory]
            #-- compile regular expression operator
            if p in ('ATL11', ):
                R1 = re.compile(
                    ATL11_pattern.format(p, regex_track, regex_granule,
                        RELEASE, regex_version, regex_suffix))
            elif p in ('ATL14', 'ATL15'):
                regex_region = '|'.join(REGION)
                R1 = re.compile(
                    ATL1415_pattern.format(p, regex_region, RELEASE,
                        regex_version, regex_suffix))
            else:
                R1 = re.compile(
                    default_pattern.format(p, regex_track, regex_cycle,
                        regex_granule, RELEASE, regex_version,
                        regex_suffix))
            #-- read and parse request for subdirectories (find column names)
            remote_sub, _, error = icesat2_toolkit.utilities.nsidc_list(
                PATH, build=False, timeout=TIMEOUT, parser=parser,
                pattern=R2, sort=True)
            #-- print if subdirectory was not found
            if not remote_sub:
                logging.critical(error)
                continue
            #-- for each remote subdirectory
            for sd in remote_sub:
                #-- AWS S3 directory for product and subdirectory
                if FLATTEN:
                    s3_path = posixpath.expanduser(s3_bucket_path)
                else:
                    s3_path = posixpath.join(s3_bucket_path,
                        product_directory, sd)
                logging.info("Building file list: {0}".format(sd))
                #-- find ICESat-2 data files
                PATH = [HOST, 'ATLAS', product_directory, sd]
                remote_dir = posixpath.join(HOST, 'ATLAS',
                    product_directory, sd)
                #-- find matching files (for granule, release, version, track)
                names, lastmod, error = icesat2_toolkit.utilities.nsidc_list(
                    PATH, build=False, timeout=TIMEOUT, parser=parser,
                    pattern=R1, sort=True)
                #-- print if file was not found
                if not names:
                    logging.critical(error)
                    continue
                #-- build lists of each ICESat-2 data file
                for colname, remote_mtime in zip(names, lastmod):
                    #-- remote and AWS S3 versions of the file
                    remote_files.append(posixpath.join(remote_dir, colname))
                    s3_files.append(posixpath.join(s3_path, colname))
                    remote_mtimes.append(remote_mtime)
    #-- sync in series if PROCESSES = 0
    if (PROCESSES == 0):
        #-- sync each ICESat-2 data file
        for i, remote_file in enumerate(remote_files):
            #-- sync ICESat-2 files with NSIDC server
            args = (bucket, remote_file, remote_mtimes[i], s3_files[i])
            kwds = dict(TIMEOUT=TIMEOUT, RETRY=RETRY, CLOBBER=CLOBBER)
            output = http_pull_file(*args, **kwds)
            #-- print the output string
            logging.info(output) if output else None
    else:
        #-- set multiprocessing start method
        ctx = mp.get_context("fork")
        #-- sync in parallel with multiprocessing Pool
        pool = ctx.Pool(processes=PROCESSES)
        #-- sync each ICESat-2 data file
        out = []
        for i, remote_file in enumerate(remote_files):
            #-- sync ICESat-2 files with NSIDC server
            args = (bucket, remote_file, remote_mtimes[i], s3_files[i])
            kwds = dict(TIMEOUT=TIMEOUT, RETRY=RETRY, CLOBBER=CLOBBER)
            out.append(
                pool.apply_async(multiprocess_sync, args=args, kwds=kwds))
        #-- start multiprocessing jobs
        #-- close the pool
        #-- prevents more tasks from being submitted to the pool
        pool.close()
        #-- exit the completed processes
        pool.join()
        #-- print the output string
        for output in out:
            temp = output.get()
            logging.info(temp) if temp else None
def test_expanduser(self):
    """expanduser on str and bytes inputs, plus argument-type errors."""
    self.assertEqual(posixpath.expanduser("foo"), "foo")
    self.assertEqual(posixpath.expanduser(b"foo"), b"foo")
    try:
        import pwd
    except ImportError:
        pass
    else:
        for text, data in (("~/", b"~/"), ("~root/", b"~root/"),
                           ("~foo/", b"~foo/")):
            self.assertTrue(isinstance(posixpath.expanduser(text), str))
            self.assertTrue(isinstance(posixpath.expanduser(data), bytes))
        # if home directory == root directory, this test makes no sense
        if posixpath.expanduser("~") != '/':
            self.assertEqual(posixpath.expanduser("~") + "/",
                             posixpath.expanduser("~/"))
            self.assertEqual(posixpath.expanduser(b"~") + b"/",
                             posixpath.expanduser(b"~/"))
        with support.EnvironmentVarGuard() as env:
            env['HOME'] = '/'
            self.assertEqual(posixpath.expanduser("~"), "/")
        self.assertRaises(TypeError, posixpath.expanduser)
def test_expanduser(self):
    """expanduser for str/bytes, degenerate $HOME, and pwd fallback."""
    self.assertEqual(posixpath.expanduser("foo"), "foo")
    self.assertEqual(posixpath.expanduser(b"foo"), b"foo")

    with support.EnvironmentVarGuard() as env:
        for home in ('/', '', '//', '///'):
            with self.subTest(home=home):
                env['HOME'] = home
                self.assertEqual(posixpath.expanduser("~"), "/")
                self.assertEqual(posixpath.expanduser("~/"), "/")
                self.assertEqual(posixpath.expanduser("~/foo"), "/foo")

    try:
        import pwd
    except ImportError:
        pass
    else:
        for text, data in (("~/", b"~/"), ("~root/", b"~root/"),
                           ("~foo/", b"~foo/")):
            self.assertIsInstance(posixpath.expanduser(text), str)
            self.assertIsInstance(posixpath.expanduser(data), bytes)
        # if home directory == root directory, this test makes no sense
        if posixpath.expanduser("~") != '/':
            self.assertEqual(posixpath.expanduser("~") + "/",
                             posixpath.expanduser("~/"))
            self.assertEqual(posixpath.expanduser(b"~") + b"/",
                             posixpath.expanduser(b"~/"))
        with support.EnvironmentVarGuard() as env:
            # expanduser should fall back to using the password database
            del env['HOME']
            pw_home = pwd.getpwuid(os.getuid()).pw_dir
            # $HOME can end with a trailing /, so strip it (see #17809)
            pw_home = pw_home.rstrip("/") or '/'
            self.assertEqual(posixpath.expanduser("~"), pw_home)
def path_expand(path):
    """Expand ``~`` in *path* and return the matching filesystem entries."""
    expanded = posixpath.expanduser(path)
    matches = glob(expanded)
    return matches
def expanduser(self: _P) -> _P:
    """Return a copy of this path with ``~``/``~user`` prefixes expanded."""
    expanded = posixpath.expanduser(self._path_str)
    return self._new(expanded)
import os import sys import posixpath # FIXME: What would be lovely is an Epiphany extension to call this from Bugzilla pages # FIXME: Should be possible (but currently isn't) to get this from the patch number program = 'metacity' if len(sys.argv) < 2: print 'Specify patch number' sys.exit(255) patchnum = sys.argv[1] patchdir = posixpath.expanduser('~/patch/%s/%s' % (program, patchnum)) os.makedirs(patchdir) os.chdir(patchdir) if os.system("svn co svn+ssh://svn.gnome.org/svn/%s/trunk ." % (program)) != 0: print "Checkout failed." sys.exit(255) if os.system( "wget http://bugzilla.gnome.org/attachment.cgi?id=%s -O - -q|patch -p 0" % (patchnum)) != 0: print "Patch failed." sys.exit(255) if os.system("./autogen.sh") != 0:
def coerce(self, value):
    """Coerce *value* like the parent class, then expand a leading ~."""
    base = super(PathVariable, self).coerce(value)
    expanded = posixpath.expanduser(base)
    return expanded
def test_expanduser(self):
    """expanduser for str/bytes arguments and the HOME='/' edge case."""
    self.assertEqual(posixpath.expanduser("foo"), "foo")
    self.assertEqual(posixpath.expanduser(b"foo"), b"foo")
    try:
        import pwd
    except ImportError:
        pass
    else:
        for text, data in (("~/", b"~/"), ("~root/", b"~root/"),
                           ("~foo/", b"~foo/")):
            self.assertIsInstance(posixpath.expanduser(text), str)
            self.assertIsInstance(posixpath.expanduser(data), bytes)
        # if home directory == root directory, this test makes no sense
        if posixpath.expanduser("~") != '/':
            self.assertEqual(posixpath.expanduser("~") + "/",
                             posixpath.expanduser("~/"))
            self.assertEqual(posixpath.expanduser(b"~") + b"/",
                             posixpath.expanduser(b"~/"))
        with support.EnvironmentVarGuard() as env:
            env['HOME'] = '/'
            self.assertEqual(posixpath.expanduser("~"), "/")
def edit_news_entry(version):
    """Assemble a draft NEWS entry for *version* from the ChangeLog and
    po/ translation credits, open it in the user's editor, then prepend
    the edited result to NEWS and check it in.  (Python 2 code.)"""
    # FIXME: still needs a lot of tidying up. Translator stuff especially needs to be
    # factored out into a separate function.
    (contributors, changelog, entries, release_date) = scan_changelog(version)

    # Build the "Thanks to A, B, and C" line from the contributor map.
    contributors_list = contributors.keys()
    contributors_list.sort()
    thanksline = ', '.join([contributors[x] for x in contributors_list])
    thanksline = thanksline.replace(contributors[contributors_list[-1]],
        'and '+contributors[contributors_list[-1]])

    thanks = '%s\n%s\n\n' % (version['string'], '='*len(version['string']))
    thanks += wordwrap('Thanks to %s for improvements in this version.' % (thanksline))
    thanks += '\n\n'
    # Each entry becomes a placeholder bullet for the editor to fill in.
    for line in entries:
        thanks += ' - xxx %s\n' % (line)

    # and now pick up the translations.
    translations = {}
    language_re = re.compile('\*\s*(.+)\.po')
    # Collect languages touched since the previous release; stop at the
    # first ChangeLog date older than release_date.
    for line in file("po/ChangeLog").readlines():
        match = language_re.search(line)
        if match:
            translations[match.group(1)] = 1
        if is_date(line) and line[:10]<release_date:
            break
    translator_list = translations.keys()
    translator_list.sort()
    last_translator_re = re.compile('Last-Translator:([^<"]*)', re.IGNORECASE)

    def translator_name(language):
        # Resolve a language code to "Translator Name (language)" by
        # scanning its .po file for the Last-Translator header.
        name = 'unknown'
        if ',' in language:
            language = language[:language.find(',')].replace('.po','')
        filename = 'po/%s.po' % (language)
        if not os.access(filename, os.F_OK):
            # Never mind the translator being unknown, we don't even
            # know about the language!
            return 'Mystery translator (%s)' % (language)
        for line in file(filename).readlines():
            match = last_translator_re.search(line)
            if match:
                name = match.group(1).rstrip().lstrip()
                break
        return "%s (%s)" % (name, language)

    thanks += '\nTranslations\n'
    thanks += wordwrap(', '.join([translator_name(x) for x in translator_list]), ' ')
    thanks += '\n\n'

    # ChangeLog excerpt, each line commented so the editor ignores it.
    changes = '## '+ ' '.join(changelog).replace('\n', '\n## ')

    filename = posixpath.expanduser("~/.release-wrangler-%(name)s-%(string)s.txt" % version)
    tmp = open(filename, 'w')
    tmp.write('## You are releasing %(name)s, version %(major)s.%(minor)s.%(micro)s.\n' % version)
    tmp.write('## The text at the foot of the page is the part of the ChangeLog which\n')
    tmp.write('## has changed since the last release. Please summarise it.\n')
    tmp.write('## Anything preceded by a # is ignored.\n')
    tmp.write(thanks)
    tmp.write(changes)
    tmp.close()

    os.system(favourite_editor()+' +6 %s ' % (filename))
    # FIXME: if they abort, would be useful to abort here too

    # Write it out to NEWS
    version['announcement'] = ''
    news_tmp = open('NEWS.tmp', 'a')
    # Keep only non-comment lines from the edited draft, accumulating
    # them as the announcement text, then append the old NEWS content.
    for line in open(filename, 'r').readlines():
        if line=='' or line[0]!='#':
            news_tmp.write(line)
            version['announcement'] += line
    for line in open('NEWS').readlines():
        news_tmp.write(line)
    news_tmp.close()
    os.rename('NEWS.tmp', 'NEWS')
    changelog_and_checkin('NEWS', '%(major)s.%(minor)s.%(micro)s release.' % (version))
def website_extract_image_titles(website_export_dpath, outfile, verbose):
    """
    Scan photo files exported for andonisooklaris.com and construct list of
    image filenames and titles, separated by collection.

    Parameters:
        website_export_dpath: root directory of the website export (may use '~')
        outfile: output path, 'auto' to derive one inside the export dir,
                 or None to print to stdout
        verbose: emit timestamped progress messages when True
    """
    args, result = pydoni.__pydonicli_declare_args__(locals()), dict()

    def echo(*args, **kwargs):
        # Every progress message is stamped with the current time.
        kwargs['timestamp'] = True
        pydoni.echo(*args, **kwargs)

    website_export_dpath = expanduser(website_export_dpath)
    if outfile == 'auto':
        outfile = os.path.join(
            website_export_dpath,
            'Image Titles %s.txt' % pydoni.sysdate(stripchars=True))
    elif outfile is not None:
        # Refuse to clobber an existing datafile.
        assert not os.path.isfile(outfile)

    files = pydoni.listfiles(path=website_export_dpath, recursive=True, full_names=True)
    files = [f for f in files if os.path.splitext(f)[1].lower() != '.txt']

    if verbose:
        echo('Files found: ' + str(len(files)))
        echo('Extracting EXIF metadata...')
    # Fix: extraction used to be duplicated in both branches of the verbose
    # test; run it exactly once regardless of verbosity.
    exifd = pydoni.EXIF(files).extract()
    if verbose:
        echo('EXIF metadata successfully extracted')
        if outfile is not None:
            echo('Writing output datafile: ' + outfile)

    i = 0
    tracker = pd.DataFrame(columns=['collection', 'file', 'title'])
    for file in files:
        elements = file.replace(website_export_dpath, '').lstrip('/').split('/')
        subcollection = None
        collection = elements[0]
        fname = elements[-1]
        if len(elements) == 3:
            subcollection = elements[1]
            collection += ' - ' + subcollection

        # NOTE(review): `files` are already absolute (full_names=True), so the
        # join is a no-op when `file` is absolute — confirm against listfiles.
        exif = exifd[os.path.join(website_export_dpath, file)]
        title = exif['Title'] if 'Title' in exif.keys() else ''
        # Filenames are assumed to begin with a four-digit year — TODO confirm.
        year = fname[0:4]
        title = str(year) + ' ' + str(title)

        tracker.loc[i] = [collection, fname, title]
        i += 1

    # Render one table per collection.
    print_lst = []
    for collection in tracker['collection'].unique():
        print_lst.append('\nCollection: %s\n' % collection)
        df_print = tracker.loc[tracker['collection'] == collection].drop(
            'collection', axis=1)
        print_lst.append(
            tabulate(df_print, showindex=False, headers=df_print.columns))

    print_str = '\n'.join(print_lst).strip()
    if outfile is None:
        print(print_str)
    else:
        with open(outfile, 'w') as f:
            f.write(print_str)

    if verbose:
        pydoni.program_complete()

    result['n_collections'] = len(tracker['collection'].unique())
    pydoni.__pydonicli_register__(
        dict(args=args, result=result,
             command_name='photo.website_extract_image_titles'))
def test_expanduser(self): self.assertEqual(posixpath.expanduser("foo"), "foo") self.assertEqual(posixpath.expanduser(b"foo"), b"foo")
def test_expanduser(self) -> None: self.assertEqual(posixpath.expanduser("foo"), "foo") self.assertEqual(posixpath.expanduser(b"foo"), b"foo") try: import pwd except ImportError: pass else: self.assertIsInstance(posixpath.expanduser("~/"), str) self.assertIsInstance(posixpath.expanduser(b"~/"), bytes) # if home directory == root directory, this test makes no sense if posixpath.expanduser("~") != '/': self.assertEqual( posixpath.expanduser("~") + "/", posixpath.expanduser("~/")) self.assertEqual( posixpath.expanduser(b"~") + b"/", posixpath.expanduser(b"~/")) self.assertIsInstance(posixpath.expanduser("~root/"), str) self.assertIsInstance(posixpath.expanduser("~foo/"), str) self.assertIsInstance(posixpath.expanduser(b"~root/"), bytes) self.assertIsInstance(posixpath.expanduser(b"~foo/"), bytes) with support.EnvironmentVarGuard() as env: env['HOME'] = '/' self.assertEqual(posixpath.expanduser("~"), "/") # expanduser should fall back to using the password database del env['HOME'] home = pwd.getpwuid(os.getuid()).pw_dir self.assertEqual(posixpath.expanduser("~"), home)
def edit_news_entry(version):
    """Build a NEWS entry for *version* from the ChangeLog, have the user
    edit it, then splice it onto the front of NEWS and commit.

    FIXME: still needs a lot of tidying up.  Translator stuff especially
    needs to be factored out into a separate function.
    """
    (contributors, changelog, entries, release_date) = scan_changelog(version)

    # Python 3 fix: dict.keys() returns a view without .sort(); use sorted().
    contributors_list = sorted(contributors.keys())
    thanksline = ', '.join([contributors[x] for x in contributors_list])
    # Give the final contributor an "and " prefix for readability.
    thanksline = thanksline.replace(
        contributors[contributors_list[-1]],
        'and ' + contributors[contributors_list[-1]])

    thanks = '%s\n%s\n\n' % (version['string'], '=' * len(version['string']))
    thanks += wordwrap('Thanks to %s for improvements in this version.' % (thanksline))
    thanks += '\n\n'
    for line in entries:
        thanks += ' - xxx %s\n' % (line)

    # and now pick up the translations.
    translations = {}
    # Raw string for the regex escapes \* and \s.
    language_re = re.compile(r'\*\s*(.+)\.po')

    # Python 3 fix: file() is gone; open the po ChangeLog with a context
    # manager and stop scanning once entries predate the previous release.
    with open("po/ChangeLog") as po_changelog:
        for line in po_changelog:
            match = language_re.search(line)
            if match:
                translations[match.group(1)] = 1
            if is_date(line) and line[:10] < release_date:
                break

    translator_list = sorted(translations.keys())

    last_translator_re = re.compile('Last-Translator:([^<"]*)', re.IGNORECASE)

    def translator_name(language):
        """Return 'Name (language)' read from the Last-Translator header."""
        name = 'unknown'
        if ',' in language:
            language = language[:language.find(',')].replace('.po', '')
        filename = 'po/%s.po' % (language)
        if not os.access(filename, os.F_OK):
            # Never mind the translator being unknown, we don't even
            # know about the language!
            return 'Mystery translator (%s)' % (language)
        with open(filename) as po_file:
            for line in po_file:
                match = last_translator_re.search(line)
                if match:
                    name = match.group(1).strip()
                    break
        return "%s (%s)" % (name, language)

    thanks += '\nTranslations\n'
    thanks += wordwrap(
        ', '.join([translator_name(x) for x in translator_list]), ' ')
    thanks += '\n\n'

    changes = '## ' + ' '.join(changelog).replace('\n', '\n## ')

    filename = posixpath.expanduser(
        "~/.release-wrangler-%(name)s-%(string)s.txt" % version)
    # Context manager ensures the file is flushed before the editor runs.
    with open(filename, 'w') as tmp:
        tmp.write(
            '## You are releasing %(name)s, version %(major)s.%(minor)s.%(micro)s.\n'
            % version)
        tmp.write(
            '## The text at the foot of the page is the part of the ChangeLog which\n'
        )
        tmp.write('## has changed since the last release. Please summarise it.\n')
        tmp.write('## Anything preceded by a # is ignored.\n')
        tmp.write(thanks)
        tmp.write(changes)

    os.system(favourite_editor() + ' +6 %s ' % (filename))
    # FIXME: if they abort, would be useful to abort here too

    # Write it out to NEWS
    version['announcement'] = ''
    with open('NEWS.tmp', 'a') as news_tmp:
        with open(filename, 'r') as edited:
            for line in edited:
                # Comment lines ('#') were editor instructions; skip them.
                if line == '' or line[0] != '#':
                    news_tmp.write(line)
                    version['announcement'] += line
        with open('NEWS') as old_news:
            for line in old_news:
                news_tmp.write(line)
    os.rename('NEWS.tmp', 'NEWS')

    changelog_and_checkin('NEWS', '%(major)s.%(minor)s.%(micro)s release.' % (version))
import os import sys import posixpath # FIXME: What would be lovely is an Epiphany extension to call this from Bugzilla pages # FIXME: Should be possible (but currently isn't) to get this from the patch number program = 'metacity' if len(sys.argv)<2: print 'Specify patch number' sys.exit(255) patchnum = sys.argv[1] patchdir = posixpath.expanduser('~/patch/%s/%s' % (program, patchnum)) os.makedirs(patchdir) os.chdir(patchdir) if os.system("svn co svn+ssh://svn.gnome.org/svn/%s/trunk ." % (program))!=0: print "Checkout failed." sys.exit(255) if os.system("wget http://bugzilla.gnome.org/attachment.cgi?id=%s -O - -q|patch -p 0" % (patchnum))!=0: print "Patch failed." sys.exit(255) if os.system("./autogen.sh")!=0: print "Autogen failed." sys.exit(255)
def test_expanduser(self): self.assertEqual(posixpath.expanduser("foo"), "foo") with test_support.EnvironmentVarGuard() as env: for home in '/', '', '//', '///': env['HOME'] = home self.assertEqual(posixpath.expanduser("~"), "/") self.assertEqual(posixpath.expanduser("~/"), "/") self.assertEqual(posixpath.expanduser("~/foo"), "/foo") try: import pwd except ImportError: pass else: self.assertIsInstance(posixpath.expanduser("~/"), basestring) # if home directory == root directory, this test makes no sense if posixpath.expanduser("~") != '/': self.assertEqual( posixpath.expanduser("~") + "/", posixpath.expanduser("~/") ) self.assertIsInstance(posixpath.expanduser("~root/"), basestring) self.assertIsInstance(posixpath.expanduser("~foo/"), basestring) with test_support.EnvironmentVarGuard() as env: # expanduser should fall back to using the password database del env['HOME'] home = pwd.getpwuid(os.getuid()).pw_dir # $HOME can end with a trailing /, so strip it (see #17809) home = home.rstrip("/") or '/' self.assertEqual(posixpath.expanduser("~"), home)
def expanduser(self) -> 'GPath': """Returns a new path with expanded `~` and `~user` constructs.""" return posixpath.expanduser(self._path_str)
def test_expanduser(self) -> None: self.assertEqual(posixpath.expanduser("foo"), "foo") self.assertEqual(posixpath.expanduser(b"foo"), b"foo") try: import pwd except ImportError: pass else: self.assertIsInstance(posixpath.expanduser("~/"), str) self.assertIsInstance(posixpath.expanduser(b"~/"), bytes) # if home directory == root directory, this test makes no sense if posixpath.expanduser("~") != '/': self.assertEqual( posixpath.expanduser("~") + "/", posixpath.expanduser("~/") ) self.assertEqual( posixpath.expanduser(b"~") + b"/", posixpath.expanduser(b"~/") ) self.assertIsInstance(posixpath.expanduser("~root/"), str) self.assertIsInstance(posixpath.expanduser("~foo/"), str) self.assertIsInstance(posixpath.expanduser(b"~root/"), bytes) self.assertIsInstance(posixpath.expanduser(b"~foo/"), bytes) with support.EnvironmentVarGuard() as env: env['HOME'] = '/' self.assertEqual(posixpath.expanduser("~"), "/") # expanduser should fall back to using the password database del env['HOME'] home = pwd.getpwuid(os.getuid()).pw_dir self.assertEqual(posixpath.expanduser("~"), home)
def test_expanduser(self): self.assertEqual(posixpath.expanduser("foo"), "foo") self.assertEqual(posixpath.expanduser(b"foo"), b"foo") try: import pwd except ImportError: pass else: self.assertIsInstance(posixpath.expanduser("~/"), str) self.assertIsInstance(posixpath.expanduser(b"~/"), bytes) # if home directory == root directory, this test makes no sense if posixpath.expanduser("~") != "/": self.assertEqual(posixpath.expanduser("~") + "/", posixpath.expanduser("~/")) self.assertEqual(posixpath.expanduser(b"~") + b"/", posixpath.expanduser(b"~/")) self.assertIsInstance(posixpath.expanduser("~root/"), str) self.assertIsInstance(posixpath.expanduser("~foo/"), str) self.assertIsInstance(posixpath.expanduser(b"~root/"), bytes) self.assertIsInstance(posixpath.expanduser(b"~foo/"), bytes) with support.EnvironmentVarGuard() as env: env["HOME"] = "/" self.assertEqual(posixpath.expanduser("~"), "/") self.assertEqual(posixpath.expanduser("~/foo"), "/foo") # expanduser should fall back to using the password database del env["HOME"] home = pwd.getpwuid(os.getuid()).pw_dir # $HOME can end with a trailing /, so strip it (see #17809) self.assertEqual(posixpath.expanduser("~"), home.rstrip("/"))
def update_event(self, inp=-1): self.set_output_val(0, posixpath.expanduser(self.input(0)))
def test_expanduser_pwd(self): pwd = support.import_module('pwd') self.assertIsInstance(posixpath.expanduser("~/"), str) self.assertIsInstance(posixpath.expanduser(b"~/"), bytes) # if home directory == root directory, this test makes no sense if posixpath.expanduser("~") != '/': self.assertEqual( posixpath.expanduser("~") + "/", posixpath.expanduser("~/") ) self.assertEqual( posixpath.expanduser(b"~") + b"/", posixpath.expanduser(b"~/") ) self.assertIsInstance(posixpath.expanduser("~root/"), str) self.assertIsInstance(posixpath.expanduser("~foo/"), str) self.assertIsInstance(posixpath.expanduser(b"~root/"), bytes) self.assertIsInstance(posixpath.expanduser(b"~foo/"), bytes) with support.EnvironmentVarGuard() as env: # expanduser should fall back to using the password database del env['HOME'] home = pwd.getpwuid(os.getuid()).pw_dir # $HOME can end with a trailing /, so strip it (see #17809) home = home.rstrip("/") or '/' self.assertEqual(posixpath.expanduser("~"), home) # bpo-10496: If the HOME environment variable is not set and the # user (current identifier or name in the path) doesn't exist in # the password database (pwd.getuid() or pwd.getpwnam() fail), # expanduser() must return the path unchanged. with mock.patch.object(pwd, 'getpwuid', side_effect=KeyError), \ mock.patch.object(pwd, 'getpwnam', side_effect=KeyError): for path in ('~', '~/.local', '~vstinner/'): self.assertEqual(posixpath.expanduser(path), path)