def dir_contains(dirname, path, exists=True):
    """Check if a file or directory is contained in another.

    Parameters
    ----------
    dirname: str
        The base directory that should contain `path`
    path: str
        The name of a directory or file that should be in `dirname`
    exists: bool
        If True, the `path` and `dirname` must exist

    Notes
    -----
    `path` and `dirname` must be either both absolute or both relative
    paths"""
    if exists:
        dirname = osp.abspath(dirname)
        path = osp.abspath(path)
        # Missing paths can never be "contained"; checking both here also
        # keeps samefile/commonpath below from raising FileNotFoundError
        # (previously only the py2 branch checked, and only `path`).
        if not (osp.exists(dirname) and osp.exists(path)):
            return False
        # Use commonpath where it exists (Python >= 3.4).  The original
        # gated this on six.PY2/six.PY34, which selected the legacy branch
        # on 3.4+ as well -- exactly where commonpath is available.
        if hasattr(osp, 'commonpath'):
            return osp.samefile(osp.commonpath([dirname, path]), dirname)
        # Legacy fallback: commonprefix is character-wise, so this is less
        # robust than commonpath, but matches the historical behaviour.
        return osp.samefile(osp.commonprefix([dirname, path]), dirname)
    return dirname in osp.commonprefix([dirname, path])
def handle_destination_dir_logging_check(args):
    """ Perform error checking for command line arguments """
    # Logs are always created in the working directory, so writing a second
    # log into the destination must be suppressed when we already run there.
    suppress_destination_log = args.no_log_destination
    cwd = getcwd()
    logger.debug('cwd: %s', cwd)

    # Collect the candidate destination directories to compare against cwd.
    if args.destination_dir:
        candidates = [args.destination_dir]
    elif args.dir:
        candidates = list(args.dir)
    else:
        candidates = []

    try:
        dest_is_cwd = any([path.samefile(d, cwd) for d in candidates])
    except OSError:
        # A candidate does not exist -- it cannot be the cwd.
        dest_is_cwd = False

    logger.debug('dest_is_cwd: %s', dest_is_cwd)
    if dest_is_cwd:
        if suppress_destination_log:
            raise RuntimeError('option --no-log-destination cannot be used '
                               'when running in the destination directory '
                               'because a log is always made in the '
                               'directory in which the script is run')
        suppress_destination_log = True
    return suppress_destination_log
def daemon():
    """One sync pass: find files changed since the previous scan (tracked in
    the module-level `_mod_times` dict), write an sftp batch script and push
    everything to the grid host via gsisftp."""
    files_to_upload = []
    dirs_to_upload = []
    for root, dirs, files in os.walk(DIRECTORY):
        for name in files:
            # [2:] strips a leading './' -- assumes DIRECTORY is '.'; TODO confirm.
            path = join(root, name)[2:]
            # Never upload our own batch script or log file.
            if samefile(UPLOAD_SCRIPT, path) or samefile(LOG_FILE, path):
                continue
            mod_time = get_mod_time(path)
            try:
                if mod_time > _mod_times[path]:
                    files_to_upload.append(path)
            except KeyError:
                # First time this file is seen: upload it.
                files_to_upload.append(path)
            _mod_times[path] = mod_time
        for dir in dirs:
            path = join(root, dir)[2:]
            dirs_to_upload.append(path)
    logging.debug(str(files_to_upload))
    with open(UPLOAD_SCRIPT, "w") as f:
        #for dir in dirs_to_upload:
        #    f.write("mkdir {0}\n".format(dir))
        for path in files_to_upload:
            # Skip hidden files.
            if path.startswith("."):
                continue
            f.write("put {0} {0}\n".format(path))
    # Run the batch upload, discarding gsisftp's stdout.
    with open(os.devnull, "w") as null:
        subprocess.call("gsisftp -b {0} {1}:{2}"\
            .format(UPLOAD_SCRIPT, GRID_URL, UPLOAD_DIRECTORY),
            shell=True, stdout=null)
def test_in_directory_1():
    """in_directory() must chdir into the target and restore the cwd on exit."""
    tempdir = tempfile.mkdtemp()
    try:
        original = abspath(os.curdir)
        with in_directory(tempdir):
            assert samefile(abspath(os.curdir), tempdir)
        # Leaving the context restores the previous working directory.
        assert samefile(abspath(os.curdir), original)
    finally:
        os.rmdir(tempdir)
def test_extracted_folder(monkeypatch, tmpdir):
    """_extracted_folder() maps an archive name to a folder under CACHE_FOLDER/extract."""
    monkeypatch.setattr(downloader, "CACHE_FOLDER", tmpdir)
    cases = (
        ("mongodb-amazon2-x86_64-1.1.1.tar", "mongodb-amazon2-x86_64-1.1.1-tar"),
        ("mongodb-windows-x86_64-1.1.1.zip", "mongodb-windows-x86_64-1.1.1-zip"),
    )
    for archive, folder in cases:
        assert path.samefile(
            downloader._extracted_folder(archive),
            path.join(tmpdir, "extract", folder),
        )
def test_call(self, tmpdir):
    """working_dir() must track the process cwd across change_dir()."""
    orig_dir = os.getcwd()
    assert osp.samefile(sh.working_dir(), orig_dir)
    sh.change_dir(tmpdir.strpath)
    try:
        target = tmpdir.strpath
        assert osp.samefile(target, os.getcwd())
        assert osp.samefile(target, sh.working_dir())
    finally:
        # Always restore the original cwd so later tests are unaffected.
        os.chdir(orig_dir)
def test_make_folder(monkeypatch, tmpdir):
    """_mkdir_ifnot_exist() creates (possibly nested) dirs and returns their path."""
    for parts in (("test",), ("test2", "nested")):
        created = downloader._mkdir_ifnot_exist(tmpdir, *parts)
        expected = path.join(tmpdir, *parts)
        assert path.samefile(created, expected)
        assert path.exists(expected)
def __eq__(self, other):
    """Equal when both objects have the same concrete type, the same string
    form, and their ``object`` paths refer to the same file on disk."""
    try:
        return (type(self) == type(other) and
                str(self) == str(other) and
                path.samefile(self.object, other.object))
    except OSError as exc:
        # samefile raises when either path is missing; treat as "not equal".
        pretty.print_debug(__name__, exc)
        return False
def umount(self):
    """Undo the mount represented by ``connect_args``.

    NOTE(review): the assert requires ``path`` and ``mp`` to reference the
    same filesystem object, while the branch below distinguishes them by
    string comparison -- presumably ``mp`` may be a link/alias of ``path``;
    confirm the intent.  Also, ``assert`` is stripped under ``python -O``.
    """
    assert samefile(self.connect_args.path, self.connect_args.mp)
    if self.connect_args.path != self.connect_args.mp:
        # mp is a separate filesystem entry (e.g. a link) -- remove it.
        self._run_as_root(['/bin/rm', self.connect_args.mp], 'umount')
    else:
        logger.info("Nothing to do for unmount of %s" % self.connect_args.path)
def write_output(self, filename, output):
    """Method for writing hdf5 output.

    `output` to be written must be either a `memh5.MemGroup` or an
    `h5py.Group` (which include `hdf5.File` objects). In the latter
    case the buffer is flushed if `filename` points to the same file
    and a copy is made otherwise.
    """
    from caput import memh5
    import h5py

    # Ensure parent directory is present.  dirname is '' for a bare
    # filename in the cwd -- os.makedirs('') would raise, so skip it.
    dirname = path.dirname(filename)
    if dirname and not path.isdir(dirname):
        os.makedirs(dirname)

    # Cases for `output` object type.
    if isinstance(output, memh5.MemGroup):
        # Already in memory.
        output.to_hdf5(filename, mode='w')
    elif isinstance(output, h5py.Group):
        if path.isfile(filename) and path.samefile(output.file.filename, filename):
            # `output` already lives in this file.
            output.flush()
        else:
            # Copy to memory then to disk
            # XXX This can be made much more efficient using a direct copy.
            out_copy = memh5.MemGroup.from_hdf5(output)
            out_copy.to_hdf5(filename, mode='w')
def smart_copy(src, dest, exclude=None):
    """Copy file or directory, while ignoring non-existent or equivalent files.

    Parameters
    ----------
    src, dest : str
        Source path and destination path.
    exclude : sequence of str, optional
        fnmatch-style patterns; matching sources are skipped.  Defaults to no
        exclusions.  (Was a mutable default ``[]``, which is shared between
        calls and can be mutated accidentally -- fixed.)
    """
    if exclude is None:
        exclude = []
    if exists(dest) and samefile(src, dest):
        print(
            "Warning: ignoring smart_copy because src and dest both point to {}"
            .format(dest))
        return
    if not exists(dirname(dest)):
        smart_mkdir(dirname(dest))
    if isdir(src):
        # Stage through a uniquely named tmp tree so excludes are applied,
        # then merge into dest without overwriting existing files.
        tmp_root = join(dirname(dirname(__file__)), 'tmp')
        tmp = join(tmp_root, f'{basename(src)}_{random.randint(0,1000)}')
        smart_remove(tmp)
        copytree(src, tmp, ignore=ignore_patterns(*exclude))  # copy tree with excludes
        distutils.dir_util.copy_tree(tmp, dest)  # then copy tree without overwrite
        smart_remove(tmp)
    else:
        for pattern in exclude:
            if fnmatch.fnmatch(src, pattern):
                print('Did not copy {} because of exclude={}'.format(
                    src, exclude))
                return
        copyfile(src, dest)
def samefile(self, other):
    """True if `other` -- a path string or an object exposing a ``name``
    attribute -- refers to the same file as this object."""
    own_name = self.name
    # Fall back to treating `other` as a plain path when it has no .name.
    other_name = getattr(other, 'name', other)
    return p.samefile(own_name, other_name)
def test_default_extract_folder(monkeypatch, tmpdir):
    """_extract_folder() returns (and creates) CACHE_FOLDER/extract."""
    monkeypatch.setattr(downloader, "CACHE_FOLDER", tmpdir)
    expected = path.join(tmpdir, "extract")
    assert path.samefile(downloader._extract_folder(), expected)
    assert path.exists(expected)
def checkPyFoamSiteLocation(name):
    """Sanity-check the site directory named by environment variable `name`.

    Verifies that its bin/, etc/ and lib/ subdirectories exist and that
    bin/ is reachable via PATH, printing a MISCONFIGURATION notice for
    anything that is off (via the project's print_ helper).
    """
    binDir=path.join(os.environ[name],"bin")
    etcDir=path.join(os.environ[name],"etc")
    libDir=path.join(os.environ[name],"lib")
    if not path.isdir(binDir):
        print_("MISCONFIGURATION: no directory",binDir,"for site-specific scripts")
    else:
        # bin/ exists: check that some PATH entry is that very directory
        # (samefile, so symlinked PATH entries count too).
        found=False
        for p in os.environ["PATH"].split(":"):
            if path.isdir(p):
                if path.samefile(p,binDir):
                    found=True
                    break
        if not found:
            print_("MISCONFIGURATION:",binDir,"is not in the PATH",os.environ["PATH"])
        else:
            print_("Site-specific scripts should be added to",binDir)
    if not path.isdir(etcDir):
        print_("MISCONFIGURATION: no directory",etcDir,"for site-specific configurations")
    else:
        print_("Site-specific configurations can be added to",etcDir)
    if not path.isdir(libDir):
        print_("MISCONFIGURATION: no directory",libDir,"for site-specific library files")
    else:
        print_("Site-specific library files can be added to",libDir,
               "Do NOT add to PYTHONPATH but import as PyFoam.Site")
def __eq__(self, other):
    """Elements are equal when `other` is also an Element, both URIs
    resolve to the same file, and etype and flavor match."""
    if not isinstance(other, Element):
        return False
    return (path.samefile(self.uri, other.uri)
            and self.etype == other.etype
            and self.flavor == other.flavor)
def urlcopy(src, dest=None, use_cache=True):
    """Copy `src` (a URL or a local path) to `dest`, optionally via a cache.

    With `use_cache`, a previously downloaded copy under the module-level
    cache directory is reused, and fresh downloads are saved into it.
    When `dest` is omitted, the cache directory itself is the destination.
    Returns the final destination path.
    """
    cache = ensuredirs(join(here, 'cache'))
    if use_cache:
        cached = join(cache, basename(src))
        if exists(cached):
            # Serve from the cache instead of refetching/recopying.
            src = cached
    if not dest:
        dest = cache
    if isdir(dest):
        # Destination is a directory: copy into it under the source's name.
        dest = join(dest, basename(src))
    if urlparse(src).scheme:
        # Remote source: download with wget straight into dest, then
        # populate the cache (cached is always bound here since the
        # condition short-circuits when use_cache is False).
        check_call(['wget', '-O', dest, src])
        if use_cache and not exists(cached):
            copy2(dest, cache)
    else:
        if exists(dest) and samefile(src, dest):
            # Source and destination are already the same file.
            return dest
        if isdir(src):
            if exists(dest):
                rmtree(dest)
            copytree(src, dest)
        else:
            copy2(src, dest)
    return dest
def open(cls, dbpath=expanduser("~/.maas.db")): """Load a profiles database. Called without arguments this will open (and create) a database in the user's home directory. **Note** that this returns a context manager which will close the database on exit, saving if the exit is clean. :param dbpath: The path to the database file to create and open. """ # See if we ought to do a one-time migration. migrate_from = expanduser("~/.maascli.db") migrate = isfile(migrate_from) and not exists(dbpath) # Initialise filename with restrictive permissions... os.close(os.open(dbpath, os.O_CREAT | os.O_APPEND, 0o600)) # Final check to see if it's safe to migrate. migrate = migrate and not samefile(migrate_from, dbpath) # before opening it with sqlite. database = sqlite3.connect(dbpath) try: store = cls(database) if migrate: schema_import(database, migrate_from) yield store else: yield store except: raise else: database.commit() finally: database.close()
def append_local_version_label(public_version):
    """Append a PEP 440 local version label derived from ``git describe``.

    Falls back to returning `public_version` unchanged on any failure
    (no gitpython, no repo, no matching tag, ...) -- deliberate best-effort.
    """
    try:
        from git import Repo
        from os import getcwd
        from os.path import join, samefile
        repo = Repo()
        """
        If we're been copied under the working dir of some other Git
        repo, "git describe" won't return what we're expecting, so
        don't append anything. The test for this case will also fail
        if, say, we try to invoke ../setup.py from a subdirectory, but
        it's better to err on the side of "least surprises".
        """
        if not samefile(repo.git_dir, join(getcwd(), '.git')):
            return public_version
        # The tags have a "v" prefix.
        val = repo.git.describe('--match', 'v' + public_version,
                                '--tags', '--dirty', '--broken')
        """
        Output from "git describe --tags --dirty --broken" is
        <TAG>[-<NR-OF-COMMITS>-g<ABBRE-HASH>][-dirty][-broken]
        Convert to a legal Python local version label, dropping the
        "v" prefix of the tag.
        """
        return val.replace('-', '+', 1).replace('-', '.')[1:]
    except:
        # Any problem at all means "no local label".
        return public_version
def get_path_components(self, repo_abspath, abspath):
    """
    Splits given abspath into components until repo_abspath is reached.

    E.g. if repo_abspath is '/Documents/Hobby/ParaView/' and abspath is
    '/Documents/Hobby/ParaView/Catalyst/Editions/Base/', the result is:

        ['.', 'Catalyst', 'Editions', 'Base']

    First element is always '.' (concrete symbol depends on OS).

    @type repo_abspath: string
    @param repo_abspath: Absolute path to the git repository.

    @type abspath: string
    @param abspath: Absolute path to within repo_abspath.

    @rtype: list
    @return: List of path components.
    """
    components = []
    # Peel path components off the tail until we arrive at the repo root.
    while not path.samefile(abspath, repo_abspath):
        abspath, tail = path.split(abspath)
        if tail:
            components.append(tail)
    # relpath of a path against itself is the current-dir symbol ('.').
    components.append(path.relpath(repo_abspath, repo_abspath))
    components.reverse()
    return components
def premesti_v_0( fime, dirime):
    """Move (or hard-link) every file sharing `fime`'s extension-less
    basename into a '0' subdirectory of `dirime`.

    NOTE(review): names appear to be Bulgarian -- presumably
    "premesti v 0" = "move to 0", `davai` = "go ahead" -- confirm.
    """
    log()
    dir = dirime + '/0'
    mkdir( dir)
    bezext,ext = ospath.splitext( fime)
    if 0:  # old variant (disabled): archive existing matches into a timestamped subdir
        import time
        sega = str( time.mktime( time.localtime()) )
        dirstari = join( dir, sega)
        for f in glob( globescape( join( dir, basename( bezext))) +'.*'):
            makedirs( dirstari)
            rename( f, join( dirstari, basename( f) ))
    # new variant: move when the --premesti option is set, otherwise hard-link
    davai = optz.premesti
    for f in glob( globescape( bezext) +'.*'):
        of = join( dir, basename( f))
        if not exists( of):
            if davai:
                rename( f, of)
            else:
                link( f, of)
        else:
            # Target already exists: it must be the same inode, else abort.
            assert ospath.samefile( f, of), of
            if davai:
                # Already linked into dir; just drop the original.
                os.remove( f)
def clean_svg_files(in_dir, out_dir, match_pat=None, clean=False, strip=False):
    """Run SvgCleaner over every file in `in_dir` (optionally filtered by the
    regex `match_pat`) and write UTF-8 results into `out_dir`.

    With `clean`, `out_dir` is wiped first -- refused if it is `in_dir`.
    `strip` is passed through to SvgCleaner.
    """
    regex = re.compile(match_pat) if match_pat else None
    count = 0
    # Refuse to wipe the input directory.
    if clean and path.samefile(in_dir, out_dir):
        logging.error('Cannot clean %s (same as in_dir)', out_dir)
        return
    out_dir = tool_utils.ensure_dir_exists(out_dir, clean=clean)
    cleaner = SvgCleaner(strip)
    for file_name in os.listdir(in_dir):
        # Only names matching the pattern are processed, when one is given.
        if regex and not regex.match(file_name):
            continue
        in_path = os.path.join(in_dir, file_name)
        logging.debug('read: %s', in_path)
        with open(in_path) as in_fp:
            result = cleaner.clean_svg(in_fp.read())
        out_path = os.path.join(out_dir, file_name)
        with codecs.open(out_path, 'w', 'utf-8') as out_fp:
            logging.debug('write: %s', out_path)
            out_fp.write(result)
        count += 1
    if not count:
        logging.warning('Failed to match any files')
    else:
        logging.info('Wrote %s files to %s', count, out_dir)
def from_potential_worktree(cls, wd):
    """Return cls(root) when `wd` is the top level of a git work tree,
    else None (also None when `wd` is not in a git repository at all)."""
    wd = os.path.abspath(wd)
    real_wd, _, ret = do_ex("git rev-parse --show-prefix", wd)
    if ret:
        # git failed: not a work tree.
        return
    if not real_wd:
        # Empty prefix: wd already is the work-tree root.
        real_wd = wd
    else:
        # show-prefix printed a relative subpath; climb that many levels
        # above wd to locate the work-tree root.
        wd_parents = []
        real_wd_parents = ["."]
        # NOTE(review): this loop mutates `wd` all the way up to the
        # filesystem root, so the final samefile(real_wd, wd) below
        # compares against the root, not the original wd -- looks
        # suspicious; confirm against the upstream implementation.
        while True:
            if os.path.abspath(os.path.join(wd, os.pardir)) != wd:
                wd = os.path.abspath(os.path.join(wd, os.pardir))
                wd_parents.append(wd)
            else:
                break
        # Count the components of the prefix by repeatedly stripping one.
        while True:
            if os.path.basename(real_wd) != "":
                real_wd = os.path.join(real_wd, os.pardir)
                real_wd_parents.append(real_wd)
            else:
                break
        real_wd = str(wd_parents[len(real_wd_parents) - 1])
    trace("real root", real_wd)
    if not samefile(real_wd, wd):
        # wd is a subdirectory, not the root itself.
        return
    return cls(real_wd)
def _ask_for_path(config_name):
    """
    Asks for Dropbox path.

    Prompts until an acceptable folder location is entered: either a path
    that does not already exist / equals the currently configured one, or
    an existing path the user explicitly agrees to overwrite.
    """
    conf = MaestralConfig(config_name)
    default = osp.join(get_home_dir(), conf.get("main", "default_dir_name"))
    while True:
        msg = f"Please give Dropbox folder location or press enter for default ['{default}']:"
        # Strip surrounding quotes/spaces from pasted paths.
        res = input(msg).strip("'\" ")
        dropbox_path = osp.expanduser(res or default)
        old_path = osp.expanduser(conf.get("main", "path"))
        same_path = False
        try:
            if osp.samefile(old_path, dropbox_path):
                same_path = True
        except FileNotFoundError:
            # Either path missing on disk: cannot be "the same" folder.
            pass
        if osp.exists(dropbox_path) and not same_path:
            msg = f"Directory '{dropbox_path}' already exist. Do you want to overwrite it?"
            yes = click.confirm(msg)
            if yes:
                return dropbox_path
            else:
                # Declined: loop and re-prompt.
                pass
        else:
            return dropbox_path
def decryptFile(infile, outfile, passw, bufferSize):
    """Decrypt `infile` into `outfile` using `passw`.

    Raises ValueError on decryption errors or when input and output are the
    same file, and IOError when `infile` is missing or `outfile` cannot be
    written.  On a decryption error the partial output file is removed --
    but never when `outfile` is actually the input file (the original code
    removed it unconditionally, deleting the user's input on the
    "same file" error).
    """
    # Open the input first so a missing infile gets its dedicated message
    # (previously the outer IOError handler also re-labelled "unable to
    # write output" errors as "file not found").
    try:
        fIn = open(infile, "rb")
    except IOError:
        raise IOError("File \"" + infile + "\" was not found.")
    try:
        with fIn:
            # check that output file does not exist
            # or that, if exists, is not the same as the input file
            # (i.e.: overwrite if it seems safe)
            if path.isfile(outfile):
                if path.samefile(infile, outfile):
                    raise ValueError("Input and output files "
                                     "are the same.")
            try:
                with open(outfile, "wb") as fOut:
                    # get input file size
                    inputFileSize = stat(infile).st_size
                    try:
                        # decrypt file stream
                        decryptStream(fIn, fOut, passw, bufferSize,
                                      inputFileSize)
                    except ValueError as exd:
                        # should not remove output file here because it is
                        # still in use -- re-raise for the outer handler
                        raise ValueError(str(exd))
            except IOError:
                raise IOError("Unable to write output file.")
    except ValueError as exd:
        # remove partial output on error, unless outfile IS the input file
        if path.isfile(outfile) and not path.samefile(infile, outfile):
            remove(outfile)
        # re-raise exception
        raise ValueError(str(exd))
def main():
    """CLI entry point: read a matrix configuration and render each given
    template with it into the destination directory."""
    parser = argparse.ArgumentParser(description='Process matrix configuration and fill templates.')
    parser.add_argument('templates', metavar='TEMPLATE', nargs='+',
                        help='A template to pass the results in.')
    parser.add_argument('-c', '--config', dest='config', metavar='FILE', default='setup.cfg',
                        help='Configuration file (ini-style) to pull matrix conf from. Default: %(default)r')
    parser.add_argument('-s', '--section', dest='section', metavar='SECTION', default='matrix',
                        help='Configuration section to use. Default: %(default)r')
    parser.add_argument('-d', '--destination', dest='destination', metavar='DIRECTORY', default='.',
                        help='Destination of template output. Default: %(default)r')
    args = parser.parse_args()
    # Jinja environment tuned to preserve template whitespace faithfully.
    jinja = Environment(
        loader=FileSystemLoader('.'),
        trim_blocks=True,
        lstrip_blocks=True,
        keep_trailing_newline=True
    )
    print('Creating matrix from {0}[{1}] ... '.format(args.config, args.section), end='')
    matrix = from_file(args.config, section=args.section)
    print('DONE.')
    for name in args.templates:
        print('Processing {0} ... '.format(name), end='')
        dest = join(args.destination, basename(name))
        # Refuse to render a template onto itself.
        if exists(dest) and samefile(name, dest):
            raise RuntimeError("This would override the template. Use a different destination.")
        with open(dest, "w") as fh:
            fh.write(jinja.get_template(name).render(matrix=matrix))
        print("DONE.")
def move_dropbox_directory(self, new_path=None): """ Sets the local Dropbox directory. This moves all local files to the new location and resumes syncing afterwards. :param str new_path: Full path to local Dropbox folder. If not given, the user will be prompted to input the path. :raises: ``OSError`` if moving the directory fails. """ # get old and new paths old_path = self.sync.dropbox_path new_path = new_path or select_dbx_path_dialog(self._config_name) try: if osp.samefile(old_path, new_path): return except FileNotFoundError: pass if osp.exists(new_path): raise FileExistsError(f'Path "{new_path}" already exists.') # move folder from old location or create a new one if no old folder exists if osp.isdir(old_path): shutil.move(old_path, new_path) else: os.makedirs(new_path) # update config file and client self.sync.dropbox_path = new_path
def __init__(self, file = None, match = None, content = None):
    """Constructor that takes a file, match, and content.

    @param file The file name of the input.
    @param match The regular expression matches
    @param content The ASCII content of the file.
    """
    if file is None: return;
    self.content = content;
    file = path.realpath(file);
    rel = relpath(file);
    self.file = list(path.split(file));
    self.date = path.getmtime(file);
    # Refuse to process the generator script itself (samefile is not
    # available on win32, hence the platform guard).
    if sys.platform!="win32" and path.samefile(path.join(self.file[0],self.file[1]),sys.argv[0]):
        raise ValueError('Let\'s just forget codegen.py');
    try:
        """['normal','all','mixed'] for example. This(ese) are the replacement types to be used."""
        self.types = match[0].split(',');
        """'z' for example. This is the current file's `type`."""
        self.precision = match[2].lower();
        """['c','d','s'] for example. This is the current file's destination `types`."""
        self.precisions = match[3].lower().split();
        # Restrict to the externally requested precisions, if any were set.
        if len(self.required_precisions):
            self.precstmp = [];
            for prec in self.required_precisions:
                if prec in self.precisions:
                    self.precstmp.append(prec);
            self.precisions = self.precstmp;
    except:
        # Any malformed match means the conversion header was invalid.
        raise ValueError(path.join(self.file[0],self.file[1])+' : Invalid conversion string');
    self.files_in.append(rel);
def generate(self, mk_config='config.mk', header_config='src/config.h'):
    """Write the make fragment (`mk_config`) and the C header
    (`header_config`) from self.env / self.defs, then mirror the Makefile
    into the current directory for out-of-tree builds.
    """
    if self.gcov_build or self.clangcov_build:
        self.append('cflags', '-D_PROF')
    # Make variables: NAME := value, sorted for reproducible output.
    with open(mk_config, 'w') as config:
        for info in sorted(self.env.keys()):
            values = self.env[info].strip()
            config.write('%s := %s\n' % (info.upper(), values))
    # C header: one #define per entry, dashes mapped to underscores.
    with open(header_config, 'w') as config:
        config.write(
            '// -*- mode: c++; c-basic-offset: 2; indent-tabs-mode: nil -*-\n'
        )
        config.write('// auto-generated by ./configure, do not edit.\n\n')
        config.write('#pragma once\n')
        config.write('#ifndef MESH__CONFIG_H\n')
        config.write('#define MESH__CONFIG_H\n\n')
        for var in sorted(self.defs.keys()):
            value = self.defs[var].strip()
            config.write('#define %s %s\n' % (var.upper().replace('-', '_'), value))
        config.write('\n#endif // MESH__CONFIG_H\n')
    # dirname(argv[0]) is '' when invoked as `python configure`; treat that
    # as the cwd, otherwise samefile('') raises FileNotFoundError.
    src_dir = dirname(argv[0]) or getcwd()
    if not samefile(src_dir, getcwd()):
        copyfile(join(src_dir, 'Makefile'), 'Makefile')
def rename_file(self, old_path, new_path):
    """Rename a file.

    Maps rename problems onto HTTP errors: 400 for hidden paths when
    hidden files are disallowed, 409 when the target already exists,
    403 on permission problems, 500 for anything unexpected.
    """
    old_path = old_path.strip("/")
    new_path = new_path.strip("/")
    if new_path == old_path:
        return
    new_os_path = self._get_os_path(new_path)
    old_os_path = self._get_os_path(old_path)
    # Hidden files/dirs may only be renamed when allow_hidden is set.
    if (is_hidden(old_os_path, self.root_dir) or is_hidden(
            new_os_path, self.root_dir)) and not self.allow_hidden:
        raise web.HTTPError(
            400, f"Cannot rename file or directory {old_os_path!r}")
    # Should we proceed with the move?
    # NOTE(review): samefile presumably permits case-only renames on
    # case-insensitive filesystems -- confirm.
    if os.path.exists(new_os_path) and not samefile(
            old_os_path, new_os_path):
        raise web.HTTPError(409, "File already exists: %s" % new_path)
    # Move the file
    try:
        with self.perm_to_403():
            shutil.move(old_os_path, new_os_path)
    except web.HTTPError:
        raise
    except Exception as e:
        raise web.HTTPError(
            500, f"Unknown error renaming file: {old_path} {e}") from e
def is_present(file_path, cmds, debug):
    """Return True when any entry of `cmds` has a "file" that refers to the
    same filesystem object as `file_path`."""
    for entry in cmds:
        candidate = entry["file"]
        if not samefile(candidate, file_path):
            continue
        if debug:
            print("Found same file:", candidate, file_path)
        return True
    return False
def __init__(self, file=None, match=None, content=None):
    """Constructor that takes a file, match, and content.

    @param file The file name of the input.
    @param match The regular expression matches
    @param content The ASCII content of the file.
    """
    if file is None:
        return
    self.content = content
    #file = path.realpath(file)
    rel = relpath(file)
    self.file = path.split(file)
    self.date = path.getmtime(file)
    # Refuse to process the generator script itself (samefile is not
    # available on win32, hence the platform guard).
    if sys.platform != "win32" and path.samefile(
            path.join(self.file[0], self.file[1]), sys.argv[0]):
        raise ValueError('Let\'s just forget codegen.py')
    try:
        # ['normal', 'all', 'mixed'] for example. These are the replacement types to be used.
        self.types = match[0].split(',')
        # 'z' for example. This is the current file's `type`.
        self.precision = match[2].lower()
        # ['c', 'd', 's'] for example. This is the current file's destination `types`.
        self.precisions = match[3].lower().split()
        # Restrict to the requested precisions; the source precision is
        # always allowed through.
        if len(self.required_precisions):
            self.precstmp = []
            for prec in self.required_precisions:
                if prec in self.precisions or prec == self.precision:
                    self.precstmp.append(prec)
            self.precisions = self.precstmp
    except:
        # Any malformed match means the conversion header was invalid.
        raise ValueError(
            path.join(self.file[0], self.file[1]) + ' : Invalid conversion string')
    self.files_in.append(rel)
def prep_symlink(outdir, workdir, filename=None):
    """ Creates a symlink between outdir and workdir.

        If outdir and workdir are the same directory, then bails out.
        Both directories should exist prior to call.  If filename is None,
        then creates a symlink to workdir in outdir called ``workdir``.
        Otherwise, creates a symlink in workdir called filename.  If a link
        ``filename`` already exists, deletes it first.
    """
    from os import remove, symlink
    from os.path import samefile, lexists, abspath, join
    from ..misc import Changedir
    if samefile(outdir, workdir):
        # Same directory: a link would be circular/pointless.
        return
    if filename is None:
        # Link outdir/'workdir' -> workdir.
        with Changedir(workdir) as cwd:
            if lexists('workdir'):
                try:
                    remove('workdir')
                except OSError:
                    pass
            # Best-effort: a racing process may have created the link already.
            try:
                symlink(abspath(workdir), abspath(join(outdir, 'workdir')))
            except OSError:
                pass
        return
    # Link workdir/filename -> outdir/filename, replacing a stale link.
    with Changedir(workdir) as cwd:
        if lexists(filename):
            try:
                remove(filename)
            except OSError:
                pass
        try:
            symlink(
                abspath(join(outdir, filename)),
                abspath(join(workdir, filename))
            )
        except OSError:
            pass
async def rename_file(self, old_path, new_path):
    """Rename a file.

    Async variant: the move runs through run_sync so the event loop is
    not blocked.  Raises 409 if the target exists, 403 on permission
    problems, 500 for anything unexpected.
    """
    old_path = old_path.strip("/")
    new_path = new_path.strip("/")
    if new_path == old_path:
        return
    new_os_path = self._get_os_path(new_path)
    old_os_path = self._get_os_path(old_path)
    # Should we proceed with the move?
    # NOTE(review): samefile presumably permits case-only renames on
    # case-insensitive filesystems -- confirm.
    if os.path.exists(new_os_path) and not samefile(
            old_os_path, new_os_path):
        raise web.HTTPError(409, u"File already exists: %s" % new_path)
    # Move the file, mapping permission errors to HTTP 403.
    try:
        with self.perm_to_403():
            await run_sync(shutil.move, old_os_path, new_os_path)
    except web.HTTPError:
        raise
    except Exception as e:
        raise web.HTTPError(
            500, u"Unknown error renaming file: %s %s" % (old_path, e)) from e
def enumerateFiles():
    """Collect the files to rename and their new names.

    Reads the module-level `namelist` file (via parseFile).  With the
    module-level `hasnames` flag set, each line is "oldname, newname";
    otherwise each line is just a new name and the source files are all
    regular files in `workingdir` except the name list itself.  Exits
    with a message on malformed input or inaccessible files.
    """
    files = []
    newnames = []
    lines = parseFile(namelist)
    for line in lines:
        splitline = line.split(",")
        if hasnames:
            if len(splitline) != 2:
                print('Error: Invalid input file format')
                sys.exit()
            files.append(splitline[0].strip())
            newnames.append(splitline[1].strip())
            # Verify the just-added source file is accessible.
            if not access(join(workingdir, files[len(files) - 1]), F_OK):
                print('Error: Cannot access file', files[len(files) - 1],
                      "from given file list")
                sys.exit()
        else:
            if len(splitline) != 1:
                print('Error: Invalid input file format')
                sys.exit()
            # NOTE(review): rebuilt on every line of the loop -- redundant
            # but harmless; the final iteration's result is what is returned.
            files = [f for f in listdir(workingdir)
                     if not samefile(join(workingdir, f), namelist)
                     and isfile(join(workingdir, f))]
            newnames = [name.strip() for name in lines]
    return files, newnames
def rename_file(self, old_path, new_path):
    """Rename a file.

    Validates the new relative path, then moves the file; raises 409 if
    the target exists, 403 on permission problems, 500 otherwise.
    """
    old_path = old_path.strip('/')
    new_path = new_path.strip('/')
    if new_path == old_path:
        return
    # Perform path validation prior to converting to os-specific value since this
    # is still relative to root_dir.
    self._validate_path(new_path)
    new_os_path = self._get_os_path(new_path)
    old_os_path = self._get_os_path(old_path)
    # Should we proceed with the move?
    # NOTE(review): samefile presumably permits case-only renames on
    # case-insensitive filesystems -- confirm.
    if os.path.exists(new_os_path) and not samefile(old_os_path, new_os_path):
        raise web.HTTPError(409, u'File already exists: %s' % new_path)
    # Move the file, mapping permission errors to HTTP 403.
    try:
        with self.perm_to_403():
            shutil.move(old_os_path, new_os_path)
    except web.HTTPError:
        raise
    except Exception as e:
        raise web.HTTPError(500, u'Unknown error renaming file: %s %s' % (old_path, e))
def test_render_tmp_dir(qtbot, setup_reports, report_file):
    """Test that rendered files are created in spyder's tempdir."""
    reports = setup_reports
    output_file = reports._render_report(report_file)
    # The common prefix of the output path and TEMPDIR must be TEMPDIR
    # itself, i.e. the rendered file lives under spyder's tmp dir.
    common = osp.commonprefix([output_file, TEMPDIR])
    assert osp.samefile(common, TEMPDIR)
def __init__(self, cfg, pre_init=None, reset_halt_cmd=DEFAULT_OPENOCD_RESET_HALT_CMD,
             pre_load=None, load_cmd=None, verify_cmd=None, post_verify=None,
             do_verify=False, do_verify_only=False, tui=None, config=None,
             serial=None, use_elf=None, no_halt=False, no_init=False,
             no_targets=False, tcl_port=DEFAULT_OPENOCD_TCL_PORT,
             telnet_port=DEFAULT_OPENOCD_TELNET_PORT,
             gdb_port=DEFAULT_OPENOCD_GDB_PORT, gdb_init=None, no_load=False,
             target_handle=DEFAULT_OPENOCD_TARGET_HANDLE):
    """Build the OpenOCD command line and remember flash/debug settings.

    Defaults to the board's support/openocd.cfg when no config is given,
    and adds '-s' search paths for the support dir, each config's dir and
    any extra cfg.openocd_search entries.
    """
    super().__init__(cfg)
    support = path.join(cfg.board_dir, 'support')
    if not config:
        # Fall back to the board-provided openocd.cfg if it exists.
        default = path.join(support, 'openocd.cfg')
        if path.exists(default):
            config = [default]
    self.openocd_config = config
    search_args = []
    if path.exists(support):
        search_args.append('-s')
        search_args.append(support)
    if self.openocd_config is not None:
        for i in self.openocd_config:
            # Add each config's directory, unless it is the support dir
            # already added above (samefile avoids duplicate '-s' args).
            if path.exists(i) and not path.samefile(path.dirname(i), support):
                search_args.append('-s')
                search_args.append(path.dirname(i))
    if cfg.openocd_search is not None:
        for p in cfg.openocd_search:
            search_args.extend(['-s', p])
    self.openocd_cmd = [cfg.openocd or 'openocd'] + search_args
    # openocd doesn't cope with Windows path names, so convert
    # them to POSIX style just to be sure.
    self.elf_name = Path(cfg.elf_file).as_posix()
    self.pre_init = pre_init or []
    self.reset_halt_cmd = reset_halt_cmd
    self.pre_load = pre_load or []
    self.load_cmd = load_cmd
    self.verify_cmd = verify_cmd
    self.post_verify = post_verify or []
    self.do_verify = do_verify or False
    self.do_verify_only = do_verify_only or False
    self.tcl_port = tcl_port
    self.telnet_port = telnet_port
    self.gdb_port = gdb_port
    self.gdb_cmd = [cfg.gdb] if cfg.gdb else None
    self.tui_arg = ['-tui'] if tui else []
    # The no_* flags suppress the corresponding default openocd/gdb args.
    self.halt_arg = [] if no_halt else ['-c halt']
    self.init_arg = [] if no_init else ['-c init']
    self.targets_arg = [] if no_targets else ['-c targets']
    self.serial = ['-c set _ZEPHYR_BOARD_SERIAL ' + serial] if serial else []
    self.use_elf = use_elf
    self.gdb_init = gdb_init
    self.load_arg = [] if no_load else ['-ex', 'load']
    self.target_handle = target_handle
def test_Link(self):
    """Link() must leave the client path a real file, make the temp path a
    symlink to it, and give both the same underlying open file."""
    self.TempFileNX.Link(self.FileHash)
    client_path = self.Client[self.FileHash].Path
    self.assertFalse(islink(client_path))
    self.assertTrue(islink(self.TempFileNX.Path))
    self.assertTrue(samefile(self.TempFileNX.Path, client_path))
    with self.TempFileNX.GetStream() as stream1, \
            self.Client[self.FileHash].GetStream() as stream2:
        self.assertTrue(sameopenfile(stream1.fileno(), stream2.fileno()))
def __eq__(self, other):
    # Equal when both objects have the same concrete type, identical
    # unicode text, and their `object` paths refer to the same file.
    # (Python 2 code: `unicode` builtin and comma-style except clause.)
    try:
        return (type(self) == type(other) and
                unicode(self) == unicode(other) and
                path.samefile(self.object, other.object))
    except OSError, exc:
        # samefile failed (e.g. a path is missing): treat as "not equal".
        pretty.print_debug(__name__, exc)
        return False
def finalize(tmp, app, docs):
    """Check that exactly one doc was produced and that it lives at
    tmp/moved.txt; close every doc regardless of the outcome."""
    try:
        docs = list(docs)
        eq_(len(docs), 1, docs)
        expected = join(tmp, "moved.txt")
        actual = docs[0].file_path
        assert samefile(actual, expected), (actual, expected)
    finally:
        for doc in docs:
            doc.close()
def process_default(self, event):
    """Fallback inotify handler: reload the configuration when the event
    concerns the config file, then (re)start the tests."""
    # on DELETE the file will not exist any more, hence the exists() guards
    # before samefile (which raises on missing paths).
    if path.exists(event.pathname) and \
            path.exists(self.config.config_file()) and \
            path.samefile(event.pathname, self.config.config_file()):
        self.config.load_config()
    self._last_event = time()
    self.start_tests_async(event)
def _copyentity(self, path_from, path_to, is_file):
    """Copy one file or directory from `path_from` into `path_to`.

    Skips (with a colored notice) when source and target are already the
    same filesystem entry, or when a source directory does not exist.
    """
    from_path = self.getAbsPath(path_from)
    (from_base_path, from_leaf_path) = path.split(from_path)
    # params = (absolute source, absolute target including the leaf name)
    params = (from_path, self.getAbsPath(path_to+'/'+from_leaf_path))
    if is_file:
        self.decor_print('copying file', '%s -> %s' % (path_from, path_to), bcolors.OKBLUE)
        if path.exists(params[1]) and path.samefile(*params):
            print(bcolors.NOTICE+'skipped (same file)'+bcolors.ENDC)
        else:
            copy2(*params)
    else:
        self.decor_print('copying dir', '%s -> %s' % (path_from, path_to), bcolors.OKBLUE)
        if not path.exists(params[0]):
            print(bcolors.NOTICE+'skipped (origin dir not found)'+bcolors.ENDC)
        elif path.exists(params[1]) and path.samefile(*params):
            print(bcolors.NOTICE+'skipped (same dir)'+bcolors.ENDC)
        else:
            copytree(*params)
def from_potential_worktree(cls, wd):
    """Return cls(root) when `wd` is the top level of a git work tree,
    else None (also None when `wd` is not in a git repository)."""
    real_wd, _, ret = do_ex("git rev-parse --show-toplevel", wd)
    if ret:
        # git failed: not a repository.
        return
    trace("real root", real_wd)
    if not samefile(real_wd, wd):
        # wd is somewhere below the root, not the root itself.
        return
    return cls(real_wd)
def main(argv): try: opts, args = getopt.getopt(argv[1:], 'hvbcm', ["help", "verbose", "bidirection", "backup-files", "consistency", "map-file", "scan-mapfile"]) except: usage(argv[0]) sys.exit(2) if len(args) < 2: usage(argv[0]) sys.exit(2) src = args[0].strip() dest = args[1].strip() maps_dict = {} opts_dict = {} opts_dict['verbose'] = False opts_dict['bidirection'] = False opts_dict['backup-files'] = False opts_dict['consistency'] = False opts_dict['user_confirmation'] = True opts_dict['map-file'] = '~/.cd_map_file' opts_dict['scan-mapfile'] = False for opt, arg in opts: if opt in ("-h", "--help"): usage(argv[0]) sys.exit(2) elif opt in ("-v", "--verbose"): opts_dict['verbose'] = True elif opt in ("-b", "--bidirection"): opts_dict['bidirection'] = True elif opt in ("--backup-files"): opts_dict['backup-files'] = True elif opt in ("-c", "--consistency"): opts_dict['consistency'] = True elif opt in ("-m", "--map-file"): opts_dict['map_file'] = arg elif opt in ("--scan-mapfile"): opts_dict['scan-mapfile'] = True if not path.exists(src): stderr.write('\nError : Source Path (%s) does not exist' % src) elif not path.exists(src): stderr.write('\nError : Dest Path (%s) does not exist' % dest) elif path.abspath(src) == path.abspath(dest): stderr.write('\nError : Source and Destination cannot be same!') else: try: backup_obj = cbackup(src, dest, opts_dict) backup_obj.compare_dir(bidir=opts_dict["bidirection"], diffFile = opts_dict["consistency"],backup = opts_dict["backup-files"]) if opts_dict["scan-mapfile"]: for s,d in backup_obj.itermap(): if not path.samefile(s, src): backup_obj.compare_dir(src=s, dest=d, bidir=opts_dict["bidirection"], diffFile = opts_dict["consistency"],backup = opts_dict["backup-files"]) except Exception, details: stderr.write("\nException : %s\n" % str(details))
def sameFile(sSrcPath, sDestPath):
    """True when both paths refer to the same file.

    Uses os.path.samefile where available (Macintosh, Unix); on other
    platforms falls back to comparing normalized absolute path names.
    """
    if hasattr(osp, 'samefile'):
        try:
            return osp.samefile(sSrcPath, sDestPath)
        except OSError:
            # A path does not exist -- cannot be the same file.
            return False
    # All other platforms: check for same pathname.
    return pathEqual(osp.abspath(sSrcPath), osp.abspath(sDestPath))
def test_dropEvent_selects_file(self):
    """Dropping a local-file URL must switch the source to LOCAL_FILE,
    remember the dropped path and trigger a load."""
    self.widget.load_data = Mock()
    self.widget.source = OWFile.URL
    drop = self._drop_event(QUrl.fromLocalFile(TITANIC_PATH))
    self.widget.dropEvent(drop)
    self.assertEqual(self.widget.source, OWFile.LOCAL_FILE)
    self.assertTrue(path.samefile(self.widget.last_path(), TITANIC_PATH))
    self.widget.load_data.assert_called_with()
def search_files_upward(start_path=None):
    "Search for requirements.txt upward"
    if not start_path:
        start_path = op.abspath(op.curdir)
    has_marker = (op.exists(op.join(start_path, 'requirements.txt'))
                  or op.exists(op.join(start_path, 'setup.py')))
    if has_marker:
        return start_path
    parent = op.abspath(op.join(start_path, '..'))
    if op.samefile(start_path, parent):
        # Reached the filesystem root without finding a marker file.
        return None
    return search_files_upward(start_path=parent)
def ln(src, dst):
    """Symlink `src` (resolved through gen_src) into directory `dst`.

    No-op when `dst` already holds a link to the same file; raises
    ConflictException when it holds something else.
    """
    src = gen_src(src)
    dstTrue = path.join(dst, path.basename(src))
    if path.exists(dstTrue):
        if path.samefile(src, dstTrue):
            # Already linked correctly: nothing to do.
            return
        else:
            raise ConflictException(dstTrue)
    run_shell("mkdir -p {}", dst)
    run_shell("ln -s {} {}", src, dst)
def url_changed(self, url):
    """Reimplemented to remove file paths from the url string"""
    try:
        # QUrl -> str; plain strings fall through unchanged.
        url = asstring(url.toString())
    except AttributeError:
        pass
    if url.startswith('file://'):
        fname = html2file(url)
        # Only rewrite paths that live inside our build directory: the
        # common prefix equals build_dir exactly when fname is under it.
        if osp.samefile(self.build_dir, osp.commonprefix([
                fname, self.build_dir])):
            url = osp.splitext(osp.basename(fname))[0]
    super(UrlHelp, self).url_changed(url)
def bringdown(self, structure, workdir, outdir):
    """ Copies files back to output directory.

        Cats input intO output. Removes workdir if different from outdir
        **and** run was successfull.
    """
    from itertools import chain
    from os import remove
    from os.path import join, samefile, exists
    from shutil import rmtree
    from glob import iglob
    from ..misc import copyfile, Changedir
    from .. import CRYSTAL_filenames, CRYSTAL_delpatterns
    with Changedir(outdir) as cwd:
        # remove 'is running' file marker.
        if exists('.pylada_is_running'):
            try:
                remove('.pylada_is_running')
            except OSError:
                pass
        # Copy each known CRYSTAL output file back (Python 2: iteritems).
        for key, value in CRYSTAL_filenames.iteritems():
            copyfile(
                join(workdir, key), value.format('crystal'),
                nocopyempty=True, symlink=False, nothrow="never"
            )
        header = ''.join(['#']*20)
        # Concatenate any ERROR.* files into crystal.err, then append the
        # de-duplicated non-blank lines to crystal.out.
        if len([0 for filename in iglob(join(workdir, 'ERROR.*'))]):
            string = ""
            for filename in iglob(join(workdir, 'ERROR.*')):
                with open(filename, 'r') as file:
                    string += file.read() + '\n'
            with open('crystal.err', 'w') as out:
                out.write(string)
            lines = []
            with open('crystal.err', 'r') as out:
                for line in out:
                    if len(line.rstrip().lstrip()) == 0:
                        continue
                    # NOTE(review): membership is tested on the raw line but
                    # the stripped line is stored -- dedup is approximate.
                    if line not in lines:
                        lines.append(line.rstrip().lstrip())
            with open('crystal.out', 'a') as out:
                out.write('{0} {1} {0}\n'.format(header, 'ERROR FILE'))
                out.write('\n'.join(lines))
                if len(lines) > 0:
                    out.write('\n')
                out.write('{0} END {1} {0}\n'.format(header, 'ERROR FILE'))
    if exists(workdir):
        if samefile(outdir, workdir):
            # outdir IS workdir: only delete the scratch-file patterns.
            with Changedir(workdir) as cwd:
                for filepath in chain(*[iglob(u) for u in CRYSTAL_delpatterns]):
                    try:
                        remove(filepath)
                    except OSError:
                        pass
        elif ExtractBase(outdir).success:
            # Separate workdir and a successful run: remove it entirely,
            # along with the 'workdir' symlink in outdir.
            try:
                rmtree(workdir)
            except OSError:
                pass
            try:
                remove(join(outdir, 'workdir'))
            except OSError:
                pass
def search_files_upward(start_path=None):
    "Search for requirements.txt, setup.py or Pipfile upward"
    if not start_path:
        start_path = op.abspath(op.curdir)
    markers = ('requirements.txt', 'setup.py', 'Pipfile')
    for marker in markers:
        if op.exists(op.join(start_path, marker)):
            return start_path
    parent = op.abspath(op.join(start_path, '..'))
    if op.samefile(start_path, parent):
        # Hit the filesystem root: give up.
        return None
    return search_files_upward(start_path=parent)
def test_GetMultiFile(self):
    """Streams for the same underlying file must share one open OS file;
    a different file's stream must not, and closing one stream must not
    close the other."""
    file_obj1 = self.get_client_file_obj()
    file_obj2 = self.TempDir[self.TempFileName]
    # A third, distinct file with different contents.
    file_obj3 = self.TempDir[self.TempFileName + 'a']
    file_obj3.PutData(self.FileContentsNX)
    with file_obj1.GetStream() as stream1:
        self.assertEqual(file_obj1.GetData(), file_obj2.GetData())
        with file_obj2.GetStream() as stream2:
            # Same file -> same OS-level open file.
            self.assertTrue(sameopenfile(stream1.fileno(), stream2.fileno()))
        # Closing stream2 must leave stream1 usable.
        self.assertFalse(stream1.closed)
        with file_obj3.GetStream() as stream2:
            self.assertFalse(samefile(stream1.name, stream2.name))
            self.assertFalse(sameopenfile(stream1.fileno(), stream2.fileno()))
def newLink(s, d):
    # Create symlink d -> s (Python 2 code).  s is made absolute; a
    # relative d is taken relative to $HOME.  An existing correct link is
    # kept, anything else at d is removed, and parent dirs are created.
    s = osp.abspath(s)
    if not d.startswith('/'):
        d = osp.join(os.getenv('HOME'), d)
    if osp.lexists(d):
        if osp.exists(d) and osp.samefile(d, s):
            # Already links to the right target: nothing to do.
            return
        os.remove(d)
    dir = osp.dirname(d)
    if not osp.exists(dir):
        os.makedirs(dir)
    os.symlink(s, d)
    print d, '-->', s  # Python 2 print statement