def _2dr(rnx2dr_path):
    '''Convert one RINEX file to a GipsyX ``.dr`` file via ``rnxEditGde.py``.

    Runs the converter as a subprocess so multiple instances can execute
    at once. Work is staged in a per-file cache directory (e.g. a RAM
    disk) and the result is copied back to the requested output folder.
    If the converted file is already present, nothing happens.

    Parameters
    ----------
    rnx2dr_path : sequence
        ``[in_file_path, out_file_path, cache_path, staDb_path]``.

    NOTE(review): original comment — "We might want to dump and kill
    service tree files and stats".
    '''
    in_file_path = rnx2dr_path[0]
    out_file_path = rnx2dr_path[1]
    cache_path = rnx2dr_path[2]
    staDb_path = rnx2dr_path[3]

    out_dir = _os.path.dirname(out_file_path)
    # Per-file staging dir, smth like /cache/anau2350.10d.dr.gz/
    cache_dir = _os.path.join(cache_path, _os.path.basename(out_file_path))
    # exist_ok avoids the exists()/makedirs() race between parallel workers.
    _os.makedirs(cache_dir, exist_ok=True)

    _copy(src=in_file_path, dst=cache_dir)  # stage the input in the cache
    in_file_cache_path = _os.path.join(cache_dir,
                                       _os.path.basename(in_file_path))
    out_file_cache_path = _os.path.join(cache_dir,
                                        _os.path.basename(out_file_path))
    try:
        process = _Popen([
            'rnxEditGde.py', '-dataFile', in_file_cache_path, '-staDb',
            staDb_path, '-o', out_file_cache_path
        ], cwd=cache_dir)
        process.wait()
        _copy(src=out_file_cache_path, dst=out_dir)  # copy result to destination
    finally:
        # Always clear the folder in RAM, even if the conversion failed.
        _rmtree(cache_dir)
def copy(src_path, dst_path, rel=None, force=True, ignore=None, file_tree=False):
    """Copy a file or directory tree into *dst_path*.

    Directory sources are copied with ``copytree`` (replacing any existing
    destination unless ``force`` is off); file sources are copied with the
    module-level ``_copy``, optionally recreating the source's parent
    folders under *dst_path* when ``file_tree`` is true.

    NOTE(review): reconstructed from a collapsed one-line definition —
    nesting of the final branch is inferred; confirm against the original
    file. Relies on bare names (``isfile``, ``relpath``, ``copytree``,
    ``cprnt``, ``sep``, ...) imported elsewhere in this module.
    """
    if not isfile(src_path):
        # Directory source: compute the folder name to create under dst.
        # With `rel`, preserve the path of src relative to `rel`.
        rel_folder = (relpath(src_path, rel) if rel else basename(
            normpath(src_path)))
        # dst_path = relpath(dst_path, rel) if rel else dst_path
        dst_path = join(dst_path, rel_folder)
        if exists(dst_path) and not force:
            # Leave an existing destination untouched when force is off.
            cprnt(warn="COPY: {0} exists, force flag is off so leaving as is.".
                  format(dst_path))
            return dst_path
        # force: replace any previous copy wholesale.
        rmtree(dst_path, ignore_errors=True)
        ignore_arg = ignore_patterns(ignore) if ignore else None
        copytree(src_path, dst_path, ignore=ignore_arg)
        return dst_path
    if (basename(src_path) != src_path) and file_tree:
        # File source with a path prefix: rebuild the parent directories
        # of src under dst before copying the file itself.
        for subdir in src_path.split(sep)[:-1]:
            dst_path = join(dst_path, subdir)
        # exist_ok mirrors `force` — presumably intentional; TODO confirm.
        makedirs(dst_path, exist_ok=force)
    _copy(src_path, dst_path)
    return dst_path
def cf(src_filename, dest_filename):
    '''Copy the contents of the source file to the given destination file.

    Overwrites the destination file if it already exists. If either
    argument is ``None`` the call is a silent no-op.

    Raises:
        IOError: if the underlying copy fails; chained to the original
            error so the failing path is preserved in the traceback.
    '''
    if src_filename is not None and dest_filename is not None:
        try:
            _copy(src_filename, dest_filename)
        except IOError as error:
            # Chain the cause instead of discarding it.
            raise IOError(error.strerror) from error
def pdb2traj(code, filename=None, verbose=True, url="https://files.rcsb.org/download/", ):
    r""" Return a :obj:`~mdtraj.Trajectory` from a four-letter PDB code via RSCB PBB lookup

    Thinly wraps around :obj:`mdtraj.load_pdb`, printing the corresponding citation.
    Will return None if lookup fails

    Parameters
    ----------
    code : str
        four-letter code, e.g. 3SN6
    filename : str, default is None
        if str, save to this file, eventually overwriting
    verbose : bool, default is True
        (docstring previously said False; the signature default is True)
    url : str, default is 'https://files.rcsb.org/download'
        base URL for lookups

    Returns
    -------
    traj : :obj:`~mdtraj.Trajectory` or None
    """
    url1 = "%s/%s.pdb" % (url.strip("/"), code.strip("/"))
    # TODO use print_v elsewhere
    print_v = lambda s, **kwargs: [print(s, **kwargs) if verbose else None][0]
    print_v("Checking %s" % url1, end=" ...", flush=True)
    geom = None
    try:
        a = _requests.get(url1)
        if a.ok:
            with _NamedTemporaryFile(mode="w", suffix=".pdb") as f:
                f.writelines(a.text)
                # BUGFIX: flush so the buffered text is on disk before the
                # file is re-opened by name below.
                f.flush()
                print_v("done")
                geom = _load_pdb(f.name)
                # TODO use string.IO
                if filename is not None:
                    print_v("Saving to %s..." % filename, end="", flush=True)
                    if filename.lower().endswith(".pdb"):
                        _copy(f.name, filename)
                    else:
                        geom.save(filename)
                    print_v(filename)
        else:
            raise _URLError(a.text, filename=url1)
    except (_HTTPError, _URLError) as e:
        print(url1, ":", e)
    if geom is not None:
        # Print the citation for the looked-up structure.
        pdb2ref(code)
    return geom
def cache_staDb_path(self):
    """Copy this project's staDb file into the cache and return its cached path.

    Creates ``<cache_path>/staDb/<project_name>/`` if needed and copies
    ``self.staDb_path`` into it.

    Returns:
        str: absolute/joined path of the cached staDb file.
    """
    staDb_dir_cached = _os.path.join(self.cache_path, 'staDb',
                                     self.project_name)
    # exist_ok avoids the exists()/makedirs() race under parallel use.
    _os.makedirs(staDb_dir_cached, exist_ok=True)
    _copy(src=self.staDb_path, dst=staDb_dir_cached)
    staDb_path_cached = _os.path.join(staDb_dir_cached,
                                      _os.path.basename(self.staDb_path))
    return staDb_path_cached
def prep_ionex_file(file_path, cache_path):
    """Stage a compressed IONEX file in the cache and uncompress it there.

    Copies *file_path* into ``<cache_path>/<basename>/`` and runs the
    module-level ``uncompress`` on the cached copy.

    Returns:
        str: path of the uncompressed file inside the cache (the cached
        name with its final extension, e.g. ``.Z``, stripped).
    """
    file_name = _os.path.basename(file_path)
    tmp_cache_path = _os.path.abspath(_os.path.join(cache_path, file_name))
    # create tmp folder in cache; exist_ok avoids the exists()/makedirs() race
    _os.makedirs(tmp_cache_path, exist_ok=True)
    _copy(src=file_path, dst=tmp_cache_path)  # copy .Z file to cache
    cached_file_path = _os.path.abspath(
        _os.path.join(tmp_cache_path, file_name))
    uncompress(cached_file_path)
    # Uncompressed file keeps the name minus the compression extension.
    return _os.path.join(_os.path.dirname(cached_file_path),
                         _os.path.splitext(file_name)[0])
def _ce2cm_single_thread(pos_path_series):
    """Run ``orbitCmCorrection`` on one .pos file.

    *pos_path_series* is a 3-sequence ``(pos_src, pos_dst, cache_path)``.
    The source file is staged in *cache_path*, the corrected output is
    written to *pos_dst*, and the staged copy is removed afterwards.
    """
    pos_src, pos_dst, cache_path = pos_path_series
    # Stage the source in the cache before running the correction tool.
    _copy(src=pos_src, dst=cache_path)
    staged_input = _os.path.join(cache_path, _os.path.basename(pos_src))
    correction = _Popen(
        ['orbitCmCorrection', '-s', '-i', staged_input, '-o', pos_dst])
    correction.wait()
    # Drop the staged copy once the corrected file has been produced.
    _os.remove(staged_input)
def install_styles(style_default=STYLE_BASEFILE_STD):
    """ Sets up style files

    Copies *style_default* into matplotlib's ``stylelib`` config folder.

    Keyword Args:
        style_default (str): location of style file to use by default

    Raises:
        FileNotFoundError: if *style_default* does not exist.
    """
    mpl_styledir = _join(mpl_configdir(), "stylelib")
    # Validate with a real exception: `assert` is stripped under `python -O`,
    # which would silently skip this check.
    if not _exists(style_default):
        raise FileNotFoundError(
            "STYLEFILE {} missing... indicates a problem with some paths "
            "or corrupt package. Check source code location".format(
                style_default))
    _copy(style_default, mpl_styledir)
def cache_ionex_files(cache_path, IONEX_products_dir, ionex_type, years_list):
    """Copy the merged IONEX maps needed for *years_list* into the cache.

    Scans ``<products_dir>/IONEX_merged/<ionex_type>*`` (where products_dir
    is the parent of *IONEX_products_dir*), keeps only files whose last four
    filename characters parse as a year in *years_list*, and copies them
    into a freshly recreated ``<cache_path>/IONEX_merged`` folder.
    """
    products_dir = _os.path.join(IONEX_products_dir, _os.pardir)
    pattern = products_dir + '/IONEX_merged/' + ionex_type + '*'
    ionex_files = _pd.Series(sorted(_glob.glob(pattern)))

    # Filename ends with the 4-digit year; keep only the requested years.
    basenames = ionex_files.str.split('/', expand=True).iloc[:, -1]
    file_years = basenames.str.slice(-4).astype(int)
    selected = ionex_files[file_years.isin(years_list)]

    # Recreate the cache target from scratch before copying.
    IONEX_cached_path = _os.path.join(cache_path, 'IONEX_merged')
    if _os.path.exists(IONEX_cached_path):
        _rmtree(IONEX_cached_path)
    _os.makedirs(IONEX_cached_path)

    for path in selected:
        _copy(src=path, dst=IONEX_cached_path)
def copy(src,dst,src_base='',skip=''):
    r'''Copy file(s) to *dst* using the project's ``py``/UTNF helpers.

    src : a single file path, a list of paths, a directory path ending in
        ``/`` or ``\`` (expanded via ``F.ls``), or newline-joined paths
    dst : destination directory path (must be a str)
    src_base : common prefix stripped from each source path when building
        the destination path; auto-derived from the shortest source if empty
    skip : substring; matching sources are collected and returned, not copied

    NOTE(review): reconstructed from a collapsed one-line definition —
    nesting is inferred; confirm against the original file. ``U`` is only
    bound inside the ``if not src_base`` branch but used later — possible
    NameError when src_base is supplied and dst needs rewriting. ``min``
    shadows the builtin.
    '''
    from shutil import copy as _copy
    if not py.istr(dst):raise py.ArgumentError('dst must be str')
    if py.istr(src):
        if '\n' in src:
            # Newline-joined list of paths: split and recurse.
            src=src.splitlines()
            return copy(src,dst)
        if src[-1] in ['/','\\']:
            # Directory: expand recursively into a list of entries.
            src=F.ls(src,r=1)
        else:
            # Plain single file: delegate straight to shutil.copy.
            return _copy(src,dst)
    if not src_base:
        # Derive the common base from the shortest source path.
        U,T,N,F=py.importUTNF()
        dl=U.unique(U.len(*src),ct=1)
        min=py.min(*dl)
        f=[i for i in src if py.len(i)==min][0]
        if f[-1] in ['/','\\']:f=f[:-1]
        # Path(f).absolute().parent.absolute().__str__()
        src_base=f[:py.len(T.sub_last(f.replace('\\','/'),'','/') )+1]
    src_base_len=py.len(src_base)
    print('src_base: %r'%src_base,'len(src)==%s'%py.len(src))
    while dst[-1] not in ['/','\\']:
        # Destination must be a directory path; prompt until it is.
        dst=U.input('not dir! rewrite dst:',default=dst)
    if py.iterable(src):
        fns=[]
        skips=[]
        for i in src:
            if skip and skip in i:
                # Matched the skip substring: record and do not copy.
                skips.append(i)
                continue
            # fn=getName(i)
            # if fn in fns:
            #     fn=T.fileName(i)
            fn=i[src_base_len:]
            if fn[-1] in ['/','\\']:
                # Sub-directory entry: recreate it under dst.
                mkdir(dst+fn)
            else:
                _copy(i,dst+fn)
            fns.append(fn)
        if skips:return skips,fns
        return fns
    raise py.ArgumentUnsupported(src)
def load_mnist_dataset( directory, download=False ):
    """ Load mnist-like dataset from the directory.

    If the directory does not exist or is empty, and ```download``` is True,
    basic mnist dataset is downloaded. Otherwise, exception is raised.

    The function returns numpy array with (k, 784) shape, where k is number
    of images in the dataset.

    NOTE(review): the reshape below actually produces shape
    ``(num_images, rows*cols, 1)`` — a trailing axis the docstring does not
    mention; confirm which shape callers expect.
    """
    dataset = 'train-images-idx3-ubyte.gz'
    dataset_path = _os.path.join( directory, dataset )
    if download:
        # Fetch a fresh copy, replacing any existing archive.
        if not _os.path.exists( directory ):
            _os.makedirs( directory )
        if _os.path.exists( dataset_path ):
            _os.remove( dataset_path )
        temp_filename, _ = _urllib.request.urlretrieve( DEFAULT_MNIST_URL + dataset )
        _copy( temp_filename, dataset_path )
        with gfile.GFile( dataset_path ) as f:
            size = f.size()
            print( 'Successfully downloaded', dataset, size, 'bytes.' )
    if not _os.path.exists( directory ):
        raise RuntimeError( directory + ': No such directory' )
    if not _os.path.isdir( directory ):
        raise RuntimeError( directory + ': Not a directory' )
    print( 'Extracting', dataset )
    with gfile.Open( dataset_path, mode='rb' ) as f:
        with gzip.GzipFile( fileobj=f ) as bytestream:
            # IDX image files start with magic number 2051.
            magic = _read32( bytestream )
            if magic != 2051:
                raise ValueError( 'Invalid magic number %d in MNIST image file: %s' %
                                  (magic, f.name) )
            num_images = _read32( bytestream )
            rows = _read32( bytestream )
            cols = _read32( bytestream )
            buf = bytestream.read( rows * cols * num_images )
            data = _np.frombuffer( buf, dtype=_np.uint8 )
            data = data.reshape( num_images, rows*cols, 1 )
            # normalize pixel values from [0, 255] into [0, 1]
            data = data.astype( _np.float32 )
            data = _np.multiply( data, 1 / 255 )
    return data
def shutil_copy(src, dst):
    """Copy *src* to *dst* with long-path-safe, unicode-normalized paths.

    BUGFIX: the original called ``shutil._copy``, an attribute that does
    not exist in the ``shutil`` module and would raise ``AttributeError``
    on every call; the public ``shutil.copy`` is the intended function.
    """
    return shutil.copy(longpathify(uni(src)), longpathify(uni(dst)))
def copy(source, destination):
    """Copy *source* to *destination* and log the operation."""
    _copy(source, destination)
    # Lazy %-style arguments: the message is only formatted if INFO logging
    # is actually enabled.
    logger.info('Copied %s to %s', source, destination)
def backup(self):
    """Backup the database.

    Writes a timestamped copy of ``self.location`` alongside the original
    (``<location>.<YYYY-MM-DD HH:MM:SS>``) and reports where it went.
    """
    stamp = _dt.now().strftime('%Y-%m-%d %H:%M:%S')
    new = '.'.join([self.location, stamp])
    _copy(self.location, new)
    print('Backup at {}'.format(new))