def transform_to_template_ds(template_fn, src_fn, dst_fn):
    ds = gdal.Open(template_fn)
    transform = ds.GetGeoTransform()
    ul_x, ul_y = transform[0], transform[3]
    xres, yres = transform[1], transform[5]
    wkt = ds.GetProjection()
    x, y = ds.RasterXSize, ds.RasterYSize
    lr_x, lr_y = ul_x + x * xres, ul_y + y * yres
    xmin, ymin, xmax, ymax = ul_x, lr_y, lr_x, ul_y
    assert xmax > xmin
    assert ymax > ymin

    if _exists(dst_fn):
        os.remove(dst_fn)

    cmd = ['gdalwarp',
           '-ts', x, y,
           '-te', xmin, ymin, xmax, ymax,
           '-t_srs', wkt,
           src_fn, dst_fn]
    cmd = [str(v) for v in cmd]

    p = Popen(cmd)
    p.wait()

    assert _exists(dst_fn)
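# Hypothetical usage sketch for transform_to_template_ds (filenames are
# illustrative, not from the original source): warp 'slope.tif' onto the
# grid, extent, and CRS of 'dem.tif'. Assumes the GDAL Python bindings and
# the gdalwarp CLI are available.
#
#     transform_to_template_ds('dem.tif', 'slope.tif', 'slope.aligned.tif')
#     # -> slope.aligned.tif has the same size, bounds, and projection as dem.tif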
def query_asris(lng, lat):
    global _url

    lng = round(lng, 2)
    lat = round(lat, 2)

    if not _exists(_cache_dir):
        os.mkdir(_cache_dir)

    d = None
    fn = _join(_cache_dir, '{lng:0.2f},{lat:0.2f}.json'.format(lng=lng, lat=lat))
    if _exists(fn):
        with open(fn) as fp:
            d = json.load(fp)
    else:
        r = requests.get(_url.format(lat=lat, lng=lng))
        assert r.status_code == 200
        d = json.loads(r.text)
        d = d['results']

        with open(fn, 'w') as fp:
            json.dump(d, fp, allow_nan=False)

    _d = {row['layerName'].replace(' (value/1000)', ''): row for row in d}

    for name in _d:
        if isfloat(_d[name]['attributes']['Pixel Value']):
            _d[name]['Value'] = float(_d[name]['attributes']['Pixel Value']) / 1000
        else:
            _d[name]['Value'] = _defaults[name]

    return _d
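# Usage sketch for query_asris (coordinates and layer name are illustrative):
# coordinates are rounded to two decimals, so repeated queries for nearby
# points reuse the cached JSON instead of hitting the ASRIS web service.
#
#     d = query_asris(149.1284, -35.2809)   # cached as '149.13,-35.28.json'
#     clay = d['Clay 0-30cm']['Value']      # hypothetical layerName key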
def page_media_upload(page_id):
    user = auth.is_logged_in(request)
    page = Page.load(page_id)
    permission = auth.is_page_editor(user, page)

    overwrite = []

    for n in request.files:
        x = request.files.get(n)
        media_path = _join(page.blog.path, page.blog.media_path_generated)
        file_path = _join(media_path, x.filename)

        if _exists(file_path):
            from core.error import FileExistsError
            raise FileExistsError(
                "File '{}' already exists on the server.".format(
                    utils.html_escape(x.filename)))
        else:
            Media.register_media(x.filename, file_path, user, page=page)
            if not _exists(media_path):
                makedirs(media_path)
            x.save(file_path)

    tags = template_tags(page=page)
    return template('edit/page_media_list.tpl', **tags.__dict__)
def rm_miui_res():
    if _exists(miui_res_dir):
        shutil.rmtree(miui_res_dir)

    for dir in get_dependency_projects():
        dst_dir = _join(dir, miui_res_dir)
        if _exists(dst_dir):
            shutil.rmtree(dst_dir)
def clean(self, all=False):
    for file in self.TEMP_FILES:
        if _exists(file):
            os.remove(file)

    if all:
        for dir in self.TEMP_DIRS:
            if _exists(dir):
                shutil.rmtree(dir)
def reprocess_scene(scn_fn):
    p = subprocess.Popen(['python3', 'reprocess_scene.py', cfg_fn, scn_fn],
                         stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    p.wait()

    scn_path = scn_fn.replace('.tar.gz', '')
    if _exists(SCRATCH):
        scn_path = _join(SCRATCH, _split(scn_path)[-1])

    if _exists(scn_path):
        shutil.rmtree(scn_path)
def process_scene(scn_fn):
    p = subprocess.Popen(['python3', 'process_scene.py', cfg_fn, scn_fn],
                         stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    ret = p.communicate()
    print(ret)

    scn_path = scn_fn.replace('.tar.gz', '')
    if _exists(SCRATCH):
        scn_path = _join(SCRATCH, _split(scn_path)[-1])

    if _exists(scn_path):
        shutil.rmtree(scn_path)
def check_path(path, exp_user=False):
    """
    Verifies that path is not None or an empty string, then returns
    whether the path exists on the filesystem.

    :param str path:
    :param bool exp_user: expand ~ in path string
    """
    if path:
        return _exists(_expand(path)) if exp_user else _exists(path)
    return False
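# Minimal usage sketch for check_path, assuming _exists/_expand alias
# os.path.exists/os.path.expanduser as elsewhere in this module:
#
#     check_path('~/Documents', exp_user=True)  # True if the folder exists
#     check_path('')    # False: empty string is falsy
#     check_path(None)  # False: None is falsy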
def getWorkDir():
    """Get current user's Documents folder
    @rtype: str
    """
    # OS-specific attempt for Windows
    if os_name == 'nt':
        try:
            return getWinUserDocs()
        except OSError:
            pass
        try:
            return getWinCommonDocs()
        except OSError:
            pass

    user = _expanduser("~")
    docs = 'Documents'
    mydocs = 'My Documents'
    folders = (docs, mydocs)
    for folder in folders:
        # join with an explicit separator; ''.join would glue the user
        # directory and folder name together ('C:\Users\bobDocuments')
        workdir = '\\'.join((user, folder))
        if _exists(workdir):
            return workdir.replace('/', '\\')
    return None
def __init__(self):
    # {attr: raster_file_path}
    catalog = glob(_join(_asris_grid_raster_dir, '*'))
    catalog = [path for path in catalog if os.path.isdir(path)]
    catalog = {_split(path)[-1]: path for path in catalog}
    self.catalog = catalog

    # {attr: raster attribute table}
    rats = {}
    for var, path in catalog.items():
        fn = _join(path + '.json')
        if not _exists(fn):
            continue

        with open(fn) as fp:
            info = json.load(fp)

        if 'rat' not in info:
            continue

        rows = info['rat']['row']
        d = {}
        for row in rows:
            row = row['f']
            d[row[0]] = row[-1]

        rats[var] = d

    self.rats = rats
def warp2wgs(fn):
    dst = fn.split('.')
    dst.insert(-1, 'wgs')
    dst = '.'.join(dst)

    if _exists(dst):
        os.remove(dst)

    cmd = ['gdalwarp', '-t_srs', wgs84_proj4, '-r', 'near', fn, dst]
    p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
    p.wait()

    assert _exists(dst)
    return dst
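# Naming sketch for warp2wgs (filenames illustrative): 'wgs' is inserted
# before the extension, so 'dem.tif' -> 'dem.wgs.tif' and
# 'scene.ndvi.tif' -> 'scene.ndvi.wgs.tif'. Assumes wgs84_proj4 is the
# module-level WGS84 proj4 string and gdalwarp is on PATH.
#
#     dst = warp2wgs('dem.tif')  # writes and returns 'dem.wgs.tif'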
def retrieve(self, identifier, skip_acquired=True):
    datadir = self.datadir

    assert identifier.startswith('HLS')
    assert identifier.endswith('.hdf')

    relative_dir = self._get_identifier_relative_dir(identifier)

    if skip_acquired:
        if self.is_acquired(identifier):
            return self.get_identifier_path(identifier)

    url = 'https://hls.gsfc.nasa.gov/data/{relative_dir}/{identifier}'\
          .format(relative_dir=relative_dir, identifier=identifier)

    out_dir = self.get_identifier_dir(identifier)
    if not _exists(out_dir):
        os.makedirs(out_dir)

    identifier_path = _join(out_dir, identifier)

    output = urlopen(url, timeout=60)
    with open(identifier_path, 'wb') as fp:
        fp.write(output.read())

    output = urlopen(url + '.hdr', timeout=60)
    with open(identifier_path + '.hdr', 'wb') as fp:
        fp.write(output.read())

    return identifier_path
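# Hypothetical usage sketch for retrieve; the identifier below only
# illustrates the 'HLS*...*.hdf' shape the asserts require and is not a
# verified product name:
#
#     path = store.retrieve('HLS.L30.T11SKA.2020007.v1.4.hdf')
#
# Both the .hdf file and its .hdr sidecar are downloaded side by side.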
def getDownloadDir():
    """
    @return: filepath of download_entry dir
    @rtype: str

    Todo- figure out a non-stupid way to do this. There should be a
    special OS folder designated as the default download folder.

    Update:
    http://msdn.microsoft.com/en-us/library/windows/desktop/bb762188(v=vs.85).aspx
    Use SHGetKnownFolderPath with the correct GUID.
    issue- GUID is written as a string, but needs to be sent to the
    function as a struct(?); need to make a custom C structure.
    """
    try:
        user = _expanduser("~")
    except:
        # Todo- figure out how to find the download folder on mac?
        raise

    dl_dir = '\\'.join([user, "Downloads"])
    if not _exists(dl_dir):
        raise FileNotFoundError("Couldn't find downloads folder")
    return dl_dir.replace('/', '\\')
def register(f="setup.py"): """python setup.py register""" f=abspath(expanduser(f)) if _exists(f): r=run("python %s register" % f).std_out if not r.find("Server response (200): OK")>0: raise Exception(r)
def create_timestamped_file(filename, file_ending=".log"):
    """
    Return a timestamped filename (with the date in it) which is not
    yet present on the system

    Args:
        filename (str): Name of the file (full path) (without ending)

    Keyword Args:
        file_ending (str): The file ending of the new file

    Returns:
        str
    """
    today = _datetime.now()
    today = today.strftime("%Y_%m_%d_%H_%M")

    if filename.endswith(file_ending):
        # str.replace returns a new string; the result must be assigned
        filename = filename.replace(file_ending, today + file_ending)
    else:
        filename += (today + file_ending)

    filecount = 1
    while _exists(filename):
        filename = filename.replace("." + str(filecount - 1), "")
        filename = filename + "." + str(filecount)
        filecount += 1
        if filecount >= 60:
            raise SystemError("More than 1 file per second, "
                              "this is insane.. aborting")

    return filename
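# Usage sketch for create_timestamped_file (path and timestamp illustrative):
#
#     >>> create_timestamped_file('/var/log/myapp')
#     '/var/log/myapp2024_01_05_13_37.log'
#
# If that name is already taken, '.1', '.2', ... are appended until a free
# name is found, aborting after 60 attempts.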
def __init__(self, fn):
    assert _exists(fn)

    with open(fn) as fp:
        lines = fp.readlines()

    indx0 = []
    for i, L in enumerate(lines):
        if 'Sediment particle information leaving profile' in L:
            indx0.append(i)

    class_data = None
    if len(indx0) > 0:
        indx0 = indx0[-1]
        lines = lines[indx0:]

        assert lines[6].strip().startswith('1')
        assert lines[7].strip().startswith('2')
        assert lines[8].strip().startswith('3')
        assert lines[9].strip().startswith('4')
        assert lines[10].strip().startswith('5')

        class_data = _parse_tbl(lines[6:11],
                                ['Class', 'Diameter', 'Specific Gravity',
                                 'Pct Sand', 'Pct Silt', 'Pct Clay',
                                 'Pct OM', 'Detached Sediment Fraction',
                                 'Fraction In Flow Exiting'])

    self.class_data = class_data
def __init__(self):
    assert _exists(_grid_path)

    js = check_output('gdalinfo -json ' + _grid_path, shell=True)
    rat = json.loads(js.decode())['rat']
    field_defs = rat['fieldDefn']

    d = {}
    for row in rat['row']:
        row = row['f']
        px_value = row[0]
        row = {fd['name']: v for fd, v in zip(field_defs, row)}
        d[px_value] = row

    self.rat = d
    self.rat_field_defs = [fd['name'] for fd in field_defs]

    landuse_map = {}
    for px_value, row in d.items():
        dom = self.get_dom(px_value)
        if dom not in landuse_map:
            if dom.startswith('f'):
                desc = row['FOREST_TYPE_DESC']
            elif dom.startswith('a'):
                desc = row['COMMODITIES_DESC']
            else:
                desc = row['C18_DESCRIPTION']

            landuse_map[dom] = dict(Key=dom,
                                    Color=[0, 0, 0, 255],
                                    Description=desc,
                                    ManagementFile=None)

    self.landuse_map = {k: landuse_map[k] for k in sorted(landuse_map)}
def run(self):
    result = {"print": "Could not find Info.plist."}

    Log.info("Looking for Info.plist file")
    app_path = application_path(self.unzipped_ipa)
    filename = "{}/Info.plist".format(app_path)

    if _exists(filename):
        Log.info("Parsing Info.plist file")

        # get plist info
        info_plist = plist(filename)
        identifier = info_plist["CFBundleIdentifier"]
        result = {"{}_info".format(identifier): info_plist}

        if hasattr(self, "output") and self.output:
            Log.info("Converting Info.plist to XML file")
            filename = "{}/{}.info.xml".format(self.output, identifier)
            with open(filename, "w") as fp:
                fp.write(plist_dict_to_xml(info_plist))

            result.update({
                "{}_info_file".format(identifier): filename,
                "print": "Info file saved in {}.".format(filename)
            })

    return result
def clean(self, all=False):
    super(AntBuild, self).clean(all)

    if all:
        for build_file in self.build_temp_files:
            if _exists(build_file):
                os.remove(build_file)
def backup(self):
    """backup project file"""
    if not _exists(self.commit_id):
        os.mkdir(self.commit_id)

    dirs = self._BACKUP_DIRS
    files_suffix = self._BACKUP_FILES
    tar = tarfile.open('%s/backup.tar.gz' % self.commit_id, mode='w|gz')
    try:
        for name in os.listdir(os.path.curdir):
            if os.path.isdir(name):
                if name in dirs:
                    if self.verbose:
                        print '\tAdd %s directory' % name
                    tar.add(name)
            if os.path.isfile(name):
                for suffix in files_suffix:
                    if name.endswith(suffix):
                        if self.verbose:
                            print '\tAdd %s file' % name
                        tar.add(name)
    finally:
        tar.close()

    self.backup_file = os.path.abspath('%s/backup.tar.gz' % self.commit_id)
def page_media_upload_confirm(page_id):
    user = auth.is_logged_in(request)
    page = Page.load(page_id)
    permission = auth.is_page_editor(user, page)

    # get file NAMES, attributes, size, etc. first
    # request.form.getunicode('filename')
    # check each one on the SERVER side, not the client
    # if each file is OK, then respond appropriately and have the client
    # send the whole file
    # if not, respond with a warning to be added to the notification area

    _g = request.forms.getunicode

    file_name = _g('filename')
    file_size = _g('filesize')

    # check for file types against master list
    # check for file length
    # check for name collision

    for n in request.files:
        x = request.files.get(n)
        file_path = _join(page.blog.path, page.blog.media_path_generated,
                          x.filename)
        if _exists(file_path):
            pass
        else:
            pass
def anu_wepp_management_mod(runs_dir):
    plant_loops = set()
    man_fns = glob(_join(runs_dir, '*.man'))

    backup_dir = _join(runs_dir, 'original_mans')
    if not _exists(backup_dir):
        os.mkdir(backup_dir)

    for man_fn in man_fns:
        _fn = _split(man_fn)[-1]
        if 'pw0' in _fn:
            continue

        _man_fn = _join(backup_dir, _fn)
        shutil.move(man_fn, _man_fn)

    for man_fn in man_fns:
        _fn = _split(man_fn)[-1]
        if 'pw0' in _fn:
            continue

        man = Management(Key=None,
                         ManagementFile=_fn,
                         ManagementDir=backup_dir,
                         Description='-',
                         Color=(0, 0, 0, 0))

        with open(man_fn, 'w') as fp:
            fp.write(str(man))
def _check_channel_data(cls, channels):
    for ch in channels:
        if ch.model == 'pointwise':
            if not _exists(ch.filep):
                raise FileNotFoundError(
                    f'The file {ch.filep} does not exist.')

            if _stat(ch.filep).st_size == 0:
                raise OSError(f'The file {ch.filep} is empty.')

            pot_data = np.loadtxt(ch.filep, skiprows=1)
            msg_init = 'The pointwise potential file'

            if pot_data.shape[0] < 5:
                _warning(f'{msg_init} {ch.filep} has less than 5 parameters')

            if pot_data.shape[0] < 2:
                _error(f'{msg_init} {ch.filep} has less than 2 parameters')

            if pot_data.shape[1] == 2:
                _warning(f'{msg_init} {ch.filep} has 2 columns')

            if pot_data.shape[1] < 2:
                _error(f'{msg_init} {ch.filep} has less than 2 columns')

        if ch.model == 'custom':
            if not hasattr(ch, '__call__'):
                _error('Model is set to custom but '
                       'custom function is not provided')
def _call(command, check_file=None, **exc_args):
    """
    Call command in subprocess.

    Args:
        command (list or tuple of str): Command to call.
        check_file (str): Returns file content in exception if exists.
        exc_args: Extra arguments for exception to raise if error.

    Raises:
        apyfal.exceptions.ClientRuntimeException:
            Error while calling command.
    """
    _get_logger().debug("Running shell command: '%s'" % ' '.join(command))
    try:
        process = _Popen(command, stdout=_PIPE, stderr=_PIPE,
                         universal_newlines=True)
        outputs = list(process.communicate())
        in_error = process.returncode
    except OSError as exception:
        in_error = True
        outputs = [str(exception)]

    if in_error:
        if check_file and _exists(check_file):
            with open(check_file, 'rt') as file:
                outputs.append(file.read())

        raise _exc.ClientRuntimeException(exc='\n'.join(
            [command if isinstance(command, str) else ' '.join(command)] +
            [output for output in outputs if output]), **exc_args)
def _init_metering(self, config_env, reload=False):
    """Initialize metering services.

    Args:
        config_env (dict): Host configuration environment.
        reload (bool): Force reconfiguration.
    """
    # Cached value matches argument: already configured
    if (not reload and self._metering_env is not None and
            config_env == self._metering_env):
        return

    # Get current configuration from files
    cur_env, has_config = self._read_configuration_files()

    # Define full, up to date environment
    full_env = cur_env.copy()

    # If no current configuration, update with default configuration
    # from the metering server.
    if not has_config:
        self._update_with_default_configuration(full_env, config_env)

    # Update with user configuration
    for key, value in config_env.items():
        if value is not None:
            full_env[key] = value

    # Cached value matches full environment: already configured
    if not reload and full_env == self._metering_env:
        return

    # Check if credentials need to be updated
    update_credentials = self._credentials_needs_update(
        config_env, cur_env, full_env)

    # Check if configuration needs to be updated
    update_config = any(
        full_env.get(key) != cur_env.get(key) for key in full_env
        if key not in ('client_id', 'client_secret'))

    # All is already up to date: cache values
    if not reload and not update_config and not update_credentials:
        self._metering_env = full_env
        return

    # Update
    with self._restart_services():
        # Clear metering cache
        if _exists(_cfg.METERING_TMP):
            _call(['sudo', 'rm', _cfg.METERING_TMP])

        # Update configuration files
        self._update_configuration_files(full_env, update_config,
                                         update_credentials)

    # Cache values
    self._metering_env = full_env
def is_processed(fn):
    global out_dir
    _fn = _split(fn)[-1].split('-')[0]
    # res = glob(_join(out_dir, '{}_*_{}_{}_*_{}_{}'
    #            .format(_fn[:4], _fn[4:10], _fn[10:18], _fn[18:20], _fn[20:22])))
    res = glob(_join(out_dir, fn.replace('.tar.gz', '') + '*.csv'))
    return len(res) > 0 or _exists(
        _join(out_dir, '.{}'.format(_split(fn.replace('.tar.gz', ''))[-1])))
def __init__(self, station, wd='./', cliver="5.3.2"):
    assert _exists(wd), 'Working dir does not exist'
    self.wd = wd

    assert isinstance(station, StationMeta), \
        "station is not a StationMeta object"
    self.station = station

    self.cliver = cliver

    self.cligen532 = _join(_thisdir, "bin", "cligen532")
    self.cligen53 = _join(_thisdir, "bin", "cligen53")
    self.cligen52 = _join(_thisdir, "bin", "cligen52")
    self.cligen43 = _join(_thisdir, "bin", "cligen43")

    assert _exists(self.cligen52), "Cannot find cligen52 executable"
    assert _exists(self.cligen43), "Cannot find cligen43 executable"
def is_acquired(self, identifier):
    assert identifier.startswith('HLS')
    assert identifier.endswith('.hdf')

    identifier_path = self.get_identifier_path(identifier)
    return _exists(identifier_path)
def reproject_raster(src):
    dst = src[:-4] + '.wgs.vrt'
    dst2 = src[:-4] + '.wgs.tif'

    if _exists(dst):
        os.remove(dst)

    if _exists(dst2):
        os.remove(dst2)

    cmd = ['gdalwarp', '-t_srs', 'EPSG:4326', '-of', 'vrt', src, dst]
    p = Popen(cmd)
    p.wait()

    cmd = ['gdal_translate', '-co', 'COMPRESS=LZW', '-of', 'GTiff', dst, dst2]
    p = Popen(cmd)
    p.wait()

    # verify the final GeoTIFF, not just the intermediate VRT
    assert _exists(dst2)
def load_graph(self, args, **kwargs):
    graph = caffe.Net(args.networkfile, args.weights, caffe.TEST)

    self.save = args.save
    if self.save and not _exists(self.save):
        _mkdir(self.save)

    inputs = self.inputs if self.inputs else self.list_inputs_of_graph(graph)
    outputs = self.outputs if self.outputs else self.list_outputs_of_graph(graph)
    return graph, inputs, outputs
def get_hls(self, identifier):
    if not self.is_acquired(identifier):
        identifier_path = self.retrieve(identifier)

    identifier_path = self.get_identifier_path(identifier)
    assert _exists(identifier_path), identifier_path

    return HLS(identifier_path)
def _auto_run_filepaths(self):
    f = join(self.folder, 'auto_run.json')
    if not _exists(f):
        self._logger.warn("File %s does not exist.", f)
        return []

    with open(f) as json_file:
        fpaths = json.load(json_file)

    fpaths = [str(fpath) for fpath in fpaths]
    return fpaths
def rescale_ndvi(fn):
    fn_wgs_tif = fn[:-4] + '.wgs.tif'
    fn_wgs_vrt = fn[:-4] + '.wgs.vrt'

    if _exists(fn_wgs_tif):
        os.remove(fn_wgs_tif)

    if _exists(fn_wgs_vrt):
        os.remove(fn_wgs_vrt)

    cmd = ['gdalwarp', '-t_srs', 'EPSG:4326', '-of', 'vrt', fn, fn_wgs_vrt]
    p = Popen(cmd)
    p.wait()

    cmd = ['gdal_translate', '-co', 'COMPRESS=LZW', '-of', 'GTiff',
           '-scale', 0, 10000, -1, 1, '-ot', 'Float32',
           fn_wgs_vrt, fn_wgs_tif]
    cmd = [str(x) for x in cmd]
    p = Popen(cmd)
    p.wait()

    assert _exists(fn_wgs_tif)
def cp_miui_res(_url=url):
    apk_path = os.path.join('../miui', miui_res_filename)
    if not _exists(apk_path):
        os.mkdir(os.path.dirname(apk_path))
        download_file_insecure(_url + miui_res_filename, apk_path)

    # current directory: copy miui-res.apk
    if not _exists(miui_res_dir):
        os.mkdir(miui_res_dir)
    shutil.copy2(apk_path, miui_res_dir)

    for dir in get_dependency_projects():
        dst_dir = os.path.join(dir, miui_res_dir)
        if not _exists(dst_dir):
            os.mkdir(dst_dir)
        if not _exists(_join(dst_dir, miui_res_filename)):
            shutil.copy2(apk_path, dst_dir)
def _update_env():
    ## Can't import modules, only concrete objects/functions.
    ## All vars (including imported things) need to have an underscore
    ## prefix if you want them ignored by invoke.
    from os import environ as _environ
    from os.path import join as _join, expanduser as _expanduser, exists as _exists
    from getpass import getuser as _getuser

    _release_auth_file = _expanduser(_join("~", "etc", "release.auth"))
    if _exists(_release_auth_file):
        _auth = open(_release_auth_file).read().strip().split(':')
        _environ['WIKI_USER'] = _auth[0]
        _environ['WIKI_PASS'] = _auth[1]
        _environ['JIRA_USER'] = _auth[0]
        _environ['JIRA_PASS'] = _auth[1]

    _passwd_file = _expanduser(_join("~", "etc", "passwd"))
    if _exists(_passwd_file):
        _pass = open(_passwd_file).read().strip()
        _environ['SMB_USER'] = _getuser()
        _environ['SMB_PASS'] = _pass
def _load_config(self, filename):
    """
    Reload configuration
    :param filename: Path where configuration is stored
    """
    fname = _join(filename, 'bot.cfg')
    if _exists(fname):
        with open(fname, 'r') as f:
            cfg = load(f)
            self._keywords = cfg['keywords']
            self._authors = cfg['authors']
def process_scene(scn_fn, verbose=True):
    global models, out_dir, sf, bbox, sf_feature_properties_key, sf_feature_properties_delimiter

    assert '.tar.gz' in scn_fn

    if verbose:
        print(scn_fn, out_dir)

    print('extracting...')
    scn_path = scn_fn.replace('.tar.gz', '')
    if _exists(SCRATCH):
        scn_path = _join(SCRATCH, _split(scn_path)[-1])

    extract(scn_fn, scn_path)

    # Load and crop LandSat Scene
    print('load')
    _ls = LandSatScene(scn_path)

    try:
        print('clip')
        ls = _ls.clip(bbox, out_dir)
    except:
        ls = None
        _ls = None
        shutil.rmtree(scn_path)
        Path(_join(out_dir, '.{}'.format(_split(scn_path)[-1]))).touch()
        raise

    _ls.dump_rgb(_join(ls.basedir, 'rgb.tif'), gamma=1.5)

    print('ls.basedir', ls.basedir)

    # Build biomass model
    bio_model = BiomassModel(ls, models)

    # Export grids
    print('exporting grids')
    bio_model.export_grids(biomass_dir=_join(ls.basedir, 'biomass'))

    # Analyze pastures
    print('analyzing pastures')
    res = bio_model.analyze_pastures(sf, sf_feature_properties_key,
                                     sf_feature_properties_delimiter)

    # get a summary dictionary of the landsat scene
    print('compiling summary')
    ls_summary = ls.summary_dict()

    print('reprojecting scene')
    scn_dir = _join(out_dir, _ls.product_id)
    reproject_scene(scn_dir)

    # ls = None
    # _ls = None
    # shutil.rmtree(scn_path)

    return dict(res=res, ls_summary=ls_summary)
def __init__(self, token, channel, cache):
    """
    Constructor

    :param token: Authentication token for bot
    :param channel: Name of the channel where the bot is hosted
    :param cache: Location where to cache data
    """
    # Bot mention detection
    self._self_mention = None
    self._channel = channel
    self._bot_id = None

    # Commands
    self._keywords = []
    self._authors = []
    self._known_cmd = {
        'help': (self._help_callback, ''),
        'list_keywords': (self._list_keyords_callback, ''),
        'add_keywords': (self._add_keyords_callback,
                         'List of space separated keywords to add'),
        'run_daily_arxiv_search': (self._run_daily_arxiv_search, '')
    }

    # Arxiv wrapper
    self._cache_folder = cache
    self._arxiv_cfg = _join(self._cache_folder, 'arxiv.cfg')
    if not _exists(self._arxiv_cfg):
        # cs.CV: Computer Vision
        # cs.AI: Artificial Intelligence
        # cs.LG: Machine learning
        # stat.ML: Machine learning
        # cs.GR: Graphics
        self._arxiv = ArxivParser(
            category=['cs.CV', 'cs.AI', 'cs.LG', 'stat.ML', 'cs.GR'])
        self._arxiv.save_config(self._arxiv_cfg)
    else:
        self._arxiv = ArxivParser.from_config(self._arxiv_cfg)

    # Reload authors/keywords
    self._load_config(self._cache_folder)

    # Create client, define message callback + start service,
    # run asynchronously:
    # https://github.com/slackapi/python-slackclient/blob/master/tutorial/PythOnBoardingBot/async_app.py
    # https://stackoverflow.com/questions/56539228

    # Start Slack client + scheduler for daily research
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    self.client = RTMClient(token=token, run_async=True, loop=loop)
    self.client.on(event='open', callback=self.open_callback)
    self.client.on(event='message', callback=self.message_callback)
    loop.run_until_complete(
        asyncio.gather(self._daily_scheduler(token), self.client.start()))
    loop.close()
def unique_name(fpath):
    """
    Make a unique name for the filepath by splitting off the extension
    and inserting (1), (2), ... before it until a unique name is
    generated.

    @param fpath: filepath to make unique name for
    @type fpath: str

    @return: str
    @rtype: str
    """
    if not _exists(fpath):
        return fpath

    split_path = _splitext(fpath)
    i = 1
    tmplt = "(%d)".join(split_path)
    new = tmplt % i
    while _exists(new):
        i += 1
        new = tmplt % i
    return new
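# Usage sketch for unique_name (filenames illustrative): if 'report.txt'
# exists, the counter is inserted before the extension.
#
#     >>> unique_name('report.txt')
#     'report(1).txt'   # or 'report(2).txt' if 'report(1).txt' exists too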
def find_spec():
    current = dirname(abspath(__file__))
    while 1:
        spec = join(current, 'build.spec.xml')
        if _exists(spec):
            return spec
        if current == '/':
            break
        current = normpath(join(current, '..'))
def clean(f="setup.py"): """python setup.py clean --all""" f=abspath(expanduser(f)) pwd = getcwd() if _exists(f): try: chdir(dirname(f)) command="python setup.py clean --all" run(command) chdir(pwd) except Exception,e: chdir(pwd) raise Exception(str(e))
def _run_pull():
    """Update remote repo. Should be started within project directory."""
    if env['host_string'] == STAGING['hostname']:
        run('git checkout staging', pty=True)
    run('git pull', pty=True)

    # Setup virtualenv.
    run('python bootstrap.py')

    for log_file in LOGS.values():
        if not _exists(log_file):
            run('touch %s' % log_file, pty=True)
            run('chmod a+w %s' % log_file, pty=True)
def check(bin="python",f="setup.py"): """return True if "python setup.py check" succesfull""" f=abspath(expanduser(f)) if _exists(f): pwd = getcwd() # change current dir to testing relative paths chdir(getenv("HOME")) try: command="%s %s check" % (bin,f) run(command) chdir(pwd) # return current dir except Exception, e: chdir(pwd) # return current dir raise Exception(str(e))
def upload(f="setup.py",distribution="sdist"): """python setup.py distribution upload""" f=abspath(expanduser(f)) pwd = getcwd() if _exists(f): try: chdir(dirname(f)) register(f) command="python setup.py %s upload" % distribution r=run(command).std_out chdir(pwd) if r.find("Server response (200): OK")>0: return True if r.find("you should create a new release")>0: raise Exception(r.splitlines()[-2:]) except Exception,e: chdir(pwd) raise Exception(str(e)) raise Exception(r)
def install(bin="python",f="setup.py"): """python setup.py install""" f=abspath(expanduser(f)) pwd = getcwd() if _exists(f): try: clean(f) # clean first chdir(dirname(f)) command="%s setup.py install" % bin if linux: run("sudo "+command) # idk how sudo in linux, use Mac OS ;) if osx: sudo(command) if windows: run(command) chdir(pwd) return True except Exception: chdir(pwd) return False
def name():
    """python setup.py --name"""
    setup = abspath(expanduser("setup.py"))
    if _exists(setup):
        return run("python setup.py --name").std_out.splitlines()[-1]
def exists(self, path):
    return _exists(self.getsyspath(path))
def mkdir(path):
    if len(path) > 1 and not _exists(path):
        _makedirs(path)
def exists(path):
    return _exists(expanduser(path))
from tools.integration.verilog import VerilogModel

#-----------------------------------------------------------------------
# py.test decorators
#-----------------------------------------------------------------------

from pytest import mark as _mark
from distutils.spawn import find_executable as _find_executable
from os.path import exists as _exists

_has = lambda x: _find_executable( x ) != None

requires_xcc = _mark.skipif(
    not( _has('maven-gcc') and _has('maven-objdump') ),
    reason='requires cross-compiler toolchain'
)

requires_vmh = _mark.skipif(
    not _exists('../tests/build/vmh'),
    reason='requires vmh files'
)

requires_iverilog = _mark.skipif(
    not( _has('iverilog') ),
    reason='requires iverilog'
)

requires_verilator = _mark.skipif(
    not( _has('verilator') ),
    reason='requires verilator'
)

#-----------------------------------------------------------------------
# pymtl namespace
#-----------------------------------------------------------------------

__all__ = [
    # Model Construction
    'Model',
    'VerilogModel',
def calculate_unhashed_fps(self, draw_substructures=False,
                           image_directory='./images_substructures'):
    # get the dictionary for the substructures
    idxs = []
    substr_ids = []
    counts = []
    substructure_dictionaries = []
    for mol_index, mol in enumerate(self.mols):
        info = {}
        fp = _GetMorganFingerprint(mol, radius=self.max_radius, bitInfo=info)
        substructure_dictionary = {k: mol_index for k, v in info.iteritems()
                                   if v[0][1] in self.radii}
        substructure_dictionaries.append(
            {k: mol_index for k, v in info.iteritems() if v[0][1] in self.radii})
        substr_ids.append(substructure_dictionary.keys())
        idxs.append([mol_index] * len(substructure_dictionary.keys()))
        counts.append([len(info.values()[x]) for x in _arange(0, len(info))
                       if info.values()[x][0][1] in self.radii])

        # get the smiles for the substructures
        amap = {}
        substructures_smiles = {
            k: [_MolToSmiles(_PathToSubmol(
                mol, _FindAtomEnvironmentOfRadiusN(mol, v[0][1], v[0][0]),
                atomMap=amap))]
            for k, v in info.iteritems() if v[0][1] in self.radii}
        self.substructures_smiles.update(substructures_smiles)

        # generate the images for the substructures if required..
        if draw_substructures:
            if not _exists(image_directory):
                _makedirs(image_directory)
            for k, v in info.iteritems():
                if k not in self.substructure_dictionary.keys() and v[0][1] in self.radii:
                    image_name = "%s/Molecule_%d_substr_%d.pdf" % (image_directory, mol_index, k)
                    env = _FindAtomEnvironmentOfRadiusN(mol, v[0][1], v[0][0])
                    amap = {}
                    submol = _PathToSubmol(mol, env, atomMap=amap)
                    _MolToFile(mol, image_name, size=(300, 300),
                               wedgeBonds=True, kekulize=True,
                               highlightAtoms=amap.keys())

    #self.substructure_dictionary = self._combine_dicts(substructure_dictionary, self.substructure_dictionary)
    for d in substructure_dictionaries:
        for k, v in d.iteritems():
            l = self.substructure_dictionary.setdefault(k, [])
            if v not in l:
                l.append(v)

    idxs = _array([val for sublist in idxs for val in sublist])
    counts = _array([val for sublist in counts for val in sublist])
    substr_ids_flattened = [val for sublist in substr_ids for val in sublist]
    substr_ids = _array(substr_ids_flattened)
    self.substructure_ids = substr_ids

    if len(self.reference_substructure_keys) == 0:
        print "No input set of keys for the substructures. \nThus, the substructures present in the input molecules will be considered for the calculation of unhashed fingerprints."
        columns = _array(list(set(self.substructure_dictionary.keys())))
    else:
        columns = _array(self.reference_substructure_keys)
    columns = _sort(columns)
    self.columns_unhashed = columns
    dimensionality_unhashed = len(columns)

    fps_unhashed_binary = _zeros((len(self.mols), dimensionality_unhashed), dtype=int)
    fps_unhashed_counts = _zeros((len(self.mols), dimensionality_unhashed), dtype=int)

    mapping = _array([(substr_ids[x] == columns).nonzero()
                      for x in _arange(0, len(substr_ids))])
    mapping = mapping.flatten()
    idxs = _array([idxs[x] for x in _arange(0, len(mapping)) if mapping[x].size != 0])
    counts = _array([counts[x] for x in _arange(0, len(mapping)) if mapping[x].size != 0])
    mapping = _array([mapping[x] for x in _arange(0, len(mapping)) if mapping[x].size != 0])

    if len(mapping) == 0:
        print "There is no intersection between the substructures \n(i) provided in the reference key set, and\n(ii) the substructures found in the input molecules."
        return

    fps_unhashed_binary[idxs, mapping] = _ones(len(mapping))
    fps_unhashed_counts[idxs, mapping] = counts
    self.fps_unhashed_binary = fps_unhashed_binary
    self.fps_unhashed_counts = fps_unhashed_counts
def version():
    """python setup.py --version"""
    setup = abspath(expanduser("setup.py"))
    if _exists(setup):
        return run("python setup.py --version").std_out.splitlines()[-1]
def getFullFilename(path, hint=None):
    """
    Function to get full library path. Figures out what's in the path
    iteratively, based on 3 common scenarios.

    @param path: a filepath or filename
    @type path: str
    @param hint: the first directory tree in which to search for the file
    @type hint: str

    @return: full library path to existing file.
    @rtype: str

    Try to find the path by checking for three common cases:
        1. filename with extension
            - base
            - no base
        2. filename with only base
            2.1 partially qualified directory name
            2.2 fully qualified directory name
        3. neither one

    Build list of folders to search by calling first helper function.

    Update 1/16/2014- xl nonsense gone
    I moved the algorithm for executing the search to an inlined dispatch
    function that receives all the relative args, for the sake of making
    this function cleaner, but I'm not sure if that level of indirection
    just makes everything even worse. Having it defined within this
    function allows it to access path, etc. variables without having to
    explicitly call them. In all, there is much less text in the areas
    in which the dispatch is called.
    """
    path = path.replace('/', '\\')  # Normalize sep type

    # Was path already good?
    if _exists(path):
        return path

    # Begin process of finding file
    search_dirs = _lib_path_search_dir_list_builder(hint)
    base, name = _split(path)
    ext = _splitext(name)[1]

    # Most likely- given extension.
    # No need to check for the case of a fully qualified basename: an
    # existing file with a fully qualified base name and extension would
    # have been caught by the earlier _exists()
    if ext:
        if base:
            v_print('\nPartially qualified filename \'', path,
                    "\' given, searching for file...")
            return _get_lib_path_parital_qualname(name, base, search_dirs)
        # else
        v_print("\nNo directory given for \'", path,
                "\', scanning for file...")
        return _get_lib_path_no_basename(path, search_dirs)

    # Next, given filename with base, but no extension
    elif base:
        drive, _tail = _splitdrive(base)
        # fully qualified base, just check the dir for a matching name
        if drive:
            v_print("\nNo file extension given for \'", path,
                    "\', scanning for file...")
            return _get_lib_path_no_extension(path)
        # partially qualified base, search dirs. I don't think this works
        # well (at all); I don't think I managed to get a working unittest
        # for it.
        else:
            v_print("\nAttempting to find partially qualified name \'",
                    path, "\' ...")
            return _get_lib_path_parital_qualname(name, base, search_dirs)

    # Finally, user gave no context- no base or extension.
    # Try really hard to find it anyway.
    else:
        v_print("\nNo context given for filename, scanning for file.\nIf you give a full filepath, you wouldn't \nhave to wait for the long search.")
        return _get_lib_path_no_ctxt(path, search_dirs)

    # noinspection PyUnreachableCode
    raise SystemExit("Unreachable code reached: fix module olutils")
def fullname(f="setup.py"):
    """python setup.py --fullname"""
    f = abspath(expanduser(f))
    if _exists(f):
        return run("python %s --fullname" % f).std_out.splitlines()[-1]