def get_directory_path(dirbranch=None):
    """Return the absolute path of the leaf directory of *dirbranch*.

    The branch is resolved relative to the directory containing this file;
    any missing directories along the branch are created on demand.

    Parameters
    ----------
    dirbranch : tuple or None
        Sequence of directory names forming the branch. If ``None`` (or
        empty), the directory of the current file is returned unchanged.

    Returns
    -------
    dirpath : string
        Absolute path to the leaf directory.
    """
    base_dir = _path.dirname(_path.realpath(__file__))
    if not dirbranch:
        return base_dir
    leaf_dir = _path.join(base_dir, *dirbranch)
    if not _path.exists(leaf_dir):
        _makedirs(leaf_dir)
    return leaf_dir
def makedirs_for_py2k(path, *args, **kwds):
    """os.makedirs wrapper emulating Python 3's ``exist_ok`` on Python 2.

    ``exist_ok`` is accepted as a keyword argument and stripped before
    delegating, since Python 2's os.makedirs does not accept it.  When
    ``exist_ok`` is true and *path* is already a directory, this is a no-op
    instead of raising OSError(EEXIST).

    :param path: directory to create (parents included)
    :return: the result of os.makedirs, or None when creation was skipped
    """
    # BUG FIX: pop() always removes the key. The old `if get(): del` form
    # left a falsy exist_ok in kwds (TypeError from makedirs), and the old
    # if/else had identical branches, so an existing directory still raised
    # OSError(EEXIST) even when exist_ok was requested.
    exist_ok = kwds.pop('exist_ok', False)
    if exist_ok and os.path.isdir(path):
        return
    return _makedirs(path, *args, **kwds)
def make_locations(locations=None, verbose=True):
    '''
    Creates folders

    :param locations:
        A list of folders to create (can be a dictionary, see note below)
    :param verbose:
        Warn if any folders were created

    .. note::
        * |params_locations_dict|
        * |param_locations_none|
    '''
    from photon.util.structures import to_list
    from photon.util.system import shell_notify

    if not locations:
        locations = get_locations().values()

    created = list()
    # Deepest paths first: creating a leaf also creates its parents, so the
    # shallower entries are then found to exist and are not reported twice.
    for folder in sorted(to_list(locations), reverse=True):
        if not _path.exists(folder):
            _makedirs(folder)
            created.append(folder)
    if verbose and created:
        shell_notify('path created', state=None, more=created)
    return created
def make_dirs(path: str):
    """Create *path* (including parents) unless something already exists there.

    Losing a race with a concurrent creator is tolerated: EEXIST raised by
    os.makedirs is swallowed, while any other OSError propagates.
    """
    if not exists(path):
        try:
            _makedirs(path)
        except OSError as err:
            if err.errno == errno.EEXIST:
                return
            raise
def startup():
    '''Do basic setup for the program. This really should only be run once
    but has some basic tests to prevent double-assignment'''
    _chdir(sys.path[0])
    _makedirs("logs", exist_ok=True)
    global args, logger, loop
    args = parseargs()
    loop = asyncio.get_event_loop()

    # Set up logging: truncate the log file, then attach handlers once.
    open(settings.logfile, 'w').close()
    logger = logging.getLogger(__name__)
    rootLogger = logging.getLogger()
    # BUG FIX: was `len(...) is 0` -- identity comparison with an int literal
    # is implementation-defined (and a SyntaxWarning on 3.8+); use equality.
    if len(rootLogger.handlers) == 0:
        rootLogger.setLevel(logging.DEBUG)
        conFmt = "[%(levelname)8s] %(name)s: %(message)s"
        ch = logging.StreamHandler()
        ch.setFormatter(logging.Formatter(conFmt))
        rootLogger.addHandler(ch)
        filFmt = "[%(levelname)8s] %(asctime)s %(name)s: %(message)s"
        fh = logging.FileHandler(settings.logfile)
        fh.setFormatter(logging.Formatter(filFmt, "%b-%d %H:%M:%S"))
        rootLogger.addHandler(fh)
        util.setLogLevel(args.verbose, True)
        logger.debug('Logging set up.')
    logger.debug('Args state: {}'.format(args))
    logger.info('Badge check midlayer v{} starting on {} ({})'.format(
        settings.version, datetime.now().date(),
        datetime.now().date().strftime("%A")))

    # Set up API key: must be a valid UUID read from the configured file.
    try:
        with open(getSetting('apikey')) as f:
            settings.magapi.headers['X-Auth-Token'] = str(
                UUID(f.read().strip()))
    except FileNotFoundError:
        logger.fatal(
            'Could not find API key file, refusing to run. ({})'.format(
                path.abspath(getSetting('apikey'))))
        raise SystemExit
    except ValueError:
        logger.fatal('API key not a valid UUID, refusing to run.')
        raise SystemExit

    # Start the websocket server exactly once; NameError means it was not
    # created yet (the double-assignment guard mentioned in the docstring).
    global server
    try:
        server
    except NameError:
        server = loop.run_until_complete(
            websockets.serve(
                prcsConnection,
                socket.gethostbyname_ex(socket.getfqdn())[-1] + ['127.0.0.1'],
                getSetting('l_port')))
        for s in server.sockets:
            logger.info('Now listening for connections on {}:{}'.format(
                *s.getsockname()))
def makedirs(path: str):
    """Ensure the parent directory of *path* exists and return it.

    *path* is treated as a file path: its directory component (or ``"."``
    when it has none) is created with ``exist_ok=True`` and returned.
    """
    from os import makedirs as _makedirs
    from os.path import dirname

    parent = dirname(str(path)) or "."
    _makedirs(parent, exist_ok=True)
    return parent
def __init__(self):
    """Init

    Resolves the current server name and the cache storage directory,
    creating the directory when it does not exist yet.
    """
    from pytsite import router as _router

    # Server name; presumably used to namespace cache keys -- TODO confirm
    # against _get_key_path()/other methods of this driver.
    self._server_name = _router.server_name()

    # Storage root from the 'cache.file_driver_storage' registry key,
    # defaulting to '<paths.storage>/cache'.
    self._path = _reg.get('cache.file_driver_storage',
                          _path.join(_reg.get('paths.storage'), 'cache'))

    # Create cache directory; the third positional argument to os.makedirs
    # is exist_ok=True, so a concurrent creation does not raise.
    if not _path.exists(self._path):
        _makedirs(self._path, 0o755, True)
def _store(self, pool: str, key: str, value: _Any, ttl: int = None) -> _Any: """Store an item into the pool """ f_path = self._get_key_path(pool, key) d_path = _path.dirname(f_path) if not _path.exists(d_path): _makedirs(d_path, 0o755) with open(f_path, 'wb') as f: now = _time() f.write(_pickle_dump({'k': key, 'c': now, 't': ttl, 'e': (now + ttl) if ttl else None, 'v': value})) return value
def mk_tmp_dir(suffix: str = None, prefix: str = None, subdir: str = None) -> str:
    """Create a temporary directory under the application's tmp location.

    :param suffix: optional directory name suffix
    :param prefix: optional directory name prefix
    :param subdir: optional sub-directory under the tmp root
    :return: absolute path of the newly created directory
    :raises RuntimeError: when the 'paths.tmp' registry value is not set
    """
    from pytsite import reg

    tmp_root = reg.get('paths.tmp')
    if not tmp_root:
        raise RuntimeError('Cannot determine temporary directory location')

    if subdir:
        tmp_root = _path.join(tmp_root, subdir)

    # BUG FIX: exist_ok=True closes the race window between the old
    # `if not exists` check and directory creation under concurrent callers.
    _makedirs(tmp_root, 0o755, exist_ok=True)

    return _mkdtemp(suffix, prefix, tmp_root)
def mk_tmp_file(suffix: str = None, prefix: str = None, subdir: str = None, text: bool = False) -> _Tuple[int, str]:
    """Create temporary file

    Returns tuple of two items: file's descriptor and absolute path.

    :param suffix: optional file name suffix
    :param prefix: optional file name prefix
    :param subdir: optional sub-directory under the tmp root
    :param text: open the file in text mode instead of binary
    :raises RuntimeError: when the 'paths.tmp' registry value is not set
    """
    from pytsite import reg

    tmp_dir = reg.get('paths.tmp')
    if not tmp_dir:
        raise RuntimeError('Cannot determine temporary directory location')

    if subdir:
        tmp_dir = _path.join(tmp_dir, subdir)

    # BUG FIX: exist_ok=True closes the race window between the old
    # `if not exists` check and directory creation under concurrent callers.
    _makedirs(tmp_dir, 0o755, exist_ok=True)

    return _mkstemp(suffix, prefix, tmp_dir, text)
def report_to_json(self, results):
    """Write assessment results as a JSON report file.

    The report is written to
    ``<OUTPUT_PATH>/<alias>.<assessment-type>.json`` and contains the app
    name, the assessment type and one entry per issue in *results* (each
    issue must expose ID, ISSUE_TITLE, FINDINGS and DETAILS attributes).

    :param results: iterable of issue objects to serialize
    """
    filename = self.ALIAS.replace(' ', '.').lower()
    alias = '{type} {name}'.format(type=self.ASESSMENT_TYPE, name=self.ALIAS)

    # create output dir if it doesn't exist; exist_ok=True also closes the
    # race between the old exists() check and the creation call
    _makedirs(self.OUTPUT_PATH, exist_ok=True)

    with open('{output}/{name}.{type}.json'.format(output=self.OUTPUT_PATH, name=filename, type=self.ASESSMENT_TYPE.lower()), 'w') as f:
        rdict = {
            'app-name': alias,
            'type': self.ASESSMENT_TYPE,
            'issues': [
                {
                    'id': issue.ID,
                    'title': issue.ISSUE_TITLE,
                    'findings': issue.FINDINGS,
                    'details': issue.DETAILS
                } for issue in results
            ]}
        f.write(dumps(rdict))
def calculate_unhashed_fps(self,draw_substructures=False,image_directory='./images_substructures'):
    """Compute unhashed (substructure-key-indexed) Morgan fingerprints for self.mols.

    Fills ``self.fps_unhashed_binary`` and ``self.fps_unhashed_counts``
    (molecules x substructure-key matrices), and records bookkeeping in
    ``self.substructure_dictionary`` (key -> list of molecule indices),
    ``self.substructures_smiles`` and ``self.substructure_ids``.

    NOTE(review): Python 2-only source (``print`` statements,
    ``dict.iteritems``, indexing into ``dict.values()``).

    :param draw_substructures: when True, write a PDF depiction for each
        newly seen substructure into *image_directory*
    :param image_directory: output directory for the PDF depictions
    """
    # get the dictionary for the substructures
    idxs = []
    substr_ids = []
    counts=[]
    substructure_dictionaries = []
    for mol_index,mol in enumerate(self.mols):
        info={}
        fp = _GetMorganFingerprint(mol,radius=self.max_radius,bitInfo=info)
        # Keep only bits whose environment radius is in self.radii;
        # v[0] is the first (atom, radius) pair reported for bit k.
        substructure_dictionary = {k:mol_index for k,v in info.iteritems() if v[0][1] in self.radii}
        substructure_dictionaries.append({k:mol_index for k,v in info.iteritems() if v[0][1] in self.radii})
        substr_ids.append(substructure_dictionary.keys())
        idxs.append([mol_index]*len(substructure_dictionary.keys()))
        # Occurrence count of each kept bit within this molecule
        counts.append([ len(info.values()[x]) for x in _arange(0,len(info)) if info.values()[x][0][1] in self.radii])
        # get the smiles for the substructures
        amap = {}
        substructures_smiles = {k:[_MolToSmiles(_PathToSubmol(mol,_FindAtomEnvironmentOfRadiusN(mol,v[0][1],v[0][0]),atomMap=amap))] for k,v in info.iteritems() if v[0][1] in self.radii}
        self.substructures_smiles.update(substructures_smiles)
        # generate the images for the substructures if required..
        if draw_substructures:
            if not _exists(image_directory):
                _makedirs(image_directory)
            for k,v in info.iteritems():
                if k not in self.substructure_dictionary.keys() and v[0][1] in self.radii:
                    image_name="%s/Molecule_%d_substr_%d.pdf"%(image_directory,mol_index,k)
                    env=_FindAtomEnvironmentOfRadiusN(mol,v[0][1],v[0][0])
                    amap={}
                    submol=_PathToSubmol(mol,env,atomMap=amap)
                    # NOTE(review): this draws the whole molecule with the
                    # substructure atoms highlighted, not `submol` (which is
                    # otherwise unused) -- confirm this is intended.
                    _MolToFile(mol,image_name,size=(300,300),wedgeBonds=True,kekulize=True,highlightAtoms=amap.keys())
    #self.substructure_dictionary = self._combine_dicts(substructure_dictionary,self.substructure_dictionary)
    # Merge the per-molecule dictionaries into the instance-level one:
    # substructure key -> deduplicated list of molecule indices containing it.
    for d in substructure_dictionaries:
        for k, v in d.iteritems():
            l=self.substructure_dictionary.setdefault(k,[])
            if v not in l:
                l.append(v)
    idxs = _array([val for sublist in idxs for val in sublist])
    counts = _array([val for sublist in counts for val in sublist])
    substr_ids_flattened = [val for sublist in substr_ids for val in sublist]
    substr_ids = _array(substr_ids_flattened)
    self.substructure_ids = substr_ids
    # Columns of the fingerprint matrices: either the reference key set or
    # (fallback) every substructure key seen in the input molecules.
    if len(self.reference_substructure_keys)==0:
        print "No input set of keys for the substructures. \nThus, the substructures present in the input molecules will be considered for the calculation of unhashed fingerprints."
        columns = _array(list(set(self.substructure_dictionary.keys())))
        columns = _sort(columns)
        self.columns_unhashed = columns
        dimensionality_unhashed = len(columns)
    else:
        columns = _array(self.reference_substructure_keys)
        columns = _sort(columns)
        self.columns_unhashed = columns
        dimensionality_unhashed = len(columns)
    fps_unhashed_binary = _zeros((len(self.mols),dimensionality_unhashed), dtype=int)
    fps_unhashed_counts = _zeros((len(self.mols),dimensionality_unhashed), dtype=int)
    # For each observed substructure id, its column position in `columns`
    # (an empty result where the id is absent from the reference columns).
    mapping = _array([(substr_ids[x]==columns).nonzero() for x in _arange(0,len(substr_ids))])
    mapping = mapping.flatten()
    # Drop entries whose substructure id has no column in the reference set
    idxs = _array([idxs[x] for x in _arange(0,len(mapping)) if mapping[x].size != 0])
    counts = _array([counts[x] for x in _arange(0,len(mapping)) if mapping[x].size != 0])
    mapping = _array([mapping[x] for x in _arange(0,len(mapping)) if mapping[x].size != 0])
    if len(mapping) == 0:
        print "There is no intersection between the substructures \n(i)provided in the reference key set, and\n(ii) the substructures found in the input molecules."
        return
    fps_unhashed_binary[idxs,mapping] = _ones(len(mapping))
    fps_unhashed_counts[idxs,mapping] = counts
    self.fps_unhashed_binary = fps_unhashed_binary
    self.fps_unhashed_counts = fps_unhashed_counts
import errno as _errno
from os import makedirs as _makedirs
from os import path as _path

from simple_settings import settings as _settings

if __name__ == "__main__":
    with open(_settings.CSV_FILE) as csv_file:
        # Make destination directory containing all the svg files:
        try:
            _makedirs(_settings.DEST_GENERATED_FOLDER)
        except OSError as osError:
            # BUG FIX: previously ANY OSError was silently swallowed here
            # (permission denied, read-only fs, ...); per the original
            # intent, only an already-existing directory is acceptable.
            if osError.errno != _errno.EEXIST:
                raise
            # Already exist: OK

        # Get the content of source files:
        with open(_settings.SVG_FILE_REGULAR) as svg_file_regular,\
                open(_settings.SVG_FILE_LIFE_TIME) as svg_file_life_time:
            svg_content_regular, svg_content_life_time = svg_file_regular.read(
            ), svg_file_life_time.read()

        for index, line in enumerate(csv_file):
            # Read names and emails:
            line = line.strip()
            if line:
                name, email, member_type = line.split(",")
                # member_type selects the template: 0 = regular, 1 = lifetime
                assert member_type in ("0", "1")
                member_type = int(member_type)
                # Select content to use:
                svg_content = (svg_content_regular,
                               svg_content_life_time)[member_type]
def makedirs(d):
    """Create directory *d* (parents included); an existing directory is OK.

    :param d: directory path to create
    :return: the result of os.makedirs (None), or None when *d* already exists
    """
    try:
        return _makedirs(d)
    except FileExistsError:
        # BUG FIX: the old bare `except OSError: pass` also hid real
        # failures (permission denied, read-only fs, path is a file on some
        # calls); only an already-existing path is tolerated now.
        pass
def makedirs(path):
    # Delegate to os.makedirs, treating an already-existing path as success.
    try:
        return _makedirs(path)
    except FileExistsError:
        return None
def makedirs(dirpath):
    """Create *dirpath* (parents included) if nothing exists at that path.

    :param dirpath: directory path to create
    """
    if not exists(dirpath):
        try:
            _makedirs(dirpath)
        except FileExistsError:
            # BUG FIX: tolerate losing the race between the exists() check
            # and creation (another process/thread created it first).
            pass
def os_makedirs(path, mode=0777):
    # Recursively create *path* after normalising it for the platform.
    # uni() presumably coerces the path to unicode and longpathify() applies
    # the Windows long-path prefix -- TODO confirm against their definitions.
    # NOTE(review): `os._makedirs` is not a standard attribute of the os
    # module; this relies on it being injected/monkey-patched elsewhere --
    # verify, otherwise this should likely be `os.makedirs`.
    # The 0777 octal literal makes this Python 2-only source.
    return os._makedirs(longpathify(uni(path)), mode)
__author__ = 'Oleksandr Shepetko'
__email__ = '*****@*****.**'
__license__ = 'MIT'

import logging as _logging
from os import path as _path, makedirs as _makedirs
from datetime import datetime as _datetime
from pytsite import reg as _reg, cleanup as _cleanup

# Log file lives under the registry-configured 'paths.log' directory and is
# named '<env.type>-YYYYMMDD.log', so a fresh file is used each day.
_log_dir = _reg.get('paths.log')
_log_path = _path.join(_log_dir, _datetime.now().strftime('{}-%Y%m%d.log'.format(_reg.get('env.type'))))
# Verbosity follows the registry 'debug' flag
_log_level = _logging.DEBUG if _reg.get('debug') else _logging.INFO

# Ensure the log directory exists; the third positional argument to
# os.makedirs is exist_ok=True.
if not _path.exists(_log_dir):
    _makedirs(_log_dir, 0o755, True)

# Create logger
_logger = _logging.getLogger(_reg.get('env.name', 'default'))
_logger.setLevel(_log_level)

# Setup handler; debug format additionally records process and thread ids
_handler = _logging.FileHandler(_log_path, encoding='utf-8')
if _log_level == _logging.DEBUG:
    fmt = '%(asctime)s %(levelname)7s %(process)d:%(thread)d %(message)s'
else:
    fmt = '%(asctime)s %(levelname)7s %(message)s'
_handler.setFormatter(_logging.Formatter(fmt))
_handler.setLevel(_log_level)
_logger.addHandler(_handler)
def calculate_unhashed_fps(self,
                           draw_substructures=False,
                           image_directory='./images_substructures'):
    """Compute unhashed (substructure-key-indexed) Morgan fingerprints for self.mols.

    Fills ``self.fps_unhashed_binary`` and ``self.fps_unhashed_counts``
    (molecules x substructure-key matrices) and records bookkeeping in
    ``self.substructure_dictionary``, ``self.substructures_smiles`` and
    ``self.substructure_ids``.

    NOTE(review): mixes ``print()`` calls with Python 2-only constructs
    (``dict.iteritems``, indexing into ``dict.values()``) -- this source
    only runs on Python 2.

    :param draw_substructures: when True, write a PDF depiction for each
        newly seen substructure into *image_directory*
    :param image_directory: output directory for the PDF depictions
    """
    # get the dictionary for the substructures
    idxs = []
    substr_ids = []
    counts = []
    for mol_index, mol in enumerate(self.mols):
        info = {}
        fp = _GetMorganFingerprint(mol, radius=self.max_radius, bitInfo=info)
        # Keep only bits whose environment radius is in self.radii;
        # v[0] is the first (atom, radius) pair reported for bit k.
        substructure_dictionary = {
            k: [mol_index]
            for k, v in info.iteritems() if v[0][1] in self.radii
        }
        substr_ids.append(substructure_dictionary.keys())
        idxs.append([mol_index] * len(substructure_dictionary.keys()))
        # Occurrence count of each kept bit within this molecule
        counts.append([
            len(info.values()[x]) for x in _arange(0, len(info))
            if info.values()[x][0][1] in self.radii
        ])

        # get the smiles for the substructures
        amap = {}
        substructures_smiles = {
            k: [
                _MolToSmiles(
                    _PathToSubmol(mol, _FindAtomEnvironmentOfRadiusN(
                        mol, v[0][1], v[0][0]),
                                  atomMap=amap))
            ]
            for k, v in info.iteritems() if v[0][1] in self.radii
        }
        self.substructures_smiles.update(substructures_smiles)

        # generate the images for the substructures if required..
        if draw_substructures:
            if not _exists(image_directory):
                _makedirs(image_directory)
            for k, v in info.iteritems():
                if k not in self.substructure_dictionary.keys(
                ) and v[0][1] in self.radii:
                    image_name = "%s/Molecule_%d_substr_%d.pdf" % (
                        image_directory, mol_index, k)
                    env = _FindAtomEnvironmentOfRadiusN(
                        mol, v[0][1], v[0][0])
                    amap = {}
                    submol = _PathToSubmol(mol, env, atomMap=amap)
                    # NOTE(review): draws the whole molecule with the
                    # substructure atoms highlighted, not `submol` (which is
                    # otherwise unused) -- confirm this is intended.
                    _MolToFile(mol,
                               image_name,
                               size=(300, 300),
                               wedgeBonds=True,
                               kekulize=True,
                               highlightAtoms=amap.keys())

        # Merge this molecule's keys into the instance-level dictionary
        self.substructure_dictionary = self._combine_dicts(
            substructure_dictionary, self.substructure_dictionary)

    idxs = _array([val for sublist in idxs for val in sublist])
    counts = _array([val for sublist in counts for val in sublist])
    substr_ids_flattened = [
        val for sublist in substr_ids for val in sublist
    ]
    substr_ids = _array(substr_ids_flattened)
    self.substructure_ids = substr_ids

    # Columns of the fingerprint matrices: either the reference key set or
    # (fallback) every substructure key seen in the input molecules.
    if len(self.reference_substructure_keys) == 0:
        print(
            "No input set of keys for the substructures. \nThus, the substructures present in the input molecules will be considered for the calculation of unhashed fingerprints."
        )
        columns = _array(list(set(self.substructure_dictionary.keys())))
        columns = _sort(columns)
        self.columns_unhashed = columns
        dimensionality_unhashed = len(columns)
    else:
        columns = _array(list(set(self.reference_substructure_keys)))
        columns = _sort(columns)
        self.columns_unhashed = columns
        dimensionality_unhashed = len(columns)

    fps_unhashed_binary = _zeros((len(self.mols), dimensionality_unhashed),
                                 dtype=int)
    fps_unhashed_counts = _zeros((len(self.mols), dimensionality_unhashed),
                                 dtype=int)

    # removing the indices corresponding to the substructures in the test molecules not present in the references set of substructures..
    idxs = _array([
        idxs[x] for x in _arange(0, len(substr_ids))
        if substr_ids[x] in self.columns_unhashed
    ])
    counts = _array([
        counts[x] for x in _arange(0, len(substr_ids))
        if substr_ids[x] in self.columns_unhashed
    ])
    substr_ids = _array([
        substr_ids[x] for x in _arange(0, len(substr_ids))
        if substr_ids[x] in self.columns_unhashed
    ])
    # Column position of each surviving substructure id within `columns`
    mapping = _array([(substr_ids[x] == columns).nonzero()
                      for x in _arange(0, len(substr_ids))])
    mapping = mapping.flatten()

    if len(mapping) == 0:
        print(
            "There is no intersection between the substructures \n(i)provided in the reference key set, and\n(ii) the substructures found in the input molecules."
        )
        return

    fps_unhashed_binary[idxs, mapping] = _ones(len(counts))
    fps_unhashed_counts[idxs, mapping] = counts
    self.fps_unhashed_binary = fps_unhashed_binary
    self.fps_unhashed_counts = fps_unhashed_counts
def exec(self):
    """Serve a resized image, creating and caching the resized file on disk.

    Reads width/height/p1/p2/filename from the request arguments, looks up
    the image file by uid, and responds with a redirect to the final image
    URL (or to placehold.it when the source file is missing). Resized
    results are written under '<paths.static>/image/resize/...' so later
    requests hit the already-generated file.
    """
    requested_width = int(self.arg('width'))
    requested_height = int(self.arg('height'))
    p1 = self.arg('p1')
    p2 = self.arg('p2')
    filename = self.arg('filename')

    # File uid is derived from the filename without its extension
    uid = 'file_image:' + _path.splitext(filename)[0]
    try:
        img_file = _file.get(uid)  # type: _model.ImageFile
    except _file.error.FileNotFound as e:
        raise self.not_found(str(e))

    # Align side lengths and redirect: requests with non-aligned sizes are
    # permanently redirected to the canonical (aligned) URL.
    aligned_width = _api.align_image_side(
        requested_width, _api.get_image_resize_limit_width())
    aligned_height = _api.align_image_side(
        requested_height, _api.get_image_resize_limit_height())
    if aligned_width != requested_width or aligned_height != requested_height:
        redirect = _router.rule_url(
            'file_storage_odm@image', {
                'width': aligned_width,
                'height': aligned_height,
                'p1': p1,
                'p2': p2,
                'filename': filename,
            })
        return self.redirect(redirect, 301)

    # Original size
    orig_width = img_file.width
    orig_height = img_file.height
    orig_ratio = orig_width / orig_height
    need_resize = True

    # Calculate new size; a zero/empty dimension means "derive it"
    if not requested_width and not requested_height:
        # No resize needed, return original image
        resize_width = orig_width
        resize_height = orig_height
        need_resize = False
    elif requested_width and not requested_height:
        # Resize by width, preserve aspect ration
        resize_width = requested_width
        resize_height = _floor(requested_width / orig_ratio)
    elif requested_height and not requested_width:
        # Resize by height, preserve aspect ration
        resize_width = _floor(requested_height * orig_ratio)
        resize_height = requested_height
    else:
        # Exact resizing
        resize_width = requested_width
        resize_height = requested_height

    # Checking source file; fall back to a placeholder when it is gone
    storage_path = img_file.get_field('storage_path')
    if not _path.exists(storage_path):
        return self.redirect('http://placehold.it/{}x{}'.format(
            requested_width, requested_height))

    # Calculating target file location
    static_path = _path.join(_reg.get('paths.static'), 'image', 'resize',
                             str(requested_width), str(requested_height),
                             p1, p2, filename)

    # Create target directory (third positional makedirs arg is exist_ok)
    target_dir = _path.dirname(static_path)
    if not _path.exists(target_dir):
        _makedirs(target_dir, 0o755, True)

    # Generate the resized file only once; afterwards it is served as-is
    if not _path.exists(static_path):
        # Open source image
        img = _Image.open(storage_path)  # type: _Image

        # Resize
        if need_resize:
            # Crop to the target aspect ratio around the image centre first
            crop_ratio = resize_width / resize_height
            crop_width = orig_width
            crop_height = _floor(crop_width / crop_ratio)
            crop_top = _floor(orig_height / 2) - _floor(crop_height / 2)
            crop_left = 0
            if crop_height > orig_height:
                crop_height = orig_height
                crop_width = _floor(crop_height * crop_ratio)
                crop_top = 0
                crop_left = _floor(orig_width / 2) - _floor(crop_width / 2)
            crop_right = crop_left + crop_width
            crop_bottom = crop_top + crop_height
            cropped = img.crop(
                (crop_left, crop_top, crop_right, crop_bottom))
            img.close()

            # Resize
            img = cropped.resize((resize_width, resize_height),
                                 _Image.BILINEAR)

        img.save(static_path)
        img.close()

    return self.redirect(
        img_file.get_url(width=requested_width, height=requested_height))
def mkdir(path):
    """Create *path* (parents included) unless it is trivial or already present."""
    if len(path) > 1 and not _exists(path):
        _makedirs(path)
def mkdir(path):
    """Create *path* (parents included); no-op for trivial or existing paths."""
    if len(path) <= 1:
        return
    if not _exists(path):
        _makedirs(path)