def get_pattern_names(self):
    """Return available pattern names, preceded by a blank entry.

    The leading empty string gives the UI a "no pattern selected" choice.
    Both ``.lp`` and ``.txt`` pattern files are listed.
    """
    names = list_directory(paths.pattern_dir, '.lp,.txt')
    return [''] + names
def migrate_directory(root, clean=False):
    """Create a migrated directory.

    Copies every ``.py`` file found directly under *root* into a freshly
    created ``migrated`` directory via ``migrate_file``.

    :param root: directory to scan for python files
    :param clean: passed through to ``migrate_file``
    """
    dest = unique_dir(root, 'migrated')
    sources = list_directory(root, extension='.py')
    for src in sources:
        migrate_file(src, root, dest, clean)
def migrate_directory(root, clean=False):
    """Create a migrated directory.

    Every ``.py`` file directly under *root* is run through
    ``migrate_file`` into a new uniquely named ``migrated`` directory.

    :param root: directory containing the python files to migrate
    :param clean: forwarded to ``migrate_file``
    """
    destination = unique_dir(root, "migrated")
    for path in list_directory(root, extension=".py"):
        migrate_file(path, root, destination, clean)
def logging_setup(name, use_archiver=True, root=None, use_file=True, **kw):
    """Configure the root logger with a stream handler and, optionally,
    a rotating file handler.

    Any pre-existing ``<name>.current.log`` (and its rotated pieces) is
    copied aside to a uniquely numbered backup before logging starts.

    :param name: base name for the log file
    :param use_archiver: clean out old archived logs first
    :param root: log directory; defaults to ``paths.log_dir``
    :param use_file: also log to a rotating file
    """
    bdir = root if root is not None else paths.log_dir

    if use_archiver:
        # lazy import: Archiver has a circular dependency on this module
        from pychron.core.helpers.archiver import Archiver

        archiver = Archiver(archive_days=14, archive_months=1, root=bdir)
        archiver.clean()

    if use_file:
        # start a fresh log file, preserving the previous run as a backup
        logname = '{}.current.log'.format(name)
        logpath = os.path.join(bdir, logname)
        if os.path.isfile(logpath):
            backup_logpath, _cnt = unique_path2(bdir, name, delimiter='-',
                                                extension='.log', width=5)
            shutil.copyfile(logpath, backup_logpath)
            os.remove(logpath)

            # move any rotated pieces (.log.1, .log.2, ...) next to the backup
            for pi in list_directory(bdir, filtername=logname,
                                     remove_extension=False):
                _h, ext = os.path.splitext(pi)
                src = os.path.join(bdir, pi)
                shutil.copyfile(src, '{}{}'.format(backup_logpath, ext))
                os.remove(src)

    root_logger = logging.getLogger()
    root_logger.setLevel(gLEVEL)

    handlers = [logging.StreamHandler()]
    if use_file:
        handlers.append(RotatingFileHandler(logpath, maxBytes=1e7,
                                            backupCount=50))

    fmt = logging.Formatter(gFORMAT)
    for handler in handlers:
        handler.setLevel(gLEVEL)
        handler.setFormatter(fmt)
        root_logger.addHandler(handler)
def logging_setup(name, **kw):
    """Configure the root logger with stream and rotating-file handlers,
    backing up the previous run's log and archiving old log files.
    """
    # lazy import: Archiver has a circular dependency on this module
    from pychron.core.helpers.archiver import Archiver

    # make sure we have a log directory
    bdir = os.path.join(paths.root, 'logs')
    if not os.path.isdir(bdir):
        os.mkdir(bdir)

    # start a fresh log file, preserving the previous run as a backup
    logname = '{}.current.log'.format(name)
    logpath = os.path.join(bdir, logname)
    if os.path.isfile(logpath):
        backup_logpath, _cnt = unique_path(bdir, name, extension='log')
        shutil.copyfile(logpath, backup_logpath)
        os.remove(logpath)

        # move any rotated pieces (.log.1, ...) next to the backup
        for pi in list_directory(bdir, filtername=logname,
                                 remove_extension=False):
            _h, t = os.path.splitext(pi)
            v = os.path.join(bdir, pi)
            shutil.copyfile(v, '{}{}'.format(backup_logpath, t))
            os.remove(v)

    root = logging.getLogger()
    root.setLevel(gLEVEL)

    shandler = logging.StreamHandler()
    rhandler = RotatingFileHandler(logpath, maxBytes=1e7, backupCount=5)
    for hi in (shandler, rhandler):
        hi.setLevel(gLEVEL)
        hi.setFormatter(logging.Formatter(gFORMAT))
        root.addHandler(hi)

    # archive logs older than 1 month
    a = Archiver(archive_days=30, archive_months=6, root=bdir)
    a.clean(False)
def _get_templates(self):
    """Return selectable template names for the dropdown.

    Ensures ``self.template`` refers to an entry actually present in the
    template directory, falling back to 'Step Heat Template' otherwise.
    """
    temps = list_directory(paths.incremental_heat_template_dir, '.txt')
    if self.template in temps:
        # rebind to the directory-listing's copy of the name
        idx = temps.index(self.template)
        self.template = temps[idx]
    else:
        self.template = 'Step Heat Template'

    return ['Step Heat Template', 'None', ''] + temps
def _get_templates(self):
    """Build the list of template choices.

    ``self.template`` is normalized to an existing directory entry, or
    reset to the "Step Heat Template" default when no match exists.
    """
    available = list_directory(paths.incremental_heat_template_dir, ".txt")
    try:
        # rebind to the directory-listing's copy of the name
        self.template = available[available.index(self.template)]
    except ValueError:
        self.template = "Step Heat Template"

    return ["Step Heat Template", "None", ""] + available
def _get_patterns(self):
    """Assemble the pattern dropdown: header, remote patterns, then
    locally available ``.lp`` pattern files."""
    local = list_directory(paths.pattern_dir, ".lp")
    header = ["Pattern", "None", LINE_STR, "Remote Patterns"]
    return header + self.remote_patterns + [LINE_STR, "Local Patterns"] + local
def logging_setup(name, use_archiver=True, use_file=True, **kw):
    """Configure the root logger with a stream handler and, optionally,
    a rotating file handler under ``paths.log_dir``.

    A pre-existing ``<name>.current.log`` (plus any rotated pieces) is
    copied to a uniquely numbered backup before the new run starts.

    :param name: base name for the log file
    :param use_archiver: clean out old archived logs first
    :param use_file: also log to a rotating file
    """
    bdir = paths.log_dir

    if use_archiver:
        # lazy import: Archiver has a circular dependency on this module
        from pychron.core.helpers.archiver import Archiver

        archiver = Archiver(archive_days=14, archive_months=1, root=bdir)
        archiver.clean()

    if use_file:
        # start a fresh log file, preserving the previous run as a backup
        logname = '{}.current.log'.format(name)
        logpath = os.path.join(bdir, logname)
        if os.path.isfile(logpath):
            backup_logpath, _cnt = unique_path2(bdir, name, delimiter='-',
                                                extension='.log', width=5)
            shutil.copyfile(logpath, backup_logpath)
            os.remove(logpath)

            # move any rotated pieces (.log.1, .log.2, ...) next to the backup
            for pi in list_directory(bdir, filtername=logname,
                                     remove_extension=False):
                _h, ext = os.path.splitext(pi)
                src = os.path.join(bdir, pi)
                shutil.copyfile(src, '{}{}'.format(backup_logpath, ext))
                os.remove(src)

    root_logger = logging.getLogger()
    root_logger.setLevel(gLEVEL)

    handlers = [logging.StreamHandler()]
    if use_file:
        handlers.append(RotatingFileHandler(logpath, maxBytes=1e7,
                                            backupCount=5))

    for handler in handlers:
        handler.setLevel(gLEVEL)
        handler.setFormatter(logging.Formatter(gFORMAT))
        root_logger.addHandler(handler)
def get_sensitivities(self):
    """Load sensitivity records for every spectrometer.

    Scans ``<meta_root>/spectrometers`` for ``*.sens.json`` files and
    returns ``{spectrometer_name: records}``. Each record's
    ``create_date`` string is parsed into a ``datetime`` in place.
    """
    root = os.path.join(paths.meta_root, 'spectrometers')
    specs = {}
    for filename in list_directory(root):
        if not filename.endswith('.sens.json'):
            continue

        name = filename.split('.')[0]
        records = dvc_load(os.path.join(root, filename))
        specs[name] = records
        # deserialize timestamps; skip empty/falsy values
        for record in records:
            if record['create_date']:
                record['create_date'] = datetime.strptime(record['create_date'],
                                                          DATE_FORMAT)
    return specs
def get_sensitivities(self):
    """Return ``{spectrometer_name: sensitivity_records}``.

    Reads every ``*.sens.json`` under ``<meta_root>/spectrometers`` and
    converts each record's ``create_date`` string into a ``datetime``.
    """
    spec_dir = os.path.join(paths.meta_root, 'spectrometers')
    result = {}
    sens_files = [f for f in list_directory(spec_dir)
                  if f.endswith('.sens.json')]
    for fname in sens_files:
        key = fname.split('.')[0]
        obj = dvc_load(os.path.join(spec_dir, fname))
        # parse serialized timestamps in place, leaving falsy ones alone
        for rec in obj:
            if rec['create_date']:
                rec['create_date'] = datetime.strptime(rec['create_date'],
                                                       DATE_FORMAT)
        result[key] = obj
    return result
def _check_similarity(self):
    """Return the names of templates similar to this one.

    Loads every other ``.txt`` template from the incremental-heat
    template directory and compares it via ``_calculate_similarity``.
    Template files that fail to load are deleted.

    :return: list of similar template file names
    """
    sims = []
    temps = list_directory(paths.incremental_heat_template_dir,
                           extension='.txt')
    for ti in temps:
        if ti == self.name:
            # never compare a template against itself
            continue

        t = IncrementalHeatTemplate()
        p = os.path.join(paths.incremental_heat_template_dir, ti)
        try:
            t.load(p)
        except Exception:
            # was ``except BaseException``, which also swallowed
            # KeyboardInterrupt/SystemExit and deleted the file anyway;
            # only genuine load errors should trigger removal
            self.debug('invalid template {}. removing this file'.format(p))
            os.remove(p)
            continue

        e = self._calculate_similarity(t)
        if e < 10:  # similarity threshold: smaller error == more similar
            sims.append(ti)
    return sims
def _get_truncations(self):
    """Return truncation names (a blank 'none' entry first).

    Lists ``.yaml`` files in the truncation directory with extensions
    stripped.
    """
    names = list_directory(paths.truncation_dir, ".yaml",
                           remove_extension=True)
    return [""] + names
def get_pattern_names(self):
    """Return the names of all ``.lp`` pattern files."""
    pattern_root = paths.pattern_dir
    return list_directory(pattern_root, extension='.lp')
def _names_default(self):
    """Default names: files in the options directory, extensions stripped."""
    options_root = self.options_path()
    return list_directory(options_root, remove_extension=True)
def _get_truncations(self):
    """List available truncations, preceded by an empty 'none' entry."""
    found = list_directory(paths.truncation_dir, '.yaml',
                           remove_extension=True)
    return [''] + found
def _get_patterns(self):
    """Build the pattern dropdown: header, remote patterns, separator,
    then local ``.lp`` pattern files."""
    local = list_directory(paths.pattern_dir, '.lp')
    items = ['Pattern', 'None', LINE_STR, 'Remote Patterns']
    items += self.remote_patterns
    items += [LINE_STR, 'Local Patterns']
    items += local
    return items
def get_load_holders(self):
    """Return load-holder names: ``.txt`` files (extensions stripped)
    under ``<meta_root>/load_holders``."""
    holder_dir = os.path.join(paths.meta_root, 'load_holders')
    return list_directory(holder_dir, extension='.txt',
                          remove_extension=True)
def get_comment_templates():
    """Return every entry in the comment-templates directory."""
    return list_directory(paths.comment_templates)
def get_pattern_names(self):
    """Return ``.lp``/``.txt`` pattern names with a leading blank entry
    (the blank acts as the 'no pattern' choice)."""
    pattern_root = paths.pattern_dir
    found = list_directory(pattern_root, '.lp,.txt')
    return [''] + found
def _notes_templates_default(self):
    """Default note templates: a blank entry plus every file in the
    template directory."""
    templates = list_directory(paths.template_dir)
    return [''] + templates
def get_pattern_names(self):
    """List pattern file names (``.lp`` and ``.txt``), with an empty
    first entry meaning 'no pattern'."""
    listing = list_directory(paths.pattern_dir, ".lp,.txt")
    return [""] + listing