def options(key, default=None):
    global _configuration

    if _configuration is None:
        find_configuration()

    conf = _configuration
    keys = key.split('.')

    # Environment variables take precedence: key `a.b.c` maps to `TRACK_A_B_C`
    env_key = key.replace('.', '_').upper()
    env_key = f'TRACK_{env_key}'
    env_override = os.environ.get(env_key)

    if env_override is not None:
        warning(f'Found ENV override for {env_key}')
        return env_override

    # Walk the nested configuration dictionary
    for k in keys:
        if conf is None:
            break
        conf = conf.get(k)

    if conf is None and default is None:
        warning(f'No configuration found for (key: {key}) and no default was provided')

    if conf is None:
        return default

    return conf
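# A minimal, self-contained sketch of the environment override used by options()
# above: a dotted key maps to a TRACK_* variable that wins over the configuration
# file. The key 'logger.backend' and its value are hypothetical.
def _example_options_env_override():
    import os

    key = 'logger.backend'
    env_key = 'TRACK_' + key.replace('.', '_').upper()  # -> 'TRACK_LOGGER_BACKEND'

    os.environ[env_key] = 'file://results.json'
    # options('logger.backend') would now return 'file://results.json'
    # without consulting the configuration file.
    return os.environ.get(env_key)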
def new_trial(self, trial: Trial, auto_increment=False):
    if trial.uid in self.storage.objects:
        if not auto_increment:
            return None

        # The trial already exists: bump its revision past the highest known one
        trials = self.get_trial(trial)

        max_rev = 0
        for t in trials:
            max_rev = max(max_rev, t.revision)

        warning(f'Trial was already completed. Increasing revision number (rev={max_rev + 1})')
        trial.revision = max_rev + 1
        trial._hash = None

    self.storage.objects[trial.uid] = trial
    self.storage.trials.add(trial.uid)

    if trial.project_id is not None:
        project = self.storage.objects.get(trial.project_id)

        if project is not None:
            project.trials.add(trial)
        elif self.strict:
            raise RuntimeError(f'Project (id: {trial.project_id}) was not found')
        else:
            warning('Orphan trial')

    if trial.group_id is not None:
        group = self.storage.objects.get(trial.group_id)

        if group is not None:
            group.trials.add(trial.uid)
        elif self.strict:
            raise RuntimeError(f'Group (id: {trial.group_id}) was not found')

    trial.metadata['_update_count'] = 0
    return trial
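# Hedged sketch of the auto_increment contract of new_trial() above: registering a
# trial whose uid already exists returns None unless auto_increment is set, in which
# case the trial is stored again with revision = max(existing revisions) + 1.
# `backend` and `trial` are hypothetical placeholders for a local backend instance
# and a Trial built by the caller.
def _example_new_trial_auto_increment(backend, trial):
    stored = backend.new_trial(trial)                         # first registration
    duplicate = backend.new_trial(trial)                      # same uid -> None
    retried = backend.new_trial(trial, auto_increment=True)   # revision bumped
    return stored, duplicate, retried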
def load_database(json_name):
    global _print_warning_once

    if json_name is None:
        return LocalStorage()

    if not os.path.exists(json_name):
        if json_name not in _print_warning_once:
            warning(f'Local Storage was not found at {json_name}')
            _print_warning_once.add(json_name)
        return LocalStorage(target_file=json_name)

    with open(json_name, 'r') as file:
        objects = json.load(file)

    db = dict()
    projects = set()
    project_names = dict()
    groups = set()
    group_names = dict()
    trials = set()
    trial_names = dict()

    for item in objects:
        obj = from_json(item)

        if obj.uid in db:
            raise RuntimeError('Should be unreachable!')

        db[obj.uid] = obj

        if isinstance(obj, Project):
            projects.add(obj.uid)

            if obj.name in project_names:
                error('Non unique project names are not supported')

            if obj.name is not None:
                project_names[obj.name] = obj.uid

            for trial in obj.trials:
                db[trial.uid] = trial
                trials.add(trial.uid)

            for group in obj.groups:
                db[group.uid] = group
                groups.add(group.uid)

        elif isinstance(obj, Trial):
            trials.add(obj.uid)
            if obj.name is not None:
                trial_names[obj.name] = obj.uid

        elif isinstance(obj, TrialGroup):
            groups.add(obj.uid)
            if obj.name is not None:
                group_names[obj.name] = obj.uid

    return LocalStorage(json_name, db, projects, groups, trials,
                        project_names, group_names, trial_names)
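# Hedged usage sketch for load_database() above, assuming it is in scope; the path
# 'results.json' is hypothetical. A None path yields an empty in-memory LocalStorage,
# while a missing file warns once per path and returns a storage bound to that file.
def _example_load_database():
    in_memory = load_database(None)
    bound = load_database('results.json')  # warns only the first time the path is missing
    return in_memory, bound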
def _load_config_file(file):
    global _configuration
    global _warning_was_printed

    if file is None:
        if not _warning_was_printed:
            warning('No configuration file found')
            _warning_was_printed = True
        return

    with open(file, 'r') as cfile:
        _configuration = json.load(cfile)
def __getattr__(self, item):
    """Fall back to the backend logger's attributes when this wrapper does not define them"""
    if is_delayed_call(self.trial):
        warning('Creating a trial without parameters!')
        self.logger = self.trial()
        self.trial = self.logger.trial

    # Look for the attribute in the top level logger
    if hasattr(self.logger, item):
        return getattr(self.logger, item)

    raise AttributeError(item)
def get_protocol(backend_name):
    """Instantiate the protocol matching a `proto://arg` backend URI"""
    arguments = parse_uri(backend_name)

    log = _protocols.get(arguments['scheme'])

    if log is None:
        warning(f'Logger (backend: {backend_name}) was not found!')
        log = _protocols.get('__default__')

    if log is make_local:
        debug('return local protocol')
        return log(backend_name)
    else:
        debug('return multiplexed protocol')
        return ProtocolMultiplexer(
            # Make a file Protocol to log everything in memory as well as remotely
            make_local('file:', strict=False, eager=False),
            log(backend_name))
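# Hedged sketch of the dispatch done by get_protocol() above: the URI scheme picks the
# protocol, unknown schemes fall back to '__default__', and non-local backends are
# wrapped in a ProtocolMultiplexer next to a local file protocol. Both URIs, and the
# assumption that a 'cometml' scheme is registered in _protocols, are illustrative only.
def _example_get_protocol():
    local = get_protocol('file://results.json')           # local protocol, no multiplexing
    remote = get_protocol('cometml://workspace/project')  # remote protocol + local file copy
    return local, remote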
def __init__(self, location, addrs, join=None, clean_on_exit=True):
    self.location = location
    logs = f'{location}/logs'
    temp = f'{location}/tmp'
    external = f'{location}/extern'
    store = location

    os.makedirs(logs, exist_ok=True)
    os.makedirs(temp, exist_ok=True)
    os.makedirs(external, exist_ok=True)

    self.addrs = addrs
    self.bin = COCKROACH_BIN.get(os.name)

    if self.bin is None:
        raise RuntimeError('Your OS is not supported')

    if not os.path.exists(self.bin):
        info('Using system binary')
        self.bin = 'cockroach'
    else:
        expected_hash = COCKROACH_HASH.get(os.name)

        if compute_version([self.bin]) != expected_hash:
            warning('Binary Hashes do not match')

    self.arguments = [
        'start',
        '--insecure',
        f'--listen-addr={addrs}',
        f'--external-io-dir={external}',
        f'--store={store}',
        f'--temp-dir={temp}',
        f'--log-dir={logs}',
        f'--pid-file={location}/cockroach_pid'
    ]

    if join is not None:
        self.arguments.append(f'--join={join}')

    self.manager: Manager = Manager()
    self.properties = self.manager.dict()
    self.properties['running'] = False
    self.clean_on_exit = clean_on_exit
    self._process: Process = None
    self.cmd = None
def _fetch_objects(self, objects, query, strict=False):
    matching_objects = []

    for obj_id in objects:
        obj = self.storage.objects.get(obj_id)

        if obj is None:
            err = f'stale trial (id: {obj_id}) something is wrong'
            if strict:
                raise RuntimeError(err)
            else:
                warning(err)
            continue

        is_selected = execute_query(obj, query)
        if is_selected:
            matching_objects.append(obj)

    return matching_objects
def process_args(self, args, cache=None):
    """Replace ids by their object references so the backend modifies the objects and not a copy"""
    new_args = dict()

    for k, v in args.items():
        if k == 'trial':
            if isinstance(v, str):
                # String trial ids are encoded as '<hash>_<revision>'
                hashid, rev = v.split('_')
                rev = int(rev)

                v = self.backend.get_trial(Trial(_hash=hashid, revision=rev))

                for i in v:
                    if i.revision == rev:
                        v = i
                        break
                else:
                    warning('Was not able to find the correct trial revision')
            else:
                v = from_json(v)

        elif k == 'project':
            if isinstance(v, str):
                v = self.backend.get_project(Project(name=v))
            else:
                v = from_json(v)

        elif k == 'group':
            if isinstance(v, str):
                v = self.backend.get_trial_group(TrialGroup(_uid=v))
            else:
                v = from_json(v)

        new_args[k] = v

    return new_args
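# Self-contained sketch of the trial-id convention process_args() above relies on:
# string trial ids have the form '<hash>_<revision>'. The id below is hypothetical.
def _example_split_trial_id():
    trial_id = '2f5a61c8_3'
    hashid, rev = trial_id.split('_')
    return hashid, int(rev)  # ('2f5a61c8', 3)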
def _look_for_configuration(file_name='track.config'):
    config_file = None
    paths = {
        os.path.dirname(os.path.realpath(__file__)),  # location of the current file
        os.getcwd(),                                  # current working directory
    }

    files = []
    for path in paths:
        file = f'{path}/{file_name}'
        if os.path.exists(file):
            files.append(file)
            config_file = file

    if len(files) > 1:
        warning(f'found multiple configuration files: {", ".join(files)}')
    elif config_file is not None:
        info(f'loading configuration from {config_file}')

    return config_file
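# Hedged sketch of the configuration lookup chain formed by the two helpers above:
# look for 'track.config' next to the module or in the current working directory,
# then load whichever file was found (or warn once if none was).
def _example_configuration_lookup():
    config_file = _look_for_configuration('track.config')
    _load_config_file(config_file)
    return config_file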
def commit(self, file_name_override=None, **kwargs):
    if self.path:
        with self.lock.acquire():
            self.storage.commit(file_name_override=file_name_override, **kwargs)
    else:
        warning('Path undefined!')
def new_trial(self, trial: Trial):
    warning('CometML does not expose this functionality')

def add_group_trial(self, group: TrialGroup, trial: Trial):
    warning('CometML does not expose this functionality')

def add_project_trial(self, project: Project, trial: Trial):
    warning('CometML does not expose this functionality')

def new_trial_group(self, group: TrialGroup):
    warning('CometML does not expose this functionality')

def new_project(self, project: Project):
    warning('CometML does not expose this functionality')