def main(args=None):
    """CLI entry point: run a command inside a freshly-created temp cgroup
    and report resource accounting (cpuacct/blkio/memory) for the whole
    process tree once it exits.

    Relies on module-level state set up before the fork: cg_root, cmd_pid
    (forked child), cmd_w / cmd_start_r (pipes to the child), len_fmt,
    plus helpers dev_resolve() and num_format(). Python 2 only
    (bytes.strip as str method, .encode("hex"), dict view methods).

    Returns a shell-style exit code (0 = ok, non-zero on any problem).
    """
    import argparse
    parser = argparse.ArgumentParser(
        description="Tool to measure resources consumed"
        " by a group of processes, no matter how hard they fork."
        " Does that by creating a temp cgroup and running passed command there."
    )
    parser.add_argument("cmdline", nargs="+", help="Command to run and any arguments for it.")
    parser.add_argument(
        "-g", "--cgroup", default="bench/tmp",
        metavar="{ /path | tagged-path }",
        help="Hierarchy path to create temp-cgroup under"
        ' ("/" means root cgroup, default: %(default)s).'
        " Any missing path components will be created."
        " If relative name is specified, it will be interpreted from /tagged path.",
    )
    parser.add_argument(
        "-c", "--rcs", default="cpuacct, blkio, memory",
        metavar="rc1[,rc2,...]",
        help="Comma-separated list of rc hierarchies to get metrics from (default: %(default)s)."
        " Should have corresponding path mounted under {}.".format(cg_root),
    )
    parser.add_argument(
        "-q", "--quiet", action="store_true",
        help="Redirect stderr/stdout for started pid to /dev/null."
    )
    parser.add_argument("-d", "--debug", action="store_true", help="Verbose operation mode.")
    opts = parser.parse_args(sys.argv[1:] if args is None else args)

    # Logging is configured here (not at import time) so -d can control it.
    global log
    import logging
    logging.basicConfig(level=logging.DEBUG if opts.debug else logging.INFO)
    log = logging.getLogger()

    # Check all rc tasks-file paths: build one "tasks" file path per
    # resource controller and create the temp cgroup dirs for them.
    cg_subpath = "tmp.{}".format(cmd_pid)
    cg_tasks, cg_path = OrderedDict(), join("tagged", opts.cgroup).lstrip("/")
    for rc in map(bytes.strip, opts.rcs.split(",")):
        tasks = join(cg_root, rc, cg_path, cg_subpath, "tasks")
        # Newline would corrupt the line-based protocol to the child below.
        assert "\n" not in tasks, repr(tasks)
        os.makedirs(dirname(tasks))
        assert exists(tasks), tasks
        cg_tasks[rc] = tasks

    # Append cmdline, send data to child: newline-separated tasks paths,
    # an optional "-" marker for --quiet, then the hex-encoded argv joined
    # by NULs, all prefixed with a length header (len_fmt).
    data = cg_tasks.values()
    if opts.quiet:
        data.append("-")
    data = "\n".join(it.chain(data, ["\0".join(map(lambda arg: arg.encode("hex"), opts.cmdline))]))
    cmd_w.write(struct.pack(len_fmt, len(data)) + data)
    cmd_w.flush()

    # Wait for signal to start counting: child writes "." right before exec.
    mark = cmd_start_r.read(1)
    ts0 = time()
    assert mark == ".", repr(mark)
    cmd_start_r.close()

    pid, status = os.waitpid(cmd_pid, 0)
    ts1 = time()
    err = status >> 8  # high byte = exit code (os.waitpid encoding)
    if status & 0xFF:
        # Low byte non-zero means the child died from a signal.
        print("Unclean exit of child pid due to signal: {}".format((status & 0xFF) >> 1))
        err = err or 1

    # Make sure everything finished running there: any pids still listed in
    # the tasks files are stray descendants that outlived the main process.
    leftovers = set()
    for tasks in cg_tasks.values():
        with open(tasks) as src:
            leftovers.update(map(int, src.read().splitlines()))
    if leftovers:
        print(
            "Main pid has finished, but cgroups have leftover threads"
            " still running: {}".format(", ".join(map(bytes, leftovers))),
            file=sys.stderr,
        )
        err = err or 1

    # Collect/print accounting data
    acct = OrderedDict()
    acct["cmd"] = " ".join(opts.cmdline)
    acct["wall_clock"] = "{:.3f}".format(ts1 - ts0)
    # NOTE(review): ">>" binds tighter than "&", so this is status & 0x7f,
    # unlike the parenthesized "(status & 0xFF) >> 1" used above — confirm
    # which form was intended.
    acct["exit_status"] = "{} {}".format(status >> 8, status & 0xFF >> 1)

    # Index every file in each temp cgroup dir by basename for lookup below.
    # (Re-binds cg_path from above, which is no longer needed.)
    acct_srcs = OrderedDict()
    for cg_path in map(dirname, cg_tasks.viewvalues()):
        for p in os.listdir(cg_path):
            acct_srcs[p] = join(cg_path, p)

    # Simple numeric metrics: one or more integers per file.
    acct_nums = OrderedDict(
        [
            ("cpuacct", ["usage", "usage_percpu"]),
            (
                "memory",
                [
                    "max_usage_in_bytes",
                    "memsw.max_usage_in_bytes",
                    "kmem.max_usage_in_bytes",
                    "kmem.tcp.max_usage_in_bytes",
                ],
            ),
        ]
    )
    for rc, metrics in acct_nums.viewitems():
        for p in metrics:
            p = "{}.{}".format(rc, p)
            if p not in acct_srcs:
                continue  # metric file not present on this kernel/config
            with open(acct_srcs[p]) as src:
                numbers = map(int, src.read().strip().split())
            acct[p] = " ".join(map(num_format, numbers))

    # blkio metrics: lines are either "maj:min value" or "maj:min type value"
    # (plus a "Total" summary line, which is skipped).
    for p in "time sectors io_merged io_serviced io_wait_time".split():
        p = "blkio.{}".format(p)
        try:
            src = acct_srcs[p]
        except KeyError:
            pass  # this blkio stat is not exported, skip silently
        else:
            with open(src) as src:
                src = src.read().splitlines()
            for line in src:
                line = line.split()
                if not line or line[0] == "Total":
                    continue
                t = None
                try:
                    dev, t, v = line
                except ValueError:
                    dev, v = line
                # Resolve maj:min to a device name; unknown devices dropped.
                dev = dev_resolve(*map(int, dev.split(":")))
                if not dev:
                    continue
                label = "{}[{}]".format(p, dev)
                if t:
                    label += "[{}]".format(t)
                acct[label] = num_format(int(v))

    for k, v in acct.viewitems():
        print("{}: {}".format(k, v), file=sys.stderr)

    # Cleanup tmp dirs: rmdir only works on empty cgroups, so failures here
    # mean something is still attached to them.
    leftovers = set()
    for tasks in cg_tasks.values():
        tasks_dir = dirname(tasks)
        try:
            os.rmdir(tasks_dir)
        except (OSError, IOError):
            leftovers.add(tasks_dir)
    if leftovers:
        print("Leftover cgroup dirs remaining:{}\n".format("\n ".join([""] + sorted(leftovers))), file=sys.stderr)
        err = err or 1

    return err
def run_cloners(cls, old_event, new_event, cloners, event_exists=False):
    """Validate the selected cloner set and run each cloner against new_event.

    `cloners` is a set of cloner names chosen by the user; it is mutated
    (|=) to pull in internal/dependency cloners. Each cloner's run() result
    is accumulated into shared_data, which is fed to subsequent cloners.
    Raises Exception on any invalid selection. Python 2 (iteritems/viewitems).
    """
    all_cloners = OrderedDict((name, cloner_cls(old_event))
                              for name, cloner_cls in get_event_cloners().iteritems())
    # Internal cloners must never be user-selected directly.
    if any(cloner.is_internal for name, cloner in all_cloners.iteritems() if name in cloners):
        raise Exception('An internal cloner was selected')
    if event_exists:
        # Cloning into an existing event: reject cloners that only make
        # sense for a brand-new event, or whose target data already exists.
        if any(cloner.new_event_only for name, cloner in all_cloners.viewitems() if name in cloners):
            raise Exception('A new event only cloner was selected')
        if any(cloner.has_conflicts(new_event) for name, cloner in all_cloners.viewitems() if name in cloners):
            raise Exception('Cloner target is not empty')
    # enable internal cloners that are enabled by default or required by another cloner
    cloners |= {c.name for c in all_cloners.itervalues()
                if c.is_internal and (c.is_default or c.required_by_deep & cloners)}
    # enable unavailable cloners that may be pulled in as a dependency nonetheless
    extra = {c.name for c in all_cloners.itervalues()
             if not c.is_available and c.always_available_dep and c.required_by_deep & cloners}
    cloners |= extra
    active_cloners = OrderedDict((name, cloner) for name, cloner in all_cloners.iteritems() if name in cloners)
    # Everything active must be visible (or internal) and available, except
    # the `extra` ones which were explicitly allowed in while unavailable.
    if not all((c.is_internal or c.is_visible) and c.is_available
               for c in active_cloners.itervalues() if c.name not in extra):
        raise Exception('An invisible/unavailable cloner was selected')
    # Every cloner's transitive requirements must be part of the active set.
    for name, cloner in active_cloners.iteritems():
        if not (cloners >= cloner.requires_deep):
            raise Exception('Cloner {} requires {}'.format(name, ', '.join(cloner.requires_deep - cloners)))
    shared_data = {}
    cloner_names = set(active_cloners)
    # Run in registration order; each cloner sees the shared data produced
    # by the ones that ran before it.
    for name, cloner in active_cloners.iteritems():
        shared_data[name] = cloner.run(new_event, cloner_names,
                                       cloner._prepare_shared_data(shared_data),
                                       event_exists=event_exists)
def parseOptions(snakefile, tasks):
    """Split sys.argv[1:] into per-task argument lists and parse each one.

    Supports command lines such as ``test -w build --all`` where ``-w``
    belongs only to ``test`` and ``--all`` only to ``build``.

    Returns an OrderedDict mapping each selected task name to a dict of
    its parsed options (task order preserved).
    """
    grouped = OrderedDict()
    active = None
    for token in sys.argv[1:]:
        if token in tasks:
            # A task name starts a new argument group.
            active = token
            grouped[token] = []
            continue
        assert active is not None, 'Tasks must come before args'
        grouped[active].append(token)

    parsed = OrderedDict()
    for task_name, task_args in grouped.viewitems():
        # The parser knows every option this task accepts, giving nice
        # failures and help text for bad flags.
        task_parser = buildArgParser(snakefile, task_name)
        # The key must be present even with no options, so callers can see
        # that the user selected this task at all.
        parsed[task_name] = vars(task_parser.parse_args(task_args))
    return parsed
class GlyphNameMapper(object):
    """Assigns glyph characters to friendly names.

    Known names resolve through `character_mapping`; unseen names receive
    fresh codepoints starting just past the highest value already used
    (or at USER_AREA when the mapping is empty). Python 2 (viewvalues,
    unichr).
    """

    def __init__(self, character_mapping):
        super(GlyphNameMapper, self).__init__()
        self.character_mapping = character_mapping
        # Values already taken, seeded from the supplied mapping's values.
        self.used_values = set(self.character_mapping.viewvalues())
        # name -> character assignments minted during this run.
        self.new_mappings = OrderedDict()
        # NOTE(review): next_glyph_name is an int codepoint while
        # used_values initially contains characters, so the membership test
        # below only ever matches codepoints added later — confirm intended.
        self.next_glyph_name = ord(max(self.used_values)) if len(
            self.used_values) else USER_AREA

    def advance_to_unused_next_glyph_name(self):
        # Skip forward past any value already handed out.
        while self.next_glyph_name in self.used_values:
            self.next_glyph_name += 1

    def get_glyph_name(self, friendly_name):
        """Return the character for `friendly_name`, minting one if new."""
        if friendly_name in self.character_mapping:
            return self.character_mapping[friendly_name]
        else:
            this_name = self.next_glyph_name
            self.used_values.add(this_name)
            self.advance_to_unused_next_glyph_name()
            self.new_mappings[friendly_name] = unichr(this_name)
            return self.new_mappings[friendly_name]

    def log_new_mappings_if_necessary(self):
        """Log suggested additions when names were missing from the mapping."""
        if len(self.character_mapping) and len(self.new_mappings):
            suggested_additions = '\n'.join([
                '"{}": "\\u{:x}",'.format(name, ord(char))
                for name, char in self.new_mappings.viewitems()
            ])
            log.info(
                "Character mapping was missing some characters. Suggested additions:\n{}"
                .format(suggested_additions))
def _process(self):
    """Render the AV request list, filtered/grouped by the query-string form.

    Results are grouped by event start date and ordered according to the
    form's direction; None (no filtering attempted) when the form is empty
    or invalid. Python 2 (viewitems).
    """
    form = RequestListFilterForm(request.args, csrf_enabled=False)
    results = None
    if request.args and form.validate():
        reverse = form.direction.data == 'desc'
        talks = form.granularity.data == 'talks'
        # Optional date-range bounds, widened to whole days in UTC.
        from_dt = as_utc(get_day_start(
            form.start_date.data)) if form.start_date.data else None
        to_dt = as_utc(get_day_end(
            form.end_date.data)) if form.end_date.data else None
        states = {form.state.data} if form.state.data is not None else None
        results = find_requests(talks=talks, from_dt=from_dt, to_dt=to_dt, states=states)
        if not talks:
            # Event granularity: normalize to (request, event, start_dt)
            # tuples so grouping below can index [2].
            results = [(req, req.event, req.event.start_dt) for req in results]
        results = group_list(results, lambda x: x[2].date(), itemgetter(2), sort_reverse=reverse)
        results = OrderedDict(
            sorted(results.viewitems(), key=itemgetter(0), reverse=reverse))
    return WPAudiovisualManagers.render_template('request_list.html', form=form, results=results)
class GlyphNameMapper(object):
    """Maps friendly names to glyph characters, minting new codepoints.

    Lookups hit `character_mapping` first; unknown names get the next free
    codepoint after the highest value in use (USER_AREA when empty).
    Python 2 (viewvalues, unichr).
    """

    def __init__(self, character_mapping):
        super(GlyphNameMapper, self).__init__()
        self.character_mapping = character_mapping
        # Values already taken, seeded with the mapping's characters.
        self.used_values = set(self.character_mapping.viewvalues())
        # Assignments created during this run (name -> character).
        self.new_mappings = OrderedDict()
        # NOTE(review): int codepoint vs the character strings seeded above —
        # the membership test in advance_... only matches ints added later;
        # confirm this is intended.
        self.next_glyph_name = ord(max(self.used_values)) if len(self.used_values) else USER_AREA

    def advance_to_unused_next_glyph_name(self):
        # Move past every value that has already been handed out.
        while self.next_glyph_name in self.used_values:
            self.next_glyph_name += 1

    def get_glyph_name(self, friendly_name):
        """Return the character for `friendly_name`, allocating if unseen."""
        if friendly_name in self.character_mapping:
            return self.character_mapping[friendly_name]
        else:
            this_name = self.next_glyph_name
            self.used_values.add(this_name)
            self.advance_to_unused_next_glyph_name()
            self.new_mappings[friendly_name] = unichr(this_name)
            return self.new_mappings[friendly_name]

    def log_new_mappings_if_necessary(self):
        """Suggest mapping additions for any names minted during this run."""
        if len(self.character_mapping) and len(self.new_mappings):
            suggested_additions = '\n'.join([
                '"{}": "\\u{:x}",'.format(name, ord(char))
                for name, char in self.new_mappings.viewitems()
            ])
            log.info("Character mapping was missing some characters. Suggested additions:\n{}".format(suggested_additions))
def _process(self):
    """Render the VC-assistance request list for the filter form.

    Groups matching requests by event start date; results stays None until
    the form validates. Python 2 (viewitems).
    """
    form = RequestListFilterForm(request.args, csrf_enabled=False)
    results = None
    if form.validate_on_submit():
        reverse = form.direction.data == 'desc'
        # Optional day-granular UTC bounds from the form.
        from_dt = as_utc(get_day_start(
            form.start_date.data)) if form.start_date.data else None
        to_dt = as_utc(get_day_end(
            form.end_date.data)) if form.end_date.data else None
        results = find_requests(from_dt=from_dt, to_dt=to_dt)
        # Flatten to tuples keyed by event start_dt at index 2 for grouping.
        results = [(req, req.event, req.event.start_dt, contribs, session_blocks)
                   for req, contribs, session_blocks in results]
        results = group_list(results, lambda x: x[2].date(), itemgetter(2), sort_reverse=reverse)
        results = OrderedDict(
            sorted(results.viewitems(), key=itemgetter(0), reverse=reverse))
    return WPVCAssistance.render_template(
        'request_list.html', form=form, results=results,
        action=url_for('.request_list'),
        vc_capable_rooms=get_vc_capable_rooms(),
        within_working_hours=start_time_within_working_hours)
class QueryResult(object):
    """Holds per-subset query values in insertion order.

    Each subset index maps to the raw value list it was added with;
    `flat=True` accessors collapse nesting via the module's flatten_list().
    """

    def __init__(self, path_expr=''):
        # Path expression this result was produced for (informational).
        self.path_expr = path_expr
        # Subset index -> values, preserving the order subsets were added.
        self.results = OrderedDict()

    def add_subset(self, i_subset, values):
        """Record `values` under subset index `i_subset`."""
        self.results[i_subset] = values

    def subset_indices(self):
        """All subset indices, in insertion order."""
        return list(self.results)

    def get_values(self, i_subset, flat=False):
        """Values for one subset; flattened when `flat` is true."""
        stored = self.results[i_subset]
        if flat:
            return flatten_list(stored)
        return stored

    def all_values(self, flat=False):
        """Every subset's values; each flattened when `flat` is true."""
        return ([self.get_values(i, flat=True) for i in self.subset_indices()]
                if flat else list(self.results.values()))

    def __iter__(self):
        # Iterate (index, values) pairs (py2 dict view).
        return iter(self.results.viewitems())
class FilterPipe(BCRelay):
    """Relay that filters messages through an ordered list of rules.

    Each configured rule is (compiled-regex-or-None, action, options,
    match-attr). Actions are 'allow', 'drop' or 'limit' (sliding-window
    rate limit, options parsed from "count/seconds"). Python 2 (viewitems).
    """

    def __init__(self, *argz, **kwz):
        super(FilterPipe, self).__init__(*argz, **kwz)
        self.rules = OrderedDict()
        for name, rule in self.conf.rules.viewitems():
            if 'regex' in rule:
                log.noise('Compiling filter (name: {}): {!r}'.format(name, rule.regex))
                check = re.compile(rule.regex)
            else:
                check = None # boolean rule
            # Action may carry options after a dash, e.g. "limit-5/60"
            # or "drop-nomatch".
            try:
                action, optz = rule.action.split('-', 1)
            except ValueError:
                action, optz = rule.action, list()
            else:
                if action == 'limit':
                    optz = map(int, optz.split('/'))  # [count, seconds]
                else:
                    optz = [optz]
            self.rules[name] = check, action, optz, rule.get('match')
        # Per-rule sliding windows, "already warned" markers, drop counters.
        self.rule_hits, self.rule_notes, self.rule_drops = dict(), set(), defaultdict(int)

    def dispatch(self, msg):
        """Pass msg through the rules; return it, a notice string, or None."""
        for name, (check, action, optz, attr) in self.rules.viewitems():
            # What to match: the message itself, or a formatted attribute
            # extracted from msg.data when 'match' is configured.
            try:
                msg_match = msg if not attr else (('{'+attr+'}').format(data=msg.data) or '')
            except Exception as err:
                log.noise('Filtering attr-get error ({}) for message: {!r}'.format(err, msg))
                msg_match = ''
            if not (check.search(msg_match) if check is not None else bool(msg_match)):
                # Rule did not match; 'nomatch' option inverts the action.
                if 'nomatch' in optz:
                    if action == 'allow':
                        return msg
                    elif action == 'drop':
                        return
                continue
            if action == 'limit':
                # Sliding-window rate limit: keep timestamps within the last
                # t seconds; over c hits triggers dropping.
                if name not in self.rule_hits:
                    self.rule_hits[name] = deque()
                win, ts, (c, t) = self.rule_hits[name], time(), optz
                ts_thresh = ts - t
                win.append(ts)
                while win[0] < ts_thresh:
                    win.popleft()
                rate = len(win)
                if rate > c:
                    log.noise((
                        'Rule ({}) triggering rate'
                        ' above threshold ({}/{}): {}'
                    ).format(name, c, t, rate))
                    self.rule_drops[name] += 1
                    if name not in self.rule_notes:
                        # First drop: emit a one-off notice instead of msg.
                        self.rule_notes.add(name)
                        return (
                            ' ...limiting messages matching'
                            ' filter-rule {} ({}/{}, dropped (for uptime): {})')\
                            .format(name, c, t, self.rule_drops[name])
                    else:
                        return
                # Back under threshold: clear the notice marker, pass msg.
                self.rule_notes.discard(name)
                return msg
            elif 'nomatch' not in optz:
                if action == 'allow':
                    return msg
                elif action == 'drop':
                    return
        # No rule decided; fall back to the configured default policy.
        if self.conf.policy == 'allow':
            return msg
def test_repr_recursive_values(self):
    """repr() of an OrderedDict containing its own views must not blow up."""
    od = OrderedDict()
    od[42] = od.viewvalues()
    r = repr(od)
    # Cannot perform a stronger test, as the contents of the repr
    # are implementation-dependent. All we can say is that we
    # want a str result, not an exception of any sort.
    self.assertIsInstance(r, str)
    od[42] = od.viewitems()
    r = repr(od)
    # Again.
    self.assertIsInstance(r, str)
def _process(self):
    """Render the room-assistance request list filtered by the query form.

    Requests are grouped by the date parsed from each request's
    'requested_at' field. Python 2 (viewitems).
    """
    # NOTE(review): form is built from request.args but checked with
    # validate_on_submit() (normally POST-oriented) — confirm intended.
    form = RequestListFilterForm(request.args)
    results = None
    if form.validate_on_submit():
        reverse = form.direction.data == 'desc'
        # Optional day-granular UTC bounds.
        from_dt = as_utc(get_day_start(form.start_date.data)) if form.start_date.data else None
        to_dt = as_utc(get_day_end(form.end_date.data)) if form.end_date.data else None
        results = _find_requests(from_dt=from_dt, to_dt=to_dt)
        results = group_list(results,
                             lambda req: dateutil.parser.parse(req['requested_at']).date(),
                             sort_reverse=reverse)
        results = OrderedDict(sorted(results.viewitems(), reverse=reverse))
    return WPRoomAssistance.render_template('request_list.html', form=form, results=results,
                                            parse_dt=dateutil.parser.parse)
def _process(self):
    """Render the videoconference room list filtered by the query form.

    Event VC rooms are grouped by event start date (rooms without an
    event are skipped). Python 2 (viewitems).
    """
    form = VCRoomListFilterForm(request.args, csrf_enabled=False)
    results = None
    if request.args and form.validate():
        reverse = form.direction.data == 'desc'
        # Optional day-granular UTC bounds.
        from_dt = as_utc(get_day_start(form.start_date.data)) if form.start_date.data else None
        to_dt = as_utc(get_day_end(form.end_date.data)) if form.end_date.data else None
        results = find_event_vc_rooms(from_dt=from_dt, to_dt=to_dt, distinct=True)
        results = group_list((r for r in results if r.event_new),
                             key=lambda r: r.event_new.start_dt.date(),
                             sort_by=lambda r: r.event_new.start_dt,
                             sort_reverse=reverse)
        results = OrderedDict(sorted(results.viewitems(), key=itemgetter(0), reverse=reverse))
    return WPVCService.render_template('vc_room_list.html', form=form, results=results,
                                       action=url_for('.vc_room_list'))
def _process(self):
    """Render the VC room list (legacy event API variant).

    Same flow as the event_new-based version but using the legacy
    r.event / getStartDate() accessors. Python 2 (viewitems).
    """
    form = VCRoomListFilterForm(request.args)
    results = None
    if request.args and form.validate():
        reverse = form.direction.data == 'desc'
        # Optional day-granular UTC bounds.
        from_dt = as_utc(get_day_start(form.start_date.data)) if form.start_date.data else None
        to_dt = as_utc(get_day_end(form.end_date.data)) if form.end_date.data else None
        results = find_event_vc_rooms(from_dt=from_dt, to_dt=to_dt, distinct=True)
        results = group_list((r for r in results if r.event),
                             key=lambda r: r.event.getStartDate().date(),
                             sort_by=lambda r: r.event.getStartDate(),
                             sort_reverse=reverse)
        results = OrderedDict(sorted(results.viewitems(), key=itemgetter(0), reverse=reverse))
    return WPVCService.render_template('vc_room_list.html', form=form, results=results,
                                       action=url_for('.vc_room_list'))
def test_consistency(source_file):
    """Check a VM lifecycle log for START/STOP consistency and rewrite it.

    Each line is "STATE TIMESTAMP VM_ID [HOST VCPU]". START records push
    the vm onto for_check; non-START records must match an earlier START.
    Inconsistent files are renamed to <file>_ERROR, then the consistent
    records are written back to source_file. Python 2 (has_key, viewitems).
    """
    for_check = []
    consistent_dictionary = OrderedDict()
    consistent_list = []
    can_save = False
    with open(source_file, 'r') as source:
        for operation in source:
            operation = operation.split()
            state = str(operation[0])
            timestamp = int(operation[1])
            vm_id = str(operation[2])
            if state == "START":
                host = str(operation[3])
                vcpu = str(operation[4])
                for_check.append(vm_id)
                consistent_list = [state, timestamp, vm_id, host, vcpu]
                can_save = True
            else:
                if consistent_dictionary.has_key(vm_id + "_START"):
                    consistent_list = [state, timestamp, vm_id]
                    can_save = True
                else:
                    # NOTE(review): this branch runs exactly when the
                    # "_START" key is absent, so pop() with no default
                    # raises KeyError here — likely a bug; confirm whether
                    # pop(vm_id + "_START", None) (or no pop) was intended.
                    consistent_dictionary.pop(vm_id + "_START")
                    print("Problem in ", vm_id, source_file)
                    can_save = False
                # A non-START record consumes the vm's pending START entry.
                try:
                    vmindex = for_check.index(vm_id)
                    for_check.pop(vmindex)
                except ValueError:
                    print("Problem in ", vm_id, source_file)
                    can_save = False
                    break
            if can_save:
                consistent_dictionary[vm_id+"_"+state] = consistent_list
        # Redundant inside `with` (the context manager closes the file).
        source.close()
    if for_check:
        # Unmatched STARTs remain: keep the original under an _ERROR name.
        print("Some problem on consistency", len(for_check), source_file)
        os.rename(source_file, source_file+"_ERROR")
    out = open(source_file, "w")
    for d_id, d_list in consistent_dictionary.viewitems():
        # Skip records for vms that still have an unmatched START.
        if for_check.count(d_id.split('_')[0]) == 0:
            line = ''
            for i in range(len(d_list)):
                line += str(d_list[i])+" "
            line += '\n'
            out.writelines(line)
    out.close()
def criticBot(ratings, num_critics=20, recentness=False):
    '''
    Select a set of critics among the most active users
    inject their average ratings directly into the user-item matrix

    ratings: iterable of (user, item, score) records.
    num_critics: how many critics to sample from the 2*num_critics most
        active users (random, so output is non-deterministic).
    recentness: when True, drop old ratings first via filterOutOldRatings.
    Returns a list of [criticBotId, item, mean_score] rows. Python 2 only
    (viewitems, tuple-parameter lambda).
    '''
    start = time.time()
    #print('Generating criticBot Ratings...')
    criticBotId = 1339  # synthetic user id for the injected critic ratings
    userRatings = {}
    itemCounter = Counter()
    itemRatings = Counter()
    criticBotRatings = []
    i = 0
    if recentness:
        ratings = filterOutOldRatings(ratings, 8)
    # Bucket ratings per user.
    for rating in ratings:
        if not rating[0] in userRatings:
            userRatings[rating[0]] = list()
        userRatings[rating[0]].append(rating)
    # Most active users first.
    userRatings = OrderedDict(sorted(userRatings.viewitems(), key=lambda (k,v):len(v), reverse=True))
    # Pick critic positions among the top 2*num_critics activity ranks.
    critics = random.sample(range(num_critics*2), num_critics)
    for user in userRatings:
        if i in critics:
            # Accumulate score sums and counts per item for this critic.
            for item in userRatings[user]:
                itemRatings[item[1]] += item[2]
                itemCounter[item[1]] += 1
        i += 1
        if i > max(critics):
            break
    # Per-item mean over the selected critics.
    itemRatings = {k: float(itemRatings[k])/itemCounter[k] for k in itemRatings}
    for item in itemRatings:
        criticBotRatings.append([criticBotId, item, itemRatings[item]])
    #print('criticBot used %d seconds to generate %d ratings' %(time.time()-start, len(criticBotRatings)))
    return criticBotRatings
class QueryResult(object):
    """Ordered container of per-subset query values.

    Subset indices map to the value lists they were added with; the
    `flat` accessors remove arbitrary list nesting.
    """

    def __init__(self, path_expr=''):
        # Path expression this result answers (informational only).
        self.path_expr = path_expr
        # Subset index -> values, in insertion order.
        self.results = OrderedDict()

    def add_subset(self, i_subset, values):
        """Store `values` under subset index `i_subset`."""
        self.results[i_subset] = values

    def subset_indices(self):
        """Subset indices in the order they were added."""
        return list(self.results)

    def get_values(self, i_subset, flat=False):
        """Values for one subset, flattened when `flat` is true."""
        stored = self.results[i_subset]
        if flat:
            return self.flatten_values(stored)
        return stored

    def all_values(self, flat=False):
        """Every subset's values; each list flattened when `flat` is true."""
        if not flat:
            return list(self.results.values())
        return [self.get_values(i, flat=True) for i in self.subset_indices()]

    def __iter__(self):
        # Iterate (index, values) pairs (py2 dict view).
        return iter(self.results.viewitems())

    def flatten_values(self, values):
        """
        Flatten values as a list with no nesting.

        :param values:
        :return:
        """
        flat = []
        for item in values:
            # Recurse into nested lists, keep everything else as-is.
            flat.extend(self.flatten_values(item) if isinstance(item, list) else [item])
        return flat
class SortedDotDict(object):
    """Dict-like wrapper over a SortedDict with attribute-style access.

    Delegates nearly everything to self._dict; values() filters to
    ConfigurationGroup/Value instances and returns them sorted, and
    iteration walks those filtered values (not keys). Python 2
    (viewitems/viewvalues).
    """

    def __init__(self, *args, **kwargs):
        super(SortedDotDict, self).__init__(*args, **kwargs)
        self._dict = SortedDict()

    def __contains__(self, *args, **kwargs):
        return self._dict.__contains__(*args, **kwargs)

    def __eq__(self, *args, **kwargs):
        return self._dict.__eq__(*args, **kwargs)

    def __format__(self, *args, **kwargs):
        return self._dict.__format__(*args, **kwargs)

    def __ge__(self, *args, **kwargs):
        return self._dict.__ge__(*args, **kwargs)

    def __getattr__(self, key):
        # Attribute access falls through to item lookup; any failure is
        # surfaced as AttributeError so hasattr() etc. behave normally.
        try:
            return self._dict[key]
        except:
            raise AttributeError(key)

    def __iter__(self):
        # NOTE: iterates the *filtered, sorted values* (see values()),
        # not the keys like a regular dict would.
        vals = list(self.values())
        for k in vals:
            yield k

    def __getitem__(self, key):
        return self._dict[key]

    def __setitem__(self, key, value):
        self._dict[key] = value

    def __delitem__(self, key):
        del self._dict[key]

    def keys(self):
        return list(self._dict.keys())

    def values(self):
        # Only ConfigurationGroup/Value entries, sorted.
        vals = list(self._dict.values())
        vals = [v for v in vals if isinstance(v, (ConfigurationGroup, Value))]
        vals.sort()
        return vals

    def items(self):
        return list(self._dict.items())

    def iterkeys(self):
        return iter(self._dict.keys())

    def itervalues(self):
        # Unfiltered, unlike values().
        return iter(self._dict.values())

    def iteritems(self):
        return iter(self._dict.items())

    def get(self, *args, **kwargs):
        return self._dict.get(*args, **kwargs)

    def clear(self):
        return self._dict.clear()

    def copy(self):
        # Shallow copy sharing value objects but not the dict itself.
        s = SortedDotDict()
        s._dict = self._dict.copy()
        return s

    def fromkeys(self):
        return self._dict.fromkeys()

    def has_key(self, key):
        return key in self._dict

    def pop(self, *args, **kwargs):
        return self._dict.pop(*args, **kwargs)

    def popitem(self, *args, **kwargs):
        return self._dict.popitem(*args, **kwargs)

    def setdefault(self, key, default):
        return self._dict.setdefault(key, default)

    def update(self, d):
        return self._dict.update(d)

    def viewitems(self, *args, **kwargs):
        return self._dict.viewitems(*args, **kwargs)

    def viewvalues(self, *args, **kwargs):
        return self._dict.viewvalues(*args, **kwargs)
def prepare_pickle(): result = dict() for fn in all_files: try: f = pyfits.open(fn) header = f[0].header f.close() except: continue # Objects and arcs: continue if header['IMAGETYP'].upper() == 'OBJECT' or header['IMAGETYP'].upper( ) == 'ARC': # Assuming that all the rest is what we seek for here. continue # Get header info try: k = [header[x] for x in keywords] k[0] = k[0].upper() except: print fn, header['IMAGETYP'].upper() continue k = tuple(k) # Lists or sets cannot be dictionary keys try: d = result[k[1:]] try: d[k[0]].append(fn) except: d[k[0]] = [fn] result[k[1:]] = d except: result[k[1:]] = {k[0]: [fn]} # Split by date for k, v in result.iteritems(): # for each mode c2 = dict() for kk, vv in c.iteritems(): # for each imagetype for vvv in vv: date = vvv.split('/')[-1].split('.')[0].split('-')[1] try: tmp = c2[kk] try: tmp[date].append(vvv) except: tmp[date] = [vvv] c2[kk] = tmp except: c2[kk] = {date: [vvv]} for x, y in c2.iteritems(): print x for yy, yyy in y.iteritems(): print yy, len(yyy) # Print results result = OrderedDict( sorted(result.viewitems(), key=lambda x: len(x[1]), reverse=True)) f = open('calibration_filenames.py', 'wb') dsplit = '#' + 54 * '-' + '\n' f.write('result = {\n') for k, v in result.iteritems(): line = '\n ' + str(k) + ': {\n' f.write(line) for t, names in v.iteritems(): names = sorted(names) f.write(' "%s": ["%s",\n' % (t, names[0])) for n in names[1:-1]: f.write(' "%s",\n' % n) f.write(' "%s"],\n' % names[-1]) #~ f.write(' ],\n') f.write(' },\n') f.write('}') f.close()
class ModelParams(object):
    """Hyperparameter bundle for the nucleotide conv-net trainer.

    Numeric fields are coerced to their proper types on construction;
    the inference method is resolved from nucconvmodel.methods_dict by
    key. Can round-trip to/from a JSON file. Python 2 (print statements).
    """

    def __init__(self, seq_len, num_epochs, learning_rate, batch_size,
                 keep_prob=0.5, beta1=0.9, concat_revcom_input=False,
                 inference_method_key="inferenceA", json_file=None):
        ##Training parameters
        '''
        if training_file == '' or training_file == 'None':
            self.training_file = None
        else:
            self.training_file = training_file
        if testing_file == '' or testing_file == 'None':
            self.testing_file = None
        else:
            self.testing_file = testing_file
        '''
        self.num_epochs = int(num_epochs)
        self.learning_rate = np.float32(learning_rate)
        self.seq_len = int(seq_len)
        self.batch_size = int(batch_size)
        self.keep_prob = float(keep_prob)
        self.beta1 = float(beta1)
        self.concat_revcom_input = concat_revcom_input
        self.inference_method_key = inference_method_key
        # Resolve the key to the actual inference function.
        self.inference_method = nucconvmodel.methods_dict[inference_method_key]
        #self.k_folds = int(k_folds)
        #self.test_frac = float(test_frac)
        self.populate_param_dict()
        self.json_file = json_file

    @classmethod
    def init_json(cls, json_file):
        """Alternate constructor: build a ModelParams from a JSON file."""
        print "Parsing json file", json_file
        with open(json_file, 'r') as jf:
            data = json.load(jf)
            num_epochs = int(data['num_epochs'])
            keep_prob = float(data['keep_prob'])
            #num_iterations = int(data['num_iterations'])
            learning_rate = np.float32(data['learning_rate'])
            seq_len = int(data['seq_len'])
            batch_size = int(data['batch_size'])
            beta1 = data['beta1']
            concat_revcom_input = data['concat_revcom_input']
            inference_method_key = data['inference_method_key']
        return cls(seq_len, num_epochs, learning_rate, batch_size, keep_prob,
                   beta1, concat_revcom_input, inference_method_key, json_file)

    def extract_json(self, json_file):
        """Avoid using this in favor of ModelParams.init_json(json_file)"""
        self.json_file = os.path.abspath(json_file)
        #self.json_path = os.path.dirname(os.path.abspath(self.json_filename))
        # NOTE(review): self.json_filename is never assigned anywhere in this
        # class (only self.json_file above) — these lines look like they would
        # raise AttributeError; confirm whether json_filename was renamed.
        print "Parsing json file", self.json_filename
        with open(self.json_filename, 'r') as jf:
            data = json.load(jf)
            self.num_epochs = int(data['num_epochs'])
            self.keep_prob = float(data['keep_prob'])
            #self.num_iterations = int(data['num_iterations'])
            self.learning_rate = np.float32(data['learning_rate'])
            self.seq_len = int(data['seq_len'])
            self.batch_size = int(data['batch_size'])
            #self.k_folds = data['k_folds']
            #self.test_frac = data['test_frac']
            self.beta1 = data['beta1']
            self.concat_revcom_input = data['concat_revcom_input']
            self.inference_method_key = data['inference_method_key']
            self.inference_method = nucconvmodel.methods_dict[
                self.inference_method_key]
        self.populate_param_dict()

    def populate_param_dict(self):
        """(Re)build the serializable OrderedDict view of the params."""
        self.params_dict = OrderedDict([
            ('seq_len', int(self.seq_len)),
            ('num_epochs', int(self.num_epochs)),
            ('learning_rate', float(self.learning_rate)),
            ('batch_size', int(self.batch_size)),
            ('keep_prob', float(self.keep_prob)),
            ('beta1', float(self.beta1)),
            ('concat_revcom_input', self.concat_revcom_input),
            # Stored under the key name, but the *function name* is saved.
            ('inference_method_key', self.inference_method.__name__),
        ])

    def print_param_values(self):
        print self.params_dict.values()

    def print_params(self):
        for k, v in self.params_dict.viewitems():
            print "{}:\t{}".format(k, v)
        print "\n"

    def save_as_json(self, out_file):
        """Dump the current params dict to out_file as JSON."""
        print "Saving ModelParams in", out_file
        with open(out_file, 'w') as of:
            #print self.params_dict["inference_method_key"]
            json.dump(self.params_dict, of)
class DotMap(MutableMapping, OrderedDict):
    """Ordered mapping whose keys are also reachable as attributes (d.key).

    Nested dicts (and dicts inside lists) are converted to DotMaps on
    construction; accessing a missing key auto-creates an empty DotMap.
    Everything is delegated to the backing OrderedDict in self._map.
    Python 2 API (iteritems/viewitems/etc.).
    """

    def __init__(self, *args, **kwargs):
        self._map = OrderedDict()
        if args:
            assert len(args) == 1
            d = args[0]
            if isinstance(d, dict):
                for k, v in self.__call_items(d):
                    # Deep-convert nested dicts, and dicts inside lists.
                    if isinstance(v, dict):
                        v = DotMap(v)
                    if isinstance(v, list):
                        l = []
                        for i in v:
                            n = i
                            if isinstance(i, dict):
                                n = DotMap(i)
                            l.append(n)
                        v = l
                    self._map[k] = v
        if kwargs:
            for k, v in self.__call_items(kwargs):
                self._map[k] = v

    def __call_items(self, obj):
        # Prefer the py2 lazy iterator when the object provides one.
        if hasattr(obj, 'iteritems') and ismethod(getattr(obj, 'iteritems')):
            return obj.iteritems()
        return obj.items()

    def items(self):
        return self.iteritems()

    def iteritems(self):
        return self.__call_items(self._map)

    def __iter__(self):
        return self._map.__iter__()

    def next(self):
        # NOTE(review): OrderedDict has no .next() method — this looks like
        # it would raise AttributeError if ever called; confirm it is unused.
        return self._map.next()

    def __setitem__(self, k, v):
        self._map[k] = v

    def __getitem__(self, k):
        if k not in self._map and k != IPYTHON_CANNARY:
            # automatically extend to new DotMap
            self[k] = DotMap()
        return self._map[k]

    def __setattr__(self, k, v):
        # '_map' and the IPython canary bypass the mapping and become real
        # attributes; everything else is stored as an item.
        if k in {'_map', IPYTHON_CANNARY}:
            super(DotMap, self).__setattr__(k, v)
        else:
            self[k] = v

    def __getattr__(self, k):
        # NOTE(review): '==' against a set literal is always False (compare
        # with the 'in' used by __setattr__ above) — so the first branch is
        # dead and every lookup falls through to self[k]; likely meant 'in'.
        if k == {'_map', IPYTHON_CANNARY}:
            super(DotMap, self).__getattr__(k)
        else:
            return self[k]

    def __delattr__(self, key):
        return self._map.__delitem__(key)

    def __contains__(self, k):
        return self._map.__contains__(k)

    def __str__(self):
        items = []
        for k, v in self.__call_items(self._map):
            # Guard against self-referencing maps to avoid infinite repr.
            if id(v) == id(self):
                items.append('{0}=DotMap(...)'.format(k))
            else:
                items.append('{0}={1}'.format(k, repr(v)))
        joined = ', '.join(items)
        out = '{0}({1})'.format(self.__class__.__name__, joined)
        return out

    __repr__ = __str__

    def toDict(self):
        """Recursively convert back to a plain dict (lists/tuples included)."""
        d = {}
        for k, v in self.items():
            if isinstance(v, DotMap):
                if id(v) == id(self):
                    v = d  # self-reference maps to the dict being built
                else:
                    v = v.toDict()
            elif isinstance(v, (list, tuple)):
                l = []
                for i in v:
                    n = i
                    if type(i) is DotMap:
                        n = i.toDict()
                    l.append(n)
                if isinstance(v, tuple):
                    v = tuple(l)
                else:
                    v = l
            d[k] = v
        return d

    def empty(self):
        return (not any(self))

    def values(self):
        return self._map.values()

    # ipython support
    def __dir__(self):
        return self.keys()

    @classmethod
    def parse_other(cls, other):
        # Unwrap a DotMap to its backing OrderedDict for comparisons.
        if isinstance(other, DotMap):
            return other._map
        return other

    def __cmp__(self, other):
        other = DotMap.parse_other(other)
        return self._map.__cmp__(other)

    def __eq__(self, other):
        other = DotMap.parse_other(other)
        if not isinstance(other, dict):
            return False
        return self._map.__eq__(other)

    def __ge__(self, other):
        other = DotMap.parse_other(other)
        return self._map.__ge__(other)

    def __gt__(self, other):
        other = DotMap.parse_other(other)
        return self._map.__gt__(other)

    def __le__(self, other):
        other = DotMap.parse_other(other)
        return self._map.__le__(other)

    def __lt__(self, other):
        other = DotMap.parse_other(other)
        return self._map.__lt__(other)

    def __ne__(self, other):
        other = DotMap.parse_other(other)
        return self._map.__ne__(other)

    def __delitem__(self, key):
        return self._map.__delitem__(key)

    def __len__(self):
        return self._map.__len__()

    def clear(self):
        self._map.clear()

    def copy(self):
        return DotMap(self)

    def __copy__(self):
        return self.copy()

    def __deepcopy__(self, memo=None):
        # NOTE(review): delegates to copy(), so "deep" copies are actually
        # shallow re-wraps — confirm this is intended.
        return self.copy()

    def get(self, key, default=None):
        return self._map.get(key, default)

    def has_key(self, key):
        return key in self._map

    def iterkeys(self):
        return self._map.iterkeys()

    def itervalues(self):
        return self._map.itervalues()

    def keys(self):
        return self._map.keys()

    def pop(self, key, default=None):
        return self._map.pop(key, default)

    def popitem(self):
        return self._map.popitem()

    def setdefault(self, key, default=None):
        self._map.setdefault(key, default)

    def update(self, *args, **kwargs):
        if len(args) != 0:
            self._map.update(*args)
        self._map.update(kwargs)

    def viewitems(self):
        return self._map.viewitems()

    def viewkeys(self):
        return self._map.viewkeys()

    def viewvalues(self):
        return self._map.viewvalues()

    @classmethod
    def fromkeys(cls, seq, value=None):
        d = DotMap()
        d._map = OrderedDict.fromkeys(seq, value)
        return d

    # Pickle support: the instance dict (including _map) is the full state.
    def __getstate__(self):
        return self.__dict__

    def __setstate__(self, d):
        self.__dict__.update(d)
class BaseCache(object):
    """
    BaseCache is a class that saves and operates on an OrderedDict. It has a
    certain capacity, stored in the attribute `maxsize`. Whether this
    capacity is reached, can be checked by using the boolean property
    `is_full`. To implement a custom cache, inherit from this class and
    override the methods ``__getitem__`` and ``__setitem__``. Call the
    method `sunpy.database.caching.BaseCache.callback` as soon as an item
    from the cache is removed.
    """
    # NOTE(review): `__metaclass__` only activates ABCMeta under Python 2; on
    # Python 3 the abstract-method enforcement below is inert — confirm the
    # intended interpreter version.
    __metaclass__ = ABCMeta

    def __init__(self, maxsize=float('inf')):
        # maxsize: cache capacity; float('inf') means effectively unbounded.
        self.maxsize = maxsize
        self._dict = OrderedDict()

    def get(self, key, default=None):  # pragma: no cover
        """Return the corresponding value to `key` if `key` is in the cache,
        `default` otherwise. This method has no side-effects, multiple calls
        with the same cache and the same passed key must always return the
        same value.
        """
        try:
            return self._dict[key]
        except KeyError:
            return default

    @abstractmethod
    def __getitem__(self, key):
        """abstract method: this method must be overwritten by inheriting
        subclasses. It defines what happens if an item from the cache is
        attempted to be accessed.
        """
        return  # pragma: no cover

    @abstractmethod
    def __setitem__(self, key, value):
        """abstract method: this method must be overwritten by inheriting
        subclasses. It defines what happens if a new value should be
        assigned to the given key. If the given key does already exist in
        the cache or not must be checked by the person who implements this
        method.
        """

    @abstractproperty
    def to_be_removed(self):
        """The item that will be removed on the next
        :meth:`sunpy.database.caching.BaseCache.remove` call.
        """

    @abstractmethod
    def remove(self):
        """Call this method to manually remove one item from the cache.
        Which item is removed, depends on the implementation of the cache.
        After the item has been removed, the callback method is called.
        """

    def callback(self, key, value):
        """This method should be called (by convention) if an item is
        removed from the cache because it is full. The passed key and value
        are the ones that are removed. By default this method does nothing,
        but it can be customized in a custom cache that inherits from this
        base class.
        """

    @property
    def is_full(self):
        """True if the number of items in the cache equals :attr:`maxsize`,
        False otherwise.
        """
        return len(self._dict) == self.maxsize

    # The remaining methods delegate to the underlying OrderedDict.

    def __delitem__(self, key):
        self._dict.__delitem__(key)

    def __contains__(self, key):
        return key in self._dict.keys()

    def __len__(self):
        return len(self._dict)

    def __iter__(self):
        for key in self._dict.__iter__():
            yield key

    def __reversed__(self):  # pragma: no cover
        for key in self._dict.__reversed__():
            yield key

    def clear(self):  # pragma: no cover
        return self._dict.clear()

    def keys(self):  # pragma: no cover
        return self._dict.keys()

    def values(self):  # pragma: no cover
        return self._dict.values()

    def items(self):  # pragma: no cover
        return self._dict.items()

    def iterkeys(self):  # pragma: no cover
        # Python 2 only: OrderedDict has no iterkeys() on Python 3.
        return self._dict.iterkeys()

    def itervalues(self):  # pragma: no cover
        # Python 2 only: OrderedDict has no itervalues() on Python 3.
        for value in self._dict.itervalues():
            yield value

    def iteritems(self):  # pragma: no cover
        # Python 2 only: OrderedDict has no iteritems() on Python 3.
        for key, value in self._dict.iteritems():
            yield key, value

    def update(self, *args, **kwds):  # pragma: no cover
        self._dict.update(*args, **kwds)

    def pop(self, key, default=MutableMapping._MutableMapping__marker):  # pragma: no cover
        # Reaches into MutableMapping's name-mangled sentinel so that pop()
        # without an explicit default raises KeyError like dict.pop.
        return self._dict.pop(key, default)

    def setdefault(self, key, default=None):  # pragma: no cover
        return self._dict.setdefault(key, default)

    def popitem(self, last=True):  # pragma: no cover
        return self._dict.popitem(last)

    def __reduce__(self):  # pragma: no cover
        return self._dict.__reduce__()

    def copy(self):  # pragma: no cover
        # Returns a copy of the backing OrderedDict, not a BaseCache.
        return self._dict.copy()

    def __eq__(self, other):  # pragma: no cover
        return self._dict.__eq__(other)

    def __ne__(self, other):  # pragma: no cover
        return self._dict.__ne__(other)

    def viewkeys(self):  # pragma: no cover
        # Python 2 only view method.
        return self._dict.viewkeys()

    def viewvalues(self):  # pragma: no cover
        # Python 2 only view method.
        return self._dict.viewvalues()

    def viewitems(self):  # pragma: no cover
        # Python 2 only view method.
        return self._dict.viewitems()

    @classmethod
    def fromkeys(cls, iterable, value=None):  # pragma: no cover
        # NOTE: returns a plain OrderedDict, not a BaseCache instance.
        return OrderedDict.fromkeys(iterable, value)

    def __repr__(self):  # pragma: no cover
        return '{0}({1!r})'.format(self.__class__.__name__, dict(self._dict))
class ConfigMap(MutableMapping, OrderedDict):
    """OrderedDict-backed mapping with attribute-style access.

    Missing keys auto-create nested ConfigMap instances when `_dynamic` is
    true, and values are lazily evaluated on access when `_evaluate` is true
    (see `__getitem__` / `evaluate`). The constructor recognizes the
    reserved keyword arguments `_dynamic`, `_evaluate` and `_evaluated`.
    """

    def __init__(self, *args, **kwargs):
        self._map = OrderedDict()
        # todo: simplify
        # Pop the reserved flags out of kwargs before treating the rest as
        # ordinary entries.
        self._dynamic = True
        if kwargs:
            if '_dynamic' in kwargs:
                self._dynamic = kwargs['_dynamic']
                del kwargs['_dynamic']
        self._evaluate = True
        if kwargs:
            if '_evaluate' in kwargs:
                self._evaluate = kwargs['_evaluate']
                del kwargs['_evaluate']
        self._evaluated = False
        if kwargs:
            if '_evaluated' in kwargs:
                self._evaluated = kwargs['_evaluated']
                del kwargs['_evaluated']
        if args:
            d = args[0]
            if isinstance(d, dict):
                for k, v in self.__call_items(d):
                    if isinstance(v, dict):
                        # Wrap nested dicts, propagating the flags.
                        v = ConfigMap(v, _dynamic=self._dynamic, _evaluate=self._evaluate, _evaluated=self._evaluated)
                    if type(v) is list:
                        l = []
                        for i in v:
                            n = i
                            if type(i) is dict:
                                n = ConfigMap(i, _dynamic=self._dynamic, _evaluate=self._evaluate, _evaluated=self._evaluated)
                            l.append(n)
                        v = l
                    self._map[k] = v
        if kwargs:
            # Remaining (non-reserved) kwargs become plain entries.
            for k, v in self.__call_items(kwargs):
                self._map[k] = v

    def __call_items(self, obj):
        # iteritems() on Python 2 mappings, items() otherwise.
        if hasattr(obj, 'iteritems') and ismethod(getattr(obj, 'iteritems')):
            return obj.iteritems()
        else:
            return obj.items()

    def items(self):
        return self.iteritems()

    def iteritems(self):
        return self.__call_items(self._map)

    def __iter__(self):
        return self._map.__iter__()

    def next(self):
        # Python 2 iterator-protocol shim; dicts have no next() on Python 3.
        return self._map.next()

    def __setitem__(self, k, v):
        self._map[k] = v

    def __getitem__(self, k, evaluate=None):
        """Item access; `evaluate=None` falls back to the instance flag."""
        if evaluate is None:
            evaluate = self._evaluate
        if k not in self._map:
            if k == '_ipython_canary_method_should_not_exist_':
                raise KeyError
            if self._dynamic:
                # automatically extend to new ConfigMap
                self[k] = ConfigMap()
            else:
                # todo: display full recursive path?
                raise KeyError("'%s' does not exist" % k)
        var = self._map[k]
        if evaluate:
            # NOTE(review): ConfigMethod is defined elsewhere in this module;
            # presumably a lazily-evaluated callable value — confirm.
            if isinstance(var, ConfigMethod):
                var = var.evaluate()
                # todo: return instead to avoid second config map eval?
            if isinstance(var, ConfigMap):
                var = var.evaluate()
        return var

    def __setattr__(self, k, v):
        # Internal slots go on the instance; everything else becomes a key.
        if k in ['_map', '_dynamic', '_ipython_canary_method_should_not_exist_', '_evaluate', '_evaluated']:
            super(ConfigMap, self).__setattr__(k, v)
        else:
            self[k] = v

    def __getattr__(self, k):
        if k in ['_map', '_dynamic', '_ipython_canary_method_should_not_exist_', '_evaluate', '_evaluated']:
            return self.__getattribute__(k)
        else:
            return self[k]

    def __delattr__(self, key):
        return self._map.__delitem__(key)

    def __contains__(self, k):
        return self._map.__contains__(k)

    def __str__(self):
        items = []
        for k, v in self.__call_items(self._map):
            # bizarre recursive assignment situation (why someone would do
            # this is beyond me)
            if id(v) == id(self):
                items.append('{0}=ConfigMap(...)'.format(k))
            else:
                items.append('{0}={1}'.format(k, repr(v)))
        joined = ', '.join(items)
        out = '{0}({1})'.format(self.__class__.__name__, joined)
        return out

    def __repr__(self):
        return str(self)

    def toDict(self, evaluate=None, with_hidden=True):
        """Recursively convert to a plain dict; `with_hidden=False` skips
        keys starting with '_' (unless dunder-like) or '~'."""
        if evaluate is None:
            evaluate = bool(self._evaluate)
        d = {}
        for k, v in self.items():
            if evaluate and isinstance(v, ConfigMethod):
                v = v.evaluate()
            if isinstance(v, ConfigMap):
                v = v.toDict(evaluate=evaluate, with_hidden=with_hidden) if id(v) != id(self) else d
            elif isinstance(v, list):
                v = [i.toDict(evaluate=evaluate, with_hidden=with_hidden) if isinstance(i, ConfigMap) else i for i in v]
            elif isinstance(v, tuple):
                # NOTE(review): this builds a generator, not a tuple —
                # presumably unintended asymmetry with the list branch.
                v = (i.toDict(evaluate=evaluate, with_hidden=with_hidden) if isinstance(i, ConfigMap) else i for i in v)
            if with_hidden is False \
                    and (isinstance(k, str) and ((k.startswith('_') and not k.endswith('_')) or k.startswith('~'))):
                continue
            d[k] = v
        return d

    def evaluate(self):
        """Return a fully-evaluated, frozen (_dynamic=False) copy."""
        if self._evaluated:
            return self
        # TODO: case where config method access a key of the config that is
        # just being evaluated. shouldn't give an endless loop
        # todo: make more efficient
        return ConfigMap(self.toDict(evaluate=True), _dynamic=False, _evaluated=True)

    def pprint(self, pformat='json'):
        # NOTE(review): relies on module-level `dumps` (presumably
        # json.dumps) and a module-level `pprint` callable — confirm imports.
        if pformat == 'json':
            print(dumps(self.toDict(), indent=4, sort_keys=True, default=str))
        else:
            pprint(self.toDict())

    def empty(self):
        return not any(self)

    # proper dict subclassing
    def values(self):
        return self._map.values()

    # ipython support
    def __dir__(self):
        return self.keys()

    @classmethod
    def parseOther(self, other):
        # Unwrap a ConfigMap to its backing OrderedDict for comparisons.
        if type(other) is ConfigMap:
            return other._map
        else:
            return other

    def __cmp__(self, other):
        other = ConfigMap.parseOther(other)
        return self._map.__cmp__(other)

    def __eq__(self, other):
        other = ConfigMap.parseOther(other)
        if not isinstance(other, dict):
            return False
        return self._map.__eq__(other)

    def __ge__(self, other):
        other = ConfigMap.parseOther(other)
        return self._map.__ge__(other)

    def __gt__(self, other):
        other = ConfigMap.parseOther(other)
        return self._map.__gt__(other)

    def __le__(self, other):
        other = ConfigMap.parseOther(other)
        return self._map.__le__(other)

    def __lt__(self, other):
        other = ConfigMap.parseOther(other)
        return self._map.__lt__(other)

    def __ne__(self, other):
        other = ConfigMap.parseOther(other)
        return self._map.__ne__(other)

    def __delitem__(self, key):
        return self._map.__delitem__(key)

    def __len__(self):
        return self._map.__len__()

    def clear(self):
        self._map.clear()

    def copy(self):
        return ConfigMap(self, _dynamic=self._dynamic, _evaluate=self._evaluate, _evaluated=self._evaluated)

    def __copy__(self):
        return self.copy()

    def __deepcopy__(self, memo=None):
        return self.copy()

    def get(self, key, default=None):
        return self._map.get(key, default)

    def has_key(self, key):
        return key in self._map

    def iterkeys(self):
        return self._map.iterkeys()

    def itervalues(self):
        return self._map.itervalues()

    def keys(self):
        return self._map.keys()

    def pop(self, key, default=None):
        return self._map.pop(key, default)

    def popitem(self):
        return self._map.popitem()

    def setdefault(self, key, default=None):
        # NOTE(review): unlike dict.setdefault, the result is not returned.
        self._map.setdefault(key, default)

    def update(self, *args, **kwargs):
        if len(args) != 0:
            self._map.update(*args)
        self._map.update(kwargs)

    def viewitems(self):
        return self._map.viewitems()

    def viewkeys(self):
        return self._map.viewkeys()

    def viewvalues(self):
        return self._map.viewvalues()

    @classmethod
    def fromkeys(cls, seq, value=None):
        d = ConfigMap(_dynamic=False)
        d._map = OrderedDict.fromkeys(seq, value)
        return d

    def __getstate__(self):
        return self.__dict__

    def __setstate__(self, d):
        self.__dict__.update(d)

    # bannerStr
    def _getListStr(self, items):
        out = '['
        mid = ''
        for i in items:
            mid += ' {}\n'.format(i)
        if mid != '':
            mid = '\n' + mid
        out += mid
        out += ']'
        return out

    def _getValueStr(self, k, v):
        outV = v
        multiLine = len(str(v).split('\n')) > 1
        if multiLine:
            # push to next line
            outV = '\n' + v
        if type(v) is list:
            outV = self._getListStr(v)
        out = '{} {}'.format(k, outV)
        return out

    def _getSubMapDotList(self, pre, name, subMap):
        # Flatten a nested map into 'a.b.c value' strings.
        outList = []
        if pre == '':
            pre = name
        else:
            pre = '{}.{}'.format(pre, name)

        def stamp(pre, k, v):
            valStr = self._getValueStr(k, v)
            return '{}.{}'.format(pre, valStr)

        for k, v in subMap.items():
            if isinstance(v, ConfigMap) and v != ConfigMap():
                subList = self._getSubMapDotList(pre, k, v)
                outList.extend(subList)
            else:
                outList.append(stamp(pre, k, v))
        return outList

    def _getSubMapStr(self, name, subMap):
        outList = ['== {} =='.format(name)]
        for k, v in subMap.items():
            if isinstance(v, ConfigMap) and v != ConfigMap():
                # break down to dots
                subList = self._getSubMapDotList('', k, v)
                # add the divit
                # subList = ['> {}'.format(i) for i in subList]
                outList.extend(subList)
            else:
                out = self._getValueStr(k, v)
                # out = '> {}'.format(out)
                out = '{}'.format(out)
                outList.append(out)
        finalOut = '\n'.join(outList)
        return finalOut

    def bannerStr(self):
        """Render the map as a human-readable multi-section banner."""
        lines = []
        previous = None
        for k, v in self.items():
            if previous == 'ConfigMap':
                lines.append('-')
            out = ''
            if isinstance(v, ConfigMap):
                name = k
                subMap = v
                out = self._getSubMapStr(name, subMap)
                lines.append(out)
                previous = 'ConfigMap'
            else:
                out = self._getValueStr(k, v)
                lines.append(out)
                previous = 'other'
        lines.append('--')
        s = '\n'.join(lines)
        return s
class Metric:
    """Per-AZ metric store keyed by az-id and metric name.

    Backed by a nested OrderedDict {az_id: {key: value}}. The set of valid
    metric names comes from the module-level ``key_list`` (defined elsewhere
    in this file): the first 3 entries are scalar counters, the rest are
    lists that values get appended to.
    """

    def __init__(self):
        self.__metrics_dict = OrderedDict()
        self.az_id_list = ['ds1', 'ds2', 'ds3', 'ds4']

    def __init_metrics_dict(self, is_print=False):
        """(Re)create the per-AZ dict with an empty list for every key."""
        for azid in self.az_id_list:
            self.__metrics_dict[azid] = OrderedDict()
            for key in key_list:
                self.__metrics_dict[azid][key] = []
        if is_print:
            # Debug dump of the freshly initialized structure.
            for azid, keymap in self.__metrics_dict.items():
                print('\n\n', azid, end=' ')
                for k, value in keymap.items():
                    print('\n\t', k, '\n\t\t', value)

    def metrics(self, az_id, command, key, value=None, n=-1):
        """Dispatch on `command`:

        - 'INIT': rebuild the dict; with key="ALL" and value="ZEROS" reset
          az_id's scalars to 0 and its lists to [].
        - 'set': assign scalar keys, append to list keys.
        - 'get': return the stored value for `key`.
        - 'add': increment a scalar, or element `n` of a list key.
        - 'summ': return a scalar as-is or the sum of a list key.

        Returns the affected value, True on success for INIT/set, or False
        on any error (unknown command/key, missing 'ZEROS' or 'n').
        """
        str_error = ("Key (%s) not found for Command %s, with val %s!!" % (key, command, value))
        str_ok = ("{%s: %s} -> " % (key, value))
        l = len(key_list)
        m = 3  # first m keys are scalar counters, the rest are lists
        # NOTE: string comparisons use '==' — the original used 'is', which
        # only worked by accident of CPython string interning.
        if command == 'INIT':
            self.__init_metrics_dict()
            if key == "ALL":
                key1 = key_list[0:m]  # integers
                l1 = len(key1)
                key2 = key_list[m:l]  # empty lists
                l2 = len(key2)
                key = [key1, key2]
                print("Init Metrics Size: ", l1, l2)
                if value == "ZEROS":
                    for kk in key:
                        x = len(kk)
                        for k in kk:
                            if x == l1:
                                self.__metrics_dict[az_id][k] = 0
                            elif x == l2:
                                self.__metrics_dict[az_id][k] = []
                    return True
                else:
                    print("You must specify 'ZEROS'!!")
                    return False
            print(str_ok, self.__metrics_dict[az_id].items())
        elif command == 'set':
            if key in key_list[0:m]:
                self.__metrics_dict[az_id][key] = value
            elif key in key_list[m:l]:
                self.__metrics_dict[az_id][key].append(value)
            else:
                print(str_error)
                return False
            print(str_ok, True)  # , self.__metrics_dict[az_id].items()))
            return True
        elif command == 'get':
            if key in key_list:
                print(str_ok, self.__metrics_dict[az_id][key])
                return self.__metrics_dict[az_id][key]
            else:
                print(str_error)
                return False
        elif command == 'add':
            if key in key_list[0:m]:
                self.__metrics_dict[az_id][key] += value
            elif key in key_list[m:l]:
                if n >= 0:
                    self.__metrics_dict[az_id][key][n] += value
                else:
                    print("Use 'set' command or specify 'n' position")
                    return False
            else:
                print(str_error)
                return False
            print(str_ok, self.__metrics_dict[az_id][key])
            return self.__metrics_dict[az_id][key]
        elif command == 'summ':
            if key in key_list[0:m]:
                ret = self.__metrics_dict[az_id][key]
            elif key in key_list[m:l]:
                ret = sum(self.__metrics_dict[az_id][key])
            else:
                print(str_error)
                return False
            print(str_ok, ret)
            return ret
        else:
            print("Command (" + str(command) + ") not found!!")
            return False
class AccumUnits(object):
    """
    make & print human readable lists of quantifiable things like time,
    distance, weight - track them the way computers like (as granular as you
    wish) output them the way humans like (without having to think about it
    more than once)

    todo: add unit conversions
    """

    def __init__(self, unit_names=None, unit_qnts=None, VERBOSE=False):
        """
        whether passed-in or using defaults, last unit should have quant=1
        as it is the most 'granular' unit you are using. Pick an instance
        name to reflect the units being handled.

        unit_names: labels, big to small (e.g. ['hr', 'min', 'sec'])
        unit_qnts:  how many of the NEXT unit fit in each, ending with 1
        """
        default_units = ['year', 'month', 'day', 'hr', 'min', 'sec']  # always go big to small
        default_quants = [12, 30, 24, 60, 60, 1]  # don't calculate, just list
        # default_units = ['mile', 'foot', 'inch']  # just like in physics class
        # default_quants = [5280, 12, 1]
        if unit_names is None:
            if VERBOSE:
                print('using default unit labels:')
                print(default_units)
            self.unit_names = default_units
        else:
            self.unit_names = unit_names
        if unit_qnts is None:
            if VERBOSE:
                print('using default unit quantities:')
                print(default_quants)
            unit_qnts = default_quants
        assert(isinstance(self.unit_names, list))
        assert(isinstance(unit_qnts, list))
        assert(len(set(self.unit_names)) == len(unit_qnts))  # set() to be rid of duplicate names
        self.timeunits = OrderedDict()
        sec = 1
        self.seclist = deque()
        # FIX: work on a copy — the original popped from the caller's list,
        # silently emptying it.
        quants = list(unit_qnts)
        while quants:  # multiply to get successive units
            xun = quants.pop()
            self.seclist.appendleft(sec * xun)
            sec *= xun
        for ktm, vtm in zip(self.unit_names, self.seclist):  # zip them into OrderedDict
            self.timeunits[ktm] = vtm
            if VERBOSE:
                print('{:6} : {:10}'.format(ktm, vtm))
        self.VERBOSE = VERBOSE

    def breakdown(self, rawseconds):
        """incoming raw-seconds (or whatever) returned as list of
        whatever-unit strings"""
        qt = abs(rawseconds)
        divtime = []
        # FIX: viewitems() is Python-2-only; items() behaves the same here.
        for kt, vt in self.timeunits.items():
            qt, leftover = divmod(qt, vt)
            if qt:
                divtime.append(str(int(qt)) + ' ' + str(kt))
            if leftover < 1:
                # Nothing (whole) left to break down — stop early.
                if self.VERBOSE:
                    print('({} = fractional {} from given {})'.format(leftover, kt, rawseconds))
                    print('a stringy-list breakdown (joined): ')
                return divtime
            qt = leftover
        return divtime

    def breakdict(self, rawseconds):
        """incoming raw-seconds (or whatever) returned as dict with
        {unit_name: quantity-inside-remainder}"""
        qt = abs(rawseconds)
        divtime = OrderedDict()
        for kt, vt in self.timeunits.items():
            qt, leftover = divmod(qt, vt)
            if qt:
                divtime[kt] = int(qt)
            if leftover < 1:
                if self.VERBOSE:
                    print('({} = fractional {} from given {})'.format(leftover, kt, rawseconds))
                    print('a dictionary breakdown:')
                return divtime
            qt = leftover
        return divtime

    def timebetween(self, start, end):
        """returns dict of unit-quant breakdown, optionally prints as
        string"""
        assert(isinstance(start, (int, float)))
        assert(isinstance(end, (int, float)))
        quant = end - start
        if self.VERBOSE:
            print('between {0} {2}, and {1} {2}'.format(start, end, self.unit_names[-1]))
            print(" : {}".format(", ".join(self.breakdown(quant))))
        return self.breakdict(quant)
class Maps(MutableMapping):
    """
    Converts a dictionary of key:value pairs into a dotted representation of
    those keys. Normal string representation of keys is still accessible via
    normal dictionary indexing.

    Note:
        If a key contains non-alphanumeric characters (!@#$%, etc, including
        spaces), they will be replaced with an underscore (_).

    Examples:
        >>> maps = Maps({"hello": "world"})
        >>> print(maps)
        Maps(hello='world')
        >>> maps.hello
        'world'
        >>> maps["hello"]
        'world'

    Raises:
        ValueError: An argument is of a legal type but is, or contains, an
            illegal value.
    """

    # Class-level variables
    _dynamic: bool
    _map: OrderedDict

    def __init__(self, *args, **kwargs) -> None:
        super().__init__()
        self._dynamic = True
        self._map = OrderedDict()
        if kwargs:
            for key, value in self._get_items(kwargs):
                key = re.sub('[^0-9a-zA-Z]+', '_', key)
                if key != '_dynamic':
                    self._map[key] = value
                else:
                    self._dynamic = value
        if args:
            dictionary = args[0]
            if not isinstance(dictionary, dict):
                raise ValueError(
                    "object passed to constructor must be of type 'dict': "
                    f"'{type(dictionary).__name__}'")
            # Recursive handling: remember already-wrapped dicts so a
            # self-referencing input maps back to the same Maps instance.
            tracked_ids = {id(dictionary): self}
            for key, value in self._get_items(dictionary):
                if isinstance(key, str):
                    key = re.sub('[^0-9a-zA-Z]+', '_', key)
                value_id = id(value)
                if isinstance(value, dict):
                    if value_id in tracked_ids:
                        value = tracked_ids[value_id]
                    else:
                        value = self.__class__(value, _dynamic=self._dynamic)
                        tracked_ids[value_id] = value
                if isinstance(value, list):
                    listed_items = []
                    for item in value:
                        temp_item = item
                        if isinstance(item, dict):
                            temp_item = self.__class__(item, _dynamic=self._dynamic)
                        listed_items.append(temp_item)
                    value = listed_items
                # Strings holding Python literals ("1", "[2]") are parsed to
                # their real types; anything un-evaluable is kept verbatim.
                try:
                    self._map[key] = ast.literal_eval(value)
                except NameError:
                    if value.lower() == "false":
                        self._map[key] = False
                    elif value.lower() == "true":
                        self._map[key] = True
                    else:
                        self._map[key] = value
                except (SyntaxError, ValueError):
                    # Cannot eval this value
                    self._map[key] = value

    # Dunder methods
    def __add__(self, value: object) -> Union[Any, NoReturn]:
        if self.empty():
            return value
        else:
            self_type = type(self).__name__
            value_type = type(value).__name__
            raise TypeError(
                f"unsupported operand type(s) for +: '{self_type}' and '{value_type}'"
            )

    def __cmp__(self, value: object) -> Any:
        # Python 2 legacy; never invoked by Python 3 comparison machinery.
        value = Maps.parse_value(value)
        return self._map.__cmp__(value)

    def __contains__(self, name: str) -> bool:
        return self._map.__contains__(name)

    def __copy__(self) -> Maps:
        # FIX: the original did `self.__class__(self)`, which raised
        # ValueError because Maps is not a dict subclass. Build a shallow
        # copy of the backing map instead.
        duplicate = self.__class__()
        duplicate._dynamic = self._dynamic
        duplicate._map = self._map.copy()
        return duplicate

    def __deepcopy__(self, memo=None) -> Maps:
        # FIX: copy.deepcopy always passes a memo dict; the original
        # signature took no such argument and raised TypeError.
        return self.copy()

    def __delitem__(
            self, key: str,
            dict_delitem: Optional[Callable[..., Any]] = dict.__delitem__) -> Any:
        # FIX: Python 3 OrderedDict.__delitem__ rejects the dict_delitem
        # keyword; the parameter is kept only for signature compatibility.
        return self._map.__delitem__(key)

    def __dir__(self) -> Iterable:
        return self.keys()

    def __eq__(self, value: Any) -> bool:
        value = Maps.parse_value(value)
        if not isinstance(value, dict):
            return False
        return self._map.__eq__(value)

    def __ge__(self, value: Any) -> bool:
        value = Maps.parse_value(value)
        return self._map.__ge__(value)

    def __gt__(self, value: Any) -> bool:
        value = Maps.parse_value(value)
        return self._map.__gt__(value)

    def __iter__(self) -> Iterable:
        return self._map.__iter__()

    def __le__(self, value: Any) -> bool:
        value = Maps.parse_value(value)
        return self._map.__le__(value)

    def __len__(self) -> int:
        return self._map.__len__()

    def __lt__(self, value: Any) -> bool:
        value = Maps.parse_value(value)
        return self._map.__lt__(value)

    def __ne__(self, value: Any) -> bool:
        value = Maps.parse_value(value)
        return self._map.__ne__(value)

    def __repr__(self) -> str:
        return str(self)

    def __str__(self) -> str:
        items = []
        for key, value in self._get_items(self._map):
            # Recursive assignment case
            if id(value) == id(self):
                items.append("{0}={1}(...)".format(key, self.__class__.__name__))
            else:
                items.append("{0}={1}".format(key, repr(value)))
        joined = ", ".join(items)
        return "{0}({1})".format(self.__class__.__name__, joined)

    def __delattr__(self, name: str) -> None:
        self._map.__delitem__(name)

    def __getattr__(self, name: str) -> Any:
        if name in ('_map', '_dynamic', "_ipython_canary_method_should_not_exist_"):
            # Only reached when the attribute is genuinely missing; raise the
            # standard AttributeError (original called nonexistent
            # super().__getattr__).
            return object.__getattribute__(self, name)
        try:
            return super(self.__class__, self).__getattribute__(name)
        except AttributeError:
            pass
        return self[name]

    def __setattr__(self, name: str, value: Any) -> None:
        if name in ('_map', '_dynamic', "_ipython_canary_method_should_not_exist_"):
            super().__setattr__(name, value)
        else:
            self[name] = value

    def __getitem__(self, name: str) -> Union[Any, Maps]:
        if (name not in self._map and self._dynamic
                and name != "_ipython_canary_method_should_not_exist_"):
            # Dynamic mode: auto-create empty nested Maps for missing keys.
            self[name] = self.__class__()
        return self._map[name]

    def __setitem__(self, name: str, value: Any) -> None:
        self._map[name] = value

    def __getstate__(self) -> dict:
        return self.__dict__

    def __setstate__(self, value: dict) -> None:
        self.__dict__.update(value)

    # Internal methods
    def _get_items(self, item: Any) -> Iterable:
        # iteritems() on Python 2 mappings, items() otherwise.
        if hasattr(item, 'iteritems') and ismethod(getattr(item, 'iteritems')):
            return item.iteritems()
        else:
            return item.items()

    # Public methods
    def clear(self) -> None:
        """Remove all items from the Maps object."""
        self._map.clear()

    def copy(self) -> Maps:
        """Makes a shallow copy of the Maps object in memory."""
        return self.__copy__()

    def empty(self) -> bool:
        """Returns whether the Maps object is empty."""
        return (not any(self))

    @classmethod
    def fromkeys(cls, iterable: Iterable, value: Optional[Any] = None) -> Maps:
        """Returns a new :obj:`Maps` object with keys supplied from an
        iterable, setting each key in the object to :term:`value`.

        Args:
            iterable (:obj:`Iterable`): Any iterable.
            value (:obj:`obj`, optional): The value to set for the keys.
                Default is :obj:`None`.

        Returns:
            Maps: The :obj:`Maps` object.
        """
        maps = cls()
        # FIX: the original assigned `maps.map`, which (via __setattr__)
        # created a 'map' *entry* instead of replacing the backing dict.
        maps._map = OrderedDict.fromkeys(iterable, value)
        return maps

    def get(self, key: str, default: Optional[Any] = None) -> Any:
        """Returns the value of :term:`key`; :term:`default` if absent."""
        return self._map.get(key, default)

    def has_key(self, key: str) -> bool:
        return key in self._map

    def items(self) -> Iterable:
        """Returns the (key, value) pairs of the backing map."""
        return self._get_items(self._map)

    def iteritems(self) -> Iterable:
        """Returns the Maps object's (key, value) pairs."""
        return self.items()

    def iterkeys(self) -> Iterator:
        """Returns an iterator over the Maps object's keys."""
        # FIX: OrderedDict has no iterkeys() on Python 3.
        return iter(self._map.keys())

    def itervalues(self) -> Iterator:
        """Returns an iterator over the Maps object's values."""
        # FIX: OrderedDict has no itervalues() on Python 3.
        return iter(self._map.values())

    def keys(self) -> Iterable:
        """Returns the keys of the Maps object."""
        return self._map.keys()

    def next(self) -> str:
        """Returns the first key in the dictionary."""
        # FIX: dicts have no .next(); yield the first key instead.
        return next(iter(self._map))

    @classmethod
    def parse_ini(cls, ini_dict: ConfigParser, to_maps=False) -> Union[dict, Maps]:
        """
        Converts the values from an INI file from all strings to their actual
        Python base-types (i.e. int, float, bool, etc). If the value cannot
        be converted, it is kept as a string. If a value of the key:value
        pairs is not a string, its type is maintained. `&NAME&` spans in
        values are substituted from os.environ.

        Args:
            ini_dict (:obj:`ConfigParser`): The dictionary returned by
                configparser when an INI file is loaded.
            to_maps (:obj:`bool`): Return a :obj:`Maps` object instead of a
                :obj:`dict`.

        Returns:
            dict or Maps: A dictionary maintaining the same key:value pairs
            as the input; values are converted to Python base-types.

        Raises:
            TypeError: An argument is of an illegal type.
        """
        # Check for dict because of recursion; ini_dict is only meant
        # to be a dict when the function recursively converts the values
        # from a ConfigParser
        if not isinstance(ini_dict, (dict, ConfigParser)):
            raise TypeError(
                "argument 'ini_dict' must be of type 'ConfigParser': "
                f"{type(ini_dict).__name__}")
        if isinstance(ini_dict, ConfigParser):
            ini_dict_ = {}
            for section in ini_dict.sections():
                ini_dict_[section] = {}
                for option in ini_dict.options(section):
                    # Parse using configparser
                    option_value = ini_dict.get(section, option)
                    # Parse using os environ: '&VAR&' pairs are replaced by
                    # the environment variable VAR when it exists.
                    matches = [(m.start(0), m.end(0))
                               for m in re.finditer("&", option_value)]
                    if len(matches) > 0 and len(matches) % 2 == 0:
                        i = 0
                        while True:
                            try:
                                index_end = matches.pop(i + 1)[1]
                                index_start = matches.pop(i)[0]
                                sub = option_value[index_start:index_end]
                                sub_replace = os.environ[sub[1:-1]]
                                option_value = option_value.replace(
                                    sub, sub_replace)
                            except IndexError:
                                break
                            except KeyError:
                                # Unknown environment variable: leave as-is.
                                pass
                    ini_dict_[section][option] = option_value
            ini_dict = ini_dict_
        for key, value in ini_dict.items():
            if isinstance(value, dict):
                # Recursively parse dict
                ini_dict[key] = Maps.parse_ini(value, to_maps=to_maps)
            else:
                if not isinstance(value, str):
                    continue
                try:
                    ini_dict[key] = ast.literal_eval(value)
                except NameError:
                    if value.lower() == "false":
                        ini_dict[key] = False
                    elif value.lower() == "true":
                        ini_dict[key] = True
                    else:
                        ini_dict[key] = value
                except (SyntaxError, ValueError):
                    # Cannot eval this value
                    ini_dict[key] = value
        return Maps(ini_dict) if to_maps else ini_dict

    @classmethod
    def parse_value(cls, value: Any) -> Any:
        """
        Checks if :term:`value` subclasses :obj:`Maps`. If so, it returns the
        backing :obj:`OrderedDict`; otherwise the :term:`value` itself.

        Args:
            value (:obj:`Any`): The value to parse.

        Returns:
            Any: :obj:`OrderedDict` if :term:`value` subclasses :obj:`Maps`,
            otherwise :term:`value`.
        """
        if issubclass(type(value), Maps):
            # FIX: the original returned `value.map`, which dynamically
            # created an empty 'map' entry; the backing dict is `_map`.
            return value._map
        else:
            return value

    def pop(self, key: str, default: Optional[Any] = None) -> Any:
        """
        Removes and returns the value in the Maps object at :term:`key`.
        If :term:`key` does not exist, then :term:`default` is returned.

        Args:
            key (:obj:`str`): The key to remove from the Maps object.
            default (:obj:`obj`, optional): The value to return if
                :term:`key` does not exist.

        Returns:
            Any: The value at :term:`key`, otherwise :term:`default`.
        """
        # FIX: the original called self._map.pop() with no arguments,
        # ignoring both `key` and `default`.
        return self._map.pop(key, default)

    def popitem(self) -> Any:
        """Removes and returns a (key, value) pair from the Maps object.

        Raises:
            KeyError: The :obj:`Maps` object is empty.
        """
        return self._map.popitem()

    def setdefault(self, key: str, default=None) -> Any:
        """
        Returns the value of :term:`key`; if absent, inserts :term:`default`
        at :term:`key` first and returns it.
        """
        return self._map.setdefault(key, default)

    def to_dict(self) -> dict:
        """Converts the :obj:`Maps` object to a stdlib dictionary.

        Returns:
            dict: The converted :obj:`Maps` object as a dictionary.
        """
        new_dict = {}
        for key, value in self.items():
            if issubclass(type(value), Maps):
                if id(value) == id(self):
                    # Self-reference: point at the dict being built.
                    value = new_dict
                else:
                    value = value.to_dict()
            elif isinstance(value, (tuple, list)):
                new_list = []
                for item in value:
                    temp_item = item
                    if issubclass(type(item), Maps):
                        temp_item = item.to_dict()
                    new_list.append(temp_item)
                if isinstance(value, tuple):
                    value = tuple(new_list)
                else:
                    value = new_list
            new_dict[key] = value
        return new_dict

    def update(self, *args, **kwargs) -> None:
        """Adds or changes existing values using a dictionary or iterator
        of key:value pairs."""
        if len(args) != 0:
            self._map.update(*args)
        self._map.update(kwargs)

    def values(self) -> Any:
        """Returns the values of the :obj:`Maps` object."""
        return self._map.values()

    def viewitems(self) -> Any:
        """Returns a view of the :obj:`Maps` object's items."""
        # FIX: viewitems() is Python-2-only; items() is the py3 equivalent.
        return self._map.items()

    def viewkeys(self) -> Any:
        """Returns a view of the :obj:`Maps` object's keys."""
        return self._map.keys()

    def viewvalues(self) -> Any:
        """Returns a view of the :obj:`Maps` object's values."""
        return self._map.values()
class DotMap(OrderedDict):
    """An ordered mapping whose keys are also reachable as attributes (d.key).

    This variant does NOT auto-create missing keys: reading an absent key
    raises AttributeError.
    """

    def __init__(self, *args, **kwargs):
        self._map = OrderedDict()
        if args:
            d = args[0]
            if type(d) is dict:
                for k, v in self.__call_items(d):
                    if type(v) is dict:
                        # Recursively wrap nested plain dicts.
                        v = DotMap(v)
                    self._map[k] = v
        if kwargs:
            for k, v in self.__call_items(kwargs):
                self._map[k] = v

    def __call_items(self, obj):
        # Python 2/3 compatibility: prefer iteritems() when the mapping has one.
        if hasattr(obj, 'iteritems') and ismethod(getattr(obj, 'iteritems')):
            return obj.iteritems()
        else:
            return obj.items()

    def items(self):
        return self.iteritems()

    def iteritems(self):
        return self.__call_items(self._map)

    def __iter__(self):
        return self._map.__iter__()

    def next(self):
        return self._map.next()

    def __setitem__(self, k, v):
        self._map[k] = v

    def __getitem__(self, k):
        if k not in self._map:
            # DON'T automatically extend to new DotMap
            raise AttributeError('%s is not defined in DotMap' % k)
        return self._map[k]

    def __setattr__(self, k, v):
        if k == '_map':
            super(DotMap, self).__setattr__(k, v)
        else:
            self[k] = v

    def __getattr__(self, k):
        if k == '_map':
            # BUG FIX: the lookup result was previously discarded (implicit None).
            return super(DotMap, self).__getattr__(k)
        else:
            return self[k]

    def __delattr__(self, key):
        return self._map.__delitem__(key)

    def __contains__(self, k):
        return self._map.__contains__(k)

    def __str__(self):
        items = []
        for k, v in self.__call_items(self._map):
            items.append('{0}={1}'.format(k, repr(v)))
        out = 'DotMap({0})'.format(', '.join(items))
        return out

    def __repr__(self):
        return str(self)

    def toDict(self):
        """Convert (recursively) to a plain dict."""
        d = {}
        for k, v in self.items():
            if type(v) is DotMap:
                v = v.toDict()
            d[k] = v
        return d

    def pprint(self):
        pprint(self.toDict())

    # proper dict subclassing
    def values(self):
        return self._map.values()

    @classmethod
    def parseOther(cls, other):
        # Unwrap a DotMap operand so comparisons work on the backing OrderedDict.
        if type(other) is DotMap:
            return other._map
        else:
            return other

    def __cmp__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__cmp__(other)

    def __eq__(self, other):
        other = DotMap.parseOther(other)
        if not isinstance(other, dict):
            return False
        return self._map.__eq__(other)

    def __ge__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ge__(other)

    def __gt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__gt__(other)

    def __le__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__le__(other)

    def __lt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__lt__(other)

    def __ne__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ne__(other)

    def __delitem__(self, key):
        return self._map.__delitem__(key)

    def __len__(self):
        return self._map.__len__()

    def clear(self):
        self._map.clear()

    def copy(self):
        # BUG FIX: previously returned self, so "copies" aliased the original.
        # Rebuild from the plain-dict form (matches the other DotMap variants).
        return DotMap(self.toDict())

    def get(self, key, default=None):
        return self._map.get(key, default)

    def has_key(self, key):
        return key in self._map

    def iterkeys(self):
        return self._map.iterkeys()

    def itervalues(self):
        return self._map.itervalues()

    def keys(self):
        return self._map.keys()

    def pop(self, key, default=None):
        return self._map.pop(key, default)

    def popitem(self):
        return self._map.popitem()

    def setdefault(self, key, default=None):
        # BUG FIX: the stored/looked-up value was previously not returned.
        return self._map.setdefault(key, default)

    def update(self, *args, **kwargs):
        if len(args) != 0:
            self._map.update(*args)
        self._map.update(kwargs)

    def viewitems(self):
        return self._map.viewitems()

    def viewkeys(self):
        return self._map.viewkeys()

    def viewvalues(self):
        return self._map.viewvalues()

    @classmethod
    def fromkeys(cls, seq, value=None):
        d = DotMap()
        d._map = OrderedDict.fromkeys(seq, value)
        return d
class DotMap(OrderedDict):
    """An ordered mapping whose keys are also reachable as attributes (d.key).

    This variant auto-vivifies: reading a missing key creates an empty child
    DotMap, so chained assignment like d.a.b.c = 1 works.
    """

    def __init__(self, *args, **kwargs):
        self._map = OrderedDict()
        if args:
            d = args[0]
            if type(d) is dict:
                for k, v in self.__call_items(d):
                    if type(v) is dict:
                        # Recursively wrap nested plain dicts.
                        v = DotMap(v)
                    self._map[k] = v
        if kwargs:
            for k, v in self.__call_items(kwargs):
                self._map[k] = v

    def __call_items(self, obj):
        # Python 2/3 compatibility: prefer iteritems() when the mapping has one.
        if hasattr(obj, 'iteritems') and ismethod(getattr(obj, 'iteritems')):
            return obj.iteritems()
        else:
            return obj.items()

    def items(self):
        return self.iteritems()

    def iteritems(self):
        return self.__call_items(self._map)

    def __iter__(self):
        return self._map.__iter__()

    def next(self):
        return self._map.next()

    def __setitem__(self, k, v):
        self._map[k] = v

    def __getitem__(self, k):
        if k not in self._map:
            # automatically extend to new DotMap
            self[k] = DotMap()
        return self._map[k]

    def __setattr__(self, k, v):
        if k == '_map':
            super(DotMap, self).__setattr__(k, v)
        else:
            self[k] = v

    def __getattr__(self, k):
        if k == '_map':
            # BUG FIX: the lookup result was previously discarded (implicit None).
            return super(DotMap, self).__getattr__(k)
        else:
            return self[k]

    def __delattr__(self, key):
        return self._map.__delitem__(key)

    def __contains__(self, k):
        return self._map.__contains__(k)

    def __str__(self):
        items = []
        for k, v in self.__call_items(self._map):
            items.append('{0}={1}'.format(k, repr(v)))
        out = 'DotMap({0})'.format(', '.join(items))
        return out

    def __repr__(self):
        return str(self)

    def toDict(self):
        """Convert (recursively) to a plain dict."""
        d = {}
        for k, v in self.items():
            if type(v) is DotMap:
                v = v.toDict()
            d[k] = v
        return d

    def pprint(self):
        pprint(self.toDict())

    # proper dict subclassing
    def values(self):
        return self._map.values()

    @classmethod
    def parseOther(cls, other):
        # Unwrap a DotMap operand so comparisons work on the backing OrderedDict.
        if type(other) is DotMap:
            return other._map
        else:
            return other

    def __cmp__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__cmp__(other)

    def __eq__(self, other):
        other = DotMap.parseOther(other)
        if not isinstance(other, dict):
            return False
        return self._map.__eq__(other)

    def __ge__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ge__(other)

    def __gt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__gt__(other)

    def __le__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__le__(other)

    def __lt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__lt__(other)

    def __ne__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ne__(other)

    def __delitem__(self, key):
        return self._map.__delitem__(key)

    def __len__(self):
        return self._map.__len__()

    def clear(self):
        self._map.clear()

    def copy(self):
        # BUG FIX: previously returned self, so "copies" aliased the original.
        # Rebuild from the plain-dict form (matches the other DotMap variants).
        return DotMap(self.toDict())

    def get(self, key, default=None):
        return self._map.get(key, default)

    def has_key(self, key):
        return key in self._map

    def iterkeys(self):
        return self._map.iterkeys()

    def itervalues(self):
        return self._map.itervalues()

    def keys(self):
        return self._map.keys()

    def pop(self, key, default=None):
        return self._map.pop(key, default)

    def popitem(self):
        return self._map.popitem()

    def setdefault(self, key, default=None):
        # BUG FIX: the stored/looked-up value was previously not returned.
        return self._map.setdefault(key, default)

    def update(self, *args, **kwargs):
        if len(args) != 0:
            self._map.update(*args)
        self._map.update(kwargs)

    def viewitems(self):
        return self._map.viewitems()

    def viewkeys(self):
        return self._map.viewkeys()

    def viewvalues(self):
        return self._map.viewvalues()

    @classmethod
    def fromkeys(cls, seq, value=None):
        d = DotMap()
        d._map = OrderedDict.fromkeys(seq, value)
        return d
class ModelParams(object): def __init__(self, seq_len, num_epochs, learning_rate, batch_size, keep_prob=0.5, beta1=0.9, concat_revcom_input=False, inference_method_key="inferenceA", json_file=None): """A class for encapsulating model parameters :param seq_len: Sequence length :param num_epochs: Number of epochs :param learning_rate: Learning rate :param batch_size: Mini-batch pull size :param keep_prob: Probability of keeping weight for dropout regularization :param beta1: Beta1 parameter for AdamOptimizer :param concat_revcom_input: If true, concatenate reverse complement of nucleotide sequence to input vector :param inference_method_key: Dictionary key for inference method found in nucconvmodels.py file. Determines which model to use. Example: "inferenceA" will run nucconvmodels.inferenceA :returns: A ModelParams object :rtype: ModelParams """ ##Training parameters ''' if training_file == '' or training_file == 'None': self.training_file = None else: self.training_file = training_file if testing_file == '' or testing_file == 'None': self.testing_file = None else: self.testing_file = testing_file ''' self.num_epochs = int(num_epochs) self.learning_rate = np.float32(learning_rate) self.seq_len = int(seq_len) self.batch_size = int(batch_size) self.keep_prob = float(keep_prob) self.beta1 = float(beta1) self.concat_revcom_input = concat_revcom_input self.inference_method_key = inference_method_key self.inference_method = nucconvmodel.methods_dict[inference_method_key] #self.k_folds = int(k_folds) #self.test_frac = float(test_frac) self.populate_param_dict() @classmethod def init_json(cls, json_file): """ Initialize ModelParams object from a json_file :param json_file: A json file with the appropriate keys for a ModelParams object """ print "Parsing json file", json_file with open(json_file, 'r') as jf: data = json.load(jf) num_epochs = int(data['num_epochs']) keep_prob = float(data['keep_prob']) #num_iterations = int(data['num_iterations']) learning_rate = 
np.float32(data['learning_rate']) seq_len = int(data['seq_len']) batch_size = int(data['batch_size']) beta1 = data['beta1'] concat_revcom_input = data['concat_revcom_input'] inference_method_key = data['inference_method_key'] return cls(seq_len, num_epochs, learning_rate, batch_size, keep_prob, beta1, concat_revcom_input, inference_method_key, json_file) def extract_json(self, json_file): """Avoid using this in favor of ModelParams.init_json(json_file)""" self.json_file = os.path.abspath(json_file) #self.json_path = os.path.dirname(os.path.abspath(self.json_filename)) print "Parsing json file", self.json_filename with open(self.json_filename, 'r') as jf: data = json.load(jf) self.num_epochs = int(data['num_epochs']) self.keep_prob = float(data['keep_prob']) #self.num_iterations = int(data['num_iterations']) self.learning_rate = np.float32(data['learning_rate']) self.seq_len = int(data['seq_len']) self.batch_size = int(data['batch_size']) #self.k_folds = data['k_folds'] #self.test_frac = data['test_frac'] self.beta1 = data['beta1'] self.concat_revcom_input = data['concat_revcom_input'] self.inference_method_key = data['inference_method_key'] self.inference_method = nucconvmodel.methods_dict[ self.inference_method_key] self.populate_param_dict() def populate_param_dict(self): self.params_dict = OrderedDict([ ('seq_len', int(self.seq_len)), ('num_epochs', int(self.num_epochs)), ('learning_rate', float(self.learning_rate)), ('batch_size', int(self.batch_size)), ('keep_prob', float(self.keep_prob)), ('beta1', float(self.beta1)), ('concat_revcom_input', self.concat_revcom_input), ('inference_method_key', self.inference_method.__name__), ]) def print_param_values(self): print self.params_dict.values() def print_params(self): for k, v in self.params_dict.viewitems(): print "{}:\t{}".format(k, v) print "\n" def save_as_json(self, out_file): print "Saving ModelParams in", out_file with open(out_file, 'w') as of: #print self.params_dict["inference_method_key"] 
json.dump(self.params_dict, of)
#!/bin/env python # ^_^ encoding: utf-8 ^_^ # @author: icejoywoo # @date: 13-12-24 from collections import OrderedDict ordered_dict = OrderedDict([('first', 1), ('second', 2), ('third', 3)]) ordered_dict2 = OrderedDict([('third', 3), ('first', 1), ('second', 2)]) d = dict([('first', 1), ('third', 3), ('second', 2)]) assert d == ordered_dict assert ordered_dict != ordered_dict2 print "Methods:", dir(ordered_dict) for k, v in ordered_dict.viewitems(): print k, v
# NOTE(review): this DotMap variant adds a LORETO compile-time switch that
# changes construction defaults and conditionally grafts on tree-printing
# helpers backed by DictToList / PrintDictionaryTree (defined elsewhere).
class DotMap(OrderedDict):
    def __init__(self, *args, **kwargs):
        self._map = OrderedDict()
        self._dynamic = True  # setting False breaks the default tests: they expect dynamic child creation
        # ===================================
        if LORETO:
            global MY_DICT_TYPES  # global var for the class
            self._dynamic = False  # setting False breaks the default tests: they expect dynamic child creation
            MY_DICT_TYPES = [dict, DotMap]  # by Loreto (DEFAULT dictionary)
        # ===================================
        if kwargs:
            if '_dynamic' in kwargs:
                self._dynamic = kwargs['_dynamic']
        if args:
            d = args[0]
            if isinstance(d, dict):
                for k, v in self.__call_items(d):
                    if type(v) is dict:
                        # Wrap nested dicts, propagating the dynamic flag.
                        v = DotMap(v, _dynamic=self._dynamic)
                    if type(v) is list:
                        # Wrap dicts found inside list values as well.
                        l = []
                        for i in v:
                            n = i
                            if type(i) is dict:
                                n = DotMap(i, _dynamic=self._dynamic)
                            l.append(n)
                        v = l
                    self._map[k] = v
        if kwargs:
            for k, v in self.__call_items(kwargs):
                # NOTE(review): 'is not' is an identity test on a string literal;
                # it only works via CPython interning -- should be k != '_dynamic'.
                if k is not '_dynamic':
                    self._map[k] = v

    def __call_items(self, obj):
        # Python 2/3 compatibility: prefer iteritems() when the mapping has one.
        if hasattr(obj, 'iteritems') and ismethod(getattr(obj, 'iteritems')):
            return obj.iteritems()
        else:
            return obj.items()

    def items(self):
        return self.iteritems()

    def iteritems(self):
        return self.__call_items(self._map)

    def __iter__(self):
        return self._map.__iter__()

    def next(self):
        return self._map.next()

    def __setitem__(self, k, v):
        self._map[k] = v

    def __getitem__(self, k):
        if k not in self._map and self._dynamic and k != '_ipython_canary_method_should_not_exist_':
            # automatically extend to new DotMap
            self[k] = DotMap()
        return self._map[k]

    def __setattr__(self, k, v):
        if k in {'_map', '_dynamic', '_ipython_canary_method_should_not_exist_'}:
            super(DotMap, self).__setattr__(k, v)
        else:
            self[k] = v

    def __getattr__(self, k):
        # NOTE(review): 'k == {...}' compares a string against a set literal and
        # is always False (sibling variants use 'k in {...}'); reading self._map
        # before it exists would therefore recurse through __getitem__.
        if k == {'_map', '_dynamic', '_ipython_canary_method_should_not_exist_'}:
            super(DotMap, self).__getattr__(k)
        else:
            return self[k]

    def __delattr__(self, key):
        return self._map.__delitem__(key)

    def __contains__(self, k):
        return self._map.__contains__(k)

    def __str__(self):
        items = []
        for k, v in self.__call_items(self._map):
            # bizarre recursive assignment situation (why someone would do this is beyond me)
            if id(v) == id(self):
                items.append('{0}=DotMap(...)'.format(k))
            else:
                items.append('{0}={1}'.format(k, repr(v)))
        out = 'DotMap({0})'.format(', '.join(items))
        return out

    def __repr__(self):
        return str(self)

    def toDict(self):
        # Recursively convert to a plain dict, tolerating self-references.
        d = {}
        for k, v in self.items():
            if type(v) is DotMap:
                # bizarre recursive assignment support
                if id(v) == id(self):
                    v = d
                else:
                    v = v.toDict()
            elif type(v) is list:
                l = []
                for i in v:
                    n = i
                    if type(i) is DotMap:
                        n = i.toDict()
                    l.append(n)
                v = l
            d[k] = v
        return d

    def pprint(self):
        pprint(self.toDict())

    # ===================================
    if LORETO:
        # MY_DICT_TYPES = [dict, DotMap]
        def Ptr(self, listOfQualifiers, create=False):
            # Walk the nested map along listOfQualifiers; optionally create
            # missing intermediate DotMaps when create=True, else return None.
            ptr = self
            for item in listOfQualifiers:
                if item in ptr:
                    ptr = ptr[item]
                else:
                    if create:
                        ptr[item] = DotMap()
                        ptr = ptr[item]
                    else:
                        return None
            return ptr

        def KeyTree(self, fPRINT=False):
            return DictToList.KeyTree(self, myDictTYPES=MY_DICT_TYPES, fPRINT=fPRINT)

        def KeyList(self):
            return DictToList.KeyList(self, myDictTYPES=MY_DICT_TYPES)

        def PrintTree(self, fEXIT=False, MaxLevel=10, header=None, printTYPE='LTKV', stackLevel=1):
            PrintDictionaryTree.PrintDictionary(self, myDictTYPES=MY_DICT_TYPES, printTYPE=printTYPE, fEXIT=fEXIT, MaxLevel=MaxLevel, header=header, stackLevel=stackLevel + 1)

        # Aliases for PrintTree.
        printDict = PrintTree
        printTree = PrintTree

        def GetValue(self, listOfQualifiers=[], fPRINT=False):
            # NOTE(review): mutable default argument -- safe only if never mutated.
            return DictToList.getValue(self, listOfQualifiers=listOfQualifiers, myDictTYPES=MY_DICT_TYPES, fPRINT=fPRINT)
    # ===================================

    def empty(self):
        return (not any(self))

    # proper dict subclassing
    def values(self):
        return self._map.values()

    # ipython support
    def __dir__(self):
        return self.keys()

    @classmethod
    def parseOther(self, other):
        # Unwrap a DotMap operand so comparisons work on the backing OrderedDict.
        if type(other) is DotMap:
            return other._map
        else:
            return other

    def __cmp__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__cmp__(other)

    def __eq__(self, other):
        other = DotMap.parseOther(other)
        if not isinstance(other, dict):
            return False
        return self._map.__eq__(other)

    def __ge__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ge__(other)

    def __gt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__gt__(other)

    def __le__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__le__(other)

    def __lt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__lt__(other)

    def __ne__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ne__(other)

    def __delitem__(self, key):
        return self._map.__delitem__(key)

    def __len__(self):
        return self._map.__len__()

    def clear(self):
        self._map.clear()

    def copy(self):
        return DotMap(self.toDict())

    def get(self, key, default=None):
        return self._map.get(key, default)

    def has_key(self, key):
        return key in self._map

    def iterkeys(self):
        return self._map.iterkeys()

    def itervalues(self):
        return self._map.itervalues()

    def keys(self):
        return self._map.keys()

    def pop(self, key, default=None):
        return self._map.pop(key, default)

    def popitem(self):
        return self._map.popitem()

    def setdefault(self, key, default=None):
        # NOTE(review): unlike dict.setdefault, the value is not returned here.
        self._map.setdefault(key, default)

    def update(self, *args, **kwargs):
        if len(args) != 0:
            self._map.update(*args)
        self._map.update(kwargs)

    def viewitems(self):
        return self._map.viewitems()

    def viewkeys(self):
        return self._map.viewkeys()

    def viewvalues(self):
        return self._map.viewvalues()

    @classmethod
    def fromkeys(cls, seq, value=None):
        d = DotMap()
        d._map = OrderedDict.fromkeys(seq, value)
        return d

    def __getstate__(self):
        return self.__dict__

    def __setstate__(self, d):
        self.__dict__.update(d)
class Config(MutableMapping, OrderedDict):
    """An attribute-accessible, dotted-path configuration mapping.

    Keys may be addressed as "a.b.c" through __getitem__/__setitem__; nested
    dicts are wrapped as Config instances. Depends on yaml and flatten_dict
    (imported elsewhere in this file).
    """

    @classmethod
    def load(cls, file_path):
        # Parse a YAML file into a Config.
        with open(file_path) as f:
            params = yaml.load(f.read(), Loader=yaml.FullLoader)
        # We expand ~ in those yaml entries with `path`
        # on their keys for making
        # config files more platform-independent
        params = {
            key: (os.path.expanduser(value)
                  if "path" in key and value is not None
                  else value)
            for key, value in params.items()
        }
        return cls(params)

    def dump(self, file_path):
        # Serialize back to YAML via the plain-dict form.
        with open(file_path, "w") as f:
            d = self.to_dict()
            f.write(yaml.dump(d))

    def __init__(self, *args, **kwargs):
        self._map = OrderedDict()
        if args:
            d = args[0]
            # for recursive assignment handling
            trackedIDs = {id(d): self}
            if isinstance(d, dict):
                for k, v in self.__call_items(d):
                    if isinstance(v, dict):
                        if id(v) in trackedIDs:
                            v = trackedIDs[id(v)]
                        else:
                            v = self.__class__(v)
                            trackedIDs[id(v)] = v
                    if type(v) is list:
                        l = []
                        for i in v:
                            n = i
                            if isinstance(i, dict):
                                n = self.__class__(i)
                            l.append(n)
                        v = l
                    self._map[k] = v
        if kwargs:
            for k, v in self.__call_items(kwargs):
                self._map[k] = v

    # NOTE(review): class-level attribute, shared across ALL instances --
    # __getattr__ appends to it, so lookups on one instance leak into others.
    _path_state = list()

    def __call_items(self, obj):
        # Python 2/3 compatibility: prefer iteritems() when the mapping has one.
        if hasattr(obj, "iteritems") and ismethod(getattr(obj, "iteritems")):
            return obj.iteritems()
        else:
            return obj.items()

    def items(self):
        return self.iteritems()

    def iteritems(self):
        return self.__call_items(self._map)

    def __iter__(self):
        return self._map.__iter__()

    def next(self):
        return self._map.next()

    def __setitem__(self, k, v):
        # print('Called __setitem__')
        # Refuse to silently change the type of an existing non-None entry.
        if (k in self._map and not self._map[k] is None
                and not isinstance(v, type(self._map[k]))):
            if v is not None:
                raise ValueError(
                    f"Updating existing value {type(self._map[k])} "
                    f"with different type ({type(v)}).")
        # Dotted-path assignment: "a.b.c" walks intermediate maps.
        split_path = k.split(".")
        current_option = self._map
        for p in split_path[:-1]:
            current_option = current_option[p]
        current_option[split_path[-1]] = v

    def __getitem__(self, k):
        # Dotted-path lookup: "a.b.c" walks intermediate maps.
        split_path = k.split(".")
        current_option = self._map
        for p in split_path:
            if p not in current_option:
                raise KeyError(p)
            current_option = current_option[p]
        return current_option

    def __setattr__(self, k, v):
        if k in {"_map", "_ipython_canary_method_should_not_exist_"}:
            super(Config, self).__setattr__(k, v)
        else:
            # NOTE(review): attribute assignment *updates* the existing entry
            # (self[k].update(v)) rather than replacing it -- confirm intended.
            self[k].update(v)

    def __getattr__(self, k):
        if k in {"_map", "_ipython_canary_method_should_not_exist_"}:
            return super(Config, self).__getattr__(k)
        try:
            v = super(self.__class__, self).__getattribute__(k)
            return v
        except AttributeError:
            # Record the attribute path segment, then fall through to item lookup.
            self._path_state.append(k)
            pass
        return self[k]

    def __delattr__(self, key):
        return self._map.__delitem__(key)

    def __contains__(self, k):
        return self._map.__contains__(k)

    def __add__(self, other):
        # Only an empty Config can absorb another object via '+'.
        if self.empty():
            return other
        else:
            self_type = type(self).__name__
            other_type = type(other).__name__
            msg = "unsupported operand type(s) for +: '{}' and '{}'"
            raise TypeError(msg.format(self_type, other_type))

    def __str__(self):
        items = []
        for k, v in self.__call_items(self._map):
            # recursive assignment case
            if id(v) == id(self):
                items.append("{0}={1}(...)".format(k, self.__class__.__name__))
            else:
                items.append("{0}={1}".format(k, repr(v)))
        joined = ", ".join(items)
        out = "{0}({1})".format(self.__class__.__name__, joined)
        return out

    def __repr__(self):
        return str(self)

    def to_dict(self, flatten=False, parent_key="", sep="."):
        # Recursively convert to a plain dict; optionally flatten to dotted keys.
        d = {}
        for k, v in self.items():
            if issubclass(type(v), Config):
                # bizarre recursive assignment support
                if id(v) == id(self):
                    v = d
                else:
                    v = v.to_dict()
            elif type(v) in (list, tuple):
                l = []
                for i in v:
                    n = i
                    if issubclass(type(i), Config):
                        n = i.to_dict()
                    l.append(n)
                if type(v) is tuple:
                    v = tuple(l)
                else:
                    v = l
            d[k] = v
        if flatten:
            d = flatten_dict(d, parent_key=parent_key, sep=sep)
        return d

    def pprint(self, ):
        pprint(self.to_dict())

    def empty(self):
        return not any(self)

    # proper dict subclassing
    def values(self):
        return self._map.values()

    # ipython support
    def __dir__(self):
        return list(self.keys())

    def _ipython_key_completions_(self):
        return list(self.keys())

    @classmethod
    def parseOther(cls, other):
        # Unwrap a Config operand so comparisons work on the backing OrderedDict.
        if issubclass(type(other), Config):
            return other._map
        else:
            return other

    def __cmp__(self, other):
        other = Config.parseOther(other)
        return self._map.__cmp__(other)

    def __eq__(self, other):
        other = Config.parseOther(other)
        if not isinstance(other, dict):
            return False
        return self._map.__eq__(other)

    def __ge__(self, other):
        other = Config.parseOther(other)
        return self._map.__ge__(other)

    def __gt__(self, other):
        other = Config.parseOther(other)
        return self._map.__gt__(other)

    def __le__(self, other):
        other = Config.parseOther(other)
        return self._map.__le__(other)

    def __lt__(self, other):
        other = Config.parseOther(other)
        return self._map.__lt__(other)

    def __ne__(self, other):
        other = Config.parseOther(other)
        return self._map.__ne__(other)

    def __delitem__(self, key):
        return self._map.__delitem__(key)

    def __len__(self):
        return self._map.__len__()

    def clear(self):
        self._map.clear()

    def copy(self):
        return self.__class__(self)

    def __copy__(self):
        return self.copy()

    def __deepcopy__(self, memo=None):
        # NOTE(review): delegates to copy(), i.e. not a true deep copy of leaves.
        return self.copy()

    def get(self, key, default=None):
        return self._map.get(key, default)

    def has_key(self, key):
        return key in self._map

    def iterkeys(self):
        return self._map.iterkeys()

    def itervalues(self):
        return self._map.itervalues()

    def keys(self):
        return self._map.keys()

    def pop(self, key, default=None):
        return self._map.pop(key, default)

    def popitem(self):
        return self._map.popitem()

    def setdefault(self, key, default=None):
        # NOTE(review): unlike dict.setdefault, the value is not returned here.
        self._map.setdefault(key, default)

    def update(self, *args, **kwargs):
        # Only in-place merging of one dict-of-dicts argument is supported.
        if len(args) == 1:
            for key, value in args[0].items():
                if key in self and isinstance(self[key], dict):
                    if value is None:
                        self[key] = value
                    else:
                        self[key].update(value)
                else:
                    # NOTE(review): unknown keys are rejected; the bare 'pass'
                    # before the raise looks like a leftover -- confirm intent.
                    pass
                    raise ValueError()
        elif len(args) > 1:
            raise NotImplementedError
            # self._map.update(*args)
        else:
            raise NotImplementedError

    def viewitems(self):
        return self._map.viewitems()

    def viewkeys(self):
        return self._map.viewkeys()

    def viewvalues(self):
        return self._map.viewvalues()

    @classmethod
    def fromkeys(cls, seq, value=None):
        d = cls()
        d._map = OrderedDict.fromkeys(seq, value)
        return d

    def __getstate__(self):
        return self.__dict__

    def __setstate__(self, d):
        self.__dict__.update(d)

    # bannerStr
    def _getListStr(self, items):
        # Render a list one element per line inside brackets.
        out = "["
        mid = ""
        for i in items:
            mid += " {}\n".format(i)
        if mid != "":
            mid = "\n" + mid
        out += mid
        out += "]"
        return out

    def _getValueStr(self, k, v):
        # Render "key value", pushing multi-line values to the next line.
        outV = v
        multiLine = len(str(v).split("\n")) > 1
        if multiLine:
            # push to next line
            outV = "\n" + v
        if type(v) is list:
            outV = self._getListStr(v)
        out = "{} {}".format(k, outV)
        return out

    def _getSubMapDotList(self, pre, name, subMap):
        # Flatten a nested sub-map into "pre.name.key value" lines.
        outList = []
        if pre == "":
            pre = name
        else:
            pre = "{}.{}".format(pre, name)

        def stamp(pre, k, v):
            valStr = self._getValueStr(k, v)
            return "{}.{}".format(pre, valStr)

        for k, v in subMap.items():
            if isinstance(v, Config) and v != Config():
                subList = self._getSubMapDotList(pre, k, v)
                outList.extend(subList)
            else:
                outList.append(stamp(pre, k, v))
        return outList

    def _getSubMapStr(self, name, subMap):
        # Render one "== name ==" section with its (dot-flattened) entries.
        outList = ["== {} ==".format(name)]
        for k, v in subMap.items():
            if isinstance(v, self.__class__) and v != self.__class__():
                # break down to dots
                subList = self._getSubMapDotList("", k, v)
                # add the divit
                # subList = ['> {}'.format(i) for i in subList]
                outList.extend(subList)
            else:
                out = self._getValueStr(k, v)
                # out = '> {}'.format(out)
                out = "{}".format(out)
                outList.append(out)
        finalOut = "\n".join(outList)
        return finalOut

    def bannerStr(self):
        # Render the whole Config as a sectioned, human-readable banner.
        lines = []
        previous = None
        for k, v in self.items():
            if previous == self.__class__.__name__:
                lines.append("-")
            out = ""
            if isinstance(v, self.__class__):
                name = k
                subMap = v
                out = self._getSubMapStr(name, subMap)
                lines.append(out)
                previous = self.__class__.__name__
            else:
                out = self._getValueStr(k, v)
                lines.append(out)
                previous = "other"
        lines.append("--")
        s = "\n".join(lines)
        return s
class DotMap(OrderedDict):
    """Ordered mapping with attribute access and optional dynamic child creation.

    Reading a missing key auto-creates an empty child DotMap unless the
    instance was constructed with _dynamic=False.
    """

    def __init__(self, *args, **kwargs):
        self._map = OrderedDict()
        self._dynamic = True
        if kwargs:
            if '_dynamic' in kwargs:
                self._dynamic = kwargs['_dynamic']
        if args:
            d = args[0]
            if isinstance(d, dict):
                for k, v in self.__call_items(d):
                    if type(v) is dict:
                        # Wrap nested dicts, propagating the dynamic flag.
                        v = DotMap(v, _dynamic=self._dynamic)
                    if type(v) is list:
                        # Wrap dicts found inside list values as well.
                        l = []
                        for i in v:
                            n = i
                            if type(i) is dict:
                                n = DotMap(i, _dynamic=self._dynamic)
                            l.append(n)
                        v = l
                    self._map[k] = v
        if kwargs:
            for k, v in self.__call_items(kwargs):
                # BUG FIX: was "k is not '_dynamic'" -- an identity test on a
                # string literal that only works via CPython interning.
                if k != '_dynamic':
                    self._map[k] = v

    def __call_items(self, obj):
        # Python 2/3 compatibility: prefer iteritems() when the mapping has one.
        if hasattr(obj, 'iteritems') and ismethod(getattr(obj, 'iteritems')):
            return obj.iteritems()
        else:
            return obj.items()

    def items(self):
        return self.iteritems()

    def iteritems(self):
        return self.__call_items(self._map)

    def __iter__(self):
        return self._map.__iter__()

    def next(self):
        return self._map.next()

    def __setitem__(self, k, v):
        self._map[k] = v

    def __getitem__(self, k):
        if k not in self._map and self._dynamic and k != '_ipython_canary_method_should_not_exist_':
            # automatically extend to new DotMap
            self[k] = DotMap()
        return self._map[k]

    def __setattr__(self, k, v):
        if k in {'_map', '_dynamic', '_ipython_canary_method_should_not_exist_'}:
            super(DotMap, self).__setattr__(k, v)
        else:
            self[k] = v

    def __getattr__(self, k):
        # BUG FIX: was "k == {...}" -- comparing a string against a set literal
        # is always False, so internals fell through to self[k] and could
        # recurse via __getitem__ before _map exists.
        if k in {'_map', '_dynamic', '_ipython_canary_method_should_not_exist_'}:
            return super(DotMap, self).__getattr__(k)
        else:
            return self[k]

    def __delattr__(self, key):
        return self._map.__delitem__(key)

    def __contains__(self, k):
        return self._map.__contains__(k)

    def __str__(self):
        items = []
        for k, v in self.__call_items(self._map):
            # bizarre recursive assignment situation (why someone would do this is beyond me)
            if id(v) == id(self):
                items.append('{0}=DotMap(...)'.format(k))
            else:
                items.append('{0}={1}'.format(k, repr(v)))
        out = 'DotMap({0})'.format(', '.join(items))
        return out

    def __repr__(self):
        return str(self)

    def toDict(self):
        """Convert (recursively) to a plain dict, tolerating self-references."""
        d = {}
        for k, v in self.items():
            if type(v) is DotMap:
                # bizarre recursive assignment support
                if id(v) == id(self):
                    v = d
                else:
                    v = v.toDict()
            elif type(v) is list:
                l = []
                for i in v:
                    n = i
                    if type(i) is DotMap:
                        n = i.toDict()
                    l.append(n)
                v = l
            d[k] = v
        return d

    def pprint(self):
        pprint(self.toDict())

    def empty(self):
        return (not any(self))

    # proper dict subclassing
    def values(self):
        return self._map.values()

    # ipython support
    def __dir__(self):
        return self.keys()

    @classmethod
    def parseOther(cls, other):
        # Unwrap a DotMap operand so comparisons work on the backing OrderedDict.
        if type(other) is DotMap:
            return other._map
        else:
            return other

    def __cmp__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__cmp__(other)

    def __eq__(self, other):
        other = DotMap.parseOther(other)
        if not isinstance(other, dict):
            return False
        return self._map.__eq__(other)

    def __ge__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ge__(other)

    def __gt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__gt__(other)

    def __le__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__le__(other)

    def __lt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__lt__(other)

    def __ne__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ne__(other)

    def __delitem__(self, key):
        return self._map.__delitem__(key)

    def __len__(self):
        return self._map.__len__()

    def clear(self):
        self._map.clear()

    def copy(self):
        return DotMap(self.toDict())

    def get(self, key, default=None):
        return self._map.get(key, default)

    def has_key(self, key):
        return key in self._map

    def iterkeys(self):
        return self._map.iterkeys()

    def itervalues(self):
        return self._map.itervalues()

    def keys(self):
        return self._map.keys()

    def pop(self, key, default=None):
        return self._map.pop(key, default)

    def popitem(self):
        return self._map.popitem()

    def setdefault(self, key, default=None):
        # BUG FIX: the stored/looked-up value was previously not returned.
        return self._map.setdefault(key, default)

    def update(self, *args, **kwargs):
        if len(args) != 0:
            self._map.update(*args)
        self._map.update(kwargs)

    def viewitems(self):
        return self._map.viewitems()

    def viewkeys(self):
        return self._map.viewkeys()

    def viewvalues(self):
        return self._map.viewvalues()

    @classmethod
    def fromkeys(cls, seq, value=None):
        d = DotMap()
        d._map = OrderedDict.fromkeys(seq, value)
        return d

    def __getstate__(self):
        return self.__dict__

    def __setstate__(self, d):
        self.__dict__.update(d)
class DotMap(MutableMapping, OrderedDict):
    """Ordered mapping with attribute-style ("dot") access to keys.

    Wraps an internal ``OrderedDict`` (``self._map``); nested dicts passed to
    the constructor are recursively converted to DotMaps, and self-referential
    input is handled via an id-tracking table.  ``_dynamic`` is accepted as a
    reserved constructor keyword and is not stored as a key.

    Fixes applied: ``k is not "_dynamic"`` compared a string literal by
    identity (undefined behavior, SyntaxWarning on modern Python) and is now a
    proper ``!=`` comparison; local ``seperator`` typo corrected.
    """

    def __init__(self, *args, **kwargs):
        self._map = OrderedDict()
        self._dynamic = True
        if kwargs:
            if "_dynamic" in kwargs:
                self._dynamic = kwargs["_dynamic"]
        if args:
            d = args[0]
            # for recursive assignment handling: remember already-wrapped ids
            trackedIDs = {id(d): self}
            if isinstance(d, dict):
                for k, v in self.__call_items(d):
                    if isinstance(v, dict):
                        if id(v) in trackedIDs:
                            v = trackedIDs[id(v)]
                        else:
                            v = self.__class__(v, _dynamic=self._dynamic)
                            trackedIDs[id(v)] = v
                    if type(v) is list:
                        l = []
                        for i in v:
                            n = i
                            if isinstance(i, dict):
                                n = self.__class__(i, _dynamic=self._dynamic)
                            l.append(n)
                        v = l
                    self._map[k] = v
        if kwargs:
            for k, v in self.__call_items(kwargs):
                # equality, not identity: "_dynamic" is a config flag, not data
                if k != "_dynamic":
                    self._map[k] = v

    def __call_items(self, obj):
        # prefer iteritems() on Python-2 style mappings
        if hasattr(obj, "iteritems") and ismethod(getattr(obj, "iteritems")):
            return obj.iteritems()
        else:
            return obj.items()

    def items(self):
        return self.iteritems()

    def iteritems(self):
        return self.__call_items(self._map)

    def __iter__(self):
        return self._map.__iter__()

    def next(self):
        return self._map.next()

    def __setitem__(self, k, v):
        self._map[k] = v

    def __getitem__(self, k):
        return self._map[k]

    def __setattr__(self, k, v):
        # internal attributes go on the instance; everything else into the map
        if k in {
                "_map",
                "_dynamic",
                "_ipython_canary_method_should_not_exist_",
        }:
            super(DotMap, self).__setattr__(k, v)
        else:
            self[k] = v

    def __getattr__(self, k):
        if k in {
                "_map",
                "_dynamic",
                "_ipython_canary_method_should_not_exist_",
        }:
            return super(DotMap, self).__getattr__(k)
        try:
            # real attributes (methods etc.) win over map keys
            v = super(self.__class__, self).__getattribute__(k)
            return v
        except AttributeError:
            pass
        # NOTE: raises KeyError (not AttributeError) for missing keys
        return self[k]

    def __delattr__(self, key):
        return self._map.__delitem__(key)

    def __contains__(self, k):
        return self._map.__contains__(k)

    def __add__(self, other):
        # an empty DotMap acts as an additive identity; otherwise unsupported
        if self.empty():
            return other
        else:
            self_type = type(self).__name__
            other_type = type(other).__name__
            msg = "unsupported operand type(s) for +: '{}' and '{}'"
            raise TypeError(msg.format(self_type, other_type))

    def __str__(self):
        items = []
        for k, v in self.__call_items(self._map):
            # recursive assignment case
            if id(v) == id(self):
                items.append("{0}={1}(...)".format(k, self.__class__.__name__))
            else:
                separator = "\n" if isinstance(v, DotMap) else " "
                attr_str = f"{k}:{separator}{v}"
                attr_str = self._indent(attr_str, 2)
                items.append(attr_str)
        joined = "\n".join(items)
        return joined

    def __repr__(self):
        return str(self)

    def toDict(self):
        """Recursively convert to a plain dict (lists/tuples preserved)."""
        d = {}
        for k, v in self.items():
            if issubclass(type(v), DotMap):
                # bizarre recursive assignment support
                if id(v) == id(self):
                    v = d
                else:
                    v = v.toDict()
            elif type(v) in (list, tuple):
                l = []
                for i in v:
                    n = i
                    if issubclass(type(i), DotMap):
                        n = i.toDict()
                    l.append(n)
                if type(v) is tuple:
                    v = tuple(l)
                else:
                    v = l
            d[k] = v
        return d

    def pprint(self, pformat="dict"):
        if pformat == "json":
            print(dumps(self.toDict(), indent=4, sort_keys=True))
        else:
            pprint(self.toDict())

    def empty(self):
        return not any(self)

    # proper dict subclassing
    def values(self):
        return self._map.values()

    # ipython support
    def __dir__(self):
        return self.keys()

    @classmethod
    def parseOther(self, other):
        # unwrap DotMap operands so comparisons hit the underlying OrderedDict
        if issubclass(type(other), DotMap):
            return other._map
        else:
            return other

    def __cmp__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__cmp__(other)

    def __eq__(self, other):
        other = DotMap.parseOther(other)
        if not isinstance(other, dict):
            return False
        return self._map.__eq__(other)

    def __ge__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ge__(other)

    def __gt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__gt__(other)

    def __le__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__le__(other)

    def __lt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__lt__(other)

    def __ne__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ne__(other)

    def __delitem__(self, key):
        return self._map.__delitem__(key)

    def __len__(self):
        return self._map.__len__()

    def clear(self):
        self._map.clear()

    def copy(self):
        return self.__class__(self)

    def __copy__(self):
        return self.copy()

    def __deepcopy__(self, memo=None):
        return self.copy()

    def get(self, key, default=None):
        return self._map.get(key, default)

    def has_key(self, key):
        return key in self._map

    def iterkeys(self):
        return self._map.iterkeys()

    def itervalues(self):
        return self._map.itervalues()

    def keys(self):
        return self._map.keys()

    def pop(self, key, default=None):
        return self._map.pop(key, default)

    def popitem(self):
        return self._map.popitem()

    def setdefault(self, key, default=None):
        return self._map.setdefault(key, default)

    def update(self, *args, **kwargs):
        if len(args) != 0:
            self._map.update(*args)
        self._map.update(kwargs)

    def viewitems(self):
        return self._map.viewitems()

    def viewkeys(self):
        return self._map.viewkeys()

    def viewvalues(self):
        return self._map.viewvalues()

    @classmethod
    def fromkeys(cls, seq, value=None):
        d = cls()
        d._map = OrderedDict.fromkeys(seq, value)
        return d

    def __getstate__(self):
        return self.__dict__

    def __setstate__(self, d):
        self.__dict__.update(d)

    # bannerStr: human-readable multi-section rendering helpers
    def _getListStr(self, items):
        out = "["
        mid = ""
        for i in items:
            mid += " {}\n".format(i)
        if mid != "":
            mid = "\n" + mid
        out += mid
        out += "]"
        return out

    def _getValueStr(self, k, v):
        outV = v
        multiLine = len(str(v).split("\n")) > 1
        if multiLine:
            # push to next line
            outV = "\n" + v
        if type(v) is list:
            outV = self._getListStr(v)
        out = "{} {}".format(k, outV)
        return out

    def _getSubMapDotList(self, pre, name, subMap):
        outList = []
        if pre == "":
            pre = name
        else:
            pre = "{}.{}".format(pre, name)

        def stamp(pre, k, v):
            valStr = self._getValueStr(k, v)
            return "{}.{}".format(pre, valStr)

        for k, v in subMap.items():
            if isinstance(v, DotMap) and v != DotMap():
                subList = self._getSubMapDotList(pre, k, v)
                outList.extend(subList)
            else:
                outList.append(stamp(pre, k, v))
        return outList

    def _getSubMapStr(self, name, subMap):
        outList = ["== {} ==".format(name)]
        for k, v in subMap.items():
            if isinstance(v, self.__class__) and v != self.__class__():
                # break down to dots
                subList = self._getSubMapDotList("", k, v)
                # add the divit
                # subList = ['> {}'.format(i) for i in subList]
                outList.extend(subList)
            else:
                out = self._getValueStr(k, v)
                # out = '> {}'.format(out)
                out = "{}".format(out)
                outList.append(out)
        finalOut = "\n".join(outList)
        return finalOut

    def bannerStr(self):
        lines = []
        previous = None
        for k, v in self.items():
            if previous == self.__class__.__name__:
                lines.append("-")
            out = ""
            if isinstance(v, self.__class__):
                name = k
                subMap = v
                out = self._getSubMapStr(name, subMap)
                lines.append(out)
                previous = self.__class__.__name__
            else:
                out = self._getValueStr(k, v)
                lines.append(out)
                previous = "other"
        lines.append("--")
        s = "\n".join(lines)
        return s

    def _indent(self, s_, num_spaces):
        # indent every line after the first by num_spaces
        s = s_.split("\n")
        if len(s) == 1:
            return s_
        first = s.pop(0)
        s = [(num_spaces * " ") + line for line in s]
        s = "\n".join(s)
        s = first + "\n" + s
        return s
class Robot(object):
    """ for initializing and running the interface between cpu and robot firmware via serial link """

    # NOTE(review): Python 2 code (`print p`, `xrange`).  `baud` is passed to
    # pyserial as a string default -- presumably pyserial coerces it; confirm.
    def __init__(self, baud='115200', port='/dev/ttyACM0', readtimer=0, nl='\n', LOAD=True):
        self.baud = baud
        self.port = port
        # serial link to the firmware (G-code dialect, Marlin-style M/G commands)
        self.con = ser.Serial(port=port, baudrate=baud, timeout=readtimer)
        self.nl = nl
        self.LOADING = LOAD
        # symbolic instruction name -> raw G-code string
        self.do = {
            'pickup_pos': 'G0 X1',
            'drop_pos': 'G0 X52',
            'fan_on': 'M106',
            'fan_off': 'M107',
            'servo_drop': 'M280 S57 P0',
            'servo_up': 'M280 S120 P0',
            'end_stop_status': 'M119',
            'positions': 'M114',
            'stop': 'M410'
        }
        # estimated execution time (seconds) for each instruction above
        self.times = {
            'pickup_pos': 3,
            'drop_pos': 3,
            'fan_on': 0.1,
            'fan_off': 0.1,
            'servo_drop': 0.6,
            'servo_up': 2.0,
            'end_stop_status': 0.1,
            'positions': 0.06,
            'stop': 0.02
        }
        # end-stop names expected in every complete M119 response
        self.sensor_keys = ["x_min", "y_min", "z_min", "x_max", "y_max"]
        # give the controller time to reboot after the port is opened
        for w in xrange(5):
            print("waiting {} seconds to init serial".format(5 - w))
            time.sleep(1)
        print("serial portisOpen={}".format(self.con.isOpen()))
        # physically home X (arm) Y (hopper) and Z (output bin) to zero positions
        self.con.write("G28 XZ" + nl)
        self.con.write("G28 Y" + nl)
        time.sleep(0.5)
        # arm 'X' swing out to allow loading of hopper
        self.con.write(self.do['drop_pos'] + nl + " " + self.do['servo_up'] + nl)
        self.con.write(self.do['fan_off'] + nl)
        # state flags for the pick/identify/drop cycle
        self.NEED_DROP = False
        self.CARD_CARRIED = False
        self.ID_DONE = False
        self.PICKING_UP = False
        # adjust sort categories quantity and bin position here:
        self.bins = OrderedDict([('Low', 125), ('High', 247.5), ('NoID', 50.0)])
        self.bin_cuts = OrderedDict([('Low', 0.0), ('High', 0.5), ('NoID', 10000.0)])
        # Z offset applied per drop so cards stack instead of colliding
        self.bin_sliver = 0.2
        self.LOADING = True
        #tl = self.con.readline()
        #while tl:
        #    print("startup: {}".format(tl.strip()))
        #    tl = self.con.readline()
        # drain and echo whatever the firmware printed during startup
        r = self.con.read(self.con.inWaiting())
        for p in r.split('echo:'):
            print p

    def dothis(self, instruction):
        """ sends instruction to robot and returns the estimated execution time if available """
        # known symbolic instruction: send mapped G-code, return its estimate
        if instruction in self.do.keys():
            self.con.write(self.do[instruction] + self.nl)
            return self.times[instruction]
        # otherwise pass the raw string through with no time estimate
        self.con.write(instruction + self.nl)
        return 0.0

    def bin_lookup(self, price, binname=None):
        """returns the bin-name the card-price should be sorted into"""
        # keeps the LAST bin whose cutoff is <= price, in bin_cuts insertion
        # order -- relies on bin_cuts being ordered ascending except 'NoID'
        for bk, bv in self.bin_cuts.viewitems():
            if price >= bv:
                binname = bk
        return binname

    def sensor_stats(self, min_ret=99, retry=0):
        """returns dict of end-stop sensors, keyed by sensor name, with values of 'open' or 'TRIGGERED'"""
        extra_time = 0
        if retry > 5:
            # back off progressively once retries start piling up
            extra_time = 0.1 * retry
            print("extra time: {} sec".format(extra_time))
        if retry > 32:
            # give up: fake a TRIGGERED y_max so callers' loops terminate
            print("too many retries of sensors = {}".format(retry))
            return {'y_max': "TRIGGERED", 'x_max': 'y_is_fake!'}
        # busy-wait until the estimated M119 time passes or enough bytes arrive
        wait = self.dothis('end_stop_status') + time.time() + extra_time
        while (time.time() < wait) and (self.con.inWaiting() < min_ret):
            pass
        # parse "name: state" lines, keeping only *_min / *_max entries
        sensordict = dict([
            tuple(chunk.split(": ")) for chunk in self.con.read(
                size=self.con.inWaiting()).split(self.nl)
            if (': ' in chunk) and (('_min' in chunk) or ('_max' in chunk))
        ])
        skeys = sensordict.keys()
        if all([sk in skeys for sk in self.sensor_keys]):
            return sensordict
        # incomplete response: recurse with an incremented retry counter
        retry += 1
        # print("Retry sensor_stats() #{}".format(retry))
        return self.sensor_stats(min_ret=min_ret, retry=retry)

    def xyz_pos(self, min_ret=59):
        """ returns dict of current stepper DESTINATIONS (in float(mm)) keyed by single-letter axis names"""
        wait = self.dothis("positions") + time.time()
        # start = time.time()
        must_have = ['X', 'Y', 'Z', 'E']
        xyz_dict = {}
        # busy-wait for the M114 reply (same pattern as sensor_stats)
        while time.time() < wait and (self.con.inWaiting() < min_ret):
            pass
        # finalwait = self.con.inWaiting()
        # M114 reply lines look like "X:.. Y:.. Z:.. E:.. Count ..."; keep the
        # destination half (before ' Count ') of lines naming all four axes
        for positions in [
                ps.split(' Count ')[0] for ps in self.con.read(
                    size=self.con.inWaiting()).split(self.nl)
                if ' Count ' in ps
        ]:
            if all([axis in positions for axis in must_have]):
                for p in positions.split(" "):
                    if ":" in p:
                        k, v = p.split(":")
                        xyz_dict[k.strip()] = float(v.strip())
        # print("actual speed: {}, ret: {}".format(time.time() - start, finalwait))
        # empty parse -> retry with a smaller byte threshold (no recursion cap)
        return xyz_dict or self.xyz_pos(min_ret=min_ret - 1)

    def go_xz(self, bin_name, timeconst=0.07, reverse=False):
        """ given a destination bin, position everything for the drop, while
        decrementing for the next drop into the bin and return the estimated
        time from the present when the drop can happen """
        back = 1 if not reverse else -1
        newz = float(self.bins[bin_name])
        # lower (or raise, when reverse) the stored bin height for next time
        self.bins[bin_name] -= (self.bin_sliver * back)
        curz = self.xyz_pos()['Z']
        if not reverse:
            # also swing arm to drop position; take its X coordinate token
            x_spot = self.do['drop_pos'].split(' ')[1]
            x_time = self.times['drop_pos']
        else:
            x_time, x_spot = 0, ""
        # Z travel estimate scales with distance
        z_time = abs(curz - newz) * timeconst
        self.dothis("G1 Z" + str(newz) + " " + x_spot)
        return z_time if z_time > x_time else x_time

    def hopper_up(self, y_current=None, bite=1.1, timeconst=0.7):
        """ raise the input hopper by a little bit, return the seconds it is estimated to take"""
        if y_current is None:
            try:
                y_current = self.xyz_pos()['Y']
            except KeyError:
                print(
                    "WARNING: hopper_up couldn't get 'Y' starting position. Moving to zero + 1."
                )
                y_current = 0
        self.dothis("G0 Y{}".format(y_current + bite))
        return bite * timeconst

    def load_hopper(self, move=10.0, y_top=220):
        """ load cards until bottom switch is triggered, indicating max capacity,
        but only move down while top proximity sensor is triggered.
        Set self.LOADING false when done"""
        # first move up until proximity sensor is triggered to get the platform up top
        print(" - Initializing hopper upwards (until top sensor triggered) - ")
        self.dothis("G0 Y{}".format(y_top))
        power_warn_time = time.time() + 22.0
        INITIALIZE_UP = True
        while INITIALIZE_UP:
            sensor = self.sensor_stats()
            if time.time() > power_warn_time:
                # periodic hint: without motor power the sensor never trips
                power_warn_time = time.time() + 2.0
                print("Is the power-supply on? If not, break and start over.")
                print("sensors say: {}".format(sensor))
            if 'TRIG' in sensor['y_max']:
                print("top sensor = {}".format(sensor['y_max']))
                time.sleep(self.dothis("stop"))
                INITIALIZE_UP = False
        xyz = self.xyz_pos()
        print(
            "LOAD THE HOPPER. Loading ends when bottom limit switch is triggered."
        )
        print("Positions: {}".format(", ".join(
            [k + ":" + str(v) for k, v in xyz.viewitems()])))
        new_sweep = True
        destination = max((xyz['Y'] - move), 0)
        start = time.time()
        # step the platform down `move` mm at a time while cards keep the top
        # sensor triggered; bottom switch ends loading
        while self.LOADING:
            sensor = self.sensor_stats()
            if 'TRIG' in sensor['y_min']:
                # bottom limit hit: stop, re-zero Y, finish
                self.dothis("stop")
                self.dothis("G92 Y0")
                self.dothis("G0 Y0")
                self.LOADING = False
                continue
            if 'TRIG' in sensor['y_max'] and new_sweep:
                print("moving down to: Y={}".format(destination))
                self.dothis("G0 Y{}".format(destination))
                start = time.time()
                new_sweep = False
            if 'open' in sensor['y_max'] and not new_sweep:
                print(
                    "top sensor Open after {} seconds...".format(time.time() -
                                                                 start))
                new_sweep = True
                xyz = self.xyz_pos()
                if 'Y' in xyz.keys():
                    destination = max((xyz['Y'] - move), 0)
                else:
                    print("BAD XYZ: {}".format(", ".join(
                        [k + ":" + str(v) for k, v in xyz.viewitems()])))
        xyz = self.xyz_pos()
        print("DONE LOADING")
        print("Positions: {}".format(", ".join(
            [k + ":" + str(v) for k, v in xyz.viewitems()])))
        # nudge the stack back up until the top sensor triggers again
        nudge_up = True
        wait = 0
        sensor = self.sensor_stats()
        while nudge_up:
            if time.time() > wait:
                wait = self.hopper_up() + time.time()
                sensor = self.sensor_stats()
            if "TRIG" in sensor['y_max']:
                nudge_up = False
        time.sleep(self.dothis('fan_on'))
        return self.hopper_up(bite=0.2)
# Build a dependency graph keyed by table name (values = list of referenced
# tables), then emit tables in FK-safe order (Kahn-style topological sort).
# Relies on `cur`, `db`, `graph` and `tables` defined earlier in the script.
for table in cur.fetchall():
    graph[table[0]] = []
# NOTE(review): query is now parameterized (DB-API %s placeholder, MySQL-style
# driver assumed) instead of concatenating `db` into the SQL -- the previous
# string-built query was injectable.
cur.execute(
    "SELECT table_name, referenced_table_name AS list_of_fks"
    " FROM INFORMATION_SCHEMA.key_column_usage"
    " WHERE referenced_table_schema = %s"
    " AND referenced_table_name IS NOT NULL"
    " ORDER BY table_name, column_name;", (db,))
for table in cur.fetchall():
    if table[1] not in graph[table[0]]:
        graph[table[0]].append(table[1])
# tables with the fewest dependencies first
graph = OrderedDict(sorted(graph.viewitems(), key=lambda x: len(x[1])))
for item in graph:
    print(str(item) + ":" + str(graph[item]))
while len(graph) > 0:
    # iterate over a snapshot: entries are deleted from `graph` inside the
    # loop, which would raise RuntimeError when iterating the dict directly
    for item in list(graph):
        if len(graph[item]) == 0:
            # no outstanding dependencies: emit, then unlink from dependents
            tables.append(item)
            dependent_tables = [
                key for key, value in graph.iteritems() if item in value
            ]
            if dependent_tables:
                for table in dependent_tables:
                    graph[table].remove(item)
            del graph[item]
class DotMap(MutableMapping, OrderedDict):
    # Ordered, dot-accessible mapping (second variant).
    #   _dynamic: missing-key access auto-creates nested DotMaps.
    #   _prevent_method_masking: reject keys found in module-level
    #       `reserved_keys` (defined elsewhere in this file).
    #   _trackedIDs: id(...)-keyed table threaded through recursion so
    #       circular input doesn't recurse forever.
    def __init__(self, *args, **kwargs):
        self._map = OrderedDict()
        self._dynamic = kwargs.pop('_dynamic', True)
        self._prevent_method_masking = kwargs.pop('_prevent_method_masking', False)
        trackedIDs = kwargs.pop('_trackedIDs', {})
        if args:
            d = args[0]
            # for recursive assignment handling
            trackedIDs[id(d)] = self
            src = []
            if isinstance(d, MutableMapping):
                src = self.__call_items(d)
            elif isinstance(d, Iterable):
                # assumed to be an iterable of (key, value) pairs
                src = d
            for k, v in src:
                if self._prevent_method_masking and k in reserved_keys:
                    raise KeyError('"{}" is reserved'.format(k))
                if isinstance(v, dict):
                    idv = id(v)
                    if idv in trackedIDs:
                        # already seen this dict (circular ref): reuse entry
                        v = trackedIDs[idv]
                    else:
                        trackedIDs[idv] = v
                        v = self.__class__(
                            v,
                            _dynamic=self._dynamic,
                            _prevent_method_masking=self._prevent_method_masking,
                            _trackedIDs=trackedIDs)
                if type(v) is list:
                    l = []
                    for i in v:
                        n = i
                        if isinstance(i, dict):
                            idi = id(i)
                            if idi in trackedIDs:
                                n = trackedIDs[idi]
                            else:
                                trackedIDs[idi] = i
                                n = self.__class__(
                                    i,
                                    _dynamic=self._dynamic,
                                    _prevent_method_masking=self._prevent_method_masking)
                        l.append(n)
                    v = l
                self._map[k] = v
        if kwargs:
            # remaining kwargs become keys (reserved-name check still applies)
            for k, v in self.__call_items(kwargs):
                if self._prevent_method_masking and k in reserved_keys:
                    raise KeyError('"{}" is reserved'.format(k))
                self._map[k] = v

    def __call_items(self, obj):
        # prefer iteritems() on Python-2 style mappings
        if hasattr(obj, 'iteritems') and ismethod(getattr(obj, 'iteritems')):
            return obj.iteritems()
        else:
            return obj.items()

    def items(self):
        return self.iteritems()

    def iteritems(self):
        return self.__call_items(self._map)

    def __iter__(self):
        return self._map.__iter__()

    def next(self):
        return self._map.next()

    def __setitem__(self, k, v):
        self._map[k] = v

    def __getitem__(self, k):
        if k not in self._map and self._dynamic and k != '_ipython_canary_method_should_not_exist_':
            # automatically extend to new DotMap
            self[k] = self.__class__()
        return self._map[k]

    def __setattr__(self, k, v):
        # internal attributes live on the instance, everything else in the map
        if k in {
                '_map', '_dynamic', '_ipython_canary_method_should_not_exist_',
                '_prevent_method_masking'
        }:
            super(DotMap, self).__setattr__(k, v)
        elif self._prevent_method_masking and k in reserved_keys:
            raise KeyError('"{}" is reserved'.format(k))
        else:
            self[k] = v

    def __getattr__(self, k):
        # never resolve dunder lookups through the map (keeps pickle/copy sane)
        if k.startswith('__') and k.endswith('__'):
            raise AttributeError(k)
        if k in {
                '_map', '_dynamic', '_ipython_canary_method_should_not_exist_'
        }:
            return super(DotMap, self).__getattr__(k)
        try:
            # real attributes (methods etc.) win over map keys
            v = super(self.__class__, self).__getattribute__(k)
            return v
        except AttributeError:
            pass
        return self[k]

    def __delattr__(self, key):
        return self._map.__delitem__(key)

    def __contains__(self, k):
        return self._map.__contains__(k)

    def __add__(self, other):
        # empty DotMap acts as an additive identity; otherwise unsupported
        if self.empty():
            return other
        else:
            self_type = type(self).__name__
            other_type = type(other).__name__
            msg = "unsupported operand type(s) for +: '{}' and '{}'"
            raise TypeError(msg.format(self_type, other_type))

    def __str__(self, seen=None):
        items = []
        # `seen` holds ids already rendered, to cut off circular references
        seen = {id(self)} if seen is None else seen
        for k, v in self.__call_items(self._map):
            # circular assignment case
            if isinstance(v, self.__class__):
                if id(v) in seen:
                    items.append('{0}={1}(...)'.format(
                        k, self.__class__.__name__))
                else:
                    seen.add(id(v))
                    items.append('{0}={1}'.format(k, v.__str__(seen)))
            else:
                items.append('{0}={1}'.format(k, repr(v)))
        joined = ', '.join(items)
        out = '{0}({1})'.format(self.__class__.__name__, joined)
        return out

    def __repr__(self):
        return str(self)

    def toDict(self, seen=None):
        # plain-dict conversion; `seen` maps id(DotMap) -> its converted dict
        # so circular structures convert to circular plain dicts
        if seen is None:
            seen = {}
        d = {}
        seen[id(self)] = d
        for k, v in self.items():
            if issubclass(type(v), DotMap):
                idv = id(v)
                if idv in seen:
                    v = seen[idv]
                else:
                    v = v.toDict(seen=seen)
            elif type(v) in (list, tuple):
                l = []
                for i in v:
                    n = i
                    if issubclass(type(i), DotMap):
                        idv = id(n)
                        if idv in seen:
                            n = seen[idv]
                        else:
                            n = i.toDict(seen=seen)
                    l.append(n)
                if type(v) is tuple:
                    v = tuple(l)
                else:
                    v = l
            d[k] = v
        return d

    def pprint(self, pformat='dict'):
        if pformat == 'json':
            print(dumps(self.toDict(), indent=4, sort_keys=True))
        else:
            pprint(self.toDict())

    def empty(self):
        return (not any(self))

    # proper dict subclassing
    def values(self):
        return self._map.values()

    # ipython support
    def __dir__(self):
        return self.keys()

    @classmethod
    def parseOther(self, other):
        # unwrap DotMap operands so comparisons hit the underlying OrderedDict
        if issubclass(type(other), DotMap):
            return other._map
        else:
            return other

    def __cmp__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__cmp__(other)

    def __eq__(self, other):
        other = DotMap.parseOther(other)
        if not isinstance(other, dict):
            return False
        return self._map.__eq__(other)

    def __ge__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ge__(other)

    def __gt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__gt__(other)

    def __le__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__le__(other)

    def __lt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__lt__(other)

    def __ne__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ne__(other)

    def __delitem__(self, key):
        return self._map.__delitem__(key)

    def __len__(self):
        return self._map.__len__()

    def clear(self):
        self._map.clear()

    def copy(self):
        return self.__class__(self)

    def __copy__(self):
        return self.copy()

    def __deepcopy__(self, memo=None):
        # NOTE(review): delegates to copy(); nested dicts are re-wrapped but
        # non-dict values are shared, so this is not a true deep copy
        return self.copy()

    def get(self, key, default=None):
        return self._map.get(key, default)

    def has_key(self, key):
        return key in self._map

    def iterkeys(self):
        return self._map.iterkeys()

    def itervalues(self):
        return self._map.itervalues()

    def keys(self):
        return self._map.keys()

    def pop(self, key, default=None):
        return self._map.pop(key, default)

    def popitem(self):
        return self._map.popitem()

    def setdefault(self, key, default=None):
        return self._map.setdefault(key, default)

    def update(self, *args, **kwargs):
        if len(args) != 0:
            self._map.update(*args)
        self._map.update(kwargs)

    def viewitems(self):
        return self._map.viewitems()

    def viewkeys(self):
        return self._map.viewkeys()

    def viewvalues(self):
        return self._map.viewvalues()

    @classmethod
    def fromkeys(cls, seq, value=None):
        d = cls()
        d._map = OrderedDict.fromkeys(seq, value)
        return d

    def __getstate__(self):
        return self.__dict__

    def __setstate__(self, d):
        self.__dict__.update(d)

    # bannerStr: human-readable multi-section rendering helpers
    def _getListStr(self, items):
        out = '['
        mid = ''
        for i in items:
            mid += ' {}\n'.format(i)
        if mid != '':
            mid = '\n' + mid
        out += mid
        out += ']'
        return out

    def _getValueStr(self, k, v):
        outV = v
        multiLine = len(str(v).split('\n')) > 1
        if multiLine:
            # push to next line
            outV = '\n' + v
        if type(v) is list:
            outV = self._getListStr(v)
        out = '{} {}'.format(k, outV)
        return out

    def _getSubMapDotList(self, pre, name, subMap):
        # flatten a nested DotMap into "a.b.c value" strings
        outList = []
        if pre == '':
            pre = name
        else:
            pre = '{}.{}'.format(pre, name)

        def stamp(pre, k, v):
            valStr = self._getValueStr(k, v)
            return '{}.{}'.format(pre, valStr)

        for k, v in subMap.items():
            if isinstance(v, DotMap) and v != DotMap():
                subList = self._getSubMapDotList(pre, k, v)
                outList.extend(subList)
            else:
                outList.append(stamp(pre, k, v))
        return outList

    def _getSubMapStr(self, name, subMap):
        outList = ['== {} =='.format(name)]
        for k, v in subMap.items():
            if isinstance(v, self.__class__) and v != self.__class__():
                # break down to dots
                subList = self._getSubMapDotList('', k, v)
                # add the divit
                # subList = ['> {}'.format(i) for i in subList]
                outList.extend(subList)
            else:
                out = self._getValueStr(k, v)
                # out = '> {}'.format(out)
                out = '{}'.format(out)
                outList.append(out)
        finalOut = '\n'.join(outList)
        return finalOut

    def bannerStr(self):
        # sectioned report: "== key ==" banners for nested maps, separated by
        # '-' lines, terminated with '--'
        lines = []
        previous = None
        for k, v in self.items():
            if previous == self.__class__.__name__:
                lines.append('-')
            out = ''
            if isinstance(v, self.__class__):
                name = k
                subMap = v
                out = self._getSubMapStr(name, subMap)
                lines.append(out)
                previous = self.__class__.__name__
            else:
                out = self._getValueStr(k, v)
                lines.append(out)
                previous = 'other'
        lines.append('--')
        s = '\n'.join(lines)
        return s
class Robot(object):
    """ for initializing and running the interface between cpu and robot firmware via serial link """

    # NOTE(review): duplicate of the earlier Robot class (Python 2 code --
    # `print p`, `xrange`).  `baud` default is a string passed to pyserial's
    # baudrate; presumably coerced by pyserial -- confirm.
    def __init__(self, baud='115200', port='/dev/ttyACM0', readtimer=0, nl='\n', LOAD=True):
        self.baud = baud
        self.port = port
        # serial link to the firmware (Marlin-style M/G-code commands)
        self.con = ser.Serial(port=port, baudrate=baud, timeout=readtimer)
        self.nl = nl
        self.LOADING = LOAD
        # symbolic instruction name -> raw G-code string
        self.do = {'pickup_pos': 'G0 X1',
                   'drop_pos': 'G0 X52',
                   'fan_on': 'M106',
                   'fan_off': 'M107',
                   'servo_drop': 'M280 S57 P0',
                   'servo_up': 'M280 S120 P0',
                   'end_stop_status': 'M119',
                   'positions': 'M114',
                   'stop': 'M410'}
        # estimated execution time (seconds) per instruction above
        self.times = {'pickup_pos': 3,
                      'drop_pos': 3,
                      'fan_on': 0.1,
                      'fan_off': 0.1,
                      'servo_drop': 0.6,
                      'servo_up': 2.0,
                      'end_stop_status': 0.1,
                      'positions': 0.06,
                      'stop': 0.02}
        # end-stop names expected in every complete M119 response
        self.sensor_keys = ["x_min", "y_min", "z_min", "x_max", "y_max"]
        # give the controller time to reboot after the port opens
        for w in xrange(5):
            print("waiting {} seconds to init serial".format(5 - w))
            time.sleep(1)
        print("serial portisOpen={}".format(self.con.isOpen()))
        # physically home X (arm) Y (hopper) and Z (output bin) to zero positions
        self.con.write("G28 XZ" + nl)
        self.con.write("G28 Y" + nl)
        time.sleep(0.5)
        # arm 'X' swing out to allow loading of hopper
        self.con.write(self.do['drop_pos'] + nl + " " + self.do['servo_up'] + nl)
        self.con.write(self.do['fan_off'] + nl)
        # state flags for the pick/identify/drop cycle
        self.NEED_DROP = False
        self.CARD_CARRIED = False
        self.ID_DONE = False
        self.PICKING_UP = False
        # adjust sort categories quantity and bin position here:
        self.bins = OrderedDict([('Low', 125), ('High', 247.5), ('NoID', 50.0)])
        self.bin_cuts = OrderedDict([('Low', 0.0), ('High', 0.5), ('NoID', 10000.0)])
        # Z offset applied per drop so cards stack instead of colliding
        self.bin_sliver = 0.2
        self.LOADING = True
        #tl = self.con.readline()
        #while tl:
        #    print("startup: {}".format(tl.strip()))
        #    tl = self.con.readline()
        # drain and echo whatever the firmware printed during startup
        r = self.con.read(self.con.inWaiting())
        for p in r.split('echo:'):
            print p

    def dothis(self, instruction):
        """ sends instruction to robot and returns the estimated execution time if available """
        # known symbolic instruction: send mapped G-code, return its estimate
        if instruction in self.do.keys():
            self.con.write(self.do[instruction] + self.nl)
            return self.times[instruction]
        # otherwise pass the raw string through with no time estimate
        self.con.write(instruction + self.nl)
        return 0.0

    def bin_lookup(self, price, binname=None):
        """returns the bin-name the card-price should be sorted into"""
        # keeps the LAST bin whose cutoff is <= price, in bin_cuts order
        for bk, bv in self.bin_cuts.viewitems():
            if price >= bv:
                binname = bk
        return binname

    def sensor_stats(self, min_ret=99, retry=0):
        """returns dict of end-stop sensors, keyed by sensor name, with values of 'open' or 'TRIGGERED'"""
        extra_time = 0
        if retry > 5:
            # back off progressively once retries pile up
            extra_time = 0.1 * retry
            print("extra time: {} sec".format(extra_time))
        if retry > 32:
            # give up: fake a TRIGGERED y_max so callers' loops terminate
            print("too many retries of sensors = {}".format(retry))
            return {'y_max': "TRIGGERED", 'x_max': 'y_is_fake!'}
        # busy-wait until the estimated M119 time passes or enough bytes arrive
        wait = self.dothis('end_stop_status') + time.time() + extra_time
        while (time.time() < wait) and (self.con.inWaiting() < min_ret):
            pass
        # parse "name: state" lines, keeping only *_min / *_max entries
        sensordict = dict([tuple(chunk.split(": "))
                           for chunk in self.con.read(size=self.con.inWaiting()).split(self.nl)
                           if (': ' in chunk) and (('_min' in chunk) or ('_max' in chunk))])
        skeys = sensordict.keys()
        if all([sk in skeys for sk in self.sensor_keys]):
            return sensordict
        # incomplete response: recurse with an incremented retry counter
        retry += 1
        # print("Retry sensor_stats() #{}".format(retry))
        return self.sensor_stats(min_ret=min_ret, retry=retry)

    def xyz_pos(self, min_ret=59):
        """ returns dict of current stepper DESTINATIONS (in float(mm)) keyed by single-letter axis names"""
        wait = self.dothis("positions") + time.time()
        # start = time.time()
        must_have = ['X', 'Y', 'Z', 'E']
        xyz_dict = {}
        # busy-wait for the M114 reply (same pattern as sensor_stats)
        while time.time() < wait and (self.con.inWaiting() < min_ret):
            pass
        # finalwait = self.con.inWaiting()
        # keep the destination half (before ' Count ') of reply lines that
        # name all four axes, then parse "K:float" tokens
        for positions in [ps.split(' Count ')[0]
                          for ps in self.con.read(size=self.con.inWaiting()).split(self.nl)
                          if ' Count ' in ps]:
            if all([axis in positions for axis in must_have]):
                for p in positions.split(" "):
                    if ":" in p:
                        k, v = p.split(":")
                        xyz_dict[k.strip()] = float(v.strip())
        # print("actual speed: {}, ret: {}".format(time.time() - start, finalwait))
        # empty parse -> retry with a smaller byte threshold (no recursion cap)
        return xyz_dict or self.xyz_pos(min_ret=min_ret-1)

    def go_xz(self, bin_name, timeconst=0.07, reverse=False):
        """ given a destination bin, position everything for the drop, while
        decrementing for the next drop into the bin and return the estimated
        time from the present when the drop can happen """
        back = 1 if not reverse else -1
        newz = float(self.bins[bin_name])
        # lower (or raise, when reverse) the stored bin height for next time
        self.bins[bin_name] -= (self.bin_sliver * back)
        curz = self.xyz_pos()['Z']
        if not reverse:
            # also swing arm to drop position; take its X coordinate token
            x_spot = self.do['drop_pos'].split(' ')[1]
            x_time = self.times['drop_pos']
        else:
            x_time, x_spot = 0, ""
        # Z travel estimate scales with distance
        z_time = abs(curz - newz) * timeconst
        self.dothis("G1 Z" + str(newz) + " " + x_spot)
        return z_time if z_time > x_time else x_time

    def hopper_up(self, y_current=None, bite=1.1, timeconst=0.7):
        """ raise the input hopper by a little bit, return the seconds it is estimated to take"""
        if y_current is None:
            try:
                y_current = self.xyz_pos()['Y']
            except KeyError:
                print("WARNING: hopper_up couldn't get 'Y' starting position. Moving to zero + 1.")
                y_current = 0
        self.dothis("G0 Y{}".format(y_current + bite))
        return bite * timeconst

    def load_hopper(self, move=10.0, y_top=220):
        """ load cards until bottom switch is triggered, indicating max capacity,
        but only move down while top proximity sensor is triggered.
        Set self.LOADING false when done"""
        # first move up until proximity sensor is triggered to get the platform up top
        print(" - Initializing hopper upwards (until top sensor triggered) - ")
        self.dothis("G0 Y{}".format(y_top))
        power_warn_time = time.time() + 22.0
        INITIALIZE_UP = True
        while INITIALIZE_UP:
            sensor = self.sensor_stats()
            if time.time() > power_warn_time:
                # periodic hint: without motor power the sensor never trips
                power_warn_time = time.time() + 2.0
                print("Is the power-supply on? If not, break and start over.")
                print("sensors say: {}".format(sensor))
            if 'TRIG' in sensor['y_max']:
                print("top sensor = {}".format(sensor['y_max']))
                time.sleep(self.dothis("stop"))
                INITIALIZE_UP = False
        xyz = self.xyz_pos()
        print("LOAD THE HOPPER. Loading ends when bottom limit switch is triggered.")
        print("Positions: {}".format(", ".join([k + ":" + str(v) for k, v in xyz.viewitems()])))
        new_sweep = True
        destination = max((xyz['Y'] - move), 0)
        start = time.time()
        # step the platform down `move` mm at a time while cards keep the top
        # sensor triggered; bottom switch ends loading
        while self.LOADING:
            sensor = self.sensor_stats()
            if 'TRIG' in sensor['y_min']:
                # bottom limit hit: stop, re-zero Y, finish
                self.dothis("stop")
                self.dothis("G92 Y0")
                self.dothis("G0 Y0")
                self.LOADING = False
                continue
            if 'TRIG' in sensor['y_max'] and new_sweep:
                print("moving down to: Y={}".format(destination))
                self.dothis("G0 Y{}".format(destination))
                start = time.time()
                new_sweep = False
            if 'open' in sensor['y_max'] and not new_sweep:
                print("top sensor Open after {} seconds...".format(time.time()-start))
                new_sweep = True
                xyz = self.xyz_pos()
                if 'Y' in xyz.keys():
                    destination = max((xyz['Y'] - move), 0)
                else:
                    print("BAD XYZ: {}".format(", ".join([k + ":" + str(v) for k, v in xyz.viewitems()])))
        xyz = self.xyz_pos()
        print("DONE LOADING")
        print("Positions: {}".format(", ".join([k + ":" + str(v) for k, v in xyz.viewitems()])))
        # nudge the stack back up until the top sensor triggers again
        nudge_up = True
        wait = 0
        sensor = self.sensor_stats()
        while nudge_up:
            if time.time() > wait:
                wait = self.hopper_up() + time.time()
                sensor = self.sensor_stats()
            if "TRIG" in sensor['y_max']:
                nudge_up = False
        time.sleep(self.dothis('fan_on'))
        return self.hopper_up(bite=0.2)
def inheritance_check(variants):
    """Tally genotype inheritance for CEPH/NA128xx pedigree members.

    For each variant whose alt allele looks heterozygous-by-descent in BOTH
    parents (seen in the parent and exactly one of that parent's parents),
    count each child's genotype class (0/0, 0/1, 1/1), write per-sample and
    aggregate percentage tables to stderr, and return the matching variants.

    NOTE(review): Python 2 code (`viewitems`).  `variants` items are project
    objects exposing `.gts[sample].format[...]` -- presumably cyvcf/vcf-style
    records; confirm against the caller.
    """
    # pedigree sample IDs: maternal/paternal grandparents and parents
    m_gf = 'NA12891'
    m_gm = 'NA12892'
    d_gf = 'NA12889'
    d_gm = 'NA12890'
    mom = 'NA12878'
    dad = 'NA12877'
    #sample_ID:[hom_ref, het, hom_alt]
    #load dict with the children sample names
    kids = OrderedDict()
    for i in range(12877, 12889):
        name = "NA" + str(i)
        kids[name] = [0, 0, 0]
    kids['NA12893'] = [0, 0, 0]
    # NOTE(review): `parents` is assigned but never used below
    parents = {'NA12878': [0, 0, 0], 'NA12877': [0, 0, 0]}
    hits = []
    # thresholds on the AO (alt observation) count: "present" vs "absent"
    present_min = 5
    absent_max = 0
    for var in variants:
        #must be present in only one parent, and only one grandparent.
        mom_count = int(var.gts[mom].format['AO'])
        m_gf_count = int(var.gts[m_gf].format['AO'])
        m_gm_count = int(var.gts[m_gm].format['AO'])
        dad_count = int(var.gts[dad].format['AO'])
        d_gf_count = int(var.gts[d_gf].format['AO'])
        d_gm_count = int(var.gts[d_gm].format['AO'])
        #is var a true het in mom? present in mom and one of her parents (and
        #not dad), or dad and one of his parents (and not mom)
        mom_het = mom_count >= present_min and (
            (m_gf_count >= present_min and m_gm_count == absent_max) or
            (m_gf_count == absent_max and m_gm_count >= present_min))
        dad_het = dad_count >= present_min and (
            (d_gf_count >= present_min and d_gm_count == absent_max) or
            (d_gf_count == absent_max and d_gm_count >= present_min))
        #if dad_het and mom_het and var.gts[mom].format['GT'] != "1/1" and var.gts[dad].format['GT'] != "1/1":
        if dad_het and mom_het:
            hits.append(var)
            # tally the genotype class of every tracked sample for this hit
            for kid in kids:
                if var.gts[kid].format['GT'] == "0/0":
                    kids[kid][0] += 1
                elif var.gts[kid].format['GT'] == "0/1":
                    kids[kid][1] += 1
                elif var.gts[kid].format['GT'] == "1/1":
                    kids[kid][2] += 1
    # aggregate counts across children only (parents are skipped)
    hom_ref = 0
    het = 0
    hom_alt = 0
    for name, counts in kids.viewitems():
        if name == mom or name == dad:
            continue
        hom_ref += counts[0]
        het += counts[1]
        hom_alt += counts[2]
    total = hom_ref + het + hom_alt
    if total <= 0:
        # no qualifying variants at all; exit() here kills the interpreter
        exit('Zero total')
    sys.stderr.write("Sample\t0/0\t0/1\t1/1\n")
    for kid, counts in kids.viewitems():
        if kid == mom:
            kid = "Mom"
        if kid == dad:
            kid = "Dad"
        sys.stderr.write(kid + "\t")
        # NOTE(review): if a sample has zero counts, sum(counts) == 0 and the
        # divisions below raise ZeroDivisionError -- confirm intended
        total = float(sum(counts))
        sys.stderr.write("{0:.2f}\t{1:.2f}\t{2:.2f}\n".format(
            (counts[0]/total)*100, (counts[1]/total)*100, (counts[2]/total)*100))
    # recompute aggregate total (per-sample loop above reused `total`)
    total = hom_ref + het + hom_alt
    hom_ref = (hom_ref/float(total)) * 100
    het = (het/float(total)) * 100
    hom_alt = (hom_alt/float(total)) * 100
    sys.stderr.write("\nAggregate: ")
    sys.stderr.write(str(len(hits))+" total variants.\n")
    sys.stderr.write("GT\t%\n")
    sys.stderr.write("0/0\t{0:.2f}\n0/1\t{1:.2f}\n1/1\t{2:.2f}\n".format(hom_ref, het, hom_alt))
    return hits
class DotMap(MutableMapping, OrderedDict):
    """Ordered dict whose keys are also reachable as attributes (``m.a.b``).

    Backed by an internal OrderedDict ``self._map``.  When ``_dynamic`` is
    True (default), reading a missing key auto-creates an empty child DotMap,
    allowing ``m.x.y.z = 1`` without intermediate assignments.  Nested dicts
    (and dicts inside lists) passed to the constructor are converted to
    DotMap recursively.
    """

    def __init__(self, *args, **kwargs):
        # _map / _dynamic are set through object.__setattr__ (see __setattr__)
        # so they live on the instance, not inside the mapping.
        self._map = OrderedDict()
        self._dynamic = True
        if kwargs:
            if '_dynamic' in kwargs:
                self._dynamic = kwargs['_dynamic']
        if args:
            d = args[0]
            if isinstance(d, dict):
                for k, v in self.__call_items(d):
                    # Recursively wrap nested dicts, including dicts in lists.
                    if isinstance(v, dict):
                        v = DotMap(v, _dynamic=self._dynamic)
                    if type(v) is list:
                        l = []
                        for i in v:
                            n = i
                            if type(i) is dict:
                                n = DotMap(i, _dynamic=self._dynamic)
                            l.append(n)
                        v = l
                    self._map[k] = v
        if kwargs:
            for k, v in self.__call_items(kwargs):
                # NOTE(review): `is not` compares identity, not equality —
                # relies on small-string interning; should be `!=`. Left as-is.
                if k is not '_dynamic':
                    self._map[k] = v

    def __call_items(self, obj):
        # Prefer py2-style iteritems() when the object provides it.
        if hasattr(obj, 'iteritems') and ismethod(getattr(obj, 'iteritems')):
            return obj.iteritems()
        else:
            return obj.items()

    def items(self):
        return self.iteritems()

    def iteritems(self):
        return self.__call_items(self._map)

    def __iter__(self):
        return self._map.__iter__()

    def next(self):
        # Python-2 iterator protocol delegate.
        return self._map.next()

    def __setitem__(self, k, v):
        self._map[k] = v

    def __getitem__(self, k):
        # Dynamic mode: auto-vivify missing keys as empty DotMaps.  The
        # ipython canary key is excluded so attribute probing stays clean.
        if k not in self._map and self._dynamic and k != '_ipython_canary_method_should_not_exist_':
            # automatically extend to new DotMap
            self[k] = DotMap()
        return self._map[k]

    def __setattr__(self, k, v):
        # Internal attributes go on the instance; everything else into _map.
        if k in {
            '_map', '_dynamic', '_ipython_canary_method_should_not_exist_'
        }:
            super(DotMap, self).__setattr__(k, v)
        else:
            self[k] = v

    def __getattr__(self, k):
        if k in {
            '_map', '_dynamic', '_ipython_canary_method_should_not_exist_'
        }:
            # NOTE(review): result of super().__getattr__ is not returned here.
            super(DotMap, self).__getattr__(k)
        else:
            return self[k]

    def __delattr__(self, key):
        return self._map.__delitem__(key)

    def __contains__(self, k):
        return self._map.__contains__(k)

    def __str__(self):
        items = []
        for k, v in self.__call_items(self._map):
            # bizarre recursive assignment situation (why someone would do this is beyond me)
            if id(v) == id(self):
                items.append('{0}=DotMap(...)'.format(k))
            else:
                items.append('{0}={1}'.format(k, repr(v)))
        joined = ', '.join(items)
        out = '{0}({1})'.format(self.__class__.__name__, joined)
        return out

    def __repr__(self):
        return str(self)

    def toDict(self):
        """Recursively convert this DotMap (and nested ones) to plain dicts."""
        d = {}
        for k, v in self.items():
            if type(v) is DotMap:
                # bizarre recursive assignment support
                if id(v) == id(self):
                    v = d
                else:
                    v = v.toDict()
            elif type(v) in (list, tuple):
                l = []
                for i in v:
                    n = i
                    if type(i) is DotMap:
                        n = i.toDict()
                    l.append(n)
                if type(v) is tuple:
                    v = tuple(l)
                else:
                    v = l
            d[k] = v
        return d

    def pprint(self, pformat='dict'):
        # 'json' uses json dumps; anything else falls back to the global
        # pprint function (the method shadows it only on instances).
        if pformat == 'json':
            print(dumps(self.toDict(), indent=4, sort_keys=True))
        else:
            pprint(self.toDict())

    def empty(self):
        # NOTE(review): any(self) iterates KEYS, so a map whose keys are all
        # falsy (e.g. 0, '') reports empty — presumably keys are non-empty
        # strings in practice; verify against callers.
        return (not any(self))

    # proper dict subclassing
    def values(self):
        return self._map.values()

    # ipython support
    def __dir__(self):
        return self.keys()

    @classmethod
    def parseOther(self, other):
        # Unwrap a DotMap to its backing OrderedDict for comparisons.
        if type(other) is DotMap:
            return other._map
        else:
            return other

    def __cmp__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__cmp__(other)

    def __eq__(self, other):
        other = DotMap.parseOther(other)
        if not isinstance(other, dict):
            return False
        return self._map.__eq__(other)

    def __ge__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ge__(other)

    def __gt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__gt__(other)

    def __le__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__le__(other)

    def __lt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__lt__(other)

    def __ne__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ne__(other)

    def __delitem__(self, key):
        return self._map.__delitem__(key)

    def __len__(self):
        return self._map.__len__()

    def clear(self):
        self._map.clear()

    def copy(self):
        return DotMap(self)

    def __copy__(self):
        return self.copy()

    def __deepcopy__(self, memo=None):
        # NOTE(review): delegates to copy(); nested values are re-wrapped by
        # the constructor rather than deep-copied element by element.
        return self.copy()

    def get(self, key, default=None):
        return self._map.get(key, default)

    def has_key(self, key):
        return key in self._map

    def iterkeys(self):
        return self._map.iterkeys()

    def itervalues(self):
        return self._map.itervalues()

    def keys(self):
        return self._map.keys()

    def pop(self, key, default=None):
        return self._map.pop(key, default)

    def popitem(self):
        return self._map.popitem()

    def setdefault(self, key, default=None):
        # NOTE(review): does not return the stored value (dict.setdefault does).
        self._map.setdefault(key, default)

    def update(self, *args, **kwargs):
        if len(args) != 0:
            self._map.update(*args)
        self._map.update(kwargs)

    def viewitems(self):
        return self._map.viewitems()

    def viewkeys(self):
        return self._map.viewkeys()

    def viewvalues(self):
        return self._map.viewvalues()

    @classmethod
    def fromkeys(cls, seq, value=None):
        d = DotMap()
        d._map = OrderedDict.fromkeys(seq, value)
        return d

    def __getstate__(self):
        return self.__dict__

    def __setstate__(self, d):
        self.__dict__.update(d)

    # bannerStr
    def _getListStr(self, items):
        # Render a list one item per line inside [ ... ].
        out = '['
        mid = ''
        for i in items:
            mid += '  {}\n'.format(i)
        if mid != '':
            mid = '\n' + mid
        out += mid
        out += ']'
        return out

    def _getValueStr(self, k, v):
        # Render "key value"; multi-line values are pushed to the next line.
        outV = v
        multiLine = len(str(v).split('\n')) > 1
        if multiLine:
            # push to next line
            outV = '\n' + v
        if type(v) is list:
            outV = self._getListStr(v)
        out = '{} {}'.format(k, outV)
        return out

    def _getSubMapDotList(self, pre, name, subMap):
        # Flatten a nested DotMap into "a.b.c value" dotted lines.
        outList = []
        if pre == '':
            pre = name
        else:
            pre = '{}.{}'.format(pre, name)

        def stamp(pre, k, v):
            valStr = self._getValueStr(k, v)
            return '{}.{}'.format(pre, valStr)

        for k, v in subMap.items():
            if isinstance(v, DotMap) and v != DotMap():
                subList = self._getSubMapDotList(pre, k, v)
                outList.extend(subList)
            else:
                outList.append(stamp(pre, k, v))
        return outList

    def _getSubMapStr(self, name, subMap):
        # Render a nested DotMap under a "== name ==" banner.
        outList = ['== {} =='.format(name)]
        for k, v in subMap.items():
            if isinstance(v, DotMap) and v != DotMap():
                # break down to dots
                subList = self._getSubMapDotList('', k, v)
                # add the divit
                # subList = ['> {}'.format(i) for i in subList]
                outList.extend(subList)
            else:
                out = self._getValueStr(k, v)
                # out = '> {}'.format(out)
                out = '{}'.format(out)
                outList.append(out)
        finalOut = '\n'.join(outList)
        return finalOut

    def bannerStr(self):
        """Multi-line banner rendering of the whole map (nested maps get
        '== name ==' headers, separated by '-' lines, terminated by '--')."""
        lines = []
        previous = None
        for k, v in self.items():
            if previous == 'DotMap':
                lines.append('-')
            out = ''
            if isinstance(v, DotMap):
                name = k
                subMap = v
                out = self._getSubMapStr(name, subMap)
                lines.append(out)
                previous = 'DotMap'
            else:
                out = self._getValueStr(k, v)
                lines.append(out)
                previous = 'other'
        lines.append('--')
        s = '\n'.join(lines)
        return s
def prepare_result():
    """Group calibration FITS files by header keywords and dump the grouping
    as a Python literal to ``calibration_filenames_date.py``.

    Reads module-level ``all_files`` (list of paths) and ``keywords`` (FITS
    header keys; keywords[0] is presumably IMAGETYP — TODO confirm).  Frames
    whose IMAGETYP is OBJECT or ARC are skipped, i.e. only calibration frames
    are kept.  Result layout: result[mode_tuple][imagetype] = [filenames],
    further split by date in the written file.
    """
    result = dict()
    for fn in all_files:
        # Read the primary header; unreadable files are silently skipped.
        try:
            f = pyfits.open(fn)
            header = f[0].header
            f.close()
        except:
            continue
        # Objects and arcs: continue
        if header['IMAGETYP'].upper() == 'OBJECT' or header['IMAGETYP'].upper() == 'ARC':
            # Assuming that all the rest is what we seek for here.
            continue
        # Get header info
        try:
            k = [header[x] for x in keywords]
            k[0] = k[0].upper()
        except:
            # Missing keyword: report the frame and skip it.
            print fn, header['IMAGETYP'].upper()
            continue
        k = tuple(k)  # Lists or sets cannot be dictionary keys
        # result[k[1:]] maps imagetype (k[0]) -> list of filenames;
        # EAFP nesting creates the inner dict/list on first sight.
        try:
            d = result[k[1:]]
            try:
                d[k[0]].append(fn)
            except:
                d[k[0]] = [fn]
            result[k[1:]] = d
        except:
            result[k[1:]] = {k[0]: [fn]}
    # Largest groups first.
    result = OrderedDict(
        sorted(result.viewitems(), key=lambda x: len(x[1]), reverse=True))
    f = open('calibration_filenames_date.py', 'wb')
    #~ f=open('calibration_filenames_date_less_keywords.py', 'wb')
    dsplit = '#' + 54 * '-' + '\n'  # NOTE(review): defined but never written
    f.write('result = {\n')
    # Split by date
    for k, v in result.iteritems():  # for each mode
        line = '\n ' + str(k) + ': {\n'
        f.write(line)  # mode
        c2 = dict()
        for kk, vv in v.iteritems():  # for each imagetype
            for vvv in vv:  # For each filename
                # Date is parsed from the filename; only files whose parent
                # folder name equals that date are kept.
                folder = vvv.split('/')[-2]
                date = vvv.split('/')[-1].split('.')[0].split('-')[1]
                if folder == date:
                    pass
                else:
                    continue  # do not repeat files that have been copied there for easier calibration
                try:
                    tmp = c2[kk]
                    try:
                        tmp[date].append(vvv)
                    except:
                        tmp[date] = [vvv]
                    c2[kk] = tmp
                except:
                    c2[kk] = {date: [vvv]}
        # Print in the file
        for t, y in c2.iteritems():  # c2[imagetype] = {date: [filenames]}
            f.write(' "%s": {\n' % (t))  # imagetype
            for yy, yyy in y.iteritems():  # date: [filenames]
                yyy = sorted(yyy)
                #~ f.write(' %s: [\n'%yy)  # date
                f.write(' %s: ["%s",\n' % (yy, yyy[0]))  # date
                for name in yyy[1:-1]:
                    f.write(' "%s",\n' % name)
                f.write(' "%s"],\n' % yyy[-1])
                #~ f.write(' ],\n')
            f.write(' },\n')  # imagetype
        f.write(' },\n')  # mode
        # NOTE(review): separator placement (per-mode vs once after the loop)
        # reconstructed from collapsed source — confirm against original.
        f.write(
            '##################################################################\n'
        )
    f.write('}')
    f.close()
class DotMap(OrderedDict):
    """Attribute-accessible ordered dict (second, 'LORETO' variant).

    Same idea as the other DotMap in this file: keys double as attributes,
    backed by an internal OrderedDict ``self._map``.  When the module-level
    ``LORETO`` flag is set, dynamic child creation is disabled and extra
    tree-navigation helpers (Ptr/KeyTree/PrintTree/GetValue) are enabled.
    """

    def __init__(self, *args, **kwargs):
        self._map = OrderedDict()
        # Setting this to False breaks the default tests; that is expected,
        # since they rely on dynamic creation of child DotMaps.
        self._dynamic = True
        # ===================================
        if LORETO:
            global MY_DICT_TYPES  # global var for the class
            # Setting this to False breaks the default tests; that is
            # expected, since they rely on dynamic creation of child DotMaps.
            self._dynamic = False
            MY_DICT_TYPES = [dict, DotMap, OrderedDict]  # by Loreto (DEFAULT dictionary)
        # ===================================
        if kwargs:
            if '_dynamic' in kwargs:
                self._dynamic = kwargs['_dynamic']
        if args:
            d = args[0]
            if isinstance(d, dict):
                for k, v in self.__call_items(d):
                    # Recursively wrap nested dicts, including dicts in lists.
                    if type(v) is dict:
                        v = DotMap(v, _dynamic=self._dynamic)
                    if type(v) is list:
                        l = []
                        for i in v:
                            n = i
                            if type(i) is dict:
                                n = DotMap(i, _dynamic=self._dynamic)
                            l.append(n)
                        v = l
                    self._map[k] = v
        if kwargs:
            for k, v in self.__call_items(kwargs):
                # NOTE(review): `is not` compares identity, not equality —
                # relies on small-string interning; should be `!=`.
                if k is not '_dynamic':
                    self._map[k] = v

    def __call_items(self, obj):
        # Prefer py2-style iteritems() when the object provides it.
        if hasattr(obj, 'iteritems') and ismethod(getattr(obj, 'iteritems')):
            return obj.iteritems()
        else:
            return obj.items()

    def items(self):
        return self.iteritems()

    def iteritems(self):
        return self.__call_items(self._map)

    def __iter__(self):
        return self._map.__iter__()

    def next(self):
        # Python-2 iterator protocol delegate.
        return self._map.next()

    def __setitem__(self, k, v):
        self._map[k] = v

    def __getitem__(self, k):
        # Dynamic mode: auto-vivify missing keys as empty DotMaps.
        if k not in self._map and self._dynamic and k != '_ipython_canary_method_should_not_exist_':
            # automatically extend to new DotMap
            self[k] = DotMap()
        return self._map[k]

    def __setattr__(self, k, v):
        # Internal attributes go on the instance; everything else into _map.
        if k in {
            '_map', '_dynamic', '_ipython_canary_method_should_not_exist_'
        }:
            super(DotMap, self).__setattr__(k, v)
        else:
            self[k] = v

    def __getattr__(self, k):
        # NOTE(review): `k == {...}` compares a string to a set, which is
        # always False — the sibling DotMap class uses `k in {...}` here.
        if k == {
            '_map', '_dynamic', '_ipython_canary_method_should_not_exist_'
        }:
            super(DotMap, self).__getattr__(k)
        else:
            return self[k]

    def __delattr__(self, key):
        return self._map.__delitem__(key)

    def __contains__(self, k):
        return self._map.__contains__(k)

    def __str__(self):
        items = []
        for k, v in self.__call_items(self._map):
            # bizarre recursive assignment situation (why someone would do this is beyond me)
            if id(v) == id(self):
                items.append('{0}=DotMap(...)'.format(k))
            else:
                items.append('{0}={1}'.format(k, repr(v)))
        out = 'DotMap({0})'.format(', '.join(items))
        return out

    def __repr__(self):
        return str(self)

    def toDict(self):
        """Recursively convert this DotMap (and nested ones) to plain dicts."""
        d = {}
        for k, v in self.items():
            if type(v) is DotMap:
                # bizarre recursive assignment support
                if id(v) == id(self):
                    v = d
                else:
                    v = v.toDict()
            elif type(v) is list:
                l = []
                for i in v:
                    n = i
                    if type(i) is DotMap:
                        n = i.toDict()
                    l.append(n)
                v = l
            d[k] = v
        return d

    def pprint(self):
        pprint(self.toDict())

    # ===================================
    if LORETO:
        # MY_DICT_TYPES = [dict, DotMap]
        def Ptr(self, listOfQualifiers, create=False):
            """Walk a list of keys down the nested map; return the node, or
            None if a key is missing (unless create=True, which vivifies)."""
            ptr = self
            for item in listOfQualifiers:
                if item in ptr:
                    ptr = ptr[item]
                else:
                    if create:
                        ptr[item] = DotMap()
                        ptr = ptr[item]
                    else:
                        return None
            return ptr

        def KeyTree(self, fPRINT=False):
            return DictToList.KeyTree(self, myDictTYPES=MY_DICT_TYPES, fPRINT=fPRINT)

        def KeyList(self):
            return DictToList.KeyList(self, myDictTYPES=MY_DICT_TYPES)

        def PrintTree(self, fEXIT=False, maxDepth=10, header=None, whatPrint='LTKV', stackLevel=1):
            # stackLevel+1 so the helper reports this caller's frame.
            PrintDictionaryTree.PrintDictionary(self, myDictTYPES=MY_DICT_TYPES, whatPrint=whatPrint, fEXIT=fEXIT, maxDepth=maxDepth, header=header, stackLevel=stackLevel + 1)

        # Aliases for PrintTree.
        printDict = PrintTree
        printTree = PrintTree

        def GetValue(self, listOfQualifiers=[], fPRINT=False):
            # NOTE(review): mutable default argument; harmless only if the
            # helper never mutates it — TODO confirm.
            return DictToList.getValue(self, listOfQualifiers=listOfQualifiers, myDictTYPES=MY_DICT_TYPES, fPRINT=fPRINT)
    # ===================================

    def empty(self):
        # NOTE(review): any(self) iterates KEYS — all-falsy keys report empty.
        return (not any(self))

    # proper dict subclassing
    def values(self):
        return self._map.values()

    # ipython support
    def __dir__(self):
        return self.keys()

    @classmethod
    def parseOther(self, other):
        # Unwrap a DotMap to its backing OrderedDict for comparisons.
        if type(other) is DotMap:
            return other._map
        else:
            return other

    def __cmp__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__cmp__(other)

    def __eq__(self, other):
        other = DotMap.parseOther(other)
        if not isinstance(other, dict):
            return False
        return self._map.__eq__(other)

    def __ge__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ge__(other)

    def __gt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__gt__(other)

    def __le__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__le__(other)

    def __lt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__lt__(other)

    def __ne__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ne__(other)

    def __delitem__(self, key):
        return self._map.__delitem__(key)

    def __len__(self):
        return self._map.__len__()

    def clear(self):
        self._map.clear()

    def copy(self):
        # Round-trips through a plain dict (unlike the sibling class).
        return DotMap(self.toDict())

    def get(self, key, default=None):
        return self._map.get(key, default)

    def has_key(self, key):
        return key in self._map

    def iterkeys(self):
        return self._map.iterkeys()

    def itervalues(self):
        return self._map.itervalues()

    def keys(self):
        return self._map.keys()

    def pop(self, key, default=None):
        return self._map.pop(key, default)

    def popitem(self):
        return self._map.popitem()

    def setdefault(self, key, default=None):
        # NOTE(review): does not return the stored value (dict.setdefault does).
        self._map.setdefault(key, default)

    def update(self, *args, **kwargs):
        if len(args) != 0:
            self._map.update(*args)
        self._map.update(kwargs)

    def viewitems(self):
        return self._map.viewitems()

    def viewkeys(self):
        return self._map.viewkeys()

    def viewvalues(self):
        return self._map.viewvalues()

    @classmethod
    def fromkeys(cls, seq, value=None):
        d = DotMap()
        d._map = OrderedDict.fromkeys(seq, value)
        return d

    def __getstate__(self):
        return self.__dict__

    def __setstate__(self, d):
        self.__dict__.update(d)
def main():
    """CLI entry point: parse options, load YAML config, then discover and
    start collectors/processors/sinks via pkg_resources entry points and run
    the configured main loop."""
    import argparse
    parser = argparse.ArgumentParser(
        description='Collect and dispatch various metrics to destinations.')
    parser.add_argument('-t', '--destination', metavar='host[:port]',
        help='host[:port] (default port: 2003, can be overidden'
            ' via config file) of sink destination endpoint (e.g. carbon'
            ' linereceiver tcp port, by default).')
    parser.add_argument('-i', '--interval', type=int, metavar='seconds',
        help='Interval between collecting and sending the datapoints.')
    parser.add_argument('-e', '--collector-enable',
        action='append', metavar='collector', default=list(),
        help='Enable only the specified metric collectors,'
            ' can be specified multiple times.')
    parser.add_argument('-d', '--collector-disable',
        action='append', metavar='collector', default=list(),
        help='Explicitly disable specified metric collectors,'
            ' can be specified multiple times. Overrides --collector-enable.')
    parser.add_argument('-s', '--sink-enable',
        action='append', metavar='sink', default=list(),
        help='Enable only the specified datapoint sinks,'
            ' can be specified multiple times.')
    parser.add_argument('-x', '--sink-disable',
        action='append', metavar='sink', default=list(),
        help='Explicitly disable specified datapoint sinks,'
            ' can be specified multiple times. Overrides --sink-enable.')
    parser.add_argument('-p', '--processor-enable',
        action='append', metavar='processor', default=list(),
        help='Enable only the specified datapoint processors,'
            ' can be specified multiple times.')
    parser.add_argument('-z', '--processor-disable',
        action='append', metavar='processor', default=list(),
        help='Explicitly disable specified datapoint processors,'
            ' can be specified multiple times. Overrides --processor-enable.')
    parser.add_argument('-c', '--config', action='append', metavar='path', default=list(),
        help='Configuration files to process.'
            ' Can be specified more than once.'
            ' Values from the latter ones override values in the former.'
            ' Available CLI options override the values in any config.')
    parser.add_argument('-a', '--xattr-emulation', metavar='db-path',
        help='Emulate filesystem extended attributes (used in'
            ' some collectors like sysstat or cron_log), storing per-path'
            ' data in a simple shelve db.')
    parser.add_argument('-n', '--dry-run', action='store_true', help='Do not actually send data.')
    parser.add_argument('--debug', action='store_true', help='Verbose operation mode.')
    optz = parser.parse_args()

    # Read configuration files: base config is "<script-name>.yaml" next to
    # this file; later -c files override earlier values.
    cfg = AttrDict.from_yaml('{}.yaml'.format(
        os.path.splitext(os.path.realpath(__file__))[0]))
    for k in optz.config:
        cfg.update_yaml(k)

    # Logging
    import logging
    configure_logging(cfg.logging, logging.DEBUG if optz.debug else logging.WARNING)
    if not cfg.logging.tracebacks:
        # Downgrade .exception() to .error() so tracebacks are suppressed.
        class NoTBLogger(logging.Logger):
            def exception(self, *argz, **kwz):
                self.error(*argz, **kwz)
        logging.setLoggerClass(NoTBLogger)
    log = logging.getLogger(__name__)

    # Fill "auto-detected" blanks in the configuration, CLI overrides.
    # Splits "host:port" and applies the default port when none was given.
    try:
        if optz.destination:
            cfg.sinks._default.host = optz.destination
        cfg.sinks._default.host = cfg.sinks._default.host.rsplit(':', 1)
        if len(cfg.sinks._default.host) == 1:
            cfg.sinks._default.host =\
                cfg.sinks._default.host[0], cfg.sinks._default.default_port
        else:
            cfg.sinks._default.host[1] = int(cfg.sinks._default.host[1])
    except KeyError:
        pass
    if optz.interval:
        cfg.loop.interval = optz.interval
    if optz.dry_run:
        cfg.debug.dry_run = optz.dry_run
    if optz.xattr_emulation:
        cfg.core.xattr_emulation = optz.xattr_emulation

    # Fake "xattr" module, if requested: per-path key/value pairs are stored
    # in a shelve db keyed by "<path>\0<attr>".
    if cfg.core.xattr_emulation:
        import shelve
        xattr_db = shelve.open(cfg.core.xattr_emulation, 'c')

        class xattr_path(object):
            def __init__(self, base):
                assert isinstance(base, str)
                self.base = base

            def key(self, k):
                return '{}\0{}'.format(self.base, k)

            def __setitem__(self, k, v):
                xattr_db[self.key(k)] = v

            def __getitem__(self, k):
                return xattr_db[self.key(k)]

            def __del__(self):
                xattr_db.sync()

        class xattr_module(object):
            xattr = xattr_path

        # Later "import xattr" picks up the emulation.
        sys.modules['xattr'] = xattr_module

    # Override "enabled" collector/sink parameters, based on CLI
    ep_conf = dict()
    for ep, enabled, disabled in\
            [('collectors', optz.collector_enable, optz.collector_disable),
             ('processors', optz.processor_enable, optz.processor_disable),
             ('sinks', optz.sink_enable, optz.sink_disable)]:
        conf = cfg[ep]
        conf_base = conf.pop('_default')
        if 'debug' not in conf_base:
            conf_base['debug'] = cfg.debug
        # (base defaults, per-name conf, loaded objects, enable/disable lists)
        ep_conf[ep] = conf_base, conf, OrderedDict(), enabled, disabled

    # Init global cfg for collectors/sinks' usage
    from graphite_metrics import collectors, sinks, loops
    collectors.cfg = sinks.cfg = loops.cfg = cfg

    # Init pluggable components
    import pkg_resources
    for ep_type in 'collector', 'processor', 'sink':
        ep_key = '{}s'.format(ep_type)  # a bit of a hack
        conf_base, conf, objects, enabled, disabled = ep_conf[ep_key]
        ep_dict = dict(
            (ep.name, ep) for ep in
            pkg_resources.iter_entry_points('graphite_metrics.{}'.format(ep_key)))
        # Explicitly-configured entry points first (keeping config order),
        # then the remaining discovered ones with base defaults.
        eps = OrderedDict(
            (name, (ep_dict.pop(name), subconf or AttrDict()))
            for name, subconf in conf.viewitems() if name in ep_dict)
        eps.update(
            (name, (module, conf_base))
            for name, module in ep_dict.viewitems())
        for ep_name, (ep_module, subconf) in eps.viewitems():
            if ep_name[0] == '_':
                # NOTE(review): no `continue` after this log line, so the
                # underscore-prefixed entry point is still processed below —
                # and "enty" is a typo in the runtime string; confirm intent.
                log.debug(
                    'Skipping {} enty point,'
                    ' prefixed by underscore: {}'.format(ep_type, ep_name))
            subconf.rebase(conf_base)  # fill in "_default" collector parameters
            # -e/-s/-p whitelists override config; -d/-x/-z blacklists win.
            if enabled:
                if ep_name in enabled:
                    subconf['enabled'] = True
                else:
                    subconf['enabled'] = False
            if disabled and ep_name in disabled:
                subconf['enabled'] = False
            if subconf.get('enabled', True):
                log.debug('Loading {}: {}'.format(ep_type, ep_name))
                try:
                    obj = getattr(ep_module.load(), ep_type)(subconf)
                except Exception as err:
                    log.exception('Failed to load/init {} ({}): {}'.format(ep_type, ep_name, err))
                    subconf.enabled = False
                    obj = None
                # Init may itself flip "enabled" off; honor that.
                if subconf.get('enabled', True):
                    objects[ep_name] = obj
                else:
                    log.debug((
                        '{} {} (entry point: {})'
                        ' was disabled after init'
                    ).format(ep_type.title(), obj, ep_name))
        # Processors are optional; collectors and sinks are not.
        if ep_type != 'processor' and not objects:
            log.fatal('No {}s were properly enabled/loaded, bailing out'.format(ep_type))
            sys.exit(1)
        log.debug('{}: {}'.format(ep_key.title(), objects))

    # Pick and instantiate the main loop implementation by configured name.
    loop = dict(
        (ep.name, ep) for ep in
        pkg_resources.iter_entry_points('graphite_metrics.loops'))
    conf = AttrDict(**cfg.loop)
    if 'debug' not in conf:
        conf.debug = cfg.debug
    loop = loop[cfg.loop.name].load().loop(conf)

    # Unpack the loaded-object dicts in a fixed order and hand off.
    collectors, processors, sinks = it.imap(
        op.itemgetter(2),
        op.itemgetter('collectors', 'processors', 'sinks')(ep_conf))
    log.debug(
        'Starting main loop: {} ({} collectors, {} processors, {} sinks)'\
            .format(loop, len(collectors), len(processors), len(sinks)))
    loop.start(collectors, processors, sinks)
class BaseCache(object):
    """
    BaseCache is a class that saves and operates on an OrderedDict. It has a
    certain capacity, stored in the attribute `maxsize`. Whether this
    capacity is reached, can be checked by using the boolean property
    `is_full`. To implement a custom cache, inherit from this class and
    override the methods ``__getitem__`` and ``__setitem__``.
    Call the method `sunpy.database.caching.BaseCache.callback` as soon
    as an item from the cache is removed.
    """
    # Python-2 style ABC declaration (py3 would use metaclass=ABCMeta).
    __metaclass__ = ABCMeta

    def __init__(self, maxsize=float('inf')):
        # maxsize: capacity of the cache; infinite by default.
        # _dict: the backing ordered mapping all dict-like ops delegate to.
        self.maxsize = maxsize
        self._dict = OrderedDict()

    def get(self, key, default=None):  # pragma: no cover
        """Return the corresponding value to `key` if `key` is in the cache,
        `default` otherwise. This method has no side-effects, multiple calls
        with the same cache and the same passed key must always return the
        same value.
        """
        try:
            return self._dict[key]
        except KeyError:
            return default

    @abstractmethod
    def __getitem__(self, key):
        """abstract method: this method must be overwritten by inheriting
        subclasses. It defines what happens if an item from the cache is
        attempted to be accessed.
        """
        return  # pragma: no cover

    @abstractmethod
    def __setitem__(self, key, value):
        """abstract method: this method must be overwritten by inheriting
        subclasses. It defines what happens if a new value should be assigned
        to the given key. If the given key does already exist in the cache or
        not must be checked by the person who implements this method.
        """

    @abstractproperty
    def to_be_removed(self):
        """The item that will be removed on the next
        :meth:`sunpy.database.caching.BaseCache.remove` call.
        """

    @abstractmethod
    def remove(self):
        """Call this method to manually remove one item from the cache. Which
        item is removed, depends on the implementation of the cache. After the
        item has been removed, the callback method is called.
        """

    def callback(self, key, value):
        """This method should be called (by convention) if an item is removed
        from the cache because it is full. The passed key and value are the
        ones that are removed. By default this method does nothing, but it
        can be customized in a custom cache that inherits from this base
        class.
        """

    @property
    def is_full(self):
        """True if the number of items in the cache equals :attr:`maxsize`,
        False otherwise.
        """
        # NOTE(review): strict equality — if the size ever exceeds maxsize,
        # this reports False; subclasses presumably never let that happen.
        return len(self._dict) == self.maxsize

    def __delitem__(self, key):
        self._dict.__delitem__(key)

    def __contains__(self, key):
        return key in self._dict.keys()

    def __len__(self):
        return len(self._dict)

    def __iter__(self):
        for key in self._dict.__iter__():
            yield key

    def __reversed__(self):  # pragma: no cover
        for key in self._dict.__reversed__():
            yield key

    def clear(self):  # pragma: no cover
        return self._dict.clear()

    def keys(self):  # pragma: no cover
        return self._dict.keys()

    def values(self):  # pragma: no cover
        return self._dict.values()

    def items(self):  # pragma: no cover
        return self._dict.items()

    def iterkeys(self):  # pragma: no cover
        return self._dict.iterkeys()

    def itervalues(self):  # pragma: no cover
        for value in self._dict.itervalues():
            yield value

    def iteritems(self):  # pragma: no cover
        # six is used here for py2/py3 compatibility of item iteration.
        for key, value in six.iteritems(self._dict):
            yield key, value

    def update(self, *args, **kwds):  # pragma: no cover
        self._dict.update(*args, **kwds)

    # Default is the name-mangled MutableMapping sentinel, so pop() with no
    # default raises KeyError just like dict.pop — fragile but intentional.
    def pop(self, key, default=MutableMapping._MutableMapping__marker):  # pragma: no cover
        return self._dict.pop(key, default)

    def setdefault(self, key, default=None):  # pragma: no cover
        return self._dict.setdefault(key, default)

    def popitem(self, last=True):  # pragma: no cover
        return self._dict.popitem(last)

    def __reduce__(self):  # pragma: no cover
        return self._dict.__reduce__()

    def copy(self):  # pragma: no cover
        # Returns a plain OrderedDict copy, not a BaseCache.
        return self._dict.copy()

    def __eq__(self, other):  # pragma: no cover
        return self._dict.__eq__(other)

    def __ne__(self, other):  # pragma: no cover
        return self._dict.__ne__(other)

    def viewkeys(self):  # pragma: no cover
        return self._dict.viewkeys()

    def viewvalues(self):  # pragma: no cover
        return self._dict.viewvalues()

    def viewitems(self):  # pragma: no cover
        return self._dict.viewitems()

    @classmethod
    def fromkeys(cls, iterable, value=None):  # pragma: no cover
        # Note: returns an OrderedDict, not a cache instance.
        return OrderedDict.fromkeys(iterable, value)

    def __repr__(self):  # pragma: no cover
        return '{0}({1!r})'.format(self.__class__.__name__, dict(self._dict))