class OrderedSet(MutableSet):
    """A set that remembers insertion order, backed by an OrderedDict."""

    def __init__(self, sequence=None):
        """Build the set from an optional iterable, keeping first-seen order."""
        super().__init__()
        if sequence is None:
            self._data = OrderedDict()
        else:
            # BUG FIX: OrderedDict(**{v: 1 for v in sequence}) only worked
            # for string items (keyword names must be str). fromkeys
            # accepts any hashable item and preserves first-seen order.
            self._data = OrderedDict.fromkeys(sequence, 1)

    def __contains__(self, item):
        """Override."""
        return self._data.__contains__(item)

    def __iter__(self):
        """Override."""
        return self._data.__iter__()

    def __len__(self):
        """Override."""
        return self._data.__len__()

    def add(self, item):
        """Override: insert *item* (no-op if already present)."""
        self._data.__setitem__(item, 1)

    def discard(self, item):
        """Override: remove *item* if present; do nothing when absent."""
        if item in self._data:
            self._data.__delitem__(item)

    def __repr__(self):
        return f"{self.__class__.__name__}({list(self._data.keys())})"
def __delete_main(self, keys):
    """
    Delete items for indexing.

    Removes each key in *keys* from this index, delegating to a
    sub-index when the key has a directory component.
    """
    for k in keys:
        # Fixed: `k[-1] is '/'` compared string identity, which is
        # implementation dependent; equality is what was intended.
        if k[-1] == '/':
            # NOTE(review): `files_count` is never initialised in this
            # method, so this raises UnboundLocalError if reached —
            # confirm intent against the full class.
            files_count -= 1
        kpart1, kpart2 = self.__get_dir_component(k)
        if kpart1 is not None:
            # Key lives in a nested index: delete the tail component there.
            next_index = dict.__getitem__(self, kpart1)
            if next_index is None:
                raise KeyError(k)
            next_index.__delitem__(kpart2)
        else:
            if k[-1] == '/':
                # NOTE(review): `new_index` and `file_key` are not
                # defined in this method; this branch would raise
                # NameError if executed — verify against callers.
                temp_index = dict.__getitem__(new_index, file_key)
                if type(temp_index) is Index:
                    files_count -= len(temp_index)
            OrderedDict.__delitem__(self, k)
            #self._list = None
            # Keep cached key lists consistent with the mapping.
            if self._list is not None:
                self._list.remove(k)
            self._full_key_list = None
            self._nfiles -= 1
            assert self._nfiles >= 0
def __getitem__(self: Dict[CacheKey, CacheValue], key: CacheKey) -> CacheValue:
    """Gets the item, but also makes it most recent.

    The key is moved to the end of the ordering, marking it as the
    most-recently-used entry.
    """
    result: CacheValue = OrderedDict.__getitem__(self, key)
    OrderedDict.move_to_end(self, key)
    return result
def __setitem__(self, key, value):
    """Insert neighbour *value* under distance *key*, evicting one
    neighbour at the farthest distance once more than ``self.k``
    distinct distances are stored."""
    if key in self:
        # This distance was discovered before while searching the KD
        # tree: append the new neighbour to the bucket for that distance.
        bucket = OrderedDict.__getitem__(self, key)
        bucket.append(value)
        OrderedDict.__setitem__(self, key, bucket)
    else:
        # First neighbour at this distance: open a new bucket and count
        # one more distinct distance.
        OrderedDict.__setitem__(self, key, [value])
        self.capacity += 1
    # When more than k distances are held, drop one neighbour at the
    # greatest distance.
    if self.capacity > self.k:
        farthest = sorted(self.keys())[-1]
        bucket = OrderedDict.__getitem__(self, farthest)
        if len(bucket) > 1:
            # Several neighbours tie at the greatest distance: discard
            # an arbitrary one of them.
            bucket.pop()
        else:
            # Sole neighbour at the greatest distance: remove the whole
            # entry and shrink the distance count.
            OrderedDict.__delitem__(self, farthest)
            self.capacity -= 1
def remove(self, item):
    """
    Remove an element from an OrderedSet; it must be a member.

    If the element is not a member, raise a KeyError.
    """
    # Call the OrderedDict implementation directly so a missing member
    # surfaces as the underlying KeyError, bypassing subclass hooks.
    OrderedDict.__delitem__(self, item)
def __setitem__(self, key, value):
    """Record neighbour *value* at distance *key*, keeping at most
    ``self.k`` distinct distances in the mapping."""
    is_known_distance = key in self
    if not is_known_distance:
        # First neighbour found at this distance: start a fresh list
        # for it and bump the number of distances held.
        OrderedDict.__setitem__(self, key, [value])
        self.capacity += 1
    else:
        # Other neighbours already sit at exactly this distance, so the
        # new one simply joins their list.
        neighbours = OrderedDict.__getitem__(self, key)
        neighbours.append(value)
        OrderedDict.__setitem__(self, key, neighbours)
    # Enforce the bound: when k is exceeded, one neighbour at the
    # greatest distance has to go.
    if self.capacity > self.k:
        ordered = sorted(self.keys())
        worst_key = ordered[-1]
        worst = OrderedDict.__getitem__(self, worst_key)
        if len(worst) > 1:
            # Ties at the greatest distance: drop just one of them.
            worst.pop()
        else:
            # Single neighbour at the greatest distance: drop the whole
            # entry and lower the distance count.
            OrderedDict.__delitem__(self, worst_key)
            self.capacity -= 1
class CollectionList(tk.Frame):
    """Stores and manages collections"""

    def __init__(self, parent, collections=None):
        # parent: containing Tk widget; collections: optional iterable of
        # Collection objects loaded into the listbox at start-up.
        tk.Frame.__init__(self, parent)
        self.collections = OrderedDict()  # name -> Collection, insertion order
        self.currentCollection = None
        # gui
        self.scrollbar = ttk.Scrollbar(self)
        self.listbox = tk.Listbox(self, yscrollcommand=self.scrollbar.set, width=30)
        self.scrollbar.config(command=self.listbox.yview)
        self.listbox.pack(side=tk.LEFT, fill=tk.Y)
        self.scrollbar.pack(side=tk.LEFT, fill=tk.Y)
        # Double-click selects the collection and shows it in the viewer.
        # NOTE(review): assumes self.master exposes a `viewer` with
        # set_collection() — confirm against the enclosing application.
        self.listbox.bind(
            '<Double-1>',
            lambda x: self.master.viewer.set_collection(
                self.set_cur(pos=self.get_selection()[0][0])))
        # load provided collections
        if collections:
            for c in collections:
                self.add_collection(c)
            self.set_cur()

    def set_cur(self, name=None, pos=0):
        # Make the collection at *pos* (or named *name*) current and
        # return it; may return None when the name is unknown.
        if name is None:
            # TODO: check whether pos is valid
            name = self.listbox.get(pos)
        self.currentCollection = self.get_collection(name)
        return self.currentCollection

    def add_collection(self, collection):
        # Register a Collection under its name and show it in the listbox.
        assert isinstance(collection, Collection)
        self.collections[collection.name] = collection
        # add to listbox
        self.listbox.insert(tk.END, collection.name)

    def get_collection(self, name):
        # Returns None when *name* is not registered.
        if name in self.collections:
            return self.collections[name]

    def get_selection(self):
        '''
        return indices (tuple) and names (list)
        '''
        # Relies on the listbox order matching self.collections order.
        idx = self.listbox.curselection()
        all_names = list(self.collections)
        names = [all_names[i] for i in idx]
        return idx, names

    def remove_selection(self):
        idx, names = self.get_selection()
        # remove from listbox
        # Delete from the end so earlier indices stay valid.
        for i in sorted(idx, reverse=True):
            self.listbox.delete(i)
        # remove from dict
        # NOTE(review): set_cur() runs while *name* is still registered,
        # so the replacement current collection could be the one being
        # removed; also assumes currentCollection is not None — verify.
        for name in names:
            if self.currentCollection.name == name:
                self.set_cur()
            self.collections.__delitem__(name)
def use(self, key, default=None):
    """Return the value for *key*, promoting it to most-recently-used.

    Falls back to *default* when the key is absent.
    """
    if key not in self:
        return default
    # Delete-then-reinsert pushes the key to the end of the ordering.
    value = OrderedDict.__getitem__(self, key)
    OrderedDict.__delitem__(self, key)
    OrderedDict.__setitem__(self, key, value)
    return value
def __delitem__(self, key):
    """Delete the entry stored under *key*.

    >>> T = pykov.Matrix({('A','B'): .3, ('A','A'): .7, ('B','A'): 1.})
    >>> del(T['B', 'A'])
    >>> T
    {('A', 'B'): 0.3, ('A', 'A'): 0.7}
    """
    # Delegate straight to OrderedDict so subclass hooks are not
    # re-entered.
    OrderedDict.__delitem__(self, key)
def _nfit_popping(self, it):
    """Close and forget the tab backing item *it*, firing the matching
    callbacks; always returns True."""
    content = it.content
    # Tear the tab widget down before dropping our reference to it.
    self._dict[content]._delete()
    OrderedDict.__delitem__(self._dict, content)
    self.callback_call("tab,deleted", content)
    if not len(self):
        # The last tab just went away.
        self.callback_call("tabs,empty")
    return True
def use(self, key, default = None):
    """Return the cached value for *key*, refreshed to the newest
    position in the ordering; *default* when not cached."""
    if key in self:
        refreshed = OrderedDict.__getitem__(self, key)
        # Removing and re-storing the entry moves it to the end, i.e.
        # marks it most recently used.
        OrderedDict.__delitem__(self, key)
        OrderedDict.__setitem__(self, key, refreshed)
        return refreshed
    return default
def __setitem__(self, key, value):
    # Resolve a dotted key ("a.b.c") to the sub-config owning the final
    # component; create_default=True presumably builds missing
    # intermediate nodes — confirm against consume_dots.
    sub_cfg, sub_key = consume_dots(self, key, create_default=True)
    if sub_key == '__clear__' and value == True:
        # Magic key: assigning a truthy-equal-to-True value to
        # "...__clear__" empties the owning sub-config instead of
        # storing anything. NOTE(review): `value == True` also matches
        # 1 — verify that is intended.
        sub_cfg.clear()
    elif value in ('__remove__',):
        # Magic value: assigning '__remove__' deletes the key when it
        # exists and silently does nothing otherwise.
        if sub_cfg.__contains__(sub_key):
            OrderedDict.__delitem__(sub_cfg, sub_key)
    else:
        # Ordinary assignment into the resolved sub-config.
        OrderedDict.__setitem__(sub_cfg, sub_key, value)
def discard(self, item):
    """
    Remove an element from an OrderedSet if it is a member.

    If the element is not a member, do nothing.
    """
    # Check membership against the underlying dict first so absent
    # items are a silent no-op instead of an error.
    if dict.__contains__(self, item):
        OrderedDict.__delitem__(self, item)
def __getitem__(self, key, with_age=False) -> Any:
    """ Return the item of the dict using d[key].

    Entries older than ``self.max_age`` frames are evicted and reported
    as missing. With ``with_age=True`` the stored frame time is
    returned alongside the value.
    """
    with self.lock:
        # Each entry is stored as a [value, frame-time] pair.
        stored = OrderedDict.__getitem__(self, key)
        value, born = stored[0], stored[1]
        if self.frame - born < self.max_age:
            # Still fresh.
            return (value, born) if with_age else value
        # Expired: evict and behave as if the key were never there.
        OrderedDict.__delitem__(self, key)
        raise KeyError(key)
def __delitem__(self, key, *args, **kwargs):
    """
    Deletes *key* from the mapping together with the instance
    attribute of the same name that mirrors it.

    :param key: Key. ( Object )
    :param \*args: Arguments. ( \* )
    :param \*\*kwargs: Key / Value pairs. ( Key / Value pairs )
    """
    # Drop the mapping entry first, then its sibling attribute.
    OrderedDict.__delitem__(self, key, *args, **kwargs)
    OrderedDict.__delattr__(self, key)
def __delattr__(self, attribute):
    """
    This method deletes both key and sibling attribute.

    :param attribute.: Attribute. ( Object )
    """
    # NOTE(review): this probes the *pure-Python* OrderedDict internals
    # ("_OrderedDict__root" / "_OrderedDict__map"); on CPython 3.5+ the
    # C implementation does not expose these, so the key-deletion
    # branch would be skipped — verify on the targeted interpreter.
    if hasattr(self, "_OrderedDict__root") and hasattr(self, "_OrderedDict__map"):
        if self._OrderedDict__root:
            # The attribute mirrors a mapping key: remove the key too.
            OrderedDict.__delitem__(self, attribute)
    OrderedDict.__delattr__(self, attribute)
class Metadata(MutableMapping):
    """An ordered, dict-like container for metadata entries."""

    def __init__(self, seq=None):
        # Seed from *seq* when one is given, otherwise start empty.
        self.dct = OrderedDict(seq) if seq else OrderedDict()

    def __contains__(self, key):
        return key in self.dct

    def __getitem__(self, key):
        return self.dct[key]

    def __setitem__(self, key, value):
        self.dct[key] = value

    def __delitem__(self, key):
        del self.dct[key]

    def __iter__(self):
        return iter(self.dct)

    def __len__(self):
        return len(self.dct)

    def __repr__(self):
        return repr(self.dct)

    def __str__(self):
        return str(self.dct)
def __delitem__(self, name):
    """
    Prevent deletion of items.

    Parameters
    ----------
    name : str
        The name of the item to be deleted.

    Raises
    ------
    AttributeError
        Whenever the instance is not in its disabled (unlocked) state,
        since these objects are read-only.
    """
    # Guard clause: only a "disabled" (unlocked) instance may delete.
    if not self._disabled:
        raise AttributeError('{!r} objects are read-only.'
                             .format(self.__class__))
    _OrderedDict.__delitem__(self, name)
class Buckets(object):
    """Proxy for OrderedDict"""

    def __init__(self, *args, **kwargs):
        # All real storage lives in the wrapped OrderedDict.
        self._od = OrderedDict(*args, **kwargs)

    def __getattr__(self, a):
        # Anything not defined here falls through to the OrderedDict.
        return getattr(self._od, a)

    def __setitem__(self, *args, **kwargs):
        return self._od.__setitem__(*args, **kwargs)

    def __getitem__(self, *args, **kwargs):
        return self._od.__getitem__(*args, **kwargs)

    def __delitem__(self, *args, **kwargs):
        return self._od.__delitem__(*args, **kwargs)

    def __eq__(self, other):
        # Unwrap the other side when it is a Buckets proxy too.
        rhs = other._od if isinstance(other, Buckets) else other
        return self._od.__eq__(rhs)

    def copy(self, *args, **kwargs):
        # Shallow copy: a fresh proxy around a copied OrderedDict.
        duplicate = Buckets()
        duplicate._od = self._od.copy()
        return duplicate
class CompoundValue(object):
    # Composite XSD value: every element/attribute value is kept in the
    # ordered mapping self.__values__; plain attribute access is routed
    # through that mapping via __setattr__/__getattribute__ below.
    # NOTE(review): assumes self._xsd_type is provided elsewhere (e.g.
    # set on a subclass) before instantiation — confirm.

    def __init__(self, *args, **kwargs):
        # Dunder name, so this assignment goes to the instance itself
        # (see __setattr__), not into the values mapping.
        self.__values__ = OrderedDict()

        # Set default values
        for container_name, container in self._xsd_type.elements_nested:
            values = container.default_value
            if isinstance(container, Indicator):
                # Indicator containers contribute a mapping of defaults.
                self.__values__.update(values)
            else:
                self.__values__[container_name] = values

        # Set attributes
        for attribute_name, attribute in self._xsd_type.attributes:
            self.__values__[attribute_name] = attribute.default_value

        # Set elements
        items = _process_signature(self._xsd_type, args, kwargs)
        for key, value in items.items():
            self.__values__[key] = value

    def __contains__(self, key):
        return self.__values__.__contains__(key)

    def __len__(self):
        return self.__values__.__len__()

    def __iter__(self):
        return self.__values__.__iter__()

    def __repr__(self):
        return PrettyPrinter().pformat(self.__values__)

    def __delitem__(self, key):
        return self.__values__.__delitem__(key)

    def __getitem__(self, key):
        return self.__values__[key]

    def __setitem__(self, key, value):
        self.__values__[key] = value

    def __setattr__(self, key, value):
        # Dunder names and the XSD bookkeeping slots live on the
        # instance itself; every other attribute is stored as a value.
        if key.startswith('__') or key in ('_xsd_type', '_xsd_elm'):
            return super(CompoundValue, self).__setattr__(key, value)
        self.__values__[key] = value

    def __getattribute__(self, key):
        # Mirror of __setattr__: plain names are read from __values__,
        # with missing ones surfaced as AttributeError.
        if key.startswith('__') or key in ('_xsd_type', '_xsd_elm'):
            return super(CompoundValue, self).__getattribute__(key)
        try:
            return self.__values__[key]
        except KeyError:
            raise AttributeError(
                "%s instance has no attribute '%s'" % (
                    self.__class__.__name__, key))
def __delitem__(self, key):
    """Allows deleting data using indexed values for the instance.

    Args:
        key (str) : String to be used as indexed key.

    Example:
        >>> it = StrItero(int)
        >>> it['one'] = 1
        >>> it['two'] = 2
        >>> len(it)
        2
        >>> del it['one']
        >>> len(it)
        1
    """
    # NOTE: `assert` is stripped under `python -O`; keys must be str.
    assert isinstance(key, str)
    # Normalise the key when a key-processing hook is configured.
    if self.__processKey:
        mapped = self.__processKey(key)
    else:
        mapped = key
    OrderedDict.__delitem__(self.__stream, mapped)
def log_args(self, args_od, args):
    """Log two argument dumps: the curated ``args_od`` mapping and the
    remaining (sorted) attributes of the ``args`` namespace."""
    import json
    # Snapshot every attribute of the namespace, sorted by name, then
    # drop the ones already covered by args_od.
    args = OrderedDict(sorted(vars(args).items()))
    for arg in args_od:
        if arg in args:
            del args[arg]
    # Pretty-print args_od and prefix continuation lines for the log.
    args_od_str = json.dumps(args_od, indent=2)
    args_od_str = args_od_str.replace('\n', ' \n>')
    self.log_text(step=0, names='args_od', values=args_od_str, curent_time=True)
    args_str = str(dict(args))
    args_str = args_str.replace(' ', ' \n>')
    self.log_text(step=0, names='args', values=args_str, curent_time=True)
class MetafeatureFunctions(object):
    """Registry mapping metafeature names to callable instances, plus
    their dependencies and computed values."""

    def __init__(self):
        self.functions = OrderedDict()     # name -> metafeature instance
        self.dependencies = OrderedDict()  # name -> dependency name or None
        self.values = OrderedDict()        # name -> computed result

    def clear(self):
        """Forget every computed value; registrations are kept."""
        self.values = OrderedDict()

    def __iter__(self):
        return iter(self.functions)

    def __getitem__(self, item):
        return self.functions[item]

    def __setitem__(self, key, value):
        self.functions[key] = value

    def __delitem__(self, key):
        del self.functions[key]

    def __contains__(self, item):
        return item in self.functions

    def get_value(self, key):
        return self.values[key].value

    def set_value(self, key, item):
        self.values[key] = item

    def is_calculated(self, key):
        """Return if a helper function has already been executed.

        Necessary as get_value() can return None if the helper function
        hasn't been executed or if it returned None."""
        return key in self.values

    def get_dependency(self, name):
        """Return the dependency of metafeature "name"."""
        return self.dependencies.get(name)

    def define(self, name, dependency=None):
        """Decorator for adding metafeature functions to a "dictionary"
        of metafeatures. This behaves like a function decorating a
        function, not a class decorating a function."""
        def wrapper(metafeature_class):
            # Register an *instance* of the decorated class.
            instance = metafeature_class()
            self[name] = instance
            self.dependencies[name] = dependency
            return instance
        return wrapper
def OPTIMO(wlf,n): pages=[] # Read pages from file. f = open(wlf) pages= f.read().splitlines() f.close() cache_list = OrderedDict() page_fault=0 count=0 # Insertion of pages to memory and calculation of page faults. for page in pages: tam_cache=len(cache_list) lista=[] count+=1 if not cache_list.__contains__(page): if tam_cache < n: cache_list[page]='' page_fault += 1 else: lista=[pages[x] for x in range(count+1,len(pages))] list_inters = set(lista).intersection(cache_list) if(len(list_inters)==len(cache_list)): cache_list.popitem() cache_list[page]='' page_fault += 1 else: lista_comp=set(list_inters).symmetric_difference(cache_list) elem=lista_comp.pop() cache_list.__delitem__(elem) cache_list[page]='' page_fault += 1 return page_fault print "Resultados: " print "Miss rate: ", ' '+str(round((float(page_fault*100)/(3721736)),3))+'%' print 'Miss rate (warm cache): ', ' '+str(round((float(page_fault*100)/(3721736-n)),3))+'%' with open("log.csv", "a") as output: output.write('OPTIMO,'+str(page_fault)+','+str(n)+'\n') output.close()
class HelperFunctions:
    """Registry of helper functions together with their computed values
    and per-computation timing information."""

    def __init__(self):
        self.functions = OrderedDict()  # name -> helper instance
        self.values = OrderedDict()     # name -> computed result
        # BUG FIX: previously this attribute only came into existence
        # after the first clear() call, so reading it on a fresh
        # instance raised AttributeError.
        self.computation_time = OrderedDict()

    def clear(self):
        """Drop all computed values and timings; keep registrations."""
        self.values = OrderedDict()
        self.computation_time = OrderedDict()

    def __iter__(self):
        return self.functions.__iter__()

    def __getitem__(self, item):
        return self.functions.__getitem__(item)

    def __setitem__(self, key, value):
        return self.functions.__setitem__(key, value)

    def __delitem__(self, key):
        return self.functions.__delitem__(key)

    def __contains__(self, item):
        return self.functions.__contains__(item)

    def is_calculated(self, key):
        """Return if a helper function has already been executed.

        Necessary as get_value() can return None if the helper function
        hasn't been executed or if it returned None."""
        return key in self.values

    def get_value(self, key):
        # NOTE: raises AttributeError (not KeyError) for unknown keys,
        # because .get() yields None which has no .value attribute.
        return self.values.get(key).value

    def set_value(self, key, item):
        self.values[key] = item

    def define(self, name):
        """Decorator for adding helper functions to a "dictionary".
        This behaves like a function decorating a function, not a class
        decorating a function."""
        def wrapper(metafeature_class):
            instance = metafeature_class()
            self.__setitem__(name, instance)
            return instance
        return wrapper
class OrderedSet:
    """A set preserving insertion order, backed by an OrderedDict."""

    def __init__(self, items=None):
        self.d = OrderedDict()
        self.update(items)

    def update(self, items):
        """Add every element of *items* (ignored when None)."""
        if items is not None:
            for item in items:
                self.d[item] = 1

    def __iter__(self):
        return self.d.__iter__()

    def __contains__(self, key):
        return self.d.__contains__(key)

    def __delitem__(self, key):
        return self.d.__delitem__(key)

    def __len__(self):
        return self.d.__len__()

    def add(self, x):
        # BUG FIX: the original signature `def add(x)` was missing
        # `self` and called the undefined module-level name `update`,
        # so any call raised immediately.
        return self.update([x])

    def discard(self, x):
        """Remove *x* when present; do nothing otherwise."""
        # BUG FIX: previously delegated to the undefined self.__del__.
        if self.__contains__(x):
            return self.__delitem__(x)

    def remove(self, x):
        """Remove *x*; raise KeyError when it is not a member."""
        if not self.__contains__(x):
            raise KeyError(x)
        # BUG FIX: previously delegated to the undefined self.__del__.
        return self.__delitem__(x)

    def _format_op(self, op):
        # Prefer an operation's symbolic name when it has one.
        if hasattr(op, 'name'):
            return op.name
        return str(op)

    def __repr__(self):
        if not self:
            return '%s()' % (self.__class__.__name__, )
        return '{%r}' % (','.join([self._format_op(op) for op in self]), )
class OrderedSet:
    """An insertion-ordered set built on top of an OrderedDict."""

    def __init__(self, items=None):
        self.d = OrderedDict()
        self.update(items)

    def update(self, items):
        """Insert each element of *items*; None means nothing to add."""
        if items is not None:
            for item in items:
                self.d[item] = 1

    def __iter__(self):
        return self.d.__iter__()

    def __contains__(self, key):
        return self.d.__contains__(key)

    def __delitem__(self, key):
        return self.d.__delitem__(key)

    def __len__(self):
        return self.d.__len__()

    def add(self, x):
        # BUG FIX: `update(self, [x])` called the undefined module-level
        # name `update`; the bound method is what was intended.
        return self.update([x])

    def discard(self, x):
        """Remove *x* if it is a member; otherwise do nothing."""
        # BUG FIX: self.__del__ does not exist; deletion goes through
        # __delitem__.
        if self.__contains__(x):
            return self.__delitem__(x)

    def remove(self, x):
        """Remove *x*, raising KeyError when it is not a member."""
        if not self.__contains__(x):
            raise KeyError(x)
        # BUG FIX: self.__del__ does not exist; deletion goes through
        # __delitem__.
        return self.__delitem__(x)

    def _format_op(self, op):
        # Operations with a `name` attribute print by name.
        if hasattr(op, 'name'):
            return op.name
        return str(op)

    def __repr__(self):
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '{%r}' % (','.join([self._format_op(op) for op in self]),)
class MalHashMap(MalType):
    """Mal hash-map value type wrapping an OrderedDict of entries."""

    def __init__(self, data=None):
        # Accept nothing, a plain dict, another MalHashMap, or a flat
        # [k1, v1, k2, v2, ...] sequence of alternating keys/values.
        if not data:
            self.data = OrderedDict()
        elif isinstance(data, dict):
            self.data = OrderedDict(data)
        elif isinstance(data, MalHashMap):
            self.data = OrderedDict(data.data)
        elif isinstance(data, (MalList, MalVector, list)):
            self.data = OrderedDict()
            # Pair even-indexed keys with odd-indexed values.
            for k, v in zip(data[::2], data[1::2]):
                self.data[k] = v

    def __iter__(self):
        return iter(self.data)

    def __getitem__(self, key):
        return self.data[key]

    def __setitem__(self, key, value):
        self.data[key] = value

    def __delitem__(self, key):
        # Missing keys are silently ignored.
        if key in self.data:
            del self.data[key]

    def __len__(self):
        return len(self.data)

    def items(self):
        return self.data.items()

    def get(self, key):
        """Return the stored value, or MalNil() when absent."""
        if key in self.data:
            return self.data[key]
        return MalNil()

    def keys(self):
        return MalList(self.data.keys())

    def values(self):
        return MalList(self.data.values())
class Buckets(object):
    """Proxy for OrderedDict"""

    def __init__(self, *args, **kwargs):
        self._od = OrderedDict(*args, **kwargs)

    def __getattr__(self, a):
        # Unknown attributes fall back to the wrapped OrderedDict.
        return getattr(self._od, a)

    def __setitem__(self, *args, **kwargs):
        return self._od.__setitem__(*args, **kwargs)

    def __getitem__(self, *args, **kwargs):
        return self._od.__getitem__(*args, **kwargs)

    def __delitem__(self, *args, **kwargs):
        return self._od.__delitem__(*args, **kwargs)

    def __eq__(self, other):
        # Compare against the other proxy's underlying dict when given
        # another Buckets, otherwise against *other* directly.
        rhs = other._od if isinstance(other, Buckets) else other
        return self._od.__eq__(rhs)

    def copy(self, *args, **kwargs):
        duplicate = Buckets()
        duplicate._od = self._od.copy()
        return duplicate

    def keys(self, *args, **kwargs):
        return self._od.keys(*args, **kwargs)

    def iterkeys(self, *args, **kwargs):
        # Python 2 era API; only valid when the wrapped mapping has it.
        return self._od.iterkeys(*args, **kwargs)

    def __iter__(self, *args, **kwargs):
        return self._od.__iter__(*args, **kwargs)
class DictStorage(Storage):
    """In-memory Storage backed by an OrderedDict; load/dump are no-ops
    that warn on stderr."""

    __slots__ = ['_data']

    def __init__(self, **kwargs):
        super(DictStorage, self).__init__(**kwargs)
        self._data = OrderedDict()

    def __getitem__(self, item: Text) -> Any:
        return self._data[item]

    def __setitem__(self, key: Text, value: Any):
        self._data[key] = value

    def __delitem__(self, key: Text):
        del self._data[key]

    def __contains__(self, item):
        return item in self._data

    def load(self):
        # Nothing to load from: the data only lives in memory.
        print('Function load is invalid in DictStorage.', file=stderr)

    def dump(self):
        # Nothing to persist to: the data only lives in memory.
        print('Function dump is invalid in DictStorage.', file=stderr)
class Headers:
    '''Utility for managing HTTP headers for both clients and servers.

    It has a dictionary like interface with few extra functions to
    facilitate the insertion of multiple header values. Header fields are
    **case insensitive**, therefore doing::

        >>> h = Headers()
        >>> h['Content-Length'] = '1050'

    is equivalent to

        >>> h['content-length'] = '1050'

    :param headers: optional iterable over header field/value pairs.
    :param kind: optional headers type, one of ``server``, ``client`` or
        ``both``.
    :param strict: if ``True`` only valid headers field will be included.

    This :class:`Headers` container maintains an ordering as suggested by
    http://www.w3.org/Protocols/rfc2616/rfc2616.html:

    .. epigraph::

        The order in which header fields with differing field names are
        received is not significant. However, it is "good practice" to send
        general-header fields first, followed by request-header or
        response-header fields, and ending with the entity-header fields.

        -- rfc2616 section 4.2

    The strict parameter is rarely used and it forces the omission on
    non-standard header fields.
    '''
    @classmethod
    def make(cls, headers):
        # Coerce *headers* into a Headers instance (no copy when it
        # already is one).
        if not isinstance(headers, cls):
            headers = cls(headers)
        return headers

    def __init__(self, *args, **kwargs):
        # Field name -> list of values, in insertion order.
        self._headers = OrderedDict()
        if args or kwargs:
            self.update(*args, **kwargs)

    def __repr__(self):
        return self._headers.__repr__()

    def __str__(self):
        return '\r\n'.join(self._ordered())

    def __bytes__(self):
        return str(self).encode(DEFAULT_CHARSET)

    def __len__(self):
        return len(self._headers)

    def update(self, *args, **kwargs):
        """Extend the headers with an ``iterable``.

        :param iterable: a dictionary or an iterable over keys, values
            tuples.
        """
        if len(args) == 1:
            for key, value in mapping_iterator(args[0]):
                self.add_header(key, value)
        elif args:
            raise TypeError('update expected at most 1 arguments, got %d' %
                            len(args))
        for key, value in kwargs.items():
            self.add_header(key, value)

    def override(self, iterable):
        '''Extend headers by overriding fields from iterable.

        The first occurrence of a field in *iterable* replaces any
        existing values; subsequent occurrences append to it.

        :param iterable: a dictionary or an iterable over keys, values
            tuples.
        '''
        seen = set()
        for key, value in mapping_iterator(iterable):
            key = key.lower()
            if key in seen:
                self.add_header(key, value)
            else:
                seen.add(key)
                self[key] = value

    def copy(self):
        return self.__class__(self)

    def __contains__(self, key):
        # Field names are normalised through header_field (case folding).
        return header_field(key) in self._headers

    def __getitem__(self, key):
        key = header_field(key)
        values = self._headers[key]
        # Per-field joiner; None in the table means use '; ' instead.
        joiner = HEADER_FIELDS_JOINER.get(key, ', ')
        if joiner is None:
            joiner = '; '
        return joiner.join(values)

    def __delitem__(self, key):
        self._headers.__delitem__(header_field(key))

    def __setitem__(self, key, value):
        key = header_field(key)
        # Empty keys/values are silently ignored.
        if key and value:
            if not isinstance(value, list):
                value = header_values(key, value)
            self._headers[key] = value

    def get(self, key, default=None):
        '''Get the field value at ``key`` as comma separated values.

        For example::

            >>> from pulsar.utils.httpurl import Headers
            >>> h = Headers(kind='client')
            >>> h.add_header('accept-encoding', 'gzip')
            >>> h.add_header('accept-encoding', 'deflate')
            >>> h.get('accept-encoding')

        results in::

            'gzip, deflate'
        '''
        if key in self:
            return self.__getitem__(key)
        else:
            return default

    def get_all(self, key, default=None):
        '''Get the values at header ``key`` as a list rather than a string
        separated by comma (which is returned by the :meth:`get` method).

        For example::

            >>> from pulsar.utils.httpurl import Headers
            >>> h = Headers(kind='client')
            >>> h.add_header('accept-encoding', 'gzip')
            >>> h.add_header('accept-encoding', 'deflate')
            >>> h.get_all('accept-encoding')

        results in::

            ['gzip', 'deflate']
        '''
        return self._headers.get(header_field(key), default)

    def has(self, field, value):
        '''Check if ``value`` is available in header ``field``
        (case-insensitively).'''
        value = value.lower()
        for c in self.get_all(field, ()):
            if c.lower() == value:
                return True
        return False

    def pop(self, key, *args):
        return self._headers.pop(header_field(key), *args)

    def clear(self):
        '''Same as :meth:`dict.clear`, it removes all headers.
        '''
        self._headers.clear()

    def getheaders(self, key):  # pragma    nocover
        '''Required by cookielib in python 2.

        If the key is not available, it returns an empty list.
        '''
        return self._headers.get(header_field(key), [])

    def add_header(self, key, values):
        '''Add ``values`` to ``key`` header.

        If the header is already available, append the value to the list.

        :param key: header name
        :param values: a string value or a list/tuple of strings values
            for header ``key``
        '''
        key = header_field(key)
        if key and values:
            if not isinstance(values, (tuple, list)):
                values = header_values(key, values)
            current = self._headers.get(key, [])
            # De-duplicate: only append values not already present.
            for value in values:
                if value and value not in current:
                    current.append(value)
            self._headers[key] = current

    def remove_header(self, key, value=None):
        '''Remove the header at ``key``.

        If ``value`` is provided, it removes only that value if found.
        '''
        key = header_field(key)
        if key:
            if value:
                value = value.lower()
                values = self._headers.get(key, [])
                removed = None
                # NOTE(review): removing from `values` while iterating
                # it can skip adjacent duplicates — confirm intended.
                for v in values:
                    if v.lower() == value:
                        removed = v
                        values.remove(v)
                self._headers[key] = values
                return removed
            else:
                return self._headers.pop(key, None)

    def flat(self, version, status):
        '''Full headers bytes representation'''
        vs = version + (status, self)
        return ('HTTP/%s.%s %s\r\n%s' % vs).encode(DEFAULT_CHARSET)

    def __iter__(self):
        # Yields (field, joined-values) pairs; fields whose joiner is
        # falsy yield one pair per value instead.
        dj = ', '
        for k, values in self._headers.items():
            joiner = HEADER_FIELDS_JOINER.get(k, dj)
            if joiner:
                yield k, joiner.join(values)
            else:
                for value in values:
                    yield k, value

    def _ordered(self):
        # "field: value" lines followed by the blank line terminating
        # the header block.
        for key, header in self:
            yield "%s: %s" % (key, header)
        yield ''
        yield ''
class DataEncoderRegistry(collections.abc.MutableMapping):
    """Ordered registry mapping encoder names to encoder objects that
    implement ``encode``/``decode`` for scrap payloads."""

    def __init__(self):
        self._encoders = OrderedDict()

    def __getitem__(self, key):
        return self._encoders.__getitem__(key)

    def __setitem__(self, key, value):
        # Registered objects must provide callable encode() and decode()
        # methods; plain strings are rejected outright.
        if isinstance(value, six.string_types) or not (
                getattr(value, "encode", None) and callable(value.encode)):
            raise ScrapbookException(
                "Can't register object without 'encode' method.")
        if isinstance(value, six.string_types) or not (
                getattr(value, "decode", None) and callable(value.decode)):
            raise ScrapbookException(
                "Can't register object without 'decode' method.")
        return self._encoders.__setitem__(key, value)

    def __delitem__(self, key):
        return self._encoders.__delitem__(key)

    def __iter__(self):
        return self._encoders.__iter__()

    def __len__(self):
        return self._encoders.__len__()

    def register(self, encoder):
        """
        Registers a new name to a particular encoder

        Parameters
        ----------
        encoder: obj
            The object which implements the required encoding functions
            and names itself via its ``name()`` method.
        """
        try:
            self[encoder.name()] = encoder
        except AttributeError:
            raise ScrapbookInvalidEncoder(
                "Encoder has no `name` method available")

    def deregister(self, encoder):
        """
        Removes a particular encoder from the registry

        Parameters
        ----------
        encoder: obj or str
            The encoder object (looked up via ``name()``) or the name of
            the mime subtype parsed by the encoder.
        """
        try:
            del self[encoder.name()]
        except AttributeError:
            # Fall back to treating the argument as the name itself.
            del self[encoder]

    def reset(self):
        """
        Resets the registry to have no encoders.
        """
        # BUG FIX: previously reset to a plain dict, losing the
        # OrderedDict type used everywhere else in this class.
        self._encoders = OrderedDict()

    def determine_encoder_name(self, data):
        """
        Determines the name of the first registered encoder able to
        encode *data*; raises NotImplementedError when none can.
        """
        for name, encoder in self._encoders.items():
            if encoder.encodable(data):
                return name
        raise NotImplementedError(
            "Scrap of type {stype} has no supported encoder registered".format(
                stype=type(data)))

    def decode(self, scrap, **kwargs):
        """
        Finds the register for the given encoder and translates the
        scrap's data from a string or JSON type to an object of the
        encoder output type.

        Parameters
        ----------
        scrap: Scrap
            A partially filled in scrap with data that needs decoding
        """
        # Run validation on encoded data
        scrap_to_payload(scrap)
        loader = self._encoders.get(scrap.encoder)
        if not loader:
            raise ScrapbookMissingEncoder(
                'No encoder found for "{}" encoder type!'.format(
                    scrap.encoder))
        return loader.decode(scrap, **kwargs)

    def encode(self, scrap, **kwargs):
        """
        Finds the register for the given encoder and translates the
        scrap's data from an object of the encoder type to a JSON typed
        object.

        Parameters
        ----------
        scrap: Scrap
            A partially filled in scrap with data that needs encoding
        """
        encoder = self._encoders.get(scrap.encoder)
        if not encoder:
            raise ScrapbookMissingEncoder(
                # BUG FIX: the message previously formatted the (None)
                # local `encoder` instead of the requested encoder name.
                'No encoder found for "{data_type}" data type!'.format(
                    data_type=scrap.encoder))
        output_scrap = encoder.encode(scrap, **kwargs)
        # Run validation on encoded data
        scrap_to_payload(output_scrap)
        return output_scrap
class Progbar(object):
    """ Comprehensive review of any progress, this object is fully pickle-able,
    and can be used for storing history, summaries and report of the progress
    as well.

    Parameters
    ----------
    target: int
        total number of steps expected
    interval: float
        Minimum progress display update interval, in seconds.
    keep: bool
        whether to keep the progress bar when the epoch finished
    print_report: bool
        print updated report along with the progress bar for each update
    print_summary: bool
        print epoch summary after each epoch
    count_func: call-able
        a function takes the returned batch and return an integer for
        updating progress.
    report_func: call-able
        a function takes the returned batch and a collection of pair
        (key, value) for constructing the report.
    progress_func : call-able
        for post-processing the return value during processing into a number
        representing addition in the progress
    name: str or None
        specific name for the progress bar

    Examples
    --------
    >>> import numpy as np
    >>> from odin.utils import Progbar
    >>> x = list(range(10))
    >>> for i in Progbar(target=x):
    ...     pass

    Note
    ----
    Some special case:
      * any report key contain "confusionmatrix" will be printed out using
        `print_confusion`
      * any report key
    """
    # Output stream shared by all progress bars (class attribute so that
    # cursor-movement writes from different instances hit the same stream).
    FP = sys.stderr

    def __init__(self, target, interval=0.08, keep=False, print_progress=True,
                 print_report=True, print_summary=False,
                 count_func=None, report_func=None, progress_func=None,
                 name=None):
        self.__pb = None  # tqdm object, lazily created by `progress_bar`
        # `target` is either a total step count, or a sized iterable whose
        # length defines the total step count.
        if isinstance(target, Number):
            self.target = int(target)
            self.__iter_obj = None
        elif hasattr(target, '__len__'):
            self.target = len(target)
            self.__iter_obj = target
        else:
            raise ValueError("Unsupport for `target` type: %s" %
                             str(target.__class__))
        self._seen_so_far = defaultdict(int)  # mapping: epoch_idx -> seen_so_far
        # e.g. target=120 -> '(%3d/%3d)' so counters line up column-wise.
        n = len(str(self.target))
        self._counter_fmt = '(%%%dd/%%%dd)' % (n, n)
        if name is None:
            name = "Progress-%s" % datetime.utcnow()
        self._name = name
        # ====== flags ====== #
        self.__interval = float(interval)
        self.__keep = keep
        self.print_progress = bool(print_progress)
        self.print_report = bool(print_report)
        self.print_summary = bool(print_summary)
        # ====== for history ====== #
        self._report = OrderedDict()
        self._last_report = None
        self._last_print_time = None
        self._epoch_summarizer_func = {}
        # ====== recording history ====== #
        # dictionary: {epoch_id: {key: [value1, value2, ...]}}
        self._epoch_hist = defaultdict(_default_dict_list_creator)
        self._epoch_summary = defaultdict(dict)
        self._epoch_idx = 0
        self._epoch_start_time = None
        # ====== iter information ====== #
        # count_func/report_func only make sense when iterating `target`.
        if self.__iter_obj is None and \
                (count_func is not None or report_func is not None):
            raise RuntimeError("`count_func` and `report_func` can only be used "
                               "when `target` is an iterator with specific length.")
        #
        self.__count_func = _FuncWrap(func=count_func,
                                      default_func=lambda x: len(x))
        self.__report_func = _FuncWrap(func=report_func,
                                       default_func=lambda x: None)
        # ====== check progress function ====== #
        self._progress_func = _FuncWrap(func=progress_func,
                                        default_func=lambda x: x)
        # ====== other ====== #
        self._labels = None  # labels for printing the confusion matrix

    # ==================== History management ==================== #
    def __getitem__(self, key):
        return self._report.__getitem__(key)

    def __setitem__(self, key, val):
        # Every report assignment is also appended to this epoch's history.
        self._epoch_hist[self.epoch_idx][key].append(val)
        return self._report.__setitem__(key, val)

    def __delitem__(self, key):
        return self._report.__delitem__(key)

    def __iter__(self):
        """Iterate the wrapped iterable, advancing the bar per batch."""
        if self.__iter_obj is None:
            raise RuntimeError("This Progbar cannot be iterated, "
                               "the set `target` must be iterable.")
        for X in self.__iter_obj:
            count = self.__count_func(X)
            report = self.__report_func(X)
            if report is not None:
                for key, val in report:
                    self[key] = val
            self.add(int(count))
            yield X
        # One-shot iteration: release the iterable and its helpers.
        del self.__iter_obj
        del self.__count_func
        del self.__report_func

    # ==================== screen control ==================== #
    @property
    def epoch_idx(self):
        return self._epoch_idx

    @property
    def nb_epoch(self):
        return self._epoch_idx + 1

    @property
    def name(self):
        return self._name

    @property
    def labels(self):
        """ Special labels for printing the confusion matrix. """
        return self._labels

    @property
    def history(self):
        """ Return dictionary:
        {epoch_id : {tensor_name0: [batch_return1, batch_return2, ...],
                     tensor_name1: [batch_return1, batch_return2, ...],
                     ...},
         1 : {tensor_name0: [batch_return1, batch_return2, ...],
              tensor_name1: [batch_return1, batch_return2, ...],
              ...},
         ...
        }

        Example
        -------
        >>> for epoch_id, results in task.history.items():
        >>>     for tensor_name, values in results.items():
        >>>         print(tensor_name, len(values))
        """
        return self._epoch_hist

    def get_report(self, epoch=-1, key=None):
        """Return one epoch's history dict, or a single key's value list."""
        if epoch < 0:
            # negative indices count back from the current epoch
            epoch = self.nb_epoch + epoch - 1
        return self._epoch_hist[epoch] if key is None else \
            self._epoch_hist[epoch][key]

    def set_summarizer(self, key, fn):
        """ Epoch summarizer is a function, searching in the
        report for given key, and summarize all the stored values
        of each epoch into a readable format

        i.e. the input arguments is a list of stored epoch report,
        the output is a string.
        """
        if not hasattr(fn, '__call__'):
            raise ValueError('`fn` must be call-able.')
        key = str(key)
        self._epoch_summarizer_func[key] = _FuncWrap(func=fn, default_func=None)
        return self

    def set_name(self, name):
        self._name = str(name)
        return self

    def set_labels(self, labels):
        if labels is not None:
            self._labels = tuple([str(l) for l in labels])
        return self

    def _formatted_report(self, report_dict, margin='', inc_name=True):
        """ Convert a dictionary of key -> value to well formatted string."""
        if inc_name:
            text = _MAGENTA + "\t%s" % self.name + _RESET + '\n'
        else:
            text = ""
        report_dict = sorted(report_dict.items(), key=lambda x: str(x[0]))
        for i, (key, value) in enumerate(report_dict):
            # ====== check value of key and value ====== #
            key = margin + str(key).replace('\n', ' ')
            # ====== special cases ====== #
            if "confusionmatrix" in key.lower() or \
                    "confusion_matrix" in key.lower() or \
                    "confusion-matrix" in key.lower() or \
                    "confusion matrix" in key.lower():
                value = print_confusion(value, labels=self.labels,
                                        inc_stats=True)
            # just print out string representation
            else:
                value = str(value)
            # ====== multiple lines or not ====== #
            if '\n' in value:
                text += _YELLOW + key + _RESET + ":\n"
                for line in value.split('\n'):
                    text += margin + ' ' + line + '\n'
            else:
                text += _YELLOW + key + _RESET + ": " + value + "\n"
        # drop the trailing newline
        return text[:-1]

    @property
    def progress_bar(self):
        # Lazily create the tqdm bar for the current epoch; also records the
        # epoch start time on first access.
        if self.__pb is None:
            it = range(self.target)
            self.__pb = _tqdm(iterable=it,
                              desc="Epoch%s" % str(self.epoch_idx),
                              leave=self.__keep, total=self.target,
                              file=Progbar.FP, unit='obj',
                              mininterval=self.__interval, maxinterval=10,
                              miniters=0, position=0)
            self.__pb.clear()
            self._epoch_start_time = time.time()
        return self.__pb

    @property
    def seen_so_far(self):
        return self._seen_so_far[self.epoch_idx]

    def _generate_epoch_summary(self, epoch, inc_name=False, inc_counter=True):
        """Render a one-epoch summary line (plus formatted report) as text."""
        seen_so_far = self._seen_so_far[epoch]
        if seen_so_far == 0:
            return ''
        # ====== include name ====== #
        if inc_name:
            s = _MAGENTA + "%s" % self.name + _RESET
        else:
            s = ""
        # ====== create epoch summary ====== #
        if seen_so_far == self.target:  # epoch already finished
            speed = (1. / self._epoch_summary[epoch]['__avg_time__'])
            elapsed = self._epoch_summary[epoch]['__total_time__']
        else:  # epoch hasn't finished
            avg_time = (time.time() - self._epoch_start_time) / self.seen_so_far \
                if self.progress_bar.avg_time is None else \
                self.progress_bar.avg_time
            speed = 1. / avg_time
            elapsed = time.time() - self._epoch_start_time
        # ====== counter ====== #
        if inc_counter:
            frac = seen_so_far / self.target
            counter_epoch = self._counter_fmt % (seen_so_far, self.target)
            percentage = "%6.2f%%%s " % (frac * 100, counter_epoch)
        else:
            percentage = ''
        s += _RED + " Epoch %d " % epoch + _RESET + "%.4f(s) %s%.4f(obj/s)" % \
            (elapsed, percentage, speed)
        # epoch summary
        summary = dict(self._epoch_summary[epoch])
        if len(summary) > 2:
            # strip the internal timing keys before rendering user keys
            summary.pop('__total_time__', None)
            summary.pop('__avg_time__', None)
            s += '\n' + self._formatted_report(summary, margin=' ',
                                               inc_name=False)
        return s

    @property
    def summary(self):
        s = _MAGENTA + "Report \"%s\" TotalEpoch: %d\n" % \
            (self.name, self.nb_epoch) + _RESET
        # ====== create summary for each epoch ====== #
        s += '\n'.join([self._generate_epoch_summary(i)
                        for i in range(self.nb_epoch)])
        return s[:-1]

    # ==================== same actions ==================== #
    def add_notification(self, msg):
        # tqdm.write prints without corrupting the active progress bar.
        msg = _CYAN + "[%s][%s]Notification:" % \
            (datetime.now().strftime('%d/%b-%H:%M:%S'),
             _MAGENTA + self.name + _CYAN) + _RESET + msg
        _tqdm.write(msg)
        return self

    def _new_epoch(self):
        """Close the current bar, summarize the finished epoch, reset state."""
        if self.__pb is None:
            return
        # calculate number of offset lines from last report
        if self._last_report is None:
            nlines = 0
        else:
            nlines = len(self._last_report.split('\n'))
        # ====== reset progress bar (tqdm) ====== #
        if self.__keep:  # keep the last progress on screen
            self.__pb.moveto(nlines)
        else:  # clear everything
            for i in range(nlines):
                Progbar.FP.write('\r')
                console_width = _environ_cols_wrapper()(Progbar.FP)
                Progbar.FP.write(' ' * (79 if console_width is None
                                        else console_width))
                Progbar.FP.write('\r')  # place cursor back at the beginning of line
                self.__pb.moveto(1)
            self.__pb.moveto(-(nlines * 2))
        self.__pb.close()
        # ====== create epoch summary ====== #
        for key, values in self._epoch_hist[self._epoch_idx].items():
            values = [v for v in values]
            # provided summarizer function
            if key in self._epoch_summarizer_func:
                self._epoch_summary[self._epoch_idx][key] = \
                    self._epoch_summarizer_func[key](values)
            # very heuristic way to deal with sequence of numbers
            elif isinstance(values[0], Number):
                self._epoch_summary[self._epoch_idx][key] = np.mean(values)
            # numpy array
            elif isinstance(values[0], np.ndarray):
                self._epoch_summary[self._epoch_idx][key] = sum(v for v in values)
        # total epoch time
        total_time = time.time() - self._epoch_start_time
        self._epoch_summary[self._epoch_idx]['__total_time__'] = total_time
        # average time for 1 object
        avg_time = self.__pb.avg_time
        if avg_time is None:
            avg_time = total_time / self.target
        self._epoch_summary[self._epoch_idx]['__avg_time__'] = avg_time
        # reset all flags
        self.__pb = None
        self._last_report = None
        self._last_print_time = None
        self._epoch_start_time = None
        self._epoch_idx += 1
        return self

    @contextmanager
    def safe_progress(self):
        """ This context manager will automatically call `pause` if the
        progress unfinished, hence, it doesn't mesh up the screen. """
        yield None
        if 0 < self.seen_so_far < self.target:
            self.pause()

    def pause(self):
        """ Call `pause` if progress is running, hasn't finish, and you want
        to print something else on the scree.
        """
        # ====== clear the report ====== #
        if self._last_report is not None:
            nlines = len(self._last_report.split("\n"))
            self.__pb.moveto(-nlines)
            for i in range(nlines):
                Progbar.FP.write('\r')
                console_width = _environ_cols_wrapper()(Progbar.FP)
                Progbar.FP.write(' ' * (79 if console_width is None
                                        else console_width))
                Progbar.FP.write('\r')  # place cursor back at the beginning of line
                self.__pb.moveto(1)
        else:
            nlines = 0
        # ====== clear the bar ====== #
        if self.__pb is not None:
            self.__pb.clear()
            self.__pb.moveto(-nlines)
        # ====== reset the last report ====== #
        # because we already clean everything,
        # set _last_report=None prevent
        # further moveto(-nlines) in add()
        self._last_report = None
        return self

    def add(self, n=1):
        """ You need to call pause if """
        n = self._progress_func(n)
        if not isinstance(n, Number):
            raise RuntimeError(
                "Progress return an object, but not given `progress_func` for post-processing")
        if n <= 0:
            return self
        fp = Progbar.FP
        # ====== update information ====== #
        seen_so_far = min(self._seen_so_far[self.epoch_idx] + n, self.target)
        self._seen_so_far[self.epoch_idx] = seen_so_far
        # ====== check last updated progress, for automatically pause ====== #
        # NOTE(review): _LAST_UPDATED_PROG appears to be a module-level
        # single-slot registry of the most recently updated bar, used to pause
        # a different bar before this one draws -- confirm at module scope.
        if _LAST_UPDATED_PROG[0] is None:
            _LAST_UPDATED_PROG[0] = self
        elif _LAST_UPDATED_PROG[0] != self:
            _LAST_UPDATED_PROG[0].pause()
            _LAST_UPDATED_PROG[0] = self
        # ====== show report ====== #
        if self.print_report:
            curr_time = time.time()
            # update the report
            if self._last_print_time is None or \
                    time.time() - self._last_print_time > self.__interval or \
                    seen_so_far >= self.target:
                self._last_print_time = curr_time
                # move the cursor to last point
                if self._last_report is not None:
                    nlines = len(self._last_report.split('\n'))
                    self.progress_bar.moveto(-nlines)
                report = self._formatted_report(self._report)
                # clear old report
                if self._last_report is not None:
                    for i, l in enumerate(self._last_report.split('\n')):
                        fp.write('\r')
                        fp.write(' ' * len(l))
                        fp.write('\r')  # place cursor back at the beginning of line
                        self.progress_bar.moveto(1)
                    self.progress_bar.clear()
                    self.progress_bar.moveto(-i - 1)
                fp.write(report)
                fp.flush()
                self._last_report = report
                self.progress_bar.moveto(1)
        # ====== show progress ====== #
        if self.print_progress:
            self.progress_bar.update(n=n)
        else:
            # touch the property so the bar (and epoch timer) exist even when
            # the visual bar itself is not printed
            self.progress_bar
        # ====== end of epoch ====== #
        if seen_so_far >= self.target:
            self._new_epoch()
            # print summary of epoch
            if self.print_summary:
                _tqdm.write(self._generate_epoch_summary(self.epoch_idx - 1,
                                                         inc_name=True,
                                                         inc_counter=False))
        return self
class DotMap(MutableMapping, OrderedDict):
    """Ordered mapping with attribute ("dot") access: ``m.a.b`` == ``m['a']['b']``.

    Missing keys are auto-created as nested DotMaps when `_dynamic` is True.
    Construction tracks object ids (`_trackedIDs`) so self-referential input
    dicts do not cause infinite recursion. When `_prevent_method_masking` is
    True, keys colliding with `reserved_keys` are rejected.
    # NOTE(review): `reserved_keys` is defined elsewhere in this module --
    # presumably the set of DotMap method names; confirm at module scope.
    """

    def __init__(self, *args, **kwargs):
        self._map = OrderedDict()
        self._dynamic = kwargs.pop('_dynamic', True)
        self._prevent_method_masking = kwargs.pop('_prevent_method_masking', False)
        # id(obj) -> already-built DotMap, to survive recursive assignment
        trackedIDs = kwargs.pop('_trackedIDs', {})
        if args:
            d = args[0]
            # for recursive assignment handling
            trackedIDs[id(d)] = self
            src = []
            if isinstance(d, MutableMapping):
                src = self.__call_items(d)
            elif isinstance(d, Iterable):
                # e.g. a sequence of (key, value) pairs
                src = d
            for k, v in src:
                if self._prevent_method_masking and k in reserved_keys:
                    raise KeyError('"{}" is reserved'.format(k))
                if isinstance(v, dict):
                    idv = id(v)
                    if idv in trackedIDs:
                        # already converted (or being converted) -- reuse it
                        v = trackedIDs[idv]
                    else:
                        trackedIDs[idv] = v
                        v = self.__class__(
                            v, _dynamic=self._dynamic,
                            _prevent_method_masking=self._prevent_method_masking,
                            _trackedIDs=trackedIDs)
                if type(v) is list:
                    # convert dict elements of a list, element-by-element
                    l = []
                    for i in v:
                        n = i
                        if isinstance(i, dict):
                            idi = id(i)
                            if idi in trackedIDs:
                                n = trackedIDs[idi]
                            else:
                                trackedIDs[idi] = i
                                n = self.__class__(
                                    i, _dynamic=self._dynamic,
                                    _prevent_method_masking=self._prevent_method_masking)
                        l.append(n)
                    v = l
                self._map[k] = v
        if kwargs:
            for k, v in self.__call_items(kwargs):
                if self._prevent_method_masking and k in reserved_keys:
                    raise KeyError('"{}" is reserved'.format(k))
                self._map[k] = v

    def __call_items(self, obj):
        # Python 2 compatibility: prefer iteritems() when available.
        if hasattr(obj, 'iteritems') and ismethod(getattr(obj, 'iteritems')):
            return obj.iteritems()
        else:
            return obj.items()

    def items(self):
        return self.iteritems()

    def iteritems(self):
        return self.__call_items(self._map)

    def __iter__(self):
        return self._map.__iter__()

    def next(self):
        return self._map.next()

    def __setitem__(self, k, v):
        self._map[k] = v

    def __getitem__(self, k):
        # The IPython canary must NOT be auto-created, otherwise IPython
        # mis-detects lazy attribute behavior.
        if k not in self._map and self._dynamic and \
                k != '_ipython_canary_method_should_not_exist_':
            # automatically extend to new DotMap
            self[k] = self.__class__()
        return self._map[k]

    def __setattr__(self, k, v):
        # Internal attributes bypass the mapping and live on the instance.
        if k in {'_map', '_dynamic', '_ipython_canary_method_should_not_exist_',
                 '_prevent_method_masking'}:
            super(DotMap, self).__setattr__(k, v)
        elif self._prevent_method_masking and k in reserved_keys:
            raise KeyError('"{}" is reserved'.format(k))
        else:
            self[k] = v

    def __getattr__(self, k):
        # Never resolve dunder names through the mapping (pickle/copy probes).
        if k.startswith('__') and k.endswith('__'):
            raise AttributeError(k)
        if k in {'_map', '_dynamic', '_ipython_canary_method_should_not_exist_'}:
            return super(DotMap, self).__getattr__(k)
        try:
            # real attributes (methods, etc.) win over mapping keys
            v = super(self.__class__, self).__getattribute__(k)
            return v
        except AttributeError:
            pass
        return self[k]

    def __delattr__(self, key):
        # NOTE(review): raises KeyError (not AttributeError) for missing keys;
        # callers appear to rely on mapping semantics here.
        return self._map.__delitem__(key)

    def __contains__(self, k):
        return self._map.__contains__(k)

    def __add__(self, other):
        # An empty DotMap acts as the additive identity; anything else is an error.
        if self.empty():
            return other
        else:
            self_type = type(self).__name__
            other_type = type(other).__name__
            msg = "unsupported operand type(s) for +: '{}' and '{}'"
            raise TypeError(msg.format(self_type, other_type))

    def __str__(self, seen=None):
        items = []
        seen = {id(self)} if seen is None else seen
        for k, v in self.__call_items(self._map):
            # circular assignment case
            if isinstance(v, self.__class__):
                if id(v) in seen:
                    items.append('{0}={1}(...)'.format(k, self.__class__.__name__))
                else:
                    seen.add(id(v))
                    items.append('{0}={1}'.format(k, v.__str__(seen)))
            else:
                items.append('{0}={1}'.format(k, repr(v)))
        joined = ', '.join(items)
        out = '{0}({1})'.format(self.__class__.__name__, joined)
        return out

    def __repr__(self):
        return str(self)

    def toDict(self, seen=None):
        """Recursively convert to a plain dict; `seen` preserves cycles."""
        if seen is None:
            seen = {}
        d = {}
        seen[id(self)] = d
        for k, v in self.items():
            if issubclass(type(v), DotMap):
                idv = id(v)
                if idv in seen:
                    v = seen[idv]
                else:
                    v = v.toDict(seen=seen)
            elif type(v) in (list, tuple):
                l = []
                for i in v:
                    n = i
                    if issubclass(type(i), DotMap):
                        idv = id(n)
                        if idv in seen:
                            n = seen[idv]
                        else:
                            n = i.toDict(seen=seen)
                    l.append(n)
                if type(v) is tuple:
                    v = tuple(l)
                else:
                    v = l
            d[k] = v
        return d

    def pprint(self, pformat='dict'):
        if pformat == 'json':
            print(dumps(self.toDict(), indent=4, sort_keys=True))
        else:
            pprint(self.toDict())

    def empty(self):
        return (not any(self))

    # proper dict subclassing
    def values(self):
        return self._map.values()

    # ipython support
    def __dir__(self):
        return self.keys()

    @classmethod
    def parseOther(self, other):
        # Unwrap another DotMap to its backing OrderedDict for comparisons.
        if issubclass(type(other), DotMap):
            return other._map
        else:
            return other

    def __cmp__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__cmp__(other)

    def __eq__(self, other):
        other = DotMap.parseOther(other)
        if not isinstance(other, dict):
            return False
        return self._map.__eq__(other)

    def __ge__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ge__(other)

    def __gt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__gt__(other)

    def __le__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__le__(other)

    def __lt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__lt__(other)

    def __ne__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ne__(other)

    def __delitem__(self, key):
        return self._map.__delitem__(key)

    def __len__(self):
        return self._map.__len__()

    def clear(self):
        self._map.clear()

    def copy(self):
        return self.__class__(self)

    def __copy__(self):
        return self.copy()

    def __deepcopy__(self, memo=None):
        # NOTE(review): delegates to copy(); the constructor re-converts nested
        # dicts but `memo` is ignored, so shared leaf objects stay shared.
        return self.copy()

    def get(self, key, default=None):
        return self._map.get(key, default)

    def has_key(self, key):
        return key in self._map

    def iterkeys(self):
        return self._map.iterkeys()

    def itervalues(self):
        return self._map.itervalues()

    def keys(self):
        return self._map.keys()

    def pop(self, key, default=None):
        return self._map.pop(key, default)

    def popitem(self):
        return self._map.popitem()

    def setdefault(self, key, default=None):
        return self._map.setdefault(key, default)

    def update(self, *args, **kwargs):
        if len(args) != 0:
            self._map.update(*args)
        self._map.update(kwargs)

    def viewitems(self):
        return self._map.viewitems()

    def viewkeys(self):
        return self._map.viewkeys()

    def viewvalues(self):
        return self._map.viewvalues()

    @classmethod
    def fromkeys(cls, seq, value=None):
        d = cls()
        d._map = OrderedDict.fromkeys(seq, value)
        return d

    def __getstate__(self):
        return self.__dict__

    def __setstate__(self, d):
        self.__dict__.update(d)

    # bannerStr
    def _getListStr(self, items):
        out = '['
        mid = ''
        for i in items:
            mid += ' {}\n'.format(i)
        if mid != '':
            mid = '\n' + mid
        out += mid
        out += ']'
        return out

    def _getValueStr(self, k, v):
        outV = v
        multiLine = len(str(v).split('\n')) > 1
        if multiLine:
            # push to next line
            outV = '\n' + v
        if type(v) is list:
            outV = self._getListStr(v)
        out = '{} {}'.format(k, outV)
        return out

    def _getSubMapDotList(self, pre, name, subMap):
        # Flatten a nested DotMap to "a.b.c value" lines.
        outList = []
        if pre == '':
            pre = name
        else:
            pre = '{}.{}'.format(pre, name)

        def stamp(pre, k, v):
            valStr = self._getValueStr(k, v)
            return '{}.{}'.format(pre, valStr)

        for k, v in subMap.items():
            if isinstance(v, DotMap) and v != DotMap():
                subList = self._getSubMapDotList(pre, k, v)
                outList.extend(subList)
            else:
                outList.append(stamp(pre, k, v))
        return outList

    def _getSubMapStr(self, name, subMap):
        outList = ['== {} =='.format(name)]
        for k, v in subMap.items():
            if isinstance(v, self.__class__) and v != self.__class__():
                # break down to dots
                subList = self._getSubMapDotList('', k, v)
                # add the divit
                # subList = ['> {}'.format(i) for i in subList]
                outList.extend(subList)
            else:
                out = self._getValueStr(k, v)
                # out = '> {}'.format(out)
                out = '{}'.format(out)
                outList.append(out)
        finalOut = '\n'.join(outList)
        return finalOut

    def bannerStr(self):
        """Render the map as a banner-style multi-line report string."""
        lines = []
        previous = None
        for k, v in self.items():
            if previous == self.__class__.__name__:
                lines.append('-')
            out = ''
            if isinstance(v, self.__class__):
                name = k
                subMap = v
                out = self._getSubMapStr(name, subMap)
                lines.append(out)
                previous = self.__class__.__name__
            else:
                out = self._getValueStr(k, v)
                lines.append(out)
                previous = 'other'
        lines.append('--')
        s = '\n'.join(lines)
        return s
class DotMap(OrderedDict):
    """Ordered mapping with attribute ("dot") access.

    Nested plain dicts are converted to DotMaps on construction. When
    `_dynamic` is True (the default), reading a missing key auto-creates an
    empty nested DotMap.
    """

    def __init__(self, *args, **kwargs):
        self._map = OrderedDict()
        self._dynamic = True
        if kwargs:
            if '_dynamic' in kwargs:
                self._dynamic = kwargs['_dynamic']
        if args:
            d = args[0]
            if isinstance(d, dict):
                for k, v in self.__call_items(d):
                    if type(v) is dict:
                        v = DotMap(v, _dynamic=self._dynamic)
                    if type(v) is list:
                        # convert dict elements of a list element-by-element
                        l = []
                        for i in v:
                            n = i
                            if type(i) is dict:
                                n = DotMap(i, _dynamic=self._dynamic)
                            l.append(n)
                        v = l
                    self._map[k] = v
        if kwargs:
            for k, v in self.__call_items(kwargs):
                # BUGFIX: was `if k is not '_dynamic'` -- identity comparison
                # with a string literal is implementation-dependent (and a
                # SyntaxWarning on modern CPython); use equality.
                if k != '_dynamic':
                    self._map[k] = v

    def __call_items(self, obj):
        # Python 2 compatibility: prefer iteritems() when available.
        if hasattr(obj, 'iteritems') and ismethod(getattr(obj, 'iteritems')):
            return obj.iteritems()
        else:
            return obj.items()

    def items(self):
        return self.iteritems()

    def iteritems(self):
        return self.__call_items(self._map)

    def __iter__(self):
        return self._map.__iter__()

    def next(self):
        return self._map.next()

    def __setitem__(self, k, v):
        self._map[k] = v

    def __getitem__(self, k):
        # The IPython canary must NOT be auto-created, otherwise IPython
        # mis-detects lazy attribute behavior.
        if k not in self._map and self._dynamic and \
                k != '_ipython_canary_method_should_not_exist_':
            # automatically extend to new DotMap
            self[k] = DotMap()
        return self._map[k]

    def __setattr__(self, k, v):
        if k in {'_map', '_dynamic', '_ipython_canary_method_should_not_exist_'}:
            super(DotMap, self).__setattr__(k, v)
        else:
            self[k] = v

    def __getattr__(self, k):
        # BUGFIX: was `if k == {...}` -- comparing a string to a set is always
        # False, so internal names fell through to self[k], which recurses
        # infinitely when `_map` is not yet set (e.g. during unpickling) and
        # denies IPython's canary its expected AttributeError. Membership test
        # lets the (absent) superclass hook raise AttributeError instead.
        if k in {'_map', '_dynamic', '_ipython_canary_method_should_not_exist_'}:
            return super(DotMap, self).__getattr__(k)
        else:
            return self[k]

    def __delattr__(self, key):
        return self._map.__delitem__(key)

    def __contains__(self, k):
        return self._map.__contains__(k)

    def __str__(self):
        items = []
        for k, v in self.__call_items(self._map):
            # bizarre recursive assignment situation (why someone would do
            # this is beyond me)
            if id(v) == id(self):
                items.append('{0}=DotMap(...)'.format(k))
            else:
                items.append('{0}={1}'.format(k, repr(v)))
        out = 'DotMap({0})'.format(', '.join(items))
        return out

    def __repr__(self):
        return str(self)

    def toDict(self):
        """Recursively convert to a plain dict."""
        d = {}
        for k, v in self.items():
            if type(v) is DotMap:
                # bizarre recursive assignment support
                if id(v) == id(self):
                    v = d
                else:
                    v = v.toDict()
            elif type(v) is list:
                l = []
                for i in v:
                    n = i
                    if type(i) is DotMap:
                        n = i.toDict()
                    l.append(n)
                v = l
            d[k] = v
        return d

    def pprint(self):
        pprint(self.toDict())

    def empty(self):
        return (not any(self))

    # proper dict subclassing
    def values(self):
        return self._map.values()

    # ipython support
    def __dir__(self):
        return self.keys()

    @classmethod
    def parseOther(self, other):
        # Unwrap another DotMap to its backing OrderedDict for comparisons.
        if type(other) is DotMap:
            return other._map
        else:
            return other

    def __cmp__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__cmp__(other)

    def __eq__(self, other):
        other = DotMap.parseOther(other)
        if not isinstance(other, dict):
            return False
        return self._map.__eq__(other)

    def __ge__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ge__(other)

    def __gt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__gt__(other)

    def __le__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__le__(other)

    def __lt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__lt__(other)

    def __ne__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ne__(other)

    def __delitem__(self, key):
        return self._map.__delitem__(key)

    def __len__(self):
        return self._map.__len__()

    def clear(self):
        self._map.clear()

    def copy(self):
        return DotMap(self.toDict())

    def get(self, key, default=None):
        return self._map.get(key, default)

    def has_key(self, key):
        return key in self._map

    def iterkeys(self):
        return self._map.iterkeys()

    def itervalues(self):
        return self._map.itervalues()

    def keys(self):
        return self._map.keys()

    def pop(self, key, default=None):
        return self._map.pop(key, default)

    def popitem(self):
        return self._map.popitem()

    def setdefault(self, key, default=None):
        # BUGFIX: the result was silently dropped; dict.setdefault returns
        # the stored value.
        return self._map.setdefault(key, default)

    def update(self, *args, **kwargs):
        if len(args) != 0:
            self._map.update(*args)
        self._map.update(kwargs)

    def viewitems(self):
        return self._map.viewitems()

    def viewkeys(self):
        return self._map.viewkeys()

    def viewvalues(self):
        return self._map.viewvalues()

    @classmethod
    def fromkeys(cls, seq, value=None):
        d = DotMap()
        d._map = OrderedDict.fromkeys(seq, value)
        return d

    def __getstate__(self):
        return self.__dict__

    def __setstate__(self, d):
        self.__dict__.update(d)
def __delitem__(self, name):
    """Delete `name`: run the subclass hook first, then remove the entry.

    # NOTE(review): `_delitem` is a project-defined hook invoked before the
    # underlying OrderedDict entry is removed -- presumably for side-effect
    # cleanup; confirm against the enclosing class.
    """
    self._delitem(name)
    OrderedDict.__delitem__(self, name)
class DotMap(OrderedDict):
    """Minimal ordered mapping with attribute ("dot") access.

    Nested plain dicts are converted to DotMaps on construction, and reading
    a missing key always auto-creates an empty nested DotMap.
    """

    def __init__(self, *args, **kwargs):
        self._map = OrderedDict()
        if args:
            d = args[0]
            if type(d) is dict:
                for k, v in self.__call_items(d):
                    if type(v) is dict:
                        v = DotMap(v)
                    self._map[k] = v
        if kwargs:
            for k, v in self.__call_items(kwargs):
                self._map[k] = v

    def __call_items(self, obj):
        # Python 2 compatibility: prefer iteritems() when available.
        if hasattr(obj, 'iteritems') and ismethod(getattr(obj, 'iteritems')):
            return obj.iteritems()
        else:
            return obj.items()

    def items(self):
        return self.iteritems()

    def iteritems(self):
        return self.__call_items(self._map)

    def __iter__(self):
        return self._map.__iter__()

    def next(self):
        return self._map.next()

    def __setitem__(self, k, v):
        self._map[k] = v

    def __getitem__(self, k):
        if k not in self._map:
            # automatically extend to new DotMap
            self[k] = DotMap()
        return self._map[k]

    def __setattr__(self, k, v):
        if k == '_map':
            super(DotMap, self).__setattr__(k, v)
        else:
            self[k] = v

    def __getattr__(self, k):
        if k == '_map':
            # No superclass __getattr__ exists, so this raises AttributeError
            # (important when `_map` is not yet set, e.g. while unpickling).
            super(DotMap, self).__getattr__(k)
        else:
            return self[k]

    def __delattr__(self, key):
        return self._map.__delitem__(key)

    def __contains__(self, k):
        return self._map.__contains__(k)

    def __str__(self):
        items = []
        for k, v in self.__call_items(self._map):
            items.append('{0}={1}'.format(k, repr(v)))
        out = 'DotMap({0})'.format(', '.join(items))
        return out

    def __repr__(self):
        return str(self)

    def toDict(self):
        """Recursively convert to a plain dict."""
        d = {}
        for k, v in self.items():
            if type(v) is DotMap:
                v = v.toDict()
            d[k] = v
        return d

    def pprint(self):
        pprint(self.toDict())

    # proper dict subclassing
    def values(self):
        return self._map.values()

    @classmethod
    def parseOther(self, other):
        # Unwrap another DotMap to its backing OrderedDict for comparisons.
        if type(other) is DotMap:
            return other._map
        else:
            return other

    def __cmp__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__cmp__(other)

    def __eq__(self, other):
        other = DotMap.parseOther(other)
        if not isinstance(other, dict):
            return False
        return self._map.__eq__(other)

    def __ge__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ge__(other)

    def __gt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__gt__(other)

    def __le__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__le__(other)

    def __lt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__lt__(other)

    def __ne__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ne__(other)

    def __delitem__(self, key):
        return self._map.__delitem__(key)

    def __len__(self):
        return self._map.__len__()

    def clear(self):
        self._map.clear()

    def copy(self):
        # BUGFIX: used to `return self`, so "copies" aliased the original and
        # mutations propagated. Return an independent DotMap, matching the
        # dict.copy contract.
        return DotMap(self.toDict())

    def get(self, key, default=None):
        return self._map.get(key, default)

    def has_key(self, key):
        return key in self._map

    def iterkeys(self):
        return self._map.iterkeys()

    def itervalues(self):
        return self._map.itervalues()

    def keys(self):
        return self._map.keys()

    def pop(self, key, default=None):
        return self._map.pop(key, default)

    def popitem(self):
        return self._map.popitem()

    def setdefault(self, key, default=None):
        # BUGFIX: the result was silently dropped; dict.setdefault returns
        # the stored value.
        return self._map.setdefault(key, default)

    def update(self, *args, **kwargs):
        if len(args) != 0:
            self._map.update(*args)
        self._map.update(kwargs)

    def viewitems(self):
        return self._map.viewitems()

    def viewkeys(self):
        return self._map.viewkeys()

    def viewvalues(self):
        return self._map.viewvalues()

    @classmethod
    def fromkeys(cls, seq, value=None):
        d = DotMap()
        d._map = OrderedDict.fromkeys(seq, value)
        return d
def __delitem__(self, key, *args, **kwds):
    """Delete `key` case-insensitively.

    The key is normalized via `_lower` before removal; the same normalized
    key is purged from the internal case map as well.
    """
    lowered = _lower(key)
    OrderedDict.__delitem__(self, lowered, *args, **kwds)
    del self.__map[lowered]
def __delattr__(self, name):
    """Delete `name` as a dictionary key via attribute syntax.

    Raises:
        AttributeError: if `name` is not a stored key (the attribute
        protocol requires AttributeError, not KeyError).
    """
    try:
        OrderedDict.__delitem__(self, name)
    except KeyError:
        # BUGFIX: message read "has not attr"; corrected grammar.
        raise AttributeError('%r has no attr %r' % (self, name))
class BestPracticeWarning(collections.MutableMapping, base.ValidationError):
    """Represents a best practice warning. These are built within best
    practice rule checking methods and attached to
    :class:`BestPracticeWarningCollection` instances.

    Note:
        This class acts like a dictionary and contains the following keys
        at a minimum:

        * ``'id'``: The id of a node associated with the warning.
        * ``'idref'``: The idref of a node associated with the warning.
        * ``'line'``: The line number of the offending node.
        * ``'message'``: A message associated with the warning.
        * ``'tag'``: The lxml tag for the offending node.

        These keys can be retrieved via the :attr:`core_keys` property.

        Instances of this class may attach additional keys. These `other
        keys` can be obtained via the :attr:`other_keys` property.

    Args:
        node: The ``lxml._Element`` node associated with this warning.
        message: A message for this warning.

    """
    # NOTE(review): Python-2-era code (`unicode`, `collections.MutableMapping`,
    # `iterkeys`/`iteritems`); keep style consistent with the rest of the file.

    def __init__(self, node, message=None):
        base.ValidationError.__init__(self)

        # Backing ordered mapping for the dict-like interface.
        self._inner = OrderedDict()
        self._node = node

        # Seed the core keys from the offending lxml node.
        self['line'] = node.sourceline
        self['message'] = message
        self['id'] = node.attrib.get('id')
        self['idref'] = node.attrib.get('idref')
        self['tag'] = node.tag

    def __unicode__(self):
        return unicode(self.message)

    def __str__(self):
        return unicode(self).encode("utf-8")

    def __getitem__(self, key):
        return self._inner.__getitem__(key)

    def __delitem__(self, key):
        self._inner.__delitem__(key)

    def __setitem__(self, key, value):
        self._inner.__setitem__(key, value)

    def __len__(self):
        return self._inner.__len__()

    def __iter__(self):
        return self._inner.__iter__()

    @property
    def line(self):
        """Returns the line number of the warning node in the input
        document.

        """
        return self['line']

    @property
    def message(self):
        """Returns a message associated with the warning. This may return
        ``None`` if there is no warning message.

        """
        return self['message']

    @property
    def core_keys(self):
        """Returns a ``tuple`` of the keys that can always be found on
        instance of this class.

        Returns:
            A tuple including the following keys.

            * ``'id'``: The id of the warning node. The associated value
              may be ``None``.
            * ``'idref'``: The idref of the warning node. The associated value
              may be ``None``.
            * ``'line'``: The line number of the warning node in the input
              document. The associated value may be ``None``.
            * ``'tag'``: The ``{namespace}localname`` value of the warning
              node.
            * ``'message'``: An optional message that can be attached to the
              warning. The associated value may be ``None``.

        """
        return ('id', 'idref', 'line', 'tag', 'message')

    @property
    def other_keys(self):
        """Returns a ``tuple`` of keys attached to instances of this
        class that are not found in the :attr:`core_keys`.

        """
        return tuple(x for x in self.iterkeys() if x not in self.core_keys)

    def as_dict(self):
        """Returns a dictionary representation of this class instance. This
        is implemented for consistency across other validation error types.

        The :class:`.BestPracticeWarning` class extends
        :class:`collections.MutableMapping`, so this method isn't really
        necessary.

        """
        return dict(self.iteritems())
class BaseCache(object):
    """
    BaseCache is a class that saves and operates on an OrderedDict. It has a
    certain capacity, stored in the attribute `maxsize`. Whether this
    capacity is reached, can be checked by using the boolean property
    `is_full`. To implement a custom cache, inherit from this class and
    override the methods ``__getitem__`` and ``__setitem__``.
    Call the method `sunpy.database.caching.BaseCache.callback` as soon
    as an item from the cache is removed.
    """
    def __init__(self, maxsize=float('inf')):
        # maxsize: capacity bound; default float('inf') means "unbounded".
        self.maxsize = maxsize
        # Backing store; subclasses reorder/evict entries in it.
        self._dict = OrderedDict()

    def get(self, key, default=None):  # pragma: no cover
        """Return the corresponding value to `key` if `key` is in the cache,
        `default` otherwise. This method has no side-effects, multiple calls
        with the same cache and the same passed key must always return the
        same value.

        """
        # Bypasses __getitem__ on purpose so subclasses' access bookkeeping
        # (e.g. LRU reordering) is not triggered.
        try:
            return self._dict[key]
        except KeyError:
            return default

    @abstractmethod
    def __getitem__(self, key):
        """abstract method: this method must be overwritten by inheriting
        subclasses. It defines what happens if an item from the cache is
        attempted to be accessed.

        """
        return  # pragma: no cover

    @abstractmethod
    def __setitem__(self, key, value):
        """abstract method: this method must be overwritten by inheriting
        subclasses. It defines what happens if a new value should be assigned
        to the given key. If the given key does already exist in the cache or
        not must be checked by the person who implements this method.
        """

    @abstractproperty
    def to_be_removed(self):
        """The item that will be removed on the next
        :meth:`sunpy.database.caching.BaseCache.remove` call.

        """

    @abstractmethod
    def remove(self):
        """Call this method to manually remove one item from the cache. Which
        item is removed, depends on the implementation of the cache. After the
        item has been removed, the callback method is called.

        """

    def callback(self, key, value):
        """This method should be called (by convention) if an item is removed
        from the cache because it is full. The passed key and value are the
        ones that are removed. By default this method does nothing, but it
        can be customized in a custom cache that inherits from this base
        class.

        """

    @property
    def is_full(self):
        """True if the number of items in the cache equals :attr:`maxsize`,
        False otherwise.

        """
        # NOTE(review): with the default maxsize of float('inf') this can
        # never become True — presumably callers always pass a finite bound.
        return len(self._dict) == self.maxsize

    def __delitem__(self, key):
        self._dict.__delitem__(key)

    def __contains__(self, key):
        return key in self._dict.keys()

    def __len__(self):
        return len(self._dict)

    def __iter__(self):
        for key in self._dict.__iter__():
            yield key

    def __reversed__(self):  # pragma: no cover
        for key in self._dict.__reversed__():
            yield key

    def clear(self):  # pragma: no cover
        return self._dict.clear()

    # list()-wrapped accessors give Python-2-style snapshot semantics.
    def keys(self):  # pragma: no cover
        return list(self._dict.keys())

    def values(self):  # pragma: no cover
        return list(self._dict.values())

    def items(self):  # pragma: no cover
        return list(self._dict.items())

    def iterkeys(self):  # pragma: no cover
        return iter(self._dict.keys())

    def itervalues(self):  # pragma: no cover
        for value in self._dict.values():
            yield value

    def iteritems(self):  # pragma: no cover
        for key, value in six.iteritems(self._dict):
            yield key, value

    def update(self, *args, **kwds):  # pragma: no cover
        self._dict.update(*args, **kwds)

    # Name-mangled access to MutableMapping's private sentinel so that
    # pop() without a default raises KeyError just like dict.pop.
    def pop(self, key, default=MutableMapping._MutableMapping__marker):  # pragma: no cover
        return self._dict.pop(key, default)

    def setdefault(self, key, default=None):  # pragma: no cover
        return self._dict.setdefault(key, default)

    def popitem(self, last=True):  # pragma: no cover
        return self._dict.popitem(last)

    def __reduce__(self):  # pragma: no cover
        return self._dict.__reduce__()

    def copy(self):  # pragma: no cover
        return self._dict.copy()

    def __eq__(self, other):  # pragma: no cover
        return self._dict.__eq__(other)

    def __ne__(self, other):  # pragma: no cover
        return self._dict.__ne__(other)

    # Python-2 dict view API emulation (views on Py3, lists on Py2).
    def viewkeys(self):  # pragma: no cover
        return self._dict.keys()

    def viewvalues(self):  # pragma: no cover
        return self._dict.values()

    def viewitems(self):  # pragma: no cover
        return self._dict.items()

    @classmethod
    def fromkeys(cls, iterable, value=None):  # pragma: no cover
        return OrderedDict.fromkeys(iterable, value)

    def __repr__(self):  # pragma: no cover
        return '{0}({1!r})'.format(self.__class__.__name__, dict(self._dict))
def __delitem__(self, key, **kwargs):
    """Drop both directions of the mapping for *key*.

    The forward entry (``self.id(key)``) is removed first, then the
    reverse entry (``self.rev_id(key)``); each lookup happens right
    before its deletion, as in the original.
    """
    forward_key = self.id(key)
    OrderedDict.__delitem__(self, forward_key)
    reverse_key = self.rev_id(key)
    OrderedDict.__delitem__(self, reverse_key)
def __delitem__(self, key):
    """Delete *key* case-insensitively.

    ``self.keyMap`` maps lowercased keys to the originally-cased key
    actually stored in the dict; both entries are removed. Raises
    ``KeyError`` with the caller's original key if it is unknown.
    """
    lowered = key.lower()
    try:
        stored_key = self.keyMap[lowered]
    except KeyError:
        raise KeyError(key)
    OrderedDict.__delitem__(self, stored_key)
    del self.keyMap[lowered]
def __delitem__(self, name):
    """Remove a header (and its as-list entry) under its normalized name."""
    normalized = HTTPHeaders._normalize_name(name)
    # Delete from the OrderedDict first so a missing header raises
    # KeyError before _as_list is touched, as in the original.
    OrderedDict.__delitem__(self, normalized)
    self._as_list.pop(normalized)
class DotMap(OrderedDict):
    """An ordered mapping whose keys are also reachable as attributes.

    ``m.a.b = 1`` works like ``m['a']['b'] = 1``. While ``_dynamic`` is
    true, reading a missing key auto-creates a nested empty DotMap, so
    deep assignment needs no intermediate steps. All real storage lives
    in ``self._map`` (an OrderedDict); the OrderedDict base is only used
    for isinstance compatibility.
    """

    def __init__(self, *args, **kwargs):
        self._map = OrderedDict()
        # Setting this to False breaks the default test-suite; that is
        # expected, since those tests rely on dynamic child creation.
        self._dynamic = True
        # ===================================
        if LORETO:
            global MY_DICT_TYPES  # module-level global used by the class
            self._dynamic = False
            MY_DICT_TYPES = [dict, DotMap]  # by Loreto (DEFAULT dictionary)
        # ===================================
        if kwargs:
            if '_dynamic' in kwargs:
                self._dynamic = kwargs['_dynamic']
        if args:
            d = args[0]
            if isinstance(d, dict):
                # Deep-convert nested dicts (and dicts inside lists).
                for k, v in self.__call_items(d):
                    if type(v) is dict:
                        v = DotMap(v, _dynamic=self._dynamic)
                    if type(v) is list:
                        l = []
                        for i in v:
                            n = i
                            if type(i) is dict:
                                n = DotMap(i, _dynamic=self._dynamic)
                            l.append(n)
                        v = l
                    self._map[k] = v
        if kwargs:
            for k, v in self.__call_items(kwargs):
                # BUG FIX: was `k is not '_dynamic'` — identity comparison
                # against a str literal only works via CPython interning and
                # raises SyntaxWarning on modern interpreters; equality is
                # what was meant.
                if k != '_dynamic':
                    self._map[k] = v

    def __call_items(self, obj):
        """Return an item iterator, preferring Py2 ``iteritems`` if present."""
        if hasattr(obj, 'iteritems') and ismethod(getattr(obj, 'iteritems')):
            return obj.iteritems()
        else:
            return obj.items()

    def items(self):
        return self.iteritems()

    def iteritems(self):
        return self.__call_items(self._map)

    def __iter__(self):
        return self._map.__iter__()

    def next(self):
        # Python-2 iterator protocol; kept for compatibility.
        return self._map.next()

    def __setitem__(self, k, v):
        self._map[k] = v

    def __getitem__(self, k):
        # Auto-vivify missing keys while dynamic mode is on; the IPython
        # canary name is excluded so completion probes don't create keys.
        if k not in self._map and self._dynamic and k != '_ipython_canary_method_should_not_exist_':
            self[k] = DotMap()
        return self._map[k]

    def __setattr__(self, k, v):
        # Internal attributes go on the instance; everything else is an item.
        if k in {'_map', '_dynamic', '_ipython_canary_method_should_not_exist_'}:
            super(DotMap, self).__setattr__(k, v)
        else:
            self[k] = v

    def __getattr__(self, k):
        # BUG FIX: was `k == {...}` — comparing the key to a set literal is
        # always False, so internal names fell through to item lookup; a
        # membership test was intended (it guards against infinite recursion
        # when _map is not yet set, e.g. during unpickling).
        if k in {'_map', '_dynamic', '_ipython_canary_method_should_not_exist_'}:
            # Deliberately ends in AttributeError rather than __getitem__.
            super(DotMap, self).__getattr__(k)
        else:
            return self[k]

    def __delattr__(self, key):
        return self._map.__delitem__(key)

    def __contains__(self, k):
        return self._map.__contains__(k)

    def __str__(self):
        items = []
        for k, v in self.__call_items(self._map):
            # bizarre recursive assignment situation
            # (why someone would do this is beyond me)
            if id(v) == id(self):
                items.append('{0}=DotMap(...)'.format(k))
            else:
                items.append('{0}={1}'.format(k, repr(v)))
        out = 'DotMap({0})'.format(', '.join(items))
        return out

    def __repr__(self):
        return str(self)

    def toDict(self):
        """Recursively convert back to plain ``dict`` (lists included)."""
        d = {}
        for k, v in self.items():
            if type(v) is DotMap:
                # bizarre recursive assignment support
                if id(v) == id(self):
                    v = d
                else:
                    v = v.toDict()
            elif type(v) is list:
                l = []
                for i in v:
                    n = i
                    if type(i) is DotMap:
                        n = i.toDict()
                    l.append(n)
                v = l
            d[k] = v
        return d

    def pprint(self):
        pprint(self.toDict())

    # ===================================
    if LORETO:
        # MY_DICT_TYPES = [dict, DotMap]
        def Ptr(self, listOfQualifiers, create=False):
            """Walk a qualifier path, optionally creating missing levels."""
            ptr = self
            for item in listOfQualifiers:
                if item in ptr:
                    ptr = ptr[item]
                else:
                    if create:
                        ptr[item] = DotMap()
                        ptr = ptr[item]
                    else:
                        return None
            return ptr

        def KeyTree(self, fPRINT=False):
            return DictToList.KeyTree(self, myDictTYPES=MY_DICT_TYPES, fPRINT=fPRINT)

        def KeyList(self):
            return DictToList.KeyList(self, myDictTYPES=MY_DICT_TYPES)

        def PrintTree(self, fEXIT=False, MaxLevel=10, header=None, printTYPE='LTKV', stackLevel=1):
            PrintDictionaryTree.PrintDictionary(self, myDictTYPES=MY_DICT_TYPES, printTYPE=printTYPE, fEXIT=fEXIT, MaxLevel=MaxLevel, header=header, stackLevel=stackLevel + 1)

        printDict = PrintTree
        printTree = PrintTree

        # NOTE(review): mutable default argument kept for interface
        # compatibility; it is only forwarded, never mutated here.
        def GetValue(self, listOfQualifiers=[], fPRINT=False):
            return DictToList.getValue(self, listOfQualifiers=listOfQualifiers, myDictTYPES=MY_DICT_TYPES, fPRINT=fPRINT)
    # ===================================

    def empty(self):
        return (not any(self))

    # proper dict subclassing
    def values(self):
        return self._map.values()

    # ipython support
    def __dir__(self):
        return self.keys()

    @classmethod
    def parseOther(self, other):
        """Unwrap a DotMap to its backing OrderedDict for comparisons."""
        if type(other) is DotMap:
            return other._map
        else:
            return other

    def __cmp__(self, other):
        # Python-2 only; OrderedDict has no __cmp__ on Python 3.
        other = DotMap.parseOther(other)
        return self._map.__cmp__(other)

    def __eq__(self, other):
        other = DotMap.parseOther(other)
        if not isinstance(other, dict):
            return False
        return self._map.__eq__(other)

    def __ge__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ge__(other)

    def __gt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__gt__(other)

    def __le__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__le__(other)

    def __lt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__lt__(other)

    def __ne__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ne__(other)

    def __delitem__(self, key):
        return self._map.__delitem__(key)

    def __len__(self):
        return self._map.__len__()

    def clear(self):
        self._map.clear()

    def copy(self):
        return DotMap(self.toDict())

    def get(self, key, default=None):
        return self._map.get(key, default)

    def has_key(self, key):
        return key in self._map

    def iterkeys(self):
        return self._map.iterkeys()

    def itervalues(self):
        return self._map.itervalues()

    def keys(self):
        return self._map.keys()

    def pop(self, key, default=None):
        return self._map.pop(key, default)

    def popitem(self):
        return self._map.popitem()

    def setdefault(self, key, default=None):
        self._map.setdefault(key, default)

    def update(self, *args, **kwargs):
        if len(args) != 0:
            self._map.update(*args)
        self._map.update(kwargs)

    def viewitems(self):
        return self._map.viewitems()

    def viewkeys(self):
        return self._map.viewkeys()

    def viewvalues(self):
        return self._map.viewvalues()

    @classmethod
    def fromkeys(cls, seq, value=None):
        d = DotMap()
        d._map = OrderedDict.fromkeys(seq, value)
        return d

    # Pickle support: state is simply the instance __dict__ (_map, _dynamic).
    def __getstate__(self):
        return self.__dict__

    def __setstate__(self, d):
        self.__dict__.update(d)
def __delitem__(self, key):
    """Remove *key* silently (missing keys are ignored), then refresh
    the serialized token if one is attached."""
    try:
        OrderedDict.__delitem__(self, key)
    except KeyError:
        pass  # deliberately lenient: absent keys are not an error
    if self.token:
        self.token.reserialize()
class Response(object):
    """Base class for decoded wire responses.

    Subclasses override ``msg`` (message id), ``type`` (human-readable name)
    and ``s`` (an OrderedDict mapping field name -> struct format code, or a
    nested OrderedDict for repeated sub-records). ``decode()`` parses
    ``msg_data`` into ``self.data`` accordingly.
    """
    msg = 0x0
    type = 'Base response'
    s = OrderedDict()
    # Display formatters applied by _format() to known field names.
    format = {'addr': '{:04x}', 'ieee': '{:016x}', 'group': '{:04x}'}

    def __init__(self, msg_data, lqi):
        self.msg_data = msg_data
        self.lqi = lqi
        self.data = OrderedDict()
        self.decode()

    def __str__(self):
        d = ['{}:{}'.format(k, v) for k, v in self.data.items()]
        return 'RESPONSE 0x{:04X} - {} : {}'.format(self.msg, self.type, ', '.join(d))

    # Mapping-style delegation to the decoded data dict.
    def __setitem__(self, key, value):
        self.data[key] = value

    def __getitem__(self, key):
        return self.data[key]

    def __delitem__(self, key):
        return self.data.__delitem__(key)

    def get(self, key, default=None):
        return self.data.get(key, default)

    def __contains__(self, key):
        return self.data.__contains__(key)

    def __len__(self):
        return len(self.data)

    def __iter__(self):
        return self.data.__iter__()

    def items(self):
        return self.data.items()

    def keys(self):
        return self.data.keys()

    def __getattr__(self, attr):
        # Decoded fields are also reachable as attributes.
        return self.data[attr]

    def decode(self):
        """Parse self.msg_data into self.data following the ``s`` schema.

        Scalar fields accumulate into one big-endian struct format; a nested
        OrderedDict field consumes the *tail* of the message as a repeated
        sub-record array. 'rawend' captures all remaining bytes as-is.
        """
        fmt = '!'
        msg_data = self.msg_data
        keys = list(self.s.keys())
        for k, v in self.s.items():
            if isinstance(v, OrderedDict):
                # Repeated sub-record: handled separately from the main
                # format string, so drop it from the scalar key list.
                keys.remove(k)
                self.data[k] = []
                # Bytes left after the scalar fields seen so far.
                rest = len(msg_data) - struct.calcsize(fmt)
                if rest == 0:
                    continue
                subfmt = '!' + ''.join(v.values())
                count = rest // struct.calcsize(subfmt)
                # Split the tail off; scalars are decoded from the head below.
                submsg_data = msg_data[-rest:]
                msg_data = msg_data[:-rest]
                for i in range(count):
                    sdata, submsg_data = self._decode(subfmt, v.keys(), submsg_data)
                    self.data[k].append(sdata)
            elif v == 'rawend':
                # Consume every remaining byte as one bytes field.
                fmt += '{}s'.format(len(msg_data) - struct.calcsize(fmt))
            else:
                fmt += v
        sdata, msg_data = self._decode(fmt, keys, msg_data)
        self.data.update(sdata)
        if msg_data:
            # Leftover bytes that the schema did not account for.
            self.data['additional'] = msg_data
        # reformat output, TODO: do it live
        self._format(self.data)
        self.data['lqi'] = self.lqi

    def _decode(self, fmt, keys, data):
        """Unpack one struct record; return (fields dict, remaining bytes)."""
        size = struct.calcsize(fmt)
        sdata = OrderedDict(zip(keys, struct.unpack(fmt, data[:size])))
        data = data[size:]
        return sdata, data

    def _format(self, data, keys=[]):
        """Apply the display formatters in-place (recursing into records)."""
        keys = keys or data.keys()
        for k in keys:
            if k in self.format:
                data[k] = self.format[k].format(data[k])
            elif isinstance(data[k], list):
                if data[k] and isinstance(data[k][0], dict):
                    for subdata in data[k]:
                        self._format(subdata)

    def _filter_data(self, include=[], exclude=[]):
        # NOTE(review): returns None when both filters are empty.
        if include:
            return {k: v for k, v in self.data.items() if k in include}
        elif exclude:
            return {k: v for k, v in self.data.items() if k not in exclude}

    def cleaned_data(self):
        '''
        return cleaned data
        need to be override in subclass
        '''
        return self.data
# NOTE(review): this fragment is truncated — the `return` statements below
# belong to a function whose `def` line is outside this view; code is kept
# byte-identical. Python 2 syntax (print statements).
return i
# If we reached the end of the length of the arguments, return i.
if (i == len_of_remaining_page):
    return i
#
# Authored by Juan Mite
# LRU algorithm
#
if policy.upper() == 'LRU':
    print 'Evaluando una caché LRU con '+ str(cache_tam) +' entradas, tiempo estimado 25 segundos...'
    # `cache` is presumably an OrderedDict used as an LRU: on a hit the key
    # is re-inserted so it moves to the most-recent end — TODO confirm.
    with open(workload_file) as f:
        for line in f:
            if (cache.__contains__(line)):
                # HIT: re-insert to mark the entry most-recently-used.
                cache.__delitem__(line)
                cache[line] = ''
                hites = hites + 1
            else:
                # MISS: insert and evict the least-recently-used entry
                # (popitem(last=False) pops the oldest) when over capacity.
                misses = misses + 1
                cache[line] = ''
                if len(cache) > cache_tam :
                    cache.popitem(last=False)
    references = hites + misses
    print "Resultados: "
    print "Miss rate: ", ' '+str(round((float(misses)/(references)),3))+'% ('+str(misses)+' misses out of '+str(references)+' references)'
    # NOTE(review): the line below prints `hites` but labels it 'misses';
    # looks like a copy-paste slip in the message — left unchanged here.
    print 'Miss rate (warm cache): ', ' '+str(round((float(misses)/(references-cache_tam)),3))+'% ('+str(hites)+' misses out of '+ str(references-cache_tam)+' references)'
    print 'Efficiency: '+str(round(float(hites)/(references),3))
    print 'Hits: '+str(hites)
    print 'Misses: '+str(misses)
class Collection(object):
    """Represents a dataset consisting of a collection of spectra.

    Spectra are stored by name in an OrderedDict (``self._spectra``); a
    parallel ``self._flags`` defaultdict marks spectra to exclude from
    the aggregate statistics (mean/median/min/max/std).
    """

    def __init__(self, name, directory=None, spectra=None,
                 measure_type='pct_reflect', metadata=None, flags=None):
        self.name = name
        self.spectra = spectra
        self.measure_type = measure_type
        self.metadata = metadata
        self.flags = flags
        if directory:
            self.read(directory, measure_type)

    @property
    def spectra(self):
        """A list of Spectrum objects in the collection."""
        return list(self._spectra.values())

    @property
    def spectra_dict(self):
        return self._spectra

    @spectra.setter
    def spectra(self, value):
        self._spectra = OrderedDict()
        if value is not None:
            # assume value is an iterable such as list
            for spectrum in value:
                assert spectrum.name not in self._spectra
                self._spectra[spectrum.name] = spectrum

    @property
    def flags(self):
        """A dict of flags for each spectrum in the collection."""
        return self._flags

    @flags.setter
    def flags(self, value):
        '''
        TODO: test this
        '''
        self._flags = defaultdict(lambda: False)
        if value is not None:
            for v in value:
                if v in self._spectra:
                    self._flags[v] = True

    def flag(self, spectrum_name):
        self.flags[spectrum_name] = True

    def unflag(self, spectrum_name):
        del self.flags[spectrum_name]

    @property
    def data(self):
        '''
        Get measurements as a Pandas.DataFrame
        '''
        try:
            return pd.concat(objs=[s.measurement for s in self.spectra],
                             axis=1, keys=[s.name for s in self.spectra])
        except ValueError:
            # typically from duplicate index due to overlapping wavelengths
            if not all([s.stitched for s in self.spectra]):
                warnings.warn('ValueError: Try after stitching the overlaps')
            return None
        except Exception as e:
            print("Unexpected exception occurred")
            raise e

    def _unflagged_data(self):
        """Like ``data`` but excluding flagged spectra."""
        try:
            spectra = [s for s in self.spectra if not s.name in self.flags]
            return pd.concat(objs=[s.measurement for s in spectra],
                             axis=1, keys=[s.name for s in spectra])
        except ValueError:
            # typically from duplicate index due to overlapping wavelengths
            if not all([s.stitched for s in self.spectra]):
                warnings.warn('ValueError: Try after stitching the overlaps')
            return None
        except Exception as e:
            print("Unexpected exception occurred")
            raise e

    def append(self, spectrum):
        """Insert spectrum to the collection."""
        assert spectrum.name not in self._spectra
        assert isinstance(spectrum, Spectrum)
        self._spectra[spectrum.name] = spectrum

    def data_with_meta(self, data=True, fields=None):
        """Get dataframe with additional columns for metadata fields

        Parameters
        ----------
        data: boolean
            whether to return the measurement data or not
        fields: list
            names of metadata fields to include as columns.
            If None, all the metadata will be included.

        Returns
        -------
        pd.DataFrame: self.data with additional columns
        """
        if fields is None:
            fields = ['file', 'instrument_type', 'integration_time',
                      'measurement_type', 'gps_time_tgt', 'gps_time_ref',
                      'wavelength_range']
        meta_dict = {}
        for field in fields:
            meta_dict[field] = [s.metadata[field] if field in s.metadata
                                else None for s in self.spectra]
        meta_df = pd.DataFrame(meta_dict, index=[s.name for s in self.spectra])
        if data:
            result = pd.merge(meta_df, self.data.transpose(),
                              left_index=True, right_index=True)
        else:
            result = meta_df
        return result

    ##################################################
    # object methods
    def __getitem__(self, key):
        return self._spectra[key]

    def __delitem__(self, key):
        self._spectra.__delitem__(key)
        # BUG FIX: was `self._flags.__delitem__(key)` — `del` on a missing
        # key raises KeyError even for a defaultdict, so deleting any
        # spectrum that was never flagged used to blow up.
        self._flags.pop(key, None)

    def __missing__(self, key):
        pass

    def __len__(self):
        return len(self._spectra)

    def __contains__(self, item):
        # BUG FIX: the `return` was missing, so membership tests always
        # evaluated falsy (None) even for present spectra.
        return self._spectra.__contains__(item)

    ##################################################
    # reader
    def read(self, directory, measure_type='pct_reflect',
             ext=[".asd", ".sed", ".sig", ".pico", ".light"],
             recursive=False, verbose=False):
        """Read all files in a path matching extension."""
        directory = abspath(expanduser(directory))
        for dirpath, dirnames, filenames in os.walk(directory):
            if not recursive:
                # only read given path
                if dirpath != directory:
                    continue
            for f in sorted(filenames):
                f_name, f_ext = splitext(f)
                if f_ext not in list(ext):
                    # skip to next file
                    continue
                filepath = os.path.join(dirpath, f)
                spectrum = Spectrum(name=f_name, filepath=filepath,
                                    measure_type=measure_type,
                                    verbose=verbose)
                self.append(spectrum)

    ##################################################
    # wrapper around spectral operations
    def interpolate(self, spacing=1, method='slinear'):
        '''Interpolate every spectrum in place.'''
        for spectrum in self.spectra:
            spectrum.interpolate(spacing, method)

    def stitch(self, method='max'):
        '''Stitch overlapping wavelength regions of every spectrum.'''
        for spectrum in self.spectra:
            spectrum.stitch(method)

    def jump_correct(self, splices, reference, method='additive'):
        '''Jump-correct every spectrum at the given splices.'''
        for spectrum in self.spectra:
            spectrum.jump_correct(splices, reference, method)

    ##################################################
    # group operations
    def groupby(self, separator, indices, filler=None):
        """Group the spectra using a separator pattern

        Returns
        -------
        OrderedDict consisting of specdal.Collection objects for each group
            key: group name
            value: collection object
        """
        args = [separator, indices]
        key_fun = separator_keyfun
        if filler is not None:
            args.append(filler)
            key_fun = separator_with_filler_keyfun
        spectra_sorted = sorted(self.spectra, key=lambda x: key_fun(x, *args))
        # `groupby` here resolves to itertools.groupby, not this method.
        groups = groupby(spectra_sorted, lambda x: key_fun(x, *args))
        result = OrderedDict()
        for g_name, g_spectra in groups:
            coll = Collection(name=g_name,
                              spectra=[copy.deepcopy(s) for s in g_spectra])
            result[coll.name] = coll
        return result

    def plot(self, *args, **kwargs):
        '''Plot the measurement DataFrame.'''
        self.data.plot(*args, **kwargs)
        pass

    def to_csv(self, *args, **kwargs):
        '''Write the transposed measurement DataFrame to CSV.'''
        self.data.transpose().to_csv(*args, **kwargs)

    ##################################################
    # aggregate
    def _aggregate(self, fun_name, append, ignore_flagged):
        """Shared implementation for mean/median/min/max/std.

        BUG FIX: the five aggregate methods used to read an unbound local
        (`... if ignore_flagged else data`), raising NameError whenever
        ignore_flagged=False; the intended fallback is ``self.data``.
        """
        data = self._unflagged_data() if ignore_flagged else self.data
        spectrum = Spectrum(name='{}_{}'.format(self.name, fun_name),
                            measurement=getattr(data, fun_name)(axis=1),
                            measure_type=self.measure_type)
        if append:
            self.append(spectrum)
        return spectrum

    def mean(self, append=False, ignore_flagged=True):
        '''Column-wise mean as a new Spectrum.'''
        return self._aggregate('mean', append, ignore_flagged)

    def median(self, append=False, ignore_flagged=True):
        '''Column-wise median as a new Spectrum.'''
        return self._aggregate('median', append, ignore_flagged)

    def min(self, append=False, ignore_flagged=True):
        '''Column-wise minimum as a new Spectrum.'''
        return self._aggregate('min', append, ignore_flagged)

    def max(self, append=False, ignore_flagged=True):
        '''Column-wise maximum as a new Spectrum.'''
        return self._aggregate('max', append, ignore_flagged)

    def std(self, append=False, ignore_flagged=True):
        '''Column-wise standard deviation as a new Spectrum.'''
        return self._aggregate('std', append, ignore_flagged)
def __delitem__(self, name):
    """Delete a field and all of its values (field names are
    case-insensitive: lookup is by the lowercased name)."""
    OrderedDict.__delitem__(self, name.lower())
def __delitem__(self, key):
    """Delete the entry stored under the encoded form of *key*."""
    encoded_key = self._EncodeKey(key)
    OrderedDict.__delitem__(self, encoded_key)