def __sliceToIndex(self, item):
    """Translate a slice whose endpoints may be datetimes, ints, or None
    into a (start, stop) pair of integer positions.

    datetime endpoints are looked up through self._pos_of_time; negative
    ints count back from the end; None maps to the full range.
    """
    size = OrderedDict.__len__(self)

    def resolve(endpoint, default):
        # Single endpoint resolution shared by start and stop.
        if isinstance(endpoint, datetime.datetime):
            return self._pos_of_time[endpoint]
        if endpoint is None:
            return default
        if endpoint < 0:
            return size + endpoint
        return endpoint

    return resolve(item.start, 0), resolve(item.stop, size)
class LRUCache:
    """Fixed-capacity least-recently-used cache backed by an OrderedDict.

    The tail of the dict holds the most recently used key; the head is the
    eviction candidate.
    """

    def __init__(self, capacity: int):
        self.capacity = capacity
        self.cache = OrderedDict()

    def get(self, key: int) -> int:
        """Return the value for key (promoting it), or -1 when absent.

        BUG FIX: the old code tested `if val:` on the stored value, so any
        falsy value (0, None, '') present in the cache was reported as a
        miss and returned -1.
        """
        if key not in self.cache:
            return -1
        self.cache.move_to_end(key)  # mark as most recently used
        return self.cache[key]

    def put(self, key: int, value: int) -> None:
        """Insert/update key, evicting the least recently used entry when full."""
        if key in self.cache:
            self.cache.move_to_end(key)
        elif len(self.cache) >= self.capacity:
            # Head of the dict is the least recently used entry.
            self.cache.popitem(last=False)
        self.cache[key] = value
class LRUCache(MutableMapping):
    """A least-recently-used cache.

    Implementation stolen from `this article`_.

    .. _this article: https://www.kunxi.org/blog/2014/05/lru-cache-in-python/
    """

    def __init__(self, capacity):
        self.capacity = capacity
        self.cache = OrderedDict()

    def __getitem__(self, key):
        # Pop-and-reinsert promotes the key to most recently used;
        # raises KeyError for an absent key, as a mapping should.
        value = self.cache.pop(key)
        self.cache[key] = value
        return value

    def __setitem__(self, key, value):
        if key in self.cache:
            self.cache.pop(key)
        elif len(self.cache) >= self.capacity:
            # Head of the dict is the least recently used entry.
            self.cache.popitem(last=False)
        self.cache[key] = value

    def __delitem__(self, key):
        del self.cache[key]

    def __iter__(self):
        return iter(self.cache)

    def __len__(self):
        return len(self.cache)
def start_main(self, event):
    """Collect battle tasks from the GUI and launch the main battle worker.

    Reads up to four (stage name, run count) pairs from the Index panel in
    on-screen order, validates every stage against MAIN_TASK_SUPPORT, then
    stores an ArkThread worker in self.worker['main_battle'].  Returns
    False on any validation failure.
    """
    if self.__is_ark_init:
        # OrderedDict keeps the tasks in the order they appear on screen.
        TASK_LIST = OrderedDict()
        if self.Index.task1_battle_name.GetValue() != "":
            TASK_LIST[self.Index.task1_battle_name.GetValue()] = int(
                self.Index.task1_battle_time.GetValue())
        if self.Index.task2_battle_name.GetValue() != "":
            TASK_LIST[self.Index.task2_battle_name.GetValue()] = int(
                self.Index.task2_battle_time.GetValue())
        if self.Index.task3_battle_name.GetValue() != "":
            TASK_LIST[self.Index.task3_battle_name.GetValue()] = int(
                self.Index.task3_battle_time.GetValue())
        if self.Index.task4_battle_name.GetValue() != "":
            TASK_LIST[self.Index.task4_battle_name.GetValue()] = int(
                self.Index.task4_battle_time.GetValue())
        print(TASK_LIST)
        for _ in TASK_LIST.keys():
            # Reject any stage that is not in the supported list.
            if _ not in MAIN_TASK_SUPPORT:
                MessageDialog_OK("{} 不在支持的关卡列表中".format(_), "警告")
                return False
        if TASK_LIST.__len__() == 0:
            MessageDialog_CANCEL("未选择关卡", "提示")
            return False
        else:
            if self.check_before_start():
                self.worker['main_battle'] = ArkThread(ark=self.ark,
                                                       TASK_LIST=TASK_LIST)
            else:
                return False
    else:
        MessageDialog_OK("请预先初始化 ark 类")
class Metadata(MutableMapping):
    """Ordered, dict-like metadata container delegating to an OrderedDict."""

    def __init__(self, seq=None):
        # Falsy seq (None, empty) yields an empty mapping.
        self.dct = OrderedDict(seq) if seq else OrderedDict()

    def __contains__(self, key):
        return key in self.dct

    def __getitem__(self, key):
        return self.dct[key]

    def __setitem__(self, key, value):
        self.dct[key] = value

    def __delitem__(self, key):
        del self.dct[key]

    def __iter__(self):
        return iter(self.dct)

    def __len__(self):
        return len(self.dct)

    def __repr__(self):
        return repr(self.dct)

    def __str__(self):
        return str(self.dct)
class OrderedSet(MutableSet):
    """A set that remembers insertion order, backed by an OrderedDict."""

    def __init__(self, sequence=None):
        super().__init__()
        if sequence is None:
            self._data = OrderedDict()
        else:
            # BUG FIX: the old code built a dict and called
            # OrderedDict(**kwargs), which requires every element to be a
            # string; any other hashable element raised TypeError.
            self._data = OrderedDict((item, 1) for item in sequence)

    def __contains__(self, item):
        """Override."""
        return item in self._data

    def __iter__(self):
        """Override."""
        return iter(self._data)

    def __len__(self):
        """Override."""
        return len(self._data)

    def add(self, item):
        """Override."""
        self._data[item] = 1

    def discard(self, item):
        """Override."""
        if item in self._data:
            del self._data[item]

    def __repr__(self):
        return f"{self.__class__.__name__}({list(self._data.keys())})"
class TargetVars(Mapping):
    """Immutable ordered mapping from target variables to their values."""

    EMPTY = EmptyTargetVar()

    def __init__(self, target_vars=tuple(), is_empty=True):
        # With is_empty, the input is a sequence of bare variables that all
        # map to the EMPTY sentinel; otherwise it is (var, value) pairs.
        if is_empty:
            target_vars = [(v, self.EMPTY) for v in target_vars]
        self._od = OrderedDict(target_vars)

    # getitem, len, iter wrap OrderedDict behavior
    def __getitem__(self, k):
        return self._od[k]

    def __len__(self):
        return len(self._od)

    def __iter__(self):
        return iter(self._od)

    def update(self, *args, **kwargs):
        """Return a copy with the given entries merged in; self is unchanged."""
        duplicate = self.copy()
        duplicate._od.update(*args, **kwargs)
        return duplicate

    def copy(self):
        return self.__class__(self._od)

    def __str__(self):
        """Format target vars for printing"""
        names = self._od.keys()
        if len(self) > 1:
            return "({})".format(", ".join(names))
        return "".join(names)

    def defined_items(self):
        """Return copy of instance, omitting entries that are EMPTY"""
        kept = [(k, v) for k, v in self.items() if v is not self.EMPTY]
        return self.__class__(kept, is_empty=False)
def main_handler(self, battle_task_list=None):
    """Run every queued battle task in order, then shut down or return to GUI.

    :param battle_task_list: OrderedDict mapping stage id -> run count;
        defaults to an empty OrderedDict (avoids a mutable default argument).
    :raises IndexError: when a stage id is not in MAIN_TASK_SUPPORT.
    """
    if battle_task_list is None:
        battle_task_list = OrderedDict()
    self.shell_color.warning_text("[*] 装载模块....")
    self.shell_color.warning_text("[+] 战斗模块...启动!")
    flag = False
    if battle_task_list.__len__() == 0:
        self.shell_color.failure_text("[!] ? 任务清单为空")
    for c_id, count in battle_task_list.items():
        if c_id not in MAIN_TASK_SUPPORT.keys():
            raise IndexError("无此关卡")
        self.shell_color.helper_text("[+] 战斗-选择{}...启动!".format(c_id))
        self.selector.id = c_id
        # NOTE: flag reflects only the *last* task's outcome.
        flag = self.module_battle(c_id, count)
        # flag = self.module_battle_for_test(c_id, count)
    if flag:
        if not self.__call_by_gui:
            self.shell_color.warning_text("[*] 所有模块执行完毕...60秒后退出!")
            self.__wait(60)
            self.__del()
        else:
            self.shell_color.warning_text("[*] 所有模块执行完毕...!")
    else:
        if not self.__call_by_gui:
            self.shell_color.failure_text("[*] 未知模块异常...60秒后退出!")
            self.__wait(60)
            self.__del()
        else:
            self.shell_color.failure_text("[*] 未知模块异常...系统体现结束")
class LruCache:
    """LRU cache built on OrderedDict.

    (Translated from the original Chinese docstring:) OrderedDict keeps
    insertion order with a doubly linked list plus a hash map underneath,
    giving O(1) lookup, promotion (move_to_end) and eviction (popitem).
    """

    def __init__(self, long_max=100):
        self.__long_max = long_max
        self.__cache_data = OrderedDict()

    def get(self, key):
        """Return the cached value (promoting the key), or -1 on a miss.

        BUG FIX: the old code tested `if self.__cache_data.get(key):`, so a
        falsy cached value (0, '', None) was reported as a miss and was
        never promoted in the LRU order.
        """
        if key in self.__cache_data:
            self.__cache_data.move_to_end(key)
            return self.__cache_data[key]
        return -1

    def put(self, key, value):
        """Store key -> value, evicting the least recently used entry if full."""
        if key in self.__cache_data:
            self.__cache_data[key] = value
            self.__cache_data.move_to_end(key)
            return
        if len(self.__cache_data) >= self.__long_max:
            # Head of the dict is the least recently used entry.
            self.__cache_data.popitem(last=False)
        self.__cache_data[key] = value
def main_handler(self, battle_task_list=None):
    """Run every queued battle task in order, logging via shell_log.

    :param battle_task_list: OrderedDict mapping stage id -> run count;
        defaults to an empty OrderedDict (avoids a mutable default argument).
    :raises IndexError: when a stage id is not in MAIN_TASK_SUPPORT.
    """
    self.shell_log.debug_text("base.main_handler")
    if battle_task_list is None:
        battle_task_list = OrderedDict()
    self.shell_log.warning_text("装载模块...")
    self.shell_log.warning_text("战斗模块...启动")
    flag = False
    if battle_task_list.__len__() == 0:
        self.shell_log.failure_text("任务清单为空!")
    for c_id, count in battle_task_list.items():
        if c_id not in MAIN_TASK_SUPPORT.keys():
            raise IndexError("无此关卡!")
        self.shell_log.helper_text("战斗{} 启动".format(c_id))
        self.selector.id = c_id
        # NOTE: flag reflects only the *last* task's outcome.
        flag = self.module_battle(c_id, count)
    if flag:
        if not self.__call_by_gui:
            self.shell_log.warning_text("所有模块执行完毕... 60s后退出")
            self.__wait(60)
            self.__del()
        else:
            self.shell_log.warning_text("所有模块执行完毕")
    else:
        if not self.__call_by_gui:
            self.shell_log.warning_text("发生未知错误... 60s后退出")
            self.__wait(60)
            self.__del()
        else:
            self.shell_log.warning_text("发生未知错误... 进程已结束")
def main_handler(self, battle_task_list=None):
    """Run each sanity-consuming (LIZHI) battle task, then sleep and tear down.

    :param battle_task_list: OrderedDict mapping stage id -> run count;
        defaults to an empty OrderedDict (avoids a mutable default argument).
    :raises IndexError: when a stage id is not in LIZHI_CONSUME.
    """
    if battle_task_list is None:
        battle_task_list = OrderedDict()
    self.shell_color.warning_text("[*] 装在模块....")
    self.shell_color.warning_text("[+] 战斗模块...启动!")
    flag = False
    if battle_task_list.__len__() == 0:
        self.shell_color.failure_text("[!] ⚠ 任务清单为空")
    for c_id, count in battle_task_list.items():
        if c_id not in LIZHI_CONSUME.keys():
            raise IndexError("无此关卡")
        self.shell_color.helper_text("[+] 战斗-选择{}...启动!".format(c_id))
        self.selector.id = c_id
        # NOTE: flag reflects only the *last* task's outcome.
        flag = self.module_battle(c_id, count)
        # flag = self.module_battle_for_test(c_id, count)
    if flag:
        self.shell_color.warning_text("[*] 所有模块执行完毕...无限休眠启动!")
        # Only sleep-and-self-destruct when not driven from the GUI.
        if not self.__call_by_gui:
            self.__wait(1024)
            self.shell_color.failure_text("[*] 休眠过度...启动自毁程序!")
            self.__del()
    else:
        self.shell_color.failure_text("[*] 未知模块异常...无限休眠启动!")
        self.__wait(1024)
        self.shell_color.failure_text("[*] 休眠过度...启动自毁程序!")
        self.__del()
def __setitem__(self, key, value):
    """Insert key -> value, keeping the mapping bounded at 1000 entries.

    Re-inserting an existing key moves it to the end (most recent), so the
    head of the dict is always the oldest entry and is evicted first.
    """
    if key in self:
        del self[key]
    OrderedDict.__setitem__(self, key, value)
    if OrderedDict.__len__(self) > 1000:
        # BUG FIX: removed the stray `print('popitem')` debug statement that
        # leaked onto stdout on every eviction.
        self.popitem(False)
def removeHighAndLowFrequencyWords(self, lines, percentage=0.4):
    """Tokenize lines, count token frequencies, and trim the extremes.

    Side effects: appends each tokenized line to self.lines and stores the
    trimmed, ascending-frequency (token, count) list in self.dictionary.

    NOTE(review): after the rare-token pass (count < 5), the loop deletes
    `index` items from BOTH ends of the ascending list, so roughly
    `percentage` of the lowest-frequency AND `percentage` of the
    highest-frequency tokens are dropped — confirm this double trim is
    intended.
    """
    tk = TweetTokenizer()
    dictionary = OrderedDict()
    # create dictionary
    for line in lines:
        l = tk.tokenize(self.normalizeSentence(line))
        self.lines.append(l)
        for token in l:
            # Keep multi-character tokens, or single chars that are word chars.
            if len(token) > 1 or re.search('\w', token):
                if dictionary.get(token) is None:
                    dictionary[token] = 1
                else:
                    dictionary[token] += 1
    # remove high frequency and low frequency words
    # (dictionary becomes a list of (token, count) sorted ascending by count)
    dictionary = sorted(dictionary.items(), key=operator.itemgetter(1),
                        reverse=False)
    # Drop tokens seen fewer than 5 times (front of the ascending list).
    while dictionary[0][1] < 5:
        del dictionary[0]
    index = math.floor(dictionary.__len__() * percentage)
    for i in range(index):
        del dictionary[0]
        del dictionary[-1]
    self.dictionary = dictionary
class UssdAppRouter(object):
    """Routes incoming USSD requests to handlers keyed by USSD code."""

    __slots__ = ('routes', 'base_code')

    def __init__(self, code=None):
        self.routes = OrderedDict()
        # base_code is None when no code is given (short-circuit `and`).
        self.base_code = code and UssdCode(code)

    def route(self, code, handler):
        """Register handler under code; raises ValueError for duplicates."""
        if not isinstance(code, UssdCode):
            code = UssdCode(code)
        key_code = tuple(code)
        if key_code in self.routes:
            raise ValueError(
                'Ussd code %s already registered in ussd router.' % (code, ))
        self.routes[key_code] = (code, handler)

    def resolve(self, request):
        """Resolve a request to ((code, handler), route_code), or (None, None).

        Strips the base code prefix, takes the greatest matching key, and
        annotates request.route_code on success.
        """
        ussd_string = request.ussd_string
        if ussd_string.startswith(self.base_code):
            ussd_string = ussd_string[len(self.base_code):]
        matches = sorted(filter(ussd_string.startswith, self.routes.keys()))
        if not matches:
            return None, None
        code = matches[-1]
        if self.base_code:
            route_code = '%s*%s' % (self.base_code, code)
        else:
            route_code = code
        request.route_code = route_code
        return self.routes[code], route_code

    def __len__(self):
        return len(self.routes)

    def __contains__(self, code):
        return code in self.routes
def main_handler(self, task_list=None, clear_tasks=False, auto_close=True):
    """Run each queued task; optionally clear dailies and auto-close afterwards.

    :param task_list: OrderedDict mapping stage id -> run count; defaults
        to an empty OrderedDict (avoids a mutable default argument).
    :param clear_tasks: when True and the run succeeded, also run
        clear_daily_task() before shutting down.
    :param auto_close: when False (or when called from the GUI) the session
        is left open instead of waiting 60s and tearing down.
    :raises IndexError: when a stage id is not in MAIN_TASK_SUPPORT.
    """
    logger.debug("base.main_handler")
    if task_list is None:
        task_list = OrderedDict()
    logger.info("装载模块...")
    logger.info("战斗模块...启动")
    flag = False
    if task_list.__len__() == 0:
        logger.fatal("任务清单为空!")
    for c_id, count in task_list.items():
        if c_id not in MAIN_TASK_SUPPORT.keys():
            raise IndexError("无此关卡!")
        logger.info("战斗{} 启动".format(c_id))
        self.selector.id = c_id
        # NOTE: flag reflects only the *last* task's outcome.
        flag = self.module_battle(c_id, count)
    if flag:
        if self.__call_by_gui or auto_close is False:
            logger.info("所有模块执行完毕")
        else:
            if clear_tasks:
                self.clear_daily_task()
            logger.info("所有模块执行完毕... 60s后退出")
            self.__wait(60)
            self.__del()
    else:
        if self.__call_by_gui or auto_close is False:
            logger.error("发生未知错误... 进程已结束")
        else:
            logger.error("发生未知错误... 60s后退出")
            self.__wait(60)
            self.__del()
class VariableList:
    """Ordered collection of Variable objects keyed by variable name.

    (Original comment translated: "this needs to behave like an array,
    with ordering".)
    """

    def __init__(self):
        self.list = OrderedDict()

    def append(self, var: Variable | str) -> None:
        """Add a Variable, or build one from a name string, if not present.

        IDIOM FIX: uses isinstance instead of `type(var) == Variable`, so
        Variable subclasses are treated as Variables too.
        """
        if isinstance(var, Variable):
            if var.name not in self:
                self.list[var.name] = var
        elif var not in self:
            var = Variable(var)
            self.list[var.name] = var

    def items(self) -> Iterator[Tuple[str, Variable]]:
        return self.list.items()

    def key_with_var(self) -> Generator[Tuple[str, Variable], None, None]:
        """Yield (var.key, var) pairs in insertion order."""
        for _, var in self.items():
            yield (var.key, var)

    def __getitem__(self, key: str) -> Variable:
        return self.list[key]

    def __len__(self) -> int:
        return len(self.list)

    def has_key(self, key: str) -> bool:
        return key in self.list

    def __iter__(self) -> Generator[Tuple[str, Variable], None, None]:
        # NOTE: iteration yields (var.key, var) pairs, not names.
        for _, var in self.items():
            yield (var.key, var)

    def __contains__(self, key: str) -> bool:
        return key in self.list

    def set(self, info: dict):
        """info = {"name": variables_name, "func": function, "value": value}"""
        if info['name'] not in self:
            self.append(info["name"])
        var = self.list[info["name"]]
        var.set(info)

    def merge(self, other_varlist: "VariableList") -> list:
        """Merge values from other_varlist into variables already present here."""
        for key, var in self.list.items():
            if key not in other_varlist:
                continue
            var.merge(other_varlist[key])
        return self.list

    def __bool__(self) -> bool:
        return bool(self.list)
class Ring(t.Generic[T]):
    """Circular doubly linked ring over a fixed set of unique items.

    The input is de-duplicated through an OrderedSet (first-seen order
    kept); each item is wrapped in a _RingLink whose next/previous pointers
    are wired circularly.
    """

    def __init__(self, content: t.Iterable[T]):
        self._raw_content = OrderedSet(content)
        self._content = OrderedDict(
            {content: _RingLink(content) for content in self._raw_content})
        _content = tuple(self._content.values())
        # Wire links circularly: (i+1) % n wraps forward, i-1 wraps
        # backward via Python's negative indexing at i == 0.
        for i in range(len(_content)):
            _content[i].next = _content[(i + 1) % len(_content)]
            _content[i].previous = _content[i - 1]
        try:
            # Start on the last link so the first next() yields the first item.
            self._current = _content[-1]
        except IndexError:
            raise ValueError('Ring must contain at least one object')

    @property
    def all(self) -> OrderedSet:
        # The de-duplicated input, in first-seen order.
        return self._raw_content

    def current(self) -> T:
        return self._current.content

    def next(self) -> T:
        # Advance the shared cursor and return the new current item.
        self._current = self._current.next
        return self._current.content

    def previous(self) -> T:
        self._current = self._current.previous
        return self._current.content

    def peek_next(self) -> T:
        # Look ahead without moving the cursor.
        return self._current.next.content

    def peek_previous(self) -> T:
        return self._current.previous.content

    def loop_from(self, start: T) -> t.Iterable[T]:
        """Yield items forever, starting at (and including) `start`."""
        link = self._content[start]
        while True:
            yield link.content
            link = link.next

    def loop_after(self, start: T) -> t.Iterable[T]:
        """Yield items forever, starting just after `start`."""
        link = self._content[start]
        while True:
            link = link.next
            yield link.content

    def __iter__(self) -> t.Iterable[T]:
        # Infinite iterator; note it advances the shared cursor.
        while True:
            yield self.next()

    def __len__(self):
        return self._content.__len__()

    def __eq__(self, other: object) -> bool:
        # Equality ignores cursor position; only membership matters.
        return isinstance(
            other, self.__class__) and self._raw_content == other._raw_content
class CompoundValue(object):
    """Ordered, attribute-and-item addressable value object for an XSD type.

    All state lives in the __values__ OrderedDict; attribute access is
    redirected into it via __getattribute__/__setattr__, except for dunder
    names and the _xsd_type/_xsd_elm bookkeeping attributes.
    """

    def __init__(self, *args, **kwargs):
        self.__values__ = OrderedDict()

        # Set default values
        for container_name, container in self._xsd_type.elements_nested:
            values = container.default_value
            if isinstance(container, Indicator):
                # Indicator containers flatten their children into this level.
                self.__values__.update(values)
            else:
                self.__values__[container_name] = values

        # Set attributes
        for attribute_name, attribute in self._xsd_type.attributes:
            self.__values__[attribute_name] = attribute.default_value

        # Set elements — explicit constructor arguments override defaults.
        items = _process_signature(self._xsd_type, args, kwargs)
        for key, value in items.items():
            self.__values__[key] = value

    def __contains__(self, key):
        return self.__values__.__contains__(key)

    def __len__(self):
        return self.__values__.__len__()

    def __iter__(self):
        return self.__values__.__iter__()

    def __repr__(self):
        return PrettyPrinter().pformat(self.__values__)

    def __delitem__(self, key):
        return self.__values__.__delitem__(key)

    def __getitem__(self, key):
        return self.__values__[key]

    def __setitem__(self, key, value):
        self.__values__[key] = value

    def __setattr__(self, key, value):
        # Dunder and XSD bookkeeping attributes bypass the value store.
        if key.startswith('__') or key in ('_xsd_type', '_xsd_elm'):
            return super(CompoundValue, self).__setattr__(key, value)
        self.__values__[key] = value

    def __getattribute__(self, key):
        if key.startswith('__') or key in ('_xsd_type', '_xsd_elm'):
            return super(CompoundValue, self).__getattribute__(key)
        try:
            # Every other attribute resolves through the value store.
            return self.__values__[key]
        except KeyError:
            raise AttributeError(
                "%s instance has no attribute '%s'" % (
                    self.__class__.__name__, key))
def __removeHighAndLow(self, gr, percentage=0.2):
    """Drop the highest- and lowest-frequency entries from a frequency dict.

    Removes floor(len(gr) * percentage) entries from each end of the
    descending frequency ranking and returns the remainder as an
    OrderedDict, still sorted by descending frequency.

    BUG FIXES: the old code used the hard-coded 0.2 instead of the
    `percentage` parameter, and executed `del od[0]` / `del od[-1]` on an
    OrderedDict — which deletes the *keys* 0 and -1 (KeyError for word
    keys), not the first/last positions.
    """
    ranked = sorted(gr.items(), key=operator.itemgetter(1), reverse=True)
    index = math.floor(len(ranked) * percentage)
    if index:
        # Trim `index` items from the head (highest) and tail (lowest).
        ranked = ranked[index:-index]
    return OrderedDict(ranked)
class ATimeCache(object):
    """Cache class (dictionary) with a limited size, where only the
    'max_entries' most recently added or accessed entries are stored."""

    def __init__(self, max_entries):
        self._cache = OrderedDict()
        self._max_entries = max_entries

    def _shrink(self):
        # Evict from the head (oldest) until within the limit.
        while len(self._cache) > self._max_entries:
            self._cache.popitem(last=False)

    def get_max_entries(self):
        return self._max_entries

    def set_max_entries(self, value):
        self._max_entries = value
        self._shrink()

    max_entries = property(get_max_entries, set_max_entries, None,
                           "Set or get the cache size")

    def has_key(self, key):
        # BUG FIX: dict.has_key() was removed in Python 3; use `in`.
        return key in self._cache

    def __eq__(self, other):
        try:
            return self._cache.__eq__(other._cache)
        except AttributeError:
            # BUG FIX: narrowed the bare `except:` to AttributeError
            # (raised when `other` has no _cache attribute).
            return False

    def __len__(self):
        return len(self._cache)

    def __getitem__(self, key):
        # Pop-and-reinsert marks the entry as most recently used.
        value = self._cache.pop(key)
        self._cache[key] = value
        return value

    def __setitem__(self, key, value):
        # BUG FIX: was `self._cache.has_key(key)` (Python 2 only).
        if key in self._cache:
            self._cache.pop(key)
        self._cache[key] = value
        self._shrink()

    def __contains__(self, key):
        return self.has_key(key)

    def __str__(self):
        # BUG FIX: was `self.cache` — an attribute that does not exist.
        return self._cache.__str__()

    def __iter__(self):
        # Iterate directly on the underlying dict, rather than on this
        # class, in order to change the order of cached items (as
        # opposed to []/__getitem__, which will reinsert an item on top
        # of the stack whenever it is looked up.
        return iter(self._cache)
def copyGeomTable(self, spliteconn, tableName):
    '''Copy a geom table from PostGIS to Spatialite.

    Reads the SpatiaLite schema via PRAGMA table_info, selects every
    column from the PostGIS side (geometry exported as WKT), then inserts
    row by row with GeomFromText on the SpatiaLite side.

    NOTE(review): this is Python 2 code — `valuesDict.keys().index(...)`
    and `e.message` both break on Python 3; confirm the target interpreter.
    '''
    if self.stopThread:
        return
    # get fab_catasto field types
    # working with OrderedDict to maintaing ordering among fields and values
    try:
        self.procMessage.emit(
            "Copia tabella: " + tableName + ". Attenzione operazione lunga!",
            QgsMessageLog.INFO)
        records = spliteconn.cursor().execute("PRAGMA table_info(" +
                                              tableName + ")")
        columnNameTypes = {}
        for record in records:
            # record[1] = column name, record[2] = declared type.
            columnNameTypes[record[1]] = record[2]
        # create query
        temp = {k: k for k in columnNameTypes.keys()}
        # Geometry is exported as WKT text on the PostGIS side.
        temp["the_geom"] = "ST_AsText(" + temp["the_geom"] + ")"
        # Sorting by column name keeps columns and values aligned below.
        temp = OrderedDict(sorted(temp.items(), key=lambda x: x[0]))
        sqlcolumns = temp.values()
        columnames = temp.keys()
        # do postgis query
        sqlquery = "SELECT " + ",".join(sqlcolumns) + " "
        sqlquery += "FROM " + tableName + ";"
        self.cursor.execute(sqlquery)
        self.procMessage.emit(
            "%s: Copiando n: %d records" % (tableName, self.cursor.rowcount),
            QgsMessageLog.INFO)
        # create query string for spatialite
        sql = 'INSERT INTO ' + tableName + '(' + ','.join(
            columnames) + ') VALUES '
        # copy on SpatiaLite
        for record in self.cursor.fetchall():
            # modify geompetry element
            valuesDict = OrderedDict(zip(columnames, record))
            #valuesDict["the_geom"] = "GeomFromText('%s',%d)" % (valuesDict["the_geom"], DATABASE_SRID)
            fields = ['?'] * valuesDict.__len__(
            )  # create a list of ['?', '?', '?', '?', '?', '?', '?', '?', '?', '?']
            index = valuesDict.keys().index("the_geom")
            # Rebuild the geometry from WKT with the project SRID.
            fields[index] = "GeomFromText( ? ,%d)" % DATABASE_SRID
            newsql = sql + "( " + ",".join(fields) + " );"
            #newsql = sql + "(" +",".join( valuesDict.values() ) + ")"
            spliteconn.cursor().execute(newsql, tuple(valuesDict.values()))
            if self.stopThread:
                return
    except db.Error as e:
        self.procMessage.emit(e.message, QgsMessageLog.CRITICAL)
        raise e
class ATimeCache(object):
    """Cache class (dictionary) with a limited size, where only the
    'max_entries' most recently added or accessed entries are stored."""

    def __init__(self, max_entries):
        self._cache = OrderedDict()
        self._max_entries = max_entries

    def _shrink(self):
        # Evict from the head (oldest) until within the limit.
        while len(self._cache) > self._max_entries:
            self._cache.popitem(last=False)

    def get_max_entries(self):
        return self._max_entries

    def set_max_entries(self, value):
        self._max_entries = value
        self._shrink()

    max_entries = property(get_max_entries, set_max_entries, None,
                           "Set or get the cache size")

    def has_key(self, key):
        # BUG FIX: dict.has_key() was removed in Python 3; use `in`.
        return key in self._cache

    def __eq__(self, other):
        try:
            return self._cache.__eq__(other._cache)
        except AttributeError:
            # BUG FIX: narrowed the bare `except:` to AttributeError
            # (raised when `other` has no _cache attribute).
            return False

    def __len__(self):
        return len(self._cache)

    def __getitem__(self, key):
        # Pop-and-reinsert marks the entry as most recently used.
        value = self._cache.pop(key)
        self._cache[key] = value
        return value

    def __setitem__(self, key, value):
        # BUG FIX: was `self._cache.has_key(key)` (Python 2 only).
        if key in self._cache:
            self._cache.pop(key)
        self._cache[key] = value
        self._shrink()

    def __contains__(self, key):
        return self.has_key(key)

    def __str__(self):
        # BUG FIX: was `self.cache` — an attribute that does not exist.
        return self._cache.__str__()

    def __iter__(self):
        # Iterate directly on the underlying dict, rather than on this
        # class, in order to change the order of cached items (as
        # opposed to []/__getitem__, which will reinsert an item on top
        # of the stack whenever it is looked up.
        return iter(self._cache)
class CouchDB:
    """CouchDB wrapper with a bounded FIFO read cache guarded by a lock.

    NOTE(review): `lock` is a class attribute, shared by ALL CouchDB
    instances — presumably intentional; confirm.
    """

    lock = Lock()

    def __init__(self):
        self.write_db = Database(Constants.WRITE_URL)
        self.read_db = Database(Constants.READ_URL)
        self.cache = OrderedDict()

    def getDoc(self, id):
        """Return the document with the given id, serving from cache when possible."""
        try:
            self.lock.acquire()
            if id not in self.cache:
                # Document not in cache: fetch it, evicting the oldest
                # entry when the cache is full.
                document = self.read_db.get(id)
                if len(self.cache) == Constants.CACHE_SIZE:
                    self.cache.popitem(False)  # remove the oldest entry from cache
                self.cache[id] = document
            else:
                # Get from cache.
                document = self.cache[id]
            return document
        finally:
            self.lock.release()

    def createDoc(self, json):
        return self.write_db.create(json)

    def deleteDoc(self, id):
        """Delete the document, dropping any cached copy.

        BUG FIX: the old code called self.getDoc(id) *while holding*
        self.lock; getDoc acquires the same non-reentrant Lock, so every
        delete deadlocked.  Fetch the document before taking the lock.
        """
        doc = self.getDoc(id)
        try:
            self.lock.acquire()
            if id in self.cache:
                self.cache.pop(id)  # clear document from cache
            self.write_db.delete(doc)
        finally:
            self.lock.release()

    def saveDoc(self, doc):
        # BUG FIX: `self.db` never exists; writes go through write_db.
        self.write_db.save(doc)

    def updateDoc(self, doc):
        try:
            self.lock.acquire()
            if doc[Constants.DOCUMENT_ID] in self.cache:
                self.cache.pop(doc[Constants.DOCUMENT_ID])  # clear document from cache
            self.write_db.update(doc)
        finally:
            self.lock.release()
class IndexedDict(Mapping):
    """Wrapper around OrderedDict that allows access via item position or key"""

    def __init__(self, *args, **kwargs):
        self._od = OrderedDict(*args, **kwargs)

    def __getitem__(self, k):
        # Integer k indexes positionally into the values; anything that
        # cannot index a list (TypeError) falls back to a key lookup.
        values = list(self._od.values())
        try:
            return values[k]
        except TypeError:
            return self._od[k]

    def __len__(self):
        return len(self._od)

    def __iter__(self):
        return iter(self._od)
def copyGeomTable(self, spliteconn, tableName):
    '''Copy a geom table from PostGIS to Spatialite.

    Reads the SpatiaLite schema via PRAGMA table_info, selects every
    column from the PostGIS side (geometry exported as WKT), then inserts
    row by row with GeomFromText on the SpatiaLite side.

    NOTE(review): this is Python 2 code — `valuesDict.keys().index(...)`
    and `e.message` both break on Python 3; confirm the target interpreter.
    '''
    if self.stopThread:
        return
    # get fab_catasto field types
    # working with OrderedDict to maintaing ordering among fields and values
    try:
        self.procMessage.emit("Copia tabella: "+tableName + ". Attenzione operazione lunga!", QgsMessageLog.INFO)
        records = spliteconn.cursor().execute("PRAGMA table_info("+tableName+")")
        columnNameTypes = {}
        for record in records:
            # record[1] = column name, record[2] = declared type.
            columnNameTypes[record[1]] = record[2]
        # create query
        temp = {k:k for k in columnNameTypes.keys()}
        # Geometry is exported as WKT text on the PostGIS side.
        temp["the_geom"] = "ST_AsText(" + temp["the_geom"] + ")"
        # Sorting by column name keeps columns and values aligned below.
        temp = OrderedDict(sorted(temp.items(), key=lambda x:x[0]))
        sqlcolumns = temp.values()
        columnames = temp.keys()
        # do postgis query
        sqlquery = "SELECT "+",".join(sqlcolumns) + " "
        sqlquery += "FROM "+ tableName + ";"
        self.cursor.execute( sqlquery )
        self.procMessage.emit("%s: Copiando n: %d records" % (tableName, self.cursor.rowcount), QgsMessageLog.INFO)
        # create query string for spatialite
        sql = 'INSERT INTO '+tableName+'(' + ','.join(columnames) + ') VALUES '
        # copy on SpatiaLite
        for record in self.cursor.fetchall():
            # modify geompetry element
            valuesDict = OrderedDict(zip(columnames,record))
            #valuesDict["the_geom"] = "GeomFromText('%s',%d)" % (valuesDict["the_geom"], DATABASE_SRID)
            fields = ['?'] * valuesDict.__len__() # create a list of ['?', '?', '?', '?', '?', '?', '?', '?', '?', '?']
            index = valuesDict.keys().index("the_geom")
            # Rebuild the geometry from WKT with the project SRID.
            fields[index] = "GeomFromText( ? ,%d)" % DATABASE_SRID
            newsql = sql + "( " + ",".join(fields) + " );"
            #newsql = sql + "(" +",".join( valuesDict.values() ) + ")"
            spliteconn.cursor().execute(newsql, tuple(valuesDict.values()))
            if self.stopThread:
                return
    except db.Error as e:
        self.procMessage.emit(e.message, QgsMessageLog.CRITICAL)
        raise e
def write_leaf_node(node: OrderedDict):
    """Render a leaf node as a docx table of picture cells with captions.

    Each entry key -> val becomes one picture cell (val[0] is the image
    source) with the key written in the row below it.  With cols == 2 the
    pictures are laid out two per row at 7.5 cm; otherwise one per row at
    15 cm.

    NOTE(review): relies on the enclosing scope for `document`, `cols` and
    `is_leaf_node` — confirm those closure variables.
    """
    assert is_leaf_node(node)
    if cols == 2:
        rows = int(node.__len__()) + 1
    else:  # cols == 1:
        rows = 2 * node.__len__() + 1
    table = document.add_table(rows=rows, cols=cols)
    for i, (key, val) in enumerate(node.items()):
        if cols == 2:
            # Two pictures per row; captions go in the row beneath.
            row = int(i / 2) * 2
            col = i % 2
            this_cell = table.rows[row].cells[col]
            this_cell.paragraphs[0].add_run().add_picture(val[0],
                                                          width=Cm(7.5))
            table.rows[row + 1].cells[col].paragraphs[0].add_run(key)
        else:
            # One picture per row at double width.
            row = i * 2
            col = 0
            this_cell = table.rows[row].cells[col]
            this_cell.paragraphs[0].add_run().add_picture(val[0],
                                                          width=Cm(15))
            table.rows[row + 1].cells[col].paragraphs[0].add_run(key)
class ano_pred_Dataset(Dataset): ''' specialized for ano pred model VAD dataset could not do any data augmentation normalized from [0,255] to [0,1] the channels are bgr( because of cv2 and liteFlownet ''' #video clip mean def __init__(self, dataset_folder, clip_length, size=(256, 256)): self.dir = dataset_folder self.videos = OrderedDict() self.image_height = size[0] self.image_width = size[1] self.clip_length = clip_length self.setup() def __len__(self): return self.videos.__len__() def setup(self): videos = glob.glob(os.path.join(self.dir, '*')) for video in sorted(videos): video_name = video.split('/')[-1] self.videos[video_name] = {} self.videos[video_name]['path'] = video self.videos[video_name]['frame'] = glob.glob( os.path.join(video, '*.jpg')) self.videos[video_name]['frame'].sort() self.videos[video_name]['length'] = len( self.videos[video_name]['frame']) self.videos_keys = self.videos.keys() def __getitem__(self, indice): #each video get 4 frames as input and 1 frames as target output key = list(self.videos_keys)[indice] start = rng.randint(0, self.videos[key]['length'] - self.clip_length) video_clip = [] for frame_id in range(start, start + self.clip_length): #video_clip.append(frame_id) video_clip.append( np_load_frame(self.videos[key]['frame'][frame_id], self.image_height, self.image_width)) #video_clip=to_tensor(video_clip) video_clip = np.array(video_clip) video_clip = torch.from_numpy(video_clip) return video_clip, 0
class OrderedSet(Set):
    """Insertion-ordered set built on the keys of an OrderedDict."""

    def __init__(self):
        self.data = OrderedDict()

    def add(self, element):
        # The value is a dummy; only key presence and order matter.
        self.data[element] = 0

    def __len__(self):
        return len(self.data)

    def __contains__(self, value):
        return value in self.data

    def __iter__(self):
        return iter(self.data)
def __getitem__(self, item):
    """Index by position (int, possibly negative), slice, or raw key.

    Int positions are translated through self._time_of_pos to the
    underlying time key; slices expand through __sliceToIndex into a list
    of per-position lookups; any other key is passed straight through.
    """
    if isinstance(item, slice):
        start, stop = self.__sliceToIndex(item)
        # BUG FIX: `xrange` does not exist on Python 3; `range` behaves
        # identically here on both Python 2 and Python 3.
        return [self.__getitem__(point) for point in range(start, stop)]
    if isinstance(item, int):
        if item < 0:
            # Negative positions count back from the end.
            item = OrderedDict.__len__(self) + item
        return OrderedDict.__getitem__(self, self._time_of_pos[item])
    # Anything else (e.g. a datetime) is treated as a direct key.
    return OrderedDict.__getitem__(self, item)
def get_games(self, begin_time=0, count=99):
    """Fetch recent ARAM match ids for this account from the Riot match API.

    :param begin_time: epoch ms lower bound passed to the API.
    :param count: maximum number of matches requested and returned.
    :return: OrderedDict mapping gameId (str) -> timestamp (str) for ARAM
        games only; -1 on API/JSON errors; -2 when the player has moved to
        a different region.
    Side effects: updates self.date_last_game and the
    aram/non-aram/total game counters.
    """
    m = Misc()
    first_game = True
    recent_games_list = OrderedDict()
    self.date_last_game = begin_time
    games_url = 'https://' + self.region + '.api.riotgames.com/lol/match/v3/matchlists/by-account/' + str(self.account_id) +\
        '?beginTime=' + str(begin_time) + '&endIndex=' + str(count) + '&api_key='+ self.api_key
    #print games_url
    self.json_games_url = URL_resolve(
        games_url, self.region,
        "/lol/match/v3/matchlists/by-account/{accountId}").request_to_json()
    if self.json_games_url == -1:
        return -1
    if "matches" in self.json_games_url:
        all_games = self.json_games_url['matches']
    else:
        m.logging(
            self.region,
            str(self.json_games_url) +
            " was returned after making the following call " + games_url,
            "error")
        return -1
    for game in all_games:
        # Stop once we have gathered `count` ARAM games.
        if recent_games_list.__len__() == count:
            return recent_games_list
        if first_game == True:
            # The first (most recent) match stamps date_last_game.
            self.date_last_game = game['timestamp']
            first_game = False
        if game['queue'] == ARAM_QUEUE1 or game['queue'] == ARAM_QUEUE2:
            #recent_games_list.append(str(game['gameId']))
            recent_games_list[str(game['gameId'])] = str(game['timestamp'])
            self.aram_games += 1
        else:
            self.non_aram_games += 1
        self.total_games += 1
        if not game['platformId'] == self.region:
            m.logging(
                self.region, "Player " + str(self.account_id) +
                " is not in " + self.region + " anymore. ", "log")
            #print "Player " + str(self.account_Id) + " is not in " + self.region + " anymore. "
            return -2
    return recent_games_list
class OrderedSet:
    """Insertion-ordered set built on the keys of an OrderedDict."""

    def __init__(self, items=None):
        self.d = OrderedDict()
        self.update(items)

    def update(self, items):
        """Add every element of items (None means no-op)."""
        if items is not None:
            for item in items:
                self.d[item] = 1

    def __iter__(self):
        return self.d.__iter__()

    def __contains__(self, key):
        return self.d.__contains__(key)

    def __delitem__(self, key):
        return self.d.__delitem__(key)

    def __len__(self):
        return self.d.__len__()

    def add(self, x):
        # BUG FIX: was `def add(x): return update(self, [x])` — missing
        # the `self` parameter and calling an unresolved global `update`.
        self.update([x])

    def discard(self, x):
        """Remove x if present; silently ignore a missing element."""
        # BUG FIX: was `self.__del__(x)` — instances have no __del__;
        # deletion goes through the underlying dict.
        if x in self:
            del self.d[x]

    def remove(self, x):
        """Remove x; raise KeyError when absent."""
        if x not in self:
            raise KeyError
        del self.d[x]

    def _format_op(self, op):
        # Elements with a .name attribute render by name.
        if hasattr(op, 'name'):
            return op.name
        return str(op)

    def __repr__(self):
        if not self:
            return '%s()' % (self.__class__.__name__, )
        return '{%r}' % (','.join([self._format_op(op) for op in self]), )
class OrderedSet:
    """Insertion-ordered set built on the keys of an OrderedDict."""

    def __init__(self, items=None):
        self.d = OrderedDict()
        self.update(items)

    def update(self, items):
        """Add every element of items (None means no-op)."""
        if items is not None:
            for item in items:
                self.d[item] = 1

    def __iter__(self):
        return self.d.__iter__()

    def __contains__(self, key):
        return self.d.__contains__(key)

    def __delitem__(self, key):
        return self.d.__delitem__(key)

    def __len__(self):
        return self.d.__len__()

    def add(self, x):
        # BUG FIX: was `return update(self, [x])` — `update` is not a
        # global; it must be called as a method.
        self.update([x])

    def discard(self, x):
        """Remove x if present; silently ignore a missing element."""
        # BUG FIX: was `self.__del__(x)` — instances have no __del__;
        # deletion goes through the underlying dict.
        if x in self:
            del self.d[x]

    def remove(self, x):
        """Remove x; raise KeyError when absent."""
        if x not in self:
            raise KeyError
        del self.d[x]

    def _format_op(self, op):
        # Elements with a .name attribute render by name.
        if hasattr(op, 'name'):
            return op.name
        return str(op)

    def __repr__(self):
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '{%r}' % (','.join([self._format_op(op) for op in self]),)
def start_main(self, event):
    """Collect up to four (stage, run-count) pairs from the GUI and start
    an ArkThread worker.

    Returns False when validation fails (unsupported stage or empty task
    list); otherwise stores the worker on self.worker.
    """
    # OrderedDict keeps tasks in on-screen order.
    TASK_LIST = OrderedDict()
    if self.Index.task1_battle_name.GetValue() != "":
        TASK_LIST[self.Index.task1_battle_name.GetValue()] = int(self.Index.task1_battle_time.GetValue())
    if self.Index.task2_battle_name.GetValue() != "":
        TASK_LIST[self.Index.task2_battle_name.GetValue()] = int(self.Index.task2_battle_time.GetValue())
    if self.Index.task3_battle_name.GetValue() != "":
        TASK_LIST[self.Index.task3_battle_name.GetValue()] = int(self.Index.task3_battle_time.GetValue())
    if self.Index.task4_battle_name.GetValue() != "":
        TASK_LIST[self.Index.task4_battle_name.GetValue()] = int(self.Index.task4_battle_time.GetValue())
    for _ in TASK_LIST.keys():
        # Reject stages outside LIZHI_CONSUME; "GT" stages are also
        # rejected — NOTE(review): reason for the "GT" exclusion is not
        # visible here; confirm.
        if _ not in LIZHI_CONSUME or "GT" in _:
            MessageDialog_OK("{} 不在支持的关卡列表中".format(_), "警告")
            return False
    if TASK_LIST.__len__() == 0:
        MessageDialog_CANCEL("未选择关卡", "提示")
        return False
    else:
        self.worker = ArkThread(ark=self.ark, TASK_LIST=TASK_LIST)
def calculate_offset(cyphered, lang, use_alphabet=False):
    """Estimate the Caesar-shift offset of `cyphered` text by frequency analysis.

    Compares normalized ciphertext character frequencies with the reference
    frequencies for `lang` and lets near-matches vote on candidate shifts.

    :param cyphered: ciphertext string
    :param lang: 'en' selects the English alphabet; anything else Russian
    :param use_alphabet: restrict counting to alphabet characters and
        compute shifts by alphabet position instead of raw code points
    :return: ((shift, votes), reference_frequencies) for the top-voted
        shift, or None when no shift received any votes

    NOTE(review): when use_alphabet is False the shift uses a module-level
    `modulo`, and an input with no countable characters raises
    ZeroDivisionError during normalization — confirm callers guard both.
    """
    alphabet_ru = ''.join([chr(c) for c in range(1072, 1104)])
    alphabet_en = ''.join([chr(c) for c in range(97, 123)])
    alphabet = alphabet_en if lang == 'en' else alphabet_ru
    frequencies = LanguageFrequencyAnalyzer(lang).frequency_dictionary
    frequencies_of_chars = {}
    # Count (case-folded) ciphertext characters, optionally alphabet-only.
    for char in cyphered:
        if char.lower() not in frequencies_of_chars.keys():
            if not use_alphabet or char.lower() in alphabet:
                frequencies_of_chars[char.lower()] = 1
        else:
            if not use_alphabet or char.lower() in alphabet:
                frequencies_of_chars[char.lower()] += 1
    # Normalize values
    factor = 1.0 / sum(frequencies_of_chars.values())
    for k in frequencies_of_chars.keys():
        frequencies_of_chars[k] = frequencies_of_chars[k] * factor
    # Every ciphertext char whose frequency lies within +/- delta of a
    # reference char's frequency casts one vote for the implied shift.
    delta = .05
    possible_shifts = {}
    for char in frequencies.keys():
        possible_values = (c for c in frequencies_of_chars.keys()
                           if frequencies_of_chars[c] - delta <=
                           frequencies[char] <=
                           frequencies_of_chars[c] + delta)
        for ch in possible_values:
            if not use_alphabet:
                shift = (ord(ch) - ord(char) + modulo) % modulo
            else:
                if char in alphabet:
                    shift = (alphabet.index(ch) - alphabet.index(char) +
                             len(alphabet)) % len(alphabet)
                else:
                    # Reference char outside the alphabet: no usable shift.
                    shift = 0
            if shift not in possible_shifts.keys() and shift != 0:
                possible_shifts[shift] = 1
            elif shift != 0:
                possible_shifts[shift] += 1
    # Sorted descending by votes, so popitem(last=False) pops the winner.
    shift_dict = OrderedDict(
        sorted(possible_shifts.items(), key=itemgetter(1), reverse=True))
    if shift_dict.__len__() > 0:
        return shift_dict.popitem(last=False), frequencies
    return None
class MatchmakerQueue:
    """A single named matchmaking queue of players searching for a game.

    Pending searches are kept in an OrderedDict keyed by player, so
    iteration visits searchers in arrival order.

    NOTE(review): ``self._logger`` is used but never assigned in this class —
    presumably a class-level logger defined elsewhere; confirm.
    """
    def __init__(self, queue_name: str, player_service: "PlayerService", game_service: "GameService"):
        self.player_service = player_service
        self.game_service = game_service
        self.queue_name = queue_name
        # Name of the rating attribute this queue matches on.
        self.rating_prop = 'ladder_rating'
        # player -> Search, in arrival order.
        self.queue = OrderedDict()
        self._logger.info("MatchmakerQueue initialized for {}".format(queue_name))

    def notify_potential_opponents(self, search: Search, potential=True):
        """
        Notify opponents who might potentially match the given search object

        :param search: search object to notify for
        :param potential: Whether or not we've started or stopped searching
        :return:
        """
        self._logger.info("Notifying potential opponents")
        for opponent in self.player_service.players.values():
            if opponent == search.player:
                # Never notify the searcher about themselves.
                continue
            quality = search.quality_with(opponent)
            if quality >= search.match_threshold:
                opponent.notify_potential_match(search.player, potential)

    def push(self, search: Search):
        """
        Push the given search object onto the queue

        :param search:
        :return:
        """
        self.queue[search.player] = search

    def match(self, s1: Search, s2: Search):
        """
        Mark the given two searches as matched

        Returns False (and does nothing) when either search is already
        matched or cancelled; otherwise pairs them, removes both players
        from the queue, and schedules the game start.

        :param s1:
        :param s2:
        :return:
        """
        if (s1.is_matched or s2.is_matched) or (s1.is_cancelled or s2.is_cancelled):
            return False
        s1.match(s2)
        s2.match(s1)
        # Remove both players from the queue once they are paired up.
        if s1.player in self.queue:
            del self.queue[s1.player]
        if s2.player in self.queue:
            del self.queue[s2.player]
        # Start the game asynchronously; the result is not awaited here.
        asyncio.ensure_future(self.game_service.ladder_service.start_game(s1.player, s2.player))
        return True

    def __len__(self):
        # Number of searches currently waiting in the queue.
        return self.queue.__len__()

    async def search(self, player, start_time=None, search=None):
        """
        Search for a match.

        If a suitable match is found, immediately calls on_matched_with on both players.

        Otherwise, puts a search object into the Queue and awaits completion

        :param player: Player to search for a matchup for
        """
        search = search or Search(player, start_time)
        with server.stats.timer('matchmaker.search'):
            try:
                self._logger.debug("Searching for matchup for {}".format(player))
                # Iterate over a snapshot: match() mutates self.queue.
                for opponent, opponent_search in self.queue.copy().items():
                    if opponent == player:
                        continue
                    quality = search.quality_with(opponent)
                    threshold = search.match_threshold
                    self._logger.debug("Game quality between {} and {}: {} (threshold: {})"
                                       .format(player, opponent, quality, threshold))
                    if quality >= threshold:
                        if self.match(search, opponent_search):
                            # Matched immediately; nothing more to do.
                            return
                # No immediate match: announce and wait in the queue.
                self.notify_potential_opponents(search, True)
                self._logger.debug("Found nobody searching, pushing to queue: {}".format(search))
                self.queue[player] = search
                await search.await_match()
                self._logger.debug("Search complete: {}".format(search))
                self.notify_potential_opponents(search, False)
            except CancelledError:
                # Search cancelled by the caller; exit quietly.
                pass
print "Max Repeat Length of length %d: %d" % (len, max_repeat_count) print "Number of max repeat strings: %d" % max_repeats.__len__() def run(): file = None try: file = open(_fasta_file) read_seqs(file) except Exception, e: print e finally: if file: file.close() print "Counts: " + str(_seq_dict.__len__()) longest = -1 shortest = 999999999 orf_count = 0 longest_orf = -1 orf_start = -1 for title, entry in _seq_dict.iteritems(): seq = entry["seq"] length = len(seq) if length > longest: longest = length if shortest > length: shortest = length if title != 'gi|142022655|gb|EQ086233.1|97':
class EntityCollection(object):
    """
    EntityCollection is a wrapper class around ordered dictionary collection of type OrderedDict.
    It is created specifically to collect Entity class instances.
    Each Entity instance has unique segment path value, which is used as a key in the dictionary.
    """
    def __init__(self, *entities):
        # Entities are stored keyed by their segment path, in insertion order.
        self.logger = logging.getLogger("ydk.types.EntityCollection")
        self._entity_map = OrderedDict()
        for entity in entities:
            self.append(entity)

    def __eq__(self, other):
        # Equal only to another EntityCollection with an equal underlying map.
        if not isinstance(other, EntityCollection):
            return False
        return self._entity_map.__eq__(other._entity_map)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __len__(self):
        return self._entity_map.__len__()

    def _key(self, entity):
        # An entity's dictionary key is its unique segment path.
        return entity.path();

    def append(self, entities):
        """
        Adds new elements to the end of the dictionary. Allowed entries:
          - instance of Entity class
          - list of Entity class instances

        A None argument (or None list element) is silently skipped with a
        debug log; any other type raises YInvalidArgumentError.
        """
        if entities is None:
            self.logger.debug("Cannot add None object to the EntityCollection")
        elif isinstance(entities, Entity):
            key = self._key(entities)
            self._entity_map[key] = entities
        elif isinstance(entities, list):
            for entity in entities:
                if isinstance(entity, Entity):
                    key = self._key(entity)
                    self._entity_map[key] = entity
                elif entity is None:
                    self.logger.debug("Cannot add None object to the EntityCollection")
                else:
                    msg = "Argument %s is not supported by EntityCollection class; data ignored"%type(entity)
                    self._log_error_and_raise_exception(msg, YInvalidArgumentError)
        else:
            msg = "Argument %s is not supported by EntityCollection class; data ignored"%type(entities)
            self._log_error_and_raise_exception(msg, YInvalidArgumentError)

    def _log_error_and_raise_exception(self, msg, exception_class):
        # Log first so the failure is recorded even if the caller swallows it.
        self.logger.error(msg)
        raise exception_class(msg)

    def entities(self):
        """
        Returns list of all entities in the collection.
        If collection is empty, it returns an empty list.
        """
        return list(self._entity_map.values())

    def keys(self):
        """
        Returns list of keys for the collection entities.
        If collection is empty, it returns an empty list.
        """
        return list(self._entity_map.keys())

    def has_key(self, key):
        return key in self.keys()

    def get(self, item):
        # Alias for subscript access; returns None on a miss (see __getitem__).
        return self.__getitem__(item)

    def __getitem__(self, item):
        """
        Returns entity store in the collection.

        Parameter 'item' could be:
          - a type of int (ordered number of entity)
          - type of str (segment path of entity)
          - instance of Entity class

        Returns None on a miss; raises YInvalidArgumentError for any other
        item type.
        """
        entity = None
        if isinstance(item, int):
            if 0 <= item < len(self):
                entity = self.entities()[item]
        elif isinstance(item, str):
            if item in self.keys():
                entity = self._entity_map[item]
        elif isinstance(item, Entity):
            key = self._key(item)
            if key in self.keys():
                entity = self._entity_map[key]
        else:
            msg = "Argument %s is not supported by EntityCollection class; data ignored"%type(item)
            self._log_error_and_raise_exception(msg, YInvalidArgumentError)
        return entity

    def clear(self):
        """Deletes all the members of collection"""
        self._entity_map.clear()

    def pop(self, item=None):
        """
        Deletes collection item.

        Parameter 'item' could be:
          - type of int (ordered number of entity)
          - type of str (segment path of entity)
          - instance of Entity class

        When item is None, removes and returns the most recently added
        entity.  Returns entity of deleted instance or None if item is not
        found (or the collection is empty).
        """
        entity = None
        if len(self) == 0:
            pass
        elif item is None:
            # LIFO removal of the most recently inserted entity.
            key, entity = self._entity_map.popitem()
        elif isinstance(item, int):
            entity = self.__getitem__(item)
            if entity is not None:
                key = self._key(entity)
                entity = self._entity_map.pop(key)
        elif isinstance(item, str):
            if item in self.keys():
                entity = self._entity_map.pop(item)
        elif isinstance(item, Entity):
            key = self._key(item)
            if key in self.keys():
                entity = self._entity_map.pop(key)
        return entity

    def __delitem__(self, item):
        # Delegates to pop(); unlike dict, a miss returns None, not KeyError.
        return self.pop(item)

    def __iter__(self):
        # Iterates over entities (values), not keys.
        return iter(self.entities())

    def __str__(self):
        ent_strs = list();
        for entity in self.entities():
            ent_strs.append(format(entity))
        return "Entities in {}: {}".format(self.__class__.__name__, ent_strs)
class YList(EntityCollection):
    """
    Represents a list with support for hanging a parent

    All YANG based entity classes that have lists in them use YList
    to represent the list.

    The "list" statement is used to define an interior data node in the
    schema tree. A list node may exist in multiple instances in the data
    tree. Each such instance is known as a list entry. The "list"
    statement takes one argument, which is an identifier, followed by a
    block of sub-statements that holds detailed list information.

    A list entry is uniquely identified by the values of the list's keys,
    if defined. The keys then could be used to get entities from the YList.
    """
    def __init__(self, parent):
        super(YList, self).__init__()
        self.parent = parent
        # Counter used to generate surrogate keys for entries whose key
        # leafs are not (yet) set.
        self.counter = 1000000
        # Newly appended entities are staged here and only folded into
        # _entity_map lazily (_flush_cache), so that key leafs assigned
        # after append() are picked up.
        self._cache_dict = OrderedDict()

    def __setattr__(self, name, value):
        # Setting 'yfilter' on the list propagates the filter to every entry.
        if name == 'yfilter' and isinstance(value, _YFilter):
            for e in self:
                e.yfilter = value
        super(YList, self).__setattr__(name, value)

    def _key(self, entity):
        """Build the dictionary key for an entity from its YANG key leafs.

        Returns a single value for one key leaf (stringified if not str),
        a tuple for several, or a generated surrogate string when no key
        leaf is set.
        """
        key_list = []
        if hasattr(entity, 'ylist_key_names'):
            for key in entity.ylist_key_names:
                if hasattr(entity, key):
                    attr = entity.__dict__[key]
                    if attr is None:
                        # An unset key leaf invalidates the whole key.
                        key_list = []
                        break
                    key_list.append(attr)
        if len(key_list) == 0:
            self.counter += 1
            key = format(self.counter)
        elif len(key_list) == 1:
            key = key_list[0]
            if not isinstance(key, str):
                key = format(key)
        else:
            key = tuple(key_list)
        return key

    def _flush_cache(self):
        # Move staged entities into the main map, re-computing their keys
        # now that key leafs may have been assigned after append().
        for _ in range(len(self._cache_dict)):
            _, entity = self._cache_dict.popitem(False)
            self._entity_map[self._key(entity)] = entity

    def append(self, entities):
        """Append a single Entity (staged until the next keyed access).

        Raises YInvalidArgumentError for None or non-Entity arguments.
        """
        if entities is None:
            self._log_error_and_raise_exception("Cannot add None object to the YList", YInvalidArgumentError)
        elif isinstance(entities, Entity):
            # Bug fix: only hang the parent once we know this is a real
            # Entity.  The original assigned `entities.parent` before the
            # checks above, so None (AttributeError on NoneType) and other
            # invalid arguments crashed before the intended
            # YInvalidArgumentError could be raised.
            entities.parent = self.parent
            key = self._key(entities)
            self._cache_dict[key] = entities
            entities.ylist_key = key
        else:
            msg = "Argument %s is not supported by YList class; data ignored"%type(entities)
            self._log_error_and_raise_exception(msg, YInvalidArgumentError)

    def extend(self, entity_list):
        """Append each entity in the given iterable."""
        for entity in entity_list:
            self.append(entity)

    def clear(self):
        """Deletes all the members of collection"""
        self._entity_map.clear()
        self._cache_dict.clear()

    def keys(self):
        self._flush_cache()
        return super(YList, self).keys()

    def entities(self):
        self._flush_cache()
        return super(YList, self).entities()

    def pop(self, item=None):
        self._flush_cache()
        return super(YList, self).pop(item)

    def __getitem__(self, item):
        """Look up by position (int), by key, or by the string form of a
        non-string key; returns None on an int-out-of-range miss."""
        entity = None
        if isinstance(item, int) and 0 <= item < len(self):
            entity = self.entities()[item]
        elif self.has_key(item):
            entity = self._entity_map[item]
        elif not isinstance(item, str):
            # Fall back to the stringified key (e.g. an int key leaf).
            entity = self._entity_map[format(item)]
        return entity

    def __len__(self):
        # Count both flushed and still-staged entries.
        return self._entity_map.__len__() + self._cache_dict.__len__()
class DotMap(OrderedDict):
    """An ordered dict whose items are also reachable as attributes.

    ``d.a.b = 1`` works: missing keys are auto-created as nested DotMaps
    while ``_dynamic`` is True (pass ``_dynamic=False`` to disable).
    Nested dicts (and dicts inside lists) passed to the constructor are
    converted to DotMaps recursively.
    """
    def __init__(self, *args, **kwargs):
        self._map = OrderedDict()
        self._dynamic = True
        if kwargs:
            if '_dynamic' in kwargs:
                self._dynamic = kwargs['_dynamic']
        if args:
            d = args[0]
            if isinstance(d, dict):
                for k,v in self.__call_items(d):
                    # Recursively convert nested dicts, including dicts
                    # sitting inside list values.
                    if type(v) is dict:
                        v = DotMap(v, _dynamic=self._dynamic)
                    if type(v) is list:
                        l = []
                        for i in v:
                            n = i
                            if type(i) is dict:
                                n = DotMap(i, _dynamic=self._dynamic)
                            l.append(n)
                        v = l
                    self._map[k] = v
        if kwargs:
            for k,v in self.__call_items(kwargs):
                # Bug fix: the original used `k is not '_dynamic'`, an
                # identity comparison on a str literal that is not
                # guaranteed to work; use equality.
                if k != '_dynamic':
                    self._map[k] = v

    def __call_items(self, obj):
        # Prefer iteritems() (Python 2 dicts) when available.
        if hasattr(obj, 'iteritems') and ismethod(getattr(obj, 'iteritems')):
            return obj.iteritems()
        else:
            return obj.items()

    def items(self):
        return self.iteritems()

    def iteritems(self):
        return self.__call_items(self._map)

    def __iter__(self):
        return self._map.__iter__()

    def next(self):
        return self._map.next()

    def __setitem__(self, k, v):
        self._map[k] = v

    def __getitem__(self, k):
        if k not in self._map and self._dynamic and k != '_ipython_canary_method_should_not_exist_':
            # automatically extend to new DotMap
            self[k] = DotMap()
        return self._map[k]

    def __setattr__(self, k, v):
        if k in {'_map','_dynamic', '_ipython_canary_method_should_not_exist_'}:
            super(DotMap, self).__setattr__(k,v)
        else:
            self[k] = v

    def __getattr__(self, k):
        # Bug fix: the original tested `k == {...}` (string vs. set, always
        # False), so lookups of '_map' before __init__ ran (e.g. during
        # copy/unpickle) recursed into __getitem__ forever.  Internal names
        # go through normal attribute lookup and raise AttributeError when
        # genuinely missing.
        if k in {'_map','_dynamic','_ipython_canary_method_should_not_exist_'}:
            return super(DotMap, self).__getattribute__(k)
        return self[k]

    def __delattr__(self, key):
        return self._map.__delitem__(key)

    def __contains__(self, k):
        return self._map.__contains__(k)

    def __str__(self):
        items = []
        for k,v in self.__call_items(self._map):
            # bizarre recursive assignment situation (why someone would do this is beyond me)
            if id(v) == id(self):
                items.append('{0}=DotMap(...)'.format(k))
            else:
                items.append('{0}={1}'.format(k, repr(v)))
        out = 'DotMap({0})'.format(', '.join(items))
        return out

    def __repr__(self):
        return str(self)

    def toDict(self):
        """Convert to a plain dict, recursing into nested DotMaps and lists."""
        d = {}
        for k,v in self.items():
            if type(v) is DotMap:
                # bizarre recursive assignment support
                if id(v) == id(self):
                    v = d
                else:
                    v = v.toDict()
            elif type(v) is list:
                l = []
                for i in v:
                    n = i
                    if type(i) is DotMap:
                        n = i.toDict()
                    l.append(n)
                v = l
            d[k] = v
        return d

    def pprint(self):
        pprint(self.toDict())

    def empty(self):
        return (not any(self))

    # proper dict subclassing
    def values(self):
        return self._map.values()

    # ipython support
    def __dir__(self):
        return self.keys()

    @classmethod
    def parseOther(self, other):
        # Unwrap a DotMap so comparisons run against its backing OrderedDict.
        if type(other) is DotMap:
            return other._map
        else:
            return other

    def __cmp__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__cmp__(other)

    def __eq__(self, other):
        other = DotMap.parseOther(other)
        if not isinstance(other, dict):
            return False
        return self._map.__eq__(other)

    def __ge__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ge__(other)

    def __gt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__gt__(other)

    def __le__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__le__(other)

    def __lt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__lt__(other)

    def __ne__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ne__(other)

    def __delitem__(self, key):
        return self._map.__delitem__(key)

    def __len__(self):
        return self._map.__len__()

    def clear(self):
        self._map.clear()

    def copy(self):
        return DotMap(self.toDict())

    def get(self, key, default=None):
        return self._map.get(key, default)

    def has_key(self, key):
        return key in self._map

    def iterkeys(self):
        return self._map.iterkeys()

    def itervalues(self):
        return self._map.itervalues()

    def keys(self):
        return self._map.keys()

    def pop(self, key, default=None):
        return self._map.pop(key, default)

    def popitem(self):
        return self._map.popitem()

    def setdefault(self, key, default=None):
        # Bug fix: the original dropped the return value, so setdefault()
        # always returned None instead of the stored value (dict contract).
        return self._map.setdefault(key, default)

    def update(self, *args, **kwargs):
        if len(args) != 0:
            self._map.update(*args)
        self._map.update(kwargs)

    def viewitems(self):
        return self._map.viewitems()

    def viewkeys(self):
        return self._map.viewkeys()

    def viewvalues(self):
        return self._map.viewvalues()

    @classmethod
    def fromkeys(cls, seq, value=None):
        d = DotMap()
        d._map = OrderedDict.fromkeys(seq, value)
        return d

    def __getstate__(self):
        return self.__dict__

    def __setstate__(self, d):
        self.__dict__.update(d)
class TestNodeSet(ExitStack):
    """A managed collection of TestNode instances.

    Inherits ExitStack so that every node's lifetime is tied to this set:
    nodes are entered via enter_context() and torn down when the set exits.
    Nodes are also exposed as attributes of the set by name.
    """
    def __init__(self,
                 names: Iterable[str] = None,
                 count: int = None,
                 nodeReg=None,
                 tmpdir=None,
                 keyshare=True,
                 primaryDecider=None,
                 opVerificationPluginPath=None,
                 testNodeClass=TestNode):
        # NOTE(review): `keyshare` is accepted but not used anywhere in this
        # class — confirm whether it is still needed.
        super().__init__()
        self.tmpdir = tmpdir
        self.primaryDecider = primaryDecider
        self.opVerificationPluginPath = opVerificationPluginPath
        self.testNodeClass = testNodeClass
        self.nodes = OrderedDict()  # type: Dict[str, TestNode]
        # Can use just self.nodes rather than maintaining a separate dictionary
        # but then have to pluck attributes from the `self.nodes` so keeping
        # it simple a the cost of extra memory and its test code so not a big
        # deal
        if nodeReg:
            self.nodeReg = nodeReg
        else:
            # Exactly one of `names` / `count` must be supplied.
            nodeNames = (names if names is not None and count is None
                         else genNodeNames(count) if count is not None
                         else error("only one of either names or count is required"))
            self.nodeReg = genNodeReg(
                names=nodeNames)  # type: Dict[str, NodeDetail]
        for name in self.nodeReg.keys():
            self.addNode(name)
        # The following lets us access the nodes by name as attributes of the
        # NodeSet. It's not a problem unless a node name shadows a member.
        self.__dict__.update(self.nodes)

    def addNode(self, name: str) -> TestNode:
        """Create, start (via ExitStack) and register a node by name."""
        if name in self.nodes:
            error("{} already added".format(name))
        assert name in self.nodeReg
        ha, cliname, cliha = self.nodeReg[name]
        # Optional operation-verification plugins loaded from disk.
        if self.opVerificationPluginPath:
            pl = PluginLoader(self.opVerificationPluginPath)
            opVerifiers = pl.plugins['VERIFICATION']
        else:
            opVerifiers = None
        testNodeClass = self.testNodeClass
        # enter_context ties the node's lifetime to this ExitStack.
        node = self.enter_context(
            testNodeClass(name=name,
                          ha=ha,
                          cliname=cliname,
                          cliha=cliha,
                          nodeRegistry=copy(self.nodeReg),
                          basedirpath=self.tmpdir,
                          primaryDecider=self.primaryDecider,
                          opVerifiers=opVerifiers))
        self.nodes[name] = node
        # Mirror into __dict__ for attribute-style access.
        self.__dict__[name] = node
        return node

    def removeNode(self, name, shouldClean):
        """Stop and deregister a node; optionally wipe its key directories."""
        self.nodes[name].stop()
        if shouldClean:
            self.nodes[name].nodestack.keep.clearAllDir()
            self.nodes[name].clientstack.keep.clearAllDir()
        del self.nodes[name]
        del self.__dict__[name]
        # del self.nodeRegistry[name]
        # for node in self:
        #     node.removeNodeFromRegistry(name)

    def __iter__(self) -> Iterator[TestNode]:
        # Iterates over node objects, not names.
        return self.nodes.values().__iter__()

    def __getitem__(self, key) -> TestNode:
        # Returns None for unknown names (dict.get semantics).
        return self.nodes.get(key)

    def __len__(self):
        return self.nodes.__len__()

    @property
    def nodeNames(self):
        # Names in sorted (not insertion) order.
        return sorted(self.nodes.keys())

    @property
    def f(self):
        # Maximum number of faulty nodes tolerated for the current set size.
        return getMaxFailures(len(self.nodes))

    def getNode(self, node: NodeRef) -> TestNode:
        """Resolve a node object or a node name to the node object."""
        return node if isinstance(node, Node) \
            else self.nodes.get(node) if isinstance(node, str) \
            else error("Expected a node or node name")

    @staticmethod
    def getNodeName(node: NodeRef) -> str:
        """Resolve a node object or a node name to the node name."""
        return node if isinstance(node, str) \
            else node.name if isinstance(node, Node) \
            else error("Expected a node or node name")

    def connect(self, fromNode: NodeRef, toNode: NodeRef):
        """Connect one node's stack to another's HA endpoint."""
        fr = self.getNode(fromNode)
        to = self.getNode(toNode)
        fr.connect(to.nodestack.ha)

    def connectAll(self):
        """Connect every unordered pair of nodes in the set."""
        for c in combinations(self.nodes.keys(), 2):
            print("connecting {} to {}".format(*c))
            self.connect(*c)

    def getLastMsgReceived(self, node: NodeRef, method: str = None) -> Tuple:
        return getLastMsgReceivedForNode(self.getNode(node), method)

    def getAllMsgReceived(self, node: NodeRef, method: str = None) -> Tuple:
        return getAllMsgReceivedForNode(self.getNode(node), method)
class UniversalOrderedStruct(UniversalCollection):
    """
    Mostly like an OrderedDict, but it behaves like a list, in that
    (for x in struct) and (x in struct) looks over values, not keys.
    """
    def __init__(self, *initializer):
        # A plain-dict initializer is first sorted by key so the resulting
        # order is deterministic.
        if len(initializer)==1 and type(initializer[0]) is dict:
            d = initializer[0]
            initializer = [OrderedDict((k, d[k]) for k in sorted(d.keys()))]
        self._heart = OrderedDict(*initializer)

    def __contains__(self, item):
        # Membership is over VALUES (list-like), not keys.
        return item in self._heart.values()

    def __setitem__(self, key, value):
        # `next` and integer keys are reserved for sequence-style collections.
        if key is next or isinstance(key, int):
            raise InvalidKeyError('This sequence is an {}, and cannot be given {} key: {}'.format(self.__class__.__name__, key.__class__.__name__, key))
        self._heart.__setitem__(key, value)

    def __getitem__(self, selector):
        """Select by a single key, a list of keys, an array, or a key-valued
        slice whose endpoints are keys (not positions)."""
        if isinstance(selector, (list, slice, np.ndarray)):
            if isinstance(selector, slice):
                # Translate key endpoints into positional indices.
                all_keys = list(self.keys())
                start_index = all_keys.index(selector.start) if selector.start is not None else None
                stop_index = all_keys.index(selector.stop) if selector.stop is not None else None
                keys = all_keys[start_index:stop_index:selector.step]
            else:
                keys = selector
            return UniversalOrderedStruct((k, self[k]) for k in keys)
        else:
            return self._heart.__getitem__(selector)

    def __repr__(self):
        rep = self._heart.__repr__()
        # Replace the leading "OrderedDict" of the underlying repr with this
        # class's name.  Bug fix: the original sliced by
        # len(OrderedDict.__class__.__name__) == len('type') == 4, producing
        # a mangled "...redDict(...)" repr.
        return self.__class__.__name__ + rep[len(OrderedDict.__name__):]

    def __iter__(self):
        # Iteration yields values, matching __contains__.
        return iter(self._heart.values())

    def __len__(self):
        return self._heart.__len__()

    def has_key(self, key):
        return key in self._heart

    def keys(self):
        return self._heart.keys()

    def values(self):
        return self._heart.values()

    def to_struct(self):
        """Return a shallow OrderedDict copy of the backing store."""
        return self._heart.copy()

    @classmethod
    def from_struct(cls, struct):
        return cls(struct)

    @classmethod
    def key_in_filter(cls, key, key_filter):
        """Return True when `key` matches `key_filter` (a single key, a list
        of keys, or an all-inclusive empty slice).

        Bug fix: the original tested `key in list` — membership against the
        builtin `list` type itself, which raises TypeError — instead of
        `key in key_filter`.  Also marked @classmethod to match the `cls`
        first parameter (cls is unused, so instance calls are unaffected).
        """
        if isinstance(key_filter, list):
            return key in key_filter
        elif isinstance(key_filter, slice):
            if key_filter.start is None and key_filter.stop is None and key_filter.step is None:
                return True
            else:
                raise NotImplementedError('Have not yet implemented key filter for slice: {}'.format(key_filter))
        else:
            return key==key_filter
def getTimeFromPos(self, index):
    """Return the timestamp stored for the given integer position.

    Negative indices count back from the end, mirroring normal Python
    sequence semantics; the container's length (via the OrderedDict base
    class) anchors the wrap-around.
    """
    position = index
    if position < 0:
        # Translate a negative offset into an absolute position.
        position += OrderedDict.__len__(self)
    return self._time_of_pos[position]
# Enter your code here. Read input from STDIN. Print output to STDOUT
# https://www.hackerrank.com/challenges/word-order
#
# Reads n words from stdin, then prints the number of distinct words
# followed by each word's occurrence count in first-appearance order.
# Updated from Python 2 (raw_input / print statement) to Python 3 to match
# the rest of the codebase.
from collections import OrderedDict

word_counts = OrderedDict()
for _ in range(int(input())):
    word = input()
    word_counts[word] = word_counts.get(word, 0) + 1

print(len(word_counts))
# Counts on one line, space-separated (same output shape as `print i,`).
print(*word_counts.values())
class BufferManager(object):
    """Manage the set of open text buffers, backed by an OrderedDict."""

    def __init__(self, file_name):
        """Create the buffer table and open the initial buffer."""
        self.buffers = OrderedDict()
        self.new_buffer(file_name)

    def __len__(self):
        """Number of open buffers."""
        return len(self.buffers)

    def pop(self, *arg):
        """Remove and return a buffer by name (OrderedDict.pop semantics)."""
        return self.buffers.pop(*arg)

    def pop_by_index(self, index):
        """Pop a buffer by index."""
        name = list(self.buffers)[index]
        return self.buffers.pop(name)

    def __iter__(self):
        """Iterate over buffer names in insertion order."""
        return iter(self.buffers)

    def __setitem__(self, *args):
        """Store a buffer under a name (delegates to the dict)."""
        return self.buffers.__setitem__(*args)

    def __getitem__(self, *args):
        """Fetch a buffer by name (delegates to the dict)."""
        return self.buffers.__getitem__(*args)

    def print_stats(self, buffer_name):
        """Print stats regarding the specified buffer."""
        buf = self.buffers[buffer_name]
        print('"%s" %iL, %iC' % (buffer_name, buf.length(), buf.num_chars()))

    def new_buffer(self, file_name):
        """Setup a new buffer."""
        if file_name:
            buffer_name = file_name
        else:
            # Unnamed buffers get a generated "bufferN" name.
            buffer_name = "buffer%i" % (self.__len__()+1)
        self.buffers[buffer_name] = \
            ex_buffer.DocumentController(file_name=file_name)
        self.print_stats(buffer_name)

    def open_history(self):
        """Open the readline history file."""
        self.buffers['readline history'] = \
            ex_buffer.DocumentController(
                fm=ex_file.HistoryFileManager,
                nll=ex_buffer.NumberedLineList)
        self.print_stats('readline history')

    def get_by_index(self, index):
        """Get a buffer by index."""
        return self.buffers[list(self.buffers)[index]]

    def get_name_by_index(self, index):
        """Get the name of a buffer by index."""
        return list(self.buffers)[index]

    def get_index_by_name(self, name):
        """Get the index of a buffer by name (len(self) when not found)."""
        for position, key in enumerate(self.buffers):
            if key == name:
                return position
        return len(self.buffers)
class ChainSet(object):
    """
    Base class for various methods to rename chains

    Contains _chains, which maps from the renamed chain to a tuple with the
    original (object,state,chain). All dict-like accessors work on ChainSets,
    e.g. chain_set["A"] -> ("obj",1,"A")

    NOTE(review): relies on module-level globals `_orderedDict` and
    `_default_base` defined elsewhere in this file.
    """
    def __init__(self):
        # Use an OrderedDict in Python >= 1.7 for better printing
        if _orderedDict:
            self._chains = OrderedDict()
        else:
            self._chains = dict()

    def map_chain(self, obj, state, origChain ):
        """
        map_chain(string obj,int state, string chain]]) -> string

        Maps a chain letter to a unique chainID. Results are unique within
        each instance, and can be used as keys on this chain set.

        Subclasses must override; the base class always raises.
        """
        raise NotImplementedError("Base class")

    # delegate most methods to _chains
    def __getattr__(self,at):
        # Mutating methods are deliberately hidden so the mapping can only
        # grow through map_chain(); everything else is delegated.
        if at in "pop popitem update setdefault".split():
            raise AttributeError("type object '%s' has no attribute '%s'"%(type(self),at))
        return getattr(self._chains,at)

    def __cmp__(self,other):
        return self._chains.__cmp__(other)

    def __eq__(self,other):
        return self._chains.__eq__(other)

    def __ge__(self,other):
        return self._chains.__ge__(other)

    def __gt__(self,other):
        return self._chains.__gt__(other)

    def __le__(self,other):
        return self._chains.__le__(other)

    def __lt__(self,other):
        return self._chains.__lt__(other)

    def __ne__(self,other):
        return self._chains.__ne__(other)

    def __len__(self):
        return self._chains.__len__()

    def __contains__(self,key):
        return self._chains.__contains__(key)

    def __getitem__(self,key):
        return self._chains.__getitem__(key)

    def __iter__(self):
        return self._chains.__iter__()

    def __str__(self):
        return str(self._chains)

    @staticmethod
    def _int_to_chain(i,base=_default_base):
        """
        _int_to_chain(int,int) -> str

        Converts a positive integer to a chain ID. Chain IDs include uppercase
        characters, numbers, and optionally lowercase letters.

        i = a positive integer to convert
        base = the alphabet size to include. Typically 36 or 62.
        """
        if i < 0:
            raise ValueError("positive integers only")
        if base < 0 or 62 < base:
            raise ValueError("Invalid base")
        # Digits: A-Z, then 0-9, then a-z (when base > 36).
        quot = int(i)//base
        rem = i%base
        if rem < 26:
            letter = chr( ord("A") + rem)
        elif rem < 36:
            letter = str( rem-26)
        else:
            letter = chr( ord("a") + rem - 36)
        if quot == 0:
            return letter
        else:
            # Recurse on the quotient (bijective numeration: quot-1).
            return ChainSet._int_to_chain(quot-1,base) + letter
class Stack(object):
    """ Base class to store pandas objects, with special operations to
    return as 3d data (eg panel) and to apply functions itemwise.  Items are
    stored in an ordered dict.

    NOTE(review): this class uses Python-2-only constructs (`basestring`,
    list-returning dict views indexed/sliced directly) — it is not py3-safe
    as written.
    """
    itemlabel = 'Item'

    # Dunder names conceptually delegated to the underlying dict.
    _magic=['__len__', '__iter__', '__reversed__', '__contains__', ]

    def __init__(self, data, keys=None, name='', sort_items=False):
        self.name = name

        # Dictionary input
        if isinstance(data, dict):
            logger.debug('Initializing "%s" from dictionary.' % self.full_name)
            if sort_items:
                logger.debug('Sorting keys')
                # NOTE(review): sorted(data.keys()) yields bare keys, not
                # (key, value) pairs, so OrderedDict(...) here cannot build
                # the intended mapping — looks like it should be
                # sorted(data.items()); confirm and fix upstream.
                self._data=OrderedDict(sorted(data.keys(), key=lambda t: t[0]))
            else:
                self._data=OrderedDict(data)
        else:
            # Non-dict input: coerce to an iterable of items.
            if not isinstance(data, Iterable):
                logger.info('%s constructed from non-iterable... converting '
                            'data to an iterable' % self.full_name)
                data=[data]
            if keys:
                if not isinstance(keys, Iterable):
                    logger.info('%s constructed from non-iterable... converting '
                                'keys to an iterable' % self.full_name)
                    keys = [keys]
                if len(keys) != len(data):
                    raise ValueError('Length mistmatch: keys and data (%s,%s)'\
                                     % (len(keys), len(data)))
            # If keys not passed, generate them
            else:
                # Zipped data ((key, df), (key, df))
                try:
                    keys, data = zip(*data)
                except Exception:
                    # Data was not pre-zipped: synthesize "Item0, Item1, ..."
                    keys=self._gen_keys(len(data))
                    if len(keys) > 1:
                        logger.warn("Generating keys %s-%s" % (keys[0], keys[-1]))
                    else:
                        logger.warn("Generating key %s" % keys[0])
            self._data=OrderedDict( [ (key, data[i]) for (i, key) in enumerate(keys) ])

    @property
    def _address(self):
        """ Property to make easily accesible by multicanvas """
        return mem_address(super(Stack, self).__repr__())

    def _gen_keys(self, length):
        """ Return a list of itemlables (item0, item1 etc...) using
        self.itemlabel and a length"""
        logger.debug('Items not found on %s: generating item list' % self.full_name)
        return [self.itemlabel+str(i) for i in range(length)]

    # --------------------
    # Dictionary Interface
    def __getitem__(self, keyslice):
        """ If single name, used dict interface. If slice or integer, uses
        list interface.

        All results parameterized to key, data pairs, passed directly into a
        new Stack.

        NOTE(review): `.keys().index(...)` and `.items()[...]` require
        Python 2's list-returning dict methods.
        """
        # Slice as list of strings or int [0, 'foo', 2, 'bar']
        if hasattr(keyslice, '__iter__'):
            tuples_out = []
            for item in keyslice:
                if isinstance(item, str):
                    # Translate a name into its positional index.
                    item = self._data.keys().index(item)
                tuples_out.append(self._data.items()[item])
        else:
            if isinstance(keyslice, int) or isinstance(keyslice, slice):
                tuples_out = self._data.items()[keyslice]
            else:
                tuples_out = [(keyslice, self._data[keyslice])] #keyslice is name

        # If single item, return TimeSpectra, else, return new Stack
        # Canonical slicing implementaiton; don't change unless good reason
        # Because len() wonky with nested tuples (eg (x,y) and [(x1,y1),(x2,y2)]
        # are both length two, this will work:
        if sum(1 for x in tuples_out) == 2:
            return tuples_out[1] #Return timespectra
        else:
            return self.__class__(tuples_out)

    def __delitem__(self, keyslice):
        """ Delete a single name, or a keyslice from names/canvas """
        if isinstance(keyslice, str):
            idx = self.names.index(keyslice)
            self.pop(idx)
        else:
            raise NotImplementedError("Deletion only supports single entry")

    def __setitem__(self, name, canvas):
        """Replace an existing item in place (keeping its position), or
        append a new name."""
        if name in self.names:
            idx = self.names.index(name)
            self.pop(idx)
            self.insert(idx, name, canvas)
        else:
            self.names.append(name)

    def __getattr__(self, attr):
        """ If attribute not found, try attribute lookup in dictionary.  If
        that is not found, try finding attribute on self._data.

        For example, self.keys() will first look for self['keys'].  Since
        this isn't found, it calls self._data.keys().  But if I do
        self.Item1, then it returns self['Item1'].  The very rare conflict
        case that a user has named the items a method that may already exist
        in the dictionary (eg items=['a','b','keys'] is addressed.
        """
        if attr in self._data.keys():
            if hasattr(self._data, attr):
                # Ambiguous: the name is both an item key and a dict method.
                raise AttributeError('"%s attribute" found in both the items\
                and as a method of the underlying dictionary object.'%(attr))
            else:
                return self[attr]
        return getattr(self._data, attr)

    # Attributes deferred to self.data /dictionary
    def __len__(self):
        return self._data.__len__()

    def __iter__(self):
        return self._data.__iter__()

    def __reversed__(self):
        return self._data.__reversed__()

    def __contains__(self):
        # NOTE(review): missing the membership argument — should be
        # __contains__(self, item); as written, `x in stack` raises
        # TypeError. Confirm and fix upstream.
        return self._data.__contains__()

    def as_3d(self):
        """ Return 3d structure of data.  Default is panel."""
        # NOTE(review): `raise` here looks like a typo for `return` —
        # raising a Panel instance is almost certainly unintended.
        raise Panel(data=self._data)

        ### Data types without labels
    #Is this realy necessary?  See pyparty.ParticleManger for possibly more consistent implementation
    def get_all(self, attr, astype=tuple):
        """Generator/tuple etc.. of (item, attribute) pairs. """
        return put._parse_generator(
            ((item[0], getattr(item[1], attr)) for item in self.items()), astype)

    def _get_unique(self, attr):
        """ Inspects Stack itemwise for an attribute for unique values.  If
        non-unique value for the attributes are found, returns "mixed". """
        unique = set(self.get_all(attr, astype=dict).values())
        if len(unique) > 1:
            return 'mixed'
        else:
            return tuple(unique)[0] #set doesn't support indexing

    def set_all(self, attr, val, inplace=False):
        """ Set attributes itemwise.  If not inplace, returns new instance
        of self"""
        if inplace:
            for (key, item) in self.items():
                try:
                    setattr(item, attr, val)
                except Exception as E:
                    raise Exception('Could not set %s in "%s".  Received the following \
 exception:\n "%s"'%(attr, key, E))
        else:
            out=deepcopy(self._data) #DEEPCOPY
            for item in out:
                setattr(out[item], attr, val)
            return self.__class__(out)

    def apply(self, func, *args, **kwargs):
        """ Applies a user-passed function, or calls an instance method
        itemwise.

        Parameters:
        -----------
        func: str or function
            If string, must correspond to a method on the object stored
            itemwise in the stack.  If a function, appliked itemwise to
            objects stored.

        inplace: False
            Special kwarg.  If true, self._data modified inplace, otherwise
            new specstack is returned.

        *args, **kwargs: func arguments.

        Returns:
        --------
        If not inplace, returns SpecStack after itemwise application.
        """
        inplace=kwargs.pop('inplace', False)
        # String dispatch: call the named method on every stored object.
        if isinstance(func, basestring):
            if inplace:
                for item in self:
                    self[item] = getattr(self[item], func)(*args, **kwargs)
            else:
                return self.__class__(OrderedDict([(k, getattr(v, func)(*args, \
                    **kwargs)) for k,v in self.items()]))
        # function, numpyfunction etc...
        else:
            if inplace:
                for item in self:
                    self[item] = self[item].apply(func)(*args, **kwargs)
            else:
                return self.__class__(OrderedDict([(k, v.apply(func, *args, \
                    **kwargs)) for k,v in self.items()]))

    @property
    def full_name(self):
        """ Timespectra:name or Timespectra:unnamed.  Useful for scripts mostly """
        # NOTE(review): `outname` falls back to 'unnamed' but is never used —
        # the return uses self.name directly; confirm intended behaviour.
        outname = getattr(self, 'name', 'unnamed')
        return '%s:%s' % (self.__class__.__name__, self.name)
class MatchmakerQueue:
    """A single named matchmaking queue of pending ladder searches.

    Maps each searching player to their active ``Search`` object, in order
    of arrival (``OrderedDict``).

    NOTE(review): ``self._logger`` is not defined in this class — presumably
    a class-level logger attribute set elsewhere (mixin/decorator); confirm
    before relying on it.
    """

    def __init__(self, queue_name: str, player_service: "PlayerService", game_service: "GameService"):
        self.player_service = player_service
        self.game_service = game_service
        self.queue_name = queue_name
        # Name of the rating attribute this queue matches players on.
        self.rating_prop = 'ladder_rating'
        # player -> Search, insertion-ordered.
        self.queue = OrderedDict()
        self._logger.debug("MatchmakerQueue initialized for %s", queue_name)

    def push(self, search: Search):
        """Enqueue ``search`` keyed by its player.

        A player can hold at most one queued search: pushing again replaces
        the previous entry.

        :param search: the Search to enqueue
        :return: None
        """
        self.queue[search.player] = search

    def match(self, s1: Search, s2: Search):
        """Mark the given two searches as matched.

        Does nothing and returns ``False`` if either search is already
        matched or cancelled. Otherwise marks both searches as matched with
        each other, removes both players from the queue, flags the queue
        dirty and schedules the ladder game.

        :param s1: first Search
        :param s2: second Search
        :return: True if the pair was matched, False otherwise
        """
        if (s1.is_matched or s2.is_matched) or (s1.is_cancelled or s2.is_cancelled):
            return False
        s1.match(s2)
        s2.match(s1)
        if s1.player in self.queue:
            del self.queue[s1.player]
        if s2.player in self.queue:
            del self.queue[s2.player]
        self.game_service.mark_dirty(self)
        # Fire-and-forget: the game start runs asynchronously on the loop.
        asyncio.ensure_future(self.game_service.ladder_service.start_game(s1.player, s2.player))
        return True

    def __len__(self):
        return self.queue.__len__()

    def to_dict(self):
        """
        Return a fuzzy representation of the searches currently in the queue
        """
        return {
            'queue_name': self.queue_name,
            'boundary_80s': [search.boundary_80 for player, search in self.queue.items()],
            'boundary_75s': [search.boundary_75 for player, search in self.queue.items()]
        }

    def __repr__(self):
        return repr(self.queue)

    async def search(self, player, start_time=None, search=None):
        """
        Search for a match.

        If a suitable match is found, immediately calls on_matched_with on
        both players. Otherwise, puts a search object into the Queue and
        awaits completion.

        :param player: Player to search for a matchup for
        """
        search = search or Search(player, start_time)
        with server.stats.timer('matchmaker.search'):
            try:
                self._logger.debug("Searching for matchup for %s", player)
                # Iterate over a copy: self.match() deletes queue entries.
                for opponent, opponent_search in self.queue.copy().items():
                    if opponent == player:
                        continue
                    quality = search.quality_with(opponent)
                    threshold = search.match_threshold
                    self._logger.debug("Game quality between %s and %s: %f (threshold: %f)",
                                       player, opponent, quality, threshold)
                    if quality >= threshold:
                        if self.match(search, opponent_search):
                            return
                self._logger.debug("Found nobody searching, pushing to queue: %s", search)
                self.queue[player] = search
                self.game_service.mark_dirty(self)
                await search.await_match()
                self._logger.debug("Search complete: %s", search)
            except CancelledError:
                pass
            finally:
                # If the queue was cancelled, or some other error occured,
                # make sure to clean up.
                self.game_service.mark_dirty(self)
                if player in self.queue:
                    del self.queue[player]
class DotMap(OrderedDict):
    """An ordered mapping whose keys are also reachable as attributes
    (``d.a`` is ``d['a']``). When ``_dynamic`` is true, accessing a missing
    key auto-creates a nested empty DotMap.

    Data is stored in the internal ``self._map`` OrderedDict; the
    OrderedDict base class itself is unused storage-wise.

    NOTE(review): the ``LORETO`` flag and the helpers ``DictToList`` /
    ``PrintDictionaryTree`` are defined elsewhere in this project; the
    conditional methods below only exist when ``LORETO`` is truthy at class
    definition time.
    """

    def __init__(self, *args, **kwargs):
        self._map = OrderedDict()
        # Setting this to False breaks the default test suite, which expects
        # dynamic child creation.  (comment translated from Italian)
        self._dynamic = True
        # ===================================
        if LORETO:
            global MY_DICT_TYPES  # global var used by the class helpers
            self._dynamic = False
            MY_DICT_TYPES = [dict, DotMap]  # by Loreto (DEFAULT dictionary)
        # ===================================
        if kwargs:
            if '_dynamic' in kwargs:
                self._dynamic = kwargs['_dynamic']
        if args:
            d = args[0]
            if isinstance(d, dict):
                for k, v in self.__call_items(d):
                    # Recursively wrap plain dicts (and dicts inside lists).
                    if type(v) is dict:
                        v = DotMap(v, _dynamic=self._dynamic)
                    if type(v) is list:
                        l = []
                        for i in v:
                            n = i
                            if type(i) is dict:
                                n = DotMap(i, _dynamic=self._dynamic)
                            l.append(n)
                        v = l
                    self._map[k] = v
        if kwargs:
            for k, v in self.__call_items(kwargs):
                # FIX: was `if k is not '_dynamic'` — identity comparison on a
                # string literal; use equality.
                if k != '_dynamic':
                    self._map[k] = v

    def __call_items(self, obj):
        # Prefer the py2 iterator protocol when the object provides it.
        if hasattr(obj, 'iteritems') and ismethod(getattr(obj, 'iteritems')):
            return obj.iteritems()
        else:
            return obj.items()

    def items(self):
        return self.iteritems()

    def iteritems(self):
        return self.__call_items(self._map)

    def __iter__(self):
        return self._map.__iter__()

    def next(self):
        # NOTE(review): OrderedDict has no .next(); calling this raises
        # AttributeError on both py2 and py3. Kept for interface parity.
        return self._map.next()

    def __setitem__(self, k, v):
        self._map[k] = v

    def __getitem__(self, k):
        if k not in self._map and self._dynamic and k != '_ipython_canary_method_should_not_exist_':
            # automatically extend to new DotMap
            self[k] = DotMap()
        return self._map[k]

    def __setattr__(self, k, v):
        if k in {'_map', '_dynamic', '_ipython_canary_method_should_not_exist_'}:
            super(DotMap, self).__setattr__(k, v)
        else:
            self[k] = v

    def __getattr__(self, k):
        # FIX: was `if k == {...}` — comparing a string against a set is
        # always False; mirror __setattr__ and use membership.
        if k in {'_map', '_dynamic', '_ipython_canary_method_should_not_exist_'}:
            super(DotMap, self).__getattr__(k)
        else:
            return self[k]

    def __delattr__(self, key):
        return self._map.__delitem__(key)

    def __contains__(self, k):
        return self._map.__contains__(k)

    def __str__(self):
        items = []
        for k, v in self.__call_items(self._map):
            # bizarre recursive assignment situation (why someone would do
            # this is beyond me)
            if id(v) == id(self):
                items.append('{0}=DotMap(...)'.format(k))
            else:
                items.append('{0}={1}'.format(k, repr(v)))
        out = 'DotMap({0})'.format(', '.join(items))
        return out

    def __repr__(self):
        return str(self)

    def toDict(self):
        """Return a plain-dict deep conversion of this DotMap."""
        d = {}
        for k, v in self.items():
            if type(v) is DotMap:
                # bizarre recursive assignment support
                if id(v) == id(self):
                    v = d
                else:
                    v = v.toDict()
            elif type(v) is list:
                l = []
                for i in v:
                    n = i
                    if type(i) is DotMap:
                        n = i.toDict()
                    l.append(n)
                v = l
            d[k] = v
        return d

    def pprint(self):
        pprint(self.toDict())

    # ===================================
    if LORETO:
        # MY_DICT_TYPES = [dict, DotMap]
        def Ptr(self, listOfQualifiers, create=False):
            """Walk nested keys; optionally create missing levels."""
            ptr = self
            for item in listOfQualifiers:
                if item in ptr:
                    ptr = ptr[item]
                else:
                    if create:
                        ptr[item] = DotMap()
                        ptr = ptr[item]
                    else:
                        return None
            return ptr

        def KeyTree(self, fPRINT=False):
            return DictToList.KeyTree(self, myDictTYPES=MY_DICT_TYPES, fPRINT=fPRINT)

        def KeyList(self):
            return DictToList.KeyList(self, myDictTYPES=MY_DICT_TYPES)

        def PrintTree(self, fEXIT=False, MaxLevel=10, header=None, printTYPE='LTKV', stackLevel=1):
            PrintDictionaryTree.PrintDictionary(self, myDictTYPES=MY_DICT_TYPES,
                                                printTYPE=printTYPE, fEXIT=fEXIT,
                                                MaxLevel=MaxLevel, header=header,
                                                stackLevel=stackLevel + 1)

        printDict = PrintTree
        printTree = PrintTree

        def GetValue(self, listOfQualifiers=None, fPRINT=False):
            # FIX: None sentinel instead of a shared mutable [] default.
            if listOfQualifiers is None:
                listOfQualifiers = []
            return DictToList.getValue(self, listOfQualifiers=listOfQualifiers,
                                       myDictTYPES=MY_DICT_TYPES, fPRINT=fPRINT)
    # ===================================

    def empty(self):
        return (not any(self))

    # proper dict subclassing
    def values(self):
        return self._map.values()

    # ipython support
    def __dir__(self):
        return self.keys()

    @classmethod
    def parseOther(self, other):
        if type(other) is DotMap:
            return other._map
        else:
            return other

    def __cmp__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__cmp__(other)

    def __eq__(self, other):
        other = DotMap.parseOther(other)
        if not isinstance(other, dict):
            return False
        return self._map.__eq__(other)

    def __ge__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ge__(other)

    def __gt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__gt__(other)

    def __le__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__le__(other)

    def __lt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__lt__(other)

    def __ne__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ne__(other)

    def __delitem__(self, key):
        return self._map.__delitem__(key)

    def __len__(self):
        return self._map.__len__()

    def clear(self):
        self._map.clear()

    def copy(self):
        return DotMap(self.toDict())

    def get(self, key, default=None):
        return self._map.get(key, default)

    def has_key(self, key):
        return key in self._map

    def iterkeys(self):
        return self._map.iterkeys()

    def itervalues(self):
        return self._map.itervalues()

    def keys(self):
        return self._map.keys()

    def pop(self, key, default=None):
        return self._map.pop(key, default)

    def popitem(self):
        return self._map.popitem()

    def setdefault(self, key, default=None):
        # FIX: return the stored value like dict.setdefault (the original
        # silently dropped it).
        return self._map.setdefault(key, default)

    def update(self, *args, **kwargs):
        if len(args) != 0:
            self._map.update(*args)
        self._map.update(kwargs)

    def viewitems(self):
        return self._map.viewitems()

    def viewkeys(self):
        return self._map.viewkeys()

    def viewvalues(self):
        return self._map.viewvalues()

    @classmethod
    def fromkeys(cls, seq, value=None):
        d = DotMap()
        d._map = OrderedDict.fromkeys(seq, value)
        return d

    def __getstate__(self):
        return self.__dict__

    def __setstate__(self, d):
        self.__dict__.update(d)
class EntityCollection(object):
    """
    EntityCollection is a wrapper class around ordered dictionary collection of type OrderedDict.
    It is created specifically to collect Entity class instances,
    Each Entity instance has unique segment path value, which is used as a key in the dictionary.
    """

    def __init__(self, *entities):
        # FIX: initialize the logger *before* appending. append() reports
        # invalid input through self.logger, so with the original ordering
        # a bad constructor argument raised AttributeError ("no attribute
        # 'logger'") instead of the intended YInvalidArgumentError.
        self.logger = logging.getLogger("ydk.types.EntityCollection")
        self._entity_map = OrderedDict()
        for entity in entities:
            self.append(entity)

    def __eq__(self, other):
        # Collections compare equal when their key->entity maps are equal.
        if not isinstance(other, EntityCollection):
            return False
        return self._entity_map.__eq__(other._entity_map)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __len__(self):
        return self._entity_map.__len__()

    def _key(self, entity):
        # The entity's segment path is its unique dictionary key.
        return entity.path()

    def append(self, entities):
        """
        Adds new elements to the end of the dictionary. Allowed entries:
          - instance of Entity class
          - list of Entity class instances
        """
        if entities is None:
            self._log_error_and_raise_exception(
                "Cannot add None object to the EntityCollection",
                YInvalidArgumentError)
        elif isinstance(entities, Entity):
            key = self._key(entities)
            self._entity_map[key] = entities
        elif isinstance(entities, list):
            for entity in entities:
                if isinstance(entity, Entity):
                    key = self._key(entity)
                    self._entity_map[key] = entity
                else:
                    msg = "Argument %s is not supported by EntityCollection class; data ignored" % type(
                        entity)
                    self._log_error_and_raise_exception(
                        msg, YInvalidArgumentError)
        else:
            msg = "Argument %s is not supported by EntityCollection class; data ignored" % type(
                entities)
            self._log_error_and_raise_exception(msg, YInvalidArgumentError)

    def _log_error_and_raise_exception(self, msg, exception_class):
        # Log the message, then raise the requested exception type.
        self.logger.error(msg)
        raise exception_class(msg)

    def entities(self):
        """
        Returns list of all entities in the collection.
        If collection is empty, it returns an empty list.
        """
        return list(self._entity_map.values())

    def keys(self):
        """
        Returns list of keys for the collection entities.
        If collection is empty, it returns an empty list.
        """
        return list(self._entity_map.keys())

    def has_key(self, key):
        # Goes through self.keys() (not _entity_map) on purpose: subclasses
        # (YList) override keys() to flush their pending cache first.
        return key in self.keys()

    def get(self, item):
        return self.__getitem__(item)

    def __getitem__(self, item):
        """
        Returns entity store in the collection. Parameter 'item' could be:
          - a type of int (ordered number of entity)
          - type of str (segment path of entity)
          - instance of Entity class
        Returns None when the item is not found.
        """
        entity = None
        if isinstance(item, int):
            if 0 <= item < len(self):
                entity = self.entities()[item]
        elif isinstance(item, str):
            if item in self.keys():
                entity = self._entity_map[item]
        elif isinstance(item, Entity):
            key = self._key(item)
            if key in self.keys():
                entity = self._entity_map[key]
        else:
            msg = "Argument %s is not supported by EntityCollection class; data ignored" % type(
                item)
            self._log_error_and_raise_exception(msg, YInvalidArgumentError)
        return entity

    def clear(self):
        """Deletes all the members of collection"""
        self._entity_map.clear()

    def pop(self, item=None):
        """
        Deletes collection item. Parameter 'item' could be:
          - type of int (ordered number of entity)
          - type of str (segment path of entity)
          - instance of Entity class
        Returns entity of deleted instance or None if item is not found.
        """
        entity = None
        if len(self) == 0:
            pass
        elif item is None:
            # No selector: remove the last-inserted entity.
            key, entity = self._entity_map.popitem()
        elif isinstance(item, int):
            entity = self.__getitem__(item)
            if entity is not None:
                key = self._key(entity)
                entity = self._entity_map.pop(key)
        elif isinstance(item, str):
            if item in self.keys():
                entity = self._entity_map.pop(item)
        elif isinstance(item, Entity):
            key = self._key(item)
            if key in self.keys():
                entity = self._entity_map.pop(key)
        return entity

    def __delitem__(self, item):
        return self.pop(item)

    def __iter__(self):
        return iter(self.entities())

    def __str__(self):
        ent_strs = list()
        for entity in self.entities():
            ent_strs.append(format(entity))
        return "Entities in {}: {}".format(self.__class__.__name__, ent_strs)
class TestNodeSet(ExitStack):
    """A context-managed set of test nodes, keyed by node name.

    Nodes are entered into the ExitStack so they are stopped/cleaned up when
    the set exits. Each node is also exposed as an attribute of the set
    (``nodeset.Alpha``), which is fine unless a node name shadows a member.

    NOTE(review): helpers such as genNodeNames/genNodeReg/error,
    PNodeConfigHelper and the key-sharing functions are defined elsewhere in
    the test package; only behavior visible here is documented.
    """

    def __init__(self,
                 config,
                 names: Iterable[str] = None,
                 count: int = None,
                 nodeReg=None,
                 tmpdir=None,
                 keyshare=True,
                 primaryDecider=None,
                 pluginPaths: Iterable[str] = None,
                 testNodeClass=TestNode):
        super().__init__()
        self.tmpdir = tmpdir
        assert config is not None
        self.config = config
        self.keyshare = keyshare
        self.primaryDecider = primaryDecider
        self.pluginPaths = pluginPaths
        self.testNodeClass = testNodeClass
        self.nodes = OrderedDict()  # type: Dict[str, TestNode]
        # Can use just self.nodes rather than maintaining a separate dictionary
        # but then have to pluck attributes from the `self.nodes` so keeping
        # it simple at the cost of extra memory; it's test code, so not a big
        # deal
        if nodeReg:
            self.nodeReg = nodeReg
        else:
            # Exactly one of `names` / `count` must be supplied when no
            # registry is given.
            nodeNames = (names if names is not None and count is None
                         else genNodeNames(count) if count is not None
                         else error("only one of either names or count is required"))
            self.nodeReg = genNodeReg(
                names=nodeNames)  # type: Dict[str, NodeDetail]
        for name in self.nodeReg.keys():
            self.addNode(name)
        # The following lets us access the nodes by name as attributes of the
        # NodeSet. It's not a problem unless a node name shadows a member.
        self.__dict__.update(self.nodes)

    def addNode(self, name: str) -> TestNode:
        """Create, key-share and register the node named ``name`` from the
        node registry; returns the created node."""
        if name in self.nodes:
            error("{} already added".format(name))
        assert name in self.nodeReg
        ha, cliname, cliha = self.nodeReg[name]
        config_helper = PNodeConfigHelper(name, self.config,
                                          chroot=self.tmpdir)
        seed = randomSeed()
        if self.keyshare:
            # Learn the already-registered nodes' keys before starting.
            learnKeysFromOthers(config_helper.keys_dir, name,
                                self.nodes.values())
        testNodeClass = self.testNodeClass
        # enter_context ties the node's lifetime to this ExitStack.
        node = self.enter_context(
            testNodeClass(name=name,
                          ha=ha,
                          cliname=cliname,
                          cliha=cliha,
                          config_helper=config_helper,
                          primaryDecider=self.primaryDecider,
                          pluginPaths=self.pluginPaths,
                          seed=seed))
        if self.keyshare:
            tellKeysToOthers(node, self.nodes.values())
        self.nodes[name] = node
        self.__dict__[name] = node
        return node

    def removeNode(self, name):
        """Stop the node and drop it from the set (and attribute access)."""
        self.nodes[name].stop()
        del self.nodes[name]
        del self.__dict__[name]
        # del self.nodeRegistry[name]
        # for node in self:
        #     node.removeNodeFromRegistry(name)

    def __iter__(self) -> Iterator[TestNode]:
        return self.nodes.values().__iter__()

    def __getitem__(self, key) -> Optional[TestNode]:
        # Lookup by name first, then by insertion index; None if neither.
        if key in self.nodes:
            return self.nodes[key]
        elif isinstance(key, int):
            return list(self.nodes.values())[key]
        else:
            return None

    def __len__(self):
        return self.nodes.__len__()

    @property
    def nodeNames(self):
        # Names in sorted (not insertion) order.
        return sorted(self.nodes.keys())

    @property
    def nodes_by_rank(self):
        # Nodes ordered by their `rank` attribute.
        return [t[1] for t in sorted([(node.rank, node)
                                      for node in self.nodes.values()],
                                     key=operator.itemgetter(0))]

    @property
    def f(self):
        # Maximum number of faulty nodes tolerated for the current set size.
        return getMaxFailures(len(self.nodes))

    def getNode(self, node: NodeRef) -> TestNode:
        """Resolve a node reference (node object or name) to a node."""
        return node if isinstance(node, Node) \
            else self.nodes.get(node) if isinstance(node, str) \
            else error("Expected a node or node name")

    @staticmethod
    def getNodeName(node: NodeRef) -> str:
        """Resolve a node reference (node object or name) to its name."""
        return node if isinstance(node, str) \
            else node.name if isinstance(node, Node) \
            else error("Expected a node or node name")

    def connect(self, fromNode: NodeRef, toNode: NodeRef):
        """Connect `fromNode`'s stack to `toNode`'s nodestack address."""
        fr = self.getNode(fromNode)
        to = self.getNode(toNode)
        fr.connect(to.nodestack.ha)

    def connectAll(self):
        """Connect every pair of nodes in the set."""
        for c in combinations(self.nodes.keys(), 2):
            print("connecting {} to {}".format(*c))
            self.connect(*c)

    def getLastMsgReceived(self, node: NodeRef, method: str = None) -> Tuple:
        return getLastMsgReceivedForNode(self.getNode(node), method)

    def getAllMsgReceived(self, node: NodeRef, method: str = None) -> List:
        return getAllMsgReceivedForNode(self.getNode(node), method)
class BestPracticeWarning(collections.MutableMapping, base.ValidationError):
    """Represents a best practice warning. These are built within best
    practice rule checking methods and attached to
    :class:`BestPracticeWarningCollection` instances.

    Note:
        This class acts like a dictionary and contains the following keys
        at a minimum:

        * ``'id'``: The id of a node associated with the warning.
        * ``'idref'``: The idref of a node associated with the warning.
        * ``'line'``: The line number of the offending node.
        * ``'message'``: A message associated with the warning.
        * ``'tag'``: The lxml tag for the offending node.

        These keys can be retrieved via the :attr:`core_keys` property.
        Any additional keys attached by callers are reachable through
        :attr:`other_keys`.

    Args:
        node: The ``lxml._Element`` node associated with this warning.
        message: A message for this warning.

    """
    def __init__(self, node, message=None):
        base.ValidationError.__init__(self)

        self._inner = OrderedDict()
        self._node = node

        # Seed the core keys; the backing OrderedDict keeps this insertion
        # order for iteration.
        self['line'] = node.sourceline
        self['message'] = message
        self['id'] = node.attrib.get('id')
        self['idref'] = node.attrib.get('idref')
        self['tag'] = node.tag

    def __unicode__(self):
        return unicode(self.message)

    def __str__(self):
        return unicode(self).encode("utf-8")

    def __getitem__(self, key):
        return self._inner[key]

    def __delitem__(self, key):
        del self._inner[key]

    def __setitem__(self, key, value):
        self._inner[key] = value

    def __len__(self):
        return len(self._inner)

    def __iter__(self):
        return iter(self._inner)

    @property
    def line(self):
        """Returns the line number of the warning node in the input
        document.

        """
        return self['line']

    @property
    def message(self):
        """Returns a message associated with the warning. May be ``None``.

        """
        return self['message']

    @property
    def core_keys(self):
        """Returns the ``tuple`` of keys guaranteed to exist on every
        instance: ``('id', 'idref', 'line', 'tag', 'message')``.

        """
        return ('id', 'idref', 'line', 'tag', 'message')

    @property
    def other_keys(self):
        """Returns a ``tuple`` of keys attached to this instance beyond the
        :attr:`core_keys`.

        """
        core = self.core_keys
        return tuple(key for key in self.iterkeys() if key not in core)

    def as_dict(self):
        """Returns a plain-``dict`` representation of this instance.

        Implemented for consistency across other validation error types;
        the class already behaves like a mapping via
        :class:`collections.MutableMapping`.

        """
        return dict(self.iteritems())
class DotMap(OrderedDict):
    """An ordered mapping whose keys are also reachable as attributes
    (``d.a`` is ``d['a']``). Accessing a missing key auto-creates a nested
    empty DotMap. Data lives in the internal ``self._map`` OrderedDict.
    """

    def __init__(self, *args, **kwargs):
        self._map = OrderedDict()
        if args:
            d = args[0]
            # FIX: accept any dict subclass (OrderedDict, DotMap, ...); the
            # original `type(d) is dict` silently ignored them. This matches
            # the sibling DotMap implementation in this file.
            if isinstance(d, dict):
                for k, v in self.__call_items(d):
                    if type(v) is dict:
                        v = DotMap(v)
                    self._map[k] = v
        if kwargs:
            for k, v in self.__call_items(kwargs):
                self._map[k] = v

    def __call_items(self, obj):
        # Prefer the py2 iterator protocol when the object provides it.
        if hasattr(obj, 'iteritems') and ismethod(getattr(obj, 'iteritems')):
            return obj.iteritems()
        else:
            return obj.items()

    def items(self):
        return self.iteritems()

    def iteritems(self):
        return self.__call_items(self._map)

    def __iter__(self):
        return self._map.__iter__()

    def next(self):
        # NOTE(review): OrderedDict has no .next(); calling this raises
        # AttributeError. Kept as-is for interface parity.
        return self._map.next()

    def __setitem__(self, k, v):
        self._map[k] = v

    def __getitem__(self, k):
        if k not in self._map:
            # automatically extend to new DotMap
            self[k] = DotMap()
        return self._map[k]

    def __setattr__(self, k, v):
        if k == '_map':
            super(DotMap, self).__setattr__(k, v)
        else:
            self[k] = v

    def __getattr__(self, k):
        if k == '_map':
            super(DotMap, self).__getattr__(k)
        else:
            return self[k]

    def __delattr__(self, key):
        return self._map.__delitem__(key)

    def __contains__(self, k):
        return self._map.__contains__(k)

    def __str__(self):
        items = []
        for k, v in self.__call_items(self._map):
            items.append('{0}={1}'.format(k, repr(v)))
        out = 'DotMap({0})'.format(', '.join(items))
        return out

    def __repr__(self):
        return str(self)

    def toDict(self):
        """Return a plain-dict deep conversion of this DotMap."""
        d = {}
        for k, v in self.items():
            if type(v) is DotMap:
                v = v.toDict()
            d[k] = v
        return d

    def pprint(self):
        pprint(self.toDict())

    # proper dict subclassing
    def values(self):
        return self._map.values()

    @classmethod
    def parseOther(self, other):
        # Unwrap a DotMap operand so comparisons run map-vs-map.
        if type(other) is DotMap:
            return other._map
        else:
            return other

    def __cmp__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__cmp__(other)

    def __eq__(self, other):
        other = DotMap.parseOther(other)
        if not isinstance(other, dict):
            return False
        return self._map.__eq__(other)

    def __ge__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ge__(other)

    def __gt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__gt__(other)

    def __le__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__le__(other)

    def __lt__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__lt__(other)

    def __ne__(self, other):
        other = DotMap.parseOther(other)
        return self._map.__ne__(other)

    def __delitem__(self, key):
        return self._map.__delitem__(key)

    def __len__(self):
        return self._map.__len__()

    def clear(self):
        self._map.clear()

    def copy(self):
        # FIX: return an independent copy. The original returned `self`, so
        # "copies" aliased the same data and mutations leaked back. The
        # sibling DotMap in this file copies via toDict() the same way.
        return DotMap(self.toDict())

    def get(self, key, default=None):
        return self._map.get(key, default)

    def has_key(self, key):
        return key in self._map

    def iterkeys(self):
        return self._map.iterkeys()

    def itervalues(self):
        return self._map.itervalues()

    def keys(self):
        return self._map.keys()

    def pop(self, key, default=None):
        return self._map.pop(key, default)

    def popitem(self):
        return self._map.popitem()

    def setdefault(self, key, default=None):
        # FIX: return the stored value like dict.setdefault (the original
        # silently dropped it).
        return self._map.setdefault(key, default)

    def update(self, *args, **kwargs):
        if len(args) != 0:
            self._map.update(*args)
        self._map.update(kwargs)

    def viewitems(self):
        return self._map.viewitems()

    def viewkeys(self):
        return self._map.viewkeys()

    def viewvalues(self):
        return self._map.viewvalues()

    @classmethod
    def fromkeys(cls, seq, value=None):
        d = DotMap()
        d._map = OrderedDict.fromkeys(seq, value)
        return d
class YList(EntityCollection):
    """ Represents a list with support for hanging a parent

        All YANG based entity classes that have lists in them use YList
        to represent the list.

        The "list" statement is used to define an interior data node in the
        schema tree. A list node may exist in multiple instances in the data
        tree. Each such instance is known as a list entry. The "list"
        statement takes one argument, which is an identifier, followed by a
        block of sub-statements that holds detailed list information.

        A list entry is uniquely identified by the values of the list's keys,
        if defined. The keys then could be used to get entities from the YList.
    """

    def __init__(self, parent):
        super(YList, self).__init__()
        self.parent = parent
        # Counter used to synthesize keys for entries without key leafs.
        self.counter = 1000000
        # Newly appended entries wait here until their keys are resolved
        # (see _flush_cache).
        self._cache_dict = OrderedDict()

    def __setattr__(self, name, value):
        # Propagate a yfilter assignment to every contained entity.
        if name == 'yfilter' and isinstance(value, _YFilter):
            for e in self:
                e.yfilter = value
        super(YList, self).__setattr__(name, value)

    def _key(self, entity):
        """Compute the map key for `entity` from its YANG list keys; falls
        back to a synthetic counter value when no key is available."""
        key_list = []
        if hasattr(entity, 'ylist_key_names'):
            for key in entity.ylist_key_names:
                if hasattr(entity, key):
                    attr = entity.__dict__[key]
                    if attr is None:
                        # Any unset key invalidates the whole key tuple.
                        key_list = []
                        break
                    key_list.append(attr)
        if len(key_list) == 0:
            key = format(self.counter)
            self.counter += 1
        elif len(key_list) == 1:
            key = key_list[0]
            if not isinstance(key, str):
                key = format(key)
        else:
            key = tuple(key_list)
        return key

    def _flush_cache(self):
        # Move cached entries into the entity map, re-deriving each key in
        # case key leafs were set after append().
        for _ in range(len(self._cache_dict)):
            _, entity = self._cache_dict.popitem(False)
            self._entity_map[self._key(entity)] = entity

    def append(self, entities):
        """Append a single Entity to the list, hanging ``self.parent`` on it.

        Raises YInvalidArgumentError for None or unsupported argument types.
        """
        # FIX: the original assigned `entities.parent = self.parent` before
        # the None/type checks, so append(None) crashed with AttributeError
        # and the guards below were dead code. Validate first, then hang the
        # parent on the accepted entity.
        if entities is None:
            self._log_error_and_raise_exception(
                "Cannot add None object to the YList", YInvalidArgumentError)
        elif isinstance(entities, Entity):
            entities.parent = self.parent
            key = self._key(entities)
            self._cache_dict[key] = entities
        else:
            msg = "Argument %s is not supported by YList class; data ignored" % type(
                entities)
            self._log_error_and_raise_exception(msg, YInvalidArgumentError)

    def extend(self, entity_list):
        """Append every entity in `entity_list`."""
        for entity in entity_list:
            self.append(entity)

    def clear(self):
        """Deletes all the members of collection"""
        self._entity_map.clear()
        self._cache_dict.clear()

    def keys(self):
        self._flush_cache()
        return super(YList, self).keys()

    def entities(self):
        self._flush_cache()
        return super(YList, self).entities()

    def pop(self, item=None):
        self._flush_cache()
        return super(YList, self).pop(item)

    def __getitem__(self, item):
        entity = None
        if isinstance(item, int) and 0 <= item < len(self):
            entity = self.entities()[item]
        elif self.has_key(item):
            entity = self._entity_map[item]
        elif not isinstance(item, str):
            # Non-string keys (e.g. ints used as YANG keys) are stored
            # stringified; may raise KeyError if absent.
            entity = self._entity_map[format(item)]
        return entity

    def __len__(self):
        # Count both flushed and still-cached entries.
        return self._entity_map.__len__() + self._cache_dict.__len__()
class SardanaBuffer(EventGenerator):
    """Buffer for SardanaValue objects. Each value is identified by an unique
    idx and all values are organized based on the order of addition to the
    buffer

    ..todo:: Eliminate the last_chunk - it is not really necessary and just
        consumes memory.
    ..todo:: It is better to fire events in form of a list with tuples of
        idx and value objects.
    """

    def __init__(self, obj=None, name=None, persistent=False, **kwargs):
        """Construct SardanaBuffer object

        :param obj: the object which owns this buffer
        :type obj: obj
        :param name: object name
        :type name: :obj:`str`
        :param persistent: whether values are kept in the buffer until being
            explicitly removed (True) or just until firing the next event
            (False)
        :type persistent: bool
        """
        super(SardanaBuffer, self).__init__(**kwargs)
        # Hold the owner weakly so the buffer does not keep it alive.
        if obj is not None:
            obj = weakref.ref(obj)
        self._obj = obj
        self.name = name or self.__class__.__name__
        self._persistent = persistent
        self._buffer = OrderedDict()
        self._next_idx = 0
        self._last_chunk = None

    def __len__(self):
        return self._buffer.__len__()

    def get_obj(self):
        """Returns the object which owns this buffer

        :return: the object which owns this buffer
        :rtype: obj"""
        return self._get_obj()

    def _get_obj(self):
        # Dereference the weakref; None if the owner was garbage-collected.
        obj = self._obj
        if obj is not None:
            obj = obj()
        return obj

    def get_value(self, idx):
        """Return value of a given index.

        :param idx: index of the value to be returned
        :type idx: int
        :return: the value corresponding to the idx
        :rtype: object
        """
        return self.get_value_obj(idx).value

    def get_value_obj(self, idx):
        """Return the value object of a given index.

        :param idx: index of the value to be returned
        :type idx: int
        :return: the value object corresponding to the idx
        :rtype: SardanaValue
        """
        try:
            return self._buffer[idx]
        except KeyError:
            msg = "value with %s index is not in buffer" % idx
            # Distinguish "already gone" from "not arrived yet".
            if self.next_idx > idx:
                raise LateValueException(msg)
            else:
                raise EarlyValueException(msg)

    def append(self, value, idx=None):
        """Append a single value at the end of the buffer with a given index.

        :param value: value to be appended to the buffer
        :type value: SardanaValue or any object
        :param idx: at which index append the value, None means append at
            the end of the buffer
        :type idx: int
        """
        if idx is None:
            idx = self._next_idx
        self._last_chunk = OrderedDict()
        if not isinstance(value, SardanaValue):
            value = SardanaValue(value)
        self._last_chunk[idx] = value
        if self._persistent:
            self._buffer[idx] = value
        self._next_idx = idx + 1
        self.fire_add_event()

    def extend(self, values, initial_idx=None):
        """Extend buffer with a list of objects assigning them consecutive
        indexes.

        :param values: objects that extend the buffer
        :type values: list<object>
        :param initial_idx: at which index append the first object, the rest
            of them will be assigned the next consecutive indexes, None
            means assign at the end of the buffer
        :type initial_idx: int
        """
        if initial_idx is None:
            initial_idx = self._next_idx
        self._last_chunk = OrderedDict()
        # FIX: pre-seed idx so that an empty `values` no longer raises
        # NameError on the post-loop update; next_idx then stays at
        # initial_idx, i.e. unchanged for the default case.
        idx = initial_idx - 1
        for idx, value in enumerate(values, initial_idx):
            if not isinstance(value, SardanaValue):
                value = SardanaValue(value)
            self._last_chunk[idx] = value
            if self._persistent:
                self._buffer[idx] = value
        self._next_idx = idx + 1
        self.fire_add_event()

    def remove(self, idx):
        """Remove value object of a given index.

        :param idx: index of the value to be removed
        :type idx: int
        :return: the value object corresponding to the idx
        :rtype: object
        """
        try:
            return self._buffer.pop(idx)
        except KeyError:
            msg = "value with %s index is not in buffer" % idx
            raise KeyError(msg)

    def fire_add_event(self, propagate=1):
        """Fires an event to the listeners of the object which owns this
        buffer.

        :param propagate: 0 for not propagating, 1 to propagate,
            2 propagate with priority
        :type propagate: int
        """
        evt_type = EventType(self.name, priority=propagate)
        self.fire_event(evt_type, self.last_chunk)

    def clear(self):
        # NOTE(review): does not reset _last_chunk nor fire an event —
        # listeners keep seeing the pre-clear chunk; confirm intended.
        self._next_idx = 0
        self._buffer = OrderedDict()

    def get_last_chunk(self):
        return self._last_chunk

    def get_next_idx(self):
        return self._next_idx

    def get_persistent(self):
        return self._persistent

    def set_persistent(self, persistent):
        self._persistent = persistent

    # FIX: the doc strings below were passed positionally, landing in the
    # fset/fdel slots of property() (so e.g. `buf.obj = x` raised
    # "'str' object is not callable"); pass them as doc= like the other
    # properties in this class.
    obj = property(get_obj, doc="container object for this buffer")
    persistent = property(get_persistent, set_persistent,
                          doc="flag whether this buffer stores objects "
                              "persistently")
    last_chunk = property(get_last_chunk,
                          doc="chunk with last value(s) added to this buffer")
    next_idx = property(get_next_idx,
                        doc="index that will be automatically assigned to the "
                            "next value added to this buffer (if not "
                            "explicitly assigned by the user)")