class SimpleEventManager:
    """A timeline of (topic, ts, event) tuples dispatched through a PrefixHandler."""

    def __init__(self):
        # Timeline kept ordered by timestamp (element index 1 of each tuple).
        self.events = SortedList(key=lambda v: v[1])
        self.prefix_handler = PrefixHandler()
        # Timestamp of the most recently dispatched event; None before run().
        self.last_ts = None

    def subscribe(self, topic, callback):
        """Register *callback* for events whose topic matches *topic*."""
        self.prefix_handler.add(topic, callback)

    def add_event(self, topic, ts, event, duplicates_allowed=False):
        """Insert an event into the timeline.

        Returns False if an equal event already exists and duplicates are
        disallowed; otherwise returns the inserted (topic, ts, event) tuple.
        """
        if not duplicates_allowed and self.is_event_present(topic, ts, event):
            return False
        self.events.add((topic, ts, event))
        return topic, ts, event

    def is_event_present(self, topic, ts, event, ts_error=1e-5):
        """Return True if an equal event exists within *ts_error* of *ts*.

        Scans outward from the bisect position in both directions, stopping
        once timestamps leave the tolerance window.
        """
        if not self.events:
            return False
        start = self.events.bisect_left([topic, ts, event])
        for step in (1, -1):
            pos = start
            while 0 <= pos < len(self.events):
                candidate = self.events[pos]
                if step == 1 and candidate[1] > ts + ts_error:
                    break
                if step == -1 and candidate[1] < ts - ts_error:
                    break
                if (candidate[0] == topic
                        and abs(candidate[1] - ts) < ts_error
                        and candidate[2] == event):
                    return True
                pos += step
        return False

    def run(self):
        """Drain the timeline in timestamp order, dispatching each event."""
        while self.events:
            topic, ts, event = self.events.pop(0)
            if self.last_ts is not None and ts < self.last_ts:
                logging.warning("Old event was added to the events timeline")
            self.prefix_handler(topic, ts, event)
            self.last_ts = max(ts, self.last_ts or 0)
def counts_matrix(A, tolerance=.01):
    """For every element of *A*, count the elements of *A* (itself included)
    whose value lies within tolerance/2 of it.

    Returns a float array of the same shape as *A*.
    """
    counts = np.zeros_like(A, dtype=float)
    # Keep (index, value) pairs ordered by value so each count is two bisects.
    ordered = SortedList(np.ndenumerate(A), key=lambda item: item[1])
    for (row, col), value in ordered:
        lo = ((None, None), value - (tolerance / 2))
        hi = ((None, None), value + (tolerance / 2))
        counts[row, col] = ordered.bisect_right(hi) - ordered.bisect_left(lo)
    return counts
def find132pattern(self, nums: List[int]) -> bool:
    """Return True if nums contains indices i < j < k with nums[i] < nums[k] < nums[j].

    Fix: the previous version rebuilt ``SortedList(nums[j + 1:])`` on every
    iteration of the loop, making the whole scan O(n^2 log n).  The candidate
    pool for nums[k] is now maintained incrementally with the stdlib ``bisect``
    module (and no longer accidentally contains nums[j] itself — harmless
    before, since nums[j] < nums[j] can never hold, but sloppy).
    """
    import bisect

    n = len(nums)
    if n < 3:
        return False
    left_min = nums[0]  # running minimum of nums[0..j-1], the best nums[i]
    right = sorted(nums[2:])  # sorted candidates for nums[k], k > j
    for j in range(1, n - 1):
        if left_min < nums[j]:
            # Smallest candidate strictly greater than left_min; if even that
            # one is below nums[j], we have a 132 pattern.
            idx = bisect.bisect_right(right, left_min)
            if idx < len(right) and right[idx] < nums[j]:
                return True
        left_min = min(left_min, nums[j])
        # nums[j + 1] becomes the next pivot, so it leaves the "k" pool.
        del right[bisect.bisect_left(right, nums[j + 1])]
    return False
def __init__(self) -> None:
    """Set up an empty collection with default grouping, filtering and sorting."""
    super().__init__()
    self._grouper: Grouper = NoGrouper()
    self._filter: Filter = Filter()
    self._sorter: Sorter = Sorter()
    # Maps a file's location to every FileInfo entry known for it.
    self._location2fileinfo: Dict[Location, List[FileInfo]] = defaultdict(list)
    # All entries, kept ordered by the current sorter's key function.
    self._fileinfos: SortedList[FileInfo] = SortedList(
        key=self._sorter.get_key_func())
def __init__(self, keep_sorted_list_of: Iterable[str] = None):
    """Initialize the frequency tracker.

    Args:
        keep_sorted_list_of: optional iterable of field names; for each one a
            SortedList ordered by that field is maintained in self.sorted_lists.

    Bug fixed: the sort-key lambda previously captured the loop variable
    ``key`` by reference (late binding), so *every* SortedList ended up
    sorting by the last key in ``keep_sorted_list_of``.  Binding the key as
    a default argument freezes the correct key per list.
    """
    self._freq_dict: OrderedDict[T, int] = OrderedDict()
    self._list: Dict[datetime, T] = {}
    self._rlist: Dict[T, datetime] = {}
    self._tot_freq = 0
    self.sorted_lists = {}
    if keep_sorted_list_of:
        for key in keep_sorted_list_of:
            # `_k=key` binds eagerly; a plain closure would see the final
            # value of `key` after the loop finishes.
            self.sorted_lists[key] = SortedList(key=lambda x, _k=key: x[_k])
class PriceLevel(object):
    """All order-book events sitting at one price, with per-side running amounts."""

    def __init__(self, price):
        # side -> total outstanding amount (Decimal) at this price
        self.amounts = {}
        self.price = price
        # Events in chronological order, tie-broken by order id.
        self.e = SortedList(key=lambda e: (e["price_microtimestamp"],
                                           e["microtimestamp"],
                                           e["order_id"]))

    def add(self, e):
        """Record an event and add its amount to the event's side."""
        side = e["side"]
        self.e.add(e)
        self.amounts[side] = self.amounts.get(side, Decimal(0)) + e["amount"]

    def remove(self, e):
        """Drop an event and subtract its amount from the event's side."""
        side = e["side"]
        self.e.remove(e)
        self.amounts[side] -= e["amount"]
        if self.amounts[side] < 1e-5:
            # Near-zero residue: snap to an exact zero, but only once no
            # events remain on that side.
            if not any(ev["side"] == side for ev in self.e):
                self.amounts[side] = Decimal(0)

    def amount(self, side):
        """Outstanding amount for *side*, or None if the side is unknown."""
        return self.amounts.get(side)

    def events(self):
        """All events at this price level, in sorted order."""
        return self.e

    def purge(self):
        '''
        Leaves only non-zeros in self.amounts in order to identify
        the episode when the zero amount will be zeroed out next time.
        '''
        for side in ['b', 's']:
            if self.amounts.get(side) == Decimal(0):
                del self.amounts[side]
def solve(self, table):
    """A*-style search for a solution to *table*, using the Manhattan heuristic.

    Returns {'moves': <reconstructed path>, 'visited': <state count>}; if the
    open set empties or self.timeout is set, returns an empty move list.
    """
    table.heuristic = manhattan_heuristic
    self.timeout = False
    visited = 0
    closed_states = set([])
    came_from = {}  # for path reconstruction
    # g-scores keyed by the string form of a state; str() is used as the
    # state's identity throughout.
    gscores = { str(table): 0 }
    # Max-sorted by negated f-score so .pop() yields the LOWEST f-score.
    open_states = SortedList(key=lambda key: 1000000-key[0])
    open_states.add((0, table))
    while len(open_states) > 0 and not self.timeout:
        current = open_states.pop()[1]
        c = str(current)
        visited += 1
        if current.is_solved():
            return { 'moves': self.reconstruct_path(came_from, c), 'visited': visited }
        closed_states.add(c)
        gscore = gscores[c]
        for block in current.blocks:
            if self.timeout:
                break
            block = current.get_block(block)
            for move in block.available_movements:
                if self.timeout:
                    break
                #neighbor = current.copy()
                neighbor = copy.deepcopy(current)
                neighbor.heuristic = manhattan_heuristic
                neighbor.move_block(block.index, move)
                #simulate_movement(neighbor, block.index, move)
                n = str(neighbor)
                if n in closed_states:
                    continue
                # Every move costs 1.
                tenative_gscore = gscore + 1
                tenative_fscore = tenative_gscore + neighbor.heuristic(neighbor)
                # NOTE(review): linear membership scan of the open set — O(n)
                # per neighbor; assumes state __eq__ matches str() identity.
                if not any(state == neighbor for (fscore, state) in open_states):
                    open_states.add((tenative_fscore, neighbor))
                elif tenative_gscore >= gscores[n]:
                    continue
                # Reached only for newly-opened states or strictly better paths.
                came_from[n] = (c, { 'block': block.index, 'position': move })
                gscores[n] = tenative_gscore
    return { 'moves': [], 'visited': visited }
def lrtest_1dim(sample1, sample2, test_points=None):
    """Run lr_test on growing sorted prefixes of the two paired samples.

    After adding the i-th point of each sample, the test is evaluated when
    *test_points* is None or the current prefix length is in *test_points*.
    Returns the list of test results.
    """
    sorted_a = SortedList()
    sorted_b = SortedList()
    results = []
    for idx, point in enumerate(sample1):
        sorted_a.add(point)
        sorted_b.add(sample2[idx])
        n = len(sorted_a)
        m = len(sorted_b)
        if test_points is None or n in test_points:
            result = lr_test(sorted_a, sorted_b)
            # results.append(result * (n * m) / (m + n))
            results.append(result)
    return results
def test_sorted_list(self):
    """SortedList stays ordered through update() and add()."""
    lst = SortedList()
    self.assertEqual(0, len(lst))
    lst.update([3, 1, 2])
    print("sorted_list = {0}".format(lst))
    self.assertEqual([1, 2, 3], lst)
    lst.add(6)
    lst.add(5)
    self.assertEqual([1, 2, 3, 5, 6], lst)
def __init__(self, price):
    """A single price level: its events plus per-side outstanding amounts."""
    # side -> total outstanding amount at this price
    self.amounts = {}
    self.price = price
    # Events ordered chronologically, tie-broken by order id.
    self.e = SortedList(key=lambda e: (e["price_microtimestamp"], e[
        "microtimestamp"], e["order_id"]))
def __init__(self, error_time):
    """ initialize a new Scheduler """
    # client_id -> item, for O(1) lookup
    self.item_map = dict()
    # Items in sorted order; NOTE(review): assumes items order by expiry
    # time via their comparison methods — confirm against the Item class.
    self.item_list = SortedList()
    # Grace period granted to expired Online items before removal.
    self.error_time = error_time
    # Guards item_map/item_list against the background processor thread.
    self.lock = Lock()
def set_sorter(self, sorter: Sorter) -> None:
    """Install *sorter* and rebuild the file list under its key function.

    Fix: a leftover debug ``print`` is replaced with the module logger used
    by every other method in this file.
    """
    logger.debug("FileCollection.set_sorter: rebuilding fileinfos")
    self._sorter = sorter
    # Re-create the SortedList so every entry is re-keyed by the new sorter.
    self._fileinfos = SortedList(self._fileinfos,
                                 key=self._sorter.get_key_func())
    self.sig_files_reordered.emit()
def __init__(self): self.events = SortedList( key=lambda v: v[1]) # sorted(topic, ts, event)]) self.prefix_handler = PrefixHandler() self.last_ts = None
class FileCollection(QObject):
    """Ordered, filterable, groupable collection of FileInfo entries.

    Mutations are announced through Qt signals so attached views can react.

    Fixes: ``update_fileinfo`` and ``close_file`` had their log-message
    suffixes swapped — the ": KeyError" tag sat on the success (debug) line
    while the actual error line lacked it (compare ``modify_file``, which
    was correct).  ``set_sorter`` used a bare ``print`` instead of the
    module logger.  Dead commented-out code removed.
    """

    # A new file entry has been added
    sig_file_added = pyqtSignal(int, FileInfo)

    # An existing file entry has been removed
    sig_file_removed = pyqtSignal(Location)

    # A file changed on disk
    sig_file_modified = pyqtSignal(FileInfo)

    # New information about a file has become available (thumbnail,
    # metadata, etc.)
    sig_fileinfo_updated = pyqtSignal(FileInfo)

    # File handle was closed and the file is in its final state.
    sig_file_closed = pyqtSignal(FileInfo)

    # The file list has changed completely and needs a reload from scratch
    sig_files_set = pyqtSignal()

    # The file list has been reordered, but otherwise stayed the same
    sig_files_reordered = pyqtSignal()

    # The file list has been filtered, .is_excluded has been
    # set/unset, but otherwise stayed the same
    sig_files_filtered = pyqtSignal()

    # The file list has been grouped, .group has been set
    sig_files_grouped = pyqtSignal()

    def __init__(self) -> None:
        super().__init__()
        self._grouper: Grouper = NoGrouper()
        self._filter: Filter = Filter()
        self._sorter: Sorter = Sorter()
        # Maps a file's location to every FileInfo entry known for it.
        self._location2fileinfo: Dict[Location, List[FileInfo]] = defaultdict(list)
        # All entries, kept ordered by the current sorter's key function.
        self._fileinfos: SortedList[FileInfo] = SortedList(
            key=self._sorter.get_key_func())

    def clear(self) -> None:
        """Drop every entry and signal a full reload."""
        logger.debug("FileCollection.clear")
        self._location2fileinfo.clear()
        self._fileinfos.clear()
        self.sig_files_set.emit()

    def set_fileinfos(self, fileinfos: Iterable[FileInfo]) -> None:
        """Replace the collection's contents with *fileinfos*."""
        logger.debug("FileCollection.set_fileinfos")
        for fi in fileinfos:
            self._location2fileinfo[fi.location()].append(fi)
        self._fileinfos.clear()
        self._fileinfos.update(fileinfos)
        self.sig_files_set.emit()

    def add_fileinfo(self, fi: FileInfo) -> None:
        """Insert a single entry and announce its sorted position."""
        logger.debug("FileCollection.add_fileinfos: %s", fi)
        self._location2fileinfo[fi.location()].append(fi)
        self._fileinfos.add(fi)
        idx = self._fileinfos.index(fi)
        self.sig_file_added.emit(idx, fi)

    def remove_file(self, location: Location) -> None:
        """Remove every entry registered under *location*."""
        if location not in self._location2fileinfo:
            logger.error("FileCollection.remove_file: %s: KeyError", location)
        else:
            logger.debug("FileCollection.remove_file: %s", location)
            fis = self._location2fileinfo[location]
            del self._location2fileinfo[location]
            for fi in fis:
                self._fileinfos.remove(fi)
            self.sig_file_removed.emit(location)

    def modify_file(self, fileinfo: FileInfo) -> None:
        """Replace the entry for a file that changed on disk."""
        try:
            self._replace_fileinfo(fileinfo)
        except KeyError:
            logger.error("FileCollection.modify_file: %s: KeyError", fileinfo)
        else:
            logger.debug("FileCollection.modify_file: %s", fileinfo)
            self.sig_file_modified.emit(fileinfo)

    def update_fileinfo(self, fileinfo: FileInfo) -> None:
        """Replace an entry when new information (thumbnail, metadata) arrives."""
        try:
            self._replace_fileinfo(fileinfo)
        except KeyError:
            # fixed: the ": KeyError" tag belongs on the error path
            logger.error("FileCollection.update_fileinfo: %s: KeyError", fileinfo)
        else:
            logger.debug("FileCollection.update_fileinfo: %s", fileinfo)
            self.sig_fileinfo_updated.emit(fileinfo)

    def close_file(self, fileinfo: FileInfo) -> None:
        """Replace an entry whose file handle was closed (final state)."""
        try:
            self._replace_fileinfo(fileinfo)
        except KeyError:
            # fixed: the ": KeyError" tag belongs on the error path
            logger.error("FileCollection.close_file: %s: KeyError", fileinfo)
        else:
            logger.debug("FileCollection.close_file: %s", fileinfo)
            self.sig_file_closed.emit(fileinfo)

    def get_fileinfos(self) -> Iterator[FileInfo]:
        """Iterate entries in display order, honoring the sorter's reverse flag."""
        if self._sorter.reverse:
            return cast(Iterator[FileInfo], reversed(self._fileinfos))
        else:
            return cast(Iterator[FileInfo], iter(self._fileinfos))

    def get_fileinfo(self, location: Location) -> Optional[FileInfo]:
        """First entry registered under *location*, or None."""
        if location not in self._location2fileinfo:
            return None
        else:
            fis = self._location2fileinfo[location]
            return fis[0]  # FIXME: this is fishy

    def index(self, fileinfo: FileInfo):
        return self._fileinfos.index(fileinfo)

    def __getitem__(self, key):
        return self._fileinfos[key]

    def __len__(self) -> int:
        return len(self._fileinfos)

    def set_grouper(self, grouper: Grouper) -> None:
        """Apply *grouper* to every entry and signal the regrouping."""
        self._grouper = grouper
        for fi in self._fileinfos:
            self._grouper(fi)
        self.sig_files_grouped.emit()

    def set_filter(self, filter: Filter) -> None:
        """Apply *filter* to every entry and signal the refiltering."""
        self._filter = filter
        for fi in self._fileinfos:
            self._filter.apply(fi)
        self.sig_files_filtered.emit()

    def set_sorter(self, sorter: Sorter) -> None:
        """Install *sorter* and rebuild the file list under its key function."""
        logger.debug("FileCollection.set_sorter: rebuilding fileinfos")
        self._sorter = sorter
        self._fileinfos = SortedList(self._fileinfos,
                                     key=self._sorter.get_key_func())
        self.sig_files_reordered.emit()

    def _replace_fileinfo(self, fileinfo: FileInfo) -> None:
        """Swap all entries at fileinfo's location for *fileinfo* itself.

        Raises KeyError if the location is unknown; no-op if the exact
        object is already present.
        """
        if fileinfo in self._fileinfos:
            return

        location = fileinfo.location()
        if location not in self._location2fileinfo:
            raise KeyError("location not in location2fileinfo: {}".format(location))
        else:
            fis = self._location2fileinfo[location]
            self._location2fileinfo[location] = [fileinfo]
            for fi in fis:
                self._fileinfos.remove(fi)
            self._fileinfos.add(fileinfo)

    def save_as(self, filename: str) -> None:
        """Write the absolute path of every entry to *filename*, one per line."""
        with open(filename, "w") as fout:
            for fi in self._fileinfos:
                fout.write(fi.abspath())
                fout.write("\n")
def generate_overlaps(seqs, gzip_compressed=True):
    """ Return a list of overlaps

    Args:
        seqs: list of Seq objects (4-tuple (read_id, sequence, reference_start, reference_end)
        gzip_compressed (bool): True if sequence field in seqs tuples is compressed

    Returns:
        list of overlap objects. Overlaps are named tuples with the following fields:
            * query_sequence_name
            * query_sequence_length
            * query_start
            * query_end
            * relative_strand
            * target_sequence_name
            * target_sequence_length
            * target_start
            * target_end
            * num_residue_matches
            * alignment_block_length
            * mapping_quality
    """
    def startsort(read):
        # Sort key: reference_start of the read tuple.
        return read[2]

    overlaps = []
    sorted_seqs = SortedList(seqs, key=startsort)
    # reads are now sorted by their start position
    for query_index, query in enumerate(sorted_seqs):
        query_name = query[0]
        # Sequence length requires decompressing when stored gzip-compressed.
        if gzip_compressed:
            query_seq_len = len(str(gzip.decompress(query[1]), "utf-8"))
        else:
            query_seq_len = len(query[1])
        query_reference_start = query[2]
        query_reference_end = query[3]
        # Only later-starting reads can overlap the query's tail; earlier
        # pairs were already emitted with roles swapped.
        for target_index, target in enumerate(sorted_seqs[query_index+1:]):
            target_reference_start = target[2]
            target_reference_end = target[3]
            if query_reference_end > target_reference_start:
                # overlap found
                # Calculate the overlap coordinates:
                if gzip_compressed:
                    target_seq_len = len(str(gzip.decompress(target[1]), "utf-8"))
                else:
                    target_seq_len = len(target[1])
                # NOTE(review): coordinate math assumes read length equals its
                # reference span (no indels/clipping) — confirm upstream.
                query_start = target_reference_start - query_reference_start
                target_start = 0
                if target_reference_end > query_reference_end:
                    # Target extends past the query: overlap runs to query end.
                    query_end = query_seq_len
                    target_end = query_reference_end - target_reference_start
                else:
                    # Target contained within the query's span.
                    target_end = target_seq_len
                    query_end = query_start + target_seq_len
                target_name = target[0]
                overlap = pafio.Overlap(query_sequence_name=query_name,
                                        query_sequence_length=query_seq_len,
                                        query_start=query_start,
                                        query_end=query_end,
                                        relative_strand="+",  # temporary
                                        target_sequence_name=target_name,
                                        target_sequence_length=target_seq_len,
                                        target_start=target_start,
                                        target_end=target_end,
                                        num_residue_matches=1,
                                        alignment_block_length=-1,
                                        mapping_quality=255)
                overlaps.append(overlap)
    return(overlaps)
class FileCollection(QObject):
    """Ordered, filterable, groupable collection of FileInfo entries.

    Mutations are announced through Qt signals so attached views can react.

    Fixes: ``update_fileinfo`` and ``close_file`` had their log-message
    suffixes swapped — the ": KeyError" tag sat on the success (debug) line
    while the actual error line lacked it (compare ``modify_file``, which
    was correct).  ``set_sorter`` used a bare ``print`` instead of the
    module logger.  Dead commented-out code removed.
    """

    # A new file entry has been added
    sig_file_added = pyqtSignal(int, FileInfo)

    # An existing file entry has been removed
    sig_file_removed = pyqtSignal(Location)

    # A file changed on disk
    sig_file_modified = pyqtSignal(FileInfo)

    # New information about a file has become available (thumbnail,
    # metadata, etc.)
    sig_fileinfo_updated = pyqtSignal(FileInfo)

    # File handle was closed and the file is in its final state.
    sig_file_closed = pyqtSignal(FileInfo)

    # The file list has changed completely and needs a reload from scratch
    sig_files_set = pyqtSignal()

    # The file list has been reordered, but otherwise stayed the same
    sig_files_reordered = pyqtSignal()

    # The file list has been filtered, .is_excluded has been
    # set/unset, but otherwise stayed the same
    sig_files_filtered = pyqtSignal()

    # The file list has been grouped, .group has been set
    sig_files_grouped = pyqtSignal()

    def __init__(self) -> None:
        super().__init__()
        self._grouper: Grouper = NoGrouper()
        self._filter: Filter = Filter()
        self._sorter: Sorter = Sorter()
        # Maps a file's location to every FileInfo entry known for it.
        self._location2fileinfo: Dict[Location, List[FileInfo]] = defaultdict(list)
        # All entries, kept ordered by the current sorter's key function.
        self._fileinfos: SortedList[FileInfo] = SortedList(
            key=self._sorter.get_key_func())

    def clear(self) -> None:
        """Drop every entry and signal a full reload."""
        logger.debug("FileCollection.clear")
        self._location2fileinfo.clear()
        self._fileinfos.clear()
        self.sig_files_set.emit()

    def set_fileinfos(self, fileinfos: Iterable[FileInfo]) -> None:
        """Replace the collection's contents with *fileinfos*."""
        logger.debug("FileCollection.set_fileinfos")
        for fi in fileinfos:
            self._location2fileinfo[fi.location()].append(fi)
        self._fileinfos.clear()
        self._fileinfos.update(fileinfos)
        self.sig_files_set.emit()

    def add_fileinfo(self, fi: FileInfo) -> None:
        """Insert a single entry and announce its sorted position."""
        logger.debug("FileCollection.add_fileinfos: %s", fi)
        self._location2fileinfo[fi.location()].append(fi)
        self._fileinfos.add(fi)
        idx = self._fileinfos.index(fi)
        self.sig_file_added.emit(idx, fi)

    def remove_file(self, location: Location) -> None:
        """Remove every entry registered under *location*."""
        if location not in self._location2fileinfo:
            logger.error("FileCollection.remove_file: %s: KeyError", location)
        else:
            logger.debug("FileCollection.remove_file: %s", location)
            fis = self._location2fileinfo[location]
            del self._location2fileinfo[location]
            for fi in fis:
                self._fileinfos.remove(fi)
            self.sig_file_removed.emit(location)

    def modify_file(self, fileinfo: FileInfo) -> None:
        """Replace the entry for a file that changed on disk."""
        try:
            self._replace_fileinfo(fileinfo)
        except KeyError:
            logger.error("FileCollection.modify_file: %s: KeyError", fileinfo)
        else:
            logger.debug("FileCollection.modify_file: %s", fileinfo)
            self.sig_file_modified.emit(fileinfo)

    def update_fileinfo(self, fileinfo: FileInfo) -> None:
        """Replace an entry when new information (thumbnail, metadata) arrives."""
        try:
            self._replace_fileinfo(fileinfo)
        except KeyError:
            # fixed: the ": KeyError" tag belongs on the error path
            logger.error("FileCollection.update_fileinfo: %s: KeyError", fileinfo)
        else:
            logger.debug("FileCollection.update_fileinfo: %s", fileinfo)
            self.sig_fileinfo_updated.emit(fileinfo)

    def close_file(self, fileinfo: FileInfo) -> None:
        """Replace an entry whose file handle was closed (final state)."""
        try:
            self._replace_fileinfo(fileinfo)
        except KeyError:
            # fixed: the ": KeyError" tag belongs on the error path
            logger.error("FileCollection.close_file: %s: KeyError", fileinfo)
        else:
            logger.debug("FileCollection.close_file: %s", fileinfo)
            self.sig_file_closed.emit(fileinfo)

    def get_fileinfos(self) -> Iterator[FileInfo]:
        """Iterate entries in display order, honoring the sorter's reverse flag."""
        if self._sorter.reverse:
            return cast(Iterator[FileInfo], reversed(self._fileinfos))
        else:
            return cast(Iterator[FileInfo], iter(self._fileinfos))

    def get_fileinfo(self, location: Location) -> Optional[FileInfo]:
        """First entry registered under *location*, or None."""
        if location not in self._location2fileinfo:
            return None
        else:
            fis = self._location2fileinfo[location]
            return fis[0]  # FIXME: this is fishy

    def index(self, fileinfo: FileInfo):
        return self._fileinfos.index(fileinfo)

    def __getitem__(self, key):
        return self._fileinfos[key]

    def __len__(self) -> int:
        return len(self._fileinfos)

    def set_grouper(self, grouper: Grouper) -> None:
        """Apply *grouper* to every entry and signal the regrouping."""
        self._grouper = grouper
        for fi in self._fileinfos:
            self._grouper(fi)
        self.sig_files_grouped.emit()

    def set_filter(self, filter: Filter) -> None:
        """Apply *filter* to every entry and signal the refiltering."""
        self._filter = filter
        for fi in self._fileinfos:
            self._filter.apply(fi)
        self.sig_files_filtered.emit()

    def set_sorter(self, sorter: Sorter) -> None:
        """Install *sorter* and rebuild the file list under its key function."""
        logger.debug("FileCollection.set_sorter: rebuilding fileinfos")
        self._sorter = sorter
        self._fileinfos = SortedList(self._fileinfos,
                                     key=self._sorter.get_key_func())
        self.sig_files_reordered.emit()

    def _replace_fileinfo(self, fileinfo: FileInfo) -> None:
        """Swap all entries at fileinfo's location for *fileinfo* itself.

        Raises KeyError if the location is unknown; no-op if the exact
        object is already present.
        """
        if fileinfo in self._fileinfos:
            return

        location = fileinfo.location()
        if location not in self._location2fileinfo:
            raise KeyError(
                "location not in location2fileinfo: {}".format(location))
        else:
            fis = self._location2fileinfo[location]
            self._location2fileinfo[location] = [fileinfo]
            for fi in fis:
                self._fileinfos.remove(fi)
            self._fileinfos.add(fileinfo)

    def save_as(self, filename: str) -> None:
        """Write the absolute path of every entry to *filename*, one per line."""
        with open(filename, "w") as fout:
            for fi in self._fileinfos:
                fout.write(fi.abspath())
                fout.write("\n")
class Scheduler:
    """Tracks items with expiry times; a background thread reaps expired ones.

    Expired Online items get one grace period (``error_time``) in Pending
    before being dropped entirely.

    Fixes: ``expire`` called the dict — ``self.item_map(key)`` — instead of
    indexing it, raising TypeError on every call; ``_process_list`` removed
    and re-added items while iterating ``item_list``, which is undefined for
    a SortedList; the processor thread is now a daemon so it cannot block
    interpreter exit.
    """

    def __init__(self, error_time):
        """ initialize a new Scheduler """
        self.item_map = dict()            # client_id -> item
        self.item_list = SortedList()     # items in expiry order
        self.error_time = error_time      # grace period for expired Online items
        self.lock = Lock()                # guards both containers

    def start_running(self):
        """Start the background thread that reaps expired items once a second."""
        def processor():
            while True:
                sleep(1)
                self._process_list()
        # daemon=True: a forgotten scheduler must not keep the process alive
        thread = Thread(target=processor, daemon=True)
        thread.start()

    def _process_list(self):
        """Process every expired item at the head of the (sorted) list."""
        # Snapshot the expired prefix first: _process_item mutates item_list,
        # and mutating a SortedList while iterating it is undefined.
        expired = []
        for item in self.item_list:
            if not item.is_expired():
                break
            expired.append(item)
        for item in expired:
            self._process_item(item)

    def _process_item(self, item):
        """Demote an expired Online item to Pending, or drop a Pending one."""
        with self.lock:
            self.item_list.remove(item)
            if item.status == Status.Online:
                item.set_exp(self.error_time)
                item.status = Status.Pending
                self.item_list.add(item)
            elif item.status == Status.Pending:
                del self.item_map[item.client_id]

    def add(self, item):
        """Register a new item under its client_id."""
        with self.lock:
            self.item_list.add(item)
            self.item_map[item.client_id] = item

    def get(self, key):
        """Return the item for *key*, or None if unknown."""
        return self.item_map.get(key)

    def expire(self, key, ttl):
        """Reset the expiry of an Online item to *ttl* from now.

        Raises NeedsVerification for Pending items and DoesNotExist for
        unknown keys.
        """
        status = self.status(key)
        if status == Status.Pending:
            raise NeedsVerification()
        elif status == Status.DNE:
            raise DoesNotExist()
        with self.lock:
            # fixed: was `self.item_map(key)` — calling the dict, a TypeError
            item = self.item_map[key]
            self.item_list.remove(item)
            item.set_exp(ttl)
            self.item_list.add(item)
        return True

    def delete(self, key):
        """Remove the item for *key*; returns False if it does not exist."""
        status = self.status(key)
        if status == Status.DNE:
            return False
        with self.lock:
            item = self.item_map[key]
            self.item_list.remove(item)
            del self.item_map[key]
        return True

    def verify(self, key, ttl):
        """Promote a Pending item to Online with a fresh *ttl*.

        Returns False if already Online; raises DoesNotExist for unknown keys.
        """
        status = self.status(key)
        if status == Status.DNE:
            raise DoesNotExist()
        elif status == Status.Online:
            return False
        with self.lock:
            item = self.item_map[key]
            item.set_exp(ttl)
            item.status = Status.Online
        return True

    def status(self, key):
        """Status of the item for *key*, or Status.DNE if unknown."""
        if key in self.item_map:
            item = self.item_map[key]
            return item.status
        else:
            return Status.DNE