def test_bisect_key_left():
    """Check bisect_key_left on an empty list and on one with duplicate keys."""
    empty = SortedKeyList(key=modulo)
    assert empty.bisect_key_left(10) == 0

    slt = SortedKeyList(range(100), key=modulo)
    slt._reset(17)
    slt.update(range(100))
    slt._check()

    # Two copies of range(100) under key=modulo: each key bucket holds 20 items.
    for key, expected_index in ((0, 0), (5, 100), (10, 200)):
        assert slt.bisect_key_left(key) == expected_index
def test_bisect_key_left():
    """Verify bisect_key_left positions for an empty and a populated key list."""
    slt = SortedKeyList(key=modulo)
    assert slt.bisect_key_left(10) == 0

    slt = SortedKeyList(range(100), key=modulo)
    slt._reset(17)
    slt.update(range(100))
    slt._check()

    expectations = {0: 0, 5: 100, 10: 200}
    for key in sorted(expectations):
        assert slt.bisect_key_left(key) == expectations[key]
class MemoryTimestampIndex(TimestampIndex):
    """ Index of transactions sorted by their timestamps. """

    # Sorted by (timestamp, hash): equal timestamps get a deterministic order,
    # and bisecting with (timestamp, b'') lands on the first entry for that
    # timestamp since b'' sorts before any real hash.
    _index: 'SortedKeyList[TransactionIndexElement]'

    def __init__(self) -> None:
        self.log = logger.new()
        self._index = SortedKeyList(key=lambda x: (x.timestamp, x.hash))

    def add_tx(self, tx: BaseTransaction) -> bool:
        """Add a transaction to the index; return False if already present."""
        assert tx.hash is not None
        # It is safe to use the in operator because it is O(log(n)).
        # http://www.grantjenks.com/docs/sortedcontainers/sortedlist.html#sortedcontainers.SortedList.__contains__
        element = TransactionIndexElement(tx.timestamp, tx.hash)
        if element in self._index:
            return False
        self._index.add(element)
        return True

    def del_tx(self, tx: BaseTransaction) -> None:
        """Remove the (timestamp, hash) entry for tx, if it exists."""
        idx = self._index.bisect_key_left((tx.timestamp, tx.hash))
        # bisect_key_left may point at a different element when tx is absent,
        # so confirm the hash before popping.
        if idx < len(self._index) and self._index[idx].hash == tx.hash:
            self._index.pop(idx)

    def get_newest(self, count: int) -> Tuple[List[bytes], bool]:
        """Return up to `count` newest tx hashes and a has-more flag."""
        return get_newest_sorted_key_list(self._index, count)

    def get_older(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[bytes], bool]:
        """Return up to `count` tx hashes older than the given reference."""
        return get_older_sorted_key_list(self._index, timestamp, hash_bytes, count)

    def get_newer(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[bytes], bool]:
        """Return up to `count` tx hashes newer than the given reference."""
        return get_newer_sorted_key_list(self._index, timestamp, hash_bytes, count)

    def get_hashes_and_next_idx(self, from_idx: RangeIdx, count: int) -> Tuple[List[bytes], Optional[RangeIdx]]:
        """Return up to `count` hashes starting at `from_idx`, plus the next
        RangeIdx to resume from (None when the index is exhausted).
        """
        timestamp, offset = from_idx
        # First element at or after `timestamp` (b'' sorts before any hash).
        idx = self._index.bisect_key_left((timestamp, b''))
        txs = SortedKeyList(key=lambda x: (x.timestamp, x.hash))
        txs.update(self._index[idx:idx+offset+count])
        ret_txs = txs[offset:offset+count]
        hashes = [tx.hash for tx in ret_txs]
        if len(ret_txs) < count:
            # Fewer than requested: we reached the end of the index.
            return hashes, None
        else:
            next_offset = offset + count
            next_timestamp = ret_txs[-1].timestamp
            if next_timestamp != timestamp:
                # The offset in a RangeIdx is relative to the first element
                # that carries next_timestamp, so rebase it.
                next_idx = txs.bisect_key_left((next_timestamp, b''))
                next_offset -= next_idx
            return hashes, RangeIdx(next_timestamp, next_offset)
class ConstraintList:
    ''' List of constraints for a single rom variant '''

    def __init__(self, constraints: List[Constraint], rom_variant: RomVariant) -> None:
        self.constraints = SortedKeyList(key=lambda x: x.addr)
        for entry in constraints:
            # Side A takes precedence when both sides match the variant.
            if entry.romA == rom_variant:
                address = entry.addressA
            elif entry.romB == rom_variant:
                address = entry.addressB
            else:
                continue
            self.constraints.add(RomConstraint(address, entry))

    def get_constraints_at(self, local_address: int) -> List[Constraint]:
        """Return every constraint anchored exactly at local_address."""
        found = []
        idx = self.constraints.bisect_key_left(local_address)
        while idx < len(self.constraints) and self.constraints[idx].addr == local_address:
            found.append(self.constraints[idx].constraint)
            idx += 1
        return found
def sorted_list_get_with_key(sorted_list: SortedKeyList, key: object) -> object:
    """Return the first element of `sorted_list` whose key equals `key`.

    Raises:
        ValueError: if no element with that key is present.
    """
    idx = sorted_list.bisect_key_left(key)
    # Guard clause: either past the end, or the element found has another key.
    if idx >= len(sorted_list) or sorted_list.key(sorted_list[idx]) != key:
        raise ValueError("key '{}' not present in sorted list '{}'".format(
            key, sorted_list))
    return sorted_list[idx]
def find_candidate_gaps(gaps: List[CalendarGap], gap_request: GapRequest) -> List[CalendarGap]:
    """Return the gaps that satisfy the request's start, end and duration limits.

    The returned list contains the original gap objects (not copies), filtered
    in three steps:

    1. Drop gaps starting before ``gap_request.minimum_start_date`` by
       bisecting a start-date-sorted view of ``gaps``.
    2. Drop gaps ending at or after ``gap_request.maximum_end_date`` by
       bisecting an end-date-sorted view of the survivors.
    3. Drop gaps shorter than ``gap_request.gap_duration_minutes``.

    The result is ordered by gap end date (the order of the second view).
    """
    def key_fun_startdate(cg: CalendarGap):
        return cg.gap_start_datetime

    def key_fun_enddate(cg: CalendarGap):
        return cg.gap_end_datetime

    gaps_sorted_by_start = SortedKeyList(
        key=key_fun_startdate)  #type: SortedKeyList[CalendarGap]
    gaps_sorted_by_start.update(gaps)
    index_start_date = gaps_sorted_by_start.bisect_key_left(
        gap_request.minimum_start_date)

    remaining_gaps_sorted_by_end = SortedKeyList(
        key=key_fun_enddate)  #type: SortedKeyList[CalendarGap]
    # BUG FIX: index_start_date was computed against gaps_sorted_by_start, so
    # the slice must be taken from that sorted view — slicing the caller's
    # arbitrarily-ordered `gaps` kept/dropped the wrong elements.
    remaining_gaps_sorted_by_end.update(gaps_sorted_by_start[index_start_date:])

    index_end_date = remaining_gaps_sorted_by_end.bisect_key_left(
        gap_request.maximum_end_date)
    gaps_of_sufficient_duration = [
        g for g in remaining_gaps_sorted_by_end[:index_end_date]
        # BUG FIX: timedelta.seconds ignores the days component; use
        # total_seconds() so gaps longer than a day are measured correctly.
        if g.gap_duration.total_seconds() / 60 >= gap_request.gap_duration_minutes
    ]
    return gaps_of_sufficient_duration
class HalfSnap:
    """One side (bids or asks) of an order book snapshot.

    Levels are stored as mutable ``[price, size]`` pairs. Bids sort descending
    by price (negated key) so ``data[0]`` is always the best level; asks sort
    ascending.
    """

    def __init__(self, bids: bool):
        if bids:
            self.data = SortedKeyList(key=lambda val: -val[0])
        else:
            self.data = SortedKeyList(key=lambda val: val[0])
        self.is_bids = bids
        # Timestamp of the last snapshot fill; set by callers elsewhere.
        self.time = None

    def fill(self, source):
        """Replace the whole side with [price, size] items from `source`."""
        self.data.clear()
        for item in source:
            self.add(item)

    def add(self, item):
        """Insert a new [price, size] level without checking for duplicates."""
        price = item[0]
        size = item[1]
        self.data.add([price, size])

    def update(self, price: float, size: float):
        """Insert, replace or (when size is ~0) remove the level at `price`.

        Returns:
            bool: True when the book changed, False otherwise.
        """
        key = -price if self.is_bids else price
        i = self.data.bisect_key_left(key)
        if 0 <= i < len(self.data):
            value = self.data[i]
        else:
            # Price is beyond the current worst level.
            if size <= VERY_SMALL_NUMBER:
                return False
            self.data.add([price, size])
            return True
        if size <= VERY_SMALL_NUMBER:
            # Zero size means deletion; only act when the level exists.
            if value[0] == price:
                self.data.discard(value)
                return True
            else:
                return False
        if value[0] == price:
            # Existing level: update size in place (key is unchanged).
            self.data[i][1] = size
        else:
            self.data.add([price, size])
        return True

    def delete(self, price: float):
        """Remove the level at `price` via a zero-size update."""
        # BUG FIX: was `self.updatef(...)` — a nonexistent attribute that
        # raised AttributeError on every call.
        return self.update(price, 0.0)
def get_hashes_and_next_idx(self, from_idx: RangeIdx, count: int) -> Tuple[List[bytes], Optional[RangeIdx]]:
    """Return up to `count` tx hashes starting at `from_idx`, plus the next
    RangeIdx to resume pagination from (None when the index is exhausted).

    :param from_idx: (timestamp, offset) pair marking where to resume
    :param count: maximum number of hashes to return
    """
    timestamp, offset = from_idx
    # First element at or after `timestamp`; b'' sorts before any real hash.
    idx = self._index.bisect_key_left((timestamp, b''))
    txs = SortedKeyList(key=lambda x: (x.timestamp, x.hash))
    txs.update(self._index[idx:idx+offset+count])
    ret_txs = txs[offset:offset+count]
    hashes = [tx.hash for tx in ret_txs]
    if len(ret_txs) < count:
        # Fewer than requested: nothing left to paginate.
        return hashes, None
    else:
        next_offset = offset + count
        next_timestamp = ret_txs[-1].timestamp
        if next_timestamp != timestamp:
            # The stored offset is relative to the first element carrying
            # next_timestamp, so rebase it against that position.
            next_idx = txs.bisect_key_left((next_timestamp, b''))
            next_offset -= next_idx
        return hashes, RangeIdx(next_timestamp, next_offset)
class TransactionsIndex:
    """ Index of transactions sorted by their timestamps. """

    # Sorted by (timestamp, hash) so equal timestamps order deterministically
    # and (timestamp, b'') bisects to the first entry of that timestamp.
    transactions: 'SortedKeyList[TransactionIndexElement]'

    def __init__(self) -> None:
        self.transactions = SortedKeyList(key=lambda x: (x.timestamp, x.hash))

    def __getitem__(self, index: slice) -> List[TransactionIndexElement]:
        """ Get items from SortedKeyList given a slice

        :param index: list index slice, for eg [1:6]
        """
        return self.transactions[index]

    def update(self, values: List[TransactionIndexElement]) -> None:
        """ Update sorted list by adding all values from iterable

        :param values: new values to add to SortedKeyList
        """
        self.transactions.update(values)

    def add_tx(self, tx: BaseTransaction) -> None:
        """ Add a transaction to the index

        :param tx: Transaction to be added
        """
        assert tx.hash is not None
        # It is safe to use the in operator because it is O(log(n)).
        # http://www.grantjenks.com/docs/sortedcontainers/sortedlist.html#sortedcontainers.SortedList.__contains__
        element = TransactionIndexElement(tx.timestamp, tx.hash)
        if element in self.transactions:
            return
        self.transactions.add(element)

    def del_tx(self, tx: BaseTransaction) -> None:
        """ Delete a transaction from the index

        :param tx: Transaction to be deleted
        """
        idx = self.transactions.bisect_key_left((tx.timestamp, tx.hash))
        # Confirm the hash: bisect may point elsewhere when tx is absent.
        if idx < len(self.transactions) and self.transactions[idx].hash == tx.hash:
            self.transactions.pop(idx)

    def find_tx_index(self, tx: BaseTransaction) -> Optional[int]:
        """Return the index of a transaction in the index

        :param tx: Transaction to be found
        """
        idx = self.transactions.bisect_key_left((tx.timestamp, tx.hash))
        if idx < len(self.transactions) and self.transactions[idx].hash == tx.hash:
            return idx
        return None

    def get_newest(self, count: int) -> Tuple[List[bytes], bool]:
        """ Get transactions or blocks from the newest to the oldest

        :param count: Number of transactions or blocks to be returned
        :return: List of tx hashes and a boolean indicating if has more txs
        """
        return get_newest_sorted_key_list(self.transactions, count)

    def get_older(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[bytes], bool]:
        """ Get transactions or blocks from the timestamp/hash_bytes reference to the oldest

        :param timestamp: Timestamp reference to start the search
        :param hash_bytes: Hash reference to start the search
        :param count: Number of transactions or blocks to be returned
        :return: List of tx hashes and a boolean indicating if has more txs
        """
        return get_older_sorted_key_list(self.transactions, timestamp, hash_bytes, count)

    def get_newer(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[bytes], bool]:
        """ Get transactions or blocks from the timestamp/hash_bytes reference to the newest

        :param timestamp: Timestamp reference to start the search
        :param hash_bytes: Hash reference to start the search
        :param count: Number of transactions or blocks to be returned
        :return: List of tx hashes and a boolean indicating if has more txs
        """
        return get_newer_sorted_key_list(self.transactions, timestamp, hash_bytes, count)

    def find_first_at_timestamp(self, timestamp: int) -> int:
        """ Get index of first element at the given timestamp, or where it would be inserted if
        the timestamp is not in the list.

        Eg: SortedKeyList = [(3,hash1), (3, hash2), (7, hash3), (8, hash4)]
        find_first_at_timestamp(7) = 2, which is the index of (7, hash3)
        find_first_at_timestamp(4) = 2, which is the index of (7, hash3)

        :param timestamp: timestamp we're interested in
        :return: the index of the element, or None if timestamp is greater than all in the list
        """
        # b'' sorts before any real hash, so this lands on the first element
        # whose timestamp is >= the requested one.
        idx = self.transactions.bisect_key_left((timestamp, b''))
        return idx
class UTCAssigner:
    """Helper class to assign UTC timestamps to device readings.

    This class contains logic to infer UTC timestamps for readings
    that are stamped in uptime only by looking for nearby readings
    for which the UTC timestamp is known.  The relative offset
    between these anchor points and the reading in question is used
    to infer the UTC timestamp from the anchor point either exactly
    or approximately.

    The underlying truth that this class relies on is the fact that
    all readings produced by an IOTile device have a monotonically
    increasing reading_id that can be used to absolutely order them.
    In contrast to the readings timestamp, which may reset to 0 if
    the device reboots and does not have a realtime clock, the
    reading_id is guaranteed to never decrease.
    """

    # Reference epochs for the two supported timestamp encodings.
    _Y2KReference = datetime.datetime(2000, 1, 1)
    _EpochReference = datetime.datetime(1970, 1, 1)

    def __init__(self):
        # Anchors kept sorted by reading_id so bisect gives absolute order.
        self._anchor_points = SortedKeyList(key=lambda x: x.reading_id)
        self._prepared = False
        self._anchor_streams = {}
        self._break_streams = set()
        self._logger = logging.getLogger(__name__)
        self._known_converters = {
            'rtc': UTCAssigner._convert_rtc_anchor,
            'epoch': UTCAssigner._convert_epoch_anchor
        }

    def _load_known_breaks(self):
        # 0x5C00 is registered as a stream whose readings mark a time break
        # (e.g. a clock discontinuity); see add_reading.
        self._break_streams.add(0x5C00)

    def anchor_stream(self, stream_id, converter="rtc"):
        """Mark a stream as containing anchor points.

        :param stream_id: stream whose readings carry absolute time values
        :param converter: 'rtc', 'epoch', or a callable turning a reading
            into a datetime
        """
        if isinstance(converter, str):
            converter = self._known_converters.get(converter)
            if converter is None:
                raise ArgumentError(
                    "Unknown anchor converter string: %s" % converter,
                    known_converters=list(self._known_converters))
        self._anchor_streams[stream_id] = converter

    def id_range(self):
        """Get the range of anchor reading_ids.

        Returns:
            (int, int): The lowest and highest reading ids.

            If no reading ids have been loaded, (0, 0) is returned.
        """
        if len(self._anchor_points) == 0:
            return (0, 0)
        return (self._anchor_points[0].reading_id,
                self._anchor_points[-1].reading_id)

    @classmethod
    def convert_rtc(cls, timestamp):
        """Convert a number of seconds since 1/1/2000 to UTC time."""
        # Bit 31 is a flag, not part of the second count; strip it.
        if timestamp & (1 << 31):
            timestamp &= ~(1 << 31)
        delta = datetime.timedelta(seconds=timestamp)
        return cls._Y2KReference + delta

    @classmethod
    def _convert_rtc_anchor(cls, reading):
        """Convert a reading containing an RTC timestamp to datetime."""
        return cls.convert_rtc(reading.value)

    @classmethod
    def _convert_epoch_anchor(cls, reading):
        """Convert a reading containing an epoch timestamp to datetime."""
        delta = datetime.timedelta(seconds=reading.value)
        return cls._EpochReference + delta

    def add_point(self, reading_id, uptime=None, utc=None, is_break=False):
        """Add a time point that could be used as a UTC reference."""
        if reading_id == 0:
            return
        if uptime is None and utc is None:
            return
        # Bit 31 set on an uptime means it is actually an RTC value encoding
        # an absolute time, so convert it to UTC and drop the uptime.
        if uptime is not None and uptime & (1 << 31):
            if utc is not None:
                return
            uptime &= ~(1 << 31)
            utc = self.convert_rtc(uptime)
            uptime = None
        anchor = _TimeAnchor(reading_id, uptime, utc, is_break,
                             exact=utc is not None)
        if anchor in self._anchor_points:
            return
        self._anchor_points.add(anchor)
        # New data invalidates any previously prepared UTC cache.
        self._prepared = False

    def add_reading(self, reading):
        """Add an IOTileReading."""
        is_break = False
        utc = None
        if reading.stream in self._break_streams:
            is_break = True
        if reading.stream in self._anchor_streams:
            utc = self._anchor_streams[reading.stream](reading)
        self.add_point(reading.reading_id, reading.raw_time, utc,
                       is_break=is_break)

    def add_report(self, report, ignore_errors=False):
        """Add all anchors from a report."""
        if not isinstance(report, SignedListReport):
            if ignore_errors:
                return
            raise ArgumentError(
                "You can only add SignedListReports to a UTCAssigner",
                report=report)
        for reading in report.visible_readings:
            self.add_reading(reading)
        # The report itself is also an anchor (sent/received times).
        self.add_point(report.report_id, report.sent_timestamp,
                       report.received_time)

    def assign_utc(self, reading_id, uptime=None, prefer="before"):
        """Assign a utc datetime to a reading id.

        This method will return an object with assignment information
        or None if a utc value cannot be assigned.  The assignment
        object returned contains a utc property that has the assigned
        UTC as well as other properties describing how reliable the
        assignment is.

        Args:
            reading_id (int): The monotonic reading id that we wish to
                assign a utc timestamp to.
            uptime (int): Optional uptime that should be associated
                with the reading id.  If this is not specified and the
                reading_id is found in the anchor points passed to this
                class then the uptime from the corresponding anchor
                point will be used.
            prefer (str): There are two possible directions that can be
                used to assign a UTC timestamp (the nearest anchor
                before or after the reading).  If both directions are of
                similar quality, the choice is arbitrary.  Passing
                prefer="before" will use the anchor point before the
                reading.  Passing prefer="after" will use the anchor
                point after the reading.  Default: before.

        Returns:
            UTCAssignment: The assigned UTC time or None if assignment
                is impossible.
        """
        if prefer not in ("before", "after"):
            raise ArgumentError(
                "Invalid prefer parameter: {}, must be 'before' or 'after'".
                format(prefer))
        if len(self._anchor_points) == 0:
            return None
        # Readings beyond the newest anchor cannot be bracketed.
        if reading_id > self._anchor_points[-1].reading_id:
            return None

        i = self._anchor_points.bisect_key_left(reading_id)
        found_id = False
        crossed_break = False
        exact = True

        # Copy so that an explicit uptime override doesn't mutate the anchor.
        last = self._anchor_points[i].copy()
        if uptime is not None:
            last.uptime = uptime
        if last.reading_id == reading_id:
            found_id = True
            if last.utc is not None:
                # The reading itself is an exact anchor: nothing to infer.
                return UTCAssignment(reading_id, last.utc, found_id, exact,
                                     crossed_break)

        # Try both directions; an exact fix on either side wins outright.
        left_assign = self._fix_left(reading_id, last, i, found_id)
        if left_assign is not None and left_assign.exact:
            return left_assign
        right_assign = self._fix_right(reading_id, last, i, found_id)
        if right_assign is not None and right_assign.exact:
            return right_assign
        return self._pick_best_fix(left_assign, right_assign, prefer)

    def ensure_prepared(self):
        """Calculate and cache UTC values for all exactly known anchor points."""
        if self._prepared:
            return
        exact_count = 0
        fixed_count = 0
        inexact_count = 0
        self._logger.debug("Preparing UTCAssigner (%d total anchors)",
                           len(self._anchor_points))
        for curr in self._anchor_points:
            if not curr.exact:
                assignment = self.assign_utc(curr.reading_id, curr.uptime)
                if assignment is not None and assignment.exact:
                    # Promote inferred-but-exact anchors to real references.
                    curr.utc = assignment.utc
                    curr.exact = True
                    fixed_count += 1
                else:
                    inexact_count += 1
            else:
                exact_count += 1
        self._logger.debug(
            "Prepared UTCAssigner with %d reference points, "
            "%d exact anchors and %d inexact anchors", exact_count,
            fixed_count, inexact_count)
        self._prepared = True

    def fix_report(self, report, errors="drop", prefer="before"):
        """Perform utc assignment on all readings in a report.

        The returned report will have all reading timestamps in UTC.
        This only works on SignedListReport objects.  Note that the
        report should typically have previously been added to the UTC
        assigner using add_report or no reference points from the
        report will be used.

        Args:
            report (SignedListReport): The report that we should fix.
            errors (str): The behavior that we should have when we
                can't fix a given reading.  The only currently support
                behavior is drop, which means that the reading will be
                dropped and not included in the new report.
            prefer (str): Whether to prefer fixing readings by looking
                for reference points after the reading or before, all
                other things being equal.  See the description of
                ``assign_utc``.

        Returns:
            SignedListReport: The report with UTC timestamps.
        """
        if not isinstance(report, SignedListReport):
            raise ArgumentError("Report must be a SignedListReport",
                                report=report)
        if errors not in ('drop', ):
            raise ArgumentError(
                "Unknown errors handler: {}, supported=['drop']".format(
                    errors))

        self.ensure_prepared()

        fixed_readings = []
        dropped_readings = 0
        for reading in report.visible_readings:
            assignment = self.assign_utc(reading.reading_id, reading.raw_time,
                                         prefer=prefer)
            if assignment is None:
                dropped_readings += 1
                continue
            fixed_reading = IOTileReading(assignment.rtc_value,
                                          reading.stream, reading.value,
                                          reading_time=assignment.utc,
                                          reading_id=reading.reading_id)
            fixed_readings.append(fixed_reading)

        fixed_report = SignedListReport.FromReadings(
            report.origin, fixed_readings, report_id=report.report_id,
            selector=report.streamer_selector,
            streamer=report.origin_streamer,
            sent_timestamp=report.sent_timestamp)
        fixed_report.received_time = report.received_time

        if dropped_readings > 0:
            self._logger.warning(
                "Dropped %d readings of %d when fixing UTC timestamps in report 0x%08X for device 0x%08X",
                dropped_readings, len(report.visible_readings),
                report.report_id, report.origin)
        return fixed_report

    def _pick_best_fix(self, before, after, prefer):
        # Preference order: existence > no crossed break > exactness >
        # the caller's stated direction.
        if before is None and after is None:
            return None
        if after is None:
            return before
        if before is None:
            return after
        if after.crossed_break and not before.crossed_break:
            return before
        if before.crossed_break and not after.crossed_break:
            return after
        if before.exact and not after.exact:
            return before
        if after.exact and not before.exact:
            return after
        if prefer == 'before':
            return before
        return after

    def _fix_right(self, reading_id, last, start, found_id):
        """Fix a reading by looking for the nearest anchor point after it."""
        accum_delta = 0
        exact = True
        crossed_break = False
        if start == len(self._anchor_points) - 1:
            return None
        for curr in self._anchor_points.islice(start + 1):
            if curr.uptime is None or last.uptime is None:
                # Missing uptimes: cannot accumulate an exact offset.
                exact = False
            elif curr.is_break or curr.uptime < last.uptime:
                # Uptime went backwards or a break marker: device rebooted.
                exact = False
                crossed_break = True
            else:
                accum_delta += curr.uptime - last.uptime
            if curr.utc is not None:
                # Anchor after the reading: subtract the accumulated offset.
                time_delta = datetime.timedelta(seconds=accum_delta)
                return UTCAssignment(reading_id, curr.utc - time_delta,
                                     found_id, exact, crossed_break)
            last = curr
        return None

    def _fix_left(self, reading_id, last, start, found_id):
        """Fix a reading by looking for the nearest anchor point before it."""
        accum_delta = 0
        exact = True
        crossed_break = False
        if start == 0:
            return None
        for curr in self._anchor_points.islice(None, start, reverse=True):
            if curr.uptime is None or last.uptime is None:
                exact = False
            elif curr.is_break or last.uptime < curr.uptime:
                exact = False
                crossed_break = True
            else:
                accum_delta += last.uptime - curr.uptime
            if curr.utc is not None:
                # Anchor before the reading: add the accumulated offset.
                time_delta = datetime.timedelta(seconds=accum_delta)
                return UTCAssignment(reading_id, curr.utc + time_delta,
                                     found_id, exact, crossed_break)
            last = curr
        return None
class UTCAssigner(object):
    """Helper class to assign UTC timestamps to device readings."""

    # Reference epochs for the two supported timestamp encodings.
    _Y2KReference = datetime.datetime(2000, 1, 1)
    _EpochReference = datetime.datetime(1970, 1, 1)

    def __init__(self):
        # Anchors kept sorted by reading_id so bisect gives absolute order.
        self._anchor_points = SortedKeyList(key=lambda x: x.reading_id)
        self._anchor_streams = {}
        self._break_streams = set()
        self._known_converters = {
            'rtc': UTCAssigner._convert_rtc_anchor,
            'epoch': UTCAssigner._convert_epoch_anchor
        }

    def _load_known_breaks(self):
        # 0x5C00 is registered as a stream whose readings mark a time break.
        self._break_streams.add(0x5C00)

    def anchor_stream(self, stream_id, converter="rtc"):
        """Mark a stream as containing anchor points.

        :param stream_id: stream whose readings carry absolute time values
        :param converter: 'rtc', 'epoch', or a callable turning a reading
            into a datetime
        """
        if isinstance(converter, str):
            converter = self._known_converters.get(converter)
            if converter is None:
                raise ArgumentError(
                    "Unknown anchor converter string: %s" % converter,
                    known_converters=list(self._known_converters))
        self._anchor_streams[stream_id] = converter

    @classmethod
    def convert_rtc(cls, timestamp):
        """Convert a number of seconds since 1/1/2000 to UTC time."""
        # Bit 31 is a flag, not part of the second count; strip it.
        if timestamp & (1 << 31):
            timestamp &= ~(1 << 31)
        delta = datetime.timedelta(seconds=timestamp)
        return cls._Y2KReference + delta

    @classmethod
    def _convert_rtc_anchor(cls, reading):
        """Convert a reading containing an RTC timestamp to datetime."""
        return cls.convert_rtc(reading.value)

    @classmethod
    def _convert_epoch_anchor(cls, reading):
        """Convert a reading containing an epoch timestamp to datetime."""
        delta = datetime.timedelta(seconds=reading.value)
        return cls._EpochReference + delta

    def add_point(self, reading_id, uptime=None, utc=None, is_break=False):
        """Add a time point that could be used as a UTC reference."""
        if reading_id == 0:
            return
        if uptime is None and utc is None:
            return
        # Bit 31 set on an uptime means it is actually an RTC value encoding
        # an absolute time, so convert it to UTC and drop the uptime.
        if uptime is not None and uptime & (1 << 31):
            if utc is not None:
                return
            uptime &= ~(1 << 31)
            utc = self.convert_rtc(uptime)
            uptime = None
        anchor = _TimeAnchor(reading_id, uptime, utc, is_break)
        if anchor in self._anchor_points:
            return
        self._anchor_points.add(anchor)

    def add_reading(self, reading):
        """Add an IOTileReading."""
        is_break = False
        utc = None
        if reading.stream in self._break_streams:
            is_break = True
        if reading.stream in self._anchor_streams:
            utc = self._anchor_streams[reading.stream](reading)
        self.add_point(reading.reading_id, reading.raw_time, utc,
                       is_break=is_break)

    def add_report(self, report, ignore_errors=False):
        """Add all anchors from a report."""
        if not isinstance(report, SignedListReport):
            if ignore_errors:
                return
            raise ArgumentError(
                "You can only add SignedListReports to a UTCAssigner",
                report=report)
        for reading in report.visible_readings:
            self.add_reading(reading)
        # The report itself is also an anchor (sent/received times).
        self.add_point(report.report_id, report.sent_timestamp,
                       report.received_time)

    def assign_utc(self, reading_id, uptime=None):
        """Assign a utc datetime to a reading id.

        This method will return an object with assignment information
        or None if a utc value cannot be assigned.  The assignment
        object returned contains a utc property that has the assigned
        UTC as well as other properties describing how reliable the
        assignment is.

        NOTE(review): when the reading itself is an exact anchor this
        returns the raw ``last.utc`` datetime, while the search loop
        returns a ``UTCAssignment`` — callers must handle both; confirm
        this asymmetry is intentional.
        """
        if len(self._anchor_points) == 0:
            return None
        # Readings beyond the newest anchor cannot be bracketed.
        if reading_id > self._anchor_points[-1].reading_id:
            return None

        i = self._anchor_points.bisect_key_left(reading_id)
        found_id = False
        crossed_break = False
        exact = True

        # Copy so that an explicit uptime override doesn't mutate the anchor.
        last = self._anchor_points[i].copy()
        if uptime is not None:
            last.uptime = uptime
        if last.reading_id == reading_id:
            found_id = True
            if last.utc is not None:
                return last.utc

        # Walk forward to the nearest anchor with a known UTC, accumulating
        # the uptime offset between consecutive anchors.
        accum_delta = 0
        for curr in self._anchor_points.islice(i + 1):
            if curr.uptime is None or last.uptime is None:
                # Missing uptimes: cannot accumulate an exact offset.
                exact = False
            elif curr.is_break or curr.uptime < last.uptime:
                # Uptime went backwards or a break marker: device rebooted.
                exact = False
                crossed_break = True
            else:
                accum_delta += curr.uptime - last.uptime
            if curr.utc is not None:
                # Anchor after the reading: subtract the accumulated offset.
                time_delta = datetime.timedelta(seconds=accum_delta)
                return UTCAssignment(reading_id, curr.utc - time_delta,
                                     found_id, exact, crossed_break)
            last = curr
        return None
class MaxStore:
    """Keep (score, key) pairs whose score is within `margin` of the best.

    >>> x = MaxStore()
    >>> x.add('a',0)
    True
    >>> list(x)
    [(0, 'a')]
    >>> x.add('b',0)
    True
    >>> list(x)
    [(0, 'a'), (0, 'b')]
    >>> x.add('c', 2)
    True
    >>> list(x)
    [(2, 'c')]
    >>> x.add('d', 0)
    False
    >>> list(x)
    [(2, 'c')]
    >>> x.add('e', 1.9)
    True
    >>> list(x)
    [(1.9, 'e'), (2, 'c')]
    >>> x.add('f', 2.2)
    True
    >>> list(x)
    [(2, 'c'), (2.2, 'f')]
    """

    def __init__(self, margin=0.9):
        self.store = SortedKeyList(key=itemgetter(0))
        self.margin = margin

    @property
    def cutoff(self):
        """Lowest score still allowed to remain in the store (0 when empty)."""
        return self.store[-1][0] * self.margin if self.store else 0

    @property
    def cuton(self):
        """Current best score (0 when empty)."""
        return self.store[-1][0] if self.store else 0

    def add(self, key, score):
        """Insert (score, key) unless below the cutoff; evict stale entries.

        Returns True when the pair was stored, False when rejected.
        """
        if score < self.cutoff:
            return False
        raises_best = score > self.cuton
        self.store.add((score, key))
        if raises_best:
            # A new best score moves the cutoff up; drop everything below it.
            stale_end = self.store.bisect_key_left(self.cutoff)
            del self.store[:stale_end]
        return True

    def __iter__(self):
        return iter(self.store)

    def __len__(self):
        return len(self.store)

    def __bool__(self):
        return bool(self.store)

    def best(self):
        """Return the (score, key) pair with the highest score."""
        return self.store[-1]

    def best_key(self):
        """Return the key of the highest-scoring pair."""
        return self.best()[1]
class RangeModule:
    # Maintains a set of half-open-ish (start, end) tuples kept sorted by
    # their start value.  NOTE(review): the merge loops below pop from
    # self.ranges while iterating a range() over its (stale) length — this
    # relies on `break` being reached before indices go out of date; confirm
    # with multi-overlap inputs.

    def __init__(self):
        self.ranges = SortedKeyList(key=lambda t: t[0])

    def addRange(self, left: int, right: int) -> None:
        """Add [left, right), merging any overlapping stored ranges."""
        a, b = left, right
        if len(self.ranges) == 0:
            self.ranges.add((a, b))
            return
        i = self.ranges.bisect_key_left(left)
        if i - 1 >= 0:
            # Step back when the previous range already covers `left`.
            if self.ranges[i-1][0] <= a <= self.ranges[i-1][1]:
                i -= 1
        elif i == len(self.ranges):
            # NOTE(review): reachable only when i == 0 == len(ranges), which
            # the empty-list guard above already handled — looks dead.
            self.ranges.add((a, b))
            return
        add = False
        for j in range(i, len(self.ranges)):
            c, d = self.ranges[j]
            if max(a, c) < min(b, d):
                # Overlap: replace the stored range with the merged span.
                self.ranges.pop(j)
                x, z = min(a, c), max(b, d)
                self.ranges.add((x, z))
                a, b = x, z
                add = True
            else:
                add = True
                break
        if add:
            self.ranges.add((a, b))

    def queryRange(self, left: int, right: int) -> bool:
        """Return True when [left, right) is fully covered by stored ranges."""
        a, b = left, right
        i = self.ranges.bisect_key_left(left)
        if i - 1 >= 0:
            if self.ranges[i-1][0] <= a <= self.ranges[i-1][1]:
                i -= 1
        elif i == len(self.ranges):
            return False
        for j in range(i, len(self.ranges)):
            c, d = self.ranges[j]
            x, y = max(a, c), min(b, d)
            if (x, y) == (c, d):
                # Stored range fully inside the query: continue past its end.
                if b == d:
                    break
                else:
                    a = d
            elif (x, y) == (a, b):
                # Query fully inside a stored range: covered.
                break
            else:
                return False
        return True

    def removeRange(self, left: int, right: int) -> None:
        """Remove [left, right) from coverage, splitting ranges as needed."""
        a, b = left, right
        i = self.ranges.bisect_key_left(left)
        if i - 1 >= 0:
            if self.ranges[i-1][0] <= a <= self.ranges[i-1][1]:
                i -= 1
        elif i == len(self.ranges):
            return
        add = True
        for j in range(i, len(self.ranges)):
            c, d = self.ranges[j]
            y, w = max(a, c), min(b, d)
            if y < w:
                # Overlap: drop the stored range, keep the left remainder,
                # and carry the right remainder forward.
                self.ranges.pop(j)
                x, z = min(a, c), max(b, d)
                if x < y:
                    self.ranges.add((x, y))
                add = True
                a, b = w, z
            else:
                add = False
                break
        if add:
            self.ranges.add((a, b))