def test_replace_with_union(self):
    i = P.closed(0, 1) | P.open(2, 3)
    assert i.replace() == i
    assert i.replace(P.OPEN, -1, 4, P.OPEN) == P.openclosed(-1, 1) | P.open(2, 4)
    assert i.replace(lower=2) == P.closedopen(2, 3)
    assert i.replace(upper=1) == P.closedopen(0, 1)
    assert i.replace(lower=5) == P.empty()
    assert i.replace(upper=-5) == P.empty()
    assert i.replace(left=lambda v: ~v, lower=lambda v: v - 1, upper=lambda v: v + 1, right=lambda v: ~v) == P.openclosed(-1, 1) | P.openclosed(2, 4)

def test_parameters(self):
    i1, i2, i3, i4 = P.closed(0, 1), P.openclosed(0, 1), P.closedopen(0, 1), P.open(0, 1)
    params = {
        'disj': ' or ',
        'sep': '-',
        'left_open': '<!',
        'left_closed': '<',
        'right_open': '!>',
        'right_closed': '>',
        'conv': lambda s: '"{}"'.format(s),
        'pinf': '+oo',
        'ninf': '-oo',
    }

    assert P.to_string(i1, **params) == '<"0"-"1">'
    assert P.to_string(i2, **params) == '<!"0"-"1">'
    assert P.to_string(i3, **params) == '<"0"-"1"!>'
    assert P.to_string(i4, **params) == '<!"0"-"1"!>'

    assert P.to_string(P.empty(), **params) == '<!!>'
    assert P.to_string(P.singleton(1), **params) == '<"1">'

    assert P.to_string(P.openclosed(-P.inf, 1), **params) == '<!-oo-"1">'
    assert P.to_string(P.closedopen(1, P.inf), **params) == '<"1"-+oo!>'

    assert P.to_string(P.closed(0, 1) | P.closed(2, 3), **params) == '<"0"-"1"> or <"2"-"3">'

def test_parameters(self):
    i1, i2, i3, i4 = '<"0"-"1">', '<!"0"-"1">', '<"0"-"1"!>', '<!"0"-"1"!>'
    params = {
        'conv': lambda s: int(s[1:-1]),
        'disj': ' or ',
        'sep': '-',
        'left_open': '<!',
        'left_closed': '<',
        'right_open': '!>',
        'right_closed': '>',
        'pinf': r'\+oo',
        'ninf': '-oo',
    }

    assert P.from_string(i1, **params) == P.closed(0, 1)
    assert P.from_string(i2, **params) == P.openclosed(0, 1)
    assert P.from_string(i3, **params) == P.closedopen(0, 1)
    assert P.from_string(i4, **params) == P.open(0, 1)

    assert P.from_string('<!!>', **params) == P.empty()
    assert P.from_string('<"1">', **params) == P.singleton(1)

    assert P.from_string('<!-oo-"1">', **params) == P.openclosed(-P.inf, 1)
    assert P.from_string('<"1"-+oo!>', **params) == P.closedopen(1, P.inf)

    assert P.from_string('<"0"-"1"> or <"2"-"3">', **params) == P.closed(0, 1) | P.closed(2, 3)

def parse_or_empty(parser, text, verbose=False):
    try:
        return parser.parse(text)
    except Exception as e:
        if verbose:
            print('E:', text, str(e))
        return I.empty()

def _to_p_intervals(intervals):
    """
    Converts a `TimeInterval` instance or a list to an `Interval` list (portion library objects).
    This is usually done to merge and simplify the elements of the list.

    Parameters
    ----------
    intervals : TimeInterval or list[TimeInterval]
        List of intervals

    Returns
    -------
    `Interval` object with the list of time intervals
    """
    # Fill the `Interval` list and merge as necessary
    p_intervals = p.empty()
    if isinstance(intervals, list):
        for interval in intervals:
            # make sure interval is not None
            if interval:
                p_intervals = p_intervals.union(interval.p_interval)
    else:
        # intervals object is a single TimeInterval
        p_intervals = intervals.p_interval
    return p_intervals

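# Usage sketch for _to_p_intervals (illustrative only): `_FakeTimeInterval` is a
# hypothetical stand-in exposing the `p_interval` attribute that the real
# `TimeInterval` class is assumed to provide; overlapping members are merged and
# None entries are skipped.
import portion as p

class _FakeTimeInterval:
    def __init__(self, lower, upper):
        self.p_interval = p.closed(lower, upper)

merged = _to_p_intervals([_FakeTimeInterval(0, 2), _FakeTimeInterval(1, 3), None])
assert merged == p.closed(0, 3)  # overlapping intervals collapse into one
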
def test_is_empty(self):
    assert P.openclosed(1, 1).empty
    assert P.closedopen(1, 1).empty
    assert P.open(1, 1).empty
    assert not P.closed(1, 1).empty
    assert P.Interval().empty
    assert P.empty().empty

def delays(values: ClockValuation, constraint: ClockConstraint) -> Interval:
    """Compute the allowable delay with the given clock valuations and constraints

    .. todo:: Write the LaTeX version of the function.

    Parameters
    ----------
    values :
        A mapping from `Clock` to the valuation of the clock.
    constraint :
        A clock constraint.

    Returns
    -------
    :
        An interval that represents the set of possible delays that satisfy the
        given clock constraint.
    """
    if isinstance(constraint, bool):
        constraint = Boolean(constraint)
    if isinstance(constraint, Boolean):
        if constraint.value:
            return P.closed(0, P.inf)
        return P.empty()
    if isinstance(constraint, SingletonConstraint):
        v_c = values[constraint.clock]
        n: int = constraint.rhs
        if constraint.op == ComparisonOp.GE:
            return P.closed(n - v_c, P.inf)
        if constraint.op == ComparisonOp.GT:
            return P.open(n - v_c, P.inf)
        if constraint.op == ComparisonOp.LE:
            return P.closed(0, n - v_c)
        if constraint.op == ComparisonOp.LT:
            return P.closedopen(0, n - v_c)
    if isinstance(constraint, And):
        return delays(values, constraint.args[0]) & delays(values, constraint.args[1])
    if isinstance(constraint, DiagonalConstraint):
        v_c1 = values[constraint.lhs.clock1]
        v_c2 = values[constraint.lhs.clock2]
        op_fn = constraint.op.to_op()
        # A delay shifts both clocks equally, so the difference v_c1 - v_c2 is
        # unaffected: the diagonal constraint holds for all delays or for none.
        if op_fn(v_c1 - v_c2, constraint.rhs):
            return P.closed(0, P.inf)
        return P.empty()
    raise TypeError("Unsupported ClockConstraint type: {}".format(type(constraint)))

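# Worked example for delays() (values are illustrative, not from the original
# source): if clock x currently has valuation 1 and the constraint is x <= 3
# (a SingletonConstraint with op LE and rhs 3), the LE branch above returns
# P.closed(0, 3 - 1), i.e. every delay in [0, 2] keeps the constraint satisfied;
# a delay of 2.5 would push x to 3.5 and violate it.
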
def _reassemble(self, ctr):
    if 'deliver' not in ctr.actions:
        return
    if not (ctr.bundle.primary.bundle_flags & PrimaryBlock.Flag.IS_FRAGMENT):
        return

    final_ident = ctr.bundle_ident()[:3]
    frag_offset = ctr.bundle.primary.fragment_offset
    total_length = ctr.bundle.primary.total_app_data_len

    reassm = self._reassembly.get(final_ident, None)
    if reassm is None:
        reassm = Reassembly(
            ident=final_ident,
            total_length=total_length,
            total_valid=portion.closedopen(0, total_length),
            valid=portion.empty(),
            data=bytearray(total_length)
        )
        self._reassembly[final_ident] = reassm
    else:
        if reassm.total_length != total_length:
            LOGGER.warning('Mismatch in fragment-bundle total application data length')
    if frag_offset == 0:
        reassm.first_frag = ctr.bundle

    # Inject the new fragment
    payload_data = ctr.block_num(1).getfieldval('btsd')
    end_ix = frag_offset + len(payload_data)
    reassm.data[frag_offset:end_ix] = payload_data
    reassm.valid |= portion.closedopen(frag_offset, end_ix)

    if reassm.valid == reassm.total_valid:
        del self._reassembly[final_ident]
        LOGGER.info('Finished reassembly of %s size %d', final_ident, reassm.total_length)

        # Synthesize original bundle
        rctr = BundleContainer()
        rctr.bundle.primary = reassm.first_frag.primary.copy()
        rctr.bundle.primary.bundle_flags &= ~PrimaryBlock.Flag.IS_FRAGMENT
        rctr.bundle.primary.crc_type = AbstractBlock.CrcType.NONE
        rctr.bundle.primary.crc_value = None
        LOGGER.debug('Copying %d first-fragment blocks', len(reassm.first_frag.blocks))
        for blk in reassm.first_frag.blocks:
            rctr.bundle.blocks.append(blk.copy())
        rctr.reload()

        pyld_blk = rctr.block_num(Bundle.BLOCK_NUM_PAYLOAD)
        pyld_blk.setfieldval('btsd', reassm.data)
        pyld_blk.crc_type = AbstractBlock.CrcType.NONE
        pyld_blk.crc_value = None

        glib.idle_add(self._agent.recv_bundle, rctr)

    ctr.actions.clear()
    return True

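# Sketch of the coverage bookkeeping used above (fragment sizes chosen for
# illustration): adjacent half-open ranges merge, so reassembly is complete
# exactly when the union of received byte ranges equals [0, total_length).
import portion

total_valid = portion.closedopen(0, 200)
valid = portion.empty()
valid |= portion.closedopen(0, 100)    # first fragment covers bytes 0..99
valid |= portion.closedopen(100, 200)  # second fragment covers bytes 100..199
assert valid == total_valid
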
def test_creation(self):
    assert P.Interval() == P.empty()
    assert P.Interval(P.closed(0, 1)) == P.closed(0, 1)
    assert P.Interval(P.closed(0, 1), P.closed(2, 3)) == P.closed(0, 1) | P.closed(2, 3)
    assert P.Interval(P.closed(0, 1) | P.closed(2, 3)) == P.closed(0, 1) | P.closed(2, 3)

    with pytest.raises(TypeError):
        P.Interval(1)

def test_empty(self):
    assert P.empty() == P.Interval.from_atomic(P.OPEN, P.inf, -P.inf, P.OPEN)

    assert P.closed(3, -3) == P.empty()
    assert P.openclosed(0, 0) == P.empty()
    assert P.closedopen(0, 0) == P.empty()
    assert P.open(0, 0) == P.empty()
    assert P.closed(0, 0) != P.empty()

    assert P.singleton(P.inf) == P.empty()
    assert P.singleton(-P.inf) == P.empty()

def choose_action(self, i_ph):
    # Activation rule
    not_covered = P.closed(lower=0, upper=1)
    # not_covered = P.closed(lower=0.01, upper=self.sigma_square)
    # scale = not_covered.upper - not_covered.lower
    for arm in self.active_arms:
        # confidence_radius = scale * self.confidence_radius(i_ph, i)
        confidence_radius = self.confidence_radius(i_ph, arm)
        confidence_interval = P.closed(arm.norm_value - confidence_radius, arm.norm_value + confidence_radius)
        not_covered = not_covered - confidence_interval
        if debug:
            print(f"arm: {arm.index}, value: {arm.value}, i_ph: {i_ph}, pulled_arms: {arm.pulled}")
            print(f"not_covered: {not_covered}, confidence_radius: {confidence_radius}")

    if not_covered != P.empty():
        # Sample a point from the uncovered region, weighting each atomic
        # interval by its length, and activate a new arm there.
        rans = []
        height = 0
        heights = []
        for i in not_covered:
            rans.append(np.random.uniform(i.lower, i.upper))
            height += i.upper - i.lower
            heights.append(i.upper - i.lower)
        ran_n = np.random.uniform(0, height)
        j = 0
        ran = 0
        for i in range(len(heights)):
            if j < ran_n < j + heights[i]:
                ran = rans[i]
            j += heights[i]
        if debug_pricing:
            ran = 0.1
        new_arm = Arm(len(self.active_arms), ran * (self.sigma_square - lower) + lower, ran)
        self.active_arms.append(new_arm)
        # self.pulled_arms.append(0)
        # self.avg_rewards.append(0)

    # Selection rule
    max_index = float('-inf')
    max_index_arm = None
    for arm in self.active_arms:
        confidence_radius = self.confidence_radius(i_ph, arm)
        index = arm.avg_learning_reward + 2 * confidence_radius
        if index > max_index:
            max_index = index
            max_index_arm = arm
    action = max_index_arm.value
    self.current_arm = max_index_arm
    # self.current_action = action
    # action = action * (self.sigma_square - lower) + lower
    return action

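# Illustration of the activation rule above (arm positions and radii chosen
# arbitrarily): subtracting each arm's confidence interval from [0, 1] leaves
# the region not yet covered by any active arm.
import portion as P

not_covered = P.closed(0, 1) - P.closed(0.2, 0.5) - P.closed(0.7, 0.9)
assert not_covered == P.closedopen(0, 0.2) | P.open(0.5, 0.7) | P.openclosed(0.9, 1)
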
def get(self, request, *args, **kwargs):
    context = self.get_context_data(**kwargs)
    dim = (24 - self.start_hour) * self.splits
    m = np.zeros((7, dim))
    for day in range(7):
        time = P.empty()
        tt = TeacherTime.objects.filter(teacher=request.user, day=day)
        for t in tt:
            time = time | P.closed(tools.to_min(t.start_time), tools.to_min(t.end_time))
        print(time)
        if time != P.empty():
            for t in range(dim):
                if P.closed(t * self.step + self.start_hour * 60, (t + 1) * self.step + self.start_hour * 60) in time:
                    m[day, t] = 1
    print(m.shape)
    context["freetime"] = m.tolist()
    return render(request, self.template_name, context)

def test_hash_with_hashable(self):
    assert hash(P.closed(0, 1)) is not None
    assert hash(P.closed(0, 1)) != hash(P.closed(1, 2))
    assert hash(P.openclosed(-P.inf, 0)) is not None
    assert hash(P.closedopen(0, P.inf)) is not None
    assert hash(P.empty()) is not None

    assert hash(P.closed(0, 1) | P.closed(3, 4)) is not None
    assert hash(P.closed(0, 1) | P.closed(3, 4)) != hash(P.closed(0, 1))
    assert hash(P.closed(0, 1) | P.closed(3, 4)) != hash(P.closed(3, 4))

def test_empty(self):
    assert P.empty().adjacent(P.closed(0, 2))
    assert P.empty().adjacent(P.empty())
    assert P.closed(0, 2).adjacent(P.empty())

    assert not P.empty().adjacent(P.closed(0, 1) | P.closed(2, 3))
    assert not (P.closed(0, 1) | P.closed(2, 3)).adjacent(P.empty())

def test_replace_with_empty(self):
    assert P.empty().replace(left=P.CLOSED, right=P.CLOSED) == P.empty()
    assert P.empty().replace(lower=1, upper=2) == P.open(1, 2)
    assert P.empty().replace(lower=lambda v: 1, upper=lambda v: 2) == P.empty()
    assert P.empty().replace(lower=lambda v: 1, upper=lambda v: 2, ignore_inf=False) == P.open(1, 2)

def get_free_teacher_time(teacher, day, raw=True):
    tt = list(TeacherTime.objects.filter(day=day, teacher=teacher)
              .order_by("start_time").values_list("start_time", "end_time"))
    t = P.empty()
    for start_time, end_time in tt:
        print(start_time)
        t = t | P.closed(to_min(start_time), to_min(end_time))
    ls = list(Lesson.objects.filter(teacher=teacher, day=day, active__gt=0)
              .values_list("start_time", "end_time"))
    for start_time, end_time in ls:
        t = t - P.closed(to_min(start_time), to_min(end_time))
    return list(t)

def post(self, request, *args, **kwargs):
    step = self.step
    start_hour = self.start_hour
    TeacherTime.objects.filter(teacher=request.user).delete()
    for day in range(7):
        times = P.empty()
        for time in range((24 - self.start_hour) * self.step):
            if request.POST.get("{}_{}".format(day, time), False):
                times = times | P.closed(time, time + 1)
        if times != P.empty():
            for time in list(times):
                print(time.lower, time.upper)
                start_time = tools.to_time(time.lower, step, duration=start_hour * 60)
                end_time = tools.to_time(time.upper, step, duration=start_hour * 60)
                print(start_time, end_time)
                tt = TeacherTime(teacher=request.user, day=day, start_time=start_time, end_time=end_time)
                tt.save()
    # duration = request.POST["day_{}".format(day)]
    # LessonBook(day=day, duration=duration, user_id=request.user, )
    return redirect(reverse('lms:choose_day'))

def choose_action(self, episode):
    if episode == 1:
        i_ph = 1
    else:
        i_ph = math.ceil(math.log2(episode))
    if self.i_ph != i_ph:
        self.active_arms = []
        self.pulled_arms = []
        self.avg_rewards = []
        self.i_ph = i_ph

    # Activation rule
    not_covered = self.action_space
    for arm in self.active_arms:
        arm.confidence_radius = math.sqrt((8 * i_ph) / (1 + arm.pulled))
        confidence_interval = P.closed(arm.value - arm.confidence_radius, arm.value + arm.confidence_radius)
        not_covered = not_covered - confidence_interval

    if not_covered != P.empty():
        rans = []
        height = 0
        heights = []
        for i in not_covered:
            rans.append(np.random.uniform(i.lower, i.upper))
            height += i.upper - i.lower
            heights.append(i.upper - i.lower)
        ran_n = np.random.uniform(0, height)
        j = 0
        ran = 0
        for i in range(len(heights)):
            if j < ran_n < j + heights[i]:
                ran = rans[i]
            j += heights[i]
        new_arm = Arm(len(self.active_arms), ran)
        self.active_arms.append(new_arm)

    # Selection rule
    max_index = float('-inf')
    max_index_arm = None
    for arm in self.active_arms:
        confidence_radius = arm.confidence_radius
        index = arm.avg_reward + 2 * confidence_radius
        if index > max_index:
            max_index = index
            max_index_arm = arm
    self.current_arm = max_index_arm
    return max_index_arm.value

def get_visits(self):
    """ Method gets visits for the days from the database and prepares time intervals """
    first_day = datetime.combine(self.days[0], time.min)
    last_day = datetime.combine(self.days[-1], time.max)
    intervals = portion.empty()
    visits = Visit.objects.filter(
        ~Q(is_confirmed=True, is_available=False)
        & (Q(user=self.user, start__range=[first_day, last_day])
           | Q(user=self.user, end__range=[first_day, last_day])))
    for visit in visits:
        intervals |= portion.closedopen(visit.start, visit.end)
    return visits, intervals

def get_time_off_interval(self):
    """ Method gets time off entries for the days from the database and prepares time intervals """
    first_day = datetime.combine(self.days[0], time.min)
    last_day = datetime.combine(self.days[-1], time.max)
    intervals = portion.empty()
    times_off = TimeOff.objects.filter(
        Q(user=self.user, start__range=[first_day, last_day])
        | Q(user=self.user, end__range=[first_day, last_day]))
    for time_off in times_off:
        intervals |= portion.closedopen(time_off.start, time_off.end)
    return intervals

def test_bounds_on_empty(self):
    i = P.empty()
    assert i.left == P.OPEN
    assert i.right == P.OPEN
    assert i.lower == P.inf
    assert i.upper == -P.inf

    i = P.openclosed(10, -10)
    assert i.left == P.OPEN
    assert i.right == P.OPEN
    assert i.lower == P.inf
    assert i.upper == -P.inf

    i = P.open(0, 1) | P.closed(3, 4)
    assert i.left == P.OPEN
    assert i.right == P.CLOSED
    assert i.lower == 0
    assert i.upper == 4

class TestIntervalDifference:
    @pytest.mark.parametrize('i', [
        P.closed(0, 1),
        P.open(0, 1),
        P.openclosed(0, 1),
        P.closedopen(0, 1),
        P.empty(),
        P.singleton(0),
    ])
    def test_with_itself(self, i):
        assert i - i == P.empty()

    def test_with_disjoint(self):
        assert P.closed(0, 1) - P.closed(2, 3) == P.closed(0, 1)
        assert P.closed(0, 4) - P.empty() == P.closed(0, 4)
        assert P.empty() - P.closed(0, 4) == P.empty()

    def test_with_smaller(self):
        assert P.closed(0, 4) - P.closed(2, 3) == P.closedopen(0, 2) | P.openclosed(3, 4)
        assert P.closed(1, 4) - P.closed(1, 3) == P.openclosed(3, 4)
        assert P.closed(1, 4) - P.closed(2, 4) == P.closedopen(1, 2)
        assert P.closed(0, 4) - P.open(1, 2) == P.closed(0, 1) | P.closed(2, 4)
        assert P.closed(0, 2) - P.open(0, 2) == P.singleton(0) | P.singleton(2)

    def test_with_larger(self):
        assert P.closed(0, 2) - P.closed(0, 4) == P.empty()
        assert P.closed(0, 2) - P.closed(-2, 2) == P.empty()
        assert P.closed(0, 2) - P.closed(-2, 4) == P.empty()
        assert P.open(0, 2) - P.closed(0, 2) == P.empty()

    def test_with_overlap(self):
        assert P.closed(0, 2) - P.closed(1, 3) == P.closedopen(0, 1)
        assert P.closed(0, 2) - P.open(1, 3) == P.closed(0, 1)
        assert P.closed(0, 2) - P.closed(-2, 1) == P.openclosed(1, 2)
        assert P.closed(0, 2) - P.open(-2, 1) == P.closed(1, 2)

    def test_proxy_method(self):
        i1, i2 = P.closed(0, 1), P.closed(2, 3)
        assert i1 - i2 == i1.difference(i2)

    def test_with_invalid_type(self):
        with pytest.raises(TypeError):
            P.closed(0, 1) - 1

def get_stereotype_ratio(self):
    '''
    Calculate the stereotype ratio for the underlying ReferencedSequence.

    The stereotype ratio is the fraction of sequence elements that appear as
    entries of some repetitive pattern. E.g., consider the following case:
    `seq` = [a, b (->1), c (->0), d]. Element 'b' is included in a pattern
    referring to the 'b' element, and 'c' is part of a pattern referring to
    'a'. Thus, 'a', 'b' and 'c' relate to some pattern while 'd' does not,
    so the ratio here is 3/4.
    '''
    refs = self.content['refs']
    patterned_area = P.empty()
    for i in range(len(refs)):
        for ref in refs[i]:
            patterned_area |= P.closed(*sorted([i, ref]))
    indices = list(P.iterate(patterned_area, step=1))
    return len(indices) / len(refs)

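# Worked sketch of the docstring's example (the refs layout below is an
# assumed encoding in which refs[i] lists the indices element i refers back
# to): refs = [[], [1], [0], []] covers indices {1} and [0, 2], i.e. 3 of the
# 4 elements, giving a ratio of 3/4.
import portion as P

refs = [[], [1], [0], []]
patterned_area = P.empty()
for i in range(len(refs)):
    for ref in refs[i]:
        patterned_area |= P.closed(*sorted([i, ref]))
assert list(P.iterate(patterned_area, step=1)) == [0, 1, 2]
assert len(list(P.iterate(patterned_area, step=1))) / len(refs) == 0.75
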
def create_interval_dict_linear_time(gene, isoform_interval_dict):
    interval_set = set(isoform_interval_dict.keys())
    d = P.IntervalDict()
    union = P.empty()
    for transcript, inter in isoform_interval_dict.items():
        union = union | inter
    power_set_coords_dict = {}
    for p in P.iterate(union, step=1):
        s = list()
        for transcript, inter in isoform_interval_dict.items():
            if p in inter:
                s.append(transcript)
        s = repr(s)
        if s in power_set_coords_dict:
            power_set_coords_dict[s].append(p)
        else:
            power_set_coords_dict[s] = [p]
    for s, coords in power_set_coords_dict.items():
        d[interval(intervals_extract(coords))] = set(eval(s))
    return gene, d

def get_work_time_interval(self):
    intervals = portion.empty()
    holidays_work = UserSettings.objects.get(user=self.user).holidays
    for day in self.days:
        if not self.is_holiday(day) or holidays_work:
            day_number = day.weekday()
            work_time = WorkTime.objects.filter(user=self.user, day_of_week=day_number)
            if work_time:
                for work_hours in work_time:
                    day_datetime = datetime.combine(day, time.min)
                    work_hours.start = day_datetime + work_hours.start
                    work_hours.end = day_datetime + work_hours.end
                    intervals |= portion.closedopen(work_hours.start, work_hours.end)
    return intervals

def get(self, request, *args, **kwargs):
    context = self.get_context_data(**kwargs)
    teacher = User.objects.get(id=self.kwargs['id'])
    t_all = []
    week = ["Пн", "Вт", "Ср", "Чт", "Пт", "Сб", "Вс"]  # weekday abbreviations, Mon..Sun
    my_week = []
    for day in range(7):
        t = tools.get_free_teacher_time(teacher, day)
        if t:  # at least one free atomic interval for this day
            t_all.append([[tools.min_to_time(el.lower), tools.min_to_time(el.upper)] for el in t])
            my_week.append((day, week[day]))
        else:
            t_all.append(())
    context["times"] = t_all
    context["week"] = my_week
    context["teacher"] = teacher
    return render(request, self.template_name, context)

def get_time_steps(self):
    """ Method returns a list of quarters of hours to display in the schedule. """
    times = []
    if self.time_range == 'full':
        times.append(timedelta(seconds=0))
        times.append(timedelta(hours=24))
    else:
        for day in self.days[1:-1]:
            datetime_day = datetime.combine(day, time.min)
            day_interval = portion.closedopen(datetime_day, datetime_day + timedelta(days=1))
            # `-` binds tighter than `&`, so this is the day's slice of the
            # work time with time off already removed.
            on_intervals = day_interval & self.work_interval - self.time_off_interval
            if on_intervals != portion.empty():
                for on_interval in on_intervals:
                    times.append(on_interval.lower - datetime.combine(day, time.min))
                    times.append(on_interval.upper - datetime.combine(day, time.min))
            if self.time_range == 'extra':
                for visit in self.visits:
                    if visit.start in day_interval:
                        times.append(visit.start - datetime_day)
                    if visit.end in day_interval:
                        times.append(visit.end - datetime_day)
    if times:
        start_time = min(times)
        end_time = max(times)
    else:
        start_time = timedelta(hours=8)
        end_time = timedelta(hours=16)
    time_steps = self.generate_time_steps(start_time, end_time)
    return time_steps, start_time, end_time

def get_visits_per_day(self):
    visits_per_days = []
    for day in self.days[1:-1]:
        visits_per_day = []
        dt_day = datetime.combine(day, time.min)
        day_interval = portion.closedopen(dt_day + self.start_day, dt_day + self.end_day)
        for visit in self.visits:
            visit_interval = portion.closedopen(visit.start, visit.end) & day_interval
            if visit_interval != portion.empty():
                day_visit = deepcopy(visit)
                day_visit.start = visit_interval.lower
                day_visit.end = visit_interval.upper
                visits_per_day.append(day_visit)
        visits_per_days.append(visits_per_day)
    return visits_per_days

def get_portions_from_intervals(intervals: List[int]) -> portion.interval.Interval:
    p = portion.empty()
    for x in range(len(intervals) // 2):
        p |= portion.closed(*(intervals[x * 2:x * 2 + 2]))
    return p

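# Usage sketch (values are illustrative): consecutive pairs in the flat list
# become closed intervals, and overlapping pairs are merged by portion when
# their union is itself an interval.
import portion

assert get_portions_from_intervals([1, 3, 5, 7]) == portion.closed(1, 3) | portion.closed(5, 7)
assert get_portions_from_intervals([1, 4, 2, 6]) == portion.closed(1, 6)
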
                    default=1, help='Number of threads')
args = parser.parse_args()

gtffile = args.gtf
dbfile = args.db
jsonfile = args.json
threads = int(args.threads)

print('Creating gtf database, this will take some time...')
db = gffutils.create_db(gtffile, dbfile)

isoform_interval_dict = {}
for gene in db.features_of_type('gene'):
    g_id = gene['gene_id'][0]
    isoform_interval_dict[g_id] = {}
    for transcript in db.children(gene, featuretype='transcript'):
        t_id = transcript['transcript_id'][0]
        isoform_interval_dict[g_id][t_id] = P.empty()
        for exon in db.children(transcript, featuretype='exon'):
            isoform_interval_dict[g_id][t_id] = isoform_interval_dict[g_id][t_id] | P.closed(exon.start, exon.end)

print('Extracting unique isoform intervals')
res = Parallel(n_jobs=threads, verbose=3, backend='loky')(
    delayed(create_interval_dict_linear_time)(gene, transcript_intervals)
    for gene, transcript_intervals in isoform_interval_dict.items())
isoform_unique_intervals = {k: v for k, v in res}
isoform_unique_intervals_for_json_dump = {
    gene: {P.to_string(k): ','.join(v) for k, v in d.items()}
    for gene, d in isoform_unique_intervals.items()
}

print('Writing unique isoform intervals to json file {}'.format(jsonfile))