def multi_tab():
    import sys
    input = sys.stdin.readline
    count = 0
    n, m = map(int, input().split())
    orders = [x for x in map(int, input().split())]
    plugs = []
    for i, order in enumerate(orders):
        if len(plugs) < n:
            # a socket is still free; plug in if not already plugged
            if order not in plugs:
                plugs.append(order)
        else:
            if order not in plugs:
                flag = False
                next_orders = orders[i:]
                changeable = 0
                for j, plug in enumerate(plugs):
                    # prefer unplugging a device that is never used again
                    if plug not in next_orders:
                        plugs[j] = order
                        flag = True
                        count += 1
                        break
                    # otherwise remember the plug whose next use is farthest away
                    if next_orders.index(plugs[changeable]) < next_orders.index(plugs[j]):
                        changeable = j
                if not flag:
                    plugs[changeable] = order
                    count += 1
    return count
def _enumerate(reversible, reverse_index):
    if reverse_index is False:
        return builtins.enumerate(reversible)
    else:
        my_list = list(builtins.enumerate(reversed(reversible)))
        my_list.reverse()
        return my_list
def get_central_node(self, node_list):
    sum_weight = sum(n.weight for n in node_list)
    min_priority = min(n.priority for n in node_list)
    weight_dir = 0
    for index_dir, central_node_dir in enumerate(node_list):
        weight_dir += central_node_dir.weight
        if (weight_dir >= sum_weight / 2
                and central_node_dir.priority == min_priority
                and central_node_dir.type == BalancerNode.OPERATOR):
            break
    weight_rev = 0
    for i, central_node_rev in enumerate(node_list[::-1]):
        index_rev = len(node_list) - 1 - i
        weight_rev += central_node_rev.weight
        if (weight_rev >= sum_weight / 2
                and central_node_rev.priority == min_priority
                and central_node_rev.type == BalancerNode.OPERATOR):
            break
    dir_criteria = (weight_dir - (sum_weight / 2)) * (
        1000 * int(central_node_dir.type != BalancerNode.OPERATOR))
    rev_criteria = (weight_rev - (sum_weight / 2)) * (
        1000 * int(central_node_rev.type != BalancerNode.OPERATOR))
    index = index_rev if rev_criteria < dir_criteria else index_dir
    return index
async def _zip_inner_strict(aiters):
    tried = 0
    try:
        while True:
            items = []
            for tried, _aiter in _sync_builtins.enumerate(aiters):  # noqa: B007
                items.append(await anext(_aiter))
            yield (*items, )
    except StopAsyncIteration:
        # after the first iterable provided an item, some later iterable was empty
        if tried > 0:
            plural = " " if tried == 1 else "s 1-"
            raise ValueError(
                f"zip() argument {tried+1} is shorter than argument{plural}{tried}"
            )
        # the first iterable was empty; check whether a later iterable still has items
        sentinel = object()
        for tried, _aiter in _sync_builtins.enumerate(aiters):
            if await anext(_aiter, sentinel) is not sentinel:
                plural = " " if tried == 1 else "s 1-"
                raise ValueError(
                    f"zip() argument {tried+1} is longer than argument{plural}{tried}"
                )
        return
def plot_power_spectrum_two_layers(powers_negative, powers_positive, times,
                                   title='', figure_fname='',
                                   only_power_spectrum=True, high_gamma_max=120):
    if not only_power_spectrum:
        fig, (ax1, ax2, ax3) = plt.subplots(3)  # , sharex=True)
    else:
        fig, ax1 = plt.subplots()
    F, T = powers_negative.shape
    freqs = np.concatenate([
        np.arange(1, 30),
        np.arange(31, 60, 3),
        np.arange(60, high_gamma_max + 5, 5)
    ])
    bands = dict(delta=[1, 4], theta=[4, 8], alpha=[8, 15], beta=[15, 30],
                 gamma=[30, 55], high_gamma=[65, high_gamma_max])
    im1 = _plot_powers(powers_negative, ax1, times, freqs)
    # cba = plt.colorbar(im1, shrink=0.25)
    im2 = _plot_powers(powers_positive, ax1, times, freqs)
    cbb = plt.colorbar(im2, shrink=0.25)
    plt.ylabel('frequency (Hz)')
    plt.xlabel('Time (s)')
    plt.title(title)
    if not only_power_spectrum:
        for band_name, band_freqs in bands.items():
            idx = [k for k, f in enumerate(freqs)
                   if band_freqs[0] <= f <= band_freqs[1]]
            band_power = np.mean(powers_negative[idx, :], axis=0)
            ax2.plot(band_power.T, label=band_name)
        ax2.set_xlim([0, T])
        # ax2.legend()
        for band_name, band_freqs in bands.items():
            idx = [k for k, f in enumerate(freqs)
                   if band_freqs[0] <= f <= band_freqs[1]]
            band_power = np.mean(powers_positive[idx, :], axis=0)
            ax3.plot(band_power.T, label=band_name)
        ax3.set_xlim([0, T])
        # ax3.legend()
    if figure_fname != '':
        print('Saving figure to {}'.format(figure_fname))
        plt.savefig(figure_fname, dpi=300)
        plt.close()
    else:
        plt.show()
def FRvec(obs, ehtim_convention=True):
    """
    Construct vectors of field rotation corrections for each station and hand

    Args:
        obs (obsdata): eht-imaging obsdata object containing VLBI data
        ehtim_convention (bool): if true, assume the field rotation uses the
                                 eht-imaging pre-rotation convention

    Returns:
        FR1: field rotation corrections for the first station
        FR2: field rotation corrections for the second station
    """
    # read the elevation angles for each station
    el1 = obs.unpack(['el1'], ang_unit='rad')['el1']
    el2 = obs.unpack(['el2'], ang_unit='rad')['el2']

    # read the parallactic angles for each station
    par1 = obs.unpack(['par_ang1'], ang_unit='rad')['par_ang1']
    par2 = obs.unpack(['par_ang2'], ang_unit='rad')['par_ang2']

    # get the observation array info
    tarr = obs.tarr

    # get arrays of station names
    ant1 = obs.data['t1']
    ant2 = obs.data['t2']

    # get multiplicative prefactors for station 1
    f_el1 = np.zeros_like(el1)
    f_par1 = np.zeros_like(par1)
    f_off1 = np.zeros_like(el1)
    for ia, a1 in enumerate(ant1):
        ind1 = (tarr['site'] == a1)
        f_el1[ia] = tarr[ind1]['fr_elev']
        f_par1[ia] = tarr[ind1]['fr_par']
        f_off1[ia] = tarr[ind1]['fr_off'] * eh.DEGREE

    # get multiplicative prefactors for station 2
    f_el2 = np.zeros_like(el2)
    f_par2 = np.zeros_like(par2)
    f_off2 = np.zeros_like(el2)
    for ia, a2 in enumerate(ant2):
        ind2 = (tarr['site'] == a2)
        f_el2[ia] = tarr[ind2]['fr_elev']
        f_par2[ia] = tarr[ind2]['fr_par']
        f_off2[ia] = tarr[ind2]['fr_off'] * eh.DEGREE

    # combine to get field rotations for each station
    FR1 = (f_el1 * el1) + (f_par1 * par1) + f_off1
    FR2 = (f_el2 * el2) + (f_par2 * par2) + f_off2

    # if pre-rotation has been applied, multiply by 2.0
    if ehtim_convention:
        FR1 *= 2.0
        FR2 *= 2.0

    return FR1, FR2
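# A minimal usage sketch for FRvec above (the file name is hypothetical; it
# assumes the eht-imaging package is importable as `eh`, as the function
# itself already requires):
#
#     import ehtim as eh
#     obs = eh.obsdata.load_uvfits('example.uvfits')  # hypothetical data file
#     FR1, FR2 = FRvec(obs, ehtim_convention=True)
#     # FR1 and FR2 each hold one field rotation angle (radians) per visibility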
def enumerate(iterable=None, start=None):
    if iterable is None and start is None:
        return bpipe(enumerate, enumerate_start=0)
    if iterable is None and start is not None:
        return bpipe(enumerate, enumerate_start=start)
    if iterable is not None:
        if start is None:
            return builtins.enumerate(iterable, start=0)
        return builtins.enumerate(iterable, start=start)
def using_dic1(nums: List[int], target: int) -> List[int]:
    nums_map = {}
    # store the values as keys and their indices as values
    for i, num in enumerate(nums):
        nums_map[num] = i
    # look up (target - first number) as a key
    for i, num in enumerate(nums):
        if target - num in nums_map and i != nums_map[target - num]:
            return nums.index(num), nums_map[target - num]
def from_wgs84(self, latitude: float, longitude: float) -> tuple:
    """ Converts WGS84 coordinates into RD coordinates """
    dlat = 0.36 * (latitude - self.phi0)
    dlon = 0.36 * (longitude - self.lam0)
    x = self.x0 + sum([v * dlat ** self.Rp[i] * dlon ** self.Rq[i]
                       for i, v in enumerate(self.Rpq)])
    y = self.y0 + sum([v * dlat ** self.Sp[i] * dlon ** self.Sq[i]
                       for i, v in enumerate(self.Spq)])
    return x, y
def from_rd2(self, x: int, y: int) -> list:
    dx = 1E-5 * (x - self.x0)
    dy = 1E-5 * (y - self.y0)
    latitude = self.phi0 + sum([
        v * dx**self.Kp[i] * dy**self.Kq[i] for i, v in enumerate(self.Kpq)
    ]) / 3600
    longitude = self.lam0 + sum([
        v * dx**self.Lp[i] * dy**self.Lq[i] for i, v in enumerate(self.Lpq)
    ]) / 3600
    return [latitude, longitude]
def gain_phase_prior(obs, ref_station='AA'):
    """
    Construct vector of prior means and inverse standard deviations for gain phases

    Args:
        obs (obsdata): eht-imaging obsdata object containing VLBI data
        ref_station (str): name of reference station

    Returns:
        gainphase_mu: prior means for gain phases
        gainphase_kappa: prior inverse standard deviations for gain phases
    """
    # get arrays of station names
    ant1 = obs.data['t1']
    ant2 = obs.data['t2']

    # get array of timestamps
    time = obs.data['time']
    timestamps = np.unique(time)

    # Determine the total number of gains that need to be solved for
    N_gains = 0
    T_gains = list()
    A_gains = list()
    for it, t in enumerate(timestamps):
        ind_here = (time == t)
        N_gains += len(np.unique(np.concatenate((ant1[ind_here], ant2[ind_here]))))
        stations_here = np.unique(np.concatenate((ant1[ind_here], ant2[ind_here])))
        for ii in range(len(stations_here)):
            A_gains.append(stations_here[ii])
            T_gains.append(t)
    T_gains = np.array(T_gains)
    A_gains = np.array(A_gains)

    # initialize vectors of gain phase means and inverse standard deviations
    gainphase_mu = np.zeros(N_gains)
    gainphase_kappa = 0.0001 * np.ones(N_gains)

    # set reference station standard deviation to be tiny
    for it, t in enumerate(timestamps):
        index = (T_gains == t)
        ants_here = A_gains[index]
        for ant in ants_here:
            if ant == ref_station:
                ind = ((T_gains == t) & (A_gains == ant))
                gainphase_kappa[ind] = 10000.0
                break

    return gainphase_mu, gainphase_kappa
def from_rd(self, x: int, y: int) -> tuple:
    """ Converts RD coordinates into WGS84 coordinates """
    dx = 1E-5 * (x - self.x0)
    dy = 1E-5 * (y - self.y0)
    latitude = self.phi0 + sum([
        v * dx**self.Kp[i] * dy**self.Kq[i] for i, v in enumerate(self.Kpq)
    ]) / 3600
    longitude = self.lam0 + sum([
        v * dx**self.Lp[i] * dy**self.Lq[i] for i, v in enumerate(self.Lpq)
    ]) / 3600
    return latitude, longitude
def initialize(self):
    for i, ob in enumerate(self.objects):
        print("Initializing object %d of %d..." % (i + 1, len(self.objects)))
        ob.initialize()
    print("Initializing %d satellites..." % len(self.sat_group))
    self.sat_group.initialize()
    for i, an in enumerate(self.analyses):
        print("Initializing analysis %d of %d..." % (i + 1, len(self.analyses)))
        an.initialize()
def parse(input, boost):
    _, immune, infection = [
        x.strip() for x in re.split(r'Immune System:|Infection:', input)
    ]
    good, bad = set(), set()
    for i, line in enumerate(immune.split('\n')):
        sq = parse_squad(line, Type.Immune, i + 1, boost)
        good.add(sq)
    for i, line in enumerate(infection.split('\n')):
        sq = parse_squad(line, Type.Infection, i + 1, 0)
        bad.add(sq)
    return good, bad
def print_tree(self, node):
    rows = self.str_node(node)
    max_len = max([len(r) for r in rows])
    rows = [f'{r}{" " * (max_len - len(r))}' for r in rows]
    transposed = [[] for _ in range(max_len)]
    for j, r in enumerate(rows):
        for i, s in enumerate(r):
            transposed[i].append(s)
    rows = [' '.join(r) for r in transposed]
    for r in rows:
        print(r)
    print('----------------------------------------------')
def Clean_Wav_File(timeList):
    Vaw_obj_list = [
        "CF003 - Active - Day - (214).wav",
        "CF003 - Active - Day - (215).wav",
        "CF003 - Active - Day - (216).wav"
    ]
    # Vaw_obj_list = list_of_sound_file_name
    for counter, i in enumerate(Vaw_obj_list):
        newAudio = AudioSegment.from_wav("Data\\" + i)
        for num, a in enumerate(timeList[counter]):
            newAu = newAudio[a[0] * 1000:a[1] * 1000]
            newAu.export("Cleaned Data\\Cleaned_Voice\\" + i + "--" +
                         (a[2])[:-1] + "--" + str(num) + ".wav",
                         format="wav")
def enumerate(reversible, reverse_index=False):
    '''
    Iterate over `(i, item)` pairs, where `i` is the index number of `item`.

    This is an extension of the builtin `enumerate`. What it allows is to get
    a reverse index, by specifying `reverse_index=True`. This causes `i` to
    count down to zero instead of up from zero, so the `i` of the last member
    will be zero.
    '''
    if reverse_index is False:
        return builtins.enumerate(reversible)
    else:
        my_list = list(builtins.enumerate(reversed(reversible)))
        my_list.reverse()
        return my_list
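# A minimal usage sketch for the reverse-index enumerate above (assumes
# `import builtins` is present at module level, as the function requires):
def _demo_reverse_index_enumerate():
    # default behaviour matches the builtin
    assert list(enumerate('abc')) == [(0, 'a'), (1, 'b'), (2, 'c')]
    # with reverse_index=True the indices count down to zero
    assert enumerate('abc', reverse_index=True) == [(2, 'a'), (1, 'b'), (0, 'c')]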
def predict():
    # load the test set
    test_contents, test_labels = load_corpus('./dataset/test.txt',
                                             word2id,
                                             max_sen_len=50)
    test_dataset = TensorDataset(
        torch.from_numpy(test_contents).type(torch.float),
        torch.from_numpy(test_labels).type(torch.long))
    test_dataloader = DataLoader(dataset=test_dataset,
                                 batch_size=config.batch_size,
                                 shuffle=False,
                                 num_workers=2)

    # load the model
    model = TextCNN(config)
    model.load_state_dict(torch.load(config.model_path))
    model.eval()
    model.to(device)

    # evaluation loop
    count, correct = 0, 0
    for _, (batch_x, batch_y) in enumerate(test_dataloader):
        batch_x, batch_y = batch_x.to(device), batch_y.to(device)
        output = model(batch_x)
        # correct += (output.argmax(1) == batch_y).float().sum().item()
        correct += (output.argmax(1) == batch_y).sum().item()
        count += len(batch_x)

    # print the accuracy
    print('test accuracy is {:.2f}%.'.format(100 * correct / count))
def MOD_check(data, O):
    check_MOD = True
    check_ED = False
    char_check = ""
    checked = False
    for i, line in enumerate(data):
        if "`show module`" in line:
            char_check = ''.join(line.split()[0][0])
            for l in data[i + 1:i + 10]:
                if "Mod" in l.split():
                    for j in data[i:i + 1000]:
                        if "`show" not in j.split():
                            if "DS-X9316-SSNK9" in j:
                                # O.write("Disruptive Model Exists => Module " + j + "\n")
                                notes_arr.append(
                                    "Disruptive Model Exists => Module " + j + "\n")
                                check_MOD = False
                            if "Supervisor" in j:
                                if ("active *" in j or "ha-standby" in j) and not check_ED:
                                    # O.write("Director Model" + "\n")
                                    check_ED = True
                            if "faulty" in j or "shutdown" in j or "powered-dn" in j or "down" in j or "DOWN" in j \
                                    or "UNKNOWN" in j or "fail" in j or "FAULTY" in j or "Faulty" in j or \
                                    "Powered-DN" in j or "Powered-Down" in j:
                                # O.write("Faulty Module => " + j + "\n")
                                notes_arr.append("Faulty Module => " + j + "\n")
                                check_MOD = False
                                break
                        else:
                            break
                else:
                    continue
    return check_MOD
def set_predicate(self):
    """
    Sets a new predicate category in a predicate node
    Used in the relationship change mode
    """
    if self.selected_node is not None:
        # extract user input
        new_label = self.comboBox.currentText()
        idx_t = self.selected_node.split(".")[1]
        mapping = {self.selected_node: new_label + "." + idx_t}

        # update list of relationship triples with the change
        self.triples[int(idx_t)][1] = vocab["pred_name_to_idx"][new_label]
        s = self.triples[int(idx_t)][0]
        self.keep_box_idx[s] = 0
        self.keep_image_idx[s] = 0
        self.new_triples = self.triples
        # objects remain the same
        self.new_objs = self.objs

        # update the networkx graph and the list of triples for visualization
        for idx, [s, p, o] in enumerate(self.curr_triples):
            if p == self.selected_node:
                self.curr_triples[idx][1] = new_label + "." + idx_t
        self.pos[new_label + "." + idx_t] = self.pos[self.selected_node]
        del self.pos[self.selected_node]
        self.graph = nx.relabel_nodes(self.graph, mapping, copy=False)
        self.selected_node = self.comboBox.currentText()
        self.mode = "reposition"
        self.set_graph()
def simulate(self, t_start=None, t_stop=None, t_step=None, animate=False, follow=None):
    if t_start is not None:
        self.t_start = t_start
    if t_stop is not None:
        self.t_stop = t_stop
    if t_step is not None:
        self.t_step = t_step
    self._generate_time_vector(self.t_start, self.t_stop, self.t_step)
    t_sim_start = timer()  # simulation start time
    for i, tof in enumerate(self._tof_vector):
        t_sim_step = timer()
        self.tof_current = tof
        self.step(animate)
        print("t=%2.1f s (%5.3f ms)" % (self.t[i], (timer() - t_sim_step) * 1000))
        if follow is not None:
            mlab.view(azimuth=np.mod(
                -20 + np.arctan2(follow._xyz[1].to(u.km).value,
                                 follow._xyz[0].to(u.km).value) * 180 / np.pi,
                360))
        yield
        # if animate:
        #     yield
    print("Total simulation time: %5.5f s" % (timer() - t_sim_start))
def HW_check(data, O):
    check_HW = True
    v_check = False
    kickstart_version = ""
    for i, line in enumerate(data):
        if "`show hardware`" in line and check_HW:
            char_check = ''.join(line.split()[0][0])
            for l in data[i + 1:i + 100]:
                if "Switch is booted up" in l:
                    for j in data[i + 1:i + 10000]:
                        if char_check not in j.split() and not v_check:
                            if "kickstart:" in j:
                                kickstart_version = j.split(": ", 1)[1].split()[1]
                                # O.write("\n" + "Current Version = " + kickstart_version + "\n")
                                v_check = True
                                # O.write("version --- Checked" + "\n")
                        if "Module" in j or "PS" in j or "Fan" in j or "Xbar" in j:
                            if "faulty" in j or "shutdown" in j or "powered-dn" in j or "down" in j or "DOWN" in j \
                                    or "UNKNOWN" in j or "fail" in j or "FAULTY" in j or "Faulty" in j or \
                                    "Powered-DN" in j or "Powered-Down" in j:
                                # O.write("Faulty => " + j + "\n")
                                notes_arr.append("Faulty => " + j + "\n")
                                check_HW = False
                                break
                else:
                    break
    return kickstart_version
async def refine(self, input, output):
    if input:
        if self.place_input == 'last':
            sources = (*self.sources, input)
        else:
            sources = (input, *self.sources)
    else:
        sources = self.sources

    async with trio.open_nursery() as weld_nursery:
        sources = [weld(weld_nursery, source) for source in sources]

        async def pull_task(source, index, results: list):
            try:
                results.append((index, await source.__anext__()))
            except StopAsyncIteration:
                weld_nursery.cancel_scope.cancel()

        while True:
            results = []
            async with trio.open_nursery() as pull_nursery:
                for i, source in builtins.enumerate(sources):
                    pull_nursery.start_soon(pull_task, source, i, results)
            await output(tuple(result for i, result in
                               sorted(results, key=lambda packet: packet[0])))

    for source in sources:
        await source.aclose()
def model_vs_code(data):
    flag_6_2 = False
    flag_7_3 = False
    flag_8_4 = False
    upto_6_2 = ["9222i"]
    upto_7_3 = ["9506", "9509", "9513", "9148"]
    upto_8_4 = [
        "9710", "9250i", "9706", "9148S", "9148T", "9396S", "9178", "9132T",
        "9396T"
    ]
    for i, line in enumerate(data):
        if "`show sprom backplane 1`" in line:
            for l in data[i:i + 500]:
                if 'Product Number :' in l:
                    if "C" in l.split("-")[1]:
                        model = l.split("-")[1].split("C")[1]
                        for x in range(len(upto_6_2)):
                            if upto_6_2[x] in model:
                                flag_6_2 = True
                                break
                        for x in range(len(upto_7_3)):
                            if upto_7_3[x] in model:
                                flag_7_3 = True
                                break
                        for x in range(len(upto_8_4)):
                            if upto_8_4[x] in model:
                                flag_8_4 = True
                                break
    return flag_6_2, flag_7_3, flag_8_4
async def zip_longest(*itrs: AnyIterable[Any],
                      fillvalue: Any = None) -> AsyncIterator[Tuple[Any, ...]]:
    """
    Yield a tuple of items from mixed iterables until all are consumed.

    If shorter iterables are exhausted, the default value will be used
    until all iterables are exhausted.

    Example:

        a = range(3)
        b = range(5)

        async for a, b in zip_longest(a, b, fillvalue=-1):
            a  # 0, 1, 2, -1, -1
            b  # 0, 1, 2, 3, 4

    """
    its: List[AsyncIterator[Any]] = [iter(itr) for itr in itrs]
    itr_count = len(its)
    finished = 0

    while True:
        values = await asyncio.gather(*[it.__anext__() for it in its],
                                      return_exceptions=True)
        for idx, value in builtins.enumerate(values):
            if isinstance(value, AnyStop):
                finished += 1
                values[idx] = fillvalue
                its[idx] = repeat(fillvalue)
            elif isinstance(value, BaseException):
                raise value

        if finished >= itr_count:
            break

        yield tuple(values)
def test_extractall(self):
    # Check numeric groups
    res = evaluate(extractall(r'Step: (?P<no>\d+)', self.tempfile, 1))
    for expected, v in builtins.enumerate(res, start=1):
        self.assertEqual(str(expected), v)

    # Check named groups
    res = evaluate(extractall(r'Step: (?P<no>\d+)', self.tempfile, 'no'))
    for expected, v in builtins.enumerate(res, start=1):
        self.assertEqual(str(expected), v)

    # Check convert function
    res = evaluate(
        extractall(r'Step: (?P<no>\d+)', self.tempfile, 'no', builtins.int))
    for expected, v in builtins.enumerate(res, start=1):
        self.assertEqual(expected, v)
def fn_bracket():
    brackets = input()
    stack = []
    depths = []
    for bracket in brackets:
        flag = False
        if stack:
            if stack[-1] == "(" and bracket == ")":
                flag = True
                depths += [[len(stack), 2]]
                stack.pop()
            elif stack[-1] == "[" and bracket == "]":
                flag = True
                depths += [[len(stack), 3]]
                stack.pop()
        if bracket == '(' or bracket == '[':
            flag = True
            stack.append(bracket)
        if not flag:
            return 0
    while depths:
        max_index = max(enumerate(depths), key=lambda x: x[1][0])[0]
def using_dic2(nums: List[int], target: int) -> List[int]:
    nums_map = {}
    # combined into a single for loop
    for i, num in enumerate(nums):
        if target - num in nums_map:
            return [nums_map[target - num], i]
        nums_map[num] = i
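# A minimal usage sketch (hypothetical inputs) for the two-sum helpers above:
# both variants return the indices of the two numbers that add up to the target.
def _demo_two_sum():
    assert using_dic2([2, 7, 11, 15], 9) == [0, 1]
    assert list(using_dic1([3, 2, 4], 6)) == [1, 2]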
def define_playlist_values(playlist_json):
    """
    Dictionary of only the values we need to transmit to slave players

    'playlistInfo': dict{
        total_duration: int*
        total_items: int*
    }
    'mainPlaylist': list of dicts [{
        sequenceName: str*
    }]
    'name': str
    """
    values_to_send = [
        {"total_duration": playlist_json["playlistInfo"]["total_duration"]},
        {"total_items": playlist_json["playlistInfo"]["total_items"]},
    ]
    for i, value in enumerate(playlist_json["mainPlaylist"]):
        values_to_send.append({"sequenceName" + str(i): value["sequenceName"]})
    return values_to_send
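# A minimal usage sketch with a hypothetical playlist dict shaped like the
# structure described in the docstring above:
def _demo_define_playlist_values():
    playlist = {
        "playlistInfo": {"total_duration": 120, "total_items": 2},
        "mainPlaylist": [{"sequenceName": "intro"}, {"sequenceName": "outro"}],
        "name": "demo",
    }
    values = define_playlist_values(playlist)
    assert values[0] == {"total_duration": 120}
    assert values[1] == {"total_items": 2}
    assert values[2] == {"sequenceName0": "intro"}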
def managerOrdersView(request, *args, **kwargs):
    context = {}
    if request.user.is_authenticated:
        try:
            orders = list(Order.objects.all())
        except:
            orders = []
        # is_not_saved = True
        for id, order in enumerate(orders):
            if request.POST:
                form = OrderStatusForm(request.POST, instance=order)
                if form.is_valid():
                    form.save()
                    print(request.POST)
                    print("SAVED")
                else:
                    print("Assigned form1")
            else:
                form = OrderStatusForm(instance=order)
                print("Assigned form2")
            orders[id] = (order, form, id)
        print(orders)
        print("Context:")
        context = {"user": request.user,
                   "userType": request.user.type,
                   "orders": orders}
        print(context)
        return render(request, "manager-orders.html", context)
    else:
        context = {}
        return render(request, "manager-orders.html", context)
def test():
    from operator import add
    a = range(0, 100, 2)
    b = range(100)
    mab = map(add, a, b)
    assert list(mab) == list(range(0, 150, 3))
    iab = islice(mab, -10, None, 2)
    assert list(iab) == list(range(120, 150, 6))
    ieab = islice(enumerate(mab), -10, None, 2)
    assert list(ieab) == list(builtins.enumerate(mab))[-10::2]
    eiab = enumerate(iab)
    assert list(eiab) == list(builtins.enumerate(range(120, 150, 6)))
    fb = filter(lambda x: x % 2, b)
    assert list(fb) == list(range(100)[1::2])
def range(title, *args):
    '''Progress logger identical to the built-in range'''
    items = builtins.range(*args)
    for index, item in builtins.enumerate(items):
        with _current_log.context('{} {} ({:.0f}%)'.format(
                title, item, index * 100 / len(items))):
            yield item
def get_hamming_distance(motif1, motif2):
    count = 0
    for i, (char1, char2) in enumerate(zip(motif1, motif2)):
        if char1 != char2:
            count += 1
    return count
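# A minimal usage sketch (made-up motifs): the Hamming distance counts the
# positions at which two equal-length strings differ.
def _demo_get_hamming_distance():
    assert get_hamming_distance("ACGT", "ACCT") == 1
    assert get_hamming_distance("AAAA", "TTTT") == 4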
def _enumerate(iterable, reverse_index):
    if reverse_index is False:
        return builtins.enumerate(iterable)
    else:
        from python_toolbox import sequence_tools
        try:
            length = sequence_tools.get_length(iterable)
        except AttributeError:
            iterable = nifty_collections.LazyTuple(iterable)
            length = len(iterable)
        return zip(range(length - 1, -1, -1), iterable)
def lineup_dict(aw_lineup, hm_lineup):
    """Returns a dictionary of lineups to be converted to columns.
    Specifically, the columns are 'aw_player1' through 'aw_player5' and
    'hm_player1' through 'hm_player5'.

    :param aw_lineup: The away team's current lineup.
    :param hm_lineup: The home team's current lineup.
    :returns: A dictionary of lineups.
    """
    return {
        '{}_player{}'.format(tm, i + 1): player
        for tm, lineup in zip(['aw', 'hm'], [aw_lineup, hm_lineup])
        for i, player in enumerate(lineup)
    }
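# A minimal usage sketch for lineup_dict above (made-up player names):
def _demo_lineup_dict():
    aw = ['A1', 'A2', 'A3', 'A4', 'A5']
    hm = ['H1', 'H2', 'H3', 'H4', 'H5']
    cols = lineup_dict(aw, hm)
    assert cols['aw_player1'] == 'A1'
    assert cols['hm_player5'] == 'H5'
    assert len(cols) == 10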
def enumerate(title, iterable):
    return iter(title, builtins.enumerate(iterable), length=_len(iterable))
def __reversed__(self):
    length = len(self._iterable)
    for i, value in builtins.enumerate(reversed(self._iterable)):
        yield length - i - 1 + self._start, value
def enumerate(title, iterable):
    '''Progress logger identical to the built-in enumerate'''
    return iter(title, builtins.enumerate(iterable), length=_len(iterable))
def __iter__(self):
    yield from builtins.enumerate(self._iterable, self._start)