def parallel_wrapper(ticker: typing.Dict, months: int, days: int, stddev: int, found_anomalies: typing.List):
    """Fetch volume history for one ticker and collect its volume anomalies.

    Intended to run once per ticker (e.g. from a worker pool).  When the
    anomaly frame is non-empty, a result record (ticker fields plus url,
    mean, std_dev and the anomaly rows) is appended to the shared
    *found_anomalies* list.
    """
    history = download_volume_data(ticker['symbol'], months)
    frame, avg, sigma = find_anomalies(history, stddev, days)
    if frame.empty:
        return
    record = dict(ticker)
    record['url'] = f"https://finance.yahoo.com/quote/{ticker['symbol']}"
    record['mean'] = avg
    record['std_dev'] = sigma
    # round-trip through JSON to turn the frame rows into plain dicts
    record['anomalies'] = json.loads(frame.to_json(orient='records'))
    found_anomalies.append(record)
def update_pytail_objects(self, log: Log, pytail_list: t.List):
    """Populate *pytail_list* with _PygtailBuffer objects for *log*.

    If ``log.target`` is a single file, one buffer is created (only when the
    list is still empty) with its offset file stored under the configured
    offset directory.  Otherwise the target is treated as a directory tree:
    every file matching ``log._re_include`` and not ``log._re_exclude`` gets
    a buffer, with the offset file placed next to the file itself.
    """
    if os.path.isfile(log.target):
        # Single-file target: create the buffer only on the first call.
        if len(pytail_list) == 0:
            filename = '.' + os.path.basename(log.target) + '.offset'
            path = os.path.dirname(log.target)
            # Offset files for single-file targets live under
            # defaults.OFFSET_DIR, mirroring the (root-stripped) directory
            # of the target.
            offset_file = self.dm.config.path(defaults.OFFSET_DIR, remove_root(path), filename)
            if not os.path.exists(offset_file):
                _log_logger.debug(f"creating offset file {offset_file}")
                os.makedirs(os.path.dirname(offset_file), exist_ok=True)
            pytail_list.append(
                _PygtailBuffer(file=log.target, offset_mode='manual', offset_file=offset_file))
    else:
        # Directory target: walk it, filtering files and sub-directories by
        # the log's include/exclude regexes.
        for folder, dirnames, filenames in os.walk(log.target):
            for filename in filenames:
                if log._re_include.search(
                        filename) and not log._re_exclude.search(filename):
                    file = os.path.join(folder, filename)
                    # The offset file sits beside the tailed file.
                    offset_file = os.path.join(folder, '.' + filename + '.offset')
                    # Skip files that already have a buffer in the list.
                    if not any(map(lambda p: p.file == file, pytail_list)):
                        pytail_list.append(
                            _PygtailBuffer(file=file, offset_mode='manual',
                                           offset_file=offset_file))
            if not log.recursive:
                # Non-recursive logs only scan the top-level directory.
                break
            # Prune dirnames in place so os.walk only descends into
            # sub-directories matching the include/exclude filters.
            new_dirnames = []
            for dirname in dirnames:
                if log._re_include.search(
                        dirname) and not log._re_exclude.search(dirname):
                    new_dirnames.append(dirname)
            dirnames[:] = new_dirnames
def output(self, outs: t.List, msg: t.Dict, display_id: str, cell_index: int) -> t.Optional[t.List]:
    """Handle one iopub output message for a cell.

    Delegates to the top registered output hook when one exists for the
    message's parent; otherwise converts the message to an output dict,
    applies any pending delayed clear_output, records display-id
    bookkeeping, and appends the output to *outs*.  Returns the appended
    output dict, or None when the message was hooked or could not be
    converted.
    """
    msg_type = msg['msg_type']
    parent_msg_id = msg['parent_header'].get('msg_id')
    if self.output_hook_stack[parent_msg_id]:
        # if we have a hook registered, it will override our
        # default output behaviour (e.g. OutputWidget)
        hook = self.output_hook_stack[parent_msg_id][-1]
        hook.output(outs, msg, display_id, cell_index)
        return None
    try:
        out = output_from_msg(msg)
    except ValueError:
        self.log.error("unhandled iopub msg: " + msg_type)
        return None
    if self.clear_before_next_output:
        # A previously deferred clear_output fires now that a new output
        # has arrived: wipe the outputs and the display-id records.
        self.log.debug('Executing delayed clear_output')
        outs[:] = []
        self.clear_display_id_mapping(cell_index)
        self.clear_before_next_output = False
    if display_id:
        # record output index in:
        #   _display_id_map[display_id][cell_idx]
        cell_map = self._display_id_map.setdefault(display_id, {})
        output_idx_list = cell_map.setdefault(cell_index, [])
        output_idx_list.append(len(outs))
    outs.append(out)
    return out
def build_summary(layout, level=1):
    """make a summary for the report, including X level"""
    assert level > 0
    level -= 1
    summary = ListType(klass="summary")
    for child in layout.children:
        # Only Section nodes contribute summary entries.
        if not isinstance(child, Section):
            continue
        label = layout_title(child)
        # Skip sections that have neither a title nor an id to link to.
        if not label and not child.id:
            continue
        if not child.id:
            # Derive an anchor id from the title.
            child.id = label.replace(" ", "-")
        node = Link("#" + child.id, label=label or child.id)
        # FIXME: Three following lines produce not very compliant
        # docbook: there are some useless <para><para>. They might be
        # replaced by the three commented lines but this then produces
        # a bug in html display...
        if level and [n for n in child.children if isinstance(n, Section)]:
            # Recurse while levels remain and the section has sub-sections.
            node = Paragraph([node, build_summary(child, level)])
        summary.append(node)
        # summary.append(node)
        # if level and [n for n in child.children if isinstance(n, Section)]:
        #     summary.append(build_summary(child, level))
    return summary
def output(self, outs: t.List, msg: t.Dict) -> t.Optional[t.List]:
    """Convert *msg* to an output dict and append it to *outs*.

    Messages that cannot be converted are logged and skipped.
    """
    try:
        parsed = output_from_msg(msg)
    except ValueError:
        self.log.error("unhandled iopub msg: " + msg["msg_type"])
        return
    outs.append(parsed)
def id_pre(self) -> "Matrix":
    """Return the identity matrix of dimension ``self.rows``.

    Builds the flat entry list (row-major, 1 on the diagonal, 0 elsewhere)
    with a single comprehension instead of nested append loops, and drops
    the confusing local named ``List`` that shadow-styled the builtin.
    """
    n = self.rows
    entries = [1 if i == j else 0 for i in range(n) for j in range(n)]
    return matrix(entries, n, n)
def traverse_tree(bt: "Tree", elem: str, encode: str, output: t.List):
    """Search the tree for the node whose character equals *elem*.

    Depth-first walk that extends *encode* with '0' for a left step and
    '1' for a right step; when the character is found, the accumulated
    code is appended to *output* and that branch stops.

    Fixes the non-idiomatic ``== None`` comparisons (``is None``).
    """
    # Nothing to do for a missing subtree or an empty node.
    if bt is None or bt.root is None:
        return
    if bt.root.value.character == elem:
        output.append(encode)
        return
    traverse_tree(bt.root.left, elem, encode + '0', output)
    traverse_tree(bt.root.right, elem, encode + '1', output)
def add_name(self, name: str, target: typing.List) -> None:
    """
    Add a name to the target list, if it not already used.

    `target` should be one of the self properties (`imports`, `declares`,
    etc). `seen_vars` is the global record of any seen names to prevent
    duplicates (e.g. a variable would not be both declared [in `declares`
    list] and then used [in `uses` list]).
    """
    already_known = name in self.seen_vars or name in target
    if already_known:
        return
    self.seen_vars.append(name)
    target.append(name)
def twoSum(self, nums: List[int], target: int) -> List[int]:
    """Return indices [i, j] (i < j) with nums[i] + nums[j] == target.

    Single O(n) pass with a value->index map, replacing the previous
    O(n^2) repeated ``nums.index`` scans and the exception-driven control
    flow.  Returns [] when no pair exists (previously fell through to
    None; both are falsy for callers).
    """
    seen = {}  # value -> index of its first occurrence
    for j, value in enumerate(nums):
        complement = target - value
        if complement in seen:
            return [seen[complement], j]
        seen[value] = j
    return []
def simple_sort(data: List[int]) -> List[int]:
    """
    Sort a list of ints without using built-in sorting methods
    (repeated minimum selection).

    Unlike the previous version, the caller's list is no longer emptied:
    selection happens on a private copy.  The return annotation is also
    corrected from List[list] to List[int].

    Examples:
    simple_sort([2, 9, 6, 7, 3, 2, 1])
    >>> [1, 2, 2, 3, 6, 7, 9]
    """
    remaining = list(data)
    ordered = []
    while remaining:
        smallest = min(remaining)  # compute once per pass (was called twice)
        ordered.append(smallest)
        remaining.remove(smallest)
    return ordered
def kafka_websocket_reader_main(kafka_brokers, wss=("0.0.0.0", 8080), *, loop, _ctx: _t.List = None, _ctx_sem=None):
    """Self-contained aiohttp.web.Application-based routine (app-main)"""
    app = web.Application(loop=loop)
    # --
    # Expose the app to the caller (e.g. for tests) and signal readiness.
    if not _ctx is None:
        _ctx.append(app)
    if isinstance(_ctx_sem, Semaphore):
        _ctx_sem.release()
    # --
    # Install SIGINT(2)/SIGTERM(15) handlers to shut the app down; some
    # platforms (e.g. Windows loops) don't support signal handlers.
    # NOTE(review): the callback calls run_until_complete(x.shutdown) on the
    # app's loop — confirm this behaves as intended while the loop is running.
    try:
        app.loop.add_signal_handler(
            2, lambda x: x.loop.run_until_complete(x.shutdown), app)
        app.loop.add_signal_handler(
            15, lambda x: x.loop.run_until_complete(x.shutdown), app)
    except NotImplementedError:
        pass
    # --
    # GET /<topic>?offspec=...&...  -> stream the topic's records as JSON
    # over a websocket.  The factory lambda builds, per request, an async
    # map of record -> JSON string over a prepared Kafka consumer.
    # "offspec" entries of the form "part:off" select per-partition offsets;
    # a bare entry (no colon) is the default offset spec (fallback "+0").
    app._router.add_route("GET", "/{topic:.+}", partial(stream_to_websocket,
        lambda webreq, **webkw: asyncmap(
            lambda kfk_rec: \
                json.dumps(kafka_record_to_dict(kfk_rec,
                    compact_meta = bool(webkw.get("compact_meta", False)),
                    coerce_value = webkw.get("coerce_value", "str"),
                    coerce_key = webkw.get("coerce_key", "str"),
                    kafka_timestamp = bool(webkw.get("kafka_timestamp", False)),
                    kafka_topic = bool(webkw.get("kafka_topic", False)),),
                    ensure_ascii = False),
            kafka_consumer_prepare(
                brokers = kafka_brokers,
                # first path segment is the topic name
                topic = ("/"+webreq.path.lstrip("/")).split("/")[0:2][-1],
                # bare (colon-free) offspec entries; last char of the first one
                default_offspec = (list(filter(lambda o: (len(o) == 1) and o[0],
                    map(lambda s: s.split(":"),
                        webkw.get("offspec", "").split(",")))) or [["+0"]])[0][-1],
                # "partition:offset" pairs
                parts_offspecs = { int(t) : o for t, o in (filter(lambda to: len(to) == 2 and all(to),
                    map(lambda s: tuple(s.split(":")),
                        webkw.get("offspec", "").split(",")))) },
                loop = app.loop),)))
    # --
    web.run_app(app, host=wss[0], port=wss[1])
def inPins(Cell, Data):
    """Return the list of input pin names for the cell *Cell*.

    Inverters and buffers always expose a single 'A' input.  Cells
    accepted by ``simpleGate`` take ``int(Cell[-1])`` pins from the pin
    alphabet (which skips 'I' and 'J').  Anything else derives its pins
    from the uppercase letters of the cell's 'function' parameter,
    de-duplicated in order of first appearance.
    """
    if Cell in ('INV', 'BUF'):
        return ['A']
    if simpleGate(Cell):
        width = int(Cell[-1])
        return list('ABCDEFGHKLMNOPQRST'[:width])
    pins = []
    for symbol in get_param('function', Data, 'A'):
        if symbol in string.ascii_uppercase and symbol not in pins:
            pins.append(symbol)
    return pins
def levelOrder(self, root: "TreeNode") -> "List[List[int]]":
    """Breadth-first traversal returning node values grouped by depth.

    Returns [] for an empty tree (previously the None root was returned,
    contradicting the declared return type), and walks whole levels at a
    time instead of the O(n^2) ``q.pop(0)`` queue.
    """
    if root is None:
        return []
    result = []
    current = [root]  # nodes at the depth currently being emitted
    while current:
        result.append([node.val for node in current])
        nxt = []
        for node in current:
            # collect children for the next level, skipping absent ones
            if node.left is not None:
                nxt.append(node.left)
            if node.right is not None:
                nxt.append(node.right)
        current = nxt
    return result
def traverse_decode(copy_inmutable: Tree, data_list: t.List, output: t.List, parent: Tree):
    """Decode bit tokens from *data_list* by walking the Huffman tree.

    Consumes '0'/'1' items from the front of *data_list*, descending left
    or right accordingly.  When a node carrying a character is reached, the
    character is appended to *output* and decoding restarts from *parent*
    (the full tree) for the remaining bits.
    Assumes internal nodes carry the empty string "" as their character —
    confirm against the tree builder.
    """
    if copy_inmutable == None:
        return
    if copy_inmutable.root.value.character != "":
        # Character node reached: emit its symbol.
        output.append(copy_inmutable.root.value.character)
        if (len(data_list) > 0):
            # More bits remain: restart from the root of the full tree.
            traverse_decode(parent, data_list, output, parent)
        return
    if (len(data_list) > 0):
        # Internal node: consume the next bit and descend accordingly.
        elem = data_list.pop(0)
        if elem == '0':
            traverse_decode(copy_inmutable.root.left, data_list, output, parent)
        if elem == '1':
            traverse_decode(copy_inmutable.root.right, data_list, output, parent)
    return
def solution() -> List[str]:
    """Return the sorted names of pokemon that are grass type ('くさ'),
    have at least 80 HP, and have at least 3 abilities in total
    (regular + hidden).

    Fixes the previous ``json_file.close`` (missing call parentheses, so
    the file was never actually closed) by loading inside a ``with`` block.
    """
    # File load
    with open("pokemon_data.json", "r", encoding="UTF-8") as json_file:
        pokemon_lists = json.load(json_file)
    # Extract conditions 1, 2, 3
    names = [
        pokemon["name"]
        for pokemon in pokemon_lists
        if 'くさ' in pokemon["types"]
        and pokemon["stats"]["hp"] >= 80
        and len(pokemon["abilities"]) + len(pokemon["hiddenAbilities"]) >= 3
    ]
    # Sort
    names.sort()
    return names
def readobj(key: str, val: typing.Dict, denylist: typing.List, allowlist: typing.List):
    """Normalize one matrix entry, extracting its deny/allow rules.

    Returns ``str(val)`` for scalar entries, the bare id when nothing but
    the id (and deny/allow rules) was present, or ``(itemid, val)`` when
    extra fields remain.  Deny rules are expanded into *denylist* and allow
    rules appended to *allowlist*; both lists — and *val* itself (consumed
    keys are deleted) — are mutated in place.
    """
    if not isinstance(val, dict):
        # Scalar entry: nothing to extract.
        return str(val)
    if "id" not in val:
        raise ValueError("Missing field id in entry " + str(val))
    itemid = val["id"]
    del val["id"]
    if "deny" in val:
        # val["deny"] is an object that contains one or more entries per entry.
        # the entries are matched against the matrix
        new_denial = {
            mapping.get(key, key): [itemid],
            **extract_mapped_list(val["deny"])
        }
        # Expand multi-valued rules: every value beyond the first gets its
        # own denial dict; the first value stays in new_denial itself.
        for blkey, v in new_denial.items():
            for vi in v[1:]:
                denylist.append({**new_denial, blkey: vi})
            new_denial[blkey] = v[0]
        denylist.append(new_denial)
        del val["deny"]
    if "allow" in val:
        allowlist.append({
            "key": mapping.get(key, key),
            "value": itemid,
            "allowed": extract_mapped_list(val["allow"])
        })
        del val["allow"]
    if len(val.keys()) == 0:
        # Only the id was present: collapse back to a plain id.
        return itemid
    else:
        # Extra fields remain: return them alongside the id.
        return itemid, val
def recursion_modulaire(i: int, contraintes: collections.deque, base: dict, struc: typing.List):
    """
    Function following the logic of modular Euclidean division.
    The constraints given as parameters are applied.

    :param i: integer representing a cardinal
    :param contraintes: list of Contraintes (constraints)
    :param base: knowledge base (lexicon)
    :param struc: List-like data structure receiving the decomposition
    :return: None
    """
    if i in base:
        # Known lexicon entry.  Special cases: insert '+' between a tens
        # word (20..70) and a trailing 1 (presumably the "et-un" junction —
        # confirm), and prefix an explicit 1 before large scale words.
        if struc and struc[-1] in (20, 30, 40, 50, 60, 70) and i == 1:
            struc.append('+')
        elif i >= pow(10, 6):
            struc.append(1)
        struc.append(i)
    else:
        # Decompose i as q * n + r according to the next constraint.
        (q, r, n, spec) = suivant(i, contraintes)
        tmp = q * n
        if n >= pow(10, 6) and q == 1:
            # "one <million/...>": emit the explicit multiplier.
            struc.append(q)
            struc.append(n)
        elif tmp in base:
            # The product itself is a single known word.
            struc.append(tmp)
        else:
            recursion_modulaire(q, contraintes, base, struc)
            struc.append(n)
        if r:
            # Recurse on the remainder.
            recursion_modulaire(r, contraintes, base, struc)
def add_customer(customer, customers: typing.List):
    """Register *customer* by appending it to the shared *customers* list."""
    customers.append(customer)
def cumulative(self, list):
    """Return the running (prefix) sums of *list*.

    Fixes the IndexError the previous version raised on empty input
    (it unconditionally read ``list[0]``); an empty input now yields [].
    NOTE: the parameter name shadows the builtin ``list``; kept for
    backward compatibility with keyword callers.
    """
    totals = []
    running = 0
    for value in list:
        running += value
        totals.append(running)
    return totals
def _unknown_tile_id(x: str, t_ids: tp.List) -> bool: ret_val = x in t_ids if not ret_val: t_ids.append(x) return not ret_val
def lst_append(lst: typing.List, *args):
    """Append every positional argument to *lst*, preserving order."""
    lst.extend(args)
def connected_components(self) -> List[list]:
    """
    Finds all the Strongly Connected Component(SCC) in the graph.

    @return: The list all SCC

    Notes:
    If the graph is None the function should return an empty list []
    """
    nodes = len(self.graph.nodes)
    # tmp_list[v] collects predecessors of v discovered during the first
    # pass: a reversed-edge adjacency used by the second pass.
    tmp_list = [[] for _ in range(nodes)]
    # List: vertices in order of DFS completion (finish order).
    List = []
    List_bool = [False] * nodes  # visited flags for the first pass
    # First pass: iterative DFS from every unvisited vertex, recording
    # finish order in List and reversed edges in tmp_list.
    for i in range(nodes):
        if not List_bool[i]:
            List_bool[i] = True
            queue = [i]
            while queue:
                i = queue[-1]
                done = True
                for v in self.graph.neighbors[i]:
                    # NOTE(review): i is appended to tmp_list[v] on every
                    # re-scan of i's neighbor list, so reversed edges may be
                    # recorded multiple times — confirm this is harmless for
                    # the second pass (it only follows still-flagged nodes).
                    tmp_list[v].append(i)
                    if not List_bool[v]:
                        List_bool[v] = True
                        done = False
                        queue.append(v)
                        break
                if done:
                    # All neighbors explored: the vertex is finished.
                    queue.pop()
                    List.append(i)
    # Second pass (Kosaraju-style): walk vertices in reverse finish order
    # over the reversed edges; before[x] records the root vertex of the
    # component containing x.  List_bool now means "not yet assigned".
    before = [None] * nodes
    while List:
        tmp = List.pop()
        queue2 = [tmp]
        if List_bool[tmp]:
            List_bool[tmp] = False
            before[tmp] = tmp
            while queue2:
                u = queue2[-1]
                done = True
                for j in tmp_list[u]:
                    if List_bool[j]:
                        List_bool[j] = done = False
                        queue2.append(j)
                        before[j] = tmp
                        break
                if done:
                    queue2.pop()
    # Group vertices by their component root.
    before_scc = [[] for _ in range(nodes)]
    for i in range(len(before)):
        if before[i] == i:
            # A root vertex represents its own component.
            before_scc[i].append(before[i])
        else:
            t = before[i]
            if t is not None:
                before_scc[t].append(i)
    # Keep only the non-empty groups.
    scc = []
    for i in range(len(before_scc)):
        tmp = before_scc[i]
        if len(tmp) != 0:
            scc.append(tmp)
    return scc
# Notebook-style pipeline: generate one predicted sentence per test row
# and export them as a CSV submission (subtask C answers, per the filename).
parser2.add_argument("-f", type=str, help="colab option.")
args2 = parser2.parse_args()
references = {}
predictions = {}
# Build one prediction (list of tokens) per test row; rows are 1-indexed.
for i in range(1, len(df_test)+1):
    #references[i] = [df_trial_answers['reason1'][i].split(), df_trial_answers['reason2'][i].split(), df_trial_answers['reason3'][i].split()]
    sent = df_test['FalseSent'][i]
    #predictions[i] = df['FalseSent'][i].split()
    predictions[i] = run(sent)[0].split()
predictions[1]  # notebook cell echo: inspect the first prediction
List = []
List2= []
# Re-join each token list into a plain string, preserving row order.
for i in range(1 , len(df_test)+1):
    List.append(predictions[i])
    List2.append(' '.join(word for word in List[i-1]))
List2  # notebook cell echo
submission = pd.DataFrame(List2)
# Re-index from 1 — presumably the ids the scorer expects; confirm.
submission.index = np.arange(1, len(submission) + 1)
submission  # notebook cell echo
submission.to_csv('subtaskC_answers.csv', header=False)
from google.colab import files
files.download('subtaskC_answers.csv')
def task_5_append_str_to_list_and_return(input_data: List, elem: str):
    """Return a shallow copy of *input_data* with *elem* appended.

    The caller's list is left untouched.
    """
    result = list(input_data)  # idiomatic copy (was a pass-through comprehension)
    result.append(elem)
    return result
def revolve(l: typing.List):
    """Rotate *l* one position to the left, in place.

    The first element moves to the end; an empty list is left untouched.
    """
    if not l:
        return
    head = l.pop(0)
    l.append(head)
def __construct_project_panel_items(self, key_path: typing.List[str], node: TreeNode, closed: bool, project_panel_items: typing.List, closed_items: typing.Set, encountered_items: typing.Set) -> None:
    """Recursively flatten the project tree into display items.

    Appends folder and project items to *project_panel_items* (children of
    closed folders are skipped) and records every folder/project key seen
    into *encountered_items*.  Chains of directories that contain only a
    single sub-directory and no projects are collapsed into one path
    segment.
    NOTE(review): the *closed_items* parameter is not read here — the
    instance's ``__closed_items`` is consulted instead; confirm intended.
    """
    # if the node has no data (no projects) and no children, do not display it; move down a level.
    if len(node.data) == 0 and len(node.children) == 1:
        # this node represents a directory that only has a sub directory.
        # start by extracting the key and only child.
        key, child = list(node.children.items())[0]
        # if not root (the key path is not empty), combine the child with the key (directory path).
        if len(key_path) > 0:
            new_key = key_path[:-1] + [
                key_path[-1] + (key if key_path[-1].endswith("/") else "/" + key)
            ]
        # otherwise combine the child with the key (directory path).
        else:
            new_key = [key]
        # recurse
        self.__construct_project_panel_items(new_key, child, closed,
                                             project_panel_items,
                                             closed_items, encountered_items)
    else:
        # this node represents a directory that more than one of either sub directory or project.
        folder_key = "/".join(key_path)
        folder_closed = folder_key in self.__closed_items or closed
        if len(key_path) > 0:
            encountered_items.add(folder_key)
            if not closed:  # closed indicates whether the parent is closed
                project_panel_items.append(
                    ProjectPanelFolderItem(node, len(key_path) - 1,
                                           key_path[-1], folder_closed,
                                           folder_key))
        # Recurse into every sub directory of this folder.
        for key, child in node.children.items():
            self.__construct_project_panel_items(key_path + [key], child,
                                                 folder_closed,
                                                 project_panel_items,
                                                 closed_items,
                                                 encountered_items)
        # Emit an item for each project reference stored at this node.
        for project_reference in typing.cast(
                typing.Sequence[Profile.ProjectReference], node.data):
            project_reference_parts = project_reference.project_reference_parts
            # Fall back to the object's id() when no path parts exist.
            project_key = "/".join(key_path + [project_reference_parts[-1]]
                                   ) if project_reference_parts else str(
                                       id(project_reference))
            encountered_items.add(project_key)
            if not folder_closed:
                def handle_item_controller_title_changed(t: str) -> None:
                    self.property_changed_event.fire("value")
                display_item_counter = ProjectCounterDisplayItem(
                    project_reference)
                display_item_counter.on_title_changed = handle_item_controller_title_changed
                project_panel_items.append(
                    ProjectPanelProjectItem(len(key_path), project_reference,
                                            display_item_counter))