def __init__(self, gene_count_file_path: list):
    """Collect per-sample gene-count files for merging into one table.

    :param gene_count_file_path: list of paths to per-sample gene count files.
    """
    # Paths of the per-sample gene-count files.
    self.file_paths: List[str] = gene_count_file_path
    # One dict per sample: sample name -> (gene name -> count).
    # Original annotation `Dict(Dict(str))` instantiated typing.Dict and raised
    # TypeError at runtime; fixed to bracket syntax.
    # NOTE(review): inner count type assumed int — confirm whether counts stay str.
    self.all_gene_counts: Dict[str, Dict[str, int]] = {}
    # Union of all gene names found across samples.
    self.gene_names: Set[str] = set()
    # Sample order is frozen here so the output header is stable.
    self.samples_names: List[str] = []
def __init__(self, logfh: logging.FileHandler = None):
    """Initialize the in-memory message-queue server state.

    :param logfh: optional file handler; when given, its level and handler
                  are attached to this server's logger.
    """
    # Queue name -> message queue.
    # Original annotations called typing.Dict([...]) which raises TypeError
    # at runtime; fixed to bracket syntax.
    self.storage: Dict[str, SimpleQueue] = dict()
    # Queue name -> connections that publish to it.
    self.producers: Dict[str, List[Connection]] = dict()
    # Queue name -> the single consumer connection.
    self.consumers: Dict[str, Connection] = dict()
    self.log = logging.getLogger("TinyMQServer")
    # Bug fix: setLevel(logfh.level) ran unconditionally and crashed with
    # AttributeError when logfh was None (the default); guard both calls.
    if logfh:
        self.log.setLevel(logfh.level)
        self.log.addHandler(logfh)
def __init__(self, file_name: str):
    """Initialize empty movie/actor/director/genre datasets for `file_name`.

    :param file_name: path of the data file this repository will load.
    """
    # Original annotations called typing.List/Set/Dict with parentheses,
    # which raises TypeError at runtime; fixed to bracket syntax.
    self.__file_name = file_name
    self.__dataset_of_movies: List[Movie] = list()
    self.__dataset_of_actors: Set[Actor] = set()
    self.__dataset_of_directors: Set[Director] = set()
    self.__dataset_of_genres: Set[Genre] = set()
    # Lookup indexes. NOTE(review): key/value types assumed from the attribute
    # names (year -> movies, director -> movies, ...) — confirm against the
    # code that populates them.
    self.__movies_with_given_year: Dict[int, List[Movie]] = dict()
    self.__movies_with_given_director: Dict[Director, List[Movie]] = dict()
    self.__movies_with_given_actor: Dict[Actor, List[Movie]] = dict()
    self.__movies_with_given_genre: Dict[Genre, List[Movie]] = dict()
def getAll(iso: str, country: str, url: str, target_file: str,
           countryListObj: CountryList, admin1CodeListObj: Admin1CodeList,
           admin2CodeListObj: Admin2CodeList) -> Dict[str, str]:
    """Fetch JSON-formatted places for one country and report the outcome.

    Externally you're supposed to invoke this function, which will get JSON
    formatted places for a certain country.

    :param iso: ISO code of the country to fetch.
    :param country: country name (currently unused here — kept for interface
                    compatibility; NOTE(review): confirm whether callers rely on it).
    :param url: source URL passed to urllib's Request.
    :param target_file: file the fetched data is written to.
    :returns: status dictionary — {'success': 'true'/'false'} on completion,
              or {'error': <message>} when an exception occurred.

    Bug fix: the return annotation was `Dict(str, str)`, which instantiates
    typing.Dict and raises TypeError at import time; changed to `Dict[str, str]`.
    """
    status = {'error': 'incomplete'}
    try:
        if __get_data__(iso=iso, request=Request(url), target_file=target_file,
                        countryListObj=countryListObj,
                        admin1CodeListObj=admin1CodeListObj,
                        admin2CodeListObj=admin2CodeListObj):
            status = {'success': 'true'}
        else:
            status = {'success': 'false'}
    except Exception as e:
        status['error'] = str(e)
    finally:
        # The downloader leaves a temporary archive behind; always clean it up.
        if exists(abspath('./tmp.zip')):
            remove(abspath('./tmp.zip'))
    return status
def setupFiles(args: object) -> Dict[str, object]:
    """Create the backup directory, log directory, and per-user log file.

    :param args: object with attributes `directory`, `user` and `logPath`
                 (e.g. an argparse namespace).
    :returns: dict with keys fileMode ("w" for a fresh log file, "a" when it
              already existed), localTime (str), and the Path objects bkDir,
              logDir, bkLogFile.
    :raises Exception: wrapping the original error when setup fails.

    Bug fix: the return annotation was `Dict({str: str})`, which instantiates
    typing.Dict and raises TypeError at import time. Also replaced the
    `subprocess` calls to mkdir/touch/chmod with the equivalent pathlib
    methods — no shelling out, and parent directories are created as needed.
    (Annotation is Dict[str, object] because the returned values mix str and Path.)
    """
    localTime = time.asctime(time.localtime(time.time()))
    backupDirectory = Path(f"{args.directory}/backup_{args.user}")
    logDirectory = Path(f"{args.logPath}")
    backupLogFile = Path(f"{args.logPath}/log_{args.user}_{localTime}.log")
    try:
        backupDirectory.mkdir(parents=True, exist_ok=True)
        logDirectory.mkdir(parents=True, exist_ok=True)
        if not backupLogFile.is_file():
            fileMode = "w"
            backupLogFile.touch()
            # World read/write so other tools/users can append to the log.
            backupLogFile.chmod(0o666)
        else:
            fileMode = "a"
        return {
            "fileMode": fileMode,
            "localTime": localTime,
            "bkDir": backupDirectory,
            "logDir": logDirectory,
            "bkLogFile": backupLogFile,
        }
    except Exception as err:
        print(err)
        raise Exception("Couldn't setup files", err)
def _set_spaces(self):
    """Build the observation space: one unit Box per presampled-goal key.

    Each key in `self._presampled_goals` maps to a Box in [-1, 1]^d, where d
    is the length of that key's first presampled goal; the (key, Box) pairs
    are combined into a Dict space (presumably gym.spaces.Dict — the class
    is imported elsewhere in this file).
    """
    spaces_by_key = []
    for goal_key, goals in self._presampled_goals.items():
        goal_dim = goals[0].shape[0]
        unit_box = gym.spaces.Box(-np.ones(goal_dim), np.ones(goal_dim))
        spaces_by_key.append((goal_key, unit_box))
    self.observation_space = Dict(spaces_by_key)
def test_rpc_1_create(request):
    """Django view: create a Car from a POSTed JSON body.

    :param request: incoming HttpRequest.
    :returns: 201 JsonResponse with the created car on POST; otherwise a 401
              error payload.

    Fixes: annotation `Dict(str, str)` (parentheses) is invalid typing syntax —
    changed to `Dict[str, str]`; payload key typo 'stauts' corrected to 'status'.
    NOTE(review): 401 (Unauthorized) with message 'Bad Request' is inconsistent —
    400 may have been intended, but the status code is left unchanged for callers.
    """
    if request.method == 'POST':
        data: Dict[str, str] = json.loads(request.body.decode('utf-8'))
        createdCar: Car = create_Car(data)
        return JsonResponse(createdCar.serialize(), safe=False, status=201)
    return JsonResponse({'status': 401, 'message': 'Bad Request'}, status=401)
def _parse(self):
    """Parse `self.file_path` (two whitespace-separated columns per line)
    into a dict mapping column 2 -> column 1, stored on `self._parsed_dict`.

    Bug fix: the attribute annotation `Dict(str)` instantiated typing.Dict and
    raised TypeError at runtime; changed to bracket syntax. Also skip blank
    lines, which previously raised IndexError.
    """
    parsed_dict: Dict[str, str] = {}
    with open(self.file_path, 'r') as file:
        for line in file:
            elements: List[str] = line.strip().split()
            if not elements:
                continue  # tolerate blank lines
            parsed_dict[elements[1]] = elements[0]
    self._parsed_dict: Dict[str, str] = parsed_dict
def init_config(self, config: dict):
    """Initialize instance attributes; every subclass's __init__ must call this.

    The instance __dict__ is replaced wholesale, so `config` becomes the
    complete attribute set of the instance.

    :param config: dict whose keys are the field names needed for parsing and
                   whose values are the callables that parse each field.
    :return: None

    Fix: the annotation `Dict(str, callable)` was invalid typing syntax
    (parentheses, and `callable` is a builtin function, not a type).
    """
    conf: Dict[str, Callable] = config
    # Replace (not update) the instance dict: any pre-existing attributes are dropped.
    self.__dict__ = conf
def replace_keyed_meta(self, axis: int, replaced_meta_name: str,
                       values_dict: Dict[str, str],
                       key_meta_name: str = None) -> Adat:
    """Updates metadata in an Adat given a dictionary of values keyed to existing metadata.

    If a key does not exist in values_dict, the function will fill in missing
    data with pre-existing values and create a warning to notify the user.

    Parameters
    ----------
    axis : int
        The metadata/multiindex to operate on:
        0 - row metadata, 1 - column metadata
    replaced_meta_name : str
        The name of the index to be replaced.
    key_meta_name : str, optional
        The name of the index to use as the key-map.
        Will default to `replaced_meta_name` if None.
    values_dict : Dict[str, str]
        Values to be added to the metadata/multiindex keyed to the existing
        values in `key_meta_name`.

    Returns
    -------
    adat : Adat

    Examples
    --------
    >>> new_adat = adat.replace_keyed_meta(axis=0, replaced_meta_name='Barcode',
    ...                                    key_meta_name='SampleType',
    ...                                    values_dict={"J12345": "Calibrator"})
    >>> new_adat = adat.replace_keyed_meta(axis=1, replaced_meta_name='Type',
    ...                                    key_meta_name='SeqId',
    ...                                    values_dict={"12345-6": "ProteinSet1"})
    """
    # Bug fixes: the `values_dict` annotation used `Dict(str, str)`, which
    # instantiates typing.Dict and raises TypeError when the class is defined;
    # and `get_level_values(replaced_meta_name)` ran before the membership
    # check, so a missing name raised a KeyError instead of the documented
    # AdatKeyError. The check now runs first.
    key_meta_name = key_meta_name or replaced_meta_name
    metadata = get_pd_axis(self, axis)
    if replaced_meta_name not in metadata.names:
        raise AdatKeyError(
            'Name does not exists in index, use `adat.insert_keyed_meta` instead.'
        )
    key_meta = metadata.get_level_values(key_meta_name)
    values_to_update = metadata.get_level_values(replaced_meta_name)
    values = []
    warning_str = 'Some keys not provided, using original values for those keys'
    # 'once' so a long index does not spam one warning per missing key.
    warnings.filterwarnings('once', message=warning_str)
    for key, value in zip(key_meta, values_to_update):
        if key in values_dict:
            values.append(values_dict[key])
        else:
            warnings.warn(warning_str)
            values.append(value)  # keep the pre-existing value
    return self.replace_meta(axis, replaced_meta_name, values)
def count_nucleotides(genomes: Dict[str, str]) -> Dict[str, Dict[str, int]]:
    """Count per-letter occurrences in each genome sequence.

    :param genomes: mapping of genome name -> sequence string.
    :returns: genome name -> (letter -> occurrence count), for each letter in
              the module-level `letters` iterable (defined elsewhere in this
              file — presumably the nucleotide alphabet; confirm).

    Bug fix: the parameter annotation `Dict(str)` instantiated typing.Dict and
    raised TypeError at import time; changed to bracket syntax.
    """
    return {
        genome_name: {letter: sequence.count(letter) for letter in letters}
        for genome_name, sequence in genomes.items()
    }
def atom_list_to_dic_list(atomList: List[Atom]) -> Dict[str, List[str]]:
    """Transpose a list of atoms into per-field value lists.

    :param atomList: atoms whose `data_vals` dicts are collected.
    :returns: field name -> list of that field's values, one per atom, with
              every key of the module-level `atomSpecs` present (empty list
              when no atom supplies it).

    Fix: the local annotation used `Dict(str, List[str])` (parentheses), which
    is invalid typing syntax; changed to bracket syntax and the key
    initialization collapsed into a dict comprehension.
    NOTE(review): a key in `data_vals` that is absent from `atomSpecs` raises
    KeyError here, as in the original — confirm that is intended.
    """
    outputDict: Dict[str, List[str]] = {key: [] for key in atomSpecs}
    for atom in atomList:
        for key, val in atom.data_vals.items():
            outputDict[key].append(val)
    return outputDict
def get_all_books() -> List[Dict[str, Union[str, int]]]:
    """Return every book in data.db as a list of dicts.

    :returns: one dict per row with keys 'name', 'author', 'read'.

    Bug fix: the return annotation used `Dict(...)` and `Union(...)` with
    parentheses, which call (instantiate) the typing constructs and raise
    TypeError the moment the module is imported; changed to bracket syntax.
    NOTE(review): row[2] is skipped — presumably an id or unused column;
    confirm against the books table schema.
    """
    with DatabaseConnection('data.db') as connection:
        cursor = connection.cursor()
        cursor.execute('SELECT * FROM books')
        books = [
            {'name': row[0], 'author': row[1], 'read': row[3]}
            for row in cursor.fetchall()
        ]
    return books
def __init__(self):
    """Initialize per-direction queues and timers for the intersection.

    Bug fix: both attribute annotations used `Dict(Direction, int)`, which
    instantiates typing.Dict and raises TypeError the first time an instance
    is created; changed to bracket syntax. The value types are also corrected:
    `queues` holds lists of vehicle ids and `wait_timers` holds floats.
    """
    # FIFO of waiting vehicle ids, one queue per approach direction.
    self.queues: Dict[Direction, List[int]] = {
        Direction.UP: [],
        Direction.RIGHT: [],
        Direction.DOWN: [],
        Direction.LEFT: [],
    }
    # Accumulated waiting time per direction.
    self.wait_timers: Dict[Direction, float] = {
        Direction.UP: 0,
        Direction.RIGHT: 0,
        Direction.DOWN: 0,
        Direction.LEFT: 0,
    }
    self._last_dequeue_dir = Direction.LEFT  # so UP goes first
    # Timer to give the vehicle in the middle of the intersection time to
    # leave before the next vehicle is dequeued into it.
    self._clear_timer = 0
def insert_keyed_meta(
        self, axis: int, inserted_meta_name: str,
        key_meta_name: str, values_dict: Dict[str, str]) -> Adat:
    """Inserts metadata into Adat given a dictionary of values keyed to existing metadata.

    If a key does not exist in values_dict, the function will fill in missing
    data with empty string values and create a warning to notify the user.

    Parameters
    ----------
    axis : int
        The metadata/multiindex to operate on:
        0 - row metadata, 1 - column metadata
    inserted_meta_name : str
        The name of the index to be added.
    key_meta_name : str
        The name of the index to use as the key-map.
    values_dict : Dict[str, str]
        Values to be added to the metadata/multiindex keyed to the existing
        values in `key_meta_name`.

    Returns
    -------
    adat : Adat

    Examples
    --------
    >>> new_adat = adat.insert_keyed_meta(axis=0, inserted_meta_name='NewBarcode',
    ...                                   key_meta_name='Barcode',
    ...                                   values_dict={"J12345": "1"})
    >>> new_adat = adat.insert_keyed_meta(axis=1, inserted_meta_name='NewProteinType',
    ...                                   key_meta_name='Type',
    ...                                   values_dict={"Protein": "Buffer"})
    """
    # Bug fixes: the `values_dict` annotation used `Dict(str, str)`, which
    # instantiates typing.Dict and raises TypeError when the class is defined;
    # and the missing-key warning tested `None in values`, which can never be
    # true because missing keys append '' — it now checks for ''.
    metadata = get_pd_axis(self, axis)
    if inserted_meta_name in metadata.names:
        raise AdatKeyError(
            'Name already exists in index, use `adat.update_keyed_meta` instead.'
        )
    key_meta = metadata.get_level_values(key_meta_name)
    values = []
    for key in key_meta:
        if key in values_dict:
            values.append(values_dict[key])
        else:
            values.append('')  # placeholder for keys absent from values_dict
    if '' in values:
        warnings.warn('Empty string values inserted into metadata.', category=Warning)
    return self.insert_meta(axis, inserted_meta_name, values)
def __init__(self):
    """Initialize the theme: load colors and set up empty widget groupings.

    Bug fix: the `__item_groups` annotation used `Dict(str, list)`, which
    instantiates typing.Dict and raises TypeError the first time the theme is
    constructed; changed to bracket syntax.
    """
    self.__colors: dict = None  # Store each color for the theme; filled by update_colors().
    self.update_colors()  # Set the colors upon loading.
    # Stores tkinter form items under their respective color grouping so that
    # they are colored properly.
    # NOTE(review): keys are ThemeGroup constants — annotated as ThemeGroup;
    # confirm whether they are enum members or plain strings.
    self.__item_groups: Dict[ThemeGroup, list] = {
        ThemeGroup.MAIN_GROUP: [],
        ThemeGroup.BUTTON_GROUP: [],
        ThemeGroup.LABEL_GROUP: [],
        ThemeGroup.TEXT_BOX_GROUP: [],
    }
def get_features(self, word1: str, time_id: int) -> Dict[str, float]:
    """Return feature -> score for `word1` at `time_id`, from similar_features.

    Rows come back ordered by score descending, so insertion order of the
    returned dict is highest-score first.

    :param word1: word to look up (coerced to str for the query).
    :param time_id: time slice id (coerced to int for the query).
    :returns: mapping of feature name to its (float) score.

    Fix: the local annotation used `Dict(str, float)` (parentheses), which is
    invalid typing syntax; changed to bracket syntax.
    """
    features: Dict[str, float] = {}
    rows = self.db.query(
        'SELECT feature, score FROM similar_features '
        'WHERE word1=:tw and time_id=:td '
        'ORDER BY score DESC',
        tw=str(word1), td=int(time_id)
    )
    for row in rows:
        features[str(row['feature'])] = float(row['score'])
    return features
def __init__(self, bot, unique_id, logs):
    # Set up the cog: store the bot, create a Config scoped by unique_id, and
    # register the per-guild defaults used by the welcome-message feature.
    # `logs` is accepted but not used in this constructor.
    super().__init__()
    self.bot = bot
    self._config = Config.get_conf(
        self, identifier=unique_id, force_registration=True)
    # Per-guild defaults: role map plus welcome message text/id/channel.
    self._config.register_guild(
        roles={},
        welcome_message="",
        welcome_message_id="",
        welcome_message_channel="",
    )
    # NOTE(review): if `Dict` here is typing.Dict, this call raises
    # "TypeError: Type Dict cannot be instantiated" at construction and was
    # probably meant to be `{}`; if it is addict.Dict (or similar), it is
    # fine. Confirm which `Dict` is imported at the top of this file.
    self._cached = Dict()
def __init__(self, ak: str):
    """Initialize the client with a developer access key.

    :param ak: developer signature (ACCESS_KEY); an empty or non-str value is
               logged as an error but, as before, does not raise.
    """
    if not ak:
        logger.error("初始化ACCESS_KEY时出错:ACCESS_KEY为空")
    if not isinstance(ak, str):
        logger.error("初始化ACCESS_KEY时出错:ACCESS_KEY类型不为str")
    self.ak: str = ak
    # Bug fix: `Dict[Dict[str, int]]` (too few type parameters) and
    # `Dict(Dict[str, int])` (instantiation) both raise TypeError the moment
    # an instance is created; both are now two-parameter bracket annotations.
    # Registered users and their api_userid, stored per device number.
    self.devices: Dict[str, Dict[str, int]] = dict()
    # Registered devices and their api_userid, stored per gugu account number.
    self.users: Dict[str, Dict[str, int]] = dict()
    # api_userid -> the paper printed for it.
    self.printed_papers: Dict[int, Paper] = dict()
def on_generate(self, **kwargs):
    """Generate a value by delegating to a randomly chosen generator.

    A single roll in [0, 10] picks the delegate with the original weights:
    0 -> Bool (1/11), 1-3 -> String (3/11), 4-5 -> Int (2/11),
    6 -> Float (1/11), 7-10 -> Dict (4/11).
    """
    thresholds = ((1, Bool), (4, String), (6, Int), (7, Float))
    roll = random.randint(0, 10)
    delegate_cls = Dict  # fallback for rolls of 7 and above
    for upper, cls in thresholds:
        if roll < upper:
            delegate_cls = cls
            break
    return delegate_cls().on_generate()