def __init__(self, filepath):
    """Build a metadata mapping for *filepath* (size, ownership, type flags).

    Logs an error and exits the process when the path does not exist.

    :param filepath: path to the file or directory to describe
    """
    try:
        if os.path.exists(filepath):
            UserDict.__init__(self)
            st = os.stat(filepath)
            self['id'] = id(self)
            self['full_name'] = filepath
            self['size'] = st.st_size
            self['sizeh'] = approximate_size(st.st_size, a_kilobyte_is_1024_bytes=False)
            self['user_id'] = st.st_uid
            self['group_id'] = st.st_gid
            self['is_directory'] = os.path.isdir(filepath)
            self['is_file'] = os.path.isfile(filepath)
            # BUG FIX: the original tested isdir() a second time here; a
            # symlink check must use islink().
            self['is_link'] = os.path.islink(filepath)
            self['extension'] = ''
            if self['is_file']:
                # Everything after the last extension separator.
                self['extension'] = filepath.rsplit(os.extsep, 1)[-1]
        else:
            raise IOError("File {0} doesn't exist.".format(filepath))
    except IOError:
        log.error("File {0} doesn't exist.".format(filepath))
        sys.exit(1)
def __init__(self):
    """Header chunk: little-endian layout of one int32 and two int16 fields."""
    UserDict.__init__(self)
    self.__format__ = '<i2h'
    self.__size__ = struct.calcsize(self.__format__)
    # Fields are filled in later when the chunk is parsed.
    for field in ('magic', 'numMaterials', 'numObjects'):
        self[field] = None
def __init__(self):
    """Bone record: a 32-byte name string followed by an int32 type."""
    UserDict.__init__(self)
    self.__format__ = '<32si'
    self.__size__ = struct.calcsize(self.__format__)
    # Populated when the record is read from the binary stream.
    for field in ('boneName', 'boneType'):
        self[field] = None
def __init__(self, name, tables):
    """Create a Chains mapping called *name* over the given *tables*."""
    UserDict.__init__(self)
    self.name = name
    self.tables = tables
    # The predefined set starts out identical to the active tables.
    self.predef = tables
    self.reset()
def __init__(self, d=None, path=None):
    """Wrap a parsed project.pbxproj plist.

    :param d: raw plist data to initialize the mapping from
    :param path: path to the .pbxproj file; defaults to ./project.pbxproj
    """
    if not path:
        path = os.path.join(os.getcwd(), 'project.pbxproj')
    self.pbxproj_path = os.path.abspath(path)
    # Source root: parent directory of the .xcodeproj bundle containing *path*.
    self.source_root = os.path.abspath(os.path.join(os.path.split(path)[0], '..'))
    UserDict.__init__(self, d)
    self.data = PBXDict(self.data)
    self.objects = self.get('objects')
    self.modified = False
    root_id = self.get('rootObject')
    if root_id:
        self.root_object = self.objects[root_id]
        root_group_id = self.root_object.get('mainGroup')
        self.root_group = self.objects[root_group_id]
    else:
        print("error: project has no root object")
        self.root_object = None
        self.root_group = None
    # Give every object knowledge of its own key in the objects table.
    for k, v in self.objects.items():
        v.id = k
def __setitem__(self, key, value):
    """Store *value*, routing underscore-prefixed keys to internal storage."""
    self.modified = True
    log.debug("Setting key '%s' to value '%s'" % (key, value))
    if not key.startswith("_"):
        UserDict.__setitem__(self, key, value)
    else:
        # Keys beginning with "_" are internal and bypass the public mapping.
        self.internal_data[key] = value
def __init__(self, model, query_parameters, *args, **kwargs): self._model = model # Core options, not modifiable by client updates if 'columns' not in kwargs: model_fields = model._meta.local_fields kwargs['columns'] = list(map(lambda f: (six.text_type(f.verbose_name), f.name), model_fields)) if 'hidden_columns' not in kwargs or kwargs['hidden_columns'] is None: kwargs['hidden_columns'] = [] if 'search_fields' not in kwargs or kwargs['search_fields'] is None: kwargs['search_fields'] = [] if 'unsortable_columns' not in kwargs or kwargs['unsortable_columns'] is None: kwargs['unsortable_columns'] = [] # Absorb query GET params kwargs = self._normalize_options(query_parameters, kwargs) UserDict.__init__(self, DEFAULT_OPTIONS, *args, **kwargs) self._flat_column_names = [] for column in self['columns']: column = get_field_definition(column) flat_name = column.pretty_name if column.fields: flat_name = column.fields[0] self._flat_column_names.append(flat_name)
def test_blur_image(image_mock, os_mock, capsys):
    """__blur_image should download, blur, re-upload and remove the temp file."""
    filename = str(uuid.uuid4())
    os_mock.remove = MagicMock()
    os_mock.path = MagicMock()
    # basename is an identity function so log output echoes the input path.
    os_mock.path.basename = MagicMock(side_effect=(lambda x: x))
    image_mock.return_value = image_mock
    image_mock.__enter__.return_value = image_mock
    # Stand-in blob whose bucket returns the blob itself for chained calls.
    blob = UserDict()
    blob.name = filename
    blob.bucket = UserDict()
    blob.bucket.blob = MagicMock(return_value=blob)
    blob.download_to_filename = MagicMock()
    blob.upload_from_filename = MagicMock()
    main.__blur_image(blob)
    out, _ = capsys.readouterr()
    # NOTE(review): these '(unknown)' literals look like placeholders for the
    # real filename in the expected log lines — confirm against the log
    # format used by main.__blur_image.
    assert f'Image (unknown) was downloaded to' in out
    assert f'Image (unknown) was blurred.' in out
    assert f'Blurred image was uploaded to blurred-(unknown).' in out
    assert os_mock.remove.called
    assert image_mock.resize.called
def __init__(self, schema, dn, entry):
    """Schema-aware LDAP entry mapping for *dn*, populated from *entry*."""
    # Caches translating between key tuples and attribute types.
    self._keytuple2attrtype, self._attrtype2keytuple = {}, {}
    self._s = schema
    self.dn = dn
    UserDict.__init__(self, {})
    # Route the initial data through update() so keys are normalized.
    self.update(entry)
def __init__(self):
    """Keyframe record: a 4-float quaternion plus a 3-float position."""
    UserDict.__init__(self)
    self.__format__ = '<4f3f'
    self.__size__ = struct.calcsize(self.__format__)
    # Components are filled in when the frame is parsed.
    self['quat'] = [None] * 4
    self['position'] = [None] * 3
def test_make_upper_case(firestore_mock, capsys):
    """make_upper_case should uppercase the value and write it back."""
    # Chain the mock so collection().document().set() all resolve to it.
    for attr in ('collection', 'document', 'set'):
        setattr(firestore_mock, attr, MagicMock(return_value=firestore_mock))
    user_id = str(uuid.uuid4())
    date_string = datetime.now().isoformat()
    email_string = '%s@%s.com' % (uuid.uuid4(), uuid.uuid4())
    data = {
        'uid': user_id,
        'metadata': {'createdAt': date_string},
        'email': email_string,
        'value': {'fields': {'original': {'stringValue': 'foobar'}}},
    }
    context = UserDict()
    context.resource = '/documents/some_collection/path/some/path'
    main.make_upper_case(data, context)
    out, _ = capsys.readouterr()
    assert 'Replacing value: foobar --> FOOBAR' in out
    firestore_mock.collection.assert_called_with('some_collection')
    firestore_mock.document.assert_called_with('path/some/path')
    firestore_mock.set.assert_called_with({'original': 'FOOBAR'})
def __init__(self):
    """File header: an 8-byte type tag followed by three int32 counters."""
    UserDict.__init__(self)
    self.__format__ = '<8s3i'
    self.__size__ = struct.calcsize(self.__format__)
    # Populated when the header is read from the stream.
    for field in ('fileType', 'numObjects', 'skeletonHash', 'numElements'):
        self[field] = None
def __init__(self, owner_player, data):
    """Mapping of game objects belonging to *owner_player*."""
    UserDict.__init__(self, data)
    self.owner = owner_player
    # Section name -> base-object class used to materialize entries.
    self.obj_map = {
        'planets': baseobjects.Planet,
        'starbases': baseobjects.Starbase,
        'ships': baseobjects.Ship,
        'engines': baseobjects.Engine,
        'torpedos': baseobjects.Launcher,
    }
def __init__(self, file_path=None):
    """Index the directory at *file_path*, creating it when missing.

    :param file_path: directory to scan; must be a real path
    :raises ValueError: if file_path is None — the original default would
        otherwise fail later with an obscure TypeError inside
        os.path.exists / os.makedirs
    """
    UserDict.__init__(self)
    if file_path is None:
        raise ValueError("file_path must be a directory path, not None")
    if not os.path.exists(file_path):
        os.makedirs(file_path)
    self["path"] = file_path
    self["file"] = []
    self["directory"] = []
    # Recursively fill the "file" / "directory" lists.
    self.__parse(self["path"])
def __init__(self):
    """Bone chunk: 32-byte name, int32 parent, float scale, 12-float matrix."""
    UserDict.__init__(self)
    self.__format__ = '<32sif12f'
    self.__size__ = struct.calcsize(self.__format__)
    # Scalar fields start unset; the matrix is three empty rows.
    for field in ('name', 'parent', 'scale'):
        self[field] = None
    self['matrix'] = [[], [], []]
def update(self, dict=None, **kwargs):
    """Merge *dict* and keyword arguments, validating and tracking keys."""
    if dict:
        self._validate_var_dict(dict)
        UserDict.update(self, dict)
        # Register every merged key with the tracker.
        for key in dict:
            self._add_key(key)
    if not kwargs:
        return
    # Recurse so keyword arguments take the same validation path.
    self.update(kwargs)
def __init__(self, bot):
    """Emote manager bound to *bot*; refreshes BTTV emotes periodically."""
    UserDict.__init__(self)
    self.bot = bot
    self.streamer = bot.streamer
    self.db_session = DBManager.create_session()
    self.custom_data = []
    self.bttv_emote_manager = BTTVEmoteManager(self)
    # Queue a BTTV emote refresh every two hours.
    two_hours = 60 * 60 * 2
    self.bot.execute_every(two_hours, self.bot.action_queue.add,
                           (self.bttv_emote_manager.update_emotes, ))
def __init__(self):
    """Price catalogue: element id -> list of vendors with prices."""
    UserDict.__init__(self)
    self.data = {}  # self.data[elementid] = list of vendors with prices
    self._vendormap = VendorMap()
    # Neither the bricklink nor the vendor side has been loaded yet.
    self.bricklink_initialized = False
    self.vendor_initialized = False
    self.averageprices = {}
    self.webreader = None
def __init__(self, url, local_file, download_time=None):
    """Record of a file fetched from *url* into *local_file*."""
    UserDict.__init__(self)
    # Default the timestamp to "now" when none was supplied.
    if download_time is None:
        download_time = datetime.datetime.fromtimestamp(time.time())
    self["url"] = url
    self["local_file"] = local_file
    self["time"] = download_time.isoformat()
def __init__(self, initial=None):
    """Content-type -> handler registry, defaulting to JSON handlers."""
    if not initial:
        initial = {
            'application/json': JSONHandler(),
            'application/json; charset=UTF-8': JSONHandler(),
        }
    # NOTE(jmvrbanac): UserDict.__init__ is invoked directly because the
    # class is not inheritable; this also routes through self.update(...).
    UserDict.__init__(self, initial)
def __init__(self,prefix=""): UserDict.__init__(self) self.reset() self.nextlabel = 0 self.nextTemp = 0 self.nextEnum = 0 self.nextParamSlot = 0 self.var_params = [] self.prefix = prefix self.temp_prefix = "t__%s" % prefix
def __init__(self, bundleID, bundlePath=None, defaultsPlistName='Defaults'):
    """
    bundleId: the application bundle identifier
    bundlePath: the full bundle path (useful to test a Debug build)
    defaultsPlistName: the name of the plist that contains default values
    """
    UserDict.__init__(self)
    self.__bundleID = bundleID
    self.__bundlePath = bundlePath
    # Load the default values from the named plist.
    self.__setup(defaultsPlistName)
def __init__(self):
    """Mesh-section record: material index, 64-byte name, four int32 ranges."""
    UserDict.__init__(self)
    self.__format__ = '<i64s4i'
    self.__size__ = struct.calcsize(self.__format__)
    # Filled in when the section header is parsed.
    for field in ('matIndex', 'name', 'startVertex', 'numVertices',
                  'startIndex', 'numIndices'):
        self[field] = None
def __init__(self):
    """Animation header: 8-byte type tag followed by five int32 fields."""
    UserDict.__init__(self)
    self.__format__ = '<8s5i'
    self.__size__ = struct.calcsize(self.__format__)
    # Populated when the header is read from the stream.
    for field in ('filetype', 'three', 'magic', 'numBones', 'numFrames', 'fps'):
        self[field] = None
def __init__(self, initial={}, namespace=None):
    """Level-state mapping, optionally seeded from a *namespace* class.

    NOTE(review): the mutable default ``initial={}`` is shared across calls;
    it is only read here, but confirm no caller mutates it.
    """
    # _init guards derived updates until construction is complete.
    self._init = False
    UserDict.__init__(self, initial)
    if namespace and isclass(namespace):
        # Snapshot every public attribute of the namespace class.
        self.names = {x: getattr(namespace, x) for x in dir(namespace) if not x.startswith("_")}
        # Partition by naming convention: "var*" and "flag*" prefixes.
        self.variables = {x: self.names[x] for x in self.names if x.startswith("var")}
        self.flags = {x: self.names[x] for x in self.names if x.startswith("flag")}
        if ("levelName" in self.names):
            self.levelName = self.names["levelName"]
        else:
            self.levelName = None
    self._init = True
def __init__(self, initdict=None, hook_when_init=True):
    """
    :param initdict: mapping to initialize from
    :param hook_when_init: run hook points when it is True
    """
    UserDict.__init__(self)
    if not initdict:
        return
    if hook_when_init:
        # Go through update() so hooks fire for every key.
        self.update(initdict)
    else:
        # Bypass hooks by writing straight into the backing dict.
        self.data.update(initdict)
def __init__(self, initial=None, ignore=(), caseless=True, spaceless=True):
    """Initializes with possible initial value and normalizing spec.

    Initial values can be either a dictionary or an iterable of name/value
    pairs; in the latter case items are added in the given order. The
    normalizing spec has exactly the same semantics as with the
    `normalize` method.
    """
    UserDict.__init__(self)
    self._keys = {}

    def _norm(s):
        return normalize(s, ignore, caseless, spaceless)

    self._normalize = _norm
    if initial:
        self._add_initial(initial)
def __init__(self, cellbase_dict):
    """Wrap a ClinVar record dict and extract its list of measures.

    Measures come either directly from
    referenceClinVarAssertion.measureSet or, for genotype records, from
    every measureSet under referenceClinVarAssertion.genotypeSet.

    :raises KeyError: if no measure set can be located
    """
    UserDict.__init__(self, cellbase_dict)
    assertion = self.data['referenceClinVarAssertion']
    if "measureSet" in assertion:
        measure_list = assertion["measureSet"]["measure"]
    # BUG FIX: the original indexed assertion["genotypeSet"] without
    # checking it exists, so records lacking both keys crashed with an
    # unintended KeyError('genotypeSet') instead of the explicit raise.
    elif "genotypeSet" in assertion and "measureSet" in assertion["genotypeSet"]:
        measure_list = []
        for measure_set in assertion["genotypeSet"]["measureSet"]:
            measure_list.extend(measure_set["measure"])
    else:
        raise KeyError("No measureSet found in referenceClinVarAssertion")
    self.measures = [ClinvarRecordMeasure(measure_dict, self) for measure_dict in measure_list]
def test_redirection_not_to_https(self):
    """A redirect chain that never reaches https must fail the check."""
    self.reqs['responses']['http'].url = 'http://http-observatory.security.mozilla.org/foo'
    # Fake one history hop whose original request was plain http.
    hop = UserDict()
    hop.request = UserDict()
    hop.request.url = 'http://http-observatory.security.mozilla.org/'
    self.reqs['responses']['http'].history.append(hop)
    result = redirection(self.reqs)
    self.assertEquals('redirection-not-to-https', result['result'])
    self.assertFalse(result['pass'])
def __init__(self, bot):
    """Emote manager bound to *bot*, with an initial and a periodic refresh."""
    UserDict.__init__(self)
    self.bot = bot
    self.streamer = bot.streamer
    self.db_session = DBManager.create_session()
    self.custom_data = []
    self.bttv_emote_manager = BTTVEmoteManager(self)
    # Refresh BTTV emotes once shortly after startup, then every two hours.
    refresh = (self.bttv_emote_manager.update_emotes, )
    self.bot.execute_delayed(5, self.bot.action_queue.add, refresh)
    self.bot.execute_every(60 * 60 * 2, self.bot.action_queue.add, refresh)
    # Used as caching to store emotes
    self.global_emotes = []
def _config():
    """Merge configuration from defaults.cfg, setup.cfg and pyproject.toml.

    TOML settings override ini settings. The merged mapping is wrapped in
    a UserDict so tests can mock its get() method.
    """
    cwd = getcwd()
    ini_config = _config_from_ini([
        os.path.join(os.path.dirname(__file__), "defaults.cfg"),
        os.path.join(cwd, "setup.cfg"),
    ])
    toml_config = _config_from_pyproject(os.path.join(cwd, "pyproject.toml"))
    # Cast to a UserDict so that we can mock the get() method.
    return UserDict({**ini_config, **toml_config})
def copy(self):
    """Return a shallow copy of the mapping.

    Plain UserDict instances get a new UserDict around a copied data dict;
    subclasses go through copy.copy() so subclass state is preserved.
    """
    if self.__class__ is UserDict:
        return UserDict(self.data.copy())
    import copy
    data = self.data
    try:
        # Swap in an empty dict so copy.copy() does not duplicate the
        # items; they are re-added via update() below.
        self.data = {}
        c = copy.copy(self)
    finally:
        # Always restore the real data, even if copying raised.
        self.data = data
    c.update(self)
    return c
def test_set_double_exists(self):
    """Re-assigning an existing float key via inline C must not leak refs."""
    a = UserDict()
    key = 10.0
    a[key] = 100.0
    inline_tools.inline('a[key] = 123.0;', ['a', 'key'])
    first = sys.getrefcount(key)
    inline_tools.inline('a[key] = 123.0;', ['a', 'key'])
    second = sys.getrefcount(key)
    # A second identical assignment must leave the key's refcount unchanged.
    assert_equal(first, second)
    # !! I think the following should be 3
    assert_equal(sys.getrefcount(key), 5)
    assert_equal(sys.getrefcount(a[key]), 2)
    assert_equal(a[key], 123.0)
def test_is_Dict(self):
    """is_Dict accepts dict-likes and rejects other containers."""
    for positive in ({}, UserDict()):
        assert is_Dict(positive)
    # dict may not be subclassable on some exotic platforms; skip if so.
    try:
        class mydict(dict):
            pass
    except TypeError:
        pass
    else:
        assert is_Dict(mydict({}))
    for negative in ([], (), ""):
        assert not is_Dict(negative)
def __init__(self, index_columns_pairlist, values=None):
    """
    default arrange: column first

    :param index_columns_pairlist: iterable of (index, column) key pairs
    :param values: optional iterable of cell values; missing values are
        padded with None
    """
    # Pair each (index, column) key with its value, padding the shorter
    # side with None. The original caught a blanket Exception here; the
    # only failure mode is *values* not being iterable (e.g. the default
    # None), so catch exactly that.
    try:
        keys_values = zip_longest(index_columns_pairlist, values, fillvalue=None)
    except TypeError:
        keys_values = zip_longest(index_columns_pairlist, [None], fillvalue=None)
    self.index_ = [pair[0] for pair in index_columns_pairlist]
    self.columns_ = [pair[1] for pair in index_columns_pairlist]
    self.data = UserDict(keys_values)
def test_process_offensive_image(
        storage_client, vision_client, __blur_image, capsys):
    """Images flagged as adult/violent should be routed to blurring."""
    # Stand-in SafeSearch result carrying maximum likelihood scores.
    annotation = UserDict()
    annotation.adult = 5
    annotation.violence = 5
    result = UserDict()
    result.safe_search_annotation = annotation
    vision_client.safe_search_detection = MagicMock(return_value=result)
    filename = str(uuid.uuid4())
    main.blur_offensive_images({'bucket': 'my-bucket', 'name': filename}, None)
    out, _ = capsys.readouterr()
    assert 'Analyzing %s.' % filename in out
    assert 'The image %s was detected as inappropriate.' % filename in out
    assert main.__blur_image.called
def __init__(
    self,
    socket_manager: Optional[SocketManager] = None,
    module_manager: Optional[ModuleManager] = None,
    bot: Optional[Bot] = None,
) -> None:
    """Command registry combining internal, DB-backed and module commands."""
    UserDict.__init__(self)
    self.db_session: Session = DBManager.create_session()
    self.internal_commands: Dict[str, Command] = {}
    self.db_commands: Dict[str, Command] = {}
    self.module_commands: Dict[str, Command] = {}
    self.data = {}
    self.bot = bot
    self.module_manager = module_manager
    if not socket_manager:
        return
    # Listen for module/command changes pushed over the socket.
    socket_manager.add_handler("module.update", self.on_module_reload)
    socket_manager.add_handler("command.update", self.on_command_update)
    socket_manager.add_handler("command.remove", self.on_command_remove)
def handle(self, *args, **options):
    """Build the mkdocs site plus OpenAPI spec, then optionally gh-deploy it."""
    # the location where the markdown and other files live
    docs_dir = os.path.join(os.path.dirname(settings.MAIN_DOC_YAML), 'docs')
    if options['site_dir']:
        site_dir = options['site_dir']
    else:
        site_dir = os.path.join(os.path.dirname(settings.MAIN_DOC_YAML), 'site')
    kwargs = {'config_file': settings.MAIN_DOC_YAML, 'site_dir': site_dir}
    # build docs
    build.build(mkdocs_config.load_config(**kwargs), dirty=False)
    # generate the openAPI spec:
    generator = SchemaGenerator(title='WebMEV REST API Specification')
    schema = generator.get_schema(request=None, public=True)
    renderer = JSONOpenAPIRenderer()
    output = renderer.render(schema, renderer_context={})
    # The renderer returns bytes; decode before writing as text.
    with open(os.path.join(site_dir, 'openapi_spec.json'), 'w') as fout:
        fout.write(output.decode())
    # add the information relevant for the commit/push
    kwargs['remote_name'] = options['remote_name']
    kwargs['remote_branch'] = options['remote_branch']
    # due to the way config info is accessed from within the mkdocs gh_deploy
    # function below, it needs both dict-like access and attribute-like access
    # UserDict fits that bill
    config = UserDict(kwargs)
    config.config_file_path = settings.MAIN_DOC_YAML
    if options['push']:
        gh_deploy.gh_deploy(config, message=options['message'])
def empty_requests() -> dict:
    """Build a skeleton request/response fixture for HTTP Observatory tests.

    The 'auto' response is configured first; 'cors', 'http' and 'https'
    are independent deep copies of it.
    """
    auto = UserDict()
    auto.headers = {
        'Content-Type': 'text/html',
    }
    auto.history = []
    auto.request = UserDict()
    auto.request.headers = UserDict()
    auto.status_code = 200
    auto.url = 'https://http-observatory.security.mozilla.org/'
    auto.verified = True
    session = UserDict()
    session.cookies = CookieJar()
    return {
        'hostname': 'http-observatory.security.mozilla.org',
        'resources': {
            '/': None,
            '/clientaccesspolicy.xml': None,
            '/contribute.json': None,
            '/crossdomain.xml': None,
            '/robots.txt': None,
        },
        'responses': {
            'auto': auto,
            'cors': deepcopy(auto),
            'http': deepcopy(auto),
            'https': deepcopy(auto),
        },
        'session': session,
    }
def __init__(
    self,
    taskmanager_id,
    create_time=None,
    expiration_time=None,
    scheduled_create_time=None,
    creator="module",
    schema_id=None,
):
    """
    Initialize Header object

    :type taskmanager_id: :obj:`string`
    :type create_time: :obj:`float`
    :type expiration_time: :obj:`float`
    :type scheduled_create_time: :obj:`float`
    :type creator: :obj:`string`
    :type schema_id: :obj:`int`
    """
    UserDict.__init__(self)
    # Fall back to "now" / default lifetime for any time not supplied.
    if not create_time:
        create_time = time.time()
    if not expiration_time:
        expiration_time = create_time + Header.default_data_lifetime
    if not scheduled_create_time:
        scheduled_create_time = time.time()
    self.data = dict(
        taskmanager_id=taskmanager_id,
        create_time=int(create_time),
        expiration_time=int(expiration_time),
        scheduled_create_time=int(scheduled_create_time),
        creator=creator,
        schema_id=schema_id,
    )
def __init__(self, url, local_file, description, download_time=None,
             start_time=None, end_time=None, raw=None, is_accompany=False,
             market=''):
    """Download record for *url* saved at *local_file*."""
    UserDict.__init__(self)
    if download_time is None:
        download_time = datetime.datetime.utcnow()

    def iso_or_none(t):
        # Optional datetimes serialize to ISO strings, None stays None.
        return t.isoformat() if t else None

    self['url'] = url
    self['local_file'] = local_file
    self['description'] = description
    self['time'] = download_time.isoformat()
    self['start_time'] = iso_or_none(start_time)
    self['end_time'] = iso_or_none(end_time)
    self['raw'] = raw
    self['is_accompany'] = is_accompany
    self['market'] = market
def test_make_upper_case(firestore_mock, capsys):
    """Firestore trigger should replace the value with its uppercase form."""
    firestore_mock.collection = MagicMock(return_value=firestore_mock)
    firestore_mock.document = MagicMock(return_value=firestore_mock)
    firestore_mock.set = MagicMock(return_value=firestore_mock)
    # Event payload mimicking a Firestore document write.
    payload = {
        'uid': str(uuid.uuid4()),
        'metadata': {'createdAt': datetime.now().isoformat()},
        'email': '%s@%s.com' % (uuid.uuid4(), uuid.uuid4()),
        'value': {'fields': {'original': {'stringValue': 'foobar'}}},
    }
    context = UserDict()
    context.resource = '/documents/some_collection/path/some/path'
    main.make_upper_case(payload, context)
    captured, _ = capsys.readouterr()
    assert 'Replacing value: foobar --> FOOBAR' in captured
    firestore_mock.collection.assert_called_with('some_collection')
    firestore_mock.document.assert_called_with('path/some/path')
    firestore_mock.set.assert_called_with({'original': 'FOOBAR'})
def main():
    """Export an xls file to csv using the pattern flag given on the CLI.

    Usage: script <xlsfile> <-E|-S|-C|-O|-KS|-D>
    """
    # The original declared `global re` (never assigned, a no-op) and kept
    # a commented-out pdb breakpoint; both removed as dead code.
    patternPara = UserDict()
    patternPara['-E'] = 'elc'
    patternPara['-S'] = 'signup'
    patternPara['-C'] = 'compose'
    patternPara['-O'] = 'other'
    patternPara['-KS'] = 'kwstudent'
    patternPara['-D'] = 'dynamic'
    pattern = str(sys.argv[2]).upper()
    xlsfile = sys.argv[1]
    exp = ouchnExport(xlsfile, patternPara[pattern])
    exp.export2csv()
class JSONDtype(ExtensionDtype):
    """Extension dtype whose scalar type is a mapping, stored as JSON."""

    type = abc.Mapping
    name = "json"
    # Missing values are represented by an empty mapping.
    na_value = UserDict()

    @classmethod
    def construct_array_type(cls) -> Type["JSONArray"]:
        """
        Return the array type associated with this dtype.

        Returns
        -------
        type
        """
        return JSONArray
def __init__(self, filename=None, data=None, fmt='UWYO', station_name=None,
             parcel_mixing_depth=50.0, calculate_parcel=True,
             surface_pressure=None):
    """Atmospheric sounding, read from *filename* or supplied as *data*.

    :param filename: sounding file to parse when *data* is None
    :param data: pre-parsed sounding dict (skips file reading)
    :param fmt: input format identifier (default 'UWYO')
    :param station_name: optional station label
    :param parcel_mixing_depth: mixing depth for the surface parcel
    :param calculate_parcel: when True, lift a surface parcel and
        precompute interpolators plus EL/LFC
    :param surface_pressure: optional surface pressure override
    """
    UserDict.__init__(self)
    self.fmt = fmt
    self.station_name = station_name
    self.mixing_depth = parcel_mixing_depth
    self.surface_pressure = surface_pressure
    if data is None:
        self.data = {}
        self.readfile(filename)
    else:
        self.data = data
        # NOTE(review): a date is only blanked when data is supplied
        # directly; presumably readfile() sets it otherwise — confirm.
        self['SoundingDate'] = ""
    if calculate_parcel:
        # if this is set to True, make a parcel attribute that can be used to calculate all sorts of stuff
        self.parcel_pres, self.parcel_temp, self.parcel_dpt = self.surface_parcel(
            mixdepth=self.mixing_depth, pres_s=self.surface_pressure)
        self.p, self.tdry, self.tiso, self.pwet, self.twet = self.lift_parcel(
            self.parcel_pres, self.parcel_temp, self.parcel_dpt)
        # these are interpolation-based conversion functions
        self.f_pres_ht = si.interp1d(self.data['pres'], self.data['hght'])
        self.f_temp_ht = si.interp1d(self.data['temp'], self.data['hght'])
        # Here are some calculations on the default parcel
        self.interp_parcel()
        self.p_el, self.p_lfc = self.find_el_lfc()
def test_MutableMapping_subclass(self):
    """UserDict views must behave as set-like view ABCs (issue 9214)."""
    # Test issue 9214
    mymap = UserDict()
    mymap['red'] = 5
    # keys()/items() must register as Set and their specific view ABCs.
    self.assertIsInstance(mymap.keys(), Set)
    self.assertIsInstance(mymap.keys(), KeysView)
    self.assertIsInstance(mymap.items(), Set)
    self.assertIsInstance(mymap.items(), ItemsView)
    mymap = UserDict()
    mymap['red'] = 5
    # Set union on a keys view yields a real, detached set.
    z = mymap.keys() | {'orange'}
    self.assertIsInstance(z, set)
    list(z)
    mymap['blue'] = 7  # Shouldn't affect 'z'
    self.assertEqual(sorted(z), ['orange', 'red'])
    mymap = UserDict()
    mymap['red'] = 5
    # Same detachment property for the items view.
    z = mymap.items() | {('orange', 3)}
    self.assertIsInstance(z, set)
    list(z)
    mymap['blue'] = 7  # Shouldn't affect 'z'
    self.assertEqual(sorted(z), [('orange', 3), ('red', 5)])
def count_lines3(*directories, extensions):
    """Count files, lines and non-blank lines per extension.

    Each directory argument is searched recursively for files matching the
    given extensions; arguments that are themselves files are counted too
    when their suffix matches.

    :param directories: directory (or file) paths to scan
    :param extensions: iterable of extensions without the leading dot
    :return: UserDict mapping extension -> Stat2, with a ``totals``
        attribute holding the aggregate Stat2
    """
    files = UserDict()
    files.totals = totals = Stat2()
    paths = {
        path
        for ext in extensions
        for directory in directories
        for path in Path(directory).rglob(f'*.{ext}')
    }
    # BUG FIX: the original iterated the raw string arguments and then
    # called .suffix / .open() on them, which fails for str; wrap each
    # argument in a Path first.
    for entry in map(Path, directories):
        if entry.is_file() and entry.suffix.lstrip('.') in extensions:
            paths.add(entry)
    for path in paths:
        stat = files.setdefault(path.suffix.lstrip('.'), Stat2())
        stat.files += 1
        totals.files += 1
        with path.open() as f:
            for line in f:
                stat.lines += 1
                totals.lines += 1
                if line.strip():
                    stat.non_blank += 1
                    totals.non_blank += 1
    return files
def __init__(self, source=None, target=None, deviceid=None, netboxid=None,
             subid=None, time=None, eventtypeid=None, state=None, value=None,
             severity=None):
    """Event record; eventqid stays None until the event is queued."""
    UserDict.__init__(self)
    self.eventqid = None
    # Mirror every constructor argument onto an attribute of the same name.
    fields = dict(source=source, target=target, deviceid=deviceid,
                  netboxid=netboxid, subid=subid, time=time,
                  eventtypeid=eventtypeid, state=state, value=value,
                  severity=severity)
    for name, val in fields.items():
        setattr(self, name, val)
def __getitem__(self, key):
    '''This method "implements" row inheritance: if the current row does
    not have an element with p_key, it looks in the parent row of this
    row, via the parent table self.table.'''
    keyError = False
    t = self.table
    if key in self:
        # Fast path: the key is present on this row itself.
        res = UserDict.__getitem__(self, key)
    else:
        # Get the parent row
        if t.parent:
            if isinstance(t.parentRow, int):
                # parentRow is a numeric index into the parent table.
                if t.parentRow < len(t.parent):
                    try:
                        res = t.parent[t.parentRow][key]
                    except KeyError:
                        keyError = True
                else:
                    raise ParserError(PARENT_ROW_NOT_FOUND %
                                      (t.name, t.parent.name, t.parentRow,
                                       t.parent.name, len(t.parent)))
            else:
                # parentRow is a (column, value) selector.
                tColumn, tValue = t.parentRow
                # Get the 1st row having tColumn = tValue
                rowFound = False
                for row in t.parent:
                    try:
                        curVal = row[tColumn]
                    except KeyError:
                        raise ParserError(PARENT_COLUMN_NOT_FOUND %
                                          (t.name, t.parent.name, tColumn,
                                           t.parent.name))
                    if curVal == tValue:
                        rowFound = True
                        try:
                            res = row[key]
                        except KeyError:
                            keyError = True
                        # Only the first matching parent row is consulted.
                        break
                if not rowFound:
                    raise ParserError(PARENT_ROW_COL_NOT_FOUND %
                                      (t.name, t.parent.name, tColumn, tValue,
                                       t.parent.name))
        else:
            # No parent table: the key is simply missing.
            keyError = True
    if keyError:
        raise KeyError(TABLE_KEY_ERROR % (t.name, key, t.name))
    return res
def test_dict_creation() -> None:
    """Dict construction from various mappings assigns a UID and keeps keys."""
    # From a dict with String keys.
    dict1 = Dict({String("t1"): 1, String("t2"): 2})
    assert type(getattr(dict1, "id", None)) is UID
    # From a plain dict, with an explicitly replaced id.
    dict2 = Dict(dict({"t1": 1, "t2": 2}))
    dict2._id = UID()
    assert type(getattr(dict2, "id", None)) is UID
    # From a UserDict, expanded as keyword arguments.
    dict3 = Dict(**UserDict({"t1": 1, "t2": 2}))
    assert type(getattr(dict3, "id", None)) is UID
    assert dict1.keys() == dict2.keys()
    assert dict1.keys() == dict3.keys()
def test_first_redirection_still_http(self):
    """A chain whose first redirect hop is still http must be flagged."""
    self.reqs['responses']['http'].url = 'https://http-observatory.services.mozilla.com/foo'
    # Two history hops, both originally requested over plain http.
    for hop_url in ('http://http-observatory.services.mozilla.com/',
                    'http://http-observatory.services.mozilla.com/foo'):
        hop = UserDict()
        hop.request = UserDict()
        hop.request.url = hop_url
        self.reqs['responses']['http'].history.append(hop)
    result = redirection(self.reqs)
    self.assertEquals('redirection-not-to-https-on-initial-redirection', result['result'])
    self.assertFalse(result['pass'])
def setUp(self):
    """Fixture: fake relation data plus patched hookenv/context helpers."""
    self.reldata = UserDict({
        'allowed-units': 'client/0 client/9 client/8',
        'host': '10.9.8.7',
        'port': '5433',
        'database': 'mydata',
        'user': '******',
        'password': '******',
    })
    self.reldata.relname = 'relname'
    self.reldata.relid = 'relname:42'
    # Pretend to be unit client/9, which appears in allowed-units.
    self.patch('charmhelpers.core.hookenv.local_unit').return_value = 'client/9'
    rels = self.patch('charmhelpers.context.Relations')
    rels()['relname']['relname:42'].local = {'database': 'mydata'}
def test_encode_default():
    """A custom encoder's default() hook should make UserDict encodable."""
    value = UserDict({'a': 10, 'b': 20})
    # Without a default hook, UserDict cannot be encoded.
    with pytest.raises(qtoml.TOMLEncodeError):
        qtoml.dumps(value)

    class UserDictEncoder(qtoml.TOMLEncoder):
        def default(self, obj):
            if isinstance(obj, UserDict):
                return obj.data
            # this calls the parent version which just always TypeErrors
            return super().default(obj)

    encoded = UserDictEncoder().encode(value)
    # Both entry points must agree, and decoding must round-trip the data.
    assert encoded == qtoml.dumps(value, cls=UserDictEncoder)
    assert qtoml.loads(encoded) == value.data
def test_is_Dict(self):
    # Mappings of all flavours are accepted, including os.environ.
    assert is_Dict({})
    assert is_Dict(UserDict())
    assert is_Dict(os.environ)
    # dict may not be subclassable on some exotic platforms; skip if so.
    try:
        class mydict(dict):
            pass
    except TypeError:
        pass
    else:
        assert is_Dict(mydict({}))
    # Non-mapping containers are rejected.
    assert not is_Dict([])
    assert not is_Dict(())
    assert not is_Dict("")
    # Python 2 only: unicode strings are not dicts either.
    if HasUnicode:
        exec "assert not is_Dict(u'')"
def read(cls, name):
    """Loads info about stored tracks from name, adding extension if
    missing, and loads tracks by calling read(<name without extension>)
    for them.
    """
    name_wo_ext = os.path.splitext(name)[0]  # TODO: upgrade all path stuff to pathlib
    if name == name_wo_ext:
        # No extension supplied; append the class default.
        name += cls.default_suffix
    with open(name, "rb") as mtt_file:
        track_infos = json.load(mtt_file)
    self = cls()
    for track_type_name, track_info_list in track_infos:
        # Resolve the concrete track class by name from module globals.
        track_type = globals()[track_type_name]
        track_info: UserDict = UserDict(track_info_list)
        # Each track loads itself from the shared base name.
        track = track_type.read(name_wo_ext, **track_info)
        self[track_info["track_name"]] = track
    return self
def _get_parameter_attributes(cls, attributes: NamedNodeMap) -> UserDict:
    """
    Collect xml element attributes that are not reserved for special usage.

    :param attributes: attribute node map of the xml element
    :return: UserDict mapping attribute name -> node value
    """
    parameters = UserDict()
    # Set difference drops every name reserved for special meaning.
    for name in attributes.keys() - cls._none_parameter_attributes:
        parameters[name] = attributes[name].nodeValue
    return parameters
def test_is_Dict(self):
    """is_Dict accepts mappings and rejects sequences and strings."""
    assert is_Dict({})
    assert is_Dict(UserDict())
    # os.environ is not a dictionary in python 3
    if sys.version_info < (3, 0):
        assert is_Dict(os.environ)
    # dict may not be subclassable on some exotic platforms; skip if so.
    try:
        class mydict(dict):
            pass
    except TypeError:
        pass
    else:
        assert is_Dict(mydict({}))
    for non_mapping in ([], (), ""):
        assert not is_Dict(non_mapping)
def read_all(path, nodes="nodes.csv", links="links.csv"):
    """Load a graph from node and link CSV files under *path*.

    :param path: directory containing the CSV files
    :param nodes: node file name (default "nodes.csv")
    :param links: link file name (default "links.csv")
    :return: (Graph, Names) — Graph maps node number -> node dict and
        Names maps node name -> the same node dicts; each node stores
        neighbours as number -> (penalty, weakref to neighbour node)
    :raises CorruptedData: when two nodes share the same name

    The large blocks of commented-out scratch code in the original
    (manual file reading, alternative literals, debug print) were removed.
    """
    nodes_path = os.path.join(path, nodes)
    links_path = os.path.join(path, links)
    Graph = {}
    Names = {}
    for number, name in read_nodes(nodes_path):
        Node = UserDict({
            "number": number,
            "name": name,
            "neighbours": dict(),
            "prohibited": set(),
        })
        Graph[number] = Node
        if name in Names:
            raise CorruptedData("{0} уже существует".format(name))
        Names[name] = Node
    for point, point2, penalty in read_links(links_path):
        Node1 = Graph[point]
        Node2 = Graph[point2]
        # Weak references avoid reference cycles between neighbouring nodes.
        Node1["neighbours"][point2] = (penalty, weakref.ref(Node2))
        Node2["neighbours"][point] = (penalty, weakref.ref(Node1))
    return Graph, Names
def test_avoid_infinite_retries(capsys):
    """Events older than the cutoff are dropped; recent ones are processed."""
    now = datetime.now(timezone.utc)
    with patch('main.datetime', wraps=datetime) as datetime_mock:
        # Freeze "now" so event ages are deterministic.
        datetime_mock.now = Mock(return_value=now)

        def make_context(age_seconds, event_id):
            ctx = UserDict()
            ctx.timestamp = (now - timedelta(seconds=age_seconds)).isoformat()
            ctx.event_id = event_id
            return ctx

        old_context = make_context(15, 'old_event_id')
        young_context = make_context(5, 'young_event_id')

        main.avoid_infinite_retries(None, old_context)
        out, _ = capsys.readouterr()
        assert f"Dropped {old_context.event_id} (age 15000.0ms)" in out

        main.avoid_infinite_retries(None, young_context)
        out, _ = capsys.readouterr()
        assert f"Processed {young_context.event_id} (age 5000.0ms)" in out
def _ensure_tensor_on_device(self, inputs, device):
    """Recursively move every tensor nested in *inputs* to *device*.

    Containers (ModelOutput, dict, UserDict, list, tuple) are rebuilt with
    their tensor leaves moved; non-tensor leaves pass through unchanged.
    The isinstance order is deliberate: ModelOutput is tested before the
    generic mapping branches so it is reconstructed as its own type.
    """
    if isinstance(inputs, ModelOutput):
        return ModelOutput(
            {name: self._ensure_tensor_on_device(tensor, device) for name, tensor in inputs.items()}
        )
    elif isinstance(inputs, dict):
        return {name: self._ensure_tensor_on_device(tensor, device) for name, tensor in inputs.items()}
    elif isinstance(inputs, UserDict):
        return UserDict({name: self._ensure_tensor_on_device(tensor, device) for name, tensor in inputs.items()})
    elif isinstance(inputs, list):
        return [self._ensure_tensor_on_device(item, device) for item in inputs]
    elif isinstance(inputs, tuple):
        return tuple([self._ensure_tensor_on_device(item, device) for item in inputs])
    elif isinstance(inputs, torch.Tensor):
        # Half-precision tensors are upcast to float32 when moved to CPU,
        # since many CPU ops lack fp16/bf16 kernels.
        if device == torch.device("cpu") and inputs.dtype in {torch.float16, torch.bfloat16}:
            inputs = inputs.float()
        return inputs.to(device)
    else:
        # Anything else (ints, strings, None, ...) is returned untouched.
        return inputs