def register(self, meth):
    """Register a new method as a multimethod.

    Builds a ``(name, annotation)`` type signature from the method's
    parameters and stores it in ``self._methods``.  For parameters with
    default values a shorter prefix signature is registered as well, so
    the method can be dispatched without the optional arguments.

    :param meth: plain function, ``classmethod`` or ``staticmethod``
    :raises InvalidParametersException: for ``args``/``kwargs``
        parameters or parameters missing a type annotation
    """
    # Detect class/static methods by type instead of parsing their
    # repr() string, which is fragile across Python versions.
    if isinstance(meth, (classmethod, staticmethod)):
        sig = inspect.signature(meth.__get__(self))
        self._is_static = True
    else:
        sig = inspect.signature(meth)

    # Build a type-signature from the method's annotations.
    types = []
    for name, parm in sig.parameters.items():
        if name == 'self' or name == 'cls':
            continue
        if name == "args" or name == "kwargs":
            raise InvalidParametersException(
                "Can not be used with args and kwargs")
        if parm.annotation is inspect.Parameter.empty:
            raise InvalidParametersException(
                'Argument {} must be annotated with a type'.format(name))
        if parm.default is not inspect.Parameter.empty:
            # Optional argument: also register the signature seen so
            # far, so calls omitting the remaining arguments dispatch.
            self._methods[tuple(types)] = meth
        types.append((name, parm.annotation))
    self._methods[tuple(types)] = meth
def csv2columns(csvFile, columns):
    """Given a CSV file and a comma-separated list of desired column names
    and types (name:type), return an array of vectors containing
    type-converted data.  Type should be the name of string-to-val
    type-conversion function such as float, int, or str.  If type is
    missing, then float conversion is assumed.
    """
    import csv
    # Parallel lists: column names, conversion callables, output columns.
    names = []; types = []; cols = []
    for column in columns.split(','):
        if column.find(':') > 0:
            name, type = column.split(':')
        else:
            # No explicit type given: default to float conversion.
            name = column; type = 'float'
        names.append(name.strip())
        # SECURITY NOTE(review): eval() on the type string executes
        # arbitrary code if `columns` is untrusted -- consider a
        # whitelist such as {'float': float, 'int': int, 'str': str}.
        types.append( eval(type.strip()) )  # get type conversion function from type string
        cols.append([])
    print(csvFile)
    # `urlopen` and `N` (presumably urllib's urlopen and a numpy alias)
    # are expected at module level -- verify against the file header.
    for fields in csv.DictReader(urlopen(csvFile).readlines(), skipinitialspace=True):
        tmpColVals = []
        try:
            # Convert each requested field; skip the whole row if any
            # value fails to coerce.
            for i, type in enumerate(types):
                tmpColVals.append( type(fields[names[i]]) )
        except Exception as e:
            print("Got exception coercing values: %s" % e)
            continue
        for i in range(len(types)):
            cols[i].append(tmpColVals[i])
    return [N.array(col) for col in cols]
def delete_kwargs(self, base_key, args=None, kwargs=None):
    """
    Deletes the ``*args`` or ``**kwargs`` part from the parameters section

    Either `args` or `kwargs` must not be None. The resulting key will be
    stored in ``base_key + 'no_args_kwargs'``

    Parameters
    ----------
    base_key: str
        The key in the :attr:`params` attribute to use
    args: None or str
        The string for the args to delete
    kwargs: None or str
        The string for the kwargs to delete"""
    if not args and not kwargs:
        warn("Neither args nor kwargs are given. I do nothing for %s" % (
            base_key))
        return
    types = []
    # Raw strings for the regex patterns: '\*' inside a normal string
    # literal is an invalid escape sequence (DeprecationWarning today,
    # SyntaxError in future Python versions).
    if args is not None:
        types.append(r'`?`?\*%s`?`?' % args)
    if kwargs is not None:
        types.append(r'`?`?\*\*%s`?`?' % kwargs)
    self.delete_types(base_key, 'no_args_kwargs', *types)
def delete_kwargs_s(cls, s, args=None, kwargs=None):
    """
    Deletes the ``*args`` or ``**kwargs`` part from the parameters section

    Either `args` or `kwargs` must not be None.

    Parameters
    ----------
    s: str
        The string to delete the args and kwargs from
    args: None or str
        The string for the args to delete
    kwargs: None or str
        The string for the kwargs to delete

    Notes
    -----
    The type name of `args` in `s` has to be like ````*<args>```` (i.e. the
    `args` argument preceeded by a ``'*'`` and enclosed by double ``'`'``).
    Similarily, the type name of `kwargs` in `s` has to be like
    ````**<kwargs>````"""
    if not args and not kwargs:
        return s
    types = []
    # Raw strings for the regex patterns: '\*' inside a normal string
    # literal is an invalid escape sequence (DeprecationWarning today,
    # SyntaxError in future Python versions).
    if args is not None:
        types.append(r"`?`?\*%s`?`?" % args)
    if kwargs is not None:
        types.append(r"`?`?\*\*%s`?`?" % kwargs)
    return cls.delete_types_s(s, types)
def register(self, meth):
    '''
    Register *meth* as one implementation of this multimethod.

    The dispatch key is the tuple of parameter annotations; every
    non-self parameter must be annotated with an actual type.
    '''
    sig = inspect.signature(meth)
    signature_types = []
    for param_name, param in sig.parameters.items():
        if param_name == 'self':
            continue
        annotation = param.annotation
        if annotation is inspect.Parameter.empty:
            raise TypeError(
                'Argument {} must be annotated with a type'.format(param_name))
        if not isinstance(annotation, type):
            raise TypeError(
                'Argument {} annotation must be a type'.format(param_name))
        if param.default is not inspect.Parameter.empty:
            # Defaulted parameter: also register the shorter prefix key
            # so the argument may be omitted at call time.
            self._methods[tuple(signature_types)] = meth
        signature_types.append(annotation)
    self._methods[tuple(signature_types)] = meth
def delete_kwargs_s(cls, s, args=None, kwargs=None):
    """
    Deletes the ``*args`` or ``**kwargs`` part from the parameters section

    Either `args` or `kwargs` must not be None.

    Parameters
    ----------
    s: str
        The string to delete the args and kwargs from
    args: None or str
        The string for the args to delete
    kwargs: None or str
        The string for the kwargs to delete

    Notes
    -----
    The type name of `args` in `s` has to be like ````*<args>```` (i.e. the
    `args` argument preceeded by a ``'*'`` and enclosed by double ``'`'``).
    Similarily, the type name of `kwargs` in `s` has to be like
    ````**<kwargs>````"""
    if not args and not kwargs:
        return s
    types = []
    # Raw strings for the regex patterns: '\*' inside a normal string
    # literal is an invalid escape sequence (DeprecationWarning today,
    # SyntaxError in future Python versions).
    if args is not None:
        types.append(r'`?`?\*%s`?`?' % args)
    if kwargs is not None:
        types.append(r'`?`?\*\*%s`?`?' % kwargs)
    return cls.delete_types_s(s, types)
def drawCaldwells(self, basemap, ax, c='r'):
    """Plot the Caldwell catalogue objects on *basemap* and label them.

    :param basemap: projection helper providing ``project`` and ``plot``
    :param ax: matplotlib-style axes used for the text annotations
    :param c: colour for markers and labels
    """
    # Sort by catalogue key so plotting/annotation order is deterministic.
    caldwell_keys = sorted(self.caldwells)
    caldwells = [self.caldwells[x] for x in caldwell_keys]
    types = []
    for x in caldwells:
        try:
            types.append(x.getType())
        except AttributeError:
            # Objects without a type still get plotted (empty type string).
            types.append("")
    types = numpy.array(types)
    # RA/DE coordinate pairs, projected into map coordinates.
    rade = [[x.getRA(), x.getDE()] for x in caldwells]
    rade = numpy.array(rade)
    mapx, mapy = basemap.project(rade[:, 0], rade[:, 1])
    # One plot call per object type so each type gets its own symbol.
    for t, sym, _ in ngcTypeSymbols:
        basemap.plot(mapx[types == t], mapy[types == t], color=c, marker=sym,
                     linestyle="None", markeredgewidth=0., markersize=9)
    # Label each object "C<number>" slightly above its marker.
    for s, x, y in zip(caldwell_keys, mapx, mapy):
        ax.annotate("C" + str(s), (x, y), color=c,
                    textcoords="offset points", xytext=(0, 2),
                    ha="center", va="bottom", fontsize=10)
def expand_struct(self, convert=classes):
    """Expand the flattened structure into concrete element descriptions.

    :param convert: mapping from element-class name to constructor
    :return: ``(elements, rest, iconv)`` where *elements* is a list of
        ``(name, type, attribute-dict)`` tuples, *rest* collects the
        elements that could not be converted, and *iconv* records the
        running converted-element count at each unconverted element.
    """
    Drift = convert['Drift']
    DriftExact = convert['DriftExact']
    Multipole = convert['Multipole']
    Cavity = convert['Cavity']
    #XYshift = convert['XYShift']
    #SRotation = convert['SRotation']
    rest = []
    names = self.flatten_names()
    elems = self.flatten_objects()
    newelems = []
    types = []
    iconv = []
    icount = 0
    for elem in elems:
        if elem.keyword == 'drift':
            newelems.append(DriftExact(length=elem.l))
            types.append('DriftExact')
            icount += 1
        elif elem.keyword == 'Multipole':
            newelems.append(
                Multipole(knl=elem.knl, ksl=elem.ksl, length=elem.lrad,
                          hxl=elem.knl[0], hyl=elem.ksl[0]))
            types.append(elem.keyword)
            icount += 1
        elif elem.keyword in ['hkicker']:
            # A horizontal kicker becomes a thin multipole with one knl term.
            ne = Multipole(knl=[-elem.kick], ksl=[], length=elem.lrad,
                           hxl=elem.kick, hyl=0)
            newelems.append(ne)
            types.append('Multipole')
            icount += 1
        elif elem.keyword in ['vkicker']:
            # A vertical kicker becomes a thin multipole with one ksl term.
            ne = Multipole(knl=[], ksl=[elem.kick], length=elem.lrad,
                           hxl=0, hyl=elem.kick)
            newelems.append(ne)
            types.append('Multipole')
            icount += 1
        elif elem.keyword in ['rfcavity']:
            nvolt = elem.volt * 1e6
            ne = Cavity(voltage=nvolt, frequency=elem.freq * 1e6,
                        lag=elem.lag * 360)
            newelems.append(ne)
            types.append('Cavity')
            icount += 1
        else:
            # BUGFIX: `el` and `iconb` were undefined names (NameError);
            # record the unconverted element and the conversion count.
            rest.append(elem)
            iconv.append(icount)
    newelems = [dict(i._asdict()) for i in newelems]
    return list(zip(names, types, newelems)), rest, iconv
def __build_types(self):
    """Return the element types applicable to ``self.klass``.

    An element type qualifies when it is listed in ``TYPE_ELEMENTS``
    and the wrapped class subclasses the type's controller class.
    """
    klass = self.klass
    return [element_type
            for element_type, type_data in TYPE_MAP_OBJ.items()
            if element_type in TYPE_ELEMENTS
            and issubclass(klass, type_data.ctrl_klass)]
def build_dict(self, key_types, value_types, llvm_keys, llvm_values):
    "Build a dict from a bunch of LLVM values"
    # Interleave key/value types into [k0, v0, k1, v1, ...].
    interleaved = [t for pair in zip(key_types, value_types) for t in pair]
    lstr = self.lstr(interleaved, fmt="{%s}")
    return self.buildvalue(lstr, *(llvm_keys + llvm_values))
def missing_data(data):
    """Summarise missing values per column of *data*.

    Returns a transposed DataFrame whose rows are ``Total`` (null
    count), ``Percent`` (null percentage) and ``Types`` (column dtype
    names).
    """
    nulls = data.isnull()
    summary = pd.concat([nulls.sum(), nulls.sum() / nulls.count() * 100],
                        axis=1, keys=['Total', 'Percent'])
    summary['Types'] = [str(data[column].dtype) for column in data.columns]
    return np.transpose(summary)
def _parse_signature(self, node, func_type):
    """Build a function signature from a call node's argument types.

    Each argument must already carry a numba "cast" type; arguments that
    do not are reported through ``self.error`` and omitted from the
    signature.

    :param node: call node whose ``args`` carry the argument types
    :param func_type: provides ``dst_type``, the signature constructor
    :return: a constant node wrapping the built signature
    """
    types = []
    for arg in node.args:
        if not arg.variable.type.is_cast:
            self.error(arg, "Expected a numba type")
        else:
            types.append(arg.variable.type)
    signature = func_type.dst_type(*types)
    new_node = nodes.const(signature, numba_types.CastType(signature))
    return new_node
def _get(self, *args, **kwargs):
    """Fetch all materia types referenced by at least one product.

    Uses the module-level database ``cursor``.  The name column comes
    back as GBK-encoded bytes with padding, so it is stripped and
    decoded before being returned.

    :return: list of ``{'id': ..., 'name': ...}`` dicts
    """
    types = []
    cursor.execute(
        "select MateriaID,MateriaName from TotalMateria where "
        "MateriaID in (select DISTINCT(MateriaID) from TotalProduct)"
    )
    rows = cursor.fetchall()
    for row in rows:
        types.append(dict(id=row.MateriaID,
                          name=row.MateriaName.strip().decode("GBK")))
    return types
def nodeupdate(self):
    """Push the local actor configuration to the node.

    ``self.actors`` is a sequence of indexable triples
    ``(type, instance, mboxptr)``; they are split into parallel lists
    and sent as a CMDLOCALCONFIG command.
    """
    actor_types = [int(actor[0]) for actor in self.actors]
    actor_instances = [actor[1] for actor in self.actors]
    actor_mboxptrs = [actor[2] for actor in self.actors]
    returnit = sendcmd(self, serialize(
        [cfgenum.cfgforwarddict['CMDLOCALCONFIG'],
         actor_types, actor_instances, actor_mboxptrs]))
def createDefaultConfig(self):
    """Create a configuration unit holding default values for this
    schema's object properties, with validation hooks attached.
    """
    # create a configuration unit for default values of object properties
    # take the defaults from schema defaults
    _self_name = self.name
    config = Ganga.Utility.Config.makeConfig(
        defaultConfigSectionName(_self_name),
        "default attribute values for %s objects" % _self_name)
    for name, item in self.allItems():
        # and not item['sequence']: #FIXME: do we need it or not??
        if not item['protected'] and not item['hidden']:
            # `typelist` restricts the accepted option types; None means
            # "no restriction".
            if item.hasProperty('typelist'):
                types = item['typelist']
                if types == []:
                    types = None
            else:
                types = None
            if item['sequence']:
                if not types is None:
                    # bugfix 36398: allow to assign a list in the
                    # configuration
                    types.append('list')
            if isinstance(item['defvalue'], dict):
                if not types is None:
                    types.append('dict')
            config.addOption(name, item['defvalue'], item['doc'],
                             override=False, typelist=types)

    def prehook(name, x):
        # Validate user/session config assignments before they take effect.
        errmsg = "Cannot set %s=%s in [%s]: " % (name, repr(x), config.name)
        try:
            item = self.getItem(name)
        except Exception as x:
            raise Ganga.Utility.Config.ConfigError(errmsg + str(x))
        if item.isA(ComponentItem):
            if not isinstance(x, str) and not x is None:
                raise Ganga.Utility.Config.ConfigError(
                    errmsg + "only strings and None allowed as a default value of Component Item.")
        try:
            # Dry-run the assignment so conversion errors surface here.
            self._getDefaultValueInternal(name, x, check=True)
        except Exception as err:
            logger.info("Unexpected error: %s", err)
            raise
        if item['protected'] or item['hidden']:
            raise Ganga.Utility.Config.ConfigError(
                errmsg + "protected or hidden property")
        return x

    config.attachUserHandler(prehook, None)
    config.attachSessionHandler(prehook, None)
def GetDisplayVectSettings():
    """Assemble d.vect-style display options from the user settings.

    Returns a list of ``key=value`` strings describing colours, line
    width, point symbol/size and the feature types to show.
    """
    settings = list()
    # Outline colour (or 'none' when transparency is enabled).
    if not UserSettings.Get(group='vectorLayer', key='featureColor',
                            subkey=['transparent', 'enabled']):
        featureColor = UserSettings.Get(group='vectorLayer',
                                        key='featureColor', subkey='color')
        settings.append('color=%s' % rgb2str.get(
            featureColor, ':'.join(map(str, featureColor))))
    else:
        settings.append('color=none')
    # Area fill colour (or 'none' when transparency is enabled).
    if not UserSettings.Get(group='vectorLayer', key='areaFillColor',
                            subkey=['transparent', 'enabled']):
        fillColor = UserSettings.Get(group='vectorLayer',
                                     key='areaFillColor', subkey='color')
        settings.append('fcolor=%s' % rgb2str.get(
            fillColor, ':'.join(map(str, fillColor))))
    else:
        settings.append('fcolor=none')
    # Line width, point symbol and point size.
    settings.append('width=%s' % UserSettings.Get(
        group='vectorLayer', key='line', subkey='width'))
    settings.append('icon=%s' % UserSettings.Get(
        group='vectorLayer', key='point', subkey='symbol'))
    settings.append('size=%s' % UserSettings.Get(
        group='vectorLayer', key='point', subkey='size'))
    # Feature types whose display is enabled.
    enabled_types = [
        ftype
        for ftype in ['point', 'line', 'boundary', 'centroid', 'area', 'face']
        if UserSettings.Get(group='vectorLayer', key='showType',
                            subkey=[ftype, 'enabled'])]
    settings.append('type=%s' % ','.join(enabled_types))
    return settings
def visitTypeSwitchCase(self, node):
    """Emit one case of a type switch as a pattern-match style block."""
    conditions = [self.seg.process(t) for t in node.types]
    joined = ", ".join(conditions)
    if node.expr is not None:
        expr = self.seg.process(node.expr)
        self.out.startBlock("{case %s => %s}" % (joined, expr))
    else:
        self.out.startBlock("{case %s}" % (joined,))
    self(node.body)
    self.out.endBlock()
def force_flatten(self):
    # Force the struct or union to have a declaration that directly
    # lists every field returned by enumfields(), flattening nested
    # anonymous structs/unions.
    flattened = list(self.enumfields())
    self.fldnames = tuple(fld[0] for fld in flattened)
    self.fldtypes = tuple(fld[1] for fld in flattened)
    self.fldbitsize = tuple(fld[2] for fld in flattened)
def nodeupdate(self):
    """Send the local actor configuration to the node.

    Splits ``self.actors`` (sequence of indexable triples) into
    parallel lists of type ids, instances and mailbox pointers, then
    sends them as a CMDLOCALCONFIG command.
    """
    types = []
    instances = []
    mboxptrs = []
    for x in self.actors:
        types.append(int(x[0]))
        instances.append(x[1])
        mboxptrs.append(x[2])
    # NOTE(review): the command reply is bound but never used or
    # returned -- confirm whether callers expect it.
    returnit = sendcmd(
        self,
        serialize([
            cfgenum.cfgforwarddict['CMDLOCALCONFIG'], types, instances,
            mboxptrs
        ]))
def __str__(self):
    """Render this union type in typing-style notation.

    ``Union[bytes, str]`` collapses to ``AnyStr``, a two-member union
    containing ``None`` renders as ``Optional[...]``, and a single
    member renders as itself.
    """
    types = list(self.types)
    if str != bytes:  # on Python 2 str == bytes
        if Instance(bytes) in types and Instance(str) in types:
            # we Union[bytes, str] -> AnyStr as late as possible so we avoid
            # corner cases like subclasses of bytes or str
            types.remove(Instance(bytes))
            types.remove(Instance(str))
            types.append(Instance(AnyStr))
    if len(types) == 1:
        return str(types[0])
    elif len(types) == 2 and None in types:
        # Exactly one real type plus None -> Optional[T].
        type = [t for t in types if t is not None][0]
        return 'Optional[%s]' % type
    else:
        # Members are sorted by string form for stable, order-independent
        # output.
        return 'Union[%s]' % (', '.join(sorted(str(t) for t in types)))
def listArtistRoot():
    """Build the root directory listing for an artist: one entry per
    distinct video type plus an 'All Videos' entry.

    Fetches the artist's video list from the MTV music meter service
    and caches the raw JSON response on disk.
    """
    url = 'http://www.mtvmusicmeter.com/sitewide/dataservices/meter/videos/?id='+params['url']
    data = getURL(url)
    SaveFile(VIDEOCACHE, data)
    videos = demjson.decode(data)['videos']
    total = len(videos)  # NOTE(review): computed but never used
    # Collect distinct video type groupings, prettified for display
    # ("music_video" -> "Music Video").
    types = []
    for video in videos:
        videoType = video['videoTypeGrouping_facet'].replace('_',' ').title()
        if videoType not in types:
            types.append(videoType)
    for type in types:
        addDir(type,type, 'listArtistVideos', iconimage=params['artistimage'])
    addDir('All Videos', 'All Videos', 'listArtistVideos',
           iconimage=params['artistimage'])
    xbmcplugin.addSortMethod(pluginhandle, xbmcplugin.SORT_METHOD_LABEL)
    xbmcplugin.endOfDirectory(pluginhandle)
def WriteUFuncObjectHelpers(self):
    """ Generate the static information required to set up a UFunc.

        This is:
        -- space for registering/writting the approbriate callbacks
        -- the doc strings.
    """
    c = StringIO()
    save = sys.stdout
    # Redirect stdout so the print statements below accumulate into `c`
    # (Python 2 code).
    sys.stdout = c
    try:
        name = self.GetPyUFuncEvaluatorOne()
        print "static PyUFuncGenericFunction %s_data[] = {NULL, NULL};" % name
        print ""
        print self.WriteUFuncObjectDoc()
        print ""
        tmp = "static char %s_types [] = {" % name
        indent = ' ' * (len(tmp))
        print tmp,
        # float == 1, double = 0
        types = []
        test = 0
        try:
            # One type row per precision mode: inputs, then (optionally)
            # the return value, then outputs.
            for mode in (1, 0):
                t = []
                for i in self.in_args:
                    for j in range(i.GetNumberInArgs()):
                        t.append(i.GetArrayType(j, mode))
                if not self.return_is_error_flag:
                    t.append(self.return_arg.GetArrayType(0, mode))
                for i in self.out_args:
                    for j in range(i.GetNumberOutArgs()):
                        t.append(i.GetArrayType(j, mode))
                types.append(t)
            test = 1
        finally:
            # `test` stays 0 only if the loop above raised; report which
            # ufunc was being generated before propagating.
            if test == 0:
                print "->>I was working on :", self.GetName()
        print indent_lists(types, indent),
        print "};"
        c.seek(0)
        return c.read()
    finally:
        # Always restore stdout, even on error.
        sys.stdout = save
def __doc__(self):
    """Build a human-readable type description for this attribute,
    suffixed with '(optional)' or '(required)'."""

    def qualify(cls):
        # Builtins are shown bare; everything else fully qualified.
        if cls.__module__ == "builtins":
            return cls.__name__
        return f"{cls.__module__}.{cls.__name__}"

    def enumerate_names(names):
        # English-style enumeration: "a or b", "a, b, or c".
        if len(names) == 2:
            return " or ".join(names)
        return ", ".join(names[:-1]) + ", or " + names[-1]

    if isinstance(self.cls, type) and issubclass(self.cls, EBMLList):
        itemclass = self.cls.itemclass
        if isinstance(itemclass, tuple):
            names = [qualify(cls) for cls in itemclass]
            if len(names) == 1:
                typestring = f"List of {names[-1]} objects"
            else:
                typestring = "List of objects of type " + enumerate_names(names)
        else:
            typestring = f"List of {qualify(itemclass)} objects"
    elif isinstance(self.cls, tuple):
        names = [qualify(cls) for cls in self.cls]
        if len(names) == 1:
            typestring = f"{names[-1]} object"
        else:
            typestring = "object of type " + enumerate_names(names)
    else:
        typestring = f"{qualify(self.cls)} object"

    if self.optional:
        return f"{typestring} (optional)"
    return f"{typestring} (required)"
def _unpack_lists(state, arg):
    """Extract inner types of Lists and Tuples.

    Pops `arg` items from the stack, concatenates their inner types
    into one list, and returns that list.

    Example: if stack[-arg:] == [[i1, i2], [i3]], the output is
    [i1, i2, i3]
    """
    inner = []
    for offset in range(arg, 0, -1):
        constraint = state.stack[-offset]
        if isinstance(constraint, typehints.IndexableTypeConstraint):
            inner.extend(constraint._inner_types())
        else:
            # Unknown constraint kind: degrade gracefully to Any.
            logging.debug('Unhandled type_constraint: %r', constraint)
            inner.append(typehints.Any)
    state.stack[-arg:] = []
    return inner
def type_keys(self):
    """Return the task types supported by the server.

    Starts from the known/supported types and appends any additional
    type reported by the server's active-tasks endpoint.
    """
    # Copy so we do not mutate the shared known_types list in place.
    types = list(self.known_types)
    try:
        stats = self.rest.active_tasks()
    except ValueError:
        logger.error("unable to get stats for active tasks")
        # BUGFIX: `stats` was left unbound here, causing a NameError in
        # the loop below; fall back to an empty result set.
        stats = []
    for stat in stats:
        # append any new keys returned by the server
        # not part of known list of supported keys
        type_ = stat["type"]
        if type_ not in types:
            types.append(type_)
    return types
def register(self, meth):
    '''Register a new method as a multimethod'''
    sig = inspect.signature(meth)
    # build a type signature from the method's annotations
    types = []
    for name, parm in sig.parameters.items():
        if name == 'self':
            continue
        if parm.annotation is inspect.Parameter.empty:
            raise TypeError(
                'Argument {} must be annotated with a type'.format(name))
        if not isinstance(parm.annotation, type):
            # BUGFIX: error message previously misspelled 'Argument'
            # as 'Argrment'.
            raise TypeError(
                'Argument {} annotation must be a type'.format(name))
        if parm.default is not inspect.Parameter.empty:
            # Defaulted parameter: also register the shorter prefix
            # signature so the argument may be omitted at call time.
            self._methods[tuple(types)] = meth
        types.append(parm.annotation)
    self._methods[tuple(types)] = meth
def register(self, meth):
    """Record *meth* under the tuple of its parameter annotations."""
    params = signature(meth).parameters
    key = []
    for arg_name, arg in params.items():
        if arg_name == 'self':
            continue
        if arg.annotation is Parameter.empty:
            raise TypeError(
                'Argument {} must be annotated with a type'.format(arg_name))
        if not isinstance(arg.annotation, type):
            raise TypeError(
                'Argument {} annotation must be a type'.format(arg_name))
        if arg.default is not Parameter.empty:
            # Defaulted parameter: register the prefix signature as well.
            self._methods[tuple(key)] = meth
        key.append(arg.annotation)
    self._methods[tuple(key)] = meth
def plugin_from_module(superclass, module):
    """Return plug-ins from module

    Arguments:
        superclass (superclass): Superclass of subclasses to look for
        module (types.ModuleType): Imported module from which to parse
            valid Avalon plug-ins.

    Returns:
        List of plug-ins, or empty list if none is found.
    """

    def recursive_bases(klass):
        # All ancestors of `klass`, depth-first (duplicates possible).
        found = list(klass.__bases__)
        for base in klass.__bases__:
            found.extend(recursive_bases(base))
        return found

    wanted_name = superclass.__name__
    plugins = []
    for attr in dir(module):
        # It could be anything at this point.
        candidate = getattr(module, attr)
        if not inspect.isclass(candidate):
            continue
        # Skip things subclassed from nothing, not even `object`.
        if not len(candidate.__bases__) > 0:
            continue
        # Compare by class *name* rather than with `issubclass` so that
        # plug-ins survive a reload of this module.
        ancestors = recursive_bases(candidate)
        if any(base.__name__ == wanted_name for base in ancestors):
            plugins.append(candidate)
    return plugins
def GetDisplayVectSettings():
    """Assemble d.vect-style display options from the user settings.

    Returns a list of ``key=value`` strings describing colours, line
    width, point symbol/size and the feature types to show.
    """
    settings = list()
    # Outline colour (or 'none' when transparency is enabled).
    if not UserSettings.Get(group='vectorLayer', key='featureColor',
                            subkey=['transparent', 'enabled']):
        featureColor = UserSettings.Get(group='vectorLayer',
                                        key='featureColor', subkey='color')
        settings.append(
            'color=%s' % rgb2str.get(featureColor,
                                     ':'.join(map(str, featureColor))))
    else:
        settings.append('color=none')
    # Area fill colour (or 'none' when transparency is enabled).
    if not UserSettings.Get(group='vectorLayer', key='areaFillColor',
                            subkey=['transparent', 'enabled']):
        fillColor = UserSettings.Get(group='vectorLayer',
                                     key='areaFillColor', subkey='color')
        settings.append('fcolor=%s' % rgb2str.get(fillColor,
                                                  ':'.join(map(str, fillColor))))
    else:
        settings.append('fcolor=none')
    # Line width, point symbol and point size.
    settings.append(
        'width=%s' % UserSettings.Get(group='vectorLayer', key='line',
                                      subkey='width'))
    settings.append(
        'icon=%s' % UserSettings.Get(group='vectorLayer', key='point',
                                     subkey='symbol'))
    settings.append(
        'size=%s' % UserSettings.Get(group='vectorLayer', key='point',
                                     subkey='size'))
    # Feature types whose display is enabled.
    types = []
    for ftype in ['point', 'line', 'boundary', 'centroid', 'area', 'face']:
        if UserSettings.Get(group='vectorLayer', key='showType',
                            subkey=[ftype, 'enabled']):
            types.append(ftype)
    settings.append('type=%s' % ','.join(types))
    return settings
def listArtistRoot():
    """Build the root directory listing for an artist: one entry per
    distinct video type plus an 'All Videos' entry.

    Fetches the artist's video list from the MTV music meter service
    and caches the raw JSON response on disk.
    """
    url = 'http://www.mtvmusicmeter.com/sitewide/dataservices/meter/videos/?id=' + params[
        'url']
    data = getURL(url)
    SaveFile(VIDEOCACHE, data)
    videos = demjson.decode(data)['videos']
    total = len(videos)  # NOTE(review): computed but never used
    # Collect distinct video type groupings, prettified for display
    # ("music_video" -> "Music Video").
    types = []
    for video in videos:
        videoType = video['videoTypeGrouping_facet'].replace('_', ' ').title()
        if videoType not in types:
            types.append(videoType)
    for type in types:
        addDir(type, type, 'listArtistVideos', iconimage=params['artistimage'])
    addDir('All Videos', 'All Videos', 'listArtistVideos',
           iconimage=params['artistimage'])
    xbmcplugin.addSortMethod(pluginhandle, xbmcplugin.SORT_METHOD_LABEL)
    xbmcplugin.endOfDirectory(pluginhandle)
def all_meta_types(self, interfaces=None):
    """ What can you put inside me?
        Checks if the legal products are actually installed in Zope
    """
    # Meta-type names that are allowed inside this container.
    types = ['LDAPUserFolder', 'User Folder', 'Script (Python)',
             'DTML Method', 'DTML Document', 'XMLRPC Method']
    y = []
    if self.allow_collections:
        y.append({'name': 'Report Collection',
                  'action': 'manage_addCollectionForm',
                  'permission': 'Add Collections'})
        types.append('Repository Referral')
    if self.allow_envelopes:
        y.append({'name': 'Report Envelope',
                  'action': 'manage_addEnvelopeForm',
                  'permission': 'Add Envelopes'})
    # NOTE(review): combined with the branch above, 'Repository
    # Referral' ends up in `types` regardless of allow_collections --
    # confirm this is intentional.
    if not self.allow_collections:
        types.append('Repository Referral')
    # Add every installed product whose meta type is in the allow list.
    for x in Products.meta_types:
        if x['name'] in types:
            y.append(x)
    return y
def __repr__(self):
    """Render this promote node, guarding against cycles in the type
    graph.

    ``repr_seen``/``repr_count`` implement re-entrancy bookkeeping:
    the outermost call creates the seen-set and clears it when it
    unwinds, while member types already printed higher up the call
    chain render as "...".
    """
    if not self.repr_seen:
        self.repr_seen = set()
    self.repr_seen.add(self)
    self.repr_count += 1
    types = []
    for type in self.types:
        if type not in self.repr_seen:
            types.append(type)
            self.repr_seen.add(type)
        else:
            # Already visited: break the recursion.
            types.append("...")
    result = "promote%d(%s)" % (self.count, ", ".join(types))
    self.repr_count -= 1
    if not self.repr_count:
        # Outermost call finished: reset for the next repr().
        self.repr_seen = None
    return result
def register(self, meth):
    """Register a new method as multimethod.

    The dispatch key is the tuple of parameter annotations; parameters
    with defaults additionally register the shorter prefix signature.

    :raises TypeError: if a parameter is unannotated or annotated with
        something that is not a type
    """
    sig = inspect.signature(meth)
    _types = []
    for name, param in sig.parameters.items():
        if name == "self":
            continue
        # BUGFIX: Parameter.empty is a sentinel and must not be called;
        # `empty()` created a fresh instance so the check never matched.
        if param.annotation is inspect.Parameter.empty:
            # BUGFIX: adjacent string literals lacked a separating space
            # ("mustbe annotated").
            raise TypeError(f"Argument {name} must "
                            "be annotated with a type")
        if not isinstance(param.annotation, type):
            # BUGFIX: message previously duplicated the "must be
            # annotated" text instead of describing this condition.
            raise TypeError(f"Argument {name} annotation "
                            "must be a type")
        if param.default is not inspect.Parameter.empty:
            self._method[tuple(_types)] = meth
        # BUGFIX: appended to undefined name `types` (NameError).
        _types.append(param.annotation)
    self._method[tuple(_types)] = meth
def register(self, method):
    """Register a new method as a multimethod.

    The tuple of parameter annotations becomes the dispatch key; when a
    parameter carries a default value, the prefix signature (without
    that parameter) is registered as well so it can be omitted at call
    time.
    """
    parameters = inspect.signature(method).parameters
    key = []
    for param_name, param in parameters.items():
        if param_name == 'self':
            continue
        if param.annotation is inspect.Parameter.empty:
            raise TypeError(
                f'Argument {param_name} must be annotated with a type.')
        if not isinstance(param.annotation, type):
            raise TypeError(f'Argument {param_name} must be a type.')
        if param.default is not inspect.Parameter.empty:
            # Defaulted keyword parameter: register the shorter key too.
            self._methods[tuple(key)] = method
        key.append(param.annotation)
    self._methods[tuple(key)] = method
def map_nested(function, data_struct, dict_only=False, map_list=True,
               map_tuple=False, map_numpy=False):
    """Apply a function recursively to each element of a nested data struct."""
    # Could add support for more exotic data_struct, like OrderedDict

    def recurse(value):
        return map_nested(function, value, dict_only=dict_only,
                          map_list=map_list, map_tuple=map_tuple,
                          map_numpy=map_numpy)

    if isinstance(data_struct, dict):
        return {key: recurse(value) for key, value in data_struct.items()}

    if not dict_only:
        # Sequence kinds we are allowed to descend into.
        sequence_kinds = []
        if map_list:
            sequence_kinds.append(list)
        if map_tuple:
            sequence_kinds.append(tuple)
        if map_numpy:
            sequence_kinds.append(np.ndarray)
        if isinstance(data_struct, tuple(sequence_kinds)):
            mapped = [recurse(item) for item in data_struct]
            if isinstance(data_struct, list):
                return mapped
            if isinstance(data_struct, tuple):
                return tuple(mapped)
            return np.array(mapped)

    # Singleton
    return function(data_struct)
def parse_composite_object(string, max_items=None):
    "recursively parse one composite object of type (1,8), not including a length header, and stopping at end or after <max_items>"
    # Collected (objtype, raw-data) pairs, one per member object.
    types=[]
    # Composite header: total length, 2-byte type tag, member count.
    chunklen, objtype, count=struct.unpack("<L2sH",string[:8])
    assert objtype==compositetype, "attempt to unpack non-composite object, type=%04x" %objtype
    string=string[8:]
    for i in range(count):
        # Per-member header: chunk length and type tag.
        chunklen, objtype=struct.unpack("<L2s",string[:6])
        if objtype==compositetype or objtype==arraytype:
            objdata=string[:chunklen] #just save it raw for later descent, if needed
        else:
            objdata=''
        string=string[chunklen:]
        types.append( (objtype, objdata) )
    # Honour max_items: keep the first max_items entries and hand the
    # remainder back to the caller as leftovers.
    if max_items is not None and (max_items < len(types)):
        types, leftovers=types[:max_items], types[max_items:]
    else:
        leftovers=()
    vals, string = parse_typelist_and_string(types, string)
    return vals, (leftovers, string)
def parse_composite_object(string, max_items=None):
    "recursively parse one composite object of type (1,8), not including a length header, and stopping at end or after <max_items>"
    # Collected (objtype, raw-data) pairs, one per member object.
    types = []
    # Composite header: total length, 2-byte type tag, member count.
    chunklen, objtype, count = struct.unpack("<L2sH", string[:8])
    assert objtype == compositetype, "attempt to unpack non-composite object, type=%04x" % objtype
    string = string[8:]
    for i in range(count):
        # Per-member header: chunk length and type tag.
        chunklen, objtype = struct.unpack("<L2s", string[:6])
        if objtype == compositetype or objtype == arraytype:
            objdata = string[:chunklen]  # just save it raw for later descent, if needed
        else:
            objdata = ""
        string = string[chunklen:]
        types.append((objtype, objdata))
    # Honour max_items: keep the first max_items entries and hand the
    # remainder back to the caller as leftovers.
    if max_items is not None and (max_items < len(types)):
        types, leftovers = types[:max_items], types[max_items:]
    else:
        leftovers = ()
    vals, string = parse_typelist_and_string(types, string)
    return vals, (leftovers, string)
def expand_struct(self, convert=classes):
    """Expand the flattened lattice into simulation elements.

    :param convert: mapping from lowercase element kind to constructor
    :return: ``(elements, rest)`` where *elements* zips names, type
        labels and attribute dicts, and *rest* collects the elements
        that could not be converted.
    """
    drift = convert['drift']
    multipole = convert['multipole']
    cavity = convert['cavity']
    align = convert['align']
    block = convert['block']
    rest = []
    names = self.flatten_names()
    elems = self.flatten_objects()
    newelems = []
    types = []
    for elem in elems:
        if elem.keyword == 'drift':
            newelems.append(drift(length=elem.l))
            types.append('driftexact')
        elif elem.keyword == 'multipole':
            newelems.append(multipole(knl=elem.knl, ksl=elem.ksl,
                                      length=elem.lrad,
                                      hxl=elem.knl[0], hyl=elem.ksl[0]))
            types.append(elem.keyword)
        elif elem.keyword in ['hkicker']:
            # Horizontal kicker as a thin multipole with one knl term.
            ne = multipole(knl=[-elem.kick], ksl=[],
                           length=elem.lrad, hxl=elem.kick, hyl=0)
            newelems.append(ne)
            types.append('multipole')
        elif elem.keyword in ['vkicker']:
            # Vertical kicker as a thin multipole with one ksl term.
            ne = multipole(knl=[], ksl=[elem.kick],
                           length=elem.lrad, hxl=0, hyl=elem.kick)
            newelems.append(ne)
            types.append('multipole')
        elif elem.keyword in ['rfcavity']:
            nvolt = elem.volt * 1e6
            ne = cavity(volt=nvolt, freq=elem.freq * 1e6, lag=elem.lag * 360)
            newelems.append(ne)
            types.append('cavity')
        else:
            # BUGFIX: `el` was an undefined name (NameError); keep the
            # unconverted element itself.
            rest.append(elem)
    newelems = [dict(i._asdict()) for i in newelems]
    return zip(names, types, newelems), rest
def build(self):
    """Return the command string that recreates every node described by
    ``self.data``: first a creation command per entry, then a parenting
    command for each node that has a parent.
    """
    buildCmds = ''

    # CREATE NODE
    created_names = []
    created_types = []
    created_parents = []
    for index in range(len(self.data)):
        refs = self.refs(index)
        buildCmds += self.utils_createNode(**refs)
        created_names.append(refs['Name'])
        created_types.append(refs.get('type', None))
        created_parents.append(refs.get('parent', None))

    # PARENT
    for index, parent in enumerate(created_parents):
        if parent is not None:
            buildCmds += self.utils_parentNode(name=created_names[index],
                                               type=created_types[index],
                                               parent=parent)
    return buildCmds
def register(self, meth):
    '''Register a new method as a multimethod (dispatch by parameter
    annotations).'''
    sig = inspect.signature(meth)
    # The dispatch key is built from each parameter's type annotation.
    key = []
    for param_name, param in sig.parameters.items():
        if param_name == 'self':
            continue
        if param.annotation is inspect.Parameter.empty:
            raise TypeError(
                'Dla argumentu {} trzeba dodać uwagę określającą typu'.format(param_name)
            )
        if not isinstance(param.annotation, type):
            raise TypeError(
                'Uwaga dla argumentu {} musi określać typ'.format(param_name)
            )
        if param.default is not inspect.Parameter.empty:
            # A defaulted parameter also registers the shorter signature.
            self._methods[tuple(key)] = meth
        key.append(param.annotation)
    self._methods[tuple(key)] = meth
def Register(self):
    """Register all builders and create their force/nightly/trigger
    schedulers.

    :return: ``(builders, schedulers)``; both are also stored on
        ``self`` along with the builder names.
    """
    builders = []
    builderNames = []
    if isinstance(self.builders, list):
        # Mixed list of SetOfBuilders and single builder objects.
        for buildersSet in self.builders:
            if isinstance(buildersSet, SetOfBuilders):
                (new_builders, new_builderNames) = buildersSet.RegisterBuilders()
            else:
                builder = buildersSet
                trace("Register builder: name=%s, slavenames=%s, codebase=%s"
                      % (builder.getName(), builder.slaves(), builder.codebase()))
                new_builders = [builder.register()]
                new_builderNames = [builder.getName()]
            builders = builders + new_builders
            builderNames = builderNames + new_builderNames
    else:
        (builders, builderNames) = self.builders.RegisterBuilders()
    schedulers = []
    # NOTE(review): SingleBranchScheduler is imported but never used below.
    from buildbot.schedulers.forcesched import ForceScheduler
    from buildbot.schedulers.timed import Nightly
    from buildbot.schedulers.basic import SingleBranchScheduler
    from buildbot.schedulers.triggerable import Triggerable
    branch = self.branch
    # Names of the scheduler kinds we are about to generate (used only
    # for the trace message below).
    types = []
    if self.genForce:
        types.append('force')
    if self.genNightly:
        types.append('nightly')
    if self.genTrigger:
        types.append('trigger')
    if types:
        trace("Register schedulers (%s): branch=%s, builders = %s"
              % (' + '.join(types), branch, builderNames))
        if branch is not None:
            codebase = constants.codebase[branch]
        # NOTE(review): `codebase` is only bound when branch is not
        # None, yet the scheduler constructors below always reference
        # it -- confirm branch cannot be None when these flags are set.
        if self.genForce:
            schedulers.append(ForceScheduler(
                name=self.nameprefix + 'force_' + branch,
                builderNames=builderNames,
                codebases=codebase.getCodebase()))
        if self.genNightly:
            schedulers.append(Nightly(
                hour='*' if self.nightlyHour is None else self.nightlyHour,
                minute=0 if self.nightlyMinute is None else self.nightlyMinute,
                name=self.nameprefix + 'nightly_' + branch,
                builderNames=builderNames,
                codebases=codebase.getCodebase(),
                branch=None))
        if self.genTrigger:
            schedulers.append(Triggerable(
                name=self.nameprefix + 'trigger' + ('_' + branch if branch is not None else ''),
                builderNames=builderNames,
                codebases=codebase.getCodebase()))
    self.builders = builders
    self.builderNames = builderNames
    self.schedulers = schedulers
    return (builders, schedulers)
def prepare(self):
    """Prepare all skin resources (fonts, layouts, menus, sets, images)
    for use, resolving image files against the skin search directories.

    Works on deep copies of the raw ``self._*`` definitions so the
    originals stay untouched.  Returns 1 on completion.
    """
    global attr_global_dict
    attr_global_dict = None
    self.prepared = True
    # Work on copies so the parsed skin definitions remain pristine.
    self.sets = copy.deepcopy(self._sets)
    self.font = copy.deepcopy(self._font)
    layout = copy.deepcopy(self._layout)
    """ . share/icons/themes/<skin> share/images share/skins/main share/skins/plugins """
    # Resolve a bare icon theme name to the full themes directory.
    if not os.path.isdir(self.icon_dir):
        self.icon_dir = os.path.join(config.ICON_DIR, 'themes', self.icon_dir)
    # Directories searched (in order) when resolving image file names.
    search_dirs = [ '.', self.icon_dir, config.IMAGE_DIR, ] + self.skindirs
    for f in self.font:
        self.font[f].prepare(self._color, scale=self.font_scale)
    for l in layout:
        layout[l].prepare(self.font, self._color, search_dirs, self._images)
    all_menus = copy.deepcopy(self._menu)
    for menu in all_menus:
        all_menus[menu].prepare(self._menuset, layout)
        # prepare listing area images
        for s in all_menus[menu].style:
            for i in range(2):
                if s[i] and hasattr(s[i], 'listing'):
                    for image in s[i].listing.images:
                        s[i].listing.images[image].prepare(None, search_dirs, self._images)
    # Split menus: names starting with 'default' vs. everything else.
    self.default_menu = {}
    self.special_menu = {}
    for k in all_menus:
        if k.startswith('default'):
            self.default_menu[k] = all_menus[k]
        else:
            self.special_menu[k] = all_menus[k]
    # For every special menu without an explicit '<name> main menu'
    # variant, fall back to the menu itself.
    types = []
    for k in self.special_menu:
        if k.find('main menu') == -1:
            types.append(k)
    for t in types:
        if not self.special_menu.has_key(t + ' main menu'):
            self.special_menu[t + ' main menu'] = self.special_menu[t]
    # NOTE(review): dict.has_key is Python 2 only — this module predates
    # Python 3.
    for t in ('default no image', 'default description'):
        if not self.default_menu.has_key(t):
            self.default_menu[t] = self.default_menu['default']
    t = 'default description'
    if not self.default_menu.has_key(t + ' no image'):
        self.default_menu[t + ' no image'] = self.default_menu[t]
    for s in self.sets:
        if isinstance(self.sets[s], AreaSet):
            # prepare an areaset
            self.sets[s].prepare(layout)
            for area in self.sets[s].areas.values():
                if hasattr(area, 'images'):
                    for image in area.images.values():
                        image.prepare(None, search_dirs, self._images)
        else:
            # prepare a menu
            self.sets[s].prepare(self._menuset, layout)
            # NOTE(review): this inner loop rebinds the outer loop variable
            # `s` (the set key) to a style entry — harmless for iteration
            # over the dict keys, but confusingly shadowed.
            for s in self.sets[s].style:
                for i in range(2):
                    if s[i] and hasattr(s[i], 'listing'):
                        for image in s[i].listing.images:
                            s[i].listing.images[image].prepare(None, search_dirs, self._images)
    self.popup = layout[self._popup]
    self.mainmenu = copy.deepcopy(self._mainmenu)
    self.mainmenu.prepare(search_dirs, self._images)
    # Resolve every named image to an absolute file path.
    self.images = {}
    for name in self._images:
        self.images[name] = search_file(self._images[name], search_dirs)
    return 1
def map_nested(
    function,
    data_struct,
    dict_only: bool = False,
    map_list: bool = True,
    map_tuple: bool = False,
    map_numpy: bool = False,
    num_proc: Optional[int] = None,
    types=None,
):
    """Apply a function recursively to each element of a nested data struct.

    If num_proc > 1 and the length of data_struct is longer than num_proc:
    use multi-processing.

    Args:
        function: callable applied to every leaf element.
        data_struct: the (possibly nested) structure to map over.
        dict_only: if True, only recurse into dicts (ignore the map_* flags).
        map_list / map_tuple / map_numpy: whether to recurse into lists /
            tuples / numpy arrays.
        num_proc: number of worker processes; None or <=1 means sequential.
        types: optional extra container types to recurse into.

    Returns:
        A structure of the same kind as ``data_struct`` (dict / list /
        tuple / np.ndarray) with the function applied to the leaves.
    """
    if types is None:
        types = []
    else:
        # Copy so the append calls below never mutate the caller's list.
        types = list(types)
    if not dict_only:
        if map_list:
            types.append(list)
        if map_tuple:
            types.append(tuple)
        if map_numpy:
            types.append(np.ndarray)
    types = tuple(types)

    # Singleton: not a container we recurse into — apply the function directly.
    if not isinstance(data_struct, dict) and not isinstance(data_struct, types):
        return function(data_struct)

    disable_tqdm = bool(logger.getEffectiveLevel() > INFO)
    iterable = list(data_struct.values()) if isinstance(data_struct, dict) else data_struct

    if num_proc is None:
        num_proc = 1
    if num_proc <= 1 or len(iterable) <= num_proc:
        # Sequential path: map each element, recursing via _single_map_nested.
        mapped = [
            _single_map_nested((function, obj, types, None, True)) for obj in tqdm(iterable, disable=disable_tqdm)
        ]
    else:
        split_kwds = []  # We organize the splits ourselve (contiguous splits)
        for index in range(num_proc):
            div = len(iterable) // num_proc
            mod = len(iterable) % num_proc
            # The first `mod` workers get one extra element each.
            start = div * index + min(index, mod)
            end = start + div + (1 if index < mod else 0)
            split_kwds.append((function, iterable[start:end], types, index, disable_tqdm))
        assert len(iterable) == sum(len(i[1]) for i in split_kwds), (
            f"Error dividing inputs iterable among processes. "
            f"Total number of objects {len(iterable)}, "
            f"length: {sum(len(i[1]) for i in split_kwds)}"
        )
        logger.info(
            "Spawning {} processes for {} objects in slices of {}".format(
                num_proc, len(iterable), [len(i[1]) for i in split_kwds]
            )
        )
        # tqdm.set_lock lets the worker progress bars share one write lock.
        with Pool(num_proc, initargs=(RLock(),), initializer=tqdm.set_lock) as pool:
            mapped = pool.map(_single_map_nested, split_kwds)
        logger.info("Finished {} processes".format(num_proc))
        mapped = [obj for proc_res in mapped for obj in proc_res]
        logger.info("Unpacked {} objects".format(len(mapped)))

    # Rebuild a container of the same kind as the input.
    if isinstance(data_struct, dict):
        return dict(zip(data_struct.keys(), mapped))
    else:
        if isinstance(data_struct, list):
            return mapped
        elif isinstance(data_struct, tuple):
            return tuple(mapped)
        else:
            return np.array(mapped)
def __init__(self, types=("URS", "FRS", "SDS", "SWDS")):
    """Initialise the parent with *types* extended by the 3.0 web FRS type."""
    extended = [*types, "3.0WebFRS"]
    super().__init__(extended)
def createDefaultConfig(self):
    """Create a configuration unit for the default values of object
    properties, taking the defaults from the schema defaults, and attach
    a validation hook for user/session overrides.
    """
    config = Ganga.Utility.Config.makeConfig(defaultConfigSectionName(self.name),\
        "default attribute values for %s objects" % self.name)
    # self._pluginclass._proxyClass.__doc__ )
    for name, item in self.allItems():
        # and not item['sequence']: #FIXME: do we need it or not??
        if not item['protected'] and not item['hidden']:
            if 'typelist' in item._meta:
                # Copy before extending: appending to item['typelist']
                # directly would permanently mutate the shared schema
                # metadata for every later use of this item.
                types = list(item['typelist'])
                if types == []:
                    types = None
            else:
                types = None
            if item['sequence']:
                if types is not None:
                    # bugfix 36398: allow to assign a list in the
                    # configuration
                    types.append('list')
            if isinstance(item['defvalue'], dict):
                if types is not None:
                    types.append('dict')
            config.addOption(
                name, item['defvalue'], item['doc'], override=False, typelist=types)

    def prehook(name, x):
        # Validate a proposed default value before the config accepts it;
        # returns x unchanged on success, raises ConfigError otherwise.
        errmsg = "Cannot set %s=%s in [%s]: " % (name, repr(x), config.name)
        try:
            item = self.getItem(name)
        except Exception as exc:  # renamed from `x` so the value parameter is not shadowed
            raise Ganga.Utility.Config.ConfigError(errmsg + str(exc))

        if item.isA(ComponentItem):
            if not isinstance(x, str) and x is not None:
                raise Ganga.Utility.Config.ConfigError(errmsg + "only strings and None allowed as a default value of Component Item.")
        try:
            self._getDefaultValueInternal(name, x, check=True)
        except Exception as err:
            logger.info("Unexpected error: %s" % str(err))
            raise

        if item['protected'] or item['hidden']:
            raise Ganga.Utility.Config.ConfigError(errmsg + "protected or hidden property")

        # FIXME: File() == 'x' triggers AttributeError
        # try:
        #    if x == '': x = None
        # except AttributeError:
        #    pass
        return x

    config.attachUserHandler(prehook, None)
    config.attachSessionHandler(prehook, None)