def __init__(self, element):
    """Wrap an ElementTree element, exposing child tags as attributes.

    Children that themselves contain elements become instances of
    dynamically-created ElementWrapper subclasses; leaf children become
    their text.  A tag that occurs more than once is collected into a
    list and ``__islist__`` is set on the wrapper.
    """
    self.__element = element
    # Registry of children keyed by local tag name.  Values are lists so
    # that repeated tags can be recorded without touching the XML tree.
    self.__tags = {}
    for child in element.getchildren():
        ns, tag = denamespace(child.tag)
        if tag in self.__tags:
            self.__tags[tag].append(child)
        else:
            # BUG FIX: the original stored the bare element here and then
            # called .append() on it for repeats, which added the sibling
            # as a *subelement* of the first child (mutating the tree)
            # instead of extending the registry.  Store a list instead.
            self.__tags[tag] = [child]
        if hasattr(self, tag):
            # Tag seen before: promote the attribute to a list.
            spot = getattr(self, tag)
            if type(spot) != list:
                spot = [spot]
            #spot.append(objectify(child))
            if len(child.getchildren()):
                spot.append(
                    new.classobj(tag, (ElementWrapper, ), {})(child))
            else:
                spot.append(child.text)
            setattr(self, tag, spot)
            setattr(self, '__islist__', True)
        elif len(child.getchildren()):
            # Nested element: wrap it in a dynamically-created subclass
            # named after the tag.
            setattr(self, tag, new.classobj(tag, (ElementWrapper, ), {})(child))
        else:
            # marshall the type here!
            setattr(self, denamespace(child.tag)[1], child.text)
def loadINIPackage(self, inifile):
    """
    Load INI file containing macro definitions

    Each INI section names an existing macro class to subclass; each
    option in the section defines a new macro whose rendering is either
    a literal unicode value (``unicode('...')`` / ``unicode(<number>)``
    syntax) or, failing that, an argument specification string.

    Arguments:
    inifile -- filename (or list of filenames) of INI formatted file

    """
    ini = ConfigParser.RawConfigParser()
    # Accept a single filename or a sequence of them.
    if not isinstance(inifile, (list,tuple)):
        inifile = [inifile]
    for f in inifile:
        ini.read(f)
    macros = {}
    for section in ini.sections():
        try:
            # The section name must resolve to an already-known macro class.
            baseclass = self[section]
        except KeyError:
            log.warning('Could not find macro %s' % section)
            continue
        for name in ini.options(section):
            value = ini.get(section,name)
            # Match unicode('...'), unicode("...") or unicode(<number>).
            m = re.match(r'^unicode\(\s*(?:(\'|\")(?P<string>.+)(?:\1)|(?P<number>\d+))\s*\)$',value)
            if m:
                data = m.groupdict()
                if data['number'] is not None:
                    # Numeric form is interpreted as a code point.
                    value = unichr(int(data['number']))
                else:
                    value = unicode(data['string'])
                macros[name] = new.classobj(name, (baseclass,),
                                            {'unicode': value})
                continue
            # Otherwise the value is the macro's argument specification.
            macros[name] = new.classobj(name, (baseclass,),
                                        {'args': value})
    self.importMacros(macros)
def validate_on_load(cls): for e in cls.errors: exception = classobj(str(e.error_type), (Exception,), {}) raise exception, e.error_message for e in cls.other_errors: exception = classobj(str(e.error_type), (Exception,), {}) raise exception, e.error_message
def newif(self, name, initial=False):
    """
    Create a new \\if (and accompanying) commands

    This method corresponds to TeX's \\newif command.

    Required Arguments:
    name -- name of the 'if' command.  This name should always
        start with the letters 'if'.

    Keyword Arguments:
    initial -- initial value of the 'if' command

    """
    name = str(name)
    # \if already exists
    if self.has_key(name):
        macrolog.debug('if %s already defined', name)
        return
    # Generate new 'if' class
    macrolog.debug('creating if %s', name)
    ifclass = new.classobj(name, (plasTeX.NewIf,), {'state':initial})
    self.addGlobal(name, ifclass)
    # Create \iftrue macro -- the leading 'if' is stripped from the
    # name, so \iffoo gets \footrue and \foofalse setters.
    truename = name[2:]+'true'
    newclass = new.classobj(truename, (plasTeX.IfTrue,),
                            {'ifclass':ifclass})
    self.addGlobal(truename, newclass)
    # Create \iffalse macro
    falsename = name[2:]+'false'
    newclass = new.classobj(falsename, (plasTeX.IfFalse,),
                            {'ifclass':ifclass})
    self.addGlobal(falsename, newclass)
def _preferences_pages_default(self):
    # Trait default initializer: build the preferences-page list from
    # the help docs/demos/examples extension points.  Each non-empty
    # category contributes its section page followed by one
    # dynamically-named page subclass per contribution.
    from apptools.help.help_plugin.preferences_pages import \
        DocumentsPreferencesPage, DemosPreferencesPage, \
        ExamplesPreferencesPage, HelpDocPreferencesPage, \
        HelpDemoPreferencesPage, HelpExamplePreferencesPage
    pages = []
    if len(self.help_docs) > 0:
        pages.append(DocumentsPreferencesPage)
        # One subclass per doc, keyed by its preferences path.
        pages.extend( [ new.classobj(doc.preferences_path + 'PreferencesPage',
                                     (HelpDocPreferencesPage,),
                                     {'preferences_path': doc.preferences_path},
                                     ) for doc in self.help_docs ])
    if len(self.help_demos) > 0:
        pages.append(DemosPreferencesPage)
        pages.extend( [ new.classobj(demo.preferences_path + 'PreferencesPage',
                                     (HelpDemoPreferencesPage,),
                                     {'preferences_path': demo.preferences_path},
                                     ) for demo in self.help_demos ])
    if len(self.help_examples) > 0:
        pages.append(ExamplesPreferencesPage)
        pages.extend( [ new.classobj(example.preferences_path + 'PreferencesPage',
                                     (HelpExamplePreferencesPage,),
                                     {'preferences_path': example.preferences_path},
                                     ) for example in self.help_examples ])
    return pages
def invoke(self, tex):
    # \newtheorem implementation: parse the arguments and register a
    # new theorem environment (and, if needed, its counter) in the
    # document context.
    self.parse(tex)
    attrs = self.attributes
    name = attrs['name']
    counter = attrs['counter']
    caption = attrs['caption']
    within = attrs['within']
    # Unstarred form with no shared counter: create a counter named
    # after the environment itself.
    if not counter and not attrs['*modifier*']:
        counter = name
        if within:
            # Reset by the enclosing counter; formatted "<within>.<name>".
            self.ownerDocument.context.newcounter(counter,initial=0,resetby=within,
                                                  format='${the%s}.${%s}' % (within, name))
        else:
            self.ownerDocument.context.newcounter(counter,initial=0)
    deflog.debug('newtheorem %s', name)
    # The nodeName key below ensure all theorem type will call the same
    # rendering method, the type of theorem being retained in the thmName
    # attribute
    if attrs['*modifier*']:
        # Starred form: unnumbered, so no counter attribute is attached.
        newclass = new.classobj(str(name), (Environment,),
                                {'caption': caption,
                                 'nodeName': 'thmenv',
                                 'thmName': name,
                                 'args': '[title]'})
    else:
        newclass = new.classobj(str(name), (Environment,),
                                {'caption': caption,
                                 'nodeName': 'thmenv',
                                 'thmName': name,
                                 'counter': counter,
                                 'args': '[title]'})
    self.ownerDocument.context.addGlobal(name, newclass)
def setup(**kwargs):
    """
    A drop in replacement for distutils.core.setup which integrates
    nicely with kiwi.environ

    :param packagename: the name of the main package to be used when it
        differs from the 'name' argument; falls back to 'name' if not
        provided
    """
    packagename = kwargs.pop('packagename', None)
    # FIXME: This is for kiwi to allow setting datadir to kiwi instead of
    # kiwi-gtk when uploading to pip. Is there a better way of doing this?
    _VariableExtender.packagename = packagename

    def run_install(self):
        # Compile and register translation catalogs for the domain
        # before delegating to the stock install_data behavior.
        domain = packagename or kwargs.get('name')
        if domain:
            # Eggs keep data inside the package, so no explicit datadir.
            datadir = domain if 'bdist_egg' in self.distribution.commands else None
            self.data_files.extend(compile_po_files(domain, datadir=datadir))
        KiwiInstallData.run(self)

    # distutils uses old style classes
    InstallData = new.classobj('InstallData', (KiwiInstallData,),
                               dict(run=run_install))
    InstallLib = new.classobj('InstallLib', (KiwiInstallLib,), dict())
    cmdclass = dict(install_data=InstallData, install_lib=InstallLib,
                    clean=KiwiClean)
    # Merge our command classes into any the caller supplied.
    kwargs.setdefault('cmdclass', cmdclass).update(cmdclass)
    DS_setup(**kwargs)
def GenClass_new_class(attributes, myglobals):
    # Build a (base, impl) pair of generated list-wrapper classes for a
    # LIST-typed attribute description.  The impl class is an empty
    # subclass that user code may override.
    classname = attributes[SELF][NAME]
    if attributes[SELF][TYPE] != LIST:
        raise AttributeError
    # Anonymous lists use the association-list flavour.
    if ANONYMOUS in attributes[SELF][FLAGS]:
        listtype = GenClassAList
        attributes[SELF]['install_func'] = GenClassAList_get_install_funcs
    else:
        listtype = GenClassList
        attributes[SELF]['install_func'] = GenClassList_get_install_funcs

    def GenClass_init_func(self, list=None, parent=None):
        # Delegate construction to the attached concrete list class.
        if hasattr(self.__class__, "_ListClass"):
            self.__class__._ListClass.__init__(self, list, parent)

    def GenClass_newClass_func(self, classname, list=None, parent=None):
        # Factory: instantiate a class looked up in the generated
        # module's globals, or None if the name maps to a false value.
        klass = self.__class__._Globals[classname]
        if klass:
            return klass(list, parent)
        else:
            return None

    newclass = new.classobj(classname + '_base', (listtype,),
                            {'Attributes' : attributes,
                             '__init__' : GenClass_init_func,
                             '_ListClass' : listtype,
                             '_Globals' : myglobals,
                             'newClass' : GenClass_newClass_func})
    _install_funcs(newclass)
    implclass = new.classobj(classname, (newclass,), {})
    return (newclass, implclass)
def __init__(self):
    # Connection / session state defaults.
    self.host = "localhost"
    self.port = 2663
    self.isSessionRunning = False
    self.timeline = ""
    self.waitStr = None
    self.waitFlag = threading.Event()
    self.PlayState = -1
    self.lastMessage = {}
    self.lastSubtitleNum = 0
    self.lastSubtitlesEnabled = False
    self.lastAudioTrackNum = 0
    # Generate one action class per request definition.
    group = self.AddGroup('Requests')
    for className, scancode, descr in ttRequests:
        clsAttributes = dict(name=descr, value=scancode)
        cls = new.classobj(className, (stdAction,), clsAttributes)
        group.AddAction(cls)
    group = self.AddGroup('Commands')
    for className, scancode, descr, ParamDescr in ttCommands:
        clsAttributes = dict(name=descr, value=scancode)
        if ParamDescr == "":
            # Parameterless commands: IP_-prefixed ones are plain
            # actions, the rest are window-message actions.
            if className[0:3] == "IP_":
                cls = new.classobj(className, (stdAction,), clsAttributes)
            else:
                cls = new.classobj(className, (wmAction,), clsAttributes)
        else:
            # Commands taking a string parameter.
            cls = new.classobj(className, (stdActionWithStringParameter,), clsAttributes)
            cls.parameterDescription = ParamDescr
        group.AddAction(cls)
def suite():
    # Build the Subversion repository test suite.  For each scope both a
    # direct-access and a cached-repository test class are generated.
    suite = unittest.TestSuite()
    if has_svn:
        tests = [(NormalTests, ''),
                 (ScopedTests, u'/tête'),
                 (RecentPathScopedTests, u'/tête/dir1'),
                 (NonSelfContainedScopedTests, '/tags/v1'),
                 (AnotherNonSelfContainedScopedTests, '/branches'),
                 ]
        # Tests known not to apply to the cached variants.
        skipped = {
            'SvnCachedRepositoryNormalTests': [
                'test_changeset_repos_creation',
            ],
            'SvnCachedRepositoryScopedTests': [
                'test_changeset_repos_creation',
                'test_rev_navigation',
            ],
        }
        for test, scope in tests:
            tc = new.classobj('SubversionRepository' + test.__name__,
                              (SubversionRepositoryTestCase, test),
                              {'path': REPOS_PATH + scope})
            suite.addTest(unittest.makeSuite(
                tc, 'test', suiteClass=SubversionRepositoryTestSetup))
            tc = new.classobj('SvnCachedRepository' + test.__name__,
                              (SvnCachedRepositoryTestCase, test),
                              {'path': REPOS_PATH + scope})
            # Replace skipped tests with a no-op method.
            for skip in skipped.get(tc.__name__, []):
                setattr(tc, skip, lambda self: None) # no skip, so we cheat...
            suite.addTest(unittest.makeSuite(
                tc, 'test', suiteClass=SubversionRepositoryTestSetup))
    else:
        print "SKIP: versioncontrol/tests/svn_fs.py (no svn bindings)"
    return suite
def __init__(self, entity=None, base=None, **kw):
    """Bind this wrapper to a DAO instance.

    Uses `entity` directly when supplied; otherwise looks the class up
    in the declarative registry of `base` (creating a declarative
    subclass on the fly if absent).  Remaining keyword arguments are
    set as attributes, and any configured entity mixes are applied.
    """
    self.get_table_args()
    if entity:
        self._dao = entity
    else:
        if len(self.__slots__) == 0:
            if base:
                self.__decl_base = base
                # Reuse the registered declarative class when the name is
                # already known; otherwise synthesize one from the proxy
                # class plus the declarative base.
                self._dao = \
                    base._decl_class_registry[self.__class__.__name__]()\
                    if self.__class__.__name__ in base._decl_class_registry\
                    else classobj(
                        self.__class__.__name__,
                        (self.__clz_proxy__,base,),
                        self.get_table_args() or {}
                    )() # instantiate
            else:
                # NOTE(review): the bases tuple contains a nested tuple
                # (object,) rather than object itself -- looks like a
                # long-standing typo; confirm before changing.
                self._dao = classobj(
                    self.__class__.__name__,
                    (self.__clz_proxy__,(object,),),
                    self.get_table_args() or {}
                )() # instantiate
            self.__class__.__decl_class__ = self._dao.__class__
    # Apply caller-supplied attribute values.
    for k,v in kw.items():
        setattr(self, k, v)
    # Apply configured entity mixes, if this class supports mixins.
    if hasattr(self, 'mixin'):
        if 'entity_mixes' in self.__pyaella_args__:
            for em in self.__pyaella_args__['entity_mixes']:
                self.mixin(em)
def setup(**kwargs):
    """
    A drop in replacement for distutils.core.setup which integrates
    nicely with kiwi.environ

    :attribute resources:
    :attribute global_resources:
    :attribute templates: List of templates to install
    """
    # Pop kiwi-specific options so distutils never sees them.
    resources = {}
    global_resources = {}
    templates = []
    if 'resources' in kwargs:
        resources = kwargs.pop('resources')
    if 'global_resources' in kwargs:
        global_resources = kwargs.pop('global_resources')
    if 'templates' in kwargs:
        templates = kwargs.pop('templates')

    def run_install(self):
        # Compile translation catalogs, run the stock install, then
        # expand and install template files.
        name = kwargs.get('name')
        if name:
            self.data_files.extend(compile_po_files(name))
        KiwiInstallData.run(self)
        varext = _VariableExtender(self.distribution)
        for path, files in templates:
            # Skip templates inside eggs for now
            if 'bdist_egg' in self.distribution.commands:
                continue
            install = self.distribution.get_command_obj('install')
            target = os.path.join(install.prefix, path)
            if install.root:
                # Re-root the target path under install.root.
                if target[0] == '/':
                    target = target[1:]
                target = os.path.join(install.root, target)
            if not os.path.exists(target):
                info("creating %s" % target)
                os.makedirs(target)
            for filename in files:
                # Substitute variables before writing the template out.
                data = open(filename).read()
                data = varext.extend(data)
                target_file = os.path.join(target, os.path.basename(filename))
                info('installing template %s' % target_file)
                open(target_file, 'w').write(data)

    # distutils uses old style classes
    InstallData = new.classobj('InstallData', (KiwiInstallData,),
                               dict(run=run_install))
    InstallLib = new.classobj('InstallLib', (KiwiInstallLib,),
                              dict(resources=resources,
                                   global_resources=global_resources))
    cmdclass = dict(install_data=InstallData, install_lib=InstallLib,
                    clean=KiwiClean)
    kwargs.setdefault('cmdclass', cmdclass).update(cmdclass)
    DS_setup(**kwargs)
def newenvironment(self, name, nargs=0, definition=None, opt=None):
    """
    Create a \\newenvironment

    Required Arguments:
    name -- name of the macro to create
    nargs -- integer number of arguments that the macro has
    definition -- two-element tuple containing the LaTeX definition.
        Each element should be a string.  The first element
        corresponds to the beginning of the environment, and the
        second element is the end of the environment.
    opt -- string containing the LaTeX code to use in the
        optional argument

    Examples::
        c.newenvironment('mylist', 0, (r'\\begin{itemize}', r'\\end{itemize}'))

    """
    name = str(name)
    # Macro already exists
    if self.has_key(name):
        if not issubclass(self[name], (plasTeX.NewCommand,
                                       plasTeX.Definition)):
            return
        macrolog.debug('redefining environment "%s"', name)
    if nargs is None:
        nargs = 0
    assert isinstance(nargs, int), 'nargs must be an integer'
    if definition is not None:
        assert isinstance(definition, (tuple,list)), \
            'definition must be a list or tuple'
        assert len(definition) == 2, 'definition must have 2 elements'
        # BUG FIX: work on a list copy.  Tuples were explicitly allowed
        # above (and the docstring example passes one), but the item
        # assignments below raised TypeError on a tuple.
        definition = list(definition)
        if isinstance(definition[0], basestring):
            definition[0] = [x for x in Tokenizer(definition[0], self)]
        if isinstance(definition[1], basestring):
            definition[1] = [x for x in Tokenizer(definition[1], self)]
    if isinstance(opt, basestring):
        opt = [x for x in Tokenizer(opt, self)]
    macrolog.debug('creating newenvironment %s', name)
    # Begin portion
    newclass = new.classobj(name, (plasTeX.NewCommand,),
                            {'nargs':nargs,'opt':opt,'definition':definition[0]})
    self.addGlobal(name, newclass)
    # End portion
    newclass = new.classobj('end'+name, (plasTeX.NewCommand,),
                            {'nargs':0,'opt':None,'definition':definition[1]})
    self.addGlobal('end' + name, newclass)
def get_class(self, name, inuse=False):
    """
    return a ClassSerializer named name, if the class hasn't previously
    been requested create a new class, otherwise return the cached class.
    """
    key = "{%s}%s" % (self.tns, name)
    try:
        klass = self.ctypes[key]
    except KeyError:
        # Cache miss: build the class once and memoize it.  (Was a bare
        # `except:`, which would also have swallowed unrelated errors.)
        typeklass = new.classobj("types", (), {})
        klass = new.classobj(name, (ClassSerializer, object),
                             {"types": typeklass, "__name__": name})
        self.ctypes[key] = klass
    # Only latch the in-use flag; never clear it once set.
    if not getattr(klass, "inuse", False):
        klass.inuse = inuse
    return klass
def get_app(config, config_vars, profiler=None, ctx_mixins=None, **kwargs):
    # Assemble application and context classes on the fly from mixins
    # selected by the configuration, then instantiate the app.
    app_bases = [HandleExceptionMixin]
    if ctx_mixins:
        ctx_bases = list(ctx_mixins)
    else:
        ctx_bases = []
    if profiler:
        app_bases.append(ProfilerMixin)
    if config.session_server:
        # session_server is "host:port" or a bare host (default port).
        try:
            sess_serv_host, sess_serv_port = config.session_server.split(':')
        except ValueError:
            sess_serv_host, sess_serv_port = config.session_server, 34343
        else:
            try:
                sess_serv_port = int(sess_serv_port)
            except ValueError:
                sys.exit('bad session server port specification: %s' % sess_serv_port)
        kwargs['session_appid'] = config.appname
        kwargs['session_server'] = sess_serv_host
        kwargs['server_port'] = sess_serv_port
        if config.session_timeout:
            kwargs['session_age'] = int(config.session_timeout)
        else:
            kwargs['session_age'] = 600
        app_bases.append(ModularSessionApp)
        if have_branching_session:
            ctx_bases.append(BranchingSessionContext)
        else:
            ctx_bases.append(SessionAppContext)
    else:
        app_bases.append(ModularApp)
        ctx_bases.append(SimpleAppContext)
    ctx_bases.append(CommonAppContext)
    kwargs['secret'] = config.session_secret
    # This is a *little* gross... create a class on the fly
    ctx_cls = new.classobj('AlbaCtx', tuple(ctx_bases),
                           dict(__init__=call_all('__init__')))

    def create_context(self):
        return ctx_cls(self)

    app_cls = new.classobj('AlbaApp', tuple(app_bases),
                           dict(create_context=create_context))
    app = app_cls(**kwargs)
    if profiler:
        app.profiler = profiler
    app.config = config
    app.config_vars = config_vars
    return app
def _create(self): fields = [] myfields = {} for k in self.fields: field = self[k] attr = {} if isinstance(field.type, gluon.sql.SQLCustomType): ftype = self._db._translator[field.type.native or \ field.type.type](**attr) elif field.type[:2] == 'id': continue elif field.type[:10] == 'reference ': if field.notnull: attr = dict(required=True) referenced = field.type[10:].strip() ftype = self._db._translator[field.type[:9]](self._db[referenced]) elif not field.type in self._db._translator\ or not self._db._translator[field.type]: raise SyntaxError, 'Field: unknown field type: %s' % field.type else: ftype = self._db._translator[field.type](**attr) myfields[field.name] = ftype self._tableobj = classobj(self._tablename, (google_db.Model, ), myfields) return None
def taskify(baseclass, name):
    # Create a task-enabled variant of an application/splitter class:
    # same schema plus the task-specific items, with a task base class
    # mixed in ahead of the original.
    smajor = baseclass._schema.version.major
    sminor = baseclass._schema.version.minor
    cat = baseclass._category
    # NOTE(review): categories other than these two leave schema_items
    # and taskclass unbound (NameError below) -- confirm callers only
    # pass applications/splitters.
    if cat == "applications":
        schema_items = _app_schema
        taskclass = TaskApplication
    elif cat == "splitters":
        schema_items = _splitter_schema
        taskclass = TaskSplitter
    classdict = {
        "_schema": Schema(Version(smajor, sminor),
                          dict(baseclass._schema.datadict.items() + schema_items)),
        "_category": cat,
        "_name": name,
        "__init__": __task__init__,
    }
    # Preserve the base class's own exported methods, if it defines any.
    if '_exportmethods' in baseclass.__dict__:
        classdict['_exportmethods'] = baseclass.__dict__['_exportmethods']
    cls = classobj(name, (taskclass, baseclass), classdict)
    global handler_map
    # Use the same handlers as for the base class
    handler_map.append((getName(baseclass), name))
    return cls
def char_data(self, data):
    """Handle character data for the current entry.

    Maps the LaTeX control sequence found in `data` to the code point
    held in self.str: plain macros get a generated Command subclass,
    \\ding{N} entries populate the Wingdings table, and accent macros
    populate the matching accent character map.
    """
    if self.str is None:
        self.inseq = False
        return
    if self.inseq == False:
        return
    # Just a macro
    m = re.match(r'^\\(\w+|\W)$', data)
    if m:
        name = str(m.group(1)).replace('\\','\\\\')
        if name not in self.defined:
            g[name+'_'] = new.classobj(name+'_', (Command,),
                                       {'unicode':chr(self.str),
                                        'macroName':name})
            self.defined[name] = True
    # Wingdings
    m = re.match(r'^\\ding\{(\d+)\}$', data)
    if m:
        # (removed a stray, result-discarded int(m.group(1)) statement)
        Characters.ding.values[int(m.group(1))] = chr(self.str)
    # Accented characters
    m = re.match(r'^(\\(%s)\{([^\}])\})' % '|'.join(list(self.accentmap.keys())), data)
    if m and m.group(1) not in self.defined:
        accent = self.accentmap[m.group(2)]
        accent.chars[m.group(3)] = chr(self.str)
        self.defined[m.group(1)] = True
    self.inseq = False
def initfunc(**kwargs):
    """Instantiate a freshly created PyPLearnObject subclass.

    A new subclass named `name` (captured from the enclosing scope) is
    created, sanity-checked, and instantiated with the given keyword
    arguments.
    """
    subclass = new.classobj(name, (PyPLearnObject,), {})
    assert issubclass( subclass, PyPLearnObject )
    instance = subclass(**kwargs)
    assert isinstance(instance, PyPLearnObject)
    return instance
def newcounter(self, name, resetby=None, initial=0, format=None):
    """
    Create a new counter

    This method corresponds to LaTeX's \\newcounter command

    Required Arguments:
    name -- name of the counter.  The generate counter class will use
        this name.  Also, a new macro called 'the<name>' will also be
        generated for the counter format.

    Keyword Arguments:
    resetby -- the name of the counter that this counter is reset by
    initial -- initial value for the counter
    format -- format string for the 'the<name>' macro; defaults to the
        bare counter value

    """
    name = str(name)
    # Counter already exists
    if self.counters.has_key(name):
        macrolog.debug('counter %s already defined', name)
        return
    self.counters[name] = plasTeX.Counter(self, name, resetby, initial)
    if format is None:
        format = '${%s}' % name
    # Define the accompanying \the<name> formatting macro.
    newclass = new.classobj('the'+name, (plasTeX.TheCounter,),
                            {'format': format})
    self.addGlobal('the'+name, newclass)
def create_music_command_classes(debug=False):
    # Generate one Command subclass per music-engine opcode, register it
    # in this module's globals, and return the list of classes.
    klasses = []
    for (byte, cmd) in music_commands.items():
        cmd_name = cmd[0].replace(" ", "_")
        params = {
            "id": byte,
            "size": 4,
            "end": cmd[0] in music_command_enders,
            "macro_name": cmd[0]
        }
        params["param_types"] = {}
        # Entries after the name describe the command's parameters;
        # each parameter's class contributes to the total size.
        if len(cmd) > 1:
            param_types = cmd[1:]
            for (i, each) in enumerate(param_types):
                thing = {"name": each[0], "class": each[1]}
                params["param_types"][i] = thing
                if debug:
                    logging.debug("each is {0} and thing[class] is {1}".format(each, thing["class"]))
                params["size"] += thing["class"].size
        klass_name = cmd_name+"Command"
        klass = classobj(klass_name, (Command,), params)
        globals()[klass_name] = klass
        # Some commands accept a variable number of parameters.
        if klass.macro_name == "notetype":
            klass.allowed_lengths = [1, 2]
        elif klass.macro_name in ["togglenoise", "sfxtogglenoise"]:
            klass.allowed_lengths = [0, 1]
        klasses.append(klass)
    # later an individual klass will be instantiated to handle something
    return klasses
def _help_action_sets_default(self):
    """ Returns a list containing an action set class whose **actions**
        correspond to the help docs in the help_docs extension point.
    """
    # Menu name -> extension-point contributions for that menu.
    extension_point_mapping = {
        DOCS_MENU: self.help_docs,
        EXAMPLES_MENU: self.help_examples,
        DEMOS_MENU: self.help_demos,
        DOWNLOADS_MENU: self.help_downloads}
    # Construct traits for the action set
    ns = {'id': 'apptools.help.help_plugin.help_action_set',
          'name': 'Help Plugin ActionSet',
          'groups': [ Group( id=DOCS_GROUP,
                             before='AboutGroup',
                             path=HELP_MENU ) ]
          }
    for (menu_name, items) in extension_point_mapping.items():
        # Only create a menu for categories that have contributions.
        if len(items) > 0:
            menu = Menu( name = menu_name,
                         class_name = PKG + '.help_submenu_manager:%sMenuManager' % menu_name )
            # Promoted menus sit on the menu bar before Help; the rest
            # nest inside the Help menu's docs group.
            if menu_name in self.menus:
                menu.path = 'MenuBar'
                menu.before = 'Help'
            else:
                menu.path = HELP_MENU
                menu.group = DOCS_GROUP
            # Append the menu.
            ns.setdefault('menus', []).append(menu)
    return [new.classobj('SPLHelpActionSet', (ActionSet,), ns)]
def getPort(self): """ Returns a Port object of the same type as the one specified as the porttype argument during the object instantiation. It uses the classobj from the new module to generate a class on runtime. The classobj generates a class using the following arguments: name: The name of the class to generate bases: A tuple containing all the base classes to use dct: A dictionary containing all the attributes such as functions, and class variables It is important to notice that the porttype is a BULKIO__POA type and not a BULKIO type. The reason is because it is used to generate a Port class that will be returned when the getPort() is invoked. The returned class is the one acting as a server and therefore must be a Portable Object Adapter rather and a simple BULKIO object. """ # The classobj generates a class using the following arguments: # # name: The name of the class to generate # bases: A tuple containing all the base classes to use # dct: A dictionary containing all the attributes such as # functions, and class variables PortClass = classobj('PortClass', (self.port_type,), {'pushPacket':self.pushPacket, 'pushSRI':self.pushSRI}) # Create a port using the generate Metaclass and return an instance port = PortClass() return port._this()
def findViewletManager(self, name):
    # Locate a viewlet manager by name, falling back to a registry
    # search for managers registered against custom interfaces.
    managerObj = queryMultiAdapter((self.context, self.request, self),
                                   IViewletManager, name)
    if not managerObj:
        # Here's where we go totally off the deep end...
        # Since we can't find this viewlet manager with the basic
        # (self.context, self.request, self) multiadapter lookup, this
        # must be some sort of custom manager, registered to other
        # interfaces.  In order to find it, we need to do a bit of
        # reverse engineering...
        # Since Plone's generic setup process for viewlets constrains us
        # to one viewlet manager / name, we're going to assume that the
        # adapter with this name, and a provided interface that is or
        # extends IViewletManger is the one we're looking for.
        # So, start with a search of the adapter registry...
        reg = [reg for reg in getGlobalSiteManager().registeredAdapters()
               if reg.name == name][0]
        # So far, I think we're stuck with context and request being the
        # first two interfaces.
        providedClasses = [self.context, self.request]
        # Now, we take a look at the required interfaces...
        # And create some dummy classes that implement them.
        for iface in reg.required[2:]:
            tempClass = classobj("dummy", (object,), {})
            classImplements(tempClass, iface)
            providedClasses.append(tempClass())
        # Now just do a basic multiadapter lookup using our new objects
        # providing the correct interfaces...
        managerObj = queryMultiAdapter(tuple(providedClasses),
                                       reg.provided, name)
    return managerObj
def __new__(meta, classname, bases, classDict):
    """Metaclass constructor: wire schema/table metadata into the class.

    Loads ./schema.yaml (when present in the CWD) as the domain model,
    derives plural/standard table names from the class name, and
    attaches a lightweight proxy class used as a mapping base.
    """
    # (removed an unused `schemafp = os.path.abspath(...)` local)
    sch = get_lexical_tokens('schema.yaml') \
        if os.path.exists('schema.yaml') \
        else None
    if not sch:
        # Fall back to an empty model configuration.
        sch = dinj.ModelConfig()
    table_name_plural, table_name_standard = \
        create_default_tablename(classname)
    classDict['__domain__'] = sch
    classDict['__schema__'] = None
    classDict['__tablename__'] = table_name_plural
    classDict['__pkname__'] = '%s_id'%table_name_standard
    members = {
        '__tablename__':table_name_plural
    }
    # Proxy class later used as a base for the declarative DAO.
    classDict['__clz_proxy__'] = classobj(classname+'Proxy', (object,), members)
    # Note: only the first declared base class is retained.
    return type.__new__( meta, classname, (bases[0],), classDict )
def _make_functions(namespace): """Make the functions for adding modules and add them to the namespace automatically. """ # Ignore these since they are already provided. ignore = ['axes', 'text', 'orientation_axes'] for mod in registry.modules: func_name = camel2enthought(mod.id) class_name = mod.id if func_name.endswith('_module'): func_name = func_name[:-7] class_name = class_name[:-6] class_name = class_name + 'Factory' # Don't create any that are already defined or ignored. if class_name in namespace or func_name in ignore: continue # The class to wrap. klass = new.classobj(class_name, (_AutomaticModuleFactory,), {'__doc__': mod.help,} ) klass._metadata = mod # The mlab helper function. func = make_function(klass) # Inject class/function into the namespace and __all__. namespace[class_name] = klass namespace[func_name] = func __all__.append(func_name)
def decorator(base_class):
    """Register one subclass of *base_class* per platform configuration.

    Each generated subclass copies the base class's dict, pins
    ``desired_capabilities`` to one entry of ``platforms``, and is
    published in the base class's module as ``<BaseName>_<index>``.
    """
    target_module = sys.modules[base_class.__module__].__dict__
    for index, capabilities in enumerate(platforms, start=1):
        attrs = dict(base_class.__dict__)
        attrs['desired_capabilities'] = capabilities
        subclass_name = "%s_%s" % (base_class.__name__, index)
        target_module[subclass_name] = new.classobj(subclass_name,
                                                    (base_class,), attrs)
def hook__setattr__(obj):
    # Rebase obj's class on the fly so every attribute write is routed
    # through `hook`.  The presence of __attrproxy__ marks an object as
    # already hooked, making this idempotent.
    if not hasattr(obj,'__attrproxy__'):
        C = obj.__class__
        import new
        # The replacement class keeps the original class and its bases,
        # adds the __attrproxy__ marker, and installs a __setattr__ that
        # calls hook(self, key, value, original_setattr).  The original
        # bound __setattr__ (or None) is frozen in as a lambda default.
        obj.__class__=new.classobj(C.__name__,(C,)+C.__bases__,
            {'__attrproxy__':[],
             '__setattr__':lambda self,k,v,osa=getattr(obj,'__setattr__',None),hook=hook: hook(self,k,v,osa)})
def load_local_class(bytes):
    # Reconstruct a class serialized by the matching dump routine.
    t = loads(bytes)
    if not isinstance(t, tuple):
        # A bare name means the class was serialized by reference only.
        return classes_loaded[t]
    name, bases, internal, external = t
    if name in classes_loaded:
        # Already rebuilt once; reuse it so class identity stays stable.
        return classes_loaded[name]
    cls = new.classobj(name, bases, internal)
    classes_loaded[name] = cls
    # "External" attributes may carry a (kind, name) key describing a
    # descriptor that must be rebuilt rather than assigned verbatim.
    for k, v in loads(external).items():
        if isinstance(k, tuple):
            t, k = k
            if t == 'property':
                fget, fset, fdel, doc = v
                v = property(fget, fset, fdel, doc)
            if t == 'staticmethod':
                v = staticmethod(v)
            if t == 'method':
                # Bound/unbound method: the function travels as a
                # serialized closure.
                im_self, _func = v
                im_func = load_closure(_func)
                v = types.MethodType(im_func, im_self, cls)
        setattr(cls, k, v)
    return cls
def build_tests( tmp_dir=None, testing_shed_tools=False, master_api_key=None, user_api_key=None ):
    """
    If the module level variable `data_managers` is set, generate
    `DataManagerToolTestCase` classes for all of its tests and put them
    into this modules globals() so they can be discovered by nose.
    """
    if data_managers is None:
        log.warning( 'data_managers was not set for Data Manager functional testing. Will not test.' )
        return
    # Push all the data_managers tests to module level
    G = globals()
    # Eliminate all previous tests from G.
    for key, val in G.items():
        if key.startswith( 'TestForDataManagerTool_' ):
            del G[ key ]
    # first we will loop through data table loc files and copy them to temporary location, then swap out filenames:
    for data_table_name, data_table in data_managers.app.tool_data_tables.get_tables().items():
        for filename, value in list( data_table.filenames.items() ):
            # Tests must not modify the real loc files, so each one is
            # copied into tmp_dir and the table re-pointed at the copy.
            new_filename = tempfile.NamedTemporaryFile( prefix=os.path.basename( filename ), dir=tmp_dir ).name
            try:
                shutil.copy( filename, new_filename )
            except IOError as e:
                log.warning( "Failed to copy '%s' to '%s', will create empty file at '%s': %s", filename, new_filename, new_filename, e )
                open( new_filename, 'wb' ).close()
            if 'filename' in value:
                value[ 'filename' ] = new_filename
            del data_table.filenames[ filename ] # remove filename:value pair
            data_table.filenames[ new_filename ] = value # add new value by
    for i, ( data_manager_id, data_manager ) in enumerate( data_managers.data_managers.items() ):
        tool = data_manager.tool
        if not tool:
            log.warning( "No Tool has been specified for Data Manager: %s", data_manager_id )
        if tool.tests:
            # fixme data_manager.tool_shed_repository_info_dict should be filled when is toolshed based
            shed_tool_id = None if not testing_shed_tools else tool.id
            # Create a new subclass of ToolTestCase, dynamically adding methods
            # named test_tool_XXX that run each test defined in the tool config.
            name = "TestForDataManagerTool_" + data_manager_id.replace( ' ', '_' )
            baseclasses = ( DataManagerToolTestCase, )
            namespace = dict()
            for j, testdef in enumerate( tool.tests ):
                # Closure factory: binds the current testdef so every
                # generated method runs its own test definition.
                def make_test_method( td ):
                    def test_tool( self ):
                        self.do_it( td )
                    return test_tool
                test_method = make_test_method( testdef )
                test_method.__doc__ = "%s ( %s ) > %s" % ( tool.name, tool.id, testdef.name )
                namespace[ 'test_tool_%06d' % j ] = test_method
            namespace[ 'shed_tool_id' ] = shed_tool_id
            namespace[ 'master_api_key' ] = master_api_key
            namespace[ 'user_api_key' ] = user_api_key
            # The new.classobj function returns a new class object, with name name, derived
            # from baseclasses (which should be a tuple of classes) and with namespace dict.
            new_class_obj = new.classobj( name, baseclasses, namespace )
            G[ name ] = new_class_obj
def __init__(self):
    # Serial connection state; opened lazily elsewhere.
    self.serial = None
    self.response = None
    # Generate one serial-command action class per definition and group
    # them as declared in commandsList.
    for groupname, list in commandsList:
        group = self.AddGroup(groupname)
        for classname, title, desc, serial in list:
            # Fall back to the title when no separate description given.
            if desc is None:
                desc = title
            clsAttributes = dict(name=title, description=desc, serialcmd=serial)
            cls = new.classobj(classname, (EpsonTW700SerialAction,), clsAttributes)
            group.AddAction(cls)
        # The Volume group also gets the two hand-written actions.
        if (groupname == 'Volume'):
            group.AddAction(EpsonTW700SerialsetVolumeAbsolute)
            group.AddAction(EpsonTW700SerialsetVolumeRelative)
def getPort(self): PortClass = classobj( 'PortClass', (BULKIO__POA.dataSDDS, ), { '_get_attachmentIds': self._get_attachmentIds, '_get_attachedStreams': self._get_attachedStreams, '_get_usageState': self._get_usageState, 'getUser': self.getUser, 'getStreamDefinition': self.getStreamDefinition, 'pushSRI': self.pushSRI, 'attach': self.attach, 'detach': self.detach }) # Create a port using the generate Metaclass and return an instance port = PortClass() return port._this()
def invoke(self, tex):
    # \newsubfloat implementation: register the subfloat class, its
    # counter, and its name macro in the document context.
    Command.invoke(self, tex)
    c = self.ownerDocument.context
    name = str(self.attributes['name'])
    options = self.attributes['options'] or {}
    # Create new subfloat class
    newclass = new.classobj(name, (subfloat,),
                            {'options':options,'counter':name})
    c.addGlobal(name, newclass)
    # Create new counter -- alphabetic label, reset with each figure
    c.newcounter(name, resetby='figure', format='${%s.alph}' % name)
    # Create the float name macro
    c.newcommand(name+'name', 0, name)
def test_bad_class_type_fault(self, instancemock):
    # Activating a component whose constructor raises TypeError must
    # surface as CannotActivateComponentExImpl.
    def raiser(*args):
        raise TypeError("boom!")
    # Install a fake 'MockClass' module containing a same-named class.
    mockmodule = new.module('MockClass')
    sys.modules['MockClass'] = mockmodule
    mockmodule.__dict__['MockClass'] = new.classobj('MockClass', (), {})
    instancemock.side_effect = raiser
    offshootpoamock = mock.Mock(spec=omniORB.PortableServer.POA)
    offshootpoamock.create_POA.return_value = \
        mock.Mock(spec=omniORB.PortableServer.POA)
    self.tc.createPOAForComponent.return_value = offshootpoamock
    self.assertRaises(Acspy.Container.CannotActivateComponentExImpl,
                      self.tc.activate_component,
                      12345, 12, 'Test', 'MockClass',
                      'IDL:alma/acspytest/MockClass:1.0')
    # Clean up the fake module so later tests don't see it.
    del sys.modules['MockClass']
def makeSQLTests(base, suffix, globals):
    """
    Make a test case for every db connector which can connect.

    @param base: Base class for test case. Additional base classes will
        be a DBConnector subclass and unittest.TestCase
    @param suffix: A suffix used to create test case names. Prefixes are
        defined in the DBConnector subclasses.
    @param globals: namespace (typically the caller's globals()) into
        which the generated test classes are injected
    """
    connectors = [GadflyConnector, SQLiteConnector, PyPgSQLConnector,
                  PsycopgConnector, MySQLConnector, FirebirdConnector]
    for connclass in connectors:
        name = connclass.TEST_PREFIX + suffix
        import new
        # Copy the base's dict so its test methods are picked up.
        klass = new.classobj(name, (connclass, base, unittest.TestCase),
                             base.__dict__)
        globals[name] = klass
def newmuskip(self, name, initial=0):
    """
    Create a new muglue (like \\newmuskip)

    Required Arguments:
    name -- name of muglue to create

    Keyword Arguments:
    initial -- value to initialize to

    """
    name = str(name)
    # Generate a new muglue class
    macrolog.debug('creating muskip %s', name)
    newclass = new.classobj(name, (plasTeX.MuGlueCommand, ),
                            {'value': plasTeX.muglue(initial)})
    self.addGlobal(name, newclass)
def newtheorem(self, name, caption, cnt, within):
    # Register a theorem environment class plus its counter.
    name = str(name)
    d = {'caption':caption, }
    # Use the shared counter when given, else one named after the env.
    if cnt != None:
        d['counter'] = cnt
    else:
        d['counter'] = name
    # Pick a numbering format keyed on the resetting counter, if known.
    if self.rcformat.has_key(within):
        fmt = self.rcformat[within] % name
    else:
        fmt = None
    self.newcounter(d['counter'], resetby=within, format=fmt)
    nc = new.classobj(name, (plasTeX.NewTheorem,), d)
    self.addGlobal(name, nc)
def newcount(self, name, initial=0):
    """
    Create a new count (like \\newcount)

    Required Arguments:
    name -- name of count to create

    Keyword Arguments:
    initial -- value to initialize to

    """
    name = str(name)
    # Generate a new count class
    macrolog.debug('creating count %s', name)
    newclass = new.classobj(name, (plasTeX.CountCommand, ),
                            {'value': plasTeX.count(initial)})
    self.addGlobal(name, newclass)
def make_movement_command_classes():
    # Build one Command subclass per movement-command byte and return them
    # keyed by byte value.
    classes = {}
    # Named movements occupy the low byte values, in declaration order.
    for byte, name in enumerate(movements):
        movement_commands[byte] = {
            'name': name,
        }
    # Remaining byte values get generic step_XX names.
    # NOTE(review): this range starts at the LAST named byte, so the final
    # named movement is overwritten with a step_XX entry -- confirm whether
    # `byte + 1` was intended here.
    for byte in xrange(byte, num_movement_commands):
        movement_commands[byte] = {'name': 'step_{:02x}'.format(byte)}
    for byte, command in movement_commands.items():
        class_name = 'Movement_' + command['name']
        attributes = {}
        attributes['id'] = byte
        # Every movement command takes a leading Byte parameter plus any
        # additional declared param_types.
        attributes['param_classes'] = [Byte] + command.get(
            'param_types', list())
        attributes.update(command)
        classes[byte] = classobj(class_name, (Command, ), attributes)
    return classes
def __new__(cls, src, *args, **kwargs):
    # Pick (or lazily create) a weak-proxy wrapper class that mixes this
    # class with src's concrete class.
    kls = None
    srcKlass = src.__class__
    # Reuse an existing subclass that already incorporates src's class.
    for k in cls.__subclasses__():
        if issubclass(k, srcKlass):
            kls = k
            break
    if not kls:
        # No match: synthesize a mixin class deriving from both this class
        # and src's class, and publish it on this class's module so the
        # subclass scan above finds it on subsequent calls.
        mixinName = '_{}_weakMixin'.format(srcKlass.__name__)
        module = getmodule(cls)
        kls = new.classobj(mixinName, (cls, srcKlass), {})
        if module is not None:
            setattr(module, mixinName, kls)
    obj = object.__new__(kls)
    # Hold only a weak proxy so the wrapper does not keep src alive.
    obj.__target__ = weakref.proxy(src)
    return obj
def DotTest(oper, percision=7, numtests=1, expected_mean=1):
    """
    Dottest a Linear Operator
    @details s.t. @f$ \langle Ay,x \rangle \approx \langle A^{H}x, y \rangle @f$
    within @a percision digits.
    where @a A is in @a opers and x and y are created by
    @ref slimpy_base.Core.User.Structures.VectorSpace.VectorSpace.noise "A.range.noise( )" and
    @ref slimpy_base.Core.User.Structures.VectorSpace.VectorSpace.noise "A.domain.noise( )"
    respectively.
    @param oper a linear operator to test
    @param percision the precision to run the test at, determining a pass or a fail
    @param numtests number of independent dot-test runs added to the suite
    @param expected_mean currently unused -- TODO confirm it can be removed
    """
    suite = unittest.TestSuite()
    testloader = unittest.TestLoader()
    loadtest = testloader.loadTestsFromTestCase
    # Shared accumulator: each fixture run appends its <Ay,x>/<A^H x,y> ratio.
    ratio_list = []
    for i in range(numtests):
        # Synthesize a per-operator TestCase class so unittest reports the
        # result under the operator's name.
        dottest = new.classobj(str(oper), (DotTestFixture, ), {
            'oper': oper,
            'places': percision,
            'ratio_list': ratio_list
        })
        test = loadtest(dottest)
        suite.addTest(test)
    testRunner = TextTestRunner(sys.stdout, 1, 1)
    testRunner.run(suite)
    # Summary statistics over the collected ratios (ideal ratio is 1).
    ratarray = array(ratio_list)
    avrat = sum(ratarray) / len(ratarray)
    varrat = sum((ratarray - 1)**2)
    avvar = (avrat - 1)**2
    # vars() exposes the locals above to the %-format strings.
    print "Summary: "
    print " avg ratio: %(avrat).2e" % vars()
    print " avg var : %(avvar).2e" % vars()
    print " varience : %(varrat).2e" % vars()
def newdimen(self, name, initial=0):
    """
    Create a new dimen (like \\newdimen)

    Required Arguments:
    name -- name of dimen to create

    Keyword Arguments:
    initial -- value to initialize to
    """
    name = str(name)
    macrolog.debug('creating dimen %s', name)
    # Register a DimenCommand subclass with the initial dimension stored
    # in its `value` attribute.
    body = {'value': plasTeX.dimen(initial)}
    self.addGlobal(name, new.classobj(name, (plasTeX.DimenCommand,), body))
def register(self): """ put docstring here """ # If object already have 'uid' field update entry otherwise insert new if hasattr(self.object, 'uid') and self.object.uid: cmd = 'update' else: cmd = 'insert' # autogenerated fields uid = xml_models.IntField(xpath="/tasksResponse/%s%sResult/@%s" % (cmd, self.__model__, { 'insert': 'insertedEntityUid', 'update': 'updatedEntityUid' }[cmd]), default=None) lastUpdatedUid = xml_models.CharField( xpath="/tasksResponse/%s%sResult/@lastUpdatedUid" % (cmd, self.__model__)) errors = xml_models.Collection( Error, order_by="error_type", xpath="/tasksResponse/%s%sResult/errors/error" % (cmd, self.__model__)) other_errors = xml_models.Collection( Error, order_by="error_type", xpath="/tasksResponse/errors/error") def validate_on_load(cls): for e in cls.errors: exception = classobj(str(e.error_type), (Exception, ), {}) raise exception, e.error_message for e in cls.other_errors: exception = classobj(str(e.error_type), (Exception, ), {}) raise exception, e.error_message # autogenerated class cls = classobj( 'POSTResponse', (xml_models.Model, ), { 'uid': uid, 'lastUpdatedUid': lastUpdatedUid, 'errors': errors, 'other_errors': other_errors, 'validate_on_load': validate_on_load, }) #print self.xml return cls(xml=self.xml, dom=self.dom)
def create_extra_classes():
    '''
    Create classes for testing with other browsers
    '''
    generated = {}
    for platform in EXTRA_PLATFORMS:
        # Start from SeleniumTests' attributes and swap in the platform's
        # capabilities.
        body = dict(SeleniumTests.__dict__)
        body.update({'caps': EXTRA_PLATFORMS[platform]})
        name = '{}_{}'.format(platform, SeleniumTests.__name__)
        generated[name] = new.classobj(name, (SeleniumTests,), body)
    # Install the generated classes at module level so the test runner
    # discovers them.
    globals().update(generated)
def __call__(self, name, url, address, options):
    """
    Factory method.
    :param name: The stub class (or module) name.
    :type name: str
    :param url: The agent URL.
    :type url: str
    :param address: The AMQP address
    :type address: str
    :param options: A dict of gofer options
    :param options: Options
    :return: A stub instance.
    :rtype: Stub
    """
    # Build a named Stub subclass on the fly and return an instance of it.
    stub_class = classobj(name, (Stub,), {})
    return stub_class(url, address, options)
def test_existing_module(self):
    """Activation succeeds when the component's module is already loaded
    and its class implements the full component contract."""
    mockmodule = new.module('MockClass')
    # The fake class mixes in all three bases the container expects of a
    # real component implementation.
    mockmodule.__dict__['MockClass'] = new.classobj(
        'MockClass',
        (Acspy.Container.ContainerServices,
         Acspy.Container.ACSComponent,
         Acspy.Container.ComponentLifecycle), {})
    sys.modules['MockClass'] = mockmodule
    offshootpoamock = mock.Mock(spec=omniORB.PortableServer.POA)
    offshootpoamock.create_POA.return_value = \
        mock.Mock(spec=omniORB.PortableServer.POA)
    self.tc.createPOAForComponent.return_value = offshootpoamock
    # Pretend the module already has an active component instance.
    self.tc.compModuleCount[mockmodule] = 1
    self.assertEqual(
        True,
        self.tc.activate_component(12345, 12, 'Test', 'MockClass',
                                   'IDL:alma/acspytest/MockClass:1.0')
        is not None)
    # Clean up the fake module so other tests are unaffected.
    del sys.modules['MockClass']
def _generate_model(self, tablename, engine, model_name=None):
    # Reflect `tablename` from the database and wrap it in a declarative
    # model class named `model_name` (defaults to the table name).
    # NOTE(review): the `engine` parameter is unused -- reflection runs
    # against self.engine. Confirm whether the parameter should be used
    # or removed.
    if model_name is None:
        model_name = tablename
    _table = Table(tablename,
                   self.base.metadata,
                   autoload=True,
                   autoload_with=self.engine,
                   schema=self.schema)
    # Dynamically build the mapped class: column_prefix namespaces the
    # reflected columns, and each model carries the shared session pool.
    _model = classobj(
        model_name, (self.base, ), {
            '__table__': _table,
            '__mapper_args__': {
                'column_prefix': self.column_prefix
            },
            'db_session_pool': self.get_db_session_pool()
        })
    return _model
def parasite_axes_class_factory(axes_class=None):
    # Return (and memoize) a parasite variant of `axes_class`, defaulting
    # to the plain Axes class.
    if axes_class is None:
        axes_class = Axes

    new_class = _parasite_axes_classes.get(axes_class)
    if new_class is None:
        import new

        def _get_base_axes_attr(self, attrname):
            # Fetch attributes from the concrete Axes base, bypassing the
            # parasite mixin's overrides.
            return getattr(axes_class, attrname)

        new_class = new.classobj("%sParasite" % (axes_class.__name__),
                                 (ParasiteAxesBase, axes_class),
                                 {'_get_base_axes_attr': _get_base_axes_attr})
        # Cache so repeated calls return the identical class object.
        _parasite_axes_classes[axes_class] = new_class

    return new_class
def getItemClass(self):
    """
    Return the class used to create items of this Kind.

    If this Kind has superKinds and C{self.classes['python']} is not set
    a composite class is generated and cached from the superKinds.

    The L{Item<repository.item.Item.Item>} class is returned by default.
    """
    # An explicitly configured python class wins outright.
    try:
        return self._values['classes']['python']
    except KeyError:
        pass
    except TypeError:
        pass

    # Collect the distinct item classes of all superKinds.
    superClasses = []
    for superKind in self.getAttributeValue('superKinds', self._references):
        c = superKind.getItemClass()
        if c is not Item and c not in superClasses:
            superClasses.append(c)

    count = len(superClasses)
    if count == 0:
        c = Item
    elif count == 1:
        c = superClasses[0]
    else:
        # Multiple bases: synthesize a composite class whose name derives
        # from a hash of its bases' qualified names, so the same base
        # combination always yields the same class name.
        hash = 0
        for c in superClasses:
            hash = _combine(hash,
                            _hash('.'.join((c.__module__, c.__name__))))
        if hash < 0:
            hash = ~hash
        name = "class_%08x" % (hash)
        c = classobj(name, tuple(superClasses), {})

    # Cache the resolved class, keeping the entry transient so it is not
    # persisted with the item's values, then finish class setup.
    # NOTE(review): tail placement reconstructed from collapsed source --
    # confirm these four statements run for all branches, not just the
    # composite case.
    self._values['classes'] = { 'python': c }
    self._values._setTransient('classes')
    self._setupClass(c)
    return c
def internal_init(self, native=None, trigger=None, commons={},\
        commit=True, update_method={}):
    """
    CompositionField class that patches native field with custom
    `contribute_to_class` method

    Params:
         * native - Django field instance for current compostion field
         * trigger - one or some numberr of triggers that handle composition.
           Trigger is a dict with allowed keys:
            * on - signal or list of signals that this field handles
            * do - signals handler, with 3 params:
                * related instance
                * instance (that comes with signal send)
                * concrete signal (one from `on` value)
            * field_holder_getter - function that gets instance(that comes with signal send)\
              as parameter and returns field holder object (related instance)
            * sender - signal sender
            * sender_model - model instance or model name that send signal
            * commit - flag that indicates save instance after trigger appliance or not
         * commons - a trigger like field with common settings for all given triggers
         * update_method - dict for customization of update_method. Allowed params:
            * initial - initial value to field before applince of method
            * do - index of update trigger or trigger itself
            * queryset - query set or callable(with one param - `instance` of an holder model)
              that have to retun something iterable
            * name - custom method name instead of `update_FOO`

    NOTE(review): `commons` and `update_method` use mutable default
    arguments; they are only stored here, but confirm no caller mutates
    the shared defaults.
    """
    if native is not None:
        import new
        # Dynamically reparent this instance's class so it also inherits
        # from the native field's class -- the composition field then
        # duck-types as the field it wraps.
        self.__class__ = new.classobj(
            self.__class__.__name__,
            tuple([self.__class__, native.__class__] +
                  list(self.__class__.__mro__[1:])), {})
        # Copy the native field's instance state onto this instance.
        self.__dict__.update(native.__dict__)
    # Stash the composition configuration for later use.
    self._c_native = native
    self._c_trigger = trigger
    self._c_commons = commons
    self._c_commit = commit
    self._c_update_method = update_method
def __call__(self, table, properties=None, cls=None):
    """ Returns a tuple (mapped_class, table_class).
        'table' - sqlalchemy.Table to be mapped
        'properties' - dict containing additional informations about
        'cls' - (optional) class used as base for creating the mapper class
        (will be autogenerated if not available).
    """
    # Avoid the shared-mutable-default pitfall: a dict default would be
    # shared across every invocation of this factory.
    if properties is None:
        properties = {}
    if cls is None:
        # Autogenerate a mapper class named after the table.
        newCls = new.classobj('_mapped_%s' % str(table.name),
                              (MappedClassBase, ), {})
    else:
        newCls = cls
    mapper(newCls, table, properties=properties)
    return newCls
def make(base_classes=(), have_mt=False):
    """Use this static method to build a model class that possibly
    derives from other classes. If have_mt is True, then returned class
    will take into account multi-threading issues when dealing with
    observable properties."""
    # Normalize the requested bases (selecting the MT-aware model base
    # when asked for).
    good_bc = ModelFactory.__fix_bases(base_classes, have_mt)
    print "Base classes are:", good_bc
    # Memoize on the string form of the resolved base tuple so the same
    # combination always yields the identical class object.
    key = "".join(map(str, good_bc))
    if ModelFactory.__memoized.has_key(key):
        return ModelFactory.__memoized[key]
    # The generated class is intentionally anonymous (empty name).
    cls = new.classobj('', good_bc, {
        '__module__': '__main__',
        '__doc__': None
    })
    ModelFactory.__memoized[key] = cls
    return cls
def __getitem__(self, key):
    """
    Look through the stack of macros and return the requested one

    Required Arguments:
    key -- name of macro

    Returns: instance of requested macro
    """
    try:
        return self.top[key]
    except KeyError:
        pass
    # Unknown macro: warn (outside math mode) and synthesize a stand-in
    # class so later lookups for this name succeed.
    if self.warnOnUnrecognized and not self.isMathMode:
        log.warning('unrecognized command/environment: %s', key)
    newclass = new.classobj(str(key), (plasTeX.UnrecognizedMacro,), {})
    self[key] = newclass
    return newclass
def getPort(self):
    """
    Returns a Port object of the type CF__POA.Port.
    """
    # classobj(name, bases, dct) builds a class at runtime; here we bind
    # this instance's connect/disconnect handlers as the port's methods.
    port_attrs = {'connectPort': self.connectPort,
                  'disconnectPort': self.disconnectPort}
    PortClass = classobj('PortClass', (CF__POA.Port,), port_attrs)
    # Instantiate and hand back the CORBA object reference.
    return PortClass()._this()
def __init__(self):
    """Initialize plugin state and build action groups from the static
    command tables in commandsList."""
    self.serial = None
    self.response = None
    self.method = 0
    self.hwndMarantzControl = None
    # Renamed the loop variable from `list` to `commands` to stop
    # shadowing the builtin.
    for groupname, commands in commandsList:
        group = self.AddGroup(groupname)
        for classname, title, desc, app, serial in commands:
            # A missing description falls back to the title.
            if desc is None:
                desc = title
            clsAttributes = dict(name=title, description=desc,
                                 appcmd=app, serialcmd=serial)
            # One action class per command entry, derived from the shared
            # serial-action base.
            cls = new.classobj(classname, (MarantzSerialAction,),
                               clsAttributes)
            group.AddAction(cls)
    # Volume actions are hand-written classes, not table-generated.
    group = self.AddGroup('Volume')
    group.AddAction(MarantzSerialSetVolumeAbsolute)
    group.AddAction(MarantzSerialSetVolumeRelative)
def __init__(self):
    # Core streaming objects.
    self.server = pyicecream.Server()
    self.source = pyicecream.Source()
    self.backend = pyicecream.Backend(self)
    # Anonymous namespace object used purely as a hook container:
    # classobj builds an empty class and the trailing () instantiates it.
    self.hooks = new.classobj('hooks', (object, ), {})()
    # Source-side lifecycle hooks.
    self.hooks.source = pyicecream.HooksHolder()
    self.hooks.source.add_hook('start_play')
    self.hooks.source.add_hook('halfway')
    self.hooks.source.add_hook('transition')
    self.hooks.source.add_hook('eof')
    # Stream-side lifecycle hooks.
    self.hooks.stream = pyicecream.HooksHolder()
    self.hooks.stream.add_hook('eos')
    self.hooks.stream.add_hook('stop')
    # Sound played between tracks; defaults to silence.
    self.interval_sound = self.silent_file()
def parasite_axes_auxtrans_class_factory(axes_class=None):
    # Return (and memoize) an aux-transform parasite variant of
    # `axes_class`, first normalizing the input to a parasite class.
    if axes_class is None:
        parasite_axes_class = ParasiteAxes
    elif not issubclass(axes_class, ParasiteAxesBase):
        # Plain Axes subclass: wrap it into a parasite class first.
        parasite_axes_class = parasite_axes_class_factory(axes_class)
    else:
        parasite_axes_class = axes_class

    new_class = _parasite_axes_auxtrans_classes.get(parasite_axes_class)
    if new_class is None:
        import new
        new_class = new.classobj(
            "%sParasiteAuxTrans" % (parasite_axes_class.__name__),
            (ParasiteAxesAuxTransBase, parasite_axes_class),
            {'_parasite_axes_class': parasite_axes_class,
             'name': 'parasite_axes'})
        # Cache so repeated calls return the identical class object.
        _parasite_axes_auxtrans_classes[parasite_axes_class] = new_class

    return new_class
def get_test(uri):
    """Return a single test case for the specified URI."""
    for manifest in MANIFESTS:
        LOG.debug("Retrieving manifest %s" % manifest)
        test_manifest = Graph()
        test_manifest.parse(manifest)
        # Scan the approved tests for the requested URI.
        for result in test_manifest.query(APPROVED_TESTS):
            if str(result[0]) != uri:
                continue
            # Build a uniquely named TaskForceTest subclass and
            # instantiate it with the full query-result row.
            test_class = new.classobj(py_name(str(result[3])),
                                      (TaskForceTest,), {})
            return test_class(*result)
    return None
def invoke(self, tex):
    # amsthm \newtheorem: parse the arguments, set up counters, and
    # register a new theorem environment class.
    self.parse(tex)
    a = self.attributes
    name = str(a['name'])
    header = a['header']
    star = a['*modifier*'] == '*'   # starred form => unnumbered
    parent = a['parent']            # counter to number within
    shared = a['shared']            # existing counter to share
    style = self.ownerDocument.userdata.getPath(
        'packages/amsthm/currentstyle')
    # Record this theorem's name in the package's theorem registry.
    l = self.ownerDocument.userdata.getPath('packages/amsthm/theorems')
    l += [name]
    if star:
        # Starred theorems get no counter.
        thecounter = None
    else:
        if parent and not shared:
            # Numbered within `parent`: reset on parent and render as
            # parent.number.
            self.ownerDocument.context.newcounter(name,
                                                  initial=0,
                                                  resetby=parent)
            self.ownerDocument.context.newcommand(
                "the" + name, 0,
                "\\arabic{%s}.\\arabic{%s}" % (parent, name))
            thecounter = name
        elif shared:
            # Share numbering with another theorem's counter.
            thecounter = shared
        else:
            # Standalone counter of its own.
            thecounter = name
            self.ownerDocument.context.newcounter(name, initial=0)
            self.ownerDocument.context.newcommand(
                "the" + name, 0, "\\arabic{%s}" % name)
    data = {
        'macroName': name,
        'counter': thecounter,
        'thehead': header,
        'thename': name,
        'labelable': True,
        'forcePars': True,
        'thestyle': style
    }
    # Register a new environment class for this theorem.
    th = new.classobj(name, (theoremCommand, ), data)
    self.ownerDocument.context.addGlobal(name, th)
def _find_class_for(cls, element_name=None, class_name=None):
    """Look in the parent modules for classes matching the element name.

    One or both of element/class name must be specified.

    Args:
        element_name: The name of the element type.
        class_name: The class name of the element type.
    Returns:
        A Resource class.
    """
    if not element_name and not class_name:
        raise Error('One of element_name,class_name must be specified.')
    elif not element_name:
        # Derive the missing name from the one supplied.
        element_name = util.underscore(class_name)
    elif not class_name:
        class_name = util.camelize(element_name)

    module_path = cls.__module__.split('.')
    # Walk the module hierarchy from the most specific package outward.
    for depth in range(len(module_path), 0, -1):
        try:
            __import__('.'.join(module_path[:depth]))
            module = sys.modules['.'.join(module_path[:depth])]
        except ImportError:
            continue
        try:
            # Class defined directly on this package/module?
            klass = getattr(module, class_name)
            return klass
        except AttributeError:
            # Otherwise try a submodule named after the element.
            try:
                __import__('.'.join([module.__name__, element_name]))
                submodule = sys.modules['.'.join(
                    [module.__name__, element_name])]
            except ImportError:
                continue
            try:
                klass = getattr(submodule, class_name)
                return klass
            except AttributeError:
                continue

    # If we made it this far, no such class was found; fall back to a
    # dynamically generated subclass of this class.
    return new.classobj(class_name, (cls, ), {'__module__': cls.__module__})