def __new__(cls, name, bases, dct):
    """Create the class; enum-style classes (those carrying a
    _VALUES_TO_NAMES table) additionally receive conversion helpers."""
    if '_VALUES_TO_NAMES' in dct:
        # Provide a way to convert an enum value into its name, and to
        # enumerate all known values.
        for attr, func in (('to_name', enum_to_name), ('values', enum_values)):
            dct[attr] = classmethod(func)
    return type.__new__(cls, name, bases, dct)
def setUp(self):
    """Create a test course with pre-seeded sync schedules and stub out
    every external Drive touchpoint."""
    super(DriveTestBase, self).setUp()
    actions.login(self.ADMIN_EMAIL, is_admin=True)
    self.app_context = actions.simple_add_course(
        self.COURSE_NAME, self.ADMIN_EMAIL, 'Drive Course')
    self.base = '/{}'.format(self.COURSE_NAME)

    # have a syncing policy in place already
    with utils.Namespace(self.app_context.namespace):
        self.setup_schedule_for_file('3')
        self.setup_schedule_for_file('5')
        self.setup_schedule_for_file('6', synced=True)

    # remove all hooks
    for handler, hooks in self.HANDLER_HOOKS:
        for hook in hooks:
            self.swap(handler, hook, [])

    # Prevent it from consulting the settings or calling out
    replacements = (
        (drive_api_manager._DriveManager, 'from_app_context',
         classmethod(mocks.manager_from_mock)),
        (drive_api_manager._DriveManager, 'from_code',
         classmethod(mocks.manager_from_mock)),
        (handlers.AbstractDriveDashboardHandler, 'setup_drive',
         mocks.setup_drive),
        (drive_settings, 'get_secrets', mocks.get_secrets),
    )
    for target, attr, replacement in replacements:
        self.swap(target, attr, replacement)
def _makeLayer(self, group, parent_layer=None):
    """Build a layer class for *group*, running the group's setup and
    teardown callbacks from classmethod hooks."""
    base = object if parent_layer is None else parent_layer

    # FIXME test setups
    #test_setups = group._test_setups[:]
    #test_teardowns = group._testeardowns[:]

    def setUp(cls):
        # Run every registered group setup, in declaration order.
        for callback in cls.setups:
            callback()

    def tearDown(cls):
        for callback in cls.teardowns:
            callback()

    namespace = {
        'description': group.description,
        'setUp': classmethod(setUp),
        'tearDown': classmethod(tearDown),
        # Copy the callback lists so later group mutation can't leak in.
        'setups': group._setups[:],
        'teardowns': group._teardowns[:],
    }
    return type("%s:layer" % group.description, (base,), namespace)
def __call__(self, cls):
    '''The decorator body: mark *cls* as JSON-serializable.

    Adds the constant cls.TUBES_JSON_SERIALIZABLE = True, the class
    methods cls.from_json() / cls.from_json_str(), and the instance
    methods cls.to_json() / cls.to_json_str().
    '''
    # Idempotent: a class already processed is returned untouched.
    if hasattr(cls, 'TUBES_JSON_SERIALIZABLE'):
        return cls

    # Serialization entry points.
    cls.from_json = classmethod(from_json)
    cls.from_json_str = classmethod(from_json_str)
    cls.to_json = to_json
    cls.to_json_str = to_json_str
    cls.to_json_list = classmethod(to_json_list)
    cls.to_json_list_str = classmethod(to_json_list_str)

    # Record the decorator configuration on the class itself.
    cls.TUBES_JSON_SERIALIZABLE = True
    cls.TUBES_TO_IGNORE = self.to_ignore
    cls.TUBES_FROM_IGNORE = self.from_ignore
    cls.TUBES_TO_TRANSFORM = self.to_transform
    cls.TUBES_FROM_TRANSFORM = self.from_transform
    cls.TUBES_EXCLUDE_PRIVATE_FIELDS = self.exclude_private_fields
    return cls
def decorator(class_to_decorate):
    """Rebuild *class_to_decorate* under a metaclass that harvests declared
    members into the namespace named by `parameter` (closure variable)."""

    class DeclarativeMeta(class_to_decorate.__class__):
        # Runs for the rebuilt class and every subclass: collect the
        # declared members and stash them before normal class init.
        def __init__(cls, name, bases, dict):
            members = get_members(cls)
            set_declared(cls, members, parameter)
            super(DeclarativeMeta, cls).__init__(name, bases, dict)

    # Recreate the class under the new metaclass; __dict__/__weakref__ are
    # excluded because type() regenerates them itself.
    new_class = DeclarativeMeta(
        class_to_decorate.__name__,
        class_to_decorate.__bases__,
        {k: v for k, v in class_to_decorate.__dict__.items()
         if k not in ['__dict__', '__weakref__']})

    def get_extra_args_function(self):
        # Copy the declared members per-instance so instances cannot mutate
        # the class-level definitions, then expose them as an extra kwarg.
        members = get_declared(self, parameter)
        copied_members = OrderedDict((k, copy(v)) for k, v in members.items())
        self.__dict__.update(copied_members)
        return {parameter: copied_members}

    # Either inject the members as an __init__ kwarg, or just hook the call.
    if add_init_kwargs:
        add_args_to_init_call(new_class, get_extra_args_function)
    else:
        add_init_call_hook(new_class, get_extra_args_function)

    setattr(new_class, 'get_declared', classmethod(get_declared))
    setattr(new_class, 'set_declared', classmethod(set_declared))
    return new_class
def swap_model_kind(self):
    """Monkey-patch db.Model.kind and db.class_for_kind so that entities
    created during tests carry a kind prefix, isolating them from
    production kinds. (Python 2 code: uses dict.has_key.)"""
    self.kind_method_swapped = True
    # Remember the original so it can be restored after the test run.
    self.original_class_for_kind_method = db.class_for_kind
    kprefix = self.kind_prefix()

    def kind_for_test_with_store_kinds(cls):
        k = kprefix + cls._meta.db_table
        # Also record every kind used, so used kinds can be cleaned up later.
        global kind_names_for_test
        if not kind_names_for_test:
            kind_names_for_test = {}
        kind_names_for_test[k] = cls
        return k

    def kind_for_test(cls):
        return kprefix + cls._meta.db_table

    def class_for_kind_for_test(kind):
        # Prefixed kinds map back to the unprefixed registration; anything
        # else falls through to the normal kind map.
        if kind.find(kprefix) == 0 and \
            db._kind_map.has_key(kind[len(kprefix):]):
            return db._kind_map[kind[len(kprefix):]]
        else:
            try:
                return db._kind_map[kind]
            except KeyError:
                import logging
                logging.error(db._kind_map)
                raise KindError('No implementation for kind \'%s\'' % kind)

    # Use the recording variant only when used kinds must be cleaned up.
    if self.may_cleanup_used_kind():
        db.Model.kind = classmethod(kind_for_test_with_store_kinds)
    else:
        db.Model.kind = classmethod(kind_for_test)
    db.class_for_kind = class_for_kind_for_test
def cached_wrapper(cls, table_name, sqlstore=None, mc=None, cache_ver='', id2str=True):
    """Class decorator wiring *cls* to a cache-backed SQL operator.

    Installs `objects` (a SQL2CacheOperator), CRUD methods (save/create/
    delete plus transactional variants), pickle hooks and query shortcuts,
    then returns the class.
    """
    # Called for its side effect of registering the primary field; the
    # return value is unused here.
    primary_field = _initialize_primary_field(cls)
    # db_fields: attribute names; raw_db_fields: column names in the table.
    db_fields, raw_db_fields = zip(*_collect_fields(cls, id2str))
    order_combs = _collect_order_combs(cls)
    cls.objects = SQL2CacheOperator(table_name, cls, raw_db_fields,
                                    sqlstore=sqlstore, mc=mc,
                                    cache_ver=cache_ver,
                                    order_combs=order_combs)
    cls.save = method_combine(save)
    cls.create = classmethod(method_combine(create, db_fields))
    cls.delete = method_combine(delete)
    # Transactional variants alias the plain names so combined logic can
    # dispatch to the non-transactional implementations.
    cls.delete_transactionally = method_combine(delete_transactionally, alias='delete')
    cls.create_transactionally = classmethod(method_combine(create_transactionally, db_fields, alias='create'))
    cls.save_transactionally = method_combine(save_transactionally, alias='save')
    # Keep the original __init__ reachable *before* replacing it.
    cls.__org_init__ = cls.__init__
    cls.__init__ = init
    # Pickle support.
    cls.__setstate__ = setstate
    cls.__getstate__ = getstate
    cls.db_fields = db_fields
    # Query shortcuts delegate to the operator instance.
    cls.gets_by = cls.objects.gets_by
    cls.count_by = cls.objects.count_by
    cls.get_by = cls.objects.get
    cls.exist = classmethod(exist)
    return cls
def list_type(cls):
    """
    Return an ASTNode subclass that represent a list of "cls".
    """
    element_type = cls

    def add_to_context(cls):
        # Declare this list type (and its element type) exactly once.
        if cls in get_context().types:
            return
        get_context().types.add(cls)
        get_context().list_types.add(cls.element_type())
        # Make sure the type this list contains is already declared
        cls.element_type().add_to_context()

    class_dict = {
        'is_ptr': True,
        'name': classmethod(
            lambda cls: names.Name('List') + cls.element_type().name()
        ),
        'add_to_context': classmethod(add_to_context),
        'nullexpr': classmethod(lambda cls: null_constant()),
        'is_list_type': True,
        'is_collection': classmethod(lambda cls: True),
        'element_type': classmethod(lambda cls: element_type),
    }
    return type('{}ListType'.format(element_type.name()),
                (StructMetaClass.root_grammar_class, ),
                class_dict)
def setup_test_environment(self, **kwargs): """ Setting up test environment. """ # Monkey-patch Task.on_success() method def on_success_patched(self, retval, task_id, args, kwargs): TaskState.objects.create(task_id=uuid4().hex, state="SUCCESS", name=self.name, result=retval, args=args, kwargs=kwargs, tstamp=datetime.now()) Task.on_success = classmethod(on_success_patched) # Monkey-patch Task.on_failure() method def on_failure_patched(self, exc, task_id, args, kwargs, einfo): TaskState.objects.create(task_id=uuid4().hex, state="FAILURE", name=self.name, result=einfo, args=args, kwargs=kwargs, tstamp=datetime.now()) Task.on_failure = classmethod(on_failure_patched) # Call parent's version super(CeleryTestSuiteRunnerStoringResult, self).setup_test_environment(**kwargs) # Tell celery run tasks synchronously settings.CELERY_ALWAYS_EAGER = True settings.CELERY_EAGER_PROPAGATES_EXCEPTIONS = True # Issue #75
def _find_by_attrib(cls):
    """Attach find_first_by_<attrib> / find_all_by_<attrib> classmethods to
    *cls*, paging through the class's find() results and matching on the
    closure variable `attrib`.

    Returns *cls* to support decorator-style use.
    """
    # Prefer a previously stashed original finder, if any patching occurred.
    find_meth = cls._find_orig if hasattr(cls, "_find_orig") else cls.find

    def __find_first_by_attrib(cls, value):
        """Return the first object whose `attrib` equals *value*, else None."""
        offset = 0
        limit = 60
        cur_result = [None]  # sentinel so the first page is always fetched
        while cur_result:
            cur_result = find_meth(limit=limit, offset=offset)
            filtered = filter(lambda u: getattr(u, attrib) == value, cur_result)
            try:
                return next(filtered)
            except StopIteration:
                # FIX: was a bare `except:` that swallowed *all* errors
                # (including AttributeError from getattr); only "no match on
                # this page" should be ignored — other exceptions propagate.
                pass
            offset += len(cur_result)
        return None

    def __find_all_by_attrib(cls, value):
        """Return every object whose `attrib` equals *value*."""
        offset = 0
        limit = 60
        cur_result = [None]
        result = []
        while cur_result:
            cur_result = find_meth(limit=limit, offset=offset)
            filtered = filter(lambda u: getattr(u, attrib) == value, cur_result)
            result += filtered
            offset += len(cur_result)
        return result

    setattr(cls, "find_first_by_" + attrib, classmethod(__find_first_by_attrib))
    setattr(cls, "find_all_by_" + attrib, classmethod(__find_all_by_attrib))
    return cls
def __new__(metaclass, name, bases, attrs):  #@NoSelf
    """Metaclass constructor implementing a global registry of Static
    classes; always returns the registered (canonical) class object."""
    super_new = super(StaticMeta, metaclass).__new__
    parents = [b for b in bases if isinstance(b, StaticMeta)]
    if not parents and name != 'Static':
        # should be created with __metaclass__ = type
        return super_new(metaclass, name, bases, attrs)
    # Create the class with a minimal namespace; remaining attrs are fed to
    # the class's own _new_ initializer below.
    class_instance = super_new(metaclass, name, bases,
                               {'__module__': attrs.pop('__module__')})
    # If class overrides our _new_
    if '_add_to_class_' in attrs:
        class_instance._add_to_class_ = classmethod(attrs.pop('_add_to_class_'))
    if '_new_' in attrs:
        class_instance._add_to_class_('_new_', classmethod(attrs.pop('_new_')))
    # Run attribute initializers
    class_instance._new_(attrs, parents)
    # Call constructor method
    class_instance._init_()
    #StaticMeta.register(module, name, class_instance)
    #register_models(class_instance._meta.app_label, class_instance)
    class_name = class_instance._name_()
    #print "Registering", class_name
    if not class_name in metaclass._registry:
        metaclass._registry[class_name] = class_instance
    # Because of the way imports happen (recursively), we may or may not be
    # the first time this model tries to register with the framework. There
    # should only be one class for each model, so we always return the
    # registered version.
    return metaclass._registry[class_name]
def make_bound_schema(self):
    """Create a proxy document class bound to this mixin instance.

    Returns type(self) unchanged when already bound; otherwise builds a
    proxy subclass whose mixin hooks delegate back to this instance.
    """
    if getattr(self, '_mixin_bound', False):
        return type(self)
    original_fields = self._meta.fields.keys()

    # Class-level mixin hooks on the proxy delegate to this bound instance.
    def get_active_mixins(kls, target=None):
        return self.get_active_mixins(target=target)

    def send_mixin_event(kls, event, kwargs):
        return self.send_mixin_event(event, kwargs)

    cls = type(self)
    document_kwargs = {
        'fields':{},#copy(self._meta.fields.fields),
        #fields => uber dictionary, fields.fields => fields we defined
        'proxy': True,
        'name': cls.__name__,
        'parents': (cls,),
        'module': cls.__module__,
        'attrs': {'get_active_mixins':classmethod(get_active_mixins),
                  'send_mixin_event':classmethod(send_mixin_event),
                  '_mixin_bound':True},
    }
    # Give mixins a chance to adjust the kwargs before the class is built.
    self.send_mixin_event('document_kwargs', {'document_kwargs':document_kwargs})
    new_cls = create_document(**document_kwargs)
    # Building the proxy must not have mutated our own field set.
    assert self._meta.fields.keys() == original_fields
    return new_cls
def AddPanels(cls, kwargs):
    """Register *cls* twice as a Blender panel — once in the left (TOOLS)
    region and once in the right (UI) region — each gated by the addon's
    preference toggles. Returns the (possibly wrapped) base class."""
    doc = cls.__doc__
    name = kwargs.get("bl_idname") or kwargs.get("idname") or cls.__name__
    # expected either class or function
    if not isinstance(cls, type):
        # A bare draw callback: wrap it into a minimal panel class.
        cls = type(name, (), dict(__doc__=doc, draw=cls))

    def is_panel_left():
        if not addon.preferences:
            return False
        return addon.preferences.use_panel_left

    def is_panel_right():
        if not addon.preferences:
            return False
        return addon.preferences.use_panel_right

    @addon.Panel(**kwargs)
    class LeftPanel(cls):
        bl_idname = name + "_left"
        bl_region_type = 'TOOLS'

    @addon.Panel(**kwargs)
    class RightPanel(cls):
        bl_idname = name + "_right"
        bl_region_type = 'UI'

    # Combine the preference gate with the user-supplied poll, when present.
    poll = getattr(cls, "poll", None)
    if poll:
        LeftPanel.poll = classmethod(lambda cls, context: is_panel_left() and poll(cls, context))
        RightPanel.poll = classmethod(lambda cls, context: is_panel_right() and poll(cls, context))
    else:
        LeftPanel.poll = classmethod(lambda cls, context: is_panel_left())
        RightPanel.poll = classmethod(lambda cls, context: is_panel_right())
    return cls
def _coio_rebase(helper_module):
    """Rebase classes `tasklet' and `bomb' from those in the helper_module.

    Only safe before any real tasklets exist; otherwise raises
    ImportTooLateError. Rebuilds the module-global `tasklet` type on top of
    the helper's tasklet and replaces `bomb` wholesale.
    """
    global tasklet
    global bomb
    global current
    global main
    global _tasklets_created
    is_tasklet_ok = list(tasklet.__bases__) == [helper_module.tasklet]
    if is_tasklet_ok and bomb is helper_module.bomb:
        # Already rebased; nothing to do.
        return
    # Any scheduling activity means we are too late to swap the types out.
    if main is not current:
        raise ImportTooLateError
    if main.next is not main:
        raise ImportTooLateError
    # We should check for the number of bombs as well, but that would be too
    # much work.
    if _tasklets_created != 1:
        raise ImportTooLateError
    if not is_tasklet_ok:
        # This would be easier: tasklet.__bases__ = (helper_module.tasklet,)
        # But it doesn't work: TypeError("__bases__ assignment: 'tasklet'
        # deallocator differs from 'object'")
        dict_obj = dict(tasklet.__dict__)
        dict_obj['__slots__'] = _process_slots(
            dict_obj['__slots__'], helper_module.tasklet, dict_obj)
        #old_tasklet = tasklet
        # Poison the old class so accidental instantiation fails loudly.
        tasklet.__new__ = classmethod(_new_too_late)
        tasklet = type(tasklet.__name__, (helper_module.tasklet,), dict_obj)
        current = main = _get_new_main()
        _tasklets_created = 1
        assert type(main) is tasklet
        #del old_tasklet
    if bomb is not helper_module.bomb:
        bomb.__new__ = classmethod(_new_too_late)
        bomb = helper_module.bomb
def test_classmethods_various(self):
    """Exercise classmethod binding rules.

    Python 2 test: relies on bound-method `im_self` and on classmethod(1)
    deferring its "not callable" failure until invocation.
    """
    class C(object):
        def foo(*a):
            return a
        goo = classmethod(foo)
    c = C()
    # Class access and instance access both bind the *class* as first arg.
    assert C.goo(1) == (C, 1)
    assert c.goo(1) == (C, 1)
    assert c.foo(1) == (c, 1)
    class D(C):
        pass
    d = D()
    # A subclass rebinds the classmethod to itself.
    assert D.goo(1) == (D, 1)
    assert d.goo(1) == (D, 1)
    assert d.foo(1) == (d, 1)
    assert D.foo(d, 1) == (d, 1)
    def f(cls, arg):
        return (cls, arg)
    ff = classmethod(f)
    # __get__ binds to the owner class; with no owner, to type(instance).
    assert ff.__get__(0, int)(42) == (int, 42)
    assert ff.__get__(0)(42) == (int, 42)
    # im_self of a classmethod-bound method is the class itself (Python 2).
    assert C.goo.im_self is C
    assert D.goo.im_self is D
    assert super(D,D).goo.im_self is D
    assert super(D,d).goo.im_self is D
    assert super(D,D).goo() == (D,)
    assert super(D,d).goo() == (D,)
    # Wrapping a non-callable only fails when the bound result is called.
    meth = classmethod(1).__get__(1)
    raises(TypeError, meth)
def setUp(self): self.cluster_patch = patch('lib.cluster.Cluster') #self.view_patch = patch('lib.view.CliView') real_stdoup = sys.stdout sys.stdout = StringIO() self.addCleanup(patch.stopall) self.addCleanup(reset_stdout) self.MockCluster = self.cluster_patch.start() #self.MockView = self.view_patch.start() Cluster._crawl = classmethod(lambda self: None) Cluster._callNodeMethod = classmethod( lambda self, nodes, method_name, *args, **kwargs: {"test":IOError("test error")}) n = Node("172.99.99.99") Cluster.getNode = classmethod( lambda self, key: [n]) pd = PrefixDict() pd['test'] = 'test' Cluster.getPrefixes = classmethod(lambda self: pd) self.rc = RootController()
def __new__(meta, name, bases, attrs):
    """Create the test class, wrapping its setup/teardown hooks with
    fixture handling.

    When the class declares `fixtures`, its setUp/tearDown (or, with
    `persist_fixtures`, setUpClass/tearDownClass) are replaced by wrappers
    that install fixtures around the child's own hook.
    """
    fixtures = attrs.get('fixtures', [])
    # Persisting across tests needs classmethod hooks, which only some
    # environments support.
    persist_fixtures = attrs.get('persist_fixtures', False) and can_persist_fixtures()

    # We only need to do something if there's a set of fixtures; this also
    # covers creation of the FixturesMixin base class itself, where no test
    # setup should happen.
    if fixtures:
        if persist_fixtures:
            setup_names, teardown_names = CLASS_SETUP_NAMES, CLASS_TEARDOWN_NAMES
            wrap = classmethod
        else:
            setup_names, teardown_names = TEST_SETUP_NAMES, TEST_TEARDOWN_NAMES
            wrap = lambda fn: fn  # per-test hooks stay plain functions
        child_setup_fn = meta.get_child_fn(attrs, setup_names, bases)
        child_teardown_fn = meta.get_child_fn(attrs, teardown_names, bases)
        attrs[child_setup_fn.__name__] = wrap(
            meta.setup_handler(setup, child_setup_fn))
        attrs[child_teardown_fn.__name__] = wrap(
            meta.teardown_handler(teardown, child_teardown_fn))

    return super(MetaFixturesMixin, meta).__new__(meta, name, bases, attrs)
def test_unsubscribe_unregistered(self):
    """Unsubscribing while unregistered deletes the local certs."""
    connection.UEPConnection = StubUEP

    def _stub_unregistered_identity():
        # Pretend no valid consumer identity exists on this system.
        managercli.ConsumerIdentity = StubConsumerIdentity
        StubConsumerIdentity.existsAndValid = classmethod(lambda cls: False)
        StubConsumerIdentity.exists = classmethod(lambda cls: False)

    # --all removes every local entitlement cert.
    prod = StubProduct('stub_product')
    ent = StubEntitlementCertificate(prod)
    cmd = managercli.UnSubscribeCommand(
        ent_dir=StubEntitlementDirectory([ent]),
        prod_dir=StubProductDirectory([]))
    _stub_unregistered_identity()
    cmd.main(['unsubscribe', '--all'])
    self.assertTrue(cmd.entitlement_dir.list_called)
    self.assertTrue(ent.is_deleted)

    # --serial removes only the matching cert.
    prod = StubProduct('stub_product')
    ent1 = StubEntitlementCertificate(prod)
    ent2 = StubEntitlementCertificate(prod)
    cmd = managercli.UnSubscribeCommand(
        ent_dir=StubEntitlementDirectory([ent1, ent2]),
        prod_dir=StubProductDirectory([]))
    _stub_unregistered_identity()
    cmd.main(['unsubscribe', '--serial=%s' % ent1.serial])
    self.assertTrue(cmd.entitlement_dir.list_called)
    self.assertTrue(ent1.is_deleted)
    self.assertFalse(ent2.is_deleted)
def game_mapper():
    """Wire the Game/PlayerGame model classes to their tables and attach
    the shared CRUD helpers."""
    from models import get_all, get_by_id, get_by_name, save, delete
    from business.army.army import Army
    from business.game import Game
    from business.game.player import PlayerGame
    from sqlalchemy.orm import mapper, relationship, backref

    # Shared CRUD helpers: lookups are classmethods, save/delete act on
    # instances.
    for model in (Game, PlayerGame):
        model.get_all = classmethod(get_all)
        model.get_by_id = classmethod(get_by_id)
        model.save = save
        model.delete = delete
    PlayerGame.get_by_name = classmethod(get_by_name)

    mapper(Game, game_table, properties={})

    def army_rel(column):
        # Each army slot is a distinct FK into the same Army table.
        return relationship(Army, foreign_keys=[column])

    mapper(PlayerGame, game_player_table, properties={
        'game': relationship(Game),
        'buried_army': army_rel(game_player_table.c.buried_army_id),
        'dead_army': army_rel(game_player_table.c.dead_army_id),
        'reserve_army': army_rel(game_player_table.c.reserve_army_id),
        'summon_army': army_rel(game_player_table.c.summon_army_id),
        'armies': relationship(Army, secondary=game_player_armies_table,
                               backref=backref('owner', uselist=False)),
    })
def decorator(cls):
    """Attach counter accessors for `field_name` (closure variable) to *cls*
    and hook cleanup to its post_delete signal."""
    # Attach instance methods and the property
    getter = curry(_method, _get, field_name)
    setter = curry(_set_method, field_name)
    incrementer = curry(_method, _incr, field_name)
    setattr(cls, 'get_' + field_name, getter)
    setattr(cls, field_name, property(getter, setter))
    setattr(cls, 'incr_' + field_name, incrementer)
    # Attach class methods (lookup/increment by primary key)
    key_getter = curry(_key, field_name)
    cls_getter = curry(_class_method, _get, field_name)
    cls_incrementer = curry(_class_method, _incr, field_name)
    setattr(cls, 'key_for_' + field_name, classmethod(key_getter))
    setattr(cls, 'get_%s_for_pk' % field_name, classmethod(cls_getter))
    setattr(cls, 'incr_%s_for_pk' % field_name, classmethod(cls_incrementer))
    # Track every counter field registered on the class.
    if not hasattr(cls, '_counters'):
        cls._counters = []
    cls._counters.append(field_name)
    # Clean the counter up when an instance is deleted; dispatch_uid keeps
    # the signal connection unique per (class, field).
    post_delete.connect(curry(_post_delete, field_name), sender=cls,
                        weak=False, dispatch_uid=(cls, field_name))
    return cls
def message(cls: Type[T]) -> Type[T]:
    """
    Returns the same class as was passed in, with additional dunder attributes
    needed for serialization and deserialization.
    """
    type_hints = get_type_hints(cls)

    try:
        # Used to list all fields and locate fields by field number.
        cls.__protobuf_fields__: Dict[int, Field] = dict(
            make_field(field_.metadata['number'], field_.name, type_hints[field_.name])
            for field_ in dataclasses.fields(cls)
        )
    except KeyError as e:
        # FIXME: catch `KeyError` in `make_field` and re-raise as `TypeError`.
        raise TypeError(f'type is not serializable: {e}') from e

    # noinspection PyUnresolvedReferences
    Message.register(cls)

    cls.serializer = MessageSerializer(cls)
    cls.type_url = f'type.googleapis.com/{cls.__module__}.{cls.__name__}'

    # Borrow the shared Message implementations; loaders are classmethods.
    for shared in ('validate', 'dump', 'dumps', 'merge_from'):
        setattr(cls, shared, getattr(Message, shared))
    cls.load = classmethod(load)
    cls.loads = classmethod(loads)
    return cls
def prepare_case(self):
    """Build the attribute dict for a generated TestCase class.

    Test methods listed in self.names are wrapped so per-test fixtures run
    around them; the standard lifecycle hooks are stubbed with no-ops.
    """
    dic = {
        'get_fixture': LazyFixture(self.fixtures['lazy']),
        '__str__': lambda test: self.test_to_str(test),
        'setUp': no_op,
        'tearDown': no_op,
        'setUpClass': classmethod(no_op),
        'tearDownClass': classmethod(no_op),
    }
    for name in self.names:
        func = getattr(self.klass, name)
        if PY2:
            # Unwrap the unbound method to get the plain function.
            func = func.__func__

        @wraps(func)
        def wrapper(test, _test_func=func):
            # `_test_func=func` binds per iteration (avoids the late-binding
            # closure pitfall).
            fixtures = [F(test) for F in self._sorted(self.fixtures['test'])]
            if not all(isinstance(f, ContextManager) for f in fixtures):
                # Decorator-style fixtures: wrap the test func inside-out.
                for wrapper in reversed(fixtures):
                    _test_func = wrapper(_test_func)
                _test_func(test)
                return
            # Context-manager fixtures: enter all, run, exit in reverse.
            with ExitStack() as stack:
                for ctx in fixtures:
                    stack.enter_context(ctx)
                _test_func(test)
        dic[name] = wrapper
    return dic
def __enter__(self):
    """Swap distutils' registry readers for fakes backed by the in-memory
    hive dicts self.hkcu / self.hklm; restores happen in __exit__."""
    self.original_read_keys = distutils.msvc9compiler.Reg.read_keys
    self.original_read_values = distutils.msvc9compiler.Reg.read_values

    # The module attribute is `_winreg` on Python 2 and `winreg` on 3.
    _winreg = getattr(distutils.msvc9compiler, '_winreg', None)
    winreg = getattr(distutils.msvc9compiler, 'winreg', _winreg)

    hives = {
        winreg.HKEY_CURRENT_USER: self.hkcu,
        winreg.HKEY_LOCAL_MACHINE: self.hklm,
    }

    def read_keys(cls, base, key):
        """Return list of registry keys."""
        hive = hives.get(base, {})
        prefix = key.lower()
        return [k.rpartition('\\')[2] for k in hive if k.startswith(prefix)]

    def read_values(cls, base, key):
        """Return dict of registry keys and values."""
        hive = hives.get(base, {})
        prefix = key.lower()
        return {k.rpartition('\\')[2]: hive[k]
                for k in hive if k.startswith(prefix)}

    distutils.msvc9compiler.Reg.read_keys = classmethod(read_keys)
    distutils.msvc9compiler.Reg.read_values = classmethod(read_values)
    return self
def __new__(cls, name, bases, attrs): ret = super(BaseEnumerationMeta, cls).__new__(cls, name, bases, attrs) #This gets the file a class is defined in, without its extension import inspect modfile = lambda klass: os.path.splitext(os.path.abspath(inspect.getfile(klass)))[0] try: ret = BaseEnumerationMeta._enumeration_types[(modfile(ret), name, )] if not '_INTERMEDIATE_CLASS' in attrs: #If the enumeration is sealed/finished, we still need to provide #dummy behaviour, since registerItem etc can be re-called on it ret.registerItem = classmethod(lambda *args, **kwargs: None) ret.finishItemRegistration = classmethod(generateFinishItemRegistration()) return ret except KeyError: pass try: #Check whether BaseEnumeration is already defined. If it isn't, we're #registering BaseEnumeration itself, so the methods should not be added BaseEnumeration except NameError: return ret #This is the case for non-final enumeration types, eg EnumerationWithValue if '_INTERMEDIATE_CLASS' in attrs: pass else: #Add methods to the class ret.registerItem = classmethod(generateRegisterItem()) ret.finishItemRegistration = classmethod(generateFinishItemRegistration()) BaseEnumerationMeta._enumeration_types[(modfile(ret), name, )] = ret #Call class._initItems getattr(ret, '_initItems', lambda: None)() if modfile(ret).startswith(os.path.join(os.path.sep,"opt","qbase5","pyapps")): appname = modfile(ret).split(os.path.sep)[4] category = modfile(ret).split(os.path.sep)[6] setattr(pymodelEnumerators.setdefault((appname,category), PymodelEnumerationContainer()), name, ret) else: #Since we can't hook enumerations on pylabs.q directly, since 'q' #could be not initialized when the first enumeration type is created. #To get around this, we use a module-global container variable which #gets populated, and should be hooked onto pylabs.q whenever #applicable. # #For some reason, it was decided to use smallCapStarting names for #enumerations registered on q.enumerators, although they're types. 
#I guess this should be emulated here. if hasattr(enumerations, name): raise RuntimeError('Unable to register enumeration %s, name already in use' % name) else: setattr(enumerations, name, ret) return ret
def init_class(cls):
    """Attach leveldb open/close helpers to *cls* and open its databases.

    Classes without _leveldb_options are assumed to run in test mode and
    are left untouched.
    """
    if not hasattr(cls, "_leveldb_options"):
        # must be in test mode
        return

    cls._leveldb_meta = {}

    def open_connections(cls):
        # Paths given as strings are opened; anything else is ignored.
        db = cls._leveldb_options.get("db")
        indexdb = cls._leveldb_options.get("indexdb")
        if isinstance(db, basestring):
            cls._leveldb_meta["db"] = plyvel.DB(db, create_if_missing=True)
        if isinstance(indexdb, basestring):
            cls._leveldb_meta["indexdb"] = plyvel.DB(indexdb, create_if_missing=True)

    def close_connections(cls):
        # Close whichever connections were actually opened.
        for key in ("db", "indexdb"):
            conn = cls._leveldb_meta.get(key)
            if conn:
                conn.close()

    cls.open_leveldb_connections = classmethod(open_connections)
    cls.open_leveldb_connections()
    cls.close_leveldb_connections = classmethod(close_connections)
def test_unsubscribe_registered(self):
    """Unsubscribe paths for a registered consumer: --all, single and
    multiple --serial, and a server-side unknown-serial error."""
    connection.UEPConnection = StubUEP
    prod = StubProduct('stub_product')
    ent1 = StubEntitlementCertificate(prod)
    ent2 = StubEntitlementCertificate(prod)
    ent3 = StubEntitlementCertificate(prod)
    inj.provide(inj.ENT_DIR, StubEntitlementDirectory([ent1, ent2, ent3]))
    inj.provide(inj.PROD_DIR, StubProductDirectory([]))
    cmd = managercli.UnSubscribeCommand()
    # Simulate a valid registered identity.
    managercli.ConsumerIdentity = StubConsumerIdentity
    StubConsumerIdentity.existsAndValid = classmethod(lambda cls: True)
    StubConsumerIdentity.exists = classmethod(lambda cls: True)
    managercli.CertLib = StubCertLib
    # --all unbinds by consumer uuid.
    cmd.main(['unsubscribe', '--all'])
    self.assertEquals(cmd.cp.called_unbind_uuid,
                      StubConsumerIdentity.CONSUMER_ID)
    # A single --serial unbinds just that serial.
    cmd.main(['unsubscribe', '--serial=%s' % ent1.serial])
    self.assertEquals(cmd.cp.called_unbind_serial, ['%s' % ent1.serial])
    # Multiple --serial flags accumulate; success exits with 0.
    code = cmd.main(['unsubscribe', '--serial=%s' % ent2.serial, '--serial=%s' % ent3.serial])
    self.assertEquals(cmd.cp.called_unbind_serial, ['%s' % ent2.serial, '%s' % ent3.serial])
    self.assertEquals(code, 0)
    # Server reports an unknown serial -> exit code 1.
    connection.UEPConnection.unbindBySerial = mock.Mock(
        side_effect=connection.RestlibException("Entitlement Certificate with serial number 2300922701043065601 could not be found."))
    code = cmd.main(['unsubscribe', '--serial=%s' % '2300922701043065601'])
    self.assertEquals(code, 1)
def objectify(self, d):
    """ WARNING, EXPERIMENTAL METHOD DO NOT USE

    Turns a parsed API response dict into attributes on self; lists of
    known keys become lists of ad-hoc classes.
    """
    for k, v in d.items():
        if isinstance(v, dict):
            # NOTE(review): objectify() returns None, so this sets the
            # attribute to None while the nested keys land on `self`
            # itself via the recursive call — looks unintended; confirm
            # before relying on nested dicts.
            setattr(self, k, self.objectify(v))
        elif isinstance(v, list) and len(v) > 0:
            # this is the funniest part
            name = None
            # NOTE(review): an unknown key raises KeyError here, so the
            # assert below can never fire for unknown keys.
            name = {'LoadBalancerRules': 'LoadBalancerRule',
                    'LoadBalancerIPAddresses': 'LoadBalancerIPAddress',
                    'Instances': 'Instance'}[k]
            new_list = []
            for elem in v:
                # create the new class
                assert name is not None, "class name is none"
                # NOTE(review): __repr__/__str__ wrapped as classmethods —
                # they receive the class, not the instance; confirm
                # reprfunc/strfunc expect that.
                base_cls_spec = {'name': name,
                                 '__repr__': classmethod(reprfunc),
                                 '__str__': classmethod(strfunc)}
                new_cls = type(name, (object,), base_cls_spec)
                # create attributes (set as *class* attributes, and the list
                # stores the classes themselves, not instances)
                for k1, v1 in elem.items():
                    setattr(new_cls, k1, v1)
                new_list.append(new_cls)
                # print('%s: %s' % (new_cls, dir(new_cls)))
            setattr(self, k, new_list)
        else:
            setattr(self, k, v)
def ActiveModel(cls):
    """Class decorator adding graph registration and hashed-id lookup
    helpers to a graph model class."""
    def register(self, graph):
        # Register proxies for vertex (element_type) and/or edge (label)
        # models, and remember the graph on the class.
        if('element_type' in dir(cls)):
            graph.add_proxy(cls.element_type, cls)
        if('label' in dir(cls)):
            graph.add_proxy(cls.label, cls)
        cls.g = graph
    setattr(cls, 'register', classmethod(register))
    # Lookup helpers only make sense for vertex models.
    if('element_type' in dir(cls)):
        def get_or_create(cls, **kwds):
            # model_id is a sha224 over the concatenated, sorted key values.
            # NOTE(review): Python 2 code — hashlib is fed a str here, which
            # Python 3 would reject.
            cls.keys.sort()
            ids = [str(kwds[key]) for key in cls.keys]
            id_string = "".join(ids)
            model_id = hashlib.sha224(id_string).hexdigest()
            kwds.update({"model_id": model_id})
            return getattr(cls.g,cls.element_type).get_or_create("model_id", model_id, **kwds)
        setattr(cls, 'get_or_create', classmethod(get_or_create))
        def get_unique(cls, **kwds):
            cls.keys.sort()
            ids = [str(kwds.get(key)) for key in cls.keys]
            id_string = "".join(ids)
            model_id = hashlib.sha224(id_string).hexdigest()
            return getattr(cls.g,cls.element_type).index.get_unique("model_id", model_id)
        setattr(cls, 'get_unique', classmethod(get_unique))
        def get_count_of_nodes_which_have(cls, **kwds):
            return getattr(cls.g,cls.element_type).index.count(**kwds)
        setattr(cls, 'get_count_of_nodes_which_have', classmethod(get_count_of_nodes_which_have))
    def update(self, **val):
        # Only assign attributes that already exist on the instance/class,
        # then persist.
        for attribute in val:
            if attribute in dir(self):
                setattr(self, attribute, val[attribute])
        self.save()
        return self
    setattr(cls, 'update', update)
    return cls
def __init__(cls, base_ring, modulus):
    """Metaclass initializer: install quotient-field arithmetic on *cls*.

    Every operator delegates to QuotientFieldMeta (m) after coercing the
    operand(s) into the class via `_`. Python 2 style code (__div__,
    __nonzero__).
    """
    c = cls
    m = QuotientFieldMeta
    # Coerce an arbitrary operand into an element of this class.
    _ = lambda _: m.coerce(c, _)
    # Initialization.
    c.__init__ = lambda a, b: m.init(c, a, b)
    # Arithmetic.
    c.__pos__ = lambda a: m.pos(c, _(a))
    c.__neg__ = lambda a: m.neg(c, _(a))
    c.__add__ = lambda a, b: m.add(c, _(a), _(b))
    c.__radd__ = lambda a, b: m.add(c, _(b), _(a))
    c.__sub__ = lambda a, b: m.sub(c, _(a), _(b))
    c.__rsub__ = lambda a, b: m.sub(c, _(b), _(a))
    c.__mul__ = lambda a, b: m.mul(c, _(a), _(b))
    c.__rmul__ = lambda a, b: m.mul(c, _(b), _(a))
    c.__divmod__ = lambda a, b: m.divmod(c, _(a), _(b))
    c.__rdivmod__ = lambda a, b: m.divmod(c, _(b), _(a))
    c.__div__ = lambda a, b: m.div(c, _(a), _(b))
    c.__rdiv__ = lambda a, b: m.div(c, _(b), _(a))
    c.__mod__ = lambda a, b: m.mod(c, _(a), _(b))
    c.__rmod__ = lambda a, b: m.mod(c, _(b), _(a))
    # Comparisons. (NOTE(review): __req__/__rne__ are not Python dunders;
    # presumably consumed by m itself — confirm.)
    c.__eq__ = lambda a, b: m.eq(c, _(a), _(b))
    c.__req__ = lambda a, b: m.eq(c, _(b), _(a))
    c.__ne__ = lambda a, b: m.ne(c, _(a), _(b))
    c.__rne__ = lambda a, b: m.ne(c, _(b), _(a))
    c.__nonzero__ = lambda a: m.nonzero(c, _(a))
    # String representation.
    c.__str__ = lambda a: m.str(c, _(a))
    c.__repr__ = lambda a: m.repr(c, _(a))
    # Other class methods.
    c.gcd = classmethod(lambda c, a, b: m.gcd(c, _(a), _(b)))
    c.base_ring = classmethod(lambda c: base_ring)
    c.modulus = classmethod(lambda c: modulus)
def django_template():
    """Generator fixture installing a stub `django.template` module so that

        from django import template
        register = template.Library()
        register.tag('compile', do_compile)

    runs without a hitch. Uses a bunch of inline class creations.
    Yields once for the test body, then removes the stub.
    """
    import imp, sys
    # Reuse an existing django module when one is already imported.
    if 'django' in sys.modules:
        django = sys.modules['django']
    else:
        django = imp.new_module('django')
    # template.Library() returns a class whose .tag(...) is a no-op.
    template = type('template', (object,), {
        'Node': object,
        'Library': classmethod(lambda x: type('tag', (object, ), {
            'tag': classmethod(lambda x, y, z: None)
        }))
    })
    django.__dict__.update({'template':template})
    sys.modules.update({ 'django':django })
    yield
    # Teardown: drop the stub so later imports see a clean sys.modules.
    del_keys(sys.modules, 'django')
def mock_timestamp_now(now=None):
    """Pin swift's Timestamp.now() to a fixed value for the duration of the
    context; yields the timestamp in use (fresh Timestamp.now() when *now*
    is not supplied)."""
    fixed = Timestamp.now() if now is None else now
    patched = classmethod(lambda c: fixed)
    with mocklib.patch('swift.common.utils.Timestamp.now', patched):
        yield fixed
class ShapeComponent(RecordModel):
    ''' 4.2.9.2.1. Shape component (개체 요소) '''
    # Record tag identifying this model in the HWP record stream.
    tagid = HWPTAG_SHAPE_COMPONENT
    # Bit flags selecting which fill payloads are present.
    FillFlags = Flags(UINT16,
                      8, 'fill_colorpattern',
                      9, 'fill_image',
                      10, 'fill_gradation')
    Flags = Flags(UINT32,
                  0, 'flip')

    def attributes(cls):
        ''' Table 78: shape component attributes (표 78 개체 요소 속성).

        Generator yielding (type, name) pairs / dicts in wire order; the
        order of yields defines the binary layout and must not change.
        '''
        # chid0 appears only when the parent record is a GSO control.
        yield dict(type=CHID, name='chid0', condition=parent_must_be_gso)
        yield CHID, 'chid'
        yield SHWPUNIT, 'x_in_group'
        yield SHWPUNIT, 'y_in_group'
        yield WORD, 'level_in_group'
        yield WORD, 'local_version'
        yield SHWPUNIT, 'initial_width'
        yield SHWPUNIT, 'initial_height'
        yield SHWPUNIT, 'width'
        yield SHWPUNIT, 'height'
        yield cls.Flags, 'flags'
        yield WORD, 'angle'
        yield Coord, 'rotation_center'
        ''' 표 79 Rendering 정보 '''
        yield WORD, 'scalerotations_count'
        yield Matrix, 'translation'
        yield dict(type=X_ARRAY(ScaleRotationMatrix,
                                ref_member('scalerotations_count')),
                   name='scalerotations')
        #
        # Container
        #
        yield dict(type=N_ARRAY(WORD, CHID), name='controls',
                   condition=chid_is_container)
        #
        # Rectangle
        #
        ''' 표 81 테두리 선 정보 '''
        yield dict(type=BorderLine, name='border', condition=chid_is_rect)
        ''' 표 83 Outline style '''
        # TODO: Outline ???
        yield dict(type=cls.FillFlags, name='fill_flags',
                   condition=chid_is_rect)
        yield dict(type=UINT16, name='unknown', condition=chid_is_rect)
        yield dict(type=UINT8, name='unknown1', condition=chid_is_rect)
        # Exactly one fill payload follows, selected by fill_flags.
        yield dict(type=FillColorPattern, name='fill_colorpattern',
                   condition=chid_is_rect_and_fill_colorpattern)
        yield dict(type=FillGradation, name='fill_gradation',
                   condition=chid_is_rect_and_fill_gradation)
        yield dict(type=FillImage, name='fill_image',
                   condition=chid_is_rect_and_fill_image)
        yield dict(type=UINT32, name='fill_shape', condition=chid_is_rect)
        yield dict(type=BYTE, name='fill_blur_center',
                   condition=chid_is_rect_and_fill_gradation)
        # TODO: confirm whether the next two fields apply only when
        # chid == $rec
        yield dict(type=HexBytes(5), name='unknown2', condition=chid_is_rect,
                   version=(5, 0, 2, 4))
        yield dict(type=HexBytes(16), name='unknown3', condition=chid_is_rect,
                   version=(5, 0, 2, 4))
        #
        # Line
        #
        yield dict(type=BorderLine, name='line', condition=chid_is_line)
    attributes = classmethod(attributes)
class Asset(SObject):
    '''SObject representing a production asset (search type "prod/asset").

    NOTE: ``my`` is this codebase's convention for ``self``.
    '''

    SEARCH_TYPE = "prod/asset"

    def get_search_columns():
        '''columns offered by generic search widgets'''
        return ['code', 'name', 'description', 'asset_library']
    get_search_columns = staticmethod(get_search_columns)

    def get_required_columns():
        '''for csv import'''
        #return ['name', 'asset_library']
        return []
    get_required_columns = staticmethod(get_required_columns)

    def get_defaults(my):
        '''specifies the defaults for this sobject'''
        # use the naming to generate the next code
        from naming import AssetCodeNaming
        naming = AssetCodeNaming()
        asset_code = naming.get_next_code(my)

        defaults = super(Asset, my).get_defaults()
        defaults.update({"asset_type": "asset", 'code': asset_code})
        return defaults

    def get_asset_type(my):
        return my.get_value("asset_type")

    def get_asset_library(my):
        return my.get_value("asset_library")

    def get_asset_library_obj(my):
        '''return the asset library sobject this asset belongs to'''
        search = Search("prod/asset_library")
        search.add_filter("code", my.get_asset_library())
        return search.get_sobject()

    def get_icon_context(cls, context=None):
        '''context used for icon snapshots; falls back to "icon"'''
        if context:
            return context
        else:
            return "icon"
    get_icon_context = classmethod(get_icon_context)

    def get_full_name(my):
        return "%s|%s" % (my.get_value('name'), my.get_code())

    def has_auto_current(my):
        #return False
        return True

    def delete(my, log=True):
        '''This is for undo.

        With logging enabled, refuse to delete when non-icon snapshots
        exist or tasks have been assigned; otherwise dependent tasks and
        icon snapshots are removed first.
        '''
        # TODO: this should probably be clearer!!!!
        if log == False:
            super(Asset, my).delete(log)
            return

        # An asset can only be deleted if only icon snapshots exist
        snapshots = Snapshot.get_by_sobject(my)
        only_icons = True
        for snapshot in snapshots:
            context = snapshot.get_value("context")
            if context != my.get_icon_context():
                only_icons = False
                break
        if not only_icons:
            raise TacticException("Cannot delete because snapshots exist")

        # only delete if no tasks have been assigned
        tasks = Task.get_by_sobject(my)
        has_assigned = False
        for task in tasks:
            assigned = task.get_value("assigned")
            if assigned != "" and assigned != "None":
                has_assigned = True
                break
        if has_assigned:
            raise TacticException(
                "Cannot delete because tasks have been assigned")

        # delete tasks and icons
        for snapshot in snapshots:
            snapshot.delete()
        for task in tasks:
            task.delete()

        # BUGFIX: get_search_type was referenced without being called, so the
        # bound method object (not the search type) was formatted into the
        # undo description
        my.description = "Deleted '%s', search_type '%s'" % (
            my.get_code(), my.get_search_type())

        super(Asset, my).delete(log)

    # Static methods
    def alter_search(search):
        '''allow the sobject to alter the search'''
        search.add_order_by("code")
    alter_search = staticmethod(alter_search)

    def get_by_id(id):
        '''look up a single asset by database id'''
        search = Search(Asset.SEARCH_TYPE)
        search.add_id_filter(id)
        return search.get_sobject()
    get_by_id = staticmethod(get_by_id)

    def get_by_sobject(sobject, column="asset_code"):
        '''gets the sobject by an sobject.

        All an sobject needs to be contained by this sobject is to have
        the asset_code column.
        '''
        code = sobject.get_value(column)
        search = Search(Asset.SEARCH_TYPE)
        search.add_filter("code", code)
        return search.get_sobject()
    get_by_sobject = staticmethod(get_by_sobject)

    def get_by_name(cls, name):
        '''look up a single asset by name'''
        search = Search(cls.SEARCH_TYPE)
        search.add_filter("name", name)
        return search.get_sobject()
    get_by_name = classmethod(get_by_name)

    def create(code, name, asset_library, description):
        '''create with an explicit code'''
        asset = SObjectFactory.create(Asset.SEARCH_TYPE)
        asset.set_value("code", code)
        asset.set_value("name", name)
        asset.set_value("asset_library", asset_library)
        asset.set_value("description", description)
        asset.set_value("asset_type", "asset")
        asset.commit()
        return asset
    create = staticmethod(create)

    def create_with_autocode(cls, name, asset_library, description,
                             asset_type="asset"):
        '''create an asset, letting the naming convention assign its code'''
        # create the new asset
        asset = SObjectFactory.create(cls.SEARCH_TYPE)
        asset.set_value("name", name)
        asset.set_value("asset_library", asset_library)
        asset.set_value("description", description)
        asset.set_value("asset_type", asset_type)

        # use the naming to generate the next code
        from naming import AssetCodeNaming
        naming = AssetCodeNaming()
        asset_code = naming.get_next_code(asset)
        asset.set_value("code", asset_code)

        asset.commit()
        return asset
    create_with_autocode = classmethod(create_with_autocode)
class ShotInstance(SObject):
    '''Represents an instance of an asset in a shot.

    NOTE: ``my`` is this codebase's convention for ``self``.
    '''

    SEARCH_TYPE = "prod/shot_instance"

    def get_code(my):
        '''This is kept for backward-compatibility. code is auto-gen now'''
        return my.get_value("name")

    def get_asset(my, search_type='prod/asset'):
        '''return the asset this instance refers to'''
        asset_code = my.get_value("asset_code")
        asset = Search.get_by_code(search_type, asset_code)
        return asset

    def get_shot(my, search_type='prod/shot'):
        '''return the shot containing this instance'''
        shot_code = my.get_value("shot_code")
        shot = Search.get_by_code(search_type, shot_code)
        return shot

    def get_title(my):
        shot_code = my.get_value("shot_code")
        name = my.get_value("name")
        return "%s in %s" % (name, shot_code)

    # Static methods
    def get_by_shot(shot, instance_name, parent_codes=None, type=None):
        '''find a single instance by shot and instance name.

        BUGFIX: parent_codes previously defaulted to a mutable [] that was
        appended to in place, so shot codes accumulated across calls (and in
        the caller's list).
        '''
        shot_col = shot.get_foreign_key()
        search = Search(ShotInstance.SEARCH_TYPE)
        if parent_codes:
            # include the shot itself without mutating the caller's list
            codes = list(parent_codes) + [shot.get_code()]
            search.add_filters(shot_col, codes)
        else:
            search.add_filter(shot_col, shot.get_code())
        search.add_filter("name", instance_name)
        if type:
            search.add_filter("type", type)
        return search.get_sobject()
    get_by_shot = staticmethod(get_by_shot)

    def get_all_by_shot(shot, parent_codes=None, type='asset'):
        '''find all instances in a shot (and optionally its parents).

        BUGFIX: same mutable-default/in-place-append defect as get_by_shot.
        '''
        shot_col = shot.get_foreign_key()
        search = Search(ShotInstance.SEARCH_TYPE)
        if parent_codes:
            codes = list(parent_codes) + [shot.get_code()]
            search.add_filters(shot_col, codes)
        else:
            search.add_filter(shot_col, shot.get_code())
        search.add_filter("type", type)
        return search.do_search()
    get_all_by_shot = staticmethod(get_all_by_shot)

    def filter_instances(instances, shot_code):
        '''filter out the parents' shot instances if child shot instances
        exist'''
        instance_dict = {}
        for instance in instances:
            key = "%s:%s" % (instance.get_value('asset_code'),
                             instance.get_value('name'))
            if key not in instance_dict:
                instance_dict[key] = instance
                continue
            # prefer the instance belonging to the child shot itself
            if instance.get_value('shot_code') == shot_code:
                instance_dict[key] = instance
        return Common.sort_dict(instance_dict)
    filter_instances = staticmethod(filter_instances)

    def get_by_shot_and_asset(cls, shot, asset, type='asset'):
        '''all instances of a given asset in a given shot, newest name first'''
        shot_col = shot.get_foreign_key()
        search = Search(cls.SEARCH_TYPE)
        search.add_filter(shot_col, shot.get_code())
        search.add_filter("type", type)
        search.add_filter("asset_code", asset.get_code())
        search.add_order_by('name desc')
        return search.get_sobjects()
    get_by_shot_and_asset = classmethod(get_by_shot_and_asset)

    def create(cls, shot, asset, instance_name="", type="asset",
               unique=False):
        '''create a new instance of an asset in a shot; with unique=True,
        refuse (with a warning) if one already exists'''
        shot_col = shot.get_foreign_key()
        search = Search(cls.SEARCH_TYPE)
        search.add_filter(shot_col, shot.get_code())
        search.add_filter("type", type)
        #if unique:
        #    search.add_filter("name", instance_name)
        search.add_filter("asset_code", asset.get_code())
        search.add_order_by('name desc')
        sobjs = search.get_sobjects()

        # if unique and exists, then return
        if unique and sobjs:
            Environment.add_warning(
                "Instance exists",
                "Shot '%s' already has an instance for asset '%s'" %
                (shot.get_code(), asset.get_code()))
            return

        naming = Project.get_naming("node")
        instance_name = naming.get_shot_instance_name(shot, asset, sobjs)
        #instance_name = cls.get_instance_name(sobjs, instance_name)

        instance = SObjectFactory.create(cls.SEARCH_TYPE)
        instance.set_value(shot_col, shot.get_code())
        instance.set_value("asset_code", asset.get_code())
        instance.set_value("name", instance_name)
        instance.set_value("type", type)
        instance.commit()
        return instance
    create = classmethod(create)

    def add_related_connection(my, src_sobject, dst_sobject, src_path=None):
        '''adding the related sobject code to this current sobject'''
        my.add_related_sobject(src_sobject)
        my.add_related_sobject(dst_sobject)

        #shot_col = dst_sobject.get_foreign_key()
        schema = my.get_schema()
        st1 = my.get_base_search_type()
        st2 = dst_sobject.get_base_search_type()
        relationship = schema.get_relationship(st1, st2)
        attrs = schema.get_relationship_attrs(st1, st2)
        from_col = attrs.get("from_col")

        search = Search(my.SEARCH_TYPE)
        search.add_filter(from_col, dst_sobject.get_code())
        search.add_filter("type", "asset")
        search.add_filter("asset_code", src_sobject.get_code())
        search.add_order_by('name desc')
        instances = search.get_sobjects()
        """
        # if it allows order by, I can switch to this
        filters = [('asset_code', src_sobject.get_code())]
        instances = dst_sobject.get_related_sobjects(my.SEARCH_TYPE,
            filters=filters)
        """
        naming = Project.get_naming("node")
        instance_name = naming.get_shot_instance_name(dst_sobject,
                                                      src_sobject, instances)
        my.set_value('name', instance_name)
        my.set_value('type', 'asset')
        #my.commit()

    def get_instance_name(sobjs, instance_name):
        '''append a digit to the instance_name if necessary'''
        if sobjs:
            # raw string so the \d escape survives future Python versions
            pat = re.compile(r'(%s)(_\d{2}$)?' % instance_name)
            m = pat.match(sobjs[0].get_value('name'))
            ext = 1
            if m:
                if m.group(2):
                    ext = int(m.group(2)[1:])
                instance_name = "%s_%0.2d" % (instance_name, ext + 1)
        return instance_name
    get_instance_name = staticmethod(get_instance_name)

    def get_aux_data(top_instances, asset_stype='prod/asset'):
        '''get the aux data for asset_name'''
        if not top_instances or not top_instances[0].has_value('asset_code'):
            return []
        search = Search(asset_stype)
        asset_codes = SObject.get_values(top_instances, 'asset_code')
        search.add_filters('code', asset_codes)
        assets = search.get_sobjects()
        asset_dict = SObject.get_dict(assets, ['code'])
        aux_data = []
        for inst in top_instances:
            asset = asset_dict.get(inst.get_value('asset_code'))
            # a missing asset means it has been retired
            asset_name = "RETIRED"
            if asset:
                asset_name = asset.get_value('name')
            aux_data.append({'asset_name': asset_name})
        return aux_data
    get_aux_data = staticmethod(get_aux_data)
class ClassAlias(object):
    """
    Class alias. All classes are initially set to a dynamic state.

    @ivar attrs: A list of attributes to encode for this class.
    @type attrs: C{list}
    @ivar metadata: A list of metadata tags similar to ActionScript tags.
    @type metadata: C{list}
    """

    def __init__(self, klass, alias, attrs=None, attr_func=None, metadata=[]):
        """
        @type klass: C{class}
        @param klass: The class to alias.
        @type alias: C{str}
        @param alias: The alias to the class e.g. C{org.example.Person}. If
            the value of this is C{None}, then it is worked out based on the
            C{klass}. The anonymous tag is also added to the class.
        @type attrs: A list of attributes to encode for this class.
        @param attrs: C{list}
        @type metadata: A list of metadata tags similar to ActionScript tags.
        @param metadata: C{list}
        @raise TypeError: The C{klass} must be a class type.
        @raise TypeError: The C{attr_func} must be callable.
        @raise TypeError: C{__readamf__} must be callable.
        @raise TypeError: C{__writeamf__} must be callable.
        @raise AttributeError: An externalised class was specified, but no
            C{__readamf__} attribute was found.
        @raise AttributeError: An externalised class was specified, but no
            C{__writeamf__} attribute was found.
        @raise ValueError: The C{attrs} keyword must be specified for static
            classes.
        """
        # NOTE: Python 2 only (types.ClassType covers old-style classes)
        if not isinstance(klass, (type, types.ClassType)):
            raise TypeError("klass must be a class type")
        self.checkClass(klass)
        self.metadata = ClassMetaData(metadata)
        if alias is None:
            # no explicit alias: mark as anonymous and derive one
            self.metadata.append('anonymous')
            alias = "%s.%s" % (klass.__module__, klass.__name__,)
        self.klass = klass
        self.alias = alias
        self.attr_func = attr_func
        self.attrs = attrs
        if 'external' in self.metadata:
            # class is declared as external, lets check
            if not hasattr(klass, '__readamf__'):
                raise AttributeError("An externalised class was specified, but"
                    " no __readamf__ attribute was found for class %s" % (
                        klass.__name__))
            if not hasattr(klass, '__writeamf__'):
                raise AttributeError("An externalised class was specified, but"
                    " no __writeamf__ attribute was found for class %s" % (
                        klass.__name__))
            # must be plain (unbound) methods, not arbitrary attributes
            if not isinstance(klass.__readamf__, types.UnboundMethodType):
                raise TypeError("%s.__readamf__ must be callable" % (
                    klass.__name__))
            if not isinstance(klass.__writeamf__, types.UnboundMethodType):
                raise TypeError("%s.__writeamf__ must be callable" % (
                    klass.__name__))
        if 'dynamic' in self.metadata:
            if attr_func is not None and not callable(attr_func):
                raise TypeError("attr_func must be callable")
        if 'static' in self.metadata:
            if attrs is None:
                raise ValueError("attrs keyword must be specified for static classes")

    def __str__(self):
        return self.alias

    def __repr__(self):
        return '<ClassAlias alias=%s klass=%s @ %s>' % (
            self.alias, self.klass, id(self))

    def __eq__(self, other):
        # an alias compares equal to its alias string, to another alias for
        # the same class, and to the aliased class itself
        if isinstance(other, basestring):
            return self.alias == other
        elif isinstance(other, self.__class__):
            return self.klass == other.klass
        elif isinstance(other, (type, types.ClassType)):
            return self.klass == other
        else:
            return False

    def __hash__(self):
        # identity hash: equal aliases may hash differently (pre-existing
        # behaviour, kept as-is)
        return id(self)

    def checkClass(kls, klass):
        """
        This function is used to check the class being aliased to fits
        certain criteria. The default is to check that the __init__
        constructor does not pass in arguments.

        @since: 0.4
        @raise TypeError: C{__init__} doesn't support additional arguments
        """
        # Check that the constructor of the class doesn't require any
        # additonal arguments.
        if not (hasattr(klass, '__init__') and hasattr(klass.__init__, 'im_func')):
            return
        klass_func = klass.__init__.im_func
        # built-in classes don't have func_code
        if hasattr(klass_func, 'func_code') and (
            klass_func.func_code.co_argcount - len(klass_func.func_defaults or []) > 1):
            # build a readable signature for the error message
            args = list(klass_func.func_code.co_varnames)
            values = list(klass_func.func_defaults or [])
            if not values:
                sign = "%s.__init__(%s)" % (klass.__name__, ", ".join(args))
            else:
                named_args = zip(args[len(args) - len(values):], values)
                sign = "%s.__init__(%s, %s)" % (
                    klass.__name__,
                    ", ".join(args[:0-len(values)]),
                    ", ".join(map(lambda x: "%s=%s" % (x,), named_args)))
            raise TypeError("__init__ doesn't support additional arguments: %s" % sign)
    checkClass = classmethod(checkClass)

    def _getAttrs(self, obj, static_attrs=None, dynamic_attrs=None, traverse=True):
        # Collect static/dynamic attribute names for obj; returns (None, None)
        # when nothing constrained the attribute set.
        if static_attrs is None:
            static_attrs = []
        if dynamic_attrs is None:
            dynamic_attrs = []
        modified_attrs = False
        if self.attrs is not None:
            modified_attrs = True
            static_attrs.extend(self.attrs)
        elif traverse is True and hasattr(obj, '__slots__'):
            modified_attrs = True
            static_attrs.extend(obj.__slots__)
        if self.attr_func is not None:
            modified_attrs = True
            extra_attrs = self.attr_func(obj)
            dynamic_attrs.extend([key for key in extra_attrs
                if key not in static_attrs])
        if traverse is True:
            # merge in attrs from aliases registered for base classes
            for base in util.get_mro(obj.__class__):
                try:
                    alias = get_class_alias(base)
                except UnknownClassAlias:
                    continue
                x, y = alias._getAttrs(obj, static_attrs, dynamic_attrs, False)
                if x is not None:
                    static_attrs.extend(x)
                    modified_attrs = True
                if y is not None:
                    dynamic_attrs.extend(y)
                    modified_attrs = True
        if modified_attrs is False:
            return None, None
        # de-duplicate while preserving order
        sa = []
        da = []
        for x in static_attrs:
            if x not in sa:
                sa.append(x)
        for x in dynamic_attrs:
            if x not in da:
                da.append(x)
        return (sa, da)

    def getAttrs(self, obj, codec=None):
        """
        Returns a tuple of lists, static and dynamic attrs to encode.

        @param codec: An optional argument that will contain the en/decoder
            instance calling this function.
        """
        return self._getAttrs(obj)

    def getAttributes(self, obj, codec=None):
        """
        Returns a collection of attributes for an object.

        Returns a C{tuple} containing a dict of static and dynamic attributes

        @param codec: An optional argument that will contain the en/decoder
            instance calling this function.
        """
        dynamic_attrs = {}
        static_attrs = {}
        static_attr_names, dynamic_attr_names = self.getAttrs(obj, codec=codec)
        if static_attr_names is None and dynamic_attr_names is None:
            # fully dynamic: take every attribute the object exposes
            dynamic_attrs = util.get_attrs(obj)
        if static_attr_names is not None:
            for attr in static_attr_names:
                if hasattr(obj, attr):
                    static_attrs[attr] = getattr(obj, attr)
                else:
                    # declared but missing attrs encode as Undefined
                    static_attrs[attr] = Undefined
        if dynamic_attr_names is not None:
            for attr in dynamic_attr_names:
                if attr in static_attrs:
                    continue
                if hasattr(obj, attr):
                    dynamic_attrs[attr] = getattr(obj, attr)
        return (static_attrs, dynamic_attrs)

    def applyAttributes(self, obj, attrs, codec=None):
        """
        Applies the collection of attributes C{attrs} to aliased object
        C{obj}. It is mainly used when reading aliased objects from an AMF
        byte stream.

        @param codec: An optional argument that will contain the en/decoder
            instance calling this function.
        """
        if 'static' in self.metadata:
            s, d = self.getAttrs(obj, codec=codec)
            if s is not None:
                # drop attributes outside the static contract before applying
                for k in attrs.keys():
                    if k not in s:
                        del attrs[k]
        util.set_attrs(obj, attrs)

    def createInstance(self, codec=None, *args, **kwargs):
        """
        Creates an instance of the klass.

        @return: Instance of C{self.klass}.
        """
        return self.klass(*args, **kwargs)
class CtypesParameterHandler(formathandler.FormatHandler):
    """Ctypes Paramater-type-specific data-type handler for OpenGL"""
    # values produced by this handler may be written back by GL
    isOutput = True
    HANDLED_TYPES = (ParamaterType, _ctypes._SimpleCData)

    def from_param(cls, value, typeCode=None):
        # pass DIRECT_RETURN_TYPES through untouched; otherwise hand GL a
        # reference to the ctypes value
        if isinstance(value, DIRECT_RETURN_TYPES):
            return value
        else:
            return ctypes.byref(value)
    from_param = voidDataPointer = classmethod(from_param)

    def dataPointer(cls, value):
        # integer address of the ctypes value (or the value itself if it is
        # a direct-return type)
        if isinstance(value, DIRECT_RETURN_TYPES):
            return value
        else:
            return ctypes.addressof(value)
    dataPointer = classmethod(dataPointer)

    def zeros(self, dims, typeCode):
        """Return Numpy array of zeros in given size"""
        # build a nested ctypes array type, then instantiate it (zero-filled)
        type = GL_TYPE_TO_ARRAY_MAPPING[typeCode]
        for dim in dims:
            type *= dim
        return type()  # should explicitly set to 0s

    def ones(self, dims, typeCode='d'):
        """Return numpy array of ones in given size"""
        raise NotImplementedError(
            """Haven't got a good ones implementation yet""")
##        type = GL_TYPE_TO_ARRAY_MAPPING[ typeCode ]
##        for dim in dims:
##            type *= dim
##        return type() # should expicitly set to 0s

    def arrayToGLType(self, value):
        """Given a value, guess OpenGL type of the corresponding pointer"""
        if isinstance(value, ParamaterType):
            value = value._obj
        result = ARRAY_TO_GL_TYPE_MAPPING.get(value._type_)
        if result is not None:
            return result
        raise TypeError(
            """Don't know GL type for array of type %r, known types: %s\nvalue:%s""" % (
                value._type_,
                list(ARRAY_TO_GL_TYPE_MAPPING.keys()),
                value,
            ))

    def arraySize(self, value, typeCode=None):
        """Given a data-value, calculate dimensions for the array"""
        if isinstance(value, ParamaterType):
            value = value._obj
        # total element count = product of all _length_ values in the
        # nested ctypes array type chain
        dims = 1
        for base in self.types(value):
            length = getattr(base, '_length_', None)
            if length is not None:
                dims *= length
        return dims

    def arrayByteCount(self, value, typeCode=None):
        """Given a data-value, calculate number of bytes required to represent"""
        if isinstance(value, ParamaterType):
            value = value._obj
        return ctypes.sizeof(value)

    def types(self, value):
        """Produce iterable producing all composite types"""
        if isinstance(value, ParamaterType):
            value = value._obj
        # walk the _type_ chain of nested ctypes array types; a str _type_
        # marks a simple-type leaf and terminates the walk
        dimObject = value
        while dimObject is not None:
            yield dimObject
            dimObject = getattr(dimObject, '_type_', None)
            if isinstance(dimObject, str):
                dimObject = None

    def dims(self, value):
        """Produce iterable of all dimensions"""
        if isinstance(value, ParamaterType):
            value = value._obj
        for base in self.types(value):
            length = getattr(base, '_length_', None)
            if length is not None:
                yield length

    def asArray(self, value, typeCode=None):
        """Convert given value to an array value of given typeCode"""
        if isinstance(value, DIRECT_RETURN_TYPES):
            return value
        if isinstance(value, ParamaterType):
            value = value._obj
        return ctypes.byref(value)

    def unitSize(self, value, typeCode=None):
        """Determine unit size of an array (if possible)"""
        if isinstance(value, ParamaterType):
            value = value._obj
        # innermost dimension
        return tuple(self.dims(value))[-1]

    def dimensions(self, value, typeCode=None):
        """Determine dimensions of the passed array value (if possible)"""
        if isinstance(value, ParamaterType):
            value = value._obj
        return tuple(self.dims(value))
class MyVocabulary(self._getTargetClass()):
    """Vocabulary subclass whose terms are wrapped in MyTerm."""

    @classmethod
    def createTerm(cls, value):
        # Wrap the raw value in the test's term type.
        return MyTerm(value)
class BaseMallocRemovalTest(object):
    # Base test class for the malloc-removal (virtualization) pass; subclasses
    # configure the type system. NOTE: Python 2 code (``except E, e:`` syntax).
    type_system = None
    MallocRemover = None

    def _skip_oo(self, msg):
        # skip tests that do not apply to the ootype type system
        if self.type_system == 'ootype':
            py.test.skip(msg)

    def check_malloc_removed(cls, graph, expected_mallocs, expected_calls):
        # count remaining malloc / direct_call operations in the graph and
        # compare against the expected counts
        count_mallocs = 0
        count_calls = 0
        for node in graph.iterblocks():
            for op in node.operations:
                if op.opname == 'malloc':
                    count_mallocs += 1
                if op.opname == 'direct_call':
                    count_calls += 1
        assert count_mallocs == expected_mallocs
        assert count_calls == expected_calls
    check_malloc_removed = classmethod(check_malloc_removed)

    def check(self, fn, signature, args, expected_result,
              expected_mallocs=0, expected_calls=0):
        # Translate fn, repeatedly run the malloc-removal pass until no more
        # progress, validating graphs and the interpreted result after each
        # iteration; finally verify the remaining malloc/call counts.
        t = TranslationContext()
        self.translator = t
        t.buildannotator().build_types(fn, signature)
        t.buildrtyper(type_system=self.type_system).specialize()
        graph = graphof(t, fn)
        if option.view:
            t.view()
        self.original_graph_count = len(t.graphs)
        # to detect broken intermediate graphs,
        # we do the loop ourselves instead of calling remove_simple_mallocs()
        maxiter = 100
        mallocv = MallocVirtualizer(t.graphs, t.rtyper, verbose=True)
        while True:
            progress = mallocv.remove_mallocs_once()
            #simplify.transform_dead_op_vars_in_blocks(list(graph.iterblocks()))
            if progress and option.view:
                t.view()
            t.checkgraphs()
            if expected_result is not DONT_CHECK_RESULT:
                interp = LLInterpreter(t.rtyper)
                if not isinstance(expected_result, CHECK_RAISES):
                    res = interp.eval_graph(graph, args)
                    assert res == expected_result
                else:
                    # expected to raise: assert the exception name matches
                    excinfo = py.test.raises(LLException,
                                             interp.eval_graph, graph, args)
                    assert expected_result.excname in str(excinfo.value)
            if not progress:
                break
            maxiter -= 1
            assert maxiter > 0, "infinite loop?"
        self.check_malloc_removed(graph, expected_mallocs, expected_calls)
        return graph

    def test_fn1(self):
        def fn1(x, y):
            if x > 0:
                t = x + y, x - y
            else:
                t = x - y, x + y
            s, d = t
            return s * d
        graph = self.check(fn1, [int, int], [15, 10], 125)
        insns = summary(graph)
        assert insns['int_mul'] == 1

    def test_aliasing1(self):
        A = lltype.GcStruct('A', ('x', lltype.Signed))
        def fn1(x):
            a1 = lltype.malloc(A)
            a1.x = 123
            if x > 0:
                a2 = a1
            else:
                a2 = lltype.malloc(A)
                a2.x = 456
            a1.x += 1
            return a2.x
        self.check(fn1, [int], [3], 124)
        self.check(fn1, [int], [-3], 456)

    def test_direct_call(self):
        def g(t):
            a, b = t
            return a * b
        def f(x):
            return g((x + 1, x - 1))
        graph = self.check(f, [int], [10], 99,
                           expected_calls=1)  # not inlined

    def test_direct_call_mutable_simple(self):
        A = lltype.GcStruct('A', ('x', lltype.Signed))
        def g(a):
            a.x += 1
        def f(x):
            a = lltype.malloc(A)
            a.x = x
            g(a)
            return a.x
        graph = self.check(f, [int], [41], 42,
                           expected_calls=0)  # no more call, inlined

    def test_direct_call_mutable_retval(self):
        A = lltype.GcStruct('A', ('x', lltype.Signed))
        def g(a):
            a.x += 1
            return a.x * 100
        def f(x):
            a = lltype.malloc(A)
            a.x = x
            y = g(a)
            return a.x + y
        graph = self.check(f, [int], [41], 4242,
                           expected_calls=0)  # no more call, inlined

    def test_direct_call_mutable_ret_virtual(self):
        A = lltype.GcStruct('A', ('x', lltype.Signed))
        def g(a):
            a.x += 1
            return a
        def f(x):
            a = lltype.malloc(A)
            a.x = x
            b = g(a)
            return a.x + b.x
        graph = self.check(f, [int], [41], 84,
                           expected_calls=0)  # no more call, inlined

    def test_direct_call_mutable_lastref(self):
        A = lltype.GcStruct('A', ('x', lltype.Signed))
        def g(a):
            a.x *= 10
            return a.x
        def f(x):
            a = lltype.malloc(A)
            a.x = x
            y = g(a)
            return x - y
        graph = self.check(f, [int], [5], -45,
                           expected_calls=1)  # not inlined

    def test_direct_call_ret_virtual(self):
        A = lltype.GcStruct('A', ('x', lltype.Signed))
        prebuilt_a = lltype.malloc(A)
        def g(a):
            prebuilt_a.x += a.x
            return a
        def f(n):
            prebuilt_a.x = n
            a = lltype.malloc(A)
            a.x = 2
            a = g(a)
            return prebuilt_a.x * a.x
        graph = self.check(f, [int], [19], 42,
                           expected_calls=0)  # inlined

    def test_direct_call_unused_arg(self):
        A = lltype.GcStruct('A', ('x', lltype.Signed))
        prebuilt_a = lltype.malloc(A)
        def g(a, unused):
            return a.x
        def f(n):
            a = lltype.malloc(A)
            a.x = 15
            return g(a, n)
        graph = self.check(f, [int], [42], 15,
                           expected_calls=1)  # not inlined

    def test_raises_simple(self):
        class MyExc(Exception):
            pass
        def f(n):
            if n < 3:
                e = MyExc()
                e.n = n
                raise e
            return n
        self.check(f, [int], [5], 5, expected_mallocs=1)
        self.check(f, [int], [-5], CHECK_RAISES("MyExc"),
                   expected_mallocs=1)

    def test_catch_simple(self):
        class A:
            pass
        class E(Exception):
            def __init__(self, n):
                self.n = n
        def g(n):
            if n < 0:
                raise E(n)
        def f(n):
            a = A()
            a.n = 10
            try:
                g(n)  # this call should not be inlined
            except E, e:
                a.n = e.n
            return a.n
        self.check(f, [int], [15], 10, expected_calls=1)
        self.check(f, [int], [-15], -15, expected_calls=1)
def _cached(f):
    """Decorator that makes a method cached.

    The first call stores the result on the instance under
    ``_cached_<name>``; later calls return the stored value (arguments
    are effectively ignored after the first call).
    """
    attr_name = '_cached_' + f.__name__

    def wrapper(obj, *args, **kwargs):
        if not hasattr(obj, attr_name):
            setattr(obj, attr_name, f(obj, *args, **kwargs))
        return getattr(obj, attr_name)
    return wrapper


# property-style decorators composed from _classproperty and _cached
classproperty = lambda f: _classproperty(classmethod(f))
cached_property = lambda f: property(_cached(f))
cached_classproperty = lambda f: classproperty(_cached(f))


class Choices(object):
    """ Choices."""
    # NOTE(review): this class appears truncated at the end of the snippet;
    # only the beginning of __init__ is visible here.
    def __init__(self, *choices):
        self._choices = []
        self._choice_dict = {}
        for choice in choices:
            if isinstance(choice, (list, tuple)):
                if len(choice) == 2:
                    # expand (value, label) to (value, label, label)
                    choice = (choice[0], choice[1], choice[1])
class WorkerSpec(object):
    """Specification of a Harvester worker.

    Attribute writes go through ``__setattr__`` so that changes are tracked
    in ``_changedAttrs`` for building partial UPDATE statements.
    """

    # ordered attribute/column names; ``pack`` and the SQL helpers rely on
    # this order matching the DB schema
    _attributes = ('harvesterID', 'workerID', 'batchID', 'queueName', 'status',
                   'computingSite', 'nCore', 'nodeID', 'submitTime',
                   'startTime', 'endTime', 'lastUpdate', 'stdOut', 'stdErr',
                   'batchLog', 'jdl', 'resourceType', 'nativeExitCode',
                   'nativeStatus', 'diagMessage', 'nJobs', 'computingElement',
                   'submissionHost', 'harvesterHost', 'errorCode', 'jobType')

    # slots: the attributes plus the change-tracking map
    __slots__ = _attributes + ('_changedAttrs',)

    # attributes which have 0 by default
    _zeroAttrs = ()

    # catchall resource type
    RT_catchall = 'ANY'

    def __init__(self):
        """Initialize all attributes to None and an empty change map."""
        # install attributes without triggering change tracking
        for attr in self._attributes:
            object.__setattr__(self, attr, None)
        # map of changed attributes
        object.__setattr__(self, '_changedAttrs', {})

    def __setattr__(self, name, value):
        """Set an attribute, deserializing datetimes and tracking changes."""
        oldVal = getattr(self, name)
        # convert serialized 'datetime/...' strings back to datetime objects
        # NOTE: relies on the Python 2 ``unicode`` builtin
        if type(value) in [str, unicode] and value.startswith('datetime/'):
            value = datetime.datetime.strptime(value.split('/')[-1],
                                               '%Y-%m-%d %H:%M:%S.%f')
        object.__setattr__(self, name, value)
        # collect changed attributes
        if oldVal != value:
            self._changedAttrs[name] = value

    def resetChangedList(self):
        """Clear the map of changed attributes."""
        # BUGFIX: the original first did ``self._oldPandaID = self.PandaID``,
        # a copy-paste from JobSpec that always raised AttributeError here
        # (WorkerSpec has no PandaID attribute and no _oldPandaID slot).
        object.__setattr__(self, '_changedAttrs', {})

    def valuesMap(self, onlyChanged=False):
        """Return a {':attr': value} bind-variable map (all or changed only)."""
        ret = {}
        for attr in self._attributes:
            if onlyChanged and attr not in self._changedAttrs:
                continue
            val = getattr(self, attr)
            # apply the zero default where configured
            if val is None and attr in self._zeroAttrs:
                val = 0
            ret[':%s' % attr] = val
        return ret

    def pack(self, values):
        """Fill attributes from a sequence ordered like ``_attributes``."""
        # bypass __setattr__ so packing a DB row does not mark changes
        for attr, val in zip(self._attributes, values):
            object.__setattr__(self, attr, val)

    def columnNames(cls, prefix=None):
        """Return comma-separated column names for INSERT, optionally prefixed."""
        if prefix is not None:
            return ','.join('%s.%s' % (prefix, attr) for attr in cls._attributes)
        return ','.join(cls._attributes)
    columnNames = classmethod(columnNames)

    def bindValuesExpression(cls):
        """Return the VALUES(...) clause with one bind variable per column."""
        # NOTE: an unused ``from pandaserver.config import panda_config``
        # import was removed from this method
        return 'VALUES(%s)' % ','.join(':%s' % attr for attr in cls._attributes)
    bindValuesExpression = classmethod(bindValuesExpression)

    def bindUpdateChangesExpression(self):
        """Return 'attr=:attr,...' covering only the changed attributes."""
        return ','.join('{0}=:{0}'.format(attr)
                        for attr in self._attributes
                        if attr in self._changedAttrs)
def classproperty(func):
    """Decorator turning *func* into a class-level property descriptor."""
    # Plain functions are promoted to classmethods; objects that are already
    # classmethod/staticmethod wrappers pass through untouched.
    wrapped = func if isinstance(func, (classmethod, staticmethod)) else classmethod(func)
    return ClassPropertyDescriptor(wrapped)
async def Test async with async for # Builtin objects. True False Ellipsis None NotImplemented __debug__ __doc__ __file__ __name__ __package__ __loader__ __spec__ __path__ __cached__ # Bultin types bool bytearray dict float frozenset int list object set str tuple # Builtin functions __import__() abs() all() any() bin() bool() breakpoint() bytearray() callable() chr() classmethod() compile() complex() delattr() dict() dir() divmod() enumerate() eval() filter() float() format() frozenset() getattr() globals() hasattr() hash() help() hex() id() input() int() isinstance() issubclass() iter() len() list() locals() map() max() memoryview() min() next() object() oct() open() ord() pow() property() range() repr() reversed() round() set() setattr() slice() sorted() staticmethod() str() sum() super() tuple() type() vars() zip() # Builtin functions: Python 2 apply() basestring() buffer() cmp() coerce() execfile() file() intern() long() raw_input() reduce() reload() unichr() unicode() xrange() print() # Builtin functions: Python 3 ascii() bytes() exec() print() # Builtin exceptions and warnings. BaseException Exception StandardError ArithmeticError LookupError EnvironmentError AssertionError AttributeError EOFError FloatingPointError GeneratorExit IOError ImportError IndexError KeyError KeyboardInterrupt MemoryError NameError NotImplementedError OSError OverflowError ReferenceError RuntimeError StopIteration SyntaxError IndentationError TabError SystemError SystemExit TypeError UnboundLocalError UnicodeError UnicodeEncodeError UnicodeDecodeError UnicodeTranslateError ValueError WindowsError ZeroDivisionError Warning UserWarning DeprecationWarning PendingDeprecationWarning SyntaxWarning RuntimeWarning FutureWarning ImportWarning UnicodeWarning # Decorators. @ decoratorname @ object.__init__(arg1, arg2) @ декоратор @ декоратор.décorateur # Operators and or in is not - + * ** **- **+ **~ @ / // % & | ^ ~ << >> < <= == != >= >
# NOTE(review): the two lines below are the tail of a function whose
# definition precedes this chunk; their original indentation is unknown.
if '__hash__' not in cls.__dict__:
    cls.__hash__ = _hash


def _installOneConstructor(cls: Any, case: Enum) -> None:
    """Install a generated classmethod constructor named after *case* on *cls*."""
    def constructor(cls: Type[Any], *args: Any, _case: Enum = case) -> Any:
        # build the case payload via the annotated case type, then wrap it
        return cls(key=_case, value=cls.__annotations__[_case.name].constructCase(*args))
    # refuse to silently clobber an attribute the user already defined
    if hasattr(cls, case.name):
        raise AttributeError(
            f'{cls} should not have a default value for {case.name}, as this will be a generated constructor'
        )
    setattr(cls, case.name, classmethod(constructor))


def _installOneAccessor(cls: Any, case: Enum) -> None:
    """Install a value accessor (lower-cased case name) on *cls*, unless user-defined."""
    def accessor(self: Any, _case: Enum = case) -> Any:
        # only the case the instance was constructed with may be read
        if self._key != _case:
            raise AttributeError(
                f'{self} was constructed as case {self._key.name}, so {_case.name.lower()} is not accessible'
            )
        return self._value
    accessorName = case.name.lower()
    # respect an accessor explicitly defined on the class itself
    if accessorName not in cls.__dict__:
        setattr(cls, accessorName, accessor)
class Reg:
    """Helper class to read values from the registry."""

    @classmethod
    def get_value(cls, path, key):
        """Return the value of *key* under *path*, searching every root in HKEYS."""
        for base in HKEYS:
            values = cls.read_values(base, path)
            if values and key in values:
                return values[key]
        raise KeyError(key)

    @classmethod
    def read_keys(cls, base, key):
        """Return list of registry keys."""
        try:
            handle = RegOpenKeyEx(base, key)
        except RegError:
            return None
        found = []
        index = 0
        while True:
            try:
                found.append(RegEnumKey(handle, index))
            except RegError:
                # enumeration past the last subkey raises RegError
                return found
            index += 1

    @classmethod
    def read_values(cls, base, key):
        """Return dict of registry keys and values.

        All names are converted to lowercase.
        """
        try:
            handle = RegOpenKeyEx(base, key)
        except RegError:
            return None
        values = {}
        index = 0
        while True:
            try:
                name, value, vtype = RegEnumValue(handle, index)
            except RegError:
                # enumeration past the last value raises RegError
                return values
            values[cls.convert_mbcs(name.lower())] = cls.convert_mbcs(value)
            index += 1

    @staticmethod
    def convert_mbcs(s):
        """Decode *s* from mbcs when it supports decoding; pass through otherwise."""
        decode = getattr(s, "decode", None)
        if decode is not None:
            try:
                s = decode("mbcs")
            except UnicodeError:
                pass
        return s
# NOTE(review): the lines down to ``return StateMachineTestCase`` are the
# tail of a factory function (and the body of a TestCase subclass it
# defines) whose opening lines precede this chunk; the indentation below
# is reconstructed and should be verified against the original file.
        settings = Settings(min_satisfying_examples=1)

        def runTest(self):
            run_state_machine_as_test(state_machine_class)

    base_name = state_machine_class.__name__
    StateMachineTestCase.__name__ = str(base_name + u'.TestCase')
    StateMachineTestCase.__qualname__ = str(
        getattr(state_machine_class, u'__qualname__', base_name) + u'.TestCase')
    # cache the generated TestCase so repeated lookups return the same class
    state_machine_class._test_case_cache[state_machine_class] = (
        StateMachineTestCase)
    return StateMachineTestCase


# expose the factory as a classmethod on the state machine base class
GenericStateMachine.find_breaking_runner = classmethod(find_breaking_runner)


def seeds(starting, n_steps):
    """Return ``n_steps`` deterministic 64-bit seeds derived from ``starting``."""
    random = Random(starting)
    result = []
    for _ in hrange(n_steps):
        result.append(random.getrandbits(64))
    return result


# Sentinel value used to mark entries as deleted.
TOMBSTONE = [object(), [u'TOMBSTONE FOR STATEFUL TESTING']]
def setter(self, func):
    """Register *func* as this descriptor's setter and return the
    descriptor itself, so the call can be chained or used as a decorator.

    Plain callables are wrapped in ``classmethod``; objects that are
    already classmethod/staticmethod are stored unchanged.
    """
    wrapped = func if isinstance(func, (classmethod, staticmethod)) else classmethod(func)
    self.fset = wrapped
    return self
def __new__(metaclass,classname,baseclasses,classdict):
    """Create an Entity subclass.

    For classes derived from the (unseen) AbstractEntity base this:
    inherits the ``database`` path, wires thread-local sqlite connection
    helpers, collects all Attribute descriptors into metadata maps
    (columns, displaynames, validators, ...), creates the backing table,
    and installs a ``primary`` property.  Indentation reconstructed from a
    collapsed source line — structure should be confirmed against the
    original file.
    """
    #print('ME in',classname,metaclass.__name__,baseclasses)
    def connect(cls):
        """create a thread local connection if there isn't one yet"""
        #print('@@@@@@@@ connect',cls,cls._database,'thread',threading.current_thread().ident)
        ##print('connect',cls)
        if not hasattr(cls._local,'conn'):
            #print('!!!!!!!!!!!!!!!!!!new connection for thread',threading.current_thread().ident)
            cls._local.conn=sqlite.connect(cls._database)
            # enforce referential integrity and dict-like row access
            cls._local.conn.execute('pragma foreign_keys = 1')
            cls._local.conn.row_factory = sqlite.Row
            # check if rbac tables are defined, if not, initialize them
            cls._local.rbac=rbac(cls._database)
            # check if we have a display customization table, if not, initialize it
            cls._local.custom=custom(cls._database)
        #print(cls._local.conn)
        return cls._local.conn

    def getrbac(cls):
        # ensure the thread-local connection (and rbac helper) exists first
        cls._connect()
        return cls._local.rbac

    def getcustom(cls):
        cls._connect()
        return cls._local.custom

    entitydefinition = False
    if len(baseclasses):
        # these test ensure we only take special actions for
        # classes derived from Entity ( we cannot check that directly
        # because forward references are not allowed
        if not 'database' in classdict and not '_database' in classdict:
            classdict['_database']=MetaEntity.findattr(baseclasses,'database')
            #print('ME no database',classdict['_database'])
            if classdict['_database'] is None:
                raise AttributeError('subclass of AbstractEntity has no database class variable')
            entitydefinition=True
        # copy reference to thread local storage
        if not '_local' in classdict:
            classdict['_local']=MetaEntity.findattr(baseclasses,'_local')
        classdict['_connect']=classmethod(connect)
        classdict['_rbac']=classmethod(getrbac)
        classdict['_custom']=classmethod(getcustom)
        # collect metadata from the Attribute descriptors declared on the class
        classdict['columns']=[k for k,v in classdict.items() if type(v) == Attribute]
        classdict['sortorder']=[]
        classdict['displaynames']={k:v.displayname if v.displayname else k for k,v in classdict.items() if type(v) == Attribute}
        classdict['validators']={k:v.validate for k,v in classdict.items() if type(v) == Attribute and not v.validate is None}
        classdict['displaynames']['id']='id'
        classdict['displayclasses']={k:v.displayclass for k,v in classdict.items() if type(v) == Attribute}
        classdict['htmlescape']={k:v.htmlescape for k,v in classdict.items() if type(v) == Attribute}
        # synthetic autoincrement id column prepended to the DDL below
        PrimaryKey = Attribute()
        PrimaryKey.coldef = 'integer primary key autoincrement'
        #print('ME entitydefinition',entitydefinition)
        if entitydefinition or '_meta' in classdict:
            sql = 'create table if not exists "' + classname +'" (' + ", ".join(['"'+k+'" '+v.coldef for k,v in [('id',PrimaryKey)]+list(classdict.items()) if type(v) == Attribute]) + ')'
            # we cannot use connect yet
            conn = sqlite.connect(classdict['_database'])
            sqllogger.debug(sql)
            conn.execute(sql)
            conn.commit()
            conn.close()
        # expose the first Attribute flagged primary as the 'primary' property;
        # the break right below also pins the closed-over k for the lambda
        for k,v in classdict.items():
            if type(v) == Attribute:
                if v.primary:
                    classdict['primary']=property(lambda self:getattr(self,k))
                    classdict['primaryname']=k
                    break
    # absolutely necessary otherwise infinite recursion!
    if not 'primary' in classdict:
        classdict['primary']=property(lambda self:getattr(self,'id'))
        classdict['primaryname']='id'
    return type.__new__(metaclass,classname,baseclasses,classdict)
class PCA(object):
    # Reuse the module docstring as the class docstring (original idiom).
    __doc__ = __doc__
    option_default_dict = {('input_fname', 1,):['', 'i', 1, 'input file', ],\
                            ('commit', 0, int):[0, 'c', 0, 'commit the db operation. this commit happens after every db operation, not wait till the end.'],\
                            ('debug', 0, int):[0, 'b', 0, 'debug mode. 1=level 1 (pdb mode). 2=level 2 (same as 1 except no pdb mode)'],\
                            ('report', 0, int):[0, 'r', 0, 'toggle report, more verbose stdout/stderr.']}

    def __init__(self, **keywords):
        """
        2008-11-18
        """
        from pymodule import ProcessOptions
        self.ad = ProcessOptions.process_function_arguments(keywords, self.option_default_dict, error_doc=self.__doc__, class_to_have_attr=self)

    def normalize(cls, data_matrix, divide_variance=True):
        """
        2008-11-18
            this is based on Patterson2006a. assume no missing data in data_matrix.
            SNP allele in data_matrix is 0 or 1. two chromosomes are homozygous. so each strain
            has either allele 0 or 1 on both chromosomes. (selfing plants, like arabidopsis thaliana)

            normalizing steps:
                1. subtract mean from each column
                2. make variance equal among columns by dividing each column by the estimated stdev
                    if variance is 0, skip this step.

        Returns a new float64 matrix; the input matrix is not modified.
        """
        sys.stderr.write("Normalizing ...")
        no_of_rows, no_of_cols = data_matrix.shape
        # BUGFIX: numpy.float (a deprecated alias of builtin float) was
        # removed in NumPy 1.24; builtin float yields the same float64 dtype.
        new_data_matrix = data_matrix.astype(float)    #change the data type
        for j in range(no_of_cols):
            genotype_ls = data_matrix[:,j]
            col_mean = numpy.mean(genotype_ls)
            col_var = numpy.var(genotype_ls)
            #col_var = col_mean*(1-col_mean)    #2009-9-3 only good for binary matrix
            # divide by the stdev only when requested and well-defined
            if col_mean!=0 and divide_variance and col_var!=0:
                new_data_matrix[:,j] = (new_data_matrix[:,j]-col_mean)/numpy.sqrt(col_var)
            else:
                new_data_matrix[:,j] = new_data_matrix[:,j]-col_mean
        sys.stderr.write("Done.\n")
        return new_data_matrix
    normalize = classmethod(normalize)

    def eig(cls, data_matrix, normalize=True):
        """
        2008-11-18
            numpy.inner(matrix1, matrix2) is weird. matrix1's 2nd dimension is equal to matrix2's 2nd dimension.
            Not as traditional, matrix1's 2nd dimension is equal to matrix2's 1st dimension.

        Returns (pc_matrix, eigen_vectors, explained_var).
        NOTE(review): depends on the third-party ``rpy`` package for the
        eigendecomposition (numpy.linalg.eig was avoided because it may
        return complex values) — confirm rpy availability before use.
        """
        if normalize:
            new_data_matrix = cls.normalize(data_matrix)
            new_data_matrix2 = numpy.transpose(new_data_matrix)    #2008-11-20 transpose only to get cov_matrix. a later mulitplication between eigen_vector and new_data_matrix
            cov_matrix = 1.0/new_data_matrix2.shape[1]*numpy.inner(new_data_matrix2, new_data_matrix2)    #2008-11-19 mysteriously, numpy.tranpose() is not required on the 2nd new_data_matrix, it'll cause ValueError('matrices are not aligned',)
        else:
            new_data_matrix = cls.normalize(data_matrix, divide_variance=False)
            #new_data_matrix = data_matrix
            cov_matrix = numpy.cov(new_data_matrix, rowvar=0)    #2009-9-3 numpy.cov or numpy.corrcoef
        #eigen_values, eigen_vectors = numpy.linalg.eig(cov_matrix)    #get complex values out of this
        import rpy
        eigen_result = rpy.r.eigen(cov_matrix)
        eigen_values = numpy.array(eigen_result['values'])
        eigen_vectors = eigen_result['vectors']
        explained_var = eigen_values/numpy.sum(eigen_values)
        pc_matrix = numpy.inner(numpy.transpose(eigen_vectors), new_data_matrix)    #eigen_vectors has to be transposed in row-vector form
        pc_matrix = numpy.transpose(pc_matrix)    #transpose again. maybe pc_matrix = numpy.inner(new_data_matrix, eigen_vectors)
        return pc_matrix, eigen_vectors, explained_var
    eig = classmethod(eig)
class Vehicle:
    """A square drawn on a Tk canvas that drives across the stoplight's
    road layout, driven by a generated state machine (Vehicle_sm).

    Class attribute ``_speed`` (pixels per tick) is shared by all vehicles
    and adjusted via the ``setSpeed`` classmethod.
    """

    _speed = 2

    def __init__(self, stoplight, direction, canvas):
        self._fsm = Vehicle_sm.Vehicle_sm(self)

        # The canvas to draw on and the direction this vehicle is
        # moving.
        self._canvas = canvas
        self._direction = direction

        # The stoplight object is responsible knowing the road
        # layout. Ask it for all relevant information.
        self._stoplight = stoplight

        # This vehicle is initially at the road's outside edge.
        # Figure out the road's length.
        XLength = stoplight.getRoadLengthX()
        YLength = stoplight.getRoadLengthY()
        LaneWidth = stoplight.getRoadWidth() / 2

        # The vehicle is a _vehicleSize x _vehicleSize (6x6) pixel square.
        self._vehicleSize = 6

        # A 3 pixel separation is to be maintained between vehicles.
        self._vehicleSeparation = 3

        # How far away the vehicle is from the curb.
        CurbOffset = (LaneWidth - self._vehicleSize) / 2

        # The vehicle's current canvas location. This is the
        # square's upper left hand corner.
        if direction == 'north':
            self._xpos = (XLength / 2) + CurbOffset
            self._ypos = YLength - self._vehicleSize
        elif direction == 'south':
            self._xpos = (XLength / 2) - LaneWidth + CurbOffset
            self._ypos = 0
        elif direction == 'east':
            self._xpos = 0
            self._ypos = (YLength / 2) + CurbOffset
        elif direction == 'west':
            self._xpos = XLength - self._vehicleSize
            self._ypos = (YLength / 2) - LaneWidth + CurbOffset

        # Put the vehicle on display.
        self._canvasID = canvas.create_rectangle(
            self._xpos,
            self._ypos,
            self._xpos + self._vehicleSize,
            self._ypos + self._vehicleSize,
            fill='black',
            outline='white',
        )

        # Move this vehicle along at near movie-refresh rate.
        self._redrawRate = 1000 / 60

        # Store the after's timer ID here.
        self._timerID = -1

        # Set this flag to true when the vehicle has
        # completed its trip.
        self._isDoneFlag = False

        # Uncomment to see debug output.
        #self._fsm.setDebugFlag(True)

    def Delete(self):
        # Cancel any pending redraw before removing the square.
        if self._timerID >= 0:
            self._canvas.after_cancel(self._timerID)
            self._timerID = -1
        self._canvas.delete(self._canvasID)

    # timeout --
    #
    # If the vehicle has driven off the canvas, then
    # delete the vehicle.
    # Check if the vehicle is at the intersection and the
    # light is either yellow or red. If yes, then issue a
    # "LightRed" transition. If all is go, then keep on
    # truckin.
    #
    # Arguments:
    #     None.

    def timeout(self):
        self._timerID = -1
        if self.OffCanvas():
            self._fsm.TripDone()
        elif self.AtIntersection() and self.getLight() != 'green':
            self._fsm.LightRed()
        else:
            self._fsm.KeepGoing()

    def getLight(self):
        # Ask the stoplight for the light state facing our direction.
        return self._stoplight.getLight(self._direction)

    # lightGreen --
    #
    # The light has turned green. Time to get moving again.
    #
    # Arguments:
    #     None

    def lightGreen(self):
        self._fsm.LightGreen()

    # setSpeed --
    #
    # Set speed for all vehicles.
    #
    # Arguments:
    #     speed   In pixels.

    def setSpeed(klass, speed):
        # Accepted range is 1..10 pixels per tick; anything else is ignored
        # with a console warning.
        if speed < 1 or speed > 10:
            print("Invalid speed (%d).\n" % speed)
        else:
            klass._speed = speed
    setSpeed = classmethod(setSpeed)

    # isDone --
    #
    # Has this vehicle completed its trip?
    #
    # Arguments:
    #     None.
    #
    # Results:
    #     Returns true if the trip is done and false
    #     otherwise.

    def isDone(self):
        return self._isDoneFlag

    # start --
    #
    # Start this vehicle running.
    #
    # Arguments:
    #     None.

    def Start(self):
        self._fsm.enterStartState()
        self._fsm.Start()

    # pause --
    #
    # Pause this vehicles' running.
    #
    # Arguments:
    #     None.

    def Pause(self):
        self._fsm.Pause()

    # continue --
    #
    # Continue this vehicles' running.
    #
    # Arguments:
    #     None.

    def Continue(self):
        self._fsm.Continue()

    # stop --
    #
    # Stop this vehicles' running.
    #
    # Arguments:
    #     None.
    #

    def Stop(self):
        self._fsm.Stop()
        self.Delete()

    # State Machine Actions
    #
    # The following methods are called by the state machine.

    # SetTimer --
    #
    # Set the timer for the next move.
    #
    # Arguments:
    #     None.

    def SetTimer(self):
        self._timerID = self._canvas.after(self._redrawRate, self.timeout)

    # StopTimer --
    #
    # Stop the vehicle's timer.
    #
    # Arguments:
    #     None.

    def StopTimer(self):
        if self._timerID >= 0:
            self._canvas.after_cancel(self._timerID)
            self._timerID = -1

    # Move --
    #
    # 1. Calculate the vehicle's new position.
    # 2. Remove the vehicle from the canvas.
    # 3. Draw the vehicles new position.
    #
    # Arguments:
    #     None.
    #
    # Results:
    #     None returned. Side affect of redrawing vehicle.

    def Move(self):
        if self._direction == 'north':
            Xmove = 0
            Ymove = -self._speed
        elif self._direction == 'south':
            Xmove = 0
            Ymove = self._speed
        elif self._direction == 'east':
            Xmove = self._speed
            Ymove = 0
        elif self._direction == 'west':
            Xmove = -self._speed
            Ymove = 0
        self._canvas.move(self._canvasID, Xmove, Ymove)
        self._xpos += Xmove
        self._ypos += Ymove

    # RegisterWithLight --
    #
    # When the light turns green, it will inform us.
    #
    # Arguments:
    #     None.

    def RegisterWithLight(self):
        self._stoplight.registerVehicle(self, self._direction)

    # SelfDestruct --
    #
    # Remove the vehicle from the canvas.
    #
    # Arguments:
    #     None.

    def SelfDestruct(self):
        self._canvas.delete(self._canvasID)
        self._canvasID = -1
        self._isDoneFlag = True

    # OffCanvas --
    #
    # Figure out if the vehicle has driven off the map.
    #
    # Arguments:
    #     None.
    #
    # Results:
    #     Returns true if the vehicle is off the map; otherwise
    #     false.

    def OffCanvas(self):
        if self._direction == 'north':
            return (self._ypos - self._speed) <= 0
        elif self._direction == 'south':
            YLength = self._stoplight.getRoadLengthY()
            return (self._ypos + self._speed) >= YLength
        elif self._direction == 'east':
            XLength = self._stoplight.getRoadLengthX()
            return (self._xpos + self._speed) >= XLength
        elif self._direction == 'west':
            return (self._xpos - self._speed) <= 0

    # AtIntersection --
    #
    # Figure out whether this vehicile is at the intersection
    # or not.
    #
    # Arguments:
    #     None.
    #
    # Results:
    #     Returns true if the vehicle is at the intersection;
    #     otherwise, false.

    def AtIntersection(self):
        # The vehicle is not at the intersection until proven
        # otherwise.
        Retval = False
        XLength = self._stoplight.getRoadLengthX()
        YLength = self._stoplight.getRoadLengthY()
        LaneWidth = self._stoplight.getRoadWidth() / 2
        # Calculate the intersections coordinates based on
        # the vehicle's direction. Then calculate where the
        # vehicle will end up this move. If the vehicle will
        # move beyond the intersection stop line, then the
        # vehicle is at the intersection.
        #
        # Also take into account the vehicles already waiting
        # at the intersection.
        #
        # By the way, once the vehicle moves past the intersection,
        # ignore the light.
        NumVehicles = self._stoplight.getQueueSize(self._direction)
        LenVehicles = (self._vehicleSize + self._vehicleSeparation) * NumVehicles
        if self._direction == 'north':
            YIntersection = (YLength / 2) + LaneWidth + (self._vehicleSize / 2) + LenVehicles
            Retval = (self._ypos > YIntersection) and (self._ypos - self._speed <= YIntersection)
        elif self._direction == 'south':
            YIntersection = (YLength / 2) - LaneWidth - (self._vehicleSize / 2) - LenVehicles
            Retval = (self._ypos < YIntersection) and (self._ypos + self._speed >= YIntersection)
        elif self._direction == 'east':
            XIntersection = (XLength / 2) - LaneWidth - (self._vehicleSize / 2) - LenVehicles
            Retval = (self._xpos < XIntersection) and (self._xpos + self._speed >= XIntersection)
        elif self._direction == 'west':
            XIntersection = (XLength / 2) + LaneWidth + (self._vehicleSize / 2) + LenVehicles
            Retval = (self._xpos > XIntersection) and (self._xpos - self._speed <= XIntersection)
        return Retval
def __new__(metaclass,classname,baseclasses,classdict):
    """Create a Relation subclass.

    For classes derived from the (unseen) AbstractRelation base this
    creates the sqlite join table between entity classes ``a`` and ``b``
    (with uniqueness constraints matching relation_type) and installs
    getX/addX accessors plus reltype/relclass/joins metadata on both
    entity classes.

    BUGFIXES vs. original:
      * the final ``return`` was split from ``type.__new__(...)`` so the
        metaclass returned None instead of the new class;
      * the second issubclass guard tested ``a`` twice although its
        message reported 'b not an AbstractEntity'.
    """
    # print(classdict)
    def connect(cls):
        """create a thread local connection if there isn't one yet"""
        #print('@@@@@@@@ connect',cls,cls._database,'thread',threading.current_thread().ident)
        if not hasattr(cls._local,'conn'):
            cls._local.conn=sqlite.connect(cls._database)
            cls._local.conn.execute('pragma foreign_keys = 1')
            cls._local.conn.row_factory = sqlite.Row
        #print(cls._local.conn)
        return cls._local.conn

    def get(self,cls):
        # generic dispatcher: self.get(SomeEntity) -> self.getSomeEntity()
        return getattr(self,'get'+cls.__name__)()

    def getclass(self,cls,relname):
        """Fetch all *cls* instances related to *self* via table *relname*."""
        clsname = cls.__name__
        sql = 'select %s_id from %s where %s_id = ?'%(clsname,relname,self.__class__.__name__)
        sqllogger.debug(sql+str(self.id))
        result = 'no result'
        with self._connect() as conn:
            #print("\n".join(conn.iterdump()))
            cursor=conn.cursor()
            cursor.execute(sql,(self.id,))
            result = [cls(id=r[0]) for r in cursor]
            # print(result)
        return result

    def add(self,entity):
        # generic dispatcher: self.add(other) -> self.addOtherClass(other)
        return getattr(self,'add'+entity.__class__.__name__)(entity)

    def addclass(self,entity,Entity,relname,reltype='N:1'):
        """Link *entity* (which must be an *Entity*) to *self* in *relname*."""
        if not entity.__class__ == Entity :
            raise TypeError('entity not of the required class')
        sql = 'insert or replace into %(rel)s (%(a)s_id,%(b)s_id) values (?,?)'%{'rel':relname,'a':self.__class__.__name__,'b':entity.__class__.__name__}
        sqllogger.debug(sql+str(self.id)+str(entity.id))
        with self._connect() as conn:
            cursor = conn.cursor()
            cursor.execute(sql,(self.id,entity.id))

    relationdefinition = False
    if len(baseclasses):
        # these test ensure we only take special actions for
        # classes derived from Relation ( we cannot check that directly
        # because forward references are not allowed
        if not 'database' in classdict and not '_database' in classdict:
            classdict['_database']=MetaRelation.findattr(baseclasses,'database')
            if classdict['_database'] is None:
                raise AttributeError('subclass of AbstractRelation has no database class variable')
            relationdefinition=True
        # copy reference to thread local storage
        if not '_local' in classdict:
            classdict['_local']=MetaRelation.findattr(baseclasses,'_local')
        classdict['_connect']=classmethod(connect)
        if relationdefinition or '_meta' in classdict:
            a = classdict['a']
            b = classdict['b']
            r = '1:N'
            if 'relation_type' in classdict:
                r = classdict['relation_type']
            if not r in ('N:1','1:N','N:N'):
                raise KeyError("unknown relation_type %s"%r)
            classdict['relation_type'] = r
            if not issubclass(a,AbstractEntity) :
                raise TypeError('a not an AbstractEntity')
            # BUGFIX: must validate b, not a again.
            if not issubclass(b,AbstractEntity) :
                raise TypeError('b not an AbstractEntity')
            # for one-sided relations, make the "one" side unique
            runique = ''
            if r == 'N:1' :
                runique = ' ,unique(%s_id)'%a.__name__
            if r == '1:N' :
                runique = ' ,unique(%s_id)'%b.__name__
            sql = 'create table if not exists %(rel)s ( %(a)s_id references %(a)s on delete cascade, %(b)s_id references %(b)s on delete cascade, unique(%(a)s_id,%(b)s_id)%(ru)s)'%{'rel':classname,'a':a.__name__,'b':b.__name__,'ru':runique}
            # we cannot use connect yet
            conn = sqlite.connect(classdict['_database'])
            sqllogger.debug(sql)
            conn.execute(sql)
            conn.commit()
            conn.close()
            # install typed accessors plus generic get/add dispatchers on
            # both entity classes (closures capture b/classname safely: this
            # body runs once per relation class)
            setattr(a,'get'+b.__name__,lambda self:getclass(self,b,classname))
            setattr(a,'get',get)
            setattr(b,'get'+a.__name__,lambda self:getclass(self,a,classname))
            setattr(b,'get',get)
            setattr(a,'add'+b.__name__,lambda self,entity:addclass(self,entity,b,classname))
            setattr(a,'add',add)
            setattr(b,'add'+a.__name__,lambda self,entity:addclass(self,entity,a,classname))
            setattr(b,'add',add)
            # record the relation type as seen from each side
            reltypes = getattr(a,'reltype',{})
            reltypes[b.__name__]=r
            setattr(a,'reltype',reltypes)
            reltypes = getattr(b,'reltype',{})
            reltypes[a.__name__]={'1:N':'N:1','N:N':'N:N','N:1':'1:N'}[r]
            setattr(b,'reltype',reltypes)
            relclasses = getattr(a,'relclass',{})
            relclasses[b.__name__]=b
            setattr(a,'relclass',relclasses)
            relclasses = getattr(b,'relclass',{})
            relclasses[a.__name__]=a
            setattr(b,'relclass',relclasses)
            joins = getattr(a,'joins',{})
            joins[b.__name__]=classname
            setattr(a,'joins',joins)
            joins = getattr(b,'joins',{})
            joins[a.__name__]=classname
            setattr(b,'joins',joins)
    # BUGFIX: the original had a bare ``return`` here with the expression on
    # the following line, so __new__ returned None and never built the class.
    return type.__new__(metaclass,classname,baseclasses,classdict)
def clslevel(name):
    """Build a classmethod that forwards *name* to the ``Session`` class.

    The returned descriptor ignores the class it is attached to and simply
    calls ``Session.<name>(*args, **kwargs)``, so Session-level operations
    can be exposed on other classes.
    """
    def forward(cls, *args, **kwargs):
        target = getattr(Session, name)
        return target(*args, **kwargs)
    return classmethod(forward)
def _process_class(
    _cls: Type[Any],
    init: bool,
    repr: bool,
    eq: bool,
    order: bool,
    unsafe_hash: bool,
    frozen: bool,
    config: Optional[Type[Any]],
) -> 'DataclassType':
    """Turn *_cls* into a validating (pydantic-backed) dataclass.

    Wraps/installs ``__post_init__`` so that field values are validated
    against a generated pydantic model after stdlib dataclass init runs,
    builds ``__pydantic_model__`` from the dataclass fields, and installs
    the pydantic validator hooks.  The original ``__post_init__`` (if any)
    still runs first; ``__post_init_post_parse__`` runs after validation.

    NOTE(review): relies on the private ``dataclasses._process_class`` —
    fragile across CPython versions.
    """
    import dataclasses

    # Find any user-supplied __post_init__, ignoring our own wrapper if this
    # class was already processed once.
    post_init_original = getattr(_cls, '__post_init__', None)
    if post_init_original and post_init_original.__name__ == '_pydantic_post_init':
        post_init_original = None
    if not post_init_original:
        post_init_original = getattr(_cls, '__post_init_original__', None)

    post_init_post_parse = getattr(_cls, '__post_init_post_parse__', None)

    def _pydantic_post_init(self: 'DataclassType', *initvars: Any) -> None:
        if post_init_original is not None:
            post_init_original(self, *initvars)
        d, _, validation_error = validate_model(self.__pydantic_model__, self.__dict__, cls=self.__class__)
        if validation_error:
            raise validation_error
        # Replace __dict__ wholesale so frozen dataclasses also get the
        # validated values.
        object.__setattr__(self, '__dict__', d)
        object.__setattr__(self, '__initialised__', True)
        if post_init_post_parse is not None:
            post_init_post_parse(self, *initvars)

    _cls.__post_init__ = _pydantic_post_init
    cls = dataclasses._process_class(_cls, init, repr, eq, order, unsafe_hash, frozen)  # type: ignore

    fields: Dict[str, Any] = {}
    for field in dataclasses.fields(cls):
        # BUGFIX: compare against the MISSING sentinel by identity. ``!=``
        # invokes the default value's __eq__, which may raise or return a
        # non-bool for types like numpy arrays.
        if field.default is not dataclasses.MISSING:
            field_value = field.default
        # mypy issue 7020 and 708
        elif field.default_factory is not dataclasses.MISSING:  # type: ignore
            field_value = field.default_factory()  # type: ignore
        else:
            field_value = Required
        fields[field.name] = (field.type, field_value)

    validators = gather_all_validators(cls)
    cls.__pydantic_model__ = create_model(cls.__name__, __config__=config, __module__=_cls.__module__, __validators__=validators, **fields)

    cls.__initialised__ = False
    cls.__validate__ = classmethod(_validate_dataclass)
    cls.__get_validators__ = classmethod(_get_validators)
    if post_init_original:
        cls.__post_init_original__ = post_init_original

    if cls.__pydantic_model__.__config__.validate_assignment and not frozen:
        cls.__setattr__ = setattr_validate_assignment

    return cls
class PrivateCertificate(Certificate):
    """
    An x509 certificate and private key.
    """
    def __repr__(self):
        return Certificate.__repr__(self) + ' with ' + repr(self.privateKey)

    def _setPrivateKey(self, privateKey):
        # Refuse keys that don't match this certificate's public key;
        # returns self so loaders can chain.
        if not privateKey.matches(self.getPublicKey()):
            raise VerifyError(
                "Certificate public and private keys do not match.")
        self.privateKey = privateKey
        return self

    def newCertificate(self, newCertData, format=crypto.FILETYPE_ASN1):
        """
        Create a new L{PrivateCertificate} from the given certificate data
        and this instance's private key.
        """
        return self.load(newCertData, self.privateKey, format)

    def load(Class, data, privateKey, format=crypto.FILETYPE_ASN1):
        # Load certificate data, then attach (and verify) the private key.
        return Class._load(data, format)._setPrivateKey(privateKey)
    load = classmethod(load)

    def inspect(self):
        # Human-readable dump of both certificate and key parts.
        return '\n'.join(
            [Certificate._inspect(self), self.privateKey.inspect()])

    def dumpPEM(self):
        """
        Dump both public and private parts of a private certificate to
        PEM-format data.
        """
        return self.dump(crypto.FILETYPE_PEM) + self.privateKey.dump(
            crypto.FILETYPE_PEM)

    def loadPEM(Class, data):
        """
        Load both private and public parts of a private certificate from a
        chunk of PEM-format data.
        """
        return Class.load(data, KeyPair.load(data, crypto.FILETYPE_PEM),
                          crypto.FILETYPE_PEM)
    loadPEM = classmethod(loadPEM)

    def fromCertificateAndKeyPair(Class, certificateInstance, privateKey):
        # Alternate constructor pairing an existing Certificate with a key.
        privcert = Class(certificateInstance.original)
        return privcert._setPrivateKey(privateKey)
    fromCertificateAndKeyPair = classmethod(fromCertificateAndKeyPair)

    def options(self, *authorities):
        # Build OpenSSL context options; passing authorities enables peer
        # certificate verification against those CA certs.
        options = dict(privateKey=self.privateKey.original,
                       certificate=self.original)
        if authorities:
            options.update(
                dict(verify=True,
                     requireCertificate=True,
                     caCerts=[auth.original for auth in authorities]))
        return OpenSSLCertificateOptions(**options)

    def certificateRequest(self, format=crypto.FILETYPE_ASN1,
                           digestAlgorithm='md5'):
        # NOTE(review): md5 is the historical default here and is
        # cryptographically weak — callers should pass a stronger digest.
        return self.privateKey.certificateRequest(self.getSubject(), format,
                                                  digestAlgorithm)

    def signCertificateRequest(self, requestData, verifyDNCallback,
                               serialNumber,
                               requestFormat=crypto.FILETYPE_ASN1,
                               certificateFormat=crypto.FILETYPE_ASN1):
        # Delegate to the private key, signing with this cert's subject as
        # the issuer DN.
        issuer = self.getSubject()
        return self.privateKey.signCertificateRequest(issuer, requestData,
                                                      verifyDNCallback,
                                                      serialNumber,
                                                      requestFormat,
                                                      certificateFormat)

    def signRequestObject(
            self,
            certificateRequest,
            serialNumber,
            secondsToExpiry=60 * 60 * 24 * 365,  # One year
            digestAlgorithm='md5'):
        return self.privateKey.signRequestObject(self.getSubject(),
                                                 certificateRequest,
                                                 serialNumber,
                                                 secondsToExpiry,
                                                 digestAlgorithm)
class Sensing_Light(tinyos.message.Message.Message):
    """Generated accessor wrapper for the <Sensing_Light> AM message (6 bytes).

    Field layout (unsigned, 16 bits each):
      seqno  @ bits  0..15
      sender @ bits 16..31
      light  @ bits 32..47

    BUGFIX vs. original: the generated byte offset/size helpers used ``/``,
    which yields floats under Python 3 true division; ``//`` keeps them ints.
    """

    # Create a new Sensing_Light of size 6.
    def __init__(self, data="", addr=None, gid=None, base_offset=0, data_length=6):
        tinyos.message.Message.Message.__init__(self, data, addr, gid, base_offset, data_length)
        self.amTypeSet(AM_TYPE)

    # Get AM_TYPE
    def get_amType(cls):
        return AM_TYPE
    get_amType = classmethod(get_amType)

    def __str__(self):
        """Return a String representation of this message. Includes the
        message type name and the non-indexed field values."""
        s = "Message <Sensing_Light> \n"
        # Each accessor may fail on short/absent data; skip that field.
        # (Narrowed from a bare ``except:`` so KeyboardInterrupt etc. pass.)
        try:
            s += " [seqno=0x%x]\n" % (self.get_seqno())
        except Exception:
            pass
        try:
            s += " [sender=0x%x]\n" % (self.get_sender())
        except Exception:
            pass
        try:
            s += " [light=0x%x]\n" % (self.get_light())
        except Exception:
            pass
        return s

    # Message-type-specific access methods appear below.

    #
    # Accessor methods for field: seqno
    #   Field type: int, unsigned; Offset (bits): 0; Size (bits): 16
    #

    def isSigned_seqno(self):
        """Return whether the field 'seqno' is signed (False)."""
        return False

    def isArray_seqno(self):
        """Return whether the field 'seqno' is an array (False)."""
        return False

    def offset_seqno(self):
        """Return the offset (in bytes) of the field 'seqno'."""
        return (0 // 8)

    def offsetBits_seqno(self):
        """Return the offset (in bits) of the field 'seqno'."""
        return 0

    def get_seqno(self):
        """Return the value (as an int) of the field 'seqno'."""
        return self.getUIntElement(self.offsetBits_seqno(), 16, 1)

    def set_seqno(self, value):
        """Set the value of the field 'seqno'."""
        self.setUIntElement(self.offsetBits_seqno(), 16, value, 1)

    def size_seqno(self):
        """Return the size, in bytes, of the field 'seqno'."""
        return (16 // 8)

    def sizeBits_seqno(self):
        """Return the size, in bits, of the field 'seqno'."""
        return 16

    #
    # Accessor methods for field: sender
    #   Field type: int, unsigned; Offset (bits): 16; Size (bits): 16
    #

    def isSigned_sender(self):
        """Return whether the field 'sender' is signed (False)."""
        return False

    def isArray_sender(self):
        """Return whether the field 'sender' is an array (False)."""
        return False

    def offset_sender(self):
        """Return the offset (in bytes) of the field 'sender'."""
        return (16 // 8)

    def offsetBits_sender(self):
        """Return the offset (in bits) of the field 'sender'."""
        return 16

    def get_sender(self):
        """Return the value (as an int) of the field 'sender'."""
        return self.getUIntElement(self.offsetBits_sender(), 16, 1)

    def set_sender(self, value):
        """Set the value of the field 'sender'."""
        self.setUIntElement(self.offsetBits_sender(), 16, value, 1)

    def size_sender(self):
        """Return the size, in bytes, of the field 'sender'."""
        return (16 // 8)

    def sizeBits_sender(self):
        """Return the size, in bits, of the field 'sender'."""
        return 16

    #
    # Accessor methods for field: light
    #   Field type: int, unsigned; Offset (bits): 32; Size (bits): 16
    #

    def isSigned_light(self):
        """Return whether the field 'light' is signed (False)."""
        return False

    def isArray_light(self):
        """Return whether the field 'light' is an array (False)."""
        return False

    def offset_light(self):
        """Return the offset (in bytes) of the field 'light'."""
        return (32 // 8)

    def offsetBits_light(self):
        """Return the offset (in bits) of the field 'light'."""
        return 32

    def get_light(self):
        """Return the value (as an int) of the field 'light'."""
        return self.getUIntElement(self.offsetBits_light(), 16, 1)

    def set_light(self, value):
        """Set the value of the field 'light'."""
        self.setUIntElement(self.offsetBits_light(), 16, value, 1)

    def size_light(self):
        """Return the size, in bytes, of the field 'light'."""
        return (16 // 8)

    def sizeBits_light(self):
        """Return the size, in bits, of the field 'light'."""
        return 16
class Exposure(metaclass=TemplateMeta):
    """Python-side mixin methods attached to the templated Exposure types
    (ExposureF, ExposureD, ...) via TemplateMeta.
    """

    def _set(self, index, value, origin):
        """Set the pixel at the given index to a triple (value, mask, variance).

        Parameters
        ----------
        index : `geom.Point2I`
            Position of the pixel to assign to.
        value : `tuple`
            A tuple of (value, mask, variance) scalars.
        origin : `ImageOrigin`
            Coordinate system of ``index`` (`PARENT` or `LOCAL`).
        """
        self.maskedImage._set(index, value=value, origin=origin)

    def _get(self, index, origin):
        """Return a triple (value, mask, variance) at the given index.

        Parameters
        ----------
        index : `geom.Point2I`
            Position of the pixel to assign to.
        origin : `ImageOrigin`
            Coordinate system of ``index`` (`PARENT` or `LOCAL`).
        """
        return self.maskedImage._get(index, origin=origin)

    def __reduce__(self):
        # Pickle support: serialise through the FITS representation.
        from lsst.afw.fits import reduceToFits
        return reduceToFits(self)

    def convertF(self):
        # Deep-copying conversion to single-precision pixels.
        return ExposureF(self, deep=True)

    def convertD(self):
        # Deep-copying conversion to double-precision pixels.
        return ExposureD(self, deep=True)

    def getImage(self):
        return self.maskedImage.image

    def setImage(self, image):
        self.maskedImage.image = image

    # Expose the maskedImage planes as read/write properties so that
    # exposure.image etc. delegate to exposure.maskedImage.
    image = property(getImage, setImage)

    def getMask(self):
        return self.maskedImage.mask

    def setMask(self, mask):
        self.maskedImage.mask = mask

    mask = property(getMask, setMask)

    def getVariance(self):
        return self.maskedImage.variance

    def setVariance(self, variance):
        self.maskedImage.variance = variance

    variance = property(getVariance, setVariance)

    # I/O entry points borrowed from module-level helpers: reading is a
    # classmethod (it constructs a new Exposure), writing is an instance
    # method — the asymmetry is intentional.
    readFitsWithOptions = classmethod(imageReadFitsWithOptions)

    writeFitsWithOptions = exposureWriteFitsWithOptions
class KeyPair(PublicKey):
    """An RSA/DSA key pair wrapping a pyOpenSSL PKey, with helpers for
    generation, (de)serialisation, and signing certificate requests.
    """

    def load(Class, data, format=crypto.FILETYPE_ASN1):
        # Alternate constructor from serialised private-key data.
        return Class(crypto.load_privatekey(format, data))
    load = classmethod(load)

    def dump(self, format=crypto.FILETYPE_ASN1):
        return crypto.dump_privatekey(format, self.original)

    def __getstate__(self):
        # Pickle as the ASN.1 private-key blob.
        return self.dump()

    def __setstate__(self, state):
        self.__init__(crypto.load_privatekey(crypto.FILETYPE_ASN1, state))

    def inspect(self):
        # Human-readable summary: bits, algorithm, and key hash.
        t = self.original.type()
        if t == crypto.TYPE_RSA:
            ts = 'RSA'
        elif t == crypto.TYPE_DSA:
            ts = 'DSA'
        else:
            ts = '(Unknown Type!)'
        L = (self.original.bits(), ts, self.keyHash())
        return '%s-bit %s Key Pair with Hash: %s' % L

    def generate(Class, kind=crypto.TYPE_RSA, size=1024):
        # NOTE(review): 1024-bit RSA is the historical default and is weak
        # by modern standards — callers should pass a larger size.
        pkey = crypto.PKey()
        pkey.generate_key(kind, size)
        return Class(pkey)

    def newCertificate(self, newCertData, format=crypto.FILETYPE_ASN1):
        # Pair this key with fresh certificate data.
        return PrivateCertificate.load(newCertData, self, format)
    generate = classmethod(generate)

    def requestObject(self, distinguishedName, digestAlgorithm='md5'):
        # Build and self-sign an X509 certificate request for this key.
        # NOTE(review): md5 default digest is cryptographically weak.
        req = crypto.X509Req()
        req.set_pubkey(self.original)
        distinguishedName._copyInto(req.get_subject())
        req.sign(self.original, digestAlgorithm)
        return CertificateRequest(req)

    def certificateRequest(self, distinguishedName,
                           format=crypto.FILETYPE_ASN1,
                           digestAlgorithm='md5'):
        """Create a certificate request signed with this key.

        @return: a string, formatted according to the 'format' argument.
        """
        return self.requestObject(distinguishedName,
                                  digestAlgorithm).dump(format)

    def signCertificateRequest(
            self,
            issuerDistinguishedName,
            requestData,
            verifyDNCallback,
            serialNumber,
            requestFormat=crypto.FILETYPE_ASN1,
            certificateFormat=crypto.FILETYPE_ASN1,
            secondsToExpiry=60 * 60 * 24 * 365,  # One year
            digestAlgorithm='md5'):
        """
        Given a blob of certificate request data and a certificate authority's
        DistinguishedName, return a blob of signed certificate data.

        If verifyDNCallback returns a Deferred, I will return a Deferred which
        fires the data when that Deferred has completed.
        """
        hlreq = CertificateRequest.load(requestData, requestFormat)

        dn = hlreq.getSubject()
        vval = verifyDNCallback(dn)

        def verified(value):
            # A falsy callback result means the request DN was rejected.
            if not value:
                raise VerifyError("DN callback %r rejected request DN %r" %
                                  (verifyDNCallback, dn))
            return self.signRequestObject(
                issuerDistinguishedName, hlreq, serialNumber,
                secondsToExpiry, digestAlgorithm).dump(certificateFormat)

        if isinstance(vval, Deferred):
            return vval.addCallback(verified)
        else:
            return verified(vval)

    def signRequestObject(
            self,
            issuerDistinguishedName,
            requestObject,
            serialNumber,
            secondsToExpiry=60 * 60 * 24 * 365,  # One year
            digestAlgorithm='md5'):
        """
        Sign a CertificateRequest instance, returning a Certificate instance.
        """
        req = requestObject.original
        dn = requestObject.getSubject()
        cert = crypto.X509()
        issuerDistinguishedName._copyInto(cert.get_issuer())
        cert.set_subject(req.get_subject())
        cert.set_pubkey(req.get_pubkey())
        # Valid from now until secondsToExpiry from now.
        cert.gmtime_adj_notBefore(0)
        cert.gmtime_adj_notAfter(secondsToExpiry)
        cert.set_serial_number(serialNumber)
        cert.sign(self.original, digestAlgorithm)
        return Certificate(cert)

    def selfSignedCert(self, serialNumber, **kw):
        # Convenience: build a DN from keyword args and self-sign it.
        dn = DN(**kw)
        return PrivateCertificate.fromCertificateAndKeyPair(
            self.signRequestObject(dn, self.requestObject(dn), serialNumber),
            self)
    # NOTE(review): this is the Python-3 branch of an ``if PY3:`` whose
    # header is above this chunk; ``d``/``a`` are registries of picklable /
    # unpicklable type exemplars.
    d['CellType'] = (_lambda)(0).__closure__[0]
    a['XRangeType'] = _xrange = range(1)
else:
    # Python 2 spellings of the same objects.
    d['CellType'] = (_lambda)(0).func_closure[0]
    a['XRangeType'] = _xrange = xrange(1)
# descriptor types reachable from ``type`` itself
d['MethodDescriptorType'] = type.__dict__['mro']
d['WrapperDescriptorType'] = type.__repr__
a['WrapperDescriptorType2'] = type.__dict__['__module__']
# built-in functions (CH 2)
if PY3:
    _methodwrap = (1).__lt__
else:
    _methodwrap = (1).__cmp__
d['MethodWrapperType'] = _methodwrap
a['StaticMethodType'] = staticmethod(_method)
a['ClassMethodType'] = classmethod(_method)
a['PropertyType'] = property()
d['SuperType'] = super(Exception, _exception)
# string services (CH 7)
if PY3:
    _in = _bytes
else:
    _in = _str
a['InputType'] = _cstrI = StringIO(_in)
a['OutputType'] = _cstrO = StringIO()
# data types (CH 8)
a['WeakKeyDictionaryType'] = weakref.WeakKeyDictionary()
a['WeakValueDictionaryType'] = weakref.WeakValueDictionary()
a['ReferenceType'] = weakref.ref(_instance)
# a ref whose target class instance has already been collected
a['DeadReferenceType'] = weakref.ref(_class())
a['ProxyType'] = weakref.proxy(_instance)
class Certificate(CertBase):
    """
    An x509 certificate.
    """
    # NOTE(review): __eq__ is defined without __hash__, so under Python 3
    # instances are unhashable (cannot be dict keys / set members) — confirm
    # whether that is intended before relying on it.
    def __repr__(self):
        return '<%s Subject=%s Issuer=%s>' % (self.__class__.__name__,
                                              self.getSubject().commonName,
                                              self.getIssuer().commonName)

    def __eq__(self, other):
        # Equality is byte-equality of the serialised certificates.
        if isinstance(other, Certificate):
            return self.dump() == other.dump()
        return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def load(Class, requestData, format=crypto.FILETYPE_ASN1, args=()):
        """
        Load a certificate from an ASN.1- or PEM-format string.

        @rtype: C{Class}
        """
        return Class(crypto.load_certificate(format, requestData), *args)
    load = classmethod(load)
    # Subclasses override load(); _load keeps access to this base version.
    _load = load

    def dumpPEM(self):
        """
        Dump this certificate to a PEM-format data string.

        @rtype: C{str}
        """
        return self.dump(crypto.FILETYPE_PEM)

    def loadPEM(Class, data):
        """
        Load a certificate from a PEM-format data string.

        @rtype: C{Class}
        """
        return Class.load(data, crypto.FILETYPE_PEM)
    loadPEM = classmethod(loadPEM)

    def peerFromTransport(Class, transport):
        """
        Get the certificate for the remote end of the given transport.

        @type: L{ISystemHandle}
        @rtype: C{Class}

        @raise: L{CertificateError}, if the given transport does not have a
            peer certificate.
        """
        return _handleattrhelper(Class, transport, 'peer')
    peerFromTransport = classmethod(peerFromTransport)

    def hostFromTransport(Class, transport):
        """
        Get the certificate for the local end of the given transport.

        @param transport: an L{ISystemHandle} provider; the transport we will
            examine.

        @rtype: C{Class}

        @raise: L{CertificateError}, if the given transport does not have a
            host certificate.
        """
        return _handleattrhelper(Class, transport, 'host')
    hostFromTransport = classmethod(hostFromTransport)

    def getPublicKey(self):
        """
        Get the public key for this certificate.

        @rtype: L{PublicKey}
        """
        return PublicKey(self.original.get_pubkey())

    def dump(self, format=crypto.FILETYPE_ASN1):
        return crypto.dump_certificate(format, self.original)

    def serialNumber(self):
        """
        Retrieve the serial number of this certificate.

        @rtype: C{int}
        """
        return self.original.get_serial_number()

    def digest(self, method='md5'):
        """
        Return a digest hash of this certificate using the specified hash
        algorithm.

        @param method: One of C{'md5'} or C{'sha'}.
        @rtype: C{str}
        """
        # NOTE(review): md5 default is cryptographically weak; prefer sha.
        return self.original.digest(method)

    def _inspect(self):
        return '\n'.join([
            'Certificate For Subject:',
            self.getSubject().inspect(),
            '\nIssuer:',
            self.getIssuer().inspect(),
            '\nSerial Number: %d' % self.serialNumber(),
            'Digest: %s' % self.digest()
        ])

    def inspect(self):
        """
        Return a multi-line, human-readable representation of this
        Certificate, including information about the subject, issuer, and
        public key.
        """
        return '\n'.join((self._inspect(), self.getPublicKey().inspect()))

    def getIssuer(self):
        """
        Retrieve the issuer of this certificate.

        @rtype: L{DistinguishedName}
        @return: A copy of the issuer of this certificate.
        """
        return self._copyName('issuer')

    def options(self, *authorities):
        raise NotImplementedError('Possible, but doubtful we need this yet')
def create_command(command: Dict[str, Any]) -> Type[Command]:
    """
    From a given dictionary, generates the Command class.

    The dictionary must carry ``'name'`` and ``'id'`` fields and may carry
    ``'next-allowed'``, ``'generator'`` and parameter entries ``'p1'``
    through ``'p7'``.

    @raise TypeError: if a required field is missing, or a parameter's
        value type is unsupported.
    """
    try:
        name = command['name']
    except KeyError as exc:
        msg = "missing 'name' field of Command"
        raise TypeError(msg) from exc
    try:
        # Avoid shadowing the builtin `id`.
        command_id = command['id']
    except KeyError as exc:
        msg = "missing 'id' field of Command"
        raise TypeError(msg) from exc

    next_allowed = command.get('next-allowed')
    if next_allowed is None:
        # '*' means any command may follow this one.
        next_allowed = ['*']
    generator = command.get('generator')

    parameters = {}  # type: Dict[str, Union[int, Parameter]]
    for i in range(1, 8):
        p = 'p{}'.format(i)
        if p in command:
            parameters['param_{}'.format(i)] = _parse_parameter(p, command[p])
        else:
            # 0 marks an unused parameter slot.
            parameters['param_{}'.format(i)] = 0

    def to_message(self):
        # Build a CommandLong: unused slots pass through as 0, declared
        # parameters are looked up on the command instance by name.
        params = {}
        for slot, param in parameters.items():
            if not param:
                params[slot] = param
            else:
                params[slot] = self[param.name]
        return CommandLong(0, 0, command_id, **params)

    ns = {
        'name': name,
        'to_message': to_message,
        'parameters': [p for p in parameters.values() if p],
        'specifications': [Idle],
        'uid': 'factory.{}'.format(name),
        'next_allowed': next_allowed
    }
    C = CommandMeta(name, (Command, ), ns)

    if generator == 'circle_based_generator':
        setattr(C, 'generate', classmethod(circle_based_generator))

    logger.info("Command class generated: %s", C)
    return C


def _parse_parameter(p: str, spec: Dict[str, Any]) -> 'Parameter':
    """
    Build a L{Parameter} from one ``'pN'`` entry of a command dictionary.

    @raise TypeError: if the entry is missing required fields or declares
        an unsupported value type.
    """
    try:
        p_name = spec['name']
    except KeyError as exc:
        msg = "missing 'name' field of Command parameter {}".format(p)
        raise TypeError(msg) from exc
    try:
        typ = spec['value']['type']
    except KeyError as exc:
        msg = "missing 'value' or 'type' field of Command parameter {}"
        msg = msg.format(p)
        raise TypeError(msg) from exc

    if typ == 'discrete':
        vals = spec['value']['vals']
        return Parameter(p_name, DiscreteValueRange(vals))
    if typ == 'continuous':
        min_value = spec['value']['min']
        max_value = spec['value']['max']
        return Parameter(
            p_name, ContinuousValueRange(min_value, max_value, True))
    # Was a bare `Exception`; TypeError matches the other validation
    # failures and is still caught by any `except Exception` caller.
    raise TypeError("The type of value {} is not supported".format(typ))
from flask.ext.sqlalchemy import SQLAlchemy
import simplejson
from sqlalchemy.types import TypeDecorator, VARCHAR
from sqlalchemy.orm.exc import NoResultFound

from . import app
from . import common

db = SQLAlchemy(app)
# Attach a convenience classmethod to every model: iterate the names of the
# model's table columns.  NOTE(review): `_data.iterkeys()` is a Python-2-only
# API on a private attribute — confirm before porting to Python 3.
db.Model.itercolumns = classmethod(lambda cls: cls.__table__.columns._data.iterkeys())


class JSONType(TypeDecorator):
    """Column type that stores a JSON-serializable Python value in a VARCHAR
    column, encoding with simplejson on write and decoding on read."""

    impl = VARCHAR

    def process_bind_param(self, value, dialect):
        # Serialize to a JSON string before it is written; None stays NULL.
        if value is not None:
            value = simplejson.dumps(value)
        return value

    def process_result_value(self, value, dialect):
        # Decode the stored JSON string back to Python; NULL comes back None.
        if value is not None:
            value = simplejson.loads(value)
        return value


class ChessDotComUser(db.Model):
    id = db.Column(db.Integer, primary_key=True)