def import_model(cls, classpath):
    # Accept either a list of path components or a dotted classpath string.
    if isinstance(classpath, list):
        imported_class = import_helper('.'.join(classpath[0:-1]), classpath[-1])
    elif isinstance(classpath, (str, unicode)):
        imported_class = import_helper(classpath)
    else:
        return False
    return imported_class
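# Usage sketch (hedged): import_model accepts either a dotted classpath string or a list of
# path components. The model path and owning class below are hypothetical and only illustrate
# the two accepted forms; anything else falls through and returns False.
#
#   model_cls = SomeKind.import_model('ProvidenceClarity.data.core.SomeModel')
#   model_cls = SomeKind.import_model(['ProvidenceClarity', 'data', 'core', 'SomeModel'])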
def __getattribute__(self, name):
    soop = super(PCAPIProxy, self)
    _api_map = soop.__getattribute__('_api_map')
    _api_dispatch = soop.__getattribute__('_api_dispatch')
    if name not in _api_map:
        if not name[0] == '_':
            raise exceptions.APINotImplemented('Could not load API controller for given name "%s".' % name)
        else:
            return super(PCAPIProxy, self).__getattribute__(name)
    else:
        if name in _api_dispatch:
            return _api_dispatch[name].__get__(self, super(PCAPIProxy, self).__getattribute__('__class__'))
        else:
            try:
                api_object, props = import_helper(['ProvidenceClarity', 'api', name], ['_controller'])
                _api_dispatch[name] = PCControllerProxy(name, props['dict']['_controller'])
                return _api_dispatch[name].__get__(self, super(PCAPIProxy, self).__getattribute__('__class__'))
            except ImportError:
                raise exceptions.APIInvalid('Could not load valid API controller for given name "%s", despite being found in API map.' % name)
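# Dispatch sketch (hedged): plain attribute access on the API proxy is what triggers the lazy
# import above. Assuming a platform instance `p` and a controller name 'data' present in
# _api_map (both hypothetical), the first access imports ProvidenceClarity.api.data and caches
# a PCControllerProxy; later accesses are served straight from _api_dispatch.
#
#   controller = p.api.data    # first access: import + cache in _api_dispatch
#   controller = p.api.data    # subsequent access: cached proxy, no import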
def getAndValidate(cls, request, for_use=True):
    path_t = request.path.split('/')
    d = DataReceiver.get_by_key_name('.'.join(path_t[path_t.index('receiver'):len(path_t)]))
    if d is not None:
        if d.enabled != True:
            raise exceptions.ReceiverDisabled()
        else:
            cls.model = d  # classmethod: store on the class (there is no `self` here)
            data_handler = None
            if d.data_handler is not None:
                mod, prop = import_helper(d.data_handler[0:-1], d.data_handler[-1])
                if issubclass(prop, DataReceiver):
                    data_handler = prop
                else:
                    raise exceptions.InvalidDataHandler()
            if for_use:
                return (d, data_handler)
            else:
                return True
    else:
        raise exceptions.ReceiverNotFound()
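# Key-name sketch (hedged): the receiver record is looked up by joining the path components
# from 'receiver' onward with dots. For a hypothetical request path '/v1/receiver/feeds/rss',
# the derived datastore key name would be 'receiver.feeds.rss'.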
def __getattribute__(self, name):
    soop = super(PCExtProxy, self)
    _ext_map = soop.__getattribute__('_ext_points')
    _ext_dispatch = soop.__getattribute__('_ext_dispatch')
    if name not in _ext_map:
        if not name[0] == '_':
            raise exceptions.ExtensionNotImplemented('Could not load extension point for given name "%s".' % name)
        else:
            return super(PCExtProxy, self).__getattribute__(name)
    else:
        if name in _ext_dispatch:
            return _ext_dispatch[name]
        else:
            try:
                package = ['ProvidenceClarity'] + _ext_map[name]
                ext_object, props = import_helper(package[0:-1], package[-1])
                _ext_dispatch[name] = props['dict'][package[-1]]
                return _ext_dispatch[name]
            except ImportError:
                raise exceptions.ExtensionInvalid('Could not load valid extension class for given name "%s", despite being found in EXT map.' % name)
def loadAdapter(cls, name):
    global adapters
    if name in adapters:
        adapter = import_helper(adapters[name])
        if adapter == False:
            raise exceptions.InvalidBackend()
        else:
            ### STOPPED HERE
            pass
def __getattr__(self, name):
    super_obj = super(PCController, self)
    if name in super(PCController, self).__getattribute__('_subcontrollers'):
        api_package_name = super_obj.__getattribute__('__api_package_name__')
        stack = ['ProvidenceClarity', 'api', api_package_name] + super(PCController, self).__getattribute__('_subcontrollers')[name]
        api_object, props = import_helper(stack[0:-1], stack[-1])
        self._subcontroller_dispatch[name] = PCControllerProxy(name, props['dict'][stack[-1]])
        return self._subcontroller_dispatch[name].__get__(self, super(PCController, self).__getattribute__('__class__'))
    else:
        return super_obj.__getattribute__(name)
def loadStubClass(cls, adapter):
    # Accept either a DataBackend key name or a DataBackend entity.
    if isinstance(adapter, basestring):
        _adapter = DataBackend.get_by_key_name(adapter)
    elif isinstance(adapter, DataBackend):
        _adapter = adapter
    else:
        raise exceptions.InvalidBackend()
    stub_class = import_helper(_adapter.model_path)  # use the resolved backend, not the raw argument
    if stub_class == False:
        return False
    else:
        return stub_class
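# Usage sketch (hedged): loadStubClass accepts either a DataBackend key name or a DataBackend
# entity; the owning class, key name, and entity below are hypothetical.
#
#   stub = DataBackend.loadStubClass('bigquery')             # resolved via get_by_key_name
#   stub = DataBackend.loadStubClass(some_backend_entity)    # entity passed through directly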
def __init__(self, config_override=None, *args, **kwargs):

    ## Setup globals
    global version_major
    global version_minor
    global config_module
    global import_item
    global build

    version = {'major': version_major,
               'minor': version_minor,
               'full': str(version_major) + '.' + str(version_minor) + ' ' + str(build),
               'build': build}

    ## Path Inserts
    if '.' not in sys.path:
        sys.path.insert(0, '.')
        sys.path.insert(1, 'lib')
        sys.path.insert(2, 'distlib')

    ## Setup capabilities sets
    _cap = capabilities.CapabilitySet

    ## Setup object proxies
    self.config = c_config.PCConfigProxy()
    self.log = c_log.PCLogProxy()
    self.clock = c_clock.PCClockProxy()
    self.state = c_state.PCStateProxy()
    self.api = c_api.PCAPIProxy()
    self.ext = c_ext.PCExtProxy()

    ## Link up with Platform
    self.api._setPlatformParent(self)
    self.log._setPlatformParent(self)
    self.state._setPlatformParent(self)
    self.clock._setPlatformParent(self)
    self.ext._setPlatformParent(self)
    self.config._setPlatformParent(self)

    ## Setup initial state
    self.state.set('env', os.environ)
    self.state.set('quotas', None)
    self.state.set('namespace', namespace_manager.get_namespace())
    self.state.set('capabilities', {'api': {

        'images': {
            'enabled': _cap('images').is_enabled(),
            'crop': _cap('images', methods=['crop']).is_enabled(),
            'get_serving_url': _cap('images', methods=['get_serving_url']).is_enabled(),
            'resize': _cap('images', methods=['resize']).is_enabled(),
            'rotate': _cap('images', methods=['rotate']).is_enabled()},

        'datastore_v3': {
            'enabled': _cap('datastore_v3').is_enabled(),
            'write': _cap('datastore_v3', capabilities=['write']).is_enabled(),
            'read': _cap('datastore_v3', capabilities=['read']).is_enabled(),
            'put': _cap('datastore_v3', methods=['put']).is_enabled(),
            'delete': _cap('datastore_v3', methods=['delete']).is_enabled(),
            'get': _cap('datastore_v3', methods=['get']).is_enabled(),
            'run_in_transaction': _cap('datastore_v3', methods=['run_in_transaction']).is_enabled(),
            'run_in_transaction_custom_retries': _cap('datastore_v3', methods=['run_in_transaction']).is_enabled()},

        'users': {
            'enabled': _cap('users').is_enabled(),
            'get_current_user': _cap('users', methods=['get_current_user']).is_enabled(),
            'is_current_user_admin': _cap('users', methods=['is_current_user_admin']).is_enabled()},

        'mail': {
            'enabled': _cap('mail').is_enabled(),
            'send_mail': _cap('mail', methods=['send_mail']).is_enabled(),
            'send_mail_to_admins': _cap('mail', methods=['send_mail_to_admins']).is_enabled()},

        'memcache': {
            'enabled': _cap('memcache').is_enabled(),
            'get': _cap('memcache', methods=['get']).is_enabled(),
            'set': _cap('memcache', methods=['set']).is_enabled(),
            'delete': _cap('memcache', methods=['delete']).is_enabled()},

        'oauth': {
            'enabled': _cap('oauth').is_enabled(),
            'get_current_user': _cap('oauth', methods=['get_current_user']).is_enabled(),
            'is_current_user_admin': _cap('oauth', methods=['is_current_user_admin']).is_enabled()},

        'multitenancy': {
            'enabled': _cap('multitenancy').is_enabled(),
            'get_namespace': _cap('multitenancy', methods=['get_namespace']).is_enabled(),
            'set_namespace': _cap('multitenancy', methods=['set_namespace']).is_enabled()},

        'blobstore': {
            'enabled': _cap('blobstore').is_enabled(),
            'get': _cap('blobstore', methods=['get']).is_enabled(),
            'delete': _cap('blobstore', methods=['delete']).is_enabled()},

        'xmpp': {
            'enabled': _cap('xmpp').is_enabled(),
            'send_message': _cap('xmpp', methods=['send_message']).is_enabled(),
            'send_invite': _cap('xmpp', methods=['send_invite']).is_enabled()},

        'urlfetch': {
            'enabled': _cap('urlfetch').is_enabled(),
            'fetch': _cap('urlfetch', methods=['fetch']).is_enabled()}

    }})

    ## Load/resolve Config
    if config_override is None:
        config_mod, props = import_helper(['ProvidenceClarity', 'pc_config'], ['get', 'dump', 'config'])
    elif config_override is False or config_override == '':
        raise exceptions.ConfigRequired()
    else:
        if isinstance(config_override, type(os)):
            config_mod = config_override
        elif isinstance(config_override, basestring):
            config_mod, props = import_helper(config_override, ['get', 'dump', 'config'])
        elif isinstance(config_override, list):
            config_mod, props = import_helper('.'.join(config_override))
        else:
            try:
                config_mod, props = import_helper(['ProvidenceClarity', config_override], ['get', 'dump', 'config'])
            except ImportError:
                raise exceptions.InvalidConfig()

    ## Set configuration
    self.config.setConfig(config_mod)

    ## Environment Vars - Split for Namespace
    software_t = os.environ['SERVER_SOFTWARE'].split('/')
    if software_t[0].lower() == 'development':
        platform = 'Dev'
    else:
        platform = 'Production'

    domain = os.environ['HTTP_HOST'].split(':')[0].split('.')
    if domain[-1] == 'com':
        subdomain = domain[0]
    else:
        subdomain = None

    # :: Namespace is usually extracted from subdomain - set via initialized keyword parameter
    if self.config.get('enable', 'multitenancy', False):

        _m_log = self.config.get('logging', 'multitenancy', False)

        if self.config.get('force_namespace', 'multitenancy', False):
            if isinstance(self.config.get('force_namespace', 'multitenancy', False), (str, unicode)) and self.config.get('force_namespace', 'multitenancy', False) != '':
                if namespace_manager.validate_namespace(self.config.get('force_namespace', 'multitenancy', False)):
                    if _m_log:
                        self.log.info('Setting request namespace to "%s".' % self.config.get('force_namespace', 'multitenancy', False))
                    namespace_manager.set_namespace(self.config.get('force_namespace', 'multitenancy', False))
        else:
            if 'namespace' in kwargs or self.config.get('apps_mode_force', 'multitenancy', False):
                if kwargs['namespace'] == self.config.get('apps_subdomain', 'multitenancy', 'apps') or self.config.get('apps_mode_force', 'multitenancy', False):
                    if _m_log:
                        self.log.info('Setting request namespace to Google Apps domain "%s".' % namespace_manager.google_apps_namespace())
                    namespace_manager.set_namespace(namespace_manager.google_apps_namespace())
                else:
                    if isinstance(kwargs['namespace'], (str, unicode)) and namespace_manager.validate_namespace(kwargs['namespace']):
                        if _m_log:
                            self.log.info('Setting request namespace to split domain "%s".' % kwargs['namespace'])
                        namespace_manager.set_namespace(kwargs['namespace'])
                    else:
                        if kwargs['namespace'] is not None:
                            if _m_log:
                                self.log.info('Given namespace "%s" failed to pass validation. Ignoring.' % str(kwargs['namespace']))

    if 'version' in kwargs:
        self.version = kwargs['version']
    else:
        self.version = version

    super(Platform, self).__init__(*args, **kwargs)
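# Construction sketch (hedged): the Platform constructor resolves its configuration module from
# config_override and an optional request namespace/version from keyword arguments. The module
# path and namespace below are hypothetical and only illustrate the accepted argument shapes.
#
#   p = Platform()                                               # default ProvidenceClarity.pc_config
#   p = Platform(config_override='myapp.pc_config',
#                namespace='tenant-a', version='1.1 beta')       # explicit config + request namespace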