def __init__(self):
    """Configure message delivery.

    Reads the ``delivery_agent`` section of the global configuration:
    in direct mode messages are delivered in-process, otherwise a
    broker publisher is set up (and a missing broker config is fatal).
    """
    conf = Configuration('global').get('delivery_agent')
    self.direct = bool(conf.get('direct', False))
    if self.direct:
        # In-process delivery, no broker involved.
        self.deliver = UserMessageDelivery()
        return
    if not conf.get('broker'):
        raise Exception('Missing broker configuration')
    self.publisher = Publisher(conf['broker'])
def connect_storage():
    """Connect to storage engines.

    Prefers the libev event-loop connection class for the Cassandra
    driver when available, falling back to the driver default.
    """
    try:
        from cassandra.io.libevreactor import LibevConnection
        kwargs = {'connection_class': LibevConnection}
    except ImportError:
        # libev extension not compiled in; use the default reactor.
        kwargs = {}
    # Hoist the configuration lookup instead of rebuilding the
    # Configuration wrapper once per setting.
    config = Configuration('global')
    setup_cassandra(config.get('cassandra.hosts'),
                    config.get('cassandra.keyspace'),
                    config.get('cassandra.consistency_level'),
                    lazy_connect=True,
                    **kwargs)
def main(args=sys.argv):
    """CLI entry point: load configuration and start the broker consumer."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-f', dest='conffile')
    opts = vars(parser.parse_args(args[1:]))
    config_file = opts.pop('conffile')
    Configuration.load(config_file, 'global')
    # Consume messages from the configured broker, handing each one
    # to process_message.
    consumer = Consumer(Configuration('global').get('broker'),
                        process_message)
    consumer.start()
@classmethod
def create(cls, user):
    """Create a new user.

    Validates the parameter object, hashes the password, rejects
    reserved or already-taken names, persists the user and then
    creates the related contact, counters and default tags.

    :param user: user parameter object (validated here)
    :return: the created core user object
    :raises ValueError: if the name is reserved
    :raises Exception: if a user with the same name already exists
    """
    user.validate()
    user.password = bcrypt.hashpw(user.password.encode('utf-8'),
                                  bcrypt.gensalt())
    # NotFound is the *expected* outcome of both lookups below;
    # a successful lookup means the name cannot be used.
    try:
        ReservedName.get(user.name)
        raise ValueError('Reserved user name')
    except NotFound:
        pass
    try:
        UserName.get(user.name)
        raise Exception('User %s already exist' % user.name)
    except NotFound:
        pass
    core = super(User, cls).create(name=user.name,
                                   password=user.password,
                                   params=user.params,
                                   date_insert=datetime.utcnow())
    # Ensure unicity
    UserName.create(name=user.name, user_id=core.user_id)
    if user.contact:
        contact = Contact.create(user=core, contact=user.contact)
        # XXX should use core proxy, not directly model attribute
        core.model.contact_id = contact.contact_id
        core.save()
    # Create counters
    Counter.create(user_id=core.user_id)
    # Create default tags. Guard against a missing/empty
    # 'system.default_tags' configuration entry (get() may return
    # None, which would crash the loop below).
    default_tags = Configuration('global').get('system.default_tags') or []
    for tag in default_tags:
        Tag.create(user_id=core.user_id, **tag)
    return core
class BaseModel(Model):
    """Cassandra base model."""

    __abstract__ = True
    __keyspace__ = Configuration('global').get('cassandra.keyspace')

    @classmethod
    def create(cls, **kwargs):
        """Create a new model record.

        Silently drops keyword arguments that are not declared
        columns of the concrete model.
        """
        kwargs = {key: val for key, val in kwargs.items()
                  if key in cls._columns}
        return super(BaseModel, cls).create(**kwargs)

    @classmethod
    def get(cls, **kwargs):
        """Raise our exception when model not found.

        Translates the driver's DoesNotExist into the application's
        NotFound; any other exception propagates unchanged (the old
        bare ``except: raise`` clause was redundant and was removed).
        """
        try:
            return super(BaseModel, cls).get(**kwargs)
        except DoesNotExist as exc:
            raise NotFound(exc)

    @classmethod
    def filter(cls, **kwargs):
        """Filter storable objects."""
        return cls.objects.filter(**kwargs)

    @classmethod
    def all(cls):
        """Return all storable objects."""
        return cls.objects.all()
def main(args=sys.argv):
    """Admin CLI entry point.

    Builds the argument parser with one sub-command per action
    (import, setup, create_user, shell, dump), loads configuration,
    connects storage and dispatches to the selected action function.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-f', dest='conffile', default='development.ini')
    subparsers = parser.add_subparsers(title="action")

    # Mailbox import.
    sp_import = subparsers.add_parser('import',
                                      help='import existing mailbox')
    sp_import.set_defaults(func=import_email)
    sp_import.add_argument('-f', dest='format',
                           choices=['mbox', 'maildir'], default='mbox')
    sp_import.add_argument('-p', dest='import_path')
    sp_import.add_argument('-e', dest='email')

    # Storage initialization.
    sp_setup = subparsers.add_parser('setup',
                                     help='initialize the storage engine')
    sp_setup.set_defaults(func=setup_storage)

    # User creation.
    sp_user = subparsers.add_parser('create_user', help='Create a new user')
    sp_user.set_defaults(func=create_user)
    sp_user.add_argument('-e', dest='email', help='user email')
    sp_user.add_argument('-p', dest='password', help='password')
    sp_user.add_argument('-g', dest='given_name', help='user given name')
    sp_user.add_argument('-f', dest='family_name', help='user family name')

    # Interactive shell.
    sp_shell = subparsers.add_parser('shell')
    sp_shell.set_defaults(func=shell)

    # Model dump.
    sp_dump = subparsers.add_parser('dump')
    sp_dump.set_defaults(func=dump_model)
    sp_dump.add_argument('-m', dest='model', help='model to dump')
    sp_dump.add_argument('-o', dest='output_path', help='output path')

    opts = vars(parser.parse_args(args[1:]))
    config_uri = opts.pop('conffile')
    func = opts.pop('func')
    Configuration.load(config_uri, 'global')
    connect_storage()
    # Remaining options are the sub-command's own arguments.
    func(**opts)
def main(global_config, **settings):
    """Caliopen entry point for WSGI application.

    Load Caliopen configuration and setup a WSGI application
    with loaded API services.
    """
    # XXX ugly way to init caliopen configuration before pyramid
    caliopen_config = settings['caliopen.config'].split(':')[1]
    Configuration.load(caliopen_config, 'global')

    config = Configurator(settings=settings)
    registry_settings = config.registry.settings
    services = registry_settings.get('caliopen.api.services', []).split('\n')
    route_prefix = settings.get('caliopen.api.route_prefix')
    # Each service is a pyramid include (one per line in the setting).
    for service in services:
        log.info('Loading %s service' % service)
        config.include(service, route_prefix=route_prefix)
    config.end()
    return config.make_wsgi_app()
class IndexUser(object):
    """User index management class."""

    __url__ = Configuration('global').get('elasticsearch.url')

    @classmethod
    def create(cls, user, **kwargs):
        """Create user index.

        :param user: user whose ``user_id`` names the index
        :param kwargs: pass ``delete_existing=True`` to drop and
            recreate an index that already exists
        :return: True if the index was created, False otherwise
        """
        client = Elasticsearch(cls.__url__)
        indice = IndicesClient(client)
        if indice.exists(index=user.user_id):
            # Idiomatic single lookup instead of `in` + subscript.
            if kwargs.get('delete_existing'):
                log.warn('Deleting existing index for user %s'
                         % user.user_id)
                indice.delete(index=user.user_id)
            else:
                log.warn('Index already exists for user %s' % user.user_id)
                return False
        log.info('Creating index for user %s' % user.user_id)
        indice.create(index=user.user_id)
        return True
# # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import os here = os.path.dirname(__file__) from caliopen.base.config import Configuration Configuration.load(os.path.join(here, '..', 'base', 'caliopen.yaml.template'), 'global') # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones.
class BaseIndexDocument(object):
    """Base class for indexed objects."""

    columns = []
    doc_type = None
    __url__ = Configuration('global').get('elasticsearch.url')

    def __init__(self, data):
        """Create a BaseIndexDocument instance from a dict.

        :param data: Elasticsearch result dict.
        :type data: dict
        """
        # XXX : tofix, need to handle better ES result
        if '_source' in data:
            data = data['_source']
        for col in self.columns:
            setattr(self, col, data.get(col, None))

    @classmethod
    def client(cls):
        """Return an elasticsearch client."""
        return Elasticsearch(cls.__url__)

    @classmethod
    def get(cls, user_id, uid):
        """Get an indexed object."""
        res = cls.client().get(index=user_id, doc_type=cls.doc_type, id=uid)
        if res:
            obj = cls(res)
            # XXX : design problem, we should not do this
            setattr(obj, 'user_id', user_id)
            setattr(obj, 'uid', uid)
            return obj
        raise NotFound('Index %s/%s/%s not found' %
                       (user_id, cls.doc_type, uid))

    def refresh(self):
        """Refresh instance with stored values.

        BUGFIX: the previous implementation did ``self = self.get(...)``,
        which only rebinds the local name and never touches the
        instance. Copy the freshly fetched column values instead.
        """
        fresh = self.get(self.user_id, self.uid)
        for col in self.columns:
            setattr(self, col, getattr(fresh, col, None))

    def update(self, query):
        """Update indexed object running query."""
        # XXX surely not secure
        client = self.client()
        res = client.update(index=self.user_id, doc_type=self.doc_type,
                            id=self.uid, body=to_json(query))
        return res

    def update_field_add(self, field, value):
        """Add a value to a given field in index."""
        query = {
            'script': 'ctx._source.%s += attrs' % field,
            'params': {'attrs': value}
        }
        return self.update(query)

    def update_field_delete(self, field, value):
        """Delete a value from a given field in index."""
        query = {
            'script': 'ctx._source.%s -= attrs' % field,
            'params': {'attrs': value}
        }
        return self.update(query)

    @classmethod
    def create(cls, core, **kwargs):
        """Create an indexed object."""
        uid = getattr(core, core._pkey_name)
        data = {column: kwargs.get(column, getattr(core, column))
                for column in cls.columns}
        # XXX TOFIX HACKISH : encode UUID using json encoder
        data = json.loads(to_json(data))
        return cls.client().create(index=core.user_id,
                                   doc_type=cls.doc_type,
                                   id=uid, body=data)

    @classmethod
    def _format_list_result(cls, res):
        """Normalize an elasticsearch-dsl response into a plain dict."""
        results = [idx.to_dict() for idx in res]
        return {'total': res.hits.total, 'data': results}

    @classmethod
    def _paginate(cls, search, limit, offset):
        """Apply limit/offset slicing to a Search object.

        BUGFIX: the previous inline slicing computed ``offset + limit``
        whenever either value was truthy, raising TypeError when a
        nonzero offset was given with ``limit=None``.
        """
        if limit is not None:
            return search[offset:offset + limit]
        if offset:
            return search[offset:]
        return search

    @classmethod
    def filter(cls, user_id, params, order=None, limit=None, offset=0):
        """Filter indexed objects using a query string.

        :param user_id: user identifier
        :type user_id: str
        :param params: parameters to add in query string, will be
            form of name:value
        :type params: dict
        :param limit: restrict result to this limit
        :type limit: int
        :param offset: start result list from this offset
        :type offset: int
        :return list
        """
        # XXX well I know this it bad, security must be considered strongly
        values = []
        # items() instead of Py2-only iteritems() (works on both).
        for k, v in params.items():
            values.append('%s:%s' % (k, v))
        q_str = ' AND '.join(values)
        client = cls.client()
        s = Search(using=client, index=user_id, doc_type=cls.doc_type). \
            query("query_string", query=q_str)
        s = cls._paginate(s, limit, offset)
        log.debug("Filter index %s %s with : %s" %
                  (user_id, cls.doc_type, s.to_dict()))
        res = s.execute()
        return cls._format_list_result(res)

    @classmethod
    def all(cls, user_id, order=None, limit=None, offset=0):
        """Return all indexed objects with limits and sort options.

        :param user_id: user identifier
        :type user_id: str
        :param limit: restrict result to this limit
        :type limit: int
        :param offset: start result list from this offset
        :type offset: int
        :return list
        """
        client = cls.client()
        s = Search(using=client, index=user_id, doc_type=cls.doc_type)
        s = cls._paginate(s, limit, offset)
        res = s.execute()
        return cls._format_list_result(res)

    @classmethod
    def dump(cls, user_id):
        """Dump indexed objects with their id for an user.

        :param user_id: user identifier
        :type user_id: str
        """
        client = cls.client()
        s = Search(using=client, index=user_id, doc_type=cls.doc_type)
        # XXX do count and adjust limit to be sure to get everything
        s = s[0:100]
        res = s.execute()
        results = []
        for doc in res:
            url = '%s/%s/%s' % (user_id, cls.doc_type, doc._meta.id)
            results.append({
                'id': doc._meta.id,
                'url': url,
                'body': doc.to_dict(),
            })
        if len(results) >= 100:
            log.warn('Only 100 indexed objects %s dumped' % cls.doc_type)
        return results

    def to_dict(self):
        """Return indexed object instance as dict."""
        data = {}
        for col in self.columns:
            # XXX only not none columns ?
            data.update({col: getattr(self, col)})
        return data