def __init__(self, context, request, _query_params=None, _json_params=None):
    """ Prepare data to be used across the view and run init methods.

    Each view has these dicts on data:
      :_query_params: Params from a query string
      :_json_params: Request JSON data. Populated only for PUT, PATCH,
        POST methods
      :_params: Join of _query_params and _json_params

    For method tunneling, _json_params contains the same data as
    _query_params.

    FIX: the defaults were mutable dicts (``={}``), a shared-state
    pitfall. ``None`` sentinels behave identically for all existing
    callers because ``{}`` and ``None`` are both falsy here.
    """
    self.context = context
    self.request = request
    self._query_params = dictset(_query_params or request.params.mixed())
    self._json_params = dictset(_json_params or {})

    ctype = request.content_type
    if request.method in ['POST', 'PUT', 'PATCH']:
        if ctype == 'application/json':
            try:
                self._json_params.update(request.json)
            except simplejson.JSONDecodeError:
                # Malformed body: log and continue with whatever params
                # we already have instead of failing the request here.
                log.error(
                    "Expecting JSON. Received: '{}'. "
                    "Request: {} {}".format(
                        request.body, request.method, request.url))
        # Expand 'a.b' style keys into nested dicts for update methods.
        self._json_params = BaseView.convert_dotted(self._json_params)
        self._query_params = BaseView.convert_dotted(self._query_params)

    # Joined view of both param sources; JSON values win on conflict.
    self._params = self._query_params.copy()
    self._params.update(self._json_params)

    # dict of the callables {'action': [callable1, callable2..]}
    # as name implies, before_calls are executed before the action is
    # called; after_calls are called after the action returns.
    self._before_calls = defaultdict(list)
    self._after_calls = defaultdict(list)

    # no accept headers, use default
    if '' in request.accept:
        request.override_renderer = self._default_renderer
    elif 'application/json' in request.accept:
        request.override_renderer = 'nefertari_json'
    elif 'text/plain' in request.accept:
        request.override_renderer = 'string'

    self._run_init_actions()
def includeme(config):
    """Configure a ramses-powered Pyramid app from its RAML definition.

    Side-effect ordering matters throughout: nefertari must be included
    before tween/auth setup, models must be generated before the server
    resources, and ES mappings are set up only after the database.
    """
    Settings = dictset(config.registry.settings)
    config.include("nefertari.engine")
    config.include("nefertari")
    config.include("nefertari.view")
    config.include("nefertari.elasticsearch")

    # Process nefertari settings
    if Settings.asbool("debug"):
        # GET tunneling is only enabled in debug mode.
        log.warning("*** DEBUG DEBUG DEBUG mode ***")
        config.add_tween("nefertari.tweens.get_tunneling")
    if Settings.asbool("cors.enable"):
        config.add_tween("nefertari.tweens.cors")
    if Settings.asbool("ssl_middleware.enable"):
        config.add_tween("nefertari.tweens.ssl")
    if Settings.asbool("request_timing.enable"):
        config.add_tween("nefertari.tweens.request_timing")

    # Set root factory
    config.root_factory = NefertariRootACL

    # Process auth settings
    root = config.get_root_resource()
    ramses_auth = Settings.asbool("ramses.auth", False)
    root.auth = ramses_auth

    log.info("Parsing RAML")
    parsed_raml = pyraml.parser.load(Settings["ramses.raml_schema"])

    log.info("Starting models generation")
    generate_models(config, raml_resources=parsed_raml.resources)

    if ramses_auth:
        # Fall back to the stock AuthUser model unless the app
        # registered its own before including ramses.
        if getattr(config.registry, "auth_model", None) is None:
            from nefertari.authentication.models import AuthUser
            config.registry.auth_model = AuthUser
        from .auth import setup_auth_policies
        setup_auth_policies(config, parsed_raml)

    log.info("Starting server generation")
    generate_server(parsed_raml, config)

    log.info("Running nefertari.engine.setup_database")
    from nefertari.engine import setup_database
    setup_database(config)

    from nefertari.elasticsearch import ES
    ES.setup_mappings()

    if ramses_auth:
        config.include("ramses.auth")

    log.info("Server succesfully generated\n")
def __init__(self, *arg, **kw):
    """Initialize the base HTTP exception and attach a JSON body.

    Only keywords the base exception class understands are forwarded
    to it; the full keyword set still feeds the JSON response.
    """
    from nefertari.utils import dictset
    params = dictset(kw)
    allowed_keys = BASE_ATTRS + ['headers', 'location']
    base_cls = self.__class__.__base__
    base_cls.__init__(self, *arg, **params.subset(allowed_keys))
    create_json_response(self, **params)
def filter_fields(cls, params):
    """ Filter out fields with invalid names.

    A key like ``name__in`` is matched on the part before ``__``.
    """
    queryable = cls.fields_to_query()
    filtered = {}
    for name, val in params.items():
        base_name = name.split('__')[0]
        if base_name in queryable:
            filtered[name] = val
    return dictset(filtered)
def __init__(self, *args, **kwargs):
    """ Init view and set fake `self.Model` so its __name__ would
    contain names of all requested collections.
    """
    super(PolymorphicESView, self).__init__(*args, **kwargs)
    joined_names = ','.join(self.determine_types())
    self.Model = dictset({'__name__': joined_names})
def includeme(config):
    """Set up Elasticsearch: connect, create the index, and optionally
    enable polymorphic (multi-collection) queries.
    """
    settings = dictset(config.registry.settings)
    ES.setup(settings)
    ES.create_index()
    polymorphic_enabled = ES.settings.asbool('enable_polymorphic_query')
    if polymorphic_enabled:
        config.include('nefertari.polymorphic')
def to_dict(self, **kwargs):
    """Serialize this model instance to a dictset.

    ``_depth`` limits how deep nested relationships are expanded;
    once the budget is spent, related objects collapse to their
    primary-key values. Adds bookkeeping keys ``_type`` and ``_pk``.
    """
    depth = kwargs.get('_depth')
    if depth is None:
        depth = self._nesting_depth
    depth_reached = depth is not None and depth <= 0

    def _as_pk(obj):
        # Collapse a related object to its primary key value.
        return getattr(obj, obj.pk_field(), None)

    def _as_dict(obj):
        # Recurse, consuming one level of the nesting budget.
        return obj.to_dict(_depth=depth - 1)

    result = dictset()
    for field in self.__class__.native_fields():
        value = getattr(self, field, None)
        nested_allowed = (field in self._nested_relationships
                          and not depth_reached)
        encode = _as_dict if nested_allowed else _as_pk
        if isinstance(value, BaseMixin):
            value = encode(value)
        elif isinstance(value, InstrumentedList):
            value = [encode(item) for item in value]
        elif hasattr(value, 'to_dict'):
            value = _as_dict(value)
        result[field] = value
    result['_type'] = self._type
    result['_pk'] = str(getattr(self, self.pk_field()))
    return result
def includeme(config):
    """Register nefertari's Pyramid directives, renderers, registry
    containers, tweens and subscriber predicates on :config:.
    """
    from nefertari.resource import get_root_resource, get_resource_map
    from nefertari.renderers import (JsonRendererFactory,
                                     NefertariJsonRendererFactory)
    from nefertari.utils import dictset
    from nefertari.events import (ModelClassIs, FieldIsChanged,
                                  subscribe_to_events,
                                  add_field_processors)
    log.info("%s %s" % (APP_NAME, __version__))

    # Custom config directives used by apps built on nefertari.
    config.add_directive('get_root_resource', get_root_resource)
    config.add_directive('subscribe_to_events', subscribe_to_events)
    config.add_directive('add_field_processors', add_field_processors)

    config.add_renderer('json', JsonRendererFactory)
    config.add_renderer('nefertari_json', NefertariJsonRendererFactory)

    # Registry containers are created lazily so repeated includes are
    # safe and don't clobber already-registered resources.
    if not hasattr(config.registry, '_root_resources'):
        config.registry._root_resources = {}
    if not hasattr(config.registry, '_resources_map'):
        config.registry._resources_map = {}
    # Map of {ModelName: model_collection_resource}
    if not hasattr(config.registry, '_model_collections'):
        config.registry._model_collections = {}

    config.add_request_method(get_resource_map, 'resource_map', reify=True)
    config.add_tween('nefertari.tweens.cache_control')

    config.add_subscriber_predicate('model', ModelClassIs)
    config.add_subscriber_predicate('field', FieldIsChanged)

    Settings = dictset(config.registry.settings)
    root = config.get_root_resource()
    root.auth = Settings.asbool('auth')
def to_dict(self, **kwargs):
    """Serialize this model instance to a dictset.

    ``_depth`` limits relationship expansion; ``negative_items`` names
    fields to skip entirely. ForeignKeyField fields are always skipped.
    Adds bookkeeping keys ``_type`` and ``_pk``.
    """
    depth = kwargs.get('_depth')
    excluded = kwargs.get('negative_items', [])
    if depth is None:
        depth = self._nesting_depth
    depth_reached = depth is not None and depth <= 0

    def _as_pk(obj):
        # Collapse a related object to its primary key value.
        return getattr(obj, obj.pk_field(), None)

    def _as_dict(obj):
        # Recurse, consuming one level of the nesting budget.
        return obj.to_dict(_depth=depth - 1)

    result = dictset()
    for field, field_type in self._fields.items():
        if field in excluded:
            continue
        # Ignore ForeignKeyField fields
        if isinstance(field_type, ForeignKeyField):
            continue
        value = getattr(self, field, None)
        if value is not None:
            nested_allowed = (field in self._nested_relationships
                              and not depth_reached)
            encode = _as_dict if nested_allowed else _as_pk
            if isinstance(field_type, ReferenceField):
                value = encode(value)
            elif isinstance(field_type, RelationshipField):
                value = [encode(item) for item in value]
            elif hasattr(value, 'to_dict'):
                value = _as_dict(value)
        result[field] = value
    result['_type'] = self._type
    result['_pk'] = str(getattr(self, self.pk_field()))
    return result
def __init__(self, *arg, **kw):
    """Build the base HTTP exception, then generate its JSON body.

    The base-class constructor only receives the subset of keywords it
    knows about; ``create_json_response`` sees everything.
    """
    from nefertari.utils import dictset
    options = dictset(kw)
    forwarded = options.subset(BASE_ATTRS + ["headers", "location"])
    self.__class__.__base__.__init__(self, *arg, **forwarded)
    create_json_response(self, **options)
def view_mapper_wrapper(context, request):
    """Adapt a Pyramid (context, request) call to a nefertari view
    action.

    NOTE: ``view`` and ``action_name`` are closed over from the
    enclosing scope (the view mapper factory) — presumably the view
    class and the resolved action name; confirm against the caller.
    Order matters: validators (before_calls) run and may abort with an
    HTTP error before any events fire or the action executes.
    """
    matchdict = request.matchdict.copy()
    matchdict.pop('action', None)
    matchdict.pop('traverse', None)
    # instance of BaseView (or child of)
    view_obj = view(context, request)
    action = getattr(view_obj, action_name)
    request.action = action_name
    # Tunneled collection PATCH/PUT doesn't support query params
    tunneled = getattr(request, '_tunneled_get', False)
    if tunneled and action_name in ('update_many', ):
        view_obj._query_params = dictset()
    # We should not run "after_calls" here, so save them on the
    # request as filters; they will be run in the renderer factory.
    request.filters = view_obj._after_calls
    try:
        # run before_calls (validators) before running the action
        for call in view_obj._before_calls.get(action_name, []):
            call(request=request)
    except wrappers.ValidationError as e:
        log.error('validation error: %s', e)
        raise JHTTPBadRequest(e.args)
    except wrappers.ResourceNotFound as e:
        log.error('resource not found: %s', e)
        raise JHTTPNotFound()
    trigger_before_events(view_obj)
    return action(**matchdict)
def includeme(config):
    """Set up Elasticsearch, optionally loading custom index settings.

    Index settings are read from the file named by the
    ``elasticsearch.index.settings_file`` setting, or from a local
    ``index_settings.json`` if present.

    :raises Exception: if the configured settings file is missing or
        does not contain valid JSON.
    """
    Settings = dictset(config.registry.settings)
    ES.setup(Settings)
    # Load custom index settings
    index_settings = None
    index_settings_path = None
    if "elasticsearch.index.settings_file" in Settings:
        index_settings_path = Settings["elasticsearch.index.settings_file"]
        if not os.path.exists(index_settings_path):
            raise Exception(
                "Custom index settings file does not exist : "
                "'{file_name}'".format(file_name=index_settings_path))
    else:
        if os.path.exists("index_settings.json"):
            index_settings_path = "index_settings.json"
    if index_settings_path is not None:
        with open(index_settings_path) as data_file:
            try:
                index_settings = json.load(data_file)
            # FIX: was a bare ``except:`` which also swallowed
            # KeyboardInterrupt/SystemExit. json.load signals parse
            # failures with ValueError (JSONDecodeError subclasses it).
            except ValueError:
                raise Exception(
                    "Could not parse custom index settings : "
                    "'{file_name}'".format(file_name=index_settings_path))
    ES.create_index(index_settings=index_settings)
    if ES.settings.asbool('enable_polymorphic_query'):
        config.include('nefertari.polymorphic')
def includeme(config):
    """Register nefertari's Pyramid directives, renderers, registry
    containers, tweens and subscriber predicates on :config:.
    """
    from nefertari.resource import get_root_resource, get_resource_map
    from nefertari.renderers import (
        JsonRendererFactory,
        NefertariJsonRendererFactory)
    from nefertari.utils import dictset
    from nefertari.events import (
        ModelClassIs,
        FieldIsChanged,
        subscribe_to_events,
        add_field_processors)
    log.info("%s %s" % (APP_NAME, __version__))

    # Custom config directives used by apps built on nefertari.
    config.add_directive('get_root_resource', get_root_resource)
    config.add_directive('subscribe_to_events', subscribe_to_events)
    config.add_directive('add_field_processors', add_field_processors)

    config.add_renderer('json', JsonRendererFactory)
    config.add_renderer('nefertari_json', NefertariJsonRendererFactory)

    # Registry containers are created lazily so repeated includes are
    # safe and don't clobber already-registered resources.
    if not hasattr(config.registry, '_root_resources'):
        config.registry._root_resources = {}
    if not hasattr(config.registry, '_resources_map'):
        config.registry._resources_map = {}
    # Map of {ModelName: model_collection_resource}
    if not hasattr(config.registry, '_model_collections'):
        config.registry._model_collections = {}

    config.add_request_method(get_resource_map, 'resource_map', reify=True)
    config.add_tween('nefertari.tweens.cache_control')

    config.add_subscriber_predicate('model', ModelClassIs)
    config.add_subscriber_predicate('field', FieldIsChanged)

    Settings = dictset(config.registry.settings)
    root = config.get_root_resource()
    root.auth = Settings.asbool('auth')
def view_mapper_wrapper(context, request):
    """Adapt a Pyramid (context, request) call to a nefertari view
    action, running validators first and firing events around the
    action call.

    NOTE: ``view``, ``action_name`` and ``trigger_events`` come from
    the enclosing scope (the view mapper factory) — presumably the
    view class, resolved action name, and an event context manager;
    confirm against the caller.
    """
    matchdict = request.matchdict.copy()
    matchdict.pop('action', None)
    matchdict.pop('traverse', None)
    # instance of BaseView (or child of)
    view_obj = view(context, request)
    action = getattr(view_obj, action_name)
    request.action = action_name
    # Tunneled collection PATCH/PUT doesn't support query params
    tunneled = getattr(request, '_tunneled_get', False)
    if tunneled and action_name in ('update_many',):
        view_obj._query_params = dictset()
    # We should not run "after_calls" here, so save them on the
    # request as filters; they will be run in the renderer factory.
    request.filters = view_obj._after_calls
    try:
        # run before_calls (validators) before running the action
        for call in view_obj._before_calls.get(action_name, []):
            call(request=request)
    except wrappers.ValidationError as e:
        log.error('validation error: %s', e)
        raise JHTTPBadRequest(e.args)
    except wrappers.ResourceNotFound as e:
        log.error('resource not found: %s', e)
        raise JHTTPNotFound()
    # Events fire around the action; the response is stashed on the
    # view instance so event handlers can inspect it.
    with trigger_events(view_obj):
        view_obj._response = action(**matchdict)
    return view_obj._response
def to_dict(self, **kwargs):
    """Serialize this model instance to a dictset.

    ``_depth`` bounds relationship expansion; once exhausted, related
    objects are reduced to their primary-key values. The result also
    carries ``_type`` and ``_pk`` bookkeeping keys.
    """
    remaining = kwargs.get('_depth')
    if remaining is None:
        remaining = self._nesting_depth
    budget_spent = remaining is not None and remaining <= 0

    def _pk_of(obj):
        # Related object -> its primary key value.
        return getattr(obj, obj.pk_field(), None)

    def _expand(obj):
        # Related object -> nested dict, one depth level cheaper.
        return obj.to_dict(_depth=remaining - 1)

    data = dictset()
    for name in self.__class__.native_fields():
        value = getattr(self, name, None)
        expand_ok = (name in self._nested_relationships
                     and not budget_spent)
        convert = _expand if expand_ok else _pk_of
        if isinstance(value, BaseMixin):
            value = convert(value)
        elif isinstance(value, InstrumentedList):
            value = [convert(member) for member in value]
        elif hasattr(value, 'to_dict'):
            value = _expand(value)
        data[name] = value
    data['_type'] = self._type
    data['_pk'] = str(getattr(self, self.pk_field()))
    return data
def __init__(self, *arg, **kw):
    """Construct the base HTTP exception and attach a JSON body.

    The base constructor gets only the keywords it understands; the
    JSON response builder receives the full keyword set.
    """
    options = dictset(kw)
    base_kwargs = options.subset(BASE_ATTRS + ['headers', 'location'])
    self.__class__.__base__.__init__(self, *arg, **base_kwargs)
    create_json_response(self, **options)
def test_setup_aggregation_es_disabled(self, aggregator, mock_es):
    """With aggregations disabled in ES settings, ``_setup_aggregation``
    must leave ``index`` untouched."""
    mock_es.settings = dictset(enable_aggregations=False)
    fake_request = Mock(content_type='', method='', accept=[''])
    view = DummyBaseView(
        context={}, request=fake_request,
        _query_params={'foo': 'bar'})
    view.index = 1
    view._setup_aggregation()
    assert view.index == 1
def get_by_ids(self, ids, **params):
    """Fetch documents from Elasticsearch by a list of id descriptors.

    :param ids: sequence of dicts with '_id' and '_type' keys.
    :param params: pagination/meta kwargs popped below (_raise_on_empty,
        _fields, _limit, _page, _start); remaining keys are ignored.
    :return: _ESDocs list with `_nefertari_meta` (start/fields/total).
    :raises JHTTPNotFound: when _raise_on_empty is set and the index or
        a requested document is missing.
    """
    if not ids:
        return _ESDocs()
    _raise_on_empty = params.pop('_raise_on_empty', False)
    fields = params.pop('_fields', [])
    _limit = params.pop('_limit', len(ids))
    _page = params.pop('_page', None)
    _start = params.pop('_start', None)
    _start, _limit = process_limit(_start, _page, _limit)
    # Build the mget request body: one descriptor per requested doc.
    docs = []
    for _id in ids:
        docs.append(
            dict(_index=self.index_name,
                 _type=self.src2type(_id['_type']),
                 _id=_id['_id']))
    # NOTE: rebinding `params` — from here on it is the mget kwargs.
    params = dict(body=dict(docs=docs))
    if fields:
        fields_params = process_fields_param(fields)
        params.update(fields_params)
    documents = _ESDocs()
    documents._nefertari_meta = dict(
        start=_start,
        fields=fields,
    )
    try:
        data = self.api.mget(**params)
    except IndexNotFoundException:
        # Missing index: either 404 or an empty (total=0) result.
        if _raise_on_empty:
            raise JHTTPNotFound(
                '{}({}) resource not found (Index does not exist)'.format(
                    self.doc_type, params))
        documents._nefertari_meta.update(total=0)
        return documents
    for found_doc in data['docs']:
        try:
            output_doc = found_doc['_source']
            output_doc['_type'] = found_doc['_type']
        except KeyError:
            # No '_source' key means the doc was not found in ES.
            msg = "ES: '%s(%s)' resource not found" % (found_doc['_type'],
                                                       found_doc['_id'])
            if _raise_on_empty:
                raise JHTTPNotFound(msg)
            else:
                log.error(msg)
                continue
        documents.append(dict2obj(dictset(output_doc)))
    documents._nefertari_meta.update(total=len(documents), )
    return documents
def test_setup_aggregation_index_not_defined(self, aggregator, mock_es):
    """When aggregations are enabled but the view defines no ``index``,
    calling ``index`` must raise JHTTPMethodNotAllowed."""
    mock_es.settings = dictset(enable_aggregations=True)
    fake_request = Mock(content_type='', method='', accept=[''])
    view = DummyBaseView(
        context={}, request=fake_request,
        _query_params={'foo': 'bar'})
    assert view.index == view.not_allowed_action
    view._setup_aggregation()
    with pytest.raises(JHTTPMethodNotAllowed):
        view.index()
def includeme(config):
    """Configure a ramses-powered Pyramid app from its RAML definition.

    Side-effect ordering matters: nefertari is included before tweens
    and auth setup, models are generated before the server resources,
    elasticsearch is included only after auth policies, and ES mappings
    are set up after the database.
    """
    Settings = dictset(config.registry.settings)
    config.include('nefertari.engine')
    config.include('nefertari')
    config.include('nefertari.view')

    # Process nefertari settings
    if Settings.asbool('debug'):
        # GET tunneling is only enabled in debug mode.
        log.warning('*** DEBUG DEBUG DEBUG mode ***')
        config.add_tween('nefertari.tweens.get_tunneling')
    if Settings.asbool('cors.enable'):
        config.add_tween('nefertari.tweens.cors')
    if Settings.asbool('ssl_middleware.enable'):
        config.add_tween('nefertari.tweens.ssl')
    if Settings.asbool('request_timing.enable'):
        config.add_tween('nefertari.tweens.request_timing')

    # Set root factory
    config.root_factory = NefertariRootACL

    # Process auth settings
    root = config.get_root_resource()
    ramses_auth = Settings.asbool('ramses.auth', False)
    root.auth = ramses_auth

    log.info('Parsing RAML')
    parsed_raml = pyraml.parser.load(Settings['ramses.raml_schema'])

    log.info('Starting models generation')
    generate_models(config, raml_resources=parsed_raml.resources)

    if ramses_auth:
        # Fall back to the stock auth user model unless the app
        # registered its own before including ramses.
        if getattr(config.registry, 'auth_model', None) is None:
            from nefertari.authentication.models import get_authuser_model
            config.registry.auth_model = get_authuser_model()
        from .auth import setup_auth_policies
        setup_auth_policies(config, parsed_raml)

    config.include('nefertari.elasticsearch')

    log.info('Starting server generation')
    generate_server(parsed_raml, config)

    log.info('Running nefertari.engine.setup_database')
    from nefertari.engine import setup_database
    setup_database(config)

    from nefertari.elasticsearch import ES
    ES.setup_mappings()

    if ramses_auth:
        config.include('ramses.auth')

    log.info('Server succesfully generated\n')
def __init__(self, argv, log):
    """Parse CLI options and bootstrap the app for ES (re)indexing.

    :param argv: unused here — presumably kept for interface parity;
        argparse reads sys.argv directly. TODO confirm with callers.
    :param log: logger the command reports through.
    """
    parser = ArgumentParser(description=__doc__)
    parser.add_argument('-c', '--config', help='config.ini (required)',
                        required=True)
    parser.add_argument('--quiet', help='Quiet mode', action='store_true',
                        default=False)
    parser.add_argument(
        '--models',
        help=('Comma-separated list of model names to index '
              '(required)'),
        required=True)
    parser.add_argument('--params', help='Url-encoded params for each model')
    parser.add_argument('--index', help='Index name', default=None)
    parser.add_argument(
        '--chunk',
        help=('Index chunk size. If chunk size not provided '
              '`elasticsearch.chunk_size` setting is used'),
        type=int)
    parser.add_argument(
        '--force',
        help=('Recreate ES mappings and reindex all documents of provided '
              'models. By default, only documents that are missing from '
              'index are indexed.'),
        action='store_true',
        default=False)
    self.options = parser.parse_args()
    if not self.options.config:
        return parser.print_help()
    # Prevent ES.setup_mappings running on bootstrap;
    # Restore ES._mappings_setup after bootstrap is over
    mappings_setup = getattr(ES, '_mappings_setup', False)
    try:
        ES._mappings_setup = True
        env = self.bootstrap[0](self.options.config)
    finally:
        ES._mappings_setup = mappings_setup
    registry = env['registry']
    # Include 'nefertari.engine' to setup specific engine
    config = Configurator(settings=registry.settings)
    config.include('nefertari.engine')
    self.log = log
    if not self.options.quiet:
        self.log.setLevel(logging.INFO)
    self.settings = dictset(registry.settings)
def test_setup_aggregation(self, aggregator, mock_es):
    """With aggregations enabled, ``_setup_aggregation`` must wrap the
    view's ``index`` action with the aggregator."""
    mock_es.settings = dictset(enable_aggregations=True)
    fake_request = Mock(content_type='', method='', accept=[''])
    view = DummyBaseView(
        context={}, request=fake_request,
        _query_params={'foo': 'bar'})
    type(view).index = 1
    view._setup_aggregation()
    aggregator.assert_called_once_with(view)
    aggregator().wrap.assert_called_once_with(1)
    assert view.index == aggregator().wrap()
def create_account(cls, params):
    """Register a new (inactive) auth user from signup :params:.

    Only whitelisted profile fields are honoured; the account is
    created with status 'inactive'.
    """
    allowed = ['username', 'email', 'password', 'first_name', 'last_name']
    user_params = dictset(params).subset(allowed)
    user_params['status'] = 'inactive'
    try:
        return cls.get_or_create(
            email=user_params['email'], defaults=user_params)
    except JHTTPBadRequest as e:
        log.error(e)
        raise JHTTPBadRequest('Failed to create account.')
def __init__(self, argv, log):
    """Parse CLI options and bootstrap the app for ES (re)indexing.

    :param argv: unused here — presumably kept for interface parity;
        argparse reads sys.argv directly. TODO confirm with callers.
    :param log: logger the command reports through.
    """
    parser = ArgumentParser(description=__doc__)
    parser.add_argument(
        '-c', '--config', help='config.ini (required)',
        required=True)
    parser.add_argument(
        '--quiet', help='Quiet mode', action='store_true',
        default=False)
    parser.add_argument(
        '--models',
        help=('Comma-separated list of model names to index '
              '(required)'),
        required=True)
    parser.add_argument(
        '--params', help='Url-encoded params for each model')
    parser.add_argument('--index', help='Index name', default=None)
    parser.add_argument(
        '--chunk',
        help=('Index chunk size. If chunk size not provided '
              '`elasticsearch.chunk_size` setting is used'),
        type=int)
    parser.add_argument(
        '--force',
        help=('Recreate ES mappings and reindex all documents of provided '
              'models. By default, only documents that are missing from '
              'index are indexed.'),
        action='store_true',
        default=False)
    self.options = parser.parse_args()
    if not self.options.config:
        return parser.print_help()
    # Prevent ES.setup_mappings running on bootstrap;
    # Restore ES._mappings_setup after bootstrap is over
    mappings_setup = getattr(ES, '_mappings_setup', False)
    try:
        ES._mappings_setup = True
        env = self.bootstrap[0](self.options.config)
    finally:
        ES._mappings_setup = mappings_setup
    registry = env['registry']
    # Include 'nefertari.engine' to setup specific engine
    config = Configurator(settings=registry.settings)
    config.include('nefertari.engine')
    self.log = log
    if not self.options.quiet:
        self.log.setLevel(logging.INFO)
    self.settings = dictset(registry.settings)
def create_account(cls, params):
    """ Create auth user instance with data from :params:.

    Used by both Token and Ticket-based auths to register a user (
    called from views). Only username/email/password are honoured.
    """
    signup_fields = ['username', 'email', 'password']
    user_params = dictset(params).subset(signup_fields)
    try:
        return cls.get_or_create(
            email=user_params['email'], defaults=user_params)
    except JHTTPBadRequest:
        raise JHTTPBadRequest('Failed to create account.')
def get_by_ids(self, ids, **params):
    """Fetch documents from Elasticsearch by a list of id descriptors.

    :param ids: sequence of dicts with '_id' and '_type' keys.
    :param params: meta kwargs popped below (__raise_on_empty, _fields,
        _limit, _page, _start); remaining keys are ignored.
    :return: _ESDocs list with `_nefertari_meta` (total/start/fields).
    :raises JHTTPNotFound: when __raise_on_empty is set and a requested
        document is missing.
    """
    if not ids:
        return _ESDocs()
    __raise_on_empty = params.pop('__raise_on_empty', False)
    fields = params.pop('_fields', [])
    _limit = params.pop('_limit', len(ids))
    _page = params.pop('_page', None)
    _start = params.pop('_start', None)
    _start, _limit = process_limit(_start, _page, _limit)
    # Build the mget request body: one descriptor per requested doc.
    docs = []
    for _id in ids:
        docs.append(
            dict(
                _index=self.index_name,
                _type=self.src2type(_id['_type']),
                _id=_id['_id']
            )
        )
    # NOTE: rebinding `params` — from here on it is the mget kwargs.
    params = dict(
        body=dict(docs=docs)
    )
    if fields:
        params['fields'] = fields
    data = ES.api.mget(**params)
    documents = _ESDocs()
    for _d in data['docs']:
        try:
            # 'fields' responses carry data under 'fields'; otherwise
            # the full '_source' document is used.
            _d = _d['fields'] if fields else _d['_source']
        except KeyError:
            # Missing key means the doc was not found in ES.
            msg = "ES: '%s(%s)' resource not found" % (
                _d['_type'], _d['_id'])
            if __raise_on_empty:
                raise JHTTPNotFound(msg)
            else:
                log.error(msg)
                continue
        documents.append(dict2obj(dictset(_d)))
    documents._nefertari_meta = dict(
        total=len(documents),
        start=_start,
        fields=fields,
    )
    return documents
def prepare_request_params(self, _query_params, _json_params):
    """ Prepare query and update params.

    Populates ``_query_params`` (query string), ``_json_params``
    (request JSON body, update methods only) and ``_params`` (the
    join of both, JSON winning on conflicts).
    """
    request = self.request
    self._query_params = dictset(
        _query_params or request.params.mixed())
    self._json_params = dictset(_json_params)
    if request.method in ('POST', 'PUT', 'PATCH'):
        if request.content_type == 'application/json':
            try:
                self._json_params.update(request.json)
            except simplejson.JSONDecodeError:
                # Malformed body: log and keep whatever params we have.
                log.error("Expecting JSON. Received: '{}'. "
                          "Request: {} {}".format(
                              request.body, request.method, request.url))
        # Expand 'a.b' style keys into nested dicts for update methods.
        self._json_params = BaseView.convert_dotted(self._json_params)
        self._query_params = BaseView.convert_dotted(self._query_params)
    self._params = self._query_params.copy()
    self._params.update(self._json_params)
def prepare_request_params(self, _query_params, _json_params):
    """ Prepare query and update params.

    Sets ``_query_params``, ``_json_params`` (body JSON for update
    methods) and ``_params`` (their join; JSON values win).
    """
    req = self.request
    self._query_params = dictset(_query_params or req.params.mixed())
    self._json_params = dictset(_json_params)
    is_update_method = req.method in ('POST', 'PUT', 'PATCH')
    if is_update_method:
        if req.content_type == 'application/json':
            try:
                self._json_params.update(req.json)
            except simplejson.JSONDecodeError:
                # Body was not valid JSON; log and continue.
                log.error(
                    "Expecting JSON. Received: '{}'. "
                    "Request: {} {}".format(
                        req.body, req.method, req.url))
        # Expand dotted keys ('a.b') into nested dicts.
        self._json_params = BaseView.convert_dotted(self._json_params)
        self._query_params = BaseView.convert_dotted(self._query_params)
    merged = self._query_params.copy()
    merged.update(self._json_params)
    self._params = merged
def __call__(self, **kwargs):
    """Validate that the request may touch every field in the payload.

    :raises JHTTPForbidden: when field-level privacy validation fails.
    """
    from nefertari.utils import validate_data_privacy, dictset
    from nefertari.json_httpexceptions import JHTTPForbidden
    request = kwargs.pop('request')
    payload = dictset(self.request_data)
    payload['_type'] = self.model_cls.__name__
    try:
        validate_data_privacy(
            request, payload, wrapper_kw={'drop_hidden': False})
    except ValidationError as ex:
        message = 'Not enough permissions to update fields: {}'.format(ex)
        raise JHTTPForbidden(message)
def create_account(cls, params):
    """Register an auth user with username/email/password from
    :params:.

    Shared by token- and ticket-based auth registration views.
    """
    credentials = dictset(params).subset(['username', 'email', 'password'])
    try:
        return cls.get_or_create(
            email=credentials['email'], defaults=credentials)
    except JHTTPBadRequest:
        raise JHTTPBadRequest('Failed to create account.')
def build_search_params(self, params):
    """Translate nefertari query params into elasticsearch-py search
    kwargs (index, doc_type, body, from_/size, sort, fields).

    Reserved keys: q (raw terms), body, _limit, _page, _start, _sort,
    _fields, _search_fields; everything else feeds the query string.
    """
    params = dictset(params)
    _params = dict(
        index=self.index_name,
        doc_type=self.doc_type
    )
    _raw_terms = params.pop('q', '')
    if 'body' not in params:
        # Build a query_string query from the remaining params,
        # falling back to match_all when nothing is left.
        query_string = build_qs(params.remove(RESERVED_PARAMS), _raw_terms)
        if query_string:
            _params['body'] = {
                'query': {
                    'query_string': {
                        'query': query_string
                    }
                }
            }
        else:
            _params['body'] = {"query": {"match_all": {}}}
    else:
        # Caller supplied a raw ES body; pass it through untouched.
        _params['body'] = params['body']
    if '_limit' not in params:
        # Default the limit to the full document count.
        params['_limit'] = self.api.count()['count']
    _params['from_'], _params['size'] = process_limit(
        params.get('_start', None),
        params.get('_page', None),
        params['_limit'])
    if '_sort' in params:
        _params['sort'] = apply_sort(params['_sort'])
    if '_fields' in params:
        _params['fields'] = params['_fields']
    if '_search_fields' in params:
        # Weight listed fields: last listed gets the highest boost.
        # NOTE(review): this assumes the body contains a query_string
        # query — a match_all or custom body raises KeyError here;
        # confirm callers never combine those with _search_fields.
        search_fields = params['_search_fields'].split(',')
        search_fields.reverse()
        search_fields = [s + '^' + str(i)
                         for i, s in enumerate(search_fields, 1)]
        current_qs = _params['body']['query']['query_string']
        if isinstance(current_qs, str):
            _params['body']['query']['query_string'] = {'query': current_qs}
        _params['body']['query']['query_string']['fields'] = search_fields
    return _params
def get_by_ids(self, ids, **params):
    """Fetch documents from Elasticsearch by a list of id descriptors.

    :param ids: sequence of dicts with '_id' and '_type' keys.
    :param params: meta kwargs popped below (__raise_on_empty, _fields,
        _limit, _page, _start); remaining keys are ignored.
    :return: _ESDocs list with `_nefertari_meta` (start/fields/total).
    :raises JHTTPNotFound: when __raise_on_empty is set and the index
        or a requested document is missing.
    """
    if not ids:
        return _ESDocs()
    __raise_on_empty = params.pop("__raise_on_empty", False)
    fields = params.pop("_fields", [])
    _limit = params.pop("_limit", len(ids))
    _page = params.pop("_page", None)
    _start = params.pop("_start", None)
    _start, _limit = process_limit(_start, _page, _limit)
    # Build the mget request body: one descriptor per requested doc.
    docs = []
    for _id in ids:
        docs.append(dict(_index=self.index_name,
                         _type=self.src2type(_id["_type"]),
                         _id=_id["_id"]))
    # NOTE: rebinding `params` — from here on it is the mget kwargs.
    params = dict(body=dict(docs=docs))
    if fields:
        fields_params = process_fields_param(fields)
        params.update(fields_params)
    documents = _ESDocs()
    documents._nefertari_meta = dict(start=_start, fields=fields)
    try:
        data = self.api.mget(**params)
    except IndexNotFoundException:
        # Missing index: either 404 or an empty (total=0) result.
        if __raise_on_empty:
            raise JHTTPNotFound("{}({}) resource not found (Index does not exist)".format(self.doc_type, params))
        documents._nefertari_meta.update(total=0)
        return documents
    for found_doc in data["docs"]:
        try:
            output_doc = found_doc["_source"]
            output_doc["_type"] = found_doc["_type"]
        except KeyError:
            # No '_source' key means the doc was not found in ES.
            msg = "ES: '%s(%s)' resource not found" % (found_doc["_type"],
                                                       found_doc["_id"])
            if __raise_on_empty:
                raise JHTTPNotFound(msg)
            else:
                log.error(msg)
                continue
        documents.append(dict2obj(dictset(output_doc)))
    documents._nefertari_meta.update(total=len(documents))
    return documents
def test_setup(self, mock_es, mock_engine):
    """ES.setup must parse host/sniff settings and build the client."""
    config = dictset({
        'elasticsearch.hosts': '127.0.0.1:8080,127.0.0.2:8090',
        'elasticsearch.sniff': 'true',
    })
    es.ES.setup(config)
    expected_hosts = [
        {'host': '127.0.0.1', 'port': '8080'},
        {'host': '127.0.0.2', 'port': '8090'},
    ]
    mock_es.Elasticsearch.assert_called_once_with(
        hosts=expected_hosts,
        serializer=mock_engine.ESJSONSerializer(),
        connection_class=es.ESHttpConnection,
        sniff_on_start=True,
        sniff_on_connection_fail=True
    )
    assert es.ES.api == mock_es.Elasticsearch()
def build_search_params(self, params):
    """Translate nefertari query params into elasticsearch-py search
    kwargs (index, doc_type, body, from_/size, sort, fields).

    Reserved keys: q (raw terms), body, _limit, _page, _start, _sort,
    _fields, _search_fields; everything else feeds the query string.
    """
    params = dictset(params)
    _params = dict(index=self.index_name, doc_type=self.doc_type)
    _raw_terms = params.pop('q', '')
    if 'body' not in params:
        # Build a query_string query from the remaining params,
        # falling back to match_all when nothing is left.
        query_string = build_qs(params.remove(RESERVED_PARAMS), _raw_terms)
        if query_string:
            _params['body'] = {
                'query': {
                    'query_string': {
                        'query': query_string
                    }
                }
            }
        else:
            _params['body'] = {"query": {"match_all": {}}}
    else:
        # Caller supplied a raw ES body; pass it through untouched.
        _params['body'] = params['body']
    if '_limit' not in params:
        # Default the limit to the full document count.
        params['_limit'] = self.api.count()['count']
    _params['from_'], _params['size'] = process_limit(
        params.get('_start', None),
        params.get('_page', None),
        params['_limit'])
    if '_sort' in params:
        _params['sort'] = apply_sort(params['_sort'])
    if '_fields' in params:
        _params['fields'] = params['_fields']
    if '_search_fields' in params:
        # Weight listed fields: last listed gets the highest boost.
        # NOTE(review): this assumes the body contains a query_string
        # query — a match_all or custom body raises KeyError here;
        # confirm callers never combine those with _search_fields.
        search_fields = params['_search_fields'].split(',')
        search_fields.reverse()
        search_fields = [
            s + '^' + str(i) for i, s in enumerate(search_fields, 1)
        ]
        current_qs = _params['body']['query']['query_string']
        if isinstance(current_qs, str):
            _params['body']['query']['query_string'] = {
                'query': current_qs
            }
        _params['body']['query']['query_string']['fields'] = search_fields
    return _params
def _bulk_body(documents_actions, request):
    """Execute a batch of Elasticsearch actions via ``helpers.bulk``.

    :param documents_actions: iterable of ES bulk action dicts.
    :param request: Pyramid request (or None) whose '_refresh_index'
        query param may force an index refresh when the
        'enable_refresh_query' setting is on.
    :raises Exception: when the bulk call reports per-action errors.
    """
    kwargs = {"client": ES.api, "actions": documents_actions}
    if request is None:
        query_params = {}
    else:
        query_params = request.params.mixed()
    query_params = dictset(query_params)
    refresh_enabled = ES.settings.asbool("enable_refresh_query")
    if "_refresh_index" in query_params and refresh_enabled:
        kwargs["refresh"] = query_params.asbool("_refresh_index")
    executed_num, errors = helpers.bulk(**kwargs)
    log.info("Successfully executed {} Elasticsearch action(s)".format(
        executed_num))
    if errors:
        # FIX: the original message string had no '{}' placeholder, so
        # .format() silently dropped the joined error details.
        raise Exception(
            "Errors happened when executing Elasticsearch "
            "actions: {}".format("; ".join(errors)))
def get_by_ids(self, ids, **params):
    """Fetch documents from Elasticsearch by a list of id descriptors.

    :param ids: sequence of dicts with '_id' and '_type' keys.
    :param params: meta kwargs popped below (__raise_on_empty, _fields,
        _limit, _page, _start); remaining keys are ignored.
    :return: _ESDocs list with `_nefertari_meta` (total/start/fields).
    :raises JHTTPNotFound: when __raise_on_empty is set and a requested
        document is missing.
    """
    if not ids:
        return _ESDocs()
    __raise_on_empty = params.pop('__raise_on_empty', False)
    fields = params.pop('_fields', [])
    _limit = params.pop('_limit', len(ids))
    _page = params.pop('_page', None)
    _start = params.pop('_start', None)
    _start, _limit = process_limit(_start, _page, _limit)
    # Build the mget request body: one descriptor per requested doc.
    docs = []
    for _id in ids:
        docs.append(
            dict(_index=self.index_name,
                 _type=self.src2type(_id['_type']),
                 _id=_id['_id']))
    # NOTE: rebinding `params` — from here on it is the mget kwargs.
    params = dict(body=dict(docs=docs))
    if fields:
        params['fields'] = fields
    data = ES.api.mget(**params)
    documents = _ESDocs()
    for _d in data['docs']:
        try:
            # 'fields' responses carry data under 'fields'; otherwise
            # the full '_source' document is used.
            _d = _d['fields'] if fields else _d['_source']
        except KeyError:
            # Missing key means the doc was not found in ES.
            msg = "ES: '%s(%s)' resource not found" % (_d['_type'],
                                                       _d['_id'])
            if __raise_on_empty:
                raise JHTTPNotFound(msg)
            else:
                log.error(msg)
                continue
        documents.append(dict2obj(dictset(_d)))
    documents._nefertari_meta = dict(
        total=len(documents),
        start=_start,
        fields=fields,
    )
    return documents
def convert_dotted(params):
    """ Convert dotted keys in :params: dictset to a nested dictset.

    E.g. {'settings.foo': 'bar'} -> {'settings': {'foo': 'bar'}}
    """
    if not isinstance(params, dictset):
        params = dictset(params)
    dotted_items = {k: v for k, v in params.items() if '.' in k}
    if dotted_items:
        # Convert each dotted key to a nested dict, then merge them
        # all into one tree.
        merged = six.functools.reduce(
            merge_dicts,
            (str2dict(key, val) for key, val in dotted_items.items()))
        # Drop the original dotted keys and splice in the nested tree.
        params = params.subset(['-' + k for k in dotted_items.keys()])
        params.update(dict(merged))
    return params
def main(argv=sys.argv):
    """Reindex models into Elasticsearch for a hathor app.

    Modes (from parsed args): --list prints available models;
    --recreate drops/recreates the index and reindexes everything;
    otherwise the models named with -m are reindexed.
    """
    global log
    args = parse_args()
    options = parse_vars(args.options)
    settings = get_appsettings(args.ini, name="hathor", options=options)
    setup_logging(args.ini)
    log = logging.getLogger()

    ## Init underlaying FW -- Code inspire from nefertari
    # see: https://github.com/ramses-tech/nefertari/blob/master/nefertari/scripts/es.py
    # Prevent ES.setup_mappings from running during bootstrap; restore
    # the flag afterwards.
    mappings_setup = getattr(ES, '_mappings_setup', False)
    try:
        ES._mappings_setup = True
        env = bootstrap("%s#hathor" % args.ini)
        ## Not sure if I need this, bootstrap should take care of this
        config = Configurator(settings=settings)
        config.include('nefertari.engine')
    finally:
        ES._mappings_setup = mappings_setup
    registry = env['registry']
    ES.setup(dictset(registry.settings))

    # Recreate: drop index, get all available model names -- ignore -m from
    # arguments list
    if args.recreate and not args.list:
        recreate_index()
        args.models = available_models()

    if args.list:
        print("Available models:\n {}".format(", ".join(available_models())))
    elif args.models:
        model_names = args.models
        av_models = available_models()
        for elem in model_names:
            # FIX: was `if not elem in ...` (non-idiomatic) and the
            # error message lacked a space between its two sentences.
            if elem not in av_models:
                raise ValueError(
                    "model '{}' not available. "
                    "Use '-l' to list available models.".format(elem))
        ## still here we gonna re-index things
        for model in model_names:
            if args.delete_mapping:
                recreate_mapping(model)
            reindextask(model, boxsize=args.boxsize)
def build_search_params(self, params):
    """Translate nefertari query params into elasticsearch-py search
    kwargs (index, doc_type, body, from_/size, sort, fields).

    :raises JHTTPBadRequest: when '_limit' is missing from :params:.

    NOTE(review): unlike sibling implementations there is no ``else``
    branch copying ``params['body']`` into ``_params`` — when the
    caller supplies 'body', ``_params['body']`` is never set and the
    '_search_fields' branch below would raise KeyError. Confirm
    whether a raw 'body' is ever passed here.
    """
    params = dictset(params)
    _params = dict(
        index=self.index_name,
        doc_type=self.doc_type
    )
    if 'body' not in params:
        # Build a query_string query from the non-reserved params,
        # falling back to match_all when nothing is left.
        query_string = build_qs(
            params.remove(RESERVED),
            params.get('_raw_terms', ''))
        if query_string:
            _params['body'] = {
                'query': {
                    'query_string': {
                        'query': query_string
                    }
                }
            }
        else:
            _params['body'] = {"query": {"match_all": {}}}
    if '_limit' not in params:
        raise JHTTPBadRequest('Missing _limit')
    _params['from_'], _params['size'] = process_limit(
        params.get('_start', None),
        params.get('_page', None),
        params['_limit'])
    if '_sort' in params:
        _params['sort'] = apply_sort(params['_sort'])
    if '_fields' in params:
        _params['fields'] = params['_fields']
    if '_search_fields' in params:
        # Weight listed fields: last listed gets the highest boost.
        search_fields = params['_search_fields'].split(',')
        search_fields.reverse()
        search_fields = [s + '^' + str(i)
                         for i, s in enumerate(search_fields, 1)]
        _params['body']['query']['query_string']['fields'] = search_fields
    return _params
def _bulk_body(documents_actions, request):
    """Dispatch the given bulk document actions via ESAction.

    Honours the ``_refresh_index`` query param only when the
    ``enable_refresh_query`` setting is enabled.
    """
    action_kwargs = {
        'client': ES.api,
        'actions': documents_actions,
    }

    raw_params = {} if request is None else request.params.mixed()
    query_params = dictset(raw_params)

    refresh_enabled = ES.settings.asbool('enable_refresh_query')
    if refresh_enabled and '_refresh_index' in query_params:
        action_kwargs['refresh'] = query_params.asbool('_refresh_index')

    ESAction(**action_kwargs)
def test_setup(self, mock_es, mock_engine):
    """ES.setup parses the host list and sniff flag into client kwargs."""
    config_settings = dictset({
        'elasticsearch.hosts': '127.0.0.1:8080,127.0.0.2:8090',
        'elasticsearch.sniff': 'true',
    })

    es.ES.setup(config_settings)

    expected_hosts = [
        {'host': '127.0.0.1', 'port': '8080'},
        {'host': '127.0.0.2', 'port': '8090'},
    ]
    mock_es.Elasticsearch.assert_called_once_with(
        hosts=expected_hosts,
        serializer=mock_engine.ESJSONSerializer(),
        connection_class=es.ESHttpConnection,
        sniff_on_start=True,
        sniff_on_connection_fail=True)
    assert es.ES.api == mock_es.Elasticsearch()
def convert_dotted(params):
    """ Convert dotted keys in :params: dictset to a nested dictset.

    E.g. {'settings.foo': 'bar'} -> {'settings': {'foo': 'bar'}}

    Keys nested deeper than one level ('a.b.c') are split on the first
    dot only, so the remainder stays as a dotted subkey instead of
    crashing.
    """
    if not isinstance(params, dictset):
        params = dictset(params)

    dotted = defaultdict(dict)
    dotted_items = {k: v for k, v in params.items() if '.' in k}

    if dotted_items:
        for key, value in dotted_items.items():
            # BUG FIX: split on the first dot only; the unlimited split
            # raised ValueError on keys with more than one dot.
            field, subfield = key.split('.', 1)
            dotted[field].update({subfield: value})

        # Drop the original dotted keys, then merge in the nested dicts.
        params = params.subset(['-' + k for k in dotted_items.keys()])
        params.update(dict(dotted))
    return params
def __init__(self, argv, log):
    """Parse CLI options and bootstrap the app for indexing.

    :param argv: Command line arguments. NOTE(review): currently unused;
        argparse reads sys.argv directly -- confirm before wiring it in.
    :param log: Logger used to report indexing progress.
    """
    def str2bool(value):
        # BUG FIX: argparse's type=bool is a known trap -- bool('False')
        # is True. Accept the usual human spellings instead.
        return value.strip().lower() in ('1', 'true', 'yes', 'on')

    parser = ArgumentParser(description=__doc__)
    parser.add_argument('-c', '--config', help='config.ini (required)',
                        required=True)
    parser.add_argument('--quiet', help='Quiet mode', action='store_true',
                        default=False)
    parser.add_argument('--models',
                        help='List of dotted paths of models to index',
                        required=True)
    parser.add_argument('--params', help='Url-encoded params for each model')
    parser.add_argument('--index', help='Index name', default=None)
    parser.add_argument('--chunk', help='Index chunk size', type=int)
    parser.add_argument(
        '--force',
        help=('Force reindex of all documents. Only documents that '
              'are missing from index are indexed by default.'),
        type=str2bool, default=False)

    self.options = parser.parse_args()
    # NOTE: --config is required=True, so argparse exits with usage help
    # before reaching here when it is missing; no manual check needed.

    env = self.bootstrap[0](self.options.config)
    registry = env['registry']

    # Include 'nefertari.engine' to setup specific engine
    config = Configurator(settings=registry.settings)
    config.include('nefertari.engine')

    self.log = log
    if not self.options.quiet:
        self.log.setLevel(logging.INFO)

    self.settings = dictset(registry.settings)
def test_params_converted(self, mock_policy):
    """Ticket policy params are forwarded and 'secret' is resolved from
    registry settings."""
    from ramses import auth

    policy_params = dictset(
        secure=True,
        include_ip=True,
        http_only=False,
        wild_domain=True,
        debug=True,
        parent_domain=True,
        secret='my_secret_setting',
    )

    config = Mock()
    config.registry.settings = {'my_secret_setting': 12345}
    config.registry.auth_model = Mock()

    auth._setup_ticket_policy(config=config, params=policy_params)

    assert_called_with_at_least(
        mock_policy,
        include_ip=True,
        secure=True,
        parent_domain=True,
        secret=12345,
        wild_domain=True,
        debug=True,
        http_only=False,
    )
def _bulk_body(documents_actions, request):
    """Execute bulk actions against Elasticsearch via helpers.bulk.

    Honours the ``_refresh_index`` query param when the
    ``enable_refresh_query`` setting allows it.

    :param documents_actions: Iterable of ES bulk action dicts.
    :param request: Pyramid request (or None) whose query params may
        carry ``_refresh_index``.
    :raises Exception: When the bulk helper reports per-action errors.
    """
    kwargs = {
        'client': ES.api,
        'actions': documents_actions,
    }

    if request is None:
        query_params = {}
    else:
        query_params = request.params.mixed()

    query_params = dictset(query_params)
    refresh_enabled = ES.settings.asbool('enable_refresh_query')
    if '_refresh_index' in query_params and refresh_enabled:
        kwargs['refresh'] = query_params.asbool('_refresh_index')

    executed_num, errors = helpers.bulk(**kwargs)
    log.info('Successfully executed {} Elasticsearch action(s)'.format(
        executed_num))
    if errors:
        # BUG FIX: the original message had no '{}' placeholder, so the
        # joined error details were silently dropped from the exception.
        raise Exception('Errors happened when executing Elasticsearch '
                        'actions: {}'.format('; '.join(errors)))
def build_search_params(self, params):
    """Translate nefertari query params into Elasticsearch search kwargs.

    Builds the query body from ``_raw_terms`` (or a match-all) unless a
    ``body`` is already provided, then applies optional paging, sorting,
    field selection and boosted ``_search_fields``.
    """
    params = dictset(params)
    _params = dict(index=self.index_name, doc_type=self.doc_type)

    if 'body' not in params:
        query_string = build_qs(params.remove(RESERVED),
                                params.get('_raw_terms', ''))
        if query_string:
            _params['body'] = {
                'query': {
                    'query_string': {
                        'query': query_string
                    }
                }
            }
            # BUG FIX: removed stray Python-2 `print query_string` debug
            # statement (a SyntaxError under Python 3).
        else:
            _params['body'] = {"query": {"match_all": {}}}

    if '_limit' in params:
        _params['from_'], _params['size'] = process_limit(
            params.get('_start', None),
            params.get('_page', None),
            params['_limit'])

    if '_sort' in params:
        _params['sort'] = apply_sort(params['_sort'])
    if '_fields' in params:
        _params['fields'] = params['_fields']

    if '_search_fields' in params:
        # Later (comma-separated) fields get a lower boost.
        search_fields = params['_search_fields'].split(',')
        search_fields.reverse()
        search_fields = [
            s + '^' + str(i) for i, s in enumerate(search_fields, 1)
        ]
        _params['body']['query']['query_string']['fields'] = search_fields
    return _params
def setup_auth_policies(config, raml_root):
    """ Setup authentication, authorization policies.

    Performs basic validation to check all the required values are
    present and performs authentication, authorization policies
    generation using generator functions from `AUTHENTICATION_POLICIES`.

    :param config: Pyramid Configurator instance.
    :param raml_root: Instance of ramlfications.raml.RootNode.
    """
    log.info('Configuring auth policies')
    secured_by = [item for item in (raml_root.secured_by or []) if item]
    if not secured_by:
        log.info('API is not secured. `secured_by` attribute '
                 'value missing.')
        return
    scheme_name = secured_by[0]

    schemes = {scheme.name: scheme for scheme in raml_root.security_schemes}
    if scheme_name not in schemes:
        raise ValueError(
            'Undefined security scheme used in `secured_by`: {}'.format(
                scheme_name))

    scheme = schemes[scheme_name]
    if scheme.type not in AUTHENTICATION_POLICIES:
        raise ValueError('Unsupported security scheme type: {}'.format(
            scheme.type))

    # Authentication policy: generated from the scheme's settings.
    policy_generator = AUTHENTICATION_POLICIES[scheme.type]
    authn_policy = policy_generator(config, dictset(scheme.settings or {}))
    config.set_authentication_policy(authn_policy)

    # Authorization policy: plain ACL.
    config.set_authorization_policy(ACLAuthorizationPolicy())
def _filter_fields(self, data):
    """Strip fields the current request is not allowed to see.

    Filtering is driven by the document class' ``_public_fields``,
    ``_auth_fields`` and ``_hidden_fields`` attributes; '_type', '_pk'
    and '_self' are always kept. Nested documents are filtered via
    ``_apply_nested_privacy``.
    """
    if '_type' not in data:
        return data
    try:
        model_cls = engine.get_document_cls(data['_type'])
    except ValueError as ex:
        # Unknown document type: return data unchanged rather than fail.
        log.error(str(ex))
        return data

    public_fields = set(getattr(model_cls, '_public_fields', None) or [])
    auth_fields = set(getattr(model_cls, '_auth_fields', None) or [])
    hidden_fields = set(getattr(model_cls, '_hidden_fields', None) or [])

    allowed = set(data.keys())
    user = getattr(self.request, 'user', None)
    if self.request:
        if user:
            # Authenticated non-admins only see the auth fields.
            if not self.is_admin:
                allowed &= auth_fields
        else:
            # Anonymous requests only see the public fields.
            allowed &= public_fields

        if self.drop_hidden:
            if not self.is_admin:
                allowed -= hidden_fields
        else:
            allowed.update(hidden_fields)

    allowed.update(['_type', '_pk', '_self'])

    if not isinstance(data, dictset):
        data = dictset(data)
    return self._apply_nested_privacy(data.subset(allowed))
def test_validator_decorator(self):
    """validator() registers validate_types/validate_required before-calls
    and merges per-action kwargs over the class validation schema."""
    request_params = dictset(a=10, b='bbb', c=20, mixed=lambda: {})
    request = mock.MagicMock(params=request_params)
    resource = mock.MagicMock(actions=['create', 'update', 'index'])

    class MyView(BaseView):
        __validation_schema__ = dict(
            a=dict(type=int, required=True),
            b=dict(type=str, required=False))

        def __init__(self):
            BaseView.__init__(self, resource, request)

        @wrappers.validator(c=dict(type=int, required=True),
                            a=dict(type=float, required=False))
        def create(self):
            pass

        @wrappers.validator()
        def update(self):
            pass

        @wrappers.validator(a=dict(type=int, required=False))
        def index(self):
            []

        def convert_ids2objects(self, *args, **kwargs):
            pass

    view = MyView()
    expected_calls = [wrappers.validate_types(), wrappers.validate_required()]
    self.assertEqual(expected_calls, view.create._before_calls)

    create_kwargs = view.create._before_calls[0].kwargs
    self.assertIn('c', create_kwargs)
    self.assertEqual(dict(type=float, required=False), create_kwargs['a'])
def __init__(self, context, request, _params=None):
    """Prepare request params and renderer, then run init hooks.

    :param context: Traversal context for the view.
    :param request: Pyramid request object.
    :param _params: Optional pre-parsed params dict; when falsy, the
        request's query string params are used instead.
    """
    self.context = context
    self.request = request
    # BUG FIX: default was the mutable literal `_params={}`; use None to
    # avoid the shared-mutable-default trap (behavior is unchanged since
    # both defaults are falsy and fall through to request.params).
    self._params = dictset(_params or request.params.mixed())

    # For JSON bodies on write methods, merge the payload into params;
    # malformed JSON is logged but tolerated.
    ctype = request.content_type
    if request.method in ['POST', 'PUT', 'PATCH']:
        if ctype == 'application/json':
            try:
                self._params.update(request.json)
            except simplejson.JSONDecodeError:
                log.error("Expecting JSON. Received: '{}'. Request: {} {}".
                          format(request.body, request.method, request.url))

    # dict of the callables {'action':[callable1, callable2..]}
    # as name implies, before calls are executed before the action is called
    # after_calls are called after the action returns.
    self._before_calls = defaultdict(list)
    self._after_calls = defaultdict(list)

    # no accept headers, use default
    if '' in request.accept:
        request.override_renderer = self._default_renderer
    elif 'application/json' in request.accept:
        request.override_renderer = 'nefertari_json'
    elif 'text/plain' in request.accept:
        request.override_renderer = 'string'

    self.setup_default_wrappers()
    self.convert_ids2objects()
    # Anonymous requests get public limits applied.
    if not getattr(self.request, 'user', None):
        wrappers.set_public_limits(self)
def includeme(config):
    """Wire a logstash UDP handler onto the root logger.

    Reads these settings: ``logstash.enable``, ``logstash.check``,
    ``logstash.host``, ``logstash.port``. Missing settings are logged
    as a warning rather than raised.
    """
    log.info('Including logstash')
    Settings = dictset(config.registry.settings)
    try:
        if not Settings.asbool('logstash.enable'):
            log.warning('Logstash is disabled')
            return

        if Settings.asbool('logstash.check'):
            # Best-effort UDP ping to detect an unreachable server;
            # failures are logged but do not abort setup.
            import socket
            sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            deftimeout = sock.gettimeout()
            sock.settimeout(3)
            try:
                # BUG FIX: socket.sendto() requires bytes; a str payload
                # raises TypeError on Python 3 (b'PING' works on 2 and 3).
                sock.sendto(b'PING', 0,
                            (Settings['logstash.host'],
                             Settings.asint('logstash.port')))
                recv, svr = sock.recvfrom(255)
                sock.shutdown(2)
            except Exception as e:
                log.error('Looks like logstash server is not running: %s' % e)
            finally:
                sock.settimeout(deftimeout)

        logger = logging.getLogger()
        handler = logstash.LogstashHandler(
            Settings['logstash.host'],
            Settings.asint('logstash.port'),
            version=1)
        handler.setFormatter(
            logging.Formatter(
                "%(asctime)s %(levelname)-5.5s [%(name)s][%(threadName)s] "
                "%(module)s.%(funcName)s: %(message)s"))
        logger.addHandler(handler)
    except KeyError as e:
        log.warning('Bad settings for logstash. %s' % e)