def publish():
    """Trigger the publish action on the rolling plugin for each requested type.

    Reads a JSON body of the form {"types": [...]}, looks each type up in the
    ESDAO_ROLLING_PLUGINS config map, loads the plugin class and calls its
    publish() classmethod.  Returns an empty body.
    """
    types = request.json.get("types", [])
    # renamed from 'map' to avoid shadowing the builtin
    plugin_map = app.config.get("ESDAO_ROLLING_PLUGINS", {})
    for t in types:
        # NOTE(review): an unconfigured type yields None here and load_class
        # will be handed None — presumably callers only send configured types
        klazz = plugin.load_class(plugin_map.get(t))
        klazz.publish()
    return ""
def rollback():
    """Trigger the rollback action on the rolling plugin for each requested type.

    Reads a JSON body of the form {"types": [...]}, looks each type up in the
    ESDAO_ROLLING_PLUGINS config map, loads the plugin class and calls its
    rollback() classmethod.  Returns an empty body.
    """
    types = request.json.get("types", [])
    # renamed from 'map' to avoid shadowing the builtin
    plugin_map = app.config.get("ESDAO_ROLLING_PLUGINS", {})
    for t in types:
        klazz = plugin.load_class(plugin_map.get(t))
        klazz.rollback()
    return ""
def get(cls):
    """Instantiate and return the configured implementation of the base Store class."""
    impl_path = app.config.get("STORE_IMPL")
    impl_class = plugin.load_class(impl_path)
    return impl_class()
def dynamic_write_type(cls):
    """Return the name of the "next" (write) type, minting a new one if needed.

    Resolution order: the in-memory config cache, then the on-disk roll
    directory, and finally a freshly minted type name which is persisted to
    both.  Thread-safe: creation happens under the class lock.
    """
    # look to see if the next index is already set, in which case we can return
    # (renamed from 'next' to avoid shadowing the builtin)
    next_type = cls._get_cfg("next")
    if next_type is not None:
        return next_type

    # since there could be several threads trying to do the same thing, lock
    # this thread until the file/index has been sorted out
    with cls._lock:
        # if not cached, read it from the directory
        next_type = cls._get_file("next")
        if next_type is not None:
            cls._set_cfg("next", next_type)
            return next_type

        # if it wasn't in the directory we need to make it
        tname = cls._mint_next_type()

        # find out if this class needs to self-init
        if cls.__init_dynamic_type__:
            for cname in app.config.get("ELASTIC_SEARCH_SELF_INIT", []):
                klazz = plugin.load_class(cname)
                if issubclass(cls, klazz):
                    cls.self_init(type_name=tname, write_to="next")

        # now write the file (renamed from 'dir' to avoid shadowing the builtin)
        roll_dir = cls._roll_dir()
        if not os.path.exists(roll_dir):
            os.mkdir(roll_dir)
        cls._set_file("next", tname)
        cls._set_cfg("next", tname)
        return tname
def generate_file(name, respect_timeout=False):
    """Generate (or regenerate) the cached file registered under `name`.

    Looks up the generator class and timeout in the CACHE_GENERATORS config,
    guards against concurrent generation via the CachedFile.generating flag,
    writes the generated file into CACHE_DIR/<name>/, updates the CachedFile
    record, and prunes old files from the cache directory.

    :param name: the cache type name, used as the CachedFile id
    :param respect_timeout: if True and the cached file is not yet stale, do nothing
    :raises CacheException: if no generator class or timeout is configured
    :return: the updated CachedFile record, or None if generation was skipped
    """
    # check that we have a generator for this cache type
    generator = app.config.get("CACHE_GENERATORS", {}).get(name, {}).get("class")
    if generator is None:
        raise CacheException("No generator configured for {x}".format(x=name))

    # figure out the timeout on this file
    timeout = app.config.get("CACHE_GENERATORS", {}).get(name, {}).get("timeout")
    if timeout is None:
        raise CacheException("No timeout specified for {x}".format(x=name))

    # get the file record to which this pertains (or make one if it is new)
    cf = models.CachedFile.pull(name)
    if cf is None:
        cf = models.CachedFile()
        cf.id = name
    else:
        # if the file is currently generating (perhaps in another thread), just return
        if cf.generating:
            return
        # if the file is not stale, and we are respecting the timeout, just return
        if not cf.is_stale() and respect_timeout:
            return

    # switch the generating flag to true and re-save
    cf.generating = True
    # Note that we don't do a blocking save, because we want to update this
    # record again asap, and this data is throwaway
    cf.save()

    # create a suitable filename
    filename = "{x}_{y}".format(x=name, y=datetime.utcnow().strftime("%Y-%m-%d_%H%M"))

    # get the cache directory sorted out
    # (renamed from 'dir' to avoid shadowing the builtin)
    cache_dir = os.path.join(app.config.get("CACHE_DIR"), name)
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)

    # finally get the file path ready
    filepath = os.path.join(cache_dir, filename)

    # now instantiate the generator class and ask it to generate the file
    klazz = plugin.load_class(generator)
    inst = klazz()
    inst.generate(filepath)

    # now calculate the timeout
    to = datetime.utcnow() + timedelta(seconds=timeout)

    # now update the cached file record
    cf.filename = filename
    cf.generating = False
    cf.timeout = to.strftime("%Y-%m-%dT%H:%M:%SZ")
    cf.save()

    # finally, clean up the cache directory of any old files
    cleanup_cache_dir(cache_dir)

    return cf
def tmp(cls):
    """Instantiate and return the configured local temp-storage Store implementation.

    In addition to the methods supplied by Store, the returned object must
    also provide a "path" function to give the path on-disk to the file.
    """
    impl_path = app.config.get("STORE_TMP_IMPL")
    impl_class = plugin.load_class(impl_path)
    return impl_class()
def status():
    """Report the rolling status of every configured ESDAO rolling plugin as JSON."""
    # renamed from 'map' to avoid shadowing the builtin
    plugin_map = app.config.get("ESDAO_ROLLING_PLUGINS", {})
    resp = {}
    for k, v in plugin_map.iteritems():
        klazz = plugin.load_class(v)
        resp[k] = klazz.rolling_status()
    r = make_response(json.dumps(resp))
    r.mimetype = "application/json"
    return r
def status():
    """Return a JSON object mapping each ESDAO rolling plugin name to its rolling status."""
    # renamed from 'map' to avoid shadowing the builtin
    plugins = app.config.get("ESDAO_ROLLING_PLUGINS", {})
    resp = {}
    for name, class_path in plugins.iteritems():
        klazz = plugin.load_class(class_path)
        resp[name] = klazz.rolling_status()
    r = make_response(json.dumps(resp))
    r.mimetype = "application/json"
    return r
def _get_class(container_type, operation):
    """Resolve the model class for a CRUD container/operation pair.

    Returns None when CRUD is unconfigured, the container is unknown, no
    model is declared, or the operation is absent/disabled for the container.
    """
    crud_cfg = app.config.get("CRUD")
    if crud_cfg is None:
        return None

    container_cfg = crud_cfg.get(container_type)
    if container_cfg is None:
        return None

    model_ref = container_cfg.get("model")
    if model_ref is None or operation not in container_cfg:
        return None

    # the operation must be explicitly enabled
    if not container_cfg.get(operation).get("enable", False):
        return None

    return plugin.load_class(model_ref)
def get_reset_formcontext(cls, acc=None, form_data=None):
    """Build the configured account-reset form context for the given account."""
    context_path = app.config.get("ACCOUNT_RESET_FORM_CONTEXT")
    context_class = plugin.load_class(context_path)
    return context_class(form_data, acc)
def get_register_formcontext(cls, form_data=None):
    """Build the configured account-registration form context."""
    context_path = app.config.get("ACCOUNT_REGISTER_FORM_CONTEXT")
    context_class = plugin.load_class(context_path)
    return context_class(form_data)
def refresh():
    """Trigger a rolling refresh on every configured ESDAO rolling plugin."""
    # renamed from 'map' to avoid shadowing the builtin; only the values
    # (class paths) are needed, so the unused key is dropped
    plugin_map = app.config.get("ESDAO_ROLLING_PLUGINS", {})
    for class_path in plugin_map.itervalues():
        klazz = plugin.load_class(class_path)
        klazz.rolling_refresh()
    return ""
def get_activate_formcontext(cls, acc=None, form_data=None):
    """Build the configured account-activation form context for the given account."""
    context_path = app.config.get("ACCOUNT_ACTIVATE_FORM_CONTEXT")
    context_class = plugin.load_class(context_path)
    return context_class(form_data, acc)
def get_model(cls):
    """Load and return the configured account model class (not an instance)."""
    return plugin.load_class(app.config.get("ACCOUNT_MODEL"))
def compound(config_name):
    """Compound autocomplete: match the query against several fields at once.

    Looks the named configuration up in AUTOCOMPLETE_COMPOUND, builds a
    bool/should wildcard query over the configured filter fields, runs it
    through the configured DAO, optionally renames result fields, and
    returns the records as a JSON response.
    """
    # get the configuration
    acc = app.config.get("AUTOCOMPLETE_COMPOUND")
    cfg = acc.get(config_name)
    if cfg is None:
        abort(404)

    # get the query value
    q = request.values.get("q")
    if q is None or q == "":
        abort(400)
    q = q.strip()

    # apply any input filters to the query value
    ifs = cfg.get("input_filter")
    if ifs is not None:
        q = ifs(q)

    # get the filters that will be used to match documents
    filters = cfg.get("filters")
    if filters is None or len(filters) == 0:
        abort(500)

    # now build the query object: one wildcard clause per configured field
    query = {"query": {"bool": {"should": []}}}
    for field, params in filters.iteritems():
        wq = _do_wildcard(q, params.get("start_wildcard", True), params.get("end_wildcard", True))
        boost = params.get("boost", 1.0)
        wcq = {"wildcard": {field: {"value": wq, "boost": boost}}}
        query["query"]["bool"]["should"].append(wcq)

    # set the size of the result set, clamped to the configured maximum
    size = request.values.get("size")
    if size is None or size == "":
        size = cfg.get("default_size")
    else:
        try:
            size = int(size)
        except ValueError:  # narrowed from bare except: only int() can fail here
            abort(400)
    if size > cfg.get("max_size", 25):
        size = cfg.get("max_size", 25)
    query["size"] = size

    # add the fields constraint
    fields = cfg.get("fields")
    if fields is None or len(fields) == 0:
        abort(500)
    query["fields"] = fields

    # get the name of the model that will handle this query, and then look up
    # the class that will handle it
    dao_name = cfg.get("dao")
    dao_klass = plugin.load_class(dao_name)
    if dao_klass is None:
        abort(500)

    # issue the query
    res = dao_klass.query(q=query)
    records = esprit.raw.unpack_json_result(res)

    # rewrite the field names if necessary
    field_name_map = cfg.get("field_name_map")
    if field_name_map is not None and len(field_name_map) > 0:
        mapped_records = []
        for r in records:
            newobj = {}
            for k, v in r.iteritems():
                newk = field_name_map.get(k)
                if newk is None:
                    newobj[k] = v
                else:
                    newobj[newk] = v
            mapped_records.append(newobj)
        records = mapped_records

    # make the response
    resp = make_response(json.dumps(records))
    resp.mimetype = "application/json"
    return resp
def term(config_name):
    """Term autocomplete: return distinct facet terms matching the query.

    Looks the named configuration up in AUTOCOMPLETE_TERM, builds a wildcard
    query on the single configured filter field, retrieves the configured
    facet's terms via the configured DAO, and returns them as a JSON list.
    """
    # get the configuration
    acc = app.config.get("AUTOCOMPLETE_TERM")
    cfg = acc.get(config_name)
    if cfg is None:
        abort(404)

    # get the query value
    q = request.values.get("q")
    if q is None or q == "":
        abort(400)
    q = q.strip()

    # apply any input filters to the query value
    ifs = cfg.get("input_filter")
    if ifs is not None:
        q = ifs(q)

    # get the filter that will be used to match documents
    # (renamed from 'filter' to avoid shadowing the builtin)
    term_filter = cfg.get("filter")
    if term_filter is None:
        abort(500)

    # now build the query object on the single configured field
    field = term_filter.keys()[0]
    params = term_filter.get(field, {})
    wq = _do_wildcard(q, params.get("start_wildcard", True), params.get("end_wildcard", True))
    query = {"query": {"bool": {"must": [{"wildcard": {field: {"value": wq}}}]}}}

    # the size of this query is 0, as we're only interested in the facet
    query["size"] = 0

    # get the size of the facet, clamped to the configured maximum
    size = request.values.get("size")
    if size is None or size == "":
        size = cfg.get("default_size")
    else:
        try:
            size = int(size)
        except ValueError:  # narrowed from bare except: only int() can fail here
            abort(400)
    if size > cfg.get("max_size", 25):
        size = cfg.get("max_size", 25)

    # build the facet
    facet = cfg.get("facet")
    if facet is None:
        abort(500)
    query["facets"] = {facet: {"terms": {"field": facet, "size": size}}}

    # get the name of the model that will handle this query, and then look up
    # the class that will handle it
    dao_name = cfg.get("dao")
    dao_klass = plugin.load_class(dao_name)
    if dao_klass is None:
        abort(500)

    # issue the query and pull out just the term values
    res = dao_klass.query(q=query)
    terms = esprit.raw.get_facet_terms(res, facet)
    records = [t.get("term") for t in terms]

    # make the response
    resp = make_response(json.dumps(records))
    resp.mimetype = "application/json"
    return resp
def term(config_name):
    """Term autocomplete endpoint returning matching facet terms as JSON.

    The named configuration in AUTOCOMPLETE_TERM supplies the filter field,
    the facet to aggregate, size limits and the DAO class used to execute
    the query.
    """
    # get the configuration
    acc = app.config.get("AUTOCOMPLETE_TERM")
    cfg = acc.get(config_name)
    if cfg is None:
        abort(404)

    # get the query value
    q = request.values.get("q")
    if q is None or q == "":
        abort(400)
    q = q.strip()

    # apply any input filters to the query value
    ifs = cfg.get("input_filter")
    if ifs is not None:
        q = ifs(q)

    # get the filter that will be used to match documents
    # (renamed from 'filter' to avoid shadowing the builtin)
    term_filter = cfg.get("filter")
    if term_filter is None:
        abort(500)

    # now build the query object on the single configured field
    field = term_filter.keys()[0]
    params = term_filter.get(field, {})
    wq = _do_wildcard(q, params.get("start_wildcard", True), params.get("end_wildcard", True))
    query = {
        "query": {
            "bool": {
                "must": [{
                    "wildcard": {
                        field: {
                            "value": wq
                        }
                    }
                }]
            }
        }
    }

    # the size of this query is 0, as we're only interested in the facet
    query["size"] = 0

    # get the size of the facet, clamped to the configured maximum
    size = request.values.get("size")
    if size is None or size == "":
        size = cfg.get("default_size")
    else:
        try:
            size = int(size)
        except ValueError:  # narrowed from bare except: only int() can fail here
            abort(400)
    if size > cfg.get("max_size", 25):
        size = cfg.get("max_size", 25)

    # build the facet
    facet = cfg.get("facet")
    if facet is None:
        abort(500)
    query["facets"] = {facet: {"terms": {"field": facet, "size": size}}}

    # get the name of the model that will handle this query, and then look up
    # the class that will handle it
    dao_name = cfg.get("dao")
    dao_klass = plugin.load_class(dao_name)
    if dao_klass is None:
        abort(500)

    # issue the query and pull out just the term values
    res = dao_klass.query(q=query)
    terms = esprit.raw.get_facet_terms(res, facet)
    records = [t.get("term") for t in terms]

    # make the response
    resp = make_response(json.dumps(records))
    resp.mimetype = "application/json"
    return resp
def search():
    """Run a paged query-string search through the configured DAO and return JSON.

    Query parameters: q (required), page (default 1), pageSize (default 10,
    clamped to SEARCH_MAX_PAGE_SIZE).  Results may be post-processed by an
    optional SEARCH_RESULT_FILTER function.
    """
    # get the values for the 3 key bits of search info: the query, the page number and the page size
    q = request.values.get("q")
    page = request.values.get("page", 1)
    psize = request.values.get("pageSize", 10)

    # check that we have been given a query
    if q is None or q == "":
        abort(400)

    # check the page is an integer greater than 0
    try:
        page = int(page)
    except ValueError:  # narrowed from bare except: only int() can fail here
        abort(400)
    if page < 1:
        page = 1

    # limit the page size as per the configuration
    try:
        psize = int(psize)
    except ValueError:
        abort(400)
    if psize > app.config.get("SEARCH_MAX_PAGE_SIZE", 100):
        psize = app.config.get("SEARCH_MAX_PAGE_SIZE", 100)
    elif psize < 1:
        psize = 10

    # calculate the position of the from cursor in the document set
    fro = (page - 1) * psize

    # assemble the query
    query = dao.QueryStringQuery(q, fro, psize)

    # load the DAO class and send the query through it
    klazz = plugin.load_class(app.config.get("SEARCH_DAO"))
    res = klazz.query(q=query.query())

    # check to see if there was a search error
    if res.get("error") is not None:
        abort(400)

    # unpack the results and pull out the search metadata
    obs = esprit.raw.unpack_json_result(res)
    total = res.get("hits", {}).get("total", 0)

    # optionally filter the result objects as per the config
    # (renamed from 'filter' to avoid shadowing the builtin)
    result_filter = app.config.get("SEARCH_RESULT_FILTER")
    if result_filter is not None:
        fn = plugin.load_function(result_filter)
        obs = [fn(o) for o in obs]

    # build the response object
    response = {
        "total": total,
        "page": page,
        "pageSize": psize,
        # FIX: format was "%Y-%m%dT%H:%M:%SZ" (missing '-' between month and
        # day), producing a malformed ISO 8601 timestamp
        "timestamp": datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
        "query": q,
        "results": obs
    }
    resp = make_response(json.dumps(response))
    resp.mimetype = "application/json"
    return resp
def get_forgot_formcontext(cls, form_data=None):
    """Build the configured forgotten-password form context."""
    context_path = app.config.get("ACCOUNT_FORGOT_FORM_CONTEXT")
    context_class = plugin.load_class(context_path)
    return context_class(form_data)
def compound(config_name):
    """Compound autocomplete endpoint: wildcard-match the query over several fields.

    The named configuration in AUTOCOMPLETE_COMPOUND supplies the filter
    fields (with boosts), the fields to return, size limits, an optional
    input filter and field-name map, and the DAO class used to execute the
    query.  Results are returned as a JSON list of records.
    """
    # get the configuration
    acc = app.config.get("AUTOCOMPLETE_COMPOUND")
    cfg = acc.get(config_name)
    if cfg is None:
        abort(404)

    # get the query value
    q = request.values.get("q")
    if q is None or q == "":
        abort(400)
    q = q.strip()

    # apply any input filters to the query value
    ifs = cfg.get("input_filter")
    if ifs is not None:
        q = ifs(q)

    # get the filters that will be used to match documents
    filters = cfg.get("filters")
    if filters is None or len(filters) == 0:
        abort(500)

    # now build the query object: one boosted wildcard clause per field
    query = {"query": {"bool": {"should": []}}}
    for field, params in filters.iteritems():
        wq = _do_wildcard(q, params.get("start_wildcard", True), params.get("end_wildcard", True))
        boost = params.get("boost", 1.0)
        wcq = {"wildcard": {field: {"value": wq, "boost": boost}}}
        query["query"]["bool"]["should"].append(wcq)

    # set the size of the result set, clamped to the configured maximum
    size = request.values.get("size")
    if size is None or size == "":
        size = cfg.get("default_size")
    else:
        try:
            size = int(size)
        except ValueError:  # narrowed from bare except: only int() can fail here
            abort(400)
    if size > cfg.get("max_size", 25):
        size = cfg.get("max_size", 25)
    query["size"] = size

    # add the fields constraint
    fields = cfg.get("fields")
    if fields is None or len(fields) == 0:
        abort(500)
    query["fields"] = fields

    # get the name of the model that will handle this query, and then look up
    # the class that will handle it
    dao_name = cfg.get("dao")
    dao_klass = plugin.load_class(dao_name)
    if dao_klass is None:
        abort(500)

    # issue the query
    res = dao_klass.query(q=query)
    records = esprit.raw.unpack_json_result(res)

    # rewrite the field names if necessary
    field_name_map = cfg.get("field_name_map")
    if field_name_map is not None and len(field_name_map) > 0:
        mapped_records = []
        for r in records:
            newobj = {}
            for k, v in r.iteritems():
                newk = field_name_map.get(k)
                if newk is None:
                    newobj[k] = v
                else:
                    newobj[newk] = v
            mapped_records.append(newobj)
        records = mapped_records

    # make the response
    resp = make_response(json.dumps(records))
    resp.mimetype = "application/json"
    return resp
def query(path=None):
    """Generic query endpoint routed by URL path.

    The path identifies an index (and optionally "_search" or a record id);
    the QUERY_ROUTE config controls which routes/indices are exposed, whether
    authentication and roles are required, which DAO executes the query, and
    which query filters are applied.  Returns the raw query result (or a
    single record) as a JSON response.
    """
    # get the bits out of the path, and ensure that we have at least some parts to work with
    # NOTE(review): str.split always returns at least one element, so this
    # guard can never fire — an empty path yields [''] and falls through
    pathparts = path.strip('/').split('/')
    if len(pathparts) == 0:
        abort(400)

    # load the query route config and the path we are being requested for
    qrs = app.config.get("QUERY_ROUTE", {})
    frag = request.path

    # get the configuration for this url route (first config key that
    # prefixes the request path wins)
    route_cfg = None
    for key in qrs:
        if frag.startswith("/" + key):
            route_cfg = qrs.get(key)
            break

    # if no route cfg is found this is not authorised
    if route_cfg is None:
        abort(401)

    # get the configuration for the specific index being queried
    index = pathparts[0]
    cfg = route_cfg.get(index)
    if cfg is None:
        abort(401)

    # does the user have to be authenticated
    if cfg.get("auth"):
        if current_user is None or current_user.is_anonymous():
            abort(401)

        # if so, does the user require a role
        role = cfg.get("roles")
        if role is not None and not current_user.has_role(role):
            abort(401)

    # get the name of the model that will handle this query, and then look up
    # the class that will handle it
    dao_name = cfg.get("dao")
    dao_klass = plugin.load_class(dao_name)
    if dao_klass is None:
        abort(404)

    # now work out what kind of operation is being asked for
    # if _search is specified, then this is a normal query; a second path
    # part that is not "_search" is treated as a record id (GET only)
    search = False
    by_id = None
    if len(pathparts) == 1 or (len(pathparts) == 2 and pathparts[1] == "_search"):
        search = True
    elif len(pathparts) == 2:
        if request.method == "POST":
            abort(401)
        by_id = pathparts[1]
    else:
        abort(400)

    resp = None
    if by_id is not None:
        # fetch a single raw (unwrapped) record by id
        rec = dao_klass.pull(by_id, wrap=False)
        resp = make_response(rec)
    elif search:
        q = Query()
        # if this is a POST, read the contents out of the body; falls back to
        # the last form key when no JSON body was sent
        if request.method == "POST":
            q = Query(request.json) if request.json else Query(dict(request.form).keys()[-1])  # FIXME: does this actually work?
        # if there is a q param, make it into a query string query
        elif 'q' in request.values:
            s = request.values['q']
            op = request.values.get('default_operator')
            q.query_string(s, op)
        # if there is a source param, load the json from it
        elif 'source' in request.values:
            q = Query(json.loads(urllib2.unquote(request.values['source'])))

        # now run the query through the filters
        filters = app.config.get("QUERY_FILTERS", {})
        filter_names = cfg.get("filters", [])
        for filter_name in filter_names:
            # because of back-compat, we have to do a few tricky things here...
            # filter may be the name of a filter in the list of query filters
            fn = filters.get(filter_name)
            if fn is None:
                # filter may be the path to a function
                fn = plugin.load_function(filter_name)
            if fn is None:
                app.logger.info("Unable to load query filter for {x}".format(x=filter_name))
                abort(500)
            # filters mutate the query object in place
            fn(q)

        # finally send the query and return the response
        res = dao_klass.query(q=q.as_dict())
        resp = make_response(json.dumps(res))
    else:
        abort(400)

    resp.mimetype = "application/json"
    return resp
from octopus.core import app, initialise from octopus.lib import plugin, cli import sys command = sys.argv[1] args = sys.argv[2:] for name, path in app.config.get("CLI_SCRIPTS", {}).iteritems(): ran = False if name == command: # get an instance of the script klazz = plugin.load_class(path) inst = klazz() # check that it can legitimately be run if not isinstance(inst, cli.Script): print command, "is not a legitimate octopus script - must extend from octopus.lib.cli.Script" exit() # ensure the app is initialised initialise() # run it ran = True klazz().run(args) if not ran: print command, "- command not found"
def search():
    """Execute a paged query-string search via the configured DAO and return JSON.

    Query parameters: q (required), page (default 1), pageSize (default 10,
    clamped to SEARCH_MAX_PAGE_SIZE).  Results may be post-processed by an
    optional SEARCH_RESULT_FILTER function.
    """
    # get the values for the 3 key bits of search info: the query, the page number and the page size
    q = request.values.get("q")
    page = request.values.get("page", 1)
    psize = request.values.get("pageSize", 10)

    # check that we have been given a query
    if q is None or q == "":
        abort(400)

    # check the page is an integer greater than 0
    try:
        page = int(page)
    except ValueError:  # narrowed from bare except: only int() can fail here
        abort(400)
    if page < 1:
        page = 1

    # limit the page size as per the configuration
    try:
        psize = int(psize)
    except ValueError:
        abort(400)
    if psize > app.config.get("SEARCH_MAX_PAGE_SIZE", 100):
        psize = app.config.get("SEARCH_MAX_PAGE_SIZE", 100)
    elif psize < 1:
        psize = 10

    # calculate the position of the from cursor in the document set
    fro = (page - 1) * psize

    # assemble the query
    query = dao.QueryStringQuery(q, fro, psize)

    # load the DAO class and send the query through it
    klazz = plugin.load_class(app.config.get("SEARCH_DAO"))
    res = klazz.query(q=query.query())

    # check to see if there was a search error
    if res.get("error") is not None:
        abort(400)

    # unpack the results and pull out the search metadata
    obs = esprit.raw.unpack_json_result(res)
    total = res.get("hits", {}).get("total", 0)

    # optionally filter the result objects as per the config
    # (renamed from 'filter' to avoid shadowing the builtin)
    result_filter = app.config.get("SEARCH_RESULT_FILTER")
    if result_filter is not None:
        fn = plugin.load_function(result_filter)
        obs = [fn(o) for o in obs]

    # build the response object
    response = {
        "total": total,
        "page": page,
        "pageSize": psize,
        # FIX: format was "%Y-%m%dT%H:%M:%SZ" (missing '-' between month and
        # day), producing a malformed ISO 8601 timestamp
        "timestamp": datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
        "query": q,
        "results": obs,
    }
    resp = make_response(json.dumps(response))
    resp.mimetype = "application/json"
    return resp