def get(uid: int = None, name: str = None, limit: int = None, offset: int = None, search: str = None):
    """
    Fetch resource groups, optionally filtered.

    :param uid: exact primary-key lookup; returns a one-element list
    :param name: exact name lookup; returns a single object, not a list
        (NOTE(review): return shape differs from the uid branch — confirm
        callers expect this asymmetry)
    :param limit: maximum number of rows to return
    :param offset: row offset for pagination
    :param search: performs a fulltext (case-insensitive substring) search
    :return: list of ResourceGroup rows, or a single row for the name branch
    """
    query = db.session.query(ResourceGroup)

    if isinstance(uid, int):
        return [query.filter(ResourceGroup.id == uid).first()]

    if isinstance(name, str) and name:
        return query.filter(ResourceGroup.name == name).first()

    if isinstance(search, str) and search:
        pattern = "%" + escape_like(search) + "%"
        query = query.filter(ResourceGroup.name.ilike(pattern))

    if isinstance(offset, int) and offset:
        query = query.offset(offset)
    if isinstance(limit, int) and limit:
        query = query.limit(limit)

    return query.all()
def TestInput(data):
    """Atheris fuzz entry point: exercise ``cast_if`` and ``escape_like``
    with fuzzer-derived values against a throwaway SQLite database.

    :param data: raw fuzzer input bytes
    """
    # Skip inputs too short to derive meaningful values.
    # (Bug fix: the original used `pass`, making this guard a no-op.)
    if len(data) < 10:
        return
    fdp = atheris.FuzzedDataProvider(data)

    # Exercise cast_if with both column objects and plain fuzzed values.
    cast_if(FuzzTable.id, Integer)
    cast_if(FuzzTable.name, Integer)
    cast_if(FuzzTable.id, String)
    cast_if(FuzzTable.name, String)
    cast_if(fdp.ConsumeInt(10), Integer)
    cast_if(fdp.ConsumeString(10), Integer)
    cast_if(fdp.ConsumeInt(10), String)
    cast_if(fdp.ConsumeString(10), String)

    db_str = 'sqlite:///fuzz.db'
    engine = create_engine(db_str)
    Base.metadata.create_all(engine)
    try:
        with Session(engine) as session:
            name_str = fdp.ConsumeString(20)
            # escape_like must neutralize LIKE wildcards in the fuzzed string
            session.query(FuzzTable).filter(
                FuzzTable.name.ilike(escape_like(name_str))).all()
    except SQLAlchemyError:
        # Database errors are expected outcomes under fuzzing, not crashes.
        pass
def search(queryset, user_input):
    """Filter *queryset* by a case-insensitive substring match of
    *user_input* across every searchable field.

    :param queryset: the query to narrow
    :param user_input: raw search text; LIKE wildcards are escaped
    :return: the filtered queryset, or ``None`` when input is None/empty
    """
    if user_input is None or user_input == u'':
        return None
    # TODO joins and type checks for the queries where possible
    needle = u'%{}%'.format(escape_like(str(user_input)))
    clauses = [getattr(model, f.name).ilike(needle)
               for f in fields if f.searchable]
    return queryset.filter(or_(*clauses))
def get_filter(l):
    """Recursively build a SQLAlchemy filter from a nested expression list.

    ``l`` is either a plain search term (matched with LIKE against
    ``OcrMetaData.text``, wildcards escaped) or a list of the form
    ``[operand, "AND"|"OR", operand, ...]`` where operands sit at even
    indices.  NOTE(review): only ``l[1]`` is inspected for the operator —
    mixed operators at later odd positions are ignored; confirm that is
    intended by the grammar that produces these lists.

    :param l: term or nested operand list
    :return: a SQLAlchemy clause element
    """
    # Bug-adjacent fix: `type(l) is list` -> isinstance (idiomatic and
    # accepts list subclasses).
    if isinstance(l, list):
        if len(l) == 1:
            return get_filter(l[0])
        op = and_ if l[1] == "AND" else or_  # anything else is treated as OR
        # Operands occupy the even indices; combine them recursively.
        return op(*(get_filter(e) for e in l[0::2]))
    return OcrMetaData.text.like("%" + escape_like(l) + "%")
def create_tags_clause(self, tag):
    """Build a clause matching tasks with a tag containing *tag*
    (case-insensitive, LIKE wildcards escaped)."""
    needle = u'%{}%'.format(escape_like(tag.lower()))
    return Task.tags.any(Tag.name.ilike(needle))
def create_text_clause(self, column, value):
    """Build a case-insensitive substring clause for *column*,
    stripping surrounding whitespace and escaping LIKE wildcards."""
    stripped = value.strip()
    return column.ilike(u'%{}%'.format(escape_like(stripped)))
def test_escapes_wildcards(self):
    """escape_like must prefix each LIKE wildcard (`_`, `%`) and the
    escape character itself with the default escape char `*`."""
    assert escape_like('_*%') == '*_***%'
def search(**kwargs):
    """Search indexed files with optional type/size/category/extension
    filters, pagination, and attached Resources objects.

    Expected kwargs: key, file_type, file_size, file_categories,
    file_extensions, autocomplete, lazy_search, page, per_page.

    :return: list of "fancified" result objects
    """
    kwargs['key'] = DatabaseSearchController.make_valid_key(kwargs['key'])

    # @TODO: filter by protocols / hosts
    q = Files.query

    # only find files that are not in 'temp' mode
    q = q.filter(Files.resource_id >= 1)

    # filters that become irrelevant for the chosen file type
    ignore_filters = []

    # filter only files/dirs, or both
    if 'folders' in kwargs['file_type'] and 'files' in kwargs['file_type']:
        pass
    elif 'folders' in kwargs['file_type']:
        q = q.filter(Files.file_isdir == True)
        # When searching only for directories, ignore filters that are not relevant
        ignore_filters.extend(('file_size', 'file_categories', 'file_extensions'))
    elif 'files' in kwargs['file_type']:
        q = q.filter(Files.file_isdir == False)

    # size: "<min>-<max>" with '*' as an open bound on either side
    if kwargs['file_size'] and 'file_size' not in ignore_filters:
        try:
            file_size = kwargs['file_size'].split('-')
            if len(file_size) != 2:
                raise ValueError('file_size must be "<min>-<max>"')
            if file_size[0] == '*':
                q = q.filter(Files.file_size <= int(file_size[1]))
            elif file_size[1] == '*':
                q = q.filter(Files.file_size >= int(file_size[0]))
            else:
                q = q.filter(Files.file_size.between(*[int(x) for x in file_size]))
        except (ValueError, AttributeError):
            # Malformed size spec: best-effort, skip this filter.
            # (Was a bare `except:` that also hid unrelated errors.)
            pass

    # filter categories
    filecategories = FileCategories()
    cat_ids = []
    for cat in kwargs['file_categories']:
        cat_id = filecategories.id_by_name(cat)
        if cat_id is None:
            continue
        # Fix: reuse the looked-up id instead of re-querying a new instance.
        cat_ids.append(cat_id)

    if cat_ids and 'file_categories' not in ignore_filters:
        q = q.filter(Files.file_format.in_(cat_ids))

    # filter extensions; a leading dot is stripped
    if kwargs['file_extensions'] and 'file_extensions' not in ignore_filters:
        exts = [ext[1:] if ext.startswith('.') else ext
                for ext in kwargs['file_extensions']]
        q = q.filter(Files.file_ext.in_(exts))

    # Search: prefix match is cheaper — used for autocomplete, lazy
    # search, or when the table is too large for a full substring scan.
    if kwargs['autocomplete'] or kwargs['lazy_search'] or app.config['db_file_count'] > 5000000:
        q = q.filter(Files.searchable.like(escape_like(kwargs['key']) + '%'))
    else:
        q = q.filter(Files.searchable.like('%' + escape_like(kwargs['key']) + '%'))

    # pagination
    q = q.offset(kwargs['page'])
    if kwargs['autocomplete']:
        q = q.limit(5)
        q = q.distinct(func.lower(Files.file_name))
    else:
        q = q.limit(kwargs['per_page'])

    # fetch
    results = q.all()

    # attach the owning Resources row to each result
    resource_ids = set([z.resource_id for z in results])
    resource_obs = {z.id: z for z in
                    Resources.query.filter(Resources.id.in_(resource_ids)).all()}
    for result in results:
        setattr(result, 'resource', resource_obs[result.resource_id])

    results = [result.fancify() for result in results]
    return results
def _search(**kwargs):
    """Search indexed files, optionally via the ElasticSearch-backed
    ZdbQuery, with type/size/category/extension/movie-id filters and
    pagination.

    Expected kwargs: key, file_type, file_size, file_categories,
    file_extensions, meta_movie_id, autocomplete, page, per_page.

    :raises Exception: on an invalid (too short) key or a query failure
    :return: results with resource objects assigned
    """
    kwargs["key"] = CrawlController.make_valid_key(kwargs["key"])
    if not kwargs["key"]:
        raise Exception("Invalid search. Too short?")

    # Use the ZomboDB/ElasticSearch-backed query when enabled, else plain ORM.
    q = ZdbQuery(Files, session=db.session) if config(
        "findex:elasticsearch:enabled") else Files.query

    # @TODO: filter by protocols / hosts
    # only find files that are not in "temp" mode
    # q = q.filter(Files.resource_id >= 1)

    # filters that become irrelevant for the chosen file type
    ignore_filters = []

    # filter only files/dirs
    if kwargs.get("file_type"):
        if "both" in kwargs["file_type"]:
            pass
        if "folders" in kwargs["file_type"]:
            q = q.filter(Files.file_isdir == True)
            ignore_filters.extend(
                ("file_size", "file_categories", "file_extensions"))
        elif "files" in kwargs["file_type"]:
            q = q.filter(Files.file_isdir == False)

    # size: "<min>-<max>" with '*' as an open bound on either side
    if kwargs["file_size"] and "file_size" not in ignore_filters:
        try:
            file_size = kwargs["file_size"].split("-")
            if len(file_size) != 2:
                raise ValueError('file_size must be "<min>-<max>"')
            if file_size[0] == "*":
                q = q.filter(Files.file_size <= int(file_size[1]))
            elif file_size[1] == "*":
                q = q.filter(Files.file_size >= int(file_size[0]))
            else:
                q = q.filter(
                    Files.file_size.between(*[int(x) for x in file_size]))
        except (ValueError, AttributeError):
            # Malformed size spec: best-effort, skip this filter.
            # (Was a bare `except:` that also hid unrelated errors.)
            pass

    # filter categories
    filecategories = FileCategories()
    cat_ids = []
    cats = kwargs.get("file_categories", [])
    cats = [] if cats is None else cats
    for cat in cats:
        cat_id = filecategories.id_by_name(cat)
        if cat_id is None:
            continue
        # Fix: reuse the looked-up id instead of re-querying a new instance.
        cat_ids.append(cat_id)

    if cat_ids and "file_categories" not in ignore_filters:
        q = q.filter(Files.file_format.in_(cat_ids))

    # filter extensions; a leading dot is stripped
    if kwargs[
            "file_extensions"] and "file_extensions" not in ignore_filters:
        exts = [ext[1:] if ext.startswith(".") else ext
                for ext in kwargs["file_extensions"]]
        q = q.filter(Files.file_ext.in_(exts))

    if isinstance(kwargs["meta_movie_id"], int):
        q = q.filter(Files.meta_movie_id == kwargs["meta_movie_id"])

    # Search: ES handles the raw key; otherwise build a LIKE pattern,
    # prefix-only when autocomplete or the table is too large.
    if config("findex:elasticsearch:enabled"):
        val = kwargs["key"]
    else:
        if kwargs["autocomplete"] or app.config["db_file_count"] > 5000000:
            print("warning: too many rows, enable ElasticSearch")
            val = "%s%%" % escape_like(kwargs["key"])
        else:
            val = "%%%s%%" % escape_like(kwargs["key"])
    if val != "*":
        q = q.filter(Files.searchable.like(val))

    q = q.order_by(Files.file_size.desc())

    # pagination
    q = q.offset(kwargs["page"])
    if kwargs["autocomplete"]:
        q = q.limit(5)
        # q = q.distinct(func.lower(Files.file_name))
        q = q.distinct(Files.file_size)
    else:
        q = q.limit(kwargs["per_page"])

    # fetch
    try:
        results = q.all()
    except Exception as ex:
        # NOTE(review): rewrapping discards the original exception class
        # (the cause is still chained); kept for backward compatibility
        # with callers that expect a plain Exception here.
        raise Exception(ex)

    results = SearchController.assign_resource_objects(results)
    return results
OPERATOR_FILTER_MAP = { 'eq': lambda f, v: f == v, 'neq': lambda f, v: f != v, 'gt': lambda f, v: f > v, 'gte': lambda f, v: f >= v, 'lt': lambda f, v: f < v, 'lte': lambda f, v: f <= v, 'contains': lambda f, v: f.ilike('%{}%'.format(escape_like(v))), 'contains_cs': lambda f, v: f.like('%{}%'.format(escape_like(v))), 'eq_cs': lambda f, v: f.ilike('{}'.format(escape_like(v))), 'neq_cs': lambda f, v: ~f.ilike('{}'.format(escape_like(v))), 'near': lambda f, v: f.ST_DWithin(ga_point_from_dict(v), v.get('radius', '20000')), 'in': lambda f, v: sa.or_(*(f.any(vv) for vv in v)), # field: array; value: array 'all': lambda f, v: sa.and_(*(f.any(vv) for vv in v)), # field: array; value: array 'any':