Example #1
    def test_not_in(self):
        self.compare('a not in [1, 2, 3]', {'a': {'$nin': [1, 2, 3]}})

        with self.assertRaises(pql.ParseError) as context:
            pql.find('a not in (1)')

        self.assertIn('Invalid value type', str(context.exception))
Example #3
 def compare(self, string, expected):
     print("{} | {}".format(string, expected))
     self.assertEqual(
         pql.find(string,
                  schema={
                      'a': pql.IntField(),
                      'd': pql.DateTimeField(),
                      'foo.bar': pql.ListField(pql.StringField())
                  }), expected)
Example #4
    def result(request, infmt, qry):
        try:
            qry = pql.find(qry)
        except Exception as inst:
            Logger.Error(str(type(inst)))  # the exception instance
            Logger.Error(str(inst.args))  # arguments stored in .args
            Logger.Error(str(inst))
            return HttpLogger.Error("Syntax Error in " + str(qry)), False

        return qry, True
Example #5
 def count(self, owner, cube, query, date=None):
     self.cube_exists(owner, cube)
     self.requires_owner_read(owner, cube)
     set_default(query, '')
     logger.info('pql query: %s' % query)
     try:
         spec = pql.find(query_add_date(query, date))
     except Exception as e:
         self._raise(400, "Invalid Query (%s)" % str(e))
     logger.debug('mongo query: %s' % spec)
     _cube = self.timeline(owner, cube)
     docs = _cube.find(spec=spec)
     return docs.count() if docs else 0
Example #6
    def get(self):
        resources = get_resources_db()

        args = query_parser.parse_args()
        if args.query:
            q = pql.find(args.query)
        else:
            q = None
        objs = resources.find(q)
        objs = [o for o in objs]
        if not objs:
            abort(404, message="'%s' wasn't found" % args.query)
        return json.loads(json.dumps(objs, sort_keys=True, indent=4, default=json_util.default))
Example #7
    def result(request, infmt, qry):
        try:
            if infmt == "python":
                import pql
                qry = pql.find(qry)
            else:
                qry = eval(qry)
        except Exception as inst:
            Logger.Error(str(type(inst)))    # the exception instance
            Logger.Error(str(inst.args))     # arguments stored in .args
            Logger.Error(str(inst)) 
            return HttpLogger.Error("Syntax Error in " + str(qry)), False 

        return qry, True
Example #8
    def qry_mongo(realm, qry, cfg=Config()):
        host = cfg["mongo_host"]
        port = cfg["mongo_port"]

        client = MongoClient(host, port)
        db = client["primary"]
        try:
            coll = db[realm.lower()]
        except Exception:
            raise Exception("realm not found")
        qry = pql.find(qry)
        res = coll.find(qry)
        dat = []
        for d in res:
            dat.append(d)
        print("Connecting to mongo://" + host + ":" + str(port))
        return pd.DataFrame(dat)
Example #9
    def post():
        resources = get_resources_db()

        args = post_parser.parse_args()
        if args.query:
            query = pql.find(args.query)
        else:
            query = dict(id=args.resource)
        obj = resources.find_one(query)
        if not obj:
            abort(404, message="resource[%s] wasn't found" % args.resource)
        if obj.get('locked_by', None):
            abort(405, message="locked by %s" % obj['locked_by'])
        duration = args.duration if args.duration else 0
        lock_endtime = datetime.datetime.now() + datetime.timedelta(minutes=duration)
        res = resources.update_one({'_id': obj['_id']}, {"$set": {'locked_by': args.username, 'lock_end': lock_endtime}})
        obj = resources.find_one(dict(id=args.resource))

        return json.loads(json.dumps(obj, sort_keys=True, indent=4, default=json_util.default))
Example #10
 def deptree(self, owner, cube, field, oids, date, level):
     self.cube_exists(owner, cube)
     self.requires_owner_read(owner, cube)
     if level and level <= 0:
         self._raise(400, 'level must be >= 1')
     oids = self.parse_oids(oids)
     checked = set(oids)
     fringe = oids
     loop_k = 0
     pql_date = date_pql_string(date)
     while len(fringe) > 0:
         if level and loop_k == abs(level):
             break
         spec = pql.find(
             '_oid in %s and %s != None and %s' % (fringe, field, pql_date))
         _cube = self.timeline(owner, cube)
         fields = {'_id': -1, '_oid': 1, field: 1}
         docs = _cube.find(spec, fields=fields)
         fringe = set([oid for doc in docs for oid in doc[field]])
         fringe = filter(lambda oid: oid not in checked, fringe)
         checked |= set(fringe)
         loop_k += 1
     return sorted(checked)
Example #11
def optimadeToMongoDBConverter(optimadeQuery, version=None, aliases=None):
    """
    main function for converting optimade query to mongoDB query
    Procedure:
     1. converting optimadeQuery into Lark tree
     2. converting tree into raw PQL
     3. parsing the rawPQL into cleaned PQL (putting combined item in place)
     4. parse cleaned PQL into raw MongoDB query
     5. parse raw MongoDB Query into cleaned MongoDb Query (turn values in string into float if possible)
    """

    p = Parser(version=version)
    optimadeQuery = parseAlias(optimadeQuery, aliases)
    try:
        tree = p.parse(optimadeQuery)
        rawPQL = OptimadeToPQLTransformer().transform(tree)
        cleanedPQL = cleanPQL(rawPQL)
        mongoDbQuery = pql.find(cleanedPQL)
    except Exception as e:
        return e

    cleanMongo(mongoDbQuery)
    return mongoDbQuery
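The last step of the pipeline above hands a cleaned PQL string to pql.find. Below is a minimal, hedged sketch of just that step, assuming the pql package is installed; the query string is a hand-written stand-in for what cleanPQL might return, and the commented output shape is illustrative rather than actual pipeline output.

import pql

# Hand-written stand-in for a cleaned PQL string (illustrative only).
cleaned_pql = 'elements == "Si" and nelements > 1'
mongo_query = pql.find(cleaned_pql)
print(mongo_query)
# Expected shape, roughly: {'$and': [{'elements': 'Si'}, {'nelements': {'$gt': 1}}]}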
Example #12
    def deptree(self, owner, cube, field, oids, date, level):
        '''
        Dependency tree builder. Recursively fetches objects that
        are children of the initial set of parent object ids provided.

        :param cube: cube name
        :param owner: username of cube owner
        :param field: Field that contains the 'parent of' data
        :param oids: Object oids to build dependency tree for
        :param date: date (metrique date range) that should be queried.
                    If date==None then the most recent versions of the
                    objects will be queried.
        :param level: limit depth of recursion
        '''
        self.requires_read(owner, cube)
        if level and level <= 0:
            self._raise(400, 'level must be >= 1')
        if isinstance(oids, basestring):
            oids = [s.strip() for s in oids.split(',')]
        checked = set(oids)
        fringe = oids
        loop_k = 0
        pql_date = date_pql_string(date)
        while len(fringe) > 0:
            if level and loop_k == abs(level):
                break
            spec = pql.find(
                '_oid in %s and %s != None and %s' % (fringe, field, pql_date))
            _cube = self.timeline(owner, cube)
            fields = {'_id': -1, '_oid': 1, field: 1}
            docs = _cube.find(spec, fields=fields)
            fringe = set([oid for doc in docs for oid in doc[field]])
            fringe = filter(lambda oid: oid not in checked, fringe)
            checked |= set(fringe)
            loop_k += 1
        return sorted(checked)
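A hedged sketch of the spec string built in a single loop iteration of deptree, assuming the pql package is installed; the oid values, the field name, and the date clause standing in for date_pql_string(date) are all hypothetical.

import pql

fringe = [100, 101]          # hypothetical object ids
field = 'parent_oid'         # hypothetical 'parent of' field
pql_date = '_end == None'    # hypothetical stand-in for date_pql_string(date)
spec = pql.find('_oid in %s and %s != None and %s' % (fringe, field, pql_date))
print(spec)
# Roughly: {'$and': [{'_oid': {'$in': [100, 101]}},
#                    {'parent_oid': {'$ne': None}},
#                    {'_end': None}]}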
Example #13
 def compare(self, string, expected):
     #print string, '|', expected
     self.assertEqual(pql.find(string, schema={'a': pql.IntField(),
                                               'd': pql.DateTimeField(),
                                               'foo.bar': pql.ListField(pql.StringField())}), expected)
Example #14
def convert_sql2mongo_query(sql_query):
    """ Convert SQL query to Mongo Query"""
    return pql.find(sql_query)
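A hedged usage sketch for the helper above, assuming the pql package is installed; note that pql.find parses a PQL (Python-like) expression rather than actual SQL, and the query string and commented output are illustrative.

import pql

mongo_query = pql.find('status == "PASS" and retries < 3')   # illustrative query
print(mongo_query)
# Roughly: {'$and': [{'status': 'PASS'}, {'retries': {'$lt': 3}}]}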
Example #15
 def test_invalid_name(self):
     with self.assertRaises(pql.ParseError) as context:
         pql.find('a == foo')
     self.assertIn('Invalid name', str(context.exception))
Example #16
 def test_missing_func(self):
     with self.assertRaises(pql.ParseError) as context:
         pql.find('a == foo()')
     self.assertIn('Unsupported function', str(context.exception))
Example #17
def convert_sql2mongo_time(sql_query):
    """ Converts SQL query to Mongo Query with time stamp"""
    return pql.find(sql_query, schema={'TestStartTime': pql.DateTimeField()})
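A hedged sketch of the schema-aware variant above, assuming pql.DateTimeField parses date strings into datetime objects; the date value and the commented result are illustrative.

import pql

q = pql.find('TestStartTime > "2020-01-01"',
             schema={'TestStartTime': pql.DateTimeField()})
print(q)
# Roughly: {'TestStartTime': {'$gt': datetime.datetime(2020, 1, 1, 0, 0)}}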
Example #18
 def compare(self, string, expected):
     print("{} | {}".format(string, expected))
     self.assertEqual(pql.find(string), expected)
Example #19
def read(db,
         query: Optional[dict] = None,
         pql: any = None,
         group_by: Optional[str] = None,
         order_by: Optional[str] = None,
         limit: Optional[int] = None,
         offset: Optional[int] = None,
         handler: any = None,
         disable_count_total: bool = False,
         **kwargs):
    """Read data from DB.

    Args:
        db (Collection): DB connection
        query (dict or Query): Query to select items
        pql (PQL): Python Query Language (PQL) expression to select items
        group_by (str): Aggregate by this key
        order_by (list): columns to sort by, as a list of (column, 1 or -1) pairs
        limit (int): number of items to return per page
        offset (int): offset of cursor
        handler (BaseDBHandler): DBHandler
        disable_count_total (bool): set True to avoid counting total number of records
        **kwargs: kwargs for function `pandas.read_sql_query`
                  or `influxdb.DataFrameClient.query`

    Returns:
        (list, int): list of data and total number of records

    """
    if limit is None:
        limit = 0
    if offset is None:
        offset = 0

    if pql is not None and query is not None:
        raise ValueError('Only one of query or pql can be specified')

    if pql:
        query = PQL.find(pql)

    if group_by is None:
        if query:
            if order_by is None:
                data = db.find(query).skip(offset).limit(limit)
                count_total = db.count(query) if not disable_count_total else None
            else:
                data = db.find(query).sort(order_by).skip(offset).limit(limit)
                count_total = db.count(query) if not disable_count_total else None
        else:
            if order_by is None:
                data = db.find().skip(offset).limit(limit)
                count_total = db.count({}) if not disable_count_total else None
            else:
                data = db.find().sort(order_by).skip(offset).limit(limit)
                count_total = db.count({}) if not disable_count_total else None
    else:
        aggregate = []
        if query:
            aggregate.append({'$match': query})

        columns = {}
        for column in set(handler.columns).union(['_uuid', '_creation_time']):
            try:
                config = next(filter(lambda c: c['name'] == column, handler.config['columns']))
                agg = config['aggregation']
                columns.update({column: {'${}'.format(agg): '${}'.format(column)}})
            except Exception:
                columns.update({column: {'$first': '${}'.format(column)}})

        aggregate.append({
            '$group': {
                **columns,
                '_id': '${}'.format(group_by),
            }
        })
        aggregate.append({'$project': {'_id': 0}})
        aggregate_count = deepcopy(aggregate)
        aggregate_count.append({'$count': 'count'})

        if order_by is not None:
            aggregate.append({'$sort': {item[0]: item[1] for item in order_by}})

        if offset > 0:
            aggregate.append({'$skip': offset})
        if limit > 0:
            aggregate.append({'$limit': limit})

        data = db.aggregate(aggregate)
        try:
            count_total = list(db.aggregate(aggregate_count))[0]['count'] \
                if not disable_count_total else None
        except Exception as e:
            logging.warning(e)
            count_total = None

    data = list(data)
    count_total = count_total if count_total is not None else len(data)

    return data, count_total
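A hedged usage sketch for read() above, assuming a local MongoDB instance reachable via pymongo and the source module's own imports (e.g. pql imported as PQL); the database, collection, and field names in the PQL string are hypothetical, and disable_count_total skips the separate count query.

from pymongo import MongoClient

# Hypothetical database and collection.
db = MongoClient('localhost', 27017)['example_db']['records']

data, total = read(db,
                   pql='temperature > 20 and sensor == "A1"',
                   order_by=[('_creation_time', -1)],
                   limit=10,
                   disable_count_total=True)
print(total, data[:1])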
Example #20
 def compare(self, string, expected):
     #print string, '|', expected
     self.assertEqual(pql.find(string), expected)
Example #23
 def compare(self, string, expected):
     print("{} | {}".format(string, expected))
     self.assertEqual(pql.find(string), expected)
Example #24
    async def list_widget(self):
        # parse arguments
        per_page = int(self.get_argument("per_page", as_type=int, default=10))
        current_page = int(self.get_argument("page", as_type=int, default=0))
        query = self.get_argument("filter", as_type=dict, default={})
        search = self.get_argument("search", as_type=str, default=None)
        all = self.get_argument("api", as_type=bool, default=False)
        tag = self.get_argument("tag", as_type=list, default=None)
        # parse search
        q = {}
        if search:
            search = search.strip()
            if search.startswith("!"):
                search = search[1:].lstrip()
                all = True
            if search:
                try:
                    q = pql.find(search)
                    self.logger.debug("search: %s", q)
                except Exception:
                    search = ".*" + search + ".*"
                    q = {
                        "$or": [{
                            "author": re.compile(search, re.I)
                        }, {
                            "description": re.compile(search, re.I)
                        }, {
                            "qual_name": re.compile(search, re.I)
                        }, {
                            "subtitle": re.compile(search, re.I)
                        }, {
                            "tag": re.compile(search, re.I)
                        }, {
                            "title": re.compile(search, re.I)
                        }]
                    }
        else:
            q = query
        if not all:
            q = {"$and": [{"tag": {"$ne": "api"}}, q]}
        if tag:
            q = {"$and": [q, {"tag": {"$in": tag}}]}
        data = []
        self.logger.debug("search %s", q)
        for handler in await self.application.container.get_handler(**q):
            check = []
            if handler["perm_base"] == "handler":
                check.append(handler["qual_name"])
            elif handler["perm_base"] == "container":
                check += handler["container"]
            for test in check:
                if await self.user.has_api_access(test, info_request=True):
                    data.append(handler)
                    break

        data.sort(key=lambda d: ((d["title"] or "").lower(),
                                 (d["subtitle"] or "").lower()))

        # paginate
        async def _length(*_, **__):
            return len(data)

        async def _query(skip, limit, *_, **__):
            return data[skip:(skip + limit)]

        pager = CorePager(per_page=int(per_page),
                          current_page=int(current_page),
                          length=_length,
                          query=_query,
                          sort_by=None,
                          filter=None)
        ret = await pager.page()
        return self.reply(ret)