def test_count_mean_sum(self):
    """Count/Mean/Sum work on plain arrays and on index match sets alike."""
    numbers = [1, 2, 3, 4, 5, 6, 7, 8, 9]

    # Fixture: a collection plus a value index over data.value.
    self._q(query.create_collection({"name": "countmeansum_test"}))
    self._q(
        query.create_index({
            "name": "countmeansum_idx",
            "source": query.collection("countmeansum_test"),
            "active": True,
            "values": [{"field": ["data", "value"]}],
        }))

    # Store each number shifted by 2, so the indexed set differs from the array.
    self._q(
        query.foreach(
            query.lambda_(
                "x",
                query.create(
                    query.collection("countmeansum_test"),
                    {"data": {"value": query.add(query.var("x"), 2)}})),
            numbers))

    match_set = query.match(query.index("countmeansum_idx"))

    # Array: count=9, mean=5.0, sum=45. Set (+2 each): count=9, mean=7.0, sum=63.
    expected_results = [9, 5.0, 45, 9, 7.0, 63]
    actual_results = self._q([
        query.count(numbers),
        query.mean(numbers),
        query.sum(numbers),
        query.count(match_set),
        query.mean(match_set),
        query.sum(match_set),
    ])
    self.assertEqual(actual_results, expected_results)
def test_documents(self):
    """Documents() enumerates a collection; both paginated and raw counts agree."""
    collection_name = "col_test_documents"
    index_name = "idx_test_documents"

    self._q(query.create_collection({"name": collection_name}))
    self._q(
        query.create_index({
            "name": index_name,
            "source": query.collection(collection_name),
            "active": True,
        }))

    # Insert `total` empty documents in a single foreach round-trip.
    total = 56
    payloads = [{} for _ in range(total)]
    self._q(
        query.foreach(
            query.lambda_(
                "x",
                query.create(query.collection(collection_name),
                             {"data": query.var("x")})),
            payloads))

    # Counting a paginated Documents() set yields a page whose first entry
    # is the count.
    paginated_count = self._q(
        query.select(
            [0],
            query.count(
                query.paginate(
                    query.documents(query.collection(collection_name))))))
    self.assertEqual(paginated_count, total)

    # Counting the set directly returns the bare number.
    direct_count = self._q(
        query.count(query.documents(query.collection(collection_name))))
    self.assertEqual(direct_count, total)
def update_documents(sql_query: sql.SQLQuery) -> QueryExpression:
    """Update document fields with the given values.

    Params:
    -------
    sql_query: SQLQuery object that contains the parameters for building
        an update query in FQL.

    Returns:
    --------
    An FQL update query for the given collection and documents.
    """
    # Updates are only supported against a single table (collection).
    assert len(sql_query.tables) == 1
    table = sql_query.tables[0]

    # At most one WHERE filter group is supported for updates.
    assert len(sql_query.filter_groups) <= 1
    filter_group = (None if not any(sql_query.filter_groups) else
                    sql_query.filter_groups[0])

    # Map each SET column to the value it should be assigned.
    field_updates = {column.name: column.value for column in table.columns}

    return q.let(
        {"document_set": build_document_set_intersection(table, filter_group)},
        q.do(
            q.update(
                q.select(
                    "ref",
                    q.get(q.var("document_set")),
                ),
                {"data": field_updates},
            ),
            # Return the matched-document count, mirroring SQL UPDATE's
            # affected-row count in the response payload.
            {"data": [{
                "count": q.count(q.var("document_set"))
            }]},
        ),
    )
def test_access_providers(self):
    """Admin can create and count providers; lesser keys cannot paginate them."""
    for n in range(10):
        payload = {
            "name": 'provider_%d' % (n),
            "issuer": 'issuer_%d' % (n),
            "jwks_uri": 'https://xxx.auth0.com/uri%d' % (n),
        }
        self.admin_client.query(query.create_access_provider(payload))

    provider_count = self.admin_client.query(
        query.count(query.access_providers()))
    self.assertEqual(provider_count, 10)

    # Non-admin keys must be denied when enumerating access providers.
    self._assert_insufficient_permissions(
        query.paginate(query.access_providers()))
def number_of_anime(update: Update, context: CallbackContext):
    """Telegram handler: reply with the total number of anime documents."""
    # Count a single oversized page so the whole collection fits in one request;
    # the count comes back as the first element of the page's 'data' array.
    count_expr = q.count(
        q.paginate(q.documents(q.collection(animes)), size=100000))
    result = client.query(count_expr)
    context.bot.send_message(
        chat_id=update.effective_chat.id,
        text='Number of anime: ' + str(result['data'][0]))
def number_of_users(update: Update, context: CallbackContext):
    """Telegram handler: reply with the total number of user documents."""
    current_user = User(update.effective_chat.id)
    # One oversized page covers the whole collection; the count lands in
    # the first element of the page's 'data' array.
    result = client.query(
        q.count(q.paginate(q.documents(q.collection(users)), size=100000)))
    context.bot.send_message(
        chat_id=current_user.chat_id,
        text='Number of users: ' + str(result['data'][0]))
def test_count(self):
    """Count serializes to the expected JSON wire form."""
    expected_json = '{"count":[1,2,3,4,5]}'
    self.assertJson(query.count([1, 2, 3, 4, 5]), expected_json)
def test_typecheckfns(self):
    """Exercise every is_* type-check predicate against a mixed bag of values
    and verify the per-type match counts server-side."""
    coll = query.collection("typecheck_coll")
    db = query.database("typecheck_db")
    # NOTE(review): 'fn' is never used below — presumably kept for parity with
    # the other refs; confirm before removing.
    fn = query.function("typecheck_fn")
    index = query.index("typecheck_index")

    # Fixture objects: one of each schema type so every predicate has a target.
    self.admin_client.query(query.create_collection({"name": "typecheck_coll"}))
    self.admin_client.query(query.create_index(
        {"name": "typecheck_index", "source": coll, "active": True}))
    doc = self.admin_client.query(query.create(
        coll, {"data": {}, "credentials": {"password": "******"}}))
    self.admin_client.query(query.create_database({"name": "typecheck_db"}))
    function = self._q(query.create_function(
        {"name": "typecheck_fn", "body": query.query(query.lambda_("x", query.now()))}))
    key = self.admin_client.query(
        query.create_key({"database": db, "role": "admin"}))
    token = self._q(query.login(doc["ref"], {"password": "******"}))
    credentials = self._q(query.select(['data', 0], query.paginate(query.credentials())))
    role = self.admin_client.query(query.create_role(
        {"name": "typecheck_role", "membership": [], "privileges": []}))

    # One sample value per runtime type the predicates can classify.
    values = [
        None,
        bytearray([12,3,4,5]),
        credentials,
        90,
        3.14,
        True,
        query.to_date(query.now()),
        query.date("1970-01-01"),
        query.now(),
        query.epoch(1, "second"),
        query.time("1970-01-01T00:00:00Z"),
        {"x": 10},
        query.get(doc["ref"]),
        query.paginate(query.collections()),
        [1, 2, 3],
        "a string",
        coll,
        query.collections(),
        query.match(index),
        query.union(query.match(index)),
        doc["ref"],
        query.get(doc["ref"]),
        index,
        db,
        coll,
        token["ref"],
        role["ref"],
        key["ref"],
        function["ref"],
        query.get(function["ref"]),
        query.query(query.lambda_("x", query.var("x"))),
    ]

    # (label, predicate) pairs. NOTE(review): ["collection", is_collection]
    # appears twice; the later merge() makes the duplicate harmless but it
    # looks unintentional — confirm before deduplicating.
    pairs = [
        ["array", query.is_array],
        ["object", query.is_object],
        ["string", query.is_string],
        ["null", query.is_null],
        ["number", query.is_number],
        ["bytes", query.is_bytes],
        ["date", query.is_date],
        ["timestamp", query.is_timestamp],
        ["set", query.is_set],
        ["ref", query.is_ref],
        ["boolean", query.is_boolean],
        ["double", query.is_double],
        ["integer", query.is_integer],
        ["database", query.is_database],
        ["index", query.is_index],
        ["collection", query.is_collection],
        ["token", query.is_token],
        ["function", query.is_function],
        ["collection", query.is_collection],
        ["role", query.is_role],
        ["credentials", query.is_credentials],
        ["key", query.is_key],
    ]

    # How many entries of `values` each predicate should match.
    expected = {
        "array": 1,
        "boolean": 1,
        "bytes": 1,
        "collection": 3,
        "credentials": 1,
        "database": 1,
        "date": 2,
        "double": 1,
        "function": 2,
        "integer": 1,
        "index": 1,
        "key": 1,
        "null": 1,
        "number": 2,
        "object": 5,
        "ref": 11,
        "role": 1,
        "set": 3,
        "string": 1,
        "timestamp": 3,
        "token": 1,
    }

    # Build one {label: count-of-matching-values} object per predicate,
    # then merge them all into a single result object server-side.
    q = []
    for p in pairs:
        d = dict()
        d[p[0]] = query.count(
            query.filter_(query.lambda_("v", p[1](query.var("v"))),
                          query.var("vals")))
        q.append(d)

    actual = self._q(query.let({"vals": values}, query.merge({}, q)))
    self.assertEqual(actual, expected)
def translate_select(sql_query: sql.SQLQuery) -> QueryExpression:
    """Translate a SELECT SQL query into an equivalent FQL query.

    Params:
    -------
    sql_query: An SQLQuery instance.

    Returns:
    --------
    An FQL query expression based on the SQL query.
    """
    document_pages = _define_document_pages(sql_query)

    # The table whose columns appear in the SELECT list; assumes exactly one
    # table has selected columns — TODO confirm for multi-table queries.
    selected_table = next(table for table in sql_query.tables
                          if table.has_columns)

    # Pick the aggregate result when one exists, otherwise fall back to the
    # raw field value (mapping the NULL sentinel back to Python None).
    get_field_value = lambda function_value, raw_value: q.if_(
        q.equals(function_value, common.NULL),
        q.if_(q.equals(raw_value, common.NULL), None, raw_value),
        q.select([common.DATA, 0], function_value),
    )

    # Evaluate the column's aggregate, if any. Only COUNT is supported;
    # anything else yields the NULL sentinel.
    calculate_function_value = lambda document_set, function_name: q.if_(
        q.is_null(function_name),
        common.NULL,
        q.if_(
            q.equals(function_name, sql.Function.COUNT.value),
            q.count(document_set),
            common.NULL,
        ),
    )

    # With aggregation functions, standard behaviour is to include the first value
    # if any column selections are part of the query, at least until we add support
    # for GROUP BY
    get_first_document = lambda documents: q.if_(q.is_empty(documents), [{}],
                                                 q.take(1, documents))

    # Convert each fetched document (or ref) into an object keyed by the
    # query's column aliases, preserving SELECT-list order.
    translate_document_fields = lambda maybe_documents: q.let(
        {
            # We map over selected_fields to build document object
            # to maintain the order of fields as queried. Otherwise,
            # SQLAlchemy gets confused and assigns values to the incorrect keys.
            "selected_column_info":
            [[col.table_name, col.name, col.function_name]
             for col in sql_query.columns],
            "has_functions":
            any(col.function_name for col in sql_query.columns),
            # Aggregates collapse the set to (at most) its first document.
            "maybe_document_set":
            q.if_(
                q.var("has_functions"),
                get_first_document(maybe_documents),
                maybe_documents,
            ),
            "field_alias_map":
            sql_query.alias_map,
        },
        q.map_(
            q.lambda_(
                "maybe_document",
                q.let(
                    {
                        # Normalize: a bare ref is fetched and wrapped as
                        # {table_name: {...fields, "ref": ref}}; an already-
                        # materialized document passes through unchanged.
                        "document":
                        q.if_(
                            q.is_ref(q.var("maybe_document")),
                            {
                                # We use the selected table name here instead of deriving
                                # the collection name from the document ref in order to
                                # save a 'get' call from inside of a map, which could get
                                # expensive.
                                selected_table.name:
                                q.merge(
                                    q.select(
                                        common.DATA,
                                        q.get(q.var("maybe_document")),
                                    ),
                                    {"ref": q.var("maybe_document")},
                                ),
                            },
                            q.var("maybe_document"),
                        ),
                    },
                    # Build [[alias, value], ...] pairs per column, then fold
                    # them into a single result object.
                    q.to_object(
                        q.map_(
                            q.lambda_(
                                [
                                    "collection_name", "field_name",
                                    "function_name"
                                ],
                                q.let(
                                    {
                                        "function_value":
                                        calculate_function_value(
                                            maybe_documents,
                                            q.var("function_name")),
                                        "raw_value":
                                        q.select(
                                            [
                                                q.var("collection_name"),
                                                q.var("field_name"),
                                            ],
                                            q.var("document"),
                                            default=common.NULL,
                                        ),
                                    },
                                    [
                                        q.select(
                                            [
                                                q.var("collection_name"),
                                                q.var("field_name"),
                                            ],
                                            q.var("field_alias_map"),
                                        ),
                                        get_field_value(
                                            q.var("function_value"),
                                            q.var("raw_value")),
                                    ],
                                ),
                            ),
                            q.var("selected_column_info"),
                        )),
                ),
            ),
            q.var("maybe_document_set"),
        ),
    )

    return q.let(
        {
            "maybe_documents": document_pages,
            "translated_documents":
            translate_document_fields(q.var("maybe_documents")),
            # SELECT DISTINCT is applied after field translation.
            "result":
            q.distinct(q.var("translated_documents"))
            if sql_query.distinct else q.var("translated_documents"),
        },
        # Paginated sets hold an array of results in a 'data' field, so we try to flatten it
        # in case we're dealing with pages instead of an array of results which doesn't
        # have such nesting
        {common.DATA: q.select(common.DATA, q.var("result"), q.var("result"))},
    )
def _print_document_counts():
    """Print how many documents each known collection currently holds,
    using each collection's '<name>_all' index."""
    print("\nDocument counts by collection")
    for collection in COLLECTIONS:
        count_expr = q.count(q.match(q.index(f"{collection}_all")))
        total = _execute_with_retries(count_expr)
        print(f"\t{collection}: {total}")