def create_server_client():
    """ create server client, collections and indexes :return: server client """
    client = FaunaClient(secret=os.environ.get('FAUNA_SERVER_SECRET'))

    # Users: unique username lookup; the index is publicly readable.
    client.query(q.create_collection({"name": "users"}))
    users_index_spec = {
        "name": "users_by_username",
        "source": q.collection("users"),
        "permissions": {"read": "public"},
        "terms": [{"field": ["data", "username"]}],
        "unique": True,
    }
    client.query(q.create_index(users_index_spec))

    # Contacts: username lookup, duplicates allowed.
    client.query(q.create_collection({"name": "contacts"}))
    contacts_index_spec = {
        "name": "contacts_by_username",
        "source": q.collection("contacts"),
        "terms": [{"field": ["data", "username"]}],
    }
    client.query(q.create_index(contacts_index_spec))

    return client
def setUpClass(cls):
    """Create the shared 'widgets' collection and its three indexes.

    Stores the resulting refs on the class for reuse by individual tests:
    ``collection_ref``, ``n_index_ref``, ``m_index_ref``, ``z_index_ref``.
    """
    super(QueryTest, cls).setUpClass()
    cls.collection_ref = cls.client.query(
        query.create_collection({"name": "widgets"}))["ref"]
    # Term index: look widgets up by data.n.
    cls.n_index_ref = cls.client.query(
        query.create_index({
            "name": "widgets_by_n",
            "active": True,
            "source": cls.collection_ref,
            "terms": [{
                "field": ["data", "n"]
            }]
        }))["ref"]
    # Term index: look widgets up by data.m.
    cls.m_index_ref = cls.client.query(
        query.create_index({
            "name": "widgets_by_m",
            "active": True,
            "source": cls.collection_ref,
            "terms": [{
                "field": ["data", "m"]
            }]
        }))["ref"]
    # Value index: index entries are the data.z values themselves.
    # NOTE(review): this one goes through cls._q while the two above use
    # cls.client.query directly — presumably equivalent; confirm.
    cls.z_index_ref = cls._q(
        query.create_index({
            "name": "widgets_by_z",
            "active": True,
            "source": cls.collection_ref,
            "values": [{
                "field": ["data", "z"]
            }]
        }))["ref"]
def create_indices(client):
    """Create the customer/transaction indexes used by the examples below.

    :param client: FaunaClient with permission to create indexes
    """
    #
    # Create two indexes here. The first index is to query customers when you know specific id's.
    # The second is used to query customers by range. Examples of each type of query are presented
    # below.
    #
    res = client.query([
        q.create_index({
            "name": "customer_by_id",
            "source": q.class_("customers"),
            "unique": True,
            # Fix: CreateIndex expects `terms` to be an *array* of term
            # objects; the original passed a bare object here while the
            # sibling indexes correctly use lists.
            "terms": [{"field": ["data", "id"]}]
        }),
        q.create_index({
            "name": "customer_id_filter",
            "source": q.class_("customers"),
            "unique": True,
            # Value index: entries are (id, ref) pairs, enabling range filters.
            "values": [{"field": ["data", "id"]}, {"field": ["ref"]}]
        }),
        q.create_index({
            "name": "transaction_uuid_filter",
            "source": q.class_("transactions"),
            "unique": True,
            "values": [{"field": ["data", "id"]}, {"field": ["ref"]}]
        })
    ])
    print('Create \'customer_by_id\', \'customer_id_filter\' and \'transaction_uuid_filter\' indices')
    pprint.pprint(res)
def test_create_index(self):
    """Creating an index should make it visible via Exists."""
    index_spec = {
        "name": "index_for_test",
        "active": True,
        "source": query.collection("widgets"),
    }
    self._q(query.create_index(index_spec))
    index_exists = self._q(query.exists(query.index("index_for_test")))
    self.assertTrue(index_exists)
def test_count_mean_sum(self):
    """Count/Mean/Sum over both a literal array and an index-backed set."""
    data = [1, 2, 3, 4, 5, 6, 7, 8, 9]
    self._q(query.create_collection({"name": "countmeansum_test"}))
    # Value index: Match yields the stored data.value numbers directly.
    self._q(
        query.create_index({
            "name": "countmeansum_idx",
            "source": query.collection("countmeansum_test"),
            "active": True,
            "values": [{
                "field": ["data", "value"]
            }]
        }))
    # Store each element shifted by 2, i.e. the set holds values 3..11.
    self._q(
        query.foreach(
            query.lambda_(
                "x",
                query.create(
                    query.collection("countmeansum_test"),
                    {"data": {
                        "value": query.add(query.var("x"), 2)
                    }})), data))
    m = query.match(query.index("countmeansum_idx"))
    # First three results are for the raw array (1..9),
    # last three for the set (3..11): count, mean, sum of each.
    expected = [9, 5.0, 45, 9, 7.0, 63]
    self.assertEqual(
        self._q([
            query.count(data),
            query.mean(data),
            query.sum(data),
            query.count(m),
            query.mean(m),
            query.sum(m)
        ]), expected)
def test_range(self):
    """Range over an index-backed set of 1..19, including inverted bounds."""
    data = list(range(1, 20))
    self._q(query.create_collection({"name": "range_test"}))
    # Value index: Match yields data.value in ascending order.
    self._q(
        query.create_index({
            "name": "range_idx",
            "source": query.collection("range_test"),
            "active": True,
            "values": [{
                "field": ["data", "value"]
            }]
        }))
    self._q(
        query.foreach(
            query.lambda_query(lambda x: query.create(
                query.collection("range_test"), {"data": {
                    "value": x
                }})), data))
    m = query.match(query.index("range_idx"))
    q1 = query.select("data", query.paginate(query.range(m, 3, 8)))
    q2 = query.select("data", query.paginate(query.range(m, 17, 18)))
    # Inverted bounds (from > to) must yield an empty page.
    q3 = query.select("data", query.paginate(query.range(m, 19, 0)))
    self.assertEqual(self._q(q1), [3, 4, 5, 6, 7, 8])
    self.assertEqual(self._q(q2), [17, 18])
    self.assertEqual(self._q(q3), [])
def test_documents(self):
    """Documents() enumerates every document in a collection."""
    aCollection = "col_test_documents"
    anIndex = "idx_test_documents"
    self._q(query.create_collection({"name": aCollection}))
    # Index is created as part of the fixture, but this test only
    # exercises Documents(), not Match on the index.
    self._q(
        query.create_index({
            "name": anIndex,
            "source": query.collection(aCollection),
            "active": True
        }))
    count = 56
    data = [{} for x in range(count)]
    self._q(
        query.foreach(
            query.lambda_(
                "x",
                query.create(query.collection(aCollection),
                             {"data": query.var("x")})), data))
    # Paginated form: Count over the page, then select element 0.
    self.assertEqual(
        self._q(
            query.select([0], query.count(
                query.paginate(
                    query.documents(
                        query.collection(aCollection)))))), count)
    # Direct Count over the whole document set.
    self.assertEqual(
        self._q(query.count(query.documents(
            query.collection(aCollection)))), count)
def create_database(self, name: str, database: Type['FaunaDatabase'],
                    key_type: str) -> Secret:
    """Create a Fauna database plus the classes and indexes it declares.

    :param name: name of the database to create
    :param database: FaunaDatabase subclass describing classes()/indices()
    :param key_type: role of the key created against the new database
    :return: the secret of the newly created key
    """
    self.client.query(q.create_database({'name': name}))
    key = self.client.query(
        q.create_key({
            'database': q.database(name),
            'role': key_type
        }))
    secret = Secret(key['secret'])
    database_instance = database(secret, self.client_factory)
    for class_ in database.classes():
        log.info(f'creating class {class_}')
        database_instance._create_class(class_)
    # Loop variable renamed from `name`, which shadowed the `name`
    # parameter (the database name) and made the code misleading.
    for index_name, index in database.indices().items():
        log.info(f'creating index {index_name}')
        database_instance.client.query(
            q.create_index({
                'name': index_name,
                'source': q.class_(index.source.name()),
                'terms': [{
                    'field': ['data', field]
                } for field in index.fields],
                # Always append the ref value so callers can recover the
                # matching document from index entries.
                'values': [{
                    'field': ["data", value]
                } for value in index.values] + [{
                    'field': ['ref']
                }],
                'unique': index.unique
            }))
    # Bug fix: previously returned Secret(secret), double-wrapping the
    # Secret instance constructed above.
    return secret
def schema(event, context):
    """Idempotently ensure the 'todos' class and 'all_todos' index exist.

    Each query either fetches the existing resource or creates it, so the
    handler is safe to invoke repeatedly.
    """
    client.query(query.if_expr(
        query.exists(TODOS),
        query.get(TODOS),
        query.create_class({'name': 'todos'}),
    ))
    client.query(query.if_expr(
        query.exists(ALL_TODOS),
        query.get(ALL_TODOS),
        query.create_index({'name': 'all_todos', 'source': TODOS}),
    ))
    # create a response
    return {"statusCode": 200}
def test_reverse(self):
    """Reverse over arrays, sets, and pages; invalid inputs must fail."""
    # arrays (local renamed from `list`, which shadowed the builtin)
    values = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
    self.assertEqual(self._q(query.reverse(values)),
                     [10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0])
    self.assertEqual(self._q(query.reverse(query.reverse(values))), values)
    self.assertEqual(self._q(query.reverse([])), [])

    # Sets: populate a collection with values 0..99, indexed by value.
    self._q(query.create_collection({"name": "reverse_collection"}))
    self._q(query.create_index({
        "name": "rev_coll_idx",
        "source": query.collection("reverse_collection"),
        "values": [{"field": ["data", "val"]}]
    }))
    index = query.index("rev_coll_idx")
    for i in range(100):
        self._q(query.create(query.collection(
            "reverse_collection"), {"data": {"val": i}}))

    # Named helper instead of a lambda bound to a name (PEP 8 E731).
    def assert_paginate(q, expected, size=None):
        self.assertEqual(
            self._q(query.select("data", query.paginate(q, size))),
            expected)

    assert_paginate(query.reverse(query.match(index)),
                    [99, 98, 97, 96, 95], 5)
    assert_paginate(query.reverse(query.reverse(query.match(index))),
                    values, 11)

    # Reversing a page flips the order of the page contents themselves.
    q1 = query.select(["data", 0],
                      query.reverse(query.paginate(query.match(index),
                                                   size=50)))
    self.assertEqual(self._q(q1), 49)

    # Reverse is only defined for arrays, sets, and pages.
    self._assert_bad_query(query.reverse("a string"))
    self._assert_bad_query(query.reverse(index))
    self._assert_bad_query(query.reverse({"a": 1, "b": 2}))
def test_create_index(self):
    """create_index must serialize to the expected wire-format JSON."""
    expr = query.create_index({
        "name": "index-name",
        "source": query.collection("widget"),
    })
    expected = ('{"create_index":{"object":{"name":"index-name",'
                '"source":{"collection":"widget"}}}}')
    self.assertJson(expr, expected)
def test_queries_are_made():
    """Assert the expected Fauna queries were issued on the shared client mock."""
    # The client mock is used by both the
    # database and annotation database
    # so here we assert calls for both
    # could maybe be improved by changing the way databases are created?
    create_annotation_database_queries = [
        q.create_database({'name': '1234-annotations'}),
        q.create_key(
            {'database': q.database('1234-annotations'), 'role': 'server'}
        )
    ]
    mock_dependencies.database.client.query.assert_has_calls(
        [call(query) for query in create_annotation_database_queries]
    )
    setup_annotation_database_queries = [
        q.create_class({'name': 'SpanLabel'}),
        q.create_class({'name': 'DocumentLabel'}),
        q.create_class({'name': 'RelationLabel'}),
        q.create_class({'name': 'Relation'}),
        q.create_class({'name': 'Span'}),
        q.create_class({'name': 'Document'}),
        q.create_index(
            {'name': 'documents', 'source': q.class_('Document')}),
        q.create_index(
            {'name': 'span_labels', 'source': q.class_('SpanLabel')}
        ),
        q.create_index(
            {'name': 'relation_labels', 'source': q.class_('RelationLabel')}
        ),
        q.create_index(
            {'name': 'document_labels', 'source': q.class_('DocumentLabel')}
        )
    ]
    mock_dependencies.database.client.query.assert_has_calls(
        [call(query) for query in setup_annotation_database_queries]
    )
    # NOTE(review): the assert_has_calls above run *before* handle() is
    # invoked below — presumably the recorded calls happened at
    # import/fixture time; confirm.
    event = event_reader('post_confirmation_event.json')
    handle(event, {}, dependencies=mock_dependencies)
    # NOTE(review): `event == event` is a tautology and always passes;
    # this probably meant to compare against a snapshot of the event
    # taken before handle() ran.
    assert event == event


# Module-level invocations: these tests run on import rather than through
# a test runner.
test_buckets_are_created()
test_queries_are_made()
test_user_attributes_are_updated()
def _translate_create_index(statement: token_groups.Statement,
                            idx: int) -> typing.List[QueryExpression]:
    """Translate a SQL ``CREATE [UNIQUE] INDEX ... ON table (col)`` statement
    into Fauna query expressions.

    :param statement: parsed sqlparse statement to translate
    :param idx: token position at which to start scanning
    :return: a single-element list containing a Do() expression that creates
        the index only if it does not already exist
    :raises exceptions.NotSupportedError: for multi-column indexes
    """
    # Token scan order matters: UNIQUE (optional), then ON, then the
    # function-like `table (columns)` group.
    _, unique = statement.token_next_by(m=(token_types.Keyword, "UNIQUE"),
                                        idx=idx)
    idx, _ = statement.token_next_by(m=(token_types.Keyword, "ON"), idx=idx)
    _, index_params = statement.token_next_by(i=token_groups.Function, idx=idx)
    params_idx, table_identifier = index_params.token_next_by(
        i=token_groups.Identifier)
    table_name = table_identifier.value
    params_idx, column_identifiers = index_params.token_next_by(
        i=token_groups.Parenthesis, idx=params_idx)
    # Collect bare column names from inside the parenthesised column list.
    index_fields = [
        token.value for token in column_identifiers.flatten()
        if token.ttype == token_types.Name
    ]
    if len(index_fields) > 1:
        raise exceptions.NotSupportedError(
            "Creating indexes for multiple columns is not currently supported."
        )
    index_terms = [{
        "field": ["data", index_field]
    } for index_field in index_fields]
    index_name = fql.index_name(table_name,
                                column_name=index_fields[0],
                                index_type=fql.IndexType.TERM)
    return [
        q.do(
            q.if_(
                # We automatically create indices for some fields on collection creation,
                # so we can skip explicit index creation if it already exists.
                q.exists(q.index(index_name)),
                None,
                q.create_index({
                    "name": index_name,
                    "source": q.collection(table_name),
                    "terms": index_terms,
                    # NOTE(review): `unique` is the sqlparse token (or None),
                    # not a bool — presumably relied on for truthiness when
                    # serialized; confirm.
                    "unique": unique,
                }),
            ),
            # Mimics a SQL result row so callers get a uniform response shape.
            q.let(
                {"collection": q.collection(table_name)},
                {"data": [{
                    "id": q.var("collection")
                }]},
            ),
        )
    ]
def create_schema(client):
    """Create the 'customers' class and its two lookup/range indexes.

    :param client: FaunaClient with permission to create classes and indexes
    """
    #
    # Create an class to hold customers
    #
    res = client.query(q.create_class({"name": "customers"}))
    print('Create \'customer\' class: {0}'.format(res))

    #
    # Create two indexes here. The first index is to query customers when you know specific id's.
    # The second is used to query customers by range. Examples of each type of query are presented
    # below.
    #
    res = client.query([
        q.create_index({
            "name": "customer_by_id",
            "source": q.class_("customers"),
            "unique": True,
            # Fix: CreateIndex expects `terms` to be an *array* of term
            # objects; the original passed a bare object here.
            "terms": [{
                "field": ["data", "id"]
            }]
        }),
        q.create_index({
            "name": "customer_id_filter",
            "source": q.class_("customers"),
            "unique": True,
            # Value index: (id, ref) pairs enable range-style filtering.
            "values": [{
                "field": ["data", "id"]
            }, {
                "field": ["ref"]
            }]
        })
    ])
    print(
        'Create \'customer_by_id\' index & \'customer_id_filter\' index : {0}'.
        format(res))
def create_server_client():
    """Create a server-scoped client plus the 'users' collection and index.

    :return: the configured FaunaClient
    """
    client = FaunaClient(secret=os.environ.get('FAUNA_SECRET'))
    client.query(q.create_collection({"name": "users"}))
    client.query(
        q.create_index({
            "name": "users_by_username",
            "source": q.collection("users"),
            # Publicly readable index for unique username lookup.
            "permissions": {
                "read": "public"
            },
            "terms": [{
                "field": ["data", "username"]
            }],
            "unique": True
        }))
    # Bug fix: the original ended with `client.create_collection()`, which
    # is not a FaunaClient method (collections are created via
    # client.query(q.create_collection(...))) and would raise
    # AttributeError. Return the client instead, matching the sibling
    # create_server_client helper.
    return client
def schema(event, context):
    """Lambda handler: idempotently ensure the todos class and index exist."""
    create_todos = query.create_class({'name': 'todos'})
    create_all_todos = query.create_index({
        'name': 'all_todos',
        'source': TODOS
    })
    # If the resource already exists just fetch it, otherwise create it —
    # this makes repeated invocations safe.
    client.query(
        query.if_expr(query.exists(TODOS), query.get(TODOS), create_todos))
    client.query(
        query.if_expr(query.exists(ALL_TODOS), query.get(ALL_TODOS),
                      create_all_todos))
    # create a response
    response = {"statusCode": 200}
    return response
def create_schema(client):
    """Create the 'customers' class and an id-lookup index for it.

    :param client: FaunaClient with permission to create classes and indexes
    """
    #
    # Create an class to hold customers
    #
    res = client.query(q.create_class({"name": "customers"}))
    print('Create \'customer\' class: {0}'.format(res))

    #
    # Create an index to access customer records by id
    #
    res = client.query(
        q.create_index({
            "name": "customer_by_id",
            "source": q.class_("customers"),
            "unique": True,
            # Fix: CreateIndex expects `terms` to be an *array* of term
            # objects; the original passed a bare object here.
            "terms": [{
                "field": ["data", "id"]
            }]
        }))
    print('Create \'customer_by_id\' index: {0}'.format(res))
def test_set_iterator(self):
    """Page.set_iterator walks a Match set page by page, with optional mapping."""
    collection_ref = self.client.query(
        query.create_collection({"name": "gadgets"}))["ref"]
    index_ref = self.client.query(
        query.create_index({
            "name": "gadgets_by_n",
            "active": True,
            "source": collection_ref,
            "terms": [{
                "field": ["data", "n"]
            }]
        }))["ref"]

    def create(n):
        # Create a gadget with data.n == n and return its ref.
        q = query.create(collection_ref, {"data": {"n": n}})
        return self.client.query(q)["ref"]

    a = create(0)
    create(1)
    b = create(0)
    # Two documents match n == 0: `a` and `b`.
    gadgets_set = query.match(index_ref, 0)
    self.assertEqual(
        list(Page.set_iterator(self.client, gadgets_set, page_size=1)),
        [a, b])
    # map_lambda applies a server-side mapping to each page's items.
    query_mapper = lambda a: query.select(['data', 'n'], query.get(a))
    query_mapped_iter = Page.set_iterator(self.client,
                                          gadgets_set,
                                          map_lambda=query_mapper)
    self.assertEqual(list(query_mapped_iter), [0, 0])
    # mapper applies a client-side mapping to each item after fetch.
    mapped_iter = Page.set_iterator(self.client,
                                    gadgets_set,
                                    mapper=lambda x: [x])
    self.assertEqual(list(mapped_iter), [[a], [b]])
def create_indexes(client):
    """Create the application's indexes, skipping any that already exist.

    Covers user lookup by email, product search (by name word-parts and by
    category), and product sort orders (name, price asc/desc, createdAt).

    :param client: FaunaClient with permission to create indexes
    """
    indexes = [{
        # Term index: look users up by their email address.
        "name": "user_by_email",
        "source": q.collection("users"),
        "terms": [{
            "field": ["data", "email"]
        }],
    }, {
        # Search index using a computed binding: `wordparts` is generated
        # server-side from the product name by wordPartsGenerator, enabling
        # partial-word search.
        "name": "products_search_by_name",
        "source": {
            "collection": q.collection('products'),
            "fields": {
                "wordparts": q.query(lambda product: wordPartsGenerator(
                    q.select(['data', 'name'], product)))
            }
        },
        "terms": [{
            "binding": 'wordparts'
        }],
    }, {
        # Term index: look products up by category.
        "name": "products_search_by_category",
        "source": q.collection('products'),
        "terms": [{
            "field": ["data", "categories"]
        }],
    }, {
        # Sort indexes below are keyed by ref so a set of refs can be
        # re-ordered by the value while remaining the same set.
        "name": "products_sort_by_name_asc",
        "source": q.collection('products'),
        "terms": [{
            "field": ["ref"]
        }],
        "values": [
            {
                "field": ["data", "name"]
            },
            {
                "field": ["ref"]
            },
        ]
    }, {
        "name": "products_sort_by_price_asc",
        "source": q.collection('products'),
        "terms": [{
            "field": ["ref"]
        }],
        "values": [
            {
                "field": ["data", "price"]
            },
            {
                "field": ["ref"]
            },
        ]
    }, {
        # Descending price order via the per-value `reverse` flag.
        "name": "products_sort_by_price_desc",
        "source": q.collection('products'),
        "terms": [{
            "field": ["ref"]
        }],
        "values": [
            {
                "field": ["data", "price"],
                "reverse": True
            },
            {
                "field": ["ref"]
            },
        ]
    }, {
        # No terms: matches all products, ordered by creation time.
        "name": "products_sort_by_created_asc",
        "source": q.collection('products'),
        "values": [
            {
                "field": ["data", "createdAt"]
            },
            {
                "field": ["ref"]
            },
        ]
    }]
    # For each spec: create the index only if one with that name does not
    # already exist (makes the setup idempotent).
    client.query(
        q.map_(
            lambda index: q.if_(q.exists(q.index(q.select(["name"], index))),
                                True, q.create_index(index)), indexes))
def _create_table_indices(table_name: str,
                          field_metadata: AllFieldMetadata) -> QueryExpression:
    """Build the CreateIndex expressions for a new table's collection.

    NOTE(review): despite the annotation, this returns a *list* of
    QueryExpression objects — confirm and fix the annotation at source.

    :param table_name: name of the collection being created
    :param field_metadata: per-field metadata (uniqueness, references, ...)
    :return: list of CreateIndex query expressions
    """
    index_by_collection = partial(fql.index_name, table_name)
    # Baseline indexes: one ref-term index and one match-all index.
    index_queries = [
        q.create_index({
            "name": index_by_collection(index_type=fql.IndexType.REF),
            "source": q.collection(table_name),
            "terms": [{
                "field": ["ref"]
            }],
        }),
        q.create_index({
            "name": index_by_collection(),
            "source": q.collection(table_name)
        }),
    ]
    foreign_references = [
        field_name for field_name, field_data in field_metadata.items()
        if any(field_data["references"])
    ]
    # We create a foreign ref index per foreign ref that exists in the collection,
    # because this permits us to access any foreign ref we may need to continue
    # a chain of joins.
    for foreign_reference in foreign_references:
        index_queries.append(
            q.create_index({
                "name": index_by_collection(
                    index_type=fql.IndexType.REF,
                    foreign_key_name=foreign_reference,
                ),
                "source": q.collection(table_name),
                "terms": [{
                    "field": ["ref"]
                }],
                "values": [
                    {
                        "field": ["data", foreign_reference]
                    },
                ],
            }))
    for field_name, field_data in field_metadata.items():
        # Fauna can query documents by ID by default, so we don't need an index for it
        if field_name == "id":
            continue
        index_by_field = partial(index_by_collection, column_name=field_name)
        index_queries.extend([
            q.create_index({
                "name": index_by_field(  # pylint: disable=no-value-for-parameter
                    index_type=fql.IndexType.VALUE),
                "source": q.collection(table_name),
                "values": [{
                    "field": ["data", field_name]
                }, {
                    "field": ["ref"]
                }],
            }),
            # Sorting index, so we can support ORDER BY clauses in SQL queries.
            # This will allow us to order a set of refs by a value while still
            # keeping that same set of refs.
            q.create_index({
                "name": index_by_field(  # pylint: disable=no-value-for-parameter
                    index_type=fql.IndexType.SORT),
                "source": q.collection(table_name),
                "terms": [{
                    "field": ["ref"]
                }],
                "values": [{
                    "field": ["data", field_name]
                }, {
                    "field": ["ref"]
                }],
            }),
        ])
        # We need a separate index for unique fields, because the values-based indices
        # contain the 'ref' field, which will never be duplicated
        is_unique = field_data["unique"]
        if is_unique:
            index_queries.append(
                q.create_index({
                    "name": index_by_field(  # pylint: disable=no-value-for-parameter
                        index_type=fql.IndexType.TERM),
                    "source": q.collection(table_name),
                    "terms": [{
                        "field": ["data", field_name]
                    }],
                    "unique": is_unique,
                }))
        # We need a ref-based index for foreign keys to permit JOIN queries via matching
        # document refs
        is_foreign_key = any(field_data["references"])
        if is_foreign_key:
            index_queries.append(
                q.create_index({
                    "name": index_by_field(  # pylint: disable=no-value-for-parameter
                        index_type=fql.IndexType.REF),
                    "source": q.collection(table_name),
                    "terms": [{
                        "field": ["data", field_name]
                    }],
                }))
    return index_queries
}))

# Set up a collection
# Create a collection using the CreateCollection function with a param_object containing the name of the collection.
# We shall name our collection "posts":
client.query(q.create_collection({"name": "posts"}))

# Create an index
# The customary way to access documents within a collection is by specifying a criteria for one of the fields.
# To enable criteria-based searches, we need to first create an index using the path of the field within the document.
# Term index: find posts by their title.
client.query(
    q.create_index({
        "name": "posts_by_title",
        "source": q.collection("posts"),
        "terms": [{
            "field": ["data", "title"]
        }]
    }))
# Term + value index: find posts by tag, returning their titles.
client.query(
    q.create_index({
        "name": "posts_by_tags_with_title",
        "source": q.collection("posts"),
        "terms": [{
            "field": ["data", "tags"]
        }],
        "values": [{
            "field": ["data", "title"]
        }]
    }))
def test_typecheckfns(self):
    """Exercise every is_* type-check function against a mixed value list.

    Builds one document/index/key/role/etc. of each kind, then counts how
    many entries of `values` each predicate accepts and compares against
    the `expected` tally.
    """
    coll = query.collection("typecheck_coll")
    db = query.database("typecheck_db")
    # NOTE(review): `fn` appears unused below (the function ref comes from
    # the create_function result instead); confirm it can be removed.
    fn = query.function("typecheck_fn")
    index = query.index("typecheck_index")
    self.admin_client.query(query.create_collection({"name": "typecheck_coll"}))
    self.admin_client.query(query.create_index(
        {"name": "typecheck_index", "source": coll, "active": True}))
    doc = self.admin_client.query(query.create(
        coll, {"data": {}, "credentials": {"password": "******"}}))
    self.admin_client.query(query.create_database({"name": "typecheck_db"}))
    function = self._q(query.create_function(
        {"name": "typecheck_fn", "body": query.query(query.lambda_("x", query.now()))}))
    key = self.admin_client.query(
        query.create_key({"database": db, "role": "admin"}))
    token = self._q(query.login(doc["ref"], {"password": "******"}))
    credentials = self._q(query.select(['data', 0], query.paginate(query.credentials())))
    role = self.admin_client.query(query.create_role(
        {"name": "typecheck_role", "membership": [], "privileges": []}))
    # One representative value (or several) per type the predicates test.
    values = [
        None,
        bytearray([12, 3, 4, 5]),
        credentials,
        90,
        3.14,
        True,
        query.to_date(query.now()),
        query.date("1970-01-01"),
        query.now(),
        query.epoch(1, "second"),
        query.time("1970-01-01T00:00:00Z"),
        {"x": 10},
        query.get(doc["ref"]),
        query.paginate(query.collections()),
        [1, 2, 3],
        "a string",
        coll,
        query.collections(),
        query.match(index),
        query.union(query.match(index)),
        doc["ref"],
        query.get(doc["ref"]),
        index,
        db,
        coll,
        token["ref"],
        role["ref"],
        key["ref"],
        function["ref"],
        query.get(function["ref"]),
        query.query(query.lambda_("x", query.var("x"))),
    ]
    # (label, predicate) pairs; "collection" appears twice on purpose,
    # so its two counts must agree in `expected`.
    pairs = [
        ["array", query.is_array],
        ["object", query.is_object],
        ["string", query.is_string],
        ["null", query.is_null],
        ["number", query.is_number],
        ["bytes", query.is_bytes],
        ["date", query.is_date],
        ["timestamp", query.is_timestamp],
        ["set", query.is_set],
        ["ref", query.is_ref],
        ["boolean", query.is_boolean],
        ["double", query.is_double],
        ["integer", query.is_integer],
        ["database", query.is_database],
        ["index", query.is_index],
        ["collection", query.is_collection],
        ["token", query.is_token],
        ["function", query.is_function],
        ["collection", query.is_collection],
        ["role", query.is_role],
        ["credentials", query.is_credentials],
        ["key", query.is_key],
    ]
    # Expected number of matches per predicate over `values`.
    expected = {
        "array": 1,
        "boolean": 1,
        "bytes": 1,
        "collection": 3,
        "credentials": 1,
        "database": 1,
        "date": 2,
        "double": 1,
        "function": 2,
        "integer": 1,
        "index": 1,
        "key": 1,
        "null": 1,
        "number": 2,
        "object": 5,
        "ref": 11,
        "role": 1,
        "set": 3,
        "string": 1,
        "timestamp": 3,
        "token": 1,
    }
    # Build one {label: Count(Filter(pred, vals))} object per pair; Merge
    # collapses them into a single result object server-side.
    q = []
    for p in pairs:
        d = dict()
        d[p[0]] = query.count(query.filter_(query.lambda_("v", p[1](query.var("v"))), query.var("vals")))
        q.append(d)
    actual = self._q(query.let({"vals": values}, query.merge({}, q)))
    self.assertEqual(actual, expected)