def sum_customer_balanaces(client, cust_refs):
    """Aggregate the balances of the customers behind 'cust_refs'.

    These refs were produced by the createCustomers routine. A class index
    could (and normally would) do this server-side; fetching via references
    is done here deliberately to demonstrate how references are used.
    """
    records = client.query(
        q.map_(lambda cust_ref: q.select("data", q.get(cust_ref)), cust_refs))
    total = sum(record['balance'] for record in records)
    print('Customer Balance Sum: {0}'.format(total))
    return total
def create_database(scheme, domain, port, secret, db_name):
    """Drop 'db_name' if it exists, recreate it, and return a server-role key secret for it.

    'secret' must be an admin secret (for FaunaDB-Cloud, your account's secret).
    """
    # Admin-level client: used only for database and key management.
    admin_client = FaunaClient(secret=secret, domain=domain, scheme=scheme, port=port)
    print("Connected to FaunaDB as admin!")

    # Existence check, delete, and recreate all happen in a single call to
    # Fauna, so the database always ends up freshly created.
    res = admin_client.query(
        q.if_(
            q.exists(q.database(db_name)),
            [q.delete(q.database(db_name)),
             q.create_database({"name": db_name})],
            q.create_database({"name": db_name})))
    print('DB {0} created: {1}'.format(db_name, res))

    # Mint a key scoped to the new database; the caller builds the client
    # used by the rest of the examples from the returned secret.
    res = admin_client.query(
        q.select(["secret"],
                 q.create_key({
                     "database": q.database(db_name),
                     "role": "server"
                 })))
    print('DB {0} secret: {1}'.format(db_name, res))
    return res
def _translate_drop_default(table_name: str, column_name: str) -> QueryExpression:
    """Build the FQL that clears the stored DEFAULT for a column.

    Sets 'default_' to None on the column-info document(s) matching
    (table_name, column_name) and returns a SQLAlchemy-style result payload
    containing the ref of the altered document.
    """
    # Map over the matching column-info refs, blanking out 'default_' on each.
    drop_default = q.map_(
        q.lambda_(
            "column_info_ref",
            q.update(q.var("column_info_ref"), {"data": {
                "default_": None
            }}),
        ),
        q.paginate(_fetch_column_info_refs(table_name, column_name)),
    )
    return q.let(
        {
            "altered_docs": drop_default,
            # Should only be one document that matches the unique combination
            # of collection and field name, so we just select the first.
            "altered_ref": q.select([0, "ref"], q.var("altered_docs")),
        },
        # Response shape mimics a one-row result set with the altered ref as id.
        {"data": [{
            "id": q.var("altered_ref")
        }]},
    )
def test_set_iterator(self):
    """Page.set_iterator should walk a match set page-by-page, with optional
    server-side (map_lambda) and client-side (mapper) transformations."""
    # Fixture: a collection plus a term index on data.n so documents can be
    # matched by their 'n' value.
    collection_ref = self.client.query(
        query.create_collection({"name": "gadgets"}))["ref"]
    index_ref = self.client.query(
        query.create_index({
            "name": "gadgets_by_n",
            "active": True,
            "source": collection_ref,
            "terms": [{
                "field": ["data", "n"]
            }]
        }))["ref"]

    def create(n):
        # Create one gadget document with data.n == n; return its ref.
        q = query.create(collection_ref, {"data": {"n": n}})
        return self.client.query(q)["ref"]

    # Two documents with n=0 (a, b) and one with n=1 (excluded by the match).
    a = create(0)
    create(1)
    b = create(0)
    gadgets_set = query.match(index_ref, 0)

    # page_size=1 forces multiple pages; the iterator must flatten them in order.
    self.assertEqual(
        list(Page.set_iterator(self.client, gadgets_set, page_size=1)),
        [a, b])

    # map_lambda transforms each ref server-side (here: ref -> data.n).
    query_mapper = lambda a: query.select(['data', 'n'], query.get(a))
    query_mapped_iter = Page.set_iterator(self.client, gadgets_set, map_lambda=query_mapper)
    self.assertEqual(list(query_mapped_iter), [0, 0])

    # mapper transforms each item client-side after the page is fetched.
    mapped_iter = Page.set_iterator(self.client, gadgets_set, mapper=lambda x: [x])
    self.assertEqual(list(mapped_iter), [[a], [b]])
def record(self, data: Data) -> QueryResult:
    """Upsert a topic document in a single query.

    If a document already exists for the message's topic it is updated with
    'data'; otherwise a new document is created in the collection.

    Parameters
    ----------
    data : Data
        Parsed message from the MQTT broker.

    Returns
    -------
    QueryResult
        See src.types.QueryResult for its signature.
    """
    topic_match = q.match(self.index, data.get("topic", ""))
    on_existing = q.update(q.select(["ref"], q.get(topic_match)), {"data": data})
    on_missing = q.create(self.collection, {"data": data})
    # Existence check and the chosen write happen atomically server-side.
    return self.db.query(
        q.let(
            {"topic_exists": q.exists(topic_match)},
            q.if_(q.var("topic_exists"), on_existing, on_missing),
        ))
def _define_match_set(query_filter: sql.Filter) -> QueryExpression:
    """Build the FQL set expression that matches documents for one WHERE filter.

    Equality filters prefer a REF index, then a TERM index, then fall back to
    a VALUE-index range; inequality filters are expressed as VALUE-index
    ranges (minus the equality range for strict comparisons).
    """
    field_name = query_filter.column.name
    comparison_value = query_filter.value
    index_name_for_collection = functools.partial(index_name,
                                                  query_filter.table_name)
    convert_to_collection_ref_set = functools.partial(convert_to_ref_set,
                                                      query_filter.table_name)

    # Fetch the metadata documents for indexes that reference another
    # collection (used to resolve foreign-key comparisons).
    get_info_indexes_with_references = lambda collection_name, field_name: q.map_(
        q.lambda_("info_index_ref", q.get(q.var("info_index_ref"))),
        q.paginate(
            q.match(
                q.index(
                    index_name(
                        "information_schema_indexes_",
                        column_name="name_",
                        index_type=IndexType.TERM,
                    )),
                index_name(
                    collection_name,
                    column_name=field_name,
                    index_type=IndexType.REF,
                ),
            ),
        ),
    )

    index_name_for_field = functools.partial(index_name_for_collection, field_name)
    # Range collapsed to a single value: the "equal to comparison_value" set.
    equality_range = q.range(
        q.match(q.index(index_name_for_field(IndexType.VALUE))),
        [comparison_value],
        [comparison_value],
    )

    if query_filter.checks_whether_equal:
        # Comparing on 'ref' means the value is a document ID string.
        if field_name == "ref":
            assert isinstance(comparison_value, str)
            return q.singleton(
                q.ref(q.collection(query_filter.table_name), comparison_value))
        return q.let(
            {
                "ref_index": q.index(index_name_for_field(IndexType.REF)),
                "term_index": q.index(index_name_for_field(IndexType.TERM)),
                "info_indexes":
                get_info_indexes_with_references(query_filter.table_name,
                                                 field_name),
                "comparison_value": comparison_value,
            },
            q.if_(
                q.exists(q.var("ref_index")),
                q.match(
                    q.var("ref_index"),
                    get_foreign_key_ref(
                        q.var("comparison_value"),
                        # Assumes that there is only one reference per foreign key
                        # and that it refers to the associated collection's ID field
                        # (e.g. {'associated_table': 'id'}).
                        # This is enforced via NotSupported errors when creating collections.
                        q.select([0, DATA, "referred_table_"],
                                 q.var("info_indexes")),
                    ),
                ),
                q.if_(
                    q.exists(q.var("term_index")),
                    q.match(
                        q.var("term_index"),
                        q.var("comparison_value"),
                    ),
                    convert_to_collection_ref_set(equality_range),
                ),
            ),
        )

    # In the building of Filter objects from SQL tokens, we enforce the convention
    # of <column name> <operator> <value> for WHERE clauses, so we build the FQL queries
    # assuming that '>' means 'column value greater than literal value'. I can't think
    # of a good way to centralize the knowledge of this convention across
    # all query translation, so I'm leaving this note as a warning.
    if query_filter.checks_whether_greater_than:
        inclusive_comparison_range = q.range(
            q.match(q.index(index_name_for_field(IndexType.VALUE))),
            [comparison_value],
            [],
        )
        # Strict '>' is the inclusive range minus the equality set.
        return convert_to_collection_ref_set(
            q.difference(inclusive_comparison_range, equality_range))

    if query_filter.checks_whether_greater_than_or_equal:
        inclusive_comparison_range = q.range(
            q.match(q.index(index_name_for_field(IndexType.VALUE))),
            [comparison_value],
            [],
        )
        return convert_to_collection_ref_set(inclusive_comparison_range)

    if query_filter.checks_whether_less_than:
        inclusive_comparison_range = q.range(
            q.match(q.index(index_name_for_field(IndexType.VALUE))),
            [],
            [comparison_value],
        )
        # Strict '<' is the inclusive range minus the equality set.
        return convert_to_collection_ref_set(
            q.difference(inclusive_comparison_range, equality_range))

    if query_filter.checks_whether_less_than_or_equal:
        inclusive_comparison_range = q.range(
            q.match(q.index(index_name_for_field(IndexType.VALUE))),
            [],
            [comparison_value],
        )
        return convert_to_collection_ref_set(inclusive_comparison_range)

    raise exceptions.NotSupportedError(
        f"Unsupported comparison {query_filter.comparison} was received.")
def test_typecheckfns(self):
    """Exercise every is_* type-check function against a fixed bag of values
    and verify the count of matches per type."""
    # Handles for the resources the fixtures below create.
    coll = query.collection("typecheck_coll")
    db = query.database("typecheck_db")
    fn = query.function("typecheck_fn")
    index = query.index("typecheck_index")
    self.admin_client.query(query.create_collection({"name": "typecheck_coll"}))
    self.admin_client.query(query.create_index(
        {"name": "typecheck_index", "source": coll, "active": True}))
    doc = self.admin_client.query(query.create(
        coll, {"data": {}, "credentials": {"password": "******"}}))
    self.admin_client.query(query.create_database({"name": "typecheck_db"}))
    function = self._q(query.create_function(
        {"name": "typecheck_fn", "body": query.query(query.lambda_("x", query.now()))}))
    key = self.admin_client.query(
        query.create_key({"database": db, "role": "admin"}))
    token = self._q(query.login(doc["ref"], {"password": "******"}))
    credentials = self._q(query.select(['data', 0], query.paginate(query.credentials())))
    role = self.admin_client.query(query.create_role(
        {"name": "typecheck_role", "membership": [], "privileges": []}))

    # One value (at least) for every type the is_* predicates can report.
    # The per-type counts below in 'expected' depend on exactly this list.
    values = [
        None,
        bytearray([12,3,4,5]),
        credentials,
        90,
        3.14,
        True,
        query.to_date(query.now()),
        query.date("1970-01-01"),
        query.now(),
        query.epoch(1, "second"),
        query.time("1970-01-01T00:00:00Z"),
        {"x": 10},
        query.get(doc["ref"]),
        query.paginate(query.collections()),
        [1, 2, 3],
        "a string",
        coll,
        query.collections(),
        query.match(index),
        query.union(query.match(index)),
        doc["ref"],
        query.get(doc["ref"]),
        index,
        db,
        coll,
        token["ref"],
        role["ref"],
        key["ref"],
        function["ref"],
        query.get(function["ref"]),
        query.query(query.lambda_("x", query.var("x"))),
    ]
    # (type label, predicate) pairs; "collection" appears twice on purpose,
    # which contributes to its expected count.
    pairs = [
        ["array", query.is_array],
        ["object", query.is_object],
        ["string", query.is_string],
        ["null", query.is_null],
        ["number", query.is_number],
        ["bytes", query.is_bytes],
        ["date", query.is_date],
        ["timestamp", query.is_timestamp],
        ["set", query.is_set],
        ["ref", query.is_ref],
        ["boolean", query.is_boolean],
        ["double", query.is_double],
        ["integer", query.is_integer],
        ["database", query.is_database],
        ["index", query.is_index],
        ["collection", query.is_collection],
        ["token", query.is_token],
        ["function", query.is_function],
        ["collection", query.is_collection],
        ["role", query.is_role],
        ["credentials", query.is_credentials],
        ["key", query.is_key],
    ]
    # How many of 'values' each predicate must match.
    expected = {
        "array": 1,
        "boolean": 1,
        "bytes": 1,
        "collection": 3,
        "credentials": 1,
        "database": 1,
        "date": 2,
        "double": 1,
        "function": 2,
        "integer": 1,
        "index": 1,
        "key": 1,
        "null": 1,
        "number": 2,
        "object": 5,
        "ref": 11,
        "role": 1,
        "set": 3,
        "string": 1,
        "timestamp": 3,
        "token": 1,
    }
    # Build one {label: count(filter(pred, vals))} dict per pair, then merge
    # them server-side so the whole check runs in a single query.
    # NOTE: local 'q' here is just a list of sub-queries, not the fauna module.
    q = []
    for p in pairs:
        d = dict()
        d[p[0]] = query.count(query.filter_(query.lambda_("v", p[1](query.var("v"))), query.var("vals")))
        q.append(d)
    actual = self._q(query.let({"vals": values}, query.merge({}, q)))
    self.assertEqual(actual, expected)
def create_indexes(client):
    """Idempotently create the application's indexes.

    Each index is created only if an index with the same name does not
    already exist; existing indexes are left untouched (the map yields True
    for them).
    """
    indexes = [{
        # Term lookup of users by email address.
        "name": "user_by_email",
        "source": q.collection("users"),
        "terms": [{
            "field": ["data", "email"]
        }],
    }, {
        # Full-text-ish search on product names via a computed binding that
        # expands each name into word parts (see wordPartsGenerator).
        "name": "products_search_by_name",
        "source": {
            "collection": q.collection('products'),
            "fields": {
                "wordparts": q.query(lambda product: wordPartsGenerator(
                    q.select(['data', 'name'], product)))
            }
        },
        "terms": [{
            "binding": 'wordparts'
        }],
    }, {
        # Products filtered by category value.
        "name": "products_search_by_category",
        "source": q.collection('products'),
        "terms": [{
            "field": ["data", "categories"]
        }],
    }, {
        # Ref-termed sort index: name ascending (ref as tiebreaker).
        "name": "products_sort_by_name_asc",
        "source": q.collection('products'),
        "terms": [{
            "field": ["ref"]
        }],
        "values": [
            {
                "field": ["data", "name"]
            },
            {
                "field": ["ref"]
            },
        ]
    }, {
        # Price ascending.
        "name": "products_sort_by_price_asc",
        "source": q.collection('products'),
        "terms": [{
            "field": ["ref"]
        }],
        "values": [
            {
                "field": ["data", "price"]
            },
            {
                "field": ["ref"]
            },
        ]
    }, {
        # Price descending ('reverse': True flips the sort order).
        "name": "products_sort_by_price_desc",
        "source": q.collection('products'),
        "terms": [{
            "field": ["ref"]
        }],
        "values": [
            {
                "field": ["data", "price"],
                "reverse": True
            },
            {
                "field": ["ref"]
            },
        ]
    }, {
        # Creation-time ascending over the whole collection (no terms).
        "name": "products_sort_by_created_asc",
        "source": q.collection('products'),
        "values": [
            {
                "field": ["data", "createdAt"]
            },
            {
                "field": ["ref"]
            },
        ]
    }]
    # Single round trip: create each index unless it already exists.
    client.query(
        q.map_(
            lambda index: q.if_(q.exists(q.index(q.select(["name"], index))),
                                True, q.create_index(index)), indexes))
def test_value_not_found(self):
    # Selecting a missing key from an empty object must fail with NotFound,
    # message "value not found", and failure position ["from"].
    self._assert_query_error(query.select("a", {}), NotFound,
                             "value not found", ["from"])
def create_roles(client):
    """Create the 'admin' and 'customer' ABAC roles.

    Membership predicates key off data.type on the user document; customer
    privileges are further restricted with per-document predicates so users
    can only touch their own records.
    """
    roles = [{
        "name": "admin",
        # A user belongs to this role when their document's data.type == "admin".
        "membership": [{
            "resource": q.collection("users"),
            "predicate": q.query(lambda ref: q.equals(
                q.select(["data", "type"], q.get(ref)), "admin"))
        }],
        # Admins get full read/create/write on the core collections, plus the
        # right to call the category-existence UDF.
        "privileges": [{
            "resource": q.collection("categories"),
            "actions": {
                "read": True,
                "create": True,
                "write": True
            }
        }, {
            "resource": q.collection("products"),
            "actions": {
                "read": True,
                "write": True,
                "create": True
            }
        }, {
            "resource": q.collection("users"),
            "actions": {
                "read": True,
                "create": True,
                "write": True
            }
        }, {
            "resource": q.collection("orders"),
            "actions": {
                "read": True,
                "write": True,
                "create": True
            }
        }, {
            "resource": objects.Ref("check_if_categories_exists", objects.Ref("functions")),
            "actions": {
                "call": True
            }
        }]
    }, {
        "name": "customer",
        # A user belongs to this role when their document's data.type == "customer".
        "membership": [{
            "resource": q.collection("users"),
            "predicate": q.query(lambda ref: q.equals(
                q.select(["data", "type"], q.get(ref)), "customer"))
        }],
        "privileges": [{
            "resource": objects.Ref("purchase", objects.Ref("functions")),
            "actions": {
                "call": True
            }
        }, {
            # Only the order's own customer may query its status history.
            "resource": objects.Ref("get_order_status_history", objects.Ref("functions")),
            "actions": {
                "call": q.query(lambda ref: q.equals(
                    q.current_identity(),
                    q.select(["data", "customer"], q.get(ref)))),
            }
        }, {
            # Users may read/write only their own user document.
            "resource": q.collection("users"),
            "actions": {
                "read": q.query(lambda ref: q.equals(q.current_identity(), ref)),
                "write": q.query(lambda ref: q.equals(q.current_identity(), ref)),
            }
        }, {
            # Users may read only their own orders.
            "resource": q.collection("orders"),
            "actions": {
                "read": q.query(lambda ref: q.equals(
                    q.current_identity(),
                    q.select(["data", "customer"], q.get(ref)))),
            }
        }]
    }]
    client.query(q.map_(lambda role: q.create_role(role), roles))
def subscribe_to_anime(self, anime_link: str):
    """Subscribe this chat to updates for the anime at 'anime_link'.

    Scrapes the anime's info, then in one Fauna transaction either (a) links
    an existing anime document to the user (bumping its follower count), or
    (b) creates a new anime document and links it. Replies to the user via
    the Telegram bot; all failures are logged, never raised.
    """
    try:
        # create a new anime document
        anime_info = anime_alarm.utils.GGAScraper().get_anime_info(
            anime_link)
        # NOTE(review): debug print left in — consider logging instead.
        print(anime_info['anime_id'])
        result = client.query(
            q.let(
                {
                    # The user's current subscription list (refs).
                    'user_anime_list': q.select(
                        ['data', 'animes_watching'],
                        q.get(q.ref(q.collection(users), self.chat_id))),
                },
                q.if_(
                    # check if this anime exists in the db
                    q.exists(
                        q.match(q.index(anime_by_id), anime_info['anime_id'])),
                    # if it exists...
                    q.let(
                        {
                            'anime_ref': q.select(
                                'ref',
                                q.get(
                                    q.match(q.index(anime_by_id),
                                            anime_info['anime_id'])))
                        },
                        q.if_(
                            # check if user has subscribed to this anime already
                            q.contains_value(q.var('anime_ref'),
                                             q.var('user_anime_list')),
                            'This anime is already on your watch list!',
                            # Link existing anime to the user and bump followers.
                            q.do(
                                q.update(
                                    q.ref(q.collection(users), self.chat_id), {
                                        'data': {
                                            'animes_watching': q.append(
                                                q.var('user_anime_list'),
                                                [q.var('anime_ref')])
                                        }
                                    }),
                                q.update(
                                    q.var('anime_ref'), {
                                        'data': {
                                            'followers': q.add(
                                                q.select([
                                                    'data', 'followers'
                                                ], q.get(
                                                    q.var('anime_ref'))), 1)
                                        }
                                    }),
                            ))),
                    # Anime not in the db yet: create it and subscribe the user.
                    q.let(
                        {'new_anime_id': q.new_id()},
                        q.do(
                            # create new anime document
                            q.create(
                                q.ref(q.collection(animes), q.var('new_anime_id')),
                                {
                                    'data': {
                                        'title': anime_info['title'],
                                        'followers': 1,
                                        'link': anime_link,
                                        'anime_id': anime_info['anime_id'],
                                        'anime_alias': anime_info['anime_alias'],
                                        'episodes': anime_info['number_of_episodes'],
                                        'last_episode': {
                                            'link': anime_info[
                                                'latest_episode_link'],
                                            'title': anime_info[
                                                'latest_episode_title'],
                                        },
                                    }
                                }),
                            # add to user's list of subscribed animes
                            q.update(
                                q.ref(q.collection(users), self.chat_id), {
                                    'data': {
                                        'animes_watching': q.append(
                                            q.var('user_anime_list'), [
                                                q.ref(
                                                    q.collection(animes),
                                                    q.var('new_anime_id'))
                                            ])
                                    }
                                }),
                        )))))
        # A string result is the "already subscribed" message; anything else
        # means a write happened.
        if isinstance(result, str):
            updater.bot.send_message(chat_id=self.chat_id, text=result)
        else:
            updater.bot.send_message(
                chat_id=self.chat_id,
                text='You are now listening for updates on ' +
                anime_info['title'])
    except Exception as err:
        # Best-effort command handler: never crash the bot, just log.
        log_error(err)
def resolution(self) -> Resolution:
    """Fetch this user's configured resolution from their users-collection document."""
    # NOTE(review): debug print left in — consider removing or using logging.
    print('getting resolution')
    resolution = client.query(
        q.select(['data', 'config', 'resolution'],
                 q.get(q.ref(q.collection(users), self.chat_id))))
    return Resolution(resolution)
def translate_drop(
        statement: token_groups.Statement) -> typing.List[QueryExpression]:
    """Translate a DROP SQL query into an equivalent FQL query.

    Params:
    -------
    statement: An SQL statement returned by sqlparse.

    Returns:
    --------
    An FQL query expression.
    """
    # Pull the table name out of 'DROP TABLE <identifier>'.
    idx, _ = statement.token_next_by(m=(token_types.Keyword, "TABLE"))
    _, table_identifier = statement.token_next_by(i=token_groups.Identifier,
                                                  idx=idx)
    table_name = table_identifier.value

    # Deleting the collection returns the deleted document; we keep its ref
    # for the result payload.
    deleted_collection = q.select("ref", q.delete(q.collection(table_name)))

    return [
        q.do(
            # First purge all information_schema metadata that refers to the
            # table: its tables entry, its column entries, and its index
            # entries — all unioned into one ref set and deleted.
            q.map_(
                q.lambda_("ref", q.delete(q.var("ref"))),
                q.paginate(
                    q.union(
                        q.match(
                            q.index(
                                fql.index_name(
                                    "information_schema_tables_",
                                    column_name="name_",
                                    index_type=fql.IndexType.TERM,
                                )),
                            table_name,
                        ),
                        fql.convert_to_ref_set(
                            "information_schema_columns_",
                            q.range(
                                q.match(
                                    q.index(
                                        fql.index_name(
                                            "information_schema_columns_",
                                            column_name="table_name_",
                                            index_type=fql.IndexType.VALUE,
                                        ))),
                                [table_name],
                                [table_name],
                            ),
                        ),
                        fql.convert_to_ref_set(
                            "information_schema_indexes_",
                            q.range(
                                q.match(
                                    q.index(
                                        fql.index_name(
                                            "information_schema_indexes_",
                                            column_name="table_name_",
                                            index_type=fql.IndexType.VALUE,
                                        ))),
                                [table_name],
                                [table_name],
                            ),
                        ),
                    ),
                ),
            ),
            # Then drop the collection itself and surface its ref as the
            # single-row result.
            q.let(
                {"collection": deleted_collection},
                {"data": [{
                    "id": q.var("collection")
                }]},
            ),
        )
    ]
def test_select(self):
    """select descends into nested objects; a missing path raises NotFound
    unless select_with_default supplies a fallback."""
    nested = {"a": {"b": 1}}
    self.assertEqual(self._q(query.select("a", nested)), {"b": 1})
    self.assertEqual(self._q(query.select(["a", "b"], nested)), 1)
    self.assertIsNone(self._q(query.select_with_default("c", nested, None)))
    self.assertRaises(NotFound, lambda: self._q(query.select("c", nested)))
def callback_handler_func(update: Update, context: CallbackContext):
    """Dispatch a Telegram inline-keyboard callback of the form '<command>=<payload>'.

    Supported commands: subscribe, unsubscribe, getlatest, set_resolution.
    Unknown commands are ignored.
    """
    user = User(update.effective_chat.id)
    # Callback data lives on the first button of the message's keyboard.
    callback_message = update.callback_query.message.reply_markup.inline_keyboard[
        0][0].callback_data
    [command, payload] = callback_message.split(sep='=')
    if command == 'subscribe':
        user.subscribe_to_anime(payload)
    elif command == 'unsubscribe':
        user.unsubscribe_from_anime(payload)
    elif command == 'getlatest':
        try:
            # Scrape latest-episode info and build a shortened download link
            # at the user's preferred resolution.
            anime_info = scraper.get_anime_info(payload)
            latest_episode_download_link = shorten(
                scraper.get_download_link(anime_info['latest_episode_link'],
                                          resolution=user.resolution))
            markup = [[
                InlineKeyboardButton(text='Download',
                                     url=latest_episode_download_link)
            ]]
            context.bot.send_message(chat_id=user.chat_id,
                                     text=anime_info['latest_episode_title'],
                                     reply_markup=InlineKeyboardMarkup(markup))
        except CannotDownloadAnimeException as err:
            log_error(err)
            # NOTE(review): these concatenations lack spaces around 'payload'
            # in the sent text — left byte-identical on purpose.
            context.bot.send_message(chat_id=user.chat_id,
                                     text="Sorry," + payload +
                                     "could not be downloaded at this "
                                     "time!")
            context.bot.send_message(chat_id=os.getenv('ADMIN_CHAT_ID'),
                                     text='A user tried to download ' +
                                     payload + "but could not due to error: " +
                                     str(err))
            return
        except Exception as err:
            log_error(err)
            return
        else:
            # check if anime is in our anime registry
            try:
                # Return the stored anime doc only if the scraped episode
                # count is newer than what we have; else None.
                anime_from_db = client.query(
                    q.if_(
                        q.exists(
                            q.match(q.index(anime_by_id),
                                    anime_info['anime_id'])),
                        q.let(
                            {
                                'anime': q.get(
                                    q.match(q.index(anime_by_id),
                                            anime_info['anime_id']))
                            },
                            q.if_(
                                q.gt(
                                    anime_info['number_of_episodes'],
                                    q.select(['data', 'episodes'],
                                             q.var('anime'))),
                                q.var('anime'), None)), None))
            except errors.NotFound:
                anime_from_db = None
            if anime_from_db is not None:
                # New episode detected: fan the update out to subscribers.
                send_update_to_subscribed_users(
                    anime_from_db,
                    download_links={
                        user.resolution: latest_episode_download_link
                    },
                    anime_info=anime_info)
    elif command == 'set_resolution':
        try:
            new_res = Resolution(payload)
            user.resolution = new_res
            context.bot.send_message(
                chat_id=user.chat_id,
                text=f'Your desired resolution has been set to {new_res.value}({resolutions[new_res]}).\nThis resolution will be used for your future /subscribe and /latest commands.'
            )
        except ValueError:
            # Payload was not a valid Resolution value.
            context.bot.send_message(chat_id=user.chat_id,
                                     text='Unidentified resolution level!')
            context.bot.send_message(chat_id=os.getenv('ADMIN_CHAT_ID'),
                                     text='Unidentified resolution level!')
    else:
        # Unknown command: deliberately ignored.
        pass
def processAggregates(match_data, count):
    """Fold one new match into the stored match_aggregate_info documents.

    Updates min/max match duration, max first-blood time, and the running
    mean/average values, where 'count' is the total number of matches
    including this one.

    Parameters
    ----------
    match_data : dict
        A match payload; reads result.duration and result.first_blood_time.
    count : int
        Total number of matches processed so far (including this one).
    """
    match_duration = match_data['result']['duration']
    first_blood_time = match_data['result']['first_blood_time']

    # Hoist the aggregate-document ids: each is needed both to fetch the
    # documents and to recognise them by ref below.
    min_duration_id = getIntValue('min_match_duration')
    max_duration_id = getIntValue('max_match_duration')
    max_first_blood_id = getIntValue('max_first_blood_time')
    mean_duration_id = getIntValue('mean_match_duration')
    avg_first_blood_id = getIntValue('avg_first_blood_time')

    aggregate_info_list = client.query(
        q.map_(
            q.lambda_(
                'data',
                q.get(q.ref(q.collection('match_aggregate_info'), q.var('data')))
            ),
            [min_duration_id, max_duration_id, max_first_blood_id,
             mean_duration_id, avg_first_blood_id]
        )
    )

    new_aggregate_list = []
    for aggregate in aggregate_info_list:
        ref = aggregate['ref']
        data = aggregate['data']
        ref_str = str(ref)
        new_aggregate = {'ref': ref}
        if 'id=' + str(min_duration_id) in ref_str:
            if data['data'] > match_duration:
                new_aggregate['data'] = {'data': match_duration}
                new_aggregate_list.append(new_aggregate)
        if 'id=' + str(max_duration_id) in ref_str:
            if data['data'] < match_duration:
                new_aggregate['data'] = {'data': match_duration}
                new_aggregate_list.append(new_aggregate)
        if 'id=' + str(max_first_blood_id) in ref_str:
            # BUG FIX: this branch previously compared and stored
            # match_duration; the max first-blood aggregate must track
            # first_blood_time.
            if data['data'] < first_blood_time:
                new_aggregate['data'] = {'data': first_blood_time}
                new_aggregate_list.append(new_aggregate)
        if 'id=' + str(mean_duration_id) in ref_str:
            # Incremental mean: old_mean * (n-1)/n + new_value/n.
            new_aggregate['data'] = {
                'data': (data['data'] * (count - 1) + match_duration) / count
            }
            new_aggregate_list.append(new_aggregate)
        if 'id=' + str(avg_first_blood_id) in ref_str:
            new_aggregate['data'] = {
                'data': (data['data'] * (count - 1) + first_blood_time) / count
            }
            new_aggregate_list.append(new_aggregate)

    # Write all changed aggregates back in one query.
    client.query(
        q.map_(
            q.lambda_(
                'data',
                q.update(
                    q.select(['ref'], q.var('data')),
                    {'data': q.select(['data'], q.var('data'))}
                )
            ),
            new_aggregate_list
        )
    )
def _assign_ref(ref_collection, ref_map, record_id):
    """Build an FQL ref into 'ref_collection' for 'record_id', resolving the
    document id through 'ref_map'; yields None when 'record_id' is null."""
    mapped_id = q.select([q.to_string(record_id), "id"], ref_map)
    return q.if_(
        q.is_null(record_id),
        None,
        q.ref(ref_collection, mapped_id),
    )
def test_select_array(self):
    """select with an integer index reads array elements; an out-of-range
    index raises NotFound."""
    values = [1, 2, 3]
    self.assertEqual(self._q(query.select(2, values)), 3)
    self.assertRaises(NotFound, lambda: self._q(query.select(3, values)))
def translate_select(sql_query: sql.SQLQuery) -> QueryExpression:
    """Translate a SELECT SQL query into an equivalent FQL query.

    Params:
    -------
    sql_query: An SQLQuery instance.

    Returns:
    --------
    An FQL query expression based on the SQL query.
    """
    document_pages = _define_document_pages(sql_query)
    # The table whose columns are actually being selected.
    selected_table = next(table for table in sql_query.tables
                          if table.has_columns)

    # Resolve a field's final value: function result if one applies,
    # otherwise the raw value (NULL sentinel mapped back to None).
    get_field_value = lambda function_value, raw_value: q.if_(
        q.equals(function_value, common.NULL),
        q.if_(q.equals(raw_value, common.NULL), None, raw_value),
        q.select([common.DATA, 0], function_value),
    )

    # Currently only COUNT is supported; anything else yields the NULL sentinel.
    calculate_function_value = lambda document_set, function_name: q.if_(
        q.is_null(function_name),
        common.NULL,
        q.if_(
            q.equals(function_name, sql.Function.COUNT.value),
            q.count(document_set),
            common.NULL,
        ),
    )

    # With aggregation functions, standard behaviour is to include the first value
    # if any column selections are part of the query, at least until we add support
    # for GROUP BY
    get_first_document = lambda documents: q.if_(q.is_empty(documents), [{}],
                                                 q.take(1, documents))

    translate_document_fields = lambda maybe_documents: q.let(
        {
            # We map over selected_fields to build document object
            # to maintain the order of fields as queried. Otherwise,
            # SQLAlchemy gets confused and assigns values to the incorrect keys.
            "selected_column_info":
            [[col.table_name, col.name, col.function_name]
             for col in sql_query.columns],
            "has_functions":
            any(col.function_name for col in sql_query.columns),
            "maybe_document_set": q.if_(
                q.var("has_functions"),
                get_first_document(maybe_documents),
                maybe_documents,
            ),
            "field_alias_map": sql_query.alias_map,
        },
        q.map_(
            q.lambda_(
                "maybe_document",
                q.let(
                    {
                        # Items may be refs (fetch + wrap) or already-shaped
                        # documents (pass through).
                        "document": q.if_(
                            q.is_ref(q.var("maybe_document")),
                            {
                                # We use the selected table name here instead of deriving
                                # the collection name from the document ref in order to
                                # save a 'get' call from inside of a map, which could get
                                # expensive.
                                selected_table.name: q.merge(
                                    q.select(
                                        common.DATA,
                                        q.get(q.var("maybe_document")),
                                    ),
                                    {"ref": q.var("maybe_document")},
                                ),
                            },
                            q.var("maybe_document"),
                        ),
                    },
                    # Build [alias, value] pairs per selected column, then
                    # fold them into one object.
                    q.to_object(
                        q.map_(
                            q.lambda_(
                                [
                                    "collection_name", "field_name",
                                    "function_name"
                                ],
                                q.let(
                                    {
                                        "function_value":
                                        calculate_function_value(
                                            maybe_documents,
                                            q.var("function_name")),
                                        "raw_value": q.select(
                                            [
                                                q.var("collection_name"),
                                                q.var("field_name"),
                                            ],
                                            q.var("document"),
                                            default=common.NULL,
                                        ),
                                    },
                                    [
                                        q.select(
                                            [
                                                q.var("collection_name"),
                                                q.var("field_name"),
                                            ],
                                            q.var("field_alias_map"),
                                        ),
                                        get_field_value(
                                            q.var("function_value"),
                                            q.var("raw_value")),
                                    ],
                                ),
                            ),
                            q.var("selected_column_info"),
                        )),
                ),
            ),
            q.var("maybe_document_set"),
        ),
    )

    return q.let(
        {
            "maybe_documents": document_pages,
            "translated_documents":
            translate_document_fields(q.var("maybe_documents")),
            "result":
            q.distinct(q.var("translated_documents"))
            if sql_query.distinct else q.var("translated_documents"),
        },
        # Paginated sets hold an array of results in a 'data' field, so we try to flatten it
        # in case we're dealing with pages instead of an array of results which doesn't
        # have such nesting
        {common.DATA: q.select(common.DATA, q.var("result"), q.var("result"))},
    )
def test_select(self):
    # Wire-format check: select must serialise to {"from": ..., "select": [path]}.
    expected = ('{"from":{"object":{"favorites":{"object":{"foods":["steak"]}}}},'
                '"select":["favorites","foods",0]}')
    expression = query.select(["favorites", "foods", 0],
                              {"favorites": {"foods": ["steak"]}})
    self.assertJson(expression, expected)
def last_command(self) -> str:
    """Read this user's most recent command from their document in the users collection."""
    return client.query(
        q.select(['data', 'last_command'],
                 q.get(q.ref(q.collection(users), self.chat_id))))