def _create_documents(let_params, records, build_document):
    results = []
    idx = 0

    while True:
        # Stop once every record has been written.
        if idx >= len(records):
            break

        end_idx = idx + BATCH_LIMIT
        batch = _execute_with_retries(
            q.let(
                let_params,
                q.map_(
                    q.lambda_(
                        "document",
                        q.create(
                            q.var("collection"),
                            {"data": build_document(q.var("document"))},
                        ),
                    ),
                    records[idx:end_idx],
                ),
            )
        )
        results.extend(batch)
        idx = end_idx

    return results

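# Hedged usage sketch (not from the original source): the "teams" collection
# and identity builder are illustrative; BATCH_LIMIT and _execute_with_retries
# come from the surrounding module.
def _load_teams_example(data):
    records = list(data["teams"].values())
    build_document = lambda team: team  # store each record unchanged
    return _create_documents({"collection": q.collection("teams")},
                             records, build_document)
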
def login(data):
    try:
        return current_app.fauna_client.query(
            q.let(
                {
                    'response': q.login(
                        q.match(q.index('unique_account_username_type'),
                                [data.get('username'), 'EMAIL']),
                        {'password': data.get('password')}),
                    'user': q.select_with_default(
                        ['data', 'user'],
                        q.get(q.select(['instance'], q.var('response'))),
                        None)
                },
                {
                    'data': {
                        'token': q.select('secret', q.var('response')),
                        'user': q.if_(
                            q.is_ref(q.var('user')),
                            q.select(['data', 'alias'], q.get(q.var('user'))),
                            None)
                    }
                }))
    except Exception as e:
        print(e)

def update_documents(sql_query: sql.SQLQuery) -> QueryExpression:
    """Update document fields with the given values.

    Params:
    -------
    sql_query: SQLQuery object that contains the parameters for building
        an update query in FQL.

    Returns:
    --------
    An FQL update query for the given collection and documents.
    """
    assert len(sql_query.tables) == 1
    table = sql_query.tables[0]

    assert len(sql_query.filter_groups) <= 1
    filter_group = (None if not any(sql_query.filter_groups) else
                    sql_query.filter_groups[0])

    field_updates = {column.name: column.value for column in table.columns}
    return q.let(
        {"document_set": build_document_set_intersection(table, filter_group)},
        q.do(
            q.update(
                q.select(
                    "ref",
                    q.get(q.var("document_set")),
                ),
                {"data": field_updates},
            ),
            {"data": [{
                "count": q.count(q.var("document_set"))
            }]},
        ),
    )

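# Hedged usage sketch (not from the original source): the SQLQuery value is
# normally produced by the project's SQL parser, so the constructor call
# below is a hypothetical illustration.
# update_query = sql.SQLQuery(...)  # parsed from an UPDATE statement
# client.query(update_documents(update_query))
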
def get_multiple(index, data=None):
    """ Get multiple records from an index, optionally matching term(s) """
    try:
        server_client = FaunaClient(
            secret=os.environ.get("FAUNA_SERVER_SECRET"))
        res_arr = []
        if data is None:
            # No search term: fetch every document in the index.
            res = server_client.query(
                q.map_(q.lambda_("data", q.get(q.var("data"))),
                       q.paginate(q.match(q.index(index)))))
            res_arr.extend(res["data"])
        elif isinstance(data, list):
            # One query per term, accumulating all matches.
            for term in data:
                res = server_client.query(
                    q.map_(q.lambda_("data", q.get(q.var("data"))),
                           q.paginate(
                               q.match(q.index(index), q.casefold(term)))))
                res_arr.extend(res["data"])
        else:
            res = server_client.query(
                q.map_(q.lambda_("data", q.get(q.var("data"))),
                       q.paginate(q.match(q.index(index),
                                          q.casefold(data)))))
            res_arr.extend(res["data"])

        results = []
        for doc in res_arr:
            doc["data"]["ref_id"] = doc["ref"].id()
            results.append(doc["data"])
        return results
    except Exception:
        raise

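# Hedged usage sketch: the index names and term below are hypothetical
# placeholders, and FAUNA_SERVER_SECRET must be set in the environment.
# all_posts = get_multiple('all_posts')
# alice_posts = get_multiple('posts_by_author', 'alice')
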
def test_lambda_expr(self):
    self.assertJson(query.lambda_("a", query.var("a")),
                    '{"expr":{"var":"a"},"lambda":"a"}')
    self.assertJson(
        query.lambda_(["a", "b"], query.add(query.var("a"), query.var("b"))),
        '{"expr":{"add":[{"var":"a"},{"var":"b"}]},"lambda":["a","b"]}')

def unsubscribe(update: Update, context: CallbackContext):
    user = User(update.effective_chat.id)
    try:
        animes_watched = client.query(
            q.let({'bot_user': q.ref(q.collection(users), user.chat_id)},
                  q.if_(
                      q.exists(q.var('bot_user')),
                      q.map_(
                          q.lambda_('doc_ref', q.get(q.var('doc_ref'))),
                          q.select(['data', 'animes_watching'],
                                   q.get(q.var('bot_user')))),
                      [])))
        for anime in animes_watched:
            markup = [[
                InlineKeyboardButton('Unsubscribe',
                                     callback_data='unsubscribe=' +
                                     anime['ref'].id())
            ]]
            context.bot.send_message(chat_id=user.chat_id,
                                     text=anime['data']['title'],
                                     reply_markup=InlineKeyboardMarkup(markup))

        # update last command
        user.last_command = ''

        if not animes_watched:
            context.bot.send_message(
                chat_id=user.chat_id,
                text='You are currently not subscribed to any anime')
    except Exception as err:
        log_error(err)

def create_user(data):
    try:
        current_identity = get_current_identity()
        email_hash = md5(
            current_identity['data']['username'].encode('utf-8')).hexdigest()
        return current_app.fauna_client.query(
            q.if_(
                q.is_ref(
                    q.select_with_default(['data', 'user'],
                                          q.get(q.current_identity()), None)),
                q.abort('exists'),
                q.let(
                    {
                        'userMetaRef': q.new_id(),
                        'userRef': q.new_id()
                    },
                    q.do(
                        q.create(
                            q.ref(q.collection('user_metas'),
                                  q.var('userMetaRef')),
                            {
                                'data': {
                                    'name': data.get('name'),
                                    'email': q.select(
                                        ['data', 'username'],
                                        q.get(q.current_identity())),
                                    'dob': parser.parse(data.get('dob')).date()
                                }
                            }),
                        q.create(
                            q.ref(q.collection('users'), q.var('userRef')),
                            {
                                'data': {
                                    'alias': data.get('alias'),
                                    'avatar': f'https://www.gravatar.com/avatar/{email_hash}',
                                    'public': False,
                                    'meta': q.ref(q.collection('user_metas'),
                                                  q.var('userMetaRef')),
                                }
                            }),
                        q.update(
                            q.current_identity(),
                            {
                                'data': {
                                    'user': q.ref(q.collection('users'),
                                                  q.var('userRef'))
                                }
                            }),
                        q.call('current_user', [])))))
    except Exception as e:
        if str(e) == 'exists':
            abort(409, 'User for current identity already exists.')
        print(e)

def processHeroInformation(match_data):
    radiant_win = match_data['result']['radiant_win']
    players = match_data['result']['players']
    win_heros = []
    heros_in_game = []
    for player in players:
        win_flag = getWinFlag(player, radiant_win)
        heros_in_game.append(player['hero_id'])
        if win_flag:
            win_heros.append(player['hero_id'])

    # Fetch the document for every hero that appeared in the match.
    hero_list = client.query(
        q.map_(
            q.lambda_(
                'hero',
                q.get(q.ref(q.collection('heroes'), q.var('hero')))),
            heros_in_game))

    update_hero_list = []
    for hero_info in hero_list:
        ref = hero_info['ref']
        data = hero_info['data']
        if data['id'] in win_heros:
            data['wins'] += 1
        data['games'] += 1
        for player in players:
            if player['hero_id'] == data['id']:
                getItemsData(player, data)
        update_hero_list.append({'ref': ref, 'data': data})

    # Write the updated win/game counts back in a single query.
    client.query(
        q.map_(
            q.lambda_(
                'hero',
                q.update(
                    q.select(['ref'], q.var('hero')),
                    {'data': q.select(['data'], q.var('hero'))})),
            update_hero_list))

def test_reduce(self):
    data1 = [1, 2, 3, 4, 5, 6, 7, 8, 9]
    data2 = ["Fauna", "DB", " ", "rocks"]

    q1 = query.reduce(
        query.lambda_query(lambda accum, value: query.add(
            query.var("accum"), query.var("value"))), 0, data1)
    q2 = query.reduce(
        query.lambda_query(lambda accum, value: query.concat(
            [query.var("accum"), query.var("value")])), "", data2)

    self.assertEqual(self._q(q1), 45)
    self.assertEqual(self._q(q2), "FaunaDB rocks")

def test_get_foreign_key_ref():
    fql_query = q.let(
        {
            "references": {},
            "foreign_key": Fake.credit_card_number()
        },
        common.get_foreign_key_ref(q.var("foreign_key"),
                                   q.var("references")),
    )

    assert isinstance(fql_query, QueryExpression)

def find_order(secret, order_ref):
    client = FaunaClient(secret=secret)
    return client.query(
        q.let(
            {
                "order": q.get(q.ref(q.collection("orders"), order_ref)),
                "status_history": q.call(
                    "get_order_status_history",
                    q.select(["ref"], q.var("order")))
            },
            {
                "ref": q.select(["ref"], q.var("order")),
                "data": q.merge(
                    q.select(["data"], q.var("order")),
                    {"status_history": q.var("status_history")})
            }
        )
    )

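# Hedged usage sketch: both the secret and the order document ID below are
# placeholders; "get_order_status_history" is the UDF called above.
# order = find_order('fnAE-example-secret', '301234567890123456')
# print(order['data']['status_history'])
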
def donate(update, context):
    try:
        for message in config['message']['donate']:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text=message)
        client.query(
            q.let(
                {'user': q.ref(q.collection(users), update.effective_chat.id)},
                q.if_(
                    q.exists(q.var('user')),
                    q.update(q.var('user'), {'data': {
                        'last_command': '',
                    }}),
                    'Success!')))
    except Exception as err:
        log_error(err)

def updatePairInformationForTeam(hero_ids, team_win):
    # Build a sorted key for each unordered pair of heroes,
    # e.g. (7, 104) -> '007104'.
    key_list = []
    for k in range(0, len(hero_ids)):
        for j in range(k + 1, len(hero_ids)):
            if hero_ids[k] < hero_ids[j]:
                key = format(hero_ids[k], '03d') + format(hero_ids[j], '03d')
            else:
                key = format(hero_ids[j], '03d') + format(hero_ids[k], '03d')
            key_list.append(key)

    try:
        hero_data_list = client.query(
            q.map_(
                q.lambda_(
                    'hero_pair',
                    q.get(q.ref(q.collection('hero_pairs'),
                                q.var('hero_pair')))),
                key_list))
    except Exception as e:
        logging.info(e)
        logging.info(key_list)
        return  # Without the pair documents there is nothing to update.

    hero_team_list = []
    for hero_data in hero_data_list:
        hero_pair_ref = hero_data['ref']
        hero_pair_data = hero_data['data']
        hero_pair_data['games'] += 1
        if team_win:
            hero_pair_data['wins'] += 1
        hero_team_list.append({'ref': hero_pair_ref, 'data': hero_pair_data})

    client.query(
        q.map_(
            q.lambda_(
                'hero_pair',
                q.update(
                    q.select(['ref'], q.var('hero_pair')),
                    {'data': q.select(['data'], q.var('hero_pair'))})),
            hero_team_list))

def processTemporalHeroInformation(match_data):
    radiant_win = match_data['result']['radiant_win']
    players = match_data['result']['players']

    temporal_hero_list = []
    for player in players:
        # Player slots 0-4 are Radiant; higher slots are Dire.
        win_flag = False
        if player['player_slot'] <= 4 and radiant_win:
            win_flag = True
        elif player['player_slot'] > 4 and not radiant_win:
            win_flag = True

        temporal_hero = {
            'id': player['hero_id'],
            'win': win_flag,
            'match_start_time': pytz.utc.localize(
                datetime.utcfromtimestamp(
                    match_data['result']['start_time']))
        }
        temporal_hero_list.append(temporal_hero)

    client.query(
        q.map_(
            q.lambda_(
                'temporal_hero',
                q.create(q.collection('heroes_temporal'),
                         {"data": q.var('temporal_hero')})),
            temporal_hero_list))

def test_documents(self):
    aCollection = "col_test_documents"
    anIndex = "idx_test_documents"

    self._q(query.create_collection({"name": aCollection}))
    self._q(
        query.create_index({
            "name": anIndex,
            "source": query.collection(aCollection),
            "active": True
        }))

    count = 56
    data = [{} for x in range(count)]
    self._q(
        query.foreach(
            query.lambda_(
                "x",
                query.create(query.collection(aCollection),
                             {"data": query.var("x")})),
            data))

    self.assertEqual(
        self._q(
            query.select(
                [0],
                query.count(
                    query.paginate(
                        query.documents(query.collection(aCollection)))))),
        count)

    self.assertEqual(
        self._q(query.count(query.documents(
            query.collection(aCollection)))), count)

def _sort_document_set(document_set: QueryExpression,
                       order_by: typing.Optional[sql.OrderBy]):
    if order_by is None:
        return q.paginate(document_set, size=common.MAX_PAGE_SIZE)

    if len(order_by.columns) > 1:
        raise exceptions.NotSupportedError(
            "Ordering by multiple columns is not yet supported.")

    ordered_column = order_by.columns[0]
    assert ordered_column.table_name is not None

    ordered_document_set = q.join(
        document_set,
        q.index(
            common.index_name(
                ordered_column.table_name,
                column_name=ordered_column.name,
                index_type=common.IndexType.SORT,
            )),
    )
    if order_by.direction == sql.OrderDirection.DESC:
        ordered_document_set = q.reverse(ordered_document_set)

    return q.map_(
        q.lambda_(["_", "ref"], q.var("ref")),
        q.paginate(ordered_document_set, size=common.MAX_PAGE_SIZE),
    )

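# Hedged usage sketch: the index name is a hypothetical placeholder for any
# document set; passing order_by=None returns a plain unsorted page.
# documents = q.match(q.index("users_all"))
# paged = _sort_document_set(documents, order_by=None)
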
def test_varargs(self):
    # Works for lists too
    self.assertEqual(self._q(query.add([2, 3, 5])), 10)
    # Works for a variable equal to a list
    self.assertEqual(
        self._q(query.let({"x": [2, 3, 5]}, query.add(query.var("x")))),
        10)

def test_count_mean_sum(self):
    data = [1, 2, 3, 4, 5, 6, 7, 8, 9]
    self._q(query.create_collection({"name": "countmeansum_test"}))
    self._q(
        query.create_index({
            "name": "countmeansum_idx",
            "source": query.collection("countmeansum_test"),
            "active": True,
            "values": [{
                "field": ["data", "value"]
            }]
        }))
    self._q(
        query.foreach(
            query.lambda_(
                "x",
                query.create(
                    query.collection("countmeansum_test"),
                    {"data": {
                        "value": query.add(query.var("x"), 2)
                    }})),
            data))

    m = query.match(query.index("countmeansum_idx"))
    expected = [9, 5.0, 45, 9, 7.0, 63]

    self.assertEqual(
        self._q([
            query.count(data),
            query.mean(data),
            query.sum(data),
            query.count(m),
            query.mean(m),
            query.sum(m)
        ]), expected)

def get_foreign_key_ref(
    foreign_value: QueryExpression,
    reference_collection_name: QueryExpression,
) -> QueryExpression:
    """Get the Ref to a document associated with a foreign key value.

    Params:
    -------
    foreign_value: The value to look up, usually an ID.
    reference_collection_name: The name of the collection that the
        foreign key refers to.

    Returns:
    --------
    Fauna query expression that returns the Ref for the associated
    document, or None when either the value or the collection name
    is blank.
    """
    return q.let(
        {
            "is_blank_reference": q.or_(
                q.is_null(foreign_value),
                q.equals(foreign_value, NULL),
                q.equals(reference_collection_name, NULL),
            ),
        },
        q.if_(
            q.var("is_blank_reference"),
            None,
            q.ref(q.collection(reference_collection_name), foreign_value),
        ),
    )

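# Hedged usage sketch: mirrors the test further above; the collection name
# and document ID are illustrative placeholders.
# client.query(get_foreign_key_ref("301234567890123456", "users"))
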
def test_equality(self):
    self.assertEqual(query.var("x"), _Expr({"var": "x"}))
    self.assertEqual(
        query.match(Ref("widgets_by_name", Native.INDEXES), "computer"),
        _Expr({
            "match": Ref("widgets_by_name", Native.INDEXES),
            "terms": "computer"
        }))

def _delete_data():
    # Remove every user-defined function, index, and collection.
    resources = [q.functions(), q.indexes(), q.collections()]
    delete = lambda res: q.foreach(
        q.lambda_("res", q.delete(q.var("res"))), q.paginate(res)
    )
    delete_queries = [delete(res) for res in resources]
    _execute_with_retries(q.do(*delete_queries))

def test_repr(self):
    self.assertRegexCompat(repr(query.var("x")),
                           r"Expr\({u?'var': u?'x'}\)")
    self.assertRegexCompat(repr(Ref("collections")),
                           r"Ref\(id=collections\)")
    self.assertRegexCompat(
        repr(SetRef(query.match(query.index("widgets")))),
        r"SetRef\({u?'match': Expr\({u?'index': u?'widgets'}\)}\)")

def help_user(update, context):
    user = User(update.effective_chat.id)
    if str(user.chat_id) == str(os.getenv('ADMIN_CHAT_ID')):
        message = config['message']['help_admin']
    else:
        message = config['message']['help']
    context.bot.send_message(chat_id=user.chat_id, text=message)

    try:
        client.query(
            q.let({'user': q.ref(q.collection(users), user.chat_id)},
                  q.if_(
                      q.exists(q.var('user')),
                      q.update(q.var('user'), {'data': {
                          'last_command': '',
                      }}),
                      'Success!')))
    except Exception as err:
        log_error(err)

def unsubscribe_from_anime(self, anime_doc_id: str):
    try:
        anime = client.query(
            q.get(q.ref(q.collection(animes), anime_doc_id)))
        client.query(
            q.let(
                {
                    'anime_ref': q.ref(q.collection(animes), anime_doc_id),
                    'bot_user': q.ref(q.collection(users), self.chat_id),
                    'followers': q.select(['data', 'followers'],
                                          q.get(q.var('anime_ref'))),
                },
                q.do(
                    # Decrement the follower count.
                    q.update(
                        q.var('anime_ref'),
                        {'data': {
                            'followers': q.subtract(q.var('followers'), 1)
                        }}),
                    # Drop this anime from the user's watch list.
                    q.update(
                        q.var('bot_user'),
                        {
                            'data': {
                                'animes_watching': q.filter_(
                                    q.lambda_(
                                        'watched_anime_ref',
                                        q.not_(
                                            q.equals(
                                                q.var('watched_anime_ref'),
                                                q.var('anime_ref')))),
                                    q.select(['data', 'animes_watching'],
                                             q.get(q.var('bot_user'))))
                            }
                        }),
                    # Delete the anime document once its last follower leaves.
                    q.if_(q.equals(q.var('followers'), 1),
                          q.delete(q.var('anime_ref')),
                          'successful!'))))
        updater.bot.send_message(chat_id=self.chat_id,
                                 text='You have stopped following ' +
                                 anime['data']['title'])
    except errors.NotFound:
        logger.info(
            'Somehow, user {0} almost unsubscribed from an anime that did not exist'
            .format(self.chat_id))
    except Exception as err:
        log_error(err)

def get_all_students(self):
    result = self.clientf.query(
        query.map_(
            query.lambda_("x", query.get(query.var('x'))),
            query.paginate(query.match(query.index('all_students')),
                           size=1000)))
    students = [student['data'] for student in result['data']]
    return students

def _translate_create_index(statement: token_groups.Statement,
                            idx: int) -> typing.List[QueryExpression]:
    _, unique = statement.token_next_by(m=(token_types.Keyword, "UNIQUE"),
                                        idx=idx)
    idx, _ = statement.token_next_by(m=(token_types.Keyword, "ON"), idx=idx)
    _, index_params = statement.token_next_by(i=token_groups.Function, idx=idx)

    params_idx, table_identifier = index_params.token_next_by(
        i=token_groups.Identifier)
    table_name = table_identifier.value

    params_idx, column_identifiers = index_params.token_next_by(
        i=token_groups.Parenthesis, idx=params_idx)

    index_fields = [
        token.value for token in column_identifiers.flatten()
        if token.ttype == token_types.Name
    ]

    if len(index_fields) > 1:
        raise exceptions.NotSupportedError(
            "Creating indexes for multiple columns is not currently supported."
        )

    index_terms = [{
        "field": ["data", index_field]
    } for index_field in index_fields]
    index_name = fql.index_name(table_name,
                                column_name=index_fields[0],
                                index_type=fql.IndexType.TERM)

    return [
        q.do(
            q.if_(
                # We automatically create indices for some fields on collection
                # creation, so we can skip explicit index creation if it
                # already exists.
                q.exists(q.index(index_name)),
                None,
                q.create_index({
                    "name": index_name,
                    "source": q.collection(table_name),
                    "terms": index_terms,
                    "unique": unique,
                }),
            ),
            q.let(
                {"collection": q.collection(table_name)},
                {"data": [{
                    "id": q.var("collection")
                }]},
            ),
        )
    ]

def get_subscribed_users_for_anime(anime_doc_id):
    """
    This function gets all the users subscribed to a particular anime
    """
    subscribed_users = client.query(
        q.map_(
            q.lambda_('doc_ref', q.get(q.var('doc_ref'))),
            q.paginate(q.match(q.index(all_users_by_anime),
                               q.ref(q.collection(animes),
                                     str(anime_doc_id))),
                       size=100000)))
    subscribed_users = subscribed_users['data']
    return subscribed_users

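# Hedged usage sketch: the document ID is a placeholder, and `updater` is the
# telegram updater used elsewhere in this bot; each user document's ref ID
# doubles as its chat ID, as in the subscribe/unsubscribe handlers above.
# for user in get_subscribed_users_for_anime('262512345678901234'):
#     updater.bot.send_message(chat_id=user['ref'].id(),
#                              text='A new episode is out!')
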
def _load_matches(data):
    matches = data["matches"]
    records = list(matches.values())

    # Replace "winner_id" values with Refs into the "teams" collection,
    # leaving every other field as-is.
    build_document = lambda match: q.to_object(
        q.map_(
            q.lambda_(
                ["key", "value"],
                [
                    q.var("key"),
                    q.if_(
                        q.equals(q.var("key"), "winner_id"),
                        _assign_ref(q.var("teams"), q.var("team_map"),
                                    q.var("value")),
                        q.var("value"),
                    ),
                ],
            ),
            q.to_array(match),
        )
    )
    let_params = {
        "collection": q.collection("matches"),
        "teams": q.collection("teams"),
        "team_map": data["teams"],
    }
    documents = _create_documents(let_params, records, build_document)

    for record, document in zip(records, documents):
        record["id"] = document["ref"].id()

def wordPartsGenerator(word):
    return q.let(
        {
            # Reduce this array if you want fewer ngrams per word.
            # Setting it to [0] would only create the word itself; setting it
            # to [0, 1] would result in the word itself and all ngrams that
            # are one character shorter, etc.
            "indexes": q.map_(
                lambda index: q.subtract(q.length(word), index), maxNgrams),
            # Keep only part lengths of at least 3 characters.
            "indexesFiltered": q.filter_(
                lambda l: q.gte(l, 3), q.var('indexes')),
            "ngramsArray": q.distinct(
                q.union(
                    q.map_(lambda l: q.ngram(q.lowercase(word), l, l),
                           q.var('indexesFiltered'))))
        },
        q.var('ngramsArray'))

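# Hedged usage sketch: assumes maxNgrams is defined above (e.g. [0, 1, 2])
# and a client is in scope; the query returns the distinct lowercase ngrams
# whose lengths are within maxNgrams of the full word, down to 3 characters.
# client.query(wordPartsGenerator('FaunaDB'))
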
def _translate_drop_default(table_name: str,
                            column_name: str) -> QueryExpression:
    drop_default = q.map_(
        q.lambda_(
            "column_info_ref",
            q.update(q.var("column_info_ref"),
                     {"data": {
                         "default_": None
                     }}),
        ),
        q.paginate(_fetch_column_info_refs(table_name, column_name)),
    )

    return q.let(
        {
            "altered_docs": drop_default,
            # Should only be one document that matches the unique combination
            # of collection and field name, so we just select the first.
            "altered_ref": q.select([0, "ref"], q.var("altered_docs")),
        },
        {"data": [{
            "id": q.var("altered_ref")
        }]},
    )