def create(event, context):
    """Serverless handler: validate the request body, persist a new todo
    document, and return an HTTP-style response dict.

    Raises a generic Exception when the body has no 'text' field.
    """
    payload = json.loads(event['body'])
    if 'text' not in payload:
        logging.error("Validation Failed")
        raise Exception("Couldn't create the todo item.")

    # Document to persist; server-side 'now' keeps both timestamps consistent.
    item = {
        'text': payload['text'],
        'checked': False,
        'createdAt': query.time('now'),
        'updatedAt': query.time('now'),
    }

    # write the todo to the database
    created = client.query(query.create(TODOS, {'data': item}))

    # create a response
    return {
        "statusCode": 200,
        "body": json.dumps(make_result(created)),
    }
def test_typecheckfns(self):
    """Run every type-check predicate (is_array, is_ref, ...) over a grab-bag
    of values in one query and compare the per-type match counts."""
    # Handles for the schema objects created below.
    coll = query.collection("typecheck_coll")
    db = query.database("typecheck_db")
    fn = query.function("typecheck_fn")  # NOTE(review): bound but never used below
    index = query.index("typecheck_index")
    # Create one of each schema object so the predicates have real refs to match.
    self.admin_client.query(query.create_collection({"name": "typecheck_coll"}))
    self.admin_client.query(query.create_index(
        {"name": "typecheck_index", "source": coll, "active": True}))
    doc = self.admin_client.query(query.create(
        coll, {"data": {}, "credentials": {"password": "******"}}))
    self.admin_client.query(query.create_database({"name": "typecheck_db"}))
    function = self._q(query.create_function(
        {"name": "typecheck_fn",
         "body": query.query(query.lambda_("x", query.now()))}))
    key = self.admin_client.query(
        query.create_key({"database": db, "role": "admin"}))
    token = self._q(query.login(doc["ref"], {"password": "******"}))
    credentials = self._q(
        query.select(['data', 0], query.paginate(query.credentials())))
    role = self.admin_client.query(query.create_role(
        {"name": "typecheck_role", "membership": [], "privileges": []}))
    # One (or more) sample value(s) per type; deliberate duplicates are what
    # make several expected counts below greater than 1.
    values = [
        None,
        bytearray([12, 3, 4, 5]),
        credentials,
        90,
        3.14,
        True,
        query.to_date(query.now()),
        query.date("1970-01-01"),
        query.now(),
        query.epoch(1, "second"),
        query.time("1970-01-01T00:00:00Z"),
        {"x": 10},
        query.get(doc["ref"]),
        query.paginate(query.collections()),
        [1, 2, 3],
        "a string",
        coll,
        query.collections(),
        query.match(index),
        query.union(query.match(index)),
        doc["ref"],
        query.get(doc["ref"]),
        index,
        db,
        coll,
        token["ref"],
        role["ref"],
        key["ref"],
        function["ref"],
        query.get(function["ref"]),
        query.query(query.lambda_("x", query.var("x"))),
    ]
    # (type name, predicate) pairs. NOTE(review): "collection" appears twice;
    # the merge below keeps only one entry per key, so this looks intentional
    # but is worth confirming.
    pairs = [
        ["array", query.is_array],
        ["object", query.is_object],
        ["string", query.is_string],
        ["null", query.is_null],
        ["number", query.is_number],
        ["bytes", query.is_bytes],
        ["date", query.is_date],
        ["timestamp", query.is_timestamp],
        ["set", query.is_set],
        ["ref", query.is_ref],
        ["boolean", query.is_boolean],
        ["double", query.is_double],
        ["integer", query.is_integer],
        ["database", query.is_database],
        ["index", query.is_index],
        ["collection", query.is_collection],
        ["token", query.is_token],
        ["function", query.is_function],
        ["collection", query.is_collection],
        ["role", query.is_role],
        ["credentials", query.is_credentials],
        ["key", query.is_key],
    ]
    # Expected number of `values` entries matching each predicate.
    expected = {
        "array": 1,
        "boolean": 1,
        "bytes": 1,
        "collection": 3,
        "credentials": 1,
        "database": 1,
        "date": 2,
        "double": 1,
        "function": 2,
        "integer": 1,
        "index": 1,
        "key": 1,
        "null": 1,
        "number": 2,
        "object": 5,
        "ref": 11,
        "role": 1,
        "set": 3,
        "string": 1,
        "timestamp": 3,
        "token": 1,
    }
    # Build one {type_name: count-of-matching-values} sub-query per pair.
    q = []
    for p in pairs:
        d = dict()
        d[p[0]] = query.count(
            query.filter_(query.lambda_("v", p[1](query.var("v"))),
                          query.var("vals")))
        q.append(d)
    # Evaluate all counts in a single Let/Merge round trip and compare.
    actual = self._q(query.let({"vals": values}, query.merge({}, q)))
    self.assertEqual(actual, expected)
q.create_index({ "name": "posts_by_tags_with_title", "source": q.collection("posts"), "terms": [{ "field": ["data", "tags"] }], "values": [{ "field": ["data", "title"] }] })) # Create a post client.query( q.create(q.collection("posts"), {"data": { "title": "What I had for breakfast .." }})) # Create several posts client.query( q.map_expr( lambda post_title: q.create(q.collection( "posts"), {"data": { "title": post_title }}), [ "My cat and other marvels", "Pondering during a commute", "Deep meanings in a latte" ])) # Retrieve posts
# industry_classified = list(set([item[2] for item in result.values.tolist()])) # print(industry_classified) with open("./data/stocks.json", "r") as in_file: data = json.load(in_file) code_list = [[ "{}{}".format(stock[0].split(".")[-1].lower(), stock[1]), stock[2], stock[4] ] for stock in data] # code_list = code_list[:100] with concurrent.futures.ThreadPoolExecutor(max_workers=500) as executor: futures = [executor.submit(analyze_item, item) for item in code_list] for future in concurrent.futures.as_completed(futures): pass df = pd.DataFrame(analyze_data) df = df.join(pd.DataFrame(df["gain"].to_dict()).T) index_names = df[(df["long"] < 0) | (df["short"] < 0)].index df.drop(index_names, inplace=True) df = df.sort_values(["score"], ascending=[False]) top_10 = df.head(10).values.tolist() ret = server_client.query( q.create(q.collection("analyze"), {"data": { "result": top_10 }})) # with open("result.json", "w") as result_json: # json.dump(analyze_data, result_json)
def test_instance_already_exists(self):
    """Re-creating a document at an existing ref must raise BadRequest."""
    self.client.query(create_collection({"name": "duplicates"}))
    existing_ref = self.client.query(create(collection("duplicates"), {}))["ref"]
    # A second create at the exact same ref is rejected by the server.
    self._assert_query_error(
        create(existing_ref, {}),
        BadRequest,
        "instance already exists",
        ["create"],
    )
def test_create(self):
    """Serialization of a basic Create expression matches the wire format."""
    # FIX: this local was previously named `json`, shadowing the stdlib
    # json module; renamed to avoid the builtin/module-shadowing pitfall.
    expected_json = ('{"create":{"collection":"widget"},'
                     '"params":{"object":{"data":{"object":{"name":"Laptop"}}}}}')
    self.assertJson(query.create(query.collection("widget"), {
        "data": {"name": "Laptop"}
    }), expected_json)
def create(n):
    """Create a document with data {"n": n} in collection_ref; return its ref."""
    expr = query.create(collection_ref, {"data": {"n": n}})
    return self.client.query(expr)["ref"]
import json
import re

from faunadb import query as q
from faunadb.objects import Ref
from faunadb.client import FaunaClient

client = FaunaClient(secret="YOUR_SECRET_HERE")
allusers = client.query(q.paginate(q.match(q.index('YOU_INDEX'))))


def lambda_handler(event, context):
    """AWS Lambda entry point: store the incoming event in FaunaDB and reply.

    BUG FIX: the q.create(...) call originally appeared after the return
    statement (and referenced `event`), so it could never execute; it now
    runs inside the handler before the response is returned.
    """
    # TODO implement
    print(event)
    client.query(
        q.create(q.collection("YOUR_COLLECTION"), {"data": {
            "Temp": event
        }}))
    return {'statusCode': 200, 'body': json.dumps('Hello from Lambda!')}
def _create(self, instance: T) -> T:
    """Persist *instance* as a new document and return a clone carrying
    the newly assigned ref."""
    expr = q.create(q.class_(instance.name()), {'data': instance.as_query()})
    created = self.client.query(expr)
    return instance.clone(ref=created['ref'])
def do_GET(self):
    """Scrape recent VK wall posts for 2021 metal releases and mirror new
    ones into the FaunaDB "AlbumEntry" collection; respond with JSON.

    NOTE(review): indentation was reconstructed from a flattened source —
    the placement of the posts.append(...) block inside the `len(links) > 0`
    guard is an assumption (it keeps `url` always bound); confirm against
    the original file.
    """
    code = os.environ.get('VKTOKEN')
    app = os.environ.get('VKAPPID')
    secret = os.environ.get('VKSECRET')
    vk_session = vk_api.VkApi(token=code, app_id=app, client_secret=secret)
    try:
        vk = vk_session.get_api()
    except vk_api.AuthError as error_msg:
        print(error_msg)
        return
    # CORE RADIO group
    groups = ['-23314431']
    posts = []
    client = FaunaClient(secret=os.environ.get('DBSECRET'))
    for group in groups:
        remote_wall = vk.wall.get(count=10, owner_id=group)
        for post in remote_wall['items']:
            postid = post['id']
            date = datetime.fromtimestamp(post['date']).isoformat()
            # Non-trivial lines of the post body; presumably line 0 is the
            # title, line 1 the genre, line 2 the country — TODO confirm.
            text = [t for t in post['text'].split('\n') if len(t) > 1]
            if len(text) > 2:
                title = text[0]
                country = text[2]
                genre = text[1]
                # Skip posts already stored (title lookup via "titles" index).
                search = client.query(
                    q.paginate(q.match(q.index("titles"), title)))
                if not search['data']:
                    if ('2021' in title) and (('Metalcore' in genre) or
                                              ('Deathcore' in genre) or
                                              ('Post-Hardcore' in genre)):
                        # 'x'-size photo from the first attachment.
                        img = [
                            img for img in post['attachments'][0]['photo']
                            ['sizes'] if img['type'] == 'x'
                        ][0]['url']
                        links = [
                            link for link in post['attachments']
                            if link['type'] == 'link'
                        ]
                        if len(links) > 0:
                            url = links[0]['link']['url']
                            # Most specific genre label wins.
                            if 'Post-Hardcore' in genre:
                                style = 'Post-Hardcore'
                            elif 'Deathcore' in genre:
                                style = 'Deathcore'
                            else:
                                style = 'Metalcore'
                            posts.append({
                                'title': title,
                                'date': date,
                                'img': img,
                                'country': country,
                                'genre': genre,
                                # 'style': style,
                                'groupid': group,
                                'postid': postid,
                                'url': url
                            })
    # Bulk-insert every new post in a single query.
    if len(posts) > 0:
        client.query(
            q.map_(
                lambda post: q.create(q.collection("AlbumEntry"),
                                      {"data": post}), posts))
    self.send_response(200)
    self.send_header('Content-type', 'application/json')
    self.end_headers()
    self.wfile.write(json.dumps({'posts': posts}).encode())
    return
def create_resume(request):
    """Django view: create or update the session user's resume document.

    POST: try to update an existing Resume_Info document (looked up via the
    "resume_index" index); if the lookup/update fails, create a new one.
    GET: render the form, pre-filled with existing resume data when present.

    FIXES: the two bare `except:` clauses were narrowed to
    `except Exception:` so they no longer swallow SystemExit /
    KeyboardInterrupt; the success message split across source lines was
    rejoined into a single literal.
    """
    if request.method == "POST":
        username = request.session["user"]["username"]
        full_name = request.POST.get("name")
        address = request.POST.get("address")
        phone = request.POST.get("phone")
        email = request.POST.get("email")
        about_you = request.POST.get("about")
        education = request.POST.get("education")
        career = request.POST.get("career")
        job_1__start = request.POST.get("job-1__start")
        job_1__end = request.POST.get("job-1__end")
        job_1__details = request.POST.get("job-1__details")
        job_2__start = request.POST.get("job-2__start")
        job_2__end = request.POST.get("job-2__end")
        job_2__details = request.POST.get("job-2__details")
        job_3__start = request.POST.get("job-3__start")
        job_3__end = request.POST.get("job-3__end")
        job_3__details = request.POST.get("job-3__details")
        # NOTE(review): `references` is read here but never stored in either
        # branch below, and `career` is saved only on update — confirm intent.
        references = request.POST.get("references")
        try:
            # Update path: a resume for this user already exists.
            resume = client.query(
                q.get(q.match(q.index("resume_index"), username)))
            quiz = client.query(
                q.update(
                    q.ref(q.collection("Resume_Info"), resume["ref"].id()), {
                        "data": {
                            "user": username,
                            "full_name": full_name,
                            "address": address,
                            "phone": phone,
                            "email": email,
                            "about_you": about_you,
                            "education": education,
                            "career": career,
                            "job_1__start": job_1__start,
                            "job_1__end": job_1__end,
                            "job_1__details": job_1__details,
                            "job_2__start": job_2__start,
                            "job_2__end": job_2__end,
                            "job_2__details": job_2__details,
                            "job_3__start": job_3__start,
                            "job_3__end": job_3__end,
                            "job_3__details": job_3__details,
                        }
                    }))
            messages.add_message(
                request, messages.INFO,
                'Resume Info Edited Successfully. Download Resume Now')
            return redirect("App:create-resume")
        except Exception:
            # Create path: no existing resume (or the update failed).
            quiz = client.query(
                q.create(
                    q.collection("Resume_Info"), {
                        "data": {
                            "user": username,
                            "full_name": full_name,
                            "address": address,
                            "phone": phone,
                            "email": email,
                            "about_you": about_you,
                            "education": education,
                            "job_1__start": job_1__start,
                            "job_1__end": job_1__end,
                            "job_1__details": job_1__details,
                            "job_2__start": job_2__start,
                            "job_2__end": job_2__end,
                            "job_2__details": job_2__details,
                            "job_3__start": job_3__start,
                            "job_3__end": job_3__end,
                            "job_3__details": job_3__details,
                        }
                    }))
            messages.add_message(
                request, messages.INFO,
                'Resume Info Saved Successfully. Download Resume Now')
            return redirect("App:resume")
    else:
        try:
            # Pre-fill the form when a resume already exists for this user.
            resume_info = client.query(
                q.get(
                    q.match(q.index("resume_index"),
                            request.session["user"]["username"])))["data"]
            context = {"resume_info": resume_info}
            return render(request, "create-resume.html", context)
        except Exception:
            return render(request, "create-resume.html")
def make_event(data):
    """Persist *data* as a new document in the "event" collection.

    Always returns True (any query error propagates to the caller).
    """
    client.query(q.create("event", {"data": data}))
    return True
def create_new_user(user_data):
    """Insert *user_data* as a new document in the "users" collection.

    Returns:
        True on success, False if the query fails for any reason.
    """
    try:
        client.query(q.create("users", {"data": user_data}))
        return True  # Username creation success
    # FIX: was a bare `except:`, which would also swallow SystemExit and
    # KeyboardInterrupt; narrowed to Exception while keeping the
    # best-effort True/False contract.
    except Exception:
        return False  # Something fracked up
def _update_information_metadata(
        table_name: str,
        collection_metadata: CollectionMetadata) -> QueryExpression:
    """Build a query recording *table_name*'s schema in the information-schema
    collections (tables, columns, indexes).

    Args:
        table_name: name of the user table being described.
        collection_metadata: field and index metadata for that table.

    Returns:
        A QueryExpression that performs the inserts (a no-op None branch when
        the table is itself an information-schema collection).
    """
    # Every table implicitly gets an integer, non-nullable 'id' column.
    # FIX: this dict previously used the key "nullable" (no trailing
    # underscore), inconsistent with the "nullable_" key written for every
    # other column below — confirm no reader depended on the odd key.
    column_metadata = [{
        "name_": "id",
        "table_name_": table_name,
        "type_": "Integer",
        "nullable_": False,
        "default_": None,
    }]
    column_metadata.extend([
        {
            "name_": name,
            "table_name_": table_name,
            "type_": metadata["type"],
            # A bit awkward, but SQL uses the 'NOT NULL' keyword, while SQLAlchemy
            # uses 'nullable' when returning metadata
            "nullable_": not metadata["not_null"],
            "default_": metadata["default"],
        } for name, metadata in collection_metadata["fields"].items()
    ])
    index_metadata = [{
        "name_": index_name,
        "table_name_": table_name,
        **typing.cast(typing.Dict[str, typing.Any], metadata),
    } for index_name, metadata in collection_metadata["indexes"].items()]
    return q.if_(
        # We don't want to update information schema collections with
        # information schema info, because that would be some weird
        # inception-type stuff.
        q.contains_str_regex(
            table_name, r"^information_schema_(?:tables|columns|indexes)_$"),
        None,
        q.do(
            q.create(
                q.collection("information_schema_tables_"),
                {"data": {
                    "name_": table_name
                }},
            ),
            q.foreach(
                q.lambda_(
                    "column_metadata",
                    q.create(
                        q.collection("information_schema_columns_"),
                        {"data": q.var("column_metadata")},
                    ),
                ),
                column_metadata,
            ),
            q.foreach(
                q.lambda_(
                    "index_metadata",
                    q.create(
                        q.collection("information_schema_indexes_"),
                        {"data": q.var("index_metadata")},
                    ),
                ),
                index_metadata,
            ),
        ),
    )
def subscribe_to_anime(self, anime_link: str):
    """Subscribe this chat's user to updates for the anime at *anime_link*.

    The whole decision tree runs server-side as one FQL transaction:
    - anime already in DB and already on the user's list -> returns a string;
    - anime in DB but new to the user -> appends the ref to the user's list
      and increments the anime's follower count;
    - anime unknown -> creates the anime document and adds it to the user.
    A string result triggers the "already on your watch list" message;
    anything else gets the success message. All errors go to log_error.
    """
    try:
        # create a new anime document
        anime_info = anime_alarm.utils.GGAScraper().get_anime_info(
            anime_link)
        print(anime_info['anime_id'])
        result = client.query(
            q.let(
                {
                    # The user's current list of subscribed anime refs.
                    'user_anime_list':
                        q.select(['data', 'animes_watching'],
                                 q.get(q.ref(q.collection(users),
                                             self.chat_id))),
                },
                q.if_(
                    # check if this anime exists in the db
                    q.exists(
                        q.match(q.index(anime_by_id),
                                anime_info['anime_id'])),
                    # if it exists...
                    q.let(
                        {
                            'anime_ref':
                                q.select(
                                    'ref',
                                    q.get(
                                        q.match(q.index(anime_by_id),
                                                anime_info['anime_id'])))
                        },
                        q.if_(
                            # check if user has subscribed to this anime already
                            q.contains_value(q.var('anime_ref'),
                                             q.var('user_anime_list')),
                            'This anime is already on your watch list!',
                            q.do(
                                # Append the existing anime ref to the user...
                                q.update(
                                    q.ref(q.collection(users),
                                          self.chat_id),
                                    {
                                        'data': {
                                            'animes_watching':
                                                q.append(
                                                    q.var('user_anime_list'),
                                                    [q.var('anime_ref')])
                                        }
                                    }),
                                # ...and bump its follower count by one.
                                q.update(
                                    q.var('anime_ref'), {
                                        'data': {
                                            'followers':
                                                q.add(
                                                    q.select(
                                                        ['data', 'followers'],
                                                        q.get(
                                                            q.var('anime_ref'))),
                                                    1)
                                        }
                                    }),
                            ))),
                    q.let(
                        {'new_anime_id': q.new_id()},
                        q.do(
                            # create new anime document
                            q.create(
                                q.ref(q.collection(animes),
                                      q.var('new_anime_id')),
                                {
                                    'data': {
                                        'title': anime_info['title'],
                                        'followers': 1,
                                        'link': anime_link,
                                        'anime_id': anime_info['anime_id'],
                                        'anime_alias':
                                            anime_info['anime_alias'],
                                        'episodes':
                                            anime_info['number_of_episodes'],
                                        'last_episode': {
                                            'link':
                                                anime_info[
                                                    'latest_episode_link'],
                                            'title':
                                                anime_info[
                                                    'latest_episode_title'],
                                        },
                                    }
                                }),
                            # add to user's list of subscribed animes
                            q.update(
                                q.ref(q.collection(users), self.chat_id), {
                                    'data': {
                                        'animes_watching':
                                            q.append(
                                                q.var('user_anime_list'), [
                                                    q.ref(
                                                        q.collection(animes),
                                                        q.var('new_anime_id'))
                                                ])
                                    }
                                }),
                        )))))
        # A string result is the "already subscribed" message; any other
        # result means the subscription was added.
        if isinstance(result, str):
            updater.bot.send_message(chat_id=self.chat_id, text=result)
        else:
            updater.bot.send_message(
                chat_id=self.chat_id,
                text='You are now listening for updates on ' +
                anime_info['title'])
    except Exception as err:
        log_error(err)
def _create(cls, n=0, **data):
    """Create a document whose data is **data merged with {"n": n};
    return the raw query result."""
    payload = dict(data, n=n)
    return cls._q(query.create(cls.collection_ref, {"data": payload}))
# FIX: `q` is used below but was never imported in this file.
from faunadb import query as q
from faunadb.client import FaunaClient
from tqdm import tqdm
import pprint
import json

# FIX: the key file was previously opened without a context manager
# (f = open(...); ...; f.close()); `with` guarantees it is closed.
# The [10:] slice strips a 10-character prefix from the first line —
# presumably a "KEY=" style header; TODO confirm the .faunarc format.
with open(".faunarc", "r") as key_file:
    testDatabaseKey = key_file.readline()[10:]
# print(testDatabaseKey)

client = FaunaClient(testDatabaseKey)

# indexes = client.query(q.paginate(q.indexes()))
# pprint.pprint(indexes)

with open("classes.txt", "r") as f:
    classes_dict = json.load(f)
# print(list(classes_dict.keys()))

# NOTE(review): the [5:] slice skips the first five classes — presumably
# already uploaded by an earlier run; confirm before re-running.
for c in tqdm(list(classes_dict.keys())[5:]):
    data_dict = {
        "identifier": classes_dict[c]["course_abbrev"],
        "course_num": classes_dict[c]["course_number"],
        "full_identifier": c,
        "department": classes_dict[c]["department"],
        "description": classes_dict[c]["description"],
        "credit_hours": classes_dict[c]["credit_hours"]
    }
    client.query(q.create(q.collection("Class"), {"data": data_dict}))
# Calendar-year age difference.
# NOTE(review): this ignores whether the birthday has passed this year —
# confirm that's acceptable for the AgeGroup bucketing below.
age = (today.year - date_object.year)
#-------------------------------------------------------------------------------
# Age Groups
# FIX: the original used four overlapping, independent `if` ranges where the
# *last* matching assignment won (so 19 -> "Teen", 60 -> "Senior",
# 9 -> "Child"), and any age outside 0..1000 left AgeGroup unbound
# (NameError). This elif chain reproduces the same effective buckets and
# always assigns a value.
if age <= 9:
    AgeGroup = "Child"
elif age <= 19:
    AgeGroup = "Teen"
elif age < 60:
    AgeGroup = "Adult"
else:
    AgeGroup = "Senior"
#-------------------------------------------------------------------------------
# Pushing Data To FaunaDB
print(age, AgeGroup)
client.query(
    q.create(
        q.collection("BollywoodActor"), {
            "data": {
                "Name": row[0],
                "Image": row[1],
                "DOB": row[2],
                "Age": age,
                "AgeGroup": AgeGroup
            }
        }))