def id_name_query(target, where):
    """Look up an entry by ``where = target`` across every category table.

    Args:
        target: Value to match (escaped for use inside a SQL string literal).
        where: Column name to match against — assumed trusted; TODO confirm
            callers never pass user input here.

    Returns:
        A ``(category, row)`` tuple for the first match, falling back to the
        creatures table, or ``None`` when nothing matches.
    """
    # Escape once so every query below uses the same sanitized value.  The
    # original only escaped the per-category queries, leaving the creatures
    # queries open to quote breakage / injection.
    safe_target = target.replace('\'', '\'\'')
    for category in selects:
        query = ('select id, name, description, {} from "botw-api".{} '
                 'where {}=\'{}\''.format(selects[category], category, where,
                                          safe_target))
        print(query)  # debug trace of the query about to run
        res = list(rs.sql(Q(query)))
        if res:
            return category, res[0]
    # Fall back to creatures, which splits into food / non-food entries that
    # expose different columns.
    res = list(
        rs.sql(
            Q('select id, name, description, hearts_recovered, cooking_effect '
              'from "botw-api".creatures where {}=\'{}\''.format(
                  where, safe_target))))
    if res:
        if res[0]['cooking_effect'] is None:
            # Non-food creature: re-query for the columns that apply to it.
            res = list(
                rs.sql(
                    Q('select id, name, description, drops from '
                      '"botw-api".creatures where {}=\'{}\''.format(
                          where, safe_target))))
        return 'creatures', res[0]
    return None
def creatures_category():
    """Fetch every creature, partitioned by whether it is edible.

    Creatures with a null ``cooking_effect`` are 'non-food'; the rest are
    'food'.  Each partition selects its own extra columns as configured in
    ``creatures_selects``.
    """
    template = ('select id, name, description, {} from "botw-api".creatures '
                'where cooking_effect is {}')

    def fetch(columns, null_test):
        # Run one partition's query and materialize the cursor.
        return list(rs.sql(Q(template.format(columns, null_test))))

    return {
        'non-food': fetch(creatures_selects['others'], 'null'),
        'food': fetch(creatures_selects['food'], 'not null'),
    }
def creatures_category(version):
    """Fetch every creature, partitioned by edibility, keyed per API version.

    Args:
        version: 'v1' names the inedible group 'non-food'; 'v2' names it
            'non_food'.

    Returns:
        Dict with the version-specific non-food key and a 'food' key.

    Raises:
        ValueError: for any other *version* (the original code crashed with
            an UnboundLocalError in that case, and only after running both
            queries).
    """
    # Validate up front so a bad version fails fast, before any queries run.
    version_keys = {'v1': 'non-food', 'v2': 'non_food'}
    try:
        food_key = version_keys[version]
    except KeyError:
        raise ValueError('unknown API version: {!r}'.format(version))
    others = list(
        rs.sql(
            Q('select id, name, description, common_locations, image, category, '
              '{} from "botw-api".creatures where cooking_effect is null'.format(
                  creatures_selects['others']))))
    foods = list(
        rs.sql(
            Q('select id, name, description, common_locations, image, category, '
              '{} from "botw-api".creatures where cooking_effect is not null'.format(
                  creatures_selects['food']))))
    return {food_key: others, 'food': foods}
def get(userid):
    """Return the stored city for *userid*, looked up by document ``_id``."""
    doc_id = str(userid)
    rows = rs.sql(
        Q(collectionName).where(F['_id'] == doc_id).select(F['city']))
    # First (and presumably only) matching document holds the city.
    return rows[0]['city']
def query(category: Union[StandardCategoryName, DlcCategoryName],
          where: Optional[bool] = None,
          select: Optional[list[BaseEntrySelect]] = None) -> list[EntryData]:
    """Query one "botw-api" collection for its standard entry columns.

    Args:
        category: Collection name under the "botw-api" workspace.
        where: Optional Rockset filter expression; defaults to a tautology
            (``id is not null``) so every row matches.
        select: Extra column names to fetch alongside the base columns.
            Defaults to none.  (``None`` replaces the original mutable-list
            default, which is a shared-state hazard.)

    Returns:
        The query results as a list of entry records.
    """
    extra = select if select is not None else []
    condition = where if where is not None else F['id'].is_not_null()
    columns = [
        'id', 'name', 'description', 'common_locations', 'image', 'category'
    ] + [F[name] for name in extra]
    return rs.sql(
        Q(f'botw-api.{category}').where(condition).select(*columns)).results()
def rs_check():
    """Join two collections on request-supplied fields; return 5 sample values.

    The collection and field names come from the request's JSON ``state``
    parameter.  SQL identifiers cannot be bound as query parameters, so each
    one is double-quoted with embedded quotes doubled, preventing a request
    from breaking out of the identifier position (the original formatted the
    raw values straight into the SQL text).
    """
    state = json.loads(request.args.get('state'))

    def ident(name):
        # Quote an identifier, escaping any embedded double quotes.
        return '"{}"'.format(str(name).replace('"', '""'))

    left = ident(state['leftCollection'])
    right = ident(state['rightCollection'])
    left_field = ident(state['leftField'])
    right_field = ident(state['rightField'])
    query = ('SELECT DISTINCT {lc}.{lf} FROM {lc} JOIN {rc} '
             'ON {lc}.{lf} = {rc}.{rf} LIMIT 5').format(
                 lc=left, lf=left_field, rc=right, rf=right_field)
    return json.dumps(list(rs.sql(Q(query))))
def yelp():
    """Return up to 20 food businesses nearest to the given coordinates.

    Reads ``lat``/``lon`` from the query string; nearness is approximated by
    Manhattan distance in degrees.  The values are parsed as floats before
    being interpolated into the SQL text, which blocks injection through the
    request (the original formatted the raw strings in directly).
    """
    try:
        lat = float(request.args.get('lat'))
        lon = float(request.args.get('lon'))
    except (TypeError, ValueError):
        # Missing or non-numeric coordinates.
        return json.dumps({'error': 'lat and lon must be numeric'}), 400
    query = ("SELECT name, city, latitude, longitude FROM yelp_business "
             "WHERE latitude IS NOT NULL "
             "AND longitude IS NOT NULL "
             "AND categories LIKE '%food%' "
             "ORDER BY abs(latitude - ({})) + abs(longitude - ({})) "
             "LIMIT 20".format(lat, lon))
    return json.dumps(list(rs.sql(Q(query))))
def contributors(event, context):
    """Lambda handler: return the top contributors as a JSON API response.

    Responds 200 with the serialized query results, or 500 with a generic
    message on any failure.
    """
    try:
        rows = client.sql(Q(TOP_CONTRIBUTORS_QUERY)).results()
        body = json.dumps(rows)
    except Exception as err:
        print('Error finding top contributors {}'.format(err))
        return {
            "statusCode": 500,
            "headers": HEADERS,
            "body": json.dumps({'msg': 'Internal Error'})
        }
    return {
        "statusCode": 200,
        "headers": HEADERS,
        "body": body
    }
def get_upd_views(site):
    """Increment and return the stored view count for *site*.

    Seeds a counter document at 0 if none exists, then records this view and
    returns the new count as a string.  Quotes in *site* are doubled so the
    value is safe inside the SQL string literal (the original formatted it in
    raw).  The original also recursed after seeding, which depends on
    read-after-write consistency and could loop; the seed-then-count is now
    done in one pass.
    """
    safe_site = site.replace('\'', '\'\'')
    rows = list(
        rs.sql(
            Q('SELECT * FROM commons.RepoViews WHERE _id=\'{}\''.format(
                safe_site))))
    if not rows:
        # First visit: seed the counter, then count this view as #1.
        collection.add_docs([{'_id': site, 'views': 0}])
        views = 1
    else:
        views = rows[0]['views'] + 1
    collection.add_docs([{'_id': site, 'views': views}])
    return str(views)
def rockset_querymaker(query):
    """Run a named query once against Rockset and report its latency.

    Args:
        query: Key into the module-level ``queries`` dict.

    Returns:
        ``(query, elapsed_ms)`` on success, or ``None`` when the name is
        unknown.
    """
    # connect to Rockset
    rs = Client(api_key=api_key)
    print("query is", query)
    if query not in queries:
        print("Err!")
        return None
    # BUG FIX: the original called timeit.timeit(str(rs.sql(...))), which ran
    # the query once *untimed* and then tried to exec its string repr a
    # million times.  Time the call itself, exactly once.
    elapsed = timeit.timeit(lambda: rs.sql(Q(queries[query])), number=1)
    print(query, 1000 * elapsed)
    return query, elapsed * 1000
def get_data(query, x_label, y_label):
    """Execute *query* on Rockset and extract two columns for plotting.

    Args:
        query (str): Rockset compatible SQL query.
        x_label (str): Column whose values map to the graph's x-axis.
        y_label (str): Column whose values map to the graph's y-axis.

    Returns:
        dict: ``{'x': [...], 'y': [...]}`` with one entry per result row.
        Both lists are empty when the query returns no rows (the original
        instead raised IndexError via a stray no-op ``result[0]`` statement).
    """
    # Materialize once so iterating twice is safe even if rs.sql returns a
    # one-shot cursor.
    records = list(rs.sql(Q(query)))
    return {
        'x': [record[x_label] for record in records],
        'y': [record[y_label] for record in records],
    }
def run(args):
    """Benchmark every ``.sql`` query in ``args.query_dir`` against Rockset.

    Each query is run ``args.runs`` times; the row count and the median
    wall-clock time are printed per query.  A query that errors is reported
    and skipped; a row count that varies between identical runs trips an
    assertion.
    """
    rs = Client(profile=args.profile) if args.profile else Client()

    queries = []
    for filename in os.listdir(args.query_dir):
        if not filename.endswith('.sql'):
            continue
        query_id = os.path.splitext(filename)[0]
        # (The original shadowed the loop variable with the file handle.)
        with open(os.path.join(args.query_dir, filename), 'r') as fh:
            queries.append((query_id, fh.read()))
    queries.sort()

    print('Found {} queries to run. Will run each {} times and take the median'
          ' runtime.'.format(len(queries), args.runs))
    print('=' * 70)

    for query_id, query_str in queries:
        times = []
        rows = None
        error = False
        for _ in range(args.runs):
            start = time.time()
            try:
                resp = rs.sql(Q(query_str))
                result_count = len(resp.results())  # fetch results only once
                if rows is None:
                    rows = result_count
                else:
                    # Row count should be stable across identical runs.
                    assert rows == result_count
                times.append(1000 * (time.time() - start))
            except Exception as e:
                print('Query {} produced an error: {}'.format(
                    query_id, str(e)))
                error = True
                break
        if not error:
            print('Query {} produced {:>3d} rows in {:>5.0f} ms'.format(
                query_id, rows, statistics.median(times)))
def lambda_handler(event, context):
    """Lambda handler: return the 10 most-tweeted NASDAQ stock tickers.

    Reads an optional ``interval`` query-string parameter (e.g. "1 DAY")
    bounding how far back tweets are considered; defaults to 1 DAY.  The
    original raised NameError when the parameter was absent, and formatted
    the raw value into the SQL text; the value is now validated against a
    strict pattern before interpolation, since it cannot be bound as a
    query parameter there.
    """
    import re  # local import: validation pattern only used here

    interval = '1 DAY'  # matches the query's documented 1-day window
    params = event.get('queryStringParameters') or {}
    if 'interval' in params:
        interval = params['interval']
    # Guard against SQL injection: only "<number> <unit>" is accepted.
    if not re.fullmatch(r'\d+ (SECOND|MINUTE|HOUR|DAY)S?', interval,
                        re.IGNORECASE):
        return {
            "isBase64Encoded": False,
            "headers": {"Access-Control-Allow-Origin": "*"},
            "statusCode": 400,
            "body": json.dumps({"msg": "invalid interval"})
        }
    res = rs.sql(Q(f'''-- unnest tweets with stock ticker symbols from the past 1 day
WITH stock_tweets AS
    (SELECT t.user.name, t.text, upper(sym.text) AS ticker
     FROM "twitter-firehose" AS t, unnest(t.entities.symbols) AS sym
     WHERE t.entities.symbols[1] is not null
       AND t._event_time > current_timestamp() - INTERVAL {interval}),
-- aggregate stock ticker symbol tweet occurrences
top_stock_tweets AS
    (SELECT ticker, count(*) AS tweet_count
     FROM stock_tweets
     GROUP BY ticker),
-- join stock ticker symbol in tweets with NASDAQ company list data
stock_info_with_tweets AS
    (SELECT top_stock_tweets.ticker, top_stock_tweets.tweet_count,
            tickers.Name, tickers.Industry, tickers.MarketCap
     FROM top_stock_tweets
     JOIN tickers ON top_stock_tweets.ticker = tickers.Symbol)
-- show top 10 most tweeted stock ticker symbols along with their company name, industry and market cap
SELECT *
FROM stock_info_with_tweets t
ORDER BY t.tweet_count DESC
LIMIT 10'''))
    return {
        "isBase64Encoded": False,
        "headers": {"Access-Control-Allow-Origin": "*"},
        "statusCode": 200,
        "body": json.dumps([x for x in res])
    }
def rank(event, context):
    """Lambda handler: return the contributor rank for a username.

    Responds 400 when no ``username`` path parameter is given, 200 with the
    rank query results, or 500 on any failure.  Single quotes in the
    username are doubled so a caller cannot break out of the SQL string
    literal it is formatted into (the original formatted it in raw).
    """
    try:
        username = event.get('pathParameters', {}).get('username', None)
        if not username:
            return {
                "statusCode": 400,
                "headers": HEADERS,
                "body": json.dumps({'msg': 'Please provide "username"'})
            }
        safe_username = username.replace('\'', '\'\'')
        results = client.sql(
            Q(INDIVIDUAL_CONTRIBUTOR_RANK.format(safe_username))).results()
        return {
            "statusCode": 200,
            "headers": HEADERS,
            "body": json.dumps(results)
        }
    except Exception as e:
        print('Error finding rank {}'.format(e))
        return {
            "statusCode": 500,
            "headers": HEADERS,
            "body": json.dumps({'msg': 'Internal Error'})
        }
import flask
from flask_geomapper import flask_geomapper
from apscheduler.schedulers.background import BackgroundScheduler
from rockset import Client, Q
from os import getenv

app = flask.Flask(__name__)
fg = flask_geomapper(app, debug=True)

token = getenv("RS2_TOKEN")  # or set token to a string with your API key
# configure server based off your location (this one is us west)
rs = Client(token, "https://api.rs2.usw2.rockset.com")
# configure based off your collection name and workspace (if not in "commons")
collection_name = "flask-locations"
collection = rs.Collection.retrieve(collection_name)

# retrieve previous locations from database
previous_locations = list(rs.sql(Q(f"select * from \"{collection_name}\"")))
if previous_locations != []:
    # if there are any items in the database, add them to flask-geomapper
    fg.add_locations(previous_locations, ip_key="_id")


def add_docs():
    """Push the locations collected so far to the Rockset collection.

    Only locations with an unrecorded IP are added: setting ``ip_key="_id"``
    maps each IP to the unique Rockset document identifier, so an existing
    IP overwrites its document rather than duplicating it.
    """
    collection.add_docs(fg.shape_to_docs(ip_key="_id"))


scheduler = BackgroundScheduler(daemon=True)  # init scheduler
# Add documents to the collection every ten seconds.  BUG FIX: the original
# passed fg.shape_to_docs(...) through `args=`, which evaluated it ONCE at
# registration time and re-uploaded that same stale snapshot on every tick
# (and left the add_docs wrapper above unused, without ip_key).  Scheduling
# the wrapper re-shapes fresh documents on each run.
scheduler.add_job(func=add_docs, trigger="interval", seconds=10)
def single_category(category):
    """Fetch every entry of one category with its category-specific columns."""
    sql = ('select id, name, description, common_locations, {} from '
           '"botw-api".{}'.format(selects[category], category))
    return list(rs.sql(Q(sql)))
def dashboard():
    """Serve download-timestamp data when the dashboard token matches.

    Requests without a valid ``token`` query parameter fall back to the
    main page.
    """
    supplied = flask.request.args.get("token")
    if supplied != getenv("DASHBOARD_TOKEN"):
        return main()
    events = list(
        rs.sql(Q("select _event_time from \"schoology-extension-downloads\"")))
    return {"data": events}
def rs_collection(collectionName):
    """Return the DESCRIBE output for *collectionName* as JSON.

    The name is interpolated as a quoted SQL identifier; embedded double
    quotes are doubled so a caller-supplied name cannot break out of the
    identifier (the original concatenated it in unescaped).
    """
    safe_name = collectionName.replace('"', '""')
    return json.dumps(list(rs.sql(Q('DESCRIBE "' + safe_name + '"'))))