def get_available_metrics_and_queries(slug):
    """Fetch the metrics and queries available for the project *slug*.

    Returns a tuple ``(timeseries_metrics, histogram_metrics, queries)``
    where the generic 'getMetric' entry has been stripped from *queries*.
    Retries up to NUMBER_OF_RETRIES times on SanError, sleeping RETRY_DELAY
    seconds between attempts, and raises SanError once all attempts fail.
    """
    gql_query = f'''
    {{
        projectBySlug(slug: "{slug}"){{
            availableTimeseriesMetrics
            availableHistogramMetrics
            availableQueries
        }}
    }}
    '''
    last_error = None
    for _ in range(NUMBER_OF_RETRIES):
        try:
            project = execute_gql(gql_query)['projectBySlug']
            timeseries_metrics = project['availableTimeseriesMetrics']
            histogram_metrics = project['availableHistogramMetrics']
            queries = project['availableQueries']
            # 'getMetric' is the generic accessor, not a concrete query.
            if 'getMetric' in queries:
                queries.remove('getMetric')
            return (timeseries_metrics, histogram_metrics, queries)
        except SanError as e:
            last_error = e
            time.sleep(RETRY_DELAY)
    raise SanError(
        f"Not able to get availableMetrics for {slug} after {NUMBER_OF_RETRIES} attempts. Reason: {str(last_error)}"
    )
def get_marketcap_batch(slugs):
    """Fetch the latest daily marketcap for every slug in *slugs*.

    Builds one batched GraphQL request (an aliased historyPrice sub-query per
    slug, covering the last 24 hours) and returns a list of marketcaps in the
    same order as *slugs*; slugs with no data yield 0. Retries up to
    NUMBER_OF_RETRIES times on SanError, sleeping RETRY_DELAY seconds between
    attempts, and raises SanError once all attempts fail.
    """
    now = dt.utcnow()
    to_str = dt.strftime(now, DATETIME_PATTERN_QUERY)
    from_str = dt.strftime(now - td(days=1), DATETIME_PATTERN_QUERY)
    error = None
    # One aliased sub-query per slug so the whole batch is a single round trip.
    parts = [
        f'''
        query_{i}: historyPrice(
            slug: "{slug}",
            from: "{from_str}",
            to: "{to_str}",
            interval: "1d"
        ) {{marketcap}}
        '''
        for i, slug in enumerate(slugs)
    ]
    gql_query = '{' + ''.join(parts) + '}'
    attempts = 0
    while attempts < NUMBER_OF_RETRIES:
        try:
            response = execute_gql(gql_query)
            # Empty result lists map to 0 so positions stay aligned with slugs.
            return [
                response[f"query_{x}"][0]['marketcap']
                if response[f"query_{x}"] else 0
                for x in range(len(slugs))
            ]
        except SanError as e:
            attempts += 1
            error = e
            time.sleep(RETRY_DELAY)
    # Fixed typo in the original message ("fetcha batch" -> "fetch a batch").
    raise SanError(
        f"Not able to fetch a batch of marketcaps after {NUMBER_OF_RETRIES} attempts. Reason: {str(error)}"
    )
def available_metrics():
    """Return every available metric name.

    Combines the function names defined in ``san.sanbase_graphql`` with the
    metric names reported by the API's getAvailableMetrics query; the generic
    'get_metric' entry is removed from the combined list.
    """
    local_functions = [
        name
        for name, _ in inspect.getmembers(san.sanbase_graphql, inspect.isfunction)
    ]
    remote_metrics = execute_gql('{query: getAvailableMetrics}')['query']
    combined = local_functions + remote_metrics
    combined.remove('get_metric')
    return combined
def available_metric_for_slug_since(metric, slug):
    """Return the earliest datetime since which *metric* is available for *slug*."""
    query_str = f'''{{
        getMetric(metric: "{metric}"){{
            availableSince(slug: "{slug}")
        }}
    }}
    '''
    return execute_gql(query_str)['getMetric']['availableSince']
def available_metrics_for_slug(slug):
    """Return the list of metric names available for the project *slug*."""
    query_str = f'''{{
        projectBySlug(slug: "{slug}"){{
            availableMetrics
        }}
    }}
    '''
    return execute_gql(query_str)['projectBySlug']['availableMetrics']
def __request_api_call_data(query):
    """Execute *query* and return ``currentUser.apiCallsHistory``.

    The backend reports a missing API key as an "empty results" error; that
    case is translated into a clearer SanError, and any other failure is
    re-wrapped as SanError as well.
    """
    try:
        return execute_gql(query)['currentUser']['apiCallsHistory']
    except Exception as exc:
        if 'the results are empty' in str(exc):
            raise SanError('No API Key detected...')
        raise SanError(exc)
def execute(self):
    """Run every queued query as one batched GraphQL request.

    Returns a list of datetime-indexed DataFrames, ordered by the
    lexicographically sorted response keys (i.e. the query aliases).
    """
    batched_queries = [
        get_gql_query(idx, q[0], **q[1]) for idx, q in enumerate(self.queries)
    ]
    response = execute_gql(self.__batch_gql_queries(batched_queries))
    return [
        convert_to_datetime_idx_df(response[key])
        for key in sorted(response.keys())
    ]
def get_query_data(gql_query, query_name, slug):
    """Execute *gql_query* and return ``(response[query_name], elapsed_seconds)``.

    Retries up to NUMBER_OF_RETRIES times on SanError, sleeping RETRY_DELAY
    seconds between attempts; raises SanError once all attempts fail. The
    elapsed time is measured from the first attempt.
    """
    last_error = None
    started = time.time()
    for _ in range(NUMBER_OF_RETRIES):
        try:
            response = execute_gql(gql_query)
        except SanError as err:
            last_error = err
            time.sleep(RETRY_DELAY)
        else:
            return response[query_name], time.time() - started
    raise SanError(
        f"Not able to fetch {query_name} query for {slug} after {NUMBER_OF_RETRIES} attempts. Reason: {str(last_error)}"
    )
def get_histogram_metric_data(gql_query, metric, slug):
    """Execute *gql_query* and return ``(getMetric.histogramData, elapsed_seconds)``.

    Retries up to NUMBER_OF_RETRIES times on SanError, sleeping RETRY_DELAY
    seconds between attempts; raises SanError once all attempts fail. The
    elapsed time is measured from the first attempt.
    """
    last_error = None
    started = time.time()
    for _ in range(NUMBER_OF_RETRIES):
        try:
            response = execute_gql(gql_query)
        except SanError as err:
            last_error = err
            time.sleep(RETRY_DELAY)
        else:
            return response['getMetric']['histogramData'], time.time() - started
    raise SanError(
        f"Not able to fetch {metric} metric for {slug} after {NUMBER_OF_RETRIES} attempts. Reason: {str(last_error)}"
    )
def metric_complexity(metric, from_date, to_date, interval):
    """Return the timeseries data complexity of *metric* for the given range.

    Dates are normalized through the module's _format_from_date /
    _format_to_date helpers before being embedded in the query.
    """
    query_str = f'''{{
        getMetric (metric: "{metric}") {{
            timeseriesDataComplexity(
                from: "{_format_from_date(from_date)}",
                to: "{_format_to_date(to_date)}",
                interval: "{interval}"
            )
        }}
    }}
    '''
    return execute_gql(query_str)['getMetric']['timeseriesDataComplexity']
def get(dataset, **kwargs):
    """Fetch *dataset* (a "query/slug" string) and return the transformed result.

    Prints a deprecation notice for queries listed in DEPRECATED_QUERIES,
    dispatches CUSTOM_QUERIES straight to their sanbase_graphql functions,
    and otherwise builds either a mapped query or a getMetric query.
    Raises SanError when the metric is unknown and no slug was given.
    """
    query, slug = parse_dataset(dataset)
    if query in DEPRECATED_QUERIES:
        print(
            '**NOTICE**\n{} will be deprecated in version 0.9.0, please use {} instead'
            .format(query, DEPRECATED_QUERIES[query]))
    if query in CUSTOM_QUERIES:
        return getattr(san.sanbase_graphql, query)(0, slug, **kwargs)
    if query in QUERY_MAPPING:
        gql_query = "{" + get_gql_query(0, dataset, **kwargs) + "}"
    elif slug != '':
        gql_query = "{" + san.sanbase_graphql.get_metric(0, query, slug, **kwargs) + "}"
    else:
        raise SanError('Invalid metric!')
    res = execute_gql(gql_query)
    return transform_query_result(0, query, res)
def get_signal_last_triggered(id):
    """Return the last-triggered datetime for signal *id*.

    Returns ``datetime.min`` when the signal has never fired. A SanError
    from the API call is re-raised wrapped in a descriptive SanError.
    """
    gql_query = f'''
    {{
        getTriggerById(id: {id}) {{
            trigger {{
                lastTriggeredDatetime
            }}
        }}
    }}
    '''
    try:
        trigger = execute_gql(gql_query)['getTriggerById']['trigger']
        last_triggered = trigger['lastTriggeredDatetime']
    except SanError as e:
        raise SanError(
            f"Not able to get lastTriggeredDatetime for signal {id}. Reason: {str(e)}"
        )
    if last_triggered:
        return dt.strptime(last_triggered, DATETIME_PATTERN_SIGNAL)
    return dt.min
def get_min_interval(metric):
    """Return the minimal supported interval for *metric* (e.g. ``"5m"``).

    Retries up to NUMBER_OF_RETRIES times on SanError, sleeping RETRY_DELAY
    seconds between attempts; raises SanError once all attempts fail.
    """
    gql_query = f'''
    {{
        getMetric(metric: "{metric}") {{
            metadata {{
                minInterval
            }}
        }}
    }}
    '''
    last_error = None
    for _ in range(NUMBER_OF_RETRIES):
        try:
            return execute_gql(gql_query)['getMetric']['metadata']['minInterval']
        except SanError as e:
            last_error = e
            time.sleep(RETRY_DELAY)
    raise SanError(
        f"Not able to get min interval for {metric} after {NUMBER_OF_RETRIES} attempts. Reason: {str(last_error)}"
    )
def get(dataset, **kwargs):
    """Run a single query for *dataset* and return the result as a
    datetime-indexed DataFrame."""
    gql_query = "{\n" + get_gql_query(0, dataset, **kwargs) + "\n}"
    response = execute_gql(gql_query)
    return convert_to_datetime_idx_df(response["query_0"])
def execute(self):
    """Build the batched GraphQL query string, execute it, and return the
    transformed batch result."""
    raw_result = execute_gql(self.__create_batched_query_string())
    return self.__transform_batch_result(raw_result)
all_metrics = { asset: san.available_metrics_for_slug(asset) for asset in assets } queries = { slug: "{\n " + f"projectBySlug(slug: '{slug}')" + " {" for slug in assets } for query in queries: queries[query] += '\n ' + '\n '.join( [metric for metric in all_metrics[query][:3]]) + "\n }\n}" for asset in assets: res = execute_gql("{\n " + f'projectBySlug(slug: "{asset}")' + " {\n availableTimeseriesMetrics\n }\n}") #ethereum_df =pd.DataFrame(res['projectBySlug'], index=[0]) all_metrics = res['projectBySlug']['availableTimeseriesMetrics'] prices = san.get(f'prices/{asset}', from_date=from_date, to_date=to_date, interval=interval) data = prices for metric in all_metrics: if san.metadata(metric, arr=['isAccessible', 'isRestricted'])['isRestricted']: continue else: data = data.join(