def execute_gql(gql_query_str):
    headers = {}
    if ApiConfig.api_key:
        headers = {'authorization': "Apikey {}".format(ApiConfig.api_key)}

    try:
        response = requests.post(
            SANBASE_GQL_HOST,
            json={'query': gql_query_str},
            headers=headers)
    except requests.exceptions.RequestException as e:
        raise SanError('Error running query: ({})'.format(e))

    if response.status_code == 200:
        return __handle_success_response__(response, gql_query_str)
    else:
        if __result_has_gql_errors__(response):
            error_response = response.json()['errors']['details']
        else:
            error_response = ''
        raise SanError(
            "Error running query. Status code: {}.\n {}\n {}".format(
                response.status_code, error_response, gql_query_str))
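# Minimal usage sketch for execute_gql above. It assumes the function is in
# scope, SANBASE_GQL_HOST points at the Santiment GraphQL endpoint and, for
# authenticated calls, ApiConfig.api_key has been set; the query is only an
# illustration.
example_query = '''
{
    projectBySlug(slug: "santiment") {
        slug
        name
    }
}
'''
data = execute_gql(example_query)      # returns the 'data' part of the GraphQL response
print(data['projectBySlug']['name'])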
def __request_api_call_data(query):
    try:
        res = execute_gql(query)['currentUser']['apiCallsHistory']
    except Exception as exc:
        if 'the results are empty' in str(exc):
            raise SanError('No API Key detected...')
        else:
            raise SanError(exc)

    return res
def __handle_success_response__(response, gql_query_str):
    if __result_has_gql_errors__(response):
        raise SanError(
            "GraphQL error occurred running query {} \n errors: {}".format(
                gql_query_str, response.json()['errors']))
    elif __exist_not_empty_result(response):
        return response.json()['data']
    else:
        raise SanError("Error running query. Status code: {}.\n {}".format(
            response.status_code, gql_query_str))
def get_available_metrics_and_queries(slug):
    gql_query = '''
    {
        projectBySlug(slug: "''' + slug + '''"){
            availableTimeseriesMetrics
            availableHistogramMetrics
            availableQueries
        }
    }
    '''
    attempts = 0
    error = None
    while attempts < NUMBER_OF_RETRIES:
        try:
            response = execute_gql(gql_query)
            timeseries_metrics = response['projectBySlug'][
                'availableTimeseriesMetrics']
            histogram_metrics = response['projectBySlug'][
                'availableHistogramMetrics']
            queries = response['projectBySlug']['availableQueries']
            if 'getMetric' in queries:
                queries.remove('getMetric')
            return (timeseries_metrics, histogram_metrics, queries)
        except SanError as e:
            attempts += 1
            error = e
            time.sleep(RETRY_DELAY)
    raise SanError(
        f"Not able to get availableMetrics for {slug} after {NUMBER_OF_RETRIES} attempts. Reason: {str(error)}"
    )
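# Usage sketch for get_available_metrics_and_queries above; 'santiment' is an
# illustrative slug and the function is assumed to be in scope with a working
# execute_gql.
timeseries, histograms, queries = get_available_metrics_and_queries('santiment')
print(len(timeseries), 'timeseries metrics')
print(len(histograms), 'histogram metrics')
print(queries)                          # e.g. a list such as ['historyPrice', 'devActivity', ...]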
def get_marketcap_batch(slugs):
    now = dt.utcnow()
    to_str = dt.strftime(now, DATETIME_PATTERN_QUERY)
    from_str = dt.strftime(now - td(days=1), DATETIME_PATTERN_QUERY)
    error = None

    gql_query = '{'
    for i, slug in enumerate(slugs):
        gql_query += '''
        query_''' + str(i) + ''': historyPrice(
            slug: "''' + slug + '''",
            from: "''' + from_str + '''",
            to: "''' + to_str + '''",
            interval: "1d"
        ) {marketcap}
        '''
    gql_query += '}'

    attempts = 0
    while attempts < NUMBER_OF_RETRIES:
        try:
            response = execute_gql(gql_query)
            return [
                response[f"query_{x}"][0]['marketcap']
                if response[f"query_{x}"] else 0
                for x in range(len(slugs))
            ]
        except SanError as e:
            attempts += 1
            error = e
            time.sleep(RETRY_DELAY)
    raise SanError(
        f"Not able to fetch a batch of marketcaps after {NUMBER_OF_RETRIES} attempts. Reason: {str(error)}"
    )
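# Usage sketch for get_marketcap_batch above. Each slug's historyPrice call is
# aliased as query_0, query_1, ... so the returned list preserves the input
# order; the slugs below are illustrative.
slugs = ['bitcoin', 'ethereum', 'santiment']
marketcaps = get_marketcap_batch(slugs)
for slug, marketcap in zip(slugs, marketcaps):
    print(slug, marketcap)              # 0 is returned for slugs with no data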
def projects(idx, slug, **kwargs):
    if slug == "erc20":
        return sgh.erc20_projects(idx, **kwargs)
    elif slug == "all":
        return sgh.all_projects(idx, **kwargs)
    raise SanError("Unknown project group: {}".format(slug))
def __parse_out_calls_data(response):
    try:
        api_calls = list(
            map(lambda x: (x['datetime'], x['apiCallsCount']), response))
    except Exception:
        raise SanError('An error has occurred, please contact our support...')

    return api_calls
def __get_headers_remaining(data):
    try:
        return {
            'month_remaining': data['x-ratelimit-remaining-month'],
            'hour_remaining': data['x-ratelimit-remaining-hour'],
            'minute_remaining': data['x-ratelimit-remaining-minute']
        }
    except KeyError:
        raise SanError('There are no limits for this API Key.')
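# Sketch of the input __get_headers_remaining above expects: a mapping of the
# rate-limit response headers it reads (header names taken from the function;
# the values below are illustrative).
example_headers = {
    'x-ratelimit-remaining-month': '598234',
    'x-ratelimit-remaining-hour': '11990',
    'x-ratelimit-remaining-minute': '590',
}
print(__get_headers_remaining(example_headers))
# {'month_remaining': '598234', 'hour_remaining': '11990', 'minute_remaining': '590'}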
def get_gql_query(idx, identifier, **kwargs):
    query, separator, slug = identifier.partition("/")
    if slug == '' and separator != '':
        raise SanError('Invalid metric!')
    elif slug == '':
        return getattr(
            san.sanbase_graphql,
            query,
            lambda *args, **kwargs: not_found(query)
        )(idx, **kwargs)
    else:
        return getattr(
            san.sanbase_graphql,
            query,
            lambda *args, **kwargs: not_found(query)
        )(idx, slug, **kwargs)
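# Illustration of how get_gql_query above dispatches: "query" alone calls
# san.sanbase_graphql.<query>(idx, **kwargs), while "query/slug" also passes
# the slug. The metric name and kwargs below are illustrative and assume a
# matching builder exists in san.sanbase_graphql; otherwise not_found raises.
gql_fragment = get_gql_query(0, 'daily_active_addresses/santiment',
                             from_date='2024-01-01', to_date='2024-01-07')
print(gql_fragment)                     # a GraphQL snippet meant to be batched into a larger query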
def build_query_gql_string(query, **kwargs):
    if 'from' in kwargs:
        kwargs['from'] = dt.strftime(kwargs['from'], DATETIME_PATTERN_QUERY)
    if 'to' in kwargs:
        kwargs['to'] = dt.strftime(kwargs['to'], DATETIME_PATTERN_QUERY)

    if query in queries:
        query_template = queries[query]
        query_args_str = ''
        args_template = query_template['arguments']
        for arg in args_template:
            if arg in kwargs:
                query_args_str += f"{arg}: {args_template[arg] % (kwargs[arg])},\n"
            else:
                query_args_str += f"{arg}: {args_template[arg]},\n"

        query_fields_str = '{' + ' '.join(
            query_template['fields']) + '}' if query_template['fields'] else ''
        query_args_str = '(' + query_args_str + ')' if query_args_str else ''

        gql_query = '{' + query + query_args_str + query_fields_str + '}'
        return gql_query
    elif query in SPECIAL_METRICS_AND_QUERIES:
        raise SanError(f"Query {query} uses a different format.")
    else:
        raise SanError(f"Unknown query: {query}")
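# Sketch of the template shape build_query_gql_string above expects: the
# `queries` table maps a query name to printf-style argument templates and the
# fields to select. The entry below is a made-up illustration, not the real
# template table; note that 'from'/'to' are Python keywords, so they must be
# passed via dict expansion.
example_templates = {
    'historyPrice': {
        'arguments': {'slug': '"%s"', 'from': '"%s"', 'to': '"%s"', 'interval': '"1d"'},
        'fields': ['datetime', 'priceUsd', 'marketcap'],
    }
}
# With `queries = example_templates`, a call such as
#   build_query_gql_string('historyPrice', **{'slug': 'santiment',
#                                             'from': dt(2024, 1, 1),
#                                             'to': dt(2024, 1, 7)})
# would produce roughly:
#   {historyPrice(slug: "santiment", from: "...", to: "...", interval: "1d",)
#    {datetime priceUsd marketcap}}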
def execute_gql(gql_query_str):
    headers = {}
    if ApiConfig.api_key:
        headers = {'authorization': "Apikey {}".format(ApiConfig.api_key)}

    response = requests.post(
        SANBASE_GQL_HOST,
        json={'query': gql_query_str},
        headers=headers)

    if response.status_code == 200:
        return __handle_success_response__(response, gql_query_str)
    else:
        raise SanError("Error running query. Status code: {}.\n {}".format(
            response.status_code, gql_query_str))
def _transform_arg_helper(kwargs):
    transform_arg_str = ''
    if 'transform' in kwargs and isinstance(kwargs['transform'], dict):
        transform_arg_str += 'transform:{'
        for k, v in kwargs['transform'].items():
            if isinstance(v, int):
                transform_arg_str += f'{k}: {v}\n'
            elif isinstance(v, str):
                transform_arg_str += f'{k}: "{v}"\n'
            else:
                raise SanError(
                    f'"transform" argument incorrect: {kwargs["transform"]}')
        transform_arg_str += '}'

    return transform_arg_str
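# Usage sketch for _transform_arg_helper above: a transform dict of strings
# and ints becomes the transform(...) argument fragment of a getMetric call.
# The transform values below are illustrative.
fragment = _transform_arg_helper(
    {'transform': {'type': 'moving_average', 'movingAverageBase': 7}})
print(fragment)
# transform:{type: "moving_average"
# movingAverageBase: 7
# }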
def execute_gql(gql_query_str):
    headers = {}
    if ApiConfig.api_key:
        headers = {'authorization': "Apikey {}".format(ApiConfig.api_key)}

    response = requests.post(
        SANBASE_GQL_HOST,
        json={'query': gql_query_str},
        headers=headers)

    if response.status_code == 200:
        return response.json()['data']
    else:
        raise SanError(
            "Error running query. Status code: {}.\n {} \n errors: {}".format(
                response.status_code, gql_query_str, response.json()['errors']))
def __create_batched_query_string(self):
    batched_queries = []
    for idx, query in enumerate(self.queries):
        metric, _separator, slug = query[0].partition('/')
        if metric in QUERY_MAPPING:
            batched_queries.append(get_gql_query(idx, query[0], **query[1]))
        else:
            if slug != '':
                batched_queries.append(
                    san.sanbase_graphql.get_metric(idx, metric, slug, **query[1]))
            else:
                raise SanError('Invalid metric!')

    return self.__batch_gql_queries(batched_queries)
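# Usage sketch for the batching helper this method belongs to, assuming the
# surrounding class is sanpy's san.Batch; the metrics, slugs and dates below
# are illustrative.
batch = san.Batch()
batch.get('daily_active_addresses/santiment',
          from_date='2024-01-01', to_date='2024-01-07', interval='1d')
batch.get('price_usd/bitcoin',
          from_date='2024-01-01', to_date='2024-01-07', interval='1d')
[active_addresses, price] = batch.execute()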
def get_histogram_metric_data(gql_query, metric, slug):
    error = None
    attempts = 0
    started = time.time()
    while attempts < NUMBER_OF_RETRIES:
        try:
            response = execute_gql(gql_query)
            elapsed_time = time.time() - started
            return response['getMetric']['histogramData'], elapsed_time
        except SanError as e:
            attempts += 1
            error = e
            time.sleep(RETRY_DELAY)
    raise SanError(
        f"Not able to fetch {metric} metric for {slug} after {NUMBER_OF_RETRIES} attempts. Reason: {str(error)}"
    )
def get_query_data(gql_query, query_name, slug):
    error = None
    attempts = 0
    started = time.time()
    while attempts < NUMBER_OF_RETRIES:
        try:
            response = execute_gql(gql_query)
            elapsed_time = time.time() - started
            return response[query_name], elapsed_time
        except SanError as e:
            attempts += 1
            error = e
            time.sleep(RETRY_DELAY)
    raise SanError(
        f"Not able to fetch {query_name} query for {slug} after {NUMBER_OF_RETRIES} attempts. Reason: {str(error)}"
    )
def get(dataset, **kwargs):
    query, slug = parse_dataset(dataset)
    if query in DEPRECATED_QUERIES:
        print(
            '**NOTICE**\n{} will be deprecated in version 0.9.0, please use {} instead'
            .format(query, DEPRECATED_QUERIES[query]))
    if query in CUSTOM_QUERIES:
        return getattr(san.sanbase_graphql, query)(0, slug, **kwargs)

    if query in QUERY_MAPPING:
        gql_query = "{" + get_gql_query(0, dataset, **kwargs) + "}"
    else:
        if slug != '':
            gql_query = "{" + \
                san.sanbase_graphql.get_metric(0, query, slug, **kwargs) + "}"
        else:
            raise SanError('Invalid metric!')

    res = execute_gql(gql_query)
    return transform_query_result(0, query, res)
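# Usage sketch for get above, assuming it is the function behind sanpy's
# public san.get entry point; the metric, slug and dates are illustrative.
daa = san.get('daily_active_addresses/santiment',
              from_date='2024-01-01', to_date='2024-01-07', interval='1d')
print(daa.head())                       # the transformed result, typically a datetime-indexed DataFrame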
def get_min_interval(metric):
    gql_query = '''
    {
        getMetric(metric: "''' + metric + '''") {
            metadata {
                minInterval
            }
        }
    }
    '''
    attempts = 0
    error = None
    while attempts < NUMBER_OF_RETRIES:
        try:
            response = execute_gql(gql_query)
            return response['getMetric']['metadata']['minInterval']
        except SanError as e:
            attempts += 1
            error = e
            time.sleep(RETRY_DELAY)
    raise SanError(
        f"Not able to get min interval for {metric} after {NUMBER_OF_RETRIES} attempts. Reason: {str(error)}"
    )
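# Usage sketch for get_min_interval above; the metric name is illustrative.
print(get_min_interval('daily_active_addresses'))   # e.g. "1d" or "5m"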
def get_signal_last_triggered(id):
    gql_query = '''
    {
        getTriggerById(id: ''' + str(id) + ''') {
            trigger {
                lastTriggeredDatetime
            }
        }
    }
    '''
    try:
        response = execute_gql(gql_query)
        last_triggered = response['getTriggerById']['trigger'][
            'lastTriggeredDatetime']
        if last_triggered:
            return dt.strptime(last_triggered, DATETIME_PATTERN_SIGNAL)
        else:
            return dt.min
    except SanError as e:
        raise SanError(
            f"Not able to get lastTriggeredDatetime for signal {id}. Reason: {str(e)}"
        )
def test_rate_limits():
    exception = SanError(
        'API Rate Limit Reached. Try again in 366 seconds(7 minutes)')

    assert san.is_rate_limit_exception(exception)
    assert san.rate_limit_time_left(exception) == 366
def not_found(query):
    raise SanError(query + ' not found')