Example #1
def get_available_metrics_and_queries(slug):
    gql_query = '''
    {
        projectBySlug(slug: "''' + slug + '''"){
            availableTimeseriesMetrics
            availableHistogramMetrics
            availableQueries
        }
    }
    '''
    attempts = 0
    error = None
    while attempts < NUMBER_OF_RETRIES:
        try:
            response = execute_gql(gql_query)
            timeseries_metrics = response['projectBySlug'][
                'availableTimeseriesMetrics']
            histogram_metrics = response['projectBySlug'][
                'availableHistogramMetrics']
            queries = response['projectBySlug']['availableQueries']
            if 'getMetric' in queries:
                queries.remove('getMetric')
            return (timeseries_metrics, histogram_metrics, queries)
        except SanError as e:
            attempts += 1
            error = e
            time.sleep(RETRY_DELAY)
    raise SanError(
        f"Not able to get availableMetrics for {slug} after {NUMBER_OF_RETRIES} attempts. Reason: {str(error)}"
    )
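The retry helpers in this listing share a common module-level setup that the snippets do not show. Below is a minimal sketch of that context; the import paths and the constant values are assumptions, while execute_gql, SanError and the constant names themselves are taken from the code above.

# Hypothetical module-level context assumed by the retry-based snippets here.
import time
from datetime import datetime as dt, timedelta as td

from san.graphql import execute_gql   # sends a raw GraphQL query string to the API (assumed import path)
from san.error import SanError        # raised when a request fails (assumed import path)

NUMBER_OF_RETRIES = 3                          # illustrative retry budget
RETRY_DELAY = 5                                # illustrative delay between attempts, in seconds
DATETIME_PATTERN_QUERY = '%Y-%m-%dT%H:%M:%SZ'  # illustrative ISO-8601 pattern for from/to dates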
Example #2
def get_marketcap_batch(slugs):
    now = dt.utcnow()
    to_str = dt.strftime(now, DATETIME_PATTERN_QUERY)
    from_str = dt.strftime(now - td(days=1), DATETIME_PATTERN_QUERY)
    error = None
    gql_query = '{'
    i = 0
    for slug in slugs:
        gql_query += '''
        query_''' + str(i) + ''': historyPrice(
            slug: "''' + slug + '''",
            from: "''' + from_str + '''",
            to: "''' + to_str + '''",
            interval: "1d"
        ) {marketcap}
        '''
        i += 1
    gql_query += '}'
    attempts = 0
    while attempts < NUMBER_OF_RETRIES:
        try:
            response = execute_gql(gql_query)
            return [
                response[f"query_{x}"][0]['marketcap']
                if response[f"query_{x}"] else 0 for x in range(len(slugs))
            ]
        except SanError as e:
            attempts += 1
            error = e
            time.sleep(RETRY_DELAY)
    raise SanError(
        f"Not able to fetcha batch of marketcaps after {NUMBER_OF_RETRIES} attempts. Reason: {str(error)}"
    )
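The batching trick above is GraphQL aliasing: each slug gets its own historyPrice field under a query_<i> alias, so one request covers the whole batch and each result is read back by position. A sketch of the call and the generated query, with the slugs as illustrative examples:

# For slugs = ['bitcoin', 'ethereum'] the generated gql_query is roughly:
#
# {
#     query_0: historyPrice(slug: "bitcoin", from: "<yesterday>", to: "<now>", interval: "1d") {marketcap}
#     query_1: historyPrice(slug: "ethereum", from: "<yesterday>", to: "<now>", interval: "1d") {marketcap}
# }
#
# Each aliased result comes back under response["query_<i>"], which is why the
# comprehension indexes the response by position and falls back to 0 for empty results.
marketcaps = get_marketcap_batch(['bitcoin', 'ethereum'])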
Example #3
def available_metrics():
    sanbase_graphql_functions = inspect.getmembers(san.sanbase_graphql,
                                                   inspect.isfunction)
    all_functions = list(map(lambda x: x[0],
                             sanbase_graphql_functions)) + execute_gql(
                                 '{query: getAvailableMetrics}')['query']
    all_functions.remove('get_metric')
    return all_functions
Example #4
def available_metric_for_slug_since(metric, slug):
    query_str = ("""{{
        getMetric(metric: \"{metric}\"){{
            availableSince(slug: \"{slug}\")
        }}
    }}
    """).format(metric=metric, slug=slug)

    return execute_gql(query_str)['getMetric']['availableSince']
Example #5
def available_metrics_for_slug(slug):
    query_str = ("""{{
        projectBySlug(slug: \"{slug}\"){{
            availableMetrics
        }}
    }}
    """).format(slug=slug)

    return execute_gql(query_str)['projectBySlug']['availableMetrics']
Example #6
def __request_api_call_data(query):
    try:
        res = execute_gql(query)['currentUser']['apiCallsHistory']
    except Exception as exc:
        if 'the results are empty' in str(exc):
            raise SanError('No API Key detected...')
        else:
            raise SanError(exc)

    return res
Example #7
    def execute(self):
        result = []
        batched_queries = []

        for idx, q in enumerate(self.queries):
            batched_queries.append(get_gql_query(idx, q[0], **q[1]))

        gql_string = self.__batch_gql_queries(batched_queries)
        res = execute_gql(gql_string)

        for k in sorted(res.keys()):
            df = convert_to_datetime_idx_df(res[k])
            result.append(df)

        return result
Example #8
def get_query_data(gql_query, query_name, slug):
    error = None
    attempts = 0
    started = time.time()

    while attempts < NUMBER_OF_RETRIES:
        try:
            response = execute_gql(gql_query)
            elapsed_time = time.time() - started
            return response[query_name], elapsed_time
        except SanError as e:
            attempts += 1
            error = e
            time.sleep(RETRY_DELAY)
    raise SanError(
        f"Not able to fetch {query_name} query for {slug} after {NUMBER_OF_RETRIES} attempts. Reason: {str(error)}"
    )
Example #9
def get_histogram_metric_data(gql_query, metric, slug):
    error = None
    attempts = 0
    started = time.time()

    while attempts < NUMBER_OF_RETRIES:
        try:
            response = execute_gql(gql_query)
            elapsed_time = time.time() - started
            return response['getMetric']['histogramData'], elapsed_time
        except SanError as e:
            attempts += 1
            error = e
            time.sleep(RETRY_DELAY)
    raise SanError(
        f"Not able to fetch {metric} metric for {slug} after {NUMBER_OF_RETRIES} attempts. Reason: {str(error)}"
    )
Example #10
def metric_complexity(metric, from_date, to_date, interval):
    query_str = ("""{{
    getMetric (metric: \"{metric}\") {{
        timeseriesDataComplexity(
            from: \"{from_date}\",
            to: \"{to_date}\",
            interval: \"{interval}\"
        )
        }}
    }}
    """).format(
        metric=metric,
        from_date=_format_from_date(from_date),
        to_date=_format_to_date(to_date),
        interval=interval
    )

    return execute_gql(query_str)['getMetric']['timeseriesDataComplexity']
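For reference, a small usage sketch of the helper above; the metric name and date range are illustrative values, and the return value is the API's complexity estimate for the requested window:

# Illustrative call: estimate how expensive a daily price_usd fetch over one month would be.
complexity = metric_complexity('price_usd', '2020-01-01', '2020-02-01', '1d')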
Example #11
def get(dataset, **kwargs):
    query, slug = parse_dataset(dataset)
    if query in DEPRECATED_QUERIES:
        print(
            '**NOTICE**\n{} will be deprecated in version 0.9.0, please use {} instead'
            .format(query, DEPRECATED_QUERIES[query]))
    if query in CUSTOM_QUERIES:
        return getattr(san.sanbase_graphql, query)(0, slug, **kwargs)
    if query in QUERY_MAPPING.keys():
        gql_query = "{" + get_gql_query(0, dataset, **kwargs) + "}"
    else:
        if slug != '':
            gql_query = "{" + \
                san.sanbase_graphql.get_metric(0, query, slug, **kwargs) + "}"
        else:
            raise SanError('Invalid metric!')
    res = execute_gql(gql_query)

    return transform_query_result(0, query, res)
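This is the public entry point used in Example #16 below: parse_dataset splits the dataset string into a query name and an optional slug (the "<query>/<slug>" form), and queries not found in QUERY_MAPPING or CUSTOM_QUERIES are routed through the generic get_metric builder, which requires a slug. A minimal usage sketch with illustrative metric, slug and dates:

import san

# "<query>/<slug>" dataset form; dates and interval are illustrative keyword arguments.
df = san.get(
    'prices/santiment',
    from_date='2020-01-01',
    to_date='2020-02-01',
    interval='1d',
)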
Example #12
def get_signal_last_triggered(id):
    gql_query = '''
    {
        getTriggerById(id: ''' + str(id) + ''') {
        trigger {
            lastTriggeredDatetime
        }
    }
    }
    '''
    try:
        response = execute_gql(gql_query)
        last_triggered = response['getTriggerById']['trigger'][
            'lastTriggeredDatetime']
        if last_triggered:
            return dt.strptime(last_triggered, DATETIME_PATTERN_SIGNAL)
        else:
            return dt.min
    except SanError as e:
        error = e
    raise SanError(
        f"Not able to get lastTriggeredDatetime for signal {id}. Reason: {str(error)}"
    )
Example #13
def get_min_interval(metric):
    gql_query = '''
    {
        getMetric(metric: "''' + metric + '''") {
            metadata {
                minInterval
            }
        }
    }
    '''
    attempts = 0
    error = None
    while attempts < NUMBER_OF_RETRIES:
        try:
            response = execute_gql(gql_query)
            return response['getMetric']['metadata']['minInterval']
        except SanError as e:
            attempts += 1
            error = e
            time.sleep(RETRY_DELAY)
    raise SanError(
        f"Not able to get min interval for {metric} after {NUMBER_OF_RETRIES} attempts. Reason: {str(error)}"
    )
Example #14
def get(dataset, **kwargs):
    gql_query = get_gql_query(0, dataset, **kwargs)
    gql_query = "{\n" + gql_query + "\n}"
    res = execute_gql(gql_query)
    return convert_to_datetime_idx_df(res["query_0"])
Example #15
    def execute(self):
        graphql_string = self.__create_batched_query_string()
        result = execute_gql(graphql_string)
        return self.__transform_batch_result(result)
Example #16
all_metrics = {
    asset: san.available_metrics_for_slug(asset)
    for asset in assets
}

queries = {
    slug: "{\n " + f"projectBySlug(slug: '{slug}')" + "   {"
    for slug in assets
}

for query in queries:
    queries[query] += '\n     ' + '\n     '.join(
        [metric for metric in all_metrics[query][:3]]) + "\n }\n}"

for asset in assets:
    res = execute_gql("{\n " + f'projectBySlug(slug: "{asset}")' +
                      " {\n        availableTimeseriesMetrics\n        }\n}")

    # ethereum_df = pd.DataFrame(res['projectBySlug'], index=[0])
    all_metrics = res['projectBySlug']['availableTimeseriesMetrics']
    prices = san.get(f'prices/{asset}',
                     from_date=from_date,
                     to_date=to_date,
                     interval=interval)
    data = prices
    for metric in all_metrics:
        if san.metadata(metric, arr=['isAccessible',
                                     'isRestricted'])['isRestricted']:
            continue
        else:
            data = data.join(