Example #1
 def __init__(self):
     self.transport = AIOHTTPTransport(url="http://localhost:5000/graphql")
     self.client = Client(transport=self.transport, fetch_schema_from_transport=True)
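
Since the fragment above only constructs the client, here is a minimal companion sketch of how such a client is typically driven from synchronous code; the placeholder query is an assumption, not part of the original example.

from gql import gql, Client
from gql.transport.aiohttp import AIOHTTPTransport

transport = AIOHTTPTransport(url="http://localhost:5000/graphql")
client = Client(transport=transport, fetch_schema_from_transport=True)

# With gql 3, client.execute() can drive the async transport from sync code;
# "{ __typename }" is just a universally valid placeholder document.
result = client.execute(gql("{ __typename }"))
print(result)
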
Example #2
import os

from web3 import Web3
from gql import gql, Client
from gql.transport.aiohttp import AIOHTTPTransport

infura = "https://mainnet.infura.io/v3/681ab0b8b0eb4fa0a9c26751c49a4367"  # Michael

web3 = Web3(Web3.HTTPProvider(infura))
print(web3.isConnected())

import warnings

warnings.filterwarnings("ignore")


network = "uniswapv2"
ENDPOINT = "https://api.thegraph.com/subgraphs/name/uniswap/uniswap-v2"

transport = AIOHTTPTransport(url=ENDPOINT)

client = Client(transport=transport, fetch_schema_from_transport=True)


#
# Globals



dirname = os.path.dirname(__file__)
datafolder = os.path.join(dirname, "../", "data", network)
os.makedirs(datafolder, exist_ok=True)  # creates ../data and ../data/<network> if missing
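
The script above only wires up the subgraph client and a local data folder. As an illustration, a query like the following could be issued against the uniswap-v2 subgraph; the field names follow the public subgraph schema, but treat this as a sketch rather than part of the original script.

query = gql("""
{
  pairs(first: 5, orderBy: reserveUSD, orderDirection: desc) {
    id
    token0 { symbol }
    token1 { symbol }
    reserveUSD
  }
}
""")
top_pairs = client.execute(query)  # client was created above with fetch_schema_from_transport=True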

Example #3
 async def task_coro():
     transport = WebsocketsTransport(url=url)
     async with Client(transport=transport) as session:
         await assert_client_is_working(session)
Example #4
import dash
import dash_bootstrap_components as dbc
import pandas as pd

from gql import gql, Client
from gql.transport.requests import RequestsHTTPTransport

app = dash.Dash(__name__, external_stylesheets=[dbc.themes.SANDSTONE])
server = app.server

#---------- Extract data from Aragon Tokens xdai Subgraph ----------

sample_transport = RequestsHTTPTransport(
    url='https://api.thegraph.com/subgraphs/name/1hive/aragon-tokens-xdai',
    verify=True,
    retries=3,
)
client = Client(transport=sample_transport)
query = gql('''
query {
  tokenHolders(first: 1000, where: { tokenAddress: "0x8FbeD5491438B81b2fCDBFd4A53e7eD8d5B4f1be" }) {
    address
    balance
  }
}
''')
response1 = client.execute(query)

#---------- Extract data from Conviction Voting xdai Subgraph ----------

sample_transport = RequestsHTTPTransport(
    url=
    'https://api.thegraph.com/subgraphs/name/1hive/aragon-conviction-voting-xdai',
Example #5
from typing import Union

from gql import gql, Client
from gql.transport.requests import RequestsHTTPTransport

from data_sources import keys
from data_sources.apicalls import print_red
from datetime import datetime, timedelta

uniswap_client = Client(transport=RequestsHTTPTransport(
    url="https://api.thegraph.com/subgraphs/name/uniswap/uniswap-v2"))

bitquery_client = Client(transport=RequestsHTTPTransport(
    url="https://graphql.bitquery.io",
    headers={"X-API-KEY": keys.bitquery_api_key}))


def get_intervals(since, till, interval):
    num_intervals = int((till - since) / interval)
    return [since + interval * i for i in range(num_intervals + 1)]
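
# Illustrative call (not from the original module): a 15-minute window with a
# 5-minute interval gives 3 intervals, i.e. 4 boundary points:
#   get_intervals(datetime(2021, 1, 1, 0, 0), datetime(2021, 1, 1, 0, 15), timedelta(minutes=5))
#   == [datetime(2021, 1, 1, 0, 0), datetime(2021, 1, 1, 0, 5),
#       datetime(2021, 1, 1, 0, 10), datetime(2021, 1, 1, 0, 15)]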


def get_average_btc_like_fees(network: str,
                              since: datetime,
                              till: datetime = datetime.now(),
                              interval: timedelta = timedelta(minutes=5)):
    intervals = get_intervals(since, till, interval)
    query_str = "{{bitcoin(network: {} ) {{".format(network)
    interval_keys = [f"t{i}" for i in range(len(intervals) - 1)]
    for i, key in enumerate(interval_keys):
        query_str += "{}: transactions(time: {{ since: \"{}\", till: \"{}\" }}) {{ avgFee: feeValue(calculate: average) }}\n".format(
Example #6
def request(query, url):
    """Request data from a GraphQL endpoint."""
    transport = AIOHTTPTransport(url=url)
    client = Client(transport=transport, fetch_schema_from_transport=True)
    query = gql(query)
    return client.execute(query)
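
A hedged usage example for the helper above; the public countries.trevorblades.com endpoint and its fields are assumptions used purely for illustration.

data = request("{ countries { code name } }", "https://countries.trevorblades.com/")
print(data["countries"][:3])
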
Example #7
def introspection_schema():
    return Client(introspection=introspection)
Example #8
import json
import time

import requests
import tilt  # provides tilt_from_dict(), used below
from tqdm import tqdm

from gql import gql, Client
from gql.transport.requests import RequestsHTTPTransport

sample_transport = RequestsHTTPTransport(url='http://localhost:4001/graphql',
                                         use_json=True,
                                         headers={
                                             "Content-type":
                                             "application/json",
                                         },
                                         verify=False)

client = Client(
    retries=3,
    transport=sample_transport,
    fetch_schema_from_transport=True,
)

# Import tilt file and convert to local python instance
file = json.loads(
    requests.get(
        'https://raw.githubusercontent.com/Transparency-Information-Language/schema/master/tilt.json'
    ).content)
instance = tilt.tilt_from_dict(file)

# Create DataController
meta_id = abs(hash(instance.meta.id)) % (10**8)
query_string = 'mutation { CreateController(id: %s, name: "%s", privacyPolicy: "%s") { _id }}' % (
    meta_id, instance.meta.name, instance.meta.url)
query = gql(query_string)
Example #9
from gql import Client
from gql.transport.requests import RequestsHTTPTransport
from django.conf import settings

git_transport = RequestsHTTPTransport(
    url='https://api.github.com/graphql',
    headers={
        'Authorization': f'bearer {settings.GITHUB_API_TOKEN}',
    },
    use_json=True)

git_client = Client(transport=git_transport)
Example #10
 def get_response_graphql(self, url=None, endpoint='', gql_query=None, params=None):
     """Run a GraphQL query, defaulting to base_url + endpoint when no url is given."""
     if url is None:
         url = self.base_url + endpoint
     transport = RequestsHTTPTransport(url=url, use_json=True)
     client = Client(transport=transport, fetch_schema_from_transport=True)
     return client.execute(gql_query, params)
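
For illustration, given an instance api of the class this method belongs to, a call might look like this; the query, endpoint suffix, and variables are hypothetical.

query = gql("query getUser($id: ID!) { user(id: $id) { name } }")
data = api.get_response_graphql(endpoint="/graphql", gql_query=query, params={"id": "123"})
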
Example #11
def output_tids(keyword, tid_names):
    """Write line of space separated list of task ID:"name" prefixed by a keyword"""
    sys.stdout.write(f'{keyword} ')
    tasks = [f'{tid}:"{name}"' for tid, name in tid_names.items()]
    sys.stdout.write(",".join(tasks))
    sys.stdout.write("\n")


if __name__ == "__main__":
    # Ref: https://cirrus-ci.org/api/
    cirrus_graphql_url = "https://api.cirrus-ci.com/graphql"
    cirrus_graphql_xport = RequestsHTTPTransport(url=cirrus_graphql_url,
                                                 verify=True,
                                                 retries=3)
    client = Client(transport=cirrus_graphql_xport,
                    fetch_schema_from_transport=True)

    try:
        raw_builds = get_raw_builds(client, sys.argv[1], sys.argv[2],
                                    sys.argv[3])
    except IndexError as xcpt:
        print(
            f"Error: argument {xcpt}\n\nUsage: {sys.argv[0]} <user> <repo> <sha>"
        )
        sys.exit(1)

    raw_tasks = get_raw_tasks(client, latest_build_id(raw_builds))
    for cci_status in CIRRUS_CI_STATUSES:
        output_tids(cci_status, status_tid_names(raw_tasks, cci_status))
    output_tids("CIASR", notif_tids(raw_tasks, "CI agent stopped responding!"))
Example #12
def test_no_schema_exception():
    with pytest.raises(AssertionError) as exc_info:
        client = Client()
        client.validate("")
    assert "Cannot validate the document locally, you need to pass a schema." in str(
        exc_info.value)
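
For contrast, a minimal sketch of local validation once a schema is supplied; the SDL string below is made up for illustration and is not part of the test suite.

from gql import gql, Client

schema_str = """
type Query {
  hello: String
}
"""

client = Client(schema=schema_str)
client.validate(gql("{ hello }"))  # no exception: the document matches the local schema
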
Example #13
async def get_rating(uid, query_type):
    try:
        if query_type == 'b30':
            query_type = 'bestRecords'
        elif query_type == 'r30':
            query_type = 'recentRecords'
        Profile_url = 'http://services.cytoid.io/profile/' + uid
        Profile_json = json.loads(await get_url(Profile_url))
        if 'statusCode' in Profile_json:
            if Profile_json['statusCode'] == 404:
                return {'status': False, 'text': '发生错误:此用户不存在。'}
        ProfileId = Profile_json['user']['id']
        ProfileRating = Profile_json['rating']
        ProfileLevel = Profile_json['exp']['currentLevel']
        ProfileUid = Profile_json['user']['uid']
        nick = Profile_json['user']['name']
        if nick is None:
            nick = ProfileUid
        if 'avatar' in Profile_json['user']:
            Avatar_img = Profile_json['user']['avatar']['medium']
        else:
            Avatar_img = None
        transport = AIOHTTPTransport(url='https://services.cytoid.io/graphql')
        client = Client(transport=transport, fetch_schema_from_transport=True)
        query = gql(f"""
            query StudioAnalytics($id: ID = "{ProfileId}") {{
          profile(id: $id) {{
            id
            {query_type}(limit: 30) {{
              ...RecordFragment
            }}
          }}
        }}
        
        fragment RecordFragment on UserRecord {{
          id
          date
          chart {{
            id
            difficulty
            type
            level {{
              uid
              title
            }}
          }}
          score
          accuracy
          rating
        }}
        """)

        result = await client.execute_async(query)
        print(result)
        workdir = os.path.abspath('./cache/' + str(uuid.uuid4()))
        os.mkdir(workdir)
        bestRecords = result['profile'][query_type]
        rank = 0
        for x in bestRecords:
            thumbpath = await download_cover_thumb(x['chart']['level']['uid'])
            chart_type = x['chart']['type']
            difficulty = x['chart']['difficulty']
            chart_name = x['chart']['level']['title']
            score = str(x['score'])
            acc = x['accuracy']
            rt = x['rating']
            _date = datetime.strptime(x['date'], "%Y-%m-%dT%H:%M:%S.%fZ")
            local_time = _date + timedelta(hours=8)
            playtime = local_time.timestamp()
            nowtime = time.time()
            playtime = playtime - nowtime
            playtime = -playtime
            t = playtime / 60 / 60 / 24
            dw = 'd'
            if t < 1:
                t = playtime / 60 / 60
                dw = 'h'
                if t < 1:
                    t = playtime / 60
                    dw = 'm'
                if t < 1:
                    t = playtime
                    dw = 's'
            playtime = str(int(t)) + dw
            rank += 1
            if thumbpath:
                havecover = True
            else:
                havecover = False
            make_songcard(workdir, thumbpath, chart_type, difficulty,
                          chart_name, score, acc, rt, playtime, rank,
                          havecover)
        # b30card
        b30img = Image.new("RGBA", (1975, 1610), '#1e2129')
        avatar_path = await download_avatar_thumb(Avatar_img, ProfileId)
        if avatar_path:
            im = Image.open(avatar_path)
            im = im.resize((110, 110))
            try:
                bigsize = (im.size[0] * 3, im.size[1] * 3)
                mask = Image.new('L', bigsize, 0)
                draw = ImageDraw.Draw(mask)
                draw.ellipse((0, 0) + bigsize, fill=255)
                mask = mask.resize(im.size, Image.ANTIALIAS)
                im.putalpha(mask)
                output = ImageOps.fit(im, mask.size, centering=(0.5, 0.5))
                output.putalpha(mask)
                output.convert('RGBA')
                b30img.alpha_composite(output, (1825, 25))
            except:
                traceback.print_exc()

        font4 = ImageFont.truetype(
            os.path.abspath('./assets/Nunito-Regular.ttf'), 35)
        drawtext = ImageDraw.Draw(b30img)
        get_name_width = font4.getsize(nick)[0]
        get_img_width = b30img.width
        drawtext.text((get_img_width - get_name_width - 160, 30),
                      nick,
                      '#ffffff',
                      font=font4)

        font5 = ImageFont.truetype(
            os.path.abspath('./assets/Noto Sans CJK DemiLight.otf'), 20)
        level_text = f'等级 {ProfileLevel}'
        level_text_width = font5.getsize(level_text)[0]
        level_text_height = font5.getsize(level_text)[1]
        img_level = Image.new("RGBA", (level_text_width + 20, 40), '#050a1a')
        drawtext_level = ImageDraw.Draw(img_level)
        drawtext_level.text(((img_level.width - level_text_width) / 2,
                             (img_level.height - level_text_height) / 2),
                            level_text,
                            '#ffffff',
                            font=font5)
        b30img.alpha_composite(img_level, (1825 - img_level.width - 20, 85))
        font6 = ImageFont.truetype(
            os.path.abspath('./assets/Nunito-Light.ttf'), 20)
        rating_text = f'Rating {str(round(float(ProfileRating), 2))}'
        rating_text_width = font6.getsize(rating_text)[0]
        rating_text_height = font6.getsize(rating_text)[1]
        img_rating = Image.new("RGBA", (rating_text_width + 20, 40), '#050a1a')
        drawtext_level = ImageDraw.Draw(img_rating)
        drawtext_level.text(((img_rating.width - rating_text_width) / 2,
                             (img_rating.height - rating_text_height) / 2),
                            rating_text,
                            '#ffffff',
                            font=font6)
        b30img.alpha_composite(
            img_rating, (1825 - img_level.width - img_rating.width - 30, 85))

        i = 0
        fname = 1
        t = 0
        s = 0
        while True:
            try:
                cardimg = Image.open(f'{workdir}/{str(fname)}.png')
                w = 15 + 384 * i
                h = 135
                if s == 5:
                    s = 0
                    t += 1
                h = h + 240 * t
                w = w - 384 * 5 * t
                i += 1
                cardimg = makeShadow(cardimg, 4, 9, [0, 3], 'rgba(0,0,0,0)',
                                     '#000000')
                b30img.alpha_composite(cardimg, (w, h))
                fname += 1
                s += 1
            except FileNotFoundError:
                break
            except Exception:
                traceback.print_exc()
                break
        if __name__ == '__main__':
            b30img.show()
        else:
            savefilename = os.path.abspath(f'./cache/{str(uuid.uuid4())}.jpg')
            b30img.convert("RGB").save(savefilename)
            shutil.rmtree(workdir)
            return {'status': True, 'path': savefilename}
    except Exception as e:
        traceback.print_exc()
        return {'status': False, 'text': '发生错误:' + str(e)}
Example #14
 def __init__(self):
     transport = RequestsHTTPTransport(url='https://qa-connect.qualia.io/api/partner/graphql',
                                       headers={"Authorization": "Basic INSERT_YOUR_CREDENTIALS_HERE"})
     self.client = Client(transport=transport)
Example #15
from typing import Union

from gql import gql, Client
from gql.transport.requests import RequestsHTTPTransport

from data_sources.apicalls import print_red

uniswap_client = Client(transport=RequestsHTTPTransport(
    url="https://api.thegraph.com/subgraphs/name/uniswap/uniswap-v2"))


def get_uniswap_tvl() -> Union[None, float]:
    try:
        query = gql("""{
                uniswapFactories(first: 1) {
                    totalLiquidityUSD
                }
            }""")
        result = uniswap_client.execute(query)
        return float(result['uniswapFactories'][0]['totalLiquidityUSD'])
    except Exception as e:
        print_red("Unsucessful call in graphcalls.get_uniswap_tvl()")
        print(e)
        return None
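
# Usage sketch (illustrative, not part of the original module):
#     tvl = get_uniswap_tvl()
#     if tvl is not None:
#         print(f"Uniswap v2 total liquidity: ${tvl:,.0f}")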


def get_uniswap_daily_pools(date: int) -> Union[None, list]:
    """date: unix timestamp"""
    result: list
    try:
        # first request to get first id
Example #16
from datetime import datetime

from gql import Client, gql
from gql.transport.requests import RequestsHTTPTransport

from config import AUTH_KEY, BASE_URL
from helpers import get_params, get_quarters
from query import QUERY
from wikidata import TOPICS

import json
import os

client = Client(transport=RequestsHTTPTransport(
  url=BASE_URL,
  headers={"Authorization": "Bearer {key}".format(key=AUTH_KEY)},
  use_json=True
))

def make_path(root, folder):
  path = os.path.join(root, folder)
  if not os.path.exists(path):
    os.mkdir(path)
  return path

downloads = make_path(os.getcwd(), 'downloads')

# we want approx 2000 labeled articles in total.
# divided by 4 topics: 500 each
# 4 quarters: 125 each quarter
TOTAL_EVENTS_PER_QUARTER = 400
Example #17
import os

from dotenv import load_dotenv
from gql import gql, Client
from gql.transport.requests import RequestsHTTPTransport

load_dotenv()
SECRET_KEY = os.getenv("smart_parking_api_key")
SERVER_URL = os.getenv("server_url")

TRANSPORT = RequestsHTTPTransport(
    url=SERVER_URL,
    use_json=True,
    headers={"api-key": SECRET_KEY},
    verify=False,
    retries=3,
)

CLIENT = Client(transport=TRANSPORT, fetch_schema_from_transport=True)

update_parking_space_status = gql("""
    mutation updateParkingSpace($id: ID! $parkingSpaceDetails:ParkingSpaceDetailsInput! ) {
      updateParkingSpace(id: $id parkingSpaceDetails:$parkingSpaceDetails ) {
          id
        parkingLotIdentifier
        isOccupied
        spaceNumber
      }
    }
""")


def updateParkingSpaceStatus(id, is_occupied):
    params = {"id": id, "parkingSpaceDetails": {"isOccupied": is_occupied}}
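    # NOTE (assumption): the snippet is cut off here; the original most likely
    # finishes by running the mutation defined above with these variables.
    return CLIENT.execute(update_parking_space_status, variable_values=params)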
Example #18
from gql import gql, Client
from gql.transport.requests import RequestsHTTPTransport

sample_transport = RequestsHTTPTransport(
    url='https://api.spacex.land/graphql/',
    use_json=True,
    headers={
        "Content-type": "application/json",
    },
    verify=False,
    retries=3,
)

client = Client(
    transport=sample_transport,
    fetch_schema_from_transport=True,
)

query = gql('''
query {
  launchesPast(limit: 10) {
    mission_name
    launch_date_local
    launch_site {
      site_name_long
    }
    links {
      article_link
      video_link
    }
    rocket {
Example #19
def local_schema():
    return Client(schema=StarWarsSchema)
Example #20
def __info_for_drug_ids(drug_ids, chembl2drug, diseases, args, errors):
    # Step 2: collect targets for the drug ids we obtained in step 1
    tp = RequestsHTTPTransport(url=ENDPOINT_URL, verify=True, retries=3)
    client = Client(transport=tp, fetch_schema_from_transport=True)

    target_ids = set([])
    results = set()
    all_results = {}
    for i, chembl_id in enumerate(drug_ids):
        print('%s - %d of %d' % (chembl_id, i, len(drug_ids)))
        try:
            query = gql(DRUG_INFO_QUERY % chembl_id)
            result = client.execute(query)
            drug = result["drug"]
            trial_urls = {}
            out_item = {}
            try:
                out_item['molecule_name'] = drug['name']
            except:
                out_item['molecule_name'] = ''
                errors.append('WARNING - no molecule name for "%s" (%s)' %
                              (chembl_id, chembl2drug[chembl_id]))
                # This would be weird
                #continue
            try:
                out_item['drug_type'] = drug['drugType']
            except:
                out_item['drug_type'] = ''
                errors.append('WARNING - no drug type for "%s" (%s)' %
                              (chembl_id, chembl2drug[chembl_id]))
            try:
                for d in diseases:
                    out_item['max_%s_phase' % d] = 0
                indications = drug['indications']['rows']
                out_item['indication_ids'] = set()
                out_item['indication_names'] = set()
                for indication in indications:
                    for d in diseases:
                        try:
                            indication['disease']['name'].index(d)
                            if indication['maxPhaseForIndication'] > out_item[
                                    'max_%s_phase' % d]:
                                out_item[
                                    'max_%s_phase' %
                                    d] = indication['maxPhaseForIndication']
                        except:
                            pass
                    out_item['indication_ids'].add(indication['disease']['id'])
                    out_item['indication_names'].add(
                        indication['disease']['name'])
                out_item['indication_ids'] = list(out_item['indication_ids'])
                out_item['indication_names'] = list(
                    out_item['indication_names'])
            except:
                out_item['indication_ids'] = []
                out_item['indication_names'] = []
            try:
                out_item['trial_phase'] = drug['maximumClinicalTrialPhase']
            except:
                out_item['trial_phase'] = ''
            try:
                out_item[
                    'chembl_uri'] = 'https://www.ebi.ac.uk/chembl/compound_report_card/%s/' % chembl_id
            except:
                out_item['chembl_uri'] = ''

            try:
                mech_action = drug['mechanismsOfAction']['rows'][0]
                out_item['mechanism_of_action'] = mech_action[
                    'mechanismOfAction']
                out_item['action_type'] = mech_action['actionType']
            except:
                out_item['mechanism_of_action'] = ''
                out_item['action_type'] = ''

            # error handling
            try:
                targets = drug['knownDrugs']['rows']
            except:
                targets = []

            out_item['targets'] = {}
            out_item['targetstr'] = []
            out_item['target_id'] = ''
            out_item['target_class'] = ''
            out_item['approved_name'] = ''
            for target in targets:
                try:
                    out_item['target_id'] = target['targetId']
                    if len(target['targetClass']) > 0:
                        out_item['targets'][
                            target['targetId']] = target['targetClass'][0]
                        out_item['target_class'] = target['targetClass'][0]
                    out_item['approved_name'] = target['approvedSymbol']
                    for url in target['urls']:
                        trial_urls[url['name']] = url['url']
                except:
                    raise
                    pass

            for tid, tclass in out_item['targets'].items():
                out_item['targetstr'].append('%s:%s' % (tid, tclass))

            # Clinical Trials URL if available
            try:
                out_item['trial_url'] = trial_urls['ClinicalTrials']
            except:
                out_item['trial_url'] = ''

            all_results[chembl_id] = out_item
            out_item['literature_occ'] = []
            try:
                for row in drug['literatureOcurrences']['rows']:
                    out_item['literature_occ'].append(row['pmid'])
            except:
                pass

            out_item['toxicity_class'] = []
            out_item['meddra_soc_code'] = []
            try:
                for warning in drug['drugWarnings']:
                    out_item['toxicity_class'].append(
                        str(warning['toxicityClass']))
                    out_item['meddra_soc_code'].append(
                        str(warning['meddraSocCode']))
            except:
                pass
        except:
            print(
                "FAILURE - could not retrieve targets for drug '%s' - skipping"
                % chembl_id)
            raise

    with open(os.path.join(args.outdir, 'errors.txt'), 'w') as outfile:
        for error in errors:
            outfile.write(error)
            outfile.write('\n')

    # output as JSON
    with open(os.path.join(args.outdir, 'drug_opentargets.json'),
              'w') as outfile:
        json.dump(all_results, outfile)

    # output as CSV
    with open(os.path.join(args.outdir, 'drug_opentargets.csv'),
              'w') as outfile:
        header = [
            'CHEMBL_ID', 'molecule_name', 'molecule_type', 'indication_ids',
            'indication_names', 'max_trial_phase'
        ]
        for d in diseases:
            header.append('max_%s_phase' % d)
        header.extend([
            'chembl_uri', 'mechanism_of_action', 'action_type', 'target_id',
            'target_class', 'approved_name', 'literature_occ', 'trial_url',
            'toxicity_class', 'meddra_soc_code'
        ])
        outfile.write(CSV_DELIM.join(header))
        outfile.write('\n')

        for chembl_id, info in all_results.items():
            out_row = [
                chembl_id, info['molecule_name'], info['drug_type'],
                ':'.join(list(info['indication_ids'])),
                '"' + ':'.join(list(info['indication_names'])) + '"',
                str(info['trial_phase'])
            ]
            for d in diseases:
                out_row.append(str(info['max_%s_phase' % d]))
            out_row.extend([
                info['chembl_uri'], '"' + info['mechanism_of_action'] + '"',
                info['action_type'], info['target_id'], info['target_class'],
                info['approved_name'], ':'.join(info['literature_occ']),
                info['trial_url']
            ])
            out_row.append(':'.join(info['toxicity_class']))
            out_row.append(':'.join(info['meddra_soc_code']))
            outfile.write(CSV_DELIM.join(out_row))
            outfile.write('\n')
Example #21
from django.conf import settings
from django.shortcuts import render
from gql import Client, gql
from graphene_django import settings as graphene_settings

from .query import get_query

val = getattr(settings, 'GRAPHENE', {})['SCHEMA']
schema = graphene_settings.perform_import(val, 'I dunno!')

graphqldoc_default_title = 'GraphQL API documentation'
graphqldoc_settings = getattr(settings, 'GRAPHQLDOC', {})
graphqldoc_title = graphqldoc_settings.get('TITLE', graphqldoc_default_title)
graphqldoc_logo = graphqldoc_settings.get(
    'LOGO_URL', 'https://www.graphql.org/img/logo.svg')

client = Client(schema=schema)
query = gql(get_query(_type='general'))
result_schema = client.execute(query)

types = result_schema['__schema']['types']
query_type = result_schema['__schema']['queryType']
mutation_type = result_schema['__schema']['mutationType']

data = {}
for i in types:
    kind = i['kind']
    name = i['name']
    if data.get(kind):
        data[kind].append(name)
    else:
        data[kind] = [name]
Example #22
if sys.version_info[0:2] < (3, 6) or sys.version_info[0:2] > (3, 7):
    raise Exception(
        "This script has only been tested on Python 3.6. "
        + "You are using {major}.{minor}.".format(
            major=sys.version_info[0], minor=sys.version_info[1]
        )
    )

transport_service = RequestsHTTPTransport(
    url=f"https://{DASH_ENTERPRISE_HOST}/Manager/graphql",
    auth=(SERVICE_USERNAME, SERVICE_API_KEY),
    use_json=True,
    retries=5,
)

client_service = Client(transport=transport_service)


def exit_message():
    """
    Prints out links to deployed app and app settings page before exiting
    script.
    """
    if MAIN_BRANCHNAME == REVIEW_BRANCHNAME:
        APPNAME = MAIN_APPNAME
    else:
        APPNAME = REVIEW_APPNAME

    print("Your app has been deployed...")
    print()
    print("  {APPNAME}".format(APPNAME=APPNAME))
Example #23
 def client(self):
     """Graphql client."""
     if self.__client is None:
         self.__client = Client(transport=self.transport)
     return self.__client
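
A self-contained sketch of the lazy-initialization pattern this property implies; the class name, default URL, and transport choice are assumptions, not taken from the original project.

from gql import Client
from gql.transport.requests import RequestsHTTPTransport

class GraphqlApi:
    def __init__(self, url="https://example.com/graphql"):
        self.transport = RequestsHTTPTransport(url=url)
        self.__client = None

    @property
    def client(self):
        """Graphql client, built on first access and cached afterwards."""
        if self.__client is None:
            self.__client = Client(transport=self.transport)
        return self.__client
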
Example #24
	async def naruto(self, message, new_message, index):
		argument = new_message.partition("naruto ")[2]
		character = argument
		village = ""
		page = 1
		villages = ["cloud", "grass", "springs", "leaf", "mist", "flower", "rain", "sand", "sound", "star", "rock", "waterfall", "tides"]
		for i in range(len(villages)):
			if argument.startswith(villages[i]):
				village = villages[i]
				character = ""
		if argument == "" or argument == " " or argument == "random":
			character = ""
			village = ""
			page = random.randint(1, 12)
		query = '''
{
  characters(page: 1, filter: {name: "pain", village: "leaf"}) {
    results {
      name
      avatarSrc
      description
      rank
      village
      age
    }
  }
}
					'''
		query = query.replace("pain", character)
		query = query.replace("leaf", village)
		query = query.replace("1", str(page))
		query = gql(query)
		transport = RequestsHTTPTransport(url="https://narutoql.com/graphql", use_json=True)
		client = Client(transport=transport, fetch_schema_from_transport=True)
		response_query = client.execute(query)
		self.max_index = len(response_query["characters"]["results"]) - 1
		if argument == "" or argument == " " or argument == "random":
			index = random.randint(0, self.max_index)
			self.index = index
		name = response_query["characters"]["results"][index]["name"]
		description = response_query["characters"]["results"][index]["description"]
		picture = response_query["characters"]["results"][index]["avatarSrc"]
		rank = response_query["characters"]["results"][index]["rank"]
		village = response_query["characters"]["results"][index]["village"].title()
		age = response_query["characters"]["results"][index]["age"]
		if age == "" or age == None:
			age = "No Data"
		if rank == "" or rank == None:
			rank = "No Data"
		if self.embed != None:
			await self.embed.delete()
		embed = discord.Embed(title=name,
							  description=description,
							  color=0xea9828)
		embed.set_author(name=village)
		embed.set_thumbnail(url=picture)
		embed.add_field(name="Age", value=age, inline=True)
		embed.add_field(name="Rank", value=rank, inline=True)
		embed.set_footer(text="Naruto and Naruto Shippuden")
		embed_sent = await message.channel.send(embed=embed)
		self.message = message
		self.new_message = new_message
		self.func = self.naruto
		self.min_index = 0
		self.embed = embed_sent
		self.embed_to_send = embed
		if self.max_index > 0:
			await embed_sent.add_reaction(self.arrow_left)
			await embed_sent.add_reaction(self.arrow_right)
Example #25
        }
    }
}

"""


plugin = routing.Plugin()

stash_url = xbmcplugin.getSetting(plugin.handle, 'url')
hide_unorganised = xbmcplugin.getSetting(plugin.handle, 'hide_unorganised') == 'true'

xbmc.log(f'{sys.argv}, {plugin.handle}, {stash_url}, {hide_unorganised}', xbmc.LOGINFO)

transport = RequestsHTTPTransport(urljoin(stash_url, '/graphql'))
client = Client(transport=transport)


def common_item_info(mediatype: str):
    return {
        'genre': 'Adult / Pornography',
        'mpaa': 'NC-17',
        'mediatype': mediatype
    }


def random_fanart_from_gallery(gallery: Dict):
    # landscape images work best
    def wide_images():
        for image in gallery['images']:
            aspect = image['file']['width'] / max(image['file']['height'], 1)
Example #26
import json
import requests

from conf import config
from gql import Client
from gql.transport.requests import RequestsHTTPTransport
from graphql import build_ast_schema, parse

headers = {
    'Content-Type': "application/graphql",
    'x-api-key': config.API_KEY,
    'cache-control': "no-cache",
}

with open('conf/schema.graphql') as source:
    document = parse(source.read())
schema = build_ast_schema(document)

sample_transport = RequestsHTTPTransport(
    url=config.API_ENDPOINT + '/graphql',
    use_json=True,
    headers=headers,
    verify=False,
    retries=3,
)
client = Client(
    transport=sample_transport, schema=schema
)
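
Because the client above is built with both a transport and a schema parsed from conf/schema.graphql, gql validates each document locally against that schema before sending it. A brief, hypothetical illustration:

from gql import gql

result = client.execute(gql("{ __typename }"))  # validated locally, then sent to config.API_ENDPOINT + '/graphql'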


Example #27
def get_PRA_data(start, stop, station):
    #print("In get_PRA_data")

    sample_transport = RequestsHTTPTransport(
        url='https://www.vegvesen.no/trafikkdata/api/',
        use_json=True,
        headers={
            "Content-type": "application/json",
        },
        verify=False,
        retries=3)

    client = Client(transport=sample_transport,
                    #    fetch_schema_from_transport=True,
                    )

    heading = '''
    trafficData(trafficRegistrationPointId: "%s") {
          trafficRegistrationPoint {
            name
            id
            latestData {
              volumeByHour
            }
            trafficRegistrationType
            manualLabels {
              affectedLanes {
                lane {
                  laneNumber
                }
              }
              validFrom
              validTo
            }
            commissions {
              validFrom
              validTo
              lanes {
                laneNumber
              }
            }
            direction {
              to
              from
            }
            location {
              coordinates {
                latLon {
                  lat
                  lon
                }
              }
            }
          }
    ''' % (station)

    after = ""
    next = True
    response = ""
    first = True

    while next:
        query_string = '''
      {
        %s
          volume {
            byHour(%sfrom: "%s", to: "%s") {
              pageInfo {
                hasNextPage
                endCursor
              }
              edges {
                node {
                  from
                  to
                  byDirection {
                    heading
                    total {
                      coverage {
                        percentage
                        unit
                        unavailable {
                          numerator
                          denominator
                          percentage
                        }
                        uncertain {
                          numerator
                          denominator
                          percentage
                        }
                        included {
                          numerator
                          denominator
                          percentage
                        }
                      }
                    }
                    byLengthRange {
                      lengthRange {
                        lowerBound
                        upperBound
                      }
                      total {
                        volumeNumbers {
                          volume
                          validSpeed {
                            total
                          }
                        }
                      }
                    }
                  }
                }
              }
            }
          }
        }
      }
      ''' % (heading, after, start, stop)
        #print (f"Query string is: {query_string}")
        query = gql(query_string)

        this_response = client.execute(query)
        #print("this_response is: " + json.dumps(this_response))
        # Let us first of all do some checking on the response to determine if we have data
        try:
            x = this_response['trafficData']['volume']['byHour']['edges']
        except KeyError:
            # No edges means no data, just return empty response
            #print("No edges in this_response")
            return ""
        if not x:
            # edges list exist but list is empty also means no data, just return empty response
            #print("edges exists in this_response but edge list is empty")
            return ""

        #print (f"this_response type is: {type(this_response)}")
        #print("this_response is: " + json.dumps(this_response))
        if first:
            #print("This is the first response...")
            this_response_str = json.dumps(this_response)
            response = response + this_response_str[:this_response_str.
                                                    rindex("]")] + ","
            first = False
        else:
            #print("This is a subsequent response...")
            this_response_str = json.dumps(
                this_response["trafficData"]["volume"]["byHour"]["edges"])
            this_response_str = this_response_str[this_response_str.
                                                  index("[") + 1:]
            response = response + this_response_str[:this_response_str.
                                                    rindex("]")] + ","

        # Check this_response to see if there are more pages and if yes, set after to endCursor
        if this_response["trafficData"]["volume"]["byHour"]["pageInfo"][
                "hasNextPage"] == True:
            #print("Next is True")
            endCursor = this_response["trafficData"]["volume"]["byHour"][
                "pageInfo"]["endCursor"]
            after = '''after: "%s", ''' % (endCursor)
            heading = '''trafficData(trafficRegistrationPointId: "%s") {''' % (
                station)
        else:
            #print("Next is False")
            next = False

    #print("Clean up the response...")
    # remove last comma
    response = response[:response.rindex(",")]
    # Add closing brackets
    response = response + "]}}}}"
    #print("Return the response...")
    return response
Example #28
 def __init__(self):
     """
     Creates graphql client
     """
     transport = AIOHTTPTransport(url="http://localhost:3000/graphql")
     self.client = Client(transport=transport, fetch_schema_from_transport=True)
Example #29
async def test_aiohttp_file_upload_list_of_two_files(event_loop,
                                                     aiohttp_server):
    from aiohttp import web
    from gql.transport.aiohttp import AIOHTTPTransport

    async def handler(request):

        reader = await request.multipart()

        field_0 = await reader.next()
        assert field_0.name == "operations"
        field_0_text = await field_0.text()
        assert field_0_text == file_upload_mutation_3_operations

        field_1 = await reader.next()
        assert field_1.name == "map"
        field_1_text = await field_1.text()
        assert field_1_text == file_upload_mutation_3_map

        field_2 = await reader.next()
        assert field_2.name == "0"
        field_2_text = await field_2.text()
        assert field_2_text == file_1_content

        field_3 = await reader.next()
        assert field_3.name == "1"
        field_3_text = await field_3.text()
        assert field_3_text == file_2_content

        field_4 = await reader.next()
        assert field_4 is None

        return web.Response(text=file_upload_server_answer,
                            content_type="application/json")

    app = web.Application()
    app.router.add_route("POST", "/", handler)
    server = await aiohttp_server(app)

    url = server.make_url("/")

    transport = AIOHTTPTransport(url=url, timeout=10)

    with TemporaryFile(file_1_content) as test_file_1:
        with TemporaryFile(file_2_content) as test_file_2:

            async with Client(transport=transport) as session:

                query = gql(file_upload_mutation_3)

                file_path_1 = test_file_1.filename
                file_path_2 = test_file_2.filename

                f1 = open(file_path_1, "rb")
                f2 = open(file_path_2, "rb")

                params = {"files": [f1, f2]}

                # Execute query asynchronously
                result = await session.execute(query,
                                               variable_values=params,
                                               upload_files=True)

                f1.close()
                f2.close()

                success = result["success"]

                assert success
Example #30
 async def query_asn(self, asn: int):
     async with Client(transport=self.transport,
                       execute_timeout=IRRD_TIMEOUT) as session:
         result = await session.execute(GQL_QUERY_ASN, {"asn": asn})
         return self._graphql_to_route_info(result)