def test_hero_name_and_friends_query(client):
    """Hero plus its nested friends resolve to the expected R2-D2 payload."""
    query = gql('''
        query HeroNameAndFriendsQuery {
          hero {
            id
            name
            friends {
              name
            }
          }
        }
    ''')
    expected = {
        'hero': {
            'id': '2001',
            'name': 'R2-D2',
            'friends': [
                {'name': 'Luke Skywalker'},
                {'name': 'Han Solo'},
                {'name': 'Leia Organa'},
            ],
        }
    }
    assert client.execute(query) == expected
def test_duplicate_fields(client):
    """The same field requested twice under different aliases resolves independently."""
    query = gql('''
        query DuplicateFields {
          luke: human(id: "1000") {
            name
            homePlanet
          }
          leia: human(id: "1003") {
            name
            homePlanet
          }
        }
    ''')
    expected = {
        'luke': {'name': 'Luke Skywalker', 'homePlanet': 'Tatooine'},
        'leia': {'name': 'Leia Organa', 'homePlanet': 'Alderaan'},
    }
    assert client.execute(query) == expected
def test_use_fragment(client):
    """Both aliased humans can share one fragment for their selections."""
    query = gql('''
        query UseFragment {
          luke: human(id: "1000") {
            ...HumanFragment
          }
          leia: human(id: "1003") {
            ...HumanFragment
          }
        }
        fragment HumanFragment on Human {
          name
          homePlanet
        }
    ''')
    expected = {
        'luke': {'name': 'Luke Skywalker', 'homePlanet': 'Tatooine'},
        'leia': {'name': 'Leia Organa', 'homePlanet': 'Alderaan'},
    }
    assert client.execute(query) == expected
def validation_errors(client, query):
    """Return True when *query* fails client-side validation, False otherwise."""
    document = gql(query)
    try:
        client.validate(document)
    except Exception:  # any validation failure counts as "has errors"
        return True
    return False
def get_version():
    """Get the current version of HIVdb.

    Queries the Sierra GraphQL endpoint for the viewer's current algorithm
    version and returns its text form.
    """
    client = SierraClient(endpoint)
    document = gql('query { viewer { currentVersion { text } } }')
    data = client.execute(document)
    return data['viewer']['currentVersion']['text']
def test_hero_name_query(client):
    """Relay-style film query: a film node plus its first five characters."""
    query = gql('''
        {
          myFavoriteFilm: film(id:"RmlsbToz") {
            id
            title
            episodeId
            characters(first:5) {
              edges {
                node {
                  name
                }
              }
            }
          }
        }
    ''')
    expected = {
        "myFavoriteFilm": {
            "id": "RmlsbToz",
            "title": "Return of the Jedi",
            "episodeId": 6,
            "characters": {
                "edges": [
                    {"node": {"name": "Luke Skywalker"}},
                    {"node": {"name": "C-3PO"}},
                    {"node": {"name": "R2-D2"}},
                    {"node": {"name": "Darth Vader"}},
                    {"node": {"name": "Leia Organa"}},
                ]
            },
        }
    }
    assert client.execute(query) == expected
def test_parse_error(client):
    """A syntactically invalid document surfaces a parse error with its location."""
    result = None
    with pytest.raises(Exception) as excinfo:
        # NOTE: the literal's layout is significant — the assertions below pin
        # the reported position of "qeury" to line 2, column 13 of this string.
        query = gql('''
            qeury
            ''')
        result = client.execute(query)
    formatted_error = format_error(excinfo.value)
    assert formatted_error['locations'] == [{'column': 13, 'line': 2}]
    assert ('Syntax Error GraphQL request (2:13) Unexpected Name "qeury"'
            in formatted_error['message'])
    assert not result
def test_hero_name_query(client):
    """The default hero is R2-D2."""
    query = gql('''
        query HeroNameQuery {
          hero {
            name
          }
        }
    ''')
    assert client.execute(query) == {'hero': {'name': 'R2-D2'}}
def test_fetch_luke_aliased(client):
    """A single aliased field is returned under its alias key."""
    query = gql('''
        query FetchLukeAliased {
          luke: human(id: "1000") {
            name
          }
        }
    ''')
    assert client.execute(query) == {'luke': {'name': 'Luke Skywalker'}}
def test_fetch_luke_query(client):
    """Fetching human 1000 by id returns Luke."""
    query = gql('''
        query FetchLukeQuery {
          human(id: "1000") {
            name
          }
        }
    ''')
    assert client.execute(query) == {'human': {'name': 'Luke Skywalker'}}
def test_invalid_id_query(client):
    """An unknown id resolves the human field to None rather than erroring."""
    query = gql('''
        query humanQuery($id: String!) {
          human(id: $id) {
            name
          }
        }
    ''')
    result = client.execute(query, variable_values={'id': 'not a valid id'})
    assert result == {'human': None}
def test_check_type_of_luke(client):
    """__typename identifies the EMPIRE-era hero as a Human."""
    query = gql('''
        query CheckTypeOfLuke {
          hero(episode: EMPIRE) {
            __typename
            name
          }
        }
    ''')
    assert client.execute(query) == {
        'hero': {'__typename': 'Human', 'name': 'Luke Skywalker'}
    }
def test_check_type_of_r2(client):
    """__typename identifies the default hero as a Droid."""
    query = gql('''
        query CheckTypeOfR2 {
          hero {
            __typename
            name
          }
        }
    ''')
    assert client.execute(query) == {
        'hero': {'__typename': 'Droid', 'name': 'R2-D2'}
    }
def test_fetch_some_id_query2(client):
    """Variable-driven lookup of id 1002 returns Han Solo."""
    query = gql('''
        query FetchSomeIDQuery($someId: String!) {
          human(id: $someId) {
            name
          }
        }
    ''')
    result = client.execute(query, variable_values={'someId': '1002'})
    assert result == {'human': {'name': 'Han Solo'}}
def test_fetch_luke_and_leia_aliased(client):
    """Aliases allow fetching the same field twice in one operation."""
    query = gql('''
        query FetchLukeAndLeiaAliased {
          luke: human(id: "1000") {
            name
          }
          leia: human(id: "1003") {
            name
          }
        }
    ''')
    assert client.execute(query) == {
        'luke': {'name': 'Luke Skywalker'},
        'leia': {'name': 'Leia Organa'},
    }
def test_retries(execute_mock):
    """A failing transport is retried exactly `retries` times before giving up."""
    expected_retries = 3
    execute_mock.side_effect = Exception("fail")
    client = Client(
        retries=expected_retries,
        transport=RequestsHTTPTransport(
            url='http://swapi.graphene-python.org/graphql'),
    )
    query = gql('''
        {
          myFavoriteFilm: film(id:"RmlsbToz") {
            id
            title
            episodeId
          }
        }
    ''')
    with pytest.raises(Exception):
        client.execute(query)
    assert execute_mock.call_count == expected_retries
SETTING_FILE = ROOT_DIR + "/setting.txt"

with open(ROOT_DIR + "/schema.graphql") as f:
    SCHEMA_STR = f.read()

console = Console()

# AniList-style query: one page (50 entries) of a season's media, including
# next-airing-episode info for each show.
query = gql('''
query getCurrentSeason($page: Int, $season: MediaSeason, $seasonYear: Int, $onList: Boolean) {
  Page(page: $page, perPage: 50) {
    media(season: $season, seasonYear: $seasonYear, onList: $onList) {
      id
      title {
        userPreferred
      }
      episodes
      nextAiringEpisode {
        airingAt
        episode
      }
    }
  }
}
''')


def epochtodate(epoch) -> str:
    """Format a Unix timestamp like 'Mon (01/01) at 09:30 PM'.

    Uses the process-local timezone (naive `fromtimestamp`) — presumably
    intended; confirm if UTC is required.
    """
    # BUG FIX: `long` does not exist in Python 3 and raised NameError at call
    # time; `int()` accepts the same str/float/int inputs.
    return datetime.fromtimestamp(
        int(epoch)).strftime('%a (%d/%m) at %I:%M %p')
def query(self, query, timeout=None, **variable_values):
    """Parse *query* and execute it on the wrapped client.

    Keyword arguments become GraphQL variables; an empty set is passed
    through as None (matching the client's "no variables" convention).
    """
    document = gql(query)
    return self.client.execute(
        document,
        timeout=timeout,
        variable_values=variable_values or None,
    )
def _execute_query(self, query_text: str) -> dict:
    """Parse *query_text* and run it against the configured client."""
    return self.client.execute(gql(query_text))
async def _execute_query_pseudo_async(self, query, params=None):
    """Open a throwaway session on the websocket transport and run *query*.

    Args:
        query: GraphQL document source to parse and execute.
        params: optional mapping of GraphQL variable values.
    """
    # BUG FIX: the original default was a mutable `params={}`, which is shared
    # across calls; default to None and create a fresh dict per call instead.
    if params is None:
        params = {}
    async with Client(
        transport=self.websocket,
        # fetch_schema_from_transport=True,
    ) as session:
        return await session.execute(gql(query), variable_values=params)
def _execute_query(self, query, params=None):
    """Parse *query* and execute it with optional variable values."""
    document = gql(query)
    return self.gql_client.execute(document, variable_values=params)
def execute_query(self, querytext):
    """Parse *querytext* and run it on the underlying client."""
    return self._client.execute(gql(querytext))
async def main():
    """Fetch one snapshot of HOSE realtime stock quotes from SSI's gateway."""
    transport = AIOHTTPTransport(url="https://gateway-iboard.ssi.com.vn/graphql")

    # `async with` opens the transport connection and yields a session bound
    # to it; the connection is closed when the block exits.
    async with Client(
        transport=transport,
        fetch_schema_from_transport=True,
    ) as session:
        # Full order-book snapshot per symbol: ten bid/offer levels plus
        # matched price/volume and foreign-trading totals.
        query = gql(
            """
            query stockRealtimes($exchange: String) {
              stockRealtimes(exchange: $exchange) {
                stockNo ceiling floor refPrice stockSymbol stockType exchange
                matchedPrice matchedVolume priceChange priceChangePercent
                highest avgPrice lowest nmTotalTradedQty
                best1Bid best1BidVol best2Bid best2BidVol best3Bid best3BidVol
                best4Bid best4BidVol best5Bid best5BidVol best6Bid best6BidVol
                best7Bid best7BidVol best8Bid best8BidVol best9Bid best9BidVol
                best10Bid best10BidVol
                best1Offer best1OfferVol best2Offer best2OfferVol
                best3Offer best3OfferVol best4Offer best4OfferVol
                best5Offer best5OfferVol best6Offer best6OfferVol
                best7Offer best7OfferVol best8Offer best8OfferVol
                best9Offer best9OfferVol best10Offer best10OfferVol
                buyForeignQtty buyForeignValue sellForeignQtty sellForeignValue
                caStatus tradingStatus currentBidQty currentOfferQty
                remainForeignQtty session __typename
              }
            }
            """
        )
        result = await session.execute(query, variable_values={"exchange": "hose"})
        print(result)
# NetBox-style inventory query: for every device matching the given tags,
# pull identity, platform, tags, and full interface/cabling/IP detail.
query = gql("""
query ($tag: [String]) {
  device_list(tag: $tag) {
    name
    serial
    asset_tag
    config_context
    primary_ip4 { address }
    platform { slug name }
    tags { name }
    interfaces {
      name
      type
      description
      mode
      lag { name }
      tagged_vlans { name vid }
      untagged_vlan { name vid }
      cable {
        _termination_a_device { name }
        _termination_b_device { name }
      }
      ip_addresses { address }
      vrf { name }
    }
  }
}""")
# Lint-plugin fixture: this deliberately invalid document must trigger the
# GQL101 diagnostic noted in the trailing comment; do not "fix" the query.
from gql import gql gql(''' { id } ''') # GQL101: Cannot query field "id" on type "Query".
def upsert_run(self, id=None, name=None, project=None, host=None,
               group=None, tags=None,
               config=None, description=None, entity=None, state=None,
               display_name=None, notes=None,
               repo=None, job_type=None, program_path=None, commit=None,
               sweep_name=None, summary_metrics=None, num_retries=None):
    """Update a run

    Args:
        id (str, optional): The existing run to update
        name (str, optional): The name of the run to create
        group (str, optional): Name of the group this run is a part of
        project (str, optional): The name of the project
        config (dict, optional): The latest config params
        description (str, optional): A description of this project
        entity (str, optional): The entity to scope this project to.
        repo (str, optional): Url of the program's repository.
        state (str, optional): State of the program.
        job_type (str, optional): Type of job, e.g 'train'.
        program_path (str, optional): Path to the program.
        commit (str, optional): The Git SHA to associate the run with
        summary_metrics (str, optional): The JSON summary metrics
    """
    mutation = gql('''
    mutation UpsertBucket(
        $id: String, $name: String, $project: String, $entity: String!,
        $groupName: String, $description: String, $displayName: String,
        $notes: String, $commit: String, $config: JSONString, $host: String,
        $debug: Boolean, $program: String, $repo: String, $jobType: String,
        $state: String, $sweep: String, $tags: [String!],
        $summaryMetrics: JSONString,
    ) {
        upsertBucket(input: {
            id: $id, name: $name, groupName: $groupName, modelName: $project,
            entityName: $entity, description: $description,
            displayName: $displayName, notes: $notes, config: $config,
            commit: $commit, host: $host, debug: $debug, jobProgram: $program,
            jobRepo: $repo, jobType: $jobType, state: $state, sweep: $sweep,
            tags: $tags, summaryMetrics: $summaryMetrics,
        }) {
            bucket {
                id
                name
                displayName
                description
                config
                project {
                    id
                    name
                    entity {
                        id
                        name
                    }
                }
            }
        }
    }
    ''')
    # Config is sent over the wire as a JSON string, not a nested object.
    if config is not None:
        config = json.dumps(config)
    # Blank/whitespace-only descriptions are normalized to null.
    if not description or description.isspace():
        description = None

    kwargs = {}
    if num_retries is not None:
        kwargs['num_retries'] = num_retries

    variable_values = {
        'id': id,
        'entity': entity or self.settings('entity'),
        'name': name,
        'project': project,
        'groupName': group,
        'tags': tags,
        'description': description,
        'config': config,
        'commit': commit,
        'displayName': display_name,
        'notes': notes,
        'host': host,
        'debug': env.is_debug(env=self._environ),
        'repo': repo,
        'program': program_path,
        'jobType': job_type,
        'state': state,
        'sweep': sweep_name,
        'summaryMetrics': summary_metrics,
    }

    response = self.gql(mutation, variable_values=variable_values, **kwargs)
    run = response['upsertBucket']['bucket']
    # Cache the server-resolved project/entity so later calls can default to them.
    project = run.get('project')
    if project:
        self.set_setting('project', project['name'])
        entity = project.get('entity')
        if entity:
            self.set_setting('entity', entity['name'])
    return response['upsertBucket']['bucket']
# NOTE(review): Django view module that introspects the GraphQL schema at
# import time (a network call on module import — confirm this is intended).
# Snippet appears truncated mid-`index` (no return/render visible), so the
# code is kept verbatim.
from django.shortcuts import render from gql import Client, gql from gql.transport.requests import RequestsHTTPTransport from hackernews.schema import schema from .query import get_query # Create your views here. # client = Client(schema=schema) # host = 'http://devel.smartgreen.net.br:8000/graphql/' host = 'http://localhost:8000/graphql/' client = Client(transport=RequestsHTTPTransport(url=host)) query = gql(get_query(_type='general')) result_schema = client.execute(query) types = result_schema['__schema']['types'] data = {} for i in types: kind = i['kind'] name = i['name'] if data.get(kind): data[kind].append(name) else: data[kind] = [name] def index(request): # query = gql(get_query(_type='schema')) # result = client.execute(query) context = {'schema': result_schema['__schema'], 'menu': data}
class Files(Paginator):
    """Files is an iterable collection of :obj:`File` objects.

    Pages through a run's files via the Run query; `FILE_FRAGMENT` supplies
    the per-file fields.
    """

    QUERY = gql('''
    query Run($project: String!, $entity: String!, $name: String!,
              $fileCursor: String, $fileLimit: Int = 50,
              $fileNames: [String] = [], $upload: Boolean = false) {
        project(name: $project, entityName: $entity) {
            run(name: $name) {
                fileCount
                ...RunFilesFragment
            }
        }
    }
    %s
    ''' % FILE_FRAGMENT)

    def __init__(self, client, run, names=None, per_page=50, upload=False):
        # BUG FIX: `names=[]` was a mutable default argument shared across
        # calls; default to None and substitute a fresh list instead.
        self.run = run
        variables = {
            'project': run.project,
            'entity': run.entity,
            'name': run.id,
            'fileNames': names or [],
            'upload': upload,
        }
        super(Files, self).__init__(client, variables, per_page)

    @property
    def length(self):
        # Total file count, once the first page has been fetched.
        if self.last_response:
            return self.last_response['project']['run']['fileCount']
        else:
            return None

    @property
    def more(self):
        # True until the server says there is no next page; optimistically
        # True before the first fetch.
        if self.last_response:
            return self.last_response['project']['run']['files']['pageInfo'][
                'hasNextPage']
        else:
            return True

    @property
    def cursor(self):
        # Cursor of the last edge on the current page (None before first fetch).
        if self.last_response:
            return self.last_response['project']['run']['files']['edges'][-1][
                'cursor']
        else:
            return None

    def update_variables(self):
        self.variables.update({
            'fileLimit': self.per_page,
            'fileCursor': self.cursor
        })

    def convert_objects(self):
        return [
            File(self.client, r["node"])
            for r in self.last_response['project']['run']['files']['edges']
        ]

    def __repr__(self):
        return "<Files {} ({})>".format("/".join(self.run.path), len(self))
class SampledHistoryScan(object):
    """Iterator over a run's sampled history, fetched page by page."""

    QUERY = gql('''
    query SampledHistoryPage($entity: String!, $project: String!,
                             $run: String!, $spec: JSONString!) {
        project(name: $project, entityName: $entity) {
            run(name: $run) {
                sampledHistory(specs: [$spec])
            }
        }
    }
    ''')

    def __init__(self, client, run, keys, page_size=1000):
        self.client = client
        self.run = run
        self.keys = keys
        self.page_size = page_size
        self.page_offset = 0  # minStep for next page
        self.scan_offset = 0  # index within current page of rows
        self.rows = []  # current page of rows

    def __iter__(self):
        # Restart the scan from step 0 each time iteration begins.
        self.page_offset = 0
        self.scan_offset = 0
        self.rows = []
        return self

    def __next__(self):
        while True:
            # Drain the current page first.
            if self.scan_offset < len(self.rows):
                row = self.rows[self.scan_offset]
                self.scan_offset += 1
                return row
            # Past the run's last recorded step: nothing more to fetch.
            if self.page_offset >= self.run.lastHistoryStep:
                raise StopIteration()
            self._load_next()

    @normalize_exceptions
    @retriable(check_retry_fn=util.no_retry_auth,
               retryable_exceptions=(RetryError, requests.RequestException))
    def _load_next(self):
        # Request [page_offset, page_offset + page_size) sampled at page_size.
        variables = {
            "entity": self.run.entity,
            "project": self.run.project,
            "run": self.run.id,
            "spec": json.dumps({
                "keys": self.keys,
                "minStep": int(self.page_offset),
                "maxStep": int(self.page_offset + self.page_size),
                "samples": int(self.page_size),
            }),
        }
        res = self.client.execute(self.QUERY, variable_values=variables)
        res = res['project']['run']['sampledHistory']
        self.rows = res[0]
        self.page_offset += self.page_size
        self.scan_offset = 0
def upsert_run(self, id=None, name=None, project=None, host=None,
               config=None, description=None, entity=None, state=None,
               repo=None, job_type=None, program_path=None, commit=None,
               sweep_name=None, summary_metrics=None):
    """Update a run

    Args:
        id (str, optional): The existing run to update
        name (str, optional): The name of the run to create
        project (str, optional): The name of the project
        config (dict, optional): The latest config params
        description (str, optional): A description of this project
        entity (str, optional): The entity to scope this project to.
        repo (str, optional): Url of the program's repository.
        state (str, optional): State of the program.
        job_type (str, optional): Type of job, e.g 'train'.
        program_path (str, optional): Path to the program.
        commit (str, optional): The Git SHA to associate the run with
        summary_metrics (str, optional): The JSON summary metrics
    """
    mutation = gql('''
    mutation UpsertBucket(
        $id: String, $name: String, $project: String, $entity: String!,
        $description: String, $commit: String, $config: JSONString,
        $host: String, $debug: Boolean, $program: String, $repo: String,
        $jobType: String, $state: String, $sweep: String,
        $summaryMetrics: JSONString,
    ) {
        upsertBucket(input: {
            id: $id, name: $name, modelName: $project, entityName: $entity,
            description: $description, config: $config, commit: $commit,
            host: $host, debug: $debug, jobProgram: $program, jobRepo: $repo,
            jobType: $jobType, state: $state, sweep: $sweep,
            summaryMetrics: $summaryMetrics,
        }) {
            bucket {
                id
                name
                description
                config
            }
        }
    }
    ''')
    # Config travels over the wire as a JSON string, not a nested object.
    if config is not None:
        config = json.dumps(config)
    # Empty descriptions are normalized to null.
    if not description:
        description = None
    # Fall back to the latest local git commit when none is supplied.
    commit = commit or self.git.last_commit

    response = self.gql(mutation, variable_values={
        'id': id,
        'entity': entity or self.settings('entity'),
        'name': name,
        'project': project,
        'description': description,
        'config': config,
        'commit': commit,
        'host': host,
        'debug': os.getenv('DEBUG'),
        'repo': repo,
        'program': program_path,
        'jobType': job_type,
        'state': state,
        'sweep': sweep_name,
        'summaryMetrics': summary_metrics,
    })
    return response['upsertBucket']['bucket']
# NOTE(review): script snippet querying the first five Uniswap V2 pairs from
# The Graph. It appears truncated mid-`pairs.append([...])`, so the code is
# kept verbatim rather than rewritten.
#---------- Extract data from Uniswap Subgraph ---------- sample_transport=RequestsHTTPTransport( url='https://api.thegraph.com/subgraphs/name/uniswap/uniswap-v2', verify=True, retries=3, ) client = Client( transport=sample_transport ) query = gql(''' query { pairs (first: 5) { token0 { symbol } token1 { symbol } } } ''') response1 = client.execute(query) print(response1) print(response1['pairs']) pairs = [] for i in response1['pairs']: pairs.append([ i['token0']['symbol'],
def test_nested_query(client):
    """Two levels of friends-of-friends resolve, with appearsIn data."""
    query = gql('''
        query NestedQuery {
          hero {
            name
            friends {
              name
              appearsIn
              friends {
                name
              }
            }
          }
        }
    ''')
    all_films = ['NEWHOPE', 'EMPIRE', 'JEDI']
    expected = {
        'hero': {
            'name': 'R2-D2',
            'friends': [
                {
                    'name': 'Luke Skywalker',
                    'appearsIn': all_films,
                    'friends': [
                        {'name': 'Han Solo'},
                        {'name': 'Leia Organa'},
                        {'name': 'C-3PO'},
                        {'name': 'R2-D2'},
                    ],
                },
                {
                    'name': 'Han Solo',
                    'appearsIn': all_films,
                    'friends': [
                        {'name': 'Luke Skywalker'},
                        {'name': 'Leia Organa'},
                        {'name': 'R2-D2'},
                    ],
                },
                {
                    'name': 'Leia Organa',
                    'appearsIn': all_films,
                    'friends': [
                        {'name': 'Luke Skywalker'},
                        {'name': 'Han Solo'},
                        {'name': 'C-3PO'},
                        {'name': 'R2-D2'},
                    ],
                },
            ],
        }
    }
    assert client.execute(query) == expected
def query(self, query):
    """Parse *query* and execute it on the wrapped client."""
    document = gql(query)
    return self.client.execute(document)
# Lint fixture: an intentionally malformed document (suppressed with noqa);
# do not "fix" the query text.
from gql import gql gql(''' wrong query ''') # noqa
# Target sizes per variant; None means keep the original dimensions.
IMG_SIZES = {"original": None, "medium": (940, 940), "small": (120, 120)}

# SSM parameter names are suffixed with the deployment stage.
SSM_NAMES = {
    "ANIML_API_URL": "/api/url-{}".format(os.environ["STAGE"]),
    "ARCHIVE_BUCKET": "/images/archive-bucket-{}".format(os.environ["STAGE"]),
    "SERVING_BUCKET": "/images/serving-bucket-{}".format(os.environ["STAGE"]),
    "DEADLETTER_BUCKET": "/images/dead-letter-bucket-{}".format(os.environ["STAGE"]),
}

# Mutation used to register a newly ingested image; only the new _id is needed.
QUERY = gql("""
mutation CreateImageRecord($input: CreateImageInput!){
  createImage(input: $input) {
    image {
      _id
    }
  }
}
""")

s3 = boto3.client("s3")


def resize(md, filename, dims):
    """Thumbnail the image at md["SourceFile"] to *dims* and return the /tmp path."""
    dest_path = os.path.join("/tmp", filename)
    with Image.open(md["SourceFile"]) as img:
        img.thumbnail(dims)
        img.save(dest_path)
    return dest_path
# Paged events feed: each event's entities with localized name, description,
# aliases, and relations (localization driven by $language).
QUERY = gql("""
query dynamicQuery($settings: EventSettingsInput!, $language: Language) {
  events(settings: $settings) {
    edges {
      cursor
      node {
        id
        body
        entities {
          edges {
            node {
              ... on Entity {
                id
                name
                description(language: $language)
                aliases(language: $language) {
                  value
                  color
                  disabled
                }
                relations(language: $language) {
                  edges {
                    relationship {
                      name
                    }
                    node {
                      ... on Entity {
                        name
                      }
                    }
                  }
                }
              }
            }
          }
        }
      }
    }
  }
}
""")
# NOTE(review): Tracker sign-in mutations. The snippet appears truncated
# inside the TFA_AUTH document (its triple-quoted string never closes here),
# so the code is kept verbatim rather than rewritten.
from gql import gql # Sign in to Tracker and obtain an authentication token # :param dict creds: a dict with a username and password # Mutation variables should look like {"creds":{"userName": ${username}, "password": ${password}}} SIGNIN_MUTATION = gql(""" mutation SignIn($creds: SignInInput!) { signIn (input: $creds) { result { ... on TFASignInResult { sendMethod authenticateToken } ... on AuthResult { authToken } ... on SignInError{ code description } } } } """) # Finish sign in by completing TFA # :param dict creds: a dict with a authentication code and token # Mutation variables should look like {"authInput": {"authenticationCode": auth_code, "authenticateToken": token}} TFA_AUTH = gql(""" mutation Authenticate($authInput: AuthenticateInput!) {
token1 { id symbol decimals } reserve0 reserve1 } }''' return query.replace('$skip', str(skip)) url = "https://api.thegraph.com/subgraphs/name/uniswap/uniswap-v2" from gql import gql, Client # from gql.transport.aiohttp import AIOHTTPTransport from gql.transport.requests import RequestsHTTPTransport # transport = AIOHTTPTransport(url="https://countries.trevorblades.com/") transport = RequestsHTTPTransport(url=url) client = Client(transport=transport, fetch_schema_from_transport=True) all_pairs = [] idx = 0 while idx < 29200: query = gql(get_query(idx)) result = client.execute(query) all_pairs.extend(result['pairs']) idx += 1000 print('idx:', idx) with open('data/pairs.json', 'w') as f: json.dump(all_pairs, f)
# NOTE(review): snippet begins mid-string (the query template's opening is not
# visible) so it is kept verbatim. The pagination loop hard-codes a 29200
# ceiling and a 1000-row page size — presumably matching get_query's `first`
# argument; confirm before reuse.
class Runs(Paginator):
    """An iterable collection of runs associated with a project and optional filter.

    Pages through the project's runs; `RUN_FRAGMENT` supplies the per-run
    fields.
    """

    QUERY = gql('''
    query Runs($project: String!, $entity: String!, $cursor: String,
               $perPage: Int = 50, $order: String, $filters: JSONString) {
        project(name: $project, entityName: $entity) {
            runCount(filters: $filters)
            readOnly
            runs(filters: $filters, after: $cursor, first: $perPage, order: $order) {
                edges {
                    node {
                        ...RunFragment
                    }
                    cursor
                }
                pageInfo {
                    endCursor
                    hasNextPage
                }
            }
        }
    }
    %s
    ''' % RUN_FRAGMENT)

    def __init__(self, client, entity, project, filters=None, order=None,
                 per_page=50):
        # BUG FIX: `filters={}` was a mutable default argument shared across
        # calls; default to None and substitute a fresh dict instead.
        self.entity = entity
        self.project = project
        self.filters = filters or {}
        self.order = order
        variables = {
            'project': self.project,
            'entity': self.entity,
            'order': self.order,
            # Filters are sent as a JSON string, not a nested object.
            'filters': json.dumps(self.filters),
        }
        super(Runs, self).__init__(client, variables, per_page)

    @property
    def length(self):
        # Total run count, once the first page has been fetched.
        if self.last_response:
            return self.last_response['project']['runCount']
        else:
            return None

    @property
    def more(self):
        # True until the server says there is no next page; optimistically
        # True before the first fetch.
        if self.last_response:
            return self.last_response['project']['runs']['pageInfo'][
                'hasNextPage']
        else:
            return True

    @property
    def cursor(self):
        # Cursor of the last edge on the current page (None before first fetch).
        if self.last_response:
            return self.last_response['project']['runs']['edges'][-1]['cursor']
        else:
            return None

    def convert_objects(self):
        return [
            Run(self.client, self.entity, self.project,
                r["node"]["name"], r["node"])
            for r in self.last_response['project']['runs']['edges']
        ]

    def __repr__(self):
        return "<Runs {}/{} ({})>".format(self.entity, self.project, len(self))
# NOTE(review): Apollo-Federation reviews service schema (Review owns its
# type; User/Product are extended external types). The snippet ends at a
# dangling `@query('reviews')` decorator with no following resolver visible,
# so the code is kept verbatim.
from helper import get_review_by_id, get_user_reviews, get_product_reviews, reviews type_defs = gql( ''' type Query { reviews(first: Int = 5): [Review] } type Review @key(fields: "id") { id: ID! body: String author: User @provides(fields: "username") product: Product @provides(fields: "upc") } type User @key(fields: "id") @extends { id: ID! @external username: String @external reviews: [Review] } type Product @key(fields: "upc") @extends { upc: String! @external reviews: [Review] } ''' ) @query('reviews')
def test_incompatible_request_gql(client):
    """gql() rejects non-string input with an informative error."""
    with pytest.raises(Exception) as exc_info:
        gql(123)
    assert "Received incompatible request" in str(exc_info.value)
# NOTE(review): loads a GitHub contributions query from a sibling .graphql
# file and defines a paginated sender. The snippet appears truncated inside
# the Client(...) call in send_query, so the code is kept verbatim.
import os from typing import Any from gql import gql, Client from gql.transport.requests import RequestsHTTPTransport import datetime current_folder = os.path.dirname(__file__) query_path = os.path.join(current_folder, "get_contribs.graphql") with open(query_path, "r") as f: query = gql(f.read()) def send_query(username: str, from_str: str, to_str: str, token: str, cursor: str): params = { "username": username, "from": from_str, "to": to_str, "cursor": cursor, } _transport = RequestsHTTPTransport( url='https://api.github.com/graphql', use_json=True, headers={ "Content-type": "application/json", "Authorization": f"Bearer {token}" } ) client = Client( transport=_transport,
def upload_urls(self, project, files, run=None, entity=None, description=None):
    """Generate temporary resumeable upload urls

    Args:
        project (str): The project to download
        files (list or dict): The filenames to upload
        run (str, optional): The run to upload to
        entity (str, optional): The entity to scope this project to.
            Defaults to wandb models

    Returns:
        (bucket_id, file_info)
        bucket_id: id of bucket we uploaded to
        file_info: A dict of filenames and urls; also indicates whether this
            revision already has uploaded files, e.g.::

                {
                    'weights.h5': {"url": "https://weights.url"},
                    'model.json': {
                        "url": "https://model.json",
                        "updatedAt": '2013-04-26T22:22:23.832Z',
                        'md5': 'mZFLkyvTelC5g8XnyQrpOw==',
                    },
                }
    """
    query = gql('''
    query Model($name: String!, $files: [String]!, $entity: String!,
                $run: String!, $description: String) {
        model(name: $name, entityName: $entity) {
            bucket(name: $run, desc: $description) {
                id
                files(names: $files) {
                    edges {
                        node {
                            name
                            url(upload: true)
                            updatedAt
                        }
                    }
                }
            }
        }
    }
    ''')
    run_id = run or self.settings('run')
    entity = entity or self.settings('entity')
    query_result = self.gql(query, variable_values={
        'name': project,
        'run': run_id,
        'entity': entity,
        'description': description,
        'files': [file for file in files],
    })

    run = query_result['model']['bucket']
    if run:
        # Flatten the relay edges into {filename: node} for callers.
        result = {
            file['name']: file
            for file in self._flatten_edges(run['files'])
        }
        return run['id'], result
    raise CommError("Run does not exist {}/{}/{}.".format(
        entity, project, run_id))
# NOTE(review): notebook-style DataCite snippet; it begins mid-call (the
# transport constructor's opening is not visible), so the code is kept
# verbatim rather than rewritten.
url='https://api.datacite.org/graphql', use_json=True, ) client = Client( transport=_transport, fetch_schema_from_transport=True, ) query = gql("""{ researcher(id: "https://orcid.org/0000-0003-1419-2405") { id name publications(first: 50) { totalCount nodes { id relatedIdentifiers { relatedIdentifier } } } } }""") data = client.execute(query) print(data["researcher"]["name"]) #print(data.researcher.name) # In[8]: print(data["researcher"]["publications"]["totalCount"])
# NOTE(review): Buycoins price/order queries. The snippet appears truncated
# inside the GET_ORDERS document (its triple-quoted string never closes
# here), so the code is kept verbatim.
from gql import gql CURRENT_BUYCOINS_PRICE = gql( """ query { buycoinsPrices(side: buy, mode: standard, cryptocurrency: bitcoin){ buyPricePerCoin cryptocurrency id maxBuy maxSell minBuy minCoinAmount minSell mode sellPricePerCoin status } } """ ) GET_ORDERS = gql( """ query getOrders($status: GetOrdersStatus!){ getOrders(status: $status) { dynamicPriceExpiry orders { edges { node { id
def get_user_by_im_token(self, user_token):
    """Look up a vendor user through an IM token.

    Executes the `userByImToken` query with this client's access token, the
    caller-supplied IM token and the configured user-agent string, and wraps
    the resulting payload (openid, target_id, username, balances,
    verification info, lock state, ...) in a :class:`BixinVendorUser`.
    """
    # NOTE(review): the document is built by %-interpolating the tokens
    # directly into the query text; if any of these values can contain quotes
    # this is injectable — consider GraphQL variables instead.
    access_token = self.access_token
    query_text = """
    query {
        userByImToken(access_token: "%s", im_token: "%s", ua_str: "%s") {
            openid
            target_id
            username
            fullname
            avatar_url
            verified
            is_locked
            wallet_balance
            vendor_fund_balance
            verifiedInfo {
                phone
                idcard
                passport
                bankcard
                face
            }
        }
    }
    """ % (access_token, user_token, self._gql_ua)
    document = gql(query_text)
    ret = self.gql.execute(document)
    return BixinVendorUser(**ret['userByImToken'])
# Hasura mutation: insert a match result, upserting the community and both
# players (scoped to that community) when their names already exist.
add_result_mutation = gql(
    """
    mutation add_result_mutation(
        $communityName: String!
        $player1Name: String!
        $player2Name: String!
        $date: timestamptz!
        $player1Goals: Int!
        $player2Goals: Int!
        $extraTime: Boolean!
    ) {
        insert_results(
            objects: {
                community: {
                    data: { name: $communityName }
                    on_conflict: {
                        constraint: communities_name_key
                        update_columns: name
                    }
                }
                date: $date
                player1: {
                    data: {
                        name: $player1Name
                        community: {
                            data: { name: $communityName }
                            on_conflict: {
                                constraint: communities_name_key
                                update_columns: name
                            }
                        }
                    }
                    on_conflict: {
                        constraint: players_name_communityId_key
                        update_columns: name
                    }
                }
                player2: {
                    data: {
                        name: $player2Name
                        community: {
                            data: { name: $communityName }
                            on_conflict: {
                                constraint: communities_name_key
                                update_columns: name
                            }
                        }
                    }
                    on_conflict: {
                        constraint: players_name_communityId_key
                        update_columns: name
                    }
                }
                player2goals: $player2Goals
                player1goals: $player1Goals
                extratime: $extraTime
            }
        ) {
            returning {
                id
            }
        }
    }
    """
)
from gql import Client, gql
from gql.transport.requests import RequestsHTTPTransport

# HTTP transport against the public countries API; verify TLS and retry
# transient failures up to three times.
transport = RequestsHTTPTransport(
    url="https://countries.trevorblades.com/",
    verify=True,
    retries=3,
)

client = Client(transport=transport, fetch_schema_from_transport=True)

query = gql("""
    query getContinents {
      continents {
        code
        name
      }
    }
""")

result = client.execute(query)
print(result)
# NOTE(review): lint/validation fixture documents (one valid fragment query,
# one that must raise GQL101). The snippet appears truncated inside the
# third gql(...) document, so the code is kept verbatim.
from gql import gql gql(''' query NestedQueryWithFragment { hero { ...NameAndAppearances friends { ...NameAndAppearances friends { ...NameAndAppearances } } } } fragment NameAndAppearances on Character { name appearsIn } ''') gql(''' query HeroSpaceshipQuery { hero { favoriteSpaceship } } ''') # GQL101: Cannot query field "favoriteSpaceship" on type "Character". gql(''' query HeroNoFieldsQuery {
# NOTE(review): snippet begins mid-function (a DB-commit tail returning
# ipfs_hash) before defining the annotation-list query and its fetcher;
# kept verbatim. fetch_registry_annotations starts a timer it never appears
# to use in the visible code — confirm against the full source.
# store in DB logger.info("Committing batch state changes") try: session.commit() except SQLAlchemyError as e: logger.error(f"Encountered error during database commit: {e}") session.rollback() return ipfs_hash # so we can see the CID in the job dashboard results ANNOTATION_LIST_QUERY = gql( """ query MyQuery ($first: Int = 10, $skip: Int = 0) { annotations (first: $first, skip: $skip) { cid batchCID } } """ ) def fetch_registry_annotations(client, offset, limit): start_time = time.time() try: annotations = client.execute( ANNOTATION_LIST_QUERY, variable_values={"first": limit, "skip": offset} ).get("annotations", []) except ClientConnectionError: logger.warning("The IPFS gateway server dropped the connection - skipping") return []