def ready(self):
    """Prefetch the public discussion URLs for the enabled feedback types.

    Skipped entirely when ``GITHUB_DISABLE_PREFETCH`` is set.  Each
    feedback entry's ``url`` field is refreshed from the remote forum
    (GitHub Discussions at present); a failed fetch leaves the existing
    entry untouched (best-effort prefetch).
    """
    if getattr(settings, 'GITHUB_DISABLE_PREFETCH', False):
        return

    # Imported here rather than at module level: models may not be
    # loadable at app-config import time.
    from .models import FEEDBACK_TYPES

    transport = GQLHttpTransport(settings.GITHUB_GRAPHQL_HOST,
                                 auth=settings.GITHUB_ACCESS_TOKEN)
    client = GQLClient(transport=transport, fetch_schema_from_transport=True)
    url_query = gql(""" query($disc_id: ID!) { node (id: $disc_id) { ... on Discussion { url } } } """)

    for feedback_key, feedback in FEEDBACK_TYPES.items():
        try:
            response = client.execute(
                url_query, variable_values={'disc_id': feedback.foreign_id})
        except (TransportError, TransportQueryError):
            # Best-effort: keep whatever URL we already had for this entry.
            continue
        FEEDBACK_TYPES[feedback_key] = feedback._replace(
            url=response['node']['url'])
def __init__(self):
    """Create a GraphQL client for the Seer database.

    Logs in through :class:`SeerAuth` and attaches the resulting
    session cookie to every request issued via ``self.graphqlClient``.
    """
    base_url = 'https://api.seermedical.com'
    cookie = SeerAuth(base_url).cookie
    # NOTE(review): the header name comes from the first cookie key but the
    # value is read from 'seer.sid' — presumably the cookie dict contains
    # exactly that one key; confirm against SeerAuth.
    cookie_header = list(cookie)[0] + '=' + cookie['seer.sid']
    transport = RequestsHTTPTransport(
        url=base_url + '/api/graphql',
        headers={'Cookie': cookie_header},
        use_json=True,
        timeout=60)
    self.graphqlClient = GQLClient(transport=transport)
def graphql_client(party_id=None):
    """Return a GraphQL client bound to the API's ``/graphql`` endpoint.

    When *party_id* is given it is appended as the ``partyId`` query
    parameter.  NOTE(review): reads ``self`` and ``header`` from the
    enclosing scope — presumably defined as a closure inside a method;
    confirm at the definition site.
    """
    endpoint = self.api_url + '/graphql'
    if party_id:
        # Plain concatenation (not an f-string) so a non-str party_id
        # still raises TypeError, as before.
        endpoint = endpoint + '?partyId=' + party_id
    transport = RequestsHTTPTransport(
        url=endpoint,
        headers=header,
        use_json=True,
        timeout=30,
    )
    return GQLClient(transport=transport)
def __init__(self):
    """Build the qontract GraphQL client.

    Prefers token authentication (sent as an ``Authorization`` header);
    falls back to HTTP basic auth when a username/password pair is
    configured, otherwise connects unauthenticated.
    """
    log.debug("using url: %s", conf.QONTRACT_BASE_URL)

    kwargs = {"url": conf.QONTRACT_BASE_URL}
    if conf.QONTRACT_TOKEN:
        log.debug("using token authentication")
        kwargs["headers"] = {"Authorization": conf.QONTRACT_TOKEN}
    elif conf.QONTRACT_USERNAME and conf.QONTRACT_PASSWORD:
        log.debug("using basic authentication")
        kwargs["auth"] = HTTPBasicAuth(conf.QONTRACT_USERNAME,
                                       conf.QONTRACT_PASSWORD)

    self.client = GQLClient(
        transport=RequestsHTTPTransport(**kwargs),
        fetch_schema_from_transport=True)
def __init__(self, vendor_name, secret, access_token=None, server_url=None, gql_server_url=None):
    """Initialise the GraphQL client on top of the base vendor client.

    Falls back to the class-level ``_gql_server_url`` when no explicit
    ``gql_server_url`` is supplied, then builds a schema-fetching GQL
    client against that endpoint.
    """
    super(GraphQLClient, self).__init__(
        vendor_name=vendor_name,
        secret=secret,
        access_token=access_token,
        server_url=server_url,
    )
    if gql_server_url:
        self.gql_server_url = gql_server_url
    else:
        self.gql_server_url = self._gql_server_url
    self.gql = GQLClient(
        transport=RequestsHTTPTransport(
            url=self.gql_server_url,
            use_json=True,
        ),
        fetch_schema_from_transport=True,
    )
def _init_gql_client(self):
    """Create the GraphQL client, reusing the session's Authorization header."""
    auth_header = self._session.headers.get('Authorization')
    transport = RequestsHTTPTransport(
        url=self.endpoints.get('gql'),
        headers={'Authorization': auth_header})
    self._gql_client = GQLClient(transport=transport)
def submit_publicly(self, feedback_type, message_text):
    """
    Posts the feedback publicly in a dedicated forum thread.

    In case an exception happens in the process, reverts to posting the
    feedback and the exception details privately to the maintainers.

    Parameters: ``feedback_type`` is a feedback entry carrying ``key``
    (used for the session key) and ``foreign_id`` (the remote discussion
    node ID); ``message_text`` is the user's feedback body.

    Re-submissions within the same session update the previously created
    forum comment (its ID is remembered in the session) instead of
    adding a new one.
    """
    transport = GQLHttpTransport(settings.GITHUB_GRAPHQL_HOST, auth=settings.GITHUB_ACCESS_TOKEN)
    client = GQLClient(transport=transport, fetch_schema_from_transport=True)
    # Attempt fetching the ID of the previous submission from the session.
    feedback_comment_id = self.request.session.get(
        f'feedback_{feedback_type.key}_comment_id')
    if feedback_comment_id:
        # If an ID is available, attempt fetching the contents of the previous
        # submission from the remote forum. (We do not persist the contents ourselves.)
        try:
            comment = client.execute(
                gql(""" query($comment_id: ID!) { node (id: $comment_id) { ... on DiscussionComment { body } } } """),
                variable_values={'comment_id': feedback_comment_id})
        except (GraphQLError, TransportError, TransportQueryError):
            # Query failed for some reason, treat this as new submission.
            # Clearing the ID routes us into the insert branch below.
            feedback_comment_id = None
            complete_text = message_text
        else:
            # Previous contents are available, concatenate them with the new feedback.
            complete_text = comment['node'][
                'body'] + "\n\n----\n\n" + message_text
        # Perform an update GraphQL mutation.
        comment_query = gql(""" mutation($comment_id: ID!, $body_text: String!) { updateDiscussionComment (input: {commentId: $comment_id, body: $body_text}) { comment { id url } } } """)
        # `operation` is read at the end to index into the execution result.
        comment_query.operation = 'updateDiscussionComment'
        params = {
            'comment_id': feedback_comment_id,
            'body_text': complete_text
        }
    if not feedback_comment_id:
        # Previous submission ID is not available, this is new submission.
        # Perform an insert GraphQL mutation.
        comment_query = gql(""" mutation($disc_id: ID!, $body_text: String!) { addDiscussionComment (input: {discussionId: $disc_id, body: $body_text}) { comment { id url } } } """)
        comment_query.operation = 'addDiscussionComment'
        # Prefix the message with a provenance line (environment + user PK);
        # the environment tag is suppressed on PROD.
        complete_text = (
            '_`' + gettext("Sent from the website {env} ({user})").format(
                env=settings.ENVIRONMENT
                if settings.ENVIRONMENT != 'PROD' else '',
                user=self.request.user.pk or '/') + '`_ \n\n' + message_text)
        params = {
            'disc_id': feedback_type.foreign_id,
            'body_text': complete_text
        }
    try:
        result = client.execute(comment_query, variable_values=params)
    except (GraphQLError, TransportError, TransportQueryError) as ex:
        # In case the query fails for some reason, let maintainers know that reason.
        self.submit_privately(feedback_type, message_text, ex)
    else:
        # Store the ID of the submission in the session, for future reuse.
        self.request.session[
            f'feedback_{feedback_type.key}_comment_id'] = (
                result[comment_query.operation]['comment']['id'])
else: raise ValueError(err_msg) except IndexError: sys.stderr.write(f'{err_msg}\n') sys.exit(1) if __name__ == "__main__": repo = get_arg(1, "repo. owner/name") bucket = get_arg(2, "bucket") build_id = get_arg(3, "build ID") path_rx = None if len(sys.argv) >= 5: path_rx = re.compile(get_arg(4, "path rx")) # Ref: https://cirrus-ci.org/api/ cirrus_graphql_xport = RequestsHTTPTransport(url=CCI_GQL_URL, verify=True, retries=3) gqlclient = GQLClient(transport=cirrus_graphql_xport, fetch_schema_from_transport=True) task_art_map = get_task_art_map(gqlclient, repo, bucket, build_id) loop = asyncio.get_event_loop() download_tasks = [] for task_name, art_names_urls in task_art_map.items(): download_tasks.append( loop.create_task( download_artifacts(task_name, art_names_urls, path_rx))) loop.run_until_complete(asyncio.gather(*download_tasks))