def configure(self, mutation, **more_args):
    """Prepare the test fixtures: merge extra args into ``self.args`` and
    build fresh query/subscription operations plus a test writer.

    :param mutation: accepted for interface compatibility; not read here.
    :param more_args: extra keyword arguments merged into ``self.args``.
    """
    # Removed a commented-out mock_websocket side-effect line (dead code).
    self.args.update(**more_args)
    self.kmq = KmakeQuery(**self.args)
    self.cli = Cli(**self.args)
    self.q = Operation(Query)
    self.s = Operation(Subscription)
    self.w = TestWriter()
def bulk_mutation(self, op_builder_list):
    """Apply each builder to a shared Mutation operation, executing and
    restarting the operation whenever ``self.batch_size`` selections
    accumulate.

    :param op_builder_list: callables taking ``op=`` and returning the
        (possibly extended) operation.
    :return: result of the last execution, or None if nothing ran.
    """
    last_result = None
    pending = Operation(Mutation)
    for build in op_builder_list:
        pending = build(op=pending)
        if len(pending) < self.batch_size:
            continue
        # Batch is full: flush it and start a fresh operation.
        last_result = self.exec(pending)
        pending = Operation(Mutation)
    if len(pending):
        last_result = self.exec(pending)
    return last_result
def bulk_mutation(self, op_builder_list):
    """Fold the given operation builders into batched Mutation operations,
    executing a batch once it reaches MAX_BATCH_SIZE selections.

    :param op_builder_list: callables taking ``op=`` and returning the
        (possibly extended) operation.
    :return: result of the final execution, or None if no batch ran.
    """
    result = None
    batch = Operation(Mutation)
    for builder in op_builder_list:
        batch = builder(op=batch)
        if len(batch) >= MAX_BATCH_SIZE:
            # Flush the full batch and continue with an empty one.
            result = self.exec(batch)
            batch = Operation(Mutation)
    if len(batch):
        result = self.exec(batch)
    return result
async def add_labels_to_labelable(endpoint: BaseEndpoint, repository_id: str, labelable_id: str, label: str) -> AddLabelsToLabelablePayload:
    """Attach *label* (resolved by name among the repository's first 50
    labels) to the given labelable and return the mutation payload.

    Raises KeyError when the label name is not found in the repository.
    """
    lookup = Operation(Query)
    repo = lookup.node(id=repository_id).__as__(Repository)
    repo.labels(first=50).nodes().__fields__('name', 'id')
    label_nodes = (await endpoint(lookup)).node.labels.nodes
    name_to_id = {node.name: node.id for node in label_nodes}
    mutation = Operation(Mutation)
    mutation_input = AddLabelsToLabelableInput(labelable_id=labelable_id, label_ids=[name_to_id[label]])
    mutation.add_labels_to_labelable(input=mutation_input)
    return (await endpoint(mutation)).add_labels_to_labelable
async def main():
    """Label the newest PR of Mause/media with 'automerge', then merge it.

    Reads the API token from ``token.txt`` in the working directory.
    """
    # Close the token file promptly instead of leaking the handle.
    with open('token.txt') as token_file:
        endpoint = await build_endpoint(token_file.read())
    qu = Operation(Query)
    repo = qu.repository(owner='Mause', name='media')
    repo.id()
    repo.pull_requests(first=1).nodes().__fields__('title', 'id')
    res = (await endpoint(qu)).repository
    pr_id = res.pull_requests.nodes[0].id
    await add_labels_to_labelable(endpoint, res.id, pr_id, 'automerge')
    # build_merge creates the whole mutation; the previous bare
    # Operation(Mutation) assignment was dead code and has been removed.
    op = build_merge([pr_id])
    res = await endpoint(op)
    print(res)
def schedule(self, for_week=None):
    """Fetch the latest season's named time ranges (weeks) and games,
    grouped per week.

    :param for_week: optional datetime-like value; when it falls strictly
        inside a week's [time, time + duration) window, only that
        ``(week, games)`` pair is returned.
    :return: ``(week, games)`` when *for_week* matches a window, otherwise
        a dict mapping week id -> ``(week, [games])``.
        NOTE(review): when *for_week* is given but matches no window, the
        full dict is returned — callers must handle both shapes.
    """
    op = Operation(schema.Query)
    season = op.seasons_connection(last=1)
    # Week metadata; duration_milliseconds is used for the window check below.
    season.nodes.named_time_ranges_connection(last=25).nodes.__fields__('id', 'name', 'time', 'subseason', 'duration_milliseconds')
    games = season.nodes.games_connection(last=80)
    games.nodes.time()
    games.nodes.named_time_range().id()
    games.nodes.home_team().__fields__('abbreviation', 'name', 'region_name', 'nickname')
    games.nodes.away_team().__fields__('abbreviation', 'name', 'region_name', 'nickname')
    games.nodes.status().__fields__('home_team_points', 'away_team_points', 'phase')
    games.nodes.availability().short_name()
    result = self._execute(op)
    weeks = {}
    season = result.seasons_connection.nodes[0]
    # Index each week by id, then bucket the games under their week.
    for week in season.named_time_ranges_connection.nodes:
        weeks[week.id] = (week, [])
    for game in season.games_connection.nodes:
        weeks[game.named_time_range.id][1].append(game)
    if for_week is not None:
        for week_id, (week, games) in weeks.items():
            if week.time < for_week < week.time + timedelta(milliseconds=week.duration_milliseconds):
                return week, games
    return weeks
def remove_protection(self, protection_rule_id):
    """Delete the branch-protection rule with the given id.

    The mutation only executes when ``settings.apply`` is enabled;
    otherwise it is built and discarded (dry run).
    """
    mutation = Operation(schema.Mutation)
    rule_input = schema.DeleteBranchProtectionRuleInput(branch_protection_rule_id=protection_rule_id)
    mutation.delete_branch_protection_rule(input=rule_input)
    if settings.apply:
        GitHubGraphQL().call(mutation)
def create_operation(owner, name, labels=(), issue_states=(), pr_states=()):
    """Build the repository query: labels, milestones and projects, plus
    issue and pull-request selections filtered by the given labels/states.
    """
    operation = Operation(schema.Query)
    repository = operation.repository(owner=owner, name=name)
    repository.labels(first=100).nodes.__fields__(name=True, color=True)
    repository.milestones(first=100).nodes.__fields__(id=True, state=True, due_on=True, title=True, description=True)
    repository.projects(first=100).nodes.__fields__(id=True, number=True, name=True, state=True)
    select_issues(repository, labels, issue_states)
    select_pull_requests(repository, labels, pr_states)
    return operation
def get_swaps_page(self, transactions_filter, skip):
    """Fetch one page of transactions with their swap details, retrying
    the endpoint every 2 seconds until it answers without errors.

    :return: list of transaction nodes, or [] when none came back.
    """
    op = Operation(schema.Query)
    txns = op.transactions(where=transactions_filter, skip=skip, first=self.page_size)
    txns.block_number()
    swaps = txns.swaps()
    swaps.log_index()
    swaps.pair().token0().symbol()
    swaps.pair().token1().symbol()
    swaps.amount0_in()
    swaps.amount1_in()
    swaps.amount0_out()
    swaps.amount1_out()
    swaps.amount_usd()
    while True:
        data = self.endpoint(op)
        if 'errors' not in data.keys():
            break
        print("Error getting data. Retrying in 2 secs.")
        sleep(2)
    interpreted = op + data
    return interpreted.transactions if hasattr(interpreted, 'transactions') else []
def push_updates(self):
    """Push the buffered updates to the database so they can be rendered
    by the graphics layer.

    :return: the interpreted mutation result.
    """
    times, payloads = zip(*self._update_buffer)
    intersections, distances, directions = zip(*payloads)
    print(times)
    print(intersections)
    mutation = Operation(schema.Mutation)
    pushed = mutation.push_update_buffer(times=times, intersections=intersections, distances=distances, directions=directions, search_algorithm=self._search_algorithm)
    pushed.time()
    car_location = pushed.new_car_loc()
    car_location.intersection()
    car_location.distance()
    car_location.direction()
    print(mutation)
    response = endpoint(mutation)
    outcome = mutation + response
    print(outcome)
    return outcome
def generateGQL(initViewer, currentUser, followingEndCursor, followerEndCursor):
    """Build the GraphQL query for a user's following/followers pages.

    :param initViewer: when truthy, also select the authenticated
        viewer's profile fields.
    :param currentUser: login of the user whose connections are queried.
    :param followingEndCursor: pagination cursor for the following
        connection; "" or None requests the first page.
    :param followerEndCursor: pagination cursor for the followers
        connection; "" or None requests the first page.
    :return: the assembled Operation.
    """
    op = Operation(Query)
    if initViewer:
        viewer = op.viewer()
        viewer.login()
        viewer.isSiteAdmin()
        viewer.email()
        viewer.name()
        viewer.updatedAt()
        viewer.company()
    user = op.user(login=currentUser)
    # "" and None both mean "no cursor yet". The original spelled these as
    # two separate equality branches (including the `== None` anti-pattern);
    # plain truthiness covers both cases with identical behavior.
    if followingEndCursor:
        following = user.following(first=100, after=followingEndCursor)
    else:
        following = user.following(first=100)
    if followerEndCursor:
        followers = user.followers(first=100, after=followerEndCursor)
    else:
        followers = user.followers(first=100)
    initQueryNodes(following)
    initQueryNodes(followers)
    return op
def create_bid(postcode: int, **kwargs) -> str:
    """
    Query the helpling API to create a new bid. Then, set the given
    parameters (or default values configured in
    ``DEFAULT_SEARCH_PARAMETERS``) to it. This is the first step to
    scraping offers for a region.

    :param postcode: The postcode for which to request a new bid
    :param kwargs: The parameters to set for the search.
    :return: The bidCode of the created bid
    :raises Exception: when the bid cannot be created or parametrized.
    """
    response = requests.post(BASE_URL + "v1/bids", {
        "bid[postcode]": postcode,
        "bid[checkout_version]": 1
    })
    bid_id = response.json().get("data").get("code")
    if bid_id is None:
        raise Exception(f"Bid for postcode {postcode} could not be created: {response.text}")
    print(f"Create bid for {postcode} ({bid_id}): OK")
    op = Operation(helpling_schema.Mutation)
    op.transition_bid_to_provider_selection(**{
        **DEFAULT_SEARCH_PARAMETERS,
        **kwargs,
        "bid_code": bid_id
    })
    result = gql_endpoint(op).get("data").get("transitionBidToProviderSelection")
    if result.get("success") is False:
        # The f-string stringifies the errors value; the original "+"
        # concatenation raised TypeError whenever the API returned a
        # non-string (e.g. a list of errors) here.
        raise Exception(f"Bid {bid_id} could not be parametrized: {result.get('errors')}")
    print(f"Parametrize bid {bid_id}: OK")
    return bid_id
def test_basic_operation_query(mock_urlopen):
    """Test if query with type sgqlc.operation.Operation() works."""
    configure_mock_urlopen(mock_urlopen, graphql_response_ok)
    test_schema = Schema()
    # Doctest runs (e.g. under nose) may have left these types registered;
    # drop any stale declarations before redefining them.
    for type_name in ('MyType', 'Query'):
        if type_name in test_schema:
            test_schema -= getattr(test_schema, type_name)

    class MyType(Type):
        __schema__ = test_schema
        i = int

    class Query(Type):
        __schema__ = test_schema
        my_type = MyType

    op = Operation(Query)
    op.my_type.i()
    endpoint = HTTPEndpoint(test_url)
    data = endpoint(op)
    eq_(data, json.loads(graphql_response_ok))
    check_mock_urlopen(mock_urlopen, query=bytes(op))
def standings(self):
    """Fetch the 8 teams with their division and latest-season standing,
    grouped by division and sorted by division rank.

    :return: dict mapping a division node -> list of its team nodes,
        each list ordered by ``standing.division_rank``.
    """
    op = Operation(schema.Query)
    teams = op.teams_connection(first=8)
    division = teams.nodes.division()
    division.name()
    division.id()
    division.abbreviation()
    teams.nodes.abbreviation()
    teams.nodes.nickname()
    teams.nodes.region_name()
    # Only the most recent season's standing edge is needed.
    season = teams.nodes.seasons_connection(last=1)
    standing = season.edges.standing()
    standing.__fields__()
    result = self._execute(op)
    standings = {}
    divs = {}
    for team in result.teams_connection.nodes:
        # Some hoop-jumping to use a single division object as the key and get things grouped nicely
        if team.division.id not in divs:
            divs[team.division.id] = team.division
        key = divs[team.division.id]
        if key not in standings:
            standings[key] = []
        standings[key].append(team)

    def standings_key(team):
        # Rank within the division, from the single selected season edge.
        return team.seasons_connection.edges[0].standing.division_rank

    for div in standings:
        standings[div] = sorted(standings[div], key=standings_key)
    return standings
def get_metric_data(self, label: str, source_id: str) -> List[MetricData]:
    """Fetch every MetricData field except ``id`` for the given source
    and label.

    NOTE(review): relies on the name-mangled ``_ContainerTypeMeta__fields``
    internals of the sgqlc type to enumerate field names — fragile across
    sgqlc versions; confirm when upgrading.
    """
    op = Operation(Query)
    wanted = list(MetricData._ContainerTypeMeta__fields)
    wanted.remove("id")
    op.metric_data(source_id=source_id, label=label).nodes().__fields__(*wanted)
    return (op + self.run(op)).metric_data.nodes
def by_ids(self, ids, select_fun: Callable[[shield.GameDetail], None] = None):
    """Query game details for the given ids, letting *select_fun* (or the
    default selector) choose the fields to fetch."""
    operation = Operation(shield.Viewer)
    details = operation.viewer.game_details_by_ids(ids=ids)
    apply_selector(details, shield.GameDetail, select_fun)
    return self.query(operation).viewer.game_details_by_ids
def lookup(self, season: int = 0, player_name: str = None, team_id: str = None, status=None, first=100, after=None, select_fun: Callable[[shield.Player], None] = None):
    """Search players by (partial) display name and/or current team for a
    season, returning up to *first* Player nodes with pagination cursors.

    :param season: season filter value; 0 is the default — presumably
        "current/any", TODO confirm against the shield API.
    :param player_name: substring match on the person's display name.
    :param team_id: restrict to players currently on this team.
    :param status: accepted but not used in the query below.
    :param first: page size; *after* is the pagination cursor.
    :param select_fun: optional custom field selector applied to each
        Player; when omitted a default Player/Person/Team selection is used.
    :return: list of player nodes, each annotated with its edge cursor
        via ``node.cursor``.
    """
    def add_team_person_fields(player):
        # A caller-supplied select_fun takes full control of the selection;
        # otherwise select default Player fields plus nested person and
        # current-team fields.
        if select_fun:
            select_fun(player)
        else:
            apply_selector(player, shield.Player)
            person = player.person()
            apply_selector(person, shield.PlayerPerson)
            team = player.current_team()
            apply_selector(team, shield.Team)

    op = Operation(shield.Viewer)
    players = op.viewer.players(season_season=season, person_display_name_contains=player_name, current_team_id=team_id, first=first, after=after)
    players.edges.cursor()
    player = players.edges.node()
    apply_selector(player, shield.Player, select_fun=add_team_person_fields)
    players = self.query(op)
    player_list = []
    for p in players.viewer.players.edges:
        # Surface each edge's cursor on its node so callers can resume
        # pagination from any returned player.
        p.node.cursor = p.cursor
        player_list.append(p.node)
    return player_list
def get(self, week, season_type, season=0):
    """Fetch team records (standings) for one week and join each record
    with its team, including division and conference.

    :param week: week number within the season.
    :param season_type: season-type discriminator passed through to the API.
    :param season: season value; defaults to 0.
    :return: list of ``(team, team_record)`` pairs; empty list when the
        API returns no standings edges.
    """
    logging.debug("Getting week %s, type %s, season %s", week, season_type, season)
    op = Operation(shield.Viewer)
    standings = op.viewer.standings(first=40, week_season_value=season, week_season_type=season_type, week_week_value=week)
    standing = standings.edges.node
    record = standing.team_records
    self._standard_fields(record, shield.TeamRecord)
    standings = self.query(op)
    if len(standings.viewer.standings.edges) == 0:
        return []
    # Only the first standings edge is consulted; it carries all records.
    team_records = standings.viewer.standings.edges[0].node.team_records
    team_ids = [tr.team_id for tr in team_records]

    def with_div_con(team):
        # Selector for the team lookup: identity fields plus division
        # and conference.
        team.id()
        team.full_name()
        team.nick_name()
        team.division()
        team.conference()

    teams = {
        t.id: t
        for t in self.nfl.team.by_ids(team_ids, select_fun=with_div_con)
    }
    # NOTE(review): records are indexed with ['team_id'] here while the
    # list above uses attribute access (tr.team_id) — assumes the record
    # type supports both; confirm against the sgqlc node type.
    return [(teams[team_record['team_id']], team_record) for team_record in team_records]
def get_candidates_for_bid(
        bid_id: str) -> List[helpling_schema.DecoratedPotentialCandidateEdge]:
    """
    Query the API for all (i.e. the first 1000) candidates for a given
    bid. The bid must have been parametrized already. Note that not all
    fields are actually requested from the backend.

    :param bid_id: Id of an already-parametrized bid
    :return: First 1000 candidates available for the bid
    """
    op = Operation(helpling_schema.Query)
    edges = op.customer_bid(code=bid_id).potential_candidates(first=1000).edges
    edges.node.price_per_hour()
    provider = edges.node.provider
    provider.__fields__("id", "firstname", "shortname", "default_profile_image", "pets", "windows", "ironing", "ratings_received_count", "verification_level", "documents", "performed_cleanings_count", "language_skills", "instabook_enabled")
    provider.avg_rating.total()
    provider.experience.__fields__()
    provider.distance_to_bid(bid_code=bid_id)
    return (op + gql_endpoint(op)).customer_bid.potential_candidates.edges
def game(self, gid):
    """Return full details for the game with id *gid*.

    :param gid: game id to look up.
    :return: the detailed game node, or None when the id is unknown or
        the detailed fetch window does not include it.
    """
    all_game_times = self.all_game_times()
    match = next((g for g in all_game_times if g.id == gid), None)
    if match is None:
        return None
    gametime = match.time
    op = Operation(schema.Query)
    # Re-fetch a 12-hour window around the game's start time with full details.
    games = op.games_connection(first=60, at_or_after_time=gametime, before_time=gametime + timedelta(hours=12))
    games.nodes.id()
    home_team = games.nodes.home_team()
    home_team.region_name()
    home_team.nickname()
    home_team.abbreviation()
    home_team.name()
    away_team = games.nodes.away_team()
    away_team.region_name()
    away_team.nickname()
    away_team.abbreviation()
    away_team.name()
    status = games.nodes.status()
    status.home_team_points()
    status.away_team_points()
    status.phase()
    status.quarter()
    games.nodes.stadium().__fields__('name')
    games.nodes.stadium().address().__fields__('locality', 'administrative_area_abbreviation')
    nodes = self._execute(op).games_connection.nodes
    # Previously this indexed [0] unconditionally, raising IndexError when
    # the window fetch did not contain the game; return None instead,
    # consistent with the not-found path above.
    return next((g for g in nodes if g.id == gid), None)
def _get_orders_page(self, orders_filter, skip):
    """Fetch one page of orders matching *orders_filter*, selecting all
    scalar order fields plus token symbols and owner id.

    :return: list of order nodes, or [] when none came back.
    """
    op = Operation(schema.Query)
    page = op.orders(where=orders_filter, skip=skip, first=self.page_size)
    page.id()
    page.order_id()
    page.buy_token().symbol()
    page.sell_token().symbol()
    page.max_sell_amount()
    page.price_numerator()
    page.price_denominator()
    page.owner().id()
    page.from_epoch()
    page.until_epoch()
    page.cancel_epoch()
    page.create_epoch()
    page.delete_epoch()
    page.from_batch_id()
    page.until_batch_id()
    page.bought_volume()
    page.sold_volume()
    page.tx_hash()
    interpreted = op + self.http_endpoint(op)
    if hasattr(interpreted, 'orders'):
        return interpreted.orders
    return []
def test_show_ops(self):
    """Smoke-test every operation builder by logging its rendered form."""
    sample_bill = self._build_sample_bill()
    sample_url = self._build_sample_url()
    LOGGER.warning(GraphQLClient.build_merge_operation(sample_bill))
    LOGGER.warning(GraphQLClient.build_link_operation(sample_url.id, sample_bill.id))
    LOGGER.warning(GraphQLClient.build_link_operation(sample_url.id, sample_bill.id, remove=True))
    LOGGER.warning(GraphQLClient.build_get_operation(sample_bill.id, ['id', 'name']))
    LOGGER.warning(GraphQLClient.build_delete_operation(sample_bill.id))
    LOGGER.warning(GraphQLClient.build_get_all_operation('bill', ['id', 'name', 'bill_number']))
    LOGGER.warning(GraphQLClient.build_get_all_operation('committee', ['id', 'name']))
    LOGGER.warning(GraphQLClient.build_get_all_operation('minutes', ['id', 'name', 'start_date_time']))
    LOGGER.warning(GraphQLClient.build_get_all_news_operation(['id']))
    LOGGER.warning(GraphQLClient.build_get_all_news_operation(
        ['id', 'title', 'published_at'],
        datetime(year=2020, month=1, day=1),
        datetime(year=2020, month=2, day=1)))
    # Chain three builders onto one shared bulk mutation.
    bulk_op = Operation(Mutation)
    bulk_op = GraphQLClient.build_merge_operation(sample_bill, bulk_op)
    bulk_op = GraphQLClient.build_link_operation(sample_url.id, sample_bill.id, bulk_op)
    LOGGER.warning(bulk_op)
def get_map(self):
    """Fetch the road graph from the server, cache it on the instance,
    and return it.
    """
    op = Operation(schema.Query)
    graph = op.search_tf().road_graph()
    graph.name()
    conn = graph.connections()
    conn.index()
    conn.start()
    conn.end()
    conn.start_name()
    conn.end_name()
    conn.length()
    conn.direction()
    node = graph.intersections()
    node.index()
    node.name()
    node.x()
    node.y()
    node.connections()
    init_state = graph.start_state()
    init_state.last_intersection()
    init_state.distance_since()
    init_state.direction()
    self._road_graph = (op + endpoint(op)).search_tf.road_graph
    return self._road_graph
def get_game_detail_ids(self, ids):
    """Look up the given game ids and return the games that carry a
    ``game_detail_id``, skipping any result lacking that attribute."""
    op = Operation(shield.Viewer)
    selection = op.viewer.league.games_by_ids(ids=ids)
    selection.id()
    selection.game_detail_id()
    fetched = self.nfl.query(op).viewer.league.games_by_ids
    return [g for g in fetched if hasattr(g, 'game_detail_id')]
def build_get_all_operation(class_name, fields=None, filter_=None):
    """Build a query selecting *fields* of every *class_name* object that
    matches *filter_* (None selects the field set chosen by the schema)."""
    op = Operation(Query)
    selection = getattr(op, class_name)(filter=filter_)
    if fields is not None:
        for field_name in fields:
            getattr(selection, field_name)()
    return op
def get_game_details(self, ids):
    """Fetch per-quarter scoring, clock, period and phase for the given
    game-detail ids; returns [] immediately for an empty id list."""
    if len(ids) == 0:
        return []
    op = Operation(shield.Viewer)
    detail = op.viewer.league.game_details_by_ids(ids=ids)
    detail.id()
    detail.game_clock()
    # Select the symmetric home/visitor scoring fields in one pass.
    for side in ('home', 'visitor'):
        for suffix in ('points_q1', 'points_q2', 'points_q3', 'points_q4', 'points_total', 'points_overtime', 'points_overtime_total'):
            getattr(detail, '%s_%s' % (side, suffix))()
    detail.period()
    detail.phase()
    return self.nfl.query(op).viewer.league.game_details_by_ids
def get_list(self, fields: 'list | None' = None, **kwargs) -> Generator[Interview, None, None]:
    """Yield interviews matching the given filter kwargs, paging through
    the headquarters API 20 records at a time.

    :param fields: node fields to select; empty/None selects a default set.
        (The old signature used a mutable default ``[]`` — replaced with
        None to avoid the shared-mutable-default pitfall; behavior is
        unchanged since empty inputs already fell back to the defaults.)
    :param kwargs: passed to InterviewFilter to build the ``where`` clause.
    :raises GraphQLError: when the endpoint reports errors.
    """
    where = InterviewFilter(**kwargs)
    take = 20
    skip = 0
    # Seed above `skip` so the loop body runs at least once; the real
    # count replaces this after the first page.
    filtered_count = 21
    if not fields:
        fields = [
            'id',
            'questionnaire_id',
            'questionnaire_version',
            'assignment_id',
            'responsible_id',
            'errors_count',
            'status',
        ]
    while skip < filtered_count:
        op = Operation(headquarters_schema.HeadquartersQuery)
        q = op.interviews(take=take, skip=skip, where=where)
        q.__fields__('filtered_count')
        q.nodes.__fields__(*fields)
        cont = self.endpoint(op)
        errors = cont.get('errors')
        if errors:
            raise GraphQLError(errors[0]['message'])
        res = (op + cont).interviews
        filtered_count = res.filtered_count
        yield from res.nodes
        skip += take
def get_json(self, game_detail_id):
    """Fetch the raw gameDetail JSON (per-quarter scores, scoring
    summaries, coin toss) for one game.

    :param game_detail_id: id of the game detail; falsy values
        short-circuit to None.
    :return: the ``data.viewer.league.gameDetail`` dict, or None when the
        id is missing or the query fails (the error is logged).
    """
    if not game_detail_id:
        return
    try:
        op = Operation(shield.Viewer)
        gd = op.viewer.league.game_detail(id=game_detail_id)
        gd.home_points_q1()
        gd.home_points_q2()
        gd.home_points_q3()
        gd.home_points_q4()
        gd.home_points_total()
        gd.home_points_overtime()
        gd.home_points_overtime_total()
        gd.visitor_points_q1()
        gd.visitor_points_q2()
        gd.visitor_points_q3()
        gd.visitor_points_q4()
        gd.visitor_points_total()
        gd.visitor_points_overtime()
        gd.visitor_points_overtime_total()
        #gd.game_injuries()
        gd.scoring_summaries()
        gd.coin_toss_results()
        #gd.live_home_team_game_stats()
        #gd.live_home_player_game_stats()
        #gd.live_visitor_team_game_stats()
        #gd.live_visitor_player_game_stats()
        result, json = self.nfl.query(op, return_json=True)
        # Unwrap down to the gameDetail payload; the interpreted `result`
        # object is not used.
        json = json['data']['viewer']['league']['gameDetail']
        return json
    except Exception as e:
        # Best-effort: log the failure and fall through to an implicit None.
        self.logger.exception("Error getting boxscore for %s", game_detail_id)
def get_pairs_page(self, pairs_filter, skip, **kwargs):
    """Fetch one page of pairs matching *pairs_filter*, retrying every
    2 seconds until the endpoint responds without errors.

    :return: list of pair nodes, or [] when none came back.
    """
    op = Operation(schema.Query)
    page = op.pairs(where=pairs_filter, skip=skip, first=self.page_size, **kwargs)
    page.id()
    token0 = page.token0()
    token0.symbol()
    token0.id()
    token0.decimals()
    page.token0_price()
    page.volume_token0()
    page.reserve0()
    token1 = page.token1()
    token1.symbol()
    token1.id()
    token1.decimals()
    page.token1_price()
    page.volume_token1()
    page.reserve1()
    page.volume_usd()
    page.reserve_eth()
    page.reserve_usd()
    while True:
        data = self.endpoint(op)
        if 'errors' not in data.keys():
            break
        print("Error getting data. Retrying in 2 secs.")
        sleep(2)
    interpreted = op + data
    if hasattr(interpreted, 'pairs'):
        return interpreted.pairs
    return []
def fetch_gql_speech(speech_id):
    """Fetch one speech with its minutes and speaker, flattened into a
    dict of display fields and politylink URLs.

    :param speech_id: id used in the GraphQL _SpeechFilter.
    :return: dict with speech/minutes/member ids, names and URLs; the
        minutes and member keys are only present when those relations
        exist on the result.
    """
    op = Operation(Query)
    speech = op.speech(filter=_SpeechFilter({'id': speech_id}))
    speech.id()
    speech.order_in_minutes()
    minutes = speech.belonged_to_minutes()
    minutes.id()
    minutes.name()
    minutes.ndl_min_id()
    member = speech.be_delivered_by_member()
    member.id()
    member.name()
    res = gql_client.endpoint(op)
    # The filter yields a list; take the first (presumably only) match.
    speech = (op + res).speech[0]
    minutes = speech.belonged_to_minutes
    member = speech.be_delivered_by_member
    speech_info = {'speech_id': speech.id}
    if minutes:
        speech_info['minutes_id'] = minutes.id
        speech_info['minutes_name'] = minutes.name
        speech_info['minutes_politylink_url'] = to_politylink_url(minutes.id)
        if minutes.ndl_min_id:
            # Deep link into the National Diet Library transcript page.
            speech_info[
                'speech_ndl_url'] = 'https://kokkai.ndl.go.jp/txt/{0}/{1}'.format(
                    minutes.ndl_min_id, speech.order_in_minutes)
    if member:
        speech_info['member_id'] = member.id
        speech_info['member_name'] = member.name
        speech_info['member_image_url'] = to_politylink_url(
            member.id, domain='image.politylink.jp')
        speech_info['member_politylink_url'] = to_politylink_url(member.id)
    return speech_info