def main():
    """Bulk-link (or, with --delete, bulk-unlink) relationship pairs read from a CSV.

    The input file (``args.file``) must contain ``from_id`` and ``to_id``
    columns; each row describes one relationship edge.
    """
    client = GraphQLClient()
    df = pd.read_csv(args.file)
    from_ids = df['from_id']
    to_ids = df['to_id']
    if args.delete:
        # --delete flag flips the operation from merge to removal
        client.bulk_unlink(from_ids=from_ids, to_ids=to_ids)
        LOGGER.info(f'deleted {len(df)} relationships')
    else:
        client.bulk_link(from_ids=from_ids, to_ids=to_ids)
        LOGGER.info(f'merged {len(df)} relationships')
def test_bulk_link(self):
    """Round-trip check: bulk_link attaches one URL to a bill and to a minutes
    record, and bulk_unlink detaches it again."""
    client = GraphQLClient()
    url = self._build_sample_url()
    bill = self._build_sample_bill()
    minutes = self._build_sample_minutes()
    from_ids = [url.id, url.id]
    to_ids = [bill.id, minutes.id]

    data = client.bulk_link(from_ids, to_ids)
    # response keys op0/op1 correspond positionally to the input pairs
    for op_key, target in (('op0', bill), ('op1', minutes)):
        assert data[op_key]['from']['id'] == url.id
        assert data[op_key]['to']['id'] == target.id
    for target in (bill, minutes):
        assert url.id in [linked.id for linked in client.get(target.id).urls]

    data = client.bulk_unlink(from_ids, to_ids)
    for op_key, target in (('op0', bill), ('op1', minutes)):
        assert data[op_key]['from']['id'] == url.id
        assert data[op_key]['to']['id'] == target.id
    for target in (bill, minutes):
        assert url.id not in [linked.id for linked in client.get(target.id).urls]
def main():
    """Remove every news link from the selected bills and/or minutes.

    Object classes are opted in via CLI flags (``args.bill``, ``args.minutes``);
    each selected object is fetched with its ``id`` and ``news`` fields, then
    all of its attached news IDs are bulk-unlinked from it.
    """
    gql_client = GraphQLClient()
    objects = []
    if args.bill:
        bills = gql_client.get_all_bills(['id', 'news'])
        LOGGER.info(f'fetched {len(bills)} bills to clean')
        objects += bills
    if args.minutes:
        # renamed from camelCase `minutesList` to PEP 8 snake_case
        minutes_list = gql_client.get_all_minutes(['id', 'news'])
        LOGGER.info(f'fetched {len(minutes_list)} minutes to clean')
        objects += minutes_list
    LOGGER.info(f'registered {len(objects)} objects to clean')
    for obj in objects:
        # comprehension instead of list(map(lambda ...)) — idiomatic and clearer
        news_ids = [news.id for news in obj.news]
        if news_ids:
            # one unlink call per object: pair every news ID with this object's ID
            gql_client.bulk_unlink(news_ids, [obj.id] * len(news_ids))
            LOGGER.info(f'removed {len(news_ids)} news links from {obj.id}')