def fetch_pathways(site, client, page_size=None):
    """Fetch every pathway from the catalog API and copy it into ``site``.

    Pages through ``client.pathways.get`` starting at page 1, handing each
    result to ``parse_pathway`` until the API reports no next page.

    Args:
        site: Target site, passed through to ``parse_pathway``.
        client: Catalog API client exposing ``pathways.get``.
        page_size: Optional page size forwarded to the API; ``None`` lets the
            API use its default.
    """
    next_page = 1
    while next_page:
        pathways = client.pathways.get(exclude_utm=1, page=next_page, page_size=page_size)
        for pathway in pathways['results']:
            # Lazy %-args: the message is only formatted if INFO is enabled.
            logger.info('Copying pathway "%s"', pathway['name'])
            parse_pathway(site, pathway)
        # Advance, or stop (None is falsy) when the API reports no next page.
        next_page = next_page + 1 if pathways['next'] else None
def fetch_pathways(site, client, page_size=None):
    """Copy all pathways from the catalog API into ``site``.

    Walks the paginated ``client.pathways.get`` endpoint one page at a time
    and runs ``parse_pathway`` on every result.

    Args:
        site: Target site handed to ``parse_pathway``.
        client: Catalog API client exposing ``pathways.get``.
        page_size: Page size forwarded to the API (``None`` = API default).
    """
    page = 1
    while True:
        batch = client.pathways.get(exclude_utm=1, page=page, page_size=page_size)
        for pathway in batch["results"]:
            logger.info(f'Copying pathway "{pathway["name"]}"')
            parse_pathway(site, pathway)
        # The API signals the final page with a falsy "next" value.
        if not batch["next"]:
            break
        page += 1
def fetch_pathways(site, site_config, page_size=None):
    """Fetch every pathway from the catalog HTTP API and copy it into ``site``.

    Builds the ``pathways/`` URL from ``site_config.catalog_api_url`` and pages
    through it with ``site_config.api_client``, handing each result to
    ``parse_pathway``.

    Args:
        site: Target site, passed through to ``parse_pathway``.
        site_config: Configuration object providing ``api_client`` (a
            requests-style client — it returns responses with
            ``raise_for_status``/``json``; confirm against the caller) and
            ``catalog_api_url``.
        page_size: Optional page size forwarded as a query parameter; ``None``
            lets the API use its default.

    Raises:
        HTTPError (or the client's equivalent) via ``raise_for_status`` when
        the API responds with an error status.
    """
    api_client = site_config.api_client
    pathways_url = urljoin(site_config.catalog_api_url, "pathways/")
    next_page = 1
    while next_page:
        response = api_client.get(
            pathways_url,
            params={"exclude_utm": 1, "page": next_page, "page_size": page_size},
        )
        # Fail fast on non-2xx instead of parsing an error body as JSON.
        response.raise_for_status()
        pathways = response.json()
        for pathway in pathways["results"]:
            # Lazy %-args: the message is only formatted if INFO is enabled.
            logger.info('Copying pathway "%s"', pathway["name"])
            parse_pathway(site, pathway)
        # Advance, or stop (None is falsy) when the API reports no next page.
        next_page = next_page + 1 if pathways["next"] else None
def test_parse_pathway(self):
    """Parsing pathway data creates a pathway linked to its parsed program."""
    # We assume that programs are parsed separately from pathway data.
    parse_program(self.site, self.PROGRAM1_DATA)
    pathway = parse_pathway(self.site, self.PATHWAY1_DATA)
    # Scalar attributes mirror the source data field-for-field.
    for field in ('uuid', 'name', 'email', 'org_name'):
        assert getattr(pathway, field) == self.PATHWAY1_DATA[field]
    # The previously parsed program is attached to the pathway.
    assert str(pathway.programs.all()[0].uuid) == self.PROGRAM1_DATA['uuid']