def load_p2p(date, city_id):
    """Load cached point-to-point inbound migration data for one city/date.

    Reads the JSONP response previously written by ``crawl_p2p`` (crawling it
    first if the file is missing) and strips the JSONP callback wrapper to
    recover the JSON payload.

    Args:
        date: Date string identifying the crawl (format per ``FilepathMapper``).
        city_id: City code of the target city.

    Returns:
        The ``data.list`` payload of the response (a list of records).

    Raises:
        FileNotFoundError: If the crawl failed and the file still does not exist.
        ValueError / KeyError: If the cached file is not a well-formed JSONP
            response with a ``data.list`` payload.
    """
    path = FilepathMapper.p2p(date, city_id, "in")
    if not os.path.exists(path):
        logger.info("Obtaining the latest point to point data")
        # NOTE(review): crawl_p2p fetches ALL cities, not just city_id; if the
        # request for this particular city fails, open() below will raise.
        crawl_p2p("in", date)
    logger.info(f"Reading <{city_id}> <{date}> point to point data")
    with open(path, "r", encoding="utf-8") as f:
        res = f.read()
    # Strip the JSONP wrapper ``cb(...)`` by slicing between the FIRST "(" and
    # the LAST ")". The previous ``res.split("(")[-1][:-1]`` truncated the
    # payload whenever the JSON body itself contained "(", and ``[:-1]``
    # removed the wrong character when the file ended with a newline.
    payload = res[res.index("(") + 1 : res.rindex(")")]
    return json.loads(payload)["data"]["list"]
def crawl_p2p(direction: str, date=None):
    """Download point-to-point migration data for every known city.

    Queries the Baidu Qianxi ``cityrank`` JSONP endpoint once per city
    returned by ``get_city_codes`` and writes each raw response to the path
    given by ``FilepathMapper.p2p``. Sleeps ``SLEEP_SEC`` between requests
    as rate limiting.

    Args:
        direction: Either ``"in"`` or ``"out"`` (migration direction).
        date: Date string for the data; defaults to yesterday's date,
            computed at call time.

    Raises:
        ValueError: If ``direction`` is not ``"in"`` or ``"out"``.
    """
    # Raise instead of assert: asserts are stripped under ``python -O``.
    if direction not in ("in", "out"):
        raise ValueError(f"direction must be 'in' or 'out', got {direction!r}")
    if date is None:
        # Evaluate lazily. The original default ``date=yesterday()`` was
        # computed ONCE at import time, so a long-running process would keep
        # crawling a stale date forever.
        date = yesterday()
    # Fetch the city list once; the original called get_city_codes() twice
    # (once for len(), once to iterate).
    city_codes = get_city_codes()
    total = len(city_codes)
    for i, city_record in enumerate(city_codes):
        time.sleep(SLEEP_SEC)  # be polite to the endpoint
        city_id = city_record["code"]
        logger.info(f"[{i+1}/{total}]: {city_record['city']} ({city_id})")
        query = f"https://huiyan.baidu.com/migration/cityrank.jsonp?dt=city&id={city_id}&type=move_{direction}&date={date}"
        logger.info(f"Getting {query}")
        res = requests.get(query)
        if res.status_code == 200:
            logger.info("Success.")
            with open(FilepathMapper.p2p(date, city_id, direction), "w", encoding="utf-8") as f:
                f.write(res.text)
        else:
            logger.warning(f"Bad response code {res.status_code} for {city_record['city']}")