def main(start=0, end=None):
    """Fan out pairwise crawling jobs over the cities in ``const.city_code``.

    For every origin city index in ``[start, end)`` that is not already
    complete, schedules ``Crawler.get_all_info(origin, destination)`` for
    every other city on a process pool, then waits for all jobs to finish.

    Parameters:
        start: index of the first origin city to process (inclusive).
        end: index one past the last origin city to process; defaults to
            the full city list. (Resolved at call time — the original
            ``end=len(const.city_code)`` default was frozen at import.)
    """
    city_list = const.city_code
    if end is None:
        end = len(city_list)
    crawler = Crawler()
    # Leave a couple of cores free for the rest of the system, but never
    # request fewer than one worker (cpu_count() - 2 is <= 0 on small boxes
    # and Pool() would raise ValueError).
    workers = max(1, multiprocessing.cpu_count() - 2)
    # Context manager guarantees the pool is torn down even if scheduling
    # raises part-way through.
    with multiprocessing.Pool(workers) as pool:
        for i in range(int(start), int(end)):
            # check_compete presumably flags origins that are already
            # fully crawled, enabling resume — TODO confirm semantics.
            if crawler.check_compete(city_list[i]):
                continue
            for j in range(len(city_list)):
                if i != j:
                    pool.apply_async(
                        crawler.get_all_info,
                        (city_list[i], city_list[j]),
                    )
        # close() stops new submissions; join() blocks until every queued
        # task has finished (the with-block alone would terminate early).
        pool.close()
        pool.join()