def runDict(self, url, conf):
    rule = Rule()
    # crawler_list returns (rows, next_page); only the rows matter here
    result, _ = rule.crawler_list(url, conf)
    # TODO: persist the data to a database
    dic_list = []
    for row in conf['columns']:
        dic_list.append(row['名称'])  # '名称' = column name, used as the CSV header
    # Write the rows to a CSV file (a sketch of Csv_base follows at the end of this snippet)
    csv = Csv_base()
    csv.write_csv_file_dictLines(file_path='../data/xuexi111.csv',
                                 strs=result, fieldnames=dic_list)
def crawlerNext(self, conf, url=''):
    rule = Rule()
    csv = Csv_base()
    # Collect the CSV header from the configured column names
    list_list = []
    for row in conf['columns']:
        list_list.append(row['名称'])  # '名称' = column name
    # crawler_list returns the scraped rows plus the URL of the next page, if any
    result, next_page = rule.crawler_list(url, conf)
    print(result)
    if result:
        csv.write_csv_file_dictLines(file_path='../data/xuexi111List.csv',
                                     strs=result, fieldnames=list_list)
    # Follow pagination recursively until no next page is reported
    # (an iterative alternative is sketched below)
    if next_page:
        self.crawlerNext(conf, url=next_page)
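# crawlerNext recurses once per page, so Python's default recursion limit
# (roughly 1000 frames) caps how many pages it can follow before raising
# RecursionError. An iterative variant under the same assumed interfaces
# (Rule, Csv_base, and crawler_list returning (rows, next_page)) avoids
# that limit. This is a sketch, not part of the original project:
def crawlerAll(self, conf, url=''):
    rule = Rule()
    writer = Csv_base()
    fieldnames = [row['名称'] for row in conf['columns']]
    # Loop instead of recursing: crawler_list hands back the next URL,
    # and an empty/None next page ends the loop
    while url:
        result, url = rule.crawler_list(url, conf)
        if result:
            writer.write_csv_file_dictLines(file_path='../data/xuexi111List.csv',
                                            strs=result, fieldnames=fieldnames)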
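# Rule and Csv_base are imported from elsewhere in the project and are not
# shown in this snippet. Below is a minimal sketch of what
# Csv_base.write_csv_file_dictLines presumably does, inferred only from the
# keyword arguments used above (file_path, strs, fieldnames) and built on
# the stdlib csv.DictWriter; the body is an assumption, not the project's
# actual implementation.
import csv as csv_module  # aliased because the methods above bind 'csv' locally
import os


class Csv_base:
    def write_csv_file_dictLines(self, file_path, strs, fieldnames):
        """Append a list of dict rows to file_path, writing the header only once."""
        write_header = not os.path.exists(file_path)
        with open(file_path, 'a', newline='', encoding='utf-8') as f:
            writer = csv_module.DictWriter(f, fieldnames=fieldnames)
            if write_header:
                writer.writeheader()
            writer.writerows(strs)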