# Crawler for the Hainan provincial government procurement site.
# Saving and get_link are assumed to be defined/imported elsewhere in this module.
import logging


class Crawler():
    def __init__(self):
        self.url = 'http://www.ccgp-hainan.gov.cn/'
        self.mongo = Saving()

    def __call__(self, nd=0):
        self.nd = nd
        updatelist = []  # default to empty so a failed fetch still saves cleanly
        try:
            updatelist = get_link(self.nd)
        except Exception as e:
            logging.error(e)
        self.mongo.save(self.url, updatelist)
        # "Hainan provincial procurement site, updated: (N)"
        logging.info(u'海南省政府采购网,更新:(%s)' % len(updatelist))
        return True
# Crawler for the Jiangsu government procurement site.
import logging


class Crawler():
    def __init__(self):
        self.url = 'http://www.ccgp-jiangsu.gov.cn'
        self.mongo = Saving()

    def __call__(self, nd=0):
        self.nd = nd
        updatelists = []  # default to empty so a failed fetch still saves cleanly
        try:
            updatelists = get_link(self.nd)
        except Exception as e:
            logging.error(e)
        self.mongo.save(self.url, updatelists)
        # "Jiangsu procurement site, updated: (N)"
        logging.info(u'江苏政府采购网,更新:(%s)' % len(updatelists))
        return True
# Crawler for the Inner Mongolia government procurement site; listings are fetched by keyword search.
import logging


class Crawler():
    def __init__(self):
        self.url = 'http://www.ccgp-neimenggu.gov.cn/'
        self.mongo = Saving()

    def __call__(self, nd, keyword):
        self.nd = nd
        self.keyword = keyword
        updatelists = []  # default to empty so a failed fetch still saves cleanly
        try:
            updatelists = get_link(self.nd, self.keyword)
        except Exception as e:
            logging.error(e)
        self.mongo.save(self.url, updatelists)
        # "Inner Mongolia procurement site, updated: (N)"
        logging.info(u'内蒙古政府采购网,更新:(%s)' % len(updatelists))
        return True
# Crawler for the Shaanxi provincial government procurement site.
# datas1 and datas2 are defined elsewhere in this module.
import logging


class Crawler():
    def __init__(self):
        self.url = 'http://www.ccgp-shaanxi.gov.cn/'
        self.mongo = Saving()

    def __call__(self, nd=0):
        self.nd = nd
        updatelists = []
        for dat in [datas1, datas2]:
            try:
                updatelists += get_link(dat, self.nd)
            except Exception as e:
                logging.error(e)
        self.mongo.save(self.url, updatelists)
        # "Shaanxi provincial procurement site, updated: (N)"
        logging.info(u'陕西省政府采购网,更新:(%s)' % len(updatelists))
        return True
# Crawler for the Sichuan government procurement site; crawls each year from 2012 through 2018.
import logging


class Crawler():
    def __init__(self):
        self.url = 'http://www.ccgp-sichuan.gov.cn'
        self.mongo = Saving()

    def __call__(self, nd=0):
        self.nd = nd
        updatelists = []
        for year in xrange(2012, 2019):  # local loop variable instead of the original self.ear
            try:
                updatelists += get_link(year, self.nd)
            except Exception as e:
                logging.error(e)
        self.mongo.save(self.url, updatelists)
        # "Sichuan procurement site, updated: (N)"
        logging.info(u'四川政府采购网,更新:(%s)' % len(updatelists))
        return True
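For reference, a minimal driver sketch showing how these per-site Crawler classes might be run together. The module names (hainan, jiangsu, neimenggu, shaanxi, sichuan) and the sample keyword passed to the Inner Mongolia crawler are assumptions for illustration, not names taken from the repository.

# Hypothetical driver; the module names and the sample keyword below are assumptions.
import logging

import hainan
import jiangsu
import neimenggu
import shaanxi
import sichuan

logging.basicConfig(level=logging.INFO)


def run_all(nd=0, keyword=u'采购'):
    # Each module exposes a callable Crawler: __call__ fetches links, saves them, and logs the count.
    for module in (hainan, jiangsu, shaanxi, sichuan):
        module.Crawler()(nd)
    # The Inner Mongolia crawler additionally requires a search keyword.
    neimenggu.Crawler()(nd, keyword)


if __name__ == '__main__':
    run_all()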