def init_local(self, cls, obj):
    """Wire the scraping hooks onto the job class and instance.

    Installs the module-level output/format handlers on the class,
    points the instance's core worker at base_scrape_alexa, and appends
    the resolved output-file name plus the add_http flag to the
    instance's base argument list.
    """
    # Class-level handlers first; they are independent of the instance.
    cls.out_base = out_base
    cls.out_format = out_format
    # The instance delegates its actual scraping work to the base scraper.
    obj.core = base_scrape_alexa
    extra_args = [
        get_out_name(self.name, obj.args.job_name, obj.args.out_file),
        obj.args.add_http,
    ]
    obj.base_args += extra_args
def err_base(self, err_item):
    """Record a failed item by appending it, utf-8 encoded, to this
    job's error file under err_dir (file is created if missing)."""
    target = path.join(
        err_dir,
        get_out_name(self.name, self.args.job_name, self.args.out_file),
    )
    with open(target, 'a+') as sink:
        # Equivalent to Py2 `print >> sink, ...`: item then a newline.
        sink.write(err_item.encode('utf8') + '\n')
def out_base(self, out_item):
    """Record one successfully scraped item: bump the success counter
    and append the item, utf-8 encoded, to this job's output file."""
    self.status_good += 1
    out_path = path.join(
        out_dir,
        get_out_name(self.name, self.args.job_name, self.args.out_file),
    )
    with open(out_path, 'a+') as sink:
        # Equivalent to Py2 `print >> sink, ...`: item then a newline.
        sink.write(out_item.encode('utf8') + '\n')
def scrape_alexa(url, browser_args, job_name, out_name, add_http):
    """Thin wrapper: resolve the output file name, then delegate the
    actual scrape to base_scrape_alexa.

    NOTE(review): `name` is a free variable here — the sibling methods
    in this file all use `self.name` instead. Presumably `name` is a
    module-level global defined outside this chunk; verify that it
    exists, otherwise this raises NameError at call time.
    """
    base_scrape_alexa(url, browser_args, get_out_name(name, job_name, out_name), add_http)
def err_base(self, err_item):
    """Append one failed item, utf-8 encoded, to this job's error file.

    NOTE(review): this is a byte-for-byte duplicate of the err_base
    defined earlier in this file; whichever definition comes later in
    the enclosing scope silently shadows the other. Consider removing
    one copy.
    """
    err_name = get_out_name(self.name, self.args.job_name, self.args.out_file)
    # 'a+' opens for append, creating the file if it does not exist.
    with open(path.join(err_dir, err_name),'a+') as f:
        print >> f, err_item.encode('utf8')