def crawling(self):
    """Download ``self.url`` and write the raw response body to a local file.

    The response is streamed straight to disk via pycurl's WRITEFUNCTION
    callback, so the whole body is never held in memory.
    """
    # NOTE(review): attribute is spelled 'filname' here but 'filename'
    # elsewhere in this file -- confirm which one is actually set on self.
    curl = Curl()
    curl.set_url(self.url)
    try:
        with open(self.filname, "wb") as output:
            curl.set_option(pycurl.WRITEFUNCTION, output.write)
            curl.get()
    finally:
        # Fix: release the curl handle even when open() or the transfer
        # raises; the original leaked it on any error.
        curl.close()
def crawling(self):
    """Fetch ``self.url`` with curl, saving the response body to ``self.filname``.

    NOTE(review): this is a duplicate definition of ``crawling`` -- at import
    time this second ``def`` shadows the first; confirm which one is intended.
    """
    curl = Curl()
    curl.set_url(self.url)
    try:
        # Context manager guarantees the output file is flushed/closed.
        with open(self.filname, "wb") as output:
            curl.set_option(pycurl.WRITEFUNCTION, output.write)
            curl.get()
    finally:
        # Fix: curl.close() must run even if the download fails, otherwise
        # the libcurl handle leaks.
        curl.close()
def get_zip(self, url, filename):
    """Fetch *url* with curl and save the raw response bytes to *filename*.

    :param url: resource to download
    :param filename: local path opened in binary-write mode
    """
    c = Curl()
    try:
        # Fix: the original opened/closed fp manually, leaking both the
        # file object and the curl handle if the transfer raised.
        with open(filename, "wb") as fp:
            # Original call order preserved (get -> WRITEDATA -> perform);
            # the Curl wrapper's exact semantics are not visible here --
            # TODO(review): confirm get() does not already perform the
            # transfer before WRITEDATA is attached.
            c.get(url)
            c.set_option(c.WRITEDATA, fp)
            c.perform()
    finally:
        c.close()
def saveFile2Local(self, url):
    """Derive a local file name for *url* and download the resource into it.

    ``getFileNameByUrl`` is expected to populate ``self.filename``; when no
    usable name results, the download is silently skipped (original
    behavior preserved).
    """
    self.getFileNameByUrl(url)
    if self.filename:
        curl = Curl()
        try:
            with open(self.filename, "wb") as output:
                curl.set_url(url)
                # Stream the body directly to the open file.
                curl.set_option(pycurl.WRITEFUNCTION, output.write)
                curl.get()
        finally:
            # Fix: release the curl handle even when the transfer fails;
            # the original leaked it on any exception.
            curl.close()
        Log4Spider.downLog(self, "downloaded a file:[[[", self.filename, "]]]")
def fetch_url(url, nobody=0, timeout=30, follow_redirect=0, agent=USER_AGENT):
    """Fetch *url* using curl.

    :param url: URL to fetch
    :param nobody: 1 to request headers only (no response body)
    :param timeout: transfer timeout in seconds
    :param follow_redirect: 1 to follow HTTP redirects
    :param agent: User-Agent header value
    :returns: ``(curl_handle, body_buffer)`` on success,
        ``(None, None)`` on any transfer failure
    """
    # Fix: pycurl's WRITEFUNCTION callback receives *bytes* on Python 3,
    # so the sink must be BytesIO -- the original io.StringIO raised
    # TypeError on every body write.  Callers should decode
    # ``buf.getvalue()`` themselves -- TODO(review): confirm no caller
    # relies on getting a str buffer back.
    buf = io.BytesIO()
    c = Curl()
    c.set_option(pycurl.USERAGENT, agent)
    c.set_option(pycurl.URL, url.encode('utf-8'))
    c.set_option(pycurl.NOBODY, nobody)
    c.set_option(pycurl.FOLLOWLOCATION, follow_redirect)
    c.set_option(pycurl.WRITEFUNCTION, buf.write)
    c.set_option(pycurl.TIMEOUT, timeout)
    try:
        c.get()
    except Exception:
        # Fix: narrowed from a bare ``except:`` (which also trapped
        # KeyboardInterrupt/SystemExit); the best-effort (None, None)
        # contract for callers is preserved.  Dead code removed: unused
        # ``attempt`` counter and the no-op ``r"%s" % url`` copy.
        return (None, None)
    return (c, buf)