def fetch(self, url):
    """Download a rule source and extract its files.

    ``url`` is a tuple as produced by the source configuration: index 0
    is the URL string to fetch and index 2 is a truthy value when a
    remote ".md5" checksum should be consulted before re-downloading.
    The whole tuple is forwarded to ``net.get``.

    Returns the result of ``self.extract_files`` on the cached file.
    Exits the process when running offline with no cached copy, or when
    the downloaded file cannot be copied into the cache.
    """
    net_arg = url
    checksum = url[2]
    url = url[0]
    tmp_filename = self.get_tmp_filename(url)

    # Offline mode: only a previously cached download can be used.
    if config.args().offline:
        if config.args().force:
            logger.warning("Running offline, skipping download of %s", url)
        logger.info("Using latest cached version of rule file: %s", url)
        if not os.path.exists(tmp_filename):
            logger.error("Can't proceed offline, "
                         "source %s has not yet been downloaded.", url)
            sys.exit(1)
        return self.extract_files(tmp_filename)

    # Unless forced, skip the download when the cached copy is fresh
    # (less than 15 minutes old) or the remote checksum is unchanged.
    if not config.args().force and os.path.exists(tmp_filename):
        if not config.args().now and \
                time.time() - os.stat(tmp_filename).st_mtime < (60 * 15):
            logger.info(
                "Last download less than 15 minutes ago. Not downloading %s.",
                url)
            return self.extract_files(tmp_filename)
        if checksum:
            if self.check_checksum(tmp_filename, url):
                logger.info("Remote checksum has not changed. "
                            "Not fetching.")
                return self.extract_files(tmp_filename)

    if not os.path.exists(config.get_cache_dir()):
        os.makedirs(config.get_cache_dir(), mode=0o770)

    # Lazy %-args instead of eager string formatting for logging.
    logger.info("Fetching %s.", url)
    try:
        # The context manager guarantees the temporary file is closed
        # (and removed) even when the download or copy raises; the
        # original leaked the handle on every error path.
        with tempfile.NamedTemporaryFile() as tmp_fileobj:
            net.get(net_arg, tmp_fileobj, progress_hook=self.progress_hook)
            shutil.copyfile(tmp_fileobj.name, tmp_filename)
    except URLError as err:
        # Fall back to the cached copy when one exists.
        if os.path.exists(tmp_filename):
            logger.warning(
                "Failed to fetch %s, "
                "will use latest cached version: %s", url, err)
            return self.extract_files(tmp_filename)
        # Bare raise preserves the original traceback; "raise err"
        # would re-anchor it here.
        raise
    except IOError as err:
        self.progress_hook_finish()
        logger.error("Failed to copy file: %s", err)
        sys.exit(1)
    # The original "except Exception as err: raise err" was a no-op
    # re-raise and has been removed; unexpected errors propagate as-is.
    self.progress_hook_finish()
    logger.info("Done.")
    return self.extract_files(tmp_filename)
def check_checksum(self, tmp_filename, url):
    """Return True if the local file's MD5 matches the remote checksum.

    Downloads ``url + ".md5"`` and compares it to the MD5 digest of
    ``tmp_filename``. On a match the cached file's mtime is refreshed so
    the freshness check keeps treating it as current. Any failure
    (network, I/O, decode) is logged and treated as "checksum changed"
    by returning False — this is deliberately best-effort.
    """
    try:
        checksum_url = url + ".md5"
        # Close the file deterministically; the original never closed
        # the open() result, leaking the handle.
        with open(tmp_filename, "rb") as fileobj:
            local_checksum = hashlib.md5(
                fileobj.read()).hexdigest().strip()
        remote_checksum_buf = io.BytesIO()
        # Lazy %-args instead of eager string formatting for logging.
        logger.info("Checking %s.", checksum_url)
        net.get(checksum_url, remote_checksum_buf)
        remote_checksum = remote_checksum_buf.getvalue().decode().strip()
        logger.debug("Local checksum=|%s|; remote checksum=|%s|",
                     local_checksum, remote_checksum)
        if local_checksum == remote_checksum:
            os.utime(tmp_filename, None)
            return True
    except Exception as err:
        logger.warning("Failed to check remote checksum: %s", err)
    return False
def update_sources():
    """Refresh the cached source index.

    Downloads the source index into a memory buffer, makes sure the
    cache directory exists, and hands the buffer to write_and_compare()
    together with the previously cached content. Raises
    ApplicationError when the download fails; returns 1 when the cache
    directory cannot be created.
    """
    global local_index_filename
    local_index_filename = sources.get_index_filename()
    initial_content = get_initial_content()

    with io.BytesIO() as index_buf:
        index_url = sources.get_source_index_url()
        logger.info("Downloading %s", index_url)
        try:
            net.get(index_url, index_buf)
        except Exception as err:
            raise exceptions.ApplicationError(
                "Failed to download index: %s: %s" % (index_url, err))

        cache_dir = config.get_cache_dir()
        if not os.path.exists(cache_dir):
            try:
                os.makedirs(cache_dir)
            except Exception as err:
                logger.error("Failed to create directory %s: %s",
                             cache_dir, err)
                return 1

        write_and_compare(initial_content=initial_content,
                          fileobj=index_buf)
def update_sources():
    """Download the source index and save it as the local index file.

    Raises ApplicationError when the download fails; returns 1 when the
    cache directory cannot be created.
    """
    index_path = sources.get_index_filename()

    with io.BytesIO() as download_buf:
        index_url = sources.get_source_index_url()
        logger.info("Downloading %s", index_url)
        try:
            net.get(index_url, download_buf)
        except Exception as err:
            raise exceptions.ApplicationError(
                "Failed to download index: %s: %s" % (index_url, err))

        cache_dir = config.get_cache_dir()
        if not os.path.exists(cache_dir):
            try:
                os.makedirs(cache_dir)
            except Exception as err:
                logger.error("Failed to create directory %s: %s",
                             cache_dir, err)
                return 1

        # Persist the downloaded index verbatim.
        with open(index_path, "wb") as outobj:
            outobj.write(download_buf.getvalue())
        logger.info("Saved %s", index_path)
def test_get0(self):
    """net.get over a file:// URL should yield the file's content."""
    output = io.BytesIO()
    source_url = "file:///%s/Makefile" % (os.getcwd())
    bytes_read, info = net.get(source_url, output)
    content = output.getvalue()
    self.assertTrue(b"clean" in content)