def packages_file(self, uri):
    """
    Called from apt_proxy.py when files get updated so we can update
    our fake lists/ directory and sources.list.

    Only index files ('Packages'/'Release') are registered; anything
    else just triggers an unload of the in-memory database.
    """
    if basename(uri) in ("Packages", "Release"):
        log.msg("REGISTERING PACKAGE:"+uri,'apt_pkg')
        # The full stat result is recorded (the name 'mtime' is historical);
        # it is stored so later code can compare freshness per URI.
        stat_info = os.stat(self.factory.cache_dir+'/'+uri)
        self.packages[uri] = stat_info
        self.unload()
def import_file(factory, dir, file):
    """
    Import a .deb or .udeb into cache from given filename

    factory: the running factory (provides cache_dir and, optionally,
             access_times for recording per-file access times).
    dir:     directory containing the file to import.
    file:    bare filename; ignored unless it ends in .deb or .udeb.

    NOTE(review): 'dir' and 'file' shadow Python builtins — renaming
    them would change the keyword-argument interface, so left as-is.
    """
    # Cheap extension check before doing any real work.
    if file[-4:]!='.deb' and file[-5:]!='.udeb':
        log.msg("Ignoring (unknown file type):"+ file, 'import')
        return

    log.debug("considering: " + dir + '/' + file, 'import')
    try:
        # Ask the mirror layout where this package would live in the cache.
        paths = get_mirror_path(factory, dir+'/'+file)
    except SystemError:
        # get_mirror_path raises SystemError on unparsable/corrupt debs.
        log.msg(file + ' skipped - wrong format or corrupted', 'import')
        return

    if paths:
        if len(paths) != 1:
            log.debug("WARNING: multiple ocurrences", 'import')
            log.debug(str(paths), 'import')
        # Arbitrarily pick the first candidate when several match.
        cache_path = paths[0]
    else:
        # Not listed in any Packages file we know: read the deb's own
        # control data and pick the closest known version of that package.
        log.debug("Not found, trying to guess", 'import')
        info = AptDpkgInfo(dir+'/'+file)
        cache_path = closest_match(info,
                                get_mirror_versions(factory, info['Package']))

    if cache_path:
        log.debug("MIRROR_PATH:"+ cache_path, 'import')
        src_path = dir+'/'+file
        dest_path = factory.cache_dir+cache_path

        if not os.path.exists(dest_path):
            log.debug("IMPORTING:" + src_path, 'import')
            # Normalise any '/./' components the mirror path may contain.
            dest_path = re.sub(r'/\./', '/', dest_path)
            if not os.path.exists(dirname(dest_path)):
                os.makedirs(dirname(dest_path))
            # Create/lock the destination before copying so concurrent
            # apt-proxy readers don't see a half-written file; copy2 then
            # writes the data and preserves the source's timestamps.
            f = open(dest_path, 'w')
            fcntl.lockf(f.fileno(), fcntl.LOCK_EX)
            f.truncate(0)
            shutil.copy2(src_path, dest_path)
            f.close()
            # If the factory tracks access times, seed this entry with
            # the source file's atime so cleanup ordering stays sensible.
            if hasattr(factory, 'access_times'):
                atime = os.stat(src_path)[stat.ST_ATIME]
                factory.access_times[cache_path] = atime
            log.msg(file + ' imported', 'import')
        else:
            log.msg(file + ' skipped - already in cache', 'import')

    else:
        log.msg(file + ' skipped - no suitable backend found', 'import')
def load(self):
    """
    Regenerates the fake configuration and load the packages server.

    Rebuilds the fake apt 'lists/' tree and sources.list from
    self.packages, points apt_pkg at our private config, and opens the
    package cache.  Idempotent: does nothing once self.loaded is set.
    """
    if not self.loaded:
        log.msg("Loading Packages database for "+self.status_dir,'apt_pkg')
        # Start from a clean lists/ directory each time.
        shutil.rmtree(self.status_dir+'/apt/lists/')
        os.makedirs(self.status_dir+'/apt/lists/partial')
        sources = open(self.status_dir+'/'+'apt/etc/sources.list', 'w')
        for file in self.packages.keys():
            # we should probably clear old entries from self.packages and
            # take into account the recorded mtime as optimization
            fake_uri='http://apt-proxy:'+file
            source_line='deb '+dirname(fake_uri)+'/ /'
            listpath=(self.status_dir+'/apt/lists/'
                    +apt_pkg.URItoFileName(fake_uri))
            sources.write(source_line+'\n')

            try:
                # we should empty the directory instead
                os.unlink(listpath)
            except OSError:
                # BUG FIX: narrowed from a bare 'except:' — only a missing
                # file (or similar FS error) is expected here; a bare except
                # also swallowed KeyboardInterrupt/SystemExit.
                pass
            os.symlink('../../../../../'+file, listpath)
        sources.close()

        for key, value in self.local_config.items():
            apt_pkg.Config[key] = value
        apt_pkg.InitSystem()

        if log.isEnabled('apt'):
            self.cache = apt_pkg.GetCache()
        else:
            # apt_pkg prints progress messages to stdout, disable
            self.__save_stdout()
            self.cache = apt_pkg.GetCache()
            self.__restore_stdout()
        self.records = apt_pkg.GetPkgRecords(self.cache)
        self.loaded = 1
def factoryConfig(factory, shell=None):
    "Loads the configuration file into 'factory'"
    # Fallback values used when the config file omits an option.
    defaults = {
        'address': '',
        'port': '9999',
        'min_refresh_delay': '30',
        'complete_clientless_downloads': '0',
        'debug': '0',
        'telnet_port': '0',
        'telnet_user': '',
        'telnet_pass': '',
        'timeout': '30',
        'cleanup_freq': '600',
        'cache_dir': '/var/cache/apt-proxy',
        'max_versions': '3',
        'max_age': '10',
        'import_dir': '/var/cache/apt-proxy/import',
        'disable_pipelining': '1',
        'passive_ftp': 'on',
        'dynamic_backends': 'on',
        'http_proxy': ''
        }

    conf = MyConfigParser(defaults)
    # Prefer the newer config file names, falling back to the oldest.
    if os.path.exists('/etc/apt-proxy/apt-proxy-v2.conf'):
        conf.read('/etc/apt-proxy/apt-proxy-v2.conf')
    elif os.path.exists('/etc/apt-proxy/apt-proxy-2.conf'):
        conf.read('/etc/apt-proxy/apt-proxy-2.conf')
    else:
        conf.read('/etc/apt-proxy/apt-proxy.conf')

    factory.proxy_address = conf.get(DEFAULTSECT, 'address').split(' ')
    factory.proxy_port = conf.getint(DEFAULTSECT, 'port')
    factory.cache_dir = conf.get(DEFAULTSECT, 'cache_dir')
    factory.max_freq = conf.gettime(DEFAULTSECT, 'min_refresh_delay')
    factory.max_versions = conf.getint(DEFAULTSECT, 'max_versions', 1)
    factory.max_age = conf.gettime(DEFAULTSECT, 'max_age', 1)
    factory.timeout = conf.gettime(DEFAULTSECT, 'timeout')
    factory.cleanup_freq = conf.gettime(DEFAULTSECT, 'cleanup_freq', 1)
    factory.do_debug = conf.get(DEFAULTSECT, 'debug')
    factory.passive_ftp = conf.getboolean(DEFAULTSECT, 'passive_ftp')
    factory.dynamic_backends = conf.getboolean(DEFAULTSECT, 'dynamic_backends')
    factory.http_proxy = conf.get(DEFAULTSECT, 'http_proxy')

    # BUG FIX: this previously tested factory.debug, an attribute that is
    # only assigned inside the branches below — on the first call it did
    # not exist yet and raised AttributeError.  The raw 'debug' option
    # string was stored in factory.do_debug above, so test that instead.
    if factory.do_debug != '0':
        # Build a {domain: level} map; a bare domain defaults to level 9.
        factory.debug = {'debug': '9'}
        for domain in factory.do_debug.split():
            if domain.find(':') != -1:
                name, level = domain.split(':')
            else:
                name, level = domain, 9
            factory.debug[name] = int(level)
        factory.do_debug = 1
    else:
        factory.debug = 0
        factory.do_debug = 0

    factory.finish_horphans = conf.getboolean(DEFAULTSECT,
                                              'complete_clientless_downloads')
    factory.import_dir = conf.get(DEFAULTSECT, 'import_dir')
    factory.disable_pipelining = conf.getboolean(DEFAULTSECT,
                                                 'disable_pipelining')

    # Every non-default section describes one backend.
    for name in conf.sections():
        uris = []
        if name.find('/') != -1:
            log.msg("WARNING: backend %s contains '/' (ignored)" % (name))
            continue
        servers = conf.get(name, 'backends').split()
        if len(servers) == 0:
            log.msg("WARNING: [%s] has no backend servers (skiped)" % name)
            continue
        for server in servers:
            if server[-1] == '/':
                log.msg("Removing unnecessary '/' at the end of %s" % (server))
                server = server[0:-1]
            # Only these schemes are supported.
            if urlparse.urlparse(server)[0] in ['http', 'ftp', 'rsync']:
                uris.append(server)
            else:
                log.msg(
                    "WARNING: Wrong server '%s' found in backend '%s'. It was skiped."
                    % (server, name))
                continue
        # Per-backend overrides fall back to the global settings.
        if conf.has_option(name, 'timeout'):
            timeout = conf.gettime(name, 'timeout')
        else:
            timeout = factory.timeout
        if conf.has_option(name, 'passive_ftp'):
            passive_ftp = conf.getboolean(name, 'passive_ftp')
        else:
            passive_ftp = factory.passive_ftp
        backend = Backend(name, factory, uris, timeout, passive_ftp)
        factory.addBackend(backend)

    # Telnet shell is only enabled when both user and password are set.
    if shell:
        shell.username = conf.get(DEFAULTSECT, 'telnet_user')
        shell.password = conf.get(DEFAULTSECT, 'telnet_pass')
        if shell.username and shell.password:
            shell.port = conf.getint(DEFAULTSECT, 'telnet_port')
        else:
            shell.port = 0
def factoryConfig(factory, shell = None):
    "Loads the configuration file into 'factory'"
    # NOTE(review): this file contains two near-identical definitions of
    # factoryConfig; this later one shadows the earlier at import time.
    # Confirm which is intended and delete the other.
    # Fallback values used when the config file omits an option.
    defaults = {
        'address': '',
        'port': '9999',
        'min_refresh_delay': '30',
        'complete_clientless_downloads': '0',
        'debug': '0',
        'telnet_port': '0',
        'telnet_user': '',
        'telnet_pass': '',
        'timeout': '30',
        'cleanup_freq': '600',
        'cache_dir': '/var/cache/apt-proxy',
        'max_versions': '3',
        'max_age': '10',
        'import_dir': '/var/cache/apt-proxy/import',
        'disable_pipelining': '1',
        'passive_ftp': 'on',
        'dynamic_backends': 'on',
        'http_proxy': ''
        }

    conf = MyConfigParser(defaults)
    # Prefer the newer config file names, falling back to the oldest.
    if os.path.exists('/etc/apt-proxy/apt-proxy-v2.conf'):
        conf.read('/etc/apt-proxy/apt-proxy-v2.conf')
    elif os.path.exists('/etc/apt-proxy/apt-proxy-2.conf'):
        conf.read('/etc/apt-proxy/apt-proxy-2.conf')
    else:
        conf.read('/etc/apt-proxy/apt-proxy.conf')

    factory.proxy_address = conf.get(DEFAULTSECT, 'address').split(' ')
    factory.proxy_port = conf.getint(DEFAULTSECT, 'port')
    factory.cache_dir = conf.get(DEFAULTSECT, 'cache_dir')
    factory.max_freq = conf.gettime(DEFAULTSECT, 'min_refresh_delay')
    factory.max_versions = conf.getint(DEFAULTSECT, 'max_versions', 1)
    factory.max_age = conf.gettime(DEFAULTSECT, 'max_age', 1)
    factory.timeout = conf.gettime(DEFAULTSECT, 'timeout')
    factory.cleanup_freq = conf.gettime(DEFAULTSECT, 'cleanup_freq', 1)
    factory.do_debug = conf.get(DEFAULTSECT, 'debug')
    factory.passive_ftp = conf.getboolean(DEFAULTSECT, 'passive_ftp')
    factory.dynamic_backends = conf.getboolean(DEFAULTSECT, 'dynamic_backends')
    factory.http_proxy = conf.get(DEFAULTSECT, 'http_proxy')

    # BUG FIX: this previously tested factory.debug, an attribute that is
    # only assigned inside the branches below — on the first call it did
    # not exist yet and raised AttributeError.  The raw 'debug' option
    # string was stored in factory.do_debug above, so test that instead.
    if factory.do_debug != '0':
        # Build a {domain: level} map; a bare domain defaults to level 9.
        factory.debug = {'debug':'9'}
        for domain in factory.do_debug.split():
            if domain.find(':') != -1:
                name, level = domain.split(':')
            else:
                name, level = domain, 9
            factory.debug[name] = int(level)
        factory.do_debug = 1
    else:
        factory.debug = 0
        factory.do_debug = 0

    factory.finish_horphans = conf.getboolean(DEFAULTSECT,
                                              'complete_clientless_downloads')
    factory.import_dir = conf.get(DEFAULTSECT, 'import_dir')
    factory.disable_pipelining = conf.getboolean(DEFAULTSECT,
                                                 'disable_pipelining')

    # Every non-default section describes one backend.
    for name in conf.sections():
        uris = []
        if name.find('/') != -1:
            log.msg("WARNING: backend %s contains '/' (ignored)"%(name))
            continue
        servers = conf.get(name, 'backends').split()
        if len(servers) == 0:
            log.msg("WARNING: [%s] has no backend servers (skiped)"%name)
            continue
        for server in servers:
            if server[-1] == '/':
                log.msg ("Removing unnecessary '/' at the end of %s"%(server))
                server = server[0:-1]
            # Only these schemes are supported.
            if urlparse.urlparse(server)[0] in ['http', 'ftp', 'rsync']:
                uris.append(server)
            else:
                log.msg ("WARNING: Wrong server '%s' found in backend '%s'. It was skiped."
                         % (server, name))
                continue
        # Per-backend overrides fall back to the global settings.
        if conf.has_option(name, 'timeout'):
            timeout = conf.gettime(name, 'timeout')
        else:
            timeout = factory.timeout
        if conf.has_option(name, 'passive_ftp'):
            passive_ftp = conf.getboolean(name, 'passive_ftp')
        else:
            passive_ftp = factory.passive_ftp
        backend = Backend(name, factory, uris, timeout, passive_ftp)
        factory.addBackend(backend)

    # Telnet shell is only enabled when both user and password are set.
    if shell:
        shell.username = conf.get(DEFAULTSECT, 'telnet_user')
        shell.password = conf.get(DEFAULTSECT, 'telnet_pass')
        if shell.username and shell.password:
            shell.port = conf.getint(DEFAULTSECT, 'telnet_port')
        else:
            shell.port = 0