def run(self):
    cnf = Config()
    count = 1

    dirs = []
    dirs.append(cnf['Dir::Done'])

    for queue_name in [ "byhand", "new", "proposedupdates", "oldproposedupdates" ]:
        queue = get_policy_queue(queue_name)
        if queue:
            dirs.append(os.path.abspath(queue.path))
        else:
            warn("Could not find queue %s in database" % queue_name)

    for checkdir in dirs:
        if os.path.exists(checkdir):
            print "Looking into %s" % (checkdir)

            for dirpath, dirnames, filenames in os.walk(checkdir, topdown=True):
                if not filenames:
                    # Empty directory (or only subdirectories), next
                    continue

                for changesfile in filenames:
                    try:
                        if not changesfile.endswith(".changes"):
                            # Only interested in changes files.
                            continue
                        count += 1

                        if not get_dbchange(changesfile, self.session):
                            to_import = ChangesToImport(dirpath, changesfile, count)
                            if self.die:
                                return
                            self.queue.enqueue(to_import)
                    except KeyboardInterrupt:
                        print("got Ctrl-c in enqueue thread. terminating")
                        self.parent.plsDie()
                        sys.exit(1)

    self.queue.enqueue(EndOfChanges())

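# --- Illustrative sketch only, not part of dak ---------------------------
# run() above is the producer half of a producer/consumer pair: it enqueues
# one work item per .changes file and finishes with an EndOfChanges sentinel
# so the consuming thread knows the stream is complete. The names below
# (_EndOfWork, _produce, _consume) are hypothetical and only demonstrate the
# sentinel pattern using the standard library Queue (Python 2 spelling).
import Queue

class _EndOfWork(object):
    """Sentinel object marking the end of the work stream."""
    pass

def _produce(work_queue, items):
    for item in items:
        work_queue.put(item)
    # Always terminate the stream so the consumer can exit cleanly.
    work_queue.put(_EndOfWork())

def _consume(work_queue):
    while True:
        item = work_queue.get()
        if isinstance(item, _EndOfWork):
            break
        # ... process one item here, e.g. import a single .changes file ...

# Typical wiring: _consume(q) runs in a worker thread while _produce(q, items)
# feeds it from the main thread, mirroring the enqueue/EndOfChanges flow above.
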
def init(cnf):
    global delete_date, del_dir

    # Used for directory naming
    now_date = datetime.now()

    # Used for working out times (86400 seconds per day)
    delete_date = int(time.time()) - (int(Options["Days"]) * 86400)

    morguedir = cnf.get("Dir::Morgue", os.path.join(cnf["Dir::Pool"], 'morgue'))
    morguesubdir = cnf.get("Clean-Queues::MorgueSubDir", 'queue')

    # Build directory as morguedir/morguesubdir/year/month/day
    del_dir = os.path.join(morguedir, morguesubdir, str(now_date.year), '%.2d' % now_date.month, '%.2d' % now_date.day)

    # Ensure a directory exists to remove files to
    if not Options["No-Action"]:
        if not os.path.exists(del_dir):
            os.makedirs(del_dir, 0o2775)
        if not os.path.isdir(del_dir):
            utils.fubar("%s must be a directory." % (del_dir))

    # Move to the directory to clean
    incoming = Options["Incoming"]
    if incoming == "":
        incoming_queue = get_policy_queue('unchecked')
        if not incoming_queue:
            utils.fubar("Cannot find 'unchecked' queue")
        incoming = incoming_queue.path

    try:
        os.chdir(incoming)
    except OSError as e:
        utils.fubar("Cannot chdir to %s: %s" % (incoming, e))

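# --- Illustrative sketch only, not part of dak ---------------------------
# delete_date computed in init() is a Unix-epoch cutoff: anything last
# modified before it is old enough to be cleaned up. The helper name below
# is hypothetical and relies on the module's existing os import.
def _is_old_enough(path):
    return os.path.getmtime(path) < delete_date
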
def main():
    global Cnf

    Cnf = utils.get_conf()
    Arguments = [('h', "help", "Queue-Report::Options::Help"),
                 ('n', "new", "Queue-Report::Options::New"),
                 ('8', '822', "Queue-Report::Options::822"),
                 ('s', "sort", "Queue-Report::Options::Sort", "HasArg"),
                 ('a', "age", "Queue-Report::Options::Age", "HasArg"),
                 ('r', "rrd", "Queue-Report::Options::Rrd", "HasArg"),
                 ('d', "directories", "Queue-Report::Options::Directories", "HasArg")]
    for i in [ "help" ]:
        if not Cnf.has_key("Queue-Report::Options::%s" % (i)):
            Cnf["Queue-Report::Options::%s" % (i)] = ""

    apt_pkg.parse_commandline(Cnf, Arguments, sys.argv)

    Options = Cnf.subtree("Queue-Report::Options")
    if Options["Help"]:
        usage()

    if Cnf.has_key("Queue-Report::Options::New"):
        header()

    # Initialize db so we can get the NEW comments
    dbconn = DBConn()

    queue_names = []

    if Cnf.has_key("Queue-Report::Options::Directories"):
        for i in Cnf["Queue-Report::Options::Directories"].split(","):
            queue_names.append(i)
    elif Cnf.has_key("Queue-Report::Directories"):
        queue_names = Cnf.value_list("Queue-Report::Directories")
    else:
        queue_names = [ "byhand", "new" ]

    if Cnf.has_key("Queue-Report::Options::Rrd"):
        rrd_dir = Cnf["Queue-Report::Options::Rrd"]
    elif Cnf.has_key("Dir::Rrd"):
        rrd_dir = Cnf["Dir::Rrd"]
    else:
        rrd_dir = None

    f = None
    if Cnf.has_key("Queue-Report::Options::822"):
        # Open the report file
        f = open(Cnf["Queue-Report::ReportLocations::822Location"], "w")

    session = dbconn.session()

    for queue_name in queue_names:
        queue = get_policy_queue(queue_name, session)
        if queue:
            directory = os.path.abspath(queue.path)
            changes_files = glob.glob("%s/*.changes" % (directory))
            process_changes_files(changes_files, os.path.basename(directory), f, rrd_dir)
        else:
            utils.warn("Cannot find queue %s" % queue_name)

    if Cnf.has_key("Queue-Report::Options::822"):
        f.close()

    if Cnf.has_key("Queue-Report::Options::New"):
        footer()

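# Example invocations derived from the option table above (assuming the usual
# "dak queue-report" command wrapper dispatches to this main()):
#   dak queue-report --new
#   dak queue-report --directories new,byhand --rrd /srv/dak/rrd
# The /srv/dak/rrd path is purely illustrative.
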
def __init__(self, session=None):
    cnf = Config()
    try:
        newq = get_policy_queue('new', session)
        for changes_fn in glob.glob(newq.path + "/*.changes"):
            changes_bn = os.path.basename(changes_fn)
            chg = get_dbchange(changes_bn, session)

            u = Upload()
            success = u.load_changes(changes_fn)
            u.pkg.changes_file = changes_bn
            u.check_hashes()

            if not chg:
                chg = u.pkg.add_known_changes(newq.path, newq.policy_queue_id, session)
                session.add(chg)

            if not success:
                log.critical("failed to load %s" % changes_fn)
                sys.exit(1)
            else:
                log.critical("ACCLAIM: %s" % changes_fn)

            files = []
            for chg_fn in u.pkg.files.keys():
                try:
                    f = open(os.path.join(newq.path, chg_fn))
                    cpf = ChangePendingFile()
                    cpf.filename = chg_fn
                    cpf.size = u.pkg.files[chg_fn]['size']
                    cpf.md5sum = u.pkg.files[chg_fn]['md5sum']

                    if u.pkg.files[chg_fn].has_key('sha1sum'):
                        cpf.sha1sum = u.pkg.files[chg_fn]['sha1sum']
                    else:
                        log.warning("Having to generate sha1sum for %s" % chg_fn)
                        f.seek(0)
                        cpf.sha1sum = apt_pkg.sha1sum(f)

                    if u.pkg.files[chg_fn].has_key('sha256sum'):
                        cpf.sha256sum = u.pkg.files[chg_fn]['sha256sum']
                    else:
                        log.warning("Having to generate sha256sum for %s" % chg_fn)
                        f.seek(0)
                        cpf.sha256sum = apt_pkg.sha256sum(f)

                    session.add(cpf)
                    files.append(cpf)
                    f.close()
                except IOError:
                    # Can't find the file, try to look it up in the pool
                    poolname = poolify(u.pkg.changes["source"], u.pkg.files[chg_fn]["component"])
                    l = get_location(cnf["Dir::Pool"], u.pkg.files[chg_fn]["component"], session=session)
                    if not l:
                        log.critical("ERROR: Can't find location for %s (component %s)" % (chg_fn, u.pkg.files[chg_fn]["component"]))

                    found, poolfile = check_poolfile(os.path.join(poolname, chg_fn),
                                                     u.pkg.files[chg_fn]['size'],
                                                     u.pkg.files[chg_fn]["md5sum"],
                                                     l.location_id,
                                                     session=session)

                    if found is None:
                        log.critical("ERROR: Found multiple files for %s in pool" % chg_fn)
                        sys.exit(1)
                    elif found is False and poolfile is not None:
                        log.critical("ERROR: md5sum / size mismatch for %s in pool" % chg_fn)
                        sys.exit(1)
                    else:
                        if poolfile is None:
                            log.critical("ERROR: Could not find %s in pool" % chg_fn)
                            sys.exit(1)
                        else:
                            chg.poolfiles.append(poolfile)

            chg.files = files

        session.commit()

    except KeyboardInterrupt:
        print("Caught C-c; terminating.")
        utils.warn("Caught C-c; terminating.")
        self.plsDie()