def export_files(session, archive, clpool, progress=False):
    """
    Export interesting files from source packages.

    For the newest version of every source package in each suite of
    *archive*, unpack the source and hard-link selected debian/ files
    (changelog, copyright, NEWS.Debian, README.Debian) into the
    changelog pool *clpool*.  Each exported file gets a per-version
    name (``<src>_<ver>_<file>``) plus a per-suite hard link
    (``<suite>_<file>``); stale files and empty directories in the
    pool are removed afterwards and summary statistics are printed.

    :param session: database session used to run the source/suite query
    :param archive: archive object providing ``.path`` and ``.archive_id``
    :param clpool: root directory of the changelog pool to export into
    :param progress: when True, write unpack progress to stderr
    """
    pool = os.path.join(archive.path, 'pool')
    sources = {}
    unpack = {}
    files = ('changelog', 'copyright', 'NEWS.Debian', 'README.Debian')
    stats = {'unpack': 0, 'created': 0, 'removed': 0, 'errors': 0, 'files': 0}
    query = """SELECT DISTINCT s.source, su.suite_name AS suite, s.version,
                      c.name || '/' || f.filename AS filename
               FROM source s
               JOIN newest_source n ON n.source = s.source AND n.version = s.version
               JOIN src_associations sa ON sa.source = s.id
               JOIN suite su ON su.id = sa.suite
               JOIN files f ON f.id = s.file
               JOIN files_archive_map fam
                    ON f.id = fam.file_id AND fam.archive_id = su.archive_id
               JOIN component c ON fam.component_id = c.id
               WHERE su.archive_id = :archive_id
               ORDER BY s.source, suite"""

    # sources[source][suite] = (version-without-epoch, pool-relative filename)
    for p in session.execute(query, {'archive_id': archive.archive_id}):
        if p[0] not in sources:  # was dict.has_key() — removed in Python 3
            sources[p[0]] = {}
        sources[p[0]][p[1]] = (re_no_epoch.sub('', p[2]), p[3])

    for p in sources.keys():
        for s in sources[p].keys():
            path = os.path.join(clpool, '/'.join(sources[p][s][1].split('/')[:-1]))
            if not os.path.exists(path):
                os.makedirs(path)
            if not os.path.exists(os.path.join(path,
                    '%s_%s_changelog' % (p, sources[p][s][0]))):
                # Changelog not exported yet: queue the .dsc for unpacking,
                # collecting every suite that needs it.  Hoist the repeated
                # os.path.join out of the membership test.
                dsc = os.path.join(pool, sources[p][s][1])
                if dsc not in unpack:
                    unpack[dsc] = (path, set())
                unpack[dsc][1].add(s)
            else:
                # Already unpacked for this version: just refresh the
                # per-suite hard links against the per-version files.
                for fname in glob('%s/%s_%s_*' % (path, p, sources[p][s][0])):
                    link = '%s%s' % (s,
                            fname.split('%s_%s' % (p, sources[p][s][0]))[1])
                    try:
                        os.unlink(os.path.join(path, link))
                    except OSError:
                        pass
                    os.link(os.path.join(path, fname), os.path.join(path, link))

    for p in unpack.keys():
        package = os.path.splitext(os.path.basename(p))[0].split('_')
        try:
            unpacked = UnpackedSource(p, clpool)
            tempdir = unpacked.get_root_directory()
            stats['unpack'] += 1
            if progress:
                if stats['unpack'] % 100 == 0:
                    sys.stderr.write('%d packages unpacked\n' % stats['unpack'])
                elif stats['unpack'] % 10 == 0:
                    sys.stderr.write('.')
            for fname in files:
                for f in glob(os.path.join(tempdir, 'debian', '*%s' % fname)):
                    for s in unpack[p][1]:
                        suite = os.path.join(unpack[p][0], '%s_%s'
                                % (s, os.path.basename(f)))
                        version = os.path.join(unpack[p][0], '%s_%s_%s'
                                % (package[0], package[1], os.path.basename(f)))
                        if not os.path.exists(version):
                            os.link(f, version)
                            stats['created'] += 1
                        try:
                            os.unlink(suite)
                        except OSError:
                            pass
                        os.link(version, suite)
                        stats['created'] += 1
            unpacked.cleanup()
        except Exception as e:
            # Best-effort: a broken source package must not abort the run.
            print('make-changelog: unable to unpack %s\n%s' % (p, e))
            stats['errors'] += 1

    # Garbage-collect the pool bottom-up: drop directories for sources that
    # no longer exist and files whose link count shows they are orphaned.
    # NOTE(review): `filelist` is a module-level name not visible here —
    # presumably the index file that must never be deleted; confirm.
    for root, dirs, walk_files in os.walk(clpool, topdown=False):
        walk_files = [f for f in walk_files if f != filelist]
        if len(walk_files):
            if root != clpool:
                if root.split('/')[-1] not in sources.keys():
                    if os.path.exists(root):
                        stats['removed'] += len(os.listdir(root))
                        rmtree(root)
            for fname in walk_files:
                if os.path.exists(os.path.join(root, fname)):
                    # st_nlink == 1 means no per-suite link points here
                    # any more, so the file is stale.
                    if os.stat(os.path.join(root, fname)).st_nlink == 1:
                        stats['removed'] += 1
                        os.unlink(os.path.join(root, fname))
        for d in dirs:
            try:
                os.rmdir(os.path.join(root, d))  # only removes empty dirs
            except OSError:
                pass
        stats['files'] += len(walk_files)
    stats['files'] -= stats['removed']

    print('make-changelog: file exporting finished')
    print(' * New packages unpacked: %d' % stats['unpack'])
    print(' * New files created: %d' % stats['created'])
    print(' * New files removed: %d' % stats['removed'])
    print(' * Unpack errors: %d' % stats['errors'])
    print(' * Files available into changelog pool: %d' % stats['files'])
def export_files(session, archive, clpool, progress=False):
    """
    Export interesting files from source packages.

    For the newest version of every source package in each suite of
    *archive*, unpack the source and hard-link selected debian/ files
    (changelog, copyright, NEWS, NEWS.Debian, README.Debian) into the
    changelog pool *clpool*.  Each exported file gets a per-version
    name (``<src>_<ver>_<file>``) plus a per-suite hard link
    (``<suite>_<file>``); stale files and empty directories in the
    pool are removed afterwards and summary statistics are printed.

    :param session: database session used to run the source/suite query
    :param archive: archive object providing ``.path`` and ``.archive_id``
    :param clpool: root directory of the changelog pool to export into
    :param progress: when True, write unpack progress to stderr
    """
    pool = os.path.join(archive.path, 'pool')
    sources = {}
    unpack = {}
    files = ('changelog', 'copyright', 'NEWS', 'NEWS.Debian', 'README.Debian')
    stats = {'unpack': 0, 'created': 0, 'removed': 0, 'errors': 0, 'files': 0}
    query = """SELECT DISTINCT s.source, su.suite_name AS suite, s.version,
                      c.name || '/' || f.filename AS filename
               FROM source s
               JOIN newest_source n ON n.source = s.source AND n.version = s.version
               JOIN src_associations sa ON sa.source = s.id
               JOIN suite su ON su.id = sa.suite
               JOIN files f ON f.id = s.file
               JOIN files_archive_map fam
                    ON f.id = fam.file_id AND fam.archive_id = su.archive_id
               JOIN component c ON fam.component_id = c.id
               WHERE su.archive_id = :archive_id
               ORDER BY s.source, suite"""

    # sources[source][suite] = (version-without-epoch, pool-relative filename)
    for p in session.execute(query, {'archive_id': archive.archive_id}):
        if p[0] not in sources:
            sources[p[0]] = {}
        sources[p[0]][p[1]] = (re_no_epoch.sub('', p[2]), p[3])

    for p in sources.keys():
        for s in sources[p].keys():
            path = os.path.join(clpool, '/'.join(sources[p][s][1].split('/')[:-1]))
            if not os.path.exists(path):
                os.makedirs(path)
            if not os.path.exists(os.path.join(path,
                    '%s_%s_changelog' % (p, sources[p][s][0]))):
                # Changelog not exported yet: queue the .dsc for unpacking,
                # collecting every suite that needs it.  Hoist the repeated
                # os.path.join out of the membership test.
                dsc = os.path.join(pool, sources[p][s][1])
                if dsc not in unpack:
                    unpack[dsc] = (path, set())
                unpack[dsc][1].add(s)
            else:
                # Already unpacked for this version: just refresh the
                # per-suite hard links against the per-version files.
                for fname in glob('%s/%s_%s_*' % (path, p, sources[p][s][0])):
                    link = '%s%s' % (s,
                            fname.split('%s_%s' % (p, sources[p][s][0]))[1])
                    try:
                        os.unlink(os.path.join(path, link))
                    except OSError:
                        pass
                    os.link(os.path.join(path, fname), os.path.join(path, link))

    for p in unpack.keys():
        package = os.path.splitext(os.path.basename(p))[0].split('_')
        try:
            unpacked = UnpackedSource(p, clpool)
            tempdir = unpacked.get_root_directory()
            stats['unpack'] += 1
            if progress:
                if stats['unpack'] % 100 == 0:
                    sys.stderr.write('%d packages unpacked\n' % stats['unpack'])
                elif stats['unpack'] % 10 == 0:
                    sys.stderr.write('.')
            for fname in files:
                for f in glob(os.path.join(tempdir, 'debian', '*%s' % fname)):
                    for s in unpack[p][1]:
                        suite = os.path.join(unpack[p][0], '%s_%s'
                                % (s, os.path.basename(f)))
                        version = os.path.join(unpack[p][0], '%s_%s_%s'
                                % (package[0], package[1], os.path.basename(f)))
                        if not os.path.exists(version):
                            os.link(f, version)
                            stats['created'] += 1
                        try:
                            os.unlink(suite)
                        except OSError:
                            pass
                        os.link(version, suite)
                        stats['created'] += 1
            unpacked.cleanup()
        except Exception as e:
            # Best-effort: a broken source package must not abort the run.
            print('make-changelog: unable to unpack %s\n%s' % (p, e))
            stats['errors'] += 1

    # Garbage-collect the pool bottom-up: drop directories for sources that
    # no longer exist and files whose link count shows they are orphaned.
    # NOTE(review): `filelist` is a module-level name not visible here —
    # presumably the index file that must never be deleted; confirm.
    for root, dirs, walk_files in os.walk(clpool, topdown=False):
        walk_files = [f for f in walk_files if f != filelist]
        if len(walk_files):
            if root != clpool:
                if root.split('/')[-1] not in sources.keys():
                    if os.path.exists(root):
                        stats['removed'] += len(os.listdir(root))
                        rmtree(root)
            for fname in walk_files:
                if os.path.exists(os.path.join(root, fname)):
                    # st_nlink == 1 means no per-suite link points here
                    # any more, so the file is stale.
                    if os.stat(os.path.join(root, fname)).st_nlink == 1:
                        stats['removed'] += 1
                        os.unlink(os.path.join(root, fname))
        for d in dirs:
            try:
                os.rmdir(os.path.join(root, d))  # only removes empty dirs
            except OSError:
                pass
        stats['files'] += len(walk_files)
    stats['files'] -= stats['removed']

    print('make-changelog: file exporting finished')
    print(' * New packages unpacked: %d' % stats['unpack'])
    print(' * New files created: %d' % stats['created'])
    print(' * New files removed: %d' % stats['removed'])
    print(' * Unpack errors: %d' % stats['errors'])
    print(' * Files available into changelog pool: %d' % stats['files'])
def export_files(session, pool, clpool):
    """
    Export interesting files from source packages.

    For the newest version of every source package in each suite,
    unpack the source and hard-link selected debian/ files
    (changelog, copyright, NEWS.Debian, README.Debian) into the
    changelog pool *clpool*.  Each exported file gets a per-version
    name (``<src>_<ver>.<file>``) plus a per-suite hard link
    (``<suite>.<file>``); stale files and directories in the pool
    are removed afterwards and summary statistics are printed.

    :param session: database session used to run the source/suite query
    :param pool: root directory of the archive pool holding the .dsc files
    :param clpool: root directory of the changelog pool to export into
    """
    sources = {}
    unpack = {}
    files = ('changelog', 'copyright', 'NEWS.Debian', 'README.Debian')
    stats = {'unpack': 0, 'created': 0, 'removed': 0, 'errors': 0, 'files': 0}
    query = """SELECT DISTINCT s.source, su.suite_name AS suite, s.version,
                      f.filename
               FROM source s
               JOIN newest_source n ON n.source = s.source AND n.version = s.version
               JOIN src_associations sa ON sa.source = s.id
               JOIN suite su ON su.id = sa.suite
               JOIN files f ON f.id = s.file
               ORDER BY s.source, suite"""

    # sources[source][suite] = (version-without-epoch, pool-relative filename)
    for p in session.execute(query):
        if p[0] not in sources:  # was dict.has_key() — removed in Python 3
            sources[p[0]] = {}
        sources[p[0]][p[1]] = (re_no_epoch.sub('', p[2]), p[3])

    for p in sources.keys():
        for s in sources[p].keys():
            path = os.path.join(clpool, '/'.join(sources[p][s][1].split('/')[:-1]))
            if not os.path.exists(path):
                os.makedirs(path)
            if not os.path.exists(os.path.join(path,
                    '%s_%s.changelog' % (p, sources[p][s][0]))):
                # Changelog not exported yet: queue the .dsc for unpacking,
                # collecting every suite that needs it.  Hoist the repeated
                # os.path.join out of the membership test.
                dsc = os.path.join(pool, sources[p][s][1])
                if dsc not in unpack:
                    unpack[dsc] = (path, set())
                unpack[dsc][1].add(s)
            else:
                # Already unpacked for this version: just refresh the
                # per-suite hard links against the per-version files.
                for fname in glob('%s/%s_%s*' % (path, p, sources[p][s][0])):
                    link = '%s%s' % (s,
                            fname.split('%s_%s' % (p, sources[p][s][0]))[1])
                    try:
                        os.unlink(os.path.join(path, link))
                    except OSError:
                        pass
                    os.link(os.path.join(path, fname), os.path.join(path, link))

    for p in unpack.keys():
        package = os.path.splitext(os.path.basename(p))[0].split('_')
        try:
            unpacked = UnpackedSource(p)
            tempdir = unpacked.get_root_directory()
            stats['unpack'] += 1
            for fname in files:
                for f in glob(os.path.join(tempdir, 'debian', '*%s' % fname)):
                    for s in unpack[p][1]:
                        suite = os.path.join(unpack[p][0], '%s.%s'
                                % (s, os.path.basename(f)))
                        version = os.path.join(unpack[p][0], '%s_%s.%s'
                                % (package[0], package[1], os.path.basename(f)))
                        if not os.path.exists(version):
                            os.link(f, version)
                            stats['created'] += 1
                        try:
                            os.unlink(suite)
                        except OSError:
                            pass
                        os.link(version, suite)
                        stats['created'] += 1
            unpacked.cleanup()
        except Exception as e:
            # Best-effort: a broken source package must not abort the run.
            print('make-changelog: unable to unpack %s\n%s' % (p, e))
            stats['errors'] += 1

    # Garbage-collect the pool: drop directories for sources that no longer
    # exist and files whose link count shows they are orphaned.
    for root, dirs, walk_files in os.walk(clpool):
        if len(walk_files):
            if root.split('/')[-1] not in sources.keys():
                if os.path.exists(root):
                    rmtree(root)
                    stats['removed'] += 1
            for fname in walk_files:
                if os.path.exists(os.path.join(root, fname)):
                    # st_nlink == 1 means no per-suite link points here
                    # any more, so the file is stale.
                    if os.stat(os.path.join(root, fname)).st_nlink == 1:
                        os.unlink(os.path.join(root, fname))
                        stats['removed'] += 1

    # Second walk: count what survived the cleanup.
    for root, dirs, walk_files in os.walk(clpool):
        stats['files'] += len(walk_files)

    print('make-changelog: file exporting finished')
    print(' * New packages unpacked: %d' % stats['unpack'])
    print(' * New files created: %d' % stats['created'])
    print(' * New files removed: %d' % stats['removed'])
    print(' * Unpack errors: %d' % stats['errors'])
    print(' * Files available into changelog pool: %d' % stats['files'])