class FetcherTestCase(testcase.TestCase):

    def setUp(self):
        testcase.TestCase.setUp(self, database = False)
        self.spec = SpecFile()
        self.spec.read("tests/helloworld/pspec.xml")
        self.url = uri.URI(self.spec.source.archiveUri)
        self.destpath = ctx.config.archives_dir()
        self.fetch = fetcher.Fetcher(self.url, self.destpath)

    def testFetch(self):
        self.fetch.fetch()
        fetchedFile = os.path.join(self.destpath, self.url.filename())
        if os.access(fetchedFile, os.R_OK):
            self.assertEqual(util.sha1_file(fetchedFile), self.spec.source.archiveSHA1)
        os.remove(fetchedFile)

    def testResume(self):
        resume_test_file = "tests/helloworld/hello-1.3.tar.gz.part"
        shutil.copy(resume_test_file, ctx.config.archives_dir())
        self.fetch.fetch()
        fetchedFile = os.path.join(self.destpath, self.url.filename())
        if os.access(fetchedFile, os.R_OK):
            self.assertEqual(util.sha1_file(fetchedFile), self.spec.source.archiveSHA1)
        os.remove(fetchedFile)

class FetcherTestCase(testcase.TestCase):

    def setUp(self):
        testcase.TestCase.setUp(self, database = False)
        self.spec = SpecFile()
        self.spec.read("tests/helloworld/pspec.xml")
        self.url = uri.URI(self.spec.source.archive.uri)
        self.destpath = ctx.config.archives_dir()
        self.fetch = fetcher.Fetcher(self.url, self.destpath)

    def testFetch(self):
        self.fetch.fetch()
        fetchedFile = os.path.join(self.destpath, self.url.filename())
        if os.access(fetchedFile, os.R_OK):
            self.assertEqual(util.sha1_file(fetchedFile), self.spec.source.archive.sha1sum)
        os.remove(fetchedFile)

    def testResume(self):
        resume_test_file = "tests/helloworld/helloworld-2.0.tar.bz2.part"
        shutil.copy(resume_test_file, ctx.config.archives_dir())
        self.fetch.fetch()
        fetchedFile = os.path.join(self.destpath, self.url.filename())
        if os.access(fetchedFile, os.R_OK):
            self.assertEqual(util.sha1_file(fetchedFile), self.spec.source.archive.sha1sum)
        os.remove(fetchedFile)

def set_spec_file(self, specuri):
    if not specuri.is_remote_file():
        specuri = URI(os.path.realpath(specuri.get_uri()))  # FIXME: doesn't work for file://
    self.specuri = specuri
    spec = SpecFile()
    spec.read(specuri, ctx.config.tmp_dir())
    self.spec = spec

def setUp(self):
    unittest.TestCase.setUp(self)
    self.spec = SpecFile()
    self.spec.read('repos/pardus-2007/system/base/curl/pspec.xml')
    self.url = uri.URI(self.spec.source.archive.uri)
    self.url.set_auth_info(("user", "pass"))
    self.destpath = ctx.config.archives_dir()
    self.fetch = Fetcher(self.url, self.destpath)

def set_spec_file(self, pspecfile):
    self.pspecfile = pspecfile
    spec = SpecFile()
    spec.read(pspecfile)
    # FIXME: following checks the integrity but does nothing when it is wrong
    # -gurer
    #spec.verify()  # check pspec integrity
    self.spec = spec

def setUp(self):
    testcase.TestCase.setUp(self, database = False)
    self.spec = SpecFile()
    self.spec.read("tests/helloworld/pspec.xml")
    self.url = uri.URI(self.spec.source.archive.uri)
    self.destpath = ctx.config.archives_dir()
    self.fetch = fetcher.Fetcher(self.url, self.destpath)

def setUp(self):
    testcase.TestCase.setUp(self)
    self.sourcedb = pisi.sourcedb.init()
    self.spec = SpecFile()
    self.spec.read("tests/popt/pspec.xml")
    if not ctx.repodb.has_repo('test'):
        ctx.repodb.add_repo('test', pisi.repodb.Repo(pisi.uri.URI('fakerepo.xml')))

def main():
    pisi_init()
    spec = SpecFile()
    try:
        spec.read(sys.argv[1], ctx.config.tmp_dir())
    except:
        print "'%s' is not a valid pspec.xml file." % (sys.argv[1])
        return
    package_dir = pkg_dir(spec)
    install_dir = util.join_path(package_dir + ctx.const.install_dir_suffix)
    if not os.path.isdir(install_dir):
        print "The directory '%s' does not exist. The package may not have been built." % (install_dir)
        return
    unowned_files = []
    all_paths_in_packages = []
    files_already_in_any_package = []
    for package in spec.packages:
        for path in package.files:
            all_paths_in_packages.append(util.join_path(install_dir + path.path))
    for root, dirs, files in os.walk(install_dir):
        for file_ in files:
            already_in_package = False
            fpath = util.join_path(root, file_)
            for path in all_paths_in_packages:
                if not fpath.find(path):
                    already_in_package = True
            if not already_in_package:
                unowned_files.append(fpath)
    if unowned_files:
        print
        print "Files created in the install directory but not included in any package in pspec.xml:"
        print "==============================================================================================="
        print
        for p in unowned_files:
            print p
        print
    else:
        print "There are no files in the install directory that are missing from the packages in pspec.xml"
    pisi_finalize()

def getVersion(pspecList):
    sources = {}
    for pspec in pspecList:
        specFile = SpecFile(join_path(pspec, "pspec.xml"))
        sources[specFile.source.name] = (specFile.source.version, specFile.source.release)
    return sources

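# A brief usage sketch for getVersion() above. The module name "repoutils" and the
# repository paths are illustrative assumptions, not taken from the original source.
from repoutils import getVersion

pspec_dirs = ["repos/pardus-2007/system/base/curl",
              "repos/pardus-2007/system/base/openssl"]
for name, (version, release) in getVersion(pspec_dirs).items():
    print "%s %s-%s" % (name, version, release)
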
class SourceDBTestCase(testcase.TestCase):

    def setUp(self):
        testcase.TestCase.setUp(self)
        self.sourcedb = pisi.sourcedb.init()
        self.spec = SpecFile()
        self.spec.read("tests/popt/pspec.xml")
        if not ctx.repodb.has_repo('test'):
            ctx.repodb.add_repo('test', pisi.repodb.Repo(pisi.uri.URI('fakerepo.xml')))

    def testAddRemove(self):
        self.sourcedb.add_spec(self.spec, 'test')
        self.assert_(self.sourcedb.has_spec("popt"))
        self.sourcedb.remove_spec("popt", 'test')
        self.assert_(not self.sourcedb.has_spec("popt"))

def testSetUri(self):
    self.spec = SpecFile()
    self.url = uri.URI(self.spec.source.archive.uri)
    self.url.set_uri('uri')
    assert 'uri' == self.url.get_uri()
    self.url.set_uri('urix')
    assert 'urix' == self.url.get_uri()

def getPackages(pspecList):
    packages = []
    for pspec in pspecList:
        specFile = SpecFile(join_path(pspec, "pspec.xml"))
        for p in specFile.packages:
            packages += [p.name]
    return packages

class FetcherTestCase(unittest.TestCase):

    def setUp(self):
        pisi.api.init()
        self.spec = SpecFile()
        self.spec.read("tests/popt/pspec.xml")
        self.url = uri.URI(self.spec.source.archiveUri)
        self.destpath = ctx.config.archives_dir()
        self.fetch = fetcher.Fetcher(self.url, self.destpath)

    def testFetch(self):
        self.fetch.fetch()
        fetchedFile = os.path.join(self.destpath, self.url.filename())
        if os.access(fetchedFile, os.R_OK):
            self.assertEqual(util.sha1_file(fetchedFile), self.spec.source.archiveSHA1)

def testTarUnpack(self):
    spec = SpecFile('repos/pardus-2007/system/base/curl/pspec.xml')
    targetDir = '/tmp/tests'
    archives = sourcearchive.SourceArchives(spec)
    archives.unpack(targetDir)
    for archive in spec.source.archive:
        assert archive.type == 'targz'

def testZipUnpack(self):
    spec = SpecFile('repos/pardus-2007/system/base/openssl/pspec.xml')
    targetDir = '/tmp/tests'
    archives = sourcearchive.SourceArchives(spec)
    archives.fetch()
    archives.unpack(targetDir)
    assert not exists(targetDir + '/openssl')

def setUp(self):
    # setUp will be called for each test individually
    pisi.api.init()
    self.spec = SpecFile()
    self.spec.read('tests/popt/pspec.xml')
    self.pdb = PackageDB('testdb')

def setUp(self):
    pisi.api.init()
    self.spec = SpecFile()
    self.spec.read("tests/popt/pspec.xml")
    self.url = uri.URI(self.spec.source.archiveUri)
    self.destpath = ctx.config.archives_dir()
    self.fetch = fetcher.Fetcher(self.url, self.destpath)

def setUp(self):
    unittest.TestCase.setUp(self)
    self.spec = SpecFile()
    self.spec.read('repos/pardus-2007/system/base/curl/pspec.xml')
    self.url = uri.URI(self.spec.source.archive[0].uri)
    self.url.set_auth_info(("user", "pass"))
    self.destpath = ctx.config.archives_dir()
    self.fetch = Fetcher(self.url, self.destpath)

def setUp(self):
    testcase.TestCase.setUp(self, database = False)
    self.spec = SpecFile()
    self.spec.read("tests/helloworld/pspec.xml")
    self.url = uri.URI(self.spec.source.archiveUri)
    self.destpath = ctx.config.archives_dir()
    self.fetch = fetcher.Fetcher(self.url, self.destpath)

class SourceDBTestCase(unittest.TestCase):

    def setUp(self):
        pisi.api.init()
        self.sourcedb = pisi.sourcedb.init()
        self.spec = SpecFile()
        self.spec.read("tests/popt/pspec.xml")

    def testAdd(self):
        self.sourcedb.add_source(self.spec.source)
        self.assert_(self.sourcedb.has_source("popt"))

    def testRemove(self):
        self.testAdd()
        self.sourcedb.remove_source("popt")
        self.assert_(not self.sourcedb.has_source("popt"))

def getPackages(pspecList):
    packages = []
    for pspec in pspecList:
        specFile = SpecFile(join_path(pspec, "pspec.xml"))
        for p in specFile.packages:
            packages += [(p.name,
                          "%s-%s" % (specFile.history[0].version, specFile.history[0].release),
                          specFile.source.name)]
    return packages

class PackageDBTestCase(testcase.TestCase):

    def setUp(self):
        testcase.TestCase.setUp(self)
        self.spec = SpecFile()
        self.spec.read('tests/popt/pspec.xml')
        self.spec.check()

    def testAdd(self):
        if not ctx.repodb.has_repo('test'):
            ctx.repodb.add_repo('test', pisi.repodb.Repo(pisi.uri.URI('fakerepo.xml')))
        ctx.packagedb.add_package(self.spec.packages[1], 'test')
        self.assert_(ctx.packagedb.has_package('popt-libs'))
        # close the database and remove lock
        #self.pdb.close()

    def testRemove(self):
        ctx.packagedb.remove_package('popt-libs', 'test')
        self.assert_(not ctx.packagedb.has_package('popt-libs', 'test'))

class PackageDBTestCase(testcase.TestCase):

    def setUp(self):
        testcase.TestCase.setUp(self)
        self.spec = SpecFile()
        self.spec.read('tests/popt/pspec.xml')
        self.pdb = PackageDB('testdb')

    def testAdd(self):
        self.pdb.add_package(self.spec.packages[0])
        self.assert_(self.pdb.has_package('popt-libs'))
        # close the database and remove lock
        self.pdb.close()

    def testRemove(self):
        self.pdb.remove_package('popt-libs')
        self.assert_(not self.pdb.has_package('popt-libs'))
        self.pdb.close()

def testUnpackTarCond(self):
    spec = SpecFile('repos/pardus-2007/system/base/curl/pspec.xml')
    targetDir = '/tmp'
    archiv = sourcearchive.SourceArchive(spec, targetDir)
    url = uri.URI(spec.source.archive.uri)
    filePath = join(pisi.context.config.archives_dir(), url.filename())
    if util.sha1_file(filePath) != spec.source.archive.sha1sum:
        fetch = fetcher.Fetcher(spec.source.archive.uri, targetDir)
        fetch.fetch()
    assert spec.source.archive.type == 'targz'

class PackageDBTestCase(unittest.TestCase):

    def setUp(self):
        # setUp will be called for each test individually
        pisi.api.init()
        self.spec = SpecFile()
        self.spec.read('tests/popt/pspec.xml')
        self.pdb = PackageDB('testdb')

    def testAdd(self):
        self.pdb.add_package(self.spec.packages[0])
        self.assert_(self.pdb.has_package('popt-libs'))
        # close the database and remove lock
        del self.pdb

    def testRemove(self):
        self.pdb.remove_package('popt-libs')
        self.assert_(not self.pdb.has_package('popt-libs'))
        del self.pdb

class FetchTestCase(unittest.TestCase):

    def setUp(self):
        unittest.TestCase.setUp(self)
        self.spec = SpecFile()
        self.spec.read('repos/pardus-2007/system/base/curl/pspec.xml')
        self.url = uri.URI(self.spec.source.archive[0].uri)
        self.url.set_auth_info(("user", "pass"))
        self.destpath = ctx.config.archives_dir()
        self.fetch = Fetcher(self.url, self.destpath)

    def testFetch(self):
        self.fetch.fetch()
        fetchedFile = os.path.join(self.destpath, self.url.filename())
        if os.access(fetchedFile, os.R_OK):
            self.assertEqual(util.sha1_file(fetchedFile), self.spec.source.archive[0].sha1sum)
        os.remove(fetchedFile)

    def testFetcherFunctions(self):
        enc = base64.encodestring('%s:%s' % self.url.auth_info())
        self.assertEqual(self.fetch._get_http_headers(), (('Authorization', 'Basic %s' % enc),))
        assert not self.fetch._get_ftp_headers()

def testMakeZip(self):
    spec = SpecFile('repos/pardus-2007/system/base/openssl/pspec.xml')
    targetDir = '/tmp/tests'
    archives = sourcearchive.SourceArchives(spec)
    archives.fetch(interactive=False)
    archives.unpack(targetDir, clean_dir=True)
    del archives
    newDir = targetDir + '/newZip'
    zip = archive.ArchiveZip(newDir, 'zip', 'w')
    sourceDir = '/tmp/pisi-root'
    zip.add_to_archive(sourceDir)
    zip.close()

def fetch_all(self):
    # fetch pspec file
    self.fetch()
    pspec = join(self.dest, self.url.filename())
    self.spec = SpecFile()
    self.spec.read(pspec)
    self.fetch_actionsfile()
    self.fetch_patches()
    self.fetch_comarfiles()
    self.fetch_additionalFiles()
    return pspec

class FetchTestCase(unittest.TestCase):

    def setUp(self):
        unittest.TestCase.setUp(self)
        self.spec = SpecFile()
        self.spec.read('repos/pardus-2007/system/base/curl/pspec.xml')
        self.url = uri.URI(self.spec.source.archive.uri)
        self.url.set_auth_info(("user", "pass"))
        self.destpath = ctx.config.archives_dir()
        self.fetch = Fetcher(self.url, self.destpath)

    def testFetch(self):
        self.fetch.fetch()
        fetchedFile = os.path.join(self.destpath, self.url.filename())
        if os.access(fetchedFile, os.R_OK):
            self.assertEqual(util.sha1_file(fetchedFile), self.spec.source.archive.sha1sum)
        os.remove(fetchedFile)

    def testFetcherFunctions(self):
        enc = base64.encodestring('%s:%s' % self.url.auth_info())
        self.assertEqual(self.fetch._get_http_headers(), (('Authorization', 'Basic %s' % enc),))
        assert not self.fetch._get_ftp_headers()

def read(filepath):
    spec = SpecFile()
    spec.read(filepath)
    tmp = Pspec(spec.source.name, filepath)
    tmp.pspec.source.packager = spec.source.packager
    tmp.pspec.source.homepage = spec.source.homepage
    tmp.pspec.source.archive = spec.source.archive
    tmp.pspec.source.name = spec.source.name
    tmp.pspec.source.license = spec.source.license
    tmp.pspec.source.partOf = spec.source.partOf
    tmp.pspec.source.summary = spec.source.summary
    tmp.pspec.source.description = spec.source.description
    for pkg in spec.packages:
        p = Package()
        p.name = pkg.name
        p.files = pkg.files
        p.conflicts = pkg.conflicts
        p.packageDependencies = pkg.packageDependencies
        tmp.pspec.packages.append(p)
    tmp.pspec.history = spec.history
    return tmp

def testMakeZip(self):
    # first unpack our dear sandbox.zip
    spec = SpecFile("tests/pccts/pspec.xml")
    targetDir = '/tmp/pisitest'
    achv = sourcearchive.SourceArchive(spec, targetDir)
    achv.fetch(interactive=False)
    achv.unpack(clean_dir=True)
    del achv
    newZip = targetDir + "/new.zip"
    zip = archive.ArchiveZip(newZip, 'zip', 'w')
    sourceDir = targetDir + "/pccts"
    zip.add_to_archive(sourceDir)
    zip.close()
    self.assertEqual(os.path.exists(newZip), True)
    self.assertEqual(len(zip.list_archive()), 326)

def testUnpackZipCond(self):
    spec = SpecFile("tests/pccts/pspec.xml")
    targetDir = '/tmp'
    achv = sourcearchive.SourceArchive(spec, targetDir)
    url = uri.URI(spec.source.archive.uri)
    filePath = join(ctx.config.archives_dir(), url.filename())
    # check cached
    if util.sha1_file(filePath) != spec.source.archive.sha1sum:
        fetch = fetcher.Fetcher(spec.source.archive.uri, targetDir)
        fetch.fetch()
    assert spec.source.archive.type == "zip"
    achv = archive.Archive(filePath, spec.source.archive.type)
    achv.unpack_files(["pccts/history.txt"], targetDir)
    assert pathexists(targetDir + "/pccts")
    testfile = targetDir + "/pccts/history.txt"
    assert pathexists(testfile)

def testUnpackZip(self):
    spec = SpecFile("tests/pccts/pspec.xml")
    targetDir = '/tmp/pisitest'
    assert spec.source.archive.type == "zip"
    achv = sourcearchive.SourceArchive(spec, targetDir)
    achv.fetch(interactive=False)
    achv.unpack(clean_dir=True)
    assert pathexists(targetDir + "/pccts")
    testfile = targetDir + "/pccts/history.txt"
    assert pathexists(testfile)
    # check file integrity
    self.assertEqual(util.sha1_file(testfile), "f2be0f9783e84e98fe4e2b8201a8f506fcc07a4d")

def testUnpackTar(self):
    spec = SpecFile("tests/popt/pspec.xml")
    targetDir = '/tmp/pisitest'
    achv = sourcearchive.SourceArchive(spec, targetDir)
    assert spec.source.archive.type == "targz"
    # skip fetching and directly unpack the previously fetched (by
    # fetchertests) archive
    if not achv.is_cached(interactive=False):
        achv.fetch(interactive=False)
    achv.unpack()
    # but testing is hard
    # "var/tmp/pisi/popt-1.7-3/work" (targetDir)
    assert pathexists(targetDir + "/popt-1.7")
    testfile = targetDir + "/popt-1.7/Makefile.am"
    assert pathexists(testfile)
    # check file integrity
    self.assertEqual(util.sha1_file(testfile), "5af9dd7d754f788cf511c57ce0af3d555fed009d")

def fillDB():
    """Fills the database with information gathered from the svn directories."""
    ### Measure total running time ###
    startTime = time()
    ### Save DB Creation Date ###
    db_creation_date = DBDate()
    db_creation_date.save()
    sayac = 1  # package counter
    for repo_name, repo_url in svn_repositories:
        for root, dirs, files in os.walk(os.path.join(home, expsvndir, repo_name)):
            for file in files:
                if file == 'pspec.xml':
                    spec = SpecFile(os.path.join(root, file))
                    ### Skip the package if it is blacklisted ###
                    if spec.source.name in package_blacklist:
                        print "%s is blacklisted. Skipping..." % spec.source.name
                        sayac += 1
                        continue
                    ### FIXME: Read from database and retrieve saved package objects,
                    ### FIXME: there should be no need to erase all database everytime.
                    ### Print info ###
                    print "\033[01;33m%s\033[0m - %s - (Package \033[01;33m%d\033[0m)" % (spec.source.name, repo_name, sayac),
                    ### Take the start time of this iteration ###
                    packStartTime = time()
                    ### Repo ###
                    r, r_created = Repo.objects.get_or_create(name=repo_name, url=repo_url)
                    if r_created:
                        r.save()
                    ### Skip this package if the same source (by name and repo) already exists ###
                    if not repo_name == "playground":
                        try:
                            s = Source.objects.get(name=spec.source.name, repo=r)
                            print "Source already exists. Skipping..."
                            sayac += 1
                            continue
                        except:
                            pass
                    ### Create the Source object ###
                    s = Source()
                    s.repo = r
                    ### Source data ###
                    s.name = spec.source.name
                    s.homepage = spec.source.homepage
                    s.version = spec.getSourceVersion()
                    ### Create or get the Packager object ###
                    pkgr, pkgr_created = Packager.objects.get_or_create(name=spec.source.packager.name)
                    email, email_created = Email.objects.get_or_create(email=spec.source.packager.email)
                    if email_created:
                        email.save()
                    pkgr.email.add(email)
                    pkgr.save()
                    s.packager = pkgr
                    # state before the Packager object was introduced
                    #s.packager = spec.source.packager.name
                    #s.email = spec.source.packager.email
                    s.archive_name = spec.source.archive.name
                    s.archive_sha1sum = spec.source.archive.sha1sum
                    s.archive_type = spec.source.archive.type
                    s.archive_url = spec.source.archive.uri
                    ### Slug ###
                    s.slug = spec.source.name
                    ### Statistics ###
                    s.last_update = spec.history[0].date
                    s.firstReleaseDate = spec.history[-1].date
                    s.buildScriptSize = getFileSize(os.path.join(root, 'actions.py'))
                    s.specScriptSize = getFileSize(os.path.join(root, file))
                    s.updateCount = spec.history.__len__()
                    s.patchCount = spec.source.patches.__len__()
                    ### Component object ###
                    try:
                        # try to read it from component.xml
                        comp_file = PisiComponent(os.path.join(root, '../component.xml'))
                        component, comp_created = Component.objects.get_or_create(component=comp_file.name)
                        if comp_created:
                            ### Filling the M2M fields only when the object is created will leave the
                            ### Component object without the other information whenever component.xml
                            ### cannot be found and the component is derived from the directory name.
                            ### A clean solution is needed, or make sure Playground is processed last. ###
                            ### LocalName loop for the Component object ###
                            for localname_lang in comp_file.localName.keys():
                                localname = LocalName(lang=localname_lang, localname=comp_file.localName[localname_lang])
                                localname.save()
                                component.localname.add(localname)
                            ### Summary loop for the Component object ###
                            for comp_summary_lang in comp_file.summary.keys():
                                comp_summary = Summary(lang=comp_summary_lang, summary=comp_file.summary[comp_summary_lang])
                                comp_summary.save()
                                component.summary.add(comp_summary)
                            ### Description loop for the Component object ###
                            for comp_desc_lang in comp_file.description.keys():
                                comp_desc = Description(lang=comp_desc_lang, desc=comp_file.description[comp_desc_lang])
                                comp_desc.save()
                                component.desc.add(comp_desc)
                            ### Save the Component object once the loops are done ###
                            component.save()
                        ### Attach it to the Source ###
                        s.component = component
                    except:
                        # if there is no component.xml, derive the component from the directory name
                        print "Could not find component.xml, trying to retrieve component from directories"
                        dir_comp = root[:root.rfind('/')].replace(os.path.join(home, expsvndir, repo_name) + '/', '').replace("/", ".")
                        component, comp_created = Component.objects.get_or_create(component=dir_comp)
                        if comp_created:
                            component.save()
                        s.component = component
                    ### Save the Source ###
                    s.save()
                    ### License ###
                    for l in spec.source.license:
                        li, li_created = License.objects.get_or_create(license=l)
                        if li_created:
                            li.save()
                        s.license.add(li)
                    ### IsA ###
                    for i in spec.source.isA:
                        isa, isa_created = IsA.objects.get_or_create(isa=i)
                        if isa_created:
                            isa.save()
                        s.isa.add(isa)
                    ### Summary ###
                    for su in spec.source.summary.keys():
                        sum, sum_created = Summary.objects.get_or_create(summary=spec.source.summary[su])
                        if sum_created:
                            sum.lang = su
                            sum.save()
                        s.summary.add(sum)
                    ### Description ###
                    if not spec.source.description.keys():
                        bos, bos_created = Description.objects.get_or_create(desc=" ")
                        if bos_created:
                            bos.lang = " "
                            bos.save()
                        s.desc.add(bos)
                    for de in spec.source.description.keys():
                        des, des_created = Description.objects.get_or_create(desc=spec.source.description[de])
                        if des_created:
                            des.lang = de
                            des.save()
                        s.desc.add(des)
                    ### Build Dependencies ###
                    for bu in spec.source.buildDependencies:
                        bd, bd_created = Dependency.objects.get_or_create(name=bu.package,
                                                                          versionFrom=bu.versionFrom,
                                                                          versionTo=bu.versionTo,
                                                                          version=bu.version,
                                                                          releaseFrom=bu.releaseFrom,
                                                                          releaseTo=bu.releaseTo,
                                                                          release=bu.release)
                        if bd_created:
                            bd.save()
                        s.build_dep.add(bd)
                    ### Patches ###
                    for pec in spec.source.patches:
                        patch = Patch(name=pec.filename, level=pec.level)
                        patch.save()
                        s.patch.add(patch)
                    ### Updates ###
                    for h in spec.history:
                        updater, updater_created = Packager.objects.get_or_create(name=h.name)
                        u_email, u_email_created = Email.objects.get_or_create(email=h.email)
                        if u_email_created:
                            u_email.save()
                        updater.email.add(u_email)
                        updater.save()
                        history = Update(release=h.release,
                                         type=h.type,
                                         date=h.date,
                                         version=h.version,
                                         comment=h.comment,
                                         packager=updater,
                                         #name=h.name,
                                         #email=h.email,
                                         )
                        history.save()
                        history.email.add(u_email)
                        s.update.add(history)
                    ######### PACKAGE #########
                    ### Loop over the Package objects ###
                    for package in spec.packages:
                        ### Create the Package object ###
                        p = Package(name=package.name, source=s, slug=package.name)
                        #p, p_created = Package.objects.get_or_create(name=package.name, source=s)
                        #if not p_created:
                        #    print "Package %s already exists. Skipping..." % p.name
                        p.save()
                        ### Attach the Package to the Source
                        s.packages.add(p)
                        ### RuntimeDeps ##
                        for run_dep in package.packageDependencies:
                            runtime_dep, rundep_created = Dependency.objects.get_or_create(name=run_dep.package,
                                                                                           versionFrom=run_dep.versionFrom,
                                                                                           versionTo=run_dep.versionTo,
                                                                                           version=run_dep.version,
                                                                                           releaseFrom=run_dep.releaseFrom,
                                                                                           releaseTo=run_dep.releaseTo,
                                                                                           release=run_dep.release)
                            if rundep_created:
                                runtime_dep.save()
                            p.runtime_dep.add(runtime_dep)
                        ### Files ###
                        for fi in package.files:
                            pfile, file_created = Files.objects.get_or_create(path=fi.path, fileType=fi.fileType)
                            if file_created:
                                pfile.save()
                            p.files.add(pfile)
                        ### Additional Files ###
                        for add_file in package.additionalFiles:
                            af = A_files(filename=add_file.filename,
                                         target=add_file.target,
                                         perm=add_file.permission,
                                         owner=add_file.owner,
                                         group=add_file.group)
                            af.save()
                            p.a_files.add(af)
                    ### PRINTS ###
                    print "\033[01;33m%.3f s\033[0m" % (time() - packStartTime),
                    print "- %d s" % (time() - startTime)
                    sayac += 1
    ### Commit to the database ###
    print "Loops finished, committing to the database..."
    trans_time = time()
    transaction.commit()
    print "Data inserted. (%f s)" % (time() - trans_time)
    print "Done. Total processing time: %.2f minutes." % ((time() - startTime) / 60)

class Pspec:

    def __init__(self, pkgname, filepath):
        self.pspec = SpecFile()
        self.package = Package()
        self.update = Update()
        self.filepath = filepath
        self.name = pkgname

    @staticmethod
    def read(filepath):
        spec = SpecFile()
        spec.read(filepath)
        tmp = Pspec(spec.source.name, filepath)
        tmp.pspec.source.packager = spec.source.packager
        tmp.pspec.source.homepage = spec.source.homepage
        tmp.pspec.source.archive = spec.source.archive
        tmp.pspec.source.name = spec.source.name
        tmp.pspec.source.license = spec.source.license
        tmp.pspec.source.partOf = spec.source.partOf
        tmp.pspec.source.summary = spec.source.summary
        tmp.pspec.source.description = spec.source.description
        for pkg in spec.packages:
            p = Package()
            p.name = pkg.name
            p.files = pkg.files
            p.conflicts = pkg.conflicts
            p.packageDependencies = pkg.packageDependencies
            tmp.pspec.packages.append(p)
        tmp.pspec.history = spec.history
        return tmp

    def add_dependencies(self, dependencies):
        # special case of given one dependency package
        # with dependency versioning info [**kw, name]
        # [{"versionFrom":"0.4.2"}, "udev"]
        if type(dependencies[0]) == dict:
            dep = Dependency()
            (kw, dep.package) = dependencies
            dep.__dict__[kw.keys()[0]] = kw.values()[0]
            self.package.packageDependencies.append(dep)
            return
        for depname in dependencies:
            dep = Dependency()
            dep.package = depname
            self.package.packageDependencies.append(dep)

    def remove_dependencies(self, dependencies):
        for depname in dependencies:
            for dep in self.package.packageDependencies:
                if dep.package == depname:
                    self.package.packageDependencies.remove(dep)

    def add_conflicts(self, conflicts):
        # special case of given one conflict package
        # with conflict versioning info [**kw, name]
        # [{"versionFrom":"0.4.2"}, "udev"]
        if type(conflicts[0]) == dict:
            conf = Conflict()
            (kw, conf.package) = conflicts
            conf.__dict__[kw.keys()[0]] = kw.values()[0]
            self.package.conflicts.append(conf)
            return
        for con in conflicts:
            conflict = Conflict()
            conflict.package = con
            self.package.conflicts.append(conflict)

    def remove_conflicts(self, conflicts):
        for con in conflicts:
            self.package.conflicts.remove(con)

    def update_history(self, date, version):
        new = Update()
        new.name = self.update.name
        new.email = self.update.email
        new.date = date
        new.version = version
        new.release = str(int(self.update.release) + 1)
        self.update = new
        self.pspec.history.append(self.update)
        self.pspec.history.reverse()

    def add_requires(self, actions):
        for action in actions:
            new = Action()
            new.action = action
            self.pspec.history[0].requires.append(new)

    def set_source(self, homepage, summary, description, license, partOf):
        self.pspec.source.name = self.name
        self.pspec.source.homepage = homepage
        self.pspec.source.license = license
        self.pspec.source.partOf = partOf
        self.pspec.source.summary = LocalText("Summary")
        self.pspec.source.description = LocalText("Description")
        self.pspec.source.summary["en"] = summary
        self.pspec.source.description["en"] = description

    def set_packager(self, name, email):
        self.pspec.source.packager.name = unicode(name)
        self.pspec.source.packager.email = email
        self.update.name = unicode(name)
        self.update.email = email

    def add_archive(self, sha1sum, type, uri):
        archive = Archive()
        archive.sha1sum = sha1sum
        archive.type = type
        archive.uri = uri
        self.pspec.source.archive.append(archive)

    def add_file_path(self, path, type):
        p = Path()
        p.path = path
        p.fileType = type
        self.pspec.packages[0].files.append(p)

    def set_package(self, dependencies, conflicts):
        self.package.name = self.name
        if dependencies:
            for depname in dependencies:
                dep = Dependency()
                dep.package = depname
                self.package.packageDependencies.append(dep)
        if conflicts:
            for package in conflicts:
                conflict = Conflict()
                conflict.package = package
                self.package.conflicts.append(conflict)
        self.pspec.packages.append(self.package)

    def set_history(self, date, version, comment = "No Comment", release = "1"):
        self.update.date = date
        self.update.version = version
        self.update.comment = comment
        self.update.release = release
        self.pspec.history.append(self.update)

    def write(self):
        self.pspec.write(self.filepath)

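# A hedged sketch of driving the Pspec helper above to emit a minimal pspec.xml.
# Every literal value below (names, homepage, URI, sha1sum, dates) is invented for
# illustration. The call order matters: set_package() creates the package entry
# that add_file_path() appends to.
pspec = Pspec("helloworld", "helloworld/pspec.xml")
pspec.set_source(homepage="http://example.com/helloworld",
                 summary="Hello world example",
                 description="A tiny example package.",
                 license=["GPLv2"],
                 partOf="applications.misc")
pspec.set_packager("Jane Doe", "jane@example.com")
pspec.add_archive(sha1sum="0" * 40, type="targz",
                  uri="http://example.com/helloworld-1.0.tar.gz")
pspec.set_package(dependencies=["glibc"], conflicts=[])
pspec.add_file_path("/usr/bin", "executable")
pspec.set_history("2008-01-01", "1.0")
pspec.write()
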
def testFetch(self):
    spec = SpecFile('repos/pardus-2007/system/base/curl/pspec.xml')
    srcarch = pisi.sourcearchive.SourceArchive(spec.source.archive[0])
    self.assertTrue(not srcarch.fetch())

def setUp(self):
    testcase.TestCase.setUp(self)
    self.spec = SpecFile()
    self.spec.read('tests/popt/pspec.xml')
    self.spec.check()

def set_spec_file(self, pspecfile):
    self.pspecfile = pspecfile
    spec = SpecFile()
    spec.read(pspecfile)
    self.spec = spec

def __init__(self, pkgname, filepath):
    self.pspec = SpecFile()
    self.package = Package()
    self.update = Update()
    self.filepath = filepath
    self.name = pkgname

class Pspec:

    def __init__(self, pkgname, filepath):
        self.pspec = SpecFile()
        self.package = Package()
        self.update = Update()
        self.filepath = filepath
        self.name = pkgname

    @staticmethod
    def read(filepath):
        spec = SpecFile()
        spec.read(filepath)
        tmp = Pspec(spec.source.name, filepath)
        tmp.pspec.source.packager = spec.source.packager
        tmp.pspec.source.homepage = spec.source.homepage
        tmp.pspec.source.archive = spec.source.archive
        tmp.pspec.source.name = spec.source.name
        tmp.pspec.source.license = spec.source.license
        tmp.pspec.source.partOf = spec.source.partOf
        tmp.pspec.source.summary = spec.source.summary
        tmp.pspec.source.description = spec.source.description
        for pkg in spec.packages:
            p = Package()
            p.name = pkg.name
            p.files = pkg.files
            p.conflicts = pkg.conflicts
            p.packageDependencies = pkg.packageDependencies
            tmp.pspec.packages.append(p)
        tmp.pspec.history = spec.history
        return tmp

    def add_dependencies(self, dependencies):
        # special case of given one dependency package
        # with dependency versioning info [**kw, name]
        # [{"versionFrom":"0.4.2"}, "udev"]
        if type(dependencies[0]) == dict:
            dep = Dependency()
            (kw, dep.package) = dependencies
            dep.__dict__[list(kw.keys())[0]] = list(kw.values())[0]
            self.package.packageDependencies.append(dep)
            return
        for depname in dependencies:
            dep = Dependency()
            dep.package = depname
            self.package.packageDependencies.append(dep)

    def remove_dependencies(self, dependencies):
        for depname in dependencies:
            for dep in self.package.packageDependencies:
                if dep.package == depname:
                    self.package.packageDependencies.remove(dep)

    def add_conflicts(self, conflicts):
        # special case of given one conflict package
        # with conflict versioning info [**kw, name]
        # [{"versionFrom":"0.4.2"}, "udev"]
        if type(conflicts[0]) == dict:
            conf = Conflict()
            (kw, conf.package) = conflicts
            conf.__dict__[list(kw.keys())[0]] = list(kw.values())[0]
            self.package.conflicts.append(conf)
            return
        for con in conflicts:
            conflict = Conflict()
            conflict.package = con
            self.package.conflicts.append(conflict)

    def remove_conflicts(self, conflicts):
        for con in conflicts:
            self.package.conflicts.remove(con)

    def update_history(self, date, version):
        new = Update()
        new.name = self.update.name
        new.email = self.update.email
        new.date = date
        new.version = version
        new.release = str(int(self.update.release) + 1)
        self.update = new
        self.pspec.history.append(self.update)
        self.pspec.history.reverse()

    def add_requires(self, actions):
        for action in actions:
            new = Action()
            new.action = action
            self.pspec.history[0].requires.append(new)

    def set_source(self, homepage, summary, description, license, partOf):
        self.pspec.source.name = self.name
        self.pspec.source.homepage = homepage
        self.pspec.source.license = license
        self.pspec.source.partOf = partOf
        self.pspec.source.summary = LocalText("Summary")
        self.pspec.source.description = LocalText("Description")
        self.pspec.source.summary["en"] = summary
        self.pspec.source.description["en"] = description

    def set_packager(self, name, email):
        self.pspec.source.packager.name = str(name)
        self.pspec.source.packager.email = email
        self.update.name = str(name)
        self.update.email = email

    def add_archive(self, sha1sum, type, uri):
        archive = Archive()
        archive.sha1sum = sha1sum
        archive.type = type
        archive.uri = uri
        self.pspec.source.archive.append(archive)

    def add_file_path(self, path, type):
        p = Path()
        p.path = path
        p.fileType = type
        self.pspec.packages[0].files.append(p)

    def set_package(self, dependencies, conflicts):
        self.package.name = self.name
        if dependencies:
            for depname in dependencies:
                dep = Dependency()
                dep.package = depname
                self.package.packageDependencies.append(dep)
        if conflicts:
            for package in conflicts:
                conflict = Conflict()
                conflict.package = package
                self.package.conflicts.append(conflict)
        self.pspec.packages.append(self.package)

    def set_history(self, date, version, comment="No Comment", release="1"):
        self.update.date = date
        self.update.version = version
        self.update.comment = comment
        self.update.release = release
        self.pspec.history.append(self.update)

    def write(self):
        self.pspec.write(self.filepath)

def setUp(self):
    testcase.TestCase.setUp(self)
    self.spec = SpecFile()
    self.spec.read('tests/popt/pspec.xml')
    self.pdb = PackageDB('testdb')

def handle(self, *args, **options):
    repositories = (
        # ('core', '2.0', 'https://ciftlik.pisilinux.org/2.0/pisi-index.xml.xz'),
        # ('main', '2.0', 'https://ciftlik.pisilinux.org/2.0/pisi-index.xml.xz'),
        ('contrib', '2.0', 'https://ciftlik.pisilinux.org/2.0/pisi-index.xml.xz'),
    )
    home = os.environ.get('HOME')
    gitdir = 'workspaces/pisi'
    package_blacklist = []
    startTime = time()
    package_counter = 1
    for repo_os, repo_name, repo_url in repositories:
        repo_path = os.path.join(home, gitdir, repo_os)
        repo, created = Repo.objects.get_or_create(name=repo_os, url=repo_url)
        for root, dirs, files in os.walk(repo_path):
            for file in files:
                if file == 'pspec.xml':
                    print "spec file: %s" % os.path.join(root, file)
                    spec = SpecFile(os.path.join(root, file))
                    if spec.source.name in package_blacklist:
                        print "%s is blacklisted. Skipping..." % spec.source.name
                        package_counter += 1
                        continue
                    packStartTime = time()
                    component, created = (None, None)
                    ### Component ###
                    try:
                        comp_file = PisiComponent(os.path.join(root, '../component.xml'))
                        component, created = Component.objects.get_or_create(component=comp_file.name, repo=repo)
                    except:
                        print "Could not find component.xml, trying to retrieve component from directories"
                        dir_comp = root[:root.rfind('/')].replace(os.path.join(home, gitdir, repo_name) + '/', '').replace("/", ".")
                        component, created = Component.objects.get_or_create(component=dir_comp, repo=repo)
                    packager, created = Packager.objects.get_or_create(name=spec.source.packager.name,
                                                                       email=spec.source.packager.email)
                    print "\033[01;33m%s\033[0m - %s - %s - (Package \033[01;33m%d\033[0m)" % (spec.source.name, component.component, repo_name, package_counter)
                    ######### Source #########
                    source_slug = '%s-%s' % (slugify(spec.source.name), repo_os)
                    source, created = Source.objects.get_or_create(name=spec.source.name,
                                                                   slug=source_slug,
                                                                   repo=repo,
                                                                   defaults={'component': component,
                                                                             'packager': packager})
                    #try:
                    source.summary = spec.source.summary
                    source.description = spec.source.description
                    source.homepage = spec.source.homepage
                    source.version = spec.getSourceVersion()
                    source.archive_name = spec.source.archive[0].name
                    source.archive_sha1sum = spec.source.archive[0].sha1sum
                    source.archive_type = spec.source.archive[0].type
                    source.archive_url = spec.source.archive[0].uri
                    ### Statistics ###
                    source.created_at = validate_date(spec.history[-1].date, repo_os)
                    source.updated_at = validate_date(spec.history[0].date, repo_os)
                    source.build_script_size = getFileSize(os.path.join(root, 'actions.py'))
                    source.spec_script_size = getFileSize(os.path.join(root, file))
                    source.update_count = spec.history.__len__()
                    source.patch_count = spec.source.patches.__len__()
                    source.save()
                    ### Build Dependencies ###
                    for bu in spec.source.buildDependencies:
                        bd, bd_created = Dependency.objects.get_or_create(name=bu.package,
                                                                          versionFrom=bu.versionFrom,
                                                                          versionTo=bu.versionTo,
                                                                          version=bu.version,
                                                                          releaseFrom=bu.releaseFrom,
                                                                          releaseTo=bu.releaseTo,
                                                                          release=bu.release)
                        source.build_dep.add(bd)
                    ### Patches ###
                    for pec in spec.source.patches:
                        patch, created = Patch.objects.get_or_create(source=source, name=pec.filename, level=pec.level)
                    ######### Package #########
                    for package in spec.packages:
                        package_slug = '%s-%s' % (slugify(package.name), repo_os)
                        p, p_created = Package.objects.get_or_create(slug=package_slug,
                                                                     defaults={'name': package.name,
                                                                               'source': source})
                        source.packages.add(p)
                        ### RuntimeDeps ##
                        for run_dep in package.packageDependencies:
                            runtime_dep, rundep_created = Dependency.objects.get_or_create(name=run_dep.package,
                                                                                           versionFrom=run_dep.versionFrom,
                                                                                           versionTo=run_dep.versionTo,
                                                                                           version=run_dep.version,
                                                                                           releaseFrom=run_dep.releaseFrom,
                                                                                           releaseTo=run_dep.releaseTo,
                                                                                           release=run_dep.release)
                            p.runtime_dep.add(runtime_dep)
                        ### Files ###
                        for package_file in package.files:
                            file, created = Files.objects.get_or_create(path=package_file.path, fileType=package_file.fileType)
                            p.files.add(file)
                        ### Additional Files ###
                        for additional_file in package.additionalFiles:
                            a_file, created = A_files.objects.get_or_create(filename=additional_file.filename,
                                                                            target=additional_file.target,
                                                                            perm=additional_file.permission,
                                                                            owner=additional_file.owner,
                                                                            group=additional_file.group)
                            p.a_files.add(a_file)
                    ### Save the Source ###
                    source.save()
                    print "\033[01;33m%.3f s\033[0m" % (time() - packStartTime),
                    print "- %d s" % (time() - startTime)
                    package_counter += 1

def testIscached(self):
    spec = SpecFile('repos/pardus-2007/system/base/curl/pspec.xml')
    srcarch = pisi.sourcearchive.SourceArchive(spec.source.archive[0])
    assert srcarch.is_cached()

def testMakeUri(self):
    spec = SpecFile("repos/pardus-2007/system/base/curl/pspec.xml")
    url = uri.URI(spec.source.archive[0].uri)
    self.assert_(File.make_uri(url))

def testUnpack(self):
    spec = SpecFile('repos/pardus-2007/system/base/curl/pspec.xml')
    targetDir = '/tmp/tests'
    srcarch = pisi.sourcearchive.SourceArchive(spec.source.archive[0])
    srcarch.unpack(targetDir)

def setUp(self):
    pisi.api.init()
    self.sourcedb = pisi.sourcedb.init()
    self.spec = SpecFile()
    self.spec.read("tests/popt/pspec.xml")

def bump(options, path):
    if path.endswith("/"):
        path = path[:-1]
    if not path.endswith("/pspec.xml"):
        path += "/pspec.xml"
    if not os.path.isfile(path):
        print "%s not found!" % path
        sys.exit(1)
    if not os.access(path, os.W_OK):
        print "Cannot write to %s." % path
        sys.exit(1)
    info = get_and_save_user_info()
    pspec = open(path, "r").read().strip()
    specfile = SpecFile(path)
    old_archive = specfile.source.archive
    if len(old_archive) == 0:
        print("No <Archive> tag found in %s." % path)
        return
    elif len(old_archive) > 1 and not options.many and not options.release and not options.rrelease:
        print("Multiarchive pspec.xml not supported yet.")
        sys.exit(1)
    old_archive = old_archive[0].uri
    old_type = re.sub(ver_ext_pattern, "\\2", old_archive).replace(".", "").replace("src", "")
    new_type = old_type
    last = specfile.history[0]
    old_version = last.version
    if options.many:
        verfrom = options.many.split("-")[0]
        new_version = options.many.split("-")[1]
        if not old_version == verfrom:
            print "skipping %s, different versions" % specfile.source.name
            return
        new_archive = old_archive.replace(old_version, new_version)
    elif options.uri:
        if not options.uri.split(":")[0] in ["ftp", "file", "http", "https", "mirrors"]:
            print "Wrong uri: %s" % options.uri
            sys.exit(1)
        new_archive = options.uri
        new_version = re.sub(ver_ext_pattern, "\\1", new_archive)
        new_type = re.sub(ver_ext_pattern, "\\2", new_archive).replace(".", "").replace("tgz", "targz").replace("src", "")
    elif options.ver:
        if not re.search("[\d\.]", options.ver):
            print "Wrong version number: %s" % options.ver
            sys.exit(1)
        new_version = options.ver
        new_archive = old_archive.replace(old_version, new_version)
        if "." in old_version:
            mver = ".".join(old_version.split(".")[:-1])
            if "/%s/" % mver in new_archive:
                new_archive = new_archive.replace("/%s/" % mver, "/%s/" % ".".join(new_version.split(".")[:-1]))
    elif options.release or options.rrelease:
        new_type = old_type
        new_archive = old_archive
        new_version = old_version
    else:
        print old_archive
        sys.exit(0)
    info["RELEASE"] = int(last.release) + 1
    info["DATE"] = time.strftime("%Y-%m-%d")
    info["VERSION"] = new_version
    if options.release or options.rrelease:
        info["COMMENT"] = "Release bump."
    else:
        info["COMMENT"] = "Version bump."
    new_release = RELEASE % info
    new_pspec = ''
    if new_type == "tgz":
        new_type = "targz"
    elif not new_type in types:
        new_type = "binary"
    for line in pspec.split("\n"):
        if "<Archive" in line and old_archive in line:
            new_line = line.split('>')
            new_line = new_line[0] + '>' + new_archive + '<' + new_line[1].split('<')[1] + '>'
            new_pspec = "\n".join((new_pspec, new_line))
        elif "<History>" in line:
            new_pspec = "\n".join((new_pspec, " <History>\n%s" % new_release))
        elif options.vfrom and "<Dependency versionFrom" in line:
            new_pspec = "\n".join((new_pspec, line.replace(old_version, new_version)))
        else:
            if not new_pspec:
                new_pspec = line
            else:
                new_pspec = "\n".join((new_pspec, line))
    open(path, "w").write(new_pspec)
    open(path, "a").write("\n")
    if options.release or options.rrelease:
        return specfile.source.name
    if os.getenv("USER") != "root":
        os.system("sudo pisi build %s --fetch" % path)
    else:
        os.system("pisi build %s --fetch" % path)
    pspec = open(path, "r").read().strip()
    new_pspec = ''
    for line in pspec.split("\n"):
        if "<Archive" in line and os.path.basename(new_archive) in line:
            sha1sum = os.popen("sha1sum /var/cache/pisi/archives/%s" % os.path.basename(new_archive)).read().split()[0]
            new_line = re.sub("(.*sha1sum=)[\"\'][^\"^\']+[\"\'](.*)", r'\1"%s"\2' % sha1sum, line)
            new_line = re.sub("(.*type=)[\"\'][^\"^\']+[\"\'](.*)", r'\1"%s"\2' % new_type, new_line)
            new_pspec = "\n".join((new_pspec, new_line))
        else:
            if not new_pspec:
                new_pspec = line
            else:
                new_pspec = "\n".join((new_pspec, line))
    open(path, "w").write(new_pspec)
    open(path, "a").write("\n")
    return specfile.source.name

class SourceFetcher(object):

    def __init__(self, url, authInfo=None):
        self.url = url
        if authInfo:
            self.url.set_auth_info(authInfo)
        self.location = dirname(self.url.uri)
        pkgname = basename(dirname(self.url.path()))
        self.dest = join(ctx.config.tmp_dir(), pkgname)

    def fetch_all(self):
        # fetch pspec file
        self.fetch()
        pspec = join(self.dest, self.url.filename())
        self.spec = SpecFile()
        self.spec.read(pspec)
        self.fetch_actionsfile()
        self.fetch_patches()
        self.fetch_comarfiles()
        self.fetch_additionalFiles()
        return pspec

    def fetch_actionsfile(self):
        actionsuri = join(self.location, ctx.const.actions_file)
        self.url.uri = actionsuri
        self.fetch()

    def fetch_patches(self):
        spec = self.spec
        for patch in spec.source.patches:
            patchuri = join(self.location, ctx.const.files_dir, patch.filename)
            self.url.uri = patchuri
            self.fetch(ctx.const.files_dir)

    def fetch_comarfiles(self):
        spec = self.spec
        for package in spec.packages:
            for pcomar in package.providesComar:
                comaruri = join(self.location, ctx.const.comar_dir, pcomar.script)
                self.url.uri = comaruri
                self.fetch(ctx.const.comar_dir)

    def fetch_additionalFiles(self):
        spec = self.spec
        for pkg in spec.packages:
            for afile in pkg.additionalFiles:
                afileuri = join(self.location, ctx.const.files_dir, afile.filename)
                self.url.uri = afileuri
                self.fetch(ctx.const.files_dir)

    def fetch(self, appendDest=""):
        from fetcher import fetch_url
        ctx.ui.info(_("Fetching %s") % self.url.uri)
        dest = join(self.dest, appendDest)
        fetch_url(self.url, dest)

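# A hedged usage sketch for SourceFetcher above: pull a remote source recipe
# (pspec.xml plus actions.py, patches, comar scripts and additional files) into
# pisi's tmp directory. The URI below is an invented example, and URI is assumed
# to be the same class used by the other snippets here (pisi.uri.URI).
url = URI("http://example.com/repo/system/base/helloworld/pspec.xml")
srcfetcher = SourceFetcher(url)
pspec_path = srcfetcher.fetch_all()
print "fetched spec file to", pspec_path
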
def testMakeUri(self):
    self.spec = SpecFile()
    self.url = uri.URI(self.spec.source.archive.uri)
    f = File('repos/pardus-2007/system/base/curl/pspec.xml', File.read)
    self.assert_(f.make_uri('uri'))