def _run(_pakfire, source_id, commit_id, updated_files, deleted_files): commit = None source = None if commit_id: commit = _pakfire.sources.get_commit_by_id(commit_id) assert commit source = commit.source if source_id and not source: source = _pakfire.sources.get_by_id(source_id) assert source if updated_files: # Create a temporary directory where to put all the files # that are generated here. pkg_dir = tempfile.mkdtemp() try: config = pakfire.config.Config(["general.conf",]) config.parse(source.distro.get_config()) p = pakfire.PakfireServer(config=config) pkgs = [] for file in updated_files: try: pkg_file = p.dist(file, pkg_dir) pkgs.append(pkg_file) except: raise # Import all packages in one swoop. for pkg in pkgs: # Import the package file and create a build out of it. backend.builds.import_from_package(_pakfire, pkg, distro=source.distro, commit=commit, type="release") except: if commit: commit.state = "failed" raise finally: if os.path.exists(pkg_dir): shutil.rmtree(pkg_dir) for file in deleted_files: # Determine the name of the package. name = os.path.basename(file) name = name[:len(MAKEFILE_EXTENSION) + 1] source.distro.delete_package(name) if commit: commit.state = "finished"
def resolvdep(self):
    """Check the build dependencies of this build's source package.

    Runs the pakfire dependency solver against the source file of this
    build. On a dependency problem the solver raises DependencyError;
    the failure and its problem message are recorded on this object
    instead of being propagated.
    """
    log.info("Processing dependencies for %s..." % self)

    config = pakfire.config.Config(files=["general.conf"])
    config.parse(self.get_config(local=True))

    # The filename of the source file.
    filename = os.path.join(PACKAGES_DIR, self.build.pkg.path)
    assert os.path.exists(filename), filename

    # Create a new pakfire instance with the configuration for
    # this build.
    p = pakfire.PakfireServer(config=config, arch=self.arch)

    # Try to solve the build dependencies.
    try:
        solver = p.resolvdep(filename)

    # Catch dependency errors and log the problem string.
    # FIX: use the "except ... as" form (valid on Python 2.6+ and
    # required on Python 3) instead of the Python-2-only comma form.
    except DependencyError as e:
        self.dependency_check_succeeded = False
        # NOTE(review): this stores the exception object itself, not
        # str(e) — presumably consumers coerce it to a string; confirm.
        self.message = e
def execute_job(self, job):
    """Run one build job: download the source, build it, upload results.

    Dependency and download failures are reported to the server as a
    distinct job state ("dependency_error" / "download_error") before
    being re-raised to the caller.
    """
    log.debug("Executing job: %s" % job)

    # Call the function that processes the build and try to catch general
    # exceptions and report them to the server.
    # If everything goes okay, we tell this the server, too.

    # Create a temporary file and a directory for the resulting files.
    tmpdir = tempfile.mkdtemp()
    tmpfile = os.path.join(tmpdir, os.path.basename(job.source_url))
    logfile = os.path.join(tmpdir, "build.log")

    # Create pakfire configuration instance.
    config = pakfire.config.ConfigDaemon()
    config.parse(job.config)

    # Create pakfire instance.
    p = None
    try:
        p = pakfire.base.PakfireBuilder(config=config, arch=job.arch)

        # Download the source package.
        grabber = pakfire.downloader.PackageDownloader(p)
        grabber.urlgrab(job.source_url, filename=tmpfile)

        # Check if the download checksum matches (if provided).
        if job.source_hash_sha512:
            h = hashlib.new("sha512")

            # Hash the file in chunks; "with" guarantees the handle is
            # closed even if reading raises (the original leaked it then).
            with open(tmpfile, "rb") as f:
                while True:
                    buf = f.read(BUFFER_SIZE)
                    if not buf:
                        break

                    h.update(buf)

            if not job.source_hash_sha512 == h.hexdigest():
                # FIX: call-style raise instead of the Python-2-only
                # "raise E, msg" form.
                raise DownloadError("Hash check did not succeed.")

        # Create a new instance of a build environment.
        build = pakfire.builder.BuildEnviron(p, tmpfile,
            release_build=True, build_id=job.id, logfile=logfile)

        try:
            # Create the build environment.
            build.start()

            # Update the build status on the server.
            self.upload_buildroot(job, build.installed_packages)
            self.update_state(job, "running")

            # Run the build (without install test).
            build.build(install_test=False)

            # Copy the created packages to the tempdir.
            build.copy_result(tmpdir)

        finally:
            # Cleanup the build environment.
            build.stop()

        # Jippie, build is finished, we are going to upload the files.
        self.update_state(job, "uploading")

        # Walk through the result directory and upload all (binary) files.
        # Skip that for test builds.
        if not job.type == "test":
            for dir, subdirs, files in os.walk(tmpdir):
                for file in files:
                    file = os.path.join(dir, file)

                    # The log and the source package are not results.
                    if file in (logfile, tmpfile,):
                        continue

                    self.upload_file(job, file, "package")

    # FIX: "except ... as" form (Python 2.6+/3) instead of the comma form.
    except DependencyError as e:
        message = "%s: %s" % (e.__class__.__name__, e)
        self.update_state(job, "dependency_error", message)
        raise

    except DownloadError as e:
        message = "%s: %s" % (e.__class__.__name__, e)
        self.update_state(job, "download_error", message)
        raise
def dist(self):
    """Process pending commits of all source repositories.

    For every pending commit: check out its revision, turn each updated
    makefile into a source package and import it as a release build, and
    delete packages whose makefiles were removed. Commit state moves
    through "running" to "finished", or "failed" on error.
    """
    # Walk through all source repositories.
    for source in self:
        # Get access to the git repo.
        with git.Repo(self.pakfire, source) as repo:
            # Walk through all pending commits.
            for commit in source.pending_commits:
                commit.state = "running"

                logging.debug("Processing commit %s: %s" % (commit.revision, commit.subject))

                # Navigate to the right revision.
                repo.checkout(commit.revision)

                # Get all changed makefiles.
                deleted_files = []
                updated_files = []

                for file in repo.changed_files(commit.revision):
                    # Don't care about files that are not a makefile.
                    if not file.endswith(".%s" % MAKEFILE_EXTENSION):
                        continue

                    if os.path.exists(file):
                        updated_files.append(file)
                    else:
                        deleted_files.append(file)

                if updated_files:
                    # Create a temporary directory where to put all the files
                    # that are generated here.
                    pkg_dir = tempfile.mkdtemp()

                    try:
                        config = pakfire.config.Config(["general.conf",])
                        config.parse(source.distro.get_config())

                        p = pakfire.PakfireServer(config=config)

                        # Build every updated makefile into a source package.
                        # (A no-op "try: ... except: raise" wrapper was removed.)
                        pkgs = []
                        for file in updated_files:
                            pkg_file = p.dist(file, pkg_dir)
                            pkgs.append(pkg_file)

                        # Import all packages in one swoop.
                        for pkg in pkgs:
                            with self.db.transaction():
                                self.backend.builds.create_from_source_package(pkg,
                                    source.distro, commit=commit, type="release")

                    except:
                        # Flag the commit before propagating the error.
                        if commit:
                            commit.state = "failed"
                        raise

                    finally:
                        if os.path.exists(pkg_dir):
                            shutil.rmtree(pkg_dir)

                for file in deleted_files:
                    # Determine the name of the package by stripping the
                    # ".<extension>" suffix from the filename.
                    # BUG FIX: the original sliced name[:len(ext) + 1], which
                    # KEEPS a prefix of the name; strip from the end instead.
                    name = os.path.basename(file)
                    name = name[:-(len(MAKEFILE_EXTENSION) + 1)]

                    source.distro.delete_package(name)

                if commit:
                    commit.state = "finished"