def download(build_id, data):
  if data not in (Build.Data_Artifact, Build.Data_Log):
    return abort(404)
  build = Build.get(id=build_id)
  if not build:
    return abort(404)
  if not build.check_download_permission(data, request.user):
    return abort(403)
  if not build.exists(data):
    return abort(404)
  mime = 'application/zip' if data == Build.Data_Artifact else 'text/plain'
  download_name = "{}-{}.{}".format(
      build.repo.name.replace("/", "_"), build.num,
      "zip" if data == Build.Data_Artifact else 'log')
  return utils.stream_file(build.path(data), name=download_name, mime=mime)
def worker():
  while True:
    # Wait until a build is queued or the queue is shut down.
    with self._cond:
      while not self._queue and self._running:
        self._cond.wait()
      if not self._running:
        break
      build_id = self._queue.popleft()

    # Skip builds that no longer exist or are no longer queued.
    with models.session():
      build = Build.get(id=build_id)
      if not build or build.status != Build.Status_Queued:
        continue

    # Register a terminate Event so the running build can be stopped from outside.
    with self._cond:
      do_terminate = self._terminate_events[build_id] = Event()
    try:
      do_build(build_id, do_terminate)
    except BaseException:
      traceback.print_exc()
    finally:
      with self._cond:
        self._terminate_events.pop(build_id)
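
# A hypothetical counterpart to worker(): stopping a running build. This is only
# a sketch under the assumption that the surrounding queue object keeps the
# condition variable in self._cond and the per-build Events in
# self._terminate_events, as used by worker() above; it is not part of the
# original code.
def stop_build_sketch(queue, build_id):
  # Ask the worker handling *build_id* to terminate by setting its Event.
  with queue._cond:
    event = queue._terminate_events.get(build_id)
    if event is not None:
      event.set()
      return True
  return False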
def delete():
  repo_id = request.args.get('repo_id', '')
  build_id = request.args.get('build_id', '')
  user_id = request.args.get('user_id', '')
  delete_target = None
  return_to = 'dashboard'

  if build_id:
    delete_target = Build.get(id=build_id)
    if delete_target:
      return_to = delete_target.repo.url()
    if not request.user.can_manage:
      return abort(403)
  elif repo_id:
    delete_target = Repository.get(id=repo_id)
    return_to = url_for('repositories')
    if not request.user.can_manage:
      return abort(403)
  elif user_id:
    delete_target = User.get(id=user_id)
    return_to = url_for('users')
    if delete_target and delete_target.id != request.user.id and not request.user.can_manage:
      return abort(403)

  if not delete_target:
    return abort(404)

  try:
    delete_target.delete()
  except Build.CanNotDelete as exc:
    models.rollback()
    utils.flash(str(exc))
    referer = request.headers.get('Referer', return_to)
    return redirect(referer)

  utils.flash('{} deleted'.format(type(delete_target).__name__))
  return redirect(return_to)
def do_build_(build, build_path, override_path, logger, logfile, terminate_event):
  logger.info('[Flux]: build {}#{} started'.format(build.repo.name, build.num))

  # Clone the repository.
  if build.repo and os.path.isfile(utils.get_repo_private_key_path(build.repo)):
    identity_file = utils.get_repo_private_key_path(build.repo)
  else:
    identity_file = config.ssh_identity_file
  ssh_command = utils.ssh_command(None, identity_file=identity_file)  # Enables batch mode
  env = {'GIT_SSH_COMMAND': ' '.join(map(shlex.quote, ssh_command))}
  logger.info('[Flux]: GIT_SSH_COMMAND={!r}'.format(env['GIT_SSH_COMMAND']))
  clone_cmd = ['git', 'clone', build.repo.clone_url, build_path, '--recursive']
  res = utils.run(clone_cmd, logger, env=env)
  if res != 0:
    logger.error('[Flux]: unable to clone repository')
    return False

  if terminate_event.is_set():
    logger.info('[Flux]: build stopped')
    return False

  # Determine whether the build was started from a ref (e.g. a branch name)
  # or from a specific commit SHA.
  if build.ref and build.commit_sha == ("0" * 32):
    build_start_point = build.ref
    is_ref_build = True
  else:
    build_start_point = build.commit_sha
    is_ref_build = False

  # Checkout the correct build_start_point.
  checkout_cmd = ['git', 'checkout', '-q', build_start_point]
  res = utils.run(checkout_cmd, logger, cwd=build_path)
  if res != 0:
    logger.error('[Flux]: failed to checkout {!r}'.format(build_start_point))
    return False

  # If the checkout was initiated by "Start build", update the commit_sha and
  # ref of the build from the checked-out working tree.
  if is_ref_build:
    # Update the commit sha.
    get_ref_sha_cmd = ['git', 'rev-parse', 'HEAD']
    res_ref_sha, res_ref_sha_stdout = utils.run(
        get_ref_sha_cmd, logger, cwd=build_path, return_stdout=True)
    if res_ref_sha == 0 and res_ref_sha_stdout is not None:
      with models.session():
        Build.get(id=build.id).commit_sha = res_ref_sha_stdout.strip()
    else:
      logger.error('[Flux]: failed to read current sha')
      return False

    # Update the ref; the user could have entered just a branch name, e.g. 'master'.
    get_ref_cmd = ['git', 'rev-parse', '--symbolic-full-name', build_start_point]
    res_ref, res_ref_stdout = utils.run(
        get_ref_cmd, logger, cwd=build_path, return_stdout=True)
    if (res_ref == 0 and res_ref_stdout is not None
        and res_ref_stdout.strip() != 'HEAD' and res_ref_stdout.strip() != ''):
      with models.session():
        Build.get(id=build.id).ref = res_ref_stdout.strip()
    elif res_ref_stdout is not None and res_ref_stdout.strip() == '':
      # Keep going; the ref that was used was probably a commit sha.
      pass
    else:
      logger.error('[Flux]: failed to read current ref')
      return False

  if terminate_event.is_set():
    logger.info('[Flux]: build stopped')
    return False

  # Delete the .git folder before the build, if configured so.
  if (config.git_folder_handling == GitFolderHandling.DELETE_BEFORE_BUILD
      or config.git_folder_handling is None):
    logger.info('[Flux]: removing .git folder before build')
    deleteGitFolder(build_path)

  # Copy over overridden files, if any.
  if os.path.exists(override_path):
    dir_util.copy_tree(override_path, build_path)

  # Find the build script that we need to execute.
  script_fn = None
  for fname in config.build_scripts:
    script_fn = os.path.join(build_path, fname)
    if os.path.isfile(script_fn):
      break
    script_fn = None
  if not script_fn:
    choices = '{' + ','.join(map(str, config.build_scripts)) + '}'
    logger.error('[Flux]: no build script found, choices are ' + choices)
    return False

  # Make sure the build script is executable.
  st = os.stat(script_fn)
  os.chmod(script_fn, st.st_mode | stat.S_IEXEC)

  # Execute the script.
  logger.info('[Flux]: executing {}'.format(os.path.basename(script_fn)))
  logger.info('$ ' + shlex.quote(script_fn))
  popen = subprocess.Popen(
      [script_fn], cwd=build_path, stdout=logfile,
      stderr=subprocess.STDOUT, stdin=None)

  # Wait until the process finishes or the terminate event is set.
  while popen.poll() is None and not terminate_event.is_set():
    time.sleep(0.5)
  if terminate_event.is_set():
    try:
      popen.terminate()
    except OSError as exc:
      logger.exception(exc)
    logger.error('[Flux]: build stopped. build script terminated')
    return False

  # Delete the .git folder after the build, if configured so.
  if config.git_folder_handling == GitFolderHandling.DELETE_AFTER_BUILD:
    logger.info('[Flux]: removing .git folder after build')
    deleteGitFolder(build_path)

  logger.info('[Flux]: exit-code {}'.format(popen.returncode))
  return popen.returncode == 0
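
# deleteGitFolder() is used above but not defined in this excerpt. The function
# below is only an illustrative stand-in, a minimal sketch under the assumption
# that it simply removes the .git directory of the checkout (clearing the
# read-only bit that Git sets on some object files, which matters on Windows).
def delete_git_folder_sketch(build_path):
  import shutil
  git_dir = os.path.join(build_path, '.git')
  if not os.path.isdir(git_dir):
    return

  def _make_writable(func, path, exc_info):
    # Clear the read-only flag and retry the failed operation.
    os.chmod(path, stat.S_IWRITE)
    func(path)

  shutil.rmtree(git_dir, onerror=_make_writable)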
def do_build(build_id, terminate_event):
  """
  Performs the build step for the build in the database with the
  specified *build_id*.
  """

  logfile = None
  logger = None
  status = None

  with contextlib.ExitStack() as stack:
    try:
      try:
        # Retrieve the current build information.
        with models.session():
          build = Build.get(id=build_id)
          app.logger.info('Build {}#{} started.'.format(build.repo.name, build.num))
          build.status = Build.Status_Building
          build.date_started = datetime.now()
          build_path = build.path()
          override_path = build.path(Build.Data_OverrideDir)
          utils.makedirs(os.path.dirname(build_path))
          logfile = stack.enter_context(open(build.path(build.Data_Log), 'w'))
          logger = utils.create_logger(logfile)
          # Prefetch the repository member as it is required in do_build_().
          build.repo

        # Execute the actual build process (must not perform writes to the
        # 'build' object as the DB session is over).
        if do_build_(build, build_path, override_path, logger, logfile, terminate_event):
          status = Build.Status_Success
        elif terminate_event.is_set():
          status = Build.Status_Stopped
        else:
          status = Build.Status_Error
      finally:
        # Create a ZIP from the build directory.
        if os.path.isdir(build_path):
          logger.info('[Flux]: Zipping build directory...')
          utils.zipdir(build_path, build_path + '.zip')
          utils.rmtree(build_path, remove_write_protection=True)
          logger.info('[Flux]: Done')
    except BaseException as exc:
      with models.session():
        build = Build.get(id=build_id)
        build.status = Build.Status_Error
      if logger:
        logger.exception(exc)
      else:
        app.logger.exception(exc)
    finally:
      with models.session():
        build = Build.get(id=build_id)
        if status is not None:
          build.status = status
        build.date_finished = datetime.now()

  return status == Build.Status_Success
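
# utils.zipdir() is referenced above but not shown in this excerpt. A minimal
# sketch, assuming it only needs to pack the whole build directory into a ZIP
# archive with paths stored relative to the directory root; the name below is
# a hypothetical stand-in, not the project's actual implementation.
def zipdir_sketch(dirname, zip_filename):
  import zipfile
  with zipfile.ZipFile(zip_filename, 'w', zipfile.ZIP_DEFLATED) as zf:
    for root, _dirs, files in os.walk(dirname):
      for fname in files:
        full_path = os.path.join(root, fname)
        # Store entries relative to *dirname* so the archive root is the build dir.
        zf.write(full_path, os.path.relpath(full_path, dirname))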