def download(build_id, data):
  if data not in (Build.Data_Artifact, Build.Data_Log):
    return abort(404)
  build = Build.get(id=build_id)
  if not build:
    return abort(404)
  if not build.check_download_permission(data, request.user):
    return abort(403)
  if not build.exists(data):
    return abort(404)
  mime = 'application/zip' if data == Build.Data_Artifact else 'text/plain'
  download_name = "{}-{}.{}".format(
    build.repo.name.replace("/", "_"), build.num,
    "zip" if data == Build.Data_Artifact else 'log')
  return utils.stream_file(build.path(data), name=download_name, mime=mime)
def worker():
  # Note: `self` is not a parameter here, so this function is presumably
  # defined as a closure inside the build-queue object and captures `self`
  # from the enclosing scope.
  while True:
    with self._cond:
      # Wait until a build is queued or the queue is shut down.
      while not self._queue and self._running:
        self._cond.wait()
      if not self._running:
        break
      build_id = self._queue.popleft()
    with models.session():
      build = Build.get(id=build_id)
      if not build or build.status != Build.Status_Queued:
        continue
    with self._cond:
      do_terminate = self._terminate_events[build_id] = Event()
    try:
      do_build(build_id, do_terminate)
    except BaseException as exc:
      traceback.print_exc()
    finally:
      with self._cond:
        self._terminate_events.pop(build_id)
def build():
  repo_id = request.args.get('repo_id', '')
  ref_name = request.args.get('ref', '')
  if not repo_id or not ref_name:
    return abort(400)
  if not request.user.can_manage:
    return abort(403)

  # Sentinel commit SHA: the build is started from a ref only; do_build_()
  # detects this value and resolves the actual commit after checkout.
  commit = '0' * 32

  repo = Repository.get(id=repo_id)
  build = Build(repo=repo, commit_sha=commit, num=repo.build_count,
                ref=ref_name, status=Build.Status_Queued,
                date_queued=datetime.now(), date_started=None,
                date_finished=None)
  repo.build_count += 1
  models.commit()
  enqueue(build)
  return redirect(repo.url())
def delete():
  repo_id = request.args.get('repo_id', '')
  build_id = request.args.get('build_id', '')
  user_id = request.args.get('user_id', '')
  delete_target = None
  return_to = 'dashboard'
  if build_id:
    delete_target = Build.get(id=build_id)
    return_to = delete_target.repo.url()
    if not request.user.can_manage:
      return abort(403)
  elif repo_id:
    delete_target = Repository.get(id=repo_id)
    return_to = url_for('repositories')
    if not request.user.can_manage:
      return abort(403)
  elif user_id:
    delete_target = User.get(id=user_id)
    return_to = url_for('users')
    # Users may delete their own account; deleting others requires manage rights.
    if delete_target and delete_target.id != request.user.id and not request.user.can_manage:
      return abort(403)
  if not delete_target:
    return abort(404)
  try:
    delete_target.delete()
  except Build.CanNotDelete as exc:
    models.rollback()
    utils.flash(str(exc))
    referer = request.headers.get('Referer', return_to)
    return redirect(referer)
  utils.flash('{} deleted'.format(type(delete_target).__name__))
  return redirect(return_to)
def hook_push(logger):
  '''
  PUSH event webhook. The URL parameter ``api`` must be specified for Flux
  to expect the correct JSON payload format. Supported values for ``api`` are

  * ``gogs``
  * ``github``
  * ``gitea``
  * ``gitbucket``
  * ``bitbucket``
  * ``bitbucket-cloud``
  * ``gitlab``
  * ``bare``

  If no value or an invalid value is specified for this parameter, a
  400 Bad Request response is generated.
  '''

  api = request.args.get('api')
  if api not in (API_GOGS, API_GITHUB, API_GITEA, API_GITBUCKET, API_BITBUCKET,
                 API_BITBUCKET_CLOUD, API_GITLAB, API_BARE):
    logger.error('invalid `api` URL parameter: {!r}'.format(api))
    return 400

  logger.info('PUSH event received. Processing JSON payload.')
  try:
    # XXX Determine encoding from Request Headers, if possible.
    data = json.loads(request.data.decode('utf8'))
  except (UnicodeDecodeError, ValueError) as exc:
    logger.error('Invalid JSON data received: {}'.format(exc))
    return 400

  if api == API_GOGS:
    owner = utils.get(data, 'repository.owner.username', str)
    name = utils.get(data, 'repository.name', str)
    ref = utils.get(data, 'ref', str)
    commit = utils.get(data, 'after', str)
    secret = utils.get(data, 'secret', str)
    get_repo_secret = lambda r: r.secret
  elif api == API_GITHUB:
    event = request.headers.get('X-Github-Event')
    if event != 'push':
      logger.error(
        "Payload rejected (expected 'push' event, got {!r})".format(event))
      return 400
    owner = utils.get(data, 'repository.owner.name', str)
    name = utils.get(data, 'repository.name', str)
    ref = utils.get(data, 'ref', str)
    commit = utils.get(data, 'after', str)
    secret = request.headers.get('X-Hub-Signature', '').replace('sha1=', '')
    get_repo_secret = lambda r: utils.get_github_signature(r.secret, request.data)
  elif api == API_GITEA:
    event = request.headers.get('X-Gitea-Event')
    if event != 'push':
      logger.error(
        "Payload rejected (expected 'push' event, got {!r})".format(event))
      return 400
    owner = utils.get(data, 'repository.owner.username', str)
    name = utils.get(data, 'repository.name', str)
    ref = utils.get(data, 'ref', str)
    commit = utils.get(data, 'after', str)
    secret = utils.get(data, 'secret', str)
    get_repo_secret = lambda r: r.secret
  elif api == API_GITBUCKET:
    event = request.headers.get('X-Github-Event')
    if event != 'push':
      logger.error(
        "Payload rejected (expected 'push' event, got {!r})".format(event))
      return 400
    owner = utils.get(data, 'repository.owner.login', str)
    name = utils.get(data, 'repository.name', str)
    ref = utils.get(data, 'ref', str)
    commit = utils.get(data, 'after', str)
    secret = request.headers.get('X-Hub-Signature', '').replace('sha1=', '')
    if secret:
      get_repo_secret = lambda r: utils.get_github_signature(r.secret, request.data)
    else:
      get_repo_secret = lambda r: r.secret
  elif api == API_BITBUCKET:
    event = request.headers.get('X-Event-Key')
    if event != 'repo:refs_changed':
      logger.error(
        "Payload rejected (expected 'repo:refs_changed' event, got {!r})"
        .format(event))
      return 400
    owner = utils.get(data, 'repository.project.name', str)
    name = utils.get(data, 'repository.name', str)
    ref = utils.get(data, 'changes.0.refId', str)
    commit = utils.get(data, 'changes.0.toHash', str)
    secret = request.headers.get('X-Hub-Signature', '').replace('sha256=', '')
    if secret:
      get_repo_secret = lambda r: utils.get_bitbucket_signature(r.secret, request.data)
    else:
      get_repo_secret = lambda r: r.secret
  elif api == API_BITBUCKET_CLOUD:
    event = request.headers.get('X-Event-Key')
    if event != 'repo:push':
      logger.error(
        "Payload rejected (expected 'repo:push' event, got {!r})".format(event))
      return 400
    owner = utils.get(data, 'repository.project.project', str)
    name = utils.get(data, 'repository.name', str)
    ref_type = utils.get(data, 'push.changes.0.new.type', str)
    ref_name = utils.get(data, 'push.changes.0.new.name', str)
    ref = "refs/" + ("heads/" if ref_type == "branch" else "tags/") + ref_name
    commit = utils.get(data, 'push.changes.0.new.target.hash', str)
    secret = None
    get_repo_secret = lambda r: r.secret
  elif api == API_GITLAB:
    event = utils.get(data, 'object_kind', str)
    if event != 'push' and event != 'tag_push':
      logger.error(
        "Payload rejected (expected 'push' or 'tag_push' event, got {!r})"
        .format(event))
      return 400
    owner = utils.get(data, 'project.namespace', str)
    name = utils.get(data, 'project.name', str)
    ref = utils.get(data, 'ref', str)
    commit = utils.get(data, 'checkout_sha', str)
    secret = request.headers.get('X-Gitlab-Token')
    get_repo_secret = lambda r: r.secret
  elif api == API_BARE:
    owner = utils.get(data, 'owner', str)
    name = utils.get(data, 'name', str)
    ref = utils.get(data, 'ref', str)
    commit = utils.get(data, 'commit', str)
    secret = utils.get(data, 'secret', str)
    get_repo_secret = lambda r: r.secret
  else:
    assert False, "unreachable"

  if not name:
    logger.error('invalid JSON: no repository name received')
    return 400
  if not owner:
    logger.error('invalid JSON: no repository owner received')
    return 400
  if not ref:
    logger.error('invalid JSON: no Git ref received')
    return 400
  if not commit:
    logger.error('invalid JSON: no commit SHA received')
    return 400
  if len(commit) != 40:
    logger.error('invalid JSON: commit SHA has invalid length')
    return 400
  if secret is None:
    secret = ''

  name = owner + '/' + name
  repo = Repository.get(name=name)
  if not repo:
    logger.error('PUSH event rejected (unknown repository)')
    return 400
  if get_repo_secret(repo) != secret:
    logger.error('PUSH event rejected (invalid secret)')
    return 400
  if not repo.check_accept_ref(ref):
    logger.info('Git ref {!r} not whitelisted. No build dispatched'.format(ref))
    return 200

  build = Build(repo=repo, commit_sha=commit, num=repo.build_count, ref=ref,
                status=Build.Status_Queued, date_queued=datetime.now(),
                date_started=None, date_finished=None)
  repo.build_count += 1
  models.commit()
  enqueue(build)
  logger.info('Build #{} for repository {} queued'.format(build.num, repo.name))
  logger.info(utils.strip_url_path(config.app_url) + build.url())
  return 200
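# A minimal sketch (not taken from the original source) of the JSON payload
# the handler above expects for ``api=bare``, based on the fields it reads:
# 'owner', 'name', 'ref', 'commit' (a full 40-character SHA) and 'secret'
# (compared against the repository secret). All values below are placeholders.
#
#   {
#     "owner": "alice",
#     "name": "example-repo",
#     "ref": "refs/heads/master",
#     "commit": "0123456789abcdef0123456789abcdef01234567",
#     "secret": "example-webhook-secret"
#   }
#
# The repository is then looked up as "<owner>/<name>", i.e.
# "alice/example-repo" in this example.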
def do_build_(build, build_path, override_path, logger, logfile, terminate_event):
  logger.info('[Flux]: build {}#{} started'.format(build.repo.name, build.num))

  # Clone the repository.
  if build.repo and os.path.isfile(utils.get_repo_private_key_path(build.repo)):
    identity_file = utils.get_repo_private_key_path(build.repo)
  else:
    identity_file = config.ssh_identity_file
  ssh_command = utils.ssh_command(None, identity_file=identity_file)  # Enables batch mode
  env = {'GIT_SSH_COMMAND': ' '.join(map(shlex.quote, ssh_command))}
  logger.info('[Flux]: GIT_SSH_COMMAND={!r}'.format(env['GIT_SSH_COMMAND']))
  clone_cmd = ['git', 'clone', build.repo.clone_url, build_path, '--recursive']
  res = utils.run(clone_cmd, logger, env=env)
  if res != 0:
    logger.error('[Flux]: unable to clone repository')
    return False

  if terminate_event.is_set():
    logger.info('[Flux]: build stopped')
    return False

  if build.ref and build.commit_sha == ("0" * 32):
    build_start_point = build.ref
    is_ref_build = True
  else:
    build_start_point = build.commit_sha
    is_ref_build = False

  # Checkout the correct build_start_point.
  checkout_cmd = ['git', 'checkout', '-q', build_start_point]
  res = utils.run(checkout_cmd, logger, cwd=build_path)
  if res != 0:
    logger.error('[Flux]: failed to checkout {!r}'.format(build_start_point))
    return False

  # If the build was initiated via "Start build", update the build's
  # commit_sha and ref.
  if is_ref_build:
    # Update the commit SHA.
    get_ref_sha_cmd = ['git', 'rev-parse', 'HEAD']
    res_ref_sha, res_ref_sha_stdout = utils.run(
      get_ref_sha_cmd, logger, cwd=build_path, return_stdout=True)
    if res_ref_sha == 0 and res_ref_sha_stdout is not None:
      with models.session():
        Build.get(id=build.id).commit_sha = res_ref_sha_stdout.strip()
    else:
      logger.error('[Flux]: failed to read current sha')
      return False

    # Update the ref; the user could have entered just a branch name, e.g. 'master'.
    get_ref_cmd = ['git', 'rev-parse', '--symbolic-full-name', build_start_point]
    res_ref, res_ref_stdout = utils.run(
      get_ref_cmd, logger, cwd=build_path, return_stdout=True)
    if (res_ref == 0 and res_ref_stdout is not None
        and res_ref_stdout.strip() != 'HEAD' and res_ref_stdout.strip() != ''):
      with models.session():
        Build.get(id=build.id).ref = res_ref_stdout.strip()
    elif res_ref_stdout.strip() == '':
      # Keep going; the ref used was probably a commit SHA.
      pass
    else:
      logger.error('[Flux]: failed to read current ref')
      return False

  if terminate_event.is_set():
    logger.info('[Flux]: build stopped')
    return False

  # Delete the .git folder before the build, if configured so.
  if (config.git_folder_handling == GitFolderHandling.DELETE_BEFORE_BUILD
      or config.git_folder_handling is None):
    logger.info('[Flux]: removing .git folder before build')
    deleteGitFolder(build_path)

  # Copy over overridden files, if any.
  if os.path.exists(override_path):
    dir_util.copy_tree(override_path, build_path)

  # Find the build script that we need to execute.
  script_fn = None
  for fname in config.build_scripts:
    script_fn = os.path.join(build_path, fname)
    if os.path.isfile(script_fn):
      break
    script_fn = None
  if not script_fn:
    choices = '{' + ','.join(map(str, config.build_scripts)) + '}'
    logger.error('[Flux]: no build script found, choices are ' + choices)
    return False

  # Make sure the build script is executable.
  st = os.stat(script_fn)
  os.chmod(script_fn, st.st_mode | stat.S_IEXEC)

  # Execute the script.
  logger.info('[Flux]: executing {}'.format(os.path.basename(script_fn)))
  logger.info('$ ' + shlex.quote(script_fn))
  popen = subprocess.Popen(
    [script_fn], cwd=build_path, stdout=logfile,
    stderr=subprocess.STDOUT, stdin=None)

  # Wait until the process finishes or the terminate event is set.
  while popen.poll() is None and not terminate_event.is_set():
    time.sleep(0.5)
  if terminate_event.is_set():
    try:
      popen.terminate()
    except OSError as exc:
      logger.exception(exc)
    logger.error('[Flux]: build stopped. build script terminated')
    return False

  # Delete the .git folder after the build, if configured so.
  if config.git_folder_handling == GitFolderHandling.DELETE_AFTER_BUILD:
    logger.info('[Flux]: removing .git folder after build')
    deleteGitFolder(build_path)

  logger.info('[Flux]: exit-code {}'.format(popen.returncode))
  return popen.returncode == 0
def do_build(build_id, terminate_event):
  """
  Performs the build step for the build in the database with the
  specified *build_id*.
  """

  logfile = None
  logger = None
  status = None
  with contextlib.ExitStack() as stack:
    try:
      try:
        # Retrieve the current build information.
        with models.session():
          build = Build.get(id=build_id)
          app.logger.info('Build {}#{} started.'.format(build.repo.name, build.num))
          build.status = Build.Status_Building
          build.date_started = datetime.now()
          build_path = build.path()
          override_path = build.path(Build.Data_OverrideDir)
          utils.makedirs(os.path.dirname(build_path))
          logfile = stack.enter_context(open(build.path(build.Data_Log), 'w'))
          logger = utils.create_logger(logfile)
          # Prefetch the repository member as it is required in do_build_().
          build.repo

        # Execute the actual build process (must not perform writes to the
        # 'build' object as the DB session is over).
        if do_build_(build, build_path, override_path, logger, logfile,
                     terminate_event):
          status = Build.Status_Success
        else:
          if terminate_event.is_set():
            status = Build.Status_Stopped
          else:
            status = Build.Status_Error
      finally:
        # Create a ZIP from the build directory.
        if os.path.isdir(build_path):
          logger.info('[Flux]: Zipping build directory...')
          utils.zipdir(build_path, build_path + '.zip')
          utils.rmtree(build_path, remove_write_protection=True)
          logger.info('[Flux]: Done')
    except BaseException as exc:
      with models.session():
        build = Build.get(id=build_id)
        build.status = Build.Status_Error
      if logger:
        logger.exception(exc)
      else:
        app.logger.exception(exc)
    finally:
      with models.session():
        build = Build.get(id=build_id)
        if status is not None:
          build.status = status
        build.date_finished = datetime.now()

  return status == Build.Status_Success
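# A hedged sketch (not part of the original source) of how a "stop build"
# action could hook into the machinery above: worker() stores an Event per
# dispatched build in self._terminate_events, and do_build_() polls
# terminate_event to terminate the running build script. The `stop` method
# below is hypothetical; only the per-build event bookkeeping is taken from
# the code above.
#
#   def stop(self, build_id):
#     with self._cond:
#       event = self._terminate_events.get(build_id)
#     if event is not None:
#       event.set()  # picked up by the polling loop in do_build_()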