def index():
    """Handle a Docker Hub push webhook: validate repo, pusher and tag, then run hooks.

    NOTE(review): `request` is used like a dict (`request.get(...)`), so it is
    presumably the parsed JSON payload rather than a Flask request object —
    confirm against the surrounding module.
    """
    global webhook_secret, branch_whitelist, scripts, responses
    repository = request.get("repository", {"repo_name": None}).get("repo_name")
    tag = request.get("push_data", {"tag": None}).get("tag")
    pusher = request.get("push_data", {"pusher": None}).get("pusher")
    # Respond to ping properly
    if repository not in CONFIG.get("repositories", {}).keys():
        logging.info("Not a push event, aborting")
        abort(403)
    if pusher not in CONFIG[repository].get("pushers", [pusher]):
        logging.info("Pusher not configured")
        abort(403)
    if tag not in CONFIG[repository].get("tags", ["latest"]):
        # Bug fix: this branch rejects an unknown *tag*; the message was a
        # copy-paste of the pusher check above.
        logging.info("Tag not configured")
        abort(403)
    # Run scripts, saving into responses (which we clear out)
    responses = {}
    for script in scripts:
        proc = Popen([script, tag], stdout=PIPE, stderr=PIPE)
        stdout, stderr = proc.communicate()
        stdout = stdout.decode('utf-8')
        stderr = stderr.decode('utf-8')
        # Log errors if a hook failed
        if proc.returncode != 0:
            logging.error('[%s]: %d\n%s', script, proc.returncode, stderr)
        responses[script] = {'stdout': stdout, 'stderr': stderr}
    return dumps(responses)
def worker():
    """Drain the job queue forever, running every hook script for each pull request."""
    while True:
        job = q.get()
        # Hooks live in code\hooks - 01_print_branch, 02_checkout and 03_pr_comment
        results = {}
        logging.info("Processing: " + job["prTitle"] + " #" + job["number"] +
                     " " + job["action"] + " event")
        for hook in scripts:
            argv = [
                hook,
                job["prTitle"],
                job["branch"],
                job["number"],
                job["repoFullName"],
            ]
            child = Popen(argv, stdout=PIPE, stderr=PIPE)
            raw_out, raw_err = child.communicate()
            out_text = raw_out.decode('utf-8')
            err_text = raw_err.decode('utf-8')
            if child.returncode != 0:
                # A hook failed: record its exit status and stderr in the log.
                logging.error('[%s]: %d\n%s', hook, child.returncode, err_text)
            else:
                logging.info(hook + " : " + out_text)
            results[hook] = {'stdout': out_text, 'stderr': err_text}
        q.task_done()
def run_scripts():
    """Execute each configured hook with the JSON-encoded payload, collecting output.

    Results land in the module-level `responses` dict, keyed by script path.
    """
    for hook in scripts:
        child = Popen([hook, dumps(payload)], stdout=PIPE, stderr=PIPE)
        raw_out, raw_err = child.communicate()
        decoded_out = raw_out.decode('utf-8')
        decoded_err = raw_err.decode('utf-8')
        if child.returncode != 0:
            # A failed hook is logged; its output is still recorded below.
            logging.error('[%s]: %d\n%s', hook, child.returncode, decoded_err)
        responses[hook] = {'stdout': decoded_out, 'stderr': decoded_err}
def __init__(self, args, cwd=None, stdin=None):
    """Run *args* as a subprocess, capturing stdout/stderr and the exit code.

    args  -- argv list for the child process
    cwd   -- optional working directory for the child
    stdin -- optional text fed to the child's stdin (encoded as UTF-8)
    """
    assert isinstance(args, list)
    # Idiom fix: `is None` instead of `== None` — equality dispatches to
    # __eq__ and can misbehave on exotic types; identity is the correct test.
    assert isinstance(cwd, str) or cwd is None
    assert isinstance(stdin, str) or stdin is None
    self.args = args
    p = subprocess.Popen(args, cwd=cwd, stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdin = bytes(stdin, 'utf-8') if stdin is not None else None
    stdout, stderr = p.communicate(stdin)
    self.stdout = stdout.decode('utf-8') if stdout is not None else ''
    self.stderr = stderr.decode('utf-8') if stderr is not None else ''
    self.returncode = p.returncode
def callScript(script, branch, repository):
    """Make *script* executable and run it with the branch and repository names.

    The outcome is logged and recorded in the module-level `responses` dict.
    """
    os.chmod(script, 0o744)
    child = Popen([script, branch, repository], stdout=PIPE, stderr=PIPE)
    raw_out, raw_err = child.communicate()
    out_text = raw_out.decode('utf-8')
    err_text = raw_err.decode('utf-8')
    if child.returncode != 0:
        logging.error('Script: [%s]: %d\n%s', script, child.returncode,
                      err_text)
    else:
        logging.info('Script: [%s]: %d\nOut: %s\nErr: %s', script,
                     child.returncode, out_text, err_text)
    # NOTE(review): stderr is stored under the key 'args' — looks like it may
    # have been meant to hold the argv list; confirm against consumers.
    responses[script] = {'script': script, 'args': err_text}
def ffmpeg_render(request, content):
    """Run ffmpeg with arguments taken from the POST body and return its output.

    The request body is JSON whose 'conversion' key holds a space-separated
    argument string with single-quoted spans. On timeout (15s) the process is
    killed and whatever output it produced is returned.

    NOTE(review): this executes caller-supplied arguments; make sure the
    endpoint is authenticated/trusted upstream.
    """
    post_conversion = json.loads(request.body.decode('UTF-8'))
    post_args = post_conversion['conversion']
    arg_list = [
        '{}\\bin\\ffmpeg.exe'.format(FFMPEG_DIR), '-y', '-filter_threads', '2'
    ]
    # Hand-rolled tokenizer: split on spaces, honouring single-quoted spans
    # (quotes are stripped; there is no escape mechanism).
    opened = False
    arg = ''
    for char in post_args:
        if char == "'":
            opened = not opened
            continue
        if char == ' ' and not opened:
            arg_list.append(arg)
            arg = ''
        else:
            arg += char
    arg_list.append(arg)
    process = subprocess.Popen(arg_list,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    try:
        stdout, stderr = process.communicate(timeout=15)
    except subprocess.TimeoutExpired:
        # Bug fix: narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit. Kill the child and collect whatever
        # output it managed to produce.
        process.kill()
        stdout, stderr = process.communicate()
    if stderr != b'':
        return HttpResponse(stderr, content_type='text/plain')
    return HttpResponse('{}\r\n{}\r\n{}\r\n{}'.format(stdout.decode(),
                                                      stderr.decode(),
                                                      content, arg_list),
                        content_type='text/plain')
def check_commands():
    """Poll Firebase for pending commands addressed to this server and run them.

    A command is pending while its 'run_time' field is unset. Whitelisted
    commands are executed and their output written back; others are rejected.
    """
    pending = firebase.get('commands', SERVER_ID)
    if not pending:
        return
    for name, entry in pending.items():
        if entry['run_time']:
            # Already executed on a previous poll.
            continue
        # NOTE(review): substring matching is a weak whitelist — an allowed
        # token appearing anywhere in the command line passes the check.
        allowed = any(cmd in entry['cmd'] for cmd in WHITELISTED_COMMANDS)
        if allowed:
            print('Running cmd "%s"' % name)
            proc = subprocess.Popen(split(entry['cmd']),
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
            out, err = proc.communicate()
            firebase.put(
                'commands/%s' % SERVER_ID, name, {
                    'stdout': out.decode(),
                    'stderr': err.decode(),
                    'exit_code': proc.returncode,
                    'run_time': datetime.now().strftime('%H:%M:%S %d/%m/%y'),
                    'cmd': entry['cmd']
                })
        else:
            firebase.put(
                'commands/%s' % SERVER_ID, name, {
                    'stdout': 'COMMAND NOT ALLOWED',
                    'stderr': 'COMMAND NOT ALLOWED',
                    'exit_code': -1,
                    'cmd': entry['cmd']
                })
def launch(profile: Profile, strict: bool, foreground: bool, qb_args: List[str]) -> bool:
    """Launch qutebrowser for *profile*, optionally staying in the foreground.

    Returns True when the launch succeeded (or was handed off to the
    background), False when the profile is missing or qutebrowser is absent.
    """
    if not profiles.ensure_profile_exists(profile, not strict):
        return False
    cmdline = profile.cmdline() + qb_args
    if shutil.which(cmdline[0]) is None:
        error("qutebrowser is not installed")
        return False
    if foreground:
        return subprocess.run(cmdline).returncode == 0
    proc = subprocess.Popen(cmdline,
                            stdout=subprocess.DEVNULL,
                            stderr=subprocess.PIPE)
    try:
        # give qb a chance to validate input before returning to shell
        _, err = proc.communicate(timeout=0.1)
        print(err.decode(errors="ignore"), end="")
    except subprocess.TimeoutExpired:
        pass
    return True
def index():
    """
    Main WSGI application entry.

    Validates the webhook POST (method, GitHub source IP, HMAC secret),
    answers pings, determines event/repo/branch, then runs every matching
    executable hook with the payload tempfile path and event name as
    arguments.
    """
    path = normpath(abspath(dirname(__file__)))

    # Only POST is implemented
    if request.method != 'POST':
        abort(501)

    # Load config
    with open(join(path, 'config.json'), 'r') as cfg:
        config = loads(cfg.read())
    hooks = config.get('hooks_path', join(path, 'hooks'))

    # Allow Github IPs only
    if config.get('github_ips_only', True):
        src_ip = ip_address(u'{}'.format(
            request.remote_addr)  # Fix stupid ipaddress issue
        )
        whitelist = requests.get('https://api.github.com/meta').json()['hooks']
        for valid_ip in whitelist:
            if src_ip in ip_network(valid_ip):
                break
        else:
            # for/else: no whitelisted network matched the client IP
            abort(403)

    # Enforce secret
    secret = config.get('enforce_secret', '')
    if secret:
        # Only SHA1 is supported
        header_signature = request.headers.get('X-Hub-Signature')
        if header_signature is None:
            abort(403)
        sha_name, signature = header_signature.split('=')
        if sha_name != 'sha1':
            abort(501)
        # HMAC requires the key to be bytes, but data is string.
        # Bug fix: on Python 3, hmac.new(str(secret), ...) raises TypeError —
        # the key must actually be bytes, so encode it.
        mac = hmac.new(secret.encode('utf-8'), msg=request.data,
                       digestmod='sha1')
        # Python prior to 2.7.7 does not have hmac.compare_digest
        if hexversion >= 0x020707F0:
            if not hmac.compare_digest(str(mac.hexdigest()), str(signature)):
                abort(403)
        else:
            # What compare_digest provides is protection against timing
            # attacks; we can live without this protection for a web-based
            # application
            if not str(mac.hexdigest()) == str(signature):
                abort(403)

    # Implement ping
    event = request.headers.get('X-GitHub-Event', 'ping')
    if event == 'ping':
        return dumps({'msg': 'pong'})

    # Gather data
    try:
        payload = request.get_json()
    except Exception:
        logging.warning('Request parsing failed')
        abort(400)

    # Determining the branch is tricky, as it only appears for certain event
    # types and at different levels
    branch = None
    try:
        # Case 1: a ref_type indicates the type of ref.
        # This is true for create and delete events.
        if 'ref_type' in payload:
            if payload['ref_type'] == 'branch':
                branch = payload['ref']
        # Case 2: a pull_request object is involved. This is pull_request and
        # pull_request_review_comment events.
        elif 'pull_request' in payload:
            # This is the TARGET branch for the pull-request, not the source
            # branch
            branch = payload['pull_request']['base']['ref']
        elif event in ['push']:
            # Push events provide a full Git ref in 'ref' and not a 'ref_type'.
            branch = payload['ref'].split('/', 2)[2]
    except KeyError:
        # If the payload structure isn't what we expect, we'll live without
        # the branch name
        pass

    # All current events have a repository, but some legacy events do not,
    # so let's be safe
    name = payload['repository']['name'] if 'repository' in payload else None

    meta = {'name': name, 'branch': branch, 'event': event}
    logging.info('Metadata:\n{}'.format(dumps(meta)))

    # Skip push-delete
    if event == 'push' and payload['deleted']:
        logging.info('Skipping push-delete event for {}'.format(dumps(meta)))
        return dumps({'status': 'skipped'})

    # Possible hooks, most specific first
    scripts = []
    if branch and name:
        scripts.append(join(hooks, '{event}-{name}-{branch}'.format(**meta)))
    if name:
        scripts.append(join(hooks, '{event}-{name}'.format(**meta)))
    scripts.append(join(hooks, '{event}'.format(**meta)))
    scripts.append(join(hooks, 'all'))

    # Check permissions
    scripts = [s for s in scripts if isfile(s) and access(s, X_OK)]
    if not scripts:
        return dumps({'status': 'nop'})

    # Save payload to temporal file
    osfd, tmpfile = mkstemp()
    with fdopen(osfd, 'w') as pf:
        pf.write(dumps(payload))

    # Run scripts
    ran = {}
    for s in scripts:
        proc = Popen([s, tmpfile, event], stdout=PIPE, stderr=PIPE)
        stdout, stderr = proc.communicate()
        ran[basename(s)] = {
            'returncode': proc.returncode,
            'stdout': stdout.decode('utf-8'),
            'stderr': stderr.decode('utf-8'),
        }
        # Log errors if a hook failed
        if proc.returncode != 0:
            logging.error('{} : {} \n{}'.format(s, proc.returncode, stderr))

    # Remove temporal file
    remove(tmpfile)

    info = config.get('return_scripts_info', False)
    if not info:
        return dumps({'status': 'done'})

    output = dumps(ran, sort_keys=True, indent=4)
    logging.info(output)
    return output
def index():  # noqa: C901 - ignore complexity of function
    """Main WSGI application entry.

    Handles a GitHub webhook POST: applies the configured log level,
    validates the source IP and HMAC secret, optionally backs up the raw
    payload, resolves matching hook scripts (including "child" hook
    variants) and executes each one with the payload tempfile path and
    the event name as arguments.
    """
    app_path = os.path.dirname(os.path.abspath(__file__))
    path = normpath(abspath(dirname(__file__)))
    # Re-read config.json on every request so changes apply without restart.
    with open(join(path, "config.json"), "r") as cfg:
        config = loads(cfg.read())
        cfg.close()  # redundant: the with-block closes cfg anyway
    # NOTE(review): "debug_level_old" can never be in locals() here — it is a
    # fresh local on every call — so this always resets to "INFO" and the
    # "changed" comparison below never sees the previous request's value.
    # Confirm whether cross-request persistence (a module global) was intended.
    if "debug_level_old" not in locals():
        debug_level_old = "INFO"
    debug_level = str(config.get("debug_level", "INFO"))
    if debug_level != debug_level_old:
        # Map the configured name to a root-logger level; unknown names fall
        # back to INFO.
        if debug_level == "DEBUG":
            logging.getLogger().setLevel(logging.DEBUG)
        elif debug_level == "INFO":
            logging.getLogger().setLevel(logging.INFO)
        elif debug_level == "WARNING":
            logging.getLogger().setLevel(logging.WARNING)
        elif debug_level == "ERROR":
            logging.getLogger().setLevel(logging.ERROR)
        elif debug_level == "CRITICAL":
            logging.getLogger().setLevel(logging.CRITICAL)
        else:
            logging.getLogger().setLevel(logging.INFO)
        logging.info("debug level set dynamically to: %s", debug_level)
        debug_level_old = debug_level
    # Only POST is implemented
    if request.method != "POST":
        abort(501)
    hooks = config.get("hooks_path", join(path, "hooks"))
    if os.path.isdir(config.get("hooks_path", "")):
        logging.debug("hooks path set to: %s", hooks)
    else:
        logging.warning("hooks path not valid: %s", hooks)
    # Allow Github IPs only
    logging.debug("checking valid IPs...")
    # get ip address of requester
    src_ip = ip_address("{}".format(
        request.access_route[0])  # Fix stupid ipaddress issue
    )
    if config.get("github_ips_only", True):
        # GitHub publishes its webhook source ranges at /meta.
        whitelist = requests.get("https://api.github.com/meta").json()["hooks"]
        for valid_ip in whitelist:
            if src_ip in ip_network(valid_ip):
                break
        else:
            # for/else: no whitelisted network matched the client IP.
            # pylint: disable=logging-format-interpolation
            logging.error("[403] IP {} not allowed".format(src_ip))
            abort(403)
    logging.debug("checking valid IPs...done.")
    # Enforce secret
    logging.debug("checking webhook secret...")
    secret = config.get("enforce_secret", "")
    if secret:
        # change type of secret: hmac.new requires a bytes key
        secret = bytes(secret, "utf-8")
        # Only SHA1 is supported
        header_signature = request.headers.get("X-Hub-Signature")
        if header_signature is None:
            logging.error("403: secret check failed: header mandantory")
            abort(403)
        sha_name, signature = header_signature.split("=")
        if sha_name != "sha1":
            logging.error("501: secret check failed: sha1 mandantory")
            abort(501)
        # HMAC requires the key to be bytes, but data is string
        mac = hmac.new(secret, msg=request.data, digestmod="sha1")
        # Python prior to 2.7.7 does not have hmac.compare_digest
        if hexversion >= 0x020707F0:
            if not hmac.compare_digest(str(mac.hexdigest()), str(signature)):
                logging.warning("[403] secret check failed: hex version wrong")
                abort(403)
        else:
            # What compare_digest provides is protection against timing
            # attacks; we can live without this protection for a web-based
            # application
            if str(mac.hexdigest()) != str(signature):
                logging.warning("[403] secret check failed (ip=%s)", src_ip)
                abort(403)
    logging.debug("checking webhook secret...done.")
    # Implement ping
    event = request.headers.get("X-GitHub-Event", "ping")
    logging.debug("event type detected: %s", event)
    if event == "ping":
        return dumps({"msg": "pong"})
    # Gather data
    try:
        payload = request.get_json()
    except Exception:
        logging.warning("[400] request parsing failed")
        abort(400)
    # Determining the branch is tricky, as it only appears for certain event
    # types and at different levels
    logging.debug("checking branch...")
    branch = None
    try:
        # Back up every payload JSON to the configured backup_path (if valid).
        backup_path = config.get("backup_path", "")
        logging.debug("backup path set: %s", backup_path)
        if os.path.exists(backup_path):
            # pylint: disable=line-too-long
            backup_file = (config.get("backup_path", path) + "/" +
                           time.strftime("%Y%m%d-%H%M%S") + "-" + event +
                           ".json")
            logging.debug("backup file set: %s", backup_file)
            with open(backup_file, "w") as this_payloadexport:
                json.dump(payload, this_payloadexport)
                this_payloadexport.close()  # redundant inside the with-block
        else:
            logging.info(
                "backup not created; backup path not given or invalid")
        # Case 1: a ref_type indicates the type of ref.
        # This true for create and delete events.
        if "ref_type" in payload:
            if payload["ref_type"] == "branch":
                branch = payload["ref"]
        # Case 2: a pull_request object is involved. This is pull_request and
        # pull_request_review_comment events.
        elif "pull_request" in payload:
            # This is the TARGET branch for the pull-request, not the source
            # branch
            branch = payload["pull_request"]["base"]["ref"]
        elif event in ["push"]:
            # Push events provide a full Git ref in 'ref' and not a 'ref_type'.
            branch = payload["ref"].split("/", 2)[2]
    except KeyError:
        # If the payload structure isn't what we expect, we'll live without
        # the branch name
        logging.debug("payload structure not as expected")
    logging.debug("checking branch...done.")
    # All current events have a repository, but some legacy events do not,
    # so let's be safe
    name = payload["repository"]["name"] if "repository" in payload else None
    meta = {"name": name, "branch": branch, "event": event}
    # pylint: disable=logging-format-interpolation
    logging.info("Metadata:\n{}".format(dumps(meta)))
    # Skip push-delete
    if event == "push" and payload["deleted"]:
        # pylint: disable=logging-format-interpolation
        logging.info("skipping push-delete event for {}".format(dumps(meta)))
        return dumps({"status": "skipped"})
    # Possible hooks, most specific first
    scripts = []
    if branch and name:
        scripts.append(join(hooks, "{event}-{name}-{branch}".format(**meta)))
        scripts.append(join(hooks, "all-{name}-{branch}".format(**meta)))
    if name:
        scripts.append(join(hooks, "{event}-{name}".format(**meta)))
        scripts.append(join(hooks, "all-{name}".format(**meta)))
    scripts.append(join(hooks, "{event}".format(**meta)))
    scripts.append(join(hooks, "all"))
    # Check permissions
    logging.debug("checking executable hook scripts...")
    scripts = [s for s in scripts if isfile(s) and access(s, X_OK)]
    if not scripts:
        return dumps({"status": "nop"})
    logging.debug("checking executable hook scripts...done.")
    # Save payload to temporal file
    osfd, tmpfile = mkstemp()
    with fdopen(osfd, "w") as this_payloadfile:
        this_payloadfile.write(dumps(payload))
        this_payloadfile.close()  # redundant inside the with-block
    # search for sub hook scripts (e.g. all => all1, all2, all-test, all-function1, ...)
    logging.debug("checking executable child hook scripts...")
    # NOTE(review): `scripts` is appended to while being iterated, so newly
    # discovered child hooks are themselves scanned for children — confirm
    # this cascading behaviour is intended.
    for subhook_script in scripts:
        # remove hooks dir and slashes to get the bare hook name prefix
        subhook_script = subhook_script.replace(hooks, "")
        subhook_script = subhook_script.replace("/", "")
        files = [
            f for f in os.listdir(app_path + "/" + hooks + "/")
            if re.match(rf"{subhook_script}.*", f)
        ]
        for sub_script in files:
            # check if found files are executable (beware of x flag to non exectuable files!)
            sub_script_filename = app_path + "/" + hooks + "/" + sub_script
            if isfile(sub_script_filename) and access(sub_script_filename,
                                                      X_OK):
                if join(hooks, sub_script) not in scripts:
                    # just add new files to list
                    scripts.append(join(hooks, sub_script))
                    logging.debug("adding child hook '%s'", sub_script)
    logging.debug("checking executable child hook scripts...done.")
    # Run scripts
    ran = {}
    for this_script in scripts:
        # NOTE(review): prefixing app_path assumes hooks_path is relative to
        # the application directory — an absolute hooks_path breaks here;
        # confirm against deployment configs.
        this_script = app_path + "/" + this_script
        logging.info("try to execute hook: %s", this_script)
        proc = Popen(  # nosec - if insecure scripts are saved and can be calld by this function you have a bigger problem :( this script requires dynamic and flexible script calling
            [this_script, tmpfile, event],
            stdout=PIPE,
            stderr=PIPE)
        stdout, stderr = proc.communicate()
        ran[basename(this_script)] = {
            "returncode": proc.returncode,
            "stdout": stdout.decode("utf-8"),
            "stderr": stderr.decode("utf-8"),
        }
        # Log errors if a hook failed
        if proc.returncode != 0:
            # pylint: disable=logging-too-many-args
            logging.error("{} : {} \n{}".format(this_script, proc.returncode,
                                                stderr))
    # Remove temporal file
    remove(tmpfile)
    info = config.get("return_scripts_info", False)
    if not info:
        return dumps({"status": "done"})
    output = dumps(ran, sort_keys=True, indent=4)
    logging.info(output)
    return output
def index():
    """
    Main WSGI application entry.

    Validates the webhook POST (method, GitHub source IP, HMAC secret),
    answers pings, determines event/repo/branch, then runs every matching
    executable hook with the payload tempfile path and event name as
    arguments.
    """
    path = normpath(abspath(dirname(__file__)))

    # Only POST is implemented
    if request.method != 'POST':
        abort(501)

    # Load config
    with open(join(path, 'config.json'), 'r') as cfg:
        config = loads(cfg.read())
    hooks = config.get('hooks_path', join(path, 'hooks'))

    # Allow Github IPs only
    if config.get('github_ips_only', True):
        src_ip = ip_address(
            u'{}'.format(request.access_route[0])  # Fix stupid ipaddress issue
        )
        whitelist = requests.get('https://api.github.com/meta').json()['hooks']
        for valid_ip in whitelist:
            if src_ip in ip_network(valid_ip):
                break
        else:
            # for/else: no whitelisted network matched the client IP
            logging.error('IP {} not allowed'.format(src_ip))
            abort(403)

    # Enforce secret
    secret = config.get('enforce_secret', '')
    if secret:
        # Only SHA1 is supported
        header_signature = request.headers.get('X-Hub-Signature')
        if header_signature is None:
            abort(403)
        sha_name, signature = header_signature.split('=')
        if sha_name != 'sha1':
            abort(501)
        # HMAC requires the key to be bytes, but data is string.
        # Bug fix: on Python 3, hmac.new(str(secret), ...) raises TypeError —
        # the key must actually be bytes, so encode it.
        mac = hmac.new(secret.encode('utf-8'), msg=request.data,
                       digestmod='sha1')
        # Python prior to 2.7.7 does not have hmac.compare_digest
        if hexversion >= 0x020707F0:
            if not hmac.compare_digest(str(mac.hexdigest()), str(signature)):
                abort(403)
        else:
            # What compare_digest provides is protection against timing
            # attacks; we can live without this protection for a web-based
            # application
            if not str(mac.hexdigest()) == str(signature):
                abort(403)

    # Implement ping
    event = request.headers.get('X-GitHub-Event', 'ping')
    if event == 'ping':
        return dumps({'msg': 'pong'})

    # Gather data
    try:
        payload = request.get_json()
    except Exception:
        logging.warning('Request parsing failed')
        abort(400)

    # Determining the branch is tricky, as it only appears for certain event
    # types and at different levels
    branch = None
    try:
        # Case 1: a ref_type indicates the type of ref.
        # This is true for create and delete events.
        if 'ref_type' in payload:
            if payload['ref_type'] == 'branch':
                branch = payload['ref']
        # Case 2: a pull_request object is involved. This is pull_request and
        # pull_request_review_comment events.
        elif 'pull_request' in payload:
            # This is the TARGET branch for the pull-request, not the source
            # branch
            branch = payload['pull_request']['base']['ref']
        elif event in ['push']:
            # Push events provide a full Git ref in 'ref' and not a 'ref_type'.
            branch = payload['ref'].split('/', 2)[2]
    except KeyError:
        # If the payload structure isn't what we expect, we'll live without
        # the branch name
        pass

    # All current events have a repository, but some legacy events do not,
    # so let's be safe
    name = payload['repository']['name'] if 'repository' in payload else None

    meta = {
        'name': name,
        'branch': branch,
        'event': event
    }
    logging.info('Metadata:\n{}'.format(dumps(meta)))

    # Skip push-delete
    if event == 'push' and payload['deleted']:
        logging.info('Skipping push-delete event for {}'.format(dumps(meta)))
        return dumps({'status': 'skipped'})

    # Possible hooks, most specific first
    scripts = []
    if branch and name:
        scripts.append(join(hooks, '{event}-{name}-{branch}'.format(**meta)))
    if name:
        scripts.append(join(hooks, '{event}-{name}'.format(**meta)))
    scripts.append(join(hooks, '{event}'.format(**meta)))
    scripts.append(join(hooks, 'all'))

    # Check permissions
    scripts = [s for s in scripts if isfile(s) and access(s, X_OK)]
    if not scripts:
        return dumps({'status': 'nop'})

    # Save payload to temporal file
    osfd, tmpfile = mkstemp()
    with fdopen(osfd, 'w') as pf:
        pf.write(dumps(payload))

    # Run scripts
    ran = {}
    for s in scripts:
        proc = Popen(
            [s, tmpfile, event],
            stdout=PIPE, stderr=PIPE
        )
        stdout, stderr = proc.communicate()
        ran[basename(s)] = {
            'returncode': proc.returncode,
            'stdout': stdout.decode('utf-8'),
            'stderr': stderr.decode('utf-8'),
        }
        # Log errors if a hook failed
        if proc.returncode != 0:
            logging.error('{} : {} \n{}'.format(
                s, proc.returncode, stderr
            ))

    # Remove temporal file
    remove(tmpfile)

    info = config.get('return_scripts_info', False)
    if not info:
        return dumps({'status': 'done'})

    output = dumps(ran, sort_keys=True, indent=4)
    logging.info(output)
    return output
def index():
    """Webhook endpoint accepting both GitHub and GitLab push events.

    Authenticates via X-Hub-Signature (GitHub HMAC) or X-Gitlab-Token
    (shared secret), answers pings, filters on the branch whitelist, then
    runs each configured script with the branch name as its argument.
    Script output is collected into the module-level `responses` dict.
    """
    global webhook_secret, branch_whitelist, scripts, responses

    # Get signature from the webhook request
    header_signature = request.headers.get('X-Hub-Signature')
    header_gitlab_token = request.headers.get('X-Gitlab-Token')

    if header_signature is not None:
        # GitHub: construct an hmac, abort if it doesn't match
        try:
            sha_name, signature = header_signature.split('=')
        except ValueError:
            # Bug fix: narrowed from a bare `except:` — only a malformed
            # "algo=hexdigest" header raises (ValueError from unpacking), and
            # bare except also swallowed KeyboardInterrupt/SystemExit.
            logging.info("X-Hub-Signature format is incorrect (%s), aborting",
                         header_signature)
            abort(400)
        data = request.get_data()
        try:
            mac = hmac.new(webhook_secret.encode('utf8'), msg=data,
                           digestmod=sha_name)
        except Exception:
            # Narrowed from a bare `except:`; an unsupported digest name
            # raises ValueError, but keep Exception for odd hashlib backends.
            logging.info("Unsupported X-Hub-Signature type (%s), aborting",
                         header_signature)
            abort(400)
        if not hmac.compare_digest(str(mac.hexdigest()), str(signature)):
            logging.info("Signature did not match (%s and %s), aborting",
                         str(mac.hexdigest()), str(signature))
            abort(403)
        event = request.headers.get("X-GitHub-Event", "ping")
    elif header_gitlab_token is not None:
        # GitLab: plain shared-secret comparison
        if webhook_secret != header_gitlab_token:
            logging.info("Gitlab Secret Token did not match, aborting")
            abort(403)
        event = request.headers.get("X-Gitlab-Event", "unknown")
    else:
        logging.info("X-Hub-Signature was missing, aborting")
        abort(403)

    # Respond to ping properly
    if event == "ping":
        return dumps({"msg": "pong"})

    # Don't listen to anything but push
    if event != "push" and event != "Push Hook":
        logging.info("Not a push event, aborting")
        abort(403)

    # Try to parse out the branch from the request payload
    try:
        branch = request.get_json(force=True)["ref"].split("/", 2)[2]
    except Exception:
        # Narrowed from a bare `except:` (KeyError/IndexError/parse errors).
        print_exc()
        logging.info("Parsing payload failed")
        abort(400)

    # Reject branches not in our whitelist
    if branch not in branch_whitelist:
        logging.info("Branch %s not in branch_whitelist %s", branch,
                     branch_whitelist)
        abort(403)

    # Run scripts, saving into responses (which we clear out)
    responses = {}
    for script in scripts:
        proc = Popen([script, branch], stdout=PIPE, stderr=PIPE)
        stdout, stderr = proc.communicate()
        stdout = stdout.decode('utf-8')
        stderr = stderr.decode('utf-8')
        # Log errors if a hook failed
        if proc.returncode != 0:
            logging.error('[%s]: %d\n%s', script, proc.returncode, stderr)
        responses[script] = {'stdout': stdout, 'stderr': stderr}
    return dumps(responses)
def index():
    """Main WSGI application entry.

    Validates the webhook POST (method, GitHub source IP, HMAC secret),
    answers pings, resolves matching hooks — including '-background'
    variants that are started without waiting — and runs each one with
    the payload tempfile path and event name as arguments.
    """
    path = normpath(abspath(dirname(__file__)))

    # Only POST is implemented - same effect as removing 'GET' in methods above
    if request.method != 'POST':
        logging.warning("We got a {} request, this isn't supported".format(
            request.method))
        abort(405)

    # Load config; fall back to permissive defaults when config.json is absent
    if isfile(join(path, 'config.json')):
        with open(join(path, 'config.json'), 'r') as cfg:
            config = loads(cfg.read())
    else:
        # abort(503, 'Configuration file config.json is missing.')
        config = {
            "github_ips_only": False,
            "enforce_secret": "",
            "return_scripts_info": False,
        }
    hooks = config.get('hooks_path', join(path, 'hooks'))
    logging.info("Config loaded, handling request")

    # Allow Github IPs only (ranges published at /meta)
    if config.get('github_ips_only', True):
        src_ip = ip_address(u'{}'.format(
            request.access_route[0])  # Fix stupid ipaddress issue
        )
        whitelist = requests.get('https://api.github.com/meta').json()['hooks']
        for valid_ip in whitelist:
            if src_ip in ip_network(valid_ip):
                break
        else:
            # for/else: no whitelisted network matched the client IP
            logging.warning("We got a request from unauthorized IP: %s",
                            src_ip)
            abort(403)

    # Enforce secret
    secret = config.get('enforce_secret', '')
    if secret:
        # Only SHA1 is supported
        header_signature = request.headers.get('X-Hub-Signature')
        if header_signature is None:
            logging.warning("No signature found when expecting one")
            abort(403)
        sha_name, signature = header_signature.split('=')
        if sha_name != 'sha1':
            logging.warning("Unsupported signature mech: %s", sha_name)
            abort(501)
        # HMAC requires the key to be bytes, but data is string
        # Convert key to ascii in unicode environment
        # https://stackoverflow.com/questions/33455463/python-3-sign-a-message-with-key-sha512
        mac = hmac.new(bytearray(secret, "ASCII"), msg=request.data,
                       digestmod=sha1)
        if not constant_time_compare(str(mac.hexdigest()), str(signature)):
            abort(403)

    # Implement ping
    event = request.headers.get('X-GitHub-Event', 'ping')
    if event == 'ping':
        logging.warning("Ping Pong")
        return dumps({'msg': 'pong'})

    # Gather data
    try:
        payload = request.get_json()
    except Exception:
        # NOTE(review): this logs the Exception *class*, not the caught
        # instance — probably meant to log the actual error.
        logging.warning('Request parsing failed with exception %s', Exception)
        abort(400)

    # Determining the branch is tricky, as it only appears for certain event
    # types and at different levels
    branch = None
    try:
        # Case 1: a ref_type indicates the type of ref.
        # This true for create and delete events.
        if 'ref_type' in payload:
            if payload['ref_type'] == 'branch':
                branch = payload['ref']
        # Case 2: a pull_request object is involved. This is pull_request and
        # pull_request_review_comment events.
        elif 'pull_request' in payload:
            # This is the TARGET branch for the pull-request, not the source
            # branch
            branch = payload['pull_request']['base']['ref']
        elif event in ['push']:
            # Push events provide a full Git ref in 'ref' and not a 'ref_type'.
            branch = payload['ref'].split('/', 2)[2]
    except KeyError:
        # If the payload structure isn't what we expect, we'll live without
        # the branch name
        pass

    # All current events have a repository, but some legacy events do not,
    # so let's be safe
    name = payload['repository']['name'] if 'repository' in payload else None

    meta = {'name': name, 'branch': branch, 'event': event}
    logging.info('Metadata:\n{}'.format(dumps(meta)))

    # Skip push-delete
    if event == 'push' and payload['deleted']:
        logging.info('Skipping push-delete event for {}'.format(dumps(meta)))
        return jsonify({'status': 'skipped'})

    # Possible hooks, most specific first; '-background' variants run async
    scripts = []
    if branch and name:
        scripts.append(join(hooks, '{event}-{name}-{branch}'.format(**meta)))
        scripts.append(
            join(hooks, '{event}-{name}-{branch}-background'.format(**meta)))
    if name:
        scripts.append(join(hooks, '{event}-{name}'.format(**meta)))
    scripts.append(join(hooks, '{event}'.format(**meta)))
    scripts.append(join(hooks, '{event}-background'.format(**meta)))
    scripts.append(join(hooks, 'all'))
    scripts.append(join(hooks, 'all-background'))

    # Check permissions
    scripts = [s for s in scripts if isfile(s) and access(s, X_OK)]
    if not scripts:
        return jsonify({'status': 'nop'})

    # Save payload to temporal file
    osfd, tmpfile = mkstemp()
    with fdopen(osfd, 'w') as pf:
        pf.write(dumps(payload))

    # Run scripts
    ran = {}
    for s in scripts:
        if s.endswith('-background'):
            # each backgrounded script gets its own tempfile
            # in this case, the backgrounded script MUST clean up after this!!!
            # the per-job tempfile will NOT be deleted here!
            osfd2, tmpfile2 = mkstemp()
            with fdopen(osfd2, 'w') as pf2:
                pf2.write(dumps(payload))
            proc = Popen([s, tmpfile2, event], stdout=PIPE, stderr=PIPE)
            ran[basename(s)] = {'backgrounded': 'yes'}
        else:
            proc = Popen([s, tmpfile, event], stdout=PIPE, stderr=PIPE)
            stdout, stderr = proc.communicate()
            ran[basename(s)] = {
                'returncode': proc.returncode,
                'stdout': stdout.decode('utf-8'),
                'stderr': stderr.decode('utf-8'),
            }
            # Log errors if a hook failed
            if proc.returncode != 0:
                logging.error('{} : {} \n{}'.format(s, proc.returncode,
                                                    stderr))

    # Remove temporal file (only the shared one; see background note above)
    remove(tmpfile)

    info = config.get('return_scripts_info', False)
    if not info:
        return jsonify({'status': 'done'})

    output = dumps(ran, sort_keys=True, indent=4)
    logging.info(output)
    return jsonify(ran)
def index():
    """
    Main WSGI application entry.

    Validates the webhook POST (method, GitHub source IP, HMAC secret),
    answers pings (optionally via a ping hook), then runs every matching
    executable hook with the payload tempfile path and event name as
    arguments.
    """
    path = normpath(abspath(dirname(__file__)))

    # Only POST is implemented - same effect as removing 'GET' in methods above
    if request.method != "POST":
        abort(405)

    # Load config; fall back to safe defaults when config.json is absent
    if isfile(join(path, "config.json")):
        with open(join(path, "config.json")) as cfg:
            config = loads(cfg.read())
    else:
        # abort(503, 'Configuration file config.json is missing.')
        config = {
            "github_ips_only": False,
            "enforce_secret": "",
            "return_scripts_info": False,
            "hooks_path": "/missing",
        }
    hooks = config.get("hooks_path", join(path, "hooks"))

    # Allow Github IPs only
    if config.get("github_ips_only", True):
        src_ip = ip_address(
            f"{request.access_route[0]}")  # Fix stupid ipaddress issue
        whitelist = requests.get("https://api.github.com/meta").json()["hooks"]
        for valid_ip in whitelist:
            if src_ip in ip_network(valid_ip):
                break
        else:
            # for/else: no whitelisted network matched the client IP
            logging.error(f"IP {src_ip} not allowed")
            abort(403)

    # Enforce secret
    secret = config.get("enforce_secret", "")
    if secret:
        # Only SHA1 is supported
        header_signature = request.headers.get("X-Hub-Signature")
        if header_signature is None:
            abort(403)
        sha_name, signature = header_signature.split("=")
        if sha_name != "sha1":
            abort(501)
        # Bug fix: hmac.new() requires a *bytes* key on Python 3; the previous
        # str(secret) raised TypeError whenever a secret was configured.
        mac = hmac.new(secret.encode("utf-8"), msg=request.data,
                       digestmod="sha1")
        # hmac.compare_digest exists on every Python that can run this file
        # (f-strings require >= 3.6), so the old hexversion fallback was
        # unreachable and has been dropped.
        if not hmac.compare_digest(str(mac.hexdigest()), str(signature)):
            abort(403)

    # Implement ping
    event = request.headers.get("X-GitHub-Event", "ping")
    if event == "ping":
        if has_hook(event):
            return jsonify(run_hook(event))
        # NOTE(review): "pang" (not "pong") marks the no-ping-hook case —
        # confirm this is intentional before "fixing" the spelling.
        return jsonify({"msg": "pang"})

    # Gather data
    try:
        payload = request.get_json()
    except Exception:
        logging.warning("Request parsing failed")
        abort(400)

    # Determining the branch is tricky, as it only appears for certain event
    # types and at different levels
    if payload is None:
        payload = {}
    branch = None
    try:
        # Case 1: a ref_type indicates the type of ref.
        # This is true for create and delete events.
        if "ref_type" in payload:
            if payload["ref_type"] == "branch":
                branch = payload["ref"]
        # Case 2: a pull_request object is involved. This is pull_request and
        # pull_request_review_comment events.
        elif "pull_request" in payload:
            # This is the TARGET branch for the pull-request, not the source
            # branch
            branch = payload["pull_request"]["base"]["ref"]
        elif event in ["push"]:
            # Push events provide a full Git ref in 'ref' and not a 'ref_type'.
            branch = payload["ref"].split("/", 2)[2]
    except KeyError:
        # If the payload structure isn't what we expect, we'll live without
        # the branch name
        pass

    # All current events have a repository, but some legacy events do not,
    # so let's be safe
    name = payload["repository"]["name"] if "repository" in payload else None

    meta = {"name": name, "branch": branch, "event": event}
    logging.info(f"Metadata:\n{dumps(meta)}")

    # Skip push-delete
    if event == "push" and payload["deleted"]:
        logging.info(f"Skipping push-delete event for {dumps(meta)}")
        return jsonify({"status": "skipped"})

    # Possible hooks, most specific first
    scripts = []
    if branch and name:
        scripts.append(join(hooks, "{event}-{name}-{branch}".format(**meta)))
    if name:
        scripts.append(join(hooks, "{event}-{name}".format(**meta)))
    scripts.append(join(hooks, "{event}".format(**meta)))
    scripts.append(join(hooks, "all"))

    # Check permissions
    scripts = [s for s in scripts if isfile(s) and access(s, X_OK)]
    if not scripts:
        return jsonify({"status": "nop"})

    # Save payload to temporal file
    osfd, tmpfile = mkstemp()
    with fdopen(osfd, "w") as pf:
        pf.write(dumps(payload))

    # Run scripts
    ran = {}
    for s in scripts:
        proc = Popen([s, tmpfile, event], stdout=PIPE, stderr=PIPE)
        stdout, stderr = proc.communicate()
        ran[basename(s)] = {
            "returncode": proc.returncode,
            "stdout": stdout.decode("utf-8"),
            "stderr": stderr.decode("utf-8"),
        }
        # Log errors if a hook failed
        if proc.returncode != 0:
            logging.error(f"{s} : {proc.returncode} \n{stderr}")

    # Remove temporal file
    remove(tmpfile)

    info = config.get("return_scripts_info", False)
    if not info:
        return jsonify({"status": "done"})

    output = dumps(ran, sort_keys=True, indent=4)
    logging.info(output)
    return jsonify(ran)