# Standard library and third-party imports used by the functions below.
# NOTE (assumption): module-level names such as db (redis connection), queue (rq queue),
# logger, builder, REPO_DIR, DOC_DIR, get_pkgver, get_deps and check_deps are expected
# to be defined elsewhere in this module/package.
import datetime
import ipaddress
import json
import os
import re
import shutil
import subprocess

import docker
import requests
from dateutil import parser
from flask import abort, request
from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import BashLexer


def hooked():
    # Store the IP address blocks that github uses for hook requests.
    hook_blocks = requests.get('https://api.github.com/meta').json()['hooks']

    if request.method == 'GET':
        return ' Nothing to see here, move along ...'

    elif request.method == 'POST':
        # Check if the POST request is from github.com
        if request.headers.get('X-Phab-Event') != "push":
            for block in hook_blocks:
                ip = ipaddress.ip_address(u'%s' % request.remote_addr)
                if ip in ipaddress.ip_network(block):
                    break  # the remote_addr is within the network range of github
            else:
                abort(403)

        if request.headers.get('X-GitHub-Event') == "ping":
            return json.dumps({'msg': 'Hi!'})
        if request.headers.get('X-GitHub-Event') != "push":
            return json.dumps({'msg': "wrong event type"})

        payload = json.loads(request.data)
        repo = payload['repository']['name']
        commits = payload['commits']
        changes = []
        added = []
        for commit in commits:
            changes.append(commit['modified'])
            added.append(commit['added'])

        if repo == "antergos-packages":
            db.set('idle', 'False')
            db.set('building', "Initializing...")
            logger.info(changes)
            has_pkgs = False
            all_changes = changes + added
            no_dups = []
            for changed in all_changes:
                for item in changed:
                    pak = os.path.dirname(item)
                    if pak is not None and pak != '' and pak not in no_dups:
                        logger.info('Adding %s to the build queue' % pak)
                        no_dups.append(pak)
                        has_pkgs = True
                        db.rpush('queue', pak)
            if has_pkgs:
                queue.enqueue_call(builder.handle_hook, timeout=9600)

        elif repo == "antergos-iso":
            db.set('idle', 'False')
            db.set('building', "Initializing...")
            db.set('isoFlag', 'True')
            queue.enqueue_call(builder.handle_hook, timeout=10000)

    return json.dumps({'msg': 'OK!'})
def handle_worker_exception(job, *exc_info):
    # Remove the failed job's container, clean up the temporary build
    # directories, and reset the build status keys in redis.
    doc = docker.Client(base_url='unix://var/run/docker.sock', version='1.12', timeout=10)
    container = db.get('container')
    doc.remove_container(container)
    repo = os.path.join("/tmp", "staging")
    cache = os.path.join("/tmp", "pkg_cache")
    try:
        shutil.rmtree(repo)
        shutil.rmtree(cache)
        shutil.rmtree('/opt/antergos-packages')
    except Exception:
        pass
    db.set('idle', "True")
    db.set('building', 'Idle')
    db.set('container', '')
    db.set('building_num', '')
    db.set('building_start', '')
def db_filter_and_add(output=None, this_log=None):
    if output is None or this_log is None:
        return
    nodup = set()
    part2 = None
    filtered = []
    for line in output:
        if not line or line == '':
            continue
        line = line.rstrip()
        # Dedupe on the text after the timestamp (first 20 chars).
        end = line[20:]
        if end not in nodup:
            nodup.add(end)
            line = line.replace("can't", "can not")
            # Rewrite ISO-8601 timestamps into a human-readable format.
            bad_date = re.search(r"\d{4}-.+Z(?=\s)", line)
            if bad_date:
                logger.info('The bad_date is %s' % bad_date)
                py_date = parser.parse(bad_date.group(0))
                logger.info('The py_date is %s' % py_date)
                good_date = py_date.strftime("%m/%d/%Y %I:%M%p")
                line = line.replace(bad_date.group(0), good_date)
            # Split overly long lines so the log viewer can handle them.
            if len(line) > 210:
                part1 = line[:210]
                part2 = line[210:]
                filtered.append(part1)
                # db.rpush('%s:content' % this_log, part1)
                continue
            elif part2:
                # db.rpush('%s:content' % this_log, part2)
                filtered.append(part2)
                part2 = None
                continue
            else:
                filtered.append(line)

    filtered_string = '\n '.join(filtered)
    # db.rpush('%s:content' % this_log, line)
    pretty = highlight(
        filtered_string, BashLexer(),
        HtmlFormatter(style='monokai', linenos='inline',
                      prestyles="background:#272822;color:#fff;"))
    db.set('%s:content' % this_log, pretty)
def build_iso():
    iso_arch = ['x86_64', 'i686']
    for arch in iso_arch:
        db.incr('build_number')
        dt = datetime.datetime.now().strftime("%m/%d/%Y %I:%M%p")
        build_id = db.get('build_number')
        this_log = 'build_log:%s' % build_id
        db.set('%s:start' % this_log, dt)
        db.set('building_num', build_id)
        db.set(this_log, True)
        db.set('building_start', dt)
        logger.info('Building antergos-iso-%s' % arch)
        db.set('building', arch)
        db.lrem('queue', 0, 'antergos-iso-%s' % arch)
        db.set('%s:pkg' % this_log, 'antergos-iso-%s' % arch)
        db.rpush('pkg:antergos-iso-%s:build_logs' % arch, build_id)

        # The ISO32 flag file signals that the 32-bit image should be built.
        flag = '/srv/antergos.org/ISO32'
        if arch == 'i686':
            if not os.path.exists(flag):
                open(flag, 'a').close()
        else:
            if os.path.exists(flag):
                os.remove(flag)

        # Initiate communication with docker daemon
        try:
            doc = docker.Client(base_url='unix://var/run/docker.sock', version='1.12', timeout=10)
            iso_container = doc.create_container(
                "lots0logs/antergos-iso",
                volumes=['/var/cache/pacman', '/antergos-iso/configs/antergos/out',
                         '/var/run/dbus', '/start', '/sys/fs/cgroup'],
                tty=True, name='antergos-iso-%s' % arch, cpu_shares=512)
            db.set('container', iso_container.get('Id'))
        except Exception as err:
            logger.error("Can't connect to Docker daemon. Error msg: %s", err)

        try:
            doc.start(iso_container, privileged=True, binds={
                '/var/cache/pacman': {
                    'bind': '/var/cache/pacman',
                    'ro': False
                },
                '/opt/archlinux-mkarchiso': {
                    'bind': '/start',
                    'ro': False
                },
                '/run/dbus': {
                    'bind': '/var/run/dbus',
                    'ro': False
                },
                '/srv/antergos.org': {
                    'bind': '/antergos-iso/configs/antergos/out',
                    'ro': False
                },
                '/sys/fs/cgroup': {
                    'bind': '/sys/fs/cgroup',
                    'ro': True
                }
            })
        except Exception as err:
            logger.error("Can't start container. Error msg: %s", err)

        doc.wait(iso_container)

        # Capture the container log and store the filtered output in redis.
        stream = doc.logs(iso_container, stdout=True, stderr=True, timestamps=True)
        log_stream = stream.split('\n')
        db_filter_and_add(log_stream, this_log)

        pkg = 'antergos-iso-%s' % arch
        iso_dir = os.listdir('/srv/antergos.org/')
        if pkg in iso_dir:
            db.rpush('completed', build_id)
            db.set('%s:result' % this_log, 'completed')
        else:
            logger.error('antergos-iso-%s not found after container exit.' % arch)
            failed = True
            db.set('%s:result' % this_log, 'failed')
            db.rpush('failed', build_id)

    try:
        shutil.rmtree('/opt/antergos-packages')
    except Exception:
        pass
    db.set('idle', "True")
    db.set('building', 'Idle')
    db.set('container', '')
    db.set('building_num', '')
    db.set('building_start', '')
    logger.info('All iso builds completed.')
def build_pkgs():
    # Initiate communication with docker daemon
    try:
        doc = docker.Client(base_url='unix://var/run/docker.sock', version='1.12', timeout=10)
        # doc.build(path=DOC_DIR, tag="arch-devel", quiet=False, timeout=None)
    except Exception as err:
        logger.error("Can't connect to Docker daemon. Error msg: %s", err)

    # Create our tmp directories
    repo = os.path.join("/tmp", "staging")
    cache = os.path.join("/tmp", "pkg_cache")
    for d in [repo, cache]:
        if not os.path.exists(d):
            os.mkdir(d, 0o777)

    db.set('pkg_count', '0')
    pkglist = db.lrange('queue', 0, -1)
    for _ in range(len(pkglist)):
        failed = False
        pkg = db.lpop('queue')
        if pkg is None or pkg == '':
            continue
        logger.info('Building %s' % pkg)
        db.incr('build_number')
        dt = datetime.datetime.now().strftime("%m/%d/%Y %I:%M%p")
        build_id = db.get('build_number')
        db.set('building_num', build_id)
        this_log = 'build_log:%s' % build_id
        db.set(this_log, True)
        db.rpush('pkg:%s:build_logs' % pkg, build_id)
        db.set('%s:start' % this_log, dt)
        db.set('building_start', dt)
        db.set('%s:pkg' % this_log, pkg)
        db.set('building', pkg)
        pkgdir = os.path.join(REPO_DIR, pkg)
        pkg_deps = db.lrange('pkg:%s:deps' % pkg, 0, -1)
        pkg_deps_str = ' '.join(pkg_deps)
        logger.info('pkg_deps_str is %s' % pkg_deps_str)

        try:
            container = doc.create_container(
                "antergos/makepkg",
                command=["/makepkg/build.sh", pkg_deps_str],
                name=pkg,
                volumes=['/var/cache/pacman', '/makepkg', '/repo', '/pkg',
                         '/root/.gnupg', '/staging'])
        except Exception as err:
            logger.error('Create container failed. Error Msg: %s' % err)
            failed = True
            continue
        db.set('container', container.get('Id'))

        try:
            doc.start(container, binds={
                cache: {
                    'bind': '/var/cache/pacman',
                    'ro': False
                },
                DOC_DIR: {
                    'bind': '/makepkg',
                    'ro': True
                },
                repo: {
                    'bind': '/staging',
                    'ro': False
                },
                pkgdir: {
                    'bind': '/pkg',
                    'ro': False
                },
                '/root/.gnupg': {
                    'bind': '/root/.gnupg',
                    'ro': False
                },
                '/srv/antergos.info/repo/iso/testing/uefi/antergos/': {
                    'bind': '/repo',
                    'ro': False
                }
            })
            doc.wait(container)
        except Exception as err:
            logger.error('Start container failed. Error Msg: %s' % err)
            failed = True
            continue

        # Capture the container log and store the filtered output in redis.
        stream = doc.logs(container, stdout=True, stderr=True, timestamps=True)
        log_stream = stream.split('\n')
        db_filter_and_add(log_stream, this_log)

        # A new file in the staging dir means the build produced a package.
        in_dir = len(os.listdir(repo))
        last_count = int(db.get('pkg_count'))
        logger.info('last count is %s %s' % (last_count, type(last_count)))
        logger.info('in_dir is %s %s' % (in_dir, type(in_dir)))
        if in_dir > last_count:
            db.incr('pkg_count', (in_dir - last_count))
            db.rpush('completed', build_id)
            db.set('%s:result' % this_log, 'completed')
        else:
            logger.error('No package found after container exit.')
            failed = True
            db.set('%s:result' % this_log, 'failed')
            db.rpush('failed', build_id)

        doc.remove_container(container)
        end = datetime.datetime.now().strftime("%m/%d/%Y %I:%M%p")
        db.set('%s:end' % this_log, end)

    logger.info('Moving pkgs into repo and updating repo database')
    try:
        repo_container = doc.create_container(
            "lots0logs/makepkg",
            command="/makepkg/repo_expect.sh",
            volumes=['/var/cache/pacman', '/makepkg', '/repo',
                     '/root/.gnupg', '/staging'])
    except Exception as err:
        logger.error('Create container failed. Error Msg: %s' % err)

    try:
        doc.start(repo_container, binds={
            cache: {
                'bind': '/var/cache/pacman',
                'ro': False
            },
            DOC_DIR: {
                'bind': '/makepkg',
                'ro': True
            },
            repo: {
                'bind': '/staging',
                'ro': False
            },
            '/root/.gnupg': {
                'bind': '/root/.gnupg',
                'ro': False
            },
            '/srv/antergos.info/repo/iso/testing/uefi/antergos/': {
                'bind': '/repo',
                'ro': False
            }
        })
        doc.wait(repo_container)
    except Exception as err:
        logger.error('Start container failed. Error Msg: %s' % err)

    doc.remove_container(repo_container)
    try:
        shutil.rmtree(repo)
        shutil.rmtree(cache)
        shutil.rmtree('/opt/antergos-packages')
    except Exception:
        pass
    db.set('idle', "True")
    db.set('building', 'Idle')
    db.set('container', '')
    db.set('building_num', '')
    db.set('building_start', '')
    logger.info('All builds completed. Repo has been updated.')
def handle_hook():
    iso_flag = db.get('isoFlag')
    if iso_flag == 'True':
        archs = ['x86_64', 'i686']
        for arch in archs:
            db.rpush('queue', 'antergos-iso-%s' % arch)
            version = datetime.datetime.now().strftime('%Y.%m.%d')
            if not db.exists('pkg:antergos-iso-%s' % arch):
                db.set('pkg:antergos-iso-%s' % arch, True)
                db.set('pkg:antergos-iso-%s:name' % arch, 'antergos-iso-%s' % arch)
            db.set('pkg:antergos-iso-%s:version' % arch, version)
        build_iso()
    else:
        logger.info('Pulling changes from github.')
        subprocess.call(['git', 'clone', 'http://github.com/lots0logs/antergos-packages.git'],
                        cwd='/opt')
        subprocess.call(['chmod', '-R', '777', 'antergos-packages'], cwd='/opt')

        # Check database to see if packages exist and add them if necessary.
        packages = db.lrange('queue', 0, -1)
        logger.info('Checking database for packages.')
        for package in packages:
            version = get_pkgver(package)
            depends = get_deps(package)
            if not db.exists('pkg:%s' % package):
                logger.info('%s not found in database, adding entry..' % package)
                db.set('pkg:%s' % package, True)
                db.set('pkg:%s:name' % package, package)
                for dep in depends:
                    db.rpush('pkg:%s:deps' % package, dep)
            logger.info('Updating pkgver in database for %s to %s' % (package, version))
            db.set('pkg:%s:version' % package, version)

        logger.info('All queued packages are in the database, '
                    'checking deps to determine build order.')
        check = check_deps(packages)
        if len(check) > 0:
            for c in check:
                logger.info('%s depends on a pkg in this build. '
                            'Moving it to the end of the queue.' % c)
                db.lrem('queue', 0, c)
                db.rpush('queue', c)
        logger.info('Check deps complete. Starting build_pkgs')
        build_pkgs()