def check_for_new_items():
    """Poll every monitored upstream repo and queue builds for new commits.

    Marks the feed as checked in redis with a 15-minute TTL so callers can
    rate-limit polling, then walks ``MONITOR_ITEMS`` — a mapping of service
    name ("github"/"gitlab") to a comma-separated project list — and asks the
    per-service checker whether anything new was pushed. All resulting
    package-name entries are queued in one ``add_to_build_queue`` call.
    """
    db.set("FEED_CHECKED", "True")
    db.expire("FEED_CHECKED", 900)
    build_pkgs = []
    # dict.items() works on both Python 2 and 3; iteritems() is 2.x-only.
    for service, project_list in MONITOR_ITEMS.items():
        logger.debug((service, project_list))
        projects = project_list.split(",")
        logger.debug(projects)
        for project in projects:
            # Skip empty entries produced by trailing/double commas.
            if not project:
                continue
            res = None
            if "github" == service:
                # GitHub entries are "owner/repo".
                owner_repo = project.split("/")
                logger.debug(owner_repo)
                res = check_github_repo(project=owner_repo[0], repo=owner_repo[1])
            elif "gitlab" == service:
                logger.debug(project)
                res = check_gitlab_repo(project_id=project)
            if res:
                # extend() instead of repeated list concatenation (O(n) total).
                build_pkgs.extend(res)
    if build_pkgs:
        add_to_build_queue(build_pkgs)
def check_github_repo(project=None, repo=None):
    """Check a GitHub repository for commits newer than the last one recorded.

    :param project: GitHub owner/organization name.
    :param repo: repository name.
    :return: list of single-element package-name lists to build (empty when
        nothing changed).
    """
    to_build = []
    client = login(token=GITHUB_TOKEN)
    redis_key = "antbs:monitor:github:%s:%s" % (project, repo)
    previous_sha = db.get(redis_key) or ""
    commit_iter = client.repository(project, repo).commits()
    newest_sha = None
    try:
        newest_sha = next(commit_iter).sha
    except StopIteration:
        pass
    if newest_sha == previous_sha:
        return to_build
    # The "pamac" repo builds under the "pamac-dev" package name.
    pkg_name = "pamac-dev" if "pamac" == repo else repo
    db.set(redis_key, newest_sha)
    to_build.append([pkg_name])
    return to_build
def publish_build_ouput(container=None, bld_obj=None, upd_repo=False, is_iso=False):
    """Stream a build container's log output to redis subscribers.

    Tails the docker logs for *container*, de-duplicates and timestamps each
    line, publishes it on the 'build-output' channel, waits for the build
    result to be decided, then renders the accumulated log as highlighted
    HTML onto *bld_obj*.

    :param container: docker container id to stream logs from.
    :param bld_obj: build object whose ``completed``/``failed``/``log`` fields
        are read and whose ``log_str`` receives the rendered HTML.
    :param upd_repo: True when run for a repo-update container (always
        publishes the ENDOFLOG marker).
    :param is_iso: True for ISO builds (their log lines are not re-pushed
        into ``bld_obj.log`` here).
    :return: None
    """
    if not container or not bld_obj:
        logger.error('Unable to publish build output. (Container is None)')
        return
    # proc = subprocess.Popen(['docker', 'logs', '--follow', container], stdout=subprocess.PIPE)
    # output = iter(proc.stdout.readline, '')
    output = doc.logs(container=container, stream=True)
    nodup = set()
    content = []
    for line in output:
        # time.sleep(.10)
        if not line or line == '' or 'makepkg]# PS1="' in line:
            continue
        line = line.rstrip()
        # De-dup on the line minus its first 25 characters — presumably a
        # timestamp-like prefix so repeats collapse to one. TODO confirm.
        end = line[25:]
        # UTF-8 locale lines are allowed through even when repeated.
        if end not in nodup or (end in nodup and 'UTF-8' in end):
            nodup.add(end)
            # line = re.sub(r'(?<=[\w\d])(( \')|(\' )(?=[\w\d]+))|(\'\n)', ' ', line)
            line = line.replace("'", '')
            line = line.replace('"', '')
            line = '[%s]: %s' % (datetime.datetime.now().strftime("%m/%d/%Y %I:%M%p"), line)
            if len(line) > 150:
                line = truncate_middle(line, 150)
            content.append(line)
            db.publish('build-output', line)
            db.set('build_log_last_line', line)
    # The result is decided once exactly one of completed/failed is set;
    # poll every 3s until the builder flips one of them.
    result_ready = bld_obj.completed != bld_obj.failed
    if not result_ready:
        while not result_ready:
            result_ready = bld_obj.completed != bld_obj.failed
            time.sleep(3)
    failed = bld_obj.failed
    if upd_repo or failed:
        db.publish('build-output', 'ENDOFLOG')
    log = bld_obj.log
    existing = True
    if len(log) < 1 and not failed and not is_iso:
        # First successful package build: persist the captured lines and skip
        # the HTML re-render below (it only runs for pre-existing logs).
        existing = False
        for line in content:
            log.rpush(line)
    if existing:
        # Render the stored log as highlighted HTML for the web UI.
        log_content = '\n '.join(log)
        pretty = highlight(log_content, BashLexer(),
                           HtmlFormatter(style='monokai', linenos='inline',
                                         prestyles="background:#272822;color:#fff;",
                                         encoding='utf-8'))
        bld_obj.log_str = pretty
def process_cnchi(self):
    """Toggle the stored Cnchi A/B-test bucket and report the previous value.

    Reads "CNCHI_A_B_TEST" from redis, flips it between "A" and "B" (any
    other stored value is left untouched), and stores the pre-toggle value
    as a JSON payload in ``self.result``.
    """
    current = db.get("CNCHI_A_B_TEST")
    flipped = {"A": "B", "B": "A"}.get(current)
    if flipped is not None:
        db.set("CNCHI_A_B_TEST", flipped)
    self.result = json.dumps({"msg": current})
def publish_build_ouput(container=None, bld_obj=None, upd_repo=False):
    """Stream a build container's log output to redis subscribers (older variant).

    Tails the docker logs for *container*, filters noise lines, de-duplicates
    and timestamps the rest, publishes each on the 'build-output' channel,
    then stores/renders the accumulated log on *bld_obj*.

    :param container: docker container id to stream logs from.
    :param bld_obj: build object whose ``log()`` list is read/extended and
        whose ``log_str`` receives the rendered HTML.
    :param upd_repo: True when run for a repo-update container (publishes the
        ENDOFLOG marker when done).
    :return: None
    """
    if not container or not bld_obj:
        logger.error('Unable to publish build output. (Container is None)')
        return
    # proc = subprocess.Popen(['docker', 'logs', '--follow', container], stdout=subprocess.PIPE)
    # output = iter(proc.stdout.readline, '')
    output = doc.logs(container, stream=True)
    nodup = set()
    content = []
    for line in output:
        # Throttle the stream slightly between lines.
        time.sleep(.10)
        if not line or line == '' or "Antergos Automated Build Server" in line or "--passphrase" in line \
                or 'makepkg]# PS1="' in line:
            continue
        line = line.rstrip()
        # if db.get('isoBuilding') == "True":
        #     line = line[15:]
        # De-dup on the line minus its first 25 characters — presumably a
        # timestamp-like prefix. TODO confirm.
        end = line[25:]
        if end not in nodup:
            nodup.add(end)
            # Strip stray quote characters adjacent to word characters.
            line = re.sub('(?<=[\w\d]) \'(?=[\w\d]+)', ' ', line)
            # if line[-1:] == "'" or line[-1:] == '"':
            #     line = line[:-1]
            line = re.sub('(?<=[\w\d])\' (?=[\w\d]+)', ' ', line)
            # bad_date = re.search(r"\d{4}-\d{2}-[\d\w:\.]+Z{1}", line)
            # if bad_date:
            #     line = line.replace(bad_date.group(0), datetime.datetime.now().strftime("%m/%d/%Y %I:%M%p"))
            line = '[%s]: %s' % (datetime.datetime.now().strftime("%m/%d/%Y %I:%M%p"), line)
            if len(line) > 150:
                line = truncate_middle(line, 120)
            content.append(line)
            db.publish('build-output', line)
            db.set('build_log_last_line', line)
    if upd_repo:
        db.publish('build-output', 'ENDOFLOG')
    # content = '\n '.join(content)
    log = bld_obj.log()
    existing = True
    if not log or len(log) < 1:
        existing = False
        # NOTE(review): rpush(content) pushes the whole list as a single
        # value; the newer variant of this function pushes line-by-line —
        # confirm which behavior is intended.
        log.rpush(content)
    if existing:
        # Render a pre-existing log as highlighted HTML for the web UI.
        log_content = '\n '.join(log)
        pretty = highlight(log_content, BashLexer(),
                           HtmlFormatter(style='monokai', linenos='inline',
                                         prestyles="background:#272822;color:#fff;",
                                         encoding='utf-8'))
        bld_obj.log_str = pretty
def handle_worker_exception(job, exc_type, exc_value, traceback):
    # TODO: This needs some thought on how to recover instead of bailing on entire build queue
    """Clean up after an unhandled exception in an RQ worker job.

    Kills and removes the docker container associated with the failed job,
    records the failure in redis for build-queue jobs, and resets global
    status to idle when nothing else is queued.

    :param job: the failed RQ job; ``job['origin']`` names its source queue.
    :param exc_type: exception class raised by the job.
    :param exc_value: exception instance.
    :param traceback: traceback object for the failure (logged below).
    :return: True so RQ treats the exception as handled.
    """
    doc = docker.Client(base_url='unix://var/run/docker.sock', timeout=10)
    # Each queue stores its active container id under a different key.
    if job['origin'] == 'build_queue':
        container = db.get('container')
    elif job['origin'] == 'repo_queue':
        container = db.get('repo_container')
    else:
        container = ''
    queue = status.queue
    now_building = status.now_building
    try:
        doc.kill(container)
        doc.remove_container(container)
    except Exception:
        # Best-effort cleanup; the container may already be gone.
        logger.error('Unable to kill container')
    if job['origin'] == 'build_queue':
        db.set('%s:result' % now_building['key'], 'failed')
        db.rpush('failed', now_building['build_id'])
    if not queue or len(queue) == 0 or queue == []:
        # Nothing left to build: clear staging/cache dirs and go idle.
        repo = os.path.join("/tmp", "staging")
        cache = os.path.join("/tmp", "pkg_cache")
        remove(repo)
        remove(cache)
        remove('/opt/antergos-packages')
        # db.set('idle', "True")
        status.idle = True
        # db.set('building', 'Idle')
        status.current_status = 'Idle'
        # db.hset('now_building', 'pkg', '')
        # db.set('container', '')
        # db.set('building_num', '')
        # db.set('building_start', '')
    logger.error('Caught Build Exception: %s', traceback)
    return True
def check_gitlab_repo(project_id=None):
    """Check a GitLab project for push events newer than the last recorded one.

    :param project_id: GitLab project id to inspect.
    :return: list of single-element package-name lists to build (possibly empty).
    """
    queue_additions = []
    gl = Gitlab("https://gitlab.com", GITLAB_TOKEN)
    gl.auth()
    proj = gl.Project(id=project_id)
    redis_key = "antbs:monitor:gitlab:%s" % project_id
    previous_stamp = db.get(redis_key)
    # Only the newest "pushed to" event matters; stop after the first one.
    for event in proj.Event():
        if event.action_name != "pushed to":
            continue
        if event.created_at != previous_stamp:
            db.set(redis_key, event.created_at)
            queue_additions.append(["numix-icon-theme-square"])
            queue_additions.append(["numix-icon-theme-square-kde"])
        break
    return queue_additions
def check_for_new_items():
    """Poll the Numix GitHub and GitLab repos and queue builds for new pushes.

    Sets a 15-minute "FEED_CHECKED" marker in redis so callers can rate-limit
    polling, compares the newest commit/push against the last recorded one
    for each source, and adds the affected packages to the build queue.
    """
    db.set('FEED_CHECKED', 'True')
    db.expire('FEED_CHECKED', 900)
    queued = []

    # --- GitHub: numixproject/numix-icon-theme ---
    gh_client = login(token=GITHUB_TOKEN)
    previous_sha = db.get('ANTBS_GITHUB_LAST_EVENT') or ''
    commit_iter = gh_client.repository('numixproject', "numix-icon-theme").commits()
    newest_sha = None
    try:
        newest_sha = commit_iter.next().sha
    except StopIteration:
        pass
    if newest_sha != previous_sha:
        db.set('ANTBS_GITHUB_LAST_EVENT', newest_sha)
        queued.append(['numix-icon-theme'])

    # --- GitLab: project 61284 (numix square themes) ---
    gl = Gitlab('https://gitlab.com', GITLAB_TOKEN)
    gl.auth()
    square_proj = gl.Project(id='61284')
    previous_stamp = db.get('ANTBS_GITLAB_LAST_UPDATED')
    # Only the newest "pushed to" event matters; stop after the first one.
    for event in square_proj.Event():
        if event.action_name != 'pushed to':
            continue
        if event.created_at != previous_stamp:
            db.set('ANTBS_GITLAB_LAST_UPDATED', event.created_at)
            queued.append(['numix-icon-theme-square'])
            queued.append(['numix-icon-theme-square-kde'])
        break

    if len(queued) > 0:
        add_to_build_queue(queued)
def build_iso(pkg_obj=None):
    """Build an Antergos ISO image inside the antergos/mkarchiso container.

    Records the current ISO-output directory count, drops marker files that
    tell the container which flavor (i686/minimal) to build, runs the
    container while streaming its log, and judges success by whether a new
    file appeared in the output directory.

    :param pkg_obj: package object for the ISO "package" (name selects the
        i686/minimal variants).
    :return: True on success, False on any failure.
    """
    status.iso_building = True
    # Count of files currently in the ISO output dir; compared after the run
    # to detect whether a new ISO was actually produced.
    in_dir_last = len([name for name in os.listdir('/srv/antergos.info/repo/iso/testing')])
    if in_dir_last is None:
        in_dir_last = "0"
    db.set('pkg_count_iso', in_dir_last)
    bld_obj = process_and_save_build_metadata(pkg_obj=pkg_obj)
    build_id = bld_obj.bnum
    fetch_and_compile_translations(translations_for=["cnchi_updater", "antergos-gfxboot"])
    # Marker files read by the container to select the build variant.
    flag = '/srv/antergos.info/repo/iso/testing/.ISO32'
    minimal = '/srv/antergos.info/repo/iso/testing/.MINIMAL'
    if 'i686' in pkg_obj.name:
        if not os.path.exists(flag):
            open(flag, 'a').close()
    else:
        if os.path.exists(flag):
            os.remove(flag)
    if 'minimal' in pkg_obj.name:
        out_dir = '/out'
        if not os.path.exists(minimal):
            open(minimal, 'a').close()
    else:
        out_dir = '/out'
        if os.path.exists(minimal):
            os.remove(minimal)
    # Create docker host config dict
    hconfig = create_host_config(privileged=True, cap_add=['ALL'],
                                 binds={
                                     '/opt/archlinux-mkarchiso': {
                                         'bind': '/start',
                                         'ro': False
                                     },
                                     '/run/dbus': {
                                         'bind': '/var/run/dbus',
                                         'ro': False
                                     },
                                     '/srv/antergos.info/repo/iso/testing': {
                                         'bind': out_dir,
                                         'ro': False
                                     }},
                                 restart_policy={
                                     "MaximumRetryCount": 2,
                                     "Name": "on-failure"})
    iso_container = {}
    try:
        iso_container = doc.create_container("antergos/mkarchiso", command='/start/run.sh',
                                             name=pkg_obj.name, host_config=hconfig,
                                             cpuset='0-3')
        if iso_container.get('Warnings') and iso_container.get('Warnings') != '':
            logger.error(iso_container.get('Warnings'))
    except Exception as err:
        logger.error('Create container failed. Error Msg: %s' % err)
        bld_obj.failed = True
        return False
    bld_obj.container = iso_container.get('Id')
    status.container = bld_obj.container
    try:
        doc.start(bld_obj.container)
        cont = bld_obj.container
        # Stream container logs to redis in a separate process.
        stream_process = Process(target=publish_build_ouput,
                                 kwargs=dict(container=cont, bld_obj=bld_obj, is_iso=True))
        stream_process.start()
        result = doc.wait(cont)
        if result != 0:
            bld_obj.failed = True
            # NOTE(review): this early return skips stream_process.join() and
            # the end_str/Timeline bookkeeping below — confirm intentional.
            logger.error('[CONTAINER EXIT CODE] Container %s exited. Return code was %s' % (pkg_obj.name, result))
            return False
        else:
            bld_obj.completed = True
            logger.info('[CONTAINER EXIT CODE] Container %s exited. Return code was %s' % (pkg_obj.name, result))
    except Exception as err:
        logger.error('Start container failed. Error Msg: %s' % err)
        bld_obj.failed = True
        return False
    stream_process.join()
    # Success is inferred from a new file appearing in the output directory.
    in_dir = len([name for name in os.listdir('/srv/antergos.info/repo/iso/testing')])
    last_count = int(db.get('pkg_count_iso'))
    if in_dir > last_count:
        bld_obj.completed = True
        tlmsg = 'Build <a href="/build/%s">%s</a> for <strong>%s</strong> was successful.' % (
            build_id, build_id, pkg_obj.name)
        Timeline(msg=tlmsg, tl_type=4)
        completed = status.completed
        completed.rpush(bld_obj.bnum)
    else:
        bld_obj.failed = True
        bld_obj.completed = False
        tlmsg = 'Build <a href="/build/%s">%s</a> for <strong>%s</strong> failed.' % (build_id, build_id, pkg_obj.name)
        Timeline(msg=tlmsg, tl_type=5)
        failed = status.failed
        failed.rpush(build_id)
    bld_obj.end_str = datetime.datetime.now().strftime("%m/%d/%Y %I:%M%p")
    if not bld_obj.failed:
        remove('/opt/archlinux-mkarchiso/antergos-iso')
        run_docker_clean(pkg_obj.name)
        db.set('antbs:misc:cache_buster:flag', True)
        return True
    return False
def build_pkgs(pkg_info=None):
    """Build one package in the antergos/makepkg container and stage it.

    Prepares the tmp/result/cache directories (pruning stale cached
    packages), runs the build container while streaming its log, then signs
    the result and updates the staging repo database.

    :param pkg_info: package object to build (supplies name, depends,
        build_path, autosum and path).
    :return: True when the build succeeded and the staging repo was updated,
        False otherwise (including when *pkg_info* is None).
    """
    if pkg_info is None:
        return False
    # Create our tmp directories
    result = '/tmp/result'
    cache = '/var/tmp/pkg_cache'
    for d in [result, cache, '/var/tmp/32build', '/var/tmp/32bit']:
        if os.path.exists(d) and 'pkg_cache' not in d:
            shutil.rmtree(d)
            os.makedirs(d, 0o777)
        elif os.path.exists(d) and 'pkg_cache' in d:
            # The package cache is kept but pruned instead of recreated.
            logger.info('@@-build_pkg.py-@@ 476 | Cleaning package cache....')
            status.current_status = 'Cleaning package cache.'
            for pcache in os.listdir(d):
                pcache = os.path.join(d, pcache)
                if not os.path.isdir(pcache):
                    logger.error('@@-build_pkg.py-@@ 479 | pcache is not a directory')
                    continue
                for pfile in os.listdir(pcache):
                    # Package name is the part before the version component.
                    pname = re.search('^([a-z]|[0-9]|-|_)+(?=-\d|r|v)', pfile)
                    if not pname or pname == '':
                        continue
                    pname = pname.group(0)
                    pfile = os.path.join(pcache, pfile)
                    dtime = time.time()
                    # Drop entries older than 7 days or still tracked in the repo.
                    if os.stat(pfile).st_mtime < (dtime - (7 * 86400)) or status.all_packages.ismember(pname):
                        remove(pfile)
        else:
            os.makedirs(d, 0o777)
    # Single-iteration placeholder list — the loop body runs exactly once.
    pkglist1 = ['1']
    in_dir_last = len([name for name in os.listdir(result)])
    db.set('pkg_count', in_dir_last)
    for i in range(len(pkglist1)):
        pkg = pkg_info.name
        if pkg and pkg is not None and pkg != '':
            pkgbuild_dir = pkg_info.build_path
            pkg_deps = pkg_info.depends or []
            pkg_deps_str = ' '.join(pkg_deps) if pkg_deps else ''
            bld_obj = process_and_save_build_metadata(pkg_obj=pkg_info)
            build_id = bld_obj.bnum
            # Environment flags consumed by /makepkg/build.sh.
            if pkg_info is not None and pkg_info.autosum == "True":
                build_env = ['_AUTOSUMS=True']
            else:
                build_env = ['_AUTOSUMS=False']
            if '/cinnamon/' in pkg_info.path:
                build_env.append('_ALEXPKG=True')
            else:
                build_env.append('_ALEXPKG=False')
            hconfig = docker_utils.create_pkgs_host_config(cache, pkgbuild_dir, result)
            try:
                container = doc.create_container("antergos/makepkg",
                                                 command="/makepkg/build.sh " + pkg_deps_str,
                                                 volumes=['/var/cache/pacman', '/makepkg',
                                                          '/antergos', '/pkg', '/root/.gnupg',
                                                          '/staging', '/32bit', '/32build',
                                                          '/result'],
                                                 environment=build_env, cpuset='0-3',
                                                 name=pkg, host_config=hconfig)
                if container.get('Warnings') and container.get('Warnings') != '':
                    logger.error(container.get('Warnings'))
            except Exception as err:
                logger.error('Create container failed. Error Msg: %s' % err)
                bld_obj.failed = True
                continue
            bld_obj.container = container.get('Id')
            status.container = bld_obj.container
            try:
                doc.start(container.get('Id'))
                cont = bld_obj.container
                # Stream container logs to redis in a separate process.
                stream_process = Process(target=publish_build_ouput,
                                         kwargs=dict(container=cont, bld_obj=bld_obj))
                stream_process.start()
                result = doc.wait(cont)
                if result != 0:
                    bld_obj.failed = True
                    logger.error('[CONTAINER EXIT CODE] Container %s exited. Return code was %s' % (pkg, result))
                else:
                    logger.info('[CONTAINER EXIT CODE] Container %s exited. Return code was %s' % (pkg, result))
                    bld_obj.completed = True
                stream_process.join()
            except Exception as err:
                logger.error('Start container failed. Error Msg: %s' % err)
                bld_obj.failed = True
                bld_obj.completed = False
                continue
            repo_updated = False
            if bld_obj.completed:
                # Only a signed package gets pushed into the staging repo db.
                signed = sign_pkgs.sign_packages(bld_obj.pkgname)
                if signed:
                    db.publish('build-output', 'Updating staging repo database..')
                    repo_updated = update_main_repo(rev_result='staging', bld_obj=bld_obj, )
            if repo_updated:
                tlmsg = 'Build <a href="/build/%s">%s</a> for <strong>%s</strong> was successful.' % (
                    build_id, build_id, pkg)
                Timeline(msg=tlmsg, tl_type=4)
                completed = status.completed
                completed.rpush(bld_obj.bnum)
                bld_obj.review_status = 'pending'
            else:
                tlmsg = 'Build <a href="/build/%s">%s</a> for <strong>%s</strong> failed.' % (build_id, build_id, pkg)
                Timeline(msg=tlmsg, tl_type=5)
                bld_obj.failed = True
                bld_obj.completed = False
                failed = status.failed
                failed.rpush(build_id)
            bld_obj.end_str = datetime.datetime.now().strftime("%m/%d/%Y %I:%M%p")
    if not bld_obj.failed:
        db.set('antbs:misc:cache_buster:flag', True)
        return True
    return False
def update_main_repo(rev_result=None, bld_obj=None, is_review=False, rev_pkgname=None):
    """Run the repo-update container to refresh a pacman repo database.

    :param rev_result: review result; 'staging' targets the antergos-staging
        repo, any other truthy value targets the main antergos repo. Falsy
        values make this a no-op.
    :param bld_obj: build object supplying the package name and receiving
        streamed log output.
    :param is_review: True when called from the package-review flow (log
        streaming is skipped in that case).
    :param rev_pkgname: overrides ``bld_obj.pkgname`` when provided.
    :return: True on success, False on failure, None when *rev_result* is falsy.
    """
    logger.debug('update_main_repo fired! %s', rev_result)
    if rev_result:
        repo = 'antergos'
        repodir = 'main'
        if rev_result == 'staging':
            # _RESULT is blanked for staging runs.
            rev_result = ''
            repo = 'antergos-staging'
            repodir = 'staging'
        result = '/tmp/result'
        if os.path.exists(result):
            shutil.rmtree(result)
        os.mkdir(result, 0o777)
        if rev_pkgname is not None:
            pkgname = rev_pkgname
        else:
            pkgname = bld_obj.pkgname
        command = "/makepkg/build.sh"
        # Environment consumed by the build script inside the container.
        pkgenv = ["_PKGNAME=%s" % pkgname, "_RESULT=%s" % rev_result, "_UPDREPO=True",
                  "_REPO=%s" % repo, "_REPO_DIR=%s" % repodir]
        # Preserve the current status text so it can be restored afterwards.
        building_saved = False
        if not status.idle:
            building_saved = status.current_status
        else:
            status.idle = False
        status.current_status = 'Updating repo database.'
        container = None
        run_docker_clean("update_repo")
        hconfig = docker_utils.create_repo_update_host_config()
        try:
            container = doc.create_container("antergos/makepkg", command=command,
                                             name="update_repo", environment=pkgenv,
                                             volumes=['/makepkg', '/root/.gnupg', '/main',
                                                      '/result', '/staging'],
                                             host_config=hconfig)
            db.set('update_repo_container', container.get('Id'))
            doc.start(container.get('Id'))
            if not is_review:
                stream_process = Process(target=publish_build_ouput,
                                         kwargs=dict(container=container.get('Id'),
                                                     bld_obj=bld_obj, upd_repo=True))
                stream_process.start()
            result = doc.wait(container.get('Id'))
            if not is_review:
                stream_process.join()
            if result != 0:
                logger.error('update repo failed. exit status is: %s', result)
            else:
                doc.remove_container(container, v=True)
                db.set('antbs:misc:cache_buster:flag', True)
        except Exception as err:
            result = 1
            logger.error('Start container failed. Error Msg: %s' % err)
        # Restore whatever status was showing before this run.
        if not status.idle:
            if building_saved:
                status.current_status = building_saved
            else:
                status.idle = True
                status.current_status = 'Idle'
        if result != 0:
            return False
        else:
            return True
def build_iso():
    """Build the Antergos ISO image(s) inside the mkarchiso docker container.

    Iterates over the x86_64/i686 architectures (honoring the redis
    'iso:one:arch' and 'isoMinimal' flags), records per-build bookkeeping in
    redis, pulls and compiles the updater translations, runs the
    antergos/mkarchiso container (restarting it once on failure), and judges
    success by whether a new file appeared in the ISO output directory.
    """
    iso_arch = ['x86_64', 'i686']
    # Count of files currently in the output dir; compared after each run to
    # detect whether a new ISO was produced.
    in_dir_last = len([name for name in os.listdir('/srv/antergos.info/repo/iso/testing')])
    if in_dir_last is None:
        in_dir_last = "0"
    db.set('pkg_count_iso', in_dir_last)
    is_minimal = db.get('isoMinimal')
    if is_minimal == 'True':
        iso_name = 'antergos-iso-minimal-'
    else:
        iso_name = 'antergos-iso-'
    for arch in iso_arch:
        if db.exists('iso:one:arch') and arch == 'x86_64':
            continue
        pkgobj = package.get_pkg_object(iso_name + arch)
        failed = False
        # Per-build redis bookkeeping (id, timestamps, log key, status text).
        db.incr('build_number')
        dt = datetime.datetime.now().strftime("%m/%d/%Y %I:%M%p")
        build_id = db.get('build_number')
        pkgobj.save_to_db('builds', build_id, 'list')
        this_log = 'build_log:%s' % build_id
        db.set('%s:start' % this_log, dt)
        db.set('building_num', build_id)
        db.hset('now_building', 'build_id', build_id)
        db.hset('now_building', 'key', this_log)
        db.hset('now_building', 'pkg', pkgobj.name)
        db.set(this_log, True)
        db.set('building_start', dt)
        logger.info('Building %s' % pkgobj.name)
        db.set('building', 'Building: %s' % pkgobj.name)
        db.lrem('queue', 0, pkgobj.name)
        db.set('%s:pkg' % this_log, pkgobj.name)
        db.set('%s:version' % this_log, pkgobj.version)
        # Marker files read by the container to select the build variant.
        flag = '/srv/antergos.info/repo/iso/testing/.ISO32'
        minimal = '/srv/antergos.info/repo/iso/testing/.MINIMAL'
        # BUGFIX: was "arch is 'i686'" — identity comparison with a string
        # literal only worked by CPython interning accident; use equality.
        if arch == 'i686':
            if not os.path.exists(flag):
                open(flag, 'a').close()
        else:
            if os.path.exists(flag):
                os.remove(flag)
        if is_minimal == "True":
            out_dir = '/out'
            if not os.path.exists(minimal):
                open(minimal, 'a').close()
        else:
            out_dir = '/out'
            if os.path.exists(minimal):
                os.remove(minimal)
        # Get and compile translations for updater script
        # TODO: Move this into its own method.
        trans_dir = "/opt/antergos-iso-translations/"
        trans_files_dir = os.path.join(trans_dir, "translations/antergos.cnchi_updaterpot")
        dest_dir = '/srv/antergos.info/repo/iso/testing/trans'
        if not os.path.exists(dest_dir):
            os.mkdir(dest_dir)
        try:
            subprocess.check_call(['tx', 'pull', '-a', '-r', 'antergos.cnchi_updaterpot',
                                   '--minimum-perc=50'], cwd=trans_dir)
            for r, d, f in os.walk(trans_files_dir):
                for tfile in f:
                    logger.info('tfile is %s' % tfile)
                    logger.info('tfile cut is %s' % tfile[:-2])
                    # Compile each .po file to .mo and move it to the ISO dir.
                    mofile = tfile[:-2] + 'mo'
                    logger.info('mofile is %s' % mofile)
                    subprocess.check_call(['msgfmt', '-v', tfile, '-o', mofile],
                                          cwd=trans_files_dir)
                    os.rename(os.path.join(trans_files_dir, mofile),
                              os.path.join(dest_dir, mofile))
        except subprocess.CalledProcessError as err:
            logger.error(err.output)
        except Exception as err:
            logger.error(err)
        nm = iso_name + arch
        # Initiate communication with docker daemon
        run_docker_clean(nm)
        hconfig = create_host_config(privileged=True, cap_add=['ALL'],
                                     binds={
                                         '/opt/archlinux-mkarchiso': {
                                             'bind': '/start',
                                             'ro': False
                                         },
                                         '/run/dbus': {
                                             'bind': '/var/run/dbus',
                                             'ro': False
                                         },
                                         '/srv/antergos.info/repo/iso/testing': {
                                             'bind': out_dir,
                                             'ro': False
                                         }},
                                     restart_policy={
                                         "MaximumRetryCount": 2,
                                         "Name": "on-failure"})
        try:
            iso_container = doc.create_container("antergos/mkarchiso", command='/start/run.sh',
                                                 tty=True, name=nm, host_config=hconfig,
                                                 cpuset='0-3')
            db.set('container', iso_container.get('Id'))
        except Exception as err:
            logger.error("Cant connect to Docker daemon. Error msg: %s", err)
            failed = True
            break
        try:
            doc.start(iso_container, privileged=True, cap_add=['ALL'], binds={
                '/opt/archlinux-mkarchiso': {
                    'bind': '/start',
                    'ro': False
                },
                '/run/dbus': {
                    'bind': '/var/run/dbus',
                    'ro': False
                },
                '/srv/antergos.info/repo/iso/testing': {
                    'bind': out_dir,
                    'ro': False
                },
            })
            cont = db.get('container')
            # Stream container logs to redis in a separate process.
            stream_process = Process(target=publish_build_ouput, args=(cont, this_log))
            stream_process.start()
            result = doc.wait(cont)
            result2 = None
            # BUGFIX: was "result is not 0" — identity test against an int
            # literal; use value comparison.
            if result != 0:
                # One retry: restart the container and wait again.
                doc.restart(cont)
                stream_process2 = Process(target=publish_build_ouput, args=(cont, this_log))
                stream_process2.start()
                result2 = doc.wait(cont)
                if result2 != 0:
                    # failed = True
                    # db.set('build_failed', "True")
                    logger.error('[CONTAINER EXIT CODE] Container %s exited. Return code was %s' % (nm, result))
            # BUGFIX: was "result is 0 or (result2 and result2 is 0)" — when
            # result2 == 0 the "result2 and ..." clause is falsy, so a
            # successful retry was never recorded. (None == 0 is False, so the
            # no-retry case is unaffected.)
            if result == 0 or result2 == 0:
                logger.info('[CONTAINER EXIT CODE] Container %s exited. Return code was %s' % (nm, result))
                db.set('build_failed', "False")
        except Exception as err:
            logger.error("Cant start container. Error msg: %s", err)
            break
        db.publish('build-output', 'ENDOFLOG')
        db.set('%s:end' % this_log, datetime.datetime.now().strftime("%m/%d/%Y %I:%M%p"))
        # Success is inferred from a new file appearing in the output dir.
        in_dir = len([name for name in os.listdir('/srv/antergos.info/repo/iso/testing')])
        last_count = int(db.get('pkg_count_iso'))
        if in_dir > last_count:
            db.incr('pkg_count_iso', (in_dir - last_count))
            db.rpush('completed', build_id)
            db.set('%s:result' % this_log, 'completed')
            # db.set('%s:review_stat' % this_log, '1')
        else:
            # BUGFIX: was "'%s ...' % iso_name + arch" — '%' binds tighter
            # than '+', so arch was appended after the message text.
            logger.error('%s not found after container exit.' % (iso_name + arch))
            failed = True
            db.set('%s:result' % this_log, 'failed')
            db.rpush('failed', build_id)
        remove('/opt/archlinux-mkarchiso/antergos-iso')
        doc.remove_container(cont, v=True)
def build_pkgs(last=False, pkg_info=None):
    """Build one package in the antergos/makepkg container, sign it and push
    it to the staging repo (older variant).

    :param last: True when this is the final package of the current queue
        (kept for the caller's bookkeeping; not used directly here).
    :param pkg_info: package object to build.
    :return: True on a fully successful build+sign+repo-update, else False.
    """
    if pkg_info is None:
        return False
    # Create our tmp directories
    result = os.path.join("/tmp", "result")
    cache = os.path.join("/var/tmp", "pkg_cache")
    for d in [result, cache]:
        if os.path.exists(d) and 'result' in d:
            shutil.rmtree(d)
            os.mkdir(d, 0o777)
        elif os.path.exists(d) and 'pkg_cache' in d:
            # The package cache is kept but pruned instead of recreated.
            logger.info('@@-build_pkg.py-@@ 476 | Cleaning package cache....')
            db.set('building', 'Cleaning package cache.')
            for pcache in os.listdir(d):
                pcache = os.path.join(d, pcache)
                if not os.path.isdir(pcache):
                    logger.error('@@-build_pkg.py-@@ 479 | pcache is not a directory')
                    continue
                for pfile in os.listdir(pcache):
                    # Package name is the part before the version component.
                    pname = re.search('^([a-z]|[0-9]|-|_)+(?=-\d|r|v)', pfile)
                    if not pname or pname == '':
                        continue
                    pname = pname.group(0)
                    pfile = os.path.join(pcache, pfile)
                    dtime = time.time()
                    # Drop entries older than 7 days or still tracked in the repo.
                    # NOTE(review): the newer variant uses status.all_packages
                    # (attribute) rather than a call — confirm which is right.
                    if os.stat(pfile).st_mtime < (dtime - 7 * 86400) or status.all_packages().ismember(pname):
                        remove(pfile)
        else:
            os.mkdir(d, 0o777)
    dirs = ['/var/tmp/32build', '/var/tmp/32bit']
    for d in dirs:
        if os.path.exists(d):
            shutil.rmtree(d)
        os.mkdir(d, 0o777)
    # pkglist = db.lrange('queue', 0, -1)
    # Single-iteration placeholder list — the loop body runs exactly once.
    pkglist1 = ['1']
    in_dir_last = len([name for name in os.listdir(result)])
    db.set('pkg_count', in_dir_last)
    for i in range(len(pkglist1)):
        pkg = pkg_info.name
        if pkg and pkg is not None and pkg != '':
            pkgbuild_dir = pkg_info.build_path
            status.current_status = 'Building %s with makepkg' % pkg
            bld_obj = build_obj.get_build_object(pkg_obj=pkg_info)
            bld_obj.failed = False
            bld_obj.completed = False
            bld_obj.version_str = pkg_info.version_str
            bld_obj.start_str = datetime.datetime.now().strftime("%m/%d/%Y %I:%M%p")
            status.building_num = bld_obj.bnum
            status.building_start = bld_obj.start_str
            build_id = bld_obj.bnum
            tlmsg = 'Build <a href="/build/%s">%s</a> for <strong>%s</strong> started.' % (
                build_id, build_id, pkg)
            Timeline(msg=tlmsg, tl_type=3)
            pbuilds = pkg_info.builds()
            pbuilds.append(build_id)
            bld_obj.pkgname = pkg
            pkg_deps = pkg_info.depends() or []
            pkg_deps_str = ' '.join(pkg_deps)
            run_docker_clean(pkg)
            # Environment flags consumed by /makepkg/build.sh.
            if pkg_info is not None and pkg_info.autosum == "True":
                build_env = ['_AUTOSUMS=True']
            else:
                build_env = ['_AUTOSUMS=False']
            if '/cinnamon/' in pkg_info.path:
                build_env.append('_ALEXPKG=True')
            else:
                build_env.append('_ALEXPKG=False')
            hconfig = docker_utils.create_pkgs_host_config(cache, pkgbuild_dir, result)
            try:
                container = doc.create_container("antergos/makepkg",
                                                 command="/makepkg/build.sh " + pkg_deps_str,
                                                 volumes=['/var/cache/pacman', '/makepkg',
                                                          '/repo', '/pkg', '/root/.gnupg',
                                                          '/staging', '/32bit', '/32build',
                                                          '/result'],
                                                 environment=build_env, cpuset='0-3',
                                                 name=pkg, host_config=hconfig)
                if container.get('Warnings') and container.get('Warnings') != '':
                    logger.error(container.get('Warnings'))
            except Exception as err:
                logger.error('Create container failed. Error Msg: %s' % err)
                bld_obj.failed = True
                bld_obj.completed = False
                continue
            bld_obj.container = container.get('Id')
            try:
                doc.start(container.get('Id'))
                cont = bld_obj.container
                # Stream container logs to redis in a separate process.
                stream_process = Process(target=publish_build_ouput, args=(cont, bld_obj))
                stream_process.start()
                result = doc.wait(cont)
                # BUGFIX: was "result is not 0" — identity comparison against
                # an int literal; use value comparison.
                if result != 0:
                    bld_obj.failed = True
                    bld_obj.completed = False
                    logger.error('[CONTAINER EXIT CODE] Container %s exited. Return code was %s' % (pkg, result))
                else:
                    logger.info('[CONTAINER EXIT CODE] Container %s exited. Return code was %s' % (pkg, result))
                    bld_obj.failed = False
                    bld_obj.completed = True
            except Exception as err:
                logger.error('Start container failed. Error Msg: %s' % err)
                bld_obj.failed = True
                bld_obj.completed = False
                continue
            # db.publish('build-ouput', 'ENDOFLOG')
            # stream = doc.logs(container, stdout=True, stderr=True, timestamps=True)
            # log_stream = stream.split('\n')
            # db_filter_and_add(log_stream, this_log)
            # in_dir = len([name for name in os.listdir(result)])
            # last_count = int(db.get('pkg_count'))
            # logger.info('last count is %s %s' % (last_count, type(last_count)))
            # logger.info('in_dir is %s %s' % (in_dir, type(in_dir)))
            pkgs2sign = None
            if not bld_obj.failed:
                db.publish('build-output', 'Signing package..')
                # Collect the built 64- and 32-bit artifacts for signing.
                pkgs2sign = glob.glob(
                    '/srv/antergos.info/repo/iso/testing/uefi/antergos-staging/x86_64/%s-***.xz' % pkg)
                pkgs2sign32 = glob.glob(
                    '/srv/antergos.info/repo/iso/testing/uefi/antergos-staging/i686/%s-***.xz' % pkg)
                pkgs2sign = pkgs2sign + pkgs2sign32
                logger.info('[PKGS TO SIGN] %s' % pkgs2sign)
                if pkgs2sign is not None and pkgs2sign != []:
                    try_sign = sign_pkgs.batch_sign(pkgs2sign)
                else:
                    try_sign = False
                if try_sign:
                    db.publish('build-output', 'Signature created successfully for %s' % pkg)
                    logger.info('[SIGN PKG] Signature created successfully for %s' % pkg)
                    db.publish('build-output', 'Updating staging repo database..')
                    update_main_repo(pkg, 'staging', bld_obj)
                else:
                    bld_obj.failed = True
                    bld_obj.completed = False
            if not bld_obj.failed:
                db.publish('build-output', 'Build completed successfully!')
                tlmsg = 'Build <a href="/build/%s">%s</a> for <strong>%s</strong> completed.' % (
                    build_id, build_id, pkg)
                Timeline(msg=tlmsg, tl_type=4)
                # db.incr('pkg_count', (in_dir - last_count))
                completed = status.completed()
                completed.rpush(build_id)
                bld_obj.review_stat = 'pending'
            else:
                tlmsg = 'Build <a href="/build/%s">%s</a> for <strong>%s</strong> failed.' % (
                    build_id, build_id, pkg)
                Timeline(msg=tlmsg, tl_type=5)
                # Remove any signed artifacts from the failed build.
                if pkgs2sign is not None:
                    for p in pkgs2sign:
                        remove(p)
                        remove(p + '.sig')
                failed = status.failed()
                failed.rpush(build_id)
            bld_obj.end_str = datetime.datetime.now().strftime("%m/%d/%Y %I:%M%p")
    status.container = ''
    status.building_num = ''
    status.building_start = ''
    if not bld_obj.failed:
        db.set('antbs:misc:cache_buster:flag', True)
        return True
    return False
def update_main_repo(pkg=None, rev_result=None, this_log=None):
    """Run the repo-update container to refresh a pacman repo database
    (older variant).

    :param pkg: package name to add/update in the repo; required.
    :param rev_result: review result; 'skip' targets the antergos-staging
        repo, any other truthy value targets the main antergos repo.
    :param this_log: redis log key for streaming output; when None the log
        goes to 'repo_update_log' and the ENDOFLOG marker is suppressed.
    :return: None
    """
    if pkg and rev_result:
        repo = 'antergos'
        repodir = 'main'
        if rev_result == 'skip':
            # _RESULT is cleared for staging runs.
            rev_result = None
            repo = 'antergos-staging'
            repodir = 'staging'
        result = '/tmp/result'
        if os.path.exists(result):
            shutil.rmtree(result)
        os.mkdir(result, 0o777)
        command = "/makepkg/build.sh"
        # Environment consumed by the build script inside the container.
        pkgenv = ["_PKGNAME=%s" % pkg, "_RESULT=%s" % rev_result, "_UPDREPO=True",
                  "_REPO=%s" % repo, "_REPO_DIR=%s" % repodir]
        # Preserve the current status text so it can be restored afterwards.
        building_saved = False
        if not status.idle:
            building_saved = status.current_status
        else:
            status.idle = False
        status.current_status = 'Updating repo database.'
        container = None
        run_docker_clean("update_repo")
        try:
            container = doc.create_container("antergos/makepkg", command=command,
                                             name="update_repo", environment=pkgenv,
                                             volumes=['/makepkg', '/root/.gnupg', '/main',
                                                      '/result', '/staging'])
            db.set('update_repo_container', container.get('Id'))
            doc.start(container, binds={
                DOC_DIR: {
                    'bind': '/makepkg',
                    'ro': True
                },
                '/srv/antergos.info/repo/antergos': {
                    'bind': '/main',
                    'ro': False
                },
                '/srv/antergos.info/repo/iso/testing/uefi/antergos-staging/': {
                    'bind': '/staging',
                    'ro': False
                },
                '/root/.gnupg': {
                    'bind': '/root/.gnupg',
                    'ro': False
                },
                '/tmp/result': {
                    'bind': '/result',
                    'ro': False
                }
            }, privileged=True)
            if this_log is None:
                this_log = 'repo_update_log'
                upd_repo = False
            else:
                upd_repo = True
            cont = db.get('update_repo_container')
            # Stream container logs to redis in a separate process.
            # NOTE(review): stream_process is never joined here.
            stream_process = Process(target=publish_build_ouput, args=(cont, this_log, upd_repo))
            stream_process.start()
            doc.wait(container)
            db.set('antbs:misc:cache_buster:flag', True)
        except Exception as err:
            logger.error('Start container failed. Error Msg: %s' % err)
        doc.remove_container(container, v=True)
        # Restore whatever status was showing before this run.
        if not status.idle:
            if building_saved:
                status.current_status = building_saved
            else:
                status.idle = True
                status.current_status = 'Idle'
def handle_hook(first=False, last=False):
    """Process a webhook-triggered build request.

    Clones the antergos-packages repo fresh, then either kicks off an ISO
    build (when ``status.iso_flag`` is set) or builds the next queued
    package, updating the package's success/failure rates afterwards. When
    *last* is set, cleans up and resets status to idle.

    :param first: True for the first job of a batch (builds the base docker
        image and sorts the queue by dependencies).
    :param last: True for the final job of a batch (triggers cleanup).
    :return: True after an ISO build, False on early failure, otherwise None.
    """
    status.idle = False
    pull_from = 'antergos'
    packages = status.queue()
    # Always start from a fresh clone of the packages repo.
    if os.path.exists(REPO_DIR):
        remove(REPO_DIR)
    try:
        subprocess.check_call(
            ['git', 'clone', 'http://github.com/antergos/antergos-packages.git'],
            cwd='/opt')
        subprocess.check_call(['chmod', '-R', 'a+rw', REPO_DIR], cwd='/opt')
    except subprocess.CalledProcessError as err:
        logger.error(err)
    if status.iso_flag:
        status.iso_flag = False
        status.current_status = 'Building docker image.'
        status.iso_building = True
        image = docker_utils.maybe_build_mkarchiso()
        db.lrem('queue', 0, 'antergos-iso')
        db.lrem('queue', 0, 'antergos-iso.openbox')
        if image:
            # Queue one ISO "package" per architecture, stamped with today's
            # date as the version, then run the ISO build.
            archs = ['x86_64', 'i686']
            if db.get('isoMinimal') == 'True':
                iso_name = 'antergos-iso-minimal-'
            else:
                iso_name = 'antergos-iso-'
            for arch in archs:
                db.rpush('queue', iso_name + arch)
                version = datetime.datetime.now().strftime('%Y.%m.%d')
                pkgobj = package.get_pkg_object(iso_name + arch)
                pkgobj.save_to_db('version', version)
            build_iso()
        db.set('isoBuilding', 'False')
        db.set('isoMinimal', 'False')
        db.set('idle', "True")
        return True
    elif first and not status.iso_flag:
        status.current_status = 'Building docker image.'
        image = docker_utils.maybe_build_base_devel()
        if not image:
            return False
        logger.info('Checking database for packages.')
        status.current_status = 'Checking database for queued packages'
        all_deps = process_package_queue(packages)
        logger.info('All queued packages are in the database, checking deps to determine build order.')
        status.current_status = 'Determining build order by sorting package depends'
        if len(all_deps) > 1:
            # Topologically sort the queue so dependencies build first.
            topsort = check_deps(all_deps)
            check = []
            packages.delete()
            for p in topsort:
                # TODO: What if there is already a group of packages in queue prior to the current group?
                packages.append(p)
        logger.info('Check deps complete. Starting build_pkgs')
        logger.debug((packages, status.iso_flag))
        status.current_status = 'Check deps complete. Starting build container.'
    if not status.iso_flag and len(packages) > 0:
        pack = status.queue().lpop()
        if pack and pack is not None and pack != '':
            pkgobj = package.get_pkg_object(name=pack)
        else:
            return False
        # Record job metadata on the current RQ job for the dashboard.
        rqjob = get_current_job(db)
        rqjob.meta['is_first'] = first
        rqjob.meta['is_last'] = last
        rqjob.meta['package'] = pkgobj.name
        rqjob.save()
        status.now_building = pkgobj.name
        built = build_pkgs(last, pkgobj)
        # TODO: Move this into its own method
        if built:
            # Recompute the package's historical success/failure percentages.
            completed = status.completed()
            failed = status.failed()
            blds = pkgobj.builds()
            total = len(blds)
            if total > 0:
                # NOTE(review): iterates pkgobj.blds while total comes from
                # pkgobj.builds() — confirm these refer to the same list.
                success = len([x for x in pkgobj.blds if x in completed])
                failure = len([x for x in pkgobj.blds if x in failed])
                if success > 0:
                    success = 100 * success / total
                else:
                    success = 0
                if failure > 0:
                    failure = 100 * failure / total
                else:
                    failure = 0
                pkgobj.success_rate = success
                pkgobj.failure_rate = failure
    if last:
        # Final job of the batch: clean up and return to idle.
        remove('/opt/antergos-packages')
        status.idle = True
        status.building = 'Idle'
        status.now_building = 'Idle'
        status.container = ''
        status.building_num = ''
        status.building_start = ''
        logger.info('All builds completed.')