def build_pkg_handler():
    """
    Build the next package in the queue.

    :return: False if the queue yields no usable package name, otherwise None.
    """
    status.idle = False
    packages = status.queue

    if len(packages) > 0:
        pack = status.queue.lpop()
        if pack:
            pkgobj = package.get_pkg_object(name=pack)
        else:
            return False

        rqjob = get_current_job(db)
        rqjob.meta['package'] = pkgobj.name
        rqjob.save()

        status.now_building = pkgobj.name

        if pkgobj.is_iso is True or pkgobj.is_iso == 'True':
            status.iso_building = True
            build_result = build_iso(pkgobj)
        else:
            build_result = build_pkgs(pkgobj)

        # TODO: Move this into its own method
        if build_result is not None:
            completed = status.completed
            failed = status.failed
            blds = pkgobj.builds
            total = len(blds)
            if total > 0:
                success = len([x for x in blds if x in completed])
                failure = len([x for x in blds if x in failed])
                if success > 0:
                    success = 100 * success / total
                else:
                    success = 0
                if failure > 0:
                    failure = 100 * failure / total
                else:
                    failure = 0
                pkgobj.success_rate = success
                pkgobj.failure_rate = failure

        if build_result is True:
            run_docker_clean(pkgobj.pkgname)

    if not status.queue and not status.hook_queue:
        remove('/opt/antergos-packages')
        status.idle = True
        status.building = 'Idle'
        status.now_building = 'Idle'
        status.container = ''
        status.building_num = ''
        status.building_start = ''
        status.iso_building = False
        logger.info('All builds completed.')
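
# Usage sketch (not from the original module): build_pkg_handler pulls its job context
# with rq.get_current_job(), so it is presumably run as a python-rq job. A hypothetical
# enqueue call could look like the commented example below; the queue name 'builds' is
# an assumption, and 'db' is the redis connection already used throughout this module.
#
#   from rq import Queue
#
#   build_queue = Queue('builds', connection=db)
#   build_queue.enqueue(build_pkg_handler)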


def process_package_queue(the_queue=None):
    if the_queue is not None:
        all_deps = []
        for pkg in the_queue:
            if pkg == '':
                continue
            pkg_obj = package.get_pkg_object(name=pkg)
            version = pkg_obj.get_version()
            if not version:
                continue

            logger.info('Updating pkgver in database for %s to %s' % (pkg_obj.name, version))
            status.current_status = 'Updating pkgver in database for %s to %s' % (pkg_obj.name, version)

            depends = pkg_obj.get_deps()

            paths = [os.path.join('/opt/antergos-packages/', pkg),
                     os.path.join('/opt/antergos-packages/deepin_desktop', pkg),
                     os.path.join('/opt/antergos-packages/cinnamon', pkg)]
            for p in paths:
                if os.path.exists(p):
                    pkg_obj.build_path = p
                    break

            if pkg == 'cnchi-dev':
                shutil.copy2('/var/tmp/antergos-packages/cnchi-dev/cnchi-dev.tar',
                             '/opt/antergos-packages/cnchi-dev')

            if depends and len(the_queue) > 1:
                all_deps.append(depends)

        logger.info('@@-build_pkg.py-@@ 189 | all_deps before topsort: %s' % all_deps)
        return all_deps


def get_repo_info(repo=None, logged_in=False):
    """
    Get package info for all packages in a repo.

    :param repo: (str) Name of the repo.
    :param logged_in: (bool) Whether the requesting user is logged in.
    :return: (dict, list) Package info keyed by pkgid, and the pending-review builds.
    """
    if repo is None:
        abort(500)
    pkg_list = {}
    p, a, rev_pending = get_build_info(1, repo, logged_in)
    logger.info('@@-antbs.py-@@ 295 | GET_REPO_INFO - CACHE CHECK FAILED. WE ARE NOT USING CACHED INFO')
    all_packages = glob.glob('/srv/antergos.info/repo/%s/x86_64/***.pkg.tar.xz' % repo)
    if all_packages:
        for item in all_packages:
            # logger.info(item)
            item = item.split('/')[-1]
            # Strip the version/release/arch suffix to recover the package name.
            item = re.search(r'^([a-z]|[0-9]|-|_)+(?=-\d|r|v)', item)
            item = item.group(0) if item else ''
            if not item:
                continue
            logger.info(item)
            pkg = package.get_pkg_object(item)
            builds = pkg.builds
            try:
                bnum = builds[0]
            except Exception:
                bnum = ''
            bld_obj = build_obj.get_build_object(bnum=bnum)
            all_info = dict(bnum=bnum, name=pkg.name, version=pkg.version_str,
                            review_dev=bld_obj.review_dev, review_stat=bld_obj.review_stat,
                            review_date=bld_obj.review_date, pkgid=pkg.pkgid)
            pkg_list[pkg.pkgid] = all_info

    return pkg_list, rev_pending
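
# Illustration sketch (not from the original module): the regex above is meant to strip
# the version/release/arch suffix from a repo filename. The filename below is a made-up
# example using a package name that appears elsewhere in this module:
#
#   >>> import re
#   >>> re.search(r'^([a-z]|[0-9]|-|_)+(?=-\d|r|v)',
#   ...           'numix-icon-theme-square-1.2-1-any.pkg.tar.xz').group(0)
#   'numix-icon-theme-square'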


def process_package_queue():
    """
    Process all packages in the hook queue and collect their dependencies.

    :return: (list) The dependencies of the queued packages, used to determine build order.
    :raise ValueError: If the hook queue is None.
    """
    hook_queue = status.hook_queue
    logger.info(hook_queue)
    if hook_queue is None:
        raise ValueError('hook_queue cannot be None')
    all_deps = []

    if not db.exists('BUILD_REPO_UPDATED'):
        if db.setnx('BUILD_REPO_LOCK', True):
            db.expire('BUILD_REPO_LOCK', 300)
            try:
                subprocess.check_call(
                    ['git', 'clone', 'http://github.com/antergos/antergos-packages.git'],
                    cwd='/opt')
                subprocess.check_call(['chmod', '-R', 'a+rw', REPO_DIR], cwd='/opt')
            except subprocess.CalledProcessError:
                try:
                    subprocess.check_call(['git', 'reset', '--soft', 'origin/master'],
                                          cwd='/opt/antergos-packages')
                    subprocess.check_call(['git', 'pull'], cwd='/opt/antergos-packages')
                    db.setex('BUILD_REPO_UPDATED', 350, True)
                except subprocess.CalledProcessError as err:
                    logger.error(err)
            db.delete('BUILD_REPO_LOCK')
        else:
            # Another worker holds the lock; wait for it to finish updating the repo.
            while not db.exists('BUILD_REPO_UPDATED') and db.exists('BUILD_REPO_LOCK'):
                time.sleep(2)

    for pkg in hook_queue:
        logger.info(pkg)
        if pkg == '':
            continue
        pkg_obj = package.get_pkg_object(name=pkg)
        version = pkg_obj.get_version()
        if not version:
            status.hook_queue.remove(pkg_obj.name)
            if 'cnchi-dev' != pkg:
                logger.error('pkgbuild path is not valid for %s', pkg_obj.name)
            else:
                continue

        logger.info('Updating pkgver in database for %s to %s' % (pkg_obj.name, version))
        status.current_status = 'Updating pkgver in database for %s to %s' % (pkg_obj.name, version)

        depends = pkg_obj.get_deps()

        if not pkg_obj.build_path or pkg_obj.build_path == '':
            paths = [os.path.join('/opt/antergos-packages/', pkg),
                     os.path.join('/opt/antergos-packages/deepin_desktop', pkg),
                     os.path.join('/opt/antergos-packages/cinnamon', pkg)]
            for p in paths:
                if os.path.exists(p):
                    pkg_obj.build_path = p
                    break

        if 'cnchi' in pkg:
            logger.info('cnchi package detected.')
            src = os.path.join('/var/tmp/antergos-packages/', pkg, 'cnchi')
            dest = os.path.join('/opt/antergos-packages/', pkg)
            remove(os.path.join(dest, 'cnchi'))
            shutil.move(src, dest)

            status.current_status = 'Fetching latest translations for %s from Transifex.' % pkg
            logger.info(status.current_status)
            cnchi_dir = '/opt/antergos-packages/%s' % pkg
            fetch_and_compile_translations(translations_for=["cnchi"], pkg_obj=pkg_obj)
            remove(os.path.join(cnchi_dir, 'cnchi/.git'))
            subprocess.check_output(['tar', '-cf', 'cnchi.tar', 'cnchi'],
                                    cwd='/opt/antergos-packages/%s' % pkg)
        elif 'numix-icon-theme-square' in pkg:
            src = os.path.join('/var/tmp/antergos-packages/', pkg, pkg)
            dest = os.path.join('/opt/antergos-packages/', pkg)
            shutil.move(src, dest)
            subprocess.check_output(['tar', '-cf', pkg + '.tar', pkg],
                                    cwd='/opt/antergos-packages/%s' % pkg)

        if depends and len(hook_queue) > 1:
            all_deps.append(depends)
        elif len(hook_queue) == 1:
            all_deps.append(1)

    return all_deps


def build_iso():
    iso_arch = ['x86_64', 'i686']
    in_dir_last = len([name for name in os.listdir('/srv/antergos.info/repo/iso/testing')])
    if in_dir_last is None:
        in_dir_last = "0"
    db.set('pkg_count_iso', in_dir_last)
    is_minimal = db.get('isoMinimal')
    if is_minimal == 'True':
        iso_name = 'antergos-iso-minimal-'
    else:
        iso_name = 'antergos-iso-'

    for arch in iso_arch:
        if db.exists('iso:one:arch') and arch == 'x86_64':
            continue

        pkgobj = package.get_pkg_object(iso_name + arch)
        failed = False
        db.incr('build_number')
        dt = datetime.datetime.now().strftime("%m/%d/%Y %I:%M%p")
        build_id = db.get('build_number')
        pkgobj.save_to_db('builds', build_id, 'list')
        this_log = 'build_log:%s' % build_id
        db.set('%s:start' % this_log, dt)
        db.set('building_num', build_id)
        db.hset('now_building', 'build_id', build_id)
        db.hset('now_building', 'key', this_log)
        db.hset('now_building', 'pkg', pkgobj.name)
        db.set(this_log, True)
        db.set('building_start', dt)
        logger.info('Building %s' % pkgobj.name)
        db.set('building', 'Building: %s' % pkgobj.name)
        db.lrem('queue', 0, pkgobj.name)
        db.set('%s:pkg' % this_log, pkgobj.name)
        db.set('%s:version' % this_log, pkgobj.version)

        flag = '/srv/antergos.info/repo/iso/testing/.ISO32'
        minimal = '/srv/antergos.info/repo/iso/testing/.MINIMAL'

        if arch == 'i686':
            if not os.path.exists(flag):
                open(flag, 'a').close()
        else:
            if os.path.exists(flag):
                os.remove(flag)

        if is_minimal == "True":
            out_dir = '/out'
            if not os.path.exists(minimal):
                open(minimal, 'a').close()
        else:
            out_dir = '/out'
            if os.path.exists(minimal):
                os.remove(minimal)

        # Get and compile translations for updater script
        # TODO: Move this into its own method.
        trans_dir = "/opt/antergos-iso-translations/"
        trans_files_dir = os.path.join(trans_dir, "translations/antergos.cnchi_updaterpot")
        dest_dir = '/srv/antergos.info/repo/iso/testing/trans'
        if not os.path.exists(dest_dir):
            os.mkdir(dest_dir)
        try:
            subprocess.check_call(
                ['tx', 'pull', '-a', '-r', 'antergos.cnchi_updaterpot', '--minimum-perc=50'],
                cwd=trans_dir)
            for r, d, f in os.walk(trans_files_dir):
                for tfile in f:
                    logger.info('tfile is %s' % tfile)
                    logger.info('tfile cut is %s' % tfile[:-2])
                    mofile = tfile[:-2] + 'mo'
                    logger.info('mofile is %s' % mofile)
                    subprocess.check_call(['msgfmt', '-v', tfile, '-o', mofile],
                                          cwd=trans_files_dir)
                    os.rename(os.path.join(trans_files_dir, mofile),
                              os.path.join(dest_dir, mofile))
        except subprocess.CalledProcessError as err:
            logger.error(err.output)
        except Exception as err:
            logger.error(err)

        nm = iso_name + arch
        # Initiate communication with docker daemon
        run_docker_clean(nm)
        hconfig = create_host_config(
            privileged=True, cap_add=['ALL'],
            binds={
                '/opt/archlinux-mkarchiso': {
                    'bind': '/start',
                    'ro': False
                },
                '/run/dbus': {
                    'bind': '/var/run/dbus',
                    'ro': False
                },
                '/srv/antergos.info/repo/iso/testing': {
                    'bind': out_dir,
                    'ro': False
                }},
            restart_policy={
                "MaximumRetryCount": 2,
                "Name": "on-failure"})
        try:
            iso_container = doc.create_container("antergos/mkarchiso", command='/start/run.sh',
                                                 tty=True, name=nm, host_config=hconfig,
                                                 cpuset='0-3')
            db.set('container', iso_container.get('Id'))
        except Exception as err:
            logger.error("Can't connect to Docker daemon. Error msg: %s", err)
            failed = True
            break

        try:
            doc.start(iso_container, privileged=True, cap_add=['ALL'], binds={
                '/opt/archlinux-mkarchiso': {
                    'bind': '/start',
                    'ro': False
                },
                '/run/dbus': {
                    'bind': '/var/run/dbus',
                    'ro': False
                },
                '/srv/antergos.info/repo/iso/testing': {
                    'bind': out_dir,
                    'ro': False
                },
            })

            cont = db.get('container')
            stream_process = Process(target=publish_build_ouput, args=(cont, this_log))
            stream_process.start()
            result = doc.wait(cont)
            result2 = None
            if result != 0:
                # Give the build one more chance before declaring it failed.
                doc.restart(cont)
                stream_process2 = Process(target=publish_build_ouput, args=(cont, this_log))
                stream_process2.start()
                result2 = doc.wait(cont)
                if result2 != 0:
                    # failed = True
                    # db.set('build_failed', "True")
                    logger.error('[CONTAINER EXIT CODE] Container %s exited. Return code was %s'
                                 % (nm, result))
            if result == 0 or (result2 is not None and result2 == 0):
                logger.info('[CONTAINER EXIT CODE] Container %s exited. Return code was %s'
                            % (nm, result))
                db.set('build_failed', "False")
        except Exception as err:
            logger.error("Can't start container. Error msg: %s", err)
            break

        db.publish('build-output', 'ENDOFLOG')
        db.set('%s:end' % this_log, datetime.datetime.now().strftime("%m/%d/%Y %I:%M%p"))

        in_dir = len([name for name in os.listdir('/srv/antergos.info/repo/iso/testing')])
        last_count = int(db.get('pkg_count_iso'))
        if in_dir > last_count:
            db.incr('pkg_count_iso', (in_dir - last_count))
            db.rpush('completed', build_id)
            db.set('%s:result' % this_log, 'completed')
            # db.set('%s:review_stat' % this_log, '1')
        else:
            logger.error('%s not found after container exit.' % (iso_name + arch))
            failed = True
            db.set('%s:result' % this_log, 'failed')
            db.rpush('failed', build_id)

        remove('/opt/archlinux-mkarchiso/antergos-iso')
        doc.remove_container(cont, v=True)


def handle_hook(first=False, last=False):
    status.idle = False
    pull_from = 'antergos'
    packages = status.queue()

    if os.path.exists(REPO_DIR):
        remove(REPO_DIR)
    try:
        subprocess.check_call(
            ['git', 'clone', 'http://github.com/antergos/antergos-packages.git'],
            cwd='/opt')
        subprocess.check_call(['chmod', '-R', 'a+rw', REPO_DIR], cwd='/opt')
    except subprocess.CalledProcessError as err:
        logger.error(err)

    if status.iso_flag:
        status.iso_flag = False
        status.current_status = 'Building docker image.'
        status.iso_building = True
        image = docker_utils.maybe_build_mkarchiso()
        db.lrem('queue', 0, 'antergos-iso')
        db.lrem('queue', 0, 'antergos-iso.openbox')
        if image:
            archs = ['x86_64', 'i686']
            if db.get('isoMinimal') == 'True':
                iso_name = 'antergos-iso-minimal-'
            else:
                iso_name = 'antergos-iso-'
            for arch in archs:
                db.rpush('queue', iso_name + arch)
                version = datetime.datetime.now().strftime('%Y.%m.%d')
                pkgobj = package.get_pkg_object(iso_name + arch)
                pkgobj.save_to_db('version', version)
            build_iso()
        db.set('isoBuilding', 'False')
        db.set('isoMinimal', 'False')
        db.set('idle', "True")
        return True

    elif first and not status.iso_flag:
        status.current_status = 'Building docker image.'
        image = docker_utils.maybe_build_base_devel()
        if not image:
            return False

        logger.info('Checking database for packages.')
        status.current_status = 'Checking database for queued packages'
        all_deps = process_package_queue(packages)

        logger.info('All queued packages are in the database, checking deps to determine build order.')
        status.current_status = 'Determining build order by sorting package depends'
        if len(all_deps) > 1:
            topsort = check_deps(all_deps)
            check = []
            packages.delete()
            for p in topsort:
                # TODO: What if there is already a group of packages in queue prior to the current group?
                packages.append(p)

        logger.info('Check deps complete. Starting build_pkgs')
        logger.debug((packages, status.iso_flag))
        status.current_status = 'Check deps complete. Starting build container.'

    if not status.iso_flag and len(packages) > 0:
        pack = status.queue().lpop()
        if pack:
            pkgobj = package.get_pkg_object(name=pack)
        else:
            return False

        rqjob = get_current_job(db)
        rqjob.meta['is_first'] = first
        rqjob.meta['is_last'] = last
        rqjob.meta['package'] = pkgobj.name
        rqjob.save()

        status.now_building = pkgobj.name

        built = build_pkgs(last, pkgobj)
        # TODO: Move this into its own method
        if built:
            completed = status.completed()
            failed = status.failed()
            blds = pkgobj.builds()
            total = len(blds)
            if total > 0:
                success = len([x for x in blds if x in completed])
                failure = len([x for x in blds if x in failed])
                if success > 0:
                    success = 100 * success / total
                else:
                    success = 0
                if failure > 0:
                    failure = 100 * failure / total
                else:
                    failure = 0
                pkgobj.success_rate = success
                pkgobj.failure_rate = failure

    if last:
        remove('/opt/antergos-packages')
        status.idle = True
        status.building = 'Idle'
        status.now_building = 'Idle'
        status.container = ''
        status.building_num = ''
        status.building_start = ''
        logger.info('All builds completed.')
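

# Refactor sketch for the "TODO: Move this into its own method" blocks duplicated in
# build_pkg_handler() and handle_hook() above. This helper is an assumption about how
# that extraction could look, not existing code; its name is hypothetical, and callers
# would resolve the completed, failed, and builds collections themselves (the two call
# sites use attribute access vs. call syntax) before passing them in.
def update_build_rates(pkgobj, blds, completed, failed):
    """Recompute and store a package's build success/failure percentages."""
    total = len(blds)
    if total < 1:
        return
    success = len([x for x in blds if x in completed])
    failure = len([x for x in blds if x in failed])
    pkgobj.success_rate = 100 * success / total if success > 0 else 0
    pkgobj.failure_rate = 100 * failure / total if failure > 0 else 0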