def get_and_show_pkg_profile(pkgname=None):
    """Render the profile page for `pkgname`.

    Aborts with 404 when no name is given or the package is unknown to
    the build system. Lazily backfills an empty description from the
    package's PKGBUILD (`pkgdesc`) before rendering.
    """
    if pkgname is None:
        abort(404)

    # Only serve pages for packages the build system tracks.
    if not status.all_packages().ismember(pkgname):
        abort(404)

    pkgobj = package.Package(name=pkgname)

    # Backfill a missing description from the PKGBUILD's pkgdesc field.
    if pkgobj.description == '':
        desc = pkgobj.get_from_pkgbuild('pkgdesc')
        pkgobj.description = desc

    return render_template('package.html', pkg=pkgobj)
def __init__(self, name):
    """Create or load the redis-backed package record for `name`.

    When the record has no `pkgname` yet and a PKGBUILD directory for
    `name` exists under REPO_DIR, every tracked redis field is seeded
    with a type-appropriate empty value, a fresh package id is
    allocated, and the package is registered in the global package set.
    """
    # NOTE(review): passing `self` explicitly here looks suspicious
    # (it is also bound implicitly) — confirm the base class expects it.
    super(Package, self).__init__(self, name=name)
    self.maybe_update_pkgbuild_repo()
    try:
        is_new = not self.pkgname or self.pkgname == ''
        if is_new and os.path.exists(os.path.join(REPO_DIR, name)):
            # Seed each tracked field group with a default of its type.
            groups = ('redis_string', 'redis_string_bool', 'redis_string_int',
                      'redis_list', 'redis_zset')
            for group in groups:
                for key in self.all_keys[group]:
                    if group.endswith('bool'):
                        setattr(self, key, False)
                    elif group.endswith('int'):
                        setattr(self, key, 0)
                    elif group.endswith('list'):
                        setattr(self, key, RedisList.as_child(self, key, str))
                    elif group.endswith('zset'):
                        setattr(self, key, RedisZSet.as_child(self, key, str))
                    elif key != 'name':
                        # Plain string field; never blank out 'name'.
                        setattr(self, key, '')
            self.pkgname = name
            self.pkg_id = db.incr('antbs:misc:pkgid:next')
            status.all_packages().add(self.name)
    except Exception:
        logger.error('unable to init package object for %s', name)
def build_pkg_now():
    """Handle a manual "build now" request from the dashboard.

    Verifies the package exists (creating the Package record when its
    PKGBUILD has just been pushed), refuses anything in "pending review"
    status, rate-limits ISO builds, then pushes the package onto the
    build queue and records a timeline event. Always redirects back.
    """
    if request.method == 'POST':
        # .get() avoids a KeyError/400 when a form field is missing.
        pkgname = request.form.get('pkgname')
        dev = request.form.get('dev')
        if not pkgname:
            abort(500)

        pexists = status.all_packages().ismember(pkgname)
        if not pexists:
            # Unknown to redis — maybe the PKGBUILD was just pushed.
            try:
                package.Package(name=pkgname)
                if os.path.exists('/var/tmp/antergos-packages/' + pkgname):
                    pexists = True
            except Exception:
                pass

        if pexists:
            is_logged_in = user.is_authenticated()
            p, a, rev_pending = get_build_info(1, 'completed', is_logged_in)

            # Block rebuilds of packages awaiting review.
            pending = False
            for bnum in rev_pending:
                bld_obj = build_obj.get_build_object(bnum=bnum)
                if pkgname == bld_obj.pkgname:
                    pending = True
                    break

            if pending:
                flash('Unable to build %s because it is in "pending review" status.' % pkgname,
                      category='error')
            else:
                args = (True, True)
                if 'antergos-iso' in pkgname:
                    if status.iso_building:
                        # Only one ISO build may run at a time.
                        logger.info('RATE LIMIT ON ANTERGOS ISO IN EFFECT')
                        return redirect(redirect_url())
                    status.iso_flag = True
                    if 'openbox' in pkgname:
                        status.iso_minimal = True
                q = status.queue()
                q.rpush(pkgname)
                # NOTE(review): 84600 is probably a typo for 86400 (24h) — confirm.
                queue.enqueue_call(builder.handle_hook, args=args, timeout=84600)
                tl_event(
                    msg='<strong>%s</strong> added <strong>%s</strong> to the build queue.' % (
                        dev, pkgname),
                    tl_type='0')
        else:
            flash('Package not found. Has the PKGBUILD been pushed to github?', category='error')

    return redirect(redirect_url())
def __init__(self, request=None):
    """Gather state for an incoming webhook (or monitor-triggered) event
    and dispatch it to the appropriate handler.

    :param request: Flask request object, or None when triggered by the
        repo monitor (detected via the missing `.method` attribute).
    """
    self.can_process = False
    self.is_monitor = False
    self.is_cnchi = False
    try:
        self.request = request.method
    except AttributeError:
        # No HTTP request object — this run came from the repo monitor.
        self.request = False
        self.is_monitor = True

    if self.request is None or db is None or queue is None:
        logger.error("@@-webhook.py-@@ 40 | Cant process new webhook because request or db is None.")
    elif self.request or self.is_monitor is True:
        self.can_process = True
        # Keep the full request object around for the handlers below.
        self.request = request
        self.is_manual = False
        self.is_numix = False
        self.is_github = False
        self.is_gitlab = False
        self.changes = []
        self.phab_payload = False
        self.the_queue = status.queue()
        self.repo = "antergos-packages"
        self.payload = None
        self.full_name = None
        self.pusher = None
        self.commits = None
        self.result = None  # (was assigned twice in the original)
        self.building = status.now_building
        self.allpkgs = status.all_packages()
        self.is_authorized = self.is_from_authorized_sender()
        if self.is_authorized:
            # Process Webhook
            if self.is_manual:
                self.process_manual()
            if self.is_cnchi:
                self.process_cnchi()
            if self.is_github:
                self.process_github()
            if len(self.changes) > 0:
                self.process_changes()
        else:
            if self.result is None:
                self.result = "Nothing to see here, move along ..."
def build_pkgs(last=False, pkg_info=None):
    """Build `pkg_info` inside the antergos/makepkg docker container.

    Prepares the result/cache/32-bit work directories, runs the build
    container, streams its output, signs the resulting packages, updates
    the staging repo database, and records the build in the timeline and
    the completed/failed lists.

    :param last: unused here; kept for caller compatibility.
    :param pkg_info: package object to build; returns False when None.
    :return: True when the build (including signing) succeeded.
    """
    if pkg_info is None:
        return False

    # Create our tmp directories
    result = os.path.join("/tmp", "result")
    cache = os.path.join("/var/tmp", "pkg_cache")
    for d in [result, cache]:
        if os.path.exists(d) and 'result' in d:
            # The result dir is always recreated fresh.
            shutil.rmtree(d)
            os.mkdir(d, 0o777)
        elif os.path.exists(d) and 'pkg_cache' in d:
            logger.info('@@-build_pkg.py-@@ 476 | Cleaning package cache....')
            db.set('building', 'Cleaning package cache.')
            for pcache in os.listdir(d):
                pcache = os.path.join(d, pcache)
                if not os.path.isdir(pcache):
                    logger.error('@@-build_pkg.py-@@ 479 | pcache is not a directory')
                    continue
                for pfile in os.listdir(pcache):
                    # Extract the bare package name from the cached file name.
                    pname = re.search(r'^([a-z]|[0-9]|-|_)+(?=-\d|r|v)', pfile)
                    if not pname:
                        continue
                    pname = pname.group(0)
                    pfile = os.path.join(pcache, pfile)
                    dtime = time.time()
                    # Evict entries older than a week, or packages we build ourselves.
                    if os.stat(pfile).st_mtime < (dtime - 7 * 86400) \
                            or status.all_packages().ismember(pname):
                        remove(pfile)
        else:
            os.mkdir(d, 0o777)

    # Fresh scratch dirs for the 32-bit build stages.
    for d in ['/var/tmp/32build', '/var/tmp/32bit']:
        if os.path.exists(d):
            shutil.rmtree(d)
        os.mkdir(d, 0o777)

    # pkglist = db.lrange('queue', 0, -1)
    pkglist1 = ['1']
    in_dir_last = len([name for name in os.listdir(result)])
    db.set('pkg_count', in_dir_last)

    for i in range(len(pkglist1)):
        pkg = pkg_info.name
        if not pkg:
            continue

        pkgbuild_dir = pkg_info.build_path
        status.current_status = 'Building %s with makepkg' % pkg
        bld_obj = build_obj.get_build_object(pkg_obj=pkg_info)
        bld_obj.failed = False
        bld_obj.completed = False
        bld_obj.version_str = pkg_info.version_str
        bld_obj.start_str = datetime.datetime.now().strftime("%m/%d/%Y %I:%M%p")
        status.building_num = bld_obj.bnum
        status.building_start = bld_obj.start_str
        build_id = bld_obj.bnum
        tlmsg = 'Build <a href="/build/%s">%s</a> for <strong>%s</strong> started.' % (
            build_id, build_id, pkg)
        Timeline(msg=tlmsg, tl_type=3)
        pbuilds = pkg_info.builds()
        pbuilds.append(build_id)
        bld_obj.pkgname = pkg
        pkg_deps = pkg_info.depends() or []
        pkg_deps_str = ' '.join(pkg_deps)
        run_docker_clean(pkg)

        # Environment flags understood by /makepkg/build.sh.
        if pkg_info is not None and pkg_info.autosum == "True":
            build_env = ['_AUTOSUMS=True']
        else:
            build_env = ['_AUTOSUMS=False']
        if '/cinnamon/' in pkg_info.path:
            build_env.append('_ALEXPKG=True')
        else:
            build_env.append('_ALEXPKG=False')

        hconfig = docker_utils.create_pkgs_host_config(cache, pkgbuild_dir, result)
        try:
            container = doc.create_container(
                "antergos/makepkg",
                command="/makepkg/build.sh " + pkg_deps_str,
                volumes=['/var/cache/pacman', '/makepkg', '/repo', '/pkg', '/root/.gnupg',
                         '/staging', '/32bit', '/32build', '/result'],
                environment=build_env, cpuset='0-3', name=pkg, host_config=hconfig)
            if container.get('Warnings') and container.get('Warnings') != '':
                logger.error(container.get('Warnings'))
        except Exception as err:
            logger.error('Create container failed. Error Msg: %s' % err)
            bld_obj.failed = True
            bld_obj.completed = False
            continue

        bld_obj.container = container.get('Id')
        try:
            doc.start(container.get('Id'))
            cont = bld_obj.container
            stream_process = Process(target=publish_build_ouput, args=(cont, bld_obj))
            stream_process.start()
            # Use a distinct name: the original reused `result`, clobbering
            # the result-directory path, and compared with `is not 0`.
            exit_code = doc.wait(cont)
            if exit_code != 0:
                bld_obj.failed = True
                bld_obj.completed = False
                logger.error('[CONTAINER EXIT CODE] Container %s exited. Return code was %s'
                             % (pkg, exit_code))
            else:
                logger.info('[CONTAINER EXIT CODE] Container %s exited. Return code was %s'
                            % (pkg, exit_code))
                bld_obj.failed = False
                bld_obj.completed = True
        except Exception as err:
            logger.error('Start container failed. Error Msg: %s' % err)
            bld_obj.failed = True
            bld_obj.completed = False
            continue

        # Sign the freshly built packages (both arches) before publishing.
        pkgs2sign = None
        if not bld_obj.failed:
            db.publish('build-output', 'Signing package..')
            pkgs2sign = glob.glob(
                '/srv/antergos.info/repo/iso/testing/uefi/antergos-staging/x86_64/%s-***.xz' % pkg)
            pkgs2sign32 = glob.glob(
                '/srv/antergos.info/repo/iso/testing/uefi/antergos-staging/i686/%s-***.xz' % pkg)
            pkgs2sign = pkgs2sign + pkgs2sign32
            logger.info('[PKGS TO SIGN] %s' % pkgs2sign)
            if pkgs2sign is not None and pkgs2sign != []:
                try_sign = sign_pkgs.batch_sign(pkgs2sign)
            else:
                try_sign = False
            if try_sign:
                db.publish('build-output', 'Signature created successfully for %s' % pkg)
                logger.info('[SIGN PKG] Signature created successfully for %s' % pkg)
                db.publish('build-output', 'Updating staging repo database..')
                update_main_repo(pkg, 'staging', bld_obj)
            else:
                # Signing failure fails the whole build.
                bld_obj.failed = True
                bld_obj.completed = False

        if not bld_obj.failed:
            db.publish('build-output', 'Build completed successfully!')
            tlmsg = 'Build <a href="/build/%s">%s</a> for <strong>%s</strong> completed.' % (
                build_id, build_id, pkg)
            Timeline(msg=tlmsg, tl_type=4)
            completed = status.completed()
            completed.rpush(build_id)
            bld_obj.review_stat = 'pending'
        else:
            tlmsg = 'Build <a href="/build/%s">%s</a> for <strong>%s</strong> failed.' % (
                build_id, build_id, pkg)
            Timeline(msg=tlmsg, tl_type=5)
            # Remove any packages (and signatures) from the failed build.
            if pkgs2sign is not None:
                for p in pkgs2sign:
                    remove(p)
                    remove(p + '.sig')
            failed = status.failed()
            failed.rpush(build_id)

        bld_obj.end_str = datetime.datetime.now().strftime("%m/%d/%Y %I:%M%p")

    status.container = ''
    status.building_num = ''
    status.building_start = ''

    if not bld_obj.failed:
        db.set('antbs:misc:cache_buster:flag', True)
        return True
    return False