def check_for_new_items():
    """Poll every monitored upstream project for new commits.

    Sets a short-lived ``FEED_CHECKED`` flag in redis (15 min TTL) so
    callers can rate-limit polling, then walks ``MONITOR_ITEMS`` — a
    mapping of service name ('github'/'gitlab') to a comma-separated
    list of projects — and queues builds for any project that reports
    changes.
    """
    db.set("FEED_CHECKED", "True")
    db.expire("FEED_CHECKED", 900)  # flag expires after 15 minutes

    build_pkgs = []

    # .items() instead of the Python-2-only .iteritems(); MONITOR_ITEMS is
    # small, so materializing the pairs is harmless and forward-compatible.
    for service, project_list in MONITOR_ITEMS.items():
        logger.debug((service, project_list))
        projects = project_list.split(",")
        logger.debug(projects)

        for project in projects:
            if not project:
                # Skip empty entries produced by stray/trailing commas.
                continue

            res = None
            if "github" == service:
                # GitHub projects are stored as "owner/repo".
                parts = project.split("/")
                logger.debug(parts)
                res = check_github_repo(project=parts[0], repo=parts[1])
            elif "gitlab" == service:
                # GitLab projects are identified by a numeric project id.
                logger.debug(project)
                res = check_gitlab_repo(project_id=project)

            if res:
                # extend() instead of repeated list concatenation.
                build_pkgs.extend(res)

    if build_pkgs:
        add_to_build_queue(build_pkgs)
def get_repo_info(repo=None, logged_in=False):
    """Return review/build info for every package currently in ``repo``.

    Results are cached in redis under ``cache:repo_info:<repo>`` for
    ~15 minutes; on a cache miss the on-disk package files are scanned
    and the cache is rebuilt.

    :param repo: repository name (segment of the on-disk repo path);
        aborts with HTTP 500 when None.
    :param logged_in: forwarded to ``get_build_info()``.
    :return: tuple ``(pkg_list, rev_pending)`` where ``pkg_list`` maps
        pkgid -> info dict (bnum, name, version, review fields).
    """
    if repo is None:
        abort(500)
    rinfo_key = 'cache:repo_info:%s' % repo
    repo_info_cache = db.exists(rinfo_key)
    pkg_list = {}
    p, a, rev_pending = get_build_info(1, repo, logged_in)
    if not repo_info_cache:
        logger.info('@@-antbs.py-@@ 295 | GET_REPO_INFO - CACHE CHECK FAILED. WE ARE NOT USING CACHED INFO')
        # glob.glob() always returns a list (possibly empty), never None,
        # so no None guard is needed here.
        all_packages = glob.glob('/srv/antergos.info/repo/%s/x86_64/***.pkg.tar.xz' % repo)
        for item in all_packages:
            # Reduce ".../name-ver-rel-arch.pkg.tar.xz" to the bare package name.
            fname = item.split('/')[-1]
            match = re.search('^([a-z]|[0-9]|-|_)+(?=-\d|r|v)', fname)
            if match is None:
                # BUGFIX: re.search() returns None for file names the pattern
                # can't parse; previously this crashed with AttributeError
                # on match.group(0).
                continue
            name = match.group(0) or ''
            if not name:
                continue
            logger.info(name)
            pkg = package.Package(name)
            builds = pkg.builds
            try:
                bnum = builds[0]
            except (IndexError, TypeError):
                # No builds recorded (empty list or None).
                bnum = ''
            # Two-step lookup: numeric status id -> human-readable string.
            review_stat = db.get('build_log:%s:review_stat' % bnum) or 'n/a'
            review_stat = db.get('review_stat:%s:string' % review_stat) or 'n/a'
            review_dev = db.get('build_log:%s:review_dev' % bnum) or 'n/a'
            review_date = db.get('build_log:%s:review_date' % bnum) or 'n/a'
            all_info = dict(bnum=bnum, name=pkg.name, version=pkg.version,
                            review_dev=review_dev, review_stat=review_stat,
                            review_date=review_date, pkgid=pkg.pkgid)
            # Per-package hash expires slightly after the index list (901 vs
            # 900) so the index never points at an already-expired hash.
            db.hmset('%s:%s' % (rinfo_key, pkg.pkgid), all_info)
            db.expire('%s:%s' % (rinfo_key, pkg.pkgid), 901)
            db.rpush(rinfo_key, pkg.pkgid)
            db.expire(rinfo_key, 900)
            pkg_list[pkg.pkgid] = all_info
    else:
        logger.info('@@-antbs.py-@@ 318 | GET_REPO_INFO - CACHE CHECK PASSED. WE ARE USING CACHED INFO')
        rindex = db.lrange(rinfo_key, 0, -1)
        for i in rindex:
            h = db.hgetall('%s:%s' % (rinfo_key, i))
            pkg_list[i] = h
    return pkg_list, rev_pending
def maybe_update_pkgbuild_repo():
    """Re-clone the antergos-packages PKGBUILD repo if the cached copy is stale.

    Cross-process coordination via two redis keys:
    ``PKGBUILD_REPO_UPDATED`` marks a fresh checkout (350 s TTL);
    ``PKGBUILD_REPO_LOCK`` ensures only one worker clones at a time
    (300 s safety TTL). Losers of the lock race wait until the winner
    finishes or the lock expires.
    """
    if db.exists('PKGBUILD_REPO_UPDATED'):
        # Checkout is fresh enough; nothing to do.
        return
    if db.setnx('PKGBUILD_REPO_LOCK', True):
        db.expire('PKGBUILD_REPO_LOCK', 300)  # safety TTL if we die mid-clone
        try:
            if os.path.exists('/var/tmp/antergos-packages'):
                shutil.rmtree('/var/tmp/antergos-packages')
            subprocess.check_call(
                ['git', 'clone', 'http://github.com/antergos/antergos-packages'],
                cwd='/var/tmp')
            db.setex('PKGBUILD_REPO_UPDATED', 350, True)
        except subprocess.CalledProcessError as err:
            logger.error(err)
            db.delete('PKGBUILD_REPO_UPDATED')
        finally:
            # BUGFIX: release the lock on success as well as failure; as
            # previously written the lock was only deleted on the error
            # path, forcing waiters to sleep out the full 300 s TTL after
            # every successful clone.
            db.delete('PKGBUILD_REPO_LOCK')
        return
    else:
        # Another worker is cloning: wait until it publishes the UPDATED
        # flag or its lock expires.
        while not db.exists('PKGBUILD_REPO_UPDATED') and db.exists('PKGBUILD_REPO_LOCK'):
            time.sleep(2)
        return
def check_for_new_items():
    """Check the upstream numix repos (GitHub and GitLab) for new commits
    and queue rebuilds of the numix icon-theme packages when found.

    NOTE(review): a different function with this same name is defined
    earlier in this module; at import time this later definition wins —
    the two should be renamed or merged.
    """
    db.set('FEED_CHECKED', 'True')
    db.expire('FEED_CHECKED', 900)  # rate-limit flag, 15 min TTL
    new_items = []

    # --- GitHub: numixproject/numix-icon-theme ---
    gh = login(token=GITHUB_TOKEN)
    last_id = db.get('ANTBS_GITHUB_LAST_EVENT') or ''
    repo = gh.repository('numixproject', "numix-icon-theme")
    commits = repo.commits()
    latest = None
    try:
        # next() builtin instead of the Python-2-only .next() method.
        commit = next(commits)
        latest = commit.sha
    except StopIteration:
        pass
    # BUGFIX: guard on `latest` — when the repo reports no commits, latest
    # stays None; previously `None != last_id` was truthy, so None was
    # written to redis and a spurious build was queued.
    if latest and latest != last_id:
        db.set('ANTBS_GITHUB_LAST_EVENT', latest)
        new_items.append(['numix-icon-theme'])

    # --- GitLab: numix-icon-theme-square (project id 61284) ---
    gl = Gitlab('https://gitlab.com', GITLAB_TOKEN)
    gl.auth()
    nxsq = gl.Project(id='61284')
    last_updated = db.get('ANTBS_GITLAB_LAST_UPDATED')
    events = nxsq.Event()
    for event in events:
        # Only the most recent push event matters; stop after handling it.
        if event.action_name == 'pushed to':
            if event.created_at != last_updated:
                db.set('ANTBS_GITLAB_LAST_UPDATED', event.created_at)
                new_items.append(['numix-icon-theme-square'])
                new_items.append(['numix-icon-theme-square-kde'])
            break

    if new_items:
        add_to_build_queue(new_items)
def process_package_queue():
    """Prepare every package in the global hook queue for building.

    Refreshes the local /opt/antergos-packages checkout (coordinated
    across workers via redis keys), then resolves version, PKGBUILD
    path, and dependency info for each queued package, with special
    source-staging handling for cnchi and numix-icon-theme-square.

    :param the_queue: (historical — the queue is now read from
        ``status.hook_queue`` rather than passed in)
    :return: list of per-package dependency lists, or ``[1]`` as a
        sentinel when only a single package is queued.
    :raise ValueError: if ``status.hook_queue`` is None.
    """
    hook_queue = status.hook_queue
    logger.info(hook_queue)
    if hook_queue is None:
        raise ValueError('the_queue cannot be None')
    all_deps = []
    # Refresh the build repo. BUILD_REPO_UPDATED marks a fresh checkout;
    # BUILD_REPO_LOCK (300 s safety TTL) ensures only one worker updates.
    if not db.exists('BUILD_REPO_UPDATED'):
        if db.setnx('BUILD_REPO_LOCK', True):
            db.expire('BUILD_REPO_LOCK', 300)
            try:
                subprocess.check_call(
                    ['git', 'clone', 'http://github.com/antergos/antergos-packages.git'],
                    cwd='/opt')
                subprocess.check_call(['chmod', '-R', 'a+rw', REPO_DIR], cwd='/opt')
            except subprocess.CalledProcessError:
                # Clone failed (checkout presumably already exists) — fall
                # back to resetting and pulling the existing tree.
                try:
                    subprocess.check_call(['git', 'reset', '--soft', 'origin/master'],
                                          cwd='/opt/antergos-packages')
                    subprocess.check_call(['git', 'pull'], cwd='/opt/antergos-packages')
                    db.setex('BUILD_REPO_UPDATED', 350, True)
                except subprocess.CalledProcessError as err:
                    logger.error(err)
            # NOTE(review): BUILD_REPO_UPDATED is only set on the pull
            # fallback path, not after a successful fresh clone — confirm
            # that is intentional.
            db.delete('BUILD_REPO_LOCK')
        else:
            # Another worker holds the lock — wait for it to finish.
            while not db.exists('BUILD_REPO_UPDATED') and db.exists('BUILD_REPO_LOCK'):
                time.sleep(2)
    for pkg in hook_queue:
        # NOTE(review): status.hook_queue (the same list object being
        # iterated) is mutated via remove() below — this can skip the
        # element following a removed one; confirm before relying on it.
        logger.info(pkg)
        if pkg == '':
            continue
        pkg_obj = package.get_pkg_object(name=pkg)
        version = pkg_obj.get_version()
        if not version:
            status.hook_queue.remove(pkg_obj.name)
            if 'cnchi-dev' != pkg:
                # NOTE(review): after logging, execution falls through and
                # continues processing this package with no version —
                # verify a `continue` isn't missing here.
                logger.error('pkgbuild path is not valid for %s', pkg_obj.name)
            else:
                continue
        logger.info('Updating pkgver in database for %s to %s' % (pkg_obj.name, version))
        status.current_status = 'Updating pkgver in database for %s to %s' % (pkg_obj.name, version)
        depends = pkg_obj.get_deps()
        # Locate the package's PKGBUILD directory if it isn't known yet;
        # packages live at the repo root or in a desktop-specific subdir.
        if not pkg_obj.build_path or pkg_obj.build_path == '':
            paths = [os.path.join('/opt/antergos-packages/', pkg),
                     os.path.join('/opt/antergos-packages/deepin_desktop', pkg),
                     os.path.join('/opt/antergos-packages/cinnamon', pkg)]
            for p in paths:
                if os.path.exists(p):
                    pkg_obj.build_path = p
                    break
        if 'cnchi' in pkg:
            # Cnchi ships its own source tree: stage it into the PKGBUILD
            # dir, refresh Transifex translations, then tar it for makepkg.
            logger.info('cnchi package detected.')
            src = os.path.join('/var/tmp/antergos-packages/', pkg, 'cnchi')
            dest = os.path.join('/opt/antergos-packages/', pkg)
            remove(os.path.join(dest, 'cnchi'))
            shutil.move(src, dest)
            status.current_status = 'Fetching latest translations for %s from Transifex.' % pkg
            logger.info(status.current_status)
            cnchi_dir = '/opt/antergos-packages/%s' % pkg
            fetch_and_compile_translations(translations_for=["cnchi"], pkg_obj=pkg_obj)
            # Drop the .git dir so it doesn't end up inside the tarball.
            remove(os.path.join(cnchi_dir, 'cnchi/.git'))
            subprocess.check_output(['tar', '-cf', 'cnchi.tar', 'cnchi'],
                                    cwd='/opt/antergos-packages/%s' % pkg)
        elif 'numix-icon-theme-square' in pkg:
            # Numix square themes: stage the fetched sources and tar them.
            src = os.path.join('/var/tmp/antergos-packages/', pkg, pkg)
            dest = os.path.join('/opt/antergos-packages/', pkg)
            shutil.move(src, dest)
            subprocess.check_output(['tar', '-cf', pkg + '.tar', pkg],
                                    cwd='/opt/antergos-packages/%s' % pkg)
        if depends and len(hook_queue) > 1:
            all_deps.append(depends)
        elif len(hook_queue) == 1:
            # Single-package queue: sentinel value instead of a dep list.
            all_deps.append(1)
    return all_deps
def process_github(self):
    """Handle an incoming GitHub push-event webhook.

    Parses the JSON payload from the request, archives it in redis for
    48 hours, then translates the push into build-queue changes
    (``self.changes``) depending on which repository was pushed to.
    Mutates ``self.repo``, ``self.full_name``, ``self.pusher``,
    ``self.commits``, ``self.is_numix``/``self.is_cnchi``, and
    ``self.result``.
    """
    if not self.is_manual:
        self.payload = json.loads(self.request.data)
        # Save payload in the database temporarily in case we need it later.
        dt = datetime.datetime.now().strftime("%m%d%Y-%I%M")
        key = "antbs:github:payloads:%s" % dt
        if db.exists(key):
            # Key collision (another payload this same minute): probe
            # suffixes :1 .. :4 for a free slot.
            for i in range(1, 5):
                tmp = "%s:%s" % (key, i)
                if not db.exists(tmp):
                    key = tmp
                    break
        db.hmset(key, self.payload)
        db.rpush("antbs:github:payloads:index", key)
        db.expire(key, 172800)  # archived payloads kept for 48 hours
        self.full_name = self.payload["repository"]["full_name"]
        self.repo = self.payload["repository"]["name"]
        self.pusher = self.payload["pusher"]["name"]
        self.commits = self.payload["commits"]
        if self.repo == "numix-icon-theme":
            # Numix pushes are rate-limited via a 20-minute redis flag,
            # and skipped entirely if a numix build is queued or running.
            rate_limit = True
            if "numix-icon-theme" not in self.the_queue and "numix-icon-theme" != self.building:
                if not db.exists("numix-commit-flag"):
                    self.changes.append(["numix-icon-theme"])
                    self.is_numix = True
                    db.setex("numix-commit-flag", 1200, "True")
                    rate_limit = False
            if rate_limit:
                msg = "RATE LIMIT IN EFFECT FOR numix-icon-theme"
                logger.info(msg)
                self.result = json.dumps({"msg": msg})
            else:
                # The actual PKGBUILD lives in antergos-packages.
                self.repo = "antergos-packages"
        elif self.repo == "cnchi-dev":
            self.changes.append(["cnchi-dev"])
            self.repo = "antergos-packages"
            self.is_cnchi = True
            # idle = db.get('idle')
            # working = db.exists('creating-cnchi-archive-from-dev')
            # check = 'cnchi-dev' != self.building or idle == "True"
            # if not working and 'cnchi-dev' not in self.the_queue and check:
            # db.set('creating-cnchi-archive-from-dev', 'True')
            # cnchi_git = 'https://github.com/lots0logs/cnchi-dev.git'
            # cnchi_clone = '/tmp/cnchi'
            # git = '/tmp/cnchi/.git'
            # cnchi_tar_tmp = '/tmp/cnchi.tar'
            # cnchi_tar = '/srv/antergos.org/cnchi.tar'
            #
            # for f in [cnchi_clone, cnchi_tar, cnchi_tar_tmp]:
            # if os.path.exists(f):
            # rm_file_or_dir(f)
            # try:
            # subprocess.check_call(['git', 'clone', cnchi_git, 'cnchi'], cwd='/tmp')
            # shutil.rmtree(git)
            # subprocess.check_call(['tar', '-cf', '/tmp/cnchi.tar', '-C', '/tmp', 'cnchi'])
            # shutil.copy('/tmp/cnchi.tar', '/srv/antergos.org/')
            # except subprocess.CalledProcessError as err:
            # logger.error(err.output)
            #
            # db.delete('creating-cnchi-archive-from-dev')
        elif self.pusher != "antbs":
            # Ordinary push by a human (not the build bot itself): queue
            # every modified/added path from each commit in the push.
            for commit in self.commits:
                self.changes.append(commit["modified"])
                self.changes.append(commit["added"])