def get_result():
    """Return the total number of Package rows, optionally filtered.

    Reads the optional ``pkgname`` query argument from the surrounding
    request object.

    :return: ``{'total': <row count>}``
    """
    # Membership test on the mapping itself; the original ``.keys()``
    # call was redundant.
    if 'pkgname' in request.args:
        pkgname = request.args['pkgname'][0]
        total = Package.selectBy(pkgname=pkgname).orderBy("-id").count()
    else:
        total = Package.selectBy().orderBy("-id").count()
    return {'total': total}
def get_result():
    """Delete the task (Package) named by ``taskid`` in the JSON body.

    Removes the task's on-disk cache directory, destroys its Job rows,
    then destroys the Package row itself.

    :return: ``{'message': ...}`` reporting success or a missing task.
    """
    content = json.loads(request.content.read(),
                         object_pairs_hook=deunicodify_hook)
    # Renamed from ``id`` — do not shadow the builtin.
    task_id = content.get('taskid', 0)
    # Fetch once instead of count()-then-index (two identical queries).
    packages = list(Package.selectBy(id=task_id))
    if not packages:
        return {'message': 'no task %s found' % str(task_id)}
    package = packages[0]
    task_cache = os.path.join(config['cache']['tasks'], str(package.id))
    if os.path.exists(task_cache):
        shutil.rmtree(task_cache)
    for job in Job.selectBy(packageID=package.id):
        job.destroySelf()
    package.destroySelf()
    return {'message': "task %s deleted" % str(task_id)}
def get_result():
    """Return one page (25 rows) of packages, newest first, with jobs.

    The optional ``pkgname`` query argument filters by package name.
    NOTE(review): ``page`` is not defined in this function — it must come
    from the enclosing scope; confirm against the caller.

    :return: ``{'data': [<package dict with 'tasks' list>, ...]}``
    """
    limit = 25
    start, stop = (page - 1) * limit, page * limit
    # Membership test on the mapping itself; ``.keys()`` was redundant.
    if 'pkgname' in request.args:
        pkgname = request.args['pkgname'][0]
        packages = Package.selectBy(
            pkgname=pkgname).orderBy("-id")[start:stop]
    else:
        packages = Package.selectBy().orderBy("-id")[start:stop]
    contexts = []
    for package in packages:
        result = package.dict()
        result['tasks'] = [job.dict()
                           for job in Job.selectBy(packageID=package.id)]
        contexts.append(result)
    return {'data': contexts}
def get_result():
    """Re-queue a task's jobs for rebuild.

    Reads ``taskid`` from the JSON request body.  If the package exists,
    bumps its trigger counter and resets every job that already reached
    BUILDING (or beyond) back to WAIT so the scheduler picks it up again.

    :return: ``{'message': ...}`` describing what happened.
    """
    content = json.loads(request.content.read(),
                         object_pairs_hook=deunicodify_hook)
    # Renamed from ``id`` — do not shadow the builtin.
    task_id = content.get('taskid', 0)
    # Query once instead of count()-then-index.
    packages = list(Package.selectBy(id=task_id))
    if packages:
        package = packages[0]
        package.triggered = package.triggered + 1
        for job in Job.selectBy(packageID=package.id):
            if job.status >= JobStatus.BUILDING:
                job.status = JobStatus.WAIT
        Log(section='task',
            message='rebuild %(pkgname)s %(pkgver)s to %(reponame)s'
                    % package.dict())
        message = "package set rebuilded"
    else:
        message = "no package set rebuilded"
    return {'message': message}
def finish_jobs(self):
    """Promote fully-built packages to the upload queue.

    A package in ``UploadStatus.UNKNOWN`` moves to ``UploadStatus.WAIT``
    once every one of its jobs reports ``JobStatus.BUILD_OK`` (a package
    with no jobs also qualifies, matching the original flag logic).
    """
    for package in Package.selectBy(upload_status=UploadStatus.UNKNOWN):
        jobs = Job.selectBy(packageID=package.id)
        # all() short-circuits on the first non-OK job, exactly like the
        # original break-out-of-loop flag.
        if all(job.status == JobStatus.BUILD_OK for job in jobs):
            package.upload_status = UploadStatus.WAIT
def upload_tasks(self):
    """Publish up to five built packages into their target repositories.

    Each waiting package is handed to ``../tools/repo.py include`` with
    the repository base path and repo name exported via the environment.
    The package's upload_status tracks the attempt: UPLOADING while the
    tool runs, then UPLOAD_OK or UPLOAD_FAILED (the failure is logged).
    """
    for package in Package.selectBy(upload_status=UploadStatus.WAIT)[:5]:
        package.upload_status = UploadStatus.UPLOADING
        repo_base = config['cache']['repos']
        # Same environment the original built with copy()+assignment.
        env = dict(os.environ, REPOPATH=repo_base, NAME=package.reponame)
        task_cache = os.path.join(config['cache']['tasks'], str(package.id))
        command = "../tools/repo.py include --cache %(cache)s --base %(base)s" % {
            "cache": task_cache,
            "base": package.action
        }
        status, _ = functions.getstatusoutput(command, env=env)
        if status == 0:
            package.upload_status = UploadStatus.UPLOAD_OK
        else:
            Log(status=False, section='task',
                message='upload tasks %(pkgname)s %(pkgver)s to %(reponame)s'
                        % package.dict())
            package.upload_status = UploadStatus.UPLOAD_FAILED
def start_jobs(self):
    """Dispatch one waiting job to each idle, compatible builder slave.

    For every enabled slave reporting ``BuilderStatus.IDLE``: take the
    first WAIT job, mark it WAIT_LOCKED under ``self.jobs_locker``, and
    start it on the slave.  If starting fails the job is reset to WAIT.
    Any per-slave error is printed and the next slave is tried.

    NOTE(review): the source was received with flattened indentation; the
    nesting below is a reconstruction.  In particular the placement of
    ``slave.inactive()`` (always after a dispatch attempt vs. only on
    failure) could not be determined with certainty — confirm against
    version control.
    """
    for slave in self.slaves:
        try:
            if slave.enabled and slave.status.get('builder_status') == 'BuilderStatus.IDLE':
                if Job.selectBy(status=JobStatus.WAIT).count() > 0:
                    job = Job.selectBy(status=JobStatus.WAIT)[0]
                    # NOTE(review): ``package`` is fetched but never used
                    # in this method — possibly a leftover.
                    package = Package.selectBy(id=job.package.id)[0]
                    if job.dist in slave.info.get('dists') and job.arch in slave.info.get('arches'):
                        with self.jobs_locker:
                            print("send job %s to builder %s" % (job.id, slave.name))
                            # Lock the job before handing it out so another
                            # scheduler pass cannot pick it up.
                            job.status = JobStatus.WAIT_LOCKED
                            try:
                                job.start(slave, 'debian')
                            except Exception as e:
                                print(e)
                                # Dispatch failed: put the job back in the
                                # wait queue.
                                job.status = JobStatus.WAIT
                            # Take the slave out of rotation until its
                            # status is refreshed — TODO confirm this is
                            # not meant to run only in the except branch.
                            slave.inactive()
        except Exception as e:
            print(e)
def get_result():
    """Create (or re-trigger) a build task from the posted JSON request.

    Validates the target repository and action, exports the package
    source via ``../tools/git.py``, determines the set of architectures
    to build from the ``.dsc`` file, then either bumps an existing
    identical Package or creates a new one with one Job per architecture.
    The exported source files are copied into the task cache before the
    temporary export directory is removed.

    :raises OSError: on any repository/action/architecture validation
        failure.
    :raises subprocess.CalledProcessError: if the export tool fails.
    :return: the created/updated package's ``dict()``.
    """
    content = json.loads(request.content.read(),
                         object_pairs_hook=deunicodify_hook)
    # First check that the repository exists.
    reponame = content['reponame'].split('/')[0]
    repopath = os.path.join(config['cache'].get('repos'), reponame)
    repo_config = os.path.join(repopath, '%s.json' % reponame)
    if not os.path.exists(repo_config):
        raise OSError(
            "Repository has not exists, Please create it first!")
    repo_config = json.load(open(repo_config, "r"))
    action_config = repo_config.get(content['action'])
    if not action_config:
        raise OSError("Repository is not support this action.")
    if action_config.get('division'):
        if '/' not in content['reponame']:
            raise OSError("Reponame should like dde/1207")
        # Check that the division repo has been created.
        division_path = os.path.join(repopath, content['action'],
                                     content['reponame'].split('/')[1],
                                     'db/packages.db')
        if not os.path.exists(division_path):
            raise OSError("Division repo is not created.")
    dist = action_config['dist']
    # Drop 'source' from the action's arch list.  BUG FIX: the original
    # kept a lazy ``filter`` object, which cannot be indexed
    # (``arches[0]`` below) and is exhausted after one pass on Python 3.
    arches = [arch for arch in action_config['arches'] if arch != 'source']
    # SECURITY NOTE(review): the command below is built from request
    # content and executed with shell=True — callers must be trusted;
    # prefer an argv list with shell=False.
    command = "../tools/git.py --pkgname %(pkgname)s --action %(action)s --cachedir %(cache)s \
 --source %(source)s --reponame %(reponame)s" % {
        "pkgname": content['pkgname'],
        "action": content['action'],
        "source": content['source'],
        "cache": config['cache'].get('sources'),
        "reponame": content['reponame']
    }
    if content.get('debian'):
        command += " --debian %(debian)s" % {"debian": content['debian']}
    if content.get('version'):
        command += " --version %(version)s" % {"version": content['version']}
    """result will like blow
       {'files': ['dde-session-ui_4.3.1+2+gc1ab148.dsc',
                  'dde-session-ui_4.3.1+2+gc1ab148.tar.xz'],
        'path': '/tmp/git-archive-temp/tmp3HoN4D',
        'version': '4.3.1+2+gc1ab148',
        'hashsum': 'c1ab1484818011ab76bbe383101b25d33e923ef4'
       }
    """
    # The original wrapped this in ``except ... as exc: raise exc`` — a
    # no-op; the exception propagates identically without it.
    result = subprocess.check_output(command, shell=True,
                                     stderr=subprocess.STDOUT)
    result = json.loads(result)
    # Parse the architecture list out of the exported .dsc file.
    # BUG FIX: initialize so a missing .dsc no longer raises NameError.
    _arches = []
    for file in result['files']:
        if file.endswith('.dsc'):
            dsc_file = os.path.join(result['path'], file)
            _arches = parser_dscfile(dsc_file)
            break
    if content.get('arches'):
        added_arches = set(content['arches'])
    else:
        added_arches = set()
    for arch in _arches:
        if arch == 'any':
            # 'any' builds on every arch the action supports.
            added_arches.update(arches)
        elif arch == 'all':
            # Arch-independent package: build once, on the first arch.
            added_arches.add(arches[0])
        elif arch in arches:
            added_arches.add(arch)
    if not added_arches:
        os.system("rm -rf %s" % result['path'])
        raise OSError("None of architecture support with this action.")
    # BUG FIX: 'reponame' appeared twice in this literal; duplicate removed.
    # NOTE(review): the docstring example above names the key 'hashsum',
    # but the code reads 'revision' — confirm against ../tools/git.py.
    kwargs = {
        'pkgname': content['pkgname'],
        'pkgver': result['version'],
        'reponame': content['reponame'],
        'action': content['action'],
        'hashsum': result['revision']
    }
    build_args = []
    if content.get('build_args'):
        build_args.extend(content['build_args'])
    if result.get('build_args'):
        build_args.extend(result['build_args'])
    build_args = '|'.join(set(build_args))
    if build_args.startswith('|'):
        # Guard against a leading empty element.
        build_args = build_args[1:]
    if Package.selectBy(**kwargs).count() != 0:
        package = Package.selectBy(**kwargs).orderBy('-id')[0]
        package.triggered = package.triggered + 1
        # Update build_args and put the task back through the pipeline.
        if build_args:
            package.build_args = build_args
        package.upload_status = UploadStatus.UNKNOWN
        for job in Job.selectBy(packageID=package.id):
            if job.status != JobStatus.BUILDING:
                job.status = JobStatus.WAIT
    else:
        package = Package(**kwargs)
        if build_args:
            package.build_args = build_args
        for arch in added_arches:
            Job(package=package, arch=arch, dist=dist,
                status=JobStatus.WAIT)
    # Save the exported source files into the task cache.
    tasks_cache = config['cache'].get('tasks')
    if not os.path.exists(tasks_cache):
        os.makedirs(tasks_cache)
    source_cache = os.path.join(tasks_cache, str(package.id), 'source')
    for file in result['files']:
        os.system("install -Dm644 %(source)s %(dest)s" % {
            'source': os.path.join(result['path'], file),
            'dest': os.path.join(source_cache, file)
        })
    Log(section='task',
        message='apply %(pkgname)s %(pkgver)s to %(reponame)s'
                % package.dict())
    # Remove the temporary export directory.
    os.system("rm -rf %s" % result['path'])
    return package.dict()
def add_task(kwargs):
    """Create a Package from ``kwargs`` or re-trigger the newest match.

    :param kwargs: dict of Package column values, used both for lookup
        and for construction.
    :return: the Package that was found or created.
    """
    if Package.selectBy(**kwargs).count() != 0:
        package = Package.selectBy(**kwargs).orderBy('-id')[0]
        package.triggered = package.triggered + 1
    else:
        # BUG FIX: the original called ``package(**kwargs)`` — a NameError,
        # since the local ``package`` is unbound on this branch; the
        # class constructor is ``Package``.
        package = Package(**kwargs)
    return package
def get_result():
    """Return one package's dict representation together with its jobs.

    NOTE(review): ``id`` is not defined in this function — it must come
    from the enclosing scope (e.g. a URL parameter); confirm at caller.
    """
    package = Package.selectBy(id=id)[0]
    payload = package.dict()
    payload['tasks'] = [job.dict()
                        for job in Job.selectBy(packageID=package.id)]
    return payload
def get_expired_task(self):
    """Destroy every task whose status has been unchanged for too long.

    The cutoff defaults to 7 days and can be overridden through the
    ``expired_days`` runtime configuration key.
    """
    # Cutoff is loop-invariant; build the timedelta once.
    max_age = timedelta(days=config['runtime'].get('expired_days', 7))
    now = sqlobject.DateTimeCol.now()
    for package in Package.selectBy():
        if now - package.status_changed > max_age:
            self.destroy_task(package)