Code example #1
File: web.py  Project: geekbuntu/Quarters
    def get_jobs( self ):
        ''' returns a list of new jobdescriptions '''

        ret = []

        # make sure we can exit safely if the web ui is down
        try:
            json_data = get_url( 'http://localhost:8080/stat' )
        except Exception:
            return ret

        temp_json = bytes.decode( json_data )
        print( 'temp_json is:', temp_json )
        print( 'json_data is:', json_data )
        remote_pkgs = json.loads( temp_json )

        makepkg_cmd = [ 'makepkg', '--source', '--skipinteg' ]
        for rpkg in remote_pkgs:
            # copy over the sources to a temp directory
            #orig_dir = os.path.join( '/var/abs/core', rpkg[ 'pkgname' ] )
            dest_dir = os.path.join( '/tmp', rpkg[ 'uuid' ] )
            # check if we already did this
            if os.path.exists( dest_dir ):
                continue
            svnco_cmd = [ 'svn', 'checkout', 'svn://svn.archlinux.org/packages/' + rpkg[ 'pkgname' ] + '/trunk', rpkg[ 'uuid' ] ]
            proc = subprocess.Popen( svnco_cmd, cwd='/tmp' )
            proc.wait()
            #shutil.copytree( orig_dir, dest_dir )
            pkgbuild_path = os.path.join( '/tmp', rpkg[ 'uuid' ], 'PKGBUILD' )
            pkgbuild_data = pacman.load( pkgbuild_path )

            # build the .src.tar.gz file
            proc = subprocess.Popen( makepkg_cmd, cwd=dest_dir )
            proc.wait()

            # find the resulting .src.tar.gz file
            getsrc = glob.glob( os.path.join( dest_dir, '*.src.tar.gz' ) )
            print( 'glob returned ' + str( getsrc ) )
            if len( getsrc ) != 1:
                print( 'error: expected exactly one srcpkg in Web, found ' + str( len( getsrc ) ) )
                # TODO need to create a build log with a message from the scm telling what happened
                self.local_state.create_empty_job( rpkg[ 'uuid' ], 'failed' )
                continue

            # get the sha256sum of the file
            sha256sum = sha256sum_file( getsrc[0] )

            # move the srcpkg to the final resting place
            srcpkg_path = os.path.join( self.master_root, rpkg[ 'uuid' ] )
            os.makedirs( srcpkg_path, exist_ok=True )
            srcpkg_path = os.path.join( srcpkg_path, rpkg[ 'uuid' ] + '.src.tar.gz' )
            shutil.move( getsrc[0], srcpkg_path )

            # add the final jobdescription to the list
            jd = JobDescription( rpkg[ 'uuid' ], rpkg[ 'pkgname' ], sha256sum, pkgbuild_data[ 'arch' ] )
            ret.append( jd )

        return ret
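
The snippet above leans on two helpers that are not shown in this listing, get_url and sha256sum_file. A minimal sketch of what they might look like, assuming get_url returns the raw response bytes and sha256sum_file returns a hex digest; the actual Quarters implementations may differ:

import hashlib
import urllib.request

def get_url( url ):
    ''' hypothetical helper: fetch a URL and return the raw response bytes '''
    with urllib.request.urlopen( url ) as resp:
        return resp.read()

def sha256sum_file( path ):
    ''' hypothetical helper: hex sha256 digest of a file, read in chunks '''
    digest = hashlib.sha256()
    with open( path, 'rb' ) as f:
        for chunk in iter( lambda: f.read( 65536 ), b'' ):
            digest.update( chunk )
    return digest.hexdigest()

Returning bytes from get_url matches the bytes.decode( json_data ) call above; if the real helper already returns a str, that decode step would be unnecessary.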
Code example #2
File: archsvn.py  Project: geekbuntu/Quarters
    def get_jobs( self ):
        ''' returns a list of new jobdescriptions '''
        ret = []
        svnup_cmd = [ '/usr/bin/svn', 'up' ]
        pkgs = set()

        with subprocess.Popen( svnup_cmd, cwd=self.svn_root, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) as proc:
            lines = proc.communicate()[0].splitlines()

        # find unique pkgnames
        for line in lines[0:-1]:
            pkgname = bytes.decode(line).split('/')[0].split()[1]
            pkgs.add( pkgname )

        makepkg_cmd = [ 'makepkg', '--source', '--skipinteg' ]
        for pkg in pkgs:
            new_ujid = str( uuid.uuid4() )

            # create a srcpkg
            print( 'getting package from svn' )
            print( pkg )
            pkg_path = os.path.join( self.svn_root,  pkg + '/trunk' )
            proc = subprocess.Popen( makepkg_cmd, cwd=pkg_path ) #, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
            proc.wait()
            print( 'process returned ' + str( proc.returncode ) )
            srcpkg_path = os.path.join( self.master_root, new_ujid )
            os.makedirs( srcpkg_path, exist_ok=True )
            srcpkg_path = os.path.join( srcpkg_path, new_ujid + '.src.tar.gz' )

            getsrc = glob.glob( os.path.join( pkg_path, pkg + '*.src.tar.gz' ) )
            print( 'glob returned ' + str( getsrc ) )

            if len( getsrc ) != 1:
                print( 'error: expected exactly one srcpkg, found ' + str( len( getsrc ) ) )
                continue

            shutil.move( getsrc[0], srcpkg_path )

            sha256sum = sha256sum_file( srcpkg_path )

            # job description: ujid, pkgname, sha256sum of the srcpkg, architectures to build (x86_64, i686, any)
            # the worker iterates over the architecture field, so pass a list
            jd = JobDescription( new_ujid, pkg, sha256sum, [ 'x86_64' ] )

            ret.append( jd )

        return ret
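
Both SCM back ends hand their results back as JobDescription objects. The class itself is not part of this listing; a minimal sketch consistent with how these examples construct it and how the worker below consumes it (reading ujid, package_name and iterating over architecture) might be:

class JobDescription:
    ''' hypothetical sketch of one build job handed from an SCM back end to the job manager '''

    def __init__( self, ujid, package_name, sha256sum, architecture ):
        self.ujid = ujid                    # unique job id (uuid string)
        self.package_name = package_name    # pkgname of the package to build
        self.sha256sum = sha256sum          # sha256 of the .src.tar.gz
        self.architecture = architecture    # list of target arches, e.g. [ 'x86_64' ]

The real class in Quarters may carry additional fields or validation; only the four attributes used in these snippets are sketched here.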
Code example #3
File: jobmanager.py  Project: geekbuntu/Quarters
def worker( worker_id, local_state, config ):
    ''' worker where the grunt work takes place '''
    builder_root = config['builder_root']
    worker_chroot = config[ 'chroot_root' ] + str( worker_id )
    while True:
        current_job = local_state.get_pending_job()

        # update job state here (running)
        local_state.set_status( current_job.ujid, 'inprogress' )

        job_path = os.path.join( builder_root, str(current_job.ujid) )
        pkgsrc_path = os.path.join( job_path, current_job.package_name + '.tar.gz' )
        pkg_path = os.path.join( job_path, current_job.package_name )

        os.makedirs( job_path, exist_ok=True )

        # TODO: implement when we start using https
        # need to make sure that urlretrieve overwrites if existing file with same name is found
        # "If the URL points to a local file, or a valid cached copy of the object exists, the object is not copied."
        # http://docs.python.org/py3k/library/urllib.request.html#urllib.request.urlretrieve
        srcpkg_url = 'http://' + config[ 'master' ] + ':' + str( config[ 'master_port' ] ) + '/' + current_job.ujid + '/' + current_job.ujid + '.src.tar.gz'
        # TODO: wrap this in a protocol.py function
        urllib.request.urlretrieve( srcpkg_url, pkgsrc_path )

        # extract
        with tarfile.open( pkgsrc_path ) as temp_tar:
            temp_tar.extractall( job_path )

        # chroot
        errors = False
        for arch in current_job.architecture:
            if arch == 'i686':
                chroot_cmd = [ 'sudo', 'extra-i686-build', '-r', worker_chroot ]
            else:
                # handle x86_64 & any
                chroot_cmd = [ 'sudo', 'extra-x86_64-build', '-r', worker_chroot ]

            with subprocess.Popen( chroot_cmd, cwd=pkg_path, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) as proc:
                log_path = os.path.join( job_path, 'build_log' )
                with open( log_path, 'ab' ) as f:
                    f.write( proc.communicate()[0] )
                return_code = proc.returncode

            # if the build failed, mark the job and skip the remaining architectures
            if return_code != 0:
                local_state.set_status( current_job.ujid, 'failed' )
                errors = True
                break
        if errors:
            continue

        # move to final destination
        getsrc = glob.glob( os.path.join( pkg_path, '*.pkg.tar.[gx]z' ) )
        for pkg in getsrc:
            shutil.move( pkg, job_path )

        getsrc = glob.glob( os.path.join( job_path, '*.pkg.tar.[gx]z' ) )
        # update list of packages
        results = [ os.path.basename( p ) for p in getsrc ]
        temp = [ { 'package' : i, 'sha256sum' : sha256sum_file( os.path.join( job_path, i ) ) } for i in results ]
        local_state.set_packages( current_job.ujid, temp )

        local_state.set_status( current_job.ujid, 'done' )
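
The TODO in the worker notes that urllib.request.urlretrieve may skip the copy when the URL points to a local file or a valid cached copy exists. A hedged sketch of a replacement download helper that always overwrites the destination; the name fetch_srcpkg and its eventual home in protocol.py are assumptions, not existing Quarters code:

import shutil
import urllib.request

def fetch_srcpkg( url, dest_path ):
    ''' hypothetical helper: stream the remote srcpkg to dest_path, overwriting any existing file '''
    with urllib.request.urlopen( url ) as resp, open( dest_path, 'wb' ) as out:
        shutil.copyfileobj( resp, out )

Inside the worker loop this would take the place of the urlretrieve call, e.g. fetch_srcpkg( srcpkg_url, pkgsrc_path ).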