Example #1
    def build(self, pkg):
        """ Build a new package into a given COPR """

        # already in the queue
        if pkg in self.builds_in_progress:
            return True

        user = copr_cli.subcommands.get_user()
        copr_api_url = copr_cli.subcommands.get_api_url()
        url = '{0}/coprs/{1}/{2}/new_build/'.format(
            copr_api_url,
            user['username'],
            self.copr_id)

        data = {'pkgs': pkg.get_url(),
                'memory': None,
                'timeout': None
                }

        req = requests.post(url,
                            auth=(user['login'], user['token']),
                            data=data)
        output = copr_cli.subcommands._get_data(req, user, self.copr_id)
        if output is None:
            return False
        else:
            print_debug(output['message'])
        pkg.build_id = output['ids'][0]
        print_debug("Adding build " + str(pkg.build_id))
        self.builds_in_progress.append(pkg)
        return True
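
A minimal usage sketch, assuming a CoprHelper class that exposes this build() method together with wait_for_builds() from the next example (the COPR id and the pkg object below are hypothetical):

copr = CoprHelper('el7-gnome-3-14')      # assumed helper wrapping the methods shown here
if not copr.build(pkg):                  # pkg: a Package exposing get_url(), as above
    print_fail("Failed to submit build")
elif not copr.wait_for_builds():
    print_fail("A build failed")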
Example #2
    def wait_for_builds(self):
        """ Waits for all submitted builds to finish """

        # nothing to do
        if len(self.builds_in_progress) == 0:
            return True

        success = True
        for pkg in self.builds_in_progress:
            print_info("Waiting for %s [%i]" % (pkg.get_nvr(), pkg.build_id))
        try:
            while len(self.builds_in_progress) > 0:
                for pkg in list(self.builds_in_progress):
                    # iterate over a copy since finished builds are removed from the list
                    try:
                        (ret, status) = copr_cli.subcommands._fetch_status(pkg.build_id)
                    except requests.exceptions.ConnectionError, e:
                        self.builds_in_progress.remove(pkg)
                        print_fail("Lost connection for build %i" % pkg.build_id)
                        success = False
                        continue
                    if not ret:
                        self.builds_in_progress.remove(pkg)
                        print_fail("Unable to get build status for %i" % pkg.build_id)
                        continue
                    if status == 'succeeded':
                        self.builds_in_progress.remove(pkg)
                        print_debug("Build %s [%i] succeeded" % (pkg.name, pkg.build_id))
                    elif status == 'failed':
                        self.builds_in_progress.remove(pkg)
                        print_fail("Build %s [%i] failed" % (pkg.name, pkg.build_id))
                        success = False
                    time.sleep(1)
                time.sleep(10)
        except KeyboardInterrupt:
            success = False
        return success
Example #3
def run_command(cwd, argv, print_failures=True):
    print_debug("Running %s" % " ".join(argv))
    p = subprocess.Popen(argv, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    output, error = p.communicate()
    if p.returncode != 0 and print_failures:
        print(output)
        print(error)
    if p.returncode != 0:
        return False
    return True
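
A quick usage sketch of run_command (the command shown is arbitrary):

if not run_command('.', ['ls', '-l']):
    print_fail("command failed")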
Example #4
def determine_yellow_plug(cnts, btn_x, btn_y):
    slope_left = 1
    slope_right = 2
    for cnt in cnts:
        x, y, w, h = cv2.boundingRect(cnt)
        if x != btn_x:
            slope = float(np.abs(y - btn_y)) / float(np.abs(x - btn_x))
            log.print_debug(
                TAG, "slope " + str(slope) + " Posx: " + str(x) + " Posy: " +
                str(y))
            if slope > slope_left and slope < slope_right:
                return 1
    return 0
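
The contour counts as the yellow plug when the slope of the line from the orange button to the contour lies between 1 and 2, i.e. between 45 and roughly 63 degrees. A small sketch of that test on made-up coordinates:

import numpy as np

btn_x, btn_y = 100, 100            # hypothetical orange button position
x, y = 160, 190                    # hypothetical contour position
slope = float(np.abs(y - btn_y)) / float(np.abs(x - btn_x))    # 90 / 60 = 1.5
print("is yellow plug: %s" % (1 < slope < 2))                  # True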
Example #5
def main():

    parser = argparse.ArgumentParser(description='Build a list of packages')
    parser.add_argument('--branch-source',
                        default="f21",
                        help='The branch to use as a source')
    parser.add_argument('--copr-id',
                        default="el7-gnome-3-14",
                        help='The COPR to use')
    parser.add_argument('--packages',
                        default="./data/el7-gnome-3-14.txt",
                        help='The list of packages to build')
    args = parser.parse_args()

    copr = CoprHelper(args.copr_id)
    koji = KojiHelper()

    data = ModulesXml('modules.xml')

    # add the copr id (e.g. el7) to any items in modules.xml file
    f = open(args.packages, 'r')
    for l in f.readlines():
        if l.startswith('#'):
            continue
        if l.startswith('\n'):
            continue
        linedata = l.strip().split(',')
        pkgname = linedata[0]
        item = data._get_item_by_pkgname(pkgname)
        if not item:
            print("%s not found" % pkgname)
            continue
        item.releases.append(copr.release)
        item.custom_package_url = None
        if len(linedata) > 1:
            item.custom_package_url = linedata[1]
    f.close()

    # disable any modules without the copr-specific release
    for item in data.items:
        if copr.release not in item.releases:
            item.disabled = True
            continue

    # depsolve
    print_debug("Depsolving moduleset...")
    if not data.depsolve():
        print_fail("Failed to depsolve")
        return

    # process all packages
    current_depsolve_level = 0
    for item in data.items:
        if item.disabled:
            continue

        # wait for builds
        if current_depsolve_level != item.depsolve_level:
            rc = copr.wait_for_builds()
            if not rc:
                print_fail("A build failed, so aborting")
                break
            current_depsolve_level = item.depsolve_level
            print_debug("Now running depsolve level %i" %
                        current_depsolve_level)

        # find the koji package
        pkg = None
        if not item.custom_package_url:
            pkg = koji.get_newest_build(args.branch_source, item.pkgname)
            if not pkg:
                print_fail("package %s does not exists in koji" % item.pkgname)
                continue
            pkg2 = koji.get_newest_build(
                args.branch_source + '-updates-candidate', item.pkgname)
            if not pkg2:
                print_fail("package %s does not exists in koji" % item.pkgname)
                continue

            # use the newest package
            if pkg.get_nvr() != pkg2.get_nvr():
                if rpm.labelCompare(pkg.get_evr(), pkg2.get_evr()) < 0:
                    pkg = pkg2
        else:
            pkg = Package()
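            # e.g. 'foo-1.2.3-1.el7.src.rpm' -> ['foo', '1.2.3', '1.el7.src.rpm'] (illustrative filename)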
            nvr = os.path.basename(item.custom_package_url).rsplit('-', 2)
            pkg.name = nvr[0]
            pkg.version = nvr[1]
            pkg.release = nvr[2].replace('.src.rpm', '')
            pkg.url = item.custom_package_url

        print_debug("Latest version of %s: %s" % (item.pkgname, pkg.get_nvr()))

        # find if the package has been built in the copr
        try:
            status = copr.get_pkg_status(pkg)
        except CoprException, e:
            print_fail(str(e))
            continue
        if status == CoprBuildStatus.ALREADY_BUILT:
            print_debug("Already built")
            continue
        elif status == CoprBuildStatus.FAILED_TO_BUILD:
            print_debug("Failed, so retrying build")
        elif status == CoprBuildStatus.NOT_FOUND:
            print_debug("Not found, so building")
        elif status == CoprBuildStatus.IN_PROGRESS:
            print_debug("Already in progress")
            continue
        else:
            print_fail("copr status unknown: %s" % status)
            continue

        # submit build and wait for it to complete
        if not copr.build(pkg):
            print_fail("Failed to submit build")
            break
Example #6
def main():

    # use the main mirror
    gnome_ftp = 'http://ftp.gnome.org/pub/GNOME/sources'

    # read defaults from command line arguments
    parser = argparse.ArgumentParser(description='Automatically build Fedora packages for a GNOME release')
    parser.add_argument('--fedora-branch', default="rawhide", help='The fedora release to target (default: rawhide)')
    parser.add_argument('--simulate', action='store_true', help='Do not commit any changes')
    parser.add_argument('--check-installed', action='store_true', help='Check installed version against built version')
    parser.add_argument('--force-build', action='store_true', help='Always build even when not newer')
    parser.add_argument('--relax-version-checks', action='store_true', help='Relax checks on the version numbering')
    parser.add_argument('--cache', default="cache", help='The cache of checked out packages')
    parser.add_argument('--buildone', default=None, help='Only build one specific package')
    parser.add_argument('--buildroot', default=None, help='Use a custom buildroot, e.g. f18-gnome')
    parser.add_argument('--bump-soname', default=None, help='Build any package that deps on this')
    parser.add_argument('--copr-id', default=None, help='The COPR to optionally use')
    args = parser.parse_args()

    if args.copr_id:
        copr = CoprHelper(args.copr_id)

    # create the cache directory if it's not already existing
    if not os.path.isdir(args.cache):
        os.mkdir(args.cache)

    # use rpm to check the installed version
    installed_pkgs = {}
    if args.check_installed:
        print_info("Loading rpmdb")
        ts = rpm.TransactionSet()
        mi = ts.dbMatch()
        for h in mi:
            installed_pkgs[h['name']] = h['version']
        print_debug("Loaded rpmdb with %i items" % len(installed_pkgs))

    # parse the configuration file
    modules = []
    data = ModulesXml('modules.xml')
    if not args.buildone:
        print_debug("Depsolving moduleset...")
        if not data.depsolve():
            print_fail("Failed to depsolve")
            return
    for item in data.items:

        # ignore just this one module
        if item.disabled:
            continue

        # build just one module
        if args.buildone:
            if args.buildone != item.name:
                continue

        # just things that have this as a dep
        if args.bump_soname:
            if args.bump_soname not in item.deps:
                continue

        # things we can't autobuild as we don't have upstream data files
        if not item.ftpadmin:
            continue

        # things that are obsolete in later versions
        if args.copr_id:
            if not args.copr_id[10:] in item.branches:
                continue

        # get started
        print_info("Loading %s" % item.name)
        if item.pkgname != item.name:
            print_debug("Package name: %s" % item.pkgname)
        print_debug("Version glob: %s" % item.release_glob[args.fedora_branch])

        # ensure package is checked out
        if not item.setup_pkgdir(args.cache, args.fedora_branch):
            continue

        # get the current version from the spec file
        if not item.parse_spec():
            continue

        print_debug("Current version is %s" % item.version)

        # check for newer version on GNOME.org
        success = False
        for i in range (1, 20):
            try:
                urllib.urlretrieve ("%s/%s/cache.json" % (gnome_ftp, item.name), "%s/%s/cache.json" % (args.cache, item.pkgname))
                success = True
                break
            except IOError as e:
                print_fail("Failed to get JSON on try %i: %s" % (i, e))
        if not success:
            continue

        new_version = None
        gnome_branch = item.release_glob[args.fedora_branch]
        local_json_file = "%s/%s/cache.json" % (args.cache, item.pkgname)
        with open(local_json_file, 'r') as f:

            # the format of the json file is as follows:
            # j[0] = some kind of version number?
            # j[1] = the files keyed for each release, e.g.
            #        { 'pkgname' : {'2.91.1' : {u'tar.gz': u'2.91/gpm-2.91.1.tar.gz'} } }
            # j[2] = array of remote versions, e.g.
            #        { 'pkgname' : [ '3.3.92', '3.4.0' ] }
            # j[3] = the LATEST-IS files
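            # (indexed below as j[2][item.name] for the version list and
            #  j[1][item.name][new_version]['tar.xz'] for the tarball path)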
            try:
                j = json.loads(f.read())
            except Exception, e:
                print_fail("Failed to read JSON at %s: %s" % (local_json_file, str(e)))
                continue

            # find the newest version
            newest_remote_version = '0'
            for remote_ver in j[2][item.name]:
                version_valid = False
                for b in gnome_branch.split(','):
                    if fnmatch.fnmatch(remote_ver, b):
                        version_valid = True
                        break
                if not args.relax_version_checks and not version_valid:
                    continue
                rc = rpm.labelCompare((None, remote_ver, None), (None, newest_remote_version, None))
                if rc > 0:
                    newest_remote_version = remote_ver
        if newest_remote_version == '0':
            print_fail("No remote versions matching the gnome branch %s" % gnome_branch)
            print_fail("Check modules.xml is looking at the correct branch")
            continue

        print_debug("Newest remote version is: %s" % newest_remote_version)

        # is this newer than the rpm spec file version
        rc = rpm.labelCompare((None, newest_remote_version, None), (None, item.version, None))
        new_version = None
        if rc > 0:
            new_version = newest_remote_version

        # check the installed version
        if args.check_installed:
            if item.pkgname in installed_pkgs:
                installed_ver = installed_pkgs[item.pkgname]
                if installed_ver == newest_remote_version:
                    print_debug("installed version is up to date")
                else:
                    print_debug("installed version is", installed_ver)
                    rc = rpm.labelCompare((None, installed_ver, None), (None, newest_remote_version, None))
                    if rc > 0:
                        print_fail("installed version is newer than gnome branch version")
                        print_fail("check modules.xml is looking at the correct branch")

        # nothing to do
        if new_version is None and not args.bump_soname and not args.force_build:
            print_debug("No updates available")
            continue

        # never update a major version number
        if new_version:
            if args.relax_version_checks:
                print_debug("Updating major version number, but ignoring")
            elif new_version.split('.')[0] != item.version.split('.')[0]:
                print_fail("Cannot update major version numbers")
                continue

        # we need to update the package
        if new_version:
            print_debug("Need to update from %s to %s" %(item.version, new_version))

        # download the tarball if it doesn't exist
        if new_version:
            tarball = j[1][item.name][new_version]['tar.xz']
            dest_tarball = tarball.split('/')[1]
            if os.path.exists(item.pkgname + "/" + dest_tarball):
                print_debug("Source %s already exists" % dest_tarball)
            else:
                tarball_url = gnome_ftp + "/" + item.name + "/" + tarball
                print_debug("Download %s" % tarball_url)
                if not args.simulate:
                    try:
                        urllib.urlretrieve (tarball_url, args.cache + "/" + item.pkgname + "/" + dest_tarball)
                    except IOError as e:
                        print_fail("Failed to get tarball: %s" % e)
                        continue

                    # add the new source
                    item.new_tarball(dest_tarball)

        # prep the spec file for rpmdev-bumpspec
        if new_version:
            with open(item.spec_filename, 'r') as f:
                with open(item.spec_filename+".tmp", "w") as tmp_spec:
                    for line in f:
                        if line.startswith('Version:'):
                            line = replace_spec_value(line, new_version + '\n')
                        elif line.startswith('Release:'):
                            line = replace_spec_value(line, '0%{?dist}\n')
                        elif line.startswith(('Source:', 'Source0:')):
                            line = re.sub("/" + majorminor(item.version) + "/",
                                          "/" + majorminor(new_version) + "/",
                                          line)
                        tmp_spec.write(line)
            os.rename(item.spec_filename + ".tmp", item.spec_filename)

        # bump the spec file
        comment = None
        if args.bump_soname:
            comment = "Rebuilt for %s soname bump" % args.bump_soname
        elif new_version:
            comment = "Update to " + new_version
        if comment:
            cmd = ['rpmdev-bumpspec', "--comment=%s" % comment, "%s.spec" % item.pkgname]
            item.run_command(cmd)

        # run prep, and make sure patches still apply
        if not args.simulate:
            if not item.check_patches():
                print_fail("to build %s as patches did not apply" % item.pkgname)
                continue

        # push the changes
        if args.simulate:
            print_debug("Not pushing as simulating")
            continue

        # commit the changes
        if comment and not item.commit_and_push(comment):
            print_fail("push")
            continue

        # COPR, so build srpm, upload and build
        if item.is_copr:
            if not item.run_command(['fedpkg', "--dist=%s" % item.dist, 'srpm']):
                print_fail("to build srpm")
                continue

            # extract the nevr from the package
            new_srpm = glob.glob(args.cache + "/" + item.pkgname + '/*.src.rpm')[0]
            pkg = Package(new_srpm)

            # check if it already exists
            status = copr.get_pkg_status(pkg)
            if status == CoprBuildStatus.ALREADY_BUILT:
                print_debug ("Already built in COPR")
                continue
            elif status == CoprBuildStatus.IN_PROGRESS:
                print_debug ("Already building in COPR")
                continue

            # upload the package somewhere shared
            if os.getenv('USERNAME') == 'hughsie':
                upload_dir = '[email protected]:/home/fedora/rhughes/public_html/copr/'
                upload_url = 'http://rhughes.fedorapeople.org/copr/'
            elif os.getenv('USERNAME') == 'kalev':
                upload_dir = '[email protected]:/home/fedora/kalev/public_html/copr/'
                upload_url = 'http://kalev.fedorapeople.org/copr/'
            else:
                print_fail ("USERNAME not valid, ping hughsie on irc")
                continue

            print_debug("Uploading local package to " + upload_dir)
            p = subprocess.Popen(['scp', '-q', new_srpm, upload_dir])
            p.wait()
            pkg.url = upload_url + os.path.basename(new_srpm)

            if not copr.build(pkg):
                print_fail("COPR build")
                break
            rc = copr.wait_for_builds()
            if not rc:
                print_fail("waiting")
            continue

        # work out release tag
        if args.fedora_branch == "f18":
            pkg_release_tag = 'fc18'
        elif args.fedora_branch == "f19":
            pkg_release_tag = 'fc19'
        elif args.fedora_branch == "f20":
            pkg_release_tag = 'fc20'
        elif args.fedora_branch == "f21":
            pkg_release_tag = 'fc21'
        elif args.fedora_branch == "f22":
            pkg_release_tag = 'fc22'
        elif args.fedora_branch == "rawhide":
            pkg_release_tag = 'fc23'
        else:
            print_fail("Failed to get release tag for %s" % args.fedora_branch)
            continue

        # build package
        if new_version:
            print_info("Building %s-%s-1.%s" % (item.pkgname, new_version, pkg_release_tag))
        else:
            print_info("Building %s-%s-1.%s" % (item.pkgname, item.version, pkg_release_tag))
        if args.buildroot:
            rc = item.run_command(['fedpkg', 'build', '--target', args.buildroot])
        else:
            rc = item.run_command(['fedpkg', 'build'])
        if not rc:
            print_fail("Build")
            continue

        # work out repo branch
        if args.fedora_branch == "f18":
            pkg_branch_name = 'f18-build'
        elif args.fedora_branch == "f19":
            pkg_branch_name = 'f19-build'
        elif args.fedora_branch == "f20":
            pkg_branch_name = 'f20-build'
        elif args.fedora_branch == "f21":
            pkg_branch_name = 'f21-build'
        elif args.fedora_branch == "f22":
            pkg_branch_name = 'f22-build'
        elif args.fedora_branch == "rawhide":
            pkg_branch_name = 'f23-build'
        else:
            print_fail("Failed to get repo branch tag for" + args.fedora_branch)
            continue

        # wait for repo to sync
        if item.wait_repo and args.fedora_branch == "rawhide":
            rc = item.run_command(['koji', 'wait-repo', pkg_branch_name, '--build', "%s-%s-1.%s" % (item.pkgname, new_version, pkg_release_tag)])
            if not rc:
                print_fail("Wait for repo")
                continue
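
The two if/elif ladders that pick pkg_release_tag and pkg_branch_name could also be table-driven; a minimal sketch using only the values that appear above:

BRANCH_INFO = {
    'f18': ('fc18', 'f18-build'),
    'f19': ('fc19', 'f19-build'),
    'f20': ('fc20', 'f20-build'),
    'f21': ('fc21', 'f21-build'),
    'f22': ('fc22', 'f22-build'),
    'rawhide': ('fc23', 'f23-build'),
}
if args.fedora_branch not in BRANCH_INFO:
    print_fail("Failed to get release tag for %s" % args.fedora_branch)
else:
    pkg_release_tag, pkg_branch_name = BRANCH_INFO[args.fedora_branch]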
Example #7
def main():

    # read defaults from command line arguments
    parser = argparse.ArgumentParser(description='Automatically build Fedora packages in COPR')
    parser.add_argument('--branch-source', default="rawhide", help='The branch to use as a source (default: rawhide)')
    parser.add_argument('--branch-destination', default="f20", help='The branch to use as a destination (default: f20)')
    parser.add_argument('--simulate', action='store_true', help='Do not commit any changes')
    parser.add_argument('--modules', default="modules.xml", help='The modules to search')
    parser.add_argument('--copr-id', default="f20-gnome-3-14", help='The COPR to use')
    parser.add_argument('--buildone', default=None, help='Only build one specific package')
    parser.add_argument('--bump-soname', default=None, help='Build this package and any that depend on it')
    parser.add_argument('--ignore-existing', action='store_true', help='Build the module even if it already exists in COPR')
    parser.add_argument('--ignore-version', action='store_true', help='Build the module even if the same version exists in the destination')
    parser.add_argument('--rebuild-srpm', action='store_true', help='Rebuild the package with a bumped release version')
    args = parser.parse_args()

    # parse the configuration file
    data = ModulesXml(args.modules)

    koji = KojiHelper()
    copr = CoprHelper(args.copr_id)

    current_depsolve_level = 0

    # only build one module
    if args.buildone:
        for item in data.items:
            if item.name == args.buildone:
                item.disabled = False
            elif item.pkgname in args.buildone.split(','):
                item.disabled = False
            else:
                item.disabled = True
    else:

        # disable before depsolve
        for item in data.items:
            if copr.release not in item.releases:
                # not for this release
                print_debug("Skipping %s as release only lists %s" % (item.name, ','.join(item.releases)))
                item.disabled = True
                continue

        print_info("Depsolving moduleset")
        if not data.depsolve():
            print_fail("Failed to depsolve")
            return

    # build one module, plus the things that depend on it
    if args.bump_soname:
        for item in data.items:
            disabled = True
            if item.pkgname == args.bump_soname:
                disabled = False
            else:
                for dep in item.deps:
                    if dep == args.bump_soname:
                        disabled = False
                        break
            item.disabled = disabled

    for item in data.items:

        # wait for builds
        if current_depsolve_level != item.depsolve_level:
            rc = copr.wait_for_builds()
            if not rc:
                print_fail("A build failed, so aborting")
                break
            current_depsolve_level = item.depsolve_level
            print_debug("Now running depsolve level %i" % current_depsolve_level)

        # skip
        if item.disabled:
            if not args.buildone:
                print_debug("Skipping %s as disabled" % item.name)
            continue

        # get the latest build from koji
        pkg = koji.get_newest_build(args.branch_source, item.pkgname)
        if not pkg:
            print_fail("package %s does not exists in %s" % (item.pkgname, args.branch_destination))
            continue
        print_debug("Latest version of %s in %s: %s" % (item.pkgname, args.branch_source, pkg.get_nvr()))

        # has this build been submitted?
        try:
            status = copr.get_pkg_status(pkg)
        except CoprException, e:
            print_fail(str(e))
            continue
        if status == CoprBuildStatus.ALREADY_BUILT:
            if not args.ignore_existing and not args.bump_soname:
                print_debug("Already built in copr")
                continue
        elif status == CoprBuildStatus.FAILED_TO_BUILD:
            if not args.ignore_existing and not args.bump_soname:
                print_fail("Previous build failed in copr")
                continue
        elif status == CoprBuildStatus.NOT_FOUND:
            if args.bump_soname and args.bump_soname != item.pkgname:
                print_debug("Not building %s as not yet built in copr" % item.pkgname)
                continue
        elif status == CoprBuildStatus.IN_PROGRESS:
            print_debug("Already building in copr")
            continue
        else:
            print_fail("copr status unknown: %s" % status)
            continue

        # does this version already exist?
        pkg_stable = koji.get_newest_build(args.branch_destination, item.pkgname)
        if pkg_stable:
            print_debug("Latest version in %s: %s" % (args.branch_destination, pkg_stable.get_nvr()))
            if not args.ignore_version and pkg.version == pkg_stable.version:
                print_debug("Already exists same version")
                continue

        # this is expensive!
        if args.rebuild_srpm:
            if not rebuild_srpm(pkg):
                continue

        # submit to copr
        print_debug("Submitting URL " + pkg.get_url())
        if args.simulate:
            continue
        if not copr.build(pkg):
            print_fail("build")
            break
Example #8
def rebuild_srpm(pkg):
    import shutil
    import os
    import urllib2
    import subprocess

    # create new /tmp/copr/pkgname
    tmp_path = '/tmp/copr/' + pkg.name
    if os.path.exists(tmp_path):
        shutil.rmtree(tmp_path, True)
    os.makedirs(tmp_path)

    # download the package to /tmp
    print_debug("Downloading SRPM from %s" % pkg.get_url())
    response = urllib2.urlopen(pkg.get_url())
    pkg_binary = response.read()
    f = open(tmp_path + '/pkg.src.rpm', 'wb')
    f.write(pkg_binary)
    f.close()

    # explode the package with rpm2cpio
    print_debug("Extracting SRPM to %s" % tmp_path)
    p = subprocess.Popen(["rpm2cpio pkg.src.rpm | cpio --extract"],
                         cwd=tmp_path, shell=True,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    out, err = p.communicate()
    if p.returncode != 0:
        print_fail(err)
        return False

    # bump the release tag
    print_debug("Bumping revision and adding comment")
    specfile = tmp_path + '/' + pkg.name + '.spec'
    p = subprocess.Popen(["rpmdev-bumpspec",
                         "-c Built for COPR",
                         '-r',
                         specfile])
    p.wait()

    # rebuild the package with rpmbuild -bs
    print_debug("Building local package " + pkg.get_nvr())
    p = subprocess.Popen(['rpmbuild',
                         "--define=_sourcedir /tmp/copr/" + pkg.name,
                         "--define=_srcrpmdir /tmp/copr/" + pkg.name,
                         '-bs', specfile],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    out, err = p.communicate()
    if p.returncode != 0:
        print_fail(err)
        return False
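    # rpmbuild -bs prints a line like "Wrote: /tmp/copr/<name>/<nvr>.src.rpm"; keep the path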
    new_srpm = out.replace('\n', '').split(' ')[1]

    # upload the package somewhere shared
    upload_dir = '[email protected]:/home/fedora/rhughes/public_html/copr/'
    print_debug("Uploading local package to " + upload_dir)
    p = subprocess.Popen(['scp', '-q', new_srpm, upload_dir])
    p.wait()

    # over-ride this from the koji one
    pkg.url = "http://rhughes.fedorapeople.org/copr/" + os.path.basename(new_srpm)

    # delete our temp space
    shutil.rmtree(tmp_path, True)
    return True
Example #9
def prepare(last_valid_frame, frame):

    log.print_debug(TAG, "------------prepare------------")

    is_prepared = False
    global detected_org_x
    global detected_org_y
    global detected_org_size
    global detected_counter
    global current_image
    global filter_image

    res1, res2 = color_filter.filter_orange(last_valid_frame, frame)
    # transfer HSV to Binary image for contour detection
    tmp1 = cv2.cvtColor(res1, cv2.COLOR_HSV2BGR)
    tmp2 = cv2.cvtColor(res2, cv2.COLOR_HSV2BGR)
    black1 = cv2.cvtColor(tmp1, cv2.COLOR_BGR2GRAY)
    black2 = cv2.cvtColor(tmp2, cv2.COLOR_BGR2GRAY)
    ret1, thresh1 = cv2.threshold(black1, 0, 255, cv2.THRESH_BINARY)
    ret2, thresh2 = cv2.threshold(black2, 0, 255, cv2.THRESH_BINARY)
    return1 = cv2.findContours(thresh1.copy(), cv2.RETR_LIST,
                               cv2.CHAIN_APPROX_SIMPLE)
    return2 = cv2.findContours(thresh2.copy(), cv2.RETR_LIST,
                               cv2.CHAIN_APPROX_SIMPLE)

    current_image = frame
    filter_image = res2

    if len(return1) == 3:
        cnts1 = return1[1]
        hierarchy1 = return1[2]
    else:
        cnts1 = return1[0]
        hierarchy1 = return1[1]

    if len(return2) == 3:
        cnts2 = return2[1]
        hierarchy2 = return2[2]
    else:
        cnts2 = return2[0]
        hierarchy2 = return2[1]

    # cnts1 and cnts2 contain the contour result
    org_btn_candidate_1 = []
    org_btn_candidate_2 = []

    log.print_debug(TAG, "image1 begin find")
    for cnt in cnts1:
        area0 = cv2.contourArea(cnt)
        cnt_len = cv2.arcLength(cnt, True)
        cnt = cv2.approxPolyDP(cnt, 0.02 * cnt_len, True)
        if area0 > 100:
            x1, y1, w1, h1 = cv2.boundingRect(cnt)
            log.print_debug(
                TAG, "orange button area size > 100 " + str(area0) + " Posx " +
                str(x1) + " Posy " + str(y1))
        if within_org_dummy_size(area0):
            x1, y1, w1, h1 = cv2.boundingRect(cnt)
            log.print_debug(
                TAG, "orange button candidate area size " + str(area0) +
                " Posx " + str(x1) + " Posy " + str(y1))
            org_btn_candidate_1.append(cnt)
    log.print_debug(TAG, "image1 end find")

    log.print_debug(TAG, "image2 begin find")
    for cnt in cnts2:
        cnt_len = cv2.arcLength(cnt, True)
        area0 = cv2.contourArea(cnt)
        cnt = cv2.approxPolyDP(cnt, 0.02 * cnt_len, True)
        if within_org_dummy_size(area0):
            x1, y1, w1, h1 = cv2.boundingRect(cnt)
            log.print_debug(
                TAG, "orange button candidate area size " + str(area0) +
                " Posx " + str(x1) + " Posy " + str(y1))
            org_btn_candidate_2.append(cnt)
    log.print_debug(TAG, "image2 end find")

    nums = is_similar(org_btn_candidate_1, org_btn_candidate_2)
    if len(nums) > 0:
        detected_counter += 1
        if detected_counter > Confidence_Min_Counter:
            reset()
            is_prepared = True
            detected_org_x = nums[0]
            detected_org_y = nums[1]
            detected_org_size = nums[2]
            log.print_debug(
                TAG, "find orange button area size " + str(nums[2]) +
                " Posx " + str(nums[0]) + " Posy " + str(nums[1]))
    # is_aed_detected = aed_detector.aed_detect(last_valid_frame, frame)
    # # now find the orange button and other important items precisely
    # if is_aed_detected:
    #     # get the orange button position and relative size
    #     a = 0
    # else:
    #     is_prepared = False
    return is_prepared
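
The len(return1) == 3 checks above (and in the following examples) cope with the changed return signature of cv2.findContours: OpenCV 3.x returns (image, contours, hierarchy) while 2.x and 4.x return (contours, hierarchy). A small helper could centralise this; a sketch assuming the same cv2 import:

def find_contours_compat(binary_image):
    # normalise cv2.findContours across OpenCV 2.x / 3.x / 4.x
    result = cv2.findContours(binary_image.copy(), cv2.RETR_LIST,
                              cv2.CHAIN_APPROX_SIMPLE)
    if len(result) == 3:             # OpenCV 3.x: (image, contours, hierarchy)
        return result[1], result[2]
    return result[0], result[1]      # OpenCV 2.x / 4.x: (contours, hierarchy)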
Example #10
def flash_detection(img1, img2, orange_x, orange_y, size, show_type=0):

    log.print_debug(TAG, "----------flash detection start----------")
    global org_pos_x
    global org_pos_y
    global confidence_counter
    global current_image
    global filter_image

    org_pos_x = orange_x
    org_pos_y = orange_y
    log.print_debug(
        TAG, "passed in org_pos_x " + str(org_pos_x) + " org_pos_y " +
        str(org_pos_y) + " size " + str(size))

    hsv1, hsv2 = color_filter.filter_flash(img1, img2)

    return1 = cv2.findContours(hsv1.copy(), cv2.RETR_LIST,
                               cv2.CHAIN_APPROX_SIMPLE)
    return2 = cv2.findContours(hsv2.copy(), cv2.RETR_LIST,
                               cv2.CHAIN_APPROX_SIMPLE)
    # cnts1 = cv2.findContours(hsv1.copy(), cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
    # cnts2 = cv2.findContours(hsv2.copy(), cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)

    current_image = img2
    filter_image = hsv2

    if len(return1) == 3:
        cnts1 = return1[1]
        hierarchy1 = return1[2]
    else:
        cnts1 = return1[0]
        hierarchy1 = return1[1]

    if len(return2) == 3:
        cnts2 = return2[1]
        hierarchy2 = return2[2]
    else:
        cnts2 = return2[0]
        hierarchy2 = return2[1]

    if show_type == 1:
        util.show_two_image(hsv1, hsv2)
    elif show_type == 2:
        util.show_image("w1", hsv1, 640, 320)
        util.show_image("w2", img2, 640, 320)

    # org_btn_candidate_1 = []
    org_btn_candidate_2 = []

    # print "for area of cnts 1"
    # for cnt in cnts1:
    #     area0 = cv2.contourArea(cnt)
    #     #if area0 > 10:
    #     #    print area0
    #     cnt_len = cv2.arcLength(cnt, True)
    #     cnt = cv2.approxPolyDP(cnt, 0.02 * cnt_len, False)
    #     #if area0 > 100:
    #         #print "area " + str(area0)
    #     if estimate_org_flash_size(area0, size):#feature_detetor.estimate_orange_area(area0,size):
    #         x, y, w, h = cv2.boundingRect(cnt)
    #         print org_pos_x,org_pos_y,x,y,area0
    #         org_btn_candidate_1.append(cnt)

    log.print_debug(TAG, "for area of cnts 2")
    for cnt in cnts2:
        # if area0 > 10:
        #   print area0
        cnt_len = cv2.arcLength(cnt, True)
        area0 = cv2.contourArea(cnt)
        cnt = cv2.approxPolyDP(cnt, 0.02 * cnt_len, False)
        if area0 > 100:
            x, y, w, h = cv2.boundingRect(cnt)
            print "flash button area size " + str(area0) + " Posx " + str(
                x) + " Posy " + str(y)

        if estimate_org_flash_size(
                area0,
                size):  # feature_detetor.estimate_orange_area(area0,size):
            x, y, w, h = cv2.boundingRect(cnt)
            log.print_debug(
                TAG, "flash button area size " + str(area0) + " Posx " +
                str(x) + " Posy " + str(y))
            org_btn_candidate_2.append(cnt)
    # counter1 = orange_flash_num(org_btn_candidate_1)
    counter2 = orange_flash_num(org_btn_candidate_2)

    if counter2 >= 1:
        # print org_btn_candidate_2
        confidence_counter += 1
    if confidence_counter >= 4:
        log.print_debug(TAG, "detected!!")
        reset()
        return True
    print '------------end'
    return False
Example #11
def detect_yellow_plug(image1,
                       image2,
                       org_pos_x,
                       org_pos_y,
                       org_size,
                       show_type=0):

    log.print_debug(TAG, "-----------detect_yellow_plug-----------")
    log.print_debug(
        TAG, "value passed in" + " Posx " + str(org_pos_x) + " Posy " +
        str(org_pos_y) + " size " + str(org_size))

    global position_failed_counter
    global current_image
    global filter_image
    #
    # if last_detected_org_x == 0 and last_detected_org_y == 0:
    #     dynamic_org_btn_var_x = 0
    #     dynamic_org_btn_var_y = 0
    # else:
    #     dynamic_org_btn_var_x = abs(org_pos_x - last_detected_org_x)
    #     dynamic_org_btn_var_y = abs(org_pos_y - last_detected_org_y)
    #     if dynamic_org_btn_var_x == 0 and dynamic_org_btn_var_y == 0:
    #         position_failed_counter += 1
    #     else:
    #         position_failed_counter = 0
    # last_detected_org_x = org_pos_x
    # last_detected_org_y = org_pos_y
    res1, res2 = color_filter.filter_yellow(image1, image2)
    # # transfer HSV to Binary image for contour detection
    # tmp1 = cv2.cvtColor(res1, cv2.COLOR_HSV2BGR)
    tmp2 = cv2.cvtColor(res2, cv2.COLOR_HSV2BGR)
    # black1 = cv2.cvtColor(tmp1, cv2.COLOR_BGR2GRAY)
    black2 = cv2.cvtColor(tmp2, cv2.COLOR_BGR2GRAY)
    # ret1, thresh1 = cv2.threshold(black1, 0, 255, cv2.THRESH_BINARY)
    ret2, thresh2 = cv2.threshold(black2, 0, 255, cv2.THRESH_BINARY)
    # cnts1, hierarchy = cv2.findContours(thresh1.copy(), cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
    return1 = cv2.findContours(thresh2.copy(), cv2.RETR_LIST,
                               cv2.CHAIN_APPROX_SIMPLE)

    x2, y2, org_size, is_success = detect_orange_btn(image1, image2, org_pos_x,
                                                     org_pos_y, org_size,
                                                     show_type)

    current_image = image2
    filter_image = black2

    if is_success == False:
        return 0, org_pos_x, org_pos_y, org_size

    if position_failed_counter > 10:
        # find the orange button again
        is_prepared = stage_pre_main.prepare(image1, image2)
        if is_prepared:
            x2, y2, org_size = stage_pre_main.retrieve_org_btn_params()
        return 0, x2, y2, org_size

    if len(return1) == 3:
        cnts2 = return1[1]
        hierarchy = return1[2]
    else:
        cnts2 = return1[0]
        hierarchy = return1[1]
    # # cnts1 and cnts2 contain the contour result
    # org_btn_candidate_1 = []
    org_btn_candidate_2 = []

    # for cnt in cnts1:
    #     area0 = cv2.contourArea(cnt)
    #     cnt_len = cv2.arcLength(cnt, True)
    #     cnt = cv2.approxPolyDP(cnt, 0.02 * cnt_len, True)
    #     if area_estimate(area0,1):
    #         print area0
    #         x1, y1, w1, h1 = cv2.boundingRect(cnt)
    #         print x1,y1,w1,h1
    #         org_btn_candidate_1.append(cnt)
    for cnt in cnts2:
        cnt_len = cv2.arcLength(cnt, True)
        area0 = cv2.contourArea(cnt)
        cnt = cv2.approxPolyDP(cnt, 0.02 * cnt_len, True)
        # if area0 > 10:
        #print area0
        #x1, y1, w1, h1 = cv2.boundingRect(cnt)
        #print x1, y1, w1, h1
        if area_estimate(area0, org_size, 1):
            #print area0
            x1, y1, w1, h1 = cv2.boundingRect(cnt)
            #print x1, y1, w1, h1
            org_btn_candidate_2.append(cnt)

            # cnt1 = determine_orange_btn(org_btn_candidate_1,org_pos_x,org_pos_y)
            # cnt2 = determine_orange_btn(org_btn_candidate_2,org_pos_x,org_pos_y)

            # if length of cnt1 == 0 or length of cnt2 == 0, means two image detect orange btn wrong
            # if len(cnt1) == 0 or len(cnt2) == 0:
            #   return 0,org_pos_x,org_pos_y

    # x1, y1, w1, h1 = cv2.boundingRect(cnt1)
    # result1 = determine_yellow_plug(org_btn_candidate_1,x1,y1)
    # x2, y2, w2, h2 = cv2.boundingRect(cnt2)
    result2 = determine_yellow_plug(org_btn_candidate_2, x2, y2)

    # if result1 == 1 and result2 == 1:
    if result2 == 1:
        global confidence_counter
        confidence_counter += 1
    if confidence_counter > 10:
        cv2.imwrite("plug.jpg", image2)
        cv2.imwrite("plug-gray.jpg", res2)
        return 1, x2, y2, org_size
    return 0, x2, y2, org_size
Example #12
def detect_orange_btn(image1,
                      image2,
                      org_pos_x,
                      org_pos_y,
                      org_size,
                      show_type=0):

    log.print_debug(TAG, "-----------detect_orange_btn-----------")

    global dynamic_org_size
    global last_detected_org_size
    global current_image
    global filter_image

    #print org_pos_x, org_pos_y, org_size

    log.print_debug(
        TAG, "passed in Posx " + str(org_pos_x) + " Poxy " + str(org_pos_y) +
        " size " + str(org_size))

    if last_detected_org_size != 0:
        dynamic_org_size = org_size - last_detected_org_size
    last_detected_org_size = org_size

    res1, res2 = color_filter.filter_orange(image1, image2)
    # tmp1 = cv2.cvtColor(res1, cv2.COLOR_HSV2BGR)
    tmp2 = cv2.cvtColor(res2, cv2.COLOR_HSV2BGR)
    # black1 = cv2.cvtColor(tmp1, cv2.COLOR_BGR2GRAY)
    black2 = cv2.cvtColor(tmp2, cv2.COLOR_BGR2GRAY)
    # ret1, thresh1 = cv2.threshold(black1, 0, 255, cv2.THRESH_BINARY)
    ret2, thresh2 = cv2.threshold(black2, 0, 255, cv2.THRESH_BINARY)
    # cnts1, hierarchy = cv2.findContours(thresh1.copy(), cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
    current_image = image2
    filter_image = black2
    tmp = cv2.findContours(thresh2.copy(), cv2.RETR_LIST,
                           cv2.CHAIN_APPROX_SIMPLE)
    if len(tmp) == 3:
        cnts2 = tmp[1]
        hierarchy = tmp[2]
    else:
        cnts2 = tmp[0]
        hierarchy = tmp[1]
    # cnts1 and cnts2 contain the contour result
    # org_btn_candidate_1 = []
    org_btn_candidate_2 = []

    # util.debug_print(TAG,"detect_orange_btn img1")
    # for cnt in cnts1:
    #     area0 = cv2.contourArea(cnt)
    #     cnt_len = cv2.arcLength(cnt, True)
    #     cnt = cv2.approxPolyDP(cnt, 0.02 * cnt_len, True)
    #     if estimate_orange_area(area0, org_size):
    #         x1, y1, w1, h1 = cv2.boundingRect(cnt)
    #         util.debug_print(TAG, "size1 " + str(area0) + " x " + str(x1) +" y "+ str(y1))
    #         org_btn_candidate_1.append(cnt)

    util.debug_print(TAG, "img2:")
    for cnt in cnts2:
        cnt_len = cv2.arcLength(cnt, True)
        # area0 = cv2.contourArea(cnt)
        cnt = cv2.approxPolyDP(cnt, 0.02 * cnt_len, False)
        area0 = cv2.contourArea(cnt)
        if area0 > 100:
            x1, y1, w1, h1 = cv2.boundingRect(cnt)
            #log.print_debug(TAG, "area0 > 100 " + str(area0) + " Posx " + str(x1) + " Posy " + str(y1))
        if estimate_orange_area(area0, org_size):
            x1, y1, w1, h1 = cv2.boundingRect(cnt)
            log.print_debug(
                TAG, "candidate area size " + str(area0) + " Posx " + str(x1) +
                " Posy " + str(y1))
            org_btn_candidate_2.append(cnt)

    if len(org_btn_candidate_2) == 0:
        # maybe the detected size varied too much
        log.print_debug(TAG, "no orange button found - did the size vary too much?")
        for cnt in cnts2:
            cnt_len = cv2.arcLength(cnt, True)
            # area0 = cv2.contourArea(cnt)
            cnt = cv2.approxPolyDP(cnt, 0.02 * cnt_len, False)
            area0 = cv2.contourArea(cnt)
            x1, y1, w1, h1 = cv2.boundingRect(cnt)
            if estimate_org_area_coor(area0, org_size, x1, y1, org_pos_x,
                                      org_pos_y):
                log.print_debug(
                    TAG, "variant area size " + str(area0) + " Posx " +
                    str(x1) + " Posy " + str(y1))
                org_btn_candidate_2.append(cnt)

    # cnt1 = determine_orange_btn(org_btn_candidate_1, org_pos_x, org_pos_y)
    cnt2 = determine_orange_btn(org_btn_candidate_2, org_pos_x, org_pos_y)

    if show_type == 1:
        util.show_two_image(image2, res2)
    if show_type == 2:
        util.show_image("orange_btn_1", image2)
        util.show_image("orange_btn_2", res2)

    # if len(cnt1) == 0 or len(cnt2) == 0:
    if len(cnt2) == 0:
        return org_pos_x, org_pos_y, org_size, False
    x2, y2, w2, h2 = cv2.boundingRect(cnt2)
    area0 = cv2.contourArea(cnt2)
    log.print_debug(
        TAG, "detected orange button size " + str(area0) + " " + " x2 " +
        str(x2) + " y2 " + str(y2))
    return x2, y2, area0, True
Example #13
def main():

    parser = argparse.ArgumentParser(description='Build a list of packages')
    parser.add_argument('--branch-source', default="f21", help='The branch to use as a source')
    parser.add_argument('--copr-id', default="el7-gnome-3-14", help='The COPR to use')
    parser.add_argument('--packages', default="./data/el7-gnome-3-14.txt", help='The list of packages to build')
    args = parser.parse_args()

    copr = CoprHelper(args.copr_id)
    koji = KojiHelper()

    data = ModulesXml('modules.xml')

    # add the copr id (e.g. el7) to any items in modules.xml file
    f = open(args.packages, 'r')
    for l in f.readlines():
        if l.startswith('#'):
            continue
        if l.startswith('\n'):
            continue
        linedata = l.strip().split(',')
        pkgname = linedata[0]
        item = data._get_item_by_pkgname(pkgname)
        if not item:
            print("%s not found" % pkgname)
            continue
        item.releases.append(copr.release)
        item.custom_package_url = None
        if len(linedata) > 1:
            item.custom_package_url = linedata[1]
    f.close()

    # disable any modules without the copr-specific release
    for item in data.items:
        if copr.release not in item.releases:
            item.disabled = True
            continue

    # depsolve
    print_debug("Depsolving moduleset...")
    if not data.depsolve():
        print_fail("Failed to depsolve")
        return

    # process all packages
    current_depsolve_level = 0
    for item in data.items:
        if item.disabled:
            continue

        # wait for builds
        if current_depsolve_level != item.depsolve_level:
            rc = copr.wait_for_builds()
            if not rc:
                print_fail("A build failed, so aborting")
                break
            current_depsolve_level = item.depsolve_level
            print_debug("Now running depsolve level %i" % current_depsolve_level)

        # find the koji package
        pkg = None
        if not item.custom_package_url:
            pkg = koji.get_newest_build(args.branch_source, item.pkgname)
            if not pkg:
                print_fail("package %s does not exists in koji" % item.pkgname)
                continue
            pkg2 = koji.get_newest_build(args.branch_source + '-updates-candidate', item.pkgname)
            if not pkg2:
                print_fail("package %s does not exists in koji" % item.pkgname)
                continue

            # use the newest package
            if pkg.get_nvr() != pkg2.get_nvr():
                if rpm.labelCompare(pkg.get_evr(), pkg2.get_evr()) < 0:
                    pkg = pkg2
        else:
            pkg = Package()
            nvr = os.path.basename(item.custom_package_url).rsplit('-', 2)
            pkg.name = nvr[0]
            pkg.version = nvr[1]
            pkg.release = nvr[2].replace('.src.rpm', '')
            pkg.url = item.custom_package_url
            
        print_debug("Latest version of %s: %s" % (item.pkgname, pkg.get_nvr()))

        # find if the package has been built in the copr
        try:
            status = copr.get_pkg_status(pkg)
        except CoprException, e:
            print_fail(str(e))
            continue
        if status == CoprBuildStatus.ALREADY_BUILT:
            print_debug("Already built")
            continue
        elif status == CoprBuildStatus.FAILED_TO_BUILD:
            print_debug("Failed, so retrying build")
        elif status == CoprBuildStatus.NOT_FOUND:
            print_debug("Not found, so building")
        elif status == CoprBuildStatus.IN_PROGRESS:
            print_debug("Already in progress")
            continue
        else:
            print_fail("copr status unknown: %s" % status)
            continue

        # submit build and wait for it to complete
        if not copr.build(pkg):
            print_fail("Failed to submit build")
            break
Example #14
def main():

    # use the main mirror
    gnome_ftp = 'http://ftp.gnome.org/pub/GNOME/sources'

    # read defaults from command line arguments
    parser = argparse.ArgumentParser(
        description='Automatically build Fedora packages for a GNOME release')
    parser.add_argument('--fedora-branch',
                        default="rawhide",
                        help='The fedora release to target (default: rawhide)')
    parser.add_argument('--simulate',
                        action='store_true',
                        help='Do not commit any changes')
    parser.add_argument('--check-installed',
                        action='store_true',
                        help='Check installed version against built version')
    parser.add_argument('--force-build',
                        action='store_true',
                        help='Always build even when not newer')
    parser.add_argument('--relax-version-checks',
                        action='store_true',
                        help='Relax checks on the version numbering')
    parser.add_argument('--cache',
                        default="cache",
                        help='The cache of checked out packages')
    parser.add_argument('--buildone',
                        default=None,
                        help='Only build one specific package')
    parser.add_argument('--buildroot',
                        default=None,
                        help='Use a custom buildroot, e.g. f18-gnome')
    parser.add_argument('--bump-soname',
                        default=None,
                        help='Build any package that deps on this')
    parser.add_argument('--copr-id',
                        default=None,
                        help='The COPR to optionally use')
    args = parser.parse_args()

    if args.copr_id:
        copr = CoprHelper(args.copr_id)

    # create the cache directory if it's not already existing
    if not os.path.isdir(args.cache):
        os.mkdir(args.cache)

    # use rpm to check the installed version
    installed_pkgs = {}
    if args.check_installed:
        print_info("Loading rpmdb")
        ts = rpm.TransactionSet()
        mi = ts.dbMatch()
        for h in mi:
            installed_pkgs[h['name']] = h['version']
        print_debug("Loaded rpmdb with %i items" % len(installed_pkgs))

    # parse the configuration file
    modules = []
    data = ModulesXml('modules.xml')
    if not args.buildone:
        print_debug("Depsolving moduleset...")
        if not data.depsolve():
            print_fail("Failed to depsolve")
            return
    for item in data.items:

        # ignore just this one module
        if item.disabled:
            continue

        # build just one module
        if args.buildone:
            if args.buildone != item.name:
                continue

        # just things that have this as a dep
        if args.bump_soname:
            if args.bump_soname not in item.deps:
                continue

        # things we can't autobuild as we don't have upstream data files
        if not item.ftpadmin:
            continue

        # things that are obsolete in later versions
        if args.copr_id:
            if not args.copr_id[10:] in item.branches:
                continue

        # get started
        print_info("Loading %s" % item.name)
        if item.pkgname != item.name:
            print_debug("Package name: %s" % item.pkgname)
        print_debug("Version glob: %s" % item.release_glob[args.fedora_branch])

        # ensure package is checked out
        if not item.setup_pkgdir(args.cache, args.fedora_branch):
            continue

        # get the current version from the spec file
        if not item.parse_spec():
            continue

        print_debug("Current version is %s" % item.version)

        # check for newer version on GNOME.org
        success = False
        for i in range(1, 20):
            try:
                urllib.urlretrieve(
                    "%s/%s/cache.json" % (gnome_ftp, item.name),
                    "%s/%s/cache.json" % (args.cache, item.pkgname))
                success = True
                break
            except IOError as e:
                print_fail("Failed to get JSON on try %i: %s" % (i, e))
        if not success:
            continue

        new_version = None
        gnome_branch = item.release_glob[args.fedora_branch]
        local_json_file = "%s/%s/cache.json" % (args.cache, item.pkgname)
        with open(local_json_file, 'r') as f:

            # the format of the json file is as follows:
            # j[0] = some kind of version number?
            # j[1] = the files keyed for each release, e.g.
            #        { 'pkgname' : {'2.91.1' : {u'tar.gz': u'2.91/gpm-2.91.1.tar.gz'} } }
            # j[2] = array of remote versions, e.g.
            #        { 'pkgname' : [ '3.3.92', '3.4.0' ] }
            # j[3] = the LATEST-IS files
            try:
                j = json.loads(f.read())
            except Exception, e:
                print_fail("Failed to read JSON at %s: %s" %
                           (local_json_file, str(e)))
                continue

            # find the newest version
            newest_remote_version = '0'
            for remote_ver in j[2][item.name]:
                version_valid = False
                for b in gnome_branch.split(','):
                    if fnmatch.fnmatch(remote_ver, b):
                        version_valid = True
                        break
                if not args.relax_version_checks and not version_valid:
                    continue
                rc = rpm.labelCompare((None, remote_ver, None),
                                      (None, newest_remote_version, None))
                if rc > 0:
                    newest_remote_version = remote_ver
        if newest_remote_version == '0':
            print_fail("No remote versions matching the gnome branch %s" %
                       gnome_branch)
            print_fail("Check modules.xml is looking at the correct branch")
            continue

        print_debug("Newest remote version is: %s" % newest_remote_version)

        # is this newer than the rpm spec file version
        rc = rpm.labelCompare((None, newest_remote_version, None),
                              (None, item.version, None))
        new_version = None
        if rc > 0:
            new_version = newest_remote_version

        # check the installed version
        if args.check_installed:
            if item.pkgname in installed_pkgs:
                installed_ver = installed_pkgs[item.pkgname]
                if installed_ver == newest_remote_version:
                    print_debug("installed version is up to date")
                else:
                    print_debug("installed version is", installed_ver)
                    rc = rpm.labelCompare((None, installed_ver, None),
                                          (None, newest_remote_version, None))
                    if rc > 0:
                        print_fail(
                            "installed version is newer than gnome branch version"
                        )
                        print_fail(
                            "check modules.xml is looking at the correct branch"
                        )

        # nothing to do
        if new_version is None and not args.bump_soname and not args.force_build:
            print_debug("No updates available")
            continue

        # never update a major version number
        if new_version:
            if args.relax_version_checks:
                print_debug("Updating major version number, but ignoring")
            elif new_version.split('.')[0] != item.version.split('.')[0]:
                print_fail("Cannot update major version numbers")
                continue

        # we need to update the package
        if new_version:
            print_debug("Need to update from %s to %s" %
                        (item.version, new_version))

        # download the tarball if it doesn't exist
        if new_version:
            tarball = j[1][item.name][new_version]['tar.xz']
            dest_tarball = tarball.split('/')[1]
            if os.path.exists(item.pkgname + "/" + dest_tarball):
                print_debug("Source %s already exists" % dest_tarball)
            else:
                tarball_url = gnome_ftp + "/" + item.name + "/" + tarball
                print_debug("Download %s" % tarball_url)
                if not args.simulate:
                    try:
                        urllib.urlretrieve(
                            tarball_url, args.cache + "/" + item.pkgname +
                            "/" + dest_tarball)
                    except IOError as e:
                        print_fail("Failed to get tarball: %s" % e)
                        continue

                    # add the new source
                    item.new_tarball(dest_tarball)

        # prep the spec file for rpmdev-bumpspec
        if new_version:
            with open(item.spec_filename, 'r') as f:
                with open(item.spec_filename + ".tmp", "w") as tmp_spec:
                    for line in f:
                        if line.startswith('Version:'):
                            line = replace_spec_value(line, new_version + '\n')
                        elif line.startswith('Release:'):
                            line = replace_spec_value(line, '0%{?dist}\n')
                        elif line.startswith(('Source:', 'Source0:')):
                            line = re.sub("/" + majorminor(item.version) + "/",
                                          "/" + majorminor(new_version) + "/",
                                          line)
                        tmp_spec.write(line)
            os.rename(item.spec_filename + ".tmp", item.spec_filename)

        # bump the spec file
        comment = None
        if args.bump_soname:
            comment = "Rebuilt for %s soname bump" % args.bump_soname
        elif new_version:
            comment = "Update to " + new_version
        if comment:
            cmd = [
                'rpmdev-bumpspec',
                "--comment=%s" % comment,
                "%s.spec" % item.pkgname
            ]
            item.run_command(cmd)

        # run prep, and make sure patches still apply
        if not args.simulate:
            if not item.check_patches():
                print_fail("to build %s as patches did not apply" %
                           item.pkgname)
                continue

        # push the changes
        if args.simulate:
            print_debug("Not pushing as simulating")
            continue

        # commit the changes
        if comment and not item.commit_and_push(comment):
            print_fail("push")
            continue

        # COPR, so build srpm, upload and build
        if item.is_copr:
            if not item.run_command(
                ['fedpkg', "--dist=%s" % item.dist, 'srpm']):
                print_fail("to build srpm")
                continue

            # extract the nevr from the package
            new_srpm = glob.glob(args.cache + "/" + item.pkgname +
                                 '/*.src.rpm')[0]
            pkg = Package(new_srpm)

            # check if it already exists
            status = copr.get_pkg_status(pkg)
            if status == CoprBuildStatus.ALREADY_BUILT:
                print_debug("Already built in COPR")
                continue
            elif status == CoprBuildStatus.IN_PROGRESS:
                print_debug("Already building in COPR")
                continue

            # upload the package somewhere shared
            if os.getenv('USERNAME') == 'hughsie':
                upload_dir = '[email protected]:/home/fedora/rhughes/public_html/copr/'
                upload_url = 'http://rhughes.fedorapeople.org/copr/'
            elif os.getenv('USERNAME') == 'kalev':
                upload_dir = '[email protected]:/home/fedora/kalev/public_html/copr/'
                upload_url = 'http://kalev.fedorapeople.org/copr/'
            else:
                print_fail("USERNAME not valid, ping hughsie on irc")
                continue

            print_debug("Uploading local package to " + upload_dir)
            p = subprocess.Popen(['scp', '-q', new_srpm, upload_dir])
            p.wait()
            pkg.url = upload_url + os.path.basename(new_srpm)

            if not copr.build(pkg):
                print_fail("COPR build")
                break
            rc = copr.wait_for_builds()
            if not rc:
                print_fail("waiting")
            continue

        # work out release tag
        if args.fedora_branch == "f18":
            pkg_release_tag = 'fc18'
        elif args.fedora_branch == "f19":
            pkg_release_tag = 'fc19'
        elif args.fedora_branch == "f20":
            pkg_release_tag = 'fc20'
        elif args.fedora_branch == "f21":
            pkg_release_tag = 'fc21'
        elif args.fedora_branch == "f22":
            pkg_release_tag = 'fc22'
        elif args.fedora_branch == "rawhide":
            pkg_release_tag = 'fc23'
        else:
            print_fail("Failed to get release tag for %s" % args.fedora_branch)
            continue

        # build package
        if new_version:
            print_info("Building %s-%s-1.%s" %
                       (item.pkgname, new_version, pkg_release_tag))
        else:
            print_info("Building %s-%s-1.%s" %
                       (item.pkgname, item.version, pkg_release_tag))
        if args.buildroot:
            rc = item.run_command(
                ['fedpkg', 'build', '--target', args.buildroot])
        else:
            rc = item.run_command(['fedpkg', 'build'])
        if not rc:
            print_fail("Build")
            continue

        # work out repo branch
        if args.fedora_branch == "f18":
            pkg_branch_name = 'f18-build'
        elif args.fedora_branch == "f19":
            pkg_branch_name = 'f19-build'
        elif args.fedora_branch == "f20":
            pkg_branch_name = 'f20-build'
        elif args.fedora_branch == "f21":
            pkg_branch_name = 'f21-build'
        elif args.fedora_branch == "f22":
            pkg_branch_name = 'f22-build'
        elif args.fedora_branch == "rawhide":
            pkg_branch_name = 'f23-build'
        else:
            print_fail("Failed to get repo branch tag for" +
                       args.fedora_branch)
            continue

        # wait for repo to sync
        if item.wait_repo and args.fedora_branch == "rawhide":
            rc = item.run_command([
                'koji', 'wait-repo', pkg_branch_name, '--build',
                "%s-%s-1.%s" % (item.pkgname, new_version, pkg_release_tag)
            ])
            if not rc:
                print_fail("Wait for repo")
                continue