Example #1
    def rgbdslam_on_bags(self, bags_list):
        """ Execute rgbdslam code on specified bags 

        Usage: rgbdslam_on_bags (bags_list)

        Input:
            bags_list - List of bag filenames (with full path) to run rgbdslam.
                Output files are bags_list[i] + '_g2o.txt'

        Output:
            g2o_files - List of rgbdslam (g2o) outputs for given filename bags.
        """
        import os
        import shlex
        import subprocess
        import utils

        g2o_files = list()
        for bag in bags_list: 
            g2o_file = bag + '_g2o.txt'
            locked = utils.lock_file(g2o_file)
            if locked:
                print("Calling RGBDSlam on bag " + bag + "... ")
                launch_str = "roslaunch rgbdslam rgbdslam_bag.launch" + \
                            " bag:=" + bag 
                # Call the process silently, discarding its console output
                with open(os.devnull, 'w') as fnull:
                    subprocess.call(shlex.split(launch_str), stdout=fnull)
                print(" Done! \n")
                utils.unlock_file(g2o_file)
                g2o_files.append(g2o_file)

        return g2o_files
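
Note: every example in this collection guards shared files with a pair of helpers, utils.lock_file and utils.unlock_file. The helpers come from different projects (Example #1 passes a path and gets back a boolean, later examples keep the returned handle and pass it to unlock_file, Example #5 awaits an async variant), so the code below is only an assumed, minimal flock-based sketch of the contract they share: acquire an exclusive lock or return a falsy value on timeout, then release it when done.

# Hypothetical sketch of the lock helpers these examples assume (POSIX only).
import fcntl
import time


def lock_file(path, timeout=30):
    fh = open(path, 'a')
    deadline = time.time() + timeout
    while True:
        try:
            fcntl.flock(fh, fcntl.LOCK_EX | fcntl.LOCK_NB)
            return fh  # truthy handle on success
        except OSError:
            if time.time() > deadline:
                fh.close()
                return None  # falsy on timeout, as the callers expect
            time.sleep(1)


def unlock_file(fh):
    fcntl.flock(fh, fcntl.LOCK_UN)
    fh.close()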
Example #2
def main():
    if '--help' in sys.argv:
        usage()
        sys.exit(0)
    if len(sys.argv) < 3:
        usage()
        sys.exit(1)

    utils.setup_django()
    import settings

    branch = utils.get_branch('master')
    fetchdir = settings.LAYER_FETCH_DIR

    from layerindex.models import LayerItem
    bitbakeitem = LayerItem()
    bitbakeitem.vcs_url = settings.BITBAKE_REPO_URL
    bitbakepath = os.path.join(fetchdir, bitbakeitem.get_fetch_dir())
    if getattr(settings, 'BITBAKE_PATH', ''):
        bitbakepath = os.path.join(bitbakepath, settings.BITBAKE_PATH)

    if not os.path.exists(bitbakepath):
        sys.stderr.write("Unable to find bitbake checkout at %s" % bitbakepath)
        sys.exit(1)

    lockfn = os.path.join(fetchdir, "layerindex.lock")
    lockfile = utils.lock_file(lockfn)
    if not lockfile:
        sys.stderr.write("Layer index lock timeout expired\n")
        sys.exit(1)
    try:
        (tinfoil, tempdir) = recipeparse.init_parser(settings, branch, bitbakepath, True)
        try:
            changeset = get_changeset(sys.argv[1])
            if not changeset:
                sys.stderr.write("Unable to find changeset with id %s\n" % sys.argv[1])
                sys.exit(1)

            utils.setup_core_layer_sys_path(settings, branch.name)

            outp = generate_patches(tinfoil, fetchdir, changeset, sys.argv[2])
        finally:
            tinfoil.shutdown()
    finally:
        utils.unlock_file(lockfile)

    if outp:
        print(outp)
    else:
        sys.stderr.write("No changes to write\n")
        sys.exit(1)

    shutil.rmtree(tempdir)
    sys.exit(0)
Example #3
def main():
    if '--help' in sys.argv:
        usage()
        sys.exit(0)
    if len(sys.argv) < 3:
        usage()
        sys.exit(1)

    utils.setup_django()
    import settings

    branch = utils.get_branch('master')
    fetchdir = settings.LAYER_FETCH_DIR
    bitbakepath = os.path.join(fetchdir, 'bitbake')

    lockfn = os.path.join(fetchdir, "layerindex.lock")
    lockfile = utils.lock_file(lockfn)
    if not lockfile:
        sys.stderr.write("Layer index lock timeout expired\n")
        sys.exit(1)
    try:
        (tinfoil, tempdir) = recipeparse.init_parser(settings, branch,
                                                     bitbakepath, True)

        changeset = get_changeset(sys.argv[1])
        if not changeset:
            sys.stderr.write("Unable to find changeset with id %s\n" %
                             sys.argv[1])
            sys.exit(1)

        outp = generate_patches(tinfoil, fetchdir, changeset, sys.argv[2])
    finally:
        tinfoil.shutdown()
        utils.unlock_file(lockfile)

    if outp:
        print(outp)
    else:
        sys.stderr.write("No changes to write\n")
        sys.exit(1)

    shutil.rmtree(tempdir)
    sys.exit(0)
Example #4
def main():
    if '--help' in sys.argv:
        usage()
        sys.exit(0)
    if len(sys.argv) < 3:
        usage()
        sys.exit(1)

    utils.setup_django()
    import settings

    branch = utils.get_branch('master')
    fetchdir = settings.LAYER_FETCH_DIR
    bitbakepath = os.path.join(fetchdir, 'bitbake')

    lockfn = os.path.join(fetchdir, "layerindex.lock")
    lockfile = utils.lock_file(lockfn)
    if not lockfile:
        sys.stderr.write("Layer index lock timeout expired\n")
        sys.exit(1)
    try:
        (tinfoil, tempdir) = recipeparse.init_parser(settings, branch, bitbakepath, True)

        changeset = get_changeset(sys.argv[1])
        if not changeset:
            sys.stderr.write("Unable to find changeset with id %s\n" % sys.argv[1])
            sys.exit(1)

        outp = generate_patches(tinfoil, fetchdir, changeset, sys.argv[2])
    finally:
        utils.unlock_file(lockfile)

    if outp:
        print(outp)
    else:
        sys.stderr.write("No changes to write\n")
        sys.exit(1)

    shutil.rmtree(tempdir)
    sys.exit(0)
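
The three main() variants above differ mainly in what their finally blocks guarantee: Example #2 nests two try/finally blocks so the parser is always shut down and the lock always released, Example #3 does both in a single finally, and Example #4 only releases the lock. A minimal skeleton of the most defensive ordering, with hypothetical stand-ins for the lock and parser, is sketched below.

# Hypothetical skeleton of the nested cleanup in Example #2: the inner finally
# always shuts the parser down, the outer finally always releases the lock,
# even when parsing or patch generation raises.
def run_with_cleanup(acquire_lock, release_lock, init_parser, shutdown_parser, work):
    lockfile = acquire_lock()
    if not lockfile:
        raise RuntimeError("Layer index lock timeout expired")
    try:
        tinfoil = init_parser()
        try:
            return work(tinfoil)
        finally:
            shutdown_parser(tinfoil)
    finally:
        release_lock(lockfile)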
Example #5
async def send_chart_data(req, writer):
    cmax = 10
    sqr = False
    req.parse_qs()
    file_name = req.form["file"]
    aliases = utils.get_config("aliases", {})
    data_alias = ""
    gc.collect(generation=2)
    if file_name == 'termometr.hist':
        termometr = sensors.Sensory()
        curr = await termometr.pomiar_temperatury()
        data_alias = "Piec - termometr"
        del termometr
    elif file_name == 'piec.hist':
        curr = int(utils.get_config("piec_temperatura", 40))
        data_alias = "Piec - temperatura"
        sqr = True
    else:
        data_alias = aliases[file_name]
        curr = None
    prev = None
    data = """{"name": "%s", "data": [""" % data_alias
    await writer.awrite(data.encode('utf-8'))
    await writer.drain()
    data = ""
    tc = 0
    try:
        await utils.lock_file(file_name)

        with open(file_name, 'r') as fi:
            c = 0

            data = ""

            while True:
                buf = fi.readline()
                if buf == '':
                    break
                else:
                    d = buf.rstrip().split(" - ")

                    if sqr and prev is not None:
                        dp = buf.rstrip().split(" - ")
                        dp[1] = prev
                        dp[0] += " GMT"
                        if tc != 0:
                            data += ","
                        else:
                            tc = 1
                        data += json.dumps(dp)

                    prev = d[1]
                    d[0] += " GMT"
                    if tc != 0:
                        data += ","
                    else:
                        tc = 1
                    data += json.dumps(d)
                    c += 1

                    if c == cmax:
                        await writer.awrite(data.encode('utf-8'))
                        await writer.drain()
                        c = 0
                        del data
                        gc.collect()
                        data = ""

            print('1')
            utils.unlock_file(file_name)
    except Exception as eee:
        print('e')
        utils.log_exception(eee, 1)

    if utils.dst_time()[0] > 2000 and curr is not None:
        czas = utils.czas(True)
        if sqr:
            d = [czas + ' GMT', prev]
            if tc != 0:
                data += ","
            else:
                tc = 1
            data += (json.dumps(d))

        d = [czas + ' GMT', curr]
        if tc != 0:
            data += ","
        data += json.dumps(d)

    await writer.awrite(data.encode('utf-8'))
    del data

    await writer.drain()

    await writer.awrite("""]}""".encode('utf-8'))

    await writer.drain()
    print('f')
    utils.unlock_file(file_name)
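
For reference, send_chart_data streams a single JSON object in chunks of cmax points. Assuming history lines of the form "<timestamp> - <value>", the payload it assembles looks roughly like the hypothetical example below (field names taken from the handler, sample values invented).

# Hypothetical illustration of the streamed payload shape.
import json

history_lines = ["2024-01-01 12:00:00 - 21.5", "2024-01-01 12:05:00 - 21.7"]
points = []
for line in history_lines:
    timestamp, value = line.rstrip().split(" - ")
    points.append([timestamp + " GMT", value])

payload = '{"name": "%s", "data": %s}' % ("Piec - termometr", json.dumps(points))
print(payload)
# {"name": "Piec - termometr", "data": [["2024-01-01 12:00:00 GMT", "21.5"], ["2024-01-01 12:05:00 GMT", "21.7"]]}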
Example #6
def main():
    if LooseVersion(git.__version__) < '0.3.1':
        logger.error(
            "Version of GitPython is too old, please install GitPython (python-git) 0.3.1 or later in order to use this script"
        )
        sys.exit(1)

    parser = optparse.OptionParser(usage="""
    %prog [options]""")

    parser.add_option(
        "-b",
        "--branch",
        help=
        "Specify branch(es) to update (use commas to separate multiple). Default is all enabled branches.",
        action="store",
        dest="branch",
        default='')
    parser.add_option(
        "-l",
        "--layer",
        help=
        "Specify layers to update (use commas to separate multiple). Default is all published layers.",
        action="store",
        dest="layers")
    parser.add_option(
        "-t",
        "--timeout",
        help=
        "Specify timeout in seconds to get layerindex.lock. Default is 30 seconds.",
        type="int",
        action="store",
        dest="timeout",
        default=30)
    parser.add_option(
        "-r",
        "--reload",
        help="Reload recipe data instead of updating since last update",
        action="store_true",
        dest="reload")
    parser.add_option(
        "",
        "--fullreload",
        help="Discard existing recipe data and fetch it from scratch",
        action="store_true",
        dest="fullreload")
    parser.add_option("-n",
                      "--dry-run",
                      help="Don't write any data back to the database",
                      action="store_true",
                      dest="dryrun")
    parser.add_option("-x",
                      "--nofetch",
                      help="Don't fetch repositories",
                      action="store_true",
                      dest="nofetch")
    parser.add_option("",
                      "--nocheckout",
                      help="Don't check out branches",
                      action="store_true",
                      dest="nocheckout")
    parser.add_option("",
                      "--stop-on-error",
                      help="Stop on first parsing error",
                      action="store_true",
                      default=False,
                      dest="stop_on_error")
    parser.add_option("-a",
                      "--actual-branch",
                      help="Update actual branch for layer and bitbake",
                      action="store",
                      dest="actual_branch",
                      default='')
    parser.add_option("-d",
                      "--debug",
                      help="Enable debug output",
                      action="store_const",
                      const=logging.DEBUG,
                      dest="loglevel",
                      default=logging.INFO)
    parser.add_option("-q",
                      "--quiet",
                      help="Hide all output except error messages",
                      action="store_const",
                      const=logging.ERROR,
                      dest="loglevel")
    parser.add_option(
        "",
        "--keep-temp",
        help="Preserve temporary directory at the end instead of deleting it",
        action="store_true")

    options, args = parser.parse_args(sys.argv)
    if len(args) > 1:
        logger.error('unexpected argument "%s"' % args[1])
        parser.print_help()
        sys.exit(1)

    utils.setup_django()
    import settings
    from layerindex.models import Branch, LayerItem, Update, LayerUpdate, LayerBranch

    logger.setLevel(options.loglevel)

    if options.branch:
        branches = options.branch.split(',')
        for branch in branches:
            if not utils.get_branch(branch):
                logger.error("Specified branch %s is not valid" % branch)
                sys.exit(1)
    else:
        branchquery = Branch.objects.filter(updates_enabled=True)
        branches = [branch.name for branch in branchquery]

    fetchdir = settings.LAYER_FETCH_DIR
    if not fetchdir:
        logger.error("Please set LAYER_FETCH_DIR in settings.py")
        sys.exit(1)

    # We deliberately exclude status == 'X' ("no update") here
    layerquery_all = LayerItem.objects.filter(comparison=False).filter(
        status='P')
    if layerquery_all.count() == 0:
        logger.info("No published layers to update")
        sys.exit(1)

    # For -a option to update bitbake branch
    update_bitbake = False
    if options.layers:
        layers = options.layers.split(',')
        if 'bitbake' in layers:
            update_bitbake = True
            layers.remove('bitbake')
        for layer in layers:
            layerquery = LayerItem.objects.filter(comparison=False).filter(
                name=layer)
            if layerquery.count() == 0:
                logger.error('No layers matching specified query "%s"' % layer)
                sys.exit(1)
        layerquery = LayerItem.objects.filter(comparison=False).filter(
            name__in=layers)
    else:
        layerquery = layerquery_all
        update_bitbake = True

    if options.actual_branch:
        if not options.branch:
            logger.error("-a option requires -b")
            sys.exit(1)
        elif len(branches) != 1:
            logger.error("Only one branch should be used with -a")
            sys.exit(1)

    if not os.path.exists(fetchdir):
        os.makedirs(fetchdir)

    allrepos = {}
    fetchedresult = []
    fetchedrepos = []
    failedrepos = {}

    # We don't want git to prompt for any passwords (e.g. when accessing renamed/hidden github repos)
    os.environ['SSH_ASKPASS'] = ''
    os.environ['GIT_ASKPASS'] = ''
    os.environ['GIT_TERMINAL_PROMPT'] = '0'

    listhandler = utils.ListHandler()
    listhandler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
    logger.addHandler(listhandler)

    update = Update()
    update.started = datetime.now()
    if options.fullreload or options.reload:
        update.reload = True
    else:
        update.reload = False
    if not options.dryrun:
        update.save()
    try:
        lockfn = os.path.join(fetchdir, "layerindex.lock")
        lockfile = utils.lock_file(lockfn, options.timeout, logger)
        if not lockfile:
            logger.error("Layer index lock timeout expired")
            sys.exit(1)
        try:
            # Make sure oe-core is fetched since recipe parsing requires it
            layerquery_core = LayerItem.objects.filter(
                comparison=False).filter(name=settings.CORE_LAYER_NAME)
            if layerquery_core in layerquery:
                layerquery_fetch = list(layerquery)
            else:
                layerquery_fetch = list(layerquery) + list(layerquery_core)
            # Fetch latest metadata from repositories
            for layer in layerquery_fetch:
                # Handle multiple layers in a single repo
                urldir = layer.get_fetch_dir()
                repodir = os.path.join(fetchdir, urldir)
                if layer.vcs_url not in allrepos:
                    allrepos[layer.vcs_url] = (repodir, urldir, fetchdir,
                                               layer.name)
            # Add bitbake
            if settings.BITBAKE_REPO_URL not in allrepos:
                bitbakeitem = LayerItem()
                bitbakeitem.vcs_url = settings.BITBAKE_REPO_URL
                bitbakeurldir = bitbakeitem.get_fetch_dir()
                bitbakepath = os.path.join(fetchdir, bitbakeurldir)
                allrepos[settings.BITBAKE_REPO_URL] = (bitbakepath,
                                                       bitbakeurldir, fetchdir,
                                                       "bitbake")

            (bitbakepath, _, _, _) = allrepos[settings.BITBAKE_REPO_URL]
            if getattr(settings, 'BITBAKE_PATH', ''):
                bitbakepath = os.path.join(bitbakepath, settings.BITBAKE_PATH)

            if not options.nofetch:
                # Parallel fetching
                pool = multiprocessing.Pool(int(settings.PARALLEL_JOBS))
                for url in allrepos:
                    fetchedresult.append(pool.apply_async(fetch_repo, \
                        (url, allrepos[url][0], allrepos[url][1], allrepos[url][2], allrepos[url][3],)))
                pool.close()
                pool.join()

                for url in fetchedresult[:]:
                    # The format is (url, error); error is None on success.
                    if url.get()[1]:
                        failedrepos[url.get()[0]] = url.get()[1]
                    else:
                        fetchedrepos.append(url.get()[0])

                if not (fetchedrepos or update_bitbake):
                    logger.error("No repositories could be fetched, exiting")
                    sys.exit(1)

            if options.actual_branch:
                update_actual_branch(layerquery, fetchdir, branches[0],
                                     options, update_bitbake, bitbakepath)
                return

            # Get a safe bitbake branch to call into from this script (used later on)
            safe_bitbake_branch = 'origin/master'
            master_branch = Branch.objects.filter(name='master').first()
            if master_branch and master_branch.bitbake_branch:
                safe_bitbake_branch = 'origin/' + master_branch.bitbake_branch

            # Process and extract data from each layer
            # We now do this by calling out to a separate script; doing otherwise turned out to be
            # unreliable due to leaking memory (we're using bitbake internals in a manner in which
            # they never get used during normal operation).
            failed_layers = {}
            for branch in branches:
                failed_layers[branch] = []
                # If layer_A depends on (or recommends) layer_B, add layer_B before layer_A
                deps_dict_all = {}
                layerquery_sorted = []
                collections = set()
                branchobj = utils.get_branch(branch)
                for layer in layerquery_all:
                    # Get all collections from the database, but we can't trust
                    # the one which is about to be updated, since its collections
                    # may have changed (i.e. differ from the database).
                    if layer in layerquery:
                        continue
                    layerbranch = layer.get_layerbranch(branch)
                    if layerbranch:
                        collections.add(
                            (layerbranch.collection, layerbranch.version))

                for layer in layerquery:
                    if layer.vcs_url in failedrepos:
                        logger.info(
                            "Skipping update of layer %s - fetch failed" %
                            layer.name)
                        continue

                    layerbranch = layer.get_layerbranch(branch)
                    branchname = branch
                    branchdesc = branch
                    newbranch = False
                    branchobj = utils.get_branch(branch)
                    if layerbranch:
                        if layerbranch.actual_branch:
                            branchname = layerbranch.actual_branch
                            branchdesc = "%s (%s)" % (branch, branchname)
                    else:
                        # LayerBranch doesn't exist for this branch, create it temporarily
                        # (we won't save this - update_layer.py will do the actual creation
                        # if it gets called).
                        newbranch = True
                        layerbranch = LayerBranch()
                        layerbranch.layer = layer
                        layerbranch.branch = branchobj
                        layerbranch_source = layer.get_layerbranch(branchobj)
                        if not layerbranch_source:
                            layerbranch_source = layer.get_layerbranch(None)
                        if layerbranch_source:
                            layerbranch.vcs_subdir = layerbranch_source.vcs_subdir

                    # Collect repo info
                    urldir = layer.get_fetch_dir()
                    repodir = os.path.join(fetchdir, urldir)
                    repo = git.Repo(repodir)
                    if repo.bare:
                        logger.error('Repository %s is bare, not supported' %
                                     repodir)
                        continue
                    try:
                        # Always use origin/branchname so that an error is raised when the branch doesn't exist, even with --nocheckout
                        topcommit = repo.commit('origin/%s' % branchname)
                        if options.nocheckout:
                            topcommit = repo.commit('HEAD')
                    except:
                        if newbranch:
                            logger.info(
                                "Skipping update of layer %s - branch %s doesn't exist"
                                % (layer.name, branchdesc))
                        else:
                            logger.info(
                                "layer %s - branch %s no longer exists, removing it from database"
                                % (layer.name, branchdesc))
                            if not options.dryrun:
                                layerbranch.delete()
                        continue

                    if layerbranch.vcs_subdir and not options.nocheckout:
                        # Find latest commit in subdirectory
                        # A bit odd to do it this way but apparently there's no other way in the GitPython API
                        topcommit = next(
                            repo.iter_commits('origin/%s' % branchname,
                                              paths=layerbranch.vcs_subdir),
                            None)
                        if not topcommit:
                            print_subdir_error(newbranch, layer.name,
                                               layerbranch.vcs_subdir,
                                               branchdesc)
                            if not (newbranch and layerbranch.vcs_subdir):
                                logger.error(
                                    "Failed to get last revision for layer %s on branch %s"
                                    % (layer.name, branchdesc))
                            continue

                    if layerbranch.vcs_last_rev == topcommit.hexsha and not update.reload:
                        logger.info(
                            "Layer %s is already up-to-date for branch %s" %
                            (layer.name, branchdesc))
                        collections.add(
                            (layerbranch.collection, layerbranch.version))
                        continue
                    else:
                        # Check out appropriate branch
                        if not options.nocheckout:
                            utils.checkout_layer_branch(layerbranch,
                                                        repodir,
                                                        logger=logger)
                        layerdir = os.path.join(repodir,
                                                layerbranch.vcs_subdir)
                        if layerbranch.vcs_subdir and not os.path.exists(
                                layerdir):
                            print_subdir_error(newbranch, layer.name,
                                               layerbranch.vcs_subdir,
                                               branchdesc)
                            continue

                        if not os.path.exists(
                                os.path.join(layerdir, 'conf/layer.conf')):
                            logger.error(
                                "conf/layer.conf not found for layer %s - is subdirectory set correctly?"
                                % layer.name)
                            continue

                    cmd = prepare_update_layer_command(options,
                                                       branchobj,
                                                       layer,
                                                       initial=True)
                    logger.debug('Running layer update command: %s' % cmd)
                    ret, output = utils.run_command_interruptible(cmd)
                    logger.debug('output: %s' % output)
                    if ret == 254:
                        # Interrupted by user, break out of loop
                        logger.info('Update interrupted, exiting')
                        sys.exit(254)
                    elif ret != 0:
                        output = output.rstrip()
                        # Save a layerupdate here or we won't see this output
                        layerupdate = LayerUpdate()
                        layerupdate.update = update
                        layerupdate.layer = layer
                        layerupdate.branch = branchobj
                        layerupdate.started = datetime.now()
                        layerupdate.log = output
                        layerupdate.retcode = ret
                        if not options.dryrun:
                            layerupdate.save()
                        continue

                    col = extract_value('BBFILE_COLLECTIONS', output)
                    if not col:
                        logger.error(
                            'Unable to find BBFILE_COLLECTIONS value in initial output'
                        )
                        # Assume (perhaps naively) that it's an error specific to the layer
                        continue
                    ver = extract_value('LAYERVERSION', output)
                    deps = extract_value('LAYERDEPENDS', output)
                    recs = extract_value('LAYERRECOMMENDS', output)

                    if not options.nocheckout:
                        # We need to check this out because we're using stuff from bb.utils
                        # below, and if we don't it might be a python 2 revision which would
                        # be an issue
                        utils.checkout_repo(bitbakepath,
                                            safe_bitbake_branch,
                                            logger=logger)

                    deps_dict = utils.explode_dep_versions2(bitbakepath, deps)
                    recs_dict = utils.explode_dep_versions2(bitbakepath, recs)
                    if not (deps_dict or recs_dict):
                        # No depends, so add it first
                        layerquery_sorted.append(layer)
                        collections.add((col, ver))
                        continue
                    deps_dict_all[layer] = {'deps': deps_dict, \
                                            'recs': recs_dict, \
                                            'collection': col, \
                                            'version': ver}

                # Move deps_dict_all to layerquery_sorted orderly
                if deps_dict_all:
                    logger.info("Sorting layers for branch %s" % branch)
                while True:
                    deps_dict_all_copy = deps_dict_all.copy()
                    for layer, value in deps_dict_all_copy.items():
                        for deps_recs in ('deps', 'recs'):
                            for req_col, req_ver_list in value[deps_recs].copy(
                            ).items():
                                matched = False
                                if req_ver_list:
                                    req_ver = req_ver_list[0]
                                else:
                                    req_ver = None
                                if utils.is_deps_satisfied(
                                        req_col, req_ver, collections):
                                    del (value[deps_recs][req_col])
                        if not (value['deps'] or value['recs']):
                            # All the depends are in collections:
                            del (deps_dict_all[layer])
                            layerquery_sorted.append(layer)
                            collections.add(
                                (value['collection'], value['version']))

                    if not len(deps_dict_all):
                        break

                    finished = True
                    # If nothing changed after a run, drop recs and try again
                    if operator.eq(deps_dict_all_copy, deps_dict_all):
                        for layer, value in deps_dict_all.items():
                            if value['recs'] and not value['deps']:
                                # Add it if only the recommends are unsatisfied.
                                logger.warn(
                                    'Adding %s without LAYERRECOMMENDS...' %
                                    layer.name)
                                del (deps_dict_all[layer])
                                layerquery_sorted.append(layer)
                                collections.add(
                                    (value['collection'], value['version']))
                                failed_msg = '%s: Added without LAYERRECOMMENDS' % layer.name
                                failed_layers[branch].append(failed_msg)
                                finished = False
                                break
                        if not finished:
                            continue
                        logger.warning(
                            "Cannot find required collections on branch %s:" %
                            branch)
                        for layer, value in deps_dict_all.items():
                            logger.warn(
                                '%s: LAYERDEPENDS: %s LAYERRECOMMENDS: %s' %
                                (layer.name, value['deps'], value['recs']))
                            if value['deps']:
                                failed_layers[branch].append(
                                    '%s: Failed to add since LAYERDEPENDS is not satisfied'
                                    % layer.name)
                            else:
                                # Should never come here
                                logger.error(
                                    "Unexpected errors when sorting layers")
                                sys.exit(1)
                        logger.warning("Known collections on branch %s: %s" %
                                       (branch, collections))
                        break

                for layer in layerquery_sorted:
                    layerupdate = LayerUpdate()
                    layerupdate.update = update
                    layerupdate.layer = layer
                    layerupdate.branch = branchobj
                    layerbranch = layer.get_layerbranch(branch)
                    if layerbranch:
                        layerupdate.vcs_before_rev = layerbranch.vcs_last_rev

                    errmsg = failedrepos.get(layer.vcs_url, '')
                    if errmsg:
                        logger.info(
                            "Skipping update of layer %s as fetch of repository %s failed:\n%s"
                            % (layer.name, layer.vcs_url, errmsg))
                        layerupdate.started = datetime.now()
                        layerupdate.finished = datetime.now()
                        layerupdate.log = 'ERROR: fetch failed: %s' % errmsg
                        if not options.dryrun:
                            layerupdate.save()
                        continue

                    layerupdate.started = datetime.now()
                    if not options.dryrun:
                        layerupdate.save()
                    cmd = prepare_update_layer_command(options, branchobj,
                                                       layer)
                    logger.debug('Running layer update command: %s' % cmd)
                    ret, output = utils.run_command_interruptible(cmd)
                    layerupdate.finished = datetime.now()

                    # We need to get layerbranch here because it might not have existed until
                    # layer_update.py created it, but it still may not create one (e.g. if subdir
                    # didn't exist) so we still need to check
                    layerbranch = layer.get_layerbranch(branch)
                    if layerbranch:
                        layerupdate.vcs_after_rev = layerbranch.vcs_last_rev
                    layerupdate.log = output
                    layerupdate.retcode = ret
                    if not options.dryrun:
                        layerupdate.save()

                    if ret == 254:
                        # Interrupted by user, break out of loop
                        logger.info('Update interrupted, exiting')
                        sys.exit(254)
                    if options.stop_on_error and ret != 0:
                        logger.info(
                            'Layer update failed with --stop-on-error, stopping'
                        )
                        sys.exit(1)
            if failed_layers:
                for branch, err_msg_list in failed_layers.items():
                    if err_msg_list:
                        print()
                        logger.error("Issues found on branch %s:\n    %s" %
                                     (branch, "\n    ".join(err_msg_list)))
                        print()
        finally:
            utils.unlock_file(lockfile)

    except KeyboardInterrupt:
        logger.info('Update interrupted, exiting')
        sys.exit(254)
    except Exception:
        import traceback
        logger.error(traceback.format_exc().rstrip())
        sys.exit(1)
    finally:
        update.log = ''.join(listhandler.read())
        update.finished = datetime.now()
        if not options.dryrun:
            update.save()

    if not options.dryrun:
        # Purge old update records
        update_purge_days = getattr(settings, 'UPDATE_PURGE_DAYS', 30)
        Update.objects.filter(started__lte=datetime.now() -
                              timedelta(days=update_purge_days)).delete()

    sys.exit(0)
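
The deps_dict_all block above orders layers by a simple fixed-point pass: admit any layer whose remaining LAYERDEPENDS/LAYERRECOMMENDS are already in the known collections, and when a pass makes no progress, drop unsatisfied recommends before giving up on the rest. A standalone sketch of the same idea follows; the layer names and dictionary shape are hypothetical simplifications.

# Standalone sketch of the layer-ordering loop above (hypothetical layer names).
def sort_layers(pending, collections):
    ordered = []
    while pending:
        progressed = False
        for name, info in list(pending.items()):
            # Drop depends/recommends that are already satisfied
            info['deps'] = [d for d in info['deps'] if d not in collections]
            info['recs'] = [r for r in info['recs'] if r not in collections]
            if not info['deps'] and not info['recs']:
                del pending[name]
                ordered.append(name)
                collections.add(info['collection'])
                progressed = True
        if not progressed:
            relaxed = False
            for name, info in list(pending.items()):
                if info['recs'] and not info['deps']:
                    # Only recommends are missing: admit it anyway
                    del pending[name]
                    ordered.append(name)
                    collections.add(info['collection'])
                    relaxed = True
                    break
            if not relaxed:
                break  # remaining layers have unsatisfiable LAYERDEPENDS
    return ordered


print(sort_layers(
    {'meta-a': {'deps': ['core'], 'recs': [], 'collection': 'a'},
     'meta-b': {'deps': ['a'], 'recs': ['missing'], 'collection': 'b'}},
    {'core'}))
# ['meta-a', 'meta-b']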
Example #7
def main():
    if LooseVersion(git.__version__) < '0.3.1':
        logger.error("Version of GitPython is too old, please install GitPython (python-git) 0.3.1 or later in order to use this script")
        sys.exit(1)


    parser = optparse.OptionParser(
        usage = """
    %prog [options]""")

    parser.add_option("-b", "--branch",
            help = "Specify branch to update",
            action="store", dest="branch", default='master')
    parser.add_option("-l", "--layer",
            help = "Specify layers to update (use commas to separate multiple). Default is all published layers.",
            action="store", dest="layers")
    parser.add_option("-r", "--reload",
            help = "Reload recipe data instead of updating since last update",
            action="store_true", dest="reload")
    parser.add_option("", "--fullreload",
            help = "Discard existing recipe data and fetch it from scratch",
            action="store_true", dest="fullreload")
    parser.add_option("-n", "--dry-run",
            help = "Don't write any data back to the database",
            action="store_true", dest="dryrun")
    parser.add_option("-x", "--nofetch",
            help = "Don't fetch repositories",
            action="store_true", dest="nofetch")
    parser.add_option("", "--nocheckout",
            help = "Don't check out branches",
            action="store_true", dest="nocheckout")
    parser.add_option("-d", "--debug",
            help = "Enable debug output",
            action="store_const", const=logging.DEBUG, dest="loglevel", default=logging.INFO)
    parser.add_option("-q", "--quiet",
            help = "Hide all output except error messages",
            action="store_const", const=logging.ERROR, dest="loglevel")

    options, args = parser.parse_args(sys.argv)
    if len(args) > 1:
        logger.error('unexpected argument "%s"' % args[1])
        parser.print_help()
        sys.exit(1)

    if options.fullreload:
        options.reload = True

    utils.setup_django()
    import settings
    from layerindex.models import LayerItem, LayerBranch, Recipe, RecipeFileDependency, Machine, BBAppend, BBClass
    from django.db import transaction

    logger.setLevel(options.loglevel)

    branch = utils.get_branch(options.branch)
    if not branch:
        logger.error("Specified branch %s is not valid" % options.branch)
        sys.exit(1)

    fetchdir = settings.LAYER_FETCH_DIR
    if not fetchdir:
        logger.error("Please set LAYER_FETCH_DIR in settings.py")
        sys.exit(1)

    if options.layers:
        layerquery = LayerItem.objects.filter(classic=False).filter(name__in=options.layers.split(','))
        if layerquery.count() == 0:
            logger.error('No layers matching specified query "%s"' % options.layers)
            sys.exit(1)
    else:
        layerquery = LayerItem.objects.filter(classic=False).filter(status='P')
        if layerquery.count() == 0:
            logger.info("No published layers to update")
            sys.exit(1)

    if not os.path.exists(fetchdir):
        os.makedirs(fetchdir)
    fetchedrepos = []
    failedrepos = []

    lockfn = os.path.join(fetchdir, "layerindex.lock")
    lockfile = utils.lock_file(lockfn)
    if not lockfile:
        logger.error("Layer index lock timeout expired")
        sys.exit(1)
    try:
        bitbakepath = os.path.join(fetchdir, 'bitbake')

        if not options.nofetch:
            # Fetch latest metadata from repositories
            for layer in layerquery:
                # Handle multiple layers in a single repo
                urldir = layer.get_fetch_dir()
                repodir = os.path.join(fetchdir, urldir)
                if not (layer.vcs_url in fetchedrepos or layer.vcs_url in failedrepos):
                    logger.info("Fetching remote repository %s" % layer.vcs_url)
                    out = None
                    try:
                        if not os.path.exists(repodir):
                            out = utils.runcmd("git clone %s %s" % (layer.vcs_url, urldir), fetchdir, logger=logger)
                        else:
                            out = utils.runcmd("git fetch", repodir, logger=logger)
                    except Exception as e:
                        logger.error("Fetch of layer %s failed: %s" % (layer.name, str(e)))
                        failedrepos.append(layer.vcs_url)
                        continue
                    fetchedrepos.append(layer.vcs_url)

            if not fetchedrepos:
                logger.error("No repositories could be fetched, exiting")
                sys.exit(1)

            logger.info("Fetching bitbake from remote repository %s" % settings.BITBAKE_REPO_URL)
            if not os.path.exists(bitbakepath):
                out = utils.runcmd("git clone %s %s" % (settings.BITBAKE_REPO_URL, 'bitbake'), fetchdir, logger=logger)
            else:
                out = utils.runcmd("git fetch", bitbakepath, logger=logger)

        try:
            (tinfoil, tempdir) = recipeparse.init_parser(settings, branch, bitbakepath, nocheckout=options.nocheckout, logger=logger)
        except recipeparse.RecipeParseError as e:
            logger.error(str(e))
            sys.exit(1)

        # Clear the default value of SUMMARY so that we can use DESCRIPTION instead if it hasn't been set
        tinfoil.config_data.setVar('SUMMARY', '')
        # Clear the default value of DESCRIPTION so that we can see where it's not set
        tinfoil.config_data.setVar('DESCRIPTION', '')
        # Clear the default value of HOMEPAGE ('unknown')
        tinfoil.config_data.setVar('HOMEPAGE', '')
        # Set a blank value for LICENSE so that it doesn't cause the parser to die (e.g. with meta-ti -
        # why won't they just fix that?!)
        tinfoil.config_data.setVar('LICENSE', '')

        # Process and extract data from each layer
        for layer in layerquery:
            transaction.enter_transaction_management()
            transaction.managed(True)
            try:
                urldir = layer.get_fetch_dir()
                repodir = os.path.join(fetchdir, urldir)
                if layer.vcs_url in failedrepos:
                    logger.info("Skipping update of layer %s as fetch of repository %s failed" % (layer.name, layer.vcs_url))
                    transaction.rollback()
                    continue

                layerbranch = layer.get_layerbranch(options.branch)

                branchname = options.branch
                branchdesc = options.branch
                if layerbranch:
                    if layerbranch.actual_branch:
                        branchname = layerbranch.actual_branch
                        branchdesc = "%s (%s)" % (options.branch, branchname)

                # Collect repo info
                repo = git.Repo(repodir)
                assert not repo.bare
                try:
                    if options.nocheckout:
                        topcommit = repo.commit('HEAD')
                    else:
                        topcommit = repo.commit('origin/%s' % branchname)
                except:
                    if layerbranch:
                        logger.error("Failed update of layer %s - branch %s no longer exists" % (layer.name, branchdesc))
                    else:
                        logger.info("Skipping update of layer %s - branch %s doesn't exist" % (layer.name, branchdesc))
                    transaction.rollback()
                    continue

                newbranch = False
                if not layerbranch:
                    # LayerBranch doesn't exist for this branch, create it
                    newbranch = True
                    layerbranch = LayerBranch()
                    layerbranch.layer = layer
                    layerbranch.branch = branch
                    layerbranch_source = layer.get_layerbranch('master')
                    if not layerbranch_source:
                        layerbranch_source = layer.get_layerbranch(None)
                    if layerbranch_source:
                        layerbranch.vcs_subdir = layerbranch_source.vcs_subdir
                    layerbranch.save()
                    if layerbranch_source:
                        for maintainer in layerbranch_source.layermaintainer_set.all():
                            maintainer.pk = None
                            maintainer.id = None
                            maintainer.layerbranch = layerbranch
                            maintainer.save()
                        for dep in layerbranch_source.dependencies_set.all():
                            dep.pk = None
                            dep.id = None
                            dep.layerbranch = layerbranch
                            dep.save()

                if layerbranch.vcs_subdir and not options.nocheckout:
                    # Find latest commit in subdirectory
                    # A bit odd to do it this way but apparently there's no other way in the GitPython API
                    topcommit = next(repo.iter_commits('origin/%s' % branchname, paths=layerbranch.vcs_subdir), None)
                    if not topcommit:
                        # This will error out if the directory is completely invalid or had never existed at this point
                        # If it previously existed but has since been deleted, you will get the revision where it was
                        # deleted - so we need to handle that case separately later
                        if newbranch:
                            logger.info("Skipping update of layer %s for branch %s - subdirectory %s does not exist on this branch" % (layer.name, branchdesc, layerbranch.vcs_subdir))
                        elif layerbranch.vcs_subdir:
                            logger.error("Subdirectory for layer %s does not exist on branch %s - if this is legitimate, the layer branch record should be deleted" % (layer.name, branchdesc))
                        else:
                            logger.error("Failed to get last revision for layer %s on branch %s" % (layer.name, branchdesc))
                        transaction.rollback()
                        continue

                layerdir = os.path.join(repodir, layerbranch.vcs_subdir)
                layerdir_start = os.path.normpath(layerdir) + os.sep
                layerrecipes = Recipe.objects.filter(layerbranch=layerbranch)
                layermachines = Machine.objects.filter(layerbranch=layerbranch)
                layerappends = BBAppend.objects.filter(layerbranch=layerbranch)
                layerclasses = BBClass.objects.filter(layerbranch=layerbranch)
                if layerbranch.vcs_last_rev != topcommit.hexsha or options.reload:
                    # Check out appropriate branch
                    if not options.nocheckout:
                        out = utils.runcmd("git checkout origin/%s" % branchname, repodir, logger=logger)
                        out = utils.runcmd("git clean -f -x", repodir, logger=logger)

                    if layerbranch.vcs_subdir and not os.path.exists(layerdir):
                        if newbranch:
                            logger.info("Skipping update of layer %s for branch %s - subdirectory %s does not exist on this branch" % (layer.name, branchdesc, layerbranch.vcs_subdir))
                        else:
                            logger.error("Subdirectory for layer %s does not exist on branch %s - if this is legitimate, the layer branch record should be deleted" % (layer.name, branchdesc))
                        transaction.rollback()
                        continue

                    if not os.path.exists(os.path.join(layerdir, 'conf/layer.conf')):
                        logger.error("conf/layer.conf not found for layer %s - is subdirectory set correctly?" % layer.name)
                        transaction.rollback()
                        continue

                    logger.info("Collecting data for layer %s on branch %s" % (layer.name, branchdesc))

                    try:
                        config_data_copy = recipeparse.setup_layer(tinfoil.config_data, fetchdir, layerdir, layer, layerbranch)
                    except recipeparse.RecipeParseError as e:
                        logger.error(str(e))
                        transaction.rollback()
                        continue

                    if layerbranch.vcs_last_rev and not options.reload:
                        try:
                            diff = repo.commit(layerbranch.vcs_last_rev).diff(topcommit)
                        except Exception as e:
                            logger.warn("Unable to get diff from last commit hash for layer %s - falling back to slow update: %s" % (layer.name, str(e)))
                            diff = None
                    else:
                        diff = None

                    # We handle recipes specially to try to preserve the same id
                    # when recipe upgrades happen (so that if a user bookmarks a
                    # recipe page it remains valid)
                    layerrecipes_delete = []
                    layerrecipes_add = []

                    # Check if any paths should be ignored because there are layers within this layer
                    removedirs = []
                    for root, dirs, files in os.walk(layerdir):
                        for d in dirs:
                            if os.path.exists(os.path.join(root, d, 'conf', 'layer.conf')):
                                removedirs.append(os.path.join(root, d) + os.sep)

                    if diff:
                        # Apply git changes to existing recipe list

                        if layerbranch.vcs_subdir:
                            subdir_start = os.path.normpath(layerbranch.vcs_subdir) + os.sep
                        else:
                            subdir_start = ""

                        updatedrecipes = set()
                        for d in diff.iter_change_type('D'):
                            path = d.a_blob.path
                            if path.startswith(subdir_start):
                                skip = False
                                for d in removedirs:
                                    if path.startswith(d):
                                        skip = True
                                        break
                                if skip:
                                    continue
                                (typename, filepath, filename) = recipeparse.detect_file_type(path, subdir_start)
                                if typename == 'recipe':
                                    values = layerrecipes.filter(filepath=filepath).filter(filename=filename).values('id', 'filepath', 'filename', 'pn')
                                    if len(values):
                                        layerrecipes_delete.append(values[0])
                                        logger.debug("Mark %s for deletion" % values[0])
                                        updatedrecipes.add(os.path.join(values[0]['filepath'], values[0]['filename']))
                                    else:
                                        logger.warn("Deleted recipe %s could not be found" % path)
                                elif typename == 'bbappend':
                                    layerappends.filter(filepath=filepath).filter(filename=filename).delete()
                                elif typename == 'machine':
                                    layermachines.filter(name=filename).delete()
                                elif typename == 'bbclass':
                                    layerclasses.filter(name=filename).delete()

                        for d in diff.iter_change_type('A'):
                            path = d.b_blob.path
                            if path.startswith(subdir_start):
                                skip = False
                                for d in removedirs:
                                    if path.startswith(d):
                                        skip = True
                                        break
                                if skip:
                                    continue
                                (typename, filepath, filename) = recipeparse.detect_file_type(path, subdir_start)
                                if typename == 'recipe':
                                    layerrecipes_add.append(os.path.join(repodir, path))
                                    logger.debug("Mark %s for addition" % path)
                                    updatedrecipes.add(os.path.join(filepath, filename))
                                elif typename == 'bbappend':
                                    append = BBAppend()
                                    append.layerbranch = layerbranch
                                    append.filename = filename
                                    append.filepath = filepath
                                    append.save()
                                elif typename == 'machine':
                                    machine = Machine()
                                    machine.layerbranch = layerbranch
                                    machine.name = filename
                                    update_machine_conf_file(os.path.join(repodir, path), machine)
                                    machine.save()
                                elif typename == 'bbclass':
                                    bbclass = BBClass()
                                    bbclass.layerbranch = layerbranch
                                    bbclass.name = filename
                                    bbclass.save()

                        dirtyrecipes = set()
                        for d in diff.iter_change_type('M'):
                            path = d.a_blob.path
                            if path.startswith(subdir_start):
                                skip = False
                                for d in removedirs:
                                    if path.startswith(d):
                                        skip = True
                                        break
                                if skip:
                                    continue
                                (typename, filepath, filename) = recipeparse.detect_file_type(path, subdir_start)
                                if typename == 'recipe':
                                    logger.debug("Mark %s for update" % path)
                                    results = layerrecipes.filter(filepath=filepath).filter(filename=filename)[:1]
                                    if results:
                                        recipe = results[0]
                                        update_recipe_file(config_data_copy, os.path.join(layerdir, filepath), recipe, layerdir_start, repodir)
                                        recipe.save()
                                        updatedrecipes.add(recipe.full_path())
                                elif typename == 'machine':
                                    results = layermachines.filter(name=filename)
                                    if results:
                                        machine = results[0]
                                        update_machine_conf_file(os.path.join(repodir, path), machine)
                                        machine.save()

                                deps = RecipeFileDependency.objects.filter(layerbranch=layerbranch).filter(path=path)
                                for dep in deps:
                                    dirtyrecipes.add(dep.recipe)

                        for recipe in dirtyrecipes:
                            if not recipe.full_path() in updatedrecipes:
                                update_recipe_file(config_data_copy, os.path.join(layerdir, recipe.filepath), recipe, layerdir_start, repodir)
                    else:
                        # Collect recipe data from scratch

                        layerrecipe_fns = []
                        if options.fullreload:
                            layerrecipes.delete()
                        else:
                            # First, check which recipes still exist
                            layerrecipe_values = layerrecipes.values('id', 'filepath', 'filename', 'pn')
                            for v in layerrecipe_values:
                                root = os.path.join(layerdir, v['filepath'])
                                fullpath = os.path.join(root, v['filename'])
                                preserve = True
                                if os.path.exists(fullpath):
                                    for d in removedirs:
                                        if fullpath.startswith(d):
                                            preserve = False
                                            break
                                else:
                                    preserve = False

                                if preserve:
                                    # Recipe still exists, update it
                                    results = layerrecipes.filter(id=v['id'])[:1]
                                    recipe = results[0]
                                    update_recipe_file(config_data_copy, root, recipe, layerdir_start, repodir)
                                else:
                                    # Recipe no longer exists, mark it for later on
                                    layerrecipes_delete.append(v)
                                layerrecipe_fns.append(fullpath)

                        layermachines.delete()
                        layerappends.delete()
                        layerclasses.delete()
                        for root, dirs, files in os.walk(layerdir):
                            if '.git' in dirs:
                                dirs.remove('.git')
                            for d in dirs[:]:
                                fullpath = os.path.join(root, d) + os.sep
                                if fullpath in removedirs:
                                    dirs.remove(d)
                            for f in files:
                                fullpath = os.path.join(root, f)
                                (typename, _, filename) = recipeparse.detect_file_type(fullpath, layerdir_start)
                                if typename == 'recipe':
                                    if fullpath not in layerrecipe_fns:
                                        layerrecipes_add.append(fullpath)
                                elif typename == 'bbappend':
                                    append = BBAppend()
                                    append.layerbranch = layerbranch
                                    append.filename = f
                                    append.filepath = os.path.relpath(root, layerdir)
                                    append.save()
                                elif typename == 'machine':
                                    machine = Machine()
                                    machine.layerbranch = layerbranch
                                    machine.name = filename
                                    update_machine_conf_file(fullpath, machine)
                                    machine.save()
                                elif typename == 'bbclass':
                                    bbclass = BBClass()
                                    bbclass.layerbranch = layerbranch
                                    bbclass.name = filename
                                    bbclass.save()

                    for added in layerrecipes_add:
                        # This is good enough without actually parsing the file
                        (pn, pv) = split_recipe_fn(added)
                        oldid = -1
                        for deleted in layerrecipes_delete:
                            if deleted['pn'] == pn:
                                oldid = deleted['id']
                                layerrecipes_delete.remove(deleted)
                                break
                        if oldid > -1:
                            # Reclaim a record we would have deleted
                            results = Recipe.objects.filter(id=oldid)[:1]
                            recipe = results[0]
                            logger.debug("Reclaim %s for %s %s" % (recipe, pn, pv))
                        else:
                            # Create new record
                            logger.debug("Add new recipe %s" % added)
                            recipe = Recipe()
                        recipe.layerbranch = layerbranch
                        recipe.filename = os.path.basename(added)
                        root = os.path.dirname(added)
                        recipe.filepath = os.path.relpath(root, layerdir)
                        update_recipe_file(config_data_copy, root, recipe, layerdir_start, repodir)
                        recipe.save()

                    for deleted in layerrecipes_delete:
                        logger.debug("Delete %s" % deleted)
                        results = Recipe.objects.filter(id=deleted['id'])[:1]
                        recipe = results[0]
                        recipe.delete()

                    # Save repo info
                    layerbranch.vcs_last_rev = topcommit.hexsha
                    layerbranch.vcs_last_commit = datetime.fromtimestamp(topcommit.committed_date)
                else:
                    logger.info("Layer %s is already up-to-date for branch %s" % (layer.name, branchdesc))

                layerbranch.vcs_last_fetch = datetime.now()
                layerbranch.save()

                if options.dryrun:
                    transaction.rollback()
                else:
                    transaction.commit()

                # Slightly hacky way of avoiding memory leaks
                bb.event.ui_queue = []
                bb.parse.parse_py.BBHandler.cached_statements = {}
                bb.codeparser.codeparsercache = bb.codeparser.CodeParserCache()
                if hasattr(bb.codeparser, 'codecache'):
                    bb.codeparser.codecache = bb.codeparser.SetCache()
                bb.fetch._checksum_cache = bb.checksum.FileChecksumCache()
                bb.fetch.urldata_cache = {}
                bb.fetch.saved_headrevs = {}
                bb.parse.__pkgsplit_cache__ = {}
                bb.parse.__mtime_cache = {}
                bb.parse.init_parser(tinfoil.config_data)

            except KeyboardInterrupt:
                transaction.rollback()
                logger.warn("Update interrupted, changes to %s rolled back" % layer.name)
                break
            except:
                import traceback
                traceback.print_exc()
                transaction.rollback()
            finally:
                transaction.leave_transaction_management()

    finally:
        utils.unlock_file(lockfile)

    shutil.rmtree(tempdir)
    sys.exit(0)
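
Example #7 uses the pre-1.8 Django transaction API (enter_transaction_management/managed/commit/rollback, removed in Django 1.8) to commit or roll back each layer's changes independently. A rough modern equivalent of that per-layer pattern is sketched below; process_layer is a hypothetical stand-in for the body of the loop, and the sketch assumes a configured Django project.

# Rough modern-Django sketch of the per-layer commit/rollback handling in
# Example #7; process_layer is a hypothetical stand-in.
from django.db import transaction


def update_layer_atomically(layer, process_layer, dryrun=False):
    try:
        with transaction.atomic():
            process_layer(layer)
            if dryrun:
                # Roll the whole block back instead of committing
                transaction.set_rollback(True)
    except Exception:
        # atomic() has already rolled back; log and move on to the next layer
        import traceback
        traceback.print_exc()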