def import_pkgspec(args):
    """Import all spec files from a package directory into the layer index.

    Scans args.pkgdir for spec files via import_specdir(), creating/updating
    ClassicRecipe records on the selected layer branch, marks recipes no
    longer present as deleted, and optionally updates the branch/layer
    description. Honours args.dry_run by rolling the transaction back.

    Returns 0 on success, non-zero on error.
    """
    utils.setup_django()
    import settings
    from layerindex.models import LayerItem, LayerBranch, Recipe, ClassicRecipe, Machine, BBAppend, BBClass, ComparisonRecipeUpdate
    from django.db import transaction

    ret, layerbranch = check_branch_layer(args)
    if ret:
        return ret

    updateobj = get_update_obj(args)
    logdir = getattr(settings, 'TASK_LOG_DIR')
    if updateobj and updateobj.task_id and logdir:
        pwriter = utils.ProgressWriter(logdir, updateobj.task_id, logger=logger)
    else:
        pwriter = None

    metapath = args.pkgdir
    try:
        with transaction.atomic():
            layerrecipes = ClassicRecipe.objects.filter(layerbranch=layerbranch)
            # import_specdir() is expected to remove every entry it sees from
            # "existing"; whatever remains afterwards has disappeared on disk
            existing = list(layerrecipes.filter(deleted=False).values_list('filepath', 'filename'))
            count = import_specdir(metapath, layerbranch, existing, updateobj, pwriter)
            if count == 0:
                logger.error('No spec files found in directory %s' % metapath)
                return 1
            if args.relative_path:
                layerbranch.local_path = os.path.relpath(metapath, args.relative_path)
            if existing:
                # Flag leftovers as deleted rather than dropping the records
                fpaths = sorted(['%s/%s' % (pth, fn) for pth, fn in existing])
                logger.info('Marking as deleted:\n %s' % '\n '.join(fpaths))
                for entry in existing:
                    layerrecipes.filter(filepath=entry[0], filename=entry[1]).update(deleted=True)
            if args.description:
                logger.debug('Setting description to "%s"' % args.description)
                branch = layerbranch.branch
                branch.short_description = args.description
                branch.save()
                layer = layerbranch.layer
                layer.summary = args.description
                layer.save()
            layerbranch.vcs_last_fetch = datetime.now()
            layerbranch.save()
            if args.dry_run:
                raise DryRunRollbackException()
    except DryRunRollbackException:
        pass
    except Exception:
        # FIX: was a bare "except:", which would also swallow SystemExit and
        # KeyboardInterrupt; narrowed to Exception
        import traceback
        traceback.print_exc()
        return 1
    return 0
def rrs_remove_duplicates(args):
    """Remove duplicate RecipeUpstreamHistory records for the core layer's
    master branch.

    Walks records newest-first and deletes a row while more than one record
    shares its (layerbranch, start_date) pair, so the oldest record survives.
    Honours args.dry_run by rolling the transaction back.

    Returns 0 on success, 1 on error.
    """
    utils.setup_django()
    import settings
    from django.db import transaction
    from rrs.models import RecipeUpstreamHistory
    from layerindex.models import Recipe

    core_layer = utils.get_layer(settings.CORE_LAYER_NAME)
    if not core_layer:
        logger.error('Unable to find core layer %s' % settings.CORE_LAYER_NAME)
        return 1
    core_layerbranch = core_layer.get_layerbranch('master')
    if not core_layerbranch:
        # FIX: previously formatted core_layerbranch.name, but core_layerbranch
        # is None in this branch, so the error path itself raised AttributeError
        logger.error('Unable to find branch master of layer %s' % core_layer.name)
        return 1
    try:
        with transaction.atomic():
            for row in RecipeUpstreamHistory.objects.filter(layerbranch=core_layerbranch).order_by('-id'):
                # Re-count on each iteration: earlier deletions in this loop
                # reduce the count until only one record remains
                if RecipeUpstreamHistory.objects.filter(layerbranch=row.layerbranch,
                                                        start_date=row.start_date).count() > 1:
                    logger.info('Deleting duplicate %d' % row.id)
                    row.delete()
            if args.dry_run:
                raise DryRunRollbackException
    except DryRunRollbackException:
        pass
    return 0
def main():
    # Entry point: generate patches for the changeset named in argv[1],
    # using the output mode/destination given in argv[2].
    if '--help' in sys.argv:
        usage()
        sys.exit(0)
    if len(sys.argv) < 3:
        usage()
        sys.exit(1)

    utils.setup_django()
    import settings

    branch = utils.get_branch('master')
    fetchdir = settings.LAYER_FETCH_DIR

    # Derive the bitbake checkout location from its repo URL via the same
    # fetch-directory naming scheme used for layers
    from layerindex.models import LayerItem
    bitbakeitem = LayerItem()
    bitbakeitem.vcs_url = settings.BITBAKE_REPO_URL
    bitbakepath = os.path.join(fetchdir, bitbakeitem.get_fetch_dir())
    if getattr(settings, 'BITBAKE_PATH', ''):
        bitbakepath = os.path.join(bitbakepath, settings.BITBAKE_PATH)
    if not os.path.exists(bitbakepath):
        sys.stderr.write("Unable to find bitbake checkout at %s" % bitbakepath)
        sys.exit(1)

    # Serialise against other index jobs while the parser is active
    lockfn = os.path.join(fetchdir, "layerindex.lock")
    lockfile = utils.lock_file(lockfn)
    if not lockfile:
        sys.stderr.write("Layer index lock timeout expired\n")
        sys.exit(1)
    try:
        (tinfoil, tempdir) = recipeparse.init_parser(settings, branch, bitbakepath, True)
        try:
            changeset = get_changeset(sys.argv[1])
            if not changeset:
                sys.stderr.write("Unable to find changeset with id %s\n" % sys.argv[1])
                sys.exit(1)
            utils.setup_core_layer_sys_path(settings, branch.name)
            outp = generate_patches(tinfoil, fetchdir, changeset, sys.argv[2])
        finally:
            # Shut the parser down before releasing the index lock
            tinfoil.shutdown()
    finally:
        utils.unlock_file(lockfile)

    if outp:
        print(outp)
    else:
        sys.stderr.write("No changes to write\n")
        sys.exit(1)

    # Temporary directory created by init_parser() above
    shutil.rmtree(tempdir)
    sys.exit(0)
def add_core(self, layers):
    """Find the core layer among *layers*, add it, and return it.

    Returns the matching layer object, or None when none of *layers* is
    named settings.CORE_LAYER_NAME.
    """
    utils.setup_django()
    import settings
    for candidate in layers:
        if self.get_layer_name(candidate) != settings.CORE_LAYER_NAME:
            continue
        # NOTE(review): add_layer() appears to return truthy on failure, and
        # the candidate is returned as core either way — confirm intended
        if self.add_layer(candidate):
            self.logger.info('Failed to add core layer\n')
        self.update()
        return candidate
    return None
def rrs_import(args):
    """Import recipe upstream history from a JSON file (args.infile) created
    by rrs_export(), attaching all records to the core layer's master branch.

    Honours args.dry_run by rolling the transaction back.
    Returns 0 on success, 1 on error.
    """
    utils.setup_django()
    import settings
    from django.db import transaction
    from rrs.models import RecipeUpstreamHistory, RecipeUpstream
    from layerindex.models import Recipe

    core_layer = utils.get_layer(settings.CORE_LAYER_NAME)
    if not core_layer:
        logger.error('Unable to find core layer %s' % settings.CORE_LAYER_NAME)
        return 1
    core_layerbranch = core_layer.get_layerbranch('master')
    if not core_layerbranch:
        # FIX: previously formatted core_layerbranch.name, but core_layerbranch
        # is None in this branch, so the error path itself raised AttributeError
        logger.error('Unable to find branch master of layer %s' % core_layer.name)
        return 1
    layerbranch = core_layerbranch
    try:
        with transaction.atomic():
            with open(args.infile, 'r') as f:
                data = json.load(f)
            for item, itemdata in data.items():
                if item == 'recipeupstreamhistory':
                    for histdata in itemdata:
                        ruh = RecipeUpstreamHistory()
                        ruh.start_date = histdata['start_date']
                        ruh.end_date = histdata['end_date']
                        ruh.layerbranch = layerbranch
                        ruh.save()
                        for upstreamdata in histdata['upstreams']:
                            ru = RecipeUpstream()
                            ru.history = ruh
                            pn = upstreamdata['recipe']
                            # Recipes are matched by name within the branch;
                            # unknown recipes are skipped with a warning
                            recipe = Recipe.objects.filter(layerbranch=layerbranch, pn=pn).first()
                            if not recipe:
                                logger.warning('Could not find recipe %s in layerbranch %s' % (pn, layerbranch))
                                continue
                            ru.recipe = recipe
                            ru.version = upstreamdata['version']
                            ru.type = upstreamdata['type']
                            ru.status = upstreamdata['status']
                            ru.no_update_reason = upstreamdata['no_update_reason']
                            ru.date = upstreamdata['date']
                            ru.save()
            if args.dry_run:
                raise DryRunRollbackException
    except DryRunRollbackException:
        pass
    return 0
def try_specfile(args):
    """Parse a single spec file (args.specfile) into a ClassicRecipe and print
    the resulting field values, without persisting anything — the transaction
    is always rolled back.

    Returns 0 on success, non-zero on error.
    """
    utils.setup_django()
    import settings
    from layerindex.models import LayerItem, LayerBranch, Recipe, ClassicRecipe, Machine, BBAppend, BBClass
    from django.db import transaction

    ret, layerbranch = check_branch_layer(args)
    if ret:
        return ret

    specfile = args.specfile
    # Hack to handle files in the current directory
    if not os.sep in specfile:
        specfile = '.' + os.sep + specfile
    metapath = os.path.dirname(specfile)
    try:
        with transaction.atomic():
            recipe = ClassicRecipe()
            recipe.layerbranch = layerbranch
            recipe.filename = os.path.basename(specfile)
            recipe.filepath = os.path.relpath(os.path.dirname(specfile), metapath)
            update_recipe_file(specfile, recipe, metapath, raiseexceptions=True)
            recipe.save()
            # Dump every concrete (non-reverse-relation) field for inspection
            for f in Recipe._meta.get_fields():
                if not (f.auto_created and f.is_relation):
                    print('%s: %s' % (f.name, getattr(recipe, f.name)))
            if recipe.source_set.exists():
                print('sources:')
                for src in recipe.source_set.all():
                    print(' * %s' % src.url)
            if recipe.patch_set.exists():
                print('patches:')
                for patch in recipe.patch_set.all():
                    print(' * %s' % patch.src_path)
            # Deliberate: always roll back — this is a dry "try" command
            raise DryRunRollbackException()
    except DryRunRollbackException:
        pass
    except Exception:
        # FIX: was a bare "except:", which would also swallow SystemExit and
        # KeyboardInterrupt; narrowed to Exception
        import traceback
        traceback.print_exc()
        return 1
    return 0
def main(settings, logfile=None, loglevel=None):
    """Set up Django with *settings*, optionally enabling file logging, and
    return a WSGI handler for the application.

    loglevel, when given, must be the name of a logging level attribute
    (e.g. 'DEBUG'); unknown names are silently ignored.
    """
    setup_django(settings)
    if logfile:
        log_config = {'filename': logfile}
        level = getattr(logging, loglevel, None) if loglevel else None
        if level is not None:
            log_config['level'] = level
        logging.basicConfig(**log_config)
    from django.core.handlers.wsgi import WSGIHandler
    # Hand back the WSGI application entry point
    return WSGIHandler()
def main():
    """Delete Recipe records whose filepath erroneously escapes the layer
    directory (starts with '../').

    Supports --dry-run by rolling the transaction back.
    """
    # FIX: usage string previously read "[options" with an unbalanced bracket
    parser = optparse.OptionParser(usage="""
%prog [options]""")
    parser.add_option("-n", "--dry-run",
                      help="Don't write any data back to the database",
                      action="store_true", dest="dryrun")
    parser.add_option("-d", "--debug",
                      help="Enable debug output",
                      action="store_const", const=logging.DEBUG,
                      dest="loglevel", default=logging.INFO)
    parser.add_option("-q", "--quiet",
                      help="Hide all output except error messages",
                      action="store_const", const=logging.ERROR,
                      dest="loglevel")
    options, args = parser.parse_args(sys.argv)

    utils.setup_django()
    import settings
    from layerindex.models import Recipe
    from django.db import transaction

    logger.setLevel(options.loglevel)

    try:
        with transaction.atomic():
            for recipe in Recipe.objects.filter(filepath__startswith='../'):
                print('Deleting erroneous recipe %s %s' % (recipe.layerbranch, recipe))
                recipe.delete()
            if options.dryrun:
                raise DryRunRollbackException()
    except DryRunRollbackException:
        pass

    sys.exit(0)
def main():
    """Generate patches for the changeset named in argv[1], using the output
    mode/destination in argv[2]. Exits non-zero on error.
    """
    if '--help' in sys.argv:
        usage()
        sys.exit(0)
    if len(sys.argv) < 3:
        usage()
        sys.exit(1)

    utils.setup_django()
    import settings

    branch = utils.get_branch('master')
    fetchdir = settings.LAYER_FETCH_DIR
    bitbakepath = os.path.join(fetchdir, 'bitbake')

    lockfn = os.path.join(fetchdir, "layerindex.lock")
    lockfile = utils.lock_file(lockfn)
    if not lockfile:
        sys.stderr.write("Layer index lock timeout expired\n")
        sys.exit(1)
    tinfoil = None
    try:
        (tinfoil, tempdir) = recipeparse.init_parser(settings, branch, bitbakepath, True)
        changeset = get_changeset(sys.argv[1])
        if not changeset:
            sys.stderr.write("Unable to find changeset with id %s\n" % sys.argv[1])
            sys.exit(1)
        outp = generate_patches(tinfoil, fetchdir, changeset, sys.argv[2])
    finally:
        # FIX: previously called tinfoil.shutdown() unconditionally; when
        # init_parser() raised, that produced a NameError here which masked
        # the original exception
        if tinfoil:
            tinfoil.shutdown()
        utils.unlock_file(lockfile)

    if outp:
        print(outp)
    else:
        sys.stderr.write("No changes to write\n")
        sys.exit(1)

    # Temporary directory created by init_parser() above
    shutil.rmtree(tempdir)
    sys.exit(0)
def main():
    """Generate patches for the changeset named in argv[1], using the output
    mode/destination in argv[2]. Exits non-zero on error.
    """
    if '--help' in sys.argv:
        usage()
        sys.exit(0)
    if len(sys.argv) < 3:
        usage()
        sys.exit(1)

    utils.setup_django()
    import settings

    branch = utils.get_branch('master')
    fetchdir = settings.LAYER_FETCH_DIR
    bitbakepath = os.path.join(fetchdir, 'bitbake')

    lockfn = os.path.join(fetchdir, "layerindex.lock")
    lockfile = utils.lock_file(lockfn)
    if not lockfile:
        sys.stderr.write("Layer index lock timeout expired\n")
        sys.exit(1)
    try:
        (tinfoil, tempdir) = recipeparse.init_parser(settings, branch, bitbakepath, True)
        changeset = get_changeset(sys.argv[1])
        if not changeset:
            sys.stderr.write("Unable to find changeset with id %s\n" % sys.argv[1])
            sys.exit(1)
        outp = generate_patches(tinfoil, fetchdir, changeset, sys.argv[2])
    finally:
        utils.unlock_file(lockfile)

    if outp:
        # FIX: was the Python 2 statement "print outp", a SyntaxError under
        # Python 3 (which the rest of this file uses)
        print(outp)
    else:
        sys.stderr.write("No changes to write\n")
        sys.exit(1)

    # Temporary directory created by init_parser() above
    shutil.rmtree(tempdir)
    sys.exit(0)
def rrs_export(args):
    """Export all recipe upstream history records to a JSON file
    (args.outfile); datetimes are serialised as ISO 8601 strings.

    Returns 0.
    """
    utils.setup_django()
    import settings
    from rrs.models import RecipeUpstreamHistory, RecipeUpstream

    class DatetimeEncoder(json.JSONEncoder):
        # Serialise datetime/date values as ISO 8601 strings
        def default(self, obj):
            if isinstance(obj, (datetime, date)):
                return obj.isoformat()
            return super(DatetimeEncoder, self).default(obj)

    # FIXME this doesn't export the layerbranch associated with the recipe (since it
    # was intended to export from the original forked RRS which was OE-Core only)
    history_entries = []
    for hist in RecipeUpstreamHistory.objects.all():
        upstreams = [
            {
                'recipe': upstream.recipe.pn,
                'version': upstream.version,
                'type': upstream.type,
                'status': upstream.status,
                'no_update_reason': upstream.no_update_reason,
                'date': upstream.date,
            }
            for upstream in hist.recipeupstream_set.all()
        ]
        history_entries.append({
            'start_date': hist.start_date,
            'end_date': hist.end_date,
            'upstreams': upstreams,
        })

    data = {'recipeupstreamhistory': history_entries}
    with open(args.outfile, 'w') as f:
        json.dump(data, f, cls=DatetimeEncoder, indent=4)
    return 0
def main():
    """Update the layer index from the configured layer repositories.

    Fetches each published layer's repository (in parallel), sorts layers so
    that dependencies/recommendations are processed before their dependents,
    then invokes the update-layer subcommand once per layer/branch, recording
    progress and output in Update/LayerUpdate records. Honours --dry-run by
    not saving any records.
    """
    if LooseVersion(git.__version__) < '0.3.1':
        logger.error("Version of GitPython is too old, please install GitPython (python-git) 0.3.1 or later in order to use this script")
        sys.exit(1)

    parser = optparse.OptionParser(usage="""
%prog [options]""")
    parser.add_option(
        "-b", "--branch",
        help="Specify branch(es) to update (use commas to separate multiple). Default is all enabled branches.",
        action="store", dest="branch", default='')
    parser.add_option(
        "-l", "--layer",
        help="Specify layers to update (use commas to separate multiple). Default is all published layers.",
        action="store", dest="layers")
    parser.add_option(
        "-t", "--timeout",
        help="Specify timeout in seconds to get layerindex.lock. Default is 30 seconds.",
        type="int", action="store", dest="timeout", default=30)
    parser.add_option(
        "-r", "--reload",
        help="Reload recipe data instead of updating since last update",
        action="store_true", dest="reload")
    parser.add_option(
        "", "--fullreload",
        help="Discard existing recipe data and fetch it from scratch",
        action="store_true", dest="fullreload")
    parser.add_option("-n", "--dry-run",
                      help="Don't write any data back to the database",
                      action="store_true", dest="dryrun")
    parser.add_option("-x", "--nofetch",
                      help="Don't fetch repositories",
                      action="store_true", dest="nofetch")
    parser.add_option("", "--nocheckout",
                      help="Don't check out branches",
                      action="store_true", dest="nocheckout")
    parser.add_option("", "--stop-on-error",
                      help="Stop on first parsing error",
                      action="store_true", default=False, dest="stop_on_error")
    parser.add_option("-a", "--actual-branch",
                      help="Update actual branch for layer and bitbake",
                      action="store", dest="actual_branch", default='')
    parser.add_option("-d", "--debug",
                      help="Enable debug output",
                      action="store_const", const=logging.DEBUG,
                      dest="loglevel", default=logging.INFO)
    parser.add_option("-q", "--quiet",
                      help="Hide all output except error messages",
                      action="store_const", const=logging.ERROR,
                      dest="loglevel")
    parser.add_option(
        "", "--keep-temp",
        help="Preserve temporary directory at the end instead of deleting it",
        action="store_true")

    options, args = parser.parse_args(sys.argv)
    if len(args) > 1:
        logger.error('unexpected argument "%s"' % args[1])
        parser.print_help()
        sys.exit(1)

    utils.setup_django()
    import settings
    from layerindex.models import Branch, LayerItem, Update, LayerUpdate, LayerBranch

    logger.setLevel(options.loglevel)

    # Resolve the set of branches to process: explicit -b list (validated),
    # otherwise all branches with updates enabled
    if options.branch:
        branches = options.branch.split(',')
        for branch in branches:
            if not utils.get_branch(branch):
                logger.error("Specified branch %s is not valid" % branch)
                sys.exit(1)
    else:
        branchquery = Branch.objects.filter(updates_enabled=True)
        branches = [branch.name for branch in branchquery]

    fetchdir = settings.LAYER_FETCH_DIR
    if not fetchdir:
        logger.error("Please set LAYER_FETCH_DIR in settings.py")
        sys.exit(1)

    # We deliberately exclude status == 'X' ("no update") here
    layerquery_all = LayerItem.objects.filter(comparison=False).filter(status='P')
    if layerquery_all.count() == 0:
        logger.info("No published layers to update")
        sys.exit(1)

    # For -a option to update bitbake branch
    update_bitbake = False
    if options.layers:
        layers = options.layers.split(',')
        if 'bitbake' in layers:
            update_bitbake = True
            layers.remove('bitbake')
        # Validate each requested layer name before building the final query
        for layer in layers:
            layerquery = LayerItem.objects.filter(comparison=False).filter(name=layer)
            if layerquery.count() == 0:
                logger.error('No layers matching specified query "%s"' % layer)
                sys.exit(1)
        layerquery = LayerItem.objects.filter(comparison=False).filter(name__in=layers)
    else:
        layerquery = layerquery_all
        update_bitbake = True

    if options.actual_branch:
        if not options.branch:
            logger.error("-a option requires -b")
            sys.exit(1)
        elif len(branches) != 1:
            logger.error("Only one branch should be used with -a")
            sys.exit(1)

    if not os.path.exists(fetchdir):
        os.makedirs(fetchdir)

    allrepos = {}        # vcs_url -> (repodir, urldir, fetchdir, layer name)
    fetchedresult = []   # async results from the fetch pool
    fetchedrepos = []    # urls fetched successfully
    failedrepos = {}     # url -> error message

    # We don't want git to prompt for any passwords (e.g. when accessing
    # renamed/hidden github repos)
    os.environ['SSH_ASKPASS'] = ''
    os.environ['GIT_ASKPASS'] = ''
    os.environ['GIT_TERMINAL_PROMPT'] = '0'

    # Capture all log output so it can be stored on the Update record
    listhandler = utils.ListHandler()
    listhandler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
    logger.addHandler(listhandler)

    update = Update()
    update.started = datetime.now()
    if options.fullreload or options.reload:
        update.reload = True
    else:
        update.reload = False
    if not options.dryrun:
        update.save()
    try:
        lockfn = os.path.join(fetchdir, "layerindex.lock")
        lockfile = utils.lock_file(lockfn, options.timeout, logger)
        if not lockfile:
            logger.error("Layer index lock timeout expired")
            sys.exit(1)
        try:
            # Make sure oe-core is fetched since recipe parsing requires it
            layerquery_core = LayerItem.objects.filter(comparison=False).filter(name=settings.CORE_LAYER_NAME)
            if layerquery_core in layerquery:
                layerquery_fetch = list(layerquery)
            else:
                layerquery_fetch = list(layerquery) + list(layerquery_core)
            # Fetch latest metadata from repositories
            for layer in layerquery_fetch:
                # Handle multiple layers in a single repo
                urldir = layer.get_fetch_dir()
                repodir = os.path.join(fetchdir, urldir)
                if layer.vcs_url not in allrepos:
                    allrepos[layer.vcs_url] = (repodir, urldir, fetchdir, layer.name)
            # Add bitbake
            if settings.BITBAKE_REPO_URL not in allrepos:
                bitbakeitem = LayerItem()
                bitbakeitem.vcs_url = settings.BITBAKE_REPO_URL
                bitbakeurldir = bitbakeitem.get_fetch_dir()
                bitbakepath = os.path.join(fetchdir, bitbakeurldir)
                allrepos[settings.BITBAKE_REPO_URL] = (bitbakepath, bitbakeurldir, fetchdir, "bitbake")
            (bitbakepath, _, _, _) = allrepos[settings.BITBAKE_REPO_URL]
            if getattr(settings, 'BITBAKE_PATH', ''):
                bitbakepath = os.path.join(bitbakepath, settings.BITBAKE_PATH)

            if not options.nofetch:
                # Parallel fetching
                pool = multiprocessing.Pool(int(settings.PARALLEL_JOBS))
                for url in allrepos:
                    fetchedresult.append(pool.apply_async(fetch_repo, \
                        (url, allrepos[url][0], allrepos[url][1], allrepos[url][2], allrepos[url][3],)))
                pool.close()
                pool.join()

                for url in fetchedresult[:]:
                    # The format is (url, error), the error is None when succeed.
                    if url.get()[1]:
                        failedrepos[url.get()[0]] = url.get()[1]
                    else:
                        fetchedrepos.append(url.get()[0])

                if not (fetchedrepos or update_bitbake):
                    logger.error("No repositories could be fetched, exiting")
                    sys.exit(1)

            if options.actual_branch:
                update_actual_branch(layerquery, fetchdir, branches[0], options, update_bitbake, bitbakepath)
                return

            # Get a safe bitbake branch to call into from this script (used later on)
            safe_bitbake_branch = 'origin/master'
            master_branch = Branch.objects.filter(name='master').first()
            if master_branch and master_branch.bitbake_branch:
                safe_bitbake_branch = 'origin/' + master_branch.bitbake_branch

            # Process and extract data from each layer
            # We now do this by calling out to a separate script; doing otherwise turned out to be
            # unreliable due to leaking memory (we're using bitbake internals in a manner in which
            # they never get used during normal operation).
            failed_layers = {}
            for branch in branches:
                failed_layers[branch] = []
                # If layer_A depends(or recommends) on layer_B, add layer_B before layer_A
                deps_dict_all = {}
                layerquery_sorted = []
                collections = set()
                branchobj = utils.get_branch(branch)
                for layer in layerquery_all:
                    # Get all collections from database, but we can't trust the
                    # one which will be updated since its collections maybe
                    # changed (different from database).
                    if layer in layerquery:
                        continue
                    layerbranch = layer.get_layerbranch(branch)
                    if layerbranch:
                        collections.add((layerbranch.collection, layerbranch.version))

                for layer in layerquery:
                    if layer.vcs_url in failedrepos:
                        logger.info("Skipping update of layer %s - fetch failed" % layer.name)
                        continue

                    layerbranch = layer.get_layerbranch(branch)
                    branchname = branch
                    branchdesc = branch
                    newbranch = False
                    branchobj = utils.get_branch(branch)
                    if layerbranch:
                        if layerbranch.actual_branch:
                            branchname = layerbranch.actual_branch
                            branchdesc = "%s (%s)" % (branch, branchname)
                    else:
                        # LayerBranch doesn't exist for this branch, create it temporarily
                        # (we won't save this - update_layer.py will do the actual creation
                        # if it gets called).
                        newbranch = True
                        layerbranch = LayerBranch()
                        layerbranch.layer = layer
                        layerbranch.branch = branchobj
                        layerbranch_source = layer.get_layerbranch(branchobj)
                        if not layerbranch_source:
                            layerbranch_source = layer.get_layerbranch(None)
                        if layerbranch_source:
                            layerbranch.vcs_subdir = layerbranch_source.vcs_subdir

                    # Collect repo info
                    urldir = layer.get_fetch_dir()
                    repodir = os.path.join(fetchdir, urldir)
                    repo = git.Repo(repodir)
                    if repo.bare:
                        logger.error('Repository %s is bare, not supported' % repodir)
                        continue

                    try:
                        # Always get origin/branchname, so it raises error when branch doesn't exist when nocheckout
                        topcommit = repo.commit('origin/%s' % branchname)
                        if options.nocheckout:
                            topcommit = repo.commit('HEAD')
                    # NOTE(review): bare except - deliberately treats any
                    # failure to resolve the commit as "branch doesn't exist"
                    except:
                        if newbranch:
                            logger.info("Skipping update of layer %s - branch %s doesn't exist" % (layer.name, branchdesc))
                        else:
                            logger.info("layer %s - branch %s no longer exists, removing it from database" % (layer.name, branchdesc))
                            if not options.dryrun:
                                layerbranch.delete()
                        continue

                    if layerbranch.vcs_subdir and not options.nocheckout:
                        # Find latest commit in subdirectory
                        # A bit odd to do it this way but apparently there's no other way in the GitPython API
                        topcommit = next(repo.iter_commits('origin/%s' % branchname, paths=layerbranch.vcs_subdir), None)
                        if not topcommit:
                            print_subdir_error(newbranch, layer.name, layerbranch.vcs_subdir, branchdesc)
                            if not (newbranch and layerbranch.vcs_subdir):
                                logger.error("Failed to get last revision for layer %s on branch %s" % (layer.name, branchdesc))
                            continue

                    if layerbranch.vcs_last_rev == topcommit.hexsha and not update.reload:
                        logger.info("Layer %s is already up-to-date for branch %s" % (layer.name, branchdesc))
                        collections.add((layerbranch.collection, layerbranch.version))
                        continue
                    else:
                        # Check out appropriate branch
                        if not options.nocheckout:
                            utils.checkout_layer_branch(layerbranch, repodir, logger=logger)
                        layerdir = os.path.join(repodir, layerbranch.vcs_subdir)
                        if layerbranch.vcs_subdir and not os.path.exists(layerdir):
                            print_subdir_error(newbranch, layer.name, layerbranch.vcs_subdir, branchdesc)
                            continue

                        if not os.path.exists(os.path.join(layerdir, 'conf/layer.conf')):
                            logger.error("conf/layer.conf not found for layer %s - is subdirectory set correctly?" % layer.name)
                            continue

                        # Initial run: only extract the layer's collection /
                        # version / dependency values for dependency sorting
                        cmd = prepare_update_layer_command(options, branchobj, layer, initial=True)
                        logger.debug('Running layer update command: %s' % cmd)
                        ret, output = utils.run_command_interruptible(cmd)
                        logger.debug('output: %s' % output)
                        if ret == 254:
                            # Interrupted by user, break out of loop
                            logger.info('Update interrupted, exiting')
                            sys.exit(254)
                        elif ret != 0:
                            output = output.rstrip()
                            # Save a layerupdate here or we won't see this output
                            layerupdate = LayerUpdate()
                            layerupdate.update = update
                            layerupdate.layer = layer
                            layerupdate.branch = branchobj
                            layerupdate.started = datetime.now()
                            layerupdate.log = output
                            layerupdate.retcode = ret
                            if not options.dryrun:
                                layerupdate.save()
                            continue
                        col = extract_value('BBFILE_COLLECTIONS', output)
                        if not col:
                            logger.error('Unable to find BBFILE_COLLECTIONS value in initial output')
                            # Assume (perhaps naively) that it's an error specific to the layer
                            continue
                        ver = extract_value('LAYERVERSION', output)
                        deps = extract_value('LAYERDEPENDS', output)
                        recs = extract_value('LAYERRECOMMENDS', output)

                        if not options.nocheckout:
                            # We need to check this out because we're using stuff from bb.utils
                            # below, and if we don't it might be a python 2 revision which would
                            # be an issue
                            utils.checkout_repo(bitbakepath, safe_bitbake_branch, logger=logger)
                        deps_dict = utils.explode_dep_versions2(bitbakepath, deps)
                        recs_dict = utils.explode_dep_versions2(bitbakepath, recs)
                        if not (deps_dict or recs_dict):
                            # No depends, add it firstly
                            layerquery_sorted.append(layer)
                            collections.add((col, ver))
                            continue
                        deps_dict_all[layer] = {'deps': deps_dict, \
                                                'recs': recs_dict, \
                                                'collection': col, \
                                                'version': ver}

                # Move deps_dict_all to layerquery_sorted orderly
                if deps_dict_all:
                    logger.info("Sorting layers for branch %s" % branch)
                    while True:
                        deps_dict_all_copy = deps_dict_all.copy()
                        for layer, value in deps_dict_all_copy.items():
                            for deps_recs in ('deps', 'recs'):
                                for req_col, req_ver_list in value[deps_recs].copy().items():
                                    matched = False
                                    if req_ver_list:
                                        req_ver = req_ver_list[0]
                                    else:
                                        req_ver = None
                                    if utils.is_deps_satisfied(req_col, req_ver, collections):
                                        del(value[deps_recs][req_col])
                            if not (value['deps'] or value['recs']):
                                # All the depends are in collections:
                                del(deps_dict_all[layer])
                                layerquery_sorted.append(layer)
                                collections.add((value['collection'], value['version']))

                        if not len(deps_dict_all):
                            break

                        finished = True
                        # If nothing changed after a run, drop recs and try again
                        if operator.eq(deps_dict_all_copy, deps_dict_all):
                            for layer, value in deps_dict_all.items():
                                if value['recs'] and not value['deps']:
                                    # Add it if recs isn't satisfied only.
                                    logger.warn('Adding %s without LAYERRECOMMENDS...' % layer.name)
                                    del(deps_dict_all[layer])
                                    layerquery_sorted.append(layer)
                                    collections.add((value['collection'], value['version']))
                                    failed_msg = '%s: Added without LAYERRECOMMENDS' % layer.name
                                    failed_layers[branch].append(failed_msg)
                                    finished = False
                                    break
                            if not finished:
                                continue

                            # Nothing changed and no recs-only layers left:
                            # remaining layers have unsatisfiable LAYERDEPENDS
                            logger.warning("Cannot find required collections on branch %s:" % branch)
                            for layer, value in deps_dict_all.items():
                                logger.warn('%s: LAYERDEPENDS: %s LAYERRECOMMENDS: %s' % (layer.name, value['deps'], value['recs']))
                                if value['deps']:
                                    failed_layers[branch].append('%s: Failed to add since LAYERDEPENDS is not satisfied' % layer.name)
                                else:
                                    # Should never come here
                                    logger.error("Unexpected errors when sorting layers")
                                    sys.exit(1)
                            logger.warning("Known collections on branch %s: %s" % (branch, collections))
                            break

                # Full update pass in dependency order
                for layer in layerquery_sorted:
                    layerupdate = LayerUpdate()
                    layerupdate.update = update
                    layerupdate.layer = layer
                    layerupdate.branch = branchobj
                    layerbranch = layer.get_layerbranch(branch)
                    if layerbranch:
                        layerupdate.vcs_before_rev = layerbranch.vcs_last_rev
                    errmsg = failedrepos.get(layer.vcs_url, '')
                    if errmsg:
                        logger.info("Skipping update of layer %s as fetch of repository %s failed:\n%s" % (layer.name, layer.vcs_url, errmsg))
                        layerupdate.started = datetime.now()
                        layerupdate.finished = datetime.now()
                        layerupdate.log = 'ERROR: fetch failed: %s' % errmsg
                        if not options.dryrun:
                            layerupdate.save()
                        continue
                    layerupdate.started = datetime.now()
                    if not options.dryrun:
                        layerupdate.save()
                    cmd = prepare_update_layer_command(options, branchobj, layer)
                    logger.debug('Running layer update command: %s' % cmd)
                    ret, output = utils.run_command_interruptible(cmd)
                    layerupdate.finished = datetime.now()
                    # We need to get layerbranch here because it might not have existed until
                    # layer_update.py created it, but it still may not create one (e.g. if subdir
                    # didn't exist) so we still need to check
                    layerbranch = layer.get_layerbranch(branch)
                    if layerbranch:
                        layerupdate.vcs_after_rev = layerbranch.vcs_last_rev
                    layerupdate.log = output
                    layerupdate.retcode = ret
                    if not options.dryrun:
                        layerupdate.save()
                    if ret == 254:
                        # Interrupted by user, break out of loop
                        logger.info('Update interrupted, exiting')
                        sys.exit(254)
                    if options.stop_on_error and ret != 0:
                        logger.info('Layer update failed with --stop-on-error, stopping')
                        sys.exit(1)
            if failed_layers:
                for branch, err_msg_list in failed_layers.items():
                    if err_msg_list:
                        print()
                        logger.error("Issues found on branch %s:\n %s" % (branch, "\n ".join(err_msg_list)))
                        print()
        finally:
            utils.unlock_file(lockfile)
    except KeyboardInterrupt:
        logger.info('Update interrupted, exiting')
        sys.exit(254)
    except Exception:
        import traceback
        logger.error(traceback.format_exc().rstrip())
        sys.exit(1)
    finally:
        # Persist the captured log output on the Update record regardless of
        # how we exited
        update.log = ''.join(listhandler.read())
        update.finished = datetime.now()
        if not options.dryrun:
            update.save()

    if not options.dryrun:
        # Purge old update records
        update_purge_days = getattr(settings, 'UPDATE_PURGE_DAYS', 30)
        Update.objects.filter(started__lte=datetime.now() - timedelta(days=update_purge_days)).delete()

    sys.exit(0)
def main():
    """Import a layer (or several layers sharing one repository) into the
    index from a git URL given on the command line.

    Clones/fetches the repository, locates every directory containing
    conf/layer.conf (or the one given via --subdir), creates LayerItem /
    LayerBranch / LayerDependency / LayerMaintainer records, and extracts
    metadata from README/MAINTAINERS files or the GitHub API. Supports
    --dry-run via transaction rollback.
    """
    parser = optparse.OptionParser(
        usage = """
%prog [options] <url> [name]""")
    parser.add_option("-s", "--subdir",
                      help = "Specify subdirectory",
                      action="store", dest="subdir")
    parser.add_option("-n", "--dry-run",
                      help = "Don't write any data back to the database",
                      action="store_true", dest="dryrun")
    parser.add_option("-d", "--debug",
                      help = "Enable debug output",
                      action="store_const", const=logging.DEBUG,
                      dest="loglevel", default=logging.INFO)
    parser.add_option("", "--github-auth",
                      help = "Specify github username:password",
                      action="store", dest="github_auth")
    parser.add_option("-q", "--quiet",
                      help = "Hide all output except error messages",
                      action="store_const", const=logging.ERROR,
                      dest="loglevel")

    options, args = parser.parse_args(sys.argv)
    if len(args) < 2:
        print("Please specify URL of repository for layer")
        sys.exit(1)
    layer_url = args[1]
    if len(args) > 2:
        layer_name = args[2]
    else:
        if options.subdir:
            layer_name = options.subdir
        else:
            # Last non-empty path component of the URL
            # FIX: was filter(None, ...)[-1], which raises TypeError on
            # Python 3 (filter objects are not subscriptable)
            layer_name = list(filter(None, layer_url.split('/')))[-1]
            if layer_name.endswith('.git'):
                layer_name = layer_name[:-4]

    if options.github_auth:
        if not ':' in options.github_auth:
            logger.error('--github-auth value must be specified as username:password')
            sys.exit(1)
        splitval = options.github_auth.split(':')
        github_login = splitval[0]
        github_password = splitval[1]
    else:
        github_login = None
        github_password = None

    utils.setup_django()
    import settings
    from layerindex.models import LayerItem, LayerBranch, LayerDependency, LayerMaintainer
    from django.db import transaction

    logger.setLevel(options.loglevel)

    fetchdir = settings.LAYER_FETCH_DIR
    if not fetchdir:
        logger.error("Please set LAYER_FETCH_DIR in settings.py")
        sys.exit(1)
    if not os.path.exists(fetchdir):
        os.makedirs(fetchdir)

    master_branch = utils.get_branch('master')
    core_layer = None
    transaction.enter_transaction_management()
    transaction.managed(True)
    try:
        # Fetch layer
        logger.info('Fetching repository %s' % layer_url)

        # Template LayerItem; cloned per layer path below by resetting pk
        layer = LayerItem()
        layer.name = layer_name
        layer.status = 'P'
        layer.layer_type = 'M'
        layer.summary = 'tempvalue'
        layer.description = layer.summary

        set_vcs_fields(layer, layer_url)

        urldir = layer.get_fetch_dir()
        repodir = os.path.join(fetchdir, urldir)
        out = None
        try:
            if not os.path.exists(repodir):
                out = utils.runcmd("git clone %s %s" % (layer.vcs_url, urldir), fetchdir, logger=logger)
            else:
                out = utils.runcmd("git fetch", repodir, logger=logger)
        except Exception as e:
            logger.error("Fetch failed: %s" % str(e))
            sys.exit(1)

        actual_branch = ''
        try:
            out = utils.runcmd("git checkout origin/master", repodir, logger=logger)
        except subprocess.CalledProcessError:
            # No master branch; fall back to whatever origin/HEAD points at
            branches = utils.runcmd("git branch -r", repodir, logger=logger)
            for line in branches.splitlines():
                if 'origin/HEAD ->' in line:
                    actual_branch = line.split('-> origin/')[-1]
                    break
            if not actual_branch:
                logger.error("Repository has no master branch nor origin/HEAD")
                sys.exit(1)
            out = utils.runcmd("git checkout origin/%s" % actual_branch, repodir, logger=logger)

        # Find every directory that is a layer (has conf/layer.conf)
        layer_paths = []
        if options.subdir:
            layerdir = os.path.join(repodir, options.subdir)
            if not os.path.exists(layerdir):
                logger.error("Subdirectory %s does not exist in repository for master branch" % options.subdir)
                sys.exit(1)
            if not os.path.exists(os.path.join(layerdir, 'conf/layer.conf')):
                logger.error("conf/layer.conf not found in subdirectory %s" % options.subdir)
                sys.exit(1)
            layer_paths.append(layerdir)
        else:
            if os.path.exists(os.path.join(repodir, 'conf/layer.conf')):
                layer_paths.append(repodir)
            # Find subdirs with a conf/layer.conf
            for subdir in os.listdir(repodir):
                subdir_path = os.path.join(repodir, subdir)
                if os.path.isdir(subdir_path):
                    if os.path.exists(os.path.join(subdir_path, 'conf/layer.conf')):
                        layer_paths.append(subdir_path)

        if not layer_paths:
            logger.error("conf/layer.conf not found in repository or first level subdirectories - is subdirectory set correctly?")
            sys.exit(1)

        if 'github.com' in layer.vcs_url:
            json_data, owner_json_data = get_github_layerinfo(layer.vcs_url, github_login, github_password)

        for layerdir in layer_paths:
            # Reset the primary key so each iteration saves a new record
            layer.pk = None
            if layerdir != repodir:
                subdir = os.path.relpath(layerdir, repodir)
                if len(layer_paths) > 1:
                    layer.name = subdir
            else:
                subdir = ''
            if LayerItem.objects.filter(name=layer.name).exists():
                logger.error('A layer named "%s" already exists in the database' % layer_name)
                sys.exit(1)

            logger.info('Creating layer %s' % layer.name)
            # Guess layer type
            if glob.glob(os.path.join(layerdir, 'conf/distro/*.conf')):
                layer.layer_type = 'D'
            elif glob.glob(os.path.join(layerdir, 'conf/machine/*.conf')):
                layer.layer_type = 'B'
            layer.save()
            layerbranch = LayerBranch()
            layerbranch.layer = layer
            layerbranch.branch = master_branch
            if layerdir != repodir:
                layerbranch.vcs_subdir = subdir
            if actual_branch:
                layerbranch.actual_branch = actual_branch
            layerbranch.save()
            if layer.name != settings.CORE_LAYER_NAME:
                if not core_layer:
                    core_layer = utils.get_layer(settings.CORE_LAYER_NAME)
                if core_layer:
                    # Every non-core layer implicitly depends on the core layer
                    layerdep = LayerDependency()
                    layerdep.layerbranch = layerbranch
                    layerdep.dependency = core_layer
                    layerdep.save()

            # Get some extra meta-information
            readme_files = glob.glob(os.path.join(layerdir, 'README*'))
            if (not readme_files) and subdir:
                readme_files = glob.glob(os.path.join(repodir, 'README*'))
            maintainer_files = glob.glob(os.path.join(layerdir, 'MAINTAINERS'))
            if (not maintainer_files) and subdir:
                maintainer_files = glob.glob(os.path.join(repodir, 'MAINTAINERS'))
            maintainers = []
            if readme_files:
                (desc, maintainers, deps) = readme_extract(readme_files[0])
                if desc:
                    layer.summary = layer.name
                    layer.description = desc
            if maintainer_files:
                # FIX: previously passed readme_files[0] here, re-parsing the
                # README instead of the MAINTAINERS file (and raising
                # IndexError when MAINTAINERS exists but no README does)
                maintainers.extend(maintainers_extract(maintainer_files[0]))

            if (not maintainers) and 'github.com' in layer.vcs_url:
                if json_data:
                    layer.summary = json_data['description']
                    layer.description = layer.summary
                if owner_json_data:
                    owner_name = owner_json_data.get('name', None)
                    owner_email = owner_json_data.get('email', None)
                    if owner_name and owner_email:
                        maintainers.append('%s <%s>' % (owner_name, owner_email))

            # Well-known layers get fixed summaries/types
            if layer.name == 'openembedded-core':
                layer.summary = 'Core metadata'
                layer.layer_type = 'A'
            elif layer.name == 'meta-oe':
                layer.summary = 'Additional shared OE metadata'
                layer.description = layer.summary
                layer.layer_type = 'A'

            if maintainers:
                maint_re = re.compile(r'^"?([^"@$<>]+)"? *<([^<> ]+)>[ -]*(.+)?$')
                for maintentry in maintainers:
                    res = maint_re.match(maintentry)
                    if res:
                        maintainer = LayerMaintainer()
                        maintainer.layerbranch = layerbranch
                        maintainer.name = res.group(1).strip()
                        maintainer.email = res.group(2)
                        if res.group(3):
                            maintainer.responsibility = res.group(3).strip()
                        maintainer.save()

            layer.save()

        if options.dryrun:
            transaction.rollback()
        else:
            transaction.commit()
    except:
        # Roll back on any failure, then re-raise so the error is visible
        transaction.rollback()
        raise
    finally:
        transaction.leave_transaction_management()

    sys.exit(0)
def main():
    """Entry point: scrape the LayerIndex wiki table from www.openembedded.org
    and create LayerItem/LayerBranch/LayerDependency records from its rows.

    Legacy tool: uses Python 2 httplib and the pre-1.6 Django manual
    transaction-management API.  Exits the process with 0 on completion.
    """
    parser = optparse.OptionParser(
        usage = """
    %prog [options]""")

    options, args = parser.parse_args(sys.argv)

    # Django must be configured before the model imports below will work
    utils.setup_django()
    from layerindex.models import LayerItem, LayerBranch, LayerDependency
    from django.db import transaction

    import httplib
    conn = httplib.HTTPConnection("www.openembedded.org")
    # action=raw returns the raw wiki markup rather than rendered HTML
    conn.request("GET", "/wiki/LayerIndex?action=raw")
    resp = conn.getresponse()
    if resp.status in [200, 302]:
        data = resp.read()
        in_table = False
        # Default layer type: 'M' (miscellaneous); overridden per table section
        layer_type = 'M'
        nowiki_re = re.compile(r'</?nowiki>')
        link_re = re.compile(r'\[(http.*) +link\]')
        readme_re = re.compile(r';f=[a-zA-Z0-9/-]*README;')
        master_branch = utils.get_branch('master')
        core_layer = None
        # Old-style manual transaction management: everything below is one
        # transaction, committed at the end or rolled back on any error
        transaction.enter_transaction_management()
        transaction.managed(True)
        try:
            for line in data.splitlines():
                if line.startswith('{|'):
                    # Start of the wiki table
                    in_table = True
                    continue
                if in_table:
                    if line.startswith('|}'):
                        # We're done
                        break
                    elif line.startswith('!'):
                        # Section heading row: determines layer_type for the
                        # following rows
                        section = line.split('|', 1)[1].strip("'")
                        if section.startswith('Base'):
                            layer_type = 'A'
                        elif section.startswith('Board'):
                            layer_type = 'B'
                        elif section.startswith('Software'):
                            layer_type = 'S'
                        elif section.startswith('Distribution'):
                            layer_type = 'D'
                        else:
                            layer_type = 'M'
                    elif not line.startswith('|-'):
                        # Data row (|- rows are separators)
                        if line.startswith("|| ''"):
                            # Comment/annotation row, not a layer entry
                            continue
                        fields = line.split('||')
                        layer = LayerItem()
                        layer.name = fields[1].strip()
                        if ' ' in layer.name:
                            logger.warn('Skipping layer %s - name invalid' % layer.name)
                            continue
                        logger.info('Adding layer %s' % layer.name)
                        layer.status = 'P'
                        layer.layer_type = layer_type
                        layer.summary = fields[2].strip()
                        layer.description = layer.summary
                        if len(fields) > 6:
                            # Optional usage link column
                            res = link_re.match(fields[6].strip())
                            if res:
                                # NOTE(review): res.groups(1)[0] works for a
                                # single-group pattern but res.group(1) would
                                # be the conventional spelling
                                link = res.groups(1)[0].strip()
                                if link.endswith('/README') or readme_re.search(link):
                                    link = 'README'
                                layer.usage_url = link
                        # Repo URL column may be wrapped in <nowiki> tags
                        repoval = nowiki_re.sub('', fields[4]).strip()
                        layer.vcs_url = repoval
                        # Derive web-view URLs from the known hosting services
                        if repoval.startswith('git://git.openembedded.org/'):
                            reponame = re.sub('^.*/', '', repoval)
                            layer.vcs_web_url = 'http://cgit.openembedded.org/cgit.cgi/' + reponame
                            layer.vcs_web_tree_base_url = 'http://cgit.openembedded.org/cgit.cgi/' + reponame + '/tree/%path%?h=%branch%'
                            layer.vcs_web_file_base_url = 'http://cgit.openembedded.org/cgit.cgi/' + reponame + '/tree/%path%?h=%branch%'
                        elif 'git.yoctoproject.org/' in repoval:
                            reponame = re.sub('^.*/', '', repoval)
                            layer.vcs_web_url = 'http://git.yoctoproject.org/cgit/cgit.cgi/' + reponame
                            layer.vcs_web_tree_base_url = 'http://git.yoctoproject.org/cgit/cgit.cgi/' + reponame + '/tree/%path%?h=%branch%'
                            layer.vcs_web_file_base_url = 'http://git.yoctoproject.org/cgit/cgit.cgi/' + reponame + '/tree/%path%?h=%branch%'
                        elif 'github.com/' in repoval:
                            reponame = re.sub('^.*github.com/', '', repoval)
                            # NOTE(review): '.git$' has an unescaped dot, so it
                            # also strips e.g. 'xgit' suffixes; r'\.git$' intended
                            reponame = re.sub('.git$', '', reponame)
                            layer.vcs_web_url = 'http://github.com/' + reponame
                            layer.vcs_web_tree_base_url = 'http://github.com/' + reponame + '/tree/%branch%/'
                            layer.vcs_web_file_base_url = 'http://github.com/' + reponame + '/blob/%branch%/'
                        elif 'gitorious.org/' in repoval:
                            reponame = re.sub('^.*gitorious.org/', '', repoval)
                            reponame = re.sub('.git$', '', reponame)
                            layer.vcs_web_url = 'http://gitorious.org/' + reponame
                            layer.vcs_web_tree_base_url = 'http://gitorious.org/' + reponame + '/trees/%branch%/'
                            layer.vcs_web_file_base_url = 'http://gitorious.org/' + reponame + '/blobs/%branch%/'
                        elif 'bitbucket.org/' in repoval:
                            reponame = re.sub('^.*bitbucket.org/', '', repoval)
                            reponame = re.sub('.git$', '', reponame)
                            layer.vcs_web_url = 'http://bitbucket.org/' + reponame
                            layer.vcs_web_tree_base_url = 'http://bitbucket.org/' + reponame + '/src/%branch%/%path%?at=%branch%'
                            layer.vcs_web_file_base_url = 'http://bitbucket.org/' + reponame + '/src/%branch%/%path%?at=%branch%'
                        elif '.git' in repoval:
                            # Generic gitweb-style repo: take the web URL from
                            # the tree-link column.
                            # NOTE(review): if link_re does not match, res is
                            # None and the next line raises AttributeError
                            res = link_re.match(fields[5].strip())
                            layer.vcs_web_url = res.groups(1)[0]
                            layer.vcs_web_tree_base_url = re.sub(r'\.git.*', '.git;a=tree;f=%path%;hb=%branch%', layer.vcs_web_url)
                            layer.vcs_web_file_base_url = re.sub(r'\.git.*', '.git;a=blob;f=%path%;hb=%branch%', layer.vcs_web_url)

                        layer.save()
                        # Every layer gets a branch record on master
                        layerbranch = LayerBranch()
                        layerbranch.layer = layer
                        layerbranch.branch = master_branch
                        layerbranch.vcs_subdir = fields[3].strip()
                        layerbranch.save()
                        # All layers other than OE-Core itself implicitly
                        # depend on OE-Core
                        if layer.name != 'openembedded-core':
                            if not core_layer:
                                core_layer = utils.get_layer('openembedded-core')
                            if core_layer:
                                layerdep = LayerDependency()
                                layerdep.layerbranch = layerbranch
                                layerdep.dependency = core_layer
                                layerdep.save()
            transaction.commit()
        except:
            transaction.rollback()
            raise
        finally:
            transaction.leave_transaction_management()
    else:
        logger.error('Fetch failed: %d: %s' % (resp.status, resp.reason))
    sys.exit(0)
def main():
    """Entry point: import layer data from a remote layer index via its REST API.

    Fetches branches, layers, layer branches, recipes and associated child
    objects from the remote index and mirrors them into the local database
    inside a single transaction (rolled back with DryRunRollbackException
    when --dry-run is given).  Exits the process with 0 on completion.

    Fix vs. previous revision: import_items() deleted stale objects based on
    `existing_deps` - a variable leaked from the enclosing layer-dependency
    import - instead of its own `existing_ids` list, and built that list from
    ALL objects of the class (unfiltered by parent, duplicated per parent)
    without ever pruning ids it had just updated.  It now tracks per-parent
    ids and prunes touched ones, so only genuinely stale records are deleted.
    """
    parser = argparse.ArgumentParser(description="Layer index import utility. Imports layer information from another layer index instance using the REST API. WARNING: this will overwrite data in your database, use with caution!")
    parser.add_argument('url', help='Layer index URL to fetch from')
    parser.add_argument('-b', '--branch', action='store', help='Restrict to import a specific branch only (separate multiple branches with commas)')
    parser.add_argument('-l', '--layer', action='store', help='Restrict to import a specific layer only (regular expressions allowed)')
    parser.add_argument('-r', '--reload', action='store_true', help='Reload data even if it is up-to-date')
    parser.add_argument('-n', '--dry-run', action='store_true', help="Don't write any data back to the database")
    parser.add_argument('-d', '--debug', action='store_true', help='Enable debug output')
    parser.add_argument('-q', '--quiet', action='store_true', help='Hide all output except error messages')
    args = parser.parse_args()

    if args.debug:
        loglevel = logging.DEBUG
    elif args.quiet:
        loglevel = logging.WARNING
    else:
        loglevel = logging.INFO

    # Django must be configured before the model imports below will work
    utils.setup_django()
    import settings
    from layerindex.models import Branch, LayerItem, LayerBranch, LayerDependency, LayerMaintainer, LayerNote, Recipe, Source, Patch, PackageConfig, StaticBuildDep, DynamicBuildDep, RecipeFileDependency, Machine, Distro, BBClass, BBAppend, IncFile
    from django.db import transaction

    logger.setLevel(loglevel)

    fetchdir = settings.LAYER_FETCH_DIR
    if not fetchdir:
        logger.error("Please set LAYER_FETCH_DIR in settings.py")
        sys.exit(1)
    if not os.path.exists(fetchdir):
        os.makedirs(fetchdir)

    # Normalise the supplied URL to point at the REST API root
    layerindex_url = args.url
    if not layerindex_url.endswith('/'):
        layerindex_url += '/'
    if not '/layerindex/api/' in layerindex_url:
        layerindex_url += 'layerindex/api/'

    # The API root returns a directory of endpoint URLs
    rq = urllib.request.Request(layerindex_url)
    data = urllib.request.urlopen(rq).read()
    jsdata = json.loads(data.decode('utf-8'))

    branches_url = jsdata['branches']
    layers_url = jsdata['layerItems']
    layerdeps_url = jsdata['layerDependencies']
    layerbranches_url = jsdata['layerBranches']
    # Optional endpoints - older remote indexes may not provide these
    layermaintainers_url = jsdata.get('layerMaintainers', None)
    layernotes_url = jsdata.get('layerNotes', None)
    recipes_url = jsdata.get('recipesExtended', None)
    machines_url = jsdata.get('machines', None)
    distros_url = jsdata.get('distros', None)
    classes_url = jsdata.get('classes', None)
    appends_url = jsdata.get('appends', None)
    incfiles_url = jsdata.get('incFiles', None)

    logger.debug('Getting branches')

    # Get branches (we assume the ones we want are already there, so skip any that aren't)
    rq = urllib.request.Request(branches_url)
    data = urllib.request.urlopen(rq).read()
    jsdata = json.loads(data.decode('utf-8'))
    branch_idmap = {}  # remote branch id -> local Branch object
    filter_branches = []
    if args.branch:
        for branch in args.branch.split(','):
            if not Branch.objects.filter(name=branch).exists():
                logger.error('"%s" is not a valid branch in this database (branches must be created manually first)' % branch)
                sys.exit(1)
            filter_branches.append(branch)
    for branchjs in jsdata:
        if filter_branches and branchjs['name'] not in filter_branches:
            logger.debug('Skipping branch %s, not in specified branch list' % branchjs['name'])
            continue
        res = Branch.objects.filter(name=branchjs['name'])
        if res:
            branch = res.first()
            branch_idmap[branchjs['id']] = branch
        else:
            logger.debug('Skipping branch %s, not in database' % branchjs['name'])

    if args.layer:
        layer_re = re.compile('^' + args.layer + '$')
    else:
        layer_re = None

    try:
        with transaction.atomic():
            # Get layers
            logger.info('Importing layers')
            rq = urllib.request.Request(layers_url)
            data = urllib.request.urlopen(rq).read()
            # datetime_hook converts serialised timestamps back to datetime
            jsdata = json.loads(data.decode('utf-8'), object_hook=datetime_hook)
            layer_idmap = {}  # remote layer id -> local LayerItem object
            exclude_fields = ['id', 'updated']
            for layerjs in jsdata:
                if layer_re and not layer_re.match(layerjs['name']):
                    logger.debug('Skipping layer %s, does not match layer restriction' % layerjs['name'])
                    continue
                layeritem = LayerItem.objects.filter(name=layerjs['name']).first()
                if layeritem:
                    # Already have this layer
                    if layerjs['updated'] <= layeritem.updated and not args.reload:
                        logger.debug('Skipping layer %s, already up-to-date' % layerjs['name'])
                        layer_idmap[layerjs['id']] = layeritem
                        continue
                    else:
                        logger.debug('Updating layer %s' % layerjs['name'])
                else:
                    logger.debug('Adding layer %s' % layerjs['name'])
                    layeritem = LayerItem()
                for key, value in layerjs.items():
                    if key in exclude_fields:
                        continue
                    setattr(layeritem, key, value)
                layeritem.save()
                layer_idmap[layerjs['id']] = layeritem

            # Get layer branches
            logger.debug('Importing layer branches')
            rq = urllib.request.Request(layerbranches_url)
            data = urllib.request.urlopen(rq).read()
            jsdata = json.loads(data.decode('utf-8'), object_hook=datetime_hook)

            layerbranch_idmap = {}  # remote layerbranch id -> local LayerBranch

            def import_child_items(parentobj, objclass, childlist=None, url=None, parent_orig_id=None, parentfield=None, exclude_fields=None, key_fields=None, custom_fields=None, custom_field_cb=None):
                """Import child objects of parentobj, either from an inline
                childlist or by fetching (paginated) data from url; deletes
                local children not present in the remote data."""
                logger.debug('Importing %s for %s' % (objclass._meta.verbose_name_plural, parentobj))

                if parentfield is None:
                    parentfield = parentobj.__class__.__name__.lower()

                if exclude_fields is None:
                    exclude = ['id', parentfield]
                else:
                    exclude = exclude_fields[:]
                if custom_fields is not None:
                    exclude += custom_fields
                if key_fields is None:
                    keys = None
                else:
                    # The parent field always needs to be part of the keys
                    keys = key_fields + [parentfield]

                def fetch_api_url(api_url):
                    rq = urllib.request.Request(api_url)
                    data = urllib.request.urlopen(rq).read()
                    return json.loads(data.decode('utf-8'))

                if url:
                    if parent_orig_id is None:
                        raise Exception('import_child_items: if url is specified then parent_orig_id must also be specified')
                    childjsdata = fetch_api_url(url + '?filter=%s:%s' % (parentfield, parent_orig_id))
                elif childlist is not None:
                    childjsdata = childlist
                else:
                    raise Exception('import_child_items: either url or childlist must be specified')

                manager = getattr(parentobj, objclass.__name__.lower() + '_set')
                existing_ids = list(manager.values_list('id', flat=True))
                updated_ids = []

                def import_list(childjslist):
                    for childjs in childjslist:
                        vals = {}
                        for key, value in childjs.items():
                            if key in exclude:
                                continue
                            vals[key] = value
                        vals[parentfield] = parentobj
                        if keys:
                            keyvals = {k: vals[k] for k in keys}
                        else:
                            keyvals = vals
                        # In the case of multiple records with the same keys (e.g. multiple recipes with same pn),
                        # we need to skip ones we've already touched
                        obj = None
                        created = False
                        for entry in manager.filter(**keyvals):
                            if entry.id not in updated_ids:
                                obj = entry
                                break
                        else:
                            created = True
                            obj = objclass(**keyvals)
                        for key, value in vals.items():
                            setattr(obj, key, value)
                        # Need to have saved before calling custom_field_cb since the function might be adding child objects
                        obj.save()
                        updated_ids.append(obj.id)
                        if custom_field_cb is not None:
                            custom_field_cb(obj, childjs)
                        if not created:
                            if obj.id in existing_ids:
                                existing_ids.remove(obj.id)

                if 'results' in childjsdata:
                    # Paginated response: follow 'next' links until exhausted
                    while True:
                        import_list(childjsdata['results'])
                        if childjsdata.get('next', None):
                            childjsdata = fetch_api_url(childjsdata['next'])
                            if not 'results' in childjsdata:
                                break
                        else:
                            break
                else:
                    import_list(childjsdata)

                # Anything left in existing_ids was not in the remote data
                for idv in existing_ids:
                    objclass.objects.filter(id=idv).delete()

            def package_config_field_handler(package_config, pjsdata):
                # PACKAGECONFIG build deps are many-to-many, handled manually
                for dep in pjsdata['builddeps']:
                    dynamic_build_dependency, created = DynamicBuildDep.objects.get_or_create(name=dep)
                    if created:
                        dynamic_build_dependency.save()
                    dynamic_build_dependency.package_configs.add(package_config)
                    dynamic_build_dependency.recipes.add(package_config.recipe)

            def recipe_field_handler(recipe, recipejs):
                # Import the recipe's nested child collections
                sources = recipejs.get('sources', [])
                import_child_items(recipe, Source, childlist=sources, key_fields=['url'])
                patches = recipejs.get('patches', [])
                import_child_items(recipe, Patch, childlist=patches, key_fields=['path'])
                # Static build deps are many-to-many: add new links, drop stale ones
                existing_deps = list(recipe.staticbuilddep_set.values_list('name', flat=True))
                for dep in recipejs['staticbuilddeps']:
                    depobj, created = StaticBuildDep.objects.get_or_create(name=dep)
                    if created:
                        depobj.save()
                    elif dep in existing_deps:
                        existing_deps.remove(dep)
                    depobj.recipes.add(recipe)
                for existing_dep in existing_deps:
                    recipe.staticbuilddep_set.filter(name=existing_dep).first().recipes.remove(recipe)
                package_configs = recipejs.get('package_configs', [])
                import_child_items(recipe, PackageConfig, childlist=package_configs, custom_fields=['builddeps'], custom_field_cb=package_config_field_handler, key_fields=['feature'])

                # RecipeFileDependency objects need to be handled specially (since they link to a separate LayerBranch)
                existing_filedeps = list(recipe.recipefiledependency_set.values_list('id', flat=True))
                filedeps = recipejs.get('filedeps', [])
                for filedep in filedeps:
                    target_layerbranch = layerbranch_idmap.get(filedep['layerbranch'], None)
                    if target_layerbranch is None:
                        logger.debug('Skipping recipe file dependency on layerbranch %s, branch not imported' % filedep['layerbranch'])
                        continue
                    depobj, created = RecipeFileDependency.objects.get_or_create(recipe=recipe, layerbranch=target_layerbranch, path=filedep['path'])
                    if created:
                        depobj.save()
                    elif depobj.id in existing_filedeps:
                        existing_filedeps.remove(depobj.id)
                for idv in existing_filedeps:
                    RecipeFileDependency.objects.filter(id=idv).delete()

            # Get list of layerbranches that currently exist, so we can delete any that
            # we don't find in the remote layer index (assuming they are on branches
            # that *do* exist in the remote index and are in the list specified by
            # -b/--branch, if any)
            existing_layerbranches = list(LayerBranch.objects.filter(branch__in=branch_idmap.values()).values_list('id', flat=True))

            exclude_fields = ['id', 'layer', 'branch', 'yp_compatible_version', 'updated']
            layercount = len(jsdata)
            for i, layerbranchjs in enumerate(jsdata):
                branch = branch_idmap.get(layerbranchjs['branch'], None)
                if not branch:
                    # We don't have this branch, skip it
                    logger.debug('Skipping layerbranch %s, branch not imported' % layerbranchjs['id'])
                    continue
                layer = layer_idmap.get(layerbranchjs['layer'], None)
                if not layer:
                    # We didn't import this layer, skip it
                    logger.debug('Skipping layerbranch %s, layer not imported' % layerbranchjs['id'])
                    continue
                layerbranch = LayerBranch.objects.filter(layer=layer).filter(branch=branch).first()
                if layerbranch:
                    # The layerbranch already exists (this will occur for layers
                    # that already existed, since we need to have those in layer_idmap
                    # to be able to import layer dependencies)
                    existing_layerbranches.remove(layerbranch.id)
                    if layerbranchjs['updated'] <= layerbranch.updated and not args.reload:
                        logger.debug('Skipping layerbranch %s, already up-to-date' % layerbranchjs['id'])
                        layerbranch_idmap[layerbranchjs['id']] = layerbranch
                        continue
                    logger.info('Updating %s (%d/%d)' % (layerbranch, i+1, layercount))
                else:
                    layerbranch = LayerBranch()
                    layerbranch.branch = branch
                    layerbranch.layer = layer
                    logger.info('Importing %s (%d/%d)' % (layerbranch, i+1, layercount))
                for key, value in layerbranchjs.items():
                    if key in exclude_fields:
                        continue
                    setattr(layerbranch, key, value)
                layerbranch.save()
                layerbranch_idmap[layerbranchjs['id']] = layerbranch
                if recipes_url:
                    import_child_items(layerbranch, Recipe, url=recipes_url, parent_orig_id=layerbranchjs['id'], exclude_fields=['id', 'layerbranch', 'updated'], custom_fields=['sources', 'patches', 'package_configs'], custom_field_cb=recipe_field_handler, key_fields=['pn'])
                if machines_url:
                    import_child_items(layerbranch, Machine, url=machines_url, parent_orig_id=layerbranchjs['id'], exclude_fields=['id', 'layerbranch', 'updated'], key_fields=['name'])
                if distros_url:
                    import_child_items(layerbranch, Distro, url=distros_url, parent_orig_id=layerbranchjs['id'], exclude_fields=['id', 'layerbranch', 'updated'], key_fields=['name'])
                # The models below don't have an "updated" field at present, but it does
                # no harm to leave it as excluded in case it does get added in the future
                if classes_url:
                    import_child_items(layerbranch, BBClass, url=classes_url, parent_orig_id=layerbranchjs['id'], exclude_fields=['id', 'layerbranch', 'updated'], key_fields=['name'])
                if appends_url:
                    import_child_items(layerbranch, BBAppend, url=appends_url, parent_orig_id=layerbranchjs['id'], exclude_fields=['id', 'layerbranch', 'updated'], key_fields=['filename'])
                if incfiles_url:
                    import_child_items(layerbranch, IncFile, url=incfiles_url, parent_orig_id=layerbranchjs['id'], exclude_fields=['id', 'layerbranch', 'updated'], key_fields=['path'])

            # Delete local layerbranches no longer present in the remote index
            for idv in existing_layerbranches:
                layerbranch = LayerBranch.objects.get(id=idv)
                if layer_re is None or layer_re.match(layerbranch.layer.name):
                    logger.debug('Deleting layerbranch %s' % layerbranch)
                    layerbranch.delete()

            # Get layer dependencies
            logger.info('Importing layer dependencies')
            rq = urllib.request.Request(layerdeps_url)
            data = urllib.request.urlopen(rq).read()
            jsdata = json.loads(data.decode('utf-8'))
            exclude_fields = ['id', 'layerbranch', 'dependency', 'updated']
            existing_deps = []
            for layerbranch in layerbranch_idmap.values():
                existing_deps += list(LayerDependency.objects.filter(layerbranch=layerbranch).values_list('id', flat=True))
            for layerdepjs in jsdata:
                layerbranch = layerbranch_idmap.get(layerdepjs['layerbranch'], None)
                if not layerbranch:
                    # We didn't import this layerbranch, skip it
                    continue
                dependency = layer_idmap.get(layerdepjs['dependency'], None)
                if not dependency:
                    # We didn't import the dependency, skip it
                    continue
                layerdep, created = LayerDependency.objects.get_or_create(layerbranch=layerbranch, dependency=dependency)
                if not created and layerdep.id in existing_deps:
                    existing_deps.remove(layerdep.id)
                for key, value in layerdepjs.items():
                    if key in exclude_fields:
                        continue
                    setattr(layerdep, key, value)
                layerdep.save()
            for idv in existing_deps:
                LayerDependency.objects.filter(id=idv).delete()

            def import_items(desc, url, exclude_fields, objclass, idmap, parentfield):
                """Import flat (non-paginated) objects of objclass from url,
                attaching each to the parent mapped in idmap, and delete
                local objects not present in the remote data."""
                logger.debug('Importing %s' % desc)
                rq = urllib.request.Request(url)
                data = urllib.request.urlopen(rq).read()
                jsdata = json.loads(data.decode('utf-8'))
                # FIX: collect only ids belonging to the parents we imported
                # (previously this listed every object of the class, once per
                # parent, and the deletion loop below iterated the unrelated
                # existing_deps list from the layer-dependency import)
                existing_ids = []
                for parentobj in idmap.values():
                    existing_ids += list(objclass.objects.filter(**{parentfield: parentobj}).values_list('id', flat=True))
                for itemjs in jsdata:
                    parentobj = idmap.get(itemjs[parentfield], None)
                    if not parentobj:
                        # We didn't import the parent, skip it
                        continue
                    vals = {}
                    for key, value in itemjs.items():
                        if key in exclude_fields:
                            continue
                        vals[key] = value
                    vals[parentfield] = parentobj
                    manager = getattr(parentobj, objclass.__name__.lower() + '_set')
                    obj, created = manager.get_or_create(**vals)
                    for key, value in vals.items():
                        setattr(obj, key, value)
                    obj.save()
                    # FIX: prune ids we have just created/updated so they are
                    # not deleted as stale below
                    if obj.id in existing_ids:
                        existing_ids.remove(obj.id)
                # Delete only genuinely stale records
                for idv in existing_ids:
                    objclass.objects.filter(id=idv).delete()

            if layermaintainers_url:
                import_items('layer maintainers', layermaintainers_url, ['id', 'layerbranch'], LayerMaintainer, layerbranch_idmap, 'layerbranch')

            if layernotes_url:
                import_items('layer notes', layernotes_url, ['id', 'layer'], LayerNote, layer_idmap, 'layer')

            if args.dry_run:
                # Deliberately abort the atomic block so nothing is written
                raise DryRunRollbackException()
    except DryRunRollbackException:
        pass

    sys.exit(0)
def main(settings): setup_django(settings) utility = ManagementUtility(sys.argv) utility.execute()
def main():
    """Entry point: update the layer index database from the layers' git
    repositories, parsing recipe/machine/append/class data via bitbake.

    Each layer is processed in its own (old-style manual) transaction so a
    failure in one layer does not lose the others; --dry-run rolls back
    instead of committing.  Exits the process with 0 on completion.
    """
    # GitPython 0.3.1+ required for the iter_commits usage below
    if LooseVersion(git.__version__) < '0.3.1':
        logger.error("Version of GitPython is too old, please install GitPython (python-git) 0.3.1 or later in order to use this script")
        sys.exit(1)

    parser = optparse.OptionParser(
        usage = """
    %prog [options]""")

    parser.add_option("-b", "--branch", help = "Specify branch to update", action="store", dest="branch", default='master')
    parser.add_option("-l", "--layer", help = "Specify layers to update (use commas to separate multiple). Default is all published layers.", action="store", dest="layers")
    parser.add_option("-r", "--reload", help = "Reload recipe data instead of updating since last update", action="store_true", dest="reload")
    parser.add_option("", "--fullreload", help = "Discard existing recipe data and fetch it from scratch", action="store_true", dest="fullreload")
    parser.add_option("-n", "--dry-run", help = "Don't write any data back to the database", action="store_true", dest="dryrun")
    parser.add_option("-x", "--nofetch", help = "Don't fetch repositories", action="store_true", dest="nofetch")
    parser.add_option("", "--nocheckout", help = "Don't check out branches", action="store_true", dest="nocheckout")
    parser.add_option("-d", "--debug", help = "Enable debug output", action="store_const", const=logging.DEBUG, dest="loglevel", default=logging.INFO)
    parser.add_option("-q", "--quiet", help = "Hide all output except error messages", action="store_const", const=logging.ERROR, dest="loglevel")

    options, args = parser.parse_args(sys.argv)
    if len(args) > 1:
        logger.error('unexpected argument "%s"' % args[1])
        parser.print_help()
        sys.exit(1)

    # Full reload implies reload
    if options.fullreload:
        options.reload = True

    # Django must be configured before the model imports below will work
    utils.setup_django()
    import settings
    from layerindex.models import LayerItem, LayerBranch, Recipe, RecipeFileDependency, Machine, BBAppend, BBClass
    from django.db import transaction

    logger.setLevel(options.loglevel)

    branch = utils.get_branch(options.branch)
    if not branch:
        logger.error("Specified branch %s is not valid" % options.branch)
        sys.exit(1)

    fetchdir = settings.LAYER_FETCH_DIR
    if not fetchdir:
        logger.error("Please set LAYER_FETCH_DIR in settings.py")
        sys.exit(1)

    # Select which layers to process: explicit list or all published ones
    if options.layers:
        layerquery = LayerItem.objects.filter(classic=False).filter(name__in=options.layers.split(','))
        if layerquery.count() == 0:
            logger.error('No layers matching specified query "%s"' % options.layers)
            sys.exit(1)
    else:
        layerquery = LayerItem.objects.filter(classic=False).filter(status='P')
        if layerquery.count() == 0:
            logger.info("No published layers to update")
            sys.exit(1)

    if not os.path.exists(fetchdir):
        os.makedirs(fetchdir)
    fetchedrepos = []
    failedrepos = []

    # Only one updater may run against the fetch directory at a time
    lockfn = os.path.join(fetchdir, "layerindex.lock")
    lockfile = utils.lock_file(lockfn)
    if not lockfile:
        logger.error("Layer index lock timeout expired")
        sys.exit(1)
    try:
        bitbakepath = os.path.join(fetchdir, 'bitbake')

        if not options.nofetch:
            # Fetch latest metadata from repositories
            for layer in layerquery:
                # Handle multiple layers in a single repo
                urldir = layer.get_fetch_dir()
                repodir = os.path.join(fetchdir, urldir)
                if not (layer.vcs_url in fetchedrepos or layer.vcs_url in failedrepos):
                    logger.info("Fetching remote repository %s" % layer.vcs_url)
                    out = None
                    try:
                        if not os.path.exists(repodir):
                            out = utils.runcmd("git clone %s %s" % (layer.vcs_url, urldir), fetchdir, logger=logger)
                        else:
                            out = utils.runcmd("git fetch", repodir, logger=logger)
                    except Exception as e:
                        logger.error("Fetch of layer %s failed: %s" % (layer.name, str(e)))
                        failedrepos.append(layer.vcs_url)
                        continue
                    fetchedrepos.append(layer.vcs_url)

            if not fetchedrepos:
                logger.error("No repositories could be fetched, exiting")
                sys.exit(1)

            # bitbake itself is needed for recipe parsing
            logger.info("Fetching bitbake from remote repository %s" % settings.BITBAKE_REPO_URL)
            if not os.path.exists(bitbakepath):
                out = utils.runcmd("git clone %s %s" % (settings.BITBAKE_REPO_URL, 'bitbake'), fetchdir, logger=logger)
            else:
                out = utils.runcmd("git fetch", bitbakepath, logger=logger)

        try:
            # tinfoil gives us a configured bitbake parsing environment;
            # tempdir is cleaned up at the end of this function
            (tinfoil, tempdir) = recipeparse.init_parser(settings, branch, bitbakepath, nocheckout=options.nocheckout, logger=logger)
        except recipeparse.RecipeParseError as e:
            logger.error(str(e))
            sys.exit(1)

        # Clear the default value of SUMMARY so that we can use DESCRIPTION instead if it hasn't been set
        tinfoil.config_data.setVar('SUMMARY', '')
        # Clear the default value of DESCRIPTION so that we can see where it's not set
        tinfoil.config_data.setVar('DESCRIPTION', '')
        # Clear the default value of HOMEPAGE ('unknown')
        tinfoil.config_data.setVar('HOMEPAGE', '')
        # Set a blank value for LICENSE so that it doesn't cause the parser to die (e.g. with meta-ti -
        # why won't they just fix that?!)
        tinfoil.config_data.setVar('LICENSE', '')

        # Process and extract data from each layer
        for layer in layerquery:
            # One manual transaction per layer so each layer's update is atomic
            transaction.enter_transaction_management()
            transaction.managed(True)
            try:
                urldir = layer.get_fetch_dir()
                repodir = os.path.join(fetchdir, urldir)
                if layer.vcs_url in failedrepos:
                    logger.info("Skipping update of layer %s as fetch of repository %s failed" % (layer.name, layer.vcs_url))
                    transaction.rollback()
                    continue

                layerbranch = layer.get_layerbranch(options.branch)

                # actual_branch lets a layer map the logical index branch to a
                # differently-named branch in its repository
                branchname = options.branch
                branchdesc = options.branch
                if layerbranch:
                    if layerbranch.actual_branch:
                        branchname = layerbranch.actual_branch
                        branchdesc = "%s (%s)" % (options.branch, branchname)

                # Collect repo info
                repo = git.Repo(repodir)
                assert repo.bare == False
                try:
                    if options.nocheckout:
                        topcommit = repo.commit('HEAD')
                    else:
                        topcommit = repo.commit('origin/%s' % branchname)
                except:
                    if layerbranch:
                        logger.error("Failed update of layer %s - branch %s no longer exists" % (layer.name, branchdesc))
                    else:
                        logger.info("Skipping update of layer %s - branch %s doesn't exist" % (layer.name, branchdesc))
                    transaction.rollback()
                    continue

                newbranch = False
                if not layerbranch:
                    # LayerBranch doesn't exist for this branch, create it
                    newbranch = True
                    layerbranch = LayerBranch()
                    layerbranch.layer = layer
                    layerbranch.branch = branch
                    # Seed new branch record from master (or any branch)
                    layerbranch_source = layer.get_layerbranch('master')
                    if not layerbranch_source:
                        layerbranch_source = layer.get_layerbranch(None)
                    if layerbranch_source:
                        layerbranch.vcs_subdir = layerbranch_source.vcs_subdir
                    layerbranch.save()
                    if layerbranch_source:
                        # Copy maintainers and dependencies from the source
                        # branch (pk/id reset forces INSERT of a new row)
                        for maintainer in layerbranch_source.layermaintainer_set.all():
                            maintainer.pk = None
                            maintainer.id = None
                            maintainer.layerbranch = layerbranch
                            maintainer.save()
                        for dep in layerbranch_source.dependencies_set.all():
                            dep.pk = None
                            dep.id = None
                            dep.layerbranch = layerbranch
                            dep.save()

                if layerbranch.vcs_subdir and not options.nocheckout:
                    # Find latest commit in subdirectory
                    # A bit odd to do it this way but apparently there's no other way in the GitPython API
                    topcommit = next(repo.iter_commits('origin/%s' % branchname, paths=layerbranch.vcs_subdir), None)
                    if not topcommit:
                        # This will error out if the directory is completely invalid or had never existed at this point
                        # If it previously existed but has since been deleted, you will get the revision where it was
                        # deleted - so we need to handle that case separately later
                        if newbranch:
                            logger.info("Skipping update of layer %s for branch %s - subdirectory %s does not exist on this branch" % (layer.name, branchdesc, layerbranch.vcs_subdir))
                        elif layerbranch.vcs_subdir:
                            logger.error("Subdirectory for layer %s does not exist on branch %s - if this is legitimate, the layer branch record should be deleted" % (layer.name, branchdesc))
                        else:
                            logger.error("Failed to get last revision for layer %s on branch %s" % (layer.name, branchdesc))
                        transaction.rollback()
                        continue

                layerdir = os.path.join(repodir, layerbranch.vcs_subdir)
                layerdir_start = os.path.normpath(layerdir) + os.sep
                layerrecipes = Recipe.objects.filter(layerbranch=layerbranch)
                layermachines = Machine.objects.filter(layerbranch=layerbranch)
                layerappends = BBAppend.objects.filter(layerbranch=layerbranch)
                layerclasses = BBClass.objects.filter(layerbranch=layerbranch)
                # Only re-scan if the repo moved on since last time (or --reload)
                if layerbranch.vcs_last_rev != topcommit.hexsha or options.reload:
                    # Check out appropriate branch
                    if not options.nocheckout:
                        out = utils.runcmd("git checkout origin/%s" % branchname, repodir, logger=logger)
                        out = utils.runcmd("git clean -f -x", repodir, logger=logger)

                    if layerbranch.vcs_subdir and not os.path.exists(layerdir):
                        if newbranch:
                            logger.info("Skipping update of layer %s for branch %s - subdirectory %s does not exist on this branch" % (layer.name, branchdesc, layerbranch.vcs_subdir))
                        else:
                            logger.error("Subdirectory for layer %s does not exist on branch %s - if this is legitimate, the layer branch record should be deleted" % (layer.name, branchdesc))
                        transaction.rollback()
                        continue

                    if not os.path.exists(os.path.join(layerdir, 'conf/layer.conf')):
                        logger.error("conf/layer.conf not found for layer %s - is subdirectory set correctly?" % layer.name)
                        transaction.rollback()
                        continue

                    logger.info("Collecting data for layer %s on branch %s" % (layer.name, branchdesc))

                    try:
                        config_data_copy = recipeparse.setup_layer(tinfoil.config_data, fetchdir, layerdir, layer, layerbranch)
                    except recipeparse.RecipeParseError as e:
                        logger.error(str(e))
                        transaction.rollback()
                        continue

                    # Prefer an incremental update driven by the git diff since
                    # the last recorded revision; fall back to a full re-scan
                    if layerbranch.vcs_last_rev and not options.reload:
                        try:
                            diff = repo.commit(layerbranch.vcs_last_rev).diff(topcommit)
                        except Exception as e:
                            logger.warn("Unable to get diff from last commit hash for layer %s - falling back to slow update: %s" % (layer.name, str(e)))
                            diff = None
                    else:
                        diff = None

                    # We handle recipes specially to try to preserve the same id
                    # when recipe upgrades happen (so that if a user bookmarks a
                    # recipe page it remains valid)
                    layerrecipes_delete = []
                    layerrecipes_add = []

                    # Check if any paths should be ignored because there are layers within this layer
                    removedirs = []
                    for root, dirs, files in os.walk(layerdir):
                        for d in dirs:
                            if os.path.exists(os.path.join(root, d, 'conf', 'layer.conf')):
                                removedirs.append(os.path.join(root, d) + os.sep)

                    if diff:
                        # Apply git changes to existing recipe list
                        if layerbranch.vcs_subdir:
                            subdir_start = os.path.normpath(layerbranch.vcs_subdir) + os.sep
                        else:
                            subdir_start = ""

                        updatedrecipes = set()
                        # Deleted files
                        for d in diff.iter_change_type('D'):
                            path = d.a_blob.path
                            if path.startswith(subdir_start):
                                skip = False
                                for d in removedirs:
                                    if path.startswith(d):
                                        skip = True
                                        break
                                if skip:
                                    continue
                                (typename, filepath, filename) = recipeparse.detect_file_type(path, subdir_start)
                                if typename == 'recipe':
                                    values = layerrecipes.filter(filepath=filepath).filter(filename=filename).values('id', 'filepath', 'filename', 'pn')
                                    if len(values):
                                        layerrecipes_delete.append(values[0])
                                        logger.debug("Mark %s for deletion" % values[0])
                                        updatedrecipes.add(os.path.join(values[0]['filepath'], values[0]['filename']))
                                    else:
                                        logger.warn("Deleted recipe %s could not be found" % path)
                                elif typename == 'bbappend':
                                    layerappends.filter(filepath=filepath).filter(filename=filename).delete()
                                elif typename == 'machine':
                                    layermachines.filter(name=filename).delete()
                                elif typename == 'bbclass':
                                    layerclasses.filter(name=filename).delete()

                        # Added files
                        for d in diff.iter_change_type('A'):
                            path = d.b_blob.path
                            if path.startswith(subdir_start):
                                skip = False
                                for d in removedirs:
                                    if path.startswith(d):
                                        skip = True
                                        break
                                if skip:
                                    continue
                                (typename, filepath, filename) = recipeparse.detect_file_type(path, subdir_start)
                                if typename == 'recipe':
                                    layerrecipes_add.append(os.path.join(repodir, path))
                                    logger.debug("Mark %s for addition" % path)
                                    updatedrecipes.add(os.path.join(filepath, filename))
                                elif typename == 'bbappend':
                                    append = BBAppend()
                                    append.layerbranch = layerbranch
                                    append.filename = filename
                                    append.filepath = filepath
                                    append.save()
                                elif typename == 'machine':
                                    machine = Machine()
                                    machine.layerbranch = layerbranch
                                    machine.name = filename
                                    update_machine_conf_file(os.path.join(repodir, path), machine)
                                    machine.save()
                                elif typename == 'bbclass':
                                    bbclass = BBClass()
                                    bbclass.layerbranch = layerbranch
                                    bbclass.name = filename
                                    bbclass.save()

                        # Modified files
                        dirtyrecipes = set()
                        for d in diff.iter_change_type('M'):
                            path = d.a_blob.path
                            if path.startswith(subdir_start):
                                skip = False
                                for d in removedirs:
                                    if path.startswith(d):
                                        skip = True
                                        break
                                if skip:
                                    continue
                                (typename, filepath, filename) = recipeparse.detect_file_type(path, subdir_start)
                                if typename == 'recipe':
                                    logger.debug("Mark %s for update" % path)
                                    results = layerrecipes.filter(filepath=filepath).filter(filename=filename)[:1]
                                    if results:
                                        recipe = results[0]
                                        update_recipe_file(config_data_copy, os.path.join(layerdir, filepath), recipe, layerdir_start, repodir)
                                        recipe.save()
                                        updatedrecipes.add(recipe.full_path())
                                elif typename == 'machine':
                                    results = layermachines.filter(name=filename)
                                    if results:
                                        machine = results[0]
                                        update_machine_conf_file(os.path.join(repodir, path), machine)
                                        machine.save()

                                # A modified file may be included by recipes
                                # (e.g. .inc); mark those recipes for re-parse
                                deps = RecipeFileDependency.objects.filter(layerbranch=layerbranch).filter(path=path)
                                for dep in deps:
                                    dirtyrecipes.add(dep.recipe)

                        for recipe in dirtyrecipes:
                            if not recipe.full_path() in updatedrecipes:
                                update_recipe_file(config_data_copy, os.path.join(layerdir, recipe.filepath), recipe, layerdir_start, repodir)
                    else:
                        # Collect recipe data from scratch
                        layerrecipe_fns = []
                        if options.fullreload:
                            layerrecipes.delete()
                        else:
                            # First, check which recipes still exist
                            layerrecipe_values = layerrecipes.values('id', 'filepath', 'filename', 'pn')
                            for v in layerrecipe_values:
                                root = os.path.join(layerdir, v['filepath'])
                                fullpath = os.path.join(root, v['filename'])
                                preserve = True
                                if os.path.exists(fullpath):
                                    # Drop recipes now shadowed by a sub-layer
                                    for d in removedirs:
                                        if fullpath.startswith(d):
                                            preserve = False
                                            break
                                else:
                                    preserve = False

                                if preserve:
                                    # Recipe still exists, update it
                                    results = layerrecipes.filter(id=v['id'])[:1]
                                    recipe = results[0]
                                    update_recipe_file(config_data_copy, root, recipe, layerdir_start, repodir)
                                else:
                                    # Recipe no longer exists, mark it for later on
                                    layerrecipes_delete.append(v)
                                layerrecipe_fns.append(fullpath)

                        # Machines/appends/classes are cheap to rebuild wholesale
                        layermachines.delete()
                        layerappends.delete()
                        layerclasses.delete()
                        for root, dirs, files in os.walk(layerdir):
                            if '.git' in dirs:
                                dirs.remove('.git')
                            # Don't descend into nested layers
                            for d in dirs[:]:
                                fullpath = os.path.join(root, d) + os.sep
                                if fullpath in removedirs:
                                    dirs.remove(d)
                            for f in files:
                                fullpath = os.path.join(root, f)
                                (typename, _, filename) = recipeparse.detect_file_type(fullpath, layerdir_start)
                                if typename == 'recipe':
                                    if fullpath not in layerrecipe_fns:
                                        layerrecipes_add.append(fullpath)
                                elif typename == 'bbappend':
                                    append = BBAppend()
                                    append.layerbranch = layerbranch
                                    append.filename = f
                                    append.filepath = os.path.relpath(root, layerdir)
                                    append.save()
                                elif typename == 'machine':
                                    machine = Machine()
                                    machine.layerbranch = layerbranch
                                    machine.name = filename
                                    update_machine_conf_file(fullpath, machine)
                                    machine.save()
                                elif typename == 'bbclass':
                                    bbclass = BBClass()
                                    bbclass.layerbranch = layerbranch
                                    bbclass.name = filename
                                    bbclass.save()

                    for added in layerrecipes_add:
                        # This is good enough without actually parsing the file
                        (pn, pv) = split_recipe_fn(added)
                        oldid = -1
                        for deleted in layerrecipes_delete:
                            if deleted['pn'] == pn:
                                oldid = deleted['id']
                                layerrecipes_delete.remove(deleted)
                                break
                        if oldid > -1:
                            # Reclaim a record we would have deleted
                            results = Recipe.objects.filter(id=oldid)[:1]
                            recipe = results[0]
                            logger.debug("Reclaim %s for %s %s" % (recipe, pn, pv))
                        else:
                            # Create new record
                            logger.debug("Add new recipe %s" % added)
                            recipe = Recipe()
                            recipe.layerbranch = layerbranch
                        # Shared tail: (re)point the record at the added file
                        recipe.filename = os.path.basename(added)
                        root = os.path.dirname(added)
                        recipe.filepath = os.path.relpath(root, layerdir)
                        update_recipe_file(config_data_copy, root, recipe, layerdir_start, repodir)
                        recipe.save()

                    for deleted in layerrecipes_delete:
                        logger.debug("Delete %s" % deleted)
                        results = Recipe.objects.filter(id=deleted['id'])[:1]
                        recipe = results[0]
                        recipe.delete()

                    # Save repo info
                    layerbranch.vcs_last_rev = topcommit.hexsha
                    layerbranch.vcs_last_commit = datetime.fromtimestamp(topcommit.committed_date)
                else:
                    logger.info("Layer %s is already up-to-date for branch %s" % (layer.name, branchdesc))

                layerbranch.vcs_last_fetch = datetime.now()
                layerbranch.save()

                if options.dryrun:
                    transaction.rollback()
                else:
                    transaction.commit()

                # Slightly hacky way of avoiding memory leaks
                bb.event.ui_queue = []
                bb.parse.parse_py.BBHandler.cached_statements = {}
                bb.codeparser.codeparsercache = bb.codeparser.CodeParserCache()
                if hasattr(bb.codeparser, 'codecache'):
                    bb.codeparser.codecache = bb.codeparser.SetCache()
                bb.fetch._checksum_cache = bb.checksum.FileChecksumCache()
                bb.fetch.urldata_cache = {}
                bb.fetch.saved_headrevs = {}
                bb.parse.__pkgsplit_cache__={}
                bb.parse.__mtime_cache = {}
                bb.parse.init_parser(tinfoil.config_data)
            except KeyboardInterrupt:
                transaction.rollback()
                logger.warn("Update interrupted, changes to %s rolled back" % layer.name)
                break
            except:
                import traceback
                traceback.print_exc()
                transaction.rollback()
            finally:
                transaction.leave_transaction_management()
    finally:
        utils.unlock_file(lockfile)

    # Clean up the temporary parser work area created by init_parser
    shutil.rmtree(tempdir)
    sys.exit(0)
def import_clearderiv(args):
    """Import an unpacked Clear Linux derivative release source tree.

    Imports three sources of package data under the selected layer branch,
    all as ClassicRecipe records:
      1. the original package spec directory (args.pkgdir),
      2. derivative source RPMs found under <sourcedir>/src/src-rpms
         (extracted so their .spec files can be parsed), and
      3. derivative binary RPMs found under <sourcedir>/src/local-rpms
         (metadata read via ``rpm -qpi``).
    Runs inside one transaction; --dry-run rolls everything back.

    Returns 0 on success, 1 on error.
    """
    utils.setup_django()
    import settings
    from layerindex.models import LayerItem, LayerBranch, Recipe, ClassicRecipe, Machine, BBAppend, BBClass, ComparisonRecipeUpdate
    from django.db import transaction

    ret, layerbranch = check_branch_layer(args)
    if ret:
        return ret

    updateobj = get_update_obj(args)

    # Progress output is only written when running as a recorded update task
    # with a task log directory configured in settings.
    logdir = getattr(settings, 'TASK_LOG_DIR')
    if updateobj and updateobj.task_id and logdir:
        pwriter = utils.ProgressWriter(logdir, updateobj.task_id, logger=logger)
    else:
        pwriter = None

    srcpath = args.sourcedir
    # Presence of src/local-rpms is used as the sanity check that srcpath
    # really is an unpacked derivative release tree.
    localrpmpath = os.path.join(srcpath, 'src', 'local-rpms')
    if not os.path.exists(localrpmpath):
        logger.error('%s does not appear to be an unpacked Clear Linux derivative release source directory' % srcpath)
        return 1

    # FIXME progress reporting isn't right
    try:
        # Single transaction: a dry run (or any error) rolls back all changes.
        with transaction.atomic():
            if args.relative_path:
                layerbranch.local_path = os.path.relpath(args.pkgdir, args.relative_path)
            layerrecipes = ClassicRecipe.objects.filter(layerbranch=layerbranch)
            # Purge previously-deleted records; track the live ones so that
            # anything not re-seen below can be marked deleted at the end.
            layerrecipes.filter(deleted=True).delete()
            existing = list(layerrecipes.filter(deleted=False).values_list('filepath', 'filename'))
            logger.info('Importing original packages')
            import_specdir(args.pkgdir, layerbranch, existing, updateobj, pwriter, pn_overwrite=True)

            srpmpath = os.path.join(srcpath, 'src', 'src-rpms')
            srpms = []
            specpns = []  # recipe names imported from source RPM spec files
            if os.path.exists(srpmpath):
                logger.info('Importing derivative source RPMs')
                for root, dirs, files in os.walk(srpmpath):
                    for f in files:
                        if f.endswith('.src.rpm'):
                            fpath = os.path.join(root, f)
                            srpms.append(fpath)
                # We assume it's OK to put stuff in the package source directory
                extpath = args.pkgdir
                for srpm in srpms:
                    # e.g. foo-1.0-1.src.rpm -> extraction dir "foo-1.0-1"
                    srpmextpath = os.path.join(extpath, os.path.basename(srpm).rsplit('.', 2)[0])
                    try:
                        shutil.rmtree(srpmextpath)
                    except FileNotFoundError:
                        pass
                    os.makedirs(srpmextpath)
                    # Extract the source RPM contents to get at its .spec file
                    cmd = 'rpm2cpio %s | cpio -idmv' % shlex.quote(srpm)
                    output = subprocess.check_output(cmd, shell=True, cwd=srpmextpath).decode('utf-8').rstrip()
                    specfiles = glob.glob(os.path.join(srpmextpath, '*.spec'))
                    recipes = import_specfiles(specfiles, layerbranch, existing, updateobj, extpath, pn_overwrite=True)
                    for recipe in recipes:
                        specpns.append(recipe.pn)

            rpms = []
            for root, dirs, files in os.walk(localrpmpath):
                for f in files:
                    if f.endswith('.rpm'):
                        fpath = os.path.join(root, f)
                        rpms.append(fpath)
            logger.info('Importing derivative binary RPMs')
            srpminfo = {}
            total = len(rpms)
            for count, rpm in enumerate(rpms):
                logger.debug('Processing %s' % rpm)
                # Query the binary RPM's header fields ("Name: value" lines,
                # with a free-form Description section at the end).
                cmd = ['rpm', '-qpi', rpm]
                expanded = subprocess.check_output(cmd).decode('utf-8').rstrip()
                description = []
                indesc = False
                rpminfo = {'Package': rpm}
                for line in expanded.splitlines():
                    if indesc:
                        description.append(line)
                    elif ':' in line:
                        linesplit = line.split(':', 1)
                        key = linesplit[0].rstrip()
                        value = linesplit[1].strip()
                        if key == 'Description':
                            # Everything after this key is description text
                            indesc = True
                        else:
                            rpminfo[key] = value
                rpminfo['Description'] = ' '.join(description)
                # Deduplicate multiple binary RPMs built from the same source
                # RPM, keeping the one with the shortest Name (heuristically
                # the main package rather than -dev/-doc subpackages).
                srpm = rpminfo['Source RPM']
                if srpm in srpminfo:
                    if len(rpminfo['Name']) < len(srpminfo[srpm]['Name']):
                        srpminfo[srpm] = rpminfo.copy()
                    else:
                        logger.debug('Skipping %s (main package already present)' % rpm)
                else:
                    srpminfo[srpm] = rpminfo.copy()
                if pwriter:
                    pwriter.write(int(count / total * 100))

            srcsrcpath = os.path.join(srcpath, 'src')
            for vals in srpminfo.values():
                pkgfn = os.path.basename(vals['Package'])
                pkgpath = os.path.relpath(os.path.dirname(vals['Package']), srcsrcpath)
                if vals['Name'] in specpns:
                    # Source RPM import already created a (richer) record
                    logger.info('Skipping %s (already imported source)' % pkgfn)
                    continue
                recipe, created = ClassicRecipe.objects.get_or_create(layerbranch=layerbranch, pn=vals['Name'])
                if created:
                    logger.info('Importing %s' % pkgfn)
                else:
                    logger.info('Updating %s' % pkgfn)
                recipe.filepath = pkgpath
                recipe.filename = pkgfn
                recipe.pn = vals['Name']
                recipe.pv = vals['Version']
                recipe.section = vals['Group']
                recipe.license = vals['License']
                recipe.summary = vals['Summary']
                recipe.description = vals['Description']
                recipe.homepage = vals.get('URL', '')
                recipe.deleted = False
                recipe.save()
                existingentry = (pkgpath, pkgfn)
                if existingentry in existing:
                    existing.remove(existingentry)
                if updateobj:
                    # Record that this update touched the recipe's metadata
                    rupdate, _ = ComparisonRecipeUpdate.objects.get_or_create(update=updateobj, recipe=recipe)
                    rupdate.meta_updated = True
                    rupdate.save()

            if existing:
                # Anything left in 'existing' was not seen in this import
                fpaths = sorted(['%s/%s' % (pth, fn) for pth, fn in existing])
                logger.info('Marking as deleted:\n %s' % '\n '.join(fpaths))
                for entry in existing:
                    layerrecipes.filter(filepath=entry[0], filename=entry[1]).update(deleted=True)

            layerbranch.vcs_last_fetch = datetime.now()
            layerbranch.save()

            if args.description:
                logger.debug('Setting description to "%s"' % args.description)
                branch = layerbranch.branch
                branch.short_description = args.description
                branch.save()
                layer = layerbranch.layer
                layer.summary = args.description
                layer.save()

            if args.dry_run:
                # Raising aborts the atomic block so nothing is committed
                raise DryRunRollbackException()
    except DryRunRollbackException:
        pass
    except:
        import traceback
        traceback.print_exc()
        return 1

    return 0
def main():
    """Seed the layer database from the OpenEmbedded wiki LayerIndex page.

    Fetches the raw wikitext of the LayerIndex table, parses its rows into
    LayerItem/LayerBranch records on the master branch, derives web-view URL
    templates from each repository URL, and adds an openembedded-core
    dependency for every layer other than openembedded-core itself.
    Exits with status 0 (fetch failures are logged but not fatal).
    """
    parser = optparse.OptionParser(
        usage = """
    %prog [options]""")

    options, args = parser.parse_args(sys.argv)

    utils.setup_django()
    from layerindex.models import LayerItem, LayerBranch, LayerDependency
    from django.db import transaction

    import httplib
    conn = httplib.HTTPConnection("www.openembedded.org")
    conn.request("GET", "/wiki/LayerIndex?action=raw")
    resp = conn.getresponse()
    if resp.status in [200, 302]:
        data = resp.read()
        in_table = False
        # Current section's layer type; updated by '!' heading rows
        layer_type = 'M'
        nowiki_re = re.compile(r'</?nowiki>')
        link_re = re.compile(r'\[(http.*) +link\]')
        readme_re = re.compile(r';f=[a-zA-Z0-9/-]*README;')
        master_branch = utils.get_branch('master')
        core_layer = None
        with transaction.atomic():
            for line in data.splitlines():
                if line.startswith('{|'):
                    # Wikitext table start
                    in_table = True
                    continue
                if in_table:
                    if line.startswith('|}'):
                        # We're done
                        break
                    elif line.startswith('!'):
                        # Section heading row: determines layer type for
                        # the rows that follow
                        section = line.split('|', 1)[1].strip("'")
                        if section.startswith('Base'):
                            layer_type = 'A'
                        elif section.startswith('Board'):
                            layer_type = 'B'
                        elif section.startswith('Software'):
                            layer_type = 'S'
                        elif section.startswith('Distribution'):
                            layer_type = 'D'
                        else:
                            layer_type = 'M'
                    elif not line.startswith('|-'):
                        if line.startswith("|| ''"):
                            # Placeholder/comment row, skip
                            continue
                        # Data row: fields are ||-separated
                        fields = line.split('||')
                        layer = LayerItem()
                        layer.name = fields[1].strip()
                        if ' ' in layer.name:
                            logger.warn('Skipping layer %s - name invalid' % layer.name)
                            continue
                        logger.info('Adding layer %s' % layer.name)
                        layer.status = 'P'
                        layer.layer_type = layer_type
                        layer.summary = fields[2].strip()
                        layer.description = layer.summary
                        if len(fields) > 6:
                            res = link_re.match(fields[6].strip())
                            if res:
                                link = res.groups(1)[0].strip()
                                if link.endswith('/README') or readme_re.search(link):
                                    link = 'README'
                                layer.usage_url = link

                        repoval = nowiki_re.sub('', fields[4]).strip()
                        layer.vcs_url = repoval
                        # Derive web view URL templates from known hosts
                        if repoval.startswith('git://git.openembedded.org/'):
                            reponame = re.sub('^.*/', '', repoval)
                            layer.vcs_web_url = 'http://cgit.openembedded.org/' + reponame
                            layer.vcs_web_tree_base_url = 'http://cgit.openembedded.org/' + reponame + '/tree/%path%?h=%branch%'
                            layer.vcs_web_file_base_url = 'http://cgit.openembedded.org/' + reponame + '/tree/%path%?h=%branch%'
                            layer.vcs_web_commit_url = 'http://cgit.openembedded.org/' + reponame + '/commit/?id=%hash%'
                        elif repoval.startswith('git://git.yoctoproject.org/'):
                            reponame = re.sub('^.*/', '', repoval)
                            layer.vcs_web_url = 'http://git.yoctoproject.org/cgit/cgit.cgi/' + reponame
                            layer.vcs_web_tree_base_url = 'http://git.yoctoproject.org/cgit/cgit.cgi/' + reponame + '/tree/%path%?h=%branch%'
                            layer.vcs_web_file_base_url = 'http://git.yoctoproject.org/cgit/cgit.cgi/' + reponame + '/tree/%path%?h=%branch%'
                            layer.vcs_web_commit_url = 'http://git.yoctoproject.org/cgit/cgit.cgi/' + reponame + '/commit/?id=%hash%'
                        elif repoval.startswith('git://github.com/') or repoval.startswith('http://github.com/') or repoval.startswith('https://github.com/'):
                            reponame = re.sub('^.*github.com/', '', repoval)
                            reponame = re.sub('.git$', '', reponame)
                            layer.vcs_web_url = 'http://github.com/' + reponame
                            layer.vcs_web_tree_base_url = 'http://github.com/' + reponame + '/tree/%branch%/'
                            layer.vcs_web_file_base_url = 'http://github.com/' + reponame + '/blob/%branch%/'
                            layer.vcs_web_commit_url = 'http://github.com/' + reponame + '/commit/%hash%'
                        elif repoval.startswith('git://gitlab.com/') or repoval.startswith('http://gitlab.com/') or repoval.startswith('https://gitlab.com/'):
                            reponame = re.sub('^.*gitlab.com/', '', repoval)
                            reponame = re.sub('.git$', '', reponame)
                            layer.vcs_web_url = 'http://gitlab.com/' + reponame
                            layer.vcs_web_tree_base_url = 'http://gitlab.com/' + reponame + '/tree/%branch%/'
                            layer.vcs_web_file_base_url = 'http://gitlab.com/' + reponame + '/blob/%branch%/'
                            layer.vcs_web_commit_url = 'http://gitlab.com/' + reponame + '/commit/%hash%'
                        elif repoval.startswith('git://bitbucket.org/') or repoval.startswith('http://bitbucket.org/') or repoval.startswith('https://bitbucket.org/'):
                            reponame = re.sub('^.*bitbucket.org/', '', repoval)
                            reponame = re.sub('.git$', '', reponame)
                            layer.vcs_web_url = 'http://bitbucket.org/' + reponame
                            layer.vcs_web_tree_base_url = 'http://bitbucket.org/' + reponame + '/src/%branch%/%path%?at=%branch%'
                            layer.vcs_web_file_base_url = 'http://bitbucket.org/' + reponame + '/src/%branch%/%path%?at=%branch%'
                            layer.vcs_web_commit_url = 'http://bitbucket.org/' + reponame + '/commits/%hash%'
                        elif '.git' in repoval:
                            # Generic gitweb-style repository
                            res = link_re.match(fields[5].strip())
                            layer.vcs_web_url = res.groups(1)[0]
                            layer.vcs_web_tree_base_url = re.sub(r'\.git.*', '.git;a=tree;f=%path%;hb=%branch%', layer.vcs_web_url)
                            layer.vcs_web_file_base_url = re.sub(r'\.git.*', '.git;a=blob;f=%path%;hb=%branch%', layer.vcs_web_url)
                            # BUGFIX: previously this assigned the commit URL to
                            # vcs_web_file_base_url a second time (clobbering the
                            # blob URL) and never set vcs_web_commit_url, unlike
                            # every other host branch above.
                            layer.vcs_web_commit_url = re.sub(r'\.git.*', '.git;a=commit;h=%hash%', layer.vcs_web_url)
                        layer.save()
                        layerbranch = LayerBranch()
                        layerbranch.layer = layer
                        layerbranch.branch = master_branch
                        layerbranch.vcs_subdir = fields[3].strip()
                        layerbranch.save()
                        # Every layer except core depends on openembedded-core
                        if layer.name != 'openembedded-core':
                            if not core_layer:
                                core_layer = utils.get_layer('openembedded-core')
                            if core_layer:
                                layerdep = LayerDependency()
                                layerdep.layerbranch = layerbranch
                                layerdep.dependency = core_layer
                                layerdep.save()
    else:
        logger.error('Fetch failed: %d: %s' % (resp.status, resp.reason))

    sys.exit(0)
def main():
    """Add a layer (or several sub-layers of one repository) to the index.

    Clones/fetches the given repository URL, locates one or more directories
    containing conf/layer.conf, creates LayerItem/LayerBranch records for each
    on the master branch, adds dependencies/recommends from layer.conf, and
    extracts summary/maintainer information from README/MAINTAINERS files or
    (for GitHub repositories) from the GitHub API.  --dry-run rolls back all
    database changes.  Exits non-zero on error.
    """
    valid_layer_name = re.compile(r'[-\w]+$')

    parser = optparse.OptionParser(usage="""
    %prog [options] <url> [name]""")

    utils.setup_django()
    layer_type_help, layer_type_choices = get_layer_type_choices()

    parser.add_option("-s", "--subdir",
            help="Specify subdirectory",
            action="store", dest="subdir")
    parser.add_option("-t", "--type",
            help=layer_type_help,
            choices=layer_type_choices,
            action="store", dest="layer_type", default='')
    parser.add_option("-n", "--dry-run",
            help="Don't write any data back to the database",
            action="store_true", dest="dryrun")
    parser.add_option("-d", "--debug",
            help="Enable debug output",
            action="store_const", const=logging.DEBUG, dest="loglevel", default=logging.INFO)
    parser.add_option("", "--github-auth",
            help="Specify github username:password",
            action="store", dest="github_auth")
    parser.add_option("-q", "--quiet",
            help="Hide all output except error messages",
            action="store_const", const=logging.ERROR, dest="loglevel")
    parser.add_option("-a", "--actual-branch",
            help="Set actual branch",
            action="store", dest="actual_branch")

    options, args = parser.parse_args(sys.argv)

    if len(args) < 2:
        print("Please specify URL of repository for layer")
        sys.exit(1)

    layer_url = args[1]

    # Layer name: explicit argument, else subdir name, else last URL component
    if len(args) > 2:
        layer_name = args[2]
    else:
        if options.subdir:
            layer_name = options.subdir
        else:
            layer_name = [x for x in layer_url.split('/') if x][-1]
            if layer_name.endswith('.git'):
                layer_name = layer_name[:-4]

    if not valid_layer_name.match(layer_name):
        logger.error('Invalid layer name "%s" - Layer name can only include letters, numbers and dashes.', layer_name)
        sys.exit(1)

    if options.github_auth:
        if not ':' in options.github_auth:
            logger.error('--github-auth value must be specified as username:password')
            sys.exit(1)
        splitval = options.github_auth.split(':')
        github_login = splitval[0]
        github_password = splitval[1]
    else:
        github_login = None
        github_password = None

    import settings
    from layerindex.models import LayerItem, LayerBranch, LayerDependency, LayerMaintainer
    from django.db import transaction

    logger.setLevel(options.loglevel)

    fetchdir = settings.LAYER_FETCH_DIR
    if not fetchdir:
        logger.error("Please set LAYER_FETCH_DIR in settings.py")
        sys.exit(1)

    if not os.path.exists(fetchdir):
        os.makedirs(fetchdir)

    master_branch = utils.get_branch('master')
    core_layer = None
    try:
        # Everything below runs in one transaction so --dry-run can roll back
        with transaction.atomic():
            # Fetch layer
            logger.info('Fetching repository %s' % layer_url)

            layer = LayerItem()
            layer.name = layer_name
            layer.status = 'P'
            layer.summary = 'tempvalue'
            layer.description = layer.summary

            set_vcs_fields(layer, layer_url)

            urldir = layer.get_fetch_dir()
            repodir = os.path.join(fetchdir, urldir)
            out = None
            try:
                if not os.path.exists(repodir):
                    out = utils.runcmd("git clone %s %s" % (layer.vcs_url, urldir), fetchdir, logger=logger)
                else:
                    out = utils.runcmd("git fetch", repodir, logger=logger)
            except Exception as e:
                logger.error("Fetch failed: %s" % str(e))
                sys.exit(1)

            actual_branch = 'master'
            if options.actual_branch:
                actual_branch = options.actual_branch
            try:
                out = utils.runcmd("git checkout origin/%s" % actual_branch, repodir, logger=logger)
            except subprocess.CalledProcessError:
                # No such branch - fall back to whatever origin/HEAD points at
                actual_branch = None
                branches = utils.runcmd("git branch -r", repodir, logger=logger)
                for line in branches.splitlines():
                    if 'origin/HEAD ->' in line:
                        actual_branch = line.split('-> origin/')[-1]
                        break
                if not actual_branch:
                    logger.error("Repository has no master branch nor origin/HEAD")
                    sys.exit(1)
                out = utils.runcmd("git checkout origin/%s" % actual_branch, repodir, logger=logger)

            # Locate directories containing conf/layer.conf
            layer_paths = []
            if options.subdir:
                layerdir = os.path.join(repodir, options.subdir)
                if not os.path.exists(layerdir):
                    logger.error("Subdirectory %s does not exist in repository for master branch" % options.subdir)
                    sys.exit(1)
                if not os.path.exists(os.path.join(layerdir, 'conf/layer.conf')):
                    logger.error("conf/layer.conf not found in subdirectory %s" % options.subdir)
                    sys.exit(1)
                layer_paths.append(layerdir)
            else:
                if os.path.exists(os.path.join(repodir, 'conf/layer.conf')):
                    layer_paths.append(repodir)
                # Find subdirs with a conf/layer.conf
                for subdir in os.listdir(repodir):
                    subdir_path = os.path.join(repodir, subdir)
                    if os.path.isdir(subdir_path):
                        if os.path.exists(os.path.join(subdir_path, 'conf/layer.conf')):
                            layer_paths.append(subdir_path)
                if not layer_paths:
                    logger.error("conf/layer.conf not found in repository or first level subdirectories - is subdirectory set correctly?")
                    sys.exit(1)

            if 'github.com' in layer.vcs_url:
                json_data, owner_json_data = get_github_layerinfo(layer.vcs_url, github_login, github_password)

            for layerdir in layer_paths:
                # Reset pk so each iteration saves a new LayerItem row
                layer.pk = None
                if layerdir != repodir:
                    subdir = os.path.relpath(layerdir, repodir)
                    if len(layer_paths) > 1:
                        layer.name = subdir
                else:
                    subdir = ''
                if LayerItem.objects.filter(name=layer.name).exists():
                    if LayerItem.objects.filter(name=layer.name).exclude(vcs_url=layer.vcs_url).exists():
                        # Same name, different repository: a real collision
                        conflict_list = LayerItem.objects.filter(name=layer.name).exclude(vcs_url=layer.vcs_url)
                        conflict_list_urls = []
                        for conflict in conflict_list:
                            conflict_list_urls.append(conflict.vcs_url)
                        cln = ', '.join(conflict_list_urls)
                        logger.error('A layer named "%s" already exists in the database. Possible name collision with %s.vcs_url = %s' % (layer.name, layer.name, cln))
                        sys.exit(1)
                    else:
                        logger.info('The layer named "%s" already exists in the database. Skipping this layer with same vcs_url' % layer.name)
                        layer_paths = [x for x in layer_paths if x != layerdir]
                        continue

                logger.info('Creating layer %s' % layer.name)
                # Guess layer type if not specified
                if options.layer_type:
                    layer.layer_type = options.layer_type
                elif layer.name in ['openembedded-core', 'meta-oe']:
                    layer.layer_type = 'A'
                elif glob.glob(os.path.join(layerdir, 'conf/distro/*.conf')):
                    layer.layer_type = 'D'
                elif glob.glob(os.path.join(layerdir, 'conf/machine/*.conf')):
                    layer.layer_type = 'B'
                else:
                    layer.layer_type = 'M'

                layer.save()
                layerbranch = LayerBranch()
                layerbranch.layer = layer
                layerbranch.branch = master_branch
                if layerdir != repodir:
                    layerbranch.vcs_subdir = subdir
                if actual_branch:
                    layerbranch.actual_branch = actual_branch
                layerbranch.save()
                if layer.name != settings.CORE_LAYER_NAME:
                    if not core_layer:
                        core_layer = utils.get_layer(settings.CORE_LAYER_NAME)
                    if core_layer:
                        logger.debug('Adding dep %s to %s' % (core_layer.name, layer.name))
                        layerdep = LayerDependency()
                        layerdep.layerbranch = layerbranch
                        layerdep.dependency = core_layer
                        layerdep.save()

                # Pull DEPENDS/RECOMMENDS info out of conf/layer.conf
                layerconfparser = LayerConfParse(logger=logger)
                try:
                    config_data = layerconfparser.parse_layer(layerdir)
                    if config_data:
                        utils.add_dependencies(layerbranch, config_data, logger=logger)
                        utils.add_recommends(layerbranch, config_data, logger=logger)
                finally:
                    layerconfparser.shutdown()

                # Get some extra meta-information
                readme_files = glob.glob(os.path.join(layerdir, 'README*'))
                if (not readme_files) and subdir:
                    readme_files = glob.glob(os.path.join(repodir, 'README*'))
                maintainer_files = glob.glob(os.path.join(layerdir, 'MAINTAINERS'))
                if (not maintainer_files) and subdir:
                    maintainer_files = glob.glob(os.path.join(repodir, 'MAINTAINERS'))

                maintainers = []
                if readme_files:
                    (desc, maintainers, deps) = readme_extract(readme_files[0])
                    if desc:
                        layer.summary = layer.name
                        layer.description = desc
                if maintainer_files:
                    # BUGFIX: previously passed readme_files[0] here, which
                    # parsed the wrong file and raised IndexError whenever a
                    # MAINTAINERS file existed without any README.
                    maintainers.extend(maintainers_extract(maintainer_files[0]))

                if (not maintainers) and 'github.com' in layer.vcs_url:
                    # Fall back to GitHub repository/owner metadata
                    if json_data:
                        layer.summary = json_data['description']
                        layer.description = layer.summary
                    if owner_json_data:
                        owner_name = owner_json_data.get('name', None)
                        owner_email = owner_json_data.get('email', None)
                        if owner_name and owner_email:
                            maintainers.append('%s <%s>' % (owner_name, owner_email))

                if layer.name == 'openembedded-core':
                    layer.summary = 'Core metadata'
                elif layer.name == 'meta-oe':
                    layer.summary = 'Additional shared OE metadata'
                    layer.description = layer.summary

                if maintainers:
                    # Matches e.g.: "Joe Bloggs" <joe@example.com> - some area
                    maint_re = re.compile(r'^"?([^"@$<>]+)"? *<([^<> ]+)>[ -]*(.+)?$')
                    for maintentry in maintainers:
                        res = maint_re.match(maintentry)
                        if res:
                            maintainer = LayerMaintainer()
                            maintainer.layerbranch = layerbranch
                            maintainer.name = res.group(1).strip()
                            maintainer.email = res.group(2)
                            if res.group(3):
                                maintainer.responsibility = res.group(3).strip()
                            maintainer.save()

                layer.save()

            if not layer_paths:
                logger.error('No layers added.')
                sys.exit(1)

            if options.dryrun:
                raise DryRunRollbackException()
    except DryRunRollbackException:
        pass

    sys.exit(0)
def main():
    """Import OE-Classic recipe metadata into the index (legacy variant).

    Parses all recipes in an OE-Classic metadata checkout using bitbake's
    tinfoil and stores them as ClassicRecipe records under the chosen
    layer/branch (both default to 'oe-classic').  Uses the old manual Django
    transaction-management API; --dry-run rolls back instead of committing.
    """
    parser = optparse.OptionParser(
        usage = """
    %prog [options] <bitbakepath> <oeclassicpath>""")

    parser.add_option("-b", "--branch",
            help = "Specify branch to import into",
            action="store", dest="branch", default='oe-classic')
    parser.add_option("-l", "--layer",
            help = "Specify layer to import into",
            action="store", dest="layer", default='oe-classic')
    parser.add_option("-n", "--dry-run",
            help = "Don't write any data back to the database",
            action="store_true", dest="dryrun")
    parser.add_option("-d", "--debug",
            help = "Enable debug output",
            action="store_const", const=logging.DEBUG, dest="loglevel", default=logging.INFO)
    parser.add_option("-q", "--quiet",
            help = "Hide all output except error messages",
            action="store_const", const=logging.ERROR, dest="loglevel")

    options, args = parser.parse_args(sys.argv)
    if len(args) < 3:
        logger.error('You must specify bitbakepath and oeclassicpath')
        parser.print_help()
        sys.exit(1)
    if len(args) > 3:
        logger.error('unexpected argument "%s"' % args[3])
        parser.print_help()
        sys.exit(1)

    utils.setup_django()
    import settings
    from layerindex.models import LayerItem, LayerBranch, Recipe, ClassicRecipe, Machine, BBAppend, BBClass
    from django.db import transaction

    logger.setLevel(options.loglevel)

    branch = utils.get_branch(options.branch)
    if not branch:
        logger.error("Specified branch %s is not valid" % options.branch)
        sys.exit(1)

    # Find or create the OE-Classic pseudo-layer
    res = list(LayerItem.objects.filter(name=options.layer)[:1])
    if res:
        layer = res[0]
    else:
        layer = LayerItem()
        layer.name = options.layer
        layer.status = 'P'
        layer.layer_type = 'M'
        layer.summary = 'OE-Classic'
        layer.description = 'OpenEmbedded-Classic'
        layer.vcs_url = 'git://git.openembedded.org/openembedded'
        layer.vcs_web_url = 'http://cgit.openembedded.org/cgit.cgi/openembedded'
        layer.vcs_web_tree_base_url = 'http://cgit.openembedded.org/cgit.cgi/openembedded/tree/%path%'
        layer.vcs_web_file_base_url = 'http://cgit.openembedded.org/cgit.cgi/openembedded/tree/%path%'
        layer.classic = True
        layer.save()

    layerbranch = layer.get_layerbranch(options.branch)
    if not layerbranch:
        # LayerBranch doesn't exist for this branch, create it
        layerbranch = LayerBranch()
        layerbranch.layer = layer
        layerbranch.branch = branch
        layerbranch.save()

    fetchdir = settings.LAYER_FETCH_DIR
    if not fetchdir:
        logger.error("Please set LAYER_FETCH_DIR in settings.py")
        sys.exit(1)

    if not os.path.exists(fetchdir):
        os.makedirs(fetchdir)
    fetchedrepos = []
    failedrepos = []

    bitbakepath = args[1]
    oeclassicpath = args[2]

    # Point BBPATH at the bundled oe-classic conf directory plus the checkout
    confparentdir = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../oe-classic'))
    os.environ['BBPATH'] = str("%s:%s" % (confparentdir, oeclassicpath))
    try:
        (tinfoil, tempdir) = recipeparse.init_parser(settings, branch, bitbakepath, nocheckout=True, classic=True, logger=logger)
    except recipeparse.RecipeParseError as e:
        logger.error(str(e))
        sys.exit(1)

    # Clear the default value of SUMMARY so that we can use DESCRIPTION instead if it hasn't been set
    tinfoil.config_data.setVar('SUMMARY', '')
    # Clear the default value of DESCRIPTION so that we can see where it's not set
    tinfoil.config_data.setVar('DESCRIPTION', '')
    # Clear the default value of HOMEPAGE ('unknown')
    tinfoil.config_data.setVar('HOMEPAGE', '')

    # Legacy (pre-Django 1.6) manual transaction management
    transaction.enter_transaction_management()
    transaction.managed(True)
    try:
        layerdir_start = os.path.normpath(oeclassicpath) + os.sep

        layerrecipes = Recipe.objects.filter(layerbranch=layerbranch)
        layermachines = Machine.objects.filter(layerbranch=layerbranch)
        layerappends = BBAppend.objects.filter(layerbranch=layerbranch)
        layerclasses = BBClass.objects.filter(layerbranch=layerbranch)

        try:
            config_data_copy = recipeparse.setup_layer(tinfoil.config_data, fetchdir, oeclassicpath, layer, layerbranch)
        except recipeparse.RecipeParseError as e:
            logger.error(str(e))
            transaction.rollback()
            sys.exit(1)

        # Wipe existing records and re-import everything from scratch
        layerrecipes.delete()
        layermachines.delete()
        layerappends.delete()
        layerclasses.delete()
        for root, dirs, files in os.walk(oeclassicpath):
            if '.git' in dirs:
                dirs.remove('.git')
            for f in files:
                fullpath = os.path.join(root, f)
                (typename, filepath, filename) = recipeparse.detect_file_type(fullpath, layerdir_start)
                if typename == 'recipe':
                    recipe = ClassicRecipe()
                    recipe.layerbranch = layerbranch
                    recipe.filename = filename
                    recipe.filepath = filepath
                    update_recipe_file(config_data_copy, root, recipe, layerdir_start, oeclassicpath)
                    recipe.save()

        layerbranch.vcs_last_fetch = datetime.now()
        layerbranch.save()

        if options.dryrun:
            transaction.rollback()
        else:
            transaction.commit()
    except:
        import traceback
        traceback.print_exc()
        transaction.rollback()
    finally:
        transaction.leave_transaction_management()

    shutil.rmtree(tempdir)

    sys.exit(0)
def import_deblist(args):
    """Import a Debian-style Packages list file into the comparison layer.

    Parses args.pkglistfile (RFC822-style stanzas separated by 'Package:'
    lines, with continuation lines starting with a space) and creates or
    updates a ClassicRecipe for each package.  Packages present in the
    database but absent from the file are marked deleted.  Runs inside one
    transaction; --dry-run rolls everything back.

    Returns 0 on success, 1 on error.
    """
    utils.setup_django()
    import settings
    # BUGFIX: ComparisonRecipeUpdate was missing from this import list even
    # though handle_pkg() below uses it (matching the sibling import functions)
    from layerindex.models import LayerItem, LayerBranch, Recipe, ClassicRecipe, Machine, BBAppend, BBClass, ComparisonRecipeUpdate
    from django.db import transaction

    ret, layerbranch = check_branch_layer(args)
    if ret:
        return ret

    updateobj = get_update_obj(args)

    try:
        # Single transaction so a dry run (or an error) rolls back all changes
        with transaction.atomic():
            layerrecipes = ClassicRecipe.objects.filter(layerbranch=layerbranch)
            # Package names currently live in the DB; names re-seen in the
            # list file are removed, leaving the ones to mark deleted.
            existing = list(layerrecipes.filter(deleted=False).values_list('pn', flat=True))

            def handle_pkg(pkg):
                # Create/update the ClassicRecipe for one parsed stanza
                pkgname = pkg['Package']
                recipe, created = ClassicRecipe.objects.get_or_create(layerbranch=layerbranch, pn=pkgname)
                if created:
                    logger.info('Importing %s' % pkgname)
                elif recipe.deleted:
                    logger.info('Restoring and updating %s' % pkgname)
                    recipe.deleted = False
                else:
                    logger.info('Updating %s' % pkgname)
                filename = pkg.get('Filename', '')
                if filename:
                    recipe.filename = os.path.basename(filename)
                    recipe.filepath = os.path.dirname(filename)
                recipe.section = pkg.get('Section', '')
                description = pkg.get('Description', '')
                if description:
                    # First line is the short summary, the rest the long
                    # description
                    description = description.splitlines()
                    recipe.summary = description.pop(0)
                    recipe.description = ' '.join(description)
                recipe.pv = pkg.get('Version', '')
                recipe.homepage = pkg.get('Homepage', '')
                recipe.license = pkg.get('License', '')
                recipe.save()
                if pkgname in existing:
                    existing.remove(pkgname)
                if updateobj:
                    # Record that this update touched the recipe's metadata
                    rupdate, _ = ComparisonRecipeUpdate.objects.get_or_create(update=updateobj, recipe=recipe)
                    rupdate.meta_updated = True
                    rupdate.save()

            pkginfo = {}
            lastfield = ''
            with open(args.pkglistfile, 'r') as f:
                for line in f:
                    if line.startswith('Package:'):
                        # Next package starting, deal with the last one (unless this is the first)
                        if pkginfo:
                            handle_pkg(pkginfo)
                            pkginfo = {}
                        lastfield = 'Package'
                    if line.startswith(' '):
                        # Continuation of the previous field's value
                        if lastfield:
                            pkginfo[lastfield] += '\n' + line.strip()
                    elif ':' in line:
                        field, value = line.split(':', 1)
                        pkginfo[field] = value.strip()
                        lastfield = field
                    else:
                        lastfield = ''
            if pkginfo:
                # Handle last package
                handle_pkg(pkginfo)

            if existing:
                logger.info('Marking as deleted: %s' % ', '.join(existing))
                layerrecipes.filter(pn__in=existing).update(deleted=True)

            layerbranch.vcs_last_fetch = datetime.now()
            layerbranch.save()

            if args.dry_run:
                raise DryRunRollbackException()
    except DryRunRollbackException:
        pass
    except:
        import traceback
        traceback.print_exc()
        return 1
    # BUGFIX: previously fell off the end returning None on success; return 0
    # to match the other import_* command functions in this file.
    return 0
def main():
    """Import OE-Classic recipe metadata into the comparison database.

    Parses all recipes in an OE-Classic metadata checkout using bitbake's
    tinfoil and stores them as ClassicRecipe records under the chosen
    layer/branch (both default to 'oe-classic').  Existing recipe, machine,
    distro, append and class records for the branch are wiped and re-imported
    from scratch inside one transaction; --dry-run rolls everything back.
    """
    parser = optparse.OptionParser(usage="""
    %prog [options] <bitbakepath> <oeclassicpath>""")

    parser.add_option("-b", "--branch",
            help="Specify branch to import into",
            action="store", dest="branch", default='oe-classic')
    parser.add_option("-l", "--layer",
            help="Specify layer to import into",
            action="store", dest="layer", default='oe-classic')
    parser.add_option("-n", "--dry-run",
            help="Don't write any data back to the database",
            action="store_true", dest="dryrun")
    parser.add_option("-d", "--debug",
            help="Enable debug output",
            action="store_const", const=logging.DEBUG, dest="loglevel", default=logging.INFO)
    parser.add_option("-q", "--quiet",
            help="Hide all output except error messages",
            action="store_const", const=logging.ERROR, dest="loglevel")

    options, args = parser.parse_args(sys.argv)
    if len(args) < 3:
        logger.error('You must specify bitbakepath and oeclassicpath')
        parser.print_help()
        sys.exit(1)
    if len(args) > 3:
        logger.error('unexpected argument "%s"' % args[3])
        parser.print_help()
        sys.exit(1)

    utils.setup_django()
    import settings
    # BUGFIX: Distro was missing from this import list even though
    # layerdistros below uses Distro.objects
    from layerindex.models import LayerItem, LayerBranch, Recipe, ClassicRecipe, Machine, BBAppend, BBClass, Distro
    from django.db import transaction

    logger.setLevel(options.loglevel)

    branch = utils.get_branch(options.branch)
    if not branch:
        logger.error("Specified branch %s is not valid" % options.branch)
        sys.exit(1)

    # Find or create the OE-Classic pseudo-layer
    res = list(LayerItem.objects.filter(name=options.layer)[:1])
    if res:
        layer = res[0]
    else:
        layer = LayerItem()
        layer.name = options.layer
        layer.status = 'P'
        layer.layer_type = 'M'
        layer.summary = 'OE-Classic'
        layer.description = 'OpenEmbedded-Classic'
        layer.vcs_url = 'git://git.openembedded.org/openembedded'
        layer.vcs_web_url = 'http://cgit.openembedded.org/openembedded'
        layer.vcs_web_tree_base_url = 'http://cgit.openembedded.org/openembedded/tree/%path%'
        layer.vcs_web_file_base_url = 'http://cgit.openembedded.org/openembedded/tree/%path%'
        layer.comparison = True
        layer.save()

    layerbranch = layer.get_layerbranch(options.branch)
    if not layerbranch:
        # LayerBranch doesn't exist for this branch, create it
        layerbranch = LayerBranch()
        layerbranch.layer = layer
        layerbranch.branch = branch
        layerbranch.save()

    fetchdir = settings.LAYER_FETCH_DIR
    if not fetchdir:
        logger.error("Please set LAYER_FETCH_DIR in settings.py")
        sys.exit(1)

    if not os.path.exists(fetchdir):
        os.makedirs(fetchdir)
    fetchedrepos = []
    failedrepos = []

    bitbakepath = args[1]
    oeclassicpath = args[2]

    # Point BBPATH at the bundled oe-classic conf directory plus the checkout
    confparentdir = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../oe-classic'))
    os.environ['BBPATH'] = str("%s:%s" % (confparentdir, oeclassicpath))
    try:
        (tinfoil, tempdir) = recipeparse.init_parser(settings, branch, bitbakepath, nocheckout=True, classic=True, logger=logger)
    except recipeparse.RecipeParseError as e:
        logger.error(str(e))
        sys.exit(1)

    # Clear the default value of SUMMARY so that we can use DESCRIPTION instead if it hasn't been set
    tinfoil.config_data.setVar('SUMMARY', '')
    # Clear the default value of DESCRIPTION so that we can see where it's not set
    tinfoil.config_data.setVar('DESCRIPTION', '')
    # Clear the default value of HOMEPAGE ('unknown')
    tinfoil.config_data.setVar('HOMEPAGE', '')

    try:
        with transaction.atomic():
            layerdir_start = os.path.normpath(oeclassicpath) + os.sep

            layerrecipes = Recipe.objects.filter(layerbranch=layerbranch)
            layermachines = Machine.objects.filter(layerbranch=layerbranch)
            layerdistros = Distro.objects.filter(layerbranch=layerbranch)
            layerappends = BBAppend.objects.filter(layerbranch=layerbranch)
            layerclasses = BBClass.objects.filter(layerbranch=layerbranch)

            try:
                config_data_copy = recipeparse.setup_layer(tinfoil.config_data, fetchdir, oeclassicpath, layer, layerbranch, logger)
            except recipeparse.RecipeParseError as e:
                logger.error(str(e))
                sys.exit(1)

            # Wipe existing records and re-import everything from scratch
            layerrecipes.delete()
            layermachines.delete()
            layerdistros.delete()
            layerappends.delete()
            layerclasses.delete()
            for root, dirs, files in os.walk(oeclassicpath):
                if '.git' in dirs:
                    dirs.remove('.git')
                for f in files:
                    fullpath = os.path.join(root, f)
                    (typename, filepath, filename) = recipeparse.detect_file_type(fullpath, layerdir_start)
                    if typename == 'recipe':
                        recipe = ClassicRecipe()
                        recipe.layerbranch = layerbranch
                        recipe.filename = filename
                        recipe.filepath = filepath
                        update_recipe_file(tinfoil, config_data_copy, root, recipe, layerdir_start, oeclassicpath)
                        recipe.save()

            layerbranch.vcs_last_fetch = datetime.now()
            layerbranch.save()

            if options.dryrun:
                raise DryRunRollbackException()
    except DryRunRollbackException:
        pass
    except:
        import traceback
        traceback.print_exc()
    finally:
        tinfoil.shutdown()

    shutil.rmtree(tempdir)

    sys.exit(0)
def main():
    """Update the cover status of ClassicRecipe records for a comparison branch.

    Matches each uncovered/unknown ClassicRecipe against master-branch recipes
    (by name, BBCLASSEXTEND, or language-specific naming heuristics), or
    imports/exports cover status data as JSON.  Writes happen inside a single
    transaction that is rolled back under --dry-run.
    """
    parser = argparse.ArgumentParser(
        description='Comparison recipe cover status update tool')
    parser.add_argument('-b', '--branch', default='oe-classic',
                        help='Specify branch to import into')
    parser.add_argument('-l', '--layer', default='oe-classic',
                        help='Specify layer to import into')
    parser.add_argument('-u', '--update',
                        help='Specify update record to link to')
    parser.add_argument('-n', '--dry-run', action='store_true',
                        help='Don\'t write any data back to the database')
    parser.add_argument('-s', '--skip',
                        help='Skip specified packages (comma-separated list, no spaces)')
    parser.add_argument('-d', '--debug', action='store_const',
                        const=logging.DEBUG, dest='loglevel',
                        default=logging.INFO, help='Enable debug output')
    parser.add_argument('-q', '--quiet', action='store_const',
                        const=logging.ERROR, dest='loglevel',
                        help='Hide all output except error messages')
    parser.add_argument('-i', '--import-data', metavar='FILE',
                        help='Import cover status data')
    parser.add_argument('--ignore-missing', action='store_true',
                        help='Do not warn if a recipe is missing when importing cover status data')
    parser.add_argument('--export-data', metavar='FILE',
                        help='Export cover status data')
    args = parser.parse_args()

    # Django must be configured before importing any models.
    utils.setup_django()
    from layerindex.models import LayerItem, LayerBranch, Recipe, ClassicRecipe, Update, ComparisonRecipeUpdate
    from django.db import transaction

    logger.setLevel(args.loglevel)

    if args.import_data and args.export_data:
        logger.error('--i/--import-data and --export-data are mutually exclusive')
        sys.exit(1)

    layer = LayerItem.objects.filter(name=args.layer).first()
    if not layer:
        logger.error('Specified layer %s does not exist in database' % args.layer)
        sys.exit(1)

    layerbranch = layer.get_layerbranch(args.branch)
    if not layerbranch:
        logger.error("Specified branch %s does not exist in database" % args.branch)
        sys.exit(1)

    if args.skip:
        skiplist = args.skip.split(',')
    else:
        skiplist = []

    if args.export_data:
        # Export mode writes the cover data and exits without touching the DB.
        export(args, layerbranch, skiplist)
        sys.exit(0)

    updateobj = None
    if args.update:
        updateobj = Update.objects.filter(id=int(args.update)).first()
        if not updateobj:
            logger.error("Specified update id %s does not exist in database" % args.update)
            sys.exit(1)

    try:
        with transaction.atomic():
            def recipe_pn_query(pn):
                # Candidate master-branch recipes, best index preference first.
                return Recipe.objects.filter(
                    layerbranch__branch__name='master').filter(pn=pn).order_by(
                        '-layerbranch__layer__index_preference')
            if args.import_data:
                recipequery = ClassicRecipe.objects.filter(
                    layerbranch=layerbranch)
                # Cache of cover-layer name -> LayerBranch to avoid repeated queries.
                layerbranches = {}
                with open(args.import_data, 'r') as f:
                    jsdata = json.load(f)
                for jsitem in jsdata['coverlist']:
                    changed = False
                    pn = jsitem.pop('pn')
                    recipe = recipequery.filter(pn=pn).first()
                    if not recipe:
                        if not args.ignore_missing:
                            logger.warning('Could not find recipe %s in %s' % (pn, layerbranch))
                        continue
                    cover_layer = jsitem.pop('cover_layer', None)
                    if cover_layer:
                        orig_layerbranch = recipe.cover_layerbranch
                        recipe.cover_layerbranch = layerbranches.get(cover_layer, None)
                        if recipe.cover_layerbranch is None:
                            recipe.cover_layerbranch = LayerBranch.objects.filter(
                                branch__name='master', layer__name=cover_layer).first()
                            if recipe.cover_layerbranch is None:
                                logger.warning(
                                    'Could not find cover layer %s in master branch' % cover_layer)
                            else:
                                layerbranches[cover_layer] = recipe.cover_layerbranch
                        if orig_layerbranch != recipe.cover_layerbranch:
                            changed = True
                    elif recipe.cover_layerbranch is not None:
                        # No cover layer in the import data: clear any stale link.
                        recipe.cover_layerbranch = None
                        changed = True
                    valid_fields = [fld.name for fld in ClassicRecipe._meta.get_fields()]
                    for fieldname, value in jsitem.items():
                        if fieldname in valid_fields:
                            if getattr(recipe, fieldname) != value:
                                setattr(recipe, fieldname, value)
                                changed = True
                        else:
                            logger.error('Invalid field %s' % fieldname)
                            sys.exit(1)
                    if changed:
                        logger.info('Updating %s' % pn)
                        utils.validate_fields(recipe)
                        recipe.save()
            else:
                # Matching mode: only look at recipes not yet covered.
                recipequery = ClassicRecipe.objects.filter(
                    layerbranch=layerbranch).filter(deleted=False).filter(
                        cover_status__in=['U', 'N'])
                for recipe in recipequery:
                    if recipe.pn in skiplist:
                        logger.debug('Skipping %s' % recipe.pn)
                        continue
                    updated = False
                    # Normalise name for comparison (lowercase, dashes).
                    sanepn = recipe.pn.lower().replace('_', '-')
                    replquery = recipe_pn_query(sanepn)
                    found = False
                    for replrecipe in replquery:
                        logger.debug('Matched %s in layer %s' % (recipe.pn, replrecipe.layerbranch.layer.name))
                        recipe.cover_layerbranch = replrecipe.layerbranch
                        recipe.cover_pn = replrecipe.pn
                        recipe.cover_status = 'D'
                        recipe.cover_verified = False
                        recipe.save()
                        updated = True
                        found = True
                        break
                    if not found:
                        if layerbranch.layer.name == 'oe-classic':
                            # OE-Classic: try BBCLASSEXTEND / nativesdk-prefix matches.
                            if recipe.pn.endswith('-native') or recipe.pn.endswith('-nativesdk'):
                                searchpn, _, suffix = recipe.pn.rpartition('-')
                                replquery = recipe_pn_query(searchpn)
                                for replrecipe in replquery:
                                    if suffix in replrecipe.bbclassextend.split():
                                        logger.debug('Found BBCLASSEXTEND of %s to cover %s in layer %s' % (replrecipe.pn, recipe.pn, replrecipe.layerbranch.layer.name))
                                        recipe.cover_layerbranch = replrecipe.layerbranch
                                        recipe.cover_pn = replrecipe.pn
                                        recipe.cover_status = 'P'
                                        recipe.cover_verified = False
                                        recipe.save()
                                        updated = True
                                        found = True
                                        break
                                if not found and recipe.pn.endswith('-nativesdk'):
                                    searchpn, _, _ = recipe.pn.rpartition('-')
                                    replquery = recipe_pn_query('nativesdk-%s' % searchpn)
                                    for replrecipe in replquery:
                                        logger.debug('Found replacement %s to cover %s in layer %s' % (replrecipe.pn, recipe.pn, replrecipe.layerbranch.layer.name))
                                        recipe.cover_layerbranch = replrecipe.layerbranch
                                        recipe.cover_pn = replrecipe.pn
                                        recipe.cover_status = 'R'
                                        recipe.cover_verified = False
                                        recipe.save()
                                        updated = True
                                        found = True
                                        break
                        else:
                            # Other comparison distros: guess by upstream source URL.
                            if recipe.source_set.exists():
                                source0 = recipe.source_set.first()
                                if 'pypi.' in source0.url or 'pythonhosted.org' in source0.url:
                                    attempts = ['python3-%s' % sanepn, 'python-%s' % sanepn]
                                    if sanepn.startswith('py'):
                                        attempts.extend(['python3-%s' % sanepn[2:], 'python-%s' % sanepn[2:]])
                                    for attempt in attempts:
                                        replquery = recipe_pn_query(attempt)
                                        for replrecipe in replquery:
                                            logger.debug('Found match %s to cover %s in layer %s' % (replrecipe.pn, recipe.pn, replrecipe.layerbranch.layer.name))
                                            recipe.cover_layerbranch = replrecipe.layerbranch
                                            recipe.cover_pn = replrecipe.pn
                                            recipe.cover_status = 'D'
                                            recipe.cover_verified = False
                                            recipe.save()
                                            updated = True
                                            found = True
                                            break
                                        if found:
                                            break
                                    if not found:
                                        recipe.classic_category = 'python'
                                        recipe.save()
                                        updated = True
                                elif 'cpan.org' in source0.url:
                                    # Map to OE perl naming convention: lib<name>-perl.
                                    perlpn = sanepn
                                    if perlpn.startswith('perl-'):
                                        perlpn = perlpn[5:]
                                    if not (perlpn.startswith('lib') and perlpn.endswith('-perl')):
                                        perlpn = 'lib%s-perl' % perlpn
                                    replquery = recipe_pn_query(perlpn)
                                    for replrecipe in replquery:
                                        logger.debug('Found match %s to cover %s in layer %s' % (replrecipe.pn, recipe.pn, replrecipe.layerbranch.layer.name))
                                        recipe.cover_layerbranch = replrecipe.layerbranch
                                        recipe.cover_pn = replrecipe.pn
                                        recipe.cover_status = 'D'
                                        recipe.cover_verified = False
                                        recipe.save()
                                        updated = True
                                        found = True
                                        break
                                    if not found:
                                        recipe.classic_category = 'perl'
                                        recipe.save()
                                        updated = True
                                elif 'kde.org' in source0.url or 'github.com/KDE' in source0.url:
                                    recipe.classic_category = 'kde'
                                    recipe.save()
                                    updated = True
                            if not found:
                                # Fall back to categorising by package name prefix.
                                if recipe.pn.startswith('R-'):
                                    recipe.classic_category = 'R'
                                    recipe.save()
                                    updated = True
                                elif recipe.pn.startswith('rubygem-'):
                                    recipe.classic_category = 'ruby'
                                    recipe.save()
                                    updated = True
                                elif recipe.pn.startswith('jdk-'):
                                    sanepn = sanepn[4:]
                                    replquery = recipe_pn_query(sanepn)
                                    for replrecipe in replquery:
                                        logger.debug('Found match %s to cover %s in layer %s' % (replrecipe.pn, recipe.pn, replrecipe.layerbranch.layer.name))
                                        recipe.cover_layerbranch = replrecipe.layerbranch
                                        recipe.cover_pn = replrecipe.pn
                                        recipe.cover_status = 'D'
                                        recipe.cover_verified = False
                                        recipe.save()
                                        updated = True
                                        found = True
                                        break
                                    # NOTE(review): category is set even when a match was
                                    # found above — looks intentional but worth confirming.
                                    recipe.classic_category = 'java'
                                    recipe.save()
                                    updated = True
                                elif recipe.pn.startswith('golang-'):
                                    if recipe.pn.startswith('golang-github-'):
                                        sanepn = 'go-' + sanepn[14:]
                                    else:
                                        sanepn = 'go-' + sanepn[7:]
                                    replquery = recipe_pn_query(sanepn)
                                    for replrecipe in replquery:
                                        logger.debug('Found match %s to cover %s in layer %s' % (replrecipe.pn, recipe.pn, replrecipe.layerbranch.layer.name))
                                        recipe.cover_layerbranch = replrecipe.layerbranch
                                        recipe.cover_pn = replrecipe.pn
                                        recipe.cover_status = 'D'
                                        recipe.cover_verified = False
                                        recipe.save()
                                        updated = True
                                        found = True
                                        break
                                    recipe.classic_category = 'go'
                                    recipe.save()
                                    updated = True
                                elif recipe.pn.startswith('gnome-'):
                                    recipe.classic_category = 'gnome'
                                    recipe.save()
                                    updated = True
                                elif recipe.pn.startswith('perl-'):
                                    recipe.classic_category = 'perl'
                                    recipe.save()
                                    updated = True
                    if updated and updateobj:
                        # Record which recipes this update run touched.
                        rupdate, _ = ComparisonRecipeUpdate.objects.get_or_create(
                            update=updateobj, recipe=recipe)
                        rupdate.link_updated = True
                        rupdate.save()
            if args.dry_run:
                raise DryRunRollbackException()
    except DryRunRollbackException:
        # Dry run: the atomic block has been rolled back; nothing was written.
        pass
    sys.exit(0)
def main():
    """Match uncovered OE-Classic recipes against master-branch recipes.

    For each ClassicRecipe with cover status 'U'/'N', look for a master-branch
    recipe with the same name (direct match), via BBCLASSEXTEND, or via a
    nativesdk- prefixed name, and record the cover relationship.

    Fix: the original used transaction.enter_transaction_management() /
    transaction.managed() / leave_transaction_management(), which were removed
    in Django 1.8.  Rewritten to use transaction.atomic() with a
    DryRunRollbackException for --dry-run, matching the other tools in this
    file.
    """
    parser = optparse.OptionParser(
        usage = """
%prog [options]""")
    parser.add_option("-b", "--branch",
            help = "Specify branch to import into",
            action="store", dest="branch", default='oe-classic')
    parser.add_option("-l", "--layer",
            help = "Specify layer to import into",
            action="store", dest="layer", default='oe-classic')
    parser.add_option("-n", "--dry-run",
            help = "Don't write any data back to the database",
            action="store_true", dest="dryrun")
    parser.add_option("-d", "--debug",
            help = "Enable debug output",
            action="store_const", const=logging.DEBUG, dest="loglevel",
            default=logging.INFO)
    parser.add_option("-q", "--quiet",
            help = "Hide all output except error messages",
            action="store_const", const=logging.ERROR, dest="loglevel")
    options, args = parser.parse_args(sys.argv)

    # Django must be configured before importing any models.
    utils.setup_django()
    from layerindex.models import LayerItem, LayerBranch, Recipe, ClassicRecipe
    from django.db import transaction

    logger.setLevel(options.loglevel)

    res = list(LayerItem.objects.filter(name=options.layer)[:1])
    if res:
        layer = res[0]
    else:
        logger.error('Specified layer %s does not exist in database' % options.layer)
        sys.exit(1)

    layerbranch = layer.get_layerbranch(options.branch)
    if not layerbranch:
        logger.error("Specified branch %s does not exist in database" % options.branch)
        sys.exit(1)

    try:
        with transaction.atomic():
            def recipe_pn_query(pn):
                # Candidate master-branch recipes with a matching name.
                return Recipe.objects.filter(layerbranch__branch__name='master').filter(pn=pn).order_by('layerbranch__layer__index_preference')

            recipequery = ClassicRecipe.objects.filter(layerbranch=layerbranch).filter(cover_status__in=['U', 'N'])
            for recipe in recipequery:
                replquery = recipe_pn_query(recipe.pn)
                found = False
                for replrecipe in replquery:
                    logger.debug('Matched %s in layer %s' % (recipe.pn, replrecipe.layerbranch.layer.name))
                    # NOTE(review): cover_pn is deliberately not set here since
                    # the name matched exactly — confirm against callers/UI.
                    recipe.cover_layerbranch = replrecipe.layerbranch
                    recipe.cover_status = 'D'
                    recipe.cover_verified = False
                    recipe.save()
                    found = True
                    break
                if not found:
                    if recipe.pn.endswith('-native') or recipe.pn.endswith('-nativesdk'):
                        # Try the base recipe with a matching BBCLASSEXTEND.
                        searchpn, _, suffix = recipe.pn.rpartition('-')
                        replquery = recipe_pn_query(searchpn)
                        for replrecipe in replquery:
                            if suffix in replrecipe.bbclassextend.split():
                                logger.debug('Found BBCLASSEXTEND of %s to cover %s in layer %s' % (replrecipe.pn, recipe.pn, replrecipe.layerbranch.layer.name))
                                recipe.cover_layerbranch = replrecipe.layerbranch
                                recipe.cover_pn = replrecipe.pn
                                recipe.cover_status = 'P'
                                recipe.cover_verified = False
                                recipe.save()
                                found = True
                                break
                    if not found and recipe.pn.endswith('-nativesdk'):
                        # Modern naming puts nativesdk- at the front.
                        searchpn, _, _ = recipe.pn.rpartition('-')
                        replquery = recipe_pn_query('nativesdk-%s' % searchpn)
                        for replrecipe in replquery:
                            logger.debug('Found replacement %s to cover %s in layer %s' % (replrecipe.pn, recipe.pn, replrecipe.layerbranch.layer.name))
                            recipe.cover_layerbranch = replrecipe.layerbranch
                            recipe.cover_pn = replrecipe.pn
                            recipe.cover_status = 'R'
                            recipe.cover_verified = False
                            recipe.save()
                            found = True
                            break
            if options.dryrun:
                raise DryRunRollbackException()
    except DryRunRollbackException:
        # Dry run: the atomic block was rolled back; nothing was written.
        pass
    sys.exit(0)
def main():
    """Import branches/layers/dependencies/maintainers/notes from a remote
    layer index REST API into the local database.

    Existing layers and layerbranches are skipped (but still mapped by id so
    that dependencies referencing them can be imported).  All writes happen in
    one transaction, rolled back under --dry-run.
    """
    valid_layer_name = re.compile('[-\w]+$')

    parser = optparse.OptionParser(
        usage="""
%prog [options] <url>""")
    parser.add_option("-n", "--dry-run",
                      help="Don't write any data back to the database",
                      action="store_true", dest="dryrun")
    parser.add_option("-d", "--debug",
                      help="Enable debug output",
                      action="store_const", const=logging.DEBUG,
                      dest="loglevel", default=logging.INFO)
    parser.add_option("-q", "--quiet",
                      help="Hide all output except error messages",
                      action="store_const", const=logging.ERROR,
                      dest="loglevel")
    options, args = parser.parse_args(sys.argv)
    if len(args) < 2:
        print("Please specify URL of the layer index")
        sys.exit(1)
    layerindex_url = args[1]

    # Django must be configured before importing settings/models.
    utils.setup_django()
    import settings
    from layerindex.models import Branch, LayerItem, LayerBranch, LayerDependency, LayerMaintainer, LayerNote
    from django.db import transaction

    logger.setLevel(options.loglevel)

    fetchdir = settings.LAYER_FETCH_DIR
    if not fetchdir:
        logger.error("Please set LAYER_FETCH_DIR in settings.py")
        sys.exit(1)
    if not os.path.exists(fetchdir):
        os.makedirs(fetchdir)

    # Normalise the URL so it points at the API root.
    if not layerindex_url.endswith('/'):
        layerindex_url += '/'
    if not '/layerindex/api/' in layerindex_url:
        layerindex_url += '/layerindex/api/'

    # The API root lists the endpoint URLs for each object type.
    rq = urllib.request.Request(layerindex_url)
    data = urllib.request.urlopen(rq).read()
    jsdata = json.loads(data.decode('utf-8'))
    branches_url = jsdata['branches']
    layers_url = jsdata['layerItems']
    layerdeps_url = jsdata['layerDependencies']
    layerbranches_url = jsdata['layerBranches']
    layermaintainers_url = jsdata.get('layerMaintainers', None)
    layernotes_url = jsdata.get('layerNotes', None)

    logger.debug('Getting branches')
    # Get branches (we assume the ones we want are already there, so skip any that aren't)
    rq = urllib.request.Request(branches_url)
    data = urllib.request.urlopen(rq).read()
    jsdata = json.loads(data.decode('utf-8'))
    branch_idmap = {}
    for branchjs in jsdata:
        res = Branch.objects.filter(name=branchjs['name'])
        if res:
            branch = res.first()
            branch_idmap[branchjs['id']] = branch

    try:
        with transaction.atomic():
            # Get layers
            logger.debug('Importing layers')
            rq = urllib.request.Request(layers_url)
            data = urllib.request.urlopen(rq).read()
            jsdata = json.loads(data.decode('utf-8'))
            layer_idmap = {}
            exclude_fields = ['id', 'updated']
            for layerjs in jsdata:
                res = LayerItem.objects.filter(name=layerjs['name'])
                if res:
                    # Already have this layer
                    logger.debug('Skipping layer %s, already in database' % layerjs['name'])
                    layer_idmap[layerjs['id']] = res[0]
                    continue
                logger.debug('Adding layer %s' % layerjs['name'])
                layeritem = LayerItem()
                for key, value in layerjs.items():
                    if key in exclude_fields:
                        continue
                    setattr(layeritem, key, value)
                layeritem.save()
                layer_idmap[layerjs['id']] = layeritem

            # Get layer branches
            logger.debug('Importing layer branches')
            rq = urllib.request.Request(layerbranches_url)
            data = urllib.request.urlopen(rq).read()
            jsdata = json.loads(data.decode('utf-8'))
            layerbranch_idmap = {}
            # Fields that are local state or relations handled explicitly below.
            exclude_fields = [
                'id', 'layer', 'branch', 'vcs_last_fetch', 'vcs_last_rev',
                'vcs_last_commit', 'yp_compatible_version', 'updated'
            ]
            for layerbranchjs in jsdata:
                branch = branch_idmap.get(layerbranchjs['branch'], None)
                if not branch:
                    # We don't have this branch, skip it
                    logger.debug('Skipping layerbranch %s, branch not imported' % layerbranchjs['id'])
                    continue
                layer = layer_idmap.get(layerbranchjs['layer'], None)
                if not layer:
                    # We didn't import this layer, skip it
                    logger.debug('Skipping layerbranch %s, layer not imported' % layerbranchjs['id'])
                    continue
                res = LayerBranch.objects.filter(layer=layer).filter(branch=branch)
                if res:
                    # The layerbranch already exists (this will occur for layers
                    # that already existed, since we need to have those in layer_idmap
                    # to be able to import layer dependencies)
                    logger.debug('Skipping layerbranch %s, already exists' % layerbranchjs['id'])
                    continue
                layerbranch = LayerBranch()
                for key, value in layerbranchjs.items():
                    if key in exclude_fields:
                        continue
                    setattr(layerbranch, key, value)
                layerbranch.branch = branch
                layerbranch.layer = layer
                layerbranch.save()
                layerbranch_idmap[layerbranchjs['id']] = layerbranch

            # Get layer dependencies
            logger.debug('Importing layer dependencies')
            rq = urllib.request.Request(layerdeps_url)
            data = urllib.request.urlopen(rq).read()
            jsdata = json.loads(data.decode('utf-8'))
            exclude_fields = ['id', 'layerbranch', 'dependency', 'updated']
            for layerdepjs in jsdata:
                layerbranch = layerbranch_idmap.get(layerdepjs['layerbranch'], None)
                if not layerbranch:
                    # We didn't import this layerbranch, skip it
                    continue
                dependency = layer_idmap.get(layerdepjs['dependency'], None)
                if not dependency:
                    # We didn't import the dependency, skip it
                    continue
                layerdep = LayerDependency()
                for key, value in layerdepjs.items():
                    if key in exclude_fields:
                        continue
                    setattr(layerdep, key, value)
                layerdep.layerbranch = layerbranch
                layerdep.dependency = dependency
                layerdep.save()

            if layermaintainers_url:
                # Get layer maintainers (only available in latest code)
                logger.debug('Importing layer maintainers')
                rq = urllib.request.Request(layermaintainers_url)
                data = urllib.request.urlopen(rq).read()
                jsdata = json.loads(data.decode('utf-8'))
                exclude_fields = ['id', 'layerbranch']
                for layermaintainerjs in jsdata:
                    layerbranch = layerbranch_idmap.get(layermaintainerjs['layerbranch'], None)
                    if not layerbranch:
                        # We didn't import this layerbranch, skip it
                        continue
                    layermaintainer = LayerMaintainer()
                    for key, value in layermaintainerjs.items():
                        if key in exclude_fields:
                            continue
                        setattr(layermaintainer, key, value)
                    layermaintainer.layerbranch = layerbranch
                    layermaintainer.save()

            if layernotes_url:
                # Get layer notes (only available in latest code)
                logger.debug('Importing layer notes')
                rq = urllib.request.Request(layernotes_url)
                data = urllib.request.urlopen(rq).read()
                jsdata = json.loads(data.decode('utf-8'))
                exclude_fields = ['id', 'layer']
                for layernotejs in jsdata:
                    layer = layer_idmap.get(layernotejs['layer'], None)
                    if not layer:
                        # We didn't import this layer, skip it
                        continue
                    res = LayerNote.objects.filter(layer=layer).filter(text=layernotejs['text'])
                    if res:
                        # The note already exists (this will occur for layers
                        # that already existed, since we need to have those in layer_idmap
                        # to be able to import layer dependencies)
                        logger.debug('Skipping note %s, already exists' % layernotejs['id'])
                        continue
                    layernote = LayerNote()
                    for key, value in layernotejs.items():
                        if key in exclude_fields:
                            continue
                        setattr(layernote, key, value)
                    layernote.layer = layer
                    layernote.save()

            if options.dryrun:
                raise DryRunRollbackException()
    except DryRunRollbackException:
        # Dry run: the atomic block was rolled back; nothing was written.
        pass

    sys.exit(0)
def main(): parser = argparse.ArgumentParser(description="Dump RRS upgrade info") parser.add_argument("plan", help="Specify maintenance plan to operate on") parser.add_argument('-d', '--debug', action='store_true', help='Enable debug output') parser.add_argument('-q', '--quiet', action='store_true', help='Hide all output except error messages') args = parser.parse_args() if args.debug: loglevel = logging.DEBUG elif args.quiet: loglevel = logging.WARNING else: loglevel = logging.INFO utils.setup_django() import settings from rrs.models import MaintenancePlan, Release, Milestone, RecipeUpgrade, RecipeSymbol import rrs.views from django.db import transaction logger.setLevel(loglevel) maintplan = MaintenancePlan.objects.filter(id=args.plan).first() if not maintplan: logger.error('No maintenance plan with id %s' % args.plan) sys.exit(1) release = maintplan.get_default_release() if not release: logger.error('No default release for maintenance plan %s' % maintplan) sys.exit(1) milestone = release.get_default_milestone() if not milestone: logger.error('No default milestone for release %s' % release) sys.exit(1) recipe_list = rrs.views._get_recipe_list(milestone) for r in recipe_list: recipesymbol = RecipeSymbol.objects.get(id=r.pk) print('* %s %s %s %s' % (r.name, r.version, r.upstream_version, r.upstream_status)) details = [] for ru in RecipeUpgrade.objects.filter( recipesymbol=recipesymbol).exclude(upgrade_type='M').order_by( 'group', '-commit_date', '-id'): details.append(rrs.views._get_recipe_upgrade_detail(maintplan, ru)) details.sort( key=lambda s: rrs.views.RecipeUpgradeGroupSortItem(s.group), reverse=True) group = None for rud in details: if rud.group != group: print(' ---- %s ----' % rud.group.title) group = rud.group print(' - %s | %s | %s | %s | %s' % (rud.title, rud.version, rud.upgrade_type, rud.milestone_name, rud.date)) sys.exit(0)
def main():
    """Import OE-Classic cover status from the openembedded.org wiki pages.

    Parses the per-recipe comments from the two wiki pages, derives a cover
    status ('P'rovided, 'R'eplaced, 'O'bsolete, 'C'onfig option, or
    'U'nknown), replacement pn/layer, and categories, and applies them to the
    matching ClassicRecipe records.

    Fixes: the original used transaction.enter_transaction_management() /
    transaction.managed() / leave_transaction_management(), which were removed
    in Django 1.8 — replaced with transaction.atomic() + DryRunRollbackException
    like the other tools in this file; also corrected the "count be found"
    typo in the missing-recipe message.
    """
    parser = optparse.OptionParser(
        usage = """
%prog [options]""")
    parser.add_option("-b", "--branch",
            help = "Specify branch to import into",
            action="store", dest="branch", default='oe-classic')
    parser.add_option("-l", "--layer",
            help = "Specify layer to import into",
            action="store", dest="layer", default='oe-classic')
    parser.add_option("-n", "--dry-run",
            help = "Don't write any data back to the database",
            action="store_true", dest="dryrun")
    parser.add_option("-d", "--debug",
            help = "Enable debug output",
            action="store_const", const=logging.DEBUG, dest="loglevel",
            default=logging.INFO)
    parser.add_option("-q", "--quiet",
            help = "Hide all output except error messages",
            action="store_const", const=logging.ERROR, dest="loglevel")
    options, args = parser.parse_args(sys.argv)

    # Django must be configured before importing any models.
    utils.setup_django()
    from layerindex.models import LayerItem, LayerBranch, Recipe, ClassicRecipe
    from django.db import transaction

    logger.setLevel(options.loglevel)

    res = list(LayerItem.objects.filter(name=options.layer)[:1])
    if res:
        layer = res[0]
    else:
        logger.error('Specified layer %s does not exist in database' % options.layer)
        sys.exit(1)

    layerbranch = layer.get_layerbranch(options.branch)
    if not layerbranch:
        logger.error("Specified branch %s does not exist in database" % options.branch)
        sys.exit(1)

    recipes_ai = read_page("www.openembedded.org", "/wiki/OE-Classic_Recipes_A-I?action=raw")
    recipes_jz = read_page("www.openembedded.org", "/wiki/OE-Classic_Recipes_J-Z?action=raw")

    try:
        with transaction.atomic():
            recipes = dict(list(recipes_ai.items()) + list(recipes_jz.items()))
            for pn, comment in recipes.items():
                newpn = ''
                newlayer = ''
                status = 'U'
                comment = comment.strip(' -')
                if 'provided by' in comment:
                    res = re.match(r'[a-zA-Z- ]*provided by ([a-zA-Z0-9-]*) in ([a-zA-Z0-9-]*)(.*)', comment)
                    if res:
                        newpn = res.group(1)
                        newlayer = res.group(2)
                        comment = res.group(3)
                    if pn.endswith('-native') or pn.endswith('-cross'):
                        status = 'P'
                    else:
                        status = 'R'
                elif 'replaced by' in comment or 'renamed to' in comment or ' is in ' in comment:
                    res = re.match(r'.*replaced by ([a-zA-Z0-9-.]*) in ([a-zA-Z0-9-]*)(.*)', comment)
                    if not res:
                        res = re.match(r'.*renamed to ([a-zA-Z0-9-.]*) in ([a-zA-Z0-9-]*)(.*)', comment)
                    if not res:
                        res = re.match(r'([a-zA-Z0-9-.]*) is in ([a-zA-Z0-9-]*)(.*)', comment)
                    if res:
                        newpn = res.group(1)
                        newlayer = res.group(2)
                        comment = res.group(3)
                    status = 'R'
                elif 'obsolete' in comment or 'superseded' in comment:
                    res = re.match(r'provided by ([a-zA-Z0-9-]*) in ([a-zA-Z0-9-]*)(.*)', comment)
                    if res:
                        newpn = res.group(1)
                        newlayer = res.group(2)
                        comment = res.group(3)
                    elif comment.startswith('superseded by'):
                        comment = comment[14:]
                    elif comment.startswith('obsolete'):
                        comment = comment[9:]
                    status = 'O'
                elif 'PACKAGECONFIG' in comment:
                    res = re.match(r'[a-zA-Z ]* PACKAGECONFIG [a-zA-Z ]* to ([a-zA-Z0-9-]*) in ([a-zA-Z0-9-]*)(.*)', comment)
                    if res:
                        newpn = res.group(1)
                        newlayer = res.group(2)
                        comment = res.group(3)
                    status = 'C'
                if newlayer:
                    if newlayer.lower() == 'oe-core':
                        newlayer = 'openembedded-core'
                # Remove all links from comments because they'll be picked up as categories
                comment = re.sub(r'\[(http[^[]*)\]', r'\1', comment)
                # Split out categories
                categories = re.findall(r'\[([^]]*)\]', comment)
                for cat in categories:
                    comment = comment.replace('[%s]' % cat, '')
                if '(GPE)' in comment or pn.startswith('gpe'):
                    categories.append('GPE')
                    comment = comment.replace('(GPE)', '')
                comment = comment.strip('- ')
                logger.debug("%s|%s|%s|%s|%s|%s" % (pn, status, newpn, newlayer, categories, comment))

                recipequery = ClassicRecipe.objects.filter(layerbranch=layerbranch).filter(pn=pn)
                if recipequery:
                    for recipe in recipequery:
                        recipe.cover_layerbranch = None
                        if newlayer:
                            res = list(LayerItem.objects.filter(name=newlayer)[:1])
                            if res:
                                newlayeritem = res[0]
                                recipe.cover_layerbranch = newlayeritem.get_layerbranch('master')
                            else:
                                logger.info('Replacement layer "%s" for %s could not be found' % (newlayer, pn))
                        recipe.cover_pn = newpn
                        recipe.cover_status = status
                        recipe.cover_verified = True
                        recipe.cover_comment = comment
                        recipe.classic_category = " ".join(categories)
                        recipe.save()
                else:
                    # NOTE(review): a single missing recipe aborts the whole
                    # import (rolled back by atomic()) — confirm this is intended.
                    logger.info('No OE-Classic recipe with name "%s" could be found' % pn)
                    sys.exit(1)
            if options.dryrun:
                raise DryRunRollbackException()
    except DryRunRollbackException:
        # Dry run: the atomic block was rolled back; nothing was written.
        pass
    sys.exit(0)
def main():
    """Import OE-Classic cover status from the openembedded.org wiki pages.

    Parses the per-recipe comments from the two wiki pages, derives a cover
    status ('P'rovided, 'R'eplaced, 'O'bsolete, 'C'onfig option, or
    'U'nknown), replacement pn/layer, and categories, and applies them to the
    matching ClassicRecipe records.  All writes occur inside one transaction,
    rolled back under --dry-run.

    Fix: corrected the "count be found" typo in the missing-recipe message.
    """
    parser = optparse.OptionParser(
        usage="""
%prog [options]""")
    parser.add_option("-b", "--branch",
                      help="Specify branch to import into",
                      action="store", dest="branch", default='oe-classic')
    parser.add_option("-l", "--layer",
                      help="Specify layer to import into",
                      action="store", dest="layer", default='oe-classic')
    parser.add_option("-n", "--dry-run",
                      help="Don't write any data back to the database",
                      action="store_true", dest="dryrun")
    parser.add_option("-d", "--debug",
                      help="Enable debug output",
                      action="store_const", const=logging.DEBUG,
                      dest="loglevel", default=logging.INFO)
    parser.add_option("-q", "--quiet",
                      help="Hide all output except error messages",
                      action="store_const", const=logging.ERROR,
                      dest="loglevel")
    options, args = parser.parse_args(sys.argv)

    # Django must be configured before importing any models.
    utils.setup_django()
    from layerindex.models import LayerItem, LayerBranch, Recipe, ClassicRecipe
    from django.db import transaction

    logger.setLevel(options.loglevel)

    res = list(LayerItem.objects.filter(name=options.layer)[:1])
    if res:
        layer = res[0]
    else:
        logger.error('Specified layer %s does not exist in database' % options.layer)
        sys.exit(1)

    layerbranch = layer.get_layerbranch(options.branch)
    if not layerbranch:
        logger.error("Specified branch %s does not exist in database" % options.branch)
        sys.exit(1)

    recipes_ai = read_page("www.openembedded.org", "/wiki/OE-Classic_Recipes_A-I?action=raw")
    recipes_jz = read_page("www.openembedded.org", "/wiki/OE-Classic_Recipes_J-Z?action=raw")

    try:
        with transaction.atomic():
            recipes = dict(list(recipes_ai.items()) + list(recipes_jz.items()))
            for pn, comment in recipes.items():
                newpn = ''
                newlayer = ''
                status = 'U'
                comment = comment.strip(' -')
                if 'provided by' in comment:
                    res = re.match(
                        r'[a-zA-Z- ]*provided by ([a-zA-Z0-9-]*) in ([a-zA-Z0-9-]*)(.*)',
                        comment)
                    if res:
                        newpn = res.group(1)
                        newlayer = res.group(2)
                        comment = res.group(3)
                    if pn.endswith('-native') or pn.endswith('-cross'):
                        status = 'P'
                    else:
                        status = 'R'
                elif 'replaced by' in comment or 'renamed to' in comment or ' is in ' in comment:
                    res = re.match(
                        r'.*replaced by ([a-zA-Z0-9-.]*) in ([a-zA-Z0-9-]*)(.*)',
                        comment)
                    if not res:
                        res = re.match(
                            r'.*renamed to ([a-zA-Z0-9-.]*) in ([a-zA-Z0-9-]*)(.*)',
                            comment)
                    if not res:
                        res = re.match(
                            r'([a-zA-Z0-9-.]*) is in ([a-zA-Z0-9-]*)(.*)',
                            comment)
                    if res:
                        newpn = res.group(1)
                        newlayer = res.group(2)
                        comment = res.group(3)
                    status = 'R'
                elif 'obsolete' in comment or 'superseded' in comment:
                    res = re.match(
                        r'provided by ([a-zA-Z0-9-]*) in ([a-zA-Z0-9-]*)(.*)',
                        comment)
                    if res:
                        newpn = res.group(1)
                        newlayer = res.group(2)
                        comment = res.group(3)
                    elif comment.startswith('superseded by'):
                        comment = comment[14:]
                    elif comment.startswith('obsolete'):
                        comment = comment[9:]
                    status = 'O'
                elif 'PACKAGECONFIG' in comment:
                    res = re.match(
                        r'[a-zA-Z ]* PACKAGECONFIG [a-zA-Z ]* to ([a-zA-Z0-9-]*) in ([a-zA-Z0-9-]*)(.*)',
                        comment)
                    if res:
                        newpn = res.group(1)
                        newlayer = res.group(2)
                        comment = res.group(3)
                    status = 'C'
                if newlayer:
                    if newlayer.lower() == 'oe-core':
                        newlayer = 'openembedded-core'
                # Remove all links from comments because they'll be picked up as categories
                comment = re.sub(r'\[(http[^[]*)\]', r'\1', comment)
                # Split out categories
                categories = re.findall(r'\[([^]]*)\]', comment)
                for cat in categories:
                    comment = comment.replace('[%s]' % cat, '')
                if '(GPE)' in comment or pn.startswith('gpe'):
                    categories.append('GPE')
                    comment = comment.replace('(GPE)', '')
                comment = comment.strip('- ')
                logger.debug("%s|%s|%s|%s|%s|%s" % (pn, status, newpn, newlayer, categories, comment))

                recipequery = ClassicRecipe.objects.filter(layerbranch=layerbranch).filter(pn=pn)
                if recipequery:
                    for recipe in recipequery:
                        recipe.cover_layerbranch = None
                        if newlayer:
                            res = list(LayerItem.objects.filter(name=newlayer)[:1])
                            if res:
                                newlayeritem = res[0]
                                recipe.cover_layerbranch = newlayeritem.get_layerbranch('master')
                            else:
                                logger.info('Replacement layer "%s" for %s could not be found' % (newlayer, pn))
                        recipe.cover_pn = newpn
                        recipe.cover_status = status
                        recipe.cover_verified = True
                        recipe.cover_comment = comment
                        recipe.classic_category = " ".join(categories)
                        recipe.save()
                else:
                    # NOTE(review): a single missing recipe aborts the whole
                    # import (rolled back by atomic()) — confirm this is intended.
                    logger.info('No OE-Classic recipe with name "%s" could be found' % pn)
                    sys.exit(1)
            if options.dryrun:
                raise DryRunRollbackException()
    except DryRunRollbackException:
        # Dry run: the atomic block was rolled back; nothing was written.
        pass
    sys.exit(0)
def main():
    """Match uncovered OE-Classic recipes against master-branch recipes.

    For each ClassicRecipe with cover status 'U'/'N', look for a master-branch
    recipe with the same name (direct match), via BBCLASSEXTEND, or via a
    nativesdk- prefixed name, and record the cover relationship.  Writes are
    wrapped in one transaction, rolled back under --dry-run.
    """
    parser = optparse.OptionParser(
        usage = """
%prog [options]""")
    parser.add_option("-b", "--branch",
            help = "Specify branch to import into",
            action="store", dest="branch", default='oe-classic')
    parser.add_option("-l", "--layer",
            help = "Specify layer to import into",
            action="store", dest="layer", default='oe-classic')
    parser.add_option("-n", "--dry-run",
            help = "Don't write any data back to the database",
            action="store_true", dest="dryrun")
    parser.add_option("-d", "--debug",
            help = "Enable debug output",
            action="store_const", const=logging.DEBUG, dest="loglevel",
            default=logging.INFO)
    parser.add_option("-q", "--quiet",
            help = "Hide all output except error messages",
            action="store_const", const=logging.ERROR, dest="loglevel")
    options, args = parser.parse_args(sys.argv)

    # Django must be configured before importing any models.
    utils.setup_django()
    from layerindex.models import LayerItem, LayerBranch, Recipe, ClassicRecipe
    from django.db import transaction

    logger.setLevel(options.loglevel)

    res = list(LayerItem.objects.filter(name=options.layer)[:1])
    if res:
        layer = res[0]
    else:
        logger.error('Specified layer %s does not exist in database' % options.layer)
        sys.exit(1)

    layerbranch = layer.get_layerbranch(options.branch)
    if not layerbranch:
        logger.error("Specified branch %s does not exist in database" % options.branch)
        sys.exit(1)

    try:
        with transaction.atomic():
            def recipe_pn_query(pn):
                # Candidate master-branch recipes with a matching name.
                return Recipe.objects.filter(layerbranch__branch__name='master').filter(pn=pn).order_by('layerbranch__layer__index_preference')

            recipequery = ClassicRecipe.objects.filter(layerbranch=layerbranch).filter(cover_status__in=['U', 'N'])
            for recipe in recipequery:
                replquery = recipe_pn_query(recipe.pn)
                found = False
                for replrecipe in replquery:
                    logger.debug('Matched %s in layer %s' % (recipe.pn, replrecipe.layerbranch.layer.name))
                    # NOTE(review): cover_pn is not set on a direct name match
                    # (the names are identical) — confirm against consumers.
                    recipe.cover_layerbranch = replrecipe.layerbranch
                    recipe.cover_status = 'D'
                    recipe.cover_verified = False
                    recipe.save()
                    found = True
                    break
                if not found:
                    if recipe.pn.endswith('-native') or recipe.pn.endswith('-nativesdk'):
                        # Try the base recipe with a matching BBCLASSEXTEND.
                        searchpn, _, suffix = recipe.pn.rpartition('-')
                        replquery = recipe_pn_query(searchpn)
                        for replrecipe in replquery:
                            if suffix in replrecipe.bbclassextend.split():
                                logger.debug('Found BBCLASSEXTEND of %s to cover %s in layer %s' % (replrecipe.pn, recipe.pn, replrecipe.layerbranch.layer.name))
                                recipe.cover_layerbranch = replrecipe.layerbranch
                                recipe.cover_pn = replrecipe.pn
                                recipe.cover_status = 'P'
                                recipe.cover_verified = False
                                recipe.save()
                                found = True
                                break
                    if not found and recipe.pn.endswith('-nativesdk'):
                        # Modern naming puts nativesdk- at the front.
                        searchpn, _, _ = recipe.pn.rpartition('-')
                        replquery = recipe_pn_query('nativesdk-%s' % searchpn)
                        for replrecipe in replquery:
                            logger.debug('Found replacement %s to cover %s in layer %s' % (replrecipe.pn, recipe.pn, replrecipe.layerbranch.layer.name))
                            recipe.cover_layerbranch = replrecipe.layerbranch
                            recipe.cover_pn = replrecipe.pn
                            recipe.cover_status = 'R'
                            recipe.cover_verified = False
                            recipe.save()
                            found = True
                            break
            if options.dryrun:
                raise DryRunRollbackException()
    except DryRunRollbackException:
        # Dry run: the atomic block was rolled back; nothing was written.
        pass
    sys.exit(0)
def import_pkgspec(args):
    """Import RPM .spec files from a package directory tree as ClassicRecipes.

    Walks each subdirectory of args.pkgdir; every *.spec file found becomes
    (or updates/restores) a ClassicRecipe on the layer/branch selected by
    args. Recipes previously present but no longer found are marked deleted.
    Progress is reported through a ProgressWriter when an update record with
    a task id exists. All writes happen in one transaction; args.dry_run
    rolls it back.

    Returns 0 on success, non-zero on error (also propagates the return
    value of check_branch_layer() on failure).
    """
    utils.setup_django()
    import settings
    from layerindex.models import LayerItem, LayerBranch, Recipe, ClassicRecipe, Machine, BBAppend, BBClass, ComparisonRecipeUpdate
    from django.db import transaction

    ret, layerbranch = check_branch_layer(args)
    if ret:
        return ret

    updateobj = get_update_obj(args)

    logdir = getattr(settings, 'TASK_LOG_DIR')
    if updateobj and updateobj.task_id and logdir:
        pwriter = utils.ProgressWriter(logdir, updateobj.task_id, logger=logger)
    else:
        pwriter = None

    metapath = args.pkgdir

    try:
        with transaction.atomic():
            layerrecipes = ClassicRecipe.objects.filter(layerbranch=layerbranch)
            # Entries still in this list at the end no longer exist on disk
            # and get marked deleted below
            existing = list(layerrecipes.filter(deleted=False).values_list('filepath', 'filename'))
            dirlist = os.listdir(metapath)
            total = len(dirlist)
            # enumerate from 1 so the final iteration reports 100% progress
            for count, entry in enumerate(dirlist, start=1):
                if os.path.exists(os.path.join(metapath, entry, 'dead.package')):
                    logger.info('Skipping dead package %s' % entry)
                else:
                    specfiles = glob.glob(os.path.join(metapath, entry, '*.spec'))
                    if specfiles:
                        for specfile in specfiles:
                            specfn = os.path.basename(specfile)
                            specpath = os.path.relpath(os.path.dirname(specfile), metapath)
                            recipe, created = ClassicRecipe.objects.get_or_create(
                                layerbranch=layerbranch,
                                filepath=specpath,
                                filename=specfn)
                            if created:
                                logger.info('Importing %s' % specfn)
                            elif recipe.deleted:
                                logger.info('Restoring and updating %s' % specpath)
                                recipe.deleted = False
                            else:
                                logger.info('Updating %s' % specpath)
                            recipe.layerbranch = layerbranch
                            recipe.filename = specfn
                            recipe.filepath = specpath
                            update_recipe_file(specfile, recipe, metapath)
                            recipe.save()
                            existingentry = (specpath, specfn)
                            if existingentry in existing:
                                existing.remove(existingentry)
                            if updateobj:
                                rupdate, _ = ComparisonRecipeUpdate.objects.get_or_create(
                                    update=updateobj, recipe=recipe)
                                rupdate.meta_updated = True
                                rupdate.save()
                    else:
                        # logging.warn() is a deprecated alias of warning()
                        logger.warning('Missing spec file in %s' % os.path.join(metapath, entry))
                # Report progress for every entry, including skipped ones
                if pwriter:
                    pwriter.write(int(count * 100 / total))
            if existing:
                fpaths = ['%s/%s' % (pth, fn) for pth, fn in existing]
                logger.info('Marking as deleted: %s' % ', '.join(fpaths))
                for entry in existing:
                    layerrecipes.filter(filepath=entry[0], filename=entry[1]).update(deleted=True)
            layerbranch.vcs_last_fetch = datetime.now()
            layerbranch.save()
            if args.dry_run:
                raise DryRunRollbackException()
    except DryRunRollbackException:
        pass
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit
        # propagate instead of being reported as an import failure
        import traceback
        traceback.print_exc()
        return 1
    return 0
def main():
    """Command-line entry point: synchronize a layer's database records with
    its git repository for one branch.

    Updates Recipe, Machine, Distro, BBAppend, BBClass and IncFile records.
    If a previous revision is recorded and --reload is not given, applies the
    git diff since that revision incrementally (handling renames, deletes,
    adds and modifications); otherwise reparses the layer from scratch.
    Recipe ids are preserved across upgrades where possible so bookmarked
    recipe pages stay valid. All database writes occur inside a single
    transaction; --dry-run rolls it back.
    """
    if LooseVersion(git.__version__) < '0.3.1':
        logger.error("Version of GitPython is too old, please install GitPython (python-git) 0.3.1 or later in order to use this script")
        sys.exit(1)

    parser = optparse.OptionParser(usage=""" %prog [options]""")
    parser.add_option("-b", "--branch",
            help="Specify branch to update",
            action="store", dest="branch", default='master')
    parser.add_option("-l", "--layer",
            help="Layer to update",
            action="store", dest="layer")
    parser.add_option("-r", "--reload",
            help="Reload recipe data instead of updating since last update",
            action="store_true", dest="reload")
    parser.add_option("", "--fullreload",
            help="Discard existing recipe data and fetch it from scratch",
            action="store_true", dest="fullreload")
    parser.add_option("-n", "--dry-run",
            help="Don't write any data back to the database",
            action="store_true", dest="dryrun")
    parser.add_option("", "--nocheckout",
            help="Don't check out branches",
            action="store_true", dest="nocheckout")
    parser.add_option("", "--stop-on-error",
            help="Stop on first parsing error",
            action="store_true", default=False, dest="stop_on_error")
    parser.add_option("-i", "--initial",
            help="Print initial values parsed from layer.conf only",
            action="store_true")
    parser.add_option("-d", "--debug",
            help="Enable debug output",
            action="store_const", const=logging.DEBUG, dest="loglevel", default=logging.INFO)
    parser.add_option("-q", "--quiet",
            help="Hide all output except error messages",
            action="store_const", const=logging.ERROR, dest="loglevel")
    parser.add_option("", "--keep-temp",
            help="Preserve temporary directory at the end instead of deleting it",
            action="store_true")

    options, args = parser.parse_args(sys.argv)
    if len(args) > 1:
        logger.error('unexpected argument "%s"' % args[1])
        parser.print_help()
        sys.exit(1)

    # --fullreload implies --reload
    if options.fullreload:
        options.reload = True

    utils.setup_django()
    import settings
    from layerindex.models import LayerItem, LayerBranch, Recipe, RecipeFileDependency, Machine, Distro, BBAppend, BBClass, IncFile
    from django.db import transaction

    logger.setLevel(options.loglevel)

    branch = utils.get_branch(options.branch)
    if not branch:
        logger.error("Specified branch %s is not valid" % options.branch)
        sys.exit(1)

    fetchdir = settings.LAYER_FETCH_DIR
    if not fetchdir:
        logger.error("Please set LAYER_FETCH_DIR in settings.py")
        sys.exit(1)

    bitbakepath = os.path.join(fetchdir, 'bitbake')

    layer = utils.get_layer(options.layer)
    urldir = layer.get_fetch_dir()
    repodir = os.path.join(fetchdir, urldir)

    layerbranch = layer.get_layerbranch(options.branch)

    # actual_branch allows the DB branch name to map onto a differently-named
    # branch in the actual repository
    branchname = options.branch
    branchdesc = options.branch
    if layerbranch:
        if layerbranch.actual_branch:
            branchname = layerbranch.actual_branch
            branchdesc = "%s (%s)" % (options.branch, branchname)

    # Collect repo info
    repo = git.Repo(repodir)
    if repo.bare:
        logger.error('Repository %s is bare, not supported' % repodir)
        sys.exit(1)
    topcommit = repo.commit('origin/%s' % branchname)
    if options.nocheckout:
        topcommit = repo.commit('HEAD')

    tinfoil = None
    tempdir = None
    try:
        with transaction.atomic():
            newbranch = False
            if not layerbranch:
                # LayerBranch doesn't exist for this branch, create it
                newbranch = True
                layerbranch = LayerBranch()
                layerbranch.layer = layer
                layerbranch.branch = branch
                layerbranch_source = layer.get_layerbranch(branch)
                if not layerbranch_source:
                    layerbranch_source = layer.get_layerbranch(None)
                if layerbranch_source:
                    layerbranch.vcs_subdir = layerbranch_source.vcs_subdir
                layerbranch.save()
                if layerbranch_source:
                    # Copy maintainers from the source branch (clear pk/id so
                    # save() creates new records)
                    for maintainer in layerbranch_source.layermaintainer_set.all():
                        maintainer.pk = None
                        maintainer.id = None
                        maintainer.layerbranch = layerbranch
                        maintainer.save()
            if layerbranch.vcs_subdir and not options.nocheckout:
                # Find latest commit in subdirectory
                # A bit odd to do it this way but apparently there's no other way in the GitPython API
                topcommit = next(repo.iter_commits('origin/%s' % branchname, paths=layerbranch.vcs_subdir), None)

            layerdir = os.path.join(repodir, layerbranch.vcs_subdir)
            layerdir_start = os.path.normpath(layerdir) + os.sep

            layerrecipes = Recipe.objects.filter(layerbranch=layerbranch)
            layermachines = Machine.objects.filter(layerbranch=layerbranch)
            layerdistros = Distro.objects.filter(layerbranch=layerbranch)
            layerappends = BBAppend.objects.filter(layerbranch=layerbranch)
            layerclasses = BBClass.objects.filter(layerbranch=layerbranch)
            layerincfiles = IncFile.objects.filter(layerbranch=layerbranch)
            if layerbranch.vcs_last_rev != topcommit.hexsha or options.reload or options.initial:
                # Check out appropriate branch
                if not options.nocheckout:
                    utils.checkout_layer_branch(layerbranch, repodir, logger=logger)

                logger.info("Collecting data for layer %s on branch %s" % (layer.name, branchdesc))
                try:
                    (tinfoil, tempdir) = recipeparse.init_parser(settings, branch, bitbakepath, nocheckout=options.nocheckout, logger=logger)
                except recipeparse.RecipeParseError as e:
                    logger.error(str(e))
                    sys.exit(1)
                logger.debug('Using temp directory %s' % tempdir)
                # Clear the default value of SUMMARY so that we can use DESCRIPTION instead if it hasn't been set
                tinfoil.config_data.setVar('SUMMARY', '')
                # Clear the default value of DESCRIPTION so that we can see where it's not set
                tinfoil.config_data.setVar('DESCRIPTION', '')
                # Clear the default value of HOMEPAGE ('unknown')
                tinfoil.config_data.setVar('HOMEPAGE', '')
                # Set a blank value for LICENSE so that it doesn't cause the parser to die (e.g. with meta-ti -
                # why won't they just fix that?!)
                tinfoil.config_data.setVar('LICENSE', '')

                layerconfparser = layerconfparse.LayerConfParse(logger=logger, tinfoil=tinfoil)
                layer_config_data = layerconfparser.parse_layer(layerdir)
                if not layer_config_data:
                    logger.info("Skipping update of layer %s for branch %s - conf/layer.conf may have parse issues" % (layer.name, branchdesc))
                    layerconfparser.shutdown()
                    sys.exit(1)
                utils.set_layerbranch_collection_version(layerbranch, layer_config_data, logger=logger)
                if options.initial:
                    # Use print() rather than logger.info() since "-q" makes it print nothing.
                    for i in ["BBFILE_COLLECTIONS", "LAYERVERSION", "LAYERDEPENDS", "LAYERRECOMMENDS"]:
                        print('%s = "%s"' % (i, utils.get_layer_var(layer_config_data, i, logger)))
                    sys.exit(0)

                # Set up for recording patch info
                utils.setup_core_layer_sys_path(settings, branch.name)
                skip_patches = False
                try:
                    import oe.recipeutils
                except ImportError:
                    logger.warn('Failed to find lib/oe/recipeutils.py in layers - patch information will not be collected')
                    skip_patches = True

                utils.add_dependencies(layerbranch, layer_config_data, logger=logger)
                utils.add_recommends(layerbranch, layer_config_data, logger=logger)
                layerbranch.save()

                try:
                    config_data_copy = recipeparse.setup_layer(tinfoil.config_data, fetchdir, layerdir, layer, layerbranch, logger)
                except recipeparse.RecipeParseError as e:
                    logger.error(str(e))
                    sys.exit(1)

                if layerbranch.vcs_last_rev and not options.reload:
                    try:
                        diff = repo.commit(layerbranch.vcs_last_rev).diff(topcommit)
                    except Exception as e:
                        logger.warn("Unable to get diff from last commit hash for layer %s - falling back to slow update: %s" % (layer.name, str(e)))
                        diff = None
                else:
                    diff = None

                # We handle recipes specially to try to preserve the same id
                # when recipe upgrades happen (so that if a user bookmarks a
                # recipe page it remains valid)
                layerrecipes_delete = []
                layerrecipes_add = []

                # Check if any paths should be ignored because there are layers within this layer
                removedirs = []
                for root, dirs, files in os.walk(layerdir):
                    for diritem in dirs:
                        if os.path.exists(os.path.join(root, diritem, 'conf', 'layer.conf')):
                            removedirs.append(os.path.join(root, diritem) + os.sep)

                if diff:
                    # Apply git changes to existing recipe list

                    if layerbranch.vcs_subdir:
                        subdir_start = os.path.normpath(layerbranch.vcs_subdir) + os.sep
                    else:
                        subdir_start = ""

                    updatedrecipes = set()
                    dirtyrecipes = set()
                    other_deletes = []
                    other_adds = []
                    # Renames first; renames whose type or layer changes get
                    # re-queued as delete+add pairs
                    for diffitem in diff.iter_change_type('R'):
                        oldpath = diffitem.a_blob.path
                        newpath = diffitem.b_blob.path
                        skip = False
                        for removedir in removedirs:
                            # FIXME what about files moved into removedirs?
                            if oldpath.startswith(removedir):
                                skip = True
                                break
                        if skip:
                            continue
                        if oldpath.startswith(subdir_start):
                            if not newpath.startswith(subdir_start):
                                logger.debug("Treating rename of %s to %s as a delete since new path is outside layer" % (oldpath, newpath))
                                other_deletes.append(diffitem)
                                continue
                            (oldtypename, oldfilepath, oldfilename) = recipeparse.detect_file_type(oldpath, subdir_start)
                            (newtypename, newfilepath, newfilename) = recipeparse.detect_file_type(newpath, subdir_start)
                            if oldtypename != newtypename:
                                # This is most likely to be a .inc file renamed to a .bb - and since
                                # there may be another recipe deleted at the same time we probably want
                                # to consider that, so just treat it as a delete and an add
                                logger.debug("Treating rename of %s to %s as a delete and add (since type changed)" % (oldpath, newpath))
                                other_deletes.append(diffitem)
                                other_adds.append(diffitem)
                            elif oldtypename == 'recipe':
                                results = layerrecipes.filter(filepath=oldfilepath).filter(filename=oldfilename)
                                if len(results):
                                    recipe = results[0]
                                    logger.debug("Rename recipe %s to %s" % (recipe, newpath))
                                    recipe.filepath = newfilepath
                                    recipe.filename = newfilename
                                    recipe.save()
                                    update_recipe_file(tinfoil, config_data_copy, os.path.join(layerdir, newfilepath), recipe, layerdir_start, repodir, options.stop_on_error, skip_patches)
                                    updatedrecipes.add(os.path.join(oldfilepath, oldfilename))
                                    updatedrecipes.add(os.path.join(newfilepath, newfilename))
                                else:
                                    logger.warn("Renamed recipe %s could not be found" % oldpath)
                                    other_adds.append(diffitem)
                            elif oldtypename == 'bbappend':
                                results = layerappends.filter(filepath=oldfilepath).filter(filename=oldfilename)
                                if len(results):
                                    logger.debug("Rename bbappend %s to %s" % (results[0], os.path.join(newfilepath, newfilename)))
                                    results[0].filepath = newfilepath
                                    results[0].filename = newfilename
                                    results[0].save()
                                else:
                                    logger.warn("Renamed bbappend %s could not be found" % oldpath)
                                    other_adds.append(diffitem)
                            elif oldtypename == 'machine':
                                results = layermachines.filter(name=oldfilename)
                                if len(results):
                                    logger.debug("Rename machine %s to %s" % (results[0], newfilename))
                                    results[0].name = newfilename
                                    results[0].save()
                                else:
                                    logger.warn("Renamed machine %s could not be found" % oldpath)
                                    other_adds.append(diffitem)
                            elif oldtypename == 'distro':
                                results = layerdistros.filter(name=oldfilename)
                                if len(results):
                                    logger.debug("Rename distro %s to %s" % (results[0], newfilename))
                                    results[0].name = newfilename
                                    results[0].save()
                                else:
                                    logger.warn("Renamed distro %s could not be found" % oldpath)
                                    other_adds.append(diffitem)
                            elif oldtypename == 'bbclass':
                                results = layerclasses.filter(name=oldfilename)
                                if len(results):
                                    logger.debug("Rename class %s to %s" % (results[0], newfilename))
                                    results[0].name = newfilename
                                    results[0].save()
                                else:
                                    logger.warn("Renamed class %s could not be found" % oldpath)
                                    other_adds.append(diffitem)
                            elif oldtypename == 'incfile':
                                results = layerincfiles.filter(path=os.path.join(oldfilepath, oldfilename))
                                if len(results):
                                    logger.debug("Rename inc file %s to %s" % (results[0], newfilename))
                                    results[0].name = newfilename
                                    results[0].save()
                                else:
                                    logger.warn("Renamed inc file %s could not be found" % oldpath)
                                    other_adds.append(diffitem)

                            # Any recipe depending on the renamed file needs
                            # re-parsing later
                            deps = RecipeFileDependency.objects.filter(layerbranch=layerbranch).filter(path=oldpath)
                            for dep in deps:
                                dirtyrecipes.add(dep.recipe)

                    for diffitem in itertools.chain(diff.iter_change_type('D'), other_deletes):
                        path = diffitem.a_blob.path
                        if path.startswith(subdir_start):
                            skip = False
                            for removedir in removedirs:
                                if path.startswith(removedir):
                                    skip = True
                                    break
                            if skip:
                                continue
                            (typename, filepath, filename) = recipeparse.detect_file_type(path, subdir_start)
                            if typename == 'recipe':
                                values = layerrecipes.filter(filepath=filepath).filter(filename=filename).values('id', 'filepath', 'filename', 'pn')
                                if len(values):
                                    # Deferred so an add of the same PN can
                                    # reclaim the record (id preservation)
                                    layerrecipes_delete.append(values[0])
                                    logger.debug("Mark %s for deletion" % values[0])
                                    updatedrecipes.add(os.path.join(values[0]['filepath'], values[0]['filename']))
                                else:
                                    logger.warn("Deleted recipe %s could not be found" % path)
                            elif typename == 'bbappend':
                                layerappends.filter(filepath=filepath).filter(filename=filename).delete()
                            elif typename == 'machine':
                                layermachines.filter(name=filename).delete()
                            elif typename == 'distro':
                                layerdistros.filter(name=filename).delete()
                            elif typename == 'bbclass':
                                layerclasses.filter(name=filename).delete()
                            elif typename == 'incfile':
                                layerincfiles.filter(path=os.path.join(filepath, filename)).delete()

                    for diffitem in itertools.chain(diff.iter_change_type('A'), other_adds):
                        path = diffitem.b_blob.path
                        if path.startswith(subdir_start):
                            skip = False
                            for removedir in removedirs:
                                if path.startswith(removedir):
                                    skip = True
                                    break
                            if skip:
                                continue
                            (typename, filepath, filename) = recipeparse.detect_file_type(path, subdir_start)
                            if typename == 'recipe':
                                layerrecipes_add.append(os.path.join(repodir, path))
                                logger.debug("Mark %s for addition" % path)
                                updatedrecipes.add(os.path.join(filepath, filename))
                            elif typename == 'bbappend':
                                append = BBAppend()
                                append.layerbranch = layerbranch
                                append.filename = filename
                                append.filepath = filepath
                                append.save()
                            elif typename == 'machine':
                                machine = Machine()
                                machine.layerbranch = layerbranch
                                machine.name = filename
                                update_machine_conf_file(os.path.join(repodir, path), machine)
                                machine.save()
                            elif typename == 'distro':
                                distro = Distro()
                                distro.layerbranch = layerbranch
                                distro.name = filename
                                update_distro_conf_file(os.path.join(repodir, path), distro, config_data_copy)
                                distro.save()
                            elif typename == 'bbclass':
                                bbclass = BBClass()
                                bbclass.layerbranch = layerbranch
                                bbclass.name = filename
                                bbclass.save()
                            elif typename == 'incfile':
                                incfile = IncFile()
                                incfile.layerbranch = layerbranch
                                incfile.path = os.path.join(filepath, filename)
                                incfile.save()

                    for diffitem in diff.iter_change_type('M'):
                        path = diffitem.b_blob.path
                        if path.startswith(subdir_start):
                            skip = False
                            for removedir in removedirs:
                                if path.startswith(removedir):
                                    skip = True
                                    break
                            if skip:
                                continue
                            (typename, filepath, filename) = recipeparse.detect_file_type(path, subdir_start)
                            if typename == 'recipe':
                                logger.debug("Mark %s for update" % path)
                                results = layerrecipes.filter(filepath=filepath).filter(filename=filename)[:1]
                                if results:
                                    recipe = results[0]
                                    update_recipe_file(tinfoil, config_data_copy, os.path.join(layerdir, filepath), recipe, layerdir_start, repodir, options.stop_on_error, skip_patches)
                                    recipe.save()
                                    updatedrecipes.add(recipe.full_path())
                            elif typename == 'machine':
                                results = layermachines.filter(name=filename)
                                if results:
                                    machine = results[0]
                                    update_machine_conf_file(os.path.join(repodir, path), machine)
                                    machine.save()
                            elif typename == 'distro':
                                results = layerdistros.filter(name=filename)
                                if results:
                                    distro = results[0]
                                    update_distro_conf_file(os.path.join(repodir, path), distro, config_data_copy)
                                    distro.save()
                            deps = RecipeFileDependency.objects.filter(layerbranch=layerbranch).filter(path=path)
                            for dep in deps:
                                dirtyrecipes.add(dep.recipe)

                    # Re-parse recipes whose dependency files changed, unless
                    # they were already updated above
                    for recipe in dirtyrecipes:
                        if not recipe.full_path() in updatedrecipes:
                            update_recipe_file(tinfoil, config_data_copy, os.path.join(layerdir, recipe.filepath), recipe, layerdir_start, repodir, options.stop_on_error, skip_patches)
                else:
                    # Collect recipe data from scratch

                    layerrecipe_fns = []
                    if options.fullreload:
                        layerrecipes.delete()
                    else:
                        # First, check which recipes still exist
                        layerrecipe_values = layerrecipes.values('id', 'filepath', 'filename', 'pn')
                        for v in layerrecipe_values:
                            if v['filepath'].startswith('../'):
                                # FIXME: These recipes were present due to a bug (not handling renames
                                # to paths outside the layer) - this can be removed at some point in the future
                                preserve = False
                            else:
                                root = os.path.join(layerdir, v['filepath'])
                                fullpath = os.path.join(root, v['filename'])
                                if os.path.exists(fullpath):
                                    preserve = True
                                    for removedir in removedirs:
                                        if fullpath.startswith(removedir):
                                            preserve = False
                                            break
                                else:
                                    preserve = False

                            if preserve:
                                # Recipe still exists, update it
                                results = layerrecipes.filter(id=v['id'])[:1]
                                recipe = results[0]
                                update_recipe_file(tinfoil, config_data_copy, root, recipe, layerdir_start, repodir, options.stop_on_error, skip_patches)
                            else:
                                # Recipe no longer exists, mark it for later on
                                layerrecipes_delete.append(v)

                            layerrecipe_fns.append(fullpath)

                    layermachines.delete()
                    layerdistros.delete()
                    layerappends.delete()
                    layerclasses.delete()
                    for root, dirs, files in os.walk(layerdir):
                        if '.git' in dirs:
                            dirs.remove('.git')
                        # Don't descend into nested layers
                        for diritem in dirs[:]:
                            fullpath = os.path.join(root, diritem) + os.sep
                            if fullpath in removedirs:
                                dirs.remove(diritem)
                        for f in files:
                            fullpath = os.path.join(root, f)
                            (typename, _, filename) = recipeparse.detect_file_type(fullpath, layerdir_start)
                            if typename == 'recipe':
                                if fullpath not in layerrecipe_fns:
                                    layerrecipes_add.append(fullpath)
                            elif typename == 'bbappend':
                                append = BBAppend()
                                append.layerbranch = layerbranch
                                append.filename = f
                                append.filepath = os.path.relpath(root, layerdir)
                                append.save()
                            elif typename == 'machine':
                                machine = Machine()
                                machine.layerbranch = layerbranch
                                machine.name = filename
                                update_machine_conf_file(fullpath, machine)
                                machine.save()
                            elif typename == 'distro':
                                distro = Distro()
                                distro.layerbranch = layerbranch
                                distro.name = filename
                                update_distro_conf_file(fullpath, distro, config_data_copy)
                                distro.save()
                            elif typename == 'bbclass':
                                bbclass = BBClass()
                                bbclass.layerbranch = layerbranch
                                bbclass.name = filename
                                bbclass.save()
                            elif typename == 'incfile':
                                incfile = IncFile()
                                incfile.layerbranch = layerbranch
                                incfile.path = os.path.relpath(fullpath, layerdir)
                                incfile.save()

                for added in layerrecipes_add:
                    # This is good enough without actually parsing the file
                    (pn, pv) = split_recipe_fn(added)
                    oldid = -1
                    for deleted in layerrecipes_delete:
                        if deleted['pn'] == pn:
                            oldid = deleted['id']
                            layerrecipes_delete.remove(deleted)
                            break
                    if oldid > -1:
                        # Reclaim a record we would have deleted
                        results = Recipe.objects.filter(id=oldid)[:1]
                        recipe = results[0]
                        logger.debug("Reclaim %s for %s %s" % (recipe, pn, pv))
                    else:
                        # Create new record
                        logger.debug("Add new recipe %s" % added)
                        recipe = Recipe()
                        recipe.layerbranch = layerbranch
                        recipe.filename = os.path.basename(added)
                    root = os.path.dirname(added)
                    recipe.filepath = os.path.relpath(root, layerdir)
                    update_recipe_file(tinfoil, config_data_copy, root, recipe, layerdir_start, repodir, options.stop_on_error, skip_patches)
                    recipe.save()

                for deleted in layerrecipes_delete:
                    logger.debug("Delete %s" % deleted)
                    results = Recipe.objects.filter(id=deleted['id'])[:1]
                    recipe = results[0]
                    recipe.delete()

                # Save repo info
                layerbranch.vcs_last_rev = topcommit.hexsha
                layerbranch.vcs_last_commit = datetime.fromtimestamp(topcommit.committed_date)
            else:
                logger.info("Layer %s is already up-to-date for branch %s" % (layer.name, branchdesc))

            layerbranch.vcs_last_fetch = datetime.now()
            layerbranch.save()

            if options.dryrun:
                raise DryRunRollbackException()

    except KeyboardInterrupt:
        logger.warn("Update interrupted, changes to %s rolled back" % layer.name)
        sys.exit(254)
    except SystemExit:
        raise
    except DryRunRollbackException:
        pass
    except:
        import traceback
        logger.error(traceback.format_exc().rstrip())
        sys.exit(1)
    finally:
        # tinfoil.shutdown() only exists on newer bitbake versions
        if tinfoil and (LooseVersion(bb.__version__) > LooseVersion("1.27")):
            tinfoil.shutdown()
        if tempdir:
            if options.keep_temp:
                logger.debug('Preserving temp directory %s' % tempdir)
            else:
                logger.debug('Deleting temp directory')
                utils.rmtree_force(tempdir)

    sys.exit(0)
def main():
    """Command-line entry point: auto-link uncovered ClassicRecipes to
    current master-branch recipes using naming/source-URL heuristics.

    For each non-deleted ClassicRecipe with cover_status 'U'/'N' (optionally
    skipping PNs listed via --skip): try an exact sanitized-PN match first;
    then, for the 'oe-classic' layer, the -native/-nativesdk rules; for other
    layers, heuristics based on the first source URL (pypi/cpan/kde) and PN
    prefixes (R-, rubygem-, jdk-, golang-, gnome-, perl-), also setting
    classic_category accordingly. Updated recipes are linked to the given
    Update record via ComparisonRecipeUpdate. --dry-run rolls the
    transaction back.
    """
    parser = optparse.OptionParser(usage=""" %prog [options]""")
    parser.add_option("-b", "--branch",
            help="Specify branch to import into",
            action="store", dest="branch", default='oe-classic')
    parser.add_option("-l", "--layer",
            help="Specify layer to import into",
            action="store", dest="layer", default='oe-classic')
    parser.add_option("-u", "--update",
            help="Specify update record to link to",
            action="store", dest="update")
    parser.add_option("-n", "--dry-run",
            help="Don't write any data back to the database",
            action="store_true", dest="dryrun")
    parser.add_option("-s", "--skip",
            help="Skip specified packages (comma-separated list, no spaces)",
            action="store", dest="skip")
    parser.add_option("-d", "--debug",
            help="Enable debug output",
            action="store_const", const=logging.DEBUG, dest="loglevel", default=logging.INFO)
    parser.add_option("-q", "--quiet",
            help="Hide all output except error messages",
            action="store_const", const=logging.ERROR, dest="loglevel")

    options, args = parser.parse_args(sys.argv)

    utils.setup_django()
    from layerindex.models import LayerItem, LayerBranch, Recipe, ClassicRecipe, Update, ComparisonRecipeUpdate
    from django.db import transaction

    logger.setLevel(options.loglevel)

    res = list(LayerItem.objects.filter(name=options.layer)[:1])
    if res:
        layer = res[0]
    else:
        logger.error('Specified layer %s does not exist in database' % options.layer)
        sys.exit(1)

    layerbranch = layer.get_layerbranch(options.branch)
    if not layerbranch:
        logger.error("Specified branch %s does not exist in database" % options.branch)
        sys.exit(1)

    updateobj = None
    if options.update:
        updateobj = Update.objects.filter(id=int(options.update))
        if not updateobj:
            logger.error("Specified update id %s does not exist in database" % options.update)
            sys.exit(1)
        updateobj = updateobj.first()

    if options.skip:
        skiplist = options.skip.split(',')
    else:
        skiplist = []

    try:
        with transaction.atomic():
            def recipe_pn_query(pn):
                # Candidates on master; note the descending index_preference
                # ordering here (unlike the oe-classic-only variant)
                return Recipe.objects.filter(layerbranch__branch__name='master').filter(pn=pn).order_by('-layerbranch__layer__index_preference')
            recipequery = ClassicRecipe.objects.filter(layerbranch=layerbranch).filter(deleted=False).filter(cover_status__in=['U', 'N'])
            for recipe in recipequery:
                if recipe.pn in skiplist:
                    logger.debug('Skipping %s' % recipe.pn)
                    continue
                updated = False
                # Sanitize PN to match OE naming conventions
                sanepn = recipe.pn.lower().replace('_', '-')
                replquery = recipe_pn_query(sanepn)
                found = False
                for replrecipe in replquery:
                    logger.debug('Matched %s in layer %s' % (recipe.pn, replrecipe.layerbranch.layer.name))
                    recipe.cover_layerbranch = replrecipe.layerbranch
                    recipe.cover_pn = replrecipe.pn
                    recipe.cover_status = 'D'
                    recipe.cover_verified = False
                    recipe.save()
                    updated = True
                    found = True
                    break
                if not found:
                    if layerbranch.layer.name == 'oe-classic':
                        if recipe.pn.endswith('-native') or recipe.pn.endswith('-nativesdk'):
                            # Look for a base recipe providing the suffix via
                            # BBCLASSEXTEND
                            searchpn, _, suffix = recipe.pn.rpartition('-')
                            replquery = recipe_pn_query(searchpn)
                            for replrecipe in replquery:
                                if suffix in replrecipe.bbclassextend.split():
                                    logger.debug('Found BBCLASSEXTEND of %s to cover %s in layer %s' % (replrecipe.pn, recipe.pn, replrecipe.layerbranch.layer.name))
                                    recipe.cover_layerbranch = replrecipe.layerbranch
                                    recipe.cover_pn = replrecipe.pn
                                    recipe.cover_status = 'P'
                                    recipe.cover_verified = False
                                    recipe.save()
                                    updated = True
                                    found = True
                                    break
                        if not found and recipe.pn.endswith('-nativesdk'):
                            # Modern naming is nativesdk-<pn>
                            searchpn, _, _ = recipe.pn.rpartition('-')
                            replquery = recipe_pn_query('nativesdk-%s' % searchpn)
                            for replrecipe in replquery:
                                logger.debug('Found replacement %s to cover %s in layer %s' % (replrecipe.pn, recipe.pn, replrecipe.layerbranch.layer.name))
                                recipe.cover_layerbranch = replrecipe.layerbranch
                                recipe.cover_pn = replrecipe.pn
                                recipe.cover_status = 'R'
                                recipe.cover_verified = False
                                recipe.save()
                                updated = True
                                found = True
                                break
                    else:
                        # Non-oe-classic: heuristics based on the first
                        # source URL
                        if recipe.source_set.exists():
                            source0 = recipe.source_set.first()
                            if 'pypi.' in source0.url or 'pythonhosted.org' in source0.url:
                                # Python package: try python3-/python- prefixed
                                # names, with and without a leading "py"
                                attempts = ['python3-%s' % sanepn, 'python-%s' % sanepn]
                                if sanepn.startswith('py'):
                                    attempts.extend(['python3-%s' % sanepn[2:], 'python-%s' % sanepn[2:]])
                                for attempt in attempts:
                                    replquery = recipe_pn_query(attempt)
                                    for replrecipe in replquery:
                                        logger.debug('Found match %s to cover %s in layer %s' % (replrecipe.pn, recipe.pn, replrecipe.layerbranch.layer.name))
                                        recipe.cover_layerbranch = replrecipe.layerbranch
                                        recipe.cover_pn = replrecipe.pn
                                        recipe.cover_status = 'D'
                                        recipe.cover_verified = False
                                        recipe.save()
                                        updated = True
                                        found = True
                                        break
                                    if found:
                                        break
                                if not found:
                                    recipe.classic_category = 'python'
                                    recipe.save()
                                    updated = True
                            elif 'cpan.org' in source0.url:
                                # Perl module: map to OE's lib<name>-perl
                                # naming convention
                                perlpn = sanepn
                                if perlpn.startswith('perl-'):
                                    perlpn = perlpn[5:]
                                if not (perlpn.startswith('lib') and perlpn.endswith('-perl')):
                                    perlpn = 'lib%s-perl' % perlpn
                                replquery = recipe_pn_query(perlpn)
                                for replrecipe in replquery:
                                    logger.debug('Found match %s to cover %s in layer %s' % (replrecipe.pn, recipe.pn, replrecipe.layerbranch.layer.name))
                                    recipe.cover_layerbranch = replrecipe.layerbranch
                                    recipe.cover_pn = replrecipe.pn
                                    recipe.cover_status = 'D'
                                    recipe.cover_verified = False
                                    recipe.save()
                                    updated = True
                                    found = True
                                    break
                                if not found:
                                    recipe.classic_category = 'perl'
                                    recipe.save()
                                    updated = True
                            elif 'kde.org' in source0.url or 'github.com/KDE' in source0.url:
                                recipe.classic_category = 'kde'
                                recipe.save()
                                updated = True
                    if not found:
                        # PN-prefix based categorization
                        if recipe.pn.startswith('R-'):
                            recipe.classic_category = 'R'
                            recipe.save()
                            updated = True
                        elif recipe.pn.startswith('rubygem-'):
                            recipe.classic_category = 'ruby'
                            recipe.save()
                            updated = True
                        elif recipe.pn.startswith('jdk-'):
                            sanepn = sanepn[4:]
                            replquery = recipe_pn_query(sanepn)
                            for replrecipe in replquery:
                                logger.debug('Found match %s to cover %s in layer %s' % (replrecipe.pn, recipe.pn, replrecipe.layerbranch.layer.name))
                                recipe.cover_layerbranch = replrecipe.layerbranch
                                recipe.cover_pn = replrecipe.pn
                                recipe.cover_status = 'D'
                                recipe.cover_verified = False
                                recipe.save()
                                updated = True
                                found = True
                                break
                            # NOTE(review): category is set whether or not a
                            # match was found above - confirm intended
                            recipe.classic_category = 'java'
                            recipe.save()
                            updated = True
                        elif recipe.pn.startswith('golang-'):
                            if recipe.pn.startswith('golang-github-'):
                                sanepn = 'go-' + sanepn[14:]
                            else:
                                sanepn = 'go-' + sanepn[7:]
                            replquery = recipe_pn_query(sanepn)
                            for replrecipe in replquery:
                                logger.debug('Found match %s to cover %s in layer %s' % (replrecipe.pn, recipe.pn, replrecipe.layerbranch.layer.name))
                                recipe.cover_layerbranch = replrecipe.layerbranch
                                recipe.cover_pn = replrecipe.pn
                                recipe.cover_status = 'D'
                                recipe.cover_verified = False
                                recipe.save()
                                updated = True
                                found = True
                                break
                            # NOTE(review): category is set whether or not a
                            # match was found above - confirm intended
                            recipe.classic_category = 'go'
                            recipe.save()
                            updated = True
                        elif recipe.pn.startswith('gnome-'):
                            recipe.classic_category = 'gnome'
                            recipe.save()
                            updated = True
                        elif recipe.pn.startswith('perl-'):
                            recipe.classic_category = 'perl'
                            recipe.save()
                            updated = True
                if updated and updateobj:
                    rupdate, _ = ComparisonRecipeUpdate.objects.get_or_create(update=updateobj, recipe=recipe)
                    rupdate.link_updated = True
                    rupdate.save()
            if options.dryrun:
                raise DryRunRollbackException()
    except DryRunRollbackException:
        pass

    sys.exit(0)