def generate(self, manifest, tags):
    # Generate conda package files and build driver script
    shutil.rmtree(self.config.output_dir, ignore_errors=True)
    os.makedirs(self.config.output_dir)

    print "generating recipes: "
    for (product, sha, version, deps) in manifest.itervalues():
        if product in self.config.internal_products: continue
        if product in self.config.skip_products: continue

        # override gitrevs (these are temporary hacks/fixes; they should go away
        # when those branches are merged)
        sha = self.config.override_gitrev.get(product, sha)

        # Where is the source?
        giturl = self.config.get_giturl(product)

        self.gen_conda_package(product, sha, version, giturl, deps, tags)
    print "done."

    #
    # write out the rebuild script for packages that need rebuilding
    #
    rebuilds = []
    print "generating rebuild script:"
    for pi in self.products.itervalues():
        conda_version = "%s-%s" % (pi.version, pi.build_string)
        rebuilds.append("rebuild %s %s %s %s" % (pi.conda_name, conda_version, pi.product, pi.eups_version))

        if not pi.is_built:
            print " will build: %s-%s" % (pi.conda_name, conda_version)
        else:
            with open(os.path.join(self.config.output_dir, pi.conda_name, '.done'), 'w'):  # create the .done marker file
                pass
            print " already built: %s-%s" % (pi.conda_name, conda_version)

        if pi.conda_name in self.config.skip_build:  # create the .skip.$PLATFORM marker files
            for platform in self.config.skip_build[pi.conda_name]:
                with open(os.path.join(self.config.output_dir, pi.conda_name, '.skip.' + platform), 'w'):
                    pass
            print " (builds will always be skipped on %s)" % ', '.join(self.config.skip_build[pi.conda_name])
    print "done."

    fill_out_template(os.path.join(self.config.output_dir, 'rebuild.sh'),
                      os.path.join(self.config.template_dir, 'rebuild.sh.template'),
                      output_dir=self.config.output_dir,
                      rebuilds='\n'.join(rebuilds))
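
# fill_out_template() is used by both methods in this section but is not defined
# here. The sketch below is only an illustration of the behaviour the call sites
# appear to assume (a hypothetical stand-in, not the project's implementation):
# placeholders are substituted from keyword arguments, and placeholders that are
# not supplied survive untouched, so meta.yaml can be filled in two passes
# (buildnum/build_string are only filled in later). The $name placeholder syntax
# of string.Template is an assumption.
def fill_out_template_sketch(output_path, template_path, **variables):
    from string import Template
    with open(template_path) as fp:
        text = Template(fp.read()).safe_substitute(**variables)
    with open(output_path, 'w') as fp:
        fp.write(text)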
def gen_conda_package(self, product, sha, eups_version, giturl, eups_deps, eups_tags):
    # What do we call this product in conda?
    conda_name = self.config.conda_name_for(product)

    # convert to conda version
    version, build_string_prefix, buildnum, compliant = eups_to_conda_version(product, eups_version, giturl)

    # warn if the version is not compliant
    problem = "" if compliant else " [WARNING: version format incompatible with conda]"

    # write out a progress message
    self.report_progress(conda_name, "%s%s" % (version, problem))

    #
    # process dependencies
    #
    eups_deps = set(eups_deps)
    eups_deps -= self.config.skip_products  # skip unwanted dependencies

    # Now start tracking runtime vs build dependencies separately
    # FIXME: We should do this from the start, but EUPS still isn't tracking the two separately
    bdeps, rdeps = [], []
    depends_on_internal = False
    for prod in eups_deps:
        # transform to Anaconda product names
        dep_conda_name = self.config.conda_name_for(prod)

        internals = self.config.internal_products.get(prod, None)
        depends_on_internal |= prod in self.config.internal_products
        if internals is not None:
            bdeps.append(internals.get('build', dep_conda_name))
            rdeps.append(internals.get('run', dep_conda_name))
        else:
            bdeps.append(dep_conda_name)
            rdeps.append(dep_conda_name)

    # If we depend on one of the internal packages, make sure we depend on
    # the lsst-product-configs package as well, as that's where the .table
    # and .cfg files are.
    if depends_on_internal:
        # add lsst-product-configs to the build queue, then fish out the
        # version string
        self.copy_additional_recipe('lsst-product-configs')
        lpc_version = self.products['lsst-product-configs'].version

        # inject a dep on the specific version
        product_configs = 'lsst-product-configs ==%s' % lpc_version
        bdeps.append(product_configs)
        rdeps.append(product_configs)

    bplus, rplus = self.add_missing_deps(conda_name)  # manually add any missing dependencies
    bdeps += bplus
    rdeps += rplus

    bdeps, rdeps = sorted(bdeps), sorted(rdeps)  # sort, so the ordering is predictable in meta.yaml

    #
    # Create the Conda packaging spec files
    #
    dir = os.path.join(self.config.output_dir, conda_name)
    os.makedirs(dir)

    # Copy any patches into the recipe dir
    patches = self.prepare_patches(product, dir)

    # build.sh (TBD: use exact eups versions, instead of -r .)
    setups = []
    SEP = 'setup '
    setups = SEP + ('\n' + SEP).join(setups) if setups else ''

    template_dir = self.config.template_dir
    fill_out_template(os.path.join(dir, 'build.sh'),
                      os.path.join(template_dir, 'build.sh.template'),
                      setups=setups,
                      eups_version=eups_version,
                      eups_tags=' '.join(eups_tags + self.config.global_eups_tags))

    # pre-link.sh (to add the global tags)
    fill_out_template(os.path.join(dir, 'pre-link.sh'),
                      os.path.join(template_dir, 'pre-link.sh.template'),
                      product=product)

    # meta.yaml
    rdeps = [self.conda_version_spec(p) if p in self.products else p for p in rdeps]
    bdeps = [self.conda_version_spec(p) if p in self.products else p for p in bdeps]
    reqstr_r = create_yaml_list(rdeps)
    reqstr_b = create_yaml_list(bdeps)

    meta_yaml = os.path.join(dir, 'meta.yaml')
    fill_out_template(meta_yaml,
                      os.path.join(template_dir, 'meta.yaml.template'),
                      productNameLowercase=conda_name.lower(),
                      version=version,
                      gitrev=sha,
                      giturl=giturl,
                      build_req=reqstr_b,
                      run_req=reqstr_r,
                      patches=patches)

    # The recipe is now (almost) complete.
    # Find our build number. If this package already exists in the release DB,
    # re-use the build number and mark it as '.done' so it doesn't get rebuilt.
    # Otherwise, increment the max build number by one and use that.
    buildnum, build_string, is_built = self.get_build_info(conda_name.lower(), version, dir, build_string_prefix)

    # Fill in the build number and string
    fill_out_template(meta_yaml, meta_yaml,
                      buildnum=buildnum,
                      build_string=build_string)

    # record we've seen this product
    self.products[conda_name] = ProductInfo(conda_name, version, build_string, buildnum, product, eups_version, is_built, True)
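
# ProductInfo is constructed at the end of gen_conda_package() but its definition
# is not part of this section. A namedtuple with the fields actually accessed in
# generate() would look roughly like the sketch below; the name of the final
# field is a guess (it is always passed as True above).
from collections import namedtuple

ProductInfoSketch = namedtuple('ProductInfoSketch', [
    'conda_name', 'version', 'build_string', 'buildnum',
    'product', 'eups_version', 'is_built',
    'is_ours',  # hypothetical name for the last positional argument
])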
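
# create_yaml_list() is referenced above but not shown in this section. A
# plausible sketch, assuming its output is spliced into the requirements:
# block of meta.yaml.template as one "- <spec>" entry per line; the exact
# indentation depends on the real template, so four spaces is an assumption.
def create_yaml_list_sketch(deps, indent='    '):
    # e.g. ['python', 'lsst-afw ==12.0'] -> "    - python\n    - lsst-afw ==12.0"
    return '\n'.join('%s- %s' % (indent, dep) for dep in deps)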