def build(m, get_src=True):
    """Build the package described by metadata *m* inside the build prefix.

    Wipes the build prefix, installs the recipe's build dependencies, runs
    the recipe's build script (build.sh, or windows.build on win32), then
    archives the newly created files into a .tar.bz2, checks it, and runs
    the package tests.

    m       -- recipe metadata object (provides ms_depends, get_section, ...)
    get_src -- when True, fetch/unpack the recipe's source before building
    """
    # Start from a clean prefix containing only the build dependencies.
    rm_rf(prefix)
    create_env(prefix, [ms.spec for ms in m.ms_depends('build')])
    print("BUILD START:", m.dist())
    if get_src:
        source.provide(m.path, m.get_section('source'))
    assert isdir(source.WORK_DIR)
    if os.listdir(source.get_dir()):
        print("source tree in:", source.get_dir())
    else:
        print("no source")
    rm_rf(info_dir)
    # Snapshot the prefix contents so new files can be found by diffing.
    files1 = prefix_files()
    if sys.platform == 'win32':
        import conda.builder.windows as windows
        windows.build(m.path)
    else:
        env = environ.get_dict()
        env['RECIPE_DIR'] = m.path
        cmd = ['/bin/bash', '-x', '-e', join(m.path, 'build.sh')]
        _check_call(cmd, env=env, cwd=source.get_dir())
    create_entry_points(m.get_value('build/entry_points'))
    post_process()
    assert not exists(info_dir)
    files2 = prefix_files()
    sorted_files = sorted(files2 - files1)
    post_build(sorted_files)
    create_info_files(m, sorted_files)
    files3 = prefix_files()
    fix_permissions()
    path = bldpkg_path(m)
    # Context manager closes the archive even if t.add() raises; the
    # original leaked the open tarfile handle on error.
    with tarfile.open(path, 'w:bz2') as t:
        for f in sorted(files3 - files1):
            t.add(join(prefix, f), f)
    print("BUILD END:", m.dist())
    # we're done building, perform some checks
    tarcheck.check_all(path)
    update_index(bldpkgs_dir)
    # remove from packages, because we're going to test it
    rm_pkgs_cache(m.dist())
    test(m)
def build(m, get_src=True, pypi=False):
    """Build the package described by metadata *m* inside the build prefix.

    m       -- recipe metadata object
    get_src -- when True, fetch/unpack the recipe's source before building
    pypi    -- forwarded to create_env (PyPI-sourced dependency resolution)
    """
    # Start from a clean prefix containing only the build dependencies.
    rm_rf(prefix)
    create_env(prefix, [ms.spec for ms in m.ms_depends('build')], pypi)
    print("BUILD START:", m.dist())
    if get_src:
        source.provide(m.path, m.get_section('source'))
    assert isdir(source.WORK_DIR)
    if os.listdir(source.get_dir()):
        print("source tree in:", source.get_dir())
    else:
        print("no source")
    rm_rf(info_dir)
    # Snapshot the prefix contents so new files can be found by diffing.
    files1 = prefix_files()
    if sys.platform == 'win32':
        import conda.builder.windows as windows
        windows.build(m)
    else:
        build_sh = join(m.path, 'build.sh')
        if exists(build_sh):
            env = environ.get_dict(m)
            cmd = ['/bin/bash', '-x', '-e', build_sh]
            _check_call(cmd, env=env, cwd=source.get_dir())
        else:
            print("no build.sh file")
    create_entry_points(m.get_value('build/entry_points'))
    post_process(preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))
    assert not exists(info_dir)
    files2 = prefix_files()
    # Compute the sorted new-file list once (the original sorted the same
    # set difference twice).
    new_files = sorted(files2 - files1)
    post_build(new_files)
    create_info_files(m, new_files)
    files3 = prefix_files()
    fix_permissions(files3 - files1)
    path = bldpkg_path(m)
    # Context manager closes the archive even if t.add() raises; the
    # original leaked the open tarfile handle on error.
    with tarfile.open(path, 'w:bz2') as t:
        for f in sorted(files3 - files1):
            t.add(join(prefix, f), f)
    print("BUILD END:", m.dist())
    # we're done building, perform some checks
    tarcheck.check_all(path)
    update_index(bldpkgs_dir)
def build(m, get_src=True, pypi=False):
    """Build the package described by metadata *m* inside the build prefix.

    m       -- recipe metadata object
    get_src -- when True, fetch/unpack the recipe's source before building
    pypi    -- forwarded to create_env (PyPI-sourced dependency resolution)
    """
    rm_rf(prefix)
    create_env(prefix, [ms.spec for ms in m.ms_depends('build')], pypi)
    print("BUILD START:", m.dist())
    if get_src:
        source.provide(m.path, m.get_section('source'))
    assert isdir(source.WORK_DIR)
    if os.listdir(source.get_dir()):
        print("source tree in:", source.get_dir())
    else:
        print("no source")
    rm_rf(info_dir)
    # Snapshot the prefix contents so new files can be found by diffing.
    files1 = prefix_files()
    if sys.platform == 'win32':
        import conda.builder.windows as windows
        windows.build(m)
    else:
        build_sh = join(m.path, 'build.sh')
        if exists(build_sh):
            env = environ.get_dict(m)
            cmd = ['/bin/bash', '-x', '-e', build_sh]
            _check_call(cmd, env=env, cwd=source.get_dir())
        else:
            print("no build.sh file")
    create_entry_points(m.get_value('build/entry_points'))
    post_process(preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))
    assert not exists(info_dir)
    files2 = prefix_files()
    # Compute the sorted new-file list once (the original sorted the same
    # set difference twice).
    new_files = sorted(files2 - files1)
    post_build(new_files)
    create_info_files(m, new_files)
    files3 = prefix_files()
    fix_permissions(files3 - files1)
    path = bldpkg_path(m)
    # Context manager closes the archive even if t.add() raises; the
    # original leaked the open tarfile handle on error.
    with tarfile.open(path, 'w:bz2') as t:
        for f in sorted(files3 - files1):
            t.add(join(prefix, f), f)
    print("BUILD END:", m.dist())
    # we're done building, perform some checks
    tarcheck.check_all(path)
    update_index(bldpkgs_dir)
def build(m):
    """Run the recipe's bld.bat for metadata *m* on Windows.

    Writes a generated header (MSVC env setup, echo on, one `set` line per
    environment variable) in front of the recipe's bld.bat, drops the
    combined script into the source directory, and executes it via COMSPEC.
    """
    env = dict(os.environ)
    env.update(environ.get_dict(m))

    # Make sure the Library bin/include/lib directories exist up front.
    for suffix in ('BIN', 'INC', 'LIB'):
        lib_path = env['LIBRARY_' + suffix]
        if not isdir(lib_path):
            os.makedirs(lib_path)

    src_dir = source.get_dir()
    recipe_bat = join(m.path, 'bld.bat')
    with open(recipe_bat) as src:
        recipe_script = src.read()

    with open(join(src_dir, 'bld.bat'), 'w') as dst:
        dst.write(msvc_env_cmd())
        # more debuggable with echo on
        dst.write('@echo on\n')
        for kv in iteritems(env):
            dst.write('set %s=%s\n' % kv)
        dst.write("REM ===== end generated header =====\n")
        dst.write(recipe_script)

    cmd = [os.environ['COMSPEC'], '/c', 'bld.bat']
    _check_call(cmd, cwd=src_dir)
    kill_processes()
    fix_staged_scripts()
def execute(args, parser):
    """Entry point for `conda build`: build each recipe in args.recipe.

    A recipe given as a file is treated as a tarball and unpacked into a
    temporary directory (removed afterwards); otherwise it is a directory.
    Depending on flags, either tests the package, only provides the source,
    or performs a full build, then prints the binstar upload hint.
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    import conda.builder.build as build
    from conda.builder.config import croot
    import conda.builder.source as source
    from conda.builder.metadata import MetaData
    from conda.lock import Locked

    with Locked(croot):
        for arg in args.recipe:
            if isfile(arg):
                # Recipe supplied as a tarball: unpack into a temp dir.
                recipe_dir = tempfile.mkdtemp()
                tar = tarfile.open(arg, 'r:*')
                tar.extractall(path=recipe_dir)
                tar.close()
                need_cleanup = True
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            if args.test:
                build.test(m)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                build.build(m)

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            print("""\
# If you want to upload this package to binstar.org, type:
#
# $ binstar upload %s
""" % build.bldpkg_path(m))
def get_dict(m=None):
    """Return the environment dict exported to recipe build scripts.

    Includes build-prefix paths, Python version info, platform-specific
    PATH/compiler settings, and — when metadata *m* is given — the package
    name, version, and recipe directory.
    """
    # Base entries common to all platforms.
    d = {
        'CONDA_BUILD': '1',
        'ARCH': str(cc.bits),
        'PREFIX': build_prefix,
        'PYTHON': build_python,
        'PY3K': str(PY3K),
        'STDLIB_DIR': stdlib_dir,
        'SP_DIR': sp_dir,
        'SYS_PREFIX': sys.prefix,
        'SYS_PYTHON': sys.executable,
        'PY_VER': py_ver,
        'SRC_DIR': source.get_dir(),
    }

    if sys.platform == 'win32':
        # -------- Windows
        d['PATH'] = (join(build_prefix, 'Library', 'bin') + ';' +
                     join(build_prefix) + ';' +
                     join(build_prefix, 'Scripts') + ';%PATH%')
        d['SCRIPTS'] = join(build_prefix, 'Scripts')
        d['LIBRARY_PREFIX'] = join(build_prefix, 'Library')
        d['LIBRARY_BIN'] = join(d['LIBRARY_PREFIX'], 'bin')
        d['LIBRARY_INC'] = join(d['LIBRARY_PREFIX'], 'include')
        d['LIBRARY_LIB'] = join(d['LIBRARY_PREFIX'], 'lib')
    else:
        # -------- Unix
        d['PATH'] = '%s/bin:%s' % (build_prefix, os.getenv('PATH'))
        d['HOME'] = os.getenv('HOME', 'UNKNOWN')
        d['LANG'] = 'en_US.UTF-8'
        d['PKG_CONFIG_PATH'] = join(build_prefix, 'lib', 'pkgconfig')

    if sys.platform == 'darwin':
        # -------- OSX
        d['OSX_ARCH'] = 'i386' if cc.bits == 32 else 'x86_64'
        d['CFLAGS'] = '-arch %(OSX_ARCH)s' % d
        d['CXXFLAGS'] = d['CFLAGS']
        d['LDFLAGS'] = d['CFLAGS']
        d['MACOSX_DEPLOYMENT_TARGET'] = '10.5'
    elif sys.platform.startswith('linux'):
        # -------- Linux
        d['LD_RUN_PATH'] = build_prefix + '/lib'

    if m:
        d['PKG_NAME'] = m.name()
        d['PKG_VERSION'] = m.version()
        d['RECIPE_DIR'] = m.path
    return d
def execute(args, parser):
    """Entry point for `conda build`: build each recipe directory in
    args.recipe, or only test / provide source depending on flags.
    """
    import sys
    from os.path import abspath, isdir

    import conda.builder.build as build
    import conda.builder.source as source
    from conda.builder.metadata import MetaData

    for arg in args.recipe:
        recipe_dir = abspath(arg)
        if not isdir(recipe_dir):
            sys.exit("Error: no such directory: %s" % recipe_dir)

        m = MetaData(recipe_dir)
        if args.test:
            build.test(m)
        elif args.source:
            source.provide(m.path, m.get_section('source'))
            # Changed from the Python-2-only `print 'Source tree in:', ...`
            # statement to the print() function for consistency with the
            # rest of the file, which already calls print(...) with
            # multiple arguments.
            print('Source tree in:', source.get_dir())
        else:
            build.build(m)
def execute(args, parser):
    """Entry point for `conda build`: build each recipe in args.recipe.

    Accepts recipe tarballs (unpacked to a temp dir), recipe directories,
    spec names resolved against <root>/conda-recipes, and — with --use-pypi
    — specs for which a skeleton recipe is generated on the fly. Supports
    --check, --output, --test, --source, and --notest; hands the built
    package to handle_binstar_upload afterwards.
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile, join

    from conda.lock import Locked
    import conda.builder.build as build
    import conda.builder.source as source
    from conda.builder.config import croot
    from conda.builder.metadata import MetaData
    # Bug fix: the conda-recipes fallback below reads config.root_dir, but
    # `config` was never imported, so that path raised NameError.
    # NOTE(review): root_dir is presumed to live in conda.config — confirm.
    import conda.config as config

    with Locked(croot):
        for arg in args.recipe:
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                # See if it's a spec and the directory is in conda-recipes
                recipe_dir = join(config.root_dir, 'conda-recipes', arg)
                if not isdir(recipe_dir):
                    # if --use-pypi and recipe_dir is a spec
                    # try to create the skeleton
                    if args.pypi:
                        from conda.from_pypi import create_recipe
                        try:
                            recipe_dir = create_recipe(arg)
                        except Exception:
                            # Narrowed from a bare `except:` so that
                            # KeyboardInterrupt/SystemExit still propagate.
                            recipe_dir = abspath(arg)
                if not isdir(recipe_dir):
                    sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, pypi=args.pypi)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                build.build(m, pypi=args.pypi)
                if not args.notest:
                    build.test(m, pypi=args.pypi)

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            handle_binstar_upload(build.bldpkg_path(m), args)
def execute(args, parser):
    """Entry point for `conda build`: build each recipe in args.recipe.

    Accepts recipe tarballs (unpacked to a temp dir) or recipe directories.
    Supports --check, --output, --test, and --source; afterwards prompts
    for (or honours --binstar-upload) and performs a binstar upload.
    """
    import sys
    import shutil
    import subprocess  # bug fix: subprocess.call is used below but was never imported
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    import conda.builder.build as build
    from conda.builder.external import find_executable
    from conda.builder.config import croot
    import conda.builder.source as source
    from conda.builder.metadata import MetaData
    from conda.lock import Locked
    # NOTE(review): common.confirm_yn is called below but `common` has no
    # visible import in this chunk — presumed to be conda.cli.common,
    # imported at module level; verify against the top of the file.

    with Locked(croot):
        for arg in args.recipe:
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                build.build(m)

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if args.binstar_upload is None:
                args.yes = False
                args.dry_run = False
                upload = common.confirm_yn(
                    args,
                    message="Do you want to upload this "
                            "package to binstar",
                    default='yes',
                    exit_no=False)
            else:
                upload = args.binstar_upload

            if not upload:
                print("""\
# If you want to upload this package to binstar.org later, type:
#
# $ binstar upload %s
""" % build.bldpkg_path(m))
                continue

            binstar = find_executable('binstar')
            if binstar is None:
                sys.exit('''
Error: cannot locate binstar (required for upload)
# Try:
# $ conda install binstar
''')
            subprocess.call([binstar, 'upload', build.bldpkg_path(m)])
def get_dict(m=None):
    """Return the environment dict exported to recipe build scripts.

    Includes build-prefix paths, Python/R interpreters, Tcl/Tk locations,
    platform-specific PATH and compiler/linker flags, and — when metadata
    *m* is given — the package name, version, and recipe directory.
    """
    # Base entries common to all platforms.
    d = {
        'CONDA_BUILD': '1',
        'ARCH': str(cc.bits),
        'PREFIX': build_prefix,
        'PYTHON': build_python,
        'R': build_r,
        'PY3K': str(PY3K),
        'STDLIB_DIR': stdlib_dir,
        'SP_DIR': sp_dir,
        'SYS_PREFIX': sys.prefix,
        'SYS_PYTHON': sys.executable,
        'PY_VER': py_ver,
        'SRC_DIR': source.get_dir(),
        'TCL_CONFIG': join(build_prefix, 'lib/tclConfig.sh'),
        'TCL_LIBRARY': join(build_prefix, 'lib/tcl8.5'),
        'TK_CONFIG': join(build_prefix, 'lib/tkConfig.sh'),
        'TK_LIBRARY': join(build_prefix, 'lib/tk8.5'),
    }

    if sys.platform == 'win32':
        # -------- Windows
        d['PATH'] = (join(build_prefix, 'Library', 'bin') + ';' +
                     join(build_prefix) + ';' +
                     join(build_prefix, 'Scripts') + ';%PATH%')
        d['SCRIPTS'] = join(build_prefix, 'Scripts')
        d['LIBRARY_PREFIX'] = join(build_prefix, 'Library')
        d['LIBRARY_BIN'] = join(d['LIBRARY_PREFIX'], 'bin')
        d['LIBRARY_INC'] = join(d['LIBRARY_PREFIX'], 'include')
        d['LIBRARY_LIB'] = join(d['LIBRARY_PREFIX'], 'lib')
    else:
        # -------- Unix
        d['PATH'] = '%s/bin:%s' % (build_prefix, os.getenv('PATH'))
        d['HOME'] = os.getenv('HOME', 'UNKNOWN')
        d['LANG'] = 'en_US.UTF-8'
        d['PKG_CONFIG_PATH'] = join(build_prefix, 'lib', 'pkgconfig')

    if sys.platform == 'darwin':
        # -------- OSX
        d['OSX_ARCH'] = 'i386' if cc.bits == 32 else 'x86_64'
        d['CFLAGS'] = '-arch %(OSX_ARCH)s' % d
        d['CXXFLAGS'] = d['CFLAGS']
        d['LDFLAGS'] = d['CFLAGS']
        d['MACOSX_DEPLOYMENT_TARGET'] = '10.5'
    elif sys.platform.startswith('linux'):
        # -------- Linux
        d['LD_RUN_PATH'] = build_prefix + '/lib'
        # trent: I'm not sure why we weren't setting these. Is it the
        # responsibility of the build.sh-authorer? It's easy to forget,
        # and it can break things in some *really* bad ways if we pick
        # up the wrong headers/libs during build versus runtime.
        cflags = "-I%s/include" % build_prefix
        ldflags = "-L%s/lib -lgfortran" % build_prefix
        # Of course the -lgfortran one is highly questionable -- definitely
        # needed for anything linking against gfortran (like everything in
        # the R ecosystem). But if you don't have system as a build dep...
        # it'll probably bomb out.
        d.update({
            'CFLAGS': cflags,
            'FFLAGS': cflags,
            'FCFLAGS': cflags,
            'CPPFLAGS': cflags,
            'CXXFLAGS': cflags,
            'OBJCFLAGS': cflags,
            'PKG_CPPFLAGS': cflags,
            'LDFLAGS': ldflags,
            'PKG_LDFLAGS': ldflags,
            'LAPACK_LDFLAGS': ldflags,
        })

    if m:
        d['PKG_NAME'] = m.name()
        d['PKG_VERSION'] = m.version()
        d['RECIPE_DIR'] = m.path
    return d
def execute(args, parser):
    """Entry point for `conda build`: build each recipe in args.recipe.

    Accepts recipe tarballs (unpacked to a temp dir), recipe directories,
    spec names resolved against <root>/conda-recipes, and — with --use-pypi
    — specs for which a skeleton recipe is generated on the fly. Supports
    --check, --output, --test, --source, and --notest; uploads to binstar
    only after a successful build.
    """
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile, join

    from conda.lock import Locked
    import conda.builder.build as build
    import conda.builder.source as source
    from conda.builder.config import croot
    from conda.builder.metadata import MetaData
    # Bug fix: the conda-recipes fallback below reads config.root_dir, but
    # `config` was never imported, so that path raised NameError.
    # NOTE(review): root_dir is presumed to live in conda.config — confirm.
    import conda.config as config

    check_external()
    with Locked(croot):
        for arg in args.recipe:
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                # See if it's a spec and the directory is in conda-recipes
                recipe_dir = join(config.root_dir, 'conda-recipes', arg)
                if not isdir(recipe_dir):
                    # if --use-pypi and recipe_dir is a spec
                    # try to create the skeleton
                    if args.pypi:
                        from conda.from_pypi import create_recipe
                        try:
                            recipe_dir = create_recipe(arg)
                        except Exception:
                            # Narrowed from a bare `except:` so that
                            # KeyboardInterrupt/SystemExit still propagate.
                            recipe_dir = abspath(arg)
                if not isdir(recipe_dir):
                    sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            # Only upload when a real build happened (not for check/output/
            # test/source runs).
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, pypi=args.pypi)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                build.build(m, pypi=args.pypi)
                if not args.notest:
                    build.test(m, pypi=args.pypi)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
def get_dict(m=None):
    """Return the environment dict exported to recipe build scripts.

    Includes build-prefix paths, Python/R interpreters, Tcl/Tk locations,
    platform-specific PATH and compiler/linker flags, and — when metadata
    *m* is given — the package name, version, and recipe directory.
    """
    d = {'CONDA_BUILD': '1'}
    d['ARCH'] = str(cc.bits)
    d['PREFIX'] = build_prefix
    d['PYTHON'] = build_python
    d['R'] = build_r
    d['PY3K'] = str(PY3K)
    d['STDLIB_DIR'] = stdlib_dir
    d['SP_DIR'] = sp_dir
    d['SYS_PREFIX'] = sys.prefix
    d['SYS_PYTHON'] = sys.executable
    d['PY_VER'] = py_ver
    d['SRC_DIR'] = source.get_dir()
    d['TCL_CONFIG'] = join(build_prefix, 'lib/tclConfig.sh')
    d['TCL_LIBRARY'] = join(build_prefix, 'lib/tcl8.5')
    d['TK_CONFIG'] = join(build_prefix, 'lib/tkConfig.sh')
    d['TK_LIBRARY'] = join(build_prefix, 'lib/tk8.5')

    if sys.platform == 'win32':
        # -------- Windows
        d['PATH'] = (join(build_prefix, 'Library', 'bin') + ';' +
                     join(build_prefix) + ';' +
                     join(build_prefix, 'Scripts') + ';%PATH%')
        d['SCRIPTS'] = join(build_prefix, 'Scripts')
        d['LIBRARY_PREFIX'] = join(build_prefix, 'Library')
        d['LIBRARY_BIN'] = join(d['LIBRARY_PREFIX'], 'bin')
        d['LIBRARY_INC'] = join(d['LIBRARY_PREFIX'], 'include')
        d['LIBRARY_LIB'] = join(d['LIBRARY_PREFIX'], 'lib')
    else:
        # -------- Unix
        d['PATH'] = '%s/bin:%s' % (build_prefix, os.getenv('PATH'))
        d['HOME'] = os.getenv('HOME', 'UNKNOWN')
        d['LANG'] = 'en_US.UTF-8'
        d['PKG_CONFIG_PATH'] = join(build_prefix, 'lib', 'pkgconfig')

    if sys.platform == 'darwin':
        # -------- OSX
        d['OSX_ARCH'] = 'i386' if cc.bits == 32 else 'x86_64'
        d['CFLAGS'] = '-arch %(OSX_ARCH)s' % d
        d['CXXFLAGS'] = d['CFLAGS']
        d['LDFLAGS'] = d['CFLAGS']
        d['MACOSX_DEPLOYMENT_TARGET'] = '10.5'
    elif sys.platform.startswith('linux'):
        # -------- Linux
        d['LD_RUN_PATH'] = build_prefix + '/lib'
        # trent: I'm not sure why we weren't setting these. Is it the
        # responsibility of the build.sh-authorer? It's easy to forget,
        # and it can break things in some *really* bad ways if we pick
        # up the wrong headers/libs during build versus runtime.
        cflags = "-I%s/include" % build_prefix
        ldflags = "-L%s/lib -lgfortran" % build_prefix
        # Of course the -lgfortran one is highly questionable -- definitely
        # needed for anything linking against gfortran (like everything in
        # the R ecosystem). But if you don't have system as a build dep...
        # it'll probably bomb out.
        for key in ('CFLAGS', 'FFLAGS', 'FCFLAGS', 'CPPFLAGS',
                    'CXXFLAGS', 'OBJCFLAGS', 'PKG_CPPFLAGS'):
            d[key] = cflags
        for key in ('LDFLAGS', 'PKG_LDFLAGS', 'LAPACK_LDFLAGS'):
            d[key] = ldflags

    if m:
        d['PKG_NAME'] = m.name()
        d['PKG_VERSION'] = m.version()
        d['RECIPE_DIR'] = m.path
    return d