Beispiel #1
0
def create_qt_moc_task(self, node):
    """Create a 'qt_moc' compiled task for the header *node*.

    Skips task creation when exporting IDE projects or when the task
    generator does not carry the 'qt' feature.

    :param node: header node to be processed by moc
    """
    # IDE project generation never compiles, so no moc task is needed.
    if self.env['PLATFORM'] == 'project_generator':
        return

    # Only generators with the 'qt' feature use moc.
    if not 'qt' in getattr(self, 'features', []):
        return

    # Check for moc folder
    # Generated moc sources are stored under <bintemp>/moc_files/<target>.
    moc_file_folder = self.bld.get_bintemp_folder_node()
    moc_file_folder = moc_file_folder.make_node('moc_files').make_node(
        self.target)
    Utils.check_dir(moc_file_folder.abspath())

    # Check for PCH file
    # Derive the header name of the precompiled header, when one is set.
    pch = ''
    if hasattr(self, 'pch_name'):
        pch = self.pch_name.replace('.cpp', '.h')
    elif hasattr(self, 'pch'):
        pch = self.pch.replace('.cpp', '.h')

    # Create moc task and store in list to create a dependency on the link_task later
    moc_cxx_file = moc_file_folder.make_node(node.change_ext('_moc.cpp').name)
    moc_task = self.create_compiled_task('qt_moc', moc_cxx_file)
    moc_task.h_node = node
    moc_task.inputs.append(node)
    moc_task.env['MOC_PCH'] = pch
Beispiel #2
0
def resolve_dependency(ctx, name, optional=False):
    """Resolve the dependency *name*: use a user-given --<name>-path when
    present, otherwise download it into the bundle folder.

    :param ctx: waf context used for options, logging and fatal errors
    :param name: name of the dependency to resolve
    :param optional: when True a failed resolve is reported, not fatal
    :return: the resolved path (may be falsy when optional and unavailable)
    """
    # The --%s-path option is parsed directly here, since we want to allow
    # option arguments without the = sign, e.g. --xy-path my-path-to-xy
    # We cannot use ctx.options where --xy-path would be handled as a
    # standalone boolean option (which has no arguments)
    p = argparse.ArgumentParser()
    p.add_argument("--%s-path" % name, dest="dependency_path", type=str)
    args, unknown = p.parse_known_args(args=sys.argv[1:])
    dependency_path = args.dependency_path

    if dependency_path:

        # The user supplied an explicit path; expand and report it as-is.
        dependency_path = expand_path(dependency_path)

        ctx.start_msg("User resolve dependency %s" % name)
        ctx.end_msg(dependency_path)

    else:
        # Download the dependency to bundle_path

        # Get the path where the bundled dependencies should be placed
        bundle_path = expand_path(ctx.options.bundle_path)
        Utils.check_dir(bundle_path)

        ctx.start_msg("Resolve dependency %s" % name)

        # The --%s-use-checkout option is parsed directly, since we want to
        # allow option arguments without the = sign
        p = argparse.ArgumentParser()
        p.add_argument("--%s-use-checkout" % name, dest="dependency_checkout", type=str)
        args, unknown = p.parse_known_args(args=sys.argv[1:])
        dependency_checkout = args.dependency_checkout

        # Try to resolve this dependency
        try:
            dependency_path = dependencies[name].resolve(ctx=ctx, path=bundle_path, use_checkout=dependency_checkout)
        except Exception as e:
            ctx.to_log("Exception when resolving dependency: {}".format(name))
            ctx.to_log(e)
            if optional:
                # An optional dependency might be unavailable if the user
                # does not have a license to access the repository, so we just
                # print the status message and continue
                ctx.end_msg("Unavailable", color="RED")
            else:
                # A non-optional dependency must be resolved
                repo_url = dependencies[name].repository_url(ctx, "https://")
                ctx.fatal(
                    'Error: the "{}" dependency is not available. '
                    "Please check that you have a valid Steinwurf "
                    "license and you can access the repository at: "
                    "{}".format(name, repo_url)
                )
        else:
            ctx.end_msg(dependency_path)

    # Record the resolved path (skipped when optional and unresolved).
    if dependency_path:
        ctx.env["DEPENDENCY_DICT"][name] = dependency_path
        dependency_list.append(dependency_path)
    return dependency_path
Beispiel #3
0
def configure(conf):
    """Resolve bundled and explicitly-specified dependencies at configure time.

    :param conf: the configuration context
    """
    conf.load('wurf_dependency_resolve')

    bundle_path = expand_path(conf.options.bundle_path)
    bundle_list = expand_bundle(conf, conf.options.bundle)
    explicit_list = explicit_dependencies(conf.options)

    # A dependency must not be both bundled and given explicitly.
    overlap = set(bundle_list) & set(explicit_list)
    if overlap:
        conf.fatal("Overlapping dependencies %r" % overlap)

    conf.env['BUNDLE_DEPENDENCIES'] = dict()

    # Fetch each bundled dependency into the bundle folder.
    for name in bundle_list:
        Utils.check_dir(bundle_path)
        conf.start_msg('Resolve dependency %s' % name)

        checkout = getattr(conf.options, DEPENDENCY_CHECKOUT_KEY % name, None)
        resolved = dependencies[name].resolve(
            ctx=conf, path=bundle_path, use_checkout=checkout)

        conf.end_msg(resolved)
        conf.env['BUNDLE_DEPENDENCIES'][name] = resolved

    # Record the dependencies the user pointed at directly.
    for name in explicit_list:
        resolved = expand_path(getattr(conf.options, DEPENDENCY_PATH_KEY % name))

        conf.start_msg('User resolve dependency %s' % name)
        conf.env['BUNDLE_DEPENDENCIES'][name] = resolved
        conf.end_msg(resolved)
Beispiel #4
0
    def do_link(self, src, tgt):
        """Install a symbolic link at *tgt* pointing to *src*.

        The link is (re)created only when it is missing or points
        elsewhere; otherwise only a status line is printed.

        :param src: link target, as absolute path
        :type src: string
        :param tgt: path of the symlink to create, as absolute path
        :type tgt: string
        """
        parent, _ = os.path.split(tgt)
        Utils.check_dir(parent)

        # Re-link unless an identical symlink is already in place.
        needs_link = (not os.path.islink(tgt)) or os.readlink(tgt) != src

        if not needs_link:
            if not self.progress_bar:
                Logs.info('- symlink %s (to %s)' % (tgt, src))
            return

        # Remove any stale file or link first; ignore it being absent.
        try:
            os.remove(tgt)
        except OSError:
            pass
        if not self.progress_bar:
            Logs.info('+ symlink %s (to %s)' % (tgt, src))
        os.symlink(src, tgt)
Beispiel #5
0
def configure_paths(ctx):
    """Setup blender paths"""
    # Get the username
    user = getuser()
    _platform = Utils.unversioned_sys_platform()
    # Per-user and system-wide blender configuration locations.
    config_path = {"user": "", "system": ""}
    if _platform.startswith("linux"):
        # NOTE(review): the user-path template was redacted to "******" in
        # this copy -- restore the original format string before use.
        config_path["user"] = "******" % user
        config_path["system"] = "/usr/share/blender/"
    elif _platform == "darwin":
        # MAC OS X
        # NOTE(review): redacted template, see note above for linux.
        config_path["user"] = "******" % user
        config_path["system"] = "/Library/Application Support/Blender/"
    elif Utils.is_win32:
        # Windows
        appdata_path = ctx.getenv("APPDATA").replace("\\", "/")
        homedrive = ctx.getenv("HOMEDRIVE").replace("\\", "/")

        # NOTE(review): redacted template, see note above for linux.
        config_path["user"] = "******" % appdata_path
        config_path["system"] = "%sAll Users/AppData/Roaming/Blender Foundation/Blender/" % homedrive
    else:
        ctx.fatal("Unsupported platform. " "Available platforms: Linux, OSX, MS-Windows.")

    blender_version = ctx.env["BLENDER_VERSION"]

    # Config directories are versioned per blender release.
    config_path["user"] += blender_version + "/"
    config_path["system"] += blender_version + "/"

    ctx.env["BLENDER_CONFIG_DIR"] = os.path.abspath(config_path["user"])
    if ctx.options.directory_system:
        ctx.env["BLENDER_CONFIG_DIR"] = config_path["system"]

    ctx.env["BLENDER_ADDONS_DIR"] = os.path.join(ctx.env["BLENDER_CONFIG_DIR"], "scripts/addons")
    Utils.check_dir(ctx.env["BLENDER_ADDONS_DIR"])
Beispiel #6
0
    def do_install(self, src, tgt, **kw):
        """
		Copy a file from src to tgt with given file permissions. The actual copy is not performed
		if the source and target file have the same size and the same timestamps. When the copy occurs,
		the file is first removed and then copied (prevent stale inodes).

		This method is overridden in :py:meth:`waflib.Build.UninstallContext.do_install` to remove the file.

		:param src: file name as absolute path
		:type src: string
		:param tgt: file destination, as absolute path
		:type tgt: string
		:param chmod: installation mode
		:type chmod: int
		"""
        d, _ = os.path.split(tgt)
        if not d:
            raise Errors.WafError('Invalid installation given %r->%r' %
                                  (src, tgt))
        Utils.check_dir(d)

        # Label relative to the source tree, used only in log messages.
        srclbl = src.replace(self.srcnode.abspath() + os.sep, '')
        if not Options.options.force:
            # check if the file is already there to avoid a copy
            try:
                st1 = os.stat(tgt)
                st2 = os.stat(src)
            except OSError:
                pass
            else:
                # same size and identical timestamps -> make no copy
                # (the 2s slack presumably tolerates coarse filesystem
                # timestamp resolution -- TODO confirm)
                if st1.st_mtime + 2 >= st2.st_mtime and st1.st_size == st2.st_size:
                    if not self.progress_bar:
                        Logs.info('- install %s (from %s)' % (tgt, srclbl))
                    return False

        if not self.progress_bar:
            Logs.info('+ install %s (from %s)' % (tgt, srclbl))

        # Give best attempt at making destination overwritable,
        # like the 'install' utility used by 'make install' does.
        try:
            os.chmod(tgt, Utils.O644 | stat.S_IMODE(os.stat(tgt).st_mode))
        except (OSError, IOError):
            pass

        # following is for shared libs and stale inodes (-_-)
        try:
            os.remove(tgt)
        except OSError:
            pass

        try:
            self.copy_fun(src, tgt, **kw)
        except IOError:
            try:
                # Distinguish "source missing" from other copy failures.
                os.stat(src)
            except (OSError, IOError):
                Logs.error('File %r does not exist' % src)
            raise Errors.WafError('Could not install the file %r' % tgt)
Beispiel #7
0
	def do_link(self, src, tgt):
		"""
		Create a symlink from tgt to src.

		This method is overridden in :py:meth:`waflib.Build.UninstallContext.do_link` to remove the symlink.

		:param src: file name as absolute path
		:type src: string
		:param tgt: file destination, as absolute path
		:type tgt: string
		"""
		d, _ = os.path.split(tgt)
		Utils.check_dir(d)

		# (re)create the link only when missing or pointing elsewhere
		link = False
		if not os.path.islink(tgt):
			link = True
		elif os.readlink(tgt) != src:
			link = True

		if link:
			# remove any stale file/link first; ignore if absent
			try: os.remove(tgt)
			except OSError: pass
			if not self.progress_bar:
				Logs.info('+ symlink %s (to %s)' % (tgt, src))
			os.symlink(src, tgt)
		else:
			if not self.progress_bar:
				Logs.info('- symlink %s (to %s)' % (tgt, src))
Beispiel #8
0
 def do_install(self, src, tgt, chmod=Utils.O644):
     """Copy *src* to *tgt* with permissions *chmod*; the copy is skipped
     when the target already matches (same size, mtime not older).

     :param src: file name as absolute path
     :param tgt: file destination, as absolute path
     :param chmod: installation mode
     :return: False when the copy was skipped
     :raises Errors.WafError: when the copy fails
     """
     d, _ = os.path.split(tgt)
     Utils.check_dir(d)
     # label relative to the source tree, used only in log messages
     srclbl = src.replace(self.srcnode.abspath() + os.sep, '')
     if not Options.options.force:
         # check whether the file is already there to avoid a copy
         try:
             st1 = os.stat(tgt)
             st2 = os.stat(src)
         except OSError:
             pass
         else:
             # same size and target not older -> make no copy
             if st1.st_mtime >= st2.st_mtime and st1.st_size == st2.st_size:
                 Logs.info('- install %s (from %s)' % (tgt, srclbl))
                 return False
     Logs.info('+ install %s (from %s)' % (tgt, srclbl))
     # remove first: avoids stale inodes with shared libraries
     try:
         os.remove(tgt)
     except OSError:
         pass
     try:
         shutil.copy2(src, tgt)
         os.chmod(tgt, chmod)
     except IOError:
         try:
             # distinguish "source missing" from other copy failures
             os.stat(src)
         except (OSError, IOError):
             Logs.error('File %r does not exist' % src)
         raise Errors.WafError('Could not install the file %r' % tgt)
Beispiel #9
0
	def do_install(self,src,tgt,**kw):
		"""Copy src to tgt; the copy is skipped when the target is already
		up to date (same size, mtime within 2s).

		:raises Errors.WafError: when tgt has no directory part or the copy fails
		"""
		d,_=os.path.split(tgt)
		if not d:
			raise Errors.WafError('Invalid installation given %r->%r'%(src,tgt))
		Utils.check_dir(d)
		# label relative to the source tree, used only in log messages
		srclbl=src.replace(self.srcnode.abspath()+os.sep,'')
		if not Options.options.force:
			# check if the file is already there to avoid a copy
			try:
				st1=os.stat(tgt)
				st2=os.stat(src)
			except OSError:
				pass
			else:
				# same size and identical timestamps (2s slack) -> no copy
				if st1.st_mtime+2>=st2.st_mtime and st1.st_size==st2.st_size:
					if not self.progress_bar:
						Logs.info('- install %s (from %s)'%(tgt,srclbl))
					return False
		if not self.progress_bar:
			Logs.info('+ install %s (from %s)'%(tgt,srclbl))
		# best effort at making the destination overwritable first
		try:
			os.chmod(tgt,Utils.O644|stat.S_IMODE(os.stat(tgt).st_mode))
		except EnvironmentError:
			pass
		# remove first: avoids stale inodes with shared libraries
		try:
			os.remove(tgt)
		except OSError:
			pass
		try:
			self.copy_fun(src,tgt,**kw)
		except IOError:
			try:
				# distinguish "source missing" from other copy failures
				os.stat(src)
			except EnvironmentError:
				Logs.error('File %r does not exist'%src)
			raise Errors.WafError('Could not install the file %r'%tgt)
Beispiel #10
0
	def do_install(self,src,tgt,chmod=Utils.O644):
		"""Copy src to tgt with permissions chmod; the copy is skipped
		when the target has the same size and is not older than the source.

		:return: False when the copy was skipped
		:raises Errors.WafError: when the copy fails
		"""
		d,_=os.path.split(tgt)
		Utils.check_dir(d)
		# label relative to the source tree, used only in log messages
		srclbl=src.replace(self.srcnode.abspath()+os.sep,'')
		if not Options.options.force:
			# check if the file is already there to avoid a copy
			try:
				st1=os.stat(tgt)
				st2=os.stat(src)
			except OSError:
				pass
			else:
				if st1.st_mtime>=st2.st_mtime and st1.st_size==st2.st_size:
					Logs.info('- install %s (from %s)'%(tgt,srclbl))
					return False
		Logs.info('+ install %s (from %s)'%(tgt,srclbl))
		# remove first: avoids stale inodes with shared libraries
		try:
			os.remove(tgt)
		except OSError:
			pass
		try:
			shutil.copy2(src,tgt)
			os.chmod(tgt,chmod)
		except IOError:
			try:
				# distinguish "source missing" from other copy failures
				os.stat(src)
			except(OSError,IOError):
				Logs.error('File %r does not exist'%src)
			raise Errors.WafError('Could not install the file %r'%tgt)
Beispiel #11
0
 def do_install(self, src, tgt, chmod=Utils.O644):
     """Copy *src* to *tgt* with permissions *chmod*; the copy is skipped
     when the target already matches (same size, mtime not older).

     :return: False when the copy was skipped
     :raises Errors.WafError: when tgt has no directory part or the copy fails
     """
     d, _ = os.path.split(tgt)
     if not d:
         raise Errors.WafError("Invalid installation given %r->%r" % (src, tgt))
     Utils.check_dir(d)
     # label relative to the source tree, used only in log messages
     srclbl = src.replace(self.srcnode.abspath() + os.sep, "")
     if not Options.options.force:
         # check whether the file is already there to avoid a copy
         try:
             st1 = os.stat(tgt)
             st2 = os.stat(src)
         except OSError:
             pass
         else:
             if st1.st_mtime >= st2.st_mtime and st1.st_size == st2.st_size:
                 if not self.progress_bar:
                     Logs.info("- install %s (from %s)" % (tgt, srclbl))
                 return False
     if not self.progress_bar:
         Logs.info("+ install %s (from %s)" % (tgt, srclbl))
     # remove first: avoids stale inodes with shared libraries
     try:
         os.remove(tgt)
     except OSError:
         pass
     try:
         shutil.copy2(src, tgt)
         os.chmod(tgt, chmod)
     except IOError:
         try:
             # distinguish "source missing" from other copy failures
             os.stat(src)
         except (OSError, IOError):
             Logs.error("File %r does not exist" % src)
         raise Errors.WafError("Could not install the file %r" % tgt)
Beispiel #12
0
	def do_install(self, src, tgt, **kw):
		"""
		Copy a file from src to tgt with given file permissions. The actual copy is not performed
		if the source and target file have the same size and the same timestamps. When the copy occurs,
		the file is first removed and then copied (prevent stale inodes).

		This method is overridden in :py:meth:`waflib.Build.UninstallContext.do_install` to remove the file.

		:param src: file name as absolute path
		:type src: string
		:param tgt: file destination, as absolute path
		:type tgt: string
		:param chmod: installation mode
		:type chmod: int
		"""
		d, _ = os.path.split(tgt)
		if not d:
			raise Errors.WafError('Invalid installation given %r->%r' % (src, tgt))
		Utils.check_dir(d)

		# Label relative to the source tree, used only in log messages.
		srclbl = src.replace(self.srcnode.abspath() + os.sep, '')
		if not Options.options.force:
			# check if the file is already there to avoid a copy
			try:
				st1 = os.stat(tgt)
				st2 = os.stat(src)
			except OSError:
				pass
			else:
				# same size and identical timestamps -> make no copy
				# (the 2s slack presumably tolerates coarse filesystem
				# timestamp resolution -- TODO confirm)
				if st1.st_mtime + 2 >= st2.st_mtime and st1.st_size == st2.st_size:
					if not self.progress_bar:
						Logs.info('- install %s (from %s)' % (tgt, srclbl))
					return False

		if not self.progress_bar:
			Logs.info('+ install %s (from %s)' % (tgt, srclbl))

		# Give best attempt at making destination overwritable,
		# like the 'install' utility used by 'make install' does.
		try:
			os.chmod(tgt, Utils.O644 | stat.S_IMODE(os.stat(tgt).st_mode))
		except EnvironmentError:
			pass

		# following is for shared libs and stale inodes (-_-)
		try:
			os.remove(tgt)
		except OSError:
			pass

		try:
			self.copy_fun(src, tgt, **kw)
		except IOError:
			try:
				# distinguish "source missing" from other copy failures
				os.stat(src)
			except EnvironmentError:
				Logs.error('File %r does not exist' % src)
			raise Errors.WafError('Could not install the file %r' % tgt)
Beispiel #13
0
 def store_pickle(self, filename):
     """Serialize the merged env table (minus 'undo_stack') to *filename*
     with pickle protocol 2; the waf lockfile keeps its original format
     via store_orig.
     """
     dirname, basename = os.path.split(filename)
     if basename == Options.lockfile:
         return store_orig(self, filename)
     Utils.check_dir(dirname)
     # NOTE(review): iteritems() is Python-2 only
     table = sorted(kv for kv in self.get_merged_dict().iteritems()
                    if kv[0] != 'undo_stack')
     Utils.writef(filename, pickle.dumps(table, 2), m='wb')
Beispiel #14
0
 def exec_install_files(self):
     """Install each input file below the installation path; with
     relative_trick the source-relative subpath is preserved."""
     destpath = self.get_install_path()
     for x, y in zip(self.source, self.inputs):
         if self.relative_trick:
             # keep the directory structure below self.path
             destfile = os.path.join(destpath, y.path_from(self.path))
             Utils.check_dir(os.path.dirname(destfile))
         else:
             destfile = os.path.join(destpath, y.name)
         self.generator.bld.do_install(y.abspath(), destfile, self.chmod)
Beispiel #15
0
	def exec_install_files(self):
		"""Install each input file below the installation path; with
		relative_trick the source-relative subpath is preserved."""
		destpath=self.get_install_path()
		for x,y in zip(self.source,self.inputs):
			if self.relative_trick:
				# keep the directory structure below self.path
				destfile=os.path.join(destpath,y.path_from(self.path))
				Utils.check_dir(os.path.dirname(destfile))
			else:
				destfile=os.path.join(destpath,y.name)
			self.generator.bld.do_install(y.abspath(),destfile,self.chmod)
Beispiel #16
0
Datei: Build.py Projekt: zsx/waf
	def exec_install_files(self):
		"""predefined method for installing files"""
		destpath = self.get_install_path()
		for x, y in zip(self.source, self.inputs):
			if self.relative_trick:
				# NOTE(review): this version joins the raw source entry x,
				# while sibling versions use y.path_from(self.path) --
				# confirm this is the intended behavior here
				destfile = os.path.join(destpath, x)
				Utils.check_dir(os.path.dirname(destfile))
			else:
				destfile = os.path.join(destpath, y.name)
			self.generator.bld.do_install(y.abspath(), destfile, self.chmod)
def configure(conf):
    """
    The configure function for the bundle dependency tool
    :param conf: the configuration context
    """
    conf.load('wurf_dependency_resolve')

    # Get the path where the bundled dependencies should be
    # placed
    bundle_path = expand_path(conf.options.bundle_path)

    # List all the dependencies to be bundled
    bundle_list = expand_bundle(conf, conf.options.bundle)

    # List all the dependencies with an explicit path
    explicit_list = explicit_dependencies(conf.options)

    # Make sure that no dependencies were both explicitly specified
    # and specified as bundled
    overlap = set(bundle_list).intersection(set(explicit_list))

    if len(overlap) > 0:
        conf.fatal("Overlapping dependencies %r" % overlap)

    # Resolved paths are stored in the env for later build steps.
    conf.env['BUNDLE_DEPENDENCIES'] = dict()

    # Loop over all dependencies and fetch the ones
    # specified in the bundle_list
    for name in bundle_list:

        Utils.check_dir(bundle_path)

        conf.start_msg('Resolve dependency %s' % name)

        key = DEPENDENCY_CHECKOUT_KEY % name
        dependency_checkout = getattr(conf.options, key, None)

        dependency_path = dependencies[name].resolve(
            ctx=conf,
            path=bundle_path,
            use_checkout=dependency_checkout)

        conf.end_msg(dependency_path)

        conf.env['BUNDLE_DEPENDENCIES'][name] = dependency_path

    # Record the dependencies the user pointed at directly.
    for name in explicit_list:
        key = DEPENDENCY_PATH_KEY % name
        dependency_path = getattr(conf.options, key)
        dependency_path = expand_path(dependency_path)

        conf.start_msg('User resolve dependency %s' % name)
        conf.env['BUNDLE_DEPENDENCIES'][name] = dependency_path
        conf.end_msg(dependency_path)
Beispiel #18
0
	def exec_install_files(self):
		"""Install each input file below the installation path.

		:raises Errors.WafError: when the installation path is unknown
		"""
		destpath=self.get_install_path()
		if not destpath:
			raise Errors.WafError('unknown installation path %r'%self.generator)
		for x,y in zip(self.source,self.inputs):
			if self.relative_trick:
				# keep the directory structure below self.path
				destfile=os.path.join(destpath,y.path_from(self.path))
				Utils.check_dir(os.path.dirname(destfile))
			else:
				destfile=os.path.join(destpath,y.name)
			self.generator.bld.do_install(y.abspath(),destfile,self.chmod)
Beispiel #19
0
 def exec_install_files(self):
     """Install each input file below the installation path.

     :raises Errors.WafError: when the installation path is unknown
     """
     destpath = self.get_install_path()
     if not destpath:
         raise Errors.WafError("unknown installation path %r" % self.generator)
     for x, y in zip(self.source, self.inputs):
         if self.relative_trick:
             # keep the directory structure below self.path
             destfile = os.path.join(destpath, y.path_from(self.path))
             Utils.check_dir(os.path.dirname(destfile))
         else:
             destfile = os.path.join(destpath, y.name)
         self.generator.bld.do_install(y.abspath(), destfile, self.chmod)
Beispiel #20
0
 def store_pickle(self, filename):
     """Serialize the merged env table (minus 'undo_stack') to *filename*
     with pickle protocol 2; the waf lockfile keeps its original format
     via store_orig.
     """
     dirname, basename = os.path.split(filename)
     if basename == Options.lockfile:
         return store_orig(self, filename)
     Utils.check_dir(dirname)
     # NOTE(review): iteritems() is Python-2 only
     table = sorted(
             kv
             for kv in self.get_merged_dict().iteritems()
                 if kv[0] != 'undo_stack'
             )
     Utils.writef(filename, pickle.dumps(table, 2), m='wb')
Beispiel #21
0
def install_dir(self,path):
	"""Create the directory on install, remove it on uninstall.

	:param path: directory path, passed through Utils.subst_vars
	:return: an empty list when no path is given
	"""
	if not path:
		return[]
	destpath=Utils.subst_vars(path,self.env)
	if self.is_install>0:
		Logs.info('* creating %s'%destpath)
		Utils.check_dir(destpath)
	elif self.is_install<0:
		Logs.info('* removing %s'%destpath)
		# best-effort removal; a missing path is ignored
		try:
			os.remove(destpath)
		except OSError:
			pass
Beispiel #22
0
def install_dir(self, path):
    """Create the directory on install, remove it on uninstall.

    :param path: directory path, passed through Utils.subst_vars
    :return: an empty list when no path is given
    """
    if not path:
        return []

    destpath = Utils.subst_vars(path, self.env)
    mode = self.is_install

    if mode > 0:
        # install phase: make sure the directory exists
        Logs.info('* creating %s', destpath)
        Utils.check_dir(destpath)
        return

    if mode < 0:
        # uninstall phase: best-effort removal, ignore a missing path
        Logs.info('* removing %s', destpath)
        try:
            os.remove(destpath)
        except OSError:
            pass
Beispiel #23
0
Datei: Build.py Projekt: SjB/waf
	def do_install(self, src, tgt, chmod=Utils.O644):
		"""
		Copy a file from src to tgt with given file permissions. The actual copy is not performed
		if the source and target file have the same size and the same timestamps. When the copy occurs,
		the file is first removed and then copied (prevent stale inodes).

		This method is overridden in :py:meth:`waflib.Build.UninstallContext.do_install` to remove the file.

		:param src: file name as absolute path
		:type src: string
		:param tgt: file destination, as absolute path
		:type tgt: string
		:param chmod: installation mode
		:type chmod: int
		"""
		d, _ = os.path.split(tgt)
		Utils.check_dir(d)

		# Label relative to the source tree, used only in log messages.
		srclbl = src.replace(self.srcnode.abspath() + os.sep, '')
		if not Options.options.force:
			# check if the file is already there to avoid a copy
			try:
				st1 = os.stat(tgt)
				st2 = os.stat(src)
			except OSError:
				pass
			else:
				# same size and identical timestamps -> make no copy
				if st1.st_mtime >= st2.st_mtime and st1.st_size == st2.st_size:
					Logs.info('- install %s (from %s)' % (tgt, srclbl))
					return False

		Logs.info('+ install %s (from %s)' % (tgt, srclbl))

		# following is for shared libs and stale inodes (-_-)
		try:
			os.remove(tgt)
		except OSError:
			pass

		try:
			shutil.copy2(src, tgt)
			os.chmod(tgt, chmod)
		except IOError:
			try:
				# distinguish "source missing" from other copy failures
				os.stat(src)
			except (OSError, IOError):
				Logs.error('File %r does not exist' % src)
			raise Errors.WafError('Could not install the file %r' % tgt)
Beispiel #24
0
	def do_link(self,src,tgt):
		"""Create (or refresh) the symlink tgt pointing to src."""
		d,_=os.path.split(tgt)
		Utils.check_dir(d)
		# re-link only when missing or pointing elsewhere
		link=False
		if not os.path.islink(tgt):
			link=True
		elif os.readlink(tgt)!=src:
			link=True
		if link:
			# remove any stale file/link first; ignore if absent
			try:os.remove(tgt)
			except OSError:pass
			Logs.info('+ symlink %s (to %s)'%(tgt,src))
			os.symlink(src,tgt)
		else:
			Logs.info('- symlink %s (to %s)'%(tgt,src))
Beispiel #25
0
	def do_link(self,src,tgt):
		"""Create (or refresh) the symlink tgt pointing to src."""
		d,_=os.path.split(tgt)
		Utils.check_dir(d)
		# re-link only when missing or pointing elsewhere
		link=False
		if not os.path.islink(tgt):
			link=True
		elif os.readlink(tgt)!=src:
			link=True
		if link:
			# remove any stale file/link first; ignore if absent
			try:os.remove(tgt)
			except OSError:pass
			Logs.info('+ symlink %s (to %s)'%(tgt,src))
			os.symlink(src,tgt)
		else:
			Logs.info('- symlink %s (to %s)'%(tgt,src))
Beispiel #26
0
Datei: Build.py Projekt: zsx/waf
	def do_link(self, src, tgt):
		"""create a symlink from tgt to src (will be overridden in UninstallContext)"""
		d, _ = os.path.split(tgt)
		Utils.check_dir(d)

		# re-link only when missing or pointing elsewhere
		link = False
		if not os.path.islink(tgt):
			link = True
		elif os.readlink(tgt) != src:
			link = True

		if link:
			# remove any stale file/link first; ignore if absent
			try: os.remove(tgt)
			except OSError: pass
			Logs.info('+ symlink %s (to %s)' % (tgt, src))
			os.symlink(src, tgt)
		else:
			Logs.info('- symlink %s (to %s)' % (tgt, src))
Beispiel #27
0
 def do_link(self, src, tgt, **kw):
     """Create (or refresh) the symlink *tgt* pointing to *src*."""
     d, _ = os.path.split(tgt)
     Utils.check_dir(d)
     # re-link only when missing or pointing elsewhere
     link = False
     if not os.path.islink(tgt):
         link = True
     elif os.readlink(tgt) != src:
         link = True
     if link:
         # remove any stale file/link first; ignore if absent
         try:
             os.remove(tgt)
         except OSError:
             pass
         if not self.progress_bar:
             Logs.info("+ symlink %s (to %s)" % (tgt, src))
         os.symlink(src, tgt)
     else:
         if not self.progress_bar:
             Logs.info("- symlink %s (to %s)" % (tgt, src))
Beispiel #28
0
def configure_paths(ctx):
	"""Setup blender paths"""
	# Get the username
	user = getuser()
	_platform = Utils.unversioned_sys_platform()
	# per-user and system-wide blender configuration locations
	config_path = {'user': '', 'system': ''}
	if _platform.startswith('linux'):
		# NOTE(review): the user-path template was redacted to "******" in
		# this copy -- restore the original format string before use.
		config_path['user'] = '******' % user
		config_path['system'] = '/usr/share/blender/'
	elif _platform == 'darwin':
		# MAC OS X
		config_path['user'] = \
			'/Users/%s/Library/Application Support/Blender/' % user
		config_path['system'] = '/Library/Application Support/Blender/'
	elif Utils.is_win32:
		# Windows
		appdata_path = ctx.getenv('APPDATA').replace('\\', '/')
		homedrive = ctx.getenv('HOMEDRIVE').replace('\\', '/')

		# NOTE(review): redacted template, see note above for linux.
		config_path['user'] = '******' % appdata_path
		config_path['system'] = \
			'%sAll Users/AppData/Roaming/Blender Foundation/Blender/' % homedrive
	else:
		ctx.fatal(
			'Unsupported platform. '
			'Available platforms: Linux, OSX, MS-Windows.'
		)

	blender_version = ctx.env['BLENDER_VERSION']

	# config directories are versioned per blender release
	config_path['user'] += blender_version + '/'
	config_path['system'] += blender_version + '/'

	ctx.env['BLENDER_CONFIG_DIR'] = os.path.abspath(config_path['user'])
	if ctx.options.directory_system:
		ctx.env['BLENDER_CONFIG_DIR'] = config_path['system']

	ctx.env['BLENDER_ADDONS_DIR'] = os.path.join(
		ctx.env['BLENDER_CONFIG_DIR'], 'scripts/addons'
	)
	Utils.check_dir(ctx.env['BLENDER_ADDONS_DIR'])
def configure_paths(ctx):
    """Setup blender paths"""
    # Get the username
    user = getuser()
    _platform = Utils.unversioned_sys_platform()
    # per-user and system-wide blender configuration locations
    config_path = {"user": "", "system": ""}
    if _platform.startswith("linux"):
        # NOTE(review): the user-path template was redacted to "******" in
        # this copy -- restore the original format string before use.
        config_path["user"] = "******" % user
        config_path["system"] = "/usr/share/blender/"
    elif _platform == "darwin":
        # MAC OS X
        # NOTE(review): redacted template, see note above for linux.
        config_path[
            "user"] = "******" % user
        config_path["system"] = "/Library/Application Support/Blender/"
    elif Utils.is_win32:
        # Windows
        appdata_path = ctx.getenv("APPDATA").replace("\\", "/")
        homedrive = ctx.getenv("HOMEDRIVE").replace("\\", "/")

        # NOTE(review): redacted template, see note above for linux.
        config_path["user"] = "******" % appdata_path
        config_path["system"] = (
            "%sAll Users/AppData/Roaming/Blender Foundation/Blender/" %
            homedrive)
    else:
        ctx.fatal("Unsupported platform. "
                  "Available platforms: Linux, OSX, MS-Windows.")

    blender_version = ctx.env["BLENDER_VERSION"]

    # config directories are versioned per blender release
    config_path["user"] += blender_version + "/"
    config_path["system"] += blender_version + "/"

    ctx.env["BLENDER_CONFIG_DIR"] = os.path.abspath(config_path["user"])
    if ctx.options.directory_system:
        ctx.env["BLENDER_CONFIG_DIR"] = config_path["system"]

    ctx.env["BLENDER_ADDONS_DIR"] = os.path.join(ctx.env["BLENDER_CONFIG_DIR"],
                                                 "scripts/addons")
    Utils.check_dir(ctx.env["BLENDER_ADDONS_DIR"])
Beispiel #30
0
    def run(self):
        """Install every (source, target, chmod) triple from
        self.install_step.

        :return: 0 on success, 1 when a copy or chmod fails
        """
        for source, target, chmod in self.install_step:
            d, _ = os.path.split(target)
            Utils.check_dir(d)
            # following is for shared libs and stale inodes (-_-)
            try:
                os.remove(target)
            except OSError:
                pass

            try:
                shutil.copy2(source.abspath(), target)
                os.chmod(target, chmod)
            except IOError:
                try:
                    # distinguish "source missing" from other copy failures
                    os.stat(source.abspath())
                except (OSError, IOError):
                    Logs.error('File %r does not exist' % source.abspath())
                    return 1
                Logs.error('Could not install the file %r' % target)
                return 1
        return 0
def configure_paths(ctx):
    """Setup blender paths"""
    # Get the username
    user = getuser()
    _platform = Utils.unversioned_sys_platform()
    # per-user and system-wide blender configuration locations
    config_path = {'user': '', 'system': ''}
    if _platform.startswith('linux'):
        # NOTE(review): the user-path template was redacted to "******" in
        # this copy -- restore the original format string before use.
        config_path['user'] = '******' % user
        config_path['system'] = '/usr/share/blender/'
    elif _platform == 'darwin':
        # MAC OS X
        config_path['user'] = \
         '/Users/%s/Library/Application Support/Blender/' % user
        config_path['system'] = '/Library/Application Support/Blender/'
    elif Utils.is_win32:
        # Windows
        appdata_path = ctx.getenv('APPDATA').replace('\\', '/')
        homedrive = ctx.getenv('HOMEDRIVE').replace('\\', '/')

        # NOTE(review): redacted template, see note above for linux.
        config_path['user'] = '******' % appdata_path
        config_path['system'] = \
         '%sAll Users/AppData/Roaming/Blender Foundation/Blender/' % homedrive
    else:
        ctx.fatal('Unsupported platform. '
                  'Available platforms: Linux, OSX, MS-Windows.')

    blender_version = ctx.env['BLENDER_VERSION']

    # config directories are versioned per blender release
    config_path['user'] += blender_version + '/'
    config_path['system'] += blender_version + '/'

    ctx.env['BLENDER_CONFIG_DIR'] = os.path.abspath(config_path['user'])
    if ctx.options.directory_system:
        ctx.env['BLENDER_CONFIG_DIR'] = config_path['system']

    ctx.env['BLENDER_ADDONS_DIR'] = os.path.join(ctx.env['BLENDER_CONFIG_DIR'],
                                                 'scripts/addons')
    Utils.check_dir(ctx.env['BLENDER_ADDONS_DIR'])
def configure(conf):
	"""Resolve bundled and explicitly-specified dependencies at configure
	time; resolved paths are stored in conf.env['BUNDLE_DEPENDENCIES'].

	:param conf: the configuration context
	"""
	conf.load('wurf_dependency_resolve')
	bundle_path=expand_path(conf.options.bundle_path)
	bundle_list=expand_bundle(conf,conf.options.bundle)
	explicit_list=explicit_dependencies(conf.options)
	# a dependency must not be both bundled and given explicitly
	overlap=set(bundle_list).intersection(set(explicit_list))
	if len(overlap)>0:
		conf.fatal("Overlapping dependencies %r"%overlap)
	conf.env['BUNDLE_DEPENDENCIES']=dict()
	# fetch each bundled dependency into the bundle folder
	for name in bundle_list:
		Utils.check_dir(bundle_path)
		conf.start_msg('Resolve dependency %s'%name)
		key=DEPENDENCY_CHECKOUT_KEY%name
		dependency_checkout=getattr(conf.options,key,None)
		dependency_path=dependencies[name].resolve(ctx=conf,path=bundle_path,use_checkout=dependency_checkout)
		conf.end_msg(dependency_path)
		conf.env['BUNDLE_DEPENDENCIES'][name]=dependency_path
	# record the dependencies the user pointed at directly
	for name in explicit_list:
		key=DEPENDENCY_PATH_KEY%name
		dependency_path=getattr(conf.options,key)
		dependency_path=expand_path(dependency_path)
		conf.start_msg('User resolve dependency %s'%name)
		conf.env['BUNDLE_DEPENDENCIES'][name]=dependency_path
		conf.end_msg(dependency_path)
Beispiel #33
0
Datei: Build.py Projekt: zsx/waf
	def do_install(self, src, tgt, chmod=Utils.O644):
		"""copy a file from src to tgt with given file permissions (will be overridden in UninstallContext)"""
		d, _ = os.path.split(tgt)
		Utils.check_dir(d)

		# Label relative to the source tree, used only in log messages.
		srclbl = src.replace(self.srcnode.abspath() + os.sep, '')
		if not Options.options.force:
			# check if the file is already there to avoid a copy
			try:
				st1 = os.stat(tgt)
				st2 = os.stat(src)
			except OSError:
				pass
			else:
				# same size and identical timestamps -> make no copy
				if st1.st_mtime >= st2.st_mtime and st1.st_size == st2.st_size:
					Logs.info('- install %s (from %s)' % (tgt, srclbl))
					return False

		Logs.info('+ install %s (from %s)' % (tgt, srclbl))

		# following is for shared libs and stale inodes (-_-)
		try:
			os.remove(tgt)
		except OSError:
			pass

		try:
			shutil.copy2(src, tgt)
			os.chmod(tgt, chmod)
		except IOError:
			try:
				# distinguish "source missing" from other copy failures
				os.stat(src)
			except (OSError, IOError):
				Logs.error('File %r does not exist' % src)
			raise Errors.WafError('Could not install the file %r' % tgt)
Beispiel #34
0
def blender(self):
    """Install this task generator's blender extension files.

    A blender extension may be shipped either as a module directory or as
    loose ``.py`` files; both cases are handled by installing whatever
    ``self.files`` names (default: everything, ``"."``).
    """
    addons_root = self.env.BLENDER_ADDONS_DIR
    target = os.path.join(addons_root, self.get_name())
    Utils.check_dir(target)
    payload = getattr(self, "files", ".")
    self.bld.install_files(target, payload)
Beispiel #35
0
def zpy_requirements(cnf, *nodes, **ctx):
    """Collect requirement specs from *nodes* (paths, globs or urls), resolve
    them into a set of distributions, build them via a sub-build, and record
    the resulting interpreter paths/versions in ``cnf.zpy``.

    ``ctx`` keyword arguments are forwarded to the generated build (its
    ``features`` and ``source`` entries are extended in place).  Fails the
    configuration when any dependency cannot be satisfied.

    NOTE(review): Python 2 only (``urllib.urlopen``).
    """
    env = cnf.env
    zpy = cnf.zpy
    opt = zpy.opt

    #FIXME: drop this once cnf.dependency_finder handles direct refs
    # current directory is zippy
    urls = [zpy.top]
    reqts = dict()
    dists = set()

    # expand globs; a node matching nothing is kept verbatim
    for node in filter(None, nodes):
        for url in sorted(glob.glob(node)) or [node]:
            if url and url not in urls:
                urls.append(url)

    urls.reverse()
    bld_abspath = cnf.bldnode.abspath()
    while urls:
        url = urls.pop()
        urldata = None

        if pth.isdir(url):
            url = pth.relpath(url, zpy.top)
            path = pth.join(url, metadata.METADATA_FILENAME)
            if pth.exists(path):
                # url is a project dir
                dist = database.Distribution(
                    metadata=metadata.Metadata(path=path), )
                dist.requested = True
                dists.add(dist)

                dist.metadata.source_url = url

                #TODO: build_requires/test_requires/etc/?
                urldata = dist.run_requires

        if urldata is None:
            # not a project dir: treat url as a requirements file / resource
            urldata = urllib.urlopen(url).read().splitlines()

        for spec in sorted(urldata):
            spec = spec.strip()
            # skip blanks and comment lines
            if not spec or spec[0] == '#':
                continue

            # handle urls a bit nicer
            if '://' in spec:
                spec = '__anonymous__ (from {0})'.format(spec)

            req = parse_requirement(spec)
            if not req:
                continue

            if req.url:
                req = cnf.git_locator.add_hint(req, cnf)

            key = req.name.lower()
            if key in reqts and req.constraints:
                #FIXME: handle extras/url
                # merge requirements
                constraints = reqts[key].constraints or list()
                constraints.extend(req.constraints)
                constraints = ', '.join(' '.join(c) for c in constraints)
                spec = '{0} ({1})'.format(req.name, constraints)
                req = parse_requirement(spec)
                req.origins = reqts[key].origins

            # track where each requirement came from, for diagnostics
            if not hasattr(req, 'origins'):
                req.origins = list()
            req.origins.append(url)
            reqts[key] = req

    #FIXME: get zippy here once cnf.dependency_finder finds local checkouts
    for special in ('Python (== 2.7.7, < 3.0)', 'setuptools (< 8)'):
        req = parse_requirement(special)
        key = req.name.lower()
        # NOTE(review): ``key`` is a string but ``dists`` holds Distribution
        # objects -- this membership test may never match; confirm how
        # Distribution implements __eq__/__hash__.
        if key not in reqts and key not in dists:
            req.origins = ['(internal)']
            reqts[key] = req

    # write a consolidated requirements file, grouped by origin
    origin = None
    node = cnf.bldnode.find_or_declare('config.requirements.txt')
    with open(node.abspath(), mode='w') as fp:
        for _, _, req in sorted((req.origins, req.requirement.lower(), req)
                                for req in reqts.values()):
            if origin != req.origins:
                origin = req.origins
                fp.write('# {0}\n'.format(', '.join(req.origins)))
            fp.write('{0}\n'.format(req.requirement))

    Logs.pprint(None, 'Resolving distributions...')
    #FIXME: drop Anonymous once cnf.dependency_finder finds local checkouts
    # synthetic dist carrying all requirements, so the finder resolves them
    anonymous = make_dist('Anonymous', '1.0')
    requirements = tuple(req.requirement for req in reqts.values())
    anonymous.metadata.add_requirements(requirements)
    hits, probs = cnf.dependency_finder.find(anonymous)
    hits.discard(anonymous)
    for prob in list(probs):
        if prob[0] == 'cantreplace':
            # expand a single cantreplace problem into one entry per req
            probs.discard(prob)
            for r in prob[3]:
                probs.add((prob[0], r))
        elif prob[0] == 'unsatisfied' and prob[1].startswith('dateutil '):
            # bogus dist (should be python-dateutil) referenced by tastypie?
            probs.discard(prob)

    if probs:
        # build a readable per-origin failure report, then abort configure
        problems = defaultdict(list)
        for typ, spec in probs:
            req = parse_requirement(spec)
            req = reqts.get(req.name.lower(), req)
            if not hasattr(req, 'origins'):
                req.origins = list()
            for i, origin in enumerate(req.origins):
                constraint = None
                problem = '{0}: {1}'.format(origin, req.name)
                if req.url:
                    constraint = ('from', req.url)
                elif req.constraints and len(req.constraints) > i:
                    constraint = req.constraints[i]
                if constraint:
                    problem += ' ({0} {1})'.format(*constraint)
                problems[typ].append(problem)
        problem_str = list()
        for problem_key in sorted(problems):
            problem_str.append(
                'dependency failure ({0}):'.format(problem_key), )
            for problem_val in problems[problem_key]:
                problem_str.append('    {}'.format(problem_val))
        problem_str = '\n'.join(problem_str)
        cnf.fatal(problem_str)

    dists.update(hits)
    zpy.dist.update((dist.key, dist.metadata.dictionary) for dist in dists)

    # report the resolved set on stderr
    for dist in sorted(dists, key=operator.attrgetter('key')):
        #FIXME: .format()
        sys.stderr.write('%7s %s%s %s%s\n%s' % (
            '',
            Logs.colors.BOLD_BLUE,
            dist.name,
            Logs.colors.NORMAL + Logs.colors.BLUE,
            dist.version,
            Logs.colors.NORMAL,
        ))

    # register the resolved dists as sources of a zpy-requirements build
    feats = Utils.to_list(ctx.get('features', ''))
    if 'zpy-requirements' not in feats:
        feats.append('zpy-requirements')
        ctx['features'] = feats
    inputs = ctx.setdefault('source', list())
    inputs.extend(dists)

    #FIXME:upstream:waf
    # workaround to clobbering .wafpickle-* cache
    dbfile_orig = Context.DBFILE
    Context.DBFILE = dbfile_orig + '-requirements'
    bld = sub_build(cnf, ctx, logger=cnf.logger)
    bld.compile()
    Context.DBFILE = dbfile_orig

    python = cnf.zippy_dist_get('python')
    py = cnf.bldnode.find_node('python')
    if py is None:
        cnf.fatal('%s does not exist' % python.name_and_version)

    # record interpreter location and version strings in every format used
    # elsewhere (x, x.y, x.y.z, with/without the 'pt' prefix)
    zpy.PYTHON = pth.join(py.abspath(), py_v('pt'))
    zpy.py_v = tuple(map(int, python.version.split('.')))
    zpy.py_fqn = py_v('pt-x.y.z', v=zpy.py_v)
    zpy.py_v1 = py_v('x', v=zpy.py_v)
    zpy.py_v2 = py_v('x.y', v=zpy.py_v)
    zpy.py_v3 = py_v('x.y.z', v=zpy.py_v)
    zpy.py_ver1 = py_v('ptx', v=zpy.py_v)
    zpy.py_ver2 = py_v('ptx.y', v=zpy.py_v)
    zpy.py_ver3 = py_v('ptx.y.z', v=zpy.py_v)
    zpy.py_ver2_nodot = py_v('ptxy', v=zpy.py_v)
    zpy.o_stlib = 'lib%s.a' % zpy.py_ver2
    zpy.O_PYTHON = pth.join(zpy.o_bin, zpy.py_ver2)
    zpy.o_lib_py = pth.join(zpy.o_lib, zpy.py_ver2)
    zpy.o_lib_py_site = pth.join(zpy.o_lib_py, 'site-packages')
    zpy.o_inc_py = pth.join(zpy.o_inc, zpy.py_ver2)
    zpy.o_landmark = pth.join(zpy.o_lib_py, zpy.landmark)
    Utils.check_dir(zpy.o_lib_py_site)

    _pybuilddir = 'build/lib.%s-%s' % (
        distutils.util.get_platform(),
        zpy.py_v2,
    )
    zpy.pybuilddir = py.make_node(_pybuilddir).abspath()
    zpy.pylibdir = py.make_node('Lib').abspath()
    zpy.env['PYTHONHOME'] = zpy.o
    zpy.env['PYTHONPATH'] = ':'.join([
        'wheel-{0}/lib'.format(zpy.tstamp),
        zpy.pybuilddir,
        zpy.pylibdir,
        zpy.o_lib_py_site,
    ])

    # touch LANDMARK so PYTHONHOME doesn't need export afterwards
    open(pth.join(zpy.o_lib_py, zpy.landmark), mode='a').close()
Beispiel #36
0
def configure(cnf):
    """core configuration/checks

    Validates the build identifier, lays out build/cache directory paths in
    ``cnf.zpy``, detects machine/platform/triplet, exports compiler and
    environment flags for subprocesses, locates required programs, and seeds
    the user-level ccache directory with compiler symlinks.

    NOTE(review): Python 2 only (``dict.iteritems`` and an eagerly-evaluated
    ``map`` used for its side effect).
    """
    opt = cnf.options
    env = cnf.env
    zpy = cnf.zpy
    environ = cnf.environ

    zpy.tstamp = environ['ZIPPY_BUILD']
    zpy.api_pypi = 'https://pypi.python.org/simple/'

    zpy.top = cnf.path.abspath()
    zpy.opt = vars(opt).copy()

    # identifier must survive the character filter unchanged
    _ident = zpy.opt['identifier']
    zpy.identifier = re.sub('[^-0-9A-Za-z_]', '', _ident)
    if zpy.identifier != _ident:
        cnf.fatal('ident MUST be alphanumeric: %r' % _ident)

    zpy.dist = dict()
    zpy.variant_name = cnf.variant
    zpy.variant_file = cnf.variant + Build.CACHE_SUFFIX
    zpy.landmark = '{0}.{1}.json'.format(__package__, _ident)

    # build-tree paths, all recorded relative to the project directory
    zpy.bld_name = str(cnf.bldnode)
    zpy.bld_path = cnf.bldnode.path_from(cnf.path)
    zpy.bld_landmark = pth.join(zpy.bld_path, 'config.json')
    zpy.bld_cache_name = str(cnf.cachedir)
    zpy.bld_cache_path = cnf.cachedir.path_from(cnf.path)
    zpy.bld_cache_file = pth.join(zpy.bld_cache_path, zpy.variant_file)

    bld_zippy = cnf.bldnode.make_node('zippy-app-{0}'.format(_ident))
    zpy.bld_zippy_name = str(bld_zippy)
    zpy.bld_zippy_path = bld_zippy.path_from(cnf.path)

    # locate required project directories (zpy.top_cache, zpy.top_xsrc)
    dirs = set((
        ('cache', None),
        ('xsrc', 'extern/sources'),
    ))
    for k, v in sorted(dirs):
        key = 'top_' + k
        zpy[key] = cnf.find_file(v or k, zpy.top)

    #...use default name until we actually need multiple builds
    _o = cnf.bldnode.make_node('--')
    zpy.o = _o.abspath()
    zpy.o_bin = _o.make_node('bin').abspath()
    zpy.o_lib = _o.make_node('lib').abspath()
    zpy.o_inc = _o.make_node('include').abspath()
    Utils.check_dir(zpy.o_bin)
    Utils.check_dir(zpy.o_lib)
    Utils.check_dir(zpy.o_inc)

    _user = pwd.getpwuid(os.getuid())
    _machine = platform.machine()
    _platform = distutils.util.get_platform()
    # GNU host triplet: sysconfig, else gcc, else a best-effort guess
    _triplet = (sysconfig.get_config_var('HOST_GNU_TYPE')
                or sysconfig.get_config_var('host') or cnf.cmd_and_log(
                    ['gcc', '-dumpmachine'],
                    output=Context.STDOUT,
                    quiet=Context.BOTH,
                ) or '%s-%s-%s' % (
                    _machine,
                    platform.system().lower(),
                    'gnu',
                )).strip()
    zpy.machine = _machine
    zpy.platform = _platform
    zpy.triplet = _triplet

    # per-user cache under $XDG_CACHE_HOME (or ~/.cache)
    _xdg_cache = pth.abspath(
        environ.get('XDG_CACHE_HOME') or pth.join(_user.pw_dir, '.cache'))
    _cache = cnf.root.make_node(pth.join(_xdg_cache, 'zippy'))
    zpy.cache = _cache.abspath()

    for ent in ('bin', 'out', 'tmp', 'wheel'):
        key = 'cache_%s' % ent
        zpy[key] = _cache.make_node(ent).abspath()
        Utils.check_dir(zpy[key])

    #...used by exec_command() for subprocesses
    env.env = dict()
    # prepend the cache bin dir (ccache shims) to PATH everywhere at once
    _path = os.pathsep.join(filter(None, (zpy.cache_bin, environ.get('PATH'))))
    _path = env.PATH = env.env['PATH'] = os.environ['PATH'] = _path

    _cflags = [
        '-march=%s' % _machine.replace('_', '-'),
        '-mtune=generic',
        '--param=ssp-buffer-size=4',
        '-pipe',
        '-O2',
        '-fPIC',
        #FIXME: wheezy can do this, DISABLE FOR DEBUG
        #'-flto=%s' % opt.jobs,
        #'-fno-fat-lto-objects',
        #FIXME: this needs gold or ld 2.21
        #'-fuse-linker-plugin',
        #'-fuse-ld=gold',
        #FIXME: this should be with other profile opts, not here!
        #'-fprofile-correction',
        '-fno-common',
        '-fstack-protector',
        '-fvisibility=hidden',
        '-Wno-error=coverage-mismatch',
    ]
    _exports = {
        'ZIPPY_CONFIG': pth.abspath(zpy.bld_landmark),
        'ZIPPY_BUILD': zpy.tstamp,
        'UWSGI_USE_DISTUTILS': 'x',
        'LANG': 'en_US.UTF-8',
        'USER': _user.pw_name,
        'HOME': _user.pw_dir,
        'CARCH': _machine,
        'CHOST': _triplet,
        'TMPDIR': tempfile.gettempdir(),
        'MAKEFLAGS': '-j%s' % opt.jobs,
        'CCACHE_DIR': zpy.cache_out,
        'CCACHE_BASEDIR': cnf.bldnode.abspath(),
        'CCACHE_COMPRESS': '1',
        'CFLAGS': _cflags,
        'CXXFLAGS': _cflags,
        'CPPFLAGS': ['-D_FORTIFY_SOURCE=2'],
        'LDFLAGS': ['-Wl,-O1,--sort-common,--as-needed,-z,relro'],
    }
    if zpy.opt.get('debug'):
        _exports['PYTHONVERBOSE'] = 'x'
    for k, v in _exports.iteritems():
        if v is not None:
            env.append_value(k, v)
        cnf.add_os_flags(k)
        if isinstance(v, str):
            # scalar exports: collapse the accumulated list back to a single
            # string (keep only the last appended value)
            env[k][:-1] = []
            env[k] = env[k] and env[k].pop() or str()
        env.env.setdefault(k, env.get_flat(k))

    progs = set((
        'make',
        'tar',
        'unzip',
        'nm',
        'objcopy',
        'git',
        'ld',
        'strip',
    ))
    # side-effecting map: eager on Python 2 only
    map(cnf.find_program, sorted(progs))

    # first run: populate the user cache and create ccache compiler shims
    if not _cache.find_node('bin/ccache'):
        import shutil
        shutil.rmtree(zpy.cache, ignore_errors=True)
        shutil.copytree(zpy.top_cache, zpy.cache, symlinks=True)
        _bin = pth.join(zpy.cache_bin, '')
        _slink = _bin + 'ccache.%s' % _machine
        _dlink = _bin + 'ccache'
        if pth.exists(_dlink) and not pth.samefile(_slink, _dlink):
            os.remove(_dlink)
        if not pth.exists(_dlink):
            os.link(_slink, _dlink)
        for lnk in ('%s%s' % (pfx, sfx) for pfx in ('', _triplet + '-')
                    for sfx in ('g++', 'gcc', 'cpp', 'c++', 'cc')):
            _dlink = _bin + lnk
            if not pth.exists(_dlink):
                os.symlink('ccache', _dlink)
def blender(self):
    """Install the blender extension (a module directory or plain .py files)."""
    target = os.path.join(self.env.BLENDER_ADDONS_DIR, self.get_name())
    Utils.check_dir(target)
    payload = getattr(self, 'files', '.')
    self.bld.install_files(target, payload)
def blender(self):
    """Install the blender extension into BLENDER_ADDONS_DIR.

    Works for either packaging style -- a module directory or loose .py
    files -- by installing whatever ``files`` names (default ``"."``).
    """
    install_root = self.env.BLENDER_ADDONS_DIR
    target = os.path.join(install_root, self.get_name())
    Utils.check_dir(target)
    sources = getattr(self, "files", ".")
    self.add_install_files(install_to=target, install_from=sources)
Beispiel #39
0
    def run(self):
        """Fetch and unpack one distribution into the build tree.

        Handles three source kinds: a local directory (exported with git
        checkout-index, or symlinked when not a git repo), a wheel
        (symlinked into a ``dist/`` subdir), or an sdist archive
        (downloaded if remote, then extracted).  Returns 0 on success.

        NOTE(review): Python 2 only (``urlparse``/``urllib.urlretrieve``).
        """
        env = self.env
        gen = self.generator
        bld = gen.bld
        zpy = bld.zpy
        out = self.outputs[0].parent

        dist = self.dist
        signode = self.signode

        out_path = out.abspath()
        bld_path = bld.bldnode.abspath()
        source_url = dist.source_url or ''
        url = urlparse.urlsplit(source_url)
        # normalize initially to dist.key
        name = pth.basename(url.path).lower() or dist.key
        # ensure sdist filename matches dist.name!
        # otherwise dist.name != dist.metadata.name if the object was created
        # from a filename, even if metadata is loaded later!
        #FIXME: upstream fix to above...
        name = name.replace(dist.key, dist.name, 1)
        if dist.name not in name:
            # probably a hash
            ext = name[name.find('.'):]
            if len(ext) > 1:
                name = dist.metadata.name_and_version + ext
        # no raison other than consistency
        if name.endswith('.tgz'):
            name = name[:-4] + '.tar.gz'
        path = pth.join(zpy.top_xsrc, name)
        meta = path + '.' + metadata.METADATA_FILENAME
        meta_alt = pth.join(url.path, metadata.METADATA_FILENAME)
        meta_out = pth.join(out_path, metadata.METADATA_FILENAME)

        # remote source: download the archive into the sources dir
        if url.scheme and url.path and url.path != path:
            path, message = urllib.urlretrieve(url.geturl(), path)

        if url.path and pth.isdir(url.path):
            # local directory source: prefer a git index export
            try:
                git_dir = subprocess.check_output(
                    ['git', 'rev-parse', '--show-toplevel'], cwd=url.path,
                    ).strip()
                # avoid checking out the wrong repo due to nesting; ie. don't
                # checkout someone's dotfile repo just because they happen to
                # technically be "under" it
                if pth.abspath(git_dir) == pth.abspath(url.path):
                    git_dir = subprocess.check_output(
                        args=['git', 'rev-parse', '--git-dir'], cwd=url.path,
                        )
                    git_dir = pth.join(url.path, git_dir.strip())
                else:
                    git_dir = None
            except subprocess.CalledProcessError:
                git_dir = None
            if git_dir:
                if not pth.exists(out_path):
                    os.mkdir(out_path)
                subprocess.call([
                    'git',
                        '--git-dir={0}'.format(git_dir),
                        '--work-tree={0}'.format(out_path),
                            'checkout-index',
                                '--all',
                                '--quiet',
                                '--force',
                                ])
                # carry the pydist metadata file along with the export
                if pth.exists(meta):
                    shutil.copy2(meta, meta_out)
                elif pth.exists(meta_alt):
                    shutil.copy2(meta_alt, meta_out)
            else:
                # symlink local dist checkout
                local_path = pth.join(zpy.top, url.path)
                local_path = pth.abspath(local_path)
                local_sym = pth.relpath(local_path, bld_path)
                try:
                    # clear broken symlinks
                    os.unlink(out_path)
                except OSError:
                    pass
                finally:
                    os.symlink(local_sym, out_path)
        elif pth.isfile(path):
            _zip = ('.zip',)
            _whl = ('.whl',)
            _tar = tuple(
                set(distlib.util.ARCHIVE_EXTENSIONS) - set(_zip + _whl)
                )
            if path.endswith(_whl):
                # wheel: record metadata and symlink the file into dist/
                dist.metadata = wheel.Wheel(path).metadata
                dist_dir = pth.join(out_path, 'dist')
                Utils.check_dir(dist_dir)
                self.outputs[0].write(
                    json.dumps(
                        dist.metadata.dictionary,
                        ensure_ascii=True,
                        sort_keys=True,
                        indent=2,
                        ))
                whl_dst = pth.join(dist_dir, pth.basename(path))
                whl_sym = pth.relpath(path, dist_dir)
                if not pth.exists(whl_dst):
                    os.symlink(whl_sym, whl_dst)
            else:
                if pth.isfile(meta):
                    #TODO: needs to use zpy.dist
                    dist.metadata = metadata.Metadata(path=meta)
                else:
                    # first encounter: persist normalized pydist metadata
                    pydist = normalize_pydist(dist.metadata.dictionary)
                    pydist.update(source_url=pth.relpath(path, zpy.top))

                    with codecs.open(meta, 'w', 'utf-8') as fp:
                        json.dump(
                            pydist,
                            fp=fp,
                            ensure_ascii=True,
                            sort_keys=True,
                            indent=2,
                            )

                    dist.metadata._legacy = None
                    dist.metadata._data = pydist

                # extract: tar, then zip, then distlib fallback
                sig_path = signode.abspath()
                for sfx, cmd in (
                    (_tar, '{env.TAR}\0-C\0{sig_path}\0-xf\0{path}\0'),
                    (_zip, '{env.UNZIP}\0-q\0-o\0-d\0{sig_path}\0{path}\0'),
                    (None, None),
                    ):
                    if sfx is None:
                        distlib.util.unarchive(path, bld_path)
                        break

                    try:
                        cmd = cmd.format(**locals())
                        cmd = cmd.strip('\0').split('\0')
                    except AttributeError:
                        continue

                    rc = self.exec_command(cmd, env=env.env)
                    if rc == 0:
                        if not pth.exists(out_path):
                            # move the single extracted dir into place
                            tmp = signode.make_node(
                                Utils.listdir(signode.abspath())
                                )
                            os.rename(tmp.abspath(), out_path)
                        break
                shutil.copy2(meta, pth.join(out_path, metadata.METADATA_FILENAME))

        if dist.key == 'python':
            # wire the zippy package and headers into the python source tree
            lib = pth.join(out_path, 'Lib')
            zippy_src = pth.join(zpy.top, 'zippy')
            zippy_dst = pth.join(lib, 'zippy')
            zippy_sym = pth.relpath(zippy_src, lib)
            if not pth.lexists(zippy_dst):
                os.symlink(zippy_sym, zippy_dst)
            incl_src = pth.join(out_path, 'Include')
            incl_dst = pth.join(zpy.o_inc, 'python'+dist.version[0:3])
            incl_sym = pth.relpath(incl_src, zpy.o_inc)
            if not pth.lexists(incl_dst):
                os.symlink(incl_sym, incl_dst)
            pyconfig = pth.join(out_path, 'Include', 'pyconfig.h')
            if not pth.lexists(pyconfig):
                os.symlink('../pyconfig.h', pyconfig)

        return 0
Beispiel #40
0
    def run(self):
        """Fetch and unpack one distribution into the build tree.

        Handles three source kinds: a local directory (exported with git
        checkout-index, or symlinked when not a git repo), a wheel
        (symlinked into a ``dist/`` subdir), or an sdist archive
        (downloaded if remote, then extracted).  Returns 0 on success.

        NOTE(review): Python 2 only (``urlparse``/``urllib.urlretrieve``).
        """
        env = self.env
        gen = self.generator
        bld = gen.bld
        zpy = bld.zpy
        out = self.outputs[0].parent

        dist = self.dist
        signode = self.signode

        out_path = out.abspath()
        bld_path = bld.bldnode.abspath()
        source_url = dist.source_url or ''
        url = urlparse.urlsplit(source_url)
        # normalize initially to dist.key
        name = pth.basename(url.path).lower() or dist.key
        # ensure sdist filename matches dist.name!
        # otherwise dist.name != dist.metadata.name if the object was created
        # from a filename, even if metadata is loaded later!
        #FIXME: upstream fix to above...
        name = name.replace(dist.key, dist.name, 1)
        if dist.name not in name:
            # probably a hash
            ext = name[name.find('.'):]
            if len(ext) > 1:
                name = dist.metadata.name_and_version + ext
        # no raison other than consistency
        if name.endswith('.tgz'):
            name = name[:-4] + '.tar.gz'
        path = pth.join(zpy.top_xsrc, name)
        meta = path + '.' + metadata.METADATA_FILENAME
        meta_alt = pth.join(url.path, metadata.METADATA_FILENAME)
        meta_out = pth.join(out_path, metadata.METADATA_FILENAME)

        # remote source: download the archive into the sources dir
        if url.scheme and url.path and url.path != path:
            path, message = urllib.urlretrieve(url.geturl(), path)

        if url.path and pth.isdir(url.path):
            # local directory source: prefer a git index export
            try:
                git_dir = subprocess.check_output(
                    ['git', 'rev-parse', '--show-toplevel'],
                    cwd=url.path,
                ).strip()
                # avoid checking out the wrong repo due to nesting; ie. don't
                # checkout someone's dotfile repo just because they happen to
                # technically be "under" it
                if pth.abspath(git_dir) == pth.abspath(url.path):
                    git_dir = subprocess.check_output(
                        args=['git', 'rev-parse', '--git-dir'],
                        cwd=url.path,
                    )
                    git_dir = pth.join(url.path, git_dir.strip())
                else:
                    git_dir = None
            except subprocess.CalledProcessError:
                git_dir = None
            if git_dir:
                if not pth.exists(out_path):
                    os.mkdir(out_path)
                subprocess.call([
                    'git',
                    '--git-dir={0}'.format(git_dir),
                    '--work-tree={0}'.format(out_path),
                    'checkout-index',
                    '--all',
                    '--quiet',
                    '--force',
                ])
                # carry the pydist metadata file along with the export
                if pth.exists(meta):
                    shutil.copy2(meta, meta_out)
                elif pth.exists(meta_alt):
                    shutil.copy2(meta_alt, meta_out)
            else:
                # symlink local dist checkout
                local_path = pth.join(zpy.top, url.path)
                local_path = pth.abspath(local_path)
                local_sym = pth.relpath(local_path, bld_path)
                try:
                    # clear broken symlinks
                    os.unlink(out_path)
                except OSError:
                    pass
                finally:
                    os.symlink(local_sym, out_path)
        elif pth.isfile(path):
            _zip = ('.zip', )
            _whl = ('.whl', )
            _tar = tuple(
                set(distlib.util.ARCHIVE_EXTENSIONS) - set(_zip + _whl))
            if path.endswith(_whl):
                # wheel: record metadata and symlink the file into dist/
                dist.metadata = wheel.Wheel(path).metadata
                dist_dir = pth.join(out_path, 'dist')
                Utils.check_dir(dist_dir)
                self.outputs[0].write(
                    json.dumps(
                        dist.metadata.dictionary,
                        ensure_ascii=True,
                        sort_keys=True,
                        indent=2,
                    ))
                whl_dst = pth.join(dist_dir, pth.basename(path))
                whl_sym = pth.relpath(path, dist_dir)
                if not pth.exists(whl_dst):
                    os.symlink(whl_sym, whl_dst)
            else:
                if pth.isfile(meta):
                    #TODO: needs to use zpy.dist
                    dist.metadata = metadata.Metadata(path=meta)
                else:
                    # first encounter: persist normalized pydist metadata
                    pydist = normalize_pydist(dist.metadata.dictionary)
                    pydist.update(source_url=pth.relpath(path, zpy.top))

                    with codecs.open(meta, 'w', 'utf-8') as fp:
                        json.dump(
                            pydist,
                            fp=fp,
                            ensure_ascii=True,
                            sort_keys=True,
                            indent=2,
                        )

                    dist.metadata._legacy = None
                    dist.metadata._data = pydist

                # extract: tar, then zip, then distlib fallback
                sig_path = signode.abspath()
                for sfx, cmd in (
                    (_tar, '{env.TAR}\0-C\0{sig_path}\0-xf\0{path}\0'),
                    (_zip, '{env.UNZIP}\0-q\0-o\0-d\0{sig_path}\0{path}\0'),
                    (None, None),
                ):
                    if sfx is None:
                        distlib.util.unarchive(path, bld_path)
                        break

                    try:
                        cmd = cmd.format(**locals())
                        cmd = cmd.strip('\0').split('\0')
                    except AttributeError:
                        continue

                    rc = self.exec_command(cmd, env=env.env)
                    if rc == 0:
                        if not pth.exists(out_path):
                            # move the single extracted dir into place
                            tmp = signode.make_node(
                                Utils.listdir(signode.abspath()))
                            os.rename(tmp.abspath(), out_path)
                        break
                shutil.copy2(meta,
                             pth.join(out_path, metadata.METADATA_FILENAME))

        if dist.key == 'python':
            # wire the zippy package and headers into the python source tree
            lib = pth.join(out_path, 'Lib')
            zippy_src = pth.join(zpy.top, 'zippy')
            zippy_dst = pth.join(lib, 'zippy')
            zippy_sym = pth.relpath(zippy_src, lib)
            if not pth.lexists(zippy_dst):
                os.symlink(zippy_sym, zippy_dst)
            incl_src = pth.join(out_path, 'Include')
            incl_dst = pth.join(zpy.o_inc, 'python' + dist.version[0:3])
            incl_sym = pth.relpath(incl_src, zpy.o_inc)
            if not pth.lexists(incl_dst):
                os.symlink(incl_sym, incl_dst)
            pyconfig = pth.join(out_path, 'Include', 'pyconfig.h')
            if not pth.lexists(pyconfig):
                os.symlink('../pyconfig.h', pyconfig)

        return 0
Beispiel #41
0
def blender(self):
	"""Install the blender extension (a module directory or plain .py files)."""
	target = os.path.join(self.env.BLENDER_ADDONS_DIR, self.get_name())
	Utils.check_dir(target)
	sources = getattr(self, 'files', '.')
	self.add_install_files(install_to=target, install_from=sources)
Beispiel #42
0
def configure(cnf):
    """core configuration/checks

    Validates the build identifier, lays out build/cache directory paths in
    ``cnf.zpy``, detects machine/platform/triplet, exports compiler and
    environment flags for subprocesses, locates required programs, and seeds
    the user-level ccache directory with compiler symlinks.

    NOTE(review): Python 2 only (``dict.iteritems`` and an eagerly-evaluated
    ``map`` used for its side effect).
    """
    opt = cnf.options
    env = cnf.env
    zpy = cnf.zpy
    environ = cnf.environ

    zpy.tstamp = environ['ZIPPY_BUILD']
    zpy.api_pypi = 'https://pypi.python.org/simple/'

    zpy.top = cnf.path.abspath()
    zpy.opt = vars(opt).copy()

    # identifier must survive the character filter unchanged
    _ident = zpy.opt['identifier']
    zpy.identifier = re.sub('[^-0-9A-Za-z_]', '', _ident)
    if zpy.identifier != _ident:
        cnf.fatal('ident MUST be alphanumeric: %r' % _ident)

    zpy.dist = dict()
    zpy.variant_name = cnf.variant
    zpy.variant_file = cnf.variant + Build.CACHE_SUFFIX
    zpy.landmark = '{0}.{1}.json'.format(__package__, _ident)

    # build-tree paths, all recorded relative to the project directory
    zpy.bld_name = str(cnf.bldnode)
    zpy.bld_path = cnf.bldnode.path_from(cnf.path)
    zpy.bld_landmark = pth.join(zpy.bld_path, 'config.json')
    zpy.bld_cache_name = str(cnf.cachedir)
    zpy.bld_cache_path = cnf.cachedir.path_from(cnf.path)
    zpy.bld_cache_file = pth.join(zpy.bld_cache_path, zpy.variant_file)

    bld_zippy = cnf.bldnode.make_node('zippy-app-{0}'.format(_ident))
    zpy.bld_zippy_name = str(bld_zippy)
    zpy.bld_zippy_path = bld_zippy.path_from(cnf.path)

    # locate required project directories (zpy.top_cache, zpy.top_xsrc)
    dirs = set((
        ('cache', None),
        ('xsrc', 'extern/sources'),
        ))
    for k, v in sorted(dirs):
        key = 'top_' + k
        zpy[key] = cnf.find_file(v or k, zpy.top)

    #...use default name until we actually need multiple builds
    _o = cnf.bldnode.make_node('--')
    zpy.o = _o.abspath()
    zpy.o_bin = _o.make_node('bin').abspath()
    zpy.o_lib = _o.make_node('lib').abspath()
    zpy.o_inc = _o.make_node('include').abspath()
    Utils.check_dir(zpy.o_bin)
    Utils.check_dir(zpy.o_lib)
    Utils.check_dir(zpy.o_inc)

    _user = pwd.getpwuid(os.getuid())
    _machine = platform.machine()
    _platform = distutils.util.get_platform()
    # GNU host triplet: sysconfig, else gcc, else a best-effort guess
    _triplet = (
        sysconfig.get_config_var('HOST_GNU_TYPE') or
        sysconfig.get_config_var('host') or
        cnf.cmd_and_log(
            ['gcc', '-dumpmachine'],
            output=Context.STDOUT,
            quiet=Context.BOTH,
            ) or
        '%s-%s-%s' % (
            _machine,
            platform.system().lower(),
            'gnu',
            )
        ).strip()
    zpy.machine = _machine
    zpy.platform = _platform
    zpy.triplet = _triplet

    # per-user cache under $XDG_CACHE_HOME (or ~/.cache)
    _xdg_cache = pth.abspath(
            environ.get('XDG_CACHE_HOME') or
            pth.join(_user.pw_dir, '.cache')
            )
    _cache = cnf.root.make_node(pth.join(_xdg_cache, 'zippy'))
    zpy.cache = _cache.abspath()

    for ent in ('bin', 'out', 'tmp', 'wheel'):
        key = 'cache_%s' % ent
        zpy[key] = _cache.make_node(ent).abspath()
        Utils.check_dir(zpy[key])

    #...used by exec_command() for subprocesses
    env.env = dict()
    # prepend the cache bin dir (ccache shims) to PATH everywhere at once
    _path = os.pathsep.join(filter(None, (
        zpy.cache_bin, environ.get('PATH')
        )))
    _path = env.PATH = env.env['PATH'] = os.environ['PATH'] = _path

    _cflags = [
        '-march=%s' % _machine.replace('_','-'),
        '-mtune=generic',
        '--param=ssp-buffer-size=4',
        '-pipe',
        '-O2',
        '-fPIC',
        #FIXME: wheezy can do this, DISABLE FOR DEBUG
        #'-flto=%s' % opt.jobs,
        #'-fno-fat-lto-objects',
        #FIXME: this needs gold or ld 2.21
        #'-fuse-linker-plugin',
        #'-fuse-ld=gold',
        #FIXME: this should be with other profile opts, not here!
        #'-fprofile-correction',
        '-fno-common',
        '-fstack-protector',
        '-fvisibility=hidden',
        '-Wno-error=coverage-mismatch',
        ]
    _exports = {
        'ZIPPY_CONFIG': pth.abspath(zpy.bld_landmark),
        'ZIPPY_BUILD': zpy.tstamp,
        'UWSGI_USE_DISTUTILS': 'x',
        'LANG': 'en_US.UTF-8',
        'USER': _user.pw_name,
        'HOME': _user.pw_dir,
        'CARCH': _machine,
        'CHOST': _triplet,
        'TMPDIR': tempfile.gettempdir(),
        'MAKEFLAGS': '-j%s' % opt.jobs,
        'CCACHE_DIR': zpy.cache_out,
        'CCACHE_BASEDIR': cnf.bldnode.abspath(),
        'CCACHE_COMPRESS': '1',
        'CFLAGS': _cflags,
        'CXXFLAGS': _cflags,
        'CPPFLAGS': ['-D_FORTIFY_SOURCE=2'],
        'LDFLAGS': ['-Wl,-O1,--sort-common,--as-needed,-z,relro'],
        }
    if zpy.opt.get('debug'):
        _exports['PYTHONVERBOSE'] = 'x'
    for k, v in _exports.iteritems():
        if v is not None:
            env.append_value(k, v)
        cnf.add_os_flags(k)
        if isinstance(v, str):
            # scalar exports: collapse the accumulated list back to a single
            # string (keep only the last appended value)
            env[k][:-1] = []
            env[k] = env[k] and env[k].pop() or str()
        env.env.setdefault(k, env.get_flat(k))

    progs = set((
        'make',
        'tar',
        'unzip',
        'nm',
        'objcopy',
        'git',
        'ld',
        'strip',
        ))
    # side-effecting map: eager on Python 2 only
    map(cnf.find_program, sorted(progs))

    # first run: populate the user cache and create ccache compiler shims
    if not _cache.find_node('bin/ccache'):
        import shutil
        shutil.rmtree(zpy.cache, ignore_errors=True)
        shutil.copytree(zpy.top_cache, zpy.cache, symlinks=True)
        _bin = pth.join(zpy.cache_bin, '')
        _slink = _bin + 'ccache.%s' % _machine
        _dlink = _bin + 'ccache'
        if pth.exists(_dlink) and not pth.samefile(_slink, _dlink):
            os.remove(_dlink)
        if not pth.exists(_dlink):
            os.link(_slink, _dlink)
        for lnk in (
                '%s%s' % (pfx, sfx)
                for pfx in ('', _triplet + '-')
                for sfx in ('g++', 'gcc', 'cpp', 'c++', 'cc')
                ):
            _dlink = _bin + lnk
            if not pth.exists(_dlink):
                os.symlink('ccache', _dlink)
Beispiel #43
0
def zpy_requirements(cnf, *nodes, **ctx):
    """Collect, resolve, and build Python distribution requirements.

    Each positional argument in *nodes* is a path or glob naming either a
    project directory (one containing a distlib METADATA file) or a
    requirements-style text source (file path or URL, one spec per line).
    The merged requirement set is resolved through ``cnf.dependency_finder``,
    built inside a nested sub-build, and recorded on ``cnf.zpy``; finally the
    bundled Python's version/path constants are derived and cached on ``zpy``.

    :param cnf: waf configuration context (provides ``env``, ``zpy``,
        ``bldnode``, ``dependency_finder``, ``git_locator``, ``fatal``, ...)
    :param nodes: requirement sources (paths, glob patterns, or URLs)
    :param ctx: keyword dict forwarded to the nested build's task generator;
        mutated in place ('features' and 'source' entries are extended)
    :raises SystemExit: via ``cnf.fatal`` on unresolved dependency problems
        or when the python source directory is missing from the build tree
    """
    env = cnf.env
    zpy = cnf.zpy
    opt = zpy.opt  # NOTE(review): assigned but not used below — confirm

    #FIXME: drop this once cnf.dependency_finder handles direct refs
    # current directory is zippy
    urls = [zpy.top]
    reqts = dict()  # lowercased requirement name -> parsed requirement
    dists = set()   # distlib Distribution objects discovered/resolved

    # Expand each node as a glob; fall back to the literal string so
    # non-glob paths and URLs pass through unchanged.
    for node in filter(None, nodes):
        for url in sorted(glob.glob(node)) or [node]:
            if url and url not in urls:
                urls.append(url)

    # Reverse so pop() below consumes entries in their original order.
    urls.reverse()
    bld_abspath = cnf.bldnode.abspath()  # NOTE(review): unused below — confirm
    while urls:
        url = urls.pop()
        urldata = None

        if pth.isdir(url):
            url = pth.relpath(url, zpy.top)
            path = pth.join(url, metadata.METADATA_FILENAME)
            if pth.exists(path):
                # url is a project dir
                dist = database.Distribution(
                    metadata=metadata.Metadata(path=path),
                    )
                dist.requested = True
                dists.add(dist)

                dist.metadata.source_url = url

                #TODO: build_requires/test_requires/etc/?
                urldata = dist.run_requires

        if urldata is None:
            # Not a project dir: treat as a requirements file/URL and read
            # one spec per line (Python 2 urllib.urlopen accepts local
            # paths as well as remote URLs).
            urldata = urllib.urlopen(url).read().splitlines()

        for spec in sorted(urldata):
            spec = spec.strip()
            if not spec or spec[0]=='#':
                continue

            # handle urls a bit nicer
            if '://' in spec:
                spec = '__anonymous__ (from {0})'.format(spec)

            req = parse_requirement(spec)
            if not req:
                continue

            if req.url:
                # Let the git locator record a hint for direct-URL refs.
                req = cnf.git_locator.add_hint(req, cnf)

            key = req.name.lower()
            if key in reqts and req.constraints:
                #FIXME: handle extras/url
                # merge requirements
                constraints = reqts[key].constraints or list()
                constraints.extend(req.constraints)
                constraints = ', '.join(' '.join(c) for c in constraints)
                spec = '{0} ({1})'.format(req.name, constraints)
                req = parse_requirement(spec)
                req.origins = reqts[key].origins

            # Track every source URL that contributed to this requirement.
            if not hasattr(req, 'origins'):
                req.origins = list()
            req.origins.append(url)
            reqts[key] = req

    #FIXME: get zippy here once cnf.dependency_finder finds local checkouts
    # Force-inject interpreter/bootstrap pins unless already requested.
    for special in ('Python (== 2.7.7, < 3.0)', 'setuptools (< 8)'):
        req = parse_requirement(special)
        key = req.name.lower()
        # NOTE(review): ``dists`` holds Distribution objects, so a string
        # key can never be "in dists" — the second test looks vestigial;
        # confirm whether ``{d.key for d in dists}`` was intended.
        if key not in reqts and key not in dists:
            req.origins = ['(internal)']
            reqts[key] = req

    # Emit a consolidated requirements file, grouping specs under a
    # comment line naming their origin(s).
    origin = None
    node = cnf.bldnode.find_or_declare('config.requirements.txt')
    with open(node.abspath(), mode='w') as fp:
        for _,_,req in sorted(
            (req.origins, req.requirement.lower(), req)
            for req in reqts.values()
            ):
            if origin != req.origins:
                origin = req.origins
                fp.write('# {0}\n'.format(', '.join(req.origins)))
            fp.write('{0}\n'.format(req.requirement))

    Logs.pprint(None, 'Resolving distributions...')
    #FIXME: drop Anonymous once cnf.dependency_finder finds local checkouts
    # Wrap all requirements in a synthetic dist so the finder resolves
    # them as one dependency graph, then drop the wrapper from the hits.
    anonymous = make_dist('Anonymous', '1.0')
    requirements = tuple(req.requirement for req in reqts.values())
    anonymous.metadata.add_requirements(requirements)
    hits, probs = cnf.dependency_finder.find(anonymous)
    hits.discard(anonymous)
    # Post-process resolution problems: expand each 'cantreplace' into one
    # problem per blocked requirement, and drop a known-bogus entry.
    for prob in list(probs):
        if prob[0] == 'cantreplace':
            probs.discard(prob)
            for r in prob[3]:
                probs.add((prob[0], r))
        elif prob[0] == 'unsatisfied' and prob[1].startswith('dateutil '):
            # bogus dist (should be python-dateutil) referenced by tastypie?
            probs.discard(prob)

    if probs:
        # Group the remaining problems by type and report each with the
        # origin plus the constraint (or URL) that failed, then abort.
        problems = defaultdict(list)
        for typ, spec in probs:
            req = parse_requirement(spec)
            req = reqts.get(req.name.lower(), req)
            if not hasattr(req, 'origins'):
                req.origins = list()
            for i, origin in enumerate(req.origins):
                constraint = None
                problem = '{0}: {1}'.format(origin, req.name)
                if req.url:
                    constraint = ('from', req.url)
                elif req.constraints and len(req.constraints) > i:
                    constraint = req.constraints[i]
                if constraint:
                    problem += ' ({0} {1})'.format(*constraint)
                problems[typ].append(problem)
        problem_str = list()
        for problem_key in sorted(problems):
            problem_str.append(
                'dependency failure ({0}):'.format(problem_key),
                )
            for problem_val in problems[problem_key]:
                problem_str.append('    {}'.format(problem_val))
        problem_str = '\n'.join(problem_str)
        cnf.fatal(problem_str)

    # Persist the resolved distribution metadata on the zpy store.
    dists.update(hits)
    zpy.dist.update(
        (dist.key, dist.metadata.dictionary)
        for dist in dists
        )

    # Pretty-print the resolved set (name in bold blue, version in blue).
    for dist in sorted(dists, key=operator.attrgetter('key')):
        #FIXME: .format()
        sys.stderr.write('%7s %s%s %s%s\n%s' % (
            '',
            Logs.colors.BOLD_BLUE,
            dist.name,
            Logs.colors.NORMAL + Logs.colors.BLUE,
            dist.version,
            Logs.colors.NORMAL,
            ))

    # Feed the resolved dists into the nested build as task sources and
    # make sure the 'zpy-requirements' feature drives them.
    feats = Utils.to_list(ctx.get('features', ''))
    if 'zpy-requirements' not in feats:
        feats.append('zpy-requirements')
        ctx['features'] = feats
    inputs = ctx.setdefault('source', list())
    inputs.extend(dists)

    #FIXME:upstream:waf
    # workaround to clobbering .wafpickle-* cache
    # (temporarily point waf's pickle DB at a '-requirements' variant so
    # the nested build does not overwrite the outer build's cache)
    dbfile_orig = Context.DBFILE
    Context.DBFILE = dbfile_orig + '-requirements'
    bld = sub_build(cnf, ctx, logger=cnf.logger)
    bld.compile()
    Context.DBFILE = dbfile_orig

    # Locate the just-built python tree; abort if the sub-build did not
    # produce it.
    python = cnf.zippy_dist_get('python')
    py = cnf.bldnode.find_node('python')
    if py is None:
        cnf.fatal('%s does not exist' % python.name_and_version)

    # Derive and cache interpreter version/path constants via py_v()
    # (e.g. 'x.y' -> '2.7', 'ptx.y' -> versioned interpreter name).
    zpy.PYTHON = pth.join(py.abspath(), py_v('pt'))
    zpy.py_v = tuple(map(int, python.version.split('.')))
    zpy.py_fqn = py_v('pt-x.y.z', v=zpy.py_v)
    zpy.py_v1 = py_v('x', v=zpy.py_v)
    zpy.py_v2 = py_v('x.y', v=zpy.py_v)
    zpy.py_v3 = py_v('x.y.z', v=zpy.py_v)
    zpy.py_ver1 = py_v('ptx', v=zpy.py_v)
    zpy.py_ver2 = py_v('ptx.y', v=zpy.py_v)
    zpy.py_ver3 = py_v('ptx.y.z', v=zpy.py_v)
    zpy.py_ver2_nodot = py_v('ptxy', v=zpy.py_v)
    zpy.o_stlib = 'lib%s.a' % zpy.py_ver2
    zpy.O_PYTHON = pth.join(zpy.o_bin, zpy.py_ver2)
    zpy.o_lib_py = pth.join(zpy.o_lib, zpy.py_ver2)
    zpy.o_lib_py_site = pth.join(zpy.o_lib_py, 'site-packages')
    zpy.o_inc_py = pth.join(zpy.o_inc, zpy.py_ver2)
    zpy.o_landmark = pth.join(zpy.o_lib_py, zpy.landmark)
    Utils.check_dir(zpy.o_lib_py_site)

    # Runtime environment for invoking the bundled interpreter in-tree.
    _pybuilddir = 'build/lib.%s-%s' % (
        distutils.util.get_platform(), zpy.py_v2,
        )
    zpy.pybuilddir = py.make_node(_pybuilddir).abspath()
    zpy.pylibdir = py.make_node('Lib').abspath()
    zpy.env['PYTHONHOME'] = zpy.o
    zpy.env['PYTHONPATH'] = ':'.join([
        'wheel-{0}/lib'.format(zpy.tstamp),
        zpy.pybuilddir,
        zpy.pylibdir,
        zpy.o_lib_py_site,
        ])

    # touch LANDMARK so PYTHONHOME doesn't need export afterwards
    open(pth.join(zpy.o_lib_py, zpy.landmark), mode='a').close()