Code example #1
def apply_patch_if_needed(home, version_string):

    patch_applied_path = os.path.join(home, "cuppa_test_patch_applied.txt")

    expected_diff_file = os.path.join(
        os.path.split(__file__)[0],
        "boost_test_patch_{}.diff".format(version_string))

    available_diff_files = sorted(glob.glob(
        os.path.join(os.path.split(__file__)[0], "boost_test_patch_*.diff")),
                                  reverse=True)

    for diff_file in available_diff_files:
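        # Take the first (newest) diff whose filename is not lexicographically greater than the expected one.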
        if diff_file <= expected_diff_file:
            break

    logger.debug("Using diff file [{}]".format(as_info(diff_file)))

    if os.path.exists(patch_applied_path):
        logger.debug("[{}] already applied".format(as_info(diff_file)))
        return

    command = "patch --batch -p1 --input={}".format(diff_file)

    logger.info("Applying [{}] using [{}] in [{}]".format(
        as_info(diff_file), as_info(command), as_info(home)))

    if subprocess.call(shlex.split(command), cwd=home) != 0:
        logger.error("Could not apply [{}]".format(diff_file))
    else:
        with open(patch_applied_path, "w") as patch_applied_file:
            pass
Code example #2
File: location.py  Project: iCodeIN/cuppa
    def obtain_from_repository( self, location, full_url, local_dir_with_sub_dir, vc_type, vcs_backend ):
        rev_options = self.get_rev_options( vc_type, vcs_backend )
        action = "Cloning"
        if vc_type == "svn":
            action = "Checking out"
        max_attempts = 2
        attempt = 1
        while attempt <= max_attempts:
            logger.info( "{} [{}] into [{}]{}".format(
                    action,
                    as_info( location ),
                    as_info( local_dir_with_sub_dir ),
                    attempt > 1 and "(attempt {})".format( str(attempt) ) or ""
            ) )
            try:
                obtain( vcs_backend, local_dir_with_sub_dir, vcs_backend.url )
                logger.debug( "Successfully retrieved [{}]".format( as_info( location ) ) )
                break
            except pip_exceptions.PipError as error:
                attempt = attempt + 1
                log_as = logger.warn
                if attempt > max_attempts:
                    log_as = logger.error

                log_as( "Could not retrieve [{}] into [{}]{} due to error [{}]".format(
                        as_info( location ),
                        as_notice( local_dir_with_sub_dir ),
                        ( rev_options and  " to {}".format( as_notice(  str(rev_options) ) ) or ""),
                        as_error( str(error) )
                ) )
                if attempt > max_attempts:
                    raise LocationException( str(error) )
Code example #3
File: configure.py  Project: necrosisbb/cuppa
 def _clear_config( self, conf_path ):
     if os.path.exists( conf_path ):
         logger.info( "Removing configure file [{}]".format(
                 as_info( conf_path ) ) )
         os.remove( conf_path )
     else:
         logger.info( "Configure file [{}] does not exist. Unconfigure not needed".format(
                 as_info( conf_path ) ) )
Code example #4
File: configure.py  Project: marcelhuberfoo/cuppa
 def _remove_settings( self ):
     initial_option_count = len(self._loaded_options)
     logger.info( "Remove settings requested for the following options {}".format( self._remove ) )
     for setting in self._remove:
         if setting in self._loaded_options:
             del self._loaded_options[setting]
             logger.info( "Removing option [{}] as requested".format( as_notice( "--" + setting ) ) )
     if initial_option_count != len(self._loaded_options):
         self._update_conf()
Code example #5
File: configure.py  Project: necrosisbb/cuppa
 def _remove_settings( self, conf_path, remove_options ):
     initial_option_count = len(self._loaded_options)
     logger.info( "Remove settings requested for the following options {}".format( remove_options ) )
     for setting in remove_options:
         if setting in self._loaded_options:
             del self._loaded_options[setting]
             logger.info( "Removing option [{}] as requested".format( as_notice( "--" + setting ) ) )
     if initial_option_count != len(self._loaded_options):
         self._update_conf( conf_path )
Code example #6
    def get_local_directory_for_download_url(self, location, sub_dir,
                                             local_directory):

        logger.debug("[{}] is an archive download".format(as_info(location)))

        local_dir_with_sub_dir = os.path.join(local_directory,
                                              sub_dir and sub_dir or "")
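        # "sub_dir and sub_dir or ''" is the pre-ternary conditional idiom: use sub_dir when it is truthy, otherwise "".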

        # First we check to see if we already downloaded and extracted this archive before
        if os.path.exists(local_dir_with_sub_dir):
            try:
                # If not empty this will fail
                os.rmdir(local_dir_with_sub_dir)
            except:
                # Not empty so we'll return this as the local_directory

                logger.debug("(already present) Location = [{}]".format(
                    as_info(location)))
                logger.debug("(already present) Local folder = [{}]".format(
                    as_info(str(self._local_folder))))

                return local_directory

        if self._cuppa_env['dump'] or self._cuppa_env['clean']:
            return local_directory

        # If not we then check to see if we cached the download
        cached_archive = self.get_cached_archive(self._cuppa_env['cache_root'],
                                                 self._local_folder)
        if cached_archive:
            logger.debug("Cached archive [{}] found for [{}]".format(
                as_info(cached_archive), as_info(location)))
            self.extract(cached_archive, local_dir_with_sub_dir)
        else:
            logger.info("Downloading [{}]...".format(as_info(location)))
            try:
                report_hook = None
                if logger.isEnabledFor(logging.INFO):
                    report_hook = ReportDownloadProgress()
                filename, headers = urlretrieve(location,
                                                reporthook=report_hook)
                name, extension = os.path.splitext(filename)
                logger.info("[{}] successfully downloaded to [{}]".format(
                    as_info(location), as_info(filename)))
                self.extract(filename, local_dir_with_sub_dir)
                if self._cuppa_env['cache_root']:
                    cached_archive = os.path.join(
                        self._cuppa_env['cache_root'], self._local_folder)
                    logger.debug("Caching downloaded file as [{}]".format(
                        as_info(cached_archive)))
                    shutil.copyfile(filename, cached_archive)
            except ContentTooShortError as error:
                logger.error("Download of [{}] failed with error [{}]".format(
                    as_error(location), as_error(str(error))))
                raise LocationException(error)

        return local_directory
Code example #7
File: configure.py  Project: necrosisbb/cuppa
 def _load_settings_from_file( self, conf_path, conf_file, settings ):
     logger.info( "Configure file [{}] exists. Load stored settings...".format( as_info( conf_path ) ) )
     for line in conf_file.readlines():
         line = line.strip()
         if not line or line.startswith('#'):
             continue
         name, value = tuple( word.strip() for word in line.split('=', 1) )
         try:
             value = ast.literal_eval( str(value) )
         except:
             pass
         self._print_setting( 'loading', name, value )
         settings[name] = value
Code example #8
File: configure.py  Project: marcelhuberfoo/cuppa
    def load( self ):
        self._show   = self._env.get_option( 'show_conf' )
        self._save   = self._env.get_option( 'save_conf' )
        self._remove = self._env.get_option( 'remove_settings' )
        self._update = self._env.get_option( 'update_conf' )
        self._clear  = self._env.get_option( 'clear_conf' )

        self._configure   = self._save or self._remove or self._update

        self._clean       = self._env.get_option( 'clean' )

        self._unconfigure =  ( self._save and self._clean ) or self._clear

        if self._unconfigure:
            self._configure = False
            logger.info( "{}".format( as_notice( "Clear configuration requested..." ) ) )
            if os.path.exists( self._conf_path ):
                logger.info( "Removing configure file [{}]".format(
                        as_info( self._conf_path ) ) )
                os.remove( self._conf_path )
            else:
                logger.info( "Configure file [{}] does not exist. Unconfigure not needed".format(
                        as_info( self._conf_path ) ) )
            return
        elif self._configure:
            logger.info( "{}".format( as_notice( "Update configuration requested..." ) ) )

        if not self._save:
            self._loaded_options = self._load_conf()
        else:
            self._loaded_options = {}
        self._env['configured_options'] = self._loaded_options
        self._env['default_options'].update( self._loaded_options )
Code example #9
    def dump(cls):
        import json

        def expand_node(node):
            if isinstance(node, list):
                return [expand_node(i) for i in node]
            elif isinstance(node, dict):
                return {str(k): expand_node(v) for k, v in node.iteritems()}
            elif isinstance(node, set):
                return [expand_node(s) for s in node]
            elif hasattr(node, "__dict__"):
                return {
                    str(k): expand_node(v)
                    for k, v in node.__dict__.iteritems()
                }
            else:
                return str(node)

        logger.info(as_info_label("Displaying Options"))
        options = json.dumps(expand_node(cls._options),
                             sort_keys=True,
                             indent=4)
        logger.info("\n" + options + "\n")

        logger.info(as_info_label("Displaying Methods"))
        methods = json.dumps(expand_node(cls._methods),
                             sort_keys=True,
                             indent=4)
        logger.info("\n" + methods + "\n")
Code example #10
File: version.py  Project: jondo2010/cuppa
def check_current_version():

    installed_version = get_version()
    logger.info("cuppa: version {}".format(as_info(installed_version)))
    try:
        pypi = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
        latest_available = pypi.package_releases('cuppa')[0]
        if parse_version(installed_version) < parse_version(latest_available):
            logger.warn(
                "Newer version [{}] available. Upgrade using \"{}\"\n".format(
                    as_warning(latest_available),
                    as_emphasised("pip install -U cuppa")))
    except:
        pass
Code example #11
File: version.py  Project: ja11sop/cuppa
def check_current_version( offline ):

    installed_version = get_version()
    logger.info( "cuppa: version {}".format( as_info( installed_version ) ) )
    if not offline:
        try:
            pypi = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
            latest_available = pypi.package_releases('cuppa')[0]
            if parse_version( installed_version ) < parse_version( latest_available ):
                logger.warn( "Newer version [{}] available. Upgrade using \"{}\"\n".format(
                        as_warning( latest_available ),
                        as_emphasised( "pip install -U cuppa" )
                ) )
        except:
            pass
Code example #12
File: configure.py  Project: necrosisbb/cuppa
    def _load_conf( self ):
        settings = {}

        if os.path.exists( self._global_conf_path ):
            with open( self._global_conf_path ) as conf_file:
                self._load_settings_from_file( self._global_conf_path, conf_file, settings )

        if os.path.exists( self._conf_path ):
            with open(self._conf_path) as conf_file:
                self._load_settings_from_file( self._conf_path, conf_file, settings )

        if settings:
            logger.info( "Load complete" )
        else:
            logger.info( "No settings to load, skipping configure" )
        return settings
Code example #13
File: configure.py  Project: necrosisbb/cuppa
    def load( self ):
        self._show          = self._env.get_option( 'show_conf' )
        self._save          = self._env.get_option( 'save_conf' )
        self._save_global   = self._env.get_option( 'save_global_conf' )
        self._remove        = self._env.get_option( 'remove_settings' )
        self._remove_global = self._env.get_option( 'remove_global_settings' )
        self._update        = self._env.get_option( 'update_conf' )
        self._update_global = self._env.get_option( 'update_global_conf' )
        self._clear         = self._env.get_option( 'clear_conf' )
        self._clear_global  = self._env.get_option( 'clear_global_conf' )

        self._configure = (
                self._save
            or  self._save_global
            or  self._remove
            or  self._remove_global
            or  self._update
            or  self._update_global
        )

        self._clean = self._env.get_option( 'clean' )

        self._unconfigure = (
                ( ( self._save or self._save_global ) and self._clean )
            or  self._clear
            or  self._clear_global
        )

        if self._unconfigure:
            self._configure = False
            logger.info( "{}".format( as_notice( "Clear configuration requested..." ) ) )

            if self._save or self._clear:
                self._clear_config( self._conf_path )

            if self._save_global or self._clear_global:
                self._clear_config( self._global_conf_path )

        elif self._configure:
            logger.info( "{}".format( as_notice( "Update configuration requested..." ) ) )

        if not self._save and not self._save_global:
            self._loaded_options = self._load_conf()
        else:
            self._loaded_options = {}
        self._env['configured_options'] = self._loaded_options
        self._env['default_options'].update( self._loaded_options )
Code example #14
File: configjam.py  Project: necrosisbb/cuppa
    def __call__( self, target, source, env ):
        path = str(target[0])
        if not os.path.exists( path ):
            toolchain = env['toolchain']
            current_toolset = "using {} : {} :".format( toolset_name_from_toolchain( toolchain ), toolchain.cxx_version() )
            toolset_config_line = "{} {} ;\n".format( current_toolset, toolchain.binary() )

            with open( path, 'w' ) as toolchain_config:
                logger.info( "adding toolset config [{}] to dummy toolset config [{}]".format( str(toolset_config_line.strip()), path ) )
                toolchain_config.write( toolset_config_line )

            self._update_project_config_jam(
                os.path.join( os.path.split( path )[0], "project-config.jam" ),
                current_toolset,
                toolset_config_line
            )

        return None
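As an illustration, with hypothetical values toolset_name_from_toolchain( toolchain ) == "gcc", toolchain.cxx_version() == "9" and toolchain.binary() == "/usr/bin/g++", the line written to the dummy toolset config would be:

current_toolset     = "using {} : {} :".format( "gcc", "9" )
toolset_config_line = "{} {} ;\n".format( current_toolset, "/usr/bin/g++" )
# toolset_config_line == "using gcc : 9 : /usr/bin/g++ ;\n"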
Code example #15
    def add_toolchains( self, env ):
        toolchains = self.toolchains_key
        cuppa.modules.registration.add_to_env( toolchains, env, env.add_available_toolchain, env.add_supported_toolchain )

        logger.trace( "supported toolchains are [{}]".format(
                colour_items( env["supported_toolchains"] )
        ) )
        logger.info( "available toolchains are [{}]".format(
                colour_items( sorted( env[toolchains].keys(), reverse=True ), as_info )
        ) )

        SCons.Script.AddOption(
            '--toolchains',
            type     = 'string',
            nargs    = 1,
            action   = 'callback',
            callback = ParseToolchainsOption( env['supported_toolchains'], env[toolchains].keys() ),
            help     = 'The Toolchains you wish to build against. A comma separate list with wildcards'
                       ' may be provided. For example --toolchains=gcc*,clang37,clang36'
        )
Code example #16
File: location.py  Project: iCodeIN/cuppa
 def update_from_repository( self, location, full_url, local_dir_with_sub_dir, vc_type, vcs_backend ):
     url, repository, branch, remote, revision = self.get_info( location, local_dir_with_sub_dir, full_url, vc_type )
     rev_options = self.get_rev_options( vc_type, vcs_backend, local_remote=remote )
     version = self.ver_rev_summary( branch, revision, self._full_url.path )[0]
     logger.info( "Updating [{}] in [{}]{} at [{}]".format(
             as_info( location ),
             as_notice( local_dir_with_sub_dir ),
             ( rev_options and  " on {}".format( as_notice( str(rev_options) ) ) or "" ),
             as_info( version )
     ) )
     try:
         update( vcs_backend, local_dir_with_sub_dir, rev_options )
         logger.debug( "Successfully updated [{}]".format( as_info( location ) ) )
     except pip_exceptions.PipError as error:
         logger.warn( "Could not update [{}] in [{}]{} due to error [{}]".format(
                 as_warning( location ),
                 as_warning( local_dir_with_sub_dir ),
                 ( rev_options and  " at {}".format( as_warning( str(rev_options) ) ) or "" ),
                 as_warning( str(error) )
         ) )
Code example #17
 def get_entry(self,
               index_file,
               lines_summary,
               branches_summary,
               tool_variant_dir,
               offset_dir,
               destination,
               subdir=None,
               name=None):
     entry_string = "{}\n{}\n{}\n{}\n{}{}{}".format(
         index_file.strip(),
         lines_summary.strip(),
         branches_summary.strip(),
         tool_variant_dir.strip(),
         offset_dir.strip(),
         subdir and "\n" + subdir.strip() or "",
         name and "\n" + name.strip() or "",
     )
     logger.info("coverage entry from\n{}\nin {}".format(
         as_info(entry_string), as_notice(destination)))
     return coverage_entry.create_from_string(entry_string, destination)
Code example #18
File: patch_boost.py  Project: kjing/cuppa
def apply_patch_if_needed(home):

    patch_applied_path = os.path.join(home, "cuppa_test_patch_applied.txt")
    diff_file = "boost_test_patch.diff"

    if os.path.exists(patch_applied_path):
        logger.debug("[{}] already applied".format(as_info(diff_file)))
        return

    diff_path = os.path.join(os.path.split(__file__)[0], "boost", diff_file)

    command = "patch --batch -p1 --input={}".format(diff_path)

    logger.info("Applying [{}] using [{}] in [{}]".format(
        as_info(diff_file), as_info(command), as_info(home)))

    if subprocess.call(shlex.split(command), cwd=home) != 0:
        logger.error("Could not apply [{}]".format(diff_file))

    with open(patch_applied_path, "w") as patch_applied_file:
        pass
Code example #19
def _determine_latest_boost_verion(offline):
    current_release = "1.71.0"
    if not offline:
        try:
            boost_version_url = 'https://www.boost.org/users/download/'
            logger.info("Checking current boost version from {}...".format(
                as_info(boost_version_url)))
            html = lxml.html.parse(urlopen(boost_version_url))

            current_release = html.xpath(
                "/html/body/div[2]/div/div[1]/div/div/div[2]/h3[1]/span"
            )[0].text
            current_release = str(
                re.search(r'(\d[.]\d+([.]\d+)?)', current_release).group(1))

            logger.info("Latest boost release detected as [{}]".format(
                as_info(current_release)))

        except Exception as e:
            logger.warn(
                "Cannot determine latest version of boost - [{}]. Assuming [{}]."
                .format(str(e), current_release))
    else:
        logger.info(
            "In offline mode. No version of boost specified so assuming [{}]".
            format(as_info(current_release)))

    return current_release
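The regular expression keeps only the dotted release number from the scraped heading. For example, given a heading text of "Version 1.71.0" (hypothetical page content):

import re

text = "Version 1.71.0"
print(re.search(r'(\d[.]\d+([.]\d+)?)', text).group(1))  # -> 1.71.0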
Code example #20
File: patch_boost.py  Project: ja11sop/cuppa
def apply_patch_if_needed( home, version_string ):

    patch_applied_path = os.path.join( home, "cuppa_test_patch_applied.txt" )

    expected_diff_file = os.path.join(
            os.path.split( __file__ )[0],
            "boost_test_patch_{}.diff".format( version_string )
    )

    available_diff_files = sorted( glob.glob( os.path.join(
            os.path.split( __file__ )[0],
            "boost_test_patch_*.diff"
    ) ), reverse=True )

    for diff_file in available_diff_files:
        if diff_file <= expected_diff_file:
            break

    logger.debug( "Using diff file [{}]".format( as_info( diff_file ) ) )

    if os.path.exists( patch_applied_path ):
        logger.debug( "[{}] already applied".format( as_info( diff_file ) ) )
        return

    command = "patch --batch -p1 --input={}".format( diff_file )

    logger.info( "Applying [{}] using [{}] in [{}]".format(
            as_info( diff_file ),
            as_info( command ),
            as_info( home )
    ) )

    if subprocess.call( shlex.split( command ), cwd=home ) != 0:
        logger.error( "Could not apply [{}]".format( diff_file ) )
    else:
        with open( patch_applied_path, "w" ) as patch_applied_file:
            pass
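The selection loop above relies on plain lexicographic comparison of the reverse-sorted file names. A standalone sketch of the same idea, using made-up file names (the actual naming of the shipped diff files may differ):

available = sorted( [ "boost_test_patch_1_68_0.diff", "boost_test_patch_1_70_0.diff" ], reverse=True )
expected  = "boost_test_patch_1_69_0.diff"
for chosen in available:
    if chosen <= expected:  # first name that is not newer than the expected one
        break
print( chosen )  # -> boost_test_patch_1_68_0.diff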
Code example #21
    def apply_patch_if_needed( cls, home ):

        patch_applied_path = os.path.join( home, "cuppa_test_patch_applied.txt" )
        diff_file = "boost_test_patch.diff"

        if os.path.exists( patch_applied_path ):
            logger.debug( "[{}] already applied".format( as_info( diff_file ) ) )
            return

        diff_path = os.path.join( os.path.split( __file__ )[0], "boost", diff_file )

        command = "patch --batch -p1 --input={}".format( diff_path )

        logger.info( "Applying [{}] using [{}] in [{}]".format(
                as_info( diff_file ),
                as_info( command ),
                as_info( home )
        ) )

        if subprocess.call( shlex.split( command ), cwd=home ) != 0:
            logger.error( "Could not apply [{}]".format( diff_file ) )

        with open( patch_applied_path, "w" ) as patch_applied_file:
            pass
Code example #22
File: environment.py  Project: ja11sop/cuppa
    def dump( cls ):
        import json

        def expand_node( node ):
            if isinstance( node, list ):
                return [ expand_node(i) for i in node ]
            elif isinstance( node, dict ):
                return { str(k): expand_node(v) for k,v in node.iteritems() }
            elif isinstance( node, set ):
                return [ expand_node(s) for s in node ]
            elif hasattr( node, "__dict__" ):
                return { str(k): expand_node(v) for k,v in node.__dict__.iteritems() }
            else:
                return str( node )

        logger.info( as_info_label( "Displaying Options" ) )
        options = json.dumps( expand_node(cls._options), sort_keys=True, indent=4 )
        logger.info( "\n" + options + "\n" )

        logger.info( as_info_label("Displaying Methods" ) )
        methods = json.dumps( expand_node(cls._methods), sort_keys=True, indent=4 )
        logger.info( "\n" + methods + "\n" )
Code example #23
    def load(self):
        self._show = self._env.get_option('show_conf')
        self._save = self._env.get_option('save_conf')
        self._remove = self._env.get_option('remove_settings')
        self._update = self._env.get_option('update_conf')
        self._clear = self._env.get_option('clear_conf')

        self._configure = self._save or self._remove or self._update

        self._clean = self._env.get_option('clean')

        self._unconfigure = (self._save and self._clean) or self._clear

        if self._unconfigure:
            self._configure = False
            logger.info("{}".format(
                as_notice("Clear configuration requested...")))
            if os.path.exists(self._conf_path):
                logger.info("Removing configure file [{}]".format(
                    as_info(self._conf_path)))
                os.remove(self._conf_path)
            else:
                logger.info(
                    "Configure file [{}] does not exist. Unconfigure not needed"
                    .format(as_info(self._conf_path)))
            return
        elif self._configure:
            logger.info("{}".format(
                as_notice("Update configuration requested...")))

        if not self._save:
            self._loaded_options = self._load_conf()
        else:
            self._loaded_options = {}
        self._env['configured_options'] = self._loaded_options
        self._env['default_options'].update(self._loaded_options)
Code example #24
File: configure.py  Project: marcelhuberfoo/cuppa
 def _load_conf( self ):
     settings = {}
     if os.path.exists(self._conf_path):
         with open(self._conf_path) as config_file:
             logger.info( "Configure file [{}] exists. Load stored settings...".format(
                     as_info( self._conf_path ) ) )
             for line in config_file.readlines():
                 name, value = tuple( l.strip() for l in line.split('=', 1) )
                 try:
                     value = ast.literal_eval( str(value) )
                 except:
                     pass
                 self._print_setting( 'loading', name, value )
                 settings[name] = value
     if settings:
         logger.info( "Load complete" )
     else:
         logger.info( "No settings to load, skipping configure" )
     return settings
Code example #25
File: version_and_location.py  Project: ja11sop/cuppa
def _determine_latest_boost_verion( offline ):
    current_release = "1.69.0"
    if not offline:
        try:
            boost_version_url = 'https://www.boost.org/users/download/'
            logger.info( "Checking current boost version from {}...".format( as_info( boost_version_url ) ) )
            html = lxml.html.parse( urllib2.urlopen( boost_version_url ) )

            current_release = html.xpath("/html/body/div[2]/div/div[1]/div/div/div[2]/h3[1]/span")[0].text
            current_release = str( re.search( r'(\d[.]\d+([.]\d+)?)', current_release ).group(1) )

            logger.info( "Latest boost release detected as [{}]".format( as_info( current_release ) ) )

        except Exception as e:
            logger.warn( "Cannot determine latest version of boost - [{}]. Assuming [{}].".format( str(e), current_release ) )
    else:
        logger.info( "In offline mode. No version of boost specified so assuming [{}]".format( as_info( current_release ) ) )

    return current_release
Code example #26
 def _load_conf(self):
     settings = {}
     if os.path.exists(self._conf_path):
         with open(self._conf_path) as config_file:
             logger.info(
                 "Configure file [{}] exists. Load stored settings...".
                 format(as_info(self._conf_path)))
             for line in config_file.readlines():
                 name, value = tuple(l.strip() for l in line.split('=', 1))
                 try:
                     value = ast.literal_eval(str(value))
                 except:
                     pass
                 self._print_setting('loading', name, value)
                 settings[name] = value
     if settings:
         logger.info("Load complete")
     else:
         logger.info("No settings to load, skipping configure")
     return settings
Code example #27
File: construct.py  Project: iCodeIN/cuppa
    def build(self, cuppa_env):

        #        cuppa.progress.NotifyProgress.register_callback( None, self.on_progress )

        cuppa_env['empty_env'] = cuppa_env.create_env()
        projects = cuppa_env.get_option('projects')
        toolchains = cuppa_env['active_toolchains']

        if not projects:
            projects = cuppa_env['default_projects']

            if not projects or not cuppa_env['run_from_launch_dir']:
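                # No explicit projects given (or not running from the launch dir): discover sconscripts under the launch dir.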
                sub_sconscripts = self.get_sub_sconscripts(
                    cuppa_env['launch_dir'],
                    [cuppa_env['build_root'], cuppa_env['download_root']])
                if sub_sconscripts:
                    projects = sub_sconscripts
                    logger.info("Using sub-sconscripts [{}]".format(
                        colour_items(projects)))
            elif projects:
                logger.info("Using default_projects [{}]".format(
                    colour_items(projects)))

        if projects:

            sconscripts = []

            for project in projects:

                if (not os.path.exists(project)
                        and not cuppa_env['run_from_launch_dir']
                        and not os.path.isabs(project)):

                    path = os.path.join(cuppa_env['launch_dir'], project)

                    if os.path.exists(path):
                        if os.path.isdir(path):
                            sub_sconscripts = self.get_sub_sconscripts(
                                project, [
                                    cuppa_env['build_root'],
                                    cuppa_env['download_root']
                                ])
                            if sub_sconscripts:
                                logger.info(
                                    "Reading project folder [{}] and using sub-sconscripts [{}]"
                                    .format(project,
                                            colour_items(sub_sconscripts)))
                                sconscripts.extend(sub_sconscripts)
                        else:
                            sconscripts.append(path)

                elif os.path.exists(project) and os.path.isdir(project):
                    sub_sconscripts = self.get_sub_sconscripts(
                        project,
                        [cuppa_env['build_root'], cuppa_env['download_root']])
                    if sub_sconscripts:
                        logger.info(
                            "Reading project folder [{}] and using sub-sconscripts [{}]"
                            .format(project, colour_items(sub_sconscripts)))
                        sconscripts.extend(sub_sconscripts)
                else:
                    sconscripts.append(project)

            for toolchain in toolchains:
                build_envs = self.create_build_envs(toolchain, cuppa_env)
                for build_env in build_envs:
                    for sconscript in sconscripts:
                        decider = cuppa_env.get_option('decider')
                        if decider:
                            build_env['env'].Decider(decider)
                        self.call_project_sconscript_files(
                            toolchain, build_env['variant'],
                            build_env['target_arch'], build_env['abi'],
                            build_env['env'], sconscript)

            if cuppa_env['dump']:
                print(
                    "cuppa: Performing dump only, so no builds will be attempted."
                )
                print("cuppa: Nothing to be done. Exiting.")
                SCons.Script.Exit()

        else:
            logger.warn("No projects to build. Nothing to be done")
Code example #28
File: construct.py  Project: iCodeIN/cuppa
    def create_build_envs(self, toolchain, cuppa_env):

        propagate_environment = cuppa_env['propagate_env']
        propagate_path = cuppa_env['propagate_path']
        merge_path = cuppa_env['merge_path']

        variants = cuppa_env[self.variants_key]
        actions = cuppa_env[self.actions_key]

        target_architectures = cuppa_env['target_architectures']

        if not target_architectures:
            target_architectures = [None]

        def get_active_from_options(tasks):
            active_tasks = {}
            for key, task in tasks.items():
                if cuppa_env.get_option(task.name()):
                    active_tasks[task.name()] = task
            return active_tasks

        active_variants = get_active_from_options(variants)
        active_actions = get_active_from_options(actions)

        def get_active_from_defaults(default_tasks, tasks):
            active_tasks = {}
            for task in default_tasks:
                if task in tasks.keys():
                    active_tasks[task] = tasks[task]
            return active_tasks

        if not active_variants and not active_actions:
            default_variants = cuppa_env[
                'default_variants'] or toolchain.default_variants()
            if default_variants:
                active_variants = get_active_from_defaults(
                    default_variants, variants)
                active_actions = get_active_from_defaults(
                    default_variants, actions)
                if active_variants:
                    logger.info(
                        "Default build variants of [{}] being used.".format(
                            colour_items(active_variants, as_info)))
                if active_actions:
                    logger.info(
                        "Default build actions of [{}] being used.".format(
                            colour_items(active_actions, as_info)))

        if not active_variants:
            active_variants = get_active_from_defaults(
                toolchain.default_variants(), variants)
            logger.info(
                "No active variants specified so toolchain defaults of [{}] being used."
                .format(colour_items(active_variants, as_info)))

        logger.debug("Using active_variants = [{}]".format(
            colour_items(active_variants, as_info)))
        logger.debug("Using active_actions = [{}]".format(
            colour_items(active_actions, as_info)))

        build_envs = []

        for key, variant in active_variants.items():

            for target_arch in target_architectures:

                env, target_arch = toolchain.make_env(cuppa_env, variant,
                                                      target_arch)

                if env:

                    # TODO: Refactor this code out
                    if propagate_environment or propagate_path or merge_path:

                        def merge_paths(default_paths, env_paths):
                            path_set = set(default_paths + env_paths)

                            def record_path(path):
                                path_set.discard(path)
                                return path

                            return [
                                record_path(p)
                                for p in default_paths + env_paths
                                if p in path_set
                            ]

                        def get_paths_from(environment):
                            return 'PATH' in environment and environment[
                                'PATH'].split(os.pathsep) or []

                        default_paths = get_paths_from(env['ENV'])
                        env_paths = get_paths_from(os.environ)
                        if propagate_environment:
                            env['ENV'] = os.environ.copy()
                            logger.debug(
                                "propagating environment for [{}:{}] to all subprocesses: [{}]"
                                .format(variant.name(), target_arch,
                                        as_notice(str(env['ENV']))))
                        if propagate_path and not propagate_environment:
                            env['ENV']['PATH'] = env_paths
                            logger.debug(
                                "propagating PATH for [{}:{}] to all subprocesses: [{}]"
                                .format(variant.name(), target_arch,
                                        colour_items(env_paths)))
                        elif merge_path:
                            merged_paths = merge_paths(default_paths,
                                                       env_paths)
                            env['ENV']['PATH'] = os.pathsep.join(merged_paths)
                            logger.debug(
                                "merging PATH for [{}:{}] to all subprocesses: [{}]"
                                .format(variant.name(), target_arch,
                                        colour_items(merged_paths)))

                    build_envs.append({
                        'variant': key,
                        'target_arch': target_arch,
                        'abi': toolchain.abi(env),
                        'env': env
                    })

                    if not cuppa_env['raw_output']:
                        cuppa.output_processor.Processor.install(env)

                    env['toolchain'] = toolchain
                    env['variant'] = variant
                    env['target_arch'] = target_arch
                    env['abi'] = toolchain.abi(env)
                    env['variant_actions'] = self.get_active_actions(
                        cuppa_env, variant, active_variants, active_actions)

        return build_envs
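merge_paths() above removes duplicates while preserving order, keeping the first occurrence of each entry. A standalone illustration with made-up paths:

def merge_paths(default_paths, env_paths):
    path_set = set(default_paths + env_paths)

    def record_path(path):
        path_set.discard(path)
        return path

    return [record_path(p) for p in default_paths + env_paths if p in path_set]

print(merge_paths(["/usr/bin", "/opt/tool/bin"], ["/usr/bin", "/home/user/bin"]))
# -> ['/usr/bin', '/opt/tool/bin', '/home/user/bin']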
Code example #29
File: construct.py  Project: iCodeIN/cuppa
    def __init__(self,
                 sconstruct_path,
                 base_path=os.path.abspath('.'),
                 branch_root=None,
                 default_options={},
                 default_projects=[],
                 default_variants=[],
                 default_dependencies=[],
                 default_profiles=[],
                 dependencies=[],
                 profiles=[],
                 default_runner=None,
                 configure_callback=None,
                 tools=[]):

        cuppa.core.base_options.set_base_options()

        cuppa_env = cuppa.core.environment.CuppaEnvironment()
        cuppa_env.add_tools(tools)

        dependencies, default_dependencies, dependencies_warning = self._normalise_with_defaults(
            dependencies, default_dependencies, "dependencies")
        profiles, default_profiles, profiles_warning = self._normalise_with_defaults(
            profiles, default_profiles, "profiles")

        self.initialise_options(cuppa_env, default_options, profiles,
                                dependencies)
        cuppa_env['configured_options'] = {}
        self._configure = cuppa.configure.Configure(
            cuppa_env, callback=configure_callback)

        enable_thirdparty_logging(
            cuppa_env.get_option('enable-thirdparty-logging') and True
            or False)
        self._set_verbosity_level(cuppa_env)

        cuppa_env['sconstruct_path'] = sconstruct_path
        cuppa_env['sconstruct_dir'], cuppa_env[
            'sconstruct_file'] = os.path.split(sconstruct_path)

        self._set_output_format(cuppa_env)

        self._configure.load()

        cuppa_env['offline'] = cuppa_env.get_option('offline')

        cuppa.version.check_current_version(cuppa_env['offline'])

        if cuppa_env['offline']:
            logger.info(as_info_label("Running in OFFLINE mode"))

        logger.info("using sconstruct file [{}]".format(
            as_notice(cuppa_env['sconstruct_file'])))

        if dependencies_warning:
            logger.warn(dependencies_warning)

        if profiles_warning:
            logger.warn(profiles_warning)

        help = cuppa_env.get_option('help') and True or False

        cuppa_env['minimal_output'] = cuppa_env.get_option('minimal_output')
        cuppa_env['ignore_duplicates'] = cuppa_env.get_option(
            'ignore_duplicates')

        cuppa_env['working_dir'] = os.getcwd()
        cuppa_env['launch_dir'] = os.path.relpath(SCons.Script.GetLaunchDir(),
                                                  cuppa_env['working_dir'])
        cuppa_env['run_from_launch_dir'] = cuppa_env['launch_dir'] == "."

        cuppa_env['launch_offset_dir'] = "."

        if not cuppa_env['run_from_launch_dir']:
            levels = len(cuppa_env['launch_dir'].split(os.path.sep))
            cuppa_env['launch_offset_dir'] = os.path.sep.join(
                ['..' for i in range(levels)])

        cuppa_env['base_path'] = os.path.normpath(
            os.path.expanduser(base_path))
        cuppa_env['branch_root'] = branch_root and os.path.normpath(
            os.path.expanduser(branch_root)) or base_path
        cuppa_env['branch_dir'] = cuppa_env['branch_root'] and os.path.relpath(
            cuppa_env['base_path'], cuppa_env['branch_root']) or None

        thirdparty = cuppa_env.get_option('thirdparty')
        if thirdparty:
            thirdparty = os.path.normpath(os.path.expanduser(thirdparty))

        cuppa_env['thirdparty'] = thirdparty

        cuppa.core.storage_options.process_storage_options(cuppa_env)
        cuppa.core.location_options.process_location_options(cuppa_env)

        cuppa_env['current_branch'] = ''
        cuppa_env['current_revision'] = ''
        if not help and not self._configure.handle_conf_only():
            if cuppa_env['location_match_current_branch']:
                url, repo, branch, remote, rev = cuppa.scms.scms.get_current_rev_info(
                    cuppa_env['sconstruct_dir'])
                if branch:
                    cuppa_env['current_branch'] = branch
                if rev:
                    cuppa_env['current_revision'] = rev
                logger.info(
                    "Current build on branch [{}] at revision [{}] from remote [{}] in [{}] at [{}]"
                    .format(as_info(str(branch)), as_info(str(rev)),
                            as_info(str(remote)), as_info(str(repo)),
                            as_info(str(url))))

        cuppa_env['default_projects'] = default_projects
        cuppa_env['default_variants'] = default_variants and set(
            default_variants) or set()
        cuppa_env['default_dependencies'] = default_dependencies and default_dependencies or []
        cuppa_env['BUILD_WITH'] = cuppa_env['default_dependencies']
        cuppa_env['dependencies'] = {}
        cuppa_env[
            'default_profiles'] = default_profiles and default_profiles or []
        cuppa_env['BUILD_PROFILE'] = cuppa_env['default_profiles']
        cuppa_env['profiles'] = {}

        test_runner = cuppa_env.get_option(
            'runner', default=default_runner and default_runner or 'process')
        cuppa_env['default_runner'] = test_runner

        cuppa_env['propagate_env'] = cuppa_env.get_option(
            'propagate-env') and True or False
        cuppa_env['propagate_path'] = cuppa_env.get_option(
            'propagate-path') and True or False
        cuppa_env['merge_path'] = cuppa_env.get_option(
            'merge-path') and True or False
        cuppa_env['show_test_output'] = cuppa_env.get_option(
            'show-test-output') and True or False
        cuppa_env['suppress_process_output'] = cuppa_env.get_option(
            'suppress-process-output') and True or False
        cuppa_env['dump'] = cuppa_env.get_option('dump') and True or False
        cuppa_env['clean'] = cuppa_env.get_option('clean') and True or False
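        # The repeated "option and True or False" pattern above simply coerces each option value to a plain bool.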

        self.add_variants(cuppa_env)
        self.add_toolchains(cuppa_env)
        self.add_platforms(cuppa_env)

        cuppa_env['platform'] = cuppa.build_platform.Platform.current()

        toolchains = cuppa_env.get_option('toolchains')
        cuppa_env['target_architectures'] = None

        if not help and not self._configure.handle_conf_only():
            default_toolchain = cuppa_env['platform'].default_toolchain()

            if not toolchains:
                toolchains = [
                    cuppa_env[self.toolchains_key][default_toolchain]
                ]
            else:
                toolchains = [
                    cuppa_env[self.toolchains_key][t] for t in toolchains
                ]

            cuppa_env['active_toolchains'] = toolchains

            def add_profile(name, profile):
                cuppa_env['profiles'][name] = profile

            def add_dependency(name, dependency):
                cuppa_env['dependencies'][name] = dependency

            cuppa.modules.registration.get_options("methods", cuppa_env)

            if not help and not self._configure.handle_conf_only():
                cuppa_env[self.project_generators_key] = {}
                cuppa.modules.registration.add_to_env("dependencies",
                                                      cuppa_env,
                                                      add_dependency)
                cuppa.modules.registration.add_to_env("profiles", cuppa_env,
                                                      add_profile)
                cuppa.modules.registration.add_to_env("methods", cuppa_env)
                cuppa.modules.registration.add_to_env("project_generators",
                                                      cuppa_env)

                for method_plugin in pkg_resources.iter_entry_points(
                        group='cuppa.method.plugins', name=None):
                    method_plugin.load().add_to_env(cuppa_env)

                for profile_plugin in pkg_resources.iter_entry_points(
                        group='cuppa.profile.plugins', name=None):
                    profile_plugin.load().add_to_env(cuppa_env)

                if profiles:
                    for profile in profiles:
                        profile.add_to_env(cuppa_env, add_profile)

                logger.trace("available profiles are [{}]".format(
                    colour_items(sorted(cuppa_env["profiles"].keys()))))

                logger.info("default profiles are [{}]".format(
                    colour_items(sorted(cuppa_env["default_profiles"]),
                                 as_info)))

                for dependency_plugin in pkg_resources.iter_entry_points(
                        group='cuppa.dependency.plugins', name=None):
                    dependency_plugin.load().add_to_env(
                        cuppa_env, add_dependency)

                if dependencies:
                    for dependency in dependencies:
                        dependency.add_to_env(cuppa_env, add_dependency)

                logger.trace("available dependencies are [{}]".format(
                    colour_items(sorted(cuppa_env["dependencies"].keys()))))

                logger.info("default dependencies are [{}]".format(
                    colour_items(sorted(cuppa_env["default_dependencies"]),
                                 as_info)))

            # TODO - default_profile

            if cuppa_env['dump']:
                logger.info(
                    as_info_label(
                        "Running in DUMP mode, no building will be attempted"))
                cuppa_env.dump()

            job_count = cuppa_env.get_option('num_jobs')
            parallel = cuppa_env.get_option('parallel')
            parallel_mode = "manually"

            if job_count == 1 and parallel:
                job_count = multiprocessing.cpu_count()
                if job_count > 1:
                    SCons.Script.SetOption('num_jobs', job_count)
                    parallel_mode = "automatically"
            cuppa_env['job_count'] = job_count
            cuppa_env['parallel'] = parallel
            if job_count > 1:
                logger.info(
                    "Running in {} with option [{}] set {} as [{}]".format(
                        as_emphasised("parallel mode"), as_info("jobs"),
                        as_emphasised(parallel_mode),
                        as_info(str(SCons.Script.GetOption('num_jobs')))))

        if not help and self._configure.handle_conf_only():
            self._configure.save()

        if not help and not self._configure.handle_conf_only():
            self.build(cuppa_env)

        if self._configure.handle_conf_only():
            print(
                "cuppa: Handling configuration only, so no builds will be attempted."
            )
            print(
                "cuppa: With the current configuration executing 'scons -D' would be equivalent to:"
            )
            print("")
            print("scons -D {}".format(
                self._command_line_from_settings(
                    cuppa_env['configured_options'])))
            print("")
            print("cuppa: Nothing to be done. Exiting.")
            SCons.Script.Exit()
Code example #30
File: configure.py  Project: necrosisbb/cuppa
 def _save_conf( self, conf_path ):
     logger.info( "{}".format( as_notice( "Save current settings..." ) ) )
     self._save_settings( conf_path )
     logger.info( "{}".format( as_notice( "Save complete" ) ) )
Code example #31
File: core.py  Project: marcelhuberfoo/cuppa
    def build( self, cuppa_env ):

#        cuppa.progress.NotifyProgress.register_callback( None, self.on_progress )

        cuppa_env['empty_env'] = cuppa_env.create_env()
        projects   = cuppa_env.get_option( 'projects' )
        toolchains = cuppa_env['active_toolchains']

        if not projects:
            projects = cuppa_env['default_projects']

            if not projects or not cuppa_env['run_from_launch_dir']:
                sub_sconscripts = self.get_sub_sconscripts(
                        cuppa_env['launch_dir'],
                        [ cuppa_env['build_root'], cuppa_env['download_root'] ]
                )
                if sub_sconscripts:
                    projects = sub_sconscripts
                    logger.info( "Using sub-sconscripts [{}]".format( colour_items( projects ) ) )
            elif projects:
                logger.info( "Using default_projects [{}]".format( colour_items( projects ) ) )

        if projects:

            sconscripts = []

            for project in projects:

                if(     not os.path.exists( project )
                    and not cuppa_env['run_from_launch_dir']
                    and not os.path.isabs( project ) ):

                    path = os.path.join( cuppa_env['launch_dir'], project )

                    if os.path.exists( path ):
                        if os.path.isdir( path ):
                            sub_sconscripts = self.get_sub_sconscripts(
                                project,
                                [ cuppa_env['build_root'], cuppa_env['download_root'] ]
                            )
                            if sub_sconscripts:
                                logger.info( "Reading project folder [{}] and using sub-sconscripts [{}]".format(
                                        project, colour_items( sub_sconscripts )
                                ) )
                                sconscripts.extend( sub_sconscripts )
                        else:
                            sconscripts.append( path )

                elif os.path.exists( project ) and os.path.isdir( project ):
                    sub_sconscripts = self.get_sub_sconscripts(
                            project,
                            [ cuppa_env['build_root'], cuppa_env['download_root'] ]
                    )
                    if sub_sconscripts:
                        logger.info( "Reading project folder [{}] and using sub-sconscripts [{}]".format(
                                project, colour_items( sub_sconscripts )
                        ) )
                        sconscripts.extend( sub_sconscripts )
                else:
                    sconscripts.append( project )

            for toolchain in toolchains:
                build_envs = self.create_build_envs( toolchain, cuppa_env )
                for build_env in build_envs:
                    for sconscript in sconscripts:
                        decider = cuppa_env.get_option( 'decider' )
                        if decider:
                            build_env['env'].Decider( decider )
                        self.call_project_sconscript_files( toolchain, build_env['variant'], build_env['target_arch'], build_env['env'], sconscript )

        else:
            logger.warn( "No projects to build. Nothing to be done" )
Code example #32
File: output_processor.py  Project: ja11sop/cuppa
    def Popen2( cls, stdout_processor, stderr_processor, args_list, **kwargs ):

        kwargs['stdout'] = subprocess.PIPE
        kwargs['stderr'] = subprocess.PIPE

        timing_enabled = logger.isEnabledFor( logging.DEBUG )

        suppress_output = False
        if 'suppress_output' in kwargs:
            suppress_output = kwargs['suppress_output']
            del kwargs['suppress_output']

        use_shell = False
        if 'scons_env' in kwargs:
            use_shell = kwargs['scons_env'].get_option( 'use-shell' )
            del kwargs['scons_env']

        orig_stdout = sys.stdout
        orig_stderr = sys.stderr

        try:
            # TODO: Review this as it might be needed for Windows otherwise replace
            # the wrapped values with orig_stdout and orig_stderr respectively
            sys.stdout = AutoFlushFile( colorama.initialise.wrapped_stdout )
            sys.stderr = AutoFlushFile( colorama.initialise.wrapped_stderr )

            process = None
            stderr_thread = None

            timer = timing_enabled and cuppa.timer.Timer() or None
            if timer:
                logger.debug( "Command [{}] - Running...".format( as_notice(str(timer.timer_id())) ) )

            close_fds = platform.system() == "Windows" and False or True

            if not suppress_output:
                sys.stdout.write( " ".join(args_list) + "\n" )

            process = subprocess.Popen(
                use_shell and " ".join(args_list) or args_list,
                **dict( kwargs, close_fds=close_fds, shell=use_shell )
            )

            stderr_consumer = LineConsumer( process.stderr.readline, stderr_processor )
            stdout_consumer = LineConsumer( process.stdout.readline, stdout_processor )
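            # Drain stderr on a separate thread while the main thread drains stdout,
            # so neither pipe fills up and deadlocks the child process.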

            stderr_thread = threading.Thread( target=stderr_consumer )
            stderr_thread.start()
            stdout_consumer()
            stderr_thread.join()

            process.wait()

            if timer:
                timer.stop()
                logger.debug( "Command [{}] - Elapsed {}".format( as_notice(str(timer.timer_id())), cuppa.timer.as_string( timer.elapsed() ) ) )

            return process.returncode

        except Exception as e:
            if timer:
                timer.stop()
                logger.debug( "Command [{}] - Elapsed {}".format( as_notice(str(timer.timer_id())), cuppa.timer.as_string( timer.elapsed() ) ) )
            logger.error( "IncrementalSubProcess.Popen2() failed with error [{}]".format( str(e) ) )
            if process:
                logger.info( "Killing existing POpen object" )
                process.kill()
            if stderr_thread:
                logger.info( "Joining any running threads" )
                stderr_thread.join()
            raise e

        finally:
            sys.stdout = orig_stdout
            sys.stderr = orig_stderr
Code example #33
File: output_processor.py  Project: iCodeIN/cuppa
    def Popen2( cls, stdout_processor, stderr_processor, args_list, **kwargs ):

        kwargs['stdout'] = subprocess.PIPE
        kwargs['stderr'] = subprocess.PIPE

        timing_enabled = logger.isEnabledFor( logging.DEBUG )

        suppress_output = False
        if 'suppress_output' in kwargs:
            suppress_output = kwargs['suppress_output']
            del kwargs['suppress_output']

        use_shell = False
        if 'scons_env' in kwargs:
            use_shell = kwargs['scons_env'].get_option( 'use-shell' )
            del kwargs['scons_env']

        try:
            process = None
            stderr_thread = None

            timer = timing_enabled and cuppa.timer.Timer() or None
            if timer:
                logger.debug( "Command [{}] - Running...".format( as_notice(str(timer.timer_id())) ) )

            close_fds = platform.system() == "Windows" and False or True

            if not suppress_output:
                sys.stdout.write( " ".join(args_list) + "\n" )

            process = subprocess.Popen(
                use_shell and " ".join(args_list) or args_list,
                **dict( kwargs, close_fds=close_fds, shell=use_shell, universal_newlines=True )
            )

            stderr_consumer = LineConsumer( process.stderr.readline, stderr_processor )
            stdout_consumer = LineConsumer( process.stdout.readline, stdout_processor )

            stderr_thread = threading.Thread( target=stderr_consumer )
            stderr_thread.start()
            stdout_consumer()
            stderr_thread.join()

            process.wait()

            if timer:
                timer.stop()
                logger.debug( "Command [{}] - Elapsed {}".format( as_notice(str(timer.timer_id())), cuppa.timer.as_string( timer.elapsed() ) ) )

            return process.returncode

        except Exception as e:
            if timer:
                timer.stop()
                logger.debug( "Command [{}] - Elapsed {}".format( as_notice(str(timer.timer_id())), cuppa.timer.as_string( timer.elapsed() ) ) )
            logger.error( "IncrementalSubProcess.Popen2() failed with error [{}]".format( str(e) ) )
            if process:
                logger.info( "Killing existing POpen object" )
                process.kill()
            if stderr_thread:
                logger.info( "Joining any running threads" )
                stderr_thread.join()
            raise e
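
Note: the LineConsumer class used by Popen2() above is not included in this excerpt. The following is a minimal, self-contained sketch (an assumption, not cuppa's actual implementation) of the same pattern: both streams are piped, stderr is drained on a worker thread while the main thread drains stdout, and each line is handed to a processor callback.

import subprocess
import sys
import threading

class LineConsumer(object):
    # Hypothetical stand-in for the LineConsumer referenced above: it pulls
    # lines from `readline` until EOF and passes each one to `processor`.
    def __init__(self, readline, processor):
        self._readline = readline
        self._processor = processor

    def __call__(self):
        for line in iter(self._readline, ''):
            self._processor(line)

def show_stdout(line):
    sys.stdout.write("stdout| " + line)

def show_stderr(line):
    sys.stderr.write("stderr| " + line)

if __name__ == "__main__":
    # Same shape as Popen2() above: pipe both streams, drain stderr on a
    # worker thread while the main thread drains stdout, then wait.
    process = subprocess.Popen(
        [sys.executable, "-c", "import sys; print('hello'); sys.stderr.write('oops\\n')"],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)

    stderr_thread = threading.Thread(target=LineConsumer(process.stderr.readline, show_stderr))
    stderr_thread.start()
    LineConsumer(process.stdout.readline, show_stdout)()
    stderr_thread.join()
    sys.exit(process.wait())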
Code example #34
0
File: construct.py Project: ja11sop/cuppa
    def call_project_sconscript_files( self, toolchain, variant, target_arch, abi, sconscript_env, project ):

        sconscript_file = project

        if os.path.exists( sconscript_file ) and os.path.isfile( sconscript_file ):

            logger.debug( "project exists and added to build [{}] using [{},{},{}]".format(
                    as_notice( sconscript_file ),
                    as_notice( toolchain.name() ),
                    as_notice( variant ),
                    as_notice( target_arch )
            ) )

            path_without_ext = os.path.splitext( sconscript_file )[0]

            sconstruct_offset_path, sconscript_name = os.path.split( sconscript_file )

            name = os.path.splitext( sconscript_name )[0]
            sconscript_env['sconscript_name_id'] = name
            if name.lower() == "sconscript":
                sconscript_env['sconscript_name_id'] = ""
                path_without_ext = sconstruct_offset_path
                name = path_without_ext

            sconscript_env['sconscript_file'] = sconscript_file

            build_root = sconscript_env['build_root']
            working_folder = 'working'

            sconscript_env = sconscript_env.Clone()
            sconscript_env['sconscript_env'] = sconscript_env

            sconscript_env['sconscript_build_dir'] = path_without_ext
            sconscript_env['sconscript_toolchain_build_dir'] = os.path.join( path_without_ext, toolchain.name() )
            sconscript_env['sconscript_dir'] = os.path.join( sconscript_env['base_path'], sconstruct_offset_path )
            sconscript_env['abs_sconscript_dir'] = os.path.abspath( sconscript_env['sconscript_dir'] )
            sconscript_env['tool_variant_dir'] = os.path.join( toolchain.name(), variant, target_arch, abi )
            sconscript_env['tool_variant_working_dir'] = os.path.join( sconscript_env['tool_variant_dir'], working_folder )

            build_base_path = os.path.join( path_without_ext, sconscript_env['tool_variant_dir'] )

            def flatten_dir( directory, join_char="_" ):
                return join_char.join( os.path.normpath( directory ).split( os.path.sep ) )

            sconscript_env['build_base_path']  = build_base_path
            sconscript_env['flat_build_base']  = flatten_dir( build_base_path )

            sconscript_env['tool_variant_build_dir']  = os.path.join( build_root, sconscript_env['tool_variant_dir'], working_folder )
            sconscript_env['build_dir']               = os.path.normpath( os.path.join( build_root, build_base_path, working_folder, '' ) )
            sconscript_env['abs_build_dir']           = os.path.abspath( sconscript_env['build_dir'] )
            sconscript_env['build_tool_variant_dir']  = os.path.normpath( os.path.join( build_root, sconscript_env['tool_variant_dir'], working_folder, '' ) )
            sconscript_env['offset_dir']              = sconstruct_offset_path
            sconscript_env['offset_tool_variant_dir'] = os.path.join( sconscript_env['offset_dir'], sconscript_env['tool_variant_dir'] )
            sconscript_env['tool_variant_dir_offset'] = os.path.normpath( os.path.join( sconscript_env['tool_variant_dir'], sconscript_env['offset_dir'] ) )
            sconscript_env['flat_tool_variant_dir_offset'] = os.path.normpath( os.path.join( flatten_dir( sconscript_env['tool_variant_dir'] ), sconscript_env['offset_dir'] ) )
            sconscript_env['final_dir']               = '..' + os.path.sep + 'final' + os.path.sep
            sconscript_env['active_toolchain']        = toolchain

            def abs_final_dir( abs_build_dir, final_dir ):
                return os.path.isabs( final_dir ) and final_dir or os.path.normpath( os.path.join( abs_build_dir, final_dir ) )

            sconscript_env['abs_final_dir']  = abs_final_dir( sconscript_env['abs_build_dir'], sconscript_env['final_dir'] )

            sconscript_env.AppendUnique( INCPATH = [
                    sconscript_env['offset_dir']
            ] )

            sconscript_exports = {
                'env'                     : sconscript_env,
                'sconscript_env'          : sconscript_env,
                'build_root'              : build_root,
                'build_dir'               : sconscript_env['build_dir'],
                'abs_build_dir'           : sconscript_env['abs_build_dir'],
                'final_dir'               : sconscript_env['final_dir'],
                'abs_final_dir'           : sconscript_env['abs_final_dir'],
                'common_variant_final_dir': '../../../common/final/',
                'common_project_final_dir': build_root + '/common/final/',
                'project'                 : name,
            }

            self._configure.configure( sconscript_exports['env'] )

            cuppa.modules.registration.init_env_for_variant( "methods", sconscript_exports )

            if sconscript_env['dump']:
                logger.info( "{} {}".format( as_info_label( "Dumping ENV for"), as_info( sconscript_exports['build_dir'] ) ) )
                dump = sconscript_env.Dump()
                logger.info( "\n" + dump + "\n" )
            else:
                SCons.Script.SConscript(
                    [ sconscript_file ],
                    variant_dir = sconscript_exports['build_dir'],
                    duplicate   = 0,
                    exports     = sconscript_exports
                )

        else:
            logger.error( "Skipping non-existent project [{}] using [{},{},{}]".format(
                    as_error( sconscript_file ),
                    as_error( toolchain.name() ),
                    as_error( variant ),
                    as_error( target_arch )
            ) )
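
Note: the flatten_dir() helper above simply collapses a nested path into a single flat directory name. A standalone sketch of the same behaviour (the example path is made up):

import os

def flatten_dir(directory, join_char="_"):
    # Same shape as the helper above: normalise the path, then join its
    # components with join_char to produce a single flat name.
    return join_char.join(os.path.normpath(directory).split(os.path.sep))

# On a POSIX system this prints: gcc_release_x86_64
print(flatten_dir("gcc/release/x86_64"))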
Code example #35
0
File: configure.py Project: marcelhuberfoo/cuppa
 def _print_setting( self, action, key, value ):
     logger.info( "{} [{}] = [{}]".format(
             action,
             as_notice( key ),
             as_notice( str(value) )
     ) )
Code example #36
0
File: construct.py Project: ja11sop/cuppa
    def create_build_envs( self, toolchain, cuppa_env ):

        propagate_environment = cuppa_env['propagate_env']
        propagate_path        = cuppa_env['propagate_path']
        merge_path            = cuppa_env['merge_path']

        variants = cuppa_env[ self.variants_key ]
        actions  = cuppa_env[ self.actions_key ]

        target_architectures = cuppa_env[ 'target_architectures' ]

        if not target_architectures:
            target_architectures = [ None ]

        def get_active_from_options( tasks ):
            active_tasks = {}
            for key, task in tasks.items():
                if cuppa_env.get_option( task.name() ):
                    active_tasks[ task.name() ] = task
            return active_tasks

        active_variants = get_active_from_options( variants )
        active_actions  = get_active_from_options( actions )

        def get_active_from_defaults( default_tasks, tasks ):
            active_tasks = {}
            for task in default_tasks:
                if task in tasks:
                    active_tasks[ task ] = tasks[ task ]
            return active_tasks

        if not active_variants and not active_actions:
            default_variants = cuppa_env['default_variants'] or toolchain.default_variants()
            if default_variants:
                active_variants = get_active_from_defaults( default_variants, variants )
                active_actions = get_active_from_defaults( default_variants, actions )
                if active_variants:
                    logger.info( "Default build variants of [{}] being used.".format( colour_items( active_variants, as_info ) ) )
                if active_actions:
                    logger.info( "Default build actions of [{}] being used.".format( colour_items( active_actions, as_info ) ) )

        if not active_variants:
            active_variants = get_active_from_defaults( toolchain.default_variants(), variants )
            logger.info( "No active variants specified so toolchain defaults of [{}] being used.".format( colour_items( active_variants, as_info ) ) )

        logger.debug( "Using active_variants = [{}]".format( colour_items( active_variants, as_info ) ) )
        logger.debug( "Using active_actions = [{}]".format( colour_items( active_actions, as_info ) ) )

        build_envs = []

        for key, variant in active_variants.items():

            for target_arch in target_architectures:

                env, target_arch = toolchain.make_env( cuppa_env, variant, target_arch )

                if env:

                    # TODO: Refactor this code out
                    if propagate_environment or propagate_path or merge_path:

                        def merge_paths( default_paths, env_paths ):
                            path_set = set( default_paths + env_paths )
                            def record_path( path ):
                                path_set.discard(path)
                                return path
                            return [ record_path(p) for p in default_paths + env_paths if p in path_set ]

                        def get_paths_from( environment ):
                            return 'PATH' in environment and environment['PATH'].split(os.pathsep) or []

                        default_paths = get_paths_from( env['ENV'] )
                        env_paths = get_paths_from( os.environ )
                        if propagate_environment:
                            env['ENV'] = os.environ.copy()
                            logger.debug( "propagating environment for [{}:{}] to all subprocesses: [{}]".format(
                                    variant.name(),
                                    target_arch,
                                    as_notice( str(env['ENV']) ) )
                            )
                        if propagate_path and not propagate_environment:
                            env['ENV']['PATH'] = env_paths
                            logger.debug( "propagating PATH for [{}:{}] to all subprocesses: [{}]".format(
                                    variant.name(),
                                    target_arch,
                                    colour_items( env_paths ) )
                            )
                        elif merge_path:
                            merged_paths = merge_paths( default_paths, env_paths )
                            env['ENV']['PATH'] = os.pathsep.join( merged_paths )
                            logger.debug( "merging PATH for [{}:{}] to all subprocesses: [{}]".format(
                                    variant.name(),
                                    target_arch,
                                    colour_items( merged_paths ) )
                            )

                    build_envs.append( {
                        'variant': key,
                        'target_arch': target_arch,
                        'abi': toolchain.abi( env ),
                        'env': env } )

                    if not cuppa_env['raw_output']:
                        cuppa.output_processor.Processor.install( env )

                    env['toolchain']       = toolchain
                    env['variant']         = variant
                    env['target_arch']     = target_arch
                    env['abi']             = toolchain.abi( env )
                    env['variant_actions'] = self.get_active_actions( cuppa_env, variant, active_variants, active_actions )

        return build_envs
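
Note: the merge_paths() helper in create_build_envs() keeps every PATH entry once, in order of first appearance, with the build environment's own entries taking precedence over those from os.environ. A small standalone sketch of that behaviour (the example paths are made up):

def merge_paths(default_paths, env_paths):
    # Same shape as the helper above: a set tracks which entries are still
    # unseen, so duplicates later in the combined list are dropped.
    path_set = set(default_paths + env_paths)
    def record_path(path):
        path_set.discard(path)
        return path
    return [record_path(p) for p in default_paths + env_paths if p in path_set]

defaults = ["/usr/local/bin", "/usr/bin"]
from_env = ["/usr/bin", "/opt/tool/bin", "/usr/local/bin"]

# Prints: ['/usr/local/bin', '/usr/bin', '/opt/tool/bin']
print(merge_paths(defaults, from_env))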
Code example #37
0
File: construct.py Project: ja11sop/cuppa
    def __init__( self,
                  sconstruct_path,
                  base_path            = os.path.abspath( '.' ),
                  branch_root          = None,
                  default_options      = {},
                  default_projects     = [],
                  default_variants     = [],
                  default_dependencies = [],
                  default_profiles     = [],
                  dependencies         = [],
                  profiles             = [],
                  default_runner       = None,
                  configure_callback   = None,
                  tools                = [] ):

        cuppa.core.base_options.set_base_options()

        cuppa_env = cuppa.core.environment.CuppaEnvironment()
        cuppa_env.add_tools( tools )

        dependencies, default_dependencies, dependencies_warning = self._normalise_with_defaults( dependencies, default_dependencies, "dependencies" )
        profiles, default_profiles, profiles_warning = self._normalise_with_defaults( profiles, default_profiles, "profiles" )

        self.initialise_options( cuppa_env, default_options, profiles, dependencies )
        cuppa_env['configured_options'] = {}
        self._configure = cuppa.configure.Configure( cuppa_env, callback=configure_callback )

        enable_thirdparty_logging( cuppa_env.get_option( 'enable-thirdparty-logging' ) and True or False )
        self._set_verbosity_level( cuppa_env )

        cuppa_env['sconstruct_path'] = sconstruct_path
        cuppa_env['sconstruct_dir'], cuppa_env['sconstruct_file'] = os.path.split(sconstruct_path)

        self._set_output_format( cuppa_env )

        self._configure.load()

        cuppa_env['offline'] = cuppa_env.get_option( 'offline' )

        cuppa.version.check_current_version( cuppa_env['offline'] )

        if cuppa_env['offline']:
            logger.info( as_info_label( "Running in OFFLINE mode" ) )

        logger.info( "using sconstruct file [{}]".format( as_notice( cuppa_env['sconstruct_file'] ) ) )

        if dependencies_warning:
            logger.warn( dependencies_warning )

        if profiles_warning:
            logger.warn( profiles_warning )

        help = cuppa_env.get_option( 'help' ) and True or False

        cuppa_env['minimal_output']       = cuppa_env.get_option( 'minimal_output' )
        cuppa_env['ignore_duplicates']    = cuppa_env.get_option( 'ignore_duplicates' )

        cuppa_env['working_dir']          = os.getcwd()
        cuppa_env['launch_dir']           = os.path.relpath( SCons.Script.GetLaunchDir(), cuppa_env['working_dir'] )
        cuppa_env['run_from_launch_dir']  = cuppa_env['launch_dir'] == "."

        cuppa_env['launch_offset_dir']    = "."

        if not cuppa_env['run_from_launch_dir']:
            levels = len( cuppa_env['launch_dir'].split( os.path.sep ) )
            cuppa_env['launch_offset_dir'] = os.path.sep.join( ['..' for i in range(levels)] )

        cuppa_env['base_path']   = os.path.normpath( os.path.expanduser( base_path ) )
        cuppa_env['branch_root'] = branch_root and os.path.normpath( os.path.expanduser( branch_root ) ) or base_path
        cuppa_env['branch_dir']  = cuppa_env['branch_root'] and os.path.relpath( cuppa_env['base_path'], cuppa_env['branch_root'] ) or None

        thirdparty = cuppa_env.get_option( 'thirdparty' )
        if thirdparty:
            thirdparty = os.path.normpath( os.path.expanduser( thirdparty ) )

        cuppa_env['thirdparty'] = thirdparty

        cuppa.core.storage_options.process_storage_options( cuppa_env )
        cuppa.core.location_options.process_location_options( cuppa_env )

        cuppa_env['default_projects']     = default_projects
        cuppa_env['default_variants']     = default_variants and set( default_variants ) or set()
        cuppa_env['default_dependencies'] = default_dependencies and default_dependencies or []
        cuppa_env['BUILD_WITH']           = cuppa_env['default_dependencies']
        cuppa_env['dependencies']         = {}
        cuppa_env['default_profiles']     = default_profiles and default_profiles or []
        cuppa_env['BUILD_PROFILE']        = cuppa_env['default_profiles']
        cuppa_env['profiles']             = {}

        test_runner = cuppa_env.get_option( 'runner', default=default_runner and default_runner or 'process' )
        cuppa_env['default_runner']  = test_runner

        cuppa_env['propagate_env']       = cuppa_env.get_option( 'propagate-env' )       and True or False
        cuppa_env['propagate_path']      = cuppa_env.get_option( 'propagate-path' )      and True or False
        cuppa_env['merge_path']          = cuppa_env.get_option( 'merge-path' )          and True or False
        cuppa_env['show_test_output']    = cuppa_env.get_option( 'show-test-output' )    and True or False
        cuppa_env['suppress_process_output'] = cuppa_env.get_option( 'suppress-process-output' ) and True or False
        cuppa_env['dump']                = cuppa_env.get_option( 'dump' )                and True or False
        cuppa_env['clean']               = cuppa_env.get_option( 'clean' )               and True or False

        self.add_variants   ( cuppa_env )
        self.add_toolchains ( cuppa_env )
        self.add_platforms  ( cuppa_env )

        cuppa_env['platform'] = cuppa.build_platform.Platform.current()

        toolchains = cuppa_env.get_option( 'toolchains' )
        cuppa_env[ 'target_architectures' ] = None

        if not help and not self._configure.handle_conf_only():
            default_toolchain = cuppa_env['platform'].default_toolchain()

            if not toolchains:
                toolchains = [ cuppa_env[self.toolchains_key][default_toolchain] ]
            else:
                toolchains = [ cuppa_env[self.toolchains_key][t] for t in toolchains ]

            cuppa_env['active_toolchains'] = toolchains

            def add_profile( name, profile ):
                cuppa_env['profiles'][name] = profile

            def add_dependency( name, dependency ):
                cuppa_env['dependencies'][name] = dependency

            cuppa.modules.registration.get_options( "methods", cuppa_env )

            if not help and not self._configure.handle_conf_only():
                cuppa_env[self.project_generators_key] = {}
                cuppa.modules.registration.add_to_env( "dependencies",       cuppa_env, add_dependency )
                cuppa.modules.registration.add_to_env( "profiles",           cuppa_env, add_profile )
                cuppa.modules.registration.add_to_env( "methods",            cuppa_env )
                cuppa.modules.registration.add_to_env( "project_generators", cuppa_env )

                for method_plugin in pkg_resources.iter_entry_points( group='cuppa.method.plugins', name=None ):
                    method_plugin.load().add_to_env( cuppa_env )

                for profile_plugin in pkg_resources.iter_entry_points( group='cuppa.profile.plugins', name=None ):
                    profile_plugin.load().add_to_env( cuppa_env )

                if profiles:
                    for profile in profiles:
                        profile.add_to_env( cuppa_env, add_profile )

                logger.trace( "available profiles are [{}]".format(
                        colour_items( sorted( cuppa_env["profiles"].keys() ) )
                ) )

                logger.info( "default profiles are [{}]".format(
                        colour_items( sorted( cuppa_env["default_profiles"] ), as_info )
                ) )

                for dependency_plugin in pkg_resources.iter_entry_points( group='cuppa.dependency.plugins', name=None ):
                    dependency_plugin.load().add_to_env( cuppa_env, add_dependency )

                if dependencies:
                    for dependency in dependencies:
                        dependency.add_to_env( cuppa_env, add_dependency )


                logger.trace( "available dependencies are [{}]".format(
                        colour_items( sorted( cuppa_env["dependencies"].keys() ) )
                ) )

                logger.info( "default dependencies are [{}]".format(
                        colour_items( sorted( cuppa_env["default_dependencies"] ), as_info )
                ) )


            # TODO - default_profile

            if cuppa_env['dump']:
                logger.info( as_info_label( "Running in DUMP mode, no building will be attempted" ) )
                cuppa_env.dump()

            job_count = cuppa_env.get_option( 'num_jobs' )
            parallel  = cuppa_env.get_option( 'parallel' )
            parallel_mode = "manually"

            if job_count==1 and parallel:
                job_count = multiprocessing.cpu_count()
                if job_count > 1:
                    SCons.Script.SetOption( 'num_jobs', job_count )
                    parallel_mode = "automatically"
            cuppa_env['job_count'] = job_count
            cuppa_env['parallel']  = parallel
            if job_count>1:
                logger.debug( "Running in {} with option [{}] set {} as [{}]".format(
                        as_emphasised("parallel mode"),
                        as_info( "jobs" ),
                        as_emphasised(parallel_mode),
                        as_info( str( SCons.Script.GetOption( 'num_jobs') ) )
                ) )

        if not help and self._configure.handle_conf_only():
            self._configure.save()

        if not help and not self._configure.handle_conf_only():
            self.build( cuppa_env )

        if self._configure.handle_conf_only():
            print "cuppa: Handling configuration only, so no builds will be attempted."
            print "cuppa: With the current configuration executing 'scons -D' would be equivalent to:"
            print ""
            print "scons -D {}".format( self._command_line_from_settings( cuppa_env['configured_options'] ) )
            print ""
            print "cuppa: Nothing to be done. Exiting."
            SCons.Script.Exit()
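
Note: the job-count handling near the end of the constructor only bumps the SCons --jobs value when --parallel is requested and the job count was left at its default of 1. A simplified, standalone sketch of that decision (an assumed reading of the code above, not cuppa's API):

import multiprocessing

def effective_job_count(job_count, parallel):
    # Mirrors the logic above: with --parallel and a default job count of 1,
    # use all detected cores; otherwise keep whatever was set manually.
    if job_count == 1 and parallel and multiprocessing.cpu_count() > 1:
        return multiprocessing.cpu_count(), "automatically"
    return job_count, "manually"

print(effective_job_count(1, True))    # e.g. (8, 'automatically') on an 8-core machine
print(effective_job_count(4, True))    # (4, 'manually')
print(effective_job_count(1, False))   # (1, 'manually')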
Code example #38
0
File: configure.py Project: marcelhuberfoo/cuppa
 def _update_conf( self ):
     logger.info( "{}".format( as_notice( "Updating current settings..." ) ) )
     self._save_settings()
     logger.info( "{}".format( as_notice( "Update complete" ) ) )
Code example #39
0
File: construct.py Project: iCodeIN/cuppa
    def call_project_sconscript_files(self, toolchain, variant, target_arch,
                                      abi, sconscript_env, project):

        sconscript_file = project

        if os.path.exists(sconscript_file) and os.path.isfile(sconscript_file):

            logger.debug(
                "project exists and added to build [{}] using [{},{},{}]".
                format(as_notice(sconscript_file), as_notice(toolchain.name()),
                       as_notice(variant), as_notice(target_arch)))

            path_without_ext = os.path.splitext(sconscript_file)[0]

            sconstruct_offset_path, sconscript_name = os.path.split(
                sconscript_file)

            name = os.path.splitext(sconscript_name)[0]
            sconscript_env['sconscript_name_id'] = name
            if name.lower() == "sconscript":
                sconscript_env['sconscript_name_id'] = ""
                path_without_ext = sconstruct_offset_path
                name = path_without_ext

            sconscript_env['sconscript_file'] = sconscript_file

            build_root = sconscript_env['build_root']
            working_folder = 'working'

            sconscript_env = sconscript_env.Clone()
            sconscript_env['sconscript_env'] = sconscript_env

            sconscript_env['sconscript_build_dir'] = path_without_ext
            sconscript_env['sconscript_toolchain_build_dir'] = os.path.join(
                path_without_ext, toolchain.name())
            sconscript_env['sconscript_dir'] = os.path.join(
                sconscript_env['base_path'], sconstruct_offset_path)
            sconscript_env['abs_sconscript_dir'] = os.path.abspath(
                sconscript_env['sconscript_dir'])
            sconscript_env['tool_variant_dir'] = os.path.join(
                toolchain.name(), variant, target_arch, abi)
            sconscript_env['tool_variant_working_dir'] = os.path.join(
                sconscript_env['tool_variant_dir'], working_folder)

            build_base_path = os.path.join(path_without_ext,
                                           sconscript_env['tool_variant_dir'])

            def flatten_dir(directory, join_char="_"):
                return join_char.join(
                    os.path.normpath(directory).split(os.path.sep))

            sconscript_env['build_base_path'] = build_base_path
            sconscript_env['flat_build_base'] = flatten_dir(build_base_path)

            sconscript_env['tool_variant_build_dir'] = os.path.join(
                build_root, sconscript_env['tool_variant_dir'], working_folder)
            sconscript_env['build_dir'] = os.path.normpath(
                os.path.join(build_root, build_base_path, working_folder, ''))
            sconscript_env['abs_build_dir'] = os.path.abspath(
                sconscript_env['build_dir'])
            sconscript_env['build_tool_variant_dir'] = os.path.normpath(
                os.path.join(build_root, sconscript_env['tool_variant_dir'],
                             working_folder, ''))
            sconscript_env['offset_dir'] = sconstruct_offset_path
            sconscript_env['offset_tool_variant_dir'] = os.path.join(
                sconscript_env['offset_dir'],
                sconscript_env['tool_variant_dir'])
            sconscript_env['tool_variant_dir_offset'] = os.path.normpath(
                os.path.join(sconscript_env['tool_variant_dir'],
                             sconscript_env['offset_dir']))
            sconscript_env['flat_tool_variant_dir_offset'] = os.path.normpath(
                os.path.join(flatten_dir(sconscript_env['tool_variant_dir']),
                             sconscript_env['offset_dir']))
            sconscript_env[
                'final_dir'] = '..' + os.path.sep + 'final' + os.path.sep
            sconscript_env['active_toolchain'] = toolchain

            def abs_final_dir(abs_build_dir, final_dir):
                return os.path.isabs(
                    final_dir) and final_dir or os.path.normpath(
                        os.path.join(abs_build_dir, final_dir))

            sconscript_env['abs_final_dir'] = abs_final_dir(
                sconscript_env['abs_build_dir'], sconscript_env['final_dir'])

            sconscript_env.AppendUnique(INCPATH=[sconscript_env['offset_dir']])

            sconscript_exports = {
                'env': sconscript_env,
                'sconscript_env': sconscript_env,
                'build_root': build_root,
                'build_dir': sconscript_env['build_dir'],
                'abs_build_dir': sconscript_env['abs_build_dir'],
                'final_dir': sconscript_env['final_dir'],
                'abs_final_dir': sconscript_env['abs_final_dir'],
                'common_variant_final_dir': '../../../common/final/',
                'common_project_final_dir': build_root + '/common/final/',
                'project': name,
            }

            self._configure.configure(sconscript_exports['env'])

            cuppa.modules.registration.init_env_for_variant(
                "methods", sconscript_exports)

            if sconscript_env['dump']:
                logger.info("{} {}".format(
                    as_info_label("Dumping ENV for"),
                    as_info(sconscript_exports['build_dir'])))
                dump = sconscript_env.Dump()
                logger.info("\n" + dump + "\n")
            else:
                SCons.Script.SConscript(
                    [sconscript_file],
                    variant_dir=sconscript_exports['build_dir'],
                    duplicate=0,
                    exports=sconscript_exports)

        else:
            logger.error(
                "Skipping non-existent project [{}] using [{},{},{}]".format(
                    as_error(sconscript_file), as_error(toolchain.name()),
                    as_error(variant), as_error(target_arch)))
Code example #40
0
 def _update_conf(self):
     logger.info("{}".format(as_notice("Updating current settings...")))
     self._save_settings()
     logger.info("{}".format(as_notice("Update complete")))
コード例 #41
0
    def get_local_directory(self, cuppa_env, location, sub_dir, branch,
                            full_url):

        offline = cuppa_env['offline']
        local_directory = None

        base = cuppa_env['download_root']
        if not os.path.isabs(base):
            base = os.path.join(cuppa_env['working_dir'], base)

        if location.startswith('file:'):
            location = pip_download.url_to_path(location)

        if not pip_is_url(location):

            if pip_is_archive_file(location):

                self._local_folder = self.folder_name_from_path(
                    location, cuppa_env)
                local_directory = os.path.join(base, self._local_folder)

                local_dir_with_sub_dir = os.path.join(
                    local_directory, sub_dir and sub_dir or "")

                if os.path.exists(local_dir_with_sub_dir):
                    try:
                        os.rmdir(local_dir_with_sub_dir)
                    except:
                        return local_directory

                self.extract(location, local_dir_with_sub_dir)
                logger.debug("(local archive) Location = [{}]".format(
                    as_info(location)))
                logger.debug("(local archive) Local folder = [{}]".format(
                    as_info(self._local_folder)))

            else:
                local_directory = branch and os.path.join(location,
                                                          branch) or location
                self._local_folder = self.folder_name_from_path(
                    location, cuppa_env)

                logger.debug("(local file) Location = [{}]".format(
                    as_info(location)))
                logger.debug("(local file) Local folder = [{}]".format(
                    as_info(self._local_folder)))

            return local_directory
        else:

            self._local_folder = self.folder_name_from_path(
                full_url, cuppa_env)
            local_directory = os.path.join(base, self._local_folder)

            if full_url.scheme.startswith(
                    'http') and self.url_is_download_archive_url(
                        full_url.path):
                logger.debug("[{}] is an archive download".format(
                    as_info(location)))

                local_dir_with_sub_dir = os.path.join(
                    local_directory, sub_dir and sub_dir or "")

                # First we check to see if we already downloaded and extracted this archive before
                if os.path.exists(local_dir_with_sub_dir):
                    try:
                        # If not empty this will fail
                        os.rmdir(local_dir_with_sub_dir)
                    except:
                        # Not empty so we'll return this as the local_directory

                        logger.debug(
                            "(already present) Location = [{}]".format(
                                as_info(location)))
                        logger.debug(
                            "(already present) Local folder = [{}]".format(
                                as_info(str(self._local_folder))))

                        return local_directory

                if cuppa_env['dump'] or cuppa_env['clean']:
                    return local_directory

                # If not we then check to see if we cached the download
                cached_archive = self.get_cached_archive(
                    cuppa_env['cache_root'], self._local_folder)
                if cached_archive:
                    logger.debug("Cached archive [{}] found for [{}]".format(
                        as_info(cached_archive), as_info(location)))
                    self.extract(cached_archive, local_dir_with_sub_dir)
                else:
                    logger.info("Downloading [{}]...".format(
                        as_info(location)))
                    try:
                        report_hook = None
                        if logger.isEnabledFor(logging.INFO):
                            report_hook = ReportDownloadProgress()
                        filename, headers = urlretrieve(location,
                                                        reporthook=report_hook)
                        name, extension = os.path.splitext(filename)
                        logger.info(
                            "[{}] successfully downloaded to [{}]".format(
                                as_info(location), as_info(filename)))
                        self.extract(filename, local_dir_with_sub_dir)
                        if cuppa_env['cache_root']:
                            cached_archive = os.path.join(
                                cuppa_env['cache_root'], self._local_folder)
                            logger.debug(
                                "Caching downloaded file as [{}]".format(
                                    as_info(cached_archive)))
                            shutil.copyfile(filename, cached_archive)
                    except ContentTooShortError as error:
                        logger.error(
                            "Download of [{}] failed with error [{}]".format(
                                as_error(location), as_error(str(error))))
                        raise LocationException(error)

            elif '+' in full_url.scheme:
                vc_type = location.split('+', 1)[0]
                backend = pip_vcs.vcs.get_backend(vc_type)
                if backend:
                    try:
                        vcs_backend = backend(self.expand_secret(location))
                    except:  # Pip version >= 19
                        backend.url = self.expand_secret(location)
                        vcs_backend = backend
                    local_dir_with_sub_dir = os.path.join(
                        local_directory, sub_dir and sub_dir or "")

                    if cuppa_env['dump'] or cuppa_env['clean']:
                        return local_directory

                    if os.path.exists(local_directory):
                        url, repository, branch, remote, revision = self.get_info(
                            location, local_dir_with_sub_dir, full_url,
                            vc_type)
                        rev_options = self.get_rev_options(vc_type,
                                                           vcs_backend,
                                                           local_remote=remote)
                        version = self.ver_rev_summary(branch, revision,
                                                       self._full_url.path)[0]
                        if not offline:
                            logger.info(
                                "Updating [{}] in [{}]{} at [{}]".format(
                                    as_info(location),
                                    as_notice(local_dir_with_sub_dir),
                                    (rev_options and " on {}".format(
                                        as_notice(str(rev_options))) or ""),
                                    as_info(version)))
                            try:
                                update(vcs_backend, local_dir_with_sub_dir,
                                       rev_options)
                                logger.debug(
                                    "Successfully updated [{}]".format(
                                        as_info(location)))
                            except pip_exceptions.PipError as error:
                                logger.warn(
                                    "Could not update [{}] in [{}]{} due to error [{}]"
                                    .format(as_warning(location),
                                            as_warning(local_dir_with_sub_dir),
                                            (rev_options and " at {}".format(
                                                as_warning(str(rev_options)))
                                             or ""), as_warning(str(error))))
                        else:
                            logger.debug(
                                "Skipping update for [{}] as running in offline mode"
                                .format(as_info(location)))
                    else:
                        rev_options = self.get_rev_options(
                            vc_type, vcs_backend)
                        action = "Cloning"
                        if vc_type == "svn":
                            action = "Checking out"
                        max_attempts = 2
                        attempt = 1
                        while attempt <= max_attempts:
                            logger.info("{} [{}] into [{}]{}".format(
                                action, as_info(location),
                                as_info(local_dir_with_sub_dir), attempt > 1
                                and "(attempt {})".format(str(attempt)) or ""))
                            try:
                                obtain(vcs_backend, local_dir_with_sub_dir,
                                       vcs_backend.url)
                                logger.debug(
                                    "Successfully retrieved [{}]".format(
                                        as_info(location)))
                                break
                            except pip_exceptions.PipError as error:
                                attempt = attempt + 1
                                log_as = logger.warn
                                if attempt > max_attempts:
                                    log_as = logger.error

                                log_as(
                                    "Could not retrieve [{}] into [{}]{} due to error [{}]"
                                    .format(as_info(location),
                                            as_notice(local_dir_with_sub_dir),
                                            (rev_options and " to {}".format(
                                                as_notice(str(rev_options)))
                                             or ""), as_error(str(error))))
                                if attempt > max_attempts:
                                    raise LocationException(str(error))

                logger.debug("(url path) Location = [{}]".format(
                    as_info(location)))
                logger.debug("(url path) Local folder = [{}]".format(
                    as_info(self._local_folder)))

            return local_directory
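
Note: get_local_directory() routes a location string by inspecting its URL scheme: a scheme containing '+' (for example git+https) selects a pip VCS backend, an http(s) URL that looks like an archive is downloaded and extracted, and anything else is treated as a local path or archive. A simplified sketch of that classification (assumed behaviour; the real code also relies on pip's is_url()/is_archive_file() checks, and the example URLs and paths are illustrative only):

try:
    from urllib.parse import urlparse   # Python 3
except ImportError:
    from urlparse import urlparse        # Python 2

def classify_location(location):
    full_url = urlparse(location)
    if '+' in full_url.scheme:
        # e.g. "git+https://..." -> version control type "git"
        return "vcs:" + location.split('+', 1)[0]
    if full_url.scheme.startswith('http'):
        return "http-download"
    return "local-path-or-archive"

print(classify_location("git+https://github.com/ja11sop/cuppa.git"))  # vcs:git
print(classify_location("https://example.com/boost_1_76_0.tar.gz"))   # http-download
print(classify_location("/home/user/src/mylib"))                      # local-path-or-archive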
Code example #42
0
    def get_local_directory( self, cuppa_env, location, sub_dir, branch, full_url ):

        local_directory = None

        base = cuppa_env['download_root']
        if not os.path.isabs( base ):
            base = os.path.join( cuppa_env['working_dir'], base )

        if location.startswith( 'file:' ):
            location = pip.download.url_to_path( location )

        if not pip.download.is_url( location ):

            if pip.download.is_archive_file( location ):

                local_folder = self.folder_name_from_path( location )
                local_directory = os.path.join( base, local_folder )

                if os.path.exists( local_directory ):
                    try:
                        os.rmdir( local_directory )
                    except:
                        return local_directory, False

                self.extract( location, local_directory )
            else:
                local_directory = branch and os.path.join( location, branch ) or location
                return local_directory, False
        else:

            local_folder = self.folder_name_from_path( full_url )
            local_directory = os.path.join( base, local_folder )

            if full_url.scheme.startswith( 'http' ) and self.url_is_download_archive_url( full_url.path ):
                logger.debug( "[{}] is an archive download".format( as_info( location ) ) )

                local_dir_with_sub_dir = os.path.join( local_directory, sub_dir )

                # First we check to see if we already downloaded and extracted this archive before
                if os.path.exists( local_dir_with_sub_dir ):
                    try:
                        # If not empty this will fail
                        os.rmdir( local_dir_with_sub_dir )
                    except:
                        # Not empty so we'll return this as the local_directory
                        return local_directory, True

                # If not we then check to see if we cached the download
                cached_archive = self.get_cached_archive( cuppa_env['cache_root'], local_folder )
                if cached_archive:
                    logger.debug( "Cached archive [{}] found for [{}]".format(
                            as_info( cached_archive ),
                            as_info( location )
                    ) )
                    self.extract( cached_archive, local_dir_with_sub_dir )
                else:
                    logger.info( "Downloading [{}]...".format( as_info( location ) ) )
                    try:
                        report_hook = None
                        if logger.isEnabledFor( logging.INFO ):
                            report_hook = ReportDownloadProgress()
                        filename, headers = urllib.urlretrieve( location, reporthook=report_hook )
                        name, extension = os.path.splitext( filename )
                        logger.info( "[{}] successfully downloaded to [{}]".format(
                                as_info( location ),
                                as_info( filename )
                        ) )
                        self.extract( filename, local_dir_with_sub_dir )
                        if cuppa_env['cache_root']:
                            cached_archive = os.path.join( cuppa_env['cache_root'], local_folder )
                            logger.debug( "Caching downloaded file as [{}]".format( as_info( cached_archive ) ) )
                            shutil.copyfile( filename, cached_archive )
                    except urllib.ContentTooShortError as error:
                        logger.error( "Download of [{}] failed with error [{}]".format(
                                as_error( location ),
                                as_error( str(error) )
                        ) )
                        raise LocationException( "Error obtaining [{}]: {}".format( location, error ) )

            elif '+' in full_url.scheme:
                vc_type = location.split('+', 1)[0]
                backend = pip.vcs.vcs.get_backend( vc_type )
                if backend:
                    vcs_backend = backend( location )
                    rev_options = self.get_rev_options( vc_type, vcs_backend )

                    local_dir_with_sub_dir = os.path.join( local_directory, sub_dir )

                    if os.path.exists( local_directory ):

                        url, repository, branch, revision = self.get_info( location, local_dir_with_sub_dir, full_url )
                        version = self.ver_rev_summary( branch, revision, self._full_url.path )[0]
                        logger.debug( "Updating [{}] in [{}]{} at [{}]".format(
                                as_info( location ),
                                as_notice( local_dir_with_sub_dir ),
                                ( rev_options and  " on {}".format( as_notice( str(rev_options) ) ) or "" ),
                                as_info( version )
                        ) )
                        try:
                            vcs_backend.update( local_dir_with_sub_dir, rev_options )
                            logger.debug( "Successfully updated [{}]".format( as_info( location ) ) )
                        except pip.exceptions.InstallationError as error:
                            logger.warn( "Could not update [{}] in [{}]{} due to error [{}]".format(
                                    as_warning( location ),
                                    as_warning( local_dir_with_sub_dir ),
                                    ( rev_options and  " at {}".format( as_warning( str(rev_options) ) ) or "" ),
                                    as_warning( str(error) )
                            ) )
                    else:
                        action = "Cloning"
                        if vc_type == "svn":
                            action = "Checking out"
                        logger.info( "{} [{}] into [{}]".format(
                                action, as_info( location ),
                                as_info( local_dir_with_sub_dir )
                        ) )
                        try:
                            vcs_backend.obtain( local_dir_with_sub_dir )
                            logger.debug( "Successfully retrieved [{}]".format( as_info( location ) ) )
                        except pip.exceptions.InstallationError as error:
                            logger.error( "Could not retrieve [{}] into [{}]{} due to error [{}]".format(
                                    as_error( location ),
                                    as_error( local_dir_with_sub_dir ),
                                    ( rev_options and  " to {}".format( as_error(  str(rev_options) ) ) or ""),
                                    as_error( str( error ) )
                            ) )
                            raise LocationException( "Error obtaining [{}]: {}".format( location, error ) )

            return local_directory, True
Code example #43
0
 def _print_setting(self, action, key, value):
     logger.info("{} [{}] = [{}]".format(action, as_notice(key),
                                         as_notice(str(value))))
Code example #44
0
File: location.py Project: ja11sop/cuppa
    def get_local_directory( self, cuppa_env, location, sub_dir, branch, full_url ):

        offline = cuppa_env['offline']
        local_directory = None

        base = cuppa_env['download_root']
        if not os.path.isabs( base ):
            base = os.path.join( cuppa_env['working_dir'], base )

        if location.startswith( 'file:' ):
            location = pip_download.url_to_path( location )

        if not pip_download.is_url( location ):

            if pip_download.is_archive_file( location ):

                self._local_folder = self.folder_name_from_path( location, cuppa_env )
                local_directory = os.path.join( base, self._local_folder )

                local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )

                if os.path.exists( local_dir_with_sub_dir ):
                    try:
                        os.rmdir( local_dir_with_sub_dir )
                    except:
                        return local_directory

                self.extract( location, local_dir_with_sub_dir )
                logger.debug( "(local archive) Location = [{}]".format( as_info( location ) ) )
                logger.debug( "(local archive) Local folder = [{}]".format( as_info( self._local_folder ) ) )

            else:
                local_directory = branch and os.path.join( location, branch ) or location
                self._local_folder = self.folder_name_from_path( location, cuppa_env )

                logger.debug( "(local file) Location = [{}]".format( as_info( location ) ) )
                logger.debug( "(local file) Local folder = [{}]".format( as_info( self._local_folder ) ) )

            return local_directory
        else:

            self._local_folder = self.folder_name_from_path( full_url, cuppa_env )
            local_directory = os.path.join( base, self._local_folder )

            if full_url.scheme.startswith( 'http' ) and self.url_is_download_archive_url( full_url.path ):
                logger.debug( "[{}] is an archive download".format( as_info( location ) ) )

                local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )

                # First we check to see if we already downloaded and extracted this archive before
                if os.path.exists( local_dir_with_sub_dir ):
                    try:
                        # If not empty this will fail
                        os.rmdir( local_dir_with_sub_dir )
                    except:
                        # Not empty so we'll return this as the local_directory

                        logger.debug( "(already present) Location = [{}]".format( as_info( location ) ) )
                        logger.debug( "(already present) Local folder = [{}]".format( as_info( str(self._local_folder) ) ) )

                        return local_directory

                if cuppa_env['dump'] or cuppa_env['clean']:
                    return local_directory

                # If not we then check to see if we cached the download
                cached_archive = self.get_cached_archive( cuppa_env['cache_root'], self._local_folder )
                if cached_archive:
                    logger.debug( "Cached archive [{}] found for [{}]".format(
                            as_info( cached_archive ),
                            as_info( location )
                    ) )
                    self.extract( cached_archive, local_dir_with_sub_dir )
                else:
                    logger.info( "Downloading [{}]...".format( as_info( location ) ) )
                    try:
                        report_hook = None
                        if logger.isEnabledFor( logging.INFO ):
                            report_hook = ReportDownloadProgress()
                        filename, headers = urllib.urlretrieve( location, reporthook=report_hook )
                        name, extension = os.path.splitext( filename )
                        logger.info( "[{}] successfully downloaded to [{}]".format(
                                as_info( location ),
                                as_info( filename )
                        ) )
                        self.extract( filename, local_dir_with_sub_dir )
                        if cuppa_env['cache_root']:
                            cached_archive = os.path.join( cuppa_env['cache_root'], self._local_folder )
                            logger.debug( "Caching downloaded file as [{}]".format( as_info( cached_archive ) ) )
                            shutil.copyfile( filename, cached_archive )
                    except urllib.ContentTooShortError as error:
                        logger.error( "Download of [{}] failed with error [{}]".format(
                                as_error( location ),
                                as_error( str(error) )
                        ) )
                        raise LocationException( error )

            elif '+' in full_url.scheme:
                vc_type = location.split('+', 1)[0]
                backend = pip_vcs.vcs.get_backend( vc_type )
                if backend:
                    vcs_backend = backend( self.expand_secret( location ) )
                    local_dir_with_sub_dir = os.path.join( local_directory, sub_dir and sub_dir or "" )

                    if cuppa_env['dump'] or cuppa_env['clean']:
                        return local_directory

                    if os.path.exists( local_directory ):
                        url, repository, branch, remote, revision = self.get_info( location, local_dir_with_sub_dir, full_url, vc_type )
                        rev_options = self.get_rev_options( vc_type, vcs_backend, local_remote=remote )
                        version = self.ver_rev_summary( branch, revision, self._full_url.path )[0]
                        if not offline:
                            logger.info( "Updating [{}] in [{}]{} at [{}]".format(
                                    as_info( location ),
                                    as_notice( local_dir_with_sub_dir ),
                                    ( rev_options and  " on {}".format( as_notice( str(rev_options) ) ) or "" ),
                                    as_info( version )
                            ) )
                            try:
                                update( vcs_backend, local_dir_with_sub_dir, rev_options )
                                logger.debug( "Successfully updated [{}]".format( as_info( location ) ) )
                            except pip_exceptions.PipError as error:
                                logger.warn( "Could not update [{}] in [{}]{} due to error [{}]".format(
                                        as_warning( location ),
                                        as_warning( local_dir_with_sub_dir ),
                                        ( rev_options and  " at {}".format( as_warning( str(rev_options) ) ) or "" ),
                                        as_warning( str(error) )
                                ) )
                        else:
                            logger.debug( "Skipping update for [{}] as running in offline mode".format( as_info( location ) ) )
                    else:
                        rev_options = self.get_rev_options( vc_type, vcs_backend )
                        action = "Cloning"
                        if vc_type == "svn":
                            action = "Checking out"
                        max_attempts = 2
                        attempt = 1
                        while attempt <= max_attempts:
                            logger.info( "{} [{}] into [{}]{}".format(
                                    action,
                                    as_info( location ),
                                    as_info( local_dir_with_sub_dir ),
                                    attempt > 1 and "(attempt {})".format( str(attempt) ) or ""
                            ) )
                            try:
                                vcs_backend.obtain( local_dir_with_sub_dir )
                                logger.debug( "Successfully retrieved [{}]".format( as_info( location ) ) )
                                break
                            except pip_exceptions.PipError as error:
                                attempt = attempt + 1
                                log_as = logger.warn
                                if attempt > max_attempts:
                                    log_as = logger.error

                                log_as( "Could not retrieve [{}] into [{}]{} due to error [{}]".format(
                                        as_info( location ),
                                        as_notice( local_dir_with_sub_dir ),
                                        ( rev_options and  " to {}".format( as_notice(  str(rev_options) ) ) or ""),
                                        as_error( str(error) )
                                ) )
                                if attempt > max_attempts:
                                    raise LocationException( str(error) )

                logger.debug( "(url path) Location = [{}]".format( as_info( location ) ) )
                logger.debug( "(url path) Local folder = [{}]".format( as_info( self._local_folder ) ) )

            return local_directory
Code example #45
0
    def __init__( self,
                  base_path            = os.path.abspath( '.' ),
                  branch_root          = None,
                  default_options      = {},
                  default_projects     = [],
                  default_variants     = [],
                  default_dependencies = [],
                  default_profiles     = [],
                  default_runner       = None,
                  configure_callback   = None,
                  dependencies         = {},
                  tools                = [] ):

        cuppa.version.check_current_version()
        set_base_options()
        initialise_logging()

        cuppa_env = CuppaEnvironment()
        cuppa_env.add_tools( tools )

        self.initialise_options( cuppa_env, default_options, dependencies )
        cuppa_env['configured_options'] = {}
        self._configure = cuppa.configure.Configure( cuppa_env, callback=configure_callback )

        verbosity = cuppa_env.get_option( 'verbosity' )
        if verbosity:
            set_logging_level( verbosity )

        cuppa_env['sconstruct_file'] = cuppa_env.get_option( 'file' )

        if not cuppa_env['sconstruct_file']:
            for path in [ 'SConstruct', 'Sconstruct', 'sconstruct' ]:
                if os.path.exists( path ):
                    cuppa_env['sconstruct_file'] = path
        cuppa_env['raw_output']      = cuppa_env.get_option( 'raw_output' ) and True or False
        cuppa_env['standard_output'] = cuppa_env.get_option( 'standard_output' ) and True or False

        if not cuppa_env['raw_output'] and not cuppa_env['standard_output']:
            cuppa_env.colouriser().enable()
            reset_logging_format()

        logger.info( "using sconstruct file [{}]".format( as_notice( cuppa_env['sconstruct_file'] ) ) )

        help = cuppa_env.get_option( 'help' ) and True or False

        self._configure.load()

        cuppa_env['minimal_output']       = cuppa_env.get_option( 'minimal_output' )
        cuppa_env['ignore_duplicates']    = cuppa_env.get_option( 'ignore_duplicates' )

        cuppa_env['working_dir']          = os.getcwd()
        cuppa_env['launch_dir']           = os.path.relpath( SCons.Script.GetLaunchDir(), cuppa_env['working_dir'] )
        cuppa_env['run_from_launch_dir']  = cuppa_env['launch_dir'] == "."

        cuppa_env['launch_offset_dir']    = "."

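        # When scons is launched from a subdirectory, record the relative path back up to the working directory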
        if not cuppa_env['run_from_launch_dir']:
            levels = len( cuppa_env['launch_dir'].split( os.path.sep ) )
            cuppa_env['launch_offset_dir'] = os.path.sep.join( ['..' for i in range(levels)] )

        cuppa_env['base_path']   = os.path.normpath( os.path.expanduser( base_path ) )
        cuppa_env['branch_root'] = branch_root and os.path.normpath( os.path.expanduser( branch_root ) ) or base_path
        cuppa_env['branch_dir']  = cuppa_env['branch_root'] and os.path.relpath( cuppa_env['base_path'], cuppa_env['branch_root'] ) or None

        thirdparty = cuppa_env.get_option( 'thirdparty' )
        if thirdparty:
            thirdparty = os.path.normpath( os.path.expanduser( thirdparty ) )

        cuppa_env['thirdparty'] = thirdparty

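        # Resolve the build, download and cache roots, creating the cache directory if it does not already exist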
        build_root = cuppa_env.get_option( 'build_root', default='_build' )
        cuppa_env['build_root'] = os.path.normpath( os.path.expanduser( build_root ) )

        download_root = cuppa_env.get_option( 'download_root', default='_cuppa' )
        cuppa_env['download_root'] = os.path.normpath( os.path.expanduser( download_root ) )

        cache_root = cuppa_env.get_option( 'cache_root', default='~/_cuppa/_cache' )
        cuppa_env['cache_root'] = os.path.normpath( os.path.expanduser( cache_root ) )
        if not os.path.exists( cuppa_env['cache_root'] ):
            os.makedirs( cuppa_env['cache_root'] )

        cuppa_env['default_projects']     = default_projects
        cuppa_env['default_variants']     = default_variants and set( default_variants ) or set()
        cuppa_env['default_dependencies'] = default_dependencies and default_dependencies or []
        cuppa_env['BUILD_WITH']           = cuppa_env['default_dependencies']
        cuppa_env['dependencies']         = {}
        cuppa_env['default_profiles']     = default_profiles and default_profiles or []
        cuppa_env['BUILD_PROFILE']        = cuppa_env['default_profiles']
        cuppa_env['profiles']             = {}

        test_runner = cuppa_env.get_option( 'runner', default=default_runner and default_runner or 'process' )
        cuppa_env['default_runner']  = test_runner

        cuppa_env['show_test_output'] = cuppa_env.get_option( 'show-test-output' ) and True or False

        self.add_variants   ( cuppa_env )
        self.add_toolchains ( cuppa_env )
        self.add_platforms  ( cuppa_env )

        cuppa_env['platform'] = cuppa.build_platform.Platform.current()

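        # Work out which toolchains should be active, falling back to the platform default when none are requested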
        toolchains = cuppa_env.get_option( 'toolchains' )
        cuppa_env[ 'target_architectures' ] = None

        if not help and not self._configure.handle_conf_only():
            default_toolchain = cuppa_env['platform'].default_toolchain()

            if not toolchains:
                toolchains = [ cuppa_env[self.toolchains_key][default_toolchain] ]
            else:
                toolchains = [ cuppa_env[self.toolchains_key][t] for t in toolchains ]

            cuppa_env['active_toolchains'] = toolchains

            def add_dependency( name, dependency ):
                cuppa_env['dependencies'][name] = dependency

            cuppa.modules.registration.get_options( "methods", cuppa_env )

            if not help and not self._configure.handle_conf_only():
                cuppa_env[self.project_generators_key] = {}
                cuppa.modules.registration.add_to_env( "dependencies",       cuppa_env, add_dependency )
                cuppa.modules.registration.add_to_env( "profiles",           cuppa_env )
                cuppa.modules.registration.add_to_env( "methods",            cuppa_env )
                cuppa.modules.registration.add_to_env( "project_generators", cuppa_env )

                for method_plugin in pkg_resources.iter_entry_points( group='cuppa.method.plugins', name=None ):
                    method_plugin.load().add_to_env( cuppa_env )

                if dependencies:
                    for name, dependency in dependencies.iteritems():
                        dependency.add_to_env( cuppa_env, add_dependency )

            # TODO - default_profile

            if cuppa_env.get_option( 'dump' ):
                cuppa_env.dump()
                SCons.Script.Exit()

            job_count = cuppa_env.get_option( 'num_jobs' )
            parallel  = cuppa_env.get_option( 'parallel' )
            parallel_mode = "manually"

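            # When parallel builds are requested and the job count is the default of one, size it to the number of available CPUs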
            if job_count==1 and parallel:
                job_count = multiprocessing.cpu_count()
                if job_count > 1:
                    SCons.Script.SetOption( 'num_jobs', job_count )
                    parallel_mode = "automatically"
            cuppa_env['job_count'] = job_count
            cuppa_env['parallel']  = parallel
            if job_count>1:
                logger.debug( "Running in {} with option [{}] set {} as [{}]".format(
                        as_emphasised("parallel mode"),
                        as_info( "jobs" ),
                        as_emphasised(parallel_mode),
                        as_info( str( SCons.Script.GetOption( 'num_jobs') ) )
                ) )

        if not help and self._configure.handle_conf_only():
            self._configure.save()

        if not help and not self._configure.handle_conf_only():
            self.build( cuppa_env )

        if self._configure.handle_conf_only():
            print "cuppa: Handling onfiguration only, so no builds will be attempted."
            print "cuppa: With the current configuration executing 'scons -D' would be equivalent to:"
            print ""
            print "scons -D {}".format( self._command_line_from_settings( cuppa_env['configured_options'] ) )
            print ""
            print "cuppa: Nothing to be done. Exiting."
            SCons.Script.Exit()
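
For context, the constructor above is not normally instantiated by hand; a project's sconstruct usually passes these keyword arguments through cuppa's top-level entry point. A minimal sketch, assuming the package exposes a run() wrapper that forwards its keyword arguments to this __init__ (the option values shown are hypothetical):

# sconstruct -- minimal usage sketch; cuppa.run() forwarding and the option values
# below are assumptions for illustration, not taken from the example above
import cuppa

cuppa.run(
    default_options      = { 'boost-home': '~/boost' },   # hypothetical option
    default_dependencies = [ 'boost' ],
    default_runner       = 'process',
)
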
コード例 #46
0
ファイル: run_process.py プロジェクト: brianfpeters/cuppa
    def __call__(self, target, source, env):

        command = None
        working_dir = None
        program_path = None

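        # A callable command is run in-process under a Monitor; otherwise the command or test executable is run as an external process below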
        if self._command and callable(self._command):
            program_path = os.path.splitext(
                os.path.splitext(str(target[0]))[0])[0]
            monitor = Monitor(program_path, env)
            monitor.start()

            result = self._command(target, source, env)

            if result == 0 or result is None:
                self._write_success_file(success_file_name_from(program_path))
                monitor.stop('success')
            else:
                self._remove_success_file(success_file_name_from(program_path))
                monitor.stop('failed')
        else:

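            # Work out the command string, working directory and program path, either from the supplied command or from the built test executable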
            if self._command:
                command = self._command
                if self._format_args:
                    format_args = {}
                    for key, value in self._format_args.iteritems():
                        format_args[key] = callable(value) and value() or value
                    command = command.format(**format_args)
                working_dir = self._working_dir and self._working_dir or self._final_dir
                program_path = os.path.splitext(
                    os.path.splitext(str(target[0]))[0])[0]
            else:
                executable = str(source[0].abspath)
                working_dir, test = os.path.split(executable)
                if self._working_dir:
                    working_dir = self._working_dir
                program_path = source[0].path

                if cuppa.build_platform.name() == "Windows":
                    executable = '"' + executable + '"'

                test_command = executable
                if self._command:
                    command = self._command
                    working_dir = self._working_dir and self._working_dir or self._final_dir

            suppress_output = env['suppress_process_output']
            retry_count = self._retry_count

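            # Run the command, retrying up to self._retry_count times on failure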
            while retry_count >= 0:

                retry = (retry_count > 0)

                success = self._run_command(source, suppress_output,
                                            program_path, command, working_dir,
                                            env, retry)

                if not success and retry:
                    logger.info("Retrying [{}]...".format(as_notice(command)))
                else:
                    break

                retry_count = retry_count - 1

        return None