Example #1
    def execute_scripts(self):

        log.info('executing docker build...')
        opt = ['-v', self.build_cfg.src_dir() + ':' + self._srcdir + ':ro']
        opt.extend([
            '-v',
            self.build_cfg.import_dir() + ':' + self._importdir + ':ro'
        ])
        opt.extend(['-v', self.build_cfg.gen_dir() + ':' + self._gendir])
        opt.extend([
            '-v',
            self.build_cfg.tools().artifact_deployer() +
            ':/xmake/tools/artifactdeployer:ro'
        ])
        opt.extend(['-v', self.build_cfg.component_dir() + ':/project:ro'])

        if not self.build_cfg.suppress_variant_handling():
            self.handle_dict(opt, self.build_cfg.variant_coords(),
                             'XMAKE_VARIANT_COORD')
            self.handle_dict(opt, self.build_cfg.variant_info(),
                             'XMAKE_VARIANT_INFO')

        _execute_build(self, opt)
        df = os.path.join(self.build_cfg.gen_dir(), 'export.df')
        if is_existing_file(df):
            log.info('deploy file already created by build')
            if is_existing_file(self.build_cfg.export_file()):
                os.remove(self.build_cfg.export_file())
            os.symlink(df, self.build_cfg.export_file())
Example #2
 def matches(self):
     f = join(self._build_cfg.component_dir(), "Dockerfile")
     if is_existing_file(f):
         return True
     f = join(self._build_cfg.src_dir(), "Dockerfile")
     if is_existing_file(f):
         return True
     return False
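
Note: every example in this listing relies on the helpers is_existing_file and is_existing_directory from the project's utils module. A minimal sketch of what such helpers could look like, assuming they are thin wrappers around os.path (the real implementations may differ):

import os

def is_existing_file(path):
    # assumed behaviour: True only for an existing regular file
    return path is not None and os.path.isfile(path)

def is_existing_directory(path):
    # assumed behaviour: True only for an existing directory
    return path is not None and os.path.isdir(path)
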
Example #3
    def xmake_cfg(self):
        if self._xmake_cfg is None:
            cfg = None
            if utils.is_existing_directory(self.cfg_dir()):
                cfg = join(self.cfg_dir(), XMAKE_CFG)

            if cfg is None or not utils.is_existing_file(cfg):
                cfg = join(self.component_dir(), "." + XMAKE_CFG)

            if utils.is_existing_file(cfg):
                log.info("found xmake.cfg...")
                config = utils.IterableAwareCfgParser()
                config.read(cfg)
                self._xmake_cfg = config
        return self._xmake_cfg
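
Assuming IterableAwareCfgParser behaves like the standard ConfigParser, the object returned by xmake_cfg() could be queried as sketched below; the section and option names are illustrative only, not part of the real xmake.cfg schema:

cfg = build_cfg.xmake_cfg()
if cfg is not None and cfg.has_section('buildplugin'):
    # read a hypothetical option from a hypothetical section
    plugin_name = cfg.get('buildplugin', 'name')
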
Example #4
    def execute_scripts(self):
        log.info('executing docker build...')
        self.image = None

        def handler(line):
            if self.src_image is None:
                self.src_imagename = self.match(line, 'Step 0 : FROM', self.src_imagename)
                if not self.src_imagename:
                    self.src_imagename = self.match(line, 'Step 1 : FROM', self.src_imagename)
                self.src_image = self.match(line, ' --->', self.src_image)
            # keep the previously detected image id when the line does not match
            self.image = self.match(line, 'Successfully built', self.image)
            return line
        try:
            self.docker(["build", self.context_dir], handler=handler)
        except XmakeException:
            log.error("docker image '%s' not found.\nPlease check the docker registry '%s'" % (self.src_imagename, self.registry))
            raise XmakeException("ERR: docker image '%s' not found.\nPlease check the docker registry '%s'" % (self.src_imagename, self.registry))
        if self.image is None:
            raise XmakeException('ERR: no image found')
        log.info('generated docker image '+self.image)
        addfile = os.path.join(self.build_cfg.src_dir(), 'DockerCommands')
        if is_existing_file(addfile):
            log.info('executing additional commands...')
            succeeded = False
            try:
                self.image = self.handle_script(self.image, addfile)
                log.info('generated docker image '+self.image)
                succeeded = True
            finally:
                if self.image is not None and not succeeded:
                    self.cleanup_image(self.image)
        log.info('root image '+self.src_imagename+' ('+self.src_image+')')
        with open(self.image_name_file, 'w') as f:
            f.write(self.image+'\n')
Example #5
 def publish(self):
     if self.image is None:
         self.image = get_first_line(self.image_name_file, 'cannot read image id from last build')
         log.info('reading image name from last build: '+self.image)
         if is_existing_file(self.delta_file):
             log.info('reloading image to daemon cache...')
             docker.docker(['load', '-i', self.delta_file])
     repo = self.repo_host()
     if repo is None:
         raise XmakeException('no DOCKER deployment repository configured')
     self.tag_image()
     log.info('publishing to DOCKER repository '+repo+'...')
     resolve_deployment_credentials(self.build_cfg, DOCKERREPO)
     user = self.build_cfg.deploy_user(DOCKERREPO)
     password = self.build_cfg.deploy_password(DOCKERREPO)
     dir = '.'
     if user is not None and password is not None:
         log.info('  using user '+user)
         dir = docker.prepare_dockercfg(user, password, repo)
     success = False
     try:
         tag = self.tag_name()
         log.info('publishing image to '+repo+' ('+tag+')')
         docker.docker(['push', tag], None, dir=dir, home=dir)
         log.info('docker logout '+repo)
         docker.docker(['logout', repo], None, dir=dir, home=dir)
         success = True
     finally:
         if dir != '.':
             log.info('cleanup login info '+dir)
             shutil.rmtree(dir)
         if success:
             self.cleanup_image(tag)
Example #6
    def gather_dependencies(self):
        log.info('gathering dependencies...')
        self.npm(['shrinkwrap'])
        shrinkwrapFile = join(self.module_dir, 'npm-shrinkwrap.json')
        if is_existing_file(shrinkwrapFile):
            move(shrinkwrapFile, self.shrinkwrap)

        # clean shrinkwrap file, see spec on https://wiki.wdf.sap.corp/wiki/display/xs2/Filtering+SAP-internal+metadata+before+release
        cleanedShrinkwrap = {}
        with open(self.shrinkwrap, 'r') as f:
            cleanedShrinkwrap = json.load(f)

        keystoremove = []
        self._clean_shrinkwrap(cleanedShrinkwrap,
                               listofkeytoremove=keystoremove)
        for keys in keystoremove:
            shrinkwrappart = cleanedShrinkwrap
            for key in keys[:-1]:
                shrinkwrappart = shrinkwrappart[key]
            shrinkwrappart.pop(keys[-1], None)

        with open(self.shrinkwrap, 'w') as jsonfile:
            json.dump(cleanedShrinkwrap, jsonfile, indent=2)

        dep = self.module_dir
        p = Properties()
        for (m, v) in self.list_deps(join(dep), ''):
            p.setProperty(m, v)
        with open(self.deps, 'w') as f:
            p.store(f)
        copyfile(self.deps, join(dep, 'dependencies'))
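
The loop above walks each collected key path down to its parent dict and pops the final key. The same pattern in isolation, with made-up sample data rather than real shrinkwrap content:

data = {'dependencies': {'foo': {'from': 'internal', 'version': '1.0.0'}}}
paths = [('dependencies', 'foo', 'from')]
for keys in paths:
    node = data
    for key in keys[:-1]:      # descend to the parent of the leaf key
        node = node[key]
    node.pop(keys[-1], None)   # remove the leaf, ignore if already gone
# data == {'dependencies': {'foo': {'version': '1.0.0'}}}
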
Example #7
 def matches(self):
     relPath = "pom.xml"
     if self._build_cfg.alternate_path() is not None:
         relPath = join(self._build_cfg.alternate_path(), "pom.xml")
     f = join(self._build_cfg.component_dir(), relPath)
     if is_existing_file(f):
         return True
     return False
Example #8
    def install_dependencies(self):
        # npm install
        log.info('installing dependencies...')
        os.mkdir(self.import_dir)

        if not is_existing_directory(self.build_cfg.import_dir()):
            return

        if OS_Utils.is_UNIX():
            names = os.listdir(self.build_cfg.import_dir())
            if names is not None:
                for name in names:
                    os.link(join(self.build_cfg.import_dir(), name), join(self.import_dir, name))
        else:
            copytree(self.build_cfg.import_dir(), self.import_dir)

        if self.load is not None:
            log.info('preloading images...')
            for image in self.load:
                ifile = join(self.build_cfg.import_dir(), image)
                if is_existing_file(ifile):
                    log.info('  loading '+image)
                    self.docker(['load', '-i', ifile])
                else:
                    log.warning('image '+image+' not imported')
Example #9
    def build_step(self, img, cmd, priv=False):
        cidfile = os.path.join(self.build_cfg.gen_dir(), 'cid')
        if is_existing_file(cidfile):
            os.remove(cidfile)
        args = ['run', '--cidfile='+cidfile]
        args.extend(['-v', self.context_dir+':/xmake/context:ro'])
        if priv:
            args.extend(['--privileged=true'])
        args.extend([img, 'bash', '-c', cmd])

        def gap(line): return '      '+line
        docker.docker(args, gap, echo=self.echo)
        cid = get_first_line(cidfile, 'cannot read container id')
        # log.info('execution container is '+cid)

        last = [None]

        def gather(line):
            last[0] = line.strip()
        if self._cmd is not None:
            cmd = ['commit', '--change', 'CMD '+self._cmd, cid]
        else:
            cmd = ['commit', cid]
        docker.docker(cmd, gather, echo=True)
        log.info(' ---> '+last[0][:12])
        log.info('Removing intermediate container '+cid[:12])
        self.cleanup_container(cid)
        return last[0]
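
The one-element list last is how the nested gather callback hands its result back to build_step: in Python 2 there is no nonlocal, so the callback mutates last[0] instead of rebinding a local name. The idiom in isolation:

def collect_last(lines):
    last = [None]

    def gather(line):
        last[0] = line.strip()  # mutate the shared cell, do not rebind
    for line in lines:
        gather(line)
    return last[0]

# collect_last(['sha256:abc\n', 'sha256:def\n']) returns 'sha256:def'
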
Example #10
 def forward_buildresults(self):
     f=join(self.build_cfg.cfg_dir(),"forward.py")
     if is_existing_file(f):
         p=self.acquire_forwarding_script(self.build_cfg, f)
         if p is not None:
             p.run()
     else:
         log.warning("no forwarding script 'forward.py' found in cfg folder")
Example #11
 def _setup(self):
     log.info("  assuming some defaults")
     self._build_cfg._build_script_name="generic"
     self._build_cfg.set_base_version('1.0.0')
     if is_existing_file(join(self._build_cfg.component_dir(),".xmake.cfg")):
         log.info("    found .xmake.cfg")
         if not is_existing_directory(self._build_cfg.src_dir()):
             self._build_cfg.set_src_dir(self._build_cfg.component_dir())
             log.info("    using flat source dir: "+self._build_cfg.src_dir())
Example #12
    def __init__(self, build_cfg):
        BuildPlugin.__init__(self, build_cfg)
        self._detected_npm_version = None
        self._node_version = '0.12.0'
        self._npm_user_options = []
        self._bundle = False
        self._run_shrinkwrap = True
        self._after_install = []
        self._before_publish = []
        self._node_executable = None
        self._npmcmd = None
        self._env = None
        self._path = None
        self._rel_path = 'bin'
        self._rel_npm = 'lib/node_modules/npm/bin/npm-cli.js'
        self._root = self.build_cfg.src_dir()
        self._pkgfile = join(self._root, 'package.json')
        self._npm_prefix_dir = join(self.build_cfg.temp_dir(), '.npm-global')

        self.build_cfg.tools().declare_runtime_tool(
            'nodejs', 'com.sap.prd.distributions.org.nodejs:nodejs:tar.gz')
        repos = self.build_cfg.import_repos(NPMREPO)
        if repos is None or len(repos) == 0:
            raise XmakeException('npm repository required')
        if len(repos) > 1:
            log.warning(
                'multiple NPM import repositories found -> ignore all but the first one'
            )
        self.registry = repos[0]
        log.info('using NPM import repository ' + self.registry)

        if self._is_plain():
            log.info('using plain mode for build')
        if not is_existing_file(self._pkgfile):
            raise XmakeException(
                'package.json required in projects root or src folder')
        with open(self._pkgfile, 'r') as d:
            self.pkg = json.load(d)
        if 'name' not in self.pkg:
            raise XmakeException('package.json must contain a name field')
        self.module_name = self.pkg['name']
        if 'dependencies' not in self.pkg:
            self._run_shrinkwrap = False
        else:
            dependencies = self.pkg['dependencies']
            if dependencies is not None and not dependencies:
                self._run_shrinkwrap = False

        self.deps = join(self.build_cfg.temp_dir(), 'dependencies')
        self.build_cfg.add_metadata_file(self.deps)

        # Take in account arguments after the --
        # All these arguments will be passed to the npm command
        if self.build_cfg.build_args():
            for arg in self.build_cfg.build_args():
                log.info('  using custom option ' + arg)
                self._npm_user_options.append(arg)
Example #13
 def _validate_vmake_installation(self):
     if OS_Utils.find_in_PATH("vmake") is not None:
         return
     if is_existing_file(
             join(self.build_cfg.vmake_instdir(), "pgm",
                  "vmake" if OS_Utils.is_UNIX() else "vmake.exe")):
         return
     raise XmakeException('Vmake was found neither in PATH nor in %s.' %
                          (self.build_cfg.vmake_instdir()))
Example #14
    def __init__(self, build_cfg):
        BuildPlugin.__init__(self, build_cfg)

        if not build_cfg.runtime().startswith('linux'):
            raise XmakeException('docker build available only on linux runtime')

        repos = self.build_cfg.import_repos('DOCKER')
        if repos is None or len(repos) == 0:
            log.warning('no docker repository specified')
            self.registry = None
        else:
            if len(repos) > 1:
                log.warning('multiple DOCKER import repositories found -> ignore all but the first one')
            self.registry = repos[0]
            log.info('using DOCKER import repository '+self.registry)
        f = join(self.build_cfg.src_dir(), 'Dockerfile')
        if not is_existing_file(f):
            raise XmakeException('Docker file required in projects root folder')
        if os.environ.get('IS_NEW_DOCKER') == 'true':
            # rewrite FROM lines against the Dockerfile path checked above instead of the cwd
            tmp = f + '_tmp'
            with open(f) as infile, open(tmp, 'w') as outfile:
                for line in infile:
                    line = re.sub(r'^\s*FROM\s*[^:]+:\d+/', 'FROM ', line)
                    if self.registry is not None:
                        line = re.sub(r'^\s*FROM\s*', 'FROM ' + self.registry + '/', line)
                    outfile.write(line)
            shutil.copyfile(tmp, f)
        
        self.context_dir = join(self.build_cfg.gen_dir(), 'context')
        self.import_dir = join(self.context_dir, 'imports')
        self.image_file = join(build_cfg.gen_dir(), 'image.tar.gz')
        self.delta_file = join(build_cfg.gen_dir(), 'delta.tar.gz')
        self.image_name_file = join(build_cfg.gen_dir(), 'imagename')

        self.aid = None
        self.gid = 'com.sap.docker'
        self.load = None
        self.delta = False
        self.common = False
        self.echo = False

        self.image = None
        self.src_image = None
        self.src_imagename = None
        self.docker_force_enabled = False

        output = []

        def catch_output(line):
            if not len(output):
                log.info(line.rstrip())
            output.append(line)
        docker.docker(["-v"], handler=catch_output)
        m = re.match(r".*?\s+(?P<Version>\d+\.[\w\.\-]+).*", ''.join(output))
        if m and m.group('Version'):
            log.info("Checking --force flag for version: " + m.group('Version'))
            if LooseVersion(m.group('Version')) < LooseVersion('1.10'):  # starting from 1.10 the forcing option is deprecated
                self.docker_force_enabled = True
Example #15
 def setup(self):
     if self.matches():
         f = join(self._build_cfg.component_dir(), "Dockerfile")
         if is_existing_file(f):
             self._build_cfg.set_src_dir(self._build_cfg.component_dir())
         log.info("  found Dockerfile at " + self._build_cfg.src_dir() +
                  " ...")
         log.info("  setting defaults for docker build project")
         self._setup()
         return True
     return False
Example #16
 def get_version(self, md, mp):
     f = join(md, 'package.json')
     if not is_existing_file(f):
         log.warning('missing package.json in dependency ' + mp)
         return None
     with open(f, 'r') as d:
         pkg = json.load(d)
     if 'version' not in pkg:
         raise XmakeException('package.json in dependency ' + mp +
                              ' must contain a version field')
     return pkg['version']
Example #17
def discover(build_cfg, wantedPluginName=None):
    log.info('looking for a built-in plugin compatible with source ...')
    contentPlugins = {}
    files = glob(join(inst.get_content_plugin_dir(), '*.py'))
    files.remove(join(inst.get_content_plugin_dir(), '__init__.py'))
    files.remove(join(inst.get_content_plugin_dir(), 'zzdefaults.py'))  # don't want to use the generic plugin by default

    def load_plugin(f):
        plugin_name = basename(f)[:-3]
        module = imp.load_source('plugin', f)
        if not hasattr(module, 'content'):
            raise XmakeException('ERR: content plugin %s does not define a class "content"' % (plugin_name))
        if not issubclass(module.content, spi.ContentPlugin):
            log.warning("content plugin class is not a subclass of spi.ContentPlugin: " + plugin_name)
        if not has_method(module.content, '__init__', 2):
            log.error("content plugin class for %s must implement a c'tor w/ exactly two formal parameters" % (plugin_name))
            raise XmakeException("failed to instantiate content plugin " + plugin_name + " (no c'tor def w/ correct amount of formal parameters (which is one) found)")
        impl = module.content(build_cfg, plugin_name)
        contentPlugins[plugin_name] = impl
    map(load_plugin, files)
    plugin_names = sorted(contentPlugins.keys())

    compatiblePlugins = []

    if wantedPluginName:
        log.info('\tplugin set in configuration file: '+wantedPluginName)
        if wantedPluginName in contentPlugins:
            p=contentPlugins[wantedPluginName]
            log.info('\t{} is compatible'.format(wantedPluginName))
            compatiblePlugins.append({'name': wantedPluginName,'content_plugin': p})
            return compatiblePlugins
        else:
            plugin=join(inst.get_build_plugin_dir(), wantedPluginName+".py")
            if not is_existing_file(plugin):
                log.warning('\tgiven plugin is not standard: {}'.format(wantedPluginName))
            else:
                log.info('\t{} is compatible'.format(wantedPluginName))
                compatiblePlugins.append({'name': wantedPluginName})
            return compatiblePlugins

    for pn in plugin_names:
        p=contentPlugins[pn]
        if p.matches():
            log.info('\t{} is compatible'.format(pn))
            compatiblePlugins.append({'name': pn, 'content_plugin': p})

    if len(compatiblePlugins) > 0:
        return compatiblePlugins

    log.info('\tno compatible built-in plugin found')
    return []
Example #18
 def node_setup(self):
     self._build_cfg._build_script_name = "node"

     f = join(self._build_cfg.component_dir(), "package.json")
     if is_existing_file(f):
         self._build_cfg.set_src_dir(self._build_cfg.component_dir())
     else:
         f = join(self._build_cfg.src_dir(), "package.json")

     with open(f, "r") as d:
         pkg = json.load(d)
     if self._build_cfg.base_version() is None:
         if 'version' not in pkg:
             raise XmakeException('no version entry in package.json')
         self._build_cfg.set_base_version(str(pkg['version']))
Example #19
def get_base(ifile):
    def base_file(members):
        for tarinfo in members:
            if tarinfo.name == "base":
                yield tarinfo

    d = tempfile.mkdtemp(prefix=os.path.basename(ifile))
    tar = tarfile.open(ifile)
    try:
        tar.extractall(path=d, members=base_file(tar))
        tar.close()
        p = os.path.join(d, "base")
        return utils.get_first_line(
            p, "cannot read base file") if utils.is_existing_file(p) else None
    finally:
        shutil.rmtree(d, True)
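
A hypothetical usage sketch for get_base: pack a one-line base file into a tarball and read it back. The paths and image name below are made up for illustration:

import os
import tarfile
import tempfile

workdir = tempfile.mkdtemp()
base_path = os.path.join(workdir, 'base')
with open(base_path, 'w') as f:
    f.write('registry.example.org/project/base-image:1.0\n')
archive = os.path.join(workdir, 'image.tar.gz')
with tarfile.open(archive, 'w:gz') as tar:
    tar.add(base_path, arcname='base')

# get_base(archive) should now return the image reference written above,
# and None for an archive that contains no 'base' member.
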
Example #20
 def _before_build_callback(self):
     before_build_script = join(self.build_cfg.cfg_dir(),
                                'vmake_before_build.py')
     if not is_existing_file(before_build_script):
         return
     module = imp.load_source('before_build', before_build_script)
     if not hasattr(module, 'run'):
         raise XmakeException(
             'ERR: tool f %s does not define a method "run"' %
             (before_build_script))
     run_method = module.run
     if not callable(run_method):
         raise XmakeException(
             'ERR: tool f %s does not define a method "run"' %
             (before_build_script))
     argcount = len(inspect.getargspec(run_method)[0])
     if not argcount < 2:
         raise XmakeException(
             'ERR: tool file %s does not define a function "run" with one or no formal parameters'
             % (before_build_script))
     run_method(self.build_cfg) if argcount == 1 else run_method()
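
The callback only requires that <cfgdir>/vmake_before_build.py defines a callable run taking one argument (the build configuration) or none. A minimal hypothetical hook satisfying that contract:

# vmake_before_build.py (hypothetical example)
def run(build_cfg):
    # called with the build configuration before vmake starts
    print('preparing vmake build for ' + build_cfg.component_dir())
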
Example #21
def bootstrap2(argv):
    global xmake_status, build_cfg
    prepare_bootstrap()
    xmake_inst_dir = inst.get_installation_dir()
    if len(argv)>1 and argv[1]=='--bootstrap':
        xmake_status='bootstrap'
        sys.argv=argv=argv[0:1]+argv[2:]
    else:
        if isfile(join(xmake_inst_dir,'.loaded')):
            log.warning( 'directly using loaded sub level version of xmake')
            xmake_status='loaded'

    if xmake_status=='loaded':
        run(argv)
    else:
        log.info( 'bootstrapping xmake...')
        build_cfg = BuildConfig()
        (args,_,_) = setup_config(build_cfg, True)
        log.info("component root is "+build_cfg.component_dir())
        log.info( 'build runtime is ' + build_cfg.runtime())
        create_gendir(build_cfg)
        log.start_logfile(join(build_cfg.genroot_dir(),"boot.log"))
        determine_version_suffix(build_cfg, args.version) # required by xmake version check below

        if args.use_current_xmake_version:
            log.warning('using the currently installed version as requested by option --use-current-xmake-version')
            run(argv)
        else:
            v = determine_xmake_version(build_cfg)
            if v is None:
                log.warning("no xmake version specified (please maintain file " + XMAKE_VERSION + " or xmake.cfg in project's cfg folder)")
                log.info("default version is "+str(args.default_xmake_version))
                if args.default_xmake_version is None:
                    if build_cfg.is_release() or build_cfg.is_milestone():
                        raise XmakeException('no version specified for xmake for a productive build')
                    else:
                        log.warning('using the currently installed version')
                        run(argv)
                else:
                    v=args.default_xmake_version
                    log.warning( 'using explicit default version '+v)
            if v is None:
                log.error("do not know any xmake version to use -> exit")
                sys.exit(2)
            else:
                v=find_latest(v)
                log.info( 'required xmake version is '+v)
                if v.endswith("-SNAPSHOT"):
                    if build_cfg.is_release() or build_cfg.is_milestone():
                        log.info("version suffix is "+str(build_cfg.version_suffix()))
                        log.error( 'this is a snapshot version, it cannot be used for release or milestone builds')
                        raise XmakeException('snapshot version specified for xmake for a release or milestone build')
                    else:
                        log.warning( 'this is a snapshot version, it cannot be used for release builds')
                l=get_xmake_version(v, build_cfg)
                if is_existing_file(xmake_loaded): os.remove(xmake_loaded)
                log.info( 'required xmake version found at '+l)
                if test_mode:
                    cmd=[sys.executable, join(l,'xmake','bootstrap.py'), '--use-current-xmake-version']
                else:
                    cmd=[sys.executable, join(l,'xmake','xmake.py')]
                log.info( 'starting xmake...')
                cmd.extend(prepare_args(build_cfg, l,argv[1:]))
                if build_cfg.component_dir() is not None: os.chdir(build_cfg.component_dir())
                flush()
                log.stop_logfile()
                rc=subprocess.call(cmd)
                sys.exit(rc)
Example #22
 def matches(self):
     if not is_existing_directory(self._build_cfg.cfg_dir()):
         return True
     if is_existing_file(join(self._build_cfg.component_dir(),".xmake.cfg")):
         return True
     return False
Example #23
 def after_PRELUDE(self, build_cfg):
     if not is_existing_file(self.build_cfg.export_script()):
         self.prepare_export()
Example #24
def execute_exports(build_cfg):
    '''performs the xmake EXPORT phase (exports are defined in <cfgdir>/export.ads)
the export phase results in an Artifact Deployer 'deploy file'. Its contents may be deployed to a Maven repository during the DEPLOY phase'''
    if build_cfg.do_export() is False:
        if build_cfg.do_deploy() is True:
            if os.path.exists(build_cfg.export_file()):
                log.info(
                    'exporting was skipped, because the corresponding option \'-e\' was not set but option \'-d\' is set and export.df exists'
                )
                return
        else:
            log.info(
                'exporting was skipped, because the corresponding option \'-e\' was not set'
            )
            return
    if not os.path.exists(build_cfg.export_dir()):
        os.mkdir(build_cfg.export_dir())
    if not os.path.exists(build_cfg.export_script()):
        write_version_properties(build_cfg)
        log.warning(
            'exporting was switched on, but there was no export description file at: '
            + build_cfg.export_script())
        return
    adargs = [
        build_cfg.tools().artifact_deployer(), 'pack', '-f',
        build_cfg.export_script(), '-p',
        build_cfg.export_file(), '-Dcfgdir=' + build_cfg.cfg_dir(),
        '-Dbsedir=' + build_cfg.build_script_ext_dir(),
        '-Dgendir=' + build_cfg.gen_dir(), '-Dsrcdir=' + build_cfg.src_dir(),
        '-Dgenroot=' + build_cfg.genroot_dir(), '-Dcomponentroot=' +
        build_cfg.component_dir(), '-Druntime=' + build_cfg.runtime(),
        '-Dbaseversion=' + build_cfg.base_version(), '-DbuildRuntime=' +
        build_cfg.runtime(), '-DbuildVersion=' + build_cfg.version(),
        '-DbuildBaseVersion=' + build_cfg.base_version(),
        '-DbuildVersionSuffix=' + stripToBlank(build_cfg.version_suffix()),
        '-Dimportdir=' + build_cfg.import_dir()
    ]
    # add variant coordinates only if corresponding args are present
    if not build_cfg.suppress_variant_handling():

        def add_coord(k):
            adargs.extend(['-Dbuild' + k + '=' + vcoords[k]])

        vcoords = build_cfg.variant_coords()
        map(add_coord, vcoords.keys())
        log.info('variant vector of actual build is ' +
                 str(build_cfg.variant_vector()))
        adargs += [
            '--variant-coordinate-system',
            build_cfg.variant_cosy(), '--variant-projection-method', 'groupId',
            '--variant-coordinates', ','.join(build_cfg.variant_vector())
        ]
    else:
        log.warning(
            'no variant coordinates are available for export (i.e. export will fail for platform-dependent contents)'
        )
    if build_cfg.suppress_variant_handling():
        log.info(
            'build plugin suppressed variant handling by returning \'None\' as a variant coordinate system'
        )

    # add custom deploy variables if any
    build_script = build_cfg.build_script()
    adargs.extend([
        '-D' + name + '=' + value
        for (name, value) in build_script.deploy_variables().items()
    ])
    adargs.extend([
        '-Dimport' + name + '=' + value
        for (name, value) in build_script.import_roots().items()
    ])

    # write metadata
    if build_cfg.scm_snapshot_url() is not None:
        print_attribs = [
            'scm_snapshot_url', 'build_args', 'productive', 'version',
            'xmake_version'
        ]
        if not build_cfg.suppress_variant_handling():
            print_attribs.append('variant_coords')
        metadata_str = '''xmake release metadata
~~~~~~~~~~~~~~~~~~~~~~
'''
        metadata_str += '\n'.join([
            ' {0:25}: {1}'.format(attr, str(getattr(build_cfg, attr)()))
            for attr in print_attribs
        ])
        if build_cfg._externalplugin_setup:
            metadata_str += '\n {0:25}: {1}'.format(
                'xmake_plugin', build_cfg._build_script_name)
            metadata_str += '\n {0:25}: {1}'.format(
                'xmake_plugin_version', build_cfg._build_script_version)
        with open(build_cfg.release_metadata_file(), 'w') as rmf:
            rmf.write(metadata_str)

        # add release metadata if present
        meta = [
            build_cfg.release_metadata_file(),
            build_cfg.import_file(),
            build_cfg.xmake_file(),
            build_cfg.dependency_file()
        ]
        meta.extend(build_cfg.addtional_metadata())
        for f in meta:
            if is_existing_file(f):
                adargs.extend(['--metadata-file', f])
        adargs.extend(
            ['--metadata-type-id', 'xmake', '--metadata-type-name', 'xmake'])
    elif build_cfg.productive():
        log.error(
            'no release metadata available. Resulting export file must not be released.',
            log.INFRA)
        raise XmakeException(
            'missing release metadata file for productive build')
    else:
        log.warning(
            'no release metadata available. Resulting export file must not be released.'
        )

    log.info('calling ' + ' '.join(adargs))
    utils.flush()
    rc = log.log_execute(adargs)
    utils.flush()
    log.info('done')
    if rc == 0:
        log.info('export file creation succeeded')
    else:
        log.info('export file creation returned w/ RC==' + str(rc))
        log.error(
            'export file creation resulted in an error. See log output for further hints',
            log.INFRA)
        raise XmakeException('export failed')

    write_version_properties(build_cfg)

    p = subprocess.Popen([
        build_cfg.tools().artifact_deployer(), 'showpackage', '-p',
        build_cfg.export_file()
    ],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
    for line in p.stdout:
        searchGroup = re.search(r"group\s+\'(.+)\'", line)
        if searchGroup:
            group = searchGroup.group(1)
            if group.startswith('com.sap.'):
                if not re.search(r'^[1-9]\d*\.\d+\.\d+$',
                                 build_cfg.base_version()):
                    if build_cfg.is_release() == 'direct-shipment':
                        log.error(
                            'ERR: project version %s does not respect the format for the direct shipment release.'
                            % build_cfg.base_version())
                        raise XmakeException(
                            'ERR: project version %s does not respect the format for the direct shipment release.'
                            % build_cfg.base_version())
                    log.warning(
                        'project version %s does not respect the format for the direct shipment release.'
                        % build_cfg.base_version())
                if not re.search(r'^[1-9]+', build_cfg.base_version()):
                    if build_cfg.is_release() == 'indirect-shipment':
                        log.error(
                            'ERR: project version %s does not respect the format for the indirect shipment release.'
                            % build_cfg.base_version())
                        raise XmakeException(
                            'ERR: project version %s does not respect the format for the indirect shipment release.'
                            % build_cfg.base_version())
                    log.warning(
                        'project version %s does not respect the format for the indirect shipment release.'
                        % build_cfg.base_version())
                if not re.search(r'^\d+', build_cfg.base_version()):
                    if build_cfg.is_release(
                    ) == 'milestone' or build_cfg.is_milestone():
                        log.error(
                            'ERR: project version %s does not respect the format for the milestone release.'
                            % build_cfg.base_version())
                        raise XmakeException(
                            'ERR: project version %s does not respect the format for the milestone release.'
                            % build_cfg.base_version())
                    log.warning(
                        'project version %s does not respect the format for the milestone release.'
                        % build_cfg.base_version())
            else:
                if build_cfg.is_release() == 'direct-shipment':
                    log.error(
                        'ERR: the group %s does not respect the format for the direct shipment release.'
                        % group)
                    raise XmakeException(
                        'ERR: the group %s does not respect the format for the direct shipment release.'
                        % group)
                log.warning(
                    'the group %s does not respect the format for the direct shipment release.'
                    % group)
                if not (re.search(r'^\d+[\d\.]*[-\.]sap-\d+',
                                  build_cfg.base_version()) or
                        re.search(r'^(\d+\.\d+\.\d+\.[a-zA-Z0-9_-]+)-sap-\d+',
                                  build_cfg.base_version())):
                    if build_cfg.is_release(
                    ) == 'indirect-shipment' or build_cfg.is_release(
                    ) == 'milestone' or build_cfg.is_milestone():
                        log.error(
                            'ERR: project version %s does not respect the format for the indirect shipment or milestone release.'
                            % build_cfg.base_version())
                        raise XmakeException(
                            'ERR: project version %s does not respect the format for the indirect shipment or milestone release.'
                            % build_cfg.base_version())
                    log.warning(
                        'project version %s does not respect the format for the indirect shipment or milestone release.'
                        % build_cfg.base_version())
    rc = p.wait()
    if rc != 0:
        log.info('showpackage returned w/ RC==' + str(rc))
        log.error('showpackage resulted in an error.', log.INFRA)
        raise XmakeException('export failed')
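
The version checks above are plain regular expressions on base_version(); a few sample strings showing what they accept (illustrative values only):

import re

assert re.search(r'^[1-9]\d*\.\d+\.\d+$', '1.2.3')            # direct-shipment format
assert not re.search(r'^[1-9]\d*\.\d+\.\d+$', '0.1.0')        # leading zero is rejected
assert re.search(r'^\d+[\d\.]*[-\.]sap-\d+', '1.2.3-sap-01')  # sap-suffixed external version
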
Example #25
 def matches(self):
     f = join(self._build_cfg.cfg_dir(), "build.cfg")
     if is_existing_file(f):
         return True
     return False
Example #26
def prepare_args(build_cfg, path, args):
    log.info("  cleanup command line for selected xmake version")
#     keep --xmake-version and -X option in order to be stored in release-metadata file
#     args=remove_arg('--xmake-version',1,args)
#     args=remove_arg('-X',1,args)
    args=remove_arg('--default-xmake-version',1,args)
    args=remove_arg('--buildruntime',1,args)

    p=join(path,'xmake','options.py')
    targetopts=None
    if is_existing_file(p):
        log.info("  loading options for selected xmake version")
        mod = imp.load_source('targetoptions', p)
        if hasattr(mod, 'features'):
            features=mod.features
            log.info("  found xmake version with features "+str(features))
        else:
            log.info("  found featureless xmake version")
            features={}
        targetopts=mod.cli_parser()
    else:
        log.info('  falling back to default options for older xmake version')
        targetopts=options.base_09_options()
        features={}

    if targetopts is not None:
        def handle_option(o,args):
            # option arguments must be ignored, but here only a basic option parsing is done just by looking
            # for option markers.
            if options.cli_parser().has_option(o): # WORKAROUND: ignore option arguments, if they look like options
                #log.info("    opt: "+o)
                if o != '-V' and not o.startswith('--variant-') and not targetopts.has_option(o):
                    index=contains(o,'=')
                    if index>=0:
                        k=o[:index]
                    else:
                        k=o
                    #log.info("found unsupported option "+k)
                    nargs=curopts.get_option(k).nargs
                    log.warning("given option '"+k+"' with "+str(nargs)+" arg(s) is not supported by selected xmake version -> option omitted")
                    if k!=o:
                        nargs=0
                    args=remove_arg(o,nargs,args)
            return args

        curopts=options.cli_parser()
        for a in [ x for x in args] :
            if a.startswith('-'):
                if a == '--':
                    break
                #log.info("  arg: "+a)
                if a.startswith('--'):
                    args=handle_option(a,args)
                else:
                    for o in a[1:]:
                        args=handle_option('-'+o,args)

    args=handle_features(features,args)
    if targetopts.has_option("--buildruntime"):
        log.info("  passing buildruntime "+build_cfg.runtime())
        tmp=["--buildruntime", build_cfg.runtime() ]
        tmp.extend(args)
        args=tmp
    else:
        log.info("  using legacy mode to detect build runtime in selected xmake version")

    if '--xmake-version' not in args and '-X' not in args and build_cfg.xmake_version():
        if '--' in args:
            indexOfDashDash = args.index('--')
            args.insert(indexOfDashDash, build_cfg.xmake_version())
            args.insert(indexOfDashDash, '--xmake-version')
        else:
            args.extend(['--xmake-version', build_cfg.xmake_version()])

    log.info("effective args: "+str(args))
    return args
Example #27
 def after_PRELUDE(self, build_cfg):
     log.info("artifact id is "+self.aid)
     log.info("group id is    "+self.gid)
     if not is_existing_file(self.build_cfg.export_script()):
         self.prepare_export()
Example #28
def execute_imports(build_cfg):
    '''performs the xmake IMPORT phase (imports are defined in <cfgdir>/import.ais and resolved using the Artifact Importer)'''
    mkdirs(build_cfg.import_dir())

    if not build_cfg.do_import():
        log.info(
            "importing was skipped, because the according option '-i' was not set\n"
        )
        return
    absent_import_scripts = filter(lambda x: not is_existing_file(x),
                                   build_cfg.import_scripts())
    import_scripts = filter(lambda x: is_existing_file(x),
                            build_cfg.import_scripts())
    if len(import_scripts) == 0:
        log.info('no standard import')
    else:
        log.info('standard import scripts: ' + str(import_scripts))
    #add explicit import targets from build plugin
    tool_import_script = _create_tool_import_script(build_cfg)
    if tool_import_script is not None:
        log.info('adding tool import script ' + tool_import_script)
        import_scripts.insert(0, tool_import_script)
    if len(absent_import_scripts) != 0:
        log.warning(
            'importing was switched on, but the following import mapping scripts were not found:'
        )
        log.warning(', '.join(build_cfg.import_scripts()))
        if len(import_scripts) == 0: return
    #run artifact importer
    log.info("performing import...")
    log.info('import scripts: ' + str(import_scripts))

    ai_args = prepare_ai_command(build_cfg, {
        'default': build_cfg.import_dir(),
        'tools': build_cfg.import_tools_dir()
    }, build_cfg.import_repos(), '.tmp')

    if not build_cfg.suppress_variant_handling():

        def add_variant_coord(k):
            ai_args.extend(['-Dbuild' + k.capitalize() + '=' + vcoords[k]
                            ])  # why different from export script variables???
            ai_args.extend(['-Dbuild' + k + '=' + vcoords[k]])

        vcoords = build_cfg.variant_coords()
        if vcoords is not None and len(vcoords) != 0:
            map(add_variant_coord, vcoords.keys())
        else:
            log.error("using variant coordinate system (" +
                      build_cfg.variant_cosy_gav() +
                      ") requires coordinates/variant options")
            raise XmakeException("using variant coordinate system (" +
                                 build_cfg.variant_cosy_gav() +
                                 ") requires coordinates/variant options")

    #add custom import config if present
    bs = build_cfg.build_script()
    for (name, value) in bs.import_roots().items():
        ai_args.extend(['-C', 'root.' + name + '=' + value])
    for (name, value) in bs.import_variables().items():
        ai_args.extend(['-D', name + '=' + value])

    assert_import_file(build_cfg)
    for script in import_scripts:
        execute_ai(build_cfg, ai_args, script, "")
        update_import_file(build_cfg, '.tmp')

    _setup_global_settings_xml(build_cfg)
Example #29
def assert_import_file(build_cfg):
    ifile = build_cfg.import_file()
    if not is_existing_file(ifile):
        with open(ifile, "w"):
            pass