Example #1
def resolve_deployment_credentials(build_cfg, t=config.COMMONREPO):
    # skip resolving if either no credential key was defined or user and password were specified
    if build_cfg.deploy_cred_key(t) is None or (
            build_cfg.deploy_user(t) is not None
            and build_cfg.deploy_password(t) is not None):
        return
    prodpassaccess = build_cfg.tools().prodpassaccess()

    log.info("accessing credential key " + build_cfg.deploy_cred_key(t) +
             " for repository type " + t)

    # user
    log.info("call prodpassaccess to retrieve user")
    (rc, stdout, _) = OS_Utils.exec_script(
        [prodpassaccess, 'get',
         build_cfg.deploy_cred_key(t), 'user'])
    if rc > 0:
        raise XmakeException('prodpassaccess returned %s' % str(rc))
    build_cfg.set_deploy_user(stdout.strip(), t)
    log.info("user retrieved")

    # password
    log.info("call prodpassaccess to retrieve password")
    (rc, stdout, _) = OS_Utils.exec_script(
        [prodpassaccess, 'get',
         build_cfg.deploy_cred_key(t), 'password'])
    if rc > 0:
        raise XmakeException('prodpassaccess returned %s' % str(rc))
    build_cfg.set_deploy_password(stdout.strip(), t)
    log.info("password retrieved")
Example #2
def select_bootstrapper(f, argv):
    parser = OptionParser()
    parser.add_option('--select-bootstrapper', dest="boot", action='store_true', help="the bootstrapper version to use")
    (values, args) = parser.parse_args(argv[1:], Bunch(boot=False))
    if not values.boot:
        raise XmakeException("inconsistent use of bootstrapper option")

    if len(args) == 0:
        # check and notify about the actually configured bootstrapper
        if isfile(f):
            log.info("determining bootstrapper")
            v = get_first_line(f, 'cannot read ' + f)
            if v is not None:
                log.info("actual bootstrapper version is " + v)
            else:
                log.info("no bootstrapper version configured in " + f)
        else:
            log.info("no bootstrapper version configured")
    else:
        # set or change the bootstrapper version to use
        if len(args) != 1:
            raise XmakeException("only one version argument possible")
        (v, _) = load_latest(args[0])
        with open(f, "w") as b:
            b.write(v + "\n")
            log.info("setting bootstrapper to version " + v)
Example #3
def _check(build):
    if build.image_name is not None:
        if build.mode != 'docker':
            raise XmakeException(
                'using docker image name requires mode "docker"')
        return
    if build.mode is None:
        if build.version is None or build.gid is None:
            build.mode = 'docker'
    if build.aid is None:
        raise XmakeException(
            'docker image name or artifact id required: specify property aid for build plugin'
        )
    if build.gid is None:
        if build.mode != 'docker':
            raise XmakeException(
                'group id for docker image required: specify property gid for build plugin'
            )
    elif 'IS_NEW_DOCKER' in os.environ and os.environ[
            'IS_NEW_DOCKER'] == 'true':
        build.gid = re.sub(r'^[^:]+:\d+/', '', build.gid)
        build.gid = build.registry + '/' + build.gid
    if build.version is None:
        if build.mode != 'docker':
            raise XmakeException(
                'version of docker image required: specify property version for build plugin'
            )
    else:
        if build.mode == 'tool':
            if build.version.endswith('-SNAPSHOT'):
                raise XmakeException(
                    'snapshot versions not supported for tool dependencies')
Example #4
    def acquire_forwarding_script(self, build_cfg, plugin_path):
        #dynamically load custom build script
        log.info('loading forwarding plugin ' + plugin_path)
        plugin_code = compile(open(plugin_path, "rt").read(), plugin_path, "exec")
        plugin_dict = {
                "__name__": "forwardplugin",
                "object": _ObjectBase,
        }
        exec plugin_code in plugin_dict

        try:
            forward_script_class = plugin_dict["forward"]
        except KeyError:
            log.error( "custom forwarding script must define a class 'forward' - no such class def found at: " + plugin_path)
            raise XmakeException("failed to instantiate 'forward' (no such class def found)")

        # check for existence of a c'tor w/ one formal parameter
        if len(inspect.getargspec(forward_script_class.__init__)[0]) != 2:
            log.error("custom forward class must implement a c'tor w/ exactly one formal parameter (which is of type BuildConfig)")
            raise XmakeException("failed to instantiate 'forward' (no c'tor def w/ correct amount of formal parameters (which is one) found)")
        if len(inspect.getargspec(forward_script_class.run)[0]) != 1:
            log.error("custom forward class must implement a method 'run' w/ exactly one formal parameter (which is of type BuildConfig)")
            raise XmakeException("failed to instantiate 'forward' (no method 'run' def w/ correct amount of formal parameters (which is one) found)")

        forward_script = forward_script_class(build_cfg)

        return forward_script
Example #5
    def execute_scripts(self):
        log.info('executing docker build...')
        self.image = None

        def handler(line):
            if self.src_image is None:
                self.src_imagename = self.match(line, 'Step 0 : FROM', self.src_imagename)
                if not self.src_imagename:
                    self.src_imagename = self.match(line, 'Step 1 : FROM', self.src_imagename)
                self.src_image = self.match(line, ' --->', self.src_image)
            self.image = self.match(line, 'Successfully built', self.image)
            return line
        try:
            self.docker(["build",self.context_dir],handler=handler)
        except XmakeException:
            log.error("docker image '%s' not found.\nPlease check the docker registry '%s'" % (self.src_imagename, self.registry))
            raise XmakeException("ERR: docker image '%s' not found.\nPlease check the docker registry '%s'" % (self.src_imagename, self.registry))
        if self.image is None:
            raise XmakeException('ERR: no image found')
        log.info('generated docker image '+self.image)
        addfile = os.path.join(self.build_cfg.src_dir(), 'DockerCommands')
        if is_existing_file(addfile):
            log.info('executing additional commands...')
            succeeded = False
            try:
                self.image = self.handle_script(self.image, addfile)
                log.info('generated docker image '+self.image)
                succeeded = True
            finally:
                if self.image is not None and not succeeded:
                    self.cleanup_image(self.image)
        log.info('root image '+self.src_imagename+' ('+self.src_image+')')
        with open(self.image_name_file, 'w') as f:
            f.write(self.image+'\n')
Example #6
    def tool(self, version):
        self._tools._fail_if_not_windows('msvc')
        try:
            environment_variable = self.versions[version]
        except KeyError:
            log.error("visual studio version '%s' is not supported" %
                      (version))
            raise XmakeException("unsupported visual studio version: %s" %
                                 version)

        if environment_variable in os.environ:
            visual_studio_inst_dir = os.environ[environment_variable]
            if not is_existing_directory(visual_studio_inst_dir):
                log.error(
                    "visual studio '%s' not found in '%s=%s'. Either unset environment variable '%s' or change it"
                    % (version, environment_variable, visual_studio_inst_dir,
                       environment_variable))
                raise XmakeException(
                    "visual studio '%s' not found in '%s=%s'" %
                    (version, environment_variable, visual_studio_inst_dir))
            visual_studio_inst_dir = os.path.abspath(
                os.path.join(visual_studio_inst_dir, '..', '..'))
        else:
            visual_studio_inst_dir = r"C:\Program Files (x86)\Microsoft Visual Studio " + environment_variable[
                2:4] + "." + environment_variable[4:5]
            if not is_existing_directory(visual_studio_inst_dir):
                log.error(
                    "visual studio version '%s' not found in the default folder '%s'. Set environment variable '%s' or install visual studio in the default folder"
                    % (version, visual_studio_inst_dir, environment_variable))
                raise XmakeException(
                    "visual studio '%s' not found in the default folder '%s'" %
                    (version, visual_studio_inst_dir))
        return visual_studio_inst_dir
Example #7
    def __init__(self, build_cfg):
        BuildPlugin.__init__(self, build_cfg)
        self._detected_npm_version = None
        self._node_version = '0.12.0'
        self._npm_user_options = []
        self._bundle = False
        self._run_shrinkwrap = True
        self._after_install = []
        self._before_publish = []
        self._node_executable = None
        self._npmcmd = None
        self._env = None
        self._path = None
        self._rel_path = 'bin'
        self._rel_npm = 'lib/node_modules/npm/bin/npm-cli.js'
        self._root = self.build_cfg.src_dir()
        self._pkgfile = join(self._root, 'package.json')
        self._npm_prefix_dir = join(self.build_cfg.temp_dir(), '.npm-global')

        self.build_cfg.tools().declare_runtime_tool(
            'nodejs', 'com.sap.prd.distributions.org.nodejs:nodejs:tar.gz')
        repos = self.build_cfg.import_repos(NPMREPO)
        if repos is None or len(repos) == 0:
            raise XmakeException('npm repository required')
        if len(repos) > 1:
            log.warning(
                'multiple NPM import repositories found -> ignore all but the first one'
            )
        self.registry = repos[0]
        log.info('using NPM import repository ' + self.registry)

        if self._is_plain():
            log.info('using plain mode for build')
        if not is_existing_file(self._pkgfile):
            raise XmakeException(
                'package.json required in projects root or src folder')
        with open(self._pkgfile, 'r') as d:
            self.pkg = json.load(d)
        if 'name' not in self.pkg:
            raise XmakeException('package.json must contain a name field')
        self.module_name = self.pkg['name']
        if 'dependencies' not in self.pkg:
            self._run_shrinkwrap = False
        else:
            dependencies = self.pkg['dependencies']
            if dependencies is not None and not dependencies:
                self._run_shrinkwrap = False

        self.deps = join(self.build_cfg.temp_dir(), 'dependencies')
        self.build_cfg.add_metadata_file(self.deps)

        # Take into account arguments after the --
        # All these arguments will be passed to the npm command
        if self.build_cfg.build_args():
            for arg in self.build_cfg.build_args():
                log.info('  using custom option ' + arg)
                self._npm_user_options.append(arg)
Example #8
    def __init__(self, build_cfg):
        BuildPlugin.__init__(self, build_cfg)

        if not build_cfg.runtime().startswith('linux'):
            raise XmakeException('docker build available only on linux runtime')

        repos = self.build_cfg.import_repos('DOCKER')
        if repos is None or len(repos) == 0:
            log.warning('no docker repository specified')
            self.registry = None
        else:
            if len(repos) > 1:
                log.warning('multiple DOCKER import repositories found -> ignore all but the first one')
            self.registry = repos[0]
            log.info('using DOCKER import repository '+self.registry)
        f = join(self.build_cfg.src_dir(), 'Dockerfile')
        if not is_existing_file(f):
            raise XmakeException('Docker file required in projects root folder')
        if 'IS_NEW_DOCKER' in os.environ and os.environ['IS_NEW_DOCKER'] == 'true':
            with open('Dockerfile') as infile, open('Dockerfile_tmp', 'w') as outfile:
                for line in infile:
                    line = re.sub(r'^\s*FROM\s*[^:]+:\d+/', 'FROM ', line)
                    if self.registry is not None:
                        line = re.sub(r'^\s*FROM\s*', 'FROM ' + self.registry + '/', line)
                    outfile.write(line)
            shutil.copyfile('Dockerfile_tmp', 'Dockerfile')

        self.context_dir = join(self.build_cfg.gen_dir(), 'context')
        self.import_dir = join(self.context_dir, 'imports')
        self.image_file = join(build_cfg.gen_dir(), 'image.tar.gz')
        self.delta_file = join(build_cfg.gen_dir(), 'delta.tar.gz')
        self.image_name_file = join(build_cfg.gen_dir(), 'imagename')

        self.aid = None
        self.gid = 'com.sap.docker'
        self.load = None
        self.delta = False
        self.common = False
        self.echo = False

        self.image = None
        self.src_image = None
        self.src_imagename = None
        self.docker_force_enabled = False

        output = []

        def catch_output(line):
            if not len(output):
                log.info(line.rstrip())
            output.append(line)

        docker.docker(["-v"], handler=catch_output)
        m = re.match(r".*?\s+(?P<Version>\d+\.[\w\.\-]+).*", ''.join(output))
        if m and m.group('Version'):
            log.info("Checking --force flag for version: " + m.group('Version'))
            # starting from docker 1.10 the --force option is deprecated
            if LooseVersion(m.group('Version')) < LooseVersion('1.10'):
                self.docker_force_enabled = True
Example #9
 def check_coords(self, coords):
     for c in self._dimensions:
         if c in coords:
             if coords[c] not in self.get_dimension(c):
                 raise XmakeException("invalid value '" + coords[c] + "' for variant dimension '" + c + "'")
         else:
             raise XmakeException("missing value for variant dimension '" + c + "'")
     for c in coords.keys():
         if c not in self._dimensions:
             raise XmakeException("invalid variant dimension '" + c + "'")
Example #10
def _create_tool_import_script(build_cfg):
    build_script = build_cfg.build_script()
    (tool_imports, allgavs) = _gather_tool_imports(build_cfg, build_script)
    if (not build_cfg.suppress_variant_handling()
            and build_cfg.variant_cosy_gav() is not None):
        tool_imports.add(build_cfg.variant_cosy_gav())
    if len(tool_imports) > 0:
        imports = {'tools': tool_imports}
    else:
        imports = {}

    imps = build_script.plugin_imports()
    if imps is not None:
        if not isinstance(imps, dict):
            raise XmakeException('plugin_imports must return a dict')
        log.info('build plugin requests imports: ' + str(imps))
        if 'tools' in imps and len(imports) > 0:
            imports['tools'].update(imps['tools'])
            del imps['tools']
        if len(imps) > 0:
            imports.update(imps)

    ct = build_cfg.configured_tools()
    if len(ct) != 0:
        log.info('found configured tools...')
        gavs = set()
        for n in ct.keys():
            tid = ct[n].toolid()
            _handleToolImports(build_cfg, tid, ct[n].version(), gavs, allgavs)
        if len(gavs) > 0:
            if 'tools' in imports:
                imports['tools'].update(gavs)
            else:
                imports['tools'] = gavs

    log.info('all tool imports: ' + str(allgavs))
    append_import_file(build_cfg, allgavs)
    if len(imports) == 0:
        log.info('no tool import script required')
        return None

    # dict keys must denote existing (defined) import roots
    import_keys = ['default', 'tools']  # default and tools are provided by the IMPORT phase (xmake)
    import_keys.extend(build_script.import_roots().keys())
    undefined_import_roots = [key for key in imports.keys()
                              if key not in import_keys]
    if len(undefined_import_roots) > 0:
        raise XmakeException(
            'the following import roots were not defined by build plugin: %s' %
            ', '.join(undefined_import_roots))
    import_file = create_import_script(build_cfg, 'plugin-imports.ais',
                                       imports)

    return import_file
Example #11
    def load_plugin(f):
        plugin_name = basename(f)[:-3]
        module = imp.load_source('plugin', f)
        if not hasattr(module, 'content'):
            raise XmakeException('ERR: content plugin %s does not define a class "content"' % (plugin_name))

        if not issubclass(module.content, spi.ContentPlugin):
            log.warning("content plugin class is not a subclass of spi.ContentPlugin: " + plugin_name)
        if not has_method(module.content, '__init__', 2):
            log.error("content plugin class for %s must implement a c'tor w/ exactly two formal parameters" % (plugin_name))
            raise XmakeException("failed to instantiate content plugin " + plugin_name + " (no c'tor def w/ correct amount of formal parameters (which is two) found)")
        impl = module.content(build_cfg, plugin_name)
        contentPlugins[plugin_name] = impl
Example #12
    def extract_msvc_env(self, vs_inst_path, arch):
        """ extracts the msvc environment variables out of the local installation
            and returns them as a dict
        """

        if arch not in self.archs:
            log.error("invalid architecture provided: %s" % arch)
            raise XmakeException("invalid architecture provided: %s" % arch)
        log.info("looking up env for " + vs_inst_path)
        vc_vars_all = os.path.normpath(
            os.path.join(vs_inst_path, "VC", "vcvarsall.bat"))
        if not os.path.exists(vc_vars_all):
            log.error("vcvarsall.bat not found")
            raise XmakeException("vcvarsall.bat not found")

        cmd = subprocess.Popen(args=["cmd.exe"],
                               stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE)

        cmd.stdin.write('"%s" %s\n' % (vc_vars_all, arch))
        cmd.stdin.write(
            '''"%s" -c "import pickle, os; print '---{1}---\\n{0}\\n---{1}---'.format(pickle.dumps(dict(os.environ), -1).encode('base64'), 'ENV')"\n'''
            % sys.executable)
        cmd.stdin.close()
        output = cmd.stdout.read()
        rc = cmd.wait()

        if rc != 0:
            log.error("could not determine msvc environment")
            raise XmakeException("could not determine msvc environment")

        match = re.search("---ENV---(.*)---ENV---", output, re.DOTALL)

        if match is None:
            log.error("getting environment failed")
            raise XmakeException("getting environment failed")

        environ_data = match.group(1)
        environ = pickle.loads(environ_data.strip().decode("base64"))

        if self.printenv:
            log.info("environment modifications: ")
            for v in environ.keys():
                n = environ[v]
                if v in os.environ:
                    if os.environ[v] != n:
                        log.info("  modified: " + v + "=" + os.environ[v] +
                                 " -> " + n)
                else:
                    log.info("  new     : " + v + "=" + n)

        return environ
Example #13
def _nexus_staging_push(build_cfg, jsonUserAgent, nexusApi, repoId,
                        diskDeploymentPath):
    for root, dirs, files in os.walk(diskDeploymentPath):
        for name in files:
            artifact = {}
            artifact['filename'] = name
            artifact['path'] = os.path.join(root, name)
            artifact['relpath'] = artifact['path'].replace(
                diskDeploymentPath, '').replace('\\', '/')
            if artifact['relpath'].startswith('/'):
                artifact['relpath'] = artifact['relpath'][1:]

            succeeded = nexusApi.deployByRepositoryId(repoId,
                                                      artifact,
                                                      userAgent=jsonUserAgent)
            if not succeeded:
                raise XmakeException(
                    'Failed to deploy file {} to repository {}'.format(
                        artifact, repoId))

    status = nexusApi.getrepositorystatus(repoId)
    log.info(
        'build results pushed into staging repo \'{}\'. This staging repo is not closed yet.'
        .format(status['repositoryURI']))

    return status
Example #14
def _nexus_deployment(build_cfg, export_repo_url):
    resolve_deployment_credentials(build_cfg)
    args = [
        build_cfg.tools().artifact_deployer(), 'deploy', '-p',
        build_cfg.export_file(), '--repo-url', export_repo_url,
        '--artifact-version',
        build_cfg.base_version(), '--request-timeout',
        str(tuning.AD_TIMEOUT), '--write-artifact-info',
        build_cfg.deployment_info_log()
    ]

    suffix = build_cfg.version_suffix()
    if suffix is not None and len(suffix) > 0:
        args.extend(['--artifact-version-suffix=-' + suffix])
    log.info('calling ' + ' '.join(args))

    if build_cfg.deploy_user():
        args.extend(['--repo-user', build_cfg.deploy_user()])
    if build_cfg.deploy_password():
        args.extend(['--repo-passwd', build_cfg.deploy_password()])

    rc = log.log_execute(args)
    if rc == 0:
        log.info("deployment of exported build results succeeded")
        log.info("deployed version: " + build_cfg.base_version() +
                 (('-' + suffix) if suffix is not None else ''))
    else:
        log.info("deployment returned w/ RC==" + str(rc))
        log.error(
            "deployment of exported build results resulted in an error. See log output for further hints",
            log.INFRA)
        raise XmakeException("deployment failed")
Example #15
def _nexus_staging_close(build_cfg, nexusApi, repoId):
    if not nexusApi.finish(repoId):
        raise XmakeException(
            'close of staging repository failed. Analyse the build log and look at repository id: {}.'
            .format(repoId))
    status = nexusApi.getrepositorystatus(repoId)
    ua_scm_snapshot_url = build_cfg.scm_snapshot_url()
    if not ua_scm_snapshot_url:
        try:
            userAgent = json.loads(status['userAgent'])
            if userAgent:
                if 'scm' in userAgent:
                    ua_scm_snapshot_url = userAgent['scm']
                    log.info(
                        'scm_snapshot_url read from staging repository: {}'.
                        format(ua_scm_snapshot_url))
        except ValueError:
            pass

    _storeStageCloseProps(nexusApi, build_cfg, repoId, ua_scm_snapshot_url)

    log.info('*' * 100)
    log.info('build results are available in staging repository:')
    log.info(status['repositoryURI'])
    log.info('scm_snapshot_url:')
    log.info(ua_scm_snapshot_url)
    log.info('*' * 100)

    return status
Example #16
def forbid_docker(feature, state, args):
    '''
    If docker is available on this host, only docker-aware xmake versions may be used
    '''
    if utils.which('docker') is not None:
        raise XmakeException("non-docker-aware xmake versions may not be used on this host")
    return args
Example #17
    def after_IMPORT(self, build_cfg):

        self.java_set_environment(True)
        # Setup maven
        self._setup()

        # If set-version option is on
        if build_cfg.get_next_version() is not None:
            self._set_version_in_pom(build_cfg.get_next_version(), build_cfg.component_dir())
            # Always copy src see BESTL-8640 Related to Cloud Foundry deployment
            self._copy_src_dir_to(self._copied_src_dir)

        elif build_cfg.base_version() == 'NONE':
            # Always copy src see BESTL-8640 Related to Cloud Foundry deployment
            self._copy_src_dir_to(self._copied_src_dir)
            self._get_version_from_effective_pom()

        # If get-version option is on
        if build_cfg.get_project_version() is True:

            status = self._check_project_version_compliance()
            if status[0] is False:
                raise XmakeException(status[1])
            else:

                stripped_version = self._remove_leading_zero(self.build_cfg.base_version())
                self.build_cfg.set_base_version(stripped_version)

                log.info('write project version {} in {}'.format(self.build_cfg.base_version(), self.build_cfg.project_version_file()))
                with open(self.build_cfg.project_version_file(), 'w') as f:
                    f.write(self.build_cfg.base_version())
Example #18
    def __init__(self, build_cfg):
        VariantBuildPlugin.__init__(self, build_cfg)

        if not build_cfg.runtime().startswith('linux'):
            raise XmakeException(
                'docker build available only on linux runtime')
        if not is_existing_directory(self.build_cfg.src_dir()):
            self.build_cfg.set_src_dir(self.build_cfg.component_dir())
            log.info('using flat source dir: ' + self.build_cfg.src_dir())
        repos = self.build_cfg.import_repos('DOCKER')
        if repos is None or len(repos) == 0:
            log.warning('no docker repository specified')
            self.registry = None
        else:
            if len(repos) > 1:
                log.warning(
                    "multiple DOCKER import repositories found -> ignore all but the first one"
                )
            self.registry = repos[0]
            log.info("using DOCKER import repository " + self.registry)
        self.image_file = None
        self.image_name = None
        self.aid = None
        self.gid = None
        self.version = None
        self.mode = 'tool'
        self.keepuser = True
        self.keepcontainer = False
        self._srcdir = '/src'
        self._gendir = '/gen'
        self._importdir = '/imports'
Example #19
 def set_import_repos(self, r, t=COMMONREPO):
     if self.productive() or self.is_release() or self.is_milestone():
         if r and len(r) >= 1:
             raise XmakeException(
                 'only one repository url is authorized for productive or/and release build. See {} urls passed in --import-repo argument'
                 .format(t))
     self._import_repos[t] = r
Example #20
    def _setup(self):
        if self._npmcmd is None:
            self._nodehome = self.build_cfg.tools()['nodejs'][
                self._node_version]
            dirs = os.listdir(self._nodehome)
            if len(dirs) != 1:
                raise XmakeException('ERR: invalid nodejs distribution %s' %
                                     str(self._nodehome))
            self._nodehome = join(self._nodehome, dirs[0])
            log.info('found node: ' + self._nodehome)

            self._path = os.path.realpath(
                os.path.join(self._nodehome, self._rel_path))
            self._node_executable = os.path.realpath(self._node_cmd())
            self._npmrc_file = os.path.join(self.build_cfg.temp_dir(),
                                            '.npmrc')
            self._npmcmd = [
                self._node_executable, '-i',
                os.path.realpath(self._npm_script()), '--userconfig',
                self._npmrc_file
            ]

            self.module_dir = join(self.build_cfg.gen_dir(), 'module')
            self.shrinkwrap = join(self.build_cfg.temp_dir(),
                                   'npm-shrinkwrap.json')
            self.build_cfg.add_metadata_file(self.shrinkwrap)
            self.setup_env()
Example #21
    def run(self):
        """the vmake build execution implementation. Invoked during the xmake BUILD phase"""
        self._initialise()
        log.info("invoking vmake build...")
        self._before_build_callback()
        vmake_args = self.build_cfg.build_args()
        vmake_args = [
            'all.vprj'
        ] if vmake_args is None or len(vmake_args) == 0 else vmake_args
        self._handle_build_mode(
            self.build_cfg.variant_info()[XMAKE_BUILD_MODE])
        vmake_args = ' '.join(vmake_args)
        if self.is_UNIX:
            rc = os.system(
                "bash -c \"source " + self._iprofile() + " && vmake " +
                vmake_args +
                "\"")  # assume there must be a 'all.vprj' vmake target
        else:
            rc = os.system(
                self._iprofile() + " && vmake " +
                vmake_args)  # assume there must be an 'all.vprj' vmake target
        # dirty-hacks: 1. RC == 1 seems to mean build was OK,
        #              2. left-shift RC if on unix
        if self.is_UNIX and rc > 254:
            rc = rc >> 8

        if rc > 1:
            raise XmakeException('ERR: vmake returned RC >1: %s' % str(rc))
Example #22
    def _fortifyDeploy(self):
        '''
            Deploy fortify result in corporate fortify server
        '''

        log.info('fortify upload')

        # Read option values from os environment
        serverurl = os.getenv('FORTIFY_F360_URL')  # https://fortify1.wdf.sap.corp/ssc
        token = os.getenv('FORTIFY_F360_AUTH_TOKEN')

        logErrors = []
        if serverurl is None:
            logErrors.append('fortify server url not set. Please set env FORTIFY_F360_URL')
        elif not serverurl.startswith('http://') and not serverurl.startswith('https://'):
            logErrors.append('bad fortify server url waiting for http/https url: {}'.format(serverurl))

        if token is None:
            logErrors.append('fortify token not set. Please set env FORTIFY_F360_AUTH_TOKEN')

        if len(logErrors) > 0:
            for error in logErrors:
                log.error(error, log.INFRA)
            raise XmakeException('fortify results upload fails')

        # fortify deploy
        maven_args = []
        maven_args.append('com.sap.ldi:fortify-plugin:{}:upload'.format(self._fortify_plugin_version))
        maven_args.append('-Dldi.fortify.f360.url={}'.format(serverurl))
        maven_args.append('-Dldi.fortify.f360.authToken={}'.format(token))
        maven_args.append('-Dldi.fortify.f360.projectVersion={}'.format(self.build_cfg.base_version().split('.')[0]))
        maven_args.extend(shlex.split(' '.join(self._maven_user_options)))
        self._mvn(maven_args)
Example #23
 def publish(self):
     if self.image is None:
         self.image = get_first_line(self.image_name_file, 'cannot read image id from last build')
         log.info('reading image name from last build: '+self.image)
         if is_existing_file(self.delta_file):
             log.info('reloading image to daemon cache...')
             docker.docker(['load', '-i', self.delta_file])
     repo = self.repo_host()
     if repo is None:
         raise XmakeException('no DOCKER deployment repository configured')
     self.tag_image()
     log.info('publishing to DOCKER repository '+repo+'...')
     resolve_deployment_credentials(self.build_cfg, DOCKERREPO)
     user = self.build_cfg.deploy_user(DOCKERREPO)
     password = self.build_cfg.deploy_password(DOCKERREPO)
     dir = '.'
     if user is not None and password is not None:
         log.info('  using user '+user)
         dir = docker.prepare_dockercfg(user, password, repo)
     success = False
     try:
         tag = self.tag_name()
         log.info('publishing image to '+repo+' ('+tag+')')
         docker.docker(['push', tag], None, dir=dir, home=dir)
         log.info('docker logout '+repo)
         docker.docker(['logout', repo], None, dir=dir, home=dir)
         success = True
     finally:
         if dir != '.':
             log.info('cleanup login info '+dir)
             shutil.rmtree(dir)
         if success:
             self.cleanup_image(tag)
Example #24
def _set_stdrun_option(build, o, v):
    if o == 'image':
        build.image_name = v
        build.mode = 'docker'
        return True
    if o == 'aid':
        build.aid = v
        return True
    if o == 'gid':
        build.gid = v
        return True
    if o == 'version':
        build.version = v
        return True
    if o == 'mode':
        if not v.lower() in ['import', 'docker', 'tool']:
            raise XmakeException('invalid image mode ' + v +
                                 ', please use Import, Docker or Tool')
        build.mode = v
        return True
    if o == 'keepuser':
        build.keepuser = v.lower() == 'true'
        return True
    if o == 'keepcontainer':
        build.keepcontainer = v.lower() == 'true'
        return True
    return False
Example #25
 def set_option_string(self, v):
     c = v.split(':')
     if len(c) > 1:
         raise XmakeException(
             'ERR: only version of vmake variant coordinate system can be set: %s'
             % (v))
     self._cosy_version = v
     log.info('  using coordinate system ' + self.variant_cosy_gav())
Example #26
 def add_import_repo(self, r, t=COMMONREPO):
     if self.productive() or self.is_release() or self.is_milestone():
         repos = utils.get_entry(self._import_repos, t)
         if repos and len(repos) >= 1:
             raise XmakeException(
                 'only one repository url is authorized for productive or/and release build. See {} urls passed in --import-repo argument'
                 .format(t))
     utils.add_list_entry(self._import_repos, t, r)
Example #27
 def _validate_vmake_installation(self):
     if OS_Utils.find_in_PATH("vmake") is not None:
         return
     if is_existing_file(
             join(self.build_cfg.vmake_instdir(), "pgm",
                  "vmake" if OS_Utils.is_UNIX() else "vmake.exe")):
         return
     raise XmakeException('vmake was found neither in PATH nor in %s.' %
                          (self.build_cfg.vmake_instdir()))
Example #28
 def set_option(self, o, v):
     if o == "cosy":
         if len(v.split(':')) == 2:
             log.info('  using coordinate system ' + v)
             self._variant_cosy_gav = v
         else:
             raise XmakeException('ERR: invalid coordinate system specification ' + str(v) + ': expected <name>:<version>')
     else:
         BuildPlugin.set_option(self, o, v)
Example #29
 def _symlink(self, source, link_name):
     source = os.path.abspath(source)
     if self.is_UNIX:
         getattr(os, 'symlink')(source, link_name)
     else:
         # unfortunately, python 2.7 does not support symlinks on windows
         if os.system("mklink /d " + link_name + " " + source) != 0:
             raise XmakeException(
                 'failed to create symbolic link (hint: you may need to run this build w/ administrative privileges)'
             )
Example #30
 def _handle_build_mode(self, build_mode):
     build_modes = ['opt', 'dbg', 'rel']
     if build_mode is None:
         log.warning('no build_mode was specified - defaulting to opt')
         return
     if build_mode not in build_modes:
         raise XmakeException('unsupported build mode: %s' % (build_mode))
     # todo: do not modify our own process environment, but only do this for the vmake subprocess
     os.environ['VMAKE_VERSION'] = build_mode
     os.environ['VMAKE_DEFAULT'] = build_mode
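
All of the examples above raise XmakeException to abort a build step with a descriptive message; the class itself never appears in the snippets. As an illustration only, here is a minimal sketch (an assumption, not the actual xmake source) of how such an exception type could be declared and handled, reusing the log helper seen throughout the examples:

# hypothetical sketch - the real XmakeException definition is not shown in these examples
class XmakeException(Exception):
    """Raised when an xmake build step cannot continue."""
    pass


def run_step(step, *args):
    # execute a single build step and surface XmakeException failures in the log
    try:
        return step(*args)
    except XmakeException as e:
        log.error('build step failed: %s' % str(e))
        raise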