def _copy_src_dir_to(self, todir):
    '''Copy the component sources into another directory so the originals
    are never modified by the build.

    A previously generated 'target' subdirectory inside todir is preserved
    across the wipe by stashing it in a temp location and restoring it.
    '''
    if not os.path.exists(todir):
        os.mkdir(todir)
    else:
        log.info('removing existing folder', todir)
        target_dir = os.path.join(todir, 'target')
        if not os.path.exists(target_dir):
            OS_Utils.rm_dir(todir)
            os.mkdir(todir)
        else:
            log.debug('target directory was generated, so we keep it as it is in directory {}'.format(target_dir))
            # stash the generated target tree, recreate todir, restore it
            stash = os.path.join(tempfile.mkdtemp(), 'target')
            shutil.copytree(target_dir, stash)
            OS_Utils.rm_dir(todir)
            os.mkdir(todir)
            shutil.copytree(stash, target_dir)
            shutil.rmtree(stash)
    log.info('copying files from', self.build_cfg.component_dir(), 'to', todir)
    # build/VCS metadata folders are never copied into the working copy
    excluded = {'.xmake.cfg', 'gen', 'import', 'cfg', '.git', '.gitignore', 'target'}
    component_dir = self.build_cfg.component_dir()
    for entry in os.listdir(component_dir):
        if entry in excluded:
            continue
        src_path = os.path.join(component_dir, entry)
        dst_path = os.path.join(todir, entry)
        if os.path.isdir(src_path):
            shutil.copytree(src_path, dst_path)
        else:
            shutil.copyfile(src_path, dst_path)
def resolve_deployment_credentials(build_cfg, t=config.COMMONREPO):
    """Resolve deployment user and password for repository type *t*.

    Skips resolution when no credential key is configured, or when both
    user and password are already present on the build configuration.
    Otherwise both values are fetched via the prodpassaccess tool and
    stored back on build_cfg.

    Raises:
        XmakeException: when a prodpassaccess call exits with a non-zero code.
    """
    # parentheses make the original 'A or (B and C)' precedence explicit
    if build_cfg.deploy_cred_key(t) is None or (
            build_cfg.deploy_user(t) is not None
            and build_cfg.deploy_password(t) is not None):
        return
    prodpassaccess = build_cfg.tools().prodpassaccess()
    log.info("accessing credential key " + build_cfg.deploy_cred_key(t) + " for repository type " + t)

    def fetch(part):
        # one prodpassaccess invocation per credential part ('user'/'password');
        # factored out of the previously duplicated user/password sections
        log.info("call prodpassaccess to retrieve " + part)
        (rc, stdout, _) = OS_Utils.exec_script(
            [prodpassaccess, 'get', build_cfg.deploy_cred_key(t), part])
        if rc > 0:
            raise XmakeException('prodpassaccess returned %s' % str(rc))
        return stdout.strip()

    build_cfg.set_deploy_user(fetch('user'), t)
    log.info("user retrieved")
    build_cfg.set_deploy_password(fetch('password'), t)
    log.info("password retrieved")
def _validate_vmake_installation(self):
    """Verify that a vmake executable is reachable.

    Accepts either a 'vmake' found in PATH or the platform-specific binary
    under <vmake_instdir>/pgm.

    Raises:
        XmakeException: when no vmake installation can be located.
    """
    if OS_Utils.find_in_PATH("vmake") is not None:
        return
    vmake_binary = join(self.build_cfg.vmake_instdir(), "pgm",
                        "vmake" if OS_Utils.is_UNIX() else "vmake.exe")
    if is_existing_file(vmake_binary):
        return
    # message fix: was the double negative "was not found neither ... nor"
    raise XmakeException('Vmake was found neither in PATH nor in %s.'
                         % (self.build_cfg.vmake_instdir()))
def install_dependencies(self):
    """Populate the local import directory from the build import directory
    and preload any configured docker images (npm install step)."""
    # npm install
    log.info('installing dependencies...')
    os.mkdir(self.import_dir)
    if not is_existing_directory(self.build_cfg.import_dir()):
        return
    if OS_Utils.is_UNIX():
        # hard-link instead of copying to save space on UNIX
        entries = os.listdir(self.build_cfg.import_dir())
        if entries is not None:  # defensive guard kept from the original
            for entry in entries:
                os.link(join(self.build_cfg.import_dir(), entry),
                        join(self.import_dir, entry))
    else:
        copytree(self.build_cfg.import_dir(), self.import_dir)
    if self.load is None:
        return
    log.info('preloading images...')
    for image in self.load:
        image_file = join(self.build_cfg.import_dir(), image)
        if not is_existing_file(image_file):
            log.warning('image '+image+' not imported')
            continue
        log.info(' loading '+image)
        self.docker(['load', '-i', image_file])
def prepare_sources(self):
    """Copy the module sources into a clean working directory, skipping
    build artifacts and VCS metadata, then create an empty node_modules."""
    log.info('copying module sources...')
    if os.path.exists(self.module_dir):
        OS_Utils.rm_dir(self.module_dir)
    os.mkdir(self.module_dir)
    excluded = {'.xmake.cfg', '.xmake', 'gen', 'import', 'cfg', '.git', 'node_modules'}
    for entry in os.listdir(self._root):
        if entry in excluded:
            continue
        source = os.path.join(self._root, entry)
        destination = os.path.join(self.module_dir, entry)
        if os.path.isdir(source):
            copytree(source, destination)
        else:
            copyfile(source, destination)
    os.mkdir(join(self.module_dir, 'node_modules'))
    self.check_tool([])
def _find_maven_executable(self):
    '''Return the path of the mvn launcher for the current operating system.

    On UNIX this is bin/mvn; on Windows bin/mvn.cmd is preferred, falling
    back to bin/mvn.bat when mvn.cmd does not exist.
    '''
    bin_dir = join(self._m2_home, 'bin')
    if OS_Utils.is_UNIX():
        return join(bin_dir, 'mvn')
    candidate = join(bin_dir, 'mvn.cmd')
    if not os.path.isfile(candidate):
        candidate = join(bin_dir, 'mvn.bat')
    return candidate
def _perl_instdir(self):
    '''Determine a perl installation directory on the local host.

    If the PERL env var is set, its value is used. Otherwise "perl" is
    looked up in PATH and the installation directory is derived from the
    first match (two levels above the perl binary).

    Raises:
        XmakeException: when PERL is unset and perl is not in PATH.
    '''
    if 'PERL' in os.environ:
        return os.environ['PERL']
    perl_paths = OS_Utils.find_in_PATH('perl')
    if perl_paths is None:
        log.error(
            "could not determine perl installation directory (must either be set in env var 'PERL' or reside in PATH)",
            log.INFRA)
        # typo fixed in the raised message: 'instalation' -> 'installation'
        raise XmakeException(
            "perl installation directory could not be determined")
    # we arbitrarily choose the first found perl entry (instdir is two levels above perl binary)
    perl_instdir = os.path.abspath(
        os.path.join(perl_paths[0], os.path.pardir, os.path.pardir))
    log.info("using perl installation from " + perl_instdir)
    return perl_instdir
def _ant_executable(self):
    """Resolve the ant launcher to invoke.

    Resolution order: explicit self._anthome (exported as ANT_HOME), an
    ANT_HOME already present in the java exec environment, then the first
    'ant' found in PATH.

    Raises:
        XmakeException: when no ant installation can be located.
    """
    cmd = 'ant'  # +self.build_cfg.tools().tool_suffix('bat')
    if self._anthome is not None:
        self.java_exec_env.env['ANT_HOME'] = self._anthome
    if 'ANT_HOME' in self.java_exec_env.env:
        return join(self.java_exec_env.env['ANT_HOME'], 'bin', cmd)
    log.warning(
        'ANT_HOME was not set - falling back to using ant from PATH',
        log.INFRA)
    ant_paths = OS_Utils.find_in_PATH('ant')
    if ant_paths is not None:
        # bug fix: find_in_PATH returns the list of matches (see the
        # perl_paths[0] usage in _perl_instdir); return the first hit
        # instead of the whole list so callers get a path string.
        return ant_paths[0]
    log.error(
        'ant was not found in PATH and ANT_HOME was not set - aborting build',
        log.INFRA)
    raise XmakeException(
        'ant was neither configured via ANT_HOME env var nor present in PATH'
    )
def __init__(self, build_cfg):
    # Initialize the base plugin with the shared build configuration.
    BuildPlugin.__init__(self, build_cfg)
    # Cache the platform check so later build steps can branch on UNIX vs Windows.
    self.is_UNIX = OS_Utils.is_UNIX()
    # Default cosy tool version used by this plugin.
    self._cosy_version = '1.1.2'
def rm_if_target_exists():
    # Remove the buildtools target directory (closure variable) if present,
    # so stale output from a previous run cannot leak into this build.
    if os.path.exists(buildtools_target_dir):
        OS_Utils.rm_dir(buildtools_target_dir)
def java_run(self):
    """Run the ant build.

    Assembles the -D property arguments, writes a property file for values
    that cannot safely be passed on the command line, and invokes ant.
    """
    self._clean_if_requested()
    self._anthome = self.build_cfg.tools()['ant'][self._ant_version]
    log.info('found ant: ' + self._anthome)
    ant_executable = self._ant_executable()
    ant_args = []
    build_cfg = self.build_cfg
    if not OS_Utils.is_UNIX():
        # on Windows ant is started through a wrapper batch file
        ant_args.extend(
            [join(inst.get_build_plugin_dir(), 'ant_wrapper.bat')])
    ant_args.extend([
        ant_executable,
        '-Dbuild.srcdir=' + build_cfg.src_dir(),
        '-Dbuild.wrkdir=' + self._wrk_dir(),
        '-Dbuild.gendir=' + build_cfg.gen_dir(),
        '-Dbuild.genroot=' + build_cfg.genroot_dir(),
        '-Dbuild.cfgdir=' + build_cfg.cfg_dir(),
        '-Dbuild.componentroot=' + build_cfg.component_dir(),
        '-Dbuild.importdir=' + build_cfg.import_dir(),
        '-Dbuild.bsedir=' + build_cfg.build_script_ext_dir(),
        '-Dbuild.module_genroot=' + build_cfg.module_genroot_dir(),
        '-Dbuild.resultdir=' + build_cfg.result_base_dir(),
        '-Dbuild.runtime=' + build_cfg.runtime(),
        '-f', self._build_xml_file()
    ])
    props = dict()
    # not possible under NT to pass empty argument, all variants result in strange behavior or just do not work
    props['build.versionsuffix'] = stripToBlank(build_cfg.version_suffix())
    props['build.baseversion'] = stripToBlank(build_cfg.base_version())
    props['build.version'] = stripToBlank(build_cfg.version())
    for (i, d) in self.import_roots().items():
        props['build.importlabel.' + i + '.dir'] = d

    def add_tool(n, d):
        # expose each configured tool's location, id and tags as properties
        props[self._tool_property(n)] = d
        props[self._tool_id_property(
            n)] = build_cfg.configured_tools()[n].toolid()
        for t in build_cfg.configured_tools()[n].tags():
            props[self._tool_tag_property(n, t)] = 'true'
            props['build.' + self._tool_tag_property(n, t)] = 'true'
    self._handle_configured_tools(add_tool)

    def add_dep(key, d):
        # expose each dependency twice: bare and with the 'build.' prefix
        props[self._import_property(key)] = d
        props['build.' + self._import_property(key)] = d
    self._handle_dependencies(add_dep)

    def add_opts(d, prefix):
        for k in d.keys():
            props[prefix + '.' + k] = d[k]
    if not build_cfg.suppress_variant_handling():
        add_opts(build_cfg.variant_info(), 'build.variant.info')
        add_opts(build_cfg.variant_coords(), 'build.variant.coord')
    if len(props) > 0:
        filename = join(build_cfg.temp_dir(), 'ant.properties')
        with open(filename, 'w') as f:
            # explicit loop instead of map() for the side effect: map() is
            # lazy on Python 3 and would silently write nothing
            for key in props.keys():
                f.write(key + '=' + props[key].replace('\\', '\\\\') + '\n')
        ant_args.extend(['-propertyfile', filename])
    # bug fix: an ant property must be one '-Dkey=value' token (as for the
    # -Dbuild.* arguments above); passing the value as a separate argv
    # entry makes ant interpret it as a build target
    if build_cfg.build_platform() is not None:
        ant_args.append('-Dbuild.platform=' + build_cfg.build_platform())
    if build_cfg.build_mode() is not None:
        ant_args.append('-Dbuild.mode=' + build_cfg.build_mode())
    if build_cfg.skip_test():
        ant_args.append('-Dbuild.skip.test=true')
    else:
        ant_args.append('-Dbuild.do.test=true')
    if build_cfg.build_args() is not None:
        # keep original behavior: the first entry of build_args is dropped
        ant_args.extend(build_cfg.build_args()[1:])
    log.info('invoking ant: ' + ' '.join(ant_args))
    # logfile=join(self.build_cfg.temp_dir(),'ant.log')
    # ant_args.extend(['-l', logfile])
    self.call_ant(ant_args)
def prepare_sources(self):
    """Copy the component sources into the docker build context directory.

    For plain builds, generated/import/VCS folders are excluded from the copy.
    """
    log.info('copying context sources...')
    if os.path.exists(self.context_dir):
        OS_Utils.rm_dir(self.context_dir)
    if self.is_plain():
        ignored = ignore_patterns('gen*', 'import', 'cfg', '.git', 'node_modules')
    else:
        ignored = None
    copytree(self.build_cfg.src_dir(), self.context_dir, ignore=ignored)
def _build(self):
    ''' Build source files with maven and deploy them to a local
    file-based repository; optionally run metadata quality checks and
    generate release metadata. '''
    # Maven phases:
    # validate - validate the project is correct and all necessary information is available
    # compile - compile the source code of the project
    # test - test the compiled source code using a suitable unit testing framework. These tests should not require the code be packaged or deployed
    # package - take the compiled code and package it in its distributable format, such as a JAR.
    # integration-test - process and deploy the package if necessary into an environment where integration tests can be run
    # verify - run any checks to verify the package is valid and meets quality criteria
    # install - install the package into the local repository, for use as a dependency in other projects locally
    # deploy - done in an integration or release environment, copies the final package to the remote repository for sharing with other developers and projects.

    # Metadata quality check only for release or milestone build
    # See details of checks in https://wiki.wdf.sap.corp/wiki/display/LeanDI/Release+Build+Details#ReleaseBuildDetails-VersionUpdates
    if self.build_cfg.is_release() == 'direct-shipment' or self.build_cfg.is_release() == 'indirect-shipment':
        # For a customer release build use quality-check-config-customer.xml
        self._run_metadata_quality_check('-Dmetadata-quality-report.configuration=quality-check-config-customer.xml', '-Dcodesign.sap.realcodesigning=true')
    elif self.build_cfg.is_release() == 'milestone':
        # For a milestone build use quality-check-config-milestone.xml
        self._set_version_in_pom(self.build_cfg.base_version())
        self._run_metadata_quality_check('-Dmetadata-quality-report.configuration=quality-check-config-milestone.xml')
    # Compile sources and install binaries in local repository
    maven_args = []
    # Manage clean phase
    if self.build_cfg.do_clean():
        maven_args.append('clean')
    # prepare filesystem for local deployment: wipe any previous local
    # deployment directory so stale artifacts cannot survive
    if os.path.exists(self._localDeploymentPath):
        OS_Utils.rm_dir(self._localDeploymentPath)
    # build a file:// URL for maven's altDeploymentRepository
    localDeploymentUrl = urlparse.urljoin('file:', urllib.pathname2url(self._localDeploymentPath))
    # Go until install phase to install package locally and
    # to be able to use it as dependency in other local projects
    maven_args.append('deploy')
    maven_args.append('-DaltDeploymentRepository=local::default::{}'.format(localDeploymentUrl))
    maven_args.append('-DuniqueVersion=false')
    # add options for signing
    ''' History:
    1- _maven_jarsigner_plugin_options() was initially called with is_release_build() as parameter
    2- is_release() was introduced for leandi
    3- is_release_build() was reneamed into is_milestone() for xmake-dev (in xmake-dev we talk about "milestone release" builds)
    4- Certainly instead of renaming is_release_build() to is_milestone() in the _maven_jarsigner_plugin_options() call,
       it was renamed into is_release(), by the way signing worked only for leandi and not anymore for xmake-dev
    5- to work under both systems the call must be done with this value as parameter:
       self._maven_jarsigner_plugin_options(self.build_cfg.is_release() or self.build_cfg.is_milestone())
    Comment: Signing env vars are checked in the _maven_jarsigner_plugin_options() method.
    SNAPSHOT suffixs are checked in is_milestone() which also ensure that it does not run in is_release() mode
    +-----------+---------------+--------------+------------+------------------+-------------------+
    |  System   |  Build type   | is_milestone | is_release | Signing env vars | Signing activated |
    +-----------+---------------+--------------+------------+------------------+-------------------+
    | xMake-Dev |               |              |            |                  |                   |
    |           | CI/Voter/PR   | No           | No         | No               | No                |
    |           | OD Milestones | Yes          | No         | No               | No                |
    |           | OD Releases   | Yes          | No         | Yes              | Yes               |
    | xMake-Ldi |               |              |            |                  |                   |
    |           | OD Release    | No           | Yes        | Yes              | Yes               |
    +-----------+---------------+--------------+------------+------------------+-------------------+
    ASCII Table was generated with: https://ozh.github.io/ascii-tables/
    '''
    maven_args.extend(self._maven_jarsigner_plugin_options(self.build_cfg.is_release() or self.build_cfg.is_milestone()))
    if self.build_cfg.skip_test():
        maven_args.append('-Dmaven.test.skip=true')
    # add user options (split shell-style so quoted values survive)
    maven_args.extend(shlex.split(' '.join(self._maven_user_options)))
    # call mvn command
    self._mvn(maven_args)
    # Store build dependencies, should be done before _store_build_dependencies to ensure that the leandi plugin used for
    # generating metadata are also stored in the dependencies for reproducing the build with the exact leandi plugin versions for metadata generation
    # Generate release metadata data only in customer or in milestone
    if self.build_cfg.is_release():
        if self._ldi_metadata:
            log.info('building leandi metadata')
            self._store_ldi_metadata()
        else:
            log.info('leandi metadata generation disabled')
    # Store build dependencies
    log.info('building dependencies')
    self._store_build_dependencies()
def _import(self):
    """Return the runtime GA coordinate for the JDK archive, with the
    classifier chosen to match the current operating system."""
    platform_classifier = "linux-x64" if OS_Utils.is_UNIX() else "windows-x64"
    return utils.runtime_ga(
        "com.oracle.download.java:jdk:tar.gz",
        "classifier",
        platform_classifier)