def grab_configuration_used(self, top_location, readme_fp, module):
    """Include the install configuration into the bundle for reuse.

    For a full bundle (module is None) the install configuration is written
    into a build-config directory inside the bundle. For an add-on package,
    usage instructions for the single module are written to the readme
    instead. In both cases version/environment information is appended.

    Parameters
    ----------
    top_location : str
        resulting location - __temp__
    readme_fp : open file
        opened readme file pointer to append to
    module : InstallModule or None
        None for a full bundle; otherwise the module packaged as an add-on
    """
    try:
        isa_version, isa_commit_hash = self.find_isa_version()
        if module is None:
            LOG.write('Copying build configuration into bundle.')
            writer = Writer.ConfigWriter(self.install_config)
            build_config_dir = os.path.join(top_location, 'build-config')
            writer.write_install_config(filepath=build_config_dir)
            self.write_readme_heading(
                'Build environment version information', readme_fp)
        else:
            # Typo fix: heading previously read 'exisitng'.
            self.write_readme_heading(
                'Implementing add on in existing bundle', readme_fp)
            readme_fp.write(
                'This add on tarball contains a folder with a compiled version of {}.\n'
                .format(module.name))
            readme_fp.write(
                'To use it with an existing bundle, please copy the folder into {} in the target bundle.\n'
                .format(module.rel_path))
            readme_fp.write(
                'It is also recommended to edit the build-config for the bundle to reflect the inclusion of this module.\n\n'
            )
        readme_fp.write('Build configuration:\n\n')
        readme_fp.write(
            'installSynApps Version: {}\n\n'.format(isa_version))
        # A known commit hash means we are on a dev snapshot; otherwise
        # point the user at the released version tag.
        if isa_commit_hash is not None:
            readme_fp.write(
                'To grab this version:\n\n\tgit clone https://github.com/epicsNSLS2-deploy/installSynApps\n'
            )
            readme_fp.write('\tgit checkout {}\n'.format(isa_commit_hash))
        else:
            readme_fp.write(
                'To grab this version use:\n\n\tgit clone https://github.com/epicsNSLS2-deploy/installSynApps\n'
            )
            readme_fp.write('\tgit checkout -q {}\n'.format(isa_version))
        readme_fp.write(
            'To regenerate sources for this bundle, grab installSynApps as described above, and use:\n\n'
        )
        readme_fp.write('\t./installCLI.py -c BUILD_CONFIG -p\n\n')
        readme_fp.write(
            'where BUILD_CONFIG is the path to the build-config directory in this bundle.\n'
        )
        readme_fp.write(
            'Make sure to specify an install location as well\n{}\n\n'.format('-' * 64))
        readme_fp.write('{:<20}{}\n'.format('Python 3 Version:',
                                            sys.version.split()[0]))
        readme_fp.write('{:<20}{}\n'.format('OS Class:', self.OS))
        readme_fp.write('{:<20}{}\n'.format('Build Date:',
                                            datetime.datetime.now()))
    # Was a bare 'except:', which also swallowed SystemExit and
    # KeyboardInterrupt; this remains best-effort but only for real errors.
    except Exception:
        LOG.debug('Failed to copy install configuration into bundle.')
def create_opi_package(self):
    """Create a tarball bundling all OPI screen files.

    Returns
    -------
    int
        status of tar creation command
    """
    # Ensure the output directory exists before doing any work.
    try:
        if not os.path.exists(self.output_location):
            os.mkdir(self.output_location)
    except OSError:
        return -1

    self.start_timer()
    LOG.write('Beginning construction of opi tarball...')
    result = self.create_opi_tarball()
    LOG.write('Tarring took {} seconds'.format(self.stop_timer()))
    self.create_bundle_cleanup_tool()
    return result
def build_all(self):
    """Run the full build of every auto-build module, in order.

    Returns
    -------
    int
        0 if success, number of failed modules otherwise
    list of str
        List of module names that failed to compile
    """
    failed = []
    for module in self.install_config.get_module_list():
        if module.build != "YES":
            continue
        if self.build_module(module.name) != 0:
            failed.append(module.name)
            # A failed critical module makes further building pointless.
            if module.name in self.critical_modules:
                break
        # After base, align every support module's RELEASE files.
        if module.name == 'EPICS_BASE':
            if self.make_support_releases_consistent() != 0:
                LOG.write('Failed to make releases consistent...')
                break
    return len(failed), failed
def acquire_dependecies(self, dependency_script_path):
    """Run the dependency install shell/batch script.

    NOTE: the method name (with its typo) is kept for backward
    compatibility with existing callers.

    Parameters
    ----------
    dependency_script_path : str
        path to dependency shell/batch script
    """
    LOG.debug('Grabbing dependencies via script {}'.format(
        dependency_script_path))
    if os.path.exists(dependency_script_path) and os.path.isfile(
            dependency_script_path):
        # Windows batch files run directly; everything else goes through
        # bash. (Renamed from 'exec', which shadowed the builtin.)
        if dependency_script_path.endswith('.bat'):
            command = dependency_script_path
        else:
            command = 'bash {}'.format(dependency_script_path)
        LOG.print_command(command)
        proc = Popen(command.split(' '))
        proc.wait()
        ret = proc.returncode
        if ret != 0:
            LOG.write(
                'Dependency script exited with non-zero exit code: {}'.
                format(ret))
def perform_fix_out_of_order_dependencies(self):
    """Repair the build order until every dependency precedes its dependent.

    Runs check_dependency_order_valid in a loop; each time it reports a
    module scheduled before one of its dependencies, the two are swapped
    and the check is re-run, until the order is valid.
    """
    while True:
        valid, current, dep = self.check_dependency_order_valid()
        if valid:
            break
        self.install_config.swap_module_positions(current, dep)
        LOG.write('Swapping build order of {} and {}'.format(current, dep))
def create_opi_tarball(self):
    """Collect autoconverted .opi files from the install tree and tar them.

    OPI screens found in autoconvert directories are staged under a
    temporary folder in the output location, then packed into a uniquely
    named .tgz archive.

    Returns
    -------
    int
        0 if suceeded, nonzero otherwise
    """
    staging_base = os.path.join(self.output_location, '__opis_temp__')
    staging_opis = os.path.join(staging_base, 'opis')
    try:
        os.mkdir(staging_base)
        os.mkdir(staging_opis)
    except OSError:
        LOG.write('Error creating ' + staging_opis + ' directory', )
    # Stage every autoconverted opi screen found under the install tree.
    for root, dirs, files in os.walk(self.install_config.install_location,
                                     topdown=True):
        for fname in files:
            if '.opi' in fname and 'autoconvert' in root:
                src = os.path.join(root, fname)
                try:
                    shutil.copy(src, staging_opis)
                except OSError:
                    LOG.debug("Can't copy {} to {}".format(src, staging_opis))
    # Pick a tarball name that does not collide with an existing archive.
    base_name = 'opis_{}'.format(self.install_config.get_core_version())
    tar_name = base_name
    suffix = 1
    while os.path.exists(
            os.path.join(self.output_location, tar_name + '.tgz')):
        tar_name = '{}_({})'.format(base_name, suffix)
        suffix += 1
    out = subprocess.call(
        ['tar', 'czf', tar_name + '.tgz', '-C', staging_base, '.'])
    shutil.rmtree(staging_base)
    os.rename(tar_name + '.tgz',
              os.path.join(self.output_location, tar_name + '.tgz'))
    return out
def create_add_on_package(self, filename, module_name):
    """Driver for building a single-module add-on package.

    Looks up the module, makes sure the output directory exists, builds
    the add-on tarball, and reports how long tarring took.

    Parameters
    ----------
    filename : str
        filename of output bundle
    module_name : str
        name of module to create an add-on package for

    Returns
    -------
    int
        status of tar creation command
    """
    module = self.install_config.get_module_by_name(module_name)
    if module is None:
        return -1
    # Guarantee the output directory exists before packaging.
    try:
        if not os.path.exists(self.output_location):
            os.mkdir(self.output_location)
    except OSError:
        return -1
    self.start_timer()
    LOG.write('Beginning construction of {} add on...'.format(module.name))
    status = self.create_single_module_tarball(filename, module)
    LOG.write('Tarring took {} seconds'.format(self.stop_timer()))
    self.create_bundle_cleanup_tool()
    return status
def make_support_releases_consistent(self):
    """Run ``make release`` so support module RELEASE files stay consistent.

    Returns
    -------
    int
        return code of make release command call
    """
    LOG.write('Running make release to keep releases consistent.')
    command = 'make -C {} release'.format(self.install_config.support_path)
    LOG.print_command(command)
    process = Popen(command.split(' '))
    process.wait()
    code = process.returncode
    if code == 0:
        return code
    LOG.write('make release exited with non-zero exit code: {}'.format(code))
    return code
def create_tarball(self, filename, flat_format):
    """Create the bundle tarball for the given filename.

    Stages EPICS base and all packaged modules under __temp__ (optionally
    in a non-flat support/areaDetector layout), then hands off to
    cleanup_tar_staging, which writes the README and produces the tarball.

    Parameters
    ----------
    filename : str
        name for output tarball and readme file
    flat_format : bool
        flag to toggle generating flat vs. non-flat binaries

    Returns
    -------
    out : int
        0 if success <0 if failure
    """
    # os.path.join for consistency with cleanup_tar_staging, which copies
    # this same README file.
    readme_fp = open(
        os.path.join(self.output_location, 'README_{}.txt'.format(filename)),
        'w')
    self.setup_tar_staging(filename, readme_fp)
    self.grab_base('__temp__', readme_fp)
    support_top = '__temp__'
    # Bug fix: ad_top was only assigned in the non-flat branch, so packaging
    # an $(AREA_DETECTOR) module in flat format raised NameError. In flat
    # format AD modules belong at the top level alongside everything else.
    ad_top = support_top
    if not flat_format:
        LOG.write('Non-flat output binary structure selected.')
        support_top = os.path.join('__temp__', 'support')
        os.mkdir(support_top)
        ad_top = os.path.join(support_top, 'areaDetector')
        os.mkdir(ad_top)
    for module in self.install_config.get_module_list():
        # NOTE: 'required_in_pacakge' matches the attribute name declared
        # elsewhere in this class (typo included).
        if (module.name in self.required_in_pacakge or module.package ==
                "YES") and not module.name == "EPICS_BASE":
            if module.rel_path.startswith('$(AREA_DETECTOR)'):
                self.grab_module(ad_top, module, readme_fp)
            else:
                self.grab_module(support_top, module, readme_fp)
    result = self.cleanup_tar_staging(filename, readme_fp)
    return result
def create_package(self, filename, flat_format=True):
    """Top level packager driver.

    Ensures the output directory exists, builds the tarball, and logs the
    elapsed tarring time.

    Parameters
    ----------
    filename : str
        filename of output bundle
    flat_format : bool
        Flag to specify flat vs. non-flat binaries

    Returns
    -------
    int
        status of tar creation command
    """
    # Guarantee the output directory exists before bundling.
    try:
        if not os.path.exists(self.output_location):
            os.mkdir(self.output_location)
    except OSError:
        return -1
    self.start_timer()
    LOG.write('Beginning bundling process...')
    status = self.create_tarball(filename, flat_format)
    LOG.write('Tarring took {} seconds'.format(self.stop_timer()))
    self.create_bundle_cleanup_tool()
    return status
def sync_all_module_tags(install_config, save_path=None,
                         overwrite_existing=True):
    """Sync every module's version tag with its git repository.

    Parameters
    ----------
    install_config : InstallConfiguration
        instance of install configuration for which to update tags
    save_path : str
        None by default. If set, will save the install configuration to the
        given location after updating.
    overwrite_existing : bool
        Flag that tells installSynApps to overwrite or not the existing
        module tags. Default: True
    """
    LOG.write('Syncing...')
    LOG.write('Please wait while tags are synced - this may take a while...')
    for module in install_config.get_module_list():
        sync_module_tag(module.name, install_config)
    if save_path is None:
        return True
    # Persist the updated configuration when a save location was given.
    writer = IO.config_writer.ConfigWriter(install_config)
    ret, message = writer.write_install_config(
        save_path, overwrite_existing=overwrite_existing)
    LOG.write('Updated install config saved to {}'.format(save_path))
    return ret
def perform_dependency_valid_check(self):
    """Validate that every auto-build module's dependencies will be built.

    Searches each module's configure/RELEASE file for dependencies
    (populating the module's dependencies attribute) and checks them
    against the loaded install config. Modules with unsatisfied
    dependencies are switched to build = "NO".

    Returns
    -------
    list of str
        A list of dependencies for modules that are not set to build.
    """
    dep_errors = []
    LOG.write(
        'The following dependencies have been identified for each auto-build module:'
    )
    for module in self.install_config.get_module_list():
        if module.build != "YES" or module.name == 'SUPPORT':
            continue
        self.check_module_dependencies(module)
        if len(module.dependencies) > 0:
            LOG.write('{:<16} - {}'.format(module.name, module.dependencies))
        deps_satisfied = True
        for dep in module.dependencies:
            dep_mod = self.install_config.get_module_by_name(dep)
            if dep_mod is None:
                deps_satisfied = False
                dep_errors.append(
                    'Dependency {} for module {} not in install config.'
                    .format(dep, module.name))
            elif dep_mod.build == 'NO':
                deps_satisfied = False
                dep_errors.append(
                    'Dependency {} for module {} not being built.'.format(
                        dep_mod.name, module.name))
        # Disable the module itself when any dependency is unavailable.
        if not deps_satisfied:
            module.build = "NO"
    return dep_errors
def checkout_module(self, module):
    """Check out the configured version of a module.

    Only git modules pinned to something other than "master" actually run
    a checkout; everything else is treated as already correct.

    Parameters
    ----------
    module : InstallModule
        Module that is being checked out

    Returns
    -------
    int
        -1 if input was not an InstallModule or its absolute path is not
        known or the checkout fails, 0 if success (or no checkout needed)
    """
    ret = -1
    LOG.debug('Checking out version for module {}'.format(module.name))
    if isinstance(module, IM.InstallModule):
        if module.abs_path is not None:  # was '!= None'
            ret = 0
            if module.version != "master" and module.url_type == "GIT_URL":
                current_loc = os.getcwd()
                os.chdir(module.abs_path)
                # try/finally guarantees the working directory is restored
                # even if spawning git raises.
                try:
                    command = "git checkout -q {}".format(module.version)
                    LOG.print_command(command)
                    proc = Popen(command.split(' '))
                    proc.wait()
                    ret = proc.returncode
                finally:
                    os.chdir(current_loc)
                if ret == 0:
                    LOG.write('Checked out version {}'.format(
                        module.version))
                else:
                    LOG.write(
                        'Checkout of version {} failed for module {}.'.
                        format(module.version, module.name))
    return ret
def cleanup_tar_staging(self, filename, readme_fp, module=None):
    """Finalize the staged bundle: write README, tar it, and clean up.

    Parameters
    ----------
    filename : str
        file path string
    readme_fp : open file
        The opened readme file pointer
    module : InstallModule
        Optional install module to create single module add-on package

    Returns
    -------
    int
        Return code of tar creation call.
    """
    readme_fp.write('\n\n')
    self.grab_configuration_used('__temp__', readme_fp, module)
    readme_fp.close()
    LOG.debug(
        'Generating README file with module version and append instructions...'
    )
    shutil.copy(
        os.path.join(self.output_location,
                     'README_{}.txt'.format(filename)),
        os.path.join('__temp__', 'README_{}.txt'.format(filename)))
    LOG.write('Tarring...')
    out = subprocess.call(
        ['tar', 'czf', filename + '.tgz', '-C', '__temp__', '.'])
    # Bug fix: subprocess.call returns tar's exit status, which is a
    # POSITIVE value on failure (negative only for signal termination).
    # The old 'out < 0' check let failed tar runs fall through and attempt
    # to rename a tarball that may not exist.
    if out != 0:
        return out
    os.rename(filename + '.tgz',
              os.path.join(self.output_location, filename + '.tgz'))
    LOG.write('Done. Wrote tarball to {}.'.format(self.output_location))
    LOG.write('Name of tarball: {}'.format(
        os.path.join(self.output_location, filename + '.tgz')))
    shutil.rmtree('__temp__')
    return out
def sync_module_tag(module_name, install_config, save_path=None):
    """Function that syncs module version tags with those hosted with git.

    This function is still buggy, and certain modules do not update correctly

    Lists remote tags with ``git ls-remote --tags``, selects the best one by
    comparing the numeric fields of each tag name (tags starting with 'R'
    are preferred over ones that do not), and bumps the module version if
    the best remote tag is strictly newer.

    Parameters
    ----------
    module_name : str
        The name of the module to sync
    install_config : InstallConfiguration
        instance of install configuration for which to update tags
    save_path : str
        None by default. If set, will save the install configuration to the
        given location after updating.
    """
    module = install_config.get_module_by_name(module_name)
    # Only sync git-based modules pinned to a tag; 'master' and blacklisted
    # modules are left untouched.
    if module.url_type == 'GIT_URL' and module.version != 'master' and module.name not in update_tags_blacklist:
        account_repo = '{}{}'.format(module.url, module.repository)
        LOG.print_command("git ls-remote --tags {}".format(account_repo))
        sync_tags_proc = Popen(['git', 'ls-remote', '--tags', account_repo],
                               stdout=PIPE, stderr=PIPE)
        out, err = sync_tags_proc.communicate()
        ret = out.decode('utf-8')
        tags_temp = ret.splitlines()
        tags = []
        for tag in tags_temp:
            # Each line looks like '<sha>\trefs/tags/<name>'; keep only <name>.
            tags.append(tag.rsplit('/')[-1])
        if len(tags) > 0:
            # Start with the first tag as the current best candidate.
            best_tag = tags[0]
            best_tag_ver_str_list = re.split(r'\D+', tags[0])
            best_tag_ver_str_list = [
                num for num in best_tag_ver_str_list if num.isnumeric()
            ]
            best_tag_version_numbers = list(map(int, best_tag_ver_str_list))
            for tag in tags:
                # Split each tag into its numeric fields, e.g. R1-2-3 -> [1, 2, 3].
                tag_ver_str_list = re.split(r'\D+', tag)
                tag_ver_str_list = [
                    num for num in tag_ver_str_list if num.isnumeric()
                ]
                tag_version_numbers = list(map(int, tag_ver_str_list))
                # Lexicographic compare of numeric fields, left to right.
                for i in range(len(tag_version_numbers)):
                    if best_tag.startswith('R') and not tag.startswith('R'):
                        # An 'R'-style tag always beats a non-'R' tag.
                        break
                    elif not best_tag.startswith('R') and tag.startswith('R'):
                        best_tag = tag
                        best_tag_version_numbers = tag_version_numbers
                        break
                    elif i == len(
                        best_tag_version_numbers
                    ) or tag_version_numbers[i] > best_tag_version_numbers[i]:
                        # Candidate with more fields (a tie so far) or a higher
                        # field wins.
                        best_tag = tag
                        best_tag_version_numbers = tag_version_numbers
                        break
                    elif tag_version_numbers[i] < best_tag_version_numbers[i]:
                        break
            tag_updated = False
            module_ver_str_list = re.split(r'\D+', module.version)
            module_ver_str_list = [
                num for num in module_ver_str_list if num.isnumeric()
            ]
            module_version_numbers = list(map(int, module_ver_str_list))
            # Update only when the best remote tag is strictly newer than the
            # currently configured version.
            for i in range(len(best_tag_version_numbers)):
                if i == len(
                    module_version_numbers
                ) or best_tag_version_numbers[i] > module_version_numbers[i]:
                    tag_updated = True
                    LOG.write(
                        'Updating {} from version {} to version {}'.format(
                            module.name, module.version, best_tag))
                    module.version = best_tag
                    break
                elif best_tag_version_numbers[i] < module_version_numbers[i]:
                    break
            if not tag_updated:
                LOG.debug('Module {} already at latest version: {}'.format(
                    module.name, module.version))
    if save_path is not None:
        writer = IO.config_writer.ConfigWriter(install_config)
        ret, message = writer.write_install_config(save_path,
                                                   overwrite_existing=True)
        LOG.write('Updated install config saved to {}'.format(save_path))
        return ret
    else:
        return True
def create_new_install_config(install_location, configuration_type,
                              update_versions=True, save_path=None):
    """Helper function for creating new install configurations

    Parameters
    ----------
    install_location : str
        The path to the install location
    configuration_type : str
        The type of new install configuration ('ad', 'motor', or anything
        else for the full template)
    update_versions : bool
        Flag to tell config to update versions from git remotes.
    save_path : str
        If defined, save config to specified path.

    Returns
    -------
    InstallConfiguration or None
        the parsed configuration, or None on parse failure
    str or None
        parser warning/error message, or None
    """
    # Select the bundled template matching the requested configuration type.
    if configuration_type.lower() == 'ad':
        install_template = 'NEW_CONFIG_AD'
    elif configuration_type.lower() == 'motor':
        install_template = 'NEW_CONFIG_MOTOR'
    else:
        install_template = 'NEW_CONFIG_ALL'
    if save_path is not None:
        LOG.write(
            '\nCreating new install configuration with template: {}'.format(
                install_template))
        write_loc = os.path.abspath(save_path)
        LOG.write('Target output location set to {}'.format(write_loc))
    parser = IO.config_parser.ConfigParser('resources')
    install_config, message = parser.parse_install_config(
        config_filename=install_template, force_location=install_location,
        allow_illegal=True)
    if install_config is None:
        LOG.write('Parse Error - {}'.format(message))
        # Bug fix: previously control fell through and the sync/write steps
        # below dereferenced install_config, raising AttributeError on None.
        return None, message
    elif message is not None:
        LOG.write('Warning - {}'.format(message))
    else:
        LOG.write('Loaded template install configuration.')
    if update_versions and save_path is not None:
        ret = sync_all_module_tags(install_config, save_path=write_loc,
                                   overwrite_existing=False)
    elif update_versions and save_path is None:
        ret = sync_all_module_tags(install_config, overwrite_existing=False)
    elif not update_versions and save_path is not None:
        writer = IO.config_writer.ConfigWriter(install_config)
        ret, message = writer.write_install_config(filepath=write_loc)
    else:
        ret = True
    if not ret:
        LOG.write('Write Error - {}'.format(message))
    elif save_path is not None:
        LOG.write('\nWrote new install configuration to {}.'.format(write_loc))
        LOG.write(
            'Please edit INSTALL_CONFIG file to specify build specifications.')
        LOG.write('Then run ./installCLI.py -c {} to run the install configuration.'.format(write_loc))
    return install_config, message
def build_module(self, module_name):
    """Build a single module, building unbuilt dependencies first.

    Uses a custom build script when one is configured for the module;
    otherwise runs make followed by the specified make flag in the module
    root directory.

    Parameters
    ----------
    module_name : str
        The name of the module being built

    Returns
    -------
    int
        The return code of the build process, 0 if module is not
        buildable (ex. UTILS)
    """
    # Some packages (ex. UTILS) have nothing to compile.
    if module_name in self.non_build_packages:
        return 0
    LOG.write('Building module {}'.format(module_name))
    module = self.install_config.get_module_by_name(module_name)
    # Recursively build any dependency that has not been built yet.
    for dep in module.dependencies:
        if dep not in self.built:
            self.build_module(dep)
    if module.custom_build_script_path is None:
        command = "make -C {} {}".format(module.abs_path, self.make_flag)
        LOG.print_command(command)
        proc = Popen(command.split(' '))
        proc.wait()
        ret = proc.returncode
        if ret == 0:
            self.built.append(module_name)
            LOG.write('Built module {}'.format(module_name))
        else:
            LOG.write('Failed to build module {}'.format(module_name))
    else:
        LOG.write('Detected custom build script located at {}'.format(
            module.custom_build_script_path))
        ret = self.build_via_custom_script(module)
        if ret == 0:
            self.built.append(module_name)
            LOG.write('Built module {} via custom script'.format(module_name))
        else:
            LOG.write(
                'Custom script for module {} exited with error code {}.'.
                format(module_name, ret))
    return ret
def write_install_config(self, filepath=None, overwrite_existing=False):
    """Function that saves loaded install configuration

    Main saving function for writing install config. Can create a save
    directory, then saves main install configuration, build flags, and
    injector files.

    Parameters
    ----------
    filepath : str
        The filepath into which to save the install configuration.
        Defaults to addtlConfDirs/config$DATE, now computed at call time
        (the old default string was baked in once at import time, so a
        long-running session always reused a stale date).
    overwrite_existing : bool
        If True, remove a previously saved configuration at filepath first.

    Returns
    -------
    bool
        True if successful, False otherwise
    str
        None if successfull, otherwise error message
    """
    if filepath is None:
        filepath = 'addtlConfDirs/config{}'.format(datetime.date.today())
    # Clear out a previous save when asked to overwrite.
    if overwrite_existing and os.path.exists(filepath):
        try:
            shutil.rmtree(os.path.join(filepath, 'injectionFiles'))
            shutil.rmtree(os.path.join(filepath, 'macroFiles'))
            os.remove(os.path.join(filepath, 'INSTALL_CONFIG'))
        except PermissionError:
            return False, 'Insufficient Permissions'
    # Check if path exists, create it if it doesn't, translating common
    # errno values into readable error messages.
    if not os.path.exists(filepath):
        try:
            os.mkdir(filepath)
        except OSError as err:
            if err.errno == errno.EACCES:
                return False, 'Permission Error!'
            elif err.errno == errno.EEXIST:
                return False, 'Path already exists!'
            elif err.errno == errno.ENOSPC:
                return False, 'No space on device!'
            elif err.errno == errno.EROFS:
                return False, 'Read-Only File System!'
            else:
                return False, 'Unknown Error'
    try:
        os.mkdir(os.path.join(filepath, 'injectionFiles'))
        os.mkdir(os.path.join(filepath, 'macroFiles'))
        if not os.path.exists(os.path.join(filepath, 'customBuildScripts')):
            os.mkdir(os.path.join(filepath, 'customBuildScripts'))
    except OSError:
        LOG.write('Failed to make configuration directories!')
        return False, 'Unknown Error'
    LOG.debug('Writing injector files.')
    self.write_injector_files(filepath)
    LOG.debug('Writing build flags.')
    self.write_build_flags(filepath)
    LOG.debug('Writing custom build scripts.')
    self.write_custom_build_scripts(filepath)
    LOG.debug('Writing INSTALL_CONFIG file.')
    # Context manager closes the file even if one of the writes fails
    # (the old code opened it manually and could leak the handle).
    with open(os.path.join(filepath, "INSTALL_CONFIG"),
              "w+") as new_install_config:
        new_install_config.write(
            '#\n# INSTALL_CONFIG file saved by installSynApps on {}\n#\n\n'.
            format(datetime.datetime.now()))
        new_install_config.write("INSTALL={}\n\n\n".format(
            self.install_config.install_location))
        new_install_config.write(
            '#MODULE_NAME MODULE_VERSION MODULE_PATH MODULE_REPO CLONE_MODULE BUILD_MODULE PACKAGE_MODULE\n'
        )
        new_install_config.write(
            '#--------------------------------------------------------------------------------------------------------------------------------------------------\n'
        )
        current_url = ""
        for module in self.install_config.get_module_list():
            # Emit a URL line only when the repo base URL changes.
            if module.url != current_url:
                new_install_config.write("\n{}={}\n\n".format(
                    module.url_type, module.url))
                current_url = module.url
            new_install_config.write(
                "{:<16} {:<20} {:<40} {:<24} {:<16} {:<16} {}\n".format(
                    module.name, module.version, module.rel_path,
                    module.rel_repo, module.clone, module.build,
                    module.package))
    return True, None
def sync_github_tags(user, passwd, install_config, save_path=None):
    """Function that syncs module version tags with those found on github.

    This function is still buggy, and certain modules do not update correctly

    Parameters
    ----------
    user : str
        github username
    passwd : str
        github password
    install_config : InstallConfiguration
        instance of install configuration for which to update tags
    save_path : str
        None by default. If set, will save the install configuration to the
        given location after updating.
    """
    try:
        LOG.write(
            'Syncing...',
            'Please wait while tags are synced - this may take a while...')
        g = Github(user, passwd)
        for module in install_config.get_module_list():
            # Only github-hosted, tag-pinned, non-blacklisted modules.
            if module.url_type == 'GIT_URL' and 'github' in module.url and module.version != 'master' and module.name not in update_tags_blacklist:
                account_repo = '{}/{}'.format(
                    module.url.split('/')[-2], module.repository)
                repo = g.get_repo(account_repo)
                if repo is not None:
                    tags = repo.get_tags()
                    if tags.totalCount > 0 and module.name != 'EPICS_BASE':
                        # First pass: prefer 'R<digit>...' style tags.
                        tag_found = False
                        for tag in tags:
                            if tag.name.startswith(
                                    'R') and tag.name[1].isdigit():
                                if tag.name == module.version:
                                    tag_found = True
                                    break
                                LOG.write(
                                    'Updating {} from version {} to version {}'
                                    .format(module.name, module.version,
                                            tag.name))
                                module.version = tag.name
                                tag_found = True
                                break
                        # Second pass: fall back to purely numeric tags.
                        if not tag_found:
                            for tag in tags:
                                if tag.name[0].isdigit(
                                ) and tag.name != module.version:
                                    LOG.write(
                                        'Updating {} from version {} to version {}'
                                        .format(module.name, module.version,
                                                tag.name))
                                    # NOTE(review): assigns tags[0].name, not
                                    # tag.name as the log message implies —
                                    # looks suspicious; confirm upstream intent
                                    # before changing.
                                    module.version = tags[0].name
                                    break
                                elif tag.name[0].isdigit():
                                    break
                    elif module.name == 'EPICS_BASE':
                        # Base is kept on the newest R7 series tag.
                        for tag in tags:
                            if tag.name.startswith('R7'):
                                if tag.name != module.version:
                                    LOG.write(
                                        'Updating {} from version {} to version {}'
                                        .format(module.name, module.version,
                                                tag.name))
                                    module.version = tag.name
                                break
        if save_path is not None:
            writer = IO.config_writer.ConfigWriter(install_config)
            writer.write_install_config(save_path)
            LOG.write('Updated install config saved to {}'.format(save_path))
    # Bug fix: was a bare 'except:', which also swallowed SystemExit and
    # KeyboardInterrupt; network/auth failures are still treated best-effort.
    except Exception:
        LOG.write('ERROR - Possibly invalid Github credentials')
def clone_module(self, module, recursive=False):
    """Function responsible for cloning each module into the appropriate location

    First checks if the module uses git or a download, and whether it needs
    to be recursive then, uses the information in the module object along
    with subprocess commands to clone the module. WGET downloads that are
    tar/zip archives are unpacked in place after the download.

    Parameters
    ----------
    module : InstallModule
        InstallModule currently being cloned
    recursive=False
        Flag that decides if git clone should be done recursively

    Returns
    -------
    int
        0 on success, -1 on clone/unpack failure, -2 if the absolute path
        is not known, -3 if input was not an InstallModule
    """
    LOG.debug('Cloning module {}'.format(module.name))
    if isinstance(module, IM.InstallModule):
        if module.abs_path != None:
            ret = -1
            # A fresh clone always replaces any existing checkout.
            if os.path.exists(module.abs_path):
                shutil.rmtree(module.abs_path)
            # NOTE(review): if url_type is neither GIT_URL nor WGET_URL,
            # 'command' is never assigned and the Popen below would raise
            # NameError — presumably url_type is always one of the two;
            # confirm against the config parser.
            if not recursive and module.url_type == "GIT_URL":
                command = "git clone {} {}".format(
                    module.url + module.repository, module.abs_path)
            elif recursive and module.url_type == "GIT_URL":
                command = "git clone --recursive {} {}".format(
                    module.url + module.repository, module.abs_path)
            elif module.url_type == "WGET_URL":
                if platform == "win32":
                    command = "wget --no-check-certificate -P {} {}".format(
                        module.abs_path, module.url + module.repository)
                else:
                    command = 'wget -P {} {}'.format(
                        module.abs_path, module.url + module.repository)
            LOG.print_command(command)
            proc = Popen(command.split(' '))
            proc.wait()
            ret = proc.returncode
            if ret == 0:
                LOG.write('Cloned module {} successfully.'.format(
                    module.name))
            else:
                LOG.write('Failed to clone module {}.'.format(module.name))
                return -1
            # Downloaded archives are unpacked into the module directory.
            if module.url_type == "WGET_URL":
                if (module.repository.endswith(".tar.gz")
                        or module.repository.endswith(".tgz")) and ret == 0:
                    command = "tar -xzf {} -C {} --strip-components=1".format(
                        os.path.join(module.abs_path, module.repository),
                        module.abs_path)
                elif module.repository.endswith(".zip") and ret == 0:
                    # NOTE(review): unzip's '-C' flag means case-insensitive
                    # name matching, not target directory ('-d DIR' does
                    # that) — looks like a bug; confirm before changing.
                    command = "unzip {} -C {}".format(
                        os.path.join(module.abs_path, module.repository),
                        module.abs_path)
                LOG.print_command(command)
                proc = Popen(command.split(' '))
                proc.wait()
                ret = proc.returncode
                if ret == 0:
                    LOG.write('Unpacked module {} successfully.'.format(
                        module.name))
                else:
                    LOG.write('Failed to unpack module {}.'.format(
                        module.name))
            if ret == 0:
                return ret
            return -1
        return -2
    return -3