def perform_dependency_valid_check(self):
    """Check each auto-build module's dependencies against the install config.

    Populates each module's ``dependencies`` attribute via
    ``check_module_dependencies``, then verifies that every dependency is
    present in the loaded install config and scheduled to be built. Any
    module with an unmet dependency has its build flag forced to "NO".

    Returns
    -------
    list of str
        Error messages for dependencies that are missing or not set to build.
    """
    dep_errors = []
    LOG.write('The following dependencies have been identified for each auto-build module:')
    for module in self.install_config.get_module_list():
        # Only modules actually scheduled for build (SUPPORT is a meta-module)
        if module.build != "YES" or module.name == 'SUPPORT':
            continue
        has_error = False
        self.check_module_dependencies(module)
        if len(module.dependencies) > 0:
            LOG.write('{:<16} - {}'.format(module.name, module.dependencies))
        for dep_name in module.dependencies:
            dep_module = self.install_config.get_module_by_name(dep_name)
            if dep_module is None:
                has_error = True
                dep_errors.append('Dependency {} for module {} not in install config.'.format(dep_name, module.name))
            elif dep_module.build == 'NO':
                has_error = True
                dep_errors.append('Dependency {} for module {} not being built.'.format(dep_module.name, module.name))
        # Disable the build when any dependency problem was found
        if has_error:
            module.build = "NO"
    return dep_errors
def acquire_dependecies(self, dependency_script_path):
    """Run a dependency install shell/batch script.

    Note: the method name (with its historical typo) is preserved for
    caller compatibility.

    Parameters
    ----------
    dependency_script_path : str
        Path to the dependency shell (.sh) or batch (.bat) script.
    """
    LOG.debug('Grabbing dependencies via script {}'.format(dependency_script_path))
    if os.path.exists(dependency_script_path) and os.path.isfile(dependency_script_path):
        # Fix: renamed local variable `exec` -> `command`; `exec` shadows the
        # Python builtin of the same name.
        if dependency_script_path.endswith('.bat'):
            command = dependency_script_path
        else:
            command = 'bash {}'.format(dependency_script_path)
        LOG.print_command(command)
        proc = Popen(command.split(' '))
        proc.wait()
        ret = proc.returncode
        if ret != 0:
            LOG.write('Dependency script exited with non-zero exit code: {}'.format(ret))
def generate_build_config(self, top_location, module, readme_fp):
    """Write the build configuration used to create the source/binary package.

    Parameters
    ----------
    top_location : str
        Output location of the package.
    module : installSynApps.DataModel.install_module.InstallModule
        If not None, the module being packaged as an add-on.
    readme_fp : file pointer
        Open file pointer for the README file.
    """
    if module is None:
        # Full-bundle case: persist the whole install config alongside the bundle
        LOG.write('Copying build configuration into bundle.')
        writer = WRITER.ConfigWriter(self.install_config)
        build_config_dir = installSynApps.join_path(top_location, 'build-config')
        writer.write_install_config(filepath=build_config_dir, overwrite_existing=True)
        self.write_readme_heading('Build environment version information', readme_fp)
    else:
        # Add-on case: only write usage instructions into the README.
        # Fix: corrected typo 'exisitng' -> 'existing' in the user-facing heading.
        self.write_readme_heading('Implementing add on in existing bundle', readme_fp)
        readme_fp.write('This add on tarball contains a folder with a compiled version of {}.\n'.format(module.name))
        readme_fp.write('To use it with an existing bundle, please copy the folder into {} in the target bundle.\n'.format(module.rel_path))
        readme_fp.write('It is also recommended to edit the build-config for the bundle to reflect the inclusion of this module.\n\n')
def create_opi_package(self):
    """Build a tarball bundle containing all collected opi files.

    Returns
    -------
    int
        Status of the tar creation command, or -1 if the output
        directory could not be created.
    """
    # Guarantee the output directory exists before staging anything
    if not os.path.exists(self.output_location):
        try:
            os.mkdir(self.output_location)
        except OSError:
            return -1
    self.start_timer()
    LOG.write('Beginning construction of opi tarball...')
    status = self.create_opi_tarball()
    elapsed = self.stop_timer()
    LOG.write('Tarring took {} seconds'.format(elapsed))
    self.create_bundle_cleanup_tool()
    return status
def create_tarball(self, filename, flat_format, with_sources):
    """Create the bundle tarball for the given filename.

    Stages EPICS base and the selected support modules under a local
    ``__temp__`` directory, generates a README and dummy IOCs, then hands
    off to ``cleanup_tar_staging`` to produce the final tarball.

    Parameters
    ----------
    filename : str
        Name for the output tarball and readme file.
    flat_format : bool
        Flag to toggle generating flat vs. non-flat binaries.
    with_sources : bool
        Flag to include non-build-artefact (source) files with the bundle.

    Returns
    -------
    int
        0 if success, <0 if failure.
    """
    readme_path = installSynApps.join_path(self.output_location, 'README_{}.txt'.format(filename))
    self.setup_tar_staging()
    # Presumably stages EPICS base into the staging dir — hence EPICS_BASE
    # is excluded from the module loop below. TODO confirm against grab_base.
    self.grab_base('__temp__', include_src=with_sources)
    support_top = '__temp__'
    if not flat_format:
        LOG.write('Non-flat output binary structure selected.')
        support_top = installSynApps.join_path('__temp__', 'support')
        os.mkdir(support_top)
    ad_top = installSynApps.join_path(support_top, 'areaDetector')
    os.mkdir(ad_top)
    for module in self.install_config.get_module_list():
        # Include modules that are required in every package, explicitly
        # marked for packaging, or built from source when sources requested.
        if (module.name in self.required_in_package or module.package == "YES" or (with_sources and module.build == "YES")) and not module.name == "EPICS_BASE":
            # areaDetector modules live under their own subtree
            if module.rel_path.startswith('$(AREA_DETECTOR)'):
                self.grab_module(ad_top, module, include_src=with_sources)
            else:
                self.grab_module(support_top, module, include_src=with_sources)
    self.file_generator.generate_readme(filename, installation_type='bundle', readme_path=readme_path)
    self.ioc_gen.init_template_dir()
    self.ioc_gen.generate_dummy_iocs()
    if with_sources:
        self.create_repoint_bundle_script()
    result = self.cleanup_tar_staging(filename)
    return result
def perform_fix_out_of_order_dependencies(self):
    """Reorder modules until the dependency build order is valid.

    Runs ``check_dependency_order_valid`` in a loop; each time it reports
    an out-of-order pair, the module and its dependency are swapped in the
    install config and the check is rerun until no issues remain.
    """
    is_valid, module_name, dependency_name = self.check_dependency_order_valid()
    while not is_valid:
        self.install_config.swap_module_positions(module_name, dependency_name)
        LOG.write('Swapping build order of {} and {}'.format(module_name, dependency_name))
        is_valid, module_name, dependency_name = self.check_dependency_order_valid()
def update_support_macros(self):
    """Update macros in the support configure directory and module RELEASE files."""
    support_configure = installSynApps.join_path(self.install_config.support_path, "configure")
    self.update_macros(support_configure, False, False)
    # `make release` does not correctly update every module's RELEASE file,
    # so patch each cloned-and-built module's configure/RELEASE here.
    for module in self.install_config.get_module_list():
        if module.clone != 'YES' or module.build != 'YES':
            continue
        release_file = installSynApps.join_path(module.abs_path, 'configure', 'RELEASE')
        if os.path.exists(release_file):
            LOG.write('Updating RELEASE file for {}...'.format(module.name))
            self.update_macros(release_file, True, True, single_file=True)
def generate_dummy_iocs(self):
    """Create a dummy IOC for each AD driver module that has an iocs directory."""
    LOG.write('Generating dummy IOCs for included driver binaries')
    # Collect one action per AD* module whose checkout contains an 'iocs' dir
    actions = [
        DummyIOCAction(os.path.basename(module.abs_path))
        for module in self.install_config.get_module_list()
        if module.name.startswith('AD')
        and os.path.exists(installSynApps.join_path(module.abs_path, 'iocs'))
    ]
    for action in actions:
        self.create_dummy_ioc(action)
    LOG.write('Done.')
def create_opi_tarball(self):
    """Collect autoconverted .opi files from the install tree into a tarball.

    OPI screens found under any 'autoconvert' directory are copied into a
    temporary staging directory, which is then tarred and moved into
    ``self.output_location``.

    Returns
    -------
    int
        0 if succeeded, nonzero otherwise.
    """
    opi_base_dir = installSynApps.join_path(self.output_location, '__opis_temp__')
    opi_dir = installSynApps.join_path(opi_base_dir, 'opis')
    try:
        os.mkdir(opi_base_dir)
        os.mkdir(opi_dir)
    except OSError:
        # Best-effort: log and continue; later copies will also log failures
        LOG.write('Error creating ' + opi_dir + ' directory', )
    for (root, _, files) in os.walk(self.install_config.install_location, topdown=True):
        for name in files:
            # Only pick up .opi screens that live in an autoconvert directory
            if '.opi' in name and 'autoconvert' in root:
                file_name = installSynApps.join_path(root, name)
                try:
                    shutil.copy(file_name, opi_dir)
                except OSError:
                    LOG.debug("Can't copy {} to {}".format(file_name, opi_dir))
    opi_tarball_basename = 'opis_{}'.format(self.install_config.get_core_version())
    opi_tarball = opi_tarball_basename
    counter = 1
    # Avoid clobbering an existing tarball by appending a counter suffix
    while os.path.exists(installSynApps.join_path(self.output_location, opi_tarball + '.tgz')):
        opi_tarball = opi_tarball_basename + '_({})'.format(counter)
        counter = counter + 1
    # The tarball is created in the current working directory first, then
    # moved into output_location. NOTE(review): os.rename can fail across
    # filesystems if CWD and output_location are on different mounts — confirm.
    out = subprocess.call(['tar', 'czf', opi_tarball + '.tgz', '-C', opi_base_dir, '.'])
    shutil.rmtree(opi_base_dir)
    os.rename(opi_tarball + '.tgz', installSynApps.join_path(self.output_location, opi_tarball + '.tgz'))
    return out
def grab_folder(self, src, dest):
    """Copy a directory tree if the source exists and the destination does not.

    Parameters
    ----------
    src : str
        Folder to copy.
    dest : str
        Result location.
    """
    # Skip silently when src is missing or not a directory
    if not (os.path.exists(src) and os.path.isdir(src)):
        return
    # Never overwrite an existing destination
    if os.path.exists(dest):
        return
    try:
        shutil.copytree(src, dest)
    except shutil.Error:
        LOG.write('Error when copying {}!\nPossibly softlinks in directory tree.'.format(src))
def create_add_on_package(self, filename, module_name, with_sources=False):
    """Top level driver for creating an add-on package for a single module.

    Creates the output directory if needed, generates the tarball, and
    measures elapsed time.

    Parameters
    ----------
    filename : str
        Filename of the output bundle.
    module_name : str
        Name of the module to create an add-on package for.
    with_sources : bool
        Whether to include source files in the package.

    Returns
    -------
    int
        Status of the tar creation command, or -1 on setup failure.
    """
    module = self.install_config.get_module_by_name(module_name)
    if module is None:
        return -1
    # Guarantee the output directory exists before staging anything
    if not os.path.exists(self.output_location):
        try:
            os.mkdir(self.output_location)
        except OSError:
            return -1
    self.start_timer()
    LOG.write('Beginning construction of {} add on...'.format(module.name))
    status = self.create_single_module_tarball(filename, module, with_sources)
    elapsed = self.stop_timer()
    LOG.write('Tarring took {} seconds'.format(elapsed))
    self.create_bundle_cleanup_tool()
    return status
def checkout_module(self, module, recursive=False):
    """Check out the configured tagged version of a module via git.

    Parameters
    ----------
    module : InstallModule
        Module that is being checked out.
    recursive : bool
        Specifies whether there are git submodules that need to be initialized.

    Returns
    -------
    int
        -1 if input was invalid or checkout fails, 0 if success.
        NOTE(review): the original docstring also promised -3 (not an
        InstallModule) and -2 (unknown abs_path), but the visible code
        returns -1 in both cases — confirm against callers.
    """
    ret = -1
    LOG.debug('Checking out version for module {}'.format(module.name))
    if isinstance(module, IM.InstallModule):
        if module.abs_path != None:
            ret = 0
            # 'master' checkouts and non-git modules need no explicit checkout
            if module.version != "master" and module.url_type == "GIT_URL":
                # git checkout must run from inside the module's clone;
                # the original CWD is restored afterwards
                current_loc = os.getcwd()
                os.chdir(module.abs_path)
                command = "git checkout -q {}".format(module.version)
                LOG.print_command(command)
                proc = Popen(command.split(' '))
                proc.wait()
                ret = proc.returncode
                if recursive and ret == 0:
                    command = 'git submodule update'
                    LOG.print_command(command)
                    proc = Popen(command.split(' '))
                    proc.wait()
                    ret = proc.returncode
                os.chdir(current_loc)
                if ret == 0:
                    LOG.write('Checked out version {}'.format(module.version))
                else:
                    LOG.write('Checkout of version {} failed for module {}.'.format(module.version, module.name))
    return ret
def create_package(self, filename, install_path='DEPLOYMENTS', flat_format=True, with_sources=False): """Top level packager driver function. Creates output directory, generates filename, creates the tarball, and measures time. Parameters ---------- filename : str filename of output bundle flat_format : bool Flag to specify flat vs. non-flat binaries Returns ------- int status of tar creation command """ # Make sure output path exists if not os.path.exists(self.output_location): try: os.mkdir(self.output_location) except OSError: return -1 # Start the timer self.start_timer() LOG.write('Beginning bundling process...') # Generate the bundle status = self.create_tarball(filename, install_path, flat_format, with_sources) # Stop the timer elapsed = self.stop_timer() LOG.write('Tarring took {} seconds'.format(elapsed)) self.create_bundle_cleanup_tool() return status
def make_support_releases_consistent(self):
    """Run ``make release`` in the support directory to keep releases consistent.

    Returns
    -------
    int
        Return code of the make release command call.
    """
    LOG.write('Running make release to keep releases consistent.')
    cmd = 'make -C {} release'.format(self.install_config.support_path)
    LOG.print_command(cmd)
    process = Popen(cmd.split(' '))
    process.wait()
    exit_code = process.returncode
    if exit_code != 0:
        LOG.write('make release exited with non-zero exit code: {}'.format(exit_code))
    return exit_code
def cleanup_tar_staging(self, filename, module=None):
    """Finalize the tar staging area: copy README, tar it, and clean up.

    Parameters
    ----------
    filename : str
        file path string
    module : InstallModule
        Optional install module to create single module add-on package

    Returns
    -------
    int
        Return code of the tar creation call.
    """
    LOG.debug('Generating README file with module version and append instructions...')
    shutil.copy(installSynApps.join_path(self.output_location, 'README_{}.txt'.format(filename)),
                installSynApps.join_path('__temp__', 'README_{}.txt'.format(filename)))
    LOG.write('Tarring...')
    out = subprocess.call(['tar', 'czf', filename + '.tgz', '-C', '__temp__', '.'])
    # Fix: subprocess.call returns a POSITIVE exit code when tar fails
    # (negative only when killed by a signal). The original `out < 0` check
    # would fall through and attempt to rename a tarball that may not exist.
    if out != 0:
        return out
    os.rename(filename + '.tgz', installSynApps.join_path(self.output_location, filename + '.tgz'))
    LOG.write('Done. Wrote tarball to {}.'.format(self.output_location))
    LOG.write('Name of tarball: {}'.format(installSynApps.join_path(self.output_location, filename + '.tgz')))
    shutil.rmtree('__temp__')
    return out
def sync_all_module_tags(install_config, save_path=None, overwrite_existing=True):
    """Sync every module's version tag with the tags found in its git repository.

    Parameters
    ----------
    install_config : InstallConfiguration
        Install configuration whose module tags should be updated.
    save_path : str
        None by default. If set, the updated configuration is written to
        this location.
    overwrite_existing : bool
        Whether an existing configuration at ``save_path`` may be
        overwritten. Default: True.

    Returns
    -------
    bool
        Result of writing the config when ``save_path`` is given,
        True otherwise.
    """
    LOG.write('Syncing...')
    LOG.write('Please wait while tags are synced - this may take a while...')
    for module in install_config.get_module_list():
        sync_module_tag(module.name, install_config)
    # When no save path is given, syncing in memory is all that's needed
    if save_path is None:
        return True
    writer = IO.config_writer.ConfigWriter(install_config)
    ret, _ = writer.write_install_config(save_path, overwrite_existing=overwrite_existing)
    LOG.write('Updated install config saved to {}'.format(save_path))
    return ret
def grab_all_files_in_dir(self, src, dest):
    """Copy every entry in one directory into another.

    Parameters
    ----------
    src : os.PathLike
        Target source directory to pull from.
    dest : os.PathLike
        Target destination directory to copy to.
    """
    if not os.path.exists(dest):
        try:
            os.mkdir(dest)
        except OSError:
            # Fix: catch OSError specifically instead of a bare `except:`,
            # which would also swallow KeyboardInterrupt/SystemExit.
            LOG.write('Failed to grab files from dir {}'.format(src))
            return
    if os.path.exists(src) and os.path.isdir(src) and os.path.exists(dest) and os.path.isdir(dest):
        for elem in os.listdir(src):
            self.grab_file(src + '/' + elem, dest + '/' + elem)
def install_bundle(self, output_filename, flat_output, with_sources=False, force_install_loc=None, external_readme=True):
    """Install the bundle directly into a target directory (no tarball).

    Stages EPICS base and selected modules under
    ``<install_loc>/<output_filename>``, then generates README, license,
    and (for non-flat or source installs) dummy IOC templates.

    Parameters
    ----------
    output_filename : str
        Name of the bundle directory (and README suffix).
    flat_output : bool
        Flat vs. support/areaDetector nested binary layout.
    with_sources : bool
        Include source files; also switches README type to 'source'.
    force_install_loc : str
        If set, install there instead of ``self.output_location`` and keep
        the README in ``self.output_location``.
    external_readme : bool
        NOTE(review): unused in this body — confirm whether callers rely
        on it or it is dead.

    Returns
    -------
    int
        Always 0.
    """
    install_loc = self.output_location
    readme_name = 'README'
    if force_install_loc is not None:
        LOG.write('Copying build artifacts to temp directory...')
        install_loc = force_install_loc
        readme_loc = self.output_location
        readme_name = 'README_{}.txt'.format(output_filename)
    else:
        LOG.write('Installing bundle to {}...'.format(self.output_location))
        readme_loc = installSynApps.join_path(install_loc, output_filename)
    bundle_top = installSynApps.join_path(install_loc, output_filename)
    # Start from a clean bundle directory
    if os.path.exists(bundle_top):
        shutil.rmtree(bundle_top)
    os.mkdir(bundle_top)
    readme_path = installSynApps.join_path(readme_loc, readme_name)
    self.grab_base(bundle_top, include_src=with_sources, flat_grab=flat_output)
    support_top = bundle_top
    if not flat_output:
        LOG.write('Non-flat output binary structure selected.')
        support_top = installSynApps.join_path(bundle_top, 'support')
        os.mkdir(support_top)
    ad_top = bundle_top
    if not flat_output:
        ad_top = installSynApps.join_path(support_top, 'areaDetector')
        os.mkdir(ad_top)
    for module in self.install_config.get_module_list():
        # Include required modules, packaging-flagged modules, and (when
        # sources requested) every built module; EPICS_BASE handled above.
        if (module.name in self.required_in_package or module.package == "YES" or (with_sources and module.build == "YES")) and not module.name == "EPICS_BASE":
            if module.rel_path.startswith('$(AREA_DETECTOR)'):
                self.grab_module(ad_top, module, include_src=with_sources, flat_grab=flat_output)
            else:
                self.grab_module(support_top, module, include_src=with_sources, flat_grab=flat_output)
    # We always create a README file to know which modules were included
    package_type = 'bundle'
    if with_sources:
        package_type = 'source'
    self.file_generator.generate_readme(output_filename, installation_type=package_type, readme_path=readme_path, flat_grab=flat_output)
    self.file_generator.generate_license(bundle_top)
    if not flat_output or with_sources:
        self.ioc_gen.init_template_dir(output_filename)
        self.ioc_gen.generate_dummy_iocs()
    if with_sources:
        self.create_repoint_bundle_script()
    LOG.write('Done.')
    return 0
def clone_module(self, module, recursive=False):
    """Clone or download a module into its configured location.

    First checks whether the module uses git or an archive download and
    whether the clone should be recursive, then uses the module object's
    url/repository info with subprocess commands (or requests for direct
    downloads) to fetch and, for archives, unpack the module.

    Parameters
    ----------
    module : InstallModule
        InstallModule currently being cloned.
    recursive : bool
        Whether git clone should be done recursively.

    Returns
    -------
    int
        0 on success, -1 on clone/unpack failure, -2 if the absolute path
        is unknown, -3 if the input was not an InstallModule.
    """
    # NOTE(review): declares `global USE_URLLIB` but the body reads
    # USE_WGET — confirm which module-level flag is actually intended.
    global USE_URLLIB
    LOG.debug('Cloning module {}'.format(module.name))
    if isinstance(module, IM.InstallModule):
        if module.abs_path != None:
            command = None
            ret = -1
            # Remove any stale checkout before re-cloning
            if os.path.exists(module.abs_path):
                shutil.rmtree(module.abs_path)
            if not recursive and module.url_type == "GIT_URL":
                command = "git clone {} {}".format(module.url + module.repository, module.abs_path)
            elif recursive and module.url_type == "GIT_URL":
                command = "git clone --recursive {} {}".format(module.url + module.repository, module.abs_path)
            elif module.url_type == "WGET_URL":
                try:
                    archive_path = installSynApps.join_path(os.path.dirname(module.abs_path), module.repository)
                    if not USE_WGET:
                        # Direct download via requests instead of wget
                        r = requests.get(module.url + module.repository)
                        with open(archive_path, 'wb') as fp:
                            fp.write(r.content)
                        os.mkdir(module.abs_path)
                        ret = 0
                    else:
                        if platform == "win32":
                            command = "wget --no-check-certificate -P {} {}".format(os.path.dirname(module.abs_path), module.url + module.repository)
                        else:
                            command = 'wget -P {} {}'.format(os.path.dirname(module.abs_path), module.url + module.repository)
                except Exception as e:
                    LOG.write(str(e))
                    ret = -1
            # Run whichever git/wget command was selected above
            if command is not None:
                LOG.print_command(command)
                proc = Popen(command.split(' '))
                proc.wait()
                ret = proc.returncode
                if ret == 0:
                    LOG.write('Cloned module {} successfully.'.format(module.name))
                else:
                    LOG.write('Failed to clone module {}.'.format(module.name))
                    return -1
            # Archive downloads need a second step: unpack into abs_path
            if module.url_type == "WGET_URL":
                archive_path = installSynApps.join_path(os.path.dirname(module.abs_path), module.repository)
                if not os.path.exists(module.abs_path):
                    os.mkdir(module.abs_path)
                command = None
                if (module.repository.endswith(".tar.gz") or module.repository.endswith(".tgz")) and ret == 0:
                    command = "tar -xzf {} -C {} --strip-components=1".format(archive_path, module.abs_path)
                elif module.repository.endswith(".zip") and ret == 0:
                    # NOTE(review): .zip archives are passed to `tar -xf` —
                    # confirm the platform tar supports zip input.
                    command = "tar -xf {} -C {} --strip-components=1".format(archive_path, module.abs_path)
                else:
                    LOG.write('Unsupported archive format detected!')
                    ret = -1
                if command is not None:
                    LOG.print_command(command)
                    proc = Popen(command.split(' '))
                    proc.wait()
                    ret = proc.returncode
                    if ret == 0:
                        LOG.write('Unpacked module {} successfully.'.format(module.name))
                        # Remove the downloaded archive once unpacked
                        os.remove(installSynApps.join_path(os.path.dirname(module.abs_path), module.repository))
                    else:
                        LOG.write('Failed to unpack module {}.'.format(module.name))
            if ret == 0:
                return ret
            return -1
        return -2
    return -3
def write_install_config(self, filepath='addtlConfDirs/config{}'.format(datetime.date.today()), overwrite_existing=False):
    """Save the loaded install configuration to disk.

    Main saving function for writing install config. Can create a save
    directory, then saves the main install configuration, build flags,
    and injector files.

    Parameters
    ----------
    filepath : str
        Defaults to addtlConfDirs/config$DATE. The filepath into which to
        save the install configuration.
    overwrite_existing : bool
        If True, existing generated files at ``filepath`` are removed first.

    Returns
    -------
    bool
        True if successful, False otherwise.
    str
        None if successful, otherwise an error message.
    """
    if overwrite_existing and os.path.exists(filepath):
        try:
            shutil.rmtree(installSynApps.join_path(filepath, 'injectionFiles'))
            shutil.rmtree(installSynApps.join_path(filepath, 'macroFiles'))
            os.remove(installSynApps.join_path(filepath, 'INSTALL_CONFIG'))
        except PermissionError:
            return False, 'Insufficient Permissions'
    # Check if path exists, create it if it doesn't
    if not os.path.exists(filepath):
        try:
            os.mkdir(filepath)
        except OSError as err:
            if err.errno == errno.EACCES:
                return False, 'Permission Error!'
            elif err.errno == errno.EEXIST:
                return False, 'Path already exists!'
            elif err.errno == errno.ENOSPC:
                return False, 'No space on device!'
            elif err.errno == errno.EROFS:
                return False, 'Read-Only File System!'
            else:
                return False, 'Unknown Error'
    try:
        os.mkdir(installSynApps.join_path(filepath, 'injectionFiles'))
        os.mkdir(installSynApps.join_path(filepath, 'macroFiles'))
        if not os.path.exists(installSynApps.join_path(filepath, 'customBuildScripts')):
            os.mkdir(installSynApps.join_path(filepath, 'customBuildScripts'))
    except OSError:
        LOG.write('Failed to make configuration directories!')
        return False, 'Unknown Error'
    LOG.debug('Writing injector files.')
    self.write_injector_files(filepath)
    LOG.debug('Writing build flags.')
    self.write_build_flags(filepath)
    LOG.debug('Writing custom build scripts.')
    self.write_custom_build_scripts(filepath)
    LOG.debug('Writing INSTALL_CONFIG file.')
    # Fix: use a context manager so the file handle is closed even if a
    # write fails partway through (original used open()/close() with no
    # exception protection).
    with open(installSynApps.join_path(filepath, "INSTALL_CONFIG"), "w+") as new_install_config:
        new_install_config.write('#\n# INSTALL_CONFIG file saved by installSynApps on {}\n#\n\n'.format(datetime.datetime.now()))
        new_install_config.write("INSTALL={}\n\n\n".format(self.install_config.install_location))
        new_install_config.write('#MODULE_NAME MODULE_VERSION MODULE_PATH MODULE_REPO CLONE_MODULE BUILD_MODULE PACKAGE_MODULE\n')
        new_install_config.write('#--------------------------------------------------------------------------------------------------------------------------------------------------\n')
        current_url = ""
        for module in self.install_config.get_module_list():
            # Emit a URL-type header line whenever the base URL changes
            if module.url != current_url:
                new_install_config.write("\n{}={}\n\n".format(module.url_type, module.url))
                current_url = module.url
            new_install_config.write("{:<16} {:<20} {:<40} {:<24} {:<16} {:<16} {}\n".format(module.name, module.version, module.rel_path, module.rel_repo, module.clone, module.build, module.package))
    return True, None
def sync_module_tag(module_name, install_config, save_path=None):
    """Sync one module's version tag with those hosted in its git repository.

    This function is still buggy, and certain modules do not update
    correctly.

    Parameters
    ----------
    module_name : str
        The name of the module to sync.
    install_config : InstallConfiguration
        Instance of install configuration for which to update tags.
    save_path : str
        None by default. If set, will save the install configuration to
        the given location after updating.

    Returns
    -------
    bool
        Result of writing the config when ``save_path`` is given,
        True otherwise.
    """
    module = install_config.get_module_by_name(module_name)
    # Skip 'master' checkouts, non-git modules, and blacklisted modules
    if module.url_type == 'GIT_URL' and module.version != 'master' and module.name not in update_tags_blacklist:
        account_repo = '{}{}'.format(module.url, module.repository)
        LOG.print_command("git ls-remote --tags {}".format(account_repo))
        sync_tags_proc = Popen(['git', 'ls-remote', '--tags', account_repo], stdout=PIPE, stderr=PIPE)
        out, _ = sync_tags_proc.communicate()
        ret = out.decode('utf-8')
        tags_temp = ret.splitlines()
        tags = []
        for tag in tags_temp:
            # ls-remote lines end in refs/tags/<name>; keep just <name>
            tags.append(tag.rsplit('/')[-1])
        if len(tags) > 0:
            # Find the newest tag by comparing extracted numeric components.
            # Tags starting with 'R' are preferred over those that don't
            # (EPICS release-style tags).
            best_tag = tags[0]
            best_tag_ver_str_list = re.split(r'\D+', tags[0])
            best_tag_ver_str_list = [num for num in best_tag_ver_str_list if num.isnumeric()]
            best_tag_version_numbers = list(map(int, best_tag_ver_str_list))
            for tag in tags:
                tag_ver_str_list = re.split(r'\D+', tag)
                tag_ver_str_list = [num for num in tag_ver_str_list if num.isnumeric()]
                tag_version_numbers = list(map(int, tag_ver_str_list))
                for i in range(len(tag_version_numbers)):
                    if best_tag.startswith('R') and not tag.startswith('R'):
                        break
                    elif not best_tag.startswith('R') and tag.startswith('R'):
                        best_tag = tag
                        best_tag_version_numbers = tag_version_numbers
                        break
                    elif i == len(best_tag_version_numbers) or tag_version_numbers[i] > best_tag_version_numbers[i]:
                        # Candidate is longer or strictly newer at position i
                        best_tag = tag
                        best_tag_version_numbers = tag_version_numbers
                        break
                    elif tag_version_numbers[i] < best_tag_version_numbers[i]:
                        break
            # Now compare the best remote tag against the module's current version
            tag_updated = False
            module_ver_str_list = re.split(r'\D+', module.version)
            module_ver_str_list = [num for num in module_ver_str_list if num.isnumeric()]
            module_version_numbers = list(map(int, module_ver_str_list))
            for i in range(len(best_tag_version_numbers)):
                if i == len(module_version_numbers) or best_tag_version_numbers[i] > module_version_numbers[i]:
                    tag_updated = True
                    LOG.write('Updating {} from version {} to version {}'.format(module.name, module.version, best_tag))
                    module.version = best_tag
                    break
                elif best_tag_version_numbers[i] < module_version_numbers[i]:
                    break
            if not tag_updated:
                LOG.debug('Module {} already at latest version: {}'.format(module.name, module.version))
    if save_path is not None:
        writer = IO.config_writer.ConfigWriter(install_config)
        ret, _ = writer.write_install_config(save_path, overwrite_existing=True)
        LOG.write('Updated install config saved to {}'.format(save_path))
        return ret
    else:
        return True
def build_module(self, module_name):
    """Execute the build of a single module.

    First checks whether all dependencies have been built, building them
    first if not. Then, if a custom build script is configured, runs it;
    otherwise runs make with the configured make flag in the module root
    directory.

    Parameters
    ----------
    module_name : str
        The name of the module being built.

    Returns
    -------
    int
        The return code of the build process, 0 if the module is not
        buildable (ex. UTILS).
    """
    # Non-buildable packages (e.g. UTILS) are treated as already built
    if module_name in self.non_build_packages:
        return 0
    LOG.write('Building module {}'.format(module_name))
    module = self.install_config.get_module_by_name(module_name)
    if len(module.dependencies) > 0:
        for dep in module.dependencies:
            if dep not in self.built:
                # NOTE(review): the return code of a dependency build is
                # ignored, so a failed dependency does not stop this
                # module's build — confirm this best-effort behavior.
                self.build_module(dep)
    if module.custom_build_script_path is not None:
        LOG.write('Detected custom build script located at {}'.format(module.custom_build_script_path))
        ret = self.build_via_custom_script(module)
        if ret == 0:
            self.built.append(module_name)
            LOG.write('Built module {} via custom script'.format(module_name))
        else:
            LOG.write('Custom script for module {} exited with error code {}.'.format(module_name, ret))
    else:
        command = "make -C {} {}".format(module.abs_path, self.make_flag)
        LOG.print_command(command)
        proc = Popen(command.split(' '))
        proc.wait()
        ret = proc.returncode
        if ret == 0:
            self.built.append(module_name)
            LOG.write('Built module {}'.format(module_name))
        else:
            LOG.write('Failed to build module {}'.format(module_name))
    return ret