def grab_base(self, top, readme_fp):
    """Copy all of the required folders from EPICS_BASE into the bundle.

    Also records the detected EPICS base git tag into the readme file.

    Parameters
    ----------
    top : str
        resulting location - __temp__
    readme_fp : FILE*
        output readme file
    """
    base_path = self.install_config.base_path
    # bin/ and lib/ are architecture-specific; the rest are shared.
    self.grab_folder(base_path + '/bin/' + self.arch, top + '/base/bin/' + self.arch)
    self.grab_folder(base_path + '/lib/' + self.arch, top + '/base/lib/' + self.arch)
    self.grab_folder(base_path + '/lib/perl', top + '/base/lib/perl')
    self.grab_folder(base_path + '/configure', top + '/base/configure')
    self.grab_folder(base_path + '/include', top + '/base/include')
    self.grab_folder(base_path + '/startup', top + '/base/startup')
    try:
        current_loc = os.getcwd()
        LOG.debug('cd {}'.format(base_path))
        os.chdir(base_path)
        try:
            LOG.debug('git describe --tags')
            out = subprocess.check_output(['git', 'describe', '--tags'])
        finally:
            # BUG FIX: restore the working directory even when git fails.
            # Previously a CalledProcessError left the process chdir'd into
            # base_path because the chdir-back was skipped by the exception.
            LOG.debug('cd {}'.format(current_loc))
            os.chdir(current_loc)
        LOG.debug('Checked version for EPICS base.')
        LOG.debug('Detected version git tag {} for EPICS_BASE'.format(
            out.decode("utf-8").strip()))
        readme_fp.write('{:<16}- {}'.format('base', out.decode("utf-8")))
    except subprocess.CalledProcessError:
        # Best effort - base may not be a git checkout.
        pass
def grab_configuration_used(self, top_location, readme_fp, module):
    """Include the install configuration into the bundle for reuse.

    For a full bundle (module is None) the build configuration is written
    into a build-config directory inside the bundle. For a single-module
    add-on tarball, usage instructions are written to the readme instead.
    In both cases version information about installSynApps, Python, and the
    OS is appended to the readme.

    Parameters
    ----------
    top_location : str
        resulting location - __temp__
    readme_fp : FILE*
        output readme file
    module : InstallModule
        None for a full bundle, otherwise the single add-on module
    """
    try:
        isa_version, isa_commit_hash = self.find_isa_version()
        if module is None:
            LOG.write('Copying build configuration into bundle.')
            writer = Writer.ConfigWriter(self.install_config)
            build_config_dir = os.path.join(top_location, 'build-config')
            writer.write_install_config(filepath=build_config_dir)
            self.write_readme_heading(
                'Build environment version information', readme_fp)
        else:
            # FIX: corrected typo "exisitng" in the readme heading.
            self.write_readme_heading(
                'Implementing add on in existing bundle', readme_fp)
            readme_fp.write(
                'This add on tarball contains a folder with a compiled version of {}.\n'
                .format(module.name))
            readme_fp.write(
                'To use it with an existing bundle, please copy the folder into {} in the target bundle.\n'
                .format(module.rel_path))
            readme_fp.write(
                'It is also recommended to edit the build-config for the bundle to reflect the inclusion of this module.\n\n'
            )
        readme_fp.write('Build configuration:\n\n')
        readme_fp.write(
            'installSynApps Version: {}\n\n'.format(isa_version))
        if isa_commit_hash is not None:
            # Exact commit is known - point at it directly.
            readme_fp.write(
                'To grab this version:\n\n\tgit clone https://github.com/epicsNSLS2-deploy/installSynApps\n'
            )
            readme_fp.write('\tgit checkout {}\n'.format(isa_commit_hash))
        else:
            # Fall back to the version tag.
            readme_fp.write(
                'To grab this version use:\n\n\tgit clone https://github.com/epicsNSLS2-deploy/installSynApps\n'
            )
            readme_fp.write('\tgit checkout -q {}\n'.format(isa_version))
        readme_fp.write(
            'To regenerate sources for this bundle, grab installSynApps as described above, and use:\n\n'
        )
        readme_fp.write('\t./installCLI.py -c BUILD_CONFIG -p\n\n')
        readme_fp.write(
            'where BUILD_CONFIG is the path to the build-config directory in this bundle.\n'
        )
        readme_fp.write(
            'Make sure to specify an install location as well\n{}\n\n'.
            format('-' * 64))
        readme_fp.write('{:<20}{}\n'.format('Python 3 Version:',
                                            sys.version.split()[0]))
        readme_fp.write('{:<20}{}\n'.format('OS Class:', self.OS))
        readme_fp.write('{:<20}{}\n'.format('Build Date:',
                                            datetime.datetime.now()))
    except Exception:
        # FIX: narrowed from a bare except, which also swallowed
        # KeyboardInterrupt/SystemExit. Still best-effort.
        LOG.debug('Failed to copy install configuration into bundle.')
def update_macros_dir(self, macro_replace_list, target_dir, force_override_comments=False):
    """Update the macros for every eligible file in a target directory.

    Parameters
    ----------
    macro_replace_list : List
        list containing macro-value pairs
    target_dir : str
        path of target dir for which all macros will be edited.
    force_override_comments : bool
        passed through to update_macros_file as its force flag
    """
    LOG.debug('Updating macros in directory {}'.format(target_dir))
    if not (os.path.exists(target_dir) and os.path.isdir(target_dir)):
        return
    for entry in os.listdir(target_dir):
        # Only plain configuration files are edited - skip directories,
        # perl scripts, Makefiles and .ioc files.
        if not os.path.isfile(target_dir + "/" + entry):
            continue
        if entry == "Makefile" or entry.endswith((".pl", ".ioc")):
            continue
        self.update_macros_file(macro_replace_list, target_dir, entry,
                                force=force_override_comments)
def acquire_dependecies(self, dependency_script_path):
    """Run the dependency install shell/batch script.

    Parameters
    ----------
    dependency_script_path : str
        path to dependency shell/batch script
    """
    LOG.debug('Grabbing dependencies via script {}'.format(
        dependency_script_path))
    if os.path.exists(dependency_script_path) and os.path.isfile(
            dependency_script_path):
        # FIX: renamed local 'exec', which shadowed the builtin exec().
        if dependency_script_path.endswith('.bat'):
            # Windows batch files are executable directly.
            command = dependency_script_path
        else:
            command = 'bash {}'.format(dependency_script_path)
        LOG.print_command(command)
        proc = Popen(command.split(' '))
        proc.wait()
        ret = proc.returncode
        if ret != 0:
            LOG.write(
                'Dependency script exited with non-zero exit code: {}'.
                format(ret))
def comment_non_build_macros(self):
    """Comment out clone-only module paths in support/configure/RELEASE.

    Modules with build == "NO" are cloned but not built, so their RELEASE
    entries are prefixed with '#'. The file is rewritten via a temporary
    copy which is removed afterwards.
    """
    rel_file_path = os.path.join(self.install_config.support_path,
                                 "configure/RELEASE")
    rel_file_path_temp = os.path.join(self.install_config.support_path,
                                      "configure/RELEASE_TEMP")
    os.rename(rel_file_path, rel_file_path_temp)
    # FIX: use with-blocks so both handles are closed even if a write
    # fails partway through (original leaked handles on exception).
    with open(rel_file_path_temp, "r") as rel_file_old, \
            open(rel_file_path, "w") as rel_file_new:
        for line in rel_file_old:
            if not line.startswith('#'):
                for module in self.install_config.get_module_list():
                    if line.startswith(module.name + "=") and module.build == "NO":
                        # Prefix with '#' to disable the path for the build.
                        rel_file_new.write('#')
                        LOG.debug(
                            'Commenting out non-build module {} in support/configure/RELEASE'
                            .format(module.name))
            rel_file_new.write(line)
    os.remove(rel_file_path_temp)
def create_bundle_name(self, module_name=None):
    """Helper function for creating output filename

    Returns
    -------
    str
        An output filename describing architecture and ADCore version,
        or None if module_name is given but unknown
    """
    if module_name is not None:
        module = self.install_config.get_module_by_name(module_name)
        if module is None:
            return None
    date_str = datetime.date.today()
    core_version = self.install_config.get_core_version()
    if module_name is None:
        # Full bundle name is stamped with today's date.
        output_filename = '{}_AD_{}_Bin_{}_{}'.format(
            self.institution, core_version, self.OS, date_str)
    else:
        # Add-on bundle name is stamped with the module name instead.
        output_filename = '{}_AD_{}_Bin_{}_{}_addon'.format(
            self.institution, core_version, self.OS, module.name)
    # Append _(N) until the name does not collide with an existing tarball.
    candidate = output_filename
    suffix_num = 1
    while os.path.exists(self.output_location + '/' + candidate + '.tgz'):
        candidate = '{}_({})'.format(output_filename, suffix_num)
        suffix_num += 1
    output_filename = candidate
    LOG.debug('Generated potential output tarball name as: {}'.format(
        output_filename))
    return output_filename
def cleanup_modules(self):
    """Remove cloned repositories of modules not selected to clone.

    Iterates the loaded install configuration and deletes the checkout
    directory of every module whose clone flag is "NO".
    """
    # FIX: 'is not None' instead of '!= None' (identity check for None).
    if self.install_config is not None and isinstance(
            self.install_config, IC.InstallConfiguration):
        for module in self.install_config.modules:
            if isinstance(module, IM.InstallModule):
                if module.clone == "NO" and os.path.exists(module.abs_path):
                    LOG.debug('Removing unused repo {}'.format(module.name))
                    shutil.rmtree(module.abs_path)
def parse_line_to_module(self, line, current_url, current_url_type):
    """Parse one table line of the INSTALL_CONFIG file into an InstallModule.

    Parameters
    ----------
    line : str
        line from table in file
    current_url : str
        url at which module is located
    current_url_type : str
        either GIT_URL or WGET_URL

    Returns
    -------
    InstallModule
        module parsed from the table line, or None if the line is malformed
    """
    # Collapse tabs and runs of spaces so the columns split cleanly.
    columns = re.sub(' +', ' ', re.sub('\t', ' ', line)).split(' ')
    if len(columns) < 6:
        # Too few columns to be a valid module definition line.
        return None
    # Column order: NAME VERSION RELATIVE_PATH REPOSITORY CLONE BUILD [PACKAGE]
    name, version, rel_path, repository, clone, build = columns[:6]
    if name in self.required_in_package:
        package = "YES"
    elif len(columns) == 7:
        package = columns[6]
    else:
        # Older configure directories omit the PACKAGE column - default NO.
        package = "NO"
    LOG.debug('Parsed install module: {}'.format(name))
    return IM.InstallModule(name, version, rel_path, current_url_type,
                            current_url, repository, clone, build, package)
def create_opi_tarball(self):
    """Function that collects autoconverted .opi files from epics_dir.

    OPI screens are saved in output_location/ad_opis and creats a tarball.

    Returns
    -------
    int
        0 if suceeded, nonzero otherwise
    """
    # Staging area for the collected OPI screens.
    opi_base_dir = os.path.join(self.output_location, '__opis_temp__')
    opi_dir = os.path.join(opi_base_dir, 'opis')
    try:
        os.mkdir(opi_base_dir)
        os.mkdir(opi_dir)
    except OSError:
        # Logged but not fatal - the walk/copy below is still attempted.
        LOG.write('Error creating ' + opi_dir + ' directory', )
    # Collect every .opi file found under an 'autoconvert' directory.
    for (root, dirs, files) in os.walk(self.install_config.install_location,
                                       topdown=True):
        for name in files:
            if '.opi' in name and 'autoconvert' in root:
                file_name = os.path.join(root, name)
                try:
                    shutil.copy(file_name, opi_dir)
                except OSError:
                    LOG.debug("Can't copy {} to {}".format(
                        file_name, opi_dir))
    # Pick a tarball name that does not collide with an existing one.
    opi_tarball_basename = 'opis_{}'.format(
        self.install_config.get_core_version())
    opi_tarball = opi_tarball_basename
    counter = 1
    while os.path.exists(
            os.path.join(self.output_location, opi_tarball + '.tgz')):
        opi_tarball = opi_tarball_basename + '_({})'.format(counter)
        counter = counter + 1
    # NOTE(review): the tarball is created in the current working directory
    # and then renamed into output_location - assumes cwd is writable and on
    # the same filesystem as output_location; confirm.
    out = subprocess.call(
        ['tar', 'czf', opi_tarball + '.tgz', '-C', opi_base_dir, '.'])
    shutil.rmtree(opi_base_dir)
    os.rename(opi_tarball + '.tgz',
              os.path.join(self.output_location, opi_tarball + '.tgz'))
    return out
def write_build_flags(self, filepath):
    """Write build flags from the install config to BUILD_FLAG_CONFIG.

    Parameters
    ----------
    filepath : str
        Path into which we wish to save configuration
    """
    # FIX: with-block guarantees the handle is closed even if a write fails
    # (original leaked the handle on exception).
    with open(filepath + "/macroFiles/BUILD_FLAG_CONFIG", 'w') as new_build_flag:
        new_build_flag.write('# Saved by installSynApps on {}\n\n'.format(
            datetime.datetime.now()))
        for macro_pair in self.install_config.build_flags:
            LOG.debug('Writing build flag {}={}'.format(
                macro_pair[0], macro_pair[1]))
            new_build_flag.write('{}={}\n'.format(macro_pair[0],
                                                  macro_pair[1]))
def update_submodule(self, module, submodule_name):
    """Initialize and update a named git submodule for the given module.

    Parameters
    ----------
    module : InstallModule
        module for which we must update submodules
    submodule_name : str
        name of submodule to update
    """
    LOG.debug('Updating git submodules for {}'.format(module.name))
    # Guard clauses replace the original nested conditionals.
    if not isinstance(module, IM.InstallModule):
        return
    if module.abs_path is None:
        return
    submodule_path = module.abs_path + "/" + submodule_name
    if not os.path.exists(submodule_path):
        return
    # Step 1: initialize the submodule.
    LOG.print_command('git -C {} submodule init'.format(submodule_path))
    init_proc = Popen(["git", "-C", submodule_path, "submodule", "init"])
    init_proc.wait()
    if init_proc.returncode == 0:
        LOG.debug('Submodules initialized for module {}.'.format(module.name))
    else:
        LOG.debug('Failed to initialize submodules for module {}.'.format(
            module.name))
    # Step 2: update (fetch/checkout) the submodule.
    LOG.print_command('git -C {} submodule update'.format(submodule_path))
    update_proc = Popen(["git", "-C", submodule_path, "submodule", "update"])
    update_proc.wait()
    if update_proc.returncode == 0:
        LOG.debug('Submodules updated for module {}.'.format(module.name))
    else:
        LOG.debug('Failed to update submodules for module {}.'.format(
            module.name))
def write_injector_files(self, filepath):
    """Write injector files from the install config to injectionFiles/.

    Each injector file is saved with a __TARGET_LOC__ header so it can be
    re-applied when the configuration is loaded again.

    Parameters
    ----------
    filepath : str
        Path into which we wish to save configuration
    """
    for injector_file in self.install_config.injector_files:
        LOG.debug('Saving injector file {} with target {}'.format(
            injector_file, injector_file.target))
        # FIX: with-block guarantees the handle is closed even if a write
        # fails (original leaked the handle on exception).
        with open(filepath + "/injectionFiles/" + injector_file.name, 'w') as new_fp:
            new_fp.write('# Saved by installSynApps on {}\n'.format(
                datetime.datetime.now()))
            new_fp.write('__TARGET_LOC__={}\n\n'.format(injector_file.target))
            new_fp.write(injector_file.contents)
def cleanup_tar_staging(self, filename, readme_fp, module=None):
    """Function that cleans up tar staging point, and closes readme file.

    Parameters
    ----------
    filename : str
        file path string
    readme_fp : open file
        The opened readme file pointer
    module : InstallModule
        Optional install module to create single module add-on package

    Returns
    -------
    int
        Return code of tar creation call.
    """
    readme_fp.write('\n\n')
    # Append the build configuration / version info before closing.
    self.grab_configuration_used('__temp__', readme_fp, module)
    readme_fp.close()
    LOG.debug(
        'Generating README file with module version and append instructions...'
    )
    # NOTE(review): this copies the README from output_location into the
    # __temp__ staging dir - presumably the readme was written under
    # output_location by the caller; confirm against call sites.
    shutil.copy(
        os.path.join(self.output_location, 'README_{}.txt'.format(filename)),
        os.path.join('__temp__', 'README_{}.txt'.format(filename)))
    LOG.write('Tarring...')
    # Tar the staging directory contents; archive is created in the cwd.
    out = subprocess.call(
        ['tar', 'czf', filename + '.tgz', '-C', '__temp__', '.'])
    if out < 0:
        # Negative return means tar was killed by a signal - bail out.
        return out
    os.rename(filename + '.tgz',
              os.path.join(self.output_location, filename + '.tgz'))
    LOG.write('Done. Wrote tarball to {}.'.format(self.output_location))
    LOG.write('Name of tarball: {}'.format(
        os.path.join(self.output_location, filename + '.tgz')))
    # Remove the staging directory now that the tarball has been moved.
    shutil.rmtree('__temp__')
    return out
def find_isa_version(self):
    """Attempt to determine the version of installSynApps in use.

    Returns
    -------
    str
        The version string for installSynApps. Either hardcoded version,
        or git tag description
    str
        None if git status not available, otherwise hash of current
        installSynApps commit.
    """
    # Fall back to the hardcoded package version if git is unavailable.
    isa_version = installSynApps.__version__
    commit_hash = None
    try:
        LOG.debug('git describe --tags')
        out = subprocess.check_output(['git', 'describe', '--tags'])
        isa_version = out.decode('utf-8').strip()
        LOG.debug('git rev-parse HEAD')
        out = subprocess.check_output(['git', 'rev-parse', 'HEAD'])
        commit_hash = out.decode('utf-8')
    except Exception:
        # FIX: narrowed from a bare except, which also swallowed
        # KeyboardInterrupt/SystemExit. Still best-effort.
        LOG.debug(
            'Could not find git information for installSynApps versions.')
    return isa_version, commit_hash
def write_custom_build_scripts(self, filepath):
    """Copy per-module custom build scripts into the saved configuration.

    Parameters
    ----------
    filepath : str
        Path into which we wish to save configuration
    """
    build_script_out = os.path.join(filepath, 'customBuildScripts')
    for module in self.install_config.get_module_list():
        old_script = module.custom_build_script_path
        if old_script is not None:
            dest = os.path.join(build_script_out,
                                os.path.basename(old_script))
            # Skip scripts already copied on a previous pass.
            if os.path.exists(old_script) and not os.path.exists(dest):
                LOG.debug('Copying module custom build script: {}'.format(
                    old_script))
                try:
                    shutil.copyfile(old_script, dest)
                except Exception:
                    # FIX: narrowed from a bare except (no longer swallows
                    # KeyboardInterrupt/SystemExit).
                    LOG.debug(
                        'Encountered error copying: {}'.format(old_script))
            else:
                LOG.debug('Could not find build script at: {}'.format(
                    old_script))
def add_missing_support_macros(self):
    """Append paths missing from support/configure/RELEASE.

    Modules set to clone that have no entry in the RELEASE file get one
    appended: uncommented when they also build, commented otherwise.
    Blacklisted modules and AD* modules are never added.
    """
    to_append_commented = []
    to_append = []
    release_path = os.path.join(self.install_config.support_path,
                                "configure/RELEASE")
    # FIX: read the RELEASE file once instead of re-opening it per module
    # (original also leaked the handle if an exception occurred mid-read).
    with open(release_path, "r") as rel_file:
        release_lines = rel_file.readlines()
    for module in self.install_config.get_module_list():
        if module.clone == "YES":
            was_found = any(
                line.startswith(module.name + "=") for line in release_lines)
            if not was_found and module.name not in self.add_to_release_blacklist \
                    and not module.name.startswith("AD"):
                if module.build == "YES":
                    to_append.append([module.name, module.rel_path])
                else:
                    to_append_commented.append(
                        [module.name, module.rel_path])
    with open(self.install_config.support_path + "/configure/RELEASE",
              "a") as app_file:
        for mod in to_append:
            LOG.debug('Adding {} path to support/configure/RELEASE'.format(
                mod[0]))
            app_file.write("{}={}\n".format(mod[0], mod[1]))
        for mod in to_append_commented:
            LOG.debug(
                'Adding commented {} path to support/configure/RELEASE'.format(
                    mod[0]))
            app_file.write("#{}={}\n".format(mod[0], mod[1]))
def checkout_module(self, module):
    """Check out the configured tagged version of a module.

    Parameters
    ----------
    module : InstallModule
        Module that is being checked out

    Returns
    -------
    int
        -3 if input was not an InstallModule, -2 if the absolute path is
        not known, non-zero if checkout fails, 0 if success
    """
    LOG.debug('Checking out version for module {}'.format(module.name))
    # FIX: the docstring promised -3/-2 for these cases (matching
    # clone_module's convention) but the code always returned -1.
    if not isinstance(module, IM.InstallModule):
        return -3
    if module.abs_path is None:
        return -2
    ret = 0
    # Only git checkouts of a non-master version need an explicit checkout.
    if module.version != "master" and module.url_type == "GIT_URL":
        current_loc = os.getcwd()
        os.chdir(module.abs_path)
        try:
            command = "git checkout -q {}".format(module.version)
            LOG.print_command(command)
            proc = Popen(command.split(' '))
            proc.wait()
            ret = proc.returncode
        finally:
            # FIX: always restore the working directory, even if Popen
            # raises (e.g. git missing from PATH).
            os.chdir(current_loc)
        if ret == 0:
            LOG.write('Checked out version {}'.format(module.version))
        else:
            LOG.write('Checkout of version {} failed for module {}.'.format(
                module.version, module.name))
    return ret
def inject_to_file(self, injector_file):
    """Inject contents of specified injector file into its target.

    First converts the target to an absolute path given the install config,
    then appends the injector file's contents to it, wrapped between
    auto-generated marker comments. An EXAMPLE_-prefixed target is renamed
    to drop the prefix before injection.

    Parameters
    ----------
    injector_file : InjectorFile
        object representing injector file
    """
    target_path = injector_file.target
    if target_path is None or len(target_path) == 0:
        return
    target_path = self.install_config.convert_path_abs(target_path)
    target_file = os.path.basename(target_path)
    if not os.path.exists(target_path):
        return
    if target_file.startswith("EXAMPLE_"):
        # Strip the EXAMPLE_ prefix so injection lands in the live file.
        # (FIX: removed a redundant second os.path.exists check - the path
        # was already verified above and has not changed since.)
        target_path_no_example = os.path.join(
            os.path.dirname(target_path), target_file[8:])
        os.rename(target_path, target_path_no_example)
        target_path = target_path_no_example
    # FIX: with-block guarantees the handle is closed on error.
    with open(target_path, "a") as target_fp:
        target_fp.write(
            "\n# ------------The following was auto-generated by installSynApps-------\n\n"
        )
        if injector_file.contents is not None:
            LOG.debug('Injecting into {}'.format(target_path))
            target_fp.write(injector_file.contents)
            LOG.debug(injector_file.contents, force_no_timestamp=True)
            LOG.debug('Injection Done.')
        target_fp.write(
            "\n# --------------------------Auto-generated end----------------------\n"
        )
def write_install_config(self, filepath='addtlConfDirs/config{}'.format(
        datetime.date.today()), overwrite_existing=False):
    """Function that saves loaded install configuration

    Main saving function for writing install config. Can create a save
    directory, then saves main install configuration, build flags, and
    injector files.

    Parameters
    ----------
    filepath : str
        defaults to addtlConfDirs/config$DATE. The filepath into which to
        save the install configuration
    overwrite_existing : bool
        if True, an existing saved configuration at filepath is removed
        before writing

    Returns
    -------
    bool
        True if successful, False otherwise
    str
        None if successfull, otherwise error message
    """
    # NOTE(review): the default filepath is evaluated once at import time,
    # so the embedded date is frozen for the process lifetime - confirm
    # whether callers rely on a per-call date.
    # Clear out a previous save at this location when requested.
    if overwrite_existing and os.path.exists(filepath):
        try:
            shutil.rmtree(os.path.join(filepath, 'injectionFiles'))
            shutil.rmtree(os.path.join(filepath, 'macroFiles'))
            os.remove(os.path.join(filepath, 'INSTALL_CONFIG'))
        except PermissionError:
            return False, 'Insufficient Permissions'
    # Check if path exists, create it if it doesn't
    if not os.path.exists(filepath):
        try:
            os.mkdir(filepath)
        except OSError as err:
            # Map common errno values to user-facing error messages.
            if err.errno == errno.EACCES:
                return False, 'Permission Error!'
            elif err.errno == errno.EEXIST:
                return False, 'Path already exists!'
            elif err.errno == errno.ENOSPC:
                return False, 'No space on device!'
            elif err.errno == errno.EROFS:
                return False, 'Read-Only File System!'
            else:
                return False, 'Unknown Error'
    # Create the sub-directories the saved configuration is split across.
    try:
        os.mkdir(os.path.join(filepath, 'injectionFiles'))
        os.mkdir(os.path.join(filepath, 'macroFiles'))
        if not os.path.exists(os.path.join(filepath, 'customBuildScripts')):
            os.mkdir(os.path.join(filepath, 'customBuildScripts'))
    except OSError:
        LOG.write('Failed to make configuration directories!')
        return False, 'Unknown Error'
    LOG.debug('Writing injector files.')
    self.write_injector_files(filepath)
    LOG.debug('Writing build flags.')
    self.write_build_flags(filepath)
    LOG.debug('Writing custom build scripts.')
    self.write_custom_build_scripts(filepath)
    LOG.debug('Writing INSTALL_CONFIG file.')
    new_install_config = open(os.path.join(filepath, "INSTALL_CONFIG"),
                              "w+")
    new_install_config.write(
        '#\n# INSTALL_CONFIG file saved by installSynApps on {}\n#\n\n'.
        format(datetime.datetime.now()))
    new_install_config.write("INSTALL={}\n\n\n".format(
        self.install_config.install_location))
    new_install_config.write(
        '#MODULE_NAME MODULE_VERSION MODULE_PATH MODULE_REPO CLONE_MODULE BUILD_MODULE PACKAGE_MODULE\n'
    )
    new_install_config.write(
        '#--------------------------------------------------------------------------------------------------------------------------------------------------\n'
    )
    # Modules are grouped under their source URL; emit a new URL header
    # line whenever the URL changes between consecutive modules.
    current_url = ""
    for module in self.install_config.get_module_list():
        if module.url != current_url:
            new_install_config.write("\n{}={}\n\n".format(
                module.url_type, module.url))
            current_url = module.url
        new_install_config.write(
            "{:<16} {:<20} {:<40} {:<24} {:<16} {:<16} {}\n".format(
                module.name, module.version, module.rel_path,
                module.rel_repo, module.clone, module.build,
                module.package))
    new_install_config.close()
    return True, None
def clone_module(self, module, recursive=False):
    """Clone or download a single module into its configured location.

    First checks if the module uses git or a download, and whether it needs
    to be recursive, then uses the information in the module object along
    with subprocess commands to clone the module. Downloaded archives
    (.tar.gz/.tgz/.zip) are unpacked in place.

    Parameters
    ----------
    module : InstallModule
        InstallModule currently being cloned
    recursive : bool
        Flag that decides if git clone should be done recursively

    Returns
    -------
    int
        0 on success, -1 on clone/unpack failure, -2 if the module's
        absolute path is unknown, -3 if the input is not an InstallModule
    """
    LOG.debug('Cloning module {}'.format(module.name))
    if not isinstance(module, IM.InstallModule):
        return -3
    if module.abs_path is None:
        return -2
    # Start from a clean target directory.
    if os.path.exists(module.abs_path):
        shutil.rmtree(module.abs_path)
    if not recursive and module.url_type == "GIT_URL":
        command = "git clone {} {}".format(module.url + module.repository,
                                           module.abs_path)
    elif recursive and module.url_type == "GIT_URL":
        command = "git clone --recursive {} {}".format(
            module.url + module.repository, module.abs_path)
    elif module.url_type == "WGET_URL":
        if platform == "win32":
            command = "wget --no-check-certificate -P {} {}".format(
                module.abs_path, module.url + module.repository)
        else:
            command = 'wget -P {} {}'.format(
                module.abs_path, module.url + module.repository)
    LOG.print_command(command)
    proc = Popen(command.split(' '))
    proc.wait()
    ret = proc.returncode
    if ret == 0:
        LOG.write('Cloned module {} successfully.'.format(module.name))
    else:
        LOG.write('Failed to clone module {}.'.format(module.name))
        return -1
    if module.url_type == "WGET_URL":
        archive_path = os.path.join(module.abs_path, module.repository)
        unpack_command = None
        if module.repository.endswith(".tar.gz") or \
                module.repository.endswith(".tgz"):
            unpack_command = "tar -xzf {} -C {} --strip-components=1".format(
                archive_path, module.abs_path)
        elif module.repository.endswith(".zip"):
            # BUG FIX: unzip takes -d (not -C) for the extraction directory.
            unpack_command = "unzip {} -d {}".format(
                archive_path, module.abs_path)
        # BUG FIX: only run an unpack step when an archive was recognized.
        # Previously a non-archive download fell through and re-executed the
        # stale wget command, downloading the file a second time.
        if unpack_command is not None:
            LOG.print_command(unpack_command)
            proc = Popen(unpack_command.split(' '))
            proc.wait()
            ret = proc.returncode
            if ret == 0:
                LOG.write('Unpacked module {} successfully.'.format(
                    module.name))
            else:
                LOG.write('Failed to unpack module {}.'.format(module.name))
    if ret == 0:
        return ret
    return -1
def grab_module(self, top, module, readme_fp):
    """Grab all of the required folders from an individual module.

    Parameters
    ----------
    top : str
        resulting location - __temp__
    module : InstallModule
        module whose build artifacts are copied into the bundle
    readme_fp : FILE*
        output readme file
    """
    module_name = os.path.basename(module.abs_path)
    target_folder = module.abs_path
    if not os.path.exists(target_folder):
        LOG.debug('Module {} not found, skipping...'.format(module.name))
        return
    LOG.debug('Grabbing files for module {}.'.format(module.name))
    # Standard artifact directories common to EPICS / areaDetector modules.
    self.grab_folder(target_folder + '/opi',
                     top + '/' + module_name + '/opi')
    self.grab_folder(target_folder + '/db',
                     top + '/' + module_name + '/db')
    self.grab_folder(target_folder + '/dbd',
                     top + '/' + module_name + '/dbd')
    self.grab_folder(target_folder + '/include',
                     top + '/' + module_name + '/include')
    self.grab_folder(target_folder + '/bin/' + self.arch,
                     top + '/' + module_name + '/bin/' + self.arch)
    self.grab_folder(target_folder + '/lib/' + self.arch,
                     top + '/' + module_name + '/lib/' + self.arch)
    self.grab_folder(target_folder + '/configure',
                     top + '/' + module_name + '/configure')
    self.grab_folder(target_folder + '/iocBoot',
                     top + '/' + module_name + '/iocBoot')
    self.grab_folder(target_folder + '/modules',
                     top + '/' + module_name + '/modules')
    self.grab_folder(target_folder + '/ADViewers/ImageJ',
                     top + '/' + module_name + '/ADViewers/ImageJ')
    # *App directories carry Db/op artifacts; test* dirs are skipped.
    for dir in os.listdir(target_folder):
        if 'App' in dir and not dir.startswith('test'):
            self.grab_folder(
                target_folder + '/' + dir + '/Db',
                top + '/' + module_name + '/' + dir + '/Db')
            self.grab_folder(
                target_folder + '/' + dir + '/op',
                top + '/' + module_name + '/' + dir + '/op')
    # Grab compiled IOC artifacts when the module ships an iocs/ tree.
    if os.path.exists(target_folder + '/iocs'):
        for dir in os.listdir(target_folder + '/iocs'):
            ioc_folder = '/iocs/' + dir
            if 'IOC' in dir:
                LOG.debug('Grabbing IOC files for module {} ioc: {}'.format(
                    module.name, dir))
                self.grab_folder(
                    target_folder + ioc_folder + '/bin/' + self.arch,
                    top + '/' + module_name + ioc_folder + '/bin/' + self.arch)
                self.grab_folder(
                    target_folder + ioc_folder + '/lib/' + self.arch,
                    top + '/' + module_name + ioc_folder + '/lib/' + self.arch)
                self.grab_folder(
                    target_folder + ioc_folder + '/dbd',
                    top + '/' + module_name + ioc_folder + '/dbd')
                self.grab_folder(
                    target_folder + ioc_folder + '/iocBoot',
                    top + '/' + module_name + ioc_folder + '/iocBoot')
    try:
        if module.url_type == 'GIT_URL':
            current_loc = os.getcwd()
            LOG.debug('cd {}'.format(module.abs_path))
            os.chdir(module.abs_path)
            try:
                LOG.debug('git describe --tags')
                out = subprocess.check_output(
                    ['git', 'describe', '--tags'])
            finally:
                # BUG FIX: restore the working directory even when git
                # fails. Previously a CalledProcessError skipped the
                # chdir-back and left the process inside the module dir.
                LOG.debug('cd {}'.format(current_loc))
                os.chdir(current_loc)
            LOG.debug('Detected git tag/version: {} for module {}'.format(
                out.decode("utf-8").strip(), module.name))
            readme_fp.write('{:<16}- {}'.format(module_name,
                                                out.decode("utf-8")))
        else:
            LOG.debug('Detected version {} for module {}'.format(
                module.version, module.name))
            readme_fp.write('{:<16}- {}\n'.format(module_name,
                                                  module.version))
    except subprocess.CalledProcessError:
        # Best effort - version line is simply omitted from the readme.
        pass
def update_macros_file(self, macro_replace_list, target_dir,
                       target_filename, comment_unsupported=False,
                       with_ad=True, force=False):
    """Update the macro values in a single configure file.

    The original file is moved into an OLD_FILES subdirectory and a new
    file is written in its place with the requested macro values.

    Parameters
    ----------
    macro_replace_list : List of [str, str]
        list of macro-value pairs to replace
    target_dir : str
        location of target file
    target_filename : str
        name of the file
    comment_unsupported : bool
        if true, will comment out any macros that are in the file that are
        not in input list. Important for updating RELEASE in support/
    with_ad : bool
        if false, will comment out macros for area detector modules. used
        for RELEASE in support - AD is built separately
    force : bool
        if true, commented-out macros found in the file are uncommented
        and set to the new value
    """
    # Preserve the original file in OLD_FILES before rewriting it.
    old_files_dir = os.path.join(target_dir, 'OLD_FILES')
    if not os.path.exists(old_files_dir):
        os.mkdir(old_files_dir)
    os.rename(os.path.join(target_dir, target_filename),
              os.path.join(old_files_dir, target_filename))
    old_fp = open(os.path.join(old_files_dir, target_filename), "r")
    # EXAMPLE_-prefixed files are written out without the prefix.
    if target_filename.startswith("EXAMPLE_"):
        new_fp = open(os.path.join(target_dir, target_filename[8:]), "w")
    else:
        new_fp = open(os.path.join(target_dir, target_filename), "w")
    line = old_fp.readline()
    while line:
        original = line
        line = line.strip()
        if '=' in line:
            # FIX: was a redundant duplicate assignment
            # ('line = line = re.sub(...)').
            line = re.sub(' +', '', line)
            wrote_line = False
            for macro in macro_replace_list:
                if line.startswith(macro[0] + "=") and (
                        with_ad or (macro[0] not in self.ad_modules)):
                    if line.split('=', 1)[1] != macro[1]:
                        LOG.debug(
                            'Replacing macro {}: original val {}, new val {} in file {}'
                            .format(macro[0], line.split('=', 1)[1],
                                    macro[1], target_filename))
                    new_fp.write("{}={}\n".format(macro[0], macro[1]))
                    wrote_line = True
                elif line.startswith("#" + macro[0] + "="):
                    if line.split('=', 1)[1] != macro[1]:
                        LOG.debug(
                            'Updating commented macro {}: original val {}, new val {} in file {}'
                            .format(macro[0], line.split('=', 1)[1],
                                    macro[1], target_filename))
                    if force:
                        # force uncomments the macro and applies the value.
                        LOG.debug('Uncommenting commented macro {}'.format(
                            macro[0]))
                        new_fp.write("{}={}\n".format(macro[0], macro[1]))
                    else:
                        new_fp.write("#{}={}\n".format(macro[0], macro[1]))
                    wrote_line = True
            if not wrote_line:
                # Unrecognized macro line: optionally comment it out.
                if comment_unsupported and not line.startswith(
                        '#') and len(line) > 1:
                    new_fp.write("#" + original)
                else:
                    new_fp.write(original)
        else:
            new_fp.write(original)
        line = old_fp.readline()
    new_fp.close()
    old_fp.close()
def sync_module_tag(module_name, install_config, save_path=None):
    """Function that syncs module version tags with those hosted with git.

    This function is still buggy, and certain modules do not update
    correctly

    Parameters
    ----------
    module_name : str
        The name of the module to sync
    install_config : InstallConfiguration
        instance of install configuration for which to update tags
    save_path : str
        None by default. If set, will save the install configuration to the
        given location after updating.
    """
    module = install_config.get_module_by_name(module_name)
    # Only git modules pinned to a concrete tag (not master) and not on the
    # blacklist are eligible for tag syncing.
    if module.url_type == 'GIT_URL' and module.version != 'master' and \
            module.name not in update_tags_blacklist:
        account_repo = '{}{}'.format(module.url, module.repository)
        LOG.print_command("git ls-remote --tags {}".format(account_repo))
        sync_tags_proc = Popen(
            ['git', 'ls-remote', '--tags', account_repo],
            stdout=PIPE, stderr=PIPE)
        out, err = sync_tags_proc.communicate()
        ret = out.decode('utf-8')
        tags_temp = ret.splitlines()
        # ls-remote lines end in refs/tags/<tag>; keep only the tag name.
        tags = []
        for tag in tags_temp:
            tags.append(tag.rsplit('/')[-1])
        if len(tags) > 0:
            # Find the "best" (highest) tag by comparing the numeric
            # components of each tag name, left to right. R-prefixed tags
            # are always preferred over non-R tags.
            best_tag = tags[0]
            best_tag_ver_str_list = re.split(r'\D+', tags[0])
            best_tag_ver_str_list = [
                num for num in best_tag_ver_str_list if num.isnumeric()
            ]
            best_tag_version_numbers = list(map(int, best_tag_ver_str_list))
            for tag in tags:
                tag_ver_str_list = re.split(r'\D+', tag)
                tag_ver_str_list = [
                    num for num in tag_ver_str_list if num.isnumeric()
                ]
                tag_version_numbers = list(map(int, tag_ver_str_list))
                for i in range(len(tag_version_numbers)):
                    if best_tag.startswith('R') and not tag.startswith('R'):
                        # Current best is an R-tag; non-R tag cannot win.
                        break
                    elif not best_tag.startswith('R') and tag.startswith('R'):
                        # R-tags take precedence over non-R tags.
                        best_tag = tag
                        best_tag_version_numbers = tag_version_numbers
                        break
                    elif i == len(
                            best_tag_version_numbers
                    ) or tag_version_numbers[i] > best_tag_version_numbers[i]:
                        # Candidate is longer or numerically greater here.
                        best_tag = tag
                        best_tag_version_numbers = tag_version_numbers
                        break
                    elif tag_version_numbers[i] < best_tag_version_numbers[i]:
                        break
            # Compare the best remote tag against the module's current
            # version using the same numeric-component scheme.
            tag_updated = False
            module_ver_str_list = re.split(r'\D+', module.version)
            module_ver_str_list = [
                num for num in module_ver_str_list if num.isnumeric()
            ]
            module_version_numbers = list(map(int, module_ver_str_list))
            for i in range(len(best_tag_version_numbers)):
                if i == len(
                        module_version_numbers
                ) or best_tag_version_numbers[i] > module_version_numbers[i]:
                    tag_updated = True
                    LOG.write(
                        'Updating {} from version {} to version {}'.format(
                            module.name, module.version, best_tag))
                    module.version = best_tag
                    break
                elif best_tag_version_numbers[i] < module_version_numbers[i]:
                    break
            if not tag_updated:
                LOG.debug('Module {} already at latest version: {}'.format(
                    module.name, module.version))
    # Optionally persist the (possibly updated) configuration.
    if save_path is not None:
        writer = IO.config_writer.ConfigWriter(install_config)
        ret, message = writer.write_install_config(save_path,
                                                   overwrite_existing=True)
        LOG.write('Updated install config saved to {}'.format(save_path))
        return ret
    else:
        return True
def parse_install_config(self, config_filename="INSTALL_CONFIG",
                         force_location=None, allow_illegal=False):
    """Top level install config parser function

    Parses the self.path_to_configure/config_filename file

    Parameters
    ----------
    config_filename : str
        name of main config file, defaults to INSTALL_CONFIG
    force_location : str
        default to None. if set, will force the install location to its
        value instead of the one read from file
    allow_illegal : bool
        defaults to false. If True, will load install config even if
        install location is invalid

    Returns
    -------
    InstallConfiguration
        valid install_config object if parse was successful, or None
    str
        None if there is no error, or a message describing the error
    """
    # Check if exists
    if os.path.exists(self.configure_path + "/" + config_filename):
        # open the configure file
        install_file = open(self.configure_path + "/" + config_filename,
                            "r")
        # variables
        if install_file == None:
            return None, "Couldn't open install file"
        install_config = None
        # URL state carried across lines: module lines are attributed to
        # the most recently seen GIT_URL/WGET_URL definition.
        current_url = "dummy_url.com"
        current_url_type = "GIT_URL"
        install_loc = ""
        message = None
        line = install_file.readline()
        while line:
            line = line.strip()
            # Skip comments and blank/one-character lines.
            if not line.startswith('#') and len(line) > 1:
                # Check for install location
                if line.startswith("INSTALL="):
                    if force_location is None:
                        install_loc = line.split('=')[-1]
                        # Normalize away a trailing slash.
                        if install_loc.endswith('/'):
                            install_loc = install_loc[:-1]
                    else:
                        install_loc = force_location
                    if install_loc.startswith('/') and platform == 'win32':
                        LOG.debug(
                            'Using linux path on windows, prepending C: to path.'
                        )
                        install_loc = 'C:' + install_loc
                    # create install config object
                    install_config = IC.InstallConfiguration(
                        install_loc, self.configure_path)
                    # Error checking: negative = permission problem,
                    # zero = location does not exist yet (try to create).
                    if install_config.is_install_valid() < 0:
                        if not allow_illegal:
                            return None, 'Permission Error'
                        else:
                            message = 'Permission Error'
                    elif install_config.is_install_valid() == 0:
                        try:
                            os.mkdir(install_config.install_location)
                        except PermissionError:
                            if not allow_illegal:
                                return None, 'Permission denied to create install location'
                            else:
                                message = 'Permission denied to create install location'
                        except FileNotFoundError:
                            if not allow_illegal:
                                return None, 'Install filepath not valid'
                            else:
                                message = 'Install filepath not valid'
                # URL definition lines
                elif line.startswith("GIT_URL") or line.startswith(
                        "WGET_URL"):
                    current_url = line.split('=')[1]
                    if not current_url.endswith('/'):
                        current_url = current_url + '/'
                    current_url_type = line.split('=')[0]
                else:
                    # Parse individual module line
                    install_module = self.parse_line_to_module(
                        line, current_url, current_url_type)
                    # Modules parsed before INSTALL= is seen are dropped
                    # because install_config does not exist yet.
                    if install_module is not None and install_config is not None:
                        install_config.add_module(install_module)
            line = install_file.readline()
        install_file.close()
        # Read injectors and build flags
        if install_config is None:
            return None, 'Could not find INSTALL defined in given path'
        self.read_injector_files(install_config)
        self.read_build_flags(install_config)
        self.parse_custom_build_scripts(install_config)
        return install_config, message
    else:
        # Configure file not found
        return None, 'Configure Path not found'