def get_prebuild_package(build_space_abs, devel_space_abs, force):
    """This generates a minimal Catkin package used to generate Catkin
    environment setup files in a merged devel space.

    :param build_space_abs: The path to a merged build space
    :param devel_space_abs: The path to a merged devel space
        (currently unused; kept for API compatibility)
    :param force: Overwrite files if they exist
    :returns: source directory path
    """

    # Get the path to the prebuild package; this directory doubles as the
    # package's build directory inside the merged build space.
    prebuild_path = os.path.join(build_space_abs, 'catkin_tools_prebuild')
    # FIX: a single unconditional mkdir_p replaces both the former
    # existence-guarded call and a redundant trailing
    # mkdir_p(os.path.join(build_space_abs, 'catkin_tools_prebuild')),
    # which created the exact same path a second time.
    mkdir_p(prebuild_path)

    # Create CMakeLists.txt file
    cmakelists_txt_path = os.path.join(prebuild_path, 'CMakeLists.txt')
    if force or not os.path.exists(cmakelists_txt_path):
        with open(cmakelists_txt_path, 'wb') as cmakelists_txt:
            cmakelists_txt.write(
                SETUP_PREBUILD_CMAKELISTS_TEMPLATE.encode('utf-8'))

    # Create package.xml file
    package_xml_path = os.path.join(prebuild_path, 'package.xml')
    if force or not os.path.exists(package_xml_path):
        with open(package_xml_path, 'wb') as package_xml:
            package_xml.write(
                SETUP_PREBUILD_PACKAGE_XML_TEMPLATE.encode('utf-8'))

    return prebuild_path
def get_prebuild_package(build_space_abs, devel_space_abs, force):
    """This generates a minimal Catkin package used to generate Catkin
    environment setup files in a merged devel space.

    :param build_space_abs: The path to a merged build space
    :param devel_space_abs: The path to a merged devel space
        (currently unused; kept for API compatibility)
    :param force: Overwrite files if they exist
    :returns: source directory path
    """

    # The prebuild package's source directory, which also serves as its
    # build directory in the merged build space.
    prebuild_path = os.path.join(build_space_abs, 'catkin_tools_prebuild')
    # FIX: dropped the duplicate trailing call that re-created this exact
    # same path ("Create the build directory for this package") — one
    # idempotent mkdir_p is sufficient.
    mkdir_p(prebuild_path)

    # Create CMakeLists.txt file
    cmakelists_txt_path = os.path.join(prebuild_path, 'CMakeLists.txt')
    if force or not os.path.exists(cmakelists_txt_path):
        with open(cmakelists_txt_path, 'wb') as cmakelists_txt:
            cmakelists_txt.write(SETUP_PREBUILD_CMAKELISTS_TEMPLATE.encode('utf-8'))

    # Create package.xml file
    package_xml_path = os.path.join(prebuild_path, 'package.xml')
    if force or not os.path.exists(package_xml_path):
        with open(package_xml_path, 'wb') as package_xml:
            package_xml.write(SETUP_PREBUILD_PACKAGE_XML_TEMPLATE.encode('utf-8'))

    return prebuild_path
def generate_package_summary(logger, event_queue, package, package_path, rosdoc_conf, output_path):
    """Generate the top-level ``index.rst`` summary page for a package.

    :param logger: stage logger (unused; FunctionStage signature)
    :param event_queue: event queue (unused; FunctionStage signature)
    :param package: parsed catkin package manifest object
    :param package_path: path to the package's source directory
    :param rosdoc_conf: list of rosdoc builder configuration dicts, or falsy
    :param output_path: directory in which ``index.rst`` is written
    :returns: 0 on success
    """
    mkdir_p(output_path)

    with open(os.path.join(output_path, 'index.rst'), 'w') as f:
        f.write('%s\n' % package.name)
        f.write('=' * 50 + '\n\n')

        # FIX: removed the dead Python 2 compatibility branch
        # (`if str == bytes: description = package.description.encode(...)`).
        # This module already uses Python-3-only syntax (function
        # annotations), so that branch could never execute.
        description = package.description

        f.write('.. raw:: html\n\n')
        f.write('  <p>' + description + '</p>\n\n')

        if package.maintainers:
            f.write('**Maintainers:** %s\n\n' % ', '.join(_get_person_links(package.maintainers)))
        if package.authors:
            f.write('**Authors:** %s\n\n' % ', '.join(_get_person_links(package.authors)))
        f.write('**License:** %s\n\n' % ', '.join(package.licenses))
        f.write('**Source:** ? \n\n')

        if rosdoc_conf:
            f.write('**API:** ')
            for conf in rosdoc_conf:
                rosdoc_link = os.path.join('html', conf.get('output_dir', ''), 'index.html')
                rosdoc_name = conf.get('name', conf['builder'])
                f.write("`%s <%s>`_ " % (rosdoc_name, rosdoc_link))
            f.write('\n\n')

        # NOTE(review): exact indentation of the reST literals below was lost
        # in formatting; reconstructed with conventional toctree indents.
        f.write("""
.. toctree::
   :titlesonly:

""")
        if os.path.exists(os.path.join(output_path, 'msg/index.rst')):
            f.write("   Messages <msg/index>\n")
        if os.path.exists(os.path.join(output_path, 'srv/index.rst')):
            f.write("   Services <srv/index>\n")
        if os.path.exists(os.path.join(output_path, 'action/index.rst')):
            f.write("   Actions <action/index>\n")

        # Symlink the package's changelog next to the index so it can be
        # pulled into the toctree.
        changelog_path = os.path.join(package_path, 'CHANGELOG.rst')
        changelog_symlink_path = os.path.join(output_path, 'CHANGELOG.rst')
        if os.path.exists(changelog_path) and not os.path.exists(changelog_symlink_path):
            os.symlink(changelog_path, changelog_symlink_path)
        if os.path.exists(changelog_symlink_path):
            f.write("   Changelog <CHANGELOG>\n")

    return 0
def generate_services(logger, event_queue, package, package_path, output_path):
    """Generate reST pages documenting a package's service types.

    Imports the package's generated ``<package>.srv`` Python module and
    writes one ``srv/<Name>.rst`` page per service plus an index page. If
    the module cannot be imported (package defines no services or is not
    built), this is a no-op.

    :param logger: stage logger (unused; FunctionStage signature)
    :param event_queue: event queue (unused; FunctionStage signature)
    :param package: parsed catkin package manifest object
    :param package_path: path to the package's source directory (unused)
    :param output_path: documentation output directory for the package
    :returns: 0
    """
    try:
        srv_module = __import__(package.name + '.srv').srv
        # Service classes are CamelCase; skip module internals.
        srv_names = [
            srv_name for srv_name in dir(srv_module)
            if re.match('^[A-Z]', srv_name)
        ]
    except Exception:
        # FIX: narrowed from a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt. Any ordinary import failure still
        # just means "no services to document".
        srv_names = []

    if srv_names:
        mkdir_p(os.path.join(output_path, 'srv'))
        with open(os.path.join(output_path, 'srv/index.rst'), 'w') as f:
            f.write('%s » Services\n' % package.name)
            f.write('=' * 50 + '\n')
            # NOTE(review): exact indentation of this reST literal was lost
            # in formatting; reconstructed with conventional toctree indents.
            f.write("""
.. toctree::
   :titlesonly:
   :glob:

   *
""")

        for srv_name in srv_names:
            srv_type = getattr(srv_module, srv_name)
            # Only document real service types; they carry request/response
            # classes, unlike other CamelCase names in the module.
            if hasattr(srv_type, '_request_class'):
                with open(os.path.join(output_path, 'srv', '%s.rst' % srv_name), 'w') as f:
                    f.write('%s\n' % srv_name)
                    f.write('=' * 50 + '\n\n')
                    f.write('Request Definition::\n\n')
                    _write_raw(f, srv_type._request_class)
                    f.write('Response Definition::\n\n')
                    _write_raw(f, srv_type._response_class)

    return 0
def generate_messages(logger, event_queue, package, package_path, output_path):
    """Generate reST pages documenting a package's message types.

    Imports the package's generated ``<package>.msg`` Python module and
    writes one ``msg/<Name>.rst`` page per message plus an index page. If
    the module cannot be imported (package defines no messages or is not
    built), this is a no-op.

    :param logger: stage logger (unused; FunctionStage signature)
    :param event_queue: event queue (unused; FunctionStage signature)
    :param package: parsed catkin package manifest object
    :param package_path: path to the package's source directory (unused)
    :param output_path: documentation output directory for the package
    :returns: 0
    """
    try:
        msg_module = __import__(package.name + '.msg').msg
        # Message classes are CamelCase; skip module internals.
        msg_names = [
            msg_name for msg_name in dir(msg_module)
            if re.match('^[A-Z]', msg_name)
        ]
    except Exception:
        # FIX: narrowed from a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt. Any ordinary import failure still
        # just means "no messages to document".
        msg_names = []

    if msg_names:
        mkdir_p(os.path.join(output_path, 'msg'))
        with open(os.path.join(output_path, 'msg/index.rst'), 'w') as f:
            f.write('%s » Messages\n' % package.name)
            f.write('=' * 50 + '\n')
            # NOTE(review): exact indentation of this reST literal was lost
            # in formatting; reconstructed with conventional toctree indents.
            f.write("""
.. toctree::
   :titlesonly:
   :glob:

   *
""")

        for msg_name in msg_names:
            msg_type = getattr(msg_module, msg_name)
            with open(os.path.join(output_path, 'msg', '%s.rst' % msg_name), 'w') as f:
                f.write('%s\n' % msg_name)
                f.write('=' * 50 + '\n\n')
                f.write('Definition::\n\n')
                _write_raw(f, msg_type)

    return 0
def ctr_nuke(logger, event_queue, prefix):
    """Adds a shell script which clears the catkin and ros test results dir."""
    # Env-hooks under etc/catkin/profile.d are sourced by the space's setup
    # files; the 06- prefix controls ordering among hooks.
    hook_dir = os.path.join(prefix, 'etc', 'catkin', 'profile.d')
    hook_path = os.path.join(hook_dir, '06-ctr-nuke.sh')
    mkdir_p(hook_dir)
    # Never clobber an existing hook.
    if os.path.exists(hook_path):
        return 0
    with open(hook_path, 'w') as hook_file:
        hook_file.write(CTR_NUKE_SH)
    return 0
def ctr_nuke(logger, event_queue, prefix):
    """Adds an env-hook which clears the catkin and ros test results dir."""
    # Hooks in etc/catkin/profile.d are sourced by the generated setup files.
    profile_dir = os.path.join(prefix, 'etc', 'catkin', 'profile.d')
    mkdir_p(profile_dir)
    hook_filename = os.path.join(profile_dir, '06-ctr-nuke.sh')
    # Only write the hook if it is not already present.
    if not os.path.exists(hook_filename):
        with open(hook_filename, 'w') as f:
            f.write(CTR_NUKE_SH)
    return 0
def ctr_nuke(logger, event_queue, prefix):
    """Adds an env-hook which clears the catkin and ros test results dir."""
    # The hook lives alongside other profile.d env-hooks under the prefix.
    nuke_dir = os.path.join(prefix, "etc", "catkin", "profile.d")
    nuke_script = os.path.join(nuke_dir, "06-ctr-nuke.sh")
    mkdir_p(nuke_dir)
    # Idempotent: leave any existing hook untouched.
    if not os.path.exists(nuke_script):
        with open(nuke_script, "w") as out:
            out.write(CTR_NUKE_SH)
    return 0
def generate_doxygen_config(logger, event_queue, conf, package, recursive_build_deps, output_path, source_path, docs_build_path):
    """Write a Doxyfile for this package into its docs build directory.

    :param logger: stage logger (unused; FunctionStage signature)
    :param event_queue: event queue (unused; FunctionStage signature)
    :param conf: rosdoc doxygen builder configuration dict
    :param package: parsed catkin package manifest object
    :param recursive_build_deps: names of in-workspace build dependencies
    :param output_path: root output directory for this package's docs
    :param source_path: path to the package source to document
    :param docs_build_path: per-package docs build directory
    :returns: 0 on success
    """
    # No custom HTML header/footer is configured; left empty in the Doxyfile.
    header_filename = ''
    footer_filename = ''

    output_subdir = os.path.join('html', conf.get('output_dir', ''), '')
    output_dir = os.path.join(output_path, output_subdir)
    mkdir_p(output_dir)

    # Seed the tagfile list with tags for the standard library.
    cppreference_tagfile = pkg_resources.resource_filename(
        'catkin_tools_document', 'external/cppreference-doxygen-web.tag.xml')
    tagfiles = ['%s=%s' % (cppreference_tagfile, 'http://en.cppreference.com/w/')]

    # Link up doxygen for all in-workspace build dependencies.
    for build_depend_name in recursive_build_deps:
        depend_docs_tagfile = os.path.join(docs_build_path, '..', build_depend_name, 'tags')
        if not os.path.exists(depend_docs_tagfile):
            continue
        # The dependency's docs build records its own output subdir here.
        with open(os.path.join(docs_build_path, '..', build_depend_name, 'subdir')) as f:
            subdir = f.read()
        # Climb out of our own output subdir, then descend into the
        # dependency's docs directory.
        depend_docs_relative_path = '../' * len(output_subdir.split(os.sep)) + \
            '%s/%s' % (build_depend_name, subdir)
        tagfiles.append('%s=%s' % (depend_docs_tagfile, depend_docs_relative_path))

    # Start from the base configuration and layer on per-package settings.
    doxyfile_conf = copy.copy(_base_config)
    doxyfile_conf.update({
        'ALIASES': conf.get('aliases', ''),
        'EXAMPLE_PATTERNS': conf.get('example_patterns', ''),
        'EXCLUDE_PATTERNS': conf.get('exclude_patterns', ''),
        'EXCLUDE_SYMBOLS': conf.get('exclude_symbols', ''),
        'GENERATE_HTML': True,
        'GENERATE_XML': True,
        'HTML_FOOTER': footer_filename,
        'HTML_HEADER': header_filename,
        'HTML_OUTPUT': output_dir,
        'IMAGE_PATH': conf.get('image_path', source_path),
        'INPUT': source_path,
        'PROJECT_NAME': package.name,
        'OUTPUT_DIRECTORY': output_path,
        'TAB_SIZE': conf.get('tab_size', '8'),
        'TAGFILES': ' '.join(tagfiles),
        'USE_MATHJAX': True
    })

    with open(os.path.join(docs_build_path, 'Doxyfile'), 'w') as f:
        _write_config(f, doxyfile_conf)

    return 0
def generate_doxygen_config(logger, event_queue, conf, package, recursive_build_deps,
                            output_path, source_path, docs_build_path):
    """Write a Doxyfile for this package into its docs build directory.

    :param logger: stage logger (unused; FunctionStage signature)
    :param event_queue: event queue (unused; FunctionStage signature)
    :param conf: rosdoc doxygen builder configuration dict
    :param package: parsed catkin package manifest object
    :param recursive_build_deps: names of in-workspace build dependencies
    :param output_path: root output directory for this package's docs
    :param source_path: path to the package source to document
    :param docs_build_path: per-package docs build directory
    :returns: 0 on success
    """
    # No custom HTML header/footer is configured; left empty in the Doxyfile.
    header_filename = ''
    footer_filename = ''

    output_subdir = os.path.join('html', conf.get('output_dir', ''), '')
    output_dir = os.path.join(output_path, output_subdir)
    mkdir_p(output_dir)

    tagfiles = []

    # Add tags for the standard library.
    cppreference_tagfile = pkg_resources.resource_filename('catkin_tools_document',
                                                           'external/cppreference-doxygen-web.tag.xml')
    tagfiles.append('%s=%s' % (cppreference_tagfile, 'https://en.cppreference.com/w/'))

    # Link up doxygen for all in-workspace build dependencies.
    for build_depend_name in recursive_build_deps:
        depend_docs_tagfile = os.path.join(docs_build_path, '..', build_depend_name, 'tags')
        if os.path.isfile(depend_docs_tagfile):
            # The dependency's doxygen run recorded its output dir in this
            # marker file; link to it relative to our own output dir.
            with open(os.path.join(docs_build_path, '..', build_depend_name,
                                   output_dir_file('doxygen'))) as f:
                depend_output_dir = f.read()
            depend_docs_relative_path = os.path.relpath(depend_output_dir, output_dir)
            tagfiles.append('%s=%s' % (depend_docs_tagfile, depend_docs_relative_path))

    # Optional markdown file used as the doxygen main page, resolved
    # relative to the package source directory.
    mdfile = conf.get('use_mdfile_as_mainpage', '')
    if mdfile:
        mdfile = os.path.join(source_path, mdfile)

    # Start from the base configuration and layer on per-package settings.
    doxyfile_conf = copy.copy(_base_config)
    doxyfile_conf.update({
        'ALIASES': conf.get('aliases', ''),
        'EXAMPLE_PATTERNS': conf.get('example_patterns', ''),
        'EXCLUDE_PATTERNS': conf.get('exclude_patterns', ''),
        'EXCLUDE_SYMBOLS': conf.get('exclude_symbols', ''),
        'FILE_PATTERNS': conf.get('file_patterns', doxyfile_conf['FILE_PATTERNS']),  # Use predefined values as default if not defined
        'GENERATE_HTML': True,
        'GENERATE_XML': True,
        'SEARCHENGINE': True,
        'HTML_FOOTER': footer_filename,
        'HTML_HEADER': header_filename,
        'HTML_OUTPUT': output_dir,
        'IMAGE_PATH': conf.get('image_path', source_path),
        # NOTE: when no mdfile is configured this joins with an empty string,
        # leaving a trailing space in INPUT (harmless to doxygen).
        'INPUT': " ".join([source_path, mdfile]),
        'PROJECT_NAME': package.name,
        'OUTPUT_DIRECTORY': output_path,
        'TAB_SIZE': conf.get('tab_size', '8'),
        'TAGFILES': ' '.join(tagfiles),
        'USE_MATHJAX': True,
        'USE_MDFILE_AS_MAINPAGE': mdfile
    })

    with open(os.path.join(docs_build_path, 'Doxyfile'), 'w') as f:
        _write_config(f, doxyfile_conf)

    return 0
def __init__(self, label, job_id, stage_label, event_queue, log_path):
    """Set up IO capture for one (job, stage) pair, backed by a logfile.

    :param label: label of the job this stage belongs to
    :param job_id: unique identifier of the job (used as logfile subdir)
    :param stage_label: label of this stage within the job
    :param event_queue: queue used to publish execution events
    :param log_path: root directory under which logfiles are written
    """
    self.label = label
    self.job_id = job_id
    self.stage_label = stage_label
    self.event_queue = event_queue
    self.log_path = log_path
    self.is_open = False

    # Per-stream capture buffers (bytes, interleaved keeps arrival order).
    self.stdout_buffer = b""
    self.stderr_buffer = b""
    self.interleaved_buffer = b""

    # Construct the logfile path for this job and stage:
    # <log_path>/<job_id>/<label>.<stage_label>.log
    logfile_dir_path = os.path.join(log_path, self.job_id)
    self.logfile_basename = os.path.join(
        logfile_dir_path, '.'.join([self.label, self.stage_label]))
    self.logfile_name = '{}.log'.format(self.logfile_basename)

    # Create the logfile dir if it doesn't exist
    if not os.path.exists(logfile_dir_path):
        mkdir_p(logfile_dir_path)

    # Get the existing number of logfiles: any previous runs leave
    # `<basename>.<index>.log` files; the index is the second-to-last
    # dot-separated component.
    # TODO: Make this number global across all build stages
    existing_logfile_indices = sorted([
        int(lf.split('.')[-2])
        for lf in glob('{}.*.log'.format(self.logfile_basename))
    ])
    if len(existing_logfile_indices) == 0:
        self.logfile_index = 0
    else:
        # Continue numbering after the highest existing index.
        self.logfile_index = 1 + existing_logfile_indices[-1]

    # Generate the logfile name (zero-padded to 3 digits).
    self.unique_logfile_name = '{}.{:0>{}}.log'.format(
        self.logfile_basename, self.logfile_index, 3)

    # Remove colliding file if necessary (stale logfile from a prior run).
    if os.path.exists(self.logfile_name):
        os.unlink(self.logfile_name)

    # Open logfile in binary mode; captured output is raw bytes.
    self.log_file = open(self.logfile_name, 'wb')
    self.is_open = True
def __init__(self, label, job_id, stage_label, event_queue, log_path):
    """Set up IO capture for one (job, stage) pair, backed by a logfile.

    :param label: label of the job this stage belongs to
    :param job_id: unique identifier of the job (used as logfile subdir)
    :param stage_label: label of this stage within the job
    :param event_queue: queue used to publish execution events
    :param log_path: root directory under which logfiles are written
    """
    self.label = label
    self.job_id = job_id
    self.stage_label = stage_label
    self.event_queue = event_queue
    self.log_path = log_path
    self.is_open = False

    # Per-stream capture buffers.
    self.stdout_buffer = b""
    self.stderr_buffer = b""
    self.interleaved_buffer = b""

    # Logfile lives at <log_path>/<job_id>/<label>.<stage_label>.log
    stage_dir = os.path.join(log_path, self.job_id)
    self.logfile_basename = os.path.join(stage_dir, ".".join([self.label, self.stage_label]))
    self.logfile_name = "{}.log".format(self.logfile_basename)

    if not os.path.exists(stage_dir):
        mkdir_p(stage_dir)

    # Find the next free index among archived `<basename>.<index>.log`
    # files left by previous runs.
    # TODO: Make this number global across all build stages
    indices = sorted(
        int(path.split(".")[-2])
        for path in glob("{}.*.log".format(self.logfile_basename))
    )
    self.logfile_index = (indices[-1] + 1) if indices else 0

    # Zero-padded, uniquely-indexed name used for archived copies.
    self.unique_logfile_name = "{}.{:0>{}}.log".format(self.logfile_basename, self.logfile_index, 3)

    # Replace any stale logfile from a previous run.
    if os.path.exists(self.logfile_name):
        os.unlink(self.logfile_name)

    # Captured output is raw bytes, so open in binary mode.
    self.log_file = open(self.logfile_name, "wb")
    self.is_open = True
def yaml_dump_file(logger, event_queue, contents: Any, dest_path: str, dumper=yaml.SafeDumper) -> int:
    """
    FunctionStage functor that serializes an object accepted by the yaml
    dumper to a file, creating parent directories as needed. An existing
    file at *dest_path* is overwritten.

    :param logger:
    :param event_queue:
    :param contents: Object which is dumped to the yaml file.
    :param dest_path: File to which the contents should be written
    :param dumper: Yaml dumper to use (default: yaml.SafeDumper)
    :return: return code
    """
    parent_dir = os.path.dirname(dest_path)
    mkdir_p(parent_dir)
    with open(dest_path, 'w') as dest_file:
        yaml.dump(contents, dest_file, dumper)
    return 0
def write_file(logger, event_queue, contents: Any, dest_path: str, mode: str = 'w') -> int:
    """
    FunctionStage functor that writes contents to a file, creating parent
    directories as needed. An existing file at *dest_path* is overwritten
    (subject to *mode*).

    :param logger:
    :param event_queue:
    :param contents: Contents to write
    :param dest_path: File to which the contents should be written
    :param mode: file mode (default: 'w')
    :return: return code
    """
    parent_dir = os.path.dirname(dest_path)
    mkdir_p(parent_dir)
    with open(dest_path, mode) as dest_file:
        dest_file.write(contents)
    return 0
def copy_install_manifest(
        logger, event_queue,
        src_install_manifest_path,
        dst_install_manifest_path):
    """Copy the install manifest file from one path to another.

    If the source manifest does not exist (nothing was installed), an empty
    manifest is created at the destination for completeness.
    """
    # Both paths are directories containing the CMake install manifest file.
    src_file = os.path.join(src_install_manifest_path, CMAKE_INSTALL_MANIFEST_FILENAME)
    dst_file = os.path.join(dst_install_manifest_path, CMAKE_INSTALL_MANIFEST_FILENAME)

    # Create the directory for the manifest if it doesn't exist
    mkdir_p(dst_install_manifest_path)

    if not os.path.exists(src_file):
        # Didn't actually install anything, so create an empty manifest for completeness
        logger.err("Warning: No targets installed.")
        # `open(..., 'a')` + utime is a portable "touch".
        with open(dst_file, 'a'):
            os.utime(dst_file, None)
    else:
        # Copy the install manifest
        shutil.copyfile(src_file, dst_file)

    return 0
def link_devel_products(
        logger, event_queue,
        package, package_path,
        devel_manifest_path,
        source_devel_path,
        dest_devel_path,
        metadata_path,
        prebuild):
    """Link files from an isolated devel space into a merged one.

    This creates directories and symlinks in a merged devel space to a
    package's linked devel space.

    :param logger: logger with ``out``/``err`` methods for progress output
    :param event_queue: event queue, forwarded to :func:`clean_linked_files`
    :param package: the catkin package being linked (unused here)
    :param package_path: path to the package source directory
    :param devel_manifest_path: directory holding this package's devel manifest
    :param source_devel_path: the package's isolated devel space (link source)
    :param dest_devel_path: the merged devel space (link destination)
    :param metadata_path: workspace metadata path used when cleaning stale links
    :param prebuild: True if this is the catkin_tools prebuild package
    :returns: 0 on success, -1 if a destination directory could not be created
    """

    # Create the devel manifest path if necessary
    mkdir_p(devel_manifest_path)

    # Construct manifest file path
    devel_manifest_file_path = os.path.join(devel_manifest_path, DEVEL_MANIFEST_FILENAME)

    # Pairs of (source, dest) files or directories produced by this package
    products = list()
    # Files from the previous manifest that are no longer produced
    files_to_clean = []
    # Destination files which already exist and differ from the source
    files_that_collide = []

    # Select the blacklist (the prebuild package is allowed to link setup files)
    blacklist = DEVEL_LINK_PREBUILD_BLACKLIST if prebuild else DEVEL_LINK_BLACKLIST

    # Gather all of the files in the devel space
    for source_path, dirs, files in os.walk(source_devel_path):
        # compute destination path
        dest_path = os.path.join(dest_devel_path, os.path.relpath(source_path, source_devel_path))

        # create directories in the destination develspace
        for dirname in dirs:
            source_dir = os.path.join(source_path, dirname)
            dest_dir = os.path.join(dest_path, dirname)

            if os.path.islink(source_dir):
                # Linked directories are tracked like files
                products.append((source_dir, dest_dir))
                if os.path.exists(dest_dir):
                    if os.path.realpath(dest_dir) != os.path.realpath(source_dir):
                        files_that_collide.append(dest_dir)
                    else:
                        logger.out('Linked: ({}, {})'.format(source_dir, dest_dir))
                else:
                    # Create a symlink
                    # FIX: log dest_dir (was `dest_file`, which is undefined
                    # on the first iteration and stale afterwards)
                    logger.out('Symlinking %s' % (dest_dir))
                    try:
                        os.symlink(source_dir, dest_dir)
                    except OSError:
                        logger.err('Could not create symlink `{}` referencing `{}`'.format(dest_dir, source_dir))
                        raise
            else:
                if not os.path.exists(dest_dir):
                    # Create the dest directory if it doesn't exist
                    os.mkdir(dest_dir)
                elif not os.path.isdir(dest_dir):
                    logger.err('Error: Cannot create directory: {}'.format(dest_dir))
                    return -1

        # create symbolic links from the source to the dest
        for filename in files:
            # Don't link files on the blacklist unless this is a prebuild package
            if os.path.relpath(os.path.join(source_path, filename), source_devel_path) in blacklist:
                continue

            source_file = os.path.join(source_path, filename)
            dest_file = os.path.join(dest_path, filename)

            # Store the source/dest pair
            products.append((source_file, dest_file))

            # Check if the symlink exists
            if os.path.exists(dest_file):
                if os.path.realpath(dest_file) != os.path.realpath(source_file):
                    # Compute hashes for colliding files
                    # FIX: use context managers so the file handles don't leak
                    with open(os.path.realpath(source_file)) as sf:
                        source_hash = md5(sf.read().encode('utf-8')).hexdigest()
                    with open(os.path.realpath(dest_file)) as df:
                        dest_hash = md5(df.read().encode('utf-8')).hexdigest()
                    # If the link links to a different file, report a warning and increment
                    # the collision counter for this path
                    if dest_hash != source_hash:
                        logger.err('Warning: Cannot symlink from %s to existing file %s' % (source_file, dest_file))
                        logger.err('Warning: Source hash: {}'.format(source_hash))
                        logger.err('Warning: Dest hash: {}'.format(dest_hash))
                        # Increment link collision counter
                        files_that_collide.append(dest_file)
                else:
                    logger.out('Linked: ({}, {})'.format(source_file, dest_file))
            else:
                # Create the symlink
                logger.out('Symlinking %s' % (dest_file))
                try:
                    os.symlink(source_file, dest_file)
                except OSError:
                    logger.err('Could not create symlink `{}` referencing `{}`'.format(dest_file, source_file))
                    raise

    # Load the old list of symlinked files for this package
    if os.path.exists(devel_manifest_file_path):
        with open(devel_manifest_file_path, 'r') as devel_manifest:
            manifest_reader = csv.reader(devel_manifest, delimiter=' ', quotechar='"')
            # Skip the package source directory
            devel_manifest.readline()
            # Read the previously-generated products
            for source_file, dest_file in manifest_reader:
                if (source_file, dest_file) not in products:
                    # Clean the file or decrement the collision count
                    logger.out('Cleaning: (%s, %s)' % (source_file, dest_file))
                    files_to_clean.append(dest_file)

    # Remove all listed symlinks and empty directories which have been removed
    # after this build, and update the collision file
    try:
        clean_linked_files(logger, event_queue, metadata_path, files_that_collide, files_to_clean, dry_run=False)
    except Exception:
        # FIX: narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are not intercepted; the failure is still logged and re-raised.
        logger.err('Could not clean linked files.')
        raise

    # Save the list of symlinked files
    with open(devel_manifest_file_path, 'w') as devel_manifest:
        # Write the path to the package source directory
        devel_manifest.write('%s\n' % package_path)
        # Write all the products
        manifest_writer = csv.writer(devel_manifest, delimiter=' ', quotechar='"')
        for source_file, dest_file in products:
            manifest_writer.writerow([source_file, dest_file])

    return 0
def link_devel_products(logger, event_queue, package, package_path,
                        devel_manifest_path, source_devel_path,
                        dest_devel_path, metadata_path, prebuild):
    """Link files from an isolated devel space into a merged one.

    This creates directories and symlinks in a merged devel space to a
    package's linked devel space.

    :param logger: logger with ``out``/``err`` methods for progress output
    :param event_queue: event queue, forwarded to :func:`clean_linked_files`
    :param package: the catkin package being linked (unused here)
    :param package_path: path to the package source directory
    :param devel_manifest_path: directory holding this package's devel manifest
    :param source_devel_path: the package's isolated devel space (link source)
    :param dest_devel_path: the merged devel space (link destination)
    :param metadata_path: workspace metadata path used when cleaning stale links
    :param prebuild: True if this is the catkin_tools prebuild package
    :returns: 0 on success, -1 if a destination directory could not be created
    """

    # Create the devel manifest path if necessary
    mkdir_p(devel_manifest_path)

    # Construct manifest file path
    devel_manifest_file_path = os.path.join(devel_manifest_path, DEVEL_MANIFEST_FILENAME)

    # Pair of source/dest files or directories
    products = list()
    # List of files to clean
    files_to_clean = []
    # List of files that collide
    files_that_collide = []

    # Select the blacklist
    blacklist = DEVEL_LINK_PREBUILD_BLACKLIST if prebuild else DEVEL_LINK_BLACKLIST

    # Gather all of the files in the devel space
    for source_path, dirs, files in os.walk(source_devel_path):
        # compute destination path
        dest_path = os.path.join(
            dest_devel_path, os.path.relpath(source_path, source_devel_path))

        # create directories in the destination develspace
        for dirname in dirs:
            source_dir = os.path.join(source_path, dirname)
            dest_dir = os.path.join(dest_path, dirname)

            if os.path.islink(source_dir):
                # Store the source/dest pair
                products.append((source_dir, dest_dir))
                if os.path.exists(dest_dir):
                    if os.path.realpath(dest_dir) != os.path.realpath(source_dir):
                        files_that_collide.append(dest_dir)
                    else:
                        logger.out('Linked: ({}, {})'.format(source_dir, dest_dir))
                else:
                    # Create a symlink
                    # FIX: log dest_dir (was `dest_file`, undefined on the
                    # first iteration and stale afterwards)
                    logger.out('Symlinking %s' % (dest_dir))
                    try:
                        os.symlink(source_dir, dest_dir)
                    except OSError:
                        logger.err(
                            'Could not create symlink `{}` referencing `{}`'.format(dest_dir, source_dir))
                        raise
            else:
                if not os.path.exists(dest_dir):
                    # Create the dest directory if it doesn't exist
                    os.mkdir(dest_dir)
                elif not os.path.isdir(dest_dir):
                    logger.err('Error: Cannot create directory: {}'.format(dest_dir))
                    return -1

        # create symbolic links from the source to the dest
        for filename in files:
            # Don't link files on the blacklist unless this is a prebuild package
            if os.path.relpath(os.path.join(source_path, filename), source_devel_path) in blacklist:
                continue

            source_file = os.path.join(source_path, filename)
            dest_file = os.path.join(dest_path, filename)

            # Store the source/dest pair
            products.append((source_file, dest_file))

            # Check if the symlink exists
            if os.path.exists(dest_file):
                if os.path.realpath(dest_file) != os.path.realpath(source_file):
                    # Compute hashes for colliding files
                    # FIX: context managers prevent leaked file handles
                    with open(os.path.realpath(source_file)) as sf:
                        source_hash = md5(sf.read().encode('utf-8')).hexdigest()
                    with open(os.path.realpath(dest_file)) as df:
                        dest_hash = md5(df.read().encode('utf-8')).hexdigest()
                    # If the link links to a different file, report a warning and increment
                    # the collision counter for this path
                    if dest_hash != source_hash:
                        logger.err('Warning: Cannot symlink from %s to existing file %s'
                                   % (source_file, dest_file))
                        logger.err('Warning: Source hash: {}'.format(source_hash))
                        logger.err('Warning: Dest hash: {}'.format(dest_hash))
                        # Increment link collision counter
                        files_that_collide.append(dest_file)
                else:
                    logger.out('Linked: ({}, {})'.format(source_file, dest_file))
            else:
                # Create the symlink
                logger.out('Symlinking %s' % (dest_file))
                try:
                    os.symlink(source_file, dest_file)
                except OSError:
                    logger.err(
                        'Could not create symlink `{}` referencing `{}`'.format(dest_file, source_file))
                    raise

    # Load the old list of symlinked files for this package
    if os.path.exists(devel_manifest_file_path):
        with open(devel_manifest_file_path, 'r') as devel_manifest:
            manifest_reader = csv.reader(devel_manifest, delimiter=' ', quotechar='"')
            # Skip the package source directory
            devel_manifest.readline()
            # Read the previously-generated products
            for source_file, dest_file in manifest_reader:
                if (source_file, dest_file) not in products:
                    # Clean the file or decrement the collision count
                    logger.out('Cleaning: (%s, %s)' % (source_file, dest_file))
                    files_to_clean.append(dest_file)

    # Remove all listed symlinks and empty directories which have been removed
    # after this build, and update the collision file
    try:
        clean_linked_files(logger, event_queue, metadata_path, files_that_collide,
                           files_to_clean, dry_run=False)
    except Exception:
        # FIX: narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are not intercepted; the failure is still logged and re-raised.
        logger.err('Could not clean linked files.')
        raise

    # Save the list of symlinked files
    with open(devel_manifest_file_path, 'w') as devel_manifest:
        # Write the path to the package source directory
        devel_manifest.write('%s\n' % package_path)
        # Write all the products
        manifest_writer = csv.writer(devel_manifest, delimiter=' ', quotechar='"')
        for source_file, dest_file in products:
            manifest_writer.writerow([source_file, dest_file])

    return 0
def makedirs(logger, event_queue, path):
    """FunctionStage functor that creates *path* and any missing parents.

    :param logger: stage logger (unused; FunctionStage signature)
    :param event_queue: event queue (unused; FunctionStage signature)
    :param path: directory path to create
    :returns: 0
    """
    mkdir_p(path)
    return 0