def run(self, timeout=None):
    return_code = None
    command = self.job.command
    if timeout is not None:
        command = ['timeout', str(timeout)] + self.job.command
    if not os.path.exists(self.output_dir):
        os.makedirs(self.output_dir)
    with working_directory(self.test_dir):
        with codecs.open(settings.OUTPUT_FILENAME, 'w', 'utf-8') as f:
            try:
                return_code = subprocess.call(command, stdout=f, stderr=subprocess.STDOUT)
                if return_code == 124:
                    # return code from the timeout command when it expires
                    f.write('\n***** TIMEOUT ERROR *****\n')
            except OSError:
                f.write("Error trying to run command.")
        for filename in self.job.output_files:
            if os.path.exists(filename):
                dest_file = os.path.join(self.output_dir, filename)
                shutil.copyfile(filename, dest_file)
    return return_code
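This snippet, and every one that follows, leans on a working_directory (or utils.working_directory) context manager whose definition is not included in this collection. A minimal sketch of the conventional pattern (an assumption, not the exact helper these projects ship) looks like:

import os
from contextlib import contextmanager

@contextmanager
def working_directory(path):
    """Temporarily chdir into path, restoring the old cwd on exit (assumed implementation)."""
    previous = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        # restore the original directory even if the body raised
        os.chdir(previous)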
def target(self, run):
    with working_directory(os.path.dirname(os.path.realpath(__file__))):
        command = run.split()
        with capture() as output:
            command[-1] = os.path.join("data/test", command[-1])
            command[-2] = os.path.join("data/test", command[-2])
            command.insert(0, '--debug')
            inspection.main(command)
        result = eval(output[0])
        return result
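The target example above also uses a capture helper that is not defined here. A plausible minimal version (an assumption, based on how output[0] is read after the block exits) collects stdout lines into a list:

import sys
from contextlib import contextmanager
from io import StringIO

@contextmanager
def capture():
    """Redirect stdout and expose the captured lines as a list (assumed implementation)."""
    result = []
    out, sys.stdout = sys.stdout, StringIO()
    try:
        yield result
    finally:
        # the list is filled in on exit, so it is read after the with block
        result.extend(sys.stdout.getvalue().splitlines())
        sys.stdout = out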
def create_workspace():
    exec_cmd('mkdir -p /root/melo_ws/src')
    with working_directory('/root/melo_ws/src'):
        exec_cmd('git clone https://github.com/ros-planning/moveit_tutorials.git -b melodic-devel')
        exec_cmd('git clone https://github.com/ros-planning/panda_moveit_config.git -b melodic-devel')
        exec_cmd('rosdep install -y --from-paths . --ignore-src --rosdistro melodic')
    with working_directory('/root/melo_ws'):
        exec_cmd('catkin config --extend /opt/ros/melodic --cmake-args -DCMAKE_BUILD_TYPE=Release')
        exec_cmd('catkin build')
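exec_cmd is another helper these scripts assume. Given the plain calls here and the keyword-formatted call in the Docker example further down, a reasonable sketch (an assumption, not the projects' actual wrapper) is a thin shell wrapper with optional str.format expansion:

import subprocess

def exec_cmd(cmd_fmt, **fmt_kwargs):
    """Run a shell command, expanding str.format placeholders if any are given (assumed helper)."""
    cmd = cmd_fmt.format(**fmt_kwargs) if fmt_kwargs else cmd_fmt
    subprocess.run(cmd, shell=True, check=True)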
def git_log_gen(repo, number=1, extra=''):
    ''' generator of commits '''
    with utils.working_directory(repo):
        for line in get_stdout('git log -%d %s' % (number, extra)):
            if line.startswith('commit'):
                parts = line.split(' ')
                assert len(parts) == 2
                commit_name = parts[1]
                yield commit_name
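The git helpers here iterate over get_stdout, which is likewise undefined in this collection. A minimal sketch (an assumption; the real utils version apparently also raises utils.NotFoundProgram when the named program is missing, which this sketch omits) yields the command's stdout line by line:

import subprocess

def get_stdout(cmd, env=None, program=None):
    """Yield the stdout lines of a shell command one at a time (assumed helper)."""
    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                            env=env, universal_newlines=True)
    for line in proc.stdout:
        yield line.rstrip('\n')
    proc.wait()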
def init_pipenv(path):
    '''
    Initializes the Pipenv at the given path

    Parameters:
        path (str | pathlib.Path): The path to initialize the pipenv at

    Returns:
        pipenv (Pipenv): The corresponding Pipenv object
    '''
    with utils.working_directory(path):
        return Pipenv()
def download_asset_by_name(asset_name, writedir):
    try:
        url = [x for x in assets if x.name == asset_name][0].browser_download_url
    except IndexError:
        utils.error(f"Cannot find {asset_name} in the GitHub release with tag {release.tag_name}")
    with utils.working_directory(writedir):
        utils.notice(f"Downloading {url} to {writedir} . . .")
        utils.download_with_sha256(url, asset_name)
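download_with_sha256 appears in several of the release scripts in this collection, but its semantics are not shown. One plausible reading (purely an assumption) is that it downloads the URL and verifies the file against a sibling .sha256 checksum published next to it:

import hashlib
import urllib.request

def download_with_sha256(url, dest):
    """Download url to dest and check it against url + '.sha256' (assumed behavior)."""
    urllib.request.urlretrieve(url, dest)
    expected = urllib.request.urlopen(url + ".sha256").read().decode().strip()
    digest = hashlib.sha256(open(dest, "rb").read()).hexdigest()
    if digest != expected:
        raise RuntimeError("sha256 mismatch for " + dest)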
def get_changeset_from_timestamp(repo, timestamp_searched):
    with utils.working_directory(repo):
        lines = []
        for line in get_stdout(r'git log --format="%H;%cd" --date=rfc'):
            lines.append(line)
        for line in reversed(lines):
            chunks = line.split(";")
            assert len(chunks) == 2
            changeset = chunks[0]
            timestamp = int(time.mktime(parsedate(chunks[1])))
            if timestamp_searched == timestamp:
                return changeset
    raise Exception('Error getting git hash from timestamp {}'.format(timestamp_searched))
def main(args):
    mount_flags = get_mount_dirs_str(args.mount)
    if args.stop_rm:
        stop_rm_container(args.container_id)
    with working_directory(path=Path(__file__).parent):
        exec_cmd('xhost +local:docker')
        exec_cmd(DOCKER_RUN_CMD_FMT,
                 pre='sudo' if args.elevate else '',
                 container_id=args.container_id,
                 additional_flags=mount_flags,
                 image_name=get_image_name(args.image_name))
def get_timestamp_from_changeset(repo, changeset_searched):
    ''' return the commit timestamp of a given changeset '''
    with utils.working_directory(repo):
        lines = []
        for line in get_stdout(r'git log --format="%H;%cd" --date=rfc'):
            lines.append(line)
        for line in reversed(lines):
            chunks = line.split(";")
            assert len(chunks) == 2
            changeset = chunks[0]
            timestamp = int(time.mktime(parsedate(chunks[1])))
            if changeset_searched == changeset:
                return timestamp
    raise Exception('Error getting timestamp from changeset {}'.format(changeset_searched))
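get_changeset_from_timestamp and get_timestamp_from_changeset are inverses of each other, so (assuming a repo path and an existing commit hash, and that no two commits share a timestamp) a round trip holds:

    ts = get_timestamp_from_changeset('/path/to/repo', some_hash)
    assert get_changeset_from_timestamp('/path/to/repo', ts) == some_hash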
def get_changeset_git_from_position(repo, position=0):
    with utils.working_directory(repo):
        i = 1
        lines = []
        for line in get_stdout('git log'):
            lines.append(line)
        for line in reversed(lines):
            if line.startswith('commit'):
                parts = line.split(' ')
                assert len(parts) == 2
                commit_name = parts[1]
                if i == position:
                    return commit_name
                else:
                    i += 1
    raise Exception('Error getting git hash from position {}'.format(position))
def main(image_name):
    with working_directory(path=Path(__file__).parent):
        build(image_name)
        save_meta_info(image_name)
def packing(node, parameters, compiler_replace_maps):
    package = node.get_package_name()
    version_git = node.get_version()
    packing = node.is_packing()
    if not packing:
        logging.warning('Skipping package: %s' % package)
        return 0

    manager = node.get_version_manager()
    if manager == "git":
        build_modes = node.get_build_modes()
        for plat, build_mode in product(platforms, build_modes):
            build_directory = os.path.join(os.getcwd(), node.get_build_directory(plat, build_mode))
            revision_git = hash_version.get_last_changeset(build_directory, short=False)
            version_old = node.get_version()
            version_git = hash_version.to_cmaki_version(build_directory, revision_git)
            logging.info('[git] Renamed version from %s to %s' % (version_old, version_git))

            current_workspace = node.get_binary_workspace(plat)
            current_base = node.get_base_folder()
            oldversion = node.get_version()
            try:
                node.set_version(version_git)
                updated_workspace = node.get_binary_workspace(plat)
                updated_base = node.get_base_folder()

                current_base2 = os.path.join(current_workspace, current_base)
                updated_base2 = os.path.join(current_workspace, updated_base)
                logging.debug("from: %s" % current_base2)
                logging.debug("to: %s" % updated_base2)
                if current_base != updated_base:
                    utils.move_folder_recursive(current_base2, updated_base2)
                logging.debug('-- copy from: {}, {}'.format(current_workspace, os.path.exists(current_workspace)))
                logging.debug('-- copy to: {}, {}'.format(updated_workspace, os.path.exists(updated_workspace)))
                utils.move_folder_recursive(current_workspace, updated_workspace)
            finally:
                node.set_version(oldversion)

    node.set_version(version_git)
    version = node.get_version()

    # regenerate autoscripts with new version
    node.generate_scripts_headers(compiler_replace_maps)

    # generate versions.cmake
    node.generate_3rdpartyversion(parameters.prefix)

    precmd = ''
    if utils.is_windows():
        precmd = 'cmake -E '

    folder_3rdparty = parameters.third_party_dir
    output_3rdparty = os.path.join(folder_3rdparty, node.get_base_folder())
    utils.trymkdir(output_3rdparty)

    folder_mark = os.path.join(parameters.prefix, node.get_base_folder())
    utils.trymkdir(folder_mark)

    utils.superverbose(parameters, '*** [%s] Generation cmakefiles *** %s' % (package, output_3rdparty))
    errors = node.generate_cmakefiles(platforms, output_3rdparty, compiler_replace_maps)
    logging.debug('errors generating cmakefiles: %d' % errors)
    node.ret += abs(errors)

    for plat in platforms:
        utils.superverbose(parameters, '*** [%s (%s)] Generating package .tar.gz (%s) ***' % (package, version, plat))
        workspace = node.get_workspace(plat)
        current_workspace = node.get_binary_workspace(plat)
        utils.trymkdir(current_workspace)
        with utils.working_directory(current_workspace):
            logging.info('working directory: {}'.format(current_workspace))

            if utils.is_windows():
                utils.safe_system('del /s *.ilk')
                utils.safe_system('del /s *.exp')

            current_base = node.get_base_folder()
            prefix_package = os.path.join(parameters.prefix, '%s.tar.gz' % workspace)
            prefix_package_md5 = os.path.join(output_3rdparty, '%s.md5' % workspace)

            logging.info('generating package %s from source %s' % (prefix_package, os.path.join(os.getcwd(), current_base)))
            logging.info('generating md5file %s' % prefix_package_md5)
            print_folder(current_base)

            # packing install
            gen_targz = "%star zcvf %s %s" % (precmd, prefix_package, current_base)
            node.ret += abs(node.safe_system(gen_targz, compiler_replace_maps))
            if not os.path.exists(prefix_package):
                logging.error('No such file: {}'.format(prefix_package))
                return False

            # calculate md5 file
            package_md5 = utils.md5sum(prefix_package)
            logging.debug("new package {}, with md5sum {}".format(prefix_package, package_md5))
            with open(prefix_package_md5, 'wt') as f:
                f.write('%s\n' % package_md5)

    # packing cmakefiles (for easier distribution)
    if not parameters.no_packing_cmakefiles:
        for plat in platforms:
            current_base = node.get_base_folder()
            prefix_package_cmake = os.path.join(parameters.prefix, '%s-%s-cmake.tar.gz' % (current_base, plat))
            with utils.working_directory(folder_3rdparty):
                logging.info('working directory: {}'.format(folder_3rdparty))
                logging.debug('working dir: %s' % folder_3rdparty)
                logging.info('generating package cmake %s' % prefix_package_cmake)
                print_folder(current_base)
                gen_targz_cmake = '{}tar zcvf {} {}'.format(precmd, prefix_package_cmake, current_base)
                node.ret += abs(node.safe_system(gen_targz_cmake, compiler_replace_maps))
                if not os.path.exists(prefix_package_cmake):
                    logging.error('No such file: {}'.format(prefix_package_cmake))
                    return False

    # finish well
    return True
utils.notice(f"Detected GAP version {GAPVERSION}") utils.initialize_github() # Error if this release has been already created on GitHub if utils.check_whether_github_release_exists("v"+GAPVERSION): utils.error(f"Release v{GAPVERSION} already exists!") # Create release CURRENT_BRANCH = utils.get_makefile_var("PKG_BRANCH") RELEASE_NOTE = f"For an overview of changes in GAP {GAPVERSION} see the " \ + "[CHANGES.md](https://github.com/gap-system/gap/blob/master/CHANGES.md) file." utils.notice(f"Creating release v{GAPVERSION}") RELEASE = utils.CURRENT_REPO.create_git_release("v"+GAPVERSION, "v"+GAPVERSION, RELEASE_NOTE, target_commitish=CURRENT_BRANCH, prerelease=True) tmpdir = os.getcwd() + "/tmp" with utils.working_directory(tmpdir): manifest_filename = "MANIFEST" with open(manifest_filename, 'r') as manifest_file: manifest = manifest_file.read().splitlines() # Upload all assets to release try: for filename in manifest: utils.notice("Uploading " + filename) RELEASE.upload_asset(filename) except github.GithubException: utils.error("Error: The upload failed")
utils.check_git_tag_for_release(TAG_NAME)

# Error if this release has been already created on GitHub
if any(r.tag_name == TAG_NAME for r in utils.CURRENT_REPO.get_releases()):
    utils.error(f"Github release with tag '{TAG_NAME}' already exists!")

# Create release
RELEASE_NOTE = f"For an overview of changes in GAP {TAG_NAME[1:]} see the " \
    + f"[CHANGES.md](https://github.com/gap-system/gap/blob/{TAG_NAME}/CHANGES.md) file."
utils.notice(f"Creating release {TAG_NAME}")
RELEASE = utils.CURRENT_REPO.create_git_release(TAG_NAME, TAG_NAME, RELEASE_NOTE, prerelease=True)

with utils.working_directory(PATH_TO_RELEASE):
    manifest_filename = "MANIFEST"
    with open(manifest_filename, 'r') as manifest_file:
        manifest = manifest_file.read().splitlines()
    utils.notice(f"Contents of {manifest_filename}:")
    for filename in manifest:
        print(filename)

    # Now check that TAG_NAME and the created archives belong together
    main_archive_name = "gap-" + TAG_NAME[1:] + ".tar.gz"
    if main_archive_name not in manifest:
        utils.error(f"Expected to find {main_archive_name} in MANIFEST, but did not!")

    # Upload all assets to release
def packing(node, parameters, compiler_replace_maps):
    package = node.get_package_name()
    version_git = node.get_version()
    packing = node.is_packing()
    if not packing:
        logging.warning('Skipping package: %s' % package)
        return 0

    manager = node.get_version_manager()
    if manager == "git":
        build_modes = node.get_build_modes()
        for plat, build_mode in product(platforms, build_modes):
            workspace = node.get_workspace(plat)
            build_directory = os.path.join(os.getcwd(), node.get_build_directory(plat, build_mode))
            revision_git = hash_version.get_last_changeset(build_directory, short=False)
            version_old = node.get_version()
            version_git = hash_version.to_cmaki_version(build_directory, revision_git)
            logging.info('[git] Renamed version from %s to %s' % (version_old, version_git))

            # rename package-version-platform/package-version
            workspace = node.get_workspace(plat)
            source_folder = node.get_base_folder()
            oldversion = node.get_version()
            try:
                node.set_version(version_git)
                new_workspace = node.get_workspace(plat)
                new_source_folder = node.get_base_folder()

                # changed version?
                if source_folder != new_source_folder:
                    utils.move_folder_recursive(
                        os.path.join(workspace, source_folder),
                        os.path.join(workspace, new_source_folder))
                utils.move_folder_recursive(workspace, new_workspace)
            finally:
                node.set_version(oldversion)

    node.set_version(version_git)
    version = node.get_version()

    # regenerate autoscripts with new version
    node.generate_scripts_headers(compiler_replace_maps)

    precmd = ''
    if utils.is_windows():
        precmd = 'cmake -E '

    folder_3rdparty = parameters.third_party_dir
    output_3rdparty = os.path.join(folder_3rdparty, node.get_base_folder())
    utils.trymkdir(output_3rdparty)

    folder_mark = os.path.join(parameters.prefix, node.get_base_folder())
    utils.trymkdir(folder_mark)

    utils.superverbose(parameters, '*** [%s] Generation cmakefiles *** %s' % (package, output_3rdparty))
    errors = node.generate_cmakefiles(platforms, output_3rdparty, compiler_replace_maps)
    logging.debug('errors generating cmakefiles: %d' % errors)
    node.ret += abs(errors)

    for plat in platforms:
        utils.superverbose(parameters, '*** [%s (%s)] Generating package .tar.gz (%s) ***' % (package, version, plat))
        workspace = node.get_workspace(plat)
        utils.trymkdir(workspace)
        with utils.working_directory(workspace):
            if utils.is_windows():
                utils.safe_system('del /s *.ilk')
                utils.safe_system('del /s *.exp')

            source_folder = node.get_base_folder()
            prefix_package = os.path.join(parameters.prefix, '%s.tar.gz' % workspace)
            prefix_package_cmake = os.path.join(parameters.prefix, '%s-cmake.tar.gz' % workspace)
            prefix_package_md5 = os.path.join(output_3rdparty, '%s.md5' % workspace)

            logging.info('generating package %s from source %s' % (prefix_package, os.path.join(os.getcwd(), source_folder)))
            logging.info('generating md5file %s' % prefix_package_md5)

            # packing install
            gen_targz = "%star zcvf %s %s" % (precmd, prefix_package, source_folder)
            node.ret += abs(node.safe_system(gen_targz, compiler_replace_maps))
            if not os.path.exists(prefix_package):
                logging.error('No such file: {}'.format(prefix_package))
                return False

            # calculate md5 file
            package_md5 = utils.md5sum(prefix_package)
            logging.debug("new package {}, with md5sum {}".format(prefix_package, package_md5))
            with open(prefix_package_md5, 'wt') as f:
                f.write('%s\n' % package_md5)

            # marker is an empty file
            # prefix_package_marker = os.path.join(folder_mark, '%s.cache' % get_identifier('ALL'))
            # logging.info('generating marker %s' % prefix_package_marker)
            # open(prefix_package_marker, 'a').close()

    # packing cmakefiles (for easier distribution)
    if not parameters.no_packing_cmakefiles:
        for plat in platforms:
            base_folder = node.get_base_folder()
            prefix_package_cmake = os.path.join(parameters.prefix, '%s-%s-cmake.tar.gz' % (base_folder, plat))
            with utils.working_directory(folder_3rdparty):
                logging.debug('working dir: %s' % folder_3rdparty)

                # packing install
                logging.info('generating package cmake %s' % prefix_package_cmake)
                gen_targz_cmake = '{}tar zcvf {} {}'.format(precmd, prefix_package_cmake, node.get_base_folder())
                node.ret += abs(node.safe_system(gen_targz_cmake, compiler_replace_maps))

    # finish well
    return True
def prepare(node, parameters, compiler_replace_maps):
    package = node.get_package_name()

    # source folder
    source_dir = os.path.join(os.getcwd(), package)
    utils.trymkdir(source_dir)

    # generate .build.sh / .build.cmd if it is defined in the yaml
    node.get_generate_custom_script(source_dir)

    # generate find.script / find.cmd
    node.generate_scripts_headers(compiler_replace_maps)

    # read root CMakeLists.txt
    with open('CMakeLists.txt', 'rt') as f:
        content_cmakelists = f.read()

    # NOTE: stop deleting once we recycle binaries
    node.remove_packages()

    # run_tests or packing
    build_modes = node.get_build_modes()
    for plat, build_mode in product(platforms, build_modes):
        logging.info('Preparing mode %s - %s' % (plat, build_mode))
        build_directory = os.path.join(os.getcwd(), node.get_build_directory(plat, build_mode))
        utils.trymkdir(build_directory)

        # download source and prepare in build_directory
        node.prepare_third_party(build_directory, compiler_replace_maps)

        # copy source files to build
        logging.debug('Copy sources to build: %s -> %s' % (source_dir, build_directory))
        utils.copy_folder_recursive(source_dir, build_directory)

        # before copy files
        with utils.working_directory(build_directory):
            for bc in node.get_before_copy():
                chunks = [x.strip() for x in bc.split(' ') if x]
                if len(chunks) != 2:
                    raise Exception('Invalid value in before_copy: %s' % bc)
                logging.debug('Copy "%s" to "%s"' % (chunks[0], chunks[1]))
                shutil.copy2(chunks[0], chunks[1])

        # if there is a CMakeLists.txt, prepend the root CMakeLists.txt header
        cmake_prefix = node.get_cmake_prefix()
        build_cmakelist = os.path.join(build_directory, cmake_prefix, 'CMakeLists.txt')
        if os.path.exists(build_cmakelist) and (not node.has_custom_script(source_dir)):
            with open(build_cmakelist, 'rt') as f:
                content_cmakelists_package = f.read()
            with open(build_cmakelist, 'wt') as f:
                f.write('%s\n' % content_cmakelists)
                f.write('%s\n' % content_cmakelists_package)

        if parameters.fast:
            logging.debug('skipping the rest because fast mode is enabled: "prepare"')
            break

    # finish well
    return True
def run_tests(node, parameters, compiler_replace_maps, unittests):
    oldcwd = os.getcwd()
    artifacts_dir = parameters.rootdir
    artifacts_dir = utils.get_norm_path(artifacts_dir)
    artifacts_dir = artifacts_dir.replace('\\', '/')

    cmakelib_dir = parameters.cmakefiles
    cmakelib_dir = utils.get_norm_path(cmakelib_dir)
    cmakelib_dir = cmakelib_dir.replace('\\', '/')

    cmake3p_dir = parameters.prefix
    cmake3p_dir = utils.get_norm_path(cmake3p_dir)
    cmake3p_dir = cmake3p_dir.replace('\\', '/')

    cmake_prefix = parameters.prefix
    cmake_prefix = utils.get_norm_path(cmake_prefix)
    cmake_prefix = cmake_prefix.replace('\\', '/')

    cmake_third_party_dir = parameters.third_party_dir
    cmake_third_party_dir = utils.get_norm_path(cmake_third_party_dir)
    cmake_third_party_dir = cmake_third_party_dir.replace('\\', '/')

    package = node.get_package_name()
    package_upper = node.get_package_name_norm_upper()
    version = node.get_version()
    packing = node.is_packing()
    if not packing:
        logging.warning("No need to run tests, because no package was generated")
        return 0

    # prepare unittests
    # can be a file or inline content
    unittest_value = node.get_unittest()
    if unittest_value is not None:
        build_modes = node.get_build_modes()
        for plat, build_mode in product(platforms, build_modes):
            builddir = node.get_build_directory(plat, build_mode)
            path_test = os.path.join(builddir, build_unittests_foldername)
            utils.trymkdir(path_test)

            # if it is a file
            unittest_path = os.path.join(builddir, unittest_value)
            if os.path.isfile(unittest_path):
                with open(unittest_path, 'rt') as f:
                    unittest_value = f.read()

            with open(os.path.join(path_test, 'main.cpp'), 'wt') as f:
                f.write(unittest_value)

            if parameters.fast:
                logging.debug('skipping the rest because fast mode is enabled: "prepare"')
                break
    else:
        logging.warning('[%s] No test present.' % package)

    folder_3rdparty = parameters.third_party_dir
    output_3rdparty = os.path.join(folder_3rdparty, node.get_base_folder())

    build_modes = node.get_build_modes()
    for plat, build_mode in product(platforms, reversed(build_modes)):
        for compiler_c, compiler_cpp, generator, _, _, env_modified, _ in node.compiler_iterator(plat, compiler_replace_maps):
            # verify md5sum
            workspace = node.get_workspace(plat)
            utils.trymkdir(workspace)
            with utils.working_directory(workspace):
                prefix_package = os.path.join(parameters.prefix, '%s.tar.gz' % workspace)
                prefix_package_md5 = os.path.join(output_3rdparty, '%s.md5' % workspace)
                if os.path.exists(prefix_package) and os.path.exists(prefix_package_md5):
                    with open(prefix_package_md5, 'rt') as f:
                        md5sum = f.read().strip()

                    try:
                        logging.debug("expected md5: %s" % md5sum)
                        for line in utils.get_stdout('cmake -E md5sum %s' % prefix_package, env_modified, 'cmake'):
                            if len(line) > 0:
                                # format: md5sum filename
                                chunks = line.split(' ')
                                chunks = filter(None, chunks)
                                assert len(chunks) > 0
                                md5sum_real = chunks[0]
                                logging.debug("real md5: %s" % md5sum_real)
                                if md5sum != md5sum_real:
                                    logging.error('Error in generated md5sum file!')
                                    logging.error('Expected: %s' % md5sum)
                                    logging.error('Found: %s' % md5sum_real)
                                    # add error to node
                                    node.ret += 1
                    except utils.NotFoundProgram:
                        logging.info('cannot verify md5 because cmake was not found')
                else:
                    logging.warning('Skipping md5 verification because the package or md5 file does not exist')

            logging.info('running unittests. Build mode: %s Platform: %s' % (build_mode, plat))

            # Careful when deleting cmake3p: the marker gets deleted too
            # node.remove_cmake3p( cmake3p_dir )

            builddir = os.path.join(oldcwd, node.get_build_directory(plat, build_mode))
            logging.info('Using builddir %s' % builddir)
            unittest_folder = os.path.join(builddir, build_unittests_foldername)
            unittest_found = os.path.join(unittest_folder, 'main.cpp')
            unittest_found = unittest_found.replace('\\', '/')
            unittest_root = os.path.join(oldcwd, build_unittests_foldername)

            if os.path.exists(unittest_found):
                logging.info('Search cmakelib in %s' % cmakelib_dir)
                if os.path.isdir(os.path.join(cmakelib_dir)):
                    with utils.working_directory(unittest_folder):
                        generator_extra = ''
                        if generator is not None:
                            generator_extra = '-G"%s"' % generator

                        find_packages = []
                        find_packages.append(package)
                        for dep in node.get_depends_raw():
                            package_name = dep.get_package_name()
                            find_packages.append(package_name)
                        find_packages_str = ';'.join(find_packages)

                        # remove CMakeCache.txt to avoid problems when the generator changes
                        utils.tryremove('CMakeCache.txt')
                        utils.tryremove('cmake_install.cmake')
                        utils.tryremove('install_manifest.txt')
                        utils.tryremove_dir('CMakeFiles')

                        '''
                        TODO: refactor:
                        prefix = DEPENDS_PATH (cmake3p) (artifacts)
                        cmakefiles = CMAKI_PATH, CMAKE_MODULE_PATH (cmakelib)
                        third-party-dir = CMAKE_PREFIX_PATH (the artifacts/cmaki_find_package directory) (3rdparty)
                        rootdir = ARTIFACTS_PATH, the base directory where build.py lives (cmaki_generator) (generation scripts); could also be CMAKI_PWD
                        CMAKI_INSTALL: the cmaki_identifier is expected to be installed
                        '''
                        cmd = ('cmake %s %s -DARTIFACTS_PATH="%s" -DCMAKI_COMPILER="%s" -DCMAKI_PLATFORM="%s" '
                               '-DCMAKE_MODULE_PATH="%s" -DPACKAGE="%s" -DPACKAGE_UPPER="%s" -DCMAKE_BUILD_TYPE="%s" '
                               '-DAVOID_USE_HTTP=1 -DINSTALL_SIMPLE=1 -DCMAKE_PREFIX_PATH="%s" -DUNITTEST_PATH="%s" '
                               '-DDEPENDS_PATH="%s" -DFIND_PACKAGES="%s" -DCMAKI_DEBUG=TRUE '
                               '&& cmake --build . --config %s --target install '
                               '&& ctest . -C %s --output-on-failure -VV') % (
                                   unittest_root, generator_extra, artifacts_dir,
                                   get_identifier('COMPILER'), get_identifier('ALL'),
                                   cmakelib_dir, package, package_upper, build_mode,
                                   cmake_third_party_dir, unittest_found, cmake_prefix,
                                   find_packages_str, build_mode, build_mode)
                        ret = utils.safe_system(cmd, env=env_modified)

                        node.ret += abs(ret)
                        if ret != 0:
                            unittests['%s - %s' % (package, version)] = 'ERROR: Fail test'
                        else:
                            unittests['%s - %s' % (package, version)] = 'OK: Pass test'
                else:
                    unittests['%s - %s' % (package, version)] = 'WARN: No cmakelib available'
            else:
                unittests['%s - %s' % (package, version)] = 'WARN: No unittest found'

    # successful
    return True
def main(argv=None):
    parser = argparse.ArgumentParser(
        description='Return a list of related pages between two pdfs.')
    parser.add_argument(
        '--include', action='append', default=['DefaultTest'],
        help="Include additional test classes (default=[DefaultTest])")
    parser.add_argument('--exclude', action='append', default=[],
                        help="Exclude test classes.")
    parser.add_argument(
        '--cases', type=str, default='cases',
        help="Python module which stores test cases (default=cases).")
    parser.add_argument('--check', type=str, choices=['any', 'all'], default='all',
                        help="Require that any/all test cases pass "
                             "for pages to be related (default=all).")
    parser.add_argument('--diff', action='store_true', default=False)
    parser.add_argument(
        '--window', type=int, default=None,
        help="If the absolute difference of the indexes of two pdf pages is "
             "greater than the window range, then the pages are not related. (default=None)")
    parser.add_argument('--debug', action='store_true', default=False)
    parser.add_argument('pdf_a', type=str)
    parser.add_argument('pdf_b', type=str)
    args = parser.parse_args(argv)
    settings = vars(args)

    if not os.path.isabs(settings['pdf_a']):
        settings['pdf_a'] = os.path.abspath(settings['pdf_a'])
    if not os.path.isabs(settings['pdf_b']):
        settings['pdf_b'] = os.path.abspath(settings['pdf_b'])

    if settings['debug']:
        f = sys.stderr
    else:
        f = open(os.devnull, 'w')

    test_results = []
    with working_directory(os.path.dirname(os.path.realpath(__file__))):
        cases = import_module("inspection." + settings['cases'])
        pdf_a_im = pyPdf.PdfFileReader(file(settings['pdf_a'], "rb"))
        total_a_pages = pdf_a_im.getNumPages()
        pdf_b_im = pyPdf.PdfFileReader(file(settings['pdf_b'], "rb"))
        total_b_pages = pdf_b_im.getNumPages()

        settings['include'] = list(set(settings['include']) - set(settings['exclude']))
        for case_name in settings['include']:
            TestClass = cases.__getattribute__(case_name)
            setattr(TestClass, '_settings', settings)
            SuperClass = inspect.getmro(TestClass)[1]
            method_list = inspect.getmembers(TestClass, predicate=inspect.ismethod)
            super_method_list = inspect.getmembers(SuperClass, predicate=inspect.ismethod)
            test_method_list = list(set(method_list) - set(super_method_list))
            test_name_list = [
                method[0] for method in test_method_list
                if method[0] != 'tearDownClass' and method[0] != 'setUpClass'
            ]
            for test_name in test_name_list:
                start = 1
                m_end = total_a_pages
                n_end = total_b_pages

                # trim off the matching items at the beginning
                result = unittest.TestResult()
                while start <= m_end and start <= n_end:
                    test = TestClass(test_name, start, start)
                    test.run(result)
                    if result.wasSuccessful():
                        case_name = test.case_key_from_id()
                        page_i = start
                        page_j = start
                        value = 'p'
                        start += 1
                        test_results.append((case_name, page_i, page_j, value))
                        f.write("{0} ... ok\n".format(test.id()))
                    else:
                        break

                # trim off the matching items at the end
                result = unittest.TestResult()
                while start <= m_end and start <= n_end:
                    test = TestClass(test_name, m_end, n_end)
                    test.run(result)
                    if result.wasSuccessful():
                        case_name = test.case_key_from_id()
                        page_i = m_end
                        page_j = n_end
                        value = 'p'
                        test_results.append((case_name, page_i, page_j, value))
                        m_end -= 1
                        n_end -= 1
                        f.write("{0} ... ok\n".format(test.id()))
                    else:
                        break

                # only loop over the items that have changed
                for page_i in range(start, m_end + 1):
                    for page_j in range(start, n_end + 1):
                        result = unittest.TestResult()
                        test = TestClass(test_name, page_i, page_j)
                        test.run(result)
                        case_name = test.case_key_from_id()
                        if result.wasSuccessful():
                            value = 'p'
                            f.write("{0} ... ok\n".format(test.id()))
                        elif result.failures:
                            value = 'f'
                            f.write("{0} ... FAIL\n".format(test.id()))
                        elif result.skipped:
                            value = 's'
                            f.write("{0} ... skip\n".format(test.id()))
                        elif result.errors:
                            value = 'e'
                            f.write("{0} ... ERROR\n".format(test.id()))
                        else:
                            raise RuntimeError("result not recognized")
                        test_results.append((case_name, page_i, page_j, value))

    cases = list(set([c for (c, i, j, v) in test_results]))
    results_matrix = numpy.chararray((len(cases), total_a_pages + 1, total_b_pages + 1))
    results_matrix[:] = 'f'
    while test_results:
        (case, page_i, page_j, value) = test_results.pop()
        x = cases.index(case)
        y = page_i
        z = page_j
        results_matrix[x, y, z] = value

    if settings['diff']:
        diff = diff_images(results_matrix)
        print(diff)
    else:
        related_page_list = lcs_images(results_matrix)
        print(related_page_list)
def compilation(node, parameters, compiler_replace_maps):
    package = node.get_package_name()
    package_norm = node.get_package_name_norm()
    version = node.get_version()

    artifacts_dir = parameters.rootdir
    artifacts_dir = utils.get_norm_path(artifacts_dir)
    artifacts_dir = artifacts_dir.replace('\\', '/')

    cmake3p_dir = parameters.prefix
    cmake3p_dir = utils.get_norm_path(cmake3p_dir)
    cmake3p_dir = cmake3p_dir.replace('\\', '/')

    cmakelib_dir = search_cmakelib()

    package_upper = node.get_package_name_norm_upper()
    parms = node.parameters
    build_modes = node.get_build_modes()
    for plat, build_mode in product(platforms, build_modes):
        workspace = node.get_workspace(plat)
        build_directory = os.path.join(os.getcwd(), node.get_build_directory(plat, build_mode))
        utils.trymkdir(build_directory)
        with utils.working_directory(build_directory):
            # get generator and platform info
            for compiler_c, compiler_cpp, generator, _, _, env_modified, _ in node.compiler_iterator(plat, compiler_replace_maps):

                logging.info('-- compilation mode: %s plat: %s' % (build_mode, plat))

                ############# 1. prepare vars

                if build_mode.lower() == 'debug':
                    try:
                        env_modified['CFLAGS'] = '%s -g -O0 -D_DEBUG -DDEBUG' % env_modified['CFLAGS']
                    except KeyError:
                        env_modified['CFLAGS'] = '-g -O0 -D_DEBUG -DDEBUG'
                    try:
                        env_modified['CPPFLAGS'] = '%s -g -O0 -D_DEBUG -DDEBUG' % env_modified['CPPFLAGS']
                    except KeyError:
                        env_modified['CPPFLAGS'] = '-g -O0 -D_DEBUG -DDEBUG'
                elif build_mode.lower() == 'relwithdebinfo':
                    try:
                        env_modified['CFLAGS'] = '%s -g -O2 -DNDEBUG' % env_modified['CFLAGS']
                    except KeyError:
                        env_modified['CFLAGS'] = '-g -O2 -DNDEBUG'
                    try:
                        env_modified['CPPFLAGS'] = '%s -g -O2 -DNDEBUG' % env_modified['CPPFLAGS']
                    except KeyError:
                        env_modified['CPPFLAGS'] = '-g -O2 -DNDEBUG'
                elif build_mode.lower() == 'release':
                    # packages are assumed to be built in release by default
                    pass

                install_directory = os.path.join(os.getcwd(), '..', workspace, node.get_base_folder(), plat)
                utils.trymkdir(install_directory)

                cores = utils.detect_ncpus()
                half_cores = cores / 2
                env_modified['CORES'] = str(cores)
                env_modified['HALF_CORES'] = str(half_cores)
                env_modified['GTC_PREFIX'] = parameters.prefix
                env_modified['CMAKELIB_URL'] = CMAKELIB_URL
                env_modified['BUILD_MODE'] = str(build_mode)
                env_modified['HTTP_URL_NPSERVER'] = HTTP_URL_NPSERVER
                env_modified['SOURCES'] = os.path.abspath(os.path.join('..', node.get_download_directory()))
                env_modified['CMAKI_DIR'] = cmakelib_dir
                env_modified['SELFHOME'] = install_directory

                #################
                # remove cmake3p of node
                node.remove_cmake3p(cmake3p_dir)

                # show env vars
                node.show_environment_vars(env_modified)

                # remove CMakeCache.txt to avoid problems when the generator changes
                utils.tryremove('CMakeCache.txt')
                utils.tryremove('cmake_install.cmake')
                utils.tryremove('install_manifest.txt')
                utils.tryremove_dir('CMakeFiles')
                #################

                generator_extra = ''
                if generator is not None:
                    generator_extra = '-G"%s"' % generator

                cmakelib_dir = parameters.cmakefiles
                cmakelib_dir = cmakelib_dir.replace('\\', '/')

                cmake_prefix_path = parameters.third_party_dir
                cmake_prefix_path = cmake_prefix_path.replace('\\', '/')

                build_directory = build_directory.replace('\\', '/')

                # resolve replace maps
                compiler_replace_resolved = {}
                for var, value in compiler_replace_maps.iteritems():
                    newvalue = value
                    newvalue = newvalue.replace('$PLATFORM', plat)
                    compiler_replace_resolved[var] = newvalue

                # begin cmake definitions
                try:
                    cmake_definitions_list_original = parms['cmake_definitions']
                    cmake_definitions_list = []
                    for define in cmake_definitions_list_original:
                        # TODO: resolve direct and indirect variables (from dependencies)
                        define = define.replace('$%s_HOME' % package_norm, install_directory)
                        # apply replaces
                        cmake_definitions_list.append(utils.apply_replaces(define, compiler_replace_resolved))
                except KeyError:
                    cmake_definitions_list = []

                # add cflags and cppflags to cmake_definitions
                try:
                    cmake_definitions_list.append('CMAKE_C_FLAGS="%s"' % env_modified['CFLAGS'])
                except KeyError:
                    pass
                try:
                    cmake_definitions_list.append('CMAKE_CXX_FLAGS="%s"' % env_modified['CPPFLAGS'])
                except KeyError:
                    pass

                definitions_extra = ''
                for definition in cmake_definitions_list:
                    definitions_extra += ' -D%s' % definition
                # end cmake definitions

                if ('CMAKE_TOOLCHAIN_FILE' not in env_modified) or (not env_modified['CMAKE_TOOLCHAIN_FILE']) or (env_modified['CMAKE_TOOLCHAIN_FILE'] == "no cross compile"):
                    cmake_toolchain_file_filepath = ''
                else:
                    cmake_toolchain_file_filepath = ' -DCMAKE_TOOLCHAIN_FILE="{}"'.format(env_modified['CMAKE_TOOLCHAIN_FILE'])

                cmake_prefix = node.get_cmake_prefix()
                cmake_configure = ('cmake %s %s -DARTIFACTS_PATH="%s" -DCMAKE_MODULE_PATH=%s -DCMAKI_PATH=%s '
                                   '-DCMAKE_BUILD_TYPE=%s -DAVOID_USE_HTTP=1 -DINSTALL_SIMPLE=1 -DCMAKE_PREFIX_PATH=%s '
                                   '-DPACKAGE=%s -DPACKAGE_UPPER=%s -DPACKAGE_VERSION=%s -DPACKAGE_BUILD_DIRECTORY=%s '
                                   '-DCMAKI_COMPILER=%s -DCMAKI_PLATFORM=%s %s %s') % (
                                       generator_extra, cmake_prefix, artifacts_dir, cmakelib_dir, cmakelib_dir,
                                       build_mode, cmake_prefix_path, package, package_upper, version,
                                       build_directory, get_identifier('COMPILER'), get_identifier('ALL'),
                                       definitions_extra, cmake_toolchain_file_filepath)

                target = node.get_cmake_target()
                if target is not None:
                    cmake_build = 'cmake --build . --target %s --config %s' % (target, build_mode)
                else:
                    cmake_build = 'cmake --build . --config %s' % (build_mode)

                env_modified['CMAKE_CONFIGURE'] = cmake_configure.replace(r'"', r"'")
                env_modified['CMAKE_BUILD'] = cmake_build.replace(r'"', r"'")

                ########## 2. execute

                executed_build_script = False
                if utils.is_windows():
                    for build_script in ['.build.cmd', 'build.cmd']:
                        if os.path.exists(build_script):
                            # execute manual build script
                            node.ret += abs(utils.safe_system('%s %s %s %s %s %s' % (build_script, install_directory, package, version, plat, build_mode), env=env_modified))
                            executed_build_script = True
                else:
                    for build_script in ['.build.sh', 'build.sh']:
                        if os.path.exists(build_script):
                            with open(build_script, 'r') as f:
                                content = f.read()

                            # show vars
                            node.show_environment_vars(env_modified)

                            node.ret += abs(utils.safe_system('chmod +x %s && ./%s %s %s %s %s %s' % (build_script, build_script, install_directory, package, version, plat, build_mode), env=env_modified))
                            executed_build_script = True

                if not executed_build_script:
                    logging.debug('configure command: %s' % cmake_configure)
                    ret = utils.safe_system(cmake_configure, env=env_modified)
                    if ret == 0:
                        node.ret += abs(utils.safe_system(cmake_build, env=env_modified))
                    else:
                        logging.warning('Configuration failed. See log: %s' % parameters.log)
                        node.ret += abs(ret)

                ######## 3. manual install

                # post-install
                logging.debug('begin post-install')
                for bc in node.get_post_install():
                    chunks = [x.strip() for x in bc.split(' ') if x]
                    if (len(chunks) != 2) and (len(chunks) != 3):
                        raise Exception('Invalid value in post_install: %s. Expected [source pattern destiny]' % bc)

                    source_folder = os.path.join(build_directory, os.path.dirname(chunks[0]))
                    install_directory_chunk = os.path.join(install_directory, chunks[1])
                    pattern = os.path.basename(chunks[0])
                    logging.debug('copy %s/%s to %s' % (source_folder, pattern, install_directory_chunk))

                    # create the directory if it does not exist
                    utils.trymkdir(install_directory_chunk)

                    p = pipeline.make_pipe()
                    # begin
                    if len(chunks) == 3:
                        p = pipeline.find(source_folder, 99)(p)
                    else:
                        p = pipeline.find(source_folder, 0)(p)
                    p = pipeline.grep_basename(pattern)(p)
                    p = pipeline.copy(source_folder, install_directory_chunk)(p)
                    p = pipeline.debug('copied ')(p)
                    # end
                    pipeline.end_pipe()(p)
                logging.debug('end post-install')

                if parameters.fast:
                    logging.debug('skipping the rest because fast mode is enabled: "compilation"')
                    break

    # finish well
    return True
notice("Using temporary directory: " + tmpdir) ################################################################################ # Download and extract the release asset named "gap-4.X.Y.tar.gz" tarball = "gap-" + gap_version + ".tar.gz" tarball_url = [x for x in assets_unix if x.name == tarball] try: tarball_url = tarball_url[0].browser_download_url except: error("cannot find " + tarball + " in release at tag " + release.tag_name) gaproot = tmpdir + "gap-" + gap_version + "/" pkg_dir = gaproot + "pkg/" gap_exe = gaproot + "bin/gap.sh" with working_directory(tmpdir): # TODO Can we sometimes avoid extracting this? Perhaps this script has # already been partially-run, and so we'd be able to skip this step? # TODO error handling notice("Downloading " + tarball_url + " (if not already downloaded) to " + tmpdir + " . . .") download_with_sha256(tarball_url, tarball) notice("Extracting " + tarball + " to " + gaproot + " . . .") with tarfile.open(tarball) as tar: try: tar.extractall() except: error("failed to extract tarball (was it already extracted?") ################################################################################ # Extract release date from configure.ac
def download_and_extract_json_gz_asset(asset_name, dest):
    download_asset_by_name(asset_name, tmpdir)
    with utils.working_directory(tmpdir):
        with gzip.open(asset_name, "rt", encoding="utf-8") as file_in:
            with open(dest, "w") as file_out:
                shutil.copyfileobj(file_in, file_out)
def prepare(node, parameters, compiler_replace_maps):
    package = node.get_package_name()
    environment = node.get_first_environment(compiler_replace_maps)

    # source folder
    source_dir = os.path.join(os.getcwd(), package)
    utils.trymkdir(source_dir)

    # generate versions.cmake
    node.generate_3rdpartyversion(parameters.prefix)

    # generate .build.sh / .build.cmd if it is defined in the yaml
    node.get_generate_custom_script(source_dir)

    # generate find.script / find.cmd
    node.generate_scripts_headers(compiler_replace_maps)

    # read root CMakeLists.txt
    with open('CMakeLists.txt', 'rt') as f:
        content_cmakelists = f.read()

    # remove packages beforehand
    for plat in platforms:
        prefix_package = os.path.join(parameters.prefix, '%s.tar.gz' % node.get_workspace(plat))
        prefix_package_cmake = os.path.join(parameters.prefix, '%s-cmakelib-%s.tar.gz' % (node.get_base_folder(), sys.platform))
        prefix_folder_cmake = os.path.join(parameters.third_party_dir, node.get_base_folder())
        logging.debug("preremoving package %s" % prefix_package)
        logging.debug("preremoving package cmakefiles %s" % prefix_package_cmake)
        logging.debug("preremoving folder cmakefiles %s" % prefix_folder_cmake)
        utils.tryremove(prefix_package)
        utils.tryremove(prefix_package_cmake)
        utils.tryremove_dir(prefix_folder_cmake)

    # run_tests or packing
    build_modes = node.get_build_modes()
    for plat, build_mode in product(platforms, build_modes):
        logging.info('Preparing mode %s - %s' % (plat, build_mode))
        workspace = node.get_workspace(plat)
        build_directory = os.path.join(os.getcwd(), node.get_build_directory(plat, build_mode))
        install_base_directory = os.path.join(os.getcwd(), workspace, node.get_base_folder())
        utils.trymkdir(build_directory)

        # download source and prepare in build_directory
        node.prepare_third_party(build_directory, compiler_replace_maps)

        # copy source files to build
        logging.debug('Copy sources to build: %s -> %s' % (source_dir, build_directory))
        utils.copy_folder_recursive(source_dir, build_directory)

        # before copy files
        with utils.working_directory(build_directory):
            for bc in node.get_before_copy():
                chunks = [x.strip() for x in bc.split(' ') if x]
                if len(chunks) != 2:
                    raise Exception('Invalid value in before_copy: %s' % bc)
                logging.debug('Copy "%s" to "%s"' % (chunks[0], chunks[1]))
                shutil.copy2(chunks[0], chunks[1])

        # if there is a CMakeLists.txt, prepend the root CMakeLists.txt header
        cmake_prefix = node.get_cmake_prefix()
        build_cmakelist = os.path.join(build_directory, cmake_prefix, 'CMakeLists.txt')
        if os.path.exists(build_cmakelist) and (not node.has_custom_script(source_dir)):
            with open(build_cmakelist, 'rt') as f:
                content_cmakelists_package = f.read()
            with open(build_cmakelist, 'wt') as f:
                f.write('%s\n' % content_cmakelists)
                f.write('%s\n' % content_cmakelists_package)

        if parameters.fast:
            logging.debug('skipping the rest because fast mode is enabled: "prepare"')
            break

    # finish well
    return True