def _build(self, nodes_to_process, deps_graph, skip_nodes, profile_build_requires, keep_build,
           root_node, update):
    """ The build assumes an input of conans ordered by degree: the first level should be
    independent from each other, and the second level should have dependencies only on
    first-level conans.
    param nodes_by_level: list of lists [[nodeA, nodeB], [nodeC], [nodeD, ...], ...]
    """
    inverse_levels = deps_graph.inverse_levels()
    for level in inverse_levels:
        level[:] = [n for n in level if n not in skip_nodes]

    for node, package_id, build_needed in nodes_to_process:
        conan_ref, conan_file = node.conan_ref, node.conanfile
        output = ScopedOutput(str(conan_ref), self._out)
        package_ref = PackageReference(conan_ref, package_id)
        package_folder = self._client_cache.package(package_ref, conan_file.short_paths)

        with self._client_cache.package_lock(package_ref):
            set_dirty(package_folder)
            if build_needed and (conan_ref, package_id) not in self._built_packages:
                self._build_package(node, package_id, package_ref, output, keep_build,
                                    profile_build_requires, inverse_levels, deps_graph, update)
            else:
                self._get_existing_package(conan_file, package_ref, output, package_folder,
                                           update)
                self._propagate_info(node, inverse_levels, deps_graph)

            # Call the info method
            self._call_package_info(conan_file, package_folder)
            clean_dirty(package_folder)

    # Finally, propagate information to root node (conan_ref=None)
    self._propagate_info(root_node, inverse_levels, deps_graph)
def _handle_node_cache(self, node, package_ref, keep_build, processed_package_references):
    conan_ref, conan_file = node.conan_ref, node.conanfile
    output = ScopedOutput(str(conan_ref), self._out)
    package_folder = self._client_cache.package(package_ref, conan_file.short_paths)

    with self._client_cache.package_lock(package_ref):
        if package_ref not in processed_package_references:
            processed_package_references.add(package_ref)
            set_dirty(package_folder)
            if node.binary == BINARY_BUILD:
                self._build_package(node, package_ref, output, keep_build)
            elif node.binary in (BINARY_UPDATE, BINARY_DOWNLOAD):
                self._remote_manager.get_package(package_ref, package_folder,
                                                 node.binary_remote, output, self._recorder)
                if node.binary_remote != node.remote:
                    self._registry.set_ref(conan_ref, node.binary_remote.name)
            elif node.binary == BINARY_CACHE:
                output.success('Already installed!')
                log_package_got_from_local_cache(package_ref)
                self._recorder.package_fetched_from_cache(package_ref)
            clean_dirty(package_folder)

        # Call the info method
        self._call_package_info(conan_file, package_folder)
def build_package(self, node, keep_build, recorder, remotes):
    t1 = time.time()

    conanfile = node.conanfile
    pref = node.pref

    package_layout = self._cache.package_layout(pref.ref, conanfile.short_paths)
    source_folder = package_layout.source()
    conanfile_path = package_layout.conanfile()
    package_folder = package_layout.package(pref)

    build_folder, skip_build = self._get_build_folder(conanfile, package_layout,
                                                      pref, keep_build, recorder)

    # PREPARE SOURCES
    if not skip_build:
        with package_layout.conanfile_write_lock(self._output):
            set_dirty(build_folder)
            self._prepare_sources(conanfile, pref, package_layout, conanfile_path,
                                  source_folder, build_folder, remotes)

    # BUILD & PACKAGE
    with package_layout.conanfile_read_lock(self._output):
        _remove_folder_raising(package_folder)
        mkdir(build_folder)
        with tools.chdir(build_folder):
            self._output.info('Building your package in %s' % build_folder)
            try:
                if getattr(conanfile, 'no_copy_source', False):
                    conanfile.source_folder = source_folder
                else:
                    conanfile.source_folder = build_folder

                if not skip_build:
                    with get_env_context_manager(conanfile):
                        conanfile.build_folder = build_folder
                        conanfile.package_folder = package_folder
                        # In local cache, install folder always is build_folder
                        conanfile.install_folder = build_folder
                        self._build(conanfile, pref, build_folder)
                    clean_dirty(build_folder)

                prev = self._package(conanfile, pref, package_layout, conanfile_path,
                                     build_folder, package_folder)
                assert prev
                node.prev = prev
                log_file = os.path.join(build_folder, RUN_LOG_NAME)
                log_file = log_file if os.path.exists(log_file) else None
                log_package_built(pref, time.time() - t1, log_file)
                recorder.package_built(pref)
            except ConanException as exc:
                recorder.package_install_error(pref, INSTALL_ERROR_BUILDING, str(exc),
                                               remote_name=None)
                raise exc

    return node.pref
def _compress_package_files(self, pref, integrity_check):
    t1 = time.time()
    # existing package, will use short paths if defined
    package_folder = self._cache.package(pref, short_paths=None)

    if is_dirty(package_folder):
        raise ConanException("Package %s is corrupted, aborting upload.\n"
                             "Remove it with 'conan remove %s -p=%s'"
                             % (pref, pref.ref, pref.id))
    tgz_path = os.path.join(package_folder, PACKAGE_TGZ_NAME)
    if is_dirty(tgz_path):
        self._user_io.out.warn("%s: Removing %s, marked as dirty"
                               % (str(pref), PACKAGE_TGZ_NAME))
        os.remove(tgz_path)
        clean_dirty(tgz_path)
    # Get all the files in that directory
    files, symlinks = gather_files(package_folder)

    if CONANINFO not in files or CONAN_MANIFEST not in files:
        logger.error("Missing info or manifest in uploading files: %s" % (str(files)))
        raise ConanException("Cannot upload corrupted package '%s'" % str(pref))

    logger.debug("UPLOAD: Time remote_manager build_files_set : %f" % (time.time() - t1))
    if integrity_check:
        self._package_integrity_check(pref, files, package_folder)
        logger.debug("UPLOAD: Time remote_manager check package integrity : %f"
                     % (time.time() - t1))

    the_files = _compress_package_files(files, symlinks, package_folder, self._user_io.out)
    return the_files
def compress_files(files, symlinks, name, dest_dir):
    t1 = time.time()
    # FIXME, better write to disk sequentially and not keep tgz contents in memory
    tgz_path = os.path.join(dest_dir, name)
    set_dirty(tgz_path)
    with open(tgz_path, "wb") as tgz_handle:
        # tgz_contents = BytesIO()
        tgz = gzopen_without_timestamps(name, mode="w", fileobj=tgz_handle)

        for filename, dest in sorted(symlinks.items()):
            info = tarfile.TarInfo(name=filename)
            info.type = tarfile.SYMTYPE
            info.linkname = dest
            tgz.addfile(tarinfo=info)

        mask = ~(stat.S_IWOTH | stat.S_IWGRP)
        for filename, abs_path in sorted(files.items()):
            info = tarfile.TarInfo(name=filename)
            info.size = os.stat(abs_path).st_size
            info.mode = os.stat(abs_path).st_mode & mask
            if os.path.islink(abs_path):
                info.type = tarfile.SYMTYPE
                info.linkname = os.readlink(abs_path)  # @UndefinedVariable
                tgz.addfile(tarinfo=info)
            else:
                with open(abs_path, 'rb') as file_handler:
                    tgz.addfile(tarinfo=info, fileobj=file_handler)

        tgz.close()

    clean_dirty(tgz_path)
    duration = time.time() - t1
    log_compressed_files(files, duration, tgz_path)

    return tgz_path
def _get_build_folder(self, conanfile, package_layout, pref, keep_build, recorder):
    # Build folder can use a different package_ID if build_id() is defined.
    # This function decides if the build folder should be re-used (not build again)
    # and returns the build folder
    new_id = build_id(conanfile)
    build_pref = PackageReference(pref.ref, new_id) if new_id else pref
    build_folder = package_layout.build(build_pref)

    if is_dirty(build_folder):
        self._output.warn("Build folder is dirty, removing it: %s" % build_folder)
        rmdir(build_folder)
        clean_dirty(build_folder)

    # Decide if the build folder should be kept
    skip_build = conanfile.develop and keep_build
    if skip_build:
        self._output.info("Won't be built as specified by --keep-build")
        if not os.path.exists(build_folder):
            msg = "--keep-build specified, but build folder not found"
            recorder.package_install_error(pref, INSTALL_ERROR_MISSING_BUILD_FOLDER,
                                           msg, remote_name=None)
            raise ConanException(msg)
    elif build_pref != pref and os.path.exists(build_folder) and hasattr(conanfile, "build_id"):
        self._output.info("Won't be built, using previous build folder as defined in build_id()")
        skip_build = True

    return build_folder, skip_build
def upload_recipe(self, conan_reference, remote, retry, retry_wait, ignore_deleted_file,
                  skip_upload=False, no_overwrite=None):
    """Will upload the conans to the first remote"""
    t1 = time.time()
    export_folder = self._client_cache.export(conan_reference)

    for f in (EXPORT_TGZ_NAME, EXPORT_SOURCES_TGZ_NAME):
        tgz_path = os.path.join(export_folder, f)
        if is_dirty(tgz_path):
            self._output.warn("%s: Removing %s, marked as dirty" % (str(conan_reference), f))
            os.remove(tgz_path)
            clean_dirty(tgz_path)

    files, symlinks = gather_files(export_folder)
    if CONANFILE not in files or CONAN_MANIFEST not in files:
        raise ConanException("Cannot upload corrupted recipe '%s'" % str(conan_reference))
    export_src_folder = self._client_cache.export_sources(conan_reference, short_paths=None)
    src_files, src_symlinks = gather_files(export_src_folder)
    the_files = _compress_recipe_files(files, symlinks, src_files, src_symlinks, export_folder,
                                       self._output)

    if skip_upload:
        return None

    ret, new_ref = self._call_remote(remote, "upload_recipe", conan_reference, the_files,
                                     retry, retry_wait, ignore_deleted_file, no_overwrite)
    duration = time.time() - t1
    log_recipe_upload(new_ref, duration, the_files, remote.name)
    if ret:
        msg = "Uploaded conan recipe '%s' to '%s'" % (str(new_ref), remote.name)
        url = remote.url.replace("https://api.bintray.com/conan", "https://bintray.com")
        msg += ": %s" % url
    else:
        msg = "Recipe is up to date, upload skipped"
    self._output.info(msg)
    return new_ref
def _handle_node_cache(self, node, keep_build, processed_package_references):
    pref = node.pref
    conan_file = node.conanfile
    output = conan_file.output
    package_folder = self._cache.package(pref, conan_file.short_paths)

    with self._cache.package_lock(pref):
        if pref not in processed_package_references:
            processed_package_references.add(pref)
            if node.binary == BINARY_BUILD:
                set_dirty(package_folder)
                self._build_package(node, pref, output, keep_build)
                clean_dirty(package_folder)
            elif node.binary in (BINARY_UPDATE, BINARY_DOWNLOAD):
                if not self._node_concurrently_installed(node, package_folder):
                    set_dirty(package_folder)
                    assert pref.revision is not None, "Installer should receive #PREV always"
                    self._remote_manager.get_package(pref, package_folder, node.binary_remote,
                                                     output, self._recorder)
                    self._registry.prefs.set(pref, node.binary_remote.name)
                    clean_dirty(package_folder)
                else:
                    output.success('Download skipped. Probable concurrent download')
                    log_package_got_from_local_cache(pref)
                    self._recorder.package_fetched_from_cache(pref)
            elif node.binary == BINARY_CACHE:
                output.success('Already installed!')
                log_package_got_from_local_cache(pref)
                self._recorder.package_fetched_from_cache(pref)

        # Call the info method
        self._call_package_info(conan_file, package_folder)
        self._recorder.package_cpp_info(pref, conan_file.cpp_info)
def upload_package(self, package_reference, remote, retry, retry_wait, integrity_check=False,
                   policy=None):
    """Will upload the package to the first remote"""
    conanfile_path = self._client_cache.conanfile(package_reference.conan)
    self._hook_manager.execute("pre_upload_package", conanfile_path=conanfile_path,
                               reference=package_reference.conan,
                               package_id=package_reference.package_id,
                               remote=remote)
    t1 = time.time()
    # existing package, will use short paths if defined
    package_folder = self._client_cache.package(package_reference, short_paths=None)

    if is_dirty(package_folder):
        raise ConanException("Package %s is corrupted, aborting upload.\n"
                             "Remove it with 'conan remove %s -p=%s'"
                             % (package_reference, package_reference.conan,
                                package_reference.package_id))
    tgz_path = os.path.join(package_folder, PACKAGE_TGZ_NAME)
    if is_dirty(tgz_path):
        self._output.warn("%s: Removing %s, marked as dirty"
                          % (str(package_reference), PACKAGE_TGZ_NAME))
        os.remove(tgz_path)
        clean_dirty(tgz_path)
    # Get all the files in that directory
    files, symlinks = gather_files(package_folder)

    if CONANINFO not in files or CONAN_MANIFEST not in files:
        logger.error("Missing info or manifest in uploading files: %s" % (str(files)))
        raise ConanException("Cannot upload corrupted package '%s'" % str(package_reference))

    logger.debug("UPLOAD: Time remote_manager build_files_set : %f" % (time.time() - t1))
    if integrity_check:
        self._package_integrity_check(package_reference, files, package_folder)
        logger.debug("UPLOAD: Time remote_manager check package integrity : %f"
                     % (time.time() - t1))

    the_files = compress_package_files(files, symlinks, package_folder, self._output)
    if policy == UPLOAD_POLICY_SKIP:
        return None

    uploaded, new_pref, rev_time = self._call_remote(remote, "upload_package",
                                                     package_reference, the_files,
                                                     retry, retry_wait, policy)

    # Update package revision with the rev_time (Created locally but with rev_time None)
    with self._client_cache.update_metadata(new_pref.conan) as metadata:
        metadata.packages[new_pref.package_id].time = rev_time

    duration = time.time() - t1
    log_package_upload(package_reference, duration, the_files, remote)
    logger.debug("UPLOAD: Time remote_manager upload_package: %f" % duration)
    if not uploaded:
        self._output.rewrite_line("Package is up to date, upload skipped")
        self._output.writeln("")

    self._hook_manager.execute("post_upload_package", conanfile_path=conanfile_path,
                               reference=package_reference.conan,
                               package_id=package_reference.package_id, remote=remote)
    return new_pref
def test_clean_dirty(self):
    """ Dirty flag must be cleaned by clean_dirty """
    set_dirty(self.temp_folder)
    self.assertTrue(os.path.exists(self.dirty_folder))
    clean_dirty(self.temp_folder)
    self.assertFalse(os.path.exists(self.dirty_folder))
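# The test above (and every snippet in this section) relies on a trio of helpers:
# set_dirty(), is_dirty() and clean_dirty(). A minimal sketch of the marker-file approach
# they imply is shown below; the ".dirty" suffix and the file-creation details are
# assumptions for illustration, not necessarily the exact implementation.
import os

_DIRTY_SUFFIX = ".dirty"  # assumed marker suffix


def set_dirty(folder):
    # Create an empty sibling marker next to the folder/file that is about to be mutated
    with open(os.path.normpath(folder) + _DIRTY_SUFFIX, "w"):
        pass


def is_dirty(folder):
    # The artifact is considered corrupted/incomplete while its marker still exists
    return os.path.exists(os.path.normpath(folder) + _DIRTY_SUFFIX)


def clean_dirty(folder):
    # Remove the marker once the operation has finished successfully
    os.remove(os.path.normpath(folder) + _DIRTY_SUFFIX)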
def upload_package(self, package_reference, remote, retry, retry_wait, skip_upload=False,
                   integrity_check=False, no_overwrite=None):
    """Will upload the package to the first remote"""
    t1 = time.time()
    # existing package, will use short paths if defined
    package_folder = self._client_cache.package(package_reference, short_paths=None)

    if is_dirty(package_folder):
        raise ConanException("Package %s is corrupted, aborting upload.\n"
                             "Remove it with 'conan remove %s -p=%s'"
                             % (package_reference, package_reference.conan,
                                package_reference.package_id))
    tgz_path = os.path.join(package_folder, PACKAGE_TGZ_NAME)
    if is_dirty(tgz_path):
        self._output.warn("%s: Removing %s, marked as dirty"
                          % (str(package_reference), PACKAGE_TGZ_NAME))
        os.remove(tgz_path)
        clean_dirty(tgz_path)
    # Get all the files in that directory
    files, symlinks = gather_files(package_folder)

    if CONANINFO not in files or CONAN_MANIFEST not in files:
        logger.error("Missing info or manifest in uploading files: %s" % (str(files)))
        raise ConanException("Cannot upload corrupted package '%s'" % str(package_reference))

    logger.debug("====> Time remote_manager build_files_set : %f" % (time.time() - t1))
    if integrity_check:
        self._package_integrity_check(package_reference, files, package_folder)
        logger.debug("====> Time remote_manager check package integrity : %f"
                     % (time.time() - t1))

    the_files = compress_package_files(files, symlinks, package_folder, self._output)
    if skip_upload:
        return None

    tmp = self._call_remote(remote, "upload_package", package_reference, the_files,
                            retry, retry_wait, no_overwrite)
    duration = time.time() - t1
    log_package_upload(package_reference, duration, the_files, remote)
    logger.debug("====> Time remote_manager upload_package: %f" % duration)
    if not tmp:
        self._output.rewrite_line("Package is up to date, upload skipped")
        self._output.writeln("")

    return tmp
def build_package(self, node, keep_build, recorder, remotes):
    t1 = time.time()

    conanfile = node.conanfile
    pref = node.pref

    package_layout = self._cache.package_layout(pref.ref, conanfile.short_paths)
    base_source = package_layout.source()
    conanfile_path = package_layout.conanfile()
    base_package = package_layout.package(pref)

    base_build, skip_build = self._get_build_folder(conanfile, package_layout,
                                                    pref, keep_build, recorder)

    # PREPARE SOURCES
    if not skip_build:
        with package_layout.conanfile_write_lock(self._output):
            set_dirty(base_build)
            self._prepare_sources(conanfile, pref, package_layout, remotes)
            self._copy_sources(conanfile, base_source, base_build)

    # BUILD & PACKAGE
    with package_layout.conanfile_read_lock(self._output):
        self._output.info('Building your package in %s' % base_build)
        try:
            if getattr(conanfile, 'no_copy_source', False):
                conanfile.folders.set_base_source(base_source)
            else:
                conanfile.folders.set_base_source(base_build)

            conanfile.folders.set_base_build(base_build)
            conanfile.folders.set_base_imports(base_build)
            conanfile.folders.set_base_package(base_package)

            if not skip_build:
                # In local cache, generators folder always in build_folder
                conanfile.folders.set_base_generators(base_build)
                # In local cache, install folder always is build_folder
                conanfile.folders.set_base_install(base_build)
                self._build(conanfile, pref)
                clean_dirty(base_build)

            prev = self._package(conanfile, pref, package_layout, conanfile_path)
            assert prev
            node.prev = prev
            log_file = os.path.join(base_build, RUN_LOG_NAME)
            log_file = log_file if os.path.exists(log_file) else None
            log_package_built(pref, time.time() - t1, log_file)
            recorder.package_built(pref)
        except ConanException as exc:
            recorder.package_install_error(pref, INSTALL_ERROR_BUILDING, str(exc),
                                           remote_name=None)
            raise exc

    return node.pref
def compress_files(files, symlinks, name, dest_dir, output=None):
    t1 = time.time()
    # FIXME, better write to disk sequentially and not keep tgz contents in memory
    tgz_path = os.path.join(dest_dir, name)
    set_dirty(tgz_path)
    with open(tgz_path, "wb") as tgz_handle:
        # tgz_contents = BytesIO()
        tgz = gzopen_without_timestamps(name, mode="w", fileobj=tgz_handle)

        for filename, dest in sorted(symlinks.items()):
            info = tarfile.TarInfo(name=filename)
            info.type = tarfile.SYMTYPE
            info.linkname = dest
            info.size = 0  # A symlink shouldn't have size
            tgz.addfile(tarinfo=info)

        mask = ~(stat.S_IWOTH | stat.S_IWGRP)
        i_file = 0
        n_files = len(files)
        last_progress = None
        if output and n_files > 1 and not output.is_terminal:
            output.write("[")
        for filename, abs_path in sorted(files.items()):
            info = tarfile.TarInfo(name=filename)
            info.size = os.stat(abs_path).st_size
            info.mode = os.stat(abs_path).st_mode & mask
            if os.path.islink(abs_path):
                info.type = tarfile.SYMTYPE
                info.size = 0  # A symlink shouldn't have size
                info.linkname = os.readlink(abs_path)  # @UndefinedVariable
                tgz.addfile(tarinfo=info)
            else:
                with open(abs_path, 'rb') as file_handler:
                    tgz.addfile(tarinfo=info, fileobj=file_handler)
            if output and n_files > 1:
                i_file = i_file + 1
                units = min(50, int(50 * i_file / n_files))
                if last_progress != units:  # Avoid screen refresh if nothing has change
                    if output.is_terminal:
                        text = "%s/%s files" % (i_file, n_files)
                        output.rewrite_line("[%s%s] %s" % ('=' * units, ' ' * (50 - units), text))
                    else:
                        output.write('=' * (units - (last_progress or 0)))
                    last_progress = units

        if output and n_files > 1:
            if output.is_terminal:
                output.writeln("")
            else:
                output.writeln("]")
        tgz.close()

    clean_dirty(tgz_path)
    duration = time.time() - t1
    log_compressed_files(files, duration, tgz_path)

    return tgz_path
def upload_recipe(self, conan_reference, remote, retry, retry_wait, policy, remote_manifest):
    conanfile_path = self._client_cache.conanfile(conan_reference)
    self._hook_manager.execute("pre_upload_recipe", conanfile_path=conanfile_path,
                               reference=conan_reference, remote=remote)
    t1 = time.time()
    export_folder = self._client_cache.export(conan_reference)

    for f in (EXPORT_TGZ_NAME, EXPORT_SOURCES_TGZ_NAME):
        tgz_path = os.path.join(export_folder, f)
        if is_dirty(tgz_path):
            self._output.warn("%s: Removing %s, marked as dirty" % (str(conan_reference), f))
            os.remove(tgz_path)
            clean_dirty(tgz_path)

    files, symlinks = gather_files(export_folder)
    if CONANFILE not in files or CONAN_MANIFEST not in files:
        raise ConanException("Cannot upload corrupted recipe '%s'" % str(conan_reference))
    export_src_folder = self._client_cache.export_sources(conan_reference, short_paths=None)
    src_files, src_symlinks = gather_files(export_src_folder)
    the_files = _compress_recipe_files(files, symlinks, src_files, src_symlinks, export_folder,
                                       self._output)

    if policy == UPLOAD_POLICY_SKIP:
        return conan_reference

    ret, rev_time = self._call_remote(remote, "upload_recipe", conan_reference, the_files,
                                      retry, retry_wait, policy, remote_manifest)

    # Update package revision with the rev_time (Created locally but with rev_time None)
    with self._client_cache.update_metadata(conan_reference) as metadata:
        metadata.recipe.time = rev_time

    duration = time.time() - t1
    log_recipe_upload(conan_reference, duration, the_files, remote.name)
    if ret:
        msg = "Uploaded conan recipe '%s' to '%s'" % (str(conan_reference), remote.name)
        url = remote.url.replace("https://api.bintray.com/conan", "https://bintray.com")
        msg += ": %s" % url
    else:
        msg = "Recipe is up to date, upload skipped"
    self._output.info(msg)

    self._hook_manager.execute("post_upload_recipe", conanfile_path=conanfile_path,
                               reference=conan_reference, remote=remote)
def config_source(export_folder, export_source_folder, scm_sources_folder, conanfile, output,
                  conanfile_path, reference, hook_manager, cache):
    """ Implements the sources configuration when a package is going to be built in the
    local cache:
    - remove old sources if dirty or build_policy=always
    - execute SCM logic
    - do a copy of the export and exports_sources folders to the source folder in the cache
    - run the source() recipe method
    """

    def remove_source():
        output.warn("This can take a while for big packages")
        try:
            rmdir(conanfile.folders.base_source)
        except BaseException as e_rm:
            msg = str(e_rm)
            if six.PY2:
                msg = str(e_rm).decode("latin1")  # Windows prints some chars in latin1
            output.error("Unable to remove source folder %s\n%s"
                         % (conanfile.folders.base_source, msg))
            output.warn("**** Please delete it manually ****")
            raise ConanException("Unable to remove source folder")

    if is_dirty(conanfile.folders.base_source):
        output.warn("Trying to remove corrupted source folder")
        remove_source()
        clean_dirty(conanfile.folders.base_source)
    elif conanfile.build_policy_always:
        output.warn("Detected build_policy 'always', trying to remove source folder")
        remove_source()

    if not os.path.exists(conanfile.folders.base_source):  # No source folder, need to get it
        with set_dirty_context_manager(conanfile.folders.base_source):
            mkdir(conanfile.source_folder)

            def get_sources_from_exports():
                # First of all get the exported scm sources (if auto) or clone (if fixed)
                _run_cache_scm(conanfile, scm_sources_folder, output)
                if not hasattr(conanfile, "layout"):
                    # so self exported files have precedence over python_requires ones
                    merge_directories(export_folder, conanfile.folders.base_source)
                    # Now move the export-sources to the right location
                    merge_directories(export_source_folder, conanfile.folders.base_source)

            _run_source(conanfile, conanfile_path, hook_manager, reference, cache,
                        get_sources_from_exports=get_sources_from_exports)
def config_source(export_folder, export_source_folder, src_folder, conanfile, output,
                  conanfile_path, reference, hook_manager, cache):
    """ Implements the sources configuration when a package is going to be built in the
    local cache.
    """

    def remove_source(raise_error=True):
        output.warn("This can take a while for big packages")
        try:
            rmdir(src_folder)
        except BaseException as e_rm:
            set_dirty(src_folder)
            msg = str(e_rm)
            if six.PY2:
                msg = str(e_rm).decode("latin1")  # Windows prints some chars in latin1
            output.error("Unable to remove source folder %s\n%s" % (src_folder, msg))
            output.warn("**** Please delete it manually ****")
            if raise_error or isinstance(e_rm, KeyboardInterrupt):
                raise ConanException("Unable to remove source folder")

    sources_pointer = cache.package_layout(reference).scm_folder()
    local_sources_path = load(sources_pointer) if os.path.exists(sources_pointer) else None
    if is_dirty(src_folder):
        output.warn("Trying to remove corrupted source folder")
        remove_source()
    elif conanfile.build_policy_always:
        output.warn("Detected build_policy 'always', trying to remove source folder")
        remove_source()
    elif local_sources_path and os.path.exists(local_sources_path):
        output.warn("Detected 'scm' auto in conanfile, trying to remove source folder")
        remove_source()

    if not os.path.exists(src_folder):  # No source folder, need to get it
        set_dirty(src_folder)
        mkdir(src_folder)

        def get_sources_from_exports():
            # so self exported files have precedence over python_requires ones
            merge_directories(export_folder, src_folder)
            # Now move the export-sources to the right location
            merge_directories(export_source_folder, src_folder)

        _run_source(conanfile, conanfile_path, src_folder, hook_manager, reference, cache,
                    local_sources_path, get_sources_from_exports=get_sources_from_exports)

        clean_dirty(src_folder)  # Everything went well, remove DIRTY flag
def _build_package(self, node, package_ref, output, keep_build):
    conan_ref, conan_file = node.conan_ref, node.conanfile

    t1 = time.time()
    # It is necessary to complete the sources of python requires, which might be used
    for python_require in conan_file.python_requires:
        complete_recipe_sources(self._remote_manager, self._client_cache,
                                conan_file, python_require.conan_ref)

    builder = _ConanPackageBuilder(conan_file, package_ref, self._client_cache, output,
                                   self._hook_manager)

    if is_dirty(builder.build_folder):
        output.warn("Build folder is dirty, removing it: %s" % builder.build_folder)
        rmdir(builder.build_folder)

    skip_build = conan_file.develop and keep_build
    if skip_build:
        output.info("Won't be built as specified by --keep-build")
    if skip_build:
        if not os.path.exists(builder.build_folder):
            msg = "--keep-build specified, but build folder not found"
            self._recorder.package_install_error(package_ref,
                                                 INSTALL_ERROR_MISSING_BUILD_FOLDER,
                                                 msg, remote_name=None)
            raise ConanException(msg)
    else:
        with self._client_cache.conanfile_write_lock(conan_ref):
            set_dirty(builder.build_folder)
            complete_recipe_sources(self._remote_manager, self._client_cache,
                                    conan_file, conan_ref)
            builder.prepare_build()

    with self._client_cache.conanfile_read_lock(conan_ref):
        try:
            if not skip_build:
                builder.build()
                clean_dirty(builder.build_folder)
            builder.package()
        except ConanException as exc:
            self._recorder.package_install_error(package_ref, INSTALL_ERROR_BUILDING,
                                                 str(exc), remote_name=None)
            raise exc
        else:
            # Log build
            self._log_built_package(builder.build_folder, package_ref.copy_clear_rev(),
                                    time.time() - t1)
def _handle_node_cache(self, node, keep_build, processed_package_references, remotes):
    pref = node.pref
    assert pref.id, "Package-ID without value"

    conanfile = node.conanfile
    output = conanfile.output

    package_folder = self._cache.package_layout(pref.ref, conanfile.short_paths).package(pref)

    with self._cache.package_layout(pref.ref).package_lock(pref):
        if pref not in processed_package_references:
            processed_package_references.add(pref)
            if node.binary == BINARY_BUILD:
                assert node.prev is None, "PREV for %s to be built should be None" % str(pref)
                set_dirty(package_folder)
                pref = self._build_package(node, pref, output, keep_build, remotes)
                clean_dirty(package_folder)
                assert node.prev is not None, "PREV for %s to be built is None" % str(pref)
                assert pref.revision is not None, "PREV for %s to be built is None" % str(pref)
            elif node.binary in (BINARY_UPDATE, BINARY_DOWNLOAD):
                assert node.prev, "PREV for %s is None" % str(pref)
                if not self._node_concurrently_installed(node, package_folder):
                    set_dirty(package_folder)
                    assert pref.revision is not None, "Installer should receive #PREV always"
                    self._remote_manager.get_package(pref, package_folder, node.binary_remote,
                                                     output, self._recorder)
                    output.info("Downloaded package revision %s" % pref.revision)
                    with self._cache.package_layout(pref.ref).update_metadata() as metadata:
                        metadata.packages[pref.id].remote = node.binary_remote.name
                    clean_dirty(package_folder)
                else:
                    output.success('Download skipped. Probable concurrent download')
                    log_package_got_from_local_cache(pref)
                    self._recorder.package_fetched_from_cache(pref)
            elif node.binary == BINARY_CACHE:
                assert node.prev, "PREV for %s is None" % str(pref)
                output.success('Already installed!')
                log_package_got_from_local_cache(pref)
                self._recorder.package_fetched_from_cache(pref)

        # Call the info method
        self._call_package_info(conanfile, package_folder, ref=pref.ref)
        self._recorder.package_cpp_info(pref, conanfile.cpp_info)
def _compress_package_files(self, layout, pref, integrity_check):
    t1 = time.time()

    if layout.package_is_dirty(pref):
        raise ConanException("Package %s is corrupted, aborting upload.\n"
                             "Remove it with 'conan remove %s -p=%s'"
                             % (pref, pref.ref, pref.id))

    download_pkg_folder = layout.download_package(pref)
    package_tgz = os.path.join(download_pkg_folder, PACKAGE_TGZ_NAME)
    if is_dirty(package_tgz):
        self._output.warn("%s: Removing %s, marked as dirty" % (str(pref), PACKAGE_TGZ_NAME))
        os.remove(package_tgz)
        clean_dirty(package_tgz)

    # Get all the files in that directory
    # existing package, will use short paths if defined
    package_folder = layout.package(pref)
    files, symlinks = gather_files(package_folder)

    if CONANINFO not in files or CONAN_MANIFEST not in files:
        logger.error("Missing info or manifest in uploading files: %s" % (str(files)))
        raise ConanException("Cannot upload corrupted package '%s'" % str(pref))

    logger.debug("UPLOAD: Time remote_manager build_files_set : %f" % (time.time() - t1))
    if integrity_check:
        self._package_integrity_check(pref, files, package_folder)
        logger.debug("UPLOAD: Time remote_manager check package integrity : %f"
                     % (time.time() - t1))

    if not os.path.isfile(package_tgz):
        if self._output and not self._output.is_terminal:
            self._output.writeln("Compressing package...")
        tgz_files = {f: path for f, path in files.items()
                     if f not in [CONANINFO, CONAN_MANIFEST]}
        tgz_path = compress_files(tgz_files, symlinks, PACKAGE_TGZ_NAME, download_pkg_folder,
                                  self._output)
        assert tgz_path == package_tgz
        assert os.path.exists(package_tgz)

    return {PACKAGE_TGZ_NAME: package_tgz,
            CONANINFO: files[CONANINFO],
            CONAN_MANIFEST: files[CONAN_MANIFEST]}
def download(self, url, file_path=None, md5=None, sha1=None, sha256=None, **kwargs):
    """ compatible interface of FileDownloader + checksum
    """
    checksum = sha256 or sha1 or md5
    # If it is a user download, it must contain a checksum
    assert (not self._user_download) or (self._user_download and checksum)
    h = self._get_hash(url, checksum)
    with self._lock(h):
        cached_path = os.path.join(self._cache_folder, h)
        if is_dirty(cached_path):
            if os.path.exists(cached_path):
                os.remove(cached_path)
            clean_dirty(cached_path)

        if os.path.exists(cached_path):
            # If exists but it is corrupted, it is removed. Note that v2 downloads
            # do not have checksums, this only works for user downloads
            try:
                check_checksum(cached_path, md5, sha1, sha256)
            except ConanException:
                logger.error("Cached file corrupt, redownloading")
                remove(cached_path)

        if not os.path.exists(cached_path):
            set_dirty(cached_path)
            self._file_downloader.download(url=url, file_path=cached_path, md5=md5,
                                           sha1=sha1, sha256=sha256, **kwargs)
            clean_dirty(cached_path)

        if file_path is not None:
            file_path = os.path.abspath(file_path)
            mkdir(os.path.dirname(file_path))
            shutil.copy2(cached_path, file_path)
        else:
            with open(cached_path, 'rb') as handle:
                tmp = handle.read()
            return tmp
def package_remove(self, pref):
    # Here we could validate and check we own a write lock over this package
    assert isinstance(pref, PackageReference)
    assert pref.ref == self._ref, "{!r} != {!r}".format(pref.ref, self._ref)

    # This is NOT the short paths, but the standard cache one
    pkg_folder = os.path.join(self._base_folder, PACKAGES_FOLDER, pref.id)
    try:
        rm_conandir(pkg_folder)  # This will remove the shortened path too if exists
    except OSError as e:
        raise ConanException("%s\n\nFolder: %s\n"
                             "Couldn't remove folder, might be busy or open\n"
                             "Close any app using it, and retry" % (pkg_folder, str(e)))
    if is_dirty(pkg_folder):
        clean_dirty(pkg_folder)
def _compress_recipe_files(self, ref):
    export_folder = self._cache.export(ref)

    for f in (EXPORT_TGZ_NAME, EXPORT_SOURCES_TGZ_NAME):
        tgz_path = os.path.join(export_folder, f)
        if is_dirty(tgz_path):
            self._user_io.out.warn("%s: Removing %s, marked as dirty" % (str(ref), f))
            os.remove(tgz_path)
            clean_dirty(tgz_path)

    files, symlinks = gather_files(export_folder)
    if CONANFILE not in files or CONAN_MANIFEST not in files:
        raise ConanException("Cannot upload corrupted recipe '%s'" % str(ref))
    export_src_folder = self._cache.export_sources(ref, short_paths=None)
    src_files, src_symlinks = gather_files(export_src_folder)

    the_files = _compress_recipe_files(files, symlinks, src_files, src_symlinks, export_folder,
                                       self._user_io.out)
    return the_files
def _compress_recipe_files(self, ref):
    layout = self._cache.package_layout(ref)
    download_export_folder = layout.download_export()

    for f in (EXPORT_TGZ_NAME, EXPORT_SOURCES_TGZ_NAME):
        tgz_path = os.path.join(download_export_folder, f)
        if is_dirty(tgz_path):
            self._output.warn("%s: Removing %s, marked as dirty" % (str(ref), f))
            os.remove(tgz_path)
            clean_dirty(tgz_path)

    export_folder = layout.export()
    files, symlinks = gather_files(export_folder)
    if CONANFILE not in files or CONAN_MANIFEST not in files:
        raise ConanException("Cannot upload corrupted recipe '%s'" % str(ref))
    export_src_folder = layout.export_sources()
    src_files, src_symlinks = gather_files(export_src_folder)

    result = {CONANFILE: files.pop(CONANFILE),
              CONAN_MANIFEST: files.pop(CONAN_MANIFEST)}

    def add_tgz(tgz_name, tgz_files, tgz_symlinks, msg):
        tgz = os.path.join(download_export_folder, tgz_name)
        if os.path.isfile(tgz):
            result[tgz_name] = tgz
        elif tgz_files:
            if self._output and not self._output.is_terminal:
                self._output.writeln(msg)
            tgz = compress_files(tgz_files, tgz_symlinks, tgz_name, download_export_folder,
                                 self._output)
            result[tgz_name] = tgz

    add_tgz(EXPORT_TGZ_NAME, files, symlinks, "Compressing recipe...")
    add_tgz(EXPORT_SOURCES_TGZ_NAME, src_files, src_symlinks, "Compressing recipe sources...")

    return result
def _evaluate_clean_pkg_folder_dirty(self, node, package_layout, package_folder, pref):
    # Check if dirty, to remove it
    with package_layout.package_lock(pref):
        assert node.recipe != RECIPE_EDITABLE, "Editable package shouldn't reach this code"
        if is_dirty(package_folder):
            node.conanfile.output.warn("Package is corrupted, removing folder: %s"
                                       % package_folder)
            rmdir(package_folder)  # Do not remove if it is EDITABLE
            clean_dirty(package_folder)
            return

        if self._cache.config.revisions_enabled:
            metadata = package_layout.load_metadata()
            rec_rev = metadata.packages[pref.id].recipe_revision
            if rec_rev and rec_rev != node.ref.revision:
                node.conanfile.output.warn("The package {} doesn't belong to the installed "
                                           "recipe revision, removing folder".format(pref))
                rmdir(package_folder)
            return metadata
def _build(self, nodes_to_process, deps_graph, skip_nodes, profile_build_requires, keep_build,
           root_node, update):
    """ The build assumes an input of conans ordered by degree: the first level should be
    independent from each other, and the second level should have dependencies only on
    first-level conans.
    param nodes_by_level: list of lists [[nodeA, nodeB], [nodeC], [nodeD, ...], ...]
    """
    inverse = deps_graph.inverse_levels()
    flat = []
    for level in inverse:
        level = sorted(level, key=lambda x: x.conan_ref)
        flat.extend(n for n in level if n not in skip_nodes)

    for node, package_id, build_needed in nodes_to_process:
        conan_ref, conan_file = node.conan_ref, node.conanfile
        output = ScopedOutput(str(conan_ref), self._out)
        package_ref = PackageReference(conan_ref, package_id)
        package_folder = self._client_cache.package(package_ref, conan_file.short_paths)

        with self._client_cache.package_lock(package_ref):
            set_dirty(package_folder)
            if build_needed and (conan_ref, package_id) not in self._built_packages:
                self._build_package(node, package_id, package_ref, output, keep_build,
                                    profile_build_requires, flat, deps_graph, update)
            else:
                self._get_existing_package(conan_file, package_ref, output, package_folder,
                                           update)
                self._propagate_info(node, flat, deps_graph)

            # Call the info method
            self._call_package_info(conan_file, package_folder)
            clean_dirty(package_folder)

    # Finally, propagate information to root node (conan_ref=None)
    self._propagate_info(root_node, flat, deps_graph)
def download(self, url, file_path=None, md5=None, sha1=None, sha256=None, **kwargs):
    """ compatible interface of FileDownloader + checksum
    """
    checksum = sha256 or sha1 or md5
    # If it is a user download, it must contain a checksum
    assert (not self._user_download) or (self._user_download and checksum)
    h = self._get_hash(url, checksum)
    with self._lock(h):
        cached_path = os.path.join(self._cache_folder, h)
        if is_dirty(cached_path):
            if os.path.exists(cached_path):
                os.remove(cached_path)
            clean_dirty(cached_path)

        if not os.path.exists(cached_path):
            set_dirty(cached_path)
            self._file_downloader.download(url=url, file_path=cached_path, md5=md5,
                                           sha1=sha1, sha256=sha256, **kwargs)
            clean_dirty(cached_path)
        else:
            # specific check for corrupted cached files, will raise, but do nothing more
            # user can report it or "rm -rf cache_folder/path/to/file"
            try:
                check_checksum(cached_path, md5, sha1, sha256)
            except ConanException as e:
                raise ConanException("%s\nCached downloaded file corrupted: %s"
                                     % (str(e), cached_path))

        if file_path is not None:
            file_path = os.path.abspath(file_path)
            mkdir(os.path.dirname(file_path))
            shutil.copy2(cached_path, file_path)
        else:
            with open(cached_path, 'rb') as handle:
                tmp = handle.read()
            return tmp
def _handle_node_cache(self, node, package_id, build_needed, keep_build,
                       profile_build_requires, inverse_levels, deps_graph, update):
    conan_ref, conan_file = node.conan_ref, node.conanfile
    output = ScopedOutput(str(conan_ref), self._out)
    package_ref = PackageReference(conan_ref, package_id)
    package_folder = self._client_cache.package(package_ref, conan_file.short_paths)

    with self._client_cache.package_lock(package_ref):
        set_dirty(package_folder)
        if build_needed and (conan_ref, package_id) not in self._built_packages:
            self._build_package(node, package_id, package_ref, output, keep_build,
                                profile_build_requires, inverse_levels, deps_graph, update)
        else:
            self._get_existing_package(conan_file, package_ref, output, package_folder, update)
            self._propagate_info(node, inverse_levels, deps_graph)

        # Call the info method
        self._call_package_info(conan_file, package_folder)
        clean_dirty(package_folder)
def config_source(export_folder, export_source_folder, src_folder, conan_file, output,
                  force=False):
    """ creates src folder and retrieve, calling source() from conanfile
    the necessary source code
    """

    def remove_source(raise_error=True):
        output.warn("This can take a while for big packages")
        try:
            rmdir(src_folder)
        except BaseException as e_rm:
            set_dirty(src_folder)
            msg = str(e_rm)
            if six.PY2:
                msg = str(e_rm).decode("latin1")  # Windows prints some chars in latin1
            output.error("Unable to remove source folder %s\n%s" % (src_folder, msg))
            output.warn("**** Please delete it manually ****")
            if raise_error or isinstance(e_rm, KeyboardInterrupt):
                raise ConanException("Unable to remove source folder")

    if force:
        output.warn("Forced removal of source folder")
        remove_source()
    elif is_dirty(src_folder):
        output.warn("Trying to remove corrupted source folder")
        remove_source()
    elif conan_file.build_policy_always:
        output.warn("Detected build_policy 'always', trying to remove source folder")
        remove_source()

    if not os.path.exists(src_folder):
        output.info('Configuring sources in %s' % src_folder)
        shutil.copytree(export_folder, src_folder, symlinks=True)
        # Now move the export-sources to the right location
        merge_directories(export_source_folder, src_folder)
        for f in (EXPORT_TGZ_NAME, EXPORT_SOURCES_TGZ_NAME, CONANFILE + "c", CONANFILE + "o",
                  CONANFILE, CONAN_MANIFEST):
            try:
                os.remove(os.path.join(src_folder, f))
            except OSError:
                pass
        try:
            shutil.rmtree(os.path.join(src_folder, "__pycache__"))
        except OSError:
            pass

        set_dirty(src_folder)
        os.chdir(src_folder)
        conan_file.source_folder = src_folder
        try:
            with get_env_context_manager(conan_file):
                with conanfile_exception_formatter(str(conan_file), "source"):
                    conan_file.build_folder = None
                    conan_file.package_folder = None
                    conan_file.source()
            clean_dirty(src_folder)  # Everything went well, remove DIRTY flag
        except Exception as e:
            os.chdir(export_folder)
            # in case source() fails (user error, typically), remove the src_folder
            # and raise to interrupt any other processes (build, package)
            output.warn("Trying to remove corrupted source folder")
            remove_source(raise_error=False)
            if isinstance(e, ConanExceptionInUserConanfileMethod):
                raise e
            raise ConanException(e)
def config_source(export_folder, export_source_folder, src_folder, conan_file, output,
                  force=False):
    """ creates src folder and retrieve, calling source() from conanfile
    the necessary source code
    """

    def remove_source(raise_error=True):
        output.warn("This can take a while for big packages")
        try:
            rmdir(src_folder)
        except BaseException as e_rm:
            set_dirty(src_folder)
            msg = str(e_rm)
            if six.PY2:
                msg = str(e_rm).decode("latin1")  # Windows prints some chars in latin1
            output.error("Unable to remove source folder %s\n%s" % (src_folder, msg))
            output.warn("**** Please delete it manually ****")
            if raise_error or isinstance(e_rm, KeyboardInterrupt):
                raise ConanException("Unable to remove source folder")

    if force:
        output.warn("Forced removal of source folder")
        remove_source()
    elif is_dirty(src_folder):
        output.warn("Trying to remove dirty source folder")
        remove_source()
    elif conan_file.build_policy_always:
        output.warn("Detected build_policy 'always', trying to remove source folder")
        remove_source()

    if not os.path.exists(src_folder):
        output.info('Configuring sources in %s' % src_folder)
        shutil.copytree(export_folder, src_folder, symlinks=True)
        # Now move the export-sources to the right location
        merge_directories(export_source_folder, src_folder)
        for f in (EXPORT_TGZ_NAME, EXPORT_SOURCES_TGZ_NAME, CONANFILE + "c", CONANFILE + "o",
                  CONANFILE, CONAN_MANIFEST):
            try:
                os.remove(os.path.join(src_folder, f))
            except OSError:
                pass
        try:
            shutil.rmtree(os.path.join(src_folder, "__pycache__"))
        except OSError:
            pass

        set_dirty(src_folder)
        os.chdir(src_folder)
        conan_file.source_folder = src_folder
        try:
            with tools.environment_append(conan_file.env):
                with conanfile_exception_formatter(str(conan_file), "source"):
                    conan_file.build_folder = None
                    conan_file.package_folder = None
                    conan_file.source()
            clean_dirty(src_folder)  # Everything went well, remove DIRTY flag
        except Exception as e:
            os.chdir(export_folder)
            # in case source() fails (user error, typically), remove the src_folder
            # and raise to interrupt any other processes (build, package)
            output.warn("Trying to remove dirty source folder")
            remove_source(raise_error=False)
            if isinstance(e, ConanExceptionInUserConanfileMethod):
                raise e
            raise ConanException(e)
def set_dirty_context_manager(self, pref):
    pkg_folder = os.path.join(self._base_folder, PACKAGES_FOLDER, pref.id)
    set_dirty(pkg_folder)
    yield
    clean_dirty(pkg_folder)
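# Presumably the generator above is wrapped with contextlib.contextmanager (the decorator is
# not part of the snippet). Note that clean_dirty() only runs if the body completes without
# raising, so an interrupted or failed operation leaves the folder flagged as dirty for a
# later cleanup pass. A hedged usage sketch (the _build_package call is hypothetical here,
# modeled on the installer snippets above):
#
#     with self.set_dirty_context_manager(pref):
#         self._build_package(node, pref, output, keep_build)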
def config_source(export_folder, export_source_folder, local_sources_path, src_folder,
                  conan_file, output, force=False):
    """ creates src folder and retrieve, calling source() from conanfile
    the necessary source code
    """

    def remove_source(raise_error=True):
        output.warn("This can take a while for big packages")
        try:
            rmdir(src_folder)
        except BaseException as e_rm:
            set_dirty(src_folder)
            msg = str(e_rm)
            if six.PY2:
                msg = str(e_rm).decode("latin1")  # Windows prints some chars in latin1
            output.error("Unable to remove source folder %s\n%s" % (src_folder, msg))
            output.warn("**** Please delete it manually ****")
            if raise_error or isinstance(e_rm, KeyboardInterrupt):
                raise ConanException("Unable to remove source folder")

    if force:
        output.warn("Forced removal of source folder")
        remove_source()
    elif is_dirty(src_folder):
        output.warn("Trying to remove corrupted source folder")
        remove_source()
    elif conan_file.build_policy_always:
        output.warn("Detected build_policy 'always', trying to remove source folder")
        remove_source()

    if not os.path.exists(src_folder):
        output.info('Configuring sources in %s' % src_folder)
        shutil.copytree(export_folder, src_folder, symlinks=True)
        # Now move the export-sources to the right location
        merge_directories(export_source_folder, src_folder)
        _clean_source_folder(src_folder)
        try:
            shutil.rmtree(os.path.join(src_folder, "__pycache__"))
        except OSError:
            pass

        set_dirty(src_folder)
        os.chdir(src_folder)
        conan_file.source_folder = src_folder
        try:
            with get_env_context_manager(conan_file):
                with conanfile_exception_formatter(str(conan_file), "source"):
                    conan_file.build_folder = None
                    conan_file.package_folder = None
                    scm = get_scm(conan_file, src_folder)
                    if scm:  # scm.capture_origin before exporting
                        if local_sources_path and os.path.exists(local_sources_path):
                            output.info("Getting sources from folder: %s" % local_sources_path)
                            merge_directories(local_sources_path, src_folder)
                            _clean_source_folder(src_folder)
                        else:
                            output.info("Getting sources from url: '%s'" % scm.url)
                            scm.clone()
                            scm.checkout()
                    conan_file.source()
            clean_dirty(src_folder)  # Everything went well, remove DIRTY flag
        except Exception as e:
            os.chdir(export_folder)
            # in case source() fails (user error, typically), remove the src_folder
            # and raise to interrupt any other processes (build, package)
            output.warn("Trying to remove corrupted source folder")
            remove_source(raise_error=False)
            if isinstance(e, ConanExceptionInUserConanfileMethod):
                raise e
            raise ConanException(e)
def cmd_export(app, conanfile_path, name, version, user, channel, keep_source,
               export=True, graph_lock=None, ignore_dirty=False):
    """ Export the recipe
    param conanfile_path: the original source directory of the user containing a
                          conanfile.py
    """
    loader, cache, hook_manager, output = app.loader, app.cache, app.hook_manager, app.out
    revisions_enabled = app.config.revisions_enabled
    scm_to_conandata = app.config.scm_to_conandata
    conanfile = loader.load_export(conanfile_path, name, version, user, channel)

    # FIXME: Conan 2.0, deprecate CONAN_USER AND CONAN_CHANNEL and remove this try excepts
    # Take the default from the env vars if they exist to not break behavior
    try:
        user = conanfile.user
    except ConanV2Exception:
        raise
    except ConanException:
        user = None

    try:
        channel = conanfile.channel
    except ConanV2Exception:
        raise
    except ConanException:
        channel = None

    ref = ConanFileReference(conanfile.name, conanfile.version, user, channel)

    # If we receive lock information, python_requires could have been locked
    if graph_lock:
        node_id = graph_lock.get_consumer(ref)
        python_requires = graph_lock.python_requires(node_id)
        # TODO: check that the locked python_requires are different from the loaded ones
        app.range_resolver.clear_output()  # invalidate previous version range output
        conanfile = loader.load_export(conanfile_path, conanfile.name, conanfile.version,
                                       ref.user, ref.channel, python_requires)

    check_casing_conflict(cache=cache, ref=ref)
    package_layout = cache.package_layout(ref, short_paths=conanfile.short_paths)

    if not export:
        metadata = package_layout.load_metadata()
        recipe_revision = metadata.recipe.revision
        ref = ref.copy_with_rev(recipe_revision)
        if graph_lock:
            graph_lock.update_exported_ref(node_id, ref)
        return ref

    _check_settings_for_warnings(conanfile, output)

    hook_manager.execute("pre_export", conanfile=conanfile, conanfile_path=conanfile_path,
                         reference=package_layout.ref)
    logger.debug("EXPORT: %s" % conanfile_path)

    output.highlight("Exporting package recipe")
    output = conanfile.output

    # Copy sources to target folders
    with package_layout.conanfile_write_lock(output=output):
        # Get previous manifest
        try:
            previous_manifest = package_layout.recipe_manifest()
        except IOError:
            previous_manifest = None

        package_layout.export_remove()

        export_folder = package_layout.export()
        export_src_folder = package_layout.export_sources()
        mkdir(export_folder)
        mkdir(export_src_folder)
        origin_folder = os.path.dirname(conanfile_path)
        export_recipe(conanfile, origin_folder, export_folder)
        export_source(conanfile, origin_folder, export_src_folder)
        shutil.copy2(conanfile_path, package_layout.conanfile())

        # Calculate the "auto" values and replace in conanfile.py
        scm_data, local_src_folder = _capture_scm_auto_fields(conanfile,
                                                              os.path.dirname(conanfile_path),
                                                              package_layout, output,
                                                              ignore_dirty, scm_to_conandata)
        # Clear previous scm_folder
        modified_recipe = False
        scm_sources_folder = package_layout.scm_sources()
        if local_src_folder and not keep_source:
            # Copy the local scm folder to scm_sources in the cache
            mkdir(scm_sources_folder)
            _export_scm(scm_data, local_src_folder, scm_sources_folder, output)
            # https://github.com/conan-io/conan/issues/5195#issuecomment-551840597
            # It will cause the source folder to be removed (needed because the recipe still
            # has the "auto" with uncommitted changes)
            modified_recipe = True

        # Execute post-export hook before computing the digest
        hook_manager.execute("post_export", conanfile=conanfile, reference=package_layout.ref,
                             conanfile_path=package_layout.conanfile())

        # Compute the new digest
        manifest = FileTreeManifest.create(export_folder, export_src_folder)
        modified_recipe |= not previous_manifest or previous_manifest != manifest
        if modified_recipe:
            output.success('A new %s version was exported' % CONANFILE)
            output.info('Folder: %s' % export_folder)
        else:
            output.info("The stored package has not changed")
            manifest = previous_manifest  # Use the old one, keep old timestamp
        manifest.save(export_folder)

    # Compute the revision for the recipe
    revision = _update_revision_in_metadata(package_layout=package_layout,
                                            revisions_enabled=revisions_enabled,
                                            output=output,
                                            path=os.path.dirname(conanfile_path),
                                            manifest=manifest,
                                            revision_mode=conanfile.revision_mode)

    # FIXME: Conan 2.0 Clear the registry entry if the recipe has changed
    source_folder = package_layout.source()
    if os.path.exists(source_folder):
        try:
            if is_dirty(source_folder):
                output.info("Source folder is corrupted, forcing removal")
                rmdir(source_folder)
                clean_dirty(source_folder)
            elif modified_recipe and not keep_source:
                output.info("Package recipe modified in export, forcing source folder removal")
                output.info("Use the --keep-source, -k option to skip it")
                rmdir(source_folder)
        except BaseException as e:
            output.error("Unable to delete source folder. Will be marked as corrupted "
                         "for deletion")
            output.warn(str(e))
            set_dirty(source_folder)

    # When revisions enabled, remove the packages not matching the revision
    if revisions_enabled:
        packages = search_packages(package_layout, query=None)
        metadata = package_layout.load_metadata()
        recipe_revision = metadata.recipe.revision
        to_remove = [pid for pid in packages if
                     metadata.packages.get(pid) and
                     metadata.packages.get(pid).recipe_revision != recipe_revision]
        if to_remove:
            output.info("Removing the local binary packages from different recipe revisions")
            remover = DiskRemover()
            remover.remove_packages(package_layout, ids_filter=to_remove)

    ref = ref.copy_with_rev(revision)
    output.info("Exported revision: %s" % revision)
    if graph_lock:
        graph_lock.update_exported_ref(node_id, ref)
    return ref