def testMd5Name(self):
    """The tgz compression must be reproducible: compressing the same files
    twice, even at different times, yields archives with identical md5."""
    folder = temp_folder()
    save(os.path.join(folder, "one_file.txt"), b"The contents")
    save(os.path.join(folder, "Two_file.txt"), b"Two contents")
    files = {"one_file.txt": os.path.join(folder, "one_file.txt"),
             "Two_file.txt": os.path.join(folder, "Two_file.txt")}
    compress_files(files, PACKAGE_TGZ_NAME, excluded=[PACKAGE_TGZ_NAME], dest_dir=folder)
    file_path = os.path.join(folder, PACKAGE_TGZ_NAME)
    md5_a = md5sum(file_path)
    time.sleep(1)  # Timestamps change
    folder = temp_folder()
    compress_files(files, PACKAGE_TGZ_NAME, excluded=[PACKAGE_TGZ_NAME], dest_dir=folder)
    file_path = os.path.join(folder, PACKAGE_TGZ_NAME)
    md5_b = md5sum(file_path)
    # FIX: assertEquals is a deprecated alias (removed in Python 3.12)
    self.assertEqual(md5_a, md5_b)
def testMd5Name(self):
    """Compressing identical in-memory file specs (contents + mode) at
    different times must produce byte-identical tgz archives (stable md5)."""
    files = {"one_file.txt": {"contents": b"The contents", "mode": 0o777},
             "Two_file.txt": {"contents": b"Two contents", "mode": 0o777}}
    new_files = compress_files(files, PACKAGE_TGZ_NAME, excluded=[])
    folder = temp_folder()
    file_path = os.path.join(folder, PACKAGE_TGZ_NAME)
    save(file_path, new_files[PACKAGE_TGZ_NAME])
    md5_a = md5sum(file_path)
    time.sleep(1)  # Timestamps change
    new_files = compress_files(files, PACKAGE_TGZ_NAME, excluded=[])
    folder = temp_folder()
    file_path = os.path.join(folder, PACKAGE_TGZ_NAME)
    save(file_path, new_files[PACKAGE_TGZ_NAME])
    md5_b = md5sum(file_path)
    # FIX: assertEquals is a deprecated alias (removed in Python 3.12)
    self.assertEqual(md5_a, md5_b)
def test_md5_compress(self):
    """Compressing the same files twice must yield the known-good md5 and
    the same md5 on the second run (timestamp-independent)."""
    folder = temp_folder()
    save(os.path.join(folder, "one_file.txt"), b"The contents")
    save(os.path.join(folder, "Two_file.txt"), b"Two contents")
    files = {"one_file.txt": os.path.join(folder, "one_file.txt"),
             "Two_file.txt": os.path.join(folder, "Two_file.txt")}
    compress_files(files, {}, PACKAGE_TGZ_NAME, dest_dir=folder)
    file_path = os.path.join(folder, PACKAGE_TGZ_NAME)
    md5_a = md5sum(file_path)
    self.assertEqual(md5_a, "df220cfbc0652e8992a89a77666c03b5")
    time.sleep(1)  # Timestamps change
    folder = temp_folder()
    compress_files(files, {}, PACKAGE_TGZ_NAME, dest_dir=folder)
    file_path = os.path.join(folder, PACKAGE_TGZ_NAME)
    md5_b = md5sum(file_path)
    # FIX: use assertEqual consistently; assertEquals is a deprecated alias
    self.assertEqual(md5_a, md5_b)
def test_gcc_and_environment(self):
    """Build a library with the gcc generator and check that consumer
    options change (or keep) the produced binary's md5."""
    if platform.system() == "SunOS":
        return  # If is using sun-cc the gcc generator doesn't work
    # CREATE A DUMMY LIBRARY WITH GCC (could be generated with other build system)
    client = TestClient()
    client.save({CONANFILE: conanfile, "mean.cpp": mylib, "mean.h": mylibh})
    client.run("export lasote/stable")
    client.run("install Mean/0.1@lasote/stable --build")
    # Reuse the mean library using only the generator
    reuse_gcc_conanfile = '''
import platform
from conans import ConanFile
from conans.tools import environment_append

class ConanReuseLib(ConanFile):
    requires = "Mean/0.1@lasote/stable"
    generators = "gcc"
    settings = "os", "compiler", "build_type", "arch"

    def build(self):
        self.run("c++ example.cpp @conanbuildinfo.gcc -o mean_exe ")
        self.run("./mean_exe" if platform.system() != "Windows" else "mean_exe")
'''
    client.save({CONANFILE: reuse_gcc_conanfile, "example.cpp": example})
    client.run("install . --build missing")
    client.run("build .")
    self.assertIn("15", client.user_io.out)
    self.assertIn("Active var!!!", client.user_io.out)
    client.run("install . --build missing -o Mean:activate_define=False")
    client.run("build .")
    self.assertIn("15", client.user_io.out)
    self.assertNotIn("Active var!!!", client.user_io.out)
    if platform.system() != "Windows":  # MinGW 32 bits apps not running correctly
        client.run("install . --build missing -o Mean:activate_define=False -s arch=x86")
        client.run("build .")
        md5_binary = md5sum(os.path.join(client.current_folder, "mean_exe"))
        # Pass the optimize option that will append a cflag to -O2, the binary
        # will be different
        client.run("install . --build missing -o Mean:activate_define=False "
                   "-o Mean:optimize=True -s arch=x86")
        client.run("build .")
        md5_binary2 = md5sum(os.path.join(client.current_folder, "mean_exe"))
        # FIX: assertNotEquals is a deprecated alias; use assertNotEqual
        self.assertNotEqual(md5_binary, md5_binary2)
        # Rebuild the same binary, same md5sum
        # FIX: this install command was split mid-string across two source
        # lines (flattening artifact); rejoined into one command
        client.run("install . --build missing -o Mean:activate_define=False "
                   "-o Mean:optimize=True -s arch=x86")
        client.run("build .")
        md5_binary = md5sum(os.path.join(client.current_folder, "mean_exe"))
        self.assertEqual(md5_binary, md5_binary2)
def test_md5_compress(self):
    """ The md5 of a tgz should be the same if the files inside are the same """
    src_folder = temp_folder()
    save(os.path.join(src_folder, "one_file.txt"), b"The contents")
    save(os.path.join(src_folder, "Two_file.txt"), b"Two contents")
    files = {name: os.path.join(src_folder, name)
             for name in ("one_file.txt", "Two_file.txt")}
    compress_files(files, {}, PACKAGE_TGZ_NAME, dest_dir=src_folder)
    first_md5 = md5sum(os.path.join(src_folder, PACKAGE_TGZ_NAME))
    if sys.version_info.major == 3 and sys.version_info.minor >= 9:
        # Python 3.9 changed the tar algorithm. Conan tgz will have different checksums
        # https://github.com/conan-io/conan/issues/8020
        expected = "79255eaf79cbb743da7cdb8786f4730a"
    else:
        expected = "df220cfbc0652e8992a89a77666c03b5"
    self.assertEqual(first_md5, expected)
    time.sleep(1)  # Timestamps change
    other_folder = temp_folder()
    compress_files(files, {}, PACKAGE_TGZ_NAME, dest_dir=other_folder)
    second_md5 = md5sum(os.path.join(other_folder, PACKAGE_TGZ_NAME))
    self.assertEqual(first_md5, second_md5)
def _patch_pe(self, filename):
    """Run the ducible tool over a PE binary (when the tool is installed),
    logging the file's md5 before and after the patch."""
    ducible = "C:/ducible/ducible.exe"
    if not os.path.isfile(ducible):
        return  # tool not available: leave the binary untouched
    self._output.info("Patching {} with md5sum: {}".format(filename, md5sum(filename)))
    self._conanfile.run("{} {}".format(ducible, filename))
    self._output.info("Patched file: {} with md5sum: {}".format(filename, md5sum(filename)))
def test_get_recipe_snapshot(self):
    """The recipe snapshot maps every exported file name to its md5."""
    snapshot = self.service.get_recipe_snapshot(self.ref)
    export_path = self.server_store.export(self.ref)
    expected = {name: md5sum(os.path.join(export_path, name))
                for name in ("conanmanifest.txt", "conanfile.py")}
    self.assertEqual(snapshot, expected)
def test_get_conanfile_snapshot(self):
    """The conanfile snapshot maps every exported file name to its md5."""
    snap = self.service.get_conanfile_snapshot(self.conan_reference)
    base_path = self.paths.export(self.conan_reference)
    snap_expected = {name: md5sum(os.path.join(base_path, name))
                     for name in ("hello.cpp", "conanmanifest.txt", "main.cpp",
                                  "CMakeLists.txt", "hellotest.h")}
    # FIX: assertEquals is a deprecated alias (removed in Python 3.12)
    self.assertEqual(snap, snap_expected)
def update_test(self, mode):
    """After tampering with the server-side recipe (extra file and a newer
    manifest), `install --update` must refresh the local copy."""
    self._create_code(mode)
    self.client.run("export . lasote/testing")
    self.client.run("install Hello/0.1@lasote/testing --build=missing")
    self.client.run("upload Hello/0.1@lasote/testing --all")
    self.client.run('remove Hello/0.1@lasote/testing -f')
    self.client.run("install Hello/0.1@lasote/testing")
    # upload to remote again, the folder remains as installed
    self.client.run("install Hello/0.1@lasote/testing --update")
    self.assertIn("Hello/0.1@lasote/testing: Already installed!", self.client.user_io.out)
    self._check_export_installed_folder(mode)
    export_path = self.server.server_store.export(self.reference)
    license_path = os.path.join(export_path, "license.txt")
    save(license_path, "mylicense")
    manifest = FileTreeManifest.load(export_path)
    manifest.time += 1  # make the remote manifest strictly newer
    manifest.file_sums["license.txt"] = md5sum(license_path)
    manifest.save(export_path)
    self.client.run("install Hello/0.1@lasote/testing --update")
    self._check_export_installed_folder(mode, updated=True)
def create(cls, folder):
    """ Walks a folder and create a TreeDigest for it, reading file contents
        from disk, and capturing current time
    """
    from conans.paths import CONAN_MANIFEST, CONANFILE
    checksums = {}
    for root, _, filenames in os.walk(folder):
        rel_root = os.path.relpath(root, folder)
        for name in filenames:
            rel_path = os.path.normpath(os.path.join(rel_root, name)).replace("\\", "/")
            checksums[rel_path] = md5sum(os.path.join(root, name))
    date = calendar.timegm(time.gmtime())
    # Never include the big tgz artifacts, the manifest itself, the compiled
    # conanfile or macOS junk in the digest
    for skip in (PACKAGE_TGZ_NAME, EXPORT_TGZ_NAME, CONAN_MANIFEST,
                 CONANFILE + "c", ".DS_Store"):
        checksums.pop(skip, None)
    checksums = {path: digest for path, digest in checksums.items()
                 if not path.startswith("__pycache__")}
    return cls(date, checksums)
def upload_recipe(self, conan_reference, the_files, retry, retry_wait, ignore_deleted_file):
    """ the_files: dict with relative_path: content """
    self.check_credentials()
    # Diff the local files (by md5) against the remote snapshot
    remote_snapshot = self._get_conan_snapshot(conan_reference)
    local_snapshot = {name: md5sum(path) for name, path in the_files.items()}
    new, modified, deleted = diff_snapshots(local_snapshot, remote_snapshot)
    if ignore_deleted_file and ignore_deleted_file in deleted:
        deleted.remove(ignore_deleted_file)
    files_to_upload = {name.replace("\\", "/"): the_files[name]
                       for name in new + modified}
    if files_to_upload:
        # Ask the server for one upload url per file (it needs the sizes)
        url = "%s/conans/%s/upload_urls" % (self._remote_api_url, "/".join(conan_reference))
        filesizes = {name.replace("\\", "/"): os.stat(path).st_size
                     for name, path in files_to_upload.items()}
        urls = self._get_json(url, data=filesizes)
        self.upload_files(urls, files_to_upload, self._output, retry, retry_wait)
    if deleted:
        self._remove_conanfile_files(conan_reference, deleted)
def create(cls, folder):
    """ Walks a folder and create a FileTreeManifest for it, reading file contents
        from disk, and capturing current time
    """
    excluded = (PACKAGE_TGZ_NAME, EXPORT_TGZ_NAME, CONAN_MANIFEST, CONANFILE + "c",
                ".DS_Store")
    file_sums = {}
    for root, dirs, files in os.walk(folder):
        dirs[:] = [d for d in dirs if d != "__pycache__"]  # Avoid recursing pycache
        rel_root = os.path.relpath(root, folder)
        for name in files:
            # Avoid md5 of big TGZ files and other discarded files
            if name in excluded or discarded_file(name):
                continue
            abs_path = os.path.join(root, name)
            if not os.path.exists(abs_path):
                raise ConanException("The file is a broken symlink, verify that "
                                     "you are packaging the needed destination files: '%s'"
                                     % abs_path)
            rel_path = os.path.normpath(os.path.join(rel_root, name)).replace("\\", "/")
            file_sums[rel_path] = md5sum(abs_path)
    return cls(calendar.timegm(time.gmtime()), file_sums)
def _file_document(name, path):
    """Describe a file for indexing: its name, path, md5 and sha1 digests."""
    return dict(name=name, path=path, md5=md5sum(path), sha1=sha1sum(path))
def create(cls, folder):
    """ Walks a folder and create a TreeDigest for it, reading file contents
        from disk, and capturing current time
    """
    from conans.paths import CONAN_MANIFEST, CONANFILE
    digests = {}
    for root, _, filenames in os.walk(folder):
        rel_root = os.path.relpath(root, folder)
        for name in filenames:
            abs_path = os.path.join(root, name)
            rel_path = os.path.normpath(os.path.join(rel_root, name))
            digests[rel_path.replace("\\", "/")] = md5sum(abs_path)
    date = calendar.timegm(time.gmtime())
    # Exclude the tgz artifacts, the manifest itself, the compiled conanfile
    # and macOS junk from the digest
    for skip in (PACKAGE_TGZ_NAME, EXPORT_TGZ_NAME, CONAN_MANIFEST,
                 CONANFILE + "c", ".DS_Store"):
        digests.pop(skip, None)
    digests = {path: md5 for path, md5 in digests.items()
               if not path.startswith("__pycache__")}
    return cls(date, digests)
def upload_package(self, package_reference, the_files, retry, retry_wait):
    """ basedir: Base directory with the files to upload (for read the files in disk)
        relative_files: relative paths to upload
    """
    self.check_credentials()
    t1 = time.time()
    # Diff the local files (by md5) against the remote snapshot
    remote_snapshot = self._get_package_snapshot(package_reference)
    local_snapshot = {name: md5sum(path) for name, path in the_files.items()}
    new, modified, deleted = diff_snapshots(local_snapshot, remote_snapshot)
    files_to_upload = {name: the_files[name] for name in new + modified}
    if not files_to_upload:
        self._output.rewrite_line("Package is up to date.")
        self._output.writeln("")
    else:
        # Obtain one upload url per file (the server needs the sizes)
        url = "%s/conans/%s/packages/%s/upload_urls" % (self._remote_api_url,
                                                        "/".join(package_reference.conan),
                                                        package_reference.package_id)
        filesizes = {name: os.stat(path).st_size
                     for name, path in files_to_upload.items()}
        self._output.rewrite_line("Requesting upload permissions...")
        urls = self._get_json(url, data=filesizes)
        self._output.rewrite_line("Requesting upload permissions...Done!")
        self._output.writeln("")
        self.upload_files(urls, files_to_upload, self._output, retry, retry_wait)
    if deleted:
        self._remove_package_files(package_reference, deleted)
    logger.debug("====> Time rest client upload_package: %f" % (time.time() - t1))
def test_conan_data_as_source_newtools(self):
    """Serve a tgz over a local HTTP server and consume it via conandata.yml
    with the new `conan.tools.files.get` helper; the downloaded file must
    reach the source folder intact."""
    tgz_path = tgz_with_contents({"foo.txt": "foo"})
    if sys.version_info.major == 3 and sys.version_info.minor >= 9:
        # Python 3.9 changed the tar algorithm. Conan tgz will have different checksums
        # https://github.com/conan-io/conan/issues/8020
        md5_value = "7ebdc5ed79b7b72f3a6010da3671ae05"
        sha1_value = "862c1b58de1dfadaad3206b453b4de731c1751af"
        sha256_value = "25200fc2bd7f430358cd7a7c5ce4a84396e8ec68a1e9d8880994b1236f214972"
    else:
        md5_value = "2ef49b5a102db1abb775eaf1922d5662"
        sha1_value = "18dbea2d9a97bb9e9948604a41976bba5b5940bf"
        sha256_value = "9619013c1f7b83cca4bf3f336f8b4525a23d5463e0768599fe5339e02dd0a338"
    self.assertEqual(md5_value, md5sum(tgz_path))
    self.assertEqual(sha1_value, sha1sum(tgz_path))
    self.assertEqual(sha256_value, sha256sum(tgz_path))
    # Instance stoppable thread server and add endpoints
    thread = StoppableThreadBottle()

    @thread.server.get("/myfile.tar.gz")
    def get_file():
        return static_file(os.path.basename(tgz_path), root=os.path.dirname(tgz_path),
                           mimetype="")

    thread.run_server()
    client = TestClient()
    conanfile = textwrap.dedent("""
        from conans import ConanFile
        from conan.tools.files import get

        class Lib(ConanFile):
            def source(self):
                data = self.conan_data["sources"]["all"]
                get(self, **data)
                self.output.info("OK!")
        """)
    # NOTE(review): the span below is redacted/corrupted in this copy of the
    # source ("http://*****:*****@user/testing") — presumably the original
    # completed the conandata.yml literal (server URL + the md5/sha1/sha256
    # values above), saved conanfile/conandata with client.save, and defined
    # `ref` before the create. Restore the exact text from VCS; kept verbatim
    # here, this block is NOT runnable as-is.
    conandata = textwrap.dedent(""" sources: all: url: "http://*****:*****@user/testing")
    client.run("create . {}".format(ref))
    self.assertIn("OK!", client.out)
    source_folder = client.cache.package_layout(ref).source()
    downloaded_file = os.path.join(source_folder, "foo.txt")
    self.assertEqual("foo", load(downloaded_file))
def deploy_manifest_content(self, copied_files):
    """Render (repr) a FileTreeManifest covering the deployed files."""
    file_sums = {f: md5sum(os.path.join(self.output_path, f)) for f in copied_files}
    return repr(FileTreeManifest(timestamp_now(), file_sums))
def deploy_manifest_content(self, copied_files):
    """Render (repr) a FileTreeManifest covering the deployed files."""
    file_sums = {f: md5sum(os.path.join(self.output_path, f)) for f in copied_files}
    now = calendar.timegm(time.gmtime())
    return repr(FileTreeManifest(now, file_sums))
def calc_files_checksum(files):
    """Return {file_name: {"md5": ..., "sha1": ...}} for each given path."""
    checksums = {}
    for file_name, path in files.items():
        checksums[file_name] = {"md5": md5sum(path), "sha1": sha1sum(path)}
    return checksums
def test_get_conanfile_snapshot(self):
    """The conanfile snapshot maps every exported file name to its md5."""
    snap = self.service.get_conanfile_snapshot(self.conan_reference)
    base_path = self.paths.export(self.conan_reference)
    snap_expected = {name: md5sum(os.path.join(base_path, name))
                     for name in ("hello.cpp", "conanmanifest.txt", "main.cpp",
                                  "CMakeLists.txt", "hellotest.h")}
    # FIX: assertEquals is a deprecated alias (removed in Python 3.12)
    self.assertEqual(snap, snap_expected)
def get_snapshot(self, absolute_path="", files_subset=None):
    """returns a dict with the filepaths and md5"""
    if not path_exists(absolute_path, self._store_folder):
        raise NotFoundException("")
    rel_paths = relative_dirs(absolute_path)
    if files_subset is not None:
        rel_paths = set(rel_paths).intersection(set(files_subset))
    abs_paths = (os.path.join(absolute_path, rel) for rel in rel_paths)
    return {abs_path: md5sum(abs_path) for abs_path in abs_paths}
def _report_save_manifest(copied_files, output, dest_folder, manifest_name):
    """Report the copied files and, if any, persist a FileTreeManifest
    (name -> md5) for them in dest_folder."""
    report_copied_files(copied_files, output)
    if not copied_files:
        return
    file_sums = {f: md5sum(os.path.join(dest_folder, f)) for f in copied_files}
    manifest = FileTreeManifest(calendar.timegm(time.gmtime()), file_sums)
    manifest.save(dest_folder, manifest_name)
def testMd5Name(self):
    """Package tgz compression must be deterministic: the same inputs at
    different times yield the same md5."""
    files = {"one_file.txt": "The contents", "Two_file.txt": "Two contents"}
    new_files = compress_package_files(files)
    folder = temp_folder()
    file_path = os.path.join(folder, PACKAGE_TGZ_NAME)
    save(file_path, new_files[PACKAGE_TGZ_NAME])
    md5_a = md5sum(file_path)
    time.sleep(1)  # Timestamps change
    new_files = compress_package_files(files)
    folder = temp_folder()
    file_path = os.path.join(folder, PACKAGE_TGZ_NAME)
    save(file_path, new_files[PACKAGE_TGZ_NAME])
    md5_b = md5sum(file_path)
    # FIX: assertEquals is a deprecated alias (removed in Python 3.12)
    self.assertEqual(md5_a, md5_b)
def get_package_snapshot(self, pref):
    """Return {filename: md5} for the package files of `pref`, or an empty
    dict when the package does not exist remotely.

    FIX: the NotFoundException branch returned a list ([]), inconsistent
    with the dict built on success; any caller iterating .items() on the
    result would break. Return an empty dict instead (same falsiness).
    """
    try:
        # Get the digest and calculate md5 of package files
        npm_package_files = self._download_package_npm(pref)
    except NotFoundException:
        return {}
    return {src_filename: md5sum(src_filepath)
            for src_filename, src_filepath in npm_package_files.items()}
def upload_package(self, package_reference, the_files, retry, retry_wait, no_overwrite):
    """ basedir: Base directory with the files to upload (for read the files in disk)
        relative_files: relative paths to upload
    """
    self.check_credentials()
    t1 = time.time()
    # Diff the local files (by md5) against the remote snapshot
    remote_snapshot = self._get_package_snapshot(package_reference)
    local_snapshot = {name: md5sum(path) for name, path in the_files.items()}
    new, modified, deleted = diff_snapshots(local_snapshot, remote_snapshot)
    # Nothing to upload when only the manifest (or nothing at all) changed
    if not new and not deleted and modified in (["conanmanifest.txt"], []):
        return False
    if no_overwrite and remote_snapshot:
        if no_overwrite == "all":
            raise ConanException("Local package is different from the remote package. "
                                 "Forbidden overwrite")
    files_to_upload = {name: the_files[name] for name in new + modified}
    if files_to_upload:
        # Obtain one upload url per file (the server needs the sizes)
        url = "%s/conans/%s/packages/%s/upload_urls" % (self.remote_api_url,
                                                        "/".join(package_reference.conan),
                                                        package_reference.package_id)
        filesizes = {name: os.stat(path).st_size
                     for name, path in files_to_upload.items()}
        self._output.rewrite_line("Requesting upload permissions...")
        urls = self._get_file_to_url_dict(url, data=filesizes)
        self._output.rewrite_line("Requesting upload permissions...Done!")
        self._output.writeln("")
        self._upload_files(urls, files_to_upload, self._output, retry, retry_wait)
    if deleted:
        raise Exception("This shouldn't be happening, deleted files "
                        "in local package present in remote: %s.\n Please, report it at "
                        "https://github.com/conan-io/conan/issues " % str(deleted))
    logger.debug("====> Time rest client upload_package: %f" % (time.time() - t1))
    return files_to_upload or deleted
def create(cls, folder, exports_sources_folder=None):
    """ Walks a folder and create a FileTreeManifest for it, reading file contents
        from disk, and capturing current time
    """
    files, _ = gather_files(folder)
    # The big tgz artifacts and the manifest itself never enter the digest
    for skip in (PACKAGE_TGZ_NAME, EXPORT_TGZ_NAME, CONAN_MANIFEST,
                 EXPORT_SOURCES_TGZ_NAME):
        files.pop(skip, None)
    file_sums = {name: md5sum(path) for name, path in files.items()}
    if exports_sources_folder:
        export_files, _ = gather_files(exports_sources_folder)
        file_sums.update({"export_source/%s" % name: md5sum(path)
                          for name, path in export_files.items()})
    return cls(calendar.timegm(time.gmtime()), file_sums)
def testMd5Name(self):
    """Package tgz compression must be deterministic: the same inputs at
    different times yield the same md5."""
    files = {"one_file.txt": "The contents", "Two_file.txt": "Two contents"}
    new_files = compress_package_files(files)
    folder = temp_folder()
    file_path = os.path.join(folder, PACKAGE_TGZ_NAME)
    save(file_path, new_files[PACKAGE_TGZ_NAME])
    md5_a = md5sum(file_path)
    time.sleep(1)  # Timestamps change
    new_files = compress_package_files(files)
    folder = temp_folder()
    file_path = os.path.join(folder, PACKAGE_TGZ_NAME)
    save(file_path, new_files[PACKAGE_TGZ_NAME])
    md5_b = md5sum(file_path)
    # FIX: assertEquals is a deprecated alias (removed in Python 3.12)
    self.assertEqual(md5_a, md5_b)
def conan_data_as_source_test(self):
    """Serve a tgz over a local HTTP server and consume it via conandata.yml
    with `tools.get`; the downloaded file must reach the source folder
    intact."""
    tgz_path = tgz_with_contents({"foo.txt": "foo"})
    # Known-good digests of the fixture tgz (pre-Python-3.9 tar layout)
    md5_value = "2ef49b5a102db1abb775eaf1922d5662"
    sha1_value = "18dbea2d9a97bb9e9948604a41976bba5b5940bf"
    sha256_value = "9619013c1f7b83cca4bf3f336f8b4525a23d5463e0768599fe5339e02dd0a338"
    self.assertEqual(md5_value, md5sum(tgz_path))
    self.assertEqual(sha1_value, sha1sum(tgz_path))
    self.assertEqual(sha256_value, sha256sum(tgz_path))
    # Instance stoppable thread server and add endpoints
    thread = StoppableThreadBottle()

    @thread.server.get("/myfile.tar.gz")
    def get_file():
        return static_file(os.path.basename(tgz_path), root=os.path.dirname(tgz_path),
                           mimetype="")

    thread.run_server()
    client = TestClient()
    conanfile = textwrap.dedent("""
        from conans import ConanFile, tools

        class Lib(ConanFile):
            def source(self):
                data = self.conan_data["sources"]["all"]
                tools.get(**data)
                self.output.info("OK!")
        """)
    # NOTE(review): the span below is redacted/corrupted in this copy of the
    # source ("http://*****:*****@user/testing") — presumably the original
    # completed the conandata.yml literal (server URL + the digests above),
    # saved conanfile/conandata with client.save, and defined `ref` before
    # the create. Restore the exact text from VCS; kept verbatim here, this
    # block is NOT runnable as-is.
    conandata = textwrap.dedent(""" sources: all: url: "http://*****:*****@user/testing")
    client.run("create . {}".format(ref))
    self.assertIn("OK!", client.out)
    source_folder = client.cache.package_layout(ref).source()
    downloaded_file = os.path.join(source_folder, "foo.txt")
    self.assertEqual("foo", load(downloaded_file))
def _patch_lib(self, filename):
    """Zero out the timestamps embedded in a static library so builds are
    reproducible, logging the md5 before and after.

    Two encodings of the same timestamp are blanked:
      * the 12-byte ASCII decimal field at offset 16 of the first archive
        member header (every occurrence in the file is NUL-filled)
      * the value re-packed as a little-endian 32-bit integer (every
        occurrence is overwritten with 4 NUL bytes)

    FIX: renamed local `bytes` (shadowed the builtin) to `stamp`; dropped the
    dead `pos` bookkeeping; escape the raw byte patterns before compiling them
    as regexes so metacharacters can never alter the match (digits/spaces are
    unaffected, so behavior is unchanged for well-formed archives).
    """
    self._output.info("Patching {} with md5sum: {}".format(filename, md5sum(filename)))
    with open(filename, 'r+b') as f:
        header_start = 8
        timestamp_offset = 16
        timestamp_size = 12
        # Read the ASCII timestamp field from the first archive member header
        f.seek(header_start + timestamp_offset)
        stamp = f.read(timestamp_size)
        # Blank every occurrence of the ASCII timestamp
        regex = re.compile(re.escape(stamp))
        f.seek(0)
        data = f.read()
        for match_obj in regex.finditer(data):
            offset = match_obj.start()
            f.seek(offset)
            f.write(b"\x00" * timestamp_size)
            self._output.info("patching timestamp at pos: {}".format(offset))
        # Blank every occurrence of the same timestamp as a packed uint32
        timestamp_int = int(stamp.decode("utf-8"))
        packed = struct.pack("<I", timestamp_int)
        regex = re.compile(re.escape(packed))
        f.seek(0)
        data = f.read()
        for match_obj in regex.finditer(data):
            offset = match_obj.start()
            f.seek(offset)
            f.write(b"\x00\x00\x00\x00")
            self._output.info("patching timestamp at pos: {}".format(offset))
    self._output.info("Patched file: {} with md5sum: {}".format(filename, md5sum(filename)))
def create(cls, folder):
    """ Walks a folder and create a TreeDigest for it, reading file contents
        from disk, and capturing current time
    """
    digests = {}
    for root, _, filenames in os.walk(folder):
        rel_root = os.path.relpath(root, folder)
        for name in filenames:
            rel_path = os.path.normpath(os.path.join(rel_root, name)).replace("\\", "/")
            digests[rel_path] = md5sum(os.path.join(root, name))
    return cls(calendar.timegm(time.gmtime()), digests)
def upload_recipe(self, conan_reference, the_files, retry, retry_wait,
                  ignore_deleted_file, no_overwrite):
    """ the_files: dict with relative_path: content """
    self.check_credentials()
    # Diff the local files (by md5) against the remote snapshot
    remote_snapshot = self._get_conan_snapshot(conan_reference)
    local_snapshot = {name: md5sum(path) for name, path in the_files.items()}
    new, modified, deleted = diff_snapshots(local_snapshot, remote_snapshot)
    if ignore_deleted_file and ignore_deleted_file in deleted:
        deleted.remove(ignore_deleted_file)
    # Nothing to upload when only the manifest (or nothing at all) changed
    if not new and not deleted and modified in (["conanmanifest.txt"], []):
        return False
    if no_overwrite and remote_snapshot:
        if no_overwrite in ("all", "recipe"):
            raise ConanException("Local recipe is different from the remote recipe. "
                                 "Forbidden overwrite")
    files_to_upload = {name.replace("\\", "/"): the_files[name]
                       for name in new + modified}
    if files_to_upload:
        # Ask the server for one upload url per file (it needs the sizes)
        url = "%s/conans/%s/upload_urls" % (self._remote_api_url, "/".join(conan_reference))
        filesizes = {name.replace("\\", "/"): os.stat(path).st_size
                     for name, path in files_to_upload.items()}
        urls = self._get_file_to_url_dict(url, data=filesizes)
        self.upload_files(urls, files_to_upload, self._output, retry, retry_wait)
    if deleted:
        self._remove_conanfile_files(conan_reference, deleted)
    return files_to_upload or deleted
def run_imports(conanfile, current_path, output):
    """Execute the recipe imports(), report the copied files and persist an
    imports manifest (name -> md5) so they can be tracked later."""
    file_importer = FileImporter(conanfile, current_path)
    conanfile.copy = file_importer
    conanfile.imports()
    copied_files = file_importer.execute()
    import_output = ScopedOutput("%s imports()" % output.scope, output)
    report_copied_files(copied_files, import_output)
    if copied_files:
        file_sums = {f: md5sum(os.path.join(current_path, f)) for f in copied_files}
        manifest = FileTreeManifest(calendar.timegm(time.gmtime()), file_sums)
        save(os.path.join(current_path, IMPORTS_MANIFESTS), str(manifest))
    return copied_files
def test_get_recipe_snapshot(self):
    """The recipe snapshot maps every exported file name to its md5."""
    snap = self.service.get_recipe_snapshot(self.ref)
    base_path = self.server_store.export(self.ref)
    snap_expected = {name: md5sum(os.path.join(base_path, name))
                     for name in ("hello.cpp", "conanmanifest.txt", "executable",
                                  "main.cpp", "CMakeLists.txt", "hellotest.h")}
    # FIX: assertEquals is a deprecated alias (removed in Python 3.12)
    self.assertEqual(snap, snap_expected)
def run_imports(conanfile, dest_folder, output):
    """Execute the recipe imports() under its environment, report the copied
    files and persist an imports manifest (name -> md5)."""
    file_importer = _FileImporter(conanfile, dest_folder)
    conanfile.copy = file_importer
    conanfile.imports_folder = dest_folder
    with environment_append(conanfile.env):
        conanfile.imports()
    copied_files = file_importer.copied_files
    import_output = ScopedOutput("%s imports()" % output.scope, output)
    report_copied_files(copied_files, import_output)
    if copied_files:
        file_sums = {f: md5sum(os.path.join(dest_folder, f)) for f in copied_files}
        manifest = FileTreeManifest(calendar.timegm(time.gmtime()), file_sums)
        save(os.path.join(dest_folder, IMPORTS_MANIFESTS), str(manifest))
    return copied_files
def upload_package(self, package_reference, the_files, retry, retry_wait, no_overwrite):
    """ basedir: Base directory with the files to upload (for read the files in disk)
        relative_files: relative paths to upload
    """
    self.check_credentials()
    t1 = time.time()
    # Get the remote snapshot and diff the local files (by md5) against it
    remote_snapshot = self._get_package_snapshot(package_reference)
    local_snapshot = {filename: md5sum(abs_path)
                      for filename, abs_path in the_files.items()}
    new, modified, deleted = diff_snapshots(local_snapshot, remote_snapshot)
    # Nothing to upload when only the manifest (or nothing at all) changed
    if not new and not deleted and modified in (["conanmanifest.txt"], []):
        return False
    if no_overwrite and remote_snapshot:
        # FIX: `no_overwrite in ("all")` tested substring membership in the
        # *string* "all" (parentheses don't make a tuple), so e.g. "a" or "ll"
        # would also trigger; compare for equality instead.
        if no_overwrite == "all":
            # FIX: typo "Forbbiden" -> "Forbidden" in the user-facing message
            raise ConanException("Local package is different from the remote package. "
                                 "Forbidden overwrite")
    files_to_upload = {filename: the_files[filename] for filename in new + modified}
    if files_to_upload:
        # Obtain one upload url per file (the server needs the sizes)
        url = "%s/conans/%s/packages/%s/upload_urls" % (self._remote_api_url,
                                                        "/".join(package_reference.conan),
                                                        package_reference.package_id)
        filesizes = {filename: os.stat(abs_path).st_size
                     for filename, abs_path in files_to_upload.items()}
        self._output.rewrite_line("Requesting upload permissions...")
        urls = self._get_file_to_url_dict(url, data=filesizes)
        self._output.rewrite_line("Requesting upload permissions...Done!")
        self._output.writeln("")
        self.upload_files(urls, files_to_upload, self._output, retry, retry_wait)
    if deleted:
        self._remove_package_files(package_reference, deleted)
    logger.debug("====> Time rest client upload_package: %f" % (time.time() - t1))
    return files_to_upload or deleted
def update_test(self, mode):
    """After tampering with the server-side recipe (extra file and a newer
    manifest), `install --update` must refresh the local copy."""
    self._create_code(mode)
    self.client.run("export . lasote/testing")
    self.client.run("install Hello/0.1@lasote/testing --build=missing")
    self.client.run("upload Hello/0.1@lasote/testing --all")
    self.client.run('remove Hello/0.1@lasote/testing -f')
    self.client.run("install Hello/0.1@lasote/testing")
    # upload to remote again, the folder remains as installed
    self.client.run("install Hello/0.1@lasote/testing --update")
    self.assertIn("Hello/0.1@lasote/testing: Already installed!", self.client.user_io.out)
    self._check_export_installed_folder(mode)
    export_path = self.server.paths.export(self.reference)
    license_path = os.path.join(export_path, "license.txt")
    save(license_path, "mylicense")
    manifest = FileTreeManifest.load(export_path)
    manifest.time += 1  # make the remote manifest strictly newer
    manifest.file_sums["license.txt"] = md5sum(license_path)
    manifest.save(export_path)
    self.client.run("install Hello/0.1@lasote/testing --update")
    self._check_export_installed_folder(mode, updated=True)
def upload_recipe(self, conan_reference, the_files, retry, retry_wait, ignore_deleted_file,
                  no_overwrite):
    """ the_files: dict with relative_path: content """
    self.check_credentials()
    # Get the remote snapshot and diff the local files (by md5) against it
    remote_snapshot = self._get_conan_snapshot(conan_reference)
    local_snapshot = {filename: md5sum(abs_path)
                      for filename, abs_path in the_files.items()}
    new, modified, deleted = diff_snapshots(local_snapshot, remote_snapshot)
    if ignore_deleted_file and ignore_deleted_file in deleted:
        deleted.remove(ignore_deleted_file)
    # Nothing to upload when only the manifest (or nothing at all) changed
    if not new and not deleted and modified in (["conanmanifest.txt"], []):
        return False
    if no_overwrite and remote_snapshot:
        if no_overwrite in ("all", "recipe"):
            # FIX: typo "Forbbiden" -> "Forbidden" in the user-facing message
            raise ConanException("Local recipe is different from the remote recipe. "
                                 "Forbidden overwrite")
    files_to_upload = {filename.replace("\\", "/"): the_files[filename]
                       for filename in new + modified}
    if files_to_upload:
        # Get one upload url per file (the server needs the sizes)
        url = "%s/conans/%s/upload_urls" % (self._remote_api_url, "/".join(conan_reference))
        filesizes = {filename.replace("\\", "/"): os.stat(abs_path).st_size
                     for filename, abs_path in files_to_upload.items()}
        urls = self._get_file_to_url_dict(url, data=filesizes)
        self.upload_files(urls, files_to_upload, self._output, retry, retry_wait)
    if deleted:
        self._remove_conanfile_files(conan_reference, deleted)
    return files_to_upload or deleted
def create(cls, folder):
    """ Walks a folder and create a FileTreeManifest for it, reading file contents
        from disk, and capturing current time
    """
    excluded = (PACKAGE_TGZ_NAME, EXPORT_TGZ_NAME, CONAN_MANIFEST, CONANFILE + "c",
                ".DS_Store")
    file_sums = {}
    for root, dirs, files in os.walk(folder):
        dirs[:] = [d for d in dirs if d != "__pycache__"]  # Avoid recursing pycache
        rel_root = os.path.relpath(root, folder)
        for name in files:
            if name in excluded:  # Avoid md5 of big TGZ files
                continue
            abs_path = os.path.join(root, name)
            if not os.path.exists(abs_path):
                raise ConanException("The file is a broken symlink, verify that "
                                     "you are packaging the needed destination files: '%s'"
                                     % abs_path)
            rel_path = os.path.normpath(os.path.join(rel_root, name)).replace("\\", "/")
            file_sums[rel_path] = md5sum(abs_path)
    return cls(calendar.timegm(time.gmtime()), file_sums)
def _file_document(name, path):
    """Build the index document for a file: its name, path and digests."""
    document = {"name": name, "path": path}
    document["md5"] = md5sum(path)
    document["sha1"] = sha1sum(path)
    return document