def test_run(pioproject_dir):
    with fs.cd(pioproject_dir):
        config = ProjectConfig()
        build_dir = config.get_optional_dir("build")
        if os.path.isdir(build_dir):
            fs.rmtree(build_dir)

        env_names = config.envs()
        result = proc.exec_command(
            ["platformio", "run", "-e", random.choice(env_names)]
        )
        if result["returncode"] != 0:
            pytest.fail(str(result))

        assert os.path.isdir(build_dir)

        # check .elf file
        for item in os.listdir(build_dir):
            if not os.path.isdir(os.path.join(build_dir, item)):
                continue
            assert os.path.isfile(os.path.join(build_dir, item, "firmware.elf"))
            # check .hex or .bin files
            firmwares = []
            for ext in ("bin", "hex"):
                firmwares += glob(
                    os.path.join(build_dir, item, "firmware*.%s" % ext)
                )
            if not firmwares:
                pytest.fail("Missed firmware file")
            for firmware in firmwares:
                assert os.path.getsize(firmware) > 0

def install_from_url(self, url, spec, checksum=None, silent=False):
    spec = self.ensure_spec(spec)
    tmp_dir = tempfile.mkdtemp(prefix="pkg-installing-", dir=self.get_tmp_dir())
    vcs = None
    try:
        if url.startswith("file://"):
            _url = url[7:]
            if os.path.isfile(_url):
                self.unpack(_url, tmp_dir)
            else:
                fs.rmtree(tmp_dir)
                shutil.copytree(_url, tmp_dir, symlinks=True)
        elif url.startswith(("http://", "https://")):
            dl_path = self.download(url, checksum, silent=silent)
            assert os.path.isfile(dl_path)
            self.unpack(dl_path, tmp_dir)
        else:
            vcs = VCSClientFactory.new(tmp_dir, url)
            assert vcs.export()

        root_dir = self.find_pkg_root(tmp_dir, spec)
        pkg_item = PackageItem(
            root_dir,
            self.build_metadata(
                root_dir, spec, vcs.get_current_revision() if vcs else None
            ),
        )
        pkg_item.dump_meta()
        return self._install_tmp_pkg(pkg_item)
    finally:
        if os.path.isdir(tmp_dir):
            fs.rmtree(tmp_dir)

def device_monitor(ctx, **kwargs):
    project_options = {}
    try:
        with fs.cd(kwargs["project_dir"]):
            project_options = device_helpers.get_project_options(
                kwargs["environment"]
            )
        kwargs = device_helpers.apply_project_monitor_options(kwargs, project_options)
    except NotPlatformIOProjectError:
        pass

    kwargs["baud"] = kwargs["baud"] or 9600

    def _tx_target(sock_dir):
        pioplus_argv = ["remote", "device", "monitor"]
        pioplus_argv.extend(device_helpers.options_to_argv(kwargs, project_options))
        pioplus_argv.extend(["--sock", sock_dir])
        try:
            pioplus_call(pioplus_argv)
        except exception.ReturnErrorCode:
            pass

    sock_dir = mkdtemp(suffix="pioplus")
    sock_file = os.path.join(sock_dir, "sock")
    try:
        t = threading.Thread(target=_tx_target, args=(sock_dir,))
        t.start()
        while t.is_alive() and not os.path.isfile(sock_file):
            sleep(0.1)
        if not t.is_alive():
            return
        with open(sock_file) as fp:
            kwargs["port"] = fp.read()
        ctx.invoke(cmd_device_monitor, **kwargs)
        t.join(2)
    finally:
        fs.rmtree(sock_dir)

def get_cmake_code_model():
    if not is_proper_zephyr_project():
        create_default_project_files()

    if is_cmake_reconfigure_required():
        # Explicitly clean build folder to avoid cached values
        if os.path.isdir(CMAKE_API_DIR):
            fs.rmtree(BUILD_DIR)
        query_file = os.path.join(CMAKE_API_QUERY_DIR, "codemodel-v2")
        if not os.path.isfile(query_file):
            os.makedirs(os.path.dirname(query_file))
            open(query_file, "a").close()  # create an empty file
        run_cmake()

    if not os.path.isdir(CMAKE_API_REPLY_DIR) or not os.listdir(CMAKE_API_REPLY_DIR):
        sys.stderr.write("Error: Couldn't find CMake API response file\n")
        env.Exit(1)

    codemodel = {}
    for target in os.listdir(CMAKE_API_REPLY_DIR):
        if target.startswith("codemodel-v2"):
            with open(os.path.join(CMAKE_API_REPLY_DIR, target), "r") as fp:
                codemodel = json.load(fp)

    assert codemodel["version"]["major"] == 2

    return codemodel

def build_contrib_pysite_deps(target_dir):
    if os.path.isdir(target_dir):
        fs.rmtree(target_dir)
    os.makedirs(target_dir)

    with open(os.path.join(target_dir, "package.json"), "w") as fp:
        json.dump(
            dict(
                name="contrib-pysite",
                version="2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
                system=util.get_systype(),
            ),
            fp,
        )

    pythonexe = get_pythonexe_path()
    for dep in get_contrib_pysite_deps():
        subprocess.check_call(
            [
                pythonexe,
                "-m",
                "pip",
                "install",
                # "--no-cache-dir",
                "--no-compile",
                "-t",
                target_dir,
                dep,
            ]
        )
    return True

def processEnded(self, reason):  # pylint: disable=unused-argument
    self._unlock_session()
    if self._gdbsrc_dir and isdir(self._gdbsrc_dir):
        fs.rmtree(self._gdbsrc_dir)
    if self._debug_server:
        self._debug_server.terminate()
    reactor.stop()

def _finalize_arduino_import(_, project_dir, arduino_project_dir):
    with fs.cd(project_dir):
        config = ProjectConfig()
        src_dir = config.get_optional_dir("src")
        if isdir(src_dir):
            fs.rmtree(src_dir)
        shutil.copytree(arduino_project_dir, src_dir)
    return project_dir

def build_autotiler(build_dir, generator, model_path):
    if isdir(build_dir):
        fs.rmtree(build_dir)

    # parse custom library path from `platformio.ini`
    tmpenv = env.Clone()
    tmpenv.ProcessFlags(env.get("BUILD_FLAGS"))

    genv = env.Environment(
        tools=["ar", "gas", "gcc", "g++", "gnulink"],
        CC="gcc",
        CPPPATH=[
            join(AUTOTILER_DIR, "include"),
            join(AUTOTILER_DIR, "generators", generator, "generator", "include")
        ],
        LIBPATH=[
            join(AUTOTILER_DIR, "lib"),
            env.GetProjectConfig().get("platformio", "lib_dir")
        ] + tmpenv.get("LIBPATH", []),
        LIBS=["tile"])

    # CHECK "libtile.a"
    found_libtile = False
    for d in genv['LIBPATH']:
        if isfile(genv.subst(join(d, "libtile.a"))):
            found_libtile = True
            break
    if not found_libtile:
        sys.stderr.write(
            "Error: AutoTiler library has not been found. Please read => "
            "https://docs.platformio.org/page/platforms/riscv_gap.html"
            "#autotiler\n")
        env.Exit(1)

    variant_dirs = [
        (join(build_dir, "model"), dirname(model_path)),
        (join(build_dir, "generator"),
         join(AUTOTILER_DIR, "generators", generator, "generator", "src"))
    ]
    for (var_dir, src_dir) in variant_dirs:
        if not isdir(genv.subst(var_dir)):
            makedirs(genv.subst(var_dir))
        genv.VariantDir(var_dir, src_dir, duplicate=0)

    src_files = [join(build_dir, "model", basename(model_path))]
    src_files.extend(genv.Glob(join(build_dir, "generator", "*Generator?.c")))

    for o in genv.Object(src_files):
        if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
            genv.Replace(CCCOMSTR="Compiling %s" % relpath(str(o)))
        o.build()

    if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
        genv.Replace(LINKCOMSTR="Linking AutoTiler")

    return genv.Program(join(build_dir, "program"), src_files)[0].build()

def _exclude_contents(dst_dir, patterns):
    contents = []
    for p in patterns:
        contents += glob(join(glob_escape(dst_dir), p))
    for path in contents:
        path = abspath(path)
        if isdir(path):
            fs.rmtree(path)
        elif isfile(path):
            remove(path)

def _exclude_contents(dst_dir, patterns):
    contents = []
    for p in patterns:
        contents += compat.glob_recursive(join(compat.glob_escape(dst_dir), p))
    for path in contents:
        path = realpath(path)
        if isdir(path):
            fs.rmtree(path)
        elif isfile(path):
            remove(path)

def cli(  # pylint: disable=too-many-arguments, too-many-branches
    ctx,
    src,
    lib,
    exclude,
    board,
    build_dir,
    keep_build_dir,
    project_conf,
    project_option,
    verbose,
):
    if not src and getenv("PLATFORMIO_CI_SRC"):
        src = validate_path(ctx, None, getenv("PLATFORMIO_CI_SRC").split(":"))
    if not src:
        raise click.BadParameter("Missing argument 'src'")

    try:
        app.set_session_var("force_option", True)

        if not keep_build_dir and isdir(build_dir):
            fs.rmtree(build_dir)
        if not isdir(build_dir):
            makedirs(build_dir)

        for dir_name, patterns in dict(lib=lib, src=src).items():
            if not patterns:
                continue
            contents = []
            for p in patterns:
                contents += glob(p)
            _copy_contents(join(build_dir, dir_name), contents)

        if project_conf and isfile(project_conf):
            _copy_project_conf(build_dir, project_conf)
        elif not board:
            raise CIBuildEnvsEmpty()

        if exclude:
            _exclude_contents(build_dir, exclude)

        # initialise project
        ctx.invoke(
            cmd_project_init,
            project_dir=build_dir,
            board=board,
            project_option=project_option,
        )

        # process project
        ctx.invoke(cmd_run, project_dir=build_dir, verbose=verbose)
    finally:
        if not keep_build_dir:
            fs.rmtree(build_dir)

def patched_clean_build_dir(build_dir, *args):
    from platformio import fs
    from platformio.project.helpers import get_project_dir

    platformio_ini = join(get_project_dir(), "platformio.ini")

    # if project's config is modified
    if isdir(build_dir) and getmtime(platformio_ini) > getmtime(build_dir):
        fs.rmtree(build_dir)

    if not isdir(build_dir):
        makedirs(build_dir)

def PioClean(env, clean_dir):
    if not isdir(clean_dir):
        print("Build environment is clean")
        env.Exit(0)
    clean_rel_path = relpath(clean_dir)
    for root, _, files in walk(clean_dir):
        for f in files:
            dst = join(root, f)
            remove(dst)
            print(
                "Removed %s"
                % (dst if clean_rel_path.startswith(".") else relpath(dst))
            )
    print("Done cleaning")
    fs.rmtree(clean_dir)
    env.Exit(0)

def clean_build_dir(build_dir, config):
    # remove legacy ".pioenvs" folder
    legacy_build_dir = join(get_project_dir(), ".pioenvs")
    if isdir(legacy_build_dir) and legacy_build_dir != build_dir:
        fs.rmtree(legacy_build_dir)

    checksum_file = join(build_dir, "project.checksum")
    checksum = compute_project_checksum(config)

    if isdir(build_dir):
        # check project structure
        if isfile(checksum_file) and fs.get_file_contents(checksum_file) == checksum:
            return
        fs.rmtree(build_dir)

    makedirs(build_dir)
    fs.write_file_contents(checksum_file, checksum)

def system_prune(force):
    click.secho("WARNING! This will remove:", fg="yellow")
    click.echo(" - cached API requests")
    click.echo(" - cached package downloads")
    click.echo(" - temporary data")
    if not force:
        click.confirm("Do you want to continue?", abort=True)

    reclaimed_total = 0
    cache_dir = get_project_cache_dir()
    if os.path.isdir(cache_dir):
        reclaimed_total += fs.calculate_folder_size(cache_dir)
        fs.rmtree(cache_dir)

    click.secho(
        "Total reclaimed space: %s" % fs.humanize_file_size(reclaimed_total),
        fg="green",
    )

async def import_arduino(self, board, use_arduino_libs, arduino_project_dir):
    board = str(board)
    # don't import PIO Project
    if is_platformio_project(arduino_project_dir):
        return arduino_project_dir

    is_arduino_project = any([
        os.path.isfile(
            os.path.join(
                arduino_project_dir,
                "%s.%s" % (os.path.basename(arduino_project_dir), ext),
            ))
        for ext in ("ino", "pde")
    ])
    if not is_arduino_project:
        raise jsonrpc.exceptions.JSONRPCDispatchException(
            code=4000, message="Not an Arduino project: %s" % arduino_project_dir)

    state = AppRPC.load_state()
    project_dir = os.path.join(
        state["storage"]["projectsDir"], time.strftime("%y%m%d-%H%M%S-") + board)
    if not os.path.isdir(project_dir):
        os.makedirs(project_dir)

    args = ["init", "--board", board]
    args.extend(["--project-option", "framework = arduino"])
    if use_arduino_libs:
        args.extend([
            "--project-option",
            "lib_extra_dirs = ~/Documents/Arduino/libraries"
        ])
    if (state["storage"]["coreCaller"] and
            state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()):
        args.extend(["--ide", state["storage"]["coreCaller"]])

    await PIOCoreRPC.call(
        args, options={"cwd": project_dir, "force_subprocess": True})

    with fs.cd(project_dir):
        config = ProjectConfig()
        src_dir = config.get_optional_dir("src")
        if os.path.isdir(src_dir):
            fs.rmtree(src_dir)
        shutil.copytree(arduino_project_dir, src_dir, symlinks=True)
    return project_dir

def prune_cached_data(force=False, dry_run=False, silent=False):
    reclaimed_space = 0
    if not silent:
        click.secho("Prune cached data:", bold=True)
        click.echo(" - cached API requests")
        click.echo(" - cached package downloads")
        click.echo(" - temporary data")
    cache_dir = get_project_cache_dir()
    if os.path.isdir(cache_dir):
        reclaimed_space += fs.calculate_folder_size(cache_dir)
        if not dry_run:
            if not force:
                click.confirm("Do you want to continue?", abort=True)
            fs.rmtree(cache_dir)
    if not silent:
        click.secho("Space on disk: %s" % fs.humanize_file_size(reclaimed_space))
    return reclaimed_space

def uninstall(self, package, requirements=None, after_update=False):
    # interprocess lock
    with LockFile(self.package_dir):
        self.cache_reset()
        if isdir(package) and self.get_package_by_dir(package):
            pkg_dir = package
        else:
            name, requirements, url = self.parse_pkg_uri(
                package, requirements)
            pkg_dir = self.get_package_dir(name, requirements, url)

        if not pkg_dir:
            raise exception.UnknownPackage("%s @ %s" %
                                           (package, requirements or "*"))

        manifest = self.load_manifest(pkg_dir)
        click.echo("Uninstalling %s @ %s: \t" % (click.style(
            manifest['name'], fg="cyan"), manifest['version']),
                   nl=False)

        if islink(pkg_dir):
            os.unlink(pkg_dir)
        else:
            fs.rmtree(pkg_dir)
        self.cache_reset()

        # unfix package with the same name
        pkg_dir = self.get_package_dir(manifest['name'])
        if pkg_dir and "@" in pkg_dir:
            shutil.move(
                pkg_dir,
                join(self.package_dir, self.get_install_dirname(manifest)))
            self.cache_reset()

        click.echo("[%s]" % click.style("OK", fg="green"))

    if not after_update:
        telemetry.on_event(category=self.__class__.__name__,
                           action="Uninstall",
                           label=manifest['name'])

    return True

def build_contrib_pysite_deps(target_dir):
    if os.path.isdir(target_dir):
        fs.rmtree(target_dir)
    os.makedirs(target_dir)

    # build dependencies
    pythonexe = get_pythonexe_path()
    for dep in get_contrib_pysite_deps():
        subprocess.check_call(
            [
                pythonexe,
                "-m",
                "pip",
                "install",
                # "--no-cache-dir",
                "--no-compile",
                "--no-binary",
                ":all:",
                "-t",
                target_dir,
                dep,
            ]
        )

    # build manifests
    with open(os.path.join(target_dir, "package.json"), "w") as fp:
        json.dump(
            dict(
                name="contrib-pysite",
                version="2.%d%d.%s"
                % (
                    sys.version_info.major,
                    sys.version_info.minor,
                    date.today().strftime("%y%m%d"),
                ),
                system=util.get_systype(),
            ),
            fp,
        )

    pm = ToolPackageManager()
    pkg = PackageItem(target_dir)
    pkg.metadata = pm.build_metadata(
        target_dir, PackageSpec(owner="platformio", name="contrib-pysite")
    )
    pkg.dump_meta()
    return True

def _install_from_url(self, name, url, requirements=None, sha1=None, track=False):
    tmp_dir = mkdtemp("-package", self.TMP_FOLDER_PREFIX, self.package_dir)
    src_manifest_dir = None
    src_manifest = {"name": name, "url": url, "requirements": requirements}

    try:
        if url.startswith("file://"):
            _url = url[7:]
            if isfile(_url):
                self.unpack(_url, tmp_dir)
            else:
                fs.rmtree(tmp_dir)
                shutil.copytree(_url, tmp_dir)
        elif url.startswith(("http://", "https://")):
            dlpath = self.download(url, tmp_dir, sha1)
            assert isfile(dlpath)
            self.unpack(dlpath, tmp_dir)
            os.remove(dlpath)
        else:
            vcs = VCSClientFactory.newClient(tmp_dir, url)
            assert vcs.export()
            src_manifest_dir = vcs.storage_dir
            src_manifest["version"] = vcs.get_current_revision()

        _tmp_dir = tmp_dir
        if not src_manifest_dir:
            _tmp_dir = self.find_pkg_root(tmp_dir)
            src_manifest_dir = join(_tmp_dir, ".pio")

        # write source data to a special manifest
        if track:
            self._update_src_manifest(src_manifest, src_manifest_dir)

        return self._install_from_tmp_dir(_tmp_dir, requirements)
    finally:
        if isdir(tmp_dir):
            fs.rmtree(tmp_dir)
    return None

def _uninstall(self, spec, silent=False, skip_dependencies=False):
    pkg = self.get_package(spec)
    if not pkg or not pkg.metadata:
        raise UnknownPackageError(spec)

    if not silent:
        self.print_message(
            "Removing %s @ %s"
            % (click.style(pkg.metadata.name, fg="cyan"), pkg.metadata.version),
        )

    # firstly, remove dependencies
    if not skip_dependencies:
        self.uninstall_dependencies(pkg, silent)

    if os.path.islink(pkg.path):
        os.unlink(pkg.path)
    else:
        fs.rmtree(pkg.path)
    self.memcache_reset()

    # unfix detached-package with the same name
    detached_pkg = self.get_package(PackageSpec(name=pkg.metadata.name))
    if (
        detached_pkg
        and "@" in detached_pkg.path
        and not os.path.isdir(
            os.path.join(self.package_dir, detached_pkg.get_safe_dirname())
        )
    ):
        shutil.move(
            detached_pkg.path,
            os.path.join(self.package_dir, detached_pkg.get_safe_dirname()),
        )
        self.memcache_reset()

    if not silent:
        self.print_message(
            "{name} @ {version} has been removed!".format(**pkg.metadata.as_dict()),
            fg="green",
        )

    return pkg

def delete(self, keys=None):
    """ Keys=None, delete expired items """
    if not isfile(self._db_path):
        return None
    if not keys:
        keys = []
    if not isinstance(keys, list):
        keys = [keys]
    paths_for_delete = [self.get_cache_path(k) for k in keys]
    found = False
    newlines = []
    with open(self._db_path) as fp:
        for line in fp.readlines():
            line = line.strip()
            if "=" not in line:
                continue
            expire, path = line.split("=")
            try:
                if (
                    time() < int(expire)
                    and isfile(path)
                    and path not in paths_for_delete
                ):
                    newlines.append(line)
                    continue
            except ValueError:
                pass
            found = True
            if isfile(path):
                try:
                    remove(path)
                    if not listdir(dirname(path)):
                        fs.rmtree(dirname(path))
                except OSError:
                    pass

    if found and self._lock_dbindex():
        with open(self._db_path, "w") as fp:
            fp.write("\n".join(newlines) + "\n")
        self._unlock_dbindex()

    return True

def device_monitor(ctx, agents, **kwargs):
    from platformio.commands.remote.client.device_monitor import DeviceMonitorClient

    if kwargs["sock"]:
        return DeviceMonitorClient(agents, **kwargs).connect()

    project_options = {}
    try:
        with fs.cd(kwargs["project_dir"]):
            project_options = device_helpers.get_project_options(
                kwargs["environment"])
        kwargs = device_helpers.apply_project_monitor_options(
            kwargs, project_options)
    except NotPlatformIOProjectError:
        pass

    kwargs["baud"] = kwargs["baud"] or 9600

    def _tx_target(sock_dir):
        subcmd_argv = ["remote", "device", "monitor"]
        subcmd_argv.extend(
            device_helpers.options_to_argv(kwargs, project_options))
        subcmd_argv.extend(["--sock", sock_dir])
        subprocess.call([proc.where_is_program("platformio")] + subcmd_argv)

    sock_dir = mkdtemp(suffix="pio")
    sock_file = os.path.join(sock_dir, "sock")
    try:
        t = threading.Thread(target=_tx_target, args=(sock_dir,))
        t.start()
        while t.is_alive() and not os.path.isfile(sock_file):
            sleep(0.1)
        if not t.is_alive():
            return
        with open(sock_file) as fp:
            kwargs["port"] = fp.read()
        ctx.invoke(cmd_device_monitor, **kwargs)
        t.join(2)
    finally:
        fs.rmtree(sock_dir)

    return True

def uninstall(
    self, package, requirements=None, after_update=False
):  # pylint: disable=unused-argument
    # interprocess lock
    with LockFile(self.package_dir):
        self.cache_reset()
        if isdir(package) and self.get_package_by_dir(package):
            pkg_dir = package
        else:
            name, requirements, url = self.parse_pkg_uri(package, requirements)
            pkg_dir = self.get_package_dir(name, requirements, url)

        if not pkg_dir:
            raise exception.UnknownPackage(
                "%s @ %s" % (package, requirements or "*")
            )

        manifest = self.load_manifest(pkg_dir)
        click.echo(
            "Uninstalling %s @ %s: \t"
            % (click.style(manifest["name"], fg="cyan"), manifest["version"]),
            nl=False,
        )

        if islink(pkg_dir):
            os.unlink(pkg_dir)
        else:
            fs.rmtree(pkg_dir)
        self.cache_reset()

        # unfix package with the same name
        pkg_dir = self.get_package_dir(manifest["name"])
        if pkg_dir and "@" in pkg_dir:
            shutil.move(
                pkg_dir, join(self.package_dir, self.get_install_dirname(manifest))
            )
            self.cache_reset()

        click.echo("[%s]" % click.style("OK", fg="green"))

    return True

def clean_build_dir(build_dir, config):
    # remove legacy ".pioenvs" folder
    legacy_build_dir = join(get_project_dir(), ".pioenvs")
    if isdir(legacy_build_dir) and legacy_build_dir != build_dir:
        fs.rmtree(legacy_build_dir)

    checksum_file = join(build_dir, "project.checksum")
    checksum = compute_project_checksum(config)

    if isdir(build_dir):
        # check project structure
        if isfile(checksum_file):
            with open(checksum_file) as f:
                if f.read() == checksum:
                    return
        fs.rmtree(build_dir)

    makedirs(build_dir)

    with open(checksum_file, "w") as f:
        f.write(checksum)

def device_monitor(ctx, **kwargs):
    def _tx_target(sock_dir):
        try:
            pioplus_call(sys.argv[1:] + ["--sock", sock_dir])
        except exception.ReturnErrorCode:
            pass

    sock_dir = mkdtemp(suffix="pioplus")
    sock_file = join(sock_dir, "sock")
    try:
        t = threading.Thread(target=_tx_target, args=(sock_dir,))
        t.start()
        while t.is_alive() and not isfile(sock_file):
            sleep(0.1)
        if not t.is_alive():
            return
        kwargs["port"] = fs.get_file_contents(sock_file)
        ctx.invoke(cmd_device_monitor, **kwargs)
        t.join(2)
    finally:
        fs.rmtree(sock_dir)

def PioClean(env, clean_dir):
    def _relpath(path):
        if compat.WINDOWS:
            prefix = os.getcwd()[:2].lower()
            if (
                ":" not in prefix
                or not path.lower().startswith(prefix)
                or os.path.relpath(path).startswith("..")
            ):
                return path
        return os.path.relpath(path)

    if not os.path.isdir(clean_dir):
        print("Build environment is clean")
        env.Exit(0)
    clean_rel_path = _relpath(clean_dir)
    for root, _, files in os.walk(clean_dir):
        for f in files:
            dst = os.path.join(root, f)
            os.remove(dst)
            print(
                "Removed %s"
                % (dst if not clean_rel_path.startswith(".") else _relpath(dst))
            )
    print("Done cleaning")
    fs.rmtree(clean_dir)
    env.Exit(0)

def _install_from_tmp_dir(  # pylint: disable=too-many-branches
    self, tmp_dir, requirements=None
):
    tmp_manifest = self.load_manifest(tmp_dir)
    assert set(["name", "version"]) <= set(tmp_manifest)

    pkg_dirname = self.get_install_dirname(tmp_manifest)
    pkg_dir = join(self.package_dir, pkg_dirname)
    cur_manifest = self.load_manifest(pkg_dir)

    tmp_semver = self.parse_semver_version(tmp_manifest["version"])
    cur_semver = None
    if cur_manifest:
        cur_semver = self.parse_semver_version(cur_manifest["version"])

    # package should satisfy requirements
    if requirements:
        mismatch_error = "Package version %s doesn't satisfy requirements %s" % (
            tmp_manifest["version"],
            requirements,
        )
        try:
            assert tmp_semver and tmp_semver in semantic_version.SimpleSpec(
                requirements
            ), mismatch_error
        except (AssertionError, ValueError):
            assert tmp_manifest["version"] == requirements, mismatch_error

    # check if package already exists
    if cur_manifest:
        # 0-overwrite, 1-rename, 2-fix to a version
        action = 0
        if "__src_url" in cur_manifest:
            if cur_manifest["__src_url"] != tmp_manifest.get("__src_url"):
                action = 1
        elif "__src_url" in tmp_manifest:
            action = 2
        else:
            if tmp_semver and (not cur_semver or tmp_semver > cur_semver):
                action = 1
            elif tmp_semver and cur_semver and tmp_semver != cur_semver:
                action = 2

        # rename
        if action == 1:
            target_dirname = "%s@%s" % (pkg_dirname, cur_manifest["version"])
            if "__src_url" in cur_manifest:
                target_dirname = "%s@src-%s" % (
                    pkg_dirname,
                    hashlib.md5(
                        hashlib_encode_data(cur_manifest["__src_url"])
                    ).hexdigest(),
                )
            shutil.move(pkg_dir, join(self.package_dir, target_dirname))
        # fix to a version
        elif action == 2:
            target_dirname = "%s@%s" % (pkg_dirname, tmp_manifest["version"])
            if "__src_url" in tmp_manifest:
                target_dirname = "%s@src-%s" % (
                    pkg_dirname,
                    hashlib.md5(
                        hashlib_encode_data(tmp_manifest["__src_url"])
                    ).hexdigest(),
                )
            pkg_dir = join(self.package_dir, target_dirname)

    # remove previous/not-satisfied package
    if isdir(pkg_dir):
        fs.rmtree(pkg_dir)

    shutil.move(tmp_dir, pkg_dir)
    assert isdir(pkg_dir)
    self.cache_reset()
    return pkg_dir

def _cleanup_dir(path):
    if os.path.isdir(path):
        fs.rmtree(path)

def clean(self):
    if not self.cache_dir or not isdir(self.cache_dir):
        return
    fs.rmtree(self.cache_dir)