Example 1
def compute_project_checksum(config):
    # rebuild when PIO Core version changes
    checksum = sha1(hashlib_encode_data(__version__))

    # configuration file state
    checksum.update(hashlib_encode_data(config.to_json()))

    # project file structure
    check_suffixes = (".c", ".cc", ".cpp", ".h", ".hpp", ".s", ".S")
    for d in (get_project_include_dir(), get_project_src_dir(),
              get_project_lib_dir()):
        if not isdir(d):
            continue
        chunks = []
        for root, _, files in walk(d):
            for f in files:
                path = join(root, f)
                if path.endswith(check_suffixes):
                    chunks.append(path)
        if not chunks:
            continue
        chunks_to_str = ",".join(sorted(chunks))
        if WINDOWS:  # case insensitive OS
            chunks_to_str = chunks_to_str.lower()
        checksum.update(hashlib_encode_data(chunks_to_str))

    return checksum.hexdigest()
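
Note: every snippet in this listing funnels its input through the hashlib_encode_data helper (imported from platformio.compat where the snippets qualify it). Its implementation is not shown here; as a rough sketch of the assumed behavior, not the library's verbatim code, it normalizes arbitrary input to bytes before it reaches hashlib:

# sketch only -- an assumption about the helper, not PlatformIO's verbatim code
def hashlib_encode_data(data):
    # hashlib digests accept only bytes, so coerce everything else first
    if isinstance(data, bytes):
        return data
    if not isinstance(data, str):
        data = str(data)  # e.g. the integer returned by uuid.getnode()
    return data.encode()

With that in mind, each example in this listing follows the same pattern: normalize the input to bytes, feed it to a hashlib or zlib digest, and use the resulting hex digest (or CRC) as a stable identifier.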
Example 2
def get_host_id():
    h = hashlib.sha1(hashlib_encode_data(get_cid()))
    try:
        username = getpass.getuser()
        h.update(hashlib_encode_data(username))
    except:  # pylint: disable=bare-except
        pass
    return h.hexdigest()
Example 3
def get_cid():
    # pylint: disable=import-outside-toplevel
    from platformio.clients.http import fetch_remote_content

    cid = get_state_item("cid")
    if cid:
        return cid
    uid = None
    if os.getenv("C9_UID"):
        uid = os.getenv("C9_UID")
    elif os.getenv("GITPOD_GIT_USER_NAME"):
        uid = os.getenv("GITPOD_GIT_USER_NAME")
    elif os.getenv("CHE_API", os.getenv("CHE_API_ENDPOINT")):
        try:
            uid = json.loads(
                fetch_remote_content("{api}/user?token={token}".format(
                    api=os.getenv("CHE_API", os.getenv("CHE_API_ENDPOINT")),
                    token=os.getenv("USER_TOKEN"),
                ))).get("id")
        except:  # pylint: disable=bare-except
            pass
    if not uid:
        uid = uuid.getnode()
    cid = uuid.UUID(bytes=hashlib.md5(hashlib_encode_data(uid)).digest())
    cid = str(cid)
    if WINDOWS or os.getuid() > 0:  # pylint: disable=no-member
        set_state_item("cid", cid)
    return cid
Example 4
def compute_download_path(self, *args):
    request_hash = hashlib.new("sha1")
    for arg in args:
        request_hash.update(compat.hashlib_encode_data(arg))
    dl_path = os.path.join(self.get_download_dir(),
                           request_hash.hexdigest())
    return dl_path
Example 5
def get_cid():
    cid = get_state_item("cid")
    if cid:
        return cid
    uid = None
    if getenv("C9_UID"):
        uid = getenv("C9_UID")
    elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")):
        try:
            uid = (
                requests.get(
                    "{api}/user?token={token}".format(
                        api=getenv("CHE_API", getenv("CHE_API_ENDPOINT")),
                        token=getenv("USER_TOKEN"),
                    )
                )
                .json()
                .get("id")
            )
        except:  # pylint: disable=bare-except
            pass
    if not uid:
        uid = uuid.getnode()
    cid = uuid.UUID(bytes=hashlib.md5(hashlib_encode_data(uid)).digest())
    cid = str(cid)
    if WINDOWS or os.getuid() > 0:  # pylint: disable=no-member
        set_state_item("cid", cid)
    return cid
Example 6
def _file_long_data(env, data):
    build_dir = env.subst("$BUILD_DIR")
    if not isdir(build_dir):
        makedirs(build_dir)
    tmp_file = join(build_dir,
                    "longcmd-%s" % md5(hashlib_encode_data(data)).hexdigest())
    if isfile(tmp_file):
        return tmp_file
    fs.write_file_contents(tmp_file, data)
    return tmp_file
Example 7
def _file_long_data(env, data):
    build_dir = env.subst("$BUILD_DIR")
    if not isdir(build_dir):
        makedirs(build_dir)
    tmp_file = join(build_dir,
                    "longcmd-%s" % md5(hashlib_encode_data(data)).hexdigest())
    if isfile(tmp_file):
        return tmp_file
    with open(tmp_file, "w") as fp:
        fp.write(data)
    return tmp_file
Example 8
def _file_long_data(env, data):
    tmp_file = os.path.join(
        "$BUILD_DIR", "longinc-%s" % hashlib.md5(hashlib_encode_data(data)).hexdigest()
    )
    build_dir = env.subst("$BUILD_DIR")
    if not os.path.isdir(build_dir):
        os.makedirs(build_dir)
    if os.path.isfile(env.subst(tmp_file)):
        return tmp_file
    with open(env.subst(tmp_file), "w") as fp:
        fp.write(data)
    return tmp_file
Example 9
    def spawn(self, gdb_path, prog_path):
        session_hash = gdb_path + prog_path
        self._session_id = sha1(hashlib_encode_data(session_hash)).hexdigest()
        self._kill_previous_session()

        patterns = {
            "PROJECT_DIR": self.project_dir,
            "PROG_PATH": prog_path,
            "PROG_DIR": dirname(prog_path),
            "PROG_NAME": basename(splitext(prog_path)[0]),
            "DEBUG_PORT": self.debug_options["port"],
            "UPLOAD_PROTOCOL": self.debug_options["upload_protocol"],
            "INIT_BREAK": self.debug_options["init_break"] or "",
            "LOAD_CMDS": "\n".join(self.debug_options["load_cmds"] or []),
        }

        yield self._debug_server.spawn(patterns)
        if not patterns["DEBUG_PORT"]:
            patterns["DEBUG_PORT"] = self._debug_server.get_debug_port()

        self.generate_pioinit(self._gdbsrc_dir, patterns)

        # start GDB client
        args = [
            "piogdb",
            "-q",
            "--directory",
            self._gdbsrc_dir,
            "--directory",
            self.project_dir,
            "-l",
            "10",
        ]
        args.extend(self.args)
        if not gdb_path:
            raise DebugInvalidOptionsError("GDB client is not configured")
        gdb_data_dir = self._get_data_dir(gdb_path)
        if gdb_data_dir:
            args.extend(["--data-directory", gdb_data_dir])
        args.append(patterns["PROG_PATH"])

        transport = reactor.spawnProcess(self,
                                         gdb_path,
                                         args,
                                         path=self.project_dir,
                                         env=os.environ)
        defer.returnValue(transport)
Example 10
def calculate_project_hash():
    check_suffixes = (".c", ".cc", ".cpp", ".h", ".hpp", ".s", ".S")
    chunks = [__version__]
    for d in (get_project_src_dir(), get_project_lib_dir()):
        if not isdir(d):
            continue
        for root, _, files in walk(d):
            for f in files:
                path = join(root, f)
                if path.endswith(check_suffixes):
                    chunks.append(path)
    chunks_to_str = ",".join(sorted(chunks))
    if WINDOWS:
        # Fix issue with useless project rebuilding for case insensitive FS.
        # A case of disk drive can differ...
        chunks_to_str = chunks_to_str.lower()
    return sha1(hashlib_encode_data(chunks_to_str)).hexdigest()
Example 11
def get_project_optional_dir(name, default=None):
    project_dir = get_project_dir()
    config = ProjectConfig.get_instance(join(project_dir, "platformio.ini"))
    optional_dir = config.get("platformio", name)

    if not optional_dir:
        return default

    if "$PROJECT_HASH" in optional_dir:
        optional_dir = optional_dir.replace(
            "$PROJECT_HASH", "%s-%s" %
            (basename(project_dir), sha1(
                hashlib_encode_data(project_dir)).hexdigest()[:10]))

    if optional_dir.startswith("~"):
        optional_dir = expanduser(optional_dir)

    return realpath(optional_dir)
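
For illustration, the $PROJECT_HASH placeholder handled above expands to the project folder name plus the first ten hex characters of the SHA-1 of the full project path. Assuming a hypothetical project located at /home/user/blink, the substitution reduces to:

from hashlib import sha1
from os.path import basename

project_dir = "/home/user/blink"  # hypothetical path, for illustration only
suffix = sha1(project_dir.encode()).hexdigest()[:10]
print("%s-%s" % (basename(project_dir), suffix))  # -> "blink-" + 10 hex chars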
Example 12
def dump_run_environment(options):
    non_sensitive_data = [
        "platform",
        "platform_packages",
        "framework",
        "board",
        "upload_protocol",
        "check_tool",
        "debug_tool",
        "monitor_filters",
    ]
    safe_options = {
        k: v
        for k, v in options.items() if k in non_sensitive_data
    }
    if is_platformio_project(os.getcwd()):
        phash = hashlib.sha1(hashlib_encode_data(app.get_cid()))
        safe_options["pid"] = phash.hexdigest()
    return json.dumps(safe_options, sort_keys=True, ensure_ascii=False)
Example 13
    def spawn(self, gdb_path, prog_path):
        session_hash = gdb_path + prog_path
        self._session_id = sha1(hashlib_encode_data(session_hash)).hexdigest()
        self._kill_previous_session()

        patterns = {
            "PROJECT_DIR": self.project_dir,
            "PROG_PATH": prog_path,
            "PROG_DIR": dirname(prog_path),
            "PROG_NAME": basename(splitext(prog_path)[0]),
            "DEBUG_PORT": self.debug_options['port'],
            "UPLOAD_PROTOCOL": self.debug_options['upload_protocol'],
            "INIT_BREAK": self.debug_options['init_break'] or "",
            "LOAD_CMDS": "\n".join(self.debug_options['load_cmds'] or []),
        }

        self._debug_server.spawn(patterns)

        if not patterns['DEBUG_PORT']:
            patterns['DEBUG_PORT'] = self._debug_server.get_debug_port()
        self.generate_pioinit(self._gdbsrc_dir, patterns)

        # start GDB client
        args = [
            "piogdb",
            "-q",
            "--directory", self._gdbsrc_dir,
            "--directory", self.project_dir,
            "-l", "10"
        ]  # yapf: disable
        args.extend(self.args)
        if not gdb_path:
            raise exception.DebugInvalidOptions("GDB client is not configured")
        gdb_data_dir = self._get_data_dir(gdb_path)
        if gdb_data_dir:
            args.extend(["--data-directory", gdb_data_dir])
        args.append(patterns['PROG_PATH'])

        return reactor.spawnProcess(self,
                                    gdb_path,
                                    args,
                                    path=self.project_dir,
                                    env=os.environ)
Example 14
    def get_optional_dir(self, name, exists=False):
        if not ProjectOptions.get("platformio.%s_dir" % name):
            raise ValueError("Unknown optional directory -> " + name)

        if name == "core":
            result = self._get_core_dir(exists)
        else:
            result = self.get("platformio", name + "_dir")

        if result is None:
            return None

        project_dir = os.getcwd()

        # patterns
        if "$PROJECT_HASH" in result:
            result = result.replace(
                "$PROJECT_HASH",
                "%s-%s" % (
                    os.path.basename(project_dir),
                    sha1(hashlib_encode_data(project_dir)).hexdigest()[:10],
                ),
            )

        if "$PROJECT_DIR" in result:
            result = result.replace("$PROJECT_DIR", project_dir)
        if "$PROJECT_CORE_DIR" in result:
            result = result.replace("$PROJECT_CORE_DIR",
                                    self.get_optional_dir("core"))
        if "$PROJECT_WORKSPACE_DIR" in result:
            result = result.replace("$PROJECT_WORKSPACE_DIR",
                                    self.get_optional_dir("workspace"))

        if result.startswith("~"):
            result = fs.expanduser(result)

        result = os.path.realpath(result)

        if exists and not os.path.isdir(result):
            os.makedirs(result)

        return result
Example 15
def _insert_to_db(self, path, relpath):
    if not isfile(path):
        return
    index_hash = "%s-%s-%s" % (relpath, getmtime(path), getsize(path))
    index = crc32(hashlib_encode_data(index_hash))
    self._db[index] = (path, relpath)
Example 16
    def _install_from_tmp_dir(  # pylint: disable=too-many-branches
            self, tmp_dir, requirements=None):
        tmp_manifest = self.load_manifest(tmp_dir)
        assert set(["name", "version"]) <= set(tmp_manifest)

        pkg_dirname = self.get_install_dirname(tmp_manifest)
        pkg_dir = join(self.package_dir, pkg_dirname)
        cur_manifest = self.load_manifest(pkg_dir)

        tmp_semver = self.parse_semver_version(tmp_manifest["version"])
        cur_semver = None
        if cur_manifest:
            cur_semver = self.parse_semver_version(cur_manifest["version"])

        # package should satisfy requirements
        if requirements:
            mismatch_error = "Package version %s doesn't satisfy requirements %s" % (
                tmp_manifest["version"],
                requirements,
            )
            try:
                assert tmp_semver and tmp_semver in semantic_version.SimpleSpec(
                    requirements), mismatch_error
            except (AssertionError, ValueError):
                assert tmp_manifest["version"] == requirements, mismatch_error

        # check if package already exists
        if cur_manifest:
            # 0-overwrite, 1-rename, 2-fix to a version
            action = 0
            if "__src_url" in cur_manifest:
                if cur_manifest["__src_url"] != tmp_manifest.get("__src_url"):
                    action = 1
            elif "__src_url" in tmp_manifest:
                action = 2
            else:
                if tmp_semver and (not cur_semver or tmp_semver > cur_semver):
                    action = 1
                elif tmp_semver and cur_semver and tmp_semver != cur_semver:
                    action = 2

            # rename
            if action == 1:
                target_dirname = "%s@%s" % (pkg_dirname,
                                            cur_manifest["version"])
                if "__src_url" in cur_manifest:
                    target_dirname = "%s@src-%s" % (
                        pkg_dirname,
                        hashlib.md5(
                            hashlib_encode_data(
                                cur_manifest["__src_url"])).hexdigest(),
                    )
                shutil.move(pkg_dir, join(self.package_dir, target_dirname))
            # fix to a version
            elif action == 2:
                target_dirname = "%s@%s" % (pkg_dirname,
                                            tmp_manifest["version"])
                if "__src_url" in tmp_manifest:
                    target_dirname = "%s@src-%s" % (
                        pkg_dirname,
                        hashlib.md5(
                            hashlib_encode_data(
                                tmp_manifest["__src_url"])).hexdigest(),
                    )
                pkg_dir = join(self.package_dir, target_dirname)

        # remove previous/not-satisfied package
        if isdir(pkg_dir):
            fs.rmtree(pkg_dir)
        shutil.move(tmp_dir, pkg_dir)
        assert isdir(pkg_dir)
        self.cache_reset()
        return pkg_dir
Example 17
def __hash__(self):
    return crc32(
        hashlib_encode_data(
            "%s-%s-%s-%s-%s" %
            (self.owner, self.id, self.name, self.requirements, self.url)))
Example 18
def encode_scons_arg(value):
    # URL-safe base64 keeps the value quoting-safe when it is forwarded
    # to SCons as a plain command-line argument
    data = base64.urlsafe_b64encode(hashlib_encode_data(value))
    return data.decode() if is_bytes(data) else data
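
The value produced above only needs the reverse transformation on the receiving side. A minimal decoding counterpart could look like the following (the name decode_scons_arg is hypothetical here, not a confirmed PlatformIO API):

import base64

def decode_scons_arg(value):  # hypothetical counterpart, for illustration
    # undo the URL-safe base64 encoding and return a text string
    return base64.urlsafe_b64decode(value).decode()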
Example 19
def build_dir(self):
    # disambiguate libraries with the same basename by prefixing the build
    # directory with a short 3-character hash of the full library path
    lib_hash = hashlib.sha1(hashlib_encode_data(self.path)).hexdigest()[:3]
    return join("$BUILD_DIR", "lib%s" % lib_hash, basename(self.path))
Example 20
    def _install_tmp_pkg(self, tmp_pkg):
        assert isinstance(tmp_pkg, PackageItem)
        # validate package version and declared requirements
        if (tmp_pkg.metadata.spec.requirements and tmp_pkg.metadata.version
                not in tmp_pkg.metadata.spec.requirements):
            raise PackageException(
                "Package version %s doesn't satisfy requirements %s based on %s"
                % (
                    tmp_pkg.metadata.version,
                    tmp_pkg.metadata.spec.requirements,
                    tmp_pkg.metadata,
                ))
        dst_pkg = PackageItem(
            os.path.join(self.package_dir, tmp_pkg.get_safe_dirname()))

        # what to do with existing package?
        action = "overwrite"
        if tmp_pkg.metadata.spec.has_custom_name():
            action = "overwrite"
            dst_pkg = PackageItem(
                os.path.join(self.package_dir, tmp_pkg.metadata.spec.name))
        elif dst_pkg.metadata:
            if dst_pkg.metadata.spec.external:
                if dst_pkg.metadata.spec.url != tmp_pkg.metadata.spec.url:
                    action = "detach-existing"
            elif (dst_pkg.metadata.version != tmp_pkg.metadata.version or
                  dst_pkg.metadata.spec.owner != tmp_pkg.metadata.spec.owner):
                action = ("detach-existing" if
                          tmp_pkg.metadata.version > dst_pkg.metadata.version
                          else "detach-new")

        def _cleanup_dir(path):
            if os.path.isdir(path):
                fs.rmtree(path)

        if action == "detach-existing":
            target_dirname = "%s@%s" % (
                tmp_pkg.get_safe_dirname(),
                dst_pkg.metadata.version,
            )
            if dst_pkg.metadata.spec.url:
                target_dirname = "%s@src-%s" % (
                    tmp_pkg.get_safe_dirname(),
                    hashlib.md5(
                        compat.hashlib_encode_data(
                            dst_pkg.metadata.spec.url)).hexdigest(),
                )
            # move existing into the new place
            pkg_dir = os.path.join(self.package_dir, target_dirname)
            _cleanup_dir(pkg_dir)
            shutil.move(dst_pkg.path, pkg_dir)
            # move new source to the destination location
            _cleanup_dir(dst_pkg.path)
            shutil.move(tmp_pkg.path, dst_pkg.path)
            return PackageItem(dst_pkg.path)

        if action == "detach-new":
            target_dirname = "%s@%s" % (
                tmp_pkg.get_safe_dirname(),
                tmp_pkg.metadata.version,
            )
            if tmp_pkg.metadata.spec.external:
                target_dirname = "%s@src-%s" % (
                    tmp_pkg.get_safe_dirname(),
                    hashlib.md5(
                        compat.hashlib_encode_data(
                            tmp_pkg.metadata.spec.url)).hexdigest(),
                )
            pkg_dir = os.path.join(self.package_dir, target_dirname)
            _cleanup_dir(pkg_dir)
            shutil.move(tmp_pkg.path, pkg_dir)
            return PackageItem(pkg_dir)

        # otherwise, overwrite existing
        _cleanup_dir(dst_pkg.path)
        shutil.move(tmp_pkg.path, dst_pkg.path)
        return PackageItem(dst_pkg.path)
Example 21
def generate_project_id(self, path):
    h = hashlib.sha1(hashlib_encode_data(self.id))
    h.update(hashlib_encode_data(path))
    return "%s-%s" % (os.path.basename(path), h.hexdigest())
Example 22
def key_from_args(*args):
    # build a deterministic cache key: hash the non-empty arguments in order
    h = hashlib.md5()
    for arg in args:
        if arg:
            h.update(hashlib_encode_data(arg))
    return h.hexdigest()
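
As a usage sketch with made-up argument values, key_from_args returns the same key for the same non-empty arguments in the same order, which makes it suitable for naming cache entries:

# hypothetical values, for illustration only
key = key_from_args("https://registry.example/packages", "page=1", None)
# falsy arguments (None, "") are skipped, so this produces the same key:
assert key == key_from_args("https://registry.example/packages", "page=1")
print(key)  # a 32-character hex MD5 digest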