Example #1
        def load_meta(path, locale):
            meta = {}

            meta_path = os.path.join(path, "meta.yaml")
            if os.path.isfile(meta_path):
                try:
                    meta = yaml.load_from_file(path=meta_path)
                except Exception:
                    logging.getLogger(__name__).exception(
                        "Could not load %s", meta_path)
                else:
                    import datetime

                    if "last_update" in meta and isinstance(
                            meta["last_update"], datetime.datetime):
                        meta["last_update"] = (
                            meta["last_update"] -
                            datetime.datetime(1970, 1, 1)).total_seconds()

            loc = Locale.parse(locale)
            meta["locale"] = locale
            meta["locale_display"] = loc.display_name
            meta["locale_english"] = loc.english_name
            return meta
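The noteworthy detail in this example is the `last_update` handling: a datetime parsed from the YAML file is converted to Unix epoch seconds by subtracting the epoch start. A minimal standalone check of that arithmetic (the sample timestamp is invented):

    import datetime

    # A naive datetime, as PyYAML typically parses timestamp values
    last_update = datetime.datetime(2021, 6, 1, 12, 0, 0)  # invented value

    # Same conversion as in load_meta(): seconds since the Unix epoch,
    # treating the naive datetime as UTC
    epoch_seconds = (last_update - datetime.datetime(1970, 1, 1)).total_seconds()
    print(epoch_seconds)  # 1622548800.0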
Example #2
    def _get_logging_config(self):
        logging_file = self._get_logging_file()

        config_from_file = {}
        if os.path.exists(logging_file) and os.path.isfile(logging_file):
            config_from_file = yaml.load_from_file(path=logging_file)
        return config_from_file
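All of these snippets call `load_from_file` from `octoprint.util.yaml`, passing either a `path=` or a `file=` keyword argument. As a rough mental model only (an assumption, not OctoPrint's actual implementation), the helper can be pictured as a thin wrapper around PyYAML's safe loader:

    import yaml  # PyYAML

    def load_from_file(file=None, path=None):
        # Hypothetical stand-in for octoprint.util.yaml.load_from_file:
        # accept either an open file object/stream or a filesystem path.
        if file is None and path is not None:
            with open(path, encoding="utf-8") as f:
                return yaml.safe_load(f)
        return yaml.safe_load(file)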
Example #3
    def fetch_blacklist_from_cache(path, ttl):
        if not os.path.isfile(path):
            return None

        if os.stat(path).st_mtime + ttl < time.time():
            return None

        from octoprint.util import yaml

        result = yaml.load_from_file(path=path)

        if isinstance(result, list):
            return result
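The cache is considered stale as soon as its modification time plus the TTL lies in the past, and the parsed content is only returned if it is a list; in every other case the function falls through to `None`. A hedged usage sketch of the function above (the path, TTL and fallback helper are invented):

    def fetch_blacklist_from_upstream():
        # Invented fallback used whenever the cache cannot be trusted
        return []

    CACHE_TTL = 24 * 60 * 60  # invented TTL: accept cached data for one day

    blacklist = fetch_blacklist_from_cache("/path/to/plugin_blacklist.yaml", CACHE_TTL)
    if blacklist is None:
        # Cache file missing, older than the TTL, or not a YAML list
        blacklist = fetch_blacklist_from_upstream()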
Example #4
def post_from_file(ctx, path, file_path, json_flag, yaml_flag, timeout):
    """POSTs JSON data to the specified server path, taking the data from the specified file."""
    if json_flag or yaml_flag:
        if json_flag:
            with open(file_path) as fp:
                data = json.load(fp)
        else:
            data = yaml.load_from_file(path=file_path)

        r = ctx.obj.client.post_json(path, data, timeout=timeout)
    else:
        with open(file_path, "rb") as fp:
            data = fp.read()

        r = ctx.obj.client.post(path, data, timeout=timeout)

    log_response(r)
Example #5
    def get_recovery_data(self):
        if not os.path.isfile(self._recovery_file):
            return None

        try:
            data = yaml.load_from_file(path=self._recovery_file)

            if not isinstance(data, dict) or not all(
                    map(lambda x: x in data,
                        ("origin", "path", "pos", "date"))):
                raise ValueError("Invalid recovery data structure")
            return data
        except Exception:
            self._logger.exception(
                f"Could not read recovery data from file {self._recovery_file}"
            )
            self.delete_recovery_data()
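The structure check only accepts a dictionary that contains all four of `origin`, `path`, `pos` and `date`; anything else is treated as corrupt and the recovery file is deleted. For illustration, a dictionary that would pass the check might look like this (the concrete values are invented):

    # Hypothetical recovery data satisfying the structure check above
    sample = {
        "origin": "local",           # invented origin
        "path": "some_print.gcode",  # invented file name
        "pos": 123456,               # invented position within the file
        "date": 1700000000.0,        # invented timestamp
    }
    assert isinstance(sample, dict) and all(
        key in sample for key in ("origin", "path", "pos", "date")
    )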
Example #6
    def _load_from_path(self, path):
        if not os.path.exists(path) or not os.path.isfile(path):
            return None

        profile = yaml.load_from_file(path=path)

        if profile is None or not isinstance(profile, dict):
            raise InvalidProfileError("Profile is None or not a dictionary")

        if self._migrate_profile(profile):
            try:
                self._save_to_path(path, profile, allow_overwrite=True)
            except Exception:
                self._logger.exception(
                    "Tried to save profile to {path} after migrating it while loading, ran into exception"
                    .format(path=path))

        profile = self._ensure_valid_profile(profile)

        if not profile:
            self._logger.warning("Invalid profile: %s" % path)
            raise InvalidProfileError()
        return profile
Example #7
    def _load(self):
        if os.path.exists(self._groupfile) and os.path.isfile(self._groupfile):
            try:
                data = yaml.load_from_file(path=self._groupfile)

                if "groups" not in data:
                    groups = data
                    data = {"groups": groups}

                file_version = data.get("_version", 1)
                if file_version < self.FILE_VERSION:
                    # make sure we migrate the file on disk after loading
                    self._logger.info(
                        "Detected file version {} on group "
                        "storage, migrating to version {}".format(
                            file_version, self.FILE_VERSION))
                    self._dirty = True

                groups = data.get("groups", {})
                tracked_permissions = data.get("tracked", list())

                for key, attributes in groups.items():
                    if key in self._default_groups:
                        # group is a default group
                        if not self._default_groups[key].get(
                                "changeable", True):
                            # group may not be changed -> bail
                            continue

                        name = self._default_groups[key].get("name", "")
                        description = self._default_groups[key].get(
                            "description", "")
                        removable = self._default_groups[key].get(
                            "removable", True)
                        changeable = self._default_groups[key].get(
                            "changeable", True)
                        toggleable = self._default_groups[key].get(
                            "toggleable", True)

                        if file_version == 1:
                            # 1.4.0/file version 1 has a bug that resets default to True for users group on modification
                            set_default = self._default_groups[key].get(
                                "default", False)
                        else:
                            set_default = attributes.get("default", False)
                    else:
                        name = attributes.get("name", "")
                        description = attributes.get("description", "")
                        removable = True
                        changeable = True
                        toggleable = True
                        set_default = attributes.get("default", False)

                    permissions = self._to_permissions(
                        *attributes.get("permissions", []))
                    default_permissions = self.default_permissions_for_group(
                        key)
                    for permission in default_permissions:
                        if (permission.key not in tracked_permissions
                                and permission not in permissions):
                            permissions.append(permission)

                    subgroups = self._to_groups(
                        *attributes.get("subgroups", []))

                    group = Group(
                        key,
                        name,
                        description=description,
                        permissions=permissions,
                        subgroups=subgroups,
                        default=set_default,
                        removable=removable,
                        changeable=changeable,
                        toggleable=toggleable,
                    )

                    if key == GUEST_GROUP and (
                            len(group.permissions) != len(permissions)
                            or len(group.subgroups) != len(subgroups)):
                        self._logger.warning(
                            "Dangerous permissions and/or subgroups stripped from guests group"
                        )
                        self._dirty = True

                    self._groups[key] = group

                for group in self._groups.values():
                    group._subgroups = self._to_groups(*group._subgroups)

                if self._dirty:
                    self._save()

            except Exception:
                self._logger.exception(
                    f"Error while loading groups from file {self._groupfile}")
Example #8
    def _do_analysis(self, high_priority=False):
        import sys

        import sarge

        if self._current.analysis and all(
                map(
                    lambda x: x in self._current.analysis,
                    ("printingArea", "dimensions", "estimatedPrintTime",
                     "filament"),
                )):
            return self._current.analysis

        try:
            throttle = (
                settings().getFloat(["gcodeAnalysis", "throttle_highprio"])
                if high_priority
                else settings().getFloat(["gcodeAnalysis", "throttle_normalprio"])
            )
            throttle_lines = settings().getInt(["gcodeAnalysis", "throttle_lines"])
            max_extruders = settings().getInt(["gcodeAnalysis", "maxExtruders"])
            g90_extruder = settings().getBoolean(["feature", "g90InfluencesExtruder"])
            bed_z = settings().getFloat(["gcodeAnalysis", "bedZ"])
            speedx = self._current.printer_profile["axes"]["x"]["speed"]
            speedy = self._current.printer_profile["axes"]["y"]["speed"]
            offsets = self._current.printer_profile["extruder"]["offsets"]

            command = [
                sys.executable,
                "-m",
                "octoprint",
                "analysis",
                "gcode",
                f"--speed-x={speedx}",
                f"--speed-y={speedy}",
                f"--max-t={max_extruders}",
                f"--throttle={throttle}",
                f"--throttle-lines={throttle_lines}",
                f"--bed-z={bed_z}",
            ]
            for offset in offsets[1:]:
                command += ["--offset", str(offset[0]), str(offset[1])]
            if g90_extruder:
                command += ["--g90-extruder"]
            command.append(self._current.absolute_path)

            self._logger.info("Invoking analysis command: {}".format(
                " ".join(command)))

            self._aborted = False
            p = sarge.run(command,
                          close_fds=CLOSE_FDS,
                          async_=True,
                          stdout=sarge.Capture())

            while len(p.commands) == 0:
                # somewhat ugly... we can't use wait_events because
                # the events might not be all set if an exception
                # by sarge is triggered within the async process
                # thread
                time.sleep(0.01)

            # by now we should have a command, let's wait for its
            # process to have been prepared
            p.commands[0].process_ready.wait()

            if not p.commands[0].process:
                # the process might have been set to None in case of any exception
                raise RuntimeError(
                    "Error while trying to run command {}".format(
                        " ".join(command)))

            try:
                # let's wait for stuff to finish
                while p.returncode is None:
                    if self._aborted:
                        # oh, we shall abort, let's do so!
                        p.commands[0].terminate()
                        raise AnalysisAborted(reenqueue=self._reenqueue)

                    # else continue
                    p.commands[0].poll()
            finally:
                p.close()

            output = p.stdout.text
            self._logger.debug(f"Got output: {output!r}")

            result = {}
            if "ERROR:" in output:
                _, error = output.split("ERROR:")
                raise RuntimeError(error.strip())
            elif "EMPTY:" in output:
                self._logger.info("Result is empty, no extrusions found")
                result = copy.deepcopy(EMPTY_RESULT)
            elif "RESULTS:" not in output:
                raise RuntimeError("No analysis result found")
            else:
                _, output = output.split("RESULTS:")
                analysis = yaml.load_from_file(file=output)

                result["printingArea"] = analysis["printing_area"]
                result["dimensions"] = analysis["dimensions"]
                if analysis["total_time"]:
                    result["estimatedPrintTime"] = analysis["total_time"] * 60
                if analysis["extrusion_length"]:
                    result["filament"] = {}
                    for i in range(len(analysis["extrusion_length"])):
                        result["filament"]["tool%d" % i] = {
                            "length": analysis["extrusion_length"][i],
                            "volume": analysis["extrusion_volume"][i],
                        }

            if self._current.analysis and isinstance(self._current.analysis,
                                                     dict):
                return dict_merge(result, self._current.analysis)
            else:
                return result
        finally:
            self._gcode = None
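At the very end the freshly computed result is merged with any analysis data already attached to the current entry via `octoprint.util.dict_merge`. As a rough approximation only (an assumption; the real helper may differ in details), the merge can be pictured as a recursive dictionary merge in which the second argument's values win:

    def naive_dict_merge(a, b):
        # Simplified stand-in for octoprint.util.dict_merge: recursive merge,
        # values from b take precedence over values from a
        merged = dict(a)
        for key, value in b.items():
            if isinstance(merged.get(key), dict) and isinstance(value, dict):
                merged[key] = naive_dict_merge(merged[key], value)
            else:
                merged[key] = value
        return merged

    result = {"estimatedPrintTime": 3600, "dimensions": {"width": 100}}
    existing = {"estimatedPrintTime": 3720}
    print(naive_dict_merge(result, existing))
    # {'estimatedPrintTime': 3720, 'dimensions': {'width': 100}}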
Example #9
def init_logging(
    settings,
    use_logging_file=True,
    logging_file=None,
    default_config=None,
    debug=False,
    verbosity=0,
    uncaught_logger=None,
    uncaught_handler=None,
    disable_color=True,
):
    """Sets up logging."""

    import os

    from octoprint.util import dict_merge

    # default logging configuration
    if default_config is None:
        simple_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
        default_config = {
            "version": 1,
            "formatters": {
                "simple": {
                    "format": simple_format
                },
                "colored": {
                    "()": "colorlog.ColoredFormatter",
                    "format": "%(log_color)s" + simple_format + "%(reset)s",
                    "reset": True,
                    "log_colors": {
                        "DEBUG": "cyan",
                        "WARNING": "yellow",
                        "ERROR": "red",
                        "CRITICAL": "bold_red",
                    },
                },
                "serial": {
                    "format": "%(asctime)s - %(message)s"
                },
                "timings": {
                    "format": "%(asctime)s - %(message)s"
                },
                "timingscsv": {
                    "format": "%(asctime)s;%(func)s;%(timing)f"
                },
            },
            "handlers": {
                "console": {
                    "class":
                    "octoprint.logging.handlers.OctoPrintStreamHandler",
                    "level": "DEBUG",
                    "formatter": "simple" if disable_color else "colored",
                    "stream": "ext://sys.stdout",
                },
                "file": {
                    "class":
                    "octoprint.logging.handlers.OctoPrintLogHandler",
                    "level":
                    "DEBUG",
                    "formatter":
                    "simple",
                    "when":
                    "D",
                    "backupCount":
                    6,
                    "filename":
                    os.path.join(settings.getBaseFolder("logs"),
                                 "octoprint.log"),
                },
                "serialFile": {
                    "class":
                    "octoprint.logging.handlers.SerialLogHandler",
                    "level":
                    "DEBUG",
                    "formatter":
                    "serial",
                    "backupCount":
                    3,
                    "filename":
                    os.path.join(settings.getBaseFolder("logs"), "serial.log"),
                    "delay":
                    True,
                },
                "pluginTimingsFile": {
                    "class":
                    "octoprint.logging.handlers.PluginTimingsLogHandler",
                    "level":
                    "DEBUG",
                    "formatter":
                    "timings",
                    "backupCount":
                    3,
                    "filename":
                    os.path.join(settings.getBaseFolder("logs"),
                                 "plugintimings.log"),
                    "delay":
                    True,
                },
                "pluginTimingsCsvFile": {
                    "class":
                    "octoprint.logging.handlers.PluginTimingsLogHandler",
                    "level":
                    "DEBUG",
                    "formatter":
                    "timingscsv",
                    "backupCount":
                    3,
                    "filename":
                    os.path.join(settings.getBaseFolder("logs"),
                                 "plugintimings.csv"),
                    "delay":
                    True,
                },
            },
            "loggers": {
                "SERIAL": {
                    "level": "INFO",
                    "handlers": ["serialFile"],
                    "propagate": False,
                },
                "PLUGIN_TIMINGS": {
                    "level": "INFO",
                    "handlers": ["pluginTimingsFile", "pluginTimingsCsvFile"],
                    "propagate": False,
                },
                "PLUGIN_TIMINGS.octoprint.plugin": {
                    "level": "INFO"
                },
                "octoprint": {
                    "level": "INFO"
                },
                "octoprint.util": {
                    "level": "INFO"
                },
                "octoprint.plugins": {
                    "level": "INFO"
                },
            },
            "root": {
                "level": "WARN",
                "handlers": ["console", "file"]
            },
        }

    if debug or verbosity > 0:
        default_config["loggers"]["octoprint"]["level"] = "DEBUG"
        default_config["root"]["level"] = "INFO"
    if verbosity > 1:
        default_config["loggers"]["octoprint.plugins"]["level"] = "DEBUG"
    if verbosity > 2:
        default_config["root"]["level"] = "DEBUG"

    config = default_config
    if use_logging_file:
        # further logging configuration from file...
        if logging_file is None:
            logging_file = os.path.join(settings.getBaseFolder("base"),
                                        "logging.yaml")

        config_from_file = {}
        if os.path.exists(logging_file) and os.path.isfile(logging_file):
            from octoprint.util import yaml

            config_from_file = yaml.load_from_file(path=logging_file)

        # we merge that with the default config
        if config_from_file is not None and isinstance(config_from_file, dict):
            config = dict_merge(default_config, config_from_file)

    # configure logging globally
    return set_logging_config(config, debug, verbosity, uncaught_logger,
                              uncaught_handler)
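Because the configuration loaded from logging.yaml is overlaid on default_config with dict_merge, the file only needs to contain the keys that differ from the defaults. A hedged illustration of such an override, written here as the dictionary that yaml.load_from_file would return (the plugin logger name is invented):

    # Hypothetical minimal override: bump one plugin logger to DEBUG and raise
    # the root level, leaving everything else at its default
    config_from_file = {
        "loggers": {
            "octoprint.plugins.myplugin": {"level": "DEBUG"},  # invented plugin name
        },
        "root": {"level": "INFO"},
    }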
Example #10
    def _load(self):
        if os.path.exists(self._userfile) and os.path.isfile(self._userfile):
            data = yaml.load_from_file(path=self._userfile)

            if not data or not isinstance(data, dict):
                self._logger.fatal(
                    "{} does not contain a valid map of users. Fix "
                    "the file, or remove it, then restart OctoPrint.".format(
                        self._userfile))
                raise CorruptUserStorage()

            for name, attributes in data.items():
                if not isinstance(attributes, dict):
                    continue

                permissions = []
                if "permissions" in attributes:
                    permissions = attributes["permissions"]

                if "groups" in attributes:
                    groups = set(attributes["groups"])
                else:
                    groups = {self._group_manager.user_group}

                # migrate from roles to permissions
                if "roles" in attributes and "permissions" not in attributes:
                    self._logger.info(
                        f"Migrating user {name} to new granular permission system"
                    )

                    groups |= set(
                        self._migrate_roles_to_groups(attributes["roles"]))
                    self._dirty = True

                apikey = None
                if "apikey" in attributes:
                    apikey = attributes["apikey"]
                settings = {}
                if "settings" in attributes:
                    settings = attributes["settings"]

                self._users[name] = User(
                    username=name,
                    passwordHash=attributes["password"],
                    active=attributes["active"],
                    permissions=self._to_permissions(*permissions),
                    groups=self._to_groups(*groups),
                    apikey=apikey,
                    settings=settings,
                )
                for sessionid in self._sessionids_by_userid.get(name, set()):
                    if sessionid in self._session_users_by_session:
                        self._session_users_by_session[sessionid].update_user(
                            self._users[name])

            if self._dirty:
                self._save()

            self._customized = True
        else:
            self._customized = False
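For reference, an entry in the user file must provide at least password and active (both are read without a fallback); groups, permissions, apikey and settings are optional and fall back to the defaults visible above. A sketch of a single entry as the dictionary yaml.load_from_file would return (the username and all values are invented):

    # Hypothetical single-user content of the user file
    data = {
        "jane": {  # invented username
            "password": "<password hash>",
            "active": True,
            "groups": ["users"],  # assumption: key of the default users group
            "permissions": [],
            "apikey": None,
            "settings": {},
        }
    }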