Example #1
	def save(self, profile, allow_overwrite=False, make_default=False):
		if "id" in profile:
			identifier = profile["id"]
		elif "name" in profile:
			identifier = profile["name"]
		else:
			raise InvalidProfileError("profile must contain either id or name")

		identifier = self._sanitize(identifier)
		profile["id"] = identifier
		profile = dict_sanitize(profile, self.__class__.default)

		if identifier == "_default":
			default_profile = dict_merge(self._load_default(), profile)
			if not self._ensure_valid_profile(default_profile):
				raise InvalidProfileError()

			settings().set(["printerProfiles", "defaultProfile"], default_profile, defaults=dict(printerProfiles=dict(defaultProfile=self.__class__.default)))
			settings().save()
		else:
			self._save_to_path(self._get_profile_path(identifier), profile, allow_overwrite=allow_overwrite)

			if make_default:
				settings().set(["printerProfiles", "default"], identifier)

		if self._current is not None and self._current["id"] == identifier:
			self.select(identifier)
		return self.get(identifier)
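All of these snippets rely on octoprint.util.dict_merge performing a recursive merge in which the second argument's entries override the first's, as in dict_merge(self._load_default(), profile) above. A minimal sketch of that behavior (an illustration, not the library's implementation):

import copy

def dict_merge_sketch(a, b):
	# Recursively merge b into a copy of a; on conflicts values from b win,
	# and nested dicts are merged instead of being replaced wholesale.
	result = copy.deepcopy(a)
	for key, value in b.items():
		if key in result and isinstance(result[key], dict) and isinstance(value, dict):
			result[key] = dict_merge_sketch(result[key], value)
		else:
			result[key] = value
	return result

# Example: the profile override wins, untouched defaults are kept.
defaults = dict(volume=dict(width=200, depth=200), extruder=dict(count=1))
profile = dict(volume=dict(width=250))
print(dict_merge_sketch(defaults, profile))
# -> {'volume': {'width': 250, 'depth': 200}, 'extruder': {'count': 1}}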
Example #2
def printerProfilesUpdate(identifier):
	if not "application/json" in request.headers["Content-Type"]:
		return make_response("Expected content-type JSON", 400)

	json_data = request.json
	if not "profile" in json_data:
		return make_response("No profile included in request", 400)

	profile = printerProfileManager.get(identifier)
	if profile is None:
		profile = printerProfileManager.get_default()

	new_profile = json_data["profile"]
	new_profile = dict_merge(profile, new_profile)

	make_default = False
	if "default" in new_profile:
		make_default = True
		del new_profile["default"]

	new_profile["id"] = identifier
	if not _validate_profile(new_profile):
		return make_response("Combined profile is invalid, missing obligatory values", 400)

	try:
		saved_profile = printerProfileManager.save(new_profile, allow_overwrite=True, make_default=make_default)
	except Exception as e:
		return make_response("Could not save profile: %s" % e.message, 500)

	return jsonify(dict(profile=_convert_profile(saved_profile)))
Example #3
	def test_set_setting_string(self, key, value, update, expected_returnvalue):
		returnvalue = self.user.set_setting(key, value)

		expected = dict_merge(default_settings, update)

		self.assertDictEqual(expected, self.user.get_all_settings())
		self.assertEqual(expected_returnvalue, returnvalue)
Example #4
	def _load_default(self):
		default_overrides = settings().get(["printerProfiles", "defaultProfile"])
		profile = self._ensure_valid_profile(dict_merge(copy.deepcopy(self.__class__.default), default_overrides))
		if not profile:
			self._logger.warn("Invalid default profile after applying overrides")
			return copy.deepcopy(self.__class__.default)
		return profile
Example #5
def get_unrendered_timelapses():
	delete_old_unrendered_timelapses()

	basedir = settings().getBaseFolder("timelapse_tmp")
	jobs = collections.defaultdict(lambda: dict(count=0, size=None, bytes=0, date=None, timestamp=None))
	for osFile in os.listdir(basedir):
		if not fnmatch.fnmatch(osFile, "*.jpg"):
			continue

		prefix = _extract_prefix(osFile)
		if prefix is None:
			continue

		statResult = os.stat(os.path.join(basedir, osFile))
		jobs[prefix]["count"] += 1
		jobs[prefix]["bytes"] += statResult.st_size
		if jobs[prefix]["timestamp"] is None or statResult.st_ctime < jobs[prefix]["timestamp"]:
			jobs[prefix]["timestamp"] = statResult.st_ctime

	def finalize_fields(job):
		job["size"] = util.get_formatted_size(job["bytes"])
		job["date"] = util.get_formatted_datetime(datetime.datetime.fromtimestamp(job["timestamp"]))
		del job["timestamp"]
		return job

	return sorted([util.dict_merge(dict(name=key), finalize_fields(value)) for key, value in jobs.items()], key=lambda x: x["name"])
Example #6
	def get_current_versions(self, check_targets=None, force=False):
		"""
		Retrieves the current version information for all defined check_targets. Will retrieve information for all
		available targets by default.

		:param check_targets: an iterable defining the targets to check, if not supplied defaults to all targets
		"""

		checks = self._get_configured_checks()
		if check_targets is None:
			check_targets = checks.keys()

		update_available = False
		update_possible = False
		information = dict()

		for target, check in checks.items():
			if not target in check_targets:
				continue

			try:
				populated_check = self._populated_check(target, check)
				target_information, target_update_available, target_update_possible = self._get_current_version(target, populated_check, force=force)
				if target_information is None:
					target_information = dict()
			except exceptions.UnknownCheckType:
				self._logger.warn("Unknown update check type for target {}: {}".format(target, check.get("type", "<n/a>")))
				continue

			target_information = dict_merge(dict(local=dict(name="unknown", value="unknown"), remote=dict(name="unknown", value="unknown", release_notes=None)), target_information)

			update_available = update_available or target_update_available
			update_possible = update_possible or (target_update_possible and target_update_available)

			local_name = target_information["local"]["name"]
			local_value = target_information["local"]["value"]

			release_notes = None
			if target_information and target_information["remote"] and target_information["remote"]["value"]:
				if "release_notes" in populated_check and populated_check["release_notes"]:
					release_notes = populated_check["release_notes"]
				elif "release_notes" in target_information["remote"]:
					release_notes = target_information["remote"]["release_notes"]

				if release_notes:
					release_notes = release_notes.format(octoprint_version=VERSION,
					                                     target_name=target_information["remote"]["name"],
					                                     target_version=target_information["remote"]["value"])

			information[target] = dict(updateAvailable=target_update_available,
			                           updatePossible=target_update_possible,
			                           information=target_information,
			                           displayName=populated_check["displayName"],
			                           displayVersion=populated_check["displayVersion"].format(octoprint_version=VERSION, local_name=local_name, local_value=local_value),
			                           check=populated_check,
			                           releaseNotes=release_notes)

		if self._version_cache_dirty:
			self._save_version_cache()
		return information, update_available, update_possible
Example #7
	def _get_configured_checks(self):
		with self._configured_checks_mutex:
			if self._refresh_configured_checks or self._configured_checks is None:
				self._refresh_configured_checks = False
				self._configured_checks = self._settings.get(["checks"], merged=True)
				update_check_hooks = self._plugin_manager.get_hooks("octoprint.plugin.softwareupdate.check_config")
				check_providers = self._settings.get(["check_providers"], merged=True)
				for name, hook in update_check_hooks.items():
					try:
						hook_checks = hook()
					except:
						self._logger.exception("Error while retrieving update information from plugin {name}".format(**locals()))
					else:
						for key, data in hook_checks.items():
							check_providers[key] = name
							if key in self._configured_checks:
								data = dict_merge(data, self._configured_checks[key])
							self._configured_checks[key] = data
				self._settings.set(["check_providers"], check_providers)
				self._settings.save()

				# we only want to process checks that came from plugins for
				# which the plugins are still installed and enabled
				config_checks = self._settings.get(["checks"])
				plugin_and_not_enabled = lambda k: k in check_providers and \
				                                   not check_providers[k] in self._plugin_manager.enabled_plugins
				obsolete_plugin_checks = filter(plugin_and_not_enabled,
				                                config_checks.keys())
				for key in obsolete_plugin_checks:
					self._logger.debug("Check for key {} was provided by plugin {} that's no longer available, ignoring it".format(key, check_providers[key]))
					del self._configured_checks[key]

			return self._configured_checks
Example #8
File: views.py  Project: devdej/OctoPrint
def _get_translations(locale, domain):
	from babel.messages.pofile import read_po
	from octoprint.util import dict_merge

	messages = dict()
	plural_expr = None

	def messages_from_po(path, locale, domain):
		messages = dict()
		with codecs.open(path, encoding="utf-8") as f:
			catalog = read_po(f, locale=locale, domain=domain)

			for message in catalog:
				message_id = message.id
				if isinstance(message_id, (list, tuple)):
					message_id = message_id[0]
				if message.string:
					messages[message_id] = message.string

		return messages, catalog.plural_expr

	po_files = _get_all_translationfiles(locale, domain)
	for po_file in po_files:
		po_messages, plural_expr = messages_from_po(po_file, locale, domain)
		if po_messages is not None:
			messages = dict_merge(messages, po_messages)

	return messages, plural_expr
Example #9
    def get_current_versions(self, check_targets=None, force=False):
        """
		Retrieves the current version information for all defined check_targets. Will retrieve information for all
		available targets by default.

		:param check_targets: an iterable defining the targets to check, if not supplied defaults to all targets
		"""

        checks = self._get_configured_checks()
        if check_targets is None:
            check_targets = checks.keys()

        update_available = False
        update_possible = False
        information = dict()

        for target, check in checks.items():
            if not target in check_targets:
                continue

            try:
                target_information, target_update_available, target_update_possible = self._get_current_version(
                    target, check, force=force
                )
                if target_information is None:
                    continue
            except exceptions.UnknownCheckType:
                self._logger.warn("Unknown update check type for %s" % target)
                continue

            target_information = dict_merge(
                dict(local=dict(name="unknown", value="unknown"), remote=dict(name="unknown", value="unknown")),
                target_information,
            )

            update_available = update_available or target_update_available
            update_possible = update_possible or (target_update_possible and target_update_available)
            information[target] = dict(
                updateAvailable=target_update_available,
                updatePossible=target_update_possible,
                information=target_information,
            )

            if "displayName" in check:
                information[target]["displayName"] = check["displayName"]

            if "displayVersion" in check:
                from octoprint._version import get_versions

                octoprint_version = get_versions()["version"]
                local_name = target_information["local"]["name"]
                local_value = target_information["local"]["value"]
                information[target]["displayVersion"] = check["displayVersion"].format(
                    octoprint_version=octoprint_version, local_name=local_name, local_value=local_value
                )

        if self._version_cache_dirty:
            self._save_version_cache()
        return information, update_available, update_possible
Example #10
	def connect(self, protocol_option_overrides=None, transport_option_overrides=None, profile=None):
		"""
		Connects to the printer. If protocol and/or transport option overrides are provided, they are merged into
		the configured options before connecting; finally the given printer profile is selected.
		"""
		self._protocol.disconnect()

		protocol_options = settings().get(["communication", "protocolOptions"], merged=True)
		if protocol_option_overrides is not None and isinstance(protocol_option_overrides, dict):
			protocol_options = util.dict_merge(protocol_options, protocol_option_overrides)

		transport_options = settings().get(["communication", "transportOptions"], merged=True)
		if transport_option_overrides is not None and isinstance(transport_option_overrides, dict):
			transport_options = util.dict_merge(transport_options, transport_option_overrides)

		self._protocol.connect(protocol_options, transport_options)
		self._printerProfileManager.select(profile)
Example #11
	def save_slicer_profile(self, path, profile, allow_overwrite=True, overrides=None):
		from octoprint.util import dict_merge
		if overrides is not None:
			new_profile = dict_merge(profile.data, overrides)
		else:
			new_profile = profile.data

		self._save_profile(path, new_profile, allow_overwrite=allow_overwrite, display_name=profile.display_name, description=profile.description)
Example #12
    def on_settings_migrate(self, target, current=None):
        if current is None:
            # there might be some left over data from the time we still persisted everything to settings,
            # even the stuff that shouldn't be persisted but always provided by the hook - let's
            # clean up

            configured_checks = self._settings.get(["checks"], incl_defaults=False)
            if configured_checks is None:
                configured_checks = dict()

            # take care of the octoprint entry
            if "octoprint" in configured_checks:
                octoprint_check = dict(configured_checks["octoprint"])
                if "type" in octoprint_check and not octoprint_check["type"] == "github_commit":
                    deletables = ["current"]
                else:
                    deletables = []
                octoprint_check = self._clean_settings_check(
                    "octoprint",
                    octoprint_check,
                    self.get_settings_defaults()["checks"]["octoprint"],
                    delete=deletables,
                    save=False,
                )

            # and the hooks
            update_check_hooks = self._plugin_manager.get_hooks("octoprint.plugin.softwareupdate.check_config")
            for name, hook in update_check_hooks.items():
                try:
                    hook_checks = hook()
                except:
                    self._logger.exception(
                        "Error while retrieving update information from plugin {name}".format(**locals())
                    )
                else:
                    for key, data in hook_checks.items():
                        if key in configured_checks:
                            settings_check = dict(configured_checks[key])
                            merged = dict_merge(data, settings_check)
                            if "type" in merged and not merged["type"] == "github_commit":
                                deletables = ["current", "displayVersion"]
                            else:
                                deletables = []

                            self._clean_settings_check(key, settings_check, data, delete=deletables, save=False)

        elif current == 1:
            configured_checks = self._settings.get(["checks"], incl_defaults=False)
            if configured_checks is None:
                return

            if "octoprint" in configured_checks and "octoprint" in configured_checks["octoprint"]:
                # that's a circular reference, back to defaults
                dummy_defaults = dict(plugins=dict())
                dummy_defaults["plugins"][self._identifier] = dict(checks=dict())
                dummy_defaults["plugins"][self._identifier]["checks"]["octoprint"] = None
                self._settings.set(["checks", "octoprint"], None, defaults=dummy_defaults)
                self._settings.save()
Example #13
	def setUp(self):
		self.base_path = os.path.join(os.path.dirname(__file__), "_files")
		self.config_path = os.path.realpath(os.path.join(self.base_path, "config.yaml"))
		self.defaults_path = os.path.realpath(os.path.join(self.base_path, "defaults.yaml"))

		with open(self.config_path, "r+b") as f:
			self.config = yaml.safe_load(f)
		with open(self.defaults_path, "r+b") as f:
			self.defaults = yaml.safe_load(f)

		from octoprint.util import dict_merge
		self.expected_effective = dict_merge(self.defaults, self.config)
Example #14
	def get(self, path, asdict=False, defaults=None, merged=False):
		import octoprint.util as util

		if len(path) == 0:
			return None

		config = self._config
		if defaults is None:
			defaults = default_settings

		while len(path) > 1:
			key = path.pop(0)
			if key in config.keys() and key in defaults.keys():
				config = config[key]
				defaults = defaults[key]
			elif key in defaults.keys():
				config = {}
				defaults = defaults[key]
			else:
				return None

		k = path.pop(0)
		if not isinstance(k, (list, tuple)):
			keys = [k]
		else:
			keys = k

		if asdict:
			results = {}
		else:
			results = []
		for key in keys:
			if key in config.keys():
				value = config[key]
				if merged and key in defaults:
					value = util.dict_merge(defaults[key], value)
			elif key in defaults:
				value = defaults[key]
			else:
				value = None

			if asdict:
				results[key] = value
			else:
				results.append(value)

		if not isinstance(k, (list, tuple)):
			if asdict:
				return results.values().pop()
			else:
				return results.pop()
		else:
			return results
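A short usage sketch for the merged flag in the get() shown above; the stored config values and defaults below are made up for illustration, and the settings() accessor is the module-level one used throughout these examples:

from octoprint.settings import settings  # accessor used by the other examples

# Suppose config.yaml only overrides the default profile's name while default_settings
# also carries a "model" entry (values invented for this illustration):
#   config           -> {"printerProfiles": {"defaultProfile": {"name": "My Printer"}}}
#   default_settings -> {"printerProfiles": {"defaultProfile": {"name": "Default", "model": "Generic"}}}

plain = settings().get(["printerProfiles", "defaultProfile"])
# -> {"name": "My Printer"}                       (only what is stored in config.yaml)

merged = settings().get(["printerProfiles", "defaultProfile"], merged=True)
# -> {"name": "My Printer", "model": "Generic"}   (defaults folded in via dict_merge)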
Example #15
	def _load_default(self):
		default_overrides = settings().get(["printerProfiles", "defaultProfile"])
		if self._migrate_profile(default_overrides):
			try:
				settings().set(["printerProfiles", "defaultProfile"], default_overrides)
				settings().save()
			except:
				self._logger.exception("Tried to save default profile after migrating it while loading, ran into exception")

		profile = self._ensure_valid_profile(dict_merge(copy.deepcopy(self.__class__.default), default_overrides))
		if not profile:
			self._logger.warn("Invalid default profile after applying overrides")
			return copy.deepcopy(self.__class__.default)
		return profile
Example #16
	def _load_all(self):
		all_identifiers = self._load_all_identifiers()
		results = dict()
		for identifier, path in all_identifiers.items():
			if identifier == "_default":
				profile = self._load_default()
			else:
				profile = self._load_from_path(path)

			if profile is None:
				continue

			results[identifier] = dict_merge(self._load_default(), profile)
		return results
Example #17
	def _load_all(self):
		all_identifiers = self._load_all_identifiers()
		results = dict()
		for identifier, path in all_identifiers.items():
			try:
				profile = self._load_from_path(path)
			except InvalidProfileError:
				self._logger.warn("Profile {} is invalid, skipping".format(identifier))
				continue

			if profile is None:
				continue

			results[identifier] = dict_merge(self.__class__.default, profile)
		return results
Example #18
def printerProfilesAdd():
	if not "application/json" in request.headers["Content-Type"]:
		return make_response("Expected content-type JSON", 400)

	try:
		json_data = request.json
	except BadRequest:
		return make_response("Malformed JSON body in request", 400)

	if not "profile" in json_data:
		return make_response("No profile included in request", 400)

	base_profile = printerProfileManager.get_default()
	if "basedOn" in json_data and isinstance(json_data["basedOn"], basestring):
		other_profile = printerProfileManager.get(json_data["basedOn"])
		if other_profile is not None:
			base_profile = other_profile

	if "id" in base_profile:
		del base_profile["id"]
	if "name" in base_profile:
		del base_profile["name"]
	if "default" in base_profile:
		del base_profile["default"]

	new_profile = json_data["profile"]
	make_default = False
	if "default" in new_profile:
		make_default = True
		del new_profile["default"]

	profile = dict_merge(base_profile, new_profile)

	if not "id" in profile:
		return make_response("Profile does not contain mandatory 'id' field", 400)
	if not "name" in profile:
		return make_response("Profile does not contain mandatory 'name' field", 400)

	try:
		saved_profile = printerProfileManager.save(profile, allow_overwrite=False, make_default=make_default)
	except InvalidProfileError:
		return make_response("Profile is invalid", 400)
	except CouldNotOverwriteError:
		return make_response("Profile {} already exists and overwriting was not allowed".format(profile["id"]), 400)
	except Exception as e:
		return make_response("Could not save profile: %s" % str(e), 500)
	else:
		return jsonify(dict(profile=_convert_profile(saved_profile)))
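For orientation, a hedged client-side sketch of a request this handler accepts; the endpoint URL, host and API key header are assumptions, and the profile fields are purely illustrative:

import requests  # assumed HTTP client; endpoint path and host are assumptions

payload = {
	"profile": {
		"id": "my_printer",        # mandatory, checked above
		"name": "My Printer",      # mandatory, checked above
		"default": True,           # stripped out and turned into make_default=True
	},
	"basedOn": "_default",         # optional: start from an existing profile instead of the default one
}
response = requests.post("http://octopi.local/api/printerprofiles",
                         json=payload,
                         headers={"X-Api-Key": "<api key>"})
print(response.status_code, response.json())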
Example #19
	def _get_configured_checks(self):
		with self._configured_checks_mutex:
			if self._refresh_configured_checks or self._configured_checks is None:
				self._refresh_configured_checks = False
				self._configured_checks = self._settings.get(["checks"], merged=True)
				update_check_hooks = self._plugin_manager.get_hooks("octoprint.plugin.softwareupdate.check_config")
				for name, hook in update_check_hooks.items():
					try:
						hook_checks = hook()
					except:
						self._logger.exception("Error while retrieving update information from plugin {name}".format(**locals()))
					else:
						for key, data in hook_checks.items():
							if key in self._configured_checks:
								data = dict_merge(data, self._configured_checks[key])
							self._configured_checks[key] = data

			return self._configured_checks
Example #20
def get_unrendered_timelapses():
    global _job_lock
    global current

    delete_old_unrendered_timelapses()

    basedir = settings().getBaseFolder("timelapse_tmp")
    jobs = collections.defaultdict(lambda: dict(count=0, size=None, bytes=0, date=None, timestamp=None))

    for entry in scandir(basedir):
        if not fnmatch.fnmatch(entry.name, "*.jpg"):
            continue

        prefix = _extract_prefix(entry.name)
        if prefix is None:
            continue

        jobs[prefix]["count"] += 1
        jobs[prefix]["bytes"] += entry.stat().st_size
        if jobs[prefix]["timestamp"] is None or entry.stat().st_ctime < jobs[prefix]["timestamp"]:
            jobs[prefix]["timestamp"] = entry.stat().st_ctime

    with _job_lock:
        global current_render_job

        def finalize_fields(prefix, job):
            currently_recording = current is not None and current.prefix == prefix
            currently_rendering = current_render_job is not None and current_render_job["prefix"] == prefix

            job["size"] = util.get_formatted_size(job["bytes"])
            job["date"] = util.get_formatted_datetime(datetime.datetime.fromtimestamp(job["timestamp"]))
            job["recording"] = currently_recording
            job["rendering"] = currently_rendering
            job["processing"] = currently_recording or currently_rendering
            del job["timestamp"]

            return job

        return sorted(
            [util.dict_merge(dict(name=key), finalize_fields(key, value)) for key, value in jobs.items()],
            key=lambda x: x["name"],
        )
Example #21
	def save(self, profile, allow_overwrite=False, make_default=False):		
		if "id" in profile:
			identifier = profile["id"]
		elif "name" in profile:
			identifier = profile["name"]
		else:
			raise ValueError("profile must contain either id or name")

		identifier = self._sanitize(identifier)
		profile["id"] = identifier
		profile = dict_clean(profile, self.__class__.default)
		self.sendPreferenctParameter(profile)
		self.saveToEEPROM()
		self.getBeforeAndAfterPrintParameter(profile)
		
		if identifier == "_default":
			default_profile = dict_merge(self._load_default(), profile)
			settings().set(["printerProfiles", "defaultProfile"], default_profile, defaults=dict(printerProfiles=dict(defaultProfile=self.__class__.default)))
			settings().save()
		else:
			self._save_to_path(self._get_profile_path(identifier), profile, allow_overwrite=allow_overwrite)

			if make_default:
				settings().set(["printerProfiles", "default"], identifier)
		
		if self._current is not None and self._current["id"] == identifier:
			self.select(identifier)
		return self.get(identifier)
Example #22
def printerProfilesAdd():
	if not "application/json" in request.headers["Content-Type"]:
		return None, None, make_response("Expected content-type JSON", 400)

	json_data = request.json
	if not "profile" in json_data:
		return None, None, make_response("No profile included in request", 400)

	base_profile = printerProfileManager.get_default()
	if "basedOn" in json_data and isinstance(json_data["basedOn"], basestring):
		other_profile = printerProfileManager.get(json_data["basedOn"])
		if other_profile is not None:
			base_profile = other_profile

	if "id" in base_profile:
		del base_profile["id"]
	if "name" in base_profile:
		del base_profile["name"]
	profile = dict_merge(base_profile, json_data["profile"])
	if not _validate_profile(profile):
		return None, None, make_response("Profile is invalid, missing obligatory values", 400)

	return _overwrite_profile(profile)
Example #23
	def save(self, profile, allow_overwrite=False, make_default=False):
		if "id" in profile:
			identifier = profile["id"]
		elif "name" in profile:
			identifier = profile["name"]
		else:
			raise InvalidProfileError("profile must contain either id or name")

		identifier = self._sanitize(identifier)
		profile["id"] = identifier

		self._migrate_profile(profile)
		profile = dict_sanitize(profile, self.__class__.default)
		profile = dict_merge(self.__class__.default, profile)

		self._save_to_path(self._get_profile_path(identifier), profile, allow_overwrite=allow_overwrite)

		if make_default:
			settings().set(["printerProfiles", "default"], identifier)
			settings().save()

		if self._current is not None and self._current["id"] == identifier:
			self.select(identifier)
		return self.get(identifier)
Example #24
def printerProfilesUpdate(identifier):
	if not "application/json" in request.headers["Content-Type"]:
		return make_response("Expected content-type JSON", 400)

	try:
		json_data = request.json
	except JSONBadRequest:
		return make_response("Malformed JSON body in request", 400)

	if not "profile" in json_data:
		return make_response("No profile included in request", 400)

	profile = printerProfileManager.get(identifier)
	if profile is None:
		profile = printerProfileManager.get_default()

	new_profile = json_data["profile"]
	new_profile = dict_merge(profile, new_profile)

	make_default = False
	if "default" in new_profile:
		make_default = True
		del new_profile["default"]

	new_profile["id"] = identifier

	try:
		saved_profile = printerProfileManager.save(new_profile, allow_overwrite=True, make_default=make_default)
	except InvalidProfileError:
		return make_response("Profile is invalid", 400)
	except CouldNotOverwriteError:
		return make_response("Profile already exists and overwriting was not allowed", 400)
	except Exception as e:
		return make_response("Could not save profile: %s" % e.message, 500)
	else:
		return jsonify(dict(profile=_convert_profile(saved_profile)))
Example #25
	def _get_configured_checks(self):
		with self._configured_checks_mutex:
			if self._refresh_configured_checks or self._configured_checks is None:
				self._refresh_configured_checks = False
				self._configured_checks = self._settings.get(["checks"], merged=True)

				update_check_hooks = self._plugin_manager.get_hooks("octoprint.plugin.softwareupdate.check_config")
				check_providers = self._settings.get(["check_providers"], merged=True)
				if not isinstance(check_providers, dict):
					check_providers = dict()

				effective_configs = dict()

				for name, hook in update_check_hooks.items():
					try:
						hook_checks = hook()
					except Exception:
						self._logger.exception("Error while retrieving update information from plugin {name}".format(**locals()))
					else:
						for key, default_config in hook_checks.items():
							if key in effective_configs or key == "octoprint":
								if key == name:
									self._logger.warning("Software update hook {} provides check for itself but that was already registered by {} - overwriting that third party registration now!".format(name, check_providers.get(key, "unknown hook")))
								else:
									self._logger.warning("Software update hook {} tried to overwrite config for check {} but that was already configured elsewhere".format(name, key))
									continue

							check_providers[key] = name

							yaml_config = dict()
							effective_config = default_config
							if key in self._configured_checks:
								yaml_config = self._configured_checks[key]
								effective_config = dict_merge(default_config, yaml_config)

								# Make sure there's nothing persisted in that check that shouldn't be persisted
								#
								# This used to be part of the settings migration (version 2) due to a bug - it can't
								# stay there though since it interferes with manual entries to the checks not
								# originating from within a plugin. Hence we do that step now here.
								if "type" not in effective_config or effective_config["type"] not in self.COMMIT_TRACKING_TYPES:
									deletables = ["current", "displayVersion"]
								else:
									deletables = []
								self._clean_settings_check(key, yaml_config, default_config, delete=deletables, save=False)

							if effective_config:
								effective_configs[key] = effective_config
							else:
								self._logger.warning("Update for {} is empty or None, ignoring it".format(key))

				# finally set all our internal representations to our processed results
				for key, config in effective_configs.items():
					self._configured_checks[key] = config

				self._settings.set(["check_providers"], check_providers)
				self._settings.save()

				# we only want to process checks that came from plugins for
				# which the plugins are still installed and enabled
				config_checks = self._settings.get(["checks"])
				plugin_and_not_enabled = lambda k: k in check_providers and \
				                                   not check_providers[k] in self._plugin_manager.enabled_plugins
				obsolete_plugin_checks = list(filter(plugin_and_not_enabled,
				                                	 config_checks.keys()))
				for key in obsolete_plugin_checks:
					self._logger.debug("Check for key {} was provided by plugin {} that's no longer available, ignoring it".format(key, check_providers[key]))
					del self._configured_checks[key]

			return self._configured_checks
Example #26
def init_logging(
    settings,
    use_logging_file=True,
    logging_file=None,
    default_config=None,
    debug=False,
    verbosity=0,
    uncaught_logger=None,
    uncaught_handler=None,
):
    """Sets up logging."""

    import os

    from octoprint.util import dict_merge

    # default logging configuration
    if default_config is None:
        simple_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
        default_config = {
            "version": 1,
            "formatters": {
                "simple": {
                    "format": simple_format
                },
                "colored": {
                    "()": "colorlog.ColoredFormatter",
                    "format": "%(log_color)s" + simple_format + "%(reset)s",
                    "reset": True,
                    "log_colors": {
                        "DEBUG": "cyan",
                        "INFO": "white",
                        "WARNING": "yellow",
                        "ERROR": "red",
                        "CRITICAL": "bold_red",
                    },
                },
                "serial": {
                    "format": "%(asctime)s - %(message)s"
                },
                "timings": {
                    "format": "%(asctime)s - %(message)s"
                },
                "timingscsv": {
                    "format": "%(asctime)s;%(func)s;%(timing)f"
                },
            },
            "handlers": {
                "console": {
                    "class":
                    "octoprint.logging.handlers.OctoPrintStreamHandler",
                    "level": "DEBUG",
                    "formatter": "colored",
                    "stream": "ext://sys.stdout",
                },
                "file": {
                    "class":
                    "octoprint.logging.handlers.OctoPrintLogHandler",
                    "level":
                    "DEBUG",
                    "formatter":
                    "simple",
                    "when":
                    "D",
                    "backupCount":
                    6,
                    "filename":
                    os.path.join(settings.getBaseFolder("logs"),
                                 "octoprint.log"),
                },
                "serialFile": {
                    "class":
                    "octoprint.logging.handlers.SerialLogHandler",
                    "level":
                    "DEBUG",
                    "formatter":
                    "serial",
                    "backupCount":
                    3,
                    "filename":
                    os.path.join(settings.getBaseFolder("logs"), "serial.log"),
                    "delay":
                    True,
                },
                "pluginTimingsFile": {
                    "class":
                    "octoprint.logging.handlers.PluginTimingsLogHandler",
                    "level":
                    "DEBUG",
                    "formatter":
                    "timings",
                    "backupCount":
                    3,
                    "filename":
                    os.path.join(settings.getBaseFolder("logs"),
                                 "plugintimings.log"),
                    "delay":
                    True,
                },
                "pluginTimingsCsvFile": {
                    "class":
                    "octoprint.logging.handlers.PluginTimingsLogHandler",
                    "level":
                    "DEBUG",
                    "formatter":
                    "timingscsv",
                    "backupCount":
                    3,
                    "filename":
                    os.path.join(settings.getBaseFolder("logs"),
                                 "plugintimings.csv"),
                    "delay":
                    True,
                },
            },
            "loggers": {
                "SERIAL": {
                    "level": "INFO",
                    "handlers": ["serialFile"],
                    "propagate": False,
                },
                "PLUGIN_TIMINGS": {
                    "level": "INFO",
                    "handlers": ["pluginTimingsFile", "pluginTimingsCsvFile"],
                    "propagate": False,
                },
                "PLUGIN_TIMINGS.octoprint.plugin": {
                    "level": "INFO"
                },
                "octoprint": {
                    "level": "INFO"
                },
                "octoprint.util": {
                    "level": "INFO"
                },
                "octoprint.plugins": {
                    "level": "INFO"
                },
            },
            "root": {
                "level": "WARN",
                "handlers": ["console", "file"]
            },
        }

    if debug or verbosity > 0:
        default_config["loggers"]["octoprint"]["level"] = "DEBUG"
        default_config["root"]["level"] = "INFO"
    if verbosity > 1:
        default_config["loggers"]["octoprint.plugins"]["level"] = "DEBUG"
    if verbosity > 2:
        default_config["root"]["level"] = "DEBUG"

    config = default_config
    if use_logging_file:
        # further logging configuration from file...
        if logging_file is None:
            logging_file = os.path.join(settings.getBaseFolder("base"),
                                        "logging.yaml")

        config_from_file = {}
        if os.path.exists(logging_file) and os.path.isfile(logging_file):
            import yaml

            with io.open(logging_file, "rt", encoding="utf-8") as f:
                config_from_file = yaml.safe_load(f)

        # we merge that with the default config
        if config_from_file is not None and isinstance(config_from_file, dict):
            config = dict_merge(default_config, config_from_file)

    # configure logging globally
    return set_logging_config(config, debug, verbosity, uncaught_logger,
                              uncaught_handler)
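Because config_from_file is merged over default_config via dict_merge, a logging.yaml only needs to contain the keys it wants to change. A hedged sketch of such an override and its effect (the chosen logger and levels are illustrative):

import yaml
from octoprint.util import dict_merge

override_yaml = """
loggers:
  octoprint.plugins:
    level: DEBUG
handlers:
  console:
    level: INFO
"""

config_from_file = yaml.safe_load(override_yaml)
# config = dict_merge(default_config, config_from_file)
# -> only the console handler level and the octoprint.plugins logger level change;
#    every other handler, formatter and logger from default_config above is preserved.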
Example #27
	def _initLogging(self, debug, logConf=None):
		defaultConfig = {
			"version": 1,
			"formatters": {
				"simple": {
					"format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
				}
			},
			"handlers": {
				"console": {
					"class": "logging.StreamHandler",
					"level": "DEBUG",
					"formatter": "simple",
					"stream": "ext://sys.stdout"
				},
				"file": {
					"class": "logging.handlers.TimedRotatingFileHandler",
					"level": "DEBUG",
					"formatter": "simple",
					"when": "D",
					"backupCount": "1",
					"filename": os.path.join(settings().getBaseFolder("logs"), "octoprint.log")
				},
				"serialFile": {
					"class": "logging.handlers.RotatingFileHandler",
					"level": "DEBUG",
					"formatter": "simple",
					"maxBytes": 2 * 1024 * 1024, # let's limit the serial log to 2MB in size
					"filename": os.path.join(settings().getBaseFolder("logs"), "serial.log")
				}
			},
			"loggers": {
				"SERIAL": {
					"level": "CRITICAL",
					"handlers": ["serialFile"],
					"propagate": False
				}
			},
			"root": {
				"level": "INFO",
				"handlers": ["console", "file"]
			}
		}

		if debug:
			defaultConfig["root"]["level"] = "DEBUG"

		if logConf is None:
			logConf = os.path.join(settings().settings_dir, "logging.yaml")

		configFromFile = {}
		if os.path.exists(logConf) and os.path.isfile(logConf):
			import yaml
			with open(logConf, "r") as f:
				configFromFile = yaml.safe_load(f)

		config = util.dict_merge(defaultConfig, configFromFile)
		logging.config.dictConfig(config)

		if settings().getBoolean(["serial", "log"]):
			# enable debug logging to serial.log
			logging.getLogger("SERIAL").setLevel(logging.DEBUG)
			logging.getLogger("SERIAL").debug("Enabling serial logging")
Example #28
	def on_settings_migrate(self, target, current=None):

		if current is None or current < 4:
			# config version 4 and higher moves octoprint_restart_command and
			# environment_restart_command to the core configuration

			# current plugin commands
			configured_octoprint_restart_command = self._settings.get(["octoprint_restart_command"])
			configured_environment_restart_command = self._settings.get(["environment_restart_command"])

			# current global commands
			configured_system_restart_command = self._settings.global_get(["server", "commands", "systemRestartCommand"])
			configured_server_restart_command = self._settings.global_get(["server", "commands", "serverRestartCommand"])

			# only set global commands if they are not yet set
			if configured_system_restart_command is None and configured_environment_restart_command is not None:
				self._settings.global_set(["server", "commands", "systemRestartCommand"], configured_environment_restart_command)
			if configured_server_restart_command is None and configured_octoprint_restart_command is not None:
				self._settings.global_set(["server", "commands", "serverRestartCommand"], configured_octoprint_restart_command)

			# delete current plugin commands from config
			self._settings.set(["environment_restart_command"], None)
			self._settings.set(["octoprint_restart_command"], None)

		if current is None or current == 2:
			# No config version and config version 2 need the same fix, stripping
			# accidentally persisted data off the checks

			configured_checks = self._settings.get(["checks"], incl_defaults=False)
			if configured_checks is None:
				configured_checks = dict()

			check_keys = configured_checks.keys()

			# take care of the octoprint entry
			if "octoprint" in configured_checks:
				octoprint_check = dict(configured_checks["octoprint"])
				if "type" not in octoprint_check or octoprint_check["type"] != "github_commit":
					deletables=["current", "displayName", "displayVersion"]
				else:
					deletables=[]
				octoprint_check = self._clean_settings_check("octoprint", octoprint_check, self.get_settings_defaults()["checks"]["octoprint"], delete=deletables, save=False)
				check_keys.remove("octoprint")

			# and the hooks
			update_check_hooks = self._plugin_manager.get_hooks("octoprint.plugin.softwareupdate.check_config")
			for name, hook in update_check_hooks.items():
				try:
					hook_checks = hook()
				except:
					self._logger.exception("Error while retrieving update information from plugin {name}".format(**locals()))
				else:
					for key, data in hook_checks.items():
						if key in configured_checks:
							settings_check = dict(configured_checks[key])
							merged = dict_merge(data, settings_check)
							if "type" not in merged or merged["type"] != "github_commit":
								deletables = ["current", "displayVersion"]
							else:
								deletables = []

							self._clean_settings_check(key, settings_check, data, delete=deletables, save=False)
							check_keys.remove(key)

			# and anything that's left over we'll just remove now
			for key in check_keys:
				dummy_defaults = dict(plugins=dict())
				dummy_defaults["plugins"][self._identifier] = dict(checks=dict())
				dummy_defaults["plugins"][self._identifier]["checks"][key] = None
				self._settings.set(["checks", key], None, defaults=dummy_defaults)

		elif current == 1:
			# config version 1 had the error that the octoprint check got accidentally
			# included in checks["octoprint"], leading to recursion and hence to
			# yaml parser errors

			configured_checks = self._settings.get(["checks"], incl_defaults=False)
			if configured_checks is None:
				return

			if "octoprint" in configured_checks and "octoprint" in configured_checks["octoprint"]:
				# that's a circular reference, back to defaults
				dummy_defaults = dict(plugins=dict())
				dummy_defaults["plugins"][self._identifier] = dict(checks=dict())
				dummy_defaults["plugins"][self._identifier]["checks"]["octoprint"] = None
				self._settings.set(["checks", "octoprint"], None, defaults=dummy_defaults)
Example #29
	def _get_value(self, path, asdict=False, config=None, defaults=None, preprocessors=None, merged=False, incl_defaults=True, do_copy=True):
		if not path:
			raise NoSuchSettingsPath()

		if config is not None or defaults is not None:
			if config is None:
				config = self._config

			if defaults is None:
				defaults = dict(self._map.parents)

			# mappings: provided config + any intermediary parents + provided defaults + regular defaults
			mappings = [config] + self._overlay_maps + [defaults, self._default_map]
			chain = HierarchicalChainMap(*mappings)
		else:
			chain = self._map

		if preprocessors is None:
			preprocessors = self._get_preprocessors

		preprocessor = None
		try:
			preprocessor = self._get_by_path(path, preprocessors)
		except NoSuchSettingsPath:
			pass

		parent_path = path[:-1]
		last = path[-1]

		if not isinstance(last, (list, tuple)):
			keys = [last]
		else:
			keys = last

		if asdict:
			results = dict()
		else:
			results = list()

		for key in keys:
			try:
				value = chain.get_by_path(parent_path + [key], only_local=not incl_defaults)
			except KeyError:
				raise NoSuchSettingsPath()

			if isinstance(value, dict) and merged:
				try:
					default_value = chain.get_by_path(parent_path + [key], only_defaults=True)
					if default_value is not None:
						value = dict_merge(default_value, value)
				except KeyError:
					raise NoSuchSettingsPath()

			if preprocessors is not None:
				try:
					preprocessor = self._get_by_path(path, preprocessors)
				except:
					pass

				if callable(preprocessor):
					value = preprocessor(value)

			if do_copy:
				value = copy.deepcopy(value)

			if asdict:
				results[key] = value
			else:
				results.append(value)

		if not isinstance(last, (list, tuple)):
			if asdict:
				return results.values().pop()
			else:
				return results.pop()
		else:
			return results
Example #30
	def get_current_versions(self, check_targets=None, force=False):
		"""
		Retrieves the current version information for all defined check_targets. Will retrieve information for all
		available targets by default.

		:param check_targets: an iterable defining the targets to check, if not supplied defaults to all targets
		"""

		checks = self._get_configured_checks()
		if check_targets is None:
			check_targets = list(checks.keys())

		update_available = False
		update_possible = False
		information = dict()

		# we don't want to do the same work twice, so let's use a lock
		if self._get_versions_mutex.acquire(False):
			self._get_versions_data_ready.clear()
			try:
				futures_to_result = dict()
				online = self._connectivity_checker.check_immediately()
				self._logger.debug("Looks like we are {}".format("online" if online else "offline"))

				with futures.ThreadPoolExecutor(max_workers=5) as executor:
					for target, check in checks.items():
						if not target in check_targets:
							continue

						if not check:
							continue

						try:
							populated_check = self._populated_check(target, check)
							future = executor.submit(self._get_current_version, target, populated_check, force=force)
							futures_to_result[future] = (target, populated_check)
						except exceptions.UnknownCheckType:
							self._logger.warning("Unknown update check type for target {}: {}".format(target,
							                                                                       check.get("type",
							                                                                                 "<n/a>")))
							continue
						except Exception:
							self._logger.exception("Could not check {} for updates".format(target))
							continue

					for future in futures.as_completed(futures_to_result):

						target, populated_check = futures_to_result[future]
						if future.exception() is not None:
							self._logger.error("Could not check {} for updates, error: {!r}".format(target,
							                                                                        future.exception()))
							continue

						target_information, target_update_available, target_update_possible, target_online, target_error = future.result()
						target_update_possible = target_update_possible and self._environment_supported

						target_information = dict_merge(dict(local=dict(name="?", value="?"),
						                                     remote=dict(name="?", value="?",
						                                                 release_notes=None),
						                                     needs_online=True), target_information)

						update_available = update_available or target_update_available
						update_possible = update_possible or (target_update_possible and target_update_available)

						local_name = target_information["local"]["name"]
						local_value = target_information["local"]["value"]

						release_notes = None
						if target_information and target_information["remote"] and target_information["remote"][
							"value"]:
							if "release_notes" in populated_check and populated_check["release_notes"]:
								release_notes = populated_check["release_notes"]
							elif "release_notes" in target_information["remote"]:
								release_notes = target_information["remote"]["release_notes"]

							if release_notes:
								release_notes = release_notes.format(octoprint_version=VERSION,
								                                     target_name=target_information["remote"]["name"],
								                                     target_version=target_information["remote"]["value"])

						information[target] = dict(updateAvailable=target_update_available,
						                           updatePossible=target_update_possible,
						                           information=target_information,
						                           displayName=populated_check["displayName"],
						                           displayVersion=populated_check["displayVersion"].format(octoprint_version=VERSION,
						                                                                                   local_name=local_name,
						                                                                                   local_value=local_value),
						                           releaseNotes=release_notes,
						                           online=target_online,
						                           error=target_error)

						if target == "octoprint" and "released_version" in populated_check:
							information[target]["released_version"] = populated_check["released_version"]

				if self._version_cache_dirty:
					self._save_version_cache()

				self._get_versions_data = information, update_available, update_possible
				self._get_versions_data_ready.set()
			finally:
				self._get_versions_mutex.release()

		else: # something's already in progress, let's wait for it to complete and use its result
			self._get_versions_data_ready.wait()
			information, update_available, update_possible = self._get_versions_data

		return information, update_available, update_possible
Example #31
	def get(self, path, asdict=False, config=None, defaults=None, preprocessors=None, merged=False, incl_defaults=True):
		import octoprint.util as util

		if len(path) == 0:
			return None

		if config is None:
			config = self._config
		if defaults is None:
			defaults = default_settings
		if preprocessors is None:
			preprocessors = self._get_preprocessors

		while len(path) > 1:
			key = path.pop(0)
			if key in config and key in defaults:
				config = config[key]
				defaults = defaults[key]
			elif incl_defaults and key in defaults:
				config = {}
				defaults = defaults[key]
			else:
				return None

			if preprocessors and isinstance(preprocessors, dict) and key in preprocessors:
				preprocessors = preprocessors[key]


		k = path.pop(0)
		if not isinstance(k, (list, tuple)):
			keys = [k]
		else:
			keys = k

		if asdict:
			results = {}
		else:
			results = []
		for key in keys:
			if key in config:
				value = config[key]
				if merged and key in defaults:
					value = util.dict_merge(defaults[key], value)
			elif incl_defaults and key in defaults:
				value = defaults[key]
			else:
				value = None

			if preprocessors and isinstance(preprocessors, dict) and key in preprocessors and callable(preprocessors[key]):
				value = preprocessors[key](value)

			if asdict:
				results[key] = value
			else:
				results.append(value)

		if not isinstance(k, (list, tuple)):
			if asdict:
				return results.values().pop()
			else:
				return results.pop()
		else:
			return results
Example #32
    def _initLogging(self, debug, logConf=None):
        defaultConfig = {
            "version": 1,
            "formatters": {
                "simple": {
                    "format":
                    "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
                }
            },
            "handlers": {
                "console": {
                    "class": "logging.StreamHandler",
                    "level": "DEBUG",
                    "formatter": "simple",
                    "stream": "ext://sys.stdout"
                },
                "file": {
                    "class":
                    "logging.handlers.TimedRotatingFileHandler",
                    "level":
                    "DEBUG",
                    "formatter":
                    "simple",
                    "when":
                    "D",
                    "backupCount":
                    5,
                    "filename":
                    os.path.join(settings().getBaseFolder("logs"),
                                 "astrobox.log")
                },
                "serialFile": {
                    "class":
                    "logging.handlers.RotatingFileHandler",
                    "level":
                    "DEBUG",
                    "formatter":
                    "simple",
                    "maxBytes":
                    2 * 1024 *
                    1024,  # let's limit the serial log to 2MB in size
                    "filename":
                    os.path.join(settings().getBaseFolder("logs"),
                                 "serial.log")
                }
            },
            "loggers": {
                "SERIAL": {
                    "level": "CRITICAL",
                    "handlers": ["serialFile"],
                    "propagate": False
                }
            },
            "root": {
                "level": "INFO",
                "handlers": ["console", "file"]
            }
        }

        if debug:
            defaultConfig["root"]["level"] = "DEBUG"

        if logConf is None:
            logConf = os.path.join(settings().settings_dir, "logging.yaml")

        configFromFile = {}
        if os.path.exists(logConf) and os.path.isfile(logConf):
            import yaml
            with open(logConf, "r") as f:
                configFromFile = yaml.safe_load(f)

        config = util.dict_merge(defaultConfig, configFromFile)
        logging.config.dictConfig(config)

        if settings().getBoolean(["serial", "log"]):
            # enable debug logging to serial.log
            serialLogger = logging.getLogger("SERIAL")
            serialLogger.setLevel(logging.DEBUG)
            serialLogger.debug("Enabling serial logging")
Example #33
    def on_settings_migrate(self, target, current=None):
        if current is None or current == 2:
            # there might be some left over data from the time we still persisted everything to settings,
            # even the stuff that shouldn't be persisted but always provided by the hook - let's
            # clean up

            configured_checks = self._settings.get(["checks"],
                                                   incl_defaults=False)
            if configured_checks is None:
                configured_checks = dict()

            check_keys = configured_checks.keys()

            # take care of the octoprint entry
            if "octoprint" in configured_checks:
                octoprint_check = dict(configured_checks["octoprint"])
                if "type" not in octoprint_check or octoprint_check[
                        "type"] != "github_commit":
                    deletables = ["current", "displayName", "displayVersion"]
                else:
                    deletables = []
                octoprint_check = self._clean_settings_check(
                    "octoprint",
                    octoprint_check,
                    self.get_settings_defaults()["checks"]["octoprint"],
                    delete=deletables,
                    save=False)
                check_keys.remove("octoprint")

            # and the hooks
            update_check_hooks = self._plugin_manager.get_hooks(
                "octoprint.plugin.softwareupdate.check_config")
            for name, hook in update_check_hooks.items():
                try:
                    hook_checks = hook()
                except Exception:
                    self._logger.exception(
                        "Error while retrieving update information from plugin {name}"
                        .format(**locals()))
                else:
                    for key, data in hook_checks.items():
                        if key in configured_checks:
                            settings_check = dict(configured_checks[key])
                            merged = dict_merge(data, settings_check)
                            if "type" not in merged or merged[
                                    "type"] != "github_commit":
                                deletables = ["current", "displayVersion"]
                            else:
                                deletables = []

                            self._clean_settings_check(key,
                                                       settings_check,
                                                       data,
                                                       delete=deletables,
                                                       save=False)
                            check_keys.remove(key)

            # and anything that's left over we'll just remove now
            for key in check_keys:
                dummy_defaults = dict(plugins=dict())
                dummy_defaults["plugins"][self._identifier] = dict(
                    checks=dict())
                dummy_defaults["plugins"][
                    self._identifier]["checks"][key] = None
                self._settings.set(["checks", key],
                                   None,
                                   defaults=dummy_defaults)

        elif current == 1:
            configured_checks = self._settings.get(["checks"],
                                                   incl_defaults=False)
            if configured_checks is None:
                return

            if "octoprint" in configured_checks and "octoprint" in configured_checks[
                    "octoprint"]:
                # that's a circular reference, back to defaults
                dummy_defaults = dict(plugins=dict())
                dummy_defaults["plugins"][self._identifier] = dict(
                    checks=dict())
                dummy_defaults["plugins"][
                    self._identifier]["checks"]["octoprint"] = None
                self._settings.set(["checks", "octoprint"],
                                   None,
                                   defaults=dummy_defaults)
                self._settings.save()
示例#35
0
    def _do_analysis(self, high_priority=False):
        import sys

        import sarge

        if self._current.analysis and all(
                map(
                    lambda x: x in self._current.analysis,
                    ("printingArea", "dimensions", "estimatedPrintTime",
                     "filament"),
                )):
            return self._current.analysis

        try:
            throttle = (settings().getFloat(["gcodeAnalysis", "throttle_highprio"])
                        if high_priority
                        else settings().getFloat(["gcodeAnalysis", "throttle_normalprio"]))
            throttle_lines = settings().getInt(
                ["gcodeAnalysis", "throttle_lines"])
            max_extruders = settings().getInt(
                ["gcodeAnalysis", "maxExtruders"])
            g90_extruder = settings().getBoolean(
                ["feature", "g90InfluencesExtruder"])
            bed_z = settings().getFloat(["gcodeAnalysis", "bedZ"])
            speedx = self._current.printer_profile["axes"]["x"]["speed"]
            speedy = self._current.printer_profile["axes"]["y"]["speed"]
            offsets = self._current.printer_profile["extruder"]["offsets"]

            command = [
                sys.executable,
                "-m",
                "octoprint",
                "analysis",
                "gcode",
                f"--speed-x={speedx}",
                f"--speed-y={speedy}",
                f"--max-t={max_extruders}",
                f"--throttle={throttle}",
                f"--throttle-lines={throttle_lines}",
                f"--bed-z={bed_z}",
            ]
            for offset in offsets[1:]:
                command += ["--offset", str(offset[0]), str(offset[1])]
            if g90_extruder:
                command += ["--g90-extruder"]
            command.append(self._current.absolute_path)

            self._logger.info("Invoking analysis command: {}".format(
                " ".join(command)))

            self._aborted = False
            p = sarge.run(command,
                          close_fds=CLOSE_FDS,
                          async_=True,
                          stdout=sarge.Capture())

            while len(p.commands) == 0:
                # somewhat ugly... we can't use wait_events because
                # the events might not be all set if an exception
                # by sarge is triggered within the async process
                # thread
                time.sleep(0.01)

            # by now we should have a command, let's wait for its
            # process to have been prepared
            p.commands[0].process_ready.wait()

            if not p.commands[0].process:
                # the process might have been set to None in case of any exception
                raise RuntimeError(
                    "Error while trying to run command {}".format(
                        " ".join(command)))

            try:
                # let's wait for stuff to finish
                while p.returncode is None:
                    if self._aborted:
                        # oh, we shall abort, let's do so!
                        p.commands[0].terminate()
                        raise AnalysisAborted(reenqueue=self._reenqueue)

                    # else continue
                    p.commands[0].poll()
            finally:
                p.close()

            output = p.stdout.text
            self._logger.debug(f"Got output: {output!r}")

            result = {}
            if "ERROR:" in output:
                _, error = output.split("ERROR:")
                raise RuntimeError(error.strip())
            elif "EMPTY:" in output:
                self._logger.info("Result is empty, no extrusions found")
                result = copy.deepcopy(EMPTY_RESULT)
            elif "RESULTS:" not in output:
                raise RuntimeError("No analysis result found")
            else:
                _, output = output.split("RESULTS:")
                analysis = yaml.load_from_file(file=output)

                result["printingArea"] = analysis["printing_area"]
                result["dimensions"] = analysis["dimensions"]
                if analysis["total_time"]:
                    result["estimatedPrintTime"] = analysis["total_time"] * 60
                if analysis["extrusion_length"]:
                    result["filament"] = {}
                    for i in range(len(analysis["extrusion_length"])):
                        result["filament"]["tool%d" % i] = {
                            "length": analysis["extrusion_length"][i],
                            "volume": analysis["extrusion_volume"][i],
                        }

            if self._current.analysis and isinstance(self._current.analysis,
                                                     dict):
                return dict_merge(result, self._current.analysis)
            else:
                return result
        finally:
            self._gcode = None
示例#36
0
def init_logging(settings,
                 use_logging_file=True,
                 logging_file=None,
                 default_config=None,
                 debug=False,
                 verbosity=0,
                 uncaught_logger=None,
                 uncaught_handler=None):
    """Sets up logging."""

    import os

    from octoprint.util import dict_merge

    # default logging configuration
    if default_config is None:
        default_config = {
            "version": 1,
            "formatters": {
                "simple": {
                    "format":
                    "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
                },
                "serial": {
                    "format": "%(asctime)s - %(message)s"
                }
            },
            "handlers": {
                "console": {
                    "class": "logging.StreamHandler",
                    "level": "DEBUG",
                    "formatter": "simple",
                    "stream": "ext://sys.stdout"
                },
                "file": {
                    "class":
                    "octoprint.logging.handlers.OctoPrintLogHandler",
                    "level":
                    "DEBUG",
                    "formatter":
                    "simple",
                    "when":
                    "D",
                    "backupCount":
                    6,
                    "filename":
                    os.path.join(settings.getBaseFolder("logs"),
                                 "octoprint.log")
                },
                "serialFile": {
                    "class":
                    "octoprint.logging.handlers.SerialLogHandler",
                    "level":
                    "DEBUG",
                    "formatter":
                    "serial",
                    "backupCount":
                    3,
                    "filename":
                    os.path.join(settings.getBaseFolder("logs"), "serial.log")
                }
            },
            "loggers": {
                "SERIAL": {
                    "level": "CRITICAL",
                    "handlers": ["serialFile"],
                    "propagate": False
                },
                "octoprint": {
                    "level": "INFO"
                },
                "octoprint.util": {
                    "level": "INFO"
                },
                "octoprint.plugins": {
                    "level": "INFO"
                }
            },
            "root": {
                "level": "WARN",
                "handlers": ["console", "file"]
            }
        }

    if debug or verbosity > 0:
        default_config["loggers"]["octoprint"]["level"] = "DEBUG"
        default_config["root"]["level"] = "INFO"
    if verbosity > 1:
        default_config["loggers"]["octoprint.plugins"]["level"] = "DEBUG"
    if verbosity > 2:
        default_config["root"]["level"] = "DEBUG"

    config = default_config
    if use_logging_file:
        # further logging configuration from file...
        if logging_file is None:
            logging_file = os.path.join(settings.getBaseFolder("base"),
                                        "logging.yaml")

        config_from_file = {}
        if os.path.exists(logging_file) and os.path.isfile(logging_file):
            import yaml
            with open(logging_file, "r") as f:
                config_from_file = yaml.safe_load(f)

        # we merge that with the default config
        if config_from_file is not None and isinstance(config_from_file, dict):
            config = dict_merge(default_config, config_from_file)

    # configure logging globally
    return set_logging_config(config, debug, verbosity, uncaught_logger,
                              uncaught_handler)
示例#37
0
    def get_current_versions(self, check_targets=None, force=False):
        """
		Retrieves the current version information for all defined check_targets. Will retrieve information for all
		available targets by default.

		:param check_targets: an iterable defining the targets to check, if not supplied defaults to all targets
		"""

        checks = self._get_configured_checks()
        if check_targets is None:
            check_targets = checks.keys()

        update_available = False
        update_possible = False
        information = dict()

        for target, check in checks.items():
            if not target in check_targets:
                continue

            try:
                populated_check = self._populated_check(target, check)
                target_information, target_update_available, target_update_possible = self._get_current_version(
                    target, populated_check, force=force)
                if target_information is None:
                    target_information = dict()
            except exceptions.UnknownCheckType:
                self._logger.warning(
                    "Unknown update check type for target {}: {}".format(
                        target, check.get("type", "<n/a>")))
                continue

            target_information = dict_merge(
                dict(local=dict(name="unknown", value="unknown"),
                     remote=dict(name="unknown",
                                 value="unknown",
                                 release_notes=None)), target_information)

            update_available = update_available or target_update_available
            update_possible = update_possible or (target_update_possible
                                                  and target_update_available)

            local_name = target_information["local"]["name"]
            local_value = target_information["local"]["value"]

            release_notes = None
            if target_information and target_information[
                    "remote"] and target_information["remote"]["value"]:
                if "release_notes" in populated_check and populated_check[
                        "release_notes"]:
                    release_notes = populated_check["release_notes"]
                elif "release_notes" in target_information["remote"]:
                    release_notes = target_information["remote"][
                        "release_notes"]

                if release_notes:
                    release_notes = release_notes.format(
                        octoprint_version=VERSION,
                        target_name=target_information["remote"]["name"],
                        target_version=target_information["remote"]["value"])

            information[target] = dict(
                updateAvailable=target_update_available,
                updatePossible=target_update_possible,
                information=target_information,
                displayName=populated_check["displayName"],
                displayVersion=populated_check["displayVersion"].format(
                    octoprint_version=VERSION,
                    local_name=local_name,
                    local_value=local_value),
                check=populated_check,
                releaseNotes=release_notes)

        if self._version_cache_dirty:
            self._save_version_cache()
        return information, update_available, update_possible
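
A small hypothetical illustration of the placeholder merge above: a version check may return only partial information, and dict_merge lays that partial result over the "unknown" placeholders so the rest of the method can rely on local/remote name and value always being present. The data below is illustrative only.

from octoprint.util import dict_merge

placeholder = dict(local=dict(name="unknown", value="unknown"),
                   remote=dict(name="unknown", value="unknown", release_notes=None))

# hypothetical partial result from a check that only knows about the remote side
target_information = dict(remote=dict(name="1.4.2", value="1.4.2"))

merged = dict_merge(placeholder, target_information)
# merged["remote"] -> {"name": "1.4.2", "value": "1.4.2", "release_notes": None}
# merged["local"]  -> {"name": "unknown", "value": "unknown"}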
示例#38
0
try:
	import octoprint_setuptools
except:
	print("Could not import OctoPrint's setuptools, are you sure you are running that under "
	      "the same python installation that OctoPrint is installed under?")
	import sys
	sys.exit(-1)

setup_parameters = octoprint_setuptools.create_plugin_setup_parameters(
	identifier=plugin_identifier,
	package=plugin_package,
	name=plugin_name,
	version=plugin_version,
	description=plugin_description,
	author=plugin_author,
	mail=plugin_author_email,
	url=plugin_url,
	license=plugin_license,
	requires=plugin_requires,
	additional_packages=plugin_additional_packages,
	ignored_packages=plugin_ignored_packages,
	additional_data=plugin_additional_data
)

if len(additional_setup_parameters):
	from octoprint.util import dict_merge
	setup_parameters = dict_merge(setup_parameters, additional_setup_parameters)

setup(**setup_parameters)
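
The additional_setup_parameters merge at the end follows the same pattern: any extra setup() keywords declared by the plugin win over the generated defaults for overlapping keys, and non-dict values are replaced outright rather than combined. A purely hypothetical illustration; the parameter values below are examples, not taken from the snippet.

from octoprint.util import dict_merge

# simplified, illustrative stand-in for what create_plugin_setup_parameters() returns
setup_parameters = {"name": "Example Plugin", "install_requires": ["OctoPrint"]}

# hypothetical extras a plugin author might declare further up in setup.py
additional_setup_parameters = {"python_requires": ">=3.7,<4",
                               "install_requires": ["OctoPrint", "requests"]}

setup_parameters = dict_merge(setup_parameters, additional_setup_parameters)
# setup_parameters["install_requires"] -> ["OctoPrint", "requests"]  (replaced, not concatenated)
# setup_parameters["python_requires"]  -> ">=3.7,<4"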
示例#39
0
def init_logging(settings, use_logging_file=True, logging_file=None, default_config=None, debug=False, verbosity=0, uncaught_logger=None, uncaught_handler=None):
	"""Sets up logging."""

	import os

	from octoprint.util import dict_merge

	# default logging configuration
	if default_config is None:
		default_config = {
			"version": 1,
			"formatters": {
				"simple": {
					"format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
				},
				"serial": {
					"format": "%(asctime)s - %(message)s"
				}
			},
			"handlers": {
				"console": {
					"class": "logging.StreamHandler",
					"level": "DEBUG",
					"formatter": "simple",
					"stream": "ext://sys.stdout"
				},
				"file": {
					"class": "octoprint.logging.handlers.OctoPrintLogHandler",
					"level": "DEBUG",
					"formatter": "simple",
					"when": "D",
					"backupCount": 6,
					"filename": os.path.join(settings.getBaseFolder("logs"), "octoprint.log")
				},
				"serialFile": {
					"class": "octoprint.logging.handlers.SerialLogHandler",
					"level": "DEBUG",
					"formatter": "serial",
					"backupCount": 3,
					"filename": os.path.join(settings.getBaseFolder("logs"), "serial.log")
				}
			},
			"loggers": {
				"SERIAL": {
					"level": "CRITICAL",
					"handlers": ["serialFile"],
					"propagate": False
				},
				"octoprint": {
					"level": "INFO"
				},
				"octoprint.util": {
					"level": "INFO"
				},
				"octoprint.plugins": {
					"level": "INFO"
				}
			},
			"root": {
				"level": "WARN",
				"handlers": ["console", "file"]
			}
		}

	if debug or verbosity > 0:
		default_config["loggers"]["octoprint"]["level"] = "DEBUG"
		default_config["root"]["level"] = "INFO"
	if verbosity > 1:
		default_config["loggers"]["octoprint.plugins"]["level"] = "DEBUG"
	if verbosity > 2:
		default_config["root"]["level"] = "DEBUG"

	config = default_config
	if use_logging_file:
		# further logging configuration from file...
		if logging_file is None:
			logging_file = os.path.join(settings.getBaseFolder("base"), "logging.yaml")

		config_from_file = {}
		if os.path.exists(logging_file) and os.path.isfile(logging_file):
			import yaml
			with open(logging_file, "r") as f:
				config_from_file = yaml.safe_load(f)

		# we merge that with the default config
		if config_from_file is not None and isinstance(config_from_file, dict):
			config = dict_merge(default_config, config_from_file)

	# configure logging globally
	return set_logging_config(config, debug, verbosity, uncaught_logger, uncaught_handler)
示例#40
0
	def get(self, path, asdict=False, config=None, defaults=None, preprocessors=None, merged=False, incl_defaults=True):
		import octoprint.util as util

		if len(path) == 0:
			return None

		if config is None:
			config = self._config
		if defaults is None:
			defaults = default_settings
		if preprocessors is None:
			preprocessors = self._get_preprocessors

		while len(path) > 1:
			key = path.pop(0)
			if key in config and key in defaults:
				config = config[key]
				defaults = defaults[key]
			elif incl_defaults and key in defaults:
				config = {}
				defaults = defaults[key]
			else:
				return None

			if preprocessors and isinstance(preprocessors, dict) and key in preprocessors:
				preprocessors = preprocessors[key]


		k = path.pop(0)
		if not isinstance(k, (list, tuple)):
			keys = [k]
		else:
			keys = k

		if asdict:
			results = {}
		else:
			results = []
		for key in keys:
			if key in config:
				value = config[key]
				if merged and key in defaults:
					value = util.dict_merge(defaults[key], value)
			elif incl_defaults and key in defaults:
				value = defaults[key]
			else:
				value = None

			if preprocessors and isinstance(preprocessors, dict) and key in preprocessors and callable(preprocessors[key]):
				value = preprocessors[key](value)

			if asdict:
				results[key] = value
			else:
				results.append(value)

		if not isinstance(k, (list, tuple)):
			if asdict:
				return list(results.values()).pop()
			else:
				return results.pop()
		else:
			return results
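
A standalone sketch of the merged=True branch above, using illustrative data instead of the real default_settings: when the stored config only partially overrides a subtree, dict_merge folds the remaining defaults back into the returned value.

import octoprint.util as util

# illustrative stand-ins for self._config and default_settings
config = {"serial": {"port": "/dev/ttyUSB0"}}
defaults = {"serial": {"port": None, "baudrate": 115200, "autoconnect": False}}

# what get(["serial"], merged=True) would compute for this data
value = util.dict_merge(defaults["serial"], config["serial"])
# value -> {"port": "/dev/ttyUSB0", "baudrate": 115200, "autoconnect": False}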
示例#41
0
try:
	import octoprint_setuptools
except:
	print("Could not import OctoPrint's setuptools, are you sure you are running that under "
	      "the same python installation that OctoPrint is installed under?")
	import sys
	sys.exit(-1)

setup_parameters = octoprint_setuptools.create_plugin_setup_parameters(
	identifier=plugin_identifier,
	package=plugin_package,
	name=plugin_name,
	version=plugin_version,
	description=plugin_description,
	author=plugin_author,
	mail=plugin_author_email,
	url=plugin_url,
	license=plugin_license,
	requires=plugin_requires,
	additional_packages=plugin_additional_packages,
	ignored_packages=plugin_ignored_packages,
	additional_data=plugin_additional_data
)

if len(additional_setup_parameters):
	from octoprint.util import dict_merge
	setup_parameters = dict_merge(setup_parameters, additional_setup_parameters)

setup(**setup_parameters)
示例#42
0
    def on_settings_migrate(self, target, current=None):

        if current is None or current < 4:
            # config version 4 and higher moves octoprint_restart_command and
            # environment_restart_command to the core configuration

            # current plugin commands
            configured_octoprint_restart_command = self._settings.get(
                ["octoprint_restart_command"])
            configured_environment_restart_command = self._settings.get(
                ["environment_restart_command"])

            # current global commands
            configured_system_restart_command = self._settings.global_get(
                ["server", "commands", "systemRestartCommand"])
            configured_server_restart_command = self._settings.global_get(
                ["server", "commands", "serverRestartCommand"])

            # only set global commands if they are not yet set
            if configured_system_restart_command is None and configured_environment_restart_command is not None:
                self._settings.global_set(
                    ["server", "commands", "systemRestartCommand"],
                    configured_environment_restart_command)
            if configured_server_restart_command is None and configured_octoprint_restart_command is not None:
                self._settings.global_set(
                    ["server", "commands", "serverRestartCommand"],
                    configured_octoprint_restart_command)

            # delete current plugin commands from config
            self._settings.set(["environment_restart_command"], None)
            self._settings.set(["octoprint_restart_command"], None)

        if current is None or current == 2:
            # No config version and config version 2 need the same fix, stripping
            # accidentally persisted data off the checks

            configured_checks = self._settings.get(["checks"],
                                                   incl_defaults=False)
            if configured_checks is None:
                configured_checks = dict()

            check_keys = list(configured_checks.keys())  # copy so handled entries can be removed below

            # take care of the octoprint entry
            if "octoprint" in configured_checks:
                octoprint_check = dict(configured_checks["octoprint"])
                if "type" not in octoprint_check or octoprint_check[
                        "type"] != "github_commit":
                    deletables = ["current", "displayName", "displayVersion"]
                else:
                    deletables = []
                octoprint_check = self._clean_settings_check(
                    "octoprint",
                    octoprint_check,
                    self.get_settings_defaults()["checks"]["octoprint"],
                    delete=deletables,
                    save=False)
                check_keys.remove("octoprint")

            # and the hooks
            update_check_hooks = self._plugin_manager.get_hooks(
                "octoprint.plugin.softwareupdate.check_config")
            for name, hook in update_check_hooks.items():
                try:
                    hook_checks = hook()
                except Exception:
                    self._logger.exception(
                        "Error while retrieving update information from plugin {name}"
                        .format(**locals()))
                else:
                    for key, data in hook_checks.items():
                        if key in configured_checks:
                            settings_check = dict(configured_checks[key])
                            merged = dict_merge(data, settings_check)
                            if "type" not in merged or merged[
                                    "type"] != "github_commit":
                                deletables = ["current", "displayVersion"]
                            else:
                                deletables = []

                            self._clean_settings_check(key,
                                                       settings_check,
                                                       data,
                                                       delete=deletables,
                                                       save=False)
                            check_keys.remove(key)

            # and anything that's left over we'll just remove now
            for key in check_keys:
                dummy_defaults = dict(plugins=dict())
                dummy_defaults["plugins"][self._identifier] = dict(
                    checks=dict())
                dummy_defaults["plugins"][
                    self._identifier]["checks"][key] = None
                self._settings.set(["checks", key],
                                   None,
                                   defaults=dummy_defaults)

        elif current == 1:
            # config version 1 had the error that the octoprint check got accidentally
            # included in checks["octoprint"], leading to recursion and hence to
            # yaml parser errors

            configured_checks = self._settings.get(["checks"],
                                                   incl_defaults=False)
            if configured_checks is None:
                return

            if "octoprint" in configured_checks and "octoprint" in configured_checks[
                    "octoprint"]:
                # that's a circular reference, back to defaults
                dummy_defaults = dict(plugins=dict())
                dummy_defaults["plugins"][self._identifier] = dict(
                    checks=dict())
                dummy_defaults["plugins"][
                    self._identifier]["checks"]["octoprint"] = None
                self._settings.set(["checks", "octoprint"],
                                   None,
                                   defaults=dummy_defaults)
示例#43
0
def migrate_none_to_one(settings):
    new_settings = {
        "strip": {
            "count": settings.get_int(["led_count"]),
            "pin": settings.get_int(["led_pin"]),
            "freq_hz": settings.get_int(["led_freq_hz"]),
            "dma": settings.get_int(["led_dma"]),
            "invert": settings.get_boolean(["led_invert"]),
            "channel": settings.get_int(["led_channel"]),
            "reverse": settings.get_boolean(["reverse"]),
            "type": settings.get(["strip_type"]),
            "brightness": settings.get(["brightness"]),
        },
        "effects": {
            "startup": {
                "enabled": settings.get_boolean(["startup_enabled"]),
                "effect": settings.get(["startup_effect"]),
                "color": settings.get(["startup_color"]),
                "delay": settings.get(["startup_delay"]),
            },
            "idle": {
                "enabled": settings.get_boolean(["idle_enabled"]),
                "effect": settings.get(["idle_effect"]),
                "color": settings.get(["idle_color"]),
                "delay": settings.get(["idle_delay"]),
            },
            "disconnected": {
                "enabled": settings.get_boolean(["disconnected_enabled"]),
                "effect": settings.get(["disconnected_effect"]),
                "color": settings.get(["disconnected_color"]),
                "delay": settings.get(["disconnected_delay"]),
            },
            "failed": {
                "enabled": settings.get_boolean(["failed_enabled"]),
                "effect": settings.get(["failed_effect"]),
                "color": settings.get(["failed_color"]),
                "delay": settings.get(["failed_delay"]),
            },
            "success": {
                "enabled": settings.get_boolean(["success_enabled"]),
                "effect": settings.get(["success_effect"]),
                "color": settings.get(["success_color"]),
                "delay": settings.get(["success_delay"]),
                "return_to_idle": settings.get(["success_return_idle"]),
            },
            "paused": {
                "enabled": settings.get_boolean(["paused_enabled"]),
                "effect": settings.get(["paused_effect"]),
                "color": settings.get(["paused_color"]),
                "delay": settings.get(["paused_delay"]),
            },
            "printing": {
                "enabled": settings.get_boolean(["printing_enabled"]),
                "effect": settings.get(["printing_effect"]),
                "color": settings.get(["printing_color"]),
                "delay": settings.get(["printing_delay"]),
            },
            "torch": {
                "enabled": settings.get_boolean(["torch_enabled"]),
                "effect": settings.get(["torch_effect"]),
                "color": settings.get(["torch_color"]),
                "delay": settings.get(["torch_delay"]),
                "toggle": settings.get(["torch_toggle"]),
                "timer": settings.get_int(["torch_timer"]),
            },
            "progress_print": {
                "enabled": settings.get_boolean(["progress_print_enabled"]),
                "base": settings.get(["progress_print_color_base"]),
                "color": settings.get(["progress_print_color"]),
            },
            "progress_heatup": {
                "enabled":
                settings.get_boolean(["progress_heatup_enabled"]),
                "base":
                settings.get(["progress_heatup_color_base"]),
                "color":
                settings.get(["progress_heatup_color"]),
                "tool_enabled":
                settings.get_boolean(["progress_heatup_tool_enabled"]),
                "bed_enabled":
                settings.get_boolean(["progress_heatup_bed_enabled"]),
                "tool_key":
                settings.get_int(["progress_heatup_tool_key"]),
            },
            "progress_cooling": {
                "enabled": settings.get_boolean(["progress_cooling_enabled"]),
                "base": settings.get(["progress_cooling_color_base"]),
                "color": settings.get(["progress_cooling_color"]),
                "bed_or_tool": settings.get(["progress_cooling_bed_or_tool"]),
                "threshold": settings.get_int(["progress_cooling_threshold"]),
            },
        },
        "active_times": {
            "enabled": settings.get(["active_hours_enabled"]),
            "start": settings.get(["active_hours_start"]),
            "end": settings.get(["active_hours_end"]),
        },
    }
    # Filter out None values
    filtered = filter_none(new_settings)
    # Merge with default settings that were not set
    result = dict_merge(defaults, filtered)
    # SAVE!
    settings.global_set(["plugins", "ws281x_led_status"], result)