async def handle_critical_repositories(self):
    """Handle critical repositories during runtime.

    Fetches the "critical" list from the data repository, uninstalls any
    installed repository that is marked critical, persists the updated
    acknowledgement state, and restarts Home Assistant if anything was
    uninstalled.
    """
    critical_queue = QueueManager()
    instored = []
    critical = []
    was_installed = False

    # Best effort: if the critical list cannot be fetched, treat it as empty.
    try:
        critical = await self.data_repo.get_contents("critical")
        critical = json.loads(critical.content)
    except AIOGitHubAPIException:
        pass

    if not critical:
        self.logger.debug("No critical repositories")
        return

    stored_critical = await async_load_from_store(self.hass, "critical")
    for stored in stored_critical or []:
        instored.append(stored["repository"])
    stored_critical = []

    for repository in critical:
        removed_repo = get_removed(repository["repository"])
        removed_repo.removal_type = "critical"
        repo = self.get_by_name(repository["repository"])

        stored = {
            "repository": repository["repository"],
            "reason": repository["reason"],
            "link": repository["link"],
            "acknowledged": True,
        }
        if repository["repository"] not in instored:
            if repo is not None and repo.installed:
                self.logger.critical(
                    f"Removing repository {repository['repository']}, it is marked as critical"
                )
                was_installed = True
                stored["acknowledged"] = False

                # Remove from HACS.
                # BUG FIX: was `repository.uninstall()` — `repository` is the
                # JSON dict from the critical list and has no uninstall();
                # the repository object `repo` must be uninstalled.
                critical_queue.add(repo.uninstall())
                repo.remove()

        stored_critical.append(stored)
        removed_repo.update_data(stored)

    # Uninstall
    await critical_queue.execute()

    # Save to FS
    await async_save_to_store(self.hass, "critical", stored_critical)

    # Restart HASS
    if was_installed:
        self.logger.critical("Restarting Home Assistant")
        self.hass.async_create_task(self.hass.async_stop(100))
async def example():
    """Run the example: queue ten tasks and drain them in batches of five."""
    manager = QueueManager()
    for task_number in range(10):
        manager.add(exampletask(task_number))
    # Keep executing until nothing is left pending.
    while manager.has_pending_tasks:
        await manager.execute(5)
async def download_content(repository):
    """Download the content of a directory."""
    queue = QueueManager()
    contents = gather_files_to_download(repository)
    repository.logger.debug(repository.data.filename)

    if not contents:
        raise HacsException("No content to download")

    for content in contents:
        # When content lives in the repository root and a specific filename is
        # configured, skip everything except that file.
        skip = (
            repository.data.content_in_root
            and repository.data.filename
            and content.name != repository.data.filename
        )
        if skip:
            continue
        queue.add(dowload_repository_content(repository, content))

    await queue.execute()
def get_queue():
    """Lazily create and return the shared QueueManager instance."""
    queue = SHARE["queue"]
    if queue is None:
        # Deferred import — presumably to avoid a circular import at module
        # load time; TODO confirm.
        from queueman import QueueManager

        queue = QueueManager()
        SHARE["queue"] = queue
    return queue
async def download_zip_files(repository, validate):
    """Download ZIP archive from repository release.

    Finds the release whose tag matches the repository ref, queues a download
    for each of its assets, and records any failure on `validate.errors`.
    Returns the (possibly updated) `validate` object.
    """
    contents = []
    queue = QueueManager()
    try:
        for release in repository.releases.objects:
            repository.logger.info(
                f"ref: {repository.ref} --- tag: {release.tag_name}")
            # Ref is assumed to look like "tags/<tag>" — TODO confirm.
            if release.tag_name == repository.ref.split("/")[1]:
                contents = release.assets

        if not contents:
            return validate

        for content in contents or []:
            queue.add(async_download_zip_file(repository, content, validate))

        await queue.execute()
    # FIX: `(Exception, BaseException)` was redundant — Exception is a
    # subclass of BaseException, so catching BaseException alone is identical.
    except BaseException as exception:  # pylint: disable=broad-except
        validate.errors.append(f"Download was not completed [{exception}]")

    return validate
async def download_zip_files(self, validate):
    """Download ZIP archive from repository release.

    Finds the release whose tag matches the repository ref, queues a download
    for each of its assets, and records any failure on `validate.errors`.
    Returns the (possibly updated) `validate` object.
    """
    download_queue = QueueManager()
    try:
        contents = False
        for release in self.releases.objects:
            self.logger.info(f"ref: {self.ref} --- tag: {release.tag_name}.")
            # Ref is assumed to look like "tags/<tag>" — TODO confirm.
            if release.tag_name == self.ref.split("/")[1]:
                contents = release.assets

        if not contents:
            return validate

        for content in contents or []:
            download_queue.add(self.async_download_zip_file(content, validate))

        await download_queue.execute()
    # FIX: `(Exception, BaseException)` was redundant — catching
    # BaseException alone is identical.
    except BaseException:  # pylint: disable=broad-except
        # FIX: dropped useless `f` prefix (no placeholders); string unchanged.
        validate.errors.append("Download was not complete")

    return validate
async def test_everything():
    """Test everything."""
    manager = QueueManager()

    # Executing an empty queue is a no-op and leaves it idle.
    await manager.execute()
    assert not manager.running
    assert not manager.has_pending_tasks

    # Adding then clearing tasks.
    manager.add(dummy_task())
    assert manager.has_pending_tasks
    manager.clear()
    assert not manager.has_pending_tasks

    # execute() refuses to run while an execution is already in progress.
    manager.running = True
    with pytest.raises(QueueManagerExecutionStillInProgress):
        await manager.execute()
    manager.running = False

    dummy_sync_task()

    # Partial then full execution of queued tasks.
    manager.add(dummy_task())
    manager.add(dummy_task())
    await manager.execute(1)
    await manager.execute()
class Hacs:
    """The base class of HACS, nested throughout the project."""

    # NOTE: these are class attributes, shared by every instance.
    token = f"{uuid.uuid4()}-{uuid.uuid4()}"
    hacsweb = f"/hacsweb/{token}"
    hacsapi = f"/hacsapi/{token}"
    repositories = []
    frontend = HacsFrontend()
    repo = None
    data_repo = None
    developer = Developer()
    data = None
    configuration = None
    logger = Logger("hacs")
    github = None
    hass = None
    version = None
    session = None
    factory = HacsTaskFactory()
    queue = QueueManager()
    system = System()
    recuring_tasks = []
    common = HacsCommon()

    @staticmethod
    def init(hass, github_token):
        """Return an initialized HACS object."""
        # NOTE(review): both arguments are currently ignored — confirm intent.
        return Hacs()

    def get_by_id(self, repository_id):
        """Get repository by ID, or None when not found."""
        try:
            for repository in self.repositories:
                if repository.information.uid == repository_id:
                    return repository
        except Exception:  # pylint: disable=broad-except
            pass
        return None

    def get_by_name(self, repository_full_name):
        """Get repository by full_name (case-insensitive), or None."""
        try:
            for repository in self.repositories:
                if repository.data.full_name.lower() == repository_full_name.lower():
                    return repository
        except Exception:  # pylint: disable=broad-except
            pass
        return None

    def is_known(self, repository_full_name):
        """Return a bool if the repository is known."""
        return repository_full_name.lower() in [
            x.data.full_name.lower() for x in self.repositories
        ]

    @property
    def sorted_by_name(self):
        """Return a sorted(by name) list of repository objects."""
        return sorted(self.repositories, key=lambda x: x.display_name)

    @property
    def sorted_by_repository_name(self):
        """Return a sorted(by repository_name) list of repository objects."""
        return sorted(self.repositories, key=lambda x: x.data.full_name)

    async def register_repository(self, full_name, category, check=True):
        """Register a repository."""
        # BUG FIX: previously passed the literal True, silently ignoring the
        # caller's `check` argument.
        await register_repository(full_name, category, check=check)

    async def startup_tasks(self):
        """Tasks that are started after startup."""
        self.system.status.background_task = True
        await self.hass.async_add_executor_job(setup_extra_stores)
        self.hass.bus.async_fire("hacs/status", {})
        self.logger.debug(self.github.ratelimits.remaining)
        self.logger.debug(self.github.ratelimits.reset_utc)

        await self.handle_critical_repositories_startup()
        await self.handle_critical_repositories()
        await self.load_known_repositories()
        await self.clear_out_removed_repositories()

        # Schedule the recurring background tasks.
        self.recuring_tasks.append(
            async_track_time_interval(
                self.hass, self.recuring_tasks_installed, timedelta(minutes=30)
            )
        )
        self.recuring_tasks.append(
            async_track_time_interval(
                self.hass, self.recuring_tasks_all, timedelta(minutes=800)
            )
        )
        self.recuring_tasks.append(
            async_track_time_interval(
                self.hass, self.prosess_queue, timedelta(minutes=10)
            )
        )

        self.hass.bus.async_fire("hacs/reload", {"force": True})
        await self.recuring_tasks_installed()
        await self.prosess_queue()

        self.system.status.startup = False
        self.system.status.new = False
        self.system.status.background_task = False
        self.hass.bus.async_fire("hacs/status", {})
        await self.data.async_write()

    async def handle_critical_repositories_startup(self):
        """Handle critical repositories during startup."""
        alert = False
        critical = await async_load_from_store(self.hass, "critical")
        if not critical:
            return
        for repo in critical:
            if not repo["acknowledged"]:
                alert = True
        if alert:
            self.logger.critical("URGENT!: Check the HACS panel!")
            self.hass.components.persistent_notification.create(
                title="URGENT!", message="**Check the HACS panel!**"
            )

    async def handle_critical_repositories(self):
        """Handle critical repositories during runtime."""
        # Get critical repositories (best effort — empty on fetch failure).
        instored = []
        critical = []
        was_installed = False
        try:
            critical = await self.data_repo.get_contents("critical")
            critical = json.loads(critical.content)
        except AIOGitHubException:
            pass

        if not critical:
            self.logger.debug("No critical repositories")
            return

        stored_critical = await async_load_from_store(self.hass, "critical")
        for stored in stored_critical or []:
            instored.append(stored["repository"])
        stored_critical = []

        for repository in critical:
            removed_repo = get_removed(repository["repository"])
            removed_repo.removal_type = "critical"
            repo = self.get_by_name(repository["repository"])

            stored = {
                "repository": repository["repository"],
                "reason": repository["reason"],
                "link": repository["link"],
                "acknowledged": True,
            }
            if repository["repository"] not in instored:
                if repo is not None and repo.installed:
                    self.logger.critical(
                        f"Removing repository {repository['repository']}, it is marked as critical"
                    )
                    was_installed = True
                    stored["acknowledged"] = False
                    # Uninstall from HACS
                    repo.remove()
                    await repo.uninstall()

            stored_critical.append(stored)
            removed_repo.update_data(stored)

        # Save to FS
        await async_save_to_store(self.hass, "critical", stored_critical)

        # Restart HASS
        if was_installed:
            # FIX: corrected log-message typo ("Resarting").
            self.logger.critical("Restarting Home Assistant")
            self.hass.async_create_task(self.hass.async_stop(100))

    async def prosess_queue(self, notarealarg=None):
        """Process the pending tasks in the queue, rate-limit permitting."""
        if not self.queue.has_pending_tasks:
            self.logger.debug("Nothing in the queue")
            return
        if self.queue.running:
            self.logger.debug("Queue is already running")
            return

        can_update = await get_fetch_updates_for(self.github)
        if can_update == 0:
            self.logger.info(
                "HACS is ratelimited, repository updates will resume later."
            )
        else:
            self.system.status.background_task = True
            self.hass.bus.async_fire("hacs/status", {})
            await self.queue.execute(can_update)
            self.system.status.background_task = False
            self.hass.bus.async_fire("hacs/status", {})

    async def recuring_tasks_installed(self, notarealarg=None):
        """Recurring tasks for installed repositories."""
        self.logger.debug(
            "Starting recuring background task for installed repositories"
        )
        self.system.status.background_task = True
        self.hass.bus.async_fire("hacs/status", {})
        self.logger.debug(self.github.ratelimits.remaining)
        self.logger.debug(self.github.ratelimits.reset_utc)

        for repository in self.repositories:
            if (
                repository.status.installed
                and repository.data.category in self.common.categories
            ):
                self.queue.add(self.factory.safe_update(repository))

        await self.handle_critical_repositories()
        self.system.status.background_task = False
        self.hass.bus.async_fire("hacs/status", {})
        await self.data.async_write()
        self.logger.debug("Recuring background task for installed repositories done")

    async def recuring_tasks_all(self, notarealarg=None):
        """Recurring tasks for all repositories."""
        self.logger.debug("Starting recuring background task for all repositories")
        await self.hass.async_add_executor_job(setup_extra_stores)
        self.system.status.background_task = True
        self.hass.bus.async_fire("hacs/status", {})
        self.logger.debug(self.github.ratelimits.remaining)
        self.logger.debug(self.github.ratelimits.reset_utc)

        for repository in self.repositories:
            if repository.data.category in self.common.categories:
                self.queue.add(self.factory.safe_common_update(repository))

        await self.load_known_repositories()
        await self.clear_out_removed_repositories()
        self.system.status.background_task = False
        await self.data.async_write()
        self.hass.bus.async_fire("hacs/status", {})
        self.hass.bus.async_fire("hacs/repository", {"action": "reload"})
        self.logger.debug("Recuring background task for all repositories done")

    async def clear_out_removed_repositories(self):
        """Clear out blacklisted repositories."""
        need_to_save = False
        for removed in removed_repositories:
            if self.is_known(removed.repository):
                repository = self.get_by_name(removed.repository)
                # Installed, non-critical removals are only warned about;
                # everything else is removed from the registry.
                if repository.status.installed and removed.removal_type != "critical":
                    self.logger.warning(
                        f"You have {repository.data.full_name} installed with HACS "
                        "this repository has been removed, please consider removing it. "
                        f"Removal reason ({removed.removal_type})"
                    )
                else:
                    need_to_save = True
                    repository.remove()

        if need_to_save:
            await self.data.async_write()

    async def get_repositories(self):
        """Return a list of repositories."""
        repositories = {}
        for category in self.common.categories:
            repositories[category] = await get_default_repos_lists(
                self.session, self.configuration.token, category
            )
            org = await get_default_repos_orgs(self.github, category)
            for repo in org:
                repositories[category].append(repo)

        # Track every default repository exactly once.
        for category in repositories:
            for repo in repositories[category]:
                if repo not in self.common.default:
                    self.common.default.append(repo)
        return repositories

    async def load_known_repositories(self):
        """Load known repositories."""
        self.logger.info("Loading known repositories")
        repositories = await self.get_repositories()

        for item in await get_default_repos_lists(
            self.session, self.configuration.token, "removed"
        ):
            removed = get_removed(item["repository"])
            removed.reason = item.get("reason")
            removed.link = item.get("link")
            removed.removal_type = item.get("removal_type")

        for category in repositories:
            for repo in repositories[category]:
                if is_removed(repo):
                    continue
                if self.is_known(repo):
                    continue
                self.queue.add(self.factory.safe_register(repo, category))
def __init__(self):
    """Set up the logger, HACS handle, work queue, and content cache."""
    self.hacs = get_hacs()
    self.logger = getLogger("data")
    self.queue = QueueManager()
    self.content = {}
class HacsData:
    """HacsData class: persists and restores HACS state via the store files."""

    def __init__(self):
        """Initialize."""
        self.logger = getLogger("data")
        self.hacs = get_hacs()
        self.queue = QueueManager()
        self.content = {}

    async def async_write(self):
        """Write content to the store files."""
        # Skip while a background task runs or HACS is disabled.
        if self.hacs.system.status.background_task or self.hacs.system.disabled:
            return
        self.logger.debug("Saving data")

        # Hacs
        await async_save_to_store(
            self.hacs.hass,
            "hacs",
            {
                "view": self.hacs.configuration.frontend_mode,
                "compact": self.hacs.configuration.frontend_compact,
                "onboarding_done": self.hacs.configuration.onboarding_done,
            },
        )

        # Repositories
        self.content = {}
        for repository in self.hacs.repositories or []:
            self.queue.add(self.async_store_repository_data(repository))
        await self.queue.execute()
        await async_save_to_store(self.hacs.hass, "repositories", self.content)

        self.hacs.hass.bus.async_fire("hacs/repository", {})
        # FIX: was `bus.fire` — that is the thread-safe variant for code
        # running outside the event loop; inside a coroutine the async
        # API must be used.
        self.hacs.hass.bus.async_fire("hacs/config", {})

    async def async_store_repository_data(self, repository):
        """Serialize one repository into self.content (and its own store file)."""
        repository_manifest = repository.repository_manifest.manifest
        data = {
            "authors": repository.data.authors,
            "category": repository.data.category,
            "description": repository.data.description,
            "domain": repository.data.domain,
            "downloads": repository.data.downloads,
            "full_name": repository.data.full_name,
            "first_install": repository.status.first_install,
            "installed_commit": repository.data.installed_commit,
            "installed": repository.data.installed,
            "last_commit": repository.data.last_commit,
            "last_release_tag": repository.data.last_version,
            "last_updated": repository.data.last_updated,
            "name": repository.data.name,
            "new": repository.data.new,
            "repository_manifest": repository_manifest,
            "selected_tag": repository.data.selected_tag,
            "show_beta": repository.data.show_beta,
            "stars": repository.data.stargazers_count,
            "topics": repository.data.topics,
            "version_installed": repository.data.installed_version,
        }
        # NOTE: the previous `if data:` guard was removed — `data` is a
        # non-empty dict literal and therefore always truthy.
        if repository.data.installed and (
            repository.data.installed_commit or repository.data.installed_version
        ):
            # Installed repositories also get their own per-repo store file.
            await async_save_to_store(
                self.hacs.hass,
                f"hacs/{repository.data.id}.hacs",
                repository.data.to_json(),
            )
        self.content[str(repository.data.id)] = data

    async def restore(self):
        """Restore saved data. Return True on success (or fresh install)."""
        hacs = await async_load_from_store(self.hacs.hass, "hacs")
        repositories = await async_load_from_store(self.hacs.hass, "repositories")
        try:
            if not hacs and not repositories:
                # Assume new install
                self.hacs.system.status.new = True
                return True

            self.logger.info("Restore started")
            self.hacs.system.status.new = False

            # Hacs
            self.hacs.configuration.frontend_mode = hacs.get("view", "Grid")
            self.hacs.configuration.frontend_compact = hacs.get("compact", False)
            self.hacs.configuration.onboarding_done = hacs.get(
                "onboarding_done", False
            )

            # Repositories
            for entry in repositories or []:
                self.queue.add(
                    self.async_restore_repository(entry, repositories[entry])
                )
            await self.queue.execute()

            self.logger.info("Restore done")
        # FIX: `(Exception, BaseException)` was redundant — catching
        # BaseException alone is identical.
        except BaseException as exception:  # pylint: disable=broad-except
            self.logger.critical(f"[{exception}] Restore Failed!")
            return False
        return True

    async def async_restore_repository(self, entry, repository_data):
        """Restore one repository's attributes from its stored data."""
        if not self.hacs.is_known(entry):
            await register_repository(
                repository_data["full_name"], repository_data["category"], False
            )
        # Match by stored id first, falling back to full_name.
        repository = [
            x
            for x in self.hacs.repositories
            if str(x.data.id) == str(entry)
            or x.data.full_name == repository_data["full_name"]
        ]
        if not repository:
            self.logger.error(f"Did not find {repository_data['full_name']} ({entry})")
            return
        repository = repository[0]

        # Restore repository attributes
        repository.data.id = entry
        repository.data.authors = repository_data.get("authors", [])
        repository.data.description = repository_data.get("description")
        repository.releases.last_release_object_downloads = repository_data.get(
            "downloads"
        )
        repository.data.last_updated = repository_data.get("last_updated")
        repository.data.topics = repository_data.get("topics", [])
        repository.data.domain = repository_data.get("domain", None)
        repository.data.stargazers_count = repository_data.get("stars", 0)
        repository.releases.last_release = repository_data.get("last_release_tag")
        repository.data.hide = repository_data.get("hide", False)
        repository.data.installed = repository_data.get("installed", False)
        repository.data.new = repository_data.get("new", True)
        repository.data.selected_tag = repository_data.get("selected_tag")
        repository.data.show_beta = repository_data.get("show_beta", False)
        repository.data.last_version = repository_data.get("last_release_tag")
        repository.data.last_commit = repository_data.get("last_commit")
        repository.data.installed_version = repository_data.get("version_installed")
        repository.data.installed_commit = repository_data.get("installed_commit")
        repository.repository_manifest = HacsManifest.from_dict(
            repository_data.get("repository_manifest", {})
        )

        if repository.data.installed:
            repository.status.first_install = False

        # HACS itself is always treated as installed at the running VERSION.
        if repository_data["full_name"] == "hacs/integration":
            repository.data.installed_version = VERSION
            repository.data.installed = True

        restored = await async_load_from_store(self.hacs.hass, f"hacs/{entry}.hacs")
        if restored:
            repository.data.update_data(restored)
            if not repository.data.installed:
                repository.logger.debug(
                    "Should be installed but is not... Fixing that!"
                )
                repository.data.installed = True