diff --git a/supervisor/addons/__init__.py b/supervisor/addons/__init__.py index 8938064a1f5..c8b40519600 100644 --- a/supervisor/addons/__init__.py +++ b/supervisor/addons/__init__.py @@ -1 +1 @@ -"""Init file for Supervisor add-ons.""" +"""Init file for Supervisor apps.""" diff --git a/supervisor/addons/addon.py b/supervisor/addons/addon.py index f08391b5d60..6cf23c700c9 100644 --- a/supervisor/addons/addon.py +++ b/supervisor/addons/addon.py @@ -1,4 +1,4 @@ -"""Init file for Supervisor add-ons.""" +"""Init file for Supervisor apps.""" import asyncio from collections.abc import Awaitable @@ -51,30 +51,30 @@ ATTR_VERSION_TIMESTAMP, ATTR_WATCHDOG, DNS_SUFFIX, - AddonBoot, - AddonBootConfig, - AddonStartup, - AddonState, + AppBoot, + AppBootConfig, + AppStartup, + AppState, BusEvent, ) from ..coresys import CoreSys -from ..docker.addon import DockerAddon +from ..docker.addon import DockerApp from ..docker.const import ContainerState from ..docker.manager import ExecReturn from ..docker.monitor import DockerContainerStateEvent from ..docker.stats import DockerStats from ..exceptions import ( - AddonBackupMetadataInvalidError, - AddonBuildFailedUnknownError, - AddonConfigurationInvalidError, - AddonNotRunningError, - AddonNotSupportedError, - AddonNotSupportedWriteStdinError, - AddonPortConflict, - AddonPrePostBackupCommandReturnedError, - AddonsError, - AddonsJobError, - AddonUnknownError, + AppBackupMetadataInvalidError, + AppBuildFailedUnknownError, + AppConfigurationInvalidError, + AppNotRunningError, + AppNotSupportedError, + AppNotSupportedWriteStdinError, + AppPortConflict, + AppPrePostBackupCommandReturnedError, + AppsError, + AppsJobError, + AppUnknownError, BackupInvalidError, BackupRestoreUnknownError, ConfigurationFileError, @@ -83,7 +83,7 @@ DockerError, DockerRegistryAuthError, HostAppArmorError, - StoreAddonNotFoundError, + StoreAppNotFoundError, ) from ..hardware.data import Device from ..homeassistant.const import WSEvent @@ -91,7 +91,7 @@ from 
..jobs.decorator import Job from ..resolution.const import ContextType, IssueType, SuggestionType from ..resolution.data import Issue -from ..store.addon import AddonStore +from ..store.addon import AppStore from ..utils import check_port from ..utils.apparmor import adjust_profile from ..utils.dt import utc_from_timestamp @@ -102,11 +102,11 @@ WATCHDOG_RETRY_SECONDS, WATCHDOG_THROTTLE_MAX_CALLS, WATCHDOG_THROTTLE_PERIOD, - AddonBackupMode, + AppBackupMode, MappingType, ) -from .model import AddonModel, Data -from .options import AddonOptions +from .model import AppModel, Data +from .options import AppOptions from .utils import remove_data from .validate import SCHEMA_ADDON_BACKUP @@ -131,23 +131,23 @@ type_conflict_strategies=["override"], ) -# Backups just need to know if an addon was running or not -# Map other addon states to those two +# Backups just need to know if an app was running or not +# Map other app states to those two _MAP_ADDON_STATE = { - AddonState.STARTUP: AddonState.STARTED, - AddonState.ERROR: AddonState.STOPPED, - AddonState.UNKNOWN: AddonState.STOPPED, + AppState.STARTUP: AppState.STARTED, + AppState.ERROR: AppState.STOPPED, + AppState.UNKNOWN: AppState.STOPPED, } -class Addon(AddonModel): - """Hold data for add-on inside Supervisor.""" +class App(AppModel): + """Hold data for app inside Supervisor.""" def __init__(self, coresys: CoreSys, slug: str): """Initialize data holder.""" super().__init__(coresys, slug) - self.instance: DockerAddon = DockerAddon(coresys, self) - self._state: AddonState = AddonState.UNKNOWN + self.instance: DockerApp = DockerApp(coresys, self) + self._state: AppState = AppState.UNKNOWN self._manual_stop: bool = False self._listeners: list[EventListener] = [] self._startup_event = asyncio.Event() @@ -174,30 +174,30 @@ def device_access_missing_issue(self) -> Issue: return self._device_access_missing_issue @property - def state(self) -> AddonState: - """Return state of the add-on.""" + def state(self) -> AppState: + 
"""Return state of the app.""" return self._state @state.setter - def state(self, new_state: AddonState) -> None: - """Set the add-on into new state.""" + def state(self, new_state: AppState) -> None: + """Set the app into new state.""" if self._state == new_state: return old_state = self._state self._state = new_state - # Signal listeners about addon state change - if new_state == AddonState.STARTED or old_state == AddonState.STARTUP: + # Signal listeners about app state change + if new_state == AppState.STARTED or old_state == AppState.STARTUP: self._startup_event.set() # Dismiss boot failed issue if present and we started - if new_state == AddonState.STARTED and ( + if new_state == AppState.STARTED and ( issue := self.sys_resolution.get_issue_if_present(self.boot_failed_issue) ): self.sys_resolution.dismiss_issue(issue) # Dismiss device access missing issue if present and we stopped - if new_state == AddonState.STOPPED and ( + if new_state == AppState.STOPPED and ( issue := self.sys_resolution.get_issue_if_present( self.device_access_missing_issue ) @@ -260,7 +260,7 @@ async def load(self) -> None: await self.instance.check_image(self.version, default_image, self.arch) except DockerError: _LOGGER.info("No %s app Docker image %s found", self.slug, self.image) - with suppress(DockerError, AddonNotSupportedError): + with suppress(DockerError, AppNotSupportedError): await self.instance.install(self.version, default_image, arch=self.arch) self.persist[ATTR_IMAGE] = default_image @@ -268,70 +268,70 @@ async def load(self) -> None: @property def ip_address(self) -> IPv4Address: - """Return IP of add-on instance.""" + """Return IP of app instance.""" return self.instance.ip_address @property def data(self) -> Data: - """Return add-on data/config.""" - return self.sys_addons.data.system[self.slug] + """Return app data/config.""" + return self.sys_apps.data.system[self.slug] @property def data_store(self) -> Data: - """Return add-on data from store.""" - return 
self.sys_store.data.addons.get(self.slug, self.data) + """Return app data from store.""" + return self.sys_store.data.apps.get(self.slug, self.data) @property - def addon_store(self) -> AddonStore | None: - """Return store representation of addon.""" - return self.sys_addons.store.get(self.slug) + def app_store(self) -> AppStore | None: + """Return store representation of app.""" + return self.sys_apps.store.get(self.slug) @property def persist(self) -> Data: - """Return add-on data/config.""" - return self.sys_addons.data.user[self.slug] + """Return app data/config.""" + return self.sys_apps.data.user[self.slug] @property def is_installed(self) -> bool: - """Return True if an add-on is installed.""" + """Return True if an app is installed.""" return True @property def is_detached(self) -> bool: - """Return True if add-on is detached.""" - return self.slug not in self.sys_store.data.addons + """Return True if app is detached.""" + return self.slug not in self.sys_store.data.apps @property def with_icon(self) -> bool: """Return True if an icon exists.""" - if self.is_detached or not self.addon_store: + if self.is_detached or not self.app_store: return super().with_icon - return self.addon_store.with_icon + return self.app_store.with_icon @property def with_logo(self) -> bool: """Return True if a logo exists.""" - if self.is_detached or not self.addon_store: + if self.is_detached or not self.app_store: return super().with_logo - return self.addon_store.with_logo + return self.app_store.with_logo @property def with_changelog(self) -> bool: """Return True if a changelog exists.""" - if self.is_detached or not self.addon_store: + if self.is_detached or not self.app_store: return super().with_changelog - return self.addon_store.with_changelog + return self.app_store.with_changelog @property def with_documentation(self) -> bool: """Return True if a documentation exists.""" - if self.is_detached or not self.addon_store: + if self.is_detached or not self.app_store: return 
super().with_documentation - return self.addon_store.with_documentation + return self.app_store.with_documentation @property def available(self) -> bool: - """Return True if this add-on is available on this platform.""" + """Return True if this app is available on this platform.""" return self._available(self.data_store) @property @@ -348,7 +348,7 @@ def need_update(self) -> bool: @property def dns(self) -> list[str]: - """Return list of DNS name for that add-on.""" + """Return list of DNS name for that app.""" return [f"{self.hostname}.{DNS_SUFFIX}"] @property @@ -360,23 +360,23 @@ def options(self) -> dict[str, Any]: @options.setter def options(self, value: dict[str, Any] | None) -> None: - """Store user add-on options.""" + """Store user app options.""" self.persist[ATTR_OPTIONS] = {} if value is None else deepcopy(value) @property - def boot(self) -> AddonBoot: + def boot(self) -> AppBoot: """Return boot config with prio local settings unless config is forced.""" - if self.boot_config == AddonBootConfig.MANUAL_ONLY: + if self.boot_config == AppBootConfig.MANUAL_ONLY: return super().boot return self.persist.get(ATTR_BOOT, super().boot) @boot.setter - def boot(self, value: AddonBoot) -> None: + def boot(self, value: AppBoot) -> None: """Store user boot options.""" self.persist[ATTR_BOOT] = value # Dismiss boot failed issue if present and boot at start disabled - if value == AddonBoot.MANUAL and ( + if value == AppBoot.MANUAL and ( issue := self.sys_resolution.get_issue_if_present(self._boot_failed_issue) ): self.sys_resolution.dismiss_issue(issue) @@ -393,7 +393,7 @@ def auto_update(self, value: bool) -> None: @property def auto_update_available(self) -> bool: - """Return if it is safe to auto update addon.""" + """Return if it is safe to auto update app.""" if not self.need_update or not self.auto_update: return False @@ -421,7 +421,7 @@ def watchdog(self) -> bool: @watchdog.setter def watchdog(self, value: bool) -> None: """Set watchdog enable/disable.""" - if 
value and self.startup == AddonStartup.ONCE: + if value and self.startup == AppStartup.ONCE: _LOGGER.warning( "Ignoring watchdog for %s because startup type is 'once'", self.slug ) @@ -430,7 +430,7 @@ def watchdog(self, value: bool) -> None: @property def system_managed(self) -> bool: - """Return True if addon is managed by Home Assistant.""" + """Return True if app is managed by Home Assistant.""" return self.persist[ATTR_SYSTEM_MANAGED] @system_managed.setter @@ -443,14 +443,14 @@ def system_managed(self, value: bool) -> None: @property def system_managed_config_entry(self) -> str | None: - """Return id of config entry managing this addon (if any).""" + """Return id of config entry managing this app (if any).""" if not self.system_managed: return None return self.persist.get(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY) @system_managed_config_entry.setter def system_managed_config_entry(self, value: str | None) -> None: - """Set ID of config entry managing this addon.""" + """Set ID of config entry managing this app.""" if not self.system_managed: _LOGGER.warning( "Ignoring system managed config entry for %s because it is not system managed", @@ -461,7 +461,7 @@ def system_managed_config_entry(self, value: str | None) -> None: @property def uuid(self) -> str: - """Return an API token for this add-on.""" + """Return an API token for this app.""" return self.persist[ATTR_UUID] @property @@ -483,7 +483,7 @@ def ingress_entry(self) -> str | None: @property def latest_version(self) -> AwesomeVersion: - """Return version of add-on.""" + """Return version of app.""" return self.data_store[ATTR_VERSION] @property @@ -493,22 +493,22 @@ def latest_version_timestamp(self) -> datetime: @property def protected(self) -> bool: - """Return if add-on is in protected mode.""" + """Return if app is in protected mode.""" return self.persist[ATTR_PROTECTED] @protected.setter def protected(self, value: bool) -> None: - """Set add-on in protected mode.""" + """Set app in protected mode.""" 
self.persist[ATTR_PROTECTED] = value @property def ports(self) -> dict[str, int | None] | None: - """Return ports of add-on.""" + """Return ports of app.""" return self.persist.get(ATTR_NETWORK, super().ports) @ports.setter def ports(self, value: dict[str, int | None] | None) -> None: - """Set custom ports of add-on.""" + """Set custom ports of app.""" if value is None: self.persist.pop(ATTR_NETWORK, None) return @@ -572,7 +572,7 @@ def ingress_port(self) -> int | None: @property def ingress_panel(self) -> bool | None: - """Return True if the add-on access support ingress.""" + """Return True if the app access support ingress.""" if not self.with_ingress: return None @@ -580,7 +580,7 @@ def ingress_panel(self) -> bool | None: @ingress_panel.setter def ingress_panel(self, value: bool) -> None: - """Return True if the add-on access support ingress.""" + """Return True if the app access support ingress.""" self.persist[ATTR_INGRESS_PANEL] = value @property @@ -610,47 +610,47 @@ def audio_input(self, value: str | None) -> None: @property def image(self) -> str | None: - """Return image name of add-on.""" + """Return image name of app.""" return self.persist.get(ATTR_IMAGE) @property def need_build(self) -> bool: - """Return True if this add-on need a local build.""" + """Return True if this app need a local build.""" return ATTR_IMAGE not in self.data @property def latest_need_build(self) -> bool: - """Return True if the latest version of the addon needs a local build.""" + """Return True if the latest version of the app needs a local build.""" return ATTR_IMAGE not in self.data_store @property def path_data(self) -> Path: - """Return add-on data path inside Supervisor.""" - return Path(self.sys_config.path_addons_data, self.slug) + """Return app data path inside Supervisor.""" + return Path(self.sys_config.path_apps_data, self.slug) @property def path_extern_data(self) -> PurePath: - """Return add-on data path external for Docker.""" - return 
PurePath(self.sys_config.path_extern_addons_data, self.slug) + """Return app data path external for Docker.""" + return PurePath(self.sys_config.path_extern_apps_data, self.slug) @property - def addon_config_used(self) -> bool: - """Add-on is using its public config folder.""" + def app_config_used(self) -> bool: + """App is using its public config folder.""" return MappingType.ADDON_CONFIG in self.map_volumes @property def path_config(self) -> Path: - """Return add-on config path inside Supervisor.""" - return Path(self.sys_config.path_addon_configs, self.slug) + """Return app config path inside Supervisor.""" + return Path(self.sys_config.path_app_configs, self.slug) @property def path_extern_config(self) -> PurePath: - """Return add-on config path external for Docker.""" - return PurePath(self.sys_config.path_extern_addon_configs, self.slug) + """Return app config path external for Docker.""" + return PurePath(self.sys_config.path_extern_app_configs, self.slug) @property def path_options(self) -> Path: - """Return path to add-on options.""" + """Return path to app options.""" return Path(self.path_data, "options.json") @property @@ -665,7 +665,7 @@ def path_extern_pulse(self) -> Path: @property def devices(self) -> set[Device]: - """Extract devices from add-on options.""" + """Extract devices from app options.""" options_schema = self.schema with suppress(vol.Invalid): options_schema.validate(self.options) @@ -674,7 +674,7 @@ def devices(self) -> set[Device]: @property def pwned(self) -> set[str]: - """Extract pwned data for add-on options.""" + """Extract pwned data for app options.""" options_schema = self.schema with suppress(vol.Invalid): options_schema.validate(self.options) @@ -683,12 +683,12 @@ def pwned(self) -> set[str]: @property def loaded(self) -> bool: - """Is add-on loaded.""" + """Is app loaded.""" return bool(self._listeners) async def save_persist(self) -> None: - """Save data of add-on.""" - await self.sys_addons.data.save_data() + """Save data 
of app.""" + await self.sys_apps.data.save_data() async def watchdog_application(self) -> bool: """Return True if application is running.""" @@ -734,7 +734,7 @@ async def watchdog_application(self) -> bool: return False async def write_options(self) -> None: - """Return True if add-on options is written to data.""" + """Return True if app options is written to data.""" # Update secrets for validation await self.sys_homeassistant.secrets.reload() @@ -742,27 +742,27 @@ async def write_options(self) -> None: options = self.schema.validate(self.options) await self.sys_run_in_executor(write_json_file, self.path_options, options) except vol.Invalid as ex: - raise AddonConfigurationInvalidError( + raise AppConfigurationInvalidError( _LOGGER.error, - addon=self.slug, + app=self.slug, validation_error=humanize_error(self.options, ex), ) from None except ConfigurationFileError as err: _LOGGER.error("App %s can't write options", self.slug) - raise AddonUnknownError(addon=self.slug) from err + raise AppUnknownError(app=self.slug) from err _LOGGER.debug("App %s write options: %s", self.slug, options) @Job( name="addon_unload", - on_condition=AddonsJobError, + on_condition=AppsJobError, concurrency=JobConcurrency.GROUP_REJECT, ) async def unload(self) -> None: - """Unload add-on and remove data.""" + """Unload app and remove data.""" # Wait for startup wait task to complete before removing data. # The container remove/state change resolves _startup_event; this - # ensures _wait_for_startup finishes before we touch addon data. + # ensures _wait_for_startup finishes before we touch app data. 
if self._wait_for_startup_task: await self._wait_for_startup_task @@ -788,15 +788,15 @@ async def _check_ingress_port(self): @Job( name="addon_install", - on_condition=AddonsJobError, + on_condition=AppsJobError, concurrency=JobConcurrency.GROUP_REJECT, ) async def install(self) -> None: - """Install and setup this addon.""" - if not self.addon_store: - raise StoreAddonNotFoundError(addon=self.slug) + """Install and setup this app.""" + if not self.app_store: + raise StoreAppNotFoundError(app=self.slug) - await self.sys_addons.data.install(self.addon_store) + await self.sys_apps.data.install(self.app_store) def setup_data(): if not self.path_data.is_dir(): @@ -813,28 +813,28 @@ def setup_data(): # Install image try: await self.instance.install( - self.latest_version, self.addon_store.image, arch=self.arch + self.latest_version, self.app_store.image, arch=self.arch ) - except AddonsError: - await self.sys_addons.data.uninstall(self) + except AppsError: + await self.sys_apps.data.uninstall(self) raise except DockerBuildError as err: _LOGGER.error("Could not build image for app %s: %s", self.slug, err) - await self.sys_addons.data.uninstall(self) - raise AddonBuildFailedUnknownError(addon=self.slug) from err + await self.sys_apps.data.uninstall(self) + raise AppBuildFailedUnknownError(app=self.slug) from err except DockerRegistryAuthError: - await self.sys_addons.data.uninstall(self) + await self.sys_apps.data.uninstall(self) raise except DockerError as err: _LOGGER.error("Could not pull image to update app %s: %s", self.slug, err) - await self.sys_addons.data.uninstall(self) - raise AddonUnknownError(addon=self.slug) from err + await self.sys_apps.data.uninstall(self) + raise AppUnknownError(app=self.slug) from err # Finish initialization and set up listeners await self.load() - # Add to addon manager - self.sys_addons.local[self.slug] = self + # Add to app manager + self.sys_apps.local[self.slug] = self # Reload ingress tokens if self.with_ingress: @@ -842,26 
+842,26 @@ def setup_data(): @Job( name="addon_uninstall", - on_condition=AddonsJobError, + on_condition=AppsJobError, concurrency=JobConcurrency.GROUP_REJECT, ) async def uninstall( self, *, remove_config: bool, remove_image: bool = True ) -> None: - """Uninstall and cleanup this addon.""" + """Uninstall and cleanup this app.""" try: await self.instance.remove(remove_image=remove_image) except DockerError as err: _LOGGER.error("Could not remove image for app %s: %s", self.slug, err) - raise AddonUnknownError(addon=self.slug) from err + raise AppUnknownError(app=self.slug) from err - self.state = AddonState.UNKNOWN + self.state = AppState.UNKNOWN await self.unload() def cleanup_config_and_audio(): # Remove config if present and requested - if self.addon_config_used and remove_config: + if self.app_config_used and remove_config: remove_data(self.path_config) # Cleanup audio settings @@ -898,9 +898,9 @@ def cleanup_config_and_audio(): continue await service.del_service_data(self) - # Remove from addon manager - self.sys_addons.local.pop(self.slug) - await self.sys_addons.data.uninstall(self) + # Remove from app manager + self.sys_apps.local.pop(self.slug) + await self.sys_apps.data.uninstall(self) # Cleanup Ingress tokens if need_ingress_token_cleanup: @@ -908,43 +908,43 @@ def cleanup_config_and_audio(): @Job( name="addon_update", - on_condition=AddonsJobError, + on_condition=AppsJobError, concurrency=JobConcurrency.GROUP_REJECT, ) async def update(self) -> asyncio.Task | None: - """Update this addon to latest version. + """Update this app to latest version. - Returns a Task that completes when addon has state 'started' (see start) + Returns a Task that completes when app has state 'started' (see start) if it was running. Else nothing is returned. 
""" - if not self.addon_store: - raise StoreAddonNotFoundError(addon=self.slug) + if not self.app_store: + raise StoreAppNotFoundError(app=self.slug) old_image = self.image # Cache data to prevent races with other updates to global - store = self.addon_store.clone() + store = self.app_store.clone() try: await self.instance.update(store.version, store.image, arch=self.arch) except DockerBuildError as err: _LOGGER.error("Could not build image for app %s: %s", self.slug, err) - raise AddonBuildFailedUnknownError(addon=self.slug) from err + raise AppBuildFailedUnknownError(app=self.slug) from err except DockerRegistryAuthError: raise except DockerError as err: _LOGGER.error("Could not pull image to update app %s: %s", self.slug, err) - raise AddonUnknownError(addon=self.slug) from err + raise AppUnknownError(app=self.slug) from err - # Stop the addon if running - if (last_state := self.state) in {AddonState.STARTED, AddonState.STARTUP}: + # Stop the app if running + if (last_state := self.state) in {AppState.STARTED, AppState.STARTUP}: await self.stop() try: _LOGGER.info("App '%s' successfully updated", self.slug) - await self.sys_addons.data.update(store) + await self.sys_apps.data.update(store) await self._check_ingress_port() - # Reload ingress tokens in case addon gained ingress support + # Reload ingress tokens in case app gained ingress support if self.with_ingress: await self.sys_ingress.reload() @@ -961,50 +961,50 @@ async def update(self) -> asyncio.Task | None: # restore state. Return Task for caller if no exception out = ( await self.start() - if last_state in {AddonState.STARTED, AddonState.STARTUP} + if last_state in {AppState.STARTED, AppState.STARTUP} else None ) return out @Job( name="addon_rebuild", - on_condition=AddonsJobError, + on_condition=AppsJobError, concurrency=JobConcurrency.GROUP_REJECT, ) async def rebuild(self) -> asyncio.Task | None: - """Rebuild this addons container and image. + """Rebuild this apps container and image. 
- Returns a Task that completes when addon has state 'started' (see start) + Returns a Task that completes when app has state 'started' (see start) if it was running. Else nothing is returned. """ - last_state: AddonState = self.state + last_state: AppState = self.state try: - # remove docker container and image but not addon config + # remove docker container and image but not app config try: await self.instance.remove() except DockerError as err: _LOGGER.error("Could not remove image for app %s: %s", self.slug, err) - raise AddonUnknownError(addon=self.slug) from err + raise AppUnknownError(app=self.slug) from err try: await self.instance.install(self.version) except DockerBuildError as err: _LOGGER.error("Could not build image for app %s: %s", self.slug, err) - raise AddonBuildFailedUnknownError(addon=self.slug) from err + raise AppBuildFailedUnknownError(app=self.slug) from err except DockerRegistryAuthError: raise except DockerError as err: _LOGGER.error( "Could not pull image to update app %s: %s", self.slug, err ) - raise AddonUnknownError(addon=self.slug) from err + raise AppUnknownError(app=self.slug) from err - if self.addon_store: - await self.sys_addons.data.update(self.addon_store) + if self.app_store: + await self.sys_apps.data.update(self.app_store) await self._check_ingress_port() - # Reload ingress tokens in case addon gained ingress support + # Reload ingress tokens in case app gained ingress support if self.with_ingress: await self.sys_ingress.reload() @@ -1014,7 +1014,7 @@ async def rebuild(self) -> asyncio.Task | None: # restore state out = ( await self.start() - if last_state in [AddonState.STARTED, AddonState.STARTUP] + if last_state in [AppState.STARTED, AppState.STARTUP] else None ) return out @@ -1042,16 +1042,16 @@ def write_pulse_config(): ) async def install_apparmor(self) -> None: - """Install or Update AppArmor profile for Add-on.""" + """Install or Update AppArmor profile for App.""" exists_local = 
self.sys_host.apparmor.exists(self.slug) - exists_addon = await self.sys_run_in_executor(self.path_apparmor.exists) + exists_app = await self.sys_run_in_executor(self.path_apparmor.exists) # Nothing to do - if not exists_local and not exists_addon: + if not exists_local and not exists_app: return # Need removed - if exists_local and not exists_addon: + if exists_local and not exists_app: await self.sys_host.apparmor.remove_profile(self.slug) return @@ -1073,7 +1073,7 @@ def install_update_profile() -> Path: await self.sys_run_in_executor(tmp_folder.cleanup) async def uninstall_apparmor(self) -> None: - """Remove AppArmor profile for Add-on.""" + """Remove AppArmor profile for App.""" if not self.sys_host.apparmor.exists(self.slug): return await self.sys_host.apparmor.remove_profile(self.slug) @@ -1096,7 +1096,7 @@ def test_update_schema(self) -> bool: # create voluptuous new_schema = vol.Schema( vol.All( - dict, AddonOptions(self.coresys, new_raw_schema, self.name, self.slug) + dict, AppOptions(self.coresys, new_raw_schema, self.name, self.slug) ) ) @@ -1126,15 +1126,15 @@ async def _wait_for_startup(self) -> None: @Job( name="addon_start", - on_condition=AddonsJobError, + on_condition=AppsJobError, concurrency=JobConcurrency.GROUP_REJECT, ) async def start(self) -> asyncio.Task: - """Set options and start add-on. + """Set options and start app. - Returns a Task that completes when addon has state 'started'. - For addons with a healthcheck, that is when they become healthy or unhealthy. - Addons without a healthcheck have state 'started' immediately. + Returns a Task that completes when app has state 'started'. + For apps with a healthcheck, that is when they become healthy or unhealthy. + Apps without a healthcheck have state 'started' immediately. 
""" if await self.instance.is_running(): _LOGGER.warning("%s is already running!", self.slug) @@ -1155,7 +1155,7 @@ async def start(self) -> asyncio.Task: if self.with_audio: await self.write_pulse() - def _check_addon_config_dir(): + def _check_app_config_dir(): if self.path_config.is_dir(): return @@ -1164,53 +1164,53 @@ def _check_addon_config_dir(): ) self.path_config.mkdir() - if self.addon_config_used: - await self.sys_run_in_executor(_check_addon_config_dir) + if self.app_config_used: + await self.sys_run_in_executor(_check_app_config_dir) - # Start Add-on + # Start App self._startup_event.clear() try: await self.instance.run() except DockerContainerPortConflict as err: - raise AddonPortConflict( + raise AppPortConflict( _LOGGER.error, name=self.slug, port=cast(dict[str, Any], err.extra_fields)["port"], ) from err except DockerError as err: _LOGGER.error("Could not start container for app %s: %s", self.slug, err) - self.state = AddonState.ERROR - raise AddonUnknownError(addon=self.slug) from err + self.state = AppState.ERROR + raise AppUnknownError(app=self.slug) from err self._wait_for_startup_task = self.sys_create_task(self._wait_for_startup()) return self._wait_for_startup_task @Job( name="addon_stop", - on_condition=AddonsJobError, + on_condition=AppsJobError, concurrency=JobConcurrency.GROUP_REJECT, ) async def stop(self) -> None: - """Stop add-on.""" + """Stop app.""" self._manual_stop = True try: await self.instance.stop() except DockerError as err: _LOGGER.error("Could not stop container for app %s: %s", self.slug, err) - self.state = AddonState.ERROR - raise AddonUnknownError(addon=self.slug) from err + self.state = AppState.ERROR + raise AppUnknownError(app=self.slug) from err @Job( name="addon_restart", - on_condition=AddonsJobError, + on_condition=AppsJobError, concurrency=JobConcurrency.GROUP_REJECT, ) async def restart(self) -> asyncio.Task: - """Restart add-on. + """Restart app. 
- Returns a Task that completes when addon has state 'started' (see start). + Returns a Task that completes when app has state 'started' (see start). """ - with suppress(AddonsError): + with suppress(AppsError): await self.stop() return await self.start() @@ -1225,35 +1225,35 @@ async def stats(self) -> DockerStats: """Return stats of container.""" try: if not await self.is_running(): - raise AddonNotRunningError(_LOGGER.warning, addon=self.slug) + raise AppNotRunningError(_LOGGER.warning, app=self.slug) return await self.instance.stats() except DockerError as err: _LOGGER.error( "Could not get stats of container for app %s: %s", self.slug, err ) - raise AddonUnknownError(addon=self.slug) from err + raise AppUnknownError(app=self.slug) from err @Job( name="addon_write_stdin", - on_condition=AddonsJobError, + on_condition=AppsJobError, concurrency=JobConcurrency.GROUP_REJECT, ) async def write_stdin(self, data) -> None: - """Write data to add-on stdin.""" + """Write data to app stdin.""" if not self.with_stdin: - raise AddonNotSupportedWriteStdinError(_LOGGER.error, addon=self.slug) + raise AppNotSupportedWriteStdinError(_LOGGER.error, app=self.slug) try: if not await self.is_running(): - raise AddonNotRunningError(_LOGGER.warning, addon=self.slug) + raise AppNotRunningError(_LOGGER.warning, app=self.slug) await self.instance.write_stdin(data) except DockerError as err: _LOGGER.error( "Could not write stdin to container for app %s: %s", self.slug, err ) - raise AddonUnknownError(addon=self.slug) from err + raise AppUnknownError(app=self.slug) from err async def _backup_command(self, command: str) -> None: try: @@ -1263,29 +1263,29 @@ async def _backup_command(self, command: str) -> None: "Pre-/Post backup command failed with: %s", command_return.output.decode("utf-8", errors="replace"), ) - raise AddonPrePostBackupCommandReturnedError( - _LOGGER.error, addon=self.slug, exit_code=command_return.exit_code + raise AppPrePostBackupCommandReturnedError( + _LOGGER.error, 
app=self.slug, exit_code=command_return.exit_code ) except DockerError as err: _LOGGER.error( "Failed running pre-/post backup command %s: %s", command, err ) - raise AddonUnknownError(addon=self.slug) from err + raise AppUnknownError(app=self.slug) from err @Job( name="addon_begin_backup", - on_condition=AddonsJobError, + on_condition=AppsJobError, concurrency=JobConcurrency.GROUP_REJECT, ) async def begin_backup(self) -> bool: - """Execute pre commands or stop addon if necessary. + """Execute pre commands or stop app if necessary. Returns value of `is_running`. Caller should not call `end_backup` if return is false. """ if not await self.is_running(): return False - if self.backup_mode == AddonBackupMode.COLD: + if self.backup_mode == AppBackupMode.COLD: _LOGGER.info("Shutdown app %s for cold backup", self.slug) await self.stop() @@ -1296,16 +1296,16 @@ async def begin_backup(self) -> bool: @Job( name="addon_end_backup", - on_condition=AddonsJobError, + on_condition=AppsJobError, concurrency=JobConcurrency.GROUP_REJECT, ) async def end_backup(self) -> asyncio.Task | None: - """Execute post commands or restart addon if necessary. + """Execute post commands or restart app if necessary. - Returns a Task that completes when addon has state 'started' (see start) + Returns a Task that completes when app has state 'started' (see start) for cold backup. Else nothing is returned. """ - if self.backup_mode is AddonBackupMode.COLD: + if self.backup_mode is AppBackupMode.COLD: _LOGGER.info("Starting app %s again", self.slug) return await self.start() @@ -1316,7 +1316,7 @@ async def end_backup(self) -> asyncio.Task | None: def _is_excluded_by_filter( self, origin_path: Path, arcname: str, item_arcpath: PurePath ) -> bool: - """Filter out files from backup based on filters provided by addon developer. + """Filter out files from backup based on filters provided by app developer. 
This tests the dev provided filters against the full path of the file as Supervisor sees them using match. This is done for legacy reasons, testing @@ -1334,20 +1334,20 @@ def _is_excluded_by_filter( @Job( name="addon_backup", - on_condition=AddonsJobError, + on_condition=AppsJobError, concurrency=JobConcurrency.GROUP_REJECT, ) async def backup(self, tar_file: SecureTarFile) -> asyncio.Task | None: - """Backup state of an add-on. + """Backup state of an app. - Returns a Task that completes when addon has state 'started' (see start) + Returns a Task that completes when app has state 'started' (see start) for cold backup. Else nothing is returned. """ - def _addon_backup( + def _app_backup( metadata: dict[str, Any], apparmor_profile: str | None, - addon_config_used: bool, + app_config_used: bool, temp_dir: TemporaryDirectory, temp_path: Path, ): @@ -1388,7 +1388,7 @@ def _addon_backup( ) # Backup config (if used and existing, restore handles this gracefully) - if addon_config_used and self.path_config.is_dir(): + if app_config_used and self.path_config.is_dir(): atomic_contents_add( backup, self.path_config, @@ -1423,10 +1423,10 @@ def _addon_backup( await self.sys_run_in_executor( partial( - _addon_backup, + _app_backup, metadata=data, apparmor_profile=apparmor_profile, - addon_config_used=self.addon_config_used, + app_config_used=self.app_config_used, temp_dir=temp_dir, temp_path=temp_path, ) @@ -1447,14 +1447,14 @@ def _addon_backup( @Job( name="addon_restore", - on_condition=AddonsJobError, + on_condition=AppsJobError, concurrency=JobConcurrency.GROUP_REJECT, ) async def restore(self, tar_file: SecureTarFile) -> asyncio.Task | None: - """Restore state of an add-on. + """Restore state of an app. - Returns a Task that completes when addon has state 'started' (see start) - if addon is started after restore. Else nothing is returned. + Returns a Task that completes when app has state 'started' (see start) + if app is started after restore. Else nothing is returned. 
""" wait_for_start: asyncio.Task | None = None @@ -1488,26 +1488,26 @@ def _extract_tarfile() -> tuple[TemporaryDirectory, dict[str, Any]]: except tarfile.TarError as err: raise BackupRestoreUnknownError() from err except ConfigurationFileError as err: - raise AddonUnknownError(addon=self.slug) from err + raise AppUnknownError(app=self.slug) from err try: # Validate try: data = SCHEMA_ADDON_BACKUP(data) except vol.Invalid as err: - raise AddonBackupMetadataInvalidError( + raise AppBackupMetadataInvalidError( _LOGGER.error, - addon=self.slug, + app=self.slug, validation_error=humanize_error(data, err), ) from err # Validate availability. Raises if not self._validate_availability(data[ATTR_SYSTEM], logger=_LOGGER.error) - # Restore local add-on information + # Restore local app information _LOGGER.info("Restore config for app %s", self.slug) restore_image = self._image(data[ATTR_SYSTEM]) - await self.sys_addons.data.restore( + await self.sys_apps.data.restore( self.slug, data[ATTR_USER], data[ATTR_SYSTEM], restore_image ) @@ -1555,7 +1555,7 @@ def _restore_data(): temp_config = Path(tmp.name, "config") if temp_config.is_dir(): shutil.copytree(temp_config, self.path_config, symlinks=True) - elif self.addon_config_used: + elif self.app_config_used: self.path_config.mkdir() try: @@ -1582,12 +1582,12 @@ def _restore_data(): raise BackupRestoreUnknownError() from err finally: - # Is add-on loaded + # Is app loaded if not self.loaded: await self.load() - # Run add-on - if data[ATTR_STATE] == AddonState.STARTED: + # Run app + if data[ATTR_STATE] == AppState.STARTED: wait_for_start = await self.start() finally: await self.sys_run_in_executor(tmp.cleanup) @@ -1598,11 +1598,11 @@ def _restore_data(): name="addon_restart_after_problem", throttle_period=WATCHDOG_THROTTLE_PERIOD, throttle_max_calls=WATCHDOG_THROTTLE_MAX_CALLS, - on_condition=AddonsJobError, + on_condition=AppsJobError, throttle=JobThrottle.GROUP_RATE_LIMIT, ) async def _restart_after_problem(self, state: 
ContainerState): - """Restart unhealthy or failed addon.""" + """Restart unhealthy or failed app.""" attempts = 0 while await self.instance.current_state() == state: if not self.in_progress: @@ -1621,7 +1621,7 @@ async def _restart_after_problem(self, state: ContainerState): await (await self.start()) else: await (await self.restart()) - except AddonsError as err: + except AppsError as err: attempts = attempts + 1 _LOGGER.error("Watchdog restart of app %s failed!", self.name) await async_capture_exception(err) @@ -1647,27 +1647,27 @@ async def _restart_after_problem(self, state: ContainerState): await asyncio.sleep(delay) async def container_state_changed(self, event: DockerContainerStateEvent) -> None: - """Set addon state from container state.""" + """Set app state from container state.""" if event.name != self.instance.name: return if event.state == ContainerState.RUNNING: self._manual_stop = False self.state = ( - AddonState.STARTUP if self.instance.healthcheck else AddonState.STARTED + AppState.STARTUP if self.instance.healthcheck else AppState.STARTED ) elif event.state in [ ContainerState.HEALTHY, ContainerState.UNHEALTHY, ]: - self.state = AddonState.STARTED + self.state = AppState.STARTED elif event.state == ContainerState.STOPPED: - self.state = AddonState.STOPPED + self.state = AppState.STOPPED elif event.state == ContainerState.FAILED: - self.state = AddonState.ERROR + self.state = AppState.ERROR async def watchdog_container(self, event: DockerContainerStateEvent) -> None: - """Process state changes in addon container and restart if necessary.""" + """Process state changes in app container and restart if necessary.""" if event.name != self.instance.name: return @@ -1684,6 +1684,6 @@ async def watchdog_container(self, event: DockerContainerStateEvent) -> None: def refresh_path_cache(self) -> Awaitable[None]: """Refresh cache of existing paths.""" - if self.is_detached or not self.addon_store: + if self.is_detached or not self.app_store: return 
super().refresh_path_cache() - return self.addon_store.refresh_path_cache() + return self.app_store.refresh_path_cache() diff --git a/supervisor/addons/build.py b/supervisor/addons/build.py index 991ca1ce3a0..1e0ddb73db3 100644 --- a/supervisor/addons/build.py +++ b/supervisor/addons/build.py @@ -1,4 +1,4 @@ -"""Supervisor add-on build environment.""" +"""Supervisor app build environment.""" from __future__ import annotations @@ -34,8 +34,8 @@ from ..docker.const import DOCKER_HUB, DOCKER_HUB_LEGACY, DockerMount, MountType from ..docker.interface import MAP_ARCH from ..exceptions import ( - AddonBuildArchitectureNotSupportedError, - AddonBuildDockerfileMissingError, + AppBuildArchitectureNotSupportedError, + AppBuildDockerfileMissingError, ConfigurationFileError, HassioArchNotFound, ) @@ -43,50 +43,50 @@ from .validate import SCHEMA_BUILD_CONFIG if TYPE_CHECKING: - from .manager import AnyAddon + from .manager import AnyApp _LOGGER: logging.Logger = logging.getLogger(__name__) -class AddonBuild(CoreSysAttributes): - """Handle build options for add-ons.""" +class AppBuild(CoreSysAttributes): + """Handle build options for apps.""" - def __init__(self, coresys: CoreSys, addon: AnyAddon, data: dict[str, Any]) -> None: - """Initialize Supervisor add-on builder.""" + def __init__(self, coresys: CoreSys, app: AnyApp, data: dict[str, Any]) -> None: + """Initialize Supervisor app builder.""" self.coresys: CoreSys = coresys - self.addon = addon + self.app = app self._build_config: dict[str, Any] = data @classmethod - async def create(cls, coresys: CoreSys, addon: AnyAddon) -> Self: - """Create an AddonBuild by reading the build configuration from disk.""" - data = await coresys.run_in_executor(cls._read_build_config, addon) + async def create(cls, coresys: CoreSys, app: AnyApp) -> Self: + """Create an AppBuild by reading the build configuration from disk.""" + data = await coresys.run_in_executor(cls._read_build_config, app) if data: _LOGGER.warning( "App %s uses build.yaml 
which is deprecated. " "Move build parameters into the Dockerfile directly.", - addon.slug, + app.slug, ) if data[ATTR_SQUASH]: _LOGGER.warning( "Ignoring squash build option for %s as Docker BuildKit" " does not support it.", - addon.slug, + app.slug, ) - return cls(coresys, addon, data or {}) + return cls(coresys, app, data or {}) @staticmethod - def _read_build_config(addon: AnyAddon) -> dict[str, Any] | None: + def _read_build_config(app: AnyApp) -> dict[str, Any] | None: """Find and read the build configuration file. Must be run in executor. """ try: build_file = find_one_filetype( - addon.path_location, "build", FILE_SUFFIX_CONFIGURATION + app.path_location, "build", FILE_SUFFIX_CONFIGURATION ) except ConfigurationFileError: # No build config file found, assuming modernized build @@ -98,13 +98,13 @@ def _read_build_config(addon: AnyAddon) -> dict[str, Any] | None: except ConfigurationFileError as ex: _LOGGER.exception( "Error reading %s build config (%s), using defaults", - addon.slug, + app.slug, ex, ) build_config = SCHEMA_BUILD_CONFIG({}) except vol.Invalid as ex: _LOGGER.warning( - "Error parsing %s build config (%s), using defaults", addon.slug, ex + "Error parsing %s build config (%s), using defaults", app.slug, ex ) build_config = SCHEMA_BUILD_CONFIG({}) @@ -117,12 +117,12 @@ def _read_build_config(addon: AnyAddon) -> dict[str, Any] | None: @cached_property def arch(self) -> CpuArch: - """Return arch of the add-on.""" - return self.sys_arch.match([self.addon.arch]) + """Return arch of the app.""" + return self.sys_arch.match([self.app.arch]) @property def base_image(self) -> str | None: - """Return base image for this add-on, or None to use Dockerfile default.""" + """Return base image for this app, or None to use Dockerfile default.""" # No build config (otherwise default is coerced when reading the config) if not self._build_config.get(ATTR_BUILD_FROM): return None @@ -134,7 +134,7 @@ def base_image(self) -> str | None: # Dict - per-arch base images 
in build config if self.arch not in self._build_config[ATTR_BUILD_FROM]: raise HassioArchNotFound( - f"App {self.addon.slug} is not supported on {self.arch}" + f"App {self.app.slug} is not supported on {self.arch}" ) return self._build_config[ATTR_BUILD_FROM][self.arch] @@ -153,9 +153,9 @@ def get_dockerfile(self) -> Path: Must be run in executor. """ - if self.addon.path_location.joinpath(f"Dockerfile.{self.arch}").exists(): - return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}") - return self.addon.path_location.joinpath("Dockerfile") + if self.app.path_location.joinpath(f"Dockerfile.{self.arch}").exists(): + return self.app.path_location.joinpath(f"Dockerfile.{self.arch}") + return self.app.path_location.joinpath("Dockerfile") async def is_valid(self) -> None: """Return true if the build env is valid.""" @@ -163,21 +163,19 @@ async def is_valid(self) -> None: def build_is_valid() -> bool: return all( [ - self.addon.path_location.is_dir(), + self.app.path_location.is_dir(), self.get_dockerfile().is_file(), ] ) try: if not await self.sys_run_in_executor(build_is_valid): - raise AddonBuildDockerfileMissingError( - _LOGGER.error, addon=self.addon.slug - ) + raise AppBuildDockerfileMissingError(_LOGGER.error, app=self.app.slug) except HassioArchNotFound: - raise AddonBuildArchitectureNotSupportedError( + raise AppBuildArchitectureNotSupportedError( _LOGGER.error, - addon=self.addon.slug, - addon_arch_list=self.addon.supported_arch, + app=self.app.slug, + app_arch_list=self.app.supported_arch, system_arch_list=[arch.value for arch in self.sys_arch.supported], ) from None @@ -213,7 +211,7 @@ def get_docker_args( self, version: AwesomeVersion, image_tag: str, docker_config_path: Path | None ) -> dict[str, Any]: """Create a dict with Docker run args.""" - dockerfile_path = self.get_dockerfile().relative_to(self.addon.path_location) + dockerfile_path = self.get_dockerfile().relative_to(self.app.path_location) build_cmd = [ "docker", @@ -244,8 +242,8 @@ def 
get_docker_args( if description := self._fix_label("description"): labels[LABEL_DESCRIPTION] = description - if self.addon.url: - labels[LABEL_URL] = self.addon.url + if self.app.url: + labels[LABEL_URL] = self.app.url for key, value in labels.items(): build_cmd.extend(["--label", f"{key}={value}"]) @@ -262,10 +260,8 @@ def get_docker_args( for key, value in build_args.items(): build_cmd.extend(["--build-arg", f"{key}={value}"]) - # The addon path will be mounted from the host system - addon_extern_path = self.sys_config.local_to_extern_path( - self.addon.path_location - ) + # The app path will be mounted from the host system + app_extern_path = self.sys_config.local_to_extern_path(self.app.path_location) mounts = [ DockerMount( @@ -276,7 +272,7 @@ def get_docker_args( ), DockerMount( type=MountType.BIND, - source=addon_extern_path.as_posix(), + source=app_extern_path.as_posix(), target="/addon", read_only=True, ), @@ -304,5 +300,5 @@ def get_docker_args( def _fix_label(self, label_name: str) -> str: """Remove characters they are not supported.""" - label = getattr(self.addon, label_name, "") + label = getattr(self.app, label_name, "") return label.replace("'", "") diff --git a/supervisor/addons/configuration.py b/supervisor/addons/configuration.py index 09594f6c446..080cc9a06fa 100644 --- a/supervisor/addons/configuration.py +++ b/supervisor/addons/configuration.py @@ -1,4 +1,4 @@ -"""Confgiuration Objects for Addon Config.""" +"""Configuration Objects for App Config.""" from dataclasses import dataclass diff --git a/supervisor/addons/const.py b/supervisor/addons/const.py index f96fd33cfa1..ca8c18c177a 100644 --- a/supervisor/addons/const.py +++ b/supervisor/addons/const.py @@ -1,4 +1,4 @@ -"""Add-on static data.""" +"""App static data.""" from datetime import timedelta from enum import StrEnum @@ -6,15 +6,15 @@ from ..jobs.const import JobCondition -class AddonBackupMode(StrEnum): - """Backup mode of an Add-on.""" +class AppBackupMode(StrEnum): + """Backup mode 
of an App.""" HOT = "hot" COLD = "cold" class MappingType(StrEnum): - """Mapping type of an Add-on Folder.""" + """Mapping type of an App Folder.""" DATA = "data" CONFIG = "config" @@ -38,7 +38,7 @@ class MappingType(StrEnum): WATCHDOG_THROTTLE_PERIOD = timedelta(minutes=30) WATCHDOG_THROTTLE_MAX_CALLS = 10 -ADDON_UPDATE_CONDITIONS = [ +APP_UPDATE_CONDITIONS = [ JobCondition.FREE_SPACE, JobCondition.HEALTHY, JobCondition.INTERNET_HOST, diff --git a/supervisor/addons/data.py b/supervisor/addons/data.py index 5d4479faf53..06943ef66de 100644 --- a/supervisor/addons/data.py +++ b/supervisor/addons/data.py @@ -1,4 +1,4 @@ -"""Init file for Supervisor add-on data.""" +"""Init file for Supervisor app data.""" from copy import deepcopy from typing import Any @@ -12,16 +12,16 @@ FILE_HASSIO_ADDONS, ) from ..coresys import CoreSys, CoreSysAttributes -from ..store.addon import AddonStore +from ..store.addon import AppStore from ..utils.common import FileConfiguration -from .addon import Addon +from .addon import App from .validate import SCHEMA_ADDONS_FILE Config = dict[str, Any] -class AddonsData(FileConfiguration, CoreSysAttributes): - """Hold data for installed Add-ons inside Supervisor.""" +class AppsData(FileConfiguration, CoreSysAttributes): + """Hold data for installed Apps inside Supervisor.""" def __init__(self, coresys: CoreSys): """Initialize data holder.""" @@ -30,42 +30,40 @@ def __init__(self, coresys: CoreSys): @property def user(self): - """Return local add-on user data.""" + """Return local app user data.""" return self._data[ATTR_USER] @property def system(self): - """Return local add-on data.""" + """Return local app data.""" return self._data[ATTR_SYSTEM] - async def install(self, addon: AddonStore) -> None: - """Set addon as installed.""" - self.system[addon.slug] = deepcopy(addon.data) - self.user[addon.slug] = { + async def install(self, app: AppStore) -> None: + """Set app as installed.""" + self.system[app.slug] = deepcopy(app.data) + 
self.user[app.slug] = { ATTR_OPTIONS: {}, - ATTR_VERSION: addon.version, - ATTR_IMAGE: addon.image, + ATTR_VERSION: app.version, + ATTR_IMAGE: app.image, } await self.save_data() - async def uninstall(self, addon: Addon) -> None: - """Set add-on as uninstalled.""" - self.system.pop(addon.slug, None) - self.user.pop(addon.slug, None) + async def uninstall(self, app: App) -> None: + """Set app as uninstalled.""" + self.system.pop(app.slug, None) + self.user.pop(app.slug, None) await self.save_data() - async def update(self, addon: AddonStore) -> None: - """Update version of add-on.""" - self.system[addon.slug] = deepcopy(addon.data) - self.user[addon.slug].update( - {ATTR_VERSION: addon.version, ATTR_IMAGE: addon.image} - ) + async def update(self, app: AppStore) -> None: + """Update version of app.""" + self.system[app.slug] = deepcopy(app.data) + self.user[app.slug].update({ATTR_VERSION: app.version, ATTR_IMAGE: app.image}) await self.save_data() async def restore( self, slug: str, user: Config, system: Config, image: str ) -> None: - """Restore data to add-on.""" + """Restore data to app.""" self.user[slug] = deepcopy(user) self.system[slug] = deepcopy(system) diff --git a/supervisor/addons/manager.py b/supervisor/addons/manager.py index c69436772e8..e0acf981b60 100644 --- a/supervisor/addons/manager.py +++ b/supervisor/addons/manager.py @@ -1,4 +1,4 @@ -"""Supervisor add-on manager.""" +"""Supervisor app manager.""" import asyncio from collections.abc import Awaitable @@ -9,12 +9,12 @@ from attr import evolve from securetar import SecureTarFile -from ..const import AddonBoot, AddonStartup, AddonState +from ..const import AppBoot, AppStartup, AppState from ..coresys import CoreSys, CoreSysAttributes from ..exceptions import ( - AddonNotSupportedError, - AddonsError, - AddonsJobError, + AppNotSupportedError, + AppsError, + AppsJobError, CoreDNSError, DockerError, HassioError, @@ -23,60 +23,60 @@ from ..jobs.const import JobConcurrency from ..jobs.decorator import 
Job, JobCondition from ..resolution.const import ContextType, IssueType, SuggestionType, UnhealthyReason -from ..store.addon import AddonStore +from ..store.addon import AppStore from ..utils.sentry import async_capture_exception -from .addon import Addon -from .const import ADDON_UPDATE_CONDITIONS -from .data import AddonsData +from .addon import App +from .const import APP_UPDATE_CONDITIONS +from .data import AppsData _LOGGER: logging.Logger = logging.getLogger(__name__) -AnyAddon = Union[Addon, AddonStore] +AnyApp = Union[App, AppStore] -class AddonManager(CoreSysAttributes): - """Manage add-ons inside Supervisor.""" +class AppManager(CoreSysAttributes): + """Manage apps inside Supervisor.""" def __init__(self, coresys: CoreSys): """Initialize Docker base wrapper.""" self.coresys: CoreSys = coresys - self.data: AddonsData = AddonsData(coresys) - self.local: dict[str, Addon] = {} - self.store: dict[str, AddonStore] = {} + self.data: AppsData = AppsData(coresys) + self.local: dict[str, App] = {} + self.store: dict[str, AppStore] = {} @property - def all(self) -> list[AnyAddon]: - """Return a list of all add-ons.""" - addons: dict[str, AnyAddon] = {**self.store, **self.local} - return list(addons.values()) + def all(self) -> list[AnyApp]: + """Return a list of all apps.""" + apps: dict[str, AnyApp] = {**self.store, **self.local} + return list(apps.values()) @property - def installed(self) -> list[Addon]: - """Return a list of all installed add-ons.""" + def installed(self) -> list[App]: + """Return a list of all installed apps.""" return list(self.local.values()) - def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None: - """Return an add-on from slug. + def get(self, app_slug: str, local_only: bool = False) -> AnyApp | None: + """Return an app from slug. 
Prio: 1 - Local 2 - Store """ - if addon_slug in self.local: - return self.local[addon_slug] + if app_slug in self.local: + return self.local[app_slug] if not local_only: - return self.store.get(addon_slug) + return self.store.get(app_slug) return None - def get_local_only(self, addon_slug: str) -> Addon | None: - """Return an installed add-on from slug.""" - return self.local.get(addon_slug) + def get_local_only(self, app_slug: str) -> App | None: + """Return an installed app from slug.""" + return self.local.get(app_slug) - def from_token(self, token: str) -> Addon | None: - """Return an add-on from Supervisor token.""" - for addon in self.installed: - if token == addon.supervisor_token: - return addon + def from_token(self, token: str) -> App | None: + """Return an app from Supervisor token.""" + for app in self.installed: + if token == app.supervisor_token: + return app return None async def load_config(self) -> Self: @@ -85,16 +85,16 @@ async def load_config(self) -> Self: return self async def load(self) -> None: - """Start up add-on management.""" - # Refresh cache for all store addons + """Start up app management.""" + # Refresh cache for all store apps tasks: list[Awaitable[None]] = [ store.refresh_path_cache() for store in self.store.values() ] - # Load all installed addons + # Load all installed apps for slug in self.data.system: - addon = self.local[slug] = Addon(self.coresys, slug) - tasks.append(addon.load()) + app = self.local[slug] = App(self.coresys, slug) + tasks.append(app.load()) # Run initial tasks _LOGGER.info("Found %d installed apps", len(self.data.system)) @@ -104,42 +104,42 @@ async def load(self) -> None: # Sync DNS await self.sync_dns() - async def boot(self, stage: AddonStartup) -> None: - """Boot add-ons with mode auto.""" - tasks: list[Addon] = [] - for addon in self.installed: - if addon.boot != AddonBoot.AUTO or addon.startup != stage: + async def boot(self, stage: AppStartup) -> None: + """Boot apps with mode auto.""" + tasks: 
list[App] = [] + for app in self.installed: + if app.boot != AppBoot.AUTO or app.startup != stage: continue if ( - addon.host_network + app.host_network and UnhealthyReason.DOCKER_GATEWAY_UNPROTECTED in self.sys_resolution.unhealthy ): _LOGGER.warning( "Skipping boot of app %s because gateway firewall" " rules are not active", - addon.slug, + app.slug, ) continue - tasks.append(addon) + tasks.append(app) - # Evaluate add-ons which need to be started + # Evaluate apps which need to be started _LOGGER.info("Phase '%s' starting %d apps", stage, len(tasks)) if not tasks: return - # Start Add-ons sequential + # Start Apps sequential # avoid issue on slow IO - # Config.wait_boot is deprecated. Until addons update with healthchecks, + # Config.wait_boot is deprecated. Until apps update with healthchecks, # add a sleep task for it to keep the same minimum amount of wait time wait_boot: list[Awaitable[None]] = [asyncio.sleep(self.sys_config.wait_boot)] - for addon in tasks: + for app in tasks: try: - if start_task := await addon.start(): + if start_task := await app.start(): wait_boot.append(start_task) except HassioError: self.sys_resolution.add_issue( - evolve(addon.boot_failed_issue), + evolve(app.boot_failed_issue), suggestions=[ SuggestionType.EXECUTE_START, SuggestionType.DISABLE_BOOT, @@ -148,50 +148,50 @@ async def boot(self, stage: AddonStartup) -> None: else: continue - _LOGGER.warning("Can't start app %s", addon.slug) + _LOGGER.warning("Can't start app %s", app.slug) - # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere + # Ignore exceptions from waiting for app startup, app errors handled elsewhere await asyncio.gather(*wait_boot, return_exceptions=True) - # After waiting for startup, create an issue for boot addons that are error or unknown state - # Ignore stopped as single shot addons can be run at boot and this is successful exit - # Timeout waiting for startup is not a failure, addon is probably just slow - for addon in tasks: 
- if addon.state in {AddonState.ERROR, AddonState.UNKNOWN}: + # After waiting for startup, create an issue for boot apps that are error or unknown state + # Ignore stopped as single shot apps can be run at boot and this is successful exit + # Timeout waiting for startup is not a failure, app is probably just slow + for app in tasks: + if app.state in {AppState.ERROR, AppState.UNKNOWN}: self.sys_resolution.add_issue( - evolve(addon.boot_failed_issue), + evolve(app.boot_failed_issue), suggestions=[ SuggestionType.EXECUTE_START, SuggestionType.DISABLE_BOOT, ], ) - async def shutdown(self, stage: AddonStartup) -> None: - """Shutdown addons.""" - tasks: list[Addon] = [] - for addon in self.installed: - if addon.state != AddonState.STARTED or addon.startup != stage: + async def shutdown(self, stage: AppStartup) -> None: + """Shutdown apps.""" + tasks: list[App] = [] + for app in self.installed: + if app.state != AppState.STARTED or app.startup != stage: continue - tasks.append(addon) + tasks.append(app) - # Evaluate add-ons which need to be stopped + # Evaluate apps which need to be stopped _LOGGER.info("Phase '%s' stopping %d apps", stage, len(tasks)) if not tasks: return - # Stop Add-ons sequential + # Stop Apps sequential # avoid issue on slow IO - for addon in tasks: + for app in tasks: try: - await addon.stop() + await app.stop() except Exception as err: # pylint: disable=broad-except - _LOGGER.warning("Can't stop app %s: %s", addon.slug, err) + _LOGGER.warning("Can't stop app %s: %s", app.slug, err) await async_capture_exception(err) @Job( name="addon_manager_install", - conditions=ADDON_UPDATE_CONDITIONS, - on_condition=AddonsJobError, + conditions=APP_UPDATE_CONDITIONS, + on_condition=AppsJobError, concurrency=JobConcurrency.QUEUE, child_job_syncs=[ ChildJobSyncFilter("docker_interface_install", progress_allocation=1.0) @@ -200,15 +200,15 @@ async def shutdown(self, stage: AddonStartup) -> None: async def install( self, slug: str, *, validation_complete: 
asyncio.Event | None = None ) -> None: - """Install an add-on.""" + """Install an app.""" self.sys_jobs.current.reference = slug if slug in self.local: - raise AddonsError(f"App {slug} is already installed", _LOGGER.warning) + raise AppsError(f"App {slug} is already installed", _LOGGER.warning) store = self.store.get(slug) if not store: - raise AddonsError(f"App {slug} does not exist", _LOGGER.error) + raise AppsError(f"App {slug} does not exist", _LOGGER.error) store.validate_availability() @@ -216,22 +216,22 @@ async def install( if validation_complete: validation_complete.set() - await Addon(self.coresys, slug).install() + await App(self.coresys, slug).install() _LOGGER.info("App '%s' successfully installed", slug) @Job(name="addon_manager_uninstall") async def uninstall(self, slug: str, *, remove_config: bool = False) -> None: - """Remove an add-on.""" + """Remove an app.""" if slug not in self.local: _LOGGER.warning("App %s is not installed", slug) return shared_image = any( - self.local[slug].image == addon.image - and self.local[slug].version == addon.version - for addon in self.installed - if addon.slug != slug + self.local[slug].image == app.image + and self.local[slug].version == app.version + for app in self.installed + if app.slug != slug ) await self.local[slug].uninstall( remove_config=remove_config, remove_image=not shared_image @@ -241,12 +241,12 @@ async def uninstall(self, slug: str, *, remove_config: bool = False) -> None: @Job( name="addon_manager_update", - conditions=ADDON_UPDATE_CONDITIONS, - on_condition=AddonsJobError, + conditions=APP_UPDATE_CONDITIONS, + on_condition=AppsJobError, # We assume for now the docker image pull is 100% of this task for progress # allocation. But from a user perspective that isn't true. 
Other steps # that take time which is not accounted for in progress include: - # partial backup, image cleanup, apparmor update, and addon restart + # partial backup, image cleanup, apparmor update, and app restart child_job_syncs=[ ChildJobSyncFilter("docker_interface_install", progress_allocation=1.0) ], @@ -258,25 +258,23 @@ async def update( *, validation_complete: asyncio.Event | None = None, ) -> asyncio.Task | None: - """Update add-on. + """Update app. - Returns a Task that completes when addon has state 'started' (see addon.start) - if addon is started after update. Else nothing is returned. + Returns a Task that completes when app has state 'started' (see app.start) + if app is started after update. Else nothing is returned. """ self.sys_jobs.current.reference = slug if slug not in self.local: - raise AddonsError(f"App {slug} is not installed", _LOGGER.error) - addon = self.local[slug] + raise AppsError(f"App {slug} is not installed", _LOGGER.error) + app = self.local[slug] - if addon.is_detached: - raise AddonsError( - f"App {slug} is not available inside store", _LOGGER.error - ) + if app.is_detached: + raise AppsError(f"App {slug} is not available inside store", _LOGGER.error) store = self.store[slug] - if addon.version == store.version: - raise AddonsError(f"No update available for app {slug}", _LOGGER.warning) + if app.version == store.version: + raise AppsError(f"No update available for app {slug}", _LOGGER.warning) # Check if available, Maybe something have changed store.validate_availability() @@ -287,12 +285,12 @@ async def update( if backup: await self.sys_backups.do_backup_partial( - name=f"addon_{addon.slug}_{addon.version}", + name=f"addon_{app.slug}_{app.version}", homeassistant=False, - addons=[addon.slug], + apps=[app.slug], ) - task = await addon.update() + task = await app.update() _LOGGER.info("App '%s' successfully updated", slug) return task @@ -304,37 +302,35 @@ async def update( JobCondition.INTERNET_HOST, JobCondition.HEALTHY, ], - 
on_condition=AddonsJobError, + on_condition=AppsJobError, ) async def rebuild(self, slug: str, *, force: bool = False) -> asyncio.Task | None: - """Perform a rebuild of local build add-on. + """Perform a rebuild of local build app. - Returns a Task that completes when addon has state 'started' (see addon.start) - if addon is started after rebuild. Else nothing is returned. + Returns a Task that completes when app has state 'started' (see app.start) + if app is started after rebuild. Else nothing is returned. """ self.sys_jobs.current.reference = slug if slug not in self.local: - raise AddonsError(f"App {slug} is not installed", _LOGGER.error) - addon = self.local[slug] + raise AppsError(f"App {slug} is not installed", _LOGGER.error) + app = self.local[slug] - if addon.is_detached: - raise AddonsError( - f"App {slug} is not available inside store", _LOGGER.error - ) + if app.is_detached: + raise AppsError(f"App {slug} is not available inside store", _LOGGER.error) store = self.store[slug] # Check if a rebuild is possible now - if addon.version != store.version: - raise AddonsError( + if app.version != store.version: + raise AppsError( "Version changed, use Update instead Rebuild", _LOGGER.error ) - if not force and not addon.need_build: - raise AddonNotSupportedError( + if not force and not app.need_build: + raise AppNotSupportedError( "Can't rebuild an image-based app", _LOGGER.error ) - return await addon.rebuild() + return await app.rebuild() @Job( name="addon_manager_restore", @@ -343,36 +339,36 @@ async def rebuild(self, slug: str, *, force: bool = False) -> asyncio.Task | Non JobCondition.INTERNET_HOST, JobCondition.HEALTHY, ], - on_condition=AddonsJobError, + on_condition=AppsJobError, ) async def restore(self, slug: str, tar_file: SecureTarFile) -> asyncio.Task | None: - """Restore state of an add-on. + """Restore state of an app. - Returns a Task that completes when addon has state 'started' (see addon.start) - if addon is started after restore. 
Else nothing is returned. + Returns a Task that completes when app has state 'started' (see app.start) + if app is started after restore. Else nothing is returned. """ self.sys_jobs.current.reference = slug if slug not in self.local: _LOGGER.debug("App %s is not locally available for restore", slug) - addon = Addon(self.coresys, slug) + app = App(self.coresys, slug) had_ingress: bool | None = False else: _LOGGER.debug("App %s is locally available for restore", slug) - addon = self.local[slug] - had_ingress = addon.ingress_panel + app = self.local[slug] + had_ingress = app.ingress_panel - wait_for_start = await addon.restore(tar_file) + wait_for_start = await app.restore(tar_file) # Check if new if slug not in self.local: _LOGGER.info("Detected new app after restore: %s", slug) - self.local[slug] = addon + self.local[slug] = app # Update ingress - if had_ingress != addon.ingress_panel: + if had_ingress != app.ingress_panel: await self.sys_ingress.reload() - await self.sys_ingress.update_hass_panel(addon) + await self.sys_ingress.update_hass_panel(app) return wait_for_start @@ -381,60 +377,60 @@ async def restore(self, slug: str, tar_file: SecureTarFile) -> asyncio.Task | No conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST], ) async def repair(self) -> None: - """Repair local add-ons.""" - needs_repair: list[Addon] = [] + """Repair local apps.""" + needs_repair: list[App] = [] - # Evaluate Add-ons to repair - for addon in self.installed: - if await addon.instance.exists(): + # Evaluate Apps to repair + for app in self.installed: + if await app.instance.exists(): continue - needs_repair.append(addon) + needs_repair.append(app) _LOGGER.info("Found %d apps to repair", len(needs_repair)) if not needs_repair: return - for addon in needs_repair: - _LOGGER.info("Repairing for app: %s", addon.slug) + for app in needs_repair: + _LOGGER.info("Repairing for app: %s", app.slug) with suppress(DockerError, KeyError): # Need pull a image again - if not 
addon.need_build: - await addon.instance.install(addon.version, addon.image) + if not app.need_build: + await app.instance.install(app.version, app.image) continue # Need local lookup - if addon.need_build and not addon.is_detached: - store = self.store[addon.slug] - # If this add-on is available for rebuild - if addon.version == store.version: - await addon.instance.install(addon.version, addon.image) + if app.need_build and not app.is_detached: + store = self.store[app.slug] + # If this app is available for rebuild + if app.version == store.version: + await app.instance.install(app.version, app.image) continue - _LOGGER.error("Can't repair %s", addon.slug) - with suppress(AddonsError): - await self.uninstall(addon.slug) + _LOGGER.error("Can't repair %s", app.slug) + with suppress(AppsError): + await self.uninstall(app.slug) async def sync_dns(self) -> None: - """Sync add-ons DNS names.""" + """Sync apps DNS names.""" # Update hosts add_host_coros: list[Awaitable[None]] = [] - for addon in self.installed: + for app in self.installed: try: - if not await addon.instance.is_running(): + if not await app.instance.is_running(): continue except DockerError as err: - _LOGGER.warning("App %s is corrupt: %s", addon.slug, err) + _LOGGER.warning("App %s is corrupt: %s", app.slug, err) self.sys_resolution.create_issue( IssueType.CORRUPT_DOCKER, ContextType.ADDON, - reference=addon.slug, + reference=app.slug, suggestions=[SuggestionType.EXECUTE_REPAIR], ) await async_capture_exception(err) else: add_host_coros.append( self.sys_plugins.dns.add_host( - ipv4=addon.ip_address, names=[addon.hostname], write=False + ipv4=app.ip_address, names=[app.hostname], write=False ) ) diff --git a/supervisor/addons/model.py b/supervisor/addons/model.py index c6ca928dacd..6ada4924e96 100644 --- a/supervisor/addons/model.py +++ b/supervisor/addons/model.py @@ -1,4 +1,4 @@ -"""Init file for Supervisor add-ons.""" +"""Init file for Supervisor apps.""" from abc import ABC, abstractmethod from 
collections import defaultdict @@ -82,19 +82,19 @@ SECURITY_DEFAULT, SECURITY_DISABLE, SECURITY_PROFILE, - AddonBoot, - AddonBootConfig, - AddonStage, - AddonStartup, + AppBoot, + AppBootConfig, + AppStage, + AppStartup, CpuArch, ) from ..coresys import CoreSys from ..docker.const import Capabilities from ..exceptions import ( - AddonNotSupportedArchitectureError, - AddonNotSupportedError, - AddonNotSupportedHomeAssistantVersionError, - AddonNotSupportedMachineTypeError, + AppNotSupportedArchitectureError, + AppNotSupportedError, + AppNotSupportedHomeAssistantVersionError, + AppNotSupportedMachineTypeError, HassioArchNotFound, ) from ..jobs.const import JOB_GROUP_ADDON @@ -107,10 +107,10 @@ ATTR_BREAKING_VERSIONS, ATTR_PATH, ATTR_READ_ONLY, - AddonBackupMode, + AppBackupMode, MappingType, ) -from .options import AddonOptions, UiOptions +from .options import AppOptions, UiOptions from .validate import RE_SERVICE _LOGGER: logging.Logger = logging.getLogger(__name__) @@ -118,8 +118,8 @@ Data = dict[str, Any] -class AddonModel(JobGroup, ABC): - """Add-on Data layout.""" +class AppModel(JobGroup, ABC): + """App Data layout.""" def __init__(self, coresys: CoreSys, slug: str): """Initialize data holder.""" @@ -135,21 +135,21 @@ def __init__(self, coresys: CoreSys, slug: str): @property @abstractmethod def data(self) -> Data: - """Return add-on config/data.""" + """Return app config/data.""" @property @abstractmethod def is_installed(self) -> bool: - """Return True if an add-on is installed.""" + """Return True if an app is installed.""" @property @abstractmethod def is_detached(self) -> bool: - """Return True if add-on is detached.""" + """Return True if app is detached.""" @property def available(self) -> bool: - """Return True if this add-on is available on this platform.""" + """Return True if this app is available on this platform.""" return self._available(self.data) @property @@ -158,14 +158,14 @@ def options(self) -> dict[str, Any]: return self.data[ATTR_OPTIONS] 
@property - def boot_config(self) -> AddonBootConfig: + def boot_config(self) -> AppBootConfig: """Return boot config.""" return self.data[ATTR_BOOT] @property - def boot(self) -> AddonBoot: + def boot(self) -> AppBoot: """Return boot config with prio local settings unless config is forced.""" - return AddonBoot(self.data[ATTR_BOOT]) + return AppBoot(self.data[ATTR_BOOT]) @property def auto_update(self) -> bool | None: @@ -174,27 +174,27 @@ def auto_update(self) -> bool | None: @property def name(self) -> str: - """Return name of add-on.""" + """Return name of app.""" return self.data[ATTR_NAME] @property def hostname(self) -> str: - """Return slug/id of add-on.""" + """Return slug/id of app.""" return self.slug.replace("_", "-") @property def dns(self) -> list[str]: - """Return list of DNS name for that add-on.""" + """Return list of DNS name for that app.""" return [] @property def timeout(self) -> int: - """Return timeout of addon for docker stop.""" + """Return timeout of app for docker stop.""" return self.data[ATTR_TIMEOUT] @property def uuid(self) -> str | None: - """Return an API token for this add-on.""" + """Return an API token for this app.""" return None @property @@ -214,22 +214,22 @@ def ingress_entry(self) -> str | None: @property def description(self) -> str: - """Return description of add-on.""" + """Return description of app.""" return self.data[ATTR_DESCRIPTON] @property def repository(self) -> str: - """Return repository of add-on.""" + """Return repository of app.""" return self.data[ATTR_REPOSITORY] @property def translations(self) -> dict: - """Return add-on translations.""" + """Return app translations.""" return self.data[ATTR_TRANSLATIONS] @property def latest_version(self) -> AwesomeVersion: - """Return latest version of add-on.""" + """Return latest version of app.""" return self.data[ATTR_VERSION] @property @@ -239,17 +239,17 @@ def latest_version_timestamp(self) -> datetime: @property def version(self) -> AwesomeVersion: - """Return 
version of add-on.""" + """Return version of app.""" return self.data[ATTR_VERSION] @property def protected(self) -> bool: - """Return if add-on is in protected mode.""" + """Return if app is in protected mode.""" return True @property - def startup(self) -> AddonStartup: - """Return startup type of add-on.""" + def startup(self) -> AppStartup: + """Return startup type of app.""" return self.data[ATTR_STARTUP] @property @@ -260,8 +260,8 @@ def advanced(self) -> bool: return False @property - def stage(self) -> AddonStage: - """Return stage mode of add-on.""" + def stage(self) -> AppStage: + """Return stage mode of app.""" return self.data[ATTR_STAGE] @property @@ -289,7 +289,7 @@ def ports_description(self) -> dict[str, str] | None: @property def ports(self) -> dict[str, int | None] | None: - """Return ports of add-on.""" + """Return ports of app.""" return self.data.get(ATTR_PORTS) @property @@ -329,37 +329,37 @@ def panel_admin(self) -> bool: @property def host_network(self) -> bool: - """Return True if add-on run on host network.""" + """Return True if app run on host network.""" return self.data[ATTR_HOST_NETWORK] @property def host_pid(self) -> bool: - """Return True if add-on run on host PID namespace.""" + """Return True if app run on host PID namespace.""" return self.data[ATTR_HOST_PID] @property def host_ipc(self) -> bool: - """Return True if add-on run on host IPC namespace.""" + """Return True if app run on host IPC namespace.""" return self.data[ATTR_HOST_IPC] @property def host_uts(self) -> bool: - """Return True if add-on run on host UTS namespace.""" + """Return True if app run on host UTS namespace.""" return self.data[ATTR_HOST_UTS] @property def host_dbus(self) -> bool: - """Return True if add-on run on host D-BUS.""" + """Return True if app run on host D-BUS.""" return self.data[ATTR_HOST_DBUS] @property def static_devices(self) -> list[Path]: - """Return static devices of add-on.""" + """Return static devices of app.""" return [Path(node) for 
node in self.data.get(ATTR_DEVICES, [])] @property def environment(self) -> dict[str, str] | None: - """Return environment of add-on.""" + """Return environment of app.""" return self.data.get(ATTR_ENVIRONMENT) @property @@ -378,22 +378,22 @@ def apparmor(self) -> str: @property def legacy(self) -> bool: - """Return if the add-on don't support Home Assistant labels.""" + """Return if the app don't support Home Assistant labels.""" return self.data[ATTR_LEGACY] @property def access_docker_api(self) -> bool: - """Return if the add-on need read-only Docker API access.""" + """Return if the app need read-only Docker API access.""" return self.data[ATTR_DOCKER_API] @property def access_hassio_api(self) -> bool: - """Return True if the add-on access to Supervisor REASTful API.""" + """Return True if the app access to Supervisor REASTful API.""" return self.data[ATTR_HASSIO_API] @property def access_homeassistant_api(self) -> bool: - """Return True if the add-on access to Home Assistant API proxy.""" + """Return True if the app access to Home Assistant API proxy.""" return self.data[ATTR_HOMEASSISTANT_API] @property @@ -417,28 +417,28 @@ def backup_post(self) -> str | None: return self.data.get(ATTR_BACKUP_POST) @property - def backup_mode(self) -> AddonBackupMode: + def backup_mode(self) -> AppBackupMode: """Return if backup is hot/cold.""" return self.data[ATTR_BACKUP] @property def default_init(self) -> bool: - """Return True if the add-on have no own init.""" + """Return True if the app have no own init.""" return self.data[ATTR_INIT] @property def with_stdin(self) -> bool: - """Return True if the add-on access use stdin input.""" + """Return True if the app access use stdin input.""" return self.data[ATTR_STDIN] @property def with_ingress(self) -> bool: - """Return True if the add-on access support ingress.""" + """Return True if the app access support ingress.""" return self.data[ATTR_INGRESS] @property def ingress_panel(self) -> bool | None: - """Return True if the 
add-on access support ingress.""" + """Return True if the app access support ingress.""" return None @property @@ -448,12 +448,12 @@ def ingress_stream(self) -> bool: @property def with_gpio(self) -> bool: - """Return True if the add-on access to GPIO interface.""" + """Return True if the app access to GPIO interface.""" return self.data[ATTR_GPIO] @property def with_usb(self) -> bool: - """Return True if the add-on need USB access.""" + """Return True if the app need USB access.""" return self.data[ATTR_USB] @property @@ -463,7 +463,7 @@ def with_uart(self) -> bool: @property def with_udev(self) -> bool: - """Return True if the add-on have his own udev.""" + """Return True if the app have his own udev.""" return self.data[ATTR_UDEV] @property @@ -473,52 +473,52 @@ def ulimits(self) -> dict[str, Any]: @property def with_kernel_modules(self) -> bool: - """Return True if the add-on access to kernel modules.""" + """Return True if the app access to kernel modules.""" return self.data[ATTR_KERNEL_MODULES] @property def with_realtime(self) -> bool: - """Return True if the add-on need realtime schedule functions.""" + """Return True if the app need realtime schedule functions.""" return self.data[ATTR_REALTIME] @property def with_full_access(self) -> bool: - """Return True if the add-on want full access to hardware.""" + """Return True if the app want full access to hardware.""" return self.data[ATTR_FULL_ACCESS] @property def with_devicetree(self) -> bool: - """Return True if the add-on read access to devicetree.""" + """Return True if the app read access to devicetree.""" return self.data[ATTR_DEVICETREE] @property def with_tmpfs(self) -> bool: - """Return if tmp is in memory of add-on.""" + """Return if tmp is in memory of app.""" return self.data[ATTR_TMPFS] @property def access_auth_api(self) -> bool: - """Return True if the add-on access to login/auth backend.""" + """Return True if the app access to login/auth backend.""" return self.data[ATTR_AUTH_API] @property 
def with_audio(self) -> bool: - """Return True if the add-on access to audio.""" + """Return True if the app access to audio.""" return self.data[ATTR_AUDIO] @property def with_video(self) -> bool: - """Return True if the add-on access to video.""" + """Return True if the app access to video.""" return self.data[ATTR_VIDEO] @property def homeassistant_version(self) -> AwesomeVersion | None: - """Return min Home Assistant version they needed by Add-on.""" + """Return min Home Assistant version they needed by App.""" return self.data.get(ATTR_HOMEASSISTANT) @property def url(self) -> str | None: - """Return URL of add-on.""" + """Return URL of app.""" return self.data.get(ATTR_URL) @property @@ -548,17 +548,17 @@ def supported_arch(self) -> list[str]: @property def has_deprecated_arch(self) -> bool: - """Return True if add-on includes deprecated architectures.""" + """Return True if app includes deprecated architectures.""" return any(arch in ARCH_DEPRECATED for arch in self.supported_arch) @property def has_supported_arch(self) -> bool: - """Return True if add-on supports any architecture on this system.""" + """Return True if app supports any architecture on this system.""" return self.sys_arch.is_supported(self.supported_arch) @property def has_deprecated_machine(self) -> bool: - """Return True if add-on includes deprecated machine entries.""" + """Return True if app includes deprecated machine entries.""" return any( machine.lstrip("!") in MACHINE_DEPRECATED for machine in self.supported_machine @@ -566,7 +566,7 @@ def has_deprecated_machine(self) -> bool: @property def has_supported_machine(self) -> bool: - """Return True if add-on supports this machine.""" + """Return True if app supports this machine.""" if not (machine_types := self.supported_machine): return True @@ -582,7 +582,7 @@ def supported_machine(self) -> list[str]: @property def arch(self) -> CpuArch: - """Return architecture to use for the addon's image.""" + """Return architecture to use for the 
app's image.""" return self.sys_arch.match(self.data[ATTR_ARCH]) @property @@ -592,12 +592,12 @@ def image(self) -> str | None: @property def need_build(self) -> bool: - """Return True if this add-on need a local build.""" + """Return True if this app need a local build.""" return ATTR_IMAGE not in self.data @property def map_volumes(self) -> dict[MappingType, FolderMapping]: - """Return a dict of {MappingType: FolderMapping} from add-on.""" + """Return a dict of {MappingType: FolderMapping} from app.""" volumes = {} for volume in self.data[ATTR_MAP]: volumes[MappingType(volume[ATTR_TYPE])] = FolderMapping( @@ -608,27 +608,27 @@ def map_volumes(self) -> dict[MappingType, FolderMapping]: @property def path_location(self) -> Path: - """Return path to this add-on.""" + """Return path to this app.""" return Path(self.data[ATTR_LOCATION]) @property def path_icon(self) -> Path: - """Return path to add-on icon.""" + """Return path to app icon.""" return Path(self.path_location, "icon.png") @property def path_logo(self) -> Path: - """Return path to add-on logo.""" + """Return path to app logo.""" return Path(self.path_location, "logo.png") @property def path_changelog(self) -> Path: - """Return path to add-on changelog.""" + """Return path to app changelog.""" return Path(self.path_location, "CHANGELOG.md") @property def path_documentation(self) -> Path: - """Return path to add-on changelog.""" + """Return path to app changelog.""" return Path(self.path_location, "DOCS.md") @property @@ -637,17 +637,17 @@ def path_apparmor(self) -> Path: return Path(self.path_location, "apparmor.txt") @property - def schema(self) -> AddonOptions: - """Return Addon options validation object.""" + def schema(self) -> AppOptions: + """Return App options validation object.""" raw_schema = self.data[ATTR_SCHEMA] if isinstance(raw_schema, bool): raw_schema = {} - return AddonOptions(self.coresys, raw_schema, self.name, self.slug) + return AppOptions(self.coresys, raw_schema, self.name, 
self.slug) @property def schema_ui(self) -> list[dict[Any, Any]] | None: - """Create a UI schema for add-on options.""" + """Create a UI schema for app options.""" raw_schema = self.data[ATTR_SCHEMA] if isinstance(raw_schema, bool): @@ -656,7 +656,7 @@ def schema_ui(self) -> list[dict[Any, Any]] | None: @property def with_journald(self) -> bool: - """Return True if the add-on accesses the system journal.""" + """Return True if the app accesses the system journal.""" return self.data[ATTR_JOURNALD] @property @@ -666,7 +666,7 @@ def signed(self) -> bool: @property def breaking_versions(self) -> list[AwesomeVersion]: - """Return breaking versions of addon.""" + """Return breaking versions of app.""" return self.data[ATTR_BREAKING_VERSIONS] async def long_description(self) -> str | None: @@ -696,26 +696,26 @@ def check_paths(): return self.sys_run_in_executor(check_paths) def validate_availability(self) -> None: - """Validate if addon is available for current system.""" + """Validate if app is available for current system.""" return self._validate_availability(self.data, logger=_LOGGER.error) def __eq__(self, other: Any) -> bool: - """Compare add-on objects.""" - if not isinstance(other, AddonModel): + """Compare app objects.""" + if not isinstance(other, AppModel): return False return self.slug == other.slug def __hash__(self) -> int: - """Hash for add-on objects.""" + """Hash for app objects.""" return hash(self.slug) def _validate_availability( self, config, *, logger: Callable[..., None] | None = None ) -> None: - """Validate if addon is available for current system.""" + """Validate if app is available for current system.""" # Architecture if not self.sys_arch.is_supported(config[ATTR_ARCH]): - raise AddonNotSupportedArchitectureError( + raise AppNotSupportedArchitectureError( logger, slug=self.slug, architectures=config[ATTR_ARCH] ) @@ -724,7 +724,7 @@ def _validate_availability( if machine and ( f"!{self.sys_machine}" in machine or self.sys_machine not in 
machine ): - raise AddonNotSupportedMachineTypeError( + raise AppNotSupportedMachineTypeError( logger, slug=self.slug, machine_types=machine ) @@ -734,15 +734,15 @@ def _validate_availability( if version and not version_is_new_enough( self.sys_homeassistant.version, version ): - raise AddonNotSupportedHomeAssistantVersionError( + raise AppNotSupportedHomeAssistantVersionError( logger, slug=self.slug, version=str(version) ) def _available(self, config) -> bool: - """Return True if this add-on is available on this platform.""" + """Return True if this app is available on this platform.""" try: self._validate_availability(config) - except AddonNotSupportedError: + except AppNotSupportedError: return False return True diff --git a/supervisor/addons/options.py b/supervisor/addons/options.py index 46309c01537..25bb4c790fa 100644 --- a/supervisor/addons/options.py +++ b/supervisor/addons/options.py @@ -1,4 +1,4 @@ -"""Add-on Options / UI rendering.""" +"""App Options / UI rendering.""" import hashlib import logging @@ -56,8 +56,8 @@ ) -class AddonOptions(CoreSysAttributes): - """Validate Add-ons Options.""" +class AppOptions(CoreSysAttributes): + """Validate Apps Options.""" def __init__( self, coresys: CoreSys, raw_schema: dict[str, Any], name: str, slug: str @@ -72,11 +72,11 @@ def __init__( @property def validate(self) -> vol.Schema: - """Create a schema for add-on options.""" + """Create a schema for app options.""" return vol.Schema(vol.All(dict, self)) def __call__(self, struct: dict[str, Any]) -> dict[str, Any]: - """Create schema validator for add-ons options.""" + """Create schema validator for apps options.""" options = {} # read options @@ -262,7 +262,7 @@ def _check_missing_options( class UiOptions(CoreSysAttributes): - """Render UI Add-ons Options.""" + """Render UI Apps Options.""" def __init__(self, coresys: CoreSys) -> None: """Initialize UI option render.""" diff --git a/supervisor/addons/utils.py b/supervisor/addons/utils.py index 
07b43f0fe2c..11d29fec8f7 100644 --- a/supervisor/addons/utils.py +++ b/supervisor/addons/utils.py @@ -1,4 +1,4 @@ -"""Util add-ons functions.""" +"""Util apps functions.""" from __future__ import annotations @@ -11,12 +11,12 @@ from ..docker.const import Capabilities if TYPE_CHECKING: - from .model import AddonModel + from .model import AppModel _LOGGER: logging.Logger = logging.getLogger(__name__) -def rating_security(addon: AddonModel) -> int: +def rating_security(app: AppModel) -> int: """Return 1-8 for security rating. 1 = not secure @@ -25,25 +25,25 @@ def rating_security(addon: AddonModel) -> int: rating = 5 # AppArmor - if addon.apparmor == SECURITY_DISABLE: + if app.apparmor == SECURITY_DISABLE: rating += -1 - elif addon.apparmor == SECURITY_PROFILE: + elif app.apparmor == SECURITY_PROFILE: rating += 1 # Home Assistant Login & Ingress - if addon.with_ingress: + if app.with_ingress: rating += 2 - elif addon.access_auth_api: + elif app.access_auth_api: rating += 1 # Signed - if addon.signed: + if app.signed: rating += 1 # Privileged options if ( any( - privilege in addon.privileged + privilege in app.privileged for privilege in ( Capabilities.BPF, Capabilities.CHECKPOINT_RESTORE, @@ -57,30 +57,30 @@ def rating_security(addon: AddonModel) -> int: Capabilities.SYS_RAWIO, ) ) - or addon.with_kernel_modules + or app.with_kernel_modules ): rating += -1 # API Supervisor role - if addon.hassio_role == ROLE_MANAGER: + if app.hassio_role == ROLE_MANAGER: rating += -1 - elif addon.hassio_role == ROLE_ADMIN: + elif app.hassio_role == ROLE_ADMIN: rating += -2 # Not secure Networking - if addon.host_network: + if app.host_network: rating += -1 # Insecure PID namespace - if addon.host_pid: + if app.host_pid: rating += -2 # UTS host namespace allows to set hostname only with SYS_ADMIN - if addon.host_uts and Capabilities.SYS_ADMIN in addon.privileged: + if app.host_uts and Capabilities.SYS_ADMIN in app.privileged: rating += -1 # Docker Access & full Access - if 
addon.access_docker_api or addon.with_full_access: + if app.access_docker_api or app.with_full_access: rating = 1 return max(min(8, rating), 1) diff --git a/supervisor/addons/validate.py b/supervisor/addons/validate.py index 208ec41ce6e..5e8057f6a24 100644 --- a/supervisor/addons/validate.py +++ b/supervisor/addons/validate.py @@ -1,4 +1,4 @@ -"""Validate add-ons options schema.""" +"""Validate apps options schema.""" import logging import re @@ -101,11 +101,11 @@ MACHINE_DEPRECATED, ROLE_ALL, ROLE_DEFAULT, - AddonBoot, - AddonBootConfig, - AddonStage, - AddonStartup, - AddonState, + AppBoot, + AppBootConfig, + AppStage, + AppStartup, + AppState, ) from ..docker.const import Capabilities from ..validate import ( @@ -124,7 +124,7 @@ ATTR_PATH, ATTR_READ_ONLY, RE_SLUG, - AddonBackupMode, + AppBackupMode, MappingType, ) from .options import RE_SCHEMA_ELEMENT @@ -186,7 +186,7 @@ RE_SLUG_FIELD = re.compile(r"^" + RE_SLUG + r"$") -def _warn_addon_config(config: dict[str, Any]): +def _warn_app_config(config: dict[str, Any]): """Warn about miss configs.""" name = config.get(ATTR_NAME) if not name: @@ -212,7 +212,7 @@ def _warn_addon_config(config: dict[str, Any]): name, ) - if config.get(ATTR_BACKUP, AddonBackupMode.HOT) == AddonBackupMode.COLD and ( + if config.get(ATTR_BACKUP, AppBackupMode.HOT) == AppBackupMode.COLD and ( config.get(ATTR_BACKUP_POST) or config.get(ATTR_BACKUP_PRE) ): _LOGGER.warning( @@ -249,8 +249,8 @@ def _warn_addon_config(config: dict[str, Any]): return config -def _migrate_addon_config(protocol=False): - """Migrate addon config.""" +def _migrate_app_config(protocol=False): + """Migrate app config.""" def _migrate(config: dict[str, Any]): if not isinstance(config, dict): @@ -269,9 +269,9 @@ def _migrate(config: dict[str, Any]): name, ) if value == "before": - config[ATTR_STARTUP] = AddonStartup.SERVICES + config[ATTR_STARTUP] = AppStartup.SERVICES elif value == "after": - config[ATTR_STARTUP] = AddonStartup.APPLICATION + config[ATTR_STARTUP] = 
AppStartup.APPLICATION # UART 2021-01-20 if "auto_uart" in config: @@ -349,7 +349,7 @@ def _migrate(config: dict[str, Any]): # Always update config to clear potentially malformed ones config[ATTR_MAP] = volumes - # 2023-10 "config" became "homeassistant" so /config can be used for addon's public config + # 2023-10 "config" became "homeassistant" so /config can be used for app's public config if any(volume[ATTR_TYPE] == MappingType.CONFIG for volume in volumes): if any( volume @@ -387,15 +387,13 @@ def _migrate(config: dict[str, Any]): vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL_COMPAT)], vol.Optional(ATTR_MACHINE): vol.All([vol.Match(RE_MACHINE)], vol.Unique()), vol.Optional(ATTR_URL): vol.Url(), - vol.Optional(ATTR_STARTUP, default=AddonStartup.APPLICATION): vol.Coerce( - AddonStartup - ), - vol.Optional(ATTR_BOOT, default=AddonBootConfig.AUTO): vol.Coerce( - AddonBootConfig + vol.Optional(ATTR_STARTUP, default=AppStartup.APPLICATION): vol.Coerce( + AppStartup ), + vol.Optional(ATTR_BOOT, default=AppBootConfig.AUTO): vol.Coerce(AppBootConfig), vol.Optional(ATTR_INIT, default=True): vol.Boolean(), vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(), - vol.Optional(ATTR_STAGE, default=AddonStage.STABLE): vol.Coerce(AddonStage), + vol.Optional(ATTR_STAGE, default=AppStage.STABLE): vol.Coerce(AppStage), vol.Optional(ATTR_PORTS): docker_ports, vol.Optional(ATTR_PORTS_DESCRIPTION): docker_ports_description, vol.Optional(ATTR_WATCHDOG): vol.Match( @@ -455,9 +453,7 @@ def _migrate(config: dict[str, Any]): vol.Optional(ATTR_BACKUP_EXCLUDE): [str], vol.Optional(ATTR_BACKUP_PRE): str, vol.Optional(ATTR_BACKUP_POST): str, - vol.Optional(ATTR_BACKUP, default=AddonBackupMode.HOT): vol.Coerce( - AddonBackupMode - ), + vol.Optional(ATTR_BACKUP, default=AppBackupMode.HOT): vol.Coerce(AppBackupMode), vol.Optional(ATTR_OPTIONS, default={}): dict, vol.Optional(ATTR_SCHEMA, default={}): vol.Any( vol.Schema({str: SCHEMA_ELEMENT}), @@ -488,7 +484,7 @@ def _migrate(config: dict[str, 
Any]): ) SCHEMA_ADDON_CONFIG = vol.All( - _migrate_addon_config(True), _warn_addon_config, _SCHEMA_ADDON_CONFIG + _migrate_app_config(True), _warn_app_config, _SCHEMA_ADDON_CONFIG ) @@ -535,7 +531,7 @@ def _migrate(config: dict[str, Any]): vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): str, vol.Optional(ATTR_OPTIONS, default=dict): dict, vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(), - vol.Optional(ATTR_BOOT): vol.Coerce(AddonBoot), + vol.Optional(ATTR_BOOT): vol.Coerce(AppBoot), vol.Optional(ATTR_NETWORK): docker_ports, vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str), vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str), @@ -549,7 +545,7 @@ def _migrate(config: dict[str, Any]): ) SCHEMA_ADDON_SYSTEM = vol.All( - _migrate_addon_config(), + _migrate_app_config(), _SCHEMA_ADDON_CONFIG.extend( { vol.Required(ATTR_LOCATION): str, @@ -575,7 +571,7 @@ def _migrate(config: dict[str, Any]): { vol.Required(ATTR_USER): SCHEMA_ADDON_USER, vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM, - vol.Required(ATTR_STATE): vol.Coerce(AddonState), + vol.Required(ATTR_STATE): vol.Coerce(AppState), vol.Required(ATTR_VERSION): version_tag, }, extra=vol.REMOVE_EXTRA, diff --git a/supervisor/api/__init__.py b/supervisor/api/__init__.py index f4023766382..fd06def8ed2 100644 --- a/supervisor/api/__init__.py +++ b/supervisor/api/__init__.py @@ -8,11 +8,11 @@ from aiohttp import hdrs, web -from ..const import SUPERVISOR_DOCKER_NAME, AddonState +from ..const import SUPERVISOR_DOCKER_NAME, AppState from ..coresys import CoreSys, CoreSysAttributes -from ..exceptions import APIAddonNotInstalled, HostNotSupportedError +from ..exceptions import APIAppNotInstalled, HostNotSupportedError from ..utils.sentry import async_capture_exception -from .addons import APIAddons +from .addons import APIApps from .audio import APIAudio from .auth import APIAuth from .backups import APIBackups @@ -89,7 +89,7 @@ async def load(self) -> None: """Register REST API Calls.""" 
static_resource_configs: list[StaticResourceConfig] = [] - self._register_addons() + self._register_apps() self._register_audio() self._register_auth() self._register_backups() @@ -563,74 +563,72 @@ def _register_proxy(self) -> None: ] ) - def _register_addons(self) -> None: - """Register Add-on functions.""" - api_addons = APIAddons() - api_addons.coresys = self.coresys + def _register_apps(self) -> None: + """Register App functions.""" + api_apps = APIApps() + api_apps.coresys = self.coresys self.webapp.add_routes( [ - web.get("/addons", api_addons.list_addons), - web.post("/addons/{addon}/uninstall", api_addons.uninstall), - web.post("/addons/{addon}/start", api_addons.start), - web.post("/addons/{addon}/stop", api_addons.stop), - web.post("/addons/{addon}/restart", api_addons.restart), - web.post("/addons/{addon}/options", api_addons.options), - web.post("/addons/{addon}/sys_options", api_addons.sys_options), - web.post( - "/addons/{addon}/options/validate", api_addons.options_validate - ), - web.get("/addons/{addon}/options/config", api_addons.options_config), - web.post("/addons/{addon}/rebuild", api_addons.rebuild), - web.post("/addons/{addon}/stdin", api_addons.stdin), - web.post("/addons/{addon}/security", api_addons.security), - web.get("/addons/{addon}/stats", api_addons.stats), + web.get("/addons", api_apps.list_apps), + web.post("/addons/{app}/uninstall", api_apps.uninstall), + web.post("/addons/{app}/start", api_apps.start), + web.post("/addons/{app}/stop", api_apps.stop), + web.post("/addons/{app}/restart", api_apps.restart), + web.post("/addons/{app}/options", api_apps.options), + web.post("/addons/{app}/sys_options", api_apps.sys_options), + web.post("/addons/{app}/options/validate", api_apps.options_validate), + web.get("/addons/{app}/options/config", api_apps.options_config), + web.post("/addons/{app}/rebuild", api_apps.rebuild), + web.post("/addons/{app}/stdin", api_apps.stdin), + web.post("/addons/{app}/security", api_apps.security), + 
web.get("/addons/{app}/stats", api_apps.stats), ] ) @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT) - async def get_addon_logs(request, *args, **kwargs): - addon = api_addons.get_addon_for_request(request) - kwargs["identifier"] = f"addon_{addon.slug}" + async def get_app_logs(request, *args, **kwargs): + app = api_apps.get_app_for_request(request) + kwargs["identifier"] = f"addon_{app.slug}" return await self._api_host.advanced_logs(request, *args, **kwargs) self.webapp.add_routes( [ - web.get("/addons/{addon}/logs", get_addon_logs), + web.get("/addons/{app}/logs", get_app_logs), web.get( - "/addons/{addon}/logs/follow", - partial(get_addon_logs, follow=True), + "/addons/{app}/logs/follow", + partial(get_app_logs, follow=True), ), web.get( - "/addons/{addon}/logs/latest", - partial(get_addon_logs, latest=True, no_colors=True), + "/addons/{app}/logs/latest", + partial(get_app_logs, latest=True, no_colors=True), ), - web.get("/addons/{addon}/logs/boots/{bootid}", get_addon_logs), + web.get("/addons/{app}/logs/boots/{bootid}", get_app_logs), web.get( - "/addons/{addon}/logs/boots/{bootid}/follow", - partial(get_addon_logs, follow=True), + "/addons/{app}/logs/boots/{bootid}/follow", + partial(get_app_logs, follow=True), ), ] ) - # Legacy routing to support requests for not installed addons + # Legacy routing to support requests for not installed apps api_store = APIStore() api_store.coresys = self.coresys @api_process - async def addons_addon_info(request: web.Request) -> dict[str, Any]: - """Route to store if info requested for not installed addon.""" + async def apps_app_info(request: web.Request) -> dict[str, Any]: + """Route to store if info requested for not installed app.""" try: - return await api_addons.info(request) - except APIAddonNotInstalled: - # Route to store/{addon}/info but add missing fields + return await api_apps.info(request) + except APIAppNotInstalled: + # Route to store/{app}/info but add missing fields return dict( - await 
api_store.addons_addon_info_wrapped(request), - state=AddonState.UNKNOWN, - options=self.sys_addons.store[request.match_info["addon"]].options, + await api_store.apps_app_info_wrapped(request), + state=AppState.UNKNOWN, + options=self.sys_apps.store[request.match_info["app"]].options, ) - self.webapp.add_routes([web.get("/addons/{addon}/info", addons_addon_info)]) + self.webapp.add_routes([web.get("/addons/{app}/info", apps_app_info)]) def _register_ingress(self) -> None: """Register Ingress functions.""" @@ -768,35 +766,31 @@ def _register_store(self) -> None: self.webapp.add_routes( [ web.get("/store", api_store.store_info), - web.get("/store/addons", api_store.addons_list), - web.get("/store/addons/{addon}", api_store.addons_addon_info), - web.get("/store/addons/{addon}/icon", api_store.addons_addon_icon), - web.get("/store/addons/{addon}/logo", api_store.addons_addon_logo), - web.get( - "/store/addons/{addon}/changelog", api_store.addons_addon_changelog - ), + web.get("/store/addons", api_store.apps_list), + web.get("/store/addons/{app}", api_store.apps_app_info), + web.get("/store/addons/{app}/icon", api_store.apps_app_icon), + web.get("/store/addons/{app}/logo", api_store.apps_app_logo), + web.get("/store/addons/{app}/changelog", api_store.apps_app_changelog), web.get( - "/store/addons/{addon}/documentation", - api_store.addons_addon_documentation, + "/store/addons/{app}/documentation", + api_store.apps_app_documentation, ), web.get( - "/store/addons/{addon}/availability", - api_store.addons_addon_availability, - ), - web.post( - "/store/addons/{addon}/install", api_store.addons_addon_install + "/store/addons/{app}/availability", + api_store.apps_app_availability, ), + web.post("/store/addons/{app}/install", api_store.apps_app_install), web.post( - "/store/addons/{addon}/install/{version}", - api_store.addons_addon_install, + "/store/addons/{app}/install/{version}", + api_store.apps_app_install, ), - web.post("/store/addons/{addon}/update", 
api_store.addons_addon_update), + web.post("/store/addons/{app}/update", api_store.apps_app_update), web.post( - "/store/addons/{addon}/update/{version}", - api_store.addons_addon_update, + "/store/addons/{app}/update/{version}", + api_store.apps_app_update, ), # Must be below others since it has a wildcard in resource path - web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info), + web.get("/store/addons/{app}/{version}", api_store.apps_app_info), web.post("/store/reload", api_store.reload), web.get("/store/repositories", api_store.repositories_list), web.get( @@ -818,14 +812,14 @@ def _register_store(self) -> None: self.webapp.add_routes( [ web.post("/addons/reload", api_store.reload), - web.post("/addons/{addon}/install", api_store.addons_addon_install), - web.post("/addons/{addon}/update", api_store.addons_addon_update), - web.get("/addons/{addon}/icon", api_store.addons_addon_icon), - web.get("/addons/{addon}/logo", api_store.addons_addon_logo), - web.get("/addons/{addon}/changelog", api_store.addons_addon_changelog), + web.post("/addons/{app}/install", api_store.apps_app_install), + web.post("/addons/{app}/update", api_store.apps_app_update), + web.get("/addons/{app}/icon", api_store.apps_app_icon), + web.get("/addons/{app}/logo", api_store.apps_app_logo), + web.get("/addons/{app}/changelog", api_store.apps_app_changelog), web.get( - "/addons/{addon}/documentation", - api_store.addons_addon_documentation, + "/addons/{app}/documentation", + api_store.apps_app_documentation, ), ] ) diff --git a/supervisor/api/addons.py b/supervisor/api/addons.py index f8f14192afe..903f972d77d 100644 --- a/supervisor/api/addons.py +++ b/supervisor/api/addons.py @@ -9,12 +9,12 @@ import voluptuous as vol from voluptuous.humanize import humanize_error -from ..addons.addon import Addon +from ..addons.addon import App from ..addons.utils import rating_security from ..const import ( - ATTR_ADDONS, ATTR_ADVANCED, ATTR_APPARMOR, + ATTR_APPS, ATTR_ARCH, ATTR_AUDIO, 
ATTR_AUDIO_INPUT, @@ -94,19 +94,19 @@ ATTR_WATCHDOG, ATTR_WEBUI, REQUEST_FROM, - AddonBoot, - AddonBootConfig, + AppBoot, + AppBootConfig, ) from ..coresys import CoreSysAttributes from ..docker.stats import DockerStats from ..exceptions import ( - AddonBootConfigCannotChangeError, - AddonConfigurationInvalidError, - AddonNotSupportedWriteStdinError, - APIAddonNotInstalled, + APIAppNotInstalled, APIError, APIForbidden, APINotFound, + AppBootConfigCannotChangeError, + AppConfigurationInvalidError, + AppNotSupportedWriteStdinError, PwnedError, PwnedSecret, ) @@ -121,7 +121,7 @@ # pylint: disable=no-value-for-parameter SCHEMA_OPTIONS = vol.Schema( { - vol.Optional(ATTR_BOOT): vol.Coerce(AddonBoot), + vol.Optional(ATTR_BOOT): vol.Coerce(AppBoot), vol.Optional(ATTR_NETWORK): vol.Maybe(docker_ports), vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(), vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str), @@ -157,149 +157,148 @@ class OptionsValidateResponse(TypedDict): pwned: bool | None -class APIAddons(CoreSysAttributes): - """Handle RESTful API for add-on functions.""" +class APIApps(CoreSysAttributes): + """Handle RESTful API for app functions.""" - def get_addon_for_request(self, request: web.Request) -> Addon: - """Return addon, throw an exception if it doesn't exist.""" - addon_slug: str = request.match_info["addon"] + def get_app_for_request(self, request: web.Request) -> App: + """Return app, throw an exception if it doesn't exist.""" + app_slug: str = request.match_info["app"] # Lookup itself - if addon_slug == "self": - addon = request.get(REQUEST_FROM) - if not isinstance(addon, Addon): - raise APIError("Self is not an Addon") - return addon + if app_slug == "self": + app = request.get(REQUEST_FROM) + if not isinstance(app, App): + raise APIError("Self is not an App") + return app - addon = self.sys_addons.get(addon_slug) - if not addon: - raise APINotFound(f"App {addon_slug} does not exist") - if not isinstance(addon, Addon) or not addon.is_installed: - raise 
APIAddonNotInstalled("App is not installed") + app = self.sys_apps.get(app_slug) + if not app: + raise APINotFound(f"App {app_slug} does not exist") + if not isinstance(app, App) or not app.is_installed: + raise APIAppNotInstalled("App is not installed") - return addon + return app @api_process - async def list_addons(self, request: web.Request) -> dict[str, Any]: - """Return all add-ons or repositories.""" - data_addons = [ + async def list_apps(self, request: web.Request) -> dict[str, Any]: + """Return all apps or repositories.""" + data_apps = [ { - ATTR_NAME: addon.name, - ATTR_SLUG: addon.slug, - ATTR_DESCRIPTON: addon.description, - ATTR_ADVANCED: addon.advanced, # Deprecated 2026.03 - ATTR_STAGE: addon.stage, - ATTR_VERSION: addon.version, - ATTR_VERSION_LATEST: addon.latest_version, - ATTR_UPDATE_AVAILABLE: addon.need_update, - ATTR_AVAILABLE: addon.available, - ATTR_DETACHED: addon.is_detached, - ATTR_HOMEASSISTANT: addon.homeassistant_version, - ATTR_STATE: addon.state, - ATTR_REPOSITORY: addon.repository, - ATTR_BUILD: addon.need_build, - ATTR_URL: addon.url, - ATTR_ICON: addon.with_icon, - ATTR_LOGO: addon.with_logo, - ATTR_SYSTEM_MANAGED: addon.system_managed, + ATTR_NAME: app.name, + ATTR_SLUG: app.slug, + ATTR_DESCRIPTON: app.description, + ATTR_ADVANCED: app.advanced, # Deprecated 2026.03 + ATTR_STAGE: app.stage, + ATTR_VERSION: app.version, + ATTR_VERSION_LATEST: app.latest_version, + ATTR_UPDATE_AVAILABLE: app.need_update, + ATTR_AVAILABLE: app.available, + ATTR_DETACHED: app.is_detached, + ATTR_HOMEASSISTANT: app.homeassistant_version, + ATTR_STATE: app.state, + ATTR_REPOSITORY: app.repository, + ATTR_BUILD: app.need_build, + ATTR_URL: app.url, + ATTR_ICON: app.with_icon, + ATTR_LOGO: app.with_logo, + ATTR_SYSTEM_MANAGED: app.system_managed, } - for addon in self.sys_addons.installed + for app in self.sys_apps.installed ] - return {ATTR_ADDONS: data_addons} + return {ATTR_APPS: data_apps} @api_process async def reload(self, request: web.Request) 
-> None: - """Reload all add-on data from store.""" + """Reload all app data from store.""" await asyncio.shield(self.sys_store.reload()) async def info(self, request: web.Request) -> dict[str, Any]: - """Return add-on information.""" - addon: Addon = self.get_addon_for_request(request) + """Return app information.""" + app: App = self.get_app_for_request(request) data = { - ATTR_NAME: addon.name, - ATTR_SLUG: addon.slug, - ATTR_HOSTNAME: addon.hostname, - ATTR_DNS: addon.dns, - ATTR_DESCRIPTON: addon.description, - ATTR_LONG_DESCRIPTION: await addon.long_description(), - ATTR_ADVANCED: addon.advanced, # Deprecated 2026.03 - ATTR_STAGE: addon.stage, - ATTR_REPOSITORY: addon.repository, - ATTR_VERSION_LATEST: addon.latest_version, - ATTR_PROTECTED: addon.protected, - ATTR_RATING: rating_security(addon), - ATTR_BOOT_CONFIG: addon.boot_config, - ATTR_BOOT: addon.boot, - ATTR_OPTIONS: addon.options, - ATTR_SCHEMA: addon.schema_ui, - ATTR_ARCH: addon.supported_arch, - ATTR_MACHINE: addon.supported_machine, - ATTR_HOMEASSISTANT: addon.homeassistant_version, - ATTR_URL: addon.url, - ATTR_DETACHED: addon.is_detached, - ATTR_AVAILABLE: addon.available, - ATTR_BUILD: addon.need_build, - ATTR_NETWORK: addon.ports, - ATTR_NETWORK_DESCRIPTION: addon.ports_description, - ATTR_HOST_NETWORK: addon.host_network, - ATTR_HOST_PID: addon.host_pid, - ATTR_HOST_IPC: addon.host_ipc, - ATTR_HOST_UTS: addon.host_uts, - ATTR_HOST_DBUS: addon.host_dbus, - ATTR_PRIVILEGED: addon.privileged, - ATTR_FULL_ACCESS: addon.with_full_access, - ATTR_APPARMOR: addon.apparmor, - ATTR_ICON: addon.with_icon, - ATTR_LOGO: addon.with_logo, - ATTR_CHANGELOG: addon.with_changelog, - ATTR_DOCUMENTATION: addon.with_documentation, - ATTR_STDIN: addon.with_stdin, - ATTR_HASSIO_API: addon.access_hassio_api, - ATTR_HASSIO_ROLE: addon.hassio_role, - ATTR_AUTH_API: addon.access_auth_api, - ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api, - ATTR_GPIO: addon.with_gpio, - ATTR_USB: addon.with_usb, - ATTR_UART: 
addon.with_uart, - ATTR_KERNEL_MODULES: addon.with_kernel_modules, - ATTR_DEVICETREE: addon.with_devicetree, - ATTR_UDEV: addon.with_udev, - ATTR_DOCKER_API: addon.access_docker_api, - ATTR_VIDEO: addon.with_video, - ATTR_AUDIO: addon.with_audio, - ATTR_STARTUP: addon.startup, - ATTR_SERVICES: _pretty_services(addon), - ATTR_DISCOVERY: addon.discovery, - ATTR_TRANSLATIONS: addon.translations, - ATTR_INGRESS: addon.with_ingress, - ATTR_SIGNED: addon.signed, - ATTR_STATE: addon.state, - ATTR_WEBUI: addon.webui, - ATTR_INGRESS_ENTRY: addon.ingress_entry, - ATTR_INGRESS_URL: addon.ingress_url, - ATTR_INGRESS_PORT: addon.ingress_port, - ATTR_INGRESS_PANEL: addon.ingress_panel, - ATTR_AUDIO_INPUT: addon.audio_input, - ATTR_AUDIO_OUTPUT: addon.audio_output, - ATTR_AUTO_UPDATE: addon.auto_update, - ATTR_IP_ADDRESS: str(addon.ip_address), - ATTR_VERSION: addon.version, - ATTR_UPDATE_AVAILABLE: addon.need_update, - ATTR_WATCHDOG: addon.watchdog, - ATTR_DEVICES: addon.static_devices - + [device.path for device in addon.devices], - ATTR_SYSTEM_MANAGED: addon.system_managed, - ATTR_SYSTEM_MANAGED_CONFIG_ENTRY: addon.system_managed_config_entry, + ATTR_NAME: app.name, + ATTR_SLUG: app.slug, + ATTR_HOSTNAME: app.hostname, + ATTR_DNS: app.dns, + ATTR_DESCRIPTON: app.description, + ATTR_LONG_DESCRIPTION: await app.long_description(), + ATTR_ADVANCED: app.advanced, # Deprecated 2026.03 + ATTR_STAGE: app.stage, + ATTR_REPOSITORY: app.repository, + ATTR_VERSION_LATEST: app.latest_version, + ATTR_PROTECTED: app.protected, + ATTR_RATING: rating_security(app), + ATTR_BOOT_CONFIG: app.boot_config, + ATTR_BOOT: app.boot, + ATTR_OPTIONS: app.options, + ATTR_SCHEMA: app.schema_ui, + ATTR_ARCH: app.supported_arch, + ATTR_MACHINE: app.supported_machine, + ATTR_HOMEASSISTANT: app.homeassistant_version, + ATTR_URL: app.url, + ATTR_DETACHED: app.is_detached, + ATTR_AVAILABLE: app.available, + ATTR_BUILD: app.need_build, + ATTR_NETWORK: app.ports, + ATTR_NETWORK_DESCRIPTION: app.ports_description, 
+ ATTR_HOST_NETWORK: app.host_network, + ATTR_HOST_PID: app.host_pid, + ATTR_HOST_IPC: app.host_ipc, + ATTR_HOST_UTS: app.host_uts, + ATTR_HOST_DBUS: app.host_dbus, + ATTR_PRIVILEGED: app.privileged, + ATTR_FULL_ACCESS: app.with_full_access, + ATTR_APPARMOR: app.apparmor, + ATTR_ICON: app.with_icon, + ATTR_LOGO: app.with_logo, + ATTR_CHANGELOG: app.with_changelog, + ATTR_DOCUMENTATION: app.with_documentation, + ATTR_STDIN: app.with_stdin, + ATTR_HASSIO_API: app.access_hassio_api, + ATTR_HASSIO_ROLE: app.hassio_role, + ATTR_AUTH_API: app.access_auth_api, + ATTR_HOMEASSISTANT_API: app.access_homeassistant_api, + ATTR_GPIO: app.with_gpio, + ATTR_USB: app.with_usb, + ATTR_UART: app.with_uart, + ATTR_KERNEL_MODULES: app.with_kernel_modules, + ATTR_DEVICETREE: app.with_devicetree, + ATTR_UDEV: app.with_udev, + ATTR_DOCKER_API: app.access_docker_api, + ATTR_VIDEO: app.with_video, + ATTR_AUDIO: app.with_audio, + ATTR_STARTUP: app.startup, + ATTR_SERVICES: _pretty_services(app), + ATTR_DISCOVERY: app.discovery, + ATTR_TRANSLATIONS: app.translations, + ATTR_INGRESS: app.with_ingress, + ATTR_SIGNED: app.signed, + ATTR_STATE: app.state, + ATTR_WEBUI: app.webui, + ATTR_INGRESS_ENTRY: app.ingress_entry, + ATTR_INGRESS_URL: app.ingress_url, + ATTR_INGRESS_PORT: app.ingress_port, + ATTR_INGRESS_PANEL: app.ingress_panel, + ATTR_AUDIO_INPUT: app.audio_input, + ATTR_AUDIO_OUTPUT: app.audio_output, + ATTR_AUTO_UPDATE: app.auto_update, + ATTR_IP_ADDRESS: str(app.ip_address), + ATTR_VERSION: app.version, + ATTR_UPDATE_AVAILABLE: app.need_update, + ATTR_WATCHDOG: app.watchdog, + ATTR_DEVICES: app.static_devices + [device.path for device in app.devices], + ATTR_SYSTEM_MANAGED: app.system_managed, + ATTR_SYSTEM_MANAGED_CONFIG_ENTRY: app.system_managed_config_entry, } return data @api_process async def options(self, request: web.Request) -> None: - """Store user options for add-on.""" - addon = self.get_addon_for_request(request) + """Store user options for app.""" + app = 
self.get_app_for_request(request) # Update secrets for validation await self.sys_homeassistant.secrets.reload() @@ -309,61 +308,61 @@ async def options(self, request: web.Request) -> None: if ATTR_OPTIONS in body: # None resets options to defaults, otherwise validate the options if body[ATTR_OPTIONS] is None: - addon.options = None + app.options = None else: try: - addon.options = addon.schema(body[ATTR_OPTIONS]) + app.options = app.schema(body[ATTR_OPTIONS]) except vol.Invalid as ex: - raise AddonConfigurationInvalidError( - addon=addon.slug, + raise AppConfigurationInvalidError( + app=app.slug, validation_error=humanize_error(body[ATTR_OPTIONS], ex), ) from None if ATTR_BOOT in body: - if addon.boot_config == AddonBootConfig.MANUAL_ONLY: - raise AddonBootConfigCannotChangeError( - addon=addon.slug, boot_config=addon.boot_config.value + if app.boot_config == AppBootConfig.MANUAL_ONLY: + raise AppBootConfigCannotChangeError( + app=app.slug, boot_config=app.boot_config.value ) - addon.boot = body[ATTR_BOOT] + app.boot = body[ATTR_BOOT] if ATTR_AUTO_UPDATE in body: - addon.auto_update = body[ATTR_AUTO_UPDATE] + app.auto_update = body[ATTR_AUTO_UPDATE] if ATTR_NETWORK in body: - addon.ports = body[ATTR_NETWORK] + app.ports = body[ATTR_NETWORK] if ATTR_AUDIO_INPUT in body: - addon.audio_input = body[ATTR_AUDIO_INPUT] + app.audio_input = body[ATTR_AUDIO_INPUT] if ATTR_AUDIO_OUTPUT in body: - addon.audio_output = body[ATTR_AUDIO_OUTPUT] + app.audio_output = body[ATTR_AUDIO_OUTPUT] if ATTR_INGRESS_PANEL in body: - addon.ingress_panel = body[ATTR_INGRESS_PANEL] - await self.sys_ingress.update_hass_panel(addon) + app.ingress_panel = body[ATTR_INGRESS_PANEL] + await self.sys_ingress.update_hass_panel(app) if ATTR_WATCHDOG in body: - addon.watchdog = body[ATTR_WATCHDOG] + app.watchdog = body[ATTR_WATCHDOG] - await addon.save_persist() + await app.save_persist() @api_process async def sys_options(self, request: web.Request) -> None: - """Store system options for an add-on.""" 
- addon = self.get_addon_for_request(request) + """Store system options for an app.""" + app = self.get_app_for_request(request) # Validate/Process Body body = await api_validate(SCHEMA_SYS_OPTIONS, request) if ATTR_SYSTEM_MANAGED in body: - addon.system_managed = body[ATTR_SYSTEM_MANAGED] + app.system_managed = body[ATTR_SYSTEM_MANAGED] if ATTR_SYSTEM_MANAGED_CONFIG_ENTRY in body: - addon.system_managed_config_entry = body[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY] + app.system_managed_config_entry = body[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY] - await addon.save_persist() + await app.save_persist() @api_process async def options_validate(self, request: web.Request) -> OptionsValidateResponse: - """Validate user options for add-on.""" - addon = self.get_addon_for_request(request) + """Validate user options for app.""" + app = self.get_app_for_request(request) data = OptionsValidateResponse(message="", valid=True, pwned=False) - options = await request.json(loads=json_loads) or addon.options + options = await request.json(loads=json_loads) or app.options # Validate config - options_schema = addon.schema + options_schema = app.schema try: options_schema.validate(options) except vol.Invalid as ex: @@ -395,37 +394,37 @@ async def options_validate(self, request: web.Request) -> OptionsValidateRespons @api_process async def options_config(self, request: web.Request) -> dict[str, Any]: - """Validate user options for add-on.""" - slug: str = request.match_info["addon"] + """Validate user options for app.""" + slug: str = request.match_info["app"] if slug != "self": raise APIForbidden("This can be only read by the app itself!") - addon = self.get_addon_for_request(request) + app = self.get_app_for_request(request) # Lookup/reload secrets await self.sys_homeassistant.secrets.reload() try: - return addon.schema.validate(addon.options) + return app.schema.validate(app.options) except vol.Invalid: raise APIError("Invalid configuration data for the app") from None @api_process async def 
security(self, request: web.Request) -> None: - """Store security options for add-on.""" - addon = self.get_addon_for_request(request) + """Store security options for app.""" + app = self.get_app_for_request(request) body: dict[str, Any] = await api_validate(SCHEMA_SECURITY, request) if ATTR_PROTECTED in body: - _LOGGER.warning("Changing protected flag for %s!", addon.slug) - addon.protected = body[ATTR_PROTECTED] + _LOGGER.warning("Changing protected flag for %s!", app.slug) + app.protected = body[ATTR_PROTECTED] - await addon.save_persist() + await app.save_persist() @api_process async def stats(self, request: web.Request) -> dict[str, Any]: """Return resource information.""" - addon = self.get_addon_for_request(request) + app = self.get_app_for_request(request) - stats: DockerStats = await addon.stats() + stats: DockerStats = await app.stats() return { ATTR_CPU_PERCENT: stats.cpu_percent, @@ -440,57 +439,55 @@ async def stats(self, request: web.Request) -> dict[str, Any]: @api_process async def uninstall(self, request: web.Request) -> None: - """Uninstall add-on.""" - addon = self.get_addon_for_request(request) + """Uninstall app.""" + app = self.get_app_for_request(request) body: dict[str, Any] = await api_validate(SCHEMA_UNINSTALL, request) await asyncio.shield( - self.sys_addons.uninstall( - addon.slug, remove_config=body[ATTR_REMOVE_CONFIG] - ) + self.sys_apps.uninstall(app.slug, remove_config=body[ATTR_REMOVE_CONFIG]) ) @api_process async def start(self, request: web.Request) -> None: - """Start add-on.""" - addon = self.get_addon_for_request(request) - if start_task := await asyncio.shield(addon.start()): + """Start app.""" + app = self.get_app_for_request(request) + if start_task := await asyncio.shield(app.start()): await start_task @api_process def stop(self, request: web.Request) -> Awaitable[None]: - """Stop add-on.""" - addon = self.get_addon_for_request(request) - return asyncio.shield(addon.stop()) + """Stop app.""" + app = 
self.get_app_for_request(request) + return asyncio.shield(app.stop()) @api_process async def restart(self, request: web.Request) -> None: - """Restart add-on.""" - addon: Addon = self.get_addon_for_request(request) - if start_task := await asyncio.shield(addon.restart()): + """Restart app.""" + app: App = self.get_app_for_request(request) + if start_task := await asyncio.shield(app.restart()): await start_task @api_process async def rebuild(self, request: web.Request) -> None: - """Rebuild local build add-on.""" - addon = self.get_addon_for_request(request) + """Rebuild local build app.""" + app = self.get_app_for_request(request) body: dict[str, Any] = await api_validate(SCHEMA_REBUILD, request) if start_task := await asyncio.shield( - self.sys_addons.rebuild(addon.slug, force=body[ATTR_FORCE]) + self.sys_apps.rebuild(app.slug, force=body[ATTR_FORCE]) ): await start_task @api_process async def stdin(self, request: web.Request) -> None: - """Write to stdin of add-on.""" - addon = self.get_addon_for_request(request) - if not addon.with_stdin: - raise AddonNotSupportedWriteStdinError(_LOGGER.error, addon=addon.slug) + """Write to stdin of app.""" + app = self.get_app_for_request(request) + if not app.with_stdin: + raise AppNotSupportedWriteStdinError(_LOGGER.error, app=app.slug) data = await request.read() - await asyncio.shield(addon.write_stdin(data)) + await asyncio.shield(app.write_stdin(data)) -def _pretty_services(addon: Addon) -> list[str]: +def _pretty_services(app: App) -> list[str]: """Return a simplified services role list.""" - return [f"{name}:{access}" for name, access in addon.services_role.items()] + return [f"{name}:{access}" for name, access in app.services_role.items()] diff --git a/supervisor/api/auth.py b/supervisor/api/auth.py index 7fc132e1c2e..a69885e457e 100644 --- a/supervisor/api/auth.py +++ b/supervisor/api/auth.py @@ -12,7 +12,7 @@ from multidict import MultiDictProxy import voluptuous as vol -from ..addons.addon import Addon +from 
..addons.addon import App from ..const import ATTR_NAME, ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM from ..coresys import CoreSysAttributes from ..exceptions import APIForbidden, AuthInvalidNonStringValueError @@ -44,7 +44,7 @@ class APIAuth(CoreSysAttributes): """Handle RESTful API for auth functions.""" - def _process_basic(self, request: web.Request, addon: Addon) -> Awaitable[bool]: + def _process_basic(self, request: web.Request, app: App) -> Awaitable[bool]: """Process login request with basic auth. Return a coroutine. @@ -53,12 +53,12 @@ def _process_basic(self, request: web.Request, addon: Addon) -> Awaitable[bool]: auth = BasicAuth.decode(request.headers[AUTHORIZATION]) except ValueError as err: raise HTTPUnauthorized(headers=REALM_HEADER) from err - return self.sys_auth.check_login(addon, auth.login, auth.password) + return self.sys_auth.check_login(app, auth.login, auth.password) def _process_dict( self, request: web.Request, - addon: Addon, + app: App, data: dict[str, Any] | MultiDictProxy[str | bytes | FileField], ) -> Awaitable[bool]: """Process login with dict data. 
@@ -76,35 +76,33 @@ def _process_dict( _LOGGER.error, headers=REALM_HEADER ) from None - return self.sys_auth.check_login( - addon, cast(str, username), cast(str, password) - ) + return self.sys_auth.check_login(app, cast(str, username), cast(str, password)) @api_process async def auth(self, request: web.Request) -> bool: """Process login request.""" - addon = request[REQUEST_FROM] + app = request[REQUEST_FROM] - if not isinstance(addon, Addon) or not addon.access_auth_api: + if not isinstance(app, App) or not app.access_auth_api: raise APIForbidden("Can't use Home Assistant auth!") # BasicAuth if AUTHORIZATION in request.headers: - if not await self._process_basic(request, addon): + if not await self._process_basic(request, app): raise HTTPUnauthorized(headers=REALM_HEADER) return True # Json if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON: data = await request.json(loads=json_loads) - if not await self._process_dict(request, addon, data): + if not await self._process_dict(request, app, data): raise HTTPUnauthorized() return True # URL encoded if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_URL: data = await request.post() - if not await self._process_dict(request, addon, data): + if not await self._process_dict(request, app, data): raise HTTPUnauthorized() return True diff --git a/supervisor/api/backups.py b/supervisor/api/backups.py index 26c678ca0d0..99a3912f62e 100644 --- a/supervisor/api/backups.py +++ b/supervisor/api/backups.py @@ -19,7 +19,7 @@ from ..backups.const import LOCATION_CLOUD_BACKUP, LOCATION_TYPE from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT, days_until_stale from ..const import ( - ATTR_ADDONS, + ATTR_APPS, ATTR_BACKUPS, ATTR_COMPRESSED, ATTR_CONTENT, @@ -102,7 +102,7 @@ def _convert_local_location(item: str | None) -> str | None: SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend( { vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(), - vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()), + 
vol.Optional(ATTR_APPS): vol.All([str], vol.Unique()), vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS, } ) @@ -122,9 +122,7 @@ def _convert_local_location(item: str | None) -> str | None: SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend( { - vol.Optional(ATTR_ADDONS): vol.Or( - ALL_ADDONS_FLAG, vol.All([str], vol.Unique()) - ), + vol.Optional(ATTR_APPS): vol.Or(ALL_ADDONS_FLAG, vol.All([str], vol.Unique())), vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS, vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(), } @@ -172,7 +170,7 @@ def _list_backups(self): ATTR_COMPRESSED: backup.compressed, ATTR_CONTENT: { ATTR_HOMEASSISTANT: backup.homeassistant_version is not None, - ATTR_ADDONS: backup.addon_list, + ATTR_APPS: backup.app_list, ATTR_FOLDERS: backup.folders, }, } @@ -220,14 +218,14 @@ async def backup_info(self, request): """Return backup info.""" backup = self._extract_slug(request) - data_addons = [] - for addon_data in backup.addons: - data_addons.append( + data_apps = [] + for app_data in backup.apps: + data_apps.append( { - ATTR_SLUG: addon_data[ATTR_SLUG], - ATTR_NAME: addon_data[ATTR_NAME], - ATTR_VERSION: addon_data[ATTR_VERSION], - ATTR_SIZE: addon_data[ATTR_SIZE], + ATTR_SLUG: app_data[ATTR_SLUG], + ATTR_NAME: app_data[ATTR_NAME], + ATTR_VERSION: app_data[ATTR_VERSION], + ATTR_SIZE: app_data[ATTR_SIZE], } ) @@ -245,7 +243,7 @@ async def backup_info(self, request): ATTR_HOMEASSISTANT: backup.homeassistant_version, ATTR_LOCATION: backup.location, ATTR_LOCATIONS: backup.locations, - ATTR_ADDONS: data_addons, + ATTR_APPS: data_apps, ATTR_REPOSITORIES: backup.repositories, ATTR_FOLDERS: backup.folders, ATTR_HOMEASSISTANT_EXCLUDE_DATABASE: backup.homeassistant_exclude_database, @@ -334,9 +332,11 @@ async def backup_partial(self, request: web.Request): if locations: body[ATTR_ADDITIONAL_LOCATIONS] = locations - if body.get(ATTR_ADDONS) == ALL_ADDONS_FLAG: - body[ATTR_ADDONS] = list(self.sys_addons.local) + if body.get(ATTR_APPS) == ALL_ADDONS_FLAG: + body[ATTR_APPS] = 
list(self.sys_apps.local) + if ATTR_APPS in body: + body["apps"] = body.pop(ATTR_APPS) background = body.pop(ATTR_BACKGROUND) backup_task, job_id = await background_task( self, self.sys_backups.do_backup_partial, **body @@ -382,6 +382,8 @@ async def restore_partial(self, request: web.Request): request, body.get(ATTR_LOCATION, backup.location) ) background = body.pop(ATTR_BACKGROUND) + if ATTR_APPS in body: + body["apps"] = body.pop(ATTR_APPS) restore_task, job_id = await background_task( self, self.sys_backups.do_restore_partial, backup, **body ) diff --git a/supervisor/api/discovery.py b/supervisor/api/discovery.py index aa37e398220..4ba9b003210 100644 --- a/supervisor/api/discovery.py +++ b/supervisor/api/discovery.py @@ -6,16 +6,16 @@ from aiohttp import web import voluptuous as vol -from ..addons.addon import Addon +from ..addons.addon import App from ..const import ( - ATTR_ADDON, + ATTR_APP, ATTR_CONFIG, ATTR_DISCOVERY, ATTR_SERVICE, ATTR_SERVICES, ATTR_UUID, REQUEST_FROM, - AddonState, + AppState, ) from ..coresys import CoreSysAttributes from ..discovery import Message @@ -49,25 +49,25 @@ async def list_discovery(self, request: web.Request) -> dict[str, Any]: # Get available discovery discovery = [ { - ATTR_ADDON: message.addon, + ATTR_APP: message.addon, ATTR_SERVICE: message.service, ATTR_UUID: message.uuid, ATTR_CONFIG: message.config, } for message in self.sys_discovery.list_messages if ( - discovered := self.sys_addons.get_local_only( + discovered := self.sys_apps.get_local_only( message.addon, ) ) - and discovered.state == AddonState.STARTED + and discovered.state == AppState.STARTED ] - # Get available services/add-ons + # Get available services/apps services: dict[str, list[str]] = {} - for addon in self.sys_addons.all: - for name in addon.discovery: - services.setdefault(name, []).append(addon.slug) + for app in self.sys_apps.all: + for name in app.discovery: + services.setdefault(name, []).append(app.slug) return {ATTR_DISCOVERY: discovery, 
ATTR_SERVICES: services} @@ -75,14 +75,14 @@ async def list_discovery(self, request: web.Request) -> dict[str, Any]: async def set_discovery(self, request: web.Request) -> dict[str, str]: """Write data into a discovery pipeline.""" body = await api_validate(SCHEMA_DISCOVERY, request) - addon: Addon = request[REQUEST_FROM] + app: App = request[REQUEST_FROM] service = body[ATTR_SERVICE] # Access? - if body[ATTR_SERVICE] not in addon.discovery: + if body[ATTR_SERVICE] not in app.discovery: _LOGGER.error( "App %s attempted to send discovery for service %s which is not listed in its config. Please report this to the maintainer of the app", - addon.name, + app.name, service, ) raise APIForbidden( @@ -90,7 +90,7 @@ async def set_discovery(self, request: web.Request) -> dict[str, str]: ) # Process discovery message - message = await self.sys_discovery.send(addon, **body) + message = await self.sys_discovery.send(app, **body) return {ATTR_UUID: message.uuid} @@ -101,7 +101,7 @@ async def get_discovery(self, request: web.Request) -> dict[str, Any]: message = self._extract_message(request) return { - ATTR_ADDON: message.addon, + ATTR_APP: message.addon, ATTR_SERVICE: message.service, ATTR_UUID: message.uuid, ATTR_CONFIG: message.config, @@ -111,10 +111,10 @@ async def get_discovery(self, request: web.Request) -> dict[str, Any]: async def del_discovery(self, request: web.Request) -> None: """Delete data into a discovery message.""" message = self._extract_message(request) - addon = request[REQUEST_FROM] + app = request[REQUEST_FROM] # Permission - if message.addon != addon.slug: + if message.addon != app.slug: raise APIForbidden("Can't remove discovery message") await self.sys_discovery.remove(message) diff --git a/supervisor/api/host.py b/supervisor/api/host.py index cc4a1d10504..db1da0271d1 100644 --- a/supervisor/api/host.py +++ b/supervisor/api/host.py @@ -357,8 +357,8 @@ async def disk_usage(self, request: web.Request) -> dict[str, Any]: known_paths = await 
self.sys_run_in_executor( disk.get_dir_sizes, { - "addons_data": self.sys_config.path_addons_data, - "addons_config": self.sys_config.path_addon_configs, + "addons_data": self.sys_config.path_apps_data, + "addons_config": self.sys_config.path_app_configs, "media": self.sys_config.path_media, "share": self.sys_config.path_share, "backup": self.sys_config.path_backup, diff --git a/supervisor/api/ingress.py b/supervisor/api/ingress.py index a34503846ed..212bcc39200 100644 --- a/supervisor/api/ingress.py +++ b/supervisor/api/ingress.py @@ -1,4 +1,4 @@ -"""Supervisor Add-on ingress service.""" +"""Supervisor App ingress service.""" import asyncio from ipaddress import ip_address @@ -15,7 +15,7 @@ from multidict import CIMultiDict, istr import voluptuous as vol -from ..addons.addon import Addon +from ..addons.addon import App from ..const import ( ATTR_ADMIN, ATTR_ENABLE, @@ -75,37 +75,37 @@ def status_code_must_be_empty_body(code: int) -> bool: class APIIngress(CoreSysAttributes): - """Ingress view to handle add-on webui routing.""" + """Ingress view to handle app webui routing.""" - def _extract_addon(self, request: web.Request) -> Addon: - """Return addon, throw an exception it it doesn't exist.""" + def _extract_app(self, request: web.Request) -> App: + """Return app, throw an exception it it doesn't exist.""" token = request.match_info["token"] - # Find correct add-on - addon = self.sys_ingress.get(token) - if not addon: + # Find correct app + app = self.sys_ingress.get(token) + if not app: _LOGGER.warning("Ingress for %s not available", token) raise HTTPServiceUnavailable() - return addon + return app - def _create_url(self, addon: Addon, path: str) -> str: + def _create_url(self, app: App, path: str) -> str: """Create URL to container.""" - return f"http://{addon.ip_address}:{addon.ingress_port}/{path}" + return f"http://{app.ip_address}:{app.ingress_port}/{path}" @api_process async def panels(self, request: web.Request) -> dict[str, Any]: """Create a list of 
panel data.""" - addons = {} - for addon in self.sys_ingress.addons: - addons[addon.slug] = { - ATTR_TITLE: addon.panel_title, - ATTR_ICON: addon.panel_icon, - ATTR_ADMIN: addon.panel_admin, - ATTR_ENABLE: addon.ingress_panel, + apps = {} + for app in self.sys_ingress.apps: + apps[app.slug] = { + ATTR_TITLE: app.panel_title, + ATTR_ICON: app.panel_icon, + ATTR_ADMIN: app.panel_admin, + ATTR_ENABLE: app.ingress_panel, } - return {ATTR_PANELS: addons} + return {ATTR_PANELS: apps} @api_process @require_home_assistant @@ -149,16 +149,16 @@ async def handler( raise HTTPUnauthorized() # Process requests - addon = self._extract_addon(request) + app = self._extract_app(request) path = request.match_info.get("path", "") session_data = self.sys_ingress.get_session_data(session) try: # Websocket if _is_websocket(request): - return await self._handle_websocket(request, addon, path, session_data) + return await self._handle_websocket(request, app, path, session_data) # Request - return await self._handle_request(request, addon, path, session_data) + return await self._handle_request(request, app, path, session_data) except aiohttp.ClientError as err: _LOGGER.error("Ingress error: %s", err) @@ -168,7 +168,7 @@ async def handler( async def _handle_websocket( self, request: web.Request, - addon: Addon, + app: App, path: str, session_data: IngressSessionData | None, ) -> web.WebSocketResponse: @@ -190,8 +190,8 @@ async def _handle_websocket( await ws_server.prepare(request) # Preparing - url = self._create_url(addon, path) - source_header = _init_header(request, addon, session_data) + url = self._create_url(app, path) + source_header = _init_header(request, app, session_data) # Support GET query if request.query_string: @@ -199,7 +199,7 @@ async def _handle_websocket( # Start proxy try: - _LOGGER.debug("Proxing WebSocket to %s, upstream url: %s", addon.slug, url) + _LOGGER.debug("Proxing WebSocket to %s, upstream url: %s", app.slug, url) async with self.sys_websession.ws_connect( 
url, headers=source_header, @@ -217,28 +217,28 @@ async def _handle_websocket( return_when=asyncio.FIRST_COMPLETED, ) except TimeoutError: - _LOGGER.warning("WebSocket proxy to %s timed out", addon.slug) + _LOGGER.warning("WebSocket proxy to %s timed out", app.slug) return ws_server async def _handle_request( self, request: web.Request, - addon: Addon, + app: App, path: str, session_data: IngressSessionData | None, ) -> web.Response | web.StreamResponse: """Ingress route for request.""" - url = self._create_url(addon, path) - source_header = _init_header(request, addon, session_data) + url = self._create_url(app, path) + source_header = _init_header(request, app, session_data) # Passing the raw stream breaks requests for some webservers # since we just need it for POST requests really, for all other methods - # we read the bytes and pass that to the request to the add-on - # add-ons needs to add support with that in the configuration + # we read the bytes and pass that to the request to the app + # apps needs to add support with that in the configuration data = ( request.content - if request.method == "POST" and addon.ingress_stream + if request.method == "POST" and app.ingress_stream else await request.read() ) @@ -318,7 +318,7 @@ async def _find_user_by_id(self, user_id: str) -> HomeAssistantUser | None: def _init_header( - request: web.Request, addon: Addon, session_data: IngressSessionData | None + request: web.Request, app: App, session_data: IngressSessionData | None ) -> CIMultiDict[str]: """Create initial header.""" headers = CIMultiDict[str]() diff --git a/supervisor/api/middleware/security.py b/supervisor/api/middleware/security.py index 3715a2e6366..8c47c807413 100644 --- a/supervisor/api/middleware/security.py +++ b/supervisor/api/middleware/security.py @@ -68,7 +68,7 @@ r")$" ) -# Can called by every add-on +# Can called by every app ADDONS_API_BYPASS: Final = re.compile( r"^(?:" r"|/addons/self/(?!security|update)[^/]+" @@ -87,7 +87,7 @@ r")$" ) -# 
Policy role add-on API access +# Policy role app API access ADDONS_ROLE_ACCESS: dict[str, re.Pattern[str]] = { ROLE_DEFAULT: re.compile( r"^(?:" @@ -255,26 +255,24 @@ async def token_validation( _LOGGER.debug("%s access from Observer", request.path) request_from = self.sys_plugins.observer - # Add-on - addon = None + # App + app = None if supervisor_token and not request_from: - addon = self.sys_addons.from_token(supervisor_token) + app = self.sys_apps.from_token(supervisor_token) - # Check Add-on API access - if addon and ADDONS_API_BYPASS.match(request.path): - _LOGGER.debug("Passthrough %s from %s", request.path, addon.slug) - request_from = addon - elif addon and addon.access_hassio_api: + # Check App API access + if app and ADDONS_API_BYPASS.match(request.path): + _LOGGER.debug("Passthrough %s from %s", request.path, app.slug) + request_from = app + elif app and app.access_hassio_api: # Check Role - if ADDONS_ROLE_ACCESS[addon.hassio_role].match(request.path): - _LOGGER.info("%s access from %s", request.path, addon.slug) - request_from = addon + if ADDONS_ROLE_ACCESS[app.hassio_role].match(request.path): + _LOGGER.info("%s access from %s", request.path, app.slug) + request_from = app else: - _LOGGER.warning("%s no role for %s", request.path, addon.slug) - elif addon: - _LOGGER.warning( - "%s missing API permission for %s", addon.slug, request.path - ) + _LOGGER.warning("%s no role for %s", request.path, app.slug) + elif app: + _LOGGER.warning("%s missing API permission for %s", app.slug, request.path) if request_from: request[REQUEST_FROM] = request_from diff --git a/supervisor/api/proxy.py b/supervisor/api/proxy.py index b5a04214d62..23a4fd433e7 100644 --- a/supervisor/api/proxy.py +++ b/supervisor/api/proxy.py @@ -16,7 +16,7 @@ from ..coresys import CoreSysAttributes from ..exceptions import APIError, HomeAssistantAPIError, HomeAssistantAuthError from ..utils.json import json_dumps -from ..utils.logging import AddonLoggerAdapter +from ..utils.logging import 
AppLoggerAdapter _LOGGER: logging.Logger = logging.getLogger(__name__) @@ -81,13 +81,13 @@ def _check_access(self, request: web.Request): else: supervisor_token = request.headers.get(HEADER_HA_ACCESS, "") - addon = self.sys_addons.from_token(supervisor_token) - if not addon: + app = self.sys_apps.from_token(supervisor_token) + if not app: _LOGGER.warning("Unknown Home Assistant API access!") - elif not addon.access_homeassistant_api: - _LOGGER.warning("Not permitted API access: %s", addon.slug) + elif not app.access_homeassistant_api: + _LOGGER.warning("Not permitted API access: %s", app.slug) else: - _LOGGER.debug("%s access from %s", request.path, addon.slug) + _LOGGER.debug("%s access from %s", request.path, app.slug) return raise HTTPUnauthorized() @@ -235,7 +235,7 @@ async def _proxy_message( self, source: web.WebSocketResponse | ClientWebSocketResponse, target: web.WebSocketResponse | ClientWebSocketResponse, - logger: AddonLoggerAdapter, + logger: AppLoggerAdapter, ) -> None: """Proxy a message from client to server or vice versa.""" while not source.closed and not target.closed: @@ -278,7 +278,7 @@ async def websocket(self, request: web.Request): # init server server = web.WebSocketResponse(heartbeat=30) await server.prepare(request) - addon_name = None + app_name = None # handle authentication try: @@ -292,9 +292,9 @@ async def websocket(self, request: web.Request): supervisor_token = response.get("api_password") or response.get( "access_token" ) - addon = self.sys_addons.from_token(supervisor_token) + app = self.sys_apps.from_token(supervisor_token) - if not addon or not addon.access_homeassistant_api: + if not app or not app.access_homeassistant_api: _LOGGER.warning("Unauthorized WebSocket access!") await server.send_json( {"type": "auth_invalid", "message": "Invalid access"}, @@ -302,8 +302,8 @@ async def websocket(self, request: web.Request): ) return server - addon_name = addon.slug - _LOGGER.info("WebSocket access from %s", addon_name) + app_name = 
app.slug + _LOGGER.info("WebSocket access from %s", app_name) await server.send_json( {"type": "auth_ok", "ha_version": self.sys_homeassistant.version}, @@ -327,7 +327,7 @@ async def websocket(self, request: web.Request): except APIError: return server - logger = AddonLoggerAdapter(_LOGGER, {"addon_name": addon_name}) + logger = AppLoggerAdapter(_LOGGER, {"app_name": app_name}) logger.info("Home Assistant WebSocket API proxy running") client_task = self.sys_create_task(self._proxy_message(client, server, logger)) diff --git a/supervisor/api/root.py b/supervisor/api/root.py index 3054f77fd5c..f55e8b9f99a 100644 --- a/supervisor/api/root.py +++ b/supervisor/api/root.py @@ -94,17 +94,17 @@ async def available_updates(self, request: web.Request) -> dict[str, Any]: } ) - # Add-ons + # Apps available_updates.extend( { ATTR_UPDATE_TYPE: "addon", - ATTR_NAME: addon.name, - ATTR_ICON: f"/addons/{addon.slug}/icon" if addon.with_icon else None, - ATTR_PANEL_PATH: f"/update-available/{addon.slug}", - ATTR_VERSION_LATEST: addon.latest_version, + ATTR_NAME: app.name, + ATTR_ICON: f"/addons/{app.slug}/icon" if app.with_icon else None, + ATTR_PANEL_PATH: f"/update-available/{app.slug}", + ATTR_VERSION_LATEST: app.latest_version, } - for addon in self.sys_addons.installed - if addon.need_update + for app in self.sys_apps.installed + if app.need_update ) return {ATTR_AVAILABLE_UPDATES: available_updates} diff --git a/supervisor/api/services.py b/supervisor/api/services.py index cb003246342..96fe7ed246e 100644 --- a/supervisor/api/services.py +++ b/supervisor/api/services.py @@ -48,10 +48,10 @@ async def set_service(self, request: web.Request) -> None: """Write data into a service.""" service = self._extract_service(request) body = await api_validate(service.schema, request) - addon = request[REQUEST_FROM] + app = request[REQUEST_FROM] _check_access(request, service.slug) - await service.set_service_data(addon, body) + await service.set_service_data(app, body) @api_process async def 
get_service(self, request: web.Request) -> dict[str, Any]: @@ -69,18 +69,18 @@ async def get_service(self, request: web.Request) -> dict[str, Any]: async def del_service(self, request: web.Request) -> None: """Delete data into a service.""" service = self._extract_service(request) - addon = request[REQUEST_FROM] + app = request[REQUEST_FROM] # Access _check_access(request, service.slug, True) - await service.del_service_data(addon) + await service.del_service_data(app) def _check_access(request, service, provide=False): """Raise error if the rights are wrong.""" - addon = request[REQUEST_FROM] - if not addon.services_role.get(service): + app = request[REQUEST_FROM] + if not app.services_role.get(service): raise APIForbidden(f"No access to {service} service!") - if provide and addon.services_role.get(service) != PROVIDE_SERVICE: + if provide and app.services_role.get(service) != PROVIDE_SERVICE: raise APIForbidden(f"No access to write {service} service!") diff --git a/supervisor/api/store.py b/supervisor/api/store.py index 16561f11bbc..355f44ccb2e 100644 --- a/supervisor/api/store.py +++ b/supervisor/api/store.py @@ -7,15 +7,15 @@ from aiohttp import web import voluptuous as vol -from ..addons.addon import Addon -from ..addons.manager import AnyAddon +from ..addons.addon import App +from ..addons.manager import AnyApp from ..addons.utils import rating_security from ..api.const import ATTR_SIGNED from ..api.utils import api_process, api_process_raw, api_validate from ..const import ( - ATTR_ADDONS, ATTR_ADVANCED, ATTR_APPARMOR, + ATTR_APPS, ATTR_ARCH, ATTR_AUTH_API, ATTR_AVAILABLE, @@ -53,9 +53,9 @@ REQUEST_FROM, ) from ..coresys import CoreSysAttributes -from ..exceptions import APIError, APIForbidden, APINotFound, StoreAddonNotFoundError +from ..exceptions import APIError, APIForbidden, APINotFound, StoreAppNotFoundError from ..resolution.const import ContextType, SuggestionType -from ..store.addon import AddonStore +from ..store.addon import AppStore from 
..store.repository import Repository from ..store.validate import validate_repository from .const import ATTR_BACKGROUND, CONTENT_TYPE_PNG, CONTENT_TYPE_TEXT @@ -100,23 +100,23 @@ def _read_static_binary_file(path: Path) -> Any: class APIStore(CoreSysAttributes): """Handle RESTful API for store functions.""" - def _extract_addon(self, request: web.Request, installed=False) -> AnyAddon: - """Return add-on, throw an exception it it doesn't exist.""" - addon_slug: str = request.match_info["addon"] + def _extract_app(self, request: web.Request, installed=False) -> AnyApp: + """Return app, throw an exception it it doesn't exist.""" + app_slug: str = request.match_info["app"] - if not (addon := self.sys_addons.get(addon_slug)): - raise StoreAddonNotFoundError(addon=addon_slug) + if not (app := self.sys_apps.get(app_slug)): + raise StoreAppNotFoundError(app=app_slug) - if installed and not addon.is_installed: - raise APIError(f"App {addon_slug} is not installed") + if installed and not app.is_installed: + raise APIError(f"App {app_slug} is not installed") - if not installed and addon.is_installed: - addon = cast(Addon, addon) - if not addon.addon_store: - raise StoreAddonNotFoundError(addon=addon_slug) - return addon.addon_store + if not installed and app.is_installed: + app = cast(App, app) + if not app.app_store: + raise StoreAppNotFoundError(app=app_slug) + return app.app_store - return addon + return app def _extract_repository(self, request: web.Request) -> Repository: """Return repository, throw an exception it it doesn't exist.""" @@ -129,52 +129,50 @@ def _extract_repository(self, request: web.Request) -> Repository: return self.sys_store.get(repository_slug) - async def _generate_addon_information( - self, addon: AddonStore, extended: bool = False + async def _generate_app_information( + self, app: AppStore, extended: bool = False ) -> dict[str, Any]: - """Generate addon information.""" + """Generate app information.""" - installed = ( - 
self.sys_addons.get_local_only(addon.slug) if addon.is_installed else None - ) + installed = self.sys_apps.get_local_only(app.slug) if app.is_installed else None data = { - ATTR_ADVANCED: addon.advanced, - ATTR_ARCH: addon.supported_arch, - ATTR_AVAILABLE: addon.available, - ATTR_BUILD: addon.need_build, - ATTR_DESCRIPTON: addon.description, - ATTR_DOCUMENTATION: addon.with_documentation, - ATTR_HOMEASSISTANT: addon.homeassistant_version, - ATTR_ICON: addon.with_icon, - ATTR_INSTALLED: addon.is_installed, - ATTR_LOGO: addon.with_logo, - ATTR_NAME: addon.name, - ATTR_REPOSITORY: addon.repository, - ATTR_SLUG: addon.slug, - ATTR_STAGE: addon.stage, + ATTR_ADVANCED: app.advanced, + ATTR_ARCH: app.supported_arch, + ATTR_AVAILABLE: app.available, + ATTR_BUILD: app.need_build, + ATTR_DESCRIPTON: app.description, + ATTR_DOCUMENTATION: app.with_documentation, + ATTR_HOMEASSISTANT: app.homeassistant_version, + ATTR_ICON: app.with_icon, + ATTR_INSTALLED: app.is_installed, + ATTR_LOGO: app.with_logo, + ATTR_NAME: app.name, + ATTR_REPOSITORY: app.repository, + ATTR_SLUG: app.slug, + ATTR_STAGE: app.stage, ATTR_UPDATE_AVAILABLE: installed.need_update if installed else False, - ATTR_URL: addon.url, - ATTR_VERSION_LATEST: addon.latest_version, + ATTR_URL: app.url, + ATTR_VERSION_LATEST: app.latest_version, ATTR_VERSION: installed.version if installed else None, } if extended: data.update( { - ATTR_APPARMOR: addon.apparmor, - ATTR_AUTH_API: addon.access_auth_api, - ATTR_DETACHED: addon.is_detached, - ATTR_DOCKER_API: addon.access_docker_api, - ATTR_FULL_ACCESS: addon.with_full_access, - ATTR_HASSIO_API: addon.access_hassio_api, - ATTR_HASSIO_ROLE: addon.hassio_role, - ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api, - ATTR_HOST_NETWORK: addon.host_network, - ATTR_HOST_PID: addon.host_pid, - ATTR_INGRESS: addon.with_ingress, - ATTR_LONG_DESCRIPTION: await addon.long_description(), - ATTR_RATING: rating_security(addon), - ATTR_SIGNED: addon.signed, + ATTR_APPARMOR: 
app.apparmor, + ATTR_AUTH_API: app.access_auth_api, + ATTR_DETACHED: app.is_detached, + ATTR_DOCKER_API: app.access_docker_api, + ATTR_FULL_ACCESS: app.with_full_access, + ATTR_HASSIO_API: app.access_hassio_api, + ATTR_HASSIO_ROLE: app.hassio_role, + ATTR_HOMEASSISTANT_API: app.access_homeassistant_api, + ATTR_HOST_NETWORK: app.host_network, + ATTR_HOST_PID: app.host_pid, + ATTR_INGRESS: app.with_ingress, + ATTR_LONG_DESCRIPTION: await app.long_description(), + ATTR_RATING: rating_security(app), + ATTR_SIGNED: app.signed, } ) @@ -194,17 +192,17 @@ def _generate_repository_information( @api_process async def reload(self, request: web.Request) -> None: - """Reload all add-on data from store.""" + """Reload all app data from store.""" await asyncio.shield(self.sys_store.reload()) @api_process async def store_info(self, request: web.Request) -> dict[str, Any]: """Return store information.""" return { - ATTR_ADDONS: await asyncio.gather( + ATTR_APPS: await asyncio.gather( *[ - self._generate_addon_information(self.sys_addons.store[addon]) - for addon in self.sys_addons.store + self._generate_app_information(self.sys_apps.store[app]) + for app in self.sys_apps.store ] ), ATTR_REPOSITORIES: [ @@ -214,27 +212,27 @@ async def store_info(self, request: web.Request) -> dict[str, Any]: } @api_process - async def addons_list(self, request: web.Request) -> dict[str, Any]: - """Return all store add-ons.""" + async def apps_list(self, request: web.Request) -> dict[str, Any]: + """Return all store apps.""" return { - ATTR_ADDONS: await asyncio.gather( + ATTR_APPS: await asyncio.gather( *[ - self._generate_addon_information(self.sys_addons.store[addon]) - for addon in self.sys_addons.store + self._generate_app_information(self.sys_apps.store[app]) + for app in self.sys_apps.store ] ) } @api_process - async def addons_addon_install(self, request: web.Request) -> dict[str, str] | None: - """Install add-on.""" - addon = self._extract_addon(request) + async def apps_app_install(self, 
request: web.Request) -> dict[str, str] | None: + """Install app.""" + app = self._extract_app(request) body = await api_validate(SCHEMA_INSTALL, request) background = body[ATTR_BACKGROUND] install_task, job_id = await background_task( - self, self.sys_addons.install, addon.slug + self, self.sys_apps.install, app.slug ) if background and not install_task.done(): @@ -243,19 +241,19 @@ async def addons_addon_install(self, request: web.Request) -> dict[str, str] | N return await install_task @api_process - async def addons_addon_update(self, request: web.Request) -> dict[str, str] | None: - """Update add-on.""" - addon = self._extract_addon(request, installed=True) - if addon == request.get(REQUEST_FROM): - raise APIForbidden(f"App {addon.slug} can't update itself!") + async def apps_app_update(self, request: web.Request) -> dict[str, str] | None: + """Update app.""" + app = self._extract_app(request, installed=True) + if app == request.get(REQUEST_FROM): + raise APIForbidden(f"App {app.slug} can't update itself!") body = await api_validate(SCHEMA_UPDATE, request) background = body[ATTR_BACKGROUND] update_task, job_id = await background_task( self, - self.sys_addons.update, - addon.slug, + self.sys_apps.update, + app.slug, backup=body.get(ATTR_BACKUP), ) @@ -267,71 +265,71 @@ async def addons_addon_update(self, request: web.Request) -> dict[str, str] | No return None @api_process - async def addons_addon_info(self, request: web.Request) -> dict[str, Any]: - """Return add-on information.""" - return await self.addons_addon_info_wrapped(request) + async def apps_app_info(self, request: web.Request) -> dict[str, Any]: + """Return app information.""" + return await self.apps_app_info_wrapped(request) - # Used by legacy routing for addons/{addon}/info, can be refactored out when that is removed (1/2023) - async def addons_addon_info_wrapped(self, request: web.Request) -> dict[str, Any]: - """Return add-on information directly (not api).""" - addon = cast(AddonStore, 
self._extract_addon(request)) - return await self._generate_addon_information(addon, True) + # Used by legacy routing for apps/{app}/info, can be refactored out when that is removed (1/2023) + async def apps_app_info_wrapped(self, request: web.Request) -> dict[str, Any]: + """Return app information directly (not api).""" + app = cast(AppStore, self._extract_app(request)) + return await self._generate_app_information(app, True) @api_process_raw(CONTENT_TYPE_PNG) - async def addons_addon_icon(self, request: web.Request) -> bytes: - """Return icon from add-on.""" - addon = self._extract_addon(request) - if not addon.with_icon: - raise APIError(f"No icon found for app {addon.slug}!") + async def apps_app_icon(self, request: web.Request) -> bytes: + """Return icon from app.""" + app = self._extract_app(request) + if not app.with_icon: + raise APIError(f"No icon found for app {app.slug}!") - return await self.sys_run_in_executor(_read_static_binary_file, addon.path_icon) + return await self.sys_run_in_executor(_read_static_binary_file, app.path_icon) @api_process_raw(CONTENT_TYPE_PNG) - async def addons_addon_logo(self, request: web.Request) -> bytes: - """Return logo from add-on.""" - addon = self._extract_addon(request) - if not addon.with_logo: - raise APIError(f"No logo found for app {addon.slug}!") + async def apps_app_logo(self, request: web.Request) -> bytes: + """Return logo from app.""" + app = self._extract_app(request) + if not app.with_logo: + raise APIError(f"No logo found for app {app.slug}!") - return await self.sys_run_in_executor(_read_static_binary_file, addon.path_logo) + return await self.sys_run_in_executor(_read_static_binary_file, app.path_logo) @api_process_raw(CONTENT_TYPE_TEXT) - async def addons_addon_changelog(self, request: web.Request) -> str: - """Return changelog from add-on.""" + async def apps_app_changelog(self, request: web.Request) -> str: + """Return changelog from app.""" # Frontend can't handle error response here, need to return 
200 and error as text for now try: - addon = self._extract_addon(request) + app = self._extract_app(request) except APIError as err: return str(err) - if not addon.with_changelog: - return f"No changelog found for app {addon.slug}!" + if not app.with_changelog: + return f"No changelog found for app {app.slug}!" return await self.sys_run_in_executor( - _read_static_text_file, addon.path_changelog + _read_static_text_file, app.path_changelog ) @api_process_raw(CONTENT_TYPE_TEXT) - async def addons_addon_documentation(self, request: web.Request) -> str: - """Return documentation from add-on.""" + async def apps_app_documentation(self, request: web.Request) -> str: + """Return documentation from app.""" # Frontend can't handle error response here, need to return 200 and error as text for now try: - addon = self._extract_addon(request) + app = self._extract_app(request) except APIError as err: return str(err) - if not addon.with_documentation: - return f"No documentation found for app {addon.slug}!" + if not app.with_documentation: + return f"No documentation found for app {app.slug}!" 
return await self.sys_run_in_executor( - _read_static_text_file, addon.path_documentation + _read_static_text_file, app.path_documentation ) @api_process - async def addons_addon_availability(self, request: web.Request) -> None: - """Check add-on availability for current system.""" - addon = cast(AddonStore, self._extract_addon(request)) - addon.validate_availability() + async def apps_app_availability(self, request: web.Request) -> None: + """Check app availability for current system.""" + app = cast(AppStore, self._extract_app(request)) + app.validate_availability() @api_process async def repositories_list(self, request: web.Request) -> list[dict[str, Any]]: diff --git a/supervisor/api/supervisor.py b/supervisor/api/supervisor.py index 43a5da26b6b..7a5b6b78328 100644 --- a/supervisor/api/supervisor.py +++ b/supervisor/api/supervisor.py @@ -9,8 +9,8 @@ import voluptuous as vol from ..const import ( - ATTR_ADDONS, - ATTR_ADDONS_REPOSITORIES, + ATTR_APPS, + ATTR_APPS_REPOSITORIES, ATTR_ARCH, ATTR_AUTO_UPDATE, ATTR_BLK_READ, @@ -60,7 +60,7 @@ SCHEMA_OPTIONS = vol.Schema( { vol.Optional(ATTR_CHANNEL): vol.Coerce(UpdateChannel), - vol.Optional(ATTR_ADDONS_REPOSITORIES): repositories, + vol.Optional(ATTR_APPS_REPOSITORIES): repositories, vol.Optional(ATTR_TIMEZONE): str, vol.Optional(ATTR_WAIT_BOOT): wait_boot, vol.Optional(ATTR_LOGGING): vol.Coerce(LogLevel), @@ -106,20 +106,20 @@ async def info(self, request: web.Request) -> dict[str, Any]: ATTR_COUNTRY: self.sys_config.country, # Depricated ATTR_WAIT_BOOT: self.sys_config.wait_boot, - ATTR_ADDONS: [ + ATTR_APPS: [ { - ATTR_NAME: addon.name, - ATTR_SLUG: addon.slug, - ATTR_VERSION: addon.version, - ATTR_VERSION_LATEST: addon.latest_version, - ATTR_UPDATE_AVAILABLE: addon.need_update, - ATTR_STATE: addon.state, - ATTR_REPOSITORY: addon.repository, - ATTR_ICON: addon.with_icon, + ATTR_NAME: app.name, + ATTR_SLUG: app.slug, + ATTR_VERSION: app.version, + ATTR_VERSION_LATEST: app.latest_version, + ATTR_UPDATE_AVAILABLE: 
app.need_update, + ATTR_STATE: app.state, + ATTR_REPOSITORY: app.repository, + ATTR_ICON: app.with_icon, } - for addon in self.sys_addons.local.values() + for app in self.sys_apps.local.values() ], - ATTR_ADDONS_REPOSITORIES: [ + ATTR_APPS_REPOSITORIES: [ {ATTR_NAME: store.name, ATTR_SLUG: store.slug} for store in self.sys_store.all ], @@ -182,14 +182,14 @@ async def options(self, request: web.Request) -> None: if ATTR_WAIT_BOOT in body: self.sys_config.wait_boot = body[ATTR_WAIT_BOOT] - # Save changes before processing addons in case of errors + # Save changes before processing apps in case of errors await self.sys_updater.save_data() await self.sys_config.save_data() # Remove: 2022.9 - if ATTR_ADDONS_REPOSITORIES in body: + if ATTR_APPS_REPOSITORIES in body: await asyncio.shield( - self.sys_store.update_repositories(set(body[ATTR_ADDONS_REPOSITORIES])) + self.sys_store.update_repositories(set(body[ATTR_APPS_REPOSITORIES])) ) await self.sys_resolution.evaluate.evaluate_system() @@ -230,7 +230,7 @@ async def update(self, request: web.Request) -> None: @api_process async def reload(self, request: web.Request) -> None: - """Reload add-ons, configuration, etc.""" + """Reload apps, configuration, etc.""" await asyncio.gather( asyncio.shield(self.sys_updater.reload()), asyncio.shield(self.sys_homeassistant.secrets.reload()), diff --git a/supervisor/auth.py b/supervisor/auth.py index 815e179ddba..a8d42b8ca3b 100644 --- a/supervisor/auth.py +++ b/supervisor/auth.py @@ -1,11 +1,11 @@ -"""Manage SSO for Add-ons with Home Assistant user.""" +"""Manage SSO for Apps with Home Assistant user.""" import asyncio import hashlib import logging from typing import Any, TypedDict, cast -from .addons.addon import Addon +from .addons.addon import App from .const import ATTR_PASSWORD, ATTR_USERNAME, FILE_HASSIO_AUTH, HomeAssistantUser from .coresys import CoreSys, CoreSysAttributes from .exceptions import ( @@ -34,7 +34,7 @@ class BackendAuthRequest(TypedDict): class 
Auth(FileConfiguration, CoreSysAttributes): - """Manage SSO for Add-ons with Home Assistant user.""" + """Manage SSO for Apps with Home Assistant user.""" def __init__(self, coresys: CoreSys) -> None: """Initialize updater.""" @@ -81,13 +81,13 @@ async def _dismatch_cache(self, username: str, password: str) -> None: await self.save_data() async def check_login( - self, addon: Addon, username: str | None, password: str | None + self, app: App, username: str | None, password: str | None ) -> bool: """Check username login.""" if username is None or password is None: raise AuthInvalidNonStringValueError(_LOGGER.error) - _LOGGER.info("Auth request from '%s' for '%s'", addon.slug, username) + _LOGGER.info("Auth request from '%s' for '%s'", app.slug, username) # Get from cache cache_hit = self._check_cache(username, password) @@ -99,18 +99,18 @@ async def check_login( # No cache hit if cache_hit is None: - return await self._backend_login(addon, username, password) + return await self._backend_login(app, username, password) # Home Assistant Core take over 1-2sec to validate it # Let's use the cache and update the cache in background if username not in self._running: self._running[username] = self.sys_create_task( - self._backend_login(addon, username, password) + self._backend_login(app, username, password) ) return cache_hit - async def _backend_login(self, addon: Addon, username: str, password: str) -> bool: + async def _backend_login(self, app: App, username: str, password: str) -> bool: """Check username login on core.""" try: async with self.sys_homeassistant.api.make_request( @@ -119,7 +119,7 @@ async def _backend_login(self, addon: Addon, username: str, password: str) -> bo json=cast( dict[str, Any], BackendAuthRequest( - username=username, password=password, addon=addon.slug + username=username, password=password, addon=app.slug ), ), ) as req: diff --git a/supervisor/backups/backup.py b/supervisor/backups/backup.py index dee2f5116af..66048931680 100644 --- 
a/supervisor/backups/backup.py +++ b/supervisor/backups/backup.py @@ -28,9 +28,9 @@ import voluptuous as vol from voluptuous.humanize import humanize_error -from ..addons.manager import Addon +from ..addons.manager import App from ..const import ( - ATTR_ADDONS, + ATTR_APPS, ATTR_COMPRESSED, ATTR_DATE, ATTR_DOCKER, @@ -50,7 +50,7 @@ ) from ..coresys import CoreSys from ..exceptions import ( - AddonsError, + AppsError, BackupError, BackupFatalIOError, BackupFileExistError, @@ -164,14 +164,14 @@ def compressed(self) -> bool: return self._data[ATTR_COMPRESSED] @property - def addons(self) -> list[dict[str, Any]]: + def apps(self) -> list[dict[str, Any]]: """Return backup date.""" - return self._data[ATTR_ADDONS] + return self._data[ATTR_APPS] @property - def addon_list(self) -> list[str]: - """Return a list of add-ons slugs.""" - return [addon_data[ATTR_SLUG] for addon_data in self.addons] + def app_list(self) -> list[str]: + """Return a list of apps slugs.""" + return [app_data[ATTR_SLUG] for app_data in self.apps] @property def folders(self) -> list[str]: @@ -180,12 +180,12 @@ def folders(self) -> list[str]: @property def repositories(self) -> list[str]: - """Return add-on store repositories.""" + """Return app store repositories.""" return self._data[ATTR_REPOSITORIES] @repositories.setter def repositories(self, value: list[str]) -> None: - """Set add-on store repositories.""" + """Set app store repositories.""" self._data[ATTR_REPOSITORIES] = value @property @@ -613,16 +613,16 @@ def _add_backup_json(): _LOGGER.error("Can't write backup: %s", err) @Job(name="backup_addon_save", cleanup=False) - async def _addon_save(self, addon: Addon) -> asyncio.Task | None: - """Store an add-on into backup.""" - self.sys_jobs.current.reference = slug = addon.slug + async def _app_save(self, app: App) -> asyncio.Task | None: + """Store an app into backup.""" + self.sys_jobs.current.reference = slug = app.slug if not self._outer_secure_tarfile: raise RuntimeError( "Cannot backup 
components without initializing backup tar" ) # Ensure it is still installed and get current data before proceeding - if not (curr_addon := self.sys_addons.get_local_only(slug)): + if not (curr_app := self.sys_apps.get_local_only(slug)): _LOGGER.warning( "Skipping backup of app %s because it has been uninstalled", slug, @@ -631,22 +631,22 @@ async def _addon_save(self, addon: Addon) -> asyncio.Task | None: tar_name = f"{slug}.tar{'.gz' if self.compressed else ''}" - addon_file = self._outer_secure_tarfile.create_tar( + app_file = self._outer_secure_tarfile.create_tar( f"./{tar_name}", gzip=self.compressed, ) # Take backup try: - start_task = await curr_addon.backup(addon_file) - except AddonsError as err: + start_task = await curr_app.backup(app_file) + except AppsError as err: raise BackupError(str(err)) from err # Store to config - self._data[ATTR_ADDONS].append( + self._data[ATTR_APPS].append( { ATTR_SLUG: slug, - ATTR_NAME: curr_addon.name, - ATTR_VERSION: curr_addon.version, + ATTR_NAME: curr_app.name, + ATTR_VERSION: curr_app.version, # Bug - addon_file.size used to give us this information # It always returns 0 in current securetar. Skipping until fixed ATTR_SIZE: 0, @@ -656,17 +656,17 @@ async def _addon_save(self, addon: Addon) -> asyncio.Task | None: return start_task @Job(name="backup_store_addons", cleanup=False) - async def store_addons(self, addon_list: list[Addon]) -> list[asyncio.Task]: - """Add a list of add-ons into backup. + async def store_apps(self, app_list: list[App]) -> list[asyncio.Task]: + """Add a list of apps into backup. - For each addon that needs to be started after backup, returns a Task which - completes when that addon has state 'started' (see addon.start). + For each app that needs to be started after backup, returns a Task which + completes when that app has state 'started' (see app.start). 
""" - # Save Add-ons sequential avoid issue on slow IO + # Save Apps sequential avoid issue on slow IO start_tasks: list[asyncio.Task] = [] - for addon in addon_list: + for app in app_list: try: - if start_task := await self._addon_save(addon): + if start_task := await self._app_save(app): start_tasks.append(start_task) except BackupFatalIOError: raise @@ -676,20 +676,20 @@ async def store_addons(self, addon_list: list[Addon]) -> list[asyncio.Task]: return start_tasks @Job(name="backup_addon_restore", cleanup=False) - async def _addon_restore(self, addon_slug: str) -> asyncio.Task | None: - """Restore an add-on from backup.""" - self.sys_jobs.current.reference = addon_slug + async def _app_restore(self, app_slug: str) -> asyncio.Task | None: + """Restore an app from backup.""" + self.sys_jobs.current.reference = app_slug if not self._tmp: raise RuntimeError("Cannot restore components without opening backup tar") - tar_name = f"{addon_slug}.tar{'.gz' if self.compressed else ''}" + tar_name = f"{app_slug}.tar{'.gz' if self.compressed else ''}" tar_path = Path(self._tmp.name, tar_name) # Verify the backup exists before trying to restore it if not await self.sys_run_in_executor(tar_path.exists): - raise BackupError(f"Can't find backup {addon_slug}", _LOGGER.error) + raise BackupError(f"Can't find backup {app_slug}", _LOGGER.error) - addon_file = SecureTarFile( + app_file = SecureTarFile( tar_path, gzip=self.compressed, bufsize=BUF_SIZE, @@ -698,23 +698,23 @@ async def _addon_restore(self, addon_slug: str) -> asyncio.Task | None: # Perform a restore try: - return await self.sys_addons.restore(addon_slug, addon_file) - except AddonsError as err: + return await self.sys_apps.restore(app_slug, app_file) + except AppsError as err: raise BackupError( - f"Can't restore backup {addon_slug}", _LOGGER.error + f"Can't restore backup {app_slug}", _LOGGER.error ) from err @Job(name="backup_restore_addons", cleanup=False) - async def restore_addons( - self, addon_list: list[str] + 
async def restore_apps( + self, app_list: list[str] ) -> tuple[bool, list[asyncio.Task]]: - """Restore a list add-on from backup.""" - # Save Add-ons sequential avoid issue on slow IO + """Restore a list app from backup.""" + # Save Apps sequential avoid issue on slow IO start_tasks: list[asyncio.Task] = [] success = True - for slug in addon_list: + for slug in app_list: try: - start_task = await self._addon_restore(slug) + start_task = await self._app_restore(slug) except Exception as err: # pylint: disable=broad-except _LOGGER.warning("Can't restore app %s: %s", slug, err) success = False @@ -725,20 +725,20 @@ async def restore_addons( return (success, start_tasks) @Job(name="backup_remove_delta_addons", cleanup=False) - async def remove_delta_addons(self) -> bool: - """Remove addons which are not in this backup.""" + async def remove_delta_apps(self) -> bool: + """Remove apps which are not in this backup.""" success = True - for addon in self.sys_addons.installed: - if addon.slug in self.addon_list: + for app in self.sys_apps.installed: + if app.slug in self.app_list: continue - # Remove Add-on because it's not a part of the new env + # Remove App because it's not a part of the new env # Do it sequential avoid issue on slow IO try: - await self.sys_addons.uninstall(addon.slug) - except AddonsError as err: + await self.sys_apps.uninstall(app.slug) + except AppsError as err: self.sys_jobs.current.capture_error(err) - _LOGGER.warning("Can't uninstall app %s: %s", addon.slug, err) + _LOGGER.warning("Can't uninstall app %s: %s", app.slug, err) success = False return success diff --git a/supervisor/backups/manager.py b/supervisor/backups/manager.py index 0a7df189c80..2692780f633 100644 --- a/supervisor/backups/manager.py +++ b/supervisor/backups/manager.py @@ -10,7 +10,7 @@ from shutil import copy from typing import cast -from ..addons.addon import Addon +from ..addons.addon import App from ..const import ( ATTR_DAYS_UNTIL_STALE, FILE_HASSIO_BACKUPS, @@ -502,7 +502,7 
@@ async def import_backup( async def _do_backup( self, backup: Backup, - addon_list: list[Addon], + app_list: list[App], folder_list: list[str], homeassistant: bool, homeassistant_exclude_database: bool | None, @@ -513,14 +513,14 @@ async def _do_backup( Must be called from an existing backup job. If the backup failed, the backup file is being deleted and None is returned. """ - addon_start_tasks: list[Awaitable[None]] | None = None + app_start_tasks: list[Awaitable[None]] | None = None try: await self.sys_core.set_state(CoreState.FREEZE) # Any exception leaving create() means the backup is incomplete # and will be discarded (file unlinked below). Individual - # add-on/folder errors are captured inside store_addons/ + # app/folder errors are captured inside store_addons/ # store_folders and do not propagate. async with backup.create(): # HomeAssistant Folder is for v1 @@ -532,10 +532,10 @@ async def _do_backup( else homeassistant_exclude_database ) - # Backup add-ons - if addon_list: + # Backup apps + if app_list: self._change_stage(BackupJobStage.ADDONS, backup) - addon_start_tasks = await backup.store_addons(addon_list) + app_start_tasks = await backup.store_apps(app_list) # Backup folders if folder_list: @@ -568,10 +568,10 @@ async def _do_backup( self._change_stage(BackupJobStage.COPY_ADDITONAL_LOCATIONS, backup) await self._copy_to_additional_locations(backup, additional_locations) - if addon_start_tasks: + if app_start_tasks: self._change_stage(BackupJobStage.AWAIT_ADDON_RESTARTS, backup) - # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere - await asyncio.gather(*addon_start_tasks, return_exceptions=True) + # Ignore exceptions from waiting for app startup, app errors handled elsewhere + await asyncio.gather(*app_start_tasks, return_exceptions=True) return backup finally: @@ -619,7 +619,7 @@ async def do_backup_full( _LOGGER.info("Creating new full backup with slug %s", new_backup.slug) backup = await self._do_backup( 
new_backup, - self.sys_addons.installed, + self.sys_apps.installed, ALL_FOLDERS, True, homeassistant_exclude_database, @@ -641,7 +641,7 @@ async def do_backup_partial( name: str = "", filename: str | None = None, *, - addons: list[str] | None = None, + apps: list[str] | None = None, folders: list[str] | None = None, password: str | None = None, homeassistant: bool = False, @@ -663,7 +663,7 @@ async def do_backup_partial( self, {JobCondition.FREE_SPACE}, "BackupManager.do_backup_partial" ) - addons = addons or [] + apps = apps or [] folders = folders or [] # HomeAssistant Folder is for v1 @@ -671,7 +671,7 @@ async def do_backup_partial( folders.remove(FOLDER_HOMEASSISTANT) homeassistant = True - if len(addons) == 0 and len(folders) == 0 and not homeassistant: + if len(apps) == 0 and len(folders) == 0 and not homeassistant: _LOGGER.error("Nothing to create backup for") new_backup = self._create_backup( @@ -679,13 +679,13 @@ async def do_backup_partial( ) _LOGGER.info("Creating new partial backup with slug %s", new_backup.slug) - addon_list = [] - for addon_slug in addons: - addon = self.sys_addons.get(addon_slug) - if addon and addon.is_installed: - addon_list.append(cast(Addon, addon)) + app_list = [] + for app_slug in apps: + app = self.sys_apps.get(app_slug) + if app and app.is_installed: + app_list.append(cast(App, app)) continue - _LOGGER.warning("App %s not found/installed", addon_slug) + _LOGGER.warning("App %s not found/installed", app_slug) # If being run in the background, notify caller that validation has completed if validation_complete: @@ -693,7 +693,7 @@ async def do_backup_partial( backup = await self._do_backup( new_backup, - addon_list, + app_list, folders, homeassistant, homeassistant_exclude_database, @@ -706,7 +706,7 @@ async def do_backup_partial( async def _do_restore( self, backup: Backup, - addon_list: list[str], + app_list: list[str], folder_list: list[str], homeassistant: bool, replace: bool, @@ -716,7 +716,7 @@ async def _do_restore( Must 
be called from an existing restore job. """ - addon_start_tasks: list[Awaitable[None]] | None = None + app_start_tasks: list[Awaitable[None]] | None = None success = True try: @@ -732,18 +732,18 @@ async def _do_restore( self._change_stage(RestoreJobStage.HOME_ASSISTANT, backup) task_hass = await backup.restore_homeassistant() - # Delete delta add-ons + # Delete delta apps if replace: self._change_stage(RestoreJobStage.REMOVE_DELTA_ADDONS, backup) - success = success and await backup.remove_delta_addons() + success = success and await backup.remove_delta_apps() - if addon_list: + if app_list: self._change_stage(RestoreJobStage.ADDON_REPOSITORIES, backup) await backup.restore_repositories(replace) self._change_stage(RestoreJobStage.ADDONS, backup) - restore_success, addon_start_tasks = await backup.restore_addons( - addon_list + restore_success, app_start_tasks = await backup.restore_apps( + app_list ) success = success and restore_success @@ -767,12 +767,10 @@ async def _do_restore( f"Restore {backup.slug} error, see supervisor logs" ) from err else: - if addon_start_tasks: + if app_start_tasks: self._change_stage(RestoreJobStage.AWAIT_ADDON_RESTARTS, backup) - # Failure to resume addons post restore is still a restore failure - if any( - await asyncio.gather(*addon_start_tasks, return_exceptions=True) - ): + # Failure to resume apps post restore is still a restore failure + if any(await asyncio.gather(*app_start_tasks, return_exceptions=True)): return False # Wait for mount activations (failures don't affect restore success @@ -871,12 +869,12 @@ async def do_restore_full( await self.sys_core.set_state(CoreState.FREEZE) try: - # Stop Home-Assistant / Add-ons + # Stop Home-Assistant / Apps await self.sys_core.shutdown(remove_homeassistant_container=True) success = await self._do_restore( backup, - backup.addon_list, + backup.app_list, backup.folders, homeassistant=True, replace=True, @@ -907,7 +905,7 @@ async def do_restore_partial( backup: Backup, *, homeassistant: 
bool = False, - addons: list[str] | None = None, + apps: list[str] | None = None, folders: list[str] | None = None, password: str | None = None, location: str | None | type[DEFAULT] = DEFAULT, @@ -917,7 +915,7 @@ async def do_restore_partial( # Add backup ID to job self.sys_jobs.current.reference = backup.slug - addon_list = addons or [] + app_list = apps or [] folder_list = folders or [] # Version 1 @@ -949,7 +947,7 @@ async def do_restore_partial( try: success = await self._do_restore( backup, - addon_list, + app_list, folder_list, homeassistant=homeassistant, replace=False, @@ -972,27 +970,27 @@ async def freeze_all(self, timeout: float = DEFAULT_FREEZE_TIMEOUT) -> None: """Freeze system to prepare for an external backup such as an image snapshot.""" await self.sys_core.set_state(CoreState.FREEZE) - # Determine running addons - installed = self.sys_addons.installed.copy() + # Determine running apps + installed = self.sys_apps.installed.copy() is_running: list[bool] = await asyncio.gather( - *[addon.is_running() for addon in installed] + *[app.is_running() for app in installed] ) - running_addons = [ + running_apps = [ installed[ind] for ind in range(len(installed)) if is_running[ind] ] # Create thaw task first to ensure we eventually undo freezes even if the below fails self._thaw_task = asyncio.shield( - self.sys_create_task(self._thaw_all(running_addons, timeout)) + self.sys_create_task(self._thaw_all(running_apps, timeout)) ) # Tell Home Assistant to freeze for a backup self._change_stage(BackupJobStage.HOME_ASSISTANT) await self.sys_homeassistant.begin_backup() - # Run all pre-backup tasks for addons + # Run all pre-backup tasks for apps self._change_stage(BackupJobStage.ADDONS) - await asyncio.gather(*[addon.begin_backup() for addon in running_addons]) + await asyncio.gather(*[app.begin_backup() for app in running_apps]) @Job( name="backup_manager_thaw_all", @@ -1000,7 +998,7 @@ async def freeze_all(self, timeout: float = DEFAULT_FREEZE_TIMEOUT) -> None: 
on_condition=BackupJobError, ) async def _thaw_all( - self, running_addons: list[Addon], timeout: float = DEFAULT_FREEZE_TIMEOUT + self, running_apps: list[App], timeout: float = DEFAULT_FREEZE_TIMEOUT ) -> None: """Thaw system after user signal or timeout.""" try: @@ -1015,10 +1013,10 @@ async def _thaw_all( await self.sys_homeassistant.end_backup() self._change_stage(BackupJobStage.ADDONS) - addon_start_tasks: list[asyncio.Task] = [ + app_start_tasks: list[asyncio.Task] = [ task for task in await asyncio.gather( - *[addon.end_backup() for addon in running_addons] + *[app.end_backup() for app in running_apps] ) if task ] @@ -1027,9 +1025,9 @@ async def _thaw_all( self._thaw_event.clear() self._thaw_task = None - if addon_start_tasks: + if app_start_tasks: self._change_stage(BackupJobStage.AWAIT_ADDON_RESTARTS) - await asyncio.gather(*addon_start_tasks, return_exceptions=True) + await asyncio.gather(*app_start_tasks, return_exceptions=True) @Job( name="backup_manager_signal_thaw", diff --git a/supervisor/backups/utils.py b/supervisor/backups/utils.py index e327c9c73fc..705df75b417 100644 --- a/supervisor/backups/utils.py +++ b/supervisor/backups/utils.py @@ -1,4 +1,4 @@ -"""Util add-on functions.""" +"""Util app functions.""" import hashlib import re diff --git a/supervisor/backups/validate.py b/supervisor/backups/validate.py index 8f669b6b3a7..7db94c7c2c2 100644 --- a/supervisor/backups/validate.py +++ b/supervisor/backups/validate.py @@ -9,7 +9,7 @@ from ..backups.const import BackupType from ..const import ( - ATTR_ADDONS, + ATTR_APPS, ATTR_COMPRESSED, ATTR_DATE, ATTR_DAYS_UNTIL_STALE, @@ -42,13 +42,13 @@ ] -def unique_addons(addons_list): - """Validate that an add-on is unique.""" - single = {addon[ATTR_SLUG] for addon in addons_list} +def unique_apps(apps_list): + """Validate that an app is unique.""" + single = {app[ATTR_SLUG] for app in apps_list} - if len(single) != len(addons_list): + if len(single) != len(apps_list): raise vol.Invalid("Invalid app list in 
backup!") from None - return addons_list + return apps_list def v1_homeassistant( @@ -114,7 +114,7 @@ def v1_protected(protected: bool | str) -> bool: vol.Optional(ATTR_FOLDERS, default=list): vol.All( v1_folderlist, [vol.In(ALL_FOLDERS)], vol.Unique() ), - vol.Optional(ATTR_ADDONS, default=list): vol.All( + vol.Optional(ATTR_APPS, default=list): vol.All( [ vol.Schema( { @@ -126,7 +126,7 @@ def v1_protected(protected: bool | str) -> bool: extra=vol.REMOVE_EXTRA, ) ], - unique_addons, + unique_apps, ), vol.Optional(ATTR_REPOSITORIES, default=list): repositories, vol.Optional(ATTR_EXTRA, default=dict): dict, diff --git a/supervisor/bootstrap.py b/supervisor/bootstrap.py index 3b700d3281d..ee71e3c346c 100644 --- a/supervisor/bootstrap.py +++ b/supervisor/bootstrap.py @@ -12,7 +12,7 @@ from colorlog import ColoredFormatter -from .addons.manager import AddonManager +from .addons.manager import AppManager from .api import RestAPI from .arch import CpuArchManager from .auth import Auth @@ -78,7 +78,7 @@ async def initialize_coresys() -> CoreSys: coresys.api = RestAPI(coresys) coresys.supervisor = Supervisor(coresys) coresys.homeassistant = await HomeAssistant(coresys).load_config() - coresys.addons = await AddonManager(coresys).load_config() + coresys.apps = await AppManager(coresys).load_config() coresys.backups = await BackupManager(coresys).load_config() coresys.host = await HostManager(coresys).post_init() coresys.hardware = await HardwareManager.create(coresys) @@ -130,26 +130,26 @@ def initialize_system(coresys: CoreSys) -> None: _LOGGER.debug("Creating Supervisor SSL/TLS folder at '%s'", config.path_ssl) config.path_ssl.mkdir() - # Supervisor addon data folder - if not config.path_addons_data.is_dir(): + # Supervisor app data folder + if not config.path_apps_data.is_dir(): _LOGGER.debug( - "Creating Supervisor app data folder at '%s'", config.path_addons_data + "Creating Supervisor app data folder at '%s'", config.path_apps_data ) - 
config.path_addons_data.mkdir(parents=True) + config.path_apps_data.mkdir(parents=True) - if not config.path_addons_local.is_dir(): + if not config.path_apps_local.is_dir(): _LOGGER.debug( "Creating Supervisor app local repository folder at '%s'", - config.path_addons_local, + config.path_apps_local, ) - config.path_addons_local.mkdir(parents=True) + config.path_apps_local.mkdir(parents=True) - if not config.path_addons_git.is_dir(): + if not config.path_apps_git.is_dir(): _LOGGER.debug( "Creating Supervisor app git repositories folder at '%s'", - config.path_addons_git, + config.path_apps_git, ) - config.path_addons_git.mkdir(parents=True) + config.path_apps_git.mkdir(parents=True) # Supervisor tmp folder if not config.path_tmp.is_dir(): @@ -219,13 +219,13 @@ def initialize_system(coresys: CoreSys) -> None: ) config.path_emergency.mkdir() - # Addon Configs folder - if not config.path_addon_configs.is_dir(): + # App Configs folder + if not config.path_app_configs.is_dir(): _LOGGER.debug( "Creating Supervisor app configs folder at '%s'", - config.path_addon_configs, + config.path_app_configs, ) - config.path_addon_configs.mkdir() + config.path_app_configs.mkdir() if not config.path_cid_files.is_dir(): _LOGGER.debug("Creating Docker cidfiles folder at '%s'", config.path_cid_files) diff --git a/supervisor/config.py b/supervisor/config.py index 03abb91fbc4..81929fba773 100644 --- a/supervisor/config.py +++ b/supervisor/config.py @@ -9,7 +9,7 @@ from awesomeversion import AwesomeVersion from .const import ( - ATTR_ADDONS_CUSTOM_LIST, + ATTR_APPS_CUSTOM_LIST, ATTR_COUNTRY, ATTR_DEBUG, ATTR_DEBUG_BLOCK, @@ -241,43 +241,43 @@ def path_ssl(self) -> Path: return self.path_supervisor / HASSIO_SSL @property - def path_addons_core(self) -> Path: - """Return git path for core Add-ons.""" + def path_apps_core(self) -> Path: + """Return git path for core Apps.""" return self.path_supervisor / ADDONS_CORE @property - def path_addons_git(self) -> Path: - """Return path for Git 
Add-on.""" + def path_apps_git(self) -> Path: + """Return path for Git App.""" return self.path_supervisor / ADDONS_GIT @property - def path_addons_local(self) -> Path: - """Return path for custom Add-ons.""" + def path_apps_local(self) -> Path: + """Return path for custom Apps.""" return self.path_supervisor / ADDONS_LOCAL @property - def path_extern_addons_local(self) -> PurePath: - """Return path for custom Add-ons.""" + def path_extern_apps_local(self) -> PurePath: + """Return path for custom Apps.""" return PurePath(self.path_extern_supervisor, ADDONS_LOCAL) @property - def path_addons_data(self) -> Path: - """Return root Add-on data folder.""" + def path_apps_data(self) -> Path: + """Return root App data folder.""" return self.path_supervisor / ADDONS_DATA @property - def path_extern_addons_data(self) -> PurePath: - """Return root add-on data folder external for Docker.""" + def path_extern_apps_data(self) -> PurePath: + """Return root app data folder external for Docker.""" return PurePath(self.path_extern_supervisor, ADDONS_DATA) @property - def path_addon_configs(self) -> Path: - """Return root Add-on configs folder.""" + def path_app_configs(self) -> Path: + """Return root App configs folder.""" return self.path_supervisor / ADDON_CONFIGS @property - def path_extern_addon_configs(self) -> PurePath: - """Return root Add-on configs folder external for Docker.""" + def path_extern_app_configs(self) -> PurePath: + """Return root App configs folder external for Docker.""" return PurePath(self.path_extern_supervisor, ADDON_CONFIGS) @property @@ -411,23 +411,23 @@ def path_extern_cid_files(self) -> PurePath: return PurePath(self.path_extern_supervisor, CID_FILES) @property - def addons_repositories(self) -> list[str]: - """Return list of custom Add-on repositories.""" - return self._data[ATTR_ADDONS_CUSTOM_LIST] + def apps_repositories(self) -> list[str]: + """Return list of custom App repositories.""" + return self._data[ATTR_APPS_CUSTOM_LIST] - def 
add_addon_repository(self, repo: str) -> None: + def add_app_repository(self, repo: str) -> None: """Add a custom repository to list.""" - if repo in self._data[ATTR_ADDONS_CUSTOM_LIST]: + if repo in self._data[ATTR_APPS_CUSTOM_LIST]: return - self._data[ATTR_ADDONS_CUSTOM_LIST].append(repo) + self._data[ATTR_APPS_CUSTOM_LIST].append(repo) - def drop_addon_repository(self, repo: str) -> None: + def drop_app_repository(self, repo: str) -> None: """Remove a custom repository from list.""" - if repo not in self._data[ATTR_ADDONS_CUSTOM_LIST]: + if repo not in self._data[ATTR_APPS_CUSTOM_LIST]: return - self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo) + self._data[ATTR_APPS_CUSTOM_LIST].remove(repo) def local_to_extern_path(self, path: PurePath) -> PurePath: """Translate a path relative to supervisor data in the container to its extern path.""" diff --git a/supervisor/const.py b/supervisor/const.py index 996f32458bd..715af6cd81b 100644 --- a/supervisor/const.py +++ b/supervisor/const.py @@ -106,10 +106,10 @@ ATTR_ACCESSPOINTS = "accesspoints" ATTR_ACTIVE = "active" ATTR_ACTIVITY_LED = "activity_led" -ATTR_ADDON = "addon" -ATTR_ADDONS = "addons" -ATTR_ADDONS_CUSTOM_LIST = "addons_custom_list" -ATTR_ADDONS_REPOSITORIES = "addons_repositories" +ATTR_APP = "addon" +ATTR_APPS = "addons" +ATTR_APPS_CUSTOM_LIST = "addons_custom_list" +ATTR_APPS_REPOSITORIES = "addons_repositories" ATTR_ADDR_GEN_MODE = "addr_gen_mode" ATTR_ADDRESS = "address" ATTR_ADDRESS_DATA = "address-data" @@ -433,16 +433,16 @@ DEFAULT_CHUNK_SIZE = 2**16 # 64KiB -class AddonBootConfig(StrEnum): - """Boot mode config for the add-on.""" +class AppBootConfig(StrEnum): + """Boot mode config for the app.""" AUTO = "auto" MANUAL = "manual" MANUAL_ONLY = "manual_only" -class AddonBoot(StrEnum): - """Boot mode for the add-on.""" +class AppBoot(StrEnum): + """Boot mode for the app.""" AUTO = "auto" MANUAL = "manual" @@ -450,15 +450,15 @@ class AddonBoot(StrEnum): @classmethod def _missing_(cls, value: object) -> 
Self | None: """Convert 'forced' config values to their counterpart.""" - if value == AddonBootConfig.MANUAL_ONLY: + if value == AppBootConfig.MANUAL_ONLY: for member in cls: - if member == AddonBoot.MANUAL: + if member == AppBoot.MANUAL: return member return None -class AddonStartup(StrEnum): - """Startup types of Add-on.""" +class AppStartup(StrEnum): + """Startup types of App.""" INITIALIZE = "initialize" SYSTEM = "system" @@ -467,16 +467,16 @@ class AddonStartup(StrEnum): ONCE = "once" -class AddonStage(StrEnum): - """Stage types of add-on.""" +class AppStage(StrEnum): + """Stage types of app.""" STABLE = "stable" EXPERIMENTAL = "experimental" DEPRECATED = "deprecated" -class AddonState(StrEnum): - """State of add-on.""" +class AppState(StrEnum): + """State of app.""" STARTUP = "startup" STARTED = "started" diff --git a/supervisor/core.py b/supervisor/core.py index 1594e679c8f..a7d53d552d8 100644 --- a/supervisor/core.py +++ b/supervisor/core.py @@ -11,7 +11,7 @@ ATTR_STARTUP, RUN_SUPERVISOR_STATE, STARTING_STATES, - AddonStartup, + AppStartup, BusEvent, CoreState, ) @@ -169,8 +169,8 @@ async def setup(self) -> None: self.sys_arch.load(), # Load Stores self.sys_store.load(), - # Load Add-ons - self.sys_addons.load(), + # Load Apps + self.sys_apps.load(), # load last available data self.sys_backups.load(), # load services @@ -235,8 +235,8 @@ async def start(self) -> None: return try: - # Start addon mark as initialize - await self.sys_addons.boot(AddonStartup.INITIALIZE) + # Start app mark as initialize + await self.sys_apps.boot(AppStartup.INITIALIZE) # HomeAssistant is already running, only Supervisor restarted if await self.sys_hardware.helper.last_boot() == self.sys_config.last_boot: @@ -246,11 +246,11 @@ async def start(self) -> None: # reset register services / discovery await self.sys_services.reset() - # start addon mark as system - await self.sys_addons.boot(AddonStartup.SYSTEM) + # start app mark as system + await self.sys_apps.boot(AppStartup.SYSTEM) 
- # start addon mark as services - await self.sys_addons.boot(AddonStartup.SERVICES) + # start app mark as services + await self.sys_apps.boot(AppStartup.SERVICES) # run HomeAssistant if ( @@ -279,8 +279,8 @@ async def start(self) -> None: suggestions=[SuggestionType.EXECUTE_REPAIR], ) - # start addon mark as application - await self.sys_addons.boot(AddonStartup.APPLICATION) + # start app mark as application + await self.sys_apps.boot(AppStartup.APPLICATION) # store new last boot await self._update_last_boot() @@ -357,8 +357,8 @@ async def shutdown(self, *, remove_homeassistant_container: bool = False) -> Non if self.state == CoreState.RUNNING: await self.set_state(CoreState.SHUTDOWN) - # Shutdown Application Add-ons, using Home Assistant API - await self.sys_addons.shutdown(AddonStartup.APPLICATION) + # Shutdown Application Apps, using Home Assistant API + await self.sys_apps.shutdown(AppStartup.APPLICATION) # Close Home Assistant with suppress(HassioError): @@ -366,10 +366,10 @@ async def shutdown(self, *, remove_homeassistant_container: bool = False) -> Non remove_container=remove_homeassistant_container ) - # Shutdown System Add-ons - await self.sys_addons.shutdown(AddonStartup.SERVICES) - await self.sys_addons.shutdown(AddonStartup.SYSTEM) - await self.sys_addons.shutdown(AddonStartup.INITIALIZE) + # Shutdown System Apps + await self.sys_apps.shutdown(AppStartup.SERVICES) + await self.sys_apps.shutdown(AppStartup.SYSTEM) + await self.sys_apps.shutdown(AppStartup.INITIALIZE) # Shutdown all Plugins if self.state in (CoreState.STOPPING, CoreState.SHUTDOWN): @@ -470,7 +470,7 @@ async def repair(self) -> None: await self.sys_plugins.repair() # Restore core functionality - await self.sys_addons.repair() + await self.sys_apps.repair() await self.sys_homeassistant.core.repair() # Tag version for latest diff --git a/supervisor/coresys.py b/supervisor/coresys.py index fdfce35e981..9dc7923b1c3 100644 --- a/supervisor/coresys.py +++ b/supervisor/coresys.py @@ -27,7 +27,7 
@@ ) if TYPE_CHECKING: - from .addons.manager import AddonManager + from .addons.manager import AppManager from .api import RestAPI from .arch import CpuArchManager from .auth import Auth @@ -82,7 +82,7 @@ def __init__(self) -> None: self._auth: Auth | None = None self._homeassistant: HomeAssistant | None = None self._supervisor: Supervisor | None = None - self._addons: AddonManager | None = None + self._apps: AppManager | None = None self._api: RestAPI | None = None self._updater: Updater | None = None self._backups: BackupManager | None = None @@ -350,18 +350,18 @@ def updater(self, value: Updater) -> None: self._updater = value @property - def addons(self) -> AddonManager: - """Return AddonManager object.""" - if self._addons is None: - raise RuntimeError("AddonManager not set!") - return self._addons + def apps(self) -> AppManager: + """Return AppManager object.""" + if self._apps is None: + raise RuntimeError("AppManager not set!") + return self._apps - @addons.setter - def addons(self, value: AddonManager) -> None: - """Set a AddonManager object.""" - if self._addons: - raise RuntimeError("AddonManager already set!") - self._addons = value + @apps.setter + def apps(self, value: AppManager) -> None: + """Set a AppManager object.""" + if self._apps: + raise RuntimeError("AppManager already set!") + self._apps = value @property def store(self) -> StoreManager: @@ -771,9 +771,9 @@ def sys_updater(self) -> Updater: return self.coresys.updater @property - def sys_addons(self) -> AddonManager: - """Return AddonManager object.""" - return self.coresys.addons + def sys_apps(self) -> AppManager: + """Return AppManager object.""" + return self.coresys.apps @property def sys_store(self) -> StoreManager: diff --git a/supervisor/discovery/__init__.py b/supervisor/discovery/__init__.py index 8e5c8248d16..9541c6b0c86 100644 --- a/supervisor/discovery/__init__.py +++ b/supervisor/discovery/__init__.py @@ -15,7 +15,7 @@ from .validate import SCHEMA_DISCOVERY_CONFIG if 
TYPE_CHECKING: - from ..addons.addon import Addon + from ..addons.addon import App _LOGGER: logging.Logger = logging.getLogger(__name__) @@ -71,10 +71,10 @@ def list_messages(self) -> list[Message]: """Return list of available discovery messages.""" return list(self.message_obj.values()) - async def send(self, addon: Addon, service: str, config: dict[str, Any]) -> Message: + async def send(self, app: App, service: str, config: dict[str, Any]) -> Message: """Send a discovery message to Home Assistant.""" # Create message - message = Message(addon.slug, service, config) + message = Message(app.slug, service, config) # Already exists? for exists_msg in self.list_messages: @@ -84,12 +84,12 @@ async def send(self, addon: Addon, service: str, config: dict[str, Any]) -> Mess message = exists_msg message.config = config else: - _LOGGER.debug("Duplicate discovery message from %s", addon.slug) + _LOGGER.debug("Duplicate discovery message from %s", app.slug) return exists_msg break _LOGGER.info( - "Sending discovery to Home Assistant %s from %s", service, addon.slug + "Sending discovery to Home Assistant %s from %s", service, app.slug ) self.message_obj[message.uuid] = message await self.save() diff --git a/supervisor/discovery/validate.py b/supervisor/discovery/validate.py index a577073d893..02bcff167e8 100644 --- a/supervisor/discovery/validate.py +++ b/supervisor/discovery/validate.py @@ -2,7 +2,7 @@ import voluptuous as vol -from ..const import ATTR_ADDON, ATTR_CONFIG, ATTR_DISCOVERY, ATTR_SERVICE, ATTR_UUID +from ..const import ATTR_APP, ATTR_CONFIG, ATTR_DISCOVERY, ATTR_SERVICE, ATTR_UUID from ..utils.validate import schema_or from ..validate import uuid_match @@ -11,7 +11,7 @@ vol.Schema( { vol.Required(ATTR_UUID): uuid_match, - vol.Required(ATTR_ADDON): str, + vol.Required(ATTR_APP): str, vol.Required(ATTR_SERVICE): str, vol.Required(ATTR_CONFIG): vol.Maybe(dict), }, diff --git a/supervisor/docker/addon.py b/supervisor/docker/addon.py index 0adc562da66..8605e1f73ef 
100644 --- a/supervisor/docker/addon.py +++ b/supervisor/docker/addon.py @@ -1,4 +1,4 @@ -"""Init file for Supervisor add-on Docker object.""" +"""Init file for Supervisor app Docker object.""" from __future__ import annotations @@ -14,7 +14,7 @@ from attr import evolve from awesomeversion import AwesomeVersion -from ..addons.build import AddonBuild +from ..addons.build import AppBuild from ..addons.const import MappingType from ..bus import EventListener from ..const import ( @@ -71,7 +71,7 @@ from .interface import DockerInterface if TYPE_CHECKING: - from ..addons.addon import Addon + from ..addons.addon import App _LOGGER: logging.Logger = logging.getLogger(__name__) @@ -79,12 +79,12 @@ NO_ADDDRESS = IPv4Address("0.0.0.0") -class DockerAddon(DockerInterface): +class DockerApp(DockerInterface): """Docker Supervisor wrapper for Home Assistant.""" - def __init__(self, coresys: CoreSys, addon: Addon): + def __init__(self, coresys: CoreSys, app: App): """Initialize Docker Home Assistant wrapper.""" - self.addon: Addon = addon + self.app: App = app super().__init__(coresys) self._hw_listener: EventListener | None = None @@ -97,12 +97,12 @@ def slug_to_name(slug: str) -> str: @property def image(self) -> str | None: """Return name of Docker image.""" - return self.addon.image + return self.app.image @property def ip_address(self) -> IPv4Address: """Return IP address of this container.""" - if self.addon.host_network: + if self.app.host_network: return self.sys_docker.network.gateway if not self._meta: return NO_ADDDRESS @@ -118,43 +118,43 @@ def ip_address(self) -> IPv4Address: @property def timeout(self) -> int: """Return timeout for Docker actions.""" - return self.addon.timeout + return self.app.timeout @property def version(self) -> AwesomeVersion: """Return version of Docker image.""" - return self.addon.version + return self.app.version @property def arch(self) -> str | None: """Return arch of Docker image.""" - if self.addon.legacy: + if self.app.legacy: return 
str(self.sys_arch.default) return super().arch @property def name(self) -> str: """Return name of Docker container.""" - return DockerAddon.slug_to_name(self.addon.slug) + return DockerApp.slug_to_name(self.app.slug) @property def environment(self) -> dict[str, str | int | None]: - """Return environment for Docker add-on.""" - addon_env = cast(dict[str, str | int | None], self.addon.environment or {}) + """Return environment for Docker app.""" + app_env = cast(dict[str, str | int | None], self.app.environment or {}) - # Provide options for legacy add-ons - if self.addon.legacy: - for key, value in self.addon.options.items(): + # Provide options for legacy apps + if self.app.legacy: + for key, value in self.app.options.items(): if isinstance(value, (int, str)): - addon_env[key] = value + app_env[key] = value else: _LOGGER.warning("Can not set nested option %s as Docker env", key) return { - **addon_env, + **app_env, ENV_TIME: self.sys_timezone, - ENV_TOKEN: self.addon.supervisor_token, - ENV_TOKEN_OLD: self.addon.supervisor_token, + ENV_TOKEN: self.app.supervisor_token, + ENV_TOKEN_OLD: self.app.supervisor_token, } @property @@ -163,7 +163,7 @@ def cgroups_rules(self) -> list[str] | None: rules = set() # Attach correct cgroups for static devices - for device_path in self.addon.static_devices: + for device_path in self.app.static_devices: try: device = self.sys_hardware.get_by_path(device_path) except HardwareNotFound: @@ -174,41 +174,41 @@ def cgroups_rules(self) -> list[str] | None: if not self.sys_hardware.policy.allowed_for_access(device): _LOGGER.error( "App %s tried to access blocked device %s!", - self.addon.name, + self.app.name, device.name, ) continue rules.add(self.sys_hardware.policy.get_cgroups_rule(device)) # Attach correct cgroups for devices - for device in self.addon.devices: + for device in self.app.devices: if not self.sys_hardware.policy.allowed_for_access(device): _LOGGER.error( "App %s tried to access blocked device %s!", - self.addon.name, + 
self.app.name, device.name, ) continue rules.add(self.sys_hardware.policy.get_cgroups_rule(device)) # Video - if self.addon.with_video: + if self.app.with_video: rules.update(self.sys_hardware.policy.get_cgroups_rules(PolicyGroup.VIDEO)) # GPIO - if self.addon.with_gpio: + if self.app.with_gpio: rules.update(self.sys_hardware.policy.get_cgroups_rules(PolicyGroup.GPIO)) # UART - if self.addon.with_uart: + if self.app.with_uart: rules.update(self.sys_hardware.policy.get_cgroups_rules(PolicyGroup.UART)) # USB - if self.addon.with_usb: + if self.app.with_usb: rules.update(self.sys_hardware.policy.get_cgroups_rules(PolicyGroup.USB)) # Full Access - if not self.addon.protected and self.addon.with_full_access: + if not self.app.protected and self.app.with_full_access: return [self.sys_hardware.policy.get_full_access()] # Return None if no rules is present @@ -218,13 +218,13 @@ def cgroups_rules(self) -> list[str] | None: @property def ports(self) -> dict[str, str | int | None] | None: - """Filter None from add-on ports.""" - if self.addon.host_network or not self.addon.ports: + """Filter None from app ports.""" + if self.app.host_network or not self.app.ports: return None return { container_port: host_port - for container_port, host_port in self.addon.ports.items() + for container_port, host_port in self.app.ports.items() if host_port } @@ -236,23 +236,23 @@ def security_opt(self) -> list[str]: # AppArmor if ( not self.sys_host.apparmor.available - or self.addon.apparmor == SECURITY_DISABLE + or self.app.apparmor == SECURITY_DISABLE ): security.append("apparmor=unconfined") - elif self.addon.apparmor == SECURITY_PROFILE: - security.append(f"apparmor={self.addon.slug}") + elif self.app.apparmor == SECURITY_PROFILE: + security.append(f"apparmor={self.app.slug}") return security @property def tmpfs(self) -> dict[str, str] | None: - """Return tmpfs for Docker add-on.""" + """Return tmpfs for Docker app.""" tmpfs = {} - if self.addon.with_tmpfs: + if self.app.with_tmpfs: 
tmpfs["/tmp"] = "" # noqa: S108 - if not self.addon.host_ipc: + if not self.app.host_ipc: tmpfs["/dev/shm"] = "" # noqa: S108 # Return None if no tmpfs is present @@ -270,36 +270,36 @@ def network_mapping(self) -> dict[str, IPv4Address]: @property def network_mode(self) -> Literal["host"] | None: - """Return network mode for add-on.""" - if self.addon.host_network: + """Return network mode for app.""" + if self.app.host_network: return "host" return None @property def pid_mode(self) -> str | None: - """Return PID mode for add-on.""" - if not self.addon.protected and self.addon.host_pid: + """Return PID mode for app.""" + if not self.app.protected and self.app.host_pid: return "host" return None @property def uts_mode(self) -> str | None: - """Return UTS mode for add-on.""" - if self.addon.host_uts: + """Return UTS mode for app.""" + if self.app.host_uts: return "host" return None @property def capabilities(self) -> list[Capabilities] | None: """Generate needed capabilities.""" - capabilities: set[Capabilities] = set(self.addon.privileged) + capabilities: set[Capabilities] = set(self.app.privileged) # Need work with kernel modules - if self.addon.with_kernel_modules: + if self.app.with_kernel_modules: capabilities.add(Capabilities.SYS_MODULE) # Need schedule functions - if self.addon.with_realtime: + if self.app.with_realtime: capabilities.add(Capabilities.SYS_NICE) # Return None if no capabilities is present @@ -309,19 +309,19 @@ def capabilities(self) -> list[Capabilities] | None: @property def ulimits(self) -> list[Ulimit] | None: - """Generate ulimits for add-on.""" + """Generate ulimits for app.""" limits: list[Ulimit] = [] # Need schedule functions - if self.addon.with_realtime: + if self.app.with_realtime: limits.append(Ulimit(name="rtprio", soft=90, hard=99)) # Set available memory for memlock to 128MB mem = 128 * 1024 * 1024 limits.append(Ulimit(name="memlock", soft=mem, hard=mem)) - # Add configurable ulimits from add-on config - for name, config in 
self.addon.ulimits.items(): + # Add configurable ulimits from app config + for name, config in self.app.ulimits.items(): if isinstance(config, int): # Simple format: both soft and hard limits are the same limits.append(Ulimit(name=name, soft=config, hard=config)) @@ -343,131 +343,129 @@ def cpu_rt_runtime(self) -> int | None: return None # If need CPU RT - if self.addon.with_realtime: + if self.app.with_realtime: return DOCKER_CPU_RUNTIME_ALLOCATION return None @property def mounts(self) -> list[DockerMount]: """Return mounts for container.""" - addon_mapping = self.addon.map_volumes + app_mapping = self.app.map_volumes target_data_path: str | None = None - if MappingType.DATA in addon_mapping: - target_data_path = addon_mapping[MappingType.DATA].path + if MappingType.DATA in app_mapping: + target_data_path = app_mapping[MappingType.DATA].path mounts = [ MOUNT_DEV, DockerMount( type=MountType.BIND, - source=self.addon.path_extern_data.as_posix(), + source=self.app.path_extern_data.as_posix(), target=target_data_path or PATH_PRIVATE_DATA.as_posix(), read_only=False, ), ] # setup config mappings - if MappingType.CONFIG in addon_mapping: + if MappingType.CONFIG in app_mapping: mounts.append( DockerMount( type=MountType.BIND, source=self.sys_config.path_extern_homeassistant.as_posix(), - target=addon_mapping[MappingType.CONFIG].path + target=app_mapping[MappingType.CONFIG].path or PATH_HOMEASSISTANT_CONFIG_LEGACY.as_posix(), - read_only=addon_mapping[MappingType.CONFIG].read_only, + read_only=app_mapping[MappingType.CONFIG].read_only, ) ) else: - # Map addon's public config folder if not using deprecated config option - if self.addon.addon_config_used: + # Map app's public config folder if not using deprecated config option + if self.app.app_config_used: mounts.append( DockerMount( type=MountType.BIND, - source=self.addon.path_extern_config.as_posix(), - target=addon_mapping[MappingType.ADDON_CONFIG].path + source=self.app.path_extern_config.as_posix(), + 
target=app_mapping[MappingType.ADDON_CONFIG].path or PATH_PUBLIC_CONFIG.as_posix(), - read_only=addon_mapping[MappingType.ADDON_CONFIG].read_only, + read_only=app_mapping[MappingType.ADDON_CONFIG].read_only, ) ) # Map Home Assistant config in new way - if MappingType.HOMEASSISTANT_CONFIG in addon_mapping: + if MappingType.HOMEASSISTANT_CONFIG in app_mapping: mounts.append( DockerMount( type=MountType.BIND, source=self.sys_config.path_extern_homeassistant.as_posix(), - target=addon_mapping[MappingType.HOMEASSISTANT_CONFIG].path + target=app_mapping[MappingType.HOMEASSISTANT_CONFIG].path or PATH_HOMEASSISTANT_CONFIG.as_posix(), - read_only=addon_mapping[ + read_only=app_mapping[ MappingType.HOMEASSISTANT_CONFIG ].read_only, ) ) - if MappingType.ALL_ADDON_CONFIGS in addon_mapping: + if MappingType.ALL_ADDON_CONFIGS in app_mapping: mounts.append( DockerMount( type=MountType.BIND, - source=self.sys_config.path_extern_addon_configs.as_posix(), - target=addon_mapping[MappingType.ALL_ADDON_CONFIGS].path + source=self.sys_config.path_extern_app_configs.as_posix(), + target=app_mapping[MappingType.ALL_ADDON_CONFIGS].path or PATH_ALL_ADDON_CONFIGS.as_posix(), - read_only=addon_mapping[MappingType.ALL_ADDON_CONFIGS].read_only, + read_only=app_mapping[MappingType.ALL_ADDON_CONFIGS].read_only, ) ) - if MappingType.SSL in addon_mapping: + if MappingType.SSL in app_mapping: mounts.append( DockerMount( type=MountType.BIND, source=self.sys_config.path_extern_ssl.as_posix(), - target=addon_mapping[MappingType.SSL].path or PATH_SSL.as_posix(), - read_only=addon_mapping[MappingType.SSL].read_only, + target=app_mapping[MappingType.SSL].path or PATH_SSL.as_posix(), + read_only=app_mapping[MappingType.SSL].read_only, ) ) - if MappingType.ADDONS in addon_mapping: + if MappingType.ADDONS in app_mapping: mounts.append( DockerMount( type=MountType.BIND, - source=self.sys_config.path_extern_addons_local.as_posix(), - target=addon_mapping[MappingType.ADDONS].path + 
source=self.sys_config.path_extern_apps_local.as_posix(), + target=app_mapping[MappingType.ADDONS].path or PATH_LOCAL_ADDONS.as_posix(), - read_only=addon_mapping[MappingType.ADDONS].read_only, + read_only=app_mapping[MappingType.ADDONS].read_only, ) ) - if MappingType.BACKUP in addon_mapping: + if MappingType.BACKUP in app_mapping: mounts.append( DockerMount( type=MountType.BIND, source=self.sys_config.path_extern_backup.as_posix(), - target=addon_mapping[MappingType.BACKUP].path + target=app_mapping[MappingType.BACKUP].path or PATH_BACKUP.as_posix(), - read_only=addon_mapping[MappingType.BACKUP].read_only, + read_only=app_mapping[MappingType.BACKUP].read_only, ) ) - if MappingType.SHARE in addon_mapping: + if MappingType.SHARE in app_mapping: mounts.append( DockerMount( type=MountType.BIND, source=self.sys_config.path_extern_share.as_posix(), - target=addon_mapping[MappingType.SHARE].path - or PATH_SHARE.as_posix(), - read_only=addon_mapping[MappingType.SHARE].read_only, + target=app_mapping[MappingType.SHARE].path or PATH_SHARE.as_posix(), + read_only=app_mapping[MappingType.SHARE].read_only, bind_options=MountBindOptions(propagation=PropagationMode.RSLAVE), ) ) - if MappingType.MEDIA in addon_mapping: + if MappingType.MEDIA in app_mapping: mounts.append( DockerMount( type=MountType.BIND, source=self.sys_config.path_extern_media.as_posix(), - target=addon_mapping[MappingType.MEDIA].path - or PATH_MEDIA.as_posix(), - read_only=addon_mapping[MappingType.MEDIA].read_only, + target=app_mapping[MappingType.MEDIA].path or PATH_MEDIA.as_posix(), + read_only=app_mapping[MappingType.MEDIA].read_only, bind_options=MountBindOptions(propagation=PropagationMode.RSLAVE), ) ) @@ -475,7 +473,7 @@ def mounts(self) -> list[DockerMount]: # Init other hardware mappings # GPIO support - if self.addon.with_gpio and self.sys_hardware.helper.support_gpio: + if self.app.with_gpio and self.sys_hardware.helper.support_gpio: for gpio_path in ("/sys/class/gpio", 
"/sys/devices/platform/soc"): if not Path(gpio_path).exists(): continue @@ -489,7 +487,7 @@ def mounts(self) -> list[DockerMount]: ) # DeviceTree support - if self.addon.with_devicetree: + if self.app.with_devicetree: mounts.append( DockerMount( type=MountType.BIND, @@ -500,11 +498,11 @@ def mounts(self) -> list[DockerMount]: ) # Host udev support - if self.addon.with_udev: + if self.app.with_udev: mounts.append(MOUNT_UDEV) # Kernel Modules support - if self.addon.with_kernel_modules: + if self.app.with_kernel_modules: mounts.append( DockerMount( type=MountType.BIND, @@ -515,19 +513,19 @@ def mounts(self) -> list[DockerMount]: ) # Docker API support - if not self.addon.protected and self.addon.access_docker_api: + if not self.app.protected and self.app.access_docker_api: mounts.append(MOUNT_DOCKER) # Host D-Bus system - if self.addon.host_dbus: + if self.app.host_dbus: mounts.append(MOUNT_DBUS) # Configuration Audio - if self.addon.with_audio: + if self.app.with_audio: mounts += [ DockerMount( type=MountType.BIND, - source=self.addon.path_extern_pulse.as_posix(), + source=self.app.path_extern_pulse.as_posix(), target="/etc/pulse/client.conf", read_only=True, ), @@ -546,7 +544,7 @@ def mounts(self) -> list[DockerMount]: ] # System Journal access - if self.addon.with_journald: + if self.app.with_journald: mounts += [ DockerMount( type=MountType.BIND, @@ -572,21 +570,21 @@ def mounts(self) -> list[DockerMount]: async def run(self) -> None: """Run Docker image.""" # Security check - if not self.addon.protected: - _LOGGER.warning("%s running with disabled protected mode!", self.addon.name) + if not self.app.protected: + _LOGGER.warning("%s running with disabled protected mode!", self.app.name) # Don't set a hostname if no separate UTS namespace is used - hostname = None if self.uts_mode else self.addon.hostname + hostname = None if self.uts_mode else self.app.hostname # Create & Run container try: await self._run( - tag=str(self.addon.version), + 
tag=str(self.app.version), name=self.name, hostname=hostname, detach=True, - init=self.addon.default_init, - stdin_open=self.addon.with_stdin, + init=self.app.default_init, + stdin_open=self.app.with_stdin, network_mode=self.network_mode, pid_mode=self.pid_mode, uts_mode=self.uts_mode, @@ -606,7 +604,7 @@ async def run(self) -> None: self.sys_resolution.create_issue( IssueType.MISSING_IMAGE, ContextType.ADDON, - reference=self.addon.slug, + reference=self.app.slug, suggestions=[SuggestionType.EXECUTE_REPAIR], ) raise @@ -616,14 +614,14 @@ async def run(self) -> None: # Write data to DNS server try: await self.sys_plugins.dns.add_host( - ipv4=self.ip_address, names=[self.addon.hostname] + ipv4=self.ip_address, names=[self.app.hostname] ) except CoreDNSError as err: _LOGGER.warning("Can't update DNS for %s", self.name) await async_capture_exception(err) # Hardware Access - if self.addon.static_devices: + if self.app.static_devices: self._hw_listener = self.sys_bus.register_event( BusEvent.HARDWARE_NEW_DEVICE, self._hardware_events ) @@ -653,7 +651,7 @@ async def update( image=image, latest=latest, arch=arch, - need_build=self.addon.latest_need_build, + need_build=self.app.latest_need_build, ) @Job( @@ -671,27 +669,27 @@ async def install( need_build: bool | None = None, ) -> None: """Pull Docker image or build it.""" - if need_build is None and self.addon.need_build or need_build: + if need_build is None and self.app.need_build or need_build: await self._build(version, image) else: await super().install(version, image, latest, arch) async def _build(self, version: AwesomeVersion, image: str | None = None) -> None: """Build a Docker container.""" - build_env = await AddonBuild.create(self.coresys, self.addon) + build_env = await AppBuild.create(self.coresys, self.app) # Check if the build environment is valid, raises if not await build_env.is_valid() _LOGGER.info("Starting build for %s:%s", self.image, version) - addon_image_tag = f"{image or 
self.addon.image}:{version!s}" + app_image_tag = f"{image or self.app.image}:{version!s}" docker_version = self.sys_docker.info.version builder_version_tag = ( f"{docker_version.major}.{docker_version.minor}.{docker_version.micro}-cli" ) - builder_name = f"addon_builder_{self.addon.slug}" + builder_name = f"addon_builder_{self.app.slug}" # Remove dangling builder container if it exists by any chance # E.g. because of an abrupt host shutdown/reboot during a build @@ -732,7 +730,7 @@ def pre_build_setup() -> tuple[ return ( temp_dir, build_env.get_docker_args( - version, addon_image_tag, docker_config_path + version, app_image_tag, docker_config_path ), ) @@ -756,7 +754,7 @@ def pre_build_setup() -> tuple[ logs = "\n".join(result.log) if result.exit_code != 0: raise DockerBuildError( - f"Docker build failed for {addon_image_tag} (exit code {result.exit_code}). Build output:\n{logs}", + f"Docker build failed for {app_image_tag} (exit code {result.exit_code}). Build output:\n{logs}", _LOGGER.error, ) @@ -764,10 +762,10 @@ def pre_build_setup() -> tuple[ try: # Update meta data - self._meta = await self.sys_docker.images.inspect(addon_image_tag) + self._meta = await self.sys_docker.images.inspect(app_image_tag) except aiodocker.DockerError as err: raise DockerBuildError( - f"Can't get image metadata for {addon_image_tag} after build: {err!s}" + f"Can't get image metadata for {app_image_tag} after build: {err!s}" ) from err _LOGGER.info("Build %s:%s done", self.image, version) @@ -819,11 +817,11 @@ async def cleanup( use_version, {old_image} if old_image else None, keep_images={ - f"{addon.image}:{addon.version}" - for addon in self.sys_addons.installed - if addon.slug != self.addon.slug - and addon.image - and addon.image in {old_image, use_image} + f"{app.image}:{app.version}" + for app in self.sys_apps.installed + if app.slug != self.app.slug + and app.image + and app.image in {old_image, use_image} }, ) @@ -833,7 +831,7 @@ async def cleanup( 
concurrency=JobConcurrency.GROUP_REJECT, ) async def write_stdin(self, data: bytes) -> None: - """Write to add-on stdin.""" + """Write to app stdin.""" try: # Load needed docker objects container = await self.sys_docker.containers.get(self.name) @@ -863,7 +861,7 @@ async def stop(self, remove_container: bool = True) -> None: # DNS if self.ip_address != NO_ADDDRESS: try: - await self.sys_plugins.dns.delete_host(self.addon.hostname) + await self.sys_plugins.dns.delete_host(self.app.hostname) except CoreDNSError as err: _LOGGER.warning("Can't update DNS for %s", self.name) await async_capture_exception(err) @@ -878,7 +876,7 @@ async def stop(self, remove_container: bool = True) -> None: # If there is a device access issue and the container is removed, clear it if remove_container and ( issue := self.sys_resolution.get_issue_if_present( - self.addon.device_access_missing_issue + self.app.device_access_missing_issue ) ): self.sys_resolution.dismiss_issue(issue) @@ -893,7 +891,7 @@ async def _hardware_events(self, device: Device) -> None: """Process Hardware events for adjust device access.""" if not any( device_path in (device.path, device.sysfs) - for device_path in self.addon.static_devices + for device_path in self.app.static_devices ): return @@ -914,7 +912,7 @@ async def _hardware_events(self, device: Device) -> None: and not self.sys_os.available ): self.sys_resolution.add_issue( - evolve(self.addon.device_access_missing_issue), + evolve(self.app.device_access_missing_issue), suggestions=[SuggestionType.EXECUTE_RESTART], ) return diff --git a/supervisor/exceptions.py b/supervisor/exceptions.py index ccff85b5cc1..8bdc1e6c0a6 100644 --- a/supervisor/exceptions.py +++ b/supervisor/exceptions.py @@ -97,8 +97,8 @@ class APIInternalServerError(APIError): status = 500 -class APIAddonNotInstalled(APIError): - """Not installed addon requested at addons API.""" +class APIAppNotInstalled(APIError): + """Not installed app requested at apps API.""" class 
APIDBMigrationInProgress(APIError): @@ -342,19 +342,19 @@ class AudioJobError(AudioError, PluginJobError): """Raise on job error with audio plugin.""" -# Addons +# Apps -class AddonsError(HassioError): - """Addons exception.""" +class AppsError(HassioError): + """Apps exception.""" -class AddonConfigurationError(AddonsError): - """Error with add-on configuration.""" +class AppConfigurationError(AppsError): + """Error with app configuration.""" -class AddonConfigurationInvalidError(AddonConfigurationError, APIError): - """Raise if invalid configuration provided for addon.""" +class AppConfigurationInvalidError(AppConfigurationError, APIError): + """Raise if invalid configuration provided for app.""" error_key = "addon_configuration_invalid_error" message_template = "App {addon} has invalid options: {validation_error}" @@ -363,16 +363,16 @@ def __init__( self, logger: Callable[..., None] | None = None, *, - addon: str, + app: str, validation_error: str, ) -> None: """Initialize exception.""" - self.extra_fields = {"addon": addon, "validation_error": validation_error} + self.extra_fields = {"addon": app, "validation_error": validation_error} super().__init__(None, logger) -class AddonBootConfigCannotChangeError(AddonsError, APIError): - """Raise if user attempts to change addon boot config when it can't be changed.""" +class AppBootConfigCannotChangeError(AppsError, APIError): + """Raise if user attempts to change app boot config when it can't be changed.""" error_key = "addon_boot_config_cannot_change_error" message_template = ( @@ -380,29 +380,27 @@ class AddonBootConfigCannotChangeError(AddonsError, APIError): ) def __init__( - self, logger: Callable[..., None] | None = None, *, addon: str, boot_config: str + self, logger: Callable[..., None] | None = None, *, app: str, boot_config: str ) -> None: """Initialize exception.""" - self.extra_fields = {"addon": addon, "boot_config": boot_config} + self.extra_fields = {"addon": app, "boot_config": boot_config} 
super().__init__(None, logger) -class AddonNotRunningError(AddonsError, APIError): - """Raise when an addon is not running.""" +class AppNotRunningError(AppsError, APIError): + """Raise when an app is not running.""" error_key = "addon_not_running_error" message_template = "App {addon} is not running" - def __init__( - self, logger: Callable[..., None] | None = None, *, addon: str - ) -> None: + def __init__(self, logger: Callable[..., None] | None = None, *, app: str) -> None: """Initialize exception.""" - self.extra_fields = {"addon": addon} + self.extra_fields = {"addon": app} super().__init__(None, logger) -class AddonPortConflict(AddonsError, APIError): - """Raise if addon cannot start due to a port conflict.""" +class AppPortConflict(AppsError, APIError): + """Raise if app cannot start due to a port conflict.""" error_key = "addon_port_conflict" message_template = "Cannot start app {name} because port {port} is already in use" @@ -415,12 +413,12 @@ def __init__( super().__init__(None, logger) -class AddonNotSupportedError(HassioNotSupportedError): - """Addon doesn't support a function.""" +class AppNotSupportedError(HassioNotSupportedError): + """App doesn't support a function.""" -class AddonNotSupportedArchitectureError(AddonNotSupportedError): - """Addon does not support system due to architecture.""" +class AppNotSupportedArchitectureError(AppNotSupportedError): + """App does not support system due to architecture.""" error_key = "addon_not_supported_architecture_error" message_template = "App {slug} not supported on this platform, supported architectures: {architectures}" @@ -437,8 +435,8 @@ def __init__( super().__init__(None, logger) -class AddonNotSupportedMachineTypeError(AddonNotSupportedError): - """Addon does not support system due to machine type.""" +class AppNotSupportedMachineTypeError(AppNotSupportedError): + """App does not support system due to machine type.""" error_key = "addon_not_supported_machine_type_error" message_template = "App 
{slug} not supported on this machine, supported machine types: {machine_types}" @@ -455,8 +453,8 @@ def __init__( super().__init__(None, logger) -class AddonNotSupportedHomeAssistantVersionError(AddonNotSupportedError): - """Addon does not support system due to Home Assistant version.""" +class AppNotSupportedHomeAssistantVersionError(AppNotSupportedError): + """App does not support system due to Home Assistant version.""" error_key = "addon_not_supported_home_assistant_version_error" message_template = "App {slug} not supported on this system, requires Home Assistant version {version} or greater" @@ -473,22 +471,20 @@ def __init__( super().__init__(None, logger) -class AddonNotSupportedWriteStdinError(AddonNotSupportedError, APIError): - """Addon does not support writing to stdin.""" +class AppNotSupportedWriteStdinError(AppNotSupportedError, APIError): + """App does not support writing to stdin.""" error_key = "addon_not_supported_write_stdin_error" message_template = "App {addon} does not support writing to stdin" - def __init__( - self, logger: Callable[..., None] | None = None, *, addon: str - ) -> None: + def __init__(self, logger: Callable[..., None] | None = None, *, app: str) -> None: """Initialize exception.""" - self.extra_fields = {"addon": addon} + self.extra_fields = {"addon": app} super().__init__(None, logger) -class AddonBuildDockerfileMissingError(AddonNotSupportedError, APIError): - """Raise when addon build invalid because dockerfile is missing.""" +class AppBuildDockerfileMissingError(AppNotSupportedError, APIError): + """Raise when app build invalid because dockerfile is missing.""" error_key = "addon_build_dockerfile_missing_error" message_template = ( @@ -497,16 +493,14 @@ class AddonBuildDockerfileMissingError(AddonNotSupportedError, APIError): "corruption. Otherwise please report this to the app developer." 
) - def __init__( - self, logger: Callable[..., None] | None = None, *, addon: str - ) -> None: + def __init__(self, logger: Callable[..., None] | None = None, *, app: str) -> None: """Initialize exception.""" - self.extra_fields = {"addon": addon, "repair_command": "ha supervisor repair"} + self.extra_fields = {"addon": app, "repair_command": "ha supervisor repair"} super().__init__(None, logger) -class AddonBuildArchitectureNotSupportedError(AddonNotSupportedError, APIError): - """Raise when addon cannot be built on system because it doesn't support its architecture.""" +class AppBuildArchitectureNotSupportedError(AppNotSupportedError, APIError): + """Raise when app cannot be built on system because it doesn't support its architecture.""" error_key = "addon_build_architecture_not_supported_error" message_template = ( @@ -518,50 +512,46 @@ def __init__( self, logger: Callable[..., None] | None = None, *, - addon: str, - addon_arch_list: list[str], + app: str, + app_arch_list: list[str], system_arch_list: list[str], ) -> None: """Initialize exception.""" self.extra_fields = { - "addon": addon, - "addon_arches": ", ".join(addon_arch_list), + "addon": app, + "addon_arches": ", ".join(app_arch_list), "system_arches": ", ".join(system_arch_list), } super().__init__(None, logger) -class AddonUnknownError(AddonsError, APIUnknownSupervisorError): - """Raise when unknown error occurs taking an action for an addon.""" +class AppUnknownError(AppsError, APIUnknownSupervisorError): + """Raise when unknown error occurs taking an action for an app.""" error_key = "addon_unknown_error" message_template = "An unknown error occurred with app {addon}" - def __init__( - self, logger: Callable[..., None] | None = None, *, addon: str - ) -> None: + def __init__(self, logger: Callable[..., None] | None = None, *, app: str) -> None: """Initialize exception.""" - self.extra_fields = {"addon": addon} + self.extra_fields = {"addon": app} super().__init__(logger) -class 
AddonBuildFailedUnknownError(AddonsError, APIUnknownSupervisorError): - """Raise when the build failed for an addon due to an unknown error.""" +class AppBuildFailedUnknownError(AppsError, APIUnknownSupervisorError): + """Raise when the build failed for an app due to an unknown error.""" error_key = "addon_build_failed_unknown_error" message_template = ( "An unknown error occurred while trying to build the image for app {addon}" ) - def __init__( - self, logger: Callable[..., None] | None = None, *, addon: str - ) -> None: + def __init__(self, logger: Callable[..., None] | None = None, *, app: str) -> None: """Initialize exception.""" - self.extra_fields = {"addon": addon} + self.extra_fields = {"addon": app} super().__init__(logger) -class AddonsJobError(AddonsError, JobException): +class AppsJobError(AppsError, JobException): """Raise on job errors.""" @@ -1028,22 +1018,20 @@ class StoreNotFound(StoreError): """Raise if slug is not known.""" -class StoreAddonNotFoundError(StoreError, APINotFound): - """Raise if a requested addon is not in the store.""" +class StoreAppNotFoundError(StoreError, APINotFound): + """Raise if a requested app is not in the store.""" error_key = "store_addon_not_found_error" message_template = "App {addon} does not exist in the store" - def __init__( - self, logger: Callable[..., None] | None = None, *, addon: str - ) -> None: + def __init__(self, logger: Callable[..., None] | None = None, *, app: str) -> None: """Initialize exception.""" - self.extra_fields = {"addon": addon} + self.extra_fields = {"addon": app} super().__init__(None, logger) class StoreRepositoryLocalCannotReset(StoreError, APIError): - """Raise if user requests a reset on the local addon repository.""" + """Raise if user requests a reset on the local app repository.""" error_key = "store_repository_local_cannot_reset" message_template = "Can't reset repository {local_repo} as it is not git based!" 
@@ -1058,8 +1046,8 @@ class StoreJobError(StoreError, JobException): """Raise on job error with git.""" -class StoreInvalidAddonRepo(StoreError): - """Raise on invalid addon repo.""" +class StoreInvalidAppRepo(StoreError): + """Raise on invalid app repo.""" class StoreRepositoryUnknownError(StoreError, APIUnknownSupervisorError): @@ -1117,8 +1105,8 @@ class BackupFatalIOError(BackupError): """Raise on write-side I/O errors that leave the backup tar corrupt.""" -class AddonBackupMetadataInvalidError(BackupError, APIError): - """Raise if invalid metadata file provided for addon in backup.""" +class AppBackupMetadataInvalidError(BackupError, APIError): + """Raise if invalid metadata file provided for app in backup.""" error_key = "addon_backup_metadata_invalid_error" message_template = ( @@ -1129,16 +1117,16 @@ def __init__( self, logger: Callable[..., None] | None = None, *, - addon: str, + app: str, validation_error: str, ) -> None: """Initialize exception.""" - self.extra_fields = {"addon": addon, "validation_error": validation_error} + self.extra_fields = {"addon": app, "validation_error": validation_error} super().__init__(None, logger) -class AddonPrePostBackupCommandReturnedError(BackupError, APIError): - """Raise when addon's pre/post backup command returns an error.""" +class AppPrePostBackupCommandReturnedError(BackupError, APIError): + """Raise when app's pre/post backup command returns an error.""" error_key = "addon_pre_post_backup_command_returned_error" message_template = ( @@ -1148,11 +1136,11 @@ class AddonPrePostBackupCommandReturnedError(BackupError, APIError): ) def __init__( - self, logger: Callable[..., None] | None = None, *, addon: str, exit_code: int + self, logger: Callable[..., None] | None = None, *, app: str, exit_code: int ) -> None: """Initialize exception.""" self.extra_fields = { - "addon": addon, + "addon": app, "exit_code": exit_code, "debug_logging_command": "ha supervisor options --logging debug", } diff --git 
a/supervisor/homeassistant/secrets.py b/supervisor/homeassistant/secrets.py index ce4fcac033e..c279ef3c4cf 100644 --- a/supervisor/homeassistant/secrets.py +++ b/supervisor/homeassistant/secrets.py @@ -1,4 +1,4 @@ -"""Handle Home Assistant secrets to add-ons.""" +"""Handle Home Assistant secrets to apps.""" from datetime import timedelta import logging diff --git a/supervisor/host/sound.py b/supervisor/host/sound.py index fc0d3fdc8e7..1a4d3cda9ec 100644 --- a/supervisor/host/sound.py +++ b/supervisor/host/sound.py @@ -37,7 +37,7 @@ class AudioApplication: stream_type: StreamType volume: float mute: bool - addon: str + app: str @dataclass(slots=True, frozen=True) diff --git a/supervisor/ingress.py b/supervisor/ingress.py index 859d8586613..a24a1fc3aa7 100644 --- a/supervisor/ingress.py +++ b/supervisor/ingress.py @@ -5,7 +5,7 @@ import random import secrets -from .addons.addon import Addon +from .addons.addon import App from .const import ( ATTR_PORTS, ATTR_SESSION, @@ -33,11 +33,11 @@ def __init__(self, coresys: CoreSys): self.coresys: CoreSys = coresys self.tokens: dict[str, str] = {} - def get(self, token: str) -> Addon | None: - """Return addon they have this ingress token.""" + def get(self, token: str) -> App | None: + """Return app they have this ingress token.""" if token not in self.tokens: return None - return self.sys_addons.get_local_only(self.tokens[token]) + return self.sys_apps.get_local_only(self.tokens[token]) def get_session_data(self, session_id: str) -> IngressSessionData | None: """Return complementary data of current session or None.""" @@ -61,14 +61,14 @@ def ports(self) -> dict[str, int]: return self._data[ATTR_PORTS] @property - def addons(self) -> list[Addon]: - """Return list of ingress Add-ons.""" - addons = [] - for addon in self.sys_addons.installed: - if not addon.with_ingress: + def apps(self) -> list[App]: + """Return list of ingress Apps.""" + apps = [] + for app in self.sys_apps.installed: + if not app.with_ingress: continue - 
addons.append(addon) - return addons + apps.append(app) + return apps async def load(self) -> None: """Update internal data.""" @@ -115,13 +115,13 @@ def _cleanup_sessions(self) -> None: self.sessions_data.update(sessions_data) def _update_token_list(self) -> None: - """Regenerate token <-> Add-on map.""" + """Regenerate token <-> App map.""" self.tokens.clear() # Read all ingress token and build a map - for addon in self.addons: - if addon.ingress_token: - self.tokens[addon.ingress_token] = addon.slug + for app in self.apps: + if app.ingress_token: + self.tokens[app.ingress_token] = app.slug def create_session(self, data: IngressSessionData | None = None) -> str: """Create new session.""" @@ -158,10 +158,10 @@ def validate_session(self, session: str) -> bool: return True - async def get_dynamic_port(self, addon_slug: str) -> int: + async def get_dynamic_port(self, app_slug: str) -> int: """Get/Create a dynamic port from range.""" - if addon_slug in self.ports: - return self.ports[addon_slug] + if app_slug in self.ports: + return self.ports[app_slug] port = None while ( @@ -172,37 +172,37 @@ async def get_dynamic_port(self, addon_slug: str) -> int: port = random.randint(62000, 65500) # Save port for next time - self.ports[addon_slug] = port + self.ports[app_slug] = port await self.save_data() return port - async def del_dynamic_port(self, addon_slug: str) -> None: + async def del_dynamic_port(self, app_slug: str) -> None: """Remove a previously assigned dynamic port.""" - if addon_slug not in self.ports: + if app_slug not in self.ports: return - del self.ports[addon_slug] + del self.ports[app_slug] await self.save_data() - async def update_hass_panel(self, addon: Addon): + async def update_hass_panel(self, app: App): """Return True if Home Assistant up and running.""" if not await self.sys_homeassistant.core.is_running(): _LOGGER.debug("Ignoring panel update on Core") return # Update UI - method = "post" if addon.ingress_panel else "delete" + method = "post" if 
app.ingress_panel else "delete" try: async with self.sys_homeassistant.api.make_request( - method, f"api/hassio_push/panel/{addon.slug}" + method, f"api/hassio_push/panel/{app.slug}" ) as resp: if resp.status in (200, 201): - _LOGGER.info("Update Ingress as panel for %s", addon.slug) + _LOGGER.info("Update Ingress as panel for %s", app.slug) else: _LOGGER.warning( "Failed to update the Ingress panel for %s with %i", - addon.slug, + app.slug, resp.status, ) except HomeAssistantAPIError as err: - _LOGGER.error("Panel update request failed for %s: %s", addon.slug, err) + _LOGGER.error("Panel update request failed for %s: %s", app.slug, err) diff --git a/supervisor/misc/filter.py b/supervisor/misc/filter.py index 7b7a220480c..d4357b6a2e2 100644 --- a/supervisor/misc/filter.py +++ b/supervisor/misc/filter.py @@ -11,7 +11,7 @@ from ..const import DOCKER_IPV4_NETWORK_MASK, HEADER_TOKEN, HEADER_TOKEN_OLD, CoreState from ..coresys import CoreSys -from ..exceptions import AddonConfigurationError +from ..exceptions import AppConfigurationError RE_URL: re.Pattern = re.compile(r"(\w+:\/\/)(.*\.\w+)(.*)") @@ -46,7 +46,7 @@ def filter_data(coresys: CoreSys, event: Event, hint: Hint) -> Event | None: # Ignore some exceptions if "exc_info" in hint: _, exc_value, _ = hint["exc_info"] - if isinstance(exc_value, (AddonConfigurationError)): + if isinstance(exc_value, (AppConfigurationError)): return None # Ignore issue if system is not supported or diagnostics is disabled @@ -82,10 +82,10 @@ def filter_data(coresys: CoreSys, event: Event, hint: Hint) -> Event | None: ) return event - # List installed addons - installed_addons = [ - {"slug": addon.slug, "repository": addon.repository, "name": addon.name} - for addon in coresys.addons.installed + # List installed apps + installed_apps = [ + {"slug": app.slug, "repository": app.repository, "name": app.name} + for app in coresys.apps.installed ] # Update information @@ -93,7 +93,7 @@ def filter_data(coresys: CoreSys, event: Event, hint: 
Hint) -> Event | None: { "supervisor": { "channel": coresys.updater.channel, - "installed_addons": installed_addons, + "installed_addons": installed_apps, }, "host": { "arch": str(coresys.arch.default), diff --git a/supervisor/misc/tasks.py b/supervisor/misc/tasks.py index c68b7f07193..40a274debab 100644 --- a/supervisor/misc/tasks.py +++ b/supervisor/misc/tasks.py @@ -5,12 +5,12 @@ import logging from typing import cast -from ..addons.const import ADDON_UPDATE_CONDITIONS +from ..addons.const import APP_UPDATE_CONDITIONS from ..backups.const import LOCATION_CLOUD_BACKUP, LOCATION_TYPE -from ..const import ATTR_TYPE, AddonState +from ..const import ATTR_TYPE, AppState from ..coresys import CoreSysAttributes from ..exceptions import ( - AddonsError, + AppsError, BackupFileNotFoundError, HomeAssistantError, HomeAssistantWSError, @@ -71,7 +71,7 @@ def __init__(self, coresys): async def load(self): """Add Tasks to scheduler.""" # Update - self.sys_scheduler.register_task(self._update_addons, RUN_UPDATE_ADDONS) + self.sys_scheduler.register_task(self._update_apps, RUN_UPDATE_ADDONS) self.sys_scheduler.register_task(self._update_cli, RUN_UPDATE_CLI) self.sys_scheduler.register_task(self._update_dns, RUN_UPDATE_DNS) self.sys_scheduler.register_task(self._update_audio, RUN_UPDATE_AUDIO) @@ -94,7 +94,7 @@ async def load(self): self._watchdog_observer_application, RUN_WATCHDOG_OBSERVER_APPLICATION ) self.sys_scheduler.register_task( - self._watchdog_addon_application, RUN_WATCHDOG_ADDON_APPLICATON + self._watchdog_app_application, RUN_WATCHDOG_ADDON_APPLICATON ) # Cleanup @@ -106,48 +106,46 @@ async def load(self): @Job( name="tasks_update_addons", - conditions=ADDON_UPDATE_CONDITIONS + [JobCondition.RUNNING], + conditions=APP_UPDATE_CONDITIONS + [JobCondition.RUNNING], ) - async def _update_addons(self): - """Check if an update is available for an Add-on and update it.""" - for addon in self.sys_addons.all: - if not addon.is_installed or not addon.auto_update: + async def 
_update_apps(self): + """Check if an update is available for an App and update it.""" + for app in self.sys_apps.all: + if not app.is_installed or not app.auto_update: continue # Evaluate available updates - if not addon.need_update: + if not app.need_update: continue - if not addon.auto_update_available: + if not app.auto_update_available: _LOGGER.debug( "Not updating app %s from %s to %s as that would cross a known breaking version", - addon.slug, - addon.version, - addon.latest_version, + app.slug, + app.version, + app.latest_version, ) continue # Delay auto-updates for a day in case of issues - if utcnow() < addon.latest_version_timestamp + timedelta(days=1): + if utcnow() < app.latest_version_timestamp + timedelta(days=1): _LOGGER.debug( "Not updating app %s from %s to %s as the latest version is less than a day old", - addon.slug, - addon.version, - addon.latest_version, + app.slug, + app.version, + app.latest_version, ) continue - if not addon.test_update_schema(): - _LOGGER.warning( - "App %s will be ignored, schema tests failed", addon.slug - ) + if not app.test_update_schema(): + _LOGGER.warning("App %s will be ignored, schema tests failed", app.slug) continue - _LOGGER.info("App auto update process %s", addon.slug) - # Call Home Assistant Core to update add-on to make sure that backups + _LOGGER.info("App auto update process %s", app.slug) + # Call Home Assistant Core to update app to make sure that backups # get created through the Home Assistant Core API (categorized correctly). # Ultimately auto updates should be handled by Home Assistant Core itself # through a update entity feature. 
message = { ATTR_TYPE: WSType.HASSIO_UPDATE_ADDON, - "addon": addon.slug, + "addon": app.slug, "backup": True, } _LOGGER.debug( @@ -311,37 +309,37 @@ async def _watchdog_observer_application(self): except ObserverError: _LOGGER.error("Observer watchdog reanimation failed!") - async def _watchdog_addon_application(self): + async def _watchdog_app_application(self): """Check running state of the application and start if they is hangs.""" - for addon in self.sys_addons.installed: + for app in self.sys_apps.installed: # if watchdog need looking for - if not addon.watchdog or addon.state != AddonState.STARTED: + if not app.watchdog or app.state != AppState.STARTED: continue # Init cache data - retry_scan = self._cache.get(addon.slug, 0) + retry_scan = self._cache.get(app.slug, 0) - # if Addon have running actions / Application work - if addon.in_progress or await addon.watchdog_application(): + # if App have running actions / Application work + if app.in_progress or await app.watchdog_application(): continue # Look like we run into a problem retry_scan += 1 if retry_scan == 1: - self._cache[addon.slug] = retry_scan + self._cache[app.slug] = retry_scan _LOGGER.warning( - "Watchdog missing application response from %s", addon.slug + "Watchdog missing application response from %s", app.slug ) return - _LOGGER.warning("Watchdog found a problem with %s application!", addon.slug) + _LOGGER.warning("Watchdog found a problem with %s application!", app.slug) try: - await (await addon.restart()) - except AddonsError as err: - _LOGGER.error("%s watchdog reanimation failed with %s", addon.slug, err) + await (await app.restart()) + except AppsError as err: + _LOGGER.error("%s watchdog reanimation failed with %s", app.slug, err) await async_capture_exception(err) finally: - self._cache[addon.slug] = 0 + self._cache[app.slug] = 0 @Job( name="tasks_reload_store", @@ -352,7 +350,7 @@ async def _watchdog_addon_application(self): ], ) async def _reload_store(self) -> None: - """Reload 
store and check for addon updates.""" + """Reload store and check for app updates.""" await self.sys_store.reload() @Job(name="tasks_reload_updater") diff --git a/supervisor/mounts/mount.py b/supervisor/mounts/mount.py index 417c44aa72c..667224df53d 100644 --- a/supervisor/mounts/mount.py +++ b/supervisor/mounts/mount.py @@ -142,7 +142,7 @@ def local_where(self) -> Path: @property def container_where(self) -> PurePath | None: - """Return where this is made available in managed containers (core, addons, etc.). + """Return where this is made available in managed containers (core, apps, etc.). This returns none if it is not made available in managed containers. """ diff --git a/supervisor/plugins/dns.py b/supervisor/plugins/dns.py index c4d72c8f8ac..8585ecb2870 100644 --- a/supervisor/plugins/dns.py +++ b/supervisor/plugins/dns.py @@ -336,7 +336,7 @@ async def reset(self) -> None: # Reset loop protection self._loop = False - await self.sys_addons.sync_dns() + await self.sys_apps.sync_dns() async def watchdog_container(self, event: DockerContainerStateEvent) -> None: """Check for loop on failure before processing state change event.""" diff --git a/supervisor/resolution/checks/addon_pwned.py b/supervisor/resolution/checks/addon_pwned.py index 9a0dea23003..2492a4793ba 100644 --- a/supervisor/resolution/checks/addon_pwned.py +++ b/supervisor/resolution/checks/addon_pwned.py @@ -3,7 +3,7 @@ from datetime import timedelta import logging -from ...const import AddonState, CoreState +from ...const import AppState, CoreState from ...coresys import CoreSys from ...exceptions import PwnedConnectivityError, PwnedError, PwnedSecret from ...jobs.const import JobCondition, JobThrottle @@ -16,11 +16,11 @@ def setup(coresys: CoreSys) -> CheckBase: """Check setup function.""" - return CheckAddonPwned(coresys) + return CheckAppPwned(coresys) -class CheckAddonPwned(CheckBase): - """CheckAddonPwned class for check.""" +class CheckAppPwned(CheckBase): + """CheckAppPwned class for check.""" 
@Job( name="check_addon_pwned_run", @@ -35,8 +35,8 @@ async def run_check(self) -> None: return await self.sys_homeassistant.secrets.reload() - for addon in self.sys_addons.installed: - secrets = addon.pwned + for app in self.sys_apps.installed: + secrets = app.pwned if not secrets: continue @@ -49,7 +49,7 @@ async def run_check(self) -> None: return except PwnedSecret: # Check possible suggestion - if addon.state == AddonState.STARTED: + if app.state == AppState.STARTED: suggestions = [SuggestionType.EXECUTE_STOP] else: suggestions = None @@ -57,7 +57,7 @@ async def run_check(self) -> None: self.sys_resolution.create_issue( IssueType.PWNED, ContextType.ADDON, - reference=addon.slug, + reference=app.slug, suggestions=suggestions, ) break @@ -71,11 +71,11 @@ async def approve_check(self, reference: str | None = None) -> bool: return False # Uninstalled - if not (addon := self.sys_addons.get_local_only(reference)): + if not (app := self.sys_apps.get_local_only(reference)): return False # Not in use anymore - secrets = addon.pwned + secrets = app.pwned if not secrets: return False diff --git a/supervisor/resolution/checks/deprecated_addon.py b/supervisor/resolution/checks/deprecated_addon.py index 495b165ce16..9190ad0a483 100644 --- a/supervisor/resolution/checks/deprecated_addon.py +++ b/supervisor/resolution/checks/deprecated_addon.py @@ -1,6 +1,6 @@ -"""Helpers to check for deprecated addons.""" +"""Helpers to check for deprecated apps.""" -from ...const import AddonStage, CoreState +from ...const import AppStage, CoreState from ...coresys import CoreSys from ..const import ContextType, IssueType, SuggestionType from .base import CheckBase @@ -8,20 +8,20 @@ def setup(coresys: CoreSys) -> CheckBase: """Check setup function.""" - return CheckDeprecatedAddon(coresys) + return CheckDeprecatedApp(coresys) -class CheckDeprecatedAddon(CheckBase): - """CheckDeprecatedAddon class for check.""" +class CheckDeprecatedApp(CheckBase): + """CheckDeprecatedApp class for check.""" 
async def run_check(self) -> None: """Run check if not affected by issue.""" - for addon in self.sys_addons.installed: - if addon.stage == AddonStage.DEPRECATED: + for app in self.sys_apps.installed: + if app.stage == AppStage.DEPRECATED: self.sys_resolution.create_issue( IssueType.DEPRECATED_ADDON, ContextType.ADDON, - reference=addon.slug, + reference=app.slug, suggestions=[SuggestionType.EXECUTE_REMOVE], ) @@ -30,8 +30,8 @@ async def approve_check(self, reference: str | None = None) -> bool: if not reference: return False - addon = self.sys_addons.get_local_only(reference) - return addon is not None and addon.stage == AddonStage.DEPRECATED + app = self.sys_apps.get_local_only(reference) + return app is not None and app.stage == AppStage.DEPRECATED @property def issue(self) -> IssueType: diff --git a/supervisor/resolution/checks/deprecated_arch_addon.py b/supervisor/resolution/checks/deprecated_arch_addon.py index d0f4c301095..061b4c78ac7 100644 --- a/supervisor/resolution/checks/deprecated_arch_addon.py +++ b/supervisor/resolution/checks/deprecated_arch_addon.py @@ -1,6 +1,6 @@ -"""Helpers to check for add-ons using deprecated compatibility entries.""" +"""Helpers to check for apps using deprecated compatibility entries.""" -from ...const import AddonStage, CoreState +from ...const import AppStage, CoreState from ...coresys import CoreSys from ..const import ContextType, IssueType, SuggestionType from .base import CheckBase @@ -8,25 +8,25 @@ def setup(coresys: CoreSys) -> CheckBase: """Check setup function.""" - return CheckDeprecatedArchAddon(coresys) + return CheckDeprecatedArchApp(coresys) -class CheckDeprecatedArchAddon(CheckBase): - """CheckDeprecatedArchAddon class for check.""" +class CheckDeprecatedArchApp(CheckBase): + """CheckDeprecatedArchApp class for check.""" async def run_check(self) -> None: """Run check if not affected by issue.""" - for addon in self.sys_addons.installed: - if addon.stage == AddonStage.DEPRECATED: + for app in 
self.sys_apps.installed: + if app.stage == AppStage.DEPRECATED: continue - if (addon.has_deprecated_arch and not addon.has_supported_arch) or ( - addon.has_deprecated_machine and not addon.has_supported_machine + if (app.has_deprecated_arch and not app.has_supported_arch) or ( + app.has_deprecated_machine and not app.has_supported_machine ): self.sys_resolution.create_issue( IssueType.DEPRECATED_ARCH_ADDON, ContextType.ADDON, - reference=addon.slug, + reference=app.slug, suggestions=[SuggestionType.EXECUTE_REMOVE], ) @@ -35,13 +35,13 @@ async def approve_check(self, reference: str | None = None) -> bool: if not reference: return False - addon = self.sys_addons.get_local_only(reference) + app = self.sys_apps.get_local_only(reference) return ( - addon is not None - and addon.stage != AddonStage.DEPRECATED + app is not None + and app.stage != AppStage.DEPRECATED and ( - (addon.has_deprecated_arch and not addon.has_supported_arch) - or (addon.has_deprecated_machine and not addon.has_supported_machine) + (app.has_deprecated_arch and not app.has_supported_arch) + or (app.has_deprecated_machine and not app.has_supported_machine) ) ) diff --git a/supervisor/resolution/checks/detached_addon_missing.py b/supervisor/resolution/checks/detached_addon_missing.py index 1d09fa32962..27218b82eb8 100644 --- a/supervisor/resolution/checks/detached_addon_missing.py +++ b/supervisor/resolution/checks/detached_addon_missing.py @@ -1,4 +1,4 @@ -"""Helpers to check for detached addons due to repo misisng.""" +"""Helpers to check for detached apps due to repo missing.""" from ...const import CoreState from ...coresys import CoreSys @@ -8,23 +8,20 @@ def setup(coresys: CoreSys) -> CheckBase: """Check setup function.""" - return CheckDetachedAddonMissing(coresys) + return CheckDetachedAppMissing(coresys) -class CheckDetachedAddonMissing(CheckBase): - """CheckDetachedAddonMissing class for check.""" +class CheckDetachedAppMissing(CheckBase): + """CheckDetachedAppMissing class for check.""" 
async def run_check(self) -> None: """Run check if not affected by issue.""" - for addon in self.sys_addons.installed: - if ( - addon.is_detached - and addon.repository not in self.sys_store.repositories - ): + for app in self.sys_apps.installed: + if app.is_detached and app.repository not in self.sys_store.repositories: self.sys_resolution.create_issue( IssueType.DETACHED_ADDON_MISSING, ContextType.ADDON, - reference=addon.slug, + reference=app.slug, ) async def approve_check(self, reference: str | None = None) -> bool: @@ -32,8 +29,8 @@ async def approve_check(self, reference: str | None = None) -> bool: if not reference: return False - addon = self.sys_addons.get_local_only(reference) - return addon is not None and addon.is_detached + app = self.sys_apps.get_local_only(reference) + return app is not None and app.is_detached @property def issue(self) -> IssueType: diff --git a/supervisor/resolution/checks/detached_addon_removed.py b/supervisor/resolution/checks/detached_addon_removed.py index 9510fe3767f..6f9e87324bb 100644 --- a/supervisor/resolution/checks/detached_addon_removed.py +++ b/supervisor/resolution/checks/detached_addon_removed.py @@ -1,4 +1,4 @@ -"""Helpers to check for detached addons due to removal from repo.""" +"""Helpers to check for detached apps due to removal from repo.""" from ...const import CoreState from ...coresys import CoreSys @@ -8,20 +8,20 @@ def setup(coresys: CoreSys) -> CheckBase: """Check setup function.""" - return CheckDetachedAddonRemoved(coresys) + return CheckDetachedAppRemoved(coresys) -class CheckDetachedAddonRemoved(CheckBase): - """CheckDetachedAddonRemoved class for check.""" +class CheckDetachedAppRemoved(CheckBase): + """CheckDetachedAppRemoved class for check.""" async def run_check(self) -> None: """Run check if not affected by issue.""" - for addon in self.sys_addons.installed: - if addon.is_detached and addon.repository in self.sys_store.repositories: + for app in self.sys_apps.installed: + if app.is_detached and 
app.repository in self.sys_store.repositories: self.sys_resolution.create_issue( IssueType.DETACHED_ADDON_REMOVED, ContextType.ADDON, - reference=addon.slug, + reference=app.slug, suggestions=[SuggestionType.EXECUTE_REMOVE], ) @@ -30,8 +30,8 @@ async def approve_check(self, reference: str | None = None) -> bool: if not reference: return False - addon = self.sys_addons.get_local_only(reference) - return addon is not None and addon.is_detached + app = self.sys_apps.get_local_only(reference) + return app is not None and app.is_detached @property def issue(self) -> IssueType: diff --git a/supervisor/resolution/checks/docker_config.py b/supervisor/resolution/checks/docker_config.py index d6568a27b5a..49880f3fd3e 100644 --- a/supervisor/resolution/checks/docker_config.py +++ b/supervisor/resolution/checks/docker_config.py @@ -10,24 +10,24 @@ from .base import CheckBase -def _check_container(container: DockerInterface, addon=None) -> bool: +def _check_container(container: DockerInterface, app=None) -> bool: """Check if container has mount propagation issues requiring recreate. - For add-ons, only validates mounts explicitly configured (not Docker VOLUMEs). + For apps, only validates mounts explicitly configured (not Docker VOLUMEs). For Core/plugins, validates all /media and /share mounts. 
""" - # For add-ons, check mounts against their actual configured targets - if addon is not None: - addon_mapping = addon.map_volumes + # For apps, check mounts against their actual configured targets + if app is not None: + app_mapping = app.map_volumes configured_targets = set() - # Get actual target paths from add-on configuration - if MappingType.MEDIA in addon_mapping: - target = addon_mapping[MappingType.MEDIA].path or PATH_MEDIA.as_posix() + # Get actual target paths from app configuration + if MappingType.MEDIA in app_mapping: + target = app_mapping[MappingType.MEDIA].path or PATH_MEDIA.as_posix() configured_targets.add(target) - if MappingType.SHARE in addon_mapping: - target = addon_mapping[MappingType.SHARE].path or PATH_SHARE.as_posix() + if MappingType.SHARE in app_mapping: + target = app_mapping[MappingType.SHARE].path or PATH_SHARE.as_posix() configured_targets.add(target) if not configured_targets: @@ -82,11 +82,11 @@ def _check_docker_config(self) -> None: if _check_container(self.sys_homeassistant.core.instance): new_issues.add(Issue(IssueType.DOCKER_CONFIG, ContextType.CORE)) - for addon in self.sys_addons.installed: - if _check_container(addon.instance, addon): + for app in self.sys_apps.installed: + if _check_container(app.instance, app): new_issues.add( Issue( - IssueType.DOCKER_CONFIG, ContextType.ADDON, reference=addon.slug + IssueType.DOCKER_CONFIG, ContextType.ADDON, reference=app.slug ) ) diff --git a/supervisor/resolution/evaluations/container.py b/supervisor/resolution/evaluations/container.py index 913d5714e2a..4837b6a260d 100644 --- a/supervisor/resolution/evaluations/container.py +++ b/supervisor/resolution/evaluations/container.py @@ -63,7 +63,7 @@ def known_images(self) -> set[str]: self.sys_homeassistant.image, self.sys_supervisor.image or self.sys_supervisor.default_image, *(plugin.image for plugin in self.sys_plugins.all_plugins if plugin.image), - *(addon.image for addon in self.sys_addons.installed if addon.image), + 
*(app.image for app in self.sys_apps.installed if app.image), ADDON_BUILDER_IMAGE, } diff --git a/supervisor/resolution/evaluations/restart_policy.py b/supervisor/resolution/evaluations/restart_policy.py index b5c84389cab..8cddc22f4b4 100644 --- a/supervisor/resolution/evaluations/restart_policy.py +++ b/supervisor/resolution/evaluations/restart_policy.py @@ -48,7 +48,7 @@ def no_restart_expected(self) -> set[DockerInterface]: for plug in self.sys_plugins.all_plugins if plug != self.sys_plugins.observer }, - *{addon.instance for addon in self.sys_addons.installed}, + *{app.instance for app in self.sys_apps.installed}, } @property diff --git a/supervisor/resolution/fixups/addon_disable_boot.py b/supervisor/resolution/fixups/addon_disable_boot.py index 4bdb2f3eef5..5dcc39375af 100644 --- a/supervisor/resolution/fixups/addon_disable_boot.py +++ b/supervisor/resolution/fixups/addon_disable_boot.py @@ -1,8 +1,8 @@ -"""Helpers to fix addon by disabling boot.""" +"""Helpers to fix app by disabling boot.""" import logging -from ...const import AddonBoot +from ...const import AppBoot from ...coresys import CoreSys from ..const import ContextType, IssueType, SuggestionType from .base import FixupBase @@ -12,10 +12,10 @@ def setup(coresys: CoreSys) -> FixupBase: """Check setup function.""" - return FixupAddonDisableBoot(coresys) + return FixupAppDisableBoot(coresys) -class FixupAddonDisableBoot(FixupBase): +class FixupAppDisableBoot(FixupBase): """Storage class for fixup.""" async def process_fixup(self, reference: str | None = None) -> None: @@ -23,12 +23,12 @@ async def process_fixup(self, reference: str | None = None) -> None: if not reference: return - if not (addon := self.sys_addons.get_local_only(reference)): + if not (app := self.sys_apps.get_local_only(reference)): _LOGGER.info("Cannot change app %s as it does not exist", reference) return - # Disable boot on addon - addon.boot = AddonBoot.MANUAL + # Disable boot on app + app.boot = AppBoot.MANUAL @property def 
suggestion(self) -> SuggestionType: diff --git a/supervisor/resolution/fixups/addon_execute_rebuild.py b/supervisor/resolution/fixups/addon_execute_rebuild.py index 8fe58149217..5d2d5906ac4 100644 --- a/supervisor/resolution/fixups/addon_execute_rebuild.py +++ b/supervisor/resolution/fixups/addon_execute_rebuild.py @@ -1,4 +1,4 @@ -"""Helper to fix an issue with an addon by rebuilding its container.""" +"""Helper to fix an issue with an app by rebuilding its container.""" import logging @@ -12,26 +12,26 @@ def setup(coresys: CoreSys) -> FixupBase: """Check setup function.""" - return FixupAddonExecuteRebuild(coresys) + return FixupAppExecuteRebuild(coresys) -class FixupAddonExecuteRebuild(FixupBase): +class FixupAppExecuteRebuild(FixupBase): """Storage class for fixup.""" async def process_fixup(self, reference: str | None = None) -> None: - """Rebuild the addon's container.""" + """Rebuild the app's container.""" if not reference: return - addon = self.sys_addons.get_local_only(reference) - if not addon: + app = self.sys_apps.get_local_only(reference) + if not app: _LOGGER.info( "Cannot rebuild app %s as it is not installed, dismissing suggestion", reference, ) return - state = await addon.instance.current_state() + state = await app.instance.current_state() if state == ContainerState.UNKNOWN: _LOGGER.info( "Container for app %s does not exist, it will be rebuilt when started next", @@ -42,9 +42,9 @@ async def process_fixup(self, reference: str | None = None) -> None: "App %s is stopped, removing its container so it rebuilds when started next", reference, ) - await addon.stop() + await app.stop() else: - await (await addon.restart()) + await (await app.restart()) @property def suggestion(self) -> SuggestionType: diff --git a/supervisor/resolution/fixups/addon_execute_remove.py b/supervisor/resolution/fixups/addon_execute_remove.py index f0461b49278..168a8f9567a 100644 --- a/supervisor/resolution/fixups/addon_execute_remove.py +++ 
b/supervisor/resolution/fixups/addon_execute_remove.py @@ -1,9 +1,9 @@ -"""Helpers to fix addon issue by removing it.""" +"""Helpers to fix app issue by removing it.""" import logging from ...coresys import CoreSys -from ...exceptions import AddonsError, ResolutionFixupError +from ...exceptions import AppsError, ResolutionFixupError from ..const import ContextType, IssueType, SuggestionType from .base import FixupBase @@ -12,10 +12,10 @@ def setup(coresys: CoreSys) -> FixupBase: """Check setup function.""" - return FixupAddonExecuteRemove(coresys) + return FixupAppExecuteRemove(coresys) -class FixupAddonExecuteRemove(FixupBase): +class FixupAppExecuteRemove(FixupBase): """Storage class for fixup.""" async def process_fixup(self, reference: str | None = None) -> None: @@ -23,15 +23,15 @@ async def process_fixup(self, reference: str | None = None) -> None: if not reference: return - if not (addon := self.sys_addons.get_local_only(reference)): + if not (app := self.sys_apps.get_local_only(reference)): _LOGGER.info("App %s already removed", reference) return - # Remove addon + # Remove app _LOGGER.info("Remove app: %s", reference) try: - await addon.uninstall(remove_config=False) - except AddonsError as err: + await app.uninstall(remove_config=False) + except AppsError as err: _LOGGER.error("Could not remove %s due to %s", reference, err) raise ResolutionFixupError() from None diff --git a/supervisor/resolution/fixups/addon_execute_repair.py b/supervisor/resolution/fixups/addon_execute_repair.py index 45276fdbca8..486bd21b761 100644 --- a/supervisor/resolution/fixups/addon_execute_repair.py +++ b/supervisor/resolution/fixups/addon_execute_repair.py @@ -1,4 +1,4 @@ -"""Helper to fix missing image for addon.""" +"""Helper to fix missing image for app.""" import logging @@ -12,31 +12,31 @@ def setup(coresys: CoreSys) -> FixupBase: """Check setup function.""" - return FixupAddonExecuteRepair(coresys) + return FixupAppExecuteRepair(coresys) -class 
FixupAddonExecuteRepair(FixupBase): +class FixupAppExecuteRepair(FixupBase): """Storage class for fixup.""" def __init__(self, coresys: CoreSys) -> None: - """Initialize the add-on execute repair fixup class.""" + """Initialize the app execute repair fixup class.""" super().__init__(coresys) self.attempts = 0 async def process_fixup(self, reference: str | None = None) -> None: - """Pull the addons image.""" + """Pull the apps image.""" if not reference: return - addon = self.sys_addons.get_local_only(reference) - if not addon: + app = self.sys_apps.get_local_only(reference) + if not app: _LOGGER.info( "Cannot repair app %s as it is not installed, dismissing suggestion", reference, ) return - if await addon.instance.exists(): + if await app.instance.exists(): _LOGGER.info( "App %s does not need repair, dismissing suggestion", reference ) @@ -44,7 +44,7 @@ async def process_fixup(self, reference: str | None = None) -> None: _LOGGER.info("Installing image for app %s", reference) self.attempts += 1 - await addon.instance.install(addon.version) + await app.instance.install(app.version) @property def suggestion(self) -> SuggestionType: diff --git a/supervisor/resolution/fixups/addon_execute_restart.py b/supervisor/resolution/fixups/addon_execute_restart.py index c6270e08021..edf1c8f81a5 100644 --- a/supervisor/resolution/fixups/addon_execute_restart.py +++ b/supervisor/resolution/fixups/addon_execute_restart.py @@ -1,9 +1,9 @@ -"""Helpers to fix addon by restarting it.""" +"""Helpers to fix app by restarting it.""" import logging from ...coresys import CoreSys -from ...exceptions import AddonsError, ResolutionFixupError +from ...exceptions import AppsError, ResolutionFixupError from ..const import ContextType, IssueType, SuggestionType from .base import FixupBase @@ -12,10 +12,10 @@ def setup(coresys: CoreSys) -> FixupBase: """Check setup function.""" - return FixupAddonExecuteRestart(coresys) + return FixupAppExecuteRestart(coresys) -class 
FixupAddonExecuteRestart(FixupBase): +class FixupAppExecuteRestart(FixupBase): """Storage class for fixup.""" async def process_fixup(self, reference: str | None = None) -> None: @@ -23,23 +23,23 @@ async def process_fixup(self, reference: str | None = None) -> None: if not reference: return - if not (addon := self.sys_addons.get_local_only(reference)): + if not (app := self.sys_apps.get_local_only(reference)): _LOGGER.info("Cannot restart app %s as it does not exist", reference) return - # Stop addon + # Stop app try: - await addon.stop() - except AddonsError as err: + await app.stop() + except AppsError as err: _LOGGER.error("Could not stop %s due to %s", reference, err) raise ResolutionFixupError() from None - # Start addon + # Start app # Removing the container has already fixed the issue and dismissed it # So any errors on startup are just logged. We won't wait on the startup task either try: - await addon.start() - except AddonsError as err: + await app.start() + except AppsError as err: _LOGGER.error("Could not restart %s due to %s", reference, err) @property diff --git a/supervisor/resolution/fixups/addon_execute_start.py b/supervisor/resolution/fixups/addon_execute_start.py index ab49b79257f..740b2782863 100644 --- a/supervisor/resolution/fixups/addon_execute_start.py +++ b/supervisor/resolution/fixups/addon_execute_start.py @@ -1,10 +1,10 @@ -"""Helpers to fix addon by starting it.""" +"""Helpers to fix app by starting it.""" import logging -from ...const import AddonState +from ...const import AppState from ...coresys import CoreSys -from ...exceptions import AddonsError, ResolutionFixupError +from ...exceptions import AppsError, ResolutionFixupError from ..const import ContextType, IssueType, SuggestionType from .base import FixupBase @@ -13,10 +13,10 @@ def setup(coresys: CoreSys) -> FixupBase: """Check setup function.""" - return FixupAddonExecuteStart(coresys) + return FixupAppExecuteStart(coresys) -class FixupAddonExecuteStart(FixupBase): +class 
FixupAppExecuteStart(FixupBase): """Storage class for fixup.""" async def process_fixup(self, reference: str | None = None) -> None: @@ -24,20 +24,20 @@ async def process_fixup(self, reference: str | None = None) -> None: if not reference: return - if not (addon := self.sys_addons.get_local_only(reference)): + if not (app := self.sys_apps.get_local_only(reference)): _LOGGER.info("Cannot start app %s as it does not exist", reference) return - # Start addon + # Start app try: - start_task = await addon.start() - except AddonsError as err: + start_task = await app.start() + except AppsError as err: _LOGGER.error("Could not start %s due to %s", reference, err) raise ResolutionFixupError() from None - # Wait for addon start. If it ends up in error or unknown state it's not fixed + # Wait for app start. If it ends up in error or unknown state it's not fixed await start_task - if addon.state in {AddonState.ERROR, AddonState.UNKNOWN}: + if app.state in {AppState.ERROR, AppState.UNKNOWN}: _LOGGER.error("App %s could not start successfully", reference) raise ResolutionFixupError() diff --git a/supervisor/services/const.py b/supervisor/services/const.py index f5e2fceba76..4510c68934f 100644 --- a/supervisor/services/const.py +++ b/supervisor/services/const.py @@ -1,6 +1,6 @@ """Service API static data.""" -ATTR_ADDON = "addon" +ATTR_APP = "addon" ATTR_HOST = "host" ATTR_PASSWORD = "password" ATTR_PORT = "port" diff --git a/supervisor/services/interface.py b/supervisor/services/interface.py index 7eba581964a..a36a3dd925f 100644 --- a/supervisor/services/interface.py +++ b/supervisor/services/interface.py @@ -5,7 +5,7 @@ import voluptuous as vol -from ..addons.addon import Addon +from ..addons.addon import App from ..const import PROVIDE_SERVICE from ..coresys import CoreSys, CoreSysAttributes @@ -34,17 +34,17 @@ def schema(self) -> vol.Schema: @property def providers(self) -> list[str]: - """Return name of service providers addon.""" - addons = [] - for addon in 
self.sys_addons.installed: - if addon.services_role.get(self.slug) == PROVIDE_SERVICE: - addons.append(addon.slug) - return addons + """Return name of service providers app.""" + apps = [] + for app in self.sys_apps.installed: + if app.services_role.get(self.slug) == PROVIDE_SERVICE: + apps.append(app.slug) + return apps @property @abstractmethod def active(self) -> list[str]: - """Return list of addon slug they have enable that.""" + """Return list of app slug they have enable that.""" @property def enabled(self) -> bool: @@ -62,9 +62,9 @@ def get_service_data(self) -> dict[str, Any] | None: return None @abstractmethod - async def set_service_data(self, addon: Addon, data: dict[str, Any]) -> None: + async def set_service_data(self, app: App, data: dict[str, Any]) -> None: """Write the data into service object.""" @abstractmethod - async def del_service_data(self, addon: Addon) -> None: + async def del_service_data(self, app: App) -> None: """Remove the data from service object.""" diff --git a/supervisor/services/modules/mqtt.py b/supervisor/services/modules/mqtt.py index 7927e2218e3..922f2bab3e1 100644 --- a/supervisor/services/modules/mqtt.py +++ b/supervisor/services/modules/mqtt.py @@ -5,11 +5,11 @@ import voluptuous as vol -from ...addons.addon import Addon +from ...addons.addon import App from ...exceptions import ServicesError from ...validate import network_port from ..const import ( - ATTR_ADDON, + ATTR_APP, ATTR_HOST, ATTR_PASSWORD, ATTR_PORT, @@ -37,7 +37,7 @@ } ) -SCHEMA_CONFIG_MQTT = SCHEMA_SERVICE_MQTT.extend({vol.Required(ATTR_ADDON): str}) +SCHEMA_CONFIG_MQTT = SCHEMA_SERVICE_MQTT.extend({vol.Required(ATTR_APP): str}) class MQTTService(ServiceInterface): @@ -60,26 +60,26 @@ def schema(self) -> vol.Schema: @property def active(self) -> list[str]: - """Return list of addon slug they have enable that.""" + """Return list of app slug they have enable that.""" if not self.enabled: return [] - return [self._data[ATTR_ADDON]] + return 
[self._data[ATTR_APP]] - async def set_service_data(self, addon: Addon, data: dict[str, Any]) -> None: + async def set_service_data(self, app: App, data: dict[str, Any]) -> None: """Write the data into service object.""" if self.enabled: raise ServicesError( - f"There is already a MQTT service in use from {self._data[ATTR_ADDON]}", + f"There is already a MQTT service in use from {self._data[ATTR_APP]}", _LOGGER.error, ) self._data.update(data) - self._data[ATTR_ADDON] = addon.slug + self._data[ATTR_APP] = app.slug - _LOGGER.info("Set %s as service provider for mqtt", addon.slug) + _LOGGER.info("Set %s as service provider for mqtt", app.slug) await self.save() - async def del_service_data(self, addon: Addon) -> None: + async def del_service_data(self, app: App) -> None: """Remove the data from service object.""" if not self.enabled: raise ServicesError( diff --git a/supervisor/services/modules/mysql.py b/supervisor/services/modules/mysql.py index dfb92219666..35a772ec3b0 100644 --- a/supervisor/services/modules/mysql.py +++ b/supervisor/services/modules/mysql.py @@ -5,11 +5,11 @@ import voluptuous as vol -from ...addons.addon import Addon +from ...addons.addon import App from ...exceptions import ServicesError from ...validate import network_port from ..const import ( - ATTR_ADDON, + ATTR_APP, ATTR_HOST, ATTR_PASSWORD, ATTR_PORT, @@ -31,7 +31,7 @@ } ) -SCHEMA_CONFIG_MYSQL = SCHEMA_SERVICE_MYSQL.extend({vol.Required(ATTR_ADDON): str}) +SCHEMA_CONFIG_MYSQL = SCHEMA_SERVICE_MYSQL.extend({vol.Required(ATTR_APP): str}) class MySQLService(ServiceInterface): @@ -54,26 +54,26 @@ def schema(self) -> vol.Schema: @property def active(self) -> list[str]: - """Return list of addon slug they have enable that.""" + """Return list of app slug they have enable that.""" if not self.enabled: return [] - return [self._data[ATTR_ADDON]] + return [self._data[ATTR_APP]] - async def set_service_data(self, addon: Addon, data: dict[str, Any]) -> None: + async def set_service_data(self, app: 
App, data: dict[str, Any]) -> None: """Write the data into service object.""" if self.enabled: raise ServicesError( - f"There is already a MySQL service in use from {self._data[ATTR_ADDON]}", + f"There is already a MySQL service in use from {self._data[ATTR_APP]}", _LOGGER.error, ) self._data.update(data) - self._data[ATTR_ADDON] = addon.slug + self._data[ATTR_APP] = app.slug - _LOGGER.info("Set %s as service provider for MySQL", addon.slug) + _LOGGER.info("Set %s as service provider for MySQL", app.slug) await self.save() - async def del_service_data(self, addon: Addon) -> None: + async def del_service_data(self, app: App) -> None: """Remove the data from service object.""" if not self.enabled: raise ServicesError("Can't remove not exists services", _LOGGER.warning) diff --git a/supervisor/store/__init__.py b/supervisor/store/__init__.py index 8e219e15f13..9982062a1a9 100644 --- a/supervisor/store/__init__.py +++ b/supervisor/store/__init__.py @@ -1,4 +1,4 @@ -"""Add-on Store handler.""" +"""App Store handler.""" import asyncio from collections.abc import Awaitable @@ -10,14 +10,14 @@ StoreError, StoreGitCloneError, StoreGitError, - StoreInvalidAddonRepo, + StoreInvalidAppRepo, StoreJobError, StoreNotFound, ) from ..jobs.decorator import Job, JobCondition from ..resolution.const import ContextType, IssueType, SuggestionType from ..utils.common import FileConfiguration -from .addon import AddonStore +from .addon import AppStore from .const import FILE_HASSIO_STORE, BuiltinRepository from .data import StoreData from .repository import Repository @@ -27,7 +27,7 @@ class StoreManager(CoreSysAttributes, FileConfiguration): - """Manage add-ons inside Supervisor.""" + """Manage apps inside Supervisor.""" def __init__(self, coresys: CoreSys): """Initialize Docker base wrapper.""" @@ -38,7 +38,7 @@ def __init__(self, coresys: CoreSys): @property def all(self) -> list[Repository]: - """Return list of add-on repositories.""" + """Return list of app repositories.""" return 
list(self.repositories.values()) @property @@ -63,7 +63,7 @@ def get(self, slug: str) -> Repository: return self.repositories[slug] async def load(self) -> None: - """Start up add-on store management.""" + """Start up app store management.""" # Make sure the built-in repositories are all present # This is especially important when adding new built-in repositories # to make sure existing installations have them. @@ -82,7 +82,7 @@ async def load(self) -> None: on_condition=StoreJobError, ) async def reload(self, repository: Repository | None = None) -> None: - """Update add-ons from repository and reload list.""" + """Update apps from repository and reload list.""" # Make a copy to prevent race with other tasks repositories = [repository] if repository else self.all.copy() results: list[bool | BaseException] = await asyncio.gather( @@ -101,19 +101,19 @@ async def reload(self, repository: Repository | None = None) -> None: result, ) - # Update path cache for all addons in updated repos + # Update path cache for all apps in updated repos if updated_repos: await asyncio.gather( *[ - addon.refresh_path_cache() - for addon in self.sys_addons.store.values() - if addon.repository in updated_repos + app.refresh_path_cache() + for app in self.sys_apps.store.values() + if app.repository in updated_repos ] ) # read data from repositories await self.data.update() - await self._read_addons() + await self._read_apps() @Job( name="store_manager_add_repository", @@ -195,7 +195,7 @@ async def _add_repository( ) else: await repository.remove() - raise StoreInvalidAddonRepo( + raise StoreInvalidAppRepo( f"{url} is not a valid app repository", logger=_LOGGER.error ) @@ -210,16 +210,16 @@ async def _add_repository( # Persist changes if persist: await self.data.update() - await self._read_addons() + await self._read_apps() async def remove_repository(self, repository: Repository, *, persist: bool = True): """Remove a repository.""" if repository.is_builtin: - raise StoreInvalidAddonRepo( 
+ raise StoreInvalidAppRepo( "Can't remove built-in repositories!", logger=_LOGGER.error ) - if repository.slug in (addon.repository for addon in self.sys_addons.installed): + if repository.slug in (app.repository for app in self.sys_apps.installed): raise StoreError( f"Can't remove '{repository.source}'. It's used by installed apps", logger=_LOGGER.error, @@ -230,7 +230,7 @@ async def remove_repository(self, repository: Repository, *, persist: bool = Tru if persist: await self.data.update() - await self._read_addons() + await self._read_apps() @Job(name="store_manager_update_repositories") async def update_repositories( @@ -280,37 +280,37 @@ async def update_repositories( # Always update data, even if there are errors, some changes may have succeeded await self.data.update() - await self._read_addons() + await self._read_apps() # Raise the first error we found (if any) for error in add_errors + remove_errors: if error: raise error - async def _read_addons(self) -> None: - """Reload add-ons inside store.""" - all_addons = set(self.data.addons) + async def _read_apps(self) -> None: + """Reload apps inside store.""" + all_apps = set(self.data.apps) # calc diff - add_addons = all_addons - set(self.sys_addons.store) - del_addons = set(self.sys_addons.store) - all_addons + add_apps = all_apps - set(self.sys_apps.store) + del_apps = set(self.sys_apps.store) - all_apps _LOGGER.info( "Loading apps from store: %d all - %d new - %d remove", - len(all_addons), - len(add_addons), - len(del_addons), + len(all_apps), + len(add_apps), + len(del_apps), ) - # new addons - if add_addons: + # new apps + if add_apps: cache_updates: list[Awaitable[None]] = [] - for slug in add_addons: - self.sys_addons.store[slug] = AddonStore(self.coresys, slug) - cache_updates.append(self.sys_addons.store[slug].refresh_path_cache()) + for slug in add_apps: + self.sys_apps.store[slug] = AppStore(self.coresys, slug) + cache_updates.append(self.sys_apps.store[slug].refresh_path_cache()) await 
asyncio.gather(*cache_updates) # remove - for slug in del_addons: - self.sys_addons.store.pop(slug) + for slug in del_apps: + self.sys_apps.store.pop(slug) diff --git a/supervisor/store/addon.py b/supervisor/store/addon.py index 7275cbba579..3bab8b1a8b4 100644 --- a/supervisor/store/addon.py +++ b/supervisor/store/addon.py @@ -1,17 +1,17 @@ -"""Init file for Supervisor add-ons.""" +"""Init file for Supervisor apps.""" from copy import deepcopy import logging from typing import Self -from ..addons.model import AddonModel, Data +from ..addons.model import AppModel, Data from ..coresys import CoreSys _LOGGER: logging.Logger = logging.getLogger(__name__) -class AddonStore(AddonModel): - """Hold data for add-on inside Supervisor.""" +class AppStore(AppModel): + """Hold data for app inside Supervisor.""" def __init__(self, coresys: CoreSys, slug: str, data: Data | None = None): """Initialize object.""" @@ -24,17 +24,17 @@ def __repr__(self) -> str: @property def data(self) -> Data: - """Return add-on data/config.""" - return self._data or self.sys_store.data.addons[self.slug] + """Return app data/config.""" + return self._data or self.sys_store.data.apps[self.slug] @property def is_installed(self) -> bool: - """Return True if an add-on is installed.""" - return self.sys_addons.get_local_only(self.slug) is not None + """Return True if an app is installed.""" + return self.sys_apps.get_local_only(self.slug) is not None @property def is_detached(self) -> bool: - """Return True if add-on is detached.""" + """Return True if app is detached.""" return False def clone(self) -> Self: diff --git a/supervisor/store/const.py b/supervisor/store/const.py index f4417f5a0ed..81ea7aa32f3 100644 --- a/supervisor/store/const.py +++ b/supervisor/store/const.py @@ -1,4 +1,4 @@ -"""Constants for the add-on store.""" +"""Constants for the app store.""" from enum import StrEnum from pathlib import Path diff --git a/supervisor/store/data.py b/supervisor/store/data.py index 
3572a071096..4eec8644fda 100644 --- a/supervisor/store/data.py +++ b/supervisor/store/data.py @@ -1,4 +1,4 @@ -"""Init file for Supervisor add-on data.""" +"""Init file for Supervisor app data.""" from dataclasses import dataclass import errno @@ -40,12 +40,12 @@ class ProcessedRepository: config: dict[str, Any] -def _read_addon_translations(addon_path: Path) -> dict: - """Read translations from add-ons folder. +def _read_app_translations(app_path: Path) -> dict: + """Read translations from apps folder. Should be run in the executor. """ - translations_dir = addon_path / "translations" + translations_dir = app_path / "translations" translations: dict[str, Any] = {} if not translations_dir.exists(): @@ -101,25 +101,25 @@ def _read_git_repository(path: Path) -> ProcessedRepository | None: class StoreData(CoreSysAttributes): - """Hold data for Add-ons inside Supervisor.""" + """Hold data for Apps inside Supervisor.""" def __init__(self, coresys: CoreSys): """Initialize data holder.""" self.coresys: CoreSys = coresys self.repositories: dict[str, Any] = {} - self.addons: dict[str, dict[str, Any]] = {} + self.apps: dict[str, dict[str, Any]] = {} async def update(self) -> None: - """Read data from add-on repository.""" + """Read data from app repository.""" # read core repository - addons = await self._read_addons_folder( - self.sys_config.path_addons_core, REPOSITORY_CORE + apps = await self._read_apps_folder( + self.sys_config.path_apps_core, REPOSITORY_CORE ) # read local repository - addons.update( - await self._read_addons_folder( - self.sys_config.path_addons_local, REPOSITORY_LOCAL + apps.update( + await self._read_apps_folder( + self.sys_config.path_apps_local, REPOSITORY_LOCAL ) ) @@ -130,38 +130,36 @@ async def update(self) -> None: def _read_git_repositories() -> list[ProcessedRepository]: return [ repo - for repository_element in self.sys_config.path_addons_git.iterdir() + for repository_element in self.sys_config.path_apps_git.iterdir() if 
repository_element.is_dir() and (repo := _read_git_repository(repository_element)) ] for repo in await self.sys_run_in_executor(_read_git_repositories): repositories[repo.slug] = repo.config - addons.update(await self._read_addons_folder(repo.path, repo.slug)) + apps.update(await self._read_apps_folder(repo.path, repo.slug)) self.repositories = repositories - self.addons = addons + self.apps = apps - async def _find_addon_configs( - self, path: Path, repository: str - ) -> list[Path] | None: - """Find add-ons in the path.""" + async def _find_app_configs(self, path: Path, repository: str) -> list[Path] | None: + """Find apps in the path.""" - def _get_addons_list() -> list[Path]: + def _get_apps_list() -> list[Path]: # Generate a list without artefact, safe for corruptions return [ - addon - for addon in path.glob("**/config.*") + app + for app in path.glob("**/config.*") if not [ part - for part in addon.parts + for part in app.parts if part.startswith(".") or part == "rootfs" ] - and addon.suffix in FILE_SUFFIX_CONFIGURATION + and app.suffix in FILE_SUFFIX_CONFIGURATION ] try: - addon_list = await self.sys_run_in_executor(_get_addons_list) + app_list = await self.sys_run_in_executor(_get_apps_list) except OSError as err: suggestion = None self.sys_resolution.check_oserror(err) @@ -177,48 +175,48 @@ def _get_addons_list() -> list[Path]: "Can't process %s because of Filesystem issues: %s", repository, err ) return None - return addon_list + return app_list - async def _read_addons_folder( + async def _read_apps_folder( self, path: Path, repository: str ) -> dict[str, dict[str, Any]]: - """Read data from add-ons folder.""" - if not (addon_config_list := await self._find_addon_configs(path, repository)): + """Read data from apps folder.""" + if not (app_config_list := await self._find_app_configs(path, repository)): return {} - def _process_addons_config() -> dict[str, dict[str, Any]]: - addons: dict[str, dict[str, Any]] = {} - for addon_config in addon_config_list: 
+ def _process_apps_config() -> dict[str, dict[str, Any]]: + apps: dict[str, dict[str, Any]] = {} + for app_config in app_config_list: try: - addon = read_json_or_yaml_file(addon_config) + app = read_json_or_yaml_file(app_config) except ConfigurationFileError: _LOGGER.warning( - "Can't read %s from repository %s", addon_config, repository + "Can't read %s from repository %s", app_config, repository ) continue # validate try: - addon = SCHEMA_ADDON_CONFIG(addon) + app = SCHEMA_ADDON_CONFIG(app) except vol.Invalid as ex: _LOGGER.warning( - "Can't read %s: %s", addon_config, humanize_error(addon, ex) + "Can't read %s: %s", app_config, humanize_error(app, ex) ) continue # Generate slug - addon_slug = f"{repository}_{addon[ATTR_SLUG]}" + app_slug = f"{repository}_{app[ATTR_SLUG]}" # store - addon[ATTR_REPOSITORY] = repository - addon[ATTR_LOCATION] = str(addon_config.parent) - addon[ATTR_TRANSLATIONS] = _read_addon_translations(addon_config.parent) - addon[ATTR_VERSION_TIMESTAMP] = addon_config.stat().st_mtime - addons[addon_slug] = addon + app[ATTR_REPOSITORY] = repository + app[ATTR_LOCATION] = str(app_config.parent) + app[ATTR_TRANSLATIONS] = _read_app_translations(app_config.parent) + app[ATTR_VERSION_TIMESTAMP] = app_config.stat().st_mtime + apps[app_slug] = app - return addons + return apps - return await self.sys_run_in_executor(_process_addons_config) + return await self.sys_run_in_executor(_process_apps_config) def _get_builtin_repositories(self) -> dict[str, dict[str, str]]: """Get local built-in repositories into dataset. 
diff --git a/supervisor/store/git.py b/supervisor/store/git.py index 9a528123831..edf54eab3c5 100644 --- a/supervisor/store/git.py +++ b/supervisor/store/git.py @@ -1,4 +1,4 @@ -"""Init file for Supervisor add-on Git.""" +"""Init file for Supervisor app Git.""" import asyncio import functools as ft @@ -20,7 +20,7 @@ class GitRepo(CoreSysAttributes): - """Manage Add-on Git repository.""" + """Manage App Git repository.""" def __init__(self, coresys: CoreSys, path: Path, url: str): """Initialize Git base wrapper.""" @@ -48,7 +48,7 @@ def branch(self) -> str: return self.data[ATTR_BRANCH] async def load(self) -> None: - """Init Git add-on repository.""" + """Init Git app repository.""" if await self.sys_run_in_executor(directory_missing_or_empty, self.path): await self.clone() return @@ -83,7 +83,7 @@ async def load(self) -> None: on_condition=StoreJobError, ) async def clone(self) -> None: - """Clone git add-on repository.""" + """Clone git app repository.""" async with self.lock: await self._clone() @@ -121,7 +121,7 @@ def move_clone(): await self.sys_run_in_executor(temp_dir.cleanup) async def _clone(self, path: Path | None = None) -> None: - """Clone git add-on repository to location.""" + """Clone git app repository to location.""" path = path or self.path git_args = { attribute: value @@ -160,7 +160,7 @@ async def _clone(self, path: Path | None = None) -> None: on_condition=StoreJobError, ) async def pull(self) -> bool: - """Pull Git add-on repo.""" + """Pull Git app repo.""" if self.lock.locked(): _LOGGER.warning("There is already a task in progress") return False diff --git a/supervisor/store/repository.py b/supervisor/store/repository.py index f6725d50524..606aac01185 100644 --- a/supervisor/store/repository.py +++ b/supervisor/store/repository.py @@ -36,10 +36,10 @@ class Repository(CoreSysAttributes, ABC): - """Add-on store repository in Supervisor.""" + """App store repository in Supervisor.""" def __init__(self, coresys: CoreSys, repository: str, 
local_path: Path, slug: str): - """Initialize add-on store repository object.""" + """Initialize app store repository object.""" self._slug: str = slug self._local_path: Path = local_path self.coresys: CoreSys = coresys @@ -58,15 +58,15 @@ def _create_builtin(coresys: CoreSys, builtin: BuiltinRepository) -> Repository: """Create builtin repository.""" if builtin == BuiltinRepository.LOCAL: slug = REPOSITORY_LOCAL - local_path = coresys.config.path_addons_local + local_path = coresys.config.path_apps_local return RepositoryLocal(coresys, local_path, slug) elif builtin == BuiltinRepository.CORE: slug = REPOSITORY_CORE - local_path = coresys.config.path_addons_core + local_path = coresys.config.path_apps_core else: # For other builtin repositories (URL-based) slug = get_hash_from_repository(builtin.value) - local_path = coresys.config.path_addons_git / slug + local_path = coresys.config.path_apps_git / slug return RepositoryGitBuiltin( coresys, builtin.value, local_path, slug, builtin.git_url ) @@ -75,7 +75,7 @@ def _create_builtin(coresys: CoreSys, builtin: BuiltinRepository) -> Repository: def _create_custom(coresys: CoreSys, repository: str) -> RepositoryCustom: """Create custom repository.""" slug = get_hash_from_repository(repository) - local_path = coresys.config.path_addons_git / slug + local_path = coresys.config.path_apps_git / slug return RepositoryCustom(coresys, repository, local_path, slug) def __repr__(self) -> str: @@ -123,26 +123,26 @@ async def validate(self) -> bool: @abstractmethod async def load(self) -> None: - """Load addon repository.""" + """Load app repository.""" @abstractmethod async def update(self) -> bool: - """Update add-on repository. + """Update app repository. Returns True if the repository was updated. 
""" @abstractmethod async def remove(self) -> None: - """Remove add-on repository.""" + """Remove app repository.""" @abstractmethod async def reset(self) -> None: - """Reset add-on repository to fix corruption issue with files.""" + """Reset app repository to fix corruption issue with files.""" class RepositoryBuiltin(Repository, ABC): - """A built-in add-on repository.""" + """A built-in app repository.""" @property def is_builtin(self) -> bool: @@ -159,16 +159,16 @@ async def remove(self) -> None: class RepositoryGit(Repository, ABC): - """A git based add-on repository.""" + """A git based app repository.""" _git: GitRepo async def load(self) -> None: - """Load addon repository.""" + """Load app repository.""" await self._git.load() async def update(self) -> bool: - """Update add-on repository. + """Update app repository. Returns True if the repository was updated. """ @@ -202,7 +202,7 @@ def validate_file() -> bool: return await self.sys_run_in_executor(validate_file) async def reset(self) -> None: - """Reset add-on repository to fix corruption issue with files.""" + """Reset app repository to fix corruption issue with files.""" try: await self._git.reset() await self.load() @@ -212,7 +212,7 @@ async def reset(self) -> None: class RepositoryLocal(RepositoryBuiltin): - """A local add-on repository.""" + """A local app repository.""" def __init__(self, coresys: CoreSys, local_path: Path, slug: str) -> None: """Initialize object.""" @@ -229,11 +229,11 @@ async def _get_latest_mtime(self) -> tuple[float, Path]: raise StoreRepositoryUnknownError(repo=self.slug) from err async def load(self) -> None: - """Load addon repository.""" + """Load app repository.""" self._latest_mtime, _ = await self._get_latest_mtime() async def update(self) -> bool: - """Update add-on repository. + """Update app repository. Returns True if the repository was updated. 
""" @@ -257,7 +257,7 @@ async def reset(self) -> None: class RepositoryGitBuiltin(RepositoryBuiltin, RepositoryGit): - """A built-in add-on repository based on git.""" + """A built-in app repository based on git.""" def __init__( self, coresys: CoreSys, repository: str, local_path: Path, slug: str, url: str @@ -268,7 +268,7 @@ def __init__( class RepositoryCustom(RepositoryGit): - """A custom add-on repository.""" + """A custom app repository.""" def __init__(self, coresys: CoreSys, url: str, local_path: Path, slug: str) -> None: """Initialize object.""" @@ -281,5 +281,5 @@ def is_builtin(self) -> bool: return False async def remove(self) -> None: - """Remove add-on repository.""" + """Remove app repository.""" await self._git.remove() diff --git a/supervisor/store/utils.py b/supervisor/store/utils.py index 1e8f4c003e1..ead32a0866d 100644 --- a/supervisor/store/utils.py +++ b/supervisor/store/utils.py @@ -1,4 +1,4 @@ -"""Util add-ons functions.""" +"""Util apps functions.""" import hashlib import logging diff --git a/supervisor/store/validate.py b/supervisor/store/validate.py index e649caa7c8a..502cdc756fa 100644 --- a/supervisor/store/validate.py +++ b/supervisor/store/validate.py @@ -1,4 +1,4 @@ -"""Validate add-ons options schema.""" +"""Validate apps options schema.""" import voluptuous as vol diff --git a/supervisor/utils/logging.py b/supervisor/utils/logging.py index e43d8c45d93..3f5feb3c124 100644 --- a/supervisor/utils/logging.py +++ b/supervisor/utils/logging.py @@ -8,12 +8,12 @@ from typing import Any -class AddonLoggerAdapter(logging.LoggerAdapter): - """Logging Adapter which prepends log entries with add-on name.""" +class AppLoggerAdapter(logging.LoggerAdapter): + """Logging Adapter which prepends log entries with app name.""" def process(self, msg, kwargs): - """Process the logging message by prepending the add-on name.""" - return f"[{self.extra['addon_name']}] {msg}", kwargs + """Process the logging message by prepending the app name.""" + return 
f"[{self.extra['app_name']}] {msg}", kwargs class SupervisorQueueHandler(logging.handlers.QueueHandler): diff --git a/supervisor/validate.py b/supervisor/validate.py index da870be2249..da35655ab0a 100644 --- a/supervisor/validate.py +++ b/supervisor/validate.py @@ -7,7 +7,7 @@ import voluptuous as vol from .const import ( - ATTR_ADDONS_CUSTOM_LIST, + ATTR_APPS_CUSTOM_LIST, ATTR_AUDIO, ATTR_AUTO_UPDATE, ATTR_CHANNEL, @@ -87,7 +87,7 @@ def docker_image(image: str) -> str: if not path: raise vol.Invalid(f"Docker image has no name: {image}") - # Tags are not allowed - version is managed separately by the add-on system + # Tags are not allowed - version is managed separately by the app system if ":" in path: raise vol.Invalid(f"Docker image must not contain a tag: {image}") @@ -204,7 +204,7 @@ def validate_repository(repository: str) -> str: ATTR_VERSION, default=AwesomeVersion(SUPERVISOR_VERSION) ): version_tag, vol.Optional(ATTR_IMAGE): docker_image, - vol.Optional(ATTR_ADDONS_CUSTOM_LIST, default=[]): repositories, + vol.Optional(ATTR_APPS_CUSTOM_LIST, default=[]): repositories, vol.Optional(ATTR_WAIT_BOOT, default=5): wait_boot, vol.Optional(ATTR_LOGGING, default=LogLevel.INFO): vol.Coerce(LogLevel), vol.Optional(ATTR_DEBUG, default=False): vol.Boolean(), diff --git a/tests/addons/__init__.py b/tests/addons/__init__.py index 1cdeb3f7733..1add850792a 100644 --- a/tests/addons/__init__.py +++ b/tests/addons/__init__.py @@ -1 +1 @@ -"""Add-ons tests.""" +"""Apps tests.""" diff --git a/tests/addons/test_addon.py b/tests/addons/test_addon.py index c16edc8bfc4..c5520d661dc 100644 --- a/tests/addons/test_addon.py +++ b/tests/addons/test_addon.py @@ -1,4 +1,4 @@ -"""Test Home Assistant Add-ons.""" +"""Test Home Assistant Apps.""" import asyncio from datetime import timedelta @@ -14,21 +14,21 @@ import pytest from securetar import SecureTarArchive, SecureTarFile -from supervisor.addons.addon import Addon -from supervisor.addons.const import AddonBackupMode -from 
supervisor.addons.model import AddonModel +from supervisor.addons.addon import App +from supervisor.addons.const import AppBackupMode +from supervisor.addons.model import AppModel from supervisor.config import CoreConfig -from supervisor.const import ATTR_ADVANCED, AddonBoot, AddonState, BusEvent +from supervisor.const import ATTR_ADVANCED, AppBoot, AppState, BusEvent from supervisor.coresys import CoreSys -from supervisor.docker.addon import DockerAddon +from supervisor.docker.addon import DockerApp from supervisor.docker.const import ContainerState from supervisor.docker.manager import CommandReturn, DockerAPI from supervisor.docker.monitor import DockerContainerStateEvent from supervisor.exceptions import ( - AddonPortConflict, - AddonPrePostBackupCommandReturnedError, - AddonsJobError, - AddonUnknownError, + AppPortConflict, + AppPrePostBackupCommandReturnedError, + AppsJobError, + AppUnknownError, AudioUpdateError, DockerRegistryAuthError, HassioError, @@ -58,41 +58,41 @@ def _fire_test_event(coresys: CoreSys, name: str, state: ContainerState): ) -def test_options_merge(coresys: CoreSys, install_addon_ssh: Addon) -> None: +def test_options_merge(coresys: CoreSys, install_app_ssh: App) -> None: """Test options merge.""" - addon = coresys.addons.get(TEST_ADDON_SLUG) + app = coresys.apps.get(TEST_ADDON_SLUG) - assert addon.options == { + assert app.options == { "apks": [], "authorized_keys": [], "password": "", "server": {"tcp_forwarding": False}, } - addon.options = {"password": "test"} - assert addon.persist["options"] == {"password": "test"} - assert addon.options == { + app.options = {"password": "test"} + assert app.persist["options"] == {"password": "test"} + assert app.options == { "apks": [], "authorized_keys": [], "password": "test", "server": {"tcp_forwarding": False}, } - addon.options = {"password": "test", "apks": ["gcc"]} - assert addon.persist["options"] == {"password": "test", "apks": ["gcc"]} - assert addon.options == { + app.options = 
{"password": "test", "apks": ["gcc"]} + assert app.persist["options"] == {"password": "test", "apks": ["gcc"]} + assert app.options == { "apks": ["gcc"], "authorized_keys": [], "password": "test", "server": {"tcp_forwarding": False}, } - addon.options = {"password": "test", "server": {"tcp_forwarding": True}} - assert addon.persist["options"] == { + app.options = {"password": "test", "server": {"tcp_forwarding": True}} + assert app.persist["options"] == { "password": "test", "server": {"tcp_forwarding": True}, } - assert addon.options == { + assert app.options == { "apks": [], "authorized_keys": [], "password": "test", @@ -100,59 +100,59 @@ def test_options_merge(coresys: CoreSys, install_addon_ssh: Addon) -> None: } # Test overwrite - test = addon.options + test = app.options test["server"]["test"] = 1 - assert addon.options == { + assert app.options == { "apks": [], "authorized_keys": [], "password": "test", "server": {"tcp_forwarding": True}, } - addon.options = {"password": "test", "server": {"tcp_forwarding": True}} + app.options = {"password": "test", "server": {"tcp_forwarding": True}} -async def test_addon_state_listener(coresys: CoreSys, install_addon_ssh: Addon) -> None: - """Test addon is setting state from docker events.""" - with patch.object(DockerAddon, "attach"): - await install_addon_ssh.load() +async def test_app_state_listener(coresys: CoreSys, install_app_ssh: App) -> None: + """Test app is setting state from docker events.""" + with patch.object(DockerApp, "attach"): + await install_app_ssh.load() - assert install_addon_ssh.state == AddonState.UNKNOWN + assert install_app_ssh.state == AppState.UNKNOWN - with patch.object(Addon, "watchdog_container"): + with patch.object(App, "watchdog_container"): _fire_test_event(coresys, f"addon_{TEST_ADDON_SLUG}", ContainerState.RUNNING) await asyncio.sleep(0) - assert install_addon_ssh.state == AddonState.STARTED + assert install_app_ssh.state == AppState.STARTED _fire_test_event(coresys, 
f"addon_{TEST_ADDON_SLUG}", ContainerState.STOPPED) await asyncio.sleep(0) - assert install_addon_ssh.state == AddonState.STOPPED + assert install_app_ssh.state == AppState.STOPPED _fire_test_event(coresys, f"addon_{TEST_ADDON_SLUG}", ContainerState.HEALTHY) await asyncio.sleep(0) - assert install_addon_ssh.state == AddonState.STARTED + assert install_app_ssh.state == AppState.STARTED _fire_test_event(coresys, f"addon_{TEST_ADDON_SLUG}", ContainerState.FAILED) await asyncio.sleep(0) - assert install_addon_ssh.state == AddonState.ERROR + assert install_app_ssh.state == AppState.ERROR - # Test other addons are ignored + # Test other apps are ignored _fire_test_event(coresys, "addon_local_non_installed", ContainerState.RUNNING) await asyncio.sleep(0) - assert install_addon_ssh.state == AddonState.ERROR + assert install_app_ssh.state == AppState.ERROR -async def test_addon_watchdog(coresys: CoreSys, install_addon_ssh: Addon) -> None: - """Test addon watchdog works correctly.""" - with patch.object(DockerAddon, "attach"): - await install_addon_ssh.load() +async def test_app_watchdog(coresys: CoreSys, install_app_ssh: App) -> None: + """Test app watchdog works correctly.""" + with patch.object(DockerApp, "attach"): + await install_app_ssh.load() - install_addon_ssh.watchdog = True - install_addon_ssh._manual_stop = False # pylint: disable=protected-access + install_app_ssh.watchdog = True + install_app_ssh._manual_stop = False # pylint: disable=protected-access with ( - patch.object(Addon, "restart") as restart, - patch.object(Addon, "start") as start, - patch.object(DockerAddon, "current_state") as current_state, + patch.object(App, "restart") as restart, + patch.object(App, "start") as start, + patch.object(DockerApp, "current_state") as current_state, ): # Restart if it becomes unhealthy current_state.return_value = ContainerState.UNHEALTHY @@ -165,7 +165,7 @@ async def test_addon_watchdog(coresys: CoreSys, install_addon_ssh: Addon) -> Non # Rebuild if it failed 
current_state.return_value = ContainerState.FAILED - with patch.object(DockerAddon, "stop") as stop: + with patch.object(DockerApp, "stop") as stop: _fire_test_event(coresys, f"addon_{TEST_ADDON_SLUG}", ContainerState.FAILED) await asyncio.sleep(0) stop.assert_called_once_with(remove_container=True) @@ -181,7 +181,7 @@ async def test_addon_watchdog(coresys: CoreSys, install_addon_ssh: Addon) -> Non restart.assert_not_called() start.assert_not_called() - # Other addons ignored + # Other apps ignored current_state.return_value = ContainerState.UNHEALTHY _fire_test_event(coresys, "addon_local_non_installed", ContainerState.UNHEALTHY) await asyncio.sleep(0) @@ -189,31 +189,31 @@ async def test_addon_watchdog(coresys: CoreSys, install_addon_ssh: Addon) -> Non start.assert_not_called() -async def test_watchdog_on_stop(coresys: CoreSys, install_addon_ssh: Addon) -> None: - """Test addon watchdog restarts addon on stop if not manual.""" - with patch.object(DockerAddon, "attach"): - await install_addon_ssh.load() +async def test_watchdog_on_stop(coresys: CoreSys, install_app_ssh: App) -> None: + """Test app watchdog restarts app on stop if not manual.""" + with patch.object(DockerApp, "attach"): + await install_app_ssh.load() - install_addon_ssh.watchdog = True + install_app_ssh.watchdog = True with ( - patch.object(Addon, "restart") as restart, + patch.object(App, "restart") as restart, patch.object( - DockerAddon, + DockerApp, "current_state", return_value=ContainerState.STOPPED, ), - patch.object(DockerAddon, "stop"), + patch.object(DockerApp, "stop"), ): - # Do not restart when addon stopped by user + # Do not restart when app stopped by user _fire_test_event(coresys, f"addon_{TEST_ADDON_SLUG}", ContainerState.RUNNING) await asyncio.sleep(0) - await install_addon_ssh.stop() + await install_app_ssh.stop() _fire_test_event(coresys, f"addon_{TEST_ADDON_SLUG}", ContainerState.STOPPED) await asyncio.sleep(0) restart.assert_not_called() - # Do restart addon if it stops and 
user didn't do it + # Do restart app if it stops and user didn't do it _fire_test_event(coresys, f"addon_{TEST_ADDON_SLUG}", ContainerState.RUNNING) await asyncio.sleep(0) _fire_test_event(coresys, f"addon_{TEST_ADDON_SLUG}", ContainerState.STOPPED) @@ -223,7 +223,7 @@ async def test_watchdog_on_stop(coresys: CoreSys, install_addon_ssh: Addon) -> N @pytest.mark.usefixtures("mock_amd64_arch_supported", "test_repository") async def test_listener_attached_on_install(coresys: CoreSys): - """Test events listener attached on addon install.""" + """Test events listener attached on app install.""" coresys.hardware.disk.get_disk_free_space = lambda x: 5000 coresys.docker.containers.get.side_effect = aiodocker.DockerError( 500, {"message": "fail"} @@ -231,22 +231,22 @@ async def test_listener_attached_on_install(coresys: CoreSys): with ( patch("pathlib.Path.is_dir", return_value=True), patch( - "supervisor.addons.addon.Addon.need_build", + "supervisor.addons.addon.App.need_build", new=PropertyMock(return_value=False), ), patch( - "supervisor.addons.model.AddonModel.with_ingress", + "supervisor.addons.model.AppModel.with_ingress", new=PropertyMock(return_value=False), ), ): - await coresys.addons.install(TEST_ADDON_SLUG) + await coresys.apps.install(TEST_ADDON_SLUG) - # Normally this would be defaulted to False on start of the addon but test skips that - coresys.addons.get_local_only(TEST_ADDON_SLUG).watchdog = False + # Normally this would be defaulted to False on start of the app but test skips that + coresys.apps.get_local_only(TEST_ADDON_SLUG).watchdog = False _fire_test_event(coresys, f"addon_{TEST_ADDON_SLUG}", ContainerState.RUNNING) await asyncio.sleep(0) - assert coresys.addons.get(TEST_ADDON_SLUG).state == AddonState.STARTED + assert coresys.apps.get(TEST_ADDON_SLUG).state == AppState.STARTED @pytest.mark.parametrize( @@ -259,15 +259,15 @@ async def test_watchdog_during_attach( restart_count: int, ): """Test host reboot treated as manual stop but not supervisor 
restart.""" - store = coresys.addons.store[TEST_ADDON_SLUG] - await coresys.addons.data.install(store) + store = coresys.apps.store[TEST_ADDON_SLUG] + await coresys.apps.data.install(store) with ( - patch.object(Addon, "restart") as restart, + patch.object(App, "restart") as restart, patch.object(HwHelper, "last_boot", return_value=utcnow()), - patch.object(DockerAddon, "attach"), + patch.object(DockerApp, "attach"), patch.object( - DockerAddon, + DockerApp, "current_state", return_value=ContainerState.STOPPED, ), @@ -275,29 +275,29 @@ async def test_watchdog_during_attach( coresys.config.last_boot = ( await coresys.hardware.helper.last_boot() + boot_timedelta ) - addon = Addon(coresys, store.slug) - coresys.addons.local[addon.slug] = addon - addon.watchdog = True + app = App(coresys, store.slug) + coresys.apps.local[app.slug] = app + app.watchdog = True - await addon.load() + await app.load() _fire_test_event(coresys, f"addon_{TEST_ADDON_SLUG}", ContainerState.STOPPED) await asyncio.sleep(0) assert restart.call_count == restart_count -@pytest.mark.usefixtures("install_addon_ssh") +@pytest.mark.usefixtures("install_app_ssh") async def test_install_update_fails_if_out_of_date(coresys: CoreSys): - """Test install or update of addon fails when supervisor or plugin is out of date.""" + """Test install or update of app fails when supervisor or plugin is out of date.""" coresys.hardware.disk.get_disk_free_space = lambda x: 5000 with patch.object( type(coresys.supervisor), "need_update", new=PropertyMock(return_value=True) ): - with pytest.raises(AddonsJobError): - await coresys.addons.install(TEST_ADDON_SLUG) - with pytest.raises(AddonsJobError): - await coresys.addons.update(TEST_ADDON_SLUG) + with pytest.raises(AppsJobError): + await coresys.apps.install(TEST_ADDON_SLUG) + with pytest.raises(AppsJobError): + await coresys.apps.update(TEST_ADDON_SLUG) with ( patch.object( @@ -309,29 +309,29 @@ async def test_install_update_fails_if_out_of_date(coresys: CoreSys): 
type(coresys.plugins.audio), "update", side_effect=AudioUpdateError ), ): - with pytest.raises(AddonsJobError): - await coresys.addons.install(TEST_ADDON_SLUG) - with pytest.raises(AddonsJobError): - await coresys.addons.update(TEST_ADDON_SLUG) + with pytest.raises(AppsJobError): + await coresys.apps.install(TEST_ADDON_SLUG) + with pytest.raises(AppsJobError): + await coresys.apps.update(TEST_ADDON_SLUG) async def test_listeners_removed_on_uninstall( - coresys: CoreSys, install_addon_ssh: Addon + coresys: CoreSys, install_app_ssh: App ) -> None: - """Test addon listeners are removed on uninstall.""" - with patch.object(DockerAddon, "attach"): - await install_addon_ssh.load() + """Test app listeners are removed on uninstall.""" + with patch.object(DockerApp, "attach"): + await install_app_ssh.load() - assert install_addon_ssh.loaded is True + assert install_app_ssh.loaded is True # pylint: disable=protected-access - listeners = install_addon_ssh._listeners + listeners = install_app_ssh._listeners for listener in listeners: assert ( listener in coresys.bus._listeners[BusEvent.DOCKER_CONTAINER_STATE_CHANGE] ) - with patch.object(Addon, "persist", new=PropertyMock(return_value=MagicMock())): - await coresys.addons.uninstall(TEST_ADDON_SLUG) + with patch.object(App, "persist", new=PropertyMock(return_value=MagicMock())): + await coresys.apps.uninstall(TEST_ADDON_SLUG) for listener in listeners: assert ( @@ -341,63 +341,63 @@ async def test_listeners_removed_on_uninstall( @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") -async def test_start(coresys: CoreSys, install_addon_ssh: Addon) -> None: - """Test starting an addon without healthcheck.""" - install_addon_ssh.path_data.mkdir() - await install_addon_ssh.load() +async def test_start(coresys: CoreSys, install_app_ssh: App) -> None: + """Test starting an app without healthcheck.""" + install_app_ssh.path_data.mkdir() + await install_app_ssh.load() await asyncio.sleep(0) - assert install_addon_ssh.state == 
AddonState.STOPPED + assert install_app_ssh.state == AppState.STOPPED - start_task = await install_addon_ssh.start() + start_task = await install_app_ssh.start() assert start_task _fire_test_event(coresys, f"addon_{TEST_ADDON_SLUG}", ContainerState.RUNNING) await start_task - assert install_addon_ssh.state == AddonState.STARTED + assert install_app_ssh.state == AppState.STARTED @pytest.mark.parametrize("state", [ContainerState.HEALTHY, ContainerState.UNHEALTHY]) @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") async def test_start_wait_healthcheck( coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, container: DockerContainer, state: ContainerState, ) -> None: - """Test starting an addon with a healthcheck waits for health status.""" - install_addon_ssh.path_data.mkdir() + """Test starting an app with a healthcheck waits for health status.""" + install_app_ssh.path_data.mkdir() container.show.return_value["Config"] = {"Healthcheck": "exists"} - await install_addon_ssh.load() + await install_app_ssh.load() await asyncio.sleep(0) - assert install_addon_ssh.state == AddonState.STOPPED + assert install_app_ssh.state == AppState.STOPPED - start_task = await install_addon_ssh.start() + start_task = await install_app_ssh.start() assert start_task _fire_test_event(coresys, f"addon_{TEST_ADDON_SLUG}", ContainerState.RUNNING) await asyncio.sleep(0.01) assert not start_task.done() - assert install_addon_ssh.state == AddonState.STARTUP + assert install_app_ssh.state == AppState.STARTUP _fire_test_event(coresys, f"addon_{TEST_ADDON_SLUG}", state) await asyncio.sleep(0.01) assert start_task.done() - assert install_addon_ssh.state == AddonState.STARTED + assert install_app_ssh.state == AppState.STARTED @pytest.mark.usefixtures("coresys", "tmp_supervisor_data", "path_extern") async def test_start_timeout( - install_addon_ssh: Addon, caplog: pytest.LogCaptureFixture + install_app_ssh: App, caplog: pytest.LogCaptureFixture ) -> None: - """Test 
starting an addon times out while waiting.""" - install_addon_ssh.path_data.mkdir() - await install_addon_ssh.load() + """Test starting an app times out while waiting.""" + install_app_ssh.path_data.mkdir() + await install_app_ssh.load() await asyncio.sleep(0) - assert install_addon_ssh.state == AddonState.STOPPED + assert install_app_ssh.state == AppState.STOPPED - start_task = await install_addon_ssh.start() + start_task = await install_app_ssh.start() assert start_task caplog.clear() @@ -410,39 +410,39 @@ async def test_start_timeout( @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") -async def test_restart(coresys: CoreSys, install_addon_ssh: Addon) -> None: - """Test restarting an addon.""" - install_addon_ssh.path_data.mkdir() - await install_addon_ssh.load() +async def test_restart(coresys: CoreSys, install_app_ssh: App) -> None: + """Test restarting an app.""" + install_app_ssh.path_data.mkdir() + await install_app_ssh.load() await asyncio.sleep(0) - assert install_addon_ssh.state == AddonState.STOPPED + assert install_app_ssh.state == AppState.STOPPED - start_task = await install_addon_ssh.restart() + start_task = await install_app_ssh.restart() assert start_task _fire_test_event(coresys, f"addon_{TEST_ADDON_SLUG}", ContainerState.RUNNING) await start_task - assert install_addon_ssh.state == AddonState.STARTED + assert install_app_ssh.state == AppState.STARTED @pytest.mark.parametrize("status", ["running", "stopped"]) @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") async def test_backup( coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, container: DockerContainer, status: str, ) -> None: - """Test backing up an addon.""" + """Test backing up an app.""" container.show.return_value["State"]["Status"] = status container.show.return_value["State"]["Running"] = status == "running" - install_addon_ssh.path_data.mkdir() - await install_addon_ssh.load() + install_app_ssh.path_data.mkdir() + await 
install_app_ssh.load() archive = SecureTarArchive(coresys.config.path_tmp / "test.tar", "w") archive.open() tar_file = archive.create_tar("./test.tar.gz") - assert await install_addon_ssh.backup(tar_file) is None + assert await install_app_ssh.backup(tar_file) is None archive.close() @@ -450,48 +450,46 @@ async def test_backup( @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") async def test_backup_no_config( coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, container: DockerContainer, status: str, ) -> None: - """Test backing up an addon with deleted config directory.""" + """Test backing up an app with deleted config directory.""" container.show.return_value["State"]["Status"] = status container.show.return_value["State"]["Running"] = status == "running" - install_addon_ssh.data["map"].append({"type": "addon_config", "read_only": False}) - assert not install_addon_ssh.path_config.exists() - install_addon_ssh.path_data.mkdir() - await install_addon_ssh.load() + install_app_ssh.data["map"].append({"type": "addon_config", "read_only": False}) + assert not install_app_ssh.path_config.exists() + install_app_ssh.path_data.mkdir() + await install_app_ssh.load() archive = SecureTarArchive(coresys.config.path_tmp / "test.tar", "w") archive.open() tar_file = archive.create_tar("./test.tar.gz") - assert await install_addon_ssh.backup(tar_file) is None + assert await install_app_ssh.backup(tar_file) is None archive.close() @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") async def test_backup_with_pre_post_command( coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, container: DockerContainer, ) -> None: - """Test backing up an addon with pre and post command.""" + """Test backing up an app with pre and post command.""" container.show.return_value["State"]["Status"] = "running" container.show.return_value["State"]["Running"] = True - install_addon_ssh.path_data.mkdir() - await install_addon_ssh.load() + 
install_app_ssh.path_data.mkdir() + await install_app_ssh.load() archive = SecureTarArchive(coresys.config.path_tmp / "test.tar", "w") archive.open() tar_file = archive.create_tar("./test.tar.gz") with ( - patch.object(Addon, "backup_pre", new=PropertyMock(return_value="backup_pre")), - patch.object( - Addon, "backup_post", new=PropertyMock(return_value="backup_post") - ), + patch.object(App, "backup_pre", new=PropertyMock(return_value="backup_pre")), + patch.object(App, "backup_post", new=PropertyMock(return_value="backup_post")), ): - assert await install_addon_ssh.backup(tar_file) is None + assert await install_app_ssh.backup(tar_file) is None archive.close() assert container.exec.call_count == 2 @@ -511,39 +509,39 @@ async def test_backup_with_pre_post_command( aiodocker.DockerError(HTTPStatus.NOT_FOUND, {"message": "missing"}), None, [{"ExitCode": 1}], - AddonUnknownError, + AppUnknownError, ), ( aiodocker.DockerError(HTTPStatus.INTERNAL_SERVER_ERROR, {"message": "bad"}), None, [{"ExitCode": 1}], - AddonUnknownError, + AppUnknownError, ), ( None, aiodocker.DockerError(HTTPStatus.INTERNAL_SERVER_ERROR, {"message": "bad"}), [{"ExitCode": 1}], - AddonUnknownError, + AppUnknownError, ), ( None, None, aiodocker.DockerError(HTTPStatus.INTERNAL_SERVER_ERROR, {"message": "bad"}), - AddonUnknownError, + AppUnknownError, ), - (None, None, [{"ExitCode": 1}], AddonPrePostBackupCommandReturnedError), + (None, None, [{"ExitCode": 1}], AppPrePostBackupCommandReturnedError), ], ) @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") async def test_backup_with_pre_command_error( coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, container_get_side_effect: aiodocker.DockerError | None, exec_start_side_effect: aiodocker.DockerError | None, exec_inspect_side_effect: aiodocker.DockerError | list[dict[str, Any]] | None, exc_type_raised: type[HassioError], ) -> None: - """Test backing up an addon with error running pre command.""" + """Test backing up an 
app with error running pre command.""" coresys.docker.containers.get.side_effect = container_get_side_effect coresys.docker.containers.get.return_value.exec.return_value.start.side_effect = ( exec_start_side_effect @@ -552,18 +550,18 @@ async def test_backup_with_pre_command_error( exec_inspect_side_effect ) - install_addon_ssh.path_data.mkdir() - await install_addon_ssh.load() + install_app_ssh.path_data.mkdir() + await install_app_ssh.load() archive = SecureTarArchive(coresys.config.path_tmp / "test.tar", "w") archive.open() tar_file = archive.create_tar("./test.tar.gz") with ( - patch.object(DockerAddon, "is_running", return_value=True), - patch.object(Addon, "backup_pre", new=PropertyMock(return_value="backup_pre")), + patch.object(DockerApp, "is_running", return_value=True), + patch.object(App, "backup_pre", new=PropertyMock(return_value="backup_pre")), pytest.raises(exc_type_raised), ): - assert await install_addon_ssh.backup(tar_file) is None + assert await install_app_ssh.backup(tar_file) is None assert not tar_file.path.exists() archive.close() @@ -573,30 +571,30 @@ async def test_backup_with_pre_command_error( @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") async def test_backup_cold_mode( coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, container: DockerContainer, status: str, ) -> None: - """Test backing up an addon in cold mode.""" + """Test backing up an app in cold mode.""" container.show.return_value["State"]["Status"] = status container.show.return_value["State"]["Running"] = status == "running" - install_addon_ssh.path_data.mkdir() - await install_addon_ssh.load() + install_app_ssh.path_data.mkdir() + await install_app_ssh.load() archive = SecureTarArchive(coresys.config.path_tmp / "test.tar", "w") archive.open() tar_file = archive.create_tar("./test.tar.gz") with ( patch.object( - AddonModel, + AppModel, "backup_mode", - new=PropertyMock(return_value=AddonBackupMode.COLD), + 
new=PropertyMock(return_value=AppBackupMode.COLD), ), patch.object( - DockerAddon, "is_running", side_effect=[status == "running", False, False] + DockerApp, "is_running", side_effect=[status == "running", False, False] ), ): - start_task = await install_addon_ssh.backup(tar_file) + start_task = await install_app_ssh.backup(tar_file) archive.close() assert bool(start_task) is (status == "running") @@ -605,15 +603,15 @@ async def test_backup_cold_mode( @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") async def test_backup_cold_mode_with_watchdog( coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, container: DockerContainer, ): - """Test backing up an addon in cold mode with watchdog active.""" + """Test backing up an app in cold mode with watchdog active.""" container.show.return_value["State"]["Status"] = "running" container.show.return_value["State"]["Running"] = True - install_addon_ssh.watchdog = True - install_addon_ssh.path_data.mkdir() - await install_addon_ssh.load() + install_app_ssh.watchdog = True + install_app_ssh.path_data.mkdir() + await install_app_ssh.load() # Clear task queue, including the event fired for running container await asyncio.sleep(0) @@ -629,17 +627,17 @@ async def mock_stop(*args, **kwargs): archive.open() tar_file = archive.create_tar("./test.tar.gz") with ( - patch.object(Addon, "start") as start, - patch.object(Addon, "restart") as restart, - patch.object(Addon, "end_backup"), - patch.object(DockerAddon, "stop", new=mock_stop), + patch.object(App, "start") as start, + patch.object(App, "restart") as restart, + patch.object(App, "end_backup"), + patch.object(DockerApp, "stop", new=mock_stop), patch.object( - AddonModel, + AppModel, "backup_mode", - new=PropertyMock(return_value=AddonBackupMode.COLD), + new=PropertyMock(return_value=AppBackupMode.COLD), ), ): - await install_addon_ssh.backup(tar_file) + await install_app_ssh.backup(tar_file) await asyncio.sleep(0) start.assert_not_called() 
restart.assert_not_called() @@ -650,15 +648,15 @@ async def mock_stop(*args, **kwargs): @pytest.mark.usefixtures( "tmp_supervisor_data", "path_extern", "mock_aarch64_arch_supported" ) -async def test_restore(coresys: CoreSys, install_addon_ssh: Addon, status: str) -> None: - """Test restoring an addon.""" +async def test_restore(coresys: CoreSys, install_app_ssh: App, status: str) -> None: + """Test restoring an app.""" coresys.hardware.disk.get_disk_free_space = lambda x: 5000 - install_addon_ssh.path_data.mkdir() - await install_addon_ssh.load() + install_app_ssh.path_data.mkdir() + await install_app_ssh.load() tarfile = SecureTarFile(get_fixture_path(f"backup_local_ssh_{status}.tar.gz")) - with patch.object(DockerAddon, "is_running", return_value=False): - start_task = await coresys.addons.restore(TEST_ADDON_SLUG, tarfile) + with patch.object(DockerApp, "is_running", return_value=False): + start_task = await coresys.apps.restore(TEST_ADDON_SLUG, tarfile) assert bool(start_task) is (status == "running") @@ -667,21 +665,21 @@ async def test_restore(coresys: CoreSys, install_addon_ssh: Addon, status: str) "tmp_supervisor_data", "path_extern", "mock_aarch64_arch_supported" ) async def test_restore_while_running( - coresys: CoreSys, install_addon_ssh: Addon, container: DockerContainer + coresys: CoreSys, install_app_ssh: App, container: DockerContainer ): - """Test restore of a running addon.""" + """Test restore of a running app.""" container.show.return_value["State"]["Status"] = "running" container.show.return_value["State"]["Running"] = True coresys.hardware.disk.get_disk_free_space = lambda x: 5000 - install_addon_ssh.path_data.mkdir() - await install_addon_ssh.load() + install_app_ssh.path_data.mkdir() + await install_app_ssh.load() tarfile = SecureTarFile(get_fixture_path("backup_local_ssh_stopped.tar.gz")) with ( - patch.object(DockerAddon, "is_running", return_value=True), + patch.object(DockerApp, "is_running", return_value=True), patch.object(Ingress, 
"update_hass_panel"), ): - start_task = await coresys.addons.restore(TEST_ADDON_SLUG, tarfile) + start_task = await coresys.apps.restore(TEST_ADDON_SLUG, tarfile) assert bool(start_task) is False container.stop.assert_called_once() @@ -691,15 +689,15 @@ async def test_restore_while_running( "tmp_supervisor_data", "path_extern", "mock_aarch64_arch_supported" ) async def test_restore_while_running_with_watchdog( - coresys: CoreSys, install_addon_ssh: Addon, container: DockerContainer + coresys: CoreSys, install_app_ssh: App, container: DockerContainer ): - """Test restore of a running addon with watchdog interference.""" + """Test restore of a running app with watchdog interference.""" container.show.return_value["State"]["Status"] = "running" container.show.return_value["State"]["Running"] = True coresys.hardware.disk.get_disk_free_space = lambda x: 5000 - install_addon_ssh.path_data.mkdir() - install_addon_ssh.watchdog = True - await install_addon_ssh.load() + install_app_ssh.path_data.mkdir() + install_app_ssh.watchdog = True + await install_app_ssh.load() # Simulate stop firing the docker event for stopped container like it normally would async def mock_stop(*args, **kwargs): @@ -711,12 +709,12 @@ async def mock_stop(*args, **kwargs): # Watchdog will see it stop and should not attempt reanimation either tarfile = SecureTarFile(get_fixture_path("backup_local_ssh_stopped.tar.gz")) with ( - patch.object(Addon, "start") as start, - patch.object(Addon, "restart") as restart, - patch.object(DockerAddon, "stop", new=mock_stop), + patch.object(App, "start") as start, + patch.object(App, "restart") as restart, + patch.object(DockerApp, "stop", new=mock_stop), patch.object(Ingress, "update_hass_panel"), ): - await coresys.addons.restore(TEST_ADDON_SLUG, tarfile) + await coresys.apps.restore(TEST_ADDON_SLUG, tarfile) await asyncio.sleep(0) start.assert_not_called() restart.assert_not_called() @@ -724,19 +722,19 @@ async def mock_stop(*args, **kwargs): 
@pytest.mark.usefixtures("coresys") async def test_start_when_running( - install_addon_ssh: Addon, + install_app_ssh: App, container: DockerContainer, caplog: pytest.LogCaptureFixture, ) -> None: - """Test starting an addon without healthcheck.""" + """Test starting an app without healthcheck.""" container.show.return_value["State"]["Status"] = "running" container.show.return_value["State"]["Running"] = True - await install_addon_ssh.load() + await install_app_ssh.load() await asyncio.sleep(0) - assert install_addon_ssh.state == AddonState.STARTED + assert install_app_ssh.state == AppState.STARTED caplog.clear() - start_task = await install_addon_ssh.start() + start_task = await install_app_ssh.start() assert start_task await start_task @@ -745,22 +743,22 @@ async def test_start_when_running( @pytest.mark.usefixtures("test_repository", "mock_aarch64_arch_supported") async def test_local_example_install(coresys: CoreSys, tmp_supervisor_data: Path): - """Test install of an addon.""" + """Test install of an app.""" coresys.hardware.disk.get_disk_free_space = lambda x: 5000 assert not ( data_dir := tmp_supervisor_data / "addons" / "data" / "local_example" ).exists() - with patch.object(DockerAddon, "install") as install: - await coresys.addons.install("local_example") + with patch.object(DockerApp, "install") as install: + await coresys.apps.install("local_example") install.assert_called_once() assert data_dir.is_dir() @pytest.mark.usefixtures("test_repository", "tmp_supervisor_data") -async def test_addon_install_auth_failure(coresys: CoreSys): - """Test addon install raises DockerRegistryAuthError on 401 with credentials.""" +async def test_app_install_auth_failure(coresys: CoreSys): + """Test app install raises DockerRegistryAuthError on 401 with credentials.""" coresys.hardware.disk.get_disk_free_space = lambda x: 5000 # Configure bad registry credentials @@ -770,146 +768,140 @@ async def test_addon_install_auth_failure(coresys: CoreSys): with ( patch.object( - 
DockerAddon, + DockerApp, "install", side_effect=DockerRegistryAuthError(registry="docker.io"), ), pytest.raises(DockerRegistryAuthError), ): - await coresys.addons.install("local_example") + await coresys.apps.install("local_example") - # Verify addon data was cleaned up - assert "local_example" not in coresys.addons.local + # Verify app data was cleaned up + assert "local_example" not in coresys.apps.local @pytest.mark.usefixtures("tmp_supervisor_data") -async def test_addon_update_auth_failure( - coresys: CoreSys, install_addon_example: Addon -): - """Test addon update raises DockerRegistryAuthError on 401 with credentials.""" +async def test_app_update_auth_failure(coresys: CoreSys, install_app_example: App): + """Test app update raises DockerRegistryAuthError on 401 with credentials.""" coresys.hardware.disk.get_disk_free_space = lambda x: 5000 with ( patch.object( - DockerAddon, + DockerApp, "update", side_effect=DockerRegistryAuthError(registry="docker.io"), ), pytest.raises(DockerRegistryAuthError), ): - await install_addon_example.update() + await install_app_example.update() @pytest.mark.usefixtures("tmp_supervisor_data") -async def test_addon_rebuild_auth_failure( - coresys: CoreSys, install_addon_example: Addon -): - """Test addon rebuild raises DockerRegistryAuthError on 401 with credentials.""" +async def test_app_rebuild_auth_failure(coresys: CoreSys, install_app_example: App): + """Test app rebuild raises DockerRegistryAuthError on 401 with credentials.""" coresys.hardware.disk.get_disk_free_space = lambda x: 5000 with ( - patch.object(DockerAddon, "remove"), + patch.object(DockerApp, "remove"), patch.object( - DockerAddon, + DockerApp, "install", side_effect=DockerRegistryAuthError(registry="docker.io"), ), pytest.raises(DockerRegistryAuthError), ): - await install_addon_example.rebuild() + await install_app_example.rebuild() @pytest.mark.usefixtures("coresys", "path_extern") -async def test_local_example_start( - tmp_supervisor_data: Path, 
install_addon_example: Addon -): - """Test start of an addon.""" - install_addon_example.path_data.mkdir() - await install_addon_example.load() +async def test_local_example_start(tmp_supervisor_data: Path, install_app_example: App): + """Test start of an app.""" + install_app_example.path_data.mkdir() + await install_app_example.load() await asyncio.sleep(0) - assert install_addon_example.state == AddonState.STOPPED + assert install_app_example.state == AppState.STOPPED assert not ( - addon_config_dir := tmp_supervisor_data / "addon_configs" / "local_example" + app_config_dir := tmp_supervisor_data / "addon_configs" / "local_example" ).exists() - await install_addon_example.start() + await install_app_example.start() - assert addon_config_dir.is_dir() + assert app_config_dir.is_dir() @pytest.mark.usefixtures("coresys", "tmp_supervisor_data") -async def test_local_example_ingress_port_set(install_addon_example: Addon): - """Test start of an addon.""" - install_addon_example.path_data.mkdir() - await install_addon_example.load() +async def test_local_example_ingress_port_set(install_app_example: App): + """Test start of an app.""" + install_app_example.path_data.mkdir() + await install_app_example.load() - assert install_addon_example.ingress_port != 0 + assert install_app_example.ingress_port != 0 @pytest.mark.usefixtures("tmp_supervisor_data") -async def test_addon_pulse_error( - coresys: CoreSys, install_addon_example: Addon, caplog: pytest.LogCaptureFixture +async def test_app_pulse_error( + coresys: CoreSys, install_app_example: App, caplog: pytest.LogCaptureFixture ): - """Test error writing pulse config for addon.""" + """Test error writing pulse config for app.""" with patch( "supervisor.addons.addon.Path.write_text", side_effect=(err := OSError()) ): err.errno = errno.EBUSY - await install_addon_example.write_pulse() + await install_app_example.write_pulse() assert "can't write pulse/client.config" in caplog.text assert coresys.core.healthy is True 
caplog.clear() err.errno = errno.EBADMSG - await install_addon_example.write_pulse() + await install_app_example.write_pulse() assert "can't write pulse/client.config" in caplog.text assert coresys.core.healthy is False @pytest.mark.usefixtures("coresys") -def test_auto_update_available(install_addon_example: Addon): +def test_auto_update_available(install_app_example: App): """Test auto update availability based on versions.""" - assert install_addon_example.auto_update is False - assert install_addon_example.need_update is False - assert install_addon_example.auto_update_available is False + assert install_app_example.auto_update is False + assert install_app_example.need_update is False + assert install_app_example.auto_update_available is False with patch.object( - Addon, "version", new=PropertyMock(return_value=AwesomeVersion("1.0")) + App, "version", new=PropertyMock(return_value=AwesomeVersion("1.0")) ): - assert install_addon_example.need_update is True - assert install_addon_example.auto_update_available is False + assert install_app_example.need_update is True + assert install_app_example.auto_update_available is False - install_addon_example.auto_update = True - assert install_addon_example.auto_update_available is True + install_app_example.auto_update = True + assert install_app_example.auto_update_available is True with patch.object( - Addon, "version", new=PropertyMock(return_value=AwesomeVersion("0.9")) + App, "version", new=PropertyMock(return_value=AwesomeVersion("0.9")) ): - assert install_addon_example.auto_update_available is False + assert install_app_example.auto_update_available is False with patch.object( - Addon, "version", new=PropertyMock(return_value=AwesomeVersion("test")) + App, "version", new=PropertyMock(return_value=AwesomeVersion("test")) ): - assert install_addon_example.auto_update_available is False + assert install_app_example.auto_update_available is False @pytest.mark.usefixtures("coresys") -def 
test_advanced_flag_ignored(install_addon_example: Addon): +def test_advanced_flag_ignored(install_app_example: App): """Ensure advanced flag in config is ignored.""" - install_addon_example.data[ATTR_ADVANCED] = True + install_app_example.data[ATTR_ADVANCED] = True - assert install_addon_example.advanced is False + assert install_app_example.advanced is False -async def test_paths_cache(coresys: CoreSys, install_addon_ssh: Addon): +async def test_paths_cache(coresys: CoreSys, install_app_ssh: App): """Test cache for key paths that may or may not exist.""" - assert not install_addon_ssh.with_logo - assert not install_addon_ssh.with_icon - assert not install_addon_ssh.with_changelog - assert not install_addon_ssh.with_documentation + assert not install_app_ssh.with_logo + assert not install_app_ssh.with_icon + assert not install_app_ssh.with_changelog + assert not install_app_ssh.with_documentation with ( patch("supervisor.addons.addon.Path.exists", return_value=True), @@ -917,20 +909,20 @@ async def test_paths_cache(coresys: CoreSys, install_addon_ssh: Addon): ): await coresys.store.reload(coresys.store.get("local")) - assert install_addon_ssh.with_logo - assert install_addon_ssh.with_icon - assert install_addon_ssh.with_changelog - assert install_addon_ssh.with_documentation + assert install_app_ssh.with_logo + assert install_app_ssh.with_icon + assert install_app_ssh.with_changelog + assert install_app_ssh.with_documentation @pytest.mark.usefixtures("mock_amd64_arch_supported") -async def test_addon_loads_wrong_image( - coresys: CoreSys, install_addon_ssh: Addon, container: DockerContainer +async def test_app_loads_wrong_image( + coresys: CoreSys, install_app_ssh: App, container: DockerContainer ): - """Test addon is loaded with incorrect image for architecture.""" - coresys.addons.data.save_data.reset_mock() - install_addon_ssh.persist["image"] = "local/aarch64-addon-ssh" - assert install_addon_ssh.image == "local/aarch64-addon-ssh" + """Test app is loaded with 
incorrect image for architecture.""" + coresys.apps.data.save_data.reset_mock() + install_app_ssh.persist["image"] = "local/aarch64-addon-ssh" + assert install_app_ssh.image == "local/aarch64-addon-ssh" with ( patch("pathlib.Path.is_file", return_value=True), @@ -945,10 +937,10 @@ async def test_addon_loads_wrong_image( return_value=PurePath("/addon/path/on/host"), ), ): - await install_addon_ssh.load() + await install_app_ssh.load() container.delete.assert_called_with(force=True, v=True) - # one for removing the addon, one for removing the addon builder + # one for removing the app, one for removing the app builder assert coresys.docker.images.delete.call_count == 2 assert coresys.docker.images.delete.call_args_list[0] == call( @@ -969,13 +961,13 @@ async def test_addon_loads_wrong_image( ["--tag", "local/amd64-addon-ssh:9.2.1"], command, ) - assert install_addon_ssh.image == "local/amd64-addon-ssh" - coresys.addons.data.save_data.assert_called_once() + assert install_app_ssh.image == "local/amd64-addon-ssh" + coresys.apps.data.save_data.assert_called_once() @pytest.mark.usefixtures("mock_amd64_arch_supported") -async def test_addon_loads_missing_image(coresys: CoreSys, install_addon_ssh: Addon): - """Test addon corrects a missing image on load.""" +async def test_app_loads_missing_image(coresys: CoreSys, install_app_ssh: App): + """Test app corrects a missing image on load.""" coresys.docker.images.inspect.side_effect = aiodocker.DockerError( HTTPStatus.NOT_FOUND, {"message": "missing"} ) @@ -993,7 +985,7 @@ async def test_addon_loads_missing_image(coresys: CoreSys, install_addon_ssh: Ad return_value=PurePath("/addon/path/on/host"), ), ): - await install_addon_ssh.load() + await install_app_ssh.load() mock_run_command.assert_called_once() assert mock_run_command.call_args.args[0] == "docker" @@ -1007,12 +999,12 @@ async def test_addon_loads_missing_image(coresys: CoreSys, install_addon_ssh: Ad ["--tag", "local/amd64-addon-ssh:9.2.1"], command, ) - assert 
install_addon_ssh.image == "local/amd64-addon-ssh" + assert install_app_ssh.image == "local/amd64-addon-ssh" @pytest.mark.usefixtures("container", "mock_amd64_arch_supported") -async def test_addon_load_succeeds_with_docker_errors( - coresys: CoreSys, install_addon_ssh: Addon, caplog: pytest.LogCaptureFixture +async def test_app_load_succeeds_with_docker_errors( + coresys: CoreSys, install_app_ssh: App, caplog: pytest.LogCaptureFixture ): """Docker errors while building/pulling an image during load should not raise and fail setup.""" # Build env invalid failure @@ -1020,7 +1012,7 @@ async def test_addon_load_succeeds_with_docker_errors( HTTPStatus.NOT_FOUND, {"message": "missing"} ) caplog.clear() - await install_addon_ssh.load() + await install_app_ssh.load() assert "Cannot build app 'local_ssh' because dockerfile is missing" in caplog.text # Image build failure @@ -1036,48 +1028,48 @@ async def test_addon_load_succeeds_with_docker_errors( DockerAPI, "run_command", return_value=CommandReturn(1, ["error"]) ), ): - await install_addon_ssh.load() + await install_app_ssh.load() assert ( "Docker build failed for local/amd64-addon-ssh:9.2.1 (exit code 1). 
Build output:\nerror" in caplog.text ) # Image pull failure - install_addon_ssh.data["image"] = "test/amd64-addon-ssh" + install_app_ssh.data["image"] = "test/amd64-addon-ssh" caplog.clear() with patch.object( DockerAPI, "pull_image", side_effect=aiodocker.DockerError(400, {"message": "error"}), ): - await install_addon_ssh.load() + await install_app_ssh.load() assert "Can't install test/amd64-addon-ssh:9.2.1:" in caplog.text @pytest.mark.usefixtures("coresys") -async def test_addon_manual_only_boot(install_addon_example: Addon): - """Test an addon with manual only boot mode.""" - assert install_addon_example.boot_config == "manual_only" - assert install_addon_example.boot == "manual" +async def test_app_manual_only_boot(install_app_example: App): + """Test an app with manual only boot mode.""" + assert install_app_example.boot_config == "manual_only" + assert install_app_example.boot == "manual" - # Users cannot change boot mode of an addon with manual forced so changing boot isn't realistic + # Users cannot change boot mode of an app with manual forced so changing boot isn't realistic # However boot mode can change on update and user may have set auto before, ensure it is ignored - install_addon_example.boot = "auto" - assert install_addon_example.boot == "manual" + install_app_example.boot = "auto" + assert install_app_example.boot == "manual" @pytest.mark.parametrize( ("initial_state", "target_state", "issue", "suggestions"), [ ( - AddonState.ERROR, - AddonState.STARTED, + AppState.ERROR, + AppState.STARTED, BOOT_FAIL_ISSUE, [suggestion.type for suggestion in BOOT_FAIL_SUGGESTIONS], ), ( - AddonState.STARTED, - AddonState.STOPPED, + AppState.STARTED, + AppState.STOPPED, Issue( IssueType.DEVICE_ACCESS_MISSING, ContextType.ADDON, @@ -1087,34 +1079,34 @@ async def test_addon_manual_only_boot(install_addon_example: Addon): ), ], ) -async def test_addon_state_dismisses_issue( +async def test_app_state_dismisses_issue( coresys: CoreSys, - install_addon_ssh: Addon, - 
initial_state: AddonState, - target_state: AddonState, + install_app_ssh: App, + initial_state: AppState, + target_state: AppState, issue: Issue, suggestions: list[SuggestionType], ): - """Test an addon state change dismisses the issues.""" - install_addon_ssh.state = initial_state + """Test an app state change dismisses the issues.""" + install_app_ssh.state = initial_state coresys.resolution.add_issue(issue, suggestions) - install_addon_ssh.state = target_state + install_app_ssh.state = target_state assert coresys.resolution.issues == [] assert coresys.resolution.suggestions == [] -async def test_addon_disable_boot_dismisses_boot_fail( - coresys: CoreSys, install_addon_ssh: Addon +async def test_app_disable_boot_dismisses_boot_fail( + coresys: CoreSys, install_app_ssh: App ): """Test a disabling boot dismisses the boot fail issue.""" - install_addon_ssh.boot = AddonBoot.AUTO - install_addon_ssh.state = AddonState.ERROR + install_app_ssh.boot = AppBoot.AUTO + install_app_ssh.state = AppState.ERROR coresys.resolution.add_issue( BOOT_FAIL_ISSUE, [suggestion.type for suggestion in BOOT_FAIL_SUGGESTIONS] ) - install_addon_ssh.boot = AddonBoot.MANUAL + install_app_ssh.boot = AppBoot.MANUAL assert coresys.resolution.issues == [] assert coresys.resolution.suggestions == [] @@ -1122,28 +1114,28 @@ async def test_addon_disable_boot_dismisses_boot_fail( @pytest.mark.usefixtures( "container", "mock_amd64_arch_supported", "path_extern", "tmp_supervisor_data" ) -async def test_addon_start_port_conflict_error( +async def test_app_start_port_conflict_error( coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, caplog: pytest.LogCaptureFixture, ): - """Test port conflict error when trying to start addon.""" - install_addon_ssh.data["image"] = "test/amd64-addon-ssh" + """Test port conflict error when trying to start app.""" + install_app_ssh.data["image"] = "test/amd64-addon-ssh" coresys.docker.containers.create.return_value.start.side_effect = 
aiodocker.DockerError( HTTPStatus.INTERNAL_SERVER_ERROR, "failed to set up container networking: driver failed programming external connectivity on endpoint addon_local_ssh (ea4d0fdaa72cf86f2c9199a04208e3eaf0c5a0d6fd34b3c7f4fab2daadb1f3a9): failed to bind host port for 0.0.0.0:2222:172.30.33.4:22/tcp: address already in use", ) - await install_addon_ssh.load() + await install_app_ssh.load() caplog.clear() with ( - patch.object(Addon, "write_options"), + patch.object(App, "write_options"), pytest.raises( - AddonPortConflict, + AppPortConflict, check=lambda exc: exc.extra_fields == {"name": "local_ssh", "port": 2222}, ), ): - await install_addon_ssh.start() + await install_app_ssh.start() assert ( "Cannot start container addon_local_ssh because port 2222 is already in use" diff --git a/tests/addons/test_build.py b/tests/addons/test_build.py index e0de0069a97..8de86801ef6 100644 --- a/tests/addons/test_build.py +++ b/tests/addons/test_build.py @@ -1,4 +1,4 @@ -"""Test addon build.""" +"""Test app build.""" import base64 import json @@ -9,11 +9,11 @@ from awesomeversion import AwesomeVersion import pytest -from supervisor.addons.addon import Addon -from supervisor.addons.build import AddonBuild +from supervisor.addons.addon import App +from supervisor.addons.build import AppBuild from supervisor.coresys import CoreSys from supervisor.docker.const import DOCKER_HUB, MountType -from supervisor.exceptions import AddonBuildDockerfileMissingError +from supervisor.exceptions import AppBuildDockerfileMissingError from tests.common import is_in_list @@ -30,9 +30,9 @@ def _is_label_in_command( return f"--label {label_name}={label_value}" in " ".join(command) -async def test_platform_set(coresys: CoreSys, install_addon_ssh: Addon): +async def test_platform_set(coresys: CoreSys, install_app_ssh: App): """Test platform set in container build args.""" - build = await AddonBuild.create(coresys, install_addon_ssh) + build = await AppBuild.create(coresys, install_app_ssh) with ( 
patch.object( @@ -54,9 +54,9 @@ async def test_platform_set(coresys: CoreSys, install_addon_ssh: Addon): assert is_in_list(["--platform", "linux/amd64"], args["command"]) -async def test_dockerfile_evaluation(coresys: CoreSys, install_addon_ssh: Addon): +async def test_dockerfile_evaluation(coresys: CoreSys, install_app_ssh: App): """Test dockerfile path in container build args.""" - build = await AddonBuild.create(coresys, install_addon_ssh) + build = await AppBuild.create(coresys, install_app_ssh) with ( patch.object( @@ -82,9 +82,9 @@ async def test_dockerfile_evaluation(coresys: CoreSys, install_addon_ssh: Addon) assert build.arch == "amd64" -async def test_dockerfile_evaluation_arch(coresys: CoreSys, install_addon_ssh: Addon): +async def test_dockerfile_evaluation_arch(coresys: CoreSys, install_app_ssh: App): """Test dockerfile arch evaluation in container build args.""" - build = await AddonBuild.create(coresys, install_addon_ssh) + build = await AppBuild.create(coresys, install_app_ssh) with ( patch.object( @@ -110,9 +110,9 @@ async def test_dockerfile_evaluation_arch(coresys: CoreSys, install_addon_ssh: A assert build.arch == "aarch64" -async def test_build_valid(coresys: CoreSys, install_addon_ssh: Addon): +async def test_build_valid(coresys: CoreSys, install_app_ssh: App): """Test platform set in docker args.""" - build = await AddonBuild.create(coresys, install_addon_ssh) + build = await AppBuild.create(coresys, install_app_ssh) with ( patch.object( type(coresys.arch), "supported", new=PropertyMock(return_value=["aarch64"]) @@ -124,9 +124,9 @@ async def test_build_valid(coresys: CoreSys, install_addon_ssh: Addon): assert (await build.is_valid()) is None -async def test_build_invalid(coresys: CoreSys, install_addon_ssh: Addon): +async def test_build_invalid(coresys: CoreSys, install_app_ssh: App): """Test build not supported because Dockerfile missing for specified architecture.""" - build = await AddonBuild.create(coresys, install_addon_ssh) + build = 
await AppBuild.create(coresys, install_app_ssh) with ( patch.object( type(coresys.arch), "supported", new=PropertyMock(return_value=["amd64"]) @@ -134,22 +134,22 @@ async def test_build_invalid(coresys: CoreSys, install_addon_ssh: Addon): patch.object( type(coresys.arch), "default", new=PropertyMock(return_value="amd64") ), - pytest.raises(AddonBuildDockerfileMissingError), + pytest.raises(AppBuildDockerfileMissingError), ): await build.is_valid() -async def test_docker_config_no_registries(coresys: CoreSys, install_addon_ssh: Addon): +async def test_docker_config_no_registries(coresys: CoreSys, install_app_ssh: App): """Test docker config generation when no registries configured.""" - build = await AddonBuild.create(coresys, install_addon_ssh) + build = await AppBuild.create(coresys, install_app_ssh) # No registries configured by default assert build.get_docker_config_json() is None -async def test_docker_config_all_registries(coresys: CoreSys, install_addon_ssh: Addon): +async def test_docker_config_all_registries(coresys: CoreSys, install_app_ssh: App): """Test docker config includes all configured registries.""" - build = await AddonBuild.create(coresys, install_addon_ssh) + build = await AppBuild.create(coresys, install_app_ssh) # pylint: disable-next=protected-access coresys.docker.config._data["registries"] = { @@ -171,9 +171,9 @@ async def test_docker_config_all_registries(coresys: CoreSys, install_addon_ssh: assert config["auths"]["some.other.registry"]["auth"] == expected_other -async def test_docker_config_docker_hub(coresys: CoreSys, install_addon_ssh: Addon): +async def test_docker_config_docker_hub(coresys: CoreSys, install_app_ssh: App): """Test docker config uses special URL key for Docker Hub.""" - build = await AddonBuild.create(coresys, install_addon_ssh) + build = await AppBuild.create(coresys, install_app_ssh) # pylint: disable-next=protected-access coresys.docker.config._data["registries"] = { @@ -190,9 +190,9 @@ async def 
test_docker_config_docker_hub(coresys: CoreSys, install_addon_ssh: Add assert config["auths"]["https://index.docker.io/v1/"]["auth"] == expected_auth -async def test_docker_args_with_config_path(coresys: CoreSys, install_addon_ssh: Addon): +async def test_docker_args_with_config_path(coresys: CoreSys, install_app_ssh: App): """Test docker args include config volume when path provided.""" - build = await AddonBuild.create(coresys, install_addon_ssh) + build = await AppBuild.create(coresys, install_app_ssh) with ( patch.object( @@ -215,7 +215,7 @@ async def test_docker_args_with_config_path(coresys: CoreSys, install_addon_ssh: config_path, ) - # Check that config is mounted (3 mounts: docker socket, addon path, config) + # Check that config is mounted (3 mounts: docker socket, app path, config) assert len(args["mounts"]) == 3 config_mount = next( m for m in args["mounts"] if m.target == "/root/.docker/config.json" @@ -225,11 +225,9 @@ async def test_docker_args_with_config_path(coresys: CoreSys, install_addon_ssh: assert config_mount.type == MountType.BIND -async def test_docker_args_without_config_path( - coresys: CoreSys, install_addon_ssh: Addon -): +async def test_docker_args_without_config_path(coresys: CoreSys, install_app_ssh: App): """Test docker args don't include config volume when no path provided.""" - build = await AddonBuild.create(coresys, install_addon_ssh) + build = await AppBuild.create(coresys, install_app_ssh) with ( patch.object( @@ -248,7 +246,7 @@ async def test_docker_args_without_config_path( build.get_docker_args, AwesomeVersion("latest"), "test-image:latest", None ) - # Only docker socket and addon path should be mounted + # Only docker socket and app path should be mounted assert len(args["mounts"]) == 2 # Verify no docker config mount for mount in args["mounts"]: @@ -256,17 +254,17 @@ async def test_docker_args_without_config_path( async def test_build_file_deprecation_warning( - coresys: CoreSys, install_addon_ssh: Addon, caplog: 
pytest.LogCaptureFixture + coresys: CoreSys, install_app_ssh: App, caplog: pytest.LogCaptureFixture ): """Test deprecation warning is logged when build.yaml exists.""" with caplog.at_level(logging.WARNING): - await AddonBuild.create(coresys, install_addon_ssh) + await AppBuild.create(coresys, install_app_ssh) assert "uses build.yaml which is deprecated" in caplog.text async def test_no_build_file_no_deprecation_warning( coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, tmp_path: Path, caplog: pytest.LogCaptureFixture, ): @@ -276,34 +274,34 @@ async def test_no_build_file_no_deprecation_warning( with ( patch.object( - type(install_addon_ssh), + type(install_app_ssh), "path_location", new=PropertyMock(return_value=tmp_path), ), caplog.at_level(logging.WARNING), ): - await AddonBuild.create(coresys, install_addon_ssh) + await AppBuild.create(coresys, install_app_ssh) assert "uses build.yaml which is deprecated" not in caplog.text async def test_no_build_yaml_base_image_none( - coresys: CoreSys, install_addon_ssh: Addon, tmp_path: Path + coresys: CoreSys, install_app_ssh: App, tmp_path: Path ): """Test base_image is None when no build file exists.""" dockerfile = tmp_path / "Dockerfile" dockerfile.write_text("ARG BUILD_FROM=ghcr.io/home-assistant/base:latest\n") with patch.object( - type(install_addon_ssh), + type(install_app_ssh), "path_location", new=PropertyMock(return_value=tmp_path), ): - build = await AddonBuild.create(coresys, install_addon_ssh) + build = await AppBuild.create(coresys, install_app_ssh) assert build.base_image is None async def test_no_build_yaml_no_build_from_arg( - coresys: CoreSys, install_addon_ssh: Addon, tmp_path: Path + coresys: CoreSys, install_app_ssh: App, tmp_path: Path ): """Test BUILD_FROM is not in docker args when no build file exists.""" dockerfile = tmp_path / "Dockerfile" @@ -311,7 +309,7 @@ async def test_no_build_yaml_no_build_from_arg( with ( patch.object( - type(install_addon_ssh), + 
type(install_app_ssh), "path_location", new=PropertyMock(return_value=tmp_path), ), @@ -327,7 +325,7 @@ async def test_no_build_yaml_no_build_from_arg( return_value=PurePath("/addon/path/on/host"), ), ): - build = await AddonBuild.create(coresys, install_addon_ssh) + build = await AppBuild.create(coresys, install_app_ssh) args = await coresys.run_in_executor( build.get_docker_args, AwesomeVersion("1.0.0"), "test-image:1.0.0", None ) @@ -337,9 +335,9 @@ async def test_no_build_yaml_no_build_from_arg( assert _is_build_arg_in_command(args["command"], "BUILD_ARCH") -async def test_build_yaml_passes_build_from(coresys: CoreSys, install_addon_ssh: Addon): +async def test_build_yaml_passes_build_from(coresys: CoreSys, install_app_ssh: App): """Test BUILD_FROM is in docker args when build.yaml exists.""" - build = await AddonBuild.create(coresys, install_addon_ssh) + build = await AppBuild.create(coresys, install_app_ssh) with ( patch.object( @@ -364,7 +362,7 @@ async def test_build_yaml_passes_build_from(coresys: CoreSys, install_addon_ssh: async def test_no_build_yaml_docker_config_includes_registries( - coresys: CoreSys, install_addon_ssh: Addon, tmp_path: Path + coresys: CoreSys, install_app_ssh: App, tmp_path: Path ): """Test registries are included in docker config even without build file.""" dockerfile = tmp_path / "Dockerfile" @@ -376,11 +374,11 @@ async def test_no_build_yaml_docker_config_includes_registries( } with patch.object( - type(install_addon_ssh), + type(install_app_ssh), "path_location", new=PropertyMock(return_value=tmp_path), ): - build = await AddonBuild.create(coresys, install_addon_ssh) + build = await AppBuild.create(coresys, install_app_ssh) config_json = build.get_docker_config_json() assert config_json is not None @@ -389,10 +387,10 @@ async def test_no_build_yaml_docker_config_includes_registries( async def test_labels_include_name_and_description( - coresys: CoreSys, install_addon_ssh: Addon + coresys: CoreSys, install_app_ssh: App ): - 
"""Test name and description labels are included when addon has them set.""" - build = await AddonBuild.create(coresys, install_addon_ssh) + """Test name and description labels are included when app has them set.""" + build = await AppBuild.create(coresys, install_app_ssh) with ( patch.object( @@ -420,17 +418,15 @@ async def test_labels_include_name_and_description( async def test_labels_omit_name_and_description_when_empty( - coresys: CoreSys, install_addon_ssh: Addon + coresys: CoreSys, install_app_ssh: App ): - """Test name and description labels are omitted when addon has empty values.""" - build = await AddonBuild.create(coresys, install_addon_ssh) + """Test name and description labels are omitted when app has empty values.""" + build = await AppBuild.create(coresys, install_app_ssh) with ( + patch.object(type(install_app_ssh), "name", new=PropertyMock(return_value="")), patch.object( - type(install_addon_ssh), "name", new=PropertyMock(return_value="") - ), - patch.object( - type(install_addon_ssh), + type(install_app_ssh), "description", new=PropertyMock(return_value=""), ), diff --git a/tests/addons/test_config.py b/tests/addons/test_config.py index a26a86355f1..4da49cc25cd 100644 --- a/tests/addons/test_config.py +++ b/tests/addons/test_config.py @@ -1,10 +1,10 @@ -"""Validate Add-on configs.""" +"""Validate App configs.""" import pytest import voluptuous as vol from supervisor.addons import validate as vd -from supervisor.addons.const import AddonBackupMode +from supervisor.addons.const import AppBackupMode from ..common import load_json_fixture @@ -85,7 +85,7 @@ def test_migration_backup(): """Migrate snapshot to backup.""" config = load_json_fixture("basic-addon-config.json") - config["snapshot"] = AddonBackupMode.HOT + config["snapshot"] = AppBackupMode.HOT config["snapshot_pre"] = "pre_command" config["snapshot_post"] = "post_command" config["snapshot_exclude"] = ["excludeed"] @@ -97,7 +97,7 @@ def test_migration_backup(): assert 
valid_config.get("snapshot_post") is None assert valid_config.get("snapshot_exclude") is None - assert valid_config["backup"] == AddonBackupMode.HOT + assert valid_config["backup"] == AppBackupMode.HOT assert valid_config["backup_pre"] == "pre_command" assert valid_config["backup_post"] == "post_command" assert valid_config["backup_exclude"] == ["excludeed"] @@ -350,10 +350,10 @@ def test_watchdog_url(): def test_valid_slug(): - """Test valid and invalid addon slugs.""" + """Test valid and invalid app slugs.""" config = load_json_fixture("basic-addon-config.json") - # All examples pulled from https://analytics.home-assistant.io/addons.json + # All examples pulled from https://analytics.home-assistant.io/apps.json config["slug"] = "uptime-kuma" assert vd.SCHEMA_ADDON_CONFIG(config) @@ -383,7 +383,7 @@ def test_valid_slug(): def test_valid_schema(): - """Test valid and invalid addon slugs.""" + """Test valid and invalid app slugs.""" config = load_json_fixture("basic-addon-config.json") # Basic types diff --git a/tests/addons/test_manager.py b/tests/addons/test_manager.py index 04b8d2a9cf4..5c55171c65b 100644 --- a/tests/addons/test_manager.py +++ b/tests/addons/test_manager.py @@ -1,4 +1,4 @@ -"""Test addon manager.""" +"""Test app manager.""" import asyncio from collections.abc import AsyncGenerator, Generator @@ -10,19 +10,19 @@ from awesomeversion import AwesomeVersion import pytest -from supervisor.addons.addon import Addon +from supervisor.addons.addon import App from supervisor.arch import CpuArchManager from supervisor.config import CoreConfig -from supervisor.const import ATTR_INGRESS, AddonBoot, AddonStartup, AddonState, BusEvent +from supervisor.const import ATTR_INGRESS, AppBoot, AppStartup, AppState, BusEvent from supervisor.coresys import CoreSys -from supervisor.docker.addon import DockerAddon +from supervisor.docker.addon import DockerApp from supervisor.docker.const import ContainerState from supervisor.docker.interface import DockerInterface from 
supervisor.docker.manager import DockerAPI from supervisor.docker.monitor import DockerContainerStateEvent from supervisor.exceptions import ( - AddonConfigurationError, - AddonsError, + AppConfigurationError, + AppsError, DockerAPIError, DockerNotFound, ) @@ -34,7 +34,7 @@ UnhealthyReason, ) from supervisor.resolution.data import Issue, Suggestion -from supervisor.store.addon import AddonStore +from supervisor.store.addon import AppStore from supervisor.store.repository import RepositoryLocal from supervisor.utils import check_exception_chain from supervisor.utils.common import write_json_file @@ -73,174 +73,170 @@ async def fixture_remove_wait_boot(coresys: CoreSys) -> AsyncGenerator[None]: coresys.config.wait_boot = 0 -@pytest.fixture(name="install_addon_example_image") -async def fixture_install_addon_example_image( +@pytest.fixture(name="install_app_example_image") +async def fixture_install_app_example_image( coresys: CoreSys, test_repository -) -> Generator[Addon]: - """Install local_example add-on with image.""" - store = coresys.addons.store["local_example_image"] - await coresys.addons.data.install(store) +) -> Generator[App]: + """Install local_example app with image.""" + store = coresys.apps.store["local_example_image"] + await coresys.apps.data.install(store) # pylint: disable-next=protected-access - coresys.addons.data._data = coresys.addons.data._schema(coresys.addons.data._data) + coresys.apps.data._data = coresys.apps.data._schema(coresys.apps.data._data) - addon = Addon(coresys, store.slug) - coresys.addons.local[addon.slug] = addon - yield addon + app = App(coresys, store.slug) + coresys.apps.local[app.slug] = app + yield app -async def test_image_added_removed_on_update( - coresys: CoreSys, install_addon_ssh: Addon -): - """Test image added or removed from addon config on update.""" - assert install_addon_ssh.need_update is False +async def test_image_added_removed_on_update(coresys: CoreSys, install_app_ssh: App): + """Test image added or 
removed from app config on update.""" + assert install_app_ssh.need_update is False with patch( "supervisor.store.data.read_json_or_yaml_file", return_value=load_json_fixture("addon-config-add-image.json"), ): await coresys.store.data.update() - assert install_addon_ssh.need_update is True - assert install_addon_ssh.image == "local/amd64-addon-ssh" - assert coresys.addons.store.get(TEST_ADDON_SLUG).image == "test/amd64-my-ssh-addon" + assert install_app_ssh.need_update is True + assert install_app_ssh.image == "local/amd64-addon-ssh" + assert coresys.apps.store.get(TEST_ADDON_SLUG).image == "test/amd64-my-ssh-addon" with ( patch.object(DockerInterface, "install") as install, - patch.object(DockerAddon, "_build") as build, + patch.object(DockerApp, "_build") as build, ): - await coresys.addons.update(TEST_ADDON_SLUG) + await coresys.apps.update(TEST_ADDON_SLUG) build.assert_not_called() install.assert_called_once_with( AwesomeVersion("10.0.0"), "test/amd64-my-ssh-addon", False, "amd64" ) - assert install_addon_ssh.need_update is False + assert install_app_ssh.need_update is False with patch( "supervisor.store.data.read_json_or_yaml_file", return_value=load_json_fixture("addon-config-remove-image.json"), ): await coresys.store.data.update() - assert install_addon_ssh.need_update is True - assert install_addon_ssh.image == "test/amd64-my-ssh-addon" - assert coresys.addons.store.get(TEST_ADDON_SLUG).image == "local/amd64-addon-ssh" + assert install_app_ssh.need_update is True + assert install_app_ssh.image == "test/amd64-my-ssh-addon" + assert coresys.apps.store.get(TEST_ADDON_SLUG).image == "local/amd64-addon-ssh" with ( patch.object(DockerInterface, "install") as install, - patch.object(DockerAddon, "_build") as build, + patch.object(DockerApp, "_build") as build, ): - await coresys.addons.update(TEST_ADDON_SLUG) + await coresys.apps.update(TEST_ADDON_SLUG) build.assert_called_once_with(AwesomeVersion("11.0.0"), "local/amd64-addon-ssh") install.assert_not_called() 
-async def test_addon_boot_skip_host_network_gateway_unprotected( - coresys: CoreSys, install_addon_ssh: Addon +async def test_app_boot_skip_host_network_gateway_unprotected( + coresys: CoreSys, install_app_ssh: App ): - """Test host network add-ons are skipped when gateway is unprotected.""" - install_addon_ssh.boot = AddonBoot.AUTO + """Test host network apps are skipped when gateway is unprotected.""" + install_app_ssh.boot = AppBoot.AUTO coresys.resolution.add_unhealthy_reason(UnhealthyReason.DOCKER_GATEWAY_UNPROTECTED) with ( patch.object( - type(install_addon_ssh), "host_network", new=PropertyMock(return_value=True) + type(install_app_ssh), "host_network", new=PropertyMock(return_value=True) ), - patch.object(Addon, "start") as start, + patch.object(App, "start") as start, ): - await coresys.addons.boot(AddonStartup.APPLICATION) + await coresys.apps.boot(AppStartup.APPLICATION) start.assert_not_called() -async def test_addon_boot_host_network_gateway_protected( - coresys: CoreSys, install_addon_ssh: Addon +async def test_app_boot_host_network_gateway_protected( + coresys: CoreSys, install_app_ssh: App ): - """Test host network add-ons boot normally when gateway is protected.""" - install_addon_ssh.boot = AddonBoot.AUTO + """Test host network apps boot normally when gateway is protected.""" + install_app_ssh.boot = AppBoot.AUTO assert ( UnhealthyReason.DOCKER_GATEWAY_UNPROTECTED not in coresys.resolution.unhealthy ) with ( patch.object( - type(install_addon_ssh), "host_network", new=PropertyMock(return_value=True) + type(install_app_ssh), "host_network", new=PropertyMock(return_value=True) ), - patch.object(Addon, "start", return_value=asyncio.Future()) as start, + patch.object(App, "start", return_value=asyncio.Future()) as start, ): start.return_value.set_result(None) - await coresys.addons.boot(AddonStartup.APPLICATION) + await coresys.apps.boot(AppStartup.APPLICATION) start.assert_called_once() @pytest.mark.parametrize("err", [DockerAPIError, 
DockerNotFound]) -async def test_addon_boot_system_error( - coresys: CoreSys, install_addon_ssh: Addon, capture_exception: Mock, err +async def test_app_boot_system_error( + coresys: CoreSys, install_app_ssh: App, capture_exception: Mock, err ): - """Test system errors during addon boot.""" - install_addon_ssh.boot = AddonBoot.AUTO + """Test system errors during app boot.""" + install_app_ssh.boot = AppBoot.AUTO assert coresys.resolution.issues == [] assert coresys.resolution.suggestions == [] with ( - patch.object(Addon, "write_options"), - patch.object(DockerAddon, "run", side_effect=err), + patch.object(App, "write_options"), + patch.object(DockerApp, "run", side_effect=err), ): - await coresys.addons.boot(AddonStartup.APPLICATION) + await coresys.apps.boot(AppStartup.APPLICATION) capture_exception.assert_not_called() assert coresys.resolution.issues == [BOOT_FAIL_ISSUE] assert coresys.resolution.suggestions == BOOT_FAIL_SUGGESTIONS -async def test_addon_boot_user_error( - coresys: CoreSys, install_addon_ssh: Addon, capture_exception: Mock +async def test_app_boot_user_error( + coresys: CoreSys, install_app_ssh: App, capture_exception: Mock ): - """Test user error during addon boot.""" - install_addon_ssh.boot = AddonBoot.AUTO - with patch.object(Addon, "write_options", side_effect=AddonConfigurationError): - await coresys.addons.boot(AddonStartup.APPLICATION) + """Test user error during app boot.""" + install_app_ssh.boot = AppBoot.AUTO + with patch.object(App, "write_options", side_effect=AppConfigurationError): + await coresys.apps.boot(AppStartup.APPLICATION) capture_exception.assert_not_called() assert coresys.resolution.issues == [BOOT_FAIL_ISSUE] assert coresys.resolution.suggestions == BOOT_FAIL_SUGGESTIONS -async def test_addon_boot_other_error( - coresys: CoreSys, install_addon_ssh: Addon, capture_exception: Mock +async def test_app_boot_other_error( + coresys: CoreSys, install_app_ssh: App, capture_exception: Mock ): - """Test other errors captured 
during addon boot.""" - install_addon_ssh.boot = AddonBoot.AUTO + """Test other errors captured during app boot.""" + install_app_ssh.boot = AppBoot.AUTO err = OSError() with ( - patch.object(Addon, "write_options"), - patch.object(DockerAddon, "run", side_effect=err), + patch.object(App, "write_options"), + patch.object(DockerApp, "run", side_effect=err), ): - await coresys.addons.boot(AddonStartup.APPLICATION) + await coresys.apps.boot(AppStartup.APPLICATION) capture_exception.assert_called_once_with(err) assert coresys.resolution.issues == [BOOT_FAIL_ISSUE] assert coresys.resolution.suggestions == BOOT_FAIL_SUGGESTIONS -async def test_addon_shutdown_error( - coresys: CoreSys, install_addon_ssh: Addon, capture_exception: Mock +async def test_app_shutdown_error( + coresys: CoreSys, install_app_ssh: App, capture_exception: Mock ): - """Test errors captured during addon shutdown.""" - install_addon_ssh.state = AddonState.STARTED - with patch.object(DockerAddon, "stop", side_effect=DockerNotFound()): - await coresys.addons.shutdown(AddonStartup.APPLICATION) + """Test errors captured during app shutdown.""" + install_app_ssh.state = AppState.STARTED + with patch.object(DockerApp, "stop", side_effect=DockerNotFound()): + await coresys.apps.shutdown(AppStartup.APPLICATION) - assert install_addon_ssh.state == AddonState.ERROR + assert install_app_ssh.state == AppState.ERROR capture_exception.assert_called_once() assert check_exception_chain( - capture_exception.call_args[0][0], (AddonsError, DockerNotFound) + capture_exception.call_args[0][0], (AppsError, DockerNotFound) ) @pytest.mark.usefixtures("websession") -async def test_addon_uninstall_removes_discovery( - coresys: CoreSys, install_addon_ssh: Addon -): - """Test discovery messages removed when addon uninstalled.""" +async def test_app_uninstall_removes_discovery(coresys: CoreSys, install_app_ssh: App): + """Test discovery messages removed when app uninstalled.""" assert coresys.discovery.list_messages == [] 
message = await coresys.discovery.send( - install_addon_ssh, "mqtt", {"host": "localhost", "port": 1883} + install_app_ssh, "mqtt", {"host": "localhost", "port": 1883} ) assert message.addon == TEST_ADDON_SLUG assert message.service == "mqtt" @@ -248,7 +244,7 @@ async def test_addon_uninstall_removes_discovery( coresys.homeassistant.api.ensure_access_token = AsyncMock() - await coresys.addons.uninstall(TEST_ADDON_SLUG) + await coresys.apps.uninstall(TEST_ADDON_SLUG) await asyncio.sleep(0) # Find the delete call among all request calls (send also uses request) @@ -266,20 +262,20 @@ async def test_addon_uninstall_removes_discovery( "uuid": message.uuid, } - assert coresys.addons.installed == [] + assert coresys.apps.installed == [] assert coresys.discovery.list_messages == [] -@pytest.mark.usefixtures("install_addon_ssh") +@pytest.mark.usefixtures("install_app_ssh") async def test_load(coresys: CoreSys, caplog: pytest.LogCaptureFixture): - """Test addon manager load.""" + """Test app manager load.""" caplog.clear() with ( patch.object(DockerInterface, "attach") as attach, patch.object(PluginDns, "write_hosts") as write_hosts, ): - await coresys.addons.load() + await coresys.apps.load() attach.assert_called_once_with(version=AwesomeVersion("9.2.1")) write_hosts.assert_called_once() @@ -288,19 +284,19 @@ async def test_load(coresys: CoreSys, caplog: pytest.LogCaptureFixture): @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") -async def test_boot_waits_for_addons(coresys: CoreSys, install_addon_ssh: Addon): - """Test addon manager boot waits for addons.""" - install_addon_ssh.path_data.mkdir() - await install_addon_ssh.load() +async def test_boot_waits_for_apps(coresys: CoreSys, install_app_ssh: App): + """Test app manager boot waits for apps.""" + install_app_ssh.path_data.mkdir() + await install_app_ssh.load() await asyncio.sleep(0) - assert install_addon_ssh.state == AddonState.STOPPED + assert install_app_ssh.state == AppState.STOPPED - addon_state: 
AddonState | None = None + app_state: AppState | None = None async def fire_container_event(*args, **kwargs): - nonlocal addon_state + nonlocal app_state - addon_state = install_addon_ssh.state + app_state = install_app_ssh.state coresys.bus.fire_event( BusEvent.DOCKER_CONTAINER_STATE_CHANGE, DockerContainerStateEvent( @@ -311,39 +307,39 @@ async def fire_container_event(*args, **kwargs): ), ) - with patch.object(DockerAddon, "run", new=fire_container_event): - await coresys.addons.boot(AddonStartup.APPLICATION) + with patch.object(DockerApp, "run", new=fire_container_event): + await coresys.apps.boot(AppStartup.APPLICATION) - assert addon_state == AddonState.STOPPED - assert install_addon_ssh.state == AddonState.STARTED + assert app_state == AppState.STOPPED + assert install_app_ssh.state == AppState.STARTED @pytest.mark.parametrize("status", ["running", "stopped"]) @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") async def test_update( coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, container: DockerContainer, status: str, ): - """Test addon update.""" + """Test app update.""" container.show.return_value["State"]["Status"] = status container.show.return_value["State"]["Running"] = status == "running" - install_addon_ssh.path_data.mkdir() - await install_addon_ssh.load() + install_app_ssh.path_data.mkdir() + await install_app_ssh.load() with patch( "supervisor.store.data.read_json_or_yaml_file", return_value=load_json_fixture("addon-config-add-image.json"), ): await coresys.store.data.update() - assert install_addon_ssh.need_update is True + assert install_app_ssh.need_update is True with ( patch.object(DockerInterface, "install"), - patch.object(DockerAddon, "is_running", return_value=False), + patch.object(DockerApp, "is_running", return_value=False), ): - start_task = await coresys.addons.update(TEST_ADDON_SLUG) + start_task = await coresys.apps.update(TEST_ADDON_SLUG) assert bool(start_task) is (status == "running") @@ 
-352,22 +348,22 @@ async def test_update( @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") async def test_rebuild( coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, container: DockerContainer, status: str, ): - """Test addon rebuild.""" + """Test app rebuild.""" container.show.return_value["State"]["Status"] = status container.show.return_value["State"]["Running"] = status == "running" - install_addon_ssh.path_data.mkdir() - await install_addon_ssh.load() + install_app_ssh.path_data.mkdir() + await install_app_ssh.load() with ( - patch.object(DockerAddon, "_build"), - patch.object(DockerAddon, "is_running", return_value=False), - patch.object(Addon, "need_build", new=PropertyMock(return_value=True)), + patch.object(DockerApp, "_build"), + patch.object(DockerApp, "is_running", return_value=False), + patch.object(App, "need_build", new=PropertyMock(return_value=True)), ): - start_task = await coresys.addons.rebuild(TEST_ADDON_SLUG) + start_task = await coresys.apps.rebuild(TEST_ADDON_SLUG) assert bool(start_task) is (status == "running") @@ -375,17 +371,17 @@ async def test_rebuild( @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") async def test_start_wait_resolved_on_uninstall_in_startup( coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, container: DockerContainer, ) -> None: - """Test uninstall resolves the startup wait task when addon is in STARTUP state.""" - install_addon_ssh.path_data.mkdir() + """Test uninstall resolves the startup wait task when app is in STARTUP state.""" + install_app_ssh.path_data.mkdir() container.show.return_value["Config"] = {"Healthcheck": "exists"} - await install_addon_ssh.load() + await install_app_ssh.load() await asyncio.sleep(0) - assert install_addon_ssh.state == AddonState.STOPPED + assert install_app_ssh.state == AppState.STOPPED - start_task = await install_addon_ssh.start() + start_task = await install_app_ssh.start() assert start_task 
coresys.bus.fire_event( @@ -400,9 +396,9 @@ async def test_start_wait_resolved_on_uninstall_in_startup( await asyncio.sleep(0.01) assert not start_task.done() - assert install_addon_ssh.state == AddonState.STARTUP + assert install_app_ssh.state == AppState.STARTUP - await coresys.addons.uninstall(TEST_ADDON_SLUG) + await coresys.apps.uninstall(TEST_ADDON_SLUG) assert start_task.done() assert start_task.exception() is None @@ -413,10 +409,10 @@ async def test_repository_file_missing( """Test repository file is missing.""" with patch.object( CoreConfig, - "path_addons_git", + "path_apps_git", new=PropertyMock(return_value=tmp_supervisor_data / "addons" / "git"), ): - repo_dir = coresys.config.path_addons_git / "test" + repo_dir = coresys.config.path_apps_git / "test" repo_dir.mkdir(parents=True) await coresys.store.data.update() @@ -430,10 +426,10 @@ async def test_repository_file_error( """Test repository file is missing.""" with patch.object( CoreConfig, - "path_addons_git", + "path_apps_git", new=PropertyMock(return_value=tmp_supervisor_data / "addons" / "git"), ): - repo_dir = coresys.config.path_addons_git / "test" + repo_dir = coresys.config.path_apps_git / "test" repo_dir.mkdir(parents=True) repo_file = repo_dir / "repository.json" @@ -452,13 +448,11 @@ async def test_repository_file_error( assert f"Repository parse error {repo_dir.as_posix()}" in caplog.text -async def test_store_data_changes_during_update( - coresys: CoreSys, install_addon_ssh: Addon -): - """Test store data changing for an addon during an update does not cause errors.""" +async def test_store_data_changes_during_update(coresys: CoreSys, install_app_ssh: App): + """Test store data changing for an app during an update does not cause errors.""" event = asyncio.Event() - coresys.store.data.addons["local_ssh"]["image"] = "test_image" - coresys.store.data.addons["local_ssh"]["version"] = AwesomeVersion("1.1.1") + coresys.store.data.apps["local_ssh"]["image"] = "test_image" + 
coresys.store.data.apps["local_ssh"]["version"] = AwesomeVersion("1.1.1") async def simulate_update(): async def mock_update(_, version, image, *args, **kwargs): @@ -467,10 +461,10 @@ async def mock_update(_, version, image, *args, **kwargs): await event.wait() with ( - patch.object(DockerAddon, "update", new=mock_update), + patch.object(DockerApp, "update", new=mock_update), patch.object(DockerAPI, "cleanup_old_images") as cleanup, ): - await coresys.addons.update("local_ssh") + await coresys.apps.update("local_ssh") cleanup.assert_called_once_with( "test_image", AwesomeVersion("1.1.1"), @@ -484,26 +478,26 @@ async def mock_update(_, version, image, *args, **kwargs): with patch.object(RepositoryLocal, "update", return_value=True): await coresys.store.reload() - assert "image" not in coresys.store.data.addons["local_ssh"] - assert coresys.store.data.addons["local_ssh"]["version"] == AwesomeVersion("9.2.1") + assert "image" not in coresys.store.data.apps["local_ssh"] + assert coresys.store.data.apps["local_ssh"]["version"] == AwesomeVersion("9.2.1") event.set() await update_task - assert install_addon_ssh.image == "test_image" - assert install_addon_ssh.version == AwesomeVersion("1.1.1") + assert install_app_ssh.image == "test_image" + assert install_app_ssh.version == AwesomeVersion("1.1.1") async def test_watchdog_runs_during_update( - coresys: CoreSys, install_addon_ssh: Addon, container: DockerContainer + coresys: CoreSys, install_app_ssh: App, container: DockerContainer ): """Test watchdog running during a long update.""" container.show.return_value["State"]["Status"] = "running" container.show.return_value["State"]["Running"] = True - install_addon_ssh.watchdog = True - coresys.store.data.addons["local_ssh"]["image"] = "test_image" - coresys.store.data.addons["local_ssh"]["version"] = AwesomeVersion("1.1.1") - await install_addon_ssh.load() + install_app_ssh.watchdog = True + coresys.store.data.apps["local_ssh"]["image"] = "test_image" + 
coresys.store.data.apps["local_ssh"]["version"] = AwesomeVersion("1.1.1") + await install_app_ssh.load() # Simulate stop firing the docker event for stopped container like it normally would async def mock_stop(*args, **kwargs): @@ -525,62 +519,62 @@ async def mock_update(*args, **kwargs): # Start should be called exactly once by update itself. Restart should never be called with ( - patch.object(DockerAddon, "stop", new=mock_stop), - patch.object(DockerAddon, "update", new=mock_update), - patch.object(Addon, "start") as start, - patch.object(Addon, "restart") as restart, + patch.object(DockerApp, "stop", new=mock_stop), + patch.object(DockerApp, "update", new=mock_update), + patch.object(App, "start") as start, + patch.object(App, "restart") as restart, ): - await coresys.addons.update("local_ssh") + await coresys.apps.update("local_ssh") await asyncio.sleep(0) start.assert_called_once() restart.assert_not_called() async def test_shared_image_kept_on_uninstall( - coresys: CoreSys, install_addon_example: Addon + coresys: CoreSys, install_app_example: App ): - """Test if two addons share an image it is not removed on uninstall.""" + """Test if two apps share an image it is not removed on uninstall.""" # Clone example to a new mock copy so two share an image - store_data = deepcopy(coresys.addons.store["local_example"].data) - store = AddonStore(coresys, "local_example2", store_data) - coresys.addons.store["local_example2"] = store - await coresys.addons.data.install(store) + store_data = deepcopy(coresys.apps.store["local_example"].data) + store = AppStore(coresys, "local_example2", store_data) + coresys.apps.store["local_example2"] = store + await coresys.apps.data.install(store) # pylint: disable-next=protected-access - coresys.addons.data._data = coresys.addons.data._schema(coresys.addons.data._data) + coresys.apps.data._data = coresys.apps.data._schema(coresys.apps.data._data) - example_2 = Addon(coresys, store.slug) - coresys.addons.local[example_2.slug] = 
example_2 + example_2 = App(coresys, store.slug) + coresys.apps.local[example_2.slug] = example_2 - image = f"{install_addon_example.image}:{install_addon_example.version}" - latest = f"{install_addon_example.image}:latest" + image = f"{install_app_example.image}:{install_app_example.version}" + latest = f"{install_app_example.image}:latest" - await coresys.addons.uninstall("local_example2") + await coresys.apps.uninstall("local_example2") coresys.docker.images.delete.assert_not_called() - assert not coresys.addons.get("local_example2", local_only=True) + assert not coresys.apps.get("local_example2", local_only=True) - await coresys.addons.uninstall("local_example") + await coresys.apps.uninstall("local_example") assert coresys.docker.images.delete.call_count == 2 assert coresys.docker.images.delete.call_args_list[0] == call(latest, force=True) assert coresys.docker.images.delete.call_args_list[1] == call(image, force=True) - assert not coresys.addons.get("local_example", local_only=True) + assert not coresys.apps.get("local_example", local_only=True) @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") async def test_update_reloads_ingress_tokens( coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, container: DockerContainer, ): - """Test ingress tokens are reloaded when addon gains ingress on update.""" + """Test ingress tokens are reloaded when app gains ingress on update.""" container.show.return_value["State"]["Status"] = "stopped" container.show.return_value["State"]["Running"] = False - install_addon_ssh.path_data.mkdir() + install_app_ssh.path_data.mkdir() - # Simulate addon was installed without ingress - coresys.addons.data.system[install_addon_ssh.slug][ATTR_INGRESS] = False - await install_addon_ssh.load() + # Simulate app was installed without ingress + coresys.apps.data.system[install_app_ssh.slug][ATTR_INGRESS] = False + await install_app_ssh.load() await coresys.ingress.reload() - assert install_addon_ssh.ingress_token 
not in coresys.ingress.tokens + assert install_app_ssh.ingress_token not in coresys.ingress.tokens # Update store to version with ingress enabled with patch( @@ -589,74 +583,74 @@ async def test_update_reloads_ingress_tokens( ): await coresys.store.data.update() - assert install_addon_ssh.need_update is True + assert install_app_ssh.need_update is True with ( patch.object(DockerInterface, "install"), - patch.object(DockerAddon, "is_running", return_value=False), + patch.object(DockerApp, "is_running", return_value=False), ): - await coresys.addons.update(TEST_ADDON_SLUG) + await coresys.apps.update(TEST_ADDON_SLUG) # Ingress token should now be registered - assert install_addon_ssh.with_ingress is True - assert install_addon_ssh.ingress_token in coresys.ingress.tokens + assert install_app_ssh.with_ingress is True + assert install_app_ssh.ingress_token in coresys.ingress.tokens @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") async def test_rebuild_reloads_ingress_tokens( coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, container: DockerContainer, ): - """Test ingress tokens are reloaded when addon gains ingress on rebuild.""" + """Test ingress tokens are reloaded when app gains ingress on rebuild.""" container.show.return_value["State"]["Status"] = "stopped" container.show.return_value["State"]["Running"] = False - install_addon_ssh.path_data.mkdir() + install_app_ssh.path_data.mkdir() - # Simulate addon was installed without ingress - coresys.addons.data.system[install_addon_ssh.slug][ATTR_INGRESS] = False - await install_addon_ssh.load() + # Simulate app was installed without ingress + coresys.apps.data.system[install_app_ssh.slug][ATTR_INGRESS] = False + await install_app_ssh.load() await coresys.ingress.reload() - assert install_addon_ssh.ingress_token not in coresys.ingress.tokens + assert install_app_ssh.ingress_token not in coresys.ingress.tokens # Re-enable ingress in system data (rebuild pulls fresh store data) - 
coresys.addons.data.system[install_addon_ssh.slug][ATTR_INGRESS] = True + coresys.apps.data.system[install_app_ssh.slug][ATTR_INGRESS] = True with ( - patch.object(DockerAddon, "_build"), - patch.object(DockerAddon, "is_running", return_value=False), - patch.object(Addon, "need_build", new=PropertyMock(return_value=True)), + patch.object(DockerApp, "_build"), + patch.object(DockerApp, "is_running", return_value=False), + patch.object(App, "need_build", new=PropertyMock(return_value=True)), ): - await coresys.addons.rebuild(TEST_ADDON_SLUG) + await coresys.apps.rebuild(TEST_ADDON_SLUG) # Ingress token should now be registered - assert install_addon_ssh.with_ingress is True - assert install_addon_ssh.ingress_token in coresys.ingress.tokens + assert install_app_ssh.with_ingress is True + assert install_app_ssh.ingress_token in coresys.ingress.tokens async def test_shared_image_kept_on_update( - coresys: CoreSys, install_addon_example_image: Addon, docker: DockerAPI + coresys: CoreSys, install_app_example_image: App, docker: DockerAPI ): - """Test if two addons share an image it is not removed on update.""" + """Test if two apps share an image it is not removed on update.""" # Clone example to a new mock copy so two share an image # But modify version in store so Supervisor sees an update - curr_store_data = deepcopy(coresys.store.data.addons["local_example_image"]) - curr_store = AddonStore(coresys, "local_example2", curr_store_data) - install_addon_example_image.data_store["version"] = "1.3.0" - new_store_data = deepcopy(coresys.store.data.addons["local_example_image"]) - new_store = AddonStore(coresys, "local_example2", new_store_data) - - coresys.store.data.addons["local_example2"] = new_store_data - coresys.addons.store["local_example2"] = new_store - await coresys.addons.data.install(curr_store) + curr_store_data = deepcopy(coresys.store.data.apps["local_example_image"]) + curr_store = AppStore(coresys, "local_example2", curr_store_data) + 
install_app_example_image.data_store["version"] = "1.3.0" + new_store_data = deepcopy(coresys.store.data.apps["local_example_image"]) + new_store = AppStore(coresys, "local_example2", new_store_data) + + coresys.store.data.apps["local_example2"] = new_store_data + coresys.apps.store["local_example2"] = new_store + await coresys.apps.data.install(curr_store) # pylint: disable-next=protected-access - coresys.addons.data._data = coresys.addons.data._schema(coresys.addons.data._data) + coresys.apps.data._data = coresys.apps.data._schema(coresys.apps.data._data) - example_2 = Addon(coresys, curr_store.slug) - coresys.addons.local[example_2.slug] = example_2 + example_2 = App(coresys, curr_store.slug) + coresys.apps.local[example_2.slug] = example_2 assert example_2.version == "1.2.0" - assert install_addon_example_image.version == "1.2.0" + assert install_app_example_image.version == "1.2.0" image_new = {"Id": "image_new", "RepoTags": ["image_new:latest"]} image_old = {"Id": "image_old", "RepoTags": ["image_old:latest"]} @@ -664,11 +658,11 @@ async def test_shared_image_kept_on_update( docker.images.list.return_value = [image_new, image_old] with patch.object(DockerAPI, "pull_image", return_value=image_new): - await coresys.addons.update("local_example2") + await coresys.apps.update("local_example2") docker.images.delete.assert_not_called() assert example_2.version == "1.3.0" docker.images.inspect.side_effect = [image_new] - await coresys.addons.update("local_example_image") + await coresys.apps.update("local_example_image") docker.images.delete.assert_called_once_with("image_old", force=True) - assert install_addon_example_image.version == "1.3.0" + assert install_app_example_image.version == "1.3.0" diff --git a/tests/addons/test_options.py b/tests/addons/test_options.py index aa70a00b59c..41eb201a27e 100644 --- a/tests/addons/test_options.py +++ b/tests/addons/test_options.py @@ -1,11 +1,11 @@ -"""Test add-ons schema to UI schema convertion.""" +"""Test apps schema 
to UI schema convertion.""" from pathlib import Path import pytest import voluptuous as vol -from supervisor.addons.options import AddonOptions, UiOptions +from supervisor.addons.options import AppOptions, UiOptions from supervisor.hardware.data import Device MOCK_ADDON_NAME = "Mock Add-on" @@ -14,14 +14,14 @@ def test_simple_schema(coresys): """Test with simple schema.""" - assert AddonOptions( + assert AppOptions( coresys, {"name": "str", "password": "password", "fires": "bool", "alias": "str?"}, MOCK_ADDON_NAME, MOCK_ADDON_SLUG, )({"name": "Pascal", "password": "1234", "fires": True, "alias": "test"}) - assert AddonOptions( + assert AppOptions( coresys, {"name": "str", "password": "password", "fires": "bool", "alias": "str?"}, MOCK_ADDON_NAME, @@ -29,7 +29,7 @@ def test_simple_schema(coresys): )({"name": "Pascal", "password": "1234", "fires": True}) with pytest.raises(vol.error.Invalid): - AddonOptions( + AppOptions( coresys, {"name": "str", "password": "password", "fires": "bool", "alias": "str?"}, MOCK_ADDON_NAME, @@ -37,7 +37,7 @@ def test_simple_schema(coresys): )({"name": "Pascal", "password": "1234", "fires": "hah"}) with pytest.raises(vol.error.Invalid): - AddonOptions( + AppOptions( coresys, {"name": "str", "password": "password", "fires": "bool", "alias": "str?"}, MOCK_ADDON_NAME, @@ -47,7 +47,7 @@ def test_simple_schema(coresys): def test_simple_schema_integers(coresys): """Test integer limits.""" - assert AddonOptions( + assert AppOptions( coresys, {"name": "str", "password": "password", "pos": "int(0,10)", "neg": "int(-5,0)"}, MOCK_ADDON_NAME, @@ -55,7 +55,7 @@ def test_simple_schema_integers(coresys): )({"name": "Pascal", "password": "1234", "pos": 5, "neg": "-4"}) with pytest.raises(vol.error.Invalid): - assert AddonOptions( + assert AppOptions( coresys, { "name": "str", @@ -70,7 +70,7 @@ def test_simple_schema_integers(coresys): def test_simple_schema_floats(coresys): """Test float limits.""" - assert AddonOptions( + assert AppOptions( coresys, { 
"name": "str", @@ -83,7 +83,7 @@ def test_simple_schema_floats(coresys): )({"name": "Pascal", "password": "1234", "pos": 5.0, "neg": "-4.0"}) with pytest.raises(vol.error.Invalid): - assert AddonOptions( + assert AppOptions( coresys, { "name": "str", @@ -96,7 +96,7 @@ def test_simple_schema_floats(coresys): )({"name": "Pascal", "password": "1234", "pos": 11.0, "neg": "-6.0"}) with pytest.raises(vol.error.Invalid): - assert AddonOptions( + assert AppOptions( coresys, {"name": "str", "password": "password", "float": "float(-1.0,-.)"}, MOCK_ADDON_NAME, @@ -106,7 +106,7 @@ def test_simple_schema_floats(coresys): def test_complex_schema_list(coresys): """Test with complex list schema.""" - assert AddonOptions( + assert AppOptions( coresys, {"name": "str", "password": "password", "extend": ["str"]}, MOCK_ADDON_NAME, @@ -114,7 +114,7 @@ def test_complex_schema_list(coresys): )({"name": "Pascal", "password": "1234", "extend": ["test", "blu"]}) with pytest.raises(vol.error.Invalid): - AddonOptions( + AppOptions( coresys, {"name": "str", "password": "password", "extend": ["str"]}, MOCK_ADDON_NAME, @@ -122,7 +122,7 @@ def test_complex_schema_list(coresys): )({"name": "Pascal", "password": "1234", "extend": ["test", 1]}) with pytest.raises(vol.error.Invalid): - AddonOptions( + AppOptions( coresys, {"name": "str", "password": "password", "extend": ["str"]}, MOCK_ADDON_NAME, @@ -132,14 +132,14 @@ def test_complex_schema_list(coresys): def test_optional_schema_list(coresys): """Test with an optional list schema.""" - assert AddonOptions( + assert AppOptions( coresys, {"name": "str", "password": "password", "extend": ["str?"]}, MOCK_ADDON_NAME, MOCK_ADDON_SLUG, )({"name": "Pascal", "password": "1234"}) - assert AddonOptions( + assert AppOptions( coresys, {"name": "str", "password": "password", "extend": ["str?"]}, MOCK_ADDON_NAME, @@ -147,14 +147,14 @@ def test_optional_schema_list(coresys): )({"name": "Pascal", "password": "1234", "extend": []}) with 
pytest.raises(vol.error.Invalid): - AddonOptions( + AppOptions( coresys, {"name": "str", "password": "password", "extend": ["str"]}, MOCK_ADDON_NAME, MOCK_ADDON_SLUG, )({"name": "Pascal", "password": "1234"}) - assert AddonOptions( + assert AppOptions( coresys, {"name": "str", "password": "password", "extend": ["str"]}, MOCK_ADDON_NAME, @@ -164,7 +164,7 @@ def test_optional_schema_list(coresys): def test_complex_schema_dict(coresys): """Test with complex dict schema.""" - assert AddonOptions( + assert AppOptions( coresys, {"name": "str", "password": "password", "extend": {"test": "int"}}, MOCK_ADDON_NAME, @@ -172,7 +172,7 @@ def test_complex_schema_dict(coresys): )({"name": "Pascal", "password": "1234", "extend": {"test": 1}}) with pytest.raises(vol.error.Invalid): - AddonOptions( + AppOptions( coresys, {"name": "str", "password": "password", "extend": {"test": "int"}}, MOCK_ADDON_NAME, @@ -180,7 +180,7 @@ def test_complex_schema_dict(coresys): )({"name": "Pascal", "password": "1234", "extend": {"wrong": 1}}) with pytest.raises(vol.error.Invalid): - AddonOptions( + AppOptions( coresys, {"name": "str", "password": "password", "extend": ["str"]}, MOCK_ADDON_NAME, @@ -190,7 +190,7 @@ def test_complex_schema_dict(coresys): def test_complex_schema_dict_and_list(coresys): """Test with complex dict/list nested schema.""" - assert AddonOptions( + assert AppOptions( coresys, { "name": "str", @@ -218,7 +218,7 @@ def test_complex_schema_dict_and_list(coresys): ) with pytest.raises(vol.error.Invalid): - assert AddonOptions( + assert AppOptions( coresys, { "name": "str", @@ -292,7 +292,7 @@ def test_simple_device_schema(coresys): ): coresys.hardware.update_device(device) - data_device_path = AddonOptions( + data_device_path = AppOptions( coresys, {"name": "str", "password": "password", "input": "device"}, MOCK_ADDON_NAME, @@ -300,7 +300,7 @@ def test_simple_device_schema(coresys): )({"name": "Pascal", "password": "1234", "input": "/dev/ttyUSB0"}) assert 
data_device_path["input"] == "/dev/ttyUSB0" - data = AddonOptions( + data = AppOptions( coresys, {"name": "str", "password": "password", "input": "device"}, MOCK_ADDON_NAME, @@ -308,7 +308,7 @@ def test_simple_device_schema(coresys): )({"name": "Pascal", "password": "1234", "input": "/dev/serial/by-id/xyx"}) assert data["input"] == "/dev/serial/by-id/xyx" - assert AddonOptions( + assert AppOptions( coresys, {"name": "str", "password": "password", "input": "device(subsystem=tty)"}, MOCK_ADDON_NAME, @@ -316,7 +316,7 @@ def test_simple_device_schema(coresys): )({"name": "Pascal", "password": "1234", "input": "/dev/ttyACM0"}) with pytest.raises(vol.error.Invalid): - assert AddonOptions( + assert AppOptions( coresys, {"name": "str", "password": "password", "input": "device"}, MOCK_ADDON_NAME, @@ -324,7 +324,7 @@ def test_simple_device_schema(coresys): )({"name": "Pascal", "password": "1234", "input": "/dev/not_exists"}) with pytest.raises(vol.error.Invalid): - assert AddonOptions( + assert AppOptions( coresys, {"name": "str", "password": "password", "input": "device(subsystem=tty)"}, MOCK_ADDON_NAME, @@ -335,7 +335,7 @@ def test_simple_device_schema(coresys): def test_device_schema_wrong_type(coresys): """Test device option rejects non-string values.""" with pytest.raises(vol.error.Invalid): - AddonOptions( + AppOptions( coresys, {"name": "str", "input": "device(subsystem=tty)"}, MOCK_ADDON_NAME, @@ -343,7 +343,7 @@ def test_device_schema_wrong_type(coresys): )({"name": "Pascal", "input": {"baudrate": 115200, "flow_control": True}}) with pytest.raises(vol.error.Invalid): - AddonOptions( + AppOptions( coresys, {"name": "str", "input": "device"}, MOCK_ADDON_NAME, @@ -353,7 +353,7 @@ def test_device_schema_wrong_type(coresys): def test_simple_schema_password(coresys): """Test with simple schema password pwned.""" - validate = AddonOptions( + validate = AppOptions( coresys, {"name": "str", "password": "password", "fires": "bool", "alias": "str?"}, MOCK_ADDON_NAME, @@ -572,9 
+572,7 @@ def test_ui_simple_device_schema_no_filter(coresys): def test_log_entry(coresys, caplog): """Test log entry when no option match in schema.""" - options = AddonOptions(coresys, {}, MOCK_ADDON_NAME, MOCK_ADDON_SLUG)( - {"test": "str"} - ) + options = AppOptions(coresys, {}, MOCK_ADDON_NAME, MOCK_ADDON_SLUG)({"test": "str"}) assert isinstance(options, dict) assert not options assert ( diff --git a/tests/api/middleware/test_security.py b/tests/api/middleware/test_security.py index 78016f0c6e5..b074a34bba2 100644 --- a/tests/api/middleware/test_security.py +++ b/tests/api/middleware/test_security.py @@ -9,7 +9,7 @@ import pytest import urllib3 -from supervisor.addons.addon import Addon +from supervisor.addons.addon import App from supervisor.api import RestAPI from supervisor.const import ROLE_ALL, CoreState from supervisor.coresys import CoreSys @@ -197,23 +197,23 @@ async def test_bad_requests( @pytest.mark.usefixtures("plugin_tokens") async def test_token_validation( api_token_validation: TestClient, - install_addon_example: Addon, + install_app_example: App, request_method: str, request_path: str, success_roles: set[str], ): """Test token validation paths.""" - install_addon_example.persist["access_token"] = "abc123" - install_addon_example.data["hassio_api"] = True + install_app_example.persist["access_token"] = "abc123" + install_app_example.data["hassio_api"] = True for role in success_roles: - install_addon_example.data["hassio_role"] = role + install_app_example.data["hassio_role"] = role resp = await getattr(api_token_validation, request_method)( request_path, headers={"Authorization": "Bearer abc123"} ) assert resp.status == 200 for role in set(ROLE_ALL) - success_roles: - install_addon_example.data["hassio_role"] = role + install_app_example.data["hassio_role"] = role resp = await getattr(api_token_validation, request_method)( request_path, headers={"Authorization": "Bearer abc123"} ) diff --git a/tests/api/test_addons.py 
b/tests/api/test_addons.py index 51252932577..4c58b589456 100644 --- a/tests/api/test_addons.py +++ b/tests/api/test_addons.py @@ -1,4 +1,4 @@ -"""Test addons api.""" +"""Test apps api.""" import asyncio from collections.abc import Awaitable, Callable @@ -11,12 +11,12 @@ from aiohttp.test_utils import TestClient import pytest -from supervisor.addons.addon import Addon -from supervisor.addons.build import AddonBuild +from supervisor.addons.addon import App +from supervisor.addons.build import AppBuild from supervisor.arch import CpuArchManager -from supervisor.const import AddonState, CpuArch +from supervisor.const import AppState, CpuArch from supervisor.coresys import CoreSys -from supervisor.docker.addon import DockerAddon +from supervisor.docker.addon import DockerApp from supervisor.docker.const import ContainerState from supervisor.docker.manager import CommandReturn from supervisor.docker.monitor import DockerContainerStateEvent @@ -36,12 +36,12 @@ def _create_test_event(name: str, state: ContainerState) -> DockerContainerState ) -async def test_addons_info(api_client: TestClient, install_addon_ssh: Addon): - """Test getting addon info.""" - install_addon_ssh.state = AddonState.STOPPED - install_addon_ssh.ingress_panel = True - install_addon_ssh.protected = True - install_addon_ssh.watchdog = False +async def test_apps_info(api_client: TestClient, install_app_ssh: App): + """Test getting app info.""" + install_app_ssh.state = AppState.STOPPED + install_app_ssh.ingress_panel = True + install_app_ssh.protected = True + install_app_ssh.watchdog = False resp = await api_client.get(f"/addons/{TEST_ADDON_SLUG}/info") result = await resp.json() @@ -54,10 +54,10 @@ async def test_addons_info(api_client: TestClient, install_addon_ssh: Addon): # DEPRECATED - Remove with legacy routing logic on 1/2023 -async def test_addons_info_not_installed( +async def test_apps_info_not_installed( api_client: TestClient, coresys: CoreSys, test_repository: Repository ): - """Test 
getting addon info for not installed addon.""" + """Test getting app info for not installed app.""" resp = await api_client.get(f"/addons/{TEST_ADDON_SLUG}/info") result = await resp.json() assert result["data"]["version_latest"] == "9.2.1" @@ -72,16 +72,16 @@ async def test_addons_info_not_installed( } -@pytest.mark.usefixtures("install_addon_ssh") -async def test_api_addon_logs( +@pytest.mark.usefixtures("install_app_ssh") +async def test_api_app_logs( advanced_logs_tester: Callable[[str, str], Awaitable[None]], ): - """Test addon logs.""" + """Test app logs.""" await advanced_logs_tester("/addons/local_ssh", "addon_local_ssh") -async def test_api_addon_logs_not_installed(api_client: TestClient): - """Test error is returned for non-existing add-on.""" +async def test_api_app_logs_not_installed(api_client: TestClient): + """Test error is returned for non-existing app.""" resp = await api_client.get("/addons/hic_sunt_leones/logs") assert resp.status == 404 @@ -90,9 +90,9 @@ async def test_api_addon_logs_not_installed(api_client: TestClient): assert content == "App hic_sunt_leones does not exist" -@pytest.mark.usefixtures("docker_logs", "install_addon_ssh") -async def test_api_addon_logs_error(api_client: TestClient, journald_logs: MagicMock): - """Test errors are properly handled for add-on logs.""" +@pytest.mark.usefixtures("docker_logs", "install_app_ssh") +async def test_api_app_logs_error(api_client: TestClient, journald_logs: MagicMock): + """Test errors are properly handled for app logs.""" journald_logs.side_effect = HassioError("Something bad happened!") resp = await api_client.get("/addons/local_ssh/logs") @@ -103,29 +103,29 @@ async def test_api_addon_logs_error(api_client: TestClient, journald_logs: Magic @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") -async def test_api_addon_start_healthcheck( - api_client: TestClient, install_addon_ssh: Addon, container: DockerContainer +async def test_api_app_start_healthcheck( + api_client: 
TestClient, install_app_ssh: App, container: DockerContainer ): - """Test starting an addon waits for healthy.""" - install_addon_ssh.path_data.mkdir() + """Test starting an app waits for healthy.""" + install_app_ssh.path_data.mkdir() container.show.return_value["Config"] = {"Healthcheck": "exists"} - await install_addon_ssh.load() + await install_app_ssh.load() await asyncio.sleep(0) - assert install_addon_ssh.state == AddonState.STOPPED + assert install_app_ssh.state == AppState.STOPPED - state_changes: list[AddonState] = [] + state_changes: list[AppState] = [] _container_events_task: asyncio.Task | None = None async def container_events(): nonlocal state_changes await asyncio.sleep(0.01) - await install_addon_ssh.container_state_changed( + await install_app_ssh.container_state_changed( _create_test_event(f"addon_{TEST_ADDON_SLUG}", ContainerState.RUNNING) ) - state_changes.append(install_addon_ssh.state) + state_changes.append(install_app_ssh.state) - await install_addon_ssh.container_state_changed( + await install_app_ssh.container_state_changed( _create_test_event(f"addon_{TEST_ADDON_SLUG}", ContainerState.HEALTHY) ) @@ -133,38 +133,38 @@ async def container_events_task(*args, **kwargs): nonlocal _container_events_task _container_events_task = asyncio.create_task(container_events()) - with patch.object(DockerAddon, "run", new=container_events_task): + with patch.object(DockerApp, "run", new=container_events_task): resp = await api_client.post("/addons/local_ssh/start") - assert state_changes == [AddonState.STARTUP] - assert install_addon_ssh.state == AddonState.STARTED + assert state_changes == [AppState.STARTUP] + assert install_app_ssh.state == AppState.STARTED assert resp.status == 200 @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") -async def test_api_addon_restart_healthcheck( - api_client: TestClient, install_addon_ssh: Addon, container: DockerContainer +async def test_api_app_restart_healthcheck( + api_client: TestClient, 
install_app_ssh: App, container: DockerContainer ): - """Test restarting an addon waits for healthy.""" - install_addon_ssh.path_data.mkdir() + """Test restarting an app waits for healthy.""" + install_app_ssh.path_data.mkdir() container.show.return_value["Config"] = {"Healthcheck": "exists"} - await install_addon_ssh.load() + await install_app_ssh.load() await asyncio.sleep(0) - assert install_addon_ssh.state == AddonState.STOPPED + assert install_app_ssh.state == AppState.STOPPED - state_changes: list[AddonState] = [] + state_changes: list[AppState] = [] _container_events_task: asyncio.Task | None = None async def container_events(): nonlocal state_changes await asyncio.sleep(0.01) - await install_addon_ssh.container_state_changed( + await install_app_ssh.container_state_changed( _create_test_event(f"addon_{TEST_ADDON_SLUG}", ContainerState.RUNNING) ) - state_changes.append(install_addon_ssh.state) + state_changes.append(install_app_ssh.state) - await install_addon_ssh.container_state_changed( + await install_app_ssh.container_state_changed( _create_test_event(f"addon_{TEST_ADDON_SLUG}", ContainerState.HEALTHY) ) @@ -172,49 +172,49 @@ async def container_events_task(*args, **kwargs): nonlocal _container_events_task _container_events_task = asyncio.create_task(container_events()) - with patch.object(DockerAddon, "run", new=container_events_task): + with patch.object(DockerApp, "run", new=container_events_task): resp = await api_client.post("/addons/local_ssh/restart") - assert state_changes == [AddonState.STARTUP] - assert install_addon_ssh.state == AddonState.STARTED + assert state_changes == [AppState.STARTUP] + assert install_app_ssh.state == AppState.STARTED assert resp.status == 200 @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") -async def test_api_addon_rebuild_healthcheck( +async def test_api_app_rebuild_healthcheck( api_client: TestClient, coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, container: DockerContainer, ): - 
"""Test rebuilding an addon waits for healthy.""" + """Test rebuilding an app waits for healthy.""" coresys.hardware.disk.get_disk_free_space = lambda x: 5000 container.show.return_value["State"]["Status"] = "running" container.show.return_value["State"]["Running"] = True - install_addon_ssh.path_data.mkdir() + install_app_ssh.path_data.mkdir() container.show.return_value["Config"] = {"Healthcheck": "exists"} - await install_addon_ssh.load() + await install_app_ssh.load() await asyncio.sleep(0) - assert install_addon_ssh.state == AddonState.STARTUP + assert install_app_ssh.state == AppState.STARTUP - state_changes: list[AddonState] = [] + state_changes: list[AppState] = [] _container_events_task: asyncio.Task | None = None async def container_events(): nonlocal state_changes - await install_addon_ssh.container_state_changed( + await install_app_ssh.container_state_changed( _create_test_event(f"addon_{TEST_ADDON_SLUG}", ContainerState.STOPPED) ) - state_changes.append(install_addon_ssh.state) + state_changes.append(install_app_ssh.state) - await install_addon_ssh.container_state_changed( + await install_app_ssh.container_state_changed( _create_test_event(f"addon_{TEST_ADDON_SLUG}", ContainerState.RUNNING) ) - state_changes.append(install_addon_ssh.state) + state_changes.append(install_app_ssh.state) await asyncio.sleep(0) - await install_addon_ssh.container_state_changed( + await install_app_ssh.container_state_changed( _create_test_event(f"addon_{TEST_ADDON_SLUG}", ContainerState.HEALTHY) ) @@ -223,20 +223,20 @@ async def container_events_task(*args, **kwargs): _container_events_task = asyncio.create_task(container_events()) with ( - patch.object(AddonBuild, "is_valid", return_value=True), - patch.object(DockerAddon, "is_running", return_value=False), - patch.object(Addon, "need_build", new=PropertyMock(return_value=True)), + patch.object(AppBuild, "is_valid", return_value=True), + patch.object(DockerApp, "is_running", return_value=False), + patch.object(App, 
"need_build", new=PropertyMock(return_value=True)), patch.object( CpuArchManager, "supported", new=PropertyMock(return_value=["amd64"]) ), - patch.object(DockerAddon, "run", new=container_events_task), + patch.object(DockerApp, "run", new=container_events_task), patch.object( coresys.docker, "run_command", return_value=CommandReturn(0, ["Build successful"]), ), patch.object( - DockerAddon, "healthcheck", new=PropertyMock(return_value={"exists": True}) + DockerApp, "healthcheck", new=PropertyMock(return_value={"exists": True}) ), patch.object( type(coresys.config), @@ -246,46 +246,46 @@ async def container_events_task(*args, **kwargs): ): resp = await api_client.post("/addons/local_ssh/rebuild") - assert state_changes == [AddonState.STOPPED, AddonState.STARTUP] - assert install_addon_ssh.state == AddonState.STARTED + assert state_changes == [AppState.STOPPED, AppState.STARTUP] + assert install_app_ssh.state == AppState.STARTED assert resp.status == 200 @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") -async def test_api_addon_rebuild_force( +async def test_api_app_rebuild_force( api_client: TestClient, coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, container: DockerContainer, ): - """Test rebuilding an image-based addon with force parameter.""" + """Test rebuilding an image-based app with force parameter.""" coresys.hardware.disk.get_disk_free_space = lambda x: 5000 container.show.return_value["State"]["Status"] = "running" container.show.return_value["State"]["Running"] = True - install_addon_ssh.path_data.mkdir() + install_app_ssh.path_data.mkdir() container.show.return_value["Config"] = {"Healthcheck": "exists"} - await install_addon_ssh.load() + await install_app_ssh.load() await asyncio.sleep(0) - assert install_addon_ssh.state == AddonState.STARTUP + assert install_app_ssh.state == AppState.STARTUP - state_changes: list[AddonState] = [] + state_changes: list[AppState] = [] _container_events_task: asyncio.Task | None = None 
async def container_events(): nonlocal state_changes - await install_addon_ssh.container_state_changed( + await install_app_ssh.container_state_changed( _create_test_event(f"addon_{TEST_ADDON_SLUG}", ContainerState.STOPPED) ) - state_changes.append(install_addon_ssh.state) + state_changes.append(install_app_ssh.state) - await install_addon_ssh.container_state_changed( + await install_app_ssh.container_state_changed( _create_test_event(f"addon_{TEST_ADDON_SLUG}", ContainerState.RUNNING) ) - state_changes.append(install_addon_ssh.state) + state_changes.append(install_app_ssh.state) await asyncio.sleep(0) - await install_addon_ssh.container_state_changed( + await install_app_ssh.container_state_changed( _create_test_event(f"addon_{TEST_ADDON_SLUG}", ContainerState.HEALTHY) ) @@ -293,12 +293,12 @@ async def container_events_task(*args, **kwargs): nonlocal _container_events_task _container_events_task = asyncio.create_task(container_events()) - # Test 1: Without force, image-based addon should fail + # Test 1: Without force, image-based app should fail with ( - patch.object(AddonBuild, "is_valid", return_value=True), - patch.object(DockerAddon, "is_running", return_value=False), + patch.object(AppBuild, "is_valid", return_value=True), + patch.object(DockerApp, "is_running", return_value=False), patch.object( - Addon, "need_build", new=PropertyMock(return_value=False) + App, "need_build", new=PropertyMock(return_value=False) ), # Image-based patch.object( CpuArchManager, "supported", new=PropertyMock(return_value=["amd64"]) @@ -313,24 +313,24 @@ async def container_events_task(*args, **kwargs): # Reset state for next test state_changes.clear() - # Test 2: With force=True, image-based addon should succeed + # Test 2: With force=True, image-based app should succeed with ( - patch.object(AddonBuild, "is_valid", return_value=True), - patch.object(DockerAddon, "is_running", return_value=False), + patch.object(AppBuild, "is_valid", return_value=True), + patch.object(DockerApp, 
"is_running", return_value=False), patch.object( - Addon, "need_build", new=PropertyMock(return_value=False) + App, "need_build", new=PropertyMock(return_value=False) ), # Image-based patch.object( CpuArchManager, "supported", new=PropertyMock(return_value=["amd64"]) ), - patch.object(DockerAddon, "run", new=container_events_task), + patch.object(DockerApp, "run", new=container_events_task), patch.object( coresys.docker, "run_command", return_value=CommandReturn(0, ["Build successful"]), ), patch.object( - DockerAddon, "healthcheck", new=PropertyMock(return_value={"exists": True}) + DockerApp, "healthcheck", new=PropertyMock(return_value={"exists": True}) ), patch.object( type(coresys.config), @@ -340,58 +340,54 @@ async def container_events_task(*args, **kwargs): ): resp = await api_client.post("/addons/local_ssh/rebuild", json={"force": True}) - assert state_changes == [AddonState.STOPPED, AddonState.STARTUP] - assert install_addon_ssh.state == AddonState.STARTED + assert state_changes == [AppState.STOPPED, AppState.STARTUP] + assert install_app_ssh.state == AppState.STARTED assert resp.status == 200 await _container_events_task @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") -async def test_api_addon_uninstall( - api_client: TestClient, coresys: CoreSys, install_addon_example: Addon +async def test_api_app_uninstall( + api_client: TestClient, coresys: CoreSys, install_app_example: App ): """Test uninstall.""" - install_addon_example.data["map"].append( - {"type": "addon_config", "read_only": False} - ) - install_addon_example.path_config.mkdir() - (test_file := install_addon_example.path_config / "test.txt").touch() + install_app_example.data["map"].append({"type": "addon_config", "read_only": False}) + install_app_example.path_config.mkdir() + (test_file := install_app_example.path_config / "test.txt").touch() resp = await api_client.post("/addons/local_example/uninstall") assert resp.status == 200 - assert not coresys.addons.get("local_example", 
local_only=True) + assert not coresys.apps.get("local_example", local_only=True) assert test_file.exists() @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") -async def test_api_addon_uninstall_remove_config( - api_client: TestClient, coresys: CoreSys, install_addon_example: Addon +async def test_api_app_uninstall_remove_config( + api_client: TestClient, coresys: CoreSys, install_app_example: App ): """Test uninstall and remove config.""" - install_addon_example.data["map"].append( - {"type": "addon_config", "read_only": False} - ) - (test_folder := install_addon_example.path_config).mkdir() - (install_addon_example.path_config / "test.txt").touch() + install_app_example.data["map"].append({"type": "addon_config", "read_only": False}) + (test_folder := install_app_example.path_config).mkdir() + (install_app_example.path_config / "test.txt").touch() resp = await api_client.post( "/addons/local_example/uninstall", json={"remove_config": True} ) assert resp.status == 200 - assert not coresys.addons.get("local_example", local_only=True) + assert not coresys.apps.get("local_example", local_only=True) assert not test_folder.exists() @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") -async def test_api_addon_system_managed( +async def test_api_app_system_managed( api_client: TestClient, coresys: CoreSys, - install_addon_example: Addon, + install_app_example: App, caplog: pytest.LogCaptureFixture, ): - """Test setting system managed for an addon.""" - install_addon_example.data["ingress"] = False + """Test setting system managed for an app.""" + install_app_example.data["ingress"] = False # Not system managed resp = await api_client.get("/addons") @@ -405,13 +401,13 @@ async def test_api_addon_system_managed( assert body["data"]["system_managed_config_entry"] is None # Mark as system managed - coresys.addons.data.save_data.reset_mock() + coresys.apps.data.save_data.reset_mock() resp = await api_client.post( "/addons/local_example/sys_options", 
json={"system_managed": True, "system_managed_config_entry": "abc123"}, ) assert resp.status == 200 - coresys.addons.data.save_data.assert_called_once() + coresys.apps.data.save_data.assert_called_once() resp = await api_client.get("/addons") body = await resp.json() @@ -423,13 +419,13 @@ async def test_api_addon_system_managed( assert body["data"]["system_managed_config_entry"] == "abc123" # Revert. Log that cannot have a config entry if not system managed - coresys.addons.data.save_data.reset_mock() + coresys.apps.data.save_data.reset_mock() resp = await api_client.post( "/addons/local_example/sys_options", json={"system_managed": False, "system_managed_config_entry": "abc123"}, ) assert resp.status == 200 - coresys.addons.data.save_data.assert_called_once() + coresys.apps.data.save_data.assert_called_once() assert "Ignoring system managed config entry" in caplog.text resp = await api_client.get("/addons") @@ -442,11 +438,11 @@ async def test_api_addon_system_managed( assert body["data"]["system_managed_config_entry"] is None -async def test_addon_options_boot_mode_manual_only_invalid( - api_client: TestClient, install_addon_example: Addon +async def test_app_options_boot_mode_manual_only_invalid( + api_client: TestClient, install_app_example: App ): """Test changing boot mode is invalid if set to manual only.""" - install_addon_example.data["ingress"] = False + install_app_example.data["ingress"] = False resp = await api_client.get("/addons/local_example/info") assert resp.status == 200 body = await resp.json() @@ -496,10 +492,10 @@ async def get_message(resp: ClientResponse, json_expected: bool) -> str: ("get", "/addons/bad/logs/boots/1/follow", False), ], ) -async def test_addon_not_found( +async def test_app_not_found( api_client: TestClient, method: str, url: str, json_expected: bool ): - """Test addon not found error.""" + """Test app not found error.""" resp = await api_client.request(method, url) assert resp.status == 404 assert await get_message(resp, 
json_expected) == "App bad does not exist" @@ -526,34 +522,32 @@ async def test_addon_not_found( ], ) @pytest.mark.usefixtures("test_repository") -async def test_addon_not_installed( +async def test_app_not_installed( api_client: TestClient, method: str, url: str, json_expected: bool ): - """Test addon not installed error.""" + """Test app not installed error.""" resp = await api_client.request(method, url) assert resp.status == 400 assert await get_message(resp, json_expected) == "App is not installed" -async def test_addon_set_options(api_client: TestClient, install_addon_example: Addon): - """Test setting options for an addon.""" +async def test_app_set_options(api_client: TestClient, install_app_example: App): + """Test setting options for an app.""" resp = await api_client.post( "/addons/local_example/options", json={"options": {"message": "test"}} ) assert resp.status == 200 - assert install_addon_example.options == {"message": "test"} + assert install_app_example.options == {"message": "test"} -async def test_addon_reset_options( - api_client: TestClient, install_addon_example: Addon -): - """Test resetting options for an addon to defaults. +async def test_app_reset_options(api_client: TestClient, install_app_example: App): + """Test resetting options for an app to defaults. Fixes SUPERVISOR-171F. 
""" # First set some custom options - install_addon_example.options = {"message": "custom"} - assert install_addon_example.persist["options"] == {"message": "custom"} + install_app_example.options = {"message": "custom"} + assert install_app_example.persist["options"] == {"message": "custom"} # Reset to defaults by sending null resp = await api_client.post( @@ -562,12 +556,12 @@ async def test_addon_reset_options( assert resp.status == 200 # Persisted options should be empty (meaning defaults will be used) - assert install_addon_example.persist["options"] == {} + assert install_app_example.persist["options"] == {} -@pytest.mark.usefixtures("install_addon_example") -async def test_addon_set_options_error(api_client: TestClient): - """Test setting options for an addon.""" +@pytest.mark.usefixtures("install_app_example") +async def test_app_set_options_error(api_client: TestClient): + """Test setting options for an app.""" resp = await api_client.post( "/addons/local_example/options", json={"options": {"message": True}} ) @@ -584,13 +578,13 @@ async def test_addon_set_options_error(api_client: TestClient): } -async def test_addon_start_options_error( +async def test_app_start_options_error( api_client: TestClient, - install_addon_example: Addon, + install_app_example: App, caplog: pytest.LogCaptureFixture, ): - """Test error writing options when trying to start addon.""" - install_addon_example.options = {"message": "hello"} + """Test error writing options when trying to start app.""" + install_app_example.options = {"message": "hello"} # Simulate OS error trying to write the file with patch("supervisor.utils.json.atomic_write", side_effect=OSError("fail")): @@ -609,7 +603,7 @@ async def test_addon_start_options_error( # Simulate an update with a breaking change for options schema creating failure on start caplog.clear() - install_addon_example.data["schema"] = {"message": "bool"} + install_app_example.data["schema"] = {"message": "bool"} resp = await 
api_client.post("/addons/local_example/start") assert resp.status == 400 body = await resp.json() @@ -629,12 +623,10 @@ async def test_addon_start_options_error( @pytest.mark.parametrize(("method", "action"), [("get", "stats"), ("post", "stdin")]) -@pytest.mark.usefixtures("install_addon_example") -async def test_addon_not_running_error( - api_client: TestClient, method: str, action: str -): - """Test addon not running error for endpoints that require that.""" - with patch.object(Addon, "with_stdin", new=PropertyMock(return_value=True)): +@pytest.mark.usefixtures("install_app_example") +async def test_app_not_running_error(api_client: TestClient, method: str, action: str): + """Test app not running error for endpoints that require that.""" + with patch.object(App, "with_stdin", new=PropertyMock(return_value=True)): resp = await api_client.request(method, f"/addons/local_example/{action}") assert resp.status == 400 @@ -644,9 +636,9 @@ async def test_addon_not_running_error( assert body["extra_fields"] == {"addon": "local_example"} -@pytest.mark.usefixtures("install_addon_example") -async def test_addon_write_stdin_not_supported_error(api_client: TestClient): - """Test error when trying to write stdin to addon that does not support it.""" +@pytest.mark.usefixtures("install_app_example") +async def test_app_write_stdin_not_supported_error(api_client: TestClient): + """Test error when trying to write stdin to app that does not support it.""" resp = await api_client.post("/addons/local_example/stdin") assert resp.status == 400 body = await resp.json() @@ -655,9 +647,9 @@ async def test_addon_write_stdin_not_supported_error(api_client: TestClient): assert body["extra_fields"] == {"addon": "local_example"} -@pytest.mark.usefixtures("install_addon_ssh") -async def test_addon_rebuild_fails_error(api_client: TestClient, coresys: CoreSys): - """Test error when build fails during rebuild for addon.""" +@pytest.mark.usefixtures("install_app_ssh") +async def 
test_app_rebuild_fails_error(api_client: TestClient, coresys: CoreSys): + """Test error when build fails during rebuild for app.""" coresys.hardware.disk.get_disk_free_space = lambda x: 5000 coresys.docker.containers.create.side_effect = aiodocker.DockerError( 500, {"message": "fail"} @@ -672,9 +664,7 @@ async def test_addon_rebuild_fails_error(api_client: TestClient, coresys: CoreSy patch.object( CpuArchManager, "default", new=PropertyMock(return_value=CpuArch.AARCH64) ), - patch.object( - AddonBuild, "get_docker_args", return_value={"command": ["build"]} - ), + patch.object(AppBuild, "get_docker_args", return_value={"command": ["build"]}), ): resp = await api_client.post("/addons/local_ssh/rebuild") assert resp.status == 500 diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index 132dbab548f..dcabceb2dab 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -7,7 +7,7 @@ from aiohttp.test_utils import TestClient import pytest -from supervisor.addons.addon import Addon +from supervisor.addons.addon import App from supervisor.coresys import CoreSys from supervisor.exceptions import HomeAssistantAPIError, HomeAssistantWSError from supervisor.homeassistant.api import HomeAssistantAPI @@ -196,7 +196,7 @@ async def test_list_users_ws_error( async def test_auth_json_success( api_client: TestClient, mock_check_login: AsyncMock, - install_addon_ssh: Addon, + install_app_ssh: App, field: str, ): """Test successful JSON auth.""" @@ -216,7 +216,7 @@ async def test_auth_json_success( async def test_auth_json_failure_none( api_client: TestClient, mock_check_login: AsyncMock, - install_addon_ssh: Addon, + install_app_ssh: App, user: str | None, password: str | None, ): @@ -235,7 +235,7 @@ async def test_auth_json_failure_none( @pytest.mark.parametrize("api_client", [TEST_ADDON_SLUG], indirect=True) async def test_auth_json_invalid_credentials( - api_client: TestClient, mock_check_login: AsyncMock, install_addon_ssh: Addon + api_client: TestClient, 
mock_check_login: AsyncMock, install_app_ssh: App ): """Test failed JSON auth due to invalid credentials.""" mock_check_login.return_value = False @@ -247,7 +247,7 @@ async def test_auth_json_invalid_credentials( @pytest.mark.parametrize("api_client", [TEST_ADDON_SLUG], indirect=True) -async def test_auth_json_empty_body(api_client: TestClient, install_addon_ssh: Addon): +async def test_auth_json_empty_body(api_client: TestClient, install_app_ssh: App): """Test JSON auth with empty body.""" resp = await api_client.post( "/auth", data="", headers={"Content-Type": "application/json"} @@ -256,7 +256,7 @@ async def test_auth_json_empty_body(api_client: TestClient, install_addon_ssh: A @pytest.mark.parametrize("api_client", [TEST_ADDON_SLUG], indirect=True) -async def test_auth_json_invalid_json(api_client: TestClient, install_addon_ssh: Addon): +async def test_auth_json_invalid_json(api_client: TestClient, install_app_ssh: App): """Test JSON auth with malformed JSON.""" resp = await api_client.post( "/auth", data="{not json}", headers={"Content-Type": "application/json"} @@ -266,7 +266,7 @@ async def test_auth_json_invalid_json(api_client: TestClient, install_addon_ssh: @pytest.mark.parametrize("api_client", [TEST_ADDON_SLUG], indirect=True) async def test_auth_urlencoded_success( - api_client: TestClient, mock_check_login: AsyncMock, install_addon_ssh: Addon + api_client: TestClient, mock_check_login: AsyncMock, install_app_ssh: App ): """Test successful URL-encoded auth.""" mock_check_login.return_value = True @@ -280,7 +280,7 @@ async def test_auth_urlencoded_success( @pytest.mark.parametrize("api_client", [TEST_ADDON_SLUG], indirect=True) async def test_auth_urlencoded_failure( - api_client: TestClient, mock_check_login: AsyncMock, install_addon_ssh: Addon + api_client: TestClient, mock_check_login: AsyncMock, install_app_ssh: App ): """Test URL-encoded auth with invalid credentials.""" mock_check_login.return_value = False @@ -295,7 +295,7 @@ async def 
test_auth_urlencoded_failure( @pytest.mark.parametrize("api_client", [TEST_ADDON_SLUG], indirect=True) async def test_auth_unsupported_content_type( - api_client: TestClient, install_addon_ssh: Addon + api_client: TestClient, install_app_ssh: App ): """Test auth with unsupported content type.""" resp = await api_client.post( @@ -307,7 +307,7 @@ async def test_auth_unsupported_content_type( @pytest.mark.parametrize("api_client", [TEST_ADDON_SLUG], indirect=True) async def test_auth_basic_auth( - api_client: TestClient, mock_check_login: AsyncMock, install_addon_ssh: Addon + api_client: TestClient, mock_check_login: AsyncMock, install_app_ssh: App ): """Test auth with BasicAuth header.""" mock_check_login.return_value = True @@ -319,7 +319,7 @@ async def test_auth_basic_auth( @pytest.mark.parametrize("api_client", [TEST_ADDON_SLUG], indirect=True) async def test_auth_basic_auth_failure( - api_client: TestClient, mock_check_login: AsyncMock, install_addon_ssh: Addon + api_client: TestClient, mock_check_login: AsyncMock, install_app_ssh: App ): """Test auth with BasicAuth header and failure.""" mock_check_login.return_value = False @@ -331,7 +331,7 @@ async def test_auth_basic_auth_failure( @pytest.mark.parametrize("api_client", [TEST_ADDON_SLUG], indirect=True) async def test_auth_bearer_token_returns_401( - api_client: TestClient, install_addon_ssh: Addon + api_client: TestClient, install_app_ssh: App ): """Test that a Bearer token in Authorization header returns 401, not 500.""" resp = await api_client.post( @@ -342,22 +342,22 @@ async def test_auth_bearer_token_returns_401( @pytest.mark.parametrize("api_client", ["local_example"], indirect=True) -async def test_auth_addon_no_auth_access( - api_client: TestClient, install_addon_example: Addon +async def test_auth_app_no_auth_access( + api_client: TestClient, install_app_example: App ): - """Test auth where add-on is not allowed to access auth API.""" + """Test auth where app is not allowed to access auth API.""" 
resp = await api_client.post("/auth", json={"username": "test", "password": "pass"}) assert resp.status == 403 -async def test_non_addon_token_no_auth_access(api_client: TestClient): - """Test auth where add-on is not allowed to access auth API.""" +async def test_non_app_token_no_auth_access(api_client: TestClient): + """Test auth where app is not allowed to access auth API.""" resp = await api_client.post("/auth", json={"username": "test", "password": "pass"}) assert resp.status == 403 @pytest.mark.parametrize("api_client", [TEST_ADDON_SLUG], indirect=True) -@pytest.mark.usefixtures("install_addon_ssh") +@pytest.mark.usefixtures("install_app_ssh") async def test_auth_backend_login_failure(api_client: TestClient): """Test backend login failure on auth.""" with ( diff --git a/tests/api/test_backups.py b/tests/api/test_backups.py index 34d9dcb486b..1fab536ae6d 100644 --- a/tests/api/test_backups.py +++ b/tests/api/test_backups.py @@ -12,14 +12,15 @@ from awesomeversion import AwesomeVersion import pytest -from supervisor.addons.addon import Addon +from supervisor.addons.addon import App from supervisor.backups.backup import Backup, BackupLocation +from supervisor.backups.manager import BackupManager from supervisor.const import CoreState from supervisor.coresys import CoreSys from supervisor.docker.manager import DockerAPI from supervisor.exceptions import ( - AddonPrePostBackupCommandReturnedError, - AddonsError, + AppPrePostBackupCommandReturnedError, + AppsError, BackupInvalidError, HomeAssistantBackupError, ) @@ -359,7 +360,7 @@ async def test_api_backup_restore_background( ), ], ) -@pytest.mark.usefixtures("install_addon_ssh", "path_extern") +@pytest.mark.usefixtures("install_app_ssh", "path_extern") async def test_api_backup_errors( api_client: TestClient, coresys: CoreSys, @@ -375,9 +376,7 @@ async def test_api_backup_errors( assert coresys.jobs.jobs == [] - with patch.object( - Addon, "backup", side_effect=(err := AddonsError("Backup error")) - ): + with 
patch.object(App, "backup", side_effect=(err := AppsError("Backup error"))): resp = await api_client.post( f"/backups/new/{backup_type}", json={"name": f"{backup_type} backup"} | options, @@ -423,7 +422,7 @@ async def test_api_backup_errors( "backup", side_effect=HomeAssistantBackupError("Backup error"), ), - patch.object(Addon, "backup"), + patch.object(App, "backup"), ): resp = await api_client.post( f"/backups/new/{backup_type}", @@ -580,7 +579,7 @@ async def test_reload( assert backup.locations == [location] -@pytest.mark.usefixtures("install_addon_ssh") +@pytest.mark.usefixtures("install_app_ssh") @pytest.mark.parametrize("api_client", [TEST_ADDON_SLUG], indirect=True) async def test_cloud_backup_core_only(api_client: TestClient, mock_full_backup: Backup): """Test only core can access cloud backup location.""" @@ -1017,21 +1016,21 @@ async def test_download_backup_from_invalid_location(api_client: TestClient): @pytest.mark.usefixtures("tmp_supervisor_data") -async def test_partial_backup_all_addons( +async def test_partial_backup_all_apps( api_client: TestClient, coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, ): """Test backup including extra metdata.""" await coresys.core.set_state(CoreState.RUNNING) coresys.hardware.disk.get_disk_free_space = lambda x: 5000 - with patch.object(Backup, "store_addons") as store_addons: + with patch.object(Backup, "store_apps") as store_apps: resp = await api_client.post( "/backups/new/partial", json={"name": "All addons test", "addons": "ALL"} ) assert resp.status == 200 - store_addons.assert_called_once_with([install_addon_ssh]) + store_apps.assert_called_once_with([install_app_ssh]) @pytest.mark.parametrize("local_location", [None, "", ".local"]) @@ -1497,7 +1496,7 @@ async def mock_wait(tasks: list[asyncio.Task], *args, **kwargs): @pytest.mark.parametrize("command", ["backup_pre", "backup_post"]) -@pytest.mark.usefixtures("install_addon_example", "tmp_supervisor_data") 
+@pytest.mark.usefixtures("install_app_example", "tmp_supervisor_data") async def test_pre_post_backup_command_error( api_client: TestClient, coresys: CoreSys, container: DockerContainer, command: str ): @@ -1509,7 +1508,7 @@ async def test_pre_post_backup_command_error( container.show.return_value["State"]["Running"] = True container.exec.return_value.inspect.return_value = {"ExitCode": 1} - with patch.object(Addon, command, new=PropertyMock(return_value="test")): + with patch.object(App, command, new=PropertyMock(return_value="test")): resp = await api_client.post( "/backups/new/partial", json={"addons": ["local_example"]} ) @@ -1525,7 +1524,7 @@ async def test_pre_post_backup_command_error( assert job assert job.done is True - assert job.errors[0].type_ == AddonPrePostBackupCommandReturnedError + assert job.errors[0].type_ == AppPrePostBackupCommandReturnedError assert job.errors[0].message == ( "Pre-/Post backup command for app local_example returned error code: " "1. Please report this to the app developer. 
Enable debug " @@ -1537,3 +1536,28 @@ async def test_pre_post_backup_command_error( "exit_code": 1, "debug_logging_command": "ha supervisor options --logging debug", } + + +async def test_restore_partial_with_addons_key( + api_client: TestClient, + coresys: CoreSys, + mock_partial_backup: Backup, +): + """Test that partial restore accepts 'addons' key in request body and remaps it to 'apps'.""" + await coresys.core.set_state(CoreState.RUNNING) + coresys.hardware.disk.get_disk_free_space = lambda x: 5000 + + with patch.object( + BackupManager, "do_restore_partial", return_value=True + ) as mock_restore: + resp = await api_client.post( + f"/backups/{mock_partial_backup.slug}/restore/partial", + json={"addons": ["local_ssh"]}, + ) + + assert resp.status == 200 + mock_restore.assert_called_once() + _, call_kwargs = mock_restore.call_args + assert "apps" in call_kwargs + assert call_kwargs["apps"] == ["local_ssh"] + assert "addons" not in call_kwargs diff --git a/tests/api/test_discovery.py b/tests/api/test_discovery.py index fc2d4850c02..ec46a77fe63 100644 --- a/tests/api/test_discovery.py +++ b/tests/api/test_discovery.py @@ -6,8 +6,8 @@ from aiohttp.test_utils import TestClient import pytest -from supervisor.addons.addon import Addon -from supervisor.const import AddonState +from supervisor.addons.addon import App +from supervisor.const import AppState from supervisor.coresys import CoreSys from supervisor.discovery import Message @@ -17,9 +17,9 @@ @pytest.mark.parametrize("api_client", ["local_ssh"], indirect=True) async def test_api_discovery_forbidden( - api_client: TestClient, caplog: pytest.LogCaptureFixture, install_addon_ssh + api_client: TestClient, caplog: pytest.LogCaptureFixture, install_app_ssh ): - """Test addon sending discovery message for an unregistered service.""" + """Test app sending discovery message for an unregistered service.""" caplog.clear() with caplog.at_level(logging.ERROR): @@ -38,13 +38,13 @@ async def test_api_discovery_forbidden( 
@pytest.mark.parametrize( - "skip_state", [AddonState.ERROR, AddonState.STOPPED, AddonState.STARTUP] + "skip_state", [AppState.ERROR, AppState.STOPPED, AppState.STARTUP] ) async def test_api_list_discovery( api_client: TestClient, coresys: CoreSys, - install_addon_ssh: Addon, - skip_state: AddonState, + install_app_ssh: App, + skip_state: AppState, ): """Test listing discovery messages only returns ones for healthy services.""" with ( @@ -62,7 +62,7 @@ async def test_api_list_discovery( Message(addon="local_ssh", service="adguard", config=ANY, uuid=ANY), ] - install_addon_ssh.state = AddonState.STARTED + install_app_ssh.state = AppState.STARTED resp = await api_client.get("/discovery") assert resp.status == 200 result = await resp.json() @@ -75,7 +75,7 @@ async def test_api_list_discovery( } ] - install_addon_ssh.state = skip_state + install_app_ssh.state = skip_state resp = await api_client.get("/discovery") assert resp.status == 200 result = await resp.json() @@ -86,11 +86,11 @@ async def test_api_list_discovery( async def test_api_send_del_discovery( api_client: TestClient, coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, websession: MagicMock, ): """Test adding and removing discovery.""" - install_addon_ssh.data["discovery"] = ["test"] + install_app_ssh.data["discovery"] = ["test"] coresys.homeassistant.api.ensure_access_token = AsyncMock() resp = await api_client.post("/discovery", json={"service": "test", "config": {}}) @@ -133,9 +133,9 @@ async def test_api_send_del_discovery( @pytest.mark.parametrize("api_client", [TEST_ADDON_SLUG], indirect=True) -async def test_api_invalid_discovery(api_client: TestClient, install_addon_ssh: Addon): +async def test_api_invalid_discovery(api_client: TestClient, install_app_ssh: App): """Test invalid discovery messages.""" - install_addon_ssh.data["discovery"] = ["test"] + install_app_ssh.data["discovery"] = ["test"] resp = await api_client.post("/discovery", json={"service": "test"}) assert resp.status 
== 400 diff --git a/tests/api/test_host.py b/tests/api/test_host.py index b307b712261..499ee2bdcf9 100644 --- a/tests/api/test_host.py +++ b/tests/api/test_host.py @@ -425,7 +425,7 @@ async def test_disk_usage_api(api_client: TestClient, coresys: CoreSys): mock_dir_sizes.return_value = [ { "id": "addons_data", - "label": "Addons Data", + "label": "Apps Data", "used_bytes": 100000000, "children": [ {"id": "addon1", "label": "addon1", "used_bytes": 50000000} @@ -433,7 +433,7 @@ async def test_disk_usage_api(api_client: TestClient, coresys: CoreSys): }, { "id": "addons_config", - "label": "Addons Config", + "label": "Apps Config", "used_bytes": 200000000, "children": [ {"id": "media1", "label": "media1", "used_bytes": 100000000} @@ -542,8 +542,8 @@ async def test_disk_usage_api(api_client: TestClient, coresys: CoreSys): call_args = mock_dir_sizes.call_args assert call_args[0][1] == 1 # max_depth parameter paths_dict = call_args[0][0] # paths dictionary - assert paths_dict["addons_data"] == coresys.config.path_addons_data - assert paths_dict["addons_config"] == coresys.config.path_addon_configs + assert paths_dict["addons_data"] == coresys.config.path_apps_data + assert paths_dict["addons_config"] == coresys.config.path_app_configs assert paths_dict["media"] == coresys.config.path_media assert paths_dict["share"] == coresys.config.path_share assert paths_dict["backup"] == coresys.config.path_backup @@ -565,7 +565,7 @@ async def test_disk_usage_api_with_custom_depth( mock_dir_sizes.return_value = [ { "id": "addons_data", - "label": "Addons Data", + "label": "Apps Data", "used_bytes": 100000000, "children": [ { @@ -584,7 +584,7 @@ async def test_disk_usage_api_with_custom_depth( }, { "id": "addons_config", - "label": "Addons Config", + "label": "Apps Config", "used_bytes": 100000000, "children": [ { @@ -721,12 +721,12 @@ async def test_disk_usage_api_invalid_depth(api_client: TestClient, coresys: Cor mock_dir_sizes.return_value = [ { "id": "addons_data", - "label": 
"Addons Data", + "label": "Apps Data", "used_bytes": 100000000, }, { "id": "addons_config", - "label": "Addons Config", + "label": "Apps Config", "used_bytes": 100000000, }, { @@ -783,12 +783,12 @@ async def test_disk_usage_api_empty_directories( mock_dir_sizes.return_value = [ { "id": "addons_data", - "label": "Addons Data", + "label": "Apps Data", "used_bytes": 0, }, { "id": "addons_config", - "label": "Addons Config", + "label": "Apps Config", "used_bytes": 0, }, { diff --git a/tests/api/test_ingress.py b/tests/api/test_ingress.py index 8724ce0c8d0..c78da43201b 100644 --- a/tests/api/test_ingress.py +++ b/tests/api/test_ingress.py @@ -8,7 +8,7 @@ from aiohttp.test_utils import TestClient, TestServer import pytest -from supervisor.addons.addon import Addon +from supervisor.addons.addon import App from supervisor.coresys import CoreSys @@ -107,9 +107,9 @@ async def test_ingress_proxy_no_content_type_for_empty_body_responses( ): """Test that empty body responses don't get Content-Type header.""" - # Create a mock add-on backend server that returns various status codes - async def mock_addon_handler(request: web.Request) -> web.Response: - """Mock add-on handler that returns different status codes based on path.""" + # Create a mock app backend server that returns various status codes + async def mock_app_handler(request: web.Request) -> web.Response: + """Mock app handler that returns different status codes based on path.""" path = request.path if path == "/204": @@ -138,11 +138,11 @@ async def mock_addon_handler(request: web.Request) -> web.Response: else: return web.Response(body=b"default", content_type="text/html") - # Create test server for mock add-on + # Create test server for mock app app = web.Application() - app.router.add_route("*", "/{tail:.*}", mock_addon_handler) - addon_server = TestServer(app) - await addon_server.start_server() + app.router.add_route("*", "/{tail:.*}", mock_app_handler) + app_server = TestServer(app) + await 
app_server.start_server() try: # Create ingress session @@ -150,16 +150,16 @@ async def mock_addon_handler(request: web.Request) -> web.Response: result = await resp.json() session = result["data"]["session"] - # Create a mock add-on - mock_addon = MagicMock(spec=Addon) - mock_addon.slug = "test_addon" - mock_addon.ip_address = addon_server.host - mock_addon.ingress_port = addon_server.port - mock_addon.ingress_stream = False + # Create a mock app + mock_app = MagicMock(spec=App) + mock_app.slug = "test_addon" + mock_app.ip_address = app_server.host + mock_app.ingress_port = app_server.port + mock_app.ingress_stream = False - # Generate an ingress token and register the add-on + # Generate an ingress token and register the app ingress_token = coresys.ingress.create_session() - with patch.object(coresys.ingress, "get", return_value=mock_addon): + with patch.object(coresys.ingress, "get", return_value=mock_app): # Test 204 No Content - should NOT have Content-Type resp = await api_client.get( f"/ingress/{ingress_token}/204", @@ -222,4 +222,4 @@ async def mock_addon_handler(request: web.Request) -> web.Response: assert body == b'{"key": "value"}' finally: - await addon_server.close() + await app_server.close() diff --git a/tests/api/test_proxy.py b/tests/api/test_proxy.py index 56824ffcbc5..9bb9d95248f 100644 --- a/tests/api/test_proxy.py +++ b/tests/api/test_proxy.py @@ -14,7 +14,7 @@ from aiohttp.test_utils import TestClient import pytest -from supervisor.addons.addon import Addon +from supervisor.addons.addon import App from supervisor.api.proxy import APIProxy from supervisor.const import ATTR_ACCESS_TOKEN from supervisor.homeassistant.api import HomeAssistantAPI @@ -128,12 +128,12 @@ def _send_json_auto_id(data: dict[str, Any]) -> Coroutine[Any, Any, None]: async def test_proxy_message( proxy_ws_client: WebSocketGenerator, ha_ws_server: MockHAServerWebSocket, - install_addon_ssh: Addon, + install_app_ssh: App, ): """Test proxy a message to and from Home 
Assistant.""" - install_addon_ssh.persist[ATTR_ACCESS_TOKEN] = "abc123" + install_app_ssh.persist[ATTR_ACCESS_TOKEN] = "abc123" client: MockHAClientWebSocket = await proxy_ws_client( - install_addon_ssh.supervisor_token + install_app_ssh.supervisor_token ) await client.send_json_auto_id({"hello": "world"}) @@ -150,12 +150,12 @@ async def test_proxy_message( async def test_proxy_binary_message( proxy_ws_client: WebSocketGenerator, ha_ws_server: MockHAServerWebSocket, - install_addon_ssh: Addon, + install_app_ssh: App, ): """Test proxy a binary message to and from Home Assistant.""" - install_addon_ssh.persist[ATTR_ACCESS_TOKEN] = "abc123" + install_app_ssh.persist[ATTR_ACCESS_TOKEN] = "abc123" client: MockHAClientWebSocket = await proxy_ws_client( - install_addon_ssh.supervisor_token + install_app_ssh.supervisor_token ) await client.send_bytes(b"hello world") @@ -172,12 +172,12 @@ async def test_proxy_binary_message( async def test_proxy_large_message( proxy_ws_client: WebSocketGenerator, ha_ws_server: MockHAServerWebSocket, - install_addon_ssh: Addon, + install_app_ssh: App, ): """Test too large message handled gracefully.""" - install_addon_ssh.persist[ATTR_ACCESS_TOKEN] = "abc123" + install_app_ssh.persist[ATTR_ACCESS_TOKEN] = "abc123" client: MockHAClientWebSocket = await proxy_ws_client( - install_addon_ssh.supervisor_token + install_app_ssh.supervisor_token ) # Test message over size limit of 4MB @@ -191,10 +191,10 @@ async def test_proxy_large_message( @pytest.mark.parametrize("auth_token", ["abc123", "bad"]) async def test_proxy_invalid_auth( - api_client: TestClient, install_addon_example: Addon, auth_token: str + api_client: TestClient, install_app_example: App, auth_token: str ): - """Test invalid access token or addon with no access.""" - install_addon_example.persist[ATTR_ACCESS_TOKEN] = "abc123" + """Test invalid access token or app with no access.""" + install_app_example.persist[ATTR_ACCESS_TOKEN] = "abc123" websocket = await 
api_client.ws_connect("/core/websocket") auth_resp = await websocket.receive_json() assert auth_resp["type"] == "auth_required" @@ -207,11 +207,11 @@ async def test_proxy_invalid_auth( async def test_proxy_auth_abort_log( api_client: TestClient, - install_addon_example: Addon, + install_app_example: App, caplog: pytest.LogCaptureFixture, ): """Test WebSocket closed during authentication gets logged.""" - install_addon_example.persist[ATTR_ACCESS_TOKEN] = "abc123" + install_app_example.persist[ATTR_ACCESS_TOKEN] = "abc123" websocket = await api_client.ws_connect("/core/websocket") auth_resp = await websocket.receive_json() assert auth_resp["type"] == "auth_required" @@ -226,13 +226,13 @@ async def test_proxy_auth_abort_log( @pytest.mark.parametrize("path", ["", "mock_path"]) async def test_api_proxy_get_request( api_client: TestClient, - install_addon_example: Addon, + install_app_example: App, request: pytest.FixtureRequest, path: str, ): """Test the API proxy request using patch for make_request.""" - install_addon_example.persist[ATTR_ACCESS_TOKEN] = "abc123" - install_addon_example.data["homeassistant_api"] = True + install_app_example.persist[ATTR_ACCESS_TOKEN] = "abc123" + install_app_example.data["homeassistant_api"] = True request.param = "local_example" @@ -261,13 +261,13 @@ async def test_api_proxy_get_request( ) async def test_api_proxy_post_request( api_client: TestClient, - install_addon_example: Addon, + install_app_example: App, request: pytest.FixtureRequest, path: str, ): """Test the API proxy POST request.""" - install_addon_example.persist[ATTR_ACCESS_TOKEN] = "abc123" - install_addon_example.data["homeassistant_api"] = True + install_app_example.persist[ATTR_ACCESS_TOKEN] = "abc123" + install_app_example.data["homeassistant_api"] = True request.param = "local_example" @@ -298,13 +298,13 @@ async def test_api_proxy_post_request( ) async def test_api_proxy_delete_request( api_client: TestClient, - install_addon_example: Addon, + 
install_app_example: App, request: pytest.FixtureRequest, path: str, ): """Test the API proxy DELETE request.""" - install_addon_example.persist[ATTR_ACCESS_TOKEN] = "abc123" - install_addon_example.data["homeassistant_api"] = True + install_app_example.persist[ATTR_ACCESS_TOKEN] = "abc123" + install_app_example.data["homeassistant_api"] = True request.param = "local_example" @@ -330,11 +330,11 @@ async def test_api_proxy_delete_request( async def test_api_proxy_mcp_headers_forwarded( api_client: TestClient, - install_addon_example: Addon, + install_app_example: App, ): """Test that MCP headers are forwarded to Home Assistant.""" - install_addon_example.persist[ATTR_ACCESS_TOKEN] = "abc123" - install_addon_example.data["homeassistant_api"] = True + install_app_example.persist[ATTR_ACCESS_TOKEN] = "abc123" + install_app_example.data["homeassistant_api"] = True with patch.object(HomeAssistantAPI, "make_request") as make_request: # Mock the response from make_request @@ -369,11 +369,11 @@ async def test_api_proxy_mcp_headers_forwarded( async def test_api_proxy_streaming_response( api_client: TestClient, - install_addon_example: Addon, + install_app_example: App, ): """Test that streaming responses (text/event-stream) are handled properly.""" - install_addon_example.persist[ATTR_ACCESS_TOKEN] = "abc123" - install_addon_example.data["homeassistant_api"] = True + install_app_example.persist[ATTR_ACCESS_TOKEN] = "abc123" + install_app_example.data["homeassistant_api"] = True async def mock_content_iter(): """Mock async iterator for streaming content.""" @@ -416,11 +416,11 @@ async def mock_content_iter(): async def test_api_proxy_streaming_response_client_payload_error( api_client: TestClient, - install_addon_example: Addon, + install_app_example: App, ): """Test that client payload errors during streaming are handled gracefully.""" - install_addon_example.persist[ATTR_ACCESS_TOKEN] = "abc123" - install_addon_example.data["homeassistant_api"] = True + 
install_app_example.persist[ATTR_ACCESS_TOKEN] = "abc123" + install_app_example.data["homeassistant_api"] = True async def mock_content_iter_error(): yield b"data: event1\n\n" diff --git a/tests/api/test_root.py b/tests/api/test_root.py index 3cc93e3f244..8e1e8332199 100644 --- a/tests/api/test_root.py +++ b/tests/api/test_root.py @@ -25,13 +25,13 @@ async def test_api_info(api_client): async def test_api_available_updates( - install_addon_ssh, + install_app_ssh, api_client, coresys: CoreSys, ): """Test available_updates.""" - installed_addon = coresys.addons.get(TEST_ADDON_SLUG) - installed_addon.persist["version"] = "1.2.3" + installed_app = coresys.apps.get(TEST_ADDON_SLUG) + installed_app.persist["version"] = "1.2.3" async def available_updates(): return (await (await api_client.get("/available_updates")).json())["data"][ diff --git a/tests/api/test_store.py b/tests/api/test_store.py index 9c0e2cd6754..2286aa61248 100644 --- a/tests/api/test_store.py +++ b/tests/api/test_store.py @@ -9,13 +9,13 @@ from awesomeversion import AwesomeVersion import pytest -from supervisor.addons.addon import Addon +from supervisor.addons.addon import App from supervisor.arch import CpuArchManager from supervisor.backups.manager import BackupManager from supervisor.config import CoreConfig -from supervisor.const import AddonState, CoreState +from supervisor.const import AppState, CoreState from supervisor.coresys import CoreSys -from supervisor.docker.addon import DockerAddon +from supervisor.docker.addon import DockerApp from supervisor.docker.const import ContainerState from supervisor.docker.interface import DockerInterface from supervisor.docker.monitor import DockerContainerStateEvent @@ -24,7 +24,7 @@ from supervisor.homeassistant.module import HomeAssistant from supervisor.resolution.const import ContextType, IssueType, SuggestionType from supervisor.resolution.data import Issue, Suggestion -from supervisor.store.addon import AddonStore +from supervisor.store.addon import 
AppStore from supervisor.store.repository import Repository from tests.common import AsyncIterator, load_json_fixture @@ -36,7 +36,7 @@ @pytest.mark.asyncio async def test_api_store( api_client: TestClient, - store_addon: AddonStore, + store_app: AppStore, test_repository: Repository, caplog: pytest.LogCaptureFixture, ): @@ -44,37 +44,35 @@ async def test_api_store( resp = await api_client.get("/store") result = await resp.json() - assert result["data"]["addons"][-1]["slug"] == store_addon.slug + assert result["data"]["addons"][-1]["slug"] == store_app.slug assert result["data"]["repositories"][-1]["slug"] == test_repository.slug - assert f"App {store_addon.slug} not supported on this platform" not in caplog.text + assert f"App {store_app.slug} not supported on this platform" not in caplog.text @pytest.mark.asyncio -async def test_api_store_addons(api_client: TestClient, store_addon: AddonStore): - """Test /store/addons REST API.""" +async def test_api_store_apps(api_client: TestClient, store_app: AppStore): + """Test /store/apps REST API.""" resp = await api_client.get("/store/addons") result = await resp.json() - assert result["data"]["addons"][-1]["slug"] == store_addon.slug + assert result["data"]["addons"][-1]["slug"] == store_app.slug @pytest.mark.asyncio -async def test_api_store_addons_addon(api_client: TestClient, store_addon: AddonStore): - """Test /store/addons/{addon} REST API.""" - resp = await api_client.get(f"/store/addons/{store_addon.slug}") +async def test_api_store_apps_app(api_client: TestClient, store_app: AppStore): + """Test /store/apps/{app} REST API.""" + resp = await api_client.get(f"/store/addons/{store_app.slug}") result = await resp.json() - assert result["data"]["slug"] == store_addon.slug + assert result["data"]["slug"] == store_app.slug @pytest.mark.asyncio -async def test_api_store_addons_addon_version( - api_client: TestClient, store_addon: AddonStore -): - """Test /store/addons/{addon}/{version} REST API.""" - resp = await 
api_client.get(f"/store/addons/{store_addon.slug}/1.0.0") +async def test_api_store_apps_app_version(api_client: TestClient, store_app: AppStore): + """Test /store/apps/{app}/{version} REST API.""" + resp = await api_client.get(f"/store/addons/{store_app.slug}/1.0.0") result = await resp.json() - assert result["data"]["slug"] == store_addon.slug + assert result["data"]["slug"] == store_app.slug @pytest.mark.asyncio @@ -204,32 +202,32 @@ async def test_api_store_repair_repository_git_error( async def test_api_store_update_healthcheck( api_client: TestClient, coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, container: DockerContainer, ): - """Test updating an addon with healthcheck waits for health status.""" + """Test updating an app with healthcheck waits for health status.""" coresys.hardware.disk.get_disk_free_space = lambda x: 5000 container.show.return_value["State"]["Status"] = "running" container.show.return_value["State"]["Running"] = True container.show.return_value["Config"] = {"Healthcheck": "exists"} - install_addon_ssh.path_data.mkdir() - await install_addon_ssh.load() + install_app_ssh.path_data.mkdir() + await install_app_ssh.load() with patch( "supervisor.store.data.read_json_or_yaml_file", return_value=load_json_fixture("addon-config-add-image.json"), ): await coresys.store.data.update() - assert install_addon_ssh.need_update is True + assert install_app_ssh.need_update is True - state_changes: list[AddonState] = [] + state_changes: list[AppState] = [] _container_events_task: asyncio.Task | None = None async def container_events(): nonlocal state_changes await asyncio.sleep(0.01) - await install_addon_ssh.container_state_changed( + await install_app_ssh.container_state_changed( DockerContainerStateEvent( name=f"addon_{TEST_ADDON_SLUG}", state=ContainerState.STOPPED, @@ -238,8 +236,8 @@ async def container_events(): ) ) - state_changes.append(install_addon_ssh.state) - await install_addon_ssh.container_state_changed( + 
state_changes.append(install_app_ssh.state) + await install_app_ssh.container_state_changed( DockerContainerStateEvent( name=f"addon_{TEST_ADDON_SLUG}", state=ContainerState.RUNNING, @@ -248,8 +246,8 @@ async def container_events(): ) ) - state_changes.append(install_addon_ssh.state) - await install_addon_ssh.container_state_changed( + state_changes.append(install_app_ssh.state) + await install_app_ssh.container_state_changed( DockerContainerStateEvent( name=f"addon_{TEST_ADDON_SLUG}", state=ContainerState.HEALTHY, @@ -263,105 +261,105 @@ async def container_events_task(*args, **kwargs): _container_events_task = asyncio.create_task(container_events()) with ( - patch.object(DockerAddon, "run", new=container_events_task), + patch.object(DockerApp, "run", new=container_events_task), patch.object(DockerInterface, "install"), - patch.object(DockerAddon, "is_running", return_value=False), + patch.object(DockerApp, "is_running", return_value=False), patch.object( CpuArchManager, "supported", new=PropertyMock(return_value=["amd64"]) ), ): resp = await api_client.post(f"/store/addons/{TEST_ADDON_SLUG}/update") - assert state_changes == [AddonState.STOPPED, AddonState.STARTUP] - assert install_addon_ssh.state == AddonState.STARTED + assert state_changes == [AppState.STOPPED, AppState.STARTUP] + assert install_app_ssh.state == AppState.STARTED assert resp.status == 200 await _container_events_task @pytest.mark.parametrize("resource", ["store/addons", "addons"]) -async def test_api_store_addons_no_changelog( - api_client: TestClient, coresys: CoreSys, store_addon: AddonStore, resource: str +async def test_api_store_apps_no_changelog( + api_client: TestClient, coresys: CoreSys, store_app: AppStore, resource: str ): - """Test /store/addons/{addon}/changelog REST API. + """Test /store/apps/{app}/changelog REST API. Currently the frontend expects a valid body even in the error case. Make sure that is what the API returns. 
""" - assert store_addon.with_changelog is False - resp = await api_client.get(f"/{resource}/{store_addon.slug}/changelog") + assert store_app.with_changelog is False + resp = await api_client.get(f"/{resource}/{store_app.slug}/changelog") assert resp.status == 200 result = await resp.text() assert result == "No changelog found for app test_store_addon!" @pytest.mark.parametrize("resource", ["store/addons", "addons"]) -async def test_api_detached_addon_changelog( +async def test_api_detached_app_changelog( api_client: TestClient, coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, tmp_supervisor_data: Path, resource: str, ): - """Test /store/addons/{addon}/changelog for an detached addon. + """Test /store/apps/{app}/changelog for an detached app. Currently the frontend expects a valid body even in the error case. Make sure that is what the API returns. """ - (addons_dir := tmp_supervisor_data / "addons" / "local").mkdir() + (apps_dir := tmp_supervisor_data / "addons" / "local").mkdir() with patch.object( - CoreConfig, "path_addons_local", new=PropertyMock(return_value=addons_dir) + CoreConfig, "path_apps_local", new=PropertyMock(return_value=apps_dir) ): await coresys.store.load() - assert install_addon_ssh.is_detached is True - assert install_addon_ssh.with_changelog is False + assert install_app_ssh.is_detached is True + assert install_app_ssh.with_changelog is False - resp = await api_client.get(f"/{resource}/{install_addon_ssh.slug}/changelog") + resp = await api_client.get(f"/{resource}/{install_app_ssh.slug}/changelog") assert resp.status == 200 result = await resp.text() assert result == "App local_ssh does not exist in the store" @pytest.mark.parametrize("resource", ["store/addons", "addons"]) -async def test_api_store_addons_no_documentation( - api_client: TestClient, coresys: CoreSys, store_addon: AddonStore, resource: str +async def test_api_store_apps_no_documentation( + api_client: TestClient, coresys: CoreSys, store_app: AppStore, 
resource: str ): - """Test /store/addons/{addon}/documentation REST API. + """Test /store/apps/{app}/documentation REST API. Currently the frontend expects a valid body even in the error case. Make sure that is what the API returns. """ - assert store_addon.with_documentation is False - resp = await api_client.get(f"/{resource}/{store_addon.slug}/documentation") + assert store_app.with_documentation is False + resp = await api_client.get(f"/{resource}/{store_app.slug}/documentation") assert resp.status == 200 result = await resp.text() assert result == "No documentation found for app test_store_addon!" @pytest.mark.parametrize("resource", ["store/addons", "addons"]) -async def test_api_detached_addon_documentation( +async def test_api_detached_app_documentation( api_client: TestClient, coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, tmp_supervisor_data: Path, resource: str, ): - """Test /store/addons/{addon}/changelog for an detached addon. + """Test /store/apps/{app}/changelog for an detached app. Currently the frontend expects a valid body even in the error case. Make sure that is what the API returns. 
""" - (addons_dir := tmp_supervisor_data / "addons" / "local").mkdir() + (apps_dir := tmp_supervisor_data / "addons" / "local").mkdir() with patch.object( - CoreConfig, "path_addons_local", new=PropertyMock(return_value=addons_dir) + CoreConfig, "path_apps_local", new=PropertyMock(return_value=apps_dir) ): await coresys.store.load() - assert install_addon_ssh.is_detached is True - assert install_addon_ssh.with_documentation is False + assert install_app_ssh.is_detached is True + assert install_app_ssh.with_documentation is False - resp = await api_client.get(f"/{resource}/{install_addon_ssh.slug}/documentation") + resp = await api_client.get(f"/{resource}/{install_app_ssh.slug}/documentation") assert resp.status == 200 result = await resp.text() assert result == "App local_ssh does not exist in the store" @@ -386,10 +384,10 @@ async def test_api_detached_addon_documentation( ("post", "/addons/bad/update", True), ], ) -async def test_store_addon_not_found( +async def test_store_app_not_found( api_client: TestClient, method: str, url: str, json_expected: bool ): - """Test store addon not found error.""" + """Test store app not found error.""" resp = await api_client.request(method, url) assert resp.status == 404 if json_expected: @@ -411,8 +409,8 @@ async def test_store_addon_not_found( ], ) @pytest.mark.usefixtures("test_repository") -async def test_store_addon_not_installed(api_client: TestClient, method: str, url: str): - """Test store addon not installed error.""" +async def test_store_app_not_installed(api_client: TestClient, method: str, url: str): + """Test store app not installed error.""" resp = await api_client.request(method, url) assert resp.status == 400 body = await resp.json() @@ -435,54 +433,54 @@ async def test_repository_not_found(api_client: TestClient, method: str, url: st @pytest.mark.parametrize("resource", ["store/addons", "addons"]) -async def test_api_store_addons_documentation_corrupted( - api_client: TestClient, coresys: CoreSys, 
store_addon: AddonStore, resource: str +async def test_api_store_apps_documentation_corrupted( + api_client: TestClient, coresys: CoreSys, store_app: AppStore, resource: str ): - """Test /store/addons/{addon}/documentation REST API. + """Test /store/apps/{app}/documentation REST API. - Test add-on with documentation file with byte sequences which cannot be decoded + Test app with documentation file with byte sequences which cannot be decoded using UTF-8. """ - store_addon.path_documentation.write_bytes(b"Text with an invalid UTF-8 char: \xff") - await store_addon.refresh_path_cache() - assert store_addon.with_documentation is True + store_app.path_documentation.write_bytes(b"Text with an invalid UTF-8 char: \xff") + await store_app.refresh_path_cache() + assert store_app.with_documentation is True - resp = await api_client.get(f"/{resource}/{store_addon.slug}/documentation") + resp = await api_client.get(f"/{resource}/{store_app.slug}/documentation") assert resp.status == 200 result = await resp.text() assert result == "Text with an invalid UTF-8 char: �" @pytest.mark.parametrize("resource", ["store/addons", "addons"]) -async def test_api_store_addons_changelog_corrupted( - api_client: TestClient, coresys: CoreSys, store_addon: AddonStore, resource: str +async def test_api_store_apps_changelog_corrupted( + api_client: TestClient, coresys: CoreSys, store_app: AppStore, resource: str ): - """Test /store/addons/{addon}/changelog REST API. + """Test /store/apps/{app}/changelog REST API. - Test add-on with changelog file with byte sequences which cannot be decoded + Test app with changelog file with byte sequences which cannot be decoded using UTF-8. 
""" - store_addon.path_changelog.write_bytes(b"Text with an invalid UTF-8 char: \xff") - await store_addon.refresh_path_cache() - assert store_addon.with_changelog is True + store_app.path_changelog.write_bytes(b"Text with an invalid UTF-8 char: \xff") + await store_app.refresh_path_cache() + assert store_app.with_changelog is True - resp = await api_client.get(f"/{resource}/{store_addon.slug}/changelog") + resp = await api_client.get(f"/{resource}/{store_app.slug}/changelog") assert resp.status == 200 result = await resp.text() assert result == "Text with an invalid UTF-8 char: �" @pytest.mark.usefixtures("test_repository", "tmp_supervisor_data") -async def test_addon_install_in_background(api_client: TestClient, coresys: CoreSys): - """Test installing an addon in the background.""" +async def test_app_install_in_background(api_client: TestClient, coresys: CoreSys): + """Test installing an app in the background.""" coresys.hardware.disk.get_disk_free_space = lambda x: 5000 event = asyncio.Event() # Mock a long-running install task - async def mock_addon_install(*args, **kwargs): + async def mock_app_install(*args, **kwargs): await event.wait() - with patch.object(Addon, "install", new=mock_addon_install): + with patch.object(App, "install", new=mock_app_install): resp = await api_client.post( "/store/addons/local_ssh/install", json={"background": True} ) @@ -494,11 +492,11 @@ async def mock_addon_install(*args, **kwargs): event.set() -@pytest.mark.usefixtures("install_addon_ssh") -async def test_background_addon_install_fails_fast( +@pytest.mark.usefixtures("install_app_ssh") +async def test_background_app_install_fails_fast( api_client: TestClient, coresys: CoreSys ): - """Test background addon install returns error not job if validation fails.""" + """Test background app install returns error not job if validation fails.""" coresys.hardware.disk.get_disk_free_space = lambda x: 5000 resp = await api_client.post( @@ -514,22 +512,22 @@ async def 
test_background_addon_install_fails_fast( [(True, True, False), (False, False, True)], ) @pytest.mark.usefixtures("test_repository", "tmp_supervisor_data") -async def test_addon_update_in_background( +async def test_app_update_in_background( api_client: TestClient, coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, make_backup: bool, backup_called: bool, update_called: bool, ): - """Test updating an addon in the background.""" + """Test updating an app in the background.""" coresys.hardware.disk.get_disk_free_space = lambda x: 5000 - install_addon_ssh.data_store["version"] = "10.0.0" + install_app_ssh.data_store["version"] = "10.0.0" event = asyncio.Event() mock_update_called = mock_backup_called = False # Mock backup/update as long-running tasks - async def mock_addon_update(*args, **kwargs): + async def mock_app_update(*args, **kwargs): nonlocal mock_update_called mock_update_called = True await event.wait() @@ -540,7 +538,7 @@ async def mock_partial_backup(*args, **kwargs): await event.wait() with ( - patch.object(Addon, "update", new=mock_addon_update), + patch.object(App, "update", new=mock_app_update), patch.object(BackupManager, "do_backup_partial", new=mock_partial_backup), ): resp = await api_client.post( @@ -558,11 +556,11 @@ async def mock_partial_backup(*args, **kwargs): event.set() -@pytest.mark.usefixtures("install_addon_ssh") -async def test_background_addon_update_fails_fast( +@pytest.mark.usefixtures("install_app_ssh") +async def test_background_app_update_fails_fast( api_client: TestClient, coresys: CoreSys ): - """Test background addon update returns error not job if validation doesn't succeed.""" + """Test background app update returns error not job if validation doesn't succeed.""" coresys.hardware.disk.get_disk_free_space = lambda x: 5000 resp = await api_client.post( @@ -573,11 +571,11 @@ async def test_background_addon_update_fails_fast( assert body["message"] == "No update available for app local_ssh" -async def 
test_api_store_addons_addon_availability_success( - api_client: TestClient, store_addon: AddonStore +async def test_api_store_apps_app_availability_success( + api_client: TestClient, store_app: AppStore ): - """Test /store/addons/{addon}/availability REST API - success case.""" - resp = await api_client.get(f"/store/addons/{store_addon.slug}/availability") + """Test /store/apps/{app}/availability REST API - success case.""" + resp = await api_client.get(f"/store/addons/{store_app.slug}/availability") assert resp.status == 200 @@ -592,7 +590,7 @@ async def test_api_store_addons_addon_availability_success( (["aarch64", "fooarch"], "update", "post", True), ], ) -async def test_api_store_addons_addon_availability_arch_not_supported( +async def test_api_store_apps_app_availability_arch_not_supported( api_client: TestClient, coresys: CoreSys, supported_architectures: list[str], @@ -600,14 +598,14 @@ async def test_api_store_addons_addon_availability_arch_not_supported( api_method: str, installed: bool, ): - """Test availability errors for /store/addons/{addon}/* REST APIs - architecture not supported.""" + """Test availability errors for /store/apps/{app}/* REST APIs - architecture not supported.""" coresys.hardware.disk.get_disk_free_space = lambda x: 5000 - # Create an addon with unsupported architecture - addon_obj = AddonStore(coresys, "test_arch_addon") - coresys.addons.store[addon_obj.slug] = addon_obj + # Create an app with unsupported architecture + app_obj = AppStore(coresys, "test_arch_addon") + coresys.apps.store[app_obj.slug] = app_obj - # Set addon config with unsupported architecture - addon_config = { + # Set app config with unsupported architecture + app_config = { "advanced": False, "arch": supported_architectures, "slug": "test_arch_addon", @@ -617,17 +615,17 @@ async def test_api_store_addons_addon_availability_arch_not_supported( "stage": "stable", "version": "1.0.0", } - coresys.store.data.addons[addon_obj.slug] = addon_config + 
coresys.store.data.apps[app_obj.slug] = app_config if installed: - coresys.addons.local[addon_obj.slug] = Addon(coresys, addon_obj.slug) - coresys.addons.data.user[addon_obj.slug] = {"version": AwesomeVersion("0.0.1")} + coresys.apps.local[app_obj.slug] = App(coresys, app_obj.slug) + coresys.apps.data.user[app_obj.slug] = {"version": AwesomeVersion("0.0.1")} # Mock the system architecture to be different with patch.object( CpuArchManager, "supported", new=PropertyMock(return_value=["amd64"]) ): resp = await api_client.request( - api_method, f"/store/addons/{addon_obj.slug}/{api_action}" + api_method, f"/store/addons/{app_obj.slug}/{api_action}" ) assert resp.status == 400 result = await resp.json() @@ -656,7 +654,7 @@ async def test_api_store_addons_addon_availability_arch_not_supported( (["a", "b"], "update", "post", True), ], ) -async def test_api_store_addons_addon_availability_machine_not_supported( +async def test_api_store_apps_app_availability_machine_not_supported( api_client: TestClient, coresys: CoreSys, supported_machines: list[str], @@ -664,14 +662,14 @@ async def test_api_store_addons_addon_availability_machine_not_supported( api_method: str, installed: bool, ): - """Test availability errors for /store/addons/{addon}/* REST APIs - machine not supported.""" + """Test availability errors for /store/apps/{app}/* REST APIs - machine not supported.""" coresys.hardware.disk.get_disk_free_space = lambda x: 5000 - # Create an addon with unsupported machine type - addon_obj = AddonStore(coresys, "test_machine_addon") - coresys.addons.store[addon_obj.slug] = addon_obj + # Create an app with unsupported machine type + app_obj = AppStore(coresys, "test_machine_addon") + coresys.apps.store[app_obj.slug] = app_obj - # Set addon config with unsupported machine - addon_config = { + # Set app config with unsupported machine + app_config = { "advanced": False, "arch": ["amd64"], "machine": supported_machines, @@ -682,15 +680,15 @@ async def 
test_api_store_addons_addon_availability_machine_not_supported( "stage": "stable", "version": "1.0.0", } - coresys.store.data.addons[addon_obj.slug] = addon_config + coresys.store.data.apps[app_obj.slug] = app_config if installed: - coresys.addons.local[addon_obj.slug] = Addon(coresys, addon_obj.slug) - coresys.addons.data.user[addon_obj.slug] = {"version": AwesomeVersion("0.0.1")} + coresys.apps.local[app_obj.slug] = App(coresys, app_obj.slug) + coresys.apps.data.user[app_obj.slug] = {"version": AwesomeVersion("0.0.1")} # Mock the system machine to be different with patch.object(CoreSys, "machine", new=PropertyMock(return_value="qemux86-64")): resp = await api_client.request( - api_method, f"/store/addons/{addon_obj.slug}/{api_action}" + api_method, f"/store/addons/{app_obj.slug}/{api_action}" ) assert resp.status == 400 result = await resp.json() @@ -713,21 +711,21 @@ async def test_api_store_addons_addon_availability_machine_not_supported( ("update", "post", True), ], ) -async def test_api_store_addons_addon_availability_homeassistant_version_too_old( +async def test_api_store_apps_app_availability_homeassistant_version_too_old( api_client: TestClient, coresys: CoreSys, api_action: str, api_method: str, installed: bool, ): - """Test availability errors for /store/addons/{addon}/* REST APIs - Home Assistant version too old.""" + """Test availability errors for /store/apps/{app}/* REST APIs - Home Assistant version too old.""" coresys.hardware.disk.get_disk_free_space = lambda x: 5000 - # Create an addon that requires newer Home Assistant version - addon_obj = AddonStore(coresys, "test_version_addon") - coresys.addons.store[addon_obj.slug] = addon_obj + # Create an app that requires newer Home Assistant version + app_obj = AppStore(coresys, "test_version_addon") + coresys.apps.store[app_obj.slug] = app_obj - # Set addon config with minimum Home Assistant version requirement - addon_config = { + # Set app config with minimum Home Assistant version requirement + 
app_config = { "advanced": False, "arch": ["amd64"], "homeassistant": "2023.1.1", # Requires newer version than current @@ -738,10 +736,10 @@ async def test_api_store_addons_addon_availability_homeassistant_version_too_old "stage": "stable", "version": "1.0.0", } - coresys.store.data.addons[addon_obj.slug] = addon_config + coresys.store.data.apps[app_obj.slug] = app_config if installed: - coresys.addons.local[addon_obj.slug] = Addon(coresys, addon_obj.slug) - coresys.addons.data.user[addon_obj.slug] = {"version": AwesomeVersion("0.0.1")} + coresys.apps.local[app_obj.slug] = App(coresys, app_obj.slug) + coresys.apps.data.user[app_obj.slug] = {"version": AwesomeVersion("0.0.1")} # Mock the Home Assistant version to be older with patch.object( @@ -750,7 +748,7 @@ async def test_api_store_addons_addon_availability_homeassistant_version_too_old new=PropertyMock(return_value=AwesomeVersion("2022.1.1")), ): resp = await api_client.request( - api_method, f"/store/addons/{addon_obj.slug}/{api_action}" + api_method, f"/store/addons/{app_obj.slug}/{api_action}" ) assert resp.status == 400 result = await resp.json() @@ -765,15 +763,15 @@ async def test_api_store_addons_addon_availability_homeassistant_version_too_old ) -async def test_api_store_addons_addon_availability_installed_addon( - api_client: TestClient, install_addon_ssh: Addon +async def test_api_store_apps_app_availability_installed_app( + api_client: TestClient, install_app_ssh: App ): - """Test /store/addons/{addon}/availability REST API - installed addon checks against latest version.""" + """Test /store/apps/{app}/availability REST API - installed app checks against latest version.""" resp = await api_client.get("/store/addons/local_ssh/availability") assert resp.status == 200 - install_addon_ssh.data_store["version"] = AwesomeVersion("10.0.0") - install_addon_ssh.data_store["homeassistant"] = AwesomeVersion("2023.1.1") + install_app_ssh.data_store["version"] = AwesomeVersion("10.0.0") + 
install_app_ssh.data_store["homeassistant"] = AwesomeVersion("2023.1.1") # Mock the Home Assistant version to be older with patch.object( @@ -790,21 +788,21 @@ async def test_api_store_addons_addon_availability_installed_addon( @pytest.mark.parametrize( - ("action", "job_name", "addon_slug"), + ("action", "job_name", "app_slug"), [ ("install", "addon_manager_install", "local_ssh"), ("update", "addon_manager_update", "local_example"), ], ) @pytest.mark.usefixtures("tmp_supervisor_data") -async def test_api_progress_updates_addon_install_update( +async def test_api_progress_updates_app_install_update( api_client: TestClient, coresys: CoreSys, ha_ws_client: AsyncMock, - install_addon_example: Addon, + install_app_example: App, action: str, job_name: str, - addon_slug: str, + app_slug: str, ): """Test progress updates sent to Home Assistant for installs/updates.""" coresys.hardware.disk.get_disk_free_space = lambda x: 5000 @@ -814,14 +812,14 @@ async def test_api_progress_updates_addon_install_update( coresys.docker.images.pull.return_value = AsyncIterator(logs) coresys.arch._supported_arch = ["amd64"] # pylint: disable=protected-access - install_addon_example.data_store["version"] = AwesomeVersion("2.0.0") + install_app_example.data_store["version"] = AwesomeVersion("2.0.0") with ( - patch.object(Addon, "load"), - patch.object(Addon, "need_build", new=PropertyMock(return_value=False)), - patch.object(Addon, "latest_need_build", new=PropertyMock(return_value=False)), + patch.object(App, "load"), + patch.object(App, "need_build", new=PropertyMock(return_value=False)), + patch.object(App, "latest_need_build", new=PropertyMock(return_value=False)), ): - resp = await api_client.post(f"/store/addons/{addon_slug}/{action}") + resp = await api_client.post(f"/store/addons/{app_slug}/{action}") assert resp.status == 200 @@ -835,7 +833,7 @@ async def test_api_progress_updates_addon_install_update( if "data" in evt.args[0] and evt.args[0]["data"]["event"] == WSEvent.JOB and 
evt.args[0]["data"]["data"]["name"] == job_name - and evt.args[0]["data"]["data"]["reference"] == addon_slug + and evt.args[0]["data"]["data"]["reference"] == app_slug ] # Count-based progress: 2 layers need pulling (each worth 50%) # Layers that already exist are excluded from progress calculation diff --git a/tests/backups/conftest.py b/tests/backups/conftest.py index bfe6abd074e..546b74411dc 100644 --- a/tests/backups/conftest.py +++ b/tests/backups/conftest.py @@ -21,17 +21,17 @@ def fixture_backup_mock(): backup_instance = MagicMock() backup_mock.return_value = backup_instance - backup_instance.store_addons = AsyncMock(return_value=None) + backup_instance.store_apps = AsyncMock(return_value=None) backup_instance.store_folders = AsyncMock(return_value=None) backup_instance.store_homeassistant = AsyncMock(return_value=None) - backup_instance.store_addons = AsyncMock(return_value=None) + backup_instance.store_apps = AsyncMock(return_value=None) backup_instance.store_supervisor_config = AsyncMock(return_value=None) backup_instance.restore_folders = AsyncMock(return_value=True) backup_instance.restore_homeassistant = AsyncMock(return_value=None) - backup_instance.restore_addons = AsyncMock(return_value=(True, [])) + backup_instance.restore_apps = AsyncMock(return_value=(True, [])) backup_instance.restore_repositories = AsyncMock(return_value=None) backup_instance.restore_supervisor_config = AsyncMock(return_value=(True, [])) - backup_instance.remove_delta_addons = AsyncMock(return_value=True) + backup_instance.remove_delta_apps = AsyncMock(return_value=True) yield backup_mock @@ -42,7 +42,7 @@ def partial_backup_mock(backup_mock): backup_instance = backup_mock.return_value backup_instance.sys_type = BackupType.PARTIAL backup_instance.folders = [] - backup_instance.addon_list = [TEST_ADDON_SLUG] + backup_instance.app_list = [TEST_ADDON_SLUG] backup_instance.supervisor_version = "9999.09.9.dev9999" backup_instance.location = None backup_instance.all_locations = { @@ 
-58,7 +58,7 @@ def full_backup_mock(backup_mock): backup_instance = backup_mock.return_value backup_instance.sys_type = BackupType.FULL backup_instance.folders = ALL_FOLDERS - backup_instance.addon_list = [TEST_ADDON_SLUG] + backup_instance.app_list = [TEST_ADDON_SLUG] backup_instance.supervisor_version = "9999.09.9.dev9999" backup_instance.location = None backup_instance.all_locations = { diff --git a/tests/backups/test_backup.py b/tests/backups/test_backup.py index 5525277751f..067e0541cf5 100644 --- a/tests/backups/test_backup.py +++ b/tests/backups/test_backup.py @@ -10,12 +10,12 @@ import pytest from securetar import AddFileError, InvalidPasswordError, SecureTarReadError -from supervisor.addons.addon import Addon +from supervisor.addons.addon import App from supervisor.backups.backup import Backup, BackupLocation from supervisor.backups.const import BackupType from supervisor.coresys import CoreSys from supervisor.exceptions import ( - AddonsError, + AppsError, BackupError, BackupFatalIOError, BackupFileExistError, @@ -75,32 +75,30 @@ async def test_new_backup_exists_error(coresys: CoreSys, tmp_path: Path): pass -async def test_backup_error_addon( - coresys: CoreSys, install_addon_ssh: Addon, tmp_path: Path -): - """Test if errors during add-on backup is correctly recorded in jobs.""" +async def test_backup_error_app(coresys: CoreSys, install_app_ssh: App, tmp_path: Path): + """Test if errors during app backup is correctly recorded in jobs.""" backup_file = tmp_path / "my_backup.tar" backup = Backup(coresys, backup_file, "test", None) backup.new("test", "2023-07-21T21:05:00.000000+00:00", BackupType.FULL) - install_addon_ssh.backup = MagicMock( - side_effect=(err := AddonsError("Fake app backup error")) + install_app_ssh.backup = MagicMock( + side_effect=(err := AppsError("Fake app backup error")) ) async with backup.create(): - # Validate that the add-on exception is collected in the main job - backup_store_addons_job, backup_task = coresys.jobs.schedule_job( 
- backup.store_addons, JobSchedulerOptions(), [install_addon_ssh] + # Validate that the app exception is collected in the main job + backup_store_apps_job, backup_task = coresys.jobs.schedule_job( + backup.store_apps, JobSchedulerOptions(), [install_app_ssh] ) await backup_task - assert len(backup_store_addons_job.errors) == 1 - assert str(err) in backup_store_addons_job.errors[0].message + assert len(backup_store_apps_job.errors) == 1 + assert str(err) in backup_store_apps_job.errors[0].message # Check backup_addon_restore child job has the same error child_jobs = [ job for job in coresys.jobs.jobs - if job.parent_id == backup_store_addons_job.uuid + if job.parent_id == backup_store_apps_job.uuid ] assert len(child_jobs) == 1 assert child_jobs[0].errors[0].message == str(err) @@ -163,23 +161,23 @@ async def test_backup_oserror_folder_propagates( await backup.store_folders(["media"]) -async def test_backup_fatal_error_addon_propagates( - coresys: CoreSys, install_addon_ssh: Addon, tmp_path: Path +async def test_backup_fatal_error_app_propagates( + coresys: CoreSys, install_app_ssh: App, tmp_path: Path ): - """Test that BackupFatalIOError during add-on backup propagates out of store_addons. + """Test that BackupFatalIOError during app backup propagates out of store_addons. - store_addons swallows BackupError for individual add-on failures, but + store_addons swallows BackupError for individual app failures, but BackupFatalIOError must not be swallowed since it indicates a corrupt tar. 
""" backup_file = tmp_path / "my_backup.tar" backup = Backup(coresys, backup_file, "test", None) backup.new("test", "2023-07-21T21:05:00.000000+00:00", BackupType.FULL) - install_addon_ssh.backup = MagicMock(side_effect=BackupFatalIOError("Disk full")) + install_app_ssh.backup = MagicMock(side_effect=BackupFatalIOError("Disk full")) with pytest.raises(BackupFatalIOError): async with backup.create(): - await backup.store_addons([install_addon_ssh]) + await backup.store_apps([install_app_ssh]) async def test_backup_oserror_close_suppressed_on_error( diff --git a/tests/backups/test_backup_security.py b/tests/backups/test_backup_security.py index bf6eaeb6648..91b34039eb8 100644 --- a/tests/backups/test_backup_security.py +++ b/tests/backups/test_backup_security.py @@ -7,7 +7,7 @@ import pytest from securetar import SecureTarFile -from supervisor.addons.addon import Addon +from supervisor.addons.addon import App from supervisor.backups.backup import Backup from supervisor.backups.const import BackupType from supervisor.coresys import CoreSys @@ -172,10 +172,10 @@ async def test_homeassistant_restore_rejects_path_traversal( await coresys.homeassistant.restore(tar_file) -async def test_addon_restore_rejects_path_traversal( - coresys: CoreSys, install_addon_ssh: Addon, tmp_supervisor_data: Path +async def test_app_restore_rejects_path_traversal( + coresys: CoreSys, install_app_ssh: App, tmp_supervisor_data: Path ): - """Test that add-on restore raises BackupInvalidError for path traversal.""" + """Test that app restore raises BackupInvalidError for path traversal.""" tar_path = tmp_supervisor_data / "addon.tar.gz" traversal_info = tarfile.TarInfo(name="../../etc/passwd") traversal_info.size = 9 @@ -183,13 +183,13 @@ async def test_addon_restore_rejects_path_traversal( tar_file = SecureTarFile(tar_path, gzip=True) with pytest.raises(BackupInvalidError): - await install_addon_ssh.restore(tar_file) + await install_app_ssh.restore(tar_file) -async def 
test_addon_restore_rejects_symlink_escape( - coresys: CoreSys, install_addon_ssh: Addon, tmp_supervisor_data: Path +async def test_app_restore_rejects_symlink_escape( + coresys: CoreSys, install_app_ssh: App, tmp_supervisor_data: Path ): - """Test that add-on restore raises BackupInvalidError for symlink escape.""" + """Test that app restore raises BackupInvalidError for symlink escape.""" link_info = tarfile.TarInfo(name="escape") link_info.type = tarfile.SYMTYPE link_info.linkname = "../outside" @@ -205,7 +205,7 @@ async def test_addon_restore_rejects_symlink_escape( tar_file = SecureTarFile(tar_path, gzip=True) with pytest.raises(BackupInvalidError): - await install_addon_ssh.restore(tar_file) + await install_app_ssh.restore(tar_file) async def test_folder_restore_rejects_path_traversal( diff --git a/tests/backups/test_manager.py b/tests/backups/test_manager.py index dd5e1cae4c2..d1de3e02149 100644 --- a/tests/backups/test_manager.py +++ b/tests/backups/test_manager.py @@ -13,15 +13,15 @@ from dbus_fast import DBusError import pytest -from supervisor.addons.addon import Addon -from supervisor.addons.const import AddonBackupMode -from supervisor.addons.model import AddonModel +from supervisor.addons.addon import App +from supervisor.addons.const import AppBackupMode +from supervisor.addons.model import AppModel from supervisor.backups.backup import Backup, BackupLocation from supervisor.backups.const import LOCATION_TYPE, BackupJobStage, BackupType from supervisor.backups.manager import BackupManager -from supervisor.const import FOLDER_HOMEASSISTANT, FOLDER_SHARE, AddonState, CoreState +from supervisor.const import FOLDER_HOMEASSISTANT, FOLDER_SHARE, AppState, CoreState from supervisor.coresys import CoreSys -from supervisor.docker.addon import DockerAddon +from supervisor.docker.addon import DockerApp from supervisor.docker.const import ContainerState from supervisor.docker.homeassistant import DockerHomeAssistant from supervisor.docker.monitor import 
DockerContainerStateEvent @@ -50,7 +50,7 @@ from tests.dbus_service_mocks.systemd_unit import SystemdUnit as SystemdUnitService -async def test_do_backup_full(coresys: CoreSys, backup_mock, install_addon_ssh): +async def test_do_backup_full(coresys: CoreSys, backup_mock, install_app_ssh): """Test creating Backup.""" await coresys.core.set_state(CoreState.RUNNING) coresys.hardware.disk.get_disk_free_space = lambda x: 5000 @@ -68,8 +68,8 @@ async def test_do_backup_full(coresys: CoreSys, backup_mock, install_addon_ssh): backup_instance.store_homeassistant.assert_called_once() backup_instance.store_repositories.assert_called_once() - backup_instance.store_addons.assert_called_once() - assert install_addon_ssh in backup_instance.store_addons.call_args[0][0] + backup_instance.store_apps.assert_called_once() + assert install_app_ssh in backup_instance.store_apps.call_args[0][0] backup_instance.store_folders.assert_called_once() assert len(backup_instance.store_folders.call_args[0][0]) == 4 @@ -100,7 +100,7 @@ async def test_do_backup_full_with_filename( @pytest.mark.usefixtures("backup_mock") -async def test_do_backup_full_uncompressed(coresys: CoreSys, install_addon_ssh: Addon): +async def test_do_backup_full_uncompressed(coresys: CoreSys, install_app_ssh: App): """Test creating Backup.""" await coresys.core.set_state(CoreState.RUNNING) coresys.hardware.disk.get_disk_free_space = lambda x: 5000 @@ -118,8 +118,8 @@ async def test_do_backup_full_uncompressed(coresys: CoreSys, install_addon_ssh: backup_instance.store_homeassistant.assert_called_once() backup_instance.store_repositories.assert_called_once() - backup_instance.store_addons.assert_called_once() - assert install_addon_ssh in backup_instance.store_addons.call_args[0][0] + backup_instance.store_apps.assert_called_once() + assert install_app_ssh in backup_instance.store_apps.call_args[0][0] backup_instance.store_folders.assert_called_once() assert len(backup_instance.store_folders.call_args[0][0]) == 4 @@ -128,7 
+128,7 @@ async def test_do_backup_full_uncompressed(coresys: CoreSys, install_addon_ssh: assert coresys.core.state == CoreState.RUNNING -@pytest.mark.usefixtures("backup_mock", "install_addon_ssh") +@pytest.mark.usefixtures("backup_mock", "install_app_ssh") async def test_do_backup_partial_minimal(coresys: CoreSys): """Test creating minimal partial Backup.""" await coresys.core.set_state(CoreState.RUNNING) @@ -147,14 +147,14 @@ async def test_do_backup_partial_minimal(coresys: CoreSys): backup_instance.store_homeassistant.assert_not_called() backup_instance.store_repositories.assert_called_once() - backup_instance.store_addons.assert_not_called() + backup_instance.store_apps.assert_not_called() backup_instance.store_folders.assert_not_called() assert coresys.core.state == CoreState.RUNNING -@pytest.mark.usefixtures("backup_mock", "install_addon_ssh") +@pytest.mark.usefixtures("backup_mock", "install_app_ssh") async def test_do_backup_partial_minimal_uncompressed(coresys: CoreSys): """Test creating minimal partial Backup.""" await coresys.core.set_state(CoreState.RUNNING) @@ -175,7 +175,7 @@ async def test_do_backup_partial_minimal_uncompressed(coresys: CoreSys): backup_instance.store_homeassistant.assert_not_called() backup_instance.store_repositories.assert_called_once() - backup_instance.store_addons.assert_not_called() + backup_instance.store_apps.assert_not_called() backup_instance.store_folders.assert_not_called() @@ -183,7 +183,7 @@ async def test_do_backup_partial_minimal_uncompressed(coresys: CoreSys): @pytest.mark.usefixtures("backup_mock") -async def test_do_backup_partial_maximal(coresys: CoreSys, install_addon_ssh: Addon): +async def test_do_backup_partial_maximal(coresys: CoreSys, install_app_ssh: App): """Test creating maximal partial Backup.""" await coresys.core.set_state(CoreState.RUNNING) coresys.hardware.disk.get_disk_free_space = lambda x: 5000 @@ -192,7 +192,7 @@ async def test_do_backup_partial_maximal(coresys: CoreSys, install_addon_ssh: Ad 
# backup_mock fixture causes Backup() to be a MagicMock backup_instance: MagicMock = await manager.do_backup_partial( - addons=[TEST_ADDON_SLUG], + apps=[TEST_ADDON_SLUG], folders=[FOLDER_SHARE, FOLDER_HOMEASSISTANT], homeassistant=True, ) @@ -205,8 +205,8 @@ async def test_do_backup_partial_maximal(coresys: CoreSys, install_addon_ssh: Ad backup_instance.store_homeassistant.assert_called_once() backup_instance.store_repositories.assert_called_once() - backup_instance.store_addons.assert_called_once() - assert install_addon_ssh in backup_instance.store_addons.call_args[0][0] + backup_instance.store_apps.assert_called_once() + assert install_app_ssh in backup_instance.store_apps.call_args[0][0] backup_instance.store_folders.assert_called_once() assert len(backup_instance.store_folders.call_args[0][0]) == 1 @@ -217,7 +217,7 @@ async def test_do_backup_partial_maximal(coresys: CoreSys, install_addon_ssh: Ad @pytest.mark.usefixtures("supervisor_internet") async def test_do_restore_full( - coresys: CoreSys, full_backup_mock: Backup, install_addon_ssh: Addon + coresys: CoreSys, full_backup_mock: Backup, install_app_ssh: App ): """Test restoring full Backup.""" await coresys.core.set_state(CoreState.RUNNING) @@ -225,22 +225,22 @@ async def test_do_restore_full( coresys.homeassistant.core.start = AsyncMock(return_value=None) coresys.homeassistant.core.stop = AsyncMock(return_value=None) coresys.homeassistant.core.update = AsyncMock(return_value=None) - install_addon_ssh.uninstall = AsyncMock(return_value=None) + install_app_ssh.uninstall = AsyncMock(return_value=None) manager = await BackupManager(coresys).load_config() backup_instance = full_backup_mock.return_value - backup_instance.sys_addons = coresys.addons - backup_instance.remove_delta_addons = partial( - Backup.remove_delta_addons, backup_instance + backup_instance.sys_apps = coresys.apps + backup_instance.remove_delta_apps = partial( + Backup.remove_delta_apps, backup_instance ) assert await 
manager.do_restore_full(backup_instance) backup_instance.restore_homeassistant.assert_called_once() backup_instance.restore_repositories.assert_called_once() - backup_instance.restore_addons.assert_called_once() - install_addon_ssh.uninstall.assert_not_called() + backup_instance.restore_apps.assert_called_once() + install_app_ssh.uninstall.assert_not_called() backup_instance.restore_folders.assert_called_once() @@ -248,39 +248,39 @@ async def test_do_restore_full( @pytest.mark.usefixtures("supervisor_internet") -async def test_do_restore_full_different_addon( - coresys: CoreSys, full_backup_mock: Backup, install_addon_ssh: Addon +async def test_do_restore_full_different_app( + coresys: CoreSys, full_backup_mock: Backup, install_app_ssh: App ): - """Test restoring full Backup with different addons than installed.""" + """Test restoring full Backup with different apps than installed.""" await coresys.core.set_state(CoreState.RUNNING) coresys.hardware.disk.get_disk_free_space = lambda x: 5000 coresys.homeassistant.core.start = AsyncMock(return_value=None) coresys.homeassistant.core.stop = AsyncMock(return_value=None) coresys.homeassistant.core.update = AsyncMock(return_value=None) - install_addon_ssh.uninstall = AsyncMock(return_value=None) + install_app_ssh.uninstall = AsyncMock(return_value=None) manager = await BackupManager(coresys).load_config() backup_instance = full_backup_mock.return_value - backup_instance.addon_list = ["differentslug"] - backup_instance.sys_addons = coresys.addons - backup_instance.remove_delta_addons = partial( - Backup.remove_delta_addons, backup_instance + backup_instance.app_list = ["differentslug"] + backup_instance.sys_apps = coresys.apps + backup_instance.remove_delta_apps = partial( + Backup.remove_delta_apps, backup_instance ) assert await manager.do_restore_full(backup_instance) backup_instance.restore_homeassistant.assert_called_once() backup_instance.restore_repositories.assert_called_once() - 
backup_instance.restore_addons.assert_called_once() - install_addon_ssh.uninstall.assert_called_once() + backup_instance.restore_apps.assert_called_once() + install_app_ssh.uninstall.assert_called_once() backup_instance.restore_folders.assert_called_once() assert coresys.core.state == CoreState.RUNNING -@pytest.mark.usefixtures("supervisor_internet", "install_addon_ssh") +@pytest.mark.usefixtures("supervisor_internet", "install_app_ssh") async def test_do_restore_partial_minimal( coresys: CoreSys, partial_backup_mock: Backup ): @@ -299,7 +299,7 @@ async def test_do_restore_partial_minimal( backup_instance.restore_homeassistant.assert_not_called() backup_instance.restore_repositories.assert_not_called() - backup_instance.restore_addons.assert_not_called() + backup_instance.restore_apps.assert_not_called() backup_instance.restore_folders.assert_not_called() @@ -322,7 +322,7 @@ async def test_do_restore_partial_maximal( backup_instance = partial_backup_mock.return_value assert await manager.do_restore_partial( backup_instance, - addons=[TEST_ADDON_SLUG], + apps=[TEST_ADDON_SLUG], folders=[FOLDER_SHARE, FOLDER_HOMEASSISTANT], homeassistant=True, ) @@ -330,7 +330,7 @@ async def test_do_restore_partial_maximal( backup_instance.restore_homeassistant.assert_called_once() backup_instance.restore_repositories.assert_called_once() - backup_instance.restore_addons.assert_called_once() + backup_instance.restore_apps.assert_called_once() backup_instance.restore_folders.assert_called_once() backup_instance.restore_homeassistant.assert_called_once() @@ -408,7 +408,7 @@ async def test_fail_invalid_partial_backup( await manager.do_restore_partial(backup_instance) -@pytest.mark.usefixtures("install_addon_ssh", "capture_exception") +@pytest.mark.usefixtures("install_app_ssh", "capture_exception") async def test_backup_error_homeassistant(coresys: CoreSys, backup_mock: MagicMock): """Test error collected and file deleted when Home Assistant Core backup fails.""" await 
coresys.core.set_state(CoreState.RUNNING) @@ -432,7 +432,7 @@ async def test_backup_error_homeassistant(coresys: CoreSys, backup_mock: MagicMo backup_instance.tarfile.unlink.assert_called_once() -@pytest.mark.usefixtures("install_addon_ssh") +@pytest.mark.usefixtures("install_app_ssh") async def test_backup_error_capture( coresys: CoreSys, backup_mock: MagicMock, capture_exception: Mock ): @@ -948,27 +948,27 @@ async def test_load_network_error( @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") async def test_backup_with_healthcheck( - coresys: CoreSys, install_addon_ssh: Addon, container: DockerContainer + coresys: CoreSys, install_app_ssh: App, container: DockerContainer ): - """Test backup of addon with healthcheck in cold mode.""" + """Test backup of app with healthcheck in cold mode.""" container.show.return_value["State"]["Status"] = "running" container.show.return_value["State"]["Running"] = True container.show.return_value["Config"] = {"Healthcheck": "exists"} - install_addon_ssh.path_data.mkdir() + install_app_ssh.path_data.mkdir() await coresys.core.set_state(CoreState.RUNNING) coresys.hardware.disk.get_disk_free_space = lambda x: 5000 - await install_addon_ssh.load() + await install_app_ssh.load() await asyncio.sleep(0) - assert install_addon_ssh.state == AddonState.STARTUP + assert install_app_ssh.state == AppState.STARTUP - state_changes: list[AddonState] = [] + state_changes: list[AppState] = [] _container_events_task: asyncio.Task | None = None async def container_events(): nonlocal state_changes await asyncio.sleep(0.01) - await install_addon_ssh.container_state_changed( + await install_app_ssh.container_state_changed( DockerContainerStateEvent( name=f"addon_{TEST_ADDON_SLUG}", state=ContainerState.STOPPED, @@ -977,8 +977,8 @@ async def container_events(): ) ) - state_changes.append(install_addon_ssh.state) - await install_addon_ssh.container_state_changed( + state_changes.append(install_app_ssh.state) + await 
install_app_ssh.container_state_changed( DockerContainerStateEvent( name=f"addon_{TEST_ADDON_SLUG}", state=ContainerState.RUNNING, @@ -987,8 +987,8 @@ async def container_events(): ) ) - state_changes.append(install_addon_ssh.state) - await install_addon_ssh.container_state_changed( + state_changes.append(install_app_ssh.state) + await install_app_ssh.container_state_changed( DockerContainerStateEvent( name=f"addon_{TEST_ADDON_SLUG}", state=ContainerState.HEALTHY, @@ -1002,21 +1002,21 @@ async def container_events_task(*args, **kwargs): _container_events_task = asyncio.create_task(container_events()) with ( - patch.object(DockerAddon, "run", new=container_events_task), + patch.object(DockerApp, "run", new=container_events_task), patch.object( - AddonModel, + AppModel, "backup_mode", - new=PropertyMock(return_value=AddonBackupMode.COLD), + new=PropertyMock(return_value=AppBackupMode.COLD), ), - patch.object(DockerAddon, "is_running", side_effect=[True, False, False]), + patch.object(DockerApp, "is_running", side_effect=[True, False, False]), ): backup = await coresys.backups.do_backup_partial( - homeassistant=False, addons=["local_ssh"] + homeassistant=False, apps=["local_ssh"] ) assert backup - assert state_changes == [AddonState.STOPPED, AddonState.STARTUP] - assert install_addon_ssh.state == AddonState.STARTED + assert state_changes == [AppState.STOPPED, AppState.STARTUP] + assert install_app_ssh.state == AppState.STARTED assert coresys.core.state == CoreState.RUNNING await _container_events_task @@ -1024,29 +1024,29 @@ async def container_events_task(*args, **kwargs): @pytest.mark.usefixtures("supervisor_internet", "tmp_supervisor_data", "path_extern") async def test_restore_with_healthcheck( - coresys: CoreSys, install_addon_ssh: Addon, container: DockerContainer + coresys: CoreSys, install_app_ssh: App, container: DockerContainer ): - """Test backup of addon with healthcheck in cold mode.""" + """Test backup of app with healthcheck in cold mode.""" 
container.show.return_value["State"]["Status"] = "running" container.show.return_value["State"]["Running"] = True container.show.return_value["Config"] = {"Healthcheck": "exists"} - install_addon_ssh.path_data.mkdir() + install_app_ssh.path_data.mkdir() await coresys.core.set_state(CoreState.RUNNING) coresys.hardware.disk.get_disk_free_space = lambda x: 5000 - await install_addon_ssh.load() + await install_app_ssh.load() await asyncio.sleep(0) - assert install_addon_ssh.state == AddonState.STARTUP + assert install_app_ssh.state == AppState.STARTUP backup = await coresys.backups.do_backup_partial( - homeassistant=False, addons=["local_ssh"] + homeassistant=False, apps=["local_ssh"] ) - state_changes: list[AddonState] = [] + state_changes: list[AppState] = [] _container_events_task: asyncio.Task | None = None async def container_events(): nonlocal state_changes - await install_addon_ssh.container_state_changed( + await install_app_ssh.container_state_changed( DockerContainerStateEvent( name=f"addon_{TEST_ADDON_SLUG}", state=ContainerState.STOPPED, @@ -1055,8 +1055,8 @@ async def container_events(): ) ) - state_changes.append(install_addon_ssh.state) - await install_addon_ssh.container_state_changed( + state_changes.append(install_app_ssh.state) + await install_app_ssh.container_state_changed( DockerContainerStateEvent( name=f"addon_{TEST_ADDON_SLUG}", state=ContainerState.RUNNING, @@ -1065,8 +1065,8 @@ async def container_events(): ) ) - state_changes.append(install_addon_ssh.state) - await install_addon_ssh.container_state_changed( + state_changes.append(install_app_ssh.state) + await install_app_ssh.container_state_changed( DockerContainerStateEvent( name=f"addon_{TEST_ADDON_SLUG}", state=ContainerState.HEALTHY, @@ -1080,15 +1080,15 @@ async def container_events_task(*args, **kwargs): _container_events_task = asyncio.create_task(container_events()) with ( - patch.object(DockerAddon, "run", new=container_events_task), - patch.object(DockerAddon, "is_running", 
return_value=False), - patch.object(AddonModel, "_validate_availability"), - patch.object(Addon, "with_ingress", new=PropertyMock(return_value=False)), + patch.object(DockerApp, "run", new=container_events_task), + patch.object(DockerApp, "is_running", return_value=False), + patch.object(AppModel, "_validate_availability"), + patch.object(App, "with_ingress", new=PropertyMock(return_value=False)), ): - await coresys.backups.do_restore_partial(backup, addons=["local_ssh"]) + await coresys.backups.do_restore_partial(backup, apps=["local_ssh"]) - assert state_changes == [AddonState.STOPPED, AddonState.STARTUP] - assert install_addon_ssh.state == AddonState.STARTED + assert state_changes == [AppState.STOPPED, AppState.STARTUP] + assert install_app_ssh.state == AppState.STARTED assert coresys.core.state == CoreState.RUNNING await _container_events_task @@ -1126,22 +1126,22 @@ def _make_backup_message_for_assert( @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") async def test_backup_progress( coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, container: DockerContainer, ha_ws_client: AsyncMock, ): """Test progress is tracked during backups.""" container.show.return_value["State"]["Status"] = "running" container.show.return_value["State"]["Running"] = True - install_addon_ssh.path_data.mkdir() + install_app_ssh.path_data.mkdir() await coresys.core.set_state(CoreState.RUNNING) coresys.hardware.disk.get_disk_free_space = lambda x: 5000 with ( patch.object( - AddonModel, + AppModel, "backup_mode", - new=PropertyMock(return_value=AddonBackupMode.COLD), + new=PropertyMock(return_value=AppBackupMode.COLD), ), patch("supervisor.addons.addon.asyncio.Event.wait"), ): @@ -1185,7 +1185,7 @@ async def test_backup_progress( ha_ws_client.async_send_command.reset_mock() partial_backup: Backup = await coresys.backups.do_backup_partial( - addons=["local_ssh"], folders=["media", "share", "ssl"] + apps=["local_ssh"], folders=["media", "share", "ssl"] ) await 
asyncio.sleep(0) @@ -1237,15 +1237,15 @@ async def test_backup_progress( @pytest.mark.usefixtures("supervisor_internet", "tmp_supervisor_data", "path_extern") async def test_restore_progress( coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, container: DockerContainer, ha_ws_client: AsyncMock, ): """Test progress is tracked during backups.""" container.show.return_value["State"]["Status"] = "running" container.show.return_value["State"]["Running"] = True - install_addon_ssh.path_data.mkdir() - install_addon_ssh.state = AddonState.STARTED + install_app_ssh.path_data.mkdir() + install_app_ssh.state = AppState.STARTED await coresys.core.set_state(CoreState.RUNNING) coresys.hardware.disk.get_disk_free_space = lambda x: 5000 @@ -1253,21 +1253,21 @@ async def test_restore_progress( await asyncio.sleep(0) ha_ws_client.async_send_command.reset_mock() - # Install another addon to be uninstalled + # Install another app to be uninstalled # Duplicate code from install_addon_example fixture # Apparently request.getfixturevalue does not work with async fixtures: https://github.com/pytest-dev/pytest-asyncio/issues/112 - store = coresys.addons.store["local_example"] - await coresys.addons.data.install(store) + store = coresys.apps.store["local_example"] + await coresys.apps.data.install(store) # pylint: disable-next=protected-access - coresys.addons.data._data = coresys.addons.data._schema(coresys.addons.data._data) - coresys.addons.local[store.slug] = Addon(coresys, store.slug) + coresys.apps.data._data = coresys.apps.data._schema(coresys.apps.data._data) + coresys.apps.local[store.slug] = App(coresys, store.slug) with ( patch("supervisor.addons.addon.asyncio.Event.wait"), patch.object(HomeAssistant, "restore"), patch.object(HomeAssistantCore, "update"), - patch.object(AddonModel, "_validate_availability"), - patch.object(AddonModel, "with_ingress", new=PropertyMock(return_value=False)), + patch.object(AppModel, "_validate_availability"), + 
patch.object(AppModel, "with_ingress", new=PropertyMock(return_value=False)), ): await coresys.backups.do_restore_full(full_backup) await asyncio.sleep(0) @@ -1377,15 +1377,15 @@ async def test_restore_progress( container.show.return_value["State"]["Status"] = "stopped" container.show.return_value["State"]["Running"] = False - install_addon_ssh.state = AddonState.STOPPED - addon_backup: Backup = await coresys.backups.do_backup_partial(addons=["local_ssh"]) + install_app_ssh.state = AppState.STOPPED + app_backup: Backup = await coresys.backups.do_backup_partial(apps=["local_ssh"]) ha_ws_client.async_send_command.reset_mock() with ( - patch.object(AddonModel, "_validate_availability"), + patch.object(AppModel, "_validate_availability"), patch.object(HomeAssistantCore, "start"), ): - await coresys.backups.do_restore_partial(addon_backup, addons=["local_ssh"]) + await coresys.backups.do_restore_partial(app_backup, apps=["local_ssh"]) await asyncio.sleep(0) messages = [ @@ -1401,27 +1401,27 @@ async def test_restore_progress( ), _make_backup_message_for_assert( action="partial_restore", - reference=addon_backup.slug, + reference=app_backup.slug, stage=None, ), _make_backup_message_for_assert( action="partial_restore", - reference=addon_backup.slug, + reference=app_backup.slug, stage="addon_repositories", ), _make_backup_message_for_assert( action="partial_restore", - reference=addon_backup.slug, + reference=app_backup.slug, stage="addons", ), _make_backup_message_for_assert( action="partial_restore", - reference=addon_backup.slug, + reference=app_backup.slug, stage="supervisor_config", ), _make_backup_message_for_assert( action="partial_restore", - reference=addon_backup.slug, + reference=app_backup.slug, stage="supervisor_config", done=True, progress=100, @@ -1432,24 +1432,24 @@ async def test_restore_progress( @pytest.mark.usefixtures("tmp_supervisor_data", "path_extern") async def test_freeze_thaw( coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, 
container: DockerContainer, ha_ws_client: AsyncMock, ): """Test manual freeze and thaw for external snapshots.""" container.show.return_value["State"]["Status"] = "running" container.show.return_value["State"]["Running"] = True - install_addon_ssh.path_data.mkdir() + install_app_ssh.path_data.mkdir() await coresys.core.set_state(CoreState.RUNNING) coresys.hardware.disk.get_disk_free_space = lambda x: 5000 ha_ws_client.ha_version = AwesomeVersion("2022.1.0") with ( patch.object( - AddonModel, "backup_pre", new=PropertyMock(return_value="pre_backup") + AppModel, "backup_pre", new=PropertyMock(return_value="pre_backup") ), patch.object( - AddonModel, "backup_post", new=PropertyMock(return_value="post_backup") + AppModel, "backup_post", new=PropertyMock(return_value="post_backup") ), ): # Run the freeze @@ -1557,21 +1557,21 @@ async def test_cannot_manually_thaw_normal_freeze(coresys: CoreSys): @pytest.mark.usefixtures("supervisor_internet", "tmp_supervisor_data", "path_extern") async def test_restore_only_reloads_ingress_on_change( - coresys: CoreSys, install_addon_ssh: Addon + coresys: CoreSys, install_app_ssh: App ): """Test restore only tells core to reload ingress when something has changed.""" - install_addon_ssh.path_data.mkdir() + install_app_ssh.path_data.mkdir() await coresys.core.set_state(CoreState.RUNNING) coresys.hardware.disk.get_disk_free_space = lambda x: 5000 backup_no_ingress: Backup = await coresys.backups.do_backup_partial( - addons=["local_ssh"] + apps=["local_ssh"] ) - install_addon_ssh.ingress_panel = True - await install_addon_ssh.save_persist() + install_app_ssh.ingress_panel = True + await install_app_ssh.save_persist() backup_with_ingress: Backup = await coresys.backups.do_backup_partial( - addons=["local_ssh"] + apps=["local_ssh"] ) async def mock_is_running(*_) -> bool: @@ -1579,94 +1579,86 @@ async def mock_is_running(*_) -> bool: with ( patch.object(HomeAssistantCore, "is_running", new=mock_is_running), - patch.object(AddonModel, 
"_validate_availability"), - patch.object(DockerAddon, "attach"), + patch.object(AppModel, "_validate_availability"), + patch.object(DockerApp, "attach"), patch.object(HomeAssistantAPI, "make_request") as make_request, ): make_request.return_value.__aenter__.return_value.status = 200 # Has ingress before and after - not called await coresys.backups.do_restore_partial( - backup_with_ingress, addons=["local_ssh"] + backup_with_ingress, apps=["local_ssh"] ) make_request.assert_not_called() # Restore removes ingress - tell Home Assistant - await coresys.backups.do_restore_partial( - backup_no_ingress, addons=["local_ssh"] - ) + await coresys.backups.do_restore_partial(backup_no_ingress, apps=["local_ssh"]) make_request.assert_called_once_with( "delete", "api/hassio_push/panel/local_ssh" ) # No ingress before or after - not called make_request.reset_mock() - await coresys.backups.do_restore_partial( - backup_no_ingress, addons=["local_ssh"] - ) + await coresys.backups.do_restore_partial(backup_no_ingress, apps=["local_ssh"]) make_request.assert_not_called() # Restore adds ingress - tell Home Assistant await coresys.backups.do_restore_partial( - backup_with_ingress, addons=["local_ssh"] + backup_with_ingress, apps=["local_ssh"] ) make_request.assert_called_once_with("post", "api/hassio_push/panel/local_ssh") @pytest.mark.usefixtures("supervisor_internet", "tmp_supervisor_data", "path_extern") -async def test_restore_new_addon(coresys: CoreSys, install_addon_example: Addon): - """Test restore installing new addon.""" +async def test_restore_new_app(coresys: CoreSys, install_app_example: App): + """Test restore installing new app.""" await coresys.core.set_state(CoreState.RUNNING) coresys.hardware.disk.get_disk_free_space = lambda x: 5000 - assert not install_addon_example.path_data.exists() - assert not install_addon_example.path_config.exists() + assert not install_app_example.path_data.exists() + assert not install_app_example.path_config.exists() - backup: Backup = 
await coresys.backups.do_backup_partial(addons=["local_example"]) - await coresys.addons.uninstall("local_example") - assert "local_example" not in coresys.addons.local + backup: Backup = await coresys.backups.do_backup_partial(apps=["local_example"]) + await coresys.apps.uninstall("local_example") + assert "local_example" not in coresys.apps.local with ( - patch.object(AddonModel, "_validate_availability"), - patch.object(DockerAddon, "attach"), + patch.object(AppModel, "_validate_availability"), + patch.object(DockerApp, "attach"), ): - assert await coresys.backups.do_restore_partial( - backup, addons=["local_example"] - ) + assert await coresys.backups.do_restore_partial(backup, apps=["local_example"]) - assert "local_example" in coresys.addons.local - assert install_addon_example.path_data.exists() - assert install_addon_example.path_config.exists() + assert "local_example" in coresys.apps.local + assert install_app_example.path_data.exists() + assert install_app_example.path_config.exists() @pytest.mark.usefixtures("supervisor_internet", "tmp_supervisor_data", "path_extern") async def test_restore_preserves_data_config( - coresys: CoreSys, install_addon_example: Addon + coresys: CoreSys, install_app_example: App ): """Test restore preserves data and config.""" await coresys.core.set_state(CoreState.RUNNING) coresys.hardware.disk.get_disk_free_space = lambda x: 5000 - install_addon_example.path_data.mkdir() - (test_data := install_addon_example.path_data / "data.txt").touch() - install_addon_example.path_config.mkdir() - (test_config := install_addon_example.path_config / "config.yaml").touch() + install_app_example.path_data.mkdir() + (test_data := install_app_example.path_data / "data.txt").touch() + install_app_example.path_config.mkdir() + (test_config := install_app_example.path_config / "config.yaml").touch() - backup: Backup = await coresys.backups.do_backup_partial(addons=["local_example"]) - (test_config2 := install_addon_example.path_config / 
"config2.yaml").touch() + backup: Backup = await coresys.backups.do_backup_partial(apps=["local_example"]) + (test_config2 := install_app_example.path_config / "config2.yaml").touch() - await coresys.addons.uninstall("local_example") - assert not install_addon_example.path_data.exists() - assert install_addon_example.path_config.exists() + await coresys.apps.uninstall("local_example") + assert not install_app_example.path_data.exists() + assert install_app_example.path_config.exists() assert test_config2.exists() with ( - patch.object(AddonModel, "_validate_availability"), - patch.object(DockerAddon, "attach"), + patch.object(AppModel, "_validate_availability"), + patch.object(DockerApp, "attach"), ): - assert await coresys.backups.do_restore_partial( - backup, addons=["local_example"] - ) + assert await coresys.backups.do_restore_partial(backup, apps=["local_example"]) assert test_data.exists() assert test_config.exists() @@ -2057,25 +2049,25 @@ async def test_backup_remove_one_location_of_multiple(coresys: CoreSys): @pytest.mark.usefixtures("tmp_supervisor_data", "supervisor_internet") -async def test_addon_backup_excludes(coresys: CoreSys, install_addon_example: Addon): - """Test backup excludes option for addons.""" +async def test_app_backup_excludes(coresys: CoreSys, install_app_example: App): + """Test backup excludes option for apps.""" await coresys.core.set_state(CoreState.RUNNING) coresys.hardware.disk.get_disk_free_space = lambda x: 5000 - install_addon_example.path_data.mkdir(parents=True) - (test1 := install_addon_example.path_data / "test1").touch() - (test_dir := install_addon_example.path_data / "test_dir").mkdir() + install_app_example.path_data.mkdir(parents=True) + (test1 := install_app_example.path_data / "test1").touch() + (test_dir := install_app_example.path_data / "test_dir").mkdir() (test2 := test_dir / "test2").touch() (test3 := test_dir / "test3").touch() - install_addon_example.data["backup_exclude"] = ["test1", "*/test2"] - backup = 
await coresys.backups.do_backup_partial(addons=["local_example"]) + install_app_example.data["backup_exclude"] = ["test1", "*/test2"] + backup = await coresys.backups.do_backup_partial(apps=["local_example"]) test1.unlink() test2.unlink() test3.unlink() test_dir.rmdir() - await coresys.backups.do_restore_partial(backup, addons=["local_example"]) + await coresys.backups.do_restore_partial(backup, apps=["local_example"]) assert not test1.exists() assert not test2.exists() assert test_dir.is_dir() @@ -2195,29 +2187,29 @@ async def test_get_upload_path_for_mount_location(coresys: CoreSys): @pytest.mark.usefixtures( - "supervisor_internet", "tmp_supervisor_data", "path_extern", "install_addon_example" + "supervisor_internet", "tmp_supervisor_data", "path_extern", "install_app_example" ) -async def test_backup_addon_skips_uninstalled( +async def test_backup_app_skips_uninstalled( coresys: CoreSys, caplog: pytest.LogCaptureFixture ): - """Test restore installing new addon.""" + """Test restore installing new app.""" await coresys.core.set_state(CoreState.RUNNING) coresys.hardware.disk.get_disk_free_space = lambda x: 5000 - assert "local_example" in coresys.addons.local - orig_store_addons = Backup.store_addons + assert "local_example" in coresys.apps.local + orig_store_apps = Backup.store_apps - async def mock_store_addons(*args, **kwargs): + async def mock_store_apps(*args, **kwargs): # Mock an uninstall during the backup process - await coresys.addons.uninstall("local_example") - await orig_store_addons(*args, **kwargs) + await coresys.apps.uninstall("local_example") + await orig_store_apps(*args, **kwargs) - with patch.object(Backup, "store_addons", new=mock_store_addons): + with patch.object(Backup, "store_apps", new=mock_store_apps): backup: Backup = await coresys.backups.do_backup_partial( - addons=["local_example"], folders=["ssl"] + apps=["local_example"], folders=["ssl"] ) - assert "local_example" not in coresys.addons.local - assert not backup.addons + assert 
"local_example" not in coresys.apps.local + assert not backup.apps assert ( "Skipping backup of app local_example because it has been uninstalled" in caplog.text diff --git a/tests/conftest.py b/tests/conftest.py index 44b7721c3d6..ed7964a6f5e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -26,7 +26,7 @@ from securetar import SecureTarArchive from supervisor import config as su_config -from supervisor.addons.addon import Addon +from supervisor.addons.addon import App from supervisor.addons.validate import SCHEMA_ADDON_SYSTEM from supervisor.api import RestAPI from supervisor.backups.backup import Backup @@ -34,8 +34,8 @@ from supervisor.backups.validate import ALL_FOLDERS from supervisor.bootstrap import initialize_coresys from supervisor.const import ( - ATTR_ADDONS, - ATTR_ADDONS_CUSTOM_LIST, + ATTR_APPS, + ATTR_APPS_CUSTOM_LIST, ATTR_DATE, ATTR_EXCLUDE_DATABASE, ATTR_FOLDERS, @@ -57,7 +57,7 @@ from supervisor.homeassistant.api import APIState from supervisor.host.logs import LogsControl from supervisor.os.manager import OSManager -from supervisor.store.addon import AddonStore +from supervisor.store.addon import AppStore from supervisor.store.repository import Repository from supervisor.utils.dt import utcnow @@ -517,7 +517,7 @@ async def coresys( coresys_obj._config.save_data = AsyncMock() coresys_obj._jobs.save_data = AsyncMock() coresys_obj._resolution.save_data = AsyncMock() - coresys_obj._addons.data.save_data = AsyncMock() + coresys_obj._apps.data.save_data = AsyncMock() coresys_obj._store.save_data = AsyncMock() coresys_obj._mounts.save_data = AsyncMock() @@ -574,7 +574,7 @@ async def coresys( if not request.node.get_closest_marker("no_mock_init_websession"): coresys_obj.init_websession = AsyncMock() - # Don't remove files/folders related to addons and stores + # Don't remove files/folders related to apps and stores with patch("supervisor.store.git.GitRepo.remove"): yield coresys_obj @@ -608,8 +608,8 @@ async def tmp_supervisor_data(coresys: 
CoreSys, tmp_path: Path) -> Path: coresys.config.path_audio.mkdir() coresys.config.path_dns.mkdir() coresys.config.path_share.mkdir() - coresys.config.path_addons_data.mkdir(parents=True) - coresys.config.path_addon_configs.mkdir(parents=True) + coresys.config.path_apps_data.mkdir(parents=True) + coresys.config.path_app_configs.mkdir(parents=True) coresys.config.path_ssl.mkdir() coresys.config.path_core_backup.mkdir(parents=True) coresys.config.path_cid_files.mkdir() @@ -689,9 +689,9 @@ async def api_client( @web.middleware async def _security_middleware(request: web.Request, handler: web.RequestHandler): - """Make request are from Core or specified add-on.""" + """Make request are from Core or specified app.""" if request_from: - request[REQUEST_FROM] = coresys.addons.get(request_from, local_only=True) + request[REQUEST_FROM] = coresys.apps.get(request_from, local_only=True) else: request[REQUEST_FROM] = coresys.homeassistant @@ -749,22 +749,22 @@ def run_supervisor_state(request: pytest.FixtureRequest) -> Generator[MagicMock] @pytest.fixture -def store_addon(coresys: CoreSys, tmp_path, test_repository): - """Store add-on fixture.""" - addon_obj = AddonStore(coresys, "test_store_addon") +def store_app(coresys: CoreSys, tmp_path, test_repository): + """Store app fixture.""" + app_obj = AppStore(coresys, "test_store_addon") - coresys.addons.store[addon_obj.slug] = addon_obj - coresys.store.data.addons[addon_obj.slug] = SCHEMA_ADDON_SYSTEM( + coresys.apps.store[app_obj.slug] = app_obj + coresys.store.data.apps[app_obj.slug] = SCHEMA_ADDON_SYSTEM( load_json_fixture("add-on.json") ) - coresys.store.data.addons[addon_obj.slug]["location"] = tmp_path - yield addon_obj + coresys.store.data.apps[app_obj.slug]["location"] = tmp_path + yield app_obj @pytest.fixture async def test_repository(coresys: CoreSys): - """Test add-on store repository fixture.""" - coresys.config._data[ATTR_ADDONS_CUSTOM_LIST] = [] + """Test app store repository fixture.""" + 
coresys.config._data[ATTR_APPS_CUSTOM_LIST] = [] with ( patch("supervisor.store.git.GitRepo.load", return_value=None), @@ -784,27 +784,27 @@ async def test_repository(coresys: CoreSys): @pytest.fixture -async def install_addon_ssh(coresys: CoreSys, test_repository): - """Install local_ssh add-on.""" - store = coresys.addons.store[TEST_ADDON_SLUG] - await coresys.addons.data.install(store) - coresys.addons.data._data = coresys.addons.data._schema(coresys.addons.data._data) +async def install_app_ssh(coresys: CoreSys, test_repository): + """Install local_ssh app.""" + store = coresys.apps.store[TEST_ADDON_SLUG] + await coresys.apps.data.install(store) + coresys.apps.data._data = coresys.apps.data._schema(coresys.apps.data._data) - addon = Addon(coresys, store.slug) - coresys.addons.local[addon.slug] = addon - yield addon + app = App(coresys, store.slug) + coresys.apps.local[app.slug] = app + yield app @pytest.fixture -async def install_addon_example(coresys: CoreSys, test_repository): - """Install local_example add-on.""" - store = coresys.addons.store["local_example"] - await coresys.addons.data.install(store) - coresys.addons.data._data = coresys.addons.data._schema(coresys.addons.data._data) +async def install_app_example(coresys: CoreSys, test_repository): + """Install local_example app.""" + store = coresys.apps.store["local_example"] + await coresys.apps.data.install(store) + coresys.apps.data._data = coresys.apps.data._schema(coresys.apps.data._data) - addon = Addon(coresys, store.slug) - coresys.addons.local[addon.slug] = addon - yield addon + app = App(coresys, store.slug) + coresys.apps.local[app.slug] = app + yield app @pytest.fixture @@ -816,7 +816,7 @@ async def mock_full_backup(coresys: CoreSys, tmp_path) -> Backup: mock_backup.new("Test", utcnow().isoformat(), BackupType.FULL) mock_backup.repositories = ["https://github.com/awesome-developer/awesome-repo"] mock_backup.docker = {} - mock_backup._data[ATTR_ADDONS] = [ + mock_backup._data[ATTR_APPS] = [ { 
ATTR_SLUG: "local_ssh", ATTR_NAME: "SSH", @@ -843,7 +843,7 @@ async def mock_partial_backup(coresys: CoreSys, tmp_path) -> Backup: mock_backup.new("Test", utcnow().isoformat(), BackupType.PARTIAL) mock_backup.repositories = ["https://github.com/awesome-developer/awesome-repo"] mock_backup.docker = {} - mock_backup._data[ATTR_ADDONS] = [ + mock_backup._data[ATTR_APPS] = [ { ATTR_SLUG: "local_ssh", ATTR_NAME: "SSH", diff --git a/tests/docker/test_addon.py b/tests/docker/test_addon.py index c293e16d5ed..45f1e7a65f9 100644 --- a/tests/docker/test_addon.py +++ b/tests/docker/test_addon.py @@ -1,4 +1,4 @@ -"""Test docker addon setup.""" +"""Test docker app setup.""" import asyncio from dataclasses import replace @@ -12,13 +12,13 @@ import pytest from supervisor.addons import validate as vd -from supervisor.addons.addon import Addon +from supervisor.addons.addon import App from supervisor.addons.model import Data -from supervisor.addons.options import AddonOptions +from supervisor.addons.options import AppOptions from supervisor.const import BusEvent from supervisor.coresys import CoreSys from supervisor.dbus.agent.cgroup import CGroup -from supervisor.docker.addon import DockerAddon +from supervisor.docker.addon import DockerApp from supervisor.docker.const import ( DockerMount, MountBindOptions, @@ -39,29 +39,29 @@ @pytest.fixture(name="addonsdata_system") def fixture_addonsdata_system() -> dict[str, Data]: - """Mock AddonsData.system.""" + """Mock AppsData.system.""" with patch( - "supervisor.addons.data.AddonsData.system", new_callable=PropertyMock + "supervisor.addons.data.AppsData.system", new_callable=PropertyMock ) as mock: yield mock @pytest.fixture(name="addonsdata_user", autouse=True) def fixture_addonsdata_user() -> dict[str, Data]: - """Mock AddonsData.user.""" + """Mock AppsData.user.""" with patch( - "supervisor.addons.data.AddonsData.user", new_callable=PropertyMock + "supervisor.addons.data.AppsData.user", new_callable=PropertyMock ) as mock: 
mock.return_value = MagicMock() yield mock -def get_docker_addon( +def get_docker_app( coresys: CoreSys, addonsdata_system: dict[str, Data], config_file: str | dict[str, Any], -) -> DockerAddon: - """Make and return docker addon object.""" +) -> DockerApp: + """Make and return docker app object.""" config = ( load_json_fixture(config_file) if isinstance(config_file, str) else config_file ) @@ -69,41 +69,35 @@ def get_docker_addon( slug = config.get("slug") addonsdata_system.return_value = {slug: config} - addon = Addon(coresys, config.get("slug")) - docker_addon = DockerAddon(coresys, addon) - return docker_addon + app = App(coresys, config.get("slug")) + docker_app = DockerApp(coresys, app) + return docker_app @pytest.mark.usefixtures("path_extern") def test_base_volumes_included(coresys: CoreSys, addonsdata_system: dict[str, Data]): """Dev and data volumes always included.""" - docker_addon = get_docker_addon( - coresys, addonsdata_system, "basic-addon-config.json" - ) + docker_app = get_docker_app(coresys, addonsdata_system, "basic-addon-config.json") # Dev added as ro with bind-recursive=writable option - assert DEV_MOUNT in docker_addon.mounts + assert DEV_MOUNT in docker_app.mounts # Data added as rw assert ( DockerMount( type=MountType.BIND, - source=docker_addon.addon.path_extern_data.as_posix(), + source=docker_app.app.path_extern_data.as_posix(), target="/data", read_only=False, ) - in docker_addon.mounts + in docker_app.mounts ) @pytest.mark.usefixtures("path_extern") -def test_addon_map_folder_defaults( - coresys: CoreSys, addonsdata_system: dict[str, Data] -): - """Validate defaults for mapped folders in addons.""" - docker_addon = get_docker_addon( - coresys, addonsdata_system, "basic-addon-config.json" - ) +def test_app_map_folder_defaults(coresys: CoreSys, addonsdata_system: dict[str, Data]): + """Validate defaults for mapped folders in apps.""" + docker_app = get_docker_app(coresys, addonsdata_system, "basic-addon-config.json") # Config added and 
is marked rw assert ( DockerMount( @@ -112,7 +106,7 @@ def test_addon_map_folder_defaults( target="/config", read_only=False, ) - in docker_addon.mounts + in docker_app.mounts ) # SSL added and defaults to ro @@ -123,7 +117,7 @@ def test_addon_map_folder_defaults( target="/ssl", read_only=True, ) - in docker_addon.mounts + in docker_app.mounts ) # Media added and propagation set @@ -135,7 +129,7 @@ def test_addon_map_folder_defaults( read_only=True, bind_options=MountBindOptions(propagation=PropagationMode.RSLAVE), ) - in docker_addon.mounts + in docker_app.mounts ) # Share added and propagation set @@ -147,21 +141,21 @@ def test_addon_map_folder_defaults( read_only=True, bind_options=MountBindOptions(propagation=PropagationMode.RSLAVE), ) - in docker_addon.mounts + in docker_app.mounts ) # Backup not added - assert "/backup" not in [mount.target for mount in docker_addon.mounts] + assert "/backup" not in [mount.target for mount in docker_app.mounts] @pytest.mark.usefixtures("path_extern") -def test_addon_map_homeassistant_folder( +def test_app_map_homeassistant_folder( coresys: CoreSys, addonsdata_system: dict[str, Data] ): - """Test mounts for addon which maps homeassistant folder.""" + """Test mounts for app which maps homeassistant folder.""" config = load_json_fixture("addon-config-map-addon_config.json") config["map"].append("homeassistant_config") - docker_addon = get_docker_addon(coresys, addonsdata_system, config) + docker_app = get_docker_app(coresys, addonsdata_system, config) # Home Assistant config folder mounted to /homeassistant, not /config assert ( @@ -171,105 +165,103 @@ def test_addon_map_homeassistant_folder( target="/homeassistant", read_only=True, ) - in docker_addon.mounts + in docker_app.mounts ) @pytest.mark.usefixtures("path_extern") -def test_addon_map_addon_configs_folder( +def test_app_map_app_configs_folder( coresys: CoreSys, addonsdata_system: dict[str, Data] ): - """Test mounts for addon which maps addon configs folder.""" + """Test 
mounts for app which maps app configs folder.""" config = load_json_fixture("addon-config-map-addon_config.json") config["map"].append("all_addon_configs") - docker_addon = get_docker_addon(coresys, addonsdata_system, config) + docker_app = get_docker_app(coresys, addonsdata_system, config) - # Addon configs folder included + # App configs folder included assert ( DockerMount( type=MountType.BIND, - source=coresys.config.path_extern_addon_configs.as_posix(), + source=coresys.config.path_extern_app_configs.as_posix(), target="/addon_configs", read_only=True, ) - in docker_addon.mounts + in docker_app.mounts ) @pytest.mark.usefixtures("path_extern") -def test_addon_map_addon_config_folder( +def test_app_map_app_config_folder( coresys: CoreSys, addonsdata_system: dict[str, Data] ): - """Test mounts for addon which maps its own config folder.""" - docker_addon = get_docker_addon( + """Test mounts for app which maps its own config folder.""" + docker_app = get_docker_app( coresys, addonsdata_system, "addon-config-map-addon_config.json" ) - # Addon config folder included + # App config folder included assert ( DockerMount( type=MountType.BIND, - source=docker_addon.addon.path_extern_config.as_posix(), + source=docker_app.app.path_extern_config.as_posix(), target="/config", read_only=True, ) - in docker_addon.mounts + in docker_app.mounts ) @pytest.mark.usefixtures("path_extern") -def test_addon_map_addon_config_folder_with_custom_target( +def test_app_map_app_config_folder_with_custom_target( coresys: CoreSys, addonsdata_system: dict[str, Data] ): - """Test mounts for addon which maps its own config folder and sets target path.""" + """Test mounts for app which maps its own config folder and sets target path.""" config = load_json_fixture("addon-config-map-addon_config.json") config["map"].remove("addon_config") config["map"].append( {"type": "addon_config", "read_only": False, "path": "/custom/target/path"} ) - docker_addon = get_docker_addon(coresys, addonsdata_system, 
config) + docker_app = get_docker_app(coresys, addonsdata_system, config) - # Addon config folder included + # App config folder included assert ( DockerMount( type=MountType.BIND, - source=docker_addon.addon.path_extern_config.as_posix(), + source=docker_app.app.path_extern_config.as_posix(), target="/custom/target/path", read_only=False, ) - in docker_addon.mounts + in docker_app.mounts ) @pytest.mark.usefixtures("path_extern") -def test_addon_map_data_folder_with_custom_target( +def test_app_map_data_folder_with_custom_target( coresys: CoreSys, addonsdata_system: dict[str, Data] ): - """Test mounts for addon which sets target path for data folder.""" + """Test mounts for app which sets target path for data folder.""" config = load_json_fixture("addon-config-map-addon_config.json") config["map"].append({"type": "data", "path": "/custom/data/path"}) - docker_addon = get_docker_addon(coresys, addonsdata_system, config) + docker_app = get_docker_app(coresys, addonsdata_system, config) - # Addon config folder included + # App config folder included assert ( DockerMount( type=MountType.BIND, - source=docker_addon.addon.path_extern_data.as_posix(), + source=docker_app.app.path_extern_data.as_posix(), target="/custom/data/path", read_only=False, ) - in docker_addon.mounts + in docker_app.mounts ) @pytest.mark.usefixtures("path_extern") -def test_addon_ignore_on_config_map( - coresys: CoreSys, addonsdata_system: dict[str, Data] -): - """Test mounts for addon don't include addon config or homeassistant when config included.""" +def test_app_ignore_on_config_map(coresys: CoreSys, addonsdata_system: dict[str, Data]): + """Test mounts for app don't include app config or homeassistant when config included.""" config = load_json_fixture("basic-addon-config.json") config["map"].extend(["addon_config", "homeassistant_config"]) - docker_addon = get_docker_addon(coresys, addonsdata_system, config) + docker_app = get_docker_app(coresys, addonsdata_system, config) # Config added and 
is marked rw assert ( @@ -279,21 +271,19 @@ def test_addon_ignore_on_config_map( target="/config", read_only=False, ) - in docker_addon.mounts + in docker_app.mounts ) - # Mount for addon's specific config folder omitted since config in map field - assert ( - len([mount for mount in docker_addon.mounts if mount.target == "/config"]) == 1 - ) + # Mount for app's specific config folder omitted since config in map field + assert len([mount for mount in docker_app.mounts if mount.target == "/config"]) == 1 # Home Assistant mount omitted since config in map field - assert "/homeassistant" not in [mount.target for mount in docker_addon.mounts] + assert "/homeassistant" not in [mount.target for mount in docker_app.mounts] @pytest.mark.usefixtures("path_extern") -def test_journald_addon(coresys: CoreSys, addonsdata_system: dict[str, Data]): +def test_journald_app(coresys: CoreSys, addonsdata_system: dict[str, Data]): """Validate volume for journald option.""" - docker_addon = get_docker_addon( + docker_app = get_docker_app( coresys, addonsdata_system, "journald-addon-config.json" ) @@ -304,7 +294,7 @@ def test_journald_addon(coresys: CoreSys, addonsdata_system: dict[str, Data]): target="/var/log/journal", read_only=True, ) - in docker_addon.mounts + in docker_app.mounts ) assert ( DockerMount( @@ -313,41 +303,37 @@ def test_journald_addon(coresys: CoreSys, addonsdata_system: dict[str, Data]): target="/run/log/journal", read_only=True, ) - in docker_addon.mounts + in docker_app.mounts ) @pytest.mark.usefixtures("path_extern") -def test_not_journald_addon(coresys: CoreSys, addonsdata_system: dict[str, Data]): +def test_not_journald_app(coresys: CoreSys, addonsdata_system: dict[str, Data]): """Validate journald option defaults off.""" - docker_addon = get_docker_addon( - coresys, addonsdata_system, "basic-addon-config.json" - ) + docker_app = get_docker_app(coresys, addonsdata_system, "basic-addon-config.json") - assert "/var/log/journal" not in [mount.target for mount in 
docker_addon.mounts] + assert "/var/log/journal" not in [mount.target for mount in docker_app.mounts] @pytest.mark.usefixtures("path_extern", "tmp_supervisor_data") -async def test_addon_run_docker_error( +async def test_app_run_docker_error( coresys: CoreSys, addonsdata_system: dict[str, Data] ): - """Test docker error when addon is run.""" + """Test docker error when app is run.""" await coresys.dbus.timedate.connect(coresys.dbus.bus) coresys.docker.containers.create.side_effect = aiodocker.DockerError( HTTPStatus.NOT_FOUND, {"message": "missing"} ) - docker_addon = get_docker_addon( - coresys, addonsdata_system, "basic-addon-config.json" - ) + docker_app = get_docker_app(coresys, addonsdata_system, "basic-addon-config.json") with ( - patch.object(DockerAddon, "stop"), + patch.object(DockerApp, "stop"), patch.object( - AddonOptions, "validate", new=PropertyMock(return_value=lambda _: None) + AppOptions, "validate", new=PropertyMock(return_value=lambda _: None) ), pytest.raises(DockerNotFound), ): - await docker_addon.run() + await docker_app.run() assert ( Issue(IssueType.MISSING_IMAGE, ContextType.ADDON, reference="test_addon") @@ -356,44 +342,40 @@ async def test_addon_run_docker_error( @pytest.mark.usefixtures("path_extern", "tmp_supervisor_data") -async def test_addon_run_add_host_error( +async def test_app_run_add_host_error( coresys: CoreSys, addonsdata_system: dict[str, Data], capture_exception: Mock ): - """Test error adding host when addon is run.""" + """Test error adding host when app is run.""" await coresys.dbus.timedate.connect(coresys.dbus.bus) - docker_addon = get_docker_addon( - coresys, addonsdata_system, "basic-addon-config.json" - ) + docker_app = get_docker_app(coresys, addonsdata_system, "basic-addon-config.json") with ( - patch.object(DockerAddon, "stop"), + patch.object(DockerApp, "stop"), patch.object( - AddonOptions, "validate", new=PropertyMock(return_value=lambda _: None) + AppOptions, "validate", new=PropertyMock(return_value=lambda 
_: None) ), patch.object(PluginDns, "add_host", side_effect=(err := CoreDNSError())), ): - await docker_addon.run() + await docker_app.run() capture_exception.assert_called_once_with(err) -async def test_addon_stop_delete_host_error( +async def test_app_stop_delete_host_error( coresys: CoreSys, addonsdata_system: dict[str, Data], capture_exception: Mock ): - """Test error deleting host when addon is stopped.""" - docker_addon = get_docker_addon( - coresys, addonsdata_system, "basic-addon-config.json" - ) + """Test error deleting host when app is stopped.""" + docker_app = get_docker_app(coresys, addonsdata_system, "basic-addon-config.json") with ( patch.object( - DockerAddon, + DockerApp, "ip_address", new=PropertyMock(return_value=IPv4Address("172.30.33.1")), ), patch.object(PluginDns, "delete_host", side_effect=(err := CoreDNSError())), ): - await docker_addon.stop() + await docker_app.stop() capture_exception.assert_called_once_with(err) @@ -434,9 +416,9 @@ async def test_addon_stop_delete_host_error( (TEST_SYSFS_PATH, "2", True), ], ) -async def test_addon_new_device( +async def test_app_new_device( coresys: CoreSys, - install_addon_ssh: Addon, + install_app_ssh: App, container: MagicMock, docker: DockerAPI, dev_path: str, @@ -445,16 +427,16 @@ async def test_addon_new_device( ): """Test new device that is listed in static devices.""" coresys.hardware.disk.get_disk_free_space = lambda x: 5000 - install_addon_ssh.data["devices"] = [dev_path] + install_app_ssh.data["devices"] = [dev_path] container.id = 123 docker._info = replace(docker.info, cgroup=cgroup) # pylint: disable=protected-access with ( - patch.object(Addon, "write_options"), + patch.object(App, "write_options"), patch.object(OSManager, "available", new=PropertyMock(return_value=is_os)), patch.object(CGroup, "add_devices_allowed") as add_devices, ): - await install_addon_ssh.start() + await install_app_ssh.start() coresys.bus.fire_event( BusEvent.HARDWARE_NEW_DEVICE, @@ -467,20 +449,20 @@ async def 
test_addon_new_device( @pytest.mark.usefixtures("path_extern", "tmp_supervisor_data") @pytest.mark.parametrize("dev_path", [TEST_DEV_PATH, TEST_SYSFS_PATH]) -async def test_addon_new_device_no_haos( - coresys: CoreSys, install_addon_ssh: Addon, docker: DockerAPI, dev_path: str +async def test_app_new_device_no_haos( + coresys: CoreSys, install_app_ssh: App, docker: DockerAPI, dev_path: str ): """Test new device that is listed in static devices on non HAOS system with CGroup V2.""" coresys.hardware.disk.get_disk_free_space = lambda x: 5000 - install_addon_ssh.data["devices"] = [dev_path] + install_app_ssh.data["devices"] = [dev_path] docker._info = replace(docker.info, cgroup="2") # pylint: disable=protected-access with ( - patch.object(Addon, "write_options"), + patch.object(App, "write_options"), patch.object(OSManager, "available", new=PropertyMock(return_value=False)), patch.object(CGroup, "add_devices_allowed") as add_devices, ): - await install_addon_ssh.start() + await install_app_ssh.start() coresys.bus.fire_event( BusEvent.HARDWARE_NEW_DEVICE, @@ -491,7 +473,7 @@ async def test_addon_new_device_no_haos( add_devices.assert_not_called() # Issue added with hardware event since access cannot be added dynamically - assert install_addon_ssh.device_access_missing_issue in coresys.resolution.issues + assert install_app_ssh.device_access_missing_issue in coresys.resolution.issues assert ( Suggestion( SuggestionType.EXECUTE_RESTART, ContextType.ADDON, reference="local_ssh" @@ -500,21 +482,21 @@ async def test_addon_new_device_no_haos( ) # Stopping and removing the container clears it as access granted on next start - await install_addon_ssh.stop() + await install_app_ssh.stop() assert coresys.resolution.issues == [] assert coresys.resolution.suggestions == [] -async def test_ulimits_integration(coresys: CoreSys, install_addon_ssh: Addon): - """Test ulimits integration with Docker addon.""" - docker_addon = DockerAddon(coresys, install_addon_ssh) +async def 
test_ulimits_integration(coresys: CoreSys, install_app_ssh: App): + """Test ulimits integration with Docker app.""" + docker_app = DockerApp(coresys, install_app_ssh) # Test default case (no ulimits, no realtime) - assert docker_addon.ulimits is None + assert docker_app.ulimits is None # Test with realtime enabled (should have built-in ulimits) - install_addon_ssh.data["realtime"] = True - ulimits = docker_addon.ulimits + install_app_ssh.data["realtime"] = True + ulimits = docker_app.ulimits assert ulimits is not None assert len(ulimits) == 2 # Check for rtprio limit @@ -529,9 +511,9 @@ async def test_ulimits_integration(coresys: CoreSys, install_addon_ssh: Addon): assert memlock_limit.hard == 128 * 1024 * 1024 # Test with configurable ulimits (simple format) - install_addon_ssh.data["realtime"] = False - install_addon_ssh.data["ulimits"] = {"nofile": 65535, "nproc": 32768} - ulimits = docker_addon.ulimits + install_app_ssh.data["realtime"] = False + install_app_ssh.data["ulimits"] = {"nofile": 65535, "nproc": 32768} + ulimits = docker_app.ulimits assert ulimits is not None assert len(ulimits) == 2 @@ -546,11 +528,11 @@ async def test_ulimits_integration(coresys: CoreSys, install_addon_ssh: Addon): assert nproc_limit.hard == 32768 # Test with configurable ulimits (detailed format) - install_addon_ssh.data["ulimits"] = { + install_app_ssh.data["ulimits"] = { "nofile": {"soft": 20000, "hard": 40000}, "memlock": {"soft": 67108864, "hard": 134217728}, } - ulimits = docker_addon.ulimits + ulimits = docker_app.ulimits assert ulimits is not None assert len(ulimits) == 2 @@ -565,12 +547,12 @@ async def test_ulimits_integration(coresys: CoreSys, install_addon_ssh: Addon): assert memlock_limit.hard == 134217728 # Test mixed format and realtime (realtime + custom ulimits) - install_addon_ssh.data["realtime"] = True - install_addon_ssh.data["ulimits"] = { + install_app_ssh.data["realtime"] = True + install_app_ssh.data["ulimits"] = { "nofile": 65535, "core": {"soft": 0, 
"hard": 0}, # Disable core dumps } - ulimits = docker_addon.ulimits + ulimits = docker_app.ulimits assert ulimits is not None assert ( len(ulimits) == 4 diff --git a/tests/docker/test_interface.py b/tests/docker/test_interface.py index 5ab182e362e..bb570ed3fa1 100644 --- a/tests/docker/test_interface.py +++ b/tests/docker/test_interface.py @@ -10,7 +10,7 @@ from awesomeversion import AwesomeVersion import pytest -from supervisor.addons.manager import Addon +from supervisor.addons.manager import App from supervisor.const import BusEvent, CoreState, CpuArch from supervisor.coresys import CoreSys from supervisor.docker.const import ContainerState @@ -346,17 +346,17 @@ async def test_image_pull_fail(coresys: CoreSys, capture_exception: Mock): @pytest.mark.usefixtures("path_extern", "tmp_supervisor_data") async def test_run_missing_image( - coresys: CoreSys, install_addon_ssh: Addon, capture_exception: Mock + coresys: CoreSys, install_app_ssh: App, capture_exception: Mock ): """Test run captures the exception when image is missing.""" coresys.docker.containers.create.side_effect = [ aiodocker.DockerError(HTTPStatus.NOT_FOUND, {"message": "missing"}), MagicMock(), ] - install_addon_ssh.data["image"] = "test_image" + install_app_ssh.data["image"] = "test_image" with pytest.raises(DockerNotFound): - await install_addon_ssh.instance.run() + await install_app_ssh.instance.run() capture_exception.assert_called_once() diff --git a/tests/misc/test_filter_data.py b/tests/misc/test_filter_data.py index 46916a7ba06..05a08566efa 100644 --- a/tests/misc/test_filter_data.py +++ b/tests/misc/test_filter_data.py @@ -7,7 +7,7 @@ import pytest from supervisor.const import SUPERVISOR_VERSION, CoreState -from supervisor.exceptions import AddonConfigurationError +from supervisor.exceptions import AppConfigurationError from supervisor.misc.filter import filter_data from supervisor.resolution.const import ( ContextType, @@ -83,7 +83,7 @@ def sys_env(autouse=True): def 
test_ignored_exception(coresys): """Test ignored exceptions.""" - hint = {"exc_info": (None, AddonConfigurationError(), None)} + hint = {"exc_info": (None, AppConfigurationError(), None)} assert filter_data(coresys, SAMPLE_EVENT, hint) is None diff --git a/tests/misc/test_tasks.py b/tests/misc/test_tasks.py index e3708f0f8d4..1a5c058b717 100644 --- a/tests/misc/test_tasks.py +++ b/tests/misc/test_tasks.py @@ -9,7 +9,7 @@ from awesomeversion import AwesomeVersion import pytest -from supervisor.addons.addon import Addon +from supervisor.addons.addon import App from supervisor.const import ATTR_VERSION_TIMESTAMP, CoreState from supervisor.coresys import CoreSys from supervisor.exceptions import HomeAssistantError @@ -300,34 +300,34 @@ async def test_scheduled_reload_updater_triggers_one_supervisor_update( @pytest.mark.usefixtures("tmp_supervisor_data") -async def test_update_addons_auto_update_success( +async def test_update_apps_auto_update_success( tasks: Tasks, coresys: CoreSys, ha_ws_client: AsyncMock, - install_addon_example: Addon, + install_app_example: App, ): - """Test that an eligible add-on is auto-updated via websocket command.""" + """Test that an eligible app is auto-updated via websocket command.""" await coresys.core.set_state(CoreState.RUNNING) - # Set up the add-on as eligible for auto-update - install_addon_example.auto_update = True - install_addon_example.data_store[ATTR_VERSION_TIMESTAMP] = 0 + # Set up the app as eligible for auto-update + install_app_example.auto_update = True + install_app_example.data_store[ATTR_VERSION_TIMESTAMP] = 0 with patch.object( - Addon, "version", new=PropertyMock(return_value=AwesomeVersion("1.0")) + App, "version", new=PropertyMock(return_value=AwesomeVersion("1.0")) ): - assert install_addon_example.need_update is True - assert install_addon_example.auto_update_available is True + assert install_app_example.need_update is True + assert install_app_example.auto_update_available is True - # Make sure all job events 
from installing the add-on are cleared + # Make sure all job events from installing the app are cleared ha_ws_client.async_send_command.reset_mock() # pylint: disable-next=protected-access - await tasks._update_addons() + await tasks._update_apps() ha_ws_client.async_send_command.assert_any_call( { "type": "hassio/update/addon", - "addon": install_addon_example.slug, + "addon": install_app_example.slug, "backup": True, } ) diff --git a/tests/plugins/test_dns.py b/tests/plugins/test_dns.py index f03e1910468..422aaafd091 100644 --- a/tests/plugins/test_dns.py +++ b/tests/plugins/test_dns.py @@ -96,7 +96,7 @@ async def test_reset(coresys: CoreSys): coresys.plugins.dns.servers = ["dns://1.1.1.1", "dns://8.8.8.8"] coresys.plugins.dns.fallback = False coresys.plugins.dns._loop = True # pylint: disable=protected-access - assert len(coresys.addons.installed) == 0 + assert len(coresys.apps.installed) == 0 with ( patch.object(type(coresys.plugins.dns.hosts), "unlink") as unlink, diff --git a/tests/resolution/check/test_check_addon_pwned.py b/tests/resolution/check/test_check_addon_pwned.py index 81bca5b8f2d..b697f38effd 100644 --- a/tests/resolution/check/test_check_addon_pwned.py +++ b/tests/resolution/check/test_check_addon_pwned.py @@ -1,115 +1,115 @@ -"""Test Check Addon Pwned.""" +"""Test Check App Pwned.""" # pylint: disable=import-error,protected-access from unittest.mock import AsyncMock, patch -from supervisor.const import AddonState, CoreState +from supervisor.const import AppState, CoreState from supervisor.coresys import CoreSys from supervisor.exceptions import PwnedSecret -from supervisor.resolution.checks.addon_pwned import CheckAddonPwned +from supervisor.resolution.checks.addon_pwned import CheckAppPwned from supervisor.resolution.const import IssueType, SuggestionType class TestAddon: - """Test Addon.""" + """Test App.""" slug = "my_test" pwned = set() - state = AddonState.STARTED + state = AppState.STARTED is_installed = True async def test_base(coresys: 
CoreSys): """Test check basics.""" - addon_pwned = CheckAddonPwned(coresys) - assert addon_pwned.slug == "addon_pwned" - assert addon_pwned.enabled + app_pwned = CheckAppPwned(coresys) + assert app_pwned.slug == "addon_pwned" + assert app_pwned.enabled async def test_check(coresys: CoreSys): """Test check.""" - addon_pwned = CheckAddonPwned(coresys) + app_pwned = CheckAppPwned(coresys) await coresys.core.set_state(CoreState.RUNNING) - addon = TestAddon() - coresys.addons.local[addon.slug] = addon + app = TestAddon() + coresys.apps.local[app.slug] = app assert len(coresys.resolution.issues) == 0 coresys.security.verify_secret = AsyncMock(side_effect=PwnedSecret) - await addon_pwned.run_check.__wrapped__(addon_pwned) + await app_pwned.run_check.__wrapped__(app_pwned) assert not coresys.security.verify_secret.called - addon.pwned.add("123456") + app.pwned.add("123456") coresys.security.verify_secret = AsyncMock(return_value=None) - await addon_pwned.run_check.__wrapped__(addon_pwned) + await app_pwned.run_check.__wrapped__(app_pwned) assert coresys.security.verify_secret.called assert len(coresys.resolution.issues) == 0 coresys.security.verify_secret = AsyncMock(side_effect=PwnedSecret) - await addon_pwned.run_check.__wrapped__(addon_pwned) + await app_pwned.run_check.__wrapped__(app_pwned) assert coresys.security.verify_secret.called assert len(coresys.resolution.issues) == 1 assert coresys.resolution.issues[-1].type == IssueType.PWNED - assert coresys.resolution.issues[-1].reference == addon.slug + assert coresys.resolution.issues[-1].reference == app.slug assert coresys.resolution.suggestions[-1].type == SuggestionType.EXECUTE_STOP - assert coresys.resolution.suggestions[-1].reference == addon.slug + assert coresys.resolution.suggestions[-1].reference == app.slug async def test_approve(coresys: CoreSys, supervisor_internet): """Test check.""" - addon_pwned = CheckAddonPwned(coresys) + app_pwned = CheckAppPwned(coresys) await 
coresys.core.set_state(CoreState.RUNNING) - addon = TestAddon() - coresys.addons.local[addon.slug] = addon - addon.pwned.add("123456") + app = TestAddon() + coresys.apps.local[app.slug] = app + app.pwned.add("123456") coresys.security.verify_secret = AsyncMock(side_effect=PwnedSecret) - assert await addon_pwned.approve_check(reference=addon.slug) + assert await app_pwned.approve_check(reference=app.slug) coresys.security.verify_secret = AsyncMock(return_value=None) - assert not await addon_pwned.approve_check(reference=addon.slug) + assert not await app_pwned.approve_check(reference=app.slug) async def test_with_global_disable(coresys: CoreSys, caplog): """Test when pwned is globally disabled.""" coresys.security.pwned = False - addon_pwned = CheckAddonPwned(coresys) + app_pwned = CheckAppPwned(coresys) await coresys.core.set_state(CoreState.RUNNING) - addon = TestAddon() - coresys.addons.local[addon.slug] = addon + app = TestAddon() + coresys.apps.local[app.slug] = app assert len(coresys.resolution.issues) == 0 coresys.security.verify_secret = AsyncMock(side_effect=PwnedSecret) - await addon_pwned.run_check.__wrapped__(addon_pwned) + await app_pwned.run_check.__wrapped__(app_pwned) assert not coresys.security.verify_secret.called assert "Skipping addon_pwned, pwned is globally disabled" in caplog.text async def test_did_run(coresys: CoreSys): """Test that the check ran as expected.""" - addon_pwned = CheckAddonPwned(coresys) - should_run = addon_pwned.states + app_pwned = CheckAppPwned(coresys) + should_run = app_pwned.states should_not_run = [state for state in CoreState if state not in should_run] assert len(should_run) != 0 assert len(should_not_run) != 0 with patch( - "supervisor.resolution.checks.addon_pwned.CheckAddonPwned.run_check", + "supervisor.resolution.checks.addon_pwned.CheckAppPwned.run_check", return_value=None, ) as check: for state in should_run: await coresys.core.set_state(state) - await addon_pwned() + await app_pwned() 
check.assert_called_once() check.reset_mock() for state in should_not_run: await coresys.core.set_state(state) - await addon_pwned() + await app_pwned() check.assert_not_called() check.reset_mock() diff --git a/tests/resolution/check/test_check_deprecated_addon.py b/tests/resolution/check/test_check_deprecated_addon.py index 00380d220b2..611ed3fd9c0 100644 --- a/tests/resolution/check/test_check_deprecated_addon.py +++ b/tests/resolution/check/test_check_deprecated_addon.py @@ -1,75 +1,71 @@ -"""Test check for deprecated addons.""" +"""Test check for deprecated apps.""" from unittest.mock import patch -from supervisor.addons.addon import Addon -from supervisor.const import AddonStage, CoreState +from supervisor.addons.addon import App +from supervisor.const import AppStage, CoreState from supervisor.coresys import CoreSys -from supervisor.resolution.checks.deprecated_addon import CheckDeprecatedAddon +from supervisor.resolution.checks.deprecated_addon import CheckDeprecatedApp from supervisor.resolution.const import ContextType, IssueType async def test_base(coresys: CoreSys): """Test check basics.""" - deprecated_addon = CheckDeprecatedAddon(coresys) - assert deprecated_addon.slug == "deprecated_addon" - assert deprecated_addon.enabled + deprecated_app = CheckDeprecatedApp(coresys) + assert deprecated_app.slug == "deprecated_addon" + assert deprecated_app.enabled -async def test_check(coresys: CoreSys, install_addon_ssh: Addon): - """Test check for deprecated addons.""" - deprecated_addon = CheckDeprecatedAddon(coresys) +async def test_check(coresys: CoreSys, install_app_ssh: App): + """Test check for deprecated apps.""" + deprecated_app = CheckDeprecatedApp(coresys) await coresys.core.set_state(CoreState.SETUP) - await deprecated_addon() + await deprecated_app() assert len(coresys.resolution.issues) == 0 - # Mock test addon as deprecated - install_addon_ssh.data["stage"] = AddonStage.DEPRECATED + # Mock test app as deprecated + install_app_ssh.data["stage"] = 
AppStage.DEPRECATED - await deprecated_addon() + await deprecated_app() assert len(coresys.resolution.issues) == 1 assert coresys.resolution.issues[0].type is IssueType.DEPRECATED_ADDON assert coresys.resolution.issues[0].context is ContextType.ADDON - assert coresys.resolution.issues[0].reference == install_addon_ssh.slug + assert coresys.resolution.issues[0].reference == install_app_ssh.slug assert len(coresys.resolution.suggestions) == 1 -async def test_approve(coresys: CoreSys, install_addon_ssh: Addon): - """Test approve existing deprecated addon issues.""" - deprecated_addon = CheckDeprecatedAddon(coresys) +async def test_approve(coresys: CoreSys, install_app_ssh: App): + """Test approve existing deprecated app issues.""" + deprecated_app = CheckDeprecatedApp(coresys) await coresys.core.set_state(CoreState.SETUP) - assert ( - await deprecated_addon.approve_check(reference=install_addon_ssh.slug) is False - ) + assert await deprecated_app.approve_check(reference=install_app_ssh.slug) is False - # Mock test addon as deprecated - install_addon_ssh.data["stage"] = AddonStage.DEPRECATED + # Mock test app as deprecated + install_app_ssh.data["stage"] = AppStage.DEPRECATED - assert ( - await deprecated_addon.approve_check(reference=install_addon_ssh.slug) is True - ) + assert await deprecated_app.approve_check(reference=install_app_ssh.slug) is True async def test_did_run(coresys: CoreSys): """Test that the check ran as expected.""" - deprecated_addon = CheckDeprecatedAddon(coresys) - should_run = deprecated_addon.states + deprecated_app = CheckDeprecatedApp(coresys) + should_run = deprecated_app.states should_not_run = [state for state in CoreState if state not in should_run] assert should_run == [CoreState.SETUP] assert len(should_not_run) != 0 - with patch.object(CheckDeprecatedAddon, "run_check", return_value=None) as check: + with patch.object(CheckDeprecatedApp, "run_check", return_value=None) as check: for state in should_run: await 
coresys.core.set_state(state) - await deprecated_addon() + await deprecated_app() check.assert_called_once() check.reset_mock() for state in should_not_run: await coresys.core.set_state(state) - await deprecated_addon() + await deprecated_app() check.assert_not_called() check.reset_mock() diff --git a/tests/resolution/check/test_check_deprecated_arch_addon.py b/tests/resolution/check/test_check_deprecated_arch_addon.py index e59f61368e1..bf63c5c58d8 100644 --- a/tests/resolution/check/test_check_deprecated_arch_addon.py +++ b/tests/resolution/check/test_check_deprecated_arch_addon.py @@ -1,63 +1,63 @@ -"""Test check for add-ons with deprecated architectures.""" +"""Test check for apps with deprecated architectures.""" from unittest.mock import patch -from supervisor.addons.addon import Addon -from supervisor.const import AddonStage, CoreState +from supervisor.addons.addon import App +from supervisor.const import AppStage, CoreState from supervisor.coresys import CoreSys -from supervisor.resolution.checks.deprecated_arch_addon import CheckDeprecatedArchAddon +from supervisor.resolution.checks.deprecated_arch_addon import CheckDeprecatedArchApp from supervisor.resolution.const import ContextType, IssueType, SuggestionType async def test_base(coresys: CoreSys): """Test check basics.""" - deprecated_arch_addon = CheckDeprecatedArchAddon(coresys) - assert deprecated_arch_addon.slug == "deprecated_arch_addon" - assert deprecated_arch_addon.enabled + deprecated_arch_app = CheckDeprecatedArchApp(coresys) + assert deprecated_arch_app.slug == "deprecated_arch_addon" + assert deprecated_arch_app.enabled -async def test_check(coresys: CoreSys, install_addon_ssh: Addon): - """Test check for installed add-ons with deprecated architectures.""" - deprecated_arch_addon = CheckDeprecatedArchAddon(coresys) +async def test_check(coresys: CoreSys, install_app_ssh: App): + """Test check for installed apps with deprecated architectures.""" + deprecated_arch_app = 
CheckDeprecatedArchApp(coresys) await coresys.core.set_state(CoreState.SETUP) - await deprecated_arch_addon() + await deprecated_arch_app() assert len(coresys.resolution.issues) == 0 - install_addon_ssh.data["arch"] = ["armv7"] + install_app_ssh.data["arch"] = ["armv7"] - await deprecated_arch_addon() + await deprecated_arch_app() assert len(coresys.resolution.issues) == 1 assert coresys.resolution.issues[0].type is IssueType.DEPRECATED_ARCH_ADDON assert coresys.resolution.issues[0].context is ContextType.ADDON - assert coresys.resolution.issues[0].reference == install_addon_ssh.slug + assert coresys.resolution.issues[0].reference == install_app_ssh.slug assert len(coresys.resolution.suggestions) == 1 assert coresys.resolution.suggestions[0].type is SuggestionType.EXECUTE_REMOVE async def test_check_ignores_mixed_supported_arch( - coresys: CoreSys, install_addon_ssh: Addon + coresys: CoreSys, install_app_ssh: App ): """Test check does not create issue when a supported arch is still present.""" - deprecated_arch_addon = CheckDeprecatedArchAddon(coresys) + deprecated_arch_app = CheckDeprecatedArchApp(coresys) await coresys.core.set_state(CoreState.SETUP) - install_addon_ssh.data["arch"] = ["armv7", "amd64"] + install_app_ssh.data["arch"] = ["armv7", "amd64"] - await deprecated_arch_addon() + await deprecated_arch_app() assert len(coresys.resolution.issues) == 0 -async def test_check_deprecated_machine(coresys: CoreSys, install_addon_ssh: Addon): - """Test check for installed add-ons using deprecated machine entries.""" - deprecated_arch_addon = CheckDeprecatedArchAddon(coresys) +async def test_check_deprecated_machine(coresys: CoreSys, install_app_ssh: App): + """Test check for installed apps using deprecated machine entries.""" + deprecated_arch_app = CheckDeprecatedArchApp(coresys) await coresys.core.set_state(CoreState.SETUP) - install_addon_ssh.data["machine"] = ["raspberrypi3"] + install_app_ssh.data["machine"] = ["raspberrypi3"] - await deprecated_arch_addon() 
+ await deprecated_arch_app() assert len(coresys.resolution.issues) == 1 assert coresys.resolution.issues[0].type is IssueType.DEPRECATED_ARCH_ADDON @@ -65,100 +65,90 @@ async def test_check_deprecated_machine(coresys: CoreSys, install_addon_ssh: Add async def test_check_ignores_mixed_supported_machine( - coresys: CoreSys, install_addon_ssh: Addon + coresys: CoreSys, install_app_ssh: App ): """Test check does not create issue when current machine is still supported.""" - deprecated_arch_addon = CheckDeprecatedArchAddon(coresys) + deprecated_arch_app = CheckDeprecatedArchApp(coresys) await coresys.core.set_state(CoreState.SETUP) - install_addon_ssh.data["machine"] = ["raspberrypi3", install_addon_ssh.sys_machine] + install_app_ssh.data["machine"] = ["raspberrypi3", install_app_ssh.sys_machine] - await deprecated_arch_addon() + await deprecated_arch_app() assert len(coresys.resolution.issues) == 0 -async def test_check_ignores_stage_deprecated( - coresys: CoreSys, install_addon_ssh: Addon -): - """Test check does not create arch repair issue for already deprecated add-ons.""" - deprecated_arch_addon = CheckDeprecatedArchAddon(coresys) +async def test_check_ignores_stage_deprecated(coresys: CoreSys, install_app_ssh: App): + """Test check does not create arch repair issue for already deprecated apps.""" + deprecated_arch_app = CheckDeprecatedArchApp(coresys) await coresys.core.set_state(CoreState.SETUP) - install_addon_ssh.data["stage"] = AddonStage.DEPRECATED - install_addon_ssh.data["arch"] = ["armv7"] + install_app_ssh.data["stage"] = AppStage.DEPRECATED + install_app_ssh.data["arch"] = ["armv7"] - await deprecated_arch_addon() + await deprecated_arch_app() assert len(coresys.resolution.issues) == 0 -async def test_approve(coresys: CoreSys, install_addon_ssh: Addon): - """Test approve existing deprecated arch addon issues.""" - deprecated_arch_addon = CheckDeprecatedArchAddon(coresys) +async def test_approve(coresys: CoreSys, install_app_ssh: App): + """Test approve 
existing deprecated arch app issues.""" + deprecated_arch_app = CheckDeprecatedArchApp(coresys) await coresys.core.set_state(CoreState.SETUP) assert ( - await deprecated_arch_addon.approve_check(reference=install_addon_ssh.slug) - is False + await deprecated_arch_app.approve_check(reference=install_app_ssh.slug) is False ) - install_addon_ssh.data["arch"] = ["armv7"] + install_app_ssh.data["arch"] = ["armv7"] assert ( - await deprecated_arch_addon.approve_check(reference=install_addon_ssh.slug) - is True + await deprecated_arch_app.approve_check(reference=install_app_ssh.slug) is True ) - install_addon_ssh.data["arch"] = ["armv7", "amd64"] + install_app_ssh.data["arch"] = ["armv7", "amd64"] assert ( - await deprecated_arch_addon.approve_check(reference=install_addon_ssh.slug) - is False + await deprecated_arch_app.approve_check(reference=install_app_ssh.slug) is False ) - install_addon_ssh.data["arch"] = ["amd64"] - install_addon_ssh.data["machine"] = ["raspberrypi3"] + install_app_ssh.data["arch"] = ["amd64"] + install_app_ssh.data["machine"] = ["raspberrypi3"] assert ( - await deprecated_arch_addon.approve_check(reference=install_addon_ssh.slug) - is True + await deprecated_arch_app.approve_check(reference=install_app_ssh.slug) is True ) - install_addon_ssh.data["machine"] = ["raspberrypi3", install_addon_ssh.sys_machine] + install_app_ssh.data["machine"] = ["raspberrypi3", install_app_ssh.sys_machine] assert ( - await deprecated_arch_addon.approve_check(reference=install_addon_ssh.slug) - is False + await deprecated_arch_app.approve_check(reference=install_app_ssh.slug) is False ) - install_addon_ssh.data["stage"] = AddonStage.DEPRECATED + install_app_ssh.data["stage"] = AppStage.DEPRECATED assert ( - await deprecated_arch_addon.approve_check(reference=install_addon_ssh.slug) - is False + await deprecated_arch_app.approve_check(reference=install_app_ssh.slug) is False ) async def test_did_run(coresys: CoreSys): """Test that the check ran as expected.""" - 
deprecated_arch_addon = CheckDeprecatedArchAddon(coresys) - should_run = deprecated_arch_addon.states + deprecated_arch_app = CheckDeprecatedArchApp(coresys) + should_run = deprecated_arch_app.states should_not_run = [state for state in CoreState if state not in should_run] assert should_run == [CoreState.SETUP, CoreState.RUNNING] assert len(should_not_run) != 0 - with patch.object( - CheckDeprecatedArchAddon, "run_check", return_value=None - ) as check: + with patch.object(CheckDeprecatedArchApp, "run_check", return_value=None) as check: for state in should_run: await coresys.core.set_state(state) - await deprecated_arch_addon() + await deprecated_arch_app() check.assert_called_once() check.reset_mock() for state in should_not_run: await coresys.core.set_state(state) - await deprecated_arch_addon() + await deprecated_arch_app() check.assert_not_called() check.reset_mock() diff --git a/tests/resolution/check/test_check_detached_addon_missing.py b/tests/resolution/check/test_check_detached_addon_missing.py index 72067ce1e38..be2383fd8ab 100644 --- a/tests/resolution/check/test_check_detached_addon_missing.py +++ b/tests/resolution/check/test_check_detached_addon_missing.py @@ -1,87 +1,82 @@ -"""Test check for detached addons due to repo missing.""" +"""Test check for detached apps due to repo missing.""" from unittest.mock import patch -from supervisor.addons.addon import Addon +from supervisor.addons.addon import App from supervisor.const import CoreState from supervisor.coresys import CoreSys -from supervisor.resolution.checks.detached_addon_missing import ( - CheckDetachedAddonMissing, -) +from supervisor.resolution.checks.detached_addon_missing import CheckDetachedAppMissing from supervisor.resolution.const import ContextType, IssueType async def test_base(coresys: CoreSys): """Test check basics.""" - detached_addon_missing = CheckDetachedAddonMissing(coresys) - assert detached_addon_missing.slug == "detached_addon_missing" - assert 
detached_addon_missing.enabled + detached_app_missing = CheckDetachedAppMissing(coresys) + assert detached_app_missing.slug == "detached_addon_missing" + assert detached_app_missing.enabled -async def test_check(coresys: CoreSys, install_addon_ssh: Addon): - """Test check for detached addons.""" - detached_addon_missing = CheckDetachedAddonMissing(coresys) +async def test_check(coresys: CoreSys, install_app_ssh: App): + """Test check for detached apps.""" + detached_app_missing = CheckDetachedAppMissing(coresys) await coresys.core.set_state(CoreState.SETUP) - await detached_addon_missing() + await detached_app_missing() assert len(coresys.resolution.issues) == 0 - # Mock test addon was been installed from a now non-existent store - install_addon_ssh.slug = "abc123_ssh" - coresys.addons.data.system["abc123_ssh"] = coresys.addons.data.system["local_ssh"] - coresys.addons.local["abc123_ssh"] = coresys.addons.local["local_ssh"] - install_addon_ssh.data["repository"] = "abc123" + # Mock test app has been installed from a now non-existent store + install_app_ssh.slug = "abc123_ssh" + coresys.apps.data.system["abc123_ssh"] = coresys.apps.data.system["local_ssh"] + coresys.apps.local["abc123_ssh"] = coresys.apps.local["local_ssh"] + install_app_ssh.data["repository"] = "abc123" - await detached_addon_missing() + await detached_app_missing() assert len(coresys.resolution.issues) == 1 assert coresys.resolution.issues[0].type is IssueType.DETACHED_ADDON_MISSING assert coresys.resolution.issues[0].context is ContextType.ADDON - assert coresys.resolution.issues[0].reference == install_addon_ssh.slug + assert coresys.resolution.issues[0].reference == install_app_ssh.slug assert len(coresys.resolution.suggestions) == 0 -async def test_approve(coresys: CoreSys, install_addon_ssh: Addon): - """Test approve existing detached addon issues.""" - detached_addon_missing = CheckDetachedAddonMissing(coresys) +async def test_approve(coresys: CoreSys, install_app_ssh: App): + """Test 
approve existing detached app issues.""" + detached_app_missing = CheckDetachedAppMissing(coresys) await coresys.core.set_state(CoreState.SETUP) assert ( - await detached_addon_missing.approve_check(reference=install_addon_ssh.slug) + await detached_app_missing.approve_check(reference=install_app_ssh.slug) is False ) - # Mock test addon was been installed from a now non-existent store - install_addon_ssh.slug = "abc123_ssh" - coresys.addons.data.system["abc123_ssh"] = coresys.addons.data.system["local_ssh"] - coresys.addons.local["abc123_ssh"] = coresys.addons.local["local_ssh"] - install_addon_ssh.data["repository"] = "abc123" + # Mock test app has been installed from a now non-existent store + install_app_ssh.slug = "abc123_ssh" + coresys.apps.data.system["abc123_ssh"] = coresys.apps.data.system["local_ssh"] + coresys.apps.local["abc123_ssh"] = coresys.apps.local["local_ssh"] + install_app_ssh.data["repository"] = "abc123" assert ( - await detached_addon_missing.approve_check(reference=install_addon_ssh.slug) - is True + await detached_app_missing.approve_check(reference=install_app_ssh.slug) is True ) async def test_did_run(coresys: CoreSys): """Test that the check ran as expected.""" - detached_addon_missing = CheckDetachedAddonMissing(coresys) - should_run = detached_addon_missing.states + detached_app_missing = CheckDetachedAppMissing(coresys) + should_run = detached_app_missing.states should_not_run = [state for state in CoreState if state not in should_run] assert should_run == [CoreState.SETUP] assert len(should_not_run) != 0 - with patch.object( - CheckDetachedAddonMissing, "run_check", return_value=None - ) as check: + with patch.object(CheckDetachedAppMissing, "run_check", return_value=None) as check: for state in should_run: await coresys.core.set_state(state) - await detached_addon_missing() + await detached_app_missing() check.assert_called_once() check.reset_mock() for state in should_not_run: await coresys.core.set_state(state) - await 
detached_addon_missing() + await detached_app_missing() check.assert_not_called() check.reset_mock() diff --git a/tests/resolution/check/test_check_detached_addon_removed.py b/tests/resolution/check/test_check_detached_addon_removed.py index 02e96f443c7..ef2f6fabb5e 100644 --- a/tests/resolution/check/test_check_detached_addon_removed.py +++ b/tests/resolution/check/test_check_detached_addon_removed.py @@ -1,98 +1,91 @@ -"""Test check for detached addons due to removal from repo.""" +"""Test check for detached apps due to removal from repo.""" from pathlib import Path from unittest.mock import PropertyMock, patch -from supervisor.addons.addon import Addon +from supervisor.addons.addon import App from supervisor.config import CoreConfig from supervisor.const import CoreState from supervisor.coresys import CoreSys -from supervisor.resolution.checks.detached_addon_removed import ( - CheckDetachedAddonRemoved, -) +from supervisor.resolution.checks.detached_addon_removed import CheckDetachedAppRemoved from supervisor.resolution.const import ContextType, IssueType, SuggestionType async def test_base(coresys: CoreSys): """Test check basics.""" - detached_addon_removed = CheckDetachedAddonRemoved(coresys) - assert detached_addon_removed.slug == "detached_addon_removed" - assert detached_addon_removed.enabled + detached_app_removed = CheckDetachedAppRemoved(coresys) + assert detached_app_removed.slug == "detached_addon_removed" + assert detached_app_removed.enabled -async def test_check( - coresys: CoreSys, install_addon_ssh: Addon, tmp_supervisor_data: Path -): - """Test check for detached addons.""" - detached_addon_removed = CheckDetachedAddonRemoved(coresys) +async def test_check(coresys: CoreSys, install_app_ssh: App, tmp_supervisor_data: Path): + """Test check for detached apps.""" + detached_app_removed = CheckDetachedAppRemoved(coresys) await coresys.core.set_state(CoreState.SETUP) - await detached_addon_removed() + await detached_app_removed() assert 
len(coresys.resolution.issues) == 0 assert len(coresys.resolution.suggestions) == 0 - (addons_dir := tmp_supervisor_data / "addons" / "local").mkdir() + (apps_dir := tmp_supervisor_data / "addons" / "local").mkdir() with patch.object( - CoreConfig, "path_addons_local", new=PropertyMock(return_value=addons_dir) + CoreConfig, "path_apps_local", new=PropertyMock(return_value=apps_dir) ): await coresys.store.load() - await detached_addon_removed() + await detached_app_removed() assert len(coresys.resolution.issues) == 1 assert coresys.resolution.issues[0].type is IssueType.DETACHED_ADDON_REMOVED assert coresys.resolution.issues[0].context is ContextType.ADDON - assert coresys.resolution.issues[0].reference == install_addon_ssh.slug + assert coresys.resolution.issues[0].reference == install_app_ssh.slug assert len(coresys.resolution.suggestions) == 1 assert coresys.resolution.suggestions[0].type is SuggestionType.EXECUTE_REMOVE assert coresys.resolution.suggestions[0].context is ContextType.ADDON - assert coresys.resolution.suggestions[0].reference == install_addon_ssh.slug + assert coresys.resolution.suggestions[0].reference == install_app_ssh.slug async def test_approve( - coresys: CoreSys, install_addon_ssh: Addon, tmp_supervisor_data: Path + coresys: CoreSys, install_app_ssh: App, tmp_supervisor_data: Path ): - """Test approve existing detached addon issues.""" - detached_addon_removed = CheckDetachedAddonRemoved(coresys) + """Test approve existing detached app issues.""" + detached_app_removed = CheckDetachedAppRemoved(coresys) await coresys.core.set_state(CoreState.SETUP) assert ( - await detached_addon_removed.approve_check(reference=install_addon_ssh.slug) + await detached_app_removed.approve_check(reference=install_app_ssh.slug) is False ) - (addons_dir := tmp_supervisor_data / "addons" / "local").mkdir() + (apps_dir := tmp_supervisor_data / "addons" / "local").mkdir() with patch.object( - CoreConfig, "path_addons_local", 
new=PropertyMock(return_value=addons_dir) + CoreConfig, "path_apps_local", new=PropertyMock(return_value=apps_dir) ): await coresys.store.load() assert ( - await detached_addon_removed.approve_check(reference=install_addon_ssh.slug) - is True + await detached_app_removed.approve_check(reference=install_app_ssh.slug) is True ) async def test_did_run(coresys: CoreSys): """Test that the check ran as expected.""" - detached_addon_removed = CheckDetachedAddonRemoved(coresys) - should_run = detached_addon_removed.states + detached_app_removed = CheckDetachedAppRemoved(coresys) + should_run = detached_app_removed.states should_not_run = [state for state in CoreState if state not in should_run] assert should_run == [CoreState.SETUP] assert len(should_not_run) != 0 - with patch.object( - CheckDetachedAddonRemoved, "run_check", return_value=None - ) as check: + with patch.object(CheckDetachedAppRemoved, "run_check", return_value=None) as check: for state in should_run: await coresys.core.set_state(state) - await detached_addon_removed() + await detached_app_removed() check.assert_called_once() check.reset_mock() for state in should_not_run: await coresys.core.set_state(state) - await detached_addon_removed() + await detached_app_removed() check.assert_not_called() check.reset_mock() diff --git a/tests/resolution/check/test_check_docker_config.py b/tests/resolution/check/test_check_docker_config.py index 709e4bcf850..e977d103900 100644 --- a/tests/resolution/check/test_check_docker_config.py +++ b/tests/resolution/check/test_check_docker_config.py @@ -5,7 +5,7 @@ from aiodocker.containers import DockerContainer import pytest -from supervisor.addons.addon import Addon +from supervisor.addons.addon import App from supervisor.const import CoreState from supervisor.coresys import CoreSys from supervisor.docker.interface import DockerInterface @@ -83,7 +83,7 @@ async def test_base(coresys: CoreSys): @pytest.mark.parametrize("folder", ["media", "share"]) 
-@pytest.mark.usefixtures("install_addon_ssh") +@pytest.mark.usefixtures("install_app_ssh") async def test_check(docker: DockerAPI, coresys: CoreSys, folder: str): """Test check reports issue when containers have incorrect config.""" docker.containers.get = _make_mock_container_get( @@ -94,7 +94,7 @@ async def test_check(docker: DockerAPI, coresys: CoreSys, folder: str): with patch.object(DockerInterface, "is_running", return_value=True): await coresys.plugins.load() await coresys.homeassistant.load() - await coresys.addons.load() + await coresys.apps.load() docker_config = CheckDockerConfig(coresys) assert not coresys.resolution.issues @@ -146,7 +146,7 @@ async def test_check(docker: DockerAPI, coresys: CoreSys, folder: str): with patch.object(DockerInterface, "is_running", return_value=True): await coresys.plugins.load() await coresys.homeassistant.load() - await coresys.addons.load() + await coresys.apps.load() assert not await docker_config.approve_check() assert len(coresys.resolution.issues) == 1 @@ -157,13 +157,13 @@ async def test_check(docker: DockerAPI, coresys: CoreSys, folder: str): @pytest.mark.parametrize("folder", ["media", "share"]) -async def test_addon_volume_mount_not_flagged( - docker: DockerAPI, coresys: CoreSys, install_addon_ssh: Addon, folder: str +async def test_app_volume_mount_not_flagged( + docker: DockerAPI, coresys: CoreSys, install_app_ssh: App, folder: str ): - """Test that add-on with VOLUME mount to media/share but not in config is not flagged.""" - # Create an add-on that doesn't have media/share in its mapping configuration - # Remove the mapping from the addon configuration - install_addon_ssh.data["map"] = [ + """Test that app with VOLUME mount to media/share but not in config is not flagged.""" + # Create an app that doesn't have media/share in its mapping configuration + # Remove the mapping from the app configuration + install_app_ssh.data["map"] = [ {"type": "config", "read_only": False}, {"type": "ssl", "read_only": True}, 
] # No media/share @@ -177,22 +177,22 @@ async def test_addon_volume_mount_not_flagged( with patch.object(DockerInterface, "is_running", return_value=True): await coresys.plugins.load() await coresys.homeassistant.load() - await coresys.addons.load() + await coresys.apps.load() docker_config = CheckDockerConfig(coresys) assert not coresys.resolution.issues assert not coresys.resolution.suggestions - # Run check - should NOT create issue for add-on since mount wasn't requested + # Run check - should NOT create issue for app since mount wasn't requested await docker_config.run_check() - # Should not create addon issue for VOLUME mounts not in config - addon_issues = [ + # Should not create app issue for VOLUME mounts not in config + app_issues = [ issue for issue in coresys.resolution.issues if issue.context == ContextType.ADDON and issue.reference == "local_ssh" ] - assert len(addon_issues) == 0, ( + assert len(app_issues) == 0, ( "App should not be flagged for VOLUME mounts not in config" ) @@ -206,13 +206,13 @@ async def test_addon_volume_mount_not_flagged( @pytest.mark.parametrize("folder", ["media", "share"]) -@pytest.mark.usefixtures("install_addon_ssh") -async def test_addon_configured_mount_still_flagged( +@pytest.mark.usefixtures("install_app_ssh") +async def test_app_configured_mount_still_flagged( docker: DockerAPI, coresys: CoreSys, folder: str ): - """Test that add-on with configured media/share mount is still flagged when propagation wrong.""" + """Test that app with configured media/share mount is still flagged when propagation wrong.""" # Keep the original configuration which includes media/share - # SSH addon config already has media:rw and share:rw + # SSH app config already has media:rw and share:rw # Mock container that has supervisor-managed mount with wrong propagation mount = { @@ -243,34 +243,34 @@ async def mock_container_get(name): with patch.object(DockerInterface, "is_running", return_value=True): await coresys.plugins.load() await 
coresys.homeassistant.load() - await coresys.addons.load() + await coresys.apps.load() docker_config = CheckDockerConfig(coresys) assert not coresys.resolution.issues - # Run check - should create issue for add-on since mount was requested in config + # Run check - should create issue for app since mount was requested in config await docker_config.run_check() - # Should have addon issue since the mount was configured - addon_issues = [ + # Should have app issue since the mount was configured + app_issues = [ issue for issue in coresys.resolution.issues if issue.context == ContextType.ADDON and issue.reference == "local_ssh" ] - assert len(addon_issues) == 1, ( + assert len(app_issues) == 1, ( "App should be flagged for configured mounts with wrong propagation" ) @pytest.mark.parametrize("folder", ["media", "share"]) -async def test_addon_custom_target_path_flagged( - docker: DockerAPI, coresys: CoreSys, install_addon_ssh: Addon, folder: str +async def test_app_custom_target_path_flagged( + docker: DockerAPI, coresys: CoreSys, install_app_ssh: App, folder: str ): - """Test that add-on with custom target path for media/share is properly checked.""" - # Configure add-on with custom target path + """Test that app with custom target path for media/share is properly checked.""" + # Configure app with custom target path custom_path = f"/custom/{folder}" mapping_type = "media" if folder == "media" else "share" - install_addon_ssh.data["map"] = [ + install_app_ssh.data["map"] = [ {"type": mapping_type, "read_only": False, "path": custom_path}, ] @@ -302,21 +302,21 @@ async def mock_container_get(name: str) -> MagicMock: with patch.object(DockerInterface, "is_running", return_value=True): await coresys.plugins.load() await coresys.homeassistant.load() - await coresys.addons.load() + await coresys.apps.load() docker_config = CheckDockerConfig(coresys) assert not coresys.resolution.issues - # Run check - should create issue for add-on with custom target path + # Run check - 
should create issue for app with custom target path await docker_config.run_check() - # Should have addon issue since the mount with custom path was configured - addon_issues = [ + # Should have app issue since the mount with custom path was configured + app_issues = [ issue for issue in coresys.resolution.issues if issue.context == ContextType.ADDON and issue.reference == "local_ssh" ] - assert len(addon_issues) == 1, ( + assert len(app_issues) == 1, ( "App should be flagged for configured mounts with custom paths and wrong propagation" ) diff --git a/tests/resolution/evaluation/test_restart_policy.py b/tests/resolution/evaluation/test_restart_policy.py index 9ba6fb40c65..da3062ed93c 100644 --- a/tests/resolution/evaluation/test_restart_policy.py +++ b/tests/resolution/evaluation/test_restart_policy.py @@ -5,7 +5,7 @@ from aiodocker.containers import DockerContainer from awesomeversion import AwesomeVersion -from supervisor.addons.addon import Addon +from supervisor.addons.addon import App from supervisor.const import CoreState from supervisor.coresys import CoreSys from supervisor.resolution.evaluations.restart_policy import EvaluateRestartPolicy @@ -15,7 +15,7 @@ TEST_VERSION = AwesomeVersion("1.0.0") -async def test_evaluation(coresys: CoreSys, install_addon_ssh: Addon): +async def test_evaluation(coresys: CoreSys, install_app_ssh: App): """Test evaluation.""" restart_policy = EvaluateRestartPolicy(coresys) await coresys.core.set_state(CoreState.RUNNING) @@ -26,30 +26,30 @@ async def test_evaluation(coresys: CoreSys, install_addon_ssh: Addon): no_restart_attrs = load_json_fixture("container_attrs.json") always_restart_attrs = load_json_fixture("container_attrs.json") always_restart_attrs["HostConfig"]["RestartPolicy"]["Name"] = "always" - addon_attrs = no_restart_attrs + app_attrs = no_restart_attrs observer_attrs = always_restart_attrs async def get_container(name: str) -> DockerContainer: meta = MagicMock(spec=DockerContainer) meta.show.return_value = ( - 
observer_attrs if name == "hassio_observer" else addon_attrs + observer_attrs if name == "hassio_observer" else app_attrs ) return meta coresys.docker.containers.get = get_container await coresys.plugins.observer.instance.attach(TEST_VERSION) - await install_addon_ssh.instance.attach(TEST_VERSION) + await install_app_ssh.instance.attach(TEST_VERSION) await restart_policy() assert restart_policy.reason not in coresys.resolution.unsupported - addon_attrs = always_restart_attrs - await install_addon_ssh.instance.attach(TEST_VERSION) + app_attrs = always_restart_attrs + await install_app_ssh.instance.attach(TEST_VERSION) await restart_policy() assert restart_policy.reason in coresys.resolution.unsupported - addon_attrs = no_restart_attrs - await install_addon_ssh.instance.attach(TEST_VERSION) + app_attrs = no_restart_attrs + await install_app_ssh.instance.attach(TEST_VERSION) await restart_policy() assert restart_policy.reason not in coresys.resolution.unsupported diff --git a/tests/resolution/fixup/test_addon_disable_boot.py b/tests/resolution/fixup/test_addon_disable_boot.py index e89cc9068c1..7192c5c4e43 100644 --- a/tests/resolution/fixup/test_addon_disable_boot.py +++ b/tests/resolution/fixup/test_addon_disable_boot.py @@ -1,40 +1,40 @@ -"""Test fixup addon disable boot.""" +"""Test fixup app disable boot.""" -from supervisor.addons.addon import Addon -from supervisor.const import AddonBoot +from supervisor.addons.addon import App +from supervisor.const import AppBoot from supervisor.coresys import CoreSys from supervisor.resolution.const import SuggestionType -from supervisor.resolution.fixups.addon_disable_boot import FixupAddonDisableBoot +from supervisor.resolution.fixups.addon_disable_boot import FixupAppDisableBoot from tests.addons.test_manager import BOOT_FAIL_ISSUE -async def test_fixup(coresys: CoreSys, install_addon_ssh: Addon): +async def test_fixup(coresys: CoreSys, install_app_ssh: App): """Test fixup disables boot.""" - install_addon_ssh.boot = 
AddonBoot.AUTO - addon_disable_boot = FixupAddonDisableBoot(coresys) - assert addon_disable_boot.auto is False + install_app_ssh.boot = AppBoot.AUTO + app_disable_boot = FixupAppDisableBoot(coresys) + assert app_disable_boot.auto is False coresys.resolution.add_issue( BOOT_FAIL_ISSUE, suggestions=[SuggestionType.DISABLE_BOOT], ) - await addon_disable_boot() + await app_disable_boot() - assert install_addon_ssh.boot == AddonBoot.MANUAL + assert install_app_ssh.boot == AppBoot.MANUAL assert not coresys.resolution.issues assert not coresys.resolution.suggestions -async def test_fixup_no_addon(coresys: CoreSys): - """Test fixup dismisses if addon is missing.""" - addon_disable_boot = FixupAddonDisableBoot(coresys) +async def test_fixup_no_app(coresys: CoreSys): + """Test fixup dismisses if app is missing.""" + app_disable_boot = FixupAppDisableBoot(coresys) coresys.resolution.add_issue( BOOT_FAIL_ISSUE, suggestions=[SuggestionType.DISABLE_BOOT], ) - await addon_disable_boot() + await app_disable_boot() assert not coresys.resolution.issues assert not coresys.resolution.suggestions diff --git a/tests/resolution/fixup/test_addon_execute_rebuild.py b/tests/resolution/fixup/test_addon_execute_rebuild.py index 491664d5beb..d4401bc54df 100644 --- a/tests/resolution/fixup/test_addon_execute_rebuild.py +++ b/tests/resolution/fixup/test_addon_execute_rebuild.py @@ -9,12 +9,12 @@ from aiodocker.containers import DockerContainer import pytest -from supervisor.addons.addon import Addon +from supervisor.addons.addon import App from supervisor.coresys import CoreSys from supervisor.docker.interface import DockerInterface from supervisor.docker.manager import DockerAPI from supervisor.resolution.const import ContextType, IssueType, SuggestionType -from supervisor.resolution.fixups.addon_execute_rebuild import FixupAddonExecuteRebuild +from supervisor.resolution.fixups.addon_execute_rebuild import FixupAppExecuteRebuild def make_mock_container_get( @@ -31,14 +31,14 @@ async def 
mock_container_get(name) -> DockerContainer: return mock_container_get -@pytest.mark.usefixtures("install_addon_ssh") +@pytest.mark.usefixtures("install_app_ssh") async def test_fixup(docker: DockerAPI, coresys: CoreSys): - """Test fixup rebuilds addon's container.""" + """Test fixup rebuilds app's container.""" docker.containers.get = make_mock_container_get("running") - addon_execute_rebuild = FixupAddonExecuteRebuild(coresys) + app_execute_rebuild = FixupAppExecuteRebuild(coresys) - assert addon_execute_rebuild.auto is False + assert app_execute_rebuild.auto is False coresys.resolution.create_issue( IssueType.DOCKER_CONFIG, @@ -46,22 +46,22 @@ async def test_fixup(docker: DockerAPI, coresys: CoreSys): reference="local_ssh", suggestions=[SuggestionType.EXECUTE_REBUILD], ) - with patch.object(Addon, "restart", return_value=asyncio.sleep(0)) as restart: - await addon_execute_rebuild() + with patch.object(App, "restart", return_value=asyncio.sleep(0)) as restart: + await app_execute_rebuild() restart.assert_called_once() assert not coresys.resolution.issues assert not coresys.resolution.suggestions -@pytest.mark.usefixtures("install_addon_ssh") +@pytest.mark.usefixtures("install_app_ssh") async def test_fixup_stopped_core( docker: DockerAPI, coresys: CoreSys, caplog: pytest.LogCaptureFixture ): - """Test fixup just removes addon's container when it is stopped.""" + """Test fixup just removes app's container when it is stopped.""" caplog.clear() docker.containers.get = make_mock_container_get("stopped") - addon_execute_rebuild = FixupAddonExecuteRebuild(coresys) + app_execute_rebuild = FixupAppExecuteRebuild(coresys) coresys.resolution.create_issue( IssueType.DOCKER_CONFIG, @@ -69,8 +69,8 @@ async def test_fixup_stopped_core( reference="local_ssh", suggestions=[SuggestionType.EXECUTE_REBUILD], ) - with patch.object(Addon, "restart") as restart: - await addon_execute_rebuild() + with patch.object(App, "restart") as restart: + await app_execute_rebuild() 
restart.assert_not_called() assert not coresys.resolution.issues @@ -81,16 +81,16 @@ async def test_fixup_stopped_core( assert "App local_ssh is stopped" in caplog.text -@pytest.mark.usefixtures("install_addon_ssh") +@pytest.mark.usefixtures("install_app_ssh") async def test_fixup_unknown_core( docker: DockerAPI, coresys: CoreSys, caplog: pytest.LogCaptureFixture ): - """Test fixup does nothing if addon's container has already been removed.""" + """Test fixup does nothing if app's container has already been removed.""" caplog.clear() docker.containers.get.side_effect = aiodocker.DockerError( 404, {"message": "missing"} ) - addon_execute_rebuild = FixupAddonExecuteRebuild(coresys) + app_execute_rebuild = FixupAppExecuteRebuild(coresys) coresys.resolution.create_issue( IssueType.DOCKER_CONFIG, @@ -99,10 +99,10 @@ async def test_fixup_unknown_core( suggestions=[SuggestionType.EXECUTE_REBUILD], ) with ( - patch.object(Addon, "restart") as restart, + patch.object(App, "restart") as restart, patch.object(DockerInterface, "stop") as stop, ): - await addon_execute_rebuild() + await app_execute_rebuild() restart.assert_not_called() stop.assert_not_called() @@ -111,10 +111,10 @@ async def test_fixup_unknown_core( assert "Container for app local_ssh does not exist" in caplog.text -async def test_fixup_addon_removed(coresys: CoreSys, caplog: pytest.LogCaptureFixture): - """Test fixup does nothing if addon has been removed.""" +async def test_fixup_app_removed(coresys: CoreSys, caplog: pytest.LogCaptureFixture): + """Test fixup does nothing if app has been removed.""" caplog.clear() - addon_execute_rebuild = FixupAddonExecuteRebuild(coresys) + app_execute_rebuild = FixupAppExecuteRebuild(coresys) coresys.resolution.create_issue( IssueType.DOCKER_CONFIG, @@ -122,5 +122,5 @@ async def test_fixup_addon_removed(coresys: CoreSys, caplog: pytest.LogCaptureFi reference="local_ssh", suggestions=[SuggestionType.EXECUTE_REBUILD], ) - await addon_execute_rebuild() + await 
app_execute_rebuild() assert "Cannot rebuild app local_ssh as it is not installed" in caplog.text diff --git a/tests/resolution/fixup/test_addon_execute_remove.py b/tests/resolution/fixup/test_addon_execute_remove.py index 869c2a1fa33..52c32699059 100644 --- a/tests/resolution/fixup/test_addon_execute_remove.py +++ b/tests/resolution/fixup/test_addon_execute_remove.py @@ -2,36 +2,36 @@ from unittest.mock import patch -from supervisor.addons.addon import Addon +from supervisor.addons.addon import App from supervisor.coresys import CoreSys from supervisor.resolution.const import ContextType, IssueType, SuggestionType from supervisor.resolution.data import Issue, Suggestion -from supervisor.resolution.fixups.addon_execute_remove import FixupAddonExecuteRemove +from supervisor.resolution.fixups.addon_execute_remove import FixupAppExecuteRemove -async def test_fixup(coresys: CoreSys, install_addon_ssh: Addon): +async def test_fixup(coresys: CoreSys, install_app_ssh: App): """Test fixup.""" - addon_execute_remove = FixupAddonExecuteRemove(coresys) + app_execute_remove = FixupAppExecuteRemove(coresys) - assert addon_execute_remove.auto is False + assert app_execute_remove.auto is False coresys.resolution.add_suggestion( Suggestion( SuggestionType.EXECUTE_REMOVE, ContextType.ADDON, - reference=install_addon_ssh.slug, + reference=install_app_ssh.slug, ) ) coresys.resolution.add_issue( Issue( IssueType.DETACHED_ADDON_REMOVED, ContextType.ADDON, - reference=install_addon_ssh.slug, + reference=install_app_ssh.slug, ) ) - with patch.object(Addon, "uninstall") as uninstall: - await addon_execute_remove() + with patch.object(App, "uninstall") as uninstall: + await app_execute_remove() assert uninstall.called @@ -39,27 +39,27 @@ async def test_fixup(coresys: CoreSys, install_addon_ssh: Addon): assert len(coresys.resolution.issues) == 0 -async def test_fixup_deprecated_arch_addon(coresys: CoreSys, install_addon_ssh: Addon): - """Test fixup for deprecated arch add-on issue.""" - 
addon_execute_remove = FixupAddonExecuteRemove(coresys) +async def test_fixup_deprecated_arch_app(coresys: CoreSys, install_app_ssh: App): + """Test fixup for deprecated arch app issue.""" + app_execute_remove = FixupAppExecuteRemove(coresys) coresys.resolution.add_suggestion( Suggestion( SuggestionType.EXECUTE_REMOVE, ContextType.ADDON, - reference=install_addon_ssh.slug, + reference=install_app_ssh.slug, ) ) coresys.resolution.add_issue( Issue( IssueType.DEPRECATED_ARCH_ADDON, ContextType.ADDON, - reference=install_addon_ssh.slug, + reference=install_app_ssh.slug, ) ) - with patch.object(Addon, "uninstall") as uninstall: - await addon_execute_remove() + with patch.object(App, "uninstall") as uninstall: + await app_execute_remove() assert uninstall.called diff --git a/tests/resolution/fixup/test_addon_execute_repair.py b/tests/resolution/fixup/test_addon_execute_repair.py index 7946d2234bd..b000de2b7b0 100644 --- a/tests/resolution/fixup/test_addon_execute_repair.py +++ b/tests/resolution/fixup/test_addon_execute_repair.py @@ -1,4 +1,4 @@ -"""Test fixup addon execute repair.""" +"""Test fixup app execute repair.""" from http import HTTPStatus from unittest.mock import patch @@ -6,25 +6,25 @@ import aiodocker import pytest -from supervisor.addons.addon import Addon +from supervisor.addons.addon import App from supervisor.coresys import CoreSys -from supervisor.docker.addon import DockerAddon +from supervisor.docker.addon import DockerApp from supervisor.docker.interface import DockerInterface from supervisor.docker.manager import DockerAPI from supervisor.exceptions import DockerError from supervisor.resolution.const import ContextType, IssueType, SuggestionType -from supervisor.resolution.fixups.addon_execute_repair import FixupAddonExecuteRepair +from supervisor.resolution.fixups.addon_execute_repair import FixupAppExecuteRepair -async def test_fixup(docker: DockerAPI, coresys: CoreSys, install_addon_ssh: Addon): - """Test fixup rebuilds addon's container.""" 
+async def test_fixup(docker: DockerAPI, coresys: CoreSys, install_app_ssh: App): + """Test fixup rebuilds app's container.""" docker.images.inspect.side_effect = aiodocker.DockerError( HTTPStatus.NOT_FOUND, {"message": "missing"} ) - install_addon_ssh.data["image"] = "test_image" + install_app_ssh.data["image"] = "test_image" - addon_execute_repair = FixupAddonExecuteRepair(coresys) - assert addon_execute_repair.auto is True + app_execute_repair = FixupAppExecuteRepair(coresys) + assert app_execute_repair.auto is True coresys.resolution.create_issue( IssueType.MISSING_IMAGE, @@ -33,7 +33,7 @@ async def test_fixup(docker: DockerAPI, coresys: CoreSys, install_addon_ssh: Add suggestions=[SuggestionType.EXECUTE_REPAIR], ) with patch.object(DockerInterface, "install") as install: - await addon_execute_repair() + await app_execute_repair() install.assert_called_once() assert not coresys.resolution.issues @@ -41,15 +41,15 @@ async def test_fixup(docker: DockerAPI, coresys: CoreSys, install_addon_ssh: Add async def test_fixup_max_auto_attempts( - docker: DockerAPI, coresys: CoreSys, install_addon_ssh: Addon + docker: DockerAPI, coresys: CoreSys, install_app_ssh: App ): """Test fixup stops being auto-applied after 5 failures.""" docker.images.inspect.side_effect = aiodocker.DockerError( HTTPStatus.NOT_FOUND, {"message": "missing"} ) - install_addon_ssh.data["image"] = "test_image" + install_app_ssh.data["image"] = "test_image" - addon_execute_repair = FixupAddonExecuteRepair(coresys) + app_execute_repair = FixupAppExecuteRepair(coresys) coresys.resolution.create_issue( IssueType.MISSING_IMAGE, @@ -59,17 +59,17 @@ async def test_fixup_max_auto_attempts( ) with patch.object(DockerInterface, "install", side_effect=DockerError): for _ in range(5): - assert addon_execute_repair.auto is True + assert app_execute_repair.auto is True with pytest.raises(DockerError): - await addon_execute_repair() + await app_execute_repair() - assert addon_execute_repair.auto is False + assert 
app_execute_repair.auto is False -async def test_fixup_no_addon(coresys: CoreSys): - """Test fixup dismisses if addon is missing.""" - addon_execute_repair = FixupAddonExecuteRepair(coresys) - assert addon_execute_repair.auto is True +async def test_fixup_no_app(coresys: CoreSys): + """Test fixup dismisses if app is missing.""" + app_execute_repair = FixupAppExecuteRepair(coresys) + assert app_execute_repair.auto is True coresys.resolution.create_issue( IssueType.MISSING_IMAGE, @@ -78,17 +78,17 @@ async def test_fixup_no_addon(coresys: CoreSys): suggestions=[SuggestionType.EXECUTE_REPAIR], ) - with patch.object(DockerAddon, "install") as install: - await addon_execute_repair() + with patch.object(DockerApp, "install") as install: + await app_execute_repair() install.assert_not_called() async def test_fixup_image_exists( - docker: DockerAPI, coresys: CoreSys, install_addon_ssh: Addon + docker: DockerAPI, coresys: CoreSys, install_app_ssh: App ): """Test fixup dismisses if image exists.""" - addon_execute_repair = FixupAddonExecuteRepair(coresys) - assert addon_execute_repair.auto is True + app_execute_repair = FixupAppExecuteRepair(coresys) + assert app_execute_repair.auto is True coresys.resolution.create_issue( IssueType.MISSING_IMAGE, @@ -97,6 +97,6 @@ async def test_fixup_image_exists( suggestions=[SuggestionType.EXECUTE_REPAIR], ) - with patch.object(DockerAddon, "install") as install: - await addon_execute_repair() + with patch.object(DockerApp, "install") as install: + await app_execute_repair() install.assert_not_called() diff --git a/tests/resolution/fixup/test_addon_execute_restart.py b/tests/resolution/fixup/test_addon_execute_restart.py index bfec0228405..057b06a6e6b 100644 --- a/tests/resolution/fixup/test_addon_execute_restart.py +++ b/tests/resolution/fixup/test_addon_execute_restart.py @@ -1,18 +1,18 @@ -"""Test fixup addon execute restart.""" +"""Test fixup app execute restart.""" from unittest.mock import patch import pytest -from 
supervisor.addons.addon import Addon -from supervisor.const import AddonState +from supervisor.addons.addon import App +from supervisor.const import AppState from supervisor.coresys import CoreSys -from supervisor.docker.addon import DockerAddon +from supervisor.docker.addon import DockerApp from supervisor.docker.interface import DockerInterface from supervisor.exceptions import DockerError from supervisor.resolution.const import ContextType, IssueType, SuggestionType from supervisor.resolution.data import Issue, Suggestion -from supervisor.resolution.fixups.addon_execute_restart import FixupAddonExecuteRestart +from supervisor.resolution.fixups.addon_execute_restart import FixupAppExecuteRestart from tests.const import TEST_ADDON_SLUG @@ -27,14 +27,14 @@ @pytest.mark.usefixtures("path_extern") -async def test_fixup(coresys: CoreSys, install_addon_ssh: Addon): - """Test fixup restarts addon.""" - install_addon_ssh.state = AddonState.STARTED - addon_execute_restart = FixupAddonExecuteRestart(coresys) - assert addon_execute_restart.auto is False +async def test_fixup(coresys: CoreSys, install_app_ssh: App): + """Test fixup restarts app.""" + install_app_ssh.state = AppState.STARTED + app_execute_restart = FixupAppExecuteRestart(coresys) + assert app_execute_restart.auto is False async def mock_stop(*args, **kwargs): - install_addon_ssh.state = AddonState.STOPPED + install_app_ssh.state = AppState.STOPPED coresys.resolution.add_issue( DEVICE_ACCESS_MISSING_ISSUE, @@ -42,11 +42,11 @@ async def mock_stop(*args, **kwargs): ) with ( patch.object(DockerInterface, "stop") as stop, - patch.object(DockerAddon, "run") as run, - patch.object(Addon, "_wait_for_startup"), - patch.object(Addon, "write_options"), + patch.object(DockerApp, "run") as run, + patch.object(App, "_wait_for_startup"), + patch.object(App, "write_options"), ): - await addon_execute_restart() + await app_execute_restart() stop.assert_called_once() run.assert_called_once() @@ -56,11 +56,11 @@ async def 
mock_stop(*args, **kwargs): @pytest.mark.usefixtures("path_extern") async def test_fixup_stop_error( - coresys: CoreSys, install_addon_ssh: Addon, caplog: pytest.LogCaptureFixture + coresys: CoreSys, install_app_ssh: App, caplog: pytest.LogCaptureFixture ): - """Test fixup fails on stop addon failure.""" - install_addon_ssh.state = AddonState.STARTED - addon_execute_start = FixupAddonExecuteRestart(coresys) + """Test fixup fails on stop app failure.""" + install_app_ssh.state = AppState.STARTED + app_execute_start = FixupAppExecuteRestart(coresys) coresys.resolution.add_issue( DEVICE_ACCESS_MISSING_ISSUE, @@ -68,9 +68,9 @@ async def test_fixup_stop_error( ) with ( patch.object(DockerInterface, "stop", side_effect=DockerError), - patch.object(DockerAddon, "run") as run, + patch.object(DockerApp, "run") as run, ): - await addon_execute_start() + await app_execute_start() run.assert_not_called() assert DEVICE_ACCESS_MISSING_ISSUE in coresys.resolution.issues @@ -80,11 +80,11 @@ async def test_fixup_stop_error( @pytest.mark.usefixtures("path_extern") async def test_fixup_start_error( - coresys: CoreSys, install_addon_ssh: Addon, caplog: pytest.LogCaptureFixture + coresys: CoreSys, install_app_ssh: App, caplog: pytest.LogCaptureFixture ): - """Test fixup logs a start addon failure.""" - install_addon_ssh.state = AddonState.STARTED - addon_execute_start = FixupAddonExecuteRestart(coresys) + """Test fixup logs a start app failure.""" + install_app_ssh.state = AppState.STARTED + app_execute_start = FixupAppExecuteRestart(coresys) coresys.resolution.add_issue( DEVICE_ACCESS_MISSING_ISSUE, @@ -92,10 +92,10 @@ async def test_fixup_start_error( ) with ( patch.object(DockerInterface, "stop") as stop, - patch.object(DockerAddon, "run", side_effect=DockerError), - patch.object(Addon, "write_options"), + patch.object(DockerApp, "run", side_effect=DockerError), + patch.object(App, "write_options"), ): - await addon_execute_start() + await app_execute_start() 
stop.assert_called_once() assert DEVICE_ACCESS_MISSING_ISSUE not in coresys.resolution.issues @@ -103,16 +103,16 @@ async def test_fixup_start_error( assert "Could not restart local_ssh" in caplog.text -async def test_fixup_no_addon(coresys: CoreSys, caplog: pytest.LogCaptureFixture): - """Test fixup dismisses if addon is missing.""" - addon_execute_start = FixupAddonExecuteRestart(coresys) +async def test_fixup_no_app(coresys: CoreSys, caplog: pytest.LogCaptureFixture): + """Test fixup dismisses if app is missing.""" + app_execute_start = FixupAppExecuteRestart(coresys) coresys.resolution.add_issue( DEVICE_ACCESS_MISSING_ISSUE, suggestions=[SuggestionType.EXECUTE_RESTART], ) - with patch.object(DockerAddon, "stop") as stop: - await addon_execute_start() + with patch.object(DockerApp, "stop") as stop: + await app_execute_start() stop.assert_not_called() assert not coresys.resolution.issues diff --git a/tests/resolution/fixup/test_addon_execute_start.py b/tests/resolution/fixup/test_addon_execute_start.py index 26c876cd959..7531255b5ec 100644 --- a/tests/resolution/fixup/test_addon_execute_start.py +++ b/tests/resolution/fixup/test_addon_execute_start.py @@ -1,17 +1,17 @@ -"""Test fixup addon execute start.""" +"""Test fixup app execute start.""" from unittest.mock import patch import pytest -from supervisor.addons.addon import Addon -from supervisor.const import AddonState +from supervisor.addons.addon import App +from supervisor.const import AppState from supervisor.coresys import CoreSys -from supervisor.docker.addon import DockerAddon +from supervisor.docker.addon import DockerApp from supervisor.exceptions import DockerError from supervisor.resolution.const import ContextType, SuggestionType from supervisor.resolution.data import Suggestion -from supervisor.resolution.fixups.addon_execute_start import FixupAddonExecuteStart +from supervisor.resolution.fixups.addon_execute_start import FixupAppExecuteStart from tests.addons.test_manager import BOOT_FAIL_ISSUE @@ 
-21,28 +21,28 @@ @pytest.mark.parametrize( - "state", [AddonState.STARTED, AddonState.STARTUP, AddonState.STOPPED] + "state", [AppState.STARTED, AppState.STARTUP, AppState.STOPPED] ) @pytest.mark.usefixtures("path_extern") -async def test_fixup(coresys: CoreSys, install_addon_ssh: Addon, state: AddonState): - """Test fixup starts addon.""" - install_addon_ssh.state = AddonState.UNKNOWN - addon_execute_start = FixupAddonExecuteStart(coresys) - assert addon_execute_start.auto is False +async def test_fixup(coresys: CoreSys, install_app_ssh: App, state: AppState): + """Test fixup starts app.""" + install_app_ssh.state = AppState.UNKNOWN + app_execute_start = FixupAppExecuteStart(coresys) + assert app_execute_start.auto is False async def mock_start(*args, **kwargs): - install_addon_ssh.state = state + install_app_ssh.state = state coresys.resolution.add_issue( BOOT_FAIL_ISSUE, suggestions=[SuggestionType.EXECUTE_START], ) with ( - patch.object(DockerAddon, "run") as run, - patch.object(Addon, "_wait_for_startup", new=mock_start), - patch.object(Addon, "write_options"), + patch.object(DockerApp, "run") as run, + patch.object(App, "_wait_for_startup", new=mock_start), + patch.object(App, "write_options"), ): - await addon_execute_start() + await app_execute_start() run.assert_called_once() assert not coresys.resolution.issues @@ -50,67 +50,67 @@ async def mock_start(*args, **kwargs): @pytest.mark.usefixtures("path_extern") -async def test_fixup_start_error(coresys: CoreSys, install_addon_ssh: Addon): - """Test fixup fails on start addon failure.""" - install_addon_ssh.state = AddonState.UNKNOWN - addon_execute_start = FixupAddonExecuteStart(coresys) +async def test_fixup_start_error(coresys: CoreSys, install_app_ssh: App): + """Test fixup fails on start app failure.""" + install_app_ssh.state = AppState.UNKNOWN + app_execute_start = FixupAppExecuteStart(coresys) coresys.resolution.add_issue( BOOT_FAIL_ISSUE, suggestions=[SuggestionType.EXECUTE_START], ) with ( - 
patch.object(DockerAddon, "run", side_effect=DockerError) as run, - patch.object(Addon, "write_options"), + patch.object(DockerApp, "run", side_effect=DockerError) as run, + patch.object(App, "write_options"), ): - await addon_execute_start() + await app_execute_start() run.assert_called_once() assert BOOT_FAIL_ISSUE in coresys.resolution.issues assert EXECUTE_START_SUGGESTION in coresys.resolution.suggestions -@pytest.mark.parametrize("state", [AddonState.ERROR, AddonState.UNKNOWN]) +@pytest.mark.parametrize("state", [AppState.ERROR, AppState.UNKNOWN]) @pytest.mark.usefixtures("path_extern") async def test_fixup_wait_start_failure( - coresys: CoreSys, install_addon_ssh: Addon, state: AddonState + coresys: CoreSys, install_app_ssh: App, state: AppState ): - """Test fixup fails if addon does not complete startup.""" - install_addon_ssh.state = AddonState.UNKNOWN - addon_execute_start = FixupAddonExecuteStart(coresys) + """Test fixup fails if app does not complete startup.""" + install_app_ssh.state = AppState.UNKNOWN + app_execute_start = FixupAppExecuteStart(coresys) async def mock_start(*args, **kwargs): - install_addon_ssh.state = state + install_app_ssh.state = state coresys.resolution.add_issue( BOOT_FAIL_ISSUE, suggestions=[SuggestionType.EXECUTE_START], ) with ( - patch.object(DockerAddon, "run") as run, - patch.object(Addon, "_wait_for_startup", new=mock_start), - patch.object(Addon, "write_options"), + patch.object(DockerApp, "run") as run, + patch.object(App, "_wait_for_startup", new=mock_start), + patch.object(App, "write_options"), ): - await addon_execute_start() + await app_execute_start() run.assert_called_once() assert BOOT_FAIL_ISSUE in coresys.resolution.issues assert EXECUTE_START_SUGGESTION in coresys.resolution.suggestions -async def test_fixup_no_addon(coresys: CoreSys): - """Test fixup dismisses if addon is missing.""" - addon_execute_start = FixupAddonExecuteStart(coresys) +async def test_fixup_no_app(coresys: CoreSys): + """Test fixup 
dismisses if app is missing.""" + app_execute_start = FixupAppExecuteStart(coresys) coresys.resolution.add_issue( BOOT_FAIL_ISSUE, suggestions=[SuggestionType.EXECUTE_START], ) with ( - patch.object(DockerAddon, "run") as run, - patch.object(Addon, "write_options"), + patch.object(DockerApp, "run") as run, + patch.object(App, "write_options"), ): - await addon_execute_start() + await app_execute_start() run.assert_not_called() assert not coresys.resolution.issues diff --git a/tests/resolution/fixup/test_store_execute_reset.py b/tests/resolution/fixup/test_store_execute_reset.py index d2eafa70970..84541cf70e7 100644 --- a/tests/resolution/fixup/test_store_execute_reset.py +++ b/tests/resolution/fixup/test_store_execute_reset.py @@ -24,11 +24,11 @@ @pytest.fixture(name="mock_addons_git", autouse=True) -async def fixture_mock_addons_git(tmp_supervisor_data: Path) -> None: - """Mock addons git path.""" +async def fixture_mock_apps_git(tmp_supervisor_data: Path) -> None: + """Mock apps git path.""" with patch.object( CoreConfig, - "path_addons_git", + "path_apps_git", new=PropertyMock(return_value=tmp_supervisor_data / "addons" / "git"), ): yield @@ -50,7 +50,7 @@ def add_store_reset_suggestion(coresys: CoreSys) -> None: async def test_fixup(coresys: CoreSys): """Test fixup.""" store_execute_reset = FixupStoreExecuteReset(coresys) - test_repo = coresys.config.path_addons_git / "94cfad5a" + test_repo = coresys.config.path_apps_git / "94cfad5a" assert store_execute_reset.auto @@ -89,7 +89,7 @@ async def mock_clone(obj: GitRepo, path: Path | None = None): async def test_fixup_clone_fail(coresys: CoreSys): """Test fixup does not delete cache when clone fails.""" store_execute_reset = FixupStoreExecuteReset(coresys) - test_repo = coresys.config.path_addons_git / "94cfad5a" + test_repo = coresys.config.path_apps_git / "94cfad5a" add_store_reset_suggestion(coresys) test_repo.mkdir(parents=True) @@ -125,7 +125,7 @@ async def test_fixup_move_fail(coresys: CoreSys, error_num: 
int, unhealthy: bool It will leave the user in a bind without the git cache but at least we try to clean up tmp. """ store_execute_reset = FixupStoreExecuteReset(coresys) - test_repo = coresys.config.path_addons_git / "94cfad5a" + test_repo = coresys.config.path_apps_git / "94cfad5a" add_store_reset_suggestion(coresys) test_repo.mkdir(parents=True) diff --git a/tests/resolution/fixup/test_system_execute_rebuild.py b/tests/resolution/fixup/test_system_execute_rebuild.py index c9da5c2272d..2f318b5bd1b 100644 --- a/tests/resolution/fixup/test_system_execute_rebuild.py +++ b/tests/resolution/fixup/test_system_execute_rebuild.py @@ -4,7 +4,7 @@ from supervisor.coresys import CoreSys from supervisor.resolution.const import ContextType, IssueType, SuggestionType -from supervisor.resolution.fixups.addon_execute_rebuild import FixupAddonExecuteRebuild +from supervisor.resolution.fixups.addon_execute_rebuild import FixupAppExecuteRebuild from supervisor.resolution.fixups.core_execute_rebuild import FixupCoreExecuteRebuild from supervisor.resolution.fixups.plugin_execute_rebuild import ( FixupPluginExecuteRebuild, @@ -43,12 +43,12 @@ async def test_fixup(coresys: CoreSys): suggestions=[SuggestionType.EXECUTE_REBUILD], ) with ( - patch.object(FixupAddonExecuteRebuild, "process_fixup") as addon_fixup, + patch.object(FixupAppExecuteRebuild, "process_fixup") as app_fixup, patch.object(FixupCoreExecuteRebuild, "process_fixup") as core_fixup, patch.object(FixupPluginExecuteRebuild, "process_fixup") as plugin_fixup, ): await system_execute_rebuild() - addon_fixup.assert_called_once_with(reference="local_ssh") + app_fixup.assert_called_once_with(reference="local_ssh") core_fixup.assert_called_once() plugin_fixup.assert_called_once_with(reference="audio") diff --git a/tests/store/test_builtin_stores.py b/tests/store/test_builtin_stores.py index ae56b60631a..74d140c8786 100644 --- a/tests/store/test_builtin_stores.py +++ b/tests/store/test_builtin_stores.py @@ -7,11 +7,11 @@ def 
test_local_store(coresys: CoreSys, test_repository) -> None: """Test loading from local store.""" assert coresys.store.get("local") - assert "local_ssh" in coresys.addons.store + assert "local_ssh" in coresys.apps.store def test_core_store(coresys: CoreSys, test_repository) -> None: """Test loading from core store.""" assert coresys.store.get("core") - assert "core_samba" in coresys.addons.store + assert "core_samba" in coresys.apps.store diff --git a/tests/store/test_custom_repository.py b/tests/store/test_custom_repository.py index 928b15b79f0..6bef84c6d54 100644 --- a/tests/store/test_custom_repository.py +++ b/tests/store/test_custom_repository.py @@ -5,7 +5,7 @@ import pytest -from supervisor.addons.addon import Addon +from supervisor.addons.addon import App from supervisor.coresys import CoreSys from supervisor.exceptions import ( StoreError, @@ -16,7 +16,7 @@ ) from supervisor.resolution.const import SuggestionType from supervisor.store import StoreManager -from supervisor.store.addon import AddonStore +from supervisor.store.addon import AppStore from supervisor.store.const import BuiltinRepository from supervisor.store.repository import Repository @@ -226,7 +226,7 @@ async def test_remove_repository( await store_manager.remove_repository(test_repository) assert test_repository.source not in coresys.store.repository_urls - assert test_repository.slug not in coresys.addons.store + assert test_repository.slug not in coresys.apps.store assert test_repository.slug not in coresys.store.repositories @@ -234,15 +234,15 @@ async def test_remove_repository( async def test_remove_used_repository( coresys: CoreSys, store_manager: StoreManager, - store_addon: AddonStore, + store_app: AppStore, use_update: bool, ): """Test removing used custom repository.""" - await coresys.addons.data.install(store_addon) - addon = Addon(coresys, store_addon.slug) - coresys.addons.local[addon.slug] = addon + await coresys.apps.data.install(store_app) + app = App(coresys, store_app.slug) 
+ coresys.apps.local[app.slug] = app - assert store_addon.repository in coresys.store.repositories + assert store_app.repository in coresys.store.repositories with pytest.raises( StoreError, @@ -252,7 +252,7 @@ async def test_remove_used_repository( await store_manager.update_repositories(set()) else: await store_manager.remove_repository( - coresys.store.repositories[store_addon.repository] + coresys.store.repositories[store_app.repository] ) diff --git a/tests/store/test_reading_addons.py b/tests/store/test_reading_addons.py index 4236d55eb40..cdfda4840e2 100644 --- a/tests/store/test_reading_addons.py +++ b/tests/store/test_reading_addons.py @@ -1,4 +1,4 @@ -"""Test that we are reading add-on files correctly.""" +"""Test that we are reading app files correctly.""" import errno from pathlib import Path @@ -11,8 +11,8 @@ # pylint: disable=protected-access -async def test_read_addon_files(coresys: CoreSys): - """Test that we are reading add-on files correctly.""" +async def test_read_app_files(coresys: CoreSys): + """Test that we are reading app files correctly.""" with patch( "pathlib.Path.glob", return_value=[ @@ -25,20 +25,20 @@ async def test_read_addon_files(coresys: CoreSys): Path(".circleci/config.yml"), ], ): - addon_list = await coresys.store.data._find_addon_configs(Path("test"), {}) + app_list = await coresys.store.data._find_app_configs(Path("test"), {}) - assert len(addon_list) == 1 - assert str(addon_list[0]) == "addon/config.yml" + assert len(app_list) == 1 + assert str(app_list[0]) == "addon/config.yml" -async def test_reading_addon_files_error(coresys: CoreSys): - """Test error trying to read addon files.""" +async def test_reading_app_files_error(coresys: CoreSys): + """Test error trying to read app files.""" corrupt_repo = Issue(IssueType.CORRUPT_REPOSITORY, ContextType.STORE, "test") reset_repo = Suggestion(SuggestionType.EXECUTE_RESET, ContextType.STORE, "test") with patch("pathlib.Path.glob", side_effect=(err := OSError())): err.errno = 
errno.EBUSY - assert (await coresys.store.data._find_addon_configs(Path("test"), {})) is None + assert (await coresys.store.data._find_app_configs(Path("test"), {})) is None assert corrupt_repo in coresys.resolution.issues assert reset_repo in coresys.resolution.suggestions assert coresys.core.healthy is True @@ -47,7 +47,7 @@ async def test_reading_addon_files_error(coresys: CoreSys): coresys.resolution.get_issue_if_present(corrupt_repo) ) err.errno = errno.EBADMSG - assert (await coresys.store.data._find_addon_configs(Path("test"), {})) is None + assert (await coresys.store.data._find_app_configs(Path("test"), {})) is None assert corrupt_repo in coresys.resolution.issues assert reset_repo not in coresys.resolution.suggestions assert coresys.core.healthy is False diff --git a/tests/store/test_store_manager.py b/tests/store/test_store_manager.py index 95863e82840..f5684ae753c 100644 --- a/tests/store/test_store_manager.py +++ b/tests/store/test_store_manager.py @@ -8,14 +8,14 @@ from awesomeversion import AwesomeVersion import pytest -from supervisor.addons.addon import Addon +from supervisor.addons.addon import App from supervisor.arch import CpuArchManager from supervisor.backups.manager import BackupManager from supervisor.coresys import CoreSys -from supervisor.exceptions import AddonNotSupportedError, StoreJobError +from supervisor.exceptions import AppNotSupportedError, StoreJobError from supervisor.homeassistant.module import HomeAssistant from supervisor.store import StoreManager -from supervisor.store.addon import AddonStore +from supervisor.store.addon import AppStore from supervisor.store.git import GitRepo from supervisor.store.repository import Repository @@ -27,16 +27,16 @@ async def test_default_load(coresys: CoreSys): store_manager = await StoreManager(coresys).load_config() refresh_cache_calls: set[str] = set() - async def mock_refresh_cache(obj: AddonStore): + async def mock_refresh_cache(obj: AppStore): nonlocal refresh_cache_calls 
refresh_cache_calls.add(obj.slug) with ( patch("supervisor.store.repository.RepositoryGit.load", return_value=None), patch("supervisor.store.repository.RepositoryLocal.load", return_value=None), - patch.object(type(coresys.config), "addons_repositories", return_value=[]), + patch.object(type(coresys.config), "apps_repositories", return_value=[]), patch("pathlib.Path.exists", return_value=True), - patch.object(AddonStore, "refresh_path_cache", new=mock_refresh_cache), + patch.object(AppStore, "refresh_path_cache", new=mock_refresh_cache), ): await store_manager.load() @@ -56,7 +56,7 @@ async def mock_refresh_cache(obj: AddonStore): "https://github.com/music-assistant/home-assistant-addon" in store_manager.repository_urls ) - # NOTE: When adding new stores, make sure to add it to tests/fixtures/addons/git/ + # NOTE: When adding new stores, make sure to add it to tests/fixtures/addons/git/ assert refresh_cache_calls == { "local_ssh", "local_example",
config: dict[str, Any], log: str, ): - """Test updating addon when new version not available for system.""" - addon_config = dict( + """Test updating app when new version not available for system.""" + app_config = dict( await coresys.run_in_executor( load_yaml_fixture, "addons/local/ssh/config.yaml" ), @@ -162,7 +162,7 @@ with ( patch.object(BackupManager, "do_backup_partial") as backup, - patch.object(AddonStore, "data", new=PropertyMock(return_value=addon_config)), + patch.object(AppStore, "data", new=PropertyMock(return_value=app_config)), patch.object( CpuArchManager, "supported", new=PropertyMock(return_value=["amd64"]) ), @@ -174,8 +174,8 @@ ), patch("shutil.disk_usage", return_value=(42, 42, (5120.0**3))), ): - with pytest.raises(AddonNotSupportedError): - await coresys.addons.update("local_ssh", backup=True) + with pytest.raises(AppNotSupportedError): + await coresys.apps.update("local_ssh", backup=True) backup.assert_not_called() @@ -203,15 +203,15 @@ async def test_update_unavailable_addon( ), ], ) -async def test_install_unavailable_addon( +async def test_install_unavailable_app( coresys: CoreSys, test_repository: Repository, caplog: pytest.LogCaptureFixture, config: dict[str, Any], log: str, ): - """Test updating addon when new version not available for system.""" - addon_config = dict( + """Test installing app when new version not available for system.""" + app_config = dict(
new=PropertyMock(return_value=AwesomeVersion("2022.1.1")), ), patch("shutil.disk_usage", return_value=(42, 42, (5120.0**3))), - pytest.raises(AddonNotSupportedError), + pytest.raises(AppNotSupportedError), ): - await coresys.addons.install("local_ssh") + await coresys.apps.install("local_ssh") assert log in caplog.text @@ -250,17 +250,17 @@ async def test_reload(coresys: CoreSys, supervisor_internet): assert git_pull.call_count == 4 -async def test_addon_version_timestamp(coresys: CoreSys, install_addon_example: Addon): - """Test timestamp tracked for addon's version.""" +async def test_app_version_timestamp(coresys: CoreSys, install_app_example: App): + """Test timestamp tracked for app's version.""" # When unset, version timestamp set to utcnow on store load - assert (timestamp := install_addon_example.latest_version_timestamp) + assert (timestamp := install_app_example.latest_version_timestamp) # Reload of the store does not change timestamp unless version changes await coresys.store.reload() - assert timestamp == install_addon_example.latest_version_timestamp + assert timestamp == install_app_example.latest_version_timestamp # If a new version is seen processing repo, reset to utc now - install_addon_example.data_store["version"] = "1.1.0" + install_app_example.data_store["version"] = "1.1.0" with patch( "pathlib.Path.stat", @@ -269,4 +269,4 @@ async def test_addon_version_timestamp(coresys: CoreSys, install_addon_example: ), ): await coresys.store.reload() - assert timestamp < install_addon_example.latest_version_timestamp + assert timestamp < install_app_example.latest_version_timestamp diff --git a/tests/store/test_translation_load.py b/tests/store/test_translation_load.py index 1b7057e1eda..7716346ea86 100644 --- a/tests/store/test_translation_load.py +++ b/tests/store/test_translation_load.py @@ -7,7 +7,7 @@ import pytest from supervisor.coresys import CoreSys -from supervisor.store.data import _read_addon_translations +from supervisor.store.data import 
_read_app_translations from supervisor.utils.common import write_json_or_yaml_file @@ -15,7 +15,7 @@ def test_loading_traslations(coresys: CoreSys, tmp_path: Path): """Test loading add-translation.""" os.makedirs(tmp_path / "translations") # no transaltions - assert _read_addon_translations(tmp_path) == {} + assert _read_app_translations(tmp_path) == {} for file in ("en.json", "es.json"): write_json_or_yaml_file( @@ -41,7 +41,7 @@ def test_loading_traslations(coresys: CoreSys, tmp_path: Path): }, ) - translations = _read_addon_translations(tmp_path) + translations = _read_app_translations(tmp_path) assert translations["en"]["configuration"]["test"]["name"] == "test" assert translations["es"]["configuration"]["test"]["name"] == "test" @@ -78,7 +78,7 @@ def test_translation_file_failure( with fail_path.open("w") as de_file: de_file.write("not json") - translations = _read_addon_translations(tmp_path) + translations = _read_app_translations(tmp_path) assert translations["en"]["configuration"]["test"]["name"] == "test" assert f"Can't read translations from {fail_path.as_posix()}" in caplog.text diff --git a/tests/test_auth.py b/tests/test_auth.py index fe38bd0c25e..e0b1da0b610 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -30,11 +30,11 @@ def mock_api_state_fixture(coresys): async def test_auth_request_with_backend(coresys, mock_auth_backend, mock_api_state): """Make simple auth request.""" - addon = MagicMock() + app = MagicMock() mock_auth_backend.return_value = True mock_api_state.return_value = True - assert await coresys.auth.check_login(addon, "username", "password") + assert await coresys.auth.check_login(app, "username", "password") assert mock_auth_backend.called @@ -42,11 +42,11 @@ async def test_auth_request_with_backend(coresys, mock_auth_backend, mock_api_st async def test_auth_request_without_backend(coresys, mock_auth_backend, mock_api_state): """Make simple auth without request.""" - addon = MagicMock() + app = MagicMock() 
mock_auth_backend.return_value = True mock_api_state.return_value = False - assert not await coresys.auth.check_login(addon, "username", "password") + assert not await coresys.auth.check_login(app, "username", "password") assert not mock_auth_backend.called @@ -56,13 +56,13 @@ async def test_auth_request_without_backend_cache( ): """Make simple auth without request.""" - addon = MagicMock() + app = MagicMock() mock_auth_backend.return_value = True mock_api_state.return_value = False await coresys.auth._update_cache("username", "password") - assert await coresys.auth.check_login(addon, "username", "password") + assert await coresys.auth.check_login(app, "username", "password") assert not mock_auth_backend.called @@ -72,16 +72,16 @@ async def test_auth_request_with_backend_cache_update( ): """Make simple auth without request and cache update.""" - addon = MagicMock() + app = MagicMock() mock_auth_backend.return_value = False mock_api_state.return_value = True await coresys.auth._update_cache("username", "password") - assert await coresys.auth.check_login(addon, "username", "password") + assert await coresys.auth.check_login(app, "username", "password") await asyncio.sleep(0) assert mock_auth_backend.called await coresys.auth._dismatch_cache("username", "password") - assert not await coresys.auth.check_login(addon, "username", "password") + assert not await coresys.auth.check_login(app, "username", "password")