diff --git a/CODEOWNERS b/CODEOWNERS index bcf8b3d745af18..90bd4f6e4d70f4 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1671,6 +1671,8 @@ build.json @home-assistant/supervisor /tests/components/telegram_bot/ @hanwg /homeassistant/components/tellduslive/ @fredrike /tests/components/tellduslive/ @fredrike +/homeassistant/components/teltonika/ @karlbeecken +/tests/components/teltonika/ @karlbeecken /homeassistant/components/template/ @Petro31 @home-assistant/core /tests/components/template/ @Petro31 @home-assistant/core /homeassistant/components/tesla_fleet/ @Bre77 diff --git a/homeassistant/components/analytics/analytics.py b/homeassistant/components/analytics/analytics.py index 7778e3239abce7..1634f01bf06b60 100644 --- a/homeassistant/components/analytics/analytics.py +++ b/homeassistant/components/analytics/analytics.py @@ -534,6 +534,10 @@ async def send_snapshot(self, _: datetime | None = None) -> None: payload = await _async_snapshot_payload(self._hass) + if not payload: + LOGGER.info("Skipping snapshot submission, no data to send") + return + headers = { "Content-Type": "application/json", "User-Agent": f"home-assistant/{HA_VERSION}", diff --git a/homeassistant/components/control4/climate.py b/homeassistant/components/control4/climate.py index 8669d09122deca..ba0005cbf3ade2 100644 --- a/homeassistant/components/control4/climate.py +++ b/homeassistant/components/control4/climate.py @@ -75,11 +75,12 @@ HA_TO_C4_HVAC_MODE = {v: k for k, v in C4_TO_HA_HVAC_MODE.items()} -# Map the five known Control4 HVAC states to Home Assistant HVAC actions +# Map Control4 HVAC states to Home Assistant HVAC actions C4_TO_HA_HVAC_ACTION = { "off": HVACAction.OFF, "heat": HVACAction.HEATING, "cool": HVACAction.COOLING, + "idle": HVACAction.IDLE, "dry": HVACAction.DRYING, "fan": HVACAction.FAN, } @@ -292,8 +293,14 @@ def hvac_action(self) -> HVACAction | None: c4_state = data.get(CONTROL4_HVAC_STATE) if c4_state is None: return None - # Convert state to lowercase for mapping action = 
C4_TO_HA_HVAC_ACTION.get(str(c4_state).lower()) + # Substring match for multi-stage systems that report + # e.g. "Stage 1 Heat", "Stage 2 Cool" + if action is None: + if "heat" in str(c4_state).lower(): + action = HVACAction.HEATING + elif "cool" in str(c4_state).lower(): + action = HVACAction.COOLING if action is None: _LOGGER.debug("Unknown HVAC state received from Control4: %s", c4_state) return action diff --git a/homeassistant/components/demo/vacuum.py b/homeassistant/components/demo/vacuum.py index ba00bcaedb9db3..28bfea66be2b7d 100644 --- a/homeassistant/components/demo/vacuum.py +++ b/homeassistant/components/demo/vacuum.py @@ -7,6 +7,7 @@ from homeassistant.components.vacuum import ( ATTR_CLEANED_AREA, + Segment, StateVacuumEntity, VacuumActivity, VacuumEntityFeature, @@ -14,8 +15,11 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import event +from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from . 
import DOMAIN + SUPPORT_MINIMAL_SERVICES = VacuumEntityFeature.TURN_ON | VacuumEntityFeature.TURN_OFF SUPPORT_BASIC_SERVICES = ( @@ -45,9 +49,17 @@ | VacuumEntityFeature.LOCATE | VacuumEntityFeature.MAP | VacuumEntityFeature.CLEAN_SPOT + | VacuumEntityFeature.CLEAN_AREA ) FAN_SPEEDS = ["min", "medium", "high", "max"] +DEMO_SEGMENTS = [ + Segment(id="living_room", name="Living room"), + Segment(id="kitchen", name="Kitchen"), + Segment(id="bedroom_1", name="Master bedroom", group="Bedrooms"), + Segment(id="bedroom_2", name="Guest bedroom", group="Bedrooms"), + Segment(id="bathroom", name="Bathroom"), +] DEMO_VACUUM_COMPLETE = "Demo vacuum 0 ground floor" DEMO_VACUUM_MOST = "Demo vacuum 1 first floor" DEMO_VACUUM_BASIC = "Demo vacuum 2 second floor" @@ -63,11 +75,11 @@ async def async_setup_entry( """Set up the Demo config entry.""" async_add_entities( [ - StateDemoVacuum(DEMO_VACUUM_COMPLETE, SUPPORT_ALL_SERVICES), - StateDemoVacuum(DEMO_VACUUM_MOST, SUPPORT_MOST_SERVICES), - StateDemoVacuum(DEMO_VACUUM_BASIC, SUPPORT_BASIC_SERVICES), - StateDemoVacuum(DEMO_VACUUM_MINIMAL, SUPPORT_MINIMAL_SERVICES), - StateDemoVacuum(DEMO_VACUUM_NONE, VacuumEntityFeature(0)), + StateDemoVacuum("vacuum_1", DEMO_VACUUM_COMPLETE, SUPPORT_ALL_SERVICES), + StateDemoVacuum("vacuum_2", DEMO_VACUUM_MOST, SUPPORT_MOST_SERVICES), + StateDemoVacuum("vacuum_3", DEMO_VACUUM_BASIC, SUPPORT_BASIC_SERVICES), + StateDemoVacuum("vacuum_4", DEMO_VACUUM_MINIMAL, SUPPORT_MINIMAL_SERVICES), + StateDemoVacuum("vacuum_5", DEMO_VACUUM_NONE, VacuumEntityFeature(0)), ] ) @@ -75,13 +87,21 @@ async def async_setup_entry( class StateDemoVacuum(StateVacuumEntity): """Representation of a demo vacuum supporting states.""" + _attr_has_entity_name = True + _attr_name = None _attr_should_poll = False _attr_translation_key = "model_s" - def __init__(self, name: str, supported_features: VacuumEntityFeature) -> None: + def __init__( + self, unique_id: str, name: str, supported_features: VacuumEntityFeature + ) -> None: 
"""Initialize the vacuum.""" - self._attr_name = name + self._attr_unique_id = unique_id self._attr_supported_features = supported_features + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, unique_id)}, + name=name, + ) self._attr_activity = VacuumActivity.DOCKED self._fan_speed = FAN_SPEEDS[1] self._cleaned_area: float = 0 @@ -163,6 +183,16 @@ async def async_send_command( self._attr_activity = VacuumActivity.IDLE self.async_write_ha_state() + async def async_get_segments(self) -> list[Segment]: + """Get the list of segments.""" + return DEMO_SEGMENTS + + async def async_clean_segments(self, segment_ids: list[str], **kwargs: Any) -> None: + """Clean the specified segments.""" + self._attr_activity = VacuumActivity.CLEANING + self._cleaned_area += len(segment_ids) * 0.7 + self.async_write_ha_state() + def __set_state_to_dock(self, _: datetime) -> None: self._attr_activity = VacuumActivity.DOCKED self.schedule_update_ha_state() diff --git a/homeassistant/components/dialogflow/strings.json b/homeassistant/components/dialogflow/strings.json index b357bf7cfe2bcd..48939ba9913cdd 100644 --- a/homeassistant/components/dialogflow/strings.json +++ b/homeassistant/components/dialogflow/strings.json @@ -2,6 +2,7 @@ "config": { "abort": { "cloud_not_connected": "[%key:common::config_flow::abort::cloud_not_connected%]", + "reconfigure_successful": "**Reconfiguration was successful**\n\nGo to the [webhook service of Dialogflow]({dialogflow_url}) and update the webhook with following settings:\n\n- URL: `{webhook_url}`\n- Method: POST\n- Content Type: application/json\n\nSee [the documentation]({docs_url}) for further details.", "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "webhook_not_internet_accessible": "[%key:common::config_flow::abort::webhook_not_internet_accessible%]" }, @@ -9,6 +10,10 @@ "default": "To send events to Home Assistant, you will need to set up the [webhook service of 
Dialogflow]({dialogflow_url}).\n\nFill in the following info:\n\n- URL: `{webhook_url}`\n- Method: POST\n- Content Type: application/json\n\nSee [the documentation]({docs_url}) for further details." }, "step": { + "reconfigure": { + "description": "Are you sure you want to reconfigure Dialogflow?", + "title": "Reconfigure Dialogflow webhook" + }, "user": { "description": "Are you sure you want to set up Dialogflow?", "title": "Set up the Dialogflow webhook" diff --git a/homeassistant/components/duckdns/strings.json b/homeassistant/components/duckdns/strings.json index 64625c9ac8657b..fdd3db2ad36f0a 100644 --- a/homeassistant/components/duckdns/strings.json +++ b/homeassistant/components/duckdns/strings.json @@ -16,7 +16,7 @@ "data_description": { "access_token": "[%key:component::duckdns::config::step::user::data_description::access_token%]" }, - "title": "Re-configure {name}" + "title": "Reconfigure {name}" }, "user": { "data": { diff --git a/homeassistant/components/ecovacs/controller.py b/homeassistant/components/ecovacs/controller.py index 69dd0f0813f0ad..127262f00bf425 100644 --- a/homeassistant/components/ecovacs/controller.py +++ b/homeassistant/components/ecovacs/controller.py @@ -2,6 +2,7 @@ from __future__ import annotations +import asyncio from collections.abc import Mapping from functools import partial import logging @@ -80,11 +81,22 @@ async def initialize(self) -> None: try: devices = await self._api_client.get_devices() credentials = await self._authenticator.authenticate() - for device_info in devices.mqtt: - device = Device(device_info, self._authenticator) + + if devices.mqtt: mqtt = await self._get_mqtt_client() - await device.initialize(mqtt) - self._devices.append(device) + mqtt_devices = [ + Device(info, self._authenticator) for info in devices.mqtt + ] + async with asyncio.TaskGroup() as tg: + + async def _init(device: Device) -> None: + """Initialize MQTT device.""" + await device.initialize(mqtt) + self._devices.append(device) + + for 
device in mqtt_devices: + tg.create_task(_init(device)) + for device_config in devices.xmpp: bot = VacBot( credentials.user_id, diff --git a/homeassistant/components/geofency/strings.json b/homeassistant/components/geofency/strings.json index 82c6da6d5b2dd3..1df8b77c3d3cf1 100644 --- a/homeassistant/components/geofency/strings.json +++ b/homeassistant/components/geofency/strings.json @@ -2,6 +2,7 @@ "config": { "abort": { "cloud_not_connected": "[%key:common::config_flow::abort::cloud_not_connected%]", + "reconfigure_successful": "**Reconfiguration was successful**\n\nGo to the webhook feature in Geofency and update the webhook with the following settings:\n\n- URL: `{webhook_url}`\n- Method: POST\n\nSee [the documentation]({docs_url}) for further details.", "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "webhook_not_internet_accessible": "[%key:common::config_flow::abort::webhook_not_internet_accessible%]" }, @@ -9,6 +10,10 @@ "default": "To send events to Home Assistant, you will need to set up the webhook feature in Geofency.\n\nFill in the following info:\n\n- URL: `{webhook_url}`\n- Method: POST\n\nSee [the documentation]({docs_url}) for further details." 
}, "step": { + "reconfigure": { + "description": "Are you sure you want to reconfigure the Geofency webhook?", + "title": "Reconfigure Geofency webhook" + }, "user": { "description": "Are you sure you want to set up the Geofency webhook?", "title": "Set up the Geofency webhook" diff --git a/homeassistant/components/gpslogger/strings.json b/homeassistant/components/gpslogger/strings.json index e6458c38007c79..19cf5ba5bb500b 100644 --- a/homeassistant/components/gpslogger/strings.json +++ b/homeassistant/components/gpslogger/strings.json @@ -2,6 +2,7 @@ "config": { "abort": { "cloud_not_connected": "[%key:common::config_flow::abort::cloud_not_connected%]", + "reconfigure_successful": "**Reconfiguration was successful**\n\nGo to the webhook feature in GPSLogger and update the webhook with the following settings:\n\n- URL: `{webhook_url}`\n- Method: POST\n\nSee [the documentation]({docs_url}) for further details.", "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "webhook_not_internet_accessible": "[%key:common::config_flow::abort::webhook_not_internet_accessible%]" }, @@ -9,6 +10,10 @@ "default": "To send events to Home Assistant, you will need to set up the webhook feature in GPSLogger.\n\nFill in the following info:\n\n- URL: `{webhook_url}`\n- Method: POST\n\nSee [the documentation]({docs_url}) for further details." 
}, "step": { + "reconfigure": { + "description": "Are you sure you want to reconfigure the GPSLogger webhook?", + "title": "Reconfigure GPSLogger webhook" + }, "user": { "description": "Are you sure you want to set up the GPSLogger webhook?", "title": "Set up the GPSLogger webhook" diff --git a/homeassistant/components/hdfury/__init__.py b/homeassistant/components/hdfury/__init__.py index fcf40cbbac0cad..9e8f1cc092c539 100644 --- a/homeassistant/components/hdfury/__init__.py +++ b/homeassistant/components/hdfury/__init__.py @@ -7,6 +7,7 @@ PLATFORMS = [ Platform.BUTTON, + Platform.NUMBER, Platform.SELECT, Platform.SENSOR, Platform.SWITCH, diff --git a/homeassistant/components/hdfury/icons.json b/homeassistant/components/hdfury/icons.json index 91d1c3c6784b5d..60123cec6574f3 100644 --- a/homeassistant/components/hdfury/icons.json +++ b/homeassistant/components/hdfury/icons.json @@ -5,6 +5,14 @@ "default": "mdi:connection" } }, + "number": { + "oled_fade": { + "default": "mdi:cellphone-information" + }, + "reboot_timer": { + "default": "mdi:timer-refresh" + } + }, "select": { "opmode": { "default": "mdi:cogs" diff --git a/homeassistant/components/hdfury/number.py b/homeassistant/components/hdfury/number.py new file mode 100644 index 00000000000000..3693c5171bac72 --- /dev/null +++ b/homeassistant/components/hdfury/number.py @@ -0,0 +1,101 @@ +"""Number platform for HDFury Integration.""" + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass + +from hdfury import HDFuryAPI, HDFuryError + +from homeassistant.components.number import ( + NumberDeviceClass, + NumberEntity, + NumberEntityDescription, + NumberMode, +) +from homeassistant.const import EntityCategory, UnitOfTime +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .const import DOMAIN +from .coordinator import HDFuryConfigEntry +from 
.entity import HDFuryEntity + +PARALLEL_UPDATES = 1 + + +@dataclass(kw_only=True, frozen=True) +class HDFuryNumberEntityDescription(NumberEntityDescription): + """Description for HDFury number entities.""" + + set_value_fn: Callable[[HDFuryAPI, str], Awaitable[None]] + + +NUMBERS: tuple[HDFuryNumberEntityDescription, ...] = ( + HDFuryNumberEntityDescription( + key="oledfade", + translation_key="oled_fade", + mode=NumberMode.BOX, + native_min_value=1, + native_max_value=100, + native_step=1, + device_class=NumberDeviceClass.DURATION, + native_unit_of_measurement=UnitOfTime.SECONDS, + entity_category=EntityCategory.CONFIG, + set_value_fn=lambda client, value: client.set_oled_fade(value), + ), + HDFuryNumberEntityDescription( + key="reboottimer", + translation_key="reboot_timer", + mode=NumberMode.BOX, + native_min_value=0, + native_max_value=100, + native_step=1, + device_class=NumberDeviceClass.DURATION, + native_unit_of_measurement=UnitOfTime.HOURS, + entity_category=EntityCategory.CONFIG, + set_value_fn=lambda client, value: client.set_reboot_timer(value), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: HDFuryConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up numbers using the platform schema.""" + + coordinator = entry.runtime_data + + async_add_entities( + HDFuryNumber(coordinator, description) + for description in NUMBERS + if description.key in coordinator.data.config + ) + + +class HDFuryNumber(HDFuryEntity, NumberEntity): + """Base HDFury Number Class.""" + + entity_description: HDFuryNumberEntityDescription + + @property + def native_value(self) -> float: + """Return the current number value.""" + + return float(self.coordinator.data.config[self.entity_description.key]) + + async def async_set_native_value(self, value: float) -> None: + """Set Number Value Event.""" + + try: + await self.entity_description.set_value_fn( + self.coordinator.client, str(int(value)) + ) + except HDFuryError as 
error: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="communication_error", + ) from error + + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/hdfury/strings.json b/homeassistant/components/hdfury/strings.json index b5addd5668a184..54a09bd9485550 100644 --- a/homeassistant/components/hdfury/strings.json +++ b/homeassistant/components/hdfury/strings.json @@ -40,6 +40,14 @@ "name": "Issue hotplug" } }, + "number": { + "oled_fade": { + "name": "OLED fade timer" + }, + "reboot_timer": { + "name": "Restart timer" + } + }, "select": { "opmode": { "name": "Operation mode", diff --git a/homeassistant/components/hko/coordinator.py b/homeassistant/components/hko/coordinator.py index 29746c20728201..a2c47a765db8dd 100644 --- a/homeassistant/components/hko/coordinator.py +++ b/homeassistant/components/hko/coordinator.py @@ -119,7 +119,7 @@ def _convert_current(self, data: dict[str, Any]) -> dict[str, Any]: for item in data[API_TEMPERATURE][API_DATA] if item[API_PLACE] == self.location ), - 0, + None, ), } diff --git a/homeassistant/components/homeassistant/strings.json b/homeassistant/components/homeassistant/strings.json index 95fc7c5aa5b5cb..16cad4835abde7 100644 --- a/homeassistant/components/homeassistant/strings.json +++ b/homeassistant/components/homeassistant/strings.json @@ -27,6 +27,15 @@ "multiple_integration_config_errors": { "message": "Failed to process config for integration {domain} due to multiple ({errors}) errors. Check the logs for more information." }, + "oauth2_helper_reauth_required": { + "message": "Credentials are invalid, re-authentication required" + }, + "oauth2_helper_refresh_failed": { + "message": "OAuth2 token refresh failed for {domain}" + }, + "oauth2_helper_refresh_transient": { + "message": "Temporary error refreshing credentials for {domain}, try again later" + }, "platform_component_load_err": { "message": "Platform error: {domain} - {error}." 
}, diff --git a/homeassistant/components/ifttt/strings.json b/homeassistant/components/ifttt/strings.json index 817e6a7872e718..13b4181fc85051 100644 --- a/homeassistant/components/ifttt/strings.json +++ b/homeassistant/components/ifttt/strings.json @@ -2,6 +2,7 @@ "config": { "abort": { "cloud_not_connected": "[%key:common::config_flow::abort::cloud_not_connected%]", + "reconfigure_successful": "**Reconfiguration was successful**\n\nGo to the \"Make a web request\" action from the [IFTTT webhook applet]({applet_url}) and update the webhook with the following settings:\n\n- URL: `{webhook_url}`\n- Method: POST\n- Content Type: application/json\n\nSee [the documentation]({docs_url}) on how to configure automations to handle incoming data.", "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "webhook_not_internet_accessible": "[%key:common::config_flow::abort::webhook_not_internet_accessible%]" }, @@ -9,6 +10,10 @@ "default": "To send events to Home Assistant, you will need to use the \"Make a web request\" action from the [IFTTT webhook applet]({applet_url}).\n\nFill in the following info:\n\n- URL: `{webhook_url}`\n- Method: POST\n- Content Type: application/json\n\nSee [the documentation]({docs_url}) on how to configure automations to handle incoming data." 
}, "step": { + "reconfigure": { + "description": "Are you sure you want to reconfigure IFTTT?", + "title": "Reconfigure IFTTT webhook applet" + }, "user": { "description": "Are you sure you want to set up IFTTT?", "title": "Set up the IFTTT webhook applet" diff --git a/homeassistant/components/kaleidescape/entity.py b/homeassistant/components/kaleidescape/entity.py index 1c391b6600b3ec..f9a67323f82a42 100644 --- a/homeassistant/components/kaleidescape/entity.py +++ b/homeassistant/components/kaleidescape/entity.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from homeassistant.core import callback from homeassistant.helpers.device_registry import DeviceInfo @@ -44,7 +44,7 @@ async def async_added_to_hass(self) -> None: """Register update listener.""" @callback - def _update(event: str) -> None: + def _update(event: str, *args: Any) -> None: """Handle device state changes.""" self.async_write_ha_state() diff --git a/homeassistant/components/kaleidescape/manifest.json b/homeassistant/components/kaleidescape/manifest.json index ee607829b7affa..6d5a3801247e7f 100644 --- a/homeassistant/components/kaleidescape/manifest.json +++ b/homeassistant/components/kaleidescape/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/kaleidescape", "integration_type": "device", "iot_class": "local_push", - "requirements": ["pykaleidescape==1.0.2"], + "requirements": ["pykaleidescape==1.1.1"], "ssdp": [ { "deviceType": "schemas-upnp-org:device:Basic:1", diff --git a/homeassistant/components/locative/strings.json b/homeassistant/components/locative/strings.json index b43d634a8684c7..cd6996590f3cdc 100644 --- a/homeassistant/components/locative/strings.json +++ b/homeassistant/components/locative/strings.json @@ -2,6 +2,7 @@ "config": { "abort": { "cloud_not_connected": "[%key:common::config_flow::abort::cloud_not_connected%]", + "reconfigure_successful": 
"**Reconfiguration was successful**\n\nGo to webhooks in the Locative app and update webhook with the following settings:\n\n- URL: `{webhook_url}`\n- Method: POST\n\nSee [the documentation]({docs_url}) for further details.", "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "webhook_not_internet_accessible": "[%key:common::config_flow::abort::webhook_not_internet_accessible%]" }, @@ -9,6 +10,10 @@ "default": "To send locations to Home Assistant, you will need to set up the webhook feature in the Locative app.\n\nFill in the following info:\n\n- URL: `{webhook_url}`\n- Method: POST\n\nSee [the documentation]({docs_url}) for further details." }, "step": { + "reconfigure": { + "description": "Do you want to start reconfiguration?", + "title": "Reconfigure Locative webhook" + }, "user": { "description": "[%key:common::config_flow::description::confirm_setup%]", "title": "Set up the Locative webhook" diff --git a/homeassistant/components/lunatone/config_flow.py b/homeassistant/components/lunatone/config_flow.py index 4dc5d8c03ecf69..b5004ffdce4af7 100644 --- a/homeassistant/components/lunatone/config_flow.py +++ b/homeassistant/components/lunatone/config_flow.py @@ -22,11 +22,6 @@ ) -def compose_title(name: str | None, serial_number: int) -> str: - """Compose a title string from a given name and serial number.""" - return f"{name or 'DALI Gateway'} {serial_number}" - - class LunatoneConfigFlow(ConfigFlow, domain=DOMAIN): """Lunatone config flow.""" @@ -54,22 +49,17 @@ async def async_step_user( except aiohttp.ClientConnectionError: errors["base"] = "cannot_connect" else: - if info_api.data is None or info_api.serial_number is None: + if info_api.serial_number is None: errors["base"] = "missing_device_info" else: await self.async_set_unique_id(str(info_api.serial_number)) if self.source == SOURCE_RECONFIGURE: self._abort_if_unique_id_mismatch() return self.async_update_reload_and_abort( - self._get_reconfigure_entry(), - 
data_updates=data, - title=compose_title(info_api.name, info_api.serial_number), + self._get_reconfigure_entry(), data_updates=data, title=url ) self._abort_if_unique_id_configured() - return self.async_create_entry( - title=compose_title(info_api.name, info_api.serial_number), - data={CONF_URL: url}, - ) + return self.async_create_entry(title=url, data={CONF_URL: url}) return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, diff --git a/homeassistant/components/mailgun/strings.json b/homeassistant/components/mailgun/strings.json index 50b2f9cbe65cef..f7cada0e942158 100644 --- a/homeassistant/components/mailgun/strings.json +++ b/homeassistant/components/mailgun/strings.json @@ -2,6 +2,7 @@ "config": { "abort": { "cloud_not_connected": "[%key:common::config_flow::abort::cloud_not_connected%]", + "reconfigure_successful": "**Reconfiguration was successful**\n\nGo to [webhooks in Mailgun]({mailgun_url}) and update the webhook with the following settings:\n\n- URL: `{webhook_url}`\n- Method: POST\n- Content Type: application/json\n\nSee [the documentation]({docs_url}) on how to configure automations to handle incoming data.", "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "webhook_not_internet_accessible": "[%key:common::config_flow::abort::webhook_not_internet_accessible%]" }, @@ -9,6 +10,10 @@ "default": "To send events to Home Assistant, you will need to set up a [webhook with Mailgun]({mailgun_url}).\n\nFill in the following info:\n\n- URL: `{webhook_url}`\n- Method: POST\n- Content Type: application/json\n\nSee [the documentation]({docs_url}) on how to configure automations to handle incoming data." 
}, "step": { + "reconfigure": { + "description": "Are you sure you want to reconfigure Mailgun?", + "title": "Reconfigure Mailgun webhook" + }, "user": { "description": "Are you sure you want to set up Mailgun?", "title": "Set up the Mailgun webhook" diff --git a/homeassistant/components/mastodon/const.py b/homeassistant/components/mastodon/const.py index b26aca307efa20..592b6a2300ebc1 100644 --- a/homeassistant/components/mastodon/const.py +++ b/homeassistant/components/mastodon/const.py @@ -21,3 +21,5 @@ ATTR_MEDIA = "media" ATTR_MEDIA_DESCRIPTION = "media_description" ATTR_LANGUAGE = "language" +ATTR_DURATION = "duration" +ATTR_HIDE_NOTIFICATIONS = "hide_notifications" diff --git a/homeassistant/components/mastodon/icons.json b/homeassistant/components/mastodon/icons.json index 2883f2e857f10b..e9185ee13b18e2 100644 --- a/homeassistant/components/mastodon/icons.json +++ b/homeassistant/components/mastodon/icons.json @@ -35,8 +35,14 @@ "get_account": { "service": "mdi:account-search" }, + "mute_account": { + "service": "mdi:account-voice-off" + }, "post": { "service": "mdi:message-text" + }, + "unmute_account": { + "service": "mdi:account-voice" } } } diff --git a/homeassistant/components/mastodon/services.py b/homeassistant/components/mastodon/services.py index dbb5fc2afdc9a3..2208588570c2f6 100644 --- a/homeassistant/components/mastodon/services.py +++ b/homeassistant/components/mastodon/services.py @@ -1,11 +1,18 @@ """Define services for the Mastodon integration.""" +from datetime import timedelta from enum import StrEnum from functools import partial +from math import isfinite from typing import Any from mastodon import Mastodon -from mastodon.Mastodon import Account, MastodonAPIError, MediaAttachment +from mastodon.Mastodon import ( + Account, + MastodonAPIError, + MastodonNotFoundError, + MediaAttachment, +) import voluptuous as vol from homeassistant.const import ATTR_CONFIG_ENTRY_ID @@ -17,11 +24,13 @@ callback, ) from homeassistant.exceptions import 
HomeAssistantError, ServiceValidationError -from homeassistant.helpers import service +from homeassistant.helpers import config_validation as cv, service from .const import ( ATTR_ACCOUNT_NAME, ATTR_CONTENT_WARNING, + ATTR_DURATION, + ATTR_HIDE_NOTIFICATIONS, ATTR_IDEMPOTENCY_KEY, ATTR_LANGUAGE, ATTR_MEDIA, @@ -34,6 +43,8 @@ from .coordinator import MastodonConfigEntry from .utils import get_media_type +MAX_DURATION_SECONDS = 315360000 # 10 years + class StatusVisibility(StrEnum): """StatusVisibility model.""" @@ -51,6 +62,27 @@ class StatusVisibility(StrEnum): vol.Required(ATTR_ACCOUNT_NAME): str, } ) +SERVICE_MUTE_ACCOUNT = "mute_account" +SERVICE_MUTE_ACCOUNT_SCHEMA = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY_ID): str, + vol.Required(ATTR_ACCOUNT_NAME): str, + vol.Optional(ATTR_DURATION): vol.All( + cv.time_period, + vol.Range( + min=timedelta(seconds=1), max=timedelta(seconds=MAX_DURATION_SECONDS) + ), + ), + vol.Optional(ATTR_HIDE_NOTIFICATIONS, default=True): bool, + } +) +SERVICE_UNMUTE_ACCOUNT = "unmute_account" +SERVICE_UNMUTE_ACCOUNT_SCHEMA = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY_ID): str, + vol.Required(ATTR_ACCOUNT_NAME): str, + } +) SERVICE_POST = "post" SERVICE_POST_SCHEMA = vol.Schema( { @@ -77,11 +109,40 @@ def async_setup_services(hass: HomeAssistant) -> None: schema=SERVICE_GET_ACCOUNT_SCHEMA, supports_response=SupportsResponse.ONLY, ) + hass.services.async_register( + DOMAIN, + SERVICE_MUTE_ACCOUNT, + _async_mute_account, + schema=SERVICE_MUTE_ACCOUNT_SCHEMA, + ) + hass.services.async_register( + DOMAIN, + SERVICE_UNMUTE_ACCOUNT, + _async_unmute_account, + schema=SERVICE_UNMUTE_ACCOUNT_SCHEMA, + ) hass.services.async_register( DOMAIN, SERVICE_POST, _async_post, schema=SERVICE_POST_SCHEMA ) +async def _async_account_lookup( + hass: HomeAssistant, client: Mastodon, account_name: str +) -> Account: + """Lookup a Mastodon account by its username.""" + try: + account: Account = await hass.async_add_executor_job( + 
partial(client.account_lookup, acct=account_name) + ) + except MastodonNotFoundError: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="account_not_found", + translation_placeholders={"account_name": account_name}, + ) from None + return account + + async def _async_get_account(call: ServiceCall) -> ServiceResponse: """Get account information.""" entry: MastodonConfigEntry = service.async_get_config_entry( @@ -92,9 +153,7 @@ async def _async_get_account(call: ServiceCall) -> ServiceResponse: account_name: str = call.data[ATTR_ACCOUNT_NAME] try: - account: Account = await call.hass.async_add_executor_job( - partial(client.account_lookup, acct=account_name) - ) + account = await _async_account_lookup(call.hass, client, account_name) except MastodonAPIError as err: raise HomeAssistantError( translation_domain=DOMAIN, @@ -105,6 +164,72 @@ async def _async_get_account(call: ServiceCall) -> ServiceResponse: return {"account": account} +async def _async_mute_account(call: ServiceCall) -> ServiceResponse: + """Mute account.""" + entry: MastodonConfigEntry = service.async_get_config_entry( + call.hass, DOMAIN, call.data[ATTR_CONFIG_ENTRY_ID] + ) + client = entry.runtime_data.client + + account_name: str = call.data[ATTR_ACCOUNT_NAME] + hide_notifications: bool = call.data[ATTR_HIDE_NOTIFICATIONS] + duration: int | None = None + if call.data.get(ATTR_DURATION) is not None: + td: timedelta = call.data[ATTR_DURATION] + duration_seconds = td.total_seconds() + + if not isfinite(duration_seconds) or duration_seconds > MAX_DURATION_SECONDS: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="mute_duration_too_long", + ) + + duration = int(duration_seconds) + + try: + account = await _async_account_lookup(call.hass, client, account_name) + await call.hass.async_add_executor_job( + partial( + client.account_mute, + id=account.id, + notifications=hide_notifications, + duration=duration, + ) + ) + except MastodonAPIError as 
err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="unable_to_mute_account", + translation_placeholders={"account_name": account_name}, + ) from err + + return None + + +async def _async_unmute_account(call: ServiceCall) -> ServiceResponse: + """Unmute account.""" + entry: MastodonConfigEntry = service.async_get_config_entry( + call.hass, DOMAIN, call.data[ATTR_CONFIG_ENTRY_ID] + ) + client = entry.runtime_data.client + + account_name: str = call.data[ATTR_ACCOUNT_NAME] + + try: + account = await _async_account_lookup(call.hass, client, account_name) + await call.hass.async_add_executor_job( + partial(client.account_unmute, id=account.id) + ) + except MastodonAPIError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="unable_to_unmute_account", + translation_placeholders={"account_name": account_name}, + ) from err + + return None + + async def _async_post(call: ServiceCall) -> ServiceResponse: """Post a status.""" entry: MastodonConfigEntry = service.async_get_config_entry( diff --git a/homeassistant/components/mastodon/services.yaml b/homeassistant/components/mastodon/services.yaml index 9027c6f9fcc107..bdeefc8b570870 100644 --- a/homeassistant/components/mastodon/services.yaml +++ b/homeassistant/components/mastodon/services.yaml @@ -9,6 +9,38 @@ get_account: required: true selector: text: +mute_account: + fields: + config_entry_id: + required: true + selector: + config_entry: + integration: mastodon + account_name: + required: true + selector: + text: + duration: + required: false + selector: + duration: + enable_day: true + hide_notifications: + default: true + required: false + selector: + boolean: +unmute_account: + fields: + config_entry_id: + required: true + selector: + config_entry: + integration: mastodon + account_name: + required: true + selector: + text: post: fields: config_entry_id: diff --git a/homeassistant/components/mastodon/strings.json 
b/homeassistant/components/mastodon/strings.json index 9b07630a3c33ff..5bfc629f1f3fbf 100644 --- a/homeassistant/components/mastodon/strings.json +++ b/homeassistant/components/mastodon/strings.json @@ -95,21 +95,33 @@ } }, "exceptions": { + "account_not_found": { + "message": "Mastodon account \"{account_name}\" not found." + }, "auth_failed": { "message": "Authentication failed, please reauthenticate with Mastodon." }, "idempotency_key_too_short": { "message": "Idempotency key must be at least 4 characters long." }, + "mute_duration_too_long": { + "message": "Mute duration is too long." + }, "not_whitelisted_directory": { "message": "{media} is not a whitelisted directory." }, "unable_to_get_account": { "message": "Unable to get account \"{account_name}\"." }, + "unable_to_mute_account": { + "message": "Unable to mute account \"{account_name}\"" + }, "unable_to_send_message": { "message": "Unable to send message." }, + "unable_to_unmute_account": { + "message": "Unable to unmute account \"{account_name}\"" + }, "unable_to_upload_image": { "message": "Unable to upload image {media_path}." } @@ -139,6 +151,28 @@ }, "name": "Get account" }, + "mute_account": { + "description": "Mutes a Mastodon account.", + "fields": { + "account_name": { + "description": "The Mastodon account username to mute (e.g. 
@user@instance).", + "name": "Account name" + }, + "config_entry_id": { + "description": "Select the Mastodon instance to mute this account on.", + "name": "Mastodon instance" + }, + "duration": { + "description": "The duration to mute the account for (default: indefinitely).", + "name": "Duration" + }, + "hide_notifications": { + "description": "Hide notifications from this account while muted.", + "name": "Hide notifications" + } + }, + "name": "Mute account" + }, "post": { "description": "Posts a status on your Mastodon account.", "fields": { @@ -180,6 +214,20 @@ } }, "name": "Post" + }, + "unmute_account": { + "description": "Unmutes a Mastodon account.", + "fields": { + "account_name": { + "description": "The Mastodon account username to unmute (e.g. @user@instance).", + "name": "Account name" + }, + "config_entry_id": { + "description": "Select the Mastodon instance to unmute this account on.", + "name": "Mastodon instance" + } + }, + "name": "Unmute account" } } } diff --git a/homeassistant/components/meteo_france/sensor.py b/homeassistant/components/meteo_france/sensor.py index 975fb038650156..de196ae00a4c4c 100644 --- a/homeassistant/components/meteo_france/sensor.py +++ b/homeassistant/components/meteo_france/sensor.py @@ -333,10 +333,14 @@ def native_value(self) -> str | None: ) @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes.""" return { - **readable_phenomenons_dict(self.coordinator.data.phenomenons_max_colors), + k: v + for k, v in readable_phenomenons_dict( + self.coordinator.data.phenomenons_max_colors + ).items() + if k is not None } diff --git a/homeassistant/components/mysensors/const.py b/homeassistant/components/mysensors/const.py index a87b78b549ea2d..05e19d452a2144 100644 --- a/homeassistant/components/mysensors/const.py +++ b/homeassistant/components/mysensors/const.py @@ -33,7 +33,7 @@ CHILD_CALLBACK: str = "mysensors_child_callback_{}_{}_{}_{}" 
NODE_CALLBACK: str = "mysensors_node_callback_{}_{}" MYSENSORS_DISCOVERY: str = "mysensors_discovery_{}_{}" -MYSENSORS_NODE_DISCOVERY: str = "mysensors_node_discovery" +MYSENSORS_NODE_DISCOVERY: str = "mysensors_node_discovery_{}" TYPE: Final = "type" UPDATE_DELAY: float = 0.1 diff --git a/homeassistant/components/mysensors/helpers.py b/homeassistant/components/mysensors/helpers.py index 9ed41dfe4e9f25..3c9b841bdb339e 100644 --- a/homeassistant/components/mysensors/helpers.py +++ b/homeassistant/components/mysensors/helpers.py @@ -70,7 +70,7 @@ def discover_mysensors_node( discovered_nodes.add(node_id) async_dispatcher_send( hass, - MYSENSORS_NODE_DISCOVERY, + MYSENSORS_NODE_DISCOVERY.format(gateway_id), { ATTR_GATEWAY_ID: gateway_id, ATTR_NODE_ID: node_id, diff --git a/homeassistant/components/mysensors/sensor.py b/homeassistant/components/mysensors/sensor.py index 3793bed8af2ef7..c6fee7ba52a887 100644 --- a/homeassistant/components/mysensors/sensor.py +++ b/homeassistant/components/mysensors/sensor.py @@ -244,7 +244,7 @@ def async_node_discover(discovery_info: NodeDiscoveryInfo) -> None: config_entry.async_on_unload( async_dispatcher_connect( hass, - MYSENSORS_NODE_DISCOVERY, + MYSENSORS_NODE_DISCOVERY.format(config_entry.entry_id), async_node_discover, ), ) diff --git a/homeassistant/components/namecheapdns/strings.json b/homeassistant/components/namecheapdns/strings.json index 7685de9cf0db9b..da924a9faa331a 100644 --- a/homeassistant/components/namecheapdns/strings.json +++ b/homeassistant/components/namecheapdns/strings.json @@ -30,7 +30,7 @@ "password": "[%key:component::namecheapdns::config::step::user::data_description::password%]" }, "description": "You can find the Dynamic DNS password in your Namecheap account under [Domain List > {domain} > Manage > Advanced DNS > Dynamic DNS]({account_panel}).", - "title": "Re-configure {name}" + "title": "Reconfigure {name}" }, "user": { "data": { diff --git 
a/homeassistant/components/onedrive_for_business/diagnostics.py b/homeassistant/components/onedrive_for_business/diagnostics.py new file mode 100644 index 00000000000000..404cb3b507de0b --- /dev/null +++ b/homeassistant/components/onedrive_for_business/diagnostics.py @@ -0,0 +1,33 @@ +"""Diagnostics support for OneDrive for Business.""" + +from __future__ import annotations + +from dataclasses import asdict +from typing import Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN +from homeassistant.core import HomeAssistant + +from .coordinator import OneDriveConfigEntry + +TO_REDACT = {"display_name", "email", CONF_ACCESS_TOKEN, CONF_TOKEN} + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, + entry: OneDriveConfigEntry, +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + + coordinator = entry.runtime_data.coordinator + + data = { + "drive": asdict(coordinator.data), + "config": { + **entry.data, + **entry.options, + }, + } + + return async_redact_data(data, TO_REDACT) diff --git a/homeassistant/components/onedrive_for_business/manifest.json b/homeassistant/components/onedrive_for_business/manifest.json index 42ec77be274cd5..c3a6ceb537b484 100644 --- a/homeassistant/components/onedrive_for_business/manifest.json +++ b/homeassistant/components/onedrive_for_business/manifest.json @@ -9,6 +9,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["onedrive_personal_sdk"], - "quality_scale": "bronze", + "quality_scale": "platinum", "requirements": ["onedrive-personal-sdk==0.1.4"] } diff --git a/homeassistant/components/onedrive_for_business/quality_scale.yaml b/homeassistant/components/onedrive_for_business/quality_scale.yaml index 05e6ffcc17a490..566b65e0311dc1 100644 --- a/homeassistant/components/onedrive_for_business/quality_scale.yaml +++ b/homeassistant/components/onedrive_for_business/quality_scale.yaml @@ -45,7 
+45,7 @@ rules: # Gold devices: done - diagnostics: todo + diagnostics: done discovery-update-info: status: exempt comment: | diff --git a/homeassistant/components/proxmoxve/__init__.py b/homeassistant/components/proxmoxve/__init__.py index ed9652c55c6d04..1f5e3eae2f98e5 100644 --- a/homeassistant/components/proxmoxve/__init__.py +++ b/homeassistant/components/proxmoxve/__init__.py @@ -2,16 +2,11 @@ from __future__ import annotations -from datetime import timedelta import logging -from typing import Any -from proxmoxer import AuthenticationError, ProxmoxAPI -import requests.exceptions -from requests.exceptions import ConnectTimeout, SSLError import voluptuous as vol -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -22,17 +17,13 @@ ) from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv, issue_registry as ir +from homeassistant.helpers import ( + config_validation as cv, + entity_registry as er, + issue_registry as ir, +) from homeassistant.helpers.typing import ConfigType -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from .common import ( - ProxmoxClient, - ResourceException, - call_api_container_vm, - parse_api_container_vm, -) from .const import ( CONF_CONTAINERS, CONF_NODE, @@ -43,16 +34,11 @@ DEFAULT_REALM, DEFAULT_VERIFY_SSL, DOMAIN, - TYPE_CONTAINER, - TYPE_VM, - UPDATE_INTERVAL, ) +from .coordinator import ProxmoxConfigEntry, ProxmoxCoordinator PLATFORMS = [Platform.BINARY_SENSOR] -type ProxmoxConfigEntry = ConfigEntry[ - dict[str, dict[str, dict[int, DataUpdateCoordinator[dict[str, Any] | None]]]] -] CONFIG_SCHEMA = vol.Schema( { @@ -93,7 +79,7 @@ extra=vol.ALLOW_EXTRA, ) 
-LOGGER = logging.getLogger(__name__) +_LOGGER = logging.getLogger(__name__) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: @@ -150,132 +136,42 @@ async def _async_setup(hass: HomeAssistant, config: ConfigType) -> None: async def async_setup_entry(hass: HomeAssistant, entry: ProxmoxConfigEntry) -> bool: - """Set up a ProxmoxVE instance from a config entry.""" - - def build_client() -> ProxmoxClient: - """Build and return the Proxmox client connection.""" - host = entry.data[CONF_HOST] - port = entry.data[CONF_PORT] - user = entry.data[CONF_USERNAME] - realm = entry.data[CONF_REALM] - password = entry.data[CONF_PASSWORD] - verify_ssl = entry.data[CONF_VERIFY_SSL] - try: - client = ProxmoxClient(host, port, user, realm, password, verify_ssl) - client.build_client() - except AuthenticationError as ex: - raise ConfigEntryAuthFailed("Invalid credentials") from ex - except SSLError as ex: - raise ConfigEntryAuthFailed( - f"Unable to verify proxmox server SSL. Try using 'verify_ssl: false' for proxmox instance {host}:{port}" - ) from ex - except ConnectTimeout as ex: - raise ConfigEntryNotReady("Connection timed out") from ex - except requests.exceptions.ConnectionError as ex: - raise ConfigEntryNotReady(f"Host {host} is not reachable: {ex}") from ex - else: - return client + """Set up a ProxmoxVE from a config entry.""" + coordinator = ProxmoxCoordinator(hass, entry) + await coordinator.async_config_entry_first_refresh() - proxmox_client = await hass.async_add_executor_job(build_client) - - coordinators: dict[ - str, dict[str, dict[int, DataUpdateCoordinator[dict[str, Any] | None]]] - ] = {} - entry.runtime_data = coordinators + entry.runtime_data = coordinator + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - host_name = entry.data[CONF_HOST] - coordinators[host_name] = {} + return True - proxmox: ProxmoxAPI = proxmox_client.get_api_client() - for node_config in entry.data[CONF_NODES]: - node_name = 
node_config[CONF_NODE] - node_coordinators = coordinators[host_name][node_name] = {} +async def async_migrate_entry(hass: HomeAssistant, entry: ProxmoxConfigEntry) -> bool: + """Migrate old config entries.""" - try: - vms, containers = await hass.async_add_executor_job( - _get_vms_containers, proxmox, node_config + # Migration for only the old binary sensors to new unique_id format + if entry.version < 2: + ent_reg = er.async_get(hass) + for entity_entry in er.async_entries_for_config_entry(ent_reg, entry.entry_id): + new_unique_id = ( + f"{entry.entry_id}_{entity_entry.unique_id.split('_')[-2]}_status" ) - except (ResourceException, requests.exceptions.ConnectionError) as err: - LOGGER.error("Unable to get vms/containers for node %s: %s", node_name, err) - continue - for vm in vms: - coordinator = _create_coordinator_container_vm( - hass, entry, proxmox, host_name, node_name, vm["vmid"], TYPE_VM + _LOGGER.debug( + "Migrating entity %s from old unique_id %s to new unique_id %s", + entity_entry.entity_id, + entity_entry.unique_id, + new_unique_id, ) - await coordinator.async_config_entry_first_refresh() - - node_coordinators[vm["vmid"]] = coordinator - - for container in containers: - coordinator = _create_coordinator_container_vm( - hass, - entry, - proxmox, - host_name, - node_name, - container["vmid"], - TYPE_CONTAINER, + ent_reg.async_update_entity( + entity_entry.entity_id, new_unique_id=new_unique_id ) - await coordinator.async_config_entry_first_refresh() - node_coordinators[container["vmid"]] = coordinator - - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + hass.config_entries.async_update_entry(entry, version=2) return True -def _get_vms_containers( - proxmox: ProxmoxAPI, - node_config: dict[str, Any], -) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]: - """Get vms and containers for a node.""" - vms = proxmox.nodes(node_config[CONF_NODE]).qemu.get() - containers = proxmox.nodes(node_config[CONF_NODE]).lxc.get() - assert vms 
is not None and containers is not None - return vms, containers - - -def _create_coordinator_container_vm( - hass: HomeAssistant, - entry: ProxmoxConfigEntry, - proxmox: ProxmoxAPI, - host_name: str, - node_name: str, - vm_id: int, - vm_type: int, -) -> DataUpdateCoordinator[dict[str, Any] | None]: - """Create and return a DataUpdateCoordinator for a vm/container.""" - - async def async_update_data() -> dict[str, Any] | None: - """Call the api and handle the response.""" - - def poll_api() -> dict[str, Any] | None: - """Call the api.""" - return call_api_container_vm(proxmox, node_name, vm_id, vm_type) - - vm_status = await hass.async_add_executor_job(poll_api) - - if vm_status is None: - LOGGER.warning( - "Vm/Container %s unable to be found in node %s", vm_id, node_name - ) - return None - - return parse_api_container_vm(vm_status) - - return DataUpdateCoordinator( - hass, - LOGGER, - config_entry=entry, - name=f"proxmox_coordinator_{host_name}_{node_name}_{vm_id}", - update_method=async_update_data, - update_interval=timedelta(seconds=UPDATE_INTERVAL), - ) - - async def async_unload_entry(hass: HomeAssistant, entry: ProxmoxConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/proxmoxve/binary_sensor.py b/homeassistant/components/proxmoxve/binary_sensor.py index abc3ced24f012d..1d607a741bd7cf 100644 --- a/homeassistant/components/proxmoxve/binary_sensor.py +++ b/homeassistant/components/proxmoxve/binary_sensor.py @@ -2,20 +2,77 @@ from __future__ import annotations -from typing import TYPE_CHECKING +from collections.abc import Callable +from dataclasses import dataclass +import logging +from typing import Any from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, + BinarySensorEntityDescription, ) -from homeassistant.const import CONF_HOST +from homeassistant.const import EntityCategory from homeassistant.core 
import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from . import ProxmoxConfigEntry -from .const import CONF_CONTAINERS, CONF_NODE, CONF_NODES, CONF_VMS -from .entity import ProxmoxEntity +from .const import NODE_ONLINE, VM_CONTAINER_RUNNING +from .coordinator import ProxmoxConfigEntry, ProxmoxCoordinator, ProxmoxNodeData +from .entity import ProxmoxContainerEntity, ProxmoxNodeEntity, ProxmoxVMEntity + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(frozen=True, kw_only=True) +class ProxmoxContainerBinarySensorEntityDescription(BinarySensorEntityDescription): + """Class to hold Proxmox container binary sensor description.""" + + state_fn: Callable[[dict[str, Any]], bool | None] + + +@dataclass(frozen=True, kw_only=True) +class ProxmoxVMBinarySensorEntityDescription(BinarySensorEntityDescription): + """Class to hold Proxmox endpoint binary sensor description.""" + + state_fn: Callable[[dict[str, Any]], bool | None] + + +@dataclass(frozen=True, kw_only=True) +class ProxmoxNodeBinarySensorEntityDescription(BinarySensorEntityDescription): + """Class to hold Proxmox node binary sensor description.""" + + state_fn: Callable[[ProxmoxNodeData], bool | None] + + +NODE_SENSORS: tuple[ProxmoxNodeBinarySensorEntityDescription, ...] = ( + ProxmoxNodeBinarySensorEntityDescription( + key="status", + translation_key="status", + state_fn=lambda data: data.node["status"] == NODE_ONLINE, + device_class=BinarySensorDeviceClass.RUNNING, + entity_category=EntityCategory.DIAGNOSTIC, + ), +) + +CONTAINER_SENSORS: tuple[ProxmoxContainerBinarySensorEntityDescription, ...] 
= ( + ProxmoxContainerBinarySensorEntityDescription( + key="status", + translation_key="status", + state_fn=lambda data: data["status"] == VM_CONTAINER_RUNNING, + device_class=BinarySensorDeviceClass.RUNNING, + entity_category=EntityCategory.DIAGNOSTIC, + ), +) + +VM_SENSORS: tuple[ProxmoxVMBinarySensorEntityDescription, ...] = ( + ProxmoxVMBinarySensorEntityDescription( + key="status", + translation_key="status", + state_fn=lambda data: data["status"] == VM_CONTAINER_RUNNING, + device_class=BinarySensorDeviceClass.RUNNING, + entity_category=EntityCategory.DIAGNOSTIC, + ), +) async def async_setup_entry( @@ -23,78 +80,134 @@ async def async_setup_entry( entry: ProxmoxConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: - """Set up binary sensors.""" - sensors = [] + """Set up Proxmox VE binary sensors.""" + coordinator = entry.runtime_data + + def _async_add_new_nodes(nodes: list[ProxmoxNodeData]) -> None: + """Add new node binary sensors.""" + async_add_entities( + ProxmoxNodeBinarySensor(coordinator, entity_description, node) + for node in nodes + for entity_description in NODE_SENSORS + ) - host_name = entry.data[CONF_HOST] - host_name_coordinators = entry.runtime_data[host_name] + def _async_add_new_vms( + vms: list[tuple[ProxmoxNodeData, dict[str, Any]]], + ) -> None: + """Add new VM binary sensors.""" + async_add_entities( + ProxmoxVMBinarySensor(coordinator, entity_description, vm, node_data) + for (node_data, vm) in vms + for entity_description in VM_SENSORS + ) - for node_config in entry.data[CONF_NODES]: - node_name = node_config[CONF_NODE] + def _async_add_new_containers( + containers: list[tuple[ProxmoxNodeData, dict[str, Any]]], + ) -> None: + """Add new container binary sensors.""" + async_add_entities( + ProxmoxContainerBinarySensor( + coordinator, entity_description, container, node_data + ) + for (node_data, container) in containers + for entity_description in CONTAINER_SENSORS + ) - for dev_id in node_config[CONF_VMS] + 
node_config[CONF_CONTAINERS]: - coordinator = host_name_coordinators[node_name][dev_id] + coordinator.new_nodes_callbacks.append(_async_add_new_nodes) + coordinator.new_vms_callbacks.append(_async_add_new_vms) + coordinator.new_containers_callbacks.append(_async_add_new_containers) - if TYPE_CHECKING: - assert coordinator.data is not None - name = coordinator.data["name"] - sensor = create_binary_sensor( - coordinator, host_name, node_name, dev_id, name - ) - sensors.append(sensor) - - async_add_entities(sensors) - - -def create_binary_sensor( - coordinator, - host_name: str, - node_name: str, - vm_id: int, - name: str, -) -> ProxmoxBinarySensor: - """Create a binary sensor based on the given data.""" - return ProxmoxBinarySensor( - coordinator=coordinator, - unique_id=f"proxmox_{node_name}_{vm_id}_running", - name=f"{node_name}_{name}", - icon="", - host_name=host_name, - node_name=node_name, - vm_id=vm_id, + _async_add_new_nodes( + [ + node_data + for node_data in coordinator.data.values() + if node_data.node["node"] in coordinator.known_nodes + ] + ) + _async_add_new_vms( + [ + (node_data, vm_data) + for node_data in coordinator.data.values() + for vmid, vm_data in node_data.vms.items() + if (node_data.node["node"], vmid) in coordinator.known_vms + ] + ) + _async_add_new_containers( + [ + (node_data, container_data) + for node_data in coordinator.data.values() + for vmid, container_data in node_data.containers.items() + if (node_data.node["node"], vmid) in coordinator.known_containers + ] ) -class ProxmoxBinarySensor(ProxmoxEntity, BinarySensorEntity): - """A binary sensor for reading Proxmox VE data.""" +class ProxmoxNodeBinarySensor(ProxmoxNodeEntity, BinarySensorEntity): + """A binary sensor for reading Proxmox VE node data.""" - _attr_device_class = BinarySensorDeviceClass.RUNNING + entity_description: ProxmoxNodeBinarySensorEntityDescription def __init__( self, - coordinator: DataUpdateCoordinator, - unique_id: str, - name: str, - icon: str, - host_name: 
str, - node_name: str, - vm_id: int, + coordinator: ProxmoxCoordinator, + entity_description: ProxmoxNodeBinarySensorEntityDescription, + node_data: ProxmoxNodeData, ) -> None: - """Create the binary sensor for vms or containers.""" - super().__init__( - coordinator, unique_id, name, icon, host_name, node_name, vm_id - ) + """Initialize Proxmox node binary sensor entity.""" + self.entity_description = entity_description + super().__init__(coordinator, node_data) + + self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{node_data.node['id']}_{entity_description.key}" @property def is_on(self) -> bool | None: - """Return the state of the binary sensor.""" - if (data := self.coordinator.data) is None: - return None + """Return true if the binary sensor is on.""" + return self.entity_description.state_fn(self.coordinator.data[self.device_name]) + + +class ProxmoxVMBinarySensor(ProxmoxVMEntity, BinarySensorEntity): + """Representation of a Proxmox VM binary sensor.""" + + entity_description: ProxmoxVMBinarySensorEntityDescription + + def __init__( + self, + coordinator: ProxmoxCoordinator, + entity_description: ProxmoxVMBinarySensorEntityDescription, + vm_data: dict[str, Any], + node_data: ProxmoxNodeData, + ) -> None: + """Initialize the Proxmox VM binary sensor.""" + self.entity_description = entity_description + super().__init__(coordinator, vm_data, node_data) - return data["status"] == "running" + self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_id}_{entity_description.key}" @property - def available(self) -> bool: - """Return sensor availability.""" + def is_on(self) -> bool | None: + """Return true if the binary sensor is on.""" + return self.entity_description.state_fn(self.vm_data) + + +class ProxmoxContainerBinarySensor(ProxmoxContainerEntity, BinarySensorEntity): + """Representation of a Proxmox Container binary sensor.""" - return super().available and self.coordinator.data is not None + entity_description: 
ProxmoxContainerBinarySensorEntityDescription + + def __init__( + self, + coordinator: ProxmoxCoordinator, + entity_description: ProxmoxContainerBinarySensorEntityDescription, + container_data: dict[str, Any], + node_data: ProxmoxNodeData, + ) -> None: + """Initialize the Proxmox Container binary sensor.""" + self.entity_description = entity_description + super().__init__(coordinator, container_data, node_data) + + self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_id}_{entity_description.key}" + + @property + def is_on(self) -> bool | None: + """Return true if the binary sensor is on.""" + return self.entity_description.state_fn(self.container_data) diff --git a/homeassistant/components/proxmoxve/common.py b/homeassistant/components/proxmoxve/common.py deleted file mode 100644 index 6dc59cb8dd041c..00000000000000 --- a/homeassistant/components/proxmoxve/common.py +++ /dev/null @@ -1,88 +0,0 @@ -"""Commons for Proxmox VE integration.""" - -from __future__ import annotations - -from typing import Any - -from proxmoxer import ProxmoxAPI -from proxmoxer.core import ResourceException -import requests.exceptions - -from .const import TYPE_CONTAINER, TYPE_VM - - -class ProxmoxClient: - """A wrapper for the proxmoxer ProxmoxAPI client.""" - - _proxmox: ProxmoxAPI - - def __init__( - self, - host: str, - port: int, - user: str, - realm: str, - password: str, - verify_ssl: bool, - ) -> None: - """Initialize the ProxmoxClient.""" - - self._host = host - self._port = port - self._user = user - self._realm = realm - self._password = password - self._verify_ssl = verify_ssl - - def build_client(self) -> None: - """Construct the ProxmoxAPI client. - - Allows inserting the realm within the `user` value. 
- """ - - if "@" in self._user: - user_id = self._user - else: - user_id = f"{self._user}@{self._realm}" - - self._proxmox = ProxmoxAPI( - self._host, - port=self._port, - user=user_id, - password=self._password, - verify_ssl=self._verify_ssl, - ) - - def get_api_client(self) -> ProxmoxAPI: - """Return the ProxmoxAPI client.""" - return self._proxmox - - -def parse_api_container_vm(status: dict[str, Any]) -> dict[str, Any]: - """Get the container or vm api data and return it formatted in a dictionary. - - It is implemented in this way to allow for more data to be added for sensors - in the future. - """ - - return {"status": status["status"], "name": status["name"]} - - -def call_api_container_vm( - proxmox: ProxmoxAPI, - node_name: str, - vm_id: int, - machine_type: int, -) -> dict[str, Any] | None: - """Make proper api calls.""" - status = None - - try: - if machine_type == TYPE_VM: - status = proxmox.nodes(node_name).qemu(vm_id).status.current.get() - elif machine_type == TYPE_CONTAINER: - status = proxmox.nodes(node_name).lxc(vm_id).status.current.get() - except (ResourceException, requests.exceptions.ConnectionError): - return None - - return status diff --git a/homeassistant/components/proxmoxve/config_flow.py b/homeassistant/components/proxmoxve/config_flow.py index 50d1778c4b188d..4985d92c6f6462 100644 --- a/homeassistant/components/proxmoxve/config_flow.py +++ b/homeassistant/components/proxmoxve/config_flow.py @@ -7,6 +7,7 @@ from typing import Any from proxmoxer import AuthenticationError, ProxmoxAPI +from proxmoxer.core import ResourceException import requests from requests.exceptions import ConnectTimeout, SSLError import voluptuous as vol @@ -22,7 +23,6 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv -from .common import ResourceException from .const import ( CONF_CONTAINERS, CONF_NODE, @@ -77,8 +77,6 @@ def _get_nodes_data(data: dict[str, Any]) -> list[dict[str, Any]]: except
(ResourceException, requests.exceptions.ConnectionError) as err: raise ProxmoxNoNodesFound from err - _LOGGER.debug("Proxmox nodes: %s", nodes) - nodes_data: list[dict[str, Any]] = [] for node in nodes: try: @@ -102,7 +100,7 @@ def _get_nodes_data(data: dict[str, Any]) -> list[dict[str, Any]]: class ProxmoxveConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Proxmox VE.""" - VERSION = 1 + VERSION = 2 async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/proxmoxve/const.py b/homeassistant/components/proxmoxve/const.py index da62f89069a918..665201b1cda8bf 100644 --- a/homeassistant/components/proxmoxve/const.py +++ b/homeassistant/components/proxmoxve/const.py @@ -7,6 +7,9 @@ CONF_VMS = "vms" CONF_CONTAINERS = "containers" +NODE_ONLINE = "online" +VM_CONTAINER_RUNNING = "running" + DEFAULT_PORT = 8006 DEFAULT_REALM = "pam" diff --git a/homeassistant/components/proxmoxve/coordinator.py b/homeassistant/components/proxmoxve/coordinator.py new file mode 100644 index 00000000000000..f912bbabefe007 --- /dev/null +++ b/homeassistant/components/proxmoxve/coordinator.py @@ -0,0 +1,220 @@ +"""Data Update Coordinator for Proxmox VE integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass, field +from datetime import timedelta +import logging +from typing import Any + +from proxmoxer import AuthenticationError, ProxmoxAPI +from proxmoxer.core import ResourceException +import requests +from requests.exceptions import ConnectTimeout, SSLError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + CONF_HOST, + CONF_PASSWORD, + CONF_PORT, + CONF_USERNAME, + CONF_VERIFY_SSL, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ( + ConfigEntryAuthFailed, + ConfigEntryError, + ConfigEntryNotReady, +) +from homeassistant.helpers.update_coordinator import 
DataUpdateCoordinator, UpdateFailed + +from .const import CONF_NODE, CONF_REALM, DEFAULT_VERIFY_SSL, DOMAIN + +type ProxmoxConfigEntry = ConfigEntry[ProxmoxCoordinator] + +DEFAULT_UPDATE_INTERVAL = timedelta(seconds=60) + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(slots=True, kw_only=True) +class ProxmoxNodeData: + """All resources for a single Proxmox node.""" + + node: dict[str, str] = field(default_factory=dict) + vms: dict[int, dict[str, Any]] = field(default_factory=dict) + containers: dict[int, dict[str, Any]] = field(default_factory=dict) + + +class ProxmoxCoordinator(DataUpdateCoordinator[dict[str, ProxmoxNodeData]]): + """Data Update Coordinator for Proxmox VE integration.""" + + config_entry: ProxmoxConfigEntry + + def __init__( + self, + hass: HomeAssistant, + config_entry: ProxmoxConfigEntry, + ) -> None: + """Initialize the Proxmox VE coordinator.""" + super().__init__( + hass, + _LOGGER, + config_entry=config_entry, + name=DOMAIN, + update_interval=DEFAULT_UPDATE_INTERVAL, + ) + self.proxmox: ProxmoxAPI + + self.known_nodes: set[str] = set() + self.known_vms: set[tuple[str, int]] = set() + self.known_containers: set[tuple[str, int]] = set() + + self.new_nodes_callbacks: list[Callable[[list[ProxmoxNodeData]], None]] = [] + self.new_vms_callbacks: list[ + Callable[[list[tuple[ProxmoxNodeData, dict[str, Any]]]], None] + ] = [] + self.new_containers_callbacks: list[ + Callable[[list[tuple[ProxmoxNodeData, dict[str, Any]]]], None] + ] = [] + + async def _async_setup(self) -> None: + """Set up the coordinator.""" + try: + await self.hass.async_add_executor_job(self._init_proxmox) + except AuthenticationError as err: + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="invalid_auth", + translation_placeholders={"error": repr(err)}, + ) from err + except SSLError as err: + raise ConfigEntryError( + translation_domain=DOMAIN, + translation_key="ssl_error", + translation_placeholders={"error": repr(err)}, + ) from err + 
except ConnectTimeout as err: + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="timeout_connect", + translation_placeholders={"error": repr(err)}, + ) from err + except (ResourceException, requests.exceptions.ConnectionError) as err: + raise ConfigEntryError( + translation_domain=DOMAIN, + translation_key="no_nodes_found", + translation_placeholders={"error": repr(err)}, + ) from err + + async def _async_update_data(self) -> dict[str, ProxmoxNodeData]: + """Fetch data from Proxmox VE API.""" + + try: + nodes, vms_containers = await self.hass.async_add_executor_job( + self._fetch_all_nodes + ) + except AuthenticationError as err: + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="invalid_auth", + translation_placeholders={"error": repr(err)}, + ) from err + except SSLError as err: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="ssl_error", + translation_placeholders={"error": repr(err)}, + ) from err + except ConnectTimeout as err: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="timeout_connect", + translation_placeholders={"error": repr(err)}, + ) from err + except (ResourceException, requests.exceptions.ConnectionError) as err: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="no_nodes_found", + translation_placeholders={"error": repr(err)}, + ) from err + + data: dict[str, ProxmoxNodeData] = {} + for node, (vms, containers) in zip(nodes, vms_containers, strict=True): + data[node[CONF_NODE]] = ProxmoxNodeData( + node=node, + vms={int(vm["vmid"]): vm for vm in vms}, + containers={ + int(container["vmid"]): container for container in containers + }, + ) + + self._async_add_remove_nodes(data) + return data + + def _init_proxmox(self) -> None: + """Initialize ProxmoxAPI instance.""" + user_id = ( + self.config_entry.data[CONF_USERNAME] + if "@" in self.config_entry.data[CONF_USERNAME] + else 
f"{self.config_entry.data[CONF_USERNAME]}@{self.config_entry.data[CONF_REALM]}" + ) + + self.proxmox = ProxmoxAPI( + host=self.config_entry.data[CONF_HOST], + port=self.config_entry.data[CONF_PORT], + user=user_id, + password=self.config_entry.data[CONF_PASSWORD], + verify_ssl=self.config_entry.data.get(CONF_VERIFY_SSL, DEFAULT_VERIFY_SSL), + ) + self.proxmox.nodes.get() + + def _fetch_all_nodes( + self, + ) -> tuple[ + list[dict[str, Any]], list[tuple[list[dict[str, Any]], list[dict[str, Any]]]] + ]: + """Fetch all nodes, and then proceed to the VMs and containers.""" + nodes = self.proxmox.nodes.get() + vms_containers = [self._get_vms_containers(node) for node in nodes] + return nodes, vms_containers + + def _get_vms_containers( + self, + node: dict[str, Any], + ) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]: + """Get vms and containers for a node.""" + vms = self.proxmox.nodes(node[CONF_NODE]).qemu.get() + containers = self.proxmox.nodes(node[CONF_NODE]).lxc.get() + assert vms is not None and containers is not None + return vms, containers + + def _async_add_remove_nodes(self, data: dict[str, ProxmoxNodeData]) -> None: + """Add new nodes/VMs/containers, track removals.""" + current_nodes = set(data.keys()) + new_nodes = current_nodes - self.known_nodes + if new_nodes: + _LOGGER.debug("New nodes found: %s", new_nodes) + self.known_nodes.update(new_nodes) + + # And yes, track new VM's and containers as well + current_vms = { + (node_name, vmid) + for node_name, node_data in data.items() + for vmid in node_data.vms + } + new_vms = current_vms - self.known_vms + if new_vms: + _LOGGER.debug("New VMs found: %s", new_vms) + self.known_vms.update(new_vms) + + current_containers = { + (node_name, vmid) + for node_name, node_data in data.items() + for vmid in node_data.containers + } + new_containers = current_containers - self.known_containers + if new_containers: + _LOGGER.debug("New containers found: %s", new_containers) + 
self.known_containers.update(new_containers) diff --git a/homeassistant/components/proxmoxve/entity.py b/homeassistant/components/proxmoxve/entity.py index 5dfd264df2db5c..8129c0f0b5a3ae 100644 --- a/homeassistant/components/proxmoxve/entity.py +++ b/homeassistant/components/proxmoxve/entity.py @@ -1,39 +1,133 @@ """Proxmox parent entity class.""" -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) +from __future__ import annotations +from typing import Any -class ProxmoxEntity(CoordinatorEntity): - """Represents any entity created for the Proxmox VE platform.""" +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import ProxmoxCoordinator, ProxmoxNodeData + + +class ProxmoxCoordinatorEntity(CoordinatorEntity[ProxmoxCoordinator]): + """Base class for Proxmox entities.""" + + _attr_has_entity_name = True + + +class ProxmoxNodeEntity(ProxmoxCoordinatorEntity): + """Represents any entity created for a Proxmox VE node.""" + + def __init__( + self, + coordinator: ProxmoxCoordinator, + node_data: ProxmoxNodeData, + ) -> None: + """Initialize the Proxmox node entity.""" + super().__init__(coordinator) + self._node_data = node_data + self.device_id = node_data.node["id"] + self.device_name = node_data.node["node"] + self._attr_device_info = DeviceInfo( + identifiers={ + (DOMAIN, f"{coordinator.config_entry.entry_id}_node_{self.device_id}") + }, + name=node_data.node.get("node", str(self.device_id)), + model="Node", + ) + + @property + def available(self) -> bool: + """Return if the device is available.""" + return super().available and self.device_name in self.coordinator.data + + +class ProxmoxVMEntity(ProxmoxCoordinatorEntity): + """Represents a VM entity.""" def __init__( self, - coordinator: DataUpdateCoordinator, - unique_id: str, - name: str, - icon: str, - host_name: str, - node_name: 
str, - vm_id: int | None = None, + coordinator: ProxmoxCoordinator, + vm_data: dict[str, Any], + node_data: ProxmoxNodeData, ) -> None: - """Initialize the Proxmox entity.""" + """Initialize the Proxmox VM entity.""" super().__init__(coordinator) + self._vm_data = vm_data + self._node_name = node_data.node["node"] + self.device_id = vm_data["vmid"] + self.device_name = vm_data["name"] - self.coordinator = coordinator - self._attr_unique_id = unique_id - self._attr_name = name - self._host_name = host_name - self._attr_icon = icon - self._available = True - self._node_name = node_name - self._vm_id = vm_id + self._attr_device_info = DeviceInfo( + identifiers={ + (DOMAIN, f"{coordinator.config_entry.entry_id}_vm_{self.device_id}") + }, + name=self.device_name, + model="VM", + via_device=( + DOMAIN, + f"{coordinator.config_entry.entry_id}_node_{node_data.node['id']}", + ), + ) - self._state = None + @property + def available(self) -> bool: + """Return if the device is available.""" + return ( + super().available + and self._node_name in self.coordinator.data + and self.device_id in self.coordinator.data[self._node_name].vms + ) + + @property + def vm_data(self) -> dict[str, Any]: + """Return the VM data.""" + return self.coordinator.data[self._node_name].vms[self.device_id] + + +class ProxmoxContainerEntity(ProxmoxCoordinatorEntity): + """Represents a Container entity.""" + + def __init__( + self, + coordinator: ProxmoxCoordinator, + container_data: dict[str, Any], + node_data: ProxmoxNodeData, + ) -> None: + """Initialize the Proxmox Container entity.""" + super().__init__(coordinator) + self._container_data = container_data + self._node_name = node_data.node["node"] + self.device_id = container_data["vmid"] + self.device_name = container_data["name"] + + self._attr_device_info = DeviceInfo( + identifiers={ + ( + DOMAIN, + f"{coordinator.config_entry.entry_id}_container_{self.device_id}", + ) + }, + name=self.device_name, + model="Container", + via_device=( + DOMAIN, 
+ f"{coordinator.config_entry.entry_id}_node_{node_data.node['id']}", + ), + ) @property def available(self) -> bool: - """Return True if entity is available.""" - return self.coordinator.last_update_success and self._available + """Return if the device is available.""" + return ( + super().available + and self._node_name in self.coordinator.data + and self.device_id in self.coordinator.data[self._node_name].containers + ) + + @property + def container_data(self) -> dict[str, Any]: + """Return the Container data.""" + return self.coordinator.data[self._node_name].containers[self.device_id] diff --git a/homeassistant/components/proxmoxve/strings.json b/homeassistant/components/proxmoxve/strings.json index 49d5aed4b2cc0c..b6e63ee802e63b 100644 --- a/homeassistant/components/proxmoxve/strings.json +++ b/homeassistant/components/proxmoxve/strings.json @@ -49,6 +49,30 @@ } } }, + "entity": { + "binary_sensor": { + "status": { + "name": "Status" + } + } + }, + "exceptions": { + "cannot_connect": { + "message": "An error occurred while trying to connect to the Proxmox VE instance: {error}" + }, + "invalid_auth": { + "message": "An error occurred while trying to authenticate: {error}" + }, + "no_nodes_found": { + "message": "No active nodes were found on the Proxmox VE server." + }, + "ssl_error": { + "message": "An SSL error occurred: {error}" + }, + "timeout_connect": { + "message": "A timeout occurred while trying to connect to the Proxmox VE instance: {error}" + } + }, "issues": { "deprecated_yaml_import_issue_connect_timeout": { "description": "Configuring {integration_title} via YAML is deprecated and will be removed in a future release. While importing your configuration, a connection timeout occurred. 
Please correct your YAML configuration and restart Home Assistant, or remove the {domain} key from your configuration and configure the integration via the UI.", diff --git a/homeassistant/components/remote_calendar/calendar.py b/homeassistant/components/remote_calendar/calendar.py index 86a49e6b0c6fa6..10e1bb44295b91 100644 --- a/homeassistant/components/remote_calendar/calendar.py +++ b/homeassistant/components/remote_calendar/calendar.py @@ -1,9 +1,10 @@ """Calendar platform for a Remote Calendar.""" -from datetime import datetime +from datetime import datetime, timedelta import logging from ical.event import Event +from ical.timeline import Timeline, materialize_timeline from homeassistant.components.calendar import CalendarEntity, CalendarEvent from homeassistant.core import HomeAssistant @@ -20,6 +21,14 @@ # Coordinator is used to centralize the data updates PARALLEL_UPDATES = 0 +# Every coordinator update refresh, we materialize a timeline of upcoming +# events for determining state. This is done in the background to avoid blocking +# the event loop. When a state update happens we can scan for active events on +# the materialized timeline. These parameters control the maximum lookahead +# window and number of events we materialize from the calendar. 
+MAX_LOOKAHEAD_EVENTS = 20 +MAX_LOOKAHEAD_TIME = timedelta(days=365) + async def async_setup_entry( hass: HomeAssistant, @@ -48,12 +57,18 @@ def __init__( super().__init__(coordinator) self._attr_name = entry.data[CONF_CALENDAR_NAME] self._attr_unique_id = entry.entry_id - self._event: CalendarEvent | None = None + self._timeline: Timeline | None = None @property def event(self) -> CalendarEvent | None: """Return the next upcoming event.""" - return self._event + if self._timeline is None: + return None + now = dt_util.now() + events = self._timeline.active_after(now) + if event := next(events, None): + return _get_calendar_event(event) + return None async def async_get_events( self, hass: HomeAssistant, start_date: datetime, end_date: datetime @@ -79,14 +94,18 @@ async def async_update(self) -> None: """ await super().async_update() - def next_event() -> CalendarEvent | None: + def _get_timeline() -> Timeline | None: + """Return a materialized timeline with upcoming events.""" now = dt_util.now() - events = self.coordinator.data.timeline_tz(now.tzinfo).active_after(now) - if event := next(events, None): - return _get_calendar_event(event) - return None + timeline = self.coordinator.data.timeline_tz(now.tzinfo) + return materialize_timeline( + timeline, + start=now, + stop=now + MAX_LOOKAHEAD_TIME, + max_number_of_events=MAX_LOOKAHEAD_EVENTS, + ) - self._event = await self.hass.async_add_executor_job(next_event) + self._timeline = await self.hass.async_add_executor_job(_get_timeline) def _get_calendar_event(event: Event) -> CalendarEvent: diff --git a/homeassistant/components/satel_integra/alarm_control_panel.py b/homeassistant/components/satel_integra/alarm_control_panel.py index ed72698cb3d41e..549ddcca9a2c3a 100644 --- a/homeassistant/components/satel_integra/alarm_control_panel.py +++ b/homeassistant/components/satel_integra/alarm_control_panel.py @@ -102,11 +102,8 @@ def __init__( @callback def _handle_coordinator_update(self) -> None: """Handle updated data 
from the coordinator.""" - state = self._read_alarm_state() - - if state != self._attr_alarm_state: - self._attr_alarm_state = state - self.async_write_ha_state() + self._attr_alarm_state = self._read_alarm_state() + self.async_write_ha_state() def _read_alarm_state(self) -> AlarmControlPanelState | None: """Read current status of the alarm and translate it into HA status.""" diff --git a/homeassistant/components/satel_integra/binary_sensor.py b/homeassistant/components/satel_integra/binary_sensor.py index a16fba0304691d..567fecb132d86f 100644 --- a/homeassistant/components/satel_integra/binary_sensor.py +++ b/homeassistant/components/satel_integra/binary_sensor.py @@ -103,10 +103,8 @@ def __init__( @callback def _handle_coordinator_update(self) -> None: """Handle updated data from the coordinator.""" - new_state = self._get_state_from_coordinator() - if new_state != self._attr_is_on: - self._attr_is_on = new_state - self.async_write_ha_state() + self._attr_is_on = self._get_state_from_coordinator() + self.async_write_ha_state() def _get_state_from_coordinator(self) -> bool | None: """Method to get binary sensor state from coordinator data.""" diff --git a/homeassistant/components/satel_integra/switch.py b/homeassistant/components/satel_integra/switch.py index 7b321d6eeda2cb..1c53ce7ee9eeae 100644 --- a/homeassistant/components/satel_integra/switch.py +++ b/homeassistant/components/satel_integra/switch.py @@ -74,10 +74,8 @@ def __init__( @callback def _handle_coordinator_update(self) -> None: """Handle updated data from the coordinator.""" - new_state = self._get_state_from_coordinator() - if new_state != self._attr_is_on: - self._attr_is_on = new_state - self.async_write_ha_state() + self._attr_is_on = self._get_state_from_coordinator() + self.async_write_ha_state() def _get_state_from_coordinator(self) -> bool | None: """Method to get switch state from coordinator data.""" diff --git a/homeassistant/components/scrape/coordinator.py 
b/homeassistant/components/scrape/coordinator.py index ea3d5054bdb94f..d491e5925e13e9 100644 --- a/homeassistant/components/scrape/coordinator.py +++ b/homeassistant/components/scrape/coordinator.py @@ -16,6 +16,13 @@ _LOGGER = logging.getLogger(__name__) +XML_MIME_TYPES = ( + "application/rss+xml", + "application/xhtml+xml", + "application/xml", + "text/xml", +) + class ScrapeCoordinator(DataUpdateCoordinator[BeautifulSoup]): """Scrape Coordinator.""" @@ -52,6 +59,33 @@ async def _async_update_data(self) -> BeautifulSoup: await self._rest.async_update() if (data := self._rest.data) is None: raise UpdateFailed("REST data is not available") - soup = await self.hass.async_add_executor_job(BeautifulSoup, data, "lxml") + + # Detect if content is XML and use appropriate parser + # Check Content-Type header first (most reliable), then fall back to content detection + parser = "lxml" + headers = self._rest.headers + content_type = headers.get("Content-Type", "") if headers else "" + if content_type.startswith(XML_MIME_TYPES): + parser = "lxml-xml" + elif isinstance(data, str): + data_stripped = data.lstrip() + if data_stripped.startswith("") + if xml_end != -1: + after_xml = data_stripped[xml_end + 2 :].lstrip() + after_xml_lower = after_xml.lower() + is_html = after_xml_lower.startswith((" bool: + """Set up Teltonika from a config entry.""" + host = entry.data[CONF_HOST] + username = entry.data[CONF_USERNAME] + password = entry.data[CONF_PASSWORD] + validate_ssl = entry.data.get(CONF_VERIFY_SSL, False) + session = async_get_clientsession(hass) + + base_url = normalize_url(host) + + client = Teltasync( + base_url=f"{base_url}/api", + username=username, + password=password, + session=session, + verify_ssl=validate_ssl, + ) + + # Create coordinator + coordinator = TeltonikaDataUpdateCoordinator(hass, client, entry, base_url) + + # Fetch initial data and set up device info + await coordinator.async_config_entry_first_refresh() + + assert coordinator.device_info is not None + 
+ # Store runtime data + entry.runtime_data = coordinator + + # Set up platforms + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: TeltonikaConfigEntry) -> bool: + """Unload a config entry.""" + if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + await entry.runtime_data.client.close() + + return unload_ok diff --git a/homeassistant/components/teltonika/config_flow.py b/homeassistant/components/teltonika/config_flow.py new file mode 100644 index 00000000000000..3af1d28620c145 --- /dev/null +++ b/homeassistant/components/teltonika/config_flow.py @@ -0,0 +1,231 @@ +"""Config flow for the Teltonika integration.""" + +from __future__ import annotations + +import logging +from typing import Any + +from teltasync import Teltasync, TeltonikaAuthenticationError, TeltonikaConnectionError +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, CONF_VERIFY_SSL +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo + +from .const import DOMAIN +from .util import get_url_variants + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): str, + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + vol.Optional(CONF_VERIFY_SSL, default=False): bool, + } +) + + +class CannotConnect(HomeAssistantError): + """Error to indicate we cannot connect.""" + + +class InvalidAuth(HomeAssistantError): + """Error to indicate there is invalid auth.""" + + +async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]: + """Validate the user input allows us to 
connect. + + Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. + """ + session = async_get_clientsession(hass) + host = data[CONF_HOST] + + last_error: Exception | None = None + + for base_url in get_url_variants(host): + client = Teltasync( + base_url=f"{base_url}/api", + username=data[CONF_USERNAME], + password=data[CONF_PASSWORD], + session=session, + verify_ssl=data.get(CONF_VERIFY_SSL, True), + ) + + try: + device_info = await client.get_device_info() + auth_valid = await client.validate_credentials() + except TeltonikaConnectionError as err: + _LOGGER.debug( + "Failed to connect to Teltonika device at %s: %s", base_url, err + ) + last_error = err + continue + except TeltonikaAuthenticationError as err: + _LOGGER.error("Authentication failed: %s", err) + raise InvalidAuth from err + finally: + await client.close() + + if not auth_valid: + raise InvalidAuth + + return { + "title": device_info.device_name, + "device_id": device_info.device_identifier, + "host": base_url, + } + + _LOGGER.error("Cannot connect to device after trying all schemas") + raise CannotConnect from last_error + + +class TeltonikaConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Teltonika.""" + + VERSION = 1 + MINOR_VERSION = 1 + _discovered_host: str | None = None + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + + if user_input is not None: + try: + info = await validate_input(self.hass, user_input) + except CannotConnect: + errors["base"] = "cannot_connect" + except InvalidAuth: + errors["base"] = "invalid_auth" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + # Set unique ID to prevent duplicates + await self.async_set_unique_id(info["device_id"]) + self._abort_if_unique_id_configured() + + data_to_store = dict(user_input) + if "host" in info: + 
data_to_store[CONF_HOST] = info["host"] + + return self.async_create_entry( + title=info["title"], + data=data_to_store, + ) + + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + ) + + async def async_step_dhcp( + self, discovery_info: DhcpServiceInfo + ) -> ConfigFlowResult: + """Handle DHCP discovery.""" + host = discovery_info.ip + + # Store discovered host for later use + self._discovered_host = host + + # Try to get device info without authentication to get device identifier and name + session = async_get_clientsession(self.hass) + + for base_url in get_url_variants(host): + client = Teltasync( + base_url=f"{base_url}/api", + username="", # No credentials yet + password="", + session=session, + verify_ssl=False, # Teltonika devices use self-signed certs by default + ) + + try: + # Get device info from unauthorized endpoint + device_info = await client.get_device_info() + device_name = device_info.device_name + device_id = device_info.device_identifier + break + except TeltonikaConnectionError: + # Connection failed, try next URL variant + continue + finally: + await client.close() + else: + # No URL variant worked, device not reachable, don't autodiscover + return self.async_abort(reason="cannot_connect") + + # Set unique ID and check for existing conf + await self.async_set_unique_id(device_id) + self._abort_if_unique_id_configured(updates={CONF_HOST: host}) + + # Store discovery info for the user step + self.context["title_placeholders"] = { + "name": device_name, + "host": host, + } + + # Proceed to confirmation step to get credentials + return await self.async_step_dhcp_confirm() + + async def async_step_dhcp_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm DHCP discovery and get credentials.""" + errors: dict[str, str] = {} + + if user_input is not None: + # Get the host from the discovery + host = getattr(self, "_discovered_host", "") + + try: + # Validate 
credentials with discovered host + data = { + CONF_HOST: host, + CONF_USERNAME: user_input[CONF_USERNAME], + CONF_PASSWORD: user_input[CONF_PASSWORD], + CONF_VERIFY_SSL: False, + } + info = await validate_input(self.hass, data) + + # Update unique ID to device identifier if we didn't get it during discovery + await self.async_set_unique_id( + info["device_id"], raise_on_progress=False + ) + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=info["title"], + data={ + CONF_HOST: info["host"], + CONF_USERNAME: user_input[CONF_USERNAME], + CONF_PASSWORD: user_input[CONF_PASSWORD], + CONF_VERIFY_SSL: False, + }, + ) + except CannotConnect: + errors["base"] = "cannot_connect" + except InvalidAuth: + errors["base"] = "invalid_auth" + except Exception: + _LOGGER.exception("Unexpected exception during DHCP confirm") + errors["base"] = "unknown" + + return self.async_show_form( + step_id="dhcp_confirm", + data_schema=vol.Schema( + { + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + } + ), + errors=errors, + description_placeholders=self.context.get("title_placeholders", {}), + ) diff --git a/homeassistant/components/teltonika/const.py b/homeassistant/components/teltonika/const.py new file mode 100644 index 00000000000000..5a1f0f66211c07 --- /dev/null +++ b/homeassistant/components/teltonika/const.py @@ -0,0 +1,3 @@ +"""Constants for the Teltonika integration.""" + +DOMAIN = "teltonika" diff --git a/homeassistant/components/teltonika/coordinator.py b/homeassistant/components/teltonika/coordinator.py new file mode 100644 index 00000000000000..0604ca4cd542d2 --- /dev/null +++ b/homeassistant/components/teltonika/coordinator.py @@ -0,0 +1,98 @@ +"""DataUpdateCoordinator for Teltonika.""" + +from __future__ import annotations + +from datetime import timedelta +import logging +from typing import TYPE_CHECKING, Any + +from aiohttp import ClientResponseError, ContentTypeError +from teltasync import Teltasync, 
TeltonikaAuthenticationError, TeltonikaConnectionError +from teltasync.modems import Modems + +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN + +if TYPE_CHECKING: + from . import TeltonikaConfigEntry + +_LOGGER = logging.getLogger(__name__) + +SCAN_INTERVAL = timedelta(seconds=30) + + +class TeltonikaDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): + """Class to manage fetching Teltonika data.""" + + device_info: DeviceInfo + + def __init__( + self, + hass: HomeAssistant, + client: Teltasync, + config_entry: TeltonikaConfigEntry, + base_url: str, + ) -> None: + """Initialize the coordinator.""" + super().__init__( + hass, + _LOGGER, + name="Teltonika", + update_interval=SCAN_INTERVAL, + config_entry=config_entry, + ) + self.client = client + self.base_url = base_url + + async def _async_setup(self) -> None: + """Set up the coordinator - authenticate and fetch device info.""" + try: + await self.client.get_device_info() + system_info_response = await self.client.get_system_info() + except TeltonikaAuthenticationError as err: + raise ConfigEntryError(f"Authentication failed: {err}") from err + except (ClientResponseError, ContentTypeError) as err: + if isinstance(err, ClientResponseError) and err.status in (401, 403): + raise ConfigEntryError(f"Authentication failed: {err}") from err + if isinstance(err, ContentTypeError) and err.status == 403: + raise ConfigEntryError(f"Authentication failed: {err}") from err + raise ConfigEntryNotReady(f"Failed to connect to device: {err}") from err + except TeltonikaConnectionError as err: + raise ConfigEntryNotReady(f"Failed to connect to device: {err}") from err + + # Store device info for use by entities + self.device_info = DeviceInfo( + identifiers={(DOMAIN, 
system_info_response.mnf_info.serial)}, + name=system_info_response.static.device_name, + manufacturer="Teltonika", + model=system_info_response.static.model, + sw_version=system_info_response.static.fw_version, + serial_number=system_info_response.mnf_info.serial, + configuration_url=self.base_url, + ) + + async def _async_update_data(self) -> dict[str, Any]: + """Fetch data from Teltonika device.""" + modems = Modems(self.client.auth) + try: + # Get modems data using the teltasync library + modems_response = await modems.get_status() + except TeltonikaConnectionError as err: + raise UpdateFailed(f"Error communicating with device: {err}") from err + + # Return only modems which are online + modem_data: dict[str, Any] = {} + if modems_response.data: + modem_data.update( + { + modem.id: modem + for modem in modems_response.data + if Modems.is_online(modem) + } + ) + + return modem_data diff --git a/homeassistant/components/teltonika/manifest.json b/homeassistant/components/teltonika/manifest.json new file mode 100644 index 00000000000000..3be87d345d1dfb --- /dev/null +++ b/homeassistant/components/teltonika/manifest.json @@ -0,0 +1,19 @@ +{ + "domain": "teltonika", + "name": "Teltonika", + "codeowners": ["@karlbeecken"], + "config_flow": true, + "dhcp": [ + { + "macaddress": "209727*" + }, + { + "macaddress": "001E42*" + } + ], + "documentation": "https://www.home-assistant.io/integrations/teltonika", + "integration_type": "device", + "iot_class": "local_polling", + "quality_scale": "bronze", + "requirements": ["teltasync==0.1.3"] +} diff --git a/homeassistant/components/teltonika/quality_scale.yaml b/homeassistant/components/teltonika/quality_scale.yaml new file mode 100644 index 00000000000000..329aa7f7b78676 --- /dev/null +++ b/homeassistant/components/teltonika/quality_scale.yaml @@ -0,0 +1,68 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: No custom actions registered. 
+ appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: No custom actions registered. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: No custom events registered. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: No custom actions registered. + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: todo + integration-owner: done + log-when-unavailable: todo + parallel-updates: done + reauthentication-flow: todo + test-coverage: todo + + # Gold + devices: done + diagnostics: todo + discovery-update-info: done + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: todo + entity-device-class: done + entity-disabled-by-default: todo + entity-translations: done + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + repair-issues: todo + stale-devices: todo + + # Platinum + async-dependency: todo + inject-websession: done + strict-typing: todo diff --git a/homeassistant/components/teltonika/sensor.py b/homeassistant/components/teltonika/sensor.py new file mode 100644 index 00000000000000..623d73c987b7ed --- /dev/null +++ b/homeassistant/components/teltonika/sensor.py @@ -0,0 +1,187 @@ +"""Teltonika sensor platform.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +import logging + +from 
teltasync.modems import ModemStatus + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import ( + SIGNAL_STRENGTH_DECIBELS, + SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + UnitOfTemperature, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.typing import StateType +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . import TeltonikaConfigEntry, TeltonikaDataUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) + +PARALLEL_UPDATES = 0 + + +@dataclass(frozen=True, kw_only=True) +class TeltonikaSensorEntityDescription(SensorEntityDescription): + """Describes Teltonika sensor entity.""" + + value_fn: Callable[[ModemStatus], StateType] + + +SENSOR_DESCRIPTIONS: tuple[TeltonikaSensorEntityDescription, ...] 
= ( + TeltonikaSensorEntityDescription( + key="rssi", + translation_key="rssi", + device_class=SensorDeviceClass.SIGNAL_STRENGTH, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + suggested_display_precision=0, + value_fn=lambda modem: modem.rssi, + ), + TeltonikaSensorEntityDescription( + key="rsrp", + translation_key="rsrp", + device_class=SensorDeviceClass.SIGNAL_STRENGTH, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + suggested_display_precision=0, + value_fn=lambda modem: modem.rsrp, + ), + TeltonikaSensorEntityDescription( + key="rsrq", + translation_key="rsrq", + device_class=SensorDeviceClass.SIGNAL_STRENGTH, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS, + suggested_display_precision=0, + value_fn=lambda modem: modem.rsrq, + ), + TeltonikaSensorEntityDescription( + key="sinr", + translation_key="sinr", + device_class=SensorDeviceClass.SIGNAL_STRENGTH, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS, + suggested_display_precision=0, + value_fn=lambda modem: modem.sinr, + ), + TeltonikaSensorEntityDescription( + key="temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=0, + value_fn=lambda modem: modem.temperature, + ), + TeltonikaSensorEntityDescription( + key="operator", + translation_key="operator", + value_fn=lambda modem: modem.operator, + ), + TeltonikaSensorEntityDescription( + key="connection_type", + translation_key="connection_type", + value_fn=lambda modem: modem.conntype, + ), + TeltonikaSensorEntityDescription( + key="band", + translation_key="band", + value_fn=lambda modem: modem.band, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: TeltonikaConfigEntry, + 
async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up Teltonika sensor platform.""" + coordinator = entry.runtime_data + + # Track known modems to detect new ones + known_modems: set[str] = set() + + @callback + def _async_add_new_modems() -> None: + """Add sensors for newly discovered modems.""" + current_modems = set(coordinator.data.keys()) + new_modems = current_modems - known_modems + + if new_modems: + entities = [ + TeltonikaSensorEntity( + coordinator, + coordinator.device_info, + description, + modem_id, + coordinator.data[modem_id], + ) + for modem_id in new_modems + for description in SENSOR_DESCRIPTIONS + ] + async_add_entities(entities) + known_modems.update(new_modems) + + # Add sensors for initial modems + _async_add_new_modems() + + # Listen for new modems + entry.async_on_unload(coordinator.async_add_listener(_async_add_new_modems)) + + +class TeltonikaSensorEntity( + CoordinatorEntity[TeltonikaDataUpdateCoordinator], SensorEntity +): + """Teltonika sensor entity.""" + + _attr_has_entity_name = True + entity_description: TeltonikaSensorEntityDescription + + def __init__( + self, + coordinator: TeltonikaDataUpdateCoordinator, + device_info: DeviceInfo, + description: TeltonikaSensorEntityDescription, + modem_id: str, + modem: ModemStatus, + ) -> None: + """Initialize the sensor.""" + super().__init__(coordinator) + self.entity_description = description + self._modem_id = modem_id + self._attr_device_info = device_info + + # Create unique ID using entry unique identifier, modem ID, and sensor type + assert coordinator.config_entry is not None + entry_unique_id = ( + coordinator.config_entry.unique_id or coordinator.config_entry.entry_id + ) + self._attr_unique_id = f"{entry_unique_id}_{modem_id}_{description.key}" + + # Use translation key for proper naming + modem_name = modem.name or f"Modem {modem_id}" + self._modem_name = modem_name + self._attr_translation_key = description.translation_key + 
self._attr_translation_placeholders = {"modem_name": modem_name} + + @property + def available(self) -> bool: + """Return if entity is available.""" + return super().available and self._modem_id in self.coordinator.data + + @property + def native_value(self) -> StateType: + """Handle updated data from the coordinator.""" + return self.entity_description.value_fn(self.coordinator.data[self._modem_id]) diff --git a/homeassistant/components/teltonika/strings.json b/homeassistant/components/teltonika/strings.json new file mode 100644 index 00000000000000..954f648f2ddab4 --- /dev/null +++ b/homeassistant/components/teltonika/strings.json @@ -0,0 +1,69 @@ +{ + "config": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "wrong_account": "The device does not match the existing configuration." + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "step": { + "dhcp_confirm": { + "data": { + "password": "[%key:common::config_flow::data::password%]", + "username": "[%key:common::config_flow::data::username%]" + }, + "data_description": { + "password": "The password to authenticate with the device.", + "username": "The username to authenticate with the device." + }, + "description": "A Teltonika device ({name}) was discovered at {host}. 
Enter the credentials to add it to Home Assistant.", + "title": "Discovered Teltonika device" + }, + "user": { + "data": { + "host": "[%key:common::config_flow::data::host%]", + "password": "[%key:common::config_flow::data::password%]", + "username": "[%key:common::config_flow::data::username%]", + "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" + }, + "data_description": { + "host": "The hostname or IP address of your Teltonika device.", + "password": "[%key:component::teltonika::config::step::dhcp_confirm::data_description::password%]", + "username": "[%key:component::teltonika::config::step::dhcp_confirm::data_description::username%]", + "verify_ssl": "Whether to validate the SSL certificate when using HTTPS. Most Teltonika devices use self-signed certificates, so you will need to disable this option unless you have installed a valid certificate on your device." + }, + "description": "Enter the connection details for your Teltonika device.", + "title": "Set up Teltonika device" + } + } + }, + "entity": { + "sensor": { + "band": { + "name": "{modem_name} Band" + }, + "connection_type": { + "name": "{modem_name} Connection type" + }, + "operator": { + "name": "{modem_name} Operator" + }, + "rsrp": { + "name": "{modem_name} RSRP" + }, + "rsrq": { + "name": "{modem_name} RSRQ" + }, + "rssi": { + "name": "{modem_name} RSSI" + }, + "sinr": { + "name": "{modem_name} SINR" + } + } + } +} diff --git a/homeassistant/components/teltonika/util.py b/homeassistant/components/teltonika/util.py new file mode 100644 index 00000000000000..54cc0c4fedf1f3 --- /dev/null +++ b/homeassistant/components/teltonika/util.py @@ -0,0 +1,39 @@ +"""Utility helpers for the Teltonika integration.""" + +from __future__ import annotations + +from yarl import URL + + +def normalize_url(host: str) -> str: + """Normalize host input to a base URL without path. + + Returns just the scheme://host part, without /api. + Ensures the URL has a scheme (defaults to HTTPS). 
+ """ + host_input = host.strip().rstrip("/") + + # Parse or construct URL + if host_input.startswith(("http://", "https://")): + url = URL(host_input) + else: + # handle as scheme-relative URL and add HTTPS scheme by default + url = URL(f"//{host_input}").with_scheme("https") + + # Return base URL without path, only including scheme, host and port + return str(url.origin()) + + +def get_url_variants(host: str) -> list[str]: + """Get URL variants to try during setup (HTTPS first, then HTTP fallback).""" + normalized = normalize_url(host) + url = URL(normalized) + + # If user specified a scheme, only try that + if host.strip().startswith(("http://", "https://")): + return [normalized] + + # Otherwise try HTTPS first, then HTTP + https_url = str(url.with_scheme("https")) + http_url = str(url.with_scheme("http")) + return [https_url, http_url] diff --git a/homeassistant/components/thread/dataset_store.py b/homeassistant/components/thread/dataset_store.py index e64a0a4afe7f1f..78f8b736b7f971 100644 --- a/homeassistant/components/thread/dataset_store.py +++ b/homeassistant/components/thread/dataset_store.py @@ -6,6 +6,7 @@ import dataclasses from datetime import datetime import logging +from pprint import pformat from typing import Any, cast from propcache.api import cached_property @@ -14,6 +15,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.redact import REDACTED from homeassistant.helpers.singleton import singleton from homeassistant.helpers.storage import Store from homeassistant.util import dt as dt_util, ulid as ulid_util @@ -30,6 +32,24 @@ _LOGGER = logging.getLogger(__name__) +def _format_dataset( + dataset: dict[MeshcopTLVType | int, tlv_parser.MeshcopTLVItem], +) -> dict[str, str]: + """Format a parsed Thread dataset for logging. 
+ + Returns a human-readable dict with enum field names as keys, redacting + NETWORKKEY and PSKC to avoid logging sensitive network credentials. + """ + result = {} + for key, value in dataset.items(): + name = key.name if isinstance(key, MeshcopTLVType) else str(key) + if key in (MeshcopTLVType.NETWORKKEY, MeshcopTLVType.PSKC): + result[name] = REDACTED + else: + result[name] = str(value) + return result + + class DatasetPreferredError(HomeAssistantError): """Raised when attempting to delete the preferred dataset.""" @@ -116,7 +136,8 @@ async def _async_migrate_func( or MeshcopTLVType.ACTIVETIMESTAMP not in entry.dataset ): _LOGGER.warning( - "Dropped invalid Thread dataset '%s'", entry.tlv + "Dropped invalid Thread dataset:\n%s", + pformat(_format_dataset(entry.dataset)), ) if entry.id == preferred_dataset: preferred_dataset = None @@ -125,12 +146,14 @@ async def _async_migrate_func( if entry.extended_pan_id in datasets: if datasets[entry.extended_pan_id].id == preferred_dataset: _LOGGER.warning( - ( - "Dropped duplicated Thread dataset '%s' " - "(duplicate of preferred dataset '%s')" + "Dropped duplicated Thread dataset" + " (duplicate of preferred dataset):\n%s\nkept:\n%s", + pformat(_format_dataset(entry.dataset)), + pformat( + _format_dataset( + datasets[entry.extended_pan_id].dataset + ) ), - entry.tlv, - datasets[entry.extended_pan_id].tlv, ) continue new_timestamp = cast( @@ -148,21 +171,21 @@ async def _async_migrate_func( new_timestamp.ticks, ): _LOGGER.warning( - ( - "Dropped duplicated Thread dataset '%s' " - "(duplicate of '%s')" + "Dropped duplicated Thread dataset:\n%s\nkept:\n%s", + pformat(_format_dataset(entry.dataset)), + pformat( + _format_dataset( + datasets[entry.extended_pan_id].dataset + ) ), - entry.tlv, - datasets[entry.extended_pan_id].tlv, ) continue _LOGGER.warning( - ( - "Dropped duplicated Thread dataset '%s' " - "(duplicate of '%s')" + "Dropped duplicated Thread dataset:\n%s\nkept:\n%s", + pformat( + 
_format_dataset(datasets[entry.extended_pan_id].dataset) ), - datasets[entry.extended_pan_id].tlv, - entry.tlv, + pformat(_format_dataset(entry.dataset)), ) datasets[entry.extended_pan_id] = entry data = { @@ -261,22 +284,19 @@ def async_add( new_timestamp.ticks, ): _LOGGER.warning( - ( - "Got dataset with same extended PAN ID and same or older active" - " timestamp, old dataset: '%s', new dataset: '%s'" - ), - entry.tlv, - tlv, + "Got dataset with same extended PAN ID and same or older" + " active timestamp\nold:\n%s\nnew:\n%s", + pformat(_format_dataset(entry.dataset)), + pformat(_format_dataset(dataset)), ) return - _LOGGER.debug( - ( - "Updating dataset with same extended PAN ID and newer active " - "timestamp, old dataset: '%s', new dataset: '%s'" - ), - entry.tlv, - tlv, - ) + if _LOGGER.isEnabledFor(logging.DEBUG): + _LOGGER.debug( + "Updating dataset with same extended PAN ID and newer" + " active timestamp\nold:\n%s\nnew:\n%s", + pformat(_format_dataset(entry.dataset)), + pformat(_format_dataset(dataset)), + ) self.datasets[entry.id] = dataclasses.replace( self.datasets[entry.id], tlv=tlv ) diff --git a/homeassistant/components/traccar/strings.json b/homeassistant/components/traccar/strings.json index 7bf76eff33a61c..35c2d583c2fe02 100644 --- a/homeassistant/components/traccar/strings.json +++ b/homeassistant/components/traccar/strings.json @@ -2,6 +2,7 @@ "config": { "abort": { "cloud_not_connected": "[%key:common::config_flow::abort::cloud_not_connected%]", + "reconfigure_successful": "**Reconfiguration was successful**\n\nGo to webhooks in the Traccar Client and update the webhook with the following URL: `{webhook_url}`\n\nSee [the documentation]({docs_url}) for further details.", "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "webhook_not_internet_accessible": "[%key:common::config_flow::abort::webhook_not_internet_accessible%]" }, @@ -9,6 +10,10 @@ "default": "To send events to Home Assistant, you will need 
to set up the webhook feature in Traccar Client.\n\nUse the following URL: `{webhook_url}`\n\nSee [the documentation]({docs_url}) for further details." }, "step": { + "reconfigure": { + "description": "Are you sure you want to reconfigure the Traccar Client?", + "title": "Reconfigure Traccar Client" + }, "user": { "description": "Are you sure you want to set up Traccar Client?", "title": "Set up Traccar Client" diff --git a/homeassistant/components/transmission/__init__.py b/homeassistant/components/transmission/__init__.py index d5a566879a66e9..56d6a2d5d67a02 100644 --- a/homeassistant/components/transmission/__init__.py +++ b/homeassistant/components/transmission/__init__.py @@ -36,7 +36,6 @@ from .const import DEFAULT_PATH, DEFAULT_SSL, DOMAIN from .coordinator import TransmissionConfigEntry, TransmissionDataUpdateCoordinator -from .errors import AuthenticationError, CannotConnect, UnknownError from .services import async_setup_services _LOGGER = logging.getLogger(__name__) @@ -93,10 +92,10 @@ def update_unique_id( try: api = await get_api(hass, dict(config_entry.data)) - except CannotConnect as error: - raise ConfigEntryNotReady from error - except (AuthenticationError, UnknownError) as error: - raise ConfigEntryAuthFailed from error + except TransmissionAuthError as err: + raise ConfigEntryAuthFailed from err + except (TransmissionConnectError, TransmissionError) as err: + raise ConfigEntryNotReady from err protocol: Final = "https" if config_entry.data[CONF_SSL] else "http" device_registry = dr.async_get(hass) @@ -171,26 +170,17 @@ async def get_api( username = entry.get(CONF_USERNAME) password = entry.get(CONF_PASSWORD) - try: - api = await hass.async_add_executor_job( - partial( - transmission_rpc.Client, - username=username, - password=password, - protocol=protocol, - host=host, - port=port, - path=path, - ) + api = await hass.async_add_executor_job( + partial( + transmission_rpc.Client, + username=username, + password=password, + protocol=protocol, + 
host=host, + port=port, + path=path, ) - except TransmissionAuthError as error: - _LOGGER.error("Credentials for Transmission client are not valid") - raise AuthenticationError from error - except TransmissionConnectError as error: - _LOGGER.error("Connecting to the Transmission client %s failed", host) - raise CannotConnect from error - except TransmissionError as error: - _LOGGER.error(error) - raise UnknownError from error + ) + _LOGGER.debug("Successfully connected to %s", host) return api diff --git a/homeassistant/components/transmission/config_flow.py b/homeassistant/components/transmission/config_flow.py index 467a2ce55487b4..9294319aeb8806 100644 --- a/homeassistant/components/transmission/config_flow.py +++ b/homeassistant/components/transmission/config_flow.py @@ -5,6 +5,11 @@ from collections.abc import Mapping from typing import Any +from transmission_rpc.error import ( + TransmissionAuthError, + TransmissionConnectError, + TransmissionError, +) import voluptuous as vol from homeassistant.config_entries import ( @@ -37,7 +42,6 @@ DOMAIN, SUPPORTED_ORDER_MODES, ) -from .errors import AuthenticationError, CannotConnect, UnknownError DATA_SCHEMA = vol.Schema( { @@ -78,10 +82,10 @@ async def async_step_user( try: await get_api(self.hass, user_input) - except AuthenticationError: + except TransmissionAuthError: errors[CONF_USERNAME] = "invalid_auth" errors[CONF_PASSWORD] = "invalid_auth" - except (CannotConnect, UnknownError): + except (TransmissionConnectError, TransmissionError): errors["base"] = "cannot_connect" if not errors: @@ -113,9 +117,9 @@ async def async_step_reauth_confirm( try: await get_api(self.hass, user_input) - except AuthenticationError: + except TransmissionAuthError: errors[CONF_PASSWORD] = "invalid_auth" - except (CannotConnect, UnknownError): + except (TransmissionConnectError, TransmissionError): errors["base"] = "cannot_connect" else: return self.async_update_reload_and_abort(reauth_entry, data=user_input) diff --git 
a/homeassistant/components/transmission/errors.py b/homeassistant/components/transmission/errors.py deleted file mode 100644 index 68d442c3a74480..00000000000000 --- a/homeassistant/components/transmission/errors.py +++ /dev/null @@ -1,15 +0,0 @@ -"""Errors for the Transmission component.""" - -from homeassistant.exceptions import HomeAssistantError - - -class AuthenticationError(HomeAssistantError): - """Wrong Username or Password.""" - - -class CannotConnect(HomeAssistantError): - """Unable to connect to client.""" - - -class UnknownError(HomeAssistantError): - """Unknown Error.""" diff --git a/homeassistant/components/twilio/strings.json b/homeassistant/components/twilio/strings.json index 00fc168fc055ba..f7a031b9d9ce42 100644 --- a/homeassistant/components/twilio/strings.json +++ b/homeassistant/components/twilio/strings.json @@ -2,6 +2,7 @@ "config": { "abort": { "cloud_not_connected": "[%key:common::config_flow::abort::cloud_not_connected%]", + "reconfigure_successful": "**Reconfiguration was successful**\n\nGo to [webhooks in Twilio]({twilio_url}) and update the webhook with the following settings:\n\n- URL: `{webhook_url}`\n- Method: POST\n- Content Type: application/x-www-form-urlencoded\n\nSee [the documentation]({docs_url}) on how to configure automations to handle incoming data.", "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "webhook_not_internet_accessible": "[%key:common::config_flow::abort::webhook_not_internet_accessible%]" }, @@ -9,6 +10,10 @@ "default": "To send events to Home Assistant, you will need to set up a [webhook with Twilio]({twilio_url}).\n\nFill in the following info:\n\n- URL: `{webhook_url}`\n- Method: POST\n- Content Type: application/x-www-form-urlencoded\n\nSee [the documentation]({docs_url}) on how to configure automations to handle incoming data." 
}, "step": { + "reconfigure": { + "description": "Do you want to start reconfiguration?", + "title": "Reconfigure Twilio webhook" + }, "user": { "description": "[%key:common::config_flow::description::confirm_setup%]", "title": "Set up the Twilio webhook" diff --git a/homeassistant/components/vacuum/__init__.py b/homeassistant/components/vacuum/__init__.py index 2e68cf3938cb23..288f40727d042b 100644 --- a/homeassistant/components/vacuum/__init__.py +++ b/homeassistant/components/vacuum/__init__.py @@ -3,6 +3,8 @@ from __future__ import annotations import asyncio +from collections.abc import Mapping +from dataclasses import dataclass from datetime import timedelta from functools import partial import logging @@ -21,7 +23,7 @@ STATE_ON, ) from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import config_validation as cv +from homeassistant.helpers import config_validation as cv, issue_registry as ir from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.entity_platform import EntityPlatform @@ -31,6 +33,7 @@ from homeassistant.loader import bind_hass from .const import DATA_COMPONENT, DOMAIN, VacuumActivity, VacuumEntityFeature +from .websocket import async_register_websocket_handlers _LOGGER = logging.getLogger(__name__) @@ -47,6 +50,7 @@ ATTR_STATUS = "status" SERVICE_CLEAN_SPOT = "clean_spot" +SERVICE_CLEAN_AREA = "clean_area" SERVICE_LOCATE = "locate" SERVICE_RETURN_TO_BASE = "return_to_base" SERVICE_SEND_COMMAND = "send_command" @@ -58,6 +62,8 @@ DEFAULT_NAME = "Vacuum cleaner robot" +ISSUE_SEGMENTS_CHANGED = "segments_changed" + _BATTERY_DEPRECATION_IGNORED_PLATFORMS = ("template",) @@ -78,6 +84,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: await component.async_setup(config) + async_register_websocket_handlers(hass) + component.async_register_entity_service( SERVICE_START, None, @@ -102,6 
+110,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: "async_clean_spot", [VacuumEntityFeature.CLEAN_SPOT], ) + component.async_register_entity_service( + SERVICE_CLEAN_AREA, + { + vol.Required("cleaning_area_id"): vol.All(cv.ensure_list, [str]), + }, + "async_internal_clean_area", + [VacuumEntityFeature.CLEAN_AREA], + ) component.async_register_entity_service( SERVICE_LOCATE, None, @@ -368,6 +384,112 @@ async def async_clean_spot(self, **kwargs: Any) -> None: """ await self.hass.async_add_executor_job(partial(self.clean_spot, **kwargs)) + async def async_get_segments(self) -> list[Segment]: + """Get the segments that can be cleaned. + + Returns a list of segments containing their ids and names. + """ + raise NotImplementedError + + @final + @property + def last_seen_segments(self) -> list[Segment] | None: + """Return segments as seen by the user, when last mapping the areas. + + Returns None if no mapping has been saved yet. + This can be used by integrations to detect changes in segments reported + by the vacuum and create a repair issue. + """ + if self.registry_entry is None: + raise RuntimeError( + "Cannot access last_seen_segments, registry entry is not set for" + f" {self.entity_id}" + ) + + options: Mapping[str, Any] = self.registry_entry.options.get(DOMAIN, {}) + last_seen_segments = options.get("last_seen_segments") + + if last_seen_segments is None: + return None + + return [Segment(**segment) for segment in last_seen_segments] + + @final + async def async_internal_clean_area( + self, cleaning_area_id: list[str], **kwargs: Any + ) -> None: + """Perform an area clean. + + Calls async_clean_segments. 
+ """ + if self.registry_entry is None: + raise RuntimeError( + "Cannot perform area clean, registry entry is not set for" + f" {self.entity_id}" + ) + + options: Mapping[str, Any] = self.registry_entry.options.get(DOMAIN, {}) + area_mapping: dict[str, list[str]] = options.get("area_mapping", {}) + + # We use a dict to preserve the order of segments. + segment_ids: dict[str, None] = {} + for area_id in cleaning_area_id: + for segment_id in area_mapping.get(area_id, []): + segment_ids[segment_id] = None + + if not segment_ids: + _LOGGER.debug( + "No segments found for cleaning_area_id %s on vacuum %s", + cleaning_area_id, + self.entity_id, + ) + return + + await self.async_clean_segments(list(segment_ids), **kwargs) + + def clean_segments(self, segment_ids: list[str], **kwargs: Any) -> None: + """Perform an area clean.""" + raise NotImplementedError + + async def async_clean_segments(self, segment_ids: list[str], **kwargs: Any) -> None: + """Perform an area clean.""" + await self.hass.async_add_executor_job( + partial(self.clean_segments, segment_ids, **kwargs) + ) + + @callback + def async_create_segments_issue(self) -> None: + """Create a repair issue when vacuum segments have changed. + + Integrations should call this method when the vacuum reports + different segments than what was previously mapped to areas. + + The issue is not fixable via the standard repair flow. The frontend + will handle the fix by showing the segment mapping dialog. 
+ """ + if self.registry_entry is None: + raise RuntimeError( + "Cannot create segments issue, registry entry is not set for" + f" {self.entity_id}" + ) + + issue_id = f"{ISSUE_SEGMENTS_CHANGED}_{self.registry_entry.id}" + ir.async_create_issue( + self.hass, + DOMAIN, + issue_id, + data={ + "entry_id": self.registry_entry.id, + "entity_id": self.entity_id, + }, + is_fixable=False, + severity=ir.IssueSeverity.WARNING, + translation_key=ISSUE_SEGMENTS_CHANGED, + translation_placeholders={ + "entity_id": self.entity_id, + }, + ) + def locate(self, **kwargs: Any) -> None: """Locate the vacuum cleaner.""" raise NotImplementedError @@ -436,3 +558,12 @@ async def async_pause(self) -> None: This method must be run in the event loop. """ await self.hass.async_add_executor_job(self.pause) + + +@dataclass(slots=True) +class Segment: + """Represents a cleanable segment reported by a vacuum.""" + + id: str + name: str + group: str | None = None diff --git a/homeassistant/components/vacuum/const.py b/homeassistant/components/vacuum/const.py index a6e8703a1b0752..919eb1df5660ba 100644 --- a/homeassistant/components/vacuum/const.py +++ b/homeassistant/components/vacuum/const.py @@ -44,3 +44,4 @@ class VacuumEntityFeature(IntFlag): MAP = 2048 STATE = 4096 # Must be set by vacuum platforms derived from StateVacuumEntity START = 8192 + CLEAN_AREA = 16384 diff --git a/homeassistant/components/vacuum/icons.json b/homeassistant/components/vacuum/icons.json index 7cc83f647dd011..dabca1057ac244 100644 --- a/homeassistant/components/vacuum/icons.json +++ b/homeassistant/components/vacuum/icons.json @@ -22,6 +22,9 @@ } }, "services": { + "clean_area": { + "service": "mdi:target-variant" + }, "clean_spot": { "service": "mdi:target-variant" }, diff --git a/homeassistant/components/vacuum/services.yaml b/homeassistant/components/vacuum/services.yaml index 25f3822bd35549..2f14a5bd3c6c25 100644 --- a/homeassistant/components/vacuum/services.yaml +++ 
b/homeassistant/components/vacuum/services.yaml @@ -69,6 +69,19 @@ clean_spot: entity: domain: vacuum +clean_area: + target: + entity: + domain: vacuum + supported_features: + - vacuum.VacuumEntityFeature.CLEAN_AREA + fields: + cleaning_area_id: + required: true + selector: + area: + multiple: true + send_command: target: entity: diff --git a/homeassistant/components/vacuum/strings.json b/homeassistant/components/vacuum/strings.json index 8e980aedb54dba..1695e1f2a4ca6b 100644 --- a/homeassistant/components/vacuum/strings.json +++ b/homeassistant/components/vacuum/strings.json @@ -89,6 +89,12 @@ } } }, + "issues": { + "segments_changed": { + "description": "", + "title": "Vacuum segments have changed for {entity_id}" + } + }, "selector": { "condition_behavior": { "options": { @@ -105,12 +111,22 @@ } }, "services": { + "clean_area": { + "description": "Tells a vacuum cleaner to clean an area.", + "fields": { + "cleaning_area_id": { + "description": "Areas to clean.", + "name": "Areas" + } + }, + "name": "Clean area" + }, "clean_spot": { - "description": "Tells the vacuum cleaner to do a spot clean-up.", + "description": "Tells a vacuum cleaner to do a spot clean-up.", "name": "Clean spot" }, "locate": { - "description": "Locates the vacuum cleaner robot.", + "description": "Locates a vacuum cleaner robot.", "name": "Locate" }, "pause": { @@ -118,11 +134,11 @@ "name": "[%key:common::action::pause%]" }, "return_to_base": { - "description": "Tells the vacuum cleaner to return to its dock.", + "description": "Tells a vacuum cleaner to return to its dock.", "name": "Return to dock" }, "send_command": { - "description": "Sends a command to the vacuum cleaner.", + "description": "Sends a command to a vacuum cleaner.", "fields": { "command": { "description": "Command to execute. 
The commands are integration-specific.", @@ -136,7 +152,7 @@ "name": "Send command" }, "set_fan_speed": { - "description": "Sets the fan speed of the vacuum cleaner.", + "description": "Sets the fan speed of a vacuum cleaner.", "fields": { "fan_speed": { "description": "Fan speed. The value depends on the integration. Some integrations have speed steps, like 'medium'. Some use a percentage, between 0 and 100.", diff --git a/homeassistant/components/vacuum/websocket.py b/homeassistant/components/vacuum/websocket.py new file mode 100644 index 00000000000000..7be4187bc13a49 --- /dev/null +++ b/homeassistant/components/vacuum/websocket.py @@ -0,0 +1,51 @@ +"""Websocket commands for the Vacuum integration.""" + +from __future__ import annotations + +from typing import Any + +import voluptuous as vol + +from homeassistant.components import websocket_api +from homeassistant.components.websocket_api import ERR_NOT_FOUND, ERR_NOT_SUPPORTED +from homeassistant.core import HomeAssistant, callback +import homeassistant.helpers.config_validation as cv + +from .const import DATA_COMPONENT, VacuumEntityFeature + + +@callback +def async_register_websocket_handlers(hass: HomeAssistant) -> None: + """Register websocket commands.""" + websocket_api.async_register_command(hass, handle_get_segments) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "vacuum/get_segments", + vol.Required("entity_id"): cv.strict_entity_id, + } +) +@websocket_api.async_response +async def handle_get_segments( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Get segments for a vacuum.""" + entity_id = msg["entity_id"] + entity = hass.data[DATA_COMPONENT].get_entity(entity_id) + if entity is None: + connection.send_error(msg["id"], ERR_NOT_FOUND, f"Entity {entity_id} not found") + return + + if VacuumEntityFeature.CLEAN_AREA not in entity.supported_features: + connection.send_error( + msg["id"], 
ERR_NOT_SUPPORTED, f"Entity {entity_id} not supported" + ) + return + + segments = await entity.async_get_segments() + + connection.send_result(msg["id"], {"segments": segments}) diff --git a/homeassistant/components/watts/config_flow.py b/homeassistant/components/watts/config_flow.py index 620d376cfec41a..aa79f24857e080 100644 --- a/homeassistant/components/watts/config_flow.py +++ b/homeassistant/components/watts/config_flow.py @@ -6,7 +6,11 @@ from visionpluspython.auth import WattsVisionAuth -from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult +from homeassistant.config_entries import ( + SOURCE_REAUTH, + SOURCE_RECONFIGURE, + ConfigFlowResult, +) from homeassistant.helpers import config_entry_oauth2_flow from .const import DOMAIN, OAUTH2_SCOPES @@ -52,6 +56,18 @@ async def async_step_reauth_confirm( } ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the integration.""" + return await self.async_step_pick_implementation( + user_input={ + "implementation": self._get_reconfigure_entry().data[ + "auth_implementation" + ] + } + ) + async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult: """Create an entry for the OAuth2 flow.""" @@ -64,13 +80,21 @@ async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResu await self.async_set_unique_id(user_id) if self.source == SOURCE_REAUTH: - self._abort_if_unique_id_mismatch(reason="reauth_account_mismatch") + self._abort_if_unique_id_mismatch(reason="account_mismatch") return self.async_update_reload_and_abort( self._get_reauth_entry(), data=data, ) + if self.source == SOURCE_RECONFIGURE: + self._abort_if_unique_id_mismatch(reason="account_mismatch") + + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), + data=data, + ) + self._abort_if_unique_id_configured() return self.async_create_entry( diff --git 
a/homeassistant/components/watts/manifest.json b/homeassistant/components/watts/manifest.json index 25135798cb2b87..f1e32b8c503e36 100644 --- a/homeassistant/components/watts/manifest.json +++ b/homeassistant/components/watts/manifest.json @@ -6,6 +6,6 @@ "dependencies": ["application_credentials", "cloud"], "documentation": "https://www.home-assistant.io/integrations/watts", "iot_class": "cloud_polling", - "quality_scale": "silver", + "quality_scale": "platinum", "requirements": ["visionpluspython==1.0.2"] } diff --git a/homeassistant/components/watts/quality_scale.yaml b/homeassistant/components/watts/quality_scale.yaml index 349cd8ced16d3d..c42cee4a798ae6 100644 --- a/homeassistant/components/watts/quality_scale.yaml +++ b/homeassistant/components/watts/quality_scale.yaml @@ -60,7 +60,7 @@ rules: icon-translations: status: exempt comment: Thermostat entities use standard HA Climate entity. - reconfiguration-flow: todo + reconfiguration-flow: done repair-issues: status: exempt comment: No actionable repair scenarios, auth issues are handled by reauthentication flow. 
diff --git a/homeassistant/components/watts/strings.json b/homeassistant/components/watts/strings.json index aeea7abfd83f8f..9f1c761d8f7edb 100644 --- a/homeassistant/components/watts/strings.json +++ b/homeassistant/components/watts/strings.json @@ -1,6 +1,7 @@ { "config": { "abort": { + "account_mismatch": "The authenticated account does not match the configured account", "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", @@ -12,8 +13,8 @@ "oauth_implementation_unavailable": "[%key:common::config_flow::abort::oauth2_implementation_unavailable%]", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", - "reauth_account_mismatch": "The authenticated account does not match the account that needed re-authentication", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]" }, "create_entry": { diff --git a/homeassistant/components/wirelesstag/__init__.py b/homeassistant/components/wirelesstag/__init__.py index 8cc4c53a479e44..84d032dec462f3 100644 --- a/homeassistant/components/wirelesstag/__init__.py +++ b/homeassistant/components/wirelesstag/__init__.py @@ -1,10 +1,14 @@ """Support for Wireless Sensor Tags.""" +from __future__ import annotations + import logging +from typing import TYPE_CHECKING from requests.exceptions import ConnectTimeout, HTTPError import voluptuous as vol from wirelesstagpy import SensorTag, WirelessTags +from wirelesstagpy.binaryevent import BinaryEvent from wirelesstagpy.exceptions import WirelessTagsException from 
homeassistant.components import persistent_notification @@ -21,6 +25,9 @@ WIRELESSTAG_DATA, ) +if TYPE_CHECKING: + from .switch import WirelessTagSwitch + _LOGGER = logging.getLogger(__name__) NOTIFICATION_ID = "wirelesstag_notification" @@ -56,22 +63,24 @@ def load_tags(self) -> dict[str, SensorTag]: self.tags = self.api.load_tags() return self.tags - def arm(self, switch): + def arm(self, switch: WirelessTagSwitch) -> None: """Arm entity sensor monitoring.""" func_name = f"arm_{switch.entity_description.key}" if (arm_func := getattr(self.api, func_name)) is not None: arm_func(switch.tag_id, switch.tag_manager_mac) - def disarm(self, switch): + def disarm(self, switch: WirelessTagSwitch) -> None: """Disarm entity sensor monitoring.""" func_name = f"disarm_{switch.entity_description.key}" if (disarm_func := getattr(self.api, func_name)) is not None: disarm_func(switch.tag_id, switch.tag_manager_mac) - def start_monitoring(self): + def start_monitoring(self) -> None: """Start monitoring push events.""" - def push_callback(tags_spec, event_spec): + def push_callback( + tags_spec: dict[str, SensorTag], event_spec: dict[str, list[BinaryEvent]] + ) -> None: """Handle push update.""" _LOGGER.debug( "Push notification arrived: %s, events: %s", tags_spec, event_spec diff --git a/homeassistant/components/wirelesstag/binary_sensor.py b/homeassistant/components/wirelesstag/binary_sensor.py index 430c4c07bde015..b153f43109efb0 100644 --- a/homeassistant/components/wirelesstag/binary_sensor.py +++ b/homeassistant/components/wirelesstag/binary_sensor.py @@ -77,8 +77,8 @@ def __init__( """Initialize a binary sensor for a Wireless Sensor Tags.""" super().__init__(api, tag) self._sensor_type = sensor_type - self._name = f"{self._tag.name} {self.event.human_readable_name}" self._attr_device_class = SENSOR_TYPES[sensor_type] + self._attr_name = f"{self._tag.name} {self.event.human_readable_name}" self._attr_unique_id = f"{self._uuid}_{self._sensor_type}" async def 
async_added_to_hass(self) -> None: @@ -95,7 +95,7 @@ async def async_added_to_hass(self) -> None: ) @property - def is_on(self): + def is_on(self) -> bool: """Return True if the binary sensor is on.""" return self._state == STATE_ON @@ -117,7 +117,7 @@ def updated_state_value(self): return self.principal_value @callback - def _on_binary_event_callback(self, new_tag): + def _on_binary_event_callback(self, new_tag: SensorTag) -> None: """Update state from arrived push notification.""" self._tag = new_tag self._state = self.updated_state_value() diff --git a/homeassistant/components/wirelesstag/entity.py b/homeassistant/components/wirelesstag/entity.py index 73b13cdc39710f..daa3e3b5842843 100644 --- a/homeassistant/components/wirelesstag/entity.py +++ b/homeassistant/components/wirelesstag/entity.py @@ -2,6 +2,8 @@ import logging +from wirelesstagpy import SensorTag + from homeassistant.const import ( ATTR_BATTERY_LEVEL, ATTR_VOLTAGE, @@ -11,6 +13,8 @@ ) from homeassistant.helpers.entity import Entity +from . import WirelessTagPlatform + _LOGGER = logging.getLogger(__name__) @@ -25,21 +29,16 @@ class WirelessTagBaseSensor(Entity): """Base class for HA implementation for Wireless Sensor Tag.""" - def __init__(self, api, tag): + def __init__(self, api: WirelessTagPlatform, tag: SensorTag) -> None: """Initialize a base sensor for Wireless Sensor Tag platform.""" self._api = api self._tag = tag self._uuid = self._tag.uuid self.tag_id = self._tag.tag_id self.tag_manager_mac = self._tag.tag_manager_mac - self._name = self._tag.name + self._attr_name = self._tag.name self._state = None - @property - def name(self): - """Return the name of the sensor.""" - return self._name - @property def principal_value(self): """Return base value. 
diff --git a/homeassistant/components/wirelesstag/sensor.py b/homeassistant/components/wirelesstag/sensor.py index 913e1dbf7a0613..33ea005c56ac40 100644 --- a/homeassistant/components/wirelesstag/sensor.py +++ b/homeassistant/components/wirelesstag/sensor.py @@ -5,6 +5,7 @@ import logging import voluptuous as vol +from wirelesstagpy import SensorTag from homeassistant.components.sensor import ( PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, @@ -20,6 +21,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from . import WirelessTagPlatform from .const import DOMAIN, SIGNAL_TAG_UPDATE, WIRELESSTAG_DATA from .entity import WirelessTagBaseSensor from .util import async_migrate_unique_id @@ -97,13 +99,18 @@ class WirelessTagSensor(WirelessTagBaseSensor, SensorEntity): entity_description: SensorEntityDescription - def __init__(self, api, tag, description): + def __init__( + self, + api: WirelessTagPlatform, + tag: SensorTag, + description: SensorEntityDescription, + ) -> None: """Initialize a WirelessTag sensor.""" super().__init__(api, tag) self._sensor_type = description.key self.entity_description = description - self._name = self._tag.name + self._attr_name = self._tag.name self._attr_unique_id = f"{self._uuid}_{self._sensor_type}" # I want to see entity_id as: @@ -148,7 +155,7 @@ def _sensor(self): return self._tag.sensor[self._sensor_type] @callback - def _update_tag_info_callback(self, new_tag): + def _update_tag_info_callback(self, new_tag: SensorTag) -> None: """Handle push notification sent by tag manager.""" _LOGGER.debug("Entity to update state: %s with new tag: %s", self, new_tag) self._tag = new_tag diff --git a/homeassistant/components/wirelesstag/switch.py b/homeassistant/components/wirelesstag/switch.py index 53e28f9103d360..6743138fb99ab3 100644 --- a/homeassistant/components/wirelesstag/switch.py +++ b/homeassistant/components/wirelesstag/switch.py @@ -5,6 +5,7 @@ 
from typing import Any import voluptuous as vol +from wirelesstagpy import SensorTag from homeassistant.components.switch import ( PLATFORM_SCHEMA as SWITCH_PLATFORM_SCHEMA, @@ -17,6 +18,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from . import WirelessTagPlatform from .const import WIRELESSTAG_DATA from .entity import WirelessTagBaseSensor from .util import async_migrate_unique_id @@ -82,11 +84,16 @@ async def async_setup_platform( class WirelessTagSwitch(WirelessTagBaseSensor, SwitchEntity): """A switch implementation for Wireless Sensor Tags.""" - def __init__(self, api, tag, description: SwitchEntityDescription) -> None: + def __init__( + self, + api: WirelessTagPlatform, + tag: SensorTag, + description: SwitchEntityDescription, + ) -> None: """Initialize a switch for Wireless Sensor Tag.""" super().__init__(api, tag) self.entity_description = description - self._name = f"{self._tag.name} {description.name}" + self._attr_name = f"{self._tag.name} {description.name}" self._attr_unique_id = f"{self._uuid}_{description.key}" def turn_on(self, **kwargs: Any) -> None: @@ -98,7 +105,7 @@ def turn_off(self, **kwargs: Any) -> None: self._api.disarm(self) @property - def is_on(self) -> bool: + def is_on(self) -> bool | None: """Return True if entity is on.""" return self._state diff --git a/homeassistant/components/xiaomi_aqara/binary_sensor.py b/homeassistant/components/xiaomi_aqara/binary_sensor.py index b7a6d7ba93501c..544cd6f7e318d3 100644 --- a/homeassistant/components/xiaomi_aqara/binary_sensor.py +++ b/homeassistant/components/xiaomi_aqara/binary_sensor.py @@ -181,11 +181,12 @@ def __init__( ) @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes.""" - attrs = {ATTR_DENSITY: self._density} - attrs.update(super().extra_state_attributes) - return attrs + return { + ATTR_DENSITY: 
self._density, + **self._attr_extra_state_attributes, + } async def async_added_to_hass(self) -> None: """Handle entity which will be added.""" @@ -243,11 +244,12 @@ def __init__( ) @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes.""" - attrs = {ATTR_NO_MOTION_SINCE: self._no_motion_since} - attrs.update(super().extra_state_attributes) - return attrs + return { + ATTR_NO_MOTION_SINCE: self._no_motion_since, + **self._attr_extra_state_attributes, + } @callback def _async_set_no_motion(self, now): @@ -349,11 +351,12 @@ def __init__( ) @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes.""" - attrs = {ATTR_OPEN_SINCE: self._open_since} - attrs.update(super().extra_state_attributes) - return attrs + return { + ATTR_OPEN_SINCE: self._open_since, + **self._attr_extra_state_attributes, + } async def async_added_to_hass(self) -> None: """Handle entity which will be added.""" @@ -462,11 +465,12 @@ def __init__( ) @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes.""" - attrs = {ATTR_DENSITY: self._density} - attrs.update(super().extra_state_attributes) - return attrs + return { + ATTR_DENSITY: self._density, + **self._attr_extra_state_attributes, + } async def async_added_to_hass(self) -> None: """Handle entity which will be added.""" @@ -511,11 +515,12 @@ def __init__( super().__init__(device, name, xiaomi_hub, data_key, None, config_entry) @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes.""" - attrs = {ATTR_LAST_ACTION: self._last_action} - attrs.update(super().extra_state_attributes) - return attrs + return { + ATTR_LAST_ACTION: self._last_action, + **self._attr_extra_state_attributes, + } async def async_added_to_hass(self) -> None: """Handle entity 
which will be added.""" @@ -559,11 +564,12 @@ def __init__( super().__init__(device, name, xiaomi_hub, data_key, None, config_entry) @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes.""" - attrs = {ATTR_LAST_ACTION: self._last_action} - attrs.update(super().extra_state_attributes) - return attrs + return { + ATTR_LAST_ACTION: self._last_action, + **self._attr_extra_state_attributes, + } async def async_added_to_hass(self) -> None: """Handle entity which will be added.""" @@ -629,11 +635,12 @@ def __init__( super().__init__(device, "Cube", xiaomi_hub, data_key, None, config_entry) @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes.""" - attrs = {ATTR_LAST_ACTION: self._last_action} - attrs.update(super().extra_state_attributes) - return attrs + return { + ATTR_LAST_ACTION: self._last_action, + **self._attr_extra_state_attributes, + } async def async_added_to_hass(self) -> None: """Handle entity which will be added.""" diff --git a/homeassistant/components/xiaomi_aqara/switch.py b/homeassistant/components/xiaomi_aqara/switch.py index e9e2c92314e3d0..6afd878f807798 100644 --- a/homeassistant/components/xiaomi_aqara/switch.py +++ b/homeassistant/components/xiaomi_aqara/switch.py @@ -165,18 +165,16 @@ def icon(self): return "mdi:power-socket" @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes.""" if self._supports_power_consumption: - attrs = { + return { ATTR_IN_USE: self._in_use, ATTR_LOAD_POWER: self._load_power, ATTR_POWER_CONSUMED: self._power_consumed, + **self._attr_extra_state_attributes, } - else: - attrs = {} - attrs.update(super().extra_state_attributes) - return attrs + return self._attr_extra_state_attributes def turn_on(self, **kwargs: Any) -> None: """Turn the switch on.""" diff --git 
a/homeassistant/components/zha/strings.json b/homeassistant/components/zha/strings.json index 4b1b629f8af353..f5fbf1c56b6287 100644 --- a/homeassistant/components/zha/strings.json +++ b/homeassistant/components/zha/strings.json @@ -2009,7 +2009,7 @@ }, "init": { "description": "A backup will be performed and ZHA will be stopped. Do you wish to continue?", - "title": "Reconfigure ZHA" + "title": "Change ZHA adapter settings" }, "intent_migrate": { "description": "Before plugging in your new adapter, your old adapter needs to be reset. An automatic backup will be performed. If you are using a combined Z-Wave and Zigbee adapter like the HUSBZB-1, this will only reset the Zigbee portion.\n\n*Note: if you are migrating from a **ConBee/RaspBee**, make sure it is running firmware `0x26720700` or newer! Otherwise, some devices may not be controllable after migrating until they are power cycled.*\n\nDo you wish to continue?", @@ -2051,16 +2051,16 @@ "title": "[%key:component::zha::config::step::plug_in_old_radio::title%]" }, "prompt_migrate_or_reconfigure": { - "description": "Are you migrating to a new adapter or re-configuring the current adapter?", + "description": "Are you migrating to a new adapter or changing the settings for your current adapter?", "menu_option_descriptions": { "intent_migrate": "This will help you migrate your Zigbee network from your old adapter to a new one.", "intent_reconfigure": "This will let you change the serial port for your current Zigbee adapter." 
}, "menu_options": { "intent_migrate": "Migrate to a new adapter", - "intent_reconfigure": "Re-configure the current adapter" + "intent_reconfigure": "Change the current adapter's settings" }, - "title": "Migrate or re-configure" + "title": "Migrate or change adapter settings" }, "restore_backup": { "title": "[%key:component::zha::config::step::restore_backup::title%]" diff --git a/homeassistant/components/zwave_js/strings.json b/homeassistant/components/zwave_js/strings.json index 143c43c422c7fe..18a3d362f03504 100644 --- a/homeassistant/components/zwave_js/strings.json +++ b/homeassistant/components/zwave_js/strings.json @@ -140,16 +140,16 @@ "title": "[%key:component::zwave_js::config::step::on_supervisor::title%]" }, "reconfigure": { - "description": "Are you migrating to a new adapter or re-configuring the current adapter?", + "description": "Are you migrating to a new adapter or reconfiguring the current adapter?", "menu_option_descriptions": { "intent_migrate": "This will move your Z-Wave network to a new adapter.", "intent_reconfigure": "This will let you change the adapter configuration." }, "menu_options": { "intent_migrate": "Migrate to a new adapter", - "intent_reconfigure": "Re-configure the current adapter" + "intent_reconfigure": "Reconfigure the current adapter" }, - "title": "Migrate or re-configure" + "title": "Migrate or reconfigure" }, "restore_failed": { "description": "Your Z-Wave network could not be restored to the new adapter. 
This means that your Z-Wave devices are not connected to Home Assistant.\n\nThe backup is saved to ”{file_path}”\n\n'<'a href=\"{file_url}\" download=\"{file_name}\"'>'Download backup file'<'/a'>'", diff --git a/homeassistant/exceptions.py b/homeassistant/exceptions.py index 23416480dd754d..58d8c22092cbee 100644 --- a/homeassistant/exceptions.py +++ b/homeassistant/exceptions.py @@ -6,6 +6,9 @@ from dataclasses import dataclass from typing import TYPE_CHECKING, Any +from aiohttp import ClientResponse, ClientResponseError, RequestInfo +from multidict import MultiMapping + from .util.event_type import EventType if TYPE_CHECKING: @@ -218,6 +221,63 @@ class ConfigEntryAuthFailed(IntegrationError): """Error to indicate that config entry could not authenticate.""" +class OAuth2TokenRequestError(ClientResponseError, HomeAssistantError): + """Error to indicate that the OAuth 2.0 flow could not refresh token.""" + + def __init__( + self, + *, + request_info: RequestInfo, + history: tuple[ClientResponse, ...] 
= (), + status: int = 0, + message: str = "OAuth 2.0 token refresh failed", + headers: MultiMapping[str] | None = None, + domain: str, + ) -> None: + """Initialize OAuth2TokenRequestError.""" + ClientResponseError.__init__( + self, + request_info=request_info, + history=history, + status=status, + message=message, + headers=headers, + ) + HomeAssistantError.__init__(self) + self.domain = domain + self.translation_domain = "homeassistant" + self.translation_key = "oauth2_helper_refresh_failed" + self.translation_placeholders = {"domain": domain} + self.generate_message = True + + +class OAuth2TokenRequestTransientError(OAuth2TokenRequestError): + """Recoverable error to indicate flow could not refresh token.""" + + def __init__(self, *, domain: str, **kwargs: Any) -> None: + """Initialize OAuth2TokenRequestTransientError.""" + super().__init__(domain=domain, **kwargs) + self.translation_domain = "homeassistant" + self.translation_key = "oauth2_helper_refresh_transient" + self.translation_placeholders = {"domain": domain} + self.generate_message = True + + +class OAuth2TokenRequestReauthError(OAuth2TokenRequestError): + """Non recoverable error to indicate the flow could not refresh token. + + Re-authentication is required.
+ """ + + def __init__(self, *, domain: str, **kwargs: Any) -> None: + """Initialize OAuth2RefreshTokenReauthError.""" + super().__init__(domain=domain, **kwargs) + self.translation_domain = "homeassistant" + self.translation_key = "oauth2_helper_reauth_required" + self.translation_placeholders = {"domain": domain} + self.generate_message = True + + class InvalidStateError(HomeAssistantError): """When an invalid state is encountered.""" diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 156029ea0f0636..03db1726027945 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -701,6 +701,7 @@ "tedee", "telegram_bot", "tellduslive", + "teltonika", "tesla_fleet", "tesla_wall_connector", "teslemetry", diff --git a/homeassistant/generated/dhcp.py b/homeassistant/generated/dhcp.py index d3dde435250cf9..9fc6f76c0b663d 100644 --- a/homeassistant/generated/dhcp.py +++ b/homeassistant/generated/dhcp.py @@ -857,6 +857,14 @@ "domain": "tailwind", "registered_devices": True, }, + { + "domain": "teltonika", + "macaddress": "209727*", + }, + { + "domain": "teltonika", + "macaddress": "001E42*", + }, { "domain": "tesla_wall_connector", "hostname": "teslawallconnector_*", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 38063333132953..9cd7bc78533290 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -6026,7 +6026,7 @@ }, "sensorpro": { "name": "SensorPro", - "integration_type": "hub", + "integration_type": "device", "config_flow": true, "iot_class": "local_push" }, @@ -6347,7 +6347,7 @@ }, "smhi": { "name": "SMHI", - "integration_type": "hub", + "integration_type": "service", "config_flow": true, "iot_class": "cloud_polling" }, @@ -6870,6 +6870,12 @@ "config_flow": false, "iot_class": "local_polling" }, + "teltonika": { + "name": "Teltonika", + "integration_type": "device", + 
"config_flow": true, + "iot_class": "local_polling" + }, "temper": { "name": "TEMPer", "integration_type": "hub", diff --git a/homeassistant/helpers/config_entry_flow.py b/homeassistant/helpers/config_entry_flow.py index 761a9c5714ec1e..7e38dff3a31af0 100644 --- a/homeassistant/helpers/config_entry_flow.py +++ b/homeassistant/helpers/config_entry_flow.py @@ -215,11 +215,19 @@ async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> config_entries.ConfigFlowResult: """Handle a user initiated set up flow to create a webhook.""" - if not self._allow_multiple and self._async_current_entries(): + if ( + not self._allow_multiple + and self._async_current_entries() + and self.source != config_entries.SOURCE_RECONFIGURE + ): return self.async_abort(reason="single_instance_allowed") if user_input is None: - return self.async_show_form(step_id="user") + return self.async_show_form( + step_id="reconfigure" + if self.source == config_entries.SOURCE_RECONFIGURE + else "user" + ) # Local import to be sure cloud is loaded and setup from homeassistant.components.cloud import ( # noqa: PLC0415 @@ -234,7 +242,11 @@ async def async_step_user( async_generate_url, ) - webhook_id = async_generate_id() + if self.source == config_entries.SOURCE_RECONFIGURE: + entry = self._get_reconfigure_entry() + webhook_id = entry.data["webhook_id"] + else: + webhook_id = async_generate_id() if "cloud" in self.hass.config.components and async_active_subscription( self.hass @@ -250,12 +262,30 @@ async def async_step_user( self._description_placeholder["webhook_url"] = webhook_url + if self.source == config_entries.SOURCE_RECONFIGURE: + if self.hass.config_entries.async_update_entry( + entry=entry, + data={**entry.data, "webhook_id": webhook_id, "cloudhook": cloudhook}, + ): + self.hass.config_entries.async_schedule_reload(entry.entry_id) + return self.async_abort( + reason="reconfigure_successful", + description_placeholders=self._description_placeholder, + ) + return 
self.async_create_entry( title=self._title, data={"webhook_id": webhook_id, "cloudhook": cloudhook}, description_placeholders=self._description_placeholder, ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> config_entries.ConfigFlowResult: + """Handle a user initiated flow to re-configure a webhook.""" + + return await self.async_step_user(user_input) + def register_webhook_flow( domain: str, title: str, description_placeholder: dict, allow_multiple: bool = False diff --git a/homeassistant/helpers/config_entry_oauth2_flow.py b/homeassistant/helpers/config_entry_oauth2_flow.py index d7fc606b591e97..c25c609dd06ada 100644 --- a/homeassistant/helpers/config_entry_oauth2_flow.py +++ b/homeassistant/helpers/config_entry_oauth2_flow.py @@ -29,7 +29,12 @@ from homeassistant import config_entries from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import ( + HomeAssistantError, + OAuth2TokenRequestError, + OAuth2TokenRequestReauthError, + OAuth2TokenRequestTransientError, +) from homeassistant.loader import async_get_application_credentials from homeassistant.util.hass_dict import HassKey @@ -56,6 +61,7 @@ HEADER_FRONTEND_BASE = "HA-Frontend-Base" MY_AUTH_CALLBACK_PATH = "https://my.home-assistant.io/redirect/oauth" + CLOCK_OUT_OF_SYNC_MAX_SEC = 20 OAUTH_AUTHORIZE_URL_TIMEOUT_SEC = 30 @@ -134,7 +140,10 @@ async def async_refresh_token(self, token: dict) -> dict: @abstractmethod async def _async_refresh_token(self, token: dict) -> dict: - """Refresh a token.""" + """Refresh a token. + + Should raise OAuth2TokenRequestError on token refresh failure. 
+ """ class LocalOAuth2Implementation(AbstractOAuth2Implementation): @@ -211,7 +220,8 @@ async def async_resolve_external_data(self, external_data: Any) -> dict: return await self._token_request(request_data) async def _async_refresh_token(self, token: dict) -> dict: - """Refresh tokens.""" + """Refresh a token.""" + new_token = await self._token_request( { "grant_type": "refresh_token", @@ -219,33 +229,71 @@ async def _async_refresh_token(self, token: dict) -> dict: "refresh_token": token["refresh_token"], } ) + return {**token, **new_token} async def _token_request(self, data: dict) -> dict: - """Make a token request.""" + """Make a token request. + + Raises OAuth2TokenRequestError on token request failure. + """ session = async_get_clientsession(self.hass) data["client_id"] = self.client_id - if self.client_secret: data["client_secret"] = self.client_secret _LOGGER.debug("Sending token request to %s", self.token_url) - resp = await session.post(self.token_url, data=data) - if resp.status >= 400: - try: - error_response = await resp.json() - except ClientError, JSONDecodeError: - error_response = {} - error_code = error_response.get("error", "unknown") - error_description = error_response.get("error_description", "unknown error") - _LOGGER.error( - "Token request for %s failed (%s): %s", - self.domain, - error_code, - error_description, - ) - resp.raise_for_status() + + try: + resp = await session.post(self.token_url, data=data) + if resp.status >= 400: + try: + error_response = await resp.json() + except ClientError, JSONDecodeError: + error_response = {} + error_code = error_response.get("error", "unknown") + error_description = error_response.get( + "error_description", "unknown error" + ) + _LOGGER.error( + "Token request for %s failed (%s): %s", + self.domain, + error_code, + error_description, + ) + resp.raise_for_status() + except ClientResponseError as err: + if err.status == HTTPStatus.TOO_MANY_REQUESTS or 500 <= err.status <= 599: + # Recoverable error 
+ raise OAuth2TokenRequestTransientError( + request_info=err.request_info, + history=err.history, + status=err.status, + message=err.message, + headers=err.headers, + domain=self._domain, + ) from err + if 400 <= err.status <= 499: + # Non-recoverable error + raise OAuth2TokenRequestReauthError( + request_info=err.request_info, + history=err.history, + status=err.status, + message=err.message, + headers=err.headers, + domain=self._domain, + ) from err + + raise OAuth2TokenRequestError( + request_info=err.request_info, + history=err.history, + status=err.status, + message=err.message, + headers=err.headers, + domain=self._domain, + ) from err + return cast(dict, await resp.json()) @@ -458,12 +506,12 @@ async def async_step_creation( except TimeoutError as err: _LOGGER.error("Timeout resolving OAuth token: %s", err) return self.async_abort(reason="oauth_timeout") - except (ClientResponseError, ClientError) as err: + except ( + OAuth2TokenRequestError, + ClientError, + ) as err: _LOGGER.error("Error resolving OAuth token: %s", err) - if ( - isinstance(err, ClientResponseError) - and err.status == HTTPStatus.UNAUTHORIZED - ): + if isinstance(err, OAuth2TokenRequestReauthError): return self.async_abort(reason="oauth_unauthorized") return self.async_abort(reason="oauth_failed") diff --git a/homeassistant/helpers/update_coordinator.py b/homeassistant/helpers/update_coordinator.py index 0bbea1ac6f4c93..7bed9ca1f28502 100644 --- a/homeassistant/helpers/update_coordinator.py +++ b/homeassistant/helpers/update_coordinator.py @@ -25,6 +25,8 @@ ConfigEntryError, ConfigEntryNotReady, HomeAssistantError, + OAuth2TokenRequestError, + OAuth2TokenRequestReauthError, ) from homeassistant.util.dt import utcnow @@ -352,6 +354,14 @@ async def __wrap_async_setup(self) -> bool: """Error handling for _async_setup.""" try: await self._async_setup() + + except OAuth2TokenRequestError as err: + self.last_exception = err + if isinstance(err, OAuth2TokenRequestReauthError): + 
self.last_update_success = False + # Non-recoverable error + raise ConfigEntryAuthFailed from err + except ( TimeoutError, requests.exceptions.Timeout, @@ -423,6 +433,32 @@ async def _async_refresh( # noqa: C901 self.logger.debug("Full error:", exc_info=True) self.last_update_success = False + except (OAuth2TokenRequestError,) as err: + self.last_exception = err + if isinstance(err, OAuth2TokenRequestReauthError): + # Non-recoverable error + auth_failed = True + if self.last_update_success: + if log_failures: + self.logger.error( + "Authentication failed while fetching %s data: %s", + self.name, + err, + ) + self.last_update_success = False + if raise_on_auth_failed: + raise ConfigEntryAuthFailed from err + + if self.config_entry: + self.config_entry.async_start_reauth(self.hass) + return + + # Recoverable error + if self.last_update_success: + if log_failures: + self.logger.error("Error fetching %s data: %s", self.name, err) + self.last_update_success = False + except (aiohttp.ClientError, requests.exceptions.RequestException) as err: self.last_exception = err if self.last_update_success: diff --git a/homeassistant/strings.json b/homeassistant/strings.json index 93c8b5e88f3186..482798c0376f09 100644 --- a/homeassistant/strings.json +++ b/homeassistant/strings.json @@ -36,7 +36,7 @@ "oauth2_unauthorized": "OAuth authorization error while obtaining access token.", "oauth2_user_rejected_authorize": "Account linking rejected: {error}", "reauth_successful": "Re-authentication was successful", - "reconfigure_successful": "Re-configuration was successful", + "reconfigure_successful": "Reconfiguration was successful", "single_instance_allowed": "Already configured. Only a single configuration possible.", "unknown_authorize_url_generation": "Unknown error generating an authorize URL.", "webhook_not_internet_accessible": "Your Home Assistant instance needs to be accessible from the internet to receive webhook messages." 
diff --git a/pylint/plugins/hass_enforce_type_hints.py b/pylint/plugins/hass_enforce_type_hints.py index da31c415828d03..08ae1ac3767a48 100644 --- a/pylint/plugins/hass_enforce_type_hints.py +++ b/pylint/plugins/hass_enforce_type_hints.py @@ -2591,10 +2591,12 @@ class ClassTypeHintMatch: TypeHintMatch( function_name="available_tones", return_type=["dict[int, str]", "list[int | str]", None], + mandatory=True, ), TypeHintMatch( function_name="supported_features", return_type="SirenEntityFeature", + mandatory=True, ), ], ), @@ -2606,31 +2608,38 @@ class ClassTypeHintMatch: TypeHintMatch( function_name="supported_languages", return_type="list[str]", + mandatory=True, ), TypeHintMatch( function_name="supported_formats", return_type="list[AudioFormats]", + mandatory=True, ), TypeHintMatch( function_name="supported_codecs", return_type="list[AudioCodecs]", + mandatory=True, ), TypeHintMatch( function_name="supported_bit_rates", return_type="list[AudioBitRates]", + mandatory=True, ), TypeHintMatch( function_name="supported_sample_rates", return_type="list[AudioSampleRates]", + mandatory=True, ), TypeHintMatch( function_name="supported_channels", return_type="list[AudioChannels]", + mandatory=True, ), TypeHintMatch( function_name="async_process_audio_stream", arg_types={1: "SpeechMetadata", 2: "AsyncIterable[bytes]"}, return_type="SpeechResult", + mandatory=True, ), ], ), @@ -2674,6 +2683,7 @@ class ClassTypeHintMatch: TypeHintMatch( function_name="todo_items", return_type=["list[TodoItem]", None], + mandatory=True, ), TypeHintMatch( function_name="async_create_todo_item", @@ -2681,6 +2691,7 @@ class ClassTypeHintMatch: 1: "TodoItem", }, return_type="None", + mandatory=True, ), TypeHintMatch( function_name="async_update_todo_item", @@ -2688,6 +2699,7 @@ class ClassTypeHintMatch: 1: "TodoItem", }, return_type="None", + mandatory=True, ), TypeHintMatch( function_name="async_delete_todo_items", @@ -2695,6 +2707,7 @@ class ClassTypeHintMatch: 1: "list[str]", }, 
return_type="None", + mandatory=True, ), TypeHintMatch( function_name="async_move_todo_item", @@ -2703,6 +2716,7 @@ class ClassTypeHintMatch: 2: "str | None", }, return_type="None", + mandatory=True, ), ], ), diff --git a/requirements_all.txt b/requirements_all.txt index ce085bf85d0153..a1e38dfd2fdc4e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -562,7 +562,7 @@ asyncinotify==4.2.0 asyncpysupla==0.0.5 # homeassistant.components.sleepiq -asyncsleepiq==1.6.0 +asyncsleepiq==1.7.0 # homeassistant.components.sftp_storage asyncssh==2.21.0 @@ -2175,7 +2175,7 @@ pyituran==0.1.5 pyjvcprojector==2.0.1 # homeassistant.components.kaleidescape -pykaleidescape==1.0.2 +pykaleidescape==1.1.1 # homeassistant.components.kira pykira==0.1.1 @@ -2466,7 +2466,7 @@ pysmappee==0.2.29 pysmarlaapi==1.0.1 # homeassistant.components.smartthings -pysmartthings==3.5.2 +pysmartthings==3.5.3 # homeassistant.components.smarty pysmarty2==0.10.3 @@ -3027,6 +3027,9 @@ tellcore-py==1.1.2 # homeassistant.components.tellduslive tellduslive==0.10.12 +# homeassistant.components.teltonika +teltasync==0.1.3 + # homeassistant.components.lg_soundbar temescal==0.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5b4e7aeeabb094..b6b9fb0e346aa1 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -523,7 +523,7 @@ async-upnp-client==0.46.2 asyncarve==0.1.1 # homeassistant.components.sleepiq -asyncsleepiq==1.6.0 +asyncsleepiq==1.7.0 # homeassistant.components.sftp_storage asyncssh==2.21.0 @@ -1852,7 +1852,7 @@ pyituran==0.1.5 pyjvcprojector==2.0.1 # homeassistant.components.kaleidescape -pykaleidescape==1.0.2 +pykaleidescape==1.1.1 # homeassistant.components.kira pykira==0.1.1 @@ -2095,7 +2095,7 @@ pysmappee==0.2.29 pysmarlaapi==1.0.1 # homeassistant.components.smartthings -pysmartthings==3.5.2 +pysmartthings==3.5.3 # homeassistant.components.smarty pysmarty2==0.10.3 @@ -2539,6 +2539,9 @@ tailscale==0.6.2 # homeassistant.components.tellduslive 
tellduslive==0.10.12 +# homeassistant.components.teltonika +teltasync==0.1.3 + # homeassistant.components.lg_soundbar temescal==0.5 diff --git a/tests/components/analytics/conftest.py b/tests/components/analytics/conftest.py new file mode 100644 index 00000000000000..150fcc1df8cfc5 --- /dev/null +++ b/tests/components/analytics/conftest.py @@ -0,0 +1,18 @@ +"""Common fixtures for the analytics tests.""" + +from collections.abc import Generator +from unittest.mock import patch + +import pytest + +MOCK_SNAPSHOT_PAYLOAD = {"mock_integration": {"devices": [], "entities": []}} + + +@pytest.fixture +def mock_snapshot_payload() -> Generator[None]: + """Mock _async_snapshot_payload to return non-empty data.""" + with patch( + "homeassistant.components.analytics.analytics._async_snapshot_payload", + return_value=MOCK_SNAPSHOT_PAYLOAD, + ): + yield diff --git a/tests/components/analytics/test_analytics.py b/tests/components/analytics/test_analytics.py index cf0e327ef7f899..d1c90085cb014a 100644 --- a/tests/components/analytics/test_analytics.py +++ b/tests/components/analytics/test_analytics.py @@ -1464,7 +1464,7 @@ async def async_modify_analytics( } -@pytest.mark.usefixtures("labs_snapshots_enabled") +@pytest.mark.usefixtures("labs_snapshots_enabled", "mock_snapshot_payload") async def test_send_snapshot_disabled( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -1481,6 +1481,24 @@ async def test_send_snapshot_disabled( @pytest.mark.usefixtures("labs_snapshots_enabled") +async def test_send_snapshot_empty( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + aioclient_mock: AiohttpClientMocker, +) -> None: + """Test no snapshots are sent when payload is empty.""" + aioclient_mock.post(SNAPSHOT_ENDPOINT_URL, status=200, json={}) + + analytics = Analytics(hass) + + await analytics.save_preferences({ATTR_SNAPSHOTS: True}) + await analytics.send_snapshot() + + assert len(aioclient_mock.mock_calls) == 0 + assert "Skipping snapshot submission, no data to 
send" in caplog.text + + +@pytest.mark.usefixtures("labs_snapshots_enabled", "mock_snapshot_payload") async def test_send_snapshot_success( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -1505,7 +1523,7 @@ async def test_send_snapshot_success( assert "Submitted snapshot analytics to Home Assistant servers" in caplog.text -@pytest.mark.usefixtures("labs_snapshots_enabled") +@pytest.mark.usefixtures("labs_snapshots_enabled", "mock_snapshot_payload") async def test_send_snapshot_with_existing_identifier( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -1541,7 +1559,7 @@ async def test_send_snapshot_with_existing_identifier( assert "Submitted snapshot analytics to Home Assistant servers" in caplog.text -@pytest.mark.usefixtures("labs_snapshots_enabled") +@pytest.mark.usefixtures("labs_snapshots_enabled", "mock_snapshot_payload") async def test_send_snapshot_invalid_identifier( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -1578,7 +1596,7 @@ async def test_send_snapshot_invalid_identifier( assert "Invalid submission identifier" in caplog.text -@pytest.mark.usefixtures("labs_snapshots_enabled") +@pytest.mark.usefixtures("labs_snapshots_enabled", "mock_snapshot_payload") @pytest.mark.parametrize( ("post_kwargs", "expected_log"), [ @@ -1643,7 +1661,7 @@ async def test_send_snapshot_error( assert expected_log in caplog.text -@pytest.mark.usefixtures("labs_snapshots_enabled") +@pytest.mark.usefixtures("labs_snapshots_enabled", "mock_snapshot_payload") async def test_async_schedule( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -1680,7 +1698,7 @@ async def test_async_schedule( assert 0 <= preferences["snapshot_submission_time"] <= 86400 -@pytest.mark.usefixtures("labs_snapshots_enabled") +@pytest.mark.usefixtures("labs_snapshots_enabled", "mock_snapshot_payload") async def test_async_schedule_disabled( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -1705,7 +1723,7 @@ async def test_async_schedule_disabled( assert 
len(aioclient_mock.mock_calls) == 0 -@pytest.mark.usefixtures("labs_snapshots_enabled") +@pytest.mark.usefixtures("labs_snapshots_enabled", "mock_snapshot_payload") async def test_async_schedule_already_scheduled( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -1739,7 +1757,7 @@ async def test_async_schedule_already_scheduled( ) -@pytest.mark.usefixtures("labs_snapshots_enabled") +@pytest.mark.usefixtures("labs_snapshots_enabled", "mock_snapshot_payload") @pytest.mark.parametrize(("onboarded"), [True, False]) async def test_async_schedule_cancel_when_disabled( hass: HomeAssistant, @@ -1778,7 +1796,7 @@ async def test_async_schedule_cancel_when_disabled( assert len(aioclient_mock.mock_calls) == 0 -@pytest.mark.usefixtures("labs_snapshots_enabled") +@pytest.mark.usefixtures("labs_snapshots_enabled", "mock_snapshot_payload") async def test_async_schedule_snapshots_url( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, diff --git a/tests/components/analytics/test_init.py b/tests/components/analytics/test_init.py index daae59e5445778..2459a7320ed2ee 100644 --- a/tests/components/analytics/test_init.py +++ b/tests/components/analytics/test_init.py @@ -37,6 +37,7 @@ async def test_setup(hass: HomeAssistant) -> None: assert DOMAIN in hass.data +@pytest.mark.usefixtures("mock_snapshot_payload") async def test_labs_feature_toggle( hass: HomeAssistant, hass_storage: dict[str, Any], diff --git a/tests/components/control4/test_climate.py b/tests/components/control4/test_climate.py index 50015672e65e89..c77ebee1f654b0 100644 --- a/tests/components/control4/test_climate.py +++ b/tests/components/control4/test_climate.py @@ -115,6 +115,21 @@ async def test_climate_entities( HVACAction.FAN, id="fan", ), + pytest.param( + _make_climate_data(hvac_state="Idle"), + HVACAction.IDLE, + id="idle", + ), + pytest.param( + _make_climate_data(hvac_state="Stage 1 Heat"), + HVACAction.HEATING, + id="stage_1_heat", + ), + pytest.param( + _make_climate_data(hvac_state="Stage 
2 Cool", hvac_mode="Cool"), + HVACAction.COOLING, + id="stage_2_cool", + ), ], ) @pytest.mark.usefixtures( diff --git a/tests/components/demo/test_vacuum.py b/tests/components/demo/test_vacuum.py index a497bd964ec660..d3858a91c7c1c6 100644 --- a/tests/components/demo/test_vacuum.py +++ b/tests/components/demo/test_vacuum.py @@ -70,7 +70,7 @@ async def setup_demo_vacuum(hass: HomeAssistant, vacuum_only: None): async def test_supported_features(hass: HomeAssistant) -> None: """Test vacuum supported features.""" state = hass.states.get(ENTITY_VACUUM_COMPLETE) - assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 16316 + assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 32700 assert state.attributes.get(ATTR_FAN_SPEED) == "medium" assert state.attributes.get(ATTR_FAN_SPEED_LIST) == FAN_SPEEDS assert state.state == VacuumActivity.DOCKED diff --git a/tests/components/hdfury/conftest.py b/tests/components/hdfury/conftest.py index cf8c1b5308b479..b296ed902b8f1c 100644 --- a/tests/components/hdfury/conftest.py +++ b/tests/components/hdfury/conftest.py @@ -103,7 +103,9 @@ def mock_hdfury_client() -> Generator[AsyncMock]: "mutetx1": "1", "relay": "0", "macaddr": "c7:1c:df:9d:f6:40", + "reboottimer": "0", "oled": "1", + "oledfade": "30", } ) diff --git a/tests/components/hdfury/snapshots/test_diagnostics.ambr b/tests/components/hdfury/snapshots/test_diagnostics.ambr index d77ab9eccb5758..6d4043fb3b1ca2 100644 --- a/tests/components/hdfury/snapshots/test_diagnostics.ambr +++ b/tests/components/hdfury/snapshots/test_diagnostics.ambr @@ -24,6 +24,8 @@ 'mutetx0': '1', 'mutetx1': '1', 'oled': '1', + 'oledfade': '30', + 'reboottimer': '0', 'relay': '0', 'tx0plus5': '1', 'tx1plus5': '1', diff --git a/tests/components/hdfury/snapshots/test_number.ambr b/tests/components/hdfury/snapshots/test_number.ambr new file mode 100644 index 00000000000000..20cde1949d63bd --- /dev/null +++ b/tests/components/hdfury/snapshots/test_number.ambr @@ -0,0 +1,121 @@ +# serializer version: 1 +# 
name: test_number_entities[number.hdfury_vrroom_02_oled_fade_timer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100, + 'min': 1, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.hdfury_vrroom_02_oled_fade_timer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'object_id_base': 'OLED fade timer', + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'OLED fade timer', + 'platform': 'hdfury', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'oled_fade', + 'unique_id': '000123456789_oledfade', + 'unit_of_measurement': , + }) +# --- +# name: test_number_entities[number.hdfury_vrroom_02_oled_fade_timer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'HDFury VRROOM-02 OLED fade timer', + 'max': 100, + 'min': 1, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.hdfury_vrroom_02_oled_fade_timer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30.0', + }) +# --- +# name: test_number_entities[number.hdfury_vrroom_02_restart_timer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.hdfury_vrroom_02_restart_timer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'object_id_base': 'Restart 
timer', + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restart timer', + 'platform': 'hdfury', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'reboot_timer', + 'unique_id': '000123456789_reboottimer', + 'unit_of_measurement': , + }) +# --- +# name: test_number_entities[number.hdfury_vrroom_02_restart_timer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'HDFury VRROOM-02 Restart timer', + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.hdfury_vrroom_02_restart_timer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- diff --git a/tests/components/hdfury/test_number.py b/tests/components/hdfury/test_number.py new file mode 100644 index 00000000000000..b39a73d8467df9 --- /dev/null +++ b/tests/components/hdfury/test_number.py @@ -0,0 +1,122 @@ +"""Tests for the HDFury number platform.""" + +from datetime import timedelta +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +from hdfury import HDFuryError +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +import homeassistant.helpers.entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_number_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test HDFury number entities.""" + + await setup_integration(hass, mock_config_entry, [Platform.NUMBER]) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.parametrize( + ("entity_id", "method"), + [ + ("number.hdfury_vrroom_02_oled_fade_timer", "set_oled_fade"), + ("number.hdfury_vrroom_02_restart_timer", "set_reboot_timer"), + ], +) +async def test_number_set_value( + hass: HomeAssistant, + mock_hdfury_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_id: str, + method: str, +) -> None: + """Test setting a device number value.""" + + await setup_integration(hass, mock_config_entry, [Platform.NUMBER]) + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 50}, + blocking=True, + ) + + getattr(mock_hdfury_client, method).assert_awaited_once_with("50") + + +@pytest.mark.parametrize( + ("entity_id", "method"), + [ + ("number.hdfury_vrroom_02_oled_fade_timer", "set_oled_fade"), + ("number.hdfury_vrroom_02_restart_timer", "set_reboot_timer"), + ], +) +async def test_number_error( + hass: HomeAssistant, + mock_hdfury_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_id: str, + method: str, +) -> None: + """Test set number value raises HomeAssistantError on API failure.""" + + getattr(mock_hdfury_client, method).side_effect = HDFuryError() + + await setup_integration(hass, mock_config_entry, [Platform.NUMBER]) + + with pytest.raises( + HomeAssistantError, + match="An error occurred while communicating with HDFury device", + ): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, 
ATTR_VALUE: 50}, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("entity_id"), + [ + ("number.hdfury_vrroom_02_oled_fade_timer"), + ("number.hdfury_vrroom_02_restart_timer"), + ], +) +async def test_number_entities_unavailable_on_error( + hass: HomeAssistant, + mock_hdfury_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + entity_id: str, +) -> None: + """Test API error causes entities to become unavailable.""" + + await setup_integration(hass, mock_config_entry, [Platform.NUMBER]) + + mock_hdfury_client.get_info.side_effect = HDFuryError() + + freezer.tick(timedelta(seconds=61)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE diff --git a/tests/components/lunatone/conftest.py b/tests/components/lunatone/conftest.py index abac3522d2bb00..318ff9ed38a49f 100644 --- a/tests/components/lunatone/conftest.py +++ b/tests/components/lunatone/conftest.py @@ -96,7 +96,7 @@ def mock_lunatone_dali_broadcast() -> Generator[AsyncMock]: def mock_config_entry() -> MockConfigEntry: """Return the default mocked config entry.""" return MockConfigEntry( - title=f"Lunatone {SERIAL_NUMBER}", + title=BASE_URL, domain=DOMAIN, data={CONF_URL: BASE_URL}, unique_id=str(SERIAL_NUMBER), diff --git a/tests/components/lunatone/test_config_flow.py b/tests/components/lunatone/test_config_flow.py index 56bae075a199b5..2ed358a54c0a53 100644 --- a/tests/components/lunatone/test_config_flow.py +++ b/tests/components/lunatone/test_config_flow.py @@ -11,7 +11,7 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from . import BASE_URL, SERIAL_NUMBER +from . 
import BASE_URL from tests.common import MockConfigEntry @@ -32,7 +32,7 @@ async def test_full_flow( {CONF_URL: BASE_URL}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == f"Test {SERIAL_NUMBER}" + assert result["title"] == BASE_URL assert result["data"] == {CONF_URL: BASE_URL} @@ -41,7 +41,7 @@ async def test_full_flow_fail_because_of_missing_device_infos( mock_lunatone_info: AsyncMock, ) -> None: """Test full flow.""" - mock_lunatone_info.data = None + mock_lunatone_info.serial_number = None result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} @@ -117,7 +117,7 @@ async def test_user_step_fail_with_error( {CONF_URL: BASE_URL}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == f"Test {SERIAL_NUMBER}" + assert result["title"] == BASE_URL assert result["data"] == {CONF_URL: BASE_URL} diff --git a/tests/components/mastodon/test_services.py b/tests/components/mastodon/test_services.py index 8cc28b5ffdecfb..239da9cb00c1fe 100644 --- a/tests/components/mastodon/test_services.py +++ b/tests/components/mastodon/test_services.py @@ -1,14 +1,17 @@ """Tests for the Mastodon services.""" +from datetime import timedelta from unittest.mock import AsyncMock, Mock, patch -from mastodon.Mastodon import MastodonAPIError, MediaAttachment +from mastodon.Mastodon import MastodonAPIError, MastodonNotFoundError, MediaAttachment import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.mastodon.const import ( ATTR_ACCOUNT_NAME, ATTR_CONTENT_WARNING, + ATTR_DURATION, + ATTR_HIDE_NOTIFICATIONS, ATTR_IDEMPOTENCY_KEY, ATTR_LANGUAGE, ATTR_MEDIA, @@ -17,7 +20,12 @@ ATTR_VISIBILITY, DOMAIN, ) -from homeassistant.components.mastodon.services import SERVICE_GET_ACCOUNT, SERVICE_POST +from homeassistant.components.mastodon.services import ( + SERVICE_GET_ACCOUNT, + SERVICE_MUTE_ACCOUNT, + SERVICE_POST, + SERVICE_UNMUTE_ACCOUNT, +) from homeassistant.const import 
ATTR_CONFIG_ENTRY_ID from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError @@ -79,6 +87,265 @@ async def test_get_account_failure( ) +@pytest.mark.parametrize( + ( + "service_data", + "expected_notifications", + "expected_duration", + ), + [ + ( + {ATTR_ACCOUNT_NAME: "@trwnh@mastodon.social"}, + True, + None, + ), + ( + { + ATTR_ACCOUNT_NAME: "@trwnh@mastodon.social", + ATTR_HIDE_NOTIFICATIONS: False, + }, + False, + None, + ), + ( + { + ATTR_ACCOUNT_NAME: "@trwnh@mastodon.social", + ATTR_DURATION: timedelta(hours=2), + }, + True, + 7200, + ), + ( + { + ATTR_ACCOUNT_NAME: "@trwnh@mastodon.social", + ATTR_DURATION: timedelta(hours=12), + ATTR_HIDE_NOTIFICATIONS: False, + }, + False, + 43200, + ), + ], +) +async def test_mute_account_success( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + service_data: dict[str, str | int | bool], + expected_notifications: bool, + expected_duration: int | None, +) -> None: + """Test the mute_account service mutes the target account with all options.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + DOMAIN, + SERVICE_MUTE_ACCOUNT, + {ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id} | service_data, + blocking=True, + return_response=False, + ) + + mock_mastodon_client.account_lookup.assert_called_once_with( + acct=service_data[ATTR_ACCOUNT_NAME] + ) + account = mock_mastodon_client.account_lookup.return_value + assert mock_mastodon_client.account_mute.call_count == 1 + call_args, call_kwargs = mock_mastodon_client.account_mute.call_args + + if call_kwargs: + actual_id = call_kwargs["id"] + actual_notifications = call_kwargs["notifications"] + actual_duration = call_kwargs.get("duration") + else: + _, positional_args, _ = call_args + actual_id, actual_notifications, actual_duration = positional_args + + assert actual_id == account.id + assert 
actual_notifications == expected_notifications + assert actual_duration == expected_duration + + +async def test_mute_account_duration_too_long( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test mute_account rejects overly long durations.""" + await setup_integration(hass, mock_config_entry) + + with ( + patch("homeassistant.components.mastodon.services.MAX_DURATION_SECONDS", 5), + pytest.raises(ServiceValidationError) as err, + ): + await hass.services.async_call( + DOMAIN, + SERVICE_MUTE_ACCOUNT, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_ACCOUNT_NAME: "@trwnh@mastodon.social", + ATTR_DURATION: timedelta(seconds=10), + }, + blocking=True, + return_response=False, + ) + + assert err.value.translation_key == "mute_duration_too_long" + mock_mastodon_client.account_mute.assert_not_called() + + +async def test_mute_account_failure_not_found( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test mute_account raises validation when account does not exist.""" + await setup_integration(hass, mock_config_entry) + + mock_mastodon_client.account_lookup.side_effect = MastodonNotFoundError( + "account not found" + ) + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + DOMAIN, + SERVICE_MUTE_ACCOUNT, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_ACCOUNT_NAME: "@trwnh@mastodon.social", + }, + blocking=True, + return_response=False, + ) + + mock_mastodon_client.account_lookup.assert_called_once_with( + acct="@trwnh@mastodon.social" + ) + mock_mastodon_client.account_mute.assert_not_called() + + +async def test_mute_account_failure_api_error( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test mute_account wraps API errors with translated message.""" + await setup_integration(hass, mock_config_entry) + + 
mock_mastodon_client.account_mute.side_effect = MastodonAPIError("mute failed") + + with pytest.raises( + HomeAssistantError, + match='Unable to mute account "@trwnh@mastodon.social"', + ): + await hass.services.async_call( + DOMAIN, + SERVICE_MUTE_ACCOUNT, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_ACCOUNT_NAME: "@trwnh@mastodon.social", + }, + blocking=True, + return_response=False, + ) + + mock_mastodon_client.account_lookup.assert_called_once_with( + acct="@trwnh@mastodon.social" + ) + account = mock_mastodon_client.account_lookup.return_value + mock_mastodon_client.account_mute.assert_called_once_with( + id=account.id, notifications=True, duration=None + ) + + +async def test_unmute_account_success( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the unmute_account service unmutes the target account.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + DOMAIN, + SERVICE_UNMUTE_ACCOUNT, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_ACCOUNT_NAME: "@trwnh@mastodon.social", + }, + blocking=True, + return_response=False, + ) + + mock_mastodon_client.account_lookup.assert_called_once_with( + acct="@trwnh@mastodon.social" + ) + account = mock_mastodon_client.account_lookup.return_value + mock_mastodon_client.account_unmute.assert_called_once_with(id=account.id) + + +async def test_unmute_account_failure_not_found( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test unmute_account raises validation when account does not exist.""" + await setup_integration(hass, mock_config_entry) + + mock_mastodon_client.account_lookup.side_effect = MastodonNotFoundError( + "account not found" + ) + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + DOMAIN, + SERVICE_UNMUTE_ACCOUNT, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + 
ATTR_ACCOUNT_NAME: "@trwnh@mastodon.social", + }, + blocking=True, + return_response=False, + ) + + mock_mastodon_client.account_lookup.assert_called_once_with( + acct="@trwnh@mastodon.social" + ) + mock_mastodon_client.account_unmute.assert_not_called() + + +async def test_unmute_account_failure_api_error( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test unmute_account wraps API errors with translated message.""" + await setup_integration(hass, mock_config_entry) + + mock_mastodon_client.account_unmute.side_effect = MastodonAPIError("unmute failed") + + with pytest.raises( + HomeAssistantError, + match='Unable to unmute account "@trwnh@mastodon.social"', + ): + await hass.services.async_call( + DOMAIN, + SERVICE_UNMUTE_ACCOUNT, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_ACCOUNT_NAME: "@trwnh@mastodon.social", + }, + blocking=True, + return_response=False, + ) + + mock_mastodon_client.account_lookup.assert_called_once_with( + acct="@trwnh@mastodon.social" + ) + account = mock_mastodon_client.account_lookup.return_value + mock_mastodon_client.account_unmute.assert_called_once_with(id=account.id) + + @pytest.mark.parametrize( ("payload", "kwargs"), [ diff --git a/tests/components/nest/test_config_flow.py b/tests/components/nest/test_config_flow.py index 24b12b047bfca4..9ff7713e9ed2f3 100644 --- a/tests/components/nest/test_config_flow.py +++ b/tests/components/nest/test_config_flow.py @@ -1368,7 +1368,7 @@ async def test_dhcp_discovery_with_creds( ("status_code", "error_reason"), [ (HTTPStatus.UNAUTHORIZED, "oauth_unauthorized"), - (HTTPStatus.NOT_FOUND, "oauth_failed"), + (HTTPStatus.NOT_FOUND, "oauth_unauthorized"), (HTTPStatus.INTERNAL_SERVER_ERROR, "oauth_failed"), ], ) diff --git a/tests/components/onedrive_for_business/snapshots/test_diagnostics.ambr b/tests/components/onedrive_for_business/snapshots/test_diagnostics.ambr new file mode 100644 index 
00000000000000..dd572b72249743 --- /dev/null +++ b/tests/components/onedrive_for_business/snapshots/test_diagnostics.ambr @@ -0,0 +1,32 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'config': dict({ + 'auth_implementation': 'onedrive_for_business', + 'folder_id': 'my_folder_id', + 'folder_path': 'backups/home_assistant', + 'tenant_id': 'test_tenant_id', + 'token': '**REDACTED**', + }), + 'drive': dict({ + 'drive_type': 'personal', + 'id': 'mock_drive_id', + 'name': 'My Drive', + 'owner': dict({ + 'application': None, + 'user': dict({ + 'display_name': '**REDACTED**', + 'email': '**REDACTED**', + 'id': 'id', + }), + }), + 'quota': dict({ + 'deleted': 5, + 'remaining': 805306368, + 'state': 'nearing', + 'total': 5368709120, + 'used': 4250000000, + }), + }), + }) +# --- diff --git a/tests/components/onedrive_for_business/test_diagnostics.py b/tests/components/onedrive_for_business/test_diagnostics.py new file mode 100644 index 00000000000000..c476e989228e32 --- /dev/null +++ b/tests/components/onedrive_for_business/test_diagnostics.py @@ -0,0 +1,26 @@ +"""Tests for the diagnostics data provided by the OneDrive for Business integration.""" + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + + await setup_integration(hass, mock_config_entry) + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) + == snapshot + ) diff --git a/tests/components/proxmoxve/conftest.py b/tests/components/proxmoxve/conftest.py index 7d9405d5064967..934c93eeeb1a15 100644 --- a/tests/components/proxmoxve/conftest.py +++ b/tests/components/proxmoxve/conftest.py @@ -64,18 +64,14 @@ def mock_proxmox_client(): """Mock Proxmox client with dynamic exception injection support.""" with ( patch( - "homeassistant.components.proxmoxve.ProxmoxAPI", autospec=True + "homeassistant.components.proxmoxve.coordinator.ProxmoxAPI", autospec=True ) as mock_api, - patch( - "homeassistant.components.proxmoxve.common.ProxmoxAPI", autospec=True - ) as mock_api_common, patch( "homeassistant.components.proxmoxve.config_flow.ProxmoxAPI" ) as mock_api_cf, ): mock_instance = MagicMock() mock_api.return_value = mock_instance - mock_api_common.return_value = mock_instance mock_api_cf.return_value = mock_instance mock_instance.access.ticket.post.return_value = load_json_object_fixture( @@ -139,4 +135,5 @@ def mock_config_entry() -> MockConfigEntry: domain=DOMAIN, title="ProxmoxVE test", data=MOCK_TEST_CONFIG, + entry_id="1234", ) diff --git a/tests/components/proxmoxve/snapshots/test_binary_sensor.ambr b/tests/components/proxmoxve/snapshots/test_binary_sensor.ambr index 81a6710d8d1272..3f03fec11519c9 100644 --- a/tests/components/proxmoxve/snapshots/test_binary_sensor.ambr +++ b/tests/components/proxmoxve/snapshots/test_binary_sensor.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# 
name: test_all_entities[binary_sensor.pve1_ct_backup-entry] +# name: test_all_entities[binary_sensor.ct_backup_status-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -11,46 +11,45 @@ 'device_id': , 'disabled_by': None, 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.pve1_ct_backup', - 'has_entity_name': False, + 'entity_category': , + 'entity_id': 'binary_sensor.ct_backup_status', + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , 'labels': set({ }), 'name': None, - 'object_id_base': 'pve1_ct-backup', + 'object_id_base': 'Status', 'options': dict({ }), 'original_device_class': , - 'original_icon': '', - 'original_name': 'pve1_ct-backup', + 'original_icon': None, + 'original_name': 'Status', 'platform': 'proxmoxve', 'previous_unique_id': None, 'suggested_object_id': None, 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'proxmox_pve1_201_running', + 'translation_key': 'status', + 'unique_id': '1234_201_status', 'unit_of_measurement': None, }) # --- -# name: test_all_entities[binary_sensor.pve1_ct_backup-state] +# name: test_all_entities[binary_sensor.ct_backup_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'pve1_ct-backup', - 'icon': '', + 'friendly_name': 'ct-backup Status', }), 'context': , - 'entity_id': 'binary_sensor.pve1_ct_backup', + 'entity_id': 'binary_sensor.ct_backup_status', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_all_entities[binary_sensor.pve1_ct_nginx-entry] +# name: test_all_entities[binary_sensor.ct_nginx_status-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -62,46 +61,45 @@ 'device_id': , 'disabled_by': None, 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.pve1_ct_nginx', - 'has_entity_name': False, + 'entity_category': , + 'entity_id': 'binary_sensor.ct_nginx_status', + 'has_entity_name': True, 
'hidden_by': None, 'icon': None, 'id': , 'labels': set({ }), 'name': None, - 'object_id_base': 'pve1_ct-nginx', + 'object_id_base': 'Status', 'options': dict({ }), 'original_device_class': , - 'original_icon': '', - 'original_name': 'pve1_ct-nginx', + 'original_icon': None, + 'original_name': 'Status', 'platform': 'proxmoxve', 'previous_unique_id': None, 'suggested_object_id': None, 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'proxmox_pve1_200_running', + 'translation_key': 'status', + 'unique_id': '1234_200_status', 'unit_of_measurement': None, }) # --- -# name: test_all_entities[binary_sensor.pve1_ct_nginx-state] +# name: test_all_entities[binary_sensor.ct_nginx_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'pve1_ct-nginx', - 'icon': '', + 'friendly_name': 'ct-nginx Status', }), 'context': , - 'entity_id': 'binary_sensor.pve1_ct_nginx', + 'entity_id': 'binary_sensor.ct_nginx_status', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'on', }) # --- -# name: test_all_entities[binary_sensor.pve1_vm_db-entry] +# name: test_all_entities[binary_sensor.pve1_status-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -113,148 +111,45 @@ 'device_id': , 'disabled_by': None, 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.pve1_vm_db', - 'has_entity_name': False, + 'entity_category': , + 'entity_id': 'binary_sensor.pve1_status', + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , 'labels': set({ }), 'name': None, - 'object_id_base': 'pve1_vm-db', + 'object_id_base': 'Status', 'options': dict({ }), 'original_device_class': , - 'original_icon': '', - 'original_name': 'pve1_vm-db', + 'original_icon': None, + 'original_name': 'Status', 'platform': 'proxmoxve', 'previous_unique_id': None, 'suggested_object_id': None, 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'proxmox_pve1_101_running', + 
'translation_key': 'status', + 'unique_id': '1234_node/pve1_status', 'unit_of_measurement': None, }) # --- -# name: test_all_entities[binary_sensor.pve1_vm_db-state] +# name: test_all_entities[binary_sensor.pve1_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'pve1_vm-db', - 'icon': '', + 'friendly_name': 'pve1 Status', }), 'context': , - 'entity_id': 'binary_sensor.pve1_vm_db', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.pve1_vm_web-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.pve1_vm_web', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'object_id_base': 'pve1_vm-web', - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': '', - 'original_name': 'pve1_vm-web', - 'platform': 'proxmoxve', - 'previous_unique_id': None, - 'suggested_object_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'proxmox_pve1_100_running', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.pve1_vm_web-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'pve1_vm-web', - 'icon': '', - }), - 'context': , - 'entity_id': 'binary_sensor.pve1_vm_web', + 'entity_id': 'binary_sensor.pve1_status', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'on', }) # --- -# name: test_all_entities[binary_sensor.pve2_ct_backup-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': 
None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.pve2_ct_backup', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'object_id_base': 'pve2_ct-backup', - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': '', - 'original_name': 'pve2_ct-backup', - 'platform': 'proxmoxve', - 'previous_unique_id': None, - 'suggested_object_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'proxmox_pve2_201_running', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.pve2_ct_backup-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'running', - 'friendly_name': 'pve2_ct-backup', - 'icon': '', - }), - 'context': , - 'entity_id': 'binary_sensor.pve2_ct_backup', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.pve2_ct_nginx-entry] +# name: test_all_entities[binary_sensor.pve2_status-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -266,46 +161,45 @@ 'device_id': , 'disabled_by': None, 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.pve2_ct_nginx', - 'has_entity_name': False, + 'entity_category': , + 'entity_id': 'binary_sensor.pve2_status', + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , 'labels': set({ }), 'name': None, - 'object_id_base': 'pve2_ct-nginx', + 'object_id_base': 'Status', 'options': dict({ }), 'original_device_class': , - 'original_icon': '', - 'original_name': 'pve2_ct-nginx', + 'original_icon': None, + 'original_name': 'Status', 'platform': 'proxmoxve', 'previous_unique_id': None, 'suggested_object_id': None, 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'proxmox_pve2_200_running', + 'translation_key': 'status', + 'unique_id': 
'1234_node/pve2_status', 'unit_of_measurement': None, }) # --- -# name: test_all_entities[binary_sensor.pve2_ct_nginx-state] +# name: test_all_entities[binary_sensor.pve2_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'pve2_ct-nginx', - 'icon': '', + 'friendly_name': 'pve2 Status', }), 'context': , - 'entity_id': 'binary_sensor.pve2_ct_nginx', + 'entity_id': 'binary_sensor.pve2_status', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'on', }) # --- -# name: test_all_entities[binary_sensor.pve2_vm_db-entry] +# name: test_all_entities[binary_sensor.vm_db_status-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -317,46 +211,45 @@ 'device_id': , 'disabled_by': None, 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.pve2_vm_db', - 'has_entity_name': False, + 'entity_category': , + 'entity_id': 'binary_sensor.vm_db_status', + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , 'labels': set({ }), 'name': None, - 'object_id_base': 'pve2_vm-db', + 'object_id_base': 'Status', 'options': dict({ }), 'original_device_class': , - 'original_icon': '', - 'original_name': 'pve2_vm-db', + 'original_icon': None, + 'original_name': 'Status', 'platform': 'proxmoxve', 'previous_unique_id': None, 'suggested_object_id': None, 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'proxmox_pve2_101_running', + 'translation_key': 'status', + 'unique_id': '1234_101_status', 'unit_of_measurement': None, }) # --- -# name: test_all_entities[binary_sensor.pve2_vm_db-state] +# name: test_all_entities[binary_sensor.vm_db_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'pve2_vm-db', - 'icon': '', + 'friendly_name': 'vm-db Status', }), 'context': , - 'entity_id': 'binary_sensor.pve2_vm_db', + 'entity_id': 'binary_sensor.vm_db_status', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 
'off', }) # --- -# name: test_all_entities[binary_sensor.pve2_vm_web-entry] +# name: test_all_entities[binary_sensor.vm_web_status-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -368,39 +261,38 @@ 'device_id': , 'disabled_by': None, 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.pve2_vm_web', - 'has_entity_name': False, + 'entity_category': , + 'entity_id': 'binary_sensor.vm_web_status', + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , 'labels': set({ }), 'name': None, - 'object_id_base': 'pve2_vm-web', + 'object_id_base': 'Status', 'options': dict({ }), 'original_device_class': , - 'original_icon': '', - 'original_name': 'pve2_vm-web', + 'original_icon': None, + 'original_name': 'Status', 'platform': 'proxmoxve', 'previous_unique_id': None, 'suggested_object_id': None, 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'proxmox_pve2_100_running', + 'translation_key': 'status', + 'unique_id': '1234_100_status', 'unit_of_measurement': None, }) # --- -# name: test_all_entities[binary_sensor.pve2_vm_web-state] +# name: test_all_entities[binary_sensor.vm_web_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'pve2_vm-web', - 'icon': '', + 'friendly_name': 'vm-web Status', }), 'context': , - 'entity_id': 'binary_sensor.pve2_vm_web', + 'entity_id': 'binary_sensor.vm_web_status', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/proxmoxve/test_binary_sensor.py b/tests/components/proxmoxve/test_binary_sensor.py index 0f16eedfc858b0..7b21f4ff46a9d2 100644 --- a/tests/components/proxmoxve/test_binary_sensor.py +++ b/tests/components/proxmoxve/test_binary_sensor.py @@ -2,19 +2,30 @@ from unittest.mock import MagicMock, patch +from freezegun.api import FrozenDateTimeFactory +from proxmoxer import AuthenticationError +from proxmoxer.core import ResourceException import pytest +import requests +from 
requests.exceptions import ConnectTimeout, SSLError from syrupy.assertion import SnapshotAssertion -from homeassistant.const import Platform +from homeassistant.components.proxmoxve.coordinator import DEFAULT_UPDATE_INTERVAL +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant import homeassistant.helpers.entity_registry as er from . import setup_integration -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@pytest.fixture(autouse=True) +def enable_all_entities(entity_registry_enabled_by_default: None) -> None: + """Make sure all entities are enabled.""" -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_all_entities( hass: HomeAssistant, snapshot: SnapshotAssertion, @@ -31,3 +42,41 @@ async def test_all_entities( await snapshot_platform( hass, entity_registry, snapshot, mock_config_entry.entry_id ) + + +@pytest.mark.parametrize( + ("exception"), + [ + (AuthenticationError("Invalid credentials")), + (SSLError("SSL handshake failed")), + (ConnectTimeout("Connection timed out")), + (ResourceException), + (requests.exceptions.ConnectionError), + ], + ids=[ + "auth_error", + "ssl_error", + "connect_timeout", + "resource_exception", + "connection_error", + ], +) +async def test_refresh_exceptions( + hass: HomeAssistant, + mock_proxmox_client: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + exception: Exception, +) -> None: + """Test entities go unavailable after coordinator refresh failures.""" + await setup_integration(hass, mock_config_entry) + assert mock_config_entry.state is ConfigEntryState.LOADED + + mock_proxmox_client.nodes.get.side_effect = exception + + freezer.tick(DEFAULT_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + 
+ state = hass.states.get("binary_sensor.ct_nginx_status") + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/proxmoxve/test_config_flow.py b/tests/components/proxmoxve/test_config_flow.py index 6edf1392ede9c7..d6010f2b641693 100644 --- a/tests/components/proxmoxve/test_config_flow.py +++ b/tests/components/proxmoxve/test_config_flow.py @@ -5,11 +5,11 @@ from unittest.mock import MagicMock from proxmoxer import AuthenticationError +from proxmoxer.core import ResourceException import pytest from requests.exceptions import ConnectTimeout, SSLError from homeassistant.components.proxmoxve import CONF_HOST, CONF_REALM -from homeassistant.components.proxmoxve.common import ResourceException from homeassistant.components.proxmoxve.const import CONF_NODES, DOMAIN from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER, ConfigEntryState from homeassistant.const import CONF_PASSWORD, CONF_PORT, CONF_USERNAME, CONF_VERIFY_SSL diff --git a/tests/components/proxmoxve/test_init.py b/tests/components/proxmoxve/test_init.py index 1b6b7449cca1da..26282342cafb9e 100644 --- a/tests/components/proxmoxve/test_init.py +++ b/tests/components/proxmoxve/test_init.py @@ -2,6 +2,12 @@ from unittest.mock import MagicMock +from proxmoxer import AuthenticationError +from proxmoxer.core import ResourceException +import pytest +import requests +from requests.exceptions import ConnectTimeout, SSLError + from homeassistant.components.proxmoxve.const import ( CONF_CONTAINERS, CONF_NODE, @@ -10,6 +16,7 @@ CONF_VMS, DOMAIN, ) +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -18,9 +25,14 @@ CONF_VERIFY_SSL, ) from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er import homeassistant.helpers.issue_registry as ir from homeassistant.setup import async_setup_component +from . 
import setup_integration + +from tests.common import MockConfigEntry + async def test_config_import( hass: HomeAssistant, @@ -58,3 +70,115 @@ async def test_config_import( assert len(issue_registry.issues) == 1 assert (HOMEASSISTANT_DOMAIN, "deprecated_yaml") in issue_registry.issues assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + +@pytest.mark.parametrize( + ("exception", "expected_state"), + [ + ( + AuthenticationError("Invalid credentials"), + ConfigEntryState.SETUP_ERROR, + ), + ( + SSLError("SSL handshake failed"), + ConfigEntryState.SETUP_ERROR, + ), + (ConnectTimeout("Connection timed out"), ConfigEntryState.SETUP_RETRY), + ( + ResourceException(500, "Internal Server Error", ""), + ConfigEntryState.SETUP_ERROR, + ), + ( + requests.exceptions.ConnectionError("Connection refused"), + ConfigEntryState.SETUP_ERROR, + ), + ], + ids=[ + "auth_error", + "ssl_error", + "connect_timeout", + "resource_exception", + "connection_error", + ], +) +async def test_setup_exceptions( + hass: HomeAssistant, + mock_proxmox_client: MagicMock, + mock_config_entry: MockConfigEntry, + exception: Exception, + expected_state: ConfigEntryState, +) -> None: + """Test the _async_setup.""" + mock_proxmox_client.nodes.get.side_effect = exception + await setup_integration(hass, mock_config_entry) + assert mock_config_entry.state == expected_state + + +async def test_migration_v1_to_v2( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test migration from version 1 to 2.""" + entry = MockConfigEntry( + domain=DOMAIN, + version=1, + unique_id="1", + data={ + CONF_HOST: "http://test_host", + CONF_PORT: 8006, + CONF_REALM: "pam", + CONF_USERNAME: "test_user@pam", + CONF_PASSWORD: "test_password", + CONF_VERIFY_SSL: True, + }, + ) + entry.add_to_hass(hass) + assert entry.version == 1 + + device_registry = dr.async_get(hass) + entity_registry = er.async_get(hass) + + vm_device = device_registry.async_get_or_create( 
+ config_entry_id=entry.entry_id, + identifiers={(DOMAIN, f"{entry.entry_id}_vm_100")}, + name="Test VM", + ) + + container_device = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + identifiers={(DOMAIN, f"{entry.entry_id}_container_200")}, + name="Test Container", + ) + + vm_entity = entity_registry.async_get_or_create( + domain="binary_sensor", + platform=DOMAIN, + unique_id="proxmox_pve1_100_running", + config_entry=entry, + device_id=vm_device.id, + original_name="Test VM Binary Sensor", + ) + + container_entity = entity_registry.async_get_or_create( + domain="binary_sensor", + platform=DOMAIN, + unique_id="proxmox_pve1_200_running", + config_entry=entry, + device_id=container_device.id, + original_name="Test Container Binary Sensor", + ) + + assert vm_entity.unique_id == "proxmox_pve1_100_running" + assert container_entity.unique_id == "proxmox_pve1_200_running" + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.version == 2 + + vm_entity_after = entity_registry.async_get(vm_entity.entity_id) + container_entity_after = entity_registry.async_get(container_entity.entity_id) + + assert vm_entity_after.unique_id == f"{entry.entry_id}_100_status" + assert container_entity_after.unique_id == f"{entry.entry_id}_200_status" diff --git a/tests/components/remote_calendar/test_calendar.py b/tests/components/remote_calendar/test_calendar.py index a0c18383369fd8..ea52d961414ba4 100644 --- a/tests/components/remote_calendar/test_calendar.py +++ b/tests/components/remote_calendar/test_calendar.py @@ -4,6 +4,7 @@ import pathlib import textwrap +from freezegun.api import FrozenDateTimeFactory from httpx import Response import pytest import respx @@ -21,7 +22,7 @@ event_fields, ) -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed # Test data files with known calendars from various sources. 
You can add a new file # in the testdata directory and add it will be parsed and tested. @@ -422,3 +423,110 @@ async def test_calendar_examples( await setup_integration(hass, config_entry) events = await get_events("1997-07-14T00:00:00", "2025-07-01T00:00:00") assert events == snapshot + + +@respx.mock +@pytest.mark.freeze_time("2023-01-01 09:59:00+00:00") +async def test_event_lifecycle( + hass: HomeAssistant, + config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the lifecycle of an event from upcoming to active to finished.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=textwrap.dedent( + """\ + BEGIN:VCALENDAR + VERSION:2.0 + BEGIN:VEVENT + SUMMARY:Test Event + DTSTART:20230101T100000Z + DTEND:20230101T110000Z + END:VEVENT + END:VCALENDAR + """ + ), + ) + ) + + await setup_integration(hass, config_entry) + + # An upcoming event is off + state = hass.states.get(TEST_ENTITY) + assert state + assert state.state == STATE_OFF + assert state.attributes.get("message") == "Test Event" + + # Advance time to the start of the event + freezer.move_to(datetime.fromisoformat("2023-01-01T10:00:00+00:00")) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # The event is active + state = hass.states.get(TEST_ENTITY) + assert state + assert state.state == STATE_ON + assert state.attributes.get("message") == "Test Event" + + # Advance time to the end of the event + freezer.move_to(datetime.fromisoformat("2023-01-01T11:00:00+00:00")) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # The event is finished + state = hass.states.get(TEST_ENTITY) + assert state + assert state.state == STATE_OFF + + +@respx.mock +@pytest.mark.freeze_time("2023-01-01 09:59:00+00:00") +async def test_event_edge_during_refresh_interval( + hass: HomeAssistant, + config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the lifecycle of multiple sequential 
events.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=textwrap.dedent( + """\ + BEGIN:VCALENDAR + VERSION:2.0 + BEGIN:VEVENT + SUMMARY:Event One + DTSTART:20230101T100000Z + DTEND:20230101T110000Z + END:VEVENT + BEGIN:VEVENT + SUMMARY:Event Two + DTSTART:20230102T190000Z + DTEND:20230102T200000Z + END:VEVENT + END:VCALENDAR + """ + ), + ) + ) + + await setup_integration(hass, config_entry) + + # Event One is upcoming + state = hass.states.get(TEST_ENTITY) + assert state + assert state.state == STATE_OFF + assert state.attributes.get("message") == "Event One" + + # Advance time to after the end of the first event + freezer.move_to(datetime.fromisoformat("2023-01-01T11:01:00+00:00")) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Event Two is upcoming + state = hass.states.get(TEST_ENTITY) + assert state + assert state.state == STATE_OFF + assert state.attributes.get("message") == "Event Two" diff --git a/tests/components/satel_integra/test_alarm_control_panel.py b/tests/components/satel_integra/test_alarm_control_panel.py index f447739d30e439..5de46aff313227 100644 --- a/tests/components/satel_integra/test_alarm_control_panel.py +++ b/tests/components/satel_integra/test_alarm_control_panel.py @@ -3,6 +3,7 @@ from collections.abc import AsyncGenerator from unittest.mock import AsyncMock, patch +from freezegun.api import FrozenDateTimeFactory import pytest from satel_integra.satel_integra import AlarmState from syrupy.assertion import SnapshotAssertion @@ -26,7 +27,12 @@ from . 
import MOCK_CODE, MOCK_ENTRY_ID, get_monitor_callbacks, setup_integration -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import ( + MockConfigEntry, + async_capture_events, + async_fire_time_changed, + snapshot_platform, +) @pytest.fixture(autouse=True) @@ -163,3 +169,29 @@ async def test_alarm_control_panel_disarming( mock_satel.disarm.assert_awaited_once_with(MOCK_CODE, [1]) mock_satel.clear_alarm.assert_awaited_once_with(MOCK_CODE, [1]) + + +async def test_alarm_panel_last_reported( + hass: HomeAssistant, + mock_satel: AsyncMock, + mock_config_entry_with_subentries: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test alarm panels update last_reported if same state is reported.""" + events = async_capture_events(hass, "state_changed") + await setup_integration(hass, mock_config_entry_with_subentries) + + first_reported = hass.states.get("alarm_control_panel.home").last_reported + assert first_reported is not None + # Initial state change event + assert len(events) == 1 + + freezer.tick(1) + async_fire_time_changed(hass) + + # Run callbacks with same payload + alarm_panel_update_method, _, _ = get_monitor_callbacks(mock_satel) + alarm_panel_update_method() + + assert first_reported != hass.states.get("alarm_control_panel.home").last_reported + assert len(events) == 1 # last_reported shall not fire state_changed diff --git a/tests/components/satel_integra/test_binary_sensor.py b/tests/components/satel_integra/test_binary_sensor.py index 7d125e53309dde..42435968146c26 100644 --- a/tests/components/satel_integra/test_binary_sensor.py +++ b/tests/components/satel_integra/test_binary_sensor.py @@ -3,6 +3,7 @@ from collections.abc import AsyncGenerator from unittest.mock import AsyncMock, patch +from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion @@ -15,7 +16,12 @@ from . 
import get_monitor_callbacks, setup_integration -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import ( + MockConfigEntry, + async_capture_events, + async_fire_time_changed, + snapshot_platform, +) @pytest.fixture(autouse=True) @@ -117,3 +123,30 @@ async def test_binary_sensor_callback( zone_update_method({"zones": {2: 1}}) assert hass.states.get("binary_sensor.zone").state == STATE_UNKNOWN assert hass.states.get("binary_sensor.output").state == STATE_UNKNOWN + + +async def test_binary_sensor_last_reported( + hass: HomeAssistant, + mock_satel: AsyncMock, + mock_config_entry_with_subentries: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test binary sensors update last_reported if same state is reported.""" + events = async_capture_events(hass, "state_changed") + await setup_integration(hass, mock_config_entry_with_subentries) + + first_reported = hass.states.get("binary_sensor.zone").last_reported + assert first_reported is not None + # Initial 2 state change events for both zone and output + assert len(events) == 2 + + freezer.tick(1) + async_fire_time_changed(hass) + + # Run callbacks with same payload + _, zone_update_method, output_update_method = get_monitor_callbacks(mock_satel) + output_update_method({"outputs": {1: 0}}) + zone_update_method({"zones": {1: 0}}) + + assert first_reported != hass.states.get("binary_sensor.zone").last_reported + assert len(events) == 2 # last_reported shall not fire state_changed diff --git a/tests/components/satel_integra/test_switch.py b/tests/components/satel_integra/test_switch.py index 165324075592c6..8a6a3bedc830c7 100644 --- a/tests/components/satel_integra/test_switch.py +++ b/tests/components/satel_integra/test_switch.py @@ -3,6 +3,7 @@ from collections.abc import AsyncGenerator from unittest.mock import AsyncMock, patch +from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion @@ -25,7 +26,12 @@ from . 
import MOCK_CODE, MOCK_ENTRY_ID, get_monitor_callbacks, setup_integration -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import ( + MockConfigEntry, + async_capture_events, + async_fire_time_changed, + snapshot_platform, +) @pytest.fixture(autouse=True) @@ -144,3 +150,29 @@ async def test_switch_change_state( assert hass.states.get("switch.switchable_output").state == STATE_OFF mock_satel.set_output.assert_awaited_once_with(MOCK_CODE, 1, False) + + +async def test_switch_last_reported( + hass: HomeAssistant, + mock_satel: AsyncMock, + mock_config_entry_with_subentries: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test switches update last_reported if same state is reported.""" + events = async_capture_events(hass, "state_changed") + await setup_integration(hass, mock_config_entry_with_subentries) + + first_reported = hass.states.get("switch.switchable_output").last_reported + assert first_reported is not None + # Initial state change event + assert len(events) == 1 + + freezer.tick(1) + async_fire_time_changed(hass) + + # Run callbacks with same payload + _, _, output_update_method = get_monitor_callbacks(mock_satel) + output_update_method({"outputs": {1: 0}}) + + assert first_reported != hass.states.get("switch.switchable_output").last_reported + assert len(events) == 1 # last_reported shall not fire state_changed diff --git a/tests/components/scrape/__init__.py b/tests/components/scrape/__init__.py index de061d051b20cf..d56d26e2796b7d 100644 --- a/tests/components/scrape/__init__.py +++ b/tests/components/scrape/__init__.py @@ -39,12 +39,14 @@ def __init__( ) -> None: """Init RestDataMock.""" self.data: str | None = None + self.headers: dict[str, str] | None = None self.payload = payload self.count = 0 async def async_update(self, data: bool | None = True) -> None: """Update.""" self.count += 1 + self.headers = {} if self.payload == "test_scrape_sensor": self.data = ( # Default @@ -74,5 +76,33 @@ async def 
async_update(self, data: bool | None = True) -> None: self.data = "
secret text
" if self.payload == "test_scrape_sensor_no_data": self.data = None + if self.payload == "test_scrape_xml": + # XML/RSS content for testing XML parser detection via Content-Type + self.headers = {"Content-Type": "application/rss+xml"} + self.data = ( + '' + "Test RSS Feed" + "Test Itemhttps://example.com/item" + "" + ) + if self.payload == "test_scrape_xml_fallback": + # XML/RSS content with non-XML Content-Type for testing content-based detection + self.headers = {"Content-Type": "text/html"} + self.data = ( + '' + "Test RSS Feed" + "Test Itemhttps://example.com/item" + "" + ) + if self.payload == "test_scrape_html5_with_xml_declaration": + # HTML5 with XML declaration, no Content-Type header, and uppercase tags + # Tests: XML stripping, content detection, case-insensitive selectors + self.data = ( + '\n' + "\n" + "Test Page" + "
" + "

Current Version: 2021.12.10

" + ) if self.count == 3: self.data = None diff --git a/tests/components/scrape/test_sensor.py b/tests/components/scrape/test_sensor.py index c97e2cd3716e95..b6ef2f8a8467cd 100644 --- a/tests/components/scrape/test_sensor.py +++ b/tests/components/scrape/test_sensor.py @@ -75,6 +75,116 @@ async def test_scrape_sensor(hass: HomeAssistant) -> None: assert state.state == "Current Version: 2021.12.10" +async def test_scrape_xml_content_type( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test Scrape sensor with XML Content-Type header uses XML parser.""" + config = { + DOMAIN: [ + return_integration_config( + sensors=[ + {"select": "title", "name": "RSS Title"}, + # Test tag - HTML parser treats this as self-closing, + # but XML parser correctly parses the content + {"select": "item link", "name": "RSS Link"}, + ] + ) + ] + } + + mocker = MockRestData("test_scrape_xml") + with patch( + "homeassistant.components.rest.RestData", + return_value=mocker, + ): + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done() + + # Verify XML Content-Type header is set + assert mocker.headers.get("Content-Type") == "application/rss+xml" + + state = hass.states.get("sensor.rss_title") + assert state.state == "Test RSS Feed" + + # Verify content is correctly parsed with XML parser + link_state = hass.states.get("sensor.rss_link") + assert link_state.state == "https://example.com/item" + + assert "XMLParsedAsHTMLWarning" not in caplog.text + + +async def test_scrape_xml_declaration( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test Scrape sensor with XML declaration (no XML Content-Type) uses XML parser.""" + config = { + DOMAIN: [ + return_integration_config( + sensors=[{"select": "title", "name": "RSS Title"}] + ) + ] + } + + mocker = MockRestData("test_scrape_xml_fallback") + with patch( + "homeassistant.components.rest.RestData", + return_value=mocker, + ): + assert await 
async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done() + + # Verify non-XML Content-Type but XML parser used due to None: + """Test HTML5 with XML declaration strips XML prefix and uses HTML parser. + + This test verifies backward compatibility by testing: + - No Content-Type header (relies on content detection) + - Uppercase HTML tags with lowercase selectors (case-insensitive matching) + - Class selectors work correctly + - No XMLParsedAsHTMLWarning is logged + """ + config = { + DOMAIN: [ + return_integration_config( + sensors=[ + # Lowercase selector matches uppercase

tag + {"select": ".current-version h1", "name": "HA version"}, + # Lowercase selector matches uppercase tag + {"select": "title", "name": "Page Title"}, + ] + ) + ] + } + + mocker = MockRestData("test_scrape_html5_with_xml_declaration") + with patch( + "homeassistant.components.rest.RestData", + return_value=mocker, + ): + assert await async_setup_component(hass, DOMAIN, config) + await hass.async_block_till_done() + + # Verify no Content-Type header is set (tests content-based detection) + assert "Content-Type" not in mocker.headers + + state = hass.states.get("sensor.ha_version") + assert state.state == "Current Version: 2021.12.10" + + title_state = hass.states.get("sensor.page_title") + assert title_state.state == "Test Page" + + assert "XMLParsedAsHTMLWarning" not in caplog.text + + async def test_scrape_sensor_value_template(hass: HomeAssistant) -> None: """Test Scrape sensor with value template.""" config = { diff --git a/tests/components/smartthings/conftest.py b/tests/components/smartthings/conftest.py index 4bd35611b2eb8e..8cecdde8870605 100644 --- a/tests/components/smartthings/conftest.py +++ b/tests/components/smartthings/conftest.py @@ -164,6 +164,7 @@ def mock_smartthings() -> Generator[AsyncMock]: "ecobee_thermostat", "ecobee_thermostat_offline", "sensi_thermostat", + "siemens_washer", "fake_fan", "generic_fan_3_speed", "heatit_ztrm3_thermostat", diff --git a/tests/components/smartthings/fixtures/device_status/da_ref_normal_000001.json b/tests/components/smartthings/fixtures/device_status/da_ref_normal_000001.json index 57dba2e0259a11..a3e4a0dcbc374e 100644 --- a/tests/components/smartthings/fixtures/device_status/da_ref_normal_000001.json +++ b/tests/components/smartthings/fixtures/device_status/da_ref_normal_000001.json @@ -1,6 +1,20 @@ { "components": { "pantry-01": { + "custom.fridgeMode": { + "fridgeModeValue": { + "value": null + }, + "fridgeMode": { + "value": null + }, + "supportedFullFridgeModes": { + "value": null + }, + 
"supportedFridgeModes": { + "value": null + } + }, "samsungce.fridgePantryInfo": { "name": { "value": null @@ -9,7 +23,7 @@ "custom.disabledCapabilities": { "disabledCapabilities": { "value": [], - "timestamp": "2022-02-07T10:47:54.524Z" + "timestamp": "2022-07-16T15:22:24.391Z" } }, "samsungce.fridgePantryMode": { @@ -22,6 +36,20 @@ } }, "pantry-02": { + "custom.fridgeMode": { + "fridgeModeValue": { + "value": null + }, + "fridgeMode": { + "value": null + }, + "supportedFullFridgeModes": { + "value": null + }, + "supportedFridgeModes": { + "value": null + } + }, "samsungce.fridgePantryInfo": { "name": { "value": null @@ -30,7 +58,7 @@ "custom.disabledCapabilities": { "disabledCapabilities": { "value": [], - "timestamp": "2022-02-07T10:47:54.524Z" + "timestamp": "2022-07-16T15:22:24.391Z" } }, "samsungce.fridgePantryMode": { @@ -43,16 +71,22 @@ } }, "icemaker": { + "samsungce.fridgeIcemakerInfo": { + "name": { + "value": "ICE_MAKER", + "timestamp": "2026-01-13T22:28:05.342Z" + } + }, "custom.disabledCapabilities": { "disabledCapabilities": { "value": [], - "timestamp": "2024-10-12T13:55:04.008Z" + "timestamp": "2024-09-26T22:29:21.805Z" } }, "switch": { "switch": { - "value": "off", - "timestamp": "2025-02-09T13:55:01.720Z" + "value": "on", + "timestamp": "2026-02-14T08:38:13.451Z" } } }, @@ -64,6 +98,9 @@ "fridgeMode": { "value": null }, + "supportedFullFridgeModes": { + "value": null + }, "supportedFridgeModes": { "value": null } @@ -76,13 +113,18 @@ "samsungce.unavailableCapabilities": { "unavailableCommands": { "value": [], - "timestamp": "2024-11-08T04:14:59.899Z" + "timestamp": "2024-11-08T04:11:13.422Z" } }, "custom.disabledCapabilities": { "disabledCapabilities": { "value": ["samsungce.freezerConvertMode", "custom.fridgeMode"], - "timestamp": "2024-11-12T08:23:59.944Z" + "timestamp": "2024-11-08T13:08:56.542Z" + } + }, + "samsungce.fridgeZoneInfo": { + "name": { + "value": null } }, "temperatureMeasurement": { @@ -126,6 +168,9 @@ "fridgeMode": { "value": 
null }, + "supportedFullFridgeModes": { + "value": null + }, "supportedFridgeModes": { "value": null } @@ -133,19 +178,19 @@ "contactSensor": { "contact": { "value": "closed", - "timestamp": "2025-02-09T16:26:21.425Z" + "timestamp": "2026-02-14T14:42:52.354Z" } }, "samsungce.unavailableCapabilities": { "unavailableCommands": { "value": [], - "timestamp": "2024-11-08T04:14:59.899Z" + "timestamp": "2024-11-08T04:11:13.422Z" } }, "custom.disabledCapabilities": { "disabledCapabilities": { "value": ["custom.fridgeMode"], - "timestamp": "2024-10-12T13:55:04.008Z" + "timestamp": "2024-09-26T22:29:21.805Z" } }, "temperatureMeasurement": { @@ -155,19 +200,19 @@ "temperature": { "value": 37, "unit": "F", - "timestamp": "2025-01-19T21:07:55.764Z" + "timestamp": "2026-02-02T23:48:51.492Z" } }, "custom.thermostatSetpointControl": { "minimumSetpoint": { "value": 34, "unit": "F", - "timestamp": "2025-01-19T21:07:55.764Z" + "timestamp": "2026-01-13T22:29:09.026Z" }, "maximumSetpoint": { "value": 44, "unit": "F", - "timestamp": "2025-01-19T21:07:55.764Z" + "timestamp": "2026-01-13T22:29:09.026Z" } }, "thermostatCoolingSetpoint": { @@ -178,12 +223,12 @@ "step": 1 }, "unit": "F", - "timestamp": "2025-01-19T21:07:55.764Z" + "timestamp": "2026-01-13T22:29:09.026Z" }, "coolingSetpoint": { "value": 37, "unit": "F", - "timestamp": "2025-01-19T21:07:55.764Z" + "timestamp": "2026-01-13T22:29:09.026Z" } } }, @@ -195,6 +240,9 @@ "fridgeMode": { "value": null }, + "supportedFullFridgeModes": { + "value": null + }, "supportedFridgeModes": { "value": null } @@ -202,19 +250,19 @@ "contactSensor": { "contact": { "value": "closed", - "timestamp": "2025-02-09T14:48:16.247Z" + "timestamp": "2026-02-14T14:42:09.828Z" } }, "samsungce.unavailableCapabilities": { "unavailableCommands": { "value": [], - "timestamp": "2024-11-08T04:14:59.899Z" + "timestamp": "2024-11-08T04:11:13.422Z" } }, "custom.disabledCapabilities": { "disabledCapabilities": { "value": ["custom.fridgeMode", 
"samsungce.freezerConvertMode"], - "timestamp": "2024-11-08T01:09:17.382Z" + "timestamp": "2024-11-08T01:25:04.838Z" } }, "temperatureMeasurement": { @@ -224,19 +272,19 @@ "temperature": { "value": 0, "unit": "F", - "timestamp": "2025-01-23T04:42:18.178Z" + "timestamp": "2026-01-30T18:15:33.427Z" } }, "custom.thermostatSetpointControl": { "minimumSetpoint": { "value": -8, "unit": "F", - "timestamp": "2025-01-19T21:07:55.764Z" + "timestamp": "2026-01-13T22:29:09.026Z" }, "maximumSetpoint": { "value": 5, "unit": "F", - "timestamp": "2025-01-19T21:07:55.764Z" + "timestamp": "2026-01-13T22:29:09.026Z" } }, "samsungce.freezerConvertMode": { @@ -255,12 +303,12 @@ "step": 1 }, "unit": "F", - "timestamp": "2025-01-19T21:07:55.764Z" + "timestamp": "2026-01-13T22:29:09.026Z" }, "coolingSetpoint": { "value": 0, "unit": "F", - "timestamp": "2025-01-19T21:07:55.764Z" + "timestamp": "2026-01-13T22:29:09.026Z" } } }, @@ -268,18 +316,19 @@ "contactSensor": { "contact": { "value": "closed", - "timestamp": "2025-02-09T16:26:21.425Z" + "timestamp": "2026-02-14T14:42:52.354Z" } }, "samsungce.dongleSoftwareInstallation": { "status": { "value": "completed", - "timestamp": "2022-02-07T10:47:54.524Z" + "timestamp": "2022-07-16T15:22:24.391Z" } }, "samsungce.deviceIdentification": { "micomAssayCode": { - "value": null + "value": "00134041", + "timestamp": "2026-01-13T22:29:09.026Z" }, "modelName": { "value": null @@ -290,19 +339,24 @@ "serialNumberExtra": { "value": null }, - "modelClassificationCode": { + "releaseCountry": { "value": null }, + "modelClassificationCode": { + "value": "00020232011511200100000030000000", + "timestamp": "2026-01-13T22:29:09.026Z" + }, "description": { - "value": null + "value": "TP2X_REF_20K", + "timestamp": "2026-01-13T22:29:09.026Z" }, "releaseYear": { - "value": 20, - "timestamp": "2024-11-08T01:09:17.382Z" + "value": 21, + "timestamp": "2024-11-08T01:25:04.838Z" }, "binaryId": { "value": "TP2X_REF_20K", - "timestamp": "2025-02-09T13:55:01.720Z" + 
"timestamp": "2026-02-14T08:38:13.351Z" } }, "samsungce.quickControl": { @@ -317,6 +371,9 @@ "fridgeMode": { "value": null }, + "supportedFullFridgeModes": { + "value": null + }, "supportedFridgeModes": { "value": null } @@ -330,59 +387,59 @@ }, "mnfv": { "value": "A-RFWW-TP2-21-COMMON_20220110", - "timestamp": "2024-12-21T22:04:22.037Z" + "timestamp": "2025-08-08T20:55:21.175Z" }, "mnhw": { "value": "MediaTek", - "timestamp": "2024-12-21T22:04:22.037Z" + "timestamp": "2025-08-08T20:55:21.175Z" }, "di": { - "value": "7db87911-7dce-1cf2-7119-b953432a2f09", - "timestamp": "2024-12-21T22:04:22.037Z" + "value": "cef5af9b-7a3e-df50-5023-be27c11ae4c8", + "timestamp": "2025-08-08T20:55:21.175Z" }, "mnsl": { "value": "http://www.samsung.com", - "timestamp": "2024-12-21T22:04:22.037Z" + "timestamp": "2025-08-08T20:55:21.175Z" }, "dmv": { "value": "res.1.1.0,sh.1.1.0", - "timestamp": "2024-12-21T22:04:22.037Z" + "timestamp": "2025-08-08T20:55:21.175Z" }, "n": { "value": "[refrigerator] Samsung", - "timestamp": "2024-12-21T22:04:22.037Z" + "timestamp": "2025-08-08T20:55:21.175Z" }, "mnmo": { - "value": "TP2X_REF_20K|00115641|0004014D011411200103000020000000", - "timestamp": "2024-12-21T22:04:22.037Z" + "value": "TP2X_REF_20K|00134041|00020232011511200100000030000000", + "timestamp": "2025-08-08T20:55:21.175Z" }, "vid": { "value": "DA-REF-NORMAL-000001", - "timestamp": "2024-12-21T22:04:22.037Z" + "timestamp": "2025-08-08T20:55:21.175Z" }, "mnmn": { "value": "Samsung Electronics", - "timestamp": "2024-12-21T22:04:22.037Z" + "timestamp": "2025-08-08T20:55:21.175Z" }, "mnml": { "value": "http://www.samsung.com", - "timestamp": "2024-12-21T22:04:22.037Z" + "timestamp": "2025-08-08T20:55:21.175Z" }, "mnpv": { "value": "DAWIT 2.0", - "timestamp": "2024-12-21T22:04:22.037Z" + "timestamp": "2025-08-08T20:55:21.175Z" }, "mnos": { "value": "TizenRT 1.0 + IPv6", - "timestamp": "2024-12-21T22:04:22.037Z" + "timestamp": "2025-08-08T20:55:21.175Z" }, "pi": { - "value": 
"7db87911-7dce-1cf2-7119-b953432a2f09", - "timestamp": "2024-12-21T22:04:22.037Z" + "value": "cef5af9b-7a3e-df50-5023-be27c11ae4c8", + "timestamp": "2025-08-08T20:55:21.175Z" }, "icv": { "value": "core.1.1.0", - "timestamp": "2024-12-21T22:04:22.037Z" + "timestamp": "2025-08-08T20:55:21.175Z" } }, "samsungce.fridgeVacationMode": { @@ -390,6 +447,299 @@ "value": null } }, + "samsungce.driverState": { + "driverState": { + "value": { + "device/0": [ + { + "href": "/alarms/vs/0", + "rep": {} + }, + { + "href": "/temperatures/vs/0", + "rep": { + "x.com.samsung.da.items": [ + { + "x.com.samsung.da.id": "0", + "x.com.samsung.da.description": "Freezer", + "x.com.samsung.da.desired": "2", + "x.com.samsung.da.current": "2", + "x.com.samsung.da.maximum": "5", + "x.com.samsung.da.minimum": "-8", + "x.com.samsung.da.unit": "Fahrenheit" + }, + { + "x.com.samsung.da.id": "1", + "x.com.samsung.da.description": "Fridge", + "x.com.samsung.da.desired": "37", + "x.com.samsung.da.current": "37", + "x.com.samsung.da.maximum": "44", + "x.com.samsung.da.minimum": "34", + "x.com.samsung.da.unit": "Fahrenheit" + } + ] + } + }, + { + "href": "/temperature/current/freezer/0", + "rep": { + "range": [-8.0, 5.0], + "units": "F", + "temperature": 2.0 + } + }, + { + "href": "/temperature/desired/freezer/0", + "rep": { + "range": [-8.0, 5.0], + "units": "F", + "temperature": 2.0 + } + }, + { + "href": "/temperature/current/cooler/0", + "rep": { + "range": [34.0, 44.0], + "units": "F", + "temperature": 37.0 + } + }, + { + "href": "/temperature/desired/cooler/0", + "rep": { + "range": [34.0, 44.0], + "units": "F", + "temperature": 37.0 + } + }, + { + "href": "/diagnosis/vs/0", + "rep": { + "x.com.samsung.da.diagnosisStart": "Ready" + } + }, + { + "href": "/energy/consumption/vs/0", + "rep": { + "x.com.samsung.da.cumulativeConsumption": "20353", + "x.com.samsung.da.cumulativePower": "1444766", + "x.com.samsung.da.cumulativeUnit": "Wh", + "x.com.samsung.da.instantaneousPower": "78", + 
"x.com.samsung.da.instantaneousPowerUnit": "W", + "x.com.samsung.da.monthlyConsumption": "39800", + "x.com.samsung.da.thismonthlyConsumption": "11768" + } + }, + { + "href": "/mode/vs/0", + "rep": { + "x.com.samsung.da.supportedModes": [ + "HOMECARE_WIZARD_V2", + "ENERGY_REPORT_MODEL", + "18K_REF_OUTDOOR_CONTROL_V2", + "SUPPORT_SABBATH_CONTROL" + ] + } + }, + { + "href": "/mode/0", + "rep": { + "supportedModes": [ + "HOMECARE_WIZARD_V2", + "ENERGY_REPORT_MODEL", + "18K_REF_OUTDOOR_CONTROL_V2", + "SUPPORT_SABBATH_CONTROL" + ] + } + }, + { + "href": "/defrost/block/vs/0", + "rep": { + "x.com.samsung.da.supportedModes": [ + "DEFROST_BLOCK_ON", + "DEFROST_BLOCK_OFF" + ], + "x.com.samsung.da.modes": ["DEFROST_BLOCK_OFF"] + } + }, + { + "href": "/sabbath/vs/0", + "rep": { + "x.com.samsung.da.sabbathMode": "Off" + } + }, + { + "href": "/realtimenotiforclient/vs/0", + "rep": { + "x.com.samsung.da.timeforshortnoti": "0", + "x.com.samsung.da.periodicnotisubscription": "true" + } + }, + { + "href": "/information/vs/0", + "rep": { + "x.com.samsung.da.modelNum": "TP2X_REF_20K|00134041|00020232011511200100000030000000", + "x.com.samsung.da.description": "TP2X_REF_20K", + "x.com.samsung.da.serialNum": "0BEF4BAT504767H", + "x.com.samsung.da.otnDUID": "BDCGCEMP2S7FI", + "x.com.samsung.da.items": [ + { + "x.com.samsung.da.id": "0", + "x.com.samsung.da.description": "WiFi Module", + "x.com.samsung.da.type": "Software", + "x.com.samsung.da.number": "02144A220110", + "x.com.samsung.da.newVersionAvailable": "0" + }, + { + "x.com.samsung.da.id": "1", + "x.com.samsung.da.description": "Micom", + "x.com.samsung.da.type": "Firmware", + "x.com.samsung.da.number": "22030712,FFFFFFFF", + "x.com.samsung.da.newVersionAvailable": "0" + } + ] + } + }, + { + "href": "/file/information/vs/0", + "rep": { + "x.com.samsung.timeoffset": "-06:00", + "x.com.samsung.supprtedtype": 1 + } + }, + { + "href": "/doors/vs/0", + "rep": { + "x.com.samsung.da.items": [ + { + "x.com.samsung.da.id": "0", + 
"x.com.samsung.da.description": "Door", + "x.com.samsung.da.openState": "Close" + }, + { + "x.com.samsung.da.id": "1", + "x.com.samsung.da.description": "Door", + "x.com.samsung.da.openState": "Close" + } + ] + } + }, + { + "href": "/door/freezer/0", + "rep": { + "openState": "Close" + } + }, + { + "href": "/door/cooler/0", + "rep": { + "openState": "Close" + } + }, + { + "href": "/configuration/vs/0", + "rep": { + "x.com.samsung.da.countryCode": "US", + "x.com.samsung.da.region": "" + } + }, + { + "href": "/defrost/delay/0", + "rep": { + "value": false + } + }, + { + "href": "/defrost/delay/vs/0", + "rep": { + "x.com.samsung.da.delayDefrost": "Off" + } + }, + { + "href": "/defrost/reservation/vs/0", + "rep": { + "x.com.samsung.da.items": [ + { + "x.com.samsung.da.id": "0", + "x.com.samsung.da.description": "Summer Season", + "x.com.samsung.da.startTime": "0000-05-01T15:00:00", + "x.com.samsung.da.period": "04:00:00", + "x.com.samsung.da.endTime": "0000-10-31T00:00:00" + }, + { + "x.com.samsung.da.id": "1", + "x.com.samsung.da.description": "Winter Season", + "x.com.samsung.da.startTime": "0000-11-01T06:00:00", + "x.com.samsung.da.period": "04:00:00", + "x.com.samsung.da.endTime": "0000-04-30T00:00:00" + } + ] + } + }, + { + "href": "/icemaker/status/0", + "rep": { + "status": "On" + } + }, + { + "href": "/icemaker/status/vs/0", + "rep": { + "x.com.samsung.da.iceMaker": "On" + } + }, + { + "href": "/refrigeration/0", + "rep": { + "defrost": false, + "rapidFreeze": false, + "rapidCool": false + } + }, + { + "href": "/refrigeration/vs/0", + "rep": { + "x.com.samsung.da.rapidFridge": "Off", + "x.com.samsung.da.rapidFreezing": "Off" + } + }, + { + "href": "/drlc/0", + "rep": { + "DRLevel": 0, + "override": false + } + }, + { + "href": "/drlc/vs/0", + "rep": { + "x.com.samsung.da.drlcLevel": "0", + "x.com.samsung.da.override": "Off", + "x.com.samsung.da.durationminutes": "0" + } + }, + { + "href": "/otninformation/vs/0", + "rep": { + "x.com.samsung.da.target": "Micom", 
+ "x.com.samsung.da.newVersionAvailable": "false" + } + }, + { + "href": "/icemaker/one/vs/0", + "rep": { + "x.com.samsung.da.iceMaker.name": "ICE_MAKER", + "x.com.samsung.da.iceMaker.state": "On", + "x.com.samsung.da.iceMaker.type": "toggle", + "x.com.samsung.da.iceMaker.iceMakingStatus": "ICESTATUS_RUN", + "x.com.samsung.da.iceType.desired": "NORMAL" + } + } + ] + }, + "timestamp": "2026-02-13T22:29:45.844Z" + } + }, "custom.disabledCapabilities": { "disabledCapabilities": { "value": [ @@ -399,18 +749,17 @@ "custom.deodorFilter", "samsungce.dongleSoftwareInstallation", "samsungce.quickControl", - "samsungce.deviceInfoPrivate", - "demandResponseLoadControl", "samsungce.fridgeVacationMode", - "sec.diagnosticsInformation" + "sec.diagnosticsInformation", + "demandResponseLoadControl" ], - "timestamp": "2025-02-09T13:55:01.720Z" + "timestamp": "2026-02-14T08:38:13.413Z" } }, "samsungce.driverVersion": { "versionNumber": { - "value": 24100101, - "timestamp": "2024-11-08T04:14:59.025Z" + "value": 25080101, + "timestamp": "2026-02-12T09:56:15.470Z" } }, "sec.diagnosticsInformation": { @@ -458,11 +807,11 @@ "value": { "state": "disabled" }, - "timestamp": "2025-01-19T21:07:55.703Z" + "timestamp": "2026-01-13T22:29:09.026Z" }, "reportStatePeriod": { "value": "enabled", - "timestamp": "2025-01-19T21:07:55.703Z" + "timestamp": "2026-01-13T22:29:09.026Z" } }, "thermostatCoolingSetpoint": { @@ -482,7 +831,7 @@ "cvroom", "onedoor" ], - "timestamp": "2024-11-08T01:09:17.382Z" + "timestamp": "2024-11-08T01:25:04.838Z" } }, "demandResponseLoadControl": { @@ -493,63 +842,71 @@ "duration": 0, "override": false }, - "timestamp": "2025-01-19T21:07:55.691Z" + "timestamp": "2026-01-13T22:29:09.026Z" } }, "samsungce.sabbathMode": { "supportedActions": { "value": ["on", "off"], - "timestamp": "2025-01-19T21:07:55.799Z" + "timestamp": "2026-01-13T22:29:09.026Z" }, "status": { "value": "off", - "timestamp": "2025-01-19T21:07:55.799Z" + "timestamp": "2026-01-13T22:29:09.026Z" } }, 
"powerConsumptionReport": { "powerConsumption": { "value": { - "energy": 1568087, - "deltaEnergy": 7, - "power": 6, - "powerEnergy": 13.555977778169844, + "energy": 1446085, + "deltaEnergy": 21, + "power": 74, + "powerEnergy": 20.573116664422884, "persistedEnergy": 0, "energySaved": 0, - "start": "2025-02-09T17:38:01Z", - "end": "2025-02-09T17:49:00Z" + "start": "2026-02-14T14:35:06Z", + "end": "2026-02-14T14:51:07Z" }, - "timestamp": "2025-02-09T17:49:00.507Z" + "timestamp": "2026-02-14T14:51:07.863Z" } }, "refresh": {}, "execute": { "data": { - "value": { - "payload": { - "rt": ["x.com.samsung.da.rm.micomdata"], - "if": ["oic.if.baseline", "oic.if.a"], - "x.com.samsung.rm.micomdata": "D0C0022B00000000000DFE15051F5AA54400000000000000000000000000000000000000000000000001F04A00C5E0", - "x.com.samsung.rm.micomdataLength": 94 + "value": null + } + }, + "samsungce.softwareVersion": { + "versions": { + "value": [ + { + "id": "0", + "swType": "Software", + "versionNumber": "02144A220110", + "description": "WiFi Module" + }, + { + "id": "1", + "swType": "Firmware", + "versionNumber": "22030712,FFFFFFFF", + "description": "Micom" } - }, - "data": { - "href": "/rm/micomdata/vs/0" - }, - "timestamp": "2023-07-19T05:25:39.852Z" + ], + "timestamp": "2026-01-13T22:29:09.026Z" } }, "refrigeration": { "defrost": { "value": "off", - "timestamp": "2025-01-19T21:07:55.772Z" + "timestamp": "2026-01-13T22:29:09.026Z" }, "rapidCooling": { "value": "off", - "timestamp": "2025-01-19T21:07:55.725Z" + "timestamp": "2026-01-13T22:29:09.026Z" }, "rapidFreezing": { "value": "off", - "timestamp": "2025-01-19T21:07:55.725Z" + "timestamp": "2026-01-21T23:20:21.048Z" } }, "custom.deodorFilter": { @@ -574,23 +931,23 @@ }, "samsungce.powerCool": { "activated": { - "value": true, - "timestamp": "2025-01-19T21:07:55.725Z" + "value": false, + "timestamp": "2026-01-13T22:29:09.026Z" } }, "custom.energyType": { "energyType": { "value": "2.0", - "timestamp": "2022-02-07T10:47:54.524Z" + "timestamp": 
"2022-07-16T15:22:24.391Z" }, "energySavingSupport": { "value": false, - "timestamp": "2022-02-07T10:47:54.524Z" + "timestamp": "2022-07-16T15:22:24.391Z" }, "drMaxDuration": { "value": 1440, "unit": "min", - "timestamp": "2022-02-07T11:39:47.504Z" + "timestamp": "2022-07-16T15:22:25.083Z" }, "energySavingLevel": { "value": null @@ -609,28 +966,28 @@ }, "energySavingOperationSupport": { "value": false, - "timestamp": "2022-02-07T11:39:47.504Z" + "timestamp": "2022-07-16T15:22:25.083Z" } }, "samsungce.softwareUpdate": { "targetModule": { "value": {}, - "timestamp": "2025-01-19T21:07:55.725Z" + "timestamp": "2026-01-15T10:36:37.596Z" }, "otnDUID": { - "value": "P7CNQWBWM3XBW", - "timestamp": "2025-01-19T21:07:55.744Z" + "value": "BDCGCEMP2S7FI", + "timestamp": "2026-01-13T22:29:09.026Z" }, "lastUpdatedDate": { "value": null }, "availableModules": { "value": [], - "timestamp": "2025-01-19T21:07:55.744Z" + "timestamp": "2026-01-15T10:36:37.711Z" }, "newVersionAvailable": { "value": false, - "timestamp": "2025-01-19T21:07:55.725Z" + "timestamp": "2026-01-13T22:29:09.026Z" }, "operatingState": { "value": null @@ -642,7 +999,7 @@ "samsungce.powerFreeze": { "activated": { "value": false, - "timestamp": "2025-01-19T21:07:55.725Z" + "timestamp": "2026-01-21T23:20:21.048Z" } }, "custom.waterFilter": { @@ -678,6 +1035,9 @@ "fridgeMode": { "value": null }, + "supportedFullFridgeModes": { + "value": null + }, "supportedFridgeModes": { "value": null } @@ -690,7 +1050,12 @@ "custom.disabledCapabilities": { "disabledCapabilities": { "value": ["temperatureMeasurement", "thermostatCoolingSetpoint"], - "timestamp": "2022-02-07T11:39:42.105Z" + "timestamp": "2022-07-16T15:22:24.391Z" + } + }, + "samsungce.fridgeZoneInfo": { + "name": { + "value": null } }, "temperatureMeasurement": { @@ -711,10 +1076,15 @@ } }, "icemaker-02": { + "samsungce.fridgeIcemakerInfo": { + "name": { + "value": null + } + }, "custom.disabledCapabilities": { "disabledCapabilities": { "value": [], - "timestamp": 
"2022-02-07T11:39:42.105Z" + "timestamp": "2022-07-16T15:22:24.391Z" } }, "switch": { diff --git a/tests/components/smartthings/fixtures/device_status/siemens_washer.json b/tests/components/smartthings/fixtures/device_status/siemens_washer.json new file mode 100644 index 00000000000000..590be06286cf2c --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/siemens_washer.json @@ -0,0 +1,76 @@ +{ + "components": { + "main": { + "signalahead13665.pauseresumev2": { + "pauseState": { + "value": "play", + "timestamp": "2026-02-14T10:26:29.493Z" + } + }, + "signalahead13665.startstopprogramv2": { + "startstop": { + "value": "play", + "timestamp": "2026-02-14T10:26:29.493Z" + } + }, + "healthCheck": { + "checkInterval": { + "value": 60, + "unit": "s", + "data": { + "deviceScheme": "UNTRACKED", + "protocol": "cloud" + }, + "timestamp": "2025-11-18T19:30:58.067Z" + }, + "healthStatus": { + "value": null + }, + "DeviceWatch-Enroll": { + "value": null + }, + "DeviceWatch-DeviceStatus": { + "value": "online", + "data": {}, + "timestamp": "2026-02-14T10:56:39.394Z" + } + }, + "refresh": {}, + "signalahead13665.applianceoperationstatesv2": { + "operationState": { + "value": "Finished", + "timestamp": "2026-02-14T10:26:29.493Z" + } + }, + "signalahead13665.washerprogramsv2": { + "availablePrograms": { + "value": [ + "LaundryCare_Washer_Program_Mix", + "LaundryCare_Washer_Program_ShirtsBlouses", + "LaundryCare_Washer_Program_Cotton", + "LaundryCare_Washer_Program_Cotton_CottonEco", + "LaundryCare_Washer_Program_EasyCare", + "LaundryCare_Washer_Program_Wool", + "LaundryCare_Washer_Program_Auto40", + "LaundryCare_Washer_Program_Super153045_Super1530", + "LaundryCare_Washer_Program_DelicatesSilk", + "LaundryCare_Washer_Program_Auto30", + "LaundryCare_Washer_Program_Sensitive" + ], + "timestamp": "2026-02-14T08:35:09.780Z" + }, + "program": { + "value": "None", + "data": {}, + "timestamp": "2026-02-14T08:35:09.780Z" + } + }, + "switch": { + "switch": { + "value": "on", 
+ "timestamp": "2026-02-09T08:35:03.405Z" + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/devices/da_ref_normal_000001.json b/tests/components/smartthings/fixtures/devices/da_ref_normal_000001.json index 29372cac23cf92..04665d98b5c8ce 100644 --- a/tests/components/smartthings/fixtures/devices/da_ref_normal_000001.json +++ b/tests/components/smartthings/fixtures/devices/da_ref_normal_000001.json @@ -96,6 +96,10 @@ "id": "samsungce.driverVersion", "version": 1 }, + { + "id": "samsungce.driverState", + "version": 1 + }, { "id": "samsungce.fridgeVacationMode", "version": 1 @@ -116,6 +120,10 @@ "id": "samsungce.quickControl", "version": 1 }, + { + "id": "samsungce.softwareVersion", + "version": 1 + }, { "id": "sec.diagnosticsInformation", "version": 1 @@ -130,7 +138,8 @@ "name": "Refrigerator", "categoryType": "user" } - ] + ], + "optional": false }, { "id": "freezer", @@ -174,7 +183,8 @@ "name": "Other", "categoryType": "manufacturer" } - ] + ], + "optional": false }, { "id": "cooler", @@ -214,7 +224,8 @@ "name": "Other", "categoryType": "manufacturer" } - ] + ], + "optional": false }, { "id": "cvroom", @@ -239,6 +250,10 @@ { "id": "custom.fridgeMode", "version": 1 + }, + { + "id": "samsungce.fridgeZoneInfo", + "version": 1 } ], "categories": [ @@ -246,7 +261,8 @@ "name": "Other", "categoryType": "manufacturer" } - ] + ], + "optional": false }, { "id": "onedoor", @@ -280,6 +296,10 @@ "id": "samsungce.freezerConvertMode", "version": 1 }, + { + "id": "samsungce.fridgeZoneInfo", + "version": 1 + }, { "id": "samsungce.unavailableCapabilities", "version": 1 @@ -290,7 +310,8 @@ "name": "Other", "categoryType": "manufacturer" } - ] + ], + "optional": false }, { "id": "icemaker", @@ -300,6 +321,10 @@ "id": "switch", "version": 1 }, + { + "id": "samsungce.fridgeIcemakerInfo", + "version": 1 + }, { "id": "custom.disabledCapabilities", "version": 1 @@ -310,7 +335,8 @@ "name": "Other", "categoryType": "manufacturer" } - ] + ], + "optional": false }, { "id": 
"icemaker-02", @@ -320,6 +346,10 @@ "id": "switch", "version": 1 }, + { + "id": "samsungce.fridgeIcemakerInfo", + "version": 1 + }, { "id": "custom.disabledCapabilities", "version": 1 @@ -330,12 +360,17 @@ "name": "Other", "categoryType": "manufacturer" } - ] + ], + "optional": false }, { "id": "pantry-01", "label": "pantry-01", "capabilities": [ + { + "id": "custom.fridgeMode", + "version": 1 + }, { "id": "samsungce.fridgePantryInfo", "version": 1 @@ -354,12 +389,17 @@ "name": "Other", "categoryType": "manufacturer" } - ] + ], + "optional": false }, { "id": "pantry-02", "label": "pantry-02", "capabilities": [ + { + "id": "custom.fridgeMode", + "version": 1 + }, { "id": "samsungce.fridgePantryInfo", "version": 1 @@ -378,7 +418,8 @@ "name": "Other", "categoryType": "manufacturer" } - ] + ], + "optional": false } ], "createTime": "2022-01-08T16:50:43.544Z", diff --git a/tests/components/smartthings/fixtures/devices/siemens_washer.json b/tests/components/smartthings/fixtures/devices/siemens_washer.json new file mode 100644 index 00000000000000..84b1d7220f1569 --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/siemens_washer.json @@ -0,0 +1,74 @@ +{ + "items": [ + { + "deviceId": "42a6aa8d-9bce-4f80-bc29-d9f8b8dc1af1", + "name": "Washer-v0.13", + "label": "Wasmachine", + "manufacturerName": "0A5j", + "presentationId": "0a504464-b2b6-3a32-b3ad-44ca2bb380f5", + "deviceManufacturerCode": "Siemens", + "locationId": "04b44aee-2bd7-44e3-8303-42834a57d568", + "ownerId": "3854926e-dee0-524a-0817-66f0f5613d77", + "roomId": "c934a1a9-c8d5-4a9a-bca5-a958282428b2", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "signalahead13665.washerprogramsv2", + "version": 1 + }, + { + "id": "signalahead13665.startstopprogramv2", + "version": 1 + }, + { + "id": "signalahead13665.pauseresumev2", + "version": 1 + }, + { + "id": "switch", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + }, + { + "id": "healthCheck", + "version": 
1 + }, + { + "id": "signalahead13665.applianceoperationstatesv2", + "version": 1 + } + ], + "categories": [ + { + "name": "Washer", + "categoryType": "manufacturer" + } + ], + "optional": false + } + ], + "createTime": "2025-11-18T19:30:58.002Z", + "profile": { + "id": "7a50ea9d-9b44-4265-9c47-28042358123f" + }, + "viper": { + "uniqueIdentifier": "SIEMENS-WM14T6H6NL-68A40E366901", + "manufacturerName": "Siemens", + "modelName": "WM14T6H6NL", + "endpointAppId": "viper_f8009b80-d4c4-11eb-89df-5bbe1b05472c" + }, + "type": "VIPER", + "restrictionTier": 0, + "allowed": null, + "executionContext": "CLOUD", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/snapshots/test_binary_sensor.ambr b/tests/components/smartthings/snapshots/test_binary_sensor.ambr index 7e69088cabe6f6..ca4ac2193f18f4 100644 --- a/tests/components/smartthings/snapshots/test_binary_sensor.ambr +++ b/tests/components/smartthings/snapshots/test_binary_sensor.ambr @@ -3911,6 +3911,56 @@ 'state': 'off', }) # --- +# name: test_all_entities[siemens_washer][binary_sensor.wasmachine_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': <ANY>, + 'config_subentry_id': <ANY>, + 'device_class': None, + 'device_id': <ANY>, + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.wasmachine_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': <ANY>, + 'labels': set({ + }), + 'name': None, + 'object_id_base': 'Power', + 'options': dict({ + }), + 'original_device_class': <BinarySensorDeviceClass.POWER: 'power'>, + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '42a6aa8d-9bce-4f80-bc29-d9f8b8dc1af1_main_switch_switch_switch', + 'unit_of_measurement': None, + }) 
+# --- +# name: test_all_entities[siemens_washer][binary_sensor.wasmachine_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Wasmachine Power', + }), + 'context': <ANY>, + 'entity_id': 'binary_sensor.wasmachine_power', + 'last_changed': <ANY>, + 'last_reported': <ANY>, + 'last_updated': <ANY>, + 'state': 'on', + }) +# --- # name: test_all_entities[virtual_water_sensor][binary_sensor.asd_moisture-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/smartthings/snapshots/test_init.ambr b/tests/components/smartthings/snapshots/test_init.ambr index 466e16cea29a86..911ba8ec8301c5 100644 --- a/tests/components/smartthings/snapshots/test_init.ambr +++ b/tests/components/smartthings/snapshots/test_init.ambr @@ -2110,6 +2110,37 @@ 'via_device_id': None, }) # --- +# name: test_devices[siemens_washer] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': <ANY>, + 'config_entries_subentries': <ANY>, + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': <ANY>, + 'identifiers': set({ + tuple( + 'smartthings', + '42a6aa8d-9bce-4f80-bc29-d9f8b8dc1af1', + ), + }), + 'labels': set({ + }), + 'manufacturer': 'Siemens', + 'model': 'WM14T6H6NL', + 'model_id': None, + 'name': 'Wasmachine', + 'name_by_user': None, + 'primary_config_entry': <ANY>, + 'serial_number': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- # name: test_devices[smart_plug] DeviceRegistryEntrySnapshot({ 'area_id': 'theater', diff --git a/tests/components/smartthings/snapshots/test_sensor.ambr b/tests/components/smartthings/snapshots/test_sensor.ambr index a0ac3f8cc0bff0..55daeb9beca439 100644 --- a/tests/components/smartthings/snapshots/test_sensor.ambr +++ b/tests/components/smartthings/snapshots/test_sensor.ambr @@ -8822,7 +8822,7 @@ 'last_changed': <ANY>, 'last_reported': <ANY>, 
'last_updated': <ANY>, - 'state': '1568.087', + 'state': '1446.085', }) # --- # name: test_all_entities[da_ref_normal_000001][sensor.refrigerator_energy_difference-entry] @@ -8879,7 +8879,7 @@ 'last_changed': <ANY>, 'last_reported': <ANY>, 'last_updated': <ANY>, - 'state': '0.007', + 'state': '0.021', }) # --- # name: test_all_entities[da_ref_normal_000001][sensor.refrigerator_energy_saved-entry] @@ -9099,8 +9099,8 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'Refrigerator Power', - 'power_consumption_end': '2025-02-09T17:49:00Z', - 'power_consumption_start': '2025-02-09T17:38:01Z', + 'power_consumption_end': '2026-02-14T14:51:07Z', + 'power_consumption_start': '2026-02-14T14:35:06Z', 'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>, 'unit_of_measurement': <UnitOfPower.WATT: 'W'>, }), @@ -9109,7 +9109,7 @@ 'last_changed': <ANY>, 'last_reported': <ANY>, 'last_updated': <ANY>, - 'state': '6', + 'state': '74', }) # --- # name: test_all_entities[da_ref_normal_000001][sensor.refrigerator_power_energy-entry] @@ -9166,7 +9166,7 @@ 'last_changed': <ANY>, 'last_reported': <ANY>, 'last_updated': <ANY>, - 'state': '0.0135559777781698', + 'state': '0.0205731166644229', }) # --- # name: test_all_entities[da_ref_normal_000001][sensor.refrigerator_water_filter_usage-entry] diff --git a/tests/components/smartthings/snapshots/test_switch.ambr b/tests/components/smartthings/snapshots/test_switch.ambr index d9ccafd555698a..bb451be10d9cac 100644 --- a/tests/components/smartthings/snapshots/test_switch.ambr +++ b/tests/components/smartthings/snapshots/test_switch.ambr @@ -339,7 +339,7 @@ 'last_changed': <ANY>, 'last_reported': <ANY>, 'last_updated': <ANY>, - 'state': 'off', + 'state': 'on', }) # --- # name: test_all_entities[da_ref_normal_000001][switch.refrigerator_power_cool-entry] @@ -388,7 +388,7 @@ 'last_changed': <ANY>, 'last_reported': <ANY>, 'last_updated': <ANY>, - 'state': 'on', + 'state': 'off', }) # --- # name: 
test_all_entities[da_ref_normal_000001][switch.refrigerator_power_freeze-entry] diff --git a/tests/components/teltonika/__init__.py b/tests/components/teltonika/__init__.py new file mode 100644 index 00000000000000..f5c40a39a2f7b0 --- /dev/null +++ b/tests/components/teltonika/__init__.py @@ -0,0 +1 @@ +"""Tests for Teltonika.""" diff --git a/tests/components/teltonika/conftest.py b/tests/components/teltonika/conftest.py new file mode 100644 index 00000000000000..db90e8b8230b49 --- /dev/null +++ b/tests/components/teltonika/conftest.py @@ -0,0 +1,111 @@ +"""Fixtures for Teltonika tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from teltasync.modems import ModemStatusFull +from teltasync.system import DeviceStatusData +from teltasync.unauthorized import UnauthorizedStatusData + +from homeassistant.components.teltonika.const import DOMAIN +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry, load_json_object_fixture + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Mock setting up a config entry.""" + with patch( + "homeassistant.components.teltonika.async_setup_entry", + return_value=True, + ) as mock_setup: + yield mock_setup + + +@pytest.fixture +def mock_teltasync() -> Generator[MagicMock]: + """Mock Teltasync client for both config flow and init.""" + with ( + patch( + "homeassistant.components.teltonika.config_flow.Teltasync", + autospec=True, + ) as mock_teltasync_class, + patch( + "homeassistant.components.teltonika.Teltasync", + new=mock_teltasync_class, + ), + ): + shared_client = mock_teltasync_class.return_value + + device_info = load_json_object_fixture("device_info.json", DOMAIN) + shared_client.get_device_info.return_value = UnauthorizedStatusData( + **device_info + ) + + system_info = load_json_object_fixture("system_info.json", DOMAIN) + 
shared_client.get_system_info.return_value = DeviceStatusData(**system_info) + + yield mock_teltasync_class + + +@pytest.fixture +def mock_teltasync_client(mock_teltasync: MagicMock) -> MagicMock: + """Return the client instance from mock_teltasync.""" + return mock_teltasync.return_value + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + device_data = load_json_object_fixture("device_data.json", DOMAIN) + return MockConfigEntry( + domain=DOMAIN, + title="RUTX50 Test", + data={ + CONF_HOST: "192.168.1.1", + CONF_USERNAME: "admin", + CONF_PASSWORD: "test_password", + }, + unique_id=device_data["system_info"]["mnf_info"]["serial"], + ) + + +@pytest.fixture +def mock_modems() -> Generator[AsyncMock]: + """Mock Modems class.""" + with patch( + "homeassistant.components.teltonika.coordinator.Modems", + autospec=True, + ) as mock_modems_class: + mock_modems_instance = mock_modems_class.return_value + + # Load device data to get modem info + device_data = load_json_object_fixture("device_data.json", DOMAIN) + # Create response object with data attribute + response_mock = MagicMock() + response_mock.data = [ + ModemStatusFull(**modem) for modem in device_data["modems_data"] + ] + mock_modems_instance.get_status.return_value = response_mock + + # Mock is_online to return True for the modem + mock_modems_class.is_online = MagicMock(return_value=True) + + yield mock_modems_instance + + +@pytest.fixture +async def init_integration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_teltasync: MagicMock, + mock_modems: MagicMock, +) -> MockConfigEntry: + """Set up the Teltonika integration for testing.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + return mock_config_entry diff --git a/tests/components/teltonika/fixtures/device_data.json b/tests/components/teltonika/fixtures/device_data.json new 
file mode 100644 index 00000000000000..15c8c070543c9f --- /dev/null +++ b/tests/components/teltonika/fixtures/device_data.json @@ -0,0 +1,55 @@ +{ + "device_info": { + "lang": "en", + "filename": null, + "device_name": "RUTX50 Test", + "device_model": "RUTX50", + "api_version": "1.9.2", + "device_identifier": "abcd1234567890ef1234567890abcdef", + "serial": "1234567890", + "model": "RUTX50" + }, + "system_info": { + "mnf_info": { + "mac_eth": "001122334455", + "name": "RUTX5000XXXX", + "hw_ver": "0202", + "batch": "0024", + "serial": "1234567890", + "mac": "001122334456", + "bl_ver": "3.0" + }, + "static": { + "fw_version": "RUTX_R_00.07.17.3", + "kernel": "6.6.96", + "system": "ARMv7 Processor rev 5 (v7l)", + "device_name": "RUTX50 Test", + "hostname": "RUTX50", + "cpu_count": 4, + "model": "RUTX50" + } + }, + "modems_data": [ + { + "id": "2-1", + "imei": "123456789012345", + "model": "RG501Q-EU", + "name": "Internal modem", + "temperature": 42, + "signal": -63, + "operator": "test.operator", + "conntype": "5G (NSA)", + "state": "Connected", + "rssi": -63, + "rsrp": -93, + "rsrq": -10, + "sinr": 15, + "band": "5G N3", + "active_sim": 1, + "simstate": "Inserted", + "data_conn_state": "Connected", + "txbytes": 215863700781, + "rxbytes": 445573412885 + } + ] +} diff --git a/tests/components/teltonika/fixtures/device_info.json b/tests/components/teltonika/fixtures/device_info.json new file mode 100644 index 00000000000000..363893c1b91f0a --- /dev/null +++ b/tests/components/teltonika/fixtures/device_info.json @@ -0,0 +1,12 @@ +{ + "lang": "en", + "filename": null, + "device_name": "RUTX50 Test", + "device_model": "RUTX50", + "api_version": "1.9.2", + "device_identifier": "1234567890", + "security_banner": { + "title": "Unauthorized access prohibited", + "message": "This system is for authorized use only. All activities on this system are logged and monitored. By using this system, you consent to such monitoring. 
Unauthorized access or misuse may result in disciplinary action, civil and criminal penalties, or both.\n\nIf you are not authorized to use this system, disconnect immediately." + } +} diff --git a/tests/components/teltonika/fixtures/system_info.json b/tests/components/teltonika/fixtures/system_info.json new file mode 100644 index 00000000000000..1d672daae53692 --- /dev/null +++ b/tests/components/teltonika/fixtures/system_info.json @@ -0,0 +1,236 @@ +{ + "mnf_info": { + "mac_eth": "001122334455", + "name": "RUTX5000XXXX", + "hw_ver": "0202", + "batch": "0024", + "serial": "1234567890", + "mac": "001122334456", + "bl_ver": "3.0" + }, + "static": { + "fw_version": "RUTX_R_00.07.17.3", + "kernel": "6.6.96", + "system": "ARMv7 Processor rev 5 (v7l)", + "device_name": "RUTX50 Test", + "hostname": "RUTX50", + "cpu_count": 4, + "release": { + "distribution": "OpenWrt", + "revision": "r16279-5cc0535800", + "version": "21.02.0", + "target": "ipq40xx/generic", + "description": "OpenWrt 21.02.0 r16279-5cc0535800" + }, + "fw_build_date": "2025-09-04 14:49:05", + "model": "RUTX50", + "board_name": "teltonika,rutx" + }, + "features": { + "ipv6": true + }, + "board": { + "modems": [ + { + "id": "2-1", + "num": "1", + "builtin": true, + "sim_count": 2, + "gps_out": true, + "primary": true, + "revision": "RG501QEUAAR12A11M4G_04.202.04.202", + "modem_func_id": 2, + "multi_apn": true, + "operator_scan": true, + "dhcp_filter": true, + "dynamic_mtu": true, + "ipv6": true, + "volte": true, + "csd": false, + "band_list": [ + "WCDMA_850", + "WCDMA_900", + "WCDMA_2100", + "LTE_B1", + "LTE_B3", + "LTE_B5", + "LTE_B7", + "LTE_B8", + "LTE_B20", + "LTE_B28", + "LTE_B32", + "LTE_B38", + "LTE_B40", + "LTE_B41", + "LTE_B42", + "LTE_B43", + "NSA_5G_N1", + "NSA_5G_N3", + "NSA_5G_N5", + "NSA_5G_N7", + "NSA_5G_N8", + "NSA_5G_N20", + "NSA_5G_N28", + "NSA_5G_N38", + "NSA_5G_N40", + "NSA_5G_N41", + "NSA_5G_N77", + "NSA_5G_N78", + "5G_N1", + "5G_N3", + "5G_N5", + "5G_N7", + "5G_N8", + "5G_N20", + 
"5G_N28", + "5G_N38", + "5G_N40", + "5G_N41", + "5G_N77", + "5G_N78" + ], + "product": "0800", + "vendor": "2c7c", + "gps": "1", + "stop_bits": "8", + "baudrate": "115200", + "type": "gobinet", + "desc": "Quectel RG50X", + "control": "2" + } + ], + "network": { + "wan": { + "proto": "dhcp", + "device": "eth1", + "default_ip": null + }, + "lan": { + "proto": "static", + "device": "eth0", + "default_ip": "192.168.1.1" + } + }, + "model": { + "id": "teltonika,rutx", + "platform": "RUTX", + "name": "RUTX50" + }, + "usb_jack": "/usb3/3-1/", + "network_options": { + "readonly_vlans": 2, + "max_mtu": 9000, + "vlans": 128 + }, + "switch": { + "switch0": { + "enable": true, + "roles": [ + { + "ports": "1 2 3 4 0", + "role": "lan", + "device": "eth0" + }, + { + "ports": "5 0", + "role": "wan", + "device": "eth1" + } + ], + "ports": [ + { + "device": "eth0", + "num": 0, + "want_untag": true, + "need_tag": false, + "role": null, + "index": null + }, + { + "device": null, + "num": 1, + "want_untag": null, + "need_tag": null, + "role": "lan", + "index": null + }, + { + "device": null, + "num": 2, + "want_untag": null, + "need_tag": null, + "role": "lan", + "index": null + }, + { + "device": null, + "num": 3, + "want_untag": null, + "need_tag": null, + "role": "lan", + "index": null + }, + { + "device": null, + "num": 4, + "want_untag": null, + "need_tag": null, + "role": "lan", + "index": null + }, + { + "device": "eth1", + "num": 0, + "want_untag": true, + "need_tag": false, + "role": null, + "index": null + }, + { + "device": null, + "num": 5, + "want_untag": null, + "need_tag": null, + "role": "wan", + "index": null + } + ], + "reset": true + } + }, + "hw_info": { + "wps": false, + "rs232": false, + "nat_offloading": true, + "dual_sim": true, + "bluetooth": false, + "soft_port_mirror": false, + "vcert": null, + "micro_usb": false, + "wifi": true, + "sd_card": false, + "multi_tag": true, + "dual_modem": false, + "sfp_switch": null, + "dsa": false, + "hw_nat": false, + 
"sw_rst_on_init": null, + "at_sim": true, + "port_link": true, + "ios": true, + "usb": true, + "console": false, + "dual_band_ssid": true, + "gps": true, + "ethernet": true, + "sfp_port": false, + "rs485": false, + "mobile": true, + "poe": false, + "gigabit_port": true, + "field_2_5_gigabit_port": false, + "esim": false, + "modem_reset": null + } + } +} diff --git a/tests/components/teltonika/snapshots/test_init.ambr b/tests/components/teltonika/snapshots/test_init.ambr new file mode 100644 index 00000000000000..b12a01f3b1c8ee --- /dev/null +++ b/tests/components/teltonika/snapshots/test_init.ambr @@ -0,0 +1,32 @@ +# serializer version: 1 +# name: test_device_registry_creation + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': <ANY>, + 'config_entries_subentries': <ANY>, + 'configuration_url': 'https://192.168.1.1', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': <ANY>, + 'identifiers': set({ + tuple( + 'teltonika', + '1234567890', + ), + }), + 'labels': set({ + }), + 'manufacturer': 'Teltonika', + 'model': 'RUTX50', + 'model_id': None, + 'name': 'RUTX50 Test', + 'name_by_user': None, + 'primary_config_entry': <ANY>, + 'serial_number': '1234567890', + 'sw_version': 'RUTX_R_00.07.17.3', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/teltonika/snapshots/test_sensor.ambr b/tests/components/teltonika/snapshots/test_sensor.ambr new file mode 100644 index 00000000000000..566fdff32e8934 --- /dev/null +++ b/tests/components/teltonika/snapshots/test_sensor.ambr @@ -0,0 +1,433 @@ +# serializer version: 1 +# name: test_sensors[sensor.rutx50_test_internal_modem_band-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': <ANY>, + 'config_subentry_id': <ANY>, + 'device_class': None, + 'device_id': <ANY>, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.rutx50_test_internal_modem_band', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': <ANY>, + 'labels': set({ + }), + 'name': None, + 'object_id_base': 'Internal modem Band', + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Internal modem Band', + 'platform': 'teltonika', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'band', + 'unique_id': '1234567890_2-1_band', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.rutx50_test_internal_modem_band-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'RUTX50 Test Internal modem Band', + }), + 'context': <ANY>, + 'entity_id': 'sensor.rutx50_test_internal_modem_band', + 'last_changed': <ANY>, + 'last_reported': <ANY>, + 'last_updated': <ANY>, + 'state': '5G N3', + }) +# --- +# name: test_sensors[sensor.rutx50_test_internal_modem_connection_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': <ANY>, + 'config_subentry_id': <ANY>, + 'device_class': None, + 'device_id': <ANY>, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.rutx50_test_internal_modem_connection_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': <ANY>, + 'labels': set({ + }), + 'name': None, + 'object_id_base': 'Internal modem Connection type', + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Internal modem Connection type', + 'platform': 'teltonika', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'connection_type', + 'unique_id': '1234567890_2-1_connection_type', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.rutx50_test_internal_modem_connection_type-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'friendly_name': 'RUTX50 Test Internal modem Connection type', + }), + 'context': <ANY>, + 'entity_id': 'sensor.rutx50_test_internal_modem_connection_type', + 'last_changed': <ANY>, + 'last_reported': <ANY>, + 'last_updated': <ANY>, + 'state': '5G (NSA)', + }) +# --- +# name: test_sensors[sensor.rutx50_test_internal_modem_operator-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': <ANY>, + 'config_subentry_id': <ANY>, + 'device_class': None, + 'device_id': <ANY>, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.rutx50_test_internal_modem_operator', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': <ANY>, + 'labels': set({ + }), + 'name': None, + 'object_id_base': 'Internal modem Operator', + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Internal modem Operator', + 'platform': 'teltonika', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'operator', + 'unique_id': '1234567890_2-1_operator', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.rutx50_test_internal_modem_operator-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'RUTX50 Test Internal modem Operator', + }), + 'context': <ANY>, + 'entity_id': 'sensor.rutx50_test_internal_modem_operator', + 'last_changed': <ANY>, + 'last_reported': <ANY>, + 'last_updated': <ANY>, + 'state': 'test.operator', + }) +# --- +# name: test_sensors[sensor.rutx50_test_internal_modem_rsrp-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>, + }), + 'config_entry_id': <ANY>, + 'config_subentry_id': <ANY>, + 'device_class': None, + 'device_id': <ANY>, + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.rutx50_test_internal_modem_rsrp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': <ANY>, + 'labels': set({ + }), + 'name': None, + 'object_id_base': 'Internal modem RSRP', + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': <SensorDeviceClass.SIGNAL_STRENGTH: 'signal_strength'>, + 'original_icon': None, + 'original_name': 'Internal modem RSRP', + 'platform': 'teltonika', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'rsrp', + 'unique_id': '1234567890_2-1_rsrp', + 'unit_of_measurement': 'dBm', + }) +# --- +# name: test_sensors[sensor.rutx50_test_internal_modem_rsrp-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'signal_strength', + 'friendly_name': 'RUTX50 Test Internal modem RSRP', + 'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>, + 'unit_of_measurement': 'dBm', + }), + 'context': <ANY>, + 'entity_id': 'sensor.rutx50_test_internal_modem_rsrp', + 'last_changed': <ANY>, + 'last_reported': <ANY>, + 'last_updated': <ANY>, + 'state': '-93', + }) +# --- +# name: test_sensors[sensor.rutx50_test_internal_modem_rsrq-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>, + }), + 'config_entry_id': <ANY>, + 'config_subentry_id': <ANY>, + 'device_class': None, + 'device_id': <ANY>, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.rutx50_test_internal_modem_rsrq', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': <ANY>, + 'labels': set({ + }), + 'name': None, + 'object_id_base': 'Internal modem RSRQ', + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': <SensorDeviceClass.SIGNAL_STRENGTH: 
'signal_strength'>, + 'original_icon': None, + 'original_name': 'Internal modem RSRQ', + 'platform': 'teltonika', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'rsrq', + 'unique_id': '1234567890_2-1_rsrq', + 'unit_of_measurement': 'dB', + }) +# --- +# name: test_sensors[sensor.rutx50_test_internal_modem_rsrq-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'signal_strength', + 'friendly_name': 'RUTX50 Test Internal modem RSRQ', + 'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>, + 'unit_of_measurement': 'dB', + }), + 'context': <ANY>, + 'entity_id': 'sensor.rutx50_test_internal_modem_rsrq', + 'last_changed': <ANY>, + 'last_reported': <ANY>, + 'last_updated': <ANY>, + 'state': '-10', + }) +# --- +# name: test_sensors[sensor.rutx50_test_internal_modem_rssi-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>, + }), + 'config_entry_id': <ANY>, + 'config_subentry_id': <ANY>, + 'device_class': None, + 'device_id': <ANY>, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.rutx50_test_internal_modem_rssi', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': <ANY>, + 'labels': set({ + }), + 'name': None, + 'object_id_base': 'Internal modem RSSI', + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': <SensorDeviceClass.SIGNAL_STRENGTH: 'signal_strength'>, + 'original_icon': None, + 'original_name': 'Internal modem RSSI', + 'platform': 'teltonika', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'rssi', + 'unique_id': '1234567890_2-1_rssi', + 'unit_of_measurement': 'dBm', + }) +# --- +# name: test_sensors[sensor.rutx50_test_internal_modem_rssi-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'signal_strength', + 'friendly_name': 'RUTX50 Test Internal modem RSSI', + 'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>, + 'unit_of_measurement': 'dBm', + }), + 'context': <ANY>, + 'entity_id': 'sensor.rutx50_test_internal_modem_rssi', + 'last_changed': <ANY>, + 'last_reported': <ANY>, + 'last_updated': <ANY>, + 'state': '-63', + }) +# --- +# name: test_sensors[sensor.rutx50_test_internal_modem_sinr-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>, + }), + 'config_entry_id': <ANY>, + 'config_subentry_id': <ANY>, + 'device_class': None, + 'device_id': <ANY>, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.rutx50_test_internal_modem_sinr', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': <ANY>, + 'labels': set({ + }), + 'name': None, + 'object_id_base': 'Internal modem SINR', + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': <SensorDeviceClass.SIGNAL_STRENGTH: 'signal_strength'>, + 'original_icon': None, + 'original_name': 'Internal modem SINR', + 'platform': 'teltonika', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'sinr', + 'unique_id': '1234567890_2-1_sinr', + 'unit_of_measurement': 'dB', + }) +# --- +# name: test_sensors[sensor.rutx50_test_internal_modem_sinr-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'signal_strength', + 'friendly_name': 'RUTX50 Test Internal modem SINR', + 'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>, + 'unit_of_measurement': 'dB', + }), + 'context': <ANY>, + 'entity_id': 'sensor.rutx50_test_internal_modem_sinr', + 'last_changed': <ANY>, + 'last_reported': <ANY>, + 'last_updated': <ANY>, + 'state': '15', + }) +# --- +# 
name: test_sensors[sensor.rutx50_test_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>, + }), + 'config_entry_id': <ANY>, + 'config_subentry_id': <ANY>, + 'device_class': None, + 'device_id': <ANY>, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.rutx50_test_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': <ANY>, + 'labels': set({ + }), + 'name': None, + 'object_id_base': 'Temperature', + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': <SensorDeviceClass.TEMPERATURE: 'temperature'>, + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'teltonika', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234567890_2-1_temperature', + 'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>, + }) +# --- +# name: test_sensors[sensor.rutx50_test_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'RUTX50 Test Temperature', + 'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>, + 'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>, + }), + 'context': <ANY>, + 'entity_id': 'sensor.rutx50_test_temperature', + 'last_changed': <ANY>, + 'last_reported': <ANY>, + 'last_updated': <ANY>, + 'state': '42', + }) +# --- diff --git a/tests/components/teltonika/test_config_flow.py b/tests/components/teltonika/test_config_flow.py new file mode 100644 index 00000000000000..f6e6b605409d02 --- /dev/null +++ b/tests/components/teltonika/test_config_flow.py @@ -0,0 +1,408 @@ +"""Test the Teltonika config flow.""" + +from unittest.mock import AsyncMock, MagicMock + +import pytest +from teltasync import TeltonikaAuthenticationError, 
TeltonikaConnectionError + +from homeassistant import config_entries +from homeassistant.components.teltonika.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, CONF_VERIFY_SSL +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo + +from tests.common import MockConfigEntry + + +async def test_form_user_flow( + hass: HomeAssistant, mock_teltasync: MagicMock, mock_setup_entry: AsyncMock +) -> None: + """Test we get the form and can create an entry.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "192.168.1.1", + CONF_USERNAME: "admin", + CONF_PASSWORD: "password", + CONF_VERIFY_SSL: False, + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "RUTX50 Test" + assert result["data"] == { + CONF_HOST: "https://192.168.1.1", + CONF_USERNAME: "admin", + CONF_PASSWORD: "password", + CONF_VERIFY_SSL: False, + } + assert result["result"].unique_id == "1234567890" + + +@pytest.mark.parametrize( + ("exception", "error_key"), + [ + (TeltonikaAuthenticationError("Invalid credentials"), "invalid_auth"), + (TeltonikaConnectionError("Connection failed"), "cannot_connect"), + (ValueError("Unexpected error"), "unknown"), + ], + ids=["invalid_auth", "cannot_connect", "unexpected_exception"], +) +async def test_form_error_with_recovery( + hass: HomeAssistant, + mock_teltasync_client: MagicMock, + mock_setup_entry: AsyncMock, + exception: Exception, + error_key: str, +) -> None: + """Test we handle errors in config form and can recover.""" + result = await 
hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + # First attempt with error + mock_teltasync_client.get_device_info.side_effect = exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "192.168.1.1", + CONF_USERNAME: "admin", + CONF_PASSWORD: "password", + CONF_VERIFY_SSL: False, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error_key} + + # Recover with working connection + device_info = MagicMock() + device_info.device_name = "RUTX50 Test" + device_info.device_identifier = "1234567890" + mock_teltasync_client.get_device_info.side_effect = None + mock_teltasync_client.get_device_info.return_value = device_info + mock_teltasync_client.validate_credentials.return_value = True + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "192.168.1.1", + CONF_USERNAME: "admin", + CONF_PASSWORD: "password", + CONF_VERIFY_SSL: False, + }, + ) + + await hass.async_block_till_done() + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "RUTX50 Test" + assert result["data"][CONF_HOST] == "https://192.168.1.1" + assert result["result"].unique_id == "1234567890" + + +async def test_form_duplicate_entry( + hass: HomeAssistant, mock_teltasync: MagicMock, mock_config_entry: MockConfigEntry +) -> None: + """Test duplicate config entry is handled.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "192.168.1.1", + CONF_USERNAME: "admin", + CONF_PASSWORD: "password", + CONF_VERIFY_SSL: False, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize( + ("host_input", 
"expected_base_url", "expected_host"), + [ + ("192.168.1.1", "https://192.168.1.1/api", "https://192.168.1.1"), + ("http://192.168.1.1", "http://192.168.1.1/api", "http://192.168.1.1"), + ("https://192.168.1.1", "https://192.168.1.1/api", "https://192.168.1.1"), + ("https://192.168.1.1/api", "https://192.168.1.1/api", "https://192.168.1.1"), + ("device.local", "https://device.local/api", "https://device.local"), + ], +) +async def test_host_url_construction( + hass: HomeAssistant, + mock_teltasync: MagicMock, + mock_teltasync_client: MagicMock, + mock_setup_entry: AsyncMock, + host_input: str, + expected_base_url: str, + expected_host: str, +) -> None: + """Test that host URLs are constructed correctly.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: host_input, + CONF_USERNAME: "admin", + CONF_PASSWORD: "password", + CONF_VERIFY_SSL: False, + }, + ) + + # Verify Teltasync was called with correct base URL + assert mock_teltasync_client.get_device_info.call_count == 1 + call_args = mock_teltasync.call_args_list[0] + assert call_args.kwargs["base_url"] == expected_base_url + assert call_args.kwargs["verify_ssl"] is False + + # Verify the result is a created entry with normalized host + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].data[CONF_HOST] == expected_host + + +async def test_form_user_flow_http_fallback( + hass: HomeAssistant, mock_teltasync_client: MagicMock, mock_setup_entry: AsyncMock +) -> None: + """Test we fall back to HTTP when HTTPS fails.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + # First call (HTTPS) fails + https_client = MagicMock() + https_client.get_device_info.side_effect = TeltonikaConnectionError( + "HTTPS unavailable" + ) + https_client.close = AsyncMock() + + # Second call (HTTP) succeeds + 
device_info = MagicMock() + device_info.device_name = "RUTX50 Test" + device_info.device_identifier = "TESTFALLBACK" + + http_client = MagicMock() + http_client.get_device_info = AsyncMock(return_value=device_info) + http_client.validate_credentials = AsyncMock(return_value=True) + http_client.close = AsyncMock() + + mock_teltasync_client.get_device_info.side_effect = [ + TeltonikaConnectionError("HTTPS unavailable"), + mock_teltasync_client.get_device_info.return_value, + ] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "192.168.1.1", + CONF_USERNAME: "admin", + CONF_PASSWORD: "password", + CONF_VERIFY_SSL: False, + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"][CONF_HOST] == "http://192.168.1.1" + assert mock_teltasync_client.get_device_info.call_count == 2 + # HTTPS client should be closed before falling back + assert mock_teltasync_client.close.call_count == 2 + + +async def test_dhcp_discovery( + hass: HomeAssistant, mock_teltasync_client: MagicMock, mock_setup_entry: AsyncMock +) -> None: + """Test DHCP discovery flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + data=DhcpServiceInfo( + ip="192.168.1.50", + macaddress="209727112233", + hostname="teltonika", + ), + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "dhcp_confirm" + assert "name" in result["description_placeholders"] + assert "host" in result["description_placeholders"] + + # Configure device info for the actual setup + device_info = MagicMock() + device_info.device_name = "RUTX50 Discovered" + device_info.device_identifier = "DISCOVERED123" + mock_teltasync_client.get_device_info.return_value = device_info + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "admin", + CONF_PASSWORD: "password", + }, + ) + + assert result["type"] is 
FlowResultType.CREATE_ENTRY + assert result["title"] == "RUTX50 Discovered" + assert result["data"][CONF_HOST] == "https://192.168.1.50" + assert result["data"][CONF_USERNAME] == "admin" + assert result["data"][CONF_PASSWORD] == "password" + assert result["result"].unique_id == "DISCOVERED123" + + +async def test_dhcp_discovery_already_configured( + hass: HomeAssistant, mock_teltasync: MagicMock, mock_config_entry: MockConfigEntry +) -> None: + """Test DHCP discovery when device is already configured.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + data=DhcpServiceInfo( + ip="192.168.1.50", # Different IP + macaddress="209727112233", + hostname="teltonika", + ), + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + # Verify IP was updated + assert mock_config_entry.data[CONF_HOST] == "192.168.1.50" + + +async def test_dhcp_discovery_cannot_connect( + hass: HomeAssistant, mock_teltasync_client: MagicMock +) -> None: + """Test DHCP discovery when device is not reachable.""" + # Simulate device not reachable via API + mock_teltasync_client.get_device_info.side_effect = TeltonikaConnectionError( + "Connection failed" + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + data=DhcpServiceInfo( + ip="192.168.1.50", + macaddress="209727112233", + hostname="teltonika", + ), + ) + + # Should abort if device is not reachable + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + +@pytest.mark.parametrize( + ("exception", "error_key"), + [ + (TeltonikaAuthenticationError("Invalid credentials"), "invalid_auth"), + (TeltonikaConnectionError("Connection failed"), "cannot_connect"), + (ValueError("Unexpected error"), "unknown"), + ], + ids=["invalid_auth", "cannot_connect", "unexpected_exception"], +) +async def 
test_dhcp_confirm_error_with_recovery( + hass: HomeAssistant, + mock_teltasync_client: MagicMock, + mock_setup_entry: AsyncMock, + exception: Exception, + error_key: str, +) -> None: + """Test DHCP confirmation handles errors and can recover.""" + # Start the DHCP flow + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + data=DhcpServiceInfo( + ip="192.168.1.50", + macaddress="209727112233", + hostname="teltonika", + ), + ) + + # First attempt with error + mock_teltasync_client.get_device_info.side_effect = exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "admin", + CONF_PASSWORD: "password", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error_key} + assert result["step_id"] == "dhcp_confirm" + + # Recover with working connection + device_info = MagicMock() + device_info.device_name = "RUTX50 Discovered" + device_info.device_identifier = "DISCOVERED123" + mock_teltasync_client.get_device_info.side_effect = None + mock_teltasync_client.get_device_info.return_value = device_info + mock_teltasync_client.validate_credentials.return_value = True + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "admin", + CONF_PASSWORD: "password", + }, + ) + + await hass.async_block_till_done() + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "RUTX50 Discovered" + assert result["data"][CONF_HOST] == "https://192.168.1.50" + assert result["result"].unique_id == "DISCOVERED123" + + +async def test_validate_credentials_false( + hass: HomeAssistant, mock_teltasync_client: MagicMock +) -> None: + """Test config flow when validate_credentials returns False.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + device_info = MagicMock() + device_info.device_name = 
"Test Device" + device_info.device_identifier = "TEST123" + + mock_teltasync_client.get_device_info.return_value = device_info + mock_teltasync_client.validate_credentials.return_value = False + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "192.168.1.1", + CONF_USERNAME: "admin", + CONF_PASSWORD: "password", + CONF_VERIFY_SSL: False, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "invalid_auth"} diff --git a/tests/components/teltonika/test_init.py b/tests/components/teltonika/test_init.py new file mode 100644 index 00000000000000..d8e3ecee2ad77b --- /dev/null +++ b/tests/components/teltonika/test_init.py @@ -0,0 +1,107 @@ +"""Test the Teltonika integration.""" + +from unittest.mock import MagicMock + +from aiohttp import ClientResponseError, ContentTypeError +import pytest +from syrupy.assertion import SnapshotAssertion +from teltasync import TeltonikaAuthenticationError, TeltonikaConnectionError + +from homeassistant.components.teltonika.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from tests.common import MockConfigEntry + + +async def test_load_unload_config_entry( + hass: HomeAssistant, + init_integration: MockConfigEntry, +) -> None: + """Test loading and unloading the integration.""" + assert init_integration.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(init_integration.entry_id) + await hass.async_block_till_done() + + assert init_integration.state is ConfigEntryState.NOT_LOADED + + +@pytest.mark.parametrize( + ("exception", "expected_state"), + [ + ( + TeltonikaConnectionError("Connection failed"), + ConfigEntryState.SETUP_RETRY, + ), + ( + ContentTypeError( + request_info=MagicMock(), + history=(), + status=403, + message="Attempt to decode JSON with unexpected mimetype: text/html", + 
headers={}, + ), + ConfigEntryState.SETUP_ERROR, + ), + ( + ClientResponseError( + request_info=MagicMock(), + history=(), + status=401, + message="Unauthorized", + headers={}, + ), + ConfigEntryState.SETUP_ERROR, + ), + ( + ClientResponseError( + request_info=MagicMock(), + history=(), + status=403, + message="Forbidden", + headers={}, + ), + ConfigEntryState.SETUP_ERROR, + ), + ( + TeltonikaAuthenticationError("Invalid credentials"), + ConfigEntryState.SETUP_ERROR, + ), + ], + ids=[ + "connection_error", + "content_type_403", + "response_401", + "response_403", + "auth_error", + ], +) +async def test_setup_errors( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_teltasync: MagicMock, + exception: Exception, + expected_state: ConfigEntryState, +) -> None: + """Test various setup errors result in appropriate config entry states.""" + mock_teltasync.return_value.get_device_info.side_effect = exception + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is expected_state + + +async def test_device_registry_creation( + hass: HomeAssistant, + init_integration: MockConfigEntry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test device registry creation.""" + device = device_registry.async_get_device(identifiers={(DOMAIN, "1234567890")}) + assert device is not None + assert device == snapshot diff --git a/tests/components/teltonika/test_sensor.py b/tests/components/teltonika/test_sensor.py new file mode 100644 index 00000000000000..1d7b1b18d618ed --- /dev/null +++ b/tests/components/teltonika/test_sensor.py @@ -0,0 +1,93 @@ +"""Test Teltonika sensor platform.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, MagicMock + +from freezegun.api import FrozenDateTimeFactory +from syrupy.assertion import SnapshotAssertion +from teltasync import TeltonikaConnectionError 
+ +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_sensors( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + init_integration: MockConfigEntry, +) -> None: + """Test sensor entities match snapshot.""" + await snapshot_platform(hass, entity_registry, snapshot, init_integration.entry_id) + + +async def test_sensor_modem_removed( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + init_integration: MockConfigEntry, + mock_modems: MagicMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test sensor becomes unavailable when modem is removed.""" + + # Get initial sensor state + state = hass.states.get("sensor.rutx50_test_internal_modem_rssi") + assert state is not None + + # Update coordinator with empty modem data + mock_response = MagicMock() + mock_response.data = [] # No modems + mock_modems.get_status.return_value = mock_response + + freezer.tick(timedelta(seconds=31)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Check that entity is marked as unavailable + state = hass.states.get("sensor.rutx50_test_internal_modem_rssi") + assert state is not None + + # When modem is removed, entity should be marked as unavailable + # Verify through entity registry that entity exists but is unavailable + entity_entry = entity_registry.async_get("sensor.rutx50_test_internal_modem_rssi") + assert entity_entry is not None + # State should show unavailable when modem is removed + assert state.state == "unavailable" + + +async def test_sensor_update_failure_and_recovery( + hass: HomeAssistant, + mock_modems: AsyncMock, + init_integration: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test sensor becomes unavailable on update failure and recovers.""" + + # Get initial sensor state, here it should be available + state 
= hass.states.get("sensor.rutx50_test_internal_modem_rssi") + assert state is not None + assert state.state == "-63" + + mock_modems.get_status.side_effect = TeltonikaConnectionError("Connection lost") + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Sensor should now be unavailable + state = hass.states.get("sensor.rutx50_test_internal_modem_rssi") + assert state is not None + assert state.state == "unavailable" + # Simulate recovery + mock_modems.get_status.side_effect = None + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Sensor should be available again with correct data + state = hass.states.get("sensor.rutx50_test_internal_modem_rssi") + assert state is not None + assert state.state == "-63" diff --git a/tests/components/teltonika/test_util.py b/tests/components/teltonika/test_util.py new file mode 100644 index 00000000000000..20fa16b89dbb0c --- /dev/null +++ b/tests/components/teltonika/test_util.py @@ -0,0 +1,38 @@ +"""Test Teltonika utility helpers.""" + +from homeassistant.components.teltonika.util import get_url_variants, normalize_url + + +def test_normalize_url_adds_https_scheme() -> None: + """Test normalize_url adds HTTPS scheme for bare hostnames.""" + assert normalize_url("teltonika") == "https://teltonika" + + +def test_normalize_url_preserves_scheme() -> None: + """Test normalize_url preserves explicitly provided scheme.""" + assert normalize_url("http://teltonika") == "http://teltonika" + assert normalize_url("https://teltonika") == "https://teltonika" + + +def test_normalize_url_strips_path() -> None: + """Test normalize_url removes any path component.""" + assert normalize_url("https://teltonika/api") == "https://teltonika" + assert normalize_url("http://teltonika/other/path") == "http://teltonika" + + +def test_get_url_variants_with_https_scheme() -> None: + """Test get_url_variants with explicit HTTPS scheme 
returns only HTTPS.""" + assert get_url_variants("https://teltonika") == ["https://teltonika"] + + +def test_get_url_variants_with_http_scheme() -> None: + """Test get_url_variants with explicit HTTP scheme returns only HTTP.""" + assert get_url_variants("http://teltonika") == ["http://teltonika"] + + +def test_get_url_variants_without_scheme() -> None: + """Test get_url_variants without scheme returns both HTTPS and HTTP.""" + assert get_url_variants("teltonika") == [ + "https://teltonika", + "http://teltonika", + ] diff --git a/tests/components/thread/test_dataset_store.py b/tests/components/thread/test_dataset_store.py index 523347cef1ec6a..d70d3583a13ce5 100644 --- a/tests/components/thread/test_dataset_store.py +++ b/tests/components/thread/test_dataset_store.py @@ -394,11 +394,8 @@ async def test_migrate_drop_bad_datasets( assert list(store.datasets.values())[0].tlv == DATASET_1 assert store.preferred_dataset == "id1" - assert f"Dropped invalid Thread dataset '{DATASET_1_NO_EXTPANID}'" in caplog.text - assert ( - f"Dropped invalid Thread dataset '{DATASET_1_NO_ACTIVETIMESTAMP}'" - in caplog.text - ) + assert caplog.text.count("Dropped invalid Thread dataset") == 2 + assert "'NETWORKKEY': '**REDACTED**'" in caplog.text async def test_migrate_drop_bad_datasets_preferred( @@ -463,10 +460,8 @@ async def test_migrate_drop_duplicate_datasets( assert list(store.datasets.values())[0].tlv == DATASET_1_LARGER_TIMESTAMP assert store.preferred_dataset is None - assert ( - f"Dropped duplicated Thread dataset '{DATASET_1}' " - f"(duplicate of '{DATASET_1_LARGER_TIMESTAMP}')" - ) in caplog.text + assert "Dropped duplicated Thread dataset" in caplog.text + assert "'NETWORKKEY': '**REDACTED**'" in caplog.text async def test_migrate_drop_duplicate_datasets_2( @@ -500,10 +495,8 @@ async def test_migrate_drop_duplicate_datasets_2( assert list(store.datasets.values())[0].tlv == DATASET_1_LARGER_TIMESTAMP assert store.preferred_dataset is None - assert ( - f"Dropped duplicated 
Thread dataset '{DATASET_1}' " - f"(duplicate of '{DATASET_1_LARGER_TIMESTAMP}')" - ) in caplog.text + assert "Dropped duplicated Thread dataset" in caplog.text + assert "'NETWORKKEY': '**REDACTED**'" in caplog.text async def test_migrate_drop_duplicate_datasets_preferred( @@ -537,10 +530,9 @@ async def test_migrate_drop_duplicate_datasets_preferred( assert list(store.datasets.values())[0].tlv == DATASET_1 assert store.preferred_dataset == "id1" - assert ( - f"Dropped duplicated Thread dataset '{DATASET_1_LARGER_TIMESTAMP}' " - f"(duplicate of preferred dataset '{DATASET_1}')" - ) in caplog.text + assert "Dropped duplicated Thread dataset" in caplog.text + assert "duplicate of preferred dataset" in caplog.text + assert "'NETWORKKEY': '**REDACTED**'" in caplog.text async def test_migrate_set_default_border_agent_id( diff --git a/tests/components/transmission/test_init.py b/tests/components/transmission/test_init.py index 07698681d1ea0d..17ebadc587561d 100644 --- a/tests/components/transmission/test_init.py +++ b/tests/components/transmission/test_init.py @@ -91,7 +91,7 @@ async def test_setup_failed_unexpected_error( await hass.config_entries.async_setup(mock_config_entry.entry_id) - assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY async def test_unload_entry( diff --git a/tests/components/vacuum/__init__.py b/tests/components/vacuum/__init__.py index 7e27af46bac1c1..ae4cdc30b17be5 100644 --- a/tests/components/vacuum/__init__.py +++ b/tests/components/vacuum/__init__.py @@ -3,6 +3,7 @@ from typing import Any from homeassistant.components.vacuum import ( + Segment, StateVacuumEntity, VacuumActivity, VacuumEntityFeature, @@ -79,3 +80,48 @@ async def help_async_unload_entry( return await hass.config_entries.async_unload_platforms( config_entry, [Platform.VACUUM] ) + + +SEGMENTS = [ + Segment(id="seg_1", name="Kitchen"), + Segment(id="seg_2", name="Living Room"), + Segment(id="seg_3", 
name="Bedroom"), + Segment(id="seg_4", name="Bedroom", group="Upstairs"), + Segment(id="seg_5", name="Bathroom", group="Upstairs"), +] + + +class MockVacuumWithCleanArea(MockEntity, StateVacuumEntity): + """Mock vacuum with clean_area support.""" + + _attr_supported_features = ( + VacuumEntityFeature.STATE + | VacuumEntityFeature.START + | VacuumEntityFeature.CLEAN_AREA + ) + + def __init__( + self, + segments: list[Segment] | None = None, + unique_id: str = "mock_vacuum_unique_id", + **values: Any, + ) -> None: + """Initialize a mock vacuum entity.""" + super().__init__(**values) + self._attr_unique_id = unique_id + self._attr_activity = VacuumActivity.DOCKED + self.segments = segments if segments is not None else SEGMENTS + self.clean_segments_calls: list[tuple[list[str], dict[str, Any]]] = [] + + def start(self) -> None: + """Start cleaning.""" + self._attr_activity = VacuumActivity.CLEANING + + async def async_get_segments(self) -> list[Segment]: + """Get the segments that can be cleaned.""" + return self.segments + + async def async_clean_segments(self, segment_ids: list[str], **kwargs: Any) -> None: + """Perform an area clean.""" + self.clean_segments_calls.append((segment_ids, kwargs)) + self._attr_activity = VacuumActivity.CLEANING diff --git a/tests/components/vacuum/test_init.py b/tests/components/vacuum/test_init.py index 1607264d822dd5..549802d6e79571 100644 --- a/tests/components/vacuum/test_init.py +++ b/tests/components/vacuum/test_init.py @@ -2,6 +2,7 @@ from __future__ import annotations +from dataclasses import asdict import logging from typing import Any @@ -9,6 +10,7 @@ from homeassistant.components.vacuum import ( DOMAIN, + SERVICE_CLEAN_AREA, SERVICE_CLEAN_SPOT, SERVICE_LOCATE, SERVICE_PAUSE, @@ -22,12 +24,19 @@ VacuumEntityFeature, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, issue_registry as ir -from . import MockVacuum, help_async_setup_entry_init, help_async_unload_entry +from . 
import ( + MockVacuum, + MockVacuumWithCleanArea, + help_async_setup_entry_init, + help_async_unload_entry, +) from .common import async_start from tests.common import ( MockConfigEntry, + MockEntity, MockModule, mock_integration, setup_test_component_platform, @@ -206,6 +215,252 @@ def send_command( assert "test" in strings +@pytest.mark.usefixtures("config_flow_fixture") +@pytest.mark.parametrize( + ("area_mapping", "targeted_areas", "targeted_segments"), + [ + ( + {"area_1": ["seg_1"], "area_2": ["seg_2", "seg_3"]}, + ["area_1", "area_2"], + ["seg_1", "seg_2", "seg_3"], + ), + ( + {"area_1": ["seg_1", "seg_2"], "area_2": ["seg_2", "seg_3"]}, + ["area_1", "area_2"], + ["seg_1", "seg_2", "seg_3"], + ), + ], +) +async def test_clean_area_service( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + area_mapping: dict[str, list[str]], + targeted_areas: list[str], + targeted_segments: list[str], +) -> None: + """Test clean_area service calls async_clean_segments with correct segments.""" + mock_vacuum = MockVacuumWithCleanArea(name="Testing", entity_id="vacuum.testing") + + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), + ) + setup_test_component_platform(hass, DOMAIN, [mock_vacuum], from_config_entry=True) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entity_registry.async_update_entity_options( + mock_vacuum.entity_id, + DOMAIN, + { + "area_mapping": area_mapping, + "last_seen_segments": [asdict(segment) for segment in mock_vacuum.segments], + }, + ) + + await hass.services.async_call( + DOMAIN, + SERVICE_CLEAN_AREA, + {"entity_id": mock_vacuum.entity_id, "cleaning_area_id": targeted_areas}, + blocking=True, + ) + + assert len(mock_vacuum.clean_segments_calls) == 1 + assert 
mock_vacuum.clean_segments_calls[0][0] == targeted_segments + + +@pytest.mark.usefixtures("config_flow_fixture") +@pytest.mark.parametrize( + ("area_mapping", "targeted_areas"), + [ + ({}, ["area_1"]), + ({"area_1": ["seg_1"]}, ["area_2"]), + ], +) +async def test_clean_area_no_segments( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + area_mapping: dict[str, list[str]], + targeted_areas: list[str], +) -> None: + """Test clean_area does nothing when no segments to clean.""" + mock_vacuum = MockVacuumWithCleanArea(name="Testing", entity_id="vacuum.testing") + + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), + ) + setup_test_component_platform(hass, DOMAIN, [mock_vacuum], from_config_entry=True) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + await hass.services.async_call( + DOMAIN, + SERVICE_CLEAN_AREA, + {"entity_id": mock_vacuum.entity_id, "cleaning_area_id": targeted_areas}, + blocking=True, + ) + + entity_registry.async_update_entity_options( + mock_vacuum.entity_id, + DOMAIN, + { + "area_mapping": area_mapping, + "last_seen_segments": [asdict(segment) for segment in mock_vacuum.segments], + }, + ) + + await hass.services.async_call( + DOMAIN, + SERVICE_CLEAN_AREA, + {"entity_id": mock_vacuum.entity_id, "cleaning_area_id": targeted_areas}, + blocking=True, + ) + + assert len(mock_vacuum.clean_segments_calls) == 0 + + +@pytest.mark.usefixtures("config_flow_fixture") +async def test_clean_area_methods_not_implemented(hass: HomeAssistant) -> None: + """Test async_get_segments and async_clean_segments raise NotImplementedError.""" + + class MockVacuumNoImpl(MockEntity, StateVacuumEntity): + """Mock vacuum without implementations.""" + + _attr_supported_features = ( + VacuumEntityFeature.STATE | 
VacuumEntityFeature.CLEAN_AREA + ) + _attr_activity = VacuumActivity.DOCKED + + mock_vacuum = MockVacuumNoImpl(name="Testing", entity_id="vacuum.testing") + + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), + ) + setup_test_component_platform(hass, DOMAIN, [mock_vacuum], from_config_entry=True) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + with pytest.raises(NotImplementedError): + await mock_vacuum.async_get_segments() + + with pytest.raises(NotImplementedError): + await mock_vacuum.async_clean_segments(["seg_1"]) + + +async def test_clean_area_no_registry_entry() -> None: + """Test error handling when registry entry is not set.""" + mock_vacuum = MockVacuumWithCleanArea(name="Testing", entity_id="vacuum.testing") + + with pytest.raises( + RuntimeError, + match="Cannot access last_seen_segments, registry entry is not set", + ): + mock_vacuum.last_seen_segments # noqa: B018 + + with pytest.raises( + RuntimeError, + match="Cannot perform area clean, registry entry is not set", + ): + await mock_vacuum.async_internal_clean_area(["area_1"]) + + with pytest.raises( + RuntimeError, + match="Cannot create segments issue, registry entry is not set", + ): + mock_vacuum.async_create_segments_issue() + + +@pytest.mark.usefixtures("config_flow_fixture") +async def test_last_seen_segments( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test last_seen_segments property.""" + mock_vacuum = MockVacuumWithCleanArea(name="Testing", entity_id="vacuum.testing") + + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), + ) + 
setup_test_component_platform(hass, DOMAIN, [mock_vacuum], from_config_entry=True) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_vacuum.last_seen_segments is None + + entity_registry.async_update_entity_options( + mock_vacuum.entity_id, + DOMAIN, + { + "area_mapping": {}, + "last_seen_segments": [asdict(segment) for segment in mock_vacuum.segments], + }, + ) + + assert mock_vacuum.last_seen_segments == mock_vacuum.segments + + +@pytest.mark.usefixtures("config_flow_fixture") +async def test_last_seen_segments_and_issue_creation( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test last_seen_segments property and segments issue creation.""" + mock_vacuum = MockVacuumWithCleanArea(name="Testing", entity_id="vacuum.testing") + + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), + ) + setup_test_component_platform(hass, DOMAIN, [mock_vacuum], from_config_entry=True) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entity_entry = entity_registry.async_get(mock_vacuum.entity_id) + mock_vacuum.async_create_segments_issue() + + issue_id = f"segments_changed_{entity_entry.id}" + issue = ir.async_get(hass).async_get_issue(DOMAIN, issue_id) + assert issue is not None + assert issue.severity == ir.IssueSeverity.WARNING + assert issue.translation_key == "segments_changed" + + @pytest.mark.parametrize(("is_built_in", "log_warnings"), [(True, 0), (False, 3)]) async def test_vacuum_log_deprecated_battery_using_properties( hass: HomeAssistant, diff --git a/tests/components/vacuum/test_websocket.py b/tests/components/vacuum/test_websocket.py new file mode 100644 index 00000000000000..19ba3366169175 --- /dev/null +++ 
b/tests/components/vacuum/test_websocket.py @@ -0,0 +1,125 @@ +"""Tests for vacuum websocket API.""" + +from __future__ import annotations + +from dataclasses import asdict + +import pytest + +from homeassistant.components.vacuum import ( + DOMAIN, + Segment, + StateVacuumEntity, + VacuumActivity, + VacuumEntityFeature, +) +from homeassistant.components.websocket_api import ERR_NOT_FOUND, ERR_NOT_SUPPORTED +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from . import ( + MockVacuumWithCleanArea, + help_async_setup_entry_init, + help_async_unload_entry, +) + +from tests.common import ( + MockConfigEntry, + MockEntity, + MockModule, + mock_integration, + setup_test_component_platform, +) +from tests.typing import WebSocketGenerator + + +@pytest.mark.usefixtures("config_flow_fixture") +async def test_get_segments( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test vacuum/get_segments websocket command.""" + segments = [ + Segment(id="seg_1", name="Kitchen"), + Segment(id="seg_2", name="Living Room"), + Segment(id="seg_3", name="Bedroom", group="Upstairs"), + ] + entity = MockVacuumWithCleanArea( + name="Testing", + entity_id="vacuum.testing", + segments=segments, + ) + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), + ) + setup_test_component_platform(hass, DOMAIN, [entity], from_config_entry=True) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + client = await hass_ws_client(hass) + await client.send_json_auto_id( + {"type": "vacuum/get_segments", "entity_id": entity.entity_id} + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] == {"segments": [asdict(seg) for seg in segments]} + + +async def 
test_get_segments_entity_not_found( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test vacuum/get_segments with unknown entity.""" + assert await async_setup_component(hass, DOMAIN, {}) + + client = await hass_ws_client(hass) + await client.send_json_auto_id( + {"type": "vacuum/get_segments", "entity_id": "vacuum.unknown"} + ) + msg = await client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_FOUND + + +@pytest.mark.usefixtures("config_flow_fixture") +async def test_get_segments_not_supported( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test vacuum/get_segments with entity not supporting CLEAN_AREA.""" + + class MockVacuumNoCleanArea(MockEntity, StateVacuumEntity): + _attr_supported_features = VacuumEntityFeature.STATE | VacuumEntityFeature.START + _attr_activity = VacuumActivity.DOCKED + + entity = MockVacuumNoCleanArea(name="Testing", entity_id="vacuum.testing") + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), + ) + setup_test_component_platform(hass, DOMAIN, [entity], from_config_entry=True) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + client = await hass_ws_client(hass) + await client.send_json_auto_id( + {"type": "vacuum/get_segments", "entity_id": entity.entity_id} + ) + msg = await client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_SUPPORTED diff --git a/tests/components/watts/test_config_flow.py b/tests/components/watts/test_config_flow.py index 67c9fbf64a63f6..bbb31d6a5ad653 100644 --- a/tests/components/watts/test_config_flow.py +++ b/tests/components/watts/test_config_flow.py @@ -297,7 +297,100 @@ async def test_reauth_account_mismatch( result = await 
hass.config_entries.flow.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_account_mismatch" + assert result["reason"] == "account_mismatch" + + +@pytest.mark.usefixtures("current_request_with_host", "mock_setup_entry") +async def test_reconfigure_flow( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the reconfiguration flow.""" + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reconfigure_flow(hass) + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "refresh_token": "new-refresh-token", + "access_token": "new-access-token", + "token_type": "Bearer", + "expires_in": 3600, + }, + ) + + with patch( + "homeassistant.components.watts.config_flow.WattsVisionAuth.extract_user_id_from_token", + return_value="test-user-id", + ): + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + mock_config_entry.data["token"].pop("expires_at") + assert mock_config_entry.data["token"] == { + "refresh_token": "new-refresh-token", + "access_token": "new-access-token", + "token_type": "Bearer", + "expires_in": 3600, + } + + +@pytest.mark.usefixtures("current_request_with_host") +async def test_reconfigure_account_mismatch( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfiguration with a different account aborts.""" 
+ mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reconfigure_flow(hass) + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "refresh_token": "new-refresh-token", + "access_token": "new-access-token", + "token_type": "Bearer", + "expires_in": 3600, + }, + ) + + with patch( + "homeassistant.components.watts.config_flow.WattsVisionAuth.extract_user_id_from_token", + return_value="different-user-id", + ): + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "account_mismatch" @pytest.mark.usefixtures("current_request_with_host") diff --git a/tests/helpers/test_config_entry_flow.py b/tests/helpers/test_config_entry_flow.py index 172aa393538768..4e29972191a0b9 100644 --- a/tests/helpers/test_config_entry_flow.py +++ b/tests/helpers/test_config_entry_flow.py @@ -510,3 +510,96 @@ async def test_webhook_create_cloudhook_aborts_not_connected( assert result["type"] == data_entry_flow.FlowResultType.ABORT assert result["reason"] == "cloud_not_connected" + + +async def test_webhook_reconfigure_flow( + hass: HomeAssistant, webhook_flow_conf: None +) -> None: + """Test webhook reconfigure flow.""" + config_entry = MockConfigEntry( + domain="test_single", + data={ + "webhook_id": "12345", + "cloudhook": False, + "other_entry_data": "not_changed", + }, + ) + config_entry.add_to_hass(hass) + + flow = config_entries.HANDLERS["test_single"]() + flow.hass = hass + flow.context = { + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": config_entry.entry_id, + } + + await async_process_ha_core_config( + hass, + {"external_url": 
"https://example.com"}, + ) + + result = await flow.async_step_reconfigure() + assert result["type"] is data_entry_flow.FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + result = await flow.async_step_reconfigure(user_input={}) + + assert result["type"] == data_entry_flow.FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert result["description_placeholders"] == { + "webhook_url": "https://example.com/api/webhook/12345" + } + assert config_entry.data["webhook_id"] == "12345" + assert config_entry.data["cloudhook"] is False + assert config_entry.data["other_entry_data"] == "not_changed" + + +async def test_webhook_reconfigure_cloudhook( + hass: HomeAssistant, webhook_flow_conf: None +) -> None: + """Test reconfigure updates to cloudhook if subscribed.""" + assert await setup.async_setup_component(hass, "cloud", {}) + + config_entry = MockConfigEntry( + domain="test_single", data={"webhook_id": "12345", "cloudhook": False} + ) + config_entry.add_to_hass(hass) + + flow = config_entries.HANDLERS["test_single"]() + flow.hass = hass + flow.context = { + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": config_entry.entry_id, + } + + result = await flow.async_step_reconfigure() + assert result["type"] is data_entry_flow.FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + with ( + patch( + "hass_nabucasa.cloudhooks.Cloudhooks.async_create", + return_value={"cloudhook_url": "https://example.com"}, + ) as mock_create, + patch( + "hass_nabucasa.Cloud.subscription_expired", + new_callable=PropertyMock(return_value=False), + ), + patch( + "hass_nabucasa.Cloud.is_logged_in", + new_callable=PropertyMock(return_value=True), + ), + patch( + "hass_nabucasa.iot_base.BaseIoT.connected", + new_callable=PropertyMock(return_value=True), + ), + ): + result = await flow.async_step_reconfigure(user_input={}) + + assert result["type"] == data_entry_flow.FlowResultType.ABORT + assert result["reason"] == 
"reconfigure_successful" + assert result["description_placeholders"] == {"webhook_url": "https://example.com"} + assert len(mock_create.mock_calls) == 1 + + assert config_entry.data["webhook_id"] == "12345" + assert config_entry.data["cloudhook"] is True diff --git a/tests/helpers/test_config_entry_oauth2_flow.py b/tests/helpers/test_config_entry_oauth2_flow.py index dc56910785c429..0ba5e9543ae2ee 100644 --- a/tests/helpers/test_config_entry_oauth2_flow.py +++ b/tests/helpers/test_config_entry_oauth2_flow.py @@ -7,11 +7,15 @@ from typing import Any from unittest.mock import AsyncMock, patch -import aiohttp import pytest from homeassistant import config_entries, data_entry_flow, setup from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ( + OAuth2TokenRequestError, + OAuth2TokenRequestReauthError, + OAuth2TokenRequestTransientError, +) from homeassistant.helpers import config_entry_oauth2_flow from homeassistant.helpers.network import NoURLAvailableError @@ -478,7 +482,7 @@ async def test_abort_discovered_multiple( ( HTTPStatus.NOT_FOUND, {}, - "oauth_failed", + "oauth_unauthorized", "Token request for oauth2_test failed (unknown): unknown", ), ( @@ -494,7 +498,7 @@ async def test_abort_discovered_multiple( "error_description": "Request was missing the 'redirect_uri' parameter.", "error_uri": "See the full API docs at https://authorization-server.com/docs/access_token", }, - "oauth_failed", + "oauth_unauthorized", "Token request for oauth2_test failed (invalid_request): Request was missing the", ), ], @@ -979,16 +983,42 @@ async def async_provide_implementation( } -async def test_oauth_session_refresh_failure( +@pytest.mark.parametrize( + ("status_code", "expected_exception"), + [ + ( + HTTPStatus.BAD_REQUEST, + OAuth2TokenRequestReauthError, + ), + ( + HTTPStatus.TOO_MANY_REQUESTS, # 429, odd one, but treated as transient + OAuth2TokenRequestTransientError, + ), + ( + HTTPStatus.INTERNAL_SERVER_ERROR, # 500 range, so treated as 
transient + OAuth2TokenRequestTransientError, + ), + ( + 600, # Nonsense code, just to hit the generic error branch + OAuth2TokenRequestError, + ), + ], +) +async def test_oauth_session_refresh_failure_exceptions( hass: HomeAssistant, flow_handler: type[config_entry_oauth2_flow.AbstractOAuth2FlowHandler], local_impl: config_entry_oauth2_flow.LocalOAuth2Implementation, aioclient_mock: AiohttpClientMocker, + status_code: int, + expected_exception: type[Exception], + caplog: pytest.LogCaptureFixture, ) -> None: - """Test the OAuth2 session helper when no refresh is needed.""" + """Test OAuth2 session refresh failures raise mapped exceptions.""" + mock_integration(hass, MockModule(domain=TEST_DOMAIN)) + flow_handler.async_register_implementation(hass, local_impl) - aioclient_mock.post(TOKEN_URL, status=400) + aioclient_mock.post(TOKEN_URL, status=status_code, json={}) config_entry = MockConfigEntry( domain=TEST_DOMAIN, @@ -1005,11 +1035,18 @@ async def test_oauth_session_refresh_failure( }, }, ) + config_entry.add_to_hass(hass) session = config_entry_oauth2_flow.OAuth2Session(hass, config_entry, local_impl) - with pytest.raises(aiohttp.client_exceptions.ClientResponseError): + with ( + caplog.at_level(logging.WARNING), + pytest.raises(expected_exception) as err, + ): await session.async_request("post", "https://example.com") + assert err.value.status == status_code + assert f"Token request for {TEST_DOMAIN} failed" in caplog.text + async def test_oauth2_without_secret_init( local_impl: config_entry_oauth2_flow.LocalOAuth2Implementation, diff --git a/tests/helpers/test_update_coordinator.py b/tests/helpers/test_update_coordinator.py index 612b39293a2f33..77a3c90ee0e60a 100644 --- a/tests/helpers/test_update_coordinator.py +++ b/tests/helpers/test_update_coordinator.py @@ -19,6 +19,8 @@ ConfigEntryAuthFailed, ConfigEntryError, ConfigEntryNotReady, + OAuth2TokenRequestError, + OAuth2TokenRequestReauthError, ) from homeassistant.helpers import frame, update_coordinator 
from homeassistant.util.dt import utcnow @@ -322,6 +324,84 @@ async def test_refresh_fail_unknown( assert "Unexpected error fetching test data" in caplog.text +@pytest.mark.parametrize( + ("exception", "expected_exception"), + [(OAuth2TokenRequestReauthError, ConfigEntryAuthFailed)], +) +async def test_oauth_token_request_refresh_errors( + crd: update_coordinator.DataUpdateCoordinator[int], + exception: type[OAuth2TokenRequestError], + expected_exception: type[Exception], +) -> None: + """Test OAuth2 token request errors are mapped during refresh.""" + request_info = Mock() + request_info.real_url = "http://example.com/token" + request_info.method = "POST" + + oauth_exception = exception( + request_info=request_info, + history=(), + status=400, + message="OAuth 2.0 token refresh failed", + domain="domain", + ) + + crd.update_method = AsyncMock(side_effect=oauth_exception) + + with pytest.raises(expected_exception) as err: + # Raise on auth failed, needs to be set + await crd._async_refresh(raise_on_auth_failed=True) + + # Check thoroughly the chain + assert isinstance(err.value, expected_exception) + assert isinstance(err.value.__cause__, exception) + assert isinstance(err.value.__cause__, OAuth2TokenRequestError) + + +@pytest.mark.parametrize( + ("exception", "expected_exception"), + [ + (OAuth2TokenRequestReauthError, ConfigEntryAuthFailed), + (OAuth2TokenRequestError, ConfigEntryNotReady), + ], +) +async def test_token_request_setup_errors( + hass: HomeAssistant, + exception: type[OAuth2TokenRequestError], + expected_exception: type[Exception], +) -> None: + """Test OAuth2 token request errors raised from setup.""" + entry = MockConfigEntry() + entry._async_set_state( + hass, config_entries.ConfigEntryState.SETUP_IN_PROGRESS, "For testing, duh" + ) + crd = get_crd(hass, DEFAULT_UPDATE_INTERVAL, entry) + + # Patch the underlying request info to raise ClientResponseError + request_info = Mock() + request_info.real_url = "http://example.com/token" + 
request_info.method = "POST" + oauth_exception = exception( + request_info=request_info, + history=(), + status=400, + message="OAuth 2.0 token refresh failed", + domain="domain", + ) + + crd.setup_method = AsyncMock(side_effect=oauth_exception) + + with pytest.raises(expected_exception) as err: + await crd.async_config_entry_first_refresh() + + assert crd.last_update_success is False + + # Check thoroughly the chain + assert isinstance(err.value, expected_exception) + assert isinstance(err.value.__cause__, exception) + assert isinstance(err.value.__cause__, OAuth2TokenRequestError) + + async def test_refresh_no_update_method( crd: update_coordinator.DataUpdateCoordinator[int], ) -> None: