Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: Add support for Sector Alarm event entities and log ingestion #218

Open
wants to merge 32 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
32 commits
Select commit Hold shift + click to select a range
0b2f19c
feat: Add support for Sector Alarm event entities and log ingestion
Nov 7, 2024
98cbe60
Updated based on feedback in PR
Nov 7, 2024
945d3ac
Use the new API, not the old one
Nov 8, 2024
4cb0ec1
Time conversion had some issues, updating
Nov 8, 2024
0850088
Introduce LockEventEntity
Nov 8, 2024
2533c29
Move processing to coordinator and let events handle the presentation
Nov 8, 2024
b070592
Nuke event firing
Nov 8, 2024
fb489bd
Remove unnecessary stuff
Nov 8, 2024
8a47c56
Remove unnecessary stuff
Nov 8, 2024
0bd3f12
Use global var
Nov 8, 2024
3ce2e3b
Shouldn't be static
Nov 8, 2024
821a3b9
Lets only focus on lock events for the time being
Nov 8, 2024
fdb6105
Removed queue handling
Nov 8, 2024
789039c
Fix filtering of categories
Nov 8, 2024
22079e9
More fixes
Nov 8, 2024
09dfd2a
Fix event processing and device matching in Sector Alarm integration
Nov 8, 2024
dbdebf2
Ditch creating a list of already processed events and look at the act…
Nov 8, 2024
7fc0b77
Normalized naming when comparing timestamp to entities, improved debu…
Nov 8, 2024
3e3d8cc
Remove queue reference
Nov 8, 2024
4e72b94
Minor crap
Nov 9, 2024
cbcd483
Process logs in reverse
Nov 9, 2024
cdc2874
Final adjustments
Nov 9, 2024
c4baf7c
Rewrite, again....
Nov 10, 2024
f052e34
More rewrites
Nov 10, 2024
2792b36
Some final fixes, now it's finally working
Nov 11, 2024
fd647a7
Merge with master
Nov 16, 2024
d3fe75f
Match up with master
Nov 16, 2024
dfbf9a5
Final fixes to handle events from the logs for the Smart Locks
Nov 28, 2024
9f834c4
Mods
gjohansson-ST Dec 8, 2024
c47f207
Format
gjohansson-ST Dec 8, 2024
099770e
Fix
gjohansson-ST Dec 8, 2024
012f3db
Fix
gjohansson-ST Dec 8, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions custom_components/sector/const.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
Platform.LOCK,
Platform.SENSOR,
Platform.SWITCH,
Platform.EVENT,
]

CATEGORY_MODEL_MAPPING = {
Expand Down
173 changes: 157 additions & 16 deletions custom_components/sector/coordinator.py
Original file line number Diff line number Diff line change
@@ -1,19 +1,25 @@
"""Sector Alarm coordinator."""

import logging
from datetime import timedelta
from datetime import datetime, timedelta
from typing import Any

from aiozoneinfo import async_get_time_zone
from homeassistant.components.recorder import get_instance, history
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt as dt_util
from homeassistant.util import slugify
from zoneinfo import ZoneInfoNotFoundError

from .client import AuthenticationError, SectorAlarmAPI
from .const import CATEGORY_MODEL_MAPPING, CONF_PANEL_ID, DOMAIN

_LOGGER = logging.getLogger(__name__)


# Make sure the SectorAlarmConfigEntry type is present
type SectorAlarmConfigEntry = ConfigEntry[SectorDataUpdateCoordinator]

Expand All @@ -40,6 +46,33 @@ def __init__(self, hass: HomeAssistant, entry: SectorAlarmConfigEntry) -> None:
update_interval=timedelta(seconds=60),
)

async def get_last_event_timestamp(self, device_name: str) -> datetime | None:
    """Return the timestamp of the most recent recorded event for a device.

    Queries the recorder history for the device's event-log entity over the
    past 24 hours.

    Args:
        device_name: Slugified device name used to build the entity id
            ``event.<device_name>_event_log``.

    Returns:
        The ``last_changed`` datetime of the latest recorded state, or
        ``None`` when no history exists for the entity.
    """
    entity_id = f"event.{device_name}_event_log"
    end_time = datetime.now(dt_util.UTC)
    start_time = end_time - timedelta(days=1)

    # state_changes_during_period is blocking; run it in the recorder's
    # executor so the event loop stays responsive.
    history_data = await get_instance(self.hass).async_add_executor_job(
        history.state_changes_during_period,
        self.hass,
        start_time,
        end_time,
        entity_id,
    )

    states = history_data.get(entity_id)
    if states:
        latest_state = states[-1]
        _LOGGER.debug("SECTOR_EVENT: Latest known state: %s", latest_state)
        # last_changed is already a timezone-aware datetime — returning it
        # directly avoids the redundant isoformat()/fromisoformat round-trip.
        return latest_state.last_changed

    return None

def get_device_info(self, serial):
    """Fetch device information by serial number.

    Returns the stored device record for *serial*, or a placeholder dict
    when the serial is unknown.
    """
    fallback = {"name": "Unknown Device", "model": "Unknown Model"}
    return self.data["devices"].get(serial, fallback)

async def _async_update_data(self) -> dict[str, Any]:
"""Fetch data from Sector Alarm API."""
try:
Expand All @@ -52,7 +85,7 @@ async def _async_update_data(self) -> dict[str, Any]:

# Process logs for event handling
logs_data = api_data.get("Logs", [])
self._event_logs = self._process_event_logs(logs_data, devices)
self._event_logs = await self._process_event_logs(logs_data, devices)

return {
"devices": devices,
Expand All @@ -66,6 +99,73 @@ async def _async_update_data(self) -> dict[str, Any]:
_LOGGER.exception("Failed to update data")
raise UpdateFailed(f"Failed to update data: {error}") from error

@staticmethod
def _get_event_id(log):
    """Create a unique identifier for each log event.

    The id combines the lock name, event type, and event time, so the same
    physical event always maps to the same identifier.
    """
    parts = (log["LockName"], log["EventType"], log["Time"])
    return "_".join(str(part) for part in parts)

def get_latest_log(self, event_type: str, lock_name: str | None = None):
    """Retrieve the latest log for a specific event type, optionally by LockName.

    Args:
        event_type: Event type to match against the entity's current state.
        lock_name: Human-readable lock name; required — without it there is
            no entity to look up and ``None`` is returned.

    Returns:
        A dict with ``event_type`` and ``time`` (a parsed ``datetime``) when
        the entity's current state matches *event_type* and carries a valid
        ``timestamp`` attribute; otherwise ``None``.
    """
    if not lock_name:
        _LOGGER.debug("Lock name not provided. Unable to fetch latest log.")
        return None

    # Normalize lock_name for consistent naming
    normalized_name = slugify(lock_name)
    entity_id = f"event.{normalized_name}_{normalized_name}_event_log"  # Adjusted format for entity IDs

    # Log the generated entity ID
    _LOGGER.debug("Generated entity ID for lock '%s': %s", lock_name, entity_id)

    state = self.hass.states.get(entity_id)

    if not state or not state.attributes:
        _LOGGER.debug("No state or attributes found for entity '%s'.", entity_id)
        return None

    _LOGGER.debug("Fetched state for entity '%s': %s", entity_id, state)

    # Extract the latest log matching the event type
    latest_event_type = state.state
    latest_time = state.attributes.get("timestamp")

    # Log the latest event type and timestamp
    _LOGGER.debug(
        "Latest event for entity '%s': type=%s, time=%s, attributes=%s",
        entity_id,
        latest_event_type,
        latest_time,
        state.attributes,
    )

    if latest_event_type == event_type and latest_time:
        try:
            parsed_time = datetime.fromisoformat(latest_time)
        except ValueError as err:
            _LOGGER.warning(
                "Invalid timestamp format in entity '%s': %s (%s)",
                entity_id,
                latest_time,
                err,
            )
            return None
        _LOGGER.debug(
            "Parsed latest event time for entity '%s': %s",
            entity_id,
            parsed_time,
        )
        # Reuse the already-parsed datetime instead of parsing the string
        # a second time.
        return {
            "event_type": latest_event_type,
            "time": parsed_time,
        }

    _LOGGER.debug(
        "No matching event found for type '%s' in entity '%s'.",
        event_type,
        entity_id,
    )
    return None

def _process_devices(self, api_data) -> tuple[dict[str, Any], dict[str, Any]]:
"""Process device data from the API, including humidity, closed, and alarm sensors."""
devices: dict[str, Any] = {}
Expand Down Expand Up @@ -230,53 +330,93 @@ def _add_sensor_if_present(
sensor_key,
)

def _process_event_logs(self, logs, devices):
async def _process_event_logs(self, logs, devices):
"""Process event logs, associating them with the correct lock devices using LockName."""
grouped_events = {}
_LOGGER.debug("Starting event log processing. Total logs: %d", len(logs))

# Get the user's configured timezone from Home Assistant
user_time_zone = self.hass.config.time_zone or "UTC"
try:
tz = async_get_time_zone(user_time_zone)
except ZoneInfoNotFoundError:
_LOGGER.warning("Invalid timezone '%s', defaulting to UTC.", user_time_zone)
tz = async_get_time_zone("UTC")

records = list(reversed(logs.get("Records", [])))
_LOGGER.debug("Processing %d log records", len(records))

lock_names = {
device["name"]: serial_no
for serial_no, device in devices.items()
if device.get("model") == "Smart Lock"
}

for log_entry in logs:
for log_entry in records:
if not isinstance(log_entry, dict):
_LOGGER.warning("Skipping invalid log entry: %s", log_entry)
continue

lock_name = log_entry.get("LockName")
event_type = log_entry.get("EventType")
timestamp = log_entry.get("Time")
user = log_entry.get("User", "")
channel = log_entry.get("Channel", "")

if not lock_name or not event_type or not timestamp:
_LOGGER.warning("Skipping invalid log entry: %s", log_entry)
_LOGGER.warning("Skipping incomplete log entry: %s", log_entry)
continue

try:
utc_time = datetime.fromisoformat(timestamp.replace("Z", "+00:00"))
local_time = utc_time.astimezone(tz)
timestamp = local_time.isoformat()
except ValueError:
_LOGGER.warning("Invalid timestamp in log entry: %s", log_entry)
continue

serial_no = lock_names.get(lock_name)
if not serial_no:
_LOGGER.debug(
"Log entry for unknown lock name '%s', skipping: %s",
"Unknown lock name '%s', skipping log entry: %s",
lock_name,
log_entry,
)
continue

if serial_no not in grouped_events:
grouped_events[serial_no] = {}

if event_type not in grouped_events[serial_no]:
grouped_events[serial_no][event_type] = []

grouped_events[serial_no][event_type].append(
# Check against the latest event from Home Assistant
latest_log = self.get_latest_log(event_type, lock_name)
if latest_log:
try:
latest_time = latest_log["time"]
if datetime.fromisoformat(timestamp) <= latest_time:
_LOGGER.debug(
"Skipping event for lock '%s' (serial %s): event is not newer than %s.",
lock_name,
serial_no,
latest_time,
)
continue
except Exception as err:
_LOGGER.warning(
"Error comparing timestamps for event: %s. Skipping event.",
err,
)
continue

formatted_event = f"{lock_name} {event_type.replace('_', ' ')} by {user or 'unknown'} via {channel or 'unknown'}"

# Group valid events
grouped_events.setdefault(serial_no, {}).setdefault(event_type, []).append(
{
"time": timestamp,
"user": user,
"channel": channel,
"formatted_event": formatted_event,
}
)

_LOGGER.debug(
"Processed log entry for lock '%s' (serial %s) with event type '%s' at %s by %s via %s",
"Processed event for lock '%s' (serial %s) with type '%s' at %s by %s via %s",
lock_name,
serial_no,
event_type,
Expand All @@ -288,6 +428,7 @@ def _process_event_logs(self, logs, devices):
_LOGGER.debug("Grouped events by lock: %s", grouped_events)
return grouped_events

@property
def process_events(self) -> dict:
    """Return processed event logs grouped by device.

    Exposes the events collected by the last coordinator refresh; the
    mapping is keyed by device serial, then by event type.
    """
    return self._event_logs
5 changes: 4 additions & 1 deletion custom_components/sector/endpoints.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,10 @@ def get_data_endpoints(panel_id):
f"{API_URL}/api/panel/GetSmartplugStatus?panelId={panel_id}",
),
"Lock Status": ("GET", f"{API_URL}/api/panel/GetLockStatus?panelId={panel_id}"),
"Logs": ("GET", f"{API_URL}/api/panel/GetLogs?panelId={panel_id}"),
"Logs": (
"GET",
f"{API_URL}/api/v2/panel/logs?panelid={panel_id}&pageNumber=1&pageSize=5",
),
}
return endpoints

Expand Down
Loading
Loading